From 13bac489aeb4723ba25f6ef9455f728ecfbf1ad2 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 28 Feb 2020 15:23:02 +0100 Subject: [PATCH 0001/1416] Add first draft of gcom-c sgli reader --- satpy/etc/composites/sgli.yaml | 148 +++++++++ satpy/etc/readers/sgli_l1b.yaml | 523 ++++++++++++++++++++++++++++++++ satpy/readers/sgli_l1b.py | 123 ++++++++ 3 files changed, 794 insertions(+) create mode 100644 satpy/etc/composites/sgli.yaml create mode 100644 satpy/etc/readers/sgli_l1b.yaml create mode 100644 satpy/readers/sgli_l1b.py diff --git a/satpy/etc/composites/sgli.yaml b/satpy/etc/composites/sgli.yaml new file mode 100644 index 0000000000..5dfd344422 --- /dev/null +++ b/satpy/etc/composites/sgli.yaml @@ -0,0 +1,148 @@ +sensor_name: visir/sgli + + +modifiers: + + rayleigh_corrected: + compositor: !!python/name:satpy.composites.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: rayleigh_only + prerequisites: + - name: 'VN8' + modifiers: [sunz_corrected] + optional_prerequisites: + - satellite_azimuth_angle + - satellite_zenith_angle + - solar_azimuth_angle + - solar_zenith_angle + + rayleigh_corrected_marine_clean: + compositor: !!python/name:satpy.composites.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: marine_clean_aerosol + prerequisites: + - name: 'VN8' + modifiers: [sunz_corrected] + optional_prerequisites: + - satellite_azimuth_angle + - satellite_zenith_angle + - solar_azimuth_angle + - solar_zenith_angle + + rayleigh_corrected_marine_tropical: + compositor: !!python/name:satpy.composites.PSPRayleighReflectance + atmosphere: tropical + aerosol_type: marine_tropical_aerosol + prerequisites: + - name: 'VN8' + modifiers: [sunz_corrected] + optional_prerequisites: + - satellite_azimuth_angle + - satellite_zenith_angle + - solar_azimuth_angle + - solar_zenith_angle + + rayleigh_corrected_desert: + compositor: !!python/name:satpy.composites.PSPRayleighReflectance + atmosphere: tropical + aerosol_type: desert_aerosol + prerequisites: + - name: 'VN8' + modifiers: [sunz_corrected] + optional_prerequisites: + - satellite_azimuth_angle + - satellite_zenith_angle + - solar_azimuth_angle + - solar_zenith_angle + + rayleigh_corrected_land: + compositor: !!python/name:satpy.composites.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: continental_average_aerosol + prerequisites: + - name: 'VN8' + modifiers: [sunz_corrected] + optional_prerequisites: + - satellite_azimuth_angle + - satellite_zenith_angle + - solar_azimuth_angle + - solar_zenith_angle + + +composites: + true_color: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'VN8' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'VN5' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'VN3' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + standard_name: true_color + + true_color_land: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'Oa08' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land] + - name: 'Oa06' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land] + - name: 'Oa03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land] + standard_name: true_color + + true_color_desert: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'Oa08' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] + - 
name: 'Oa06'
+      modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert]
+    - name: 'Oa03'
+      modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert]
+    standard_name: true_color
+
+  true_color_marine_clean:
+    compositor: !!python/name:satpy.composites.GenericCompositor
+    prerequisites:
+    - name: 'Oa08'
+      modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean]
+    - name: 'Oa06'
+      modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean]
+    - name: 'Oa03'
+      modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean]
+    standard_name: true_color
+
+  true_color_marine_tropical:
+    compositor: !!python/name:satpy.composites.GenericCompositor
+    prerequisites:
+    - name: 'Oa08'
+      modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical]
+    - name: 'Oa06'
+      modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical]
+    - name: 'Oa03'
+      modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical]
+    standard_name: true_color
+
+  true_color_raw:
+    compositor: !!python/name:satpy.composites.GenericCompositor
+    prerequisites:
+    - name: 'VN8'
+      modifiers: [effective_solar_pathlength_corrected]
+    - name: 'VN5'
+      modifiers: [effective_solar_pathlength_corrected]
+    - name: 'VN3'
+      modifiers: [effective_solar_pathlength_corrected]
+    standard_name: true_color
+
+  ocean_color:
+    compositor: !!python/name:satpy.composites.GenericCompositor
+    prerequisites:
+    - name: 'Oa08'
+      modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected]
+    - name: 'Oa06'
+      modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected]
+    - name: 'Oa03'
+      modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected]
+    standard_name: ocean_color
diff --git a/satpy/etc/readers/sgli_l1b.yaml b/satpy/etc/readers/sgli_l1b.yaml
new file mode 100644
index 0000000000..ce9b2d0f5e
--- /dev/null
+++ b/satpy/etc/readers/sgli_l1b.yaml
@@ -0,0 +1,523 @@
+reader:
+  description: Reader for SGLI data
+  reference: https://gportal.jaxa.jp/gpr/assets/mng_upload/GCOM-C/SGLI_Level1_Product_Format_Description_en.pdf
+  name: sgli_l1b
+  sensors: [sgli]
+  default_channels: []
+  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
+
+file_types:
+  gcom-c_l1b_v:
+    file_reader: !!python/name:satpy.readers.sgli_l1b.HDF5SGLI
+    # GC1SG1_202002231142M25511_1BSG_VNRDQ_1008.h5
+    file_patterns:
+     - '{platform_id:3s}{sensor_id:3s}_{start_time:%Y%m%d%H%M}{second:1s}{path:3s}{scene:2s}_1BS{type:1s}_VNR{mode:1s}{resolution:1s}_{algorithm_version:1s}{parameter_version:3d}.h5'
+    #- '{platform_id:3s}{sensor_id:3s}_{start_time:%Y%m%d%H%M}{second:1s}{path:3s}{scene:2s}_{level:2s}S{type:1s}_{subsystem:3s}{mode:1s}{resolution:1s}_{algorithm_version:1s}{parameter_version:3d}.h5'
+
+datasets:
+  longitude_v:
+    name: longitude_v
+    resolution: [250, 1000]
+    standard_name: longitude
+    units: degree
+    file_key: Geometry_data/Longitude
+    file_type: gcom-c_l1b_v
+
+  latitude_v:
+    name: latitude_v
+    resolution: [250, 1000]
+    standard_name: latitude
+    units: degree
+    file_key: Geometry_data/Latitude
+    file_type: gcom-c_l1b_v
+
+  solar_zenith_angle:
+    name: solar_zenith_angle
+    sensor: sgli
+    wavelength: [0.3925,0.4,0.4075]
+    resolution: [250, 1000]
+    coordinates: [longitude_v, latitude_v]
+    file_type: gcom-c_l1b_v
+    file_key: Geometry_data/Solar_zenith
+
+  solar_azimuth_angle:
+    name: solar_azimuth_angle
+    sensor: sgli
+    
wavelength: [0.3925,0.4,0.4075] + resolution: [250, 1000] + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Geometry_data/Solar_azimuth + + satellite_zenith_angle: + name: satellite_zenith_angle + sensor: sgli + wavelength: [0.3925,0.4,0.4075] + resolution: [250, 1000] + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Geometry_data/Sensor_zenith + + satellite_azimuth_angle: + name: satellite_azimuth_angle + sensor: sgli + wavelength: [0.3925,0.4,0.4075] + resolution: [250, 1000] + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Geometry_data/Sensor_azimuth + + + VN1: + name: VN1 + sensor: sgli + wavelength: [0.375,0.38,0.385] + resolution: [250, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Image_data/Lt_VN01 + + VN3: + name: VN3 + sensor: sgli + wavelength: [0.438, 0.443, 0.448] + resolution: [250, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Image_data/Lt_VN03 + + VN5: + name: VN5 + sensor: sgli + wavelength: [0.520,0.530,0.540] + resolution: [250, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Image_data/Lt_VN05 + + VN6: + name: VN6 + sensor: sgli + wavelength: [0.555,0.565,0.575] + resolution: [250, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Image_data/Lt_VN06 + + VN8: + name: VN8 + sensor: sgli + wavelength: [0.6635,0.6735,0.6835] + resolution: [250, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Image_data/Lt_VN08 + + VN11: + name: VN11 + sensor: sgli + wavelength: [0.8585,0.8685,0.8785] + resolution: [250, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Image_data/Lt_VN11 + + # Oa02: + # name: Oa02 + # sensor: olci + # wavelength: [0.4075, 0.4125, 0.4175] + # resolution: 300 + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # coordinates: [longitude, latitude] + # file_type: esa_l1b + + # Oa03: + # name: Oa03 + # sensor: olci + # wavelength: [0.4375,0.4425,0.4475] + # resolution: 300 + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # 
standard_name: toa_bidirectional_reflectance + # units: "%" + # coordinates: [longitude, latitude] + # file_type: esa_l1b + + # Oa04: + # name: Oa04 + # sensor: olci + # wavelength: [0.485,0.49,0.495] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa05: + # name: Oa05 + # sensor: olci + # wavelength: [0.505,0.51,0.515] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa06: + # name: Oa06 + # sensor: olci + # wavelength: [0.555,0.56,0.565] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa07: + # name: Oa07 + # sensor: olci + # wavelength: [0.615,0.62,0.625] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa08: + # name: Oa08 + # sensor: olci + # wavelength: [0.66,0.665,0.67] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa09: + # name: Oa09 + # sensor: olci + # wavelength: [0.67,0.67375,0.6775] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa10: + # name: Oa10 + # sensor: olci + # wavelength: [0.6775,0.68125,0.685] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa11: + # name: Oa11 + # sensor: olci + # wavelength: [0.70375,0.70875,0.71375] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa12: + # name: Oa12 + # sensor: olci + # wavelength: [0.75,0.75375,0.7575] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa13: + # name: Oa13 + # sensor: olci + # wavelength: [0.76,0.76125,0.7625] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # 
radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa14: + # name: Oa14 + # sensor: olci + # wavelength: [0.760625, 0.764375, 0.768125] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa15: + # name: Oa15 + # sensor: olci + # wavelength: [0.76625, 0.7675, 0.76875] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa16: + # name: Oa16 + # sensor: olci + # wavelength: [0.77125, 0.77875, 0.78625] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa17: + # name: Oa17 + # sensor: olci + # wavelength: [0.855, 0.865, 0.875] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa18: + # name: Oa18 + # sensor: olci + # wavelength: [0.88, 0.885, 0.89] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa19: + # name: Oa19 + # sensor: olci + # wavelength: [0.895, 0.9, 0.905] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa20: + # name: Oa20 + # sensor: olci + # wavelength: [0.93, 0.94, 0.95] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # Oa21: + # name: Oa21 + # sensor: olci + # wavelength: [1.0, 1.02, 1.04] + # resolution: 300 + # coordinates: [longitude, latitude] + # calibration: + # radiance: + # standard_name: toa_outgoing_radiance_per_unit_wavelength + # units: W m-2 um-1 sr-1 + # reflectance: + # standard_name: toa_bidirectional_reflectance + # units: "%" + # file_type: esa_l1b + + # solar_zenith_angle: + # name: solar_zenith_angle + # sensor: olci + # resolution: 300 + # coordinates: [longitude, latitude] + # file_type: esa_angles + + # solar_azimuth_angle: + # name: solar_azimuth_angle + # sensor: olci + # resolution: 300 + # coordinates: [longitude, latitude] + # file_type: esa_angles + + # satellite_zenith_angle: + # name: satellite_zenith_angle + # sensor: olci + # resolution: 300 + # 
coordinates: [longitude, latitude]
+  #   file_type: esa_angles
+
+  # satellite_azimuth_angle:
+  #   name: satellite_azimuth_angle
+  #   sensor: olci
+  #   resolution: 300
+  #   coordinates: [longitude, latitude]
+  #   file_type: esa_angles
+
+  # humidity:
+  #   name: humidity
+  #   sensor: olci
+  #   resolution: 300
+  #   coordinates: [longitude, latitude]
+  #   file_type: esa_meteo
+
+  # sea_level_pressure:
+  #   name: sea_level_pressure
+  #   sensor: olci
+  #   resolution: 300
+  #   coordinates: [longitude, latitude]
+  #   file_type: esa_meteo
+
+  # total_columnar_water_vapour:
+  #   name: total_columnar_water_vapour
+  #   sensor: olci
+  #   resolution: 300
+  #   coordinates: [longitude, latitude]
+  #   file_type: esa_meteo
+
+  # total_ozone:
+  #   name: total_ozone
+  #   sensor: olci
+  #   resolution: 300
+  #   coordinates: [longitude, latitude]
+  #   file_type: esa_meteo
diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py
new file mode 100644
index 0000000000..4d13be55e2
--- /dev/null
+++ b/satpy/readers/sgli_l1b.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2020 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy.  If not, see <http://www.gnu.org/licenses/>.
+"""GCOM-C SGLI L1b reader.

+GCOM-C has an imager instrument: SGLI
+https://www.wmo-sat.info/oscar/instruments/view/505
+
+Test data is available here:
+https://suzaku.eorc.jaxa.jp/GCOM_C/data/product_std.html
+The live data is available from here:
+https://gportal.jaxa.jp/gpr/search?tab=1
+And the format description is here:
+https://gportal.jaxa.jp/gpr/assets/mng_upload/GCOM-C/SGLI_Level1_Product_Format_Description_en.pdf
+
+"""
+
+from satpy.readers.file_handlers import BaseFileHandler
+from datetime import datetime
+from satpy import CHUNK_SIZE
+import xarray as xr
+import dask.array as da
+import h5py
+import logging
+import numpy as np
+
+logger = logging.getLogger(__name__)
+
+resolutions = {'Q': 250,
+               'K': 1000,
+               'L': 1000}
+
+
+def interpolate(arr, sampling, full_shape):
+    """Interpolate the angles and navigation."""
+    # TODO: daskify this!
+    # TODO: do it in cartesian coordinates! Problems at the date line and at
+    # the poles are otherwise possible.
+    tie_x = np.arange(0, arr.shape[0] * sampling, sampling)
+    tie_y = np.arange(0, arr.shape[1] * sampling, sampling)
+    full_x = np.arange(0, full_shape[0])
+    full_y = np.arange(0, full_shape[1])
+
+    from scipy.interpolate import RectBivariateSpline
+    spl = RectBivariateSpline(
+        tie_x, tie_y, arr)
+
+    values = spl(full_x, full_y)
+
+    return da.from_array(values, chunks=(CHUNK_SIZE, CHUNK_SIZE))
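+
+
+# One possible answer to the "daskify" TODO above, given here as a sketch
+# only: the helper below is not part of the reader, it just shows how the
+# spline evaluation could be deferred with dask.delayed so that nothing is
+# computed at load time.
+def interpolate_lazy(arr, sampling, full_shape):
+    """Lazily interpolate the angles and navigation (sketch only)."""
+    import dask
+    from scipy.interpolate import RectBivariateSpline
+
+    def _evaluate():
+        # Same math as interpolate() above, but run only at compute time.
+        tie_x = np.arange(0, arr.shape[0] * sampling, sampling)
+        tie_y = np.arange(0, arr.shape[1] * sampling, sampling)
+        spl = RectBivariateSpline(tie_x, tie_y, arr[:])
+        return spl(np.arange(full_shape[0]), np.arange(full_shape[1]))
+
+    return da.from_delayed(dask.delayed(_evaluate)(),
+                           shape=full_shape, dtype=np.float64)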
+
+
+class HDF5SGLI(BaseFileHandler):
+    """File handler for the SGLI l1b data."""
+
+    def __init__(self, filename, filename_info, filetype_info):
+        """Initialize the filehandler."""
+        super(HDF5SGLI, self).__init__(filename, filename_info, filetype_info)
+        self.resolution = resolutions[self.filename_info['resolution']]
+        self.fh = h5py.File(self.filename, 'r')
+
+    @property
+    def start_time(self):
+        """Get the start time."""
+        the_time = self.fh['Global_attributes'].attrs['Scene_start_time'].item()
+        return datetime.strptime(the_time.decode('ascii'), '%Y%m%d %H:%M:%S.%f')
+
+    @property
+    def end_time(self):
+        """Get the end time."""
+        the_time = self.fh['Global_attributes'].attrs['Scene_end_time'].item()
+        return datetime.strptime(the_time.decode('ascii'), '%Y%m%d %H:%M:%S.%f')
+
+    def get_dataset(self, key, info):
+        """Get the dataset."""
+        if key.resolution != self.resolution:
+            return
+
+        h5dataset = self.fh[info['file_key']]
+        resampling_interval = h5dataset.attrs.get('Resampling_interval', 1)
+        if resampling_interval != 1:
+            logger.debug('Interpolating %s.', key.name)
+            full_shape = (self.fh['Image_data'].attrs['Number_of_lines'],
+                          self.fh['Image_data'].attrs['Number_of_pixels'])
+            dataset = interpolate(h5dataset, resampling_interval, full_shape)
+        else:
+            dataset = da.from_array(h5dataset[:].astype('<u2'), chunks=CHUNK_SIZE)
+        dataset = xr.DataArray(dataset, dims=['y', 'x'])
+
+        dataset.attrs.update(info)
+        with xr.set_options(keep_attrs=True):
+            if 'Mask' in h5dataset.attrs:
+                dataset = dataset & h5dataset.attrs['Mask'].item()
+            if 'Maximum_valid_DN' in h5dataset.attrs:
+                # dataset = dataset.where(dataset <= h5dataset.attrs['Maximum_valid_DN'].item())
+                pass
+            if key.name.startswith('VN'):
+                if key.calibration == 'counts':
+                    pass
+                if key.calibration == 'radiance':
+                    dataset = dataset * h5dataset.attrs['Slope'] + h5dataset.attrs['Offset']
+                if key.calibration == 'reflectance':
+                    # dataset = dataset * h5dataset.attrs['Slope_reflectance'] + h5dataset.attrs['Offset_reflectance']
+                    # dataset = dataset * 100
+                    # equivalent to the two lines above
+                    dataset = (dataset * h5dataset.attrs['Slope_reflectance']
+                               + h5dataset.attrs['Offset_reflectance']) * 100
+
+        dataset.attrs['platform_name'] = 'GCOM-C1'
+        return dataset

From: Martin Raspaud
Date: Fri, 28 Feb 2020 15:37:23 +0100
Subject: [PATCH 0002/1416] Apply scale and offset for all datasets

---
 satpy/readers/sgli_l1b.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py
index 4d13be55e2..80c06ffab5 100644
--- a/satpy/readers/sgli_l1b.py
+++ b/satpy/readers/sgli_l1b.py
@@ -118,6 +118,8 @@ def get_dataset(self, key, info):
                     # equivalent to the two lines above
                     dataset = (dataset * h5dataset.attrs['Slope_reflectance']
                                + h5dataset.attrs['Offset_reflectance']) * 100
+            else:
+                dataset = dataset * h5dataset.attrs['Slope'] + h5dataset.attrs['Offset']
 
         dataset.attrs['platform_name'] = 'GCOM-C1'
         return dataset

From ba2bc3821e53e8140ddb4cfefae09eb7ef7d583f Mon Sep 17 00:00:00 2001
From: Martin Raspaud
Date: Tue, 3 Mar 2020 17:40:09 +0100
Subject: [PATCH 0003/1416] Fix saturation and missing value

---
 satpy/etc/composites/sgli.yaml  | 22 ++++++++++++++++------
 satpy/etc/readers/sgli_l1b.yaml | 16 ++++++++++++++++
 satpy/readers/sgli_l1b.py       |  9 ++++++++-
 3 files changed, 40 insertions(+), 7 deletions(-)

diff --git a/satpy/etc/composites/sgli.yaml b/satpy/etc/composites/sgli.yaml
index 5dfd344422..f66744c037 100644
--- a/satpy/etc/composites/sgli.yaml
+++ b/satpy/etc/composites/sgli.yaml
@@ -8,7 +8,7 @@ modifiers:
     atmosphere: us-standard
     aerosol_type: rayleigh_only
     prerequisites:
-    - name: 'VN8'
+    - name: 'VN9'
       modifiers: [sunz_corrected]
     optional_prerequisites:
     - satellite_azimuth_angle
     - satellite_zenith_angle
     - solar_azimuth_angle
     - solar_zenith_angle
@@ -73,10 +73,18 @@ composites:
   true_color:
     compositor: !!python/name:satpy.composites.GenericCompositor
     prerequisites:
-    - name: 'VN8'
-      modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected]
-    - name: 'VN5'
-      modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected]
+    - compositor: 
!!python/name:satpy.composites.Filler + prerequisites: + - name: 'VN8' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'VN3' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - compositor: !!python/name:satpy.composites.Filler + prerequisites: + - name: 'VN5' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'VN3' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'VN3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: true_color @@ -126,8 +134,10 @@ composites: standard_name: true_color true_color_raw: - compositor: !!python/name:satpy.composites.GenericCompositor + compositor: !!python/name:satpy.composites.FillingCompositor prerequisites: + - name: 'VN3' + modifiers: [effective_solar_pathlength_corrected] - name: 'VN8' modifiers: [effective_solar_pathlength_corrected] - name: 'VN5' diff --git a/satpy/etc/readers/sgli_l1b.yaml b/satpy/etc/readers/sgli_l1b.yaml index ce9b2d0f5e..8249e95513 100644 --- a/satpy/etc/readers/sgli_l1b.yaml +++ b/satpy/etc/readers/sgli_l1b.yaml @@ -150,6 +150,22 @@ datasets: file_type: gcom-c_l1b_v file_key: Image_data/Lt_VN08 + VN9: + name: VN9 + sensor: sgli + wavelength: [0.757,0.763,0.769] + resolution: [250, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Image_data/Lt_VN09 + VN11: name: VN11 sensor: sgli diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py index 80c06ffab5..0483299088 100644 --- a/satpy/readers/sgli_l1b.py +++ b/satpy/readers/sgli_l1b.py @@ -103,7 +103,14 @@ def get_dataset(self, key, info): dataset.attrs.update(info) with xr.set_options(keep_attrs=True): if 'Mask' in h5dataset.attrs: - dataset = dataset & h5dataset.attrs['Mask'].item() + mask_value = h5dataset.attrs['Mask'].item() + dataset = dataset & mask_value + if 'Bit00(LSB)-13' in h5dataset.attrs: + mask_info = h5dataset.attrs['Bit00(LSB)-13'].item() + mask_vals = mask_info.split(b'\n')[1:] + missing = int(mask_vals[0].split(b':')[0].strip()) + saturation = int(mask_vals[1].split(b':')[0].strip()) + dataset = dataset.where(dataset < min(missing, saturation)) if 'Maximum_valid_DN' in h5dataset.attrs: # dataset = dataset.where(dataset <= h5dataset.attrs['Maximum_valid_DN'].item()) pass From 5a0b66ceb385dc967e1d6fc48e8f6251dfdb8b02 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 6 Mar 2020 16:45:29 +0100 Subject: [PATCH 0004/1416] Add support for polarized and IR channels --- satpy/etc/readers/sgli_l1b.yaml | 255 +++++++++++++++++++++++++++++++- satpy/readers/sgli_l1b.py | 11 +- 2 files changed, 258 insertions(+), 8 deletions(-) diff --git a/satpy/etc/readers/sgli_l1b.yaml b/satpy/etc/readers/sgli_l1b.yaml index 8249e95513..41cb3bed9e 100644 --- a/satpy/etc/readers/sgli_l1b.yaml +++ b/satpy/etc/readers/sgli_l1b.yaml @@ -14,6 +14,19 @@ file_types: - '{platform_id:3s}{sensor_id:3s}_{start_time:%Y%m%d%H%M}{second:1s}{path:3s}{scene:2s}_1BS{type:1s}_VNR{mode:1s}{resolution:1s}_{algorithm_version:1s}{parameter_version:3d}.h5' #- '{platform_id:3s}{sensor_id:3s}_{start_time:%Y%m%d%H%M}{second:1s}{path:3s}{scene:2s}_{level:2s}S{type:1s}_{subsystem:3s}{mode:1s}{resolution:1s}_{algorithm_version:1s}{parameter_version:3d}.h5' + gcom-c_l1b_p: + file_reader: !!python/name:satpy.readers.sgli_l1b.HDF5SGLI + 
file_patterns: + - '{platform_id:3s}{sensor_id:3s}_{start_time:%Y%m%d%H%M}{second:1s}{path:3s}{scene:2s}_1BS{type:1s}_POL{mode:1s}{resolution:1s}_{algorithm_version:1s}{parameter_version:3d}.h5' + #- '{platform_id:3s}{sensor_id:3s}_{start_time:%Y%m%d%H%M}{second:1s}{path:3s}{scene:2s}_{level:2s}S{type:1s}_{subsystem:3s}{mode:1s}{resolution:1s}_{algorithm_version:1s}{parameter_version:3d}.h5' + + gcom-c_l1b_ir: + file_reader: !!python/name:satpy.readers.sgli_l1b.HDF5SGLI + file_patterns: + - '{platform_id:3s}{sensor_id:3s}_{start_time:%Y%m%d%H%M}{second:1s}{path:3s}{scene:2s}_1BS{type:1s}_IRS{mode:1s}{resolution:1s}_{algorithm_version:1s}{parameter_version:3d}.h5' + #- '{platform_id:3s}{sensor_id:3s}_{start_time:%Y%m%d%H%M}{second:1s}{path:3s}{scene:2s}_{level:2s}S{type:1s}_{subsystem:3s}{mode:1s}{resolution:1s}_{algorithm_version:1s}{parameter_version:3d}.h5' + + datasets: longitude_v: name: longitude_v @@ -33,6 +46,44 @@ datasets: file_key: Geometry_data/Latitude file_type: gcom-c_l1b_v + longitude_p: + name: longitude_p + resolution: 1000 + polarization: [0, -60, 60] + file_type: gcom-c_l1b + standard_name: longitude + units: degree + file_key: Geometry_data/Longitude + file_type: gcom-c_l1b_p + + latitude_p: + name: latitude_p + resolution: 1000 + polarization: [0, -60, 60] + file_type: gcom-c_l1b + standard_name: latitude + units: degree + file_key: Geometry_data/Latitude + file_type: gcom-c_l1b_p + + longitude_ir: + name: longitude_ir + resolution: [250, 500, 1000] + file_type: gcom-c_l1b + standard_name: longitude + units: degree + file_key: Geometry_data/Longitude + file_type: gcom-c_l1b_ir + + latitude_ir: + name: latitude_ir + resolution: [250, 500, 1000] + file_type: gcom-c_l1b + standard_name: latitude + units: degree + file_key: Geometry_data/Latitude + file_type: gcom-c_l1b_ir + solar_zenith_angle: name: solar_zenith_angle sensor: sgli @@ -73,7 +124,7 @@ datasets: VN1: name: VN1 sensor: sgli - wavelength: [0.375,0.38,0.385] + wavelength: [0.375, 0.38, 0.385] resolution: [250, 1000] calibration: radiance: @@ -86,6 +137,23 @@ datasets: file_type: gcom-c_l1b_v file_key: Image_data/Lt_VN01 + VN2: + name: VN2 + sensor: sgli + wavelength: [0.407, 0.412, 0.417] + resolution: [250, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Image_data/Lt_VN02 + + VN3: name: VN3 sensor: sgli @@ -102,10 +170,27 @@ datasets: file_type: gcom-c_l1b_v file_key: Image_data/Lt_VN03 + VN4: + name: VN4 + sensor: sgli + wavelength: [0.485,0.49,0.495] + resolution: [250, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Image_data/Lt_VN04 + + VN5: name: VN5 sensor: sgli - wavelength: [0.520,0.530,0.540] + wavelength: [0.520, 0.530, 0.540] resolution: [250, 1000] calibration: radiance: @@ -121,7 +206,7 @@ datasets: VN6: name: VN6 sensor: sgli - wavelength: [0.555,0.565,0.575] + wavelength: [0.555, 0.565, 0.575] resolution: [250, 1000] calibration: radiance: @@ -134,10 +219,26 @@ datasets: file_type: gcom-c_l1b_v file_key: Image_data/Lt_VN06 + VN7: + name: VN7 + sensor: sgli + wavelength: [0.6635, 0.6735, 0.6835] + resolution: [250, 1000] + calibration: + radiance: + 
standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Image_data/Lt_VN07 + VN8: name: VN8 sensor: sgli - wavelength: [0.6635,0.6735,0.6835] + wavelength: [0.6635, 0.6735, 0.6835] resolution: [250, 1000] calibration: radiance: @@ -153,7 +254,7 @@ datasets: VN9: name: VN9 sensor: sgli - wavelength: [0.757,0.763,0.769] + wavelength: [0.757, 0.763, 0.769] resolution: [250, 1000] calibration: radiance: @@ -166,10 +267,26 @@ datasets: file_type: gcom-c_l1b_v file_key: Image_data/Lt_VN09 + VN10: + name: VN10 + sensor: sgli + wavelength: [0.8585, 0.8685, 0.8785] + resolution: [250, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_v, latitude_v] + file_type: gcom-c_l1b_v + file_key: Image_data/Lt_VN10 + VN11: name: VN11 sensor: sgli - wavelength: [0.8585,0.8685,0.8785] + wavelength: [0.8585, 0.8685, 0.8785] resolution: [250, 1000] calibration: radiance: @@ -182,6 +299,132 @@ datasets: file_type: gcom-c_l1b_v file_key: Image_data/Lt_VN11 + P1: + name: P1 + sensor: sgli + wavelength: [0.6635, 0.6735, 0.6835] + resolution: 1000 + polarization: [0, -60, 60] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_p, latitude_p] + file_type: gcom-c_l1b_p + file_key: Image_data/Lt_P1_{pol} + + P2: + name: P2 + sensor: sgli + wavelength: [0.8585, 0.8685, 0.8785] + resolution: 1000 + polarization: [0, -60, 60] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_p, latitude_p] + file_type: gcom-c_l1b_p + file_key: Image_data/Lt_P2_{pol} + + SW1: + name: SW1 + sensor: sgli + wavelength: [1.04, 1.05, 1.05] + resolution: 1000 + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_ir, latitude_ir] + file_type: gcom-c_l1b_ir + file_key: Image_data/Lt_SW01 + + SW2: + name: SW2 + sensor: sgli + wavelength: [1.37, 1.38, 1.39] + resolution: 1000 + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_ir, latitude_ir] + file_type: gcom-c_l1b_ir + file_key: Image_data/Lt_SW02 + + SW3: + name: SW3 + sensor: sgli + wavelength: [1.53, 1.63, 1.73] + resolution: [250, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_ir, latitude_ir] + file_type: gcom-c_l1b_ir + file_key: Image_data/Lt_SW03 + + SW4: + name: SW4 + sensor: sgli + wavelength: [2.185, 2.21, 2.235] + resolution: 1000 + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + coordinates: [longitude_ir, 
latitude_ir] + file_type: gcom-c_l1b_ir + file_key: Image_data/Lt_SW04 + + T1: + name: T1 + sensor: sgli + wavelength: [10.45, 10.8, 11.15] + resolution: [250, 500, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + + coordinates: [longitude_ir, latitude_ir] + file_type: gcom-c_l1b_ir + file_key: Image_data/Lt_TI01 + + T2: + name: T2 + sensor: sgli + wavelength: [11.65, 12.0, 12.35] + resolution: [250, 500, 1000] + calibration: + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + + coordinates: [longitude_ir, latitude_ir] + file_type: gcom-c_l1b_ir + file_key: Image_data/Lt_TI02 + # Oa02: # name: Oa02 # sensor: olci diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py index 0483299088..2455c4b63b 100644 --- a/satpy/readers/sgli_l1b.py +++ b/satpy/readers/sgli_l1b.py @@ -90,7 +90,14 @@ def get_dataset(self, key, info): if key.resolution != self.resolution: return - h5dataset = self.fh[info['file_key']] + if key.polarization is not None: + pols = {0: '0', -60: 'm60', 60: 'p60'} + file_key = info['file_key'].format(pol=pols[key.polarization]) + else: + file_key = info['file_key'] + + h5dataset = self.fh[file_key] + resampling_interval = h5dataset.attrs.get('Resampling_interval', 1) if resampling_interval != 1: logger.debug('Interpolating %s.', key.name) @@ -114,7 +121,7 @@ def get_dataset(self, key, info): if 'Maximum_valid_DN' in h5dataset.attrs: # dataset = dataset.where(dataset <= h5dataset.attrs['Maximum_valid_DN'].item()) pass - if key.name.startswith('VN'): + if key.name[:2] in ['VN', 'SW', 'P1', 'P2']: if key.calibration == 'counts': pass if key.calibration == 'radiance': From daefbac7484ec1f7afe09e2b8816a5165bf7a29b Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 2 Jun 2021 12:44:11 +0000 Subject: [PATCH 0005/1416] Add GMS-5 VISSR reader --- satpy/etc/readers/gms5-vissr_l1b.yaml | 92 +++ satpy/readers/gms5_vissr_l1b.py | 624 ++++++++++++++++++ satpy/readers/gms5_vissr_navigation.py | 480 ++++++++++++++ satpy/readers/hrit_jma.py | 12 +- satpy/readers/utils.py | 8 + satpy/tests/reader_tests/test_ahi_hrit.py | 8 - .../tests/reader_tests/test_gms5_vissr_l1b.py | 359 ++++++++++ satpy/tests/reader_tests/test_utils.py | 7 + 8 files changed, 1572 insertions(+), 18 deletions(-) create mode 100644 satpy/etc/readers/gms5-vissr_l1b.yaml create mode 100644 satpy/readers/gms5_vissr_l1b.py create mode 100644 satpy/readers/gms5_vissr_navigation.py create mode 100644 satpy/tests/reader_tests/test_gms5_vissr_l1b.py diff --git a/satpy/etc/readers/gms5-vissr_l1b.yaml b/satpy/etc/readers/gms5-vissr_l1b.yaml new file mode 100644 index 0000000000..d295eeac5b --- /dev/null +++ b/satpy/etc/readers/gms5-vissr_l1b.yaml @@ -0,0 +1,92 @@ +reader: + name: gms5-vissr_l1b + short_name: GMS-5 VISSR L1b + long_name: GMS-5 VISSR Level 1b + description: > + Reader for GMS-5 VISSR Level 1b data. 
References: + + - https://www.data.jma.go.jp/mscweb/en/operation/fig/VISSR_FORMAT_GMS-5.pdf + - https://www.data.jma.go.jp/mscweb/en/operation/fig/GMS_Users_Guide_3rd_Edition_Rev1.pdf + + sensors: [gms5-vissr] + default_channels: [] + reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader + +file_types: + gms5_vissr_vis: + file_reader: !!python/name:satpy.readers.gms5_vissr_l1b.GMS5VISSRFileHandler + file_patterns: + - 'VISSR_{start_time:%Y%m%d_%H%M}_VIS.A.IMG' + + gms5_vissr_ir1: + file_reader: !!python/name:satpy.readers.gms5_vissr_l1b.GMS5VISSRFileHandler + file_patterns: + - 'VISSR_{start_time:%Y%m%d_%H%M}_IR1.A.IMG' + + gms5_vissr_ir2: + file_reader: !!python/name:satpy.readers.gms5_vissr_l1b.GMS5VISSRFileHandler + file_patterns: + - 'VISSR_{start_time:%Y%m%d_%H%M}_IR2.A.IMG' + + gms5_vissr_ir3: + file_reader: !!python/name:satpy.readers.gms5_vissr_l1b.GMS5VISSRFileHandler + file_patterns: + - 'VISSR_{start_time:%Y%m%d_%H%M}_IR3.A.IMG' + + +datasets: + VIS: + name: VIS + sensor: gms5-vissr + wavelength: [0.55, 0.73, 0.9] + resolution: 1250 + calibration: + counts: + standard_name: counts + units: 1 +# reflectance: +# standard_name: toa_bidirectional_reflectance +# units: "%" + file_type: gms5_vissr_vis + + IR1: + name: IR1 + sensor: gms5-vissr + wavelength: [10.5, 11.0, 11.5] + resolution: 5000 + calibration: + counts: + standard_name: counts + units: 1 +# brightness_temperature: +# standard_name: toa_brightness_temperature +# units: "K" + file_type: gms5_vissr_ir1 + + IR2: + name: IR2 + sensor: gms5-vissr + wavelength: [11.5, 12.0, 12.5] + resolution: 5000 + calibration: + counts: + standard_name: counts + units: 1 +# brightness_temperature: +# standard_name: toa_brightness_temperature +# units: "K" + file_type: gms5_vissr_ir2 + + IR3: + name: IR3 + sensor: gms5-vissr + wavelength: [6.5, 6.75, 7.0] + resolution: 5000 + calibration: + counts: + standard_name: counts + units: 1 +# brightness_temperature: +# standard_name: toa_brightness_temperature +# units: "K" + file_type: gms5_vissr_ir3 diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py new file mode 100644 index 0000000000..7c089ac347 --- /dev/null +++ b/satpy/readers/gms5_vissr_l1b.py @@ -0,0 +1,624 @@ +"""Reader for GMS-5 VISSR Level 1B data. + +Introduction +------------ +TODO + +Navigation +---------- +TODO + +References +---------- + + - [FMT]: `VISSR Format Description`_ + - [UG]: `GMS User Guide`_ + +.. _VISSR Format Description: + https://www.data.jma.go.jp/mscweb/en/operation/fig/VISSR_FORMAT_GMS-5.pdf +.. 
_GMS User Guide: + https://www.data.jma.go.jp/mscweb/en/operation/fig/GMS_Users_Guide_3rd_Edition_Rev1.pdf +""" + +import dask.array as da +import numba +import numpy as np +import xarray as xr + +from satpy import CHUNK_SIZE +from satpy.readers.file_handlers import BaseFileHandler +from satpy.readers.utils import modified_julian_day_to_datetime64 +import satpy.readers._geos_area as geos_area + +U1 = '>u1' +I2 = '>i2' +I4 = '>i4' +R4 = '>f4' +R8 = '>f8' + +VIS_CHANNEL = 'VIS' +IR_CHANNEL = 'IR' +ALT_CHANNEL_NAMES = { + 'VIS': 'VIS', + 'IR1': 'IR1', + 'IR2': 'IR2', + 'IR3': 'WV' +} +BLOCK_SIZE_VIS = 13504 +BLOCK_SIZE_IR = 3664 + +IMAGE_PARAM_ITEM_SIZE = 2688 +TIME = [('date', I4), ('time', I4)] +CHANNELS = [('VIS', R4), ('IR1', R4), ('IR2', R4), ('WV', R4)] +VISIR_SOLAR = [('VIS', R4), ('IR', R4)] + +CONTROL_BLOCK = np.dtype([('control_block_size', I2), + ('head_block_number_of_parameter_block', I2), + ('parameter_block_size', I2), + ('head_block_number_of_image_data', I2), + ('total_block_size_of_image_data', I2), + ('available_block_size_of_image_data', I2), + ('head_valid_line_number', I2), + ('final_valid_line_number', I2), + ('final_data_block_number', I2)]) + +MODE_BLOCK_FRAME_PARAMETERS = [('bit_length', I4), + ('number_of_lines', I4), + ('number_of_pixels', I4), + ('stepping_angle', R4), + ('sampling_angle', R4), + ('lcw_pixel_size', I4), + ('doc_pixel_size', I4), + ('reserved', I4)] + +MODE_BLOCK = np.dtype([('satellite_number', I4), + ('satellite_name', '|S12'), + ('observation_time_ad', '|S16'), + ('observation_time_mjd', R8), + ('gms_operation_mode', I4), + ('dpc_operation_mode', I4), + ('vissr_observation_mode', I4), + ('scanner_selection', I4), + ('sensor_selection', I4), + ('sensor_mode', I4), + ('scan_frame_mode', I4), + ('scan_mode', I4), + ('upper_limit_of_scan_number', I4), + ('lower_limit_of_scan_number', I4), + ('equatorial_scan_line_number', I4), + ('spin_rate', R4), + ('vis_frame_parameters', MODE_BLOCK_FRAME_PARAMETERS), + ('ir_frame_parameters', MODE_BLOCK_FRAME_PARAMETERS), + ('satellite_height', R4), + ('earth_radius', R4), + ('ssp_longitude', R4), + ('reserved_1', I4, 9), + ('table_of_sensor_trouble', I4, 14), + ('reserved_2', I4, 36), + ('status_tables_of_data_relative_address_segment', I4, 60)]) + +COORDINATE_CONVERSION_PARAMETERS = np.dtype([ + ('data_segment', I4), + ('data_validity', I4), + ('data_generation_time', TIME), + ('scheduled_observation_time', R8), + ('stepping_angle_along_line', CHANNELS), + ('sampling_angle_along_pixel', CHANNELS), + ('central_line_number_of_vissr_frame', CHANNELS), # TODO + ('central_pixel_number_of_vissr_frame', CHANNELS), # TODO + ('pixel_difference_of_vissr_center_from_normal_position', CHANNELS), # TODO + ('number_of_sensor_elements', CHANNELS), + ('total_number_of_vissr_frame_lines', CHANNELS), # TODO + ('total_number_of_vissr_frame_pixels', CHANNELS), # TODO + ('vissr_misalignment', R4, (3,)), + ('matrix_of_misalignment', R4, (3, 3)), + ('parameters', [('judgement_of_observation_convergence_time', R4), + ('judgement_of_line_convergence', R4), + ('east_west_angle_of_sun_light_condense_prism', R4), + ('north_south_angle_of_sun_light_condense_prism', R4), + ('pi', R4), + ('pi_divided_by_180', R4), + ('180_divided_by_pi', R4), + ('equatorial_radius', R4), + ('oblateness_of_earth', R4), + ('eccentricity_of_earth_orbit', R4), + ('first_angle_of_vissr_observation_in_sdb', R4), + ('upper_limited_line_of_2nd_prism_for_vis_solar_observation', R4), + ('lower_limited_line_of_1st_prism_for_vis_solar_observation', R4), + 
('upper_limited_line_of_3rd_prism_for_vis_solar_observation', R4), + ('lower_limited_line_of_2nd_prism_for_vis_solar_observation', R4)]), + ('solar_stepping_angle_along_line', VISIR_SOLAR), + ('solar_sampling_angle_along_pixel', VISIR_SOLAR), + ('solar_center_line_of_vissr_frame', VISIR_SOLAR), + ('solar_center_pixel_of_vissr_frame', VISIR_SOLAR), + ('solar_pixel_difference_of_vissr_center_from_normal_position', VISIR_SOLAR), + ('solar_number_of_sensor_elements', VISIR_SOLAR), + ('solar_total_number_of_vissr_frame_lines', VISIR_SOLAR), + ('solar_total_number_of_vissr_frame_pixels', VISIR_SOLAR), + ('reserved_1', I4, 19), + ('orbital_parameters', [('epoch_time', R8), + ('semi_major_axis', R8), + ('eccentricity', R8), + ('orbital_inclination', R8), + ('longitude_of_ascending_node', R8), + ('argument_of_perigee', R8), + ('mean_anomaly', R8), + ('longitude_of_ssp', R8), # TODO + ('latitude_of_ssp', R8)]), # TODO + ('reserved_2', I4, 2), + ('attitude_parameters', [('epoch_time', R8), + ('angle_between_z_axis_and_satellite_spin_axis_at_epoch_time', R8), + ('angle_change_rate_between_spin_axis_and_z_axis', R8), + ('angle_between_spin_axis_and_zy_axis', R8), + ('angle_change_rate_between_spin_axis_and_zt_axis', R8), + ('daily_mean_of_spin_rate', R8)]), + ('reserved_3', I4, 529), + ('correction_of_image_distortion', [('stepping_angle_along_line_of_ir1', R4), + ('stepping_angle_along_line_of_ir2', R4), + ('stepping_angle_along_line_of_wv', R4), + ('stepping_angle_along_line_of_vis', R4), + ('sampling_angle_along_pixel_of_ir1', R4), + ('sampling_angle_along_pixel_of_ir2', R4), + ('sampling_angle_along_pixel_of_wv', R4), + ('sampling_angle_along_pixel_of_vis', R4), + ('x_component_vissr_misalignment', R4), + ('y_component_vissr_misalignment', R4)]) +]) + +ATTITUDE_PREDICTION_DATA = np.dtype([('prediction_time_mjd', R8), + ('prediction_time_utc', TIME), + ('right_ascension_of_attitude', R8), + ('declination_of_attitude', R8), + ('sun_earth_angle', R8), + ('spin_rate', R8), + ('right_ascension_of_orbital_plane', R8), + ('declination_of_orbital_plane', R8), + ('reserved', R8), + ('eclipse_flag', I4), + ('spin_axis_flag', I4)]) + +ATTITUDE_PREDICTION = np.dtype([('data_segment', I4), + ('data_validity', I4), + ('data_generation_time', TIME), + ('start_time', R8), + ('end_time', R8), + ('prediction_interval_time', R8), + ('number_of_prediction', I4), + ('data_size', I4), + ('data', ATTITUDE_PREDICTION_DATA, (33,))]) + +ORBIT_PREDICTION_DATA = [('prediction_time_mjd', R8), + ('prediction_time_utc', TIME), + ('satellite_position_1950', R8, (3,)), + ('satellite_velocity_1950', R8, (3,)), + ('satellite_position_earth_fixed', R8, (3,)), + ('satellite_velocity_earth_fixed', R8, (3,)), + ('greenwich_sidereal_time', R8), + ('sat_sun_vector_1950', [('azimuth', R8), + ('elevation', R8)]), + ('sat_sun_vector_earth_fixed', [('azimuth', R8), + ('elevation', R8)]), + ('conversion_matrix', R8, (3, 3)), + ('moon_directional_vector', R8, (3,)), + ('satellite_position', [('ssp_longitude', R8), + ('ssp_latitude', R8), + ('satellite_height', R8)]), + ('eclipse_period_flag', I4), + ('reserved', I4)] + +ORBIT_PREDICTION = np.dtype([('data_segment', I4), + ('data_validity', I4), + ('data_generation_time', TIME), + ('start_time', R8), + ('end_time', R8), + ('prediction_interval_time', R8), + ('number_of_prediction', I4), + ('data_size', I4), + ('data', ORBIT_PREDICTION_DATA, (9,))]) + +VIS_CALIBRATION_TABLE = np.dtype([ + ('channel_number', I4), + ('data_validity', I4), + ('updated_time', TIME), + ('table_id', I4), + 
('brightness_albedo_conversion_table', R4, (64,)), + ('vis_channel_staircase_brightness_data', R4, (6,)), + ('coefficients_table_of_vis_staircase_regression_curve', R4, (10,)), + ('brightness_table_for_calibration', [('universal_space_brightness', R4), + ('solar_brightness', R4)]), + ('calibration_uses_brightness_correspondence_voltage_chart', [('universal_space_voltage', R4), + ('solar_voltage', R4)]), + ('calibration_coefficients_of_radiation_observation', [('G', R4), ('V0', R4)]), + ('reserved', I4, (9,)) + ]) + +VIS_CALIBRATION = np.dtype([('data_segment', I4), + ('data_validity', I4), + ('data_generation_time', TIME), + ('sensor_group', I4), + ('vis1_calibration_table', VIS_CALIBRATION_TABLE), + ('vis2_calibration_table', VIS_CALIBRATION_TABLE), + ('vis3_calibration_table', VIS_CALIBRATION_TABLE), + ('reserved', I4, (267,))]) + +TELEMETRY_DATA = np.dtype([ + ('shutter_temp', R4), + ('redundant_mirror_temp', R4), + ('primary_mirror_temp', R4), + ('baffle_fw_temp', R4), + ('baffle_af_temp', R4), + ('15_volt_auxiliary_power_supply', R4), + ('radiative_cooler_temp_1', R4), + ('radiative_cooler_temp_2', R4), + ('electronics_module_temp', R4), + ('scan_mirror_temp', R4), + ('shutter_cavity_temp', R4), + ('primary_mirror_sealed_temp', R4), + ('redundant_mirror_sealed_temp', R4), + ('shutter_temp_2', R4), + ('reserved', R4, (2,)) +]) + +IR_CALIBRATION = np.dtype([ + ('data_segment', I4), + ('data_validity', I4), + ('updated_time', TIME), + ('sensor_group', I4), + ('table_id', I4), + ('reserved_1', I4, (2,)), + ('conversion_table_of_equivalent_black_body_radiation', R4, (256,)), + ('conversion_table_of_equivalent_black_body_temperature', R4, (256,)), + ('staircase_brightness_data', R4, (6,)), + ('coefficients_table_of_staircase_regression_curve', R4, (10,)), + ('brightness_data_for_calibration', [('brightness_of_space', R4), + ('brightness_of_black_body_shutter', R4), + ('reserved', R4)]), + ('voltage_table_for_brightness_of_calibration', [('voltage_of_space', R4), + ('voltage_of_black_body_shutter', R4), + ('reserved', R4)]), + ('calibration_coefficients_of_radiation_observation', [('G', R4), ('V0', R4)]), + ('valid_shutter_temperature', R4), + ('valid_shutter_radiation', R4), + ('telemetry_data_table', TELEMETRY_DATA), + ('flag_of_calid_shutter_temperature_calculation', I4), + ('reserved_2', I4, (109,)) +]) + +SIMPLE_COORDINATE_CONVERSION_TABLE = np.dtype([ + ('coordinate_conversion_table', I2, (1250,)), + ('earth_equator_radius', R4), + ('satellite_height', R4), + ('stepping_angle', R4), + ('sampling_angle', R4), + ('ssp_latitude', R4), + ('ssp_longitude', R4), + ('ssp_line_number', R4), + ('ssp_pixel_number', R4), + ('pi', R4), + ('line_correction_ir1_vis', R4), + ('pixel_correction_ir1_vis', R4), + ('line_correction_ir1_ir2', R4), + ('pixel_correction_ir1_ir2', R4), + ('line_correction_ir1_wv', R4), + ('pixel_correction_ir1_wv', R4), + ('reserved', R4, (32,)), +]) + +IMAGE_PARAMS = { + 'mode': { + 'dtype': MODE_BLOCK, + 'offset': { + VIS_CHANNEL: 2 * BLOCK_SIZE_VIS, + IR_CHANNEL: 2 * BLOCK_SIZE_IR + } + }, + 'coordinate_conversion': { + 'dtype': COORDINATE_CONVERSION_PARAMETERS, + 'offset': { + VIS_CHANNEL: 2 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE, + IR_CHANNEL: 4 * BLOCK_SIZE_IR + } + }, + 'attitude_prediction': { + 'dtype': ATTITUDE_PREDICTION, + 'offset': { + VIS_CHANNEL: 2 * BLOCK_SIZE_VIS + 3 * IMAGE_PARAM_ITEM_SIZE, + IR_CHANNEL: 5 * BLOCK_SIZE_IR + }, + 'preserve': 'data' + }, + 'orbit_prediction_1': { + 'dtype': ORBIT_PREDICTION, + 'offset': { + VIS_CHANNEL: 3 * 
BLOCK_SIZE_VIS,
+            IR_CHANNEL: 6 * BLOCK_SIZE_IR
+        },
+        'preserve': 'data'
+    },
+    'orbit_prediction_2': {
+        'dtype': ORBIT_PREDICTION,
+        'offset': {
+            VIS_CHANNEL: 3 * BLOCK_SIZE_VIS + 1 * IMAGE_PARAM_ITEM_SIZE,
+            IR_CHANNEL: 7 * BLOCK_SIZE_IR
+        },
+        'preserve': 'data'
+    },
+    'vis_calibration': {
+        'dtype': VIS_CALIBRATION,
+        'offset': {
+            VIS_CHANNEL: 3 * BLOCK_SIZE_VIS + 3 * IMAGE_PARAM_ITEM_SIZE,
+            IR_CHANNEL: 9 * BLOCK_SIZE_IR
+        },
+        'preserve': 'data'
+    },
+    'ir1_calibration': {
+        'dtype': IR_CALIBRATION,
+        'offset': {
+            VIS_CHANNEL: 4 * BLOCK_SIZE_VIS,
+            IR_CHANNEL: 10 * BLOCK_SIZE_IR
+        },
+    },
+    'ir2_calibration': {
+        'dtype': IR_CALIBRATION,
+        'offset': {
+            VIS_CHANNEL: 4 * BLOCK_SIZE_VIS + IMAGE_PARAM_ITEM_SIZE,
+            IR_CHANNEL: 11 * BLOCK_SIZE_IR
+        },
+    },
+    'wv_calibration': {
+        'dtype': IR_CALIBRATION,
+        'offset': {
+            VIS_CHANNEL: 4 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE,
+            IR_CHANNEL: 12 * BLOCK_SIZE_IR
+        },
+    },
+    'simple_coordinate_conversion_table': {
+        'dtype': SIMPLE_COORDINATE_CONVERSION_TABLE,
+        'offset': {
+            VIS_CHANNEL: 5 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE,
+            IR_CHANNEL: 16 * BLOCK_SIZE_IR
+        },
+    }
+}
+
+LINE_CONTROL_WORD = np.dtype([
+    ('data_id', U1, (4, )),
+    ('line_number', I4),
+    ('line_name', I4),
+    ('error_line_flag', I4),  # TODO
+    ('error_message', I4),
+    ('mode_error_flag', I4),
+    ('scan_time', R8),
+    ('beta_angle', R4),
+    ('west_side_earth_edge', I4),
+    ('east_side_earth_edge', I4),
+    ('received_time_1', R8),  # Typo in format description (I*4)
+    ('received_time_2', I4),
+    ('reserved', U1, (8, ))
+])
+
+IMAGE_DATA_BLOCK_IR = np.dtype([('LCW', LINE_CONTROL_WORD),
+                                ('DOC', U1, (256,)),  # Omitted
+                                ('image_data', U1, 3344)])
+
+IMAGE_DATA_BLOCK_VIS = np.dtype([('LCW', LINE_CONTROL_WORD),
+                                 ('DOC', U1, (64,)),  # Omitted
+                                 ('image_data', U1, (13376,))])
+
+IMAGE_DATA = {
+    VIS_CHANNEL: {
+        'offset': 6 * BLOCK_SIZE_VIS,
+        'dtype': IMAGE_DATA_BLOCK_VIS,
+    },
+    IR_CHANNEL: {
+        'offset': 18 * BLOCK_SIZE_IR,
+        'dtype': IMAGE_DATA_BLOCK_IR
+    }
+}
+
+
+def recarr2dict(arr, preserve=None):
+    """Convert a record array to a (possibly nested) dictionary."""
+    if not preserve:
+        preserve = []
+    res = {}
+    for key, value in zip(arr.dtype.names, arr):
+        if key.startswith('reserved'):
+            continue
+        if value.dtype.names and key not in preserve:
+            # Nested record array
+            res[key] = recarr2dict(value)
+        else:
+            # Scalar or record array that shall be preserved
+            res[key] = value
+    return res
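+
+
+# Illustration of what recarr2dict produces (added for clarity; the dtype is
+# made up, not one of the VISSR record types above): a record like
+#
+#     np.array([(1, (2.0, 3.0))],
+#              dtype=[('a', '>i4'), ('b', [('x', '>f8'), ('y', '>f8')])])[0]
+#
+# becomes
+#
+#     {'a': 1, 'b': {'x': 2.0, 'y': 3.0}}
+#
+# while fields listed in `preserve` keep their record-array form and fields
+# named 'reserved*' are dropped.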
+
+
+class GMS5VISSRFileHandler(BaseFileHandler):
+    """File handler for GMS-5 VISSR Level 1B data."""
+
+    def __init__(self, filename, filename_info, filetype_info):
+        """Initialize the file handler."""
+        super(GMS5VISSRFileHandler, self).__init__(filename, filename_info, filetype_info)
+        self._filename = filename
+        self._filename_info = filename_info
+        self._header, self._channel_type = self._read_header(filename)
+        self._mda = self._get_mda()
+        #
+        # print(self._header['image_parameters']['mode']['ssp_longitude'])
+        # print(self._header['image_parameters']['orbit_prediction']['data']['satellite_position'])
+        # print(self._header['image_parameters']['vis_calibration']['data_segment'])
+        # print(self._header['image_parameters']['vis_calibration']['vis1_calibration_table']['updated_time'])
+        # print(self._header['image_parameters']['ir1_calibration']['flag_of_calid_shutter_temperature_calculation'])
+        # print(self._header['image_parameters']['wv_calibration']['conversion_table_of_equivalent_black_body_temperature'])
+        # print(self._header['image_parameters']['coordinate_conversion']['central_line_number_of_vissr_frame'])
+        # print(self._header['image_parameters']['coordinate_conversion']['central_pixel_number_of_vissr_frame'])
+        # print(self._header['image_parameters']['coordinate_conversion']['pixel_difference_of_vissr_center_from_normal_position'])
+
+    def _read_header(self, filename):
+        """Read control block and image parameters from the file header."""
+        header = {}
+        with open(filename, mode='rb') as file_obj:
+            header['control_block'] = self._read_control_block(file_obj)
+            channel_type = self._get_channel_type(header['control_block']['parameter_block_size'])
+            header['image_parameters'] = self._read_image_params(file_obj, channel_type)
+        return header, channel_type
+
+    @staticmethod
+    def _get_channel_type(parameter_block_size):
+        """Determine the channel type from the parameter block size."""
+        if parameter_block_size == 4:
+            return VIS_CHANNEL
+        elif parameter_block_size == 16:
+            return IR_CHANNEL
+        raise ValueError('Cannot determine channel type: Unknown parameter block size.')
+
+    def _read_control_block(self, file_obj):
+        ctrl_block = np.fromfile(file_obj, dtype=CONTROL_BLOCK, count=1)[0]
+        return recarr2dict(ctrl_block)
+
+    def _read_image_params(self, file_obj, channel_type):
+        """Read image parameters from the header."""
+        image_params = {}
+        for name, param in IMAGE_PARAMS.items():
+            image_params[name] = self._read_image_param(file_obj, param, channel_type)
+
+        image_params['orbit_prediction'] = self._concat_orbit_prediction(
+            image_params.pop('orbit_prediction_1'),
+            image_params.pop('orbit_prediction_2')
+        )
+        return image_params
+
+    @staticmethod
+    def _read_image_param(file_obj, param, channel_type):
+        """Read a single image parameter block from the header."""
+        file_obj.seek(param['offset'][channel_type])
+        data = np.fromfile(file_obj, dtype=param['dtype'], count=1)[0]
+        return recarr2dict(data, preserve=param.get('preserve'))
+
+    @staticmethod
+    def _concat_orbit_prediction(orb_pred_1, orb_pred_2):
+        """Concatenate orbit prediction data.
+
+        It is split over two image parameter blocks in the header.
+        """
+        orb_pred = orb_pred_1
+        orb_pred['data'] = np.concatenate([orb_pred_1['data'], orb_pred_2['data']])
+        return orb_pred
+
+    def _get_frame_parameters_key(self):
+        if self._channel_type == VIS_CHANNEL:
+            return 'vis_frame_parameters'
+        return 'ir_frame_parameters'
+
+    def _get_actual_shape(self):
+        actual_num_lines = self._header['control_block']['available_block_size_of_image_data']
+        _, nominal_num_pixels = self._get_nominal_shape()
+        return actual_num_lines, nominal_num_pixels
+
+    def _get_nominal_shape(self):
+        frame_params = self._header['image_parameters']['mode'][self._get_frame_parameters_key()]
+        return frame_params['number_of_lines'], frame_params['number_of_pixels']
+
+    def _get_mda(self):
+        """Collect metadata common to all datasets."""
+        mode_block = self._header['image_parameters']['mode']
+        return {
+            'platform': mode_block['satellite_name'].decode().strip().upper(),
+            'sensor': 'VISSR'
+        }
+
+    def get_dataset(self, dataset_id, ds_info):
+        """Get the dataset as a lazy data array."""
+        num_lines, _ = self._get_actual_shape()
+        memmap = np.memmap(
+            filename=self._filename,
+            mode='r',
+            dtype=IMAGE_DATA[self._channel_type]['dtype'],
+            offset=IMAGE_DATA[self._channel_type]['offset'],
+            shape=(num_lines,)
+        )
+        dask_array = da.from_array(memmap, chunks=(CHUNK_SIZE,))
+        data = xr.DataArray(
+            dask_array['image_data'],
+            dims=('y', 'x'),
+            coords={
+                'acq_time': ('y', self._get_acq_time(dask_array)),
+                'line_number': ('y', dask_array['LCW']['line_number'].compute())
+            }
+        )
+        return data
+
+    def _get_acq_time(self, dask_array):
+        """Get scanline acquisition times from the line control words."""
+        acq_time = dask_array['LCW']['scan_time'].compute()
+        return modified_julian_day_to_datetime64(acq_time)
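+
+    # Note on the memmap/dask pattern in get_dataset above (illustration only;
+    # the record type and file name here are made up, not the VISSR layout):
+    # a structured memmap keeps the data on disk and dask defers field access,
+    # e.g.
+    #
+    #     rec = np.dtype([('header', '>i4'), ('image_data', '>u1', (8,))])
+    #     mm = np.memmap('scan.dat', mode='r', dtype=rec, shape=(100,))
+    #     arr = da.from_array(mm, chunks=(10,))
+    #     counts = arr['image_data']  # still lazy, nothing read yet
+    #
+    # Only .compute(), as used for the coordinates above, touches the file.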
Alternatively use "line number" coordinate. + print(fh_ir2._header['control_block']['head_valid_line_number']) + print(fh_ir2._header['control_block']['final_valid_line_number']) + + # Target shape for padding! + print(fh_ir2._header['image_parameters']['mode']['vis_frame_parameters']['number_of_lines']) + print( + fh_ir2._header['image_parameters']['mode']['vis_frame_parameters']['number_of_pixels'], + fh_ir2._header['image_parameters']['mode']['vis_frame_parameters']['number_of_lines']) + + def get_area_def(self, dsid): + """ + TODO: + - misalignment matrix, rotation matrix + - near sided perspective + + Checked: + - numerical accuracy of stepping angle + - following C routing strictly (discarding head_valid_line_number) -> worse + """ + alt_ch_name = ALT_CHANNEL_NAMES[dsid['name']] + num_lines, num_pixels = self._get_actual_shape() + mode_block = self._header['image_parameters']['mode'] + coord_conv = self._header['image_parameters']['coordinate_conversion'] + stepping_angle = coord_conv['stepping_angle_along_line'][alt_ch_name] + sampling_angle = coord_conv['sampling_angle_along_pixel'][alt_ch_name] + center_line_vissr_frame = coord_conv['central_line_number_of_vissr_frame'][alt_ch_name] + center_pixel_vissr_frame = coord_conv['central_pixel_number_of_vissr_frame'][alt_ch_name] + line_offset = self._header['control_block']['head_valid_line_number'] + pixel_offset = coord_conv['pixel_difference_of_vissr_center_from_normal_position'][ + alt_ch_name] + print(coord_conv['vissr_misalignment']) + print(coord_conv['matrix_of_misalignment']) + + equatorial_radius = coord_conv['parameters']['equatorial_radius'] + oblateness = coord_conv['parameters']['oblateness_of_earth'] + name_dict = geos_area.get_geos_area_naming({ + 'platform_name': self._mda['platform'], + 'instrument_name': self._mda['sensor'], + 'service_name': 'western-pacific', + 'service_desc': 'Western Pacific', + 'resolution': dsid['resolution'] + }) + proj_dict = { + 'a_name': name_dict['area_id'], + 'p_id': name_dict['area_id'], + 'a_desc': name_dict['description'], + 'ssp_lon': coord_conv['orbital_parameters']['longitude_of_ssp'], + 'a': equatorial_radius, + 'b': _get_polar_earth_radius(equatorial_radius, oblateness), + 'h': mode_block['satellite_height'], + 'nlines': num_lines, + 'ncols': num_pixels, + 'lfac': geos_area.sampling_to_lfac_cfac(stepping_angle), + 'cfac': geos_area.sampling_to_lfac_cfac(sampling_angle), + 'coff': center_pixel_vissr_frame - pixel_offset, + 'loff': center_line_vissr_frame - line_offset, + 'scandir': 'N2S' + } + from pprint import pprint + # pprint(mode_block) + pprint(coord_conv) + extent = geos_area.get_area_extent(proj_dict) + area = geos_area.get_area_definition(proj_dict, extent) + return area + + def get_lonlat(self): + mode_block = self._header['image_parameters']['mode'] + from pprint import pprint + pprint(mode_block) + params = { + 'spinning_rate': mode_block['spin_rate'] + } diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py new file mode 100644 index 0000000000..9f3b0e9666 --- /dev/null +++ b/satpy/readers/gms5_vissr_navigation.py @@ -0,0 +1,480 @@ +"""GMS-5 VISSR Navigation.""" + +import numba +import numpy as np + +@numba.njit +def get_lons_lats(lines, pixels, nav_params): + num_lines = len(lines) + num_pixels = len(pixels) + output_shape = (num_lines, num_pixels) + lons = np.zeros(output_shape) + lats = np.zeros(output_shape) + for i in range(num_lines): + for j in range(num_pixels): + line = lines[i] + pixel = pixels[j] + point = (line, pixel) 
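+            # Navigate each pixel separately: get_lon_lat runs the full chain
+            # from image coordinates via scanning angles and satellite and
+            # earth-fixed coordinates to geodetic longitude and latitude.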
+            lon, lat = get_lon_lat(line, pixel, nav_params)
+            lons[i, j] = lon
+            lats[i, j] = lat
+    return lons, lats
+
+
+@numba.njit
+def get_observation_time(point, start_time_of_scan, scan_params):
+    """Calculate observation time of a VISSR pixel."""
+    relative_time = _get_relative_observation_time(point, scan_params)
+    return start_time_of_scan + relative_time
+
+
+@numba.njit
+def _get_relative_observation_time(point, scanning_params):
+    line, pixel = point
+    spinning_rate, num_sensors, sampling_angle = scanning_params
+    spinning_freq = 1440 * spinning_rate
+    line_step = np.floor((line - 1) / num_sensors)
+    pixel_step = (sampling_angle * pixel) / (2 * np.pi)
+    return (line_step + pixel_step) / spinning_freq
+
+
+@numba.njit
+def interpolate_nav_params_at_obs_time(nav_params, obs_time):
+    pass
+
+
+@numba.njit
+def interpolate_prediction(prediction_times, predicted_values, desired_time):
+    # TODO: Interpolate all fields
+    # TODO: How to interpolate angles? See mspVissrGetInsertValue
+    return np.interp(desired_time, prediction_times, predicted_values)
+
+
+@numba.njit
+def get_nearest_prediction(prediction_times, predicted_values, desired_time):
+    time_diff = desired_time - prediction_times
+    idx_of_nearest_prediction = np.argmin(np.fabs(time_diff))
+    return predicted_values[idx_of_nearest_prediction]
+
+
+def dict_to_nav_params(dictionary):
+    arr = np.empty(1, nav_params_dtype)
+    for key, val in dictionary.items():
+        arr[key] = val
+    return arr
+
+
+
+nav_params_dtype = np.dtype([
+    ('start_time_of_scan', np.float32),
+    ('spinning_rate', np.float32),
+    ('num_sensors', np.int32),
+    ('sampling_angle', np.float32),
+    ('equatorial_earth_radius', np.float32),
+    ('earth_flattening', np.float32)
+])
+
+
+attitude_prediction_dtype = np.dtype(
+    [
+        ('prediction_time_mjd', np.float64)
+    ]
+)
+orbit_prediction_dtype = np.dtype(
+    [
+        ('prediction_time_mjd', np.float64)
+    ]
+)
+image_navigator_spec = [
+    ('start_time_of_scan', numba.float64),
+    ('line_offset', numba.int32),
+    ('pixel_offset', numba.int32),
+    ('sampling_angle', numba.float32),
+    ('stepping_angle', numba.float32),
+    ('spinning_rate', numba.float32),
+    ('num_sensors', numba.int32),
+    ('misalignment', numba.float32[:, :]),
+    ('attitude_prediction', numba.from_dtype(attitude_prediction_dtype)[:]),
+    ('orbit_prediction', numba.from_dtype(orbit_prediction_dtype)[:]),
+    ('earth_flattening', numba.float32),
+    ('earth_equatorial_radius', numba.float32)
+] # TODO: Compare types with header types
+
+@numba.experimental.jitclass(spec=image_navigator_spec)
+class ImageNavigator:
+    def __init__(self, start_time_of_scan, line_offset, pixel_offset, sampling_angle, stepping_angle,
+                 spinning_rate, num_sensors, misalignment, attitude_prediction,
+                 orbit_prediction, earth_flattening, earth_equatorial_radius):
+        self.start_time_of_scan = start_time_of_scan
+        self.line_offset = line_offset
+        self.pixel_offset = pixel_offset
+        self.sampling_angle = sampling_angle
+        self.stepping_angle = stepping_angle
+        self.spinning_rate = spinning_rate
+        self.num_sensors = num_sensors
+        self.misalignment = misalignment
+        self.attitude_prediction = attitude_prediction
+        self.orbit_prediction = orbit_prediction
+        self.earth_flattening = earth_flattening
+        self.earth_equatorial_radius = earth_equatorial_radius
+
+
+
+@numba.experimental.jitclass([
+    ('line_offset', numba.int32),
+    ('pixel_offset', numba.int32),
+    ('stepping_angle', numba.float64),
+    ('sampling_angle', numba.float64),
+    ('misalignment', numba.types.Array(numba.float64, 2, layout='C')),
+    ('greenwich_sidereal_time', numba.float64),
+
('angle_between_earth_and_sun', numba.float64), + ('declination_from_sat_to_sun', numba.float64), + ('right_ascension_from_sat_to_sun', numba.float64), + ('angle_between_sat_spin_and_z_axis', numba.float64), + ('angle_between_sat_spin_and_yz_plane', numba.float64), + ('sat_position_earth_fixed_x', numba.float64), + ('sat_position_earth_fixed_y', numba.float64), + ('sat_position_earth_fixed_z', numba.float64), + ('nutation_precession', numba.types.Array(numba.float64, 2, layout='C')), + ('earth_flattening', numba.float64), + ('earth_equatorial_radius', numba.float64) +]) +class NavigationParameters: + def __init__( + self, + line_offset, + pixel_offset, + stepping_angle, + sampling_angle, + misalignment, + greenwich_sidereal_time, + angle_between_earth_and_sun, + declination_from_sat_to_sun, + right_ascension_from_sat_to_sun, + angle_between_sat_spin_and_z_axis, + angle_between_sat_spin_and_yz_plane, + sat_position_earth_fixed_x, + sat_position_earth_fixed_y, + sat_position_earth_fixed_z, + nutation_precession, + earth_flattening, + earth_equatorial_radius + ): + self.line_offset = line_offset + self.pixel_offset = pixel_offset + self.stepping_angle = stepping_angle + self.sampling_angle = sampling_angle + self.misalignment = misalignment + self.greenwich_sidereal_time = greenwich_sidereal_time + self.angle_between_earth_and_sun = angle_between_earth_and_sun + self.declination_from_sat_to_sun = declination_from_sat_to_sun + self.right_ascension_from_sat_to_sun = right_ascension_from_sat_to_sun + self.angle_between_sat_spin_and_z_axis = angle_between_sat_spin_and_z_axis + self.angle_between_sat_spin_and_yz_plane = angle_between_sat_spin_and_yz_plane + self.sat_position_earth_fixed_x = sat_position_earth_fixed_x + self.sat_position_earth_fixed_y = sat_position_earth_fixed_y + self.sat_position_earth_fixed_z = sat_position_earth_fixed_z + self.nutation_precession = nutation_precession + self.earth_flattening = earth_flattening + self.earth_equatorial_radius = earth_equatorial_radius + + # TODO: Remember that all angles are expected in rad + # TODO: Watch out shape of 3x3 matrices! 
See msVissrNav.c + + def get_image_offset(self): + return self.line_offset, self.pixel_offset + + def get_sampling(self): + return self.stepping_angle, self.sampling_angle + + def get_sat_sun_angles(self): + return np.array([self.declination_from_sat_to_sun, self.right_ascension_from_sat_to_sun]) + + def get_spin_angles(self): + return np.array([self.angle_between_sat_spin_and_z_axis, self.angle_between_sat_spin_and_yz_plane]) + + def get_ellipsoid(self): + return np.array([self.earth_equatorial_radius, self.earth_flattening]) + + def get_sat_position(self): + return np.array((self.sat_position_earth_fixed_x, + self.sat_position_earth_fixed_y, + self.sat_position_earth_fixed_z)) + + +@numba.njit +def get_lon_lat(line, pixel, nav_params): + """Get longitude and latitude coordinates for a given image pixel.""" + scan_angles = transform_image_coords_to_scanning_angles( + point=(line, pixel), + offset=nav_params.get_image_offset(), + sampling=nav_params.get_sampling() + ) + view_vector_sat = _transform_scanning_angles_to_satellite_coords( + scan_angles, nav_params + ) + view_vector_earth_fixed = _transform_satellite_to_earth_fixed_coords( + view_vector_sat, nav_params + ) + point_on_earth = _intersect_with_earth(view_vector_earth_fixed, nav_params) + lon, lat = transform_earth_fixed_to_geodetic_coords( + point_on_earth, nav_params.earth_flattening + ) + return lon, lat + + +@numba.njit +def transform_image_coords_to_scanning_angles(point, offset, sampling): + """Transform image coordinates to scanning angles. + + Args: + point: Point (line, pixel) in image coordinates. + offset: Offset (line, pixel) from image center. + sampling: Stepping angle (along line) and sampling angle (along pixels) + in radians. + Returns: + Scanning angles (x, y) at the pixel center (rad). + """ + line, pixel = point + line_offset, pixel_offset = offset + stepping_angle, sampling_angle = sampling + x = sampling_angle * (pixel + 0.5 - pixel_offset) + y = stepping_angle * (line + 0.5 - line_offset) + return np.array([x, y]) + + +@numba.njit +def _transform_scanning_angles_to_satellite_coords(angles, nav_params): + transformer = ScanningAnglesToSatelliteCoordsTransformer( + nav_params.misalignment + ) + return transformer.transform(angles) + + +@numba.experimental.jitclass([ + ('misalignment', numba.types.Array(numba.float64, 2, layout='C')) +]) +class ScanningAnglesToSatelliteCoordsTransformer: + """Transform scanning angles to satellite angular momentum coordinates.""" + + def __init__(self, misalignment): + """Initialize the transformer. + + Args: + misalignment: Misalignment matrix (3x3) + """ + self.misalignment = misalignment + + def transform(self, angles): + """Transform scanning angles to satellite angular momentum coordinates. + + Args: + angles: Scanning angles (x, y) in radians. + + Returns: + View vector (x, y, z) in satellite angular momentum coordinates. + """ + rotation, vector = self._get_transforms(angles) + return np.dot(rotation, np.dot(self.misalignment, vector)) + + def _get_transforms(self, angles): + x, y = angles + cos_x = np.cos(x) + sin_x = np.sin(x) + rot = np.array(((cos_x, -sin_x, 0), + (sin_x, cos_x, 0), + (0, 0, 1))) + vec = np.array([np.cos(y), 0, np.sin(y)]) + return rot, vec + + +@numba.njit +def _transform_satellite_to_earth_fixed_coords(point, nav_params): + # TODO: kwargs possible here? 
+    transformer = SatelliteToEarthFixedCoordsTransformer(
+        nav_params.greenwich_sidereal_time,
+        nav_params.get_sat_sun_angles(),
+        nav_params.angle_between_earth_and_sun,
+        nav_params.get_spin_angles(),
+        nav_params.nutation_precession
+    )
+    return transformer.transform(point)
+
+
+@numba.experimental.jitclass([
+    ('greenwich_sidereal_time', numba.float64),
+    ('sat_sun_angles', numba.float64[:]),
+    ('earth_sun_angle', numba.float64),
+    ('spin_angles', numba.float64[:]),
+    ('nutation_precession', numba.types.Array(numba.float64, 2, layout='C'))
+])
+class SatelliteToEarthFixedCoordsTransformer:
+    """Transform satellite angular momentum coordinates to earth-fixed coordinates."""
+
+    def __init__(self, greenwich_sidereal_time, sat_sun_angles, earth_sun_angle, spin_angles, nutation_precession):
+        """Initialize the transformer.
+
+        Args:
+            greenwich_sidereal_time: True Greenwich sidereal time (rad).
+            sat_sun_angles: Declination from satellite to sun (rad),
+                right ascension from satellite to sun (rad)
+            earth_sun_angle: Angle between sun and earth center on the z-axis
+                vertical plane (rad)
+            spin_angles: Angle between satellite spin axis and z-axis (rad),
+                angle between satellite spin axis and yz-plane (rad)
+            nutation_precession: Nutation and precession matrix (3x3)
+        """
+        self.greenwich_sidereal_time = greenwich_sidereal_time
+        self.sat_sun_angles = sat_sun_angles
+        self.earth_sun_angle = earth_sun_angle
+        self.spin_angles = spin_angles
+        self.nutation_precession = nutation_precession
+
+    def transform(self, point):
+        """Transform from satellite angular momentum to earth-fixed coordinates.
+
+        Args:
+            point: Point (x, y, z) in satellite angular momentum coordinates.
+
+        Returns:
+            Point (x', y', z') in earth-fixed coordinates.
+        """
+        sat_unit_vectors = self._get_satellite_unit_vectors()
+        return np.dot(sat_unit_vectors, point)
+
+    def _get_satellite_unit_vectors(self):
+        unit_vector_z = self._get_satellite_unit_vector_z()
+        unit_vector_x = self._get_satellite_unit_vector_x(unit_vector_z)
+        unit_vector_y = self._get_satellite_unit_vector_y(unit_vector_x, unit_vector_z)
+        return np.stack((unit_vector_x, unit_vector_y, unit_vector_z), axis=-1)
+
+    def _get_satellite_unit_vector_z(self):
+        sat_z_axis_1950 = self._get_satellite_z_axis_1950()
+        rotation = self._get_transform_from_1950_to_earth_fixed()
+        z_vec = np.dot(rotation, np.dot(self.nutation_precession, sat_z_axis_1950))
+        return normalize_vector(z_vec)
+
+    def _get_satellite_z_axis_1950(self):
+        """Get satellite z-axis (spin) in mean of 1950 coordinates."""
+        alpha, delta = self.spin_angles
+        cos_delta = np.cos(delta)
+        x = np.sin(delta)
+        y = -cos_delta * np.sin(alpha)
+        z = cos_delta * np.cos(alpha)
+        return np.array([x, y, z])
+
+    def _get_transform_from_1950_to_earth_fixed(self):
+        cos = np.cos(self.greenwich_sidereal_time)
+        sin = np.sin(self.greenwich_sidereal_time)
+        return np.array(
+            ((cos, sin, 0),
+             (-sin, cos, 0),
+             (0, 0, 1))
+        )
+
+    def _get_satellite_unit_vector_x(self, sat_unit_vector_z):
+        beta = self.earth_sun_angle
+        sat_sun_vector = self._get_vector_from_satellite_to_sun()
+        z_cross_satsun = np.cross(sat_unit_vector_z, sat_sun_vector)
+        z_cross_satsun = normalize_vector(z_cross_satsun)
+        x_vec = z_cross_satsun * np.sin(beta) + \
+            np.cross(z_cross_satsun, sat_unit_vector_z) * np.cos(beta)
+        return normalize_vector(x_vec)
+
+    def _get_vector_from_satellite_to_sun(self):
+        declination, right_ascension = self.sat_sun_angles
+        cos_declination = np.cos(declination)
+        x = cos_declination * np.cos(right_ascension)
+        y = cos_declination * np.sin(right_ascension)
+        z = np.sin(declination)
+        return np.array([x, y, z])
+
+    def _get_satellite_unit_vector_y(self, sat_unit_vector_x, sat_unit_vector_z):
+        y_vec = np.cross(sat_unit_vector_z, sat_unit_vector_x)
+        return normalize_vector(y_vec)
+
+
+@numba.njit
+def _intersect_with_earth(view_vector, nav_params):
+    intersector = EarthIntersector(
+        nav_params.get_sat_position(),
+        nav_params.get_ellipsoid()
+    )
+    return intersector.intersect(view_vector)
+
+
+@numba.experimental.jitclass([
+    ('sat_pos', numba.float64[:]),
+    ('ellipsoid', numba.float64[:])
+])
+class EarthIntersector:
+    """Intersect instrument viewing vector with the earth's surface."""
+
+    def __init__(self, sat_pos, ellipsoid):
+        """Initialize the intersector.
+
+        Args:
+            sat_pos: Satellite position (x, y, z) in earth-fixed coordinates.
+            ellipsoid: Equatorial radius and flattening of the earth.
+        """
+        self.sat_pos = sat_pos
+        self.ellipsoid = ellipsoid
+
+    def intersect(self, view_vector):
+        """Intersect instrument viewing vector with the earth's surface.
+
+        Args:
+            view_vector: Instrument viewing vector (x, y, z) in earth-fixed
+                coordinates.
+        Returns:
+            Intersection (x', y', z') with the earth's surface.
+        """
+        distance = self._get_distance_to_intersection(view_vector)
+        return self.sat_pos + distance * view_vector
+
+    def _get_distance_to_intersection(self, view_vector):
+        """Get distance to intersection with the earth.
+
+        If the instrument is pointing towards the earth, there will be two
+        intersections with the surface. Choose the one on the instrument-facing
+        side of the earth.
+        """
+        d1, d2 = self._get_distances_to_intersections(view_vector)
+        return min(d1, d2)
+
+    def _get_distances_to_intersections(self, view_vector):
+        equatorial_radius, flattening = self.ellipsoid
+        flat2 = (1 - flattening) ** 2
+        ux, uy, uz = view_vector
+        x, y, z = self.sat_pos
+
+        a = flat2 * (ux**2 + uy**2) + uz**2
+        b = flat2 * (x*ux + y*uy) + z*uz
+        c = flat2 * (x**2 + y**2 - equatorial_radius**2) + z**2
+
+        tmp = np.sqrt(b**2 - a*c)
+        dist_1 = (-b + tmp) / a
+        dist_2 = (-b - tmp) / a
+        return dist_1, dist_2
+
+
+@numba.njit
+def transform_earth_fixed_to_geodetic_coords(point, earth_flattening):
+    """Transform from earth-fixed to geodetic coordinates.
+
+    Args:
+        point: Point (x, y, z) in earth-fixed coordinates.
+        earth_flattening: Flattening of the earth.
+
+    Returns:
+        Geodetic longitude and latitude (degrees).
+ """ + x, y, z = point + f = earth_flattening + lon = np.arctan2(y, x) + lat = np.arctan2(z, ((1 - f)**2 * np.sqrt(x**2 + y**2))) + return np.rad2deg(lon), np.rad2deg(lat) + + +@numba.njit +def normalize_vector(v): + """Normalize the given vector.""" + return v / np.sqrt(np.dot(v, v)) diff --git a/satpy/readers/hrit_jma.py b/satpy/readers/hrit_jma.py index de24f4e6be..1c93094b77 100644 --- a/satpy/readers/hrit_jma.py +++ b/satpy/readers/hrit_jma.py @@ -101,7 +101,7 @@ annotation_header, base_hdr_map, image_data_function) from satpy.readers._geos_area import get_area_definition, get_area_extent -from satpy.readers.utils import get_geostationary_mask +from satpy.readers.utils import get_geostationary_mask, modified_julian_day_to_datetime64 logger = logging.getLogger('hrit_jma') @@ -175,14 +175,6 @@ } -def mjd2datetime64(mjd): - """Convert Modified Julian Day (MJD) to datetime64.""" - epoch = np.datetime64('1858-11-17 00:00') - day2usec = 24 * 3600 * 1E6 - mjd_usec = (mjd * day2usec).astype(np.int64).astype('timedelta64[us]') - return epoch + mjd_usec - - class HRITJMAFileHandler(HRITFileHandler): """JMA HRIT format reader.""" @@ -397,7 +389,7 @@ def _get_acq_time(self): times = np.interp(lines, lines_sparse, times_sparse) # Convert to np.datetime64 - times64 = mjd2datetime64(times) + times64 = modified_julian_day_to_datetime64(times) return times64 diff --git a/satpy/readers/utils.py b/satpy/readers/utils.py index 9218656ee5..1049e0bc0e 100644 --- a/satpy/readers/utils.py +++ b/satpy/readers/utils.py @@ -365,3 +365,11 @@ def remove_earthsun_distance_correction(reflectance, utc_date=None): with xr.set_options(keep_attrs=True): reflectance = reflectance / (sun_earth_dist * sun_earth_dist) return reflectance + + +def modified_julian_day_to_datetime64(mjd): + """Convert Modified Julian Day (MJD) to datetime64.""" + epoch = np.datetime64('1858-11-17 00:00') + day2usec = 24 * 3600 * 1E6 + mjd_usec = (mjd * day2usec).astype(np.int64).astype('timedelta64[us]') + return epoch + mjd_usec diff --git a/satpy/tests/reader_tests/test_ahi_hrit.py b/satpy/tests/reader_tests/test_ahi_hrit.py index b399989e77..6b52d56a63 100644 --- a/satpy/tests/reader_tests/test_ahi_hrit.py +++ b/satpy/tests/reader_tests/test_ahi_hrit.py @@ -301,14 +301,6 @@ def test_get_dataset(self, base_get_dataset): reader.get_dataset(key, {'units': '%', 'sensor': 'jami'}) log_mock.assert_called() - def test_mjd2datetime64(self): - """Test conversion from modified julian day to datetime64.""" - from satpy.readers.hrit_jma import mjd2datetime64 - self.assertEqual(mjd2datetime64(np.array([0])), - np.datetime64('1858-11-17', 'us')) - self.assertEqual(mjd2datetime64(np.array([40587.5])), - np.datetime64('1970-01-01 12:00', 'us')) - def test_get_acq_time(self): """Test computation of scanline acquisition times.""" dt_line = np.arange(1, 11000+1).astype('timedelta64[s]') diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py new file mode 100644 index 0000000000..eccfc04aab --- /dev/null +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -0,0 +1,359 @@ +"""Unit tests for GMS-5 VISSR reader.""" + +import numpy as np +import pytest + +import satpy.readers.gms5_vissr_navigation as nav + + +# Navigation references computed with JMA's Msial library (files +# VISSR_19960217_2331_IR1.A.IMG and VISSR_19960217_2331_VIS.A.IMG). The VIS +# navigation is slightly off (< 0.01 deg) compared to JMA's reference. +# This is probably due to precision problems with the copied numbers. 
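+#
+# Each reference entry gives an image pixel (line, pixel), the expected
+# lon/lat in degrees, an absolute tolerance in degrees and the navigation
+# parameters the reference coordinates were computed with.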
+IR_NAVIGATION_REFERENCE = [ + { + 'line': 686, + 'pixel': 1680, + 'lon': 139.990380, + 'lat': 35.047056, + 'tolerance': 0, + 'nav_params': { + 'line_offset': 1378.5, + 'pixel_offset': 1672.5, + 'stepping_angle': 0.000140000047395, + 'sampling_angle': 0.000095719995443, + 'misalignment': np.array( + [[0.999999165534973, 0.000510364072397, 0.001214201096445], + [-0.000511951977387, 0.999999046325684, 0.001307720085606], + [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + ), + 'greenwich_sidereal_time': 2.468529732418296, + 'angle_between_earth_and_sun': 3.997397917902958, + 'declination_from_sat_to_sun': -0.208770861178982, + 'right_ascension_from_sat_to_sun': 3.304369303579407, + 'angle_between_sat_spin_and_z_axis': 3.149118633034304, + 'angle_between_sat_spin_and_yz_plane': 0.000546042025980, + 'sat_position_earth_fixed_x': -32390963.148471601307392, + 'sat_position_earth_fixed_y': 27003395.381247851997614, + 'sat_position_earth_fixed_z': -228134.860026293463307, + 'nutation_precession': np.array( + [[0.999936381496146, -0.010344758016410, -0.004496547784299], + [0.010344942303489, 0.999946489495557, 0.000017727054455], + [0.004496123789670, -0.000064242454080, 0.999989890320785]] + ), + 'earth_flattening': 0.003352813177897, + 'earth_equatorial_radius': 6378136 + }, + }, + { + 'line': 2089, + 'pixel': 1793, + 'lon': 144.996967, + 'lat': -34.959853, + 'tolerance': 0, + 'nav_params': { + 'line_offset': 1378.5, + 'pixel_offset': 1672.5, + 'stepping_angle': 0.000140000047395, + 'sampling_angle': 0.000095719995443, + 'misalignment': np.array( + [[0.999999165534973, 0.000510364072397, 0.001214201096445], + [-0.000511951977387, 0.999999046325684, 0.001307720085606], + [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + ), + 'greenwich_sidereal_time': 2.530392320846865, + 'angle_between_earth_and_sun': 3.935707944355762, + 'declination_from_sat_to_sun': -0.208713576872247, + 'right_ascension_from_sat_to_sun': 3.242660398458377, + 'angle_between_sat_spin_and_z_axis': 3.149118633034304, + 'angle_between_sat_spin_and_yz_plane': 0.000546042025980, + 'sat_position_earth_fixed_x': -32390273.633551981300116, + 'sat_position_earth_fixed_y': 27003859.543135114014149, + 'sat_position_earth_fixed_z': -210800.087589388160268, + 'nutation_precession': np.array( + [[0.999936381432029, -0.010344763228876, -0.004496550050695], + [0.010344947502662, 0.999946489441823, 0.000017724053657], + [0.004496126086653, -0.000064239500295, 0.999989890310647]] + ), + 'earth_flattening': 0.003352813177897, + 'earth_equatorial_radius': 6378136 + }, + }, + { + 'line': 999, + 'pixel': 2996, + 'lon': -165.023842, + 'lat': 20.005603, + 'tolerance': 0, + 'nav_params': { + 'line_offset': 1378.5, + 'pixel_offset': 1672.5, + 'stepping_angle': 0.000140000047395, + 'sampling_angle': 0.000095719995443, + 'misalignment': np.array( + [[0.999999165534973, 0.000510364072397, 0.001214201096445], + [-0.000511951977387, 0.999999046325684, 0.001307720085606], + [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + ), + 'greenwich_sidereal_time': 2.482331732831616, + 'angle_between_earth_and_sun': 3.983634620574510, + 'declination_from_sat_to_sun': -0.208758095943038, + 'right_ascension_from_sat_to_sun': 3.290601673240597, + 'angle_between_sat_spin_and_z_axis': 3.149118633034304, + 'angle_between_sat_spin_and_yz_plane': 0.000546042025980, + 'sat_position_earth_fixed_x': -32390808.779549609869719, + 'sat_position_earth_fixed_y': 27003503.047290064394474, + 'sat_position_earth_fixed_z': -224351.430479845439550, + 
'nutation_precession': np.array( + [[0.999936381496146, -0.010344758016410, -0.004496547784299], + [0.010344942303489, 0.999946489495557, 0.000017727054455], + [0.004496123789670, -0.000064242454080, 0.999989890320785]] + ), + 'earth_flattening': 0.003352813177897, + 'earth_equatorial_radius': 6378136 + }, + }, +] + + +VIS_NAVIGATION_REFERENCE = [ + { + 'line': 2744, + 'pixel': 6720, + 'lon': 139.975527, + 'lat': 35.078028, + 'tolerance': 0.01, + 'nav_params': { + 'line_offset': 5513.0, + 'pixel_offset': 6688.5, + 'stepping_angle': 0.000035000004573, + 'sampling_angle': 0.000023929998861, + 'misalignment': np.array( + [[0.999999165534973, 0.000510364072397, 0.001214201096445], + [-0.000511951977387, 0.999999046325684, 0.001307720085606], + [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + ), + 'greenwich_sidereal_time': 2.468529731914041, + 'angle_between_earth_and_sun': 3.997397918405798, + 'declination_from_sat_to_sun': -0.208770861179448, + 'right_ascension_from_sat_to_sun': 3.304369304082406, + 'angle_between_sat_spin_and_z_axis': 3.149118633034304, + 'angle_between_sat_spin_and_yz_plane': 0.000546042025980, + 'sat_position_earth_fixed_x': -32390963.148477241396904, + 'sat_position_earth_fixed_y': 27003395.381243918091059, + 'sat_position_earth_fixed_z': -228134.860164520738181, + 'nutation_precession': np.array( + [[0.999936381496146, -0.010344758016410, -0.004496547784299], + [0.010344942303489, 0.999946489495557, 0.000017727054455], + [0.004496123789670, -0.000064242454080, 0.999989890320785]] + ), + 'earth_flattening': 0.003352813177897, + 'earth_equatorial_radius': 6378136 + }, + }, + + { + 'line': 8356, + 'pixel': 7172, + 'lon': 144.980104, + 'lat': -34.929123, + 'tolerance': 0.01, + 'nav_params': { + 'line_offset': 5513.0, + 'pixel_offset': 6688.5, + 'stepping_angle': 0.000035000004573, + 'sampling_angle': 0.000023929998861, + 'misalignment': np.array( + [[0.999999165534973, 0.000510364072397, 0.001214201096445], + [-0.000511951977387, 0.999999046325684, 0.001307720085606], + [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + ), + 'greenwich_sidereal_time': 2.530392320342610, + 'angle_between_earth_and_sun': 3.935707944858620, + 'declination_from_sat_to_sun': -0.208713576872715, + 'right_ascension_from_sat_to_sun': 3.242660398961383, + 'angle_between_sat_spin_and_z_axis': 3.149118633034304, + 'angle_between_sat_spin_and_yz_plane': 0.000546042025980, + 'sat_position_earth_fixed_x': -32390273.633557569235563, + 'sat_position_earth_fixed_y': 27003859.543131537735462, + 'sat_position_earth_fixed_z': -210800.087734811415430, + 'nutation_precession': np.array( + [[0.999936381432029, -0.010344763228876, -0.004496550050695], + [0.010344947502662, 0.999946489441823, 0.000017724053657], + [0.004496126086653, -0.000064239500295, 0.999989890310647]] + ), + 'earth_flattening': 0.003352813177897, + 'earth_equatorial_radius': 6378136 + }, + }, + +] + +NAVIGATION_REFERENCE = VIS_NAVIGATION_REFERENCE + IR_NAVIGATION_REFERENCE + + +class TestVISSRNavigation: + """VISSR navigation tests.""" + + def test_interpolate_prediction(self): + """Test interpolation of orbit/attitude predictions.""" + res = nav.interpolate_prediction( + prediction_times=np.array([1, 2, 3]), + predicted_values=np.array([10, 20, 30]), + desired_time=np.array([1.5, 2.5]) + ) + np.testing.assert_allclose(res, [15, 25]) + + @pytest.mark.parametrize( + 'desired_time,nearest_pred_exp', + [ + (0, [10, 20]), + (2.5, [30, 40]), + (5, [50, 60]) + ] + ) + def test_get_nearest_prediction(self, desired_time, 
nearest_pred_exp): + """Test getting the nearest prediction.""" + res = nav.get_nearest_prediction( + prediction_times=np.array([1, 2, 3]), + predicted_values=np.array([[10, 20], [30, 40], [50, 60]]), + desired_time=desired_time + ) + np.testing.assert_allclose(res, nearest_pred_exp) + + def test_get_observation_time(self): + """Test getting the observation time of a given pixel.""" + spinning_rate = 100 + sampling_angle = 0.01 + num_sensors = 1 + scan_params = (spinning_rate, num_sensors, sampling_angle) + time = nav.get_observation_time( + point=np.array([11, 100]), + start_time_of_scan=50000, + scan_params=scan_params + ) + np.testing.assert_allclose(time, 50000.0000705496871047) + + @pytest.mark.parametrize( + 'line,pixel,params,lon_exp,lat_exp,tolerance', + [ + (ref['line'], + ref['pixel'], + ref['nav_params'], + ref['lon'], + ref['lat'], + ref['tolerance']) + for ref in NAVIGATION_REFERENCE + ] + ) + def test_get_lon_lat(self, line, pixel, params, lon_exp, lat_exp, + tolerance): + """Test getting lon/lat coordinates for a given pixel.""" + nav_params = nav.NavigationParameters(**params) + lon, lat = nav.get_lon_lat(line, pixel, nav_params) + np.testing.assert_allclose( + (lon, lat), (lon_exp, lat_exp), atol=tolerance + ) + + def test_nav_matrices_are_contiguous(self): + """Test that navigation matrices are stored as C-contiguous arrays.""" + nav_params = nav.NavigationParameters( + **NAVIGATION_REFERENCE[0]['nav_params'] + ) + assert nav_params.misalignment.flags['C_CONTIGUOUS'] + assert nav_params.nutation_precession.flags['C_CONTIGUOUS'] + + def test_transform_image_coords_to_scanning_angles(self): + """Test transformation from image coordinates to scanning angles.""" + angles = nav.transform_image_coords_to_scanning_angles( + point=np.array([200.5, 100.5]), + offset=np.array([101, 201]), + sampling=np.array([0.01, 0.02]) + ) + np.testing.assert_allclose(angles, [-2, 1]) + + def test_transform_scanning_angles_to_satellite_coords(self): + """Test transformation from scanning angles to satellite coordinates.""" + transformer = nav.ScanningAnglesToSatelliteCoordsTransformer( + misalignment=np.diag([1, 2, 3]).astype(float) + ) + point_sat = transformer.transform(np.array([np.pi, np.pi/2])) + np.testing.assert_allclose(point_sat, [0, 0, 3], atol=1E-12) + + def test_transform_satellite_to_earth_fixed_coords(self): + """Test transformation from satellite to earth-fixed coordinates.""" + transformer = nav.SatelliteToEarthFixedCoordsTransformer( + greenwich_sidereal_time=np.pi, + sat_sun_angles=np.array([np.pi, np.pi/2]), + earth_sun_angle=np.pi, + spin_angles=np.array([np.pi, np.pi/2]), + nutation_precession=np.diag([1, 2, 3]).astype(float) + ) + res = transformer.transform(np.array([1, 2, 3], dtype=float)) + np.testing.assert_allclose(res, [-3, 1, -2]) + + def test_intersect_view_vector_with_earth(self): + """Test intersection of a view vector with the earth's surface.""" + eq_radius = 6371 * 1000 + flattening = 0.003 + intersector = nav.EarthIntersector( + sat_pos=np.array([36000 * 1000, 0, 0], dtype=float), + ellipsoid=np.array([eq_radius, flattening]) + ) + point = intersector.intersect(np.array([-1, 0, 0], dtype=float)) + np.testing.assert_allclose(point, [eq_radius, 0, 0]) + + @pytest.mark.parametrize( + 'point_earth_fixed,point_geodetic_exp', + [ + ([0, 0, 1], [0, 90]), + ([0, 0, -1], [0, -90]), + ([1, 0, 0], [0, 0]), + ([-1, 0, 0], [180, 0]), + ([1, 1, 1], [45, 35.426852]), + ] + ) + def test_transform_earth_fixed_to_geodetic_coords( + self, point_earth_fixed, point_geodetic_exp + 
): + """Test transformation from earth-fixed to geodetic coordinates.""" + point_geodetic = nav.transform_earth_fixed_to_geodetic_coords( + np.array(point_earth_fixed), + 0.003 + ) + np.testing.assert_allclose(point_geodetic, point_geodetic_exp) + + def test_normalize_vector(self): + """Test vector normalization.""" + v = np.array([1, 2, 3], dtype=float) + normed = nav.normalize_vector(v) + np.testing.assert_allclose(normed, v / np.sqrt(14)) + + +# class TestImageNavigator: +# @pytest.fixture +# def navigator(self): +# attitude_prediction = np.ones(33, +# dtype=nav.attitude_prediction_dtype) +# orbit_prediction = np.ones(9, dtype=nav.orbit_prediction_dtype) +# return nav.ImageNavigator( +# start_time_of_scan=50000, +# line_offset=123, +# pixel_offset=123, +# sampling_angle=0.01, +# stepping_angle=0.02, +# spinning_rate=100, +# num_sensors=1, +# misalignment=np.diag([1, 2, 3]).astype(np.float32), +# attitude_prediction=attitude_prediction, +# orbit_prediction=orbit_prediction, +# earth_flattening=0.0003, +# earth_equatorial_radius=6378 +# ) +# +# def test_has_correct_line_offset(self, navigator): +# assert navigator.line_offset == 123 +# +# def test_has_correct_attitude_prediction(self, navigator): +# assert navigator.attitude_prediction.dtype == nav.attitude_prediction_dtype + diff --git a/satpy/tests/reader_tests/test_utils.py b/satpy/tests/reader_tests/test_utils.py index 0a7a605c4d..5c405c3bfb 100644 --- a/satpy/tests/reader_tests/test_utils.py +++ b/satpy/tests/reader_tests/test_utils.py @@ -386,3 +386,10 @@ def test_remove_sunearth_corr(self): np.testing.assert_allclose(out_refl, self.raw_refl) self.assertFalse(out_refl.attrs['sun_earth_distance_correction_applied']) assert isinstance(out_refl.data, da.Array) + + def test_modified_julian_day_to_datetime64(self): + """Test conversion from modified julian day to datetime64.""" + self.assertEqual(hf.modified_julian_day_to_datetime64(np.array([0])), + np.datetime64('1858-11-17', 'us')) + self.assertEqual(hf.modified_julian_day_to_datetime64(np.array([40587.5])), + np.datetime64('1970-01-01 12:00', 'us')) From 1a35b4adbbaf5e23a02451621eeea0856930480e Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Tue, 8 Jun 2021 14:13:22 +0000 Subject: [PATCH 0006/1416] Add prediction interpolation --- satpy/readers/gms5_vissr_l1b.py | 16 +- satpy/readers/gms5_vissr_navigation.py | 555 ++++++++----- .../tests/reader_tests/test_gms5_vissr_l1b.py | 742 +++++++++++++----- 3 files changed, 922 insertions(+), 391 deletions(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index 7c089ac347..83e372fe7a 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -104,12 +104,12 @@ ('scheduled_observation_time', R8), ('stepping_angle_along_line', CHANNELS), ('sampling_angle_along_pixel', CHANNELS), - ('central_line_number_of_vissr_frame', CHANNELS), # TODO - ('central_pixel_number_of_vissr_frame', CHANNELS), # TODO - ('pixel_difference_of_vissr_center_from_normal_position', CHANNELS), # TODO + ('central_line_number_of_vissr_frame', CHANNELS), + ('central_pixel_number_of_vissr_frame', CHANNELS), + ('pixel_difference_of_vissr_center_from_normal_position', CHANNELS), ('number_of_sensor_elements', CHANNELS), - ('total_number_of_vissr_frame_lines', CHANNELS), # TODO - ('total_number_of_vissr_frame_pixels', CHANNELS), # TODO + ('total_number_of_vissr_frame_lines', CHANNELS), + ('total_number_of_vissr_frame_pixels', CHANNELS), ('vissr_misalignment', R4, (3,)), ('matrix_of_misalignment', R4, (3, 3)), 
('parameters', [('judgement_of_observation_convergence_time', R4), @@ -143,8 +143,8 @@ ('longitude_of_ascending_node', R8), ('argument_of_perigee', R8), ('mean_anomaly', R8), - ('longitude_of_ssp', R8), # TODO - ('latitude_of_ssp', R8)]), # TODO + ('longitude_of_ssp', R8), + ('latitude_of_ssp', R8)]), ('reserved_2', I4, 2), ('attitude_parameters', [('epoch_time', R8), ('angle_between_z_axis_and_satellite_spin_axis_at_epoch_time', R8), @@ -385,7 +385,7 @@ ('data_id', U1, (4, )), ('line_number', I4), ('line_name', I4), - ('error_line_flag', I4), # TODO + ('error_line_flag', I4), ('error_message', I4), ('mode_error_flag', I4), ('scan_time', R8), diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index 9f3b0e9666..e7e6fc4e7a 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -1,8 +1,16 @@ -"""GMS-5 VISSR Navigation.""" +"""GMS-5 VISSR Navigation. + + +Reference: `GMS User Guide`_, Appendix E, S-VISSR Mapping. + +.. _GMS User Guide: + https://www.data.jma.go.jp/mscweb/en/operation/fig/GMS_Users_Guide_3rd_Edition_Rev1.pdf +""" import numba import numpy as np + @numba.njit def get_lons_lats(lines, pixels, nav_params): num_lines = len(lines) @@ -20,185 +28,37 @@ def get_lons_lats(lines, pixels, nav_params): lats[i, j] = lat -@numba.njit -def get_observation_time(point, start_time_of_scan, scan_params): - """Calculate observation time of a VISSR pixel.""" - relative_time = _get_relative_observation_time(point, scan_params) - return start_time_of_scan + relative_time - - -@numba.njit -def _get_relative_observation_time(point, scanning_params): - line, pixel = point - spinning_rate, num_sensors, sampling_angle = scanning_params - spinning_freq = 1440 * spinning_rate - line_step = np.floor((line - 1) / num_sensors) - pixel_step = (sampling_angle * pixel) / (2 * np.pi) - return (line_step + pixel_step) / spinning_freq - - -@numba.njit -def interpolate_nav_params_at_obs_time(nav_params, obs_time): - pass - - -@numba.njit -def interpolate_prediction(prediction_times, predicted_values, desired_time): - # TODO: Interpolate all fields - # TODO: How to interpolate angles? 
See mspVissrGetInsertValue - return np.interp(desired_time, prediction_times, predicted_values) - - -@numba.njit -def get_nearest_prediction(prediction_times, predicted_values, desired_time): - time_diff = desired_time - prediction_times - idx_of_nearest_prediction = np.argmin(np.fabs(time_diff)) - return predicted_values[idx_of_nearest_prediction] - - -def dict_to_nav_params(dictionary): - arr = np.empty(1, nav_params_dtype) - for key, val in dictionary.items(): - arr[key] = val - return arr - - - -nav_params_dtype = np.dtype([ - ('start_time_of_scan', np.float32), - ('spinning_rate', np.float32), - ('num_sensors', np.int32), - ('sampling_angle', np.float32), - ('equatorial_earth_radius', np.float32), - ('earth_flattening', np.float32) -]) - - -attitude_prediction_dtype = np.dtype( - [ - ('prediction_time_mjd', np.float64) - ] -) -orbit_prediction_dtype = np.dtype( +@numba.experimental.jitclass( [ - ('prediction_time_mjd', np.float64) + ('start_time_of_scan', numba.float64), + ('spinning_rate', numba.float64), + ('num_sensors', numba.int64), + ('sampling_angle', numba.float64) ] ) -image_navigator_spec = [ - ('start_time_of_scan', numba.float64), - ('line_offset', numba.int32), - ('pixel_offset', numba.int32), - ('sampling_angle', numba.float32), - ('stepping_angle', numba.float32), - ('spinning_rate', numba.float32), - ('num_sensors', numba.int32), - ('misalignment', numba.float32[:, :]), - ('attitude_prediction', numba.from_dtype(attitude_prediction_dtype)[:]), - ('orbit_prediction', numba.from_dtype(orbit_prediction_dtype)[:]), - ('earth_flattening', numba.float32), - ('earth_equatorial_radius', numba.float32) -] # TODO: Compare types with header types - -@numba.experimental.jitclass(spec=image_navigator_spec) -class ImageNavigator: - def __init__(self, start_time_of_scan, line_offset, pixel_offset, sampling_angle, stepping_angle, - spinning_rate, num_sensors, misalignment, attitude_prediction, - orbit_prediction, earth_flattening, earth_equatorial_radius): +class ScanningParameters: + def __init__(self, start_time_of_scan, spinning_rate, num_sensors, + sampling_angle): self.start_time_of_scan = start_time_of_scan - self.line_offset = line_offset - self.pixel_offset = pixel_offset - self.sampling_angle = sampling_angle - self.stepping_angle = stepping_angle self.spinning_rate = spinning_rate self.num_sensors = num_sensors - self.misalignment = misalignment - self.attitude_prediction = attitude_prediction - self.orbit_prediction = orbit_prediction - self.earth_flattening = earth_flattening - self.earth_equatorial_radius = earth_equatorial_radius - - - -@numba.experimental.jitclass([ - ('line_offset', numba.int32), - ('pixel_offset', numba.int32), - ('stepping_angle', numba.float64), - ('sampling_angle', numba.float64), - ('misalignment', numba.types.Array(numba.float64, 2, layout='C')), - ('greenwich_sidereal_time', numba.float64), - ('angle_between_earth_and_sun', numba.float64), - ('declination_from_sat_to_sun', numba.float64), - ('right_ascension_from_sat_to_sun', numba.float64), - ('angle_between_sat_spin_and_z_axis', numba.float64), - ('angle_between_sat_spin_and_yz_plane', numba.float64), - ('sat_position_earth_fixed_x', numba.float64), - ('sat_position_earth_fixed_y', numba.float64), - ('sat_position_earth_fixed_z', numba.float64), - ('nutation_precession', numba.types.Array(numba.float64, 2, layout='C')), - ('earth_flattening', numba.float64), - ('earth_equatorial_radius', numba.float64) -]) -class NavigationParameters: - def __init__( - self, - line_offset, - pixel_offset, - 
stepping_angle, - sampling_angle, - misalignment, - greenwich_sidereal_time, - angle_between_earth_and_sun, - declination_from_sat_to_sun, - right_ascension_from_sat_to_sun, - angle_between_sat_spin_and_z_axis, - angle_between_sat_spin_and_yz_plane, - sat_position_earth_fixed_x, - sat_position_earth_fixed_y, - sat_position_earth_fixed_z, - nutation_precession, - earth_flattening, - earth_equatorial_radius - ): - self.line_offset = line_offset - self.pixel_offset = pixel_offset - self.stepping_angle = stepping_angle self.sampling_angle = sampling_angle - self.misalignment = misalignment - self.greenwich_sidereal_time = greenwich_sidereal_time - self.angle_between_earth_and_sun = angle_between_earth_and_sun - self.declination_from_sat_to_sun = declination_from_sat_to_sun - self.right_ascension_from_sat_to_sun = right_ascension_from_sat_to_sun - self.angle_between_sat_spin_and_z_axis = angle_between_sat_spin_and_z_axis - self.angle_between_sat_spin_and_yz_plane = angle_between_sat_spin_and_yz_plane - self.sat_position_earth_fixed_x = sat_position_earth_fixed_x - self.sat_position_earth_fixed_y = sat_position_earth_fixed_y - self.sat_position_earth_fixed_z = sat_position_earth_fixed_z - self.nutation_precession = nutation_precession - self.earth_flattening = earth_flattening - self.earth_equatorial_radius = earth_equatorial_radius - - # TODO: Remember that all angles are expected in rad - # TODO: Watch out shape of 3x3 matrices! See msVissrNav.c - - def get_image_offset(self): - return self.line_offset, self.pixel_offset - def get_sampling(self): - return self.stepping_angle, self.sampling_angle - def get_sat_sun_angles(self): - return np.array([self.declination_from_sat_to_sun, self.right_ascension_from_sat_to_sun]) - - def get_spin_angles(self): - return np.array([self.angle_between_sat_spin_and_z_axis, self.angle_between_sat_spin_and_yz_plane]) +@numba.njit +def get_observation_time(point, scan_params): + """Calculate observation time of a VISSR pixel.""" + relative_time = _get_relative_observation_time(point, scan_params) + return scan_params.start_time_of_scan + relative_time - def get_ellipsoid(self): - return np.array([self.earth_equatorial_radius, self.earth_flattening]) - def get_sat_position(self): - return np.array((self.sat_position_earth_fixed_x, - self.sat_position_earth_fixed_y, - self.sat_position_earth_fixed_z)) +@numba.njit +def _get_relative_observation_time(point, scan_params): + line, pixel = point + spinning_freq = 1440 * scan_params.spinning_rate + line_step = np.floor((line - 1) / scan_params.num_sensors) + pixel_step = (scan_params.sampling_angle * pixel) / (2 * np.pi) + return (line_step + pixel_step) / spinning_freq @numba.njit @@ -217,7 +77,7 @@ def get_lon_lat(line, pixel, nav_params): ) point_on_earth = _intersect_with_earth(view_vector_earth_fixed, nav_params) lon, lat = transform_earth_fixed_to_geodetic_coords( - point_on_earth, nav_params.earth_flattening + point_on_earth, nav_params.static_params.earth_flattening ) return lon, lat @@ -245,7 +105,7 @@ def transform_image_coords_to_scanning_angles(point, offset, sampling): @numba.njit def _transform_scanning_angles_to_satellite_coords(angles, nav_params): transformer = ScanningAnglesToSatelliteCoordsTransformer( - nav_params.misalignment + nav_params.static_params.misalignment ) return transformer.transform(angles) @@ -289,13 +149,12 @@ def _get_transforms(self, angles): @numba.njit def _transform_satellite_to_earth_fixed_coords(point, nav_params): - # TODO: kwargs possible here? 
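+    # The time-dependent quantities now come from the interpolated
+    # sub-objects: sidereal time, satellite-to-sun geometry, satellite
+    # position and nutation/precession from nav_params.orbit; the spin and
+    # earth-sun angles from nav_params.attitude.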
transformer = SatelliteToEarthFixedCoordsTransformer( - nav_params.greenwich_sidereal_time, + nav_params.orbit.greenwich_sidereal_time, nav_params.get_sat_sun_angles(), - nav_params.angle_between_earth_and_sun, + nav_params.attitude.angle_between_earth_and_sun, nav_params.get_spin_angles(), - nav_params.nutation_precession + nav_params.orbit.nutation_precession ) return transformer.transform(point) @@ -478,3 +337,349 @@ def transform_earth_fixed_to_geodetic_coords(point, earth_flattening): def normalize_vector(v): """Normalize the given vector.""" return v / np.sqrt(np.dot(v, v)) + + +@numba.njit +def interpolate_cont(x, xp, yp): + """Linear interpolation of continuous quantities. + + Numpy equivalent would be np.interp(..., left=np.nan, right=np.nan), but + numba currently doesn't support those keyword arguments. + """ + try: + return _interpolate(x, xp, yp, False) + except Exception: + return np.nan + + +@numba.njit +def interpolate_angles(x, xp, yp): + """Linear interpolation of periodic angles. + + Takes care of phase jumps by wrapping angle differences to [-pi, pi]. + + Numpy equivalent would be np.interp(x, xp, np.unwrap(yp)), but + numba currently doesn't support np.unwrap. + """ + try: + return _interpolate(x, xp, yp, True) + except Exception: + return np.nan + + +@numba.njit +def _interpolate(x, xp, yp, wrap_2pi): + i = _find_enclosing_index(x, xp) + offset = yp[i] + x_diff = xp[i+1] - xp[i] + y_diff = yp[i+1] - yp[i] + if wrap_2pi: + y_diff = _wrap_2pi(y_diff) + slope = y_diff / x_diff + dist = x - xp[i] + return offset + slope * dist + + +@numba.njit +def _find_enclosing_index(x, xp): + """Find where xp encloses x.""" + for i in range(len(xp) - 1): + if xp[i] <= x < xp[i+1]: + return i + raise Exception('x not enclosed by xp') + + +@numba.njit +def _wrap_2pi(values): + """Wrap values to interval [-pi, pi]. 
+ + Source: https://stackoverflow.com/a/15927914/5703449 + """ + return (values + np.pi) % (2 * np.pi) - np.pi + + +@numba.njit +def interpolate_nearest(x, xp, yp): + """Nearest neighbour interpolation.""" + try: + return _interpolate_nearest(x, xp, yp) + except Exception: + return np.nan * np.ones_like(yp[0]) + + +@numba.njit +def _interpolate_nearest(x, xp, yp): + i = _find_enclosing_index(x, xp) + return yp[i] + + +@numba.experimental.jitclass( + [ + ('line_offset', numba.float64), + ('pixel_offset', numba.float64), + ('stepping_angle', numba.float64), + ('sampling_angle', numba.float64), + ('misalignment', numba.types.Array(numba.float64, 2, layout='C')), + ('earth_flattening', numba.float64), + ('earth_equatorial_radius', numba.float64), + ] +) +class StaticNavigationParameters: + def __init__( + self, + line_offset, + pixel_offset, + stepping_angle, + sampling_angle, + misalignment, + earth_flattening, + earth_equatorial_radius + ): + self.line_offset = line_offset + self.pixel_offset = pixel_offset + self.stepping_angle = stepping_angle + self.sampling_angle = sampling_angle + self.misalignment = misalignment + self.earth_flattening = earth_flattening + self.earth_equatorial_radius = earth_equatorial_radius + + +@numba.experimental.jitclass( + [ + ('prediction_time', numba.float64[:]), + ('greenwich_sidereal_time', numba.float64[:]), + ('declination_from_sat_to_sun', numba.float64[:]), + ('right_ascension_from_sat_to_sun', numba.float64[:]), + ('sat_position_earth_fixed_x', numba.float64[:]), + ('sat_position_earth_fixed_y', numba.float64[:]), + ('sat_position_earth_fixed_z', numba.float64[:]), + ('nutation_precession', numba.types.Array(numba.float64, 3, layout='C')), + ] +) +class OrbitPrediction: + def __init__( + self, + prediction_time, + greenwich_sidereal_time, + declination_from_sat_to_sun, + right_ascension_from_sat_to_sun, + sat_position_earth_fixed_x, + sat_position_earth_fixed_y, + sat_position_earth_fixed_z, + nutation_precession + ): + self.prediction_time = prediction_time + self.greenwich_sidereal_time = greenwich_sidereal_time + self.declination_from_sat_to_sun = declination_from_sat_to_sun + self.right_ascension_from_sat_to_sun = right_ascension_from_sat_to_sun + self.sat_position_earth_fixed_x = sat_position_earth_fixed_x + self.sat_position_earth_fixed_y = sat_position_earth_fixed_y + self.sat_position_earth_fixed_z = sat_position_earth_fixed_z + self.nutation_precession = nutation_precession + + def interpolate(self, observation_time): + greenwich_sidereal_time = self._interpolate_angles( + self.greenwich_sidereal_time, + observation_time + ) + declination_from_sat_to_sun = self._interpolate_angles( + self.declination_from_sat_to_sun, + observation_time + ) + right_ascension_from_sat_to_sun = self._interpolate_angles( + self.right_ascension_from_sat_to_sun, + observation_time + ) + sat_position_earth_fixed_x = self._interpolate_cont( + self.sat_position_earth_fixed_x, + observation_time + ) + sat_position_earth_fixed_y = self._interpolate_cont( + self.sat_position_earth_fixed_y, + observation_time + ) + sat_position_earth_fixed_z = self._interpolate_cont( + self.sat_position_earth_fixed_z, + observation_time + ) + nutation_precession = self._interpolate_nearest( + self.nutation_precession, + observation_time + ) + return Orbit( + greenwich_sidereal_time, + declination_from_sat_to_sun, + right_ascension_from_sat_to_sun, + sat_position_earth_fixed_x, + sat_position_earth_fixed_y, + sat_position_earth_fixed_z, + nutation_precession + ) + + def 
_interpolate_cont(self, predicted_values, observation_time): + return interpolate_cont(observation_time, self.prediction_time, predicted_values) + + def _interpolate_angles(self, predicted_values, observation_time): + return interpolate_angles(observation_time, self.prediction_time, predicted_values) + + def _interpolate_nearest(self, predicted_values, observation_time): + return interpolate_nearest(observation_time, self.prediction_time, predicted_values) + + +@numba.experimental.jitclass( + [ + ('greenwich_sidereal_time', numba.float64), + ('declination_from_sat_to_sun', numba.float64), + ('right_ascension_from_sat_to_sun', numba.float64), + ('sat_position_earth_fixed_x', numba.float64), + ('sat_position_earth_fixed_y', numba.float64), + ('sat_position_earth_fixed_z', numba.float64), + ('nutation_precession', numba.types.Array(numba.float64, 2, layout='C')), + ] +) +class Orbit: + def __init__( + self, + greenwich_sidereal_time, + declination_from_sat_to_sun, + right_ascension_from_sat_to_sun, + sat_position_earth_fixed_x, + sat_position_earth_fixed_y, + sat_position_earth_fixed_z, + nutation_precession + ): + self.greenwich_sidereal_time = greenwich_sidereal_time + self.declination_from_sat_to_sun = declination_from_sat_to_sun + self.right_ascension_from_sat_to_sun = right_ascension_from_sat_to_sun + self.sat_position_earth_fixed_x = sat_position_earth_fixed_x + self.sat_position_earth_fixed_y = sat_position_earth_fixed_y + self.sat_position_earth_fixed_z = sat_position_earth_fixed_z + self.nutation_precession = nutation_precession + + +@numba.experimental.jitclass( + [ + ('prediction_time', numba.float64[:]), + ('angle_between_earth_and_sun', numba.float64[:]), + ('angle_between_sat_spin_and_z_axis', numba.float64[:]), + ('angle_between_sat_spin_and_yz_plane', numba.float64[:]), + ] +) +class AttitudePrediction: + def __init__( + self, + prediction_time, + angle_between_earth_and_sun, + angle_between_sat_spin_and_z_axis, + angle_between_sat_spin_and_yz_plane + ): + self.prediction_time = prediction_time + self.angle_between_earth_and_sun = angle_between_earth_and_sun + self.angle_between_sat_spin_and_z_axis = angle_between_sat_spin_and_z_axis + self.angle_between_sat_spin_and_yz_plane = angle_between_sat_spin_and_yz_plane + + def interpolate(self, observation_time): + angle_between_earth_and_sun = self._interpolate( + observation_time, self.angle_between_earth_and_sun + ) + angle_between_sat_spin_and_z_axis = self._interpolate( + observation_time, self.angle_between_sat_spin_and_z_axis, + ) + angle_between_sat_spin_and_yz_plane = self._interpolate( + observation_time, self.angle_between_sat_spin_and_yz_plane + ) + return Attitude( + angle_between_earth_and_sun, + angle_between_sat_spin_and_z_axis, + angle_between_sat_spin_and_yz_plane + ) + + def _interpolate(self, observation_time, predicted_values): + return interpolate_angles(observation_time, self.prediction_time, predicted_values) + + +@numba.experimental.jitclass( + [ + ('angle_between_earth_and_sun', numba.float64), + ('angle_between_sat_spin_and_z_axis', numba.float64), + ('angle_between_sat_spin_and_yz_plane', numba.float64), + ] +) +class Attitude: + def __init__( + self, + angle_between_earth_and_sun, + angle_between_sat_spin_and_z_axis, + angle_between_sat_spin_and_yz_plane + ): + self.angle_between_earth_and_sun = angle_between_earth_and_sun + self.angle_between_sat_spin_and_z_axis = angle_between_sat_spin_and_z_axis + self.angle_between_sat_spin_and_yz_plane = angle_between_sat_spin_and_yz_plane + + 
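+# A minimal usage sketch (made-up numbers; real values come from the attitude
+# prediction block of the VISSR header, with all angles in rad):
+#
+#     attitude_prediction = AttitudePrediction(
+#         prediction_time=np.array([0.0, 1.0]),
+#         angle_between_earth_and_sun=np.array([0.0, 0.1]),
+#         angle_between_sat_spin_and_z_axis=np.array([3.1, 3.2]),
+#         angle_between_sat_spin_and_yz_plane=np.array([0.0, 0.001])
+#     )
+#     attitude = attitude_prediction.interpolate(0.5)
+#     # attitude.angle_between_earth_and_sun -> 0.05
+
+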
+@numba.experimental.jitclass( + [ + ('attitude', Attitude.class_type.instance_type), + ('orbit', Orbit.class_type.instance_type), + ('static_params', StaticNavigationParameters.class_type.instance_type), + ] +) +class NavigationParameters: + def __init__(self, attitude, orbit, static_params): + self.attitude = attitude + self.orbit = orbit + self.static_params = static_params + + # TODO: Remember that all angles are expected in rad + # TODO: Watch out shape of 3x3 matrices! See msVissrNav.c + + def get_image_offset(self): + return self.static_params.line_offset, self.static_params.pixel_offset + + def get_sampling(self): + return self.static_params.stepping_angle, self.static_params.sampling_angle + + def get_sat_sun_angles(self): + return np.array([ + self.orbit.declination_from_sat_to_sun, + self.orbit.right_ascension_from_sat_to_sun + ]) + + def get_spin_angles(self): + return np.array([ + self.attitude.angle_between_sat_spin_and_z_axis, + self.attitude.angle_between_sat_spin_and_yz_plane + ]) + + def get_ellipsoid(self): + return np.array([ + self.static_params.earth_equatorial_radius, + self.static_params.earth_flattening + ]) + + def get_sat_position(self): + return np.array((self.orbit.sat_position_earth_fixed_x, + self.orbit.sat_position_earth_fixed_y, + self.orbit.sat_position_earth_fixed_z)) + + +@numba.experimental.jitclass( + [ + ('attitude_prediction', AttitudePrediction.class_type.instance_type), + ('orbit_prediction', OrbitPrediction.class_type.instance_type), + ('static_params', StaticNavigationParameters.class_type.instance_type), + ] +) +class PredictionInterpolator: + def __init__(self, attitude_prediction, orbit_prediction, static_params): + self.attitude_prediction = attitude_prediction + self.orbit_prediction = orbit_prediction + self.static_params = static_params + + def interpolate(self, observation_time): + attitude = self.attitude_prediction.interpolate(observation_time) + orbit = self.orbit_prediction.interpolate(observation_time) + return self._get_nav_params(attitude, orbit) + + def _get_nav_params(self, attitude, orbit): + return NavigationParameters(attitude, orbit, self.static_params) diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index eccfc04aab..1c6fcbc1d1 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -17,33 +17,39 @@ 'lon': 139.990380, 'lat': 35.047056, 'tolerance': 0, - 'nav_params': { - 'line_offset': 1378.5, - 'pixel_offset': 1672.5, - 'stepping_angle': 0.000140000047395, - 'sampling_angle': 0.000095719995443, - 'misalignment': np.array( - [[0.999999165534973, 0.000510364072397, 0.001214201096445], - [-0.000511951977387, 0.999999046325684, 0.001307720085606], - [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + 'nav_params': nav.NavigationParameters( + attitude=nav.Attitude( + angle_between_earth_and_sun=3.997397917902958, + angle_between_sat_spin_and_z_axis=3.149118633034304, + angle_between_sat_spin_and_yz_plane=0.000546042025980, ), - 'greenwich_sidereal_time': 2.468529732418296, - 'angle_between_earth_and_sun': 3.997397917902958, - 'declination_from_sat_to_sun': -0.208770861178982, - 'right_ascension_from_sat_to_sun': 3.304369303579407, - 'angle_between_sat_spin_and_z_axis': 3.149118633034304, - 'angle_between_sat_spin_and_yz_plane': 0.000546042025980, - 'sat_position_earth_fixed_x': -32390963.148471601307392, - 'sat_position_earth_fixed_y': 27003395.381247851997614, - 'sat_position_earth_fixed_z': 
-228134.860026293463307, - 'nutation_precession': np.array( - [[0.999936381496146, -0.010344758016410, -0.004496547784299], - [0.010344942303489, 0.999946489495557, 0.000017727054455], - [0.004496123789670, -0.000064242454080, 0.999989890320785]] + orbit=nav.Orbit( + greenwich_sidereal_time=2.468529732418296, + declination_from_sat_to_sun=-0.208770861178982, + right_ascension_from_sat_to_sun=3.304369303579407, + sat_position_earth_fixed_x=-32390963.148471601307392, + sat_position_earth_fixed_y=27003395.381247851997614, + sat_position_earth_fixed_z=-228134.860026293463307, + nutation_precession=np.array( + [[0.999936381496146, -0.010344758016410, -0.004496547784299], + [0.010344942303489, 0.999946489495557, 0.000017727054455], + [0.004496123789670, -0.000064242454080, 0.999989890320785]] + ), ), - 'earth_flattening': 0.003352813177897, - 'earth_equatorial_radius': 6378136 - }, + static_params=nav.StaticNavigationParameters( + line_offset=1378.5, + pixel_offset=1672.5, + stepping_angle=0.000140000047395, + sampling_angle=0.000095719995443, + misalignment=np.array( + [[0.999999165534973, 0.000510364072397, 0.001214201096445], + [-0.000511951977387, 0.999999046325684, 0.001307720085606], + [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + ), + earth_flattening=0.003352813177897, + earth_equatorial_radius=6378136.0 + ), + ) }, { 'line': 2089, @@ -51,68 +57,179 @@ 'lon': 144.996967, 'lat': -34.959853, 'tolerance': 0, - 'nav_params': { - 'line_offset': 1378.5, - 'pixel_offset': 1672.5, - 'stepping_angle': 0.000140000047395, - 'sampling_angle': 0.000095719995443, - 'misalignment': np.array( - [[0.999999165534973, 0.000510364072397, 0.001214201096445], - [-0.000511951977387, 0.999999046325684, 0.001307720085606], - [-0.001213532872498, -0.001308340579271, 0.999998450279236]] - ), - 'greenwich_sidereal_time': 2.530392320846865, - 'angle_between_earth_and_sun': 3.935707944355762, - 'declination_from_sat_to_sun': -0.208713576872247, - 'right_ascension_from_sat_to_sun': 3.242660398458377, - 'angle_between_sat_spin_and_z_axis': 3.149118633034304, - 'angle_between_sat_spin_and_yz_plane': 0.000546042025980, - 'sat_position_earth_fixed_x': -32390273.633551981300116, - 'sat_position_earth_fixed_y': 27003859.543135114014149, - 'sat_position_earth_fixed_z': -210800.087589388160268, - 'nutation_precession': np.array( - [[0.999936381432029, -0.010344763228876, -0.004496550050695], - [0.010344947502662, 0.999946489441823, 0.000017724053657], - [0.004496126086653, -0.000064239500295, 0.999989890310647]] + 'nav_params': nav.NavigationParameters( + attitude=nav.Attitude( + angle_between_earth_and_sun=3.935707944355762, + angle_between_sat_spin_and_z_axis=3.149118633034304, + angle_between_sat_spin_and_yz_plane=0.000546042025980, ), - 'earth_flattening': 0.003352813177897, - 'earth_equatorial_radius': 6378136 - }, - }, - { - 'line': 999, - 'pixel': 2996, - 'lon': -165.023842, - 'lat': 20.005603, - 'tolerance': 0, - 'nav_params': { - 'line_offset': 1378.5, - 'pixel_offset': 1672.5, - 'stepping_angle': 0.000140000047395, - 'sampling_angle': 0.000095719995443, - 'misalignment': np.array( - [[0.999999165534973, 0.000510364072397, 0.001214201096445], - [-0.000511951977387, 0.999999046325684, 0.001307720085606], - [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + orbit=nav.Orbit( + greenwich_sidereal_time=2.530392320846865, + declination_from_sat_to_sun=-0.208713576872247, + right_ascension_from_sat_to_sun=3.242660398458377, + sat_position_earth_fixed_x=-32390273.633551981300116, + 
sat_position_earth_fixed_y=27003859.543135114014149, + sat_position_earth_fixed_z=-210800.087589388160268, + nutation_precession=np.array( + [[0.999936381432029, -0.010344763228876, -0.004496550050695], + [0.010344947502662, 0.999946489441823, 0.000017724053657], + [0.004496126086653, -0.000064239500295, 0.999989890310647]] + ), ), - 'greenwich_sidereal_time': 2.482331732831616, - 'angle_between_earth_and_sun': 3.983634620574510, - 'declination_from_sat_to_sun': -0.208758095943038, - 'right_ascension_from_sat_to_sun': 3.290601673240597, - 'angle_between_sat_spin_and_z_axis': 3.149118633034304, - 'angle_between_sat_spin_and_yz_plane': 0.000546042025980, - 'sat_position_earth_fixed_x': -32390808.779549609869719, - 'sat_position_earth_fixed_y': 27003503.047290064394474, - 'sat_position_earth_fixed_z': -224351.430479845439550, - 'nutation_precession': np.array( - [[0.999936381496146, -0.010344758016410, -0.004496547784299], - [0.010344942303489, 0.999946489495557, 0.000017727054455], - [0.004496123789670, -0.000064242454080, 0.999989890320785]] + static_params=nav.StaticNavigationParameters( + line_offset=1378.5, + pixel_offset=1672.5, + stepping_angle=0.000140000047395, + sampling_angle=0.000095719995443, + misalignment=np.array( + [[0.999999165534973, 0.000510364072397, 0.001214201096445], + [-0.000511951977387, 0.999999046325684, 0.001307720085606], + [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + ), + earth_flattening=0.003352813177897, + earth_equatorial_radius=6378136 ), - 'earth_flattening': 0.003352813177897, - 'earth_equatorial_radius': 6378136 - }, + ) }, + + + + # { + # 'line': 686, + # 'pixel': 1680, + # 'lon': 139.990380, + # 'lat': 35.047056, + # 'tolerance': 0, + # 'nav_params': { + # 'line_offset': 1378.5, + # 'pixel_offset': 1672.5, + # 'stepping_angle': 0.000140000047395, + # 'sampling_angle': 0.000095719995443, + # 'misalignment': np.array( + # [[0.999999165534973, 0.000510364072397, 0.001214201096445], + # [-0.000511951977387, 0.999999046325684, 0.001307720085606], + # [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + # ), + # 'greenwich_sidereal_time': 2.468529732418296, + # 'angle_between_earth_and_sun': 3.997397917902958, + # 'declination_from_sat_to_sun': -0.208770861178982, + # 'right_ascension_from_sat_to_sun': 3.304369303579407, + # 'angle_between_sat_spin_and_z_axis': 3.149118633034304, + # 'angle_between_sat_spin_and_yz_plane': 0.000546042025980, + # 'sat_position_earth_fixed_x': -32390963.148471601307392, + # 'sat_position_earth_fixed_y': 27003395.381247851997614, + # 'sat_position_earth_fixed_z': -228134.860026293463307, + # 'nutation_precession': np.array( + # [[0.999936381496146, -0.010344758016410, -0.004496547784299], + # [0.010344942303489, 0.999946489495557, 0.000017727054455], + # [0.004496123789670, -0.000064242454080, 0.999989890320785]] + # ), + # 'earth_flattening': 0.003352813177897, + # 'earth_equatorial_radius': 6378136 + # }, + # }, + # { + # 'line': 2089, + # 'pixel': 1793, + # 'lon': 144.996967, + # 'lat': -34.959853, + # 'tolerance': 0, + # 'nav_params': { + # 'line_offset': 1378.5, + # 'pixel_offset': 1672.5, + # 'stepping_angle': 0.000140000047395, + # 'sampling_angle': 0.000095719995443, + # 'misalignment': np.array( + # [[0.999999165534973, 0.000510364072397, 0.001214201096445], + # [-0.000511951977387, 0.999999046325684, 0.001307720085606], + # [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + # ), + # 'greenwich_sidereal_time': 2.530392320846865, + # 'angle_between_earth_and_sun': 
3.935707944355762, + # 'declination_from_sat_to_sun': -0.208713576872247, + # 'right_ascension_from_sat_to_sun': 3.242660398458377, + # 'angle_between_sat_spin_and_z_axis': 3.149118633034304, + # 'angle_between_sat_spin_and_yz_plane': 0.000546042025980, + # 'sat_position_earth_fixed_x': -32390273.633551981300116, + # 'sat_position_earth_fixed_y': 27003859.543135114014149, + # 'sat_position_earth_fixed_z': -210800.087589388160268, + # 'nutation_precession': np.array( + # [[0.999936381432029, -0.010344763228876, -0.004496550050695], + # [0.010344947502662, 0.999946489441823, 0.000017724053657], + # [0.004496126086653, -0.000064239500295, 0.999989890310647]] + # ), + # 'earth_flattening': 0.003352813177897, + # 'earth_equatorial_radius': 6378136 + # }, + # }, + # { + # 'line': 999, + # 'pixel': 2996, + # 'lon': -165.023842, + # 'lat': 20.005603, + # 'tolerance': 0, + # 'nav_params': { + # 'line_offset': 1378.5, + # 'pixel_offset': 1672.5, + # 'stepping_angle': 0.000140000047395, + # 'sampling_angle': 0.000095719995443, + # 'misalignment': np.array( + # [[0.999999165534973, 0.000510364072397, 0.001214201096445], + # [-0.000511951977387, 0.999999046325684, 0.001307720085606], + # [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + # ), + # 'greenwich_sidereal_time': 2.482331732831616, + # 'angle_between_earth_and_sun': 3.983634620574510, + # 'declination_from_sat_to_sun': -0.208758095943038, + # 'right_ascension_from_sat_to_sun': 3.290601673240597, + # 'angle_between_sat_spin_and_z_axis': 3.149118633034304, + # 'angle_between_sat_spin_and_yz_plane': 0.000546042025980, + # 'sat_position_earth_fixed_x': -32390808.779549609869719, + # 'sat_position_earth_fixed_y': 27003503.047290064394474, + # 'sat_position_earth_fixed_z': -224351.430479845439550, + # 'nutation_precession': np.array( + # [[0.999936381496146, -0.010344758016410, -0.004496547784299], + # [0.010344942303489, 0.999946489495557, 0.000017727054455], + # [0.004496123789670, -0.000064242454080, 0.999989890320785]] + # ), + # 'earth_flattening': 0.003352813177897, + # 'earth_equatorial_radius': 6378136 + # }, + # }, + # { + # 'line': 0, + # 'pixel': 0, + # 'lon': np.nan, + # 'lat': np.nan, + # 'tolerance': 0, + # 'nav_params': { + # 'line_offset': 1378.5, + # 'pixel_offset': 1672.5, + # 'stepping_angle': 0.000140000047395, + # 'sampling_angle': 0.000095719995443, + # 'misalignment': np.array( + # [[0.999999165534973, 0.000510364072397, 0.001214201096445], + # [-0.000511951977387, 0.999999046325684, 0.001307720085606], + # [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + # ), + # 'greenwich_sidereal_time': 2.482331732831616, + # 'angle_between_earth_and_sun': 3.983634620574510, + # 'declination_from_sat_to_sun': -0.208758095943038, + # 'right_ascension_from_sat_to_sun': 3.290601673240597, + # 'angle_between_sat_spin_and_z_axis': 3.149118633034304, + # 'angle_between_sat_spin_and_yz_plane': 0.000546042025980, + # 'sat_position_earth_fixed_x': -32390808.779549609869719, + # 'sat_position_earth_fixed_y': 27003503.047290064394474, + # 'sat_position_earth_fixed_z': -224351.430479845439550, + # 'nutation_precession': np.array( + # [[0.999936381496146, -0.010344758016410, -0.004496547784299], + # [0.010344942303489, 0.999946489495557, 0.000017727054455], + # [0.004496123789670, -0.000064242454080, 0.999989890320785]] + # ), + # 'earth_flattening': 0.003352813177897, + # 'earth_equatorial_radius': 6378136 + # }, + # }, ] @@ -123,119 +240,134 @@ 'lon': 139.975527, 'lat': 35.078028, 'tolerance': 0.01, - 'nav_params': { - 
'line_offset': 5513.0, - 'pixel_offset': 6688.5, - 'stepping_angle': 0.000035000004573, - 'sampling_angle': 0.000023929998861, - 'misalignment': np.array( - [[0.999999165534973, 0.000510364072397, 0.001214201096445], - [-0.000511951977387, 0.999999046325684, 0.001307720085606], - [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + 'nav_params': nav.NavigationParameters( + attitude=nav.Attitude( + angle_between_earth_and_sun=3.997397918405798, + angle_between_sat_spin_and_z_axis=3.149118633034304, + angle_between_sat_spin_and_yz_plane=0.000546042025980, ), - 'greenwich_sidereal_time': 2.468529731914041, - 'angle_between_earth_and_sun': 3.997397918405798, - 'declination_from_sat_to_sun': -0.208770861179448, - 'right_ascension_from_sat_to_sun': 3.304369304082406, - 'angle_between_sat_spin_and_z_axis': 3.149118633034304, - 'angle_between_sat_spin_and_yz_plane': 0.000546042025980, - 'sat_position_earth_fixed_x': -32390963.148477241396904, - 'sat_position_earth_fixed_y': 27003395.381243918091059, - 'sat_position_earth_fixed_z': -228134.860164520738181, - 'nutation_precession': np.array( - [[0.999936381496146, -0.010344758016410, -0.004496547784299], - [0.010344942303489, 0.999946489495557, 0.000017727054455], - [0.004496123789670, -0.000064242454080, 0.999989890320785]] + orbit=nav.Orbit( + greenwich_sidereal_time=2.468529731914041, + declination_from_sat_to_sun=-0.208770861179448, + right_ascension_from_sat_to_sun=3.304369304082406, + sat_position_earth_fixed_x=-32390963.148477241396904, + sat_position_earth_fixed_y=27003395.381243918091059, + sat_position_earth_fixed_z=-228134.860164520738181, + nutation_precession=np.array( + [[0.999936381496146, -0.010344758016410, -0.004496547784299], + [0.010344942303489, 0.999946489495557, 0.000017727054455], + [0.004496123789670, -0.000064242454080, 0.999989890320785]] + ), ), - 'earth_flattening': 0.003352813177897, - 'earth_equatorial_radius': 6378136 - }, + static_params=nav.StaticNavigationParameters( + line_offset=5513.0, + pixel_offset=6688.5, + stepping_angle=0.000035000004573, + sampling_angle=0.000023929998861, + misalignment=np.array( + [[0.999999165534973, 0.000510364072397, 0.001214201096445], + [-0.000511951977387, 0.999999046325684, 0.001307720085606], + [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + ), + earth_flattening=0.003352813177897, + earth_equatorial_radius=6378136 + ), + ) }, - { 'line': 8356, 'pixel': 7172, 'lon': 144.980104, 'lat': -34.929123, 'tolerance': 0.01, - 'nav_params': { - 'line_offset': 5513.0, - 'pixel_offset': 6688.5, - 'stepping_angle': 0.000035000004573, - 'sampling_angle': 0.000023929998861, - 'misalignment': np.array( - [[0.999999165534973, 0.000510364072397, 0.001214201096445], - [-0.000511951977387, 0.999999046325684, 0.001307720085606], - [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + 'nav_params': nav.NavigationParameters( + attitude=nav.Attitude( + angle_between_earth_and_sun=3.935707944858620, + angle_between_sat_spin_and_z_axis=3.149118633034304, + angle_between_sat_spin_and_yz_plane=0.000546042025980, ), - 'greenwich_sidereal_time': 2.530392320342610, - 'angle_between_earth_and_sun': 3.935707944858620, - 'declination_from_sat_to_sun': -0.208713576872715, - 'right_ascension_from_sat_to_sun': 3.242660398961383, - 'angle_between_sat_spin_and_z_axis': 3.149118633034304, - 'angle_between_sat_spin_and_yz_plane': 0.000546042025980, - 'sat_position_earth_fixed_x': -32390273.633557569235563, - 'sat_position_earth_fixed_y': 27003859.543131537735462, - 
'sat_position_earth_fixed_z': -210800.087734811415430, - 'nutation_precession': np.array( - [[0.999936381432029, -0.010344763228876, -0.004496550050695], - [0.010344947502662, 0.999946489441823, 0.000017724053657], - [0.004496126086653, -0.000064239500295, 0.999989890310647]] + orbit=nav.Orbit( + greenwich_sidereal_time=2.530392320342610, + declination_from_sat_to_sun=-0.208713576872715, + right_ascension_from_sat_to_sun=3.242660398961383, + sat_position_earth_fixed_x=-32390273.633557569235563, + sat_position_earth_fixed_y=27003859.543131537735462, + sat_position_earth_fixed_z=-210800.087734811415430, + nutation_precession=np.array( + [[0.999936381432029, -0.010344763228876, -0.004496550050695], + [0.010344947502662, 0.999946489441823, 0.000017724053657], + [0.004496126086653, -0.000064239500295, 0.999989890310647]] + ), ), - 'earth_flattening': 0.003352813177897, - 'earth_equatorial_radius': 6378136 - }, + static_params=nav.StaticNavigationParameters( + line_offset=5513.0, + pixel_offset=6688.5, + stepping_angle=0.000035000004573, + sampling_angle=0.000023929998861, + misalignment=np.array( + [[0.999999165534973, 0.000510364072397, 0.001214201096445], + [-0.000511951977387, 0.999999046325684, 0.001307720085606], + [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + ), + earth_flattening=0.003352813177897, + earth_equatorial_radius=6378136 + ), + ) }, - ] NAVIGATION_REFERENCE = VIS_NAVIGATION_REFERENCE + IR_NAVIGATION_REFERENCE -class TestVISSRNavigation: - """VISSR navigation tests.""" - - def test_interpolate_prediction(self): - """Test interpolation of orbit/attitude predictions.""" - res = nav.interpolate_prediction( - prediction_times=np.array([1, 2, 3]), - predicted_values=np.array([10, 20, 30]), - desired_time=np.array([1.5, 2.5]) +""" + { + 'line': , + 'pixel': , + 'lon': , + 'lat': , + 'tolerance': , + 'nav_params': nav.NavigationParameters( + attitude=nav.Attitude( + angle_between_earth_and_sun=, + angle_between_sat_spin_and_z_axis=, + angle_between_sat_spin_and_yz_plane=, + ), + orbit=nav.Orbit( + greenwich_sidereal_time=, + declination_from_sat_to_sun=, + right_ascension_from_sat_to_sun=, + sat_position_earth_fixed_x=, + sat_position_earth_fixed_y=, + sat_position_earth_fixed_z=, + nutation_precession=np.array( + [[], + [], + []] + ), + ), + static_params=nav.StaticNavigationParameters( + line_offset=, + pixel_offset=, + stepping_angle=, + sampling_angle=, + misalignment=np.array( + [[], + [], + []] + ), + earth_flattening=, + earth_equatorial_radius= + ), ) - np.testing.assert_allclose(res, [15, 25]) + }, - @pytest.mark.parametrize( - 'desired_time,nearest_pred_exp', - [ - (0, [10, 20]), - (2.5, [30, 40]), - (5, [50, 60]) - ] - ) - def test_get_nearest_prediction(self, desired_time, nearest_pred_exp): - """Test getting the nearest prediction.""" - res = nav.get_nearest_prediction( - prediction_times=np.array([1, 2, 3]), - predicted_values=np.array([[10, 20], [30, 40], [50, 60]]), - desired_time=desired_time - ) - np.testing.assert_allclose(res, nearest_pred_exp) - - def test_get_observation_time(self): - """Test getting the observation time of a given pixel.""" - spinning_rate = 100 - sampling_angle = 0.01 - num_sensors = 1 - scan_params = (spinning_rate, num_sensors, sampling_angle) - time = nav.get_observation_time( - point=np.array([11, 100]), - start_time_of_scan=50000, - scan_params=scan_params - ) - np.testing.assert_allclose(time, 50000.0000705496871047) +""" + +class TestSinglePixelNavigation: + """Test navigation of a single pixel.""" 
@pytest.mark.parametrize( - 'line,pixel,params,lon_exp,lat_exp,tolerance', + 'line,pixel,nav_params,lon_exp,lat_exp,tolerance', [ (ref['line'], ref['pixel'], @@ -246,10 +378,9 @@ def test_get_observation_time(self): for ref in NAVIGATION_REFERENCE ] ) - def test_get_lon_lat(self, line, pixel, params, lon_exp, lat_exp, + def test_get_lon_lat(self, line, pixel, nav_params, lon_exp, lat_exp, tolerance): """Test getting lon/lat coordinates for a given pixel.""" - nav_params = nav.NavigationParameters(**params) lon, lat = nav.get_lon_lat(line, pixel, nav_params) np.testing.assert_allclose( (lon, lat), (lon_exp, lat_exp), atol=tolerance @@ -257,11 +388,9 @@ def test_get_lon_lat(self, line, pixel, params, lon_exp, lat_exp, def test_nav_matrices_are_contiguous(self): """Test that navigation matrices are stored as C-contiguous arrays.""" - nav_params = nav.NavigationParameters( - **NAVIGATION_REFERENCE[0]['nav_params'] - ) - assert nav_params.misalignment.flags['C_CONTIGUOUS'] - assert nav_params.nutation_precession.flags['C_CONTIGUOUS'] + nav_params = NAVIGATION_REFERENCE[0]['nav_params'] + assert nav_params.static_params.misalignment.flags['C_CONTIGUOUS'] + assert nav_params.orbit.nutation_precession.flags['C_CONTIGUOUS'] def test_transform_image_coords_to_scanning_angles(self): """Test transformation from image coordinates to scanning angles.""" @@ -330,30 +459,227 @@ def test_normalize_vector(self): np.testing.assert_allclose(normed, v / np.sqrt(14)) -# class TestImageNavigator: -# @pytest.fixture -# def navigator(self): -# attitude_prediction = np.ones(33, -# dtype=nav.attitude_prediction_dtype) -# orbit_prediction = np.ones(9, dtype=nav.orbit_prediction_dtype) -# return nav.ImageNavigator( -# start_time_of_scan=50000, -# line_offset=123, -# pixel_offset=123, -# sampling_angle=0.01, -# stepping_angle=0.02, -# spinning_rate=100, -# num_sensors=1, -# misalignment=np.diag([1, 2, 3]).astype(np.float32), -# attitude_prediction=attitude_prediction, -# orbit_prediction=orbit_prediction, -# earth_flattening=0.0003, -# earth_equatorial_radius=6378 -# ) -# -# def test_has_correct_line_offset(self, navigator): -# assert navigator.line_offset == 123 -# -# def test_has_correct_attitude_prediction(self, navigator): -# assert navigator.attitude_prediction.dtype == nav.attitude_prediction_dtype +class TestPredictionInterpolation: + """Test interpolation of orbit and attitude predictions.""" + @pytest.mark.parametrize( + 'obs_time,expected', + [ + (-1, np.nan), + (1.5, 2.5), + (5, np.nan) + ] + ) + def test_interpolate_cont(self, obs_time, expected): + prediction_times = np.array([0, 1, 2, 3]) + predicted_values = np.array([1, 2, 3, 4]) + res = nav.interpolate_cont( + obs_time, + prediction_times, + predicted_values + ) + np.testing.assert_allclose(res, expected) + + @pytest.mark.parametrize( + 'obs_time,expected', + [ + (-1, np.nan), + (1.5, 0.75*np.pi), + (2.5, np.pi), + (3.5, -0.75*np.pi), + (5, np.nan), + ] + ) + def test_interpolate_angles(self, obs_time, expected): + prediction_times = np.array([0, 1, 2, 3, 4]) + predicted_angles = np.array([0, np.pi/2, np.pi, -np.pi, -np.pi/2]) + res = nav.interpolate_angles( + obs_time, + prediction_times, + predicted_angles + ) + np.testing.assert_allclose(res, expected) + + @pytest.mark.parametrize( + 'obs_time,expected', + [ + (-1, np.nan * np.ones((2, 2))), + (1.5, [[1, 0], [0, 2]]), + (3, np.nan * np.ones((2, 2))), + ] + ) + def test_interpolate_nearest(self, obs_time, expected): + prediction_times = np.array([0, 1, 2]) + predicted_angles = np.array([ + 
np.zeros((2, 2)), + np.diag((1, 2)), + np.zeros((2, 2)) + ]) + res = nav.interpolate_nearest( + obs_time, + prediction_times, + predicted_angles + ) + np.testing.assert_allclose(res, expected) + + def test_interpolate_orbit_prediction(self, obs_time, orbit_prediction, orbit_expected): + orbit = orbit_prediction.interpolate(obs_time) + self.assert_orbit_close(orbit, orbit_expected) + + def test_interpolate_attitude_prediction(self, obs_time, attitude_prediction, attitude_expected): + attitude = attitude_prediction.interpolate(obs_time) + self.assert_attitude_close(attitude, attitude_expected) + + def test_interpolate_prediction(self, obs_time, static_params, attitude_prediction, orbit_prediction, nav_params_expected): + interpolator = nav.PredictionInterpolator( + static_params=static_params, + attitude_prediction=attitude_prediction, + orbit_prediction=orbit_prediction + ) + nav_params = interpolator.interpolate(obs_time) + self.assert_nav_params_close(nav_params, nav_params_expected) + + @pytest.fixture + def obs_time(self): + return 2.5 + + @pytest.fixture + def orbit_prediction(self): + return nav.OrbitPrediction( + prediction_time=np.array([1.0, 2.0, 3.0, 4.0]), + greenwich_sidereal_time=np.array([0.0, 1.0, 2.0, 3.0]), + declination_from_sat_to_sun=np.array([0.1, 1.1, 2.1, 3.1]), + right_ascension_from_sat_to_sun=np.array([0.2, 1.2, 2.2, 3.2]), + sat_position_earth_fixed_x=np.array([0.3, 1.3, 2.3, 3.3]), + sat_position_earth_fixed_y=np.array([0.4, 1.4, 2.4, 3.4]), + sat_position_earth_fixed_z=np.array([0.5, 1.5, 2.5, 3.5]), + nutation_precession=np.array( + [ + 0.6*np.identity(3), + 1.6*np.identity(3), + 2.6*np.identity(3), + 3.6*np.identity(3) + ] + ) + ) + + @pytest.fixture + def orbit_expected(self): + return nav.Orbit( + greenwich_sidereal_time=1.5, + declination_from_sat_to_sun=1.6, + right_ascension_from_sat_to_sun=1.7, + sat_position_earth_fixed_x=1.8, + sat_position_earth_fixed_y=1.9, + sat_position_earth_fixed_z=2.0, + nutation_precession=1.6 * np.identity(3) + ) + + @pytest.fixture + def attitude_prediction(self): + return nav.AttitudePrediction( + prediction_time=np.array([1.0, 2.0, 3.0]), + angle_between_earth_and_sun=np.array([0.0, 1.0, 2.0]), + angle_between_sat_spin_and_z_axis=np.array([0.1, 1.1, 2.1]), + angle_between_sat_spin_and_yz_plane=np.array([0.2, 1.2, 2.2]), + ) + + @pytest.fixture + def attitude_expected(self): + return nav.Attitude( + angle_between_earth_and_sun=1.5, + angle_between_sat_spin_and_z_axis=1.6, + angle_between_sat_spin_and_yz_plane=1.7, + ) + + @pytest.fixture + def static_params(self): + return nav.StaticNavigationParameters( + line_offset=1378.5, + pixel_offset=1672.5, + stepping_angle=0.000140000047395, + sampling_angle=0.000095719995443, + misalignment=np.identity(3).astype(np.float64), + earth_flattening=0.003352813177897, + earth_equatorial_radius=6378136 + ) + + @pytest.fixture + def nav_params_expected(self, attitude_expected, orbit_expected, static_params): + return nav.NavigationParameters( + attitude_expected, orbit_expected, static_params + ) + + def assert_orbit_close(self, a, b): + """Assert that two Orbit instances are close. + + This would probably make more sense in the Orbit class. However, + numba doesn't support np.allclose, yet. 
+        """
+        attrs = [
+            'greenwich_sidereal_time',
+            'declination_from_sat_to_sun',
+            'right_ascension_from_sat_to_sun',
+            'sat_position_earth_fixed_x',
+            'sat_position_earth_fixed_y',
+            'sat_position_earth_fixed_z',
+            'nutation_precession',
+        ]
+        self._assert_attrs_close(a, b, attrs, 'Orbit')
+
+    def assert_attitude_close(self, a, b):
+        """Assert that two Attitude instances are close.
+
+        This would probably make more sense in the Attitude class. However,
+        numba doesn't support np.allclose, yet.
+        """
+        attrs = [
+            'angle_between_earth_and_sun',
+            'angle_between_sat_spin_and_z_axis',
+            'angle_between_sat_spin_and_yz_plane'
+        ]
+        self._assert_attrs_close(a, b, attrs, 'Attitude')
+
+    def assert_static_params_close(self, a, b):
+        """Assert that two StaticNavigationParameters instances are close.
+
+        This would probably make more sense in the StaticNavigationParameters
+        class. However, numba doesn't support np.allclose, yet.
+        """
+        attrs = [
+            'line_offset',
+            'pixel_offset',
+            'stepping_angle',
+            'sampling_angle',
+            'misalignment',
+            'earth_flattening',
+            'earth_equatorial_radius',
+        ]
+        self._assert_attrs_close(a, b, attrs, 'StaticNavigationParameters')
+
+    def assert_nav_params_close(self, a, b):
+        self.assert_attitude_close(a.attitude, b.attitude)
+        self.assert_orbit_close(a.orbit, b.orbit)
+        self.assert_static_params_close(a.static_params, b.static_params)
+
+    @staticmethod
+    def _assert_attrs_close(a, b, attrs, desc):
+        for attr in attrs:
+            np.testing.assert_allclose(
+                getattr(a, attr),
+                getattr(b, attr),
+                err_msg='{} attribute {} differs'.format(desc, attr)
+            )
+
+
+
+def test_get_observation_time():
+    scan_params = nav.ScanningParameters(
+        start_time_of_scan=50000.0,
+        spinning_rate=100,
+        num_sensors=1,
+        sampling_angle=0.01
+    )
+    point = np.array([11, 100])
+    obs_time = nav.get_observation_time(point, scan_params)
+    np.testing.assert_allclose(obs_time, 50000.0000705496871047)

From 966148c5ae4a202751951225690b86d8065662fb Mon Sep 17 00:00:00 2001
From: Stephan Finkensieper 
Date: Wed, 9 Jun 2021 06:53:14 +0000
Subject: [PATCH 0007/1416] Fix jitclasses with jit disabled

---
 satpy/readers/gms5_vissr_navigation.py | 20 ++++++++++++++------
 1 file changed, 14 insertions(+), 6 deletions(-)

diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py
index e7e6fc4e7a..8de78e7d69 100644
--- a/satpy/readers/gms5_vissr_navigation.py
+++ b/satpy/readers/gms5_vissr_navigation.py
@@ -11,6 +11,14 @@
 import numpy as np
 
 
+def get_jitclass_type(cls):
+    try:
+        return cls.class_type.instance_type
+    except AttributeError:
+        # With NUMBA_DISABLE_JIT=1
+        return cls
+
+
 @numba.njit
 def get_lons_lats(lines, pixels, nav_params):
     num_lines = len(lines)
@@ -619,9 +627,9 @@ def __init__(
 
 @numba.experimental.jitclass(
     [
-        ('attitude', Attitude.class_type.instance_type),
-        ('orbit', Orbit.class_type.instance_type),
-        ('static_params', StaticNavigationParameters.class_type.instance_type),
+        ('attitude', get_jitclass_type(Attitude)),
+        ('orbit', get_jitclass_type(Orbit)),
+        ('static_params', get_jitclass_type(StaticNavigationParameters)),
     ]
 )
 class NavigationParameters:
@@ -665,9 +673,9 @@ def get_sat_position(self):
 
 @numba.experimental.jitclass(
     [
-        ('attitude_prediction', AttitudePrediction.class_type.instance_type),
-        ('orbit_prediction', OrbitPrediction.class_type.instance_type),
-        ('static_params', StaticNavigationParameters.class_type.instance_type),
+        ('attitude_prediction', get_jitclass_type(AttitudePrediction)),
+        ('orbit_prediction', get_jitclass_type(OrbitPrediction)),
+        ('static_params', 
get_jitclass_type(StaticNavigationParameters)), ] ) class PredictionInterpolator: From bc19af26ad51cd09b6d1d920633949fecd8b8c32 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 9 Jun 2021 07:25:53 +0000 Subject: [PATCH 0008/1416] Fix computation of scanning angles --- satpy/readers/gms5_vissr_navigation.py | 4 ++-- .../tests/reader_tests/test_gms5_vissr_l1b.py | 20 ++++++------------- 2 files changed, 8 insertions(+), 16 deletions(-) diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index 8de78e7d69..9b90f147c1 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -105,8 +105,8 @@ def transform_image_coords_to_scanning_angles(point, offset, sampling): line, pixel = point line_offset, pixel_offset = offset stepping_angle, sampling_angle = sampling - x = sampling_angle * (pixel + 0.5 - pixel_offset) - y = stepping_angle * (line + 0.5 - line_offset) + x = sampling_angle * (pixel + 1 - pixel_offset) + y = stepping_angle * (line + 1 - line_offset) return np.array([x, y]) diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index 1c6fcbc1d1..a1025f74f5 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -16,7 +16,6 @@ 'pixel': 1680, 'lon': 139.990380, 'lat': 35.047056, - 'tolerance': 0, 'nav_params': nav.NavigationParameters( attitude=nav.Attitude( angle_between_earth_and_sun=3.997397917902958, @@ -56,7 +55,6 @@ 'pixel': 1793, 'lon': 144.996967, 'lat': -34.959853, - 'tolerance': 0, 'nav_params': nav.NavigationParameters( attitude=nav.Attitude( angle_between_earth_and_sun=3.935707944355762, @@ -239,7 +237,6 @@ 'pixel': 6720, 'lon': 139.975527, 'lat': 35.078028, - 'tolerance': 0.01, 'nav_params': nav.NavigationParameters( attitude=nav.Attitude( angle_between_earth_and_sun=3.997397918405798, @@ -279,7 +276,6 @@ 'pixel': 7172, 'lon': 144.980104, 'lat': -34.929123, - 'tolerance': 0.01, 'nav_params': nav.NavigationParameters( attitude=nav.Attitude( angle_between_earth_and_sun=3.935707944858620, @@ -367,24 +363,20 @@ class TestSinglePixelNavigation: """Test navigation of a single pixel.""" @pytest.mark.parametrize( - 'line,pixel,nav_params,lon_exp,lat_exp,tolerance', + 'line,pixel,nav_params,lon_exp,lat_exp', [ (ref['line'], ref['pixel'], ref['nav_params'], ref['lon'], - ref['lat'], - ref['tolerance']) + ref['lat']) for ref in NAVIGATION_REFERENCE ] ) - def test_get_lon_lat(self, line, pixel, nav_params, lon_exp, lat_exp, - tolerance): + def test_get_lon_lat(self, line, pixel, nav_params, lon_exp, lat_exp): """Test getting lon/lat coordinates for a given pixel.""" lon, lat = nav.get_lon_lat(line, pixel, nav_params) - np.testing.assert_allclose( - (lon, lat), (lon_exp, lat_exp), atol=tolerance - ) + np.testing.assert_allclose((lon, lat), (lon_exp, lat_exp)) def test_nav_matrices_are_contiguous(self): """Test that navigation matrices are stored as C-contiguous arrays.""" @@ -395,8 +387,8 @@ def test_nav_matrices_are_contiguous(self): def test_transform_image_coords_to_scanning_angles(self): """Test transformation from image coordinates to scanning angles.""" angles = nav.transform_image_coords_to_scanning_angles( - point=np.array([200.5, 100.5]), - offset=np.array([101, 201]), + point=np.array([199, 99]), + offset=np.array([100, 200]), sampling=np.array([0.01, 0.02]) ) np.testing.assert_allclose(angles, [-2, 1]) From d94a6ae222c18c37babc83761ac5b9c423a74786 Mon Sep 17 00:00:00 2001 
From: Stephan Finkensieper Date: Wed, 9 Jun 2021 07:48:56 +0000 Subject: [PATCH 0009/1416] Refactor static -> projection parameters --- satpy/readers/gms5_vissr_navigation.py | 28 ++++++++-------- .../tests/reader_tests/test_gms5_vissr_l1b.py | 32 +++++++++---------- 2 files changed, 30 insertions(+), 30 deletions(-) diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index 9b90f147c1..4baf9cff79 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -85,7 +85,7 @@ def get_lon_lat(line, pixel, nav_params): ) point_on_earth = _intersect_with_earth(view_vector_earth_fixed, nav_params) lon, lat = transform_earth_fixed_to_geodetic_coords( - point_on_earth, nav_params.static_params.earth_flattening + point_on_earth, nav_params.proj_params.earth_flattening ) return lon, lat @@ -113,7 +113,7 @@ def transform_image_coords_to_scanning_angles(point, offset, sampling): @numba.njit def _transform_scanning_angles_to_satellite_coords(angles, nav_params): transformer = ScanningAnglesToSatelliteCoordsTransformer( - nav_params.static_params.misalignment + nav_params.proj_params.misalignment ) return transformer.transform(angles) @@ -432,7 +432,7 @@ def _interpolate_nearest(x, xp, yp): ('earth_equatorial_radius', numba.float64), ] ) -class StaticNavigationParameters: +class ProjectionParameters: def __init__( self, line_offset, @@ -629,23 +629,23 @@ def __init__( [ ('attitude', get_jitclass_type(Attitude)), ('orbit', get_jitclass_type(Orbit)), - ('static_params', get_jitclass_type(StaticNavigationParameters)), + ('proj_params', get_jitclass_type(ProjectionParameters)), ] ) class NavigationParameters: - def __init__(self, attitude, orbit, static_params): + def __init__(self, attitude, orbit, proj_params): self.attitude = attitude self.orbit = orbit - self.static_params = static_params + self.proj_params = proj_params # TODO: Remember that all angles are expected in rad # TODO: Watch out shape of 3x3 matrices! 
See msVissrNav.c def get_image_offset(self): - return self.static_params.line_offset, self.static_params.pixel_offset + return self.proj_params.line_offset, self.proj_params.pixel_offset def get_sampling(self): - return self.static_params.stepping_angle, self.static_params.sampling_angle + return self.proj_params.stepping_angle, self.proj_params.sampling_angle def get_sat_sun_angles(self): return np.array([ @@ -661,8 +661,8 @@ def get_spin_angles(self): def get_ellipsoid(self): return np.array([ - self.static_params.earth_equatorial_radius, - self.static_params.earth_flattening + self.proj_params.earth_equatorial_radius, + self.proj_params.earth_flattening ]) def get_sat_position(self): @@ -675,14 +675,14 @@ def get_sat_position(self): [ ('attitude_prediction', get_jitclass_type(AttitudePrediction)), ('orbit_prediction', get_jitclass_type(OrbitPrediction)), - ('static_params', get_jitclass_type(StaticNavigationParameters)), + ('proj_params', get_jitclass_type(ProjectionParameters)), ] ) class PredictionInterpolator: - def __init__(self, attitude_prediction, orbit_prediction, static_params): + def __init__(self, attitude_prediction, orbit_prediction, proj_params): self.attitude_prediction = attitude_prediction self.orbit_prediction = orbit_prediction - self.static_params = static_params + self.proj_params = proj_params def interpolate(self, observation_time): attitude = self.attitude_prediction.interpolate(observation_time) @@ -690,4 +690,4 @@ def interpolate(self, observation_time): return self._get_nav_params(attitude, orbit) def _get_nav_params(self, attitude, orbit): - return NavigationParameters(attitude, orbit, self.static_params) + return NavigationParameters(attitude, orbit, self.proj_params) diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index a1025f74f5..a1299863cb 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -35,7 +35,7 @@ [0.004496123789670, -0.000064242454080, 0.999989890320785]] ), ), - static_params=nav.StaticNavigationParameters( + proj_params=nav.ProjectionParameters( line_offset=1378.5, pixel_offset=1672.5, stepping_angle=0.000140000047395, @@ -74,7 +74,7 @@ [0.004496126086653, -0.000064239500295, 0.999989890310647]] ), ), - static_params=nav.StaticNavigationParameters( + proj_params=nav.ProjectionParameters( line_offset=1378.5, pixel_offset=1672.5, stepping_angle=0.000140000047395, @@ -256,7 +256,7 @@ [0.004496123789670, -0.000064242454080, 0.999989890320785]] ), ), - static_params=nav.StaticNavigationParameters( + proj_params=nav.ProjectionParameters( line_offset=5513.0, pixel_offset=6688.5, stepping_angle=0.000035000004573, @@ -295,7 +295,7 @@ [0.004496126086653, -0.000064239500295, 0.999989890310647]] ), ), - static_params=nav.StaticNavigationParameters( + proj_params=nav.ProjectionParameters( line_offset=5513.0, pixel_offset=6688.5, stepping_angle=0.000035000004573, @@ -341,7 +341,7 @@ []] ), ), - static_params=nav.StaticNavigationParameters( + proj_params=nav.ProjectionParameters( line_offset=, pixel_offset=, stepping_angle=, @@ -381,7 +381,7 @@ def test_get_lon_lat(self, line, pixel, nav_params, lon_exp, lat_exp): def test_nav_matrices_are_contiguous(self): """Test that navigation matrices are stored as C-contiguous arrays.""" nav_params = NAVIGATION_REFERENCE[0]['nav_params'] - assert nav_params.static_params.misalignment.flags['C_CONTIGUOUS'] + assert nav_params.proj_params.misalignment.flags['C_CONTIGUOUS'] assert 
nav_params.orbit.nutation_precession.flags['C_CONTIGUOUS'] def test_transform_image_coords_to_scanning_angles(self): @@ -522,9 +522,9 @@ def test_interpolate_attitude_prediction(self, obs_time, attitude_prediction, at attitude = attitude_prediction.interpolate(obs_time) self.assert_attitude_close(attitude, attitude_expected) - def test_interpolate_prediction(self, obs_time, static_params, attitude_prediction, orbit_prediction, nav_params_expected): + def test_interpolate_prediction(self, obs_time, proj_params, attitude_prediction, orbit_prediction, nav_params_expected): interpolator = nav.PredictionInterpolator( - static_params=static_params, + proj_params=proj_params, attitude_prediction=attitude_prediction, orbit_prediction=orbit_prediction ) @@ -585,8 +585,8 @@ def attitude_expected(self): ) @pytest.fixture - def static_params(self): - return nav.StaticNavigationParameters( + def proj_params(self): + return nav.ProjectionParameters( line_offset=1378.5, pixel_offset=1672.5, stepping_angle=0.000140000047395, @@ -597,9 +597,9 @@ def static_params(self): ) @pytest.fixture - def nav_params_expected(self, attitude_expected, orbit_expected, static_params): + def nav_params_expected(self, attitude_expected, orbit_expected, proj_params): return nav.NavigationParameters( - attitude_expected, orbit_expected, static_params + attitude_expected, orbit_expected, proj_params ) def assert_orbit_close(self, a, b): @@ -632,8 +632,8 @@ def assert_attitude_close(self, a, b): ] self._assert_attrs_close(a, b, attrs, 'Attitude') - def assert_static_params_close(self, a, b): - """Assert that two StaticNavigationParameters instances are close. + def assert_proj_params_close(self, a, b): + """Assert that two ProjectionParameters instances are close. This would probably make more sense in the Attitude class. However, numba doesn't support np.allclose, yet. 
@@ -647,12 +647,12 @@ def assert_static_params_close(self, a, b): 'earth_flattening', 'earth_equatorial_radius', ] - self._assert_attrs_close(a, b, attrs, 'StaticNavigationParameters') + self._assert_attrs_close(a, b, attrs, 'ProjectionParameters') def assert_nav_params_close(self, a, b): self.assert_attitude_close(a.attitude, b.attitude) self.assert_orbit_close(a.orbit, b.orbit) - self.assert_static_params_close(a.static_params, b.static_params) + self.assert_proj_params_close(a.proj_params, b.proj_params) @staticmethod def _assert_attrs_close(a, b, attrs, desc): From d78be5f09ad0692b11b4646089efc9d240439dfe Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 9 Jun 2021 07:51:22 +0000 Subject: [PATCH 0010/1416] Refactor prediction_time -> prediction_times --- satpy/readers/gms5_vissr_navigation.py | 20 +++++++++---------- .../tests/reader_tests/test_gms5_vissr_l1b.py | 4 ++-- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index 4baf9cff79..710b275038 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -454,7 +454,7 @@ def __init__( @numba.experimental.jitclass( [ - ('prediction_time', numba.float64[:]), + ('prediction_times', numba.float64[:]), ('greenwich_sidereal_time', numba.float64[:]), ('declination_from_sat_to_sun', numba.float64[:]), ('right_ascension_from_sat_to_sun', numba.float64[:]), @@ -467,7 +467,7 @@ def __init__( class OrbitPrediction: def __init__( self, - prediction_time, + prediction_times, greenwich_sidereal_time, declination_from_sat_to_sun, right_ascension_from_sat_to_sun, @@ -476,7 +476,7 @@ def __init__( sat_position_earth_fixed_z, nutation_precession ): - self.prediction_time = prediction_time + self.prediction_times = prediction_times self.greenwich_sidereal_time = greenwich_sidereal_time self.declination_from_sat_to_sun = declination_from_sat_to_sun self.right_ascension_from_sat_to_sun = right_ascension_from_sat_to_sun @@ -525,13 +525,13 @@ def interpolate(self, observation_time): ) def _interpolate_cont(self, predicted_values, observation_time): - return interpolate_cont(observation_time, self.prediction_time, predicted_values) + return interpolate_cont(observation_time, self.prediction_times, predicted_values) def _interpolate_angles(self, predicted_values, observation_time): - return interpolate_angles(observation_time, self.prediction_time, predicted_values) + return interpolate_angles(observation_time, self.prediction_times, predicted_values) def _interpolate_nearest(self, predicted_values, observation_time): - return interpolate_nearest(observation_time, self.prediction_time, predicted_values) + return interpolate_nearest(observation_time, self.prediction_times, predicted_values) @numba.experimental.jitclass( @@ -567,7 +567,7 @@ def __init__( @numba.experimental.jitclass( [ - ('prediction_time', numba.float64[:]), + ('prediction_times', numba.float64[:]), ('angle_between_earth_and_sun', numba.float64[:]), ('angle_between_sat_spin_and_z_axis', numba.float64[:]), ('angle_between_sat_spin_and_yz_plane', numba.float64[:]), @@ -576,12 +576,12 @@ def __init__( class AttitudePrediction: def __init__( self, - prediction_time, + prediction_times, angle_between_earth_and_sun, angle_between_sat_spin_and_z_axis, angle_between_sat_spin_and_yz_plane ): - self.prediction_time = prediction_time + self.prediction_times = prediction_times self.angle_between_earth_and_sun = angle_between_earth_and_sun 
self.angle_between_sat_spin_and_z_axis = angle_between_sat_spin_and_z_axis self.angle_between_sat_spin_and_yz_plane = angle_between_sat_spin_and_yz_plane @@ -603,7 +603,7 @@ def interpolate(self, observation_time): ) def _interpolate(self, observation_time, predicted_values): - return interpolate_angles(observation_time, self.prediction_time, predicted_values) + return interpolate_angles(observation_time, self.prediction_times, predicted_values) @numba.experimental.jitclass( diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index a1299863cb..52448cda87 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -538,7 +538,7 @@ def obs_time(self): @pytest.fixture def orbit_prediction(self): return nav.OrbitPrediction( - prediction_time=np.array([1.0, 2.0, 3.0, 4.0]), + prediction_times=np.array([1.0, 2.0, 3.0, 4.0]), greenwich_sidereal_time=np.array([0.0, 1.0, 2.0, 3.0]), declination_from_sat_to_sun=np.array([0.1, 1.1, 2.1, 3.1]), right_ascension_from_sat_to_sun=np.array([0.2, 1.2, 2.2, 3.2]), @@ -570,7 +570,7 @@ def orbit_expected(self): @pytest.fixture def attitude_prediction(self): return nav.AttitudePrediction( - prediction_time=np.array([1.0, 2.0, 3.0]), + prediction_times=np.array([1.0, 2.0, 3.0]), angle_between_earth_and_sun=np.array([0.0, 1.0, 2.0]), angle_between_sat_spin_and_z_axis=np.array([0.1, 1.1, 2.1]), angle_between_sat_spin_and_yz_plane=np.array([0.2, 1.2, 2.2]), From dbb2a39d4ab7a3eedaa4c3b6ae954ed15258bce6 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 9 Jun 2021 07:54:51 +0000 Subject: [PATCH 0011/1416] Refactor cont -> continuous --- satpy/readers/gms5_vissr_navigation.py | 12 ++++++------ satpy/tests/reader_tests/test_gms5_vissr_l1b.py | 4 ++-- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index 710b275038..919a03c842 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -348,7 +348,7 @@ def normalize_vector(v): @numba.njit -def interpolate_cont(x, xp, yp): +def interpolate_continuous(x, xp, yp): """Linear interpolation of continuous quantities. 
Numpy equivalent would be np.interp(..., left=np.nan, right=np.nan), but @@ -498,15 +498,15 @@ def interpolate(self, observation_time): self.right_ascension_from_sat_to_sun, observation_time ) - sat_position_earth_fixed_x = self._interpolate_cont( + sat_position_earth_fixed_x = self._interpolate_continuous( self.sat_position_earth_fixed_x, observation_time ) - sat_position_earth_fixed_y = self._interpolate_cont( + sat_position_earth_fixed_y = self._interpolate_continuous( self.sat_position_earth_fixed_y, observation_time ) - sat_position_earth_fixed_z = self._interpolate_cont( + sat_position_earth_fixed_z = self._interpolate_continuous( self.sat_position_earth_fixed_z, observation_time ) @@ -524,8 +524,8 @@ def interpolate(self, observation_time): nutation_precession ) - def _interpolate_cont(self, predicted_values, observation_time): - return interpolate_cont(observation_time, self.prediction_times, predicted_values) + def _interpolate_continuous(self, predicted_values, observation_time): + return interpolate_continuous(observation_time, self.prediction_times, predicted_values) def _interpolate_angles(self, predicted_values, observation_time): return interpolate_angles(observation_time, self.prediction_times, predicted_values) diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index 52448cda87..e8bd8f6113 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -462,10 +462,10 @@ class TestPredictionInterpolation: (5, np.nan) ] ) - def test_interpolate_cont(self, obs_time, expected): + def test_interpolate_continuous(self, obs_time, expected): prediction_times = np.array([0, 1, 2, 3]) predicted_values = np.array([1, 2, 3, 4]) - res = nav.interpolate_cont( + res = nav.interpolate_continuous( obs_time, prediction_times, predicted_values From 6866b86f5ee045ec8d9ba15184c45417f78e202b Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 9 Jun 2021 07:58:29 +0000 Subject: [PATCH 0012/1416] Refactor xp/yp -> x_sample/y_sample --- satpy/readers/gms5_vissr_navigation.py | 44 +++++++++++++------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index 919a03c842..e211beef01 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -348,53 +348,53 @@ def normalize_vector(v): @numba.njit -def interpolate_continuous(x, xp, yp): +def interpolate_continuous(x, x_sample, y_sample): """Linear interpolation of continuous quantities. Numpy equivalent would be np.interp(..., left=np.nan, right=np.nan), but numba currently doesn't support those keyword arguments. """ try: - return _interpolate(x, xp, yp, False) + return _interpolate(x, x_sample, y_sample, False) except Exception: return np.nan @numba.njit -def interpolate_angles(x, xp, yp): +def interpolate_angles(x, x_sample, y_sample): """Linear interpolation of periodic angles. Takes care of phase jumps by wrapping angle differences to [-pi, pi]. - Numpy equivalent would be np.interp(x, xp, np.unwrap(yp)), but + Numpy equivalent would be np.interp(x, x_sample, np.unwrap(y_sample)), but numba currently doesn't support np.unwrap. 
""" try: - return _interpolate(x, xp, yp, True) + return _interpolate(x, x_sample, y_sample, True) except Exception: return np.nan @numba.njit -def _interpolate(x, xp, yp, wrap_2pi): - i = _find_enclosing_index(x, xp) - offset = yp[i] - x_diff = xp[i+1] - xp[i] - y_diff = yp[i+1] - yp[i] +def _interpolate(x, x_sample, y_sample, wrap_2pi): + i = _find_enclosing_index(x, x_sample) + offset = y_sample[i] + x_diff = x_sample[i+1] - x_sample[i] + y_diff = y_sample[i+1] - y_sample[i] if wrap_2pi: y_diff = _wrap_2pi(y_diff) slope = y_diff / x_diff - dist = x - xp[i] + dist = x - x_sample[i] return offset + slope * dist @numba.njit -def _find_enclosing_index(x, xp): - """Find where xp encloses x.""" - for i in range(len(xp) - 1): - if xp[i] <= x < xp[i+1]: +def _find_enclosing_index(x, x_sample): + """Find where x_sample encloses x.""" + for i in range(len(x_sample) - 1): + if x_sample[i] <= x < x_sample[i+1]: return i - raise Exception('x not enclosed by xp') + raise Exception('x not enclosed by x_sample') @numba.njit @@ -407,18 +407,18 @@ def _wrap_2pi(values): @numba.njit -def interpolate_nearest(x, xp, yp): +def interpolate_nearest(x, x_sample, y_sample): """Nearest neighbour interpolation.""" try: - return _interpolate_nearest(x, xp, yp) + return _interpolate_nearest(x, x_sample, y_sample) except Exception: - return np.nan * np.ones_like(yp[0]) + return np.nan * np.ones_like(y_sample[0]) @numba.njit -def _interpolate_nearest(x, xp, yp): - i = _find_enclosing_index(x, xp) - return yp[i] +def _interpolate_nearest(x, x_sample, y_sample): + i = _find_enclosing_index(x, x_sample) + return y_sample[i] @numba.experimental.jitclass( From c212b3fa07df661ec6832f4d281a90226405a404 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 9 Jun 2021 08:03:48 +0000 Subject: [PATCH 0013/1416] Reorder classes --- satpy/readers/gms5_vissr_navigation.py | 333 +++++++++++++------------ 1 file changed, 168 insertions(+), 165 deletions(-) diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index e211beef01..3e03dca287 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -347,78 +347,55 @@ def normalize_vector(v): return v / np.sqrt(np.dot(v, v)) -@numba.njit -def interpolate_continuous(x, x_sample, y_sample): - """Linear interpolation of continuous quantities. - - Numpy equivalent would be np.interp(..., left=np.nan, right=np.nan), but - numba currently doesn't support those keyword arguments. - """ - try: - return _interpolate(x, x_sample, y_sample, False) - except Exception: - return np.nan - - -@numba.njit -def interpolate_angles(x, x_sample, y_sample): - """Linear interpolation of periodic angles. - - Takes care of phase jumps by wrapping angle differences to [-pi, pi]. - - Numpy equivalent would be np.interp(x, x_sample, np.unwrap(y_sample)), but - numba currently doesn't support np.unwrap. 
- """ - try: - return _interpolate(x, x_sample, y_sample, True) - except Exception: - return np.nan - - -@numba.njit -def _interpolate(x, x_sample, y_sample, wrap_2pi): - i = _find_enclosing_index(x, x_sample) - offset = y_sample[i] - x_diff = x_sample[i+1] - x_sample[i] - y_diff = y_sample[i+1] - y_sample[i] - if wrap_2pi: - y_diff = _wrap_2pi(y_diff) - slope = y_diff / x_diff - dist = x - x_sample[i] - return offset + slope * dist - - -@numba.njit -def _find_enclosing_index(x, x_sample): - """Find where x_sample encloses x.""" - for i in range(len(x_sample) - 1): - if x_sample[i] <= x < x_sample[i+1]: - return i - raise Exception('x not enclosed by x_sample') - - -@numba.njit -def _wrap_2pi(values): - """Wrap values to interval [-pi, pi]. - - Source: https://stackoverflow.com/a/15927914/5703449 - """ - return (values + np.pi) % (2 * np.pi) - np.pi - +@numba.experimental.jitclass( + [ + ('angle_between_earth_and_sun', numba.float64), + ('angle_between_sat_spin_and_z_axis', numba.float64), + ('angle_between_sat_spin_and_yz_plane', numba.float64), + ] +) +class Attitude: + def __init__( + self, + angle_between_earth_and_sun, + angle_between_sat_spin_and_z_axis, + angle_between_sat_spin_and_yz_plane + ): + self.angle_between_earth_and_sun = angle_between_earth_and_sun + self.angle_between_sat_spin_and_z_axis = angle_between_sat_spin_and_z_axis + self.angle_between_sat_spin_and_yz_plane = angle_between_sat_spin_and_yz_plane -@numba.njit -def interpolate_nearest(x, x_sample, y_sample): - """Nearest neighbour interpolation.""" - try: - return _interpolate_nearest(x, x_sample, y_sample) - except Exception: - return np.nan * np.ones_like(y_sample[0]) +@numba.experimental.jitclass( + [ + ('greenwich_sidereal_time', numba.float64), + ('declination_from_sat_to_sun', numba.float64), + ('right_ascension_from_sat_to_sun', numba.float64), + ('sat_position_earth_fixed_x', numba.float64), + ('sat_position_earth_fixed_y', numba.float64), + ('sat_position_earth_fixed_z', numba.float64), + ('nutation_precession', numba.types.Array(numba.float64, 2, layout='C')), + ] +) +class Orbit: + def __init__( + self, + greenwich_sidereal_time, + declination_from_sat_to_sun, + right_ascension_from_sat_to_sun, + sat_position_earth_fixed_x, + sat_position_earth_fixed_y, + sat_position_earth_fixed_z, + nutation_precession + ): + self.greenwich_sidereal_time = greenwich_sidereal_time + self.declination_from_sat_to_sun = declination_from_sat_to_sun + self.right_ascension_from_sat_to_sun = right_ascension_from_sat_to_sun + self.sat_position_earth_fixed_x = sat_position_earth_fixed_x + self.sat_position_earth_fixed_y = sat_position_earth_fixed_y + self.sat_position_earth_fixed_z = sat_position_earth_fixed_z + self.nutation_precession = nutation_precession -@numba.njit -def _interpolate_nearest(x, x_sample, y_sample): - i = _find_enclosing_index(x, x_sample) - return y_sample[i] @numba.experimental.jitclass( @@ -452,6 +429,52 @@ def __init__( self.earth_equatorial_radius = earth_equatorial_radius +@numba.experimental.jitclass( + [ + ('attitude', get_jitclass_type(Attitude)), + ('orbit', get_jitclass_type(Orbit)), + ('proj_params', get_jitclass_type(ProjectionParameters)), + ] +) +class NavigationParameters: + def __init__(self, attitude, orbit, proj_params): + self.attitude = attitude + self.orbit = orbit + self.proj_params = proj_params + + # TODO: Remember that all angles are expected in rad + # TODO: Watch out shape of 3x3 matrices! 
See msVissrNav.c + + def get_image_offset(self): + return self.proj_params.line_offset, self.proj_params.pixel_offset + + def get_sampling(self): + return self.proj_params.stepping_angle, self.proj_params.sampling_angle + + def get_sat_sun_angles(self): + return np.array([ + self.orbit.declination_from_sat_to_sun, + self.orbit.right_ascension_from_sat_to_sun + ]) + + def get_spin_angles(self): + return np.array([ + self.attitude.angle_between_sat_spin_and_z_axis, + self.attitude.angle_between_sat_spin_and_yz_plane + ]) + + def get_ellipsoid(self): + return np.array([ + self.proj_params.earth_equatorial_radius, + self.proj_params.earth_flattening + ]) + + def get_sat_position(self): + return np.array((self.orbit.sat_position_earth_fixed_x, + self.orbit.sat_position_earth_fixed_y, + self.orbit.sat_position_earth_fixed_z)) + + @numba.experimental.jitclass( [ ('prediction_times', numba.float64[:]), @@ -534,37 +557,6 @@ def _interpolate_nearest(self, predicted_values, observation_time): return interpolate_nearest(observation_time, self.prediction_times, predicted_values) -@numba.experimental.jitclass( - [ - ('greenwich_sidereal_time', numba.float64), - ('declination_from_sat_to_sun', numba.float64), - ('right_ascension_from_sat_to_sun', numba.float64), - ('sat_position_earth_fixed_x', numba.float64), - ('sat_position_earth_fixed_y', numba.float64), - ('sat_position_earth_fixed_z', numba.float64), - ('nutation_precession', numba.types.Array(numba.float64, 2, layout='C')), - ] -) -class Orbit: - def __init__( - self, - greenwich_sidereal_time, - declination_from_sat_to_sun, - right_ascension_from_sat_to_sun, - sat_position_earth_fixed_x, - sat_position_earth_fixed_y, - sat_position_earth_fixed_z, - nutation_precession - ): - self.greenwich_sidereal_time = greenwich_sidereal_time - self.declination_from_sat_to_sun = declination_from_sat_to_sun - self.right_ascension_from_sat_to_sun = right_ascension_from_sat_to_sun - self.sat_position_earth_fixed_x = sat_position_earth_fixed_x - self.sat_position_earth_fixed_y = sat_position_earth_fixed_y - self.sat_position_earth_fixed_z = sat_position_earth_fixed_z - self.nutation_precession = nutation_precession - - @numba.experimental.jitclass( [ ('prediction_times', numba.float64[:]), @@ -606,71 +598,6 @@ def _interpolate(self, observation_time, predicted_values): return interpolate_angles(observation_time, self.prediction_times, predicted_values) -@numba.experimental.jitclass( - [ - ('angle_between_earth_and_sun', numba.float64), - ('angle_between_sat_spin_and_z_axis', numba.float64), - ('angle_between_sat_spin_and_yz_plane', numba.float64), - ] -) -class Attitude: - def __init__( - self, - angle_between_earth_and_sun, - angle_between_sat_spin_and_z_axis, - angle_between_sat_spin_and_yz_plane - ): - self.angle_between_earth_and_sun = angle_between_earth_and_sun - self.angle_between_sat_spin_and_z_axis = angle_between_sat_spin_and_z_axis - self.angle_between_sat_spin_and_yz_plane = angle_between_sat_spin_and_yz_plane - - -@numba.experimental.jitclass( - [ - ('attitude', get_jitclass_type(Attitude)), - ('orbit', get_jitclass_type(Orbit)), - ('proj_params', get_jitclass_type(ProjectionParameters)), - ] -) -class NavigationParameters: - def __init__(self, attitude, orbit, proj_params): - self.attitude = attitude - self.orbit = orbit - self.proj_params = proj_params - - # TODO: Remember that all angles are expected in rad - # TODO: Watch out shape of 3x3 matrices! 
See msVissrNav.c - - def get_image_offset(self): - return self.proj_params.line_offset, self.proj_params.pixel_offset - - def get_sampling(self): - return self.proj_params.stepping_angle, self.proj_params.sampling_angle - - def get_sat_sun_angles(self): - return np.array([ - self.orbit.declination_from_sat_to_sun, - self.orbit.right_ascension_from_sat_to_sun - ]) - - def get_spin_angles(self): - return np.array([ - self.attitude.angle_between_sat_spin_and_z_axis, - self.attitude.angle_between_sat_spin_and_yz_plane - ]) - - def get_ellipsoid(self): - return np.array([ - self.proj_params.earth_equatorial_radius, - self.proj_params.earth_flattening - ]) - - def get_sat_position(self): - return np.array((self.orbit.sat_position_earth_fixed_x, - self.orbit.sat_position_earth_fixed_y, - self.orbit.sat_position_earth_fixed_z)) - - @numba.experimental.jitclass( [ ('attitude_prediction', get_jitclass_type(AttitudePrediction)), @@ -691,3 +618,79 @@ def interpolate(self, observation_time): def _get_nav_params(self, attitude, orbit): return NavigationParameters(attitude, orbit, self.proj_params) + + +@numba.njit +def interpolate_continuous(x, x_sample, y_sample): + """Linear interpolation of continuous quantities. + + Numpy equivalent would be np.interp(..., left=np.nan, right=np.nan), but + numba currently doesn't support those keyword arguments. + """ + try: + return _interpolate(x, x_sample, y_sample, False) + except Exception: + return np.nan + + +@numba.njit +def interpolate_angles(x, x_sample, y_sample): + """Linear interpolation of periodic angles. + + Takes care of phase jumps by wrapping angle differences to [-pi, pi]. + + Numpy equivalent would be np.interp(x, x_sample, np.unwrap(y_sample)), but + numba currently doesn't support np.unwrap. + """ + try: + return _interpolate(x, x_sample, y_sample, True) + except Exception: + return np.nan + + +@numba.njit +def _interpolate(x, x_sample, y_sample, wrap_2pi): + i = _find_enclosing_index(x, x_sample) + offset = y_sample[i] + x_diff = x_sample[i+1] - x_sample[i] + y_diff = y_sample[i+1] - y_sample[i] + if wrap_2pi: + y_diff = _wrap_2pi(y_diff) + slope = y_diff / x_diff + dist = x - x_sample[i] + return offset + slope * dist + + +@numba.njit +def _find_enclosing_index(x, x_sample): + """Find where x_sample encloses x.""" + for i in range(len(x_sample) - 1): + if x_sample[i] <= x < x_sample[i+1]: + return i + raise Exception('x not enclosed by x_sample') + + +@numba.njit +def _wrap_2pi(values): + """Wrap values to interval [-pi, pi]. 
+ + Source: https://stackoverflow.com/a/15927914/5703449 + """ + return (values + np.pi) % (2 * np.pi) - np.pi + + +@numba.njit +def interpolate_nearest(x, x_sample, y_sample): + """Nearest neighbour interpolation.""" + try: + return _interpolate_nearest(x, x_sample, y_sample) + except Exception: + return np.nan * np.ones_like(y_sample[0]) + + +@numba.njit +def _interpolate_nearest(x, x_sample, y_sample): + i = _find_enclosing_index(x, x_sample) + return y_sample[i] + + From a9c3b5e9ed27a98adcbb571b6dec5fbf3aeaef93 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 9 Jun 2021 09:23:51 +0000 Subject: [PATCH 0014/1416] Simplify interpolation --- satpy/readers/gms5_vissr_navigation.py | 28 +++++++++++++------ .../tests/reader_tests/test_gms5_vissr_l1b.py | 12 +++++++- 2 files changed, 30 insertions(+), 10 deletions(-) diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index 3e03dca287..1f83fe8d24 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -628,7 +628,7 @@ def interpolate_continuous(x, x_sample, y_sample): numba currently doesn't support those keyword arguments. """ try: - return _interpolate(x, x_sample, y_sample, False) + return _interpolate(x, x_sample, y_sample) except Exception: return np.nan @@ -637,25 +637,22 @@ def interpolate_continuous(x, x_sample, y_sample): def interpolate_angles(x, x_sample, y_sample): """Linear interpolation of periodic angles. - Takes care of phase jumps by wrapping angle differences to [-pi, pi]. - - Numpy equivalent would be np.interp(x, x_sample, np.unwrap(y_sample)), but - numba currently doesn't support np.unwrap. + In order to preserve the periodicity, change phase jumps greater than pi + to their 2*pi complement, then perform interpolation and finally wrap + the results to [-pi, pi]. 
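+
+    For example, with x_sample = [0, 1] and y_sample = [0.75*pi, -0.75*pi],
+    interpolating at x = 0.5 crosses the +/-pi boundary the short way round
+    and yields -pi after wrapping, rather than the naive average of 0.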
""" try: - return _interpolate(x, x_sample, y_sample, True) + return _wrap_2pi(_interpolate(x, x_sample, unwrap(y_sample))) except Exception: return np.nan @numba.njit -def _interpolate(x, x_sample, y_sample, wrap_2pi): +def _interpolate(x, x_sample, y_sample): i = _find_enclosing_index(x, x_sample) offset = y_sample[i] x_diff = x_sample[i+1] - x_sample[i] y_diff = y_sample[i+1] - y_sample[i] - if wrap_2pi: - y_diff = _wrap_2pi(y_diff) slope = y_diff / x_diff dist = x - x_sample[i] return offset + slope * dist @@ -694,3 +691,16 @@ def _interpolate_nearest(x, x_sample, y_sample): return y_sample[i] +@numba.njit +def unwrap(p, discont=np.pi): + """Simple 1-D numba implementation of np.unwrap().""" + p = np.ascontiguousarray(p) + dd = np.diff(p) + slice1 = slice(1, None) + ddmod = np.mod(dd + np.pi, 2*np.pi) - np.pi + ddmod = np.where((ddmod == -np.pi) & (dd > 0), np.pi, ddmod) + ph_correct = ddmod - dd + ph_correct = np.where(np.fabs(dd) < discont, 0, ph_correct) + up = p.copy() + up[slice1] = p[slice1] + ph_correct.cumsum() + return up diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index e8bd8f6113..c101d22b6f 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -477,7 +477,7 @@ def test_interpolate_continuous(self, obs_time, expected): [ (-1, np.nan), (1.5, 0.75*np.pi), - (2.5, np.pi), + (2.5, -np.pi), (3.5, -0.75*np.pi), (5, np.nan), ] @@ -664,6 +664,16 @@ def _assert_attrs_close(a, b, attrs, desc): ) +@pytest.mark.parametrize( + 'angles', + [ + (np.array([0, np.pi/2, np.pi, -np.pi, -np.pi/2])), + (np.array([0, 0.78539816, 1.57079633, 5.49778714, 6.28318531])) + ] +) +def test_unwrap(angles): + np.testing.assert_allclose(nav.unwrap(angles), np.unwrap(angles)) + def test_get_observation_time(): scan_params = nav.ScanningParameters( From a7a3128d50b13aebfbd1d3f2318ad99dc17f1774 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 9 Jun 2021 15:34:59 +0000 Subject: [PATCH 0015/1416] Refactor predicted navigation parameters --- satpy/readers/gms5_vissr_navigation.py | 18 +-- .../tests/reader_tests/test_gms5_vissr_l1b.py | 138 +++++++++++------- 2 files changed, 96 insertions(+), 60 deletions(-) diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index 1f83fe8d24..0208b4bb4a 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -20,7 +20,7 @@ def get_jitclass_type(cls): @numba.njit -def get_lons_lats(lines, pixels, nav_params): +def get_lons_lats(lines, pixels, scan_params, predicted_nav_params): num_lines = len(lines) num_pixels = len(pixels) output_shape = (num_lines, num_pixels) @@ -28,12 +28,13 @@ def get_lons_lats(lines, pixels, nav_params): lats = np.zeros(output_shape) for i in range(num_lines): for j in range(num_pixels): - line = lines[i] - pixel = pixels[j] - point = (line, pixel) + point = (lines[i], pixels[j]) + obs_time = get_observation_time(point, scan_params) + nav_params = predicted_nav_params.interpolate(obs_time) lon, lat = get_lon_lat(point, nav_params) lons[i, j] = lon lats[i, j] = lat + return lons, lats @numba.experimental.jitclass( @@ -70,10 +71,10 @@ def _get_relative_observation_time(point, scan_params): @numba.njit -def get_lon_lat(line, pixel, nav_params): +def get_lon_lat(point, nav_params): """Get longitude and latitude coordinates for a given image pixel.""" scan_angles = transform_image_coords_to_scanning_angles( - point=(line, pixel), + 
point=point, offset=nav_params.get_image_offset(), sampling=nav_params.get_sampling() ) @@ -397,7 +398,6 @@ def __init__( self.nutation_precession = nutation_precession - @numba.experimental.jitclass( [ ('line_offset', numba.float64), @@ -605,7 +605,7 @@ def _interpolate(self, observation_time, predicted_values): ('proj_params', get_jitclass_type(ProjectionParameters)), ] ) -class PredictionInterpolator: +class PredictedNavigationParameters: def __init__(self, attitude_prediction, orbit_prediction, proj_params): self.attitude_prediction = attitude_prediction self.orbit_prediction = orbit_prediction @@ -693,7 +693,7 @@ def _interpolate_nearest(x, x_sample, y_sample): @numba.njit def unwrap(p, discont=np.pi): - """Simple 1-D numba implementation of np.unwrap().""" + """Numba implementation of np.unwrap in one dimension.""" p = np.ascontiguousarray(p) dd = np.diff(p) slice1 = slice(1, None) diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index c101d22b6f..5fed59a04b 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -359,24 +359,23 @@ """ + class TestSinglePixelNavigation: """Test navigation of a single pixel.""" @pytest.mark.parametrize( - 'line,pixel,nav_params,lon_exp,lat_exp', + 'point,nav_params,expected', [ - (ref['line'], - ref['pixel'], + ((ref['line'], ref['pixel']), ref['nav_params'], - ref['lon'], - ref['lat']) + (ref['lon'], ref['lat'])) for ref in NAVIGATION_REFERENCE ] ) - def test_get_lon_lat(self, line, pixel, nav_params, lon_exp, lat_exp): + def test_get_lon_lat(self, point, nav_params, expected): """Test getting lon/lat coordinates for a given pixel.""" - lon, lat = nav.get_lon_lat(line, pixel, nav_params) - np.testing.assert_allclose((lon, lat), (lon_exp, lat_exp)) + lon, lat = nav.get_lon_lat(point, nav_params) + np.testing.assert_allclose((lon, lat), expected) def test_nav_matrices_are_contiguous(self): """Test that navigation matrices are stored as C-contiguous arrays.""" @@ -523,38 +522,18 @@ def test_interpolate_attitude_prediction(self, obs_time, attitude_prediction, at self.assert_attitude_close(attitude, attitude_expected) def test_interpolate_prediction(self, obs_time, proj_params, attitude_prediction, orbit_prediction, nav_params_expected): - interpolator = nav.PredictionInterpolator( + predicted_nav_params = nav.PredictedNavigationParameters( proj_params=proj_params, attitude_prediction=attitude_prediction, orbit_prediction=orbit_prediction ) - nav_params = interpolator.interpolate(obs_time) + nav_params = predicted_nav_params.interpolate(obs_time) self.assert_nav_params_close(nav_params, nav_params_expected) @pytest.fixture def obs_time(self): return 2.5 - @pytest.fixture - def orbit_prediction(self): - return nav.OrbitPrediction( - prediction_times=np.array([1.0, 2.0, 3.0, 4.0]), - greenwich_sidereal_time=np.array([0.0, 1.0, 2.0, 3.0]), - declination_from_sat_to_sun=np.array([0.1, 1.1, 2.1, 3.1]), - right_ascension_from_sat_to_sun=np.array([0.2, 1.2, 2.2, 3.2]), - sat_position_earth_fixed_x=np.array([0.3, 1.3, 2.3, 3.3]), - sat_position_earth_fixed_y=np.array([0.4, 1.4, 2.4, 3.4]), - sat_position_earth_fixed_z=np.array([0.5, 1.5, 2.5, 3.5]), - nutation_precession=np.array( - [ - 0.6*np.identity(3), - 1.6*np.identity(3), - 2.6*np.identity(3), - 3.6*np.identity(3) - ] - ) - ) - @pytest.fixture def orbit_expected(self): return nav.Orbit( @@ -567,15 +546,6 @@ def orbit_expected(self): nutation_precession=1.6 * np.identity(3) ) - 
@pytest.fixture - def attitude_prediction(self): - return nav.AttitudePrediction( - prediction_times=np.array([1.0, 2.0, 3.0]), - angle_between_earth_and_sun=np.array([0.0, 1.0, 2.0]), - angle_between_sat_spin_and_z_axis=np.array([0.1, 1.1, 2.1]), - angle_between_sat_spin_and_yz_plane=np.array([0.2, 1.2, 2.2]), - ) - @pytest.fixture def attitude_expected(self): return nav.Attitude( @@ -584,18 +554,6 @@ def attitude_expected(self): angle_between_sat_spin_and_yz_plane=1.7, ) - @pytest.fixture - def proj_params(self): - return nav.ProjectionParameters( - line_offset=1378.5, - pixel_offset=1672.5, - stepping_angle=0.000140000047395, - sampling_angle=0.000095719995443, - misalignment=np.identity(3).astype(np.float64), - earth_flattening=0.003352813177897, - earth_equatorial_radius=6378136 - ) - @pytest.fixture def nav_params_expected(self, attitude_expected, orbit_expected, proj_params): return nav.NavigationParameters( @@ -664,6 +622,84 @@ def _assert_attrs_close(a, b, attrs, desc): ) +class TestImageNavigation: + def test_get_lons_lats(self, scan_params, predicted_nav_params): + lons, lats = nav.get_lons_lats( + lines=np.array([1000, 1500, 2000]), + pixels=np.array([1000, 1500, 2000]), + scan_params=scan_params, + predicted_nav_params=predicted_nav_params + ) + # TODO + assert 1 == 0 + + +@pytest.fixture +def sampling_angle(): + return 0.000095719995443 + + +@pytest.fixture +def scan_params(sampling_angle): + return nav.ScanningParameters( + start_time_of_scan=0, + spinning_rate=0.5, + num_sensors=1, + sampling_angle=sampling_angle + ) + + +@pytest.fixture +def predicted_nav_params(attitude_prediction, orbit_prediction, proj_params): + return nav.PredictedNavigationParameters( + attitude_prediction, orbit_prediction, proj_params + ) + + +@pytest.fixture +def attitude_prediction(): + return nav.AttitudePrediction( + prediction_times=np.array([1.0, 2.0, 3.0]), + angle_between_earth_and_sun=np.array([0.0, 1.0, 2.0]), + angle_between_sat_spin_and_z_axis=np.array([0.1, 1.1, 2.1]), + angle_between_sat_spin_and_yz_plane=np.array([0.2, 1.2, 2.2]), + ) + + +@pytest.fixture +def orbit_prediction(): + return nav.OrbitPrediction( + prediction_times=np.array([1.0, 2.0, 3.0, 4.0]), + greenwich_sidereal_time=np.array([0.0, 1.0, 2.0, 3.0]), + declination_from_sat_to_sun=np.array([0.1, 1.1, 2.1, 3.1]), + right_ascension_from_sat_to_sun=np.array([0.2, 1.2, 2.2, 3.2]), + sat_position_earth_fixed_x=np.array([0.3, 1.3, 2.3, 3.3]), + sat_position_earth_fixed_y=np.array([0.4, 1.4, 2.4, 3.4]), + sat_position_earth_fixed_z=np.array([0.5, 1.5, 2.5, 3.5]), + nutation_precession=np.array( + [ + 0.6*np.identity(3), + 1.6*np.identity(3), + 2.6*np.identity(3), + 3.6*np.identity(3) + ] + ) + ) + + +@pytest.fixture +def proj_params(sampling_angle): + return nav.ProjectionParameters( + line_offset=1378.5, + pixel_offset=1672.5, + stepping_angle=0.000140000047395, + sampling_angle=sampling_angle, + misalignment=np.identity(3).astype(np.float64), + earth_flattening=0.003352813177897, + earth_equatorial_radius=6378136 + ) + + @pytest.mark.parametrize( 'angles', [ From 793be49caf6e6402676992018e281722dd1ef498 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 9 Jun 2021 15:35:47 +0000 Subject: [PATCH 0016/1416] Update get_area_def --- satpy/readers/gms5_vissr_l1b.py | 65 +++++++++++++++++++++++++++++---- 1 file changed, 57 insertions(+), 8 deletions(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index 83e372fe7a..07585b726d 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ 
b/satpy/readers/gms5_vissr_l1b.py @@ -21,7 +21,6 @@ """ import dask.array as da -import numba import numpy as np import xarray as xr @@ -29,6 +28,8 @@ from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import modified_julian_day_to_datetime64 import satpy.readers._geos_area as geos_area +import satpy.readers.gms5_vissr_navigation as nav + U1 = '>u1' I2 = '>i2' @@ -559,7 +560,7 @@ def _pad(self, data): fh_ir2._header['image_parameters']['mode']['vis_frame_parameters']['number_of_pixels'], fh_ir2._header['image_parameters']['mode']['vis_frame_parameters']['number_of_lines']) - def get_area_def(self, dsid): + def get_area_def_test(self, dsid): """ TODO: - misalignment matrix, rotation matrix @@ -615,10 +616,58 @@ def get_area_def(self, dsid): area = geos_area.get_area_definition(proj_dict, extent) return area - def get_lonlat(self): + def get_area_def(self, dsid): + alt_ch_name = ALT_CHANNEL_NAMES[dsid['name']] mode_block = self._header['image_parameters']['mode'] - from pprint import pprint - pprint(mode_block) - params = { - 'spinning_rate': mode_block['spin_rate'] - } + coord_conv = self._header['image_parameters']['coordinate_conversion'] + att_pred = self._header['image_parameters']['attitude_prediction']['data'] + orb_pred = self._header['image_parameters']['orbit_prediction']['data'] + + center_line_vissr_frame = coord_conv['central_line_number_of_vissr_frame'][alt_ch_name] + center_pixel_vissr_frame = coord_conv['central_pixel_number_of_vissr_frame'][alt_ch_name] + pixel_offset = coord_conv['pixel_difference_of_vissr_center_from_normal_position'][ + alt_ch_name] + + scan_params = nav.ScanningParameters( + start_time_of_scan=mode_block['observation_time_mjd'], + spinning_rate=mode_block['spin_rate'], + num_sensors=coord_conv['number_of_sensor_elements'][alt_ch_name], + sampling_angle=coord_conv['sampling_angle_along_pixel'][alt_ch_name], + ) + attitude_prediction = nav.AttitudePrediction( + prediction_times=att_pred['prediction_time_mjd'].astype(np.float64), + angle_between_earth_and_sun=att_pred['sun_earth_angle'].astype(np.float64), + angle_between_sat_spin_and_z_axis=att_pred['right_ascension_of_attitude'].astype(np.float64), + angle_between_sat_spin_and_yz_plane=att_pred['declination_of_attitude'].astype(np.float64), + ) + orbit_prediction = nav.OrbitPrediction( + prediction_times=orb_pred['prediction_time_mjd'].astype(np.float64), + greenwich_sidereal_time=np.deg2rad(orb_pred['greenwich_sidereal_time'].astype(np.float64)), + declination_from_sat_to_sun=np.deg2rad(orb_pred['sat_sun_vector_earth_fixed']['azimuth'].astype(np.float64)), + right_ascension_from_sat_to_sun=np.deg2rad(orb_pred['sat_sun_vector_earth_fixed']['elevation'].astype(np.float64)), + sat_position_earth_fixed_x=orb_pred['satellite_position_earth_fixed'][0].astype(np.float64), + sat_position_earth_fixed_y=orb_pred['satellite_position_earth_fixed'][1].astype(np.float64), + sat_position_earth_fixed_z=orb_pred['satellite_position_earth_fixed'][2].astype(np.float64), + nutation_precession=np.ascontiguousarray(orb_pred['conversion_matrix'].transpose().astype(np.float64)) + ) + # TODO: Check all angles in radians + + proj_params = nav.ProjectionParameters( + line_offset=center_line_vissr_frame, + pixel_offset=center_pixel_vissr_frame + pixel_offset, + stepping_angle=coord_conv['stepping_angle_along_line'][alt_ch_name], + sampling_angle=coord_conv['sampling_angle_along_pixel'][alt_ch_name], + misalignment=np.ascontiguousarray(coord_conv['matrix_of_misalignment']).astype(np.float64), + 
earth_flattening=coord_conv['parameters']['oblateness_of_earth'], + earth_equatorial_radius=coord_conv['parameters']['equatorial_radius'] + ) + predicted_nav_params = nav.PredictedNavigationParameters( + attitude_prediction, orbit_prediction, proj_params + ) + lons, lats = nav.get_lons_lats( + lines=np.array([686, 2089]), + pixels=np.array([1680, 1793]), + scan_params=scan_params, + predicted_nav_params=predicted_nav_params + ) + print(lons, lats) From a71b97550aa3c4dcef03b55641585eb23b85db08 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Thu, 10 Jun 2021 09:55:25 +0000 Subject: [PATCH 0017/1416] Add skeleton for file handler tests --- satpy/readers/gms5_vissr_l1b.py | 22 ++-- .../tests/reader_tests/test_gms5_vissr_l1b.py | 104 ++++++++++++++++++ 2 files changed, 114 insertions(+), 12 deletions(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index 07585b726d..8847a4a39d 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -440,17 +440,11 @@ def __init__(self, filename, filename_info, filetype_info): self._filename = filename self._filename_info = filename_info self._header, self._channel_type = self._read_header(filename) + + from pprint import pprint # FIXME + pprint(self._header) + self._mda = self._get_mda() - # - # print(self._header['image_parameters']['mode']['ssp_longitude']) - # print(self._header['image_parameters']['orbit_prediction']['data']['satellite_position']) - # print(self._header['image_parameters']['vis_calibration']['data_segment']) - # print(self._header['image_parameters']['vis_calibration']['vis1_calibration_table']['updated_time']) - # print(self._header['image_parameters']['ir1_calibration']['flag_of_calid_shutter_temperature_calculation']) - # print(self._header['image_parameters']['wv_calibration']['conversion_table_of_equivalent_black_body_temperature']) - # print(self._header['image_parameters']['coordinate_conversion']['central_line_number_of_vissr_frame']) - # print(self._header['image_parameters']['coordinate_conversion']['central_pixel_number_of_vissr_frame']) - # print(self._header['image_parameters']['coordinate_conversion']['pixel_difference_of_vissr_center_from_normal_position']) def _read_header(self, filename): header = {} @@ -523,6 +517,10 @@ def _get_mda(self): } def get_dataset(self, dataset_id, ds_info): + """ + + TODO: Split in two methods + """ num_lines, _ = self._get_actual_shape() memmap = np.memmap( filename=self._filename, @@ -648,7 +646,7 @@ def get_area_def(self, dsid): sat_position_earth_fixed_x=orb_pred['satellite_position_earth_fixed'][0].astype(np.float64), sat_position_earth_fixed_y=orb_pred['satellite_position_earth_fixed'][1].astype(np.float64), sat_position_earth_fixed_z=orb_pred['satellite_position_earth_fixed'][2].astype(np.float64), - nutation_precession=np.ascontiguousarray(orb_pred['conversion_matrix'].transpose().astype(np.float64)) + nutation_precession=np.ascontiguousarray(orb_pred['conversion_matrix'].transpose(0, 2, 1).astype(np.float64)) ) # TODO: Check all angles in radians @@ -657,7 +655,7 @@ def get_area_def(self, dsid): pixel_offset=center_pixel_vissr_frame + pixel_offset, stepping_angle=coord_conv['stepping_angle_along_line'][alt_ch_name], sampling_angle=coord_conv['sampling_angle_along_pixel'][alt_ch_name], - misalignment=np.ascontiguousarray(coord_conv['matrix_of_misalignment']).astype(np.float64), + misalignment=np.ascontiguousarray(coord_conv['matrix_of_misalignment'].transpose().astype(np.float64)), 
earth_flattening=coord_conv['parameters']['oblateness_of_earth'], earth_equatorial_radius=coord_conv['parameters']['equatorial_radius'] ) diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index 5fed59a04b..7eb39990d9 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -2,8 +2,10 @@ import numpy as np import pytest +from unittest import mock import satpy.readers.gms5_vissr_navigation as nav +from satpy.readers.gms5_vissr_l1b import GMS5VISSRFileHandler # Navigation references computed with JMA's Msial library (files @@ -721,3 +723,105 @@ def test_get_observation_time(): point = np.array([11, 100]) obs_time = nav.get_observation_time(point, scan_params) np.testing.assert_allclose(obs_time, 50000.0000705496871047) + + +class TestFileHandler: + def test_get_attitude_prediction(self, file_handler): + # TODO: Go on here! + assert 1 == 0 + + @pytest.fixture + def file_handler(self, header, channel_type): + with mock.patch('satpy.readers.gms5_vissr_l1b.GMS5VISSRFileHandler._read_header') as _read_header: + _read_header.return_value = header, channel_type + fh = GMS5VISSRFileHandler('foo', {'foo': 'bar'}, {'foo': 'bar'}) + return fh + + @pytest.fixture(params=['VIS', 'IR']) + def channel_type(self, request): + return request.param + + @pytest.fixture + def header(self, control_block, image_params): + return { + 'control_block': control_block, + 'image_parameters': image_params + } + + @pytest.fixture + def control_block(self): + return { + + } + + @pytest.fixture + def image_params( + self, mode, coordinate_conversion, attitude_prediction, + orbit_prediction, vis_calibration, ir1_calibration, ir2_calibration, + wv_calibration, simple_coordinate_conversion_table + ): + return { + 'mode': mode, + 'coordinate_conversion': coordinate_conversion, + 'attitude_prediction': attitude_prediction, + 'orbit_prediction': orbit_prediction, + 'vis_calibration': vis_calibration, + 'ir1_calibration': ir1_calibration, + 'ir2_calibration': ir2_calibration, + 'wv_calibration': wv_calibration, + 'simple_coordinate_conversion_table': simple_coordinate_conversion_table + } + + @pytest.fixture + def mode(self): + return { + 'satellite_name': b'GMS-5 ' + } + + @pytest.fixture + def coordinate_conversion(self): + return { + + } + + @pytest.fixture + def attitude_prediction(self): + return { + + } + + @pytest.fixture + def orbit_prediction(self): + return { + + } + + @pytest.fixture + def vis_calibration(self): + return { + + } + + @pytest.fixture + def ir1_calibration(self): + return { + + } + + @pytest.fixture + def ir2_calibration(self): + return { + + } + + @pytest.fixture + def wv_calibration(self): + return { + + } + + @pytest.fixture + def simple_coordinate_conversion_table(self): + return { + + } From 6b025ef592735b6d5240a84da0d0bf95d6d6113f Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 16 Jun 2021 16:21:23 +0000 Subject: [PATCH 0018/1416] Read navigation parameters from the header --- satpy/readers/gms5_vissr_l1b.py | 31 +- satpy/readers/gms5_vissr_navigation.py | 6 + .../tests/reader_tests/test_gms5_vissr_l1b.py | 324 +++++++++++++----- 3 files changed, 260 insertions(+), 101 deletions(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index 8847a4a39d..7d2e03f90a 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -615,6 +615,9 @@ def get_area_def_test(self, dsid): return area def get_area_def(self, dsid): + 
return None + + def get_lons_lats(self, dsid): alt_ch_name = ALT_CHANNEL_NAMES[dsid['name']] mode_block = self._header['image_parameters']['mode'] coord_conv = self._header['image_parameters']['coordinate_conversion'] @@ -627,7 +630,7 @@ def get_area_def(self, dsid): alt_ch_name] scan_params = nav.ScanningParameters( - start_time_of_scan=mode_block['observation_time_mjd'], + start_time_of_scan=coord_conv['scheduled_observation_time'], spinning_rate=mode_block['spin_rate'], num_sensors=coord_conv['number_of_sensor_elements'][alt_ch_name], sampling_angle=coord_conv['sampling_angle_along_pixel'][alt_ch_name], @@ -641,31 +644,35 @@ def get_area_def(self, dsid): orbit_prediction = nav.OrbitPrediction( prediction_times=orb_pred['prediction_time_mjd'].astype(np.float64), greenwich_sidereal_time=np.deg2rad(orb_pred['greenwich_sidereal_time'].astype(np.float64)), - declination_from_sat_to_sun=np.deg2rad(orb_pred['sat_sun_vector_earth_fixed']['azimuth'].astype(np.float64)), - right_ascension_from_sat_to_sun=np.deg2rad(orb_pred['sat_sun_vector_earth_fixed']['elevation'].astype(np.float64)), - sat_position_earth_fixed_x=orb_pred['satellite_position_earth_fixed'][0].astype(np.float64), - sat_position_earth_fixed_y=orb_pred['satellite_position_earth_fixed'][1].astype(np.float64), - sat_position_earth_fixed_z=orb_pred['satellite_position_earth_fixed'][2].astype(np.float64), + declination_from_sat_to_sun=np.deg2rad(orb_pred['sat_sun_vector_earth_fixed']['elevation'].astype(np.float64)), + right_ascension_from_sat_to_sun=np.deg2rad(orb_pred['sat_sun_vector_earth_fixed']['azimuth'].astype(np.float64)), + sat_position_earth_fixed_x=orb_pred['satellite_position_earth_fixed'][:, 0].astype(np.float64), + sat_position_earth_fixed_y=orb_pred['satellite_position_earth_fixed'][:, 1].astype(np.float64), + sat_position_earth_fixed_z=orb_pred['satellite_position_earth_fixed'][:, 2].astype(np.float64), nutation_precession=np.ascontiguousarray(orb_pred['conversion_matrix'].transpose(0, 2, 1).astype(np.float64)) ) - # TODO: Check all angles in radians + # Use earth radius and flattening from JMA's Msial library, because + # the values in the data seem to be pretty old. For example the + # equatorial radius is from the Bessel Ellipsoid (1841). 
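+        # As a rough sketch of the size of that difference (assuming the
+        # EARTH_* constants added to gms5_vissr_navigation below): with
+        # a = 6378136.0 m and f = 1/298.257, the polar radius a * (1 - f)
+        # comes out near 6356751 m, while the file header still carries the
+        # Bessel (1841) equatorial radius of 6377397.0 m.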
proj_params = nav.ProjectionParameters( line_offset=center_line_vissr_frame, pixel_offset=center_pixel_vissr_frame + pixel_offset, stepping_angle=coord_conv['stepping_angle_along_line'][alt_ch_name], sampling_angle=coord_conv['sampling_angle_along_pixel'][alt_ch_name], misalignment=np.ascontiguousarray(coord_conv['matrix_of_misalignment'].transpose().astype(np.float64)), - earth_flattening=coord_conv['parameters']['oblateness_of_earth'], - earth_equatorial_radius=coord_conv['parameters']['equatorial_radius'] + earth_flattening=nav.EARTH_FLATTENING, + earth_equatorial_radius=nav.EARTH_EQUATORIAL_RADIUS ) predicted_nav_params = nav.PredictedNavigationParameters( attitude_prediction, orbit_prediction, proj_params ) lons, lats = nav.get_lons_lats( - lines=np.array([686, 2089]), - pixels=np.array([1680, 1793]), + #lines=np.array([686, 2089]), + lines=np.array([686]), + # pixels=np.array([1680, 1793]), + pixels=np.array([1680]), scan_params=scan_params, predicted_nav_params=predicted_nav_params ) - print(lons, lats) + return lons, lats diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index 0208b4bb4a..cdd326f770 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -10,6 +10,10 @@ import numba import numpy as np +EARTH_FLATTENING = 1/298.257 +EARTH_EQUATORIAL_RADIUS = 6378136.0 +"""Constants taken from JMA's Msial library.""" + def get_jitclass_type(cls): try: @@ -64,6 +68,8 @@ def get_observation_time(point, scan_params): @numba.njit def _get_relative_observation_time(point, scan_params): line, pixel = point + pixel = pixel + 1 + line = line + 1 spinning_freq = 1440 * scan_params.spinning_rate line_step = np.floor((line - 1) / scan_params.num_sensors) pixel_step = (scan_params.sampling_angle * pixel) / (2 * np.pi) diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index 7eb39990d9..516949cf64 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -5,7 +5,8 @@ from unittest import mock import satpy.readers.gms5_vissr_navigation as nav -from satpy.readers.gms5_vissr_l1b import GMS5VISSRFileHandler +import satpy.readers.gms5_vissr_l1b as vissr +from satpy.tests.utils import make_dataid # Navigation references computed with JMA's Msial library (files @@ -452,6 +453,24 @@ def test_normalize_vector(self): np.testing.assert_allclose(normed, v / np.sqrt(14)) +class TestImageNavigation: + def test_get_lons_lats(self, scan_params, predicted_nav_params): + lons_exp = [[-114.70480148, -112.23691703, -109.70496014], + [8.27367963, 8.74144293, 9.17639531], + [15.92344528, 16.27070079, 16.63288666]] + lats_exp = [[-22.86714851, -24.4190695, -25.92376274], + [-42.65777933, -39.93518282, -37.2094099], + [3.32018285, 6.04812029, 8.7739802]] + lons, lats = nav.get_lons_lats( + lines=np.array([1000, 1500, 2000]), + pixels=np.array([1000, 1500, 2000]), + scan_params=scan_params, + predicted_nav_params=predicted_nav_params + ) + np.testing.assert_allclose(lons, lons_exp) + np.testing.assert_allclose(lats, lats_exp) + + class TestPredictionInterpolation: """Test interpolation of orbit and attitude predictions.""" @@ -517,11 +536,11 @@ def test_interpolate_nearest(self, obs_time, expected): def test_interpolate_orbit_prediction(self, obs_time, orbit_prediction, orbit_expected): orbit = orbit_prediction.interpolate(obs_time) - self.assert_orbit_close(orbit, orbit_expected) + assert_orbit_close(orbit, 
orbit_expected) def test_interpolate_attitude_prediction(self, obs_time, attitude_prediction, attitude_expected): attitude = attitude_prediction.interpolate(obs_time) - self.assert_attitude_close(attitude, attitude_expected) + assert_attitude_close(attitude, attitude_expected) def test_interpolate_prediction(self, obs_time, proj_params, attitude_prediction, orbit_prediction, nav_params_expected): predicted_nav_params = nav.PredictedNavigationParameters( @@ -530,7 +549,7 @@ def test_interpolate_prediction(self, obs_time, proj_params, attitude_prediction orbit_prediction=orbit_prediction ) nav_params = predicted_nav_params.interpolate(obs_time) - self.assert_nav_params_close(nav_params, nav_params_expected) + assert_nav_params_close(nav_params, nav_params_expected) @pytest.fixture def obs_time(self): @@ -562,79 +581,6 @@ def nav_params_expected(self, attitude_expected, orbit_expected, proj_params): attitude_expected, orbit_expected, proj_params ) - def assert_orbit_close(self, a, b): - """Assert that two Orbit instances are close. - - This would probably make more sense in the Orbit class. However, - numba doesn't support np.allclose, yet. - """ - attrs = [ - 'greenwich_sidereal_time', - 'declination_from_sat_to_sun', - 'right_ascension_from_sat_to_sun', - 'sat_position_earth_fixed_x', - 'sat_position_earth_fixed_y', - 'sat_position_earth_fixed_z', - 'nutation_precession', - ] - self._assert_attrs_close(a, b, attrs, 'Orbit') - - def assert_attitude_close(self, a, b): - """Assert that two Attitude instances are close. - - This would probably make more sense in the Attitude class. However, - numba doesn't support np.allclose, yet. - """ - attrs = [ - 'angle_between_earth_and_sun', - 'angle_between_sat_spin_and_z_axis', - 'angle_between_sat_spin_and_yz_plane' - ] - self._assert_attrs_close(a, b, attrs, 'Attitude') - - def assert_proj_params_close(self, a, b): - """Assert that two ProjectionParameters instances are close. - - This would probably make more sense in the Attitude class. However, - numba doesn't support np.allclose, yet. - """ - attrs = [ - 'line_offset', - 'pixel_offset', - 'stepping_angle', - 'sampling_angle', - 'misalignment', - 'earth_flattening', - 'earth_equatorial_radius', - ] - self._assert_attrs_close(a, b, attrs, 'ProjectionParameters') - - def assert_nav_params_close(self, a, b): - self.assert_attitude_close(a.attitude, b.attitude) - self.assert_orbit_close(a.orbit, b.orbit) - self.assert_proj_params_close(a.proj_params, b.proj_params) - - @staticmethod - def _assert_attrs_close(a, b, attrs, desc): - for attr in attrs: - np.testing.assert_allclose( - getattr(a, attr), - getattr(b, attr), - err_msg='{} attribute {} differs'.format(desc, attr) - ) - - -class TestImageNavigation: - def test_get_lons_lats(self, scan_params, predicted_nav_params): - lons, lats = nav.get_lons_lats( - lines=np.array([1000, 1500, 2000]), - pixels=np.array([1000, 1500, 2000]), - scan_params=scan_params, - predicted_nav_params=predicted_nav_params - ) - # TODO - assert 1 == 0 - @pytest.fixture def sampling_angle(): @@ -726,20 +672,33 @@ def test_get_observation_time(): class TestFileHandler: - def test_get_attitude_prediction(self, file_handler): - # TODO: Go on here! 
- assert 1 == 0 + channel_types = { + 'VIS': 'VIS', + 'IR1': 'IR', + 'IR2': 'IR', + 'IR3': 'IR' + } + + def test_get_lons_lats(self, file_handler, dataset_id): + lons, lats = file_handler.get_lons_lats(dataset_id) + + # TODO: Go on here: Channel dependent expectations + lons_exp = [[139.990380]] + lats_exp = [[35.047056]] + np.testing.assert_allclose(lons, lons_exp, atol=1E-6) + np.testing.assert_allclose(lats, lats_exp, atol=1E-6) @pytest.fixture - def file_handler(self, header, channel_type): + def file_handler(self, header, dataset_id): + channel_type = self.channel_types[dataset_id['name']] with mock.patch('satpy.readers.gms5_vissr_l1b.GMS5VISSRFileHandler._read_header') as _read_header: _read_header.return_value = header, channel_type - fh = GMS5VISSRFileHandler('foo', {'foo': 'bar'}, {'foo': 'bar'}) + fh = vissr.GMS5VISSRFileHandler('foo', {'foo': 'bar'}, {'foo': 'bar'}) return fh - @pytest.fixture(params=['VIS', 'IR']) - def channel_type(self, request): - return request.param + @pytest.fixture(params=['VIS', 'IR1']) + def dataset_id(self, request): + return make_dataid(name=request.param) @pytest.fixture def header(self, control_block, image_params): @@ -775,25 +734,127 @@ def image_params( @pytest.fixture def mode(self): return { - 'satellite_name': b'GMS-5 ' + 'satellite_name': b'GMS-5 ', + 'spin_rate': 99.21774, } @pytest.fixture def coordinate_conversion(self): return { - + 'central_line_number_of_vissr_frame': { + 'IR1': 1378.5, + 'IR2': 1378.7, + 'VIS': 5513.0, + 'WV': 1379.1001 + }, + 'central_pixel_number_of_vissr_frame': { + 'IR1': 1672.5, + 'IR2': 1672.5, + 'VIS': 6688.5, + 'WV': 1672.5 + }, + 'pixel_difference_of_vissr_center_from_normal_position': { + 'IR1': 0.0, + 'IR2': 0.0, + 'VIS': 0.0, + 'WV': 0.0 + }, + 'scheduled_observation_time': 50130.979089568464, + 'number_of_sensor_elements': { + 'IR1': 1.0, + 'IR2': 1.0, + 'VIS': 4.0, + 'WV': 1.0 + }, + 'sampling_angle_along_pixel': { + 'IR1': 9.5719995e-05, + 'IR2': 9.5719995e-05, + 'VIS': 2.3929999e-05, + 'WV': 9.5719995e-05 + }, + 'stepping_angle_along_line': { + 'IR1': 0.00014000005, + 'IR2': 0.00014000005, + 'VIS': 3.5000005e-05, + 'WV': 0.00014000005 + }, + 'matrix_of_misalignment': np.array( + [[9.9999917e-01, -5.1195198e-04, -1.2135329e-03], + [5.1036407e-04, 9.9999905e-01, -1.3083406e-03], + [1.2142011e-03, 1.3077201e-03, 9.9999845e-01]], + dtype=np.float32 + ), + 'parameters': { + 'equatorial_radius': 6377397.0, + 'oblateness_of_earth': 0.003342773, + } } @pytest.fixture def attitude_prediction(self): return { - + 'data': np.array([ + (50130.93055556, (19960217, 222000), 3.14911863, 0.00054604, 4.3324597 , 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.93402778, (19960217, 222500), 3.14911863, 0.00054604, 4.31064812, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.9375 , (19960217, 223000), 3.14911863, 0.00054604, 4.28883633, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.94097222, (19960217, 223500), 3.14911863, 0.00054604, 4.26702432, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.94444444, (19960217, 224000), 3.14911863, 0.00054604, 4.2452121 , 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.94791667, (19960217, 224500), 3.14911863, 0.00054604, 4.22339966, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.95138889, (19960217, 225000), 3.14911863, 0.00054604, 4.201587 , 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.95486111, (19960217, 225500), 3.14911863, 0.00054604, 4.17977411, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + 
(50130.95833333, (19960217, 230000), 3.14911863, 0.00054604, 4.157961 , 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.96180556, (19960217, 230500), 3.14911863, 0.00054604, 4.13614765, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.96527778, (19960217, 231000), 3.14911863, 0.00054604, 4.11433408, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.96875 , (19960217, 231500), 3.14911863, 0.00054604, 4.09252027, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.97222222, (19960217, 232000), 3.14911863, 0.00054604, 4.07070622, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.97569444, (19960217, 232500), 3.14911863, 0.00054604, 4.04889193, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.97916667, (19960217, 233000), 3.14911863, 0.00054604, 4.02707741, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.98263889, (19960217, 233500), 3.14911863, 0.00054604, 4.00526265, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.98611111, (19960217, 234000), 3.14911863, 0.00054604, 3.98344765, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.98958333, (19960217, 234500), 3.14911863, 0.00054604, 3.96163241, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.99305556, (19960217, 235000), 3.14911863, 0.00054604, 3.93981692, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.99652778, (19960217, 235500), 3.14911863, 0.00054604, 3.9180012 , 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131. , (19960218, 0), 3.14911863, 0.00054604, 3.89618523, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131.00347222, (19960218, 500), 3.14911863, 0.00054604, 3.87436903, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131.00694444, (19960218, 1000), 3.14911863, 0.00054604, 3.85255258, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131.01041667, (19960218, 1500), 3.14911863, 0.00054604, 3.8307359 , 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131.01388889, (19960218, 2000), 3.14911863, 0.00054604, 3.80891898, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131.01736111, (19960218, 2500), 3.14911863, 0.00054604, 3.78710182, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131.02083333, (19960218, 3000), 3.14911863, 0.00054604, 3.76528442, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131.02430556, (19960218, 3500), 3.14911863, 0.00054604, 3.74346679, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131.02777778, (19960218, 4000), 3.14911863, 0.00054604, 3.72164893, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131.03125 , (19960218, 4500), 3.14911863, 0.00054604, 3.69983084, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131.03472222, (19960218, 5000), 3.14911863, 0.00054604, 3.67801252, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131.03819444, (19960218, 5500), 3.14911863, 0.00054604, 3.65619398, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131.04166667, (19960218, 10000), 3.14911863, 0.00054604, 3.63437521, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0)], + dtype=vissr.ATTITUDE_PREDICTION_DATA + ), } @pytest.fixture def orbit_prediction(self): return { - + 'data': np.array([ + (50130.96180556, (960217, 230500), [ 2247604.14185506, -42110997.39399951, -276688.79765022], [3069.77904265, 164.12584895, 3.65437628], [-32392525.09983424, 27002204.93121811, -263873.25702763], [ 0.81859376, 0.6760037 , 17.44588753], 133.46391815, (330.12326803, -12.19424863), (197.27884747, -11.96904141), [[ 9.99936382e-01, 1.03449318e-02, 4.49611916e-03], 
[-1.03447475e-02, 9.99946490e-01, -6.42483646e-05], [-4.49654321e-03, 1.77330598e-05, 9.99989890e-01]], [ 2.46885475e+08, -2.07840219e+08, -7.66028692e+07], (-0.35887085, 140.18562594, 35793706.31768975), 0, 0), + (50130.96527778, (960217, 231000), [ 3167927.33749398, -42051692.51095297, -275526.52514815], [3065.46435995, 231.22434208, 4.09379482], [-32392279.4626506 , 27002405.27592725, -258576.96255205], [ 0.81939962, 0.66017389, 17.86159393], 134.71734048, (330.12643276, -12.19310271), (196.02858456, -11.9678881 ), [[ 9.99936382e-01, 1.03449336e-02, 4.49611993e-03], [-1.03447493e-02, 9.99946490e-01, -6.42473793e-05], [-4.49654398e-03, 1.77320586e-05, 9.99989890e-01]], [ 2.46204142e+08, -2.07689897e+08, -7.65268207e+07], (-0.35166851, 140.18520316, 35793613.0815237 ), 0, 0), + (50130.96875 , (960217, 231500), [ 4086736.12968183, -41972273.80964861, -274232.7185828 ], [3059.68341675, 298.21262775, 4.53123515], [-32392033.65156128, 27002600.83510851, -253157.23498394], [ 0.81975174, 0.6441 , 18.26873686], 135.97076281, (330.12959087, -12.19195587), (194.77831505, -11.96673388), [[ 9.99936382e-01, 1.03449353e-02, 4.49612071e-03], [-1.03447510e-02, 9.99946490e-01, -6.42463940e-05], [-4.49654474e-03, 1.77310575e-05, 9.99989890e-01]], [ 2.45524133e+08, -2.07559497e+08, -7.64508451e+07], (-0.3442983 , 140.18478523, 35793516.57370046), 0, 0), + (50130.97222222, (960217, 232000), [ 5003591.03339227, -41872779.15809826, -272808.0027587 ], [3052.43895532, 365.05867777, 4.9664885 ], [-32391787.80234722, 27002791.53735474, -247616.67261456], [ 0.81965461, 0.62779672, 18.66712192], 137.22418515, (330.13274246, -12.19080808), (193.52803902, -11.9655787 ), [[ 9.99936382e-01, 1.03449371e-02, 4.49612148e-03], [-1.03447528e-02, 9.99946490e-01, -6.42454089e-05], [-4.49654551e-03, 1.77300565e-05, 9.99989890e-01]], [ 2.44845888e+08, -2.07448982e+08, -7.63749418e+07], (-0.33676374, 140.18437233, 35793416.91561355), 0, 0), + (50130.97569444, (960217, 232500), [ 5918053.49286455, -41753256.02295399, -271253.06495935], [3043.73441705, 431.73053079, 5.39934712], [-32391542.0492856 , 27002977.3157848 , -241957.93142027], [ 0.81911313, 0.61127876, 19.05655891], 138.47760748, (330.13588763, -12.1896593 ), (192.27775657, -11.96442254), [[ 9.99936382e-01, 1.03449388e-02, 4.49612225e-03], [-1.03447545e-02, 9.99946490e-01, -6.42444238e-05], [-4.49654627e-03, 1.77290557e-05, 9.99989890e-01]], [ 2.44169846e+08, -2.07358303e+08, -7.62991102e+07], (-0.32906846, 140.18396465, 35793314.23041636), 0, 0), + (50130.97916667, (960217, 233000), [ 6829686.08751574, -41613761.44760592, -269568.65462124], [3033.5739409 , 498.19630731, 5.82960444], [-32391296.52466749, 27003158.10847847, -236183.72381214], [ 0.81813262, 0.59456087, 19.43686189], 139.73102981, (330.1390265 , -12.18850951), (191.02746783, -11.96326537), [[ 9.99936382e-01, 1.03449406e-02, 4.49612302e-03], [-1.03447563e-02, 9.99946490e-01, -6.42434389e-05], [-4.49654703e-03, 1.77280550e-05, 9.99989890e-01]], [ 2.43496443e+08, -2.07287406e+08, -7.62233495e+07], (-0.32121612, 140.18356238, 35793208.6428103 ), 0, 0), + (50130.98263889, (960217, 233500), [ 7738052.74476409, -41454362.02480648, -267755.58296603], [3021.96236148, 564.42422513, 6.25705512], [-32391051.35918404, 27003333.85786499, -230296.81731314], [ 0.81671881, 0.57765777, 19.80784932], 140.98445214, (330.14215916, -12.18735869), (189.77717289, -11.96210717), [[ 9.99936381e-01, 1.03449423e-02, 4.49612379e-03], [-1.03447580e-02, 9.99946489e-01, -6.42424541e-05], [-4.49654778e-03, 1.77270545e-05, 9.99989890e-01]], 
[ 2.42826115e+08, -2.07236222e+08, -7.61476592e+07], (-0.3132105 , 140.18316567, 35793100.27882991), 0, 0), + (50130.98611111, (960217, 234000), [ 8642718.9445816 , -41275133.86582235, -265814.72261683], [3008.90520686, 630.38261431, 6.68149519], [-32390806.68247503, 27003504.50991426, -224300.03325666], [ 0.81487783, 0.56058415, 20.16934411], 142.23787447, (330.14528573, -12.18620679), (188.52687186, -11.9609479 ), [[ 9.99936381e-01, 1.03449440e-02, 4.49612456e-03], [-1.03447598e-02, 9.99946489e-01, -6.42414694e-05], [-4.49654854e-03, 1.77260540e-05, 9.99989890e-01]], [ 2.42159297e+08, -2.07204676e+08, -7.60720382e+07], (-0.30505542, 140.18277471, 35792989.2656269 ), 0, 0), + (50130.98958333, (960217, 234500), [ 9543251.93095296, -41076162.56379041, -263747.00717057], [2994.40869593, 696.03993248, 7.10272213], [-32390562.62077149, 27003670.01680953, -218196.24541058], [ 0.81261619, 0.54335463, 20.52117372], 143.4912968 , (330.14840632, -12.18505381), (187.27656486, -11.95978754), [[ 9.99936381e-01, 1.03449458e-02, 4.49612532e-03], [-1.03447615e-02, 9.99946489e-01, -6.42404848e-05], [-4.49654930e-03, 1.77250538e-05, 9.99989890e-01]], [ 2.41496422e+08, -2.07192684e+08, -7.59964859e+07], (-0.29675479, 140.18238966, 35792875.73125207), 0, 0), + (50130.99305556, (960217, 235000), [ 10439220.91492008, -40857543.15396438, -261553.43075696], [2978.47973561, 761.36477969, 7.52053495], [-32390319.30020279, 27003830.33282405, -211988.37862591], [ 0.80994076, 0.52598377, 20.86317023], 144.74471913, (330.15152105, -12.1838997 ), (186.026252 , -11.95862606), [[ 9.99936381e-01, 1.03449475e-02, 4.49612609e-03], [-1.03447632e-02, 9.99946489e-01, -6.42395003e-05], [-4.49655005e-03, 1.77240537e-05, 9.99989890e-01]], [ 2.40837919e+08, -2.07200148e+08, -7.59210011e+07], (-0.28831259, 140.18201066, 35792759.80443729), 0, 0), + (50130.99652778, (960217, 235500), [ 11330197.2840407 , -40619380.06793167, -259235.04755252], [2961.12591755, 826.32591367, 7.93473432], [-32390076.84311398, 27003985.41857829, -205679.40741202], [ 0.80685878, 0.50848599, 21.19517045], 145.99814147, (330.15463004, -12.18274445), (184.77593341, -11.95746344), [[ 9.99936381e-01, 1.03449492e-02, 4.49612685e-03], [-1.03447650e-02, 9.99946489e-01, -6.42385159e-05], [-4.49655080e-03, 1.77230537e-05, 9.99989890e-01]], [ 2.40184218e+08, -2.07226967e+08, -7.58455830e+07], (-0.27973286, 140.18163787, 35792641.6143761 ), 0, 0), + (50131. 
, (960218, 0), [ 12215754.80493221, -40361787.08463053, -256792.97127933], [2942.35551459, 890.89226454, 8.34512262], [-32389835.37113104, 27004135.23720251, -199272.35452792], [ 0.8033778 , 0.49087558, 21.51701595], 147.2515638 , (330.15773341, -12.18158803), (183.5256092 , -11.95629965), [[ 9.99936381e-01, 1.03449510e-02, 4.49612761e-03], [-1.03447667e-02, 9.99946489e-01, -6.42375317e-05], [-4.49655155e-03, 1.77220539e-05, 9.99989890e-01]], [ 2.39535744e+08, -2.07273025e+08, -7.57702305e+07], (-0.2710197 , 140.18127143, 35792521.29050537), 0, 0), + (50131.00347222, (960218, 500), [ 13095469.82708225, -40084887.27645436, -254228.37467049], [2922.17747695, 955.03294974, 8.75150409], [-32389595.00191828, 27004279.7580633 , -192770.28953487], [ 0.79950572, 0.47316669, 21.82855319], 148.50498613, (330.16083128, -12.18043041), (182.27527951, -11.95513466), [[ 9.99936381e-01, 1.03449527e-02, 4.49612837e-03], [-1.03447684e-02, 9.99946489e-01, -6.42365476e-05], [-4.49655230e-03, 1.77210542e-05, 9.99989890e-01]], [ 2.38892921e+08, -2.07338200e+08, -7.56949425e+07], (-0.26217728, 140.18091148, 35792398.96228714), 0, 0), + (50131.00694444, (960218, 1000), [ 13968921.48773305, -39788812.95011112, -251542.48890031], [2900.60142795, 1018.71728887, 9.15368488], [-32389355.85220329, 27004418.95297137, -186176.32730922], [ 0.79525074, 0.45537327, 22.12963356], 149.75840846, (330.16392379, -12.17927157), (181.02494445, -11.95396845), [[ 9.99936381e-01, 1.03449544e-02, 4.49612913e-03], [-1.03447701e-02, 9.99946489e-01, -6.42355636e-05], [-4.49655305e-03, 1.77200547e-05, 9.99989890e-01]], [ 2.38256170e+08, -2.07422360e+08, -7.56197178e+07], (-0.25320985, 140.18055815, 35792274.75899146), 0, 0), + (50131.01041667, (960218, 1500), [ 14835691.90970188, -39473705.58489136, -248736.60300345], [2877.63765957, 1081.9148182 , 9.55147314], [-32389118.03536845, 27004552.79890675, -179493.62657611], [ 0.79062131, 0.43750908, 22.42011344], 151.01183079, (330.16701107, -12.17811148), (179.77462147, -11.952801 ), [[ 9.99936381e-01, 1.03449561e-02, 4.49612989e-03], [-1.03447719e-02, 9.99946489e-01, -6.42345798e-05], [-4.49655380e-03, 1.77190553e-05, 9.99989890e-01]], [ 2.37625908e+08, -2.07525364e+08, -7.55445552e+07], (-0.24412169, 140.18021156, 35792148.80948149), 0, 0), + (50131.01388889, (960218, 2000), [ 15695366.40490882, -39139715.76420763, -245812.06324505], [2853.29712752, 1144.59530548, 9.94467917], [-32388881.66227116, 27004681.27687033, -172725.38836895], [ 0.7856262 , 0.41958762, 22.69985431], 152.26525312, (330.17009324, -12.17695013), (178.52427609, -11.95163228), [[ 9.99936381e-01, 1.03449578e-02, 4.49613064e-03], [-1.03447736e-02, 9.99946489e-01, -6.42335961e-05], [-4.49655455e-03, 1.77180562e-05, 9.99989890e-01]], [ 2.37002549e+08, -2.07647061e+08, -7.54694534e+07], (-0.23491716, 140.17987182, 35792021.2420001 ), 0, 0), + (50131.01736111, (960218, 2500), [ 16547533.6691137 , -38787003.10533711, -242770.27248672], [2827.5914462 , 1206.72876414, 10.33311542], [-32388646.84104986, 27004804.37195345, -165874.85452439], [ 0.78027439, 0.40162218, 22.96872279], 153.51867545, (330.17317044, -12.17578748), (177.27392574, -11.95046228), [[ 9.99936381e-01, 1.03449595e-02, 4.49613140e-03], [-1.03447753e-02, 9.99946489e-01, -6.42326125e-05], [-4.49655529e-03, 1.77170571e-05, 9.99989890e-01]], [ 2.36386506e+08, -2.07787291e+08, -7.53944111e+07], (-0.22560065, 140.17953905, 35791892.18395986), 0, 0), + (50131.02083333, (960218, 3000), [ 17391785.98229151, -38415736.18212036, -239612.68950141], [2800.53288309, 
1268.28546791, 10.71659666], [-32388413.67874206, 27004922.07123395, -158945.30610131], [ 0.77457509, 0.38362576, 23.2265907 ], 154.77209777, (330.17624281, -12.17462353), (176.02357057, -11.94929096), [[ 9.99936381e-01, 1.03449612e-02, 4.49613215e-03], [-1.03447770e-02, 9.99946489e-01, -6.42316291e-05], [-4.49655603e-03, 1.77160583e-05, 9.99989890e-01]], [ 2.35778185e+08, -2.07945887e+08, -7.53194268e+07], (-0.21617663, 140.17921335, 35791761.76173551), 0, 0)], + dtype=vissr.ORBIT_PREDICTION_DATA + ) } @pytest.fixture @@ -825,3 +886,88 @@ def simple_coordinate_conversion_table(self): return { } + + +def _get_attributes(obj): + import inspect + non_callable_attrs = inspect.getmembers( + obj, lambda x: not inspect.ismethod(x) + ) + return set(attr for attr in non_callable_attrs if not attr.startswith('_')) + + +def _assert_have_same_attributes(a, b): + a_attrs = _get_attributes(a) + b_attrs = _get_attributes(b) + assert a_attrs == b_attrs, "Different set of attribtues" + return a_attrs + + +def assert_numba_objects_close(a, b): + _assert_have_same_attributes(a, b) + _assert_attrs_close(a, b) + + +def assert_orbit_close(a, b): + """Assert that two Orbit instances are close. + + This would probably make more sense in the Orbit class. However, + numba doesn't support np.allclose, yet. + """ + attrs = [ + 'greenwich_sidereal_time', + 'declination_from_sat_to_sun', + 'right_ascension_from_sat_to_sun', + 'sat_position_earth_fixed_x', + 'sat_position_earth_fixed_y', + 'sat_position_earth_fixed_z', + 'nutation_precession', + ] + _assert_attrs_close(a, b, attrs, 'Orbit') + + +def assert_attitude_close(a, b): + """Assert that two Attitude instances are close. + + This would probably make more sense in the Attitude class. However, + numba doesn't support np.allclose, yet. + """ + attrs = [ + 'angle_between_earth_and_sun', + 'angle_between_sat_spin_and_z_axis', + 'angle_between_sat_spin_and_yz_plane' + ] + _assert_attrs_close(a, b, attrs, 'Attitude') + + +def assert_proj_params_close(a, b): + """Assert that two ProjectionParameters instances are close. + + This would probably make more sense in the Attitude class. However, + numba doesn't support np.allclose, yet. 
+    """
+    attrs = [
+        'line_offset',
+        'pixel_offset',
+        'stepping_angle',
+        'sampling_angle',
+        'misalignment',
+        'earth_flattening',
+        'earth_equatorial_radius',
+    ]
+    _assert_attrs_close(a, b, attrs, 'ProjectionParameters')
+
+
+def assert_nav_params_close(a, b):
+    assert_attitude_close(a.attitude, b.attitude)
+    assert_orbit_close(a.orbit, b.orbit)
+    assert_proj_params_close(a.proj_params, b.proj_params)
+
+
+def _assert_attrs_close(a, b, attrs=None, desc='object'):
+    # attrs/desc are optional so the four-argument calls above also work.
+    if attrs is None:
+        attrs = _get_attributes(a)
+    for attr in attrs:
+        np.testing.assert_allclose(
+            getattr(a, attr),
+            getattr(b, attr),
+            err_msg='{} attribute {} differs'.format(desc, attr)
+        )
\ No newline at end of file

From 2d3eed24a3193020fb4b8cb4429bd8bc94a413af Mon Sep 17 00:00:00 2001
From: Stephan Finkensieper
Date: Tue, 22 Jun 2021 14:48:39 +0000
Subject: [PATCH 0019/1416] Add lat/lon coords to dataset

---
 satpy/etc/readers/gms5-vissr_l1b.yaml         |  2 +-
 satpy/readers/gms5_vissr_l1b.py               | 55 ++++++--------
 .../tests/reader_tests/test_gms5_vissr_l1b.py | 76 +++++++++++++++----
 3 files changed, 86 insertions(+), 47 deletions(-)

diff --git a/satpy/etc/readers/gms5-vissr_l1b.yaml b/satpy/etc/readers/gms5-vissr_l1b.yaml
index d295eeac5b..77ecf32ed8 100644
--- a/satpy/etc/readers/gms5-vissr_l1b.yaml
+++ b/satpy/etc/readers/gms5-vissr_l1b.yaml
@@ -10,7 +10,7 @@ reader:
     sensors: [gms5-vissr]
     default_channels: []
-    reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader
+    reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
 
 file_types:
     gms5_vissr_vis:
diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py
index 7d2e03f90a..619f94a396 100644
--- a/satpy/readers/gms5_vissr_l1b.py
+++ b/satpy/readers/gms5_vissr_l1b.py
@@ -521,7 +521,8 @@ def get_dataset(self, dataset_id, ds_info):
 
         TODO: Split in two methods
         """
-        num_lines, _ = self._get_actual_shape()
+
+        num_lines, num_pixels = self._get_actual_shape()
         memmap = np.memmap(
             filename=self._filename,
             mode='r',
@@ -538,36 +539,21 @@ def get_dataset(self, dataset_id, ds_info):
                 'line_number': ('y', dask_array['LCW']['line_number'].compute())
             }
         )
+
+        lines, pixels = self._get_image_coords(data)
+        lons, lats = self._get_lons_lats(dataset_id, lines, pixels)
+        lons = xr.DataArray(lons, dims=('y', 'x'), attrs={'standard_name': 'longitude'})
+        lats = xr.DataArray(lats, dims=('y', 'x'), attrs={'standard_name': 'latitude'})
+        data.coords['lon'] = lons
+        data.coords['lat'] = lats
+
         return data
 
     def _get_acq_time(self, dask_array):
         acq_time = dask_array['LCW']['scan_time'].compute()
         return modified_julian_day_to_datetime64(acq_time)
 
-    def _update_attrs(self):
-        print(fh_ir2._header['image_parameters']['coordinate_conversion']['orbital_parameters'])
-
-    def _pad(self, data):
-        # Actual line/column numbers. Alternatively use "line number" coordinate.
-        print(fh_ir2._header['control_block']['head_valid_line_number'])
-        print(fh_ir2._header['control_block']['final_valid_line_number'])
-
-        # Target shape for padding!
- print(fh_ir2._header['image_parameters']['mode']['vis_frame_parameters']['number_of_lines']) - print( - fh_ir2._header['image_parameters']['mode']['vis_frame_parameters']['number_of_pixels'], - fh_ir2._header['image_parameters']['mode']['vis_frame_parameters']['number_of_lines']) - def get_area_def_test(self, dsid): - """ - TODO: - - misalignment matrix, rotation matrix - - near sided perspective - - Checked: - - numerical accuracy of stepping angle - - following C routing strictly (discarding head_valid_line_number) -> worse - """ alt_ch_name = ALT_CHANNEL_NAMES[dsid['name']] num_lines, num_pixels = self._get_actual_shape() mode_block = self._header['image_parameters']['mode'] @@ -614,11 +600,9 @@ def get_area_def_test(self, dsid): area = geos_area.get_area_definition(proj_dict, extent) return area - def get_area_def(self, dsid): - return None - - def get_lons_lats(self, dsid): - alt_ch_name = ALT_CHANNEL_NAMES[dsid['name']] + def _get_lons_lats(self, dataset_id, lines, pixels): + # TODO: Store channel name in self.channel_name + alt_ch_name = ALT_CHANNEL_NAMES[dataset_id['name']] mode_block = self._header['image_parameters']['mode'] coord_conv = self._header['image_parameters']['coordinate_conversion'] att_pred = self._header['image_parameters']['attitude_prediction']['data'] @@ -668,11 +652,16 @@ def get_lons_lats(self, dsid): attitude_prediction, orbit_prediction, proj_params ) lons, lats = nav.get_lons_lats( - #lines=np.array([686, 2089]), - lines=np.array([686]), - # pixels=np.array([1680, 1793]), - pixels=np.array([1680]), + # lines=np.array([686, 2089]), + # pixels=np.array([1680, 1793]), # FIXME TODO + lines=lines.astype(np.float64), + pixels=pixels.astype(np.float64), scan_params=scan_params, predicted_nav_params=predicted_nav_params ) return lons, lats + + def _get_image_coords(self, data): + lines = data.coords['line_number'].values + pixels = np.arange(data.shape[1]) + return lines, pixels diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index 516949cf64..cfc6389c10 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -679,27 +679,43 @@ class TestFileHandler: 'IR3': 'IR' } - def test_get_lons_lats(self, file_handler, dataset_id): - lons, lats = file_handler.get_lons_lats(dataset_id) - - # TODO: Go on here: Channel dependent expectations - lons_exp = [[139.990380]] - lats_exp = [[35.047056]] + def test_dataset_navigation(self, file_handler, dataset_id, lons_lats_exp): + lons_exp, lats_exp = lons_lats_exp + dataset = file_handler.get_dataset(dataset_id, {}) + lons = dataset.coords['lon'] + lats = dataset.coords['lat'] np.testing.assert_allclose(lons, lons_exp, atol=1E-6) np.testing.assert_allclose(lats, lats_exp, atol=1E-6) @pytest.fixture - def file_handler(self, header, dataset_id): + def file_handler(self, header, dataset_id, image_data): channel_type = self.channel_types[dataset_id['name']] - with mock.patch('satpy.readers.gms5_vissr_l1b.GMS5VISSRFileHandler._read_header') as _read_header: + with mock.patch('satpy.readers.gms5_vissr_l1b.GMS5VISSRFileHandler._read_header') as _read_header, \ + mock.patch('satpy.readers.gms5_vissr_l1b.np.memmap') as memmap: _read_header.return_value = header, channel_type + memmap.return_value = image_data fh = vissr.GMS5VISSRFileHandler('foo', {'foo': 'bar'}, {'foo': 'bar'}) - return fh + # Yield instead of return, to make the memmap mock succeed. 
+        # See https://stackoverflow.com/a/59045506/5703449
+        yield fh
 
     @pytest.fixture(params=['VIS', 'IR1'])
     def dataset_id(self, request):
         return make_dataid(name=request.param)
 
+    @pytest.fixture
+    def image_data(self, dataset_id):
+        line_control_word = np.dtype([
+            ('line_number', vissr.I4),
+            ('scan_time', vissr.R8),
+        ])
+        dtype = np.dtype([('LCW', line_control_word),
+                          ('image_data', vissr.U1, (2,))])
+        if dataset_id['name'] == 'IR1':
+            return np.array([((686, 50000), (1, 2)), ((2089, 50000), (3, 4))], dtype=dtype)
+        else:
+            raise NotImplementedError
+
     @pytest.fixture
     def header(self, control_block, image_params):
         return {
@@ -710,7 +726,7 @@ def control_block(self):
     @pytest.fixture
     def control_block(self):
         return {
-
+            'available_block_size_of_image_data': 2
         }
 
     @pytest.fixture
@@ -732,14 +748,31 @@ def image_params(
         }
 
     @pytest.fixture
-    def mode(self):
+    def mode(self, ir_frame_parameters, vis_frame_parameters):
         return {
             'satellite_name': b'GMS-5 ',
             'spin_rate': 99.21774,
+            'ir_frame_parameters': ir_frame_parameters,
+            'vis_frame_parameters': vis_frame_parameters
         }
 
+    @pytest.fixture
+    def ir_frame_parameters(self):
+        return {'number_of_lines': 2, 'number_of_pixels': 2}
+
+    @pytest.fixture
+    def vis_frame_parameters(self):
+        return {'number_of_lines': 2, 'number_of_pixels': 2}
+
     @pytest.fixture
     def coordinate_conversion(self):
+        """Provide parameters for coordinate conversions.
+
+        Since we are testing with very small images, adjust the pixel offset
+        so that the first column is at the image center. This has the
+        advantage that the lat/lon coordinates are finite for every column;
+        otherwise they would be in space.
+        """
         return {
             'central_line_number_of_vissr_frame': {
                 'IR1': 1378.5,
                 'IR2': 1378.5,
                 'VIS': 5513.0,
                 'WV': 1379.1001
             },
             'central_pixel_number_of_vissr_frame': {
-                'IR1': 1672.5,
+                'IR1': 0.5,  # to obtain finite lat/lon coordinates
                 'IR2': 1672.5,
-                'VIS': 6688.5,
+                'VIS': 0.5,  # to obtain finite lat/lon coordinates
                 'WV': 1672.5
             },
             'pixel_difference_of_vissr_center_from_normal_position': {
@@ -887,6 +920,23 @@ def simple_coordinate_conversion_table(self):
         }
 
+    @pytest.fixture
+    def lons_lats_exp(self, dataset_id):
+        """Get expected lon/lat coordinates.
+
+        Computed with JMA's Msial library.
+        """
+        if dataset_id['name'] == 'IR1':
+            lons_exp = [[139.680120, 139.718902],
+                        [140.307367, 140.346062]]
+            lats_exp = [[35.045132, 35.045361],
+                        [-34.971012, -34.970738]]
+        elif dataset_id['name'] == 'VIS':
+            lons_exp = lats_exp = None
+        else:
+            raise NotImplementedError
+        return lons_exp, lats_exp
+
 
 def _get_attributes(obj):
     import inspect
 
From 2280a40418a9de9530532fd822e63e6957c48b96 Mon Sep 17 00:00:00 2001
From: Martin Raspaud
Date: Wed, 14 Jul 2021 11:04:12 +0200
Subject: [PATCH 0020/1416] Add a green-sar composite

---
 satpy/etc/composites/sar.yaml       | 13 +++++++++++++
 satpy/etc/enhancements/generic.yaml | 16 ++++++++++++++++
 2 files changed, 29 insertions(+)

diff --git a/satpy/etc/composites/sar.yaml b/satpy/etc/composites/sar.yaml
index 3f9359d4b2..c624bcd7e6 100644
--- a/satpy/etc/composites/sar.yaml
+++ b/satpy/etc/composites/sar.yaml
@@ -93,3 +93,16 @@ composites:
       calibration: gamma
       quantity: dB
     standard_name: sar-ice-log
+
+  green-sar:
+    compositor: !!python/name:satpy.composites.GenericCompositor
+    prerequisites:
+    - name: measurement
+      polarization: vh
+    - name: measurement
+      polarization: vv
+      quantity: dB
+    - name: measurement
+      polarization: vv
+      quantity: natural
+    standard_name: green-sar
diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml
index 73de008986..9b3e970990 100644
--- a/satpy/etc/enhancements/generic.yaml
+++ b/satpy/etc/enhancements/generic.yaml
@@ -331,6 +331,22 @@ enhancements:
       args:
      - [true, true, true]
 
+  green-sar:
+    standard_name: green-sar
+    operations:
+    - name: stretch
+      method: !!python/name:satpy.enhancements.stretch
+      kwargs:
+        stretch: crude
+        min_stretch: [0, -19.18, 0]
+        max_stretch: [0.07, -1.294, .43]
+        #min_stretch: [0, -30, 0]
+        #max_stretch: [1, 10, 2]
+    - name: gamma
+      method: !!python/name:satpy.enhancements.gamma
+      kwargs:
+        gamma: [1.82, 0.74, 1]
+
   sar-quick:
     standard_name: sar-quick
     operations:

From c791c87facc3f85f489dc1e8ed985b9ce7f93da2 Mon Sep 17 00:00:00 2001
From: Martin Raspaud
Date: Wed, 14 Jul 2021 11:05:30 +0200
Subject: [PATCH 0021/1416] Clean up the sar-c reader

---
 satpy/readers/sar_c_safe.py | 58 ++++++++++++++++++++-----------------
 1 file changed, 31 insertions(+), 27 deletions(-)

diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py
index 1b4e128627..ed725f48f3 100644
--- a/satpy/readers/sar_c_safe.py
+++ b/satpy/readers/sar_c_safe.py
@@ -71,8 +71,7 @@ def _dictify(r):
         return r.text
     for x in r.findall("./*"):
         if x.tag in d and not isinstance(d[x.tag], list):
-            d[x.tag] = [d[x.tag]]
-            d[x.tag].append(_dictify(x))
+            d[x.tag] = [d[x.tag], _dictify(x)]
         else:
             d[x.tag] = _dictify(x)
     return d
@@ -160,8 +159,7 @@ def get_calibration_constant(self):
     def get_calibration(self, calibration, chunks=None):
         """Get the calibration array."""
         calibration_name = _get_calibration_name(calibration)
-        calibration_vector = self._get_calibration_vector(calibration_name, chunks)
-        return calibration_vector
+        return self._get_calibration_vector(calibration_name, chunks)
 
     def _get_calibration_vector(self, calibration_name, chunks):
         """Get the calibration vector."""
@@ -241,9 +239,7 @@ def __init__(self, root, shape):
     def read_azimuth_noise_array(self, chunks=CHUNK_SIZE):
         """Read the azimuth noise vectors."""
         self._read_azimuth_noise_blocks(chunks)
-        populated_array = self._assemble_azimuth_noise_blocks(chunks)
-
-        return populated_array
+        return self._assemble_azimuth_noise_blocks(chunks)
 
     def _read_azimuth_noise_blocks(self, chunks):
         """Read the azimuth noise blocks."""
@@ -289,16 +285,15 @@ def 
_create_dask_slice_from_block_line(self, current_line, chunks): pieces = [arr.sel(y=current_y) for arr in current_blocks] dask_pieces = self._get_padded_dask_pieces(pieces, chunks) - new_slice = da.hstack(dask_pieces) - return new_slice + return da.hstack(dask_pieces) def _find_blocks_covering_line(self, current_line): """Find the blocks covering a given line.""" - current_blocks = [] - for block in self.blocks: - if block.coords['y'][0] <= current_line <= block.coords['y'][-1]: - current_blocks.append(block) - return current_blocks + return [ + block + for block in self.blocks + if block.coords['y'][0] <= current_line <= block.coords['y'][-1] + ] def _get_padded_dask_pieces(self, pieces, chunks): """Get the padded pieces of a slice.""" @@ -538,23 +533,34 @@ def get_dataset(self, key, info): if key['name'] in ['longitude', 'latitude', 'altitude']: logger.debug('Constructing coordinate arrays.') - arrays = dict() + arrays = {} arrays['longitude'], arrays['latitude'], arrays['altitude'] = self.get_lonlatalts() data = arrays[key['name']] data.attrs.update(info) else: - data = rioxarray.open_rasterio(self.filename, lock=False, chunks=(1, CHUNK_SIZE, CHUNK_SIZE)).squeeze() - data = data.assign_coords(x=np.arange(len(data.coords['x'])), - y=np.arange(len(data.coords['y']))) - data = self._calibrate_and_denoise(data, key) - data.attrs.update(info) - data.attrs.update({'platform_name': self._mission_id}) + data = self.get_measurement(key, info) + return data - data = self._change_quantity(data, key['quantity']) + def get_measurement(self, key, info): + """Get the measurement data.""" + result = rioxarray.open_rasterio( + self.filename, lock=False, chunks=(1, CHUNK_SIZE, CHUNK_SIZE) + ).squeeze() - return data + result = result.assign_coords( + x=np.arange(len(result.coords['x'])), + y=np.arange(len(result.coords['y'])), + ) + + result = self._calibrate_and_denoise(result, key) + result.attrs.update(info) + result.attrs.update({'platform_name': self._mission_id}) + + result = self._change_quantity(result, key['quantity']) + + return result @staticmethod def _change_quantity(data, quantity): @@ -581,8 +587,7 @@ def _get_digital_number(self, data): """Get the digital numbers (uncalibrated data).""" data = data.where(data > 0) data = data.astype(np.float64) - dn = data * data - return dn + return data * data def _denoise(self, dn, chunks): """Denoise the data.""" @@ -597,8 +602,7 @@ def _calibrate(self, dn, chunks, key): cal = self.calibration.get_calibration(key['calibration'], chunks=chunks) cal_constant = self.calibration.get_calibration_constant() logger.debug('Calibrating.') - data = ((dn + cal_constant) / (cal ** 2)).clip(min=0) - return data + return ((dn + cal_constant) / (cal ** 2)).clip(min=0) @lru_cache(maxsize=2) def get_lonlatalts(self): From 178f76ec7373c01832b769beabf5808014907488 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Thu, 26 Aug 2021 13:41:51 +0000 Subject: [PATCH 0022/1416] Tidy up tests --- .../tests/reader_tests/test_gms5_vissr_l1b.py | 185 +----------------- 1 file changed, 1 insertion(+), 184 deletions(-) diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index cfc6389c10..77194f79bd 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -91,146 +91,7 @@ earth_equatorial_radius=6378136 ), ) - }, - - - - # { - # 'line': 686, - # 'pixel': 1680, - # 'lon': 139.990380, - # 'lat': 35.047056, - # 'tolerance': 0, - # 'nav_params': { - # 
'line_offset': 1378.5, - # 'pixel_offset': 1672.5, - # 'stepping_angle': 0.000140000047395, - # 'sampling_angle': 0.000095719995443, - # 'misalignment': np.array( - # [[0.999999165534973, 0.000510364072397, 0.001214201096445], - # [-0.000511951977387, 0.999999046325684, 0.001307720085606], - # [-0.001213532872498, -0.001308340579271, 0.999998450279236]] - # ), - # 'greenwich_sidereal_time': 2.468529732418296, - # 'angle_between_earth_and_sun': 3.997397917902958, - # 'declination_from_sat_to_sun': -0.208770861178982, - # 'right_ascension_from_sat_to_sun': 3.304369303579407, - # 'angle_between_sat_spin_and_z_axis': 3.149118633034304, - # 'angle_between_sat_spin_and_yz_plane': 0.000546042025980, - # 'sat_position_earth_fixed_x': -32390963.148471601307392, - # 'sat_position_earth_fixed_y': 27003395.381247851997614, - # 'sat_position_earth_fixed_z': -228134.860026293463307, - # 'nutation_precession': np.array( - # [[0.999936381496146, -0.010344758016410, -0.004496547784299], - # [0.010344942303489, 0.999946489495557, 0.000017727054455], - # [0.004496123789670, -0.000064242454080, 0.999989890320785]] - # ), - # 'earth_flattening': 0.003352813177897, - # 'earth_equatorial_radius': 6378136 - # }, - # }, - # { - # 'line': 2089, - # 'pixel': 1793, - # 'lon': 144.996967, - # 'lat': -34.959853, - # 'tolerance': 0, - # 'nav_params': { - # 'line_offset': 1378.5, - # 'pixel_offset': 1672.5, - # 'stepping_angle': 0.000140000047395, - # 'sampling_angle': 0.000095719995443, - # 'misalignment': np.array( - # [[0.999999165534973, 0.000510364072397, 0.001214201096445], - # [-0.000511951977387, 0.999999046325684, 0.001307720085606], - # [-0.001213532872498, -0.001308340579271, 0.999998450279236]] - # ), - # 'greenwich_sidereal_time': 2.530392320846865, - # 'angle_between_earth_and_sun': 3.935707944355762, - # 'declination_from_sat_to_sun': -0.208713576872247, - # 'right_ascension_from_sat_to_sun': 3.242660398458377, - # 'angle_between_sat_spin_and_z_axis': 3.149118633034304, - # 'angle_between_sat_spin_and_yz_plane': 0.000546042025980, - # 'sat_position_earth_fixed_x': -32390273.633551981300116, - # 'sat_position_earth_fixed_y': 27003859.543135114014149, - # 'sat_position_earth_fixed_z': -210800.087589388160268, - # 'nutation_precession': np.array( - # [[0.999936381432029, -0.010344763228876, -0.004496550050695], - # [0.010344947502662, 0.999946489441823, 0.000017724053657], - # [0.004496126086653, -0.000064239500295, 0.999989890310647]] - # ), - # 'earth_flattening': 0.003352813177897, - # 'earth_equatorial_radius': 6378136 - # }, - # }, - # { - # 'line': 999, - # 'pixel': 2996, - # 'lon': -165.023842, - # 'lat': 20.005603, - # 'tolerance': 0, - # 'nav_params': { - # 'line_offset': 1378.5, - # 'pixel_offset': 1672.5, - # 'stepping_angle': 0.000140000047395, - # 'sampling_angle': 0.000095719995443, - # 'misalignment': np.array( - # [[0.999999165534973, 0.000510364072397, 0.001214201096445], - # [-0.000511951977387, 0.999999046325684, 0.001307720085606], - # [-0.001213532872498, -0.001308340579271, 0.999998450279236]] - # ), - # 'greenwich_sidereal_time': 2.482331732831616, - # 'angle_between_earth_and_sun': 3.983634620574510, - # 'declination_from_sat_to_sun': -0.208758095943038, - # 'right_ascension_from_sat_to_sun': 3.290601673240597, - # 'angle_between_sat_spin_and_z_axis': 3.149118633034304, - # 'angle_between_sat_spin_and_yz_plane': 0.000546042025980, - # 'sat_position_earth_fixed_x': -32390808.779549609869719, - # 'sat_position_earth_fixed_y': 27003503.047290064394474, - # 'sat_position_earth_fixed_z': 
-224351.430479845439550, - # 'nutation_precession': np.array( - # [[0.999936381496146, -0.010344758016410, -0.004496547784299], - # [0.010344942303489, 0.999946489495557, 0.000017727054455], - # [0.004496123789670, -0.000064242454080, 0.999989890320785]] - # ), - # 'earth_flattening': 0.003352813177897, - # 'earth_equatorial_radius': 6378136 - # }, - # }, - # { - # 'line': 0, - # 'pixel': 0, - # 'lon': np.nan, - # 'lat': np.nan, - # 'tolerance': 0, - # 'nav_params': { - # 'line_offset': 1378.5, - # 'pixel_offset': 1672.5, - # 'stepping_angle': 0.000140000047395, - # 'sampling_angle': 0.000095719995443, - # 'misalignment': np.array( - # [[0.999999165534973, 0.000510364072397, 0.001214201096445], - # [-0.000511951977387, 0.999999046325684, 0.001307720085606], - # [-0.001213532872498, -0.001308340579271, 0.999998450279236]] - # ), - # 'greenwich_sidereal_time': 2.482331732831616, - # 'angle_between_earth_and_sun': 3.983634620574510, - # 'declination_from_sat_to_sun': -0.208758095943038, - # 'right_ascension_from_sat_to_sun': 3.290601673240597, - # 'angle_between_sat_spin_and_z_axis': 3.149118633034304, - # 'angle_between_sat_spin_and_yz_plane': 0.000546042025980, - # 'sat_position_earth_fixed_x': -32390808.779549609869719, - # 'sat_position_earth_fixed_y': 27003503.047290064394474, - # 'sat_position_earth_fixed_z': -224351.430479845439550, - # 'nutation_precession': np.array( - # [[0.999936381496146, -0.010344758016410, -0.004496547784299], - # [0.010344942303489, 0.999946489495557, 0.000017727054455], - # [0.004496123789670, -0.000064242454080, 0.999989890320785]] - # ), - # 'earth_flattening': 0.003352813177897, - # 'earth_equatorial_radius': 6378136 - # }, - # }, + } ] @@ -318,50 +179,6 @@ NAVIGATION_REFERENCE = VIS_NAVIGATION_REFERENCE + IR_NAVIGATION_REFERENCE -""" - { - 'line': , - 'pixel': , - 'lon': , - 'lat': , - 'tolerance': , - 'nav_params': nav.NavigationParameters( - attitude=nav.Attitude( - angle_between_earth_and_sun=, - angle_between_sat_spin_and_z_axis=, - angle_between_sat_spin_and_yz_plane=, - ), - orbit=nav.Orbit( - greenwich_sidereal_time=, - declination_from_sat_to_sun=, - right_ascension_from_sat_to_sun=, - sat_position_earth_fixed_x=, - sat_position_earth_fixed_y=, - sat_position_earth_fixed_z=, - nutation_precession=np.array( - [[], - [], - []] - ), - ), - proj_params=nav.ProjectionParameters( - line_offset=, - pixel_offset=, - stepping_angle=, - sampling_angle=, - misalignment=np.array( - [[], - [], - []] - ), - earth_flattening=, - earth_equatorial_radius= - ), - ) - }, - -""" - class TestSinglePixelNavigation: """Test navigation of a single pixel.""" From 0a6ae3c760eb5ad7f27ca3d8cac97589e308f44b Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Fri, 27 Aug 2021 08:01:29 +0000 Subject: [PATCH 0023/1416] Refactor single-pixel navigation Use njit decorator instead of jitclass --- satpy/readers/gms5_vissr_navigation.py | 932 +++++++++--------- .../tests/reader_tests/test_gms5_vissr_l1b.py | 77 +- 2 files changed, 488 insertions(+), 521 deletions(-) diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index cdd326f770..e17f57d021 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -7,6 +7,8 @@ https://www.data.jma.go.jp/mscweb/en/operation/fig/GMS_Users_Guide_3rd_Edition_Rev1.pdf """ +from collections import namedtuple + import numba import numpy as np @@ -41,21 +43,9 @@ def get_lons_lats(lines, pixels, scan_params, predicted_nav_params): return lons, lats 
-@numba.experimental.jitclass( - [ - ('start_time_of_scan', numba.float64), - ('spinning_rate', numba.float64), - ('num_sensors', numba.int64), - ('sampling_angle', numba.float64) - ] +ScanningParameters = namedtuple( + 'ScanningParameters', ['start_time_of_scan', 'spinning_rate', 'num_sensors', 'sampling_angle'] ) -class ScanningParameters: - def __init__(self, start_time_of_scan, spinning_rate, num_sensors, - sampling_angle): - self.start_time_of_scan = start_time_of_scan - self.spinning_rate = spinning_rate - self.num_sensors = num_sensors - self.sampling_angle = sampling_angle @numba.njit @@ -77,26 +67,85 @@ def _get_relative_observation_time(point, scan_params): @numba.njit -def get_lon_lat(point, nav_params): - """Get longitude and latitude coordinates for a given image pixel.""" +def get_lon_lat(point, attitude, orbit, proj_params): + """Get longitude and latitude coordinates for a given image pixel. + + Args: + point: Point (line, pixel) in image coordinates. + attitude: Attitude parameters. + orbit: Orbital parameters. + proj_params: Projection parameters. + Returns: + Longitude and latitude in degrees. + """ scan_angles = transform_image_coords_to_scanning_angles( - point=point, - offset=nav_params.get_image_offset(), - sampling=nav_params.get_sampling() + point, + _get_image_offset(proj_params), + _get_sampling(proj_params) ) - view_vector_sat = _transform_scanning_angles_to_satellite_coords( - scan_angles, nav_params + view_vector_sat = transform_scanning_angles_to_satellite_coords( + scan_angles, + proj_params.misalignment ) - view_vector_earth_fixed = _transform_satellite_to_earth_fixed_coords( - view_vector_sat, nav_params + view_vector_earth_fixed = transform_satellite_to_earth_fixed_coords( + view_vector_sat, + orbit.greenwich_sidereal_time, + _get_sat_sun_angles(orbit), + attitude.angle_between_earth_and_sun, + _get_spin_angles(attitude), + orbit.nutation_precession + ) + point_on_earth = intersect_with_earth( + view_vector_earth_fixed, + _get_sat_pos(orbit), + _get_ellipsoid(proj_params) ) - point_on_earth = _intersect_with_earth(view_vector_earth_fixed, nav_params) lon, lat = transform_earth_fixed_to_geodetic_coords( - point_on_earth, nav_params.proj_params.earth_flattening + point_on_earth, + proj_params.earth_flattening ) return lon, lat +@numba.njit +def _get_image_offset(proj_params): + return proj_params.line_offset, proj_params.pixel_offset + + +@numba.njit +def _get_sampling(proj_params): + return proj_params.stepping_angle, proj_params.sampling_angle + + +@numba.njit +def _get_sat_sun_angles(orbit): + return np.array(( + orbit.declination_from_sat_to_sun, + orbit.right_ascension_from_sat_to_sun + )) + + +@numba.njit +def _get_spin_angles(attitude): + return np.array(( + attitude.angle_between_sat_spin_and_z_axis, + attitude.angle_between_sat_spin_and_yz_plane + )) + + +@numba.njit +def _get_sat_pos(orbit): + return np.array((orbit.sat_position_earth_fixed_x, + orbit.sat_position_earth_fixed_y, + orbit.sat_position_earth_fixed_z)) + + +@numba.njit +def _get_ellipsoid(proj_params): + return np.array((proj_params.earth_equatorial_radius, + proj_params.earth_flattening)) + + @numba.njit def transform_image_coords_to_scanning_angles(point, offset, sampling): """Transform image coordinates to scanning angles. 
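With the containers reduced to namedtuples, a single pixel can be navigated
by calling the plain functions directly. A sketch of the intended usage,
using values from NAVIGATION_REFERENCE in the test suite (the `ref` variable
here is hypothetical):

    attitude, orbit, proj_params = ref['nav_params']
    lon, lat = get_lon_lat((686, 1680), attitude, orbit, proj_params)
    # expected: lon ~ 139.990380, lat ~ 35.047056 for this reference pixel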
@@ -118,216 +167,191 @@ def transform_image_coords_to_scanning_angles(point, offset, sampling):
 
 
 @numba.njit
-def _transform_scanning_angles_to_satellite_coords(angles, nav_params):
-    transformer = ScanningAnglesToSatelliteCoordsTransformer(
-        nav_params.proj_params.misalignment
+def transform_scanning_angles_to_satellite_coords(angles, misalignment):
+    """Transform scanning angles to satellite angular momentum coordinates.
+
+    Args:
+        angles: Scanning angles (x, y) in radians.
+        misalignment: Misalignment matrix (3x3)
+
+    Returns:
+        View vector (x, y, z) in satellite angular momentum coordinates.
+    """
+    rotation, vector = _get_transforms_from_scanning_angles_to_satellite_coords(
+        angles
     )
-    return transformer.transform(angles)
-
-
-@numba.experimental.jitclass([
-    ('misalignment', numba.types.Array(numba.float64, 2, layout='C'))
-])
-class ScanningAnglesToSatelliteCoordsTransformer:
-    """Transform scanning angles to satellite angular momentum coordinates."""
-
-    def __init__(self, misalignment):
-        """Initialize the transformer.
-
-        Args:
-            misalignment: Misalignment matrix (3x3)
-        """
-        self.misalignment = misalignment
-
-    def transform(self, angles):
-        """Transform scanning angles to satellite angular momentum coordinates.
-
-        Args:
-            angles: Scanning angles (x, y) in radians.
-
-        Returns:
-            View vector (x, y, z) in satellite angular momentum coordinates.
-        """
-        rotation, vector = self._get_transforms(angles)
-        return np.dot(rotation, np.dot(self.misalignment, vector))
-
-    def _get_transforms(self, angles):
-        x, y = angles
-        cos_x = np.cos(x)
-        sin_x = np.sin(x)
-        rot = np.array(((cos_x, -sin_x, 0),
-                        (sin_x, cos_x, 0),
-                        (0, 0, 1)))
-        vec = np.array([np.cos(y), 0, np.sin(y)])
-        return rot, vec
-
-
-@numba.njit
-def _transform_satellite_to_earth_fixed_coords(point, nav_params):
-    transformer = SatelliteToEarthFixedCoordsTransformer(
-        nav_params.orbit.greenwich_sidereal_time,
-        nav_params.get_sat_sun_angles(),
-        nav_params.attitude.angle_between_earth_and_sun,
-        nav_params.get_spin_angles(),
-        nav_params.orbit.nutation_precession
+    return np.dot(rotation, np.dot(misalignment, vector))
+
+
+@numba.njit
+def _get_transforms_from_scanning_angles_to_satellite_coords(angles):
+    x, y = angles
+    cos_x = np.cos(x)
+    sin_x = np.sin(x)
+    rot = np.array(((cos_x, -sin_x, 0),
+                    (sin_x, cos_x, 0),
+                    (0, 0, 1)))
+    vec = np.array([np.cos(y), 0, np.sin(y)])
+    return rot, vec
+
+
+@numba.njit
+def transform_satellite_to_earth_fixed_coords(
+    point,
+    greenwich_sidereal_time,
+    sat_sun_angles,
+    earth_sun_angle,
+    spin_angles,
+    nutation_precession
+):
+    """Transform from satellite angular momentum to earth-fixed coordinates.
+
+    Args:
+        point: Point (x, y, z) in satellite angular momentum coordinates.
+        greenwich_sidereal_time: True Greenwich sidereal time (rad).
+        sat_sun_angles: Declination from satellite to sun (rad),
+            right ascension from satellite to sun (rad)
+        earth_sun_angle: Angle between sun and earth center on the z-axis
+            vertical plane (rad)
+        spin_angles: Angle between satellite spin axis and z-axis (rad),
+            angle between satellite spin axis and yz-plane
+        nutation_precession: Nutation and precession matrix (3x3)
+
+    Returns:
+        Point (x', y', z') in earth-fixed coordinates. 
+ """ + sat_unit_vectors = _get_satellite_unit_vectors( + greenwich_sidereal_time, + sat_sun_angles, + earth_sun_angle, + spin_angles, + nutation_precession ) - return transformer.transform(point) - - -@numba.experimental.jitclass([ - ('greenwich_sidereal_time', numba.float64), - ('sat_sun_angles', numba.float64[:]), - ('earth_sun_angle', numba.float64), - ('spin_angles', numba.float64[:]), - ('nutation_precession', numba.types.Array(numba.float64, 2, layout='C')) -]) -class SatelliteToEarthFixedCoordsTransformer: - """Transform from earth-fixed to satellite angular momentum coordinates.""" - - def __init__(self, greenwich_sidereal_time, sat_sun_angles, earth_sun_angle, spin_angles, nutation_precession): - """Initialize the Transformer. - - Args: - greenwich_sidereal_time: True Greenwich sidereal time (rad). - sat_sun_angles: Declination from satellite to sun (rad), - right ascension from satellite to sun (rad) - earth_sun_angle: Angle between sun and earth center on the z-axis - vertical plane (rad) - spin_angles: Angle between satellite spin axis and z-axis (rad), - angle between satellite spin axis and yz-plane - nutation_precession: Nutation and precession matrix (3x3) - """ - self.greenwich_sidereal_time = greenwich_sidereal_time - self.sat_sun_angles = sat_sun_angles - self.earth_sun_angle = earth_sun_angle - self.spin_angles = spin_angles - self.nutation_precession = nutation_precession - - def transform(self, point): - """Transform from earth-fixed to satellite angular momentum coordinates. - - Args: - point: Point (x, y, z) in satellite angular momentum coordinates. - - Returns: - Point (x', y', z') in earth-fixed coordinates. - """ - sat_unit_vectors = self._get_satellite_unit_vectors() - return np.dot(sat_unit_vectors, point) - - def _get_satellite_unit_vectors(self): - unit_vector_z = self._get_satellite_unit_vector_z() - unit_vector_x = self._get_satellite_unit_vector_x(unit_vector_z) - unit_vector_y = self._get_satellite_unit_vector_y(unit_vector_x, unit_vector_z) - return np.stack((unit_vector_x, unit_vector_y, unit_vector_z), axis=-1) - - def _get_satellite_unit_vector_z(self): - sat_z_axis_1950 = self._get_satellite_z_axis_1950() - rotation = self._get_transform_from_1950_to_earth_fixed() - z_vec = np.dot(rotation, np.dot(self.nutation_precession, sat_z_axis_1950)) - return normalize_vector(z_vec) - - def _get_satellite_z_axis_1950(self): - """Get satellite z-axis (spin) in mean of 1950 coordinates.""" - alpha, delta = self.spin_angles - cos_delta = np.cos(delta) - x = np.sin(delta) - y = -cos_delta * np.sin(alpha) - z = cos_delta * np.cos(alpha) - return np.array([x, y, z]) - - def _get_transform_from_1950_to_earth_fixed(self): - cos = np.cos(self.greenwich_sidereal_time) - sin = np.sin(self.greenwich_sidereal_time) - return np.array( - ((cos, sin, 0), - (-sin, cos, 0), - (0, 0, 1)) - ) - - def _get_satellite_unit_vector_x(self, sat_unit_vector_z): - beta = self.earth_sun_angle - sat_sun_vector = self._get_vector_from_satellite_to_sun() - z_cross_satsun = np.cross(sat_unit_vector_z, sat_sun_vector) - z_cross_satsun = normalize_vector(z_cross_satsun) - x_vec = z_cross_satsun * np.sin(beta) + \ - np.cross(z_cross_satsun, sat_unit_vector_z) * np.cos(beta) - return normalize_vector(x_vec) - - def _get_vector_from_satellite_to_sun(self): - declination, right_ascension = self.sat_sun_angles - cos_declination = np.cos(declination) - x = cos_declination * np.cos(right_ascension) - y = cos_declination * np.sin(right_ascension) - z = np.sin(declination) - return np.array([x, y, z]) 
- - def _get_satellite_unit_vector_y(self, sat_unit_vector_x, sat_unit_vector_z): - y_vec = np.cross(sat_unit_vector_z, sat_unit_vector_x) - return normalize_vector(y_vec) - - -@numba.njit -def _intersect_with_earth(view_vector, nav_params): - intersector = EarthIntersector( - nav_params.get_sat_position(), - nav_params.get_ellipsoid() + return np.dot(sat_unit_vectors, point) + + +@numba.njit +def _get_satellite_unit_vectors( + greenwich_sidereal_time, + sat_sun_angles, + earth_sun_angle, + spin_angles, + nutation_precession +): + unit_vector_z = _get_satellite_unit_vector_z( + spin_angles, greenwich_sidereal_time, nutation_precession ) - return intersector.intersect(view_vector) - - -@numba.experimental.jitclass([ - ('sat_pos', numba.float64[:]), - ('ellipsoid', numba.float64[:]) -]) -class EarthIntersector: - """Intersect instrument viewing vector with the earth's surface.""" - - def __init__(self, sat_pos, ellipsoid): - """ - Args: - sat_pos: Satellite position (x, y, z) in earth-fixed coordinates. - ellipsoid: Flattening and equatorial radius of the earth. - """ - self.sat_pos = sat_pos - self.ellipsoid = ellipsoid - - def intersect(self, view_vector): - """Intersect instrument viewing vector with the earth's surface. - - Args: - view_vector: Instrument viewing vector (x, y, z) in earth-fixed - coordinates. - Returns: - Intersection (x', y', z') with the earth's surface. - """ - distance = self._get_distance_to_intersection(view_vector) - return self.sat_pos + distance * view_vector - - def _get_distance_to_intersection(self, view_vector): - """Get distance to intersection with the earth. - - If the instrument is pointing towards the earth, there will be two - intersections with the surface. Choose the one on the instrument-facing - side of the earth. 
- """ - d1, d2 = self._get_distances_to_intersections(view_vector) - return min(d1, d2) - - def _get_distances_to_intersections(self, view_vector): - equatorial_radius, flattening = self.ellipsoid - flat2 = (1 - flattening) ** 2 - ux, uy, uz = view_vector - x, y, z = self.sat_pos - - a = flat2 * (ux**2 + uy**2) + uz**2 - b = flat2 * (x*ux + y*uy) + z*uz - c = flat2 * (x**2 + y**2 - equatorial_radius**2) + z**2 - - tmp = np.sqrt((b**2 - a*c)) - dist_1 = (-b + tmp) / a - dist_2 = (-b - tmp) / a - return dist_1, dist_2 + unit_vector_x = _get_satellite_unit_vector_x( + earth_sun_angle, sat_sun_angles, unit_vector_z + ) + unit_vector_y = _get_satellite_unit_vector_y(unit_vector_x, unit_vector_z) + return np.stack((unit_vector_x, unit_vector_y, unit_vector_z), axis=-1) + + +@numba.njit +def _get_satellite_unit_vector_z(spin_angles, greenwich_sidereal_time, nutation_precession): + sat_z_axis_1950 = _get_satellite_z_axis_1950(spin_angles) + rotation = _get_transform_from_1950_to_earth_fixed(greenwich_sidereal_time) + z_vec = np.dot(rotation, np.dot(nutation_precession, sat_z_axis_1950)) + return normalize_vector(z_vec) + + +@numba.njit +def _get_satellite_z_axis_1950(spin_angles): + """Get satellite z-axis (spin) in mean of 1950 coordinates.""" + alpha, delta = spin_angles + cos_delta = np.cos(delta) + x = np.sin(delta) + y = -cos_delta * np.sin(alpha) + z = cos_delta * np.cos(alpha) + return np.array([x, y, z]) + + +@numba.njit +def _get_transform_from_1950_to_earth_fixed(greenwich_sidereal_time): + cos = np.cos(greenwich_sidereal_time) + sin = np.sin(greenwich_sidereal_time) + return np.array( + ((cos, sin, 0), + (-sin, cos, 0), + (0, 0, 1)) + ) + + +@numba.njit +def _get_satellite_unit_vector_x(earth_sun_angle, sat_sun_angles, + sat_unit_vector_z): + beta = earth_sun_angle + sat_sun_vector = _get_vector_from_satellite_to_sun(sat_sun_angles) + z_cross_satsun = np.cross(sat_unit_vector_z, sat_sun_vector) + z_cross_satsun = normalize_vector(z_cross_satsun) + x_vec = z_cross_satsun * np.sin(beta) + \ + np.cross(z_cross_satsun, sat_unit_vector_z) * np.cos(beta) + return normalize_vector(x_vec) + + +@numba.njit +def _get_vector_from_satellite_to_sun(sat_sun_angles): + declination, right_ascension = sat_sun_angles + cos_declination = np.cos(declination) + x = cos_declination * np.cos(right_ascension) + y = cos_declination * np.sin(right_ascension) + z = np.sin(declination) + return np.array([x, y, z]) + + +@numba.njit +def _get_satellite_unit_vector_y(sat_unit_vector_x, sat_unit_vector_z): + y_vec = np.cross(sat_unit_vector_z, sat_unit_vector_x) + return normalize_vector(y_vec) + + +@numba.njit +def intersect_with_earth(view_vector, sat_pos, ellipsoid): + """Intersect instrument viewing vector with the earth's surface. + + Args: + view_vector: Instrument viewing vector (x, y, z) in earth-fixed + coordinates. + sat_pos: Satellite position (x, y, z) in earth-fixed coordinates. + ellipsoid: Flattening and equatorial radius of the earth. + Returns: + Intersection (x', y', z') with the earth's surface. + """ + distance = _get_distance_to_intersection( + view_vector, + sat_pos, + ellipsoid + ) + return sat_pos + distance * view_vector + + +@numba.njit +def _get_distance_to_intersection(view_vector, sat_pos, ellipsoid): + """Get distance to intersection with the earth. + + If the instrument is pointing towards the earth, there will be two + intersections with the surface. Choose the one on the instrument-facing + side of the earth. 
+ """ + d1, d2 = _get_distances_to_intersections(view_vector, sat_pos, ellipsoid) + return min(d1, d2) + + +@numba.njit +def _get_distances_to_intersections(view_vector, sat_pos, ellipsoid): + equatorial_radius, flattening = ellipsoid + flat2 = (1 - flattening) ** 2 + ux, uy, uz = view_vector + x, y, z = sat_pos + + a = flat2 * (ux**2 + uy**2) + uz**2 + b = flat2 * (x*ux + y*uy) + z*uz + c = flat2 * (x**2 + y**2 - equatorial_radius**2) + z**2 + + tmp = np.sqrt((b**2 - a*c)) + dist_1 = (-b + tmp) / a + dist_2 = (-b - tmp) / a + return dist_1, dist_2 @numba.njit @@ -354,276 +378,210 @@ def normalize_vector(v): return v / np.sqrt(np.dot(v, v)) -@numba.experimental.jitclass( - [ - ('angle_between_earth_and_sun', numba.float64), - ('angle_between_sat_spin_and_z_axis', numba.float64), - ('angle_between_sat_spin_and_yz_plane', numba.float64), - ] -) -class Attitude: - def __init__( - self, - angle_between_earth_and_sun, - angle_between_sat_spin_and_z_axis, - angle_between_sat_spin_and_yz_plane - ): - self.angle_between_earth_and_sun = angle_between_earth_and_sun - self.angle_between_sat_spin_and_z_axis = angle_between_sat_spin_and_z_axis - self.angle_between_sat_spin_and_yz_plane = angle_between_sat_spin_and_yz_plane - - -@numba.experimental.jitclass( - [ - ('greenwich_sidereal_time', numba.float64), - ('declination_from_sat_to_sun', numba.float64), - ('right_ascension_from_sat_to_sun', numba.float64), - ('sat_position_earth_fixed_x', numba.float64), - ('sat_position_earth_fixed_y', numba.float64), - ('sat_position_earth_fixed_z', numba.float64), - ('nutation_precession', numba.types.Array(numba.float64, 2, layout='C')), - ] -) -class Orbit: - def __init__( - self, - greenwich_sidereal_time, - declination_from_sat_to_sun, - right_ascension_from_sat_to_sun, - sat_position_earth_fixed_x, - sat_position_earth_fixed_y, - sat_position_earth_fixed_z, - nutation_precession - ): - self.greenwich_sidereal_time = greenwich_sidereal_time - self.declination_from_sat_to_sun = declination_from_sat_to_sun - self.right_ascension_from_sat_to_sun = right_ascension_from_sat_to_sun - self.sat_position_earth_fixed_x = sat_position_earth_fixed_x - self.sat_position_earth_fixed_y = sat_position_earth_fixed_y - self.sat_position_earth_fixed_z = sat_position_earth_fixed_z - self.nutation_precession = nutation_precession - - -@numba.experimental.jitclass( +Attitude = namedtuple( + 'Attitude', [ - ('line_offset', numba.float64), - ('pixel_offset', numba.float64), - ('stepping_angle', numba.float64), - ('sampling_angle', numba.float64), - ('misalignment', numba.types.Array(numba.float64, 2, layout='C')), - ('earth_flattening', numba.float64), - ('earth_equatorial_radius', numba.float64), + 'angle_between_earth_and_sun', + 'angle_between_sat_spin_and_z_axis', + 'angle_between_sat_spin_and_yz_plane' ] ) -class ProjectionParameters: - def __init__( - self, - line_offset, - pixel_offset, - stepping_angle, - sampling_angle, - misalignment, - earth_flattening, - earth_equatorial_radius - ): - self.line_offset = line_offset - self.pixel_offset = pixel_offset - self.stepping_angle = stepping_angle - self.sampling_angle = sampling_angle - self.misalignment = misalignment - self.earth_flattening = earth_flattening - self.earth_equatorial_radius = earth_equatorial_radius - - -@numba.experimental.jitclass( + + +Orbit = namedtuple( + 'Orbit', [ - ('attitude', get_jitclass_type(Attitude)), - ('orbit', get_jitclass_type(Orbit)), - ('proj_params', get_jitclass_type(ProjectionParameters)), + 'greenwich_sidereal_time', + 
'declination_from_sat_to_sun', + 'right_ascension_from_sat_to_sun', + 'sat_position_earth_fixed_x', + 'sat_position_earth_fixed_y', + 'sat_position_earth_fixed_z', + 'nutation_precession', ] ) -class NavigationParameters: - def __init__(self, attitude, orbit, proj_params): - self.attitude = attitude - self.orbit = orbit - self.proj_params = proj_params - - # TODO: Remember that all angles are expected in rad - # TODO: Watch out shape of 3x3 matrices! See msVissrNav.c - - def get_image_offset(self): - return self.proj_params.line_offset, self.proj_params.pixel_offset - - def get_sampling(self): - return self.proj_params.stepping_angle, self.proj_params.sampling_angle - - def get_sat_sun_angles(self): - return np.array([ - self.orbit.declination_from_sat_to_sun, - self.orbit.right_ascension_from_sat_to_sun - ]) - - def get_spin_angles(self): - return np.array([ - self.attitude.angle_between_sat_spin_and_z_axis, - self.attitude.angle_between_sat_spin_and_yz_plane - ]) - - def get_ellipsoid(self): - return np.array([ - self.proj_params.earth_equatorial_radius, - self.proj_params.earth_flattening - ]) - - def get_sat_position(self): - return np.array((self.orbit.sat_position_earth_fixed_x, - self.orbit.sat_position_earth_fixed_y, - self.orbit.sat_position_earth_fixed_z)) - - -@numba.experimental.jitclass( + + +ProjectionParameters = namedtuple( + 'ProjectionParameters', [ - ('prediction_times', numba.float64[:]), - ('greenwich_sidereal_time', numba.float64[:]), - ('declination_from_sat_to_sun', numba.float64[:]), - ('right_ascension_from_sat_to_sun', numba.float64[:]), - ('sat_position_earth_fixed_x', numba.float64[:]), - ('sat_position_earth_fixed_y', numba.float64[:]), - ('sat_position_earth_fixed_z', numba.float64[:]), - ('nutation_precession', numba.types.Array(numba.float64, 3, layout='C')), + 'line_offset', + 'pixel_offset', + 'stepping_angle', + 'sampling_angle', + 'misalignment', + 'earth_flattening', + 'earth_equatorial_radius', ] ) -class OrbitPrediction: - def __init__( - self, - prediction_times, - greenwich_sidereal_time, - declination_from_sat_to_sun, - right_ascension_from_sat_to_sun, - sat_position_earth_fixed_x, - sat_position_earth_fixed_y, - sat_position_earth_fixed_z, - nutation_precession - ): - self.prediction_times = prediction_times - self.greenwich_sidereal_time = greenwich_sidereal_time - self.declination_from_sat_to_sun = declination_from_sat_to_sun - self.right_ascension_from_sat_to_sun = right_ascension_from_sat_to_sun - self.sat_position_earth_fixed_x = sat_position_earth_fixed_x - self.sat_position_earth_fixed_y = sat_position_earth_fixed_y - self.sat_position_earth_fixed_z = sat_position_earth_fixed_z - self.nutation_precession = nutation_precession - - def interpolate(self, observation_time): - greenwich_sidereal_time = self._interpolate_angles( - self.greenwich_sidereal_time, - observation_time - ) - declination_from_sat_to_sun = self._interpolate_angles( - self.declination_from_sat_to_sun, - observation_time - ) - right_ascension_from_sat_to_sun = self._interpolate_angles( - self.right_ascension_from_sat_to_sun, - observation_time - ) - sat_position_earth_fixed_x = self._interpolate_continuous( - self.sat_position_earth_fixed_x, - observation_time - ) - sat_position_earth_fixed_y = self._interpolate_continuous( - self.sat_position_earth_fixed_y, - observation_time - ) - sat_position_earth_fixed_z = self._interpolate_continuous( - self.sat_position_earth_fixed_z, - observation_time - ) - nutation_precession = self._interpolate_nearest( - 
self.nutation_precession, - observation_time - ) - return Orbit( - greenwich_sidereal_time, - declination_from_sat_to_sun, - right_ascension_from_sat_to_sun, - sat_position_earth_fixed_x, - sat_position_earth_fixed_y, - sat_position_earth_fixed_z, - nutation_precession - ) - - def _interpolate_continuous(self, predicted_values, observation_time): - return interpolate_continuous(observation_time, self.prediction_times, predicted_values) - - def _interpolate_angles(self, predicted_values, observation_time): - return interpolate_angles(observation_time, self.prediction_times, predicted_values) - - def _interpolate_nearest(self, predicted_values, observation_time): - return interpolate_nearest(observation_time, self.prediction_times, predicted_values) - - -@numba.experimental.jitclass( + + +# FIXME +# @numba.experimental.jitclass( +# [ +# ('attitude', get_jitclass_type(Attitude)), +# ('orbit', get_jitclass_type(Orbit)), +# ('proj_params', get_jitclass_type(ProjectionParameters)), +# ] +# ) +# class NavigationParameters: +# def __init__(self, attitude, orbit, proj_params): +# self.attitude = attitude +# self.orbit = orbit +# self.proj_params = proj_params +# +# # TODO: Remember that all angles are expected in rad +# # TODO: Watch out shape of 3x3 matrices! See msVissrNav.c +# +# def get_image_offset(self): +# return self.proj_params.line_offset, self.proj_params.pixel_offset +# +# def get_sampling(self): +# return self.proj_params.stepping_angle, self.proj_params.sampling_angle +# +# def get_sat_sun_angles(self): +# return np.array([ +# self.orbit.declination_from_sat_to_sun, +# self.orbit.right_ascension_from_sat_to_sun +# ]) +# +# def get_spin_angles(self): +# return np.array([ +# self.attitude.angle_between_sat_spin_and_z_axis, +# self.attitude.angle_between_sat_spin_and_yz_plane +# ]) +# +# def get_ellipsoid(self): +# return np.array([ +# self.proj_params.earth_equatorial_radius, +# self.proj_params.earth_flattening +# ]) +# +# def get_sat_position(self): +# return np.array((self.orbit.sat_position_earth_fixed_x, +# self.orbit.sat_position_earth_fixed_y, +# self.orbit.sat_position_earth_fixed_z)) + + +OrbitPrediction = namedtuple( + 'OrbitPrediction', [ - ('prediction_times', numba.float64[:]), - ('angle_between_earth_and_sun', numba.float64[:]), - ('angle_between_sat_spin_and_z_axis', numba.float64[:]), - ('angle_between_sat_spin_and_yz_plane', numba.float64[:]), + 'prediction_times', + 'greenwich_sidereal_time', + 'declination_from_sat_to_sun', + 'right_ascension_from_sat_to_sun', + 'sat_position_earth_fixed_x', + 'sat_position_earth_fixed_y', + 'sat_position_earth_fixed_z', + 'nutation_precession', ] ) -class AttitudePrediction: - def __init__( - self, - prediction_times, - angle_between_earth_and_sun, - angle_between_sat_spin_and_z_axis, - angle_between_sat_spin_and_yz_plane - ): - self.prediction_times = prediction_times - self.angle_between_earth_and_sun = angle_between_earth_and_sun - self.angle_between_sat_spin_and_z_axis = angle_between_sat_spin_and_z_axis - self.angle_between_sat_spin_and_yz_plane = angle_between_sat_spin_and_yz_plane - - def interpolate(self, observation_time): - angle_between_earth_and_sun = self._interpolate( - observation_time, self.angle_between_earth_and_sun - ) - angle_between_sat_spin_and_z_axis = self._interpolate( - observation_time, self.angle_between_sat_spin_and_z_axis, - ) - angle_between_sat_spin_and_yz_plane = self._interpolate( - observation_time, self.angle_between_sat_spin_and_yz_plane - ) - return Attitude( - angle_between_earth_and_sun, - 
angle_between_sat_spin_and_z_axis, - angle_between_sat_spin_and_yz_plane - ) - - def _interpolate(self, observation_time, predicted_values): - return interpolate_angles(observation_time, self.prediction_times, predicted_values) - - -@numba.experimental.jitclass( + + +@numba.njit +def interpolate_orbit_prediction(orbit_prediction, observation_time): + greenwich_sidereal_time = interpolate_angles( + observation_time, + orbit_prediction.prediction_times, + orbit_prediction.greenwich_sidereal_time + ) + declination_from_sat_to_sun = interpolate_angles( + observation_time, + orbit_prediction.prediction_times, + orbit_prediction.declination_from_sat_to_sun + ) + right_ascension_from_sat_to_sun = interpolate_angles( + observation_time, + orbit_prediction.prediction_times, + orbit_prediction.right_ascension_from_sat_to_sun + ) + sat_position_earth_fixed_x = interpolate_continuous( + observation_time, + orbit_prediction.prediction_times, + orbit_prediction.sat_position_earth_fixed_x + ) + sat_position_earth_fixed_y = interpolate_continuous( + observation_time, + orbit_prediction.prediction_times, + orbit_prediction.sat_position_earth_fixed_y + ) + sat_position_earth_fixed_z = interpolate_continuous( + observation_time, + orbit_prediction.prediction_times, + orbit_prediction.sat_position_earth_fixed_z + ) + nutation_precession = interpolate_nearest( + observation_time, + orbit_prediction.prediction_times, + orbit_prediction.nutation_precession + ) + return Orbit( + greenwich_sidereal_time, + declination_from_sat_to_sun, + right_ascension_from_sat_to_sun, + sat_position_earth_fixed_x, + sat_position_earth_fixed_y, + sat_position_earth_fixed_z, + nutation_precession + ) + + +AttitudePrediction = namedtuple( + 'AttitudePrediction', [ - ('attitude_prediction', get_jitclass_type(AttitudePrediction)), - ('orbit_prediction', get_jitclass_type(OrbitPrediction)), - ('proj_params', get_jitclass_type(ProjectionParameters)), + 'prediction_times', + 'angle_between_earth_and_sun', + 'angle_between_sat_spin_and_z_axis', + 'angle_between_sat_spin_and_yz_plane', ] ) -class PredictedNavigationParameters: - def __init__(self, attitude_prediction, orbit_prediction, proj_params): - self.attitude_prediction = attitude_prediction - self.orbit_prediction = orbit_prediction - self.proj_params = proj_params - - def interpolate(self, observation_time): - attitude = self.attitude_prediction.interpolate(observation_time) - orbit = self.orbit_prediction.interpolate(observation_time) - return self._get_nav_params(attitude, orbit) - - def _get_nav_params(self, attitude, orbit): - return NavigationParameters(attitude, orbit, self.proj_params) + + +@numba.njit +def interpolate_attitude_prediction(attitude_prediction, observation_time): + angle_between_earth_and_sun = interpolate_angles( + observation_time, + attitude_prediction.prediction_times, + attitude_prediction.angle_between_earth_and_sun + ) + angle_between_sat_spin_and_z_axis = interpolate_angles( + observation_time, + attitude_prediction.prediction_times, + attitude_prediction.angle_between_sat_spin_and_z_axis + ) + angle_between_sat_spin_and_yz_plane = interpolate_angles( + observation_time, + attitude_prediction.prediction_times, + attitude_prediction.angle_between_sat_spin_and_yz_plane + ) + return Attitude( + angle_between_earth_and_sun, + angle_between_sat_spin_and_z_axis, + angle_between_sat_spin_and_yz_plane + ) + + +# FIXME +# @numba.experimental.jitclass( +# [ +# ('attitude_prediction', get_jitclass_type(AttitudePrediction)), +# ('orbit_prediction', 
get_jitclass_type(OrbitPrediction)), +# ('proj_params', get_jitclass_type(ProjectionParameters)), +# ] +# ) +# class PredictedNavigationParameters: +# def __init__(self, attitude_prediction, orbit_prediction, proj_params): +# self.attitude_prediction = attitude_prediction +# self.orbit_prediction = orbit_prediction +# self.proj_params = proj_params +# +# def interpolate(self, observation_time): +# attitude = self.attitude_prediction.interpolate(observation_time) +# orbit = self.orbit_prediction.interpolate(observation_time) +# return self._get_nav_params(attitude, orbit) +# +# def _get_nav_params(self, attitude, orbit): +# return NavigationParameters(attitude, orbit, self.proj_params) @numba.njit diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index 77194f79bd..750315e5b2 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -19,13 +19,13 @@ 'pixel': 1680, 'lon': 139.990380, 'lat': 35.047056, - 'nav_params': nav.NavigationParameters( - attitude=nav.Attitude( + 'nav_params': ( + nav.Attitude( angle_between_earth_and_sun=3.997397917902958, angle_between_sat_spin_and_z_axis=3.149118633034304, angle_between_sat_spin_and_yz_plane=0.000546042025980, ), - orbit=nav.Orbit( + nav.Orbit( greenwich_sidereal_time=2.468529732418296, declination_from_sat_to_sun=-0.208770861178982, right_ascension_from_sat_to_sun=3.304369303579407, @@ -38,7 +38,7 @@ [0.004496123789670, -0.000064242454080, 0.999989890320785]] ), ), - proj_params=nav.ProjectionParameters( + nav.ProjectionParameters( line_offset=1378.5, pixel_offset=1672.5, stepping_angle=0.000140000047395, @@ -58,13 +58,13 @@ 'pixel': 1793, 'lon': 144.996967, 'lat': -34.959853, - 'nav_params': nav.NavigationParameters( - attitude=nav.Attitude( + 'nav_params': ( + nav.Attitude( angle_between_earth_and_sun=3.935707944355762, angle_between_sat_spin_and_z_axis=3.149118633034304, angle_between_sat_spin_and_yz_plane=0.000546042025980, ), - orbit=nav.Orbit( + nav.Orbit( greenwich_sidereal_time=2.530392320846865, declination_from_sat_to_sun=-0.208713576872247, right_ascension_from_sat_to_sun=3.242660398458377, @@ -77,7 +77,7 @@ [0.004496126086653, -0.000064239500295, 0.999989890310647]] ), ), - proj_params=nav.ProjectionParameters( + nav.ProjectionParameters( line_offset=1378.5, pixel_offset=1672.5, stepping_angle=0.000140000047395, @@ -101,13 +101,13 @@ 'pixel': 6720, 'lon': 139.975527, 'lat': 35.078028, - 'nav_params': nav.NavigationParameters( - attitude=nav.Attitude( + 'nav_params': ( + nav.Attitude( angle_between_earth_and_sun=3.997397918405798, angle_between_sat_spin_and_z_axis=3.149118633034304, angle_between_sat_spin_and_yz_plane=0.000546042025980, ), - orbit=nav.Orbit( + nav.Orbit( greenwich_sidereal_time=2.468529731914041, declination_from_sat_to_sun=-0.208770861179448, right_ascension_from_sat_to_sun=3.304369304082406, @@ -120,7 +120,7 @@ [0.004496123789670, -0.000064242454080, 0.999989890320785]] ), ), - proj_params=nav.ProjectionParameters( + nav.ProjectionParameters( line_offset=5513.0, pixel_offset=6688.5, stepping_angle=0.000035000004573, @@ -140,13 +140,13 @@ 'pixel': 7172, 'lon': 144.980104, 'lat': -34.929123, - 'nav_params': nav.NavigationParameters( - attitude=nav.Attitude( + 'nav_params': ( + nav.Attitude( angle_between_earth_and_sun=3.935707944858620, angle_between_sat_spin_and_z_axis=3.149118633034304, angle_between_sat_spin_and_yz_plane=0.000546042025980, ), - orbit=nav.Orbit( + nav.Orbit( 
greenwich_sidereal_time=2.530392320342610, declination_from_sat_to_sun=-0.208713576872715, right_ascension_from_sat_to_sun=3.242660398961383, @@ -159,7 +159,7 @@ [0.004496126086653, -0.000064239500295, 0.999989890310647]] ), ), - proj_params=nav.ProjectionParameters( + nav.ProjectionParameters( line_offset=5513.0, pixel_offset=6688.5, stepping_angle=0.000035000004573, @@ -194,14 +194,15 @@ class TestSinglePixelNavigation: ) def test_get_lon_lat(self, point, nav_params, expected): """Test getting lon/lat coordinates for a given pixel.""" - lon, lat = nav.get_lon_lat(point, nav_params) + attitude, orbit, proj_params = nav_params + lon, lat = nav.get_lon_lat(point, attitude, orbit, proj_params) np.testing.assert_allclose((lon, lat), expected) def test_nav_matrices_are_contiguous(self): """Test that navigation matrices are stored as C-contiguous arrays.""" - nav_params = NAVIGATION_REFERENCE[0]['nav_params'] - assert nav_params.proj_params.misalignment.flags['C_CONTIGUOUS'] - assert nav_params.orbit.nutation_precession.flags['C_CONTIGUOUS'] + attitude, orbit, proj_params = NAVIGATION_REFERENCE[0]['nav_params'] + assert proj_params.misalignment.flags['C_CONTIGUOUS'] + assert orbit.nutation_precession.flags['C_CONTIGUOUS'] def test_transform_image_coords_to_scanning_angles(self): """Test transformation from image coordinates to scanning angles.""" @@ -214,33 +215,41 @@ def test_transform_image_coords_to_scanning_angles(self): def test_transform_scanning_angles_to_satellite_coords(self): """Test transformation from scanning angles to satellite coordinates.""" - transformer = nav.ScanningAnglesToSatelliteCoordsTransformer( - misalignment=np.diag([1, 2, 3]).astype(float) + scanning_angles = np.array([np.pi, np.pi / 2]) + misalignment = np.diag([1, 2, 3]).astype(float) + point_sat = nav.transform_scanning_angles_to_satellite_coords( + scanning_angles, misalignment ) - point_sat = transformer.transform(np.array([np.pi, np.pi/2])) np.testing.assert_allclose(point_sat, [0, 0, 3], atol=1E-12) def test_transform_satellite_to_earth_fixed_coords(self): """Test transformation from satellite to earth-fixed coordinates.""" - transformer = nav.SatelliteToEarthFixedCoordsTransformer( - greenwich_sidereal_time=np.pi, - sat_sun_angles=np.array([np.pi, np.pi/2]), - earth_sun_angle=np.pi, - spin_angles=np.array([np.pi, np.pi/2]), - nutation_precession=np.diag([1, 2, 3]).astype(float) + point_sat = np.array([1, 2, 3], dtype=float) + greenwich_sidereal_time = np.pi + sat_sun_angles = np.array([np.pi, np.pi / 2]) + earth_sun_angle = np.pi + spin_angles = np.array([np.pi, np.pi / 2]) + nutation_precession = np.diag([1, 2, 3]).astype(float) + res = nav.transform_satellite_to_earth_fixed_coords( + point_sat, + greenwich_sidereal_time, + sat_sun_angles, + earth_sun_angle, + spin_angles, + nutation_precession ) - res = transformer.transform(np.array([1, 2, 3], dtype=float)) np.testing.assert_allclose(res, [-3, 1, -2]) def test_intersect_view_vector_with_earth(self): """Test intersection of a view vector with the earth's surface.""" + view_vector = np.array([-1, 0, 0], dtype=float) + sat_pos = np.array([36000 * 1000, 0, 0], dtype=float) eq_radius = 6371 * 1000 flattening = 0.003 - intersector = nav.EarthIntersector( - sat_pos=np.array([36000 * 1000, 0, 0], dtype=float), - ellipsoid=np.array([eq_radius, flattening]) + ellipsoid = np.array([eq_radius, flattening]) + point = nav.intersect_with_earth( + view_vector, sat_pos, ellipsoid ) - point = intersector.intersect(np.array([-1, 0, 0], dtype=float)) 
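+        # Sanity of the expected value: the satellite sits on the x-axis,
+        # 36,000 km from the earth's center, looking along -x, so the ray
+        # pierces the ellipsoid at the equator, where flattening has no
+        # effect, i.e. at (eq_radius, 0, 0).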
np.testing.assert_allclose(point, [eq_radius, 0, 0]) @pytest.mark.parametrize( From 16ae2381c8d7954877eb10cf3542d8daa8d6a426 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Fri, 27 Aug 2021 10:08:43 +0000 Subject: [PATCH 0024/1416] Refactor image navigation Use njit decorator instead of jitclass --- satpy/readers/gms5_vissr_l1b.py | 7 +- satpy/readers/gms5_vissr_navigation.py | 136 +++++------------- .../tests/reader_tests/test_gms5_vissr_l1b.py | 132 +++-------------- 3 files changed, 62 insertions(+), 213 deletions(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index 619f94a396..b26656f092 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -648,16 +648,13 @@ def _get_lons_lats(self, dataset_id, lines, pixels): earth_flattening=nav.EARTH_FLATTENING, earth_equatorial_radius=nav.EARTH_EQUATORIAL_RADIUS ) - predicted_nav_params = nav.PredictedNavigationParameters( - attitude_prediction, orbit_prediction, proj_params - ) lons, lats = nav.get_lons_lats( # lines=np.array([686, 2089]), # pixels=np.array([1680, 1793]), # FIXME TODO lines=lines.astype(np.float64), pixels=pixels.astype(np.float64), - scan_params=scan_params, - predicted_nav_params=predicted_nav_params + static_params=(scan_params, proj_params), + predicted_params=(attitude_prediction, orbit_prediction) ) return lons, lats diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index e17f57d021..5b20e1ecd5 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -17,16 +17,10 @@ """Constants taken from JMA's Msial library.""" -def get_jitclass_type(cls): - try: - return cls.class_type.instance_type - except AttributeError: - # With NUMBA_DISABLE_JIT=1 - return cls - - @numba.njit -def get_lons_lats(lines, pixels, scan_params, predicted_nav_params): +def get_lons_lats(lines, pixels, static_params, predicted_params): + scan_params, proj_params = static_params + attitude_prediction, orbit_prediction = predicted_params num_lines = len(lines) num_pixels = len(pixels) output_shape = (num_lines, num_pixels) @@ -36,7 +30,10 @@ def get_lons_lats(lines, pixels, scan_params, predicted_nav_params): for j in range(num_pixels): point = (lines[i], pixels[j]) obs_time = get_observation_time(point, scan_params) - nav_params = predicted_nav_params.interpolate(obs_time) + attitude, orbit = interpolate_navigation_prediction( + attitude_prediction, orbit_prediction, obs_time + ) + nav_params = (attitude, orbit, proj_params) lon, lat = get_lon_lat(point, nav_params) lons[i, j] = lon lats[i, j] = lat @@ -67,17 +64,32 @@ def _get_relative_observation_time(point, scan_params): @numba.njit -def get_lon_lat(point, attitude, orbit, proj_params): +def interpolate_navigation_prediction( + attitude_prediction, + orbit_prediction, + observation_time +): + attitude = interpolate_attitude_prediction( + attitude_prediction, observation_time + ) + orbit = interpolate_orbit_prediction( + orbit_prediction, observation_time + ) + return attitude, orbit + + +@numba.njit +def get_lon_lat(point, nav_params): """Get longitude and latitude coordinates for a given image pixel. Args: point: Point (line, pixel) in image coordinates. - attitude: Attitude parameters. - orbit: Orbital parameters. - proj_params: Projection parameters. + nav_params: Navigation parameters (Attitude, Orbit, Projection + Parameters) Returns: Longitude and latitude in degrees. 
""" + attitude, orbit, proj_params = nav_params scan_angles = transform_image_coords_to_scanning_angles( point, _get_image_offset(proj_params), @@ -119,31 +131,27 @@ def _get_sampling(proj_params): @numba.njit def _get_sat_sun_angles(orbit): - return np.array(( - orbit.declination_from_sat_to_sun, - orbit.right_ascension_from_sat_to_sun - )) + return (orbit.declination_from_sat_to_sun, + orbit.right_ascension_from_sat_to_sun) @numba.njit def _get_spin_angles(attitude): - return np.array(( - attitude.angle_between_sat_spin_and_z_axis, - attitude.angle_between_sat_spin_and_yz_plane - )) + return (attitude.angle_between_sat_spin_and_z_axis, + attitude.angle_between_sat_spin_and_yz_plane) @numba.njit def _get_sat_pos(orbit): return np.array((orbit.sat_position_earth_fixed_x, - orbit.sat_position_earth_fixed_y, - orbit.sat_position_earth_fixed_z)) + orbit.sat_position_earth_fixed_y, + orbit.sat_position_earth_fixed_z)) @numba.njit def _get_ellipsoid(proj_params): - return np.array((proj_params.earth_equatorial_radius, - proj_params.earth_flattening)) + return (proj_params.earth_equatorial_radius, + proj_params.earth_flattening) @numba.njit @@ -416,53 +424,6 @@ def normalize_vector(v): ) -# FIXME -# @numba.experimental.jitclass( -# [ -# ('attitude', get_jitclass_type(Attitude)), -# ('orbit', get_jitclass_type(Orbit)), -# ('proj_params', get_jitclass_type(ProjectionParameters)), -# ] -# ) -# class NavigationParameters: -# def __init__(self, attitude, orbit, proj_params): -# self.attitude = attitude -# self.orbit = orbit -# self.proj_params = proj_params -# -# # TODO: Remember that all angles are expected in rad -# # TODO: Watch out shape of 3x3 matrices! See msVissrNav.c -# -# def get_image_offset(self): -# return self.proj_params.line_offset, self.proj_params.pixel_offset -# -# def get_sampling(self): -# return self.proj_params.stepping_angle, self.proj_params.sampling_angle -# -# def get_sat_sun_angles(self): -# return np.array([ -# self.orbit.declination_from_sat_to_sun, -# self.orbit.right_ascension_from_sat_to_sun -# ]) -# -# def get_spin_angles(self): -# return np.array([ -# self.attitude.angle_between_sat_spin_and_z_axis, -# self.attitude.angle_between_sat_spin_and_yz_plane -# ]) -# -# def get_ellipsoid(self): -# return np.array([ -# self.proj_params.earth_equatorial_radius, -# self.proj_params.earth_flattening -# ]) -# -# def get_sat_position(self): -# return np.array((self.orbit.sat_position_earth_fixed_x, -# self.orbit.sat_position_earth_fixed_y, -# self.orbit.sat_position_earth_fixed_z)) - - OrbitPrediction = namedtuple( 'OrbitPrediction', [ @@ -561,29 +522,6 @@ def interpolate_attitude_prediction(attitude_prediction, observation_time): ) -# FIXME -# @numba.experimental.jitclass( -# [ -# ('attitude_prediction', get_jitclass_type(AttitudePrediction)), -# ('orbit_prediction', get_jitclass_type(OrbitPrediction)), -# ('proj_params', get_jitclass_type(ProjectionParameters)), -# ] -# ) -# class PredictedNavigationParameters: -# def __init__(self, attitude_prediction, orbit_prediction, proj_params): -# self.attitude_prediction = attitude_prediction -# self.orbit_prediction = orbit_prediction -# self.proj_params = proj_params -# -# def interpolate(self, observation_time): -# attitude = self.attitude_prediction.interpolate(observation_time) -# orbit = self.orbit_prediction.interpolate(observation_time) -# return self._get_nav_params(attitude, orbit) -# -# def _get_nav_params(self, attitude, orbit): -# return NavigationParameters(attitude, orbit, self.proj_params) - - @numba.njit def 
interpolate_continuous(x, x_sample, y_sample): """Linear interpolation of continuous quantities. @@ -593,7 +531,8 @@ def interpolate_continuous(x, x_sample, y_sample): """ try: return _interpolate(x, x_sample, y_sample) - except Exception: + except: + # Numba cannot distinguish exception types return np.nan @@ -607,7 +546,8 @@ def interpolate_angles(x, x_sample, y_sample): """ try: return _wrap_2pi(_interpolate(x, x_sample, unwrap(y_sample))) - except Exception: + except: + # Numba cannot distinguish exception types return np.nan diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index 750315e5b2..373970e8dc 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -194,16 +194,9 @@ class TestSinglePixelNavigation: ) def test_get_lon_lat(self, point, nav_params, expected): """Test getting lon/lat coordinates for a given pixel.""" - attitude, orbit, proj_params = nav_params - lon, lat = nav.get_lon_lat(point, attitude, orbit, proj_params) + lon, lat = nav.get_lon_lat(point, nav_params) np.testing.assert_allclose((lon, lat), expected) - def test_nav_matrices_are_contiguous(self): - """Test that navigation matrices are stored as C-contiguous arrays.""" - attitude, orbit, proj_params = NAVIGATION_REFERENCE[0]['nav_params'] - assert proj_params.misalignment.flags['C_CONTIGUOUS'] - assert orbit.nutation_precession.flags['C_CONTIGUOUS'] - def test_transform_image_coords_to_scanning_angles(self): """Test transformation from image coordinates to scanning angles.""" angles = nav.transform_image_coords_to_scanning_angles( @@ -280,7 +273,13 @@ def test_normalize_vector(self): class TestImageNavigation: - def test_get_lons_lats(self, scan_params, predicted_nav_params): + def test_get_lons_lats( + self, + scan_params, + attitude_prediction, + orbit_prediction, + proj_params + ): lons_exp = [[-114.70480148, -112.23691703, -109.70496014], [8.27367963, 8.74144293, 9.17639531], [15.92344528, 16.27070079, 16.63288666]] @@ -290,8 +289,8 @@ def test_get_lons_lats(self, scan_params, predicted_nav_params): lons, lats = nav.get_lons_lats( lines=np.array([1000, 1500, 2000]), pixels=np.array([1000, 1500, 2000]), - scan_params=scan_params, - predicted_nav_params=predicted_nav_params + static_params=(scan_params, proj_params), + predicted_params=(attitude_prediction, orbit_prediction) ) np.testing.assert_allclose(lons, lons_exp) np.testing.assert_allclose(lats, lats_exp) @@ -361,21 +360,16 @@ def test_interpolate_nearest(self, obs_time, expected): np.testing.assert_allclose(res, expected) def test_interpolate_orbit_prediction(self, obs_time, orbit_prediction, orbit_expected): - orbit = orbit_prediction.interpolate(obs_time) - assert_orbit_close(orbit, orbit_expected) + orbit = nav.interpolate_orbit_prediction( + orbit_prediction, obs_time + ) + assert_namedtuple_close(orbit, orbit_expected) def test_interpolate_attitude_prediction(self, obs_time, attitude_prediction, attitude_expected): - attitude = attitude_prediction.interpolate(obs_time) - assert_attitude_close(attitude, attitude_expected) - - def test_interpolate_prediction(self, obs_time, proj_params, attitude_prediction, orbit_prediction, nav_params_expected): - predicted_nav_params = nav.PredictedNavigationParameters( - proj_params=proj_params, - attitude_prediction=attitude_prediction, - orbit_prediction=orbit_prediction + attitude = nav.interpolate_attitude_prediction( + attitude_prediction, obs_time ) - nav_params = 
predicted_nav_params.interpolate(obs_time) - assert_nav_params_close(nav_params, nav_params_expected) + assert_namedtuple_close(attitude, attitude_expected) @pytest.fixture def obs_time(self): @@ -423,13 +417,6 @@ def scan_params(sampling_angle): ) -@pytest.fixture -def predicted_nav_params(attitude_prediction, orbit_prediction, proj_params): - return nav.PredictedNavigationParameters( - attitude_prediction, orbit_prediction, proj_params - ) - - @pytest.fixture def attitude_prediction(): return nav.AttitudePrediction( @@ -764,86 +751,11 @@ def lons_lats_exp(self, dataset_id): return lons_exp, lats_exp -def _get_attributes(obj): - import inspect - non_callable_attrs = inspect.getmembers( - obj, lambda x: not inspect.ismethod(x) - ) - return set(attr for attr in non_callable_attrs if not attr.startswith('_')) - - -def _assert_have_same_attributes(a, b): - a_attrs = _get_attributes(a) - b_attrs = _get_attributes(b) - assert a_attrs == b_attrs, "Different set of attribtues" - return a_attrs - - -def assert_numba_objects_close(a, b): - _assert_have_same_attributes(a, b) - _assert_attrs_close(a, b) - - -def assert_orbit_close(a, b): - """Assert that two Orbit instances are close. - - This would probably make more sense in the Orbit class. However, - numba doesn't support np.allclose, yet. - """ - attrs = [ - 'greenwich_sidereal_time', - 'declination_from_sat_to_sun', - 'right_ascension_from_sat_to_sun', - 'sat_position_earth_fixed_x', - 'sat_position_earth_fixed_y', - 'sat_position_earth_fixed_z', - 'nutation_precession', - ] - _assert_attrs_close(a, b, attrs, 'Orbit') - - -def assert_attitude_close(a, b): - """Assert that two Attitude instances are close. - - This would probably make more sense in the Attitude class. However, - numba doesn't support np.allclose, yet. - """ - attrs = [ - 'angle_between_earth_and_sun', - 'angle_between_sat_spin_and_z_axis', - 'angle_between_sat_spin_and_yz_plane' - ] - _assert_attrs_close(a, b, attrs, 'Attitude') - - -def assert_proj_params_close(a, b): - """Assert that two ProjectionParameters instances are close. - - This would probably make more sense in the Attitude class. However, - numba doesn't support np.allclose, yet. 
- """ - attrs = [ - 'line_offset', - 'pixel_offset', - 'stepping_angle', - 'sampling_angle', - 'misalignment', - 'earth_flattening', - 'earth_equatorial_radius', - ] - _assert_attrs_close(a, b, attrs, 'ProjectionParameters') - - -def assert_nav_params_close(a, b): - assert_attitude_close(a.attitude, b.attitude) - assert_orbit_close(a.orbit, b.orbit) - assert_proj_params_close(a.proj_params, b.proj_params) - - -def _assert_attrs_close(a, b): - for attr in _get_attributes(a): +def assert_namedtuple_close(a, b): + assert a.__class__ == b.__class__ + for attr in a._fields: np.testing.assert_allclose( getattr(a, attr), getattr(b, attr), - err_msg='{} attribute {} differs'.format(a._numba_type, attr) - ) \ No newline at end of file + err_msg='{} attribute {} differs'.format(a.__class__, attr) + ) From a7ffd2ff640d9bdc94d9ad23e46e7e1fb582bcdd Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Tue, 7 Sep 2021 16:30:15 +0000 Subject: [PATCH 0025/1416] Add lon/lat coordinates to yaml definition --- satpy/etc/readers/gms5-vissr_l1b.yaml | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/satpy/etc/readers/gms5-vissr_l1b.yaml b/satpy/etc/readers/gms5-vissr_l1b.yaml index 77ecf32ed8..cfd1dd16fe 100644 --- a/satpy/etc/readers/gms5-vissr_l1b.yaml +++ b/satpy/etc/readers/gms5-vissr_l1b.yaml @@ -48,6 +48,9 @@ datasets: # standard_name: toa_bidirectional_reflectance # units: "%" file_type: gms5_vissr_vis + coordinates: + - longitude + - latitude IR1: name: IR1 @@ -62,6 +65,9 @@ datasets: # standard_name: toa_brightness_temperature # units: "K" file_type: gms5_vissr_ir1 + coordinates: + - longitude + - latitude IR2: name: IR2 @@ -76,6 +82,9 @@ datasets: # standard_name: toa_brightness_temperature # units: "K" file_type: gms5_vissr_ir2 + coordinates: + - longitude + - latitude IR3: name: IR3 @@ -90,3 +99,6 @@ datasets: # standard_name: toa_brightness_temperature # units: "K" file_type: gms5_vissr_ir3 + coordinates: + - longitude + - latitude From 4f080e4344a023d12ef7c9a985c1362a027f54d2 Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 11:19:17 +0000 Subject: [PATCH 0026/1416] Add reader for VIIRS level 2 products produced by the NOAA enterprise suite. 
--- satpy/etc/readers/viirs_jrr.yaml | 37 ++++++++++++++ satpy/readers/viirs_jrr.py | 85 ++++++++++++++++++++++++++++++++ 2 files changed, 122 insertions(+) create mode 100644 satpy/etc/readers/viirs_jrr.yaml create mode 100644 satpy/readers/viirs_jrr.py diff --git a/satpy/etc/readers/viirs_jrr.yaml b/satpy/etc/readers/viirs_jrr.yaml new file mode 100644 index 0000000000..fe896c1cf8 --- /dev/null +++ b/satpy/etc/readers/viirs_jrr.yaml @@ -0,0 +1,37 @@ +reader: + description: VIIRS NOAA Enterprise L2 product reader + name: viirs_jrr + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + sensors: [viirs] + + +file_types: + jrr_cloudmask: + file_reader: !!python/name:satpy.readers.viirs_jrr.VIIRSJRRFileHandler + variable_prefix: "" + file_patterns: + - 'JRR-CloudMask_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + + +datasets: + longitude: + name: longitude + standard_name: longitude + file_type: [jrr_cloudmask] + file_key: "Longitude" + units: 'degrees_east' + latitude: + name: latitude + standard_name: latitude + file_type: [jrr_cloudmask] + file_key: "Latitude" + units: 'degrees_north' + cloud_mask: + name: cloud_mask + file_type: [jrr_cloudmask] + file_key: "CloudMask" + coordinates: [longitude, latitude] + units: '1' + flag_meanings: ['Clear', 'Probably Clear', 'Probably Cloudy', 'Cloudy'] + flag_values: [0, 1, 2, 3] + _FillValue: -128 \ No newline at end of file diff --git a/satpy/readers/viirs_jrr.py b/satpy/readers/viirs_jrr.py new file mode 100644 index 0000000000..2ae344bd95 --- /dev/null +++ b/satpy/readers/viirs_jrr.py @@ -0,0 +1,85 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""VIIRS NOAA enterprise L2 product reader. + +This module implements readers for the NOAA enterprise level 2 products for the +VIIRS instrument. These replace the 'old' EDR products. 
+""" + +import logging + +from satpy import CHUNK_SIZE +from satpy.readers.file_handlers import BaseFileHandler + +import dask.array as da +import numpy as np +import xarray as xr + +# map platform attributes to Oscar standard name +PLATFORM_MAP = { + "NPP": "Suomi-NPP", + "J01": "NOAA-20", + "J02": "NOAA-21" +} + +LOG = logging.getLogger(__name__) + + +class VIIRSJRRFileHandler(BaseFileHandler): + """NetCDF4 reader for VIIRS Active Fires.""" + + def __init__(self, filename, filename_info, filetype_info): + """Initialize the geo filehandler.""" + super(VIIRSJRRFileHandler, self).__init__(filename, filename_info, + filetype_info) + self.nc = xr.open_dataset(self.filename, + decode_cf=True, + mask_and_scale=True, + chunks={'Columns': CHUNK_SIZE, + 'Rows': CHUNK_SIZE}) + self.nc = self.nc.rename({'Columns': 'x', 'Rows': 'y'}) + if 'Latitude' in self.nc: + self.nc['Latitude'].attrs.update({'standard_name': 'latitude'}) + if 'Longitude' in self.nc: + self.nc['Longitude'].attrs.update({'standard_name': 'longitude'}) + + def get_dataset(self, dataset_id, info): + """Get the dataset.""" + ds = self.nc[info['file_key']] + + return ds + + @property + def start_time(self): + """Get first date/time when observations were recorded.""" + return self.filename_info['start_time'] + + @property + def end_time(self): + """Get last date/time when observations were recorded.""" + return self.filename_info.get('end_time', self.start_time) + + @property + def sensor_name(self): + """Name of sensor for this file.""" + return self["sensor"] + + @property + def platform_name(self): + """Name of platform/satellite for this file.""" + return self["platform_name"] From 439cbdfbc6f60a20f4c1cf8650406d7a23238aeb Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 11:28:38 +0000 Subject: [PATCH 0027/1416] Add reader for VIIRS level 2 products produced by the NOAA enterprise suite. --- satpy/readers/viirs_jrr.py | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/satpy/readers/viirs_jrr.py b/satpy/readers/viirs_jrr.py index 2ae344bd95..0704834da4 100644 --- a/satpy/readers/viirs_jrr.py +++ b/satpy/readers/viirs_jrr.py @@ -34,7 +34,7 @@ PLATFORM_MAP = { "NPP": "Suomi-NPP", "J01": "NOAA-20", - "J02": "NOAA-21" + "J02": "NOAA-21", } LOG = logging.getLogger(__name__) @@ -53,11 +53,16 @@ def __init__(self, filename, filename_info, filetype_info): chunks={'Columns': CHUNK_SIZE, 'Rows': CHUNK_SIZE}) self.nc = self.nc.rename({'Columns': 'x', 'Rows': 'y'}) + + # For some reason, no 'standard_name' is defined in the netCDF files, so + # here we manually make the definitions. 
if 'Latitude' in self.nc: self.nc['Latitude'].attrs.update({'standard_name': 'latitude'}) if 'Longitude' in self.nc: self.nc['Longitude'].attrs.update({'standard_name': 'longitude'}) + self.algorithm_version = filename_info['platform_shortname'] + def get_dataset(self, dataset_id, info): """Get the dataset.""" ds = self.nc[info['file_key']] @@ -74,12 +79,13 @@ def end_time(self): """Get last date/time when observations were recorded.""" return self.filename_info.get('end_time', self.start_time) - @property - def sensor_name(self): - """Name of sensor for this file.""" - return self["sensor"] - @property def platform_name(self): - """Name of platform/satellite for this file.""" - return self["platform_name"] + """Get platform name.""" + platform_path = self.filetype_info['platform_name'] + platform_dict = {'NPP': 'Suomi-NPP', + 'JPSS-1': 'NOAA-20', + 'J01': 'NOAA-20', + 'JPSS-2': 'NOAA-21', + 'J02': 'NOAA-21'} + return platform_dict[platform_path] From d53e6b146e2da2d7a22960b452f4ee7015a9632d Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 11:42:24 +0000 Subject: [PATCH 0028/1416] Complete JRR cloudmask product list --- satpy/etc/readers/viirs_jrr.yaml | 43 ++++++++++++++++++++++++++++++++ satpy/readers/viirs_jrr.py | 7 ++---- 2 files changed, 45 insertions(+), 5 deletions(-) diff --git a/satpy/etc/readers/viirs_jrr.yaml b/satpy/etc/readers/viirs_jrr.yaml index fe896c1cf8..d9326460e7 100644 --- a/satpy/etc/readers/viirs_jrr.yaml +++ b/satpy/etc/readers/viirs_jrr.yaml @@ -34,4 +34,47 @@ datasets: units: '1' flag_meanings: ['Clear', 'Probably Clear', 'Probably Cloudy', 'Cloudy'] flag_values: [0, 1, 2, 3] + _FillValue: -128 + cloud_mask_binary: + name: cloud_mask_binary + file_type: [jrr_cloudmask] + file_key: "CloudMaskBinary" + coordinates: [longitude, latitude] + units: '1' + flag_meanings: ['Clear', 'Cloudy'] + flag_values: [0, 1] + _FillValue: -128 + cloud_probability: + name: cloud_probability + file_type: [jrr_cloudmask] + file_key: "CloudProbability" + coordinates: [longitude, latitude] + units: '1' + _FillValue: -999. + dust_mask: + name: dust_mask + file_type: [jrr_cloudmask] + file_key: "Dust_Mask" + coordinates: [longitude, latitude] + units: '1' + flag_meanings: ['Clear', 'Dusty'] + flag_values: [0, 1] + _FillValue: -128 + fire_mask: + name: fire_mask + file_type: [jrr_cloudmask] + file_key: "Fire_Mask" + coordinates: [longitude, latitude] + units: '1' + flag_meanings: ['No fire', 'Fire'] + flag_values: [0, 1] + _FillValue: -128 + smoke_mask: + name: smoke_mask + file_type: [jrr_cloudmask] + file_key: "Smoke_Mask" + coordinates: [longitude, latitude] + units: '1' + flag_meanings: ['Clear', 'Smoky'] + flag_values: [0, 1] _FillValue: -128 \ No newline at end of file diff --git a/satpy/readers/viirs_jrr.py b/satpy/readers/viirs_jrr.py index 0704834da4..42f13547f0 100644 --- a/satpy/readers/viirs_jrr.py +++ b/satpy/readers/viirs_jrr.py @@ -21,14 +21,11 @@ VIIRS instrument. These replace the 'old' EDR products. 
""" -import logging -from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler - -import dask.array as da -import numpy as np +from satpy import CHUNK_SIZE import xarray as xr +import logging # map platform attributes to Oscar standard name PLATFORM_MAP = { From c1523d1b108122c23ba28b34686fc083bbd45e46 Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 12:21:36 +0000 Subject: [PATCH 0029/1416] Add JRR aerosol product list --- satpy/etc/readers/viirs_jrr.yaml | 117 +++++++++++++++++++++++++++++-- 1 file changed, 113 insertions(+), 4 deletions(-) diff --git a/satpy/etc/readers/viirs_jrr.yaml b/satpy/etc/readers/viirs_jrr.yaml index d9326460e7..70a49ed041 100644 --- a/satpy/etc/readers/viirs_jrr.yaml +++ b/satpy/etc/readers/viirs_jrr.yaml @@ -11,24 +11,32 @@ file_types: variable_prefix: "" file_patterns: - 'JRR-CloudMask_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + jrr_aerosol_product: + file_reader: !!python/name:satpy.readers.viirs_jrr.VIIRSJRRFileHandler + variable_prefix: "" + file_patterns: + - 'JRR-ADP_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' datasets: + # Common datasets longitude: name: longitude standard_name: longitude - file_type: [jrr_cloudmask] + file_type: [jrr_cloudmask, jrr_aerosol_product] file_key: "Longitude" units: 'degrees_east' latitude: name: latitude standard_name: latitude - file_type: [jrr_cloudmask] + file_type: [jrr_cloudmask, jrr_aerosol_product] file_key: "Latitude" units: 'degrees_north' + + # Cloudmask product datasets cloud_mask: name: cloud_mask - file_type: [jrr_cloudmask] + file_type: jrr_cloudmask file_key: "CloudMask" coordinates: [longitude, latitude] units: '1' @@ -77,4 +85,105 @@ datasets: units: '1' flag_meanings: ['Clear', 'Smoky'] flag_values: [0, 1] - _FillValue: -128 \ No newline at end of file + _FillValue: -128 + + # Aerosol optical depth product datasets + ash_mask: + name: ash_mask + file_type: [jrr_aerosol_product] + file_key: "Ash" + coordinates: [longitude, latitude] + units: '1' + flag_meanings: ['Clear', 'Ash'] + flag_values: [0, 1] + _FillValue: -128 + cloud_mask_adp: + name: cloud_mask_adp + file_type: [jrr_aerosol_product] + file_key: "Cloud" + coordinates: [longitude, latitude] + units: '1' + flag_meanings: ['Clear', 'Probably Clear', 'Probably Cloudy', 'Cloudy'] + flag_values: [0, 1, 2, 3] + _FillValue: -128 + dust_smoke_discrimination_index: + name: dust_smoke_discrimination_index + file_type: [jrr_aerosol_product] + file_key: "DSDI" + coordinates: [longitude, latitude] + units: '1' + _FillValue: -999 + nuc: + name: nuc + file_type: [jrr_aerosol_product] + file_key: "NUC" + coordinates: [longitude, latitude] + units: '1' + flag_meanings: ['No', 'Yes'] + flag_values: [0, 1] + _FillValue: -128 + pqi1: + name: pqi1 + file_type: [jrr_aerosol_product] + file_key: "PQI1" + coordinates: [longitude, latitude] + units: '1' + _FillValue: -128 + pqi2: + name: pqi2 + file_type: [jrr_aerosol_product] + file_key: "PQI2" + coordinates: [longitude, latitude] + units: '1' + _FillValue: -128 + pqi3: + name: pqi3 + file_type: [jrr_aerosol_product] + file_key: "PQI3" + coordinates: [longitude, latitude] + units: '1' + _FillValue: -128 + pqi4: + name: pqi4 + file_type: [jrr_aerosol_product] + file_key: "PQI4" + coordinates: [longitude, latitude] + units: '1' + _FillValue: -128 + qcflag: + name: qcflag + file_type: [jrr_aerosol_product] + file_key: "QC_Flag" + coordinates: [longitude, 
latitude] + units: '1' + _FillValue: -128 + saai: + name: saai + file_type: [jrr_aerosol_product] + file_key: "SAAI" + coordinates: [longitude, latitude] + units: '1' + _FillValue: -999 + smoke: + name: smoke + file_type: [jrr_aerosol_product] + file_key: "Smoke" + coordinates: [longitude, latitude] + units: '1' + _FillValue: -999 + smoke_concentration: + name: smoke_concentration + file_type: [jrr_aerosol_product] + file_key: "SmokeCon" + coordinates: [longitude, latitude] + units: 'ug/m^3' + _FillValue: -999 + snow_ice: + name: snow_ice + file_type: [jrr_aerosol_product] + file_key: "SnowIce" + coordinates: [longitude, latitude] + units: '1' + flag_meanings: ['No', 'Yes'] + flag_values: [0, 1] + _FillValue: -128 From d5f3ba573231ddaf4bab397b539a4680f7b30fec Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 12:54:09 +0000 Subject: [PATCH 0030/1416] Add surface reflectance JRR product --- satpy/etc/readers/viirs_jrr.yaml | 166 ++++++++++++++++++++++++++++++- satpy/readers/viirs_jrr.py | 8 +- 2 files changed, 171 insertions(+), 3 deletions(-) diff --git a/satpy/etc/readers/viirs_jrr.yaml b/satpy/etc/readers/viirs_jrr.yaml index 70a49ed041..81468aa313 100644 --- a/satpy/etc/readers/viirs_jrr.yaml +++ b/satpy/etc/readers/viirs_jrr.yaml @@ -16,10 +16,15 @@ file_types: variable_prefix: "" file_patterns: - 'JRR-ADP_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + jrr_surfref_product: + file_reader: !!python/name:satpy.readers.viirs_jrr.VIIRSJRRFileHandler + variable_prefix: "" + file_patterns: + - 'SurfRefl_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' datasets: - # Common datasets + # Geolocation datasets longitude: name: longitude standard_name: longitude @@ -32,6 +37,30 @@ datasets: file_type: [jrr_cloudmask, jrr_aerosol_product] file_key: "Latitude" units: 'degrees_north' + longitude_375: + name: longitude_375 + standard_name: longitude + file_type: jrr_surfref_product + file_key: "Longitude_at_375m_resolution" + units: 'degrees_east' + latitude_375: + name: latitude_375 + standard_name: latitude + file_type: jrr_surfref_product + file_key: "Latitude_at_375m_resolution" + units: 'degrees_north' + longitude_750: + name: longitude_750 + standard_name: longitude + file_type: jrr_surfref_product + file_key: "Longitude_at_750m_resolution" + units: 'degrees_east' + latitude_750: + name: latitude_750 + standard_name: latitude + file_type: jrr_surfref_product + file_key: "Latitude_at_750m_resolution" + units: 'degrees_north' # Cloudmask product datasets cloud_mask: @@ -187,3 +216,138 @@ datasets: flag_meanings: ['No', 'Yes'] flag_values: [0, 1] _FillValue: -128 + + # Surface reflectance products + surf_refl_I01: + name: surf_refl_I01 + file_type: [jrr_surfref_product] + file_key: "375m Surface Reflectance Band I1" + coordinates: [longitude_375, latitude_375] + units: '1' + _FillValue: -9999 + surf_refl_I02: + name: surf_refl_I02 + file_type: [jrr_surfref_product] + file_key: "375m Surface Reflectance Band I2" + coordinates: [longitude_375, latitude_375] + units: '1' + _FillValue: -9999 + surf_refl_I03: + name: surf_refl_I03 + file_type: [jrr_surfref_product] + file_key: "375m Surface Reflectance Band I3" + coordinates: [longitude_375, latitude_375] + units: '1' + _FillValue: -9999 + surf_refl_M01: + name: surf_refl_M01 + file_type: [jrr_surfref_product] + file_key: "375m Surface Reflectance Band M1" + coordinates: [longitude_750, latitude_750] + units: '1' + 
_FillValue: -9999 + surf_refl_M02: + name: surf_refl_M02 + file_type: [jrr_surfref_product] + file_key: "375m Surface Reflectance Band M2" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_M03: + name: surf_refl_M03 + file_type: [jrr_surfref_product] + file_key: "375m Surface Reflectance Band M3" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_M04: + name: surf_refl_M04 + file_type: [jrr_surfref_product] + file_key: "375m Surface Reflectance Band M4" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_M05: + name: surf_refl_M05 + file_type: [jrr_surfref_product] + file_key: "375m Surface Reflectance Band M5" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_M06: + name: surf_refl_M06 + file_type: [jrr_surfref_product] + file_key: "375m Surface Reflectance Band M6" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_M07: + name: surf_refl_M07 + file_type: [jrr_surfref_product] + file_key: "375m Surface Reflectance Band M7" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_M08: + name: surf_refl_M08 + file_type: [jrr_surfref_product] + file_key: "375m Surface Reflectance Band M8" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_M10: + name: surf_refl_M10 + file_type: [jrr_surfref_product] + file_key: "375m Surface Reflectance Band M10" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_qf1: + name: surf_refl_qf1 + file_type: [jrr_surfref_product] + file_key: "QF1 Surface Reflectance" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_qf2: + name: surf_refl_qf2 + file_type: [jrr_surfref_product] + file_key: "QF2 Surface Reflectance" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_qf3: + name: surf_refl_qf3 + file_type: [jrr_surfref_product] + file_key: "QF3 Surface Reflectance" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_qf4: + name: surf_refl_qf4 + file_type: [jrr_surfref_product] + file_key: "QF4 Surface Reflectance" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_qf5: + name: surf_refl_qf5 + file_type: [jrr_surfref_product] + file_key: "QF5 Surface Reflectance" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_qf6: + name: surf_refl_qf6 + file_type: [jrr_surfref_product] + file_key: "QF6 Surface Reflectance" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 + surf_refl_qf7: + name: surf_refl_qf7 + file_type: [jrr_surfref_product] + file_key: "QF7 Surface Reflectance" + coordinates: [longitude_750, latitude_750] + units: '1' + _FillValue: -9999 diff --git a/satpy/readers/viirs_jrr.py b/satpy/readers/viirs_jrr.py index 42f13547f0..2d4944ae3d 100644 --- a/satpy/readers/viirs_jrr.py +++ b/satpy/readers/viirs_jrr.py @@ -49,9 +49,13 @@ def __init__(self, filename, filename_info, filetype_info): mask_and_scale=True, chunks={'Columns': CHUNK_SIZE, 'Rows': CHUNK_SIZE}) - self.nc = self.nc.rename({'Columns': 'x', 'Rows': 'y'}) + if 'columns' in self.nc.dims: + self.nc = self.nc.rename({'Columns': 'x', 'Rows': 'y'}) + elif 'Along_Track_375m' in self.nc.dims: + self.nc = self.nc.rename({'Along_Scan_375m': 'x', 'Along_Track_375m': 'y'}) + self.nc = 
self.nc.rename({'Along_Scan_750m': 'x', 'Along_Track_750m': 'y'}) - # For some reason, no 'standard_name' is defined in the netCDF files, so + # For some reason, no 'standard_name' is defined in some netCDF files, so # here we manually make the definitions. if 'Latitude' in self.nc: self.nc['Latitude'].attrs.update({'standard_name': 'latitude'}) From 9f4954da02b325b3b6d340d7dd9575b23e0c2d1f Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 13:35:11 +0000 Subject: [PATCH 0031/1416] Add VIIRS JRR composites, update JRR dataset keys for resolution. --- satpy/etc/composites/viirs.yaml | 24 ++++++++++ satpy/etc/readers/viirs_jrr.yaml | 76 ++++++++++++++++++++++++++++---- satpy/readers/viirs_jrr.py | 1 + 3 files changed, 92 insertions(+), 9 deletions(-) diff --git a/satpy/etc/composites/viirs.yaml b/satpy/etc/composites/viirs.yaml index 4fbc3a6b3c..da324d58b0 100644 --- a/satpy/etc/composites/viirs.yaml +++ b/satpy/etc/composites/viirs.yaml @@ -306,6 +306,30 @@ composites: modifiers: [sunz_corrected_iband] standard_name: natural_color + natural_color_iband_surf_nocorr: + compositor: !!python/name:satpy.composites.RGBCompositor + prerequisites: + - name: surf_refl_I03 + - name: surf_refl_I02 + - name: surf_refl_I01 + standard_name: natural_color + + natural_color_mband_surf_nocorr: + compositor: !!python/name:satpy.composites.RGBCompositor + prerequisites: + - name: surf_refl_M10 + - name: surf_refl_M07 + - name: surf_refl_M05 + standard_name: natural_color + + true_color_mband_nocorr: + compositor: !!python/name:satpy.composites.RGBCompositor + prerequisites: + - name: surf_refl_M05 + - name: surf_refl_M04 + - name: surf_refl_M03 + standard_name: true_color + natural_color_sun_lowres: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: diff --git a/satpy/etc/readers/viirs_jrr.yaml b/satpy/etc/readers/viirs_jrr.yaml index 81468aa313..7255756418 100644 --- a/satpy/etc/readers/viirs_jrr.yaml +++ b/satpy/etc/readers/viirs_jrr.yaml @@ -3,6 +3,8 @@ reader: name: viirs_jrr reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [viirs] + group_keys: ['platform_shortname'] + default_datasets: file_types: @@ -31,40 +33,47 @@ datasets: file_type: [jrr_cloudmask, jrr_aerosol_product] file_key: "Longitude" units: 'degrees_east' + resolution: 750 latitude: name: latitude standard_name: latitude file_type: [jrr_cloudmask, jrr_aerosol_product] file_key: "Latitude" units: 'degrees_north' + resolution: 750 longitude_375: name: longitude_375 standard_name: longitude file_type: jrr_surfref_product file_key: "Longitude_at_375m_resolution" units: 'degrees_east' + resolution: 375 latitude_375: name: latitude_375 standard_name: latitude file_type: jrr_surfref_product file_key: "Latitude_at_375m_resolution" units: 'degrees_north' + resolution: 375 longitude_750: name: longitude_750 standard_name: longitude file_type: jrr_surfref_product file_key: "Longitude_at_750m_resolution" units: 'degrees_east' + resolution: 750 latitude_750: name: latitude_750 standard_name: latitude file_type: jrr_surfref_product file_key: "Latitude_at_750m_resolution" units: 'degrees_north' + resolution: 750 # Cloudmask product datasets cloud_mask: name: cloud_mask + resolution: 750 file_type: jrr_cloudmask file_key: "CloudMask" coordinates: [longitude, latitude] @@ -74,6 +83,7 @@ datasets: _FillValue: -128 cloud_mask_binary: name: cloud_mask_binary + resolution: 750 file_type: [jrr_cloudmask] file_key: "CloudMaskBinary" coordinates: [longitude, latitude] @@ -83,6 +93,7 @@ datasets: _FillValue: 
-128 cloud_probability: name: cloud_probability + resolution: 750 file_type: [jrr_cloudmask] file_key: "CloudProbability" coordinates: [longitude, latitude] @@ -90,6 +101,7 @@ datasets: _FillValue: -999. dust_mask: name: dust_mask + resolution: 750 file_type: [jrr_cloudmask] file_key: "Dust_Mask" coordinates: [longitude, latitude] @@ -99,6 +111,7 @@ datasets: _FillValue: -128 fire_mask: name: fire_mask + resolution: 750 file_type: [jrr_cloudmask] file_key: "Fire_Mask" coordinates: [longitude, latitude] @@ -108,6 +121,7 @@ datasets: _FillValue: -128 smoke_mask: name: smoke_mask + resolution: 750 file_type: [jrr_cloudmask] file_key: "Smoke_Mask" coordinates: [longitude, latitude] @@ -119,6 +133,7 @@ datasets: # Aerosol optical depth product datasets ash_mask: name: ash_mask + resolution: 750 file_type: [jrr_aerosol_product] file_key: "Ash" coordinates: [longitude, latitude] @@ -128,6 +143,7 @@ datasets: _FillValue: -128 cloud_mask_adp: name: cloud_mask_adp + resolution: 750 file_type: [jrr_aerosol_product] file_key: "Cloud" coordinates: [longitude, latitude] @@ -137,6 +153,7 @@ datasets: _FillValue: -128 dust_smoke_discrimination_index: name: dust_smoke_discrimination_index + resolution: 750 file_type: [jrr_aerosol_product] file_key: "DSDI" coordinates: [longitude, latitude] @@ -144,6 +161,7 @@ datasets: _FillValue: -999 nuc: name: nuc + resolution: 750 file_type: [jrr_aerosol_product] file_key: "NUC" coordinates: [longitude, latitude] @@ -153,6 +171,7 @@ datasets: _FillValue: -128 pqi1: name: pqi1 + resolution: 750 file_type: [jrr_aerosol_product] file_key: "PQI1" coordinates: [longitude, latitude] @@ -160,6 +179,7 @@ datasets: _FillValue: -128 pqi2: name: pqi2 + resolution: 750 file_type: [jrr_aerosol_product] file_key: "PQI2" coordinates: [longitude, latitude] @@ -167,6 +187,7 @@ datasets: _FillValue: -128 pqi3: name: pqi3 + resolution: 750 file_type: [jrr_aerosol_product] file_key: "PQI3" coordinates: [longitude, latitude] @@ -174,6 +195,7 @@ datasets: _FillValue: -128 pqi4: name: pqi4 + resolution: 750 file_type: [jrr_aerosol_product] file_key: "PQI4" coordinates: [longitude, latitude] @@ -181,6 +203,7 @@ datasets: _FillValue: -128 qcflag: name: qcflag + resolution: 750 file_type: [jrr_aerosol_product] file_key: "QC_Flag" coordinates: [longitude, latitude] @@ -188,6 +211,7 @@ datasets: _FillValue: -128 saai: name: saai + resolution: 750 file_type: [jrr_aerosol_product] file_key: "SAAI" coordinates: [longitude, latitude] @@ -195,6 +219,7 @@ datasets: _FillValue: -999 smoke: name: smoke + resolution: 750 file_type: [jrr_aerosol_product] file_key: "Smoke" coordinates: [longitude, latitude] @@ -202,6 +227,7 @@ datasets: _FillValue: -999 smoke_concentration: name: smoke_concentration + resolution: 750 file_type: [jrr_aerosol_product] file_key: "SmokeCon" coordinates: [longitude, latitude] @@ -209,6 +235,7 @@ datasets: _FillValue: -999 snow_ice: name: snow_ice + resolution: 750 file_type: [jrr_aerosol_product] file_key: "SnowIce" coordinates: [longitude, latitude] @@ -220,6 +247,8 @@ datasets: # Surface reflectance products surf_refl_I01: name: surf_refl_I01 + resolution: 375 + wavelength: [0.600, 0.640, 0.680] file_type: [jrr_surfref_product] file_key: "375m Surface Reflectance Band I1" coordinates: [longitude_375, latitude_375] @@ -227,6 +256,8 @@ datasets: _FillValue: -9999 surf_refl_I02: name: surf_refl_I02 + resolution: 375 + wavelength: [0.845, 0.865, 0.884] file_type: [jrr_surfref_product] file_key: "375m Surface Reflectance Band I2" coordinates: [longitude_375, latitude_375] @@ -234,6 
+265,8 @@ datasets: _FillValue: -9999 surf_refl_I03: name: surf_refl_I03 + resolution: 375 + wavelength: [1.580, 1.610, 1.640] file_type: [jrr_surfref_product] file_key: "375m Surface Reflectance Band I3" coordinates: [longitude_375, latitude_375] @@ -241,69 +274,88 @@ datasets: _FillValue: -9999 surf_refl_M01: name: surf_refl_M01 + resolution: 750 + wavelength: [0.402, 0.412, 0.422] file_type: [jrr_surfref_product] - file_key: "375m Surface Reflectance Band M1" + file_key: "750m Surface Reflectance Band M1" coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 surf_refl_M02: name: surf_refl_M02 + resolution: 750 + wavelength: [0.436, 0.445, 0.454] file_type: [jrr_surfref_product] - file_key: "375m Surface Reflectance Band M2" + file_key: "750m Surface Reflectance Band M2" coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 surf_refl_M03: name: surf_refl_M03 + resolution: 750 + wavelength: [0.478, 0.488, 0.498] file_type: [jrr_surfref_product] - file_key: "375m Surface Reflectance Band M3" + file_key: "750m Surface Reflectance Band M3" coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 surf_refl_M04: name: surf_refl_M04 + resolution: 750 + wavelength: [0.545, 0.555, 0.565] file_type: [jrr_surfref_product] - file_key: "375m Surface Reflectance Band M4" + file_key: "750m Surface Reflectance Band M4" coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 surf_refl_M05: name: surf_refl_M05 + resolution: 750 + wavelength: [0.662, 0.672, 0.682] file_type: [jrr_surfref_product] - file_key: "375m Surface Reflectance Band M5" + file_key: "750m Surface Reflectance Band M5" coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 surf_refl_M06: name: surf_refl_M06 + resolution: 750 + wavelength: [0.739, 0.746, 0.754] file_type: [jrr_surfref_product] - file_key: "375m Surface Reflectance Band M6" + file_key: "750m Surface Reflectance Band M6" coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 surf_refl_M07: name: surf_refl_M07 + resolution: 750 + wavelength: [0.846, 0.865, 0.885] file_type: [jrr_surfref_product] - file_key: "375m Surface Reflectance Band M7" + file_key: "750m Surface Reflectance Band M7" coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 surf_refl_M08: name: surf_refl_M08 + resolution: 750 + wavelength: [1.230, 1.240, 1.250] file_type: [jrr_surfref_product] - file_key: "375m Surface Reflectance Band M8" + file_key: "750m Surface Reflectance Band M8" coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 surf_refl_M10: name: surf_refl_M10 + resolution: 750 + wavelength: [1.580, 1.610, 1.640] file_type: [jrr_surfref_product] - file_key: "375m Surface Reflectance Band M10" + file_key: "750m Surface Reflectance Band M10" coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 surf_refl_qf1: name: surf_refl_qf1 + resolution: 750 file_type: [jrr_surfref_product] file_key: "QF1 Surface Reflectance" coordinates: [longitude_750, latitude_750] @@ -311,6 +363,7 @@ datasets: _FillValue: -9999 surf_refl_qf2: name: surf_refl_qf2 + resolution: 750 file_type: [jrr_surfref_product] file_key: "QF2 Surface Reflectance" coordinates: [longitude_750, latitude_750] @@ -318,6 +371,7 @@ datasets: _FillValue: -9999 surf_refl_qf3: name: surf_refl_qf3 + resolution: 750 file_type: [jrr_surfref_product] file_key: "QF3 Surface Reflectance" coordinates: [longitude_750, latitude_750] @@ -325,6 +379,7 @@ datasets: _FillValue: -9999 surf_refl_qf4: name: 
surf_refl_qf4 + resolution: 750 file_type: [jrr_surfref_product] file_key: "QF4 Surface Reflectance" coordinates: [longitude_750, latitude_750] @@ -332,6 +387,7 @@ datasets: _FillValue: -9999 surf_refl_qf5: name: surf_refl_qf5 + resolution: 750 file_type: [jrr_surfref_product] file_key: "QF5 Surface Reflectance" coordinates: [longitude_750, latitude_750] @@ -339,6 +395,7 @@ datasets: _FillValue: -9999 surf_refl_qf6: name: surf_refl_qf6 + resolution: 750 file_type: [jrr_surfref_product] file_key: "QF6 Surface Reflectance" coordinates: [longitude_750, latitude_750] @@ -346,6 +403,7 @@ datasets: _FillValue: -9999 surf_refl_qf7: name: surf_refl_qf7 + resolution: 750 file_type: [jrr_surfref_product] file_key: "QF7 Surface Reflectance" coordinates: [longitude_750, latitude_750] diff --git a/satpy/readers/viirs_jrr.py b/satpy/readers/viirs_jrr.py index 2d4944ae3d..a04886d852 100644 --- a/satpy/readers/viirs_jrr.py +++ b/satpy/readers/viirs_jrr.py @@ -63,6 +63,7 @@ def __init__(self, filename, filename_info, filetype_info): self.nc['Longitude'].attrs.update({'standard_name': 'longitude'}) self.algorithm_version = filename_info['platform_shortname'] + self.sensor_name = 'viirs' def get_dataset(self, dataset_id, info): """Get the dataset.""" From dea90d659e1c652e2a71875ab270d2e297dc3557 Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 13:41:39 +0000 Subject: [PATCH 0032/1416] Update VIIRS JRR module docstring. --- satpy/readers/viirs_jrr.py | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/satpy/readers/viirs_jrr.py b/satpy/readers/viirs_jrr.py index a04886d852..f8a502e583 100644 --- a/satpy/readers/viirs_jrr.py +++ b/satpy/readers/viirs_jrr.py @@ -17,8 +17,28 @@ # satpy. If not, see . """VIIRS NOAA enterprise L2 product reader. -This module implements readers for the NOAA enterprise level 2 products for the -VIIRS instrument. These replace the 'old' EDR products. +This module defines the :class:`VIIRSJRRFileHandler` file handler, to +be used for reading VIIRS Level 2 products generated by the NOAA enterprise +suite, which are downloadable via NOAA CLASS. +A wide variety of such products exist and, at present, only three are +supported here, showing example filenames: + - Cloud mask: JRR-CloudMask_v2r3_j01_s202112250807275_e202112250808520_c202112250837300.nc + - Aerosol properties: JRR-ADP_v2r3_j01_s202112250807275_e202112250808520_c202112250839550.nc + - Surface reflectance: SurfRefl_v1r1_j01_s202112250807275_e202112250808520_c202112250845080.nc +All products use the same base reader `viirs_jrr` and can be read through satpy with:: + + import satpy + import glob + + filenames = glob.glob('JRR-ADP*.nc') + scene = satpy.Scene(filenames, + reader='viirs_jrr') + scene.load(['smoke_concentration']) + +NOTE: + Multiple products contain datasets with the same name! For example, both the cloud mask + and aerosol files contain a cloud mask, but these are not identical. + For clarity, the aerosol file cloudmask is named `cloud_mask_adp` in this reader. """ From 2a9cf101065374123a0c620c917ac59ec9103ff8 Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 14:52:17 +0000 Subject: [PATCH 0033/1416] Update VIIRS JRR reader name and add tests. 
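The new tests exercise the handler without real NetCDF files by patching
xarray.open_dataset and handing the handler a MagicMock. A condensed sketch
of that pattern (the file name and platform value are placeholders; the
handler import is the renamed module):

    from unittest import mock
    from unittest.mock import MagicMock

    from satpy.readers.viirs_l2_jrr import VIIRSJRRFileHandler

    with mock.patch('xarray.open_dataset') as mocked_dataset:
        fake_nc = MagicMock()
        fake_nc.rename.return_value = fake_nc  # survive __init__'s dim renaming
        mocked_dataset.return_value = fake_nc
        hdl = VIIRSJRRFileHandler('somefile.nc',
                                  {'platform_shortname': 'npp'}, None)
        assert hdl.platform_name == 'Suomi-NPP'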
--- .../{viirs_jrr.yaml => viirs_l2_jrr.yaml} | 6 +- .../readers/{viirs_jrr.py => viirs_l2_jrr.py} | 14 +-- satpy/tests/reader_tests/test_viirs_l2_jrr.py | 93 +++++++++++++++++++ 3 files changed, 100 insertions(+), 13 deletions(-) rename satpy/etc/readers/{viirs_jrr.yaml => viirs_l2_jrr.yaml} (98%) rename satpy/readers/{viirs_jrr.py => viirs_l2_jrr.py} (93%) create mode 100644 satpy/tests/reader_tests/test_viirs_l2_jrr.py diff --git a/satpy/etc/readers/viirs_jrr.yaml b/satpy/etc/readers/viirs_l2_jrr.yaml similarity index 98% rename from satpy/etc/readers/viirs_jrr.yaml rename to satpy/etc/readers/viirs_l2_jrr.yaml index 7255756418..98bcd9253c 100644 --- a/satpy/etc/readers/viirs_jrr.yaml +++ b/satpy/etc/readers/viirs_l2_jrr.yaml @@ -9,17 +9,17 @@ reader: file_types: jrr_cloudmask: - file_reader: !!python/name:satpy.readers.viirs_jrr.VIIRSJRRFileHandler + file_reader: !!python/name:satpy.readers.viirs_l2_jrr.VIIRSJRRFileHandler variable_prefix: "" file_patterns: - 'JRR-CloudMask_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' jrr_aerosol_product: - file_reader: !!python/name:satpy.readers.viirs_jrr.VIIRSJRRFileHandler + file_reader: !!python/name:satpy.readers.viirs_l2_jrr.VIIRSJRRFileHandler variable_prefix: "" file_patterns: - 'JRR-ADP_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' jrr_surfref_product: - file_reader: !!python/name:satpy.readers.viirs_jrr.VIIRSJRRFileHandler + file_reader: !!python/name:satpy.readers.viirs_l2_jrr.VIIRSJRRFileHandler variable_prefix: "" file_patterns: - 'SurfRefl_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' diff --git a/satpy/readers/viirs_jrr.py b/satpy/readers/viirs_l2_jrr.py similarity index 93% rename from satpy/readers/viirs_jrr.py rename to satpy/readers/viirs_l2_jrr.py index f8a502e583..4c0694fcd5 100644 --- a/satpy/readers/viirs_jrr.py +++ b/satpy/readers/viirs_l2_jrr.py @@ -43,17 +43,11 @@ from satpy.readers.file_handlers import BaseFileHandler +from datetime import datetime from satpy import CHUNK_SIZE import xarray as xr import logging -# map platform attributes to Oscar standard name -PLATFORM_MAP = { - "NPP": "Suomi-NPP", - "J01": "NOAA-20", - "J02": "NOAA-21", -} - LOG = logging.getLogger(__name__) @@ -99,15 +93,15 @@ def start_time(self): @property def end_time(self): """Get last date/time when observations were recorded.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info['end_time'] @property def platform_name(self): """Get platform name.""" - platform_path = self.filetype_info['platform_name'] + platform_path = self.filename_info['platform_shortname'] platform_dict = {'NPP': 'Suomi-NPP', 'JPSS-1': 'NOAA-20', 'J01': 'NOAA-20', 'JPSS-2': 'NOAA-21', 'J02': 'NOAA-21'} - return platform_dict[platform_path] + return platform_dict[platform_path.upper()] diff --git a/satpy/tests/reader_tests/test_viirs_l2_jrr.py b/satpy/tests/reader_tests/test_viirs_l2_jrr.py new file mode 100644 index 0000000000..c482319721 --- /dev/null +++ b/satpy/tests/reader_tests/test_viirs_l2_jrr.py @@ -0,0 +1,93 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Satpy developers +# +# This file is part of satpy. 
+# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""Module for testing the satpy.readers.viirs_jrr module. +Note: This is adapted from the test_slstr_l2.py code. +""" + +from unittest.mock import MagicMock +from datetime import datetime +from unittest import mock +import xarray as xr +import unittest + + +from satpy.readers.viirs_l2_jrr import VIIRSJRRFileHandler + + +class TestVIIRSJRRReader(unittest.TestCase): + """Test the VIIRS JRR L2 reader.""" + @mock.patch('xarray.open_dataset') + def test_instantiate(self, mocked_dataset): + """Test initialization of file handlers.""" + filename_info = {'platform_shortname': 'npp'} + tmp = MagicMock(start_time='20191120T125002Z', stop_time='20191120T125002Z') + tmp.rename.return_value = tmp + xr.open_dataset.return_value = tmp + VIIRSJRRFileHandler('somedir/somefile.nc', filename_info, None) + mocked_dataset.assert_called() + mocked_dataset.reset_mock() + + @mock.patch('xarray.open_dataset') + def test_get_dataset(self, mocked_dataset): + """Test retrieval of datasets.""" + filename_info = {'platform_shortname': 'npp'} + tmp = MagicMock(start_time='20191120T125002Z', stop_time='20191120T125002Z') + xr.open_dataset.return_value = tmp + test = VIIRSJRRFileHandler('somedir/somefile.nc', filename_info, None) + test.nc = {'Longitude': xr.Dataset(), + 'Latitude': xr.Dataset(), + 'smoke_concentration': xr.Dataset(), + 'fire_mask': xr.Dataset(), + 'surf_refl_I01': xr.Dataset(), + 'surf_refl_M05': xr.Dataset(), + } + test.get_dataset('longitude', {'file_key': 'Longitude'}) + test.get_dataset('latitude', {'file_key': 'Latitude'}) + test.get_dataset('smoke_concentration', {'file_key': 'smoke_concentration'}) + test.get_dataset('fire_mask', {'file_key': 'fire_mask'}) + with self.assertRaises(KeyError): + test.get_dataset('erroneous dataset', {'file_key': 'erroneous dataset'}) + mocked_dataset.assert_called() + mocked_dataset.reset_mock() + test.get_dataset('surf_refl_I01', {'file_key': 'surf_refl_I01'}) + + @mock.patch('xarray.open_dataset') + def test_get_startend_times(self, mocked_dataset): + """Test finding start and end times of granules.""" + filename_info = {'platform_shortname': 'npp', + 'start_time': datetime(2021, 4, 3, 12, 0, 10), + 'end_time': datetime(2021, 4, 3, 12, 4, 28)} + tmp = MagicMock() + tmp.rename.return_value = tmp + xr.open_dataset.return_value = tmp + hdl = VIIRSJRRFileHandler('somedir/somefile.nc', filename_info, None) + self.assertEqual(hdl.start_time, datetime(2021, 4, 3, 12, 0, 10)) + self.assertEqual(hdl.end_time, datetime(2021, 4, 3, 12, 4, 28)) + + @mock.patch('xarray.open_dataset') + def test_get_platformname(self, mocked_dataset): + """Test finding start and end times of granules.""" + tmp = MagicMock() + tmp.rename.return_value = tmp + xr.open_dataset.return_value = tmp + hdl = VIIRSJRRFileHandler('somedir/somefile.nc', {'platform_shortname': 'npp'}, None) + self.assertEqual(hdl.platform_name, 'Suomi-NPP') + hdl = VIIRSJRRFileHandler('somedir/somefile.nc', {'platform_shortname': 'JPSS-1'}, 
None) + self.assertEqual(hdl.platform_name, 'NOAA-20') + hdl = VIIRSJRRFileHandler('somedir/somefile.nc', {'platform_shortname': 'J01'}, None) + self.assertEqual(hdl.platform_name, 'NOAA-20') From 1fa0aecd0203f77a7680d1b33a2553edc0c9fd70 Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 14:57:01 +0000 Subject: [PATCH 0034/1416] Remove unused import. --- satpy/readers/viirs_l2_jrr.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/readers/viirs_l2_jrr.py b/satpy/readers/viirs_l2_jrr.py index 4c0694fcd5..73de4d499d 100644 --- a/satpy/readers/viirs_l2_jrr.py +++ b/satpy/readers/viirs_l2_jrr.py @@ -43,7 +43,6 @@ from satpy.readers.file_handlers import BaseFileHandler -from datetime import datetime from satpy import CHUNK_SIZE import xarray as xr import logging From 213d7358cdde0419b537ea9cf28bb7c635f56fb1 Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 15:04:21 +0000 Subject: [PATCH 0035/1416] Add blank lines. --- satpy/tests/reader_tests/test_viirs_l2_jrr.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/satpy/tests/reader_tests/test_viirs_l2_jrr.py b/satpy/tests/reader_tests/test_viirs_l2_jrr.py index c482319721..4364b5c43c 100644 --- a/satpy/tests/reader_tests/test_viirs_l2_jrr.py +++ b/satpy/tests/reader_tests/test_viirs_l2_jrr.py @@ -16,6 +16,7 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.viirs_jrr module. + Note: This is adapted from the test_slstr_l2.py code. """ @@ -31,6 +32,7 @@ class TestVIIRSJRRReader(unittest.TestCase): """Test the VIIRS JRR L2 reader.""" + @mock.patch('xarray.open_dataset') def test_instantiate(self, mocked_dataset): """Test initialization of file handlers.""" From e194830020e6e7dc31ee5bdfcdce7d4a5e4a911e Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 15:12:39 +0000 Subject: [PATCH 0036/1416] Fix indentation. --- satpy/readers/viirs_l2_jrr.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/readers/viirs_l2_jrr.py b/satpy/readers/viirs_l2_jrr.py index 73de4d499d..09eac0cb17 100644 --- a/satpy/readers/viirs_l2_jrr.py +++ b/satpy/readers/viirs_l2_jrr.py @@ -36,9 +36,9 @@ scene.load(['smoke_concentration']) NOTE: - Multiple products contain datasets with the same name! For example, both the cloud mask - and aerosol files contain a cloud mask, but these are not identical. - For clarity, the aerosol file cloudmask is named `cloud_mask_adp` in this reader. +Multiple products contain datasets with the same name! For example, both the cloud mask +and aerosol files contain a cloud mask, but these are not identical. +For clarity, the aerosol file cloudmask is named `cloud_mask_adp` in this reader. """ From caadd86139d5b43b509598eaab4b5380e9faa350 Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 15:37:53 +0000 Subject: [PATCH 0037/1416] Update some reader name changes that were missed previously. 
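With the rename applied consistently, the reader is requested as
`viirs_l2_jrr` everywhere. For reference, usage then matches the module
docstring (the glob pattern is just an example; any dataset from the YAML
definition can be loaded):

    import glob
    import satpy

    filenames = glob.glob('JRR-ADP*.nc')
    scn = satpy.Scene(filenames, reader='viirs_l2_jrr')  # formerly 'viirs_jrr'
    scn.load(['smoke_concentration'])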
--- satpy/etc/readers/viirs_l2_jrr.yaml | 2 +- satpy/readers/viirs_l2_jrr.py | 4 ++-- satpy/tests/reader_tests/test_viirs_l2_jrr.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/etc/readers/viirs_l2_jrr.yaml b/satpy/etc/readers/viirs_l2_jrr.yaml index 98bcd9253c..f337909134 100644 --- a/satpy/etc/readers/viirs_l2_jrr.yaml +++ b/satpy/etc/readers/viirs_l2_jrr.yaml @@ -1,6 +1,6 @@ reader: description: VIIRS NOAA Enterprise L2 product reader - name: viirs_jrr + name: viirs_l2_jrr reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [viirs] group_keys: ['platform_shortname'] diff --git a/satpy/readers/viirs_l2_jrr.py b/satpy/readers/viirs_l2_jrr.py index 09eac0cb17..e1e01c9de7 100644 --- a/satpy/readers/viirs_l2_jrr.py +++ b/satpy/readers/viirs_l2_jrr.py @@ -25,14 +25,14 @@ - Cloud mask: JRR-CloudMask_v2r3_j01_s202112250807275_e202112250808520_c202112250837300.nc - Aerosol properties: JRR-ADP_v2r3_j01_s202112250807275_e202112250808520_c202112250839550.nc - Surface reflectance: SurfRefl_v1r1_j01_s202112250807275_e202112250808520_c202112250845080.nc -All products use the same base reader `viirs_jrr` and can be read through satpy with:: +All products use the same base reader `viirs_l2_jrr` and can be read through satpy with:: import satpy import glob filenames = glob.glob('JRR-ADP*.nc') scene = satpy.Scene(filenames, - reader='viirs_jrr') + reader='viirs_l2_jrr') scene.load(['smoke_concentration']) NOTE: diff --git a/satpy/tests/reader_tests/test_viirs_l2_jrr.py b/satpy/tests/reader_tests/test_viirs_l2_jrr.py index 4364b5c43c..c572481adb 100644 --- a/satpy/tests/reader_tests/test_viirs_l2_jrr.py +++ b/satpy/tests/reader_tests/test_viirs_l2_jrr.py @@ -15,7 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . -"""Module for testing the satpy.readers.viirs_jrr module. +"""Module for testing the satpy.readers.viirs_l2_jrr module. Note: This is adapted from the test_slstr_l2.py code. """ From a65f58dfff0b71a6a92d5dff4b1a3c7d918f4c4f Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 11 Jan 2022 15:40:40 +0000 Subject: [PATCH 0038/1416] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- satpy/readers/viirs_l2_jrr.py | 8 +++++--- satpy/tests/reader_tests/test_viirs_l2_jrr.py | 6 +++--- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/satpy/readers/viirs_l2_jrr.py b/satpy/readers/viirs_l2_jrr.py index e1e01c9de7..a27e9c30dc 100644 --- a/satpy/readers/viirs_l2_jrr.py +++ b/satpy/readers/viirs_l2_jrr.py @@ -42,11 +42,13 @@ """ -from satpy.readers.file_handlers import BaseFileHandler -from satpy import CHUNK_SIZE -import xarray as xr import logging +import xarray as xr + +from satpy import CHUNK_SIZE +from satpy.readers.file_handlers import BaseFileHandler + LOG = logging.getLogger(__name__) diff --git a/satpy/tests/reader_tests/test_viirs_l2_jrr.py b/satpy/tests/reader_tests/test_viirs_l2_jrr.py index c572481adb..a462ec1416 100644 --- a/satpy/tests/reader_tests/test_viirs_l2_jrr.py +++ b/satpy/tests/reader_tests/test_viirs_l2_jrr.py @@ -20,12 +20,12 @@ Note: This is adapted from the test_slstr_l2.py code. 
""" -from unittest.mock import MagicMock +import unittest from datetime import datetime from unittest import mock -import xarray as xr -import unittest +from unittest.mock import MagicMock +import xarray as xr from satpy.readers.viirs_l2_jrr import VIIRSJRRFileHandler From 6e5d46d8611195984796cccafae5f5844b77163b Mon Sep 17 00:00:00 2001 From: simonrp84 Date: Tue, 11 Jan 2022 16:26:35 +0000 Subject: [PATCH 0039/1416] Remove unnecessary indentation. --- satpy/readers/viirs_l2_jrr.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/satpy/readers/viirs_l2_jrr.py b/satpy/readers/viirs_l2_jrr.py index a27e9c30dc..8d07b3a7c5 100644 --- a/satpy/readers/viirs_l2_jrr.py +++ b/satpy/readers/viirs_l2_jrr.py @@ -27,13 +27,13 @@ - Surface reflectance: SurfRefl_v1r1_j01_s202112250807275_e202112250808520_c202112250845080.nc All products use the same base reader `viirs_l2_jrr` and can be read through satpy with:: - import satpy - import glob + import satpy + import glob - filenames = glob.glob('JRR-ADP*.nc') - scene = satpy.Scene(filenames, - reader='viirs_l2_jrr') - scene.load(['smoke_concentration']) + filenames = glob.glob('JRR-ADP*.nc') + scene = satpy.Scene(filenames, + reader='viirs_l2_jrr') + scene.load(['smoke_concentration']) NOTE: Multiple products contain datasets with the same name! For example, both the cloud mask From 076748c348467aded6b7ca038c8f591ee824c7af Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 10 Mar 2022 19:27:38 -0600 Subject: [PATCH 0040/1416] Remove unused navigations section in modis_l1b reader --- satpy/etc/readers/modis_l1b.yaml | 9 --------- 1 file changed, 9 deletions(-) diff --git a/satpy/etc/readers/modis_l1b.yaml b/satpy/etc/readers/modis_l1b.yaml index 3670a1af7d..033677739f 100644 --- a/satpy/etc/readers/modis_l1b.yaml +++ b/satpy/etc/readers/modis_l1b.yaml @@ -5,15 +5,6 @@ reader: reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [modis] -navigations: - hdf_eos_geo: - description: MODIS navigation - file_type: hdf_eos_geo - latitude_key: Latitude - longitude_key: Longitude - nadir_resolution: [1000] - rows_per_scan: 10 - datasets: '1': name: '1' From 20bf879233a22990e886dc3751d8cab21961e12c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 10 Mar 2022 19:33:03 -0600 Subject: [PATCH 0041/1416] Add resolution-dependent chunk sizing to 'modis_l1b' reader --- satpy/readers/hdfeos_base.py | 44 +++++++++++++++++++++- satpy/readers/modis_l1b.py | 9 +++-- satpy/tests/reader_tests/test_modis_l1b.py | 21 +++++++++-- 3 files changed, 65 insertions(+), 9 deletions(-) diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index 731611765c..af24a855f1 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -25,12 +25,13 @@ from contextlib import suppress from datetime import datetime +import dask.array.core import numpy as np import xarray as xr from pyhdf.error import HDF4Error from pyhdf.SD import SD -from satpy import CHUNK_SIZE, DataID +from satpy import DataID from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) @@ -216,7 +217,8 @@ def load_dataset(self, dataset_name, is_category=False): from satpy.readers.hdf4_utils import from_sds dataset = self._read_dataset_in_file(dataset_name) - dask_arr = from_sds(dataset, chunks=CHUNK_SIZE) + chunks = self._chunks_for_variable(dataset) + dask_arr = from_sds(dataset, chunks=chunks) dims = ('y', 'x') if dask_arr.ndim == 2 else None data = xr.DataArray(dask_arr, dims=dims, 
attrs=dataset.attributes()) @@ -224,6 +226,39 @@ def load_dataset(self, dataset_name, is_category=False): return data + def _chunks_for_variable(self, hdf_dataset): + scan_length_250m = 40 + var_shape = hdf_dataset.info()[2] + res_multiplier = self._get_res_multiplier(var_shape) + non_yx_chunks = tuple() + if len(var_shape) == 3: + # assume (band, y, x) + non_yx_chunks = ((1,) * var_shape[0],) + var_shape = var_shape[-2:] + elif len(var_shape) != 2: + # don't guess + return dask.array.core.normalize_chunks("auto", shape=var_shape, dtype=np.float32) + shape_for_250m = tuple(dim_size * res_multiplier for dim_size in var_shape) + chunks_for_250m = dask.array.core.normalize_chunks(("auto", -1), shape=shape_for_250m, dtype=np.float32) + row_chunks_for_250m = chunks_for_250m[0][0] + scanbased_row_chunks_for_250m = np.round(row_chunks_for_250m / scan_length_250m) * scan_length_250m + var_row_chunks = scanbased_row_chunks_for_250m / res_multiplier + var_row_chunks = max(var_row_chunks, scan_length_250m / res_multiplier) # avoid getting 0 chunk size + return non_yx_chunks + (var_row_chunks, -1) + + @staticmethod + def _get_res_multiplier(var_shape): + num_columns_to_multiplier = { + 271: 20, # 5km + 1354: 4, # 1km + 2708: 2, # 500m + 5416: 1, # 250m + } + for max_columns, res_multiplier in num_columns_to_multiplier.items(): + if var_shape[-1] <= max_columns: + return res_multiplier + return 1 + def _scale_and_mask_data_array(self, data, is_category=False): good_mask, new_fill = self._get_good_data_mask(data, is_category=is_category) scale_factor = data.attrs.pop('scale_factor', None) @@ -356,14 +391,19 @@ def get_interpolated_dataset(self, name1, name2, resolution, offset=0): result2 = self._load_ds_by_name(name2) - offset try: sensor_zenith = self._load_ds_by_name('satellite_zenith_angle') + print("sensor_zenith: ", sensor_zenith.chunks) except KeyError: # no sensor zenith angle, do "simple" interpolation sensor_zenith = None + print("get_interpolated_dataset: ", name1, resolution, result1.data.chunks) + print("get_interpolated_dataset: ", name2, resolution, result2.data.chunks) result1, result2 = interpolate( result1, result2, sensor_zenith, self.geo_resolution, resolution ) + print("get_interpolated_dataset after: ", name1, result1.data.chunks) + print("get_interpolated_dataset after: ", name2, result2.data.chunks) self.cache[(name1, resolution)] = result1 self.cache[(name2, resolution)] = result2 + offset diff --git a/satpy/readers/modis_l1b.py b/satpy/readers/modis_l1b.py index f0b5a74e8a..00e0b59c10 100644 --- a/satpy/readers/modis_l1b.py +++ b/satpy/readers/modis_l1b.py @@ -48,7 +48,6 @@ import numpy as np import xarray as xr -from satpy import CHUNK_SIZE from satpy.readers.hdf4_utils import from_sds from satpy.readers.hdfeos_base import HDFEOSBaseFileReader, HDFEOSGeoReader @@ -95,8 +94,8 @@ def get_dataset(self, key, info): index = band_names.index(key['name']) except ValueError: continue - uncertainty = self.sd.select(dataset + "_Uncert_Indexes") - array = xr.DataArray(from_sds(subdata, chunks=CHUNK_SIZE)[index, :, :], + chunks = self._chunks_for_variable(subdata) + array = xr.DataArray(from_sds(subdata, chunks=chunks)[index, :, :], dims=['y', 'x']).astype(np.float32) valid_range = var_attrs['valid_range'] @@ -122,7 +121,9 @@ def get_dataset(self, key, info): array = array.where(array >= np.float32(valid_range[0])) array = array.where(array <= np.float32(valid_range[1])) - array = array.where(from_sds(uncertainty, chunks=CHUNK_SIZE)[index, :, :] < 15) + uncertainty = 
self.sd.select(dataset + "_Uncert_Indexes") + uncertainty_chunks = self._chunks_for_variable(uncertainty) + array = array.where(from_sds(uncertainty, chunks=uncertainty_chunks)[index, :, :] < 15) if key['calibration'] == 'brightness_temperature': projectable = calibrate_bt(array, var_attrs, index, key['name']) diff --git a/satpy/tests/reader_tests/test_modis_l1b.py b/satpy/tests/reader_tests/test_modis_l1b.py index 981fac39bb..6cb9fd47a0 100644 --- a/satpy/tests/reader_tests/test_modis_l1b.py +++ b/satpy/tests/reader_tests/test_modis_l1b.py @@ -41,6 +41,18 @@ def _check_shared_metadata(data_arr): assert "rows_per_scan" in data_arr.attrs assert isinstance(data_arr.attrs["rows_per_scan"], int) assert data_arr.attrs['reader'] == 'modis_l1b' + assert "resolution" in data_arr.attrs + res = data_arr.attrs["resolution"] + if res == 5000: + assert data_arr.chunks == ((2, 2, 2), (data_arr.shape[1],)) + elif res == 1000: + assert data_arr.chunks == ((10, 10, 10), (data_arr.shape[1],)) + elif res == 500: + assert data_arr.chunks == ((20, 20, 20), (data_arr.shape[1],)) + elif res == 250: + assert data_arr.chunks == ((40, 40, 40), (data_arr.shape[1],)) + else: + raise ValueError(f"Unexpected resolution: {res}") def _load_and_check_geolocation(scene, resolution, exp_res, exp_shape, has_res, @@ -137,7 +149,8 @@ def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, d shape_500m = _shape_for_resolution(500) shape_250m = _shape_for_resolution(250) default_shape = _shape_for_resolution(default_res) - with dask.config.set(scheduler=CustomScheduler(max_computes=1 + has_5km + has_500 + has_250)): + scheduler = CustomScheduler(max_computes=1 + has_5km + has_500 + has_250) + with dask.config.set({'scheduler': scheduler, 'array.chunk-size': '1 MiB'}): _load_and_check_geolocation(scene, "*", default_res, default_shape, True) _load_and_check_geolocation(scene, 5000, 5000, shape_5km, has_5km) _load_and_check_geolocation(scene, 500, 500, shape_500m, has_500) @@ -147,7 +160,8 @@ def test_load_sat_zenith_angle(self, modis_l1b_nasa_mod021km_file): """Test loading satellite zenith angle band.""" scene = Scene(reader='modis_l1b', filenames=modis_l1b_nasa_mod021km_file) dataset_name = 'satellite_zenith_angle' - scene.load([dataset_name]) + with dask.config.set({'array.chunk-size': '1 MiB'}): + scene.load([dataset_name]) dataset = scene[dataset_name] assert dataset.shape == _shape_for_resolution(1000) assert dataset.attrs['resolution'] == 1000 @@ -157,7 +171,8 @@ def test_load_vis(self, modis_l1b_nasa_mod021km_file): """Test loading visible band.""" scene = Scene(reader='modis_l1b', filenames=modis_l1b_nasa_mod021km_file) dataset_name = '1' - scene.load([dataset_name]) + with dask.config.set({'array.chunk-size': '1 MiB'}): + scene.load([dataset_name]) dataset = scene[dataset_name] assert dataset.shape == _shape_for_resolution(1000) assert dataset.attrs['resolution'] == 1000 From 876c69a5060f7df8fb9b63498f3b1f9ce4464810 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 11 Mar 2022 05:52:09 -0600 Subject: [PATCH 0042/1416] Remove debug print statements --- satpy/readers/hdfeos_base.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index af24a855f1..134e2926b5 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -391,19 +391,14 @@ def get_interpolated_dataset(self, name1, name2, resolution, offset=0): result2 = self._load_ds_by_name(name2) - offset try: sensor_zenith = 
self._load_ds_by_name('satellite_zenith_angle') - print("sensor_zenith: ", sensor_zenith.chunks) except KeyError: # no sensor zenith angle, do "simple" interpolation sensor_zenith = None - print("get_interpolated_dataset: ", name1, resolution, result1.data.chunks) - print("get_interpolated_dataset: ", name2, resolution, result2.data.chunks) result1, result2 = interpolate( result1, result2, sensor_zenith, self.geo_resolution, resolution ) - print("get_interpolated_dataset after: ", name1, result1.data.chunks) - print("get_interpolated_dataset after: ", name2, result2.data.chunks) self.cache[(name1, resolution)] = result1 self.cache[(name2, resolution)] = result2 + offset From fa7942d78f85a6680feca4b8202bed708486df06 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 6 May 2022 11:00:56 +0200 Subject: [PATCH 0043/1416] to_hvplot function New function to plot Scene datasets as Hvplot Overlay --- AUTHORS.md | 1 + satpy/scene.py | 65 ++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 66 insertions(+) diff --git a/AUTHORS.md b/AUTHORS.md index dd2b24750d..e2aa4be396 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -47,6 +47,7 @@ The following people have made contributions to this project: - [Andrea Meraner (ameraner)](https://github.com/ameraner) - [Aronne Merrelli (aronnem)](https://github.com/aronnem) - [Lucas Meyer (LTMeyer)](https://github.com/LTMeyer) +- [Luca Merucci (lmeru)](https://github.com/lmeru) - [Ondrej Nedelcev (nedelceo)](https://github.com/nedelceo) - [Oana Nicola](https://github.com/) - [Esben S. Nielsen (storpipfugl)](https://github.com/storpipfugl) diff --git a/satpy/scene.py b/satpy/scene.py index 261d84ea81..6931c5bc4d 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1021,6 +1021,71 @@ def to_geoviews(self, gvtype=None, datasets=None, kdims=None, vdims=None, dynami gview = gvds.to(gvtype, kdims=["x", "y"], vdims=vdims, dynamic=dynamic) return gview + + def to_hvplot(self,datasets=None, *args,**kwargs): + """ + Convert satpy Scene to Hvplot. + Args: + datasets (list): Limit included products to these datasets. + kwargs: hvplot options list. + + Returns: hvplot object that contains within it the plots of datasets list. + As default it contains all Scene datasets plots and a plot title is shown. 
+ + Example usage: + scene_list = ['ash','IR_108'] + plot = scn.to_hvplot(datasets=scene_list) + + plot.ash+plot.IR_108 + """ + + import hvplot.xarray + from holoviews import Overlay + from satpy import composites + from cartopy import crs + + def _get_crs(xarray_ds): + return xarray_ds.area.to_cartopy_crs() + + def _get_timestamp(xarray_ds): + time = xarray_ds.attrs['start_time'] + return time.strftime('%Y %m %d -- %H:%M UTC') + + def _get_units(xarray_ds,variable): + return xarray_ds[variable].attrs['units'] + + def _plot_rgb(xarray_ds, variable,**defaults): + img = composites.enhance2dataset(xarray_ds[variable]) + return img.hvplot.rgb(bands='bands',title=title, + clabel='',**defaults) + + def _plot_quadmesh(xarray_ds,variable,**defaults): + return xarray_ds[variable].hvplot.quadmesh( + clabel=f'[{_get_units(xarray_ds,variable)}]', + title=title,**defaults) + + plot = Overlay() + xarray_ds = self.to_xarray_dataset(datasets) + ccrs = _get_crs(xarray_ds) + + if datasets is None: datasets = list(xarray_ds.keys()) + + defaults = dict(x='x',y='y',data_aspect=1,project=True,geo=True, + crs=ccrs,projection=ccrs,rasterize=True, + coastline='110m',cmap='Plasma',responsive=True, + dynamic=False,framewise=True,colorbar=False, + global_extent=False,xlabel='Longitude',ylabel='Latitude') + + defaults.update(kwargs) + + for element in datasets: + title = f'{element} @ {_get_timestamp(xarray_ds)}' + if xarray_ds[element].shape[0] == 3: + plot[element] =_plot_rgb(xarray_ds,element,**defaults) + else: + plot[element]=_plot_quadmesh(xarray_ds,element,**defaults) + + return plot def to_xarray_dataset(self, datasets=None): """Merge all xr.DataArrays of a scene to a xr.DataSet. From 88d40023dd250bf4317ddee4d618a9ecf4fdfb66 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 6 May 2022 11:14:34 +0200 Subject: [PATCH 0044/1416] Add to_hvplot function to_hvplot function plot the Scene datasets as Hvplot Overlay. Added Luca Merucci in authors.md (we create this function together ) --- AUTHORS.md | 1 + satpy/scene.py | 65 ++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 66 insertions(+) diff --git a/AUTHORS.md b/AUTHORS.md index dd2b24750d..e2aa4be396 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -47,6 +47,7 @@ The following people have made contributions to this project: - [Andrea Meraner (ameraner)](https://github.com/ameraner) - [Aronne Merrelli (aronnem)](https://github.com/aronnem) - [Lucas Meyer (LTMeyer)](https://github.com/LTMeyer) +- [Luca Merucci (lmeru)](https://github.com/lmeru) - [Ondrej Nedelcev (nedelceo)](https://github.com/nedelceo) - [Oana Nicola](https://github.com/) - [Esben S. Nielsen (storpipfugl)](https://github.com/storpipfugl) diff --git a/satpy/scene.py b/satpy/scene.py index 261d84ea81..6931c5bc4d 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1021,6 +1021,71 @@ def to_geoviews(self, gvtype=None, datasets=None, kdims=None, vdims=None, dynami gview = gvds.to(gvtype, kdims=["x", "y"], vdims=vdims, dynamic=dynamic) return gview + + def to_hvplot(self,datasets=None, *args,**kwargs): + """ + Convert satpy Scene to Hvplot. + Args: + datasets (list): Limit included products to these datasets. + kwargs: hvplot options list. + + Returns: hvplot object that contains within it the plots of datasets list. + As default it contains all Scene datasets plots and a plot title is shown. 
+ + Example usage: + scene_list = ['ash','IR_108'] + plot = scn.to_hvplot(datasets=scene_list) + + plot.ash+plot.IR_108 + """ + + import hvplot.xarray + from holoviews import Overlay + from satpy import composites + from cartopy import crs + + def _get_crs(xarray_ds): + return xarray_ds.area.to_cartopy_crs() + + def _get_timestamp(xarray_ds): + time = xarray_ds.attrs['start_time'] + return time.strftime('%Y %m %d -- %H:%M UTC') + + def _get_units(xarray_ds,variable): + return xarray_ds[variable].attrs['units'] + + def _plot_rgb(xarray_ds, variable,**defaults): + img = composites.enhance2dataset(xarray_ds[variable]) + return img.hvplot.rgb(bands='bands',title=title, + clabel='',**defaults) + + def _plot_quadmesh(xarray_ds,variable,**defaults): + return xarray_ds[variable].hvplot.quadmesh( + clabel=f'[{_get_units(xarray_ds,variable)}]', + title=title,**defaults) + + plot = Overlay() + xarray_ds = self.to_xarray_dataset(datasets) + ccrs = _get_crs(xarray_ds) + + if datasets is None: datasets = list(xarray_ds.keys()) + + defaults = dict(x='x',y='y',data_aspect=1,project=True,geo=True, + crs=ccrs,projection=ccrs,rasterize=True, + coastline='110m',cmap='Plasma',responsive=True, + dynamic=False,framewise=True,colorbar=False, + global_extent=False,xlabel='Longitude',ylabel='Latitude') + + defaults.update(kwargs) + + for element in datasets: + title = f'{element} @ {_get_timestamp(xarray_ds)}' + if xarray_ds[element].shape[0] == 3: + plot[element] =_plot_rgb(xarray_ds,element,**defaults) + else: + plot[element]=_plot_quadmesh(xarray_ds,element,**defaults) + + return plot def to_xarray_dataset(self, datasets=None): """Merge all xr.DataArrays of a scene to a xr.DataSet. From 2fa9b87ef437e63c6c2e37ddec00f7aaf91b2dd5 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 6 May 2022 12:42:35 +0200 Subject: [PATCH 0045/1416] trying to follow and correct stickler-ci messages --- satpy/scene.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/satpy/scene.py b/satpy/scene.py index 6931c5bc4d..83361588dc 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1022,9 +1022,8 @@ def to_geoviews(self, gvtype=None, datasets=None, kdims=None, vdims=None, dynami return gview - def to_hvplot(self,datasets=None, *args,**kwargs): - """ - Convert satpy Scene to Hvplot. + def to_hvplot(self,datasets=None,*args,**kwargs): + """Convert satpy Scene to Hvplot. Args: datasets (list): Limit included products to these datasets. kwargs: hvplot options list. 
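Because everything in the ``defaults`` dict above is overridden through
``defaults.update(kwargs)``, callers can restyle the plots without touching the
method (a hedged sketch; ``scn`` is a Scene with the named datasets already
loaded and resampled to a single area, and the dataset names are illustrative)::

    plot = scn.to_hvplot(datasets=['ash', 'IR_108'],
                         cmap='viridis', coastline='50m', colorbar=True)
    plot.ash + plot.IR_108  # side-by-side holoviews layout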
@@ -1037,8 +1036,8 @@ def to_hvplot(self,datasets=None, *args,**kwargs): plot = scn.to_hvplot(datasets=scene_list) plot.ash+plot.IR_108 + """ - import hvplot.xarray from holoviews import Overlay from satpy import composites @@ -1054,7 +1053,7 @@ def _get_timestamp(xarray_ds): def _get_units(xarray_ds,variable): return xarray_ds[variable].attrs['units'] - def _plot_rgb(xarray_ds, variable,**defaults): + def _plot_rgb(xarray_ds,variable,**defaults): img = composites.enhance2dataset(xarray_ds[variable]) return img.hvplot.rgb(bands='bands',title=title, clabel='',**defaults) From fcfd481516b6ebfbe31f80f8395ecb9b67aee71a Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 6 May 2022 13:24:46 +0200 Subject: [PATCH 0046/1416] correction of whitespaces --- satpy/scene.py | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/satpy/scene.py b/satpy/scene.py index 83361588dc..61d85e3599 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1021,7 +1021,7 @@ def to_geoviews(self, gvtype=None, datasets=None, kdims=None, vdims=None, dynami gview = gvds.to(gvtype, kdims=["x", "y"], vdims=vdims, dynamic=dynamic) return gview - + def to_hvplot(self,datasets=None,*args,**kwargs): """Convert satpy Scene to Hvplot. Args: @@ -1030,29 +1030,29 @@ def to_hvplot(self,datasets=None,*args,**kwargs): Returns: hvplot object that contains within it the plots of datasets list. As default it contains all Scene datasets plots and a plot title is shown. - + Example usage: scene_list = ['ash','IR_108'] plot = scn.to_hvplot(datasets=scene_list) - + plot.ash+plot.IR_108 """ import hvplot.xarray from holoviews import Overlay from satpy import composites - from cartopy import crs - + from cartopy import crs + def _get_crs(xarray_ds): return xarray_ds.area.to_cartopy_crs() def _get_timestamp(xarray_ds): time = xarray_ds.attrs['start_time'] return time.strftime('%Y %m %d -- %H:%M UTC') - + def _get_units(xarray_ds,variable): return xarray_ds[variable].attrs['units'] - + def _plot_rgb(xarray_ds,variable,**defaults): img = composites.enhance2dataset(xarray_ds[variable]) return img.hvplot.rgb(bands='bands',title=title, @@ -1062,11 +1062,11 @@ def _plot_quadmesh(xarray_ds,variable,**defaults): return xarray_ds[variable].hvplot.quadmesh( clabel=f'[{_get_units(xarray_ds,variable)}]', title=title,**defaults) - + plot = Overlay() xarray_ds = self.to_xarray_dataset(datasets) ccrs = _get_crs(xarray_ds) - + if datasets is None: datasets = list(xarray_ds.keys()) defaults = dict(x='x',y='y',data_aspect=1,project=True,geo=True, @@ -1074,18 +1074,18 @@ def _plot_quadmesh(xarray_ds,variable,**defaults): coastline='110m',cmap='Plasma',responsive=True, dynamic=False,framewise=True,colorbar=False, global_extent=False,xlabel='Longitude',ylabel='Latitude') - + defaults.update(kwargs) - + for element in datasets: title = f'{element} @ {_get_timestamp(xarray_ds)}' if xarray_ds[element].shape[0] == 3: plot[element] =_plot_rgb(xarray_ds,element,**defaults) else: plot[element]=_plot_quadmesh(xarray_ds,element,**defaults) - - return plot + return plot + def to_xarray_dataset(self, datasets=None): """Merge all xr.DataArrays of a scene to a xr.DataSet. 
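Stepping back to the resolution-dependent chunk sizing added to
``hdfeos_base.py`` earlier in this series: the row-chunk arithmetic can be
reproduced standalone (a sketch mirroring ``_chunks_for_variable`` for a 1 km
band, assuming the 1 MiB ``array.chunk-size`` configured in the tests; the
shape is illustrative)::

    import dask
    import dask.array.core
    import numpy as np

    scan_length_250m = 40
    var_shape = (2030, 1354)   # (rows, cols) of a 1 km MODIS variable
    res_multiplier = 4         # 1354 columns -> 1 km -> x4 to reach the 250 m grid
    with dask.config.set({'array.chunk-size': '1 MiB'}):
        shape_250m = tuple(s * res_multiplier for s in var_shape)  # (8120, 5416)
        chunks_250m = dask.array.core.normalize_chunks(
            ("auto", -1), shape=shape_250m, dtype=np.float32)
    # ~48 rows fit in 1 MiB at 250 m; snap to whole 40-row scans, then rescale
    rows_250m = np.round(chunks_250m[0][0] / scan_length_250m) * scan_length_250m
    rows_1km = max(rows_250m, scan_length_250m) / res_multiplier
    print(rows_1km)  # 10.0 -> one full 10-row scan per chunk, as the tests expect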
From dfd93ec74f6f0e1658400f78e46fec2ec03ebedc Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 6 May 2022 13:51:41 +0200 Subject: [PATCH 0047/1416] correction whitespaces --- satpy/scene.py | 29 ++++++++++++++--------------- 1 file changed, 14 insertions(+), 15 deletions(-) diff --git a/satpy/scene.py b/satpy/scene.py index 61d85e3599..097c19e521 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1022,7 +1022,7 @@ def to_geoviews(self, gvtype=None, datasets=None, kdims=None, vdims=None, dynami return gview - def to_hvplot(self,datasets=None,*args,**kwargs): + def to_hvplot(self, datasets=None, *args, **kwargs): """Convert satpy Scene to Hvplot. Args: datasets (list): Limit included products to these datasets. @@ -1050,18 +1050,18 @@ def _get_timestamp(xarray_ds): time = xarray_ds.attrs['start_time'] return time.strftime('%Y %m %d -- %H:%M UTC') - def _get_units(xarray_ds,variable): + def _get_units(xarray_ds, variable): return xarray_ds[variable].attrs['units'] - def _plot_rgb(xarray_ds,variable,**defaults): + def _plot_rgb(xarray_ds, variable, **defaults): img = composites.enhance2dataset(xarray_ds[variable]) - return img.hvplot.rgb(bands='bands',title=title, - clabel='',**defaults) + return img.hvplot.rgb(bands='bands', title=title, + clabel='', **defaults) - def _plot_quadmesh(xarray_ds,variable,**defaults): + def _plot_quadmesh(xarray_ds, variable, **defaults): return xarray_ds[variable].hvplot.quadmesh( - clabel=f'[{_get_units(xarray_ds,variable)}]', - title=title,**defaults) + clabel=f'[{_get_units(xarray_ds,variable)}]', title=title, + **defaults) plot = Overlay() xarray_ds = self.to_xarray_dataset(datasets) @@ -1069,20 +1069,19 @@ def _plot_quadmesh(xarray_ds,variable,**defaults): if datasets is None: datasets = list(xarray_ds.keys()) - defaults = dict(x='x',y='y',data_aspect=1,project=True,geo=True, - crs=ccrs,projection=ccrs,rasterize=True, - coastline='110m',cmap='Plasma',responsive=True, - dynamic=False,framewise=True,colorbar=False, - global_extent=False,xlabel='Longitude',ylabel='Latitude') + defaults = dict(x='x', y='y', data_aspect=1, project=True, geo=True, + crs=ccrs, projection=ccrs, rasterize=True, coastline='110m', + cmap='Plasma', responsive=True, dynamic=False, framewise=True, + colorbar=False, global_extent=False, xlabel='Longitude', ylabel='Latitude') defaults.update(kwargs) for element in datasets: title = f'{element} @ {_get_timestamp(xarray_ds)}' if xarray_ds[element].shape[0] == 3: - plot[element] =_plot_rgb(xarray_ds,element,**defaults) + plot[element] = _plot_rgb(xarray_ds, element, **defaults) else: - plot[element]=_plot_quadmesh(xarray_ds,element,**defaults) + plot[element] = _plot_quadmesh(xarray_ds, element, **defaults) return plot From c0022f3da8f24ca9b410c4d0b3159ef4e1d3e929 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 6 May 2022 14:05:17 +0200 Subject: [PATCH 0048/1416] correction whitespaces --- satpy/scene.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/satpy/scene.py b/satpy/scene.py index 097c19e521..2ea3c041c3 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1069,10 +1069,11 @@ def _plot_quadmesh(xarray_ds, variable, **defaults): if datasets is None: datasets = list(xarray_ds.keys()) - defaults = dict(x='x', y='y', data_aspect=1, project=True, geo=True, - crs=ccrs, projection=ccrs, rasterize=True, coastline='110m', - cmap='Plasma', responsive=True, dynamic=False, framewise=True, - colorbar=False, global_extent=False, xlabel='Longitude', ylabel='Latitude') + defaults = dict(x='x', y='y', 
data_aspect=1, project=True, geo=True, + crs=ccrs, projection=ccrs, rasterize=True, coastline='110m', + cmap='Plasma', responsive=True, dynamic=False, framewise=True, + colorbar=False, global_extent=False, xlabel='Longitude', + ylabel='Latitude') defaults.update(kwargs) From 249c4209c61bdf81f81ebfc81694e0f16fa25e11 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 6 May 2022 19:46:29 +0200 Subject: [PATCH 0049/1416] function correction for pull request correction whitespaces, import libraries at beginning --- AUTHORS.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/AUTHORS.md b/AUTHORS.md index e2aa4be396..85adb23559 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -46,8 +46,8 @@ The following people have made contributions to this project: - [Lu Liu (yukaribbba)](https://github.com/yukaribbba) - [Andrea Meraner (ameraner)](https://github.com/ameraner) - [Aronne Merrelli (aronnem)](https://github.com/aronnem) -- [Lucas Meyer (LTMeyer)](https://github.com/LTMeyer) - [Luca Merucci (lmeru)](https://github.com/lmeru) +- [Lucas Meyer (LTMeyer)](https://github.com/LTMeyer) - [Ondrej Nedelcev (nedelceo)](https://github.com/nedelceo) - [Oana Nicola](https://github.com/) - [Esben S. Nielsen (storpipfugl)](https://github.com/storpipfugl) From 2f2e8a84eb05b07a8a7464e1766873efb90052fc Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 6 May 2022 21:10:21 +0200 Subject: [PATCH 0050/1416] Add to_hvplot functon --- satpy/scene.py | 27 +++++++++++++-------------- 1 file changed, 13 insertions(+), 14 deletions(-) diff --git a/satpy/scene.py b/satpy/scene.py index 2ea3c041c3..ecaa58bfed 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -23,12 +23,14 @@ import warnings from typing import Callable +import hvplot.xarray # noqa import numpy as np import xarray as xr +from holoviews import Overlay from pyresample.geometry import AreaDefinition, BaseDefinition, SwathDefinition from xarray import DataArray -from satpy.composites import IncompatibleAreas +from satpy.composites import IncompatibleAreas, enhance2dataset from satpy.composites.config_loader import load_compositor_configs_for_sensors from satpy.dataset import DataID, DataQuery, DatasetDict, combine_metadata, dataset_walker, replace_anc from satpy.dependency_tree import DependencyTree @@ -1024,11 +1026,12 @@ def to_geoviews(self, gvtype=None, datasets=None, kdims=None, vdims=None, dynami def to_hvplot(self, datasets=None, *args, **kwargs): """Convert satpy Scene to Hvplot. - Args: + + Args: datasets (list): Limit included products to these datasets. kwargs: hvplot options list. - Returns: hvplot object that contains within it the plots of datasets list. + Returns: hvplot object that contains within it the plots of datasets list. As default it contains all Scene datasets plots and a plot title is shown. 
Example usage: @@ -1038,11 +1041,6 @@ def to_hvplot(self, datasets=None, *args, **kwargs): plot.ash+plot.IR_108 """ - import hvplot.xarray - from holoviews import Overlay - from satpy import composites - from cartopy import crs - def _get_crs(xarray_ds): return xarray_ds.area.to_cartopy_crs() @@ -1054,22 +1052,23 @@ def _get_units(xarray_ds, variable): return xarray_ds[variable].attrs['units'] def _plot_rgb(xarray_ds, variable, **defaults): - img = composites.enhance2dataset(xarray_ds[variable]) + img = enhance2dataset(xarray_ds[variable]) return img.hvplot.rgb(bands='bands', title=title, clabel='', **defaults) def _plot_quadmesh(xarray_ds, variable, **defaults): return xarray_ds[variable].hvplot.quadmesh( - clabel=f'[{_get_units(xarray_ds,variable)}]', title=title, + clabel=f'[{_get_units(xarray_ds,variable)}]', title=title, **defaults) - plot = Overlay() + plot = Overlay() xarray_ds = self.to_xarray_dataset(datasets) ccrs = _get_crs(xarray_ds) - if datasets is None: datasets = list(xarray_ds.keys()) + if datasets is None: + datasets = list(xarray_ds.keys()) - defaults = dict(x='x', y='y', data_aspect=1, project=True, geo=True, + defaults = dict(x='x', y='y', data_aspect=1, project=True, geo=True, crs=ccrs, projection=ccrs, rasterize=True, coastline='110m', cmap='Plasma', responsive=True, dynamic=False, framewise=True, colorbar=False, global_extent=False, xlabel='Longitude', @@ -1085,7 +1084,7 @@ def _plot_quadmesh(xarray_ds, variable, **defaults): plot[element] = _plot_quadmesh(xarray_ds, element, **defaults) return plot - + def to_xarray_dataset(self, datasets=None): """Merge all xr.DataArrays of a scene to a xr.DataSet. From ac8a36167736df51b5f3659be2270520848dd97d Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 6 May 2022 21:26:39 +0200 Subject: [PATCH 0051/1416] add hvplot in extras require --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index 0f10d9d15d..2ca2def303 100644 --- a/setup.py +++ b/setup.py @@ -78,6 +78,7 @@ 'doc': ['sphinx', 'sphinx_rtd_theme', 'sphinxcontrib-apidoc'], # Other 'geoviews': ['geoviews'], + 'hvplot': ['hvplot'], 'overlays': ['pycoast', 'pydecorate'], 'tests': test_requires, } From d2c80fb9876f9aa82527a17a33ed324b2c8d677f Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 6 May 2022 23:24:07 +0200 Subject: [PATCH 0052/1416] add hvplot in test require --- setup.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 2ca2def303..099a3f63b7 100644 --- a/setup.py +++ b/setup.py @@ -37,7 +37,9 @@ test_requires = ['behave', 'h5py', 'netCDF4', 'pyhdf', 'imageio', 'pylibtiff', 'rasterio', 'geoviews', 'trollimage', 'fsspec', 'bottleneck', - 'rioxarray', 'pytest', 'pytest-lazy-fixture', 'defusedxml'] + 'rioxarray', 'pytest', 'pytest-lazy-fixture', 'defusedxml', + 'hvplot'] + extras_require = { # Readers: From 09b8f6ee3252ca52e9ba8fd14ecf69cf7f4b49f9 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Sat, 7 May 2022 19:47:01 +0200 Subject: [PATCH 0053/1416] Answer to #issuecomment-1120099909 --- continuous_integration/environment.yaml | 1 + satpy/scene.py | 10 +++++++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index d131bad1e0..84c5b4be68 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -35,6 +35,7 @@ dependencies: - mock - libtiff - geoviews + - hvplot - zarr - python-eccodes # 2.19.1 seems to cause library linking 
issues diff --git a/satpy/scene.py b/satpy/scene.py index ecaa58bfed..c013957c6e 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -23,7 +23,6 @@ import warnings from typing import Callable -import hvplot.xarray # noqa import numpy as np import xarray as xr from holoviews import Overlay @@ -39,6 +38,12 @@ from satpy.resample import get_area_def, prepare_resampler, resample_dataset from satpy.writers import load_writer +try: + import hvplot.xarray # noqa +except ImportError: + hvplot.xarray = None + + LOG = logging.getLogger(__name__) @@ -1061,6 +1066,9 @@ def _plot_quadmesh(xarray_ds, variable, **defaults): clabel=f'[{_get_units(xarray_ds,variable)}]', title=title, **defaults) + if hvplot.xarray is None: + raise ImportError("'hvplot' must be installed to use this feature") + plot = Overlay() xarray_ds = self.to_xarray_dataset(datasets) ccrs = _get_crs(xarray_ds) From d0299dd98254e0a37406e8dee7c3fec6d041c227 Mon Sep 17 00:00:00 2001 From: bornagain Date: Sat, 7 May 2022 21:35:43 +0200 Subject: [PATCH 0054/1416] Update satpy/scene.py Co-authored-by: Panu Lahtinen --- satpy/scene.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/scene.py b/satpy/scene.py index c013957c6e..4a73f99a14 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1034,7 +1034,7 @@ def to_hvplot(self, datasets=None, *args, **kwargs): Args: datasets (list): Limit included products to these datasets. - kwargs: hvplot options list. + kwargs: hvplot options dictionary. Returns: hvplot object that contains within it the plots of datasets list. As default it contains all Scene datasets plots and a plot title is shown. From aed6a9173fd8bf5dd33e7c8a5ca1ee3755e577a5 Mon Sep 17 00:00:00 2001 From: bornagain Date: Sat, 7 May 2022 21:35:58 +0200 Subject: [PATCH 0055/1416] Update satpy/scene.py Co-authored-by: Panu Lahtinen --- satpy/scene.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/scene.py b/satpy/scene.py index 4a73f99a14..1468487da6 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1039,7 +1039,8 @@ def to_hvplot(self, datasets=None, *args, **kwargs): Returns: hvplot object that contains within it the plots of datasets list. As default it contains all Scene datasets plots and a plot title is shown. 
- Example usage: + Example usage:: + scene_list = ['ash','IR_108'] plot = scn.to_hvplot(datasets=scene_list) From 4687ba437dc029514afe518b96caf7b0bcd83709 Mon Sep 17 00:00:00 2001 From: bornagain Date: Sat, 7 May 2022 21:37:07 +0200 Subject: [PATCH 0056/1416] Update satpy/scene.py Co-authored-by: Panu Lahtinen --- satpy/scene.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/scene.py b/satpy/scene.py index 1468487da6..7501ee54b7 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -39,9 +39,9 @@ from satpy.writers import load_writer try: - import hvplot.xarray # noqa + import hvplot.xarray as hvplot_xarray # noqa except ImportError: - hvplot.xarray = None + hvplot_xarray = None LOG = logging.getLogger(__name__) From 1aff451cd1481077090bc6da047b96693ad4b588 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Sat, 7 May 2022 21:41:55 +0200 Subject: [PATCH 0057/1416] Update --- satpy/scene.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/scene.py b/satpy/scene.py index 7501ee54b7..1fcd7fc057 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1067,7 +1067,7 @@ def _plot_quadmesh(xarray_ds, variable, **defaults): clabel=f'[{_get_units(xarray_ds,variable)}]', title=title, **defaults) - if hvplot.xarray is None: + if hvplot_xarray is None: raise ImportError("'hvplot' must be installed to use this feature") plot = Overlay() From e52fd96f17944841752fd3764c01fe39075a19d9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Benjamin=20R=C3=B6sner?= Date: Mon, 1 Aug 2022 07:50:15 +0200 Subject: [PATCH 0058/1416] feat: add first draft of area list --- doc/source/area_def_list.py | 47 +++++++++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 doc/source/area_def_list.py diff --git a/doc/source/area_def_list.py b/doc/source/area_def_list.py new file mode 100644 index 0000000000..7b67002998 --- /dev/null +++ b/doc/source/area_def_list.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# satpy is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with satpy. If not, see . +"""Module for autogenerating a list and overview of available area definitions .""" + +from pyresample.area_config import _read_yaml_area_file_content + +from satpy.resample import get_area_def, get_area_file + + +def generate_area_def_list(): + """Create list of available area definitions with overview plot. + + Returns: + str + """ + area_list = [] + + template = ("{area_name}\n" + "----------\n" + ".. 
raw:: html\n" + " {content}\n\n") + + area_file = get_area_file()[0] + for aname in [list(_read_yaml_area_file_content(area_file).keys())[0]]: + area = get_area_def(aname) + if hasattr(area, "_repr_html_"): + area_list.append(template.format(area_name=aname, content=area._repr_html_())) + else: + pass + + return "".join(area_list) From 96ae5d82de6156403381386db5f6678462609105 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Benjamin=20R=C3=B6sner?= Date: Mon, 1 Aug 2022 08:50:47 +0200 Subject: [PATCH 0059/1416] fix: indentation of raw html block --- doc/source/area_def_list.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/doc/source/area_def_list.py b/doc/source/area_def_list.py index 7b67002998..9624f2937b 100644 --- a/doc/source/area_def_list.py +++ b/doc/source/area_def_list.py @@ -32,15 +32,17 @@ def generate_area_def_list(): area_list = [] template = ("{area_name}\n" - "----------\n" - ".. raw:: html\n" + "{n:->{header_title_length}}\n\n" + ".. raw:: html\n\n" " {content}\n\n") area_file = get_area_file()[0] for aname in [list(_read_yaml_area_file_content(area_file).keys())[0]]: area = get_area_def(aname) if hasattr(area, "_repr_html_"): - area_list.append(template.format(area_name=aname, content=area._repr_html_())) + content = "\n".join([x.rjust(len(x) + 5) for x in area._repr_html_().split("\n")]) + area_list.append(template.format(area_name=aname, n="", header_title_length=len(aname), + content=content)) else: pass From 546bb5f938d44ca1231227cd343bd13507c8885b Mon Sep 17 00:00:00 2001 From: bornagain Date: Tue, 2 Aug 2022 08:09:48 +0200 Subject: [PATCH 0060/1416] Update setup.py Co-authored-by: Panu Lahtinen --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 099a3f63b7..af6819d6e5 100644 --- a/setup.py +++ b/setup.py @@ -80,7 +80,7 @@ 'doc': ['sphinx', 'sphinx_rtd_theme', 'sphinxcontrib-apidoc'], # Other 'geoviews': ['geoviews'], - 'hvplot': ['hvplot'], + 'hvplot': ['hvplot', 'geoviews', 'cartopy'], 'overlays': ['pycoast', 'pydecorate'], 'tests': test_requires, } From bdf89b7b30cb4b84a157168ba142c8395baee24a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Benjamin=20R=C3=B6sner?= Date: Tue, 2 Aug 2022 10:00:38 +0200 Subject: [PATCH 0061/1416] add: area_def_list.rst to gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 8990fa1d46..25eee3bf96 100644 --- a/.gitignore +++ b/.gitignore @@ -75,3 +75,4 @@ doc/source/_build/* satpy/version.py doc/source/api/*.rst doc/source/reader_table.rst +doc/source/area_def_list.rst From e310747b996f5856a1803d86a210718728644bb8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Benjamin=20R=C3=B6sner?= Date: Tue, 2 Aug 2022 10:03:26 +0200 Subject: [PATCH 0062/1416] add: area definition list to resample chapter --- doc/source/area_def_list.py | 8 +++++--- doc/source/conf.py | 4 ++++ satpy/resample.py | 7 ++++++- 3 files changed, 15 insertions(+), 4 deletions(-) diff --git a/doc/source/area_def_list.py b/doc/source/area_def_list.py index 9624f2937b..665c770569 100644 --- a/doc/source/area_def_list.py +++ b/doc/source/area_def_list.py @@ -19,6 +19,7 @@ """Module for autogenerating a list and overview of available area definitions .""" from pyresample.area_config import _read_yaml_area_file_content +from pyresample.formatting_html import area_repr from satpy.resample import get_area_def, get_area_file @@ -34,13 +35,14 @@ def generate_area_def_list(): template = ("{area_name}\n" "{n:->{header_title_length}}\n\n" ".. 
raw:: html\n\n" - " {content}\n\n") + "{content}\n\n" + "
\n\n") area_file = get_area_file()[0] - for aname in [list(_read_yaml_area_file_content(area_file).keys())[0]]: + for aname in list(_read_yaml_area_file_content(area_file).keys()): area = get_area_def(aname) if hasattr(area, "_repr_html_"): - content = "\n".join([x.rjust(len(x) + 5) for x in area._repr_html_().split("\n")]) + content = "\n".join([x.rjust(len(x) + 5) for x in area_repr(area, include_header=False).split("\n")]) area_list.append(template.format(area_name=aname, n="", header_title_length=len(aname), content=content)) else: diff --git a/doc/source/conf.py b/doc/source/conf.py index 10dbde5c98..975622b345 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -26,6 +26,7 @@ sys.path.append(os.path.abspath('../../')) sys.path.append(os.path.abspath(os.path.dirname(__file__))) +from area_def_list import generate_area_def_list # noqa: E402 from reader_table import generate_reader_table # noqa: E402 # The version info for the project you're documenting, acts as replacement for @@ -80,6 +81,9 @@ def __getattr__(cls, name): with open("reader_table.rst", mode="w") as f: f.write(generate_reader_table()) +with open("area_def_list.rst", mode="w") as f: + f.write(generate_area_def_list()) + # -- General configuration ----------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be extensions diff --git a/satpy/resample.py b/satpy/resample.py index a8230ae8ed..ed2e08a921 100644 --- a/satpy/resample.py +++ b/satpy/resample.py @@ -135,7 +135,12 @@ For examples of area definitions, see the file ``etc/areas.yaml`` that is included with Satpy and where all the area definitions shipped with Satpy are -defined. +defined. The section below gives an overview of these area definitions. + +Area definitions included in Satpy +---------------------------------- + +.. include:: area_def_list.rst """ import hashlib From 3bf941c125bdfc3d74ce58c31a42311d6dacff25 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Thu, 3 Nov 2022 13:33:40 +0100 Subject: [PATCH 0063/1416] Refactor CF Writer --- satpy/scene.py | 91 ++++++++++++++ satpy/writers/cf_writer.py | 246 ++++++++++++++++++++++++++++++------- 2 files changed, 290 insertions(+), 47 deletions(-) diff --git a/satpy/scene.py b/satpy/scene.py index 5aab323042..3f60fb38b4 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1070,6 +1070,97 @@ def to_xarray_dataset(self, datasets=None): ds.attrs = mdata return ds + def to_xarray(self, + datasets=None, # DataID + header_attrs=None, + exclude_attrs=None, + flatten_attrs=False, + pretty=True, + include_lonlats=True, + epoch=None, + include_orig_name=True, + numeric_name_prefix='CHANNEL_'): + """Merge all xr.DataArray(s) of a satpy.Scene to a CF-compliant xr.Dataset. + + Parameters + ---------- + datasets (iterable): + List of Satpy Scene datasets to include in the output xr.Dataset. + Elements can be string name, a wavelength as a number, a DataID, + or DataQuery object. + If None (the default), it include all loaded Scene datasets. + header_attrs: + Global attributes of the output xr.Dataset. + epoch (str): + Reference time for encoding the time coordinates (if available). + Example format: "seconds since 1970-01-01 00:00:00". + If None, the default reference time is retrieved using "from satpy.cf_writer import EPOCH" + flatten_attrs (bool): + If True, flatten dict-type attributes. + exclude_attrs (list): + List of xr.DataArray attribute names to be excluded. + include_lonlats (bool): + If True, it includes 'latitude' and 'longitude' coordinates. 
+ If the 'area' attribute is a SwathDefinition, it always includes + latitude and longitude coordinates. + pretty (bool): + Don't modify coordinate names, if possible. Makes the file prettier, + but possibly less consistent. + include_orig_name (bool). + Include the original dataset name as a variable attribute in the xr.Dataset. + numeric_name_prefix (str): + Prefix to add the each variable with name starting with a digit. + Use '' or None to leave this out. + + Returns + ------- + ds, xr.Dataset + A CF-compliant xr.Dataset + + """ + from satpy.writers.cf_writer import EPOCH, collect_cf_datasets + + # Retrieve epoch + if epoch is None: + epoch = EPOCH + + # Check datasets + # - If None, retrieve all loaded datasets + if isinstance(datasets, str): + datasets = [datasets] + if datasets is None: + datasets = list(self.keys()) # list DataIDs + + # Get list of DataArrays + list_dataarrays = self._get_dataarrays_from_identifiers(datasets) + # Check that some DataArray could be returned + # TODO: DECIDE BEHAVIOUR + if len(list_dataarrays) == 0: + return xr.Dataset() + if not list_dataarrays: + raise RuntimeError("None of the requested datasets have been " + "generated or could not be loaded. Requested " + "composite inputs may need to have matching " + "dimensions (eg. through resampling).") + # Collect xr.Dataset for each group + grouped_datasets, header_attrs = collect_cf_datasets(list_dataarrays=list_dataarrays, + header_attrs=header_attrs, + exclude_attrs=exclude_attrs, + flatten_attrs=flatten_attrs, + pretty=pretty, + include_lonlats=include_lonlats, + epoch=epoch, + include_orig_name=include_orig_name, + numeric_name_prefix=numeric_name_prefix, + groups=None) + if len(grouped_datasets) == 1: + ds = grouped_datasets[None] + return ds + else: + msg = """The Scene object contains datasets with different dimensions. + Resample the Scene to have matching dimensions using i.e. scn.resample("native") """ + raise NotImplementedError(msg) + def _get_dataarrays_from_identifiers(self, identifiers): if identifiers is not None: dataarrays = [self[ds] for ds in identifiers] diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index ca0e2055fb..10858490ae 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -186,6 +186,7 @@ np.string_] # Unsigned and int64 isn't CF 1.7 compatible +# Note: Unsigned and int64 are CF 1.9 compatible CF_DTYPES = [np.dtype('int8'), np.dtype('int16'), np.dtype('int32'), @@ -611,6 +612,144 @@ def _get_groups(groups, datasets, root): return groups_ +def collect_cf_datasets(list_dataarrays, + header_attrs=None, + exclude_attrs=None, + flatten_attrs=False, + pretty=True, + include_lonlats=True, + epoch=EPOCH, + include_orig_name=True, + numeric_name_prefix='CHANNEL_', + compression=None, # TODO [DEPRECATED] + groups=None): + """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Datasets. + + If the xr.DataArrays does not share the same dimensions, it creates a collection + of xr.Datasets sharing the same dimensions. + + Parameters + ---------- + datasets (list): + List of Satpy Scene datasets to include in the output xr.Dataset. + The list must include either dataset names or DataIDs. + If None (the default), it include all loaded Scene datasets. + header_attrs: + Global attributes of the output xr.Dataset. + epoch (str): + Reference time for encoding the time coordinates (if available). + Example format: "seconds since 1970-01-01 00:00:00". 
+ If None, the default reference time is retrieved using `from satpy.cf_writer import EPOCH` + flatten_attrs (bool): + If True, flatten dict-type attributes. + exclude_attrs (list): + List of xr.DataArray attribute names to be excluded. + include_lonlats (bool): + If True, it includes 'latitude' and 'longitude' coordinates also for satpy scene defined on an AreaDefinition. + If the 'area' attribute is a SwathDefinition, it always include latitude and longitude coordinates. + pretty (bool): + Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. + include_orig_name (bool). + Include the original dataset name as a variable attribute in the xr.Dataset. + numeric_name_prefix (str): + Prefix to add the each variable with name starting with a digit. + Use '' or None to leave this out. + groups (dict): + Group datasets according to the given assignment: + `{'': ['dataset_name1', 'dataset_name2', ...]}`. + It is used to create grouped netCDFs using the CF_Writer. + If None (the default), no groups will be created. + + Returns + ------- + grouped_datasets : dict + A dictionary of CF-compliant xr.Dataset: {group_name: xr.Dataset} + header_attrs : dict + Global attributes to be attached to the xr.Dataset / netCDF4. + """ + # Check some DataArray have been provided + if not list_dataarrays: + raise RuntimeError("None of the requested datasets have been " + "generated or could not be loaded. Requested " + "composite inputs may need to have matching " + "dimensions (eg. through resampling).") + + # Define file header attributes + if header_attrs is not None: + if flatten_attrs: + header_attrs = flatten_dict(header_attrs) + header_attrs = encode_attrs_nc(header_attrs) # OrderedDict + else: + header_attrs = {} + + # TODO REFACTOR + # - _get_groups should not input 'root' + # - 'groups_' --> rename to 'grouped_dataarrays' + # - 'conventions' attribute should be added outside _get_groups + # - If group_name all or wrong, currently behave like groups = None + # Retrieve groups + # - If groups is None: {None: list_dataarrays} + # - if groups not None: {group_name: [xr.DataArray, xr.DataArray ,..], ...} + groups = {'group_name': ['IR_108', 'VIS8006'], 'group_name2': ["HRV"]} + groups = None + root = xr.Dataset({}, attrs={}) # TODO: this just to not to refactor _get_groups + groups_ = _get_groups(groups, list_dataarrays, root) # TODO: this add attr "Conventions" to root if no groups + is_grouped = len(groups_) >= 2 + + # TODO REFACTOR: remove root usage + # Update header_attrs with 'history' and 'Conventions' + # - Add "Created by pytroll/satpy ..." + _set_history(root) + header_attrs['history'] = root.attrs["history"] + # - Add CF conventions + if not is_grouped: + header_attrs['Conventions'] = root.attrs["Conventions"] + + # TODO REFACTOR + # Temporary create CF writer instance to acces to CFWriter._collect_datasets + # Requires refactor of CFWriter to define CFWriter._collect_datasets, and CFWriter.da2cf as generic functions + # CFWriter.da2cf is a static method and could be put outside + # CFWriter._collect_datasets(self, ...) could be put outside + from satpy.writers import load_writer + cf_writer, save_kwargs = load_writer(writer="cf", filename="") + + # Create dictionary of group xr.Datasets + # --> If no groups (groups=None) --> group_name=None + grouped_datasets = {} + for group_name, group_dataarrays in groups_.items(): + # XXX: Should we combine the info of all datasets? 
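+        # (For now each group is converted independently: _collect_datasets
+        # returns the CF-converted DataArrays plus per-dataset start/end times,
+        # which feed the time_bnds variable created below.)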
+ dict_datarrays, start_times, end_times = cf_writer._collect_datasets( + datasets=group_dataarrays, + epoch=epoch, + flatten_attrs=flatten_attrs, + exclude_attrs=exclude_attrs, + include_lonlats=include_lonlats, + pretty=pretty, + compression=compression, + include_orig_name=include_orig_name, + numeric_name_prefix=numeric_name_prefix) + ds = xr.Dataset(dict_datarrays) + + # If no groups, add global header to xr.Dataset + if group_name is None: + ds.attrs = header_attrs + + # Add time_bnds + if 'time' in ds: + ds['time_bnds'] = make_time_bounds(start_times, end_times) + ds['time'].attrs['bounds'] = "time_bnds" + ds['time'].attrs['standard_name'] = "time" + else: + grp_str = ' of group {}'.format(group_name) if group_name is not None else '' + logger.warning('No time dimension in datasets{}, skipping time bounds creation.'.format(grp_str)) + + # Add xr.Dataset to dictionary + grouped_datasets[group_name] = ds + + # Return dictionary with xr.Dataset + return grouped_datasets, header_attrs + + class CFWriter(Writer): """Writer producing NetCDF/CF compatible datasets.""" @@ -840,27 +979,27 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, Args: datasets (list): - Datasets to be saved + List of xr.DataArray to be saved. filename (str): Output file groups (dict): Group datasets according to the given assignment: `{'group_name': ['dataset1', 'dataset2', ...]}`. - Group name `None` corresponds to the root of the file, i.e. no group will be created. Warning: The - results will not be fully CF compliant! + Group name `None` corresponds to the root of the file, i.e. no group will be created. + Warning: The results will not be fully CF compliant! header_attrs: - Global attributes to be included + Global attributes to be included. engine (str): Module to be used for writing netCDF files. Follows xarray's :meth:`~xarray.Dataset.to_netcdf` engine choices with a preference for 'netcdf4'. epoch (str): - Reference time for encoding of time coordinates + Reference time for encoding of time coordinates. flatten_attrs (bool): - If True, flatten dict-type attributes + If True, flatten dict-type attributes. exclude_attrs (list): - List of dataset attributes to be excluded + List of dataset attributes to be excluded. include_lonlats (bool): - Always include latitude and longitude coordinates, even for datasets with area definition + Always include latitude and longitude coordinates, even for datasets with area definition. pretty (bool): Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. compression (dict): @@ -869,59 +1008,72 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, http://xarray.pydata.org/en/stable/generated/xarray.Dataset.to_netcdf.html for more possibilities. (This parameter is now being deprecated, please use the DataArrays's `encoding` from now on.) include_orig_name (bool). - Include the original dataset name as an varaibel attribute in the final netcdf + Include the original dataset name as a variable attribute in the final netCDF. numeric_name_prefix (str): Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. 
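        Example (a hedged sketch of grouped output; group and dataset names are
        illustrative and must match loaded datasets)::

            scn.save_datasets(filename='seviri_groups.nc', writer='cf',
                              groups={'visir': ['IR_108', 'VIS006'],
                                      'hrv': ['HRV']})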
""" + # Note: datasets is a list of xr.DataArray logger.info('Saving datasets to NetCDF4/CF.') + + # Retrieve compression [Deprecated] compression = _get_compression(compression) - # Write global attributes to file root (creates the file) + # Define netCDF filename if not provided + # - It infers the name from the first DataArray filename = filename or self.get_filename(**datasets[0].attrs) - root = xr.Dataset({}, attrs={}) - if header_attrs is not None: - if flatten_attrs: - header_attrs = flatten_dict(header_attrs) - root.attrs = encode_attrs_nc(header_attrs) - - _set_history(root) - + # Collect xr.Dataset for each group + grouped_datasets, header_attrs = collect_cf_datasets(list_dataarrays=datasets, # list of xr.DataArray + header_attrs=header_attrs, + exclude_attrs=exclude_attrs, + flatten_attrs=flatten_attrs, + pretty=pretty, + include_lonlats=include_lonlats, + epoch=epoch, + include_orig_name=include_orig_name, + numeric_name_prefix=numeric_name_prefix, + groups=groups, + compression=compression, # [DEPRECATED] + ) # Remove satpy-specific kwargs - to_netcdf_kwargs = copy.deepcopy(to_netcdf_kwargs) # may contain dictionaries (encoding) + # - This kwargs can contain encoding dictionary + to_netcdf_kwargs = copy.deepcopy(to_netcdf_kwargs) satpy_kwargs = ['overlay', 'decorate', 'config_files'] for kwarg in satpy_kwargs: to_netcdf_kwargs.pop(kwarg, None) - init_nc_kwargs = to_netcdf_kwargs.copy() - init_nc_kwargs.pop('encoding', None) # No variables to be encoded at this point - init_nc_kwargs.pop('unlimited_dims', None) - - groups_ = _get_groups(groups, datasets, root) - - written = [root.to_netcdf(filename, engine=engine, mode='w', **init_nc_kwargs)] - - # Write datasets to groups (appending to the file; group=None means no group) - for group_name, group_datasets in groups_.items(): - # XXX: Should we combine the info of all datasets? - datas, start_times, end_times = self._collect_datasets( - group_datasets, epoch=epoch, flatten_attrs=flatten_attrs, exclude_attrs=exclude_attrs, - include_lonlats=include_lonlats, pretty=pretty, compression=compression, - include_orig_name=include_orig_name, numeric_name_prefix=numeric_name_prefix) - dataset = xr.Dataset(datas) - if 'time' in dataset: - dataset['time_bnds'] = make_time_bounds(start_times, - end_times) - dataset['time'].attrs['bounds'] = "time_bnds" - dataset['time'].attrs['standard_name'] = "time" - else: - grp_str = ' of group {}'.format(group_name) if group_name is not None else '' - logger.warning('No time dimension in datasets{}, skipping time bounds creation.'.format(grp_str)) - - encoding, other_to_netcdf_kwargs = update_encoding(dataset, to_netcdf_kwargs, numeric_name_prefix) - res = dataset.to_netcdf(filename, engine=engine, group=group_name, mode='a', encoding=encoding, - **other_to_netcdf_kwargs) + # If writing grouped netCDF, create an empty "root" netCDF file + # - Add the global attributes + # - All groups will be appended in the for loop below + if groups is not None: + root = xr.Dataset({}, attrs=header_attrs) + # - Add history attribute: "Created by pytroll/satpy ..." 
+ _set_history(root) + # - Define init kwargs + init_nc_kwargs = to_netcdf_kwargs.copy() + init_nc_kwargs.pop('encoding', None) # No variables to be encoded at this point + init_nc_kwargs.pop('unlimited_dims', None) + written = [root.to_netcdf(filename, engine=engine, mode='w', **init_nc_kwargs)] + mode = "a" + else: + mode = "w" + written = [] + + # Write the netCDF + # - If grouped netCDF, it appends to the root file + # - If single netCDF, it write directly + for group_name, ds in grouped_datasets.items(): + # Update encoding + encoding, other_to_netcdf_kwargs = update_encoding(ds, + to_netcdf_kwargs, + numeric_name_prefix) + # Write (append) dataset + res = ds.to_netcdf(filename, engine=engine, + group=group_name, mode=mode, + encoding=encoding, + **other_to_netcdf_kwargs) written.append(res) + # Return list of writing results return written From 47fc269dff8db814de5a6470c2b0961a16801fb2 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Thu, 3 Nov 2022 13:43:23 +0100 Subject: [PATCH 0064/1416] Remove residual dummy lines --- satpy/writers/cf_writer.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 10858490ae..92dbcb603d 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -690,8 +690,6 @@ def collect_cf_datasets(list_dataarrays, # Retrieve groups # - If groups is None: {None: list_dataarrays} # - if groups not None: {group_name: [xr.DataArray, xr.DataArray ,..], ...} - groups = {'group_name': ['IR_108', 'VIS8006'], 'group_name2': ["HRV"]} - groups = None root = xr.Dataset({}, attrs={}) # TODO: this just to not to refactor _get_groups groups_ = _get_groups(groups, list_dataarrays, root) # TODO: this add attr "Conventions" to root if no groups is_grouped = len(groups_) >= 2 @@ -731,7 +729,7 @@ def collect_cf_datasets(list_dataarrays, ds = xr.Dataset(dict_datarrays) # If no groups, add global header to xr.Dataset - if group_name is None: + if not is_grouped: ds.attrs = header_attrs # Add time_bnds From 74f495d9affecde14579edd9b70e5173bbd9f8e2 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Thu, 3 Nov 2022 15:17:27 +0100 Subject: [PATCH 0065/1416] Fix history and conventions failing tests --- satpy/writers/cf_writer.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 92dbcb603d..4191377b9c 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -696,11 +696,17 @@ def collect_cf_datasets(list_dataarrays, # TODO REFACTOR: remove root usage # Update header_attrs with 'history' and 'Conventions' + # - If 'Conventions' already in header_attrs, do not overwrite + # - If 'history' already in header_attres, _set_history decide what to do + # - Add "Created by pytroll/satpy ..." + if "history" in header_attrs: + root.attrs["history"] = header_attrs["history"] _set_history(root) header_attrs['history'] = root.attrs["history"] - # - Add CF conventions - if not is_grouped: + + # - Add CF conventions if not grouped + if "Conventions" not in header_attrs and not is_grouped: header_attrs['Conventions'] = root.attrs["Conventions"] # TODO REFACTOR @@ -1046,8 +1052,6 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, # - All groups will be appended in the for loop below if groups is not None: root = xr.Dataset({}, attrs=header_attrs) - # - Add history attribute: "Created by pytroll/satpy ..." 
- _set_history(root) # - Define init kwargs init_nc_kwargs = to_netcdf_kwargs.copy() init_nc_kwargs.pop('encoding', None) # No variables to be encoded at this point From dfcc75b962ebe73e793e06285d08888eb9b977d4 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Thu, 3 Nov 2022 18:12:02 +0100 Subject: [PATCH 0066/1416] Change behaviour of set_history and get_groups --- satpy/tests/writer_tests/test_cf.py | 2 +- satpy/writers/cf_writer.py | 65 ++++++++++++----------------- 2 files changed, 27 insertions(+), 40 deletions(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 95399c4a55..57bfc89871 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -100,7 +100,7 @@ def test_save_with_compression(self): comp = {'zlib': True, 'complevel': 9} scn.save_datasets(filename='bla', writer='cf', compression=comp) - ars, kws = xrdataset.call_args_list[1] + ars, kws = xrdataset.call_args_list[1] # TODO: this does not fail if [0] !!!! self.assertDictEqual(ars[0]['test-array'].encoding, comp) def test_save_array_coords(self): diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 4191377b9c..97b1907f9f 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -584,32 +584,31 @@ def _get_compression(compression): return compression -def _set_history(root): +def _set_history(attrs): + """Add 'history' attribute to the header_attrs.""" _history_create = 'Created by pytroll/satpy on {}'.format(datetime.utcnow()) - if 'history' in root.attrs: - if isinstance(root.attrs['history'], list): - root.attrs['history'] = ''.join(root.attrs['history']) - root.attrs['history'] += '\n' + _history_create + if 'history' in attrs: + if isinstance(attrs['history'], list): + attrs['history'] = ''.join(attrs['history']) + attrs['history'] += '\n' + _history_create else: - root.attrs['history'] = _history_create + attrs['history'] = _history_create -def _get_groups(groups, datasets, root): +def _get_groups(groups, list_datarrays): + """Return a dictionary with the list of xr.DataArray associated to each group.""" + # If no groups, return all DataArray attached to a single None key if groups is None: - # Groups are not CF-1.7 compliant - if 'Conventions' not in root.attrs: - root.attrs['Conventions'] = CF_VERSION - # Write all datasets to the file root without creating a group - groups_ = {None: datasets} + grouped_dataarrays = {None: list_datarrays} + # Else, collect the DataArrays associated to each group else: - # User specified a group assignment using dataset names. Collect the corresponding datasets. 
- groups_ = defaultdict(list) - for dataset in datasets: + grouped_dataarrays = defaultdict(list) + for datarray in list_datarrays: for group_name, group_members in groups.items(): - if dataset.attrs['name'] in group_members: - groups_[group_name].append(dataset) + if datarray.attrs['name'] in group_members: + grouped_dataarrays[group_name].append(datarray) break - return groups_ + return grouped_dataarrays def collect_cf_datasets(list_dataarrays, @@ -682,32 +681,19 @@ def collect_cf_datasets(list_dataarrays, else: header_attrs = {} - # TODO REFACTOR - # - _get_groups should not input 'root' - # - 'groups_' --> rename to 'grouped_dataarrays' - # - 'conventions' attribute should be added outside _get_groups - # - If group_name all or wrong, currently behave like groups = None # Retrieve groups # - If groups is None: {None: list_dataarrays} # - if groups not None: {group_name: [xr.DataArray, xr.DataArray ,..], ...} - root = xr.Dataset({}, attrs={}) # TODO: this just to not to refactor _get_groups - groups_ = _get_groups(groups, list_dataarrays, root) # TODO: this add attr "Conventions" to root if no groups - is_grouped = len(groups_) >= 2 + # - TODO: if all dataset names are wrong, currently and before the PR behave like groups = None ! + grouped_dataarrays = _get_groups(groups, list_dataarrays) + is_grouped = len(grouped_dataarrays) >= 2 - # TODO REFACTOR: remove root usage # Update header_attrs with 'history' and 'Conventions' - # - If 'Conventions' already in header_attrs, do not overwrite - # - If 'history' already in header_attres, _set_history decide what to do - - # - Add "Created by pytroll/satpy ..." - if "history" in header_attrs: - root.attrs["history"] = header_attrs["history"] - _set_history(root) - header_attrs['history'] = root.attrs["history"] - - # - Add CF conventions if not grouped + # - Add "Created by pytroll/satpy ..." to history attribute + _set_history(header_attrs) + # - Add CF conventions if not grouped. If 'Conventions' key already present, do not overwrite if "Conventions" not in header_attrs and not is_grouped: - header_attrs['Conventions'] = root.attrs["Conventions"] + header_attrs['Conventions'] = CF_VERSION # TODO REFACTOR # Temporary create CF writer instance to acces to CFWriter._collect_datasets @@ -720,7 +706,7 @@ def collect_cf_datasets(list_dataarrays, # Create dictionary of group xr.Datasets # --> If no groups (groups=None) --> group_name=None grouped_datasets = {} - for group_name, group_dataarrays in groups_.items(): + for group_name, group_dataarrays in grouped_dataarrays.items(): # XXX: Should we combine the info of all datasets? 
dict_datarrays, start_times, end_times = cf_writer._collect_datasets( datasets=group_dataarrays, @@ -799,6 +785,7 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, compr for ds in new_data.attrs.get('ancillary_variables', [])] if anc: new_data.attrs['ancillary_variables'] = ' '.join(anc) + # TODO: make this a grid mapping or lon/lats # new_data.attrs['area'] = str(new_data.attrs.get('area')) CFWriter._cleanup_attrs(new_data) From 19a9f90c1b1558dbba120d22f3c8f69092605d9a Mon Sep 17 00:00:00 2001 From: ghiggi Date: Thu, 3 Nov 2022 18:33:20 +0100 Subject: [PATCH 0067/1416] Change da2cf as make_cf_dataarray top level function --- satpy/writers/cf_writer.py | 170 +++++++++++++++++++++++-------------- 1 file changed, 104 insertions(+), 66 deletions(-) diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 97b1907f9f..4a246ff568 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -611,6 +611,86 @@ def _get_groups(groups, list_datarrays): return grouped_dataarrays +def make_cf_dataarray(dataarray, epoch=EPOCH, flatten_attrs=False, + exclude_attrs=None, compression=None, + include_orig_name=True, numeric_name_prefix='CHANNEL_'): + """ + Make the xr.DataArray CF-compliant. + + Parameters + ---------- + dataarray : xr.DataArray + The data array to be made CF-compliant. + epoch : str, optional + Reference time for encoding of time coordinates. + flatten_attrs : bool, optional + If True, flatten dict-type attributes. + The default is False. + exclude_attrs : list, optional + List of dataset attributes to be excluded. + The default is None. + include_orig_name : bool, optional + Include the original dataset name in the netcdf variable attributes. + The default is True. + numeric_name_prefix : TYPE, optional + Prepend dataset name with this if starting with a digit. + The default is 'CHANNEL_'. + + Returns + ------- + new_data : xr.DataArray + CF-compliant xr.DataArray. 
+ + """ + if exclude_attrs is None: + exclude_attrs = [] + + original_name = None + new_data = dataarray.copy() + if 'name' in new_data.attrs: + name = new_data.attrs.pop('name') + original_name, name = _handle_dataarray_name(name, numeric_name_prefix) + new_data = new_data.rename(name) + + CFWriter._remove_satpy_attributes(new_data) + + new_data = CFWriter._encode_time(new_data, epoch) + new_data = CFWriter._encode_coords(new_data) + + # Remove area as well as user-defined attributes + for key in ['area'] + exclude_attrs: + new_data.attrs.pop(key, None) + + anc = [ds.attrs['name'] + for ds in new_data.attrs.get('ancillary_variables', [])] + if anc: + new_data.attrs['ancillary_variables'] = ' '.join(anc) + + # TODO: make this a grid mapping or lon/lats + # new_data.attrs['area'] = str(new_data.attrs.get('area')) + CFWriter._cleanup_attrs(new_data) + + if compression is not None: + new_data.encoding.update(compression) + + if 'long_name' not in new_data.attrs and 'standard_name' not in new_data.attrs: + new_data.attrs['long_name'] = new_data.name + if 'prerequisites' in new_data.attrs: + new_data.attrs['prerequisites'] = [np.string_(str(prereq)) for prereq in new_data.attrs['prerequisites']] + + if include_orig_name and numeric_name_prefix and original_name and original_name != name: + new_data.attrs['original_name'] = original_name + + # Flatten dict-type attributes, if desired + if flatten_attrs: + new_data.attrs = flatten_dict(new_data.attrs) + + # Encode attributes to netcdf-compatible datatype + new_data.attrs = encode_attrs_nc(new_data.attrs) + + return new_data + + def collect_cf_datasets(list_dataarrays, header_attrs=None, exclude_attrs=None, @@ -695,20 +775,12 @@ def collect_cf_datasets(list_dataarrays, if "Conventions" not in header_attrs and not is_grouped: header_attrs['Conventions'] = CF_VERSION - # TODO REFACTOR - # Temporary create CF writer instance to acces to CFWriter._collect_datasets - # Requires refactor of CFWriter to define CFWriter._collect_datasets, and CFWriter.da2cf as generic functions - # CFWriter.da2cf is a static method and could be put outside - # CFWriter._collect_datasets(self, ...) could be put outside - from satpy.writers import load_writer - cf_writer, save_kwargs = load_writer(writer="cf", filename="") - # Create dictionary of group xr.Datasets # --> If no groups (groups=None) --> group_name=None grouped_datasets = {} for group_name, group_dataarrays in grouped_dataarrays.items(): # XXX: Should we combine the info of all datasets? 
- dict_datarrays, start_times, end_times = cf_writer._collect_datasets( + dict_datarrays, start_times, end_times = CFWriter._collect_datasets( datasets=group_dataarrays, epoch=epoch, flatten_attrs=flatten_attrs, @@ -762,53 +834,16 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, compr numeric_name_prefix (str): Prepend dataset name with this if starting with a digit """ - if exclude_attrs is None: - exclude_attrs = [] - - original_name = None - new_data = dataarray.copy() - if 'name' in new_data.attrs: - name = new_data.attrs.pop('name') - original_name, name = _handle_dataarray_name(name, numeric_name_prefix) - new_data = new_data.rename(name) - - CFWriter._remove_satpy_attributes(new_data) - - new_data = CFWriter._encode_time(new_data, epoch) - new_data = CFWriter._encode_coords(new_data) - - # Remove area as well as user-defined attributes - for key in ['area'] + exclude_attrs: - new_data.attrs.pop(key, None) - - anc = [ds.attrs['name'] - for ds in new_data.attrs.get('ancillary_variables', [])] - if anc: - new_data.attrs['ancillary_variables'] = ' '.join(anc) - - # TODO: make this a grid mapping or lon/lats - # new_data.attrs['area'] = str(new_data.attrs.get('area')) - CFWriter._cleanup_attrs(new_data) - - if compression is not None: - new_data.encoding.update(compression) - - if 'long_name' not in new_data.attrs and 'standard_name' not in new_data.attrs: - new_data.attrs['long_name'] = new_data.name - if 'prerequisites' in new_data.attrs: - new_data.attrs['prerequisites'] = [np.string_(str(prereq)) for prereq in new_data.attrs['prerequisites']] - - if include_orig_name and numeric_name_prefix and original_name and original_name != name: - new_data.attrs['original_name'] = original_name - - # Flatten dict-type attributes, if desired - if flatten_attrs: - new_data.attrs = flatten_dict(new_data.attrs) - - # Encode attributes to netcdf-compatible datatype - new_data.attrs = encode_attrs_nc(new_data.attrs) - - return new_data + warnings.warn('CFWriter.da2cf is deprecated.' 
+ 'Use satpy.writers.cf_writer.make_cf_dataarray instead.', + DeprecationWarning) + return make_cf_dataarray(dataarray=dataarray, + epoch=epoch, + flatten_attrs=flatten_attrs, + exclude_attrs=exclude_attrs, + compression=compression, + include_orig_name=include_orig_name, + numeric_name_prefix=numeric_name_prefix) @staticmethod def _cleanup_attrs(new_data): @@ -920,12 +955,11 @@ def update_encoding(dataset, to_netcdf_kwargs): DeprecationWarning) return update_encoding(dataset, to_netcdf_kwargs) - def save_dataset(self, dataset, filename=None, fill_value=None, **kwargs): - """Save the *dataset* to a given *filename*.""" - return self.save_datasets([dataset], filename, **kwargs) - - def _collect_datasets(self, datasets, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, include_lonlats=True, - pretty=False, compression=None, include_orig_name=True, numeric_name_prefix='CHANNEL_'): + @staticmethod + def _collect_datasets(datasets, epoch=EPOCH, flatten_attrs=False, + exclude_attrs=None, include_lonlats=True, + pretty=False, compression=None, + include_orig_name=True, numeric_name_prefix='CHANNEL_'): """Collect and prepare datasets to be written.""" ds_collection = {} for ds in datasets: @@ -948,10 +982,10 @@ def _collect_datasets(self, datasets, epoch=EPOCH, flatten_attrs=False, exclude_ for new_ds in new_datasets: start_times.append(new_ds.attrs.get("start_time", None)) end_times.append(new_ds.attrs.get("end_time", None)) - new_var = self.da2cf(new_ds, epoch=epoch, flatten_attrs=flatten_attrs, - exclude_attrs=exclude_attrs, compression=compression, - include_orig_name=include_orig_name, - numeric_name_prefix=numeric_name_prefix) + new_var = make_cf_dataarray(new_ds, epoch=epoch, flatten_attrs=flatten_attrs, + exclude_attrs=exclude_attrs, compression=compression, + include_orig_name=include_orig_name, + numeric_name_prefix=numeric_name_prefix) datas[new_var.name] = new_var # Check and prepare coordinates @@ -961,6 +995,10 @@ def _collect_datasets(self, datasets, epoch=EPOCH, flatten_attrs=False, exclude_ return datas, start_times, end_times + def save_dataset(self, dataset, filename=None, fill_value=None, **kwargs): + """Save the *dataset* to a given *filename*.""" + return self.save_datasets([dataset], filename, **kwargs) + def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, engine=None, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, include_lonlats=True, pretty=False, compression=None, include_orig_name=True, numeric_name_prefix='CHANNEL_', **to_netcdf_kwargs): From 05f745a0f3827743f2eca512290f3f07a5906b76 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 8 Nov 2022 09:19:56 +0100 Subject: [PATCH 0068/1416] Fix new datasetid format and add an backend --- satpy/etc/composites/sgli.yaml | 10 +-- satpy/readers/sgli_l1b.py | 75 ++++++++++++++++++----- satpy/tests/reader_tests/test_sgli_l1b.py | 16 +++++ 3 files changed, 79 insertions(+), 22 deletions(-) create mode 100644 satpy/tests/reader_tests/test_sgli_l1b.py diff --git a/satpy/etc/composites/sgli.yaml b/satpy/etc/composites/sgli.yaml index f66744c037..58f52a1124 100644 --- a/satpy/etc/composites/sgli.yaml +++ b/satpy/etc/composites/sgli.yaml @@ -4,7 +4,7 @@ sensor_name: visir/sgli modifiers: rayleigh_corrected: - compositor: !!python/name:satpy.composites.PSPRayleighReflectance + compositor: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: @@ -17,7 +17,7 @@ modifiers: - solar_zenith_angle rayleigh_corrected_marine_clean: - 
compositor: !!python/name:satpy.composites.PSPRayleighReflectance + compositor: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: marine_clean_aerosol prerequisites: @@ -30,7 +30,7 @@ modifiers: - solar_zenith_angle rayleigh_corrected_marine_tropical: - compositor: !!python/name:satpy.composites.PSPRayleighReflectance + compositor: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: tropical aerosol_type: marine_tropical_aerosol prerequisites: @@ -43,7 +43,7 @@ modifiers: - solar_zenith_angle rayleigh_corrected_desert: - compositor: !!python/name:satpy.composites.PSPRayleighReflectance + compositor: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: tropical aerosol_type: desert_aerosol prerequisites: @@ -56,7 +56,7 @@ modifiers: - solar_zenith_angle rayleigh_corrected_land: - compositor: !!python/name:satpy.composites.PSPRayleighReflectance + compositor: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: continental_average_aerosol prerequisites: diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py index 2455c4b63b..97db07a756 100644 --- a/satpy/readers/sgli_l1b.py +++ b/satpy/readers/sgli_l1b.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. @@ -29,14 +27,19 @@ """ -from satpy.readers.file_handlers import BaseFileHandler +import logging from datetime import datetime -from satpy import CHUNK_SIZE -import xarray as xr + import dask.array as da import h5py -import logging import numpy as np +import xarray as xr +from xarray import Dataset, Variable +from xarray.backends import BackendArray, BackendEntrypoint +from xarray.core import indexing + +from satpy import CHUNK_SIZE +from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) @@ -69,7 +72,7 @@ class HDF5SGLI(BaseFileHandler): def __init__(self, filename, filename_info, filetype_info): """Initialize the filehandler.""" - super(HDF5SGLI, self).__init__(filename, filename_info, filetype_info) + super().__init__(filename, filename_info, filetype_info) self.resolution = resolutions[self.filename_info['resolution']] self.fh = h5py.File(self.filename, 'r') @@ -87,12 +90,12 @@ def end_time(self): def get_dataset(self, key, info): """Get the dataset.""" - if key.resolution != self.resolution: + if key["resolution"] != self.resolution: return - if key.polarization is not None: + if key["polarization"] is not None: pols = {0: '0', -60: 'm60', 60: 'p60'} - file_key = info['file_key'].format(pol=pols[key.polarization]) + file_key = info['file_key'].format(pol=pols[key["polarization"]]) else: file_key = info['file_key'] @@ -100,7 +103,7 @@ def get_dataset(self, key, info): resampling_interval = h5dataset.attrs.get('Resampling_interval', 1) if resampling_interval != 1: - logger.debug('Interpolating %s.', key.name) + logger.debug('Interpolating %s.', key["name"]) full_shape = (self.fh['Image_data'].attrs['Number_of_lines'], self.fh['Image_data'].attrs['Number_of_pixels']) dataset = interpolate(h5dataset, resampling_interval, full_shape) @@ -108,6 +111,12 @@ def get_dataset(self, key, info): dataset = da.from_array(h5dataset[:].astype(' Date: Tue, 15 Nov 2022 09:27:03 +0100 Subject: [PATCH 0069/1416] refactor: move rst list generation function to pyresample --- doc/source/area_def_list.py | 51 ------------------------------------- doc/source/conf.py | 2 +- 2 files changed, 1 insertion(+), 52 deletions(-) delete 
mode 100644 doc/source/area_def_list.py diff --git a/doc/source/area_def_list.py b/doc/source/area_def_list.py deleted file mode 100644 index 665c770569..0000000000 --- a/doc/source/area_def_list.py +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# Copyright (c) 2022 Satpy developers -# -# This file is part of satpy. -# -# satpy is free software: you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# satpy is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with satpy. If not, see . -"""Module for autogenerating a list and overview of available area definitions .""" - -from pyresample.area_config import _read_yaml_area_file_content -from pyresample.formatting_html import area_repr - -from satpy.resample import get_area_def, get_area_file - - -def generate_area_def_list(): - """Create list of available area definitions with overview plot. - - Returns: - str - """ - area_list = [] - - template = ("{area_name}\n" - "{n:->{header_title_length}}\n\n" - ".. raw:: html\n\n" - "{content}\n\n" - "
\n\n") - - area_file = get_area_file()[0] - for aname in list(_read_yaml_area_file_content(area_file).keys()): - area = get_area_def(aname) - if hasattr(area, "_repr_html_"): - content = "\n".join([x.rjust(len(x) + 5) for x in area_repr(area, include_header=False).split("\n")]) - area_list.append(template.format(area_name=aname, n="", header_title_length=len(aname), - content=content)) - else: - pass - - return "".join(area_list) diff --git a/doc/source/conf.py b/doc/source/conf.py index 975622b345..43dd186e68 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -26,7 +26,7 @@ sys.path.append(os.path.abspath('../../')) sys.path.append(os.path.abspath(os.path.dirname(__file__))) -from area_def_list import generate_area_def_list # noqa: E402 +from pyresample.area_config import generate_area_def_list # noqa: E402 from reader_table import generate_reader_table # noqa: E402 # The version info for the project you're documenting, acts as replacement for From d6210898d56d6e92f702f395f311cc0f660881ad Mon Sep 17 00:00:00 2001 From: BENR0 Date: Tue, 15 Nov 2022 09:41:50 +0100 Subject: [PATCH 0070/1416] fix: missing area file argument --- doc/source/conf.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 43dd186e68..e84ad1cba5 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -29,6 +29,8 @@ from pyresample.area_config import generate_area_def_list # noqa: E402 from reader_table import generate_reader_table # noqa: E402 +from satpy.resample import get_area_file # noqa: E402 + # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. @@ -81,8 +83,9 @@ def __getattr__(cls, name): with open("reader_table.rst", mode="w") as f: f.write(generate_reader_table()) +area_file = get_area_file()[0] with open("area_def_list.rst", mode="w") as f: - f.write(generate_area_def_list()) + f.write(generate_area_def_list(area_file)) # -- General configuration ----------------------------------------------------- From 4b94939a472dc344249911c52f2cc7cb60125818 Mon Sep 17 00:00:00 2001 From: BENR0 Date: Tue, 15 Nov 2022 09:47:55 +0100 Subject: [PATCH 0071/1416] fix: function name --- doc/source/conf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index e84ad1cba5..e69edac07e 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -26,7 +26,7 @@ sys.path.append(os.path.abspath('../../')) sys.path.append(os.path.abspath(os.path.dirname(__file__))) -from pyresample.area_config import generate_area_def_list # noqa: E402 +from pyresample.area_config import generate_area_def_rst_list # noqa: E402 from reader_table import generate_reader_table # noqa: E402 from satpy.resample import get_area_file # noqa: E402 @@ -85,7 +85,7 @@ def __getattr__(cls, name): area_file = get_area_file()[0] with open("area_def_list.rst", mode="w") as f: - f.write(generate_area_def_list(area_file)) + f.write(generate_area_def_rst_list(area_file)) # -- General configuration ----------------------------------------------------- From fd0879daba1abca651ddc85bb73790dca8eb82e3 Mon Sep 17 00:00:00 2001 From: BENR0 Date: Mon, 5 Dec 2022 14:14:00 +0100 Subject: [PATCH 0072/1416] refactor: add class name argument to rst table header generation --- doc/source/reader_table.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/source/reader_table.py b/doc/source/reader_table.py 
index 1c6760a390..99499de26c 100644 --- a/doc/source/reader_table.py +++ b/doc/source/reader_table.py @@ -38,7 +38,7 @@ def rst_table_row(columns=None): return row -def rst_table_header(name=None, header=None, header_rows=1, widths="auto"): +def rst_table_header(name=None, header=None, header_rows=1, widths="auto", class_name="datatable"): """Create header for rst table. Args: @@ -59,7 +59,7 @@ def rst_table_header(name=None, header=None, header_rows=1, widths="auto"): table_header = (f".. list-table:: {name}\n" f" :header-rows: {header_rows}\n" f" :widths: {widths}\n" - f" :class: datatable\n\n" + f" :class: {class_name}\n\n" f"{header}") return table_header From 6edf540767498f79ec684661646eff94fb57d0b5 Mon Sep 17 00:00:00 2001 From: BENR0 Date: Mon, 5 Dec 2022 14:15:05 +0100 Subject: [PATCH 0073/1416] refactor: add options for area table to datatable js init --- doc/source/_static/main.js | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/doc/source/_static/main.js b/doc/source/_static/main.js index 188a335e71..8455d3e2ef 100644 --- a/doc/source/_static/main.js +++ b/doc/source/_static/main.js @@ -3,4 +3,10 @@ $(document).ready( function () { "paging": false, "dom": 'lfitp' } ); + + $('table.area-table').DataTable( { + "paging": true, + "pageLength": 15, + "dom": 'lfitp' +} ); } ); From 590d3742d1a083b8ab87a4a27936ac22c929949d Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 24 Feb 2023 13:59:19 +0100 Subject: [PATCH 0074/1416] add holoviews to continuous_integration --- continuous_integration/environment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index 57728e2c73..67c007a798 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -35,12 +35,12 @@ dependencies: - mock - libtiff - geoviews + - holoviews - hvplot - zarr - python-eccodes # 2.19.1 seems to cause library linking issues - eccodes>=2.20 - - geoviews - pytest - pytest-cov - pytest-lazy-fixture From 9d2b3b0abc849964c7fc52f05ddb87a00a30bf41 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Fri, 17 Mar 2023 15:54:23 +0000 Subject: [PATCH 0075/1416] ADD roudning of the start time to nominal slots --- satpy/readers/seviri_l1b_native.py | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index 87e86b7c71..af751c4357 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -27,7 +27,7 @@ import logging import warnings -from datetime import datetime +from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -129,8 +129,19 @@ def _has_archive_header(self): @property def nominal_start_time(self): """Read the repeat cycle nominal start time from metadata.""" - return self.header['15_DATA_HEADER']['ImageAcquisition'][ - 'PlannedAcquisitionTime']['TrueRepeatCycleStart'] + tm = self.header['15_DATA_HEADER']['ImageAcquisition']['PlannedAcquisitionTime']['TrueRepeatCycleStart'] + if self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['NominalImageScanning'] == 1: + # rouding nominal start time to fit the expected 15 minutes RC for full disk scan + tm = tm - timedelta(minutes=tm.minute % 15, + seconds=tm.second, + microseconds=tm.microsecond) + elif self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1: + # rouding nominal start time to fit the 
expected 5 minutes RSS for full disk scan
+            tm = tm - timedelta(minutes=tm.minute % 5,
+                                seconds=tm.second,
+                                microseconds=tm.microsecond)
+        # TODO raise a warning if none of the above but still return the original time
+        return tm

     @property
     def observation_start_time(self):

From 2f15016c7147b6f241028ef3028108df8ecc979d Mon Sep 17 00:00:00 2001
From: youva Aoun
Date: Fri, 17 Mar 2023 15:58:03 +0000
Subject: [PATCH 0076/1416] ADD rounding of the end time to nominal slots

---
 satpy/readers/seviri_l1b_native.py | 14 +++++++++++++-
 1 file changed, 13 insertions(+), 1 deletion(-)

diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py
index af751c4357..a17588107a 100644
--- a/satpy/readers/seviri_l1b_native.py
+++ b/satpy/readers/seviri_l1b_native.py
@@ -146,8 +146,20 @@ def nominal_start_time(self):
     @property
     def nominal_end_time(self):
         """Read the repeat cycle nominal end time from metadata."""
-        return self.header['15_DATA_HEADER']['ImageAcquisition'][
+        tm = self.header['15_DATA_HEADER']['ImageAcquisition'][
             'PlannedAcquisitionTime']['PlannedRepeatCycleEnd']
+        if self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['NominalImageScanning'] == 1:
+            # rouding nominal start time to fit the expected 15 minutes RC for full disk scan
+            tm = tm - timedelta(minutes=tm.minute % 15,
+                                seconds=tm.second,
+                                microseconds=tm.microsecond)
+        elif self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1:
+            # rouding nominal start time to fit the expected 5 minutes RSS for full disk scan
+            tm = tm - timedelta(minutes=tm.minute % 5,
+                                seconds=tm.second,
+                                microseconds=tm.microsecond)
+        # TODO raise a warning if none of the above but still return the original time
+        return tm

     @property
     def observation_start_time(self):

From 7c3fbc71e736623732df43fbb7ff38a36fc2069a Mon Sep 17 00:00:00 2001
From: youva Aoun
Date: Fri, 17 Mar 2023 16:06:56 +0000
Subject: [PATCH 0077/1416] Fix typo in the comment of the rounding of nominal
 time

---
 satpy/readers/seviri_l1b_native.py | 11 +++++------
 1 file changed, 5 insertions(+), 6 deletions(-)

diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py
index a17588107a..9906a9d661 100644
--- a/satpy/readers/seviri_l1b_native.py
+++ b/satpy/readers/seviri_l1b_native.py
@@ -131,12 +131,12 @@ def nominal_start_time(self):
         """Read the repeat cycle nominal start time from metadata."""
         tm = self.header['15_DATA_HEADER']['ImageAcquisition']['PlannedAcquisitionTime']['TrueRepeatCycleStart']
         if self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['NominalImageScanning'] == 1:
-            # rouding nominal start time to fit the expected 15 minutes RC for full disk scan
+            # rounding nominal start time to fit the expected 15 minutes RC for full disk scan
             tm = tm - timedelta(minutes=tm.minute % 15,
                                 seconds=tm.second,
                                 microseconds=tm.microsecond)
         elif self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1:
-            # rouding nominal start time to fit the expected 5 minutes RSS for full disk scan
+            # rounding nominal start time to fit the expected 5 minutes RSS for full disk scan
             tm = tm - timedelta(minutes=tm.minute % 5,
                                 seconds=tm.second,
                                 microseconds=tm.microsecond)
@@ -146,15 +146,14 @@ def nominal_start_time(self):
     @property
     def nominal_end_time(self):
         """Read the repeat cycle nominal end time from metadata."""
-        tm = self.header['15_DATA_HEADER']['ImageAcquisition'][
-            'PlannedAcquisitionTime']['PlannedRepeatCycleEnd']
+        tm = self.header['15_DATA_HEADER']['ImageAcquisition']['PlannedAcquisitionTime']['PlannedRepeatCycleEnd']
         if self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['NominalImageScanning'] == 1:
-            # rouding nominal start time to fit the expected 15 minutes RC for full disk scan
+            # rounding nominal start time to fit the expected 15 minutes RC for full disk scan
             tm = tm - timedelta(minutes=tm.minute % 15,
                                 seconds=tm.second,
                                 microseconds=tm.microsecond)
         elif self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1:
-            # rouding nominal start time to fit the expected 5 minutes RSS for full disk scan
+            # rounding nominal start time to fit the expected 5 minutes RSS for full disk scan
             tm = tm - timedelta(minutes=tm.minute % 5,
                                 seconds=tm.second,
                                 microseconds=tm.microsecond)

From 24273d57e8c1d05b62c5104e53f0383e1e29ad40 Mon Sep 17 00:00:00 2001
From: Johan Strandgren
Date: Mon, 20 Mar 2023 13:46:53 +0100
Subject: [PATCH 0078/1416] Add compositor for high-level clouds following
 GeoColor implementation.

---
 satpy/composites/__init__.py | 68 ++++++++++++++++++++++++++++++++++++
 1 file changed, 68 insertions(+)

diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py
index 0948d543ab..248aa3b8e6 100644
--- a/satpy/composites/__init__.py
+++ b/satpy/composites/__init__.py
@@ -1018,6 +1018,74 @@ def __call__(self, projectables, **kwargs):
         return res


+class HighCloudCompositor(CloudCompositor):
+    """Detect high clouds based on latitude-dependent thresholding and use it as a mask for compositing.
+
+    This compositor aims at identifying high clouds and assigning them a transparency based on the brightness
+    temperature (cloud opacity). In contrast to the `CloudCompositor`, the brightness temperature threshold at
+    the lower end, used to identify opaque clouds, is made a function of the latitude in order to have tropopause
+    level clouds appear as opaque at both high and low latitudes. This follows the Geocolor implementation of
+    high clouds in Miller et al. (2020, :doi:`10.1175/JTECH-D-19-0134.1`).
+
+    The idea is to define a tuple of two brightness temperature thresholds in transition_min and two corresponding
+    latitude thresholds in latitude_min. The latitude-dependent lower threshold tr_min_lat, passed on to
+    `CloudCompositor` as transition_min, is then computed as follows:
+
+    tr_min_lat = transition_min[0] where abs(latitude) < latitude_min[0]
+    tr_min_lat = transition_min[1] where abs(latitude) > latitude_min[1]
+    tr_min_lat = linear interpolation between transition_min[0] and transition_min[1] elsewhere.
+
+    """
+
+    def __init__(self, name, transition_min=(200., 220.), transition_max=280, latitude_min=(30., 60.),
+                 transition_gamma=1.0, **kwargs):
+        """Collect custom configuration values.
+
+        Args:
+            transition_min (tuple): Brightness temperature values used to identify opaque white
+                clouds at different latitudes
+            transition_max (float): Brightness temperatures above this value are not considered to
+                be high clouds -> transparent
+            latitude_min (tuple): Latitude values defining the intervals for computing latitude-dependent
+                transition_min values.
+ transition_gamma (float): Gamma correction to apply at the end + + """ + self.latitude_min = latitude_min + super().__init__(name, transition_min=transition_min, transition_max=transition_max, + transition_gamma=transition_gamma, **kwargs) + + def __call__(self, projectables, **kwargs): + """Generate the composite.""" + data = projectables[0] + _, lats = data.attrs["area"].get_lonlats() + lats = np.abs(lats) + + slope = (self.transition_min[1] - self.transition_min[0]) / (self.latitude_min[1] - self.latitude_min[0]) + offset = self.transition_min[0] - slope * self.latitude_min[0] + + tr_min_lat = xr.DataArray(name='tr_min_lat', coords=data.coords, dims=data.dims) + tr_min_lat = tr_min_lat.where(lats >= self.latitude_min[0], self.transition_min[0]) + tr_min_lat = tr_min_lat.where(lats <= self.latitude_min[1], self.transition_min[1]) + tr_min_lat = tr_min_lat.where((lats < self.latitude_min[0]) | (lats > self.latitude_min[1]), + slope * lats + offset) + + self.transition_min = tr_min_lat + + return super().__call__(projectables, **kwargs) + + class RatioSharpenedRGB(GenericCompositor): """Sharpen RGB bands with ratio of a high resolution band to a lower resolution version. From d01760c0da297c3bd8dfc984a4edb7d19b3f8960 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Mon, 20 Mar 2023 13:47:53 +0100 Subject: [PATCH 0079/1416] Add dedicated enhancement recipe for GeoColor high cloud composites. --- satpy/etc/enhancements/generic.yaml | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index ce1ce1bb94..b575d776df 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -838,6 +838,20 @@ enhancements: kwargs: weight: 1.0 + ir_high_cloud: + standard_name: ir_high_cloud + operations: + - name: inverse + method: !!python/name:satpy.enhancements.invert + args: + - [True, false] + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: linear + - name: 3d + method: !!python/name:satpy.enhancements.three_d_effect + colorized_ir_clouds: standard_name: colorized_ir_clouds operations: From 84ae8071c956006a49422bd5e419b7c012681a0d Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Mon, 20 Mar 2023 13:49:46 +0100 Subject: [PATCH 0080/1416] Add AHI recipe for GeoColor high-level cloud composite nighttime layer. 
--- satpy/etc/composites/ahi.yaml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index a2e80a4ac1..0785ad80c2 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -489,3 +489,10 @@ composites: prerequisites: - night_ir_alpha - _night_background_hires + + # GeoColor + GeoColor_HighClouds: + standard_name: ir_high_cloud + compositor: !!python/name:satpy.composites.HighCloudCompositor + prerequisites: + - name: B13 From d1885849c2de1ece1e0d600648bf0c4ec8a1ac6a Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Mon, 20 Mar 2023 15:45:38 +0000 Subject: [PATCH 0081/1416] Add observation start and end time as properties and modify start and end time to point to the nominal ones --- satpy/readers/seviri_l1b_hrit.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 5fd5c325e2..b4660a639b 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -502,17 +502,27 @@ def nominal_end_time(self): 'PlannedAcquisitionTime']['PlannedRepeatCycleEnd'] @property - def start_time(self): + def observation_start_time(self): """Get the start time.""" return self.epilogue['ImageProductionStats'][ 'ActualScanningSummary']['ForwardScanStart'] @property - def end_time(self): + def observation_end_time(self): """Get the end time.""" return self.epilogue['ImageProductionStats'][ 'ActualScanningSummary']['ForwardScanEnd'] + @property + def start_time(self): + """Get general start time for this file.""" + return self.nominal_start_time + + @property + def end_time(self): + """Get the general end time for this file.""" + return self.nominal_end_time + def _get_area_extent(self, pdict): """Get the area extent of the file. 
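For reference, the latitude-dependent threshold used by the HighCloudCompositor added in the patches above
reduces to a simple piecewise-linear interpolation; a minimal numpy sketch of the same computation (the
transition_min/latitude_min values are the compositor defaults from the patch, the latitude array is made up
for illustration):

    import numpy as np

    transition_min = (200., 220.)
    latitude_min = (30., 60.)
    lats = np.abs(np.array([0., 30., 45., 60., 75.]))

    slope = (transition_min[1] - transition_min[0]) / (latitude_min[1] - latitude_min[0])
    offset = transition_min[0] - slope * latitude_min[0]

    # Clipping the linear ramp to the two endpoint thresholds is equivalent
    # to the three chained xr.where branches in the compositor.
    tr_min_lat = np.clip(slope * lats + offset, transition_min[0], transition_min[1])
    print(tr_min_lat)  # [200. 200. 210. 220. 220.]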
From 93935c2782911ccf2d2367a09af44be6429a51ac Mon Sep 17 00:00:00 2001
From: youva Aoun
Date: Mon, 20 Mar 2023 16:02:46 +0000
Subject: [PATCH 0082/1416] Add rounding for nominal start time

---
 satpy/readers/seviri_l1b_hrit.py | 17 +++++++++++++++--
 1 file changed, 15 insertions(+), 2 deletions(-)

diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py
index b4660a639b..5468b1eb34 100644
--- a/satpy/readers/seviri_l1b_hrit.py
+++ b/satpy/readers/seviri_l1b_hrit.py
@@ -187,7 +187,7 @@
 import copy
 import logging
-from datetime import datetime
+from datetime import datetime, timedelta

 import dask.array as da
 import numpy as np
@@ -492,9 +492,22 @@ def _get_header(self):
     @property
     def nominal_start_time(self):
         """Get the start time."""
-        return self.prologue['ImageAcquisition'][
+        tm = self.prologue['ImageAcquisition'][
             'PlannedAcquisitionTime']['TrueRepeatCycleStart']

+        if self.epilogue['ImageProductionStats']['ActualScanningSummary']['NominalImageScanning'] == 1:
+            # rounding nominal start time to fit the expected 15 minutes RC for full disk scan
+            tm = tm - timedelta(minutes=tm.minute % 15,
+                                seconds=tm.second,
+                                microseconds=tm.microsecond)
+        elif self.epilogue['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1:
+            # rounding nominal start time to fit the expected 5 minutes RSS for full disk scan
+            tm = tm - timedelta(minutes=tm.minute % 5,
+                                seconds=tm.second,
+                                microseconds=tm.microsecond)
+        # TODO raise a warning if none of the above but still return the original time
+        return tm
+
     @property
     def nominal_end_time(self):
         """Get the end time."""

From 6cb81cc048d1e148d30cc7dead6d3e111279a95b Mon Sep 17 00:00:00 2001
From: youva Aoun
Date: Mon, 20 Mar 2023 16:06:53 +0000
Subject: [PATCH 0083/1416] Add rounding for nominal end time

---
 satpy/readers/seviri_l1b_hrit.py | 18 +++++++++++++++---
 1 file changed, 15 insertions(+), 3 deletions(-)

diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py
index 5468b1eb34..cc9b15565e 100644
--- a/satpy/readers/seviri_l1b_hrit.py
+++ b/satpy/readers/seviri_l1b_hrit.py
@@ -491,7 +491,7 @@ def _get_header(self):

     @property
     def nominal_start_time(self):
-        """Get the start time."""
+        """Get the start time and round it according to scan law."""
         tm = self.prologue['ImageAcquisition'][
             'PlannedAcquisitionTime']['TrueRepeatCycleStart']

@@ -510,9 +510,21 @@ def nominal_start_time(self):

     @property
     def nominal_end_time(self):
-        """Get the end time."""
-        return self.prologue['ImageAcquisition'][
+        """Get the end time and round it according to scan law."""
+        tm = self.prologue['ImageAcquisition'][
             'PlannedAcquisitionTime']['PlannedRepeatCycleEnd']
+        if self.epilogue['ImageProductionStats']['ActualScanningSummary']['NominalImageScanning'] == 1:
+            # rounding nominal start time to fit the expected 15 minutes RC for full disk scan
+            tm = tm - timedelta(minutes=tm.minute % 15,
+                                seconds=tm.second,
+                                microseconds=tm.microsecond)
+        elif self.epilogue['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1:
+            # rounding nominal start time to fit the expected 5 minutes RSS for full disk scan
+            tm = tm - timedelta(minutes=tm.minute % 5,
+                                seconds=tm.second,
+                                microseconds=tm.microsecond)
+        # TODO raise a warning if none of the above but still return the original time
+        return tm

     @property
     def observation_start_time(self):

From 493827578ab8475b922fd35884bcbf524ac0c09c Mon Sep 17 00:00:00 2001
From: youva Aoun
Date: Mon, 20 Mar 2023 16:59:48 +0000
Subject: [PATCH 0084/1416] Add time_parameters attrs with nominal and
 observation start and end times. Change former nominal_start_time and end
 time to deprecation warning

---
 satpy/readers/seviri_l1b_hrit.py | 14 ++++++++++++--
 1 file changed, 12 insertions(+), 2 deletions(-)

diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py
index cc9b15565e..c724ce6691 100644
--- a/satpy/readers/seviri_l1b_hrit.py
+++ b/satpy/readers/seviri_l1b_hrit.py
@@ -756,8 +756,18 @@ def _update_attrs(self, res, info):
         res.attrs['standard_name'] = info['standard_name']
         res.attrs['platform_name'] = self.platform_name
         res.attrs['sensor'] = 'seviri'
-        res.attrs['nominal_start_time'] = self.nominal_start_time
-        res.attrs['nominal_end_time'] = self.nominal_end_time
+        res.attrs['nominal_start_time'] = """
+        Deprecation warning: nominal_start_time should be accessed via the time_parameters attrs
+        """
+        res.attrs['nominal_end_time'] = """
+        Deprecation warning: nominal_end_time should be accessed via the time_parameters attrs
+        """
+        res.attrs['time_parameters'] = {
+            'nominal_start_time': self.nominal_start_time,
+            'nominal_end_time': self.nominal_end_time,
+            'observation_start_time': self.observation_start_time,
+            'observation_end_time': self.observation_end_time,
+        }
         res.attrs['orbital_parameters'] = {
            'projection_longitude': self.mda['projection_parameters']['SSP_longitude'],
            'projection_latitude': self.mda['projection_parameters']['SSP_latitude'],

From 9aa77ba5d5048fccbbce09aa8eb6c28794e6fe7d Mon Sep 17 00:00:00 2001
From: youva Aoun
Date: Mon, 20 Mar 2023 17:02:01 +0000
Subject: [PATCH 0085/1416] Fix doc string for nominal start and end time to
 reflect the rounding to nominal slots

---
 satpy/readers/seviri_l1b_native.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py
index 9906a9d661..e3a6adb9f1 100644
--- a/satpy/readers/seviri_l1b_native.py
+++ b/satpy/readers/seviri_l1b_native.py
@@ -128,7 +128,7 @@ def _has_archive_header(self):
     @property
     def nominal_start_time(self):
-        """Read the repeat cycle nominal start time from metadata."""
+        """Read the repeat cycle nominal start time from metadata and round it to expected nominal time slot."""
         tm = self.header['15_DATA_HEADER']['ImageAcquisition']['PlannedAcquisitionTime']['TrueRepeatCycleStart']
         if self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['NominalImageScanning'] == 1:
             # rounding nominal start time to fit the expected 15 minutes RC for full disk scan
@@ -145,7 +145,7 @@ def nominal_start_time(self):
     @property
     def nominal_end_time(self):
-        """Read the repeat cycle nominal end time from metadata."""
+        """Read the repeat cycle nominal end time from metadata and round it to expected nominal time slot."""
         tm = self.header['15_DATA_HEADER']['ImageAcquisition']['PlannedAcquisitionTime']['PlannedRepeatCycleEnd']
         if self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['NominalImageScanning'] == 1:
             # rounding nominal start time to fit the expected 15 minutes RC for full disk scan

From 3cd12ceb3039133a22edfea9cceb04701de6d180 Mon Sep 17 00:00:00 2001
From: youva Aoun
Date: Mon, 20 Mar 2023 17:05:16 +0000
Subject: [PATCH 0086/1416] Replace start time by self.start_time in satpos

---
 satpy/readers/seviri_l1b_hrit.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py
index c724ce6691..11548ea7ac 100644
--- a/satpy/readers/seviri_l1b_hrit.py
+++
b/satpy/readers/seviri_l1b_hrit.py @@ -338,14 +338,12 @@ def satpos(self): Returns: Longitude [deg east], Latitude [deg north] and Altitude [m] """ a, b = self.get_earth_radii() - start_time = self.prologue['ImageAcquisition'][ - 'PlannedAcquisitionTime']['TrueRepeatCycleStart'] poly_finder = OrbitPolynomialFinder(self.prologue['SatelliteStatus'][ 'Orbit']['OrbitPolynomial']) - orbit_polynomial = poly_finder.get_orbit_polynomial(start_time) + orbit_polynomial = poly_finder.get_orbit_polynomial(self.start_time) return get_satpos( orbit_polynomial=orbit_polynomial, - time=start_time, + time=self.start_time, semi_major_axis=a, semi_minor_axis=b, ) From 6473ec66bc252c72224191e4d46638f5fca20cd9 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 21 Mar 2023 14:17:56 +0000 Subject: [PATCH 0087/1416] Add nominal and observation start/end time properties and point start/end time to nominal ones --- satpy/readers/fci_l1c_nc.py | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index 3a72347c3a..375a3d5fca 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -212,15 +212,35 @@ def __init__(self, filename, filename_info, filetype_info): self._cache = {} @property - def start_time(self): + def nominal_start_time(self): """Get start time.""" return self.filename_info['start_time'] @property - def end_time(self): + def nominal_end_time(self): """Get end time.""" return self.filename_info['end_time'] + @property + def observation_start_time(self): + """Get observation start time.""" + return self.filename_info['start_time'] + + @property + def observation_end_time(self): + """Get observation end time.""" + return self.filename_info['end_time'] + + @property + def start_time(self): + """Get start time.""" + return self.nominal_start_time + + @property + def end_time(self): + """Get end time.""" + return self.nominal_end_time + def get_channel_measured_group_path(self, channel): """Get the channel's measured group path.""" if self.filetype_info['file_type'] == 'fci_l1c_hrfi': From 57a4e27824cd5e5c7ca614c61717129a4e1e62ad Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 21 Mar 2023 14:40:41 +0000 Subject: [PATCH 0088/1416] Add a target to extract info for coverage in the filename --- satpy/etc/readers/fci_l1c_nc.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/readers/fci_l1c_nc.yaml b/satpy/etc/readers/fci_l1c_nc.yaml index f558b3dfd0..4baed25cf7 100644 --- a/satpy/etc/readers/fci_l1c_nc.yaml +++ b/satpy/etc/readers/fci_l1c_nc.yaml @@ -16,7 +16,7 @@ reader: file_types: fci_l1c_fdhsi: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler - file_patterns: [ '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-FDHSI-FD-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' ] + file_patterns: [ 
'{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-FDHSI-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' ]
     expected_segments: 40
     required_netcdf_variables:
       - attr/platform

From fa8221892eda6865bf141326d5e9d2ac05d30241 Mon Sep 17 00:00:00 2001
From: youva Aoun
Date: Tue, 21 Mar 2023 14:45:46 +0000
Subject: [PATCH 0089/1416] Add correct nominal start/end times based on info
 available in the filename including the RC number

---
 satpy/readers/fci_l1c_nc.py | 19 +++++++++++++++----
 1 file changed, 15 insertions(+), 4 deletions(-)

diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py
index 375a3d5fca..55fc2de578 100644
--- a/satpy/readers/fci_l1c_nc.py
+++ b/satpy/readers/fci_l1c_nc.py
@@ -112,6 +112,7 @@
 from __future__ import absolute_import, division, print_function, unicode_literals

 import logging
+from datetime import timedelta
 from functools import cached_property

 import dask.array as da
@@ -213,13 +214,23 @@ def __init__(self, filename, filename_info, filetype_info):

     @property
     def nominal_start_time(self):
-        """Get start time."""
-        return self.filename_info['start_time']
+        """Get nominal start time."""
+        RC_date = self.filename_info['start_time'].replace(hour=0, minute=0, second=0, microsecond=0)
+        if self.filename_info['coverage'] == 'FD':
+            RC_period_min = 10
+        else:
+            RC_period_min = 2.5
+        print((self.filename_info['repeat_cycle_in_day']-1)*RC_period_min)
+        return RC_date + timedelta(minutes=(self.filename_info['repeat_cycle_in_day']-1)*RC_period_min)

     @property
     def nominal_end_time(self):
-        """Get end time."""
-        return self.filename_info['end_time']
+        """Get nominal end time."""
+        if self.filename_info['coverage'] == 'FD':
+            RC_period_min = 10
+        else:
+            RC_period_min = 2.5
+        return self.nominal_start_time + timedelta(minutes=RC_period_min)

     @property
     def observation_start_time(self):

From 78df2b64ee9568774558f31312c7fac7eeea3568 Mon Sep 17 00:00:00 2001
From: youva Aoun
Date: Tue, 21 Mar 2023 14:58:19 +0000
Subject: [PATCH 0090/1416] Add time_parameters as an attribute and include
 nominal and observation start/end times

---
 satpy/readers/fci_l1c_nc.py | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py
index 55fc2de578..a526453961 100644
--- a/satpy/readers/fci_l1c_nc.py
+++ b/satpy/readers/fci_l1c_nc.py
@@ -220,7 +220,6 @@ def nominal_start_time(self):
             RC_period_min = 10
         else:
             RC_period_min = 2.5
-        print((self.filename_info['repeat_cycle_in_day']-1)*RC_period_min)
         return RC_date + timedelta(minutes=(self.filename_info['repeat_cycle_in_day']-1)*RC_period_min)

     @property
@@ -379,7 +378,13 @@ def _get_dataset_measurand(self, key, info=None):

         # remove attributes from original file which don't apply anymore
         res.attrs.pop('long_name')
-
+        # Add time_parameters attributes
+        res.attrs['time_parameters'] = {
+            'nominal_start_time': self.nominal_start_time,
+            'nominal_end_time': self.nominal_end_time,
+            'observation_start_time': self.observation_start_time,
+            'observation_end_time': self.observation_end_time,
+        }
         res.attrs.update(self.orbital_param)

         return res

From bab420d36ea4033b12aaaa70b73b259e47c83842 Mon Sep 17 00:00:00 2001
From: youva Aoun
Date: Tue, 21 Mar 2023 15:01:30 +0000
Subject: [PATCH 0091/1416] Add coverage target on HRFI file_pattern to be consistent with FDHSI --- satpy/etc/readers/fci_l1c_nc.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/readers/fci_l1c_nc.yaml b/satpy/etc/readers/fci_l1c_nc.yaml index 4baed25cf7..f89699ae3a 100644 --- a/satpy/etc/readers/fci_l1c_nc.yaml +++ b/satpy/etc/readers/fci_l1c_nc.yaml @@ -74,7 +74,7 @@ file_types: - ir_133 fci_l1c_hrfi: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler - file_patterns: [ '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-HRFI-FD-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' ] + file_patterns: [ '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-HRFI-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' ] expected_segments: 40 required_netcdf_variables: - attr/platform From fbeb405f84e17eedff03efc97a2c329016db889f Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 21 Mar 2023 15:51:07 +0000 Subject: [PATCH 0092/1416] Add a test for coverage to avoid reading RSS data that are not supported by the reader --- satpy/readers/fci_l1c_nc.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index a526453961..0c6279055b 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -202,6 +202,8 @@ class using the :mod:`~satpy.Scene.load` method with the reader def __init__(self, filename, filename_info, filetype_info): """Initialize file handler.""" + if not filename_info['coverage'] == 'FD': + raise NotImplementedError(f"coverage for {filename_info['coverage']} not supported by this reader") super().__init__(filename, filename_info, filetype_info, cache_var_size=0, From fdd084359657c36c21d4c619c004eea8a07e0743 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Wed, 22 Mar 2023 12:00:29 +0100 Subject: [PATCH 0093/1416] Add first version of low-level cloud composite. --- satpy/composites/__init__.py | 72 ++++++++++++++++++++++++++++++++++++ 1 file changed, 72 insertions(+) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 248aa3b8e6..5c16e0c83f 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1086,6 +1086,78 @@ def __call__(self, projectables, **kwargs): return super().__call__(projectables, **kwargs) +# class LowCloudCompositor(CloudCompositor): +class LowCloudCompositor(GenericCompositor): + """Class information. + + TODO: Rewrite docstring + + Detect low clouds based on latitude-dependent thresholding and use it as a mask for compositing. + + This compositor aims at identifying high clouds and assigning them a transparency based on the brightness + temperature (cloud opacity). 
In contrast to the `CloudCompositor` and `HighCloudCompositor`, the detection is based on the brightness
+    temperature difference between an IR window channel and the 3.9 micron channel, with separate transition
+    ranges foreseen for land (range_land) and water (range_water) surfaces. This follows the Geocolor
+    implementation of low clouds in Miller et al. (2020, :doi:`10.1175/JTECH-D-19-0134.1`).
+
+    """
+
+    def __init__(self, name, range_land=(1.0, 4.5), range_water=(0.0, 4.0), transition_gamma=1.0,
+                 color=(140.25, 191.25, 249.9), **kwargs):
+        """Collect custom configuration values.
+
+        Args:
+            range_land (tuple): Brightness temperature difference range (min, max) used to scale the
+                transparency over land surfaces
+            range_water (tuple): As range_land, but foreseen for water surfaces (not yet used in __call__)
+            transition_gamma (float): Gamma correction to apply to the alpha channel
+            color (tuple): RGB color used for colorizing the detected low clouds
+
+        """
+        self.range_land = range_land
+        self.range_water = range_water
+        self.transition_gamma = transition_gamma
+        self.color = color
+        super().__init__(name, **kwargs)
+        # super().__init__(name, transition_gamma=transition_gamma, **kwargs)
+
+    def __call__(self, projectables, **kwargs):
+        """Generate the composite."""
+        diff_comp = DifferenceCompositor(name='ir_difference')
+        btd = diff_comp.__call__(projectables)
+
+        # Avoid spurious false alarms caused by noise in the 3.9um channel that can occur for very cold cloud tops
+        btd = btd.where(projectables[0] >= 230, 0.0)
+
+        # self.transition_min, self.transition_max = self.range_land
+        # res_land = super().__call__((btd), **kwargs)
+
+        # self.transition_min, self.transition_max = self.range_water
+        # res_water = super().__call__(btd, **kwargs)
+
+        tr_min = self.range_land[0]
+        tr_max = self.range_land[1]
+
+        slope = 1 / (tr_max - tr_min)
+        offset = 0 - slope * tr_min
+
+        alpha = btd.where(btd > tr_min, 0.0)
+        alpha = alpha.where(btd <= tr_max, 1.0)
+        alpha = alpha.where((btd <= tr_min) | (btd > tr_max), slope * btd + offset)
+
+        alpha **= self.transition_gamma
+        res = super().__call__((btd, alpha), low_cloud_color=self.color, **kwargs)
+        return res
+
+
 class RatioSharpenedRGB(GenericCompositor):
     """Sharpen RGB bands with ratio of a high resolution band to a lower resolution version.

From 92607d6f07e353494244b5143d9c585466203dcc Mon Sep 17 00:00:00 2001
From: Johan Strandgren
Date: Wed, 22 Mar 2023 12:02:21 +0100
Subject: [PATCH 0094/1416] Add support for monochromatic colorization
 enhancement.
--- satpy/enhancements/__init__.py | 43 ++++++++++++++++++++++------- satpy/etc/enhancements/generic.yaml | 14 ++++++++++ 2 files changed, 47 insertions(+), 10 deletions(-) diff --git a/satpy/enhancements/__init__.py b/satpy/enhancements/__init__.py index 29f2cbdf54..0ae8fd45c0 100644 --- a/satpy/enhancements/__init__.py +++ b/satpy/enhancements/__init__.py @@ -358,16 +358,39 @@ def _merge_colormaps(kwargs, img=None): from trollimage.colormap import Colormap full_cmap = None - palette = kwargs['palettes'] - if isinstance(palette, Colormap): - full_cmap = palette - else: - for itm in palette: - cmap = create_colormap(itm, img) - if full_cmap is None: - full_cmap = cmap - else: - full_cmap = full_cmap + cmap + # TODO + # - Improve check if both palettes and monochromatic are set + # - Improve exception handling for monochromatic cases + # - Resolve RunTimeWarnings + + if 'palettes' in kwargs: + palette = kwargs['palettes'] + if isinstance(palette, Colormap): + full_cmap = palette + else: + for itm in palette: + cmap = create_colormap(itm, img) + if full_cmap is None: + full_cmap = cmap + else: + full_cmap = full_cmap + cmap + + if 'monochromatic' in kwargs: + palette = {} + color = kwargs['monochromatic'].get('color', None) + if color is None: + # TODO: add error + pass + elif isinstance(color, (list, tuple)): + palette['colors'] = [color] + elif isinstance(color, str): + var = img.data.attrs.get(color, None) + if not isinstance(var, (tuple, list)): + # TODO: add error + pass + palette['colors'] = [var] + + full_cmap = create_colormap(palette, img) return full_cmap diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index b575d776df..43b40e2fcd 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -852,6 +852,20 @@ enhancements: - name: 3d method: !!python/name:satpy.enhancements.three_d_effect + ir_low_cloud: + standard_name: ir_low_cloud + operations: + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: linear + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + monochromatic: + color: low_cloud_color +# color: [0, 255, 0] + colorized_ir_clouds: standard_name: colorized_ir_clouds operations: From 6ee859b9f0d0b2c6a2e10994072dbc6cdae04523 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Wed, 22 Mar 2023 12:03:13 +0100 Subject: [PATCH 0095/1416] Add GeoColor low-level cloud composite for AHI. 
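The composite builds on the LowCloudCompositor introduced above, which turns the B13 - B07 (10.4 minus 3.9 micron) brightness temperature difference into an opacity ramp. A condensed NumPy sketch of that ramp, using the default land-range thresholds (a simplification for illustration, not the satpy code itself)::

    import numpy as np

    def low_cloud_alpha(btd, tr_min=1.0, tr_max=4.5, gamma=1.0):
        """Map a window-minus-3.9um BTD to an opacity between 0 and 1."""
        alpha = (btd - tr_min) / (tr_max - tr_min)  # linear ramp between the thresholds
        alpha = np.clip(alpha, 0.0, 1.0)            # transparent below tr_min, opaque above tr_max
        return alpha ** gamma                       # optional gamma shaping

    print(low_cloud_alpha(np.array([0.5, 2.0, 5.0])))  # -> approximately [0. 0.286 1.]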
--- satpy/etc/composites/ahi.yaml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index 0785ad80c2..964e37b0a3 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -491,8 +491,16 @@ composites: - _night_background_hires # GeoColor - GeoColor_HighClouds: + geo_color_high_clouds: standard_name: ir_high_cloud compositor: !!python/name:satpy.composites.HighCloudCompositor prerequisites: - name: B13 + + geo_color_low_clouds: + standard_name: ir_low_cloud + compositor: !!python/name:satpy.composites.LowCloudCompositor + color: [140.25, 191.25, 249.9] + prerequisites: + - name: B13 + - name: B07 From 46f431668b72c206633dcb6dc10599dd901b2e6e Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 28 Mar 2023 13:50:01 +0000 Subject: [PATCH 0096/1416] Add a function to round time --- satpy/readers/seviri_base.py | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 35ef1fdf4d..9b4361c1fe 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -166,6 +166,7 @@ """ import warnings +from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -980,3 +981,33 @@ def mask_bad_quality(data, line_validity, line_geometric_quality, line_radiometr line_mask = line_mask[:, np.newaxis] data = data.where(~line_mask, np.nan).astype(np.float32) return data + + +def round_time(dt=None, date_delta=None, to='average'): + """Round a datetime object to a multiple of a timedelta. + + dt : datetime.datetime object, default now. + dateDelta : timedelta object, we round to a multiple of this, default 1 minute. + from: http://stackoverflow.com/questions/3463930/how-to-round-the-minute-of-a-datetime-object-python + """ + round_to = date_delta.total_seconds() + if dt is None: + dt = datetime.now() + seconds = (dt - dt.min).seconds + if date_delta is None: + date_delta = timedelta(minutes=1) + + if seconds % round_to == 0 and dt.microsecond == 0: + rounding = (seconds + round_to / 2) // round_to * round_to + else: + if to == 'up': + # // is a floor division, not a comment on following line (like in javascript): + rounding = (seconds + dt.microsecond/1000000 + round_to) // round_to * round_to + elif to == 'down': + rounding = seconds // round_to * round_to + elif to == 'average': + rounding = (seconds + round_to / 2) // round_to * round_to + else: + raise ValueError(f'Unusported rounding option {to}') + + return dt + timedelta(0, rounding - seconds, - dt.microsecond) From d1fa4eec51df469262e5200ded6197db1ca6f34e Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 28 Mar 2023 14:15:01 +0000 Subject: [PATCH 0097/1416] Clean the code to only use the relevant section for SEVIRI --- satpy/readers/seviri_base.py | 20 +++++--------------- 1 file changed, 5 insertions(+), 15 deletions(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 9b4361c1fe..b986aa2b8b 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -983,31 +983,21 @@ def mask_bad_quality(data, line_validity, line_geometric_quality, line_radiometr return data -def round_time(dt=None, date_delta=None, to='average'): +def round_time(dt=None, date_delta=None): """Round a datetime object to a multiple of a timedelta. dt : datetime.datetime object, default now. dateDelta : timedelta object, we round to a multiple of this, default 1 minute. 
- from: http://stackoverflow.com/questions/3463930/how-to-round-the-minute-of-a-datetime-object-python + adapted for SEVIRI from: + http://stackoverflow.com/questions/3463930/how-to-round-the-minute-of-a-datetime-object-python """ - round_to = date_delta.total_seconds() if dt is None: dt = datetime.now() seconds = (dt - dt.min).seconds if date_delta is None: date_delta = timedelta(minutes=1) + round_to = date_delta.total_seconds() - if seconds % round_to == 0 and dt.microsecond == 0: - rounding = (seconds + round_to / 2) // round_to * round_to - else: - if to == 'up': - # // is a floor division, not a comment on following line (like in javascript): - rounding = (seconds + dt.microsecond/1000000 + round_to) // round_to * round_to - elif to == 'down': - rounding = seconds // round_to * round_to - elif to == 'average': - rounding = (seconds + round_to / 2) // round_to * round_to - else: - raise ValueError(f'Unusported rounding option {to}') + rounding = (seconds + round_to / 2) // round_to * round_to return dt + timedelta(0, rounding - seconds, - dt.microsecond) From 1870df55673dda0f240cf70e67048200dbbe848b Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 28 Mar 2023 14:18:26 +0000 Subject: [PATCH 0098/1416] Refactor the nominal start/end time to use the newly defined round_time function in seviri_base --- satpy/readers/seviri_l1b_native.py | 32 ++++++++---------------------- 1 file changed, 8 insertions(+), 24 deletions(-) diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index e3a6adb9f1..f90675efaa 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -43,6 +43,7 @@ CHANNEL_NAMES, HRV_NUM_COLUMNS, HRV_NUM_LINES, + REPEAT_CYCLE_DURATION, SATNUM, VISIR_NUM_COLUMNS, VISIR_NUM_LINES, @@ -57,6 +58,7 @@ get_satpos, pad_data_horizontally, pad_data_vertically, + round_time, ) from satpy.readers.seviri_l1b_native_hdr import ( DEFAULT_15_SECONDARY_PRODUCT_HEADER, @@ -120,6 +122,10 @@ def __init__(self, filename, filename_info, filetype_info, self._read_trailer() self.image_boundaries = ImageBoundaries(self.header, self.trailer, self.mda) + self.tres = REPEAT_CYCLE_DURATION # base RC duration of 15 + if self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1: + self.tres = 5 + def _has_archive_header(self): """Check whether the file includes an ASCII archive header.""" ascii_startswith = b'FormatName : NATIVE' @@ -130,35 +136,13 @@ def _has_archive_header(self): def nominal_start_time(self): """Read the repeat cycle nominal start time from metadata and round it to expected nominal time slot.""" tm = self.header['15_DATA_HEADER']['ImageAcquisition']['PlannedAcquisitionTime']['TrueRepeatCycleStart'] - if self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['NominalImageScanning'] == 1: - # rounding nominal start time to fit the expected 15 minutes RC for full disk scan - tm = tm - timedelta(minutes=tm.minute % 15, - seconds=tm.second, - microseconds=tm.microsecond) - elif self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1: - # rounding nominal start time to fit the expected 5 minutes RSS for full disk scan - tm = tm - timedelta(minutes=tm.minute % 5, - seconds=tm.second, - microseconds=tm.microsecond) - # TODO raise a warning if none fo the above but still return the original time - return tm + return round_time(tm, date_delta=timedelta(minutes=self.tres)) @property def nominal_end_time(self): """Read the repeat cycle nominal
end time from metadata and round it to expected nominal time slot.""" tm = self.header['15_DATA_HEADER']['ImageAcquisition']['PlannedAcquisitionTime']['PlannedRepeatCycleEnd'] - if self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['NominalImageScanning'] == 1: - # rounding nominal start time to fit the expected 15 minutes RC for full disk scan - tm = tm - timedelta(minutes=tm.minute % 15, - seconds=tm.second, - microseconds=tm.microsecond) - elif self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1: - # rounding nominal start time to fit the expected 5 minutes RSS for full disk scan - tm = tm - timedelta(minutes=tm.minute % 5, - seconds=tm.second, - microseconds=tm.microsecond) - # TODO raise a warning if none fo the above but still return the original time - return tm + return round_time(tm, date_delta=timedelta(minutes=15)) @property def observation_start_time(self): From 1d9aa9748d3ad82cf2de92554381d5b02e52460a Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 28 Mar 2023 14:23:26 +0000 Subject: [PATCH 0099/1416] Change name of round_time to round_nom_time to clarify the use of the function --- satpy/readers/seviri_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index b986aa2b8b..37f12377d4 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -983,7 +983,7 @@ def mask_bad_quality(data, line_validity, line_geometric_quality, line_radiometr return data -def round_time(dt=None, date_delta=None): +def round_nom_time(dt=None, date_delta=None): """Round a datetime object to a multiple of a timedelta. dt : datetime.datetime object, default now. From 6cba8fdef58ede50fadcf9b0acbcc549cbf06316 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 28 Mar 2023 14:23:53 +0000 Subject: [PATCH 0100/1416] Change name of round_time to round_nom_time --- satpy/readers/seviri_l1b_native.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index f90675efaa..4bc32bdc5c 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -58,7 +58,7 @@ get_satpos, pad_data_horizontally, pad_data_vertically, - round_time, + round_nom_time, ) from satpy.readers.seviri_l1b_native_hdr import ( DEFAULT_15_SECONDARY_PRODUCT_HEADER, @@ -136,13 +136,13 @@ def _has_archive_header(self): def nominal_start_time(self): """Read the repeat cycle nominal start time from metadata and round it to expected nominal time slot.""" tm = self.header['15_DATA_HEADER']['ImageAcquisition']['PlannedAcquisitionTime']['TrueRepeatCycleStart'] - return round_time(tm, date_delta=timedelta(minutes=self.tres)) + return round_nom_time(tm, date_delta=timedelta(minutes=self.tres)) @property def nominal_end_time(self): """Read the repeat cycle nominal end time from metadata and round it to expected nominal time slot.""" tm = self.header['15_DATA_HEADER']['ImageAcquisition']['PlannedAcquisitionTime']['PlannedRepeatCycleEnd'] - return round_time(tm, date_delta=timedelta(minutes=15)) + return round_nom_time(tm, date_delta=timedelta(minutes=15)) @property def observation_start_time(self): From 86f7e60df3e9452e37240a05f2d98cb2bc496d9a Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 28 Mar 2023 14:27:10 +0000 Subject: [PATCH 0101/1416] Refactor the nominal start/end time to use the newly defined round_nom_time function in seviri_base ---
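For reference, the rounding helper used by these properties behaves like this (condensed from the function above with a simplified signature; the half-interval offset makes it round to the nearest slot rather than always down)::

    from datetime import datetime, timedelta

    def round_to_slot(dt, slot=timedelta(minutes=15)):
        seconds = (dt - dt.min).seconds                             # seconds since midnight
        round_to = slot.total_seconds()
        rounding = (seconds + round_to / 2) // round_to * round_to  # nearest multiple of the slot
        return dt + timedelta(0, rounding - seconds, -dt.microsecond)

    print(round_to_slot(datetime(2006, 1, 1, 12, 7, 29)))  # 2006-01-01 12:00:00 (rounds down)
    print(round_to_slot(datetime(2006, 1, 1, 12, 8, 0)))   # 2006-01-01 12:15:00 (rounds up)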
satpy/readers/seviri_l1b_hrit.py | 32 +++++++------------------------- 1 file changed, 7 insertions(+), 25 deletions(-) diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 11548ea7ac..a2605376b4 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -209,6 +209,7 @@ from satpy.readers.seviri_base import ( CHANNEL_NAMES, HRV_NUM_COLUMNS, + REPEAT_CYCLE_DURATION, SATNUM, NoValidOrbitParams, OrbitPolynomialFinder, @@ -219,6 +220,7 @@ get_satpos, mask_bad_quality, pad_data_horizontally, + round_nom_time, ) from satpy.readers.seviri_l1b_native_hdr import hrit_epilogue, hrit_prologue, impf_configuration @@ -446,6 +448,9 @@ def __init__(self, filename, filename_info, filetype_info, self.calib_mode = calib_mode self.ext_calib_coefs = ext_calib_coefs or {} self.mask_bad_quality_scan_lines = mask_bad_quality_scan_lines + self.tres = REPEAT_CYCLE_DURATION # base RC duration of 15 + if self.epilogue['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1: + self.tres = 5 self._get_header() @@ -492,37 +497,14 @@ def nominal_start_time(self): """Get the start time and round it according to scan law.""" tm = self.prologue['ImageAcquisition'][ 'PlannedAcquisitionTime']['TrueRepeatCycleStart'] - - if self.epilogue['ImageProductionStats']['ActualScanningSummary']['NominalImageScanning'] == 1: - # rounding nominal start time to fit the expected 15 minutes RC for full disk scan - tm = tm - timedelta(minutes=tm.minute % 15, - seconds=tm.second, - microseconds=tm.microsecond) - elif self.epilogue['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1: - # rounding nominal start time to fit the expected 5 minutes RSS for full disk scan - tm = tm - timedelta(minutes=tm.minute % 5, - seconds=tm.second, - microseconds=tm.microsecond) - # TODO raise a warning if none fo the above but still return the original time - return tm + return round_nom_time(tm, date_delta=timedelta(minutes=self.tres)) @property def nominal_end_time(self): """Get the end time and round it according to scan law.""" tm = self.prologue['ImageAcquisition'][ 'PlannedAcquisitionTime']['PlannedRepeatCycleEnd'] - if self.epilogue['ImageProductionStats']['ActualScanningSummary']['NominalImageScanning'] == 1: - # rounding nominal start time to fit the expected 15 minutes RC for full disk scan - tm = tm - timedelta(minutes=tm.minute % 15, - seconds=tm.second, - microseconds=tm.microsecond) - elif self.epilogue['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1: - # rounding nominal start time to fit the expected 5 minutes RSS for full disk scan - tm = tm - timedelta(minutes=tm.minute % 5, - seconds=tm.second, - microseconds=tm.microsecond) - # TODO raise a warning if none fo the above but still return the original time - return tm + return round_nom_time(tm, date_delta=timedelta(minutes=self.tres)) @property def observation_start_time(self): From c3c3bf7e0cd87490ed92316fdf9c0161e06b820a Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 28 Mar 2023 14:30:49 +0000 Subject: [PATCH 0102/1416] Add info on the time_parameters dict --- satpy/readers/seviri_l1b_hrit.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index a2605376b4..ae921625bf 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -738,9 +738,11 @@ def _update_attrs(self, res, info): res.attrs['sensor'] = 'seviri' res.attrs['nominal_start_time'] = """ Deprecation warning:
nominal_start_time should be accessed via the time_parameters attrs + nominal_start_time is also available directly via start_time """ res.attrs['nominal_end_time'] = """ deprecation warning: nominal_end_time should be accessed via the time_parameters attrs + nominal_end_time is also available directly via end_time """ res.attrs['time_parameters'] = { 'nominal_start_time': self.nominal_start_time, From d75d3539d3910399782d033c6e73d4f28d32df7b Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 28 Mar 2023 15:14:22 +0000 Subject: [PATCH 0103/1416] Make RC_period_min a class variable defined according to the coverage already in the init to avoid code duplication --- satpy/readers/fci_l1c_nc.py | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index 0c6279055b..1b07dfe110 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -202,8 +202,10 @@ class using the :mod:`~satpy.Scene.load` method with the reader def __init__(self, filename, filename_info, filetype_info): """Initialize file handler.""" + self.RC_period_min = 10 if not filename_info['coverage'] == 'FD': raise NotImplementedError(f"coverage for {filename_info['coverage']} not supported by this reader") + self.RC_period_min = 2.5 super().__init__(filename, filename_info, filetype_info, cache_var_size=0, @@ -218,20 +220,12 @@ def __init__(self, filename, filename_info, filetype_info): def nominal_start_time(self): """Get nominal start time.""" RC_date = self.filename_info['start_time'].replace(hour=0, minute=0, second=0, microsecond=0) - if self.filename_info['coverage'] == 'FD': - RC_period_min = 10 - else: - RC_period_min = 2.5 - return RC_date + timedelta(minutes=(self.filename_info['repeat_cycle_in_day']-1)*RC_period_min) + return RC_date + timedelta(minutes=(self.filename_info['repeat_cycle_in_day']-1)*self.RC_period_min) @property def nominal_end_time(self): """Get nominal end time.""" - if self.filename_info['coverage'] == 'FD': - RC_period_min = 10 - else: - RC_period_min = 2.5 - return self.nominal_start_time + timedelta(minutes=RC_period_min) + return self.nominal_start_time + timedelta(minutes=self.RC_period_min) @property def observation_start_time(self): From c8ae060e8db938108e2d8e4de6c0eba24abe0f91 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Fri, 31 Mar 2023 18:16:35 +0200 Subject: [PATCH 0104/1416] Implement usage of land-sea-mask for the brightness temperature difference used for the Geocolor low-level cloud layer. --- satpy/composites/__init__.py | 47 ++++++++++++++--------------- satpy/etc/composites/ahi.yaml | 10 +++++- satpy/etc/enhancements/generic.yaml | 5 ++- 3 files changed, 35 insertions(+), 27 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 5c16e0c83f..de7cf04613 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1086,8 +1086,7 @@ def __call__(self, projectables, **kwargs): return super().__call__(projectables, **kwargs) -# class LowCloudCompositor(CloudCompositor): -class LowCloudCompositor(GenericCompositor): +class LowCloudCompositor(CloudCompositor): """Class information. 
TODO: Rewrite docstring @@ -1105,8 +1104,10 @@ class LowCloudCompositor(GenericCompositor): """ - def __init__(self, name, range_land=(1.0, 4.5), range_water=(0.0, 4.0), transition_gamma=1.0, - color=(140.25, 191.25, 249.9), **kwargs): + def __init__(self, name, land_sea_mask=None, value_land=1, value_sea=0, + range_land=(1.0, 4.5), + range_sea=(0.0, 4.0), + transition_gamma=1.0, color=(140.25, 191.25, 249.9), **kwargs): """Init info. TODO: Rewrite docstring @@ -1122,39 +1123,35 @@ def __init__(self, name, range_land=(1.0, 4.5), range_water=(0.0, 4.0), transiti transition_gamma (float): Gamma correction to apply at the end """ + self.land_sea_mask = land_sea_mask + self.val_land = value_land + self.val_sea = value_sea self.range_land = range_land - self.range_water = range_water + self.range_sea = range_sea self.transition_gamma = transition_gamma self.color = color - super().__init__(name, **kwargs) - # super().__init__(name, transition_gamma=transition_gamma, **kwargs) + self.transition_min = None + self.transition_max = None + super().__init__(name, transition_gamma=transition_gamma, **kwargs) def __call__(self, projectables, **kwargs): """Generate the composite.""" - diff_comp = DifferenceCompositor(name='ir_difference') - btd = diff_comp.__call__(projectables) + projectables = self.match_data_arrays(projectables) + btd, lsm, win_bt = projectables + lsm = lsm.squeeze(drop=True) + lsm = lsm.round() # Make sure to have whole numbers in case of smearing from resampling # Avoid spurious false alarms caused by noise in the 3.9um channel that can occur for very cold cloud tops - btd = btd.where(projectables[0] >= 230, 0.0) - - # self.transition_min, self.transition_max = self.range_land - # res_land = super().__call__((btd), **kwargs) - - # self.transition_min, self.transition_max = self.range_water - # res_water = super().__call__(btd, **kwargs) + btd = btd.where(win_bt >= 230, 0.0) - tr_min = self.range_land[0] - tr_max = self.range_land[1] + self.transition_min, self.transition_max = self.range_land + res = super().__call__([btd.where(lsm == self.val_land)], low_cloud_color=self.color, **kwargs) - slope = 1 / (tr_max - tr_min) - offset = 0 - slope * tr_min + self.transition_min, self.transition_max = self.range_sea + res_sea = super().__call__([btd.where(lsm == self.val_sea)], low_cloud_color=self.color, **kwargs) - alpha = btd.where(btd > tr_min, 0.0) - alpha = alpha.where(btd <= tr_max, 1.0) - alpha = alpha.where((btd <= tr_min) | (btd > tr_max), slope * btd + offset) + res = res.where(lsm == self.val_land, res_sea) - alpha **= self.transition_gamma - res = super().__call__((btd, alpha), low_cloud_color=self.color, **kwargs) return res diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index 964e37b0a3..2730547f56 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -500,7 +500,15 @@ composites: geo_color_low_clouds: standard_name: ir_low_cloud compositor: !!python/name:satpy.composites.LowCloudCompositor + value_sea: 0 + value_land: 254 color: [140.25, 191.25, 249.9] prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: B13 + - name: B07 + - compositor: !!python/name:satpy.composites.StaticImageCompositor + standard_name: land_sea_mask + filename: "/tcenas/scratch/strandgren/GeoColor/land_sea_mask_3km_i_.tif" - name: B13 - - name: B07 diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 43b40e2fcd..2094327918 100644 --- 
a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -855,6 +855,10 @@ enhancements: ir_low_cloud: standard_name: ir_low_cloud operations: + - name: inverse + method: !!python/name:satpy.enhancements.invert + args: + - [False, True] - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: @@ -864,7 +868,6 @@ enhancements: kwargs: monochromatic: color: low_cloud_color -# color: [0, 255, 0] colorized_ir_clouds: standard_name: colorized_ir_clouds From d64980735a7669398e2f626c715a5c54ae024937 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Mon, 3 Apr 2023 08:17:47 +0000 Subject: [PATCH 0105/1416] Change date_delta to time_delta to clarify the purpose and understanding --- satpy/readers/seviri_base.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 37f12377d4..598397782e 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -983,20 +983,20 @@ def mask_bad_quality(data, line_validity, line_geometric_quality, line_radiometr return data -def round_nom_time(dt=None, date_delta=None): +def round_nom_time(dt=None, time_delta=None): """Round a datetime object to a multiple of a timedelta. dt : datetime.datetime object, default now. - dateDelta : timedelta object, we round to a multiple of this, default 1 minute. + time_delta : timedelta object, we round to a multiple of this, default 1 minute. adapted for SEVIRI from: http://stackoverflow.com/questions/3463930/how-to-round-the-minute-of-a-datetime-object-python """ if dt is None: dt = datetime.now() seconds = (dt - dt.min).seconds - if date_delta is None: - date_delta = timedelta(minutes=1) - round_to = date_delta.total_seconds() + if time_delta is None: + time_delta = timedelta(minutes=1) + round_to = time_delta.total_seconds() rounding = (seconds + round_to / 2) // round_to * round_to From c3636f3b3f60a46a6032bdca82a6a8e08b6a50ea Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Mon, 3 Apr 2023 11:33:49 +0200 Subject: [PATCH 0106/1416] Add GeoColor composite recipes including rayleigh correction modifier. --- satpy/etc/composites/ahi.yaml | 86 ++++++++++++++++++++++++++--------- 1 file changed, 64 insertions(+), 22 deletions(-) diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index 2730547f56..a2fccee84b 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -14,6 +14,22 @@ modifiers: - solar_azimuth_angle - solar_zenith_angle + geo_color_rayleigh_corrected: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: rayleigh_only + reduce_lim_low: 70 + reduce_lim_high: 105 + reduce_strength: 1.5 + prerequisites: + - name: B03 + modifiers: [sunz_corrected] + optional_prerequisites: + - satellite_azimuth_angle + - satellite_zenith_angle + - solar_azimuth_angle + - solar_zenith_angle + composites: green: deprecation_warning: "'green' is a deprecated composite. Use the equivalent 'hybrid_green' instead." 
@@ -103,17 +119,6 @@ composites: - wavelength: 0.85 standard_name: toa_reflectance - ndvi_hybrid_green: - compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen - prerequisites: - - name: B02 - modifiers: [sunz_corrected, rayleigh_corrected] - - name: B03 - modifiers: [sunz_corrected, rayleigh_corrected] - - name: B04 - modifiers: [sunz_corrected] - standard_name: toa_bidirectional_reflectance - airmass: # PDF slides: https://www.eumetsat.int/website/home/News/ConferencesandEvents/DAT_2833302.html # Under session 2 by Akihiro Shimizu (JMA) @@ -271,17 +276,6 @@ composites: high_resolution_band: red standard_name: true_color - true_color_ndvi_green: - compositor: !!python/name:satpy.composites.SelfSharpenedRGB - prerequisites: - - name: B03 - modifiers: [sunz_corrected, rayleigh_corrected] - - name: ndvi_hybrid_green - - name: B01 - modifiers: [sunz_corrected, rayleigh_corrected] - high_resolution_band: red - standard_name: true_color - natural_color_nocorr: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: @@ -491,6 +485,40 @@ composites: - _night_background_hires # GeoColor + + geo_color: + compositor: !!python/name:satpy.composites.DayNightCompositor + lim_low: 80 + lim_high: 88 + standard_name: true_color_with_night_ir + prerequisites: + - geo_color_true_color + - geo_color_night + + # GeoColor Daytime + geo_color_green: + compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen + prerequisites: + - name: B02 + modifiers: [ sunz_corrected, geo_color_rayleigh_corrected ] + - name: B03 + modifiers: [ sunz_corrected, geo_color_rayleigh_corrected ] + - name: B04 + modifiers: [ sunz_corrected ] + standard_name: toa_bidirectional_reflectance + + geo_color_true_color: + compositor: !!python/name:satpy.composites.SelfSharpenedRGB + prerequisites: + - name: B03 + modifiers: [ sunz_corrected, geo_color_rayleigh_corrected ] + - name: geo_color_green + - name: B01 + modifiers: [ sunz_corrected, geo_color_rayleigh_corrected ] + high_resolution_band: red + standard_name: true_color + + # GeoColor Nighttime geo_color_high_clouds: standard_name: ir_high_cloud compositor: !!python/name:satpy.composites.HighCloudCompositor @@ -512,3 +540,17 @@ composites: standard_name: land_sea_mask filename: "/tcenas/scratch/strandgren/GeoColor/land_sea_mask_3km_i_.tif" - name: B13 + + geo_color_bl: + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background + prerequisites: + - geo_color_low_clouds + - _night_background_hires + + geo_color_night: + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background + prerequisites: + - geo_color_high_clouds + - geo_color_bl From 5d1c7235794fbea3e4e88539f2004c88318fc693 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Mon, 3 Apr 2023 10:00:41 +0000 Subject: [PATCH 0107/1416] Change the reference to use the observation_start_time instead of the filename --- satpy/readers/fci_l1c_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index 1b07dfe110..49b01e4483 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -219,7 +219,7 @@ def __init__(self, filename, filename_info, filetype_info): @property def nominal_start_time(self): """Get nominal start time.""" - RC_date = self.filename_info['start_time'].replace(hour=0, minute=0, second=0, microsecond=0) + RC_date = self.observation_start_time.replace(hour=0, minute=0, second=0, microsecond=0) 
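+        # For example (illustrative): with the 10-minute FD repeat cycle,
+        # repeat_cycle_in_day = 73 corresponds to midnight + 72 * 10 min = 12:00 UTC
+        # as the nominal slot start.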
return RC_date + timedelta(minutes=(self.filename_info['repeat_cycle_in_day']-1)*self.RC_period_min) @property From e8f74f4ed45c8e42d83d7408a6550b951da3d751 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Mon, 3 Apr 2023 10:02:53 +0000 Subject: [PATCH 0108/1416] Fix the use of self.tres in the nominal_end_time --- satpy/readers/seviri_l1b_native.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index 4bc32bdc5c..06ce0bb7aa 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -142,7 +142,7 @@ def nominal_start_time(self): def nominal_end_time(self): """Read the repeat cycle nominal end time from metadata and round it to expected nominal time slot.""" tm = self.header['15_DATA_HEADER']['ImageAcquisition']['PlannedAcquisitionTime']['PlannedRepeatCycleEnd'] - return round_nom_time(tm, date_delta=timedelta(minutes=15)) + return round_nom_time(tm, date_delta=timedelta(minutes=self.tres)) @property def observation_start_time(self): From 7f10c774f81786d2aa4f05c0fa06239f2f93bdcf Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Mon, 3 Apr 2023 14:26:04 +0000 Subject: [PATCH 0109/1416] Add test for round_nom_time --- satpy/tests/reader_tests/test_seviri_base.py | 26 +++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index 92b4c5e287..51511d5aa8 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -18,7 +18,7 @@ """Test the MSG common (native and hrit format) functionionalities.""" import unittest -from datetime import datetime +from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -37,6 +37,7 @@ get_satpos, pad_data_horizontally, pad_data_vertically, + round_nom_time, ) @@ -105,6 +106,29 @@ def test_pad_data_vertically_bad_shape(self): with self.assertRaises(IndexError): pad_data_vertically(data, final_size, south_bound, north_bound) + def observation_start_time(self): + """Get scan start timestamp for testing.""" + return datetime(2023, 3, 20, 15, 0, 10, 691000) + + def observation_end_time(self): + """Get scan end timestamp for testing.""" + return datetime(2023, 3, 20, 15, 12, 43, 843000) + + def test_round_nom_time(self): + """Test the rounding of start/end_time.""" + self.assertEqual(round_nom_time( + dt=self.observation_start_time(), + time_delta=timedelta(minutes=15) + ), + datetime(2023, 3, 20, 15, 0) + ) + self.assertEqual(round_nom_time( + dt=self.observation_end_time(), + time_delta=timedelta(minutes=15) + ), + datetime(2023, 3, 20, 15, 15) + ) + @staticmethod def test_pad_data_horizontally(): """Test the horizontal hrv padding.""" From b30ee727ba9c6b9809cb339b2d80d5dfc45274c6 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Mon, 3 Apr 2023 14:29:57 +0000 Subject: [PATCH 0110/1416] Add my name to the list of authors --- AUTHORS.md | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS.md b/AUTHORS.md index 703c88d413..3d4dc5de48 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -82,3 +82,4 @@ The following people have made contributions to this project: - [praerien (praerien)](https://github.com/praerien) - [Xin Zhang (zxdawn)](https://github.com/zxdawn) - [Yufei Zhu (yufeizhu600)](https://github.com/yufeizhu600) +- [Youva Aoun (YouvaEUMex)](https://github.com/YouvaEUMex) From 258099073b32003402f308b1518f308b70e65334 Mon Sep 17 00:00:00 2001 From: Johan
Strandgren Date: Mon, 3 Apr 2023 17:08:32 +0200 Subject: [PATCH 0111/1416] Add AHI GeoColor composite without background layer. --- satpy/etc/composites/ahi.yaml | 23 ++++++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index a2fccee84b..98e3a028c7 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -495,6 +495,15 @@ composites: - geo_color_true_color - geo_color_night + geo_color_without_background: + compositor: !!python/name:satpy.composites.DayNightCompositor + lim_low: 80 + lim_high: 88 + standard_name: true_color_with_night_ir + prerequisites: + - geo_color_true_color + - geo_color_night_without_background + # GeoColor Daytime geo_color_green: compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen @@ -518,7 +527,7 @@ composites: high_resolution_band: red standard_name: true_color - # GeoColor Nighttime + # GeoColor Night-time geo_color_high_clouds: standard_name: ir_high_cloud compositor: !!python/name:satpy.composites.HighCloudCompositor @@ -538,10 +547,11 @@ composites: - name: B07 - compositor: !!python/name:satpy.composites.StaticImageCompositor standard_name: land_sea_mask + # TODO Change filename filename: "/tcenas/scratch/strandgren/GeoColor/land_sea_mask_3km_i_.tif" - name: B13 - geo_color_bl: + geo_color_background_with_low_clouds: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: night_ir_with_background prerequisites: @@ -553,4 +563,11 @@ composites: standard_name: night_ir_with_background prerequisites: - geo_color_high_clouds - - geo_color_bl + - geo_color_background_with_low_clouds + + geo_color_night_without_background: + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background + prerequisites: + - geo_color_low_clouds + - geo_color_high_clouds From 608233fb1fa98850ff070848441750139baec55f Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Mon, 3 Apr 2023 18:56:09 +0200 Subject: [PATCH 0112/1416] Improve docstrings and exception handling of HighCloudCompositor and LowCloudCompositor. --- satpy/composites/__init__.py | 142 +++++++++++++++++++++-------------- 1 file changed, 85 insertions(+), 57 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index de7cf04613..b20d057ca2 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1023,29 +1023,20 @@ class HighCloudCompositor(CloudCompositor): This compositor aims at identifying high clouds and assigning them a transparency based on the brightness temperature (cloud opacity). In contrast to the `CloudCompositor`, the brightness temperature threshold at - the lower end, used to identify opaque clouds, is made a function of the latitude in order to have tropopause - level clouds appear as opaque at both high and low latitudes. This follows the Geocolor implementation of - high clouds in Miller et al. (2020, :doi:`10.1175/JTECH-D-19-0134.1`). - - The idea is to define a tuple of two brightness temperature thresholds in transisiton_min and two corresponding - latitude thresholds in latitude_min. - - - TODO improve docstring: - The modified and latitude-dependent transition_min, sent to `CloudCopositor`, - will then be computed such that transition_min[0] is used if abs(latitude) < latitude_min[0]. 
- - if abs(latitude) < latitude_min(0): - tr_min_lat = transition_min[0] - elif abs(latitude) > latitude_min(1): - tr_min_lat = transition_min[1] - else: - tr_min_lat = linear intterpolation of - - tr_min_lat = transition_min[0] where abs(latitude) < latitude_min(0) - tr_min_lat = transition_min[1] where abs(latitude) > latitude_min(0) - tr_min_lat = linear interpolation between transition_min[0] and transition_min[1] where abs(latitude). - + the lower end, used to identify high opaque clouds, is made a function of the latitude in order to have + tropopause level clouds appear opaque at both high and low latitudes. This follows the Geocolor + implementation of high clouds in Miller et al. (2020, :doi:`10.1175/JTECH-D-19-0134.1`). + + The two brightness temperature thresholds in `transition_min` are used together with the corresponding + latitude limits in `latitude_min` to compute a modified version of `transition_min` that is later used + when calling `CloudCompositor`. The modified version of `transition_min` will be an array with the same + shape as the input projectable dataset, where the actual values of threshold_min are a function of the + dataset `latitude`: + + - transition_min = transition_min[0] where abs(latitude) < latitude_min(0) + - transition_min = transition_min[1] where abs(latitude) > latitude_min(0) + - transition_min = linear interpolation between transition_min[0] and transition_min[1] as a funtion + of where abs(latitude). """ def __init__(self, name, transition_min=(200., 220.), transition_max=280, latitude_min=(30., 60.), @@ -1054,20 +1045,35 @@ def __init__(self, name, transition_min=(200., 220.), transition_max=280, latitu Args: transition_min (tuple): Brightness temperature values used to identify opaque white - clouds at different latitudes + clouds at different latitudes transition_max (float): Brightness temperatures above this value are not considered to be high clouds -> transparent latitude_min (tuple): Latitude values defining the intervals for computing latitude-dependent transition_min values. - transition_gamma (float): Gamma correction to apply at the end + transition_gamma (float): Gamma correction to apply to the alpha channel within the brightness + temperature range (`transition_min` to `transition_max`). """ + if len(transition_min) != 2: + raise ValueError(f"Expected 2 `transition_min` values, got {len(transition_min)}") + if len(latitude_min) != 2: + raise ValueError(f"Expected 2 `latitude_min` values, got {len(latitude_min)}") + if type(transition_max) in [list, tuple]: + raise ValueError(f"Expected `transition_max` to be of type float, is of type {type(transition_max)}") + self.latitude_min = latitude_min super().__init__(name, transition_min=transition_min, transition_max=transition_max, transition_gamma=transition_gamma, **kwargs) def __call__(self, projectables, **kwargs): - """Generate the composite.""" + """Generate the composite. + + `projectables` is expected to be a list or tuple with a single element: + - index 0: Brightness temperature of a thermal infrared window channel (e.g. 10.5 microns). + """ + if len(projectables) != 1: + raise ValueError(f"Expected 1 dataset, got {len(projectables)}") + data = projectables[0] _, lats = data.attrs["area"].get_lonlats() lats = np.abs(lats) @@ -1087,70 +1093,92 @@ def __call__(self, projectables, **kwargs): class LowCloudCompositor(CloudCompositor): - """Class information. + """Detect low-level clouds based on thresholding and use it as a mask for compositing during night-time. 
- TODO: Rewrite docstring + This compsitor computes the brightness temperature difference between a window channel (e.g. 10.5 micron) + and the near-infrared channel e.g. (3.8 micron) and uses this brightness temperature difference, `BTD`, to + create a partially transparent mask for compositing. - Detect low clouds based on latitude-dependent thresholding and use it as a mask for compositing. - - This compositor aims at identifying high clouds and assigning them a transparency based on the brightness - temperature (cloud opacity). In contrast to the `CloudCompositor`, the brightness temperature threshold at - the lower end, used to identify opaque clouds, is made a function of the latitude in order to have tropopause - level clouds appear as opaque at both high and low latitudes. This follows the Geocolor implementation of - high clouds in Miller et al. (2020, :doi:`10.1175/JTECH-D-19-0134.1`). - - The idea is to define a tuple of two brightness temperature thresholds in transisiton_min and two corresponding - latitude thresholds in latitude_min. + Pixels with `BTD` values below a given threshold will be transparent, whereas pixels with `BTD` values + above another threshold will be opaque. The transparency of all other `BTD` values will be a linear + function of the `BTD` value itself. Two sets of thresholds are used, one set for land surface types + (`range_land`) and another one for sea/water surface types (`range_sea`), respectively. Hence, + this compositor requires a land-sea-mask as a prerequisite input. This follows the GeoColor + implementation of night-time low-level clouds in Miller et al. (2020, :doi:`10.1175/JTECH-D-19-0134.1`). + Please note that the spectral test and thus the output of the compositor (using the expected input data) is + only applicable during night-time. """ - def __init__(self, name, land_sea_mask=None, value_land=1, value_sea=0, + def __init__(self, name, values_land=(1), values_sea=(0), range_land=(1.0, 4.5), range_sea=(0.0, 4.0), transition_gamma=1.0, color=(140.25, 191.25, 249.9), **kwargs): """Init info. - TODO: Rewrite docstring Collect custom configuration values. Args: - transition_min (tuple): Brightness temperature values used to identify opaque white - clouds at different latitudes - transition_max (float): Brightness temperatures above this value are not considered to - be high clouds -> transparent + value_land (list): List of values used to identify land surface pixels in the land-sea-mask. + value_sea (list): List of values used to identify sea/water surface pixels in the land-sea-mask. + range_land (tuple): Threshold values used for masking low-level clouds from the brightness temperature + difference over land surface types. + range_sea (tuple): Threshold values used for masking low-level clouds from the brightness temperature + difference over sea/water. latitude_min (tuple): Latitude values defining the intervals for computing latitude-dependent transition_min values. - transition_gamma (float): Gamma correction to apply at the end - + transition_gamma (float): Gamma correction to apply to the alpha channel within the brightness + temperature difference range. + color (list): RGB definition of color to use for the low-level clouds in the composite (the final + color will be a function of the corresponding trasnparency/alpha channel). 
""" - self.land_sea_mask = land_sea_mask - self.val_land = value_land - self.val_sea = value_sea + if len(range_land) != 2: + raise ValueError(f"Expected 2 `range_land` values, got {len(range_land)}") + if len(range_sea) != 2: + raise ValueError(f"Expected 2 `range_sea` values, got {len(range_sea)}") + if type(color) not in [list, tuple] or len(color) != 3: + raise ValueError("Expected list/tuple with the red, green and blue color components.") + + self.values_land = values_land if type(values_land) in [list, tuple] else [values_land] + self.values_sea = values_sea if type(values_sea) in [list, tuple] else [values_sea] self.range_land = range_land self.range_sea = range_sea self.transition_gamma = transition_gamma self.color = color - self.transition_min = None - self.transition_max = None + self.transition_min = None # Placeholder for later use in CloudCompositor + self.transition_max = None # Placeholder for later use in CloudCompositor super().__init__(name, transition_gamma=transition_gamma, **kwargs) def __call__(self, projectables, **kwargs): - """Generate the composite.""" + """Generate the composite. + + `projectables` is expected to be a list or tuple with the following three elements: + - index 0: Brightness temperature difference between a window channel (e.g. 10.5 micron) and a + near-infrared channel e.g. (3.8 micron). + - index 1. Brightness temperature of the window channel (used to filter out noise-induced false alarms). + - index 2: Land-Sea-Mask. + """ + if len(projectables) != 3: + raise ValueError(f"Expected 3 datasets, got {len(projectables)}") + projectables = self.match_data_arrays(projectables) - btd, lsm, win_bt = projectables + btd, bt_win, lsm = projectables lsm = lsm.squeeze(drop=True) lsm = lsm.round() # Make sure to have whole numbers in case of smearing from resampling # Avoid spurious false alarms caused by noise in the 3.9um channel that can occur for very cold cloud tops - btd = btd.where(win_bt >= 230, 0.0) + btd = btd.where(bt_win >= 230, 0.0) + # Call CloudCompositor for land surface pixels self.transition_min, self.transition_max = self.range_land - res = super().__call__([btd.where(lsm == self.val_land)], low_cloud_color=self.color, **kwargs) + res = super().__call__([btd.where(lsm.isin(self.values_land))], low_cloud_color=self.color, **kwargs) + # Call CloudCompositor for sea/water surface pixels self.transition_min, self.transition_max = self.range_sea - res_sea = super().__call__([btd.where(lsm == self.val_sea)], low_cloud_color=self.color, **kwargs) + res_sea = super().__call__([btd.where(lsm.isin(self.values_sea))], low_cloud_color=self.color, **kwargs) - res = res.where(lsm == self.val_land, res_sea) + # Compine resutls for land and sea/water surface pixels + res = res.where(lsm.isin(self.values_land), res_sea) return res From 489069551915d6c4744856abf65f9e288619215a Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Mon, 3 Apr 2023 18:57:15 +0200 Subject: [PATCH 0113/1416] Modify low-level cloud composite recipe to account for modified order of projectables and new land-sea mask data. 
--- satpy/etc/composites/ahi.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index 98e3a028c7..f0543dd7d5 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -537,19 +537,19 @@ composites: geo_color_low_clouds: standard_name: ir_low_cloud compositor: !!python/name:satpy.composites.LowCloudCompositor - value_sea: 0 - value_land: 254 + values_sea: 0 + values_land: 100 color: [140.25, 191.25, 249.9] prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: B13 - name: B07 + - name: B13 - compositor: !!python/name:satpy.composites.StaticImageCompositor standard_name: land_sea_mask # TODO Change filename - filename: "/tcenas/scratch/strandgren/GeoColor/land_sea_mask_3km_i_.tif" - - name: B13 + filename: "/tcenas/scratch/strandgren/GeoColor/gshhs_land_sea_mask_3km_i.tif" geo_color_background_with_low_clouds: compositor: !!python/name:satpy.composites.BackgroundCompositor From 40ded69a74d51790ba581e0cf30d4155162a23a3 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Mon, 3 Apr 2023 19:00:39 +0200 Subject: [PATCH 0114/1416] Fix syntax of single-element tuples. --- satpy/composites/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index b20d057ca2..995ef7d317 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1110,7 +1110,7 @@ class LowCloudCompositor(CloudCompositor): only applicable during night-time. """ - def __init__(self, name, values_land=(1), values_sea=(0), + def __init__(self, name, values_land=(1,), values_sea=(0,), range_land=(1.0, 4.5), range_sea=(0.0, 4.0), transition_gamma=1.0, color=(140.25, 191.25, 249.9), **kwargs): From bd8494e1931d8f967fdb170be218c1a49860b545 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Wed, 5 Apr 2023 13:01:11 +0200 Subject: [PATCH 0115/1416] Implement non-linearity term for NDVI-weighted hybrid-green correction when converting NDVI to blend factor. --- satpy/composites/spectral.py | 24 ++++++++++++++++--- satpy/tests/compositor_tests/test_spectral.py | 13 ++++++++++ 2 files changed, 34 insertions(+), 3 deletions(-) diff --git a/satpy/composites/spectral.py b/satpy/composites/spectral.py index c0ccaff64f..744637551a 100644 --- a/satpy/composites/spectral.py +++ b/satpy/composites/spectral.py @@ -114,8 +114,8 @@ class NDVIHybridGreen(SpectralBlender): This green band correction follows the same approach as the HybridGreen compositor, but with a dynamic blend factor `f` that depends on the pixel-level Normalized Differece Vegetation Index (NDVI). The higher the NDVI, the - smaller the contribution from the nir channel will be, following a liner relationship between the two ranges - `[ndvi_min, ndvi_max]` and `limits`. + smaller the contribution from the nir channel will be, following a liner (default) or non-linear relationship + between the two ranges `[ndvi_min, ndvi_max]` and `limits`. As an example, a new green channel using e.g. 
FCI data and the NDVIHybridGreen compositor can be defined like:: ndvi_hybrid_green: compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen ndvi_min: 0.0 ndvi_max: 1.0 limits: [0.15, 0.05] + strength: 1.0 prerequisites: - name: vis_05 modifiers: [sunz_corrected, rayleigh_corrected] @@ -138,17 +139,29 @@ class NDVIHybridGreen(SpectralBlender): pixels with NDVI=1.0 will be a weighted average with 5% contribution from the near-infrared vis_08 channel and the remaining 95% from the native green vis_05 channel. For other values of NDVI a linear interpolation between these values will be performed. + + A strength larger or smaller than 1.0 will introduce a non-linear relationship between the two ranges + `[ndvi_min, ndvi_max]` and `limits`. Hence, a higher strength (> 1.0) will result in a slower transition + to higher/lower fractions at the NDVI extremes. Similarly, a lower strength (< 1.0) will result in a + faster transition to higher/lower fractions at the NDVI extremes. """ - def __init__(self, *args, ndvi_min=0.0, ndvi_max=1.0, limits=(0.15, 0.05), **kwargs): + def __init__(self, *args, ndvi_min=0.0, ndvi_max=1.0, limits=(0.15, 0.05), strength=1.0, **kwargs): """Initialize class and set the NDVI limits and the corresponding blending fraction limits.""" + if strength <= 0.0: + raise ValueError(f"Expected strength greater than 0.0, got {strength}.") + self.ndvi_min = ndvi_min self.ndvi_max = ndvi_max self.limits = limits + self.strength = strength super().__init__(*args, **kwargs) def __call__(self, projectables, optional_datasets=None, **attrs): """Construct the hybrid green channel weighted by NDVI.""" + LOG.info(f"Applying NDVI-weighted hybrid-green correction with limits [{self.limits[0]}, " + f"{self.limits[1]}] and strength {self.strength}.") + ndvi_input = self.match_data_arrays([projectables[1], projectables[2]]) ndvi = (ndvi_input[1] - ndvi_input[0]) / (ndvi_input[1] + ndvi_input[0]) @@ -156,6 +169,11 @@ def __call__(self, projectables, optional_datasets=None, **attrs): ndvi.data = da.where(ndvi > self.ndvi_min, ndvi, self.ndvi_min) ndvi.data = da.where(ndvi < self.ndvi_max, ndvi, self.ndvi_max) + # Apply non-linearity to the ndvi for a non-linear conversion from ndvi to fraction. This can be used for a + # slower transition to higher/lower fractions at the ndvi extremes. If strength equals 1.0, this operation has + # no effect on ndvi.
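+        # Numerical illustration (values assumed): with strength = 2.0 an ndvi of 0.25
+        # maps to 0.25**2 / (0.25**2 + 0.75**2) = 0.0625 / 0.625 = 0.1, while ndvi = 0.5
+        # stays at 0.5, i.e. the remapping flattens the curve towards both extremes.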
+ ndvi = ndvi ** self.strength / (ndvi ** self.strength + (1 - ndvi) ** self.strength) + fraction = (ndvi - self.ndvi_min) / (self.ndvi_max - self.ndvi_min) * (self.limits[1] - self.limits[0]) \ + self.limits[0] self.fractions = (1 - fraction, fraction) diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index 2e9f59c13f..03e51a5043 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -77,6 +77,7 @@ def test_ndvi_hybrid_green(self): comp = NDVIHybridGreen('ndvi_hybrid_green', limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), standard_name='toa_bidirectional_reflectance') + # Test General functionality with linear strength (=1.0) res = comp((self.c01, self.c02, self.c03)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) @@ -85,6 +86,18 @@ def test_ndvi_hybrid_green(self): data = res.values np.testing.assert_array_almost_equal(data, np.array([[0.2633, 0.3071], [0.2115, 0.3420]]), decimal=4) + # Test invalid strength + with pytest.raises(ValueError): + _ = NDVIHybridGreen('ndvi_hybrid_green', strength=0.0, prerequisites=(0.51, 0.65, 0.85), + standard_name='toa_bidirectional_reflectance') + + # Test non-linear strength + comp = NDVIHybridGreen('ndvi_hybrid_green', limits=(0.15, 0.05), strength=2.0, prerequisites=(0.51, 0.65, 0.85), + standard_name='toa_bidirectional_reflectance') + + res = comp((self.c01, self.c02, self.c03)) + np.testing.assert_array_almost_equal(res.values, np.array([[0.2646, 0.3075], [0.2120, 0.3471]]), decimal=4) + def test_green_corrector(self): """Test the deprecated class for green corrections.""" comp = GreenCorrector('blended_channel', fractions=(0.85, 0.15), prerequisites=(0.51, 0.85), From 01b1ba86b7c5ceaea820532c6b9e9b2cf7ee604b Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Wed, 5 Apr 2023 13:26:24 +0000 Subject: [PATCH 0116/1416] Fix typo in the call of round_nom_time --- satpy/readers/seviri_l1b_native.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index 06ce0bb7aa..d1e05d8c9a 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -136,13 +136,13 @@ def _has_archive_header(self): def nominal_start_time(self): """Read the repeat cycle nominal start time from metadata and round it to expected nominal time slot.""" tm = self.header['15_DATA_HEADER']['ImageAcquisition']['PlannedAcquisitionTime']['TrueRepeatCycleStart'] - return round_nom_time(tm, date_delta=timedelta(minutes=self.tres)) + return round_nom_time(tm, time_delta=timedelta(minutes=self.tres)) @property def nominal_end_time(self): """Read the repeat cycle nominal end time from metadata and round it to expected nominal time slot.""" tm = self.header['15_DATA_HEADER']['ImageAcquisition']['PlannedAcquisitionTime']['PlannedRepeatCycleEnd'] - return round_nom_time(tm, date_delta=timedelta(minutes=self.tres)) + return round_nom_time(tm, time_delta=timedelta(minutes=self.tres)) @property def observation_start_time(self): From 448d3440733d314c2a4e8dafc59b98070a8a794c Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Wed, 5 Apr 2023 13:27:44 +0000 Subject: [PATCH 0117/1416] Fix a typo in the call of round_nom_time --- satpy/readers/seviri_l1b_hrit.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index ae921625bf..ef4b9d6908 100644 --- 
a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -497,14 +497,14 @@ def nominal_start_time(self): """Get the start time and round it according to scan law.""" tm = self.prologue['ImageAcquisition'][ 'PlannedAcquisitionTime']['TrueRepeatCycleStart'] - return round_nom_time(tm, date_delta=timedelta(minutes=self.tres)) + return round_nom_time(tm, time_delta=timedelta(minutes=self.tres)) @property def nominal_end_time(self): """Get the end time and round it according to scan law.""" tm = self.prologue['ImageAcquisition'][ 'PlannedAcquisitionTime']['PlannedRepeatCycleEnd'] - return round_nom_time(tm, date_delta=timedelta(minutes=self.tres)) + return round_nom_time(tm, time_delta=timedelta(minutes=self.tres)) @property def observation_start_time(self): From 2951ebeb3299d52b2076497dde213194e726d529 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Wed, 5 Apr 2023 14:55:44 +0000 Subject: [PATCH 0118/1416] Fix the pytest by handling the time resolution in the pytest fixtures where needed --- satpy/readers/seviri_l1b_native.py | 5 +++-- satpy/tests/reader_tests/test_seviri_l1b_native.py | 5 +++++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index d1e05d8c9a..576acbf713 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -123,8 +123,9 @@ def __init__(self, filename, filename_info, filetype_info, self.image_boundaries = ImageBoundaries(self.header, self.trailer, self.mda) self.tres = REPEAT_CYCLE_DURATION # base RC duration of 15 - if self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1: - self.tres = 5 + if filetype_info is not None: # to avoid error in the pytest + if self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1: + self.tres = 5 diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py index 6bf5d2705d..e2435783eb 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_native.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py @@ -674,6 +674,9 @@ def prepare_area_defs(self, test_dict): fh.header = header fh.trailer = trailer fh.image_boundaries = ImageBoundaries(header, trailer, fh.mda) + fh.tres = 15 # base RC duration + if fh.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1: + fh.tres = 5 calc_area_def = fh.get_area_def(dataset_id) return (calc_area_def, expected_area_def) @@ -1037,6 +1040,7 @@ def file_handler(self): fh = NativeMSGFileHandler() fh.header = header fh.trailer = trailer + fh.tres = 15 fh.platform_id = self.platform_id return fh @@ -1129,6 +1133,7 @@ def file_handler(self): fh = NativeMSGFileHandler() fh.header = header fh.trailer = trailer + fh.tres = 15 fh.mda = mda fh.dask_array = da.from_array(data) fh.platform_id = 324 From d49885a3f7445a135458fbf815271b614a7866dd Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Wed, 5 Apr 2023 15:38:41 +0000 Subject: [PATCH 0119/1416] Fix some unit test failures to open more...
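These fixture updates all encode the same small rule the readers now apply at init time (a condensed sketch; REPEAT_CYCLE_DURATION is the 15-minute base cycle from seviri_base)::

    def repeat_cycle_minutes(reduced_scan, base=15):
        # 5-minute rapid scan when the ReducedScan flag is set, else the base cycle.
        return 5 if reduced_scan == 1 else base

    assert repeat_cycle_minutes(0) == 15
    assert repeat_cycle_minutes(1) == 5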
--- .../test_seviri_l1b_hrit_setup.py | 21 +++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py index a9ca6377c4..778e779c59 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py @@ -141,6 +141,11 @@ def get_fake_epilogue(): 'UpperNorthLineActual': 11136, 'UpperEastColumnActual': 1805, 'UpperWestColumnActual': 7372 + }, + 'ActualScanningSummary': { + 'ReducedScan': 0, + 'ForwardScanStart': datetime(2006, 1, 1, 12, 15, 9, 304888), + 'ForwardScanEnd': datetime(2006, 1, 1, 12, 27, 39, 0) } } } @@ -225,6 +230,18 @@ def get_attrs_exp(projection_longitude=0.0): 'satellite_actual_latitude': -0.5711243456528018, 'satellite_actual_altitude': 35783296.150123544}, 'georef_offset_corrected': True, - 'nominal_start_time': datetime(2006, 1, 1, 12, 15, 9, 304888), - 'nominal_end_time': datetime(2006, 1, 1, 12, 30, 0, 0) + 'nominal_start_time': """ + Deprecation warning: nominal_start_time should be accessed via the time_parameters attrs + nominal_start_time is also available directly via start_time + """, + 'nominal_end_time': """ + deprecation warning: nominal_end_time should be accessed via the time_parameters attrs + nominal_end_time is also available directly via end_time + """, + 'time_parameters': { + 'nominal_start_time': datetime(2006, 1, 1, 12, 15), + 'nominal_end_time': datetime(2006, 1, 1, 12, 30), + 'observation_start_time': datetime(2006, 1, 1, 12, 15, 9, 304888), + 'observation_end_time': datetime(2006, 1, 1, 12, 27, 39, 0) + } } From 5dc3d3edc53f2005cee3a3251e92c8fd913f6c28 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 6 Apr 2023 11:54:04 +0200 Subject: [PATCH 0120/1416] Remove monochromatic colorization and use available palettes functionality instead. --- satpy/composites/__init__.py | 11 ++------ satpy/enhancements/__init__.py | 43 +++++++---------------------- satpy/etc/composites/ahi.yaml | 3 +- satpy/etc/enhancements/generic.yaml | 4 +-- 4 files changed, 16 insertions(+), 45 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 995ef7d317..db120293ec 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1113,7 +1113,7 @@ class LowCloudCompositor(CloudCompositor): def __init__(self, name, values_land=(1,), values_sea=(0,), range_land=(1.0, 4.5), range_sea=(0.0, 4.0), - transition_gamma=1.0, color=(140.25, 191.25, 249.9), **kwargs): + transition_gamma=1.0, **kwargs): """Init info. Collect custom configuration values. @@ -1129,22 +1129,17 @@ def __init__(self, name, values_land=(1,), values_sea=(0,), transition_min values. transition_gamma (float): Gamma correction to apply to the alpha channel within the brightness temperature difference range. - color (list): RGB definition of color to use for the low-level clouds in the composite (the final - color will be a function of the corresponding trasnparency/alpha channel). 
""" if len(range_land) != 2: raise ValueError(f"Expected 2 `range_land` values, got {len(range_land)}") if len(range_sea) != 2: raise ValueError(f"Expected 2 `range_sea` values, got {len(range_sea)}") - if type(color) not in [list, tuple] or len(color) != 3: - raise ValueError("Expected list/tuple with the red, green and blue color components.") self.values_land = values_land if type(values_land) in [list, tuple] else [values_land] self.values_sea = values_sea if type(values_sea) in [list, tuple] else [values_sea] self.range_land = range_land self.range_sea = range_sea self.transition_gamma = transition_gamma - self.color = color self.transition_min = None # Placeholder for later use in CloudCompositor self.transition_max = None # Placeholder for later use in CloudCompositor super().__init__(name, transition_gamma=transition_gamma, **kwargs) @@ -1171,11 +1166,11 @@ def __call__(self, projectables, **kwargs): # Call CloudCompositor for land surface pixels self.transition_min, self.transition_max = self.range_land - res = super().__call__([btd.where(lsm.isin(self.values_land))], low_cloud_color=self.color, **kwargs) + res = super().__call__([btd.where(lsm.isin(self.values_land))], **kwargs) # Call CloudCompositor for sea/water surface pixels self.transition_min, self.transition_max = self.range_sea - res_sea = super().__call__([btd.where(lsm.isin(self.values_sea))], low_cloud_color=self.color, **kwargs) + res_sea = super().__call__([btd.where(lsm.isin(self.values_sea))], **kwargs) # Compine resutls for land and sea/water surface pixels res = res.where(lsm.isin(self.values_land), res_sea) diff --git a/satpy/enhancements/__init__.py b/satpy/enhancements/__init__.py index 0ae8fd45c0..29f2cbdf54 100644 --- a/satpy/enhancements/__init__.py +++ b/satpy/enhancements/__init__.py @@ -358,39 +358,16 @@ def _merge_colormaps(kwargs, img=None): from trollimage.colormap import Colormap full_cmap = None - # TODO - # - Improve check if both palettes and monochromatic are set - # - Improve exception handling for monochromatic cases - # - Resolve RunTimeWarnings - - if 'palettes' in kwargs: - palette = kwargs['palettes'] - if isinstance(palette, Colormap): - full_cmap = palette - else: - for itm in palette: - cmap = create_colormap(itm, img) - if full_cmap is None: - full_cmap = cmap - else: - full_cmap = full_cmap + cmap - - if 'monochromatic' in kwargs: - palette = {} - color = kwargs['monochromatic'].get('color', None) - if color is None: - # TODO: add error - pass - elif isinstance(color, (list, tuple)): - palette['colors'] = [color] - elif isinstance(color, str): - var = img.data.attrs.get(color, None) - if not isinstance(var, (tuple, list)): - # TODO: add error - pass - palette['colors'] = [var] - - full_cmap = create_colormap(palette, img) + palette = kwargs['palettes'] + if isinstance(palette, Colormap): + full_cmap = palette + else: + for itm in palette: + cmap = create_colormap(itm, img) + if full_cmap is None: + full_cmap = cmap + else: + full_cmap = full_cmap + cmap return full_cmap diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index f0543dd7d5..5c73eea7e9 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -539,7 +539,6 @@ composites: compositor: !!python/name:satpy.composites.LowCloudCompositor values_sea: 0 values_land: 100 - color: [140.25, 191.25, 249.9] prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: @@ -567,7 +566,7 @@ composites: geo_color_night_without_background: compositor: 
!!python/name:satpy.composites.BackgroundCompositor - standard_name: night_ir_with_background + standard_name: night_ir prerequisites: - geo_color_low_clouds - geo_color_high_clouds diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 2094327918..362625fa15 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -866,8 +866,8 @@ enhancements: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: - monochromatic: - color: low_cloud_color + palettes: + - {colors: [[140.25, 191.25, 249.9]]} colorized_ir_clouds: standard_name: colorized_ir_clouds From 5da3d5393a47012848cfc953ae7622d6e67ba3b8 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Thu, 6 Apr 2023 16:04:30 +0000 Subject: [PATCH 0121/1416] Fix some of the unit tests by replacing start_time by observation_start_time in all functions related to satpos and all pytest fixtures. Fix the acq_time assertion issue by adding an offset corresponding to the time of the day in milliseconds. Add some prints for debugging that shall be removed once all the unit tests work --- satpy/readers/hrit_base.py | 12 ++++++++++- satpy/readers/seviri_l1b_hrit.py | 9 +++++--- .../reader_tests/test_seviri_l1b_hrit.py | 21 ++++++++++--------- .../test_seviri_l1b_hrit_setup.py | 9 ++++---- 4 files changed, 33 insertions(+), 18 deletions(-) diff --git a/satpy/readers/hrit_base.py b/satpy/readers/hrit_base.py index a092288575..6b9780a1c9 100644 --- a/satpy/readers/hrit_base.py +++ b/satpy/readers/hrit_base.py @@ -175,7 +175,7 @@ def __init__(self, filename, filename_info, filetype_info, hdr_info): self.mda = {} self.hdr_info = hdr_info self._get_hd(self.hdr_info) - + print('HRITFileHandler init filename_info:', filename_info) self._start_time = filename_info['start_time'] self._end_time = self._start_time + timedelta(minutes=15) @@ -222,6 +222,16 @@ def _get_hd(self, hdr_info): 'SSP_longitude': 0.0} self.mda['orbital_parameters'] = {} + @property + def observation_start_time(self): + """Get start time.""" + return self._start_time + + @property + def observation_end_time(self): + """Get end time.""" + return self._end_time + @property def start_time(self): """Get start time.""" diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index ef4b9d6908..f4013ee05d 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -342,10 +342,11 @@ def satpos(self): a, b = self.get_earth_radii() poly_finder = OrbitPolynomialFinder(self.prologue['SatelliteStatus'][ 'Orbit']['OrbitPolynomial']) - orbit_polynomial = poly_finder.get_orbit_polynomial(self.start_time) + print('satpos start_time', self.start_time) + orbit_polynomial = poly_finder.get_orbit_polynomial(self.observation_start_time) return get_satpos( orbit_polynomial=orbit_polynomial, - time=self.start_time, + time=self.observation_start_time, semi_major_axis=a, semi_minor_axis=b, ) @@ -451,7 +452,7 @@ def __init__(self, filename, filename_info, filetype_info, self.tres = REPEAT_CYCLE_DURATION # base RC duration of 15 if self.epilogue['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1: self.tres = 5 - + print('HRITMSGFileHandler init obs_start_time:', self.observation_start_time) self._get_header() def _get_header(self): @@ -726,7 +727,9 @@ def _get_raw_mda(self): def _add_scanline_acq_time(self, dataset): """Add scanline acquisition time to the given dataset.""" tline = self.mda['image_segment_line_quality']['line_mean_acquisition'] +
print('_add_scanline_acq_time tline %f and msec %f', tline['days'], tline['milliseconds']) acq_time = get_cds_time(days=tline['days'], msecs=tline['milliseconds']) + # print('_add_scanline_acq_time acq_time', acq_time) add_scanline_acq_time(dataset, acq_time) def _update_attrs(self, res, info): diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py index f105870f12..97020a8e38 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py @@ -46,10 +46,11 @@ class TestHRITMSGFileHandlerHRV(TestHRITMSGBase): def setUp(self): """Set up the hrit file handler for testing HRV.""" - self.start_time = datetime(2016, 3, 3, 0, 0) + # self.observation_start_time = datetime(2016, 3, 3, 0, 0) + self.observation_start_time = datetime(2006, 1, 1, 12, 15, 9, 304888) self.nlines = 464 self.reader = setup.get_fake_file_handler( - start_time=self.start_time, + observation_start_time=self.observation_start_time, nlines=self.nlines, ncols=5568, ) @@ -88,7 +89,7 @@ def test_get_dataset(self, calibrate, parent_get_dataset): self.assert_attrs_equal(res.attrs, setup.get_attrs_exp()) np.testing.assert_equal( res['acq_time'], - setup.get_acq_time_exp(self.start_time, self.nlines) + setup.get_acq_time_exp(self.observation_start_time, self.nlines) ) @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset') @@ -111,7 +112,7 @@ def test_get_dataset_non_fill(self, calibrate, parent_get_dataset): self.assert_attrs_equal(res.attrs, setup.get_attrs_exp()) np.testing.assert_equal( res['acq_time'], - setup.get_acq_time_exp(self.start_time, self.nlines) + setup.get_acq_time_exp(self.observation_start_time, self.nlines) ) def test_get_area_def(self): @@ -144,12 +145,12 @@ class TestHRITMSGFileHandler(TestHRITMSGBase): def setUp(self): """Set up the hrit file handler for testing.""" - self.start_time = datetime(2016, 3, 3, 0, 0) + self.observation_start_time = datetime(2016, 3, 3, 0, 0) self.nlines = 464 self.ncols = 3712 self.projection_longitude = 9.5 self.reader = setup.get_fake_file_handler( - start_time=self.start_time, + observation_start_time=self.observation_start_time, nlines=self.nlines, ncols=self.ncols, projection_longitude=self.projection_longitude @@ -219,7 +220,7 @@ def test_get_dataset(self, calibrate, parent_get_dataset): expected['acq_time'] = ( 'y', - setup.get_acq_time_exp(self.start_time, self.nlines) + setup.get_acq_time_exp(self.observation_start_time, self.nlines) ) xr.testing.assert_equal(res, expected) self.assert_attrs_equal( @@ -244,7 +245,7 @@ def test_get_dataset_without_masking_bad_scan_lines(self, calibrate, parent_get_ expected = data.copy() expected['acq_time'] = ( 'y', - setup.get_acq_time_exp(self.start_time, self.nlines) + setup.get_acq_time_exp(self.observation_start_time, self.nlines) ) xr.testing.assert_equal(res, expected) self.assert_attrs_equal( @@ -277,7 +278,7 @@ def test_get_raw_mda(self): def test_satpos_no_valid_orbit_polynomial(self): """Test satellite position if there is no valid orbit polynomial.""" reader = setup.get_fake_file_handler( - start_time=self.start_time, + observation_start_time=self.observation_start_time, nlines=self.nlines, ncols=self.ncols, projection_longitude=self.projection_longitude, @@ -295,7 +296,7 @@ class TestHRITMSGPrologueFileHandler(unittest.TestCase): def setUp(self, *mocks): """Set up the test case.""" fh = setup.get_fake_file_handler( - start_time=datetime(2016, 3, 3, 0, 0), + observation_start_time=datetime(2016, 3, 3, 0, 
0), nlines=464, ncols=3712, ) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py index 778e779c59..de718042d2 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py @@ -46,12 +46,12 @@ def new_read_prologue(self): return new_read_prologue -def get_fake_file_handler(start_time, nlines, ncols, projection_longitude=0, +def get_fake_file_handler(observation_start_time, nlines, ncols, projection_longitude=0, orbit_polynomials=ORBIT_POLYNOMIALS): """Create a mocked SEVIRI HRIT file handler.""" prologue = get_fake_prologue(projection_longitude, orbit_polynomials) - mda = get_fake_mda(nlines=nlines, ncols=ncols, start_time=start_time) - filename_info = get_fake_filename_info(start_time) + mda = get_fake_mda(nlines=nlines, ncols=ncols, start_time=observation_start_time) + filename_info = get_fake_filename_info(observation_start_time) epilogue = get_fake_epilogue() m = mock.mock_open() @@ -199,7 +199,8 @@ def get_acq_time_cds(start_time, nlines): dtype=[('days', '>u2'), ('milliseconds', '>u4')] ) tline['days'][1:-1] = days_since_1958 * np.ones(nlines - 2) - tline['milliseconds'][1:-1] = np.arange(nlines - 2) + offset_second = (start_time - start_time.replace(hour=0, minute=0, second=0, microsecond=0)).total_seconds()*1000 + tline['milliseconds'][1:-1] = np.arange(nlines - 2)+offset_second return tline From b20180a487ff967a2632b1a9d542439329b9d72d Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Fri, 7 Apr 2023 09:13:47 -0500 Subject: [PATCH 0122/1416] Create aliases for CLAVRx product names that are the bidirectional reflectance derived information for the L1b channels at 2 km. --- satpy/readers/clavrx.py | 47 ++++++++++++++++++++++++++++++++++++++--- 1 file changed, 44 insertions(+), 3 deletions(-) diff --git a/satpy/readers/clavrx.py b/satpy/readers/clavrx.py index 9565fa7169..cd5d0461e6 100644 --- a/satpy/readers/clavrx.py +++ b/satpy/readers/clavrx.py @@ -68,6 +68,23 @@ 'abi': 2004, } +CHANNEL_ALIASES = { + "abi": {"refl_0_47um_nom": {"name": "C01", "wavelength": 0.47}, + "refl_0_65um_nom": {"name": "C02", "wavelength": 0.64}, + "refl_0_86um_nom": {"name": "C03", "wavelength": 0.865}, + "refl_1_38um_nom": {"name": "C04", "wavelength": 1.378}, + "refl_1_60um_nom": {"name": "C05", "wavelength": 1.61}, + "refl_2_10um_nom": {"name": "C06", "wavelength": 2.25}, + }, + "ahi": {"refl_0_47um_nom": {"name": "C01", "wavelength": 0.47}, + "refl_0_55um_nom": {"name": "C02", "wavelength": 0.51}, + "refl_0_65um_nom": {"name": "C03", "wavelength": 0.64}, + "refl_0_86um_nom": {"name": "C04", "wavelength": 0.86}, + "refl_1_60um_nom": {"name": "C05", "wavelength": 1.61}, + "refl_2_10um_nom": {"name": "C06", "wavelength": 2.25} + }, +} + def _get_sensor(sensor: str) -> str: """Get the sensor.""" @@ -273,6 +290,19 @@ def get_metadata(sensor: str, platform: str, attrs: dict, ds_info: dict) -> dict return attr_info + @staticmethod + def _lookup_alias(vname: str, sensor: str, is_polar: bool) -> str: + """Return variable name if channel name is an alias for a different variable.""" + # Why? The aliases provide built-in access to the base sensor RGB composites. 
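+ # For example, for sensor "abi" the alias name "C02" maps back to the
+ # file variable "refl_0_65um_nom" in the CHANNEL_ALIASES table above.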
+ if is_polar: + # Not implemented + pass + else: + dd = CHANNEL_ALIASES[sensor] + key = next(key for key, value in dd.items() if value["name"] == vname) + + return key + class CLAVRXHDF4FileHandler(HDF4FileHandler, _CLAVRxHelper): """A file handler for CLAVRx files.""" @@ -294,7 +324,7 @@ def end_time(self): return self.filename_info.get('end_time', self.start_time) def get_dataset(self, dataset_id, ds_info): - """Get a dataset.""" + """Get a dataset for Polar Sensors.""" var_name = ds_info.get('file_key', dataset_id['name']) data = self[var_name] data = _CLAVRxHelper._get_data(data, dataset_id) @@ -414,7 +444,8 @@ def _get_ds_info_for_data_arr(self, var_name): } return ds_info - def _is_2d_yx_data_array(self, data_arr): + @staticmethod + def _is_2d_yx_data_array(data_arr): has_y_dim = data_arr.dims[0] == "y" has_x_dim = data_arr.dims[1] == "x" return has_y_dim and has_x_dim @@ -435,6 +466,14 @@ def _available_new_datasets(self, handled_vars): ds_info = self._get_ds_info_for_data_arr(var_name) yield True, ds_info + alias_info = CHANNEL_ALIASES[self.sensor].get(var_name, None) + if alias_info is not None: + if self.nc.attrs["RESOLUTION_KM"] is not None: + alias_info["resolution"] = self.nc.attrs.get("RESOLUTION_KM", "2") + alias_info["resolution"] = alias_info["resolution"] * 1000. + ds_info.update(alias_info) + yield True, ds_info + def available_datasets(self, configured_datasets=None): """Dynamically discover what variables can be loaded from this file. @@ -470,8 +509,9 @@ def get_area_def(self, key): return _CLAVRxHelper._read_axi_fixed_grid(self.filename, l1b_att) def get_dataset(self, dataset_id, ds_info): - """Get a dataset.""" + """Get a dataset for supported geostationary sensors.""" var_name = ds_info.get('name', dataset_id['name']) + var_name = _CLAVRxHelper._lookup_alias(var_name, self.sensor, self._is_polar()) data = self[var_name] data = _CLAVRxHelper._get_data(data, dataset_id) data.attrs = _CLAVRxHelper.get_metadata(self.sensor, self.platform, @@ -480,5 +520,6 @@ def __getitem__(self, item): """Wrap around `self.nc[item]`.""" + # Check if 'item' is an alias: data = self.nc[item] return data From d83eada1e425ee420d70441e7e0383a9b9dd49e0 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 11 Apr 2023 12:05:26 +0000 Subject: [PATCH 0123/1416] Fix issue with different date used in the tests --- satpy/tests/reader_tests/test_seviri_l1b_hrit.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py index 97020a8e38..281043e07f 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py @@ -145,7 +145,8 @@ class TestHRITMSGFileHandler(TestHRITMSGBase): def setUp(self): """Set up the hrit file handler for testing.""" - self.observation_start_time = datetime(2016, 3, 3, 0, 0) + # self.observation_start_time = datetime(2016, 3, 3, 0, 0) + self.observation_start_time = datetime(2006, 1, 1, 12, 15, 9, 304888) self.nlines = 464 self.ncols = 3712 self.projection_longitude = 9.5 From 752213216d4a2b773e83f1648c1c2abc8ba6e533 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 11 Apr 2023 12:15:49 +0000 Subject: [PATCH 0124/1416] Fix failing test because of undefined time resolution (added into the fake file handler creation) --- satpy/tests/reader_tests/test_seviri_l1b_hrit.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git
a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py index 281043e07f..5d0983f73a 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py @@ -405,7 +405,13 @@ def file_handler(self): 'Level15ImageProduction': { 'PlannedChanProcessing': self.radiance_types } - } + }, + 'ImageAcquisition': { + 'PlannedAcquisitionTime': { + 'TrueRepeatCycleStart': datetime(2006, 1, 1, 12, 15, 9, 304888), + 'PlannedRepeatCycleEnd': datetime(2006, 1, 1, 12, 30, 0, 0) + } + } } epilog = { 'ImageProductionStats': { @@ -431,6 +437,7 @@ def file_handler(self): fh.mda = mda fh.prologue = prolog fh.epilogue = epilog + fh.tres = 15 return fh @pytest.mark.parametrize( From c6d0122761f4fdc0919a67db98fbb51b904dda54 Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Tue, 11 Apr 2023 08:51:25 -0500 Subject: [PATCH 0125/1416] Update how alias is used with "file_key" in ds_info, which eliminates the need for a special lookup within reader Add tests to make sure aliases are created. --- satpy/readers/clavrx.py | 25 +++------- satpy/tests/reader_tests/test_clavrx_nc.py | 55 +++++++++++++--------- 2 files changed, 40 insertions(+), 40 deletions(-) diff --git a/satpy/readers/clavrx.py b/satpy/readers/clavrx.py index cd5d0461e6..f8cb35cf51 100644 --- a/satpy/readers/clavrx.py +++ b/satpy/readers/clavrx.py @@ -290,19 +290,6 @@ def get_metadata(sensor: str, platform: str, attrs: dict, ds_info: dict) -> dict return attr_info - @staticmethod - def _lookup_alias(vname: str, sensor: str, is_polar: bool) -> str: - """Return variable name if channel name is an alias for a different variable.""" - # Why? The aliases provide built-in access to the base sensor RGB composites. - if is_polar: - # Not implemented - pass - else: - dd = CHANNEL_ALIASES[sensor] - key = next(key for key, value in dd.items() if value["name"] == vname) - - return key - class CLAVRXHDF4FileHandler(HDF4FileHandler, _CLAVRxHelper): """A file handler for CLAVRx files.""" @@ -464,13 +451,16 @@ def _available_new_datasets(self, handled_vars): continue ds_info = self._get_ds_info_for_data_arr(var_name) + ds_info.update({"file_key": var_name}) yield True, ds_info alias_info = CHANNEL_ALIASES[self.sensor].get(var_name, None) if alias_info is not None: - if self.nc.attrs["RESOLUTION_KM"] is not None: - alias_info["resolution"] = self.nc.attrs.get("RESOLUTION_KM", "2") - alias_info["resolution"] = alias_info["resolution"] * 1000. + alias_info.update({"file_key": var_name}) + if "RESOLUTION_KM" in self.nc.attrs: + alias_info["resolution"] = self.nc.attrs["RESOLUTION_KM"] * 1000. 
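+ # km -> m, e.g. a 2 km file yields a 2000.0 m resolution entry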
+ else: + alias_info["resolution"] = NADIR_RESOLUTION[self.sensor] ds_info.update(alias_info) yield True, ds_info @@ -510,8 +500,7 @@ def get_area_def(self, key): def get_dataset(self, dataset_id, ds_info): """Get a dataset for supported geostationary sensors.""" - var_name = ds_info.get('name', dataset_id['name']) - var_name = _CLAVRxHelper._lookup_alias(var_name, self.sensor, self._is_polar()) + var_name = ds_info.get("file_key", dataset_id['name']) data = self[var_name] data = _CLAVRxHelper._get_data(data, dataset_id) data.attrs = _CLAVRxHelper.get_metadata(self.sensor, self.platform, diff --git a/satpy/tests/reader_tests/test_clavrx_nc.py b/satpy/tests/reader_tests/test_clavrx_nc.py index ea0dcaed9b..a72e46c354 100644 --- a/satpy/tests/reader_tests/test_clavrx_nc.py +++ b/satpy/tests/reader_tests/test_clavrx_nc.py @@ -37,6 +37,7 @@ DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) AHI_FILE = 'clavrx_H08_20210603_1500_B01_FLDK_R.level2.nc' +FILL_VALUE = -32768 def fake_test_content(filename, **kwargs): @@ -51,7 +52,8 @@ def fake_test_content(filename, **kwargs): longitude = xr.DataArray(DEFAULT_LON_DATA, dims=('scan_lines_along_track_direction', 'pixel_elements_along_scan_direction'), - attrs={'_FillValue': np.nan, + attrs={'_FillValue': -999., + 'SCALED': 0, 'scale_factor': 1., 'add_offset': 0., 'standard_name': 'longitude', @@ -61,37 +63,37 @@ def fake_test_content(filename, **kwargs): latitude = xr.DataArray(DEFAULT_LAT_DATA, dims=('scan_lines_along_track_direction', 'pixel_elements_along_scan_direction'), - attrs={'_FillValue': np.nan, + attrs={'_FillValue': -999., + 'SCALED': 0, 'scale_factor': 1., 'add_offset': 0., 'standard_name': 'latitude', 'units': 'degrees_south' }) - variable1 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.float32), + variable1 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.int8), dims=('scan_lines_along_track_direction', 'pixel_elements_along_scan_direction'), - attrs={'_FillValue': np.nan, - 'scale_factor': 1., - 'add_offset': 0., + attrs={'_FillValue': -127, + 'SCALED': 0, 'units': '1', - 'valid_range': [-32767, 32767], }) - # data with fill values - variable2 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.float32), + # data with fill values and a file_type alias + variable2 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.int16), dims=('scan_lines_along_track_direction', 'pixel_elements_along_scan_direction'), - attrs={'_FillValue': np.nan, - 'scale_factor': 1., - 'add_offset': 0., - 'units': '1', + attrs={'_FillValue': FILL_VALUE, + 'SCALED': 1, + 'scale_factor': 0.001861629, + 'add_offset': 59., + 'units': '%', 'valid_range': [-32767, 32767], }) - variable2 = variable2.where(variable2 % 2 != 0) + variable2 = variable2.where(variable2 % 2 != 0, FILL_VALUE) # category - variable3 = xr.DataArray(DEFAULT_FILE_FLAGS, + variable3 = xr.DataArray(DEFAULT_FILE_FLAGS.astype(np.int8), dims=('scan_lines_along_track_direction', 'pixel_elements_along_scan_direction'), attrs={'SCALED': 0, @@ -103,7 +105,7 @@ def fake_test_content(filename, **kwargs): 'longitude': longitude, 'latitude': latitude, 'variable1': variable1, - 'variable2': variable2, + 'refl_0_65um_nom': variable2, 'variable3': variable3 } @@ -141,7 +143,7 @@ def test_reader_creation(self, filenames, expected_loadables): @pytest.mark.parametrize( ("filenames", "expected_datasets"), - [([AHI_FILE], ['variable1', 'variable2', 'variable3']), ] + [([AHI_FILE], ['variable1', 'refl_0_65um_nom', 'variable3']), ] ) 
def test_available_datasets(self, filenames, expected_datasets): """Test that variables are dynamically discovered.""" @@ -154,10 +156,13 @@ def test_available_datasets(self, filenames, expected_datasets): avails = list(r.available_dataset_names) for var_name in expected_datasets: assert var_name in avails + # check extra datasets created by alias or coordinates + for var_name in ["latitude", "longitude", "C03"]: + assert var_name in avails @pytest.mark.parametrize( ("filenames", "loadable_ids"), - [([AHI_FILE], ['variable1', 'variable2', 'variable3']), ] + [([AHI_FILE], ['variable1', 'refl_0_65um_nom', 'C03', 'variable3']), ] ) def test_load_all_new_donor(self, filenames, loadable_ids): """Test loading all test datasets with new donor.""" @@ -184,18 +189,24 @@ def test_load_all_new_donor(self, filenames, loadable_ids): ) fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] datasets = r.load(loadable_ids) - assert len(datasets) == 3 + assert len(datasets) == 4 for v in datasets.values(): assert 'calibration' not in v.attrs - assert v.attrs['units'] == '1' + assert "units" in v.attrs assert isinstance(v.attrs['area'], AreaDefinition) assert v.attrs['platform_name'] == 'himawari8' assert v.attrs['sensor'] == 'ahi' assert 'rows_per_scan' not in v.coords.get('longitude').attrs - if v.attrs["name"] in ["variable1", "variable2"]: + if v.attrs["name"] == 'variable1': + assert "valid_range" not in v.attrs + assert v.dtype == np.float64 + assert "_FillValue" not in v.attrs + # should have file variable and one alias for reflectance + elif v.attrs["name"] in ["refl_0_65um_nom", "C03"]: assert isinstance(v.attrs["valid_range"], list) - assert v.dtype == np.float32 + assert v.dtype == np.float64 assert "_FillValue" not in v.attrs.keys() + assert (v.attrs["file_key"] == "refl_0_65um_nom") else: assert (datasets['variable3'].attrs.get('flag_meanings')) is not None assert (datasets['variable3'].attrs.get('flag_meanings') == '') From 16f819883623c3ab0278d4bda7768e366828a684 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Wed, 12 Apr 2023 14:26:49 +0000 Subject: [PATCH 0126/1416] Fix failing pytest for seviri_hrit readers and especially calibration by using the default scan_time instead of the more refined scan time used in other tests --- satpy/readers/hrit_base.py | 1 - satpy/readers/seviri_l1b_hrit.py | 6 +----- satpy/readers/seviri_l1b_native.py | 2 +- satpy/tests/reader_tests/test_seviri_l1b_hrit.py | 4 +--- 4 files changed, 3 insertions(+), 10 deletions(-) diff --git a/satpy/readers/hrit_base.py b/satpy/readers/hrit_base.py index 6b9780a1c9..b27103cbce 100644 --- a/satpy/readers/hrit_base.py +++ b/satpy/readers/hrit_base.py @@ -175,7 +175,6 @@ def __init__(self, filename, filename_info, filetype_info, hdr_info): self.mda = {} self.hdr_info = hdr_info self._get_hd(self.hdr_info) - print('HRITFileHandler init filename_info:', filename_info) self._start_time = filename_info['start_time'] self._end_time = self._start_time + timedelta(minutes=15) diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index f4013ee05d..9a8cf946e5 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -342,7 +342,6 @@ def satpos(self): a, b = self.get_earth_radii() poly_finder = OrbitPolynomialFinder(self.prologue['SatelliteStatus'][ 'Orbit']['OrbitPolynomial']) - print('satpos start_time', self.start_time) orbit_polynomial = poly_finder.get_orbit_polynomial(self.observation_start_time) return get_satpos( orbit_polynomial=orbit_polynomial, @@
-452,7 +451,6 @@ def __init__(self, filename, filename_info, filetype_info, self.tres = REPEAT_CYCLE_DURATION # base RC duration of 15 if self.epilogue['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1: self.tres = 5 - print('HRITMSGFileHandler init obs_start_time:', self.observation_start_time) self._get_header() def _get_header(self): @@ -697,7 +695,7 @@ def calibrate(self, data, calibration): channel_name=self.channel_name, coefs=self._get_calib_coefs(self.channel_name), calib_mode=self.calib_mode, - scan_time=self.start_time + scan_time=self.observation_start_time ) res = calib.calibrate(data, calibration) logger.debug("Calibration time " + str(datetime.now() - tic)) @@ -727,9 +725,7 @@ def _get_raw_mda(self): def _add_scanline_acq_time(self, dataset): """Add scanline acquisition time to the given dataset.""" tline = self.mda['image_segment_line_quality']['line_mean_acquisition'] - print('_add_scanline_acq_time tline %f and msec %f', tline['days'], tline['milliseconds']) acq_time = get_cds_time(days=tline['days'], msecs=tline['milliseconds']) - # print('_add_scanline_acq_time acq_time', acq_time) add_scanline_acq_time(dataset, acq_time) def _update_attrs(self, res, info): diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index 576acbf713..b921288019 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -547,7 +547,7 @@ def calibrate(self, data, dataset_id): channel_name=channel_name, coefs=self._get_calib_coefs(channel_name), calib_mode=self.calib_mode, - scan_time=self.start_time + scan_time=self.observation_start_time ) res = calib.calibrate(data, dataset_id['calibration']) logger.debug("Calibration time " + str(datetime.now() - tic)) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py index 5d0983f73a..358deb59bb 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py @@ -408,8 +408,7 @@ def file_handler(self): }, 'ImageAcquisition': { 'PlannedAcquisitionTime': { - 'TrueRepeatCycleStart': datetime(2006, 1, 1, 12, 15, 9, 304888), - 'PlannedRepeatCycleEnd': datetime(2006, 1, 1, 12, 30, 0, 0) + 'TrueRepeatCycleStart': self.scan_time, } } } @@ -488,7 +487,6 @@ def test_calibrate( fh.channel_name = channel fh.calib_mode = calib_mode fh.ext_calib_coefs = external_coefs - res = fh.calibrate(counts, calibration) xr.testing.assert_allclose(res, expected) From 00f6c91a4d2ef52d68260a6655931bd32fa87d14 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Thu, 13 Apr 2023 14:11:58 +0000 Subject: [PATCH 0127/1416] Propagate the start/end time change to the seviri nc reader and made sure the tests are passing --- satpy/readers/seviri_l1b_nc.py | 44 ++++++++++++++++--- .../tests/reader_tests/test_seviri_l1b_nc.py | 7 +++ 2 files changed, 46 insertions(+), 5 deletions(-) diff --git a/satpy/readers/seviri_l1b_nc.py b/satpy/readers/seviri_l1b_nc.py index 17435fcdb6..dd4595e9bb 100644 --- a/satpy/readers/seviri_l1b_nc.py +++ b/satpy/readers/seviri_l1b_nc.py @@ -19,6 +19,7 @@ import datetime import logging +from datetime import timedelta import numpy as np @@ -37,6 +38,7 @@ get_cds_time, get_satpos, mask_bad_quality, + round_nom_time, ) logger = logging.getLogger('nc_msg') @@ -68,15 +70,37 @@ def __init__(self, filename, filename_info, filetype_info, self.get_metadata() @property - def start_time(self): - """Get the start time.""" + def nominal_start_time(self): + """Read the repeat cycle nominal
start time from metadata and round it to expected nominal time slot.""" + tm = self.deltaSt + return round_nom_time(tm, time_delta=timedelta(minutes=self.tres)) + + @property + def nominal_end_time(self): + """Read the repeat cycle nominal end time from metadata and round it to expected nominal time slot.""" + tm = self.deltaEnd + return round_nom_time(tm, time_delta=timedelta(minutes=self.tres)) + + @property + def observation_start_time(self): + """Read the repeat cycle sensing start time from metadata.""" return self.deltaSt @property - def end_time(self): - """Get the end time.""" + def observation_end_time(self): + """Read the repeat cycle sensing end time from metadata.""" return self.deltaEnd + @property + def start_time(self): + """Get general start time for this file.""" + return self.nominal_start_time + + @property + def end_time(self): + """Get the general end time for this file.""" + return self.nominal_end_time + @cached_property def nc(self): """Read the file.""" @@ -120,6 +144,10 @@ def get_metadata(self): self.deltaEnd = self.reference + datetime.timedelta( days=int(self.nc.attrs['planned_repeat_cycle_end_day']), milliseconds=int(self.nc.attrs['planned_repeat_cycle_end_mi_sec'])) + if self.nc.attrs['nominal_image_scanning'] == 'T': + self.tres = 15 + elif self.nc.attrs['reduced_scanning'] == 'T': + self.tres = 5 self.north = int(self.nc.attrs['north_most_line']) self.east = int(self.nc.attrs['east_most_pixel']) @@ -158,7 +186,7 @@ def calibrate(self, dataset, dataset_id): channel_name=channel, coefs=self._get_calib_coefs(dataset, channel), calib_mode='NOMINAL', - scan_time=self.start_time + scan_time=self.observation_start_time ) return calib.calibrate(dataset, calibration) @@ -203,6 +231,12 @@ def _update_attrs(self, dataset, dataset_info): ), 'satellite_nominal_latitude': 0.0, } + dataset.attrs['time_parameters'] = { + 'nominal_start_time': self.nominal_start_time, + 'nominal_end_time': self.nominal_end_time, + 'observation_start_time': self.observation_start_time, + 'observation_end_time': self.observation_end_time, + } try: actual_lon, actual_lat, actual_alt = self.satpos dataset.attrs['orbital_parameters'].update({ diff --git a/satpy/tests/reader_tests/test_seviri_l1b_nc.py b/satpy/tests/reader_tests/test_seviri_l1b_nc.py index 39e4ad570e..adb2089fd3 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_nc.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_nc.py @@ -155,6 +155,7 @@ def _get_fake_dataset(self, counts, h5netcdf): 'vis_ir_column_dir_grid_step': 3.0004032, 'vis_ir_line_dir_grid_step': 3.0004032, 'type_of_earth_model': '0x02', + 'nominal_image_scanning': 'T', } ) @@ -323,6 +324,12 @@ def test_get_dataset(self, file_handler, channel, calibration, mask_bad_quality_ 'projection_latitude': 0.0, 'projection_altitude': 35785831.0 }, + 'time_parameters': { + 'nominal_start_time': datetime(2020, 1, 1, 0, 0), + 'nominal_end_time': datetime(2020, 1, 1, 0, 0), + 'observation_start_time': datetime(2020, 1, 1, 0, 0), + 'observation_end_time': datetime(2020, 1, 1, 0, 0), + }, 'georef_offset_corrected': True, 'platform_name': 'Meteosat-11', 'sensor': 'seviri', From 87066011bb95e361c5b7b3cc15a621dfceb7a925 Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Fri, 5 May 2023 17:12:46 -0500 Subject: [PATCH 0128/1416] Fixed is_polar() error improperly identifying ABI as polar. 
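Context on the root cause: with the old predicate, any sensor attribute other than 'AHI' that did not contain the string 'GOES' was treated as polar, so sensor 'ABI' fell into the polar branch. The relevant one-liner from the diff below:

    # before: 'ABI' slipped through to the polar branch
    return (inst_att != 'AHI' and 'GOES' not in inst_att) or (l1b_att is None)
    # after: the geostationary sensors are listed explicitly
    return (inst_att not in ['ABI', 'AHI'] and 'GOES' not in inst_att) or (l1b_att is None)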
Updated tests to use fake goes data to test existing alias creation Reduced aliases to ABI and VIIRS channels --- satpy/etc/enhancements/generic.yaml | 23 ++++++ satpy/readers/clavrx.py | 81 +++++++++++----------- satpy/tests/reader_tests/test_clavrx.py | 2 +- satpy/tests/reader_tests/test_clavrx_nc.py | 26 +++---- 4 files changed, 79 insertions(+), 53 deletions(-) diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index ce1ce1bb94..45ae789dde 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -262,6 +262,29 @@ enhancements: stretch: linear cutoffs: [0.005, 0.005] + four_level_cloud_mask: + standard_name: cloud_mask + reader: clavrx + operations: + - name: palettize + method: !!python/name:satpy.enhancements.palettize + kwargs: + palettes: + - {'values': [-127,# Fill Value + 0, # Clear + 1, # Probably Clear + 2, # Probably Cloudy + 3, # Cloudy + ], + 'colors': [[ 0, 0, 0], # black,-127 = Fill Value + [ 94, 79, 162], # blue, 0 = Clear + [ 73, 228, 242], # cyan, 1 = Probably Clear + [158, 1, 66], # red, 2 = Probably Cloudy + [255, 255, 255], # white, 3 = Cloudy + ], + 'color_scale': 255, + } + sar-ice: standard_name: sar-ice operations: diff --git a/satpy/readers/clavrx.py b/satpy/readers/clavrx.py index f8cb35cf51..289adf6e0a 100644 --- a/satpy/readers/clavrx.py +++ b/satpy/readers/clavrx.py @@ -69,21 +69,18 @@ } CHANNEL_ALIASES = { - "abi": {"refl_0_47um_nom": {"name": "C01", "wavelength": 0.47}, - "refl_0_65um_nom": {"name": "C02", "wavelength": 0.64}, - "refl_0_86um_nom": {"name": "C03", "wavelength": 0.865}, - "refl_1_38um_nom": {"name": "C04", "wavelength": 1.378}, - "refl_1_60um_nom": {"name": "C05", "wavelength": 1.61}, - "refl_2_10um_nom": {"name": "C06", "wavelength": 2.25}, + "abi": {"refl_0_47um_nom": {"name": "C01", "wavelength": 0.47, "modifiers": ("sunz_corrected",)}, + "refl_0_65um_nom": {"name": "C02", "wavelength": 0.64, "modifiers": ("sunz_corrected",)}, + "refl_0_86um_nom": {"name": "C03", "wavelength": 0.865, "modifiers": ("sunz_corrected",)}, + "refl_1_38um_nom": {"name": "C04", "wavelength": 1.38, "modifiers": ("sunz_corrected",)}, + "refl_1_60um_nom": {"name": "C05", "wavelength": 1.61, "modifiers": ("sunz_corrected",)}, + "refl_2_10um_nom": {"name": "C06", "wavelength": 2.25, "modifiers": ("sunz_corrected",)}, }, - "ahi": {"refl_0_47um_nom": {"name": "C01", "wavelength": 0.47}, - "refl_0_55um_nom": {"name": "C02", "wavelength": 0.51}, - "refl_0_65um_nom": {"name": "C03", "wavelength": 0.64}, - "refl_0_86um_nom": {"name": "C04", "wavelength": 0.86}, - "refl_1_60um_nom": {"name": "C05", "wavelength": 1.61}, - "refl_2_10um_nom": {"name": "C06", "wavelength": 2.25} - }, -} + "viirs": {"refl_0_65um_nom": {"name": "I01", "wavelength": 0.64, "modifiers": ("sunz_corrected",)}, + "refl_1_38um_nom": {"name": "M09", "wavelength": 1.38, "modifiers": ("sunz_corrected",)}, + "refl_1_60um_nom": {"name": "I03", "wavelength": 1.61, "modifiers": ("sunz_corrected",)} + } + } def _get_sensor(sensor: str) -> str: @@ -143,8 +140,6 @@ def _get_data(data, dataset_id: dict) -> xr.DataArray: factor = attrs.pop('scale_factor', (np.ones(1, dtype=data.dtype))[0]) offset = attrs.pop('add_offset', (np.zeros(1, dtype=data.dtype))[0]) valid_range = attrs.get('valid_range', [None]) - if isinstance(valid_range, np.ndarray): - attrs["valid_range"] = valid_range.tolist() flags = not data.attrs.get("SCALED", 1) and any(data.attrs.get("flag_values", [None])) if not flags: @@ -152,15 +147,14 @@ def _get_data(data, dataset_id: 
dict) -> xr.DataArray: data = _CLAVRxHelper._scale_data(data, factor, offset) # don't need _FillValue if it has been applied. attrs.pop('_FillValue', None) - - if all(valid_range): - valid_min = _CLAVRxHelper._scale_data(valid_range[0], factor, offset) - valid_max = _CLAVRxHelper._scale_data(valid_range[1], factor, offset) - if flags: - data = data.where((data >= valid_min) & (data <= valid_max), fill) - else: + if isinstance(valid_range, np.ndarray): + valid_min = _CLAVRxHelper._scale_data(valid_range[0], factor, offset) + valid_max = _CLAVRxHelper._scale_data(valid_range[1], factor, offset) data = data.where((data >= valid_min) & (data <= valid_max)) - attrs['valid_range'] = [valid_min, valid_max] + else: + flag_values = attrs.get('flag_values', None) + if flag_values is not None and isinstance(flag_values, np.ndarray): + data = data.where((data >= flag_values[0]) & (data <= flag_values[-1]), fill) data.attrs = _CLAVRxHelper._remove_attributes(attrs) @@ -330,6 +324,15 @@ def get_nadir_resolution(self, sensor): elif res is not None: return int(res) + def _available_aliases(self, ds_info, current_var): + """Add alias if there is a match.""" + alias_info = CHANNEL_ALIASES.get(self.sensor).get(current_var, None) + if alias_info is not None: + alias_info.update({"file_key": current_var}) + alias_info["resolution"] = self.get_nadir_resolution(self.sensor) + ds_info.update(alias_info) + yield True, ds_info + def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" self.sensor = _get_sensor(self.file_content.get('/attr/sensor')) @@ -375,6 +378,9 @@ def available_datasets(self, configured_datasets=None): ds_info['coordinates'] = ['longitude', 'latitude'] yield True, ds_info + if CHANNEL_ALIASES.get(self.sensor) is not None: + yield from self._available_aliases(ds_info, var_name) + def get_shape(self, dataset_id, ds_info): """Get the shape.""" var_name = ds_info.get('file_key', dataset_id['name']) @@ -425,11 +431,20 @@ def __init__(self, filename, filename_info, filetype_info): {"name": "longitude"}) def _get_ds_info_for_data_arr(self, var_name): + """Set data name and, if applicable, aliases.""" + channel_info = None ds_info = { 'file_type': self.filetype_info['file_type'], 'name': var_name, } - return ds_info + yield True, ds_info + + if CHANNEL_ALIASES.get(self.sensor) is not None: + channel_info = CHANNEL_ALIASES.get(self.sensor).get(var_name, None) + if channel_info is not None: + channel_info["file_key"] = var_name + ds_info.update(channel_info) + yield True, ds_info @staticmethod def _is_2d_yx_data_array(data_arr): @@ -450,19 +465,7 @@ def _available_new_datasets(self, handled_vars): # we need 'traditional' y/x dimensions currently continue - ds_info = self._get_ds_info_for_data_arr(var_name) - ds_info.update({"file_key": var_name}) - yield True, ds_info - - alias_info = CHANNEL_ALIASES[self.sensor].get(var_name, None) - if alias_info is not None: - alias_info.update({"file_key": var_name}) - if "RESOLUTION_KM" in self.nc.attrs: - alias_info["resolution"] = self.nc.attrs["RESOLUTION_KM"] * 1000. - else: - alias_info["resolution"] = NADIR_RESOLUTION[self.sensor] - ds_info.update(alias_info) - yield True, ds_info + yield from self._get_ds_info_for_data_arr(var_name) def available_datasets(self, configured_datasets=None): """Dynamically discover what variables can be loaded from this file. 
@@ -488,7 +491,7 @@ def _is_polar(self): l1b_att, inst_att = (str(self.nc.attrs.get('L1B', None)), str(self.nc.attrs.get('sensor', None))) - return (inst_att != 'AHI' and 'GOES' not in inst_att) or (l1b_att is None) + return (inst_att not in ['ABI', 'AHI'] and 'GOES' not in inst_att) or (l1b_att is None) def get_area_def(self, key): """Get the area definition of the data at hand.""" diff --git a/satpy/tests/reader_tests/test_clavrx.py b/satpy/tests/reader_tests/test_clavrx.py index 86e0cf1fa7..5a90bf873a 100644 --- a/satpy/tests/reader_tests/test_clavrx.py +++ b/satpy/tests/reader_tests/test_clavrx.py @@ -386,7 +386,7 @@ def test_load_all_old_donor(self): else: self.assertNotIn('_FillValue', v.attrs) if v.attrs["name"] == 'variable1': - self.assertIsInstance(v.attrs["valid_range"], list) + self.assertIsInstance(v.attrs["valid_range"], tuple) else: self.assertNotIn('valid_range', v.attrs) if 'flag_values' in v.attrs: diff --git a/satpy/tests/reader_tests/test_clavrx_nc.py b/satpy/tests/reader_tests/test_clavrx_nc.py index a72e46c354..5f0ba812b7 100644 --- a/satpy/tests/reader_tests/test_clavrx_nc.py +++ b/satpy/tests/reader_tests/test_clavrx_nc.py @@ -36,17 +36,17 @@ DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) -AHI_FILE = 'clavrx_H08_20210603_1500_B01_FLDK_R.level2.nc' +ABI_FILE = 'clavrx_OR_ABI-L1b-RadC-M6C01_G16_s20231021601173.level2.nc' FILL_VALUE = -32768 def fake_test_content(filename, **kwargs): """Mimic reader input file content.""" attrs = { - 'platform': 'HIM8', - 'sensor': 'AHI', + 'platform': 'G16', + 'sensor': 'ABI', # this is a Level 2 file that came from a L1B file - 'L1B': 'clavrx_H08_20210603_1500_B01_FLDK_R', + 'L1B': '"clavrx_OR_ABI-L1b-RadC-M6C01_G16_s20231021601173', } longitude = xr.DataArray(DEFAULT_LON_DATA, @@ -127,7 +127,7 @@ def setup_method(self): @pytest.mark.parametrize( ("filenames", "expected_loadables"), - [([AHI_FILE], 1)] + [([ABI_FILE], 1)] ) def test_reader_creation(self, filenames, expected_loadables): """Test basic initialization.""" @@ -143,7 +143,7 @@ def test_reader_creation(self, filenames, expected_loadables): @pytest.mark.parametrize( ("filenames", "expected_datasets"), - [([AHI_FILE], ['variable1', 'refl_0_65um_nom', 'variable3']), ] + [([ABI_FILE], ['variable1', 'refl_0_65um_nom', 'variable3']), ] ) def test_available_datasets(self, filenames, expected_datasets): """Test that variables are dynamically discovered.""" @@ -157,12 +157,12 @@ def test_available_datasets(self, filenames, expected_datasets): for var_name in expected_datasets: assert var_name in avails # check extra datasets created by alias or coordinates - for var_name in ["latitude", "longitude", "C03"]: + for var_name in ["latitude", "longitude"]: assert var_name in avails @pytest.mark.parametrize( ("filenames", "loadable_ids"), - [([AHI_FILE], ['variable1', 'refl_0_65um_nom', 'C03', 'variable3']), ] + [([ABI_FILE], ['variable1', 'refl_0_65um_nom', 'C02', 'variable3']), ] ) def test_load_all_new_donor(self, filenames, loadable_ids): """Test loading all test datasets with new donor.""" @@ -181,8 +181,8 @@ def test_load_all_new_donor(self, filenames, loadable_ids): semi_major_axis=6378137, semi_minor_axis=6356752.3142, perspective_point_height=35791000, - longitude_of_projection_origin=140.7, - sweep_angle_axis='y', + longitude_of_projection_origin=-137.2, + sweep_angle_axis='x', ) 
d.return_value = fake_donor = mock.MagicMock( variables={'goes_imager_projection': proj, 'x': x, 'y': y}, @@ -194,15 +194,15 @@ def test_load_all_new_donor(self, filenames, loadable_ids): assert 'calibration' not in v.attrs assert "units" in v.attrs assert isinstance(v.attrs['area'], AreaDefinition) - assert v.attrs['platform_name'] == 'himawari8' - assert v.attrs['sensor'] == 'ahi' + assert v.attrs['platform_name'] == 'GOES-16' + assert v.attrs['sensor'] == 'abi' assert 'rows_per_scan' not in v.coords.get('longitude').attrs if v.attrs["name"] == 'variable1': assert "valid_range" not in v.attrs assert v.dtype == np.float64 assert "_FillValue" not in v.attrs # should have file variable and one alias for reflectance - elif v.attrs["name"] in ["refl_0_65um_nom", "C03"]: + elif v.attrs["name"] in ["refl_0_65um_nom", "C02"]: assert isinstance(v.attrs["valid_range"], list) assert v.dtype == np.float64 assert "_FillValue" not in v.attrs.keys() From a48111e023e73fbabba1338901fdb9bfc5201d4a Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Tue, 9 May 2023 13:55:26 -0500 Subject: [PATCH 0129/1416] attempt to address complexity of available_datasets --- satpy/readers/clavrx.py | 36 ++++++++++++++++++++---------------- 1 file changed, 20 insertions(+), 16 deletions(-) diff --git a/satpy/readers/clavrx.py b/satpy/readers/clavrx.py index 289adf6e0a..d5154dca91 100644 --- a/satpy/readers/clavrx.py +++ b/satpy/readers/clavrx.py @@ -333,6 +333,23 @@ def _available_aliases(self, ds_info, current_var): ds_info.update(alias_info) yield True, ds_info + def _dynamic_datasets(self, nadir_resolution): + """Get data from file and build aliases.""" + # add new datasets + for var_name, val in self.file_content.items(): + if isinstance(val, SDS): + ds_info = { + 'file_type': self.filetype_info['file_type'], + 'resolution': nadir_resolution, + 'name': var_name, + } + if self._is_polar(): + ds_info['coordinates'] = ['longitude', 'latitude'] + yield True, ds_info + + if CHANNEL_ALIASES.get(self.sensor) is not None: + yield from self._available_aliases(ds_info, var_name) + def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" self.sensor = _get_sensor(self.file_content.get('/attr/sensor')) @@ -366,20 +383,7 @@ def available_datasets(self, configured_datasets=None): # then we should keep it going down the chain yield is_avail, ds_info - # add new datasets - for var_name, val in self.file_content.items(): - if isinstance(val, SDS): - ds_info = { - 'file_type': self.filetype_info['file_type'], - 'resolution': nadir_resolution, - 'name': var_name, - } - if self._is_polar(): - ds_info['coordinates'] = ['longitude', 'latitude'] - yield True, ds_info - - if CHANNEL_ALIASES.get(self.sensor) is not None: - yield from self._available_aliases(ds_info, var_name) + yield from self._dynamic_datasets(nadir_resolution) def get_shape(self, dataset_id, ds_info): """Get the shape.""" @@ -452,7 +456,7 @@ def _is_2d_yx_data_array(data_arr): has_x_dim = data_arr.dims[1] == "x" return has_y_dim and has_x_dim - def _available_new_datasets(self, handled_vars): + def _available_file_datasets(self, handled_vars): """Metadata for available variables other than BT.""" possible_vars = list(self.nc.items()) + list(self.nc.coords.items()) for var_name, data_arr in possible_vars: @@ -485,7 +489,7 @@ def available_datasets(self, configured_datasets=None): if self.file_type_matches(ds_info['file_type']): handled_vars.add(ds_info['name']) yield self.file_type_matches(ds_info['file_type']), 
ds_info - yield from self._available_new_datasets(handled_vars) + yield from self._available_file_datasets(handled_vars) def _is_polar(self): l1b_att, inst_att = (str(self.nc.attrs.get('L1B', None)), From c18c6deaa88cf622d65b0258e7c5c7e8a81b5062 Mon Sep 17 00:00:00 2001 From: "Nina.Hakansson" Date: Mon, 15 May 2023 11:36:34 +0200 Subject: [PATCH 0130/1416] Add noaa21 Without it I got error message: ValueError: Could not find dataset named cmic_reff_pal in ancillary variables for dataset 'cloud_drop_effective_radius' --- satpy/readers/nwcsaf_nc.py | 1 + 1 file changed, 1 insertion(+) diff --git a/satpy/readers/nwcsaf_nc.py b/satpy/readers/nwcsaf_nc.py index 3c004b3a27..3a788cebe9 100644 --- a/satpy/readers/nwcsaf_nc.py +++ b/satpy/readers/nwcsaf_nc.py @@ -52,6 +52,7 @@ 'EOS-Terra': 'modis', 'Suomi-NPP': 'viirs', 'NOAA-20': 'viirs', + 'NOAA-21': 'viirs', 'JPSS-1': 'viirs', 'GOES-16': 'abi', 'GOES-17': 'abi', From db8b011633cfbb3e1f1e6d8ff0bc8ba4400e82cc Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Mon, 15 May 2023 10:43:45 -0500 Subject: [PATCH 0131/1416] Trying to adjust for complexity error in available datasets --- satpy/readers/clavrx.py | 45 ++++++++++++++++++++--------------------- 1 file changed, 22 insertions(+), 23 deletions(-) diff --git a/satpy/readers/clavrx.py b/satpy/readers/clavrx.py index d5154dca91..3add1ea380 100644 --- a/satpy/readers/clavrx.py +++ b/satpy/readers/clavrx.py @@ -294,6 +294,9 @@ def __init__(self, filename, filename_info, filetype_info): filename_info, filetype_info) + self.sensor = _get_sensor(self.file_content.get('/attr/sensor')) + self.platform = _get_platform(self.file_content.get('/attr/platform')) + @property def start_time(self): """Get the start time.""" @@ -333,9 +336,26 @@ def _available_aliases(self, ds_info, current_var): ds_info.update(alias_info) yield True, ds_info + def _add_info_if_appropriate(self, is_avail, ds_info, nadir_resolution): + """Add more information if this reader can provide it.""" + new_info = ds_info.copy() # don't change input + this_res = ds_info.get('resolution') + var_name = ds_info.get('file_key', ds_info['name']) + matches = self.file_type_matches(ds_info['file_type']) + # we can confidently say that we can provide this dataset and can + # provide more info + if matches and var_name in self and this_res != nadir_resolution: + new_info['resolution'] = nadir_resolution + if self._is_polar(): + new_info['coordinates'] = ds_info.get('coordinates', ('longitude', 'latitude')) + yield True, new_info + elif is_avail is None: + # if we didn't know how to handle this dataset and no one else did + # then we should keep it going down the chain + yield is_avail, ds_info + def _dynamic_datasets(self, nadir_resolution): """Get data from file and build aliases.""" - # add new datasets for var_name, val in self.file_content.items(): if isinstance(val, SDS): ds_info = { @@ -352,36 +372,15 @@ def _dynamic_datasets(self, nadir_resolution): def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" - self.sensor = _get_sensor(self.file_content.get('/attr/sensor')) - self.platform = _get_platform(self.file_content.get('/attr/platform')) - nadir_resolution = self.get_nadir_resolution(self.sensor) - coordinates = ('longitude', 'latitude') - handled_variables = set() # update previously configured datasets for is_avail, ds_info in (configured_datasets or []): - this_res = ds_info.get('resolution') - this_coords = ds_info.get('coordinates') # some other file handler knows how to 
load this if is_avail is not None: yield is_avail, ds_info - var_name = ds_info.get('file_key', ds_info['name']) - matches = self.file_type_matches(ds_info['file_type']) - # we can confidently say that we can provide this dataset and can - # provide more info - if matches and var_name in self and this_res != nadir_resolution: - handled_variables.add(var_name) - new_info = ds_info.copy() # don't mess up the above yielded - new_info['resolution'] = nadir_resolution - if self._is_polar() and this_coords is None: - new_info['coordinates'] = coordinates - yield True, new_info - elif is_avail is None: - # if we didn't know how to handle this dataset and no one else did - # then we should keep it going down the chain - yield is_avail, ds_info + yield from self._add_info_if_appropriate(is_avail, ds_info, nadir_resolution) yield from self._dynamic_datasets(nadir_resolution) From 92dcc1062918b46500a82af038b4b2009d3ab535 Mon Sep 17 00:00:00 2001 From: "Nina.Hakansson" Date: Tue, 16 May 2023 09:38:34 +0200 Subject: [PATCH 0132/1416] Platform names for the future --- satpy/readers/nwcsaf_nc.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/satpy/readers/nwcsaf_nc.py b/satpy/readers/nwcsaf_nc.py index 3a788cebe9..7ecc5f43f4 100644 --- a/satpy/readers/nwcsaf_nc.py +++ b/satpy/readers/nwcsaf_nc.py @@ -53,7 +53,12 @@ 'Suomi-NPP': 'viirs', 'NOAA-20': 'viirs', 'NOAA-21': 'viirs', + 'NOAA-22': 'viirs', + 'NOAA-23': 'viirs', 'JPSS-1': 'viirs', + 'Metop-SG-A1': 'metimage', + 'Metop-SG-A2': 'metimage', + 'Metop-SG-A3': 'metimage', 'GOES-16': 'abi', 'GOES-17': 'abi', 'Himawari-8': 'ahi', From 6bf7ffa640f2c9e6796db60913404f899b2e7d2e Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 16 May 2023 15:33:19 +0200 Subject: [PATCH 0133/1416] Fix enhancement --- satpy/etc/enhancements/generic.yaml | 13 +++---- satpy/readers/sar_c_safe.py | 60 ++++++++++++++--------------- 2 files changed, 34 insertions(+), 39 deletions(-) diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 42eb36a1a6..fae957b160 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -338,14 +338,11 @@ enhancements: method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude - min_stretch: [0, -19.18, 0] - max_stretch: [0.07, -1.294, .43] - #min_stretch: [0, -30, 0] - #max_stretch: [1, 10, 2] - - name: gamma - method: !!python/name:satpy.enhancements.gamma - kwargs: - gamma: [1.82, 0.74, 1] + # R -- VH: 0.00109 to 0.0594 + # G -- VV_db: -17.57 to -3.3 + # B -- VV: 0.00332 to 0.3 + min_stretch: [0.00109, -17.57, 0.00332] + max_stretch: [0.0594, -3.3, .3] sar-quick: standard_name: sar-quick diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 7a8002a95b..4b2d214187 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -71,7 +71,8 @@ def _dictify(r): return r.text for x in r.findall("./*"): if x.tag in d and not isinstance(d[x.tag], list): - d[x.tag] = [d[x.tag], _dictify(x)] + d[x.tag] = [d[x.tag]] + d[x.tag].append(_dictify(x)) else: d[x.tag] = _dictify(x) return d @@ -173,7 +174,8 @@ def get_calibration_constant(self): def _get_calibration_uncached(self, calibration, chunks=None): """Get the calibration array.""" calibration_name = _get_calibration_name(calibration) - return self._get_calibration_vector(calibration_name, chunks) + calibration_vector = self._get_calibration_vector(calibration_name, chunks) + return calibration_vector def _get_calibration_vector(self, calibration_name, chunks): """Get the 
calibration vector.""" @@ -255,7 +257,9 @@ def __init__(self, root, shape): def read_azimuth_noise_array(self, chunks=CHUNK_SIZE): """Read the azimuth noise vectors.""" self._read_azimuth_noise_blocks(chunks) - return self._assemble_azimuth_noise_blocks(chunks) + populated_array = self._assemble_azimuth_noise_blocks(chunks) + + return populated_array def _read_azimuth_noise_blocks(self, chunks): """Read the azimuth noise blocks.""" @@ -295,7 +299,9 @@ def _create_dask_slice_from_block_line(self, current_line, chunks): """Create a dask slice from the blocks at the current line.""" pieces = self._get_array_pieces_for_current_line(current_line) dask_pieces = self._get_padded_dask_pieces(pieces, chunks) - return da.hstack(dask_pieces) + new_slice = da.hstack(dask_pieces) + + return new_slice def _get_array_pieces_for_current_line(self, current_line): """Get the array pieces that cover the current line.""" @@ -308,11 +314,11 @@ def _get_array_pieces_for_current_line(self, current_line): def _find_blocks_covering_line(self, current_line): """Find the blocks covering a given line.""" - return [ - block - for block in self.blocks - if block.coords['y'][0] <= current_line <= block.coords['y'][-1] - ] + current_blocks = [] + for block in self.blocks: + if block.coords['y'][0] <= current_line <= block.coords['y'][-1]: + current_blocks.append(block) + return current_blocks def _get_next_start_line(self, current_blocks, current_line): next_line = min((arr.coords['y'][-1] for arr in current_blocks)) + 1 @@ -572,34 +578,24 @@ def get_dataset(self, key, info): if key['name'] in ['longitude', 'latitude', 'altitude']: logger.debug('Constructing coordinate arrays.') - arrays = {} + arrays = dict() arrays['longitude'], arrays['latitude'], arrays['altitude'] = self.get_lonlatalts() data = arrays[key['name']] data.attrs.update(info) else: - data = self.get_measurement(key, info) - return data - - def get_measurement(self, key, info): - """Get the measurement data.""" - result = rioxarray.open_rasterio( - self.filename, lock=False, chunks=(1, CHUNK_SIZE, CHUNK_SIZE) - ).squeeze() - - result = result.assign_coords( - x=np.arange(len(result.coords['x'])), - y=np.arange(len(result.coords['y'])), - ) - - result = self._calibrate_and_denoise(result, key) - result.attrs.update(info) - result.attrs.update({'platform_name': self._mission_id}) + data = xr.open_dataset(self.filename, engine="rasterio", + chunks={"band": 1, "y": CHUNK_SIZE, "x": CHUNK_SIZE})["band_data"].squeeze() + data = data.assign_coords(x=np.arange(len(data.coords['x'])), + y=np.arange(len(data.coords['y']))) + data = self._calibrate_and_denoise(data, key) + data.attrs.update(info) + data.attrs.update({'platform_name': self._mission_id}) - result = self._change_quantity(result, key['quantity']) + data = self._change_quantity(data, key['quantity']) - return result + return data @staticmethod def _change_quantity(data, quantity): @@ -626,7 +622,8 @@ def _get_digital_number(self, data): """Get the digital numbers (uncalibrated data).""" data = data.where(data > 0) data = data.astype(np.float64) - return data * data + dn = data * data + return dn def _denoise(self, dn, chunks): """Denoise the data.""" @@ -641,7 +638,8 @@ def _calibrate(self, dn, chunks, key): cal = self.calibration.get_calibration(key['calibration'], chunks=chunks) cal_constant = self.calibration.get_calibration_constant() logger.debug('Calibrating.') - return ((dn + cal_constant) / (cal ** 2)).clip(min=0) + data = ((dn + cal_constant) / (cal ** 2)).clip(min=0) + return data def 
_get_lonlatalts_uncached(self): """Obtain GCPs and construct latitude and longitude arrays. From 6bb69944c80c494b0a18b17d00f85b9c2d5cd804 Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Wed, 17 May 2023 17:38:43 +0200 Subject: [PATCH 0134/1416] Parametrize test_seviri_native area tests. --- .../reader_tests/test_seviri_l1b_native.py | 713 +++++++----------- 1 file changed, 254 insertions(+), 459 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py index 7615e66011..8c1060f1e9 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_native.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py @@ -502,6 +502,253 @@ } +def create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan, good_qual='OK'): + """Create mocked NativeMSGFileHandler. + + Contains sufficient attributes for NativeMSGFileHandler.get_area_extent to be able to execute. + """ + if dataset_id['name'] == 'HRV': + reference_grid = 'ReferenceGridHRV' + column_dir_grid_step = 1.0001343488693237 + line_dir_grid_step = 1.0001343488693237 + else: + reference_grid = 'ReferenceGridVIS_IR' + column_dir_grid_step = 3.0004031658172607 + line_dir_grid_step = 3.0004031658172607 + + if is_full_disk: + north = 3712 + east = 1 + west = 3712 + south = 1 + n_visir_cols = 3712 + n_visir_lines = 3712 + n_hrv_cols = 11136 + n_hrv_lines = 11136 + ssp_lon = 0 + elif is_rapid_scan: + north = 3712 + east = 1 + west = 3712 + south = 2321 + n_visir_cols = 3712 + n_visir_lines = 1392 + n_hrv_cols = 11136 + n_hrv_lines = 4176 + ssp_lon = 9.5 + else: + north = 3574 + east = 78 + west = 2591 + south = 1746 + n_visir_cols = 2516 + n_visir_lines = north - south + 1 + n_hrv_cols = n_visir_cols * 3 + n_hrv_lines = n_visir_lines * 3 + ssp_lon = 0 + header = { + '15_MAIN_PRODUCT_HEADER': { + 'QQOV': {'Name': 'QQOV', + 'Value': good_qual} + }, + '15_DATA_HEADER': { + 'ImageDescription': { + reference_grid: { + 'ColumnDirGridStep': column_dir_grid_step, + 'LineDirGridStep': line_dir_grid_step, + 'GridOrigin': 2, # south-east corner + }, + 'ProjectionDescription': { + 'LongitudeOfSSP': ssp_lon + } + }, + 'GeometricProcessing': { + 'EarthModel': { + 'TypeOfEarthModel': earth_model, + 'EquatorialRadius': 6378169.0, + 'NorthPolarRadius': 6356583.800000001, + 'SouthPolarRadius': 6356583.800000001, + } + }, + 'SatelliteStatus': { + 'SatelliteDefinition': { + 'SatelliteId': 324 + } + } + }, + '15_SECONDARY_PRODUCT_HEADER': { + 'NorthLineSelectedRectangle': {'Value': north}, + 'EastColumnSelectedRectangle': {'Value': east}, + 'WestColumnSelectedRectangle': {'Value': west}, + 'SouthLineSelectedRectangle': {'Value': south}, + 'SelectedBandIDs': {'Value': 'xxxxxxxxxxxx'}, + 'NumberColumnsVISIR': {'Value': n_visir_cols}, + 'NumberLinesVISIR': {'Value': n_visir_lines}, + 'NumberColumnsHRV': {'Value': n_hrv_cols}, + 'NumberLinesHRV': {'Value': n_hrv_lines}, + } + + } + + return header + + +def create_test_trailer(is_rapid_scan): + """Create Test Trailer. + + Mocked Trailer with sufficient attributes for + NativeMSGFileHandler.get_area_extent to be able to execute. 
+ """ + trailer = { + '15TRAILER': { + 'ImageProductionStats': { + 'ActualL15CoverageHRV': { + 'UpperNorthLineActual': 11136, + 'UpperWestColumnActual': 7533, + 'UpperSouthLineActual': 8193, + 'UpperEastColumnActual': 1966, + 'LowerNorthLineActual': 8192, + 'LowerWestColumnActual': 5568, + 'LowerSouthLineActual': 1, + 'LowerEastColumnActual': 1 + }, + 'ActualScanningSummary': { + 'ReducedScan': is_rapid_scan + } + } + } + } + + return trailer + + +def prepare_area_definitions(test_dict): + """Prepare calculated and expected area definitions for equal checking.""" + earth_model = test_dict['earth_model'] + dataset_id = test_dict['dataset_id'] + is_full_disk = test_dict['is_full_disk'] + is_rapid_scan = test_dict['is_rapid_scan'] + fill_disk = test_dict['fill_disk'] + header = create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan) + trailer = create_test_trailer(is_rapid_scan) + expected_area_def = test_dict['expected_area_def'] + + with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile, \ + mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict, \ + mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap, \ + mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'), \ + mock.patch( + 'satpy.readers.seviri_l1b_native.has_archive_header' + ) as has_archive_header: + has_archive_header.return_value = True + fromfile.return_value = header + recarray2dict.side_effect = (lambda x: x) + _get_memmap.return_value = np.arange(3) + fh = NativeMSGFileHandler(None, {}, None) + fh.fill_disk = fill_disk + fh.header = header + fh.trailer = trailer + fh.image_boundaries = ImageBoundaries(header, trailer, fh.mda) + calc_area_def = fh.get_area_def(dataset_id) + + return calc_area_def, expected_area_def + + +@pytest.mark.parametrize( + "actual, expected", + [ + (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_VISIR_FULLDISK)), + (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK_FILL)), + (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN)), + (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN_FILL)), + (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_RAPIDSCAN)), + (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_RAPIDSCAN_FILL)), + (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI)), + (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI_FILL)), + (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI)), + (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI_FILL)), + (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_VISIR_FULLDISK)), + (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK_FILL)), + (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_VISIR_RAPIDSCAN)), + (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_VISIR_RAPIDSCAN_FILL)), + (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_RAPIDSCAN)), + (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_RAPIDSCAN_FILL)), + (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI)), + (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI_FILL)), + (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI)), + (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI_FILL)), + ] +) +def test_area_definitions(actual, expected): + """Test area definitions with only one area.""" + np.testing.assert_allclose(np.array(actual.area_extent), + 
np.array(expected['Area extent'])) + assert actual.width == expected['Number of columns'] + assert actual.height == expected['Number of rows'] + assert actual.area_id == expected['Area ID'] + + +@pytest.mark.parametrize( + "actual, expected", + [ + (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK)), + (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK)), + ] +) +def test_stacked_area_definitions(actual, expected): + """Test area definitions with stacked areas.""" + np.testing.assert_allclose(np.array(actual.defs[0].area_extent), + np.array(expected['Area extent 0'])) + np.testing.assert_allclose(np.array(actual.defs[1].area_extent), + np.array(expected['Area extent 1'])) + assert actual.width == expected['Number of columns'] + assert actual.height == expected['Number of rows'] + assert actual.defs[0].area_id, expected['Area ID'] + assert actual.defs[1].area_id, expected['Area ID'] + + +def prepare_is_roi(test_dict): + """Prepare calculated and expected check for region of interest data for equal checking.""" + earth_model = 2 + dataset_id = make_dataid(name='VIS006') + is_full_disk = test_dict['is_full_disk'] + is_rapid_scan = test_dict['is_rapid_scan'] + header = create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan) + trailer = create_test_trailer(is_rapid_scan) + expected = test_dict['is_roi'] + + with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile, \ + mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict, \ + mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap, \ + mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'), \ + mock.patch( + 'satpy.readers.seviri_l1b_native.has_archive_header' + ) as has_archive_header: + has_archive_header.return_value = True + fromfile.return_value = header + recarray2dict.side_effect = (lambda x: x) + _get_memmap.return_value = np.arange(3) + fh = NativeMSGFileHandler(None, {}, None) + fh.header = header + fh.trailer = trailer + actual = fh.is_roi() + + return actual, expected + + +@pytest.mark.parametrize( + "actual, expected", + [ + (prepare_is_roi(TEST_IS_ROI_FULLDISK)), + (prepare_is_roi(TEST_IS_ROI_RAPIDSCAN)), + (prepare_is_roi(TEST_IS_ROI_ROI)), + ] +) +def test_is_roi(actual, expected): + """Test if given area is of area-of-interest.""" + assert actual == expected + + class TestNativeMSGFileHandler(unittest.TestCase): """Test the NativeMSGFileHandler.""" @@ -528,460 +775,6 @@ def test_get_available_channels(self): self.assertTrue(available_chs[bandname]) -class TestNativeMSGArea(unittest.TestCase): - """Test NativeMSGFileHandler.get_area_extent. - - The expected results have been verified by manually - inspecting the output of geoferenced imagery. - """ - - @staticmethod - def create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan, good_qual='OK'): - """Create mocked NativeMSGFileHandler. - - Contains sufficient attributes for NativeMSGFileHandler.get_area_extent to be able to execute. 
- """ - if dataset_id['name'] == 'HRV': - reference_grid = 'ReferenceGridHRV' - column_dir_grid_step = 1.0001343488693237 - line_dir_grid_step = 1.0001343488693237 - else: - reference_grid = 'ReferenceGridVIS_IR' - column_dir_grid_step = 3.0004031658172607 - line_dir_grid_step = 3.0004031658172607 - - if is_full_disk: - north = 3712 - east = 1 - west = 3712 - south = 1 - n_visir_cols = 3712 - n_visir_lines = 3712 - n_hrv_cols = 11136 - n_hrv_lines = 11136 - ssp_lon = 0 - elif is_rapid_scan: - north = 3712 - east = 1 - west = 3712 - south = 2321 - n_visir_cols = 3712 - n_visir_lines = 1392 - n_hrv_cols = 11136 - n_hrv_lines = 4176 - ssp_lon = 9.5 - else: - north = 3574 - east = 78 - west = 2591 - south = 1746 - n_visir_cols = 2516 - n_visir_lines = north - south + 1 - n_hrv_cols = n_visir_cols * 3 - n_hrv_lines = n_visir_lines * 3 - ssp_lon = 0 - header = { - '15_MAIN_PRODUCT_HEADER': { - 'QQOV': {'Name': 'QQOV', - 'Value': good_qual} - }, - '15_DATA_HEADER': { - 'ImageDescription': { - reference_grid: { - 'ColumnDirGridStep': column_dir_grid_step, - 'LineDirGridStep': line_dir_grid_step, - 'GridOrigin': 2, # south-east corner - }, - 'ProjectionDescription': { - 'LongitudeOfSSP': ssp_lon - } - }, - 'GeometricProcessing': { - 'EarthModel': { - 'TypeOfEarthModel': earth_model, - 'EquatorialRadius': 6378169.0, - 'NorthPolarRadius': 6356583.800000001, - 'SouthPolarRadius': 6356583.800000001, - } - }, - 'SatelliteStatus': { - 'SatelliteDefinition': { - 'SatelliteId': 324 - } - } - }, - '15_SECONDARY_PRODUCT_HEADER': { - 'NorthLineSelectedRectangle': {'Value': north}, - 'EastColumnSelectedRectangle': {'Value': east}, - 'WestColumnSelectedRectangle': {'Value': west}, - 'SouthLineSelectedRectangle': {'Value': south}, - 'SelectedBandIDs': {'Value': 'xxxxxxxxxxxx'}, - 'NumberColumnsVISIR': {'Value': n_visir_cols}, - 'NumberLinesVISIR': {'Value': n_visir_lines}, - 'NumberColumnsHRV': {'Value': n_hrv_cols}, - 'NumberLinesHRV': {'Value': n_hrv_lines}, - } - - } - - return header - - @staticmethod - def create_test_trailer(is_rapid_scan): - """Create Test Trailer. - - Mocked Trailer with sufficient attributes for - NativeMSGFileHandler.get_area_extent to be able to execute. 
- """ - trailer = { - '15TRAILER': { - 'ImageProductionStats': { - 'ActualL15CoverageHRV': { - 'UpperNorthLineActual': 11136, - 'UpperWestColumnActual': 7533, - 'UpperSouthLineActual': 8193, - 'UpperEastColumnActual': 1966, - 'LowerNorthLineActual': 8192, - 'LowerWestColumnActual': 5568, - 'LowerSouthLineActual': 1, - 'LowerEastColumnActual': 1 - }, - 'ActualScanningSummary': { - 'ReducedScan': is_rapid_scan - } - } - } - } - - return trailer - - def prepare_area_defs(self, test_dict): - """Prepare calculated and expected area definitions for equal checking.""" - earth_model = test_dict['earth_model'] - dataset_id = test_dict['dataset_id'] - is_full_disk = test_dict['is_full_disk'] - is_rapid_scan = test_dict['is_rapid_scan'] - fill_disk = test_dict['fill_disk'] - header = self.create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan) - trailer = self.create_test_trailer(is_rapid_scan) - expected_area_def = test_dict['expected_area_def'] - - with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile, \ - mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'), \ - mock.patch( - 'satpy.readers.seviri_l1b_native.has_archive_header' - ) as has_archive_header: - has_archive_header.return_value = True - fromfile.return_value = header - recarray2dict.side_effect = (lambda x: x) - _get_memmap.return_value = np.arange(3) - fh = NativeMSGFileHandler(None, {}, None) - fh.fill_disk = fill_disk - fh.header = header - fh.trailer = trailer - fh.image_boundaries = ImageBoundaries(header, trailer, fh.mda) - calc_area_def = fh.get_area_def(dataset_id) - - return calc_area_def, expected_area_def - - # Earth model 1 tests - def test_earthmodel1_visir_fulldisk(self): - """Test the VISIR FES with the EarthModel 1.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL1_VISIR_FULLDISK - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel1_hrv_fulldisk(self): - """Test the HRV FES with the EarthModel 1.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK - ) - np.testing.assert_allclose(np.array(calculated.defs[0].area_extent), - np.array(expected['Area extent 0'])) - np.testing.assert_allclose(np.array(calculated.defs[1].area_extent), - np.array(expected['Area extent 1'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.defs[0].area_id, expected['Area ID']) - self.assertEqual(calculated.defs[1].area_id, expected['Area ID']) - - def test_earthmodel1_hrv_fulldisk_fill(self): - """Test the HRV FES padded to fulldisk with the EarthModel 1.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK_FILL - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - 
def test_earthmodel1_visir_rapidscan(self): - """Test the VISIR RSS with the EarthModel 1.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel1_visir_rapidscan_fill(self): - """Test the VISIR RSS padded to fulldisk with the EarthModel 1.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN_FILL - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel1_hrv_rapidscan(self): - """Test the HRV RSS with the EarthModel 1.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL1_HRV_RAPIDSCAN - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel1_hrv_rapidscan_fill(self): - """Test the HRV RSS padded to fulldisk with the EarthModel 1.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL1_HRV_RAPIDSCAN_FILL - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel1_visir_roi(self): - """Test the VISIR ROI with the EarthModel 1.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel1_visir_roi_fill(self): - """Test the VISIR ROI padded to fulldisk with the EarthModel 1.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI_FILL - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel1_hrv_roi(self): - """Test the HRV ROI with the EarthModel 1.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel1_hrv_roi_fill(self): - """Test the HRV ROI padded to 
fulldisk with the EarthModel 1.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI_FILL - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - # Earth model 2 tests - def test_earthmodel2_visir_fulldisk(self): - """Test the VISIR FES with the EarthModel 2.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL2_VISIR_FULLDISK - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel2_hrv_fulldisk(self): - """Test the HRV FES with the EarthModel 2.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK - ) - np.testing.assert_allclose(np.array(calculated.defs[0].area_extent), - np.array(expected['Area extent 0'])) - np.testing.assert_allclose(np.array(calculated.defs[1].area_extent), - np.array(expected['Area extent 1'])) - - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.defs[0].area_id, expected['Area ID']) - self.assertEqual(calculated.defs[1].area_id, expected['Area ID']) - - def test_earthmodel2_hrv_fulldisk_fill(self): - """Test the HRV FES padded to fulldisk with the EarthModel 2.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK_FILL - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel2_visir_rapidscan(self): - """Test the VISIR RSS with the EarthModel 2.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL2_VISIR_RAPIDSCAN - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel2_visir_rapidscan_fill(self): - """Test the VISIR RSS padded to fulldisk with the EarthModel 2.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL2_VISIR_RAPIDSCAN_FILL - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel2_hrv_rapidscan(self): - """Test the HRV RSS with the EarthModel 2.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL2_HRV_RAPIDSCAN - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, 
expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel2_hrv_rapidscan_fill(self): - """Test the HRV RSS padded to fulldisk with the EarthModel 2.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL2_HRV_RAPIDSCAN_FILL - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel2_visir_roi(self): - """Test the VISIR ROI with the EarthModel 2.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel2_visir_roi_fill(self): - """Test the VISIR ROI padded to fulldisk with the EarthModel 2.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI_FILL - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel2_hrv_roi(self): - """Test the HRV ROI with the EarthModel 2.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - def test_earthmodel2_hrv_roi_fill(self): - """Test the HRV ROI padded to fulldisk with the EarthModel 2.""" - calculated, expected = self.prepare_area_defs( - TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI_FILL - ) - np.testing.assert_allclose(np.array(calculated.area_extent), - np.array(expected['Area extent'])) - self.assertEqual(calculated.width, expected['Number of columns']) - self.assertEqual(calculated.height, expected['Number of rows']) - self.assertEqual(calculated.area_id, expected['Area ID']) - - # Test check for Region Of Interest (ROI) data - def prepare_is_roi(self, test_dict): - """Prepare calculated and expected check for region of interest data for equal checking.""" - earth_model = 2 - dataset_id = make_dataid(name='VIS006') - is_full_disk = test_dict['is_full_disk'] - is_rapid_scan = test_dict['is_rapid_scan'] - header = self.create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan) - trailer = self.create_test_trailer(is_rapid_scan) - expected_is_roi = test_dict['is_roi'] - - with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile, \ - mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'), \ - mock.patch( - 'satpy.readers.seviri_l1b_native.has_archive_header' - ) as has_archive_header: - 
has_archive_header.return_value = True - fromfile.return_value = header - recarray2dict.side_effect = (lambda x: x) - _get_memmap.return_value = np.arange(3) - fh = NativeMSGFileHandler(None, {}, None) - fh.header = header - fh.trailer = trailer - calc_is_roi = fh.is_roi() - - return calc_is_roi, expected_is_roi - - def test_is_roi_fulldisk(self): - """Test check for region of interest with FES data.""" - calculated, expected = self.prepare_is_roi(TEST_IS_ROI_FULLDISK) - self.assertEqual(calculated, expected) - - def test_is_roi_rapidscan(self): - """Test check for region of interest with RSS data.""" - calculated, expected = self.prepare_is_roi(TEST_IS_ROI_RAPIDSCAN) - self.assertEqual(calculated, expected) - - def test_is_roi_roi(self): - """Test check for region of interest with ROI data.""" - calculated, expected = self.prepare_is_roi(TEST_IS_ROI_ROI) - self.assertEqual(calculated, expected) - - TEST_HEADER_CALIB = { 'RadiometricProcessing': { 'Level15ImageCalibration': { @@ -1348,7 +1141,7 @@ def test_file_pattern(self, reader): ) def test_header_type(file_content, exp_header_size): """Test identification of the file header type.""" - header = TestNativeMSGArea.create_test_header( + header = create_test_header( dataset_id=make_dataid(name='VIS006', resolution=3000), earth_model=1, is_full_disk=True, @@ -1371,14 +1164,14 @@ def test_header_type(file_content, exp_header_size): def test_header_warning(): """Test warning is raised for NOK quality flag.""" - header_good = TestNativeMSGArea.create_test_header( + header_good = create_test_header( dataset_id=make_dataid(name='VIS006', resolution=3000), earth_model=1, is_full_disk=True, is_rapid_scan=0, good_qual='OK' ) - header_bad = TestNativeMSGArea.create_test_header( + header_bad = create_test_header( dataset_id=make_dataid(name='VIS006', resolution=3000), earth_model=1, is_full_disk=True, @@ -1408,8 +1201,10 @@ def test_header_warning(): @pytest.mark.parametrize( "starts_with, expected", - [(ASCII_STARTSWITH, True), - (b'invalid_startswith', False)] + [ + (ASCII_STARTSWITH, True), + (b'invalid_startswith', False) + ] ) def test_has_archive_header(starts_with, expected): """Test if the file includes an ASCII archive header.""" From 07953a7d26f7eb8a80feb6c3ae3e02e65b03bee0 Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Wed, 17 May 2023 21:33:02 +0200 Subject: [PATCH 0135/1416] Streamline FileHandler creation, code clean-up. 
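
This clean-up standardizes how the mocked file handler is constructed in
the tests: keyword arguments instead of positional ones, dict literals
instead of dict(), and tuples instead of lists for the parametrize
tables. A minimal sketch of the constructor change (argument names as
used in this test module; the surrounding mock.patch context is elided):

    # before: positional arguments hide which inputs are placeholders
    fh = NativeMSGFileHandler('myfile', {}, None)

    # after: explicit keywords show that filename, filename_info and
    # filetype_info are all dummies under the active mocks
    fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None)
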
--- .../reader_tests/test_seviri_l1b_native.py | 80 +++++++++---------- 1 file changed, 40 insertions(+), 40 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py index 8c1060f1e9..323fabd2a9 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_native.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py @@ -644,19 +644,19 @@ def prepare_area_definitions(test_dict): fromfile.return_value = header recarray2dict.side_effect = (lambda x: x) _get_memmap.return_value = np.arange(3) - fh = NativeMSGFileHandler(None, {}, None) + fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) fh.fill_disk = fill_disk fh.header = header fh.trailer = trailer fh.image_boundaries = ImageBoundaries(header, trailer, fh.mda) - calc_area_def = fh.get_area_def(dataset_id) + actual_area_def = fh.get_area_def(dataset_id) - return calc_area_def, expected_area_def + return actual_area_def, expected_area_def @pytest.mark.parametrize( "actual, expected", - [ + ( (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_VISIR_FULLDISK)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK_FILL)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN)), @@ -677,7 +677,7 @@ def prepare_area_definitions(test_dict): (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI_FILL)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI_FILL)), - ] + ) ) def test_area_definitions(actual, expected): """Test area definitions with only one area.""" @@ -690,10 +690,10 @@ def test_area_definitions(actual, expected): @pytest.mark.parametrize( "actual, expected", - [ + ( (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK)), - ] + ) ) def test_stacked_area_definitions(actual, expected): """Test area definitions with stacked areas.""" @@ -728,7 +728,7 @@ def prepare_is_roi(test_dict): fromfile.return_value = header recarray2dict.side_effect = (lambda x: x) _get_memmap.return_value = np.arange(3) - fh = NativeMSGFileHandler(None, {}, None) + fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) fh.header = header fh.trailer = trailer actual = fh.is_roi() @@ -738,11 +738,11 @@ def prepare_is_roi(test_dict): @pytest.mark.parametrize( "actual, expected", - [ + ( (prepare_is_roi(TEST_IS_ROI_FULLDISK)), (prepare_is_roi(TEST_IS_ROI_RAPIDSCAN)), (prepare_is_roi(TEST_IS_ROI_ROI)), - ] + ) ) def test_is_roi(actual, expected): """Test if given area is of area-of-interest.""" @@ -755,7 +755,7 @@ class TestNativeMSGFileHandler(unittest.TestCase): def test_get_available_channels(self): """Test the derivation of the available channel list.""" available_chs = get_available_channels(TEST1_HEADER_CHNLIST) - trues = ['WV_062', 'WV_073', 'IR_108', 'VIS006', 'VIS008', 'IR_120'] + trues = ('WV_062', 'WV_073', 'IR_108', 'VIS006', 'VIS008', 'IR_120') for bandname in AVAILABLE_CHANNELS: if bandname in trues: self.assertTrue(available_chs[bandname]) @@ -763,7 +763,7 @@ def test_get_available_channels(self): self.assertFalse(available_chs[bandname]) available_chs = get_available_channels(TEST2_HEADER_CHNLIST) - trues = ['VIS006', 'VIS008', 'IR_039', 'WV_062', 'WV_073', 'IR_087', 'HRV'] + trues = ('VIS006', 'VIS008', 'IR_039', 'WV_062', 'WV_073', 'IR_087', 'HRV') for bandname in AVAILABLE_CHANNELS: if bandname in trues: 
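                # every band named in 'trues' must be reported available; all others must not be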
self.assertTrue(available_chs[bandname]) @@ -822,7 +822,7 @@ def file_handler(self): header['15_DATA_HEADER'].update(TEST_HEADER_CALIB) with mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler.__init__', return_value=None): - fh = NativeMSGFileHandler(filename='', filename_info=dict(), filetype_info=None) + fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) fh.header = header fh.trailer = trailer fh.platform_id = self.platform_id @@ -830,7 +830,7 @@ def file_handler(self): @pytest.mark.parametrize( ('channel', 'calibration', 'calib_mode', 'use_ext_coefs'), - [ + ( # VIS channel, internal coefficients ('VIS006', 'counts', 'NOMINAL', False), ('VIS006', 'radiance', 'NOMINAL', False), @@ -856,7 +856,7 @@ def file_handler(self): # HRV channel, external coefficients (mode should have no effect) ('HRV', 'radiance', 'GSICS', True), ('HRV', 'reflectance', 'NOMINAL', True), - ] + ) ) def test_calibrate( self, file_handler, counts, channel, calibration, calib_mode, @@ -914,7 +914,7 @@ def file_handler(self): data = self._fake_data() with mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler.__init__', return_value=None): - fh = NativeMSGFileHandler(filename='', filename_info=dict(), filetype_info=None) + fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) fh.header = header fh.trailer = trailer fh.mda = mda @@ -989,13 +989,13 @@ def test_get_dataset(self, file_handler): 'wavelength': (1, 2, 3), 'standard_name': 'counts' } - dataset = file_handler.get_dataset(dataset_id, dataset_info) + xarr = file_handler.get_dataset(dataset_id, dataset_info) expected = self._exp_data_array() - xr.testing.assert_equal(dataset, expected) - assert 'raw_metadata' not in dataset.attrs + xr.testing.assert_equal(xarr, expected) + assert 'raw_metadata' not in xarr.attrs assert file_handler.start_time == datetime(2006, 1, 1, 12, 15, 0) assert file_handler.end_time == datetime(2006, 1, 1, 12, 30, 0) - assert_attrs_equal(dataset.attrs, expected.attrs, tolerance=1e-4) + assert_attrs_equal(xarr.attrs, expected.attrs, tolerance=1e-4) @staticmethod def _exp_data_array(): @@ -1005,7 +1005,7 @@ def _exp_data_array(): [44., 192., 835., 527.], [64., 273., 132., 788.]], dtype=np.float32), - dims=('y', 'x'), + dims=['y', 'x'], attrs={ 'orbital_parameters': { 'satellite_actual_longitude': -3.55117540817073, @@ -1050,8 +1050,8 @@ def test_get_dataset_with_raw_metadata(self, file_handler): 'wavelength': (1, 2, 3), 'standard_name': 'counts' } - res = file_handler.get_dataset(dataset_id, dataset_info) - assert 'raw_metadata' in res.attrs + xarr = file_handler.get_dataset(dataset_id, dataset_info) + assert 'raw_metadata' in xarr.attrs def test_satpos_no_valid_orbit_polynomial(self, file_handler): """Test satellite position if there is no valid orbit polynomial.""" @@ -1068,8 +1068,8 @@ def test_satpos_no_valid_orbit_polynomial(self, file_handler): 'standard_name': 'counts' } with pytest.warns(UserWarning, match="No orbit polynomial"): - res = file_handler.get_dataset(dataset_id, dataset_info) - assert 'satellite_actual_longitude' not in res.attrs[ + xarr = file_handler.get_dataset(dataset_id, dataset_info) + assert 'satellite_actual_longitude' not in xarr.attrs[ 'orbital_parameters'] @@ -1134,10 +1134,10 @@ def test_file_pattern(self, reader): @pytest.mark.parametrize( 'file_content,exp_header_size', - [ + ( (ASCII_STARTSWITH, 450400), # with ascii header (b'foobar', 445286), # without ascii header - ] + ) ) def test_header_type(file_content, exp_header_size): 
"""Test identification of the file header type.""" @@ -1157,7 +1157,7 @@ def test_header_type(file_content, exp_header_size): fromfile.return_value = header recarray2dict.side_effect = (lambda x: x) _get_memmap.return_value = np.arange(3) - fh = NativeMSGFileHandler('myfile', {}, None) + fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) assert fh.header_type.itemsize == exp_header_size assert '15_SECONDARY_PRODUCT_HEADER' in fh.header @@ -1192,38 +1192,38 @@ def test_header_warning(): fromfile.return_value = header_good with warnings.catch_warnings(): warnings.simplefilter("error") - NativeMSGFileHandler('myfile', {}, None) + NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) fromfile.return_value = header_bad with pytest.warns(UserWarning, match=exp_warning): - NativeMSGFileHandler('myfile', {}, None) + NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) @pytest.mark.parametrize( "starts_with, expected", [ (ASCII_STARTSWITH, True), - (b'invalid_startswith', False) + (b'this_shall_fail', False) ] ) def test_has_archive_header(starts_with, expected): """Test if the file includes an ASCII archive header.""" with mock.patch("builtins.open", mock.mock_open(read_data=starts_with)): - assert has_archive_header('filename') == expected + actual = has_archive_header('filename') + assert actual == expected def test_read_header(): """Test that reading header returns the header correctly converted to a dictionary.""" - expected = {'SatelliteId': 324, 'NominalLongitude': 0.0, 'SatelliteStatus': 1} + keys = ('SatelliteId', 'NominalLongitude', 'SatelliteStatus') + values = (324, 0.0, 1) + expected = dict(zip(keys, values)) - dtypes = np.dtype([ - ('SatelliteId', np.uint16), - ('NominalLongitude', np.float32), - ('SatelliteStatus', np.uint8) - ]) - hdr_data = np.array([(324, 0.0, 1)], dtype=dtypes) + types = (np.uint16, np.float32, np.uint8) + dtypes = np.dtype([(k, t) for k, t in zip(keys, types)]) + hdr_data = np.array([values], dtype=dtypes) with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile: fromfile.return_value = hdr_data actual = recarray2dict(hdr_data) - unittest.TestCase().assertDictEqual(actual, expected) + assert actual == expected From 33c288e9b64102a8e456eadee3811f7e11033b92 Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Wed, 17 May 2023 21:42:57 +0200 Subject: [PATCH 0136/1416] Fix docstrings. --- satpy/tests/reader_tests/test_seviri_l1b_native.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py index 323fabd2a9..33b975dd45 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_native.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py @@ -503,9 +503,9 @@ def create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan, good_qual='OK'): - """Create mocked NativeMSGFileHandler. + """Create test header for SEVIRI L1.5 product. - Contains sufficient attributes for NativeMSGFileHandler.get_area_extent to be able to execute. + Header includes mandatory attributes for NativeMSGFileHandler.get_area_extent """ if dataset_id['name'] == 'HRV': reference_grid = 'ReferenceGridHRV' @@ -594,10 +594,9 @@ def create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan, goo def create_test_trailer(is_rapid_scan): - """Create Test Trailer. + """Create test trailer for SEVIRI L1.5 product. 
- Mocked Trailer with sufficient attributes for - NativeMSGFileHandler.get_area_extent to be able to execute. + Trailer includes mandatory attributes for NativeMSGFileHandler.get_area_extent """ trailer = { '15TRAILER': { From 01608a1bc0372a459d00587667ad241a2ed1a03d Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Fri, 19 May 2023 11:53:39 -0500 Subject: [PATCH 0137/1416] Add minor tests and refactor netcdf test to use a mock class so that repeating parameterize decorators can be removed. --- satpy/readers/clavrx.py | 70 +++---- satpy/tests/reader_tests/test_clavrx.py | 70 +++++-- satpy/tests/reader_tests/test_clavrx_nc.py | 218 ++++++++++++--------- 3 files changed, 215 insertions(+), 143 deletions(-) diff --git a/satpy/readers/clavrx.py b/satpy/readers/clavrx.py index 3add1ea380..3d7455d209 100644 --- a/satpy/readers/clavrx.py +++ b/satpy/readers/clavrx.py @@ -296,6 +296,7 @@ def __init__(self, filename, filename_info, filetype_info): self.sensor = _get_sensor(self.file_content.get('/attr/sensor')) self.platform = _get_platform(self.file_content.get('/attr/platform')) + self.resolution = self.get_nadir_resolution(self.sensor) @property def start_time(self): @@ -329,60 +330,63 @@ def get_nadir_resolution(self, sensor): def _available_aliases(self, ds_info, current_var): """Add alias if there is a match.""" + new_info = ds_info.copy() alias_info = CHANNEL_ALIASES.get(self.sensor).get(current_var, None) if alias_info is not None: alias_info.update({"file_key": current_var}) - alias_info["resolution"] = self.get_nadir_resolution(self.sensor) - ds_info.update(alias_info) - yield True, ds_info + new_info.update(alias_info) + yield True, new_info - def _add_info_if_appropriate(self, is_avail, ds_info, nadir_resolution): + def _supplement_configured(self, configured_datasets=None): """Add more information if this reader can provide it.""" - new_info = ds_info.copy() # don't change input - this_res = ds_info.get('resolution') - var_name = ds_info.get('file_key', ds_info['name']) - matches = self.file_type_matches(ds_info['file_type']) - # we can confidently say that we can provide this dataset and can - # provide more info - if matches and var_name in self and this_res != nadir_resolution: - new_info['resolution'] = nadir_resolution - if self._is_polar(): - new_info['coordinates'] = ds_info.get('coordinates', ('longitude', 'latitude')) - yield True, new_info - elif is_avail is None: - # if we didn't know how to handle this dataset and no one else did - # then we should keep it going down the chain - yield is_avail, ds_info + for is_avail, ds_info in (configured_datasets or []): + # some other file handler knows how to load this + print(is_avail, ds_info) + if is_avail is not None: + yield is_avail, ds_info + + new_info = ds_info.copy() # don't change input + this_res = ds_info.get('resolution') + var_name = ds_info.get('file_key', ds_info['name']) + matches = self.file_type_matches(ds_info['file_type']) + # we can confidently say that we can provide this dataset and can + # provide more info + if matches and var_name in self and this_res != self.resolution: + new_info['resolution'] = self.resolution + if self._is_polar(): + new_info['coordinates'] = ds_info.get('coordinates', ('longitude', 'latitude')) + yield True, new_info + elif is_avail is None: + # if we didn't know how to handle this dataset and no one else did + # then we should keep it going down the chain + yield is_avail, ds_info - def _dynamic_datasets(self, nadir_resolution): + def _dynamic_datasets(self): """Get data from file and 
build aliases.""" for var_name, val in self.file_content.items(): if isinstance(val, SDS): ds_info = { 'file_type': self.filetype_info['file_type'], - 'resolution': nadir_resolution, + 'resolution': self.resolution, 'name': var_name, } if self._is_polar(): ds_info['coordinates'] = ['longitude', 'latitude'] - yield True, ds_info + # always yield what we have + yield True, ds_info if CHANNEL_ALIASES.get(self.sensor) is not None: + # yield variable as it is + # yield any associated aliases yield from self._available_aliases(ds_info, var_name) def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" - nadir_resolution = self.get_nadir_resolution(self.sensor) - # update previously configured datasets - for is_avail, ds_info in (configured_datasets or []): - # some other file handler knows how to load this - if is_avail is not None: - yield is_avail, ds_info - - yield from self._add_info_if_appropriate(is_avail, ds_info, nadir_resolution) + yield from self._supplement_configured(configured_datasets) - yield from self._dynamic_datasets(nadir_resolution) + # get data from file dynamically + yield from self._dynamic_datasets() def get_shape(self, dataset_id, ds_info): """Get the shape.""" @@ -433,7 +437,7 @@ def __init__(self, filename, filename_info, filetype_info): self.nc.coords["longitude"] = _CLAVRxHelper._get_data(self.nc.coords["longitude"], {"name": "longitude"}) - def _get_ds_info_for_data_arr(self, var_name): + def _dynamic_dataset_info(self, var_name): """Set data name and, if applicable, aliases.""" channel_info = None ds_info = { @@ -468,7 +472,7 @@ def _available_file_datasets(self, handled_vars): # we need 'traditional' y/x dimensions currently continue - yield from self._get_ds_info_for_data_arr(var_name) + yield from self._dynamic_dataset_info(var_name) def available_datasets(self, configured_datasets=None): """Dynamically discover what variables can be loaded from this file. 
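
The refactor above funnels available_datasets() through two generators:
_supplement_configured(), which passes through or enriches entries the
YAML configuration already declared, and _dynamic_datasets(), which walks
the file contents and additionally yields channel aliases such as M09 for
refl_1_38um_nom. A condensed, hypothetical illustration of the alias step
(the CHANNEL_ALIASES layout and helper signature are simplified from the
diff above, not the full implementation):

    # Simplified stand-in for the per-sensor alias table: it maps the
    # variable name stored in the file to the band name users request.
    CHANNEL_ALIASES = {
        "viirs": {"refl_1_38um_nom": {"name": "M09"}},
    }

    def _available_aliases(sensor, ds_info, current_var):
        """Yield an aliased copy of ds_info when the sensor defines one."""
        alias_info = CHANNEL_ALIASES.get(sensor, {}).get(current_var)
        if alias_info is not None:
            new_info = ds_info.copy()           # leave the original entry untouched
            new_info.update(alias_info)         # rename, e.g. to 'M09'
            new_info["file_key"] = current_var  # data is still read from the file variable
            yield True, new_info                # (is_available, dataset_info), as Satpy expects

With this pattern a request for 'M09' resolves to the refl_1_38um_nom
array, which is what test_available_datasets_with_alias in the test diff
below asserts.
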
diff --git a/satpy/tests/reader_tests/test_clavrx.py b/satpy/tests/reader_tests/test_clavrx.py index 5a90bf873a..94a3da097f 100644 --- a/satpy/tests/reader_tests/test_clavrx.py +++ b/satpy/tests/reader_tests/test_clavrx.py @@ -48,7 +48,6 @@ def get_test_content(self, filename, filename_info, filetype_info): '/attr/platform': 'SNPP', '/attr/sensor': 'VIIRS', } - file_content['longitude'] = xr.DataArray( da.from_array(DEFAULT_LON_DATA, chunks=4096), attrs={ @@ -104,6 +103,20 @@ def get_test_content(self, filename, filename_info, filetype_info): }) file_content['variable3/shape'] = DEFAULT_FILE_SHAPE + file_content['refl_1_38um_nom'] = xr.DataArray( + da.from_array(DEFAULT_FILE_DATA, chunks=4096).astype(np.float32), + attrs={ + 'SCALED': 1, + 'add_offset': 59.0, + 'scale_factor': 0.0018616290763020515, + 'units': '%', + '_FillValue': -32768, + 'valid_range': [-32767, 32767], + 'actual_range': [-2., 120.], + 'actual_missing': -999.0 + }) + file_content['refl_1_38um_nom/shape'] = DEFAULT_FILE_SHAPE + return file_content @@ -204,6 +217,24 @@ def test_available_datasets(self): self.assertTrue(new_ds_infos[8][0]) self.assertEqual(new_ds_infos[8][1]['resolution'], 742) + def test_available_datasets_with_alias(self): + """Test availability of aliased dataset.""" + import xarray as xr + + from satpy.readers import load_reader + r = load_reader(self.reader_configs) + with mock.patch('satpy.readers.clavrx.SDS', xr.DataArray): + loadables = r.select_files_from_pathnames([ + 'clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf', + ]) + r.create_filehandlers(loadables) + available_ds = list(r.file_handlers['clavrx_hdf4'][0].available_datasets()) + + self.assertEqual(available_ds[5][1]["name"], "refl_1_38um_nom") + + self.assertEqual(available_ds[6][1]["name"], "M09") + self.assertEqual(available_ds[6][1]["file_key"], "refl_1_38um_nom") + def test_load_all(self): """Test loading all test datasets.""" import xarray as xr @@ -216,17 +247,17 @@ def test_load_all(self): ]) r.create_filehandlers(loadables) - var_list = ['variable1', 'variable2', 'variable3'] + var_list = ["M09", 'variable2', 'variable3'] datasets = r.load(var_list) self.assertEqual(len(datasets), len(var_list)) for v in datasets.values(): - self.assertEqual(v.attrs['units'], '1') + self.assertIn(v.attrs['units'], ['1', '%']) self.assertEqual(v.attrs['platform_name'], 'npp') self.assertEqual(v.attrs['sensor'], 'viirs') self.assertIsInstance(v.attrs['area'], SwathDefinition) self.assertEqual(v.attrs['area'].lons.attrs['rows_per_scan'], 16) self.assertEqual(v.attrs['area'].lats.attrs['rows_per_scan'], 16) - self.assertIsInstance(datasets["variable3"].attrs.get("flag_meanings"), list) + self.assertIsInstance(datasets["variable3"].attrs.get("flag_meanings"), list) class FakeHDF4FileHandlerGeo(FakeHDF4FileHandler): @@ -263,17 +294,20 @@ def get_test_content(self, filename, filename_info, filetype_info): }) file_content['latitude/shape'] = DEFAULT_FILE_SHAPE - file_content['variable1'] = xr.DataArray( + file_content['refl_1_38um_nom'] = xr.DataArray( DEFAULT_FILE_DATA.astype(np.float32), dims=('y', 'x'), attrs={ - '_FillValue': -1, - 'scale_factor': 1., - 'add_offset': 0., - 'units': '1', - 'valid_range': (-32767, 32767), + 'SCALED': 1, + 'add_offset': 59.0, + 'scale_factor': 0.0018616290763020515, + 'units': '%', + '_FillValue': -32768, + 'valid_range': [-32767, 32767], + 'actual_range': [-2., 120.], + 'actual_missing': -999.0 }) - file_content['variable1/shape'] = DEFAULT_FILE_SHAPE + file_content['refl_1_38um_nom/shape'] = DEFAULT_FILE_SHAPE # 
data with fill values file_content['variable2'] = xr.DataArray( @@ -347,7 +381,7 @@ def test_no_nav_donor(self): 'clavrx_H08_20180806_1800.level2.hdf', ]) r.create_filehandlers(loadables) - self.assertRaises(IOError, r.load, ['variable1', 'variable2', 'variable3']) + self.assertRaises(IOError, r.load, ['refl_1_38um_nom', 'variable2', 'variable3']) def test_load_all_old_donor(self): """Test loading all test datasets with old donor.""" @@ -375,18 +409,18 @@ def test_load_all_old_donor(self): variables={'Projection': proj, 'x': x, 'y': y}, ) fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] - datasets = r.load(['variable1', 'variable2', 'variable3']) + datasets = r.load(['refl_1_38um_nom', 'variable2', 'variable3']) self.assertEqual(len(datasets), 3) for v in datasets.values(): self.assertNotIn('calibration', v.attrs) - self.assertEqual(v.attrs['units'], '1') + self.assertIn(v.attrs['units'], ['1', '%']) self.assertIsInstance(v.attrs['area'], AreaDefinition) if v.attrs.get("flag_values"): self.assertIn('_FillValue', v.attrs) else: self.assertNotIn('_FillValue', v.attrs) - if v.attrs["name"] == 'variable1': - self.assertIsInstance(v.attrs["valid_range"], tuple) + if v.attrs["name"] == 'refl_1_38um_nom': + self.assertIsInstance(v.attrs["valid_range"], list) else: self.assertNotIn('valid_range', v.attrs) if 'flag_values' in v.attrs: @@ -419,11 +453,11 @@ def test_load_all_new_donor(self): variables={'goes_imager_projection': proj, 'x': x, 'y': y}, ) fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] - datasets = r.load(['variable1', 'variable2', 'variable3']) + datasets = r.load(['refl_1_38um_nom', 'variable2', 'variable3']) self.assertEqual(len(datasets), 3) for v in datasets.values(): self.assertNotIn('calibration', v.attrs) - self.assertEqual(v.attrs['units'], '1') + self.assertIn(v.attrs['units'], ['1', '%']) self.assertIsInstance(v.attrs['area'], AreaDefinition) self.assertTrue(v.attrs['area'].is_geostationary) self.assertEqual(v.attrs['platform_name'], 'himawari8') diff --git a/satpy/tests/reader_tests/test_clavrx_nc.py b/satpy/tests/reader_tests/test_clavrx_nc.py index 5f0ba812b7..a7dba879e5 100644 --- a/satpy/tests/reader_tests/test_clavrx_nc.py +++ b/satpy/tests/reader_tests/test_clavrx_nc.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2021 Satpy developers +# Copyright (c) 2018 Satpy developers # # This file is part of satpy. 
# @@ -18,13 +18,17 @@ """Module for testing the satpy.readers.clavrx module.""" import os +import unittest from unittest import mock import numpy as np -import pytest import xarray as xr from pyresample.geometry import AreaDefinition +from satpy.readers import load_reader +from satpy.tests.reader_tests.test_netCDF_utils import FakeNetCDF4FileHandler + +ABI_FILE = 'clavrx_OR_ABI-L1b-RadC-M6C01_G16_s20231021601173.level2.nc' DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], @@ -40,7 +44,7 @@ FILL_VALUE = -32768 -def fake_test_content(filename, **kwargs): +def fake_dataset(): """Mimic reader input file content.""" attrs = { 'platform': 'G16', @@ -115,99 +119,129 @@ def fake_test_content(filename, **kwargs): return ds -class TestCLAVRXReaderGeo: - """Test CLAVR-X Reader with Geo files.""" +class FakeNetCDF4FileHandlerCLAVRx(FakeNetCDF4FileHandler): + """Swap-in NetCDF4 File Handler.""" + + def get_test_content(self, filename, filename_info, filetype_info): + """Get a fake dataset.""" + return fake_dataset() + + +class TestCLAVRXReaderNetCDF(unittest.TestCase): + """Test CLAVR-X Reader with NetCDF files.""" yaml_file = "clavrx.yaml" + filename = ABI_FILE + loadable_ids = list(fake_dataset().keys()) - def setup_method(self): - """Read fake data.""" + def setUp(self): + """Wrap NetCDF file handler with a fake handler.""" from satpy._config import config_search_paths - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + from satpy.readers.clavrx import CLAVRXNetCDFFileHandler - @pytest.mark.parametrize( - ("filenames", "expected_loadables"), - [([ABI_FILE], 1)] - ) - def test_reader_creation(self, filenames, expected_loadables): - """Test basic initialization.""" - from satpy.readers import load_reader - with mock.patch('satpy.readers.clavrx.xr.open_dataset') as od: - od.side_effect = fake_test_content - r = load_reader(self.reader_configs) - loadables = r.select_files_from_pathnames(filenames) - assert len(loadables) == expected_loadables - r.create_filehandlers(loadables) - # make sure we have some files - assert r.file_handlers - - @pytest.mark.parametrize( - ("filenames", "expected_datasets"), - [([ABI_FILE], ['variable1', 'refl_0_65um_nom', 'variable3']), ] - ) - def test_available_datasets(self, filenames, expected_datasets): + self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library + self.p = mock.patch.object(CLAVRXNetCDFFileHandler, '__bases__', + (FakeNetCDF4FileHandlerCLAVRx,), spec=True) + self.fake_open_dataset = mock.patch('satpy.readers.clavrx.xr.open_dataset', + return_value=fake_dataset()).start() + self.fake_handler = self.p.start() + self.p.is_local = True + + self.addCleanup(mock.patch.stopall) + + def test_init(self): + """Test basic init with no extra parameters.""" + r = load_reader(self.reader_configs) + loadables = r.select_files_from_pathnames([ABI_FILE]) + self.assertEqual(len(loadables), 1) + r.create_filehandlers(loadables) + # make sure we have some files + self.assertTrue(r.file_handlers) + + def test_available_datasets(self): """Test that variables are dynamically discovered.""" - from satpy.readers import load_reader - with mock.patch('satpy.readers.clavrx.xr.open_dataset') as od: - od.side_effect = fake_test_content - r = load_reader(self.reader_configs) - loadables = r.select_files_from_pathnames(filenames) - 
r.create_filehandlers(loadables) - avails = list(r.available_dataset_names) - for var_name in expected_datasets: - assert var_name in avails - # check extra datasets created by alias or coordinates - for var_name in ["latitude", "longitude"]: - assert var_name in avails - - @pytest.mark.parametrize( - ("filenames", "loadable_ids"), - [([ABI_FILE], ['variable1', 'refl_0_65um_nom', 'C02', 'variable3']), ] - ) - def test_load_all_new_donor(self, filenames, loadable_ids): + r = load_reader(self.reader_configs) + loadables = r.select_files_from_pathnames([ABI_FILE]) + r.create_filehandlers(loadables) + avails = list(r.available_dataset_names) + expected_datasets = self.loadable_ids + ["latitude", "longitude"] + self.assertEqual(sorted(avails), sorted(expected_datasets)) + + def test_load_all_new_donor(self): """Test loading all test datasets with new donor.""" - from satpy.readers import load_reader - with mock.patch('satpy.readers.clavrx.xr.open_dataset') as od: - od.side_effect = fake_test_content - r = load_reader(self.reader_configs) - loadables = r.select_files_from_pathnames(filenames) - r.create_filehandlers(loadables) - with mock.patch('satpy.readers.clavrx.glob') as g, \ - mock.patch('satpy.readers.clavrx.netCDF4.Dataset') as d: - g.return_value = ['fake_donor.nc'] - x = np.linspace(-0.1518, 0.1518, 300) - y = np.linspace(0.1518, -0.1518, 10) - proj = mock.Mock( - semi_major_axis=6378137, - semi_minor_axis=6356752.3142, - perspective_point_height=35791000, - longitude_of_projection_origin=-137.2, - sweep_angle_axis='x', - ) - d.return_value = fake_donor = mock.MagicMock( - variables={'goes_imager_projection': proj, 'x': x, 'y': y}, - ) - fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] - datasets = r.load(loadable_ids) - assert len(datasets) == 4 - for v in datasets.values(): - assert 'calibration' not in v.attrs - assert "units" in v.attrs - assert isinstance(v.attrs['area'], AreaDefinition) - assert v.attrs['platform_name'] == 'GOES-16' - assert v.attrs['sensor'] == 'abi' - assert 'rows_per_scan' not in v.coords.get('longitude').attrs - if v.attrs["name"] == 'variable1': - assert "valid_range" not in v.attrs - assert v.dtype == np.float64 - assert "_FillValue" not in v.attrs - # should have file variable and one alias for reflectance - elif v.attrs["name"] in ["refl_0_65um_nom", "C02"]: - assert isinstance(v.attrs["valid_range"], list) - assert v.dtype == np.float64 - assert "_FillValue" not in v.attrs.keys() - assert (v.attrs["file_key"] == "refl_0_65um_nom") - else: - assert (datasets['variable3'].attrs.get('flag_meanings')) is not None - assert (datasets['variable3'].attrs.get('flag_meanings') == '') - assert np.issubdtype(v.dtype, np.integer) + r = load_reader(self.reader_configs) + loadables = r.select_files_from_pathnames([ABI_FILE]) + r.create_filehandlers(loadables) + with mock.patch('satpy.readers.clavrx.glob') as g, \ + mock.patch('satpy.readers.clavrx.netCDF4.Dataset') as d: + g.return_value = ['fake_donor.nc'] + x = np.linspace(-0.1518, 0.1518, 300) + y = np.linspace(0.1518, -0.1518, 10) + proj = mock.Mock( + semi_major_axis=6378137, + semi_minor_axis=6356752.3142, + perspective_point_height=35791000, + longitude_of_projection_origin=-137.2, + sweep_angle_axis='x', + ) + d.return_value = fake_donor = mock.MagicMock( + variables={'goes_imager_projection': proj, 'x': x, 'y': y}, + ) + fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] + + datasets = r.load(self.loadable_ids) + self.assertEqual(len(datasets),
len(self.loadable_ids)) + for v in datasets.values(): + self.assertIsInstance(v.area, AreaDefinition) + self.assertEqual(v.platform_name, 'GOES-16') + self.assertEqual(v.sensor, 'abi') + + self.assertNotIn('calibration', v.attrs) + self.assertIn("units", v.attrs) + self.assertNotIn('rows_per_scan', v.coords.get('longitude').attrs) + # should have file variable and one alias for reflectance + if v.name == "variable1": + self.assertNotIn("valid_range", v.attrs) + self.assertNotIn("_FillValue", v.attrs) + self.assertEqual(np.float64, v.dtype) + elif v.name in ["refl_0_65um_nom", "C02"]: + self.assertIsInstance(v.valid_range, list) + self.assertEqual(np.float64, v.dtype) + self.assertNotIn("_FillValue", v.attrs) + if v.name == "C02": + self.assertEqual("refl_0_65um_nom", v.file_key) + else: + self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) + self.assertEqual('', + datasets['variable3'].attrs.get('flag_meanings'), + ) + assert np.issubdtype(v.dtype, np.integer) + + def test_yaml_datasets(self): + """Test available_datasets with fake variables from YAML.""" + r = load_reader(self.reader_configs) + loadables = r.select_files_from_pathnames([ABI_FILE]) + r.create_filehandlers(loadables) + # mimic the YAML file being configured for more datasets + fake_dataset_info = [ + (None, {'name': 'yaml1', 'resolution': None, 'file_type': ['clavrx_nc']}), + (True, {'name': 'yaml2', 'resolution': 0.5, 'file_type': ['clavrx_nc']}), + ] + new_ds_infos = list(r.file_handlers['clavrx_nc'][0].available_datasets( + fake_dataset_info)) + self.assertEqual(len(new_ds_infos), 9) + + # we have this and can provide the resolution + self.assertTrue(new_ds_infos[0][0]) + self.assertEqual(new_ds_infos[0][1]['resolution'], 2004) # hardcoded + + # we have this, but previous file handler said it knew about it + # and it is producing the same resolution as what we have + self.assertTrue(new_ds_infos[1][0]) + self.assertEqual(new_ds_infos[1][1]['resolution'], 0.5) + + # we have this, but don't want to change the resolution + # because a previous handler said it has it + self.assertTrue(new_ds_infos[2][0]) + self.assertEqual(new_ds_infos[2][1]['resolution'], 2004) From f6caf710ba115118b1142e417eefe8d984cef85c Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Fri, 19 May 2023 12:23:20 -0500 Subject: [PATCH 0138/1416] Remove excessive if/then statements and actually test the alias name --- satpy/tests/reader_tests/test_clavrx_nc.py | 40 ++++++++++++---------- 1 file changed, 21 insertions(+), 19 deletions(-) diff --git a/satpy/tests/reader_tests/test_clavrx_nc.py b/satpy/tests/reader_tests/test_clavrx_nc.py index a7dba879e5..b95a7dcce2 100644 --- a/satpy/tests/reader_tests/test_clavrx_nc.py +++ b/satpy/tests/reader_tests/test_clavrx_nc.py @@ -190,8 +190,27 @@ def test_load_all_new_donor(self): ) fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] - datasets = r.load(self.loadable_ids) - self.assertEqual(len(datasets), len(self.loadable_ids)) + datasets = r.load(self.loadable_ids + ["C02"]) + self.assertEqual(len(datasets), len(self.loadable_ids)+1) + + # should have file variable and one alias for reflectance + self.assertNotIn("valid_range", datasets["variable1"].attrs) + self.assertNotIn("_FillValue", datasets["variable1"].attrs) + self.assertEqual(np.float64, datasets["variable1"].dtype) + + assert np.issubdtype(datasets["variable3"].dtype, np.integer) + self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) + self.assertEqual('', + 
datasets['variable3'].attrs.get('flag_meanings'), + ) + + self.assertIsInstance(datasets["refl_0_65um_nom"].valid_range, list) + self.assertEqual(np.float64, datasets["refl_0_65um_nom"].dtype) + self.assertNotIn("_FillValue", datasets["refl_0_65um_nom"].attrs) + + self.assertEqual("refl_0_65um_nom", datasets["C02"].file_key) + self.assertNotIn("_FillValue", datasets["C02"].attrs) + for v in datasets.values(): self.assertIsInstance(v.area, AreaDefinition) self.assertEqual(v.platform_name, 'GOES-16') @@ -200,23 +219,6 @@ def test_load_all_new_donor(self): self.assertNotIn('calibration', v.attrs) self.assertIn("units", v.attrs) self.assertNotIn('rows_per_scan', v.coords.get('longitude').attrs) - # should have file variable and one alias for reflectance - if v.name == "variable1": - self.assertNotIn("valid_range", v.attrs) - self.assertNotIn("_FillValue", v.attrs) - self.assertEqual(np.float64, v.dtype) - elif v.name in ["refl_0_65um_nom", "C02"]: - self.assertIsInstance(v.valid_range, list) - self.assertEqual(np.float64, v.dtype) - self.assertNotIn("_FillValue", v.attrs) - if v.name == "C02": - self.assertEqual("refl_0_65um_nom", v.file_key) - else: - self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) - self.assertEqual('', - datasets['variable3'].attrs.get('flag_meanings'), - ) - assert np.issubdtype(v.dtype, np.integer) def test_yaml_datasets(self): """Test available_datasets with fake variables from YAML.""" From 26ad2b694a449434e67d7d41949b83d15e3804ba Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Fri, 19 May 2023 12:27:39 -0500 Subject: [PATCH 0139/1416] Remove extra space --- satpy/tests/reader_tests/test_clavrx.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_clavrx.py b/satpy/tests/reader_tests/test_clavrx.py index 94a3da097f..7f1fecc2be 100644 --- a/satpy/tests/reader_tests/test_clavrx.py +++ b/satpy/tests/reader_tests/test_clavrx.py @@ -233,7 +233,7 @@ def test_available_datasets_with_alias(self): self.assertEqual(available_ds[5][1]["name"], "refl_1_38um_nom") self.assertEqual(available_ds[6][1]["name"], "M09") - self.assertEqual(available_ds[6][1]["file_key"], "refl_1_38um_nom") + self.assertEqual(available_ds[6][1]["file_key"], "refl_1_38um_nom") def test_load_all(self): """Test loading all test datasets.""" From 6e498099627a7a6cf4a5ce98a11972367b9c3252 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 22 May 2023 09:35:09 +0000 Subject: [PATCH 0140/1416] Add navigation test for VIS channel --- satpy/readers/gms5_vissr_l1b.py | 6 --- .../tests/reader_tests/test_gms5_vissr_l1b.py | 52 +++++++++++++++---- 2 files changed, 41 insertions(+), 17 deletions(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index b26656f092..567527cf3e 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -440,10 +440,6 @@ def __init__(self, filename, filename_info, filetype_info): self._filename = filename self._filename_info = filename_info self._header, self._channel_type = self._read_header(filename) - - from pprint import pprint # FIXME - pprint(self._header) - self._mda = self._get_mda() def _read_header(self, filename): @@ -649,8 +645,6 @@ def _get_lons_lats(self, dataset_id, lines, pixels): earth_equatorial_radius=nav.EARTH_EQUATORIAL_RADIUS ) lons, lats = nav.get_lons_lats( - # lines=np.array([686, 2089]), - # pixels=np.array([1680, 1793]), # FIXME TODO lines=lines.astype(np.float64), pixels=pixels.astype(np.float64), static_params=(scan_params, 
proj_params), diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index 373970e8dc..257112c8a7 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -518,6 +518,22 @@ def dataset_id(self, request): @pytest.fixture def image_data(self, dataset_id): + """Get fake image data. + + Data type: + + ((line number, timestamp), (data1, data2)) + + VIS channel: + + pix = [6688, 6688, 6689, 6689] + lin = [2744, 8356, 2744, 8356] + + IR1 channel: + + pix = [1672, 1672, 1673, 1673] + lin = [686, 2089, 686, 2089] + """ line_control_word = np.dtype([ ('line_number', vissr.I4), ('scan_time', vissr.R8), @@ -526,8 +542,9 @@ def image_data(self, dataset_id): ('image_data', vissr.U1, (2,))]) if dataset_id['name'] == 'IR1': return np.array([((686, 50000), (1, 2)), ((2089, 50000), (3, 4))], dtype=dtype) - else: - raise NotImplementedError + elif dataset_id['name'] == 'VIS': + return np.array([((2744, 50000), (1, 2)), ((8356, 50000), (3, 4))], dtype=dtype) + raise NotImplementedError @pytest.fixture def header(self, control_block, image_params): @@ -581,10 +598,9 @@ def vis_frame_parameters(self): def coordinate_conversion(self): """Provide parameters for coordinate conversions. - Since we are testing with very small images, adjust pixel offset so that - the first column is at the image center. This has the advantage, that - the lat/lon coordinates are finite for every column. Otherwise they - would be in space. + Reference coordinates were computed near the central column. Adjust + pixel offset so that the first column is at the image center. This has + the advantage that we can test with very small 2x2 images. """ return { 'central_line_number_of_vissr_frame': { @@ -594,9 +610,9 @@ def coordinate_conversion(self): 'WV': 1379.1001 }, 'central_pixel_number_of_vissr_frame': { - 'IR1': 0.5, # to obtain finite lat/lon coordinates + 'IR1': 0.5, # instead of 1672.5 'IR2': 1672.5, - 'VIS': 0.5, # to obtain finite lat/lon coordinates + 'VIS': 0.5, # instead of 6688.5 'WV': 1672.5 }, 'pixel_difference_of_vissr_center_from_normal_position': { @@ -737,15 +753,29 @@ def simple_coordinate_conversion_table(self): def lons_lats_exp(self, dataset_id): """Get expected lon/lat coordinates. - Computed with JMA's Msial library. + Computed with JMA's Msial library for 2 pixels near the central column + (6688.5/1672.5 for VIS/IR). + + VIS: + + pix = [6688, 6688, 6689, 6689] + lin = [2744, 8356, 2744, 8356] + + IR1: + + pix = [1672, 1672, 1673, 1673] + lin = [686, 2089, 686, 2089] """ if dataset_id['name'] == 'IR1': lons_exp = [[139.680120, 139.718902], [140.307367, 140.346062]] lats_exp = [[35.045132, 35.045361], [-34.971012, -34.970738]] - elif dataset_id == 'VIS': - lons_exp = lats_exp = None + elif dataset_id['name'] == 'VIS': + lons_exp = [[139.665133, 139.674833], + [140.292579, 140.302249]] + lats_exp = [[35.076113, 35.076170], + [-34.940439, -34.940370]] else: raise NotImplementedError return lons_exp, lats_exp From 0c02ca7747ecb67b90b214eee1a5f5f4567f49ad Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 22 May 2023 09:42:16 +0000 Subject: [PATCH 0141/1416] Fix end-to-end navigation test Don't know where the original values came from. But since the code reproduces the reference values from Msial, I'm confident that the new values are correct. 
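A minimal sketch of this kind of reference check, assuming the fixture values (`static_params`, `predicted_params`, `lons_exp`, `lats_exp`) are built as in the test module below; the `get_lons_lats` signature matches the call used in the tests:

import numpy as np
import satpy.readers.gms5_vissr_navigation as nav

lons, lats = nav.get_lons_lats(
    lines=np.array([1000, 1500, 2000], dtype=np.float64),
    pixels=np.array([1000, 1500, 2000], dtype=np.float64),
    static_params=static_params,        # assumed: (scan_params, proj_params) from the fixtures
    predicted_params=predicted_params,  # assumed: (attitude_prediction, orbit_prediction) from the fixtures
)
# reference values computed with JMA's Msial library
np.testing.assert_allclose(lons, lons_exp, atol=1e-5)
np.testing.assert_allclose(lats, lats_exp, atol=1e-5)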
--- satpy/tests/reader_tests/test_gms5_vissr_l1b.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index 257112c8a7..5e0e87c9f2 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -280,12 +280,12 @@ def test_get_lons_lats( orbit_prediction, proj_params ): - lons_exp = [[-114.70480148, -112.23691703, -109.70496014], - [8.27367963, 8.74144293, 9.17639531], - [15.92344528, 16.27070079, 16.63288666]] - lats_exp = [[-22.86714851, -24.4190695, -25.92376274], - [-42.65777933, -39.93518282, -37.2094099], - [3.32018285, 6.04812029, 8.7739802]] + lons_exp = [[-114.56923, -112.096837, -109.559702], + [8.33221, 8.793893, 9.22339], + [15.918476, 16.268354, 16.6332]] + lats_exp = [[-23.078721, -24.629845, -26.133314], + [-42.513409, -39.790231, -37.06392], + [3.342834, 6.07043, 8.795932]] lons, lats = nav.get_lons_lats( lines=np.array([1000, 1500, 2000]), pixels=np.array([1000, 1500, 2000]), From c6fd10f61f82cb26d7e84579f72585d298a7d4b6 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 22 May 2023 10:05:27 +0000 Subject: [PATCH 0142/1416] Sort imports --- satpy/readers/gms5_vissr_l1b.py | 10 ++++++---- satpy/tests/reader_tests/test_gms5_vissr_l1b.py | 10 +++++++--- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index 00017e2ab2..5df0bda8f1 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -24,11 +24,11 @@ import numpy as np import xarray as xr -from satpy.utils import get_legacy_chunk_size -from satpy.readers.file_handlers import BaseFileHandler -from satpy.readers.hrit_jma import mjd2datetime64 import satpy.readers._geos_area as geos_area import satpy.readers.gms5_vissr_navigation as nav +from satpy.readers.file_handlers import BaseFileHandler +from satpy.readers.hrit_jma import mjd2datetime64 +from satpy.utils import get_legacy_chunk_size CHUNK_SIZE = get_legacy_chunk_size() @@ -54,6 +54,7 @@ CHANNELS = [('VIS', R4), ('IR1', R4), ('IR2', R4), ('WV', R4)] VISIR_SOLAR = [('VIS', R4), ('IR', R4)] +# fmt: off CONTROL_BLOCK = np.dtype([('control_block_size', I2), ('head_block_number_of_parameter_block', I2), ('parameter_block_size', I2), @@ -417,7 +418,7 @@ 'dtype': IMAGE_DATA_BLOCK_IR } } - +# fmt: on def recarr2dict(arr, preserve=None): if not preserve: @@ -591,6 +592,7 @@ def get_area_def_test(self, dsid): 'scandir': 'N2S' } from pprint import pprint + # pprint(mode_block) pprint(coord_conv) extent = geos_area.get_area_extent(proj_dict) diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index 5e0e87c9f2..8da3cc5f99 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -1,14 +1,14 @@ """Unit tests for GMS-5 VISSR reader.""" +from unittest import mock + import numpy as np import pytest -from unittest import mock -import satpy.readers.gms5_vissr_navigation as nav import satpy.readers.gms5_vissr_l1b as vissr +import satpy.readers.gms5_vissr_navigation as nav from satpy.tests.utils import make_dataid - # Navigation references computed with JMA's Msial library (files # VISSR_19960217_2331_IR1.A.IMG and VISSR_19960217_2331_VIS.A.IMG). The VIS # navigation is slightly off (< 0.01 deg) compared to JMA's reference. 
@@ -654,6 +654,7 @@ def coordinate_conversion(self): @pytest.fixture def attitude_prediction(self): + # fmt: off return { 'data': np.array([ (50130.93055556, (19960217, 222000), 3.14911863, 0.00054604, 4.3324597 , 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), @@ -692,9 +693,11 @@ def attitude_prediction(self): dtype=vissr.ATTITUDE_PREDICTION_DATA ), } + # fmt: on @pytest.fixture def orbit_prediction(self): + # fmt: off return { 'data': np.array([ (50130.96180556, (960217, 230500), [ 2247604.14185506, -42110997.39399951, -276688.79765022], [3069.77904265, 164.12584895, 3.65437628], [-32392525.09983424, 27002204.93121811, -263873.25702763], [ 0.81859376, 0.6760037 , 17.44588753], 133.46391815, (330.12326803, -12.19424863), (197.27884747, -11.96904141), [[ 9.99936382e-01, 1.03449318e-02, 4.49611916e-03], [-1.03447475e-02, 9.99946490e-01, -6.42483646e-05], [-4.49654321e-03, 1.77330598e-05, 9.99989890e-01]], [ 2.46885475e+08, -2.07840219e+08, -7.66028692e+07], (-0.35887085, 140.18562594, 35793706.31768975), 0, 0), @@ -718,6 +721,7 @@ def orbit_prediction(self): dtype=vissr.ORBIT_PREDICTION_DATA ) } + # fmt: on @pytest.fixture def vis_calibration(self): From 6d8eed79f4d273193b4b7d1a5177c3809bdd5434 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 22 May 2023 13:12:14 +0300 Subject: [PATCH 0143/1416] Add a blend method to create temporal RGB from MultiScene --- satpy/multiscene/__init__.py | 2 +- satpy/multiscene/_blend_funcs.py | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/satpy/multiscene/__init__.py b/satpy/multiscene/__init__.py index 3cfa907017..0338f47d77 100644 --- a/satpy/multiscene/__init__.py +++ b/satpy/multiscene/__init__.py @@ -1,4 +1,4 @@ """Functions and classes related to MultiScene functionality.""" -from ._blend_funcs import stack, timeseries # noqa +from ._blend_funcs import stack, temporal_rgb, timeseries # noqa from ._multiscene import MultiScene # noqa diff --git a/satpy/multiscene/_blend_funcs.py b/satpy/multiscene/_blend_funcs.py index 0210cef5cc..e8d25e5f0a 100644 --- a/satpy/multiscene/_blend_funcs.py +++ b/satpy/multiscene/_blend_funcs.py @@ -178,3 +178,22 @@ def timeseries(datasets): res = xr.concat(expanded_ds, dim="time") res.attrs = combine_metadata(*[x.attrs for x in expanded_ds]) return res + + +def temporal_rgb( + data_arrays: Sequence[xr.DataArray], + weights: Optional[Sequence[xr.DataArray]] = None, + combine_times: bool = True, + blend_type: str = 'select_with_weights' +) -> xr.DataArray: + """Combine a series of datasets as a temporal RGB. + + The first dataset is used as the Red component of the new composite, the second as Green and the third as Blue. + All the other datasets are discarded. 
+ """ + from satpy.composites import GenericCompositor + + compositor = GenericCompositor("temporal_composite") + composite = compositor((data_arrays[0], data_arrays[1], data_arrays[2]), attrs=data_arrays[2].attrs) + + return composite From 0d4b884ccffdc6f546ede3314e62258e13cffe3b Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 22 May 2023 10:29:43 +0000 Subject: [PATCH 0144/1416] Refactor get_dataset method --- satpy/readers/gms5_vissr_l1b.py | 114 +++++++++++++++++++------------- 1 file changed, 68 insertions(+), 46 deletions(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index 5df0bda8f1..0873f7d070 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -420,6 +420,7 @@ } # fmt: on + def recarr2dict(arr, preserve=None): if not preserve: preserve = [] @@ -515,42 +516,46 @@ def _get_mda(self): } def get_dataset(self, dataset_id, ds_info): - """ - - TODO: Split in two methods - """ - - num_lines, num_pixels = self._get_actual_shape() - memmap = np.memmap( + dataset = self._get_image_data() + lons, lats = self._get_lons_lats(dataset_id, dataset) + self._attach_coords(dataset, lons, lats) + return dataset + + def _get_image_data(self): + dask_array = self._read_image_data() + return self._make_image_dataset(dask_array) + + def _read_image_data(self): + memmap = self._get_memmap() + return da.from_array(memmap, chunks=(CHUNK_SIZE,)) + + def _get_memmap(self): + num_lines, _ = self._get_actual_shape() + return np.memmap( filename=self._filename, mode='r', dtype=IMAGE_DATA[self._channel_type]['dtype'], offset=IMAGE_DATA[self._channel_type]['offset'], shape=(num_lines,) ) - dask_array = da.from_array(memmap, chunks=(CHUNK_SIZE,)) - data = xr.DataArray( + + def _make_image_dataset(self, dask_array): + return xr.DataArray( dask_array['image_data'], dims=('y', 'x'), coords={ 'acq_time': ('y', self._get_acq_time(dask_array)), - 'line_number': ('y', dask_array['LCW']['line_number'].compute()) + 'line_number': ('y', self._get_line_number(dask_array)) } ) - lines, pixels = self._get_image_coords(data) - lons, lats = self._get_lons_lats(dataset_id, lines, pixels) - lons = xr.DataArray(lons, dims=('y', 'x'), attrs={'standard_name': 'longitude'}) - lats = xr.DataArray(lats, dims=('y', 'x'), attrs={'standard_name': 'latitude'}) - data.coords['lon'] = lons - data.coords['lat'] = lats - - return data - def _get_acq_time(self, dask_array): acq_time = dask_array['LCW']['scan_time'].compute() return mjd2datetime64(acq_time) + def _get_line_number(self, dask_array): + return dask_array['LCW']['line_number'].compute() + def get_area_def_test(self, dsid): alt_ch_name = ALT_CHANNEL_NAMES[dsid['name']] num_lines, num_pixels = self._get_actual_shape() @@ -599,25 +604,55 @@ def get_area_def_test(self, dsid): area = geos_area.get_area_definition(proj_dict, extent) return area - def _get_lons_lats(self, dataset_id, lines, pixels): + def _get_lons_lats(self, dataset_id, image_data): # TODO: Store channel name in self.channel_name + lines, pixels = self._get_image_coords(image_data) + static_params = self._get_static_navigation_params(dataset_id) + predicted_params = self._get_predicted_navigation_params() + lons, lats = nav.get_lons_lats( + lines=lines, + pixels=pixels, + static_params=static_params, + predicted_params=predicted_params + ) + return self._make_lons_lats_data_array(lats, lons) + + def _get_image_coords(self, data): + lines = data.coords['line_number'].values + pixels = np.arange(data.shape[1]) + return 
lines.astype(np.float64), pixels.astype(np.float64) + + def _get_static_navigation_params(self, dataset_id): alt_ch_name = ALT_CHANNEL_NAMES[dataset_id['name']] mode_block = self._header['image_parameters']['mode'] coord_conv = self._header['image_parameters']['coordinate_conversion'] - att_pred = self._header['image_parameters']['attitude_prediction']['data'] - orb_pred = self._header['image_parameters']['orbit_prediction']['data'] - center_line_vissr_frame = coord_conv['central_line_number_of_vissr_frame'][alt_ch_name] center_pixel_vissr_frame = coord_conv['central_pixel_number_of_vissr_frame'][alt_ch_name] pixel_offset = coord_conv['pixel_difference_of_vissr_center_from_normal_position'][ alt_ch_name] - scan_params = nav.ScanningParameters( start_time_of_scan=coord_conv['scheduled_observation_time'], spinning_rate=mode_block['spin_rate'], num_sensors=coord_conv['number_of_sensor_elements'][alt_ch_name], sampling_angle=coord_conv['sampling_angle_along_pixel'][alt_ch_name], ) + # Use earth radius and flattening from JMA's Msial library, because + # the values in the data seem to be pretty old. For example the + # equatorial radius is from the Bessel Ellipsoid (1841). + proj_params = nav.ProjectionParameters( + line_offset=center_line_vissr_frame, + pixel_offset=center_pixel_vissr_frame + pixel_offset, + stepping_angle=coord_conv['stepping_angle_along_line'][alt_ch_name], + sampling_angle=coord_conv['sampling_angle_along_pixel'][alt_ch_name], + misalignment=np.ascontiguousarray(coord_conv['matrix_of_misalignment'].transpose().astype(np.float64)), + earth_flattening=nav.EARTH_FLATTENING, + earth_equatorial_radius=nav.EARTH_EQUATORIAL_RADIUS + ) + return scan_params, proj_params + + def _get_predicted_navigation_params(self): + att_pred = self._header['image_parameters']['attitude_prediction']['data'] + orb_pred = self._header['image_parameters']['orbit_prediction']['data'] attitude_prediction = nav.AttitudePrediction( prediction_times=att_pred['prediction_time_mjd'].astype(np.float64), angle_between_earth_and_sun=att_pred['sun_earth_angle'].astype(np.float64), @@ -634,28 +669,15 @@ def _get_lons_lats(self, dataset_id, lines, pixels): sat_position_earth_fixed_z=orb_pred['satellite_position_earth_fixed'][:, 2].astype(np.float64), nutation_precession=np.ascontiguousarray(orb_pred['conversion_matrix'].transpose(0, 2, 1).astype(np.float64)) ) + return attitude_prediction, orbit_prediction - # Use earth radius and flattening from JMA's Msial library, because - # the values in the data seem to be pretty old. For example the - # equatorial radius is from the Bessel Ellipsoid (1841). 
- proj_params = nav.ProjectionParameters( - line_offset=center_line_vissr_frame, - pixel_offset=center_pixel_vissr_frame + pixel_offset, - stepping_angle=coord_conv['stepping_angle_along_line'][alt_ch_name], - sampling_angle=coord_conv['sampling_angle_along_pixel'][alt_ch_name], - misalignment=np.ascontiguousarray(coord_conv['matrix_of_misalignment'].transpose().astype(np.float64)), - earth_flattening=nav.EARTH_FLATTENING, - earth_equatorial_radius=nav.EARTH_EQUATORIAL_RADIUS - ) - lons, lats = nav.get_lons_lats( - lines=lines.astype(np.float64), - pixels=pixels.astype(np.float64), - static_params=(scan_params, proj_params), - predicted_params=(attitude_prediction, orbit_prediction) - ) + def _make_lons_lats_data_array(self, lats, lons): + lons = xr.DataArray(lons, dims=('y', 'x'), + attrs={'standard_name': 'longitude'}) + lats = xr.DataArray(lats, dims=('y', 'x'), + attrs={'standard_name': 'latitude'}) return lons, lats - def _get_image_coords(self, data): - lines = data.coords['line_number'].values - pixels = np.arange(data.shape[1]) - return lines, pixels + def _attach_coords(self, dataset, lons, lats): + dataset.coords['lon'] = lons + dataset.coords['lat'] = lats From 72ccf1f439178fa8d629701b64c8608906ff9def Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 22 May 2023 13:42:05 +0300 Subject: [PATCH 0145/1416] Remove extra parameters --- satpy/multiscene/_blend_funcs.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/satpy/multiscene/_blend_funcs.py b/satpy/multiscene/_blend_funcs.py index e8d25e5f0a..ff2749db81 100644 --- a/satpy/multiscene/_blend_funcs.py +++ b/satpy/multiscene/_blend_funcs.py @@ -182,9 +182,6 @@ def timeseries(datasets): def temporal_rgb( data_arrays: Sequence[xr.DataArray], - weights: Optional[Sequence[xr.DataArray]] = None, - combine_times: bool = True, - blend_type: str = 'select_with_weights' ) -> xr.DataArray: """Combine a series of datasets as a temporal RGB. 
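For orientation, a minimal usage sketch of the new blend function; the file list, reader name and channel are hypothetical, and `blend_function` is assumed to be the keyword accepted by `MultiScene.blend`, as with the existing `stack` function:

from satpy import MultiScene
from satpy.multiscene import temporal_rgb

# three consecutive time steps of the same channel (hypothetical files)
mscn = MultiScene.from_files(my_three_files, reader='seviri_l1b_native')
mscn.load(['IR_108'])
blended = mscn.blend(blend_function=temporal_rgb)  # oldest -> R, middle -> G, newest -> B
blended.save_datasets()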
From 3c48c53a1c51a966b6cbd9628f510380cfb491af Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 22 May 2023 13:49:51 +0300 Subject: [PATCH 0146/1416] Fix using the attributes from the latest scene --- satpy/multiscene/_blend_funcs.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/multiscene/_blend_funcs.py b/satpy/multiscene/_blend_funcs.py index ff2749db81..82597aa3fc 100644 --- a/satpy/multiscene/_blend_funcs.py +++ b/satpy/multiscene/_blend_funcs.py @@ -191,6 +191,7 @@ def temporal_rgb( from satpy.composites import GenericCompositor compositor = GenericCompositor("temporal_composite") - composite = compositor((data_arrays[0], data_arrays[1], data_arrays[2]), attrs=data_arrays[2].attrs) + composite = compositor((data_arrays[0], data_arrays[1], data_arrays[2])) + composite.attrs = data_arrays[2].attrs return composite From ef355b4e9c9c88dfd968540d7a29a6a3a54906d0 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 22 May 2023 14:49:15 +0300 Subject: [PATCH 0147/1416] Add tests for temporal RGB blending --- satpy/tests/multiscene_tests/test_blend.py | 43 +++++++++++++++++++++- 1 file changed, 42 insertions(+), 1 deletion(-) diff --git a/satpy/tests/multiscene_tests/test_blend.py b/satpy/tests/multiscene_tests/test_blend.py index 6b85dd9d79..a4aca901d2 100644 --- a/satpy/tests/multiscene_tests/test_blend.py +++ b/satpy/tests/multiscene_tests/test_blend.py @@ -234,7 +234,6 @@ def test_blend_two_scenes_bad_blend_type(self, multi_scene_and_weights, groups): simple_groups = {DataQuery(name='CloudType'): groups[DataQuery(name='CloudType')]} multi_scene.group(simple_groups) - weights = [weights[0][0], weights[1][0]] stack_func = partial(stack, weights=weights, blend_type="i_dont_exist") with pytest.raises(ValueError): @@ -390,3 +389,45 @@ def _check_stacked_metadata(data_arr: xr.DataArray, exp_name: str) -> None: assert 'sensor' not in data_arr.attrs assert 'platform_name' not in data_arr.attrs assert 'long_name' not in data_arr.attrs + + +class TestTemporalRGB: + """Test the temporal RGB blending method.""" + + @pytest.fixture + def nominal_data(self): + """Return the input arrays for the nominal use case.""" + da1 = xr.DataArray([1, 0, 0], attrs={'start_time': datetime(2023, 5, 22, 9, 0, 0)}) + da2 = xr.DataArray([0, 1, 0], attrs={'start_time': datetime(2023, 5, 22, 10, 0, 0)}) + da3 = xr.DataArray([0, 0, 1], attrs={'start_time': datetime(2023, 5, 22, 11, 0, 0)}) + + return [da1, da2, da3] + + @pytest.fixture + def expected_result(self): + """Return the expected result arrays.""" + return [[1, 0, 0], [0, 1, 0], [0, 0, 1]] + + @staticmethod + def _assert_results(res, expected_start_time, expected_result): + assert res.attrs['start_time'] == expected_start_time + for i in range(3): + np.testing.assert_equal(res.data[i, :], expected_result[i]) + + def test_nominal(self, nominal_data, expected_result): + """Test that nominal usage with 3 datasets works.""" + from satpy.multiscene import temporal_rgb + + res = temporal_rgb(nominal_data) + + self._assert_results(res, nominal_data[-1].attrs['start_time'], expected_result) + + def test_extra_datasets(self, nominal_data, expected_result): + """Test that only the first three arrays are used.""" + from satpy.multiscene import temporal_rgb + + da4 = xr.DataArray([0, 0, 1], attrs={'start_time': datetime(2023, 5, 22, 12, 0, 0)}) + + res = temporal_rgb(nominal_data + [da4]) + + self._assert_results(res, nominal_data[-1].attrs['start_time'], expected_result) From 02b971c4d7156853aebe831dfc479d7eab1a964c Mon Sep 17
00:00:00 2001 From: Panu Lahtinen Date: Mon, 22 May 2023 14:55:40 +0300 Subject: [PATCH 0148/1416] Update MultiScene.blend() docstring --- satpy/multiscene/_multiscene.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/multiscene/_multiscene.py b/satpy/multiscene/_multiscene.py index d803758b88..48c8db6b99 100644 --- a/satpy/multiscene/_multiscene.py +++ b/satpy/multiscene/_multiscene.py @@ -338,7 +338,7 @@ def blend( then assigns those datasets to the blended scene. Blending functions provided in this module are :func:`stack` - (the default) and :func:`timeseries`, but the Python built-in + (the default), :func:`timeseries` and :func:`temporal_rgb`, but the Python built-in function :func:`sum` also works and may be appropriate for some types of data. From e9491e567fcd368d416e8d14c1a58de396e5331b Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 22 May 2023 13:30:00 +0000 Subject: [PATCH 0149/1416] Add numba to requirements --- continuous_integration/environment.yaml | 3 ++- setup.py | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index 0df1582fd5..d773951ab4 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -1,4 +1,4 @@ -name: test-environment +name: satpy channels: - conda-forge dependencies: @@ -9,6 +9,7 @@ dependencies: - appdirs - toolz - Cython + - numba - sphinx - cartopy - panel>=0.12.7 diff --git a/setup.py b/setup.py index 2ad639c6fa..d219c06810 100644 --- a/setup.py +++ b/setup.py @@ -67,6 +67,7 @@ 'hsaf_grib': ['pygrib'], 'remote_reading': ['fsspec'], 'insat_3d': ['xarray-datatree'], + 'gms5-vissr_l1b': ["numba"], # Writers: 'cf': ['h5netcdf >= 0.7.3'], 'awips_tiled': ['netCDF4 >= 1.1.8'], From cf1f32eb01169a4dc1ed16ca990d91397ea8ffa6 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 22 May 2023 13:30:23 +0000 Subject: [PATCH 0150/1416] Remove lat/lon coordinates from yaml definition --- satpy/etc/readers/gms5-vissr_l1b.yaml | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/satpy/etc/readers/gms5-vissr_l1b.yaml b/satpy/etc/readers/gms5-vissr_l1b.yaml index cfd1dd16fe..77ecf32ed8 100644 --- a/satpy/etc/readers/gms5-vissr_l1b.yaml +++ b/satpy/etc/readers/gms5-vissr_l1b.yaml @@ -48,9 +48,6 @@ datasets: # standard_name: toa_bidirectional_reflectance # units: "%" file_type: gms5_vissr_vis - coordinates: - - longitude - - latitude IR1: name: IR1 @@ -65,9 +62,6 @@ datasets: # standard_name: toa_brightness_temperature # units: "K" file_type: gms5_vissr_ir1 - coordinates: - - longitude - - latitude IR2: name: IR2 @@ -82,9 +76,6 @@ datasets: # standard_name: toa_brightness_temperature # units: "K" file_type: gms5_vissr_ir2 - coordinates: - - longitude - - latitude IR3: name: IR3 @@ -99,6 +90,3 @@ datasets: # standard_name: toa_brightness_temperature # units: "K" file_type: gms5_vissr_ir3 - coordinates: - - longitude - - latitude From 652e32515ac24b524df9c3edde86ca94d820898a Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 23 May 2023 08:18:32 +0300 Subject: [PATCH 0151/1416] Update satpy/multiscene/_multiscene.py Co-authored-by: David Hoese --- satpy/multiscene/_multiscene.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/multiscene/_multiscene.py b/satpy/multiscene/_multiscene.py index 48c8db6b99..c93f5706bc 100644 --- a/satpy/multiscene/_multiscene.py +++ b/satpy/multiscene/_multiscene.py @@ -338,7 +338,7 @@ def blend( then assigns those 
datasets to the blended scene. Blending functions provided in this module are :func:`stack` - (the default), :func:`timeseries` and :func:`temporal_rgb`, but the Python built-in + (the default), :func:`timeseries`, and :func:`temporal_rgb`, but the Python built-in function :func:`sum` also works and may be appropriate for some types of data. From 6c41a99c7b0f76b92330b09a3b2ba7df1ed1245b Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 23 May 2023 08:21:41 +0300 Subject: [PATCH 0152/1416] Use explicit indices in test assertions --- satpy/tests/multiscene_tests/test_blend.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/satpy/tests/multiscene_tests/test_blend.py b/satpy/tests/multiscene_tests/test_blend.py index a4aca901d2..2fca990f21 100644 --- a/satpy/tests/multiscene_tests/test_blend.py +++ b/satpy/tests/multiscene_tests/test_blend.py @@ -411,8 +411,9 @@ def expected_result(self): @staticmethod def _assert_results(res, expected_start_time, expected_result): assert res.attrs['start_time'] == expected_start_time - for i in range(3): - np.testing.assert_equal(res.data[i, :], expected_result[i]) + np.testing.assert_equal(res.data[0, :], expected_result[0]) + np.testing.assert_equal(res.data[1, :], expected_result[1]) + np.testing.assert_equal(res.data[2, :], expected_result[2]) def test_nominal(self, nominal_data, expected_result): """Test that nominal usage with 3 datasets works.""" From fd3ade3b5ac831dcba8328cf9ecd584763d46b28 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Tue, 23 May 2023 13:34:41 +0800 Subject: [PATCH 0153/1416] Update __init__.py --- satpy/composites/__init__.py | 58 ++++++++++++++++++++++++++++++++---- 1 file changed, 53 insertions(+), 5 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 0948d543ab..6636cc2fe6 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1039,15 +1039,35 @@ class RatioSharpenedRGB(GenericCompositor): new_G = G * ratio new_B = B * ratio + In some cases, there could be another high-resolution band: + + R_lo - 1000m resolution - shape=(2000, 2000) + G_hi - 500m resolution - shape=(4000, 4000) + B - 1000m resolution - shape=(2000, 2000) + R_hi - 500m resolution - shape=(4000, 4000) + + To keep the green band out of the ratio calculation and the sharpening, + specify it with "neutral_resolution_band: green" in the YAML config file. Then: + + ratio = R_hi / R_lo + new_R = R_hi + new_G = G_hi + new_B = B * ratio + """ def __init__(self, *args, **kwargs): """Instantiate the ratio sharpener.""" self.high_resolution_color = kwargs.pop("high_resolution_band", "red") + self.neutral_resolution_color = kwargs.pop("neutral_resolution_band", "red") if self.high_resolution_color not in ['red', 'green', 'blue', None]: raise ValueError("RatioSharpenedRGB.high_resolution_band must " "be one of ['red', 'green', 'blue', None]. Not " "'{}'".format(self.high_resolution_color)) + if self.neutral_resolution_color not in ['red', 'green', 'blue', None]: + raise ValueError("RatioSharpenedRGB.neutral_resolution_band must " + "be one of ['red', 'green', 'blue', None]. 
Not " + "'{}'".format(self.neutral_resolution_color)) super(RatioSharpenedRGB, self).__init__(*args, **kwargs) def __call__(self, datasets, optional_datasets=None, **info): @@ -1082,12 +1102,22 @@ def _get_and_sharpen_rgb_data_arrays_and_meta(self, datasets, optional_datasets) if 'rows_per_scan' in high_res.attrs: new_attrs.setdefault('rows_per_scan', high_res.attrs['rows_per_scan']) new_attrs.setdefault('resolution', high_res.attrs['resolution']) - low_res_colors = ['red', 'green', 'blue'] - low_resolution_index = low_res_colors.index(self.high_resolution_color) + colors = ['red', 'green', 'blue'] + low_resolution_index = colors.index(self.high_resolution_color) + high_resolution_index = low_resolution_index + if self.neutral_resolution_color is not None: + neutral_resolution_index = colors.index(self.neutral_resolution_color) + neutral_res = datasets[neutral_resolution_index] + else: + neutral_res = None + neutral_resolution_index = 0 else: LOG.debug("No sharpening band specified for ratio sharpening") high_res = None + neutral_res = None low_resolution_index = 0 + high_resolution_index = 0 + neutral_resolution_index = 0 if high_res is not None: low_res = (low_res_red, low_res_green, low_res_blue)[low_resolution_index] @@ -1100,9 +1130,27 @@ def _get_and_sharpen_rgb_data_arrays_and_meta(self, datasets, optional_datasets) chunks=high_res.chunks, ) with xr.set_options(keep_attrs=True): - low_res_red = high_res if low_resolution_index == 0 else low_res_red * ratio - low_res_green = high_res if low_resolution_index == 1 else low_res_green * ratio - low_res_blue = high_res if low_resolution_index == 2 else low_res_blue * ratio + if neutral_res is not None: + if high_resolution_index == 0: + low_res_red = high_res + low_res_green = neutral_res if neutral_resolution_index == 1 else low_res_green * ratio + low_res_blue = neutral_res if neutral_resolution_index == 2 else low_res_blue * ratio + + elif high_resolution_index == 1: + low_res_red = neutral_res if neutral_resolution_index == 0 else low_res_red * ratio + low_res_green = high_res + low_res_blue = neutral_res if neutral_resolution_index == 2 else low_res_blue * ratio + + elif high_resolution_index == 2: + low_res_red = neutral_res if neutral_resolution_index == 0 else low_res_red * ratio + low_res_green = neutral_res if neutral_resolution_index == 1 else low_res_green * ratio + low_res_blue = high_res + + else: + low_res_red = high_res if high_resolution_index == 0 else low_res_red * ratio + low_res_green = high_res if high_resolution_index == 1 else low_res_green * ratio + low_res_blue = high_res if high_resolution_index == 2 else low_res_blue * ratio + return low_res_red, low_res_green, low_res_blue, new_attrs def _combined_sharpened_info(self, info, new_attrs): From b16df65d66bd0d4d282e7469c01fb9fd9609ff9e Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 22 May 2023 16:13:15 +0000 Subject: [PATCH 0154/1416] Add calibration methods --- satpy/etc/readers/gms5-vissr_l1b.yaml | 24 ++--- satpy/readers/gms5_vissr_l1b.py | 74 +++++++++++---- .../tests/reader_tests/test_gms5_vissr_l1b.py | 89 ++++++++++++++----- 3 files changed, 137 insertions(+), 50 deletions(-) diff --git a/satpy/etc/readers/gms5-vissr_l1b.yaml b/satpy/etc/readers/gms5-vissr_l1b.yaml index 77ecf32ed8..313beb8928 100644 --- a/satpy/etc/readers/gms5-vissr_l1b.yaml +++ b/satpy/etc/readers/gms5-vissr_l1b.yaml @@ -44,9 +44,9 @@ datasets: counts: standard_name: counts units: 1 -# reflectance: -# standard_name: toa_bidirectional_reflectance -# units: "%" + 
reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" file_type: gms5_vissr_vis IR1: @@ -58,9 +58,9 @@ datasets: counts: standard_name: counts units: 1 -# brightness_temperature: -# standard_name: toa_brightness_temperature -# units: "K" + brightness_temperature: + standard_name: toa_brightness_temperature + units: "K" file_type: gms5_vissr_ir1 IR2: @@ -72,9 +72,9 @@ datasets: counts: standard_name: counts units: 1 -# brightness_temperature: -# standard_name: toa_brightness_temperature -# units: "K" + brightness_temperature: + standard_name: toa_brightness_temperature + units: "K" file_type: gms5_vissr_ir2 IR3: @@ -86,7 +86,7 @@ datasets: counts: standard_name: counts units: 1 -# brightness_temperature: -# standard_name: toa_brightness_temperature -# units: "K" + brightness_temperature: + standard_name: toa_brightness_temperature + units: "K" file_type: gms5_vissr_ir3 diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index 0873f7d070..9c190f768a 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -516,14 +516,14 @@ def _get_mda(self): } def get_dataset(self, dataset_id, ds_info): - dataset = self._get_image_data() - lons, lats = self._get_lons_lats(dataset_id, dataset) - self._attach_coords(dataset, lons, lats) + counts = self._get_counts() + dataset = self._calibrate(counts, dataset_id) + self._attach_coords(dataset, dataset_id) # FIXME return dataset - def _get_image_data(self): - dask_array = self._read_image_data() - return self._make_image_dataset(dask_array) + def _get_counts(self): + image_data = self._read_image_data() + return self._make_counts_data_array(image_data) def _read_image_data(self): memmap = self._get_memmap() @@ -539,13 +539,13 @@ def _get_memmap(self): shape=(num_lines,) ) - def _make_image_dataset(self, dask_array): + def _make_counts_data_array(self, image_data): return xr.DataArray( - dask_array['image_data'], + image_data['image_data'], dims=('y', 'x'), coords={ - 'acq_time': ('y', self._get_acq_time(dask_array)), - 'line_number': ('y', self._get_line_number(dask_array)) + 'acq_time': ('y', self._get_acq_time(image_data)), + 'line_number': ('y', self._get_line_number(image_data)) } ) @@ -556,6 +556,20 @@ def _get_acq_time(self, dask_array): def _get_line_number(self, dask_array): return dask_array['LCW']['line_number'].compute() + def _calibrate(self, counts, dataset_id): + table = self._get_calibration_table(dataset_id) + cal = Calibrator(table) + return cal.calibrate(counts, dataset_id["calibration"]) + + def _get_calibration_table(self, dataset_id): + tables = { + "VIS": self._header['image_parameters']['vis_calibration']["vis1_calibration_table"], + "IR1": self._header['image_parameters']['ir1_calibration']["conversion_table_of_equivalent_black_body_temperature"], + "IR2": self._header['image_parameters']['ir2_calibration']["conversion_table_of_equivalent_black_body_temperature"], + "IR3": self._header['image_parameters']['wv_calibration']["conversion_table_of_equivalent_black_body_temperature"] + } + return tables[dataset_id["name"]] + def get_area_def_test(self, dsid): alt_ch_name = ALT_CHANNEL_NAMES[dsid['name']] num_lines, num_pixels = self._get_actual_shape() @@ -604,9 +618,14 @@ def get_area_def_test(self, dsid): area = geos_area.get_area_definition(proj_dict, extent) return area - def _get_lons_lats(self, dataset_id, image_data): + def _attach_coords(self, dataset, dataset_id): + lons, lats = self._get_lons_lats(dataset, dataset_id) + dataset.coords['lon'] = lons + 
dataset.coords['lat'] = lats + + def _get_lons_lats(self, dataset, dataset_id): # TODO: Store channel name in self.channel_name - lines, pixels = self._get_image_coords(image_data) + lines, pixels = self._get_image_coords(dataset) static_params = self._get_static_navigation_params(dataset_id) predicted_params = self._get_predicted_navigation_params() lons, lats = nav.get_lons_lats( @@ -678,6 +697,31 @@ def _make_lons_lats_data_array(self, lats, lons): attrs={'standard_name': 'latitude'}) return lons, lats - def _attach_coords(self, dataset, lons, lats): - dataset.coords['lon'] = lons - dataset.coords['lat'] = lats + +class Calibrator: + def __init__(self, calib_table_y): + self.calib_table_y = calib_table_y + self.calib_table_x = np.arange(calib_table_y.size) + + def calibrate(self, counts, calibration): + if calibration == "counts": + return counts + interp = counts.data.map_blocks( + _interpolate_calibration_table, + self.calib_table_x, + self.calib_table_y, + dtype=np.float32 + ) + return self._make_data_array(interp, counts) + + def _make_data_array(self, interp, counts): + return xr.DataArray( + interp, + dims=counts.dims, + coords=counts.coords, + ) + + +def _interpolate_calibration_table(counts, calib_table_x, calib_table_y): + interp = np.interp(counts.ravel(), calib_table_x, calib_table_y) + return interp.reshape(counts.shape) diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index 8da3cc5f99..229925ec04 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -1,9 +1,11 @@ """Unit tests for GMS-5 VISSR reader.""" from unittest import mock +import datetime as dt import numpy as np import pytest +import xarray as xr import satpy.readers.gms5_vissr_l1b as vissr import satpy.readers.gms5_vissr_navigation as nav @@ -492,13 +494,9 @@ class TestFileHandler: 'IR3': 'IR' } - def test_dataset_navigation(self, file_handler, dataset_id, lons_lats_exp): - lons_exp, lats_exp = lons_lats_exp + def test_get_dataset(self, file_handler, dataset_id, dataset_exp): dataset = file_handler.get_dataset(dataset_id, {}) - lons = dataset.coords['lon'] - lats = dataset.coords['lat'] - np.testing.assert_allclose(lons, lons_exp, atol=1E-6) - np.testing.assert_allclose(lats, lats_exp, atol=1E-6) + xr.testing.assert_allclose(dataset.compute(), dataset_exp, atol=1E-6) @pytest.fixture def file_handler(self, header, dataset_id, image_data): @@ -512,9 +510,13 @@ def file_handler(self, header, dataset_id, image_data): # See https://stackoverflow.com/a/59045506/5703449 yield fh - @pytest.fixture(params=['VIS', 'IR1']) + @pytest.fixture(params=[ + make_dataid(name="VIS", calibration="reflectance"), + make_dataid(name='IR1', calibration="brightness_temperature"), + make_dataid(name='IR1', calibration="counts") + ]) def dataset_id(self, request): - return make_dataid(name=request.param) + return request.param @pytest.fixture def image_data(self, dataset_id): @@ -541,9 +543,9 @@ def image_data(self, dataset_id): dtype = np.dtype([('LCW', line_control_word), ('image_data', vissr.U1, (2,))]) if dataset_id['name'] == 'IR1': - return np.array([((686, 50000), (1, 2)), ((2089, 50000), (3, 4))], dtype=dtype) + return np.array([((686, 50000), (0, 1)), ((2089, 50000), (2, 3))], dtype=dtype) elif dataset_id['name'] == 'VIS': - return np.array([((2744, 50000), (1, 2)), ((8356, 50000), (3, 4))], dtype=dtype) + return np.array([((2744, 50000), (0, 1)), ((8356, 50000), (2, 3))], dtype=dtype) raise 
NotImplementedError @pytest.fixture @@ -726,25 +728,25 @@ def orbit_prediction(self): @pytest.fixture def vis_calibration(self): return { - + "vis1_calibration_table": np.array([0, 0.25, 0.5, 1]) } @pytest.fixture def ir1_calibration(self): return { - + "conversion_table_of_equivalent_black_body_temperature": np.array([0, 100, 200, 300]) } @pytest.fixture def ir2_calibration(self): return { - + "conversion_table_of_equivalent_black_body_temperature": None } @pytest.fixture def wv_calibration(self): return { - + "conversion_table_of_equivalent_black_body_temperature": None } @pytest.fixture @@ -753,6 +755,45 @@ def simple_coordinate_conversion_table(self): } + @pytest.fixture + def dataset_exp(self, dataset_id, lons_lats_exp): + lons, lats = lons_lats_exp + if dataset_id["calibration"] == "counts": + return xr.DataArray( + [[0, 1], [2, 3]], + dims=('y', 'x'), + coords={ + "lon": lons, + "lat": lats, + 'acq_time': ('y', [dt.datetime(1995, 10, 10), + dt.datetime(1995, 10, 10)]), + 'line_number': ('y', [686, 2089]) + } + ) + elif dataset_id["name"] == "VIS": + return xr.DataArray( + [[0, 0.25], [0.5, 1]], + dims=('y', 'x'), + coords={ + "lon": lons, + "lat": lats, + 'acq_time': ('y', [dt.datetime(1995, 10, 10), dt.datetime(1995, 10, 10)]), + 'line_number': ('y', [2744, 8356]) + } + ) + elif dataset_id["name"] == "IR1": + return xr.DataArray( + [[0, 100], [200, 300]], + dims=('y', 'x'), + coords={ + "lon": lons, + "lat": lats, + 'acq_time': ('y', [dt.datetime(1995, 10, 10), dt.datetime(1995, 10, 10)]), + 'line_number': ('y', [686, 2089]) + } + ) + raise NotImplementedError + @pytest.fixture def lons_lats_exp(self, dataset_id): """Get expected lon/lat coordinates. @@ -771,18 +812,20 @@ def lons_lats_exp(self, dataset_id): lin = [686, 2089, 686, 2089] """ if dataset_id['name'] == 'IR1': - lons_exp = [[139.680120, 139.718902], - [140.307367, 140.346062]] - lats_exp = [[35.045132, 35.045361], - [-34.971012, -34.970738]] + lons = [[139.680120, 139.718902], + [140.307367, 140.346062]] + lats = [[35.045132, 35.045361], + [-34.971012, -34.970738]] elif dataset_id['name'] == 'VIS': - lons_exp = [[139.665133, 139.674833], - [140.292579, 140.302249]] - lats_exp = [[35.076113, 35.076170], - [-34.940439, -34.940370]] + lons = [[139.665133, 139.674833], + [140.292579, 140.302249]] + lats = [[35.076113, 35.076170], + [-34.940439, -34.940370]] else: raise NotImplementedError - return lons_exp, lats_exp + lons = xr.DataArray(lons, dims=("y", "x")) + lats = xr.DataArray(lats, dims=("y", "x")) + return lons, lats def assert_namedtuple_close(a, b): From acc6268029e7f21c960515b57e2e732a40a58f3e Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Tue, 23 May 2023 10:51:54 +0000 Subject: [PATCH 0155/1416] Improve navigation performance Unwrap angles only once. 
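The optimization in this patch, as a standalone sketch: unwrap the 2-pi jumps once when the prediction arrays are built, so plain linear interpolation is safe inside the jitted hot loop, and only wrap the result back at the end. The values below are made up to show a jump across the +/-pi boundary; the reader's `_wrap_2pi` plays the role of the last line.

import numpy as np

times = np.array([0.0, 1.0, 2.0, 3.0])
angles = np.array([3.0, -3.0, -2.5, -2.0])        # raw angles jump by ~-6 rad at t=1
unwrapped = np.unwrap(angles)                     # ~[3.00, 3.28, 3.78, 4.28], computed once
interp = np.interp(0.25, times, unwrapped)        # ~3.07; plain interpolation is now safe
wrapped = (interp + np.pi) % (2 * np.pi) - np.pi  # wrap back to [-pi, pi]
# interpolating the raw angles directly would give 1.5 rad here, cutting across zero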
--- satpy/readers/gms5_vissr_l1b.py | 1 - satpy/readers/gms5_vissr_navigation.py | 246 ++++++++++++------ .../tests/reader_tests/test_gms5_vissr_l1b.py | 34 +-- 3 files changed, 174 insertions(+), 107 deletions(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index 9c190f768a..554c88fee8 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -624,7 +624,6 @@ def _attach_coords(self, dataset, dataset_id): dataset.coords['lat'] = lats def _get_lons_lats(self, dataset, dataset_id): - # TODO: Store channel name in self.channel_name lines, pixels = self._get_image_coords(dataset) static_params = self._get_static_navigation_params(dataset_id) predicted_params = self._get_predicted_navigation_params() diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index 5b20e1ecd5..5f72894af3 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -17,8 +17,161 @@ """Constants taken from JMA's Msial library.""" -@numba.njit +Attitude = namedtuple( + 'Attitude', + [ + 'angle_between_earth_and_sun', + 'angle_between_sat_spin_and_z_axis', + 'angle_between_sat_spin_and_yz_plane' + ] +) + + +Orbit = namedtuple( + 'Orbit', + [ + 'greenwich_sidereal_time', + 'declination_from_sat_to_sun', + 'right_ascension_from_sat_to_sun', + 'sat_position_earth_fixed_x', + 'sat_position_earth_fixed_y', + 'sat_position_earth_fixed_z', + 'nutation_precession', + ] +) + + +ProjectionParameters = namedtuple( + 'ProjectionParameters', + [ + 'line_offset', + 'pixel_offset', + 'stepping_angle', + 'sampling_angle', + 'misalignment', + 'earth_flattening', + 'earth_equatorial_radius', + ] +) + + +_AttitudePrediction = namedtuple( + '_AttitudePrediction', + [ + 'prediction_times', + 'angle_between_earth_and_sun', + 'angle_between_sat_spin_and_z_axis', + 'angle_between_sat_spin_and_yz_plane', + ] +) + + +_OrbitPrediction = namedtuple( + '_OrbitPrediction', + [ + 'prediction_times', + 'greenwich_sidereal_time', + 'declination_from_sat_to_sun', + 'right_ascension_from_sat_to_sun', + 'sat_position_earth_fixed_x', + 'sat_position_earth_fixed_y', + 'sat_position_earth_fixed_z', + 'nutation_precession', + ] +) + + +class AttitudePrediction(object): + """Attitude prediction. + + Use .to_numba() to pass this object to jitted methods. This extra + layer avoids usage of jitclasses and having to re-implement np.unwrap in + numba. + """ + def __init__(self, + prediction_times, + angle_between_earth_and_sun, + angle_between_sat_spin_and_z_axis, + angle_between_sat_spin_and_yz_plane + ): + # In order to accelerate interpolation, the 2-pi periodicity of angles + # is unwrapped here already (that means phase jumps greater than pi + # are wrapped to their 2*pi complement). + self.prediction_times = prediction_times + self.angle_between_earth_and_sun = np.unwrap(angle_between_earth_and_sun) + self.angle_between_sat_spin_and_z_axis = np.unwrap(angle_between_sat_spin_and_z_axis) + self.angle_between_sat_spin_and_yz_plane = np.unwrap(angle_between_sat_spin_and_yz_plane) + + def to_numba(self): + """Convert to numba-compatible type.""" + return _AttitudePrediction( + prediction_times=self.prediction_times, + angle_between_earth_and_sun=self.angle_between_earth_and_sun, + angle_between_sat_spin_and_z_axis=self.angle_between_sat_spin_and_z_axis, + angle_between_sat_spin_and_yz_plane=self.angle_between_sat_spin_and_yz_plane + ) + + +class OrbitPrediction(object): + """Orbit prediction. 
+ + Use .to_numba() to pass this object to jitted methods. This extra + layer avoids usage of jitclasses and having to re-implement np.unwrap in + numba. + """ + def __init__(self, + prediction_times, + greenwich_sidereal_time, + declination_from_sat_to_sun, + right_ascension_from_sat_to_sun, + sat_position_earth_fixed_x, + sat_position_earth_fixed_y, + sat_position_earth_fixed_z, + nutation_precession + ): + # In order to accelerate interpolation, the 2-pi periodicity of angles + # is unwrapped here already (that means phase jumps greater than pi + # are wrapped to their 2*pi complement). + self.prediction_times = prediction_times + self.greenwich_sidereal_time = np.unwrap(greenwich_sidereal_time) + self.declination_from_sat_to_sun = np.unwrap(declination_from_sat_to_sun) + self.right_ascension_from_sat_to_sun = np.unwrap(right_ascension_from_sat_to_sun) + self.sat_position_earth_fixed_x = sat_position_earth_fixed_x + self.sat_position_earth_fixed_y = sat_position_earth_fixed_y + self.sat_position_earth_fixed_z = sat_position_earth_fixed_z + self.nutation_precession = nutation_precession + + def to_numba(self): + """Convert to numba-compatible type.""" + return _OrbitPrediction( + prediction_times=self.prediction_times, + greenwich_sidereal_time=self.greenwich_sidereal_time, + declination_from_sat_to_sun=self.declination_from_sat_to_sun, + right_ascension_from_sat_to_sun=self.right_ascension_from_sat_to_sun, + sat_position_earth_fixed_x=self.sat_position_earth_fixed_x, + sat_position_earth_fixed_y=self.sat_position_earth_fixed_y, + sat_position_earth_fixed_z=self.sat_position_earth_fixed_z, + nutation_precession=self.nutation_precession + ) + + def get_lons_lats(lines, pixels, static_params, predicted_params): + return _get_lons_lats_numba( + lines, + pixels, + static_params, + _get_predicted_params_numba(predicted_params) + ) + + +def _get_predicted_params_numba(predicted_params): + """Get predicted parameters in numba-compatible type.""" + att_pred, orb_pred = predicted_params + return att_pred.to_numba(), orb_pred.to_numba() + + +@numba.njit +def _get_lons_lats_numba(lines, pixels, static_params, predicted_params): scan_params, proj_params = static_params attitude_prediction, orbit_prediction = predicted_params num_lines = len(lines) @@ -386,59 +539,6 @@ def normalize_vector(v): return v / np.sqrt(np.dot(v, v)) -Attitude = namedtuple( - 'Attitude', - [ - 'angle_between_earth_and_sun', - 'angle_between_sat_spin_and_z_axis', - 'angle_between_sat_spin_and_yz_plane' - ] -) - - -Orbit = namedtuple( - 'Orbit', - [ - 'greenwich_sidereal_time', - 'declination_from_sat_to_sun', - 'right_ascension_from_sat_to_sun', - 'sat_position_earth_fixed_x', - 'sat_position_earth_fixed_y', - 'sat_position_earth_fixed_z', - 'nutation_precession', - ] -) - - -ProjectionParameters = namedtuple( - 'ProjectionParameters', - [ - 'line_offset', - 'pixel_offset', - 'stepping_angle', - 'sampling_angle', - 'misalignment', - 'earth_flattening', - 'earth_equatorial_radius', - ] -) - - -OrbitPrediction = namedtuple( - 'OrbitPrediction', - [ - 'prediction_times', - 'greenwich_sidereal_time', - 'declination_from_sat_to_sun', - 'right_ascension_from_sat_to_sun', - 'sat_position_earth_fixed_x', - 'sat_position_earth_fixed_y', - 'sat_position_earth_fixed_z', - 'nutation_precession', - ] -) - - @numba.njit def interpolate_orbit_prediction(orbit_prediction, observation_time): greenwich_sidereal_time = interpolate_angles( @@ -487,17 +587,6 @@ def interpolate_orbit_prediction(orbit_prediction, observation_time): ) 
-AttitudePrediction = namedtuple( - 'AttitudePrediction', - [ - 'prediction_times', - 'angle_between_earth_and_sun', - 'angle_between_sat_spin_and_z_axis', - 'angle_between_sat_spin_and_yz_plane', - ] -) - - @numba.njit def interpolate_attitude_prediction(attitude_prediction, observation_time): angle_between_earth_and_sun = interpolate_angles( @@ -538,14 +627,14 @@ def interpolate_continuous(x, x_sample, y_sample): @numba.njit def interpolate_angles(x, x_sample, y_sample): - """Linear interpolation of periodic angles. + """Linear interpolation of angles. - In order to preserve the periodicity, change phase jumps greater than pi - to their 2*pi complement, then perform interpolation and finally wrap - the results to [-pi, pi]. + Requires 2-pi periodicity to be unwrapped before (for + performance reasons). Interpolated angles are wrapped + back to [-pi, pi] to restore periodicity. """ try: - return _wrap_2pi(_interpolate(x, x_sample, unwrap(y_sample))) + return _wrap_2pi(_interpolate(x, x_sample, y_sample)) except: # Numba cannot distinguish exception types return np.nan @@ -585,7 +674,7 @@ def interpolate_nearest(x, x_sample, y_sample): """Nearest neighbour interpolation.""" try: return _interpolate_nearest(x, x_sample, y_sample) - except Exception: + except: return np.nan * np.ones_like(y_sample[0]) @@ -593,18 +682,3 @@ def interpolate_nearest(x, x_sample, y_sample): def _interpolate_nearest(x, x_sample, y_sample): i = _find_enclosing_index(x, x_sample) return y_sample[i] - - -@numba.njit -def unwrap(p, discont=np.pi): - """Numba implementation of np.unwrap in one dimension.""" - p = np.ascontiguousarray(p) - dd = np.diff(p) - slice1 = slice(1, None) - ddmod = np.mod(dd + np.pi, 2*np.pi) - np.pi - ddmod = np.where((ddmod == -np.pi) & (dd > 0), np.pi, ddmod) - ph_correct = ddmod - dd - ph_correct = np.where(np.fabs(dd) < discont, 0, ph_correct) - up = p.copy() - up[slice1] = p[slice1] + ph_correct.cumsum() - return up diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index 229925ec04..416ded7e36 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -324,14 +324,16 @@ def test_interpolate_continuous(self, obs_time, expected): [ (-1, np.nan), (1.5, 0.75*np.pi), - (2.5, -np.pi), - (3.5, -0.75*np.pi), + (2.5, -0.75*np.pi), + (3.5, -0.25*np.pi), (5, np.nan), ] ) def test_interpolate_angles(self, obs_time, expected): prediction_times = np.array([0, 1, 2, 3, 4]) - predicted_angles = np.array([0, np.pi/2, np.pi, -np.pi, -np.pi/2]) + predicted_angles = np.array( + [0, 0.5*np.pi, np.pi, 1.5*np.pi, 2*np.pi] + ) # already unwrapped res = nav.interpolate_angles( obs_time, prediction_times, @@ -362,12 +364,14 @@ def test_interpolate_nearest(self, obs_time, expected): np.testing.assert_allclose(res, expected) def test_interpolate_orbit_prediction(self, obs_time, orbit_prediction, orbit_expected): + orbit_prediction = orbit_prediction.to_numba() orbit = nav.interpolate_orbit_prediction( orbit_prediction, obs_time ) assert_namedtuple_close(orbit, orbit_expected) def test_interpolate_attitude_prediction(self, obs_time, attitude_prediction, attitude_expected): + attitude_prediction = attitude_prediction.to_numba() attitude = nav.interpolate_attitude_prediction( attitude_prediction, obs_time ) @@ -397,12 +401,6 @@ def attitude_expected(self): angle_between_sat_spin_and_yz_plane=1.7, ) - @pytest.fixture - def nav_params_expected(self, attitude_expected, orbit_expected, proj_params): - 
return nav.NavigationParameters( - attitude_expected, orbit_expected, proj_params - ) - @pytest.fixture def sampling_angle(): @@ -463,17 +461,6 @@ def proj_params(sampling_angle): ) -@pytest.mark.parametrize( - 'angles', - [ - (np.array([0, np.pi/2, np.pi, -np.pi, -np.pi/2])), - (np.array([0, 0.78539816, 1.57079633, 5.49778714, 6.28318531])) - ] -) -def test_unwrap(angles): - np.testing.assert_allclose(nav.unwrap(angles), np.unwrap(angles)) - - def test_get_observation_time(): scan_params = nav.ScanningParameters( start_time_of_scan=50000.0, @@ -757,6 +744,9 @@ def simple_coordinate_conversion_table(self): @pytest.fixture def dataset_exp(self, dataset_id, lons_lats_exp): + + # TODO: Use dictionary + lons, lats = lons_lats_exp if dataset_id["calibration"] == "counts": return xr.DataArray( @@ -811,6 +801,10 @@ def lons_lats_exp(self, dataset_id): pix = [1672, 1672, 1673, 1673] lin = [686, 2089, 686, 2089] """ + + # TODO: Use dictionary + + if dataset_id['name'] == 'IR1': lons = [[139.680120, 139.718902], [140.307367, 140.346062]] From 6e418127b211a1179ca70cedbfad7e18d606d5b6 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Tue, 23 May 2023 11:03:34 +0000 Subject: [PATCH 0156/1416] Refactor navigation --- satpy/readers/gms5_vissr_navigation.py | 60 +++++++++++++++----------- 1 file changed, 35 insertions(+), 25 deletions(-) diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index 5f72894af3..b7b213dff9 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -41,6 +41,10 @@ ) +ScanningParameters = namedtuple( + 'ScanningParameters', ['start_time_of_scan', 'spinning_rate', 'num_sensors', 'sampling_angle'] +) + ProjectionParameters = namedtuple( 'ProjectionParameters', [ @@ -160,12 +164,11 @@ def get_lons_lats(lines, pixels, static_params, predicted_params): lines, pixels, static_params, - _get_predicted_params_numba(predicted_params) + _make_predicted_params_numba_compatible(predicted_params) ) -def _get_predicted_params_numba(predicted_params): - """Get predicted parameters in numba-compatible type.""" +def _make_predicted_params_numba_compatible(predicted_params): att_pred, orb_pred = predicted_params return att_pred.to_numba(), orb_pred.to_numba() @@ -182,20 +185,31 @@ def _get_lons_lats_numba(lines, pixels, static_params, predicted_params): for i in range(num_lines): for j in range(num_pixels): point = (lines[i], pixels[j]) - obs_time = get_observation_time(point, scan_params) - attitude, orbit = interpolate_navigation_prediction( - attitude_prediction, orbit_prediction, obs_time + nav_params = _get_navigation_parameters( + point, + attitude_prediction, + orbit_prediction, + proj_params, + scan_params ) - nav_params = (attitude, orbit, proj_params) lon, lat = get_lon_lat(point, nav_params) lons[i, j] = lon lats[i, j] = lat return lons, lats -ScanningParameters = namedtuple( - 'ScanningParameters', ['start_time_of_scan', 'spinning_rate', 'num_sensors', 'sampling_angle'] -) +@numba.njit +def _get_navigation_parameters( + point, + attitude_prediction, + orbit_prediction, + proj_params, + scan_params): + obs_time = get_observation_time(point, scan_params) + attitude, orbit = interpolate_navigation_prediction( + attitude_prediction, orbit_prediction, obs_time + ) + return attitude, orbit, proj_params @numba.njit @@ -625,21 +639,6 @@ def interpolate_continuous(x, x_sample, y_sample): return np.nan -@numba.njit -def interpolate_angles(x, x_sample, y_sample): - """Linear interpolation of angles. 
- - Requires 2-pi periodicity to be unwrapped before (for - performance reasons). Interpolated angles are wrapped - back to [-pi, pi] to restore periodicity. - """ - try: - return _wrap_2pi(_interpolate(x, x_sample, y_sample)) - except: - # Numba cannot distinguish exception types - return np.nan - - @numba.njit def _interpolate(x, x_sample, y_sample): i = _find_enclosing_index(x, x_sample) @@ -660,6 +659,17 @@ def _find_enclosing_index(x, x_sample): raise Exception('x not enclosed by x_sample') +@numba.njit +def interpolate_angles(x, x_sample, y_sample): + """Linear interpolation of angles. + + Requires 2-pi periodicity to be unwrapped before (for + performance reasons). Interpolated angles are wrapped + back to [-pi, pi] to restore periodicity. + """ + return _wrap_2pi(interpolate_continuous(x, x_sample, y_sample)) + + @numba.njit def _wrap_2pi(values): """Wrap values to interval [-pi, pi]. From 1ae1df4804cf609775b9349778559abd8223c11a Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Tue, 23 May 2023 21:22:32 +0800 Subject: [PATCH 0157/1416] Update __init__.py --- satpy/composites/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 6636cc2fe6..95045c7da1 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1039,7 +1039,7 @@ class RatioSharpenedRGB(GenericCompositor): new_G = G * ratio new_B = B * ratio - In some cases, there could be another high resolution band: + In some cases, there could be another high resolution band:: R_lo - 1000m resolution - shape=(2000, 2000) G_hi - 500m resolution - shape=(4000, 4000) @@ -1047,7 +1047,7 @@ class RatioSharpenedRGB(GenericCompositor): R_hi - 500m resolution - shape=(4000, 4000) To avoid the green band getting involved in calculating ratio or sharpening, - specify it by "neutral_resolution_band: green" in YAML config file. Then: + specify it by "neutral_resolution_band: green" in YAML config file. 
Then:: ratio = R_hi / R_lo new_R = R_hi From 386c5fc76e4fe51f44ac3218153a42c1f371e943 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Tue, 23 May 2023 21:39:57 +0800 Subject: [PATCH 0158/1416] Update test_composites.py --- satpy/tests/test_composites.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 1749013837..40b45c1229 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -187,7 +187,8 @@ def setUp(self): def test_bad_color(self): """Test that only valid band colors can be provided.""" from satpy.composites import RatioSharpenedRGB - self.assertRaises(ValueError, RatioSharpenedRGB, name='true_color', high_resolution_band='bad') + self.assertRaises(ValueError, RatioSharpenedRGB, name='true_color', high_resolution_band='bad', + neutral_resolution_band='bad') def test_match_data_arrays(self): """Test that all areas have to be the same resolution.""" @@ -227,6 +228,17 @@ def test_basic_red(self): np.testing.assert_allclose(res[1], np.array([[0.6, 0.6], [np.nan, 3.0]], dtype=np.float64)) np.testing.assert_allclose(res[2], np.array([[0.8, 0.8], [np.nan, 4.0]], dtype=np.float64)) + def test_basic_red_neutral_green(self): + """Test that basic high resolution red can be passed.""" + from satpy.composites import RatioSharpenedRGB + comp = RatioSharpenedRGB(name='true_color', neutral_resolution_band='green') + res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) + res = res.values + self.assertEqual(res.shape, (3, 2, 2)) + np.testing.assert_allclose(res[0], self.ds4.values) + np.testing.assert_allclose(res[1], np.array([[3.0, 3.0], [np.nan, 3.0]], dtype=np.float64)) + np.testing.assert_allclose(res[2], np.array([[0.8, 0.8], [np.nan, 4.0]], dtype=np.float64)) + def test_self_sharpened_no_high_res(self): """Test for exception when no high res band is specified.""" from satpy.composites import SelfSharpenedRGB From 6e6ed7a0564fea2f4c252d327f690f1e2d3f25db Mon Sep 17 00:00:00 2001 From: ghiggi Date: Wed, 24 May 2023 14:50:59 +0200 Subject: [PATCH 0159/1416] Refactor of collect_cf_datasets --- satpy/tests/scene_tests/test_conversions.py | 62 ++++ satpy/tests/writer_tests/test_cf.py | 186 +++++++----- satpy/writers/cf_writer.py | 306 ++++++++++++-------- 3 files changed, 359 insertions(+), 195 deletions(-) diff --git a/satpy/tests/scene_tests/test_conversions.py b/satpy/tests/scene_tests/test_conversions.py index 0a15fd941f..35e02ddc1f 100644 --- a/satpy/tests/scene_tests/test_conversions.py +++ b/satpy/tests/scene_tests/test_conversions.py @@ -53,6 +53,68 @@ def test_to_xarray_dataset_with_empty_scene(self): assert len(xrds.variables) == 0 assert len(xrds.coords) == 0 + def test_to_xarray_with_empty_scene(self): + """Test converting empty Scene to xarray.""" + scn = Scene() + ds = scn.to_xarray() + assert isinstance(ds, xr.Dataset) + assert len(ds.variables) == 0 + assert len(ds.coords) == 0 + + def test_to_xarray_with_single_area_scene(self): + """Test converting single area Scene to xarray dataset.""" + from pyresample.geometry import AreaDefinition + + area = AreaDefinition('test', 'test', 'test', + {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0}, + 2, 2, [-200, -200, 200, 200]) + data_array = xr.DataArray(da.zeros((2, 2), chunks=-1), + dims=('y', 'x'), + attrs={'start_time': datetime(2018, 1, 1), 'area': area}) + + scn = Scene() + scn['var1'] = data_array + ds = scn.to_xarray() + + # Assert dataset type + assert isinstance(ds, xr.Dataset) + # 
Assert var1 is among the Dataset variables
+        assert "var1" in ds.data_vars
+
+        # Assert that lon/lat coordinates are included by default
+        assert "latitude" in ds.coords
+        assert "longitude" in ds.coords
+
+        # Assert include_lonlats=False works
+        ds = scn.to_xarray(include_lonlats=False)
+        assert "latitude" not in ds.coords
+        assert "longitude" not in ds.coords
+
+    def test_to_xarray_with_multiple_area_scene(self):
+        """Test converting multiple area Scene to xarray."""
+        from pyresample.geometry import AreaDefinition
+
+        area1 = AreaDefinition('test', 'test', 'test',
+                               {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0},
+                               2, 2, [-200, -200, 200, 200])
+        area2 = AreaDefinition('test', 'test', 'test',
+                               {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0},
+                               4, 4, [-200, -200, 200, 200])
+
+        data_array1 = xr.DataArray(da.zeros((2, 2), chunks=-1),
+                                   dims=('y', 'x'),
+                                   attrs={'start_time': datetime(2018, 1, 1), 'area': area1})
+        data_array2 = xr.DataArray(da.zeros((4, 4), chunks=-1),
+                                   dims=('y', 'x'),
+                                   attrs={'start_time': datetime(2018, 1, 1), 'area': area2})
+        scn = Scene()
+        scn['var1'] = data_array1
+        scn['var2'] = data_array2
+
+        # TODO: in future adapt for DataTree implementation
+        with pytest.raises(NotImplementedError):
+            _ = scn.to_xarray()
+
     def test_geoviews_basic_with_area(self):
         """Test converting a Scene to geoviews with an AreaDefinition."""
         from pyresample.geometry import AreaDefinition
diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py
index 100d6b75f2..5621438904 100644
--- a/satpy/tests/writer_tests/test_cf.py
+++ b/satpy/tests/writer_tests/test_cf.py
@@ -70,6 +70,116 @@ def __exit__(self, *args):
         os.remove(self.filename)
 
 
+def test_lonlat_storage(tmp_path):
+    """Test correct storage for area with lon/lat units."""
+    from ..utils import make_fake_scene
+    scn = make_fake_scene(
+        {"ketolysis": np.arange(25).reshape(5, 5)},
+        daskify=True,
+        area=create_area_def("mavas", 4326, shape=(5, 5),
+                             center=(0, 0), resolution=(1, 1)))
+
+    filename = os.fspath(tmp_path / "test.nc")
+    scn.save_datasets(filename=filename, writer="cf", include_lonlats=False)
+    with xr.open_dataset(filename) as ds:
+        assert ds["ketolysis"].attrs["grid_mapping"] == "mavas"
+        assert ds["mavas"].attrs["grid_mapping_name"] == "latitude_longitude"
+        assert ds["x"].attrs["units"] == "degrees_east"
+        assert ds["y"].attrs["units"] == "degrees_north"
+        assert ds["mavas"].attrs["longitude_of_prime_meridian"] == 0.0
+        np.testing.assert_allclose(ds["mavas"].attrs["semi_major_axis"], 6378137.0)
+        np.testing.assert_allclose(ds["mavas"].attrs["inverse_flattening"], 298.257223563)
+
+
+def test_da2cf_lonlat():
+    """Test correct da2cf encoding for area with lon/lat units."""
+    from satpy.resample import add_crs_xy_coords
+    from satpy.writers.cf_writer import CFWriter
+
+    area = create_area_def("mavas", 4326, shape=(5, 5),
+                           center=(0, 0), resolution=(1, 1))
+    da = xr.DataArray(
+        np.arange(25).reshape(5, 5),
+        dims=("y", "x"),
+        attrs={"area": area})
+    da = add_crs_xy_coords(da, area)
+    new_da = CFWriter.da2cf(da)
+    assert new_da["x"].attrs["units"] == "degrees_east"
+    assert new_da["y"].attrs["units"] == "degrees_north"
+
+
+def test_is_projected(caplog):
+    """Tests for private _is_projected function."""
+    from satpy.writers.cf_writer import CFWriter
+
+    # test case with units but no area
+    da = xr.DataArray(
+        np.arange(25).reshape(5, 5),
+        dims=("y", "x"),
+        coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "m"}),
+                "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "m"})})
+    assert CFWriter._is_projected(da)
+
+    da = xr.DataArray(
+        np.arange(25).reshape(5, 5),
+        dims=("y", "x"),
+        coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "degrees_east"}),
+                "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "degrees_north"})})
+    assert not CFWriter._is_projected(da)
+
+    da = xr.DataArray(
+        np.arange(25).reshape(5, 5),
+        dims=("y", "x"))
+    with caplog.at_level(logging.WARNING):
+        assert CFWriter._is_projected(da)
+    assert "Failed to tell if data are projected." in caplog.text
+
+
+def test_preprocess_dataarray_name():
+    """Test saving an array to netcdf/cf where the dataset name starts with a digit, with prefix and original name included."""
+    from satpy import Scene
+    from satpy.writers.cf_writer import _preprocess_dataarray_name
+
+    scn = Scene()
+    scn['1'] = xr.DataArray([1, 2, 3])
+    dataarray = scn['1']
+    # If numeric_name_prefix is a string, test that the original_name attribute is added
+    out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix="TEST", include_orig_name=True)
+    assert out_da.attrs['original_name'] == '1'
+
+    # If numeric_name_prefix is an empty string, False or None, test that no original_name attribute is added
+    out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix="", include_orig_name=True)
+    assert "original_name" not in out_da.attrs
+
+    out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix=False, include_orig_name=True)
+    assert "original_name" not in out_da.attrs
+
+    out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix=None, include_orig_name=True)
+    assert "original_name" not in out_da.attrs
+
+
+def test_add_time_cf_attrs():
+    """Test addition of CF-compliant time attributes."""
+    from satpy import Scene
+    from satpy.writers.cf_writer import add_time_cf_attrs
+
+    scn = Scene()
+    test_array = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
+    times = np.array(['2018-05-30T10:05:00', '2018-05-30T10:05:01',
+                      '2018-05-30T10:05:02', '2018-05-30T10:05:03'], dtype=np.datetime64)
+    scn['test-array'] = xr.DataArray(test_array,
+                                     dims=['y', 'x'],
+                                     coords={'time': ('y', times)},
+                                     attrs=dict(start_time=times[0], end_time=times[-1]))
+    ds = scn['test-array'].to_dataset(name='test-array')
+    ds = add_time_cf_attrs(ds)
+    assert "bnds_1d" in ds.dims
+    assert ds.dims['bnds_1d'] == 2
+    assert "time_bnds" in list(ds.data_vars)
+    assert "bounds" in ds["time"].attrs
+    assert "standard_name" in ds["time"].attrs
+
+
 class TestCFWriter(unittest.TestCase):
     """Test case for CF writer."""
 
@@ -594,19 +704,16 @@ def test_collect_datasets(self, *mocks):
 
         # Collect datasets
         writer = CFWriter()
-        datas, start_times, end_times = writer._collect_datasets(datasets, include_lonlats=True)
+        datas = writer._collect_datasets(datasets, include_lonlats=True)
 
         # Test results
         self.assertEqual(len(datas), 3)
         self.assertEqual(set(datas.keys()), {'var1', 'var2', 'geos'})
-        self.assertListEqual(start_times, [None, tstart, None])
-        self.assertListEqual(end_times, [None, tend, None])
+
         var1 = datas['var1']
         var2 = datas['var2']
         self.assertEqual(var1.name, 'var1')
         self.assertEqual(var1.attrs['grid_mapping'], 'geos')
-        self.assertEqual(var1.attrs['start_time'], '2019-04-01 12:00:00')
-        self.assertEqual(var1.attrs['end_time'], '2019-04-01 12:15:00')
         self.assertEqual(var1.attrs['long_name'], 'var1')
         # variable 2
         self.assertNotIn('grid_mapping', var2.attrs)
@@ -1038,71 +1145,6 @@ def test_global_attr_history_and_Conventions(self):
         self.assertIn('Created by pytroll/satpy on', f.attrs['history'])
 
 
-def test_lonlat_storage(tmp_path):
-    """Test correct
storage for area with lon/lat units.""" - from ..utils import make_fake_scene - scn = make_fake_scene( - {"ketolysis": np.arange(25).reshape(5, 5)}, - daskify=True, - area=create_area_def("mavas", 4326, shape=(5, 5), - center=(0, 0), resolution=(1, 1))) - - filename = os.fspath(tmp_path / "test.nc") - scn.save_datasets(filename=filename, writer="cf", include_lonlats=False) - with xr.open_dataset(filename) as ds: - assert ds["ketolysis"].attrs["grid_mapping"] == "mavas" - assert ds["mavas"].attrs["grid_mapping_name"] == "latitude_longitude" - assert ds["x"].attrs["units"] == "degrees_east" - assert ds["y"].attrs["units"] == "degrees_north" - assert ds["mavas"].attrs["longitude_of_prime_meridian"] == 0.0 - np.testing.assert_allclose(ds["mavas"].attrs["semi_major_axis"], 6378137.0) - np.testing.assert_allclose(ds["mavas"].attrs["inverse_flattening"], 298.257223563) - - -def test_da2cf_lonlat(): - """Test correct da2cf encoding for area with lon/lat units.""" - from satpy.resample import add_crs_xy_coords - from satpy.writers.cf_writer import CFWriter - - area = create_area_def("mavas", 4326, shape=(5, 5), - center=(0, 0), resolution=(1, 1)) - da = xr.DataArray( - np.arange(25).reshape(5, 5), - dims=("y", "x"), - attrs={"area": area}) - da = add_crs_xy_coords(da, area) - new_da = CFWriter.da2cf(da) - assert new_da["x"].attrs["units"] == "degrees_east" - assert new_da["y"].attrs["units"] == "degrees_north" - - -def test_is_projected(caplog): - """Tests for private _is_projected function.""" - from satpy.writers.cf_writer import CFWriter - - # test case with units but no area - da = xr.DataArray( - np.arange(25).reshape(5, 5), - dims=("y", "x"), - coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "m"}), - "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "m"})}) - assert CFWriter._is_projected(da) - - da = xr.DataArray( - np.arange(25).reshape(5, 5), - dims=("y", "x"), - coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "degrees_east"}), - "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "degrees_north"})}) - assert not CFWriter._is_projected(da) - - da = xr.DataArray( - np.arange(25).reshape(5, 5), - dims=("y", "x")) - with caplog.at_level(logging.WARNING): - assert CFWriter._is_projected(da) - assert "Failed to tell if data are projected." in caplog.text - - class TestCFWriterData(unittest.TestCase): """Test case for CF writer where data arrays are needed.""" @@ -1165,8 +1207,8 @@ def test_collect_datasets_with_latitude_named_lat(self, *mocks): # Collect datasets writer = CFWriter() - datas, start_times, end_times = writer._collect_datasets(self.datasets_list, include_lonlats=True) - datas2, start_times, end_times = writer._collect_datasets(self.datasets_list_no_latlon, include_lonlats=True) + datas = writer._collect_datasets(self.datasets_list, include_lonlats=True) + datas2 = writer._collect_datasets(self.datasets_list_no_latlon, include_lonlats=True) # Test results self.assertEqual(len(datas), 5) diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 4f91aeb85b..ab0eb845ea 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -238,6 +238,10 @@ def get_extra_ds(dataset, keys=None): return ds_collection +# ###--------------------------------------------------------------------------. 
+# ### CF-Area + + def area2lonlat(dataarray): """Convert an area to longitudes and latitudes.""" dataarray = dataarray.copy() @@ -279,63 +283,6 @@ def area2cf(dataarray, strict=False, got_lonlats=False): return res -def make_time_bounds(start_times, end_times): - """Create time bounds for the current *dataarray*.""" - start_time = min(start_time for start_time in start_times - if start_time is not None) - end_time = min(end_time for end_time in end_times - if end_time is not None) - data = xr.DataArray([[np.datetime64(start_time), np.datetime64(end_time)]], - dims=['time', 'bnds_1d']) - return data - - -def assert_xy_unique(datas): - """Check that all datasets share the same projection coordinates x/y.""" - unique_x = set() - unique_y = set() - for dataset in datas.values(): - if 'y' in dataset.dims: - token_y = tokenize(dataset['y'].data) - unique_y.add(token_y) - if 'x' in dataset.dims: - token_x = tokenize(dataset['x'].data) - unique_x.add(token_x) - if len(unique_x) > 1 or len(unique_y) > 1: - raise ValueError('Datasets to be saved in one file (or one group) must have identical projection coordinates. ' - 'Please group them by area or save them in separate files.') - - -def link_coords(datas): - """Link dataarrays and coordinates. - - If the `coordinates` attribute of a data array links to other dataarrays in the scene, for example - `coordinates='lon lat'`, add them as coordinates to the data array and drop that attribute. In the final call to - `xr.Dataset.to_netcdf()` all coordinate relations will be resolved and the `coordinates` attributes be set - automatically. - - """ - for da_name, data in datas.items(): - declared_coordinates = data.attrs.get('coordinates', []) - if isinstance(declared_coordinates, str): - declared_coordinates = declared_coordinates.split(' ') - for coord in declared_coordinates: - if coord not in data.coords: - try: - dimensions_not_in_data = list(set(datas[coord].dims) - set(data.dims)) - data[coord] = datas[coord].squeeze(dimensions_not_in_data, drop=True) - except KeyError: - warnings.warn( - 'Coordinate "{}" referenced by dataarray {} does not ' - 'exist, dropping reference.'.format(coord, da_name), - stacklevel=2 - ) - continue - - # Drop 'coordinates' attribute in any case to avoid conflicts in xr.Dataset.to_netcdf() - data.attrs.pop('coordinates', None) - - def dataset_is_projection_coords(dataset): """Check if dataset is a projection coords.""" if 'standard_name' in dataset.attrs and dataset.attrs['standard_name'] in ['longitude', 'latitude']: @@ -397,6 +344,88 @@ def make_alt_coords_unique(datas, pretty=False): return new_datas +def assert_xy_unique(datas): + """Check that all datasets share the same projection coordinates x/y.""" + unique_x = set() + unique_y = set() + for dataset in datas.values(): + if 'y' in dataset.dims: + token_y = tokenize(dataset['y'].data) + unique_y.add(token_y) + if 'x' in dataset.dims: + token_x = tokenize(dataset['x'].data) + unique_x.add(token_x) + if len(unique_x) > 1 or len(unique_y) > 1: + raise ValueError('Datasets to be saved in one file (or one group) must have identical projection coordinates. ' + 'Please group them by area or save them in separate files.') + + +def link_coords(datas): + """Link dataarrays and coordinates. + + If the `coordinates` attribute of a data array links to other dataarrays in the scene, for example + `coordinates='lon lat'`, add them as coordinates to the data array and drop that attribute. 
In the final call to + `xr.Dataset.to_netcdf()` all coordinate relations will be resolved and the `coordinates` attributes be set + automatically. + + """ + for da_name, data in datas.items(): + declared_coordinates = data.attrs.get('coordinates', []) + if isinstance(declared_coordinates, str): + declared_coordinates = declared_coordinates.split(' ') + for coord in declared_coordinates: + if coord not in data.coords: + try: + dimensions_not_in_data = list(set(datas[coord].dims) - set(data.dims)) + data[coord] = datas[coord].squeeze(dimensions_not_in_data, drop=True) + except KeyError: + warnings.warn( + 'Coordinate "{}" referenced by dataarray {} does not ' + 'exist, dropping reference.'.format(coord, da_name), + stacklevel=2 + ) + continue + + # Drop 'coordinates' attribute in any case to avoid conflicts in xr.Dataset.to_netcdf() + data.attrs.pop('coordinates', None) + + +# ###--------------------------------------------------------------------------. +# ### CF-Time +def make_time_bounds(start_times, end_times): + """Create time bounds for the current *dataarray*.""" + start_time = min(start_time for start_time in start_times + if start_time is not None) + end_time = min(end_time for end_time in end_times + if end_time is not None) + data = xr.DataArray([[np.datetime64(start_time), np.datetime64(end_time)]], + dims=['time', 'bnds_1d']) + return data + + +def add_time_cf_attrs(ds): + """Add time CF-compliant attributes if Dataset has time coordinate.""" + if 'time' in ds: + # Retrieve list of start_time and end_time + start_times = [] + end_times = [] + for _var_name, data_array in ds.items(): + start_times.append(data_array.attrs.get("start_time", None)) + end_times.append(data_array.attrs.get("end_time", None)) + # Add time bounds dimension and bounds attribute + ds['time_bnds'] = make_time_bounds(start_times, end_times) + ds['time'].attrs['bounds'] = "time_bnds" + ds['time'].attrs['standard_name'] = "time" + # else: + # grp_str = ' of group {}'.format(group_name) if group_name is not None else '' + # logger.warning('No time dimension in datasets{}, skipping time bounds creation.'.format(grp_str)) + return ds + + +# ###--------------------------------------------------------------------------. +# ### Attributes + + class AttributeEncoder(json.JSONEncoder): """JSON encoder for dataset attributes.""" @@ -503,6 +532,25 @@ def encode_attrs_nc(attrs): return OrderedDict(encoded_attrs) +def preprocess_header_attrs(header_attrs, flatten_attrs=False): + """Prepare header attributes.""" + # Define file header attributes + if header_attrs is not None: + if flatten_attrs: + header_attrs = flatten_dict(header_attrs) + header_attrs = encode_attrs_nc(header_attrs) # OrderedDict + else: + header_attrs = {} + # - Add "Created by pytroll/satpy ..." to history attribute + header_attrs = _add_history(header_attrs) + # - Return header attributes + return header_attrs + + +# ###--------------------------------------------------------------------------. +# ### netCDF encodings + + def _set_default_chunks(encoding, dataset): """Update encoding to preserve current dask chunks. 
@@ -516,6 +564,7 @@ def _set_default_chunks(encoding, dataset):
         )  # Chunksize may not exceed shape
         encoding.setdefault(var_name, {})
         encoding[var_name].setdefault('chunksizes', chunks)
+    return encoding
 
 
 def _set_default_fill_value(encoding, dataset):
@@ -530,13 +579,15 @@ def _set_default_fill_value(encoding, dataset):
     for coord_var in coord_vars:
         encoding.setdefault(coord_var, {})
         encoding[coord_var].update({'_FillValue': None})
+    return encoding
 
 
 def _set_default_time_encoding(encoding, dataset):
     """Set default time encoding.
 
-    Make sure time coordinates and bounds have the same units. Default is xarray's CF datetime
-    encoding, which can be overridden by user-defined encoding.
+    Make sure time coordinates and bounds have the same units.
+    Default is xarray's CF datetime encoding, which can be overridden
+    by user-defined encoding.
     """
     if 'time' in dataset:
         try:
@@ -552,21 +603,26 @@ def _set_default_time_encoding(encoding, dataset):
                     '_FillValue': None}
         encoding['time'] = time_enc
         encoding['time_bnds'] = bounds_enc  # FUTURE: Not required anymore with xarray-0.14+
+    return encoding
 
 
-def _set_encoding_dataset_names(encoding, dataset, numeric_name_prefix):
-    """Set Netcdf variable names encoding according to numeric_name_prefix.
+def _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix):
+    """Ensure variable names of the encoding dictionary account for numeric_name_prefix.
 
-    A lot of channel names in satpy starts with a digit. When writing data with the satpy_cf_nc
-    these channels are prepended with numeric_name_prefix.
-    This ensures this is also done with any matching variables in encoding.
+    A lot of channel names in satpy start with a digit.
+    When preparing CF-compliant datasets, these channels are prefixed with numeric_name_prefix.
+    If variable names in the encoding dictionary start with a digit, they are prefixed
+    with numeric_name_prefix.
     """
-    for _var_name, _variable in dataset.variables.items():
-        if not numeric_name_prefix or not _var_name.startswith(numeric_name_prefix):
+    for var_name in list(dataset.variables):
+        # If var_name does not start with numeric_name_prefix, do nothing
+        if not numeric_name_prefix or not var_name.startswith(numeric_name_prefix):
             continue
-        _orig_var_name = _var_name.replace(numeric_name_prefix, '')
-        if _orig_var_name in encoding:
-            encoding[_var_name] = encoding.pop(_orig_var_name)
+        # Else update the encoding dictionary key name with the prefixed one
+        orig_var_name = var_name.replace(numeric_name_prefix, '')
+        if orig_var_name in encoding:
+            encoding[var_name] = encoding.pop(orig_var_name)
+    return encoding
 
 
 def update_encoding(dataset, to_netcdf_kwargs, numeric_name_prefix='CHANNEL_'):
@@ -578,25 +634,57 @@ def update_encoding(dataset, to_netcdf_kwargs, numeric_name_prefix='CHANNEL_'):
     other_to_netcdf_kwargs = to_netcdf_kwargs.copy()
     encoding = other_to_netcdf_kwargs.pop('encoding', {}).copy()
 
-    _set_encoding_dataset_names(encoding, dataset, numeric_name_prefix)
-    _set_default_chunks(encoding, dataset)
-    _set_default_fill_value(encoding, dataset)
-    _set_default_time_encoding(encoding, dataset)
+    encoding = _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix)
+    encoding = _set_default_chunks(encoding, dataset)
+    encoding = _set_default_fill_value(encoding, dataset)
+    encoding = _set_default_time_encoding(encoding, dataset)
     return encoding, other_to_netcdf_kwargs
 
 
+# ###--------------------------------------------------------------------------.
+
+# ### CF-conversion
+
+
 def _handle_dataarray_name(original_name, numeric_name_prefix):
-    name = original_name
-    if name[0].isdigit():
+    if original_name[0].isdigit():
         if numeric_name_prefix:
-            name = numeric_name_prefix + original_name
+            new_name = numeric_name_prefix + original_name
         else:
             warnings.warn(
-                'Invalid NetCDF dataset name: {} starts with a digit.'.format(name),
+                f'Invalid NetCDF dataset name: {original_name} starts with a digit.',
                 stacklevel=5
             )
-    return original_name, name
+            new_name = original_name  # occurs when numeric_name_prefix = '', None or False
+    else:
+        new_name = original_name
+    return original_name, new_name
+
+
+def _preprocess_dataarray_name(dataarray, numeric_name_prefix, include_orig_name):
+    """Change the DataArray name by prepending numeric_name_prefix if the name is a digit."""
+    # Replace the dataarray name if it starts with a digit
+    original_name = None
+    dataarray = dataarray.copy()
+    if 'name' in dataarray.attrs:
+        original_name = dataarray.attrs.pop('name')
+        original_name, new_name = _handle_dataarray_name(original_name, numeric_name_prefix)
+        dataarray = dataarray.rename(new_name)
+        # If the name changed, add the original_name attribute
+        if include_orig_name and numeric_name_prefix and original_name and original_name != new_name:
+            dataarray.attrs['original_name'] = original_name
+
+    return dataarray
+
+
+def _remove_satpy_attributes(new_data):
+    """Remove _satpy attributes."""
+    # Remove _satpy* attributes
+    satpy_attrs = [key for key in new_data.attrs if key.startswith('_satpy')]
+    for satpy_attr in satpy_attrs:
+        new_data.attrs.pop(satpy_attr)
+    new_data.attrs.pop('_last_resampler', None)
+    return new_data
 
 
 def _add_history(attrs):
@@ -661,14 +749,11 @@ def make_cf_dataarray(dataarray, epoch=EPOCH, flatten_attrs=False,
     if exclude_attrs is None:
         exclude_attrs = []
 
-    original_name = None
-    new_data = dataarray.copy()
-    if 'name' in new_data.attrs:
-        name = new_data.attrs.pop('name')
-        original_name, name = _handle_dataarray_name(name, numeric_name_prefix)
-        new_data = new_data.rename(name)
+    new_data = _preprocess_dataarray_name(dataarray=dataarray,
+                                          numeric_name_prefix=numeric_name_prefix,
+                                          include_orig_name=include_orig_name)
 
-    CFWriter._remove_satpy_attributes(new_data)
+    new_data = _remove_satpy_attributes(new_data)
 
     new_data = CFWriter._encode_time(new_data, epoch)
     new_data = CFWriter._encode_coords(new_data)
@@ -691,9 +776,6 @@ def make_cf_dataarray(dataarray, epoch=EPOCH, flatten_attrs=False,
     if 'prerequisites' in new_data.attrs:
         new_data.attrs['prerequisites'] = [np.string_(str(prereq)) for prereq in new_data.attrs['prerequisites']]
 
-    if include_orig_name and numeric_name_prefix and original_name and original_name != name:
-        new_data.attrs['original_name'] = original_name
-
     # Flatten dict-type attributes, if desired
     if flatten_attrs:
         new_data.attrs = flatten_dict(new_data.attrs)
@@ -766,24 +848,18 @@ def collect_cf_datasets(list_dataarrays,
                         "dimensions (eg. through resampling).")
 
     # Define file header attributes
-    if header_attrs is not None:
-        if flatten_attrs:
-            header_attrs = flatten_dict(header_attrs)
-        header_attrs = encode_attrs_nc(header_attrs)  # OrderedDict
-    else:
-        header_attrs = {}
+    header_attrs = preprocess_header_attrs(header_attrs=header_attrs,
+                                           flatten_attrs=flatten_attrs)
 
     # Retrieve groups
     # - If groups is None: {None: list_dataarrays}
     # - if groups not None: {group_name: [xr.DataArray, xr.DataArray ,..], ...}
-    # - TODO: if all dataset names are wrong, currently and before the PR behave like groups = None !
+ # Note: if all dataset names are wrong, behave like groups = None ! grouped_dataarrays = _get_groups(groups, list_dataarrays) is_grouped = len(grouped_dataarrays) >= 2 - # Update header_attrs with 'history' and 'Conventions' - # - Add "Created by pytroll/satpy ..." to history attribute - header_attrs = _add_history(header_attrs) - # - Add CF conventions if not grouped. If 'Conventions' key already present, do not overwrite + # If not grouped, add CF conventions. + # - If 'Conventions' key already present, do not overwrite if "Conventions" not in header_attrs and not is_grouped: header_attrs['Conventions'] = CF_VERSION @@ -791,8 +867,7 @@ def collect_cf_datasets(list_dataarrays, # --> If no groups (groups=None) --> group_name=None grouped_datasets = {} for group_name, group_dataarrays in grouped_dataarrays.items(): - # XXX: Should we combine the info of all datasets? - dict_datarrays, start_times, end_times = CFWriter._collect_datasets( + dict_datarrays = CFWriter._collect_datasets( datasets=group_dataarrays, epoch=epoch, flatten_attrs=flatten_attrs, @@ -807,14 +882,8 @@ def collect_cf_datasets(list_dataarrays, if not is_grouped: ds.attrs = header_attrs - # Add time_bnds - if 'time' in ds: - ds['time_bnds'] = make_time_bounds(start_times, end_times) - ds['time'].attrs['bounds'] = "time_bnds" - ds['time'].attrs['standard_name'] = "time" - else: - grp_str = ' of group {}'.format(group_name) if group_name is not None else '' - logger.warning('No time dimension in datasets{}, skipping time bounds creation.'.format(grp_str)) + # If "time" is a coordinate, add relevant variable/attributes (i.e. time_bnds) + ds = add_time_cf_attrs(ds) # Add xr.Dataset to dictionary grouped_datasets[group_name] = ds @@ -951,20 +1020,12 @@ def _add_time_dimension(new_data): new_data = new_data.expand_dims('time') return new_data - @staticmethod - def _remove_satpy_attributes(new_data): - # Remove _satpy* attributes - satpy_attrs = [key for key in new_data.attrs if key.startswith('_satpy')] - for satpy_attr in satpy_attrs: - new_data.attrs.pop(satpy_attr) - new_data.attrs.pop('_last_resampler', None) - @staticmethod def update_encoding(dataset, to_netcdf_kwargs): """Update encoding info (deprecated).""" warnings.warn('CFWriter.update_encoding is deprecated. 
' 'Use satpy.writers.cf_writer.update_encoding instead.', - DeprecationWarning, stacklevel=3) + DeprecationWarning, stacklevel=3) return update_encoding(dataset, to_netcdf_kwargs) @staticmethod @@ -978,8 +1039,6 @@ def _collect_datasets(datasets, epoch=EPOCH, flatten_attrs=False, ds_collection.update(get_extra_ds(ds)) got_lonlats = has_projection_coords(ds_collection) datas = {} - start_times = [] - end_times = [] # sort by name, but don't use the name for _, ds in sorted(ds_collection.items()): if ds.dtype not in CF_DTYPES: @@ -995,8 +1054,6 @@ def _collect_datasets(datasets, epoch=EPOCH, flatten_attrs=False, except KeyError: new_datasets = [ds] for new_ds in new_datasets: - start_times.append(new_ds.attrs.get("start_time", None)) - end_times.append(new_ds.attrs.get("end_time", None)) new_var = make_cf_dataarray(new_ds, epoch=epoch, flatten_attrs=flatten_attrs, exclude_attrs=exclude_attrs, @@ -1009,7 +1066,7 @@ def _collect_datasets(datasets, epoch=EPOCH, flatten_attrs=False, link_coords(datas) datas = make_alt_coords_unique(datas, pretty=pretty) - return datas, start_times, end_times + return datas def save_dataset(self, dataset, filename=None, fill_value=None, **kwargs): """Save the *dataset* to a given *filename*.""" @@ -1113,6 +1170,9 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, # Return list of writing results return written +# --------------------------------------------------------------------------. +# NetCDF version + def _check_backend_versions(): """Issue warning if backend versions do not match.""" From 8956cd011f0eab5af672fcf93442cbaf2f416867 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 24 May 2023 13:11:54 +0000 Subject: [PATCH 0160/1416] Daskify navigation --- satpy/readers/gms5_vissr_l1b.py | 6 +-- satpy/readers/gms5_vissr_navigation.py | 73 +++++++++++++++++--------- 2 files changed, 51 insertions(+), 28 deletions(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index 554c88fee8..abaf2e9a83 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -633,7 +633,7 @@ def _get_lons_lats(self, dataset, dataset_id): static_params=static_params, predicted_params=predicted_params ) - return self._make_lons_lats_data_array(lats, lons) + return self._make_lons_lats_data_array(lons, lats) def _get_image_coords(self, data): lines = data.coords['line_number'].values @@ -689,7 +689,7 @@ def _get_predicted_navigation_params(self): ) return attitude_prediction, orbit_prediction - def _make_lons_lats_data_array(self, lats, lons): + def _make_lons_lats_data_array(self, lons, lats): lons = xr.DataArray(lons, dims=('y', 'x'), attrs={'standard_name': 'longitude'}) lats = xr.DataArray(lats, dims=('y', 'x'), @@ -709,7 +709,7 @@ def calibrate(self, counts, calibration): _interpolate_calibration_table, self.calib_table_x, self.calib_table_y, - dtype=np.float32 + dtype=np.float32, ) return self._make_data_array(interp, counts) diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index b7b213dff9..445497380c 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -9,8 +9,12 @@ from collections import namedtuple +import dask.array as da import numba import numpy as np +from satpy.utils import get_legacy_chunk_size + +CHUNK_SIZE = get_legacy_chunk_size() EARTH_FLATTENING = 1/298.257 EARTH_EQUATORIAL_RADIUS = 6378136.0 @@ -160,12 +164,16 @@ def to_numba(self): def get_lons_lats(lines, pixels, 
static_params, predicted_params): - return _get_lons_lats_numba( - lines, - pixels, - static_params, - _make_predicted_params_numba_compatible(predicted_params) + pixels_2d, lines_2d = da.meshgrid(pixels, lines) + lons, lats = da.map_blocks( + _get_lons_lats_numba, + lines_2d, + pixels_2d, + static_params=static_params, + predicted_params=_make_predicted_params_numba_compatible(predicted_params), + **_get_map_blocks_kwargs(pixels_2d.chunks) ) + return lons, lats def _make_predicted_params_numba_compatible(predicted_params): @@ -173,38 +181,44 @@ def _make_predicted_params_numba_compatible(predicted_params): return att_pred.to_numba(), orb_pred.to_numba() +def _get_map_blocks_kwargs(chunks): + # Get keyword arguments for da.map_blocks, so that it can be used + # with a function that returns two arguments. + return { + "new_axis": 0, + "chunks": (2, ) + chunks, + "dtype": np.float32, + } + + @numba.njit -def _get_lons_lats_numba(lines, pixels, static_params, predicted_params): - scan_params, proj_params = static_params - attitude_prediction, orbit_prediction = predicted_params - num_lines = len(lines) - num_pixels = len(pixels) - output_shape = (num_lines, num_pixels) - lons = np.zeros(output_shape) - lats = np.zeros(output_shape) - for i in range(num_lines): - for j in range(num_pixels): - point = (lines[i], pixels[j]) +def _get_lons_lats_numba(lines_2d, pixels_2d, static_params, predicted_params): + shape = lines_2d.shape + lons = np.zeros(shape, dtype=np.float32) + lats = np.zeros(shape, dtype=np.float32) + for i in range(shape[0]): + for j in range(shape[1]): + point = (lines_2d[i, j], pixels_2d[i, j]) nav_params = _get_navigation_parameters( point, - attitude_prediction, - orbit_prediction, - proj_params, - scan_params + static_params, + predicted_params ) lon, lat = get_lon_lat(point, nav_params) lons[i, j] = lon lats[i, j] = lat - return lons, lats + # Stack lons and lats because da.map_blocks doesn't support multiple + # return values. 
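+    # The stacked output has shape (2, num_lines, num_pixels); together with
+    # new_axis=0 and the leading (2,) chunk set in _get_map_blocks_kwargs, this
+    # lets get_lons_lats unpack the two layers again into lons and lats.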
+ return np.stack((lons, lats)) @numba.njit def _get_navigation_parameters( point, - attitude_prediction, - orbit_prediction, - proj_params, - scan_params): + static_params, + predicted_params): + scan_params, proj_params = static_params + attitude_prediction, orbit_prediction = predicted_params obs_time = get_observation_time(point, scan_params) attitude, orbit = interpolate_navigation_prediction( attitude_prediction, orbit_prediction, obs_time @@ -692,3 +706,12 @@ def interpolate_nearest(x, x_sample, y_sample): def _interpolate_nearest(x, x_sample, y_sample): i = _find_enclosing_index(x, x_sample) return y_sample[i] + + +# TODO +""" +Possible acceleration: + +- Call find_enclosing_index only once for all predictions +- cache coordinates +""" \ No newline at end of file From fd9dc2a16ab6e7a9a0a05706f7e3d95a6682e9bd Mon Sep 17 00:00:00 2001 From: ghiggi Date: Wed, 24 May 2023 15:21:44 +0200 Subject: [PATCH 0161/1416] Initial refactor of collect_cf_dataset --- satpy/tests/writer_tests/test_cf.py | 62 ++++++------- satpy/writers/cf_writer.py | 137 +++++++++++++++++----------- 2 files changed, 114 insertions(+), 85 deletions(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 5621438904..d5252f5504 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -677,10 +677,9 @@ def test_da2cf_one_dimensional_array(self): coords={'y': [0, 1, 2, 3], 'acq_time': ('y', [0, 1, 2, 3])}) _ = CFWriter.da2cf(arr) - @mock.patch('satpy.writers.cf_writer.CFWriter.__init__', return_value=None) - def test_collect_datasets(self, *mocks): + def test_collect_cf_dataarrays(self, *mocks): """Test collecting CF datasets from a DataArray objects.""" - from satpy.writers.cf_writer import CFWriter + from satpy.writers.cf_writer import _collect_cf_dataset geos = pyresample.geometry.AreaDefinition( area_id='geos', @@ -697,27 +696,26 @@ def test_collect_datasets(self, *mocks): time = [1, 2] tstart = datetime(2019, 4, 1, 12, 0) tend = datetime(2019, 4, 1, 12, 15) - datasets = [xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, - attrs={'name': 'var1', 'start_time': tstart, 'end_time': tend, 'area': geos}), - xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, - attrs={'name': 'var2', 'long_name': 'variable 2'})] + list_dataarrays = [xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, + attrs={'name': 'var1', 'start_time': tstart, 'end_time': tend, 'area': geos}), + xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, + attrs={'name': 'var2', 'long_name': 'variable 2'})] # Collect datasets - writer = CFWriter() - datas = writer._collect_datasets(datasets, include_lonlats=True) + ds = _collect_cf_dataset(list_dataarrays, include_lonlats=True) # Test results - self.assertEqual(len(datas), 3) - self.assertEqual(set(datas.keys()), {'var1', 'var2', 'geos'}) - - var1 = datas['var1'] - var2 = datas['var2'] - self.assertEqual(var1.name, 'var1') - self.assertEqual(var1.attrs['grid_mapping'], 'geos') - self.assertEqual(var1.attrs['long_name'], 'var1') + assert len(ds.keys()) == 3 + assert set(ds.keys()) == {'var1', 'var2', 'geos'} + + da_var1 = ds['var1'] + da_var2 = ds['var2'] + assert da_var1.name == 'var1' + assert da_var1.attrs['grid_mapping'] == 'geos' + assert da_var1.attrs['long_name'] == 'var1' # variable 2 - self.assertNotIn('grid_mapping', var2.attrs) - 
self.assertEqual(var2.attrs['long_name'], 'variable 2') + assert 'grid_mapping' not in da_var2.attrs + assert da_var2.attrs['long_name'] == 'variable 2' def test_assert_xy_unique(self): """Test that the x and y coordinates are unique.""" @@ -1079,7 +1077,7 @@ def test_area2lonlat(self): [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662] ) lons_ref, lats_ref = area.get_lonlats() - dataarray = xr.DataArray(data=da.from_array(np.arange(3*10*10).reshape(3, 10, 10), chunks=(1, 5, 5)), + dataarray = xr.DataArray(data=da.from_array(np.arange(3 * 10 * 10).reshape(3, 10, 10), chunks=(1, 5, 5)), dims=('bands', 'y', 'x'), attrs={'area': area}) res = area2lonlat(dataarray) @@ -1195,28 +1193,26 @@ def test_has_projection_coords(self): self.datasets['lat'].attrs['standard_name'] = 'dummy' self.assertFalse(has_projection_coords(self.datasets)) - @mock.patch('satpy.writers.cf_writer.CFWriter.__init__', return_value=None) - def test_collect_datasets_with_latitude_named_lat(self, *mocks): + def test_collect_cf_dataarrays_with_latitude_named_lat(self, *mocks): """Test collecting CF datasets with latitude named lat.""" from operator import getitem - from satpy.writers.cf_writer import CFWriter + from satpy.writers.cf_writer import _collect_cf_dataset self.datasets_list = [self.datasets[key] for key in self.datasets] self.datasets_list_no_latlon = [self.datasets[key] for key in ['var1', 'var2']] # Collect datasets - writer = CFWriter() - datas = writer._collect_datasets(self.datasets_list, include_lonlats=True) - datas2 = writer._collect_datasets(self.datasets_list_no_latlon, include_lonlats=True) - # Test results + ds = _collect_cf_dataset(self.datasets_list, include_lonlats=True) + ds2 = _collect_cf_dataset(self.datasets_list_no_latlon, include_lonlats=True) - self.assertEqual(len(datas), 5) - self.assertEqual(set(datas.keys()), {'var1', 'var2', 'lon', 'lat', 'geos'}) - self.assertRaises(KeyError, getitem, datas['var1'], 'latitude') - self.assertRaises(KeyError, getitem, datas['var1'], 'longitude') - self.assertEqual(datas2['var1']['latitude'].attrs['name'], 'latitude') - self.assertEqual(datas2['var1']['longitude'].attrs['name'], 'longitude') + # Test results + self.assertEqual(len(ds.keys()), 5) + self.assertEqual(set(ds.keys()), {'var1', 'var2', 'lon', 'lat', 'geos'}) + self.assertRaises(KeyError, getitem, ds['var1'], 'latitude') + self.assertRaises(KeyError, getitem, ds['var1'], 'longitude') + self.assertEqual(ds2['var1']['latitude'].attrs['name'], 'latitude') + self.assertEqual(ds2['var1']['longitude'].attrs['name'], 'longitude') class EncodingUpdateTest(unittest.TestCase): diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index ab0eb845ea..5b74da07b4 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -422,7 +422,7 @@ def add_time_cf_attrs(ds): return ds -# ###--------------------------------------------------------------------------. +# --------------------------------------------------------------------------. # ### Attributes @@ -786,6 +786,80 @@ def make_cf_dataarray(dataarray, epoch=EPOCH, flatten_attrs=False, return new_data +def _collect_cf_dataset(list_dataarrays, + epoch=EPOCH, + flatten_attrs=False, + exclude_attrs=None, + include_lonlats=True, + pretty=False, + include_orig_name=True, + numeric_name_prefix='CHANNEL_'): + """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.DataArrays. 
+ + Parameters + ---------- + list_dataarrays (list): + List of DataArrays to make CF compliant and merge into a xr.Dataset. + epoch (str): + Reference time for encoding the time coordinates (if available). + Example format: "seconds since 1970-01-01 00:00:00". + If None, the default reference time is retrieved using `from satpy.cf_writer import EPOCH` + flatten_attrs (bool): + If True, flatten dict-type attributes. + exclude_attrs (list): + List of xr.DataArray attribute names to be excluded. + include_lonlats (bool): + If True, it includes 'latitude' and 'longitude' coordinates also for satpy scene defined on an AreaDefinition. + If the 'area' attribute is a SwathDefinition, it always include latitude and longitude coordinates. + pretty (bool): + Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. + include_orig_name (bool). + Include the original dataset name as a variable attribute in the xr.Dataset. + numeric_name_prefix (str): + Prefix to add the each variable with name starting with a digit. + Use '' or None to leave this out. + + Returns + ------- + dict_datarrays : dict + A dictionary of CF-compliant xr.DataArray: {name: xr.DataArray} + """ + ds_collection = {} + for ds in list_dataarrays: + ds_collection.update(get_extra_ds(ds)) + got_lonlats = has_projection_coords(ds_collection) + datas = {} + # sort by name, but don't use the name + for _, ds in sorted(ds_collection.items()): + if ds.dtype not in CF_DTYPES: + warnings.warn( + 'Dtype {} not compatible with {}.'.format(str(ds.dtype), CF_VERSION), + stacklevel=3 + ) + # we may be adding attributes, coordinates, or modifying the + # structure of attributes + ds = ds.copy(deep=True) + try: + new_datasets = area2cf(ds, strict=include_lonlats, got_lonlats=got_lonlats) + except KeyError: + new_datasets = [ds] + for new_ds in new_datasets: + new_var = make_cf_dataarray(new_ds, epoch=epoch, + flatten_attrs=flatten_attrs, + exclude_attrs=exclude_attrs, + include_orig_name=include_orig_name, + numeric_name_prefix=numeric_name_prefix) + datas[new_var.name] = new_var + + # Check and prepare coordinates + assert_xy_unique(datas) + link_coords(datas) + datas = make_alt_coords_unique(datas, pretty=pretty) + + ds = xr.Dataset(datas) + return ds + + def collect_cf_datasets(list_dataarrays, header_attrs=None, exclude_attrs=None, @@ -803,10 +877,8 @@ def collect_cf_datasets(list_dataarrays, Parameters ---------- - datasets (list): - List of Satpy Scene datasets to include in the output xr.Dataset. - The list must include either dataset names or DataIDs. - If None (the default), it include all loaded Scene datasets. + list_dataarrays (list): + List of DataArrays to make CF compliant and merge into groups of xr.Datasets. header_attrs: Global attributes of the output xr.Dataset. 
epoch (str): @@ -867,8 +939,8 @@ def collect_cf_datasets(list_dataarrays, # --> If no groups (groups=None) --> group_name=None grouped_datasets = {} for group_name, group_dataarrays in grouped_dataarrays.items(): - dict_datarrays = CFWriter._collect_datasets( - datasets=group_dataarrays, + ds = _collect_cf_dataset( + list_dataarrays=group_dataarrays, epoch=epoch, flatten_attrs=flatten_attrs, exclude_attrs=exclude_attrs, @@ -876,7 +948,6 @@ def collect_cf_datasets(list_dataarrays, pretty=pretty, include_orig_name=include_orig_name, numeric_name_prefix=numeric_name_prefix) - ds = xr.Dataset(dict_datarrays) # If no groups, add global header to xr.Dataset if not is_grouped: @@ -1028,46 +1099,6 @@ def update_encoding(dataset, to_netcdf_kwargs): DeprecationWarning, stacklevel=3) return update_encoding(dataset, to_netcdf_kwargs) - @staticmethod - def _collect_datasets(datasets, epoch=EPOCH, flatten_attrs=False, - exclude_attrs=None, include_lonlats=True, - pretty=False, - include_orig_name=True, numeric_name_prefix='CHANNEL_'): - """Collect and prepare datasets to be written.""" - ds_collection = {} - for ds in datasets: - ds_collection.update(get_extra_ds(ds)) - got_lonlats = has_projection_coords(ds_collection) - datas = {} - # sort by name, but don't use the name - for _, ds in sorted(ds_collection.items()): - if ds.dtype not in CF_DTYPES: - warnings.warn( - 'Dtype {} not compatible with {}.'.format(str(ds.dtype), CF_VERSION), - stacklevel=3 - ) - # we may be adding attributes, coordinates, or modifying the - # structure of attributes - ds = ds.copy(deep=True) - try: - new_datasets = area2cf(ds, strict=include_lonlats, got_lonlats=got_lonlats) - except KeyError: - new_datasets = [ds] - for new_ds in new_datasets: - new_var = make_cf_dataarray(new_ds, epoch=epoch, - flatten_attrs=flatten_attrs, - exclude_attrs=exclude_attrs, - include_orig_name=include_orig_name, - numeric_name_prefix=numeric_name_prefix) - datas[new_var.name] = new_var - - # Check and prepare coordinates - assert_xy_unique(datas) - link_coords(datas) - datas = make_alt_coords_unique(datas, pretty=pretty) - - return datas - def save_dataset(self, dataset, filename=None, fill_value=None, **kwargs): """Save the *dataset* to a given *filename*.""" return self.save_datasets([dataset], filename, **kwargs) @@ -1158,11 +1189,13 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, for group_name, ds in grouped_datasets.items(): # Update encoding encoding, other_to_netcdf_kwargs = update_encoding(ds, - to_netcdf_kwargs, - numeric_name_prefix) + to_netcdf_kwargs=to_netcdf_kwargs, + numeric_name_prefix=numeric_name_prefix) # Write (append) dataset - res = ds.to_netcdf(filename, engine=engine, - group=group_name, mode=mode, + res = ds.to_netcdf(filename, + engine=engine, + group=group_name, + mode=mode, encoding=encoding, **other_to_netcdf_kwargs) written.append(res) From abe05317eed99990343d18878b6eac9ef67945e2 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 24 May 2023 13:31:46 +0000 Subject: [PATCH 0162/1416] Refactor tests --- .../tests/reader_tests/test_gms5_vissr_l1b.py | 121 +++++++++--------- 1 file changed, 60 insertions(+), 61 deletions(-) diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index 416ded7e36..9c4a237442 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -529,11 +529,11 @@ def image_data(self, dataset_id): ]) dtype = np.dtype([('LCW', 
line_control_word), ('image_data', vissr.U1, (2,))]) - if dataset_id['name'] == 'IR1': - return np.array([((686, 50000), (0, 1)), ((2089, 50000), (2, 3))], dtype=dtype) - elif dataset_id['name'] == 'VIS': - return np.array([((2744, 50000), (0, 1)), ((8356, 50000), (2, 3))], dtype=dtype) - raise NotImplementedError + cases = { + "IR1": np.array([((686, 50000), (0, 1)), ((2089, 50000), (2, 3))], dtype=dtype), + "VIS": np.array([((2744, 50000), (0, 1)), ((8356, 50000), (2, 3))], dtype=dtype) + } + return cases[dataset_id["name"]] @pytest.fixture def header(self, control_block, image_params): @@ -744,45 +744,45 @@ def simple_coordinate_conversion_table(self): @pytest.fixture def dataset_exp(self, dataset_id, lons_lats_exp): - - # TODO: Use dictionary - lons, lats = lons_lats_exp - if dataset_id["calibration"] == "counts": - return xr.DataArray( - [[0, 1], [2, 3]], - dims=('y', 'x'), - coords={ - "lon": lons, - "lat": lats, - 'acq_time': ('y', [dt.datetime(1995, 10, 10), - dt.datetime(1995, 10, 10)]), - 'line_number': ('y', [686, 2089]) - } - ) - elif dataset_id["name"] == "VIS": - return xr.DataArray( - [[0, 0.25], [0.5, 1]], - dims=('y', 'x'), - coords={ - "lon": lons, - "lat": lats, - 'acq_time': ('y', [dt.datetime(1995, 10, 10), dt.datetime(1995, 10, 10)]), - 'line_number': ('y', [2744, 8356]) - } - ) - elif dataset_id["name"] == "IR1": - return xr.DataArray( - [[0, 100], [200, 300]], - dims=('y', 'x'), - coords={ - "lon": lons, - "lat": lats, - 'acq_time': ('y', [dt.datetime(1995, 10, 10), dt.datetime(1995, 10, 10)]), - 'line_number': ('y', [686, 2089]) - } - ) - raise NotImplementedError + ir1_counts = xr.DataArray( + [[0, 1], [2, 3]], + dims=('y', 'x'), + coords={ + "lon": lons, + "lat": lats, + 'acq_time': ('y', [dt.datetime(1995, 10, 10), + dt.datetime(1995, 10, 10)]), + 'line_number': ('y', [686, 2089]) + } + ) + ir1_bt = xr.DataArray( + [[0, 100], [200, 300]], + dims=('y', 'x'), + coords={ + "lon": lons, + "lat": lats, + 'acq_time': ( + 'y', [dt.datetime(1995, 10, 10), dt.datetime(1995, 10, 10)]), + 'line_number': ('y', [686, 2089]) + } + ) + vis_refl = xr.DataArray( + [[0, 0.25], [0.5, 1]], + dims=('y', 'x'), + coords={ + "lon": lons, + "lat": lats, + 'acq_time': ('y', [dt.datetime(1995, 10, 10), dt.datetime(1995, 10, 10)]), + 'line_number': ('y', [2744, 8356]) + } + ) + expectations = { + make_dataid(name="IR1", calibration="counts"): ir1_counts, + make_dataid(name="IR1", calibration="brightness_temperature"): ir1_bt, + make_dataid(name="VIS", calibration="reflectance"): vis_refl + } + return expectations[dataset_id] @pytest.fixture def lons_lats_exp(self, dataset_id): @@ -801,24 +801,23 @@ def lons_lats_exp(self, dataset_id): pix = [1672, 1672, 1673, 1673] lin = [686, 2089, 686, 2089] """ - - # TODO: Use dictionary - - - if dataset_id['name'] == 'IR1': - lons = [[139.680120, 139.718902], - [140.307367, 140.346062]] - lats = [[35.045132, 35.045361], - [-34.971012, -34.970738]] - elif dataset_id['name'] == 'VIS': - lons = [[139.665133, 139.674833], - [140.292579, 140.302249]] - lats = [[35.076113, 35.076170], - [-34.940439, -34.940370]] - else: - raise NotImplementedError - lons = xr.DataArray(lons, dims=("y", "x")) - lats = xr.DataArray(lats, dims=("y", "x")) + expectations = { + "IR1": { + "lons": [[139.680120, 139.718902], + [140.307367, 140.346062]], + "lats": [[35.045132, 35.045361], + [-34.971012, -34.970738]] + }, + "VIS": { + "lons": [[139.665133, 139.674833], + [140.292579, 140.302249]], + "lats": [[35.076113, 35.076170], + [-34.940439, -34.940370]] + } + } + exp = 
expectations[dataset_id["name"]]
+        lons = xr.DataArray(exp["lons"], dims=("y", "x"))
+        lats = xr.DataArray(exp["lats"], dims=("y", "x"))
         return lons, lats
 

From 94c2542f1bbe4d37812b95790c14199deb218e3c Mon Sep 17 00:00:00 2001
From: ghiggi
Date: Wed, 24 May 2023 16:00:46 +0200
Subject: [PATCH 0163/1416] Refactor of collect_cf_dataset

---
 satpy/writers/cf_writer.py | 136 +++++++++++++++++++++++++++----------
 1 file changed, 102 insertions(+), 34 deletions(-)

diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py
index 5b74da07b4..d57f63ea16 100644
--- a/satpy/writers/cf_writer.py
+++ b/satpy/writers/cf_writer.py
@@ -301,12 +301,16 @@ def has_projection_coords(ds_collection):
 def make_alt_coords_unique(datas, pretty=False):
     """Make non-dimensional coordinates unique among all datasets.
 
-    Non-dimensional (or alternative) coordinates, such as scanline timestamps, may occur in multiple datasets with
-    the same name and dimension but different values. In order to avoid conflicts, prepend the dataset name to the
-    coordinate name. If a non-dimensional coordinate is unique among all datasets and ``pretty=True``, its name will not
-    be modified.
+    Non-dimensional (or alternative) coordinates, such as scanline timestamps,
+    may occur in multiple datasets with the same name and dimension
+    but different values.
 
-    Since all datasets must have the same projection coordinates, this is not applied to latitude and longitude.
+    In order to avoid conflicts, prepend the dataset name to the coordinate name.
+    If a non-dimensional coordinate is unique among all datasets and ``pretty=True``,
+    its name will not be modified.
+
+    Since all datasets must have the same projection coordinates,
+    this is not applied to latitude and longitude.
 
     Args:
         datas (dict):
@@ -794,7 +798,7 @@ def _collect_cf_dataset(list_dataarrays,
                         pretty=False,
                         include_orig_name=True,
                         numeric_name_prefix='CHANNEL_'):
-    """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.DataArrays.
+    """Process a list of xr.DataArray and return a CF-compliant xr.Dataset.
 
     Parameters
     ----------
@@ -821,44 +825,108 @@ def _collect_cf_dataset(list_dataarrays,
 
     Returns
     -------
-    dict_dataarrays : dict
-        A dictionary of CF-compliant xr.DataArray: {name: xr.DataArray}
+    ds : xr.Dataset
+        A partially CF-compliant xr.Dataset
     """
+    # Create a dictionary of the input dataarrays
+    # --> Since keys=None, it never retrieves ancillary variables!
     ds_collection = {}
-    for ds in list_dataarrays:
-        ds_collection.update(get_extra_ds(ds))
+    for dataarray in list_dataarrays:
+        ds_collection.update(get_extra_ds(dataarray))
+
+    # Check whether projection coordinates (lon/lat) are already present
    got_lonlats = has_projection_coords(ds_collection)
-    datas = {}
-    # sort by name, but don't use the name
-    for _, ds in sorted(ds_collection.items()):
-        if ds.dtype not in CF_DTYPES:
+
+    # Sort the dictionary by keys
+    ds_collection = dict(sorted(ds_collection.items()))
+
+    dict_dataarrays = {}
+    for dataarray in ds_collection.values():
+        dataarray_type = dataarray.dtype
+        if dataarray_type not in CF_DTYPES:
             warnings.warn(
-                'Dtype {} not compatible with {}.'.format(str(ds.dtype), CF_VERSION),
+                f'dtype {dataarray_type} not compatible with {CF_VERSION}.',
                 stacklevel=3
             )
-        # we may be adding attributes, coordinates, or modifying the
-        # structure of attributes
-        ds = ds.copy(deep=True)
+        # Deep copy the dataarray, since attributes and coordinates will be added/modified
+        dataarray = dataarray.copy(deep=True)
+
+        # Add CF-compliant area information from the pyresample area
+        # - If include_lonlats=True, add latitude and longitude coordinates
+        # - Add grid_mapping attribute to the DataArray
+        # - Return the CRS DataArray as first list element
+        # - Return the CF-compliant input DataArray as second list element
         try:
-            new_datasets = area2cf(ds, strict=include_lonlats, got_lonlats=got_lonlats)
+            list_new_dataarrays = area2cf(dataarray,
+                                          strict=include_lonlats,
+                                          got_lonlats=got_lonlats)
         except KeyError:
-            new_datasets = [ds]
+            list_new_dataarrays = [dataarray]
+
+        # Ensure each DataArray is CF-compliant
+        # --> NOTE: Here the CRS DataArray is repeatedly overwritten
+        # --> NOTE: If the input list_dataarrays contains different pyresample areas with the same name,
+        #           the area information can be lost here!
+        for new_dataarray in list_new_dataarrays:
+            new_dataarray = make_cf_dataarray(new_dataarray,
+                                              epoch=epoch,
+                                              flatten_attrs=flatten_attrs,
+                                              exclude_attrs=exclude_attrs,
+                                              include_orig_name=include_orig_name,
+                                              numeric_name_prefix=numeric_name_prefix)
+            dict_dataarrays[new_dataarray.name] = new_dataarray
+
+    # Check that all DataArrays have the same size
+    assert_xy_unique(dict_dataarrays)
+
+    # Deal with the 'coordinates' attributes indicating lat/lon coords
+    # NOTE: this is currently dropped by default!
+    link_coords(dict_dataarrays)
+
+    # Ensure non-dimensional coordinates are unique across DataArrays
+    # --> If not unique, prepend the DataArray name to the coordinate
+    # --> If unique, the DataArray name is prepended unless pretty=True
+    # --> 'longitude' and 'latitude' coordinates are not prepended
+    dict_dataarrays = make_alt_coords_unique(dict_dataarrays, pretty=pretty)
+    # Create a xr.Dataset
     ds = xr.Dataset(dict_dataarrays)
     return ds

+    # ds_collection = {}
+    # for dataarray in list_dataarrays:
+    #     ds_collection.update(get_extra_ds(dataarray))
+    # got_lonlats = has_projection_coords(ds_collection)
+    # datas = {}
+    # # sort by name, but don't use the name
+    # for _, ds in sorted(ds_collection.items()):
+    #     if ds.dtype not in CF_DTYPES:
+    #         warnings.warn(
+    #             'Dtype {} not compatible with {}.'.format(str(ds.dtype), CF_VERSION),
+    #             stacklevel=3
+    #         )
+    #     # we may be adding attributes, coordinates, or modifying the
+    #     # structure of attributes
+    #     ds = ds.copy(deep=True)
+    #     try:
+    #         new_datasets = area2cf(ds, strict=include_lonlats, got_lonlats=got_lonlats)
+    #     except KeyError:
+    #         new_datasets = [ds]
+    #     for new_ds in new_datasets:
+    #         new_var = make_cf_dataarray(new_ds, epoch=epoch,
+    #                                     flatten_attrs=flatten_attrs,
+    #                                     exclude_attrs=exclude_attrs,
+    #                                     include_orig_name=include_orig_name,
+    #                                     numeric_name_prefix=numeric_name_prefix)
+    #         datas[new_var.name] = new_var

+    # # Check and prepare coordinates
+    # assert_xy_unique(datas)
+    # link_coords(datas)
+    # datas = make_alt_coords_unique(datas, pretty=pretty)

+    # ds = xr.Dataset(datas)
+    # return ds


From 54f992c9a15a9ba6502db88154a7ceb6ff17af8c Mon Sep 17 00:00:00 2001
From: ghiggi
Date: Wed, 24 May 2023 16:03:36 +0200
Subject: [PATCH 0164/1416] Refactor of get_extra_ds (ancillary variables)

---
 satpy/writers/cf_writer.py | 51 +++++++-------------------------
 1 file changed, 9 insertions(+), 42 deletions(-)

diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py
index d57f63ea16..fb42fabb2a 100644
--- a/satpy/writers/cf_writer.py
+++ b/satpy/writers/cf_writer.py
@@ -226,15 +226,17 @@ def create_grid_mapping(area):
     return area.area_id, grid_mapping
 
 
-def get_extra_ds(dataset, keys=None):
+def get_extra_ds(dataarray, keys=None):
     """Get the extra datasets associated to *dataset*."""
     ds_collection = {}
-    for ds in dataset.attrs.get('ancillary_variables', []):
-        if keys and ds.name not in keys:
-            keys.append(ds.name)
-            ds_collection.update(get_extra_ds(ds, keys))
-    ds_collection[dataset.attrs['name']] = dataset
-
+    # Retrieve ancillary variable dataarrays
+    for ancillary_dataarray in dataarray.attrs.get('ancillary_variables', []):
+        ancillary_variable = ancillary_dataarray.name
+        if keys and ancillary_variable not in keys:
+            keys.append(ancillary_variable)
+            ds_collection.update(get_extra_ds(ancillary_dataarray, keys=keys))
+    # Add input dataarray
+    ds_collection[dataarray.attrs['name']] = dataarray
     return ds_collection
 
 
@@ -892,41 +894,6 @@ def _collect_cf_dataset(list_dataarrays,
     ds = xr.Dataset(dict_dataarrays)
     return ds
 
-    # ds_collection = {}
-    # for dataarray in list_dataarrays:
-    #     ds_collection.update(get_extra_ds(dataarray))
-    # got_lonlats = has_projection_coords(ds_collection)
-    # datas = {}
-    # # sort by name, but don't use the name
-    # for _, ds in sorted(ds_collection.items()):
-    #     if ds.dtype not in CF_DTYPES:
-    #         warnings.warn(
-    #             'Dtype {} not compatible with {}.'.format(str(ds.dtype), CF_VERSION),
-    #             stacklevel=3
-    #         )
-    #     # # we may 
be adding attributes, coordinates, or modifying the
-    #     # structure of attributes
-    #     ds = ds.copy(deep=True)
-    #     try:
-    #         new_datasets = area2cf(ds, strict=include_lonlats, got_lonlats=got_lonlats)
-    #     except KeyError:
-    #         new_datasets = [ds]
-    #     for new_ds in new_datasets:
-    #         new_var = make_cf_dataarray(new_ds, epoch=epoch,
-    #                                     flatten_attrs=flatten_attrs,
-    #                                     exclude_attrs=exclude_attrs,
-    #                                     include_orig_name=include_orig_name,
-    #                                     numeric_name_prefix=numeric_name_prefix)
-    #         datas[new_var.name] = new_var

-    # # Check and prepare coordinates
-    # assert_xy_unique(datas)
-    # link_coords(datas)
-    # datas = make_alt_coords_unique(datas, pretty=pretty)

-    # ds = xr.Dataset(datas)
-    # return ds
-

From faf4eb9c81d0d5b38fde196f0070f6d16c08f40a Mon Sep 17 00:00:00 2001
From: ghiggi
Date: Wed, 24 May 2023 16:16:05 +0200
Subject: [PATCH 0165/1416] Refactor of encode_time

---
 satpy/writers/cf_writer.py | 58 +++++++++++++++++++++++++-------------
 1 file changed, 39 insertions(+), 19 deletions(-)

diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py
index fb42fabb2a..7617116e36 100644
--- a/satpy/writers/cf_writer.py
+++ b/satpy/writers/cf_writer.py
@@ -428,6 +428,24 @@ def add_time_cf_attrs(ds):
     return ds
 
 
+def _encode_time(dataarray, epoch):
+    """Add encodings to the 'time' coordinate."""
+    if 'time' in dataarray.coords:
+        dataarray['time'].encoding['units'] = epoch
+        dataarray['time'].attrs['standard_name'] = 'time'
+        dataarray['time'].attrs.pop('bounds', None)
+        dataarray = _add_time_dimension(dataarray)
+    return dataarray
+
+
+def _add_time_dimension(dataarray):
+    """Add 'time' dimension to the DataArray."""
+    # BUG: The 'time' dimension is not added when the number of timesteps
+    #      equals the size of y or x
+    if 'time' not in dataarray.dims and dataarray["time"].size not in dataarray.shape:
+        dataarray = dataarray.expand_dims('time')
+    return dataarray
+
+
 # --------------------------------------------------------------------------.
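The effect of the two new helpers is easiest to see on a toy array. The following is a minimal sketch of the intended behaviour, not part of the patch itself (only ``xarray`` and ``numpy`` are assumed; the array shape, timestamp and epoch string are made up for illustration):

.. code-block:: python

    import numpy as np
    import xarray as xr

    dataarray = xr.DataArray(
        np.zeros((2, 2)),
        dims=("y", "x"),
        coords={"time": np.datetime64("1995-10-10T00:00:00")},
    )

    # Mirrors _encode_time: pin the reference epoch and the standard_name
    dataarray["time"].encoding["units"] = "seconds since 1970-01-01 00:00:00"
    dataarray["time"].attrs["standard_name"] = "time"

    # Mirrors _add_time_dimension: promote the scalar coordinate to a dimension
    if "time" not in dataarray.dims and dataarray["time"].size not in dataarray.shape:
        dataarray = dataarray.expand_dims("time")

    print(dataarray.dims)  # ('time', 'y', 'x')

Note that the guard reproduces the quirk flagged by the ``BUG`` comment above: if the number of timesteps happens to equal the size of ``y`` or ``x``, the ``time`` dimension is not added.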
# ### Attributes @@ -755,41 +773,42 @@ def make_cf_dataarray(dataarray, epoch=EPOCH, flatten_attrs=False, if exclude_attrs is None: exclude_attrs = [] - new_data = _preprocess_dataarray_name(dataarray=dataarray, - numeric_name_prefix=numeric_name_prefix, - include_orig_name=include_orig_name) + dataarray = _preprocess_dataarray_name(dataarray=dataarray, + numeric_name_prefix=numeric_name_prefix, + include_orig_name=include_orig_name) - new_data = _remove_satpy_attributes(new_data) + dataarray = _remove_satpy_attributes(dataarray) - new_data = CFWriter._encode_time(new_data, epoch) - new_data = CFWriter._encode_coords(new_data) + dataarray = _encode_time(dataarray, epoch=epoch) + dataarray = CFWriter._encode_coords(dataarray) # Remove area as well as user-defined attributes for key in ['area'] + exclude_attrs: - new_data.attrs.pop(key, None) + dataarray.attrs.pop(key, None) - anc = [ds.attrs['name'] - for ds in new_data.attrs.get('ancillary_variables', [])] - if anc: - new_data.attrs['ancillary_variables'] = ' '.join(anc) + # Retrieve list of ancillary variables + list_ancillary_variables = [da_ancillary.attrs['name'] + for da_ancillary in dataarray.attrs.get('ancillary_variables', [])] + if list_ancillary_variables: + dataarray.attrs['ancillary_variables'] = ' '.join(list_ancillary_variables) # TODO: make this a grid mapping or lon/lats # new_data.attrs['area'] = str(new_data.attrs.get('area')) - CFWriter._cleanup_attrs(new_data) + CFWriter._cleanup_attrs(dataarray) - if 'long_name' not in new_data.attrs and 'standard_name' not in new_data.attrs: - new_data.attrs['long_name'] = new_data.name - if 'prerequisites' in new_data.attrs: - new_data.attrs['prerequisites'] = [np.string_(str(prereq)) for prereq in new_data.attrs['prerequisites']] + if 'long_name' not in dataarray.attrs and 'standard_name' not in dataarray.attrs: + dataarray.attrs['long_name'] = dataarray.name + if 'prerequisites' in dataarray.attrs: + dataarray.attrs['prerequisites'] = [np.string_(str(prereq)) for prereq in dataarray.attrs['prerequisites']] # Flatten dict-type attributes, if desired if flatten_attrs: - new_data.attrs = flatten_dict(new_data.attrs) + dataarray.attrs = flatten_dict(dataarray.attrs) # Encode attributes to netcdf-compatible datatype - new_data.attrs = encode_attrs_nc(new_data.attrs) + dataarray.attrs = encode_attrs_nc(dataarray.attrs) - return new_data + return dataarray def _collect_cf_dataset(list_dataarrays, @@ -890,6 +909,7 @@ def _collect_cf_dataset(list_dataarrays, # --> If unique, does not prepend the DataArray name only if pretty=True # --> 'longitude' and 'latitude' coordinates are not prepended dict_dataarrays = make_alt_coords_unique(dict_dataarrays, pretty=pretty) + # Create a xr.Dataset ds = xr.Dataset(dict_dataarrays) return ds From 44261aa47dc057373f9ff2abbc100b2a243d24fd Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 24 May 2023 14:14:45 +0000 Subject: [PATCH 0166/1416] Update documentation --- satpy/readers/gms5_vissr_l1b.py | 45 ++++++++++++++++++++++++++++++++- 1 file changed, 44 insertions(+), 1 deletion(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index abaf2e9a83..15c3dd6425 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -6,7 +6,39 @@ Navigation ---------- -TODO + +VISSR images are oversampled and not rectified. 
+ + +Oversampling +~~~~~~~~~~~~ +VISSR oversamples the viewed scene in E-W direction by a factor of ~1.46: +IR/VIS pixels are 14/3.5 urad on a side, but the instrument samples every +9.57/2.39 urad in E-W direction. That means pixels are actually overlapping on +the ground. + +This cannot be represented by a pyresample area definition, so each dataset +is accompanied by 2-dimensional longitude and latitude coordinates. For +resampling purpose an area definition with uniform sampling is provided via +``scene[dataset].attrs["area_def_uniform_sampling"]``. + + +Rectification +~~~~~~~~~~~~~ + +VISSR images are not rectified. That means lon/lat coordinates are different + +1) for all channels of the same repeat cycle, even if their spatial resolution + is identical (IR channels) +2) for different repeat cycles, even if the channel is identical + + +Calibration +----------- + +Sensor counts are calibrated by looking up reflectance/temperature values in the +calibration tables included in each file. + References ---------- @@ -641,6 +673,16 @@ def _get_image_coords(self, data): return lines.astype(np.float64), pixels.astype(np.float64) def _get_static_navigation_params(self, dataset_id): + """Get static navigation parameters. + + Note that, "central_line_number_of_vissr_frame" is different for each + channel, even if their spatial resolution is identical. For example: + + VIS: 5513.0 + IR1: 1378.5 + IR2: 1378.7 + IR3: 1379.1001 + """ alt_ch_name = ALT_CHANNEL_NAMES[dataset_id['name']] mode_block = self._header['image_parameters']['mode'] coord_conv = self._header['image_parameters']['coordinate_conversion'] @@ -669,6 +711,7 @@ def _get_static_navigation_params(self, dataset_id): return scan_params, proj_params def _get_predicted_navigation_params(self): + """Get predictions of time-dependent navigation parameters.""" att_pred = self._header['image_parameters']['attitude_prediction']['data'] orb_pred = self._header['image_parameters']['orbit_prediction']['data'] attitude_prediction = nav.AttitudePrediction( From 7e96ba77cc770599f8438e1acbbd63cd2c40053f Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 24 May 2023 15:02:12 +0000 Subject: [PATCH 0167/1416] Update calibration Replace interpolation with simple lookup --- satpy/readers/gms5_vissr_l1b.py | 23 ++++++++++------------- 1 file changed, 10 insertions(+), 13 deletions(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index 15c3dd6425..60fdcbb595 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -550,7 +550,7 @@ def _get_mda(self): def get_dataset(self, dataset_id, ds_info): counts = self._get_counts() dataset = self._calibrate(counts, dataset_id) - self._attach_coords(dataset, dataset_id) # FIXME + self._attach_coords(dataset, dataset_id) # TODO: Remove return dataset def _get_counts(self): @@ -741,20 +741,19 @@ def _make_lons_lats_data_array(self, lons, lats): class Calibrator: - def __init__(self, calib_table_y): - self.calib_table_y = calib_table_y - self.calib_table_x = np.arange(calib_table_y.size) + def __init__(self, calib_table): + self._calib_table = calib_table def calibrate(self, counts, calibration): if calibration == "counts": return counts - interp = counts.data.map_blocks( - _interpolate_calibration_table, - self.calib_table_x, - self.calib_table_y, + res = da.map_blocks( + self._lookup_calib_table, + counts.data, + calib_table=self._calib_table, dtype=np.float32, ) - return self._make_data_array(interp, counts) + return self._make_data_array(res, 
counts) def _make_data_array(self, interp, counts): return xr.DataArray( @@ -763,7 +762,5 @@ def _make_data_array(self, interp, counts): coords=counts.coords, ) - -def _interpolate_calibration_table(counts, calib_table_x, calib_table_y): - interp = np.interp(counts.ravel(), calib_table_x, calib_table_y) - return interp.reshape(counts.shape) + def _lookup_calib_table(self, counts, calib_table): + return calib_table[counts] From 4aac44387ceb6fb1423dbafae2b1c91380f699a5 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Thu, 25 May 2023 11:58:14 +0800 Subject: [PATCH 0168/1416] Update test_composites.py --- satpy/tests/test_composites.py | 62 ++++++++++++++++++++++++++++++---- 1 file changed, 55 insertions(+), 7 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 40b45c1229..c293d551bf 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -184,11 +184,15 @@ def setUp(self): (-2000, -2000, 2000, 2000)) self.ds4_big = ds4 - def test_bad_color(self): + def test_high_bad_color(self): """Test that only valid band colors can be provided.""" from satpy.composites import RatioSharpenedRGB - self.assertRaises(ValueError, RatioSharpenedRGB, name='true_color', high_resolution_band='bad', - neutral_resolution_band='bad') + self.assertRaises(ValueError, RatioSharpenedRGB, name='true_color', high_resolution_band='bad') + + def test_neutral_bad_color(self): + """Test that only valid band colors can be provided.""" + from satpy.composites import RatioSharpenedRGB + self.assertRaises(ValueError, RatioSharpenedRGB, name='true_color', neutral_resolution_band='bad') def test_match_data_arrays(self): """Test that all areas have to be the same resolution.""" @@ -217,10 +221,10 @@ def test_basic_no_sharpen(self): res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) self.assertEqual(res.shape, (3, 2, 2)) - def test_basic_red(self): - """Test that basic high resolution red can be passed.""" + def test_basic_red_no_neutral(self): + """Test that basic high resolution red with no neutral band can be passed.""" from satpy.composites import RatioSharpenedRGB - comp = RatioSharpenedRGB(name='true_color') + comp = RatioSharpenedRGB(name='true_color', neutral_resolution_band=None) res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) res = res.values self.assertEqual(res.shape, (3, 2, 2)) @@ -229,7 +233,7 @@ def test_basic_red(self): np.testing.assert_allclose(res[2], np.array([[0.8, 0.8], [np.nan, 4.0]], dtype=np.float64)) def test_basic_red_neutral_green(self): - """Test that basic high resolution red can be passed.""" + """Test that basic high resolution red with green neutral band can be passed.""" from satpy.composites import RatioSharpenedRGB comp = RatioSharpenedRGB(name='true_color', neutral_resolution_band='green') res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) @@ -239,6 +243,50 @@ def test_basic_red_neutral_green(self): np.testing.assert_allclose(res[1], np.array([[3.0, 3.0], [np.nan, 3.0]], dtype=np.float64)) np.testing.assert_allclose(res[2], np.array([[0.8, 0.8], [np.nan, 4.0]], dtype=np.float64)) + def test_high_green_no_neutral(self): + """Test that high resolution green with no neutral band can be passed.""" + from satpy.composites import RatioSharpenedRGB + comp = RatioSharpenedRGB(name='true_color', high_resolution_band='green', neutral_resolution_band=None) + res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) + res = res.values + 
self.assertEqual(res.shape, (3, 2, 2)) + np.testing.assert_allclose(res[0], np.array([[5/3, 5/3], [np.nan, 0.0]], dtype=np.float64)) + np.testing.assert_allclose(res[1], np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64)) + np.testing.assert_allclose(res[2], np.array([[4/3, 4/3], [np.nan, 4/3]], dtype=np.float64)) + + def test_high_green_neutral_blue(self): + """Test that high resolution green with blue neutral band can be passed.""" + from satpy.composites import RatioSharpenedRGB + comp = RatioSharpenedRGB(name='true_color', high_resolution_band='green', neutral_resolution_band='blue') + res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) + res = res.values + self.assertEqual(res.shape, (3, 2, 2)) + np.testing.assert_allclose(res[0], np.array([[5/3, 5/3], [np.nan, 0.0]], dtype=np.float64)) + np.testing.assert_allclose(res[1], np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64)) + np.testing.assert_allclose(res[2], np.array([[4.0, 4.0], [np.nan, 4.0]], dtype=np.float64)) + + def test_high_blue_no_neutral(self): + """Test that high resolution blue with no neutral band can be passed.""" + from satpy.composites import RatioSharpenedRGB + comp = RatioSharpenedRGB(name='true_color', high_resolution_band='blue', neutral_resolution_band=None) + res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) + res = res.values + self.assertEqual(res.shape, (3, 2, 2)) + np.testing.assert_allclose(res[0], np.array([[1.25, 1.25], [np.nan, 0.0]], dtype=np.float64)) + np.testing.assert_allclose(res[1], np.array([[0.75, 0.75], [np.nan, 0.75]], dtype=np.float64)) + np.testing.assert_allclose(res[2], np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64)) + + def test_high_blue_neutral_red(self): + """Test that high resolution blue with red neutral band can be passed.""" + from satpy.composites import RatioSharpenedRGB + comp = RatioSharpenedRGB(name='true_color', high_resolution_band='blue', neutral_resolution_band='red') + res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) + res = res.values + self.assertEqual(res.shape, (3, 2, 2)) + np.testing.assert_allclose(res[0], np.array([[5.0, 5.0], [np.nan, 0.0]], dtype=np.float64)) + np.testing.assert_allclose(res[1], np.array([[0.75, 0.75], [np.nan, 0.75]], dtype=np.float64)) + np.testing.assert_allclose(res[2], np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64)) + def test_self_sharpened_no_high_res(self): """Test for exception when no high res band is specified.""" from satpy.composites import SelfSharpenedRGB From 8d9f868ef8199b12e47241c0320e13fe70b54663 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Thu, 25 May 2023 13:46:44 +0800 Subject: [PATCH 0169/1416] Update __init__.py --- satpy/composites/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 95045c7da1..57fe4bb9ea 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1047,7 +1047,8 @@ class RatioSharpenedRGB(GenericCompositor): R_hi - 500m resolution - shape=(4000, 4000) To avoid the green band getting involved in calculating ratio or sharpening, - specify it by "neutral_resolution_band: green" in YAML config file. Then:: + specify it by "neutral_resolution_band: green" in YAML config file. 
Therefore,
+    only the blue band will get sharpened::
 
         ratio = R_hi / R_lo
         new_R = R_hi

From 1c146ac76219e52b6cc07939eecad282ffb1a554 Mon Sep 17 00:00:00 2001
From: ghiggi
Date: Thu, 25 May 2023 08:12:40 +0200
Subject: [PATCH 0170/1416] Fix symlinks

---
 README                                | 1 +
 doc/source/dev_guide/CONTRIBUTING.rst | 1 +
 2 files changed, 2 insertions(+)
 create mode 120000 README
 create mode 120000 doc/source/dev_guide/CONTRIBUTING.rst

diff --git a/README b/README
new file mode 120000
index 0000000000..92cacd2853
--- /dev/null
+++ b/README
@@ -0,0 +1 @@
+README.rst
\ No newline at end of file
diff --git a/doc/source/dev_guide/CONTRIBUTING.rst b/doc/source/dev_guide/CONTRIBUTING.rst
new file mode 120000
index 0000000000..ac9338fc25
--- /dev/null
+++ b/doc/source/dev_guide/CONTRIBUTING.rst
@@ -0,0 +1 @@
+../../../CONTRIBUTING.rst
\ No newline at end of file

From ec911dbe090691246431bcf81a588bf811f47f9f Mon Sep 17 00:00:00 2001
From: Panu Lahtinen
Date: Thu, 25 May 2023 10:17:29 +0300
Subject: [PATCH 0171/1416] Fix syntax error adding an item to a list

---
 satpy/tests/multiscene_tests/test_blend.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/satpy/tests/multiscene_tests/test_blend.py b/satpy/tests/multiscene_tests/test_blend.py
index 2fca990f21..7140d98c8e 100644
--- a/satpy/tests/multiscene_tests/test_blend.py
+++ b/satpy/tests/multiscene_tests/test_blend.py
@@ -429,6 +429,6 @@ def test_extra_datasets(self, nominal_data, expected_result):
         da4 = xr.DataArray([0, 0, 1],
                            attrs={'start_time': datetime(2023, 5, 22, 12, 0, 0)})
 
-        res = temporal_rgb(nominal_data + da4)
+        res = temporal_rgb(nominal_data + [da4,])
 
         self._assert_results(res, nominal_data[-1].attrs['start_time'], expected_result)

From 0eb2e03acd041ccddc5b862abe954d1fa0901645 Mon Sep 17 00:00:00 2001
From: ghiggi
Date: Thu, 25 May 2023 09:32:53 +0200
Subject: [PATCH 0172/1416] Test passing

---
 satpy/tests/scene_tests/test_conversions.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/satpy/tests/scene_tests/test_conversions.py b/satpy/tests/scene_tests/test_conversions.py
index 35e02ddc1f..681a4ccdb0 100644
--- a/satpy/tests/scene_tests/test_conversions.py
+++ b/satpy/tests/scene_tests/test_conversions.py
@@ -112,7 +112,7 @@ def test_to_xarray_with_multiple_area_scene(self):
         scn['var2'] = data_array2
 
         # TODO: in future adapt for DataTree implementation
-        with pytest.raises(NotImplementedError):
+        with pytest.raises(ValueError):
             _ = scn.to_xarray()
 
     def test_geoviews_basic_with_area(self):

From c5845b90dcea266c5ee9569e8041b18682965a2d Mon Sep 17 00:00:00 2001
From: Stephan Finkensieper
Date: Wed, 24 May 2023 16:56:50 +0000
Subject: [PATCH 0173/1416] Mask space pixels

---
 satpy/readers/gms5_vissr_l1b.py               | 116 +++++++++++++++---
 satpy/readers/gms5_vissr_navigation.py        |   7 +-
 .../tests/reader_tests/test_gms5_vissr_l1b.py |  95 ++++++++++----
 3 files changed, 178 insertions(+), 40 deletions(-)

diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py
index 60fdcbb595..667fd57cb9 100644
--- a/satpy/readers/gms5_vissr_l1b.py
+++ b/satpy/readers/gms5_vissr_l1b.py
@@ -40,6 +40,24 @@ calibration tables included in each file.
 
 
+Space Pixels
+------------
+
+VISSR produces data for pixels outside the Earth disk (i.e., atmospheric limb or
+deep space pixels). By default, these pixels are masked out as they contain
+data of limited or no value, but some applications do require these pixels.
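Internally, the masking amounts to an ``xarray`` ``where`` over a per-scanline earth mask derived from the line control words (see the ``SpaceMasker`` helper added further down in this patch). A minimal sketch with made-up values, for illustration only:

.. code-block:: python

    import numpy as np
    import xarray as xr

    # Hypothetical earth mask: 1 = pixel on the earth disk, 0 = space pixel;
    # the second scanline does not intersect the earth at all.
    earth_mask = xr.DataArray([[0, 1, 1, 0],
                               [0, 0, 0, 0]], dims=("y", "x"))
    counts = xr.DataArray(np.arange(8, dtype=np.float32).reshape(2, 4),
                          dims=("y", "x"))

    masked = counts.where(earth_mask).astype(np.float32)  # space pixels become NaN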
+To turn off masking, set ``mask_space=False`` upon scene creation:: + + import satpy + import glob + + filenames = glob.glob("VISSR*.IMG") + scene = satpy.Scene(filenames, + reader="gms5-vissr_l1b", + reader_kwargs={"mask_space": False}) + scene.load(["VIS"]) + + References ---------- @@ -55,6 +73,7 @@ import dask.array as da import numpy as np import xarray as xr +import numba import satpy.readers._geos_area as geos_area import satpy.readers.gms5_vissr_navigation as nav @@ -389,7 +408,7 @@ 'ir1_calibration': { 'dtype': IR_CALIBRATION, 'offset': { - VIS_CHANNEL: 4 * BLOCK_SIZE_VIS, + VIS_CHANNEL: 4 * BLOCK_SIZE_VIS, IR_CHANNEL: 10 * BLOCK_SIZE_IR }, }, @@ -470,12 +489,13 @@ def recarr2dict(arr, preserve=None): class GMS5VISSRFileHandler(BaseFileHandler): - def __init__(self, filename, filename_info, filetype_info): + def __init__(self, filename, filename_info, filetype_info, mask_space=True): super(GMS5VISSRFileHandler, self).__init__(filename, filename_info, filetype_info) self._filename = filename self._filename_info = filename_info self._header, self._channel_type = self._read_header(filename) self._mda = self._get_mda() + self._mask_space = mask_space def _read_header(self, filename): header = {} @@ -515,11 +535,11 @@ def _read_image_param(file_obj, param, channel_type): file_obj.seek(param['offset'][channel_type]) data = np.fromfile(file_obj, dtype=param['dtype'], count=1)[0] return recarr2dict(data, preserve=param.get('preserve')) - + @staticmethod def _concat_orbit_prediction(orb_pred_1, orb_pred_2): """Concatenate orbit prediction data. - + It is split over two image parameter blocks in the header. """ orb_pred = orb_pred_1 @@ -548,15 +568,14 @@ def _get_mda(self): } def get_dataset(self, dataset_id, ds_info): - counts = self._get_counts() + image_data = self._read_image_data() + counts = self._get_counts(image_data) dataset = self._calibrate(counts, dataset_id) - self._attach_coords(dataset, dataset_id) # TODO: Remove + space_masker = SpaceMasker(image_data, dataset_id["name"]) + dataset = self._mask_space_pixels(dataset, space_masker) + self._attach_lons_lats(dataset, dataset_id) return dataset - def _get_counts(self): - image_data = self._read_image_data() - return self._make_counts_data_array(image_data) - def _read_image_data(self): memmap = self._get_memmap() return da.from_array(memmap, chunks=(CHUNK_SIZE,)) @@ -571,6 +590,9 @@ def _get_memmap(self): shape=(num_lines,) ) + def _get_counts(self, image_data): + return self._make_counts_data_array(image_data) + def _make_counts_data_array(self, image_data): return xr.DataArray( image_data['image_data'], @@ -595,7 +617,7 @@ def _calibrate(self, counts, dataset_id): def _get_calibration_table(self, dataset_id): tables = { - "VIS": self._header['image_parameters']['vis_calibration']["vis1_calibration_table"], + "VIS": self._header['image_parameters']['vis_calibration']["vis1_calibration_table"]["brightness_albedo_conversion_table"], "IR1": self._header['image_parameters']['ir1_calibration']["conversion_table_of_equivalent_black_body_temperature"], "IR2": self._header['image_parameters']['ir2_calibration']["conversion_table_of_equivalent_black_body_temperature"], "IR3": self._header['image_parameters']['wv_calibration']["conversion_table_of_equivalent_black_body_temperature"] @@ -650,7 +672,12 @@ def get_area_def_test(self, dsid): area = geos_area.get_area_definition(proj_dict, extent) return area - def _attach_coords(self, dataset, dataset_id): + def _mask_space_pixels(self, dataset, space_masker): + if self._mask_space: + return 
space_masker.mask_space(dataset) + return dataset + + def _attach_lons_lats(self, dataset, dataset_id): lons, lats = self._get_lons_lats(dataset, dataset_id) dataset.coords['lon'] = lons dataset.coords['lat'] = lats @@ -734,9 +761,11 @@ def _get_predicted_navigation_params(self): def _make_lons_lats_data_array(self, lons, lats): lons = xr.DataArray(lons, dims=('y', 'x'), - attrs={'standard_name': 'longitude'}) + attrs={'standard_name': 'longitude', + "units": "degrees_east"}) lats = xr.DataArray(lats, dims=('y', 'x'), - attrs={'standard_name': 'latitude'}) + attrs={'standard_name': 'latitude', + "units": "degrees_north"}) return lons, lats @@ -764,3 +793,62 @@ def _make_data_array(self, interp, counts): def _lookup_calib_table(self, counts, calib_table): return calib_table[counts] + + +class SpaceMasker: + _fill_value = -1 # scanline not intersecting the earth + + def __init__(self, image_data, channel): + self._image_data = image_data + self._channel = channel + self._shape = image_data["image_data"].shape + self._earth_mask = self._get_earth_mask() + + def mask_space(self, dataset): + return dataset.where(self._earth_mask).astype(np.float32) + + def _get_earth_mask(self): + earth_edges = self._get_earth_edges() + return get_earth_mask(self._shape, earth_edges, self._fill_value) + + def _get_earth_edges(self): + west_edges = self._get_earth_edges_per_scan_line("west_side_earth_edge") + east_edges = self._get_earth_edges_per_scan_line("east_side_earth_edge") + return west_edges, east_edges + + def _get_earth_edges_per_scan_line(self, cardinal): + edges = self._image_data["LCW"][cardinal].compute().astype(np.int32) + if self._is_vis_channel(): + edges = self._correct_vis_edges(edges) + return edges + + def _is_vis_channel(self): + return self._channel == "VIS" + + def _correct_vis_edges(self, edges): + """Correct VIS edges. + + VIS data contains earth edges of IR channel. Compensate for that + by scaling with a factor of 4 (1 IR pixel ~ 4 VIS pixels). + """ + return np.where(edges != self._fill_value, edges * 4, edges) + + +@numba.njit +def get_earth_mask(shape, earth_edges, fill_value=-1): + """Get binary mask where 1/0 indicates earth/space. + + Args: + shape: Image shape + earth_edges: First and last earth pixel in each scanline + fill_value: Fill value for scanlines not intersecting the earth. 
+ """ + first_earth_pixels, last_earth_pixels = earth_edges + mask = np.zeros(shape, dtype=np.int8) + for line in range(shape[0]): + first = first_earth_pixels[line] + last = last_earth_pixels[line] + if first == fill_value or last == fill_value: + continue + mask[line, first:last+1] = 1 + return mask diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index 445497380c..213a677a2d 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -710,8 +710,9 @@ def _interpolate_nearest(x, x_sample, y_sample): # TODO """ -Possible acceleration: +- Area def +- Decompression +- Finish Documentation - Call find_enclosing_index only once for all predictions -- cache coordinates -""" \ No newline at end of file +""" diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index 9c4a237442..1e580f3635 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -181,7 +181,6 @@ NAVIGATION_REFERENCE = VIS_NAVIGATION_REFERENCE + IR_NAVIGATION_REFERENCE - class TestSinglePixelNavigation: """Test navigation of a single pixel.""" @@ -298,6 +297,20 @@ def test_get_lons_lats( np.testing.assert_allclose(lats, lats_exp) +class TestEarthMask: + def test_get_earth_mask(self): + first_earth_pixels = np.array([-1, 1, 0, -1]) + last_earth_pixels = np.array([-1, 3, 2, -1]) + mask_exp = np.array( + [[0, 0, 0, 0], + [0, 1, 1, 1], + [1, 1, 1, 0], + [0, 0, 0, 0]] + ) + mask = vissr.get_earth_mask(mask_exp.shape, first_earth_pixels, last_earth_pixels) + np.testing.assert_equal(mask, mask_exp) + + class TestPredictionInterpolation: """Test interpolation of orbit and attitude predictions.""" @@ -486,13 +499,18 @@ def test_get_dataset(self, file_handler, dataset_id, dataset_exp): xr.testing.assert_allclose(dataset.compute(), dataset_exp, atol=1E-6) @pytest.fixture - def file_handler(self, header, dataset_id, image_data): + def file_handler(self, header, dataset_id, mask_space, image_data): channel_type = self.channel_types[dataset_id['name']] with mock.patch('satpy.readers.gms5_vissr_l1b.GMS5VISSRFileHandler._read_header') as _read_header, \ mock.patch('satpy.readers.gms5_vissr_l1b.np.memmap') as memmap: _read_header.return_value = header, channel_type memmap.return_value = image_data - fh = vissr.GMS5VISSRFileHandler('foo', {'foo': 'bar'}, {'foo': 'bar'}) + fh = vissr.GMS5VISSRFileHandler( + 'foo', + {'foo': 'bar'}, + {'foo': 'bar'}, + mask_space=mask_space + ) # Yield instead of return, to make the memmap mock succeed. # See https://stackoverflow.com/a/59045506/5703449 yield fh @@ -505,33 +523,44 @@ def file_handler(self, header, dataset_id, image_data): def dataset_id(self, request): return request.param + @pytest.fixture(params=[True, False]) + def mask_space(self, request): + return request.param + @pytest.fixture def image_data(self, dataset_id): """Get fake image data. Data type: - ((line number, timestamp), (data1, data2)) + ( + (line number, timestamp, west edge, east edge), + (data1, data2, ...) 
+ ) VIS channel: pix = [6688, 6688, 6689, 6689] lin = [2744, 8356, 2744, 8356] + earth mask = [[0, 0], [0, 1]] IR1 channel: pix = [1672, 1672, 1673, 1673] lin = [686, 2089, 686, 2089] + earth mask = [[1, 1], [1, 1]] """ line_control_word = np.dtype([ ('line_number', vissr.I4), ('scan_time', vissr.R8), + ('west_side_earth_edge', vissr.I4), + ('east_side_earth_edge', vissr.I4) ]) dtype = np.dtype([('LCW', line_control_word), ('image_data', vissr.U1, (2,))]) cases = { - "IR1": np.array([((686, 50000), (0, 1)), ((2089, 50000), (2, 3))], dtype=dtype), - "VIS": np.array([((2744, 50000), (0, 1)), ((8356, 50000), (2, 3))], dtype=dtype) + "IR1": np.array([((686, 50000, 0, 1), (0, 1)), ((2089, 50000, 0, 1), (2, 3))], dtype=dtype), + "VIS": np.array([((2744, 50000, -1, -1), (0, 1)), ((8356, 50000, 0, 1), (2, 3))], dtype=dtype) } return cases[dataset_id["name"]] @@ -715,7 +744,9 @@ def orbit_prediction(self): @pytest.fixture def vis_calibration(self): return { - "vis1_calibration_table": np.array([0, 0.25, 0.5, 1]) + "vis1_calibration_table": { + "brightness_albedo_conversion_table": np.array([0, 0.25, 0.5, 1]) + } } @pytest.fixture @@ -743,9 +774,27 @@ def simple_coordinate_conversion_table(self): } @pytest.fixture - def dataset_exp(self, dataset_id, lons_lats_exp): + def vis_refl_exp(self, mask_space, lons_lats_exp): lons, lats = lons_lats_exp - ir1_counts = xr.DataArray( + if mask_space: + data = [[np.nan, np.nan], [0.5, 1]] + else: + data = [[0, 0.25], [0.5, 1]] + return xr.DataArray( + data, + dims=('y', 'x'), + coords={ + "lon": lons, + "lat": lats, + 'acq_time': ('y', [dt.datetime(1995, 10, 10), dt.datetime(1995, 10, 10)]), + 'line_number': ('y', [2744, 8356]) + } + ) + + @pytest.fixture + def ir1_counts_exp(self, lons_lats_exp): + lons, lats = lons_lats_exp + return xr.DataArray( [[0, 1], [2, 3]], dims=('y', 'x'), coords={ @@ -756,7 +805,11 @@ def dataset_exp(self, dataset_id, lons_lats_exp): 'line_number': ('y', [686, 2089]) } ) - ir1_bt = xr.DataArray( + + @pytest.fixture + def ir1_bt_exp(self, lons_lats_exp): + lons, lats = lons_lats_exp + return xr.DataArray( [[0, 100], [200, 300]], dims=('y', 'x'), coords={ @@ -767,20 +820,16 @@ def dataset_exp(self, dataset_id, lons_lats_exp): 'line_number': ('y', [686, 2089]) } ) - vis_refl = xr.DataArray( - [[0, 0.25], [0.5, 1]], - dims=('y', 'x'), - coords={ - "lon": lons, - "lat": lats, - 'acq_time': ('y', [dt.datetime(1995, 10, 10), dt.datetime(1995, 10, 10)]), - 'line_number': ('y', [2744, 8356]) - } - ) + + @pytest.fixture + def dataset_exp(self, dataset_id, ir1_counts_exp, ir1_bt_exp, vis_refl_exp): + ir1_counts_id = make_dataid(name="IR1", calibration="counts") + ir1_bt_id = make_dataid(name="IR1", calibration="brightness_temperature") + vis_refl_id = make_dataid(name="VIS", calibration="reflectance") expectations = { - make_dataid(name="IR1", calibration="counts"): ir1_counts, - make_dataid(name="IR1", calibration="brightness_temperature"): ir1_bt, - make_dataid(name="VIS", calibration="reflectance"): vis_refl + ir1_counts_id: ir1_counts_exp, + ir1_bt_id: ir1_bt_exp, + vis_refl_id: vis_refl_exp } return expectations[dataset_id] From 241fe03c23c7621704f9275e7a86d643a83d4269 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Thu, 25 May 2023 14:25:16 +0200 Subject: [PATCH 0174/1416] Simplify encapsulation --- satpy/scene.py | 22 ++-- satpy/tests/writer_tests/test_cf.py | 18 +-- satpy/writers/cf_writer.py | 170 ++++++++++++++++++---------- 3 files changed, 130 insertions(+), 80 deletions(-) diff --git a/satpy/scene.py b/satpy/scene.py index 
5a999aae39..c0ff90ac8a 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1139,24 +1139,15 @@ def to_xarray(self, if epoch is None: epoch = EPOCH - # Check datasets - # - If None, retrieve all loaded datasets - if isinstance(datasets, str): - datasets = [datasets] + # Get list of DataArrays + # - If datasets=None, retrieve all loaded datasets if datasets is None: datasets = list(self.keys()) # list DataIDs - - # Get list of DataArrays list_dataarrays = self._get_dataarrays_from_identifiers(datasets) # Check that some DataArray could be returned if len(list_dataarrays) == 0: return xr.Dataset() - if not list_dataarrays: - raise RuntimeError("None of the requested datasets have been " - "generated or could not be loaded. Requested " - "composite inputs may need to have matching " - "dimensions (eg. through resampling).") # Collect xr.Dataset for each group grouped_datasets, header_attrs = collect_cf_datasets(list_dataarrays=list_dataarrays, @@ -1178,11 +1169,18 @@ def to_xarray(self, raise NotImplementedError(msg) def _get_dataarrays_from_identifiers(self, identifiers): + """Return a list of DataArray based on a single or list of identifiers. + + An identifier can be a DataID or a string with name of a valid DataID. + """ + if isinstance(identifiers, (str, DataID)): + identifiers = [identifiers] + if identifiers is not None: dataarrays = [self[ds] for ds in identifiers] else: dataarrays = [self._datasets.get(ds) for ds in self._wishlist] - dataarrays = [ds for ds in dataarrays if ds is not None] + dataarrays = [dataarray for dataarray in dataarrays if dataarray is not None] return dataarrays def images(self): diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 3af51e33c2..c0c6ad2342 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -1183,20 +1183,20 @@ def setUp(self): def test_dataset_is_projection_coords(self): """Test the dataset_is_projection_coords function.""" from satpy.writers.cf_writer import dataset_is_projection_coords + self.assertTrue(dataset_is_projection_coords(self.datasets['lat'])) self.assertFalse(dataset_is_projection_coords(self.datasets['var1'])) def test_has_projection_coords(self): """Test the has_projection_coords function.""" from satpy.writers.cf_writer import has_projection_coords + self.assertTrue(has_projection_coords(self.datasets)) self.datasets['lat'].attrs['standard_name'] = 'dummy' self.assertFalse(has_projection_coords(self.datasets)) def test_collect_cf_dataarrays_with_latitude_named_lat(self, *mocks): """Test collecting CF datasets with latitude named lat.""" - from operator import getitem - from satpy.writers.cf_writer import _collect_cf_dataset self.datasets_list = [self.datasets[key] for key in self.datasets] @@ -1207,12 +1207,14 @@ def test_collect_cf_dataarrays_with_latitude_named_lat(self, *mocks): ds2 = _collect_cf_dataset(self.datasets_list_no_latlon, include_lonlats=True) # Test results - self.assertEqual(len(ds.keys()), 5) - self.assertEqual(set(ds.keys()), {'var1', 'var2', 'lon', 'lat', 'geos'}) - self.assertRaises(KeyError, getitem, ds['var1'], 'latitude') - self.assertRaises(KeyError, getitem, ds['var1'], 'longitude') - self.assertEqual(ds2['var1']['latitude'].attrs['name'], 'latitude') - self.assertEqual(ds2['var1']['longitude'].attrs['name'], 'longitude') + assert len(ds.keys()) == 5 + assert set(ds.keys()) == {'var1', 'var2', 'lon', 'lat', 'geos'} + with pytest.raises(KeyError): + ds['var1'].attrs["latitude"] + with pytest.raises(KeyError): + 
ds['var1'].attrs["longitude"]
+        assert ds2['var1']['latitude'].attrs['name'] == 'latitude'
+        assert ds2['var1']['longitude'].attrs['name'] == 'longitude'
 
 
 class EncodingUpdateTest(unittest.TestCase):
diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py
index 7617116e36..bd941b833a 100644
--- a/satpy/writers/cf_writer.py
+++ b/satpy/writers/cf_writer.py
@@ -556,6 +556,88 @@ def encode_attrs_nc(attrs):
     return OrderedDict(encoded_attrs)
 
 
+def _add_ancillary_variables_attrs(dataarray):
+    """Replace ancillary_variables DataArrays with a list of their names."""
+    # Retrieve the names of the ancillary variables DataArrays
+    list_ancillary_variables = [da_ancillary.attrs['name']
+                                for da_ancillary in dataarray.attrs.get('ancillary_variables', [])]
+    # Replace ancillary_variables attribute with the list of names
+    if list_ancillary_variables:
+        dataarray.attrs['ancillary_variables'] = ' '.join(list_ancillary_variables)
+    # If no ancillary_variables, drop the attribute
+    else:
+        dataarray.attrs.pop("ancillary_variables", None)
+    return dataarray
+
+
+def _drop_exclude_attrs(dataarray, exclude_attrs):
+    """Remove user-specified list of attributes."""
+    if exclude_attrs is None:
+        exclude_attrs = []
+    for key in exclude_attrs:
+        dataarray.attrs.pop(key, None)
+    return dataarray
+
+
+def _remove_satpy_attrs(new_data):
+    """Remove _satpy attributes."""
+    # Remove _satpy* attributes
+    satpy_attrs = [key for key in new_data.attrs if key.startswith('_satpy')]
+    for satpy_attr in satpy_attrs:
+        new_data.attrs.pop(satpy_attr)
+    new_data.attrs.pop('_last_resampler', None)
+    return new_data
+
+
+def _format_prerequisites_attrs(dataarray):
+    """Reformat prerequisites attribute value to string."""
+    if 'prerequisites' in dataarray.attrs:
+        dataarray.attrs['prerequisites'] = [np.string_(str(prereq)) for prereq in dataarray.attrs['prerequisites']]
+    return dataarray
+
+
+def _cleanup_attrs(dataarray):
+    """Remove attribute keys with None value."""
+    for key, val in dataarray.attrs.copy().items():
+        if val is None:
+            dataarray.attrs.pop(key)
+    return dataarray
+
+
+def preprocess_datarray_attrs(dataarray, flatten_attrs, exclude_attrs):
+    """Preprocess DataArray attributes to be written into CF-compliant netCDF/Zarr."""
+    # Remove _satpy attributes
+    dataarray = _remove_satpy_attrs(dataarray)
+
+    # Add ancillary_variables attribute
+    dataarray = _add_ancillary_variables_attrs(dataarray)
+
+    # Drop exclude_attrs keys from DataArray attributes
+    dataarray = _drop_exclude_attrs(dataarray, exclude_attrs)
+
+    # Remove 'area' attribute
+    _ = dataarray.attrs.pop("area", None)
+
+    # Set long_name to DataArray name if 'standard_name' attribute not provided
+    if 'long_name' not in dataarray.attrs and 'standard_name' not in dataarray.attrs:
+        dataarray.attrs['long_name'] = dataarray.name
+
+    # Format prerequisites attribute
+    dataarray = _format_prerequisites_attrs(dataarray)
+
+    # Remove attribute keys when their value is None
+    dataarray = _cleanup_attrs(dataarray)
+
+    # If specified, flatten dict-type attributes
+    if flatten_attrs:
+        dataarray.attrs = flatten_dict(dataarray.attrs)
+
+    # Encode attributes to netcdf-compatible datatype
+    dataarray.attrs = encode_attrs_nc(dataarray.attrs)
+
+    return dataarray
+
+
 def preprocess_header_attrs(header_attrs, flatten_attrs=False):
     """Prepare header attributes."""
     # Define file header attributes
@@ -701,16 +783,6 @@ def _preprocess_dataarray_name(dataarray, numeric_name_prefix, include_orig_name
     return dataarray
 
 
-def _remove_satpy_attributes(new_data):
-    """Remove _satpy 
attribute.""" - # Remove _satpy* attributes - satpy_attrs = [key for key in new_data.attrs if key.startswith('_satpy')] - for satpy_attr in satpy_attrs: - new_data.attrs.pop(satpy_attr) - new_data.attrs.pop('_last_resampler', None) - return new_data - - def _add_history(attrs): """Add 'history' attribute to dictionary.""" _history_create = 'Created by pytroll/satpy on {}'.format(datetime.utcnow()) @@ -770,44 +842,15 @@ def make_cf_dataarray(dataarray, epoch=EPOCH, flatten_attrs=False, CF-compliant xr.DataArray. """ - if exclude_attrs is None: - exclude_attrs = [] - dataarray = _preprocess_dataarray_name(dataarray=dataarray, numeric_name_prefix=numeric_name_prefix, include_orig_name=include_orig_name) - - dataarray = _remove_satpy_attributes(dataarray) + dataarray = preprocess_datarray_attrs(dataarray=dataarray, + flatten_attrs=flatten_attrs, + exclude_attrs=exclude_attrs) dataarray = _encode_time(dataarray, epoch=epoch) dataarray = CFWriter._encode_coords(dataarray) - - # Remove area as well as user-defined attributes - for key in ['area'] + exclude_attrs: - dataarray.attrs.pop(key, None) - - # Retrieve list of ancillary variables - list_ancillary_variables = [da_ancillary.attrs['name'] - for da_ancillary in dataarray.attrs.get('ancillary_variables', [])] - if list_ancillary_variables: - dataarray.attrs['ancillary_variables'] = ' '.join(list_ancillary_variables) - - # TODO: make this a grid mapping or lon/lats - # new_data.attrs['area'] = str(new_data.attrs.get('area')) - CFWriter._cleanup_attrs(dataarray) - - if 'long_name' not in dataarray.attrs and 'standard_name' not in dataarray.attrs: - dataarray.attrs['long_name'] = dataarray.name - if 'prerequisites' in dataarray.attrs: - dataarray.attrs['prerequisites'] = [np.string_(str(prereq)) for prereq in dataarray.attrs['prerequisites']] - - # Flatten dict-type attributes, if desired - if flatten_attrs: - dataarray.attrs = flatten_dict(dataarray.attrs) - - # Encode attributes to netcdf-compatible datatype - dataarray.attrs = encode_attrs_nc(dataarray.attrs) - return dataarray @@ -1018,6 +1061,26 @@ def collect_cf_datasets(list_dataarrays, return grouped_datasets, header_attrs +def _sanitize_writer_kwargs(writer_kwargs): + """Remove satpy-specific kwargs.""" + writer_kwargs = copy.deepcopy(writer_kwargs) + satpy_kwargs = ['overlay', 'decorate', 'config_files'] + for kwarg in satpy_kwargs: + writer_kwargs.pop(kwarg, None) + return writer_kwargs + + +def _initialize_root_netcdf(filename, engine, header_attrs, to_netcdf_kwargs): + """Initialize root empty netCDF.""" + root = xr.Dataset({}, attrs=header_attrs) + # - Define init kwargs + init_nc_kwargs = to_netcdf_kwargs.copy() + init_nc_kwargs.pop('encoding', None) # No variables to be encoded at this point + init_nc_kwargs.pop('unlimited_dims', None) + written = [root.to_netcdf(filename, engine=engine, mode='w', **init_nc_kwargs)] + return written + + class CFWriter(Writer): """Writer producing NetCDF/CF compatible datasets.""" @@ -1050,14 +1113,6 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, include_orig_name=include_orig_name, numeric_name_prefix=numeric_name_prefix) - @staticmethod - def _cleanup_attrs(new_data): - for key, val in new_data.attrs.copy().items(): - if val is None: - new_data.attrs.pop(key) - if key == 'ancillary_variables' and val == []: - new_data.attrs.pop(key) - @staticmethod def _encode_coords(new_data): """Encode coordinates.""" @@ -1218,21 +1273,16 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, ) # 
Remove satpy-specific kwargs # - This kwargs can contain encoding dictionary - to_netcdf_kwargs = copy.deepcopy(to_netcdf_kwargs) - satpy_kwargs = ['overlay', 'decorate', 'config_files'] - for kwarg in satpy_kwargs: - to_netcdf_kwargs.pop(kwarg, None) + to_netcdf_kwargs = _sanitize_writer_kwargs(to_netcdf_kwargs) # If writing grouped netCDF, create an empty "root" netCDF file # - Add the global attributes # - All groups will be appended in the for loop below if groups is not None: - root = xr.Dataset({}, attrs=header_attrs) - # - Define init kwargs - init_nc_kwargs = to_netcdf_kwargs.copy() - init_nc_kwargs.pop('encoding', None) # No variables to be encoded at this point - init_nc_kwargs.pop('unlimited_dims', None) - written = [root.to_netcdf(filename, engine=engine, mode='w', **init_nc_kwargs)] + written = _initialize_root_netcdf(filename=filename, + engine=engine, + header_attrs=header_attrs, + to_netcdf_kwargs=to_netcdf_kwargs) mode = "a" else: mode = "w" From 19a3b0ebd8ae3f494bcda9c9a8e554b683fb52e4 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Thu, 25 May 2023 13:28:05 +0000 Subject: [PATCH 0175/1416] Prepare for gzip reading Replace np.memmap with np.frombuffer --- satpy/readers/gms5_vissr_l1b.py | 52 +++++++++++++------ satpy/readers/gms5_vissr_navigation.py | 38 +++++++++++++- .../tests/reader_tests/test_gms5_vissr_l1b.py | 4 +- 3 files changed, 74 insertions(+), 20 deletions(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index 667fd57cb9..f6f203c1e8 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -514,8 +514,12 @@ def _get_channel_type(parameter_block_size): raise ValueError('Cannot determine channel type: Unknown parameter block size.') def _read_control_block(self, file_obj): - ctrl_block = np.fromfile(file_obj, dtype=CONTROL_BLOCK, count=1)[0] - return recarr2dict(ctrl_block) + ctrl_block = read_from_file_obj( + file_obj, + dtype=CONTROL_BLOCK, + count=1 + ) + return recarr2dict(ctrl_block[0]) def _read_image_params(self, file_obj, channel_type): """Read image parameters from the header.""" @@ -532,9 +536,13 @@ def _read_image_params(self, file_obj, channel_type): @staticmethod def _read_image_param(file_obj, param, channel_type): """Read a single image parameter block from the header.""" - file_obj.seek(param['offset'][channel_type]) - data = np.fromfile(file_obj, dtype=param['dtype'], count=1)[0] - return recarr2dict(data, preserve=param.get('preserve')) + image_params = read_from_file_obj( + file_obj, + dtype=param["dtype"], + count=1, + offset=param['offset'][channel_type] + ) + return recarr2dict(image_params[0], preserve=param.get('preserve')) @staticmethod def _concat_orbit_prediction(orb_pred_1, orb_pred_2): @@ -568,7 +576,7 @@ def _get_mda(self): } def get_dataset(self, dataset_id, ds_info): - image_data = self._read_image_data() + image_data = self._get_image_data() counts = self._get_counts(image_data) dataset = self._calibrate(counts, dataset_id) space_masker = SpaceMasker(image_data, dataset_id["name"]) @@ -576,19 +584,23 @@ def get_dataset(self, dataset_id, ds_info): self._attach_lons_lats(dataset, dataset_id) return dataset - def _read_image_data(self): - memmap = self._get_memmap() - return da.from_array(memmap, chunks=(CHUNK_SIZE,)) + def _get_image_data(self): + image_data = self._read_image_data() + return da.from_array(image_data, chunks=(CHUNK_SIZE,)) - def _get_memmap(self): + def _read_image_data(self): num_lines, _ = self._get_actual_shape() - return np.memmap( - 
filename=self._filename, - mode='r', - dtype=IMAGE_DATA[self._channel_type]['dtype'], - offset=IMAGE_DATA[self._channel_type]['offset'], - shape=(num_lines,) - ) + specs = self._get_image_data_type_specs() + with open(self._filename, "rb") as file_obj: + return read_from_file_obj( + file_obj, + dtype=specs["dtype"], + count=num_lines, + offset=specs["offset"] + ) + + def _get_image_data_type_specs(self): + return IMAGE_DATA[self._channel_type] def _get_counts(self, image_data): return self._make_counts_data_array(image_data) @@ -769,6 +781,12 @@ def _make_lons_lats_data_array(self, lons, lats): return lons, lats +def read_from_file_obj(file_obj, dtype, count, offset=0): + file_obj.seek(offset) + data = file_obj.read(dtype.itemsize * count) + return np.frombuffer(data, dtype=dtype, count=count) + + class Calibrator: def __init__(self, calib_table): self._calib_table = calib_table diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index 213a677a2d..b2ebd02c4b 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -710,9 +710,45 @@ def _interpolate_nearest(x, x_sample, y_sample): # TODO """ - +- Orbital parameters - Area def - Decompression - Finish Documentation - Call find_enclosing_index only once for all predictions """ + + +""" + def _read_data_from_file(self): + if self._is_file_like(): + return self._read_file_like() + return self._read_data_from_disk() + + def _is_file_like(self): + return isinstance(self.filename, FSFile) + + def _read_data_from_disk(self): + # For reading the image data, unzip_context is faster than generic_open + dtype, shape = self._get_input_info() + with utils.unzip_context(self.filename) as fn: + with decompressed(fn) if self.compressed else nullcontext(fn) as filename: + return np.fromfile(filename, + offset=self.offset, + dtype=dtype, + count=np.prod(shape)) + + def _read_file_like(self): + # filename is likely to be a file-like object, already in memory + dtype, shape = self._get_input_info() + with utils.generic_open(self.filename, mode="rb") as fp: + no_elements = np.prod(shape) + fp.seek(self.offset) + return np.frombuffer( + fp.read(np.dtype(dtype).itemsize * no_elements), + dtype=np.dtype(dtype), + count=no_elements.item() + ).reshape(shape) + + + +""" diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index 1e580f3635..d641626ebd 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -502,9 +502,9 @@ def test_get_dataset(self, file_handler, dataset_id, dataset_exp): def file_handler(self, header, dataset_id, mask_space, image_data): channel_type = self.channel_types[dataset_id['name']] with mock.patch('satpy.readers.gms5_vissr_l1b.GMS5VISSRFileHandler._read_header') as _read_header, \ - mock.patch('satpy.readers.gms5_vissr_l1b.np.memmap') as memmap: + mock.patch('satpy.readers.gms5_vissr_l1b.GMS5VISSRFileHandler._read_image_data') as _read_image_data: _read_header.return_value = header, channel_type - memmap.return_value = image_data + _read_image_data.return_value = image_data fh = vissr.GMS5VISSRFileHandler( 'foo', {'foo': 'bar'}, From 21bef18059a99f83ce8f38ea998fe6f10433b379 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Thu, 25 May 2023 13:48:39 +0000 Subject: [PATCH 0176/1416] Add support for gzipped files --- satpy/etc/readers/gms5-vissr_l1b.yaml | 5 +++ satpy/readers/gms5_vissr_l1b.py | 47 +++++++++++++++++++++------ 2 files 
changed, 42 insertions(+), 10 deletions(-)

diff --git a/satpy/etc/readers/gms5-vissr_l1b.yaml b/satpy/etc/readers/gms5-vissr_l1b.yaml
index 313beb8928..f37be582a4 100644
--- a/satpy/etc/readers/gms5-vissr_l1b.yaml
+++ b/satpy/etc/readers/gms5-vissr_l1b.yaml
@@ -17,21 +17,26 @@ file_types:
     file_reader: !!python/name:satpy.readers.gms5_vissr_l1b.GMS5VISSRFileHandler
     file_patterns:
       - 'VISSR_{start_time:%Y%m%d_%H%M}_VIS.A.IMG'
+      - 'VISSR_{start_time:%Y%m%d_%H%M}_VIS.A.IMG.gz'

   gms5_vissr_ir1:
     file_reader: !!python/name:satpy.readers.gms5_vissr_l1b.GMS5VISSRFileHandler
     file_patterns:
       - 'VISSR_{start_time:%Y%m%d_%H%M}_IR1.A.IMG'
+      - 'VISSR_{start_time:%Y%m%d_%H%M}_IR1.A.IMG.gz'

   gms5_vissr_ir2:
     file_reader: !!python/name:satpy.readers.gms5_vissr_l1b.GMS5VISSRFileHandler
     file_patterns:
       - 'VISSR_{start_time:%Y%m%d_%H%M}_IR2.A.IMG'
+      - 'VISSR_{start_time:%Y%m%d_%H%M}_IR2.A.IMG.gz'
+
   gms5_vissr_ir3:
     file_reader: !!python/name:satpy.readers.gms5_vissr_l1b.GMS5VISSRFileHandler
     file_patterns:
       - 'VISSR_{start_time:%Y%m%d_%H%M}_IR3.A.IMG'
+      - 'VISSR_{start_time:%Y%m%d_%H%M}_IR3.A.IMG.gz'

 datasets:

diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py
index f6f203c1e8..f99417023c 100644
--- a/satpy/readers/gms5_vissr_l1b.py
+++ b/satpy/readers/gms5_vissr_l1b.py
@@ -4,6 +4,33 @@
 ------------

 TODO
+
+Compression
+-----------
+
+Gzip-compressed VISSR files can be decompressed on the fly using
+:class:`~satpy.readers.FSFile`:
+
+.. code-block:: python
+
+    import fsspec
+    from satpy import Scene
+    from satpy.readers import FSFile
+
+    filename = "VISSR_19960217_2331_IR1.A.IMG.gz"
+    open_file = fsspec.open(filename, compression="gzip")
+    fs_file = FSFile(open_file)
+    scene = Scene([fs_file], reader="gms5-vissr_l1b")
+    scene.load(["IR1"])
+
+
+Calibration
+-----------
+
+Sensor counts are calibrated by looking up reflectance/temperature values in the
+calibration tables included in each file.
+
+
 Navigation
 ----------

@@ -20,7 +47,10 @@
 This cannot be represented by a pyresample area definition, so each dataset
 is accompanied by 2-dimensional longitude and latitude coordinates. For
 resampling purposes, an area definition with uniform sampling is provided via
-``scene[dataset].attrs["area_def_uniform_sampling"]``.
+
+.. code-block:: python
+
+    scene[dataset].attrs["area_def_uniform_sampling"]


 Rectification
@@ -33,13 +63,6 @@
 2) for different repeat cycles, even if the channel is identical


-Calibration
------------
-
-Sensor counts are calibrated by looking up reflectance/temperature values in the
-calibration tables included in each file.
-
-
 Space Pixels
 ------------

@@ -48,6 +71,8 @@
 data of limited or no value, but some applications do require these pixels.
 To turn off masking, set ``mask_space=False`` upon scene creation:

+
+.. 
code-block:: python + import satpy import glob @@ -58,6 +83,7 @@ scene.load(["VIS"]) + References ---------- @@ -80,6 +106,7 @@ from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.hrit_jma import mjd2datetime64 from satpy.utils import get_legacy_chunk_size +from satpy.readers.utils import generic_open CHUNK_SIZE = get_legacy_chunk_size() @@ -499,7 +526,7 @@ def __init__(self, filename, filename_info, filetype_info, mask_space=True): def _read_header(self, filename): header = {} - with open(filename, mode='rb') as file_obj: + with generic_open(filename, mode='rb') as file_obj: header['control_block'] = self._read_control_block(file_obj) channel_type = self._get_channel_type(header['control_block']['parameter_block_size']) header['image_parameters'] = self._read_image_params(file_obj, channel_type) @@ -591,7 +618,7 @@ def _get_image_data(self): def _read_image_data(self): num_lines, _ = self._get_actual_shape() specs = self._get_image_data_type_specs() - with open(self._filename, "rb") as file_obj: + with generic_open(self._filename, "rb") as file_obj: return read_from_file_obj( file_obj, dtype=specs["dtype"], From f5006226a52d0a67acd4d76ab3d0deb67a56bec1 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Thu, 25 May 2023 16:41:14 +0200 Subject: [PATCH 0177/1416] Fix docstrings for readthedoc warnings --- satpy/scene.py | 8 +- satpy/tests/scene_tests/test_conversions.py | 8 ++ satpy/tests/writer_tests/test_cf.py | 16 ++- satpy/writers/cf/__init__.py | 3 + satpy/writers/cf/coords_attrs.py | 46 ++++++++ satpy/writers/cf/crs.py | 47 ++++++++ satpy/writers/cf_writer.py | 123 ++++---------------- 7 files changed, 142 insertions(+), 109 deletions(-) create mode 100644 satpy/writers/cf/__init__.py create mode 100644 satpy/writers/cf/coords_attrs.py create mode 100644 satpy/writers/cf/crs.py diff --git a/satpy/scene.py b/satpy/scene.py index c0ff90ac8a..b012c74159 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1059,6 +1059,10 @@ def to_xarray_dataset(self, datasets=None): Returns: :class:`xarray.Dataset` """ + warnings.warn('Scene.to_xarray_dataset() is deprecated.' + 'Use Scene.to_xarray() instead, to obtain a CF-compliant xr.Dataset .', + DeprecationWarning, stacklevel=3) + dataarrays = self._get_dataarrays_from_identifiers(datasets) if len(dataarrays) == 0: @@ -1094,8 +1098,8 @@ def to_xarray(self, """Merge all xr.DataArray(s) of a satpy.Scene to a CF-compliant xarray object. If all Scene DataArrays are on the same area, it returns an xr.Dataset. - If Scene DataArrays are on different areas, currently it fails, - but in future will returns a xr.DataTree. + If Scene DataArrays are on different areas, currently it fails, although + in future we might return a DataTree object, grouped by area. 
Parameters ---------- diff --git a/satpy/tests/scene_tests/test_conversions.py b/satpy/tests/scene_tests/test_conversions.py index 681a4ccdb0..a260ddf31f 100644 --- a/satpy/tests/scene_tests/test_conversions.py +++ b/satpy/tests/scene_tests/test_conversions.py @@ -90,6 +90,14 @@ def test_to_xarray_with_single_area_scene(self): assert "latitude" not in ds.coords assert "longitude" not in ds.coords + # Assert dataset string is accepted + ds = scn.to_xarray(datasets="var1") + assert isinstance(ds, xr.Dataset) + + # Assert wrong datasets key + with pytest.raises(KeyError): + ds = scn.to_xarray(datasets="var2") + def test_to_xarray_with_multiple_area_scene(self): """Test converting muiltple area Scene to xarray.""" from pyresample.geometry import AreaDefinition diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index c0c6ad2342..e232da31e4 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -110,7 +110,7 @@ def test_da2cf_lonlat(): def test_is_projected(caplog): """Tests for private _is_projected function.""" - from satpy.writers.cf_writer import CFWriter + from satpy.writers.cf.crs import _is_projected # test case with units but no area da = xr.DataArray( @@ -118,20 +118,20 @@ def test_is_projected(caplog): dims=("y", "x"), coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "m"}), "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "m"})}) - assert CFWriter._is_projected(da) + assert _is_projected(da) da = xr.DataArray( np.arange(25).reshape(5, 5), dims=("y", "x"), coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "degrees_east"}), "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "degrees_north"})}) - assert not CFWriter._is_projected(da) + assert not _is_projected(da) da = xr.DataArray( np.arange(25).reshape(5, 5), dims=("y", "x")) with caplog.at_level(logging.WARNING): - assert CFWriter._is_projected(da) + assert _is_projected(da) assert "Failed to tell if data are projected." 
in caplog.text @@ -180,6 +180,14 @@ def test_add_time_cf_attrs(): assert "standard_name" in ds["time"].attrs +def test_empty_collect_cf_datasets(): + """Test that if no DataArrays, collect_cf_datasets raise error.""" + from satpy.writers.cf_writer import collect_cf_datasets + + with pytest.raises(RuntimeError): + collect_cf_datasets(list_dataarrays=[]) + + class TestCFWriter(unittest.TestCase): """Test case for CF writer.""" diff --git a/satpy/writers/cf/__init__.py b/satpy/writers/cf/__init__.py new file mode 100644 index 0000000000..f597a9264c --- /dev/null +++ b/satpy/writers/cf/__init__.py @@ -0,0 +1,3 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +"""Code for generation of CF-compliant datasets.""" diff --git a/satpy/writers/cf/coords_attrs.py b/satpy/writers/cf/coords_attrs.py new file mode 100644 index 0000000000..9a35a25a00 --- /dev/null +++ b/satpy/writers/cf/coords_attrs.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +"""Set CF-compliant attributes to x and y spatial dimensions.""" + +import logging + +from satpy.writers.cf.crs import _is_projected + +logger = logging.getLogger(__name__) + + +def _add_xy_coords_attrs(dataarray): + """Add relevant attributes to x, y coordinates.""" + # If there are no coords, return dataarray + if not dataarray.coords.keys() & {"x", "y", "crs"}: + return dataarray + # If projected area + if _is_projected(dataarray): + dataarray = _add_xy_projected_coords_attrs(dataarray) + else: + dataarray = _add_xy_geographic_coords_attrs(dataarray) + if 'crs' in dataarray.coords: + dataarray = dataarray.drop_vars('crs') + return dataarray + + +def _add_xy_projected_coords_attrs(dataarray, x='x', y='y'): + """Add relevant attributes to x, y coordinates of a projected CRS.""" + if x in dataarray.coords: + dataarray[x].attrs['standard_name'] = 'projection_x_coordinate' + dataarray[x].attrs['units'] = 'm' + if y in dataarray.coords: + dataarray[y].attrs['standard_name'] = 'projection_y_coordinate' + dataarray[y].attrs['units'] = 'm' + return dataarray + + +def _add_xy_geographic_coords_attrs(dataarray, x='x', y='y'): + """Add relevant attributes to x, y coordinates of a geographic CRS.""" + if x in dataarray.coords: + dataarray[x].attrs['standard_name'] = 'longitude' + dataarray[x].attrs['units'] = 'degrees_east' + if y in dataarray.coords: + dataarray[y].attrs['standard_name'] = 'latitude' + dataarray[y].attrs['units'] = 'degrees_north' + return dataarray diff --git a/satpy/writers/cf/crs.py b/satpy/writers/cf/crs.py new file mode 100644 index 0000000000..e6952a484f --- /dev/null +++ b/satpy/writers/cf/crs.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +"""CRS utility.""" + +import logging +from contextlib import suppress + +from pyresample.geometry import AreaDefinition, SwathDefinition + +logger = logging.getLogger(__name__) + + +def _is_projected(dataarray): + """Guess whether data are projected or not.""" + crs = _try_to_get_crs(dataarray) + if crs: + return crs.is_projected + units = _try_get_units_from_coords(dataarray) + if units: + if units.endswith("m"): + return True + if units.startswith("degrees"): + return False + logger.warning("Failed to tell if data are projected. 
Assuming yes.") + return True + + +def _try_to_get_crs(dataarray): + """Try to get a CRS from attributes.""" + if "area" in dataarray.attrs: + if isinstance(dataarray.attrs["area"], AreaDefinition): + return dataarray.attrs["area"].crs + if not isinstance(dataarray.attrs["area"], SwathDefinition): + logger.warning( + f"Could not tell CRS from area of type {type(dataarray.attrs['area']).__name__:s}. " + "Assuming projected CRS.") + if "crs" in dataarray.coords: + return dataarray.coords["crs"].item() + + +def _try_get_units_from_coords(dataarray): + """Try to retrieve coordinate x/y units.""" + for c in ["x", "y"]: + with suppress(KeyError): + # If the data has only 1 dimension, it has only one of x or y coords + if "units" in dataarray.coords[c].attrs: + return dataarray.coords[c].attrs["units"] diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index bd941b833a..f4e9b28770 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -160,7 +160,6 @@ import logging import warnings from collections import OrderedDict, defaultdict -from contextlib import suppress from datetime import datetime import numpy as np @@ -171,6 +170,7 @@ from xarray.coding.times import CFDatetimeCoder from satpy.writers import Writer +from satpy.writers.cf.coords_attrs import _add_xy_coords_attrs from satpy.writers.utils import flatten_dict logger = logging.getLogger(__name__) @@ -811,9 +811,12 @@ def _get_groups(groups, list_datarrays): return grouped_dataarrays -def make_cf_dataarray(dataarray, epoch=EPOCH, flatten_attrs=False, +def make_cf_dataarray(dataarray, + epoch=EPOCH, + flatten_attrs=False, exclude_attrs=None, - include_orig_name=True, numeric_name_prefix='CHANNEL_'): + include_orig_name=True, + numeric_name_prefix='CHANNEL_'): """ Make the xr.DataArray CF-compliant. @@ -848,9 +851,9 @@ def make_cf_dataarray(dataarray, epoch=EPOCH, flatten_attrs=False, dataarray = preprocess_datarray_attrs(dataarray=dataarray, flatten_attrs=flatten_attrs, exclude_attrs=exclude_attrs) - + dataarray = _add_xy_coords_attrs(dataarray) dataarray = _encode_time(dataarray, epoch=epoch) - dataarray = CFWriter._encode_coords(dataarray) + return dataarray @@ -866,24 +869,24 @@ def _collect_cf_dataset(list_dataarrays, Parameters ---------- - list_dataarrays (list): + list_dataarrays : list List of DataArrays to make CF compliant and merge into a xr.Dataset. - epoch (str): + epoch : str Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". If None, the default reference time is retrieved using `from satpy.cf_writer import EPOCH` - flatten_attrs (bool): + flatten_attrs : bool, optional If True, flatten dict-type attributes. - exclude_attrs (list): + exclude_attrs : list, optional List of xr.DataArray attribute names to be excluded. - include_lonlats (bool): + include_lonlats : bool, optional If True, it includes 'latitude' and 'longitude' coordinates also for satpy scene defined on an AreaDefinition. If the 'area' attribute is a SwathDefinition, it always include latitude and longitude coordinates. - pretty (bool): + pretty : bool, optional Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. - include_orig_name (bool). + include_orig_name : bool, optional Include the original dataset name as a variable attribute in the xr.Dataset. - numeric_name_prefix (str): + numeric_name_prefix : str, optional Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. 
@@ -977,7 +980,7 @@ def collect_cf_datasets(list_dataarrays,
     ----------
     list_dataarrays (list):
         List of DataArrays to make CF compliant and merge into groups of xr.Datasets.
-    header_attrs:
+    header_attrs (dict):
         Global attributes of the output xr.Dataset.
@@ -999,7 +1002,9 @@
         Use '' or None to leave this out.
     groups (dict):
         Group datasets according to the given assignment:
-        `{'': ['dataset_name1', 'dataset_name2', ...]}`.
+
+        `{'': ['dataset_name1', 'dataset_name2', ...]}`
+
         It is used to create grouped netCDFs using the CF_Writer.
         If None (the default), no groups will be created.
@@ -1113,94 +1118,6 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None,
                      include_orig_name=include_orig_name,
                      numeric_name_prefix=numeric_name_prefix)

-    @staticmethod
-    def _encode_coords(new_data):
-        """Encode coordinates."""
-        if not new_data.coords.keys() & {"x", "y", "crs"}:
-            # there are no coordinates
-            return new_data
-        is_projected = CFWriter._is_projected(new_data)
-        if is_projected:
-            new_data = CFWriter._encode_xy_coords_projected(new_data)
-        else:
-            new_data = CFWriter._encode_xy_coords_geographic(new_data)
-        if 'crs' in new_data.coords:
-            new_data = new_data.drop_vars('crs')
-        return new_data
-
-    @staticmethod
-    def _is_projected(new_data):
-        """Guess whether data are projected or not."""
-        crs = CFWriter._try_to_get_crs(new_data)
-        if crs:
-            return crs.is_projected
-        units = CFWriter._try_get_units_from_coords(new_data)
-        if units:
-            if units.endswith("m"):
-                return True
-            if units.startswith("degrees"):
-                return False
-        logger.warning("Failed to tell if data are projected. Assuming yes.")
-        return True
-
-    @staticmethod
-    def _try_to_get_crs(new_data):
-        """Try to get a CRS from attributes."""
-        if "area" in new_data.attrs:
-            if isinstance(new_data.attrs["area"], AreaDefinition):
-                return new_data.attrs["area"].crs
-            if not isinstance(new_data.attrs["area"], SwathDefinition):
-                logger.warning(
-                    f"Could not tell CRS from area of type {type(new_data.attrs['area']).__name__:s}. 
" - "Assuming projected CRS.") - if "crs" in new_data.coords: - return new_data.coords["crs"].item() - - @staticmethod - def _try_get_units_from_coords(new_data): - for c in "xy": - with suppress(KeyError): - # If the data has only 1 dimension, it has only one of x or y coords - if "units" in new_data.coords[c].attrs: - return new_data.coords[c].attrs["units"] - - @staticmethod - def _encode_xy_coords_projected(new_data): - """Encode coordinates, assuming projected CRS.""" - if 'x' in new_data.coords: - new_data['x'].attrs['standard_name'] = 'projection_x_coordinate' - new_data['x'].attrs['units'] = 'm' - if 'y' in new_data.coords: - new_data['y'].attrs['standard_name'] = 'projection_y_coordinate' - new_data['y'].attrs['units'] = 'm' - return new_data - - @staticmethod - def _encode_xy_coords_geographic(new_data): - """Encode coordinates, assuming geographic CRS.""" - if 'x' in new_data.coords: - new_data['x'].attrs['standard_name'] = 'longitude' - new_data['x'].attrs['units'] = 'degrees_east' - if 'y' in new_data.coords: - new_data['y'].attrs['standard_name'] = 'latitude' - new_data['y'].attrs['units'] = 'degrees_north' - return new_data - - @staticmethod - def _encode_time(new_data, epoch): - if 'time' in new_data.coords: - new_data['time'].encoding['units'] = epoch - new_data['time'].attrs['standard_name'] = 'time' - new_data['time'].attrs.pop('bounds', None) - new_data = CFWriter._add_time_dimension(new_data) - return new_data - - @staticmethod - def _add_time_dimension(new_data): - if 'time' not in new_data.dims and new_data["time"].size not in new_data.shape: - new_data = new_data.expand_dims('time') - return new_data - @staticmethod def update_encoding(dataset, to_netcdf_kwargs): """Update encoding info (deprecated).""" From 0816b21f5aad95cba558b92254f85024ffe5c007 Mon Sep 17 00:00:00 2001 From: Gionata Ghiggi Date: Thu, 25 May 2023 17:10:15 +0200 Subject: [PATCH 0178/1416] Update satpy/scene.py Co-authored-by: David Hoese --- satpy/scene.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/scene.py b/satpy/scene.py index b012c74159..6723c060d4 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1169,7 +1169,7 @@ def to_xarray(self, return ds else: msg = """The Scene object contains datasets with different areas. - Resample the Scene to have matching dimensions using i.e. scn.resample("native") """ + Resample the Scene to have matching dimensions using i.e. scn.resample(resampler="native") """ raise NotImplementedError(msg) def _get_dataarrays_from_identifiers(self, identifiers): From 4a12eacd1cca882084e0cec4545dad0de962355c Mon Sep 17 00:00:00 2001 From: Gionata Ghiggi Date: Thu, 25 May 2023 17:10:26 +0200 Subject: [PATCH 0179/1416] Update satpy/scene.py Co-authored-by: David Hoese --- satpy/scene.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/scene.py b/satpy/scene.py index 6723c060d4..40c63ee26b 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1061,7 +1061,7 @@ def to_xarray_dataset(self, datasets=None): """ warnings.warn('Scene.to_xarray_dataset() is deprecated.' 
'Use Scene.to_xarray() instead, to obtain a CF-compliant xr.Dataset .',
-                      DeprecationWarning, stacklevel=3)
+                      DeprecationWarning, stacklevel=2)

         dataarrays = self._get_dataarrays_from_identifiers(datasets)

From 265a3668d518bed1835bb7558bea27b9f5b205ca Mon Sep 17 00:00:00 2001
From: ghiggi
Date: Thu, 25 May 2023 18:49:25 +0200
Subject: [PATCH 0180/1416] Address review comments

---
 satpy/_scene_converters.py                  | 124 ++++++++++++++++++
 satpy/scene.py                              |  69 +++-------
 satpy/tests/scene_tests/test_conversions.py | 130 ++++++++++--------
 satpy/tests/writer_tests/test_cf.py         |   4 +-
 satpy/writers/cf/coords_attrs.py            |   2 +-
 satpy/writers/cf_writer.py                  | 138 +++++++------------
 6 files changed, 265 insertions(+), 202 deletions(-)
 create mode 100644 satpy/_scene_converters.py

diff --git a/satpy/_scene_converters.py b/satpy/_scene_converters.py
new file mode 100644
index 0000000000..25fe728b9f
--- /dev/null
+++ b/satpy/_scene_converters.py
@@ -0,0 +1,124 @@
+# Copyright (c) 2023 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy.  If not, see .
+"""Helper functions for converting the Scene object to some other object."""
+
+import xarray as xr
+
+from satpy.dataset import DataID
+
+
+def _get_dataarrays_from_identifiers(scn, identifiers):
+    """Return a list of DataArray based on a single or list of identifiers.
+
+    An identifier can be a DataID or a string with the name of a valid DataID.
+    """
+    if isinstance(identifiers, (str, DataID)):
+        identifiers = [identifiers]
+
+    if identifiers is not None:
+        dataarrays = [scn[ds] for ds in identifiers]
+    else:
+        dataarrays = [scn._datasets.get(ds) for ds in scn._wishlist]
+        dataarrays = [dataarray for dataarray in dataarrays if dataarray is not None]
+    return dataarrays
+
+
+def to_xarray(scn,
+              datasets=None,  # DataID
+              header_attrs=None,
+              exclude_attrs=None,
+              flatten_attrs=False,
+              pretty=True,
+              include_lonlats=True,
+              epoch=None,
+              include_orig_name=True,
+              numeric_name_prefix='CHANNEL_'):
+    """Merge all xr.DataArray(s) of a satpy.Scene to a CF-compliant xarray object.
+
+    If all Scene DataArrays are on the same area, it returns an xr.Dataset.
+    If Scene DataArrays are on different areas, currently it fails, although
+    in future we might return a DataTree object, grouped by area.
+
+    Parameters
+    ----------
+    scn: satpy.Scene
+        Satpy Scene.
+    datasets (iterable):
+        List of Satpy Scene datasets to include in the output xr.Dataset.
+        Elements can be a string name, a wavelength as a number, a DataID,
+        or a DataQuery object.
+        If None (the default), it includes all loaded Scene datasets.
+    header_attrs:
+        Global attributes of the output xr.Dataset.
+    epoch (str):
+        Reference time for encoding the time coordinates (if available).
+        Example format: "seconds since 1970-01-01 00:00:00".
+        If None, the default reference time is retrieved using "from satpy.writers.cf_writer import EPOCH"
+    flatten_attrs (bool):
+        If True, flatten dict-type attributes.
+    exclude_attrs (list):
+        List of xr.DataArray attribute names to be excluded.
+    include_lonlats (bool):
+        If True, it includes 'latitude' and 'longitude' coordinates.
+        If the 'area' attribute is a SwathDefinition, it always includes
+        latitude and longitude coordinates.
+    pretty (bool):
+        Don't modify coordinate names, if possible. Makes the file prettier,
+        but possibly less consistent.
+    include_orig_name (bool):
+        Include the original dataset name as a variable attribute in the xr.Dataset.
+    numeric_name_prefix (str):
+        Prefix to add to each variable whose name starts with a digit.
+        Use '' or None to leave this out.
+
+    Returns
+    -------
+    ds, xr.Dataset
+        A CF-compliant xr.Dataset
+
+    """
+    from satpy.writers.cf_writer import EPOCH, collect_cf_datasets
+
+    if epoch is None:
+        epoch = EPOCH
+
+    # Get list of DataArrays
+    if datasets is None:
+        datasets = list(scn.keys())  # list all loaded DataIDs
+    list_dataarrays = _get_dataarrays_from_identifiers(scn, datasets)
+
+    # Check that some DataArray could be returned
+    if len(list_dataarrays) == 0:
+        return xr.Dataset()
+
+    # Collect xr.Dataset for each group
+    grouped_datasets, header_attrs = collect_cf_datasets(list_dataarrays=list_dataarrays,
+                                                         header_attrs=header_attrs,
+                                                         exclude_attrs=exclude_attrs,
+                                                         flatten_attrs=flatten_attrs,
+                                                         pretty=pretty,
+                                                         include_lonlats=include_lonlats,
+                                                         epoch=epoch,
+                                                         include_orig_name=include_orig_name,
+                                                         numeric_name_prefix=numeric_name_prefix,
+                                                         groups=None)
+    if len(grouped_datasets) == 1:
+        ds = grouped_datasets[None]
+        return ds
+    else:
+        msg = """The Scene object contains datasets with different areas.
+                 Resample the Scene to have matching dimensions using e.g. 
scn.resample(resampler="native") """ - raise NotImplementedError(msg) - - def _get_dataarrays_from_identifiers(self, identifiers): - """Return a list of DataArray based on a single or list of identifiers. - - An identifier can be a DataID or a string with name of a valid DataID. - """ - if isinstance(identifiers, (str, DataID)): - identifiers = [identifiers] - - if identifiers is not None: - dataarrays = [self[ds] for ds in identifiers] - else: - dataarrays = [self._datasets.get(ds) for ds in self._wishlist] - dataarrays = [dataarray for dataarray in dataarrays if dataarray is not None] - return dataarrays + from satpy._scene_converters import to_xarray + + return to_xarray(scn=self, + datasets=datasets, # DataID + header_attrs=header_attrs, + exclude_attrs=exclude_attrs, + flatten_attrs=flatten_attrs, + pretty=pretty, + include_lonlats=include_lonlats, + epoch=epoch, + include_orig_name=include_orig_name, + numeric_name_prefix=numeric_name_prefix) def images(self): """Generate images for all the datasets from the scene.""" @@ -1286,7 +1251,9 @@ def save_datasets(self, writer=None, filename=None, datasets=None, compute=True, close any objects that have a "close" method. """ - dataarrays = self._get_dataarrays_from_identifiers(datasets) + from satpy._scene_converters import _get_dataarrays_from_identifiers + + dataarrays = _get_dataarrays_from_identifiers(self, datasets) if not dataarrays: raise RuntimeError("None of the requested datasets have been " "generated or could not be loaded. Requested " diff --git a/satpy/tests/scene_tests/test_conversions.py b/satpy/tests/scene_tests/test_conversions.py index a260ddf31f..c62ffcea1d 100644 --- a/satpy/tests/scene_tests/test_conversions.py +++ b/satpy/tests/scene_tests/test_conversions.py @@ -53,7 +53,39 @@ def test_to_xarray_dataset_with_empty_scene(self): assert len(xrds.variables) == 0 assert len(xrds.coords) == 0 - def test_to_xarray_with_empty_scene(self): + def test_geoviews_basic_with_area(self): + """Test converting a Scene to geoviews with an AreaDefinition.""" + from pyresample.geometry import AreaDefinition + scn = Scene() + area = AreaDefinition('test', 'test', 'test', + {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0}, + 2, 2, [-200, -200, 200, 200]) + scn['ds1'] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=('y', 'x'), + attrs={'start_time': datetime(2018, 1, 1), + 'area': area}) + gv_obj = scn.to_geoviews() + # we assume that if we got something back, geoviews can use it + assert gv_obj is not None + + def test_geoviews_basic_with_swath(self): + """Test converting a Scene to geoviews with a SwathDefinition.""" + from pyresample.geometry import SwathDefinition + scn = Scene() + lons = xr.DataArray(da.zeros((2, 2))) + lats = xr.DataArray(da.zeros((2, 2))) + area = SwathDefinition(lons, lats) + scn['ds1'] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=('y', 'x'), + attrs={'start_time': datetime(2018, 1, 1), + 'area': area}) + gv_obj = scn.to_geoviews() + # we assume that if we got something back, geoviews can use it + assert gv_obj is not None + + +class TestToXarrayConversion: + """Test Scene.to_xarray() conversion.""" + + def test_with_empty_scene(self): """Test converting empty Scene to xarray.""" scn = Scene() ds = scn.to_xarray() @@ -61,8 +93,9 @@ def test_to_xarray_with_empty_scene(self): assert len(ds.variables) == 0 assert len(ds.coords) == 0 - def test_to_xarray_with_single_area_scene(self): - """Test converting single area Scene to xarray dataset.""" + @pytest.fixture + def single_area_scn(self): + """Define Scene with 
single area.""" from pyresample.geometry import AreaDefinition area = AreaDefinition('test', 'test', 'test', @@ -71,35 +104,13 @@ def test_to_xarray_with_single_area_scene(self): data_array = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=('y', 'x'), attrs={'start_time': datetime(2018, 1, 1), 'area': area}) - scn = Scene() scn['var1'] = data_array - ds = scn.to_xarray() - - # Assert dataset type - assert isinstance(ds, xr.Dataset) - # Assert var1 is a Dataset variables - assert "var1" in ds.data_vars - - # Assert by default it include lon lats - assert "latitude" in ds.coords - assert "longitude" in ds.coords - - # Assert include_lonlats=False works - ds = scn.to_xarray(include_lonlats=False) - assert "latitude" not in ds.coords - assert "longitude" not in ds.coords - - # Assert dataset string is accepted - ds = scn.to_xarray(datasets="var1") - assert isinstance(ds, xr.Dataset) - - # Assert wrong datasets key - with pytest.raises(KeyError): - ds = scn.to_xarray(datasets="var2") + return scn - def test_to_xarray_with_multiple_area_scene(self): - """Test converting muiltple area Scene to xarray.""" + @pytest.fixture + def multi_area_scn(self): + """Define Scene with multiple area.""" from pyresample.geometry import AreaDefinition area1 = AreaDefinition('test', 'test', 'test', @@ -118,35 +129,38 @@ def test_to_xarray_with_multiple_area_scene(self): scn = Scene() scn['var1'] = data_array1 scn['var2'] = data_array2 + return scn - # TODO: in future adapt for DataTree implementation - with pytest.raises(ValueError): - _ = scn.to_xarray() + def test_with_single_area_scene_type(self, single_area_scn): + """Test converting single area Scene to xarray dataset.""" + ds = single_area_scn.to_xarray() + assert isinstance(ds, xr.Dataset) + assert "var1" in ds.data_vars - def test_geoviews_basic_with_area(self): - """Test converting a Scene to geoviews with an AreaDefinition.""" - from pyresample.geometry import AreaDefinition - scn = Scene() - area = AreaDefinition('test', 'test', 'test', - {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0}, - 2, 2, [-200, -200, 200, 200]) - scn['ds1'] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1), - 'area': area}) - gv_obj = scn.to_geoviews() - # we assume that if we got something back, geoviews can use it - assert gv_obj is not None + def test_include_lonlats_true(self, single_area_scn): + """Test include lonlats.""" + ds = single_area_scn.to_xarray(include_lonlats=True) + assert "latitude" in ds.coords + assert "longitude" in ds.coords - def test_geoviews_basic_with_swath(self): - """Test converting a Scene to geoviews with a SwathDefinition.""" - from pyresample.geometry import SwathDefinition - scn = Scene() - lons = xr.DataArray(da.zeros((2, 2))) - lats = xr.DataArray(da.zeros((2, 2))) - area = SwathDefinition(lons, lats) - scn['ds1'] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1), - 'area': area}) - gv_obj = scn.to_geoviews() - # we assume that if we got something back, geoviews can use it - assert gv_obj is not None + def test_include_lonlats_false(self, single_area_scn): + """Test exclude lonlats.""" + ds = single_area_scn.to_xarray(include_lonlats=False) + assert "latitude" not in ds.coords + assert "longitude" not in ds.coords + + def test_dataset_string_accepted(self, single_area_scn): + """Test accept dataset string.""" + ds = single_area_scn.to_xarray(datasets="var1") + assert isinstance(ds, xr.Dataset) + + def test_wrong_dataset_key(self, 
single_area_scn):
+        """Test that an error is raised for a nonexistent dataset."""
+        with pytest.raises(KeyError):
+            _ = single_area_scn.to_xarray(datasets="var2")
+
+    def test_to_xarray_with_multiple_area_scene(self, multi_area_scn):
+        """Test converting a multiple-area Scene to xarray."""
+        # TODO: in future adapt for DataTree implementation
+        with pytest.raises(ValueError):
+            _ = multi_area_scn.to_xarray()

diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py
index e232da31e4..6b838b6847 100644
--- a/satpy/tests/writer_tests/test_cf.py
+++ b/satpy/tests/writer_tests/test_cf.py
@@ -161,7 +161,7 @@ def test_preprocess_dataarray_name():
 def test_add_time_cf_attrs():
     """Test addition of CF-compliant time attributes."""
     from satpy import Scene
-    from satpy.writers.cf_writer import add_time_cf_attrs
+    from satpy.writers.cf_writer import add_time_bounds_dimension

     scn = Scene()
     test_array = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
@@ -172,7 +172,7 @@ def test_add_time_cf_attrs():
                                      coords={'time': ('y', times)},
                                      attrs=dict(start_time=times[0], end_time=times[-1]))
     ds = scn['test-array'].to_dataset(name='test-array')
-    ds = add_time_cf_attrs(ds)
+    ds = add_time_bounds_dimension(ds)
     assert "bnds_1d" in ds.dims
     assert ds.dims['bnds_1d'] == 2
     assert "time_bnds" in list(ds.data_vars)

diff --git a/satpy/writers/cf/coords_attrs.py b/satpy/writers/cf/coords_attrs.py
index 9a35a25a00..c7e559adc2 100644
--- a/satpy/writers/cf/coords_attrs.py
+++ b/satpy/writers/cf/coords_attrs.py
@@ -9,7 +9,7 @@
 logger = logging.getLogger(__name__)


-def _add_xy_coords_attrs(dataarray):
+def add_xy_coords_attrs(dataarray):
     """Add relevant attributes to x, y coordinates."""
     # If there are no coords, return dataarray
     if not dataarray.coords.keys() & {"x", "y", "crs"}:

diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py
index f4e9b28770..60e5e8ad12 100644
--- a/satpy/writers/cf_writer.py
+++ b/satpy/writers/cf_writer.py
@@ -170,7 +170,7 @@
 from xarray.coding.times import CFDatetimeCoder

 from satpy.writers import Writer
-from satpy.writers.cf.coords_attrs import _add_xy_coords_attrs
+from satpy.writers.cf.coords_attrs import add_xy_coords_attrs
 from satpy.writers.utils import flatten_dict

 logger = logging.getLogger(__name__)

@@ -398,51 +398,45 @@ def link_coords(datas):

 # ###--------------------------------------------------------------------------.
 # ### CF-Time
-def make_time_bounds(start_times, end_times):
-    """Create time bounds for the current *dataarray*."""
+def add_time_bounds_dimension(ds, time="time"):
+    """Add a time bounds dimension to the xr.Dataset."""
+    start_times = []
+    end_times = []
+    for _var_name, data_array in ds.items():
+        start_times.append(data_array.attrs.get("start_time", None))
+        end_times.append(data_array.attrs.get("end_time", None))
+
     start_time = min(start_time for start_time in start_times
                      if start_time is not None)
     end_time = min(end_time for end_time in end_times
                    if end_time is not None)
-    data = xr.DataArray([[np.datetime64(start_time), np.datetime64(end_time)]],
-                        dims=['time', 'bnds_1d'])
-    return data
-
-
-def add_time_cf_attrs(ds):
-    """Add time CF-compliant attributes if Dataset has time coordinate."""
-    if 'time' in ds:
-        # Retrieve list of start_time and end_time
-        start_times = []
-        end_times = []
-        for _var_name, data_array in ds.items():
-            start_times.append(data_array.attrs.get("start_time", None))
-            end_times.append(data_array.attrs.get("end_time", None))
-        # Add time bounds dimension and bounds attribute
-        ds['time_bnds'] = make_time_bounds(start_times, end_times)
-        ds['time'].attrs['bounds'] = "time_bnds"
-        ds['time'].attrs['standard_name'] = "time"
-    # else:
-    #     grp_str = ' of group {}'.format(group_name) if group_name is not None else ''
-    #     logger.warning('No time dimension in datasets{}, skipping time bounds creation.'.format(grp_str))
+    ds['time_bnds'] = xr.DataArray([[np.datetime64(start_time),
+                                     np.datetime64(end_time)]],
+                                   dims=['time', 'bnds_1d'])
+    ds[time].attrs['bounds'] = "time_bnds"
+    ds[time].attrs['standard_name'] = "time"
     return ds


-def _encode_time(dataarray, epoch):
-    """Add encodings to the 'time' coordinate."""
+def _process_time_coord(dataarray, epoch):
+    """Process the 'time' coordinate, if it exists.
+
+    It expands the DataArray with a time dimension if one does not yet exist.
+ + The function assumes + + - that x and y dimensions have at least shape > 1 + - the time coordinate has size 1 + + """ if 'time' in dataarray.coords: dataarray['time'].encoding['units'] = epoch dataarray['time'].attrs['standard_name'] = 'time' dataarray['time'].attrs.pop('bounds', None) - dataarray = _add_time_dimension(dataarray) - return dataarray + if 'time' not in dataarray.dims and dataarray["time"].size not in dataarray.shape: + dataarray = dataarray.expand_dims('time') -def _add_time_dimension(dataarray): - """Add 'time' dimension to the DataArray.""" - # BUG: When number of timesteps equals the shape of y or x - if 'time' not in dataarray.dims and dataarray["time"].size not in dataarray.shape: - dataarray = dataarray.expand_dims('time') return dataarray @@ -558,13 +552,10 @@ def encode_attrs_nc(attrs): def _add_ancillary_variables_attrs(dataarray): """Replace ancillary_variables DataArray with a list of their name.""" - # Retrieve list of ancillary variables DataArrays name - list_ancillary_variables = [da_ancillary.attrs['name'] - for da_ancillary in dataarray.attrs.get('ancillary_variables', [])] - # Replace ancillary_variables attribute with the list of names - if list_ancillary_variables: - dataarray.attrs['ancillary_variables'] = ' '.join(list_ancillary_variables) - # If no ancillary_variables, drop the attribute + list_ancillary_variable_names = [da_ancillary.attrs['name'] + for da_ancillary in dataarray.attrs.get('ancillary_variables', [])] + if list_ancillary_variable_names: + dataarray.attrs['ancillary_variables'] = ' '.join(list_ancillary_variable_names) else: dataarray.attrs.pop("ancillary_variables", None) return dataarray @@ -581,7 +572,6 @@ def _drop_exclude_attrs(dataarray, exclude_attrs): def _remove_satpy_attrs(new_data): """Remove _satpy attribute.""" - # Remove _satpy* attributes satpy_attrs = [key for key in new_data.attrs if key.startswith('_satpy')] for satpy_attr in satpy_attrs: new_data.attrs.pop(satpy_attr) @@ -596,7 +586,7 @@ def _format_prerequisites_attrs(dataarray): return dataarray -def _cleanup_attrs(dataarray): +def _remove_none_attrs(dataarray): """Remove attribute keys with None value.""" for key, val in dataarray.attrs.copy().items(): if val is None: @@ -606,50 +596,33 @@ def _cleanup_attrs(dataarray): def preprocess_datarray_attrs(dataarray, flatten_attrs, exclude_attrs): """Preprocess DataArray attributes to be written into CF-compliant netCDF/Zarr.""" - # Remove _satpy attribute dataarray = _remove_satpy_attrs(dataarray) - - # Add ancillary_variables attribute dataarray = _add_ancillary_variables_attrs(dataarray) - - # Drop exclude_attrs keys from DataArray attributes dataarray = _drop_exclude_attrs(dataarray, exclude_attrs) - - # Remove 'area' attribute + dataarray = _format_prerequisites_attrs(dataarray) + dataarray = _remove_none_attrs(dataarray) _ = dataarray.attrs.pop("area", None) - # Set long_name to DataArray name if 'standard_name' attribute not provided if 'long_name' not in dataarray.attrs and 'standard_name' not in dataarray.attrs: dataarray.attrs['long_name'] = dataarray.name - # Format prequisites attribute - dataarray = _format_prerequisites_attrs(dataarray) - - # Remove attribute keys when their value is None - dataarray = _cleanup_attrs(dataarray) - - # If specified, flatten dict-type attributes if flatten_attrs: dataarray.attrs = flatten_dict(dataarray.attrs) - # Encode attributes to netcdf-compatible datatype dataarray.attrs = encode_attrs_nc(dataarray.attrs) return dataarray def preprocess_header_attrs(header_attrs, 
flatten_attrs=False): - """Prepare header attributes.""" - # Define file header attributes + """Prepare file header attributes.""" if header_attrs is not None: if flatten_attrs: header_attrs = flatten_dict(header_attrs) header_attrs = encode_attrs_nc(header_attrs) # OrderedDict else: header_attrs = {} - # - Add "Created by pytroll/satpy ..." to history attribute header_attrs = _add_history(header_attrs) - # - Return header attributes return header_attrs @@ -717,14 +690,13 @@ def _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix): A lot of channel names in satpy starts with a digit. When preparing CF-compliant datasets, these channels are prefixed with numeric_name_prefix. + If variables names in the encoding dictionary are numeric digits, their name is prefixed with numeric_name_prefix """ for var_name in list(dataset.variables): - # If var_name do no start with numeric_name_prefix, do nothing if not numeric_name_prefix or not var_name.startswith(numeric_name_prefix): continue - # Else update the encoding dictionary key name with the prefixed one orig_var_name = var_name.replace(numeric_name_prefix, '') if orig_var_name in encoding: encoding[var_name] = encoding.pop(orig_var_name) @@ -739,12 +711,10 @@ def update_encoding(dataset, to_netcdf_kwargs, numeric_name_prefix='CHANNEL_'): """ other_to_netcdf_kwargs = to_netcdf_kwargs.copy() encoding = other_to_netcdf_kwargs.pop('encoding', {}).copy() - encoding = _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix) encoding = _set_default_chunks(encoding, dataset) encoding = _set_default_fill_value(encoding, dataset) encoding = _set_default_time_encoding(encoding, dataset) - return encoding, other_to_netcdf_kwargs @@ -769,14 +739,13 @@ def _handle_dataarray_name(original_name, numeric_name_prefix): def _preprocess_dataarray_name(dataarray, numeric_name_prefix, include_orig_name): """Change the DataArray name by prepending numeric_name_prefix if the name is a digit.""" - # Replace datarray name if start with a digit original_name = None dataarray = dataarray.copy() if 'name' in dataarray.attrs: original_name = dataarray.attrs.pop('name') original_name, new_name = _handle_dataarray_name(original_name, numeric_name_prefix) dataarray = dataarray.rename(new_name) - # If the name changed, add the original_name attribute + if include_orig_name and numeric_name_prefix and original_name and original_name != new_name: dataarray.attrs['original_name'] = original_name @@ -796,11 +765,13 @@ def _add_history(attrs): def _get_groups(groups, list_datarrays): - """Return a dictionary with the list of xr.DataArray associated to each group.""" - # If no groups, return all DataArray attached to a single None key + """Return a dictionary with the list of xr.DataArray associated to each group. + + If no groups (groups=None), return all DataArray attached to a single None key. + Else, collect the DataArrays associated to each group. 
+ """ if groups is None: grouped_dataarrays = {None: list_datarrays} - # Else, collect the DataArrays associated to each group else: grouped_dataarrays = defaultdict(list) for datarray in list_datarrays: @@ -851,9 +822,8 @@ def make_cf_dataarray(dataarray, dataarray = preprocess_datarray_attrs(dataarray=dataarray, flatten_attrs=flatten_attrs, exclude_attrs=exclude_attrs) - dataarray = _add_xy_coords_attrs(dataarray) - dataarray = _encode_time(dataarray, epoch=epoch) - + dataarray = add_xy_coords_attrs(dataarray) + dataarray = _process_time_coord(dataarray, epoch=epoch) return dataarray @@ -901,7 +871,7 @@ def _collect_cf_dataset(list_dataarrays, for dataarray in list_dataarrays: ds_collection.update(get_extra_ds(dataarray)) - # Check ??? + # Do what ??? got_lonlats = has_projection_coords(ds_collection) # Sort dictionary by keys name @@ -1015,14 +985,12 @@ def collect_cf_datasets(list_dataarrays, header_attrs : dict Global attributes to be attached to the xr.Dataset / netCDF4. """ - # Check some DataArray have been provided if not list_dataarrays: raise RuntimeError("None of the requested datasets have been " "generated or could not be loaded. Requested " "composite inputs may need to have matching " "dimensions (eg. through resampling).") - # Define file header attributes header_attrs = preprocess_header_attrs(header_attrs=header_attrs, flatten_attrs=flatten_attrs) @@ -1034,7 +1002,7 @@ def collect_cf_datasets(list_dataarrays, is_grouped = len(grouped_dataarrays) >= 2 # If not grouped, add CF conventions. - # - If 'Conventions' key already present, do not overwrite + # - If 'Conventions' key already present, do not overwrite ! if "Conventions" not in header_attrs and not is_grouped: header_attrs['Conventions'] = CF_VERSION @@ -1052,17 +1020,13 @@ def collect_cf_datasets(list_dataarrays, include_orig_name=include_orig_name, numeric_name_prefix=numeric_name_prefix) - # If no groups, add global header to xr.Dataset if not is_grouped: ds.attrs = header_attrs - # If "time" is a coordinate, add relevant variable/attributes (i.e. time_bnds) - ds = add_time_cf_attrs(ds) + if 'time' in ds: + ds = add_time_bounds_dimension(ds, time="time") - # Add xr.Dataset to dictionary grouped_datasets[group_name] = ds - - # Return dictionary with xr.Dataset return grouped_datasets, header_attrs @@ -1078,7 +1042,6 @@ def _sanitize_writer_kwargs(writer_kwargs): def _initialize_root_netcdf(filename, engine, header_attrs, to_netcdf_kwargs): """Initialize root empty netCDF.""" root = xr.Dataset({}, attrs=header_attrs) - # - Define init kwargs init_nc_kwargs = to_netcdf_kwargs.copy() init_nc_kwargs.pop('encoding', None) # No variables to be encoded at this point init_nc_kwargs.pop('unlimited_dims', None) @@ -1168,7 +1131,6 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. 
""" - # Note: datasets is a list of xr.DataArray logger.info('Saving datasets to NetCDF4/CF.') _check_backend_versions() @@ -1209,11 +1171,9 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, # - If grouped netCDF, it appends to the root file # - If single netCDF, it write directly for group_name, ds in grouped_datasets.items(): - # Update encoding encoding, other_to_netcdf_kwargs = update_encoding(ds, to_netcdf_kwargs=to_netcdf_kwargs, numeric_name_prefix=numeric_name_prefix) - # Write (append) dataset res = ds.to_netcdf(filename, engine=engine, group=group_name, @@ -1221,8 +1181,6 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, encoding=encoding, **other_to_netcdf_kwargs) written.append(res) - - # Return list of writing results return written # --------------------------------------------------------------------------. From 8c1e6cd2d4c274b8f23dd9beb8669b91e4597556 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Thu, 25 May 2023 19:21:53 +0200 Subject: [PATCH 0181/1416] Start refactor of area to cf --- satpy/tests/writer_tests/test_cf.py | 42 ++++++++++---------- satpy/writers/cf_writer.py | 61 ++++++++++++++--------------- 2 files changed, 51 insertions(+), 52 deletions(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 6b838b6847..2ddf51c1e3 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -837,15 +837,15 @@ def test_area2cf(self): ds = ds_base.copy(deep=True) ds.attrs['area'] = geos - res = area2cf(ds) + res = area2cf(ds, include_lonlats=False) self.assertEqual(len(res), 2) self.assertEqual(res[0].size, 1) # grid mapping variable self.assertEqual(res[0].name, res[1].attrs['grid_mapping']) - # b) Area Definition and strict=False + # b) Area Definition and include_lonlats=False ds = ds_base.copy(deep=True) ds.attrs['area'] = geos - res = area2cf(ds, strict=True) + res = area2cf(ds, include_lonlats=True) # same as above self.assertEqual(len(res), 2) self.assertEqual(res[0].size, 1) # grid mapping variable @@ -859,15 +859,15 @@ def test_area2cf(self): ds = ds_base.copy(deep=True) ds.attrs['area'] = swath - res = area2cf(ds) + res = area2cf(ds, include_lonlats=False) self.assertEqual(len(res), 1) self.assertIn('longitude', res[0].coords) self.assertIn('latitude', res[0].coords) self.assertNotIn('grid_mapping', res[0].attrs) - def test_area2gridmapping(self): + def test__add_grid_mapping(self): """Test the conversion from pyresample area object to CF grid mapping.""" - from satpy.writers.cf_writer import area2gridmapping + from satpy.writers.cf_writer import _add_grid_mapping def _gm_matches(gmapping, expected): """Assert that all keys in ``expected`` match the values in ``gmapping``.""" @@ -905,7 +905,7 @@ def _gm_matches(gmapping, expected): ds = ds_base.copy() ds.attrs['area'] = geos - new_ds, grid_mapping = area2gridmapping(ds) + new_ds, grid_mapping = _add_grid_mapping(ds) if 'sweep_angle_axis' in grid_mapping.attrs: # older versions of pyproj might not include this self.assertEqual(grid_mapping.attrs['sweep_angle_axis'], 'y') @@ -929,7 +929,7 @@ def _gm_matches(gmapping, expected): ds = ds_base.copy() ds.attrs['area'] = cosmo7 - new_ds, grid_mapping = area2gridmapping(ds) + new_ds, grid_mapping = _add_grid_mapping(ds) self.assertIn('crs_wkt', grid_mapping.attrs) wkt = grid_mapping.attrs['crs_wkt'] self.assertIn('ELLIPSOID["WGS 84"', wkt) @@ -962,7 +962,7 @@ def _gm_matches(gmapping, expected): ds = ds_base.copy() ds.attrs['area'] = 
tmerc - new_ds, grid_mapping = area2gridmapping(ds) + new_ds, grid_mapping = _add_grid_mapping(ds) self.assertEqual(new_ds.attrs['grid_mapping'], 'tmerc') _gm_matches(grid_mapping, tmerc_expected) @@ -988,7 +988,7 @@ def _gm_matches(gmapping, expected): ds = ds_base.copy() ds.attrs['area'] = geos - new_ds, grid_mapping = area2gridmapping(ds) + new_ds, grid_mapping = _add_grid_mapping(ds) self.assertEqual(new_ds.attrs['grid_mapping'], 'geos') _gm_matches(grid_mapping, geos_expected) @@ -1019,7 +1019,7 @@ def _gm_matches(gmapping, expected): ds = ds_base.copy() ds.attrs['area'] = area - new_ds, grid_mapping = area2gridmapping(ds) + new_ds, grid_mapping = _add_grid_mapping(ds) self.assertEqual(new_ds.attrs['grid_mapping'], 'omerc_otf') _gm_matches(grid_mapping, omerc_expected) @@ -1044,14 +1044,14 @@ def _gm_matches(gmapping, expected): ds = ds_base.copy() ds.attrs['area'] = geos - new_ds, grid_mapping = area2gridmapping(ds) + new_ds, grid_mapping = _add_grid_mapping(ds) self.assertEqual(new_ds.attrs['grid_mapping'], 'geos') _gm_matches(grid_mapping, geos_expected) - def test_area2lonlat(self): + def test_add_lonlat_coords(self): """Test the conversion from areas to lon/lat.""" - from satpy.writers.cf_writer import area2lonlat + from satpy.writers.cf_writer import add_lonlat_coords area = pyresample.geometry.AreaDefinition( 'seviri', @@ -1064,7 +1064,7 @@ def test_area2lonlat(self): lons_ref, lats_ref = area.get_lonlats() dataarray = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), attrs={'area': area}) - res = area2lonlat(dataarray) + res = add_lonlat_coords(dataarray) # original should be unmodified self.assertNotIn('longitude', dataarray.coords) @@ -1087,7 +1087,7 @@ def test_area2lonlat(self): lons_ref, lats_ref = area.get_lonlats() dataarray = xr.DataArray(data=da.from_array(np.arange(3 * 10 * 10).reshape(3, 10, 10), chunks=(1, 5, 5)), dims=('bands', 'y', 'x'), attrs={'area': area}) - res = area2lonlat(dataarray) + res = add_lonlat_coords(dataarray) # original should be unmodified self.assertNotIn('longitude', dataarray.coords) @@ -1188,12 +1188,12 @@ def setUp(self): self.datasets['var2'].attrs['name'] = 'var2' self.datasets['lon'].attrs['name'] = 'lon' - def test_dataset_is_projection_coords(self): - """Test the dataset_is_projection_coords function.""" - from satpy.writers.cf_writer import dataset_is_projection_coords + def test_is_lon_or_lat_dataarray(self): + """Test the is_lon_or_lat_dataarray function.""" + from satpy.writers.cf_writer import is_lon_or_lat_dataarray - self.assertTrue(dataset_is_projection_coords(self.datasets['lat'])) - self.assertFalse(dataset_is_projection_coords(self.datasets['var1'])) + self.assertTrue(is_lon_or_lat_dataarray(self.datasets['lat'])) + self.assertFalse(is_lon_or_lat_dataarray(self.datasets['var1'])) def test_has_projection_coords(self): """Test the has_projection_coords function.""" diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 60e5e8ad12..70aaacc127 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -214,18 +214,6 @@ CF_VERSION = 'CF-1.7' -def create_grid_mapping(area): - """Create the grid mapping instance for `area`.""" - import pyproj - if Version(pyproj.__version__) < Version('2.4.1'): - # technically 2.2, but important bug fixes in 2.4.1 - raise ImportError("'cf' writer requires pyproj 2.4.1 or greater") - # let pyproj do the heavily lifting - # pyproj 2.0+ required - grid_mapping = area.crs.to_cf() - return area.area_id, grid_mapping - - def get_extra_ds(dataarray, keys=None): 
"""Get the extra datasets associated to *dataset*.""" ds_collection = {} @@ -244,8 +232,8 @@ def get_extra_ds(dataarray, keys=None): # ### CF-Area -def area2lonlat(dataarray): - """Convert an area to longitudes and latitudes.""" +def add_lonlat_coords(dataarray): + """Add 'longitude' and 'latitude' coordinates to DataArray.""" dataarray = dataarray.copy() area = dataarray.attrs['area'] ignore_dims = {dim: 0 for dim in dataarray.dims if dim not in ['x', 'y']} @@ -264,38 +252,50 @@ def area2lonlat(dataarray): return dataarray -def area2gridmapping(dataarray): +def _create_grid_mapping(area): + """Create the grid mapping instance for `area`.""" + import pyproj + + if Version(pyproj.__version__) < Version('2.4.1'): + # technically 2.2, but important bug fixes in 2.4.1 + raise ImportError("'cf' writer requires pyproj 2.4.1 or greater") + # let pyproj do the heavily lifting (pyproj 2.0+ required) + grid_mapping = area.crs.to_cf() + return area.area_id, grid_mapping + + +def _add_grid_mapping(dataarray): """Convert an area to at CF grid mapping.""" dataarray = dataarray.copy() area = dataarray.attrs['area'] - gmapping_var_name, attrs = create_grid_mapping(area) + gmapping_var_name, attrs = _create_grid_mapping(area) dataarray.attrs['grid_mapping'] = gmapping_var_name return dataarray, xr.DataArray(0, attrs=attrs, name=gmapping_var_name) -def area2cf(dataarray, strict=False, got_lonlats=False): +def area2cf(dataarray, include_lonlats=False, got_lonlats=False): """Convert an area to at CF grid mapping or lon and lats.""" res = [] - if not got_lonlats and (isinstance(dataarray.attrs['area'], SwathDefinition) or strict): - dataarray = area2lonlat(dataarray) + if not got_lonlats and (isinstance(dataarray.attrs['area'], SwathDefinition) or include_lonlats): + dataarray = add_lonlat_coords(dataarray) if isinstance(dataarray.attrs['area'], AreaDefinition): - dataarray, gmapping = area2gridmapping(dataarray) + dataarray, gmapping = _add_grid_mapping(dataarray) res.append(gmapping) res.append(dataarray) return res -def dataset_is_projection_coords(dataset): - """Check if dataset is a projection coords.""" - if 'standard_name' in dataset.attrs and dataset.attrs['standard_name'] in ['longitude', 'latitude']: +def is_lon_or_lat_dataarray(dataarray): + """Check if the DataArray represents the latitude or longitude coordinate.""" + if 'standard_name' in dataarray.attrs and dataarray.attrs['standard_name'] in ['longitude', 'latitude']: return True return False def has_projection_coords(ds_collection): - """Check if collection has a projection coords among data arrays.""" - for dataset in ds_collection.values(): - if dataset_is_projection_coords(dataset): + """Check if DataArray collection has a "longitude" or "latitude" DataArray.""" + for dataarray in ds_collection.values(): + if is_lon_or_lat_dataarray(dataarray): return True return False @@ -328,7 +328,7 @@ def make_alt_coords_unique(datas, pretty=False): tokens = defaultdict(set) for dataset in datas.values(): for coord_name in dataset.coords: - if not dataset_is_projection_coords(dataset[coord_name]) and coord_name not in dataset.dims: + if not is_lon_or_lat_dataarray(dataset[coord_name]) and coord_name not in dataset.dims: tokens[coord_name].add(tokenize(dataset[coord_name].data)) coords_unique = dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()]) @@ -788,8 +788,7 @@ def make_cf_dataarray(dataarray, exclude_attrs=None, include_orig_name=True, numeric_name_prefix='CHANNEL_'): - """ - Make the xr.DataArray CF-compliant. 
+ """Make the xr.DataArray CF-compliant. Parameters ---------- @@ -871,7 +870,7 @@ def _collect_cf_dataset(list_dataarrays, for dataarray in list_dataarrays: ds_collection.update(get_extra_ds(dataarray)) - # Do what ??? + # Check if one DataArray in the collection has 'longitude' or 'latitude' got_lonlats = has_projection_coords(ds_collection) # Sort dictionary by keys name @@ -895,7 +894,7 @@ def _collect_cf_dataset(list_dataarrays, # - Return the CF-compliant input DataArray as second list element try: list_new_dataarrays = area2cf(dataarray, - strict=include_lonlats, + include_lonlats=include_lonlats, got_lonlats=got_lonlats) except KeyError: list_new_dataarrays = [dataarray] From aa203652dc8562f0a24fea639a6525285e1d1a88 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 25 May 2023 20:01:30 -0500 Subject: [PATCH 0182/1416] Fix accidental sphinx reference in satpy/writers/cf_writer.py --- satpy/writers/cf_writer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 70aaacc127..bfc21fbf5e 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -807,7 +807,7 @@ def make_cf_dataarray(dataarray, The default is True. numeric_name_prefix : TYPE, optional Prepend dataset name with this if starting with a digit. - The default is 'CHANNEL_'. + The default is "CHANNEL_". Returns ------- From 524a044af35935d3c5049d49502b4e00e1160f87 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 25 May 2023 20:15:40 -0500 Subject: [PATCH 0183/1416] Fix docstring formatting in cf_writer.py --- satpy/writers/cf_writer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index bfc21fbf5e..7f73f1e050 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -44,7 +44,7 @@ coordinate is identical for all datasets, the prefix can be removed by setting ``pretty=True``. * Some dataset names start with a digit, like AVHRR channels 1, 2, 3a, 3b, 4 and 5. This doesn't comply with CF https://cfconventions.org/Data/cf-conventions/cf-conventions-1.7/build/ch02s03.html. These channels are prefixed - with `CHANNEL_` by default. This can be controlled with the variable `numeric_name_prefix` to `save_datasets`. + with "CHANNEL_" by default. This can be controlled with the variable `numeric_name_prefix` to `save_datasets`. Setting it to `None` or `''` will skip the prefixing. Grouping From f893b33a245d0aa1292d1dcd6d2808ca3a86d8dd Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 25 May 2023 20:56:14 -0500 Subject: [PATCH 0184/1416] Try fixing docstrings in cf_writer.py --- satpy/writers/cf_writer.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 7f73f1e050..edc26591fe 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -44,7 +44,7 @@ coordinate is identical for all datasets, the prefix can be removed by setting ``pretty=True``. * Some dataset names start with a digit, like AVHRR channels 1, 2, 3a, 3b, 4 and 5. This doesn't comply with CF https://cfconventions.org/Data/cf-conventions/cf-conventions-1.7/build/ch02s03.html. These channels are prefixed - with "CHANNEL_" by default. This can be controlled with the variable `numeric_name_prefix` to `save_datasets`. + with ``"CHANNEL_"`` by default. This can be controlled with the variable `numeric_name_prefix` to `save_datasets`. Setting it to `None` or `''` will skip the prefixing. 
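   For example, a minimal usage sketch (assuming ``scn`` is an already loaded
   Scene; the output filename and the ``'BAND_'`` prefix are illustrative)::

       scn.save_datasets(writer='cf', filename='out.nc', numeric_name_prefix='BAND_')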
Grouping @@ -807,7 +807,7 @@ def make_cf_dataarray(dataarray, The default is True. numeric_name_prefix : TYPE, optional Prepend dataset name with this if starting with a digit. - The default is "CHANNEL_". + The default is ``"CHANNEL_"``. Returns ------- From 00a21c7e0de561c50f43c2688a298d3c7577a6db Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Fri, 26 May 2023 10:04:10 +0000 Subject: [PATCH 0185/1416] Refactor tests Write a VISSR file to disk and read it back in --- satpy/readers/gms5_vissr_l1b.py | 16 +- satpy/readers/gms5_vissr_navigation.py | 36 -- .../tests/reader_tests/test_gms5_vissr_l1b.py | 581 ++++++++++-------- 3 files changed, 345 insertions(+), 288 deletions(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index f99417023c..b59fa6a9b3 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -118,6 +118,13 @@ VIS_CHANNEL = 'VIS' IR_CHANNEL = 'IR' +CHANNEL_TYPES = { + "VIS": VIS_CHANNEL, + "IR1": IR_CHANNEL, + "IR2": IR_CHANNEL, + "IR3": IR_CHANNEL, + "WV": IR_CHANNEL +} ALT_CHANNEL_NAMES = { 'VIS': 'VIS', 'IR1': 'IR1', @@ -863,13 +870,10 @@ def _get_earth_edges(self): def _get_earth_edges_per_scan_line(self, cardinal): edges = self._image_data["LCW"][cardinal].compute().astype(np.int32) - if self._is_vis_channel(): + if is_vis_channel(self._channel): edges = self._correct_vis_edges(edges) return edges - def _is_vis_channel(self): - return self._channel == "VIS" - def _correct_vis_edges(self, edges): """Correct VIS edges. @@ -897,3 +901,7 @@ def get_earth_mask(shape, earth_edges, fill_value=-1): continue mask[line, first:last+1] = 1 return mask + + +def is_vis_channel(channel_name): + return channel_name == "VIS" diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index b2ebd02c4b..a98b42f16f 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -712,43 +712,7 @@ def _interpolate_nearest(x, x_sample, y_sample): """ - Orbital parameters - Area def -- Decompression - Finish Documentation - Call find_enclosing_index only once for all predictions """ - -""" - def _read_data_from_file(self): - if self._is_file_like(): - return self._read_file_like() - return self._read_data_from_disk() - - def _is_file_like(self): - return isinstance(self.filename, FSFile) - - def _read_data_from_disk(self): - # For reading the image data, unzip_context is faster than generic_open - dtype, shape = self._get_input_info() - with utils.unzip_context(self.filename) as fn: - with decompressed(fn) if self.compressed else nullcontext(fn) as filename: - return np.fromfile(filename, - offset=self.offset, - dtype=dtype, - count=np.prod(shape)) - - def _read_file_like(self): - # filename is likely to be a file-like object, already in memory - dtype, shape = self._get_input_info() - with utils.generic_open(self.filename, mode="rb") as fp: - no_elements = np.prod(shape) - fp.seek(self.offset) - return np.frombuffer( - fp.read(np.dtype(dtype).itemsize * no_elements), - dtype=np.dtype(dtype), - count=no_elements.item() - ).reshape(shape) - - - -""" diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index d641626ebd..549d6a0788 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -1,6 +1,6 @@ """Unit tests for GMS-5 VISSR reader.""" -from unittest import mock +import gzip import datetime as dt import numpy as np @@ -10,6 +10,8 @@ 
import satpy.readers.gms5_vissr_l1b as vissr import satpy.readers.gms5_vissr_navigation as nav from satpy.tests.utils import make_dataid +from satpy.readers import FSFile +import fsspec # Navigation references computed with JMA's Msial library (files # VISSR_19960217_2331_IR1.A.IMG and VISSR_19960217_2331_VIS.A.IMG). The VIS @@ -301,13 +303,14 @@ class TestEarthMask: def test_get_earth_mask(self): first_earth_pixels = np.array([-1, 1, 0, -1]) last_earth_pixels = np.array([-1, 3, 2, -1]) + edges = first_earth_pixels, last_earth_pixels mask_exp = np.array( [[0, 0, 0, 0], [0, 1, 1, 1], [1, 1, 1, 0], [0, 0, 0, 0]] ) - mask = vissr.get_earth_mask(mask_exp.shape, first_earth_pixels, last_earth_pixels) + mask = vissr.get_earth_mask(mask_exp.shape, edges) np.testing.assert_equal(mask, mask_exp) @@ -486,34 +489,48 @@ def test_get_observation_time(): np.testing.assert_allclose(obs_time, 50000.0000705496871047) -class TestFileHandler: - channel_types = { - 'VIS': 'VIS', - 'IR1': 'IR', - 'IR2': 'IR', - 'IR3': 'IR' - } +def assert_namedtuple_close(a, b): + assert a.__class__ == b.__class__ + for attr in a._fields: + np.testing.assert_allclose( + getattr(a, attr), + getattr(b, attr), + err_msg='{} attribute {} differs'.format(a.__class__, attr) + ) - def test_get_dataset(self, file_handler, dataset_id, dataset_exp): - dataset = file_handler.get_dataset(dataset_id, {}) - xr.testing.assert_allclose(dataset.compute(), dataset_exp, atol=1E-6) - @pytest.fixture - def file_handler(self, header, dataset_id, mask_space, image_data): - channel_type = self.channel_types[dataset_id['name']] - with mock.patch('satpy.readers.gms5_vissr_l1b.GMS5VISSRFileHandler._read_header') as _read_header, \ - mock.patch('satpy.readers.gms5_vissr_l1b.GMS5VISSRFileHandler._read_image_data') as _read_image_data: - _read_header.return_value = header, channel_type - _read_image_data.return_value = image_data - fh = vissr.GMS5VISSRFileHandler( - 'foo', - {'foo': 'bar'}, - {'foo': 'bar'}, - mask_space=mask_space - ) - # Yield instead of return, to make the memmap mock succeed. - # See https://stackoverflow.com/a/59045506/5703449 - yield fh +class TestFileHandler: + @pytest.fixture(autouse=True) + def patch_number_of_pixels_per_scanline(self, monkeypatch): + num_pixels = 2 + IMAGE_DATA_BLOCK_IR = np.dtype([('LCW', vissr.LINE_CONTROL_WORD), + ('DOC', vissr.U1, (256,)), + ('image_data', vissr.U1, num_pixels)]) + IMAGE_DATA_BLOCK_VIS = np.dtype([('LCW', vissr.LINE_CONTROL_WORD), + ('DOC', vissr.U1, (64,)), + ('image_data', vissr.U1, (num_pixels,))]) + IMAGE_DATA = { + vissr.VIS_CHANNEL: { + 'offset': 6 * vissr.BLOCK_SIZE_VIS, + 'dtype': IMAGE_DATA_BLOCK_VIS, + }, + vissr.IR_CHANNEL: { + 'offset': 18 * vissr.BLOCK_SIZE_IR, + 'dtype': IMAGE_DATA_BLOCK_IR + } + } + monkeypatch.setattr( + "satpy.readers.gms5_vissr_l1b.IMAGE_DATA_BLOCK_IR", + IMAGE_DATA_BLOCK_IR + ) + monkeypatch.setattr( + "satpy.readers.gms5_vissr_l1b.IMAGE_DATA_BLOCK_VIS", + IMAGE_DATA_BLOCK_VIS + ) + monkeypatch.setattr( + "satpy.readers.gms5_vissr_l1b.IMAGE_DATA", + IMAGE_DATA + ) @pytest.fixture(params=[ make_dataid(name="VIS", calibration="reflectance"), @@ -523,255 +540,283 @@ def file_handler(self, header, dataset_id, mask_space, image_data): def dataset_id(self, request): return request.param - @pytest.fixture(params=[True, False]) + @pytest.fixture(params=[ + True, + False + ]) def mask_space(self, request): return request.param - @pytest.fixture - def image_data(self, dataset_id): - """Get fake image data. 
- - Data type: - - ( - (line number, timestamp, west edge, east edge), - (data1, data2, ...) - ) - - VIS channel: - - pix = [6688, 6688, 6689, 6689] - lin = [2744, 8356, 2744, 8356] - earth mask = [[0, 0], [0, 1]] - - IR1 channel: + @pytest.fixture(params=[True, False]) + def with_compression(self, request): + return request.param - pix = [1672, 1672, 1673, 1673] - lin = [686, 2089, 686, 2089] - earth mask = [[1, 1], [1, 1]] - """ - line_control_word = np.dtype([ - ('line_number', vissr.I4), - ('scan_time', vissr.R8), - ('west_side_earth_edge', vissr.I4), - ('east_side_earth_edge', vissr.I4) - ]) - dtype = np.dtype([('LCW', line_control_word), - ('image_data', vissr.U1, (2,))]) - cases = { - "IR1": np.array([((686, 50000, 0, 1), (0, 1)), ((2089, 50000, 0, 1), (2, 3))], dtype=dtype), - "VIS": np.array([((2744, 50000, -1, -1), (0, 1)), ((8356, 50000, 0, 1), (2, 3))], dtype=dtype) - } - return cases[dataset_id["name"]] + @pytest.fixture + def open_function(self, with_compression): + return gzip.open if with_compression else open @pytest.fixture - def header(self, control_block, image_params): - return { - 'control_block': control_block, - 'image_parameters': image_params - } + def vissr_file(self, dataset_id, file_contents, open_function, tmp_path): + filename = tmp_path / "vissr_file" + ch_type = vissr.CHANNEL_TYPES[dataset_id["name"]] + writer = VissrFileWriter(ch_type, open_function) + writer.write(filename, file_contents) + return filename @pytest.fixture - def control_block(self): + def file_contents(self, control_block, image_parameters, image_data): return { - 'available_block_size_of_image_data': 2 + "control_block": control_block, + "image_parameters": image_parameters, + "image_data": image_data } @pytest.fixture - def image_params( - self, mode, coordinate_conversion, attitude_prediction, - orbit_prediction, vis_calibration, ir1_calibration, ir2_calibration, - wv_calibration, simple_coordinate_conversion_table - ): - return { - 'mode': mode, - 'coordinate_conversion': coordinate_conversion, - 'attitude_prediction': attitude_prediction, - 'orbit_prediction': orbit_prediction, - 'vis_calibration': vis_calibration, - 'ir1_calibration': ir1_calibration, - 'ir2_calibration': ir2_calibration, - 'wv_calibration': wv_calibration, - 'simple_coordinate_conversion_table': simple_coordinate_conversion_table + def control_block(self, dataset_id): + block_size = { + "IR1": 16, + "VIS": 4 } + ctrl_block = np.zeros(1, dtype=vissr.CONTROL_BLOCK) + ctrl_block["parameter_block_size"] = block_size[dataset_id["name"]] + ctrl_block["available_block_size_of_image_data"] = 2 + return ctrl_block @pytest.fixture - def mode(self, ir_frame_parameters, vis_frame_parameters): + def image_parameters( + self, + mode_block, + coordinate_conversion, + attitude_prediction, + orbit_prediction_1, + orbit_prediction_2, + vis_calibration, + ir1_calibration, + ir2_calibration, + wv_calibration, + simple_coordinate_conversion_table, + ): return { - 'satellite_name': b'GMS-5 ', - 'spin_rate': 99.21774, - 'ir_frame_parameters': ir_frame_parameters, - 'vis_frame_parameters': vis_frame_parameters + "mode": mode_block, + "coordinate_conversion": coordinate_conversion, + "attitude_prediction": attitude_prediction, + "orbit_prediction_1": orbit_prediction_1, + "orbit_prediction_2": orbit_prediction_2, + "vis_calibration": vis_calibration, + "ir1_calibration": ir1_calibration, + "ir2_calibration": ir2_calibration, + "wv_calibration": wv_calibration, + "simple_coordinate_conversion_table": simple_coordinate_conversion_table } 
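+
+    # Note: each parameter block above is a plain numpy structured array
+    # mirroring the on-disk VISSR layout. VissrFileWriter (defined at the
+    # bottom of this module) looks up each block's byte offset in
+    # vissr.IMAGE_PARAMS and writes the arrays at those positions, so the
+    # file handler under test reads them back exactly like a real file.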
@pytest.fixture - def ir_frame_parameters(self): - return {'number_of_lines': 2, 'number_of_pixels': 2} - - @pytest.fixture - def vis_frame_parameters(self): - return {'number_of_lines': 2, 'number_of_pixels': 2} - + def mode_block(self): + mode = np.zeros(1, dtype=vissr.MODE_BLOCK) + mode["satellite_name"] = b'GMS-5 ' + mode["spin_rate"] = 99.21774 + mode["ir_frame_parameters"]["number_of_lines"] = 2 + mode["ir_frame_parameters"]["number_of_pixels"] = 2 + mode["vis_frame_parameters"]["number_of_lines"] = 2 + mode["vis_frame_parameters"]["number_of_pixels"] = 2 + return mode + @pytest.fixture def coordinate_conversion(self): - """Provide parameters for coordinate conversions. + """Get parameters for coordinate conversions. - Reference coordinates were computed near the central column. Adjust - pixel offset so that the first column is at the image center. This has - the advantage that we can test with very small 2x2 images. + Adjust pixel offset so that the first column is at the image center. + This has the advantage that we can test with very small 2x2 images. + Otherwise, all pixels would be in space. """ - return { - 'central_line_number_of_vissr_frame': { - 'IR1': 1378.5, - 'IR2': 1378.7, - 'VIS': 5513.0, - 'WV': 1379.1001 - }, - 'central_pixel_number_of_vissr_frame': { - 'IR1': 0.5, # instead of 1672.5 - 'IR2': 1672.5, - 'VIS': 0.5, # instead of 6688.5 - 'WV': 1672.5 - }, - 'pixel_difference_of_vissr_center_from_normal_position': { - 'IR1': 0.0, - 'IR2': 0.0, - 'VIS': 0.0, - 'WV': 0.0 - }, - 'scheduled_observation_time': 50130.979089568464, - 'number_of_sensor_elements': { - 'IR1': 1.0, - 'IR2': 1.0, - 'VIS': 4.0, - 'WV': 1.0 - }, - 'sampling_angle_along_pixel': { - 'IR1': 9.5719995e-05, - 'IR2': 9.5719995e-05, - 'VIS': 2.3929999e-05, - 'WV': 9.5719995e-05 - }, - 'stepping_angle_along_line': { - 'IR1': 0.00014000005, - 'IR2': 0.00014000005, - 'VIS': 3.5000005e-05, - 'WV': 0.00014000005 - }, - 'matrix_of_misalignment': np.array( - [[9.9999917e-01, -5.1195198e-04, -1.2135329e-03], - [5.1036407e-04, 9.9999905e-01, -1.3083406e-03], - [1.2142011e-03, 1.3077201e-03, 9.9999845e-01]], - dtype=np.float32 - ), - 'parameters': { - 'equatorial_radius': 6377397.0, - 'oblateness_of_earth': 0.003342773, - } - } + # fmt: off + conv = np.zeros(1, dtype=vissr.COORDINATE_CONVERSION_PARAMETERS) + + cline = conv["central_line_number_of_vissr_frame"] + cline["IR1"] = 1378.5 + cline["VIS"] = 5513.0 + + cpix = conv["central_pixel_number_of_vissr_frame"] + cpix["IR1"] = 0.5 # instead of 1672.5 + cpix["VIS"] = 0.5 # instead of 6688.5 + + conv['scheduled_observation_time'] = 50130.979089568464 + + nsensors = conv["number_of_sensor_elements"] + nsensors["IR1"] = 1 + nsensors["VIS"] = 4 + + sampling_angle = conv["sampling_angle_along_pixel"] + sampling_angle["IR1"] = 9.5719995e-05 + sampling_angle["VIS"] = 2.3929999e-05 + + stepping_angle = conv["stepping_angle_along_line"] + stepping_angle["IR1"] = 0.00014000005 + stepping_angle["VIS"] = 3.5000005e-05 + + conv["matrix_of_misalignment"] = np.array( + [[9.9999917e-01, -5.1195198e-04, -1.2135329e-03], + [5.1036407e-04, 9.9999905e-01, -1.3083406e-03], + [1.2142011e-03, 1.3077201e-03, 9.9999845e-01]], + dtype=np.float32 + ) + conv["parameters"]["equatorial_radius"] = 6377397.0 + conv["parameters"]["oblateness_of_earth"] = 0.003342773 + # fmt: on + return conv + @pytest.fixture def attitude_prediction(self): + att_pred = np.zeros(1, dtype=vissr.ATTITUDE_PREDICTION) # fmt: off - return { - 'data': np.array([ - (50130.93055556, (19960217, 222000), 3.14911863, 
0.00054604, 4.3324597 , 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.93402778, (19960217, 222500), 3.14911863, 0.00054604, 4.31064812, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.9375 , (19960217, 223000), 3.14911863, 0.00054604, 4.28883633, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.94097222, (19960217, 223500), 3.14911863, 0.00054604, 4.26702432, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.94444444, (19960217, 224000), 3.14911863, 0.00054604, 4.2452121 , 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.94791667, (19960217, 224500), 3.14911863, 0.00054604, 4.22339966, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.95138889, (19960217, 225000), 3.14911863, 0.00054604, 4.201587 , 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.95486111, (19960217, 225500), 3.14911863, 0.00054604, 4.17977411, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.95833333, (19960217, 230000), 3.14911863, 0.00054604, 4.157961 , 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.96180556, (19960217, 230500), 3.14911863, 0.00054604, 4.13614765, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.96527778, (19960217, 231000), 3.14911863, 0.00054604, 4.11433408, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.96875 , (19960217, 231500), 3.14911863, 0.00054604, 4.09252027, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.97222222, (19960217, 232000), 3.14911863, 0.00054604, 4.07070622, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.97569444, (19960217, 232500), 3.14911863, 0.00054604, 4.04889193, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.97916667, (19960217, 233000), 3.14911863, 0.00054604, 4.02707741, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.98263889, (19960217, 233500), 3.14911863, 0.00054604, 4.00526265, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.98611111, (19960217, 234000), 3.14911863, 0.00054604, 3.98344765, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.98958333, (19960217, 234500), 3.14911863, 0.00054604, 3.96163241, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.99305556, (19960217, 235000), 3.14911863, 0.00054604, 3.93981692, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.99652778, (19960217, 235500), 3.14911863, 0.00054604, 3.9180012 , 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131. 
, (19960218, 0), 3.14911863, 0.00054604, 3.89618523, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131.00347222, (19960218, 500), 3.14911863, 0.00054604, 3.87436903, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131.00694444, (19960218, 1000), 3.14911863, 0.00054604, 3.85255258, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131.01041667, (19960218, 1500), 3.14911863, 0.00054604, 3.8307359 , 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131.01388889, (19960218, 2000), 3.14911863, 0.00054604, 3.80891898, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131.01736111, (19960218, 2500), 3.14911863, 0.00054604, 3.78710182, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131.02083333, (19960218, 3000), 3.14911863, 0.00054604, 3.76528442, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131.02430556, (19960218, 3500), 3.14911863, 0.00054604, 3.74346679, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131.02777778, (19960218, 4000), 3.14911863, 0.00054604, 3.72164893, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131.03125 , (19960218, 4500), 3.14911863, 0.00054604, 3.69983084, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131.03472222, (19960218, 5000), 3.14911863, 0.00054604, 3.67801252, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131.03819444, (19960218, 5500), 3.14911863, 0.00054604, 3.65619398, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131.04166667, (19960218, 10000), 3.14911863, 0.00054604, 3.63437521, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0)], - dtype=vissr.ATTITUDE_PREDICTION_DATA - ), - } + att_pred["data"] = np.array([ + (50130.93055556, (19960217, 222000), 3.14911863, 0.00054604, 4.3324597 , 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.93402778, (19960217, 222500), 3.14911863, 0.00054604, 4.31064812, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.9375 , (19960217, 223000), 3.14911863, 0.00054604, 4.28883633, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.94097222, (19960217, 223500), 3.14911863, 0.00054604, 4.26702432, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.94444444, (19960217, 224000), 3.14911863, 0.00054604, 4.2452121 , 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.94791667, (19960217, 224500), 3.14911863, 0.00054604, 4.22339966, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.95138889, (19960217, 225000), 3.14911863, 0.00054604, 4.201587 , 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.95486111, (19960217, 225500), 3.14911863, 0.00054604, 4.17977411, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.95833333, (19960217, 230000), 3.14911863, 0.00054604, 4.157961 , 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.96180556, (19960217, 230500), 3.14911863, 0.00054604, 4.13614765, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.96527778, (19960217, 231000), 3.14911863, 0.00054604, 4.11433408, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.96875 , (19960217, 231500), 3.14911863, 0.00054604, 4.09252027, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.97222222, (19960217, 232000), 3.14911863, 0.00054604, 4.07070622, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.97569444, (19960217, 232500), 3.14911863, 0.00054604, 4.04889193, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.97916667, (19960217, 233000), 3.14911863, 0.00054604, 4.02707741, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.98263889, (19960217, 233500), 3.14911863, 
0.00054604, 4.00526265, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.98611111, (19960217, 234000), 3.14911863, 0.00054604, 3.98344765, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.98958333, (19960217, 234500), 3.14911863, 0.00054604, 3.96163241, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.99305556, (19960217, 235000), 3.14911863, 0.00054604, 3.93981692, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50130.99652778, (19960217, 235500), 3.14911863, 0.00054604, 3.9180012 , 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131. , (19960218, 0), 3.14911863, 0.00054604, 3.89618523, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131.00347222, (19960218, 500), 3.14911863, 0.00054604, 3.87436903, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131.00694444, (19960218, 1000), 3.14911863, 0.00054604, 3.85255258, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131.01041667, (19960218, 1500), 3.14911863, 0.00054604, 3.8307359 , 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131.01388889, (19960218, 2000), 3.14911863, 0.00054604, 3.80891898, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131.01736111, (19960218, 2500), 3.14911863, 0.00054604, 3.78710182, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131.02083333, (19960218, 3000), 3.14911863, 0.00054604, 3.76528442, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131.02430556, (19960218, 3500), 3.14911863, 0.00054604, 3.74346679, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131.02777778, (19960218, 4000), 3.14911863, 0.00054604, 3.72164893, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131.03125 , (19960218, 4500), 3.14911863, 0.00054604, 3.69983084, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131.03472222, (19960218, 5000), 3.14911863, 0.00054604, 3.67801252, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131.03819444, (19960218, 5500), 3.14911863, 0.00054604, 3.65619398, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), + (50131.04166667, (19960218, 10000), 3.14911863, 0.00054604, 3.63437521, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0)], + dtype=vissr.ATTITUDE_PREDICTION_DATA + ) # fmt: on + return att_pred @pytest.fixture - def orbit_prediction(self): + def orbit_prediction_1(self): + orb_pred = np.zeros(1, dtype=vissr.ORBIT_PREDICTION) # fmt: off - return { - 'data': np.array([ - (50130.96180556, (960217, 230500), [ 2247604.14185506, -42110997.39399951, -276688.79765022], [3069.77904265, 164.12584895, 3.65437628], [-32392525.09983424, 27002204.93121811, -263873.25702763], [ 0.81859376, 0.6760037 , 17.44588753], 133.46391815, (330.12326803, -12.19424863), (197.27884747, -11.96904141), [[ 9.99936382e-01, 1.03449318e-02, 4.49611916e-03], [-1.03447475e-02, 9.99946490e-01, -6.42483646e-05], [-4.49654321e-03, 1.77330598e-05, 9.99989890e-01]], [ 2.46885475e+08, -2.07840219e+08, -7.66028692e+07], (-0.35887085, 140.18562594, 35793706.31768975), 0, 0), - (50130.96527778, (960217, 231000), [ 3167927.33749398, -42051692.51095297, -275526.52514815], [3065.46435995, 231.22434208, 4.09379482], [-32392279.4626506 , 27002405.27592725, -258576.96255205], [ 0.81939962, 0.66017389, 17.86159393], 134.71734048, (330.12643276, -12.19310271), (196.02858456, -11.9678881 ), [[ 9.99936382e-01, 1.03449336e-02, 4.49611993e-03], [-1.03447493e-02, 9.99946490e-01, -6.42473793e-05], [-4.49654398e-03, 1.77320586e-05, 9.99989890e-01]], [ 2.46204142e+08, -2.07689897e+08, -7.65268207e+07], (-0.35166851, 140.18520316, 35793613.0815237 ), 0, 0), - 
(50130.96875 , (960217, 231500), [ 4086736.12968183, -41972273.80964861, -274232.7185828 ], [3059.68341675, 298.21262775, 4.53123515], [-32392033.65156128, 27002600.83510851, -253157.23498394], [ 0.81975174, 0.6441 , 18.26873686], 135.97076281, (330.12959087, -12.19195587), (194.77831505, -11.96673388), [[ 9.99936382e-01, 1.03449353e-02, 4.49612071e-03], [-1.03447510e-02, 9.99946490e-01, -6.42463940e-05], [-4.49654474e-03, 1.77310575e-05, 9.99989890e-01]], [ 2.45524133e+08, -2.07559497e+08, -7.64508451e+07], (-0.3442983 , 140.18478523, 35793516.57370046), 0, 0), - (50130.97222222, (960217, 232000), [ 5003591.03339227, -41872779.15809826, -272808.0027587 ], [3052.43895532, 365.05867777, 4.9664885 ], [-32391787.80234722, 27002791.53735474, -247616.67261456], [ 0.81965461, 0.62779672, 18.66712192], 137.22418515, (330.13274246, -12.19080808), (193.52803902, -11.9655787 ), [[ 9.99936382e-01, 1.03449371e-02, 4.49612148e-03], [-1.03447528e-02, 9.99946490e-01, -6.42454089e-05], [-4.49654551e-03, 1.77300565e-05, 9.99989890e-01]], [ 2.44845888e+08, -2.07448982e+08, -7.63749418e+07], (-0.33676374, 140.18437233, 35793416.91561355), 0, 0), - (50130.97569444, (960217, 232500), [ 5918053.49286455, -41753256.02295399, -271253.06495935], [3043.73441705, 431.73053079, 5.39934712], [-32391542.0492856 , 27002977.3157848 , -241957.93142027], [ 0.81911313, 0.61127876, 19.05655891], 138.47760748, (330.13588763, -12.1896593 ), (192.27775657, -11.96442254), [[ 9.99936382e-01, 1.03449388e-02, 4.49612225e-03], [-1.03447545e-02, 9.99946490e-01, -6.42444238e-05], [-4.49654627e-03, 1.77290557e-05, 9.99989890e-01]], [ 2.44169846e+08, -2.07358303e+08, -7.62991102e+07], (-0.32906846, 140.18396465, 35793314.23041636), 0, 0), - (50130.97916667, (960217, 233000), [ 6829686.08751574, -41613761.44760592, -269568.65462124], [3033.5739409 , 498.19630731, 5.82960444], [-32391296.52466749, 27003158.10847847, -236183.72381214], [ 0.81813262, 0.59456087, 19.43686189], 139.73102981, (330.1390265 , -12.18850951), (191.02746783, -11.96326537), [[ 9.99936382e-01, 1.03449406e-02, 4.49612302e-03], [-1.03447563e-02, 9.99946490e-01, -6.42434389e-05], [-4.49654703e-03, 1.77280550e-05, 9.99989890e-01]], [ 2.43496443e+08, -2.07287406e+08, -7.62233495e+07], (-0.32121612, 140.18356238, 35793208.6428103 ), 0, 0), - (50130.98263889, (960217, 233500), [ 7738052.74476409, -41454362.02480648, -267755.58296603], [3021.96236148, 564.42422513, 6.25705512], [-32391051.35918404, 27003333.85786499, -230296.81731314], [ 0.81671881, 0.57765777, 19.80784932], 140.98445214, (330.14215916, -12.18735869), (189.77717289, -11.96210717), [[ 9.99936381e-01, 1.03449423e-02, 4.49612379e-03], [-1.03447580e-02, 9.99946489e-01, -6.42424541e-05], [-4.49654778e-03, 1.77270545e-05, 9.99989890e-01]], [ 2.42826115e+08, -2.07236222e+08, -7.61476592e+07], (-0.3132105 , 140.18316567, 35793100.27882991), 0, 0), - (50130.98611111, (960217, 234000), [ 8642718.9445816 , -41275133.86582235, -265814.72261683], [3008.90520686, 630.38261431, 6.68149519], [-32390806.68247503, 27003504.50991426, -224300.03325666], [ 0.81487783, 0.56058415, 20.16934411], 142.23787447, (330.14528573, -12.18620679), (188.52687186, -11.9609479 ), [[ 9.99936381e-01, 1.03449440e-02, 4.49612456e-03], [-1.03447598e-02, 9.99946489e-01, -6.42414694e-05], [-4.49654854e-03, 1.77260540e-05, 9.99989890e-01]], [ 2.42159297e+08, -2.07204676e+08, -7.60720382e+07], (-0.30505542, 140.18277471, 35792989.2656269 ), 0, 0), - (50130.98958333, (960217, 234500), [ 9543251.93095296, -41076162.56379041, -263747.00717057], 
[2994.40869593, 696.03993248, 7.10272213], [-32390562.62077149, 27003670.01680953, -218196.24541058], [ 0.81261619, 0.54335463, 20.52117372], 143.4912968 , (330.14840632, -12.18505381), (187.27656486, -11.95978754), [[ 9.99936381e-01, 1.03449458e-02, 4.49612532e-03], [-1.03447615e-02, 9.99946489e-01, -6.42404848e-05], [-4.49654930e-03, 1.77250538e-05, 9.99989890e-01]], [ 2.41496422e+08, -2.07192684e+08, -7.59964859e+07], (-0.29675479, 140.18238966, 35792875.73125207), 0, 0), - (50130.99305556, (960217, 235000), [ 10439220.91492008, -40857543.15396438, -261553.43075696], [2978.47973561, 761.36477969, 7.52053495], [-32390319.30020279, 27003830.33282405, -211988.37862591], [ 0.80994076, 0.52598377, 20.86317023], 144.74471913, (330.15152105, -12.1838997 ), (186.026252 , -11.95862606), [[ 9.99936381e-01, 1.03449475e-02, 4.49612609e-03], [-1.03447632e-02, 9.99946489e-01, -6.42395003e-05], [-4.49655005e-03, 1.77240537e-05, 9.99989890e-01]], [ 2.40837919e+08, -2.07200148e+08, -7.59210011e+07], (-0.28831259, 140.18201066, 35792759.80443729), 0, 0), - (50130.99652778, (960217, 235500), [ 11330197.2840407 , -40619380.06793167, -259235.04755252], [2961.12591755, 826.32591367, 7.93473432], [-32390076.84311398, 27003985.41857829, -205679.40741202], [ 0.80685878, 0.50848599, 21.19517045], 145.99814147, (330.15463004, -12.18274445), (184.77593341, -11.95746344), [[ 9.99936381e-01, 1.03449492e-02, 4.49612685e-03], [-1.03447650e-02, 9.99946489e-01, -6.42385159e-05], [-4.49655080e-03, 1.77230537e-05, 9.99989890e-01]], [ 2.40184218e+08, -2.07226967e+08, -7.58455830e+07], (-0.27973286, 140.18163787, 35792641.6143761 ), 0, 0), - (50131. , (960218, 0), [ 12215754.80493221, -40361787.08463053, -256792.97127933], [2942.35551459, 890.89226454, 8.34512262], [-32389835.37113104, 27004135.23720251, -199272.35452792], [ 0.8033778 , 0.49087558, 21.51701595], 147.2515638 , (330.15773341, -12.18158803), (183.5256092 , -11.95629965), [[ 9.99936381e-01, 1.03449510e-02, 4.49612761e-03], [-1.03447667e-02, 9.99946489e-01, -6.42375317e-05], [-4.49655155e-03, 1.77220539e-05, 9.99989890e-01]], [ 2.39535744e+08, -2.07273025e+08, -7.57702305e+07], (-0.2710197 , 140.18127143, 35792521.29050537), 0, 0), - (50131.00347222, (960218, 500), [ 13095469.82708225, -40084887.27645436, -254228.37467049], [2922.17747695, 955.03294974, 8.75150409], [-32389595.00191828, 27004279.7580633 , -192770.28953487], [ 0.79950572, 0.47316669, 21.82855319], 148.50498613, (330.16083128, -12.18043041), (182.27527951, -11.95513466), [[ 9.99936381e-01, 1.03449527e-02, 4.49612837e-03], [-1.03447684e-02, 9.99946489e-01, -6.42365476e-05], [-4.49655230e-03, 1.77210542e-05, 9.99989890e-01]], [ 2.38892921e+08, -2.07338200e+08, -7.56949425e+07], (-0.26217728, 140.18091148, 35792398.96228714), 0, 0), - (50131.00694444, (960218, 1000), [ 13968921.48773305, -39788812.95011112, -251542.48890031], [2900.60142795, 1018.71728887, 9.15368488], [-32389355.85220329, 27004418.95297137, -186176.32730922], [ 0.79525074, 0.45537327, 22.12963356], 149.75840846, (330.16392379, -12.17927157), (181.02494445, -11.95396845), [[ 9.99936381e-01, 1.03449544e-02, 4.49612913e-03], [-1.03447701e-02, 9.99946489e-01, -6.42355636e-05], [-4.49655305e-03, 1.77200547e-05, 9.99989890e-01]], [ 2.38256170e+08, -2.07422360e+08, -7.56197178e+07], (-0.25320985, 140.18055815, 35792274.75899146), 0, 0), - (50131.01041667, (960218, 1500), [ 14835691.90970188, -39473705.58489136, -248736.60300345], [2877.63765957, 1081.9148182 , 9.55147314], [-32389118.03536845, 27004552.79890675, -179493.62657611], [ 
0.79062131, 0.43750908, 22.42011344], 151.01183079, (330.16701107, -12.17811148), (179.77462147, -11.952801 ), [[ 9.99936381e-01, 1.03449561e-02, 4.49612989e-03], [-1.03447719e-02, 9.99946489e-01, -6.42345798e-05], [-4.49655380e-03, 1.77190553e-05, 9.99989890e-01]], [ 2.37625908e+08, -2.07525364e+08, -7.55445552e+07], (-0.24412169, 140.18021156, 35792148.80948149), 0, 0), - (50131.01388889, (960218, 2000), [ 15695366.40490882, -39139715.76420763, -245812.06324505], [2853.29712752, 1144.59530548, 9.94467917], [-32388881.66227116, 27004681.27687033, -172725.38836895], [ 0.7856262 , 0.41958762, 22.69985431], 152.26525312, (330.17009324, -12.17695013), (178.52427609, -11.95163228), [[ 9.99936381e-01, 1.03449578e-02, 4.49613064e-03], [-1.03447736e-02, 9.99946489e-01, -6.42335961e-05], [-4.49655455e-03, 1.77180562e-05, 9.99989890e-01]], [ 2.37002549e+08, -2.07647061e+08, -7.54694534e+07], (-0.23491716, 140.17987182, 35792021.2420001 ), 0, 0), - (50131.01736111, (960218, 2500), [ 16547533.6691137 , -38787003.10533711, -242770.27248672], [2827.5914462 , 1206.72876414, 10.33311542], [-32388646.84104986, 27004804.37195345, -165874.85452439], [ 0.78027439, 0.40162218, 22.96872279], 153.51867545, (330.17317044, -12.17578748), (177.27392574, -11.95046228), [[ 9.99936381e-01, 1.03449595e-02, 4.49613140e-03], [-1.03447753e-02, 9.99946489e-01, -6.42326125e-05], [-4.49655529e-03, 1.77170571e-05, 9.99989890e-01]], [ 2.36386506e+08, -2.07787291e+08, -7.53944111e+07], (-0.22560065, 140.17953905, 35791892.18395986), 0, 0), - (50131.02083333, (960218, 3000), [ 17391785.98229151, -38415736.18212036, -239612.68950141], [2800.53288309, 1268.28546791, 10.71659666], [-32388413.67874206, 27004922.07123395, -158945.30610131], [ 0.77457509, 0.38362576, 23.2265907 ], 154.77209777, (330.17624281, -12.17462353), (176.02357057, -11.94929096), [[ 9.99936381e-01, 1.03449612e-02, 4.49613215e-03], [-1.03447770e-02, 9.99946489e-01, -6.42316291e-05], [-4.49655603e-03, 1.77160583e-05, 9.99989890e-01]], [ 2.35778185e+08, -2.07945887e+08, -7.53194268e+07], (-0.21617663, 140.17921335, 35791761.76173551), 0, 0)], - dtype=vissr.ORBIT_PREDICTION_DATA - ) - } + orb_pred["data"] = np.array([ + (50130.96180556, (960217, 230500), [ 2247604.14185506, -42110997.39399951, -276688.79765022], [3069.77904265, 164.12584895, 3.65437628], [-32392525.09983424, 27002204.93121811, -263873.25702763], [ 0.81859376, 0.6760037 , 17.44588753], 133.46391815, (330.12326803, -12.19424863), (197.27884747, -11.96904141), [[ 9.99936382e-01, 1.03449318e-02, 4.49611916e-03], [-1.03447475e-02, 9.99946490e-01, -6.42483646e-05], [-4.49654321e-03, 1.77330598e-05, 9.99989890e-01]], [ 2.46885475e+08, -2.07840219e+08, -7.66028692e+07], (-0.35887085, 140.18562594, 35793706.31768975), 0, 0), + (50130.96527778, (960217, 231000), [ 3167927.33749398, -42051692.51095297, -275526.52514815], [3065.46435995, 231.22434208, 4.09379482], [-32392279.4626506 , 27002405.27592725, -258576.96255205], [ 0.81939962, 0.66017389, 17.86159393], 134.71734048, (330.12643276, -12.19310271), (196.02858456, -11.9678881 ), [[ 9.99936382e-01, 1.03449336e-02, 4.49611993e-03], [-1.03447493e-02, 9.99946490e-01, -6.42473793e-05], [-4.49654398e-03, 1.77320586e-05, 9.99989890e-01]], [ 2.46204142e+08, -2.07689897e+08, -7.65268207e+07], (-0.35166851, 140.18520316, 35793613.0815237 ), 0, 0), + (50130.96875 , (960217, 231500), [ 4086736.12968183, -41972273.80964861, -274232.7185828 ], [3059.68341675, 298.21262775, 4.53123515], [-32392033.65156128, 27002600.83510851, -253157.23498394], [ 0.81975174, 0.6441 , 
18.26873686], 135.97076281, (330.12959087, -12.19195587), (194.77831505, -11.96673388), [[ 9.99936382e-01, 1.03449353e-02, 4.49612071e-03], [-1.03447510e-02, 9.99946490e-01, -6.42463940e-05], [-4.49654474e-03, 1.77310575e-05, 9.99989890e-01]], [ 2.45524133e+08, -2.07559497e+08, -7.64508451e+07], (-0.3442983 , 140.18478523, 35793516.57370046), 0, 0), + (50130.97222222, (960217, 232000), [ 5003591.03339227, -41872779.15809826, -272808.0027587 ], [3052.43895532, 365.05867777, 4.9664885 ], [-32391787.80234722, 27002791.53735474, -247616.67261456], [ 0.81965461, 0.62779672, 18.66712192], 137.22418515, (330.13274246, -12.19080808), (193.52803902, -11.9655787 ), [[ 9.99936382e-01, 1.03449371e-02, 4.49612148e-03], [-1.03447528e-02, 9.99946490e-01, -6.42454089e-05], [-4.49654551e-03, 1.77300565e-05, 9.99989890e-01]], [ 2.44845888e+08, -2.07448982e+08, -7.63749418e+07], (-0.33676374, 140.18437233, 35793416.91561355), 0, 0), + (50130.97569444, (960217, 232500), [ 5918053.49286455, -41753256.02295399, -271253.06495935], [3043.73441705, 431.73053079, 5.39934712], [-32391542.0492856 , 27002977.3157848 , -241957.93142027], [ 0.81911313, 0.61127876, 19.05655891], 138.47760748, (330.13588763, -12.1896593 ), (192.27775657, -11.96442254), [[ 9.99936382e-01, 1.03449388e-02, 4.49612225e-03], [-1.03447545e-02, 9.99946490e-01, -6.42444238e-05], [-4.49654627e-03, 1.77290557e-05, 9.99989890e-01]], [ 2.44169846e+08, -2.07358303e+08, -7.62991102e+07], (-0.32906846, 140.18396465, 35793314.23041636), 0, 0), + (50130.97916667, (960217, 233000), [ 6829686.08751574, -41613761.44760592, -269568.65462124], [3033.5739409 , 498.19630731, 5.82960444], [-32391296.52466749, 27003158.10847847, -236183.72381214], [ 0.81813262, 0.59456087, 19.43686189], 139.73102981, (330.1390265 , -12.18850951), (191.02746783, -11.96326537), [[ 9.99936382e-01, 1.03449406e-02, 4.49612302e-03], [-1.03447563e-02, 9.99946490e-01, -6.42434389e-05], [-4.49654703e-03, 1.77280550e-05, 9.99989890e-01]], [ 2.43496443e+08, -2.07287406e+08, -7.62233495e+07], (-0.32121612, 140.18356238, 35793208.6428103 ), 0, 0), + (50130.98263889, (960217, 233500), [ 7738052.74476409, -41454362.02480648, -267755.58296603], [3021.96236148, 564.42422513, 6.25705512], [-32391051.35918404, 27003333.85786499, -230296.81731314], [ 0.81671881, 0.57765777, 19.80784932], 140.98445214, (330.14215916, -12.18735869), (189.77717289, -11.96210717), [[ 9.99936381e-01, 1.03449423e-02, 4.49612379e-03], [-1.03447580e-02, 9.99946489e-01, -6.42424541e-05], [-4.49654778e-03, 1.77270545e-05, 9.99989890e-01]], [ 2.42826115e+08, -2.07236222e+08, -7.61476592e+07], (-0.3132105 , 140.18316567, 35793100.27882991), 0, 0), + (50130.98611111, (960217, 234000), [ 8642718.9445816 , -41275133.86582235, -265814.72261683], [3008.90520686, 630.38261431, 6.68149519], [-32390806.68247503, 27003504.50991426, -224300.03325666], [ 0.81487783, 0.56058415, 20.16934411], 142.23787447, (330.14528573, -12.18620679), (188.52687186, -11.9609479 ), [[ 9.99936381e-01, 1.03449440e-02, 4.49612456e-03], [-1.03447598e-02, 9.99946489e-01, -6.42414694e-05], [-4.49654854e-03, 1.77260540e-05, 9.99989890e-01]], [ 2.42159297e+08, -2.07204676e+08, -7.60720382e+07], (-0.30505542, 140.18277471, 35792989.2656269 ), 0, 0), + (50130.98958333, (960217, 234500), [ 9543251.93095296, -41076162.56379041, -263747.00717057], [2994.40869593, 696.03993248, 7.10272213], [-32390562.62077149, 27003670.01680953, -218196.24541058], [ 0.81261619, 0.54335463, 20.52117372], 143.4912968 , (330.14840632, -12.18505381), (187.27656486, -11.95978754), [[ 
9.99936381e-01, 1.03449458e-02, 4.49612532e-03], [-1.03447615e-02, 9.99946489e-01, -6.42404848e-05], [-4.49654930e-03, 1.77250538e-05, 9.99989890e-01]], [ 2.41496422e+08, -2.07192684e+08, -7.59964859e+07], (-0.29675479, 140.18238966, 35792875.73125207), 0, 0)], + dtype=vissr.ORBIT_PREDICTION_DATA + ) # fmt: on + return orb_pred + + @pytest.fixture + def orbit_prediction_2(self): + orb_pred = np.zeros(1, dtype=vissr.ORBIT_PREDICTION) + # fmt: off + orb_pred["data"] = np.array([ + (50130.99305556, (960217, 235000), [ 10439220.91492008, -40857543.15396438, -261553.43075696], [2978.47973561, 761.36477969, 7.52053495], [-32390319.30020279, 27003830.33282405, -211988.37862591], [ 0.80994076, 0.52598377, 20.86317023], 144.74471913, (330.15152105, -12.1838997 ), (186.026252 , -11.95862606), [[ 9.99936381e-01, 1.03449475e-02, 4.49612609e-03], [-1.03447632e-02, 9.99946489e-01, -6.42395003e-05], [-4.49655005e-03, 1.77240537e-05, 9.99989890e-01]], [ 2.40837919e+08, -2.07200148e+08, -7.59210011e+07], (-0.28831259, 140.18201066, 35792759.80443729), 0, 0), + (50130.99652778, (960217, 235500), [ 11330197.2840407 , -40619380.06793167, -259235.04755252], [2961.12591755, 826.32591367, 7.93473432], [-32390076.84311398, 27003985.41857829, -205679.40741202], [ 0.80685878, 0.50848599, 21.19517045], 145.99814147, (330.15463004, -12.18274445), (184.77593341, -11.95746344), [[ 9.99936381e-01, 1.03449492e-02, 4.49612685e-03], [-1.03447650e-02, 9.99946489e-01, -6.42385159e-05], [-4.49655080e-03, 1.77230537e-05, 9.99989890e-01]], [ 2.40184218e+08, -2.07226967e+08, -7.58455830e+07], (-0.27973286, 140.18163787, 35792641.6143761 ), 0, 0), + (50131. , (960218, 0), [ 12215754.80493221, -40361787.08463053, -256792.97127933], [2942.35551459, 890.89226454, 8.34512262], [-32389835.37113104, 27004135.23720251, -199272.35452792], [ 0.8033778 , 0.49087558, 21.51701595], 147.2515638 , (330.15773341, -12.18158803), (183.5256092 , -11.95629965), [[ 9.99936381e-01, 1.03449510e-02, 4.49612761e-03], [-1.03447667e-02, 9.99946489e-01, -6.42375317e-05], [-4.49655155e-03, 1.77220539e-05, 9.99989890e-01]], [ 2.39535744e+08, -2.07273025e+08, -7.57702305e+07], (-0.2710197 , 140.18127143, 35792521.29050537), 0, 0), + (50131.00347222, (960218, 500), [ 13095469.82708225, -40084887.27645436, -254228.37467049], [2922.17747695, 955.03294974, 8.75150409], [-32389595.00191828, 27004279.7580633 , -192770.28953487], [ 0.79950572, 0.47316669, 21.82855319], 148.50498613, (330.16083128, -12.18043041), (182.27527951, -11.95513466), [[ 9.99936381e-01, 1.03449527e-02, 4.49612837e-03], [-1.03447684e-02, 9.99946489e-01, -6.42365476e-05], [-4.49655230e-03, 1.77210542e-05, 9.99989890e-01]], [ 2.38892921e+08, -2.07338200e+08, -7.56949425e+07], (-0.26217728, 140.18091148, 35792398.96228714), 0, 0), + (50131.00694444, (960218, 1000), [ 13968921.48773305, -39788812.95011112, -251542.48890031], [2900.60142795, 1018.71728887, 9.15368488], [-32389355.85220329, 27004418.95297137, -186176.32730922], [ 0.79525074, 0.45537327, 22.12963356], 149.75840846, (330.16392379, -12.17927157), (181.02494445, -11.95396845), [[ 9.99936381e-01, 1.03449544e-02, 4.49612913e-03], [-1.03447701e-02, 9.99946489e-01, -6.42355636e-05], [-4.49655305e-03, 1.77200547e-05, 9.99989890e-01]], [ 2.38256170e+08, -2.07422360e+08, -7.56197178e+07], (-0.25320985, 140.18055815, 35792274.75899146), 0, 0), + (50131.01041667, (960218, 1500), [ 14835691.90970188, -39473705.58489136, -248736.60300345], [2877.63765957, 1081.9148182 , 9.55147314], [-32389118.03536845, 27004552.79890675, -179493.62657611], [ 
0.79062131, 0.43750908, 22.42011344], 151.01183079, (330.16701107, -12.17811148), (179.77462147, -11.952801 ), [[ 9.99936381e-01, 1.03449561e-02, 4.49612989e-03], [-1.03447719e-02, 9.99946489e-01, -6.42345798e-05], [-4.49655380e-03, 1.77190553e-05, 9.99989890e-01]], [ 2.37625908e+08, -2.07525364e+08, -7.55445552e+07], (-0.24412169, 140.18021156, 35792148.80948149), 0, 0), + (50131.01388889, (960218, 2000), [ 15695366.40490882, -39139715.76420763, -245812.06324505], [2853.29712752, 1144.59530548, 9.94467917], [-32388881.66227116, 27004681.27687033, -172725.38836895], [ 0.7856262 , 0.41958762, 22.69985431], 152.26525312, (330.17009324, -12.17695013), (178.52427609, -11.95163228), [[ 9.99936381e-01, 1.03449578e-02, 4.49613064e-03], [-1.03447736e-02, 9.99946489e-01, -6.42335961e-05], [-4.49655455e-03, 1.77180562e-05, 9.99989890e-01]], [ 2.37002549e+08, -2.07647061e+08, -7.54694534e+07], (-0.23491716, 140.17987182, 35792021.2420001 ), 0, 0), + (50131.01736111, (960218, 2500), [ 16547533.6691137 , -38787003.10533711, -242770.27248672], [2827.5914462 , 1206.72876414, 10.33311542], [-32388646.84104986, 27004804.37195345, -165874.85452439], [ 0.78027439, 0.40162218, 22.96872279], 153.51867545, (330.17317044, -12.17578748), (177.27392574, -11.95046228), [[ 9.99936381e-01, 1.03449595e-02, 4.49613140e-03], [-1.03447753e-02, 9.99946489e-01, -6.42326125e-05], [-4.49655529e-03, 1.77170571e-05, 9.99989890e-01]], [ 2.36386506e+08, -2.07787291e+08, -7.53944111e+07], (-0.22560065, 140.17953905, 35791892.18395986), 0, 0), + (50131.02083333, (960218, 3000), [ 17391785.98229151, -38415736.18212036, -239612.68950141], [2800.53288309, 1268.28546791, 10.71659666], [-32388413.67874206, 27004922.07123395, -158945.30610131], [ 0.77457509, 0.38362576, 23.2265907 ], 154.77209777, (330.17624281, -12.17462353), (176.02357057, -11.94929096), [[ 9.99936381e-01, 1.03449612e-02, 4.49613215e-03], [-1.03447770e-02, 9.99946489e-01, -6.42316291e-05], [-4.49655603e-03, 1.77160583e-05, 9.99989890e-01]], [ 2.35778185e+08, -2.07945887e+08, -7.53194268e+07], (-0.21617663, 140.17921335, 35791761.76173551), 0, 0)], + dtype=vissr.ORBIT_PREDICTION_DATA + ) + # fmt: on + return orb_pred @pytest.fixture def vis_calibration(self): - return { - "vis1_calibration_table": { - "brightness_albedo_conversion_table": np.array([0, 0.25, 0.5, 1]) - } - } + vis_cal = np.zeros(1, dtype=vissr.VIS_CALIBRATION) + table = vis_cal["vis1_calibration_table"][ + "brightness_albedo_conversion_table" + ] + table[0, 0:4] = np.array([0, 0.25, 0.5, 1]) + return vis_cal @pytest.fixture def ir1_calibration(self): - return { - "conversion_table_of_equivalent_black_body_temperature": np.array([0, 100, 200, 300]) - } + cal = np.zeros(1, dtype=vissr.IR_CALIBRATION) + table = cal["conversion_table_of_equivalent_black_body_temperature"] + table[0, 0:4] = np.array([0, 100, 200, 300]) + return cal @pytest.fixture def ir2_calibration(self): - return { - "conversion_table_of_equivalent_black_body_temperature": None - } + cal = np.zeros(1, dtype=vissr.IR_CALIBRATION) + return cal @pytest.fixture def wv_calibration(self): - return { - "conversion_table_of_equivalent_black_body_temperature": None - } + cal = np.zeros(1, dtype=vissr.IR_CALIBRATION) + return cal @pytest.fixture def simple_coordinate_conversion_table(self): - return { + table = np.zeros(1, dtype=vissr.SIMPLE_COORDINATE_CONVERSION_TABLE) + return table + @pytest.fixture + def image_data(self, dataset_id, image_data_ir1, image_data_vis): + data = { + "IR1": image_data_ir1, + "VIS": image_data_vis } + return 
data[dataset_id["name"]] + + @pytest.fixture + def image_data_ir1(self): + image_data = np.zeros(2, vissr.IMAGE_DATA_BLOCK_IR) + image_data["LCW"]["line_number"] = [686, 2089] + image_data["LCW"]["scan_time"] = [50000, 50000] + image_data["LCW"]["west_side_earth_edge"] = [0, 0] + image_data["LCW"]["east_side_earth_edge"] = [1, 1] + image_data["image_data"] = [[0, 1], [2, 3]] + return image_data + + @pytest.fixture + def image_data_vis(self): + image_data = np.zeros(2, vissr.IMAGE_DATA_BLOCK_VIS) + image_data["LCW"]["line_number"] = [2744, 8356] + image_data["LCW"]["scan_time"] = [50000, 50000] + image_data["LCW"]["west_side_earth_edge"] = [-1, 0] + image_data["LCW"]["east_side_earth_edge"] = [-1, 1] + image_data["image_data"] = [[0, 1], [2, 3]] + return image_data + + @pytest.fixture + def vissr_file_like(self, vissr_file, with_compression): + if with_compression: + open_file = fsspec.open(vissr_file, compression="gzip") + return FSFile(open_file) + return vissr_file + + @pytest.fixture + def file_handler(self, vissr_file_like, mask_space): + return vissr.GMS5VISSRFileHandler( + vissr_file_like, {}, {}, mask_space=mask_space + ) @pytest.fixture def vis_refl_exp(self, mask_space, lons_lats_exp): @@ -821,18 +866,6 @@ def ir1_bt_exp(self, lons_lats_exp): } ) - @pytest.fixture - def dataset_exp(self, dataset_id, ir1_counts_exp, ir1_bt_exp, vis_refl_exp): - ir1_counts_id = make_dataid(name="IR1", calibration="counts") - ir1_bt_id = make_dataid(name="IR1", calibration="brightness_temperature") - vis_refl_id = make_dataid(name="VIS", calibration="reflectance") - expectations = { - ir1_counts_id: ir1_counts_exp, - ir1_bt_id: ir1_bt_exp, - vis_refl_id: vis_refl_exp - } - return expectations[dataset_id] - @pytest.fixture def lons_lats_exp(self, dataset_id): """Get expected lon/lat coordinates. 
@@ -869,12 +902,64 @@ def lons_lats_exp(self, dataset_id): lats = xr.DataArray(exp["lats"], dims=("y", "x")) return lons, lats + @pytest.fixture + def dataset_exp(self, dataset_id, ir1_counts_exp, ir1_bt_exp, vis_refl_exp): + ir1_counts_id = make_dataid(name="IR1", calibration="counts") + ir1_bt_id = make_dataid(name="IR1", calibration="brightness_temperature") + vis_refl_id = make_dataid(name="VIS", calibration="reflectance") + expectations = { + ir1_counts_id: ir1_counts_exp, + ir1_bt_id: ir1_bt_exp, + vis_refl_id: vis_refl_exp + } + return expectations[dataset_id] -def assert_namedtuple_close(a, b): - assert a.__class__ == b.__class__ - for attr in a._fields: - np.testing.assert_allclose( - getattr(a, attr), - getattr(b, attr), - err_msg='{} attribute {} differs'.format(a.__class__, attr) - ) + @pytest.fixture + def attrs_exp(self): + return { + "platform": "GMS-5", + "sensor": "vissr", + } + + def test_get_dataset(self, file_handler, dataset_id, dataset_exp, attrs_exp): + dataset = file_handler.get_dataset(dataset_id, None) + xr.testing.assert_allclose(dataset.compute(), dataset_exp, atol=1E-6) + # assert dataset.attrs == attrs_exp + + +class VissrFileWriter: + def __init__(self, ch_type, open_function): + self.ch_type = ch_type + self.open_function = open_function + + def write(self, filename, contents): + with self.open_function(filename, mode="wb") as fd: + self._write_control_block(fd, contents) + self._write_image_parameters(fd, contents) + self._write_image_data(fd, contents) + + def _write_control_block(self, fd, contents): + self._write(fd, contents["control_block"]) + + def _write_image_parameters(self, fd, contents): + for key, im_param in contents["image_parameters"].items(): + offset = vissr.IMAGE_PARAMS[key]["offset"][self.ch_type] + self._write(fd, im_param, offset) + + def _write_image_data(self, fd, contents): + offset = vissr.IMAGE_DATA[self.ch_type]["offset"] + self._write(fd, contents["image_data"], offset) + + def _write(self, fd, data, offset=None): + """Write data to file. + + If specified, prepend with 'offset' placeholder bytes. + """ + if offset: + self._fill(fd, offset) + fd.write(data.tobytes()) + + def _fill(self, fd, target_byte): + """Write placeholders from current position to target byte.""" + nbytes = target_byte - fd.tell() + fd.write(b" " * nbytes) From b13945bba4c6c4665437e11fc4806b23870add6b Mon Sep 17 00:00:00 2001 From: ghiggi Date: Fri, 26 May 2023 12:12:58 +0200 Subject: [PATCH 0186/1416] Remove to_dataset deprecation warning --- satpy/scene.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/satpy/scene.py b/satpy/scene.py index aadbfc4691..61d42ab00a 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1061,10 +1061,6 @@ def to_xarray_dataset(self, datasets=None): """ from satpy._scene_converters import _get_dataarrays_from_identifiers - warnings.warn('Scene.to_xarray_dataset() is deprecated.' 
- 'Use Scene.to_xarray() instead, to obtain a CF-compliant xr.Dataset .', - DeprecationWarning, stacklevel=2) - dataarrays = _get_dataarrays_from_identifiers(self, datasets) if len(dataarrays) == 0: From 7789306704842dd9f8da7d90133570b383c2b447 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Fri, 26 May 2023 10:38:30 +0000 Subject: [PATCH 0187/1416] Add time and orbital parameters --- satpy/readers/gms5_vissr_l1b.py | 37 ++++++++++++++++++- .../tests/reader_tests/test_gms5_vissr_l1b.py | 20 +++++++++- 2 files changed, 54 insertions(+), 3 deletions(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index b59fa6a9b3..a99ad88278 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -100,6 +100,7 @@ import numpy as np import xarray as xr import numba +import datetime as dt import satpy.readers._geos_area as geos_area import satpy.readers.gms5_vissr_navigation as nav @@ -606,7 +607,29 @@ def _get_mda(self): mode_block = self._header['image_parameters']['mode'] return { 'platform': mode_block['satellite_name'].decode().strip().upper(), - 'sensor': 'VISSR' + 'sensor': 'VISSR', + 'time_parameters': self._get_time_parameters(), + 'orbital_parameters': self._get_orbital_parameters() + } + + def _get_orbital_parameters(self): + mode_block = self._header['image_parameters']['mode'] + return { + 'satellite_nominal_longitude': mode_block["ssp_longitude"], + 'satellite_nominal_latitude': 0.0, + 'satellite_nominal_altitude': mode_block["satellite_height"] + } + + def _get_time_parameters(self): + mode_block = self._header['image_parameters']['mode'] + start_time = mjd2datetime64(mode_block["observation_time_mjd"]) + start_time = start_time.astype(dt.datetime).replace(second=0, microsecond=0) + end_time = start_time + dt.timedelta( + minutes=25 + ) # Source: GMS User Guide, section 3.3.1 + return { + 'nominal_start_time': start_time, + 'nominal_end_time': end_time, } def get_dataset(self, dataset_id, ds_info): @@ -616,6 +639,7 @@ def get_dataset(self, dataset_id, ds_info): space_masker = SpaceMasker(image_data, dataset_id["name"]) dataset = self._mask_space_pixels(dataset, space_masker) self._attach_lons_lats(dataset, dataset_id) + self._update_attrs(dataset) return dataset def _get_image_data(self): @@ -814,6 +838,17 @@ def _make_lons_lats_data_array(self, lons, lats): "units": "degrees_north"}) return lons, lats + def _update_attrs(self, dataset): + dataset.attrs.update(self._mda) + + @property + def start_time(self): + return self._mda["time_parameters"]["nominal_start_time"] + + @property + def end_time(self): + return self._mda["time_parameters"]["nominal_end_time"] + def read_from_file_obj(file_obj, dtype, count, offset=0): file_obj.seek(offset) diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index 549d6a0788..c42676ae56 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -614,6 +614,9 @@ def mode_block(self): mode = np.zeros(1, dtype=vissr.MODE_BLOCK) mode["satellite_name"] = b'GMS-5 ' mode["spin_rate"] = 99.21774 + mode["observation_time_mjd"] = 50000.0 + mode["ssp_longitude"] = 140.0 + mode["satellite_height"] = 123456.0 mode["ir_frame_parameters"]["number_of_lines"] = 2 mode["ir_frame_parameters"]["number_of_pixels"] = 2 mode["vis_frame_parameters"]["number_of_lines"] = 2 @@ -918,13 +921,26 @@ def dataset_exp(self, dataset_id, ir1_counts_exp, ir1_bt_exp, vis_refl_exp): def attrs_exp(self): return { 
"platform": "GMS-5", - "sensor": "vissr", + "sensor": "VISSR", + "time_parameters": { + "nominal_start_time": dt.datetime(1995, 10, 10), + "nominal_end_time": dt.datetime(1995, 10, 10, 0, 25) + }, + "orbital_parameters": { + 'satellite_nominal_longitude': 140.0, + 'satellite_nominal_latitude': 0.0, + 'satellite_nominal_altitude': 123456.0 + } } def test_get_dataset(self, file_handler, dataset_id, dataset_exp, attrs_exp): dataset = file_handler.get_dataset(dataset_id, None) xr.testing.assert_allclose(dataset.compute(), dataset_exp, atol=1E-6) - # assert dataset.attrs == attrs_exp + assert dataset.attrs == attrs_exp + + def test_time_attributes(self, file_handler, attrs_exp): + assert file_handler.start_time == attrs_exp["time_parameters"]["nominal_start_time"] + assert file_handler.end_time == attrs_exp["time_parameters"]["nominal_end_time"] class VissrFileWriter: From acab2816ea89b77b04581179064a63c6b577f6c5 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Fri, 26 May 2023 10:53:14 +0000 Subject: [PATCH 0188/1416] Add orbital parameters --- satpy/readers/gms5_vissr_l1b.py | 9 +++++++-- satpy/readers/gms5_vissr_navigation.py | 4 ++-- satpy/tests/reader_tests/test_gms5_vissr_l1b.py | 8 +++++++- 3 files changed, 16 insertions(+), 5 deletions(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index a99ad88278..8181d64065 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -613,11 +613,16 @@ def _get_mda(self): } def _get_orbital_parameters(self): - mode_block = self._header['image_parameters']['mode'] + im_params = self._header['image_parameters'] + mode_block = im_params['mode'] + coord = im_params["simple_coordinate_conversion_table"] return { 'satellite_nominal_longitude': mode_block["ssp_longitude"], 'satellite_nominal_latitude': 0.0, - 'satellite_nominal_altitude': mode_block["satellite_height"] + 'satellite_nominal_altitude': mode_block["satellite_height"], + 'satellite_actual_longitude': coord["ssp_longitude"], + 'satellite_actual_latitude': coord["ssp_latitude"], + 'satellite_actual_altitude': coord["satellite_height"] } def _get_time_parameters(self): diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index a98b42f16f..74d9e82485 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -710,8 +710,8 @@ def _interpolate_nearest(x, x_sample, y_sample): # TODO """ -- Orbital parameters -- Area def + +- Area def: Attention! 
Nominal SSP (140) != Actual SSP (70) - Finish Documentation - Call find_enclosing_index only once for all predictions """ diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index c42676ae56..c3cc9c5c58 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -778,6 +778,9 @@ def wv_calibration(self): @pytest.fixture def simple_coordinate_conversion_table(self): table = np.zeros(1, dtype=vissr.SIMPLE_COORDINATE_CONVERSION_TABLE) + table["ssp_longitude"] = 141.0 + table["ssp_latitude"] = 1.0 + table["satellite_height"] = 123457.0 return table @pytest.fixture @@ -929,7 +932,10 @@ def attrs_exp(self): "orbital_parameters": { 'satellite_nominal_longitude': 140.0, 'satellite_nominal_latitude': 0.0, - 'satellite_nominal_altitude': 123456.0 + 'satellite_nominal_altitude': 123456.0, + 'satellite_actual_longitude': 141.0, + 'satellite_actual_latitude': 1.0, + 'satellite_actual_altitude': 123457.0 } } From 2e62a7032d8910895b68a0ff2179e0eeea958a35 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Fri, 26 May 2023 11:03:55 +0000 Subject: [PATCH 0189/1416] Restore environment name --- continuous_integration/environment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index d773951ab4..f60f25e4ff 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -1,4 +1,4 @@ -name: satpy +name: test-environment channels: - conda-forge dependencies: From 8e561b476dbf4461ec5cbdce6b9fad573c55425e Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Fri, 26 May 2023 11:04:53 +0000 Subject: [PATCH 0190/1416] Remove unused method --- satpy/readers/utils.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/satpy/readers/utils.py b/satpy/readers/utils.py index 6084dcc933..31f6dea6d9 100644 --- a/satpy/readers/utils.py +++ b/satpy/readers/utils.py @@ -474,11 +474,3 @@ def remove_earthsun_distance_correction(reflectance, utc_date=None): with xr.set_options(keep_attrs=True): reflectance = reflectance / (sun_earth_dist * sun_earth_dist) return reflectance - - -def modified_julian_day_to_datetime64(mjd): - """Convert Modified Julian Day (MJD) to datetime64.""" - epoch = np.datetime64('1858-11-17 00:00') - day2usec = 24 * 3600 * 1E6 - mjd_usec = (mjd * day2usec).astype(np.int64).astype('timedelta64[us]') - return epoch + mjd_usec From d39a7cacd50e62dcb0c367aef54309c5e56e7133 Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Fri, 26 May 2023 13:58:16 -0500 Subject: [PATCH 0191/1416] Fix mock cleanup error by using teardown rather than cleanup --- satpy/readers/clavrx.py | 116 +++++++++++---------- satpy/tests/reader_tests/test_clavrx.py | 1 + satpy/tests/reader_tests/test_clavrx_nc.py | 76 +++++++++++--- 3 files changed, 128 insertions(+), 65 deletions(-) diff --git a/satpy/readers/clavrx.py b/satpy/readers/clavrx.py index 3d7455d209..f97c9cee19 100644 --- a/satpy/readers/clavrx.py +++ b/satpy/readers/clavrx.py @@ -33,7 +33,6 @@ LOG = logging.getLogger(__name__) - CF_UNITS = { 'none': '1', } @@ -69,18 +68,18 @@ } CHANNEL_ALIASES = { - "abi": {"refl_0_47um_nom": {"name": "C01", "wavelength": 0.47, "modifiers": ("sunz_corrected",)}, - "refl_0_65um_nom": {"name": "C02", "wavelength": 0.64, "modifiers": ("sunz_corrected",)}, - "refl_0_86um_nom": {"name": "C03", "wavelength": 0.865, "modifiers": ("sunz_corrected",)}, - "refl_1_38um_nom": {"name": 
"C04", "wavelength": 1.38, "modifiers": ("sunz_corrected",)}, - "refl_1_60um_nom": {"name": "C05", "wavelength": 1.61, "modifiers": ("sunz_corrected",)}, - "refl_2_10um_nom": {"name": "C06", "wavelength": 2.25, "modifiers": ("sunz_corrected",)}, - }, - "viirs": {"refl_0_65um_nom": {"name": "I01", "wavelength": 0.64, "modifiers": ("sunz_corrected",)}, - "refl_1_38um_nom": {"name": "M09", "wavelength": 1.38, "modifiers": ("sunz_corrected",)}, - "refl_1_60um_nom": {"name": "I03", "wavelength": 1.61, "modifiers": ("sunz_corrected",)} - } - } + "abi": {"refl_0_47um_nom": {"name": "C01", "wavelength": 0.47, "modifiers": ("sunz_corrected",)}, + "refl_0_65um_nom": {"name": "C02", "wavelength": 0.64, "modifiers": ("sunz_corrected",)}, + "refl_0_86um_nom": {"name": "C03", "wavelength": 0.865, "modifiers": ("sunz_corrected",)}, + "refl_1_38um_nom": {"name": "C04", "wavelength": 1.38, "modifiers": ("sunz_corrected",)}, + "refl_1_60um_nom": {"name": "C05", "wavelength": 1.61, "modifiers": ("sunz_corrected",)}, + "refl_2_10um_nom": {"name": "C06", "wavelength": 2.25, "modifiers": ("sunz_corrected",)}, + }, + "viirs": {"refl_0_65um_nom": {"name": "I01", "wavelength": 0.64, "modifiers": ("sunz_corrected",)}, + "refl_1_38um_nom": {"name": "M09", "wavelength": 1.38, "modifiers": ("sunz_corrected",)}, + "refl_1_60um_nom": {"name": "I03", "wavelength": 1.61, "modifiers": ("sunz_corrected",)} + } +} def _get_sensor(sensor: str) -> str: @@ -107,9 +106,29 @@ def _get_rows_per_scan(sensor: str) -> Optional[int]: return None +def _scale_data(data_arr: Union[xr.DataArray, int], scale_factor: float, add_offset: float) -> xr.DataArray: + """Scale data, if needed.""" + scaling_needed = not (scale_factor == 1.0 and add_offset == 0.0) + if scaling_needed: + data_arr = data_arr * scale_factor + add_offset + return data_arr + + class _CLAVRxHelper: """A base class for the CLAVRx File Handlers.""" + @staticmethod + def _get_nadir_resolution(sensor, resolution_from_filename_info): + """Get nadir resolution.""" + for k, v in NADIR_RESOLUTION.items(): + if sensor.startswith(k): + return v + res = resolution_from_filename_info + if res.endswith('m'): + return int(res[:-1]) + elif res is not None: + return int(res) + @staticmethod def _remove_attributes(attrs: dict) -> dict: """Remove attributes that described data before scaling.""" @@ -120,14 +139,6 @@ def _remove_attributes(attrs: dict) -> dict: attrs.pop(attr_key, None) return attrs - @staticmethod - def _scale_data(data_arr: Union[xr.DataArray, int], scale_factor: float, add_offset: float) -> xr.DataArray: - """Scale data, if needed.""" - scaling_needed = not (scale_factor == 1.0 and add_offset == 0.0) - if scaling_needed: - data_arr = data_arr * scale_factor + add_offset - return data_arr - @staticmethod def _get_data(data, dataset_id: dict) -> xr.DataArray: """Get a dataset.""" @@ -136,25 +147,28 @@ def _get_data(data, dataset_id: dict) -> xr.DataArray: attrs = data.attrs.copy() - fill = attrs.get('_FillValue') + # don't need these attributes after applied. 
factor = attrs.pop('scale_factor', (np.ones(1, dtype=data.dtype))[0]) offset = attrs.pop('add_offset', (np.zeros(1, dtype=data.dtype))[0]) + flag_values = data.attrs.get("flag_values", [None]) valid_range = attrs.get('valid_range', [None]) + if isinstance(valid_range, np.ndarray): + attrs["valid_range"] = valid_range.tolist() - flags = not data.attrs.get("SCALED", 1) and any(data.attrs.get("flag_values", [None])) - if not flags: + flags = not data.attrs.get("SCALED", 1) and any(flag_values) + if flags: + fill = attrs.get('_FillValue', None) + if isinstance(flag_values, np.ndarray) or isinstance(flag_values, list): + data = data.where((data >= flag_values[0]) & (data <= flag_values[-1]), fill) + else: + fill = attrs.pop('_FillValue', None) data = data.where(data != fill) - data = _CLAVRxHelper._scale_data(data, factor, offset) - # don't need _FillValue if it has been applied. - attrs.pop('_FillValue', None) - if isinstance(valid_range, np.ndarray): - valid_min = _CLAVRxHelper._scale_data(valid_range[0], factor, offset) - valid_max = _CLAVRxHelper._scale_data(valid_range[1], factor, offset) + data = _scale_data(data, factor, offset) + + if valid_range[0] is not None: + valid_min = _scale_data(valid_range[0], factor, offset) + valid_max = _scale_data(valid_range[1], factor, offset) data = data.where((data >= valid_min) & (data <= valid_max)) - else: - flag_values = attrs.get('flag_values', None) - if flag_values is not None and isinstance(flag_values, np.ndarray): - data = data.where((data >= flag_values[0]) & (data <= flag_values[-1]), fill) data.attrs = _CLAVRxHelper._remove_attributes(attrs) @@ -296,7 +310,8 @@ def __init__(self, filename, filename_info, filetype_info): self.sensor = _get_sensor(self.file_content.get('/attr/sensor')) self.platform = _get_platform(self.file_content.get('/attr/platform')) - self.resolution = self.get_nadir_resolution(self.sensor) + self.resolution = _CLAVRxHelper._get_nadir_resolution(self.sensor, + self.filename_info.get('resolution')) @property def start_time(self): @@ -317,17 +332,6 @@ def get_dataset(self, dataset_id, ds_info): data.attrs, ds_info) return data - def get_nadir_resolution(self, sensor): - """Get nadir resolution.""" - for k, v in NADIR_RESOLUTION.items(): - if sensor.startswith(k): - return v - res = self.filename_info.get('resolution') - if res.endswith('m'): - return int(res[:-1]) - elif res is not None: - return int(res) - def _available_aliases(self, ds_info, current_var): """Add alias if there is a match.""" new_info = ds_info.copy() @@ -341,7 +345,6 @@ def _supplement_configured(self, configured_datasets=None): """Add more information if this reader can provide it.""" for is_avail, ds_info in (configured_datasets or []): # some other file handler knows how to load this - print(is_avail, ds_info) if is_avail is not None: yield is_avail, ds_info @@ -431,6 +434,8 @@ def __init__(self, filename, filename_info, filetype_info): self.platform = _get_platform( self.filename_info.get('platform_shortname', None)) self.sensor = _get_sensor(self.nc.attrs.get('sensor', None)) + self.resolution = _CLAVRxHelper._get_nadir_resolution(self.sensor, + self.filename_info.get('resolution')) # coordinates need scaling and valid_range (mask_and_scale won't work on valid_range) self.nc.coords["latitude"] = _CLAVRxHelper._get_data(self.nc.coords["latitude"], {"name": "latitude"}) @@ -439,7 +444,6 @@ def __init__(self, filename, filename_info, filetype_info): def _dynamic_dataset_info(self, var_name): """Set data name and, if applicable, aliases.""" - 
channel_info = None ds_info = { 'file_type': self.filetype_info['file_type'], 'name': var_name, @@ -447,11 +451,12 @@ def _dynamic_dataset_info(self, var_name): yield True, ds_info if CHANNEL_ALIASES.get(self.sensor) is not None: + alias_info = ds_info.copy() channel_info = CHANNEL_ALIASES.get(self.sensor).get(var_name, None) if channel_info is not None: channel_info["file_key"] = var_name - ds_info.update(channel_info) - yield True, ds_info + alias_info.update(channel_info) + yield True, alias_info @staticmethod def _is_2d_yx_data_array(data_arr): @@ -488,10 +493,15 @@ def available_datasets(self, configured_datasets=None): # we don't know any more information than the previous # file handler so let's yield early yield is_avail, ds_info - continue - if self.file_type_matches(ds_info['file_type']): + + matches = self.file_type_matches(ds_info['file_type']) + if matches and ds_info.get('resolution') != self.resolution: + # reader knows something about this dataset (file type matches) + # add any information that this reader can add. + new_info = ds_info.copy() + new_info['resolution'] = self.resolution handled_vars.add(ds_info['name']) - yield self.file_type_matches(ds_info['file_type']), ds_info + yield True, new_info yield from self._available_file_datasets(handled_vars) def _is_polar(self): diff --git a/satpy/tests/reader_tests/test_clavrx.py b/satpy/tests/reader_tests/test_clavrx.py index 7f1fecc2be..f7c8f1f1cd 100644 --- a/satpy/tests/reader_tests/test_clavrx.py +++ b/satpy/tests/reader_tests/test_clavrx.py @@ -173,6 +173,7 @@ def test_available_datasets(self): (None, {'name': 'variable1', 'file_type': ['level_fake']}), (True, {'name': 'variable3', 'file_type': ['clavrx_hdf4']}), ] + new_ds_infos = list(r.file_handlers['clavrx_hdf4'][0].available_datasets( fake_dataset_info)) self.assertEqual(len(new_ds_infos), 9) diff --git a/satpy/tests/reader_tests/test_clavrx_nc.py b/satpy/tests/reader_tests/test_clavrx_nc.py index b95a7dcce2..0d3e6680cb 100644 --- a/satpy/tests/reader_tests/test_clavrx_nc.py +++ b/satpy/tests/reader_tests/test_clavrx_nc.py @@ -16,7 +16,6 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Module for testing the satpy.readers.clavrx module.""" - import os import unittest from unittest import mock @@ -30,11 +29,13 @@ ABI_FILE = 'clavrx_OR_ABI-L1b-RadC-M6C01_G16_s20231021601173.level2.nc' DEFAULT_FILE_DTYPE = np.uint16 -DEFAULT_FILE_SHAPE = (10, 300) +DEFAULT_FILE_SHAPE = (5, 5) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FLAGS = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=np.byte).reshape(DEFAULT_FILE_SHAPE) +DEFAULT_FILE_FLAGS_BEYOND_FILL = DEFAULT_FILE_FLAGS +DEFAULT_FILE_FLAGS_BEYOND_FILL[-1][:-2] = [-127, -127, -128] DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) @@ -97,7 +98,7 @@ def fake_dataset(): variable2 = variable2.where(variable2 % 2 != 0, FILL_VALUE) # category - variable3 = xr.DataArray(DEFAULT_FILE_FLAGS.astype(np.int8), + var_flags = xr.DataArray(DEFAULT_FILE_FLAGS.astype(np.int8), dims=('scan_lines_along_track_direction', 'pixel_elements_along_scan_direction'), attrs={'SCALED': 0, @@ -105,12 +106,21 @@ def fake_dataset(): 'units': '1', 'flag_values': [0, 1, 2, 3]}) + out_of_range_flags = xr.DataArray(DEFAULT_FILE_FLAGS_BEYOND_FILL.astype(np.int8), + dims=('scan_lines_along_track_direction', + 'pixel_elements_along_scan_direction'), + attrs={'SCALED': 0, + '_FillValue': -127, + 'units': '1', + 'flag_values': [0, 1, 2, 3]}) + ds_vars = { 'longitude': longitude, 'latitude': latitude, 'variable1': variable1, 'refl_0_65um_nom': variable2, - 'variable3': variable3 + 'var_flags': var_flags, + 'out_of_range_flags': out_of_range_flags, } ds = xr.Dataset(ds_vars, attrs=attrs) @@ -142,13 +152,19 @@ def setUp(self): self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(CLAVRXNetCDFFileHandler, '__bases__', - (FakeNetCDF4FileHandlerCLAVRx,), spec=True) + (FakeNetCDF4FileHandlerCLAVRx,)) self.fake_open_dataset = mock.patch('satpy.readers.clavrx.xr.open_dataset', return_value=fake_dataset()).start() + self.expected_dataset = mock.patch('xarray.load_dataset', + return_value=fake_dataset()).start() self.fake_handler = self.p.start() self.p.is_local = True - self.addCleanup(mock.patch.stopall) + def tearDown(self): + """Stop wrapping the NetCDF4 file handler.""" + self.p.stop() + self.fake_open_dataset.stop() + self.expected_dataset.stop() def test_init(self): """Test basic init with no extra parameters.""" @@ -176,8 +192,8 @@ def test_load_all_new_donor(self): with mock.patch('satpy.readers.clavrx.glob') as g, \ mock.patch('satpy.readers.clavrx.netCDF4.Dataset') as d: g.return_value = ['fake_donor.nc'] - x = np.linspace(-0.1518, 0.1518, 300) - y = np.linspace(0.1518, -0.1518, 10) + x = np.linspace(-0.1518, 0.1518, 5) + y = np.linspace(0.1518, -0.1518, 5) proj = mock.Mock( semi_major_axis=6378137, semi_minor_axis=6356752.3142, @@ -198,11 +214,12 @@ def test_load_all_new_donor(self): self.assertNotIn("_FillValue", datasets["variable1"].attrs) self.assertEqual(np.float64, datasets["variable1"].dtype) - assert np.issubdtype(datasets["variable3"].dtype, np.integer) - self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) + assert np.issubdtype(datasets["var_flags"].dtype, np.integer) + 
self.assertIsNotNone(datasets['var_flags'].attrs.get('flag_meanings')) self.assertEqual('', - datasets['variable3'].attrs.get('flag_meanings'), + datasets['var_flags'].attrs.get('flag_meanings'), ) + assert np.issubdtype(datasets["out_of_range_flags"].dtype, np.integer) self.assertIsInstance(datasets["refl_0_65um_nom"].valid_range, list) self.assertEqual(np.float64, datasets["refl_0_65um_nom"].dtype) @@ -232,7 +249,7 @@ def test_yaml_datasets(self): ] new_ds_infos = list(r.file_handlers['clavrx_nc'][0].available_datasets( fake_dataset_info)) - self.assertEqual(len(new_ds_infos), 9) + self.assertEqual(len(new_ds_infos), 10) # we have this and can provide the resolution self.assertTrue(new_ds_infos[0][0]) @@ -247,3 +264,38 @@ def test_yaml_datasets(self): # because a previous handler said it has it self.assertTrue(new_ds_infos[2][0]) self.assertEqual(new_ds_infos[2][1]['resolution'], 2004) + + def test_scale_data(self): + """Test that data is scaled when necessary and not scaled data are flags.""" + from satpy.readers.clavrx import _scale_data + """Test scale data and results.""" + r = load_reader(self.reader_configs) + loadables = r.select_files_from_pathnames([ABI_FILE]) + r.create_filehandlers(loadables) + with mock.patch('satpy.readers.clavrx.glob') as g, \ + mock.patch('satpy.readers.clavrx.netCDF4.Dataset') as d: + g.return_value = ['fake_donor.nc'] + x = np.linspace(-0.1518, 0.1518, 5) + y = np.linspace(0.1518, -0.1518, 5) + proj = mock.Mock( + semi_major_axis=6378137, + semi_minor_axis=6356752.3142, + perspective_point_height=35791000, + longitude_of_projection_origin=-137.2, + sweep_angle_axis='x', + ) + d.return_value = fake_donor = mock.MagicMock( + variables={'goes_imager_projection': proj, 'x': x, 'y': y}, + ) + fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] + + ds_scale = ["variable1", "refl_0_65um_nom"] + ds_no_scale = ["var_flags", "out_of_range_flags"] + + with mock.patch("satpy.readers.clavrx._scale_data", wraps=_scale_data) as scale_data: + r.load(ds_scale) + scale_data.assert_called() + + with mock.patch("satpy.readers.clavrx._scale_data", wraps=_scale_data) as scale_data2: + r.load(ds_no_scale) + scale_data2.assert_not_called() From 6433eb0efd361eca7ecdcb1361f6397ddefd6999 Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Fri, 26 May 2023 15:22:14 -0500 Subject: [PATCH 0192/1416] Fix poor capitalization bug --- satpy/tests/reader_tests/test_clavrx_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_clavrx_nc.py b/satpy/tests/reader_tests/test_clavrx_nc.py index 0d3e6680cb..338d83cf0e 100644 --- a/satpy/tests/reader_tests/test_clavrx_nc.py +++ b/satpy/tests/reader_tests/test_clavrx_nc.py @@ -25,7 +25,7 @@ from pyresample.geometry import AreaDefinition from satpy.readers import load_reader -from satpy.tests.reader_tests.test_netCDF_utils import FakeNetCDF4FileHandler +from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler ABI_FILE = 'clavrx_OR_ABI-L1b-RadC-M6C01_G16_s20231021601173.level2.nc' DEFAULT_FILE_DTYPE = np.uint16 From dace7ab5794e8df9464223fa71b9043704569d43 Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Sat, 27 May 2023 07:46:02 -0500 Subject: [PATCH 0193/1416] Fix donor name bug revealed when switching from AHI to ABI in tests Fix indent when creating info for alias --- satpy/readers/clavrx.py | 24 ++++++++++++++---------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/satpy/readers/clavrx.py b/satpy/readers/clavrx.py index 
f97c9cee19..1970bd641f 100644 --- a/satpy/readers/clavrx.py +++ b/satpy/readers/clavrx.py @@ -207,13 +207,17 @@ def _read_pug_fixed_grid(projection_coordinates: netCDF4.Variable, distance_mult return proj_dict @staticmethod - def _find_input_nc(filename: str, l1b_base: str) -> str: + def _find_input_nc(filename: str, sensor: str, l1b_base: str) -> str: dirname = os.path.dirname(filename) l1b_filename = os.path.join(dirname, l1b_base + '.nc') if os.path.exists(l1b_filename): return str(l1b_filename) - glob_pat = os.path.join(dirname, l1b_base + '*R20*.nc') + if sensor == "AHI": + glob_pat = os.path.join(dirname, l1b_base + '*R20*.nc') + else: + glob_pat = os.path.join(dirname, l1b_base + '*.nc') + LOG.debug("searching for {0}".format(glob_pat)) found_l1b_filenames = list(glob(glob_pat)) if len(found_l1b_filenames) == 0: @@ -223,7 +227,7 @@ def _find_input_nc(filename: str, l1b_base: str) -> str: return found_l1b_filenames[0] @staticmethod - def _read_axi_fixed_grid(filename: str, l1b_attr) -> geometry.AreaDefinition: + def _read_axi_fixed_grid(filename: str, sensor: str, l1b_attr) -> geometry.AreaDefinition: """Read a fixed grid. CLAVR-x does not transcribe fixed grid parameters to its output @@ -238,7 +242,7 @@ def _read_axi_fixed_grid(filename: str, l1b_attr) -> geometry.AreaDefinition: """ LOG.debug(f"looking for corresponding input file for {l1b_attr}" " to act as fixed grid navigation donor") - l1b_path = _CLAVRxHelper._find_input_nc(filename, l1b_attr) + l1b_path = _CLAVRxHelper._find_input_nc(filename, sensor, l1b_attr) LOG.info(f"CLAVR-x does not include fixed-grid parameters, use input file {l1b_path} as donor") l1b = netCDF4.Dataset(l1b_path) proj = None @@ -408,7 +412,7 @@ def get_area_def(self, key): return super(CLAVRXHDF4FileHandler, self).get_area_def(key) l1b_att = str(self.file_content.get('/attr/L1B', None)) - area_def = _CLAVRxHelper._read_axi_fixed_grid(self.filename, l1b_att) + area_def = _CLAVRxHelper._read_axi_fixed_grid(self.filename, self.sensor, l1b_att) return area_def @@ -453,10 +457,10 @@ def _dynamic_dataset_info(self, var_name): if CHANNEL_ALIASES.get(self.sensor) is not None: alias_info = ds_info.copy() channel_info = CHANNEL_ALIASES.get(self.sensor).get(var_name, None) - if channel_info is not None: - channel_info["file_key"] = var_name - alias_info.update(channel_info) - yield True, alias_info + if channel_info is not None: + channel_info["file_key"] = var_name + alias_info.update(channel_info) + yield True, alias_info @staticmethod def _is_2d_yx_data_array(data_arr): @@ -516,7 +520,7 @@ def get_area_def(self, key): return super(CLAVRXNetCDFFileHandler, self).get_area_def(key) l1b_att = str(self.nc.attrs.get('L1B', None)) - return _CLAVRxHelper._read_axi_fixed_grid(self.filename, l1b_att) + return _CLAVRxHelper._read_axi_fixed_grid(self.filename, self.sensor, l1b_att) def get_dataset(self, dataset_id, ds_info): """Get a dataset for supported geostationary sensors.""" From 6f685767aea14bd41394e8a5df17ace7a3ede571 Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Sat, 27 May 2023 07:48:12 -0500 Subject: [PATCH 0194/1416] Go back to using parameterize and add tests for alias. 
Clean up logic in some of the tests for yaml and expected datasets --- satpy/tests/reader_tests/test_clavrx_nc.py | 295 +++++++++++---------- 1 file changed, 157 insertions(+), 138 deletions(-) diff --git a/satpy/tests/reader_tests/test_clavrx_nc.py b/satpy/tests/reader_tests/test_clavrx_nc.py index 0d3e6680cb..0a6bdddfec 100644 --- a/satpy/tests/reader_tests/test_clavrx_nc.py +++ b/satpy/tests/reader_tests/test_clavrx_nc.py @@ -17,15 +17,14 @@ # satpy. If not, see . """Module for testing the satpy.readers.clavrx module.""" import os -import unittest from unittest import mock import numpy as np +import pytest import xarray as xr from pyresample.geometry import AreaDefinition from satpy.readers import load_reader -from satpy.tests.reader_tests.test_netCDF_utils import FakeNetCDF4FileHandler ABI_FILE = 'clavrx_OR_ABI-L1b-RadC-M6C01_G16_s20231021601173.level2.nc' DEFAULT_FILE_DTYPE = np.uint16 @@ -41,17 +40,18 @@ DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) -ABI_FILE = 'clavrx_OR_ABI-L1b-RadC-M6C01_G16_s20231021601173.level2.nc' +L1B_FILE = 'clavrx_OR_ABI-L1b-RadC-M6C01_G16_s20231021601173' +ABI_FILE = f'{L1B_FILE}.level2.nc' FILL_VALUE = -32768 -def fake_dataset(): +def fake_test_content(filename, **kwargs): """Mimic reader input file content.""" attrs = { 'platform': 'G16', 'sensor': 'ABI', # this is a Level 2 file that came from a L1B file - 'L1B': '"clavrx_OR_ABI-L1b-RadC-M6C01_G16_s20231021601173', + 'L1B': L1B_FILE, } longitude = xr.DataArray(DEFAULT_LON_DATA, @@ -129,149 +129,168 @@ def fake_dataset(): return ds -class FakeNetCDF4FileHandlerCLAVRx(FakeNetCDF4FileHandler): - """Swap-in NetCDF4 File Handler.""" - - def get_test_content(self, filename, filename_info, filetype_info): - """Get a fake dataset.""" - return fake_dataset() - - -class TestCLAVRXReaderNetCDF(unittest.TestCase): - """Test CLAVR-X Reader with NetCDF files.""" +class TestCLAVRXReaderGeo: + """Test CLAVR-X Reader with Geo files.""" yaml_file = "clavrx.yaml" - filename = ABI_FILE - loadable_ids = list(fake_dataset().keys()) - def setUp(self): - """Wrap NetCDF file handler with a fake handler.""" + def setup_method(self): + """Read fake data.""" from satpy._config import config_search_paths - from satpy.readers.clavrx import CLAVRXNetCDFFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) - # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(CLAVRXNetCDFFileHandler, '__bases__', - (FakeNetCDF4FileHandlerCLAVRx,)) - self.fake_open_dataset = mock.patch('satpy.readers.clavrx.xr.open_dataset', - return_value=fake_dataset()).start() - self.expected_dataset = mock.patch('xarray.load_dataset', - return_value=fake_dataset()).start() - self.fake_handler = self.p.start() - self.p.is_local = True - - def tearDown(self): - """Stop wrapping the NetCDF4 file handler.""" - self.p.stop() - self.fake_open_dataset.stop() - self.expected_dataset.stop() - - def test_init(self): - """Test basic init with no extra parameters.""" - r = load_reader(self.reader_configs) - loadables = r.select_files_from_pathnames([ABI_FILE]) - self.assertEqual(len(loadables), 1) - r.create_filehandlers(loadables) - # make sure we have some files - self.assertTrue(r.file_handlers) - - def test_available_datasets(self): + + 
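+    # Every test below is parametrized over (filenames, expected...) tuples, so
+    # additional fake granules can be covered by extending the parameter lists.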
@pytest.mark.parametrize( + ("filenames", "expected_loadables"), + [([ABI_FILE], 1)] + ) + def test_reader_creation(self, filenames, expected_loadables): + """Test basic initialization.""" + with mock.patch('satpy.readers.clavrx.xr.open_dataset') as od: + od.side_effect = fake_test_content + r = load_reader(self.reader_configs) + loadables = r.select_files_from_pathnames(filenames) + assert len(loadables) == expected_loadables + r.create_filehandlers(loadables) + # make sure we have some files + assert r.file_handlers + + @pytest.mark.parametrize( + ("filenames", "expected_datasets"), + [([ABI_FILE], ['variable1', 'refl_0_65um_nom', 'C02', 'var_flags', + 'out_of_range_flags', 'longitude', 'latitude']), ] + ) + def test_available_datasets(self, filenames, expected_datasets): """Test that variables are dynamically discovered.""" - r = load_reader(self.reader_configs) - loadables = r.select_files_from_pathnames([ABI_FILE]) - r.create_filehandlers(loadables) - avails = list(r.available_dataset_names) - expected_datasets = self.loadable_ids + ["latitude", "longitude"] - self.assertEqual(avails.sort(), expected_datasets.sort()) - - def test_load_all_new_donor(self): + from satpy.readers import load_reader + with mock.patch('satpy.readers.clavrx.xr.open_dataset') as od: + od.side_effect = fake_test_content + r = load_reader(self.reader_configs) + loadables = r.select_files_from_pathnames(filenames) + r.create_filehandlers(loadables) + avails = list(r.available_dataset_names) + for var_name in expected_datasets: + assert var_name in avails + + @pytest.mark.parametrize( + ("filenames", "loadable_ids"), + [([ABI_FILE], ['variable1', 'refl_0_65um_nom', 'var_flags', 'out_of_range_flags']), ] + ) + def test_load_all_new_donor(self, filenames, loadable_ids): """Test loading all test datasets with new donor.""" - r = load_reader(self.reader_configs) - loadables = r.select_files_from_pathnames([ABI_FILE]) - r.create_filehandlers(loadables) - with mock.patch('satpy.readers.clavrx.glob') as g, \ - mock.patch('satpy.readers.clavrx.netCDF4.Dataset') as d: - g.return_value = ['fake_donor.nc'] - x = np.linspace(-0.1518, 0.1518, 5) - y = np.linspace(0.1518, -0.1518, 5) - proj = mock.Mock( - semi_major_axis=6378137, - semi_minor_axis=6356752.3142, - perspective_point_height=35791000, - longitude_of_projection_origin=-137.2, - sweep_angle_axis='x', - ) - d.return_value = fake_donor = mock.MagicMock( - variables={'goes_imager_projection': proj, 'x': x, 'y': y}, - ) - fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] - - datasets = r.load(self.loadable_ids + ["C02"]) - self.assertEqual(len(datasets), len(self.loadable_ids)+1) - - # should have file variable and one alias for reflectance - self.assertNotIn("valid_range", datasets["variable1"].attrs) - self.assertNotIn("_FillValue", datasets["variable1"].attrs) - self.assertEqual(np.float64, datasets["variable1"].dtype) - - assert np.issubdtype(datasets["var_flags"].dtype, np.integer) - self.assertIsNotNone(datasets['var_flags'].attrs.get('flag_meanings')) - self.assertEqual('', - datasets['var_flags'].attrs.get('flag_meanings'), - ) - assert np.issubdtype(datasets["out_of_range_flags"].dtype, np.integer) - - self.assertIsInstance(datasets["refl_0_65um_nom"].valid_range, list) - self.assertEqual(np.float64, datasets["refl_0_65um_nom"].dtype) - self.assertNotIn("_FillValue", datasets["refl_0_65um_nom"].attrs) - - self.assertEqual("refl_0_65um_nom", datasets["C02"].file_key) - self.assertNotIn("_FillValue", datasets["C02"].attrs) - - for v in 
datasets.values(): - self.assertIsInstance(v.area, AreaDefinition) - self.assertEqual(v.platform_name, 'GOES-16') - self.assertEqual(v.sensor, 'abi') - - self.assertNotIn('calibration', v.attrs) - self.assertIn("units", v.attrs) - self.assertNotIn('rows_per_scan', v.coords.get('longitude').attrs) - - def test_yaml_datasets(self): + with mock.patch('satpy.readers.clavrx.xr.open_dataset') as od: + od.side_effect = fake_test_content + r = load_reader(self.reader_configs) + loadables = r.select_files_from_pathnames(filenames) + r.create_filehandlers(loadables) + with mock.patch('satpy.readers.clavrx.glob') as g, \ + mock.patch('satpy.readers.clavrx.netCDF4.Dataset') as d: + g.return_value = ['fake_donor.nc'] + x = np.linspace(-0.1518, 0.1518, DEFAULT_FILE_SHAPE[1]) + y = np.linspace(0.1518, -0.1518, DEFAULT_FILE_SHAPE[0]) + proj = mock.Mock( + semi_major_axis=6378137, + semi_minor_axis=6356752.3142, + perspective_point_height=35791000, + longitude_of_projection_origin=140.7, + sweep_angle_axis='y', + ) + d.return_value = fake_donor = mock.MagicMock( + variables={'goes_imager_projection': proj, 'x': x, 'y': y}, + ) + fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] + + datasets = r.load(loadable_ids + ["C02"]) + assert len(datasets) == len(loadable_ids)+1 + + # should have file variable and one alias for reflectance + assert "valid_range" not in datasets["variable1"].attrs + assert "_FillValue" not in datasets["variable1"].attrs + assert np.float64 == datasets["variable1"].dtype + + assert np.issubdtype(datasets["var_flags"].dtype, np.integer) + assert datasets['var_flags'].attrs.get('flag_meanings') is not None + assert '' == datasets['var_flags'].attrs.get('flag_meanings') + assert np.issubdtype(datasets["out_of_range_flags"].dtype, np.integer) + + assert isinstance(datasets["refl_0_65um_nom"].valid_range, list) + assert np.float64 == datasets["refl_0_65um_nom"].dtype + assert "_FillValue" not in datasets["refl_0_65um_nom"].attrs + + assert "refl_0_65um_nom" == datasets["C02"].file_key + assert "_FillValue" not in datasets["C02"].attrs + + for v in datasets.values(): + assert isinstance(v.area, AreaDefinition) + assert v.platform_name == 'GOES-16' + assert v.sensor == 'abi' + + assert 'calibration' not in v.attrs + assert 'rows_per_scan' not in v.coords.get('longitude').attrs + assert "units" in v.attrs + + @pytest.mark.parametrize( + ("filenames", "expected_loadables"), + [([ABI_FILE], 1)] + ) + def test_yaml_datasets(self, filenames, expected_loadables): """Test available_datasets with fake variables from YAML.""" - r = load_reader(self.reader_configs) - loadables = r.select_files_from_pathnames([ABI_FILE]) - r.create_filehandlers(loadables) - # mimic the YAML file being configured for more datasets - fake_dataset_info = [ - (None, {'name': 'yaml1', 'resolution': None, 'file_type': ['clavrx_nc']}), - (True, {'name': 'yaml2', 'resolution': 0.5, 'file_type': ['clavrx_nc']}), - ] - new_ds_infos = list(r.file_handlers['clavrx_nc'][0].available_datasets( - fake_dataset_info)) - self.assertEqual(len(new_ds_infos), 10) - - # we have this and can provide the resolution - self.assertTrue(new_ds_infos[0][0]) - self.assertEqual(new_ds_infos[0][1]['resolution'], 2004) # hardcoded - - # we have this, but previous file handler said it knew about it - # and it is producing the same resolution as what we have - self.assertTrue(new_ds_infos[1][0]) - self.assertEqual(new_ds_infos[1][1]['resolution'], 0.5) - - # we have this, but don't want to change the resolution - # because a 
previous handler said it has it - self.assertTrue(new_ds_infos[2][0]) - self.assertEqual(new_ds_infos[2][1]['resolution'], 2004) - - def test_scale_data(self): + with mock.patch('satpy.readers.clavrx.xr.open_dataset') as od: + od.side_effect = fake_test_content + r = load_reader(self.reader_configs) + loadables = r.select_files_from_pathnames(filenames) + r.create_filehandlers(loadables) + + with mock.patch('satpy.readers.clavrx.glob') as g, \ + mock.patch('satpy.readers.clavrx.netCDF4.Dataset') as d: + g.return_value = ['fake_donor.nc'] + x = np.linspace(-0.1518, 0.1518, 5) + y = np.linspace(0.1518, -0.1518, 5) + proj = mock.Mock( + semi_major_axis=6378137, + semi_minor_axis=6356752.3142, + perspective_point_height=35791000, + longitude_of_projection_origin=-137.2, + sweep_angle_axis='x', + ) + d.return_value = fake_donor = mock.MagicMock( + variables={'goes_imager_projection': proj, 'x': x, 'y': y}, + ) + fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] + # mimic the YAML file being configured for more datasets + fake_dataset_info = [ + (None, {'name': 'yaml1', 'resolution': None, 'file_type': ['clavrx_nc']}), + (True, {'name': 'yaml2', 'resolution': 0.5, 'file_type': ['clavrx_nc']}), + ] + new_ds_infos = list(r.file_handlers['clavrx_nc'][0].available_datasets( + fake_dataset_info)) + assert len(new_ds_infos) == 10 + + # we have this and can provide the resolution + assert (new_ds_infos[0][0]) + assert new_ds_infos[0][1]['resolution'] == 2004 # hardcoded + + # we have this, but previous file handler said it knew about it + # and it is producing the same resolution as what we have + assert (new_ds_infos[1][0]) + assert new_ds_infos[1][1]['resolution'] == 0.5 + + # we have this, but don't want to change the resolution + # because a previous handler said it has it + assert (new_ds_infos[2][0]) + assert new_ds_infos[2][1]['resolution'] == 2004 + + @pytest.mark.parametrize( + ("filenames", "loadable_ids"), + [([ABI_FILE], ['variable1', 'refl_0_65um_nom', 'var_flags', 'out_of_range_flags']), ] + ) + def test_scale_data(self, filenames, loadable_ids): """Test that data is scaled when necessary and not scaled data are flags.""" from satpy.readers.clavrx import _scale_data - """Test scale data and results.""" - r = load_reader(self.reader_configs) - loadables = r.select_files_from_pathnames([ABI_FILE]) - r.create_filehandlers(loadables) + with mock.patch('satpy.readers.clavrx.xr.open_dataset') as od: + od.side_effect = fake_test_content + r = load_reader(self.reader_configs) + loadables = r.select_files_from_pathnames(filenames) + r.create_filehandlers(loadables) with mock.patch('satpy.readers.clavrx.glob') as g, \ mock.patch('satpy.readers.clavrx.netCDF4.Dataset') as d: g.return_value = ['fake_donor.nc'] From 810598e5b04f2e4f1ac26ae35cc1222525a241f0 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Tue, 30 May 2023 11:49:20 +0000 Subject: [PATCH 0195/1416] Add area def with uniform sampling --- satpy/readers/gms5_vissr_l1b.py | 70 ++++++++----------- satpy/readers/gms5_vissr_navigation.py | 4 +- .../tests/reader_tests/test_gms5_vissr_l1b.py | 57 ++++++++++++--- 3 files changed, 79 insertions(+), 52 deletions(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index 8181d64065..16e0984a64 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -613,16 +613,20 @@ def _get_mda(self): } def _get_orbital_parameters(self): + # Note: SSP longitude in simple coordinate conversion table seems to be + # incorrect 
(80 deg instead of 140 deg). Use orbital parameters instead. im_params = self._header['image_parameters'] - mode_block = im_params['mode'] - coord = im_params["simple_coordinate_conversion_table"] + mode = im_params['mode'] + simple_coord = im_params["simple_coordinate_conversion_table"] + orb_params = im_params["coordinate_conversion"]["orbital_parameters"] return { - 'satellite_nominal_longitude': mode_block["ssp_longitude"], + 'satellite_nominal_longitude': mode["ssp_longitude"], 'satellite_nominal_latitude': 0.0, - 'satellite_nominal_altitude': mode_block["satellite_height"], - 'satellite_actual_longitude': coord["ssp_longitude"], - 'satellite_actual_latitude': coord["ssp_latitude"], - 'satellite_actual_altitude': coord["satellite_height"] + 'satellite_nominal_altitude': mode["satellite_height"], + + 'satellite_actual_longitude': orb_params["longitude_of_ssp"], + 'satellite_actual_latitude': orb_params["latitude_of_ssp"], + 'satellite_actual_altitude': simple_coord["satellite_height"] } def _get_time_parameters(self): @@ -644,7 +648,7 @@ def get_dataset(self, dataset_id, ds_info): space_masker = SpaceMasker(image_data, dataset_id["name"]) dataset = self._mask_space_pixels(dataset, space_masker) self._attach_lons_lats(dataset, dataset_id) - self._update_attrs(dataset) + self._update_attrs(dataset, dataset_id) return dataset def _get_image_data(self): @@ -699,50 +703,37 @@ def _get_calibration_table(self, dataset_id): } return tables[dataset_id["name"]] - def get_area_def_test(self, dsid): - alt_ch_name = ALT_CHANNEL_NAMES[dsid['name']] - num_lines, num_pixels = self._get_actual_shape() - mode_block = self._header['image_parameters']['mode'] + def _get_area_def_uniform_sampling(self, dataset_id): + alt_ch_name = ALT_CHANNEL_NAMES[dataset_id['name']] + num_lines, _ = self._get_actual_shape() coord_conv = self._header['image_parameters']['coordinate_conversion'] stepping_angle = coord_conv['stepping_angle_along_line'][alt_ch_name] - sampling_angle = coord_conv['sampling_angle_along_pixel'][alt_ch_name] - center_line_vissr_frame = coord_conv['central_line_number_of_vissr_frame'][alt_ch_name] - center_pixel_vissr_frame = coord_conv['central_pixel_number_of_vissr_frame'][alt_ch_name] - line_offset = self._header['control_block']['head_valid_line_number'] - pixel_offset = coord_conv['pixel_difference_of_vissr_center_from_normal_position'][ - alt_ch_name] - print(coord_conv['vissr_misalignment']) - print(coord_conv['matrix_of_misalignment']) - - equatorial_radius = coord_conv['parameters']['equatorial_radius'] - oblateness = coord_conv['parameters']['oblateness_of_earth'] name_dict = geos_area.get_geos_area_naming({ 'platform_name': self._mda['platform'], 'instrument_name': self._mda['sensor'], 'service_name': 'western-pacific', 'service_desc': 'Western Pacific', - 'resolution': dsid['resolution'] + 'resolution': dataset_id['resolution'] }) + uniform_size = num_lines + uniform_line_pixel_offset = 0.5 * num_lines + uniform_sampling_angle = geos_area.sampling_to_lfac_cfac(stepping_angle) proj_dict = { 'a_name': name_dict['area_id'], 'p_id': name_dict['area_id'], 'a_desc': name_dict['description'], - 'ssp_lon': coord_conv['orbital_parameters']['longitude_of_ssp'], - 'a': equatorial_radius, - 'b': _get_polar_earth_radius(equatorial_radius, oblateness), - 'h': mode_block['satellite_height'], - 'nlines': num_lines, - 'ncols': num_pixels, - 'lfac': geos_area.sampling_to_lfac_cfac(stepping_angle), - 'cfac': geos_area.sampling_to_lfac_cfac(sampling_angle), - 'coff': center_pixel_vissr_frame - 
pixel_offset, - 'loff': center_line_vissr_frame - line_offset, + 'ssp_lon': self._mda["orbital_parameters"]["satellite_nominal_longitude"], + "a": nav.EARTH_EQUATORIAL_RADIUS, + "b": nav.EARTH_POLAR_RADIUS, + 'h': self._mda["orbital_parameters"]["satellite_nominal_altitude"], + 'nlines': uniform_size, + 'ncols': uniform_size, + 'lfac': uniform_sampling_angle, + 'cfac': uniform_sampling_angle, + 'coff': uniform_line_pixel_offset, + 'loff': uniform_line_pixel_offset, 'scandir': 'N2S' } - from pprint import pprint - - # pprint(mode_block) - pprint(coord_conv) extent = geos_area.get_area_extent(proj_dict) area = geos_area.get_area_definition(proj_dict, extent) return area @@ -843,8 +834,9 @@ def _make_lons_lats_data_array(self, lons, lats): "units": "degrees_north"}) return lons, lats - def _update_attrs(self, dataset): + def _update_attrs(self, dataset, dataset_id): dataset.attrs.update(self._mda) + dataset.attrs["area_def_uniform_sampling"] = self._get_area_def_uniform_sampling(dataset_id) @property def start_time(self): diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index 74d9e82485..98ead60858 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -18,6 +18,7 @@ EARTH_FLATTENING = 1/298.257 EARTH_EQUATORIAL_RADIUS = 6378136.0 +EARTH_POLAR_RADIUS = EARTH_EQUATORIAL_RADIUS * (1 - EARTH_FLATTENING) """Constants taken from JMA's Msial library.""" @@ -710,8 +711,7 @@ def _interpolate_nearest(x, x_sample, y_sample): # TODO """ - -- Area def: Attention! Nominal SSP (140) != Actual SSP (70) +- Code formatting - Finish Documentation - Call find_enclosing_index only once for all predictions """ diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index c3cc9c5c58..1ba7df7421 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -533,9 +533,9 @@ def patch_number_of_pixels_per_scanline(self, monkeypatch): ) @pytest.fixture(params=[ - make_dataid(name="VIS", calibration="reflectance"), - make_dataid(name='IR1', calibration="brightness_temperature"), - make_dataid(name='IR1', calibration="counts") + make_dataid(name="VIS", calibration="reflectance", resolution=1250), + make_dataid(name='IR1', calibration="brightness_temperature", resolution=5000), + make_dataid(name='IR1', calibration="counts", resolution=5000) ]) def dataset_id(self, request): return request.param @@ -665,6 +665,9 @@ def coordinate_conversion(self): conv["parameters"]["equatorial_radius"] = 6377397.0 conv["parameters"]["oblateness_of_earth"] = 0.003342773 + + conv["orbital_parameters"]["longitude_of_ssp"] = 141.0 + conv["orbital_parameters"]["latitude_of_ssp"] = 1.0 # fmt: on return conv @@ -778,8 +781,6 @@ def wv_calibration(self): @pytest.fixture def simple_coordinate_conversion_table(self): table = np.zeros(1, dtype=vissr.SIMPLE_COORDINATE_CONVERSION_TABLE) - table["ssp_longitude"] = 141.0 - table["ssp_latitude"] = 1.0 table["satellite_height"] = 123457.0 return table @@ -910,9 +911,9 @@ def lons_lats_exp(self, dataset_id): @pytest.fixture def dataset_exp(self, dataset_id, ir1_counts_exp, ir1_bt_exp, vis_refl_exp): - ir1_counts_id = make_dataid(name="IR1", calibration="counts") - ir1_bt_id = make_dataid(name="IR1", calibration="brightness_temperature") - vis_refl_id = make_dataid(name="VIS", calibration="reflectance") + ir1_counts_id = make_dataid(name="IR1", calibration="counts", resolution=5000) + ir1_bt_id = 
make_dataid(name="IR1", calibration="brightness_temperature", resolution=5000) + vis_refl_id = make_dataid(name="VIS", calibration="reflectance", resolution=1250) expectations = { ir1_counts_id: ir1_counts_exp, ir1_bt_id: ir1_bt_exp, @@ -921,7 +922,28 @@ def dataset_exp(self, dataset_id, ir1_counts_exp, ir1_bt_exp, vis_refl_exp): return expectations[dataset_id] @pytest.fixture - def attrs_exp(self): + def area_def_exp(self, dataset_id): + from pyresample.geometry import AreaDefinition + if dataset_id["name"] == "IR1": + resol = 5 + extent = (-8.641922536247211, -8.641922536247211, 25.925767608741637, 25.925767608741637) + else: + resol = 1 + extent = (-2.1604801323784297, -2.1604801323784297, 6.481440397135289, 6.481440397135289) + area_id = f"gms-5_vissr_western-pacific_{resol}km" + desc = f"GMS-5 VISSR Western Pacific area definition with {resol} km resolution" + return AreaDefinition( + area_id=area_id, + description=desc, + proj_id=area_id, + projection={'ellps': 'SGS85', 'h': '123456', 'lon_0': '140', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, + area_extent=extent, + width=2, + height=2 + ) + + @pytest.fixture + def attrs_exp(self, area_def_exp): return { "platform": "GMS-5", "sensor": "VISSR", @@ -936,18 +958,31 @@ def attrs_exp(self): 'satellite_actual_longitude': 141.0, 'satellite_actual_latitude': 1.0, 'satellite_actual_altitude': 123457.0 - } + }, + "area_def_uniform_sampling": area_def_exp } def test_get_dataset(self, file_handler, dataset_id, dataset_exp, attrs_exp): dataset = file_handler.get_dataset(dataset_id, None) xr.testing.assert_allclose(dataset.compute(), dataset_exp, atol=1E-6) - assert dataset.attrs == attrs_exp + self._assert_attrs_equal(dataset.attrs, attrs_exp) def test_time_attributes(self, file_handler, attrs_exp): assert file_handler.start_time == attrs_exp["time_parameters"]["nominal_start_time"] assert file_handler.end_time == attrs_exp["time_parameters"]["nominal_end_time"] + def _assert_attrs_equal(self, attrs_tst, attrs_exp): + area_tst = attrs_tst.pop("area_def_uniform_sampling") + area_exp = attrs_exp.pop("area_def_uniform_sampling") + assert attrs_tst == attrs_exp + self._assert_areas_close(area_tst, area_exp) + + def _assert_areas_close(self, area_tst, area_exp): + lons_tst, lats_tst = area_tst.get_lonlats() + lons_exp, lats_exp = area_exp.get_lonlats() + np.testing.assert_allclose(lons_tst, lons_exp) + np.testing.assert_allclose(lats_tst, lats_exp) + class VissrFileWriter: def __init__(self, ch_type, open_function): From 768a2e54a1d8ad434ae3cd2d1be00f70b989aeb6 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Tue, 30 May 2023 12:16:41 +0000 Subject: [PATCH 0196/1416] Refactor area def creation --- satpy/readers/gms5_vissr_l1b.py | 103 ++++++++++++++++++++++---------- 1 file changed, 70 insertions(+), 33 deletions(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index 16e0984a64..e782513742 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -704,39 +704,14 @@ def _get_calibration_table(self, dataset_id): return tables[dataset_id["name"]] def _get_area_def_uniform_sampling(self, dataset_id): - alt_ch_name = ALT_CHANNEL_NAMES[dataset_id['name']] - num_lines, _ = self._get_actual_shape() - coord_conv = self._header['image_parameters']['coordinate_conversion'] - stepping_angle = coord_conv['stepping_angle_along_line'][alt_ch_name] - name_dict = geos_area.get_geos_area_naming({ - 'platform_name': self._mda['platform'], - 
'instrument_name': self._mda['sensor'], - 'service_name': 'western-pacific', - 'service_desc': 'Western Pacific', - 'resolution': dataset_id['resolution'] - }) - uniform_size = num_lines - uniform_line_pixel_offset = 0.5 * num_lines - uniform_sampling_angle = geos_area.sampling_to_lfac_cfac(stepping_angle) - proj_dict = { - 'a_name': name_dict['area_id'], - 'p_id': name_dict['area_id'], - 'a_desc': name_dict['description'], - 'ssp_lon': self._mda["orbital_parameters"]["satellite_nominal_longitude"], - "a": nav.EARTH_EQUATORIAL_RADIUS, - "b": nav.EARTH_POLAR_RADIUS, - 'h': self._mda["orbital_parameters"]["satellite_nominal_altitude"], - 'nlines': uniform_size, - 'ncols': uniform_size, - 'lfac': uniform_sampling_angle, - 'cfac': uniform_sampling_angle, - 'coff': uniform_line_pixel_offset, - 'loff': uniform_line_pixel_offset, - 'scandir': 'N2S' - } - extent = geos_area.get_area_extent(proj_dict) - area = geos_area.get_area_definition(proj_dict, extent) - return area + a = AreaDefEstimator( + coord_conv_params=self._header['image_parameters']['coordinate_conversion'], + metadata=self._mda + ) + return a.get_area_def_uniform_sampling( + original_shape=self._get_actual_shape(), + dataset_id=dataset_id + ) def _mask_space_pixels(self, dataset, space_masker): if self._mask_space: @@ -937,3 +912,65 @@ def get_earth_mask(shape, earth_edges, fill_value=-1): def is_vis_channel(channel_name): return channel_name == "VIS" + + +class AreaDefEstimator: + def __init__(self, coord_conv_params, metadata): + self.coord_conv = coord_conv_params + self.metadata = metadata + + def get_area_def_uniform_sampling(self, original_shape, dataset_id): + """Get area definition with uniform sampling.""" + proj_dict = self._get_proj_dict(dataset_id, original_shape) + extent = geos_area.get_area_extent(proj_dict) + return geos_area.get_area_definition(proj_dict, extent) + + def _get_proj_dict(self, dataset_id, original_shape): + proj_dict = {} + proj_dict.update(self._get_name_dict(dataset_id)) + proj_dict.update(self._get_proj4_dict()) + proj_dict.update(self._get_shape_dict(original_shape, dataset_id)) + return proj_dict + + def _get_name_dict(self, dataset_id): + name_dict = geos_area.get_geos_area_naming({ + 'platform_name': self.metadata['platform'], + 'instrument_name': self.metadata['sensor'], + 'service_name': 'western-pacific', + 'service_desc': 'Western Pacific', + 'resolution': dataset_id['resolution'] + }) + return { + "a_name": name_dict["area_id"], + "p_id": name_dict["area_id"], + "a_desc": name_dict["description"] + } + + def _get_proj4_dict(self, ): + # Use nominal parameters to make the area def as constant as possible + return { + 'ssp_lon': self.metadata["orbital_parameters"]["satellite_nominal_longitude"], + "a": nav.EARTH_EQUATORIAL_RADIUS, + "b": nav.EARTH_POLAR_RADIUS, + 'h': self.metadata["orbital_parameters"]["satellite_nominal_altitude"], + } + + def _get_shape_dict(self, original_shape, dataset_id): + # Apply parameters from the vertical dimension (num lines, stepping + # angle) to the horizontal dimension to obtain a square area definition + # with uniform sampling. 
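+        # e.g. an image with N lines becomes an N x N grid, with both lfac and
+        # cfac derived from the same line stepping angle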
+        num_lines, _ = original_shape
+        alt_ch_name = ALT_CHANNEL_NAMES[dataset_id["name"]]
+        stepping_angle = self.coord_conv['stepping_angle_along_line'][alt_ch_name]
+        uniform_size = num_lines
+        uniform_line_pixel_offset = 0.5 * num_lines
+        uniform_sampling_angle = geos_area.sampling_to_lfac_cfac(stepping_angle)
+        return {
+            'nlines': uniform_size,
+            'ncols': uniform_size,
+            'lfac': uniform_sampling_angle,
+            'cfac': uniform_sampling_angle,
+            'coff': uniform_line_pixel_offset,
+            'loff': uniform_line_pixel_offset,
+            'scandir': 'N2S'
+        }

From 36a4dcce693a4f6bc022eae1de7bc0575280eab0 Mon Sep 17 00:00:00 2001
From: youva Aoun
Date: Tue, 30 May 2023 13:14:23 +0000
Subject: [PATCH 0197/1416] Delete _has_archive_header method as it is a remnant of the merge conflict

---
 satpy/readers/seviri_l1b_native.py | 8 +-------
 1 file changed, 1 insertion(+), 7 deletions(-)

diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py
index fc694df114..8cd7ba31d1 100644
--- a/satpy/readers/seviri_l1b_native.py
+++ b/satpy/readers/seviri_l1b_native.py
@@ -70,7 +70,7 @@
         orbital_parameters: {'projection_longitude': 0.0, 'projection_latit...
         time_parameters: {'nominal_start_time': datetime.datetime(2021, ...
         units: K
-        wavelength: 10.8 µm (9.8-11.8 µm)
+        wavelength: 10.8 µm (9.8-11.8 µm)
         standard_name: toa_brightness_temperature
         platform_name: Meteosat-11
         sensor: seviri
@@ -201,12 +201,6 @@ def __init__(self, filename, filename_info, filetype_info,
             if self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1:
                 self.tres = 5

-    def _has_archive_header(self):
-        """Check whether the file includes an ASCII archive header."""
-        ascii_startswith = b'FormatName : NATIVE'
-        with open(self.filename, mode='rb') as istream:
-            return istream.read(36) == ascii_startswith
-
     @property
     def nominal_start_time(self):
         """Read the repeat cycle nominal start time from metadata and round it to expected nominal time slot."""

From ebc3e088b13c6d4b329ac0eea3ff69832397aba1 Mon Sep 17 00:00:00 2001
From: youva Aoun
Date: Tue, 30 May 2023 13:20:00 +0000
Subject: [PATCH 0198/1416] Remove the default value as input of round_nom_time and remove import datetime

---
 satpy/readers/seviri_base.py | 8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)

diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py
index b558ce62fb..9187e7355e 100644
--- a/satpy/readers/seviri_base.py
+++ b/satpy/readers/seviri_base.py
@@ -166,7 +166,7 @@
 """

 import warnings
-from datetime import datetime, timedelta
+from datetime import timedelta

 import dask.array as da
 import numpy as np
@@ -984,7 +984,7 @@ def mask_bad_quality(data, line_validity, line_geometric_quality, line_radiometr
     return data


-def round_nom_time(dt=None, time_delta=None):
+def round_nom_time(dt, time_delta):
     """Round a datetime object to a multiple of a timedelta.

     dt : datetime.datetime object, default now.
@@ -992,11 +992,7 @@ def round_nom_time(dt=None, time_delta=None):
     adapted for SEVIRI from:
     http://stackoverflow.com/questions/3463930/how-to-round-the-minute-of-a-datetime-object-python
     """
-    if dt is None:
-        dt = datetime.now()
     seconds = (dt - dt.min).seconds
-    if time_delta is None:
-        time_delta = timedelta(minutes=1)
     round_to = time_delta.total_seconds()
     rounding = (seconds + round_to / 2) // round_to * round_to

From 957e1d543b049f426aca3ac8dc25392c17a6f6da Mon Sep 17 00:00:00 2001
From: youva Aoun
Date: Tue, 30 May 2023 13:30:34 +0000
Subject: [PATCH 0199/1416] Move rc_period_min into a dedicated method and fix variable naming

---
 satpy/readers/fci_l1c_nc.py | 21 ++++++++++++++-------
 1 file changed, 14 insertions(+), 7 deletions(-)

diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py
index 49b01e4483..8e28219035 100644
--- a/satpy/readers/fci_l1c_nc.py
+++ b/satpy/readers/fci_l1c_nc.py
@@ -202,10 +202,6 @@ class using the :mod:`~satpy.Scene.load` method with the reader

     def __init__(self, filename, filename_info, filetype_info):
         """Initialize file handler."""
-        self.RC_period_min = 10
-        if not filename_info['coverage'] == 'FD':
-            raise NotImplementedError(f"coverage for {filename_info['coverage']} not supported by this reader")
-        self.RC_period_min = 2.5
         super().__init__(filename, filename_info,
                          filetype_info,
                          cache_var_size=0,
@@ -216,16 +212,27 @@ def __init__(self, filename, filename_info, filetype_info):

         self._cache = {}

+    @property
+    def rc_period_min(self):
+        """Get nominal repeat cycle duration.
+
+        As RSS is not yet implemented, an error will be raised if RSS data are to be read.
+        """
+        if not self.filename_info['coverage'] == 'FD':
+            raise NotImplementedError(f"coverage for {self.filename_info['coverage']} not supported by this reader")
+            return 2.5
+        return 10
+
     @property
     def nominal_start_time(self):
         """Get nominal start time."""
-        RC_date = self.observation_start_time.replace(hour=0, minute=0, second=0, microsecond=0)
-        return RC_date + timedelta(minutes=(self.filename_info['repeat_cycle_in_day']-1)*self.RC_period_min)
+        rc_date = self.observation_start_time.replace(hour=0, minute=0, second=0, microsecond=0)
+        return rc_date + timedelta(minutes=(self.filename_info['repeat_cycle_in_day']-1)*self.rc_period_min)

     @property
     def nominal_end_time(self):
         """Get nominal end time."""
-        return self.nominal_start_time + timedelta(minutes=self.RC_period_min)
+        return self.nominal_start_time + timedelta(minutes=self.rc_period_min)

     @property
     def observation_start_time(self):

From bebfdad9e4724c0186ffb954c237d2ba85d35b9a Mon Sep 17 00:00:00 2001
From: youva Aoun
Date: Tue, 30 May 2023 13:44:35 +0000
Subject: [PATCH 0200/1416] Make _repeat_cycle_duration a private property and replace self.tres

---
 satpy/readers/seviri_l1b_hrit.py | 14 +++++++++-----
 1 file changed, 9 insertions(+), 5 deletions(-)

diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py
index 127954c41c..388af46a3e 100644
--- a/satpy/readers/seviri_l1b_hrit.py
+++ b/satpy/readers/seviri_l1b_hrit.py
@@ -452,9 +452,6 @@ def __init__(self, filename, filename_info, filetype_info,
         self.calib_mode = calib_mode
         self.ext_calib_coefs = ext_calib_coefs or {}
         self.mask_bad_quality_scan_lines = mask_bad_quality_scan_lines
-        self.tres = REPEAT_CYCLE_DURATION  # base RC duration of 15
-        if self.epilogue['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1:
-            self.tres = 5

         self._get_header()

     def _get_header(self):
@@ -495,19 +492,26 @@ def _get_header(self):
         self.mda['service'] = service
service self.channel_name = CHANNEL_NAMES[self.mda['spectral_channel_id']] + @property + def _repeat_cycle_duration(self): + """Get repeacyckle duration from epilogue.""" + if self.epilogue['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1: + return 5 + return REPEAT_CYCLE_DURATION + @property def nominal_start_time(self): """Get the start time and round it according to scan law.""" tm = self.prologue['ImageAcquisition'][ 'PlannedAcquisitionTime']['TrueRepeatCycleStart'] - return round_nom_time(tm, time_delta=timedelta(minutes=self.tres)) + return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) @property def nominal_end_time(self): """Get the end time and round it according to scan law.""" tm = self.prologue['ImageAcquisition'][ 'PlannedAcquisitionTime']['PlannedRepeatCycleEnd'] - return round_nom_time(tm, time_delta=timedelta(minutes=self.tres)) + return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) @property def observation_start_time(self): From 9e022ac911832fcc996f2e815cf89814ae896072 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 30 May 2023 13:51:30 +0000 Subject: [PATCH 0201/1416] Move _repeat_cycle_duration to a private property and replace self.tres --- satpy/readers/seviri_l1b_native.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index 8cd7ba31d1..b75c8edcd9 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -196,22 +196,24 @@ def __init__(self, filename, filename_info, filetype_info, self._read_trailer() self.image_boundaries = ImageBoundaries(self.header, self.trailer, self.mda) - self.tres = REPEAT_CYCLE_DURATION # base RC duration of 15 - if filetype_info is not None: # to avoid error in the pytest - if self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1: - self.tres = 5 + @property + def _repeat_cycle_duration(self): + """Get repeacyckle duration from the trailer.""" + if self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1: + return 5 + return REPEAT_CYCLE_DURATION @property def nominal_start_time(self): """Read the repeat cycle nominal start time from metadata and round it to expected nominal time slot.""" tm = self.header['15_DATA_HEADER']['ImageAcquisition']['PlannedAcquisitionTime']['TrueRepeatCycleStart'] - return round_nom_time(tm, time_delta=timedelta(minutes=self.tres)) + return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) @property def nominal_end_time(self): """Read the repeat cycle nominal end time from metadata and round it to expected nominal time slot.""" tm = self.header['15_DATA_HEADER']['ImageAcquisition']['PlannedAcquisitionTime']['PlannedRepeatCycleEnd'] - return round_nom_time(tm, time_delta=timedelta(minutes=self.tres)) + return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) @property def observation_start_time(self): From f147f2904a23d375aca355ad366805412eda87f4 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 30 May 2023 13:52:52 +0000 Subject: [PATCH 0202/1416] Fix file handler to add the ReducedScan key so that _repeat_cycle_duration does not throw an error --- satpy/tests/reader_tests/test_seviri_l1b_native.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py 
b/satpy/tests/reader_tests/test_seviri_l1b_native.py index 316109ea75..8669f35a96 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_native.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py @@ -1102,7 +1102,8 @@ def file_handler(self): 'ImageProductionStats': { 'ActualScanningSummary': { 'ForwardScanStart': datetime(2006, 1, 1, 12, 15, 9, 304888), - 'ForwardScanEnd': datetime(2006, 1, 1, 12, 27, 9, 304888) + 'ForwardScanEnd': datetime(2006, 1, 1, 12, 27, 9, 304888), + 'ReducedScan': 0 } } } From bcf945917cd8af5b488f2cc01a943471b29aa1e6 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 30 May 2023 13:54:30 +0000 Subject: [PATCH 0203/1416] Remove debug comments --- satpy/tests/reader_tests/test_seviri_l1b_hrit.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py index 358deb59bb..7fdec8bef2 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py @@ -46,7 +46,6 @@ class TestHRITMSGFileHandlerHRV(TestHRITMSGBase): def setUp(self): """Set up the hrit file handler for testing HRV.""" - # self.observation_start_time = datetime(2016, 3, 3, 0, 0) self.observation_start_time = datetime(2006, 1, 1, 12, 15, 9, 304888) self.nlines = 464 self.reader = setup.get_fake_file_handler( @@ -145,7 +144,6 @@ class TestHRITMSGFileHandler(TestHRITMSGBase): def setUp(self): """Set up the hrit file handler for testing.""" - # self.observation_start_time = datetime(2016, 3, 3, 0, 0) self.observation_start_time = datetime(2006, 1, 1, 12, 15, 9, 304888) self.nlines = 464 self.ncols = 3712 From 49ed65bc5fc5225d7b1f202ab02f13cf509efaae Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 30 May 2023 14:04:00 +0000 Subject: [PATCH 0204/1416] Fix docstring to specify observation start/end --- satpy/readers/seviri_l1b_hrit.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 388af46a3e..80abf2325a 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -515,13 +515,13 @@ def nominal_end_time(self): @property def observation_start_time(self): - """Get the start time.""" + """Get the observation start time.""" return self.epilogue['ImageProductionStats'][ 'ActualScanningSummary']['ForwardScanStart'] @property def observation_end_time(self): - """Get the end time.""" + """Get the observation end time.""" return self.epilogue['ImageProductionStats'][ 'ActualScanningSummary']['ForwardScanEnd'] From b23cd276a2764f16c63dd7a89c15b66cbe5c7343 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 30 May 2023 14:04:17 +0000 Subject: [PATCH 0205/1416] Fix docstring to specify observation start/end --- satpy/readers/hrit_base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/hrit_base.py b/satpy/readers/hrit_base.py index b27103cbce..c8b2287653 100644 --- a/satpy/readers/hrit_base.py +++ b/satpy/readers/hrit_base.py @@ -223,12 +223,12 @@ def _get_hd(self, hdr_info): @property def observation_start_time(self): - """Get start time.""" + """Get observation start time.""" return self._start_time @property def observation_end_time(self): - """Get end time.""" + """Get observation end time.""" return self._end_time @property From 0d4abd281bf37fe6e0fcb9a704224721591d0904 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 30 May 2023 14:07:04 +0000 Subject: [PATCH 0206/1416] Fix doc string for observation 
start/end time --- satpy/readers/seviri_l1b_native.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index b75c8edcd9..b3f4520d83 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -217,13 +217,13 @@ def nominal_end_time(self): @property def observation_start_time(self): - """Read the repeat cycle sensing start time from metadata.""" + """Get observation start time from trailer.""" return self.trailer['15TRAILER']['ImageProductionStats'][ 'ActualScanningSummary']['ForwardScanStart'] @property def observation_end_time(self): - """Read the repeat cycle sensing end time from metadata.""" + """Get observation end time from trailer.""" return self.trailer['15TRAILER']['ImageProductionStats'][ 'ActualScanningSummary']['ForwardScanEnd'] From 8f2f41ba131a6ee0a31853cc0a8f5ac850e1c34c Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 30 May 2023 14:08:45 +0000 Subject: [PATCH 0207/1416] Fix doc string for nominal start/end time --- satpy/readers/seviri_l1b_native.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index b3f4520d83..04dadcb0e9 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -205,13 +205,13 @@ def _repeat_cycle_duration(self): @property def nominal_start_time(self): - """Read the repeat cycle nominal start time from metadata and round it to expected nominal time slot.""" + """Get the repeat cycle nominal start time from file header and round it to expected nominal time slot.""" tm = self.header['15_DATA_HEADER']['ImageAcquisition']['PlannedAcquisitionTime']['TrueRepeatCycleStart'] return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) @property def nominal_end_time(self): - """Read the repeat cycle nominal end time from metadata and round it to expected nominal time slot.""" + """Get the repeat cycle nominal end time from file header and round it to expected nominal time slot.""" tm = self.header['15_DATA_HEADER']['ImageAcquisition']['PlannedAcquisitionTime']['PlannedRepeatCycleEnd'] return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) From 41b71bfcc030378a6cff11ca7fa3430a9bb6cd31 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 30 May 2023 14:11:20 +0000 Subject: [PATCH 0208/1416] Fix doc string for observation start/end time --- satpy/readers/seviri_l1b_nc.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/seviri_l1b_nc.py b/satpy/readers/seviri_l1b_nc.py index c4e5cfb9f2..7d782cba28 100644 --- a/satpy/readers/seviri_l1b_nc.py +++ b/satpy/readers/seviri_l1b_nc.py @@ -84,12 +84,12 @@ def nominal_end_time(self): @property def observation_start_time(self): - """Read the repeat cycle sensing start time from metadata.""" + """Get the repeat cycle observation start time from metadata.""" return self.deltaSt @property def observation_end_time(self): - """Read the repeat cycle sensing end time from metadata.""" + """Get the repeat cycle observation end time from metadata.""" return self.deltaEnd @property From 5a909d261efca211d7147d61e1aa78e045f4143a Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 30 May 2023 14:17:56 +0000 Subject: [PATCH 0209/1416] Replace tres by a dedicated private property _repeat_cycle_duration --- satpy/readers/seviri_l1b_nc.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 
deletions(-) diff --git a/satpy/readers/seviri_l1b_nc.py b/satpy/readers/seviri_l1b_nc.py index 7d782cba28..ae56053114 100644 --- a/satpy/readers/seviri_l1b_nc.py +++ b/satpy/readers/seviri_l1b_nc.py @@ -70,17 +70,25 @@ def __init__(self, filename, filename_info, filetype_info, self.reference = datetime.datetime(1958, 1, 1) self.get_metadata() + @property + def _repeat_cycle_duration(self): + """Get repeat cycle duration from the metadata.""" + if self.nc.attrs['nominal_image_scanning'] == 'T': + return 15 + elif self.nc.attrs['reduced_scanning'] == 'T': + return 5 + @property def nominal_start_time(self): """Read the repeat cycle nominal start time from metadata and round it to expected nominal time slot.""" tm = self.deltaSt - return round_nom_time(tm, time_delta=timedelta(minutes=self.tres)) + return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) @property def nominal_end_time(self): """Read the repeat cycle nominal end time from metadata and round it to expected nominal time slot.""" tm = self.deltaEnd - return round_nom_time(tm, time_delta=timedelta(minutes=self.tres)) + return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) @property def observation_start_time(self): @@ -145,10 +153,6 @@ def get_metadata(self): self.deltaEnd = self.reference + datetime.timedelta( days=int(self.nc.attrs['planned_repeat_cycle_end_day']), milliseconds=int(self.nc.attrs['planned_repeat_cycle_end_mi_sec'])) - if self.nc.attrs['nominal_image_scanning'] == 'T': - self.tres = 15 - elif self.nc.attrs['reduced_scanning'] == 'T': - self.tres = 5 self.north = int(self.nc.attrs['north_most_line']) self.east = int(self.nc.attrs['east_most_pixel']) From 925a3b7c8005a9adb25ff7698b262be5f76bb8a8 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 30 May 2023 14:55:30 +0000 Subject: [PATCH 0210/1416] Fix typo --- satpy/readers/seviri_l1b_native.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index 04dadcb0e9..89d54ab3d0 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -198,7 +198,7 @@ def __init__(self, filename, filename_info, filetype_info, @property def _repeat_cycle_duration(self): - """Get repeacyckle duration from the trailer.""" + """Get repeat cycle duration from the trailer.""" if self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1: return 5 return REPEAT_CYCLE_DURATION From a62452dc465f30bccb230b17b3a1ce203b741307 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 30 May 2023 14:56:28 +0000 Subject: [PATCH 0211/1416] Fix typo in docstring of _repeat_cycle_duration --- satpy/readers/seviri_l1b_hrit.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 80abf2325a..4080250cdf 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -494,7 +494,7 @@ def _get_header(self): @property def _repeat_cycle_duration(self): - """Get repeacyckle duration from epilogue.""" + """Get repeat cycle duration from epilogue.""" if self.epilogue['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1: return 5 return REPEAT_CYCLE_DURATION From 66bc4654d409382c9ac6999879a94430ce5c5949 Mon Sep 17 00:00:00 2001 From: yukaribbba <72339781+yukaribbba@users.noreply.github.com> Date: Wed, 31 May 2023 10:59:40 +0800 Subject: [PATCH 0212/1416] Update 
satpy/composites/__init__.py Co-authored-by: David Hoese --- satpy/composites/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 57fe4bb9ea..8cbc55b20d 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1039,7 +1039,7 @@ class RatioSharpenedRGB(GenericCompositor): new_G = G * ratio new_B = B * ratio - In some cases, there could be another high resolution band:: + In some cases, there could be multiple high resolution bands:: R_lo - 1000m resolution - shape=(2000, 2000) G_hi - 500m resolution - shape=(4000, 4000) From 609a39aef92283601aa72acc5b22854d5925b7b3 Mon Sep 17 00:00:00 2001 From: yukaribbba <72339781+yukaribbba@users.noreply.github.com> Date: Wed, 31 May 2023 10:59:51 +0800 Subject: [PATCH 0213/1416] Update satpy/composites/__init__.py Co-authored-by: David Hoese --- satpy/composites/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 8cbc55b20d..e38740ee60 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1047,8 +1047,8 @@ class RatioSharpenedRGB(GenericCompositor): R_hi - 500m resolution - shape=(4000, 4000) To avoid the green band getting involved in calculating ratio or sharpening, - specify it by "neutral_resolution_band: green" in YAML config file. Therefore, - only blue band will get sharpened:: + add "neutral_resolution_band: green" in the YAML config file. This way + only the blue band will get sharpened:: ratio = R_hi / R_lo new_R = R_hi From b0070a10280fd73a04d1f760d8f44f0ae53ee3de Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 31 May 2023 13:40:42 +0800 Subject: [PATCH 0214/1416] Update __init__.py --- satpy/composites/__init__.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index e38740ee60..d8c31125d0 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1060,7 +1060,7 @@ class RatioSharpenedRGB(GenericCompositor): def __init__(self, *args, **kwargs): """Instanciate the ration sharpener.""" self.high_resolution_color = kwargs.pop("high_resolution_band", "red") - self.neutral_resolution_color = kwargs.pop("neutral_resolution_band", "red") + self.neutral_resolution_color = kwargs.pop("neutral_resolution_band", None) if self.high_resolution_color not in ['red', 'green', 'blue', None]: raise ValueError("RatioSharpenedRGB.high_resolution_band must " "be one of ['red', 'green', 'blue', None]. 
Not " @@ -1103,15 +1103,12 @@ def _get_and_sharpen_rgb_data_arrays_and_meta(self, datasets, optional_datasets) if 'rows_per_scan' in high_res.attrs: new_attrs.setdefault('rows_per_scan', high_res.attrs['rows_per_scan']) new_attrs.setdefault('resolution', high_res.attrs['resolution']) - colors = ['red', 'green', 'blue'] + colors = ['red', 'green', 'blue', None] low_resolution_index = colors.index(self.high_resolution_color) high_resolution_index = low_resolution_index - if self.neutral_resolution_color is not None: - neutral_resolution_index = colors.index(self.neutral_resolution_color) - neutral_res = datasets[neutral_resolution_index] - else: - neutral_res = None - neutral_resolution_index = 0 + neutral_resolution_index = colors.index(self.neutral_resolution_color) + neutral_res = datasets[neutral_resolution_index] if neutral_resolution_index is not None else None + else: LOG.debug("No sharpening band specified for ratio sharpening") high_res = None From 40b3c83debb835b24e62b6d6e007d102aec7e278 Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Wed, 31 May 2023 11:33:11 +0200 Subject: [PATCH 0215/1416] Fix mistake in dev-guide docs. --- doc/source/dev_guide/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/dev_guide/index.rst b/doc/source/dev_guide/index.rst index 72068aab62..93c6ecd0ec 100644 --- a/doc/source/dev_guide/index.rst +++ b/doc/source/dev_guide/index.rst @@ -49,7 +49,7 @@ can do this using conda_:: .. _conda: https://conda.io/ -This will create a new environment called "satpy-dev" with Python 3.8 +This will create a new environment called "satpy-dev" with Python 3.11 installed. The second command will activate the environment so any future conda, python, or pip commands will use this new environment. From f21497f51ad7df5e3c8f1d570ccfe546c2cd3fbd Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Wed, 31 May 2023 09:34:45 +0000 Subject: [PATCH 0216/1416] Fix doc creatin error caused by indent in docstring of round_nom_time --- satpy/readers/seviri_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 9187e7355e..131fe39ad4 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -990,7 +990,7 @@ def round_nom_time(dt, time_delta): dt : datetime.datetime object, default now. time_delta : timedelta object, we round to a multiple of this, default 1 minute. adapted for SEVIRI from: - http://stackoverflow.com/questions/3463930/how-to-round-the-minute-of-a-datetime-object-python + https://stackoverflow.com/questions/3463930/how-to-round-the-minute-of-a-datetime-object-python """ seconds = (dt - dt.min).seconds round_to = time_delta.total_seconds() From 092c22a470936495fd66f345a918e6f64bbcbecd Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Wed, 31 May 2023 11:48:01 +0200 Subject: [PATCH 0217/1416] Fix mistake in dev-guide docs. --- doc/source/dev_guide/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/dev_guide/index.rst b/doc/source/dev_guide/index.rst index 93c6ecd0ec..e877fd1c63 100644 --- a/doc/source/dev_guide/index.rst +++ b/doc/source/dev_guide/index.rst @@ -125,7 +125,7 @@ Satpy's documentation is built using Sphinx. All documentation lives in the ``doc/`` directory of the project repository. For building the documentation, additional packages are needed. These can be installed with :: - pip install -e ".[all]". 
+ pip install -e ".[all]" After editing the source files there the documentation can be generated locally:: From e0d622c597d7f9a4960ca57789665392227f3e8b Mon Sep 17 00:00:00 2001 From: ghiggi Date: Wed, 31 May 2023 11:48:48 +0200 Subject: [PATCH 0218/1416] Rerun tests --- satpy/writers/cf_writer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index edc26591fe..b9a24b9292 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -215,7 +215,7 @@ def get_extra_ds(dataarray, keys=None): - """Get the extra datasets associated to *dataset*.""" + """Get the ancillary_variables DataArrays associated to a dataset.""" ds_collection = {} # Retrieve ancillary variable datarrays for ancillary_dataarray in dataarray.attrs.get('ancillary_variables', []): From d77d2f2ac49cd01f2df2247b1afcaf1bc6b9a691 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Wed, 31 May 2023 10:04:53 +0000 Subject: [PATCH 0219/1416] Add warning in the docstring for the future change of the nominal start/end time --- satpy/readers/seviri_l1b_hrit.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 4080250cdf..9096b36c4c 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -60,6 +60,30 @@ This reader also accepts bzipped file with the extension ``.bz2`` for the prologue, epilogue, and segment files. +Nominal start/end time +---------------------- + +.. warning:: attribute access change + +``nominal_start_time`` and ``nominal_end_time`` should be accessed using the ``time_parameters`` attribute. + +``nominal_start_time`` and ``nominal_end_time`` are also available directly +via ``start_time`` and ``end_time`` respectively. + +Here is an example of the content of the start/end time and ``time_parameters`` attributes + +.. 
code-block:: python + + Start time: 2019-08-29 12:00:00 + End time: 2019-08-29 12:15:00 + time_parameters: + {'nominal_start_time': datetime.datetime(2019, 8, 29, 12, 0), + 'nominal_end_time': datetime.datetime(2019, 8, 29, 12, 15), + 'observation_start_time': datetime.datetime(2019, 8, 29, 12, 0, 9, 338000), + 'observation_end_time': datetime.datetime(2019, 8, 29, 12, 15, 9, 203000) + } + + Example ------- Here is an example how to read the data in satpy: From d0ade06afa303daac8e67a88f33ca1df10e0bb87 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 31 May 2023 10:05:35 +0000 Subject: [PATCH 0220/1416] Update documentation --- satpy/etc/readers/gms5-vissr_l1b.yaml | 2 + satpy/readers/gms5_vissr_l1b.py | 109 ++++++++++++++---- .../tests/reader_tests/test_gms5_vissr_l1b.py | 83 +++++++++++-- 3 files changed, 164 insertions(+), 30 deletions(-) diff --git a/satpy/etc/readers/gms5-vissr_l1b.yaml b/satpy/etc/readers/gms5-vissr_l1b.yaml index f37be582a4..6b18579463 100644 --- a/satpy/etc/readers/gms5-vissr_l1b.yaml +++ b/satpy/etc/readers/gms5-vissr_l1b.yaml @@ -8,6 +8,8 @@ reader: - https://www.data.jma.go.jp/mscweb/en/operation/fig/VISSR_FORMAT_GMS-5.pdf - https://www.data.jma.go.jp/mscweb/en/operation/fig/GMS_Users_Guide_3rd_Edition_Rev1.pdf + status: Alpha + supports_fsspec: true sensors: [gms5-vissr] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index e782513742..5e9de11f58 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -2,7 +2,46 @@ Introduction ------------ -TODO +The ``gms5_vissr_l1b`` reader can decode, navigate and calibrate Level 1B data +from the Visible and Infrared Spin Scan Radiometer (VISSR) in `VISSR +archive format`. Corresponding platforms are GMS-5 (Japanese Geostationary +Meteorological Satellite) and GOES-09 (2003-2006 backup after MTSAT-1 launch +failure). + +VISSR has four channels, each stored in a separate file: + +.. code-block:: none + + VISSR_20020101_0031_IR1.A.IMG + VISSR_20020101_0031_IR2.A.IMG + VISSR_20020101_0031_IR3.A.IMG + VISSR_20020101_0031_VIS.A.IMG + +This is how to read them with Satpy: + +.. code-block:: python + + from satpy import Scene + import glob + + filenames = glob.glob("/data/VISSR*") + scene = Scene(filenames, reader="gms5-vissr_l1b") + scene.load(["VIS", "IR1"]) + + +References +~~~~~~~~~~ + +Details about platform, instrument and data format can be found in the +following references: + + - `VISSR Format Description`_ + - `GMS User Guide`_ + +.. _VISSR Format Description: + https://www.data.jma.go.jp/mscweb/en/operation/fig/VISSR_FORMAT_GMS-5.pdf +.. _GMS User Guide: + https://www.data.jma.go.jp/mscweb/en/operation/fig/GMS_Users_Guide_3rd_Edition_Rev1.pdf Compression ----------- Sensor counts are calibrated by looking up reflectance/temperature values in the -calibration tables included in each file. +calibration tables included in each file. See section 2.2 in the VISSR user +guide. Navigation @@ -46,7 +86,8 @@ This cannot be represented by a pyresample area definition, so each dataset is accompanied by 2-dimensional longitude and latitude coordinates. For -resampling purpose an area definition with uniform sampling is provided via +resampling purpose a square area definition with uniform sampling is provided +via .. 
code-block:: python @@ -62,14 +103,18 @@ is identical (IR channels) 2) for different repeat cycles, even if the channel is identical +However, the above area definition is using the nominal subsatellite point as +projection center. As this rarely changes, the area definition is pretty +constant. + Space Pixels ------------ -VISSR produces data for pixels outside the Earth disk (i,e: atmospheric limb or +VISSR produces data for pixels outside the Earth disk (i.e. atmospheric limb or deep space pixels). By default, these pixels are masked out as they contain data of limited or no value, but some applications do require these pixels. -To turn off masking, set ``mask_space=False`` upon scene creation:: +To turn off masking, set ``mask_space=False`` upon scene creation: .. code-block:: python @@ -80,20 +125,15 @@ scene = satpy.Scene(filenames, reader="gms5-vissr_l1b", reader_kwargs={"mask_space": False}) - scene.load(["VIS"]) - + scene.load(["VIS", "IR1"]) -References ---------- +Metadata +-------- - - [FMT]: `VISSR Format Description`_ - - [UG]: `GMS User Guide`_ +Dataset attributes include metadata such as time or orbital parameters, +see :ref:`dataset_metadata`. -.. _VISSR Format Description: - https://www.data.jma.go.jp/mscweb/en/operation/fig/VISSR_FORMAT_GMS-5.pdf -.. _GMS User Guide: - https://www.data.jma.go.jp/mscweb/en/operation/fig/GMS_Users_Guide_3rd_Edition_Rev1.pdf """ import dask.array as da @@ -507,7 +547,7 @@ # fmt: on -def recarr2dict(arr, preserve=None): +def _recarr2dict(arr, preserve=None): if not preserve: preserve = [] res = {} @@ -516,7 +556,7 @@ def recarr2dict(arr, preserve=None): continue if value.dtype.names and key not in preserve: # Nested record array - res[key] = recarr2dict(value) + res[key] = _recarr2dict(value) else: # Scalar or record array that shall be preserved res[key] = value @@ -524,6 +564,8 @@ class GMS5VISSRFileHandler(BaseFileHandler): + """File handler for GMS-5 VISSR data in VISSR archive format.""" + def __init__(self, filename, filename_info, filetype_info, mask_space=True): super(GMS5VISSRFileHandler, self).__init__(filename, filename_info, filetype_info) self._filename = filename @@ -554,7 +596,7 @@ def _read_control_block(self, file_obj): dtype=CONTROL_BLOCK, count=1 ) - return recarr2dict(ctrl_block[0]) + return _recarr2dict(ctrl_block[0]) def _read_image_params(self, file_obj, channel_type): """Read image parameters from the header.""" @@ -577,7 +619,7 @@ def _read_image_param(file_obj, param, channel_type): count=1, offset=param['offset'][channel_type] ) - return recarr2dict(image_params[0], preserve=param.get('preserve')) + return _recarr2dict(image_params[0], preserve=param.get('preserve')) @staticmethod def _concat_orbit_prediction(orb_pred_1, orb_pred_2): @@ -642,6 +684,7 @@ def _get_time_parameters(self): } def get_dataset(self, dataset_id, ds_info): + """Get dataset from file.""" image_data = self._get_image_data() counts = self._get_counts(image_data) dataset = self._calibrate(counts, dataset_id) @@ -815,24 +858,39 @@ def _update_attrs(self, dataset, dataset_id): @property def start_time(self): + """Nominal start time of the dataset.""" return self._mda["time_parameters"]["nominal_start_time"] @property def end_time(self): + """Nominal end time of the dataset.""" return self._mda["time_parameters"]["nominal_end_time"] def read_from_file_obj(file_obj, dtype, count, offset=0): + """Read data from file object. + + Args: + file_obj: An open file object. + dtype: Data type to be read. 
+ count: Number of elements to be read. + offset: Byte offset where to start reading. + """ file_obj.seek(offset) data = file_obj.read(dtype.itemsize * count) return np.frombuffer(data, dtype=dtype, count=count) class Calibrator: + """Calibrate VISSR data to reflectance or brightness temperature. + + Reference: Section 2.2 in the VISSR User Guide. + """ def __init__(self, calib_table): self._calib_table = calib_table def calibrate(self, counts, calibration): + """Transform counts to given calibration level.""" if calibration == "counts": return counts res = da.map_blocks( @@ -855,6 +913,8 @@ def _lookup_calib_table(self, counts, calib_table): class SpaceMasker: + """Mask pixels outside the earth disk.""" + _fill_value = -1 # scanline not intersecting the earth def __init__(self, image_data, channel): @@ -864,6 +924,7 @@ def __init__(self, image_data, channel): self._earth_mask = self._get_earth_mask() def mask_space(self, dataset): + """Mask space pixels in the given dataset.""" return dataset.where(self._earth_mask).astype(np.float32) def _get_earth_mask(self): @@ -911,16 +972,24 @@ def get_earth_mask(shape, earth_edges, fill_value=-1): def is_vis_channel(channel_name): + """Check if it's the visible channel.""" return channel_name == "VIS" class AreaDefEstimator: + """Estimate area definition for VISSR images.""" + def __init__(self, coord_conv_params, metadata): self.coord_conv = coord_conv_params self.metadata = metadata def get_area_def_uniform_sampling(self, original_shape, dataset_id): - """Get area definition with uniform sampling.""" + """Get square area definition with uniform sampling. + + Args: + original_shape: Shape of the oversampled VISSR image. + dataset_id: ID of the corresponding dataset. + """ proj_dict = self._get_proj_dict(dataset_id, original_shape) extent = geos_area.get_area_extent(proj_dict) return geos_area.get_area_definition(proj_dict, extent) diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index 1ba7df7421..b2a93241ac 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -7,6 +7,7 @@ import pytest import xarray as xr +from pyresample.geometry import AreaDefinition import satpy.readers.gms5_vissr_l1b as vissr import satpy.readers.gms5_vissr_navigation as nav from satpy.tests.utils import make_dataid @@ -17,6 +18,7 @@ # VISSR_19960217_2331_IR1.A.IMG and VISSR_19960217_2331_VIS.A.IMG). The VIS # navigation is slightly off (< 0.01 deg) compared to JMA's reference. # This is probably due to precision problems with the copied numbers. 
+# fmt: off IR_NAVIGATION_REFERENCE = [ { 'line': 686, @@ -179,6 +181,7 @@ ) }, ] +# fmt: on NAVIGATION_REFERENCE = VIS_NAVIGATION_REFERENCE + IR_NAVIGATION_REFERENCE @@ -276,6 +279,7 @@ def test_normalize_vector(self): class TestImageNavigation: + """Test navigation of an entire image.""" def test_get_lons_lats( self, scan_params, @@ -283,12 +287,15 @@ def test_get_lons_lats( orbit_prediction, proj_params ): + """Test getting lon/lat coordinates.""" + # fmt: off lons_exp = [[-114.56923, -112.096837, -109.559702], [8.33221, 8.793893, 9.22339], [15.918476, 16.268354, 16.6332]] lats_exp = [[-23.078721, -24.629845, -26.133314], [-42.513409, -39.790231, -37.06392], [3.342834, 6.07043, 8.795932]] + # fmt: on lons, lats = nav.get_lons_lats( lines=np.array([1000, 1500, 2000]), pixels=np.array([1000, 1500, 2000]), @@ -300,16 +307,20 @@ def test_get_lons_lats( class TestEarthMask: + """Test getting the earth mask.""" def test_get_earth_mask(self): + """Test getting the earth mask.""" first_earth_pixels = np.array([-1, 1, 0, -1]) last_earth_pixels = np.array([-1, 3, 2, -1]) edges = first_earth_pixels, last_earth_pixels + # fmt: off mask_exp = np.array( [[0, 0, 0, 0], [0, 1, 1, 1], [1, 1, 1, 0], [0, 0, 0, 0]] ) + # fmt: on mask = vissr.get_earth_mask(mask_exp.shape, edges) np.testing.assert_equal(mask, mask_exp) @@ -326,6 +337,7 @@ class TestPredictionInterpolation: ] ) def test_interpolate_continuous(self, obs_time, expected): + """Test interpolation of continuous variables.""" prediction_times = np.array([0, 1, 2, 3]) predicted_values = np.array([1, 2, 3, 4]) res = nav.interpolate_continuous( @@ -346,6 +358,7 @@ def test_interpolate_continuous(self, obs_time, expected): ] ) def test_interpolate_angles(self, obs_time, expected): + """Test interpolation of periodic angles.""" prediction_times = np.array([0, 1, 2, 3, 4]) predicted_angles = np.array( [0, 0.5*np.pi, np.pi, 1.5*np.pi, 2*np.pi] @@ -366,6 +379,7 @@ def test_interpolate_angles(self, obs_time, expected): ] ) def test_interpolate_nearest(self, obs_time, expected): + """Test nearest neighbour interpolation.""" prediction_times = np.array([0, 1, 2]) predicted_angles = np.array([ np.zeros((2, 2)), @@ -380,6 +394,7 @@ def test_interpolate_nearest(self, obs_time, expected): np.testing.assert_allclose(res, expected) def test_interpolate_orbit_prediction(self, obs_time, orbit_prediction, orbit_expected): + """Test interpolating orbit prediction.""" orbit_prediction = orbit_prediction.to_numba() orbit = nav.interpolate_orbit_prediction( orbit_prediction, obs_time @@ -387,6 +402,7 @@ def test_interpolate_orbit_prediction(self, obs_time, orbit_prediction, orbit_ex assert_namedtuple_close(orbit, orbit_expected) def test_interpolate_attitude_prediction(self, obs_time, attitude_prediction, attitude_expected): + """Test interpolating attitude prediction.""" attitude_prediction = attitude_prediction.to_numba() attitude = nav.interpolate_attitude_prediction( attitude_prediction, obs_time @@ -395,10 +411,12 @@ def test_interpolate_attitude_prediction(self, obs_time, attitude_prediction, at @pytest.fixture def obs_time(self): + """Get observation time.""" return 2.5 @pytest.fixture def orbit_expected(self): + """Get expected orbit.""" return nav.Orbit( greenwich_sidereal_time=1.5, declination_from_sat_to_sun=1.6, @@ -411,6 +429,7 @@ def orbit_expected(self): @pytest.fixture def attitude_expected(self): + """Get expected attitude.""" return nav.Attitude( angle_between_earth_and_sun=1.5, angle_between_sat_spin_and_z_axis=1.6, @@ -420,11 +439,13 @@ def 
attitude_expected(self): @pytest.fixture def sampling_angle(): + """Get sampling angle.""" return 0.000095719995443 @pytest.fixture def scan_params(sampling_angle): + """Get scanning parameters.""" return nav.ScanningParameters( start_time_of_scan=0, spinning_rate=0.5, @@ -435,6 +456,7 @@ def scan_params(sampling_angle): @pytest.fixture def attitude_prediction(): + """Get attitude prediction.""" return nav.AttitudePrediction( prediction_times=np.array([1.0, 2.0, 3.0]), angle_between_earth_and_sun=np.array([0.0, 1.0, 2.0]), @@ -445,6 +467,7 @@ def attitude_prediction(): @pytest.fixture def orbit_prediction(): + """Get orbit prediction.""" return nav.OrbitPrediction( prediction_times=np.array([1.0, 2.0, 3.0, 4.0]), greenwich_sidereal_time=np.array([0.0, 1.0, 2.0, 3.0]), @@ -466,6 +489,7 @@ def orbit_prediction(): @pytest.fixture def proj_params(sampling_angle): + """Get projection parameters.""" return nav.ProjectionParameters( line_offset=1378.5, pixel_offset=1672.5, @@ -478,6 +502,7 @@ def proj_params(sampling_angle): def test_get_observation_time(): + """Test getting a pixel's observation time.""" scan_params = nav.ScanningParameters( start_time_of_scan=50000.0, spinning_rate=100, @@ -490,6 +515,7 @@ def test_get_observation_time(): def assert_namedtuple_close(a, b): + """Assert that two numba namedtuples are approximately equal.""" assert a.__class__ == b.__class__ for attr in a._fields: np.testing.assert_allclose( @@ -500,8 +526,11 @@ def assert_namedtuple_close(a, b): class TestFileHandler: + """Test VISSR file handler.""" + @pytest.fixture(autouse=True) def patch_number_of_pixels_per_scanline(self, monkeypatch): + """Patch data types so that each scanline has two pixels.""" num_pixels = 2 IMAGE_DATA_BLOCK_IR = np.dtype([('LCW', vissr.LINE_CONTROL_WORD), ('DOC', vissr.U1, (256,)), @@ -538,6 +567,7 @@ def patch_number_of_pixels_per_scanline(self, monkeypatch): make_dataid(name='IR1', calibration="counts", resolution=5000) ]) def dataset_id(self, request): + """Get dataset ID.""" return request.param @pytest.fixture(params=[ @@ -545,18 +575,22 @@ def dataset_id(self, request): False ]) def mask_space(self, request): + """Mask space pixels.""" return request.param @pytest.fixture(params=[True, False]) def with_compression(self, request): + """Enable compression.""" return request.param @pytest.fixture def open_function(self, with_compression): + """Get open function for writing test files.""" return gzip.open if with_compression else open @pytest.fixture def vissr_file(self, dataset_id, file_contents, open_function, tmp_path): + """Get test VISSR file.""" filename = tmp_path / "vissr_file" ch_type = vissr.CHANNEL_TYPES[dataset_id["name"]] writer = VissrFileWriter(ch_type, open_function) @@ -565,6 +599,7 @@ def vissr_file(self, dataset_id, file_contents, open_function, tmp_path): @pytest.fixture def file_contents(self, control_block, image_parameters, image_data): + """Get VISSR file contents.""" return { "control_block": control_block, "image_parameters": image_parameters, @@ -573,6 +608,7 @@ def file_contents(self, control_block, image_parameters, image_data): @pytest.fixture def control_block(self, dataset_id): + """Get VISSR control block.""" block_size = { "IR1": 16, "VIS": 4 @@ -596,6 +632,7 @@ def image_parameters( wv_calibration, simple_coordinate_conversion_table, ): + """Get VISSR image parameters.""" return { "mode": mode_block, "coordinate_conversion": coordinate_conversion, @@ -611,6 +648,7 @@ def image_parameters( @pytest.fixture def mode_block(self): + """Get VISSR mode 
block.""" mode = np.zeros(1, dtype=vissr.MODE_BLOCK) mode["satellite_name"] = b'GMS-5 ' mode["spin_rate"] = 99.21774 @@ -622,7 +660,7 @@ def mode_block(self): mode["vis_frame_parameters"]["number_of_lines"] = 2 mode["vis_frame_parameters"]["number_of_pixels"] = 2 return mode - + @pytest.fixture def coordinate_conversion(self): """Get parameters for coordinate conversions. @@ -633,25 +671,25 @@ def coordinate_conversion(self): """ # fmt: off conv = np.zeros(1, dtype=vissr.COORDINATE_CONVERSION_PARAMETERS) - + cline = conv["central_line_number_of_vissr_frame"] cline["IR1"] = 1378.5 cline["VIS"] = 5513.0 - + cpix = conv["central_pixel_number_of_vissr_frame"] cpix["IR1"] = 0.5 # instead of 1672.5 cpix["VIS"] = 0.5 # instead of 6688.5 - + conv['scheduled_observation_time'] = 50130.979089568464 - + nsensors = conv["number_of_sensor_elements"] nsensors["IR1"] = 1 nsensors["VIS"] = 4 - + sampling_angle = conv["sampling_angle_along_pixel"] sampling_angle["IR1"] = 9.5719995e-05 sampling_angle["VIS"] = 2.3929999e-05 - + stepping_angle = conv["stepping_angle_along_line"] stepping_angle["IR1"] = 0.00014000005 stepping_angle["VIS"] = 3.5000005e-05 @@ -670,9 +708,10 @@ def coordinate_conversion(self): conv["orbital_parameters"]["latitude_of_ssp"] = 1.0 # fmt: on return conv - + @pytest.fixture def attitude_prediction(self): + """Get attitude prediction.""" att_pred = np.zeros(1, dtype=vissr.ATTITUDE_PREDICTION) # fmt: off att_pred["data"] = np.array([ @@ -716,6 +755,7 @@ def attitude_prediction(self): @pytest.fixture def orbit_prediction_1(self): + """Get first block of orbit prediction data.""" orb_pred = np.zeros(1, dtype=vissr.ORBIT_PREDICTION) # fmt: off orb_pred["data"] = np.array([ @@ -732,9 +772,10 @@ def orbit_prediction_1(self): ) # fmt: on return orb_pred - + @pytest.fixture def orbit_prediction_2(self): + """Get second block of orbit prediction data.""" orb_pred = np.zeros(1, dtype=vissr.ORBIT_PREDICTION) # fmt: off orb_pred["data"] = np.array([ @@ -754,6 +795,7 @@ def orbit_prediction_2(self): @pytest.fixture def vis_calibration(self): + """Get VIS calibration block.""" vis_cal = np.zeros(1, dtype=vissr.VIS_CALIBRATION) table = vis_cal["vis1_calibration_table"][ "brightness_albedo_conversion_table" @@ -763,6 +805,7 @@ def vis_calibration(self): @pytest.fixture def ir1_calibration(self): + """Get IR1 calibration block.""" cal = np.zeros(1, dtype=vissr.IR_CALIBRATION) table = cal["conversion_table_of_equivalent_black_body_temperature"] table[0, 0:4] = np.array([0, 100, 200, 300]) @@ -770,22 +813,26 @@ def ir1_calibration(self): @pytest.fixture def ir2_calibration(self): + """Get IR2 calibration block.""" cal = np.zeros(1, dtype=vissr.IR_CALIBRATION) return cal @pytest.fixture def wv_calibration(self): + """Get WV calibration block.""" cal = np.zeros(1, dtype=vissr.IR_CALIBRATION) return cal @pytest.fixture def simple_coordinate_conversion_table(self): + """Get simple coordinate conversion table.""" table = np.zeros(1, dtype=vissr.SIMPLE_COORDINATE_CONVERSION_TABLE) table["satellite_height"] = 123457.0 return table @pytest.fixture def image_data(self, dataset_id, image_data_ir1, image_data_vis): + """Get VISSR image data.""" data = { "IR1": image_data_ir1, "VIS": image_data_vis @@ -794,6 +841,7 @@ def image_data(self, dataset_id, image_data_ir1, image_data_vis): @pytest.fixture def image_data_ir1(self): + """Get IR1 image data.""" image_data = np.zeros(2, vissr.IMAGE_DATA_BLOCK_IR) image_data["LCW"]["line_number"] = [686, 2089] image_data["LCW"]["scan_time"] = [50000, 50000] @@ -804,6 +852,7 @@ 
def image_data_ir1(self): @pytest.fixture def image_data_vis(self): + """Get VIS image data.""" image_data = np.zeros(2, vissr.IMAGE_DATA_BLOCK_VIS) image_data["LCW"]["line_number"] = [2744, 8356] image_data["LCW"]["scan_time"] = [50000, 50000] @@ -814,6 +863,7 @@ def image_data_vis(self): @pytest.fixture def vissr_file_like(self, vissr_file, with_compression): + """Get file-like object for VISSR test file.""" if with_compression: open_file = fsspec.open(vissr_file, compression="gzip") return FSFile(open_file) @@ -821,12 +871,14 @@ def vissr_file_like(self, vissr_file, with_compression): @pytest.fixture def file_handler(self, vissr_file_like, mask_space): + """Get file handler to be tested.""" return vissr.GMS5VISSRFileHandler( vissr_file_like, {}, {}, mask_space=mask_space ) @pytest.fixture def vis_refl_exp(self, mask_space, lons_lats_exp): + """Get expected VIS reflectance.""" lons, lats = lons_lats_exp if mask_space: data = [[np.nan, np.nan], [0.5, 1]] @@ -845,6 +897,7 @@ def vis_refl_exp(self, mask_space, lons_lats_exp): @pytest.fixture def ir1_counts_exp(self, lons_lats_exp): + """Get expected IR1 counts.""" lons, lats = lons_lats_exp return xr.DataArray( [[0, 1], [2, 3]], @@ -860,6 +913,7 @@ def ir1_counts_exp(self, lons_lats_exp): @pytest.fixture def ir1_bt_exp(self, lons_lats_exp): + """Get expected IR1 brightness temperature.""" lons, lats = lons_lats_exp return xr.DataArray( [[0, 100], [200, 300]], @@ -890,6 +944,7 @@ def lons_lats_exp(self, dataset_id): pix = [1672, 1672, 1673, 1673] lin = [686, 2089, 686, 2089] """ + # fmt: off expectations = { "IR1": { "lons": [[139.680120, 139.718902], @@ -904,6 +959,7 @@ def lons_lats_exp(self, dataset_id): [-34.940439, -34.940370]] } } + # fmt: on exp = expectations[dataset_id["name"]] lons = xr.DataArray(exp["lons"], dims=("y", "x")) lats = xr.DataArray(exp["lats"], dims=("y", "x")) @@ -911,6 +967,7 @@ def lons_lats_exp(self, dataset_id): @pytest.fixture def dataset_exp(self, dataset_id, ir1_counts_exp, ir1_bt_exp, vis_refl_exp): + """Get expected dataset.""" ir1_counts_id = make_dataid(name="IR1", calibration="counts", resolution=5000) ir1_bt_id = make_dataid(name="IR1", calibration="brightness_temperature", resolution=5000) vis_refl_id = make_dataid(name="VIS", calibration="reflectance", resolution=1250) @@ -923,7 +980,7 @@ def dataset_exp(self, dataset_id, ir1_counts_exp, ir1_bt_exp, vis_refl_exp): @pytest.fixture def area_def_exp(self, dataset_id): - from pyresample.geometry import AreaDefinition + """Get expected area definition.""" if dataset_id["name"] == "IR1": resol = 5 extent = (-8.641922536247211, -8.641922536247211, 25.925767608741637, 25.925767608741637) @@ -944,6 +1001,7 @@ def area_def_exp(self, dataset_id): @pytest.fixture def attrs_exp(self, area_def_exp): + """Get expected dataset attributes.""" return { "platform": "GMS-5", "sensor": "VISSR", @@ -963,11 +1021,13 @@ def attrs_exp(self, area_def_exp): } def test_get_dataset(self, file_handler, dataset_id, dataset_exp, attrs_exp): + """Test getting the dataset.""" dataset = file_handler.get_dataset(dataset_id, None) xr.testing.assert_allclose(dataset.compute(), dataset_exp, atol=1E-6) self._assert_attrs_equal(dataset.attrs, attrs_exp) def test_time_attributes(self, file_handler, attrs_exp): + """Test the file handler's time attributes.""" assert file_handler.start_time == attrs_exp["time_parameters"]["nominal_start_time"] assert file_handler.end_time == attrs_exp["time_parameters"]["nominal_end_time"] @@ -985,11 +1045,14 @@ def _assert_areas_close(self, area_tst, area_exp): 
class VissrFileWriter: + """Write data in VISSR archive format.""" + def __init__(self, ch_type, open_function): self.ch_type = ch_type self.open_function = open_function def write(self, filename, contents): + """Write file contents to disk.""" with self.open_function(filename, mode="wb") as fd: self._write_control_block(fd, contents) self._write_image_parameters(fd, contents) From 46eb8f0f2ac8e0f5d62563775da1b2f118d6fb9e Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Wed, 31 May 2023 10:09:41 +0000 Subject: [PATCH 0221/1416] Revert change to nominal start/end time attribute to avoid breaking someone else's code --- satpy/readers/seviri_l1b_hrit.py | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 9096b36c4c..1fedadb0e2 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -767,14 +767,8 @@ def _update_attrs(self, res, info): res.attrs['standard_name'] = info['standard_name'] res.attrs['platform_name'] = self.platform_name res.attrs['sensor'] = 'seviri' - res.attrs['nominal_start_time'] = """ - Deprecation warning: nominal_start_time should be accessed via the time_parameters attrs - nominal_start_time is also available directly via start_time - """ - res.attrs['nominal_end_time'] = """ - deprecation warning: nominal_end_time should be accessed via the time_parameters attrs - nominal_end_time is also available directly via end_time - """ + res.attrs['nominal_start_time'] = self.nominal_start_time, + res.attrs['nominal_end_time'] = self.nominal_end_time, res.attrs['time_parameters'] = { 'nominal_start_time': self.nominal_start_time, 'nominal_end_time': self.nominal_end_time, From 807770f224982561300f913d601a4dfedb2affe7 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 31 May 2023 10:13:29 +0000 Subject: [PATCH 0222/1416] Apply black code formatting --- satpy/readers/gms5_vissr_l1b.py | 314 +++-- satpy/readers/gms5_vissr_navigation.py | 289 ++-- .../tests/reader_tests/test_gms5_vissr_l1b.py | 1160 ++++++++++++++--- 3 files changed, 1266 insertions(+), 497 deletions(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index 5e9de11f58..5eec1557d6 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -151,34 +151,29 @@ CHUNK_SIZE = get_legacy_chunk_size() -U1 = '>u1' -I2 = '>i2' -I4 = '>i4' -R4 = '>f4' -R8 = '>f8' - -VIS_CHANNEL = 'VIS' -IR_CHANNEL = 'IR' +U1 = ">u1" +I2 = ">i2" +I4 = ">i4" +R4 = ">f4" +R8 = ">f8" + +VIS_CHANNEL = "VIS" +IR_CHANNEL = "IR" CHANNEL_TYPES = { "VIS": VIS_CHANNEL, "IR1": IR_CHANNEL, "IR2": IR_CHANNEL, "IR3": IR_CHANNEL, - "WV": IR_CHANNEL -} -ALT_CHANNEL_NAMES = { - 'VIS': 'VIS', - 'IR1': 'IR1', - 'IR2': 'IR2', - 'IR3': 'WV' + "WV": IR_CHANNEL, } +ALT_CHANNEL_NAMES = {"VIS": "VIS", "IR1": "IR1", "IR2": "IR2", "IR3": "WV"} BLOCK_SIZE_VIS = 13504 BLOCK_SIZE_IR = 3664 IMAGE_PARAM_ITEM_SIZE = 2688 -TIME = [('date', I4), ('time', I4)] -CHANNELS = [('VIS', R4), ('IR1', R4), ('IR2', R4), ('WV', R4)] -VISIR_SOLAR = [('VIS', R4), ('IR', R4)] +TIME = [("date", I4), ("time", I4)] +CHANNELS = [("VIS", R4), ("IR1", R4), ("IR2", R4), ("WV", R4)] +VISIR_SOLAR = [("VIS", R4), ("IR", R4)] # fmt: off CONTROL_BLOCK = np.dtype([('control_block_size', I2), @@ -552,7 +547,7 @@ def _recarr2dict(arr, preserve=None): preserve = [] res = {} for key, value in zip(arr.dtype.names, arr): - if key.startswith('reserved'): + if key.startswith("reserved"): continue if value.dtype.names and 
key not in preserve: # Nested record array @@ -567,7 +562,9 @@ class GMS5VISSRFileHandler(BaseFileHandler): """File handler for GMS-5 VISSR data in VISSR archive format.""" def __init__(self, filename, filename_info, filetype_info, mask_space=True): - super(GMS5VISSRFileHandler, self).__init__(filename, filename_info, filetype_info) + super(GMS5VISSRFileHandler, self).__init__( + filename, filename_info, filetype_info + ) self._filename = filename self._filename_info = filename_info self._header, self._channel_type = self._read_header(filename) @@ -576,10 +573,12 @@ def __init__(self, filename, filename_info, filetype_info, mask_space=True): def _read_header(self, filename): header = {} - with generic_open(filename, mode='rb') as file_obj: - header['control_block'] = self._read_control_block(file_obj) - channel_type = self._get_channel_type(header['control_block']['parameter_block_size']) - header['image_parameters'] = self._read_image_params(file_obj, channel_type) + with generic_open(filename, mode="rb") as file_obj: + header["control_block"] = self._read_control_block(file_obj) + channel_type = self._get_channel_type( + header["control_block"]["parameter_block_size"] + ) + header["image_parameters"] = self._read_image_params(file_obj, channel_type) return header, channel_type @staticmethod @@ -588,14 +587,10 @@ def _get_channel_type(parameter_block_size): return VIS_CHANNEL elif parameter_block_size == 16: return IR_CHANNEL - raise ValueError('Cannot determine channel type: Unknown parameter block size.') + raise ValueError("Cannot determine channel type: Unknown parameter block size.") def _read_control_block(self, file_obj): - ctrl_block = read_from_file_obj( - file_obj, - dtype=CONTROL_BLOCK, - count=1 - ) + ctrl_block = read_from_file_obj(file_obj, dtype=CONTROL_BLOCK, count=1) return _recarr2dict(ctrl_block[0]) def _read_image_params(self, file_obj, channel_type): @@ -604,9 +599,9 @@ def _read_image_params(self, file_obj, channel_type): for name, param in IMAGE_PARAMS.items(): image_params[name] = self._read_image_param(file_obj, param, channel_type) - image_params['orbit_prediction'] = self._concat_orbit_prediction( - image_params.pop('orbit_prediction_1'), - image_params.pop('orbit_prediction_2') + image_params["orbit_prediction"] = self._concat_orbit_prediction( + image_params.pop("orbit_prediction_1"), + image_params.pop("orbit_prediction_2"), ) return image_params @@ -617,9 +612,9 @@ def _read_image_param(file_obj, param, channel_type): file_obj, dtype=param["dtype"], count=1, - offset=param['offset'][channel_type] + offset=param["offset"][channel_type], ) - return _recarr2dict(image_params[0], preserve=param.get('preserve')) + return _recarr2dict(image_params[0], preserve=param.get("preserve")) @staticmethod def _concat_orbit_prediction(orb_pred_1, orb_pred_2): @@ -628,59 +623,62 @@ def _concat_orbit_prediction(orb_pred_1, orb_pred_2): It is split over two image parameter blocks in the header. 
""" orb_pred = orb_pred_1 - orb_pred['data'] = np.concatenate([orb_pred_1['data'], orb_pred_2['data']]) + orb_pred["data"] = np.concatenate([orb_pred_1["data"], orb_pred_2["data"]]) return orb_pred def _get_frame_parameters_key(self): if self._channel_type == VIS_CHANNEL: - return 'vis_frame_parameters' - return 'ir_frame_parameters' + return "vis_frame_parameters" + return "ir_frame_parameters" def _get_actual_shape(self): - actual_num_lines = self._header['control_block']['available_block_size_of_image_data'] + actual_num_lines = self._header["control_block"][ + "available_block_size_of_image_data" + ] _, nominal_num_pixels = self._get_nominal_shape() return actual_num_lines, nominal_num_pixels def _get_nominal_shape(self): - frame_params = self._header['image_parameters']['mode'][self._get_frame_parameters_key()] - return frame_params['number_of_lines'], frame_params['number_of_pixels'] + frame_params = self._header["image_parameters"]["mode"][ + self._get_frame_parameters_key() + ] + return frame_params["number_of_lines"], frame_params["number_of_pixels"] def _get_mda(self): - mode_block = self._header['image_parameters']['mode'] + mode_block = self._header["image_parameters"]["mode"] return { - 'platform': mode_block['satellite_name'].decode().strip().upper(), - 'sensor': 'VISSR', - 'time_parameters': self._get_time_parameters(), - 'orbital_parameters': self._get_orbital_parameters() + "platform": mode_block["satellite_name"].decode().strip().upper(), + "sensor": "VISSR", + "time_parameters": self._get_time_parameters(), + "orbital_parameters": self._get_orbital_parameters(), } def _get_orbital_parameters(self): # Note: SSP longitude in simple coordinate conversion table seems to be # incorrect (80 deg instead of 140 deg). Use orbital parameters instead. 
- im_params = self._header['image_parameters'] - mode = im_params['mode'] + im_params = self._header["image_parameters"] + mode = im_params["mode"] simple_coord = im_params["simple_coordinate_conversion_table"] orb_params = im_params["coordinate_conversion"]["orbital_parameters"] return { - 'satellite_nominal_longitude': mode["ssp_longitude"], - 'satellite_nominal_latitude': 0.0, - 'satellite_nominal_altitude': mode["satellite_height"], - - 'satellite_actual_longitude': orb_params["longitude_of_ssp"], - 'satellite_actual_latitude': orb_params["latitude_of_ssp"], - 'satellite_actual_altitude': simple_coord["satellite_height"] + "satellite_nominal_longitude": mode["ssp_longitude"], + "satellite_nominal_latitude": 0.0, + "satellite_nominal_altitude": mode["satellite_height"], + "satellite_actual_longitude": orb_params["longitude_of_ssp"], + "satellite_actual_latitude": orb_params["latitude_of_ssp"], + "satellite_actual_altitude": simple_coord["satellite_height"], } def _get_time_parameters(self): - mode_block = self._header['image_parameters']['mode'] + mode_block = self._header["image_parameters"]["mode"] start_time = mjd2datetime64(mode_block["observation_time_mjd"]) start_time = start_time.astype(dt.datetime).replace(second=0, microsecond=0) end_time = start_time + dt.timedelta( minutes=25 ) # Source: GMS User Guide, section 3.3.1 return { - 'nominal_start_time': start_time, - 'nominal_end_time': end_time, + "nominal_start_time": start_time, + "nominal_end_time": end_time, } def get_dataset(self, dataset_id, ds_info): @@ -703,10 +701,7 @@ def _read_image_data(self): specs = self._get_image_data_type_specs() with generic_open(self._filename, "rb") as file_obj: return read_from_file_obj( - file_obj, - dtype=specs["dtype"], - count=num_lines, - offset=specs["offset"] + file_obj, dtype=specs["dtype"], count=num_lines, offset=specs["offset"] ) def _get_image_data_type_specs(self): @@ -717,20 +712,20 @@ def _get_counts(self, image_data): def _make_counts_data_array(self, image_data): return xr.DataArray( - image_data['image_data'], - dims=('y', 'x'), + image_data["image_data"], + dims=("y", "x"), coords={ - 'acq_time': ('y', self._get_acq_time(image_data)), - 'line_number': ('y', self._get_line_number(image_data)) - } + "acq_time": ("y", self._get_acq_time(image_data)), + "line_number": ("y", self._get_line_number(image_data)), + }, ) def _get_acq_time(self, dask_array): - acq_time = dask_array['LCW']['scan_time'].compute() + acq_time = dask_array["LCW"]["scan_time"].compute() return mjd2datetime64(acq_time) def _get_line_number(self, dask_array): - return dask_array['LCW']['line_number'].compute() + return dask_array["LCW"]["line_number"].compute() def _calibrate(self, counts, dataset_id): table = self._get_calibration_table(dataset_id) @@ -739,21 +734,28 @@ def _calibrate(self, counts, dataset_id): def _get_calibration_table(self, dataset_id): tables = { - "VIS": self._header['image_parameters']['vis_calibration']["vis1_calibration_table"]["brightness_albedo_conversion_table"], - "IR1": self._header['image_parameters']['ir1_calibration']["conversion_table_of_equivalent_black_body_temperature"], - "IR2": self._header['image_parameters']['ir2_calibration']["conversion_table_of_equivalent_black_body_temperature"], - "IR3": self._header['image_parameters']['wv_calibration']["conversion_table_of_equivalent_black_body_temperature"] + "VIS": self._header["image_parameters"]["vis_calibration"][ + "vis1_calibration_table" + ]["brightness_albedo_conversion_table"], + "IR1": 
self._header["image_parameters"]["ir1_calibration"][ + "conversion_table_of_equivalent_black_body_temperature" + ], + "IR2": self._header["image_parameters"]["ir2_calibration"][ + "conversion_table_of_equivalent_black_body_temperature" + ], + "IR3": self._header["image_parameters"]["wv_calibration"][ + "conversion_table_of_equivalent_black_body_temperature" + ], } return tables[dataset_id["name"]] def _get_area_def_uniform_sampling(self, dataset_id): a = AreaDefEstimator( - coord_conv_params=self._header['image_parameters']['coordinate_conversion'], - metadata=self._mda + coord_conv_params=self._header["image_parameters"]["coordinate_conversion"], + metadata=self._mda, ) return a.get_area_def_uniform_sampling( - original_shape=self._get_actual_shape(), - dataset_id=dataset_id + original_shape=self._get_actual_shape(), dataset_id=dataset_id ) def _mask_space_pixels(self, dataset, space_masker): @@ -763,8 +765,8 @@ def _mask_space_pixels(self, dataset, space_masker): def _attach_lons_lats(self, dataset, dataset_id): lons, lats = self._get_lons_lats(dataset, dataset_id) - dataset.coords['lon'] = lons - dataset.coords['lat'] = lats + dataset.coords["lon"] = lons + dataset.coords["lat"] = lats def _get_lons_lats(self, dataset, dataset_id): lines, pixels = self._get_image_coords(dataset) @@ -774,12 +776,12 @@ def _get_lons_lats(self, dataset, dataset_id): lines=lines, pixels=pixels, static_params=static_params, - predicted_params=predicted_params + predicted_params=predicted_params, ) return self._make_lons_lats_data_array(lons, lats) def _get_image_coords(self, data): - lines = data.coords['line_number'].values + lines = data.coords["line_number"].values pixels = np.arange(data.shape[1]) return lines.astype(np.float64), pixels.astype(np.float64) @@ -794,18 +796,23 @@ def _get_static_navigation_params(self, dataset_id): IR2: 1378.7 IR3: 1379.1001 """ - alt_ch_name = ALT_CHANNEL_NAMES[dataset_id['name']] - mode_block = self._header['image_parameters']['mode'] - coord_conv = self._header['image_parameters']['coordinate_conversion'] - center_line_vissr_frame = coord_conv['central_line_number_of_vissr_frame'][alt_ch_name] - center_pixel_vissr_frame = coord_conv['central_pixel_number_of_vissr_frame'][alt_ch_name] - pixel_offset = coord_conv['pixel_difference_of_vissr_center_from_normal_position'][ - alt_ch_name] + alt_ch_name = ALT_CHANNEL_NAMES[dataset_id["name"]] + mode_block = self._header["image_parameters"]["mode"] + coord_conv = self._header["image_parameters"]["coordinate_conversion"] + center_line_vissr_frame = coord_conv["central_line_number_of_vissr_frame"][ + alt_ch_name + ] + center_pixel_vissr_frame = coord_conv["central_pixel_number_of_vissr_frame"][ + alt_ch_name + ] + pixel_offset = coord_conv[ + "pixel_difference_of_vissr_center_from_normal_position" + ][alt_ch_name] scan_params = nav.ScanningParameters( - start_time_of_scan=coord_conv['scheduled_observation_time'], - spinning_rate=mode_block['spin_rate'], - num_sensors=coord_conv['number_of_sensor_elements'][alt_ch_name], - sampling_angle=coord_conv['sampling_angle_along_pixel'][alt_ch_name], + start_time_of_scan=coord_conv["scheduled_observation_time"], + spinning_rate=mode_block["spin_rate"], + num_sensors=coord_conv["number_of_sensor_elements"][alt_ch_name], + sampling_angle=coord_conv["sampling_angle_along_pixel"][alt_ch_name], ) # Use earth radius and flattening from JMA's Msial library, because # the values in the data seem to be pretty old. 
For example the @@ -813,48 +820,74 @@ def _get_static_navigation_params(self, dataset_id): proj_params = nav.ProjectionParameters( line_offset=center_line_vissr_frame, pixel_offset=center_pixel_vissr_frame + pixel_offset, - stepping_angle=coord_conv['stepping_angle_along_line'][alt_ch_name], - sampling_angle=coord_conv['sampling_angle_along_pixel'][alt_ch_name], - misalignment=np.ascontiguousarray(coord_conv['matrix_of_misalignment'].transpose().astype(np.float64)), + stepping_angle=coord_conv["stepping_angle_along_line"][alt_ch_name], + sampling_angle=coord_conv["sampling_angle_along_pixel"][alt_ch_name], + misalignment=np.ascontiguousarray( + coord_conv["matrix_of_misalignment"].transpose().astype(np.float64) + ), earth_flattening=nav.EARTH_FLATTENING, - earth_equatorial_radius=nav.EARTH_EQUATORIAL_RADIUS + earth_equatorial_radius=nav.EARTH_EQUATORIAL_RADIUS, ) return scan_params, proj_params def _get_predicted_navigation_params(self): """Get predictions of time-dependent navigation parameters.""" - att_pred = self._header['image_parameters']['attitude_prediction']['data'] - orb_pred = self._header['image_parameters']['orbit_prediction']['data'] + att_pred = self._header["image_parameters"]["attitude_prediction"]["data"] + orb_pred = self._header["image_parameters"]["orbit_prediction"]["data"] attitude_prediction = nav.AttitudePrediction( - prediction_times=att_pred['prediction_time_mjd'].astype(np.float64), - angle_between_earth_and_sun=att_pred['sun_earth_angle'].astype(np.float64), - angle_between_sat_spin_and_z_axis=att_pred['right_ascension_of_attitude'].astype(np.float64), - angle_between_sat_spin_and_yz_plane=att_pred['declination_of_attitude'].astype(np.float64), + prediction_times=att_pred["prediction_time_mjd"].astype(np.float64), + angle_between_earth_and_sun=att_pred["sun_earth_angle"].astype(np.float64), + angle_between_sat_spin_and_z_axis=att_pred[ + "right_ascension_of_attitude" + ].astype(np.float64), + angle_between_sat_spin_and_yz_plane=att_pred[ + "declination_of_attitude" + ].astype(np.float64), ) orbit_prediction = nav.OrbitPrediction( - prediction_times=orb_pred['prediction_time_mjd'].astype(np.float64), - greenwich_sidereal_time=np.deg2rad(orb_pred['greenwich_sidereal_time'].astype(np.float64)), - declination_from_sat_to_sun=np.deg2rad(orb_pred['sat_sun_vector_earth_fixed']['elevation'].astype(np.float64)), - right_ascension_from_sat_to_sun=np.deg2rad(orb_pred['sat_sun_vector_earth_fixed']['azimuth'].astype(np.float64)), - sat_position_earth_fixed_x=orb_pred['satellite_position_earth_fixed'][:, 0].astype(np.float64), - sat_position_earth_fixed_y=orb_pred['satellite_position_earth_fixed'][:, 1].astype(np.float64), - sat_position_earth_fixed_z=orb_pred['satellite_position_earth_fixed'][:, 2].astype(np.float64), - nutation_precession=np.ascontiguousarray(orb_pred['conversion_matrix'].transpose(0, 2, 1).astype(np.float64)) + prediction_times=orb_pred["prediction_time_mjd"].astype(np.float64), + greenwich_sidereal_time=np.deg2rad( + orb_pred["greenwich_sidereal_time"].astype(np.float64) + ), + declination_from_sat_to_sun=np.deg2rad( + orb_pred["sat_sun_vector_earth_fixed"]["elevation"].astype(np.float64) + ), + right_ascension_from_sat_to_sun=np.deg2rad( + orb_pred["sat_sun_vector_earth_fixed"]["azimuth"].astype(np.float64) + ), + sat_position_earth_fixed_x=orb_pred["satellite_position_earth_fixed"][ + :, 0 + ].astype(np.float64), + sat_position_earth_fixed_y=orb_pred["satellite_position_earth_fixed"][ + :, 1 + ].astype(np.float64), + 
sat_position_earth_fixed_z=orb_pred["satellite_position_earth_fixed"][ + :, 2 + ].astype(np.float64), + nutation_precession=np.ascontiguousarray( + orb_pred["conversion_matrix"].transpose(0, 2, 1).astype(np.float64) + ), ) return attitude_prediction, orbit_prediction def _make_lons_lats_data_array(self, lons, lats): - lons = xr.DataArray(lons, dims=('y', 'x'), - attrs={'standard_name': 'longitude', - "units": "degrees_east"}) - lats = xr.DataArray(lats, dims=('y', 'x'), - attrs={'standard_name': 'latitude', - "units": "degrees_north"}) + lons = xr.DataArray( + lons, + dims=("y", "x"), + attrs={"standard_name": "longitude", "units": "degrees_east"}, + ) + lats = xr.DataArray( + lats, + dims=("y", "x"), + attrs={"standard_name": "latitude", "units": "degrees_north"}, + ) return lons, lats def _update_attrs(self, dataset, dataset_id): dataset.attrs.update(self._mda) - dataset.attrs["area_def_uniform_sampling"] = self._get_area_def_uniform_sampling(dataset_id) + dataset.attrs[ + "area_def_uniform_sampling" + ] = self._get_area_def_uniform_sampling(dataset_id) @property def start_time(self): @@ -886,6 +919,7 @@ class Calibrator: Reference: Section 2.2 in the VISSR User Guide. """ + def __init__(self, calib_table): self._calib_table = calib_table @@ -967,7 +1001,7 @@ def get_earth_mask(shape, earth_edges, fill_value=-1): last = last_earth_pixels[line] if first == fill_value or last == fill_value: continue - mask[line, first:last+1] = 1 + mask[line, first : last + 1] = 1 return mask @@ -1002,26 +1036,32 @@ def _get_proj_dict(self, dataset_id, original_shape): return proj_dict def _get_name_dict(self, dataset_id): - name_dict = geos_area.get_geos_area_naming({ - 'platform_name': self.metadata['platform'], - 'instrument_name': self.metadata['sensor'], - 'service_name': 'western-pacific', - 'service_desc': 'Western Pacific', - 'resolution': dataset_id['resolution'] - }) + name_dict = geos_area.get_geos_area_naming( + { + "platform_name": self.metadata["platform"], + "instrument_name": self.metadata["sensor"], + "service_name": "western-pacific", + "service_desc": "Western Pacific", + "resolution": dataset_id["resolution"], + } + ) return { "a_name": name_dict["area_id"], "p_id": name_dict["area_id"], - "a_desc": name_dict["description"] + "a_desc": name_dict["description"], } - def _get_proj4_dict(self, ): + def _get_proj4_dict( + self, + ): # Use nominal parameters to make the area def as constant as possible return { - 'ssp_lon': self.metadata["orbital_parameters"]["satellite_nominal_longitude"], + "ssp_lon": self.metadata["orbital_parameters"][ + "satellite_nominal_longitude" + ], "a": nav.EARTH_EQUATORIAL_RADIUS, "b": nav.EARTH_POLAR_RADIUS, - 'h': self.metadata["orbital_parameters"]["satellite_nominal_altitude"], + "h": self.metadata["orbital_parameters"]["satellite_nominal_altitude"], } def _get_shape_dict(self, original_shape, dataset_id): @@ -1030,16 +1070,16 @@ def _get_shape_dict(self, original_shape, dataset_id): # with uniform sampling. 
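# The body below makes the area square and converts the angular stepping
# between lines into a CGMS-style LFAC/CFAC scaling factor via
# geos_area.sampling_to_lfac_cfac. A rough sketch of that relation,
# assuming the standard 2**16 scaling from the LRIT/HRIT global
# specification (the real implementation lives in satpy's geos_area
# utilities):
import numpy as np

def sampling_to_lfac_cfac_sketch(sampling_rad):
    # LFAC/CFAC map scan angle in degrees to 2**-16-scaled pixel units,
    # so the factor is roughly 2**16 per degree of angular sampling.
    return 2.0**16 / np.rad2deg(sampling_rad)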
num_lines, _ = original_shape alt_ch_name = ALT_CHANNEL_NAMES[dataset_id["name"]] - stepping_angle = self.coord_conv['stepping_angle_along_line'][alt_ch_name] + stepping_angle = self.coord_conv["stepping_angle_along_line"][alt_ch_name] uniform_size = num_lines uniform_line_pixel_offset = 0.5 * num_lines uniform_sampling_angle = geos_area.sampling_to_lfac_cfac(stepping_angle) return { - 'nlines': uniform_size, - 'ncols': uniform_size, - 'lfac': uniform_sampling_angle, - 'cfac': uniform_sampling_angle, - 'coff': uniform_line_pixel_offset, - 'loff': uniform_line_pixel_offset, - 'scandir': 'N2S' + "nlines": uniform_size, + "ncols": uniform_size, + "lfac": uniform_sampling_angle, + "cfac": uniform_sampling_angle, + "coff": uniform_line_pixel_offset, + "loff": uniform_line_pixel_offset, + "scandir": "N2S", } diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index 98ead60858..3a9e08f340 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -16,77 +16,78 @@ CHUNK_SIZE = get_legacy_chunk_size() -EARTH_FLATTENING = 1/298.257 +EARTH_FLATTENING = 1 / 298.257 EARTH_EQUATORIAL_RADIUS = 6378136.0 EARTH_POLAR_RADIUS = EARTH_EQUATORIAL_RADIUS * (1 - EARTH_FLATTENING) """Constants taken from JMA's Msial library.""" Attitude = namedtuple( - 'Attitude', + "Attitude", [ - 'angle_between_earth_and_sun', - 'angle_between_sat_spin_and_z_axis', - 'angle_between_sat_spin_and_yz_plane' - ] + "angle_between_earth_and_sun", + "angle_between_sat_spin_and_z_axis", + "angle_between_sat_spin_and_yz_plane", + ], ) Orbit = namedtuple( - 'Orbit', + "Orbit", [ - 'greenwich_sidereal_time', - 'declination_from_sat_to_sun', - 'right_ascension_from_sat_to_sun', - 'sat_position_earth_fixed_x', - 'sat_position_earth_fixed_y', - 'sat_position_earth_fixed_z', - 'nutation_precession', - ] + "greenwich_sidereal_time", + "declination_from_sat_to_sun", + "right_ascension_from_sat_to_sun", + "sat_position_earth_fixed_x", + "sat_position_earth_fixed_y", + "sat_position_earth_fixed_z", + "nutation_precession", + ], ) ScanningParameters = namedtuple( - 'ScanningParameters', ['start_time_of_scan', 'spinning_rate', 'num_sensors', 'sampling_angle'] + "ScanningParameters", + ["start_time_of_scan", "spinning_rate", "num_sensors", "sampling_angle"], ) ProjectionParameters = namedtuple( - 'ProjectionParameters', + "ProjectionParameters", [ - 'line_offset', - 'pixel_offset', - 'stepping_angle', - 'sampling_angle', - 'misalignment', - 'earth_flattening', - 'earth_equatorial_radius', - ] + "line_offset", + "pixel_offset", + "stepping_angle", + "sampling_angle", + "misalignment", + "earth_flattening", + "earth_equatorial_radius", + ], ) _AttitudePrediction = namedtuple( - '_AttitudePrediction', + "_AttitudePrediction", [ - 'prediction_times', - 'angle_between_earth_and_sun', - 'angle_between_sat_spin_and_z_axis', - 'angle_between_sat_spin_and_yz_plane', - ] + "prediction_times", + "angle_between_earth_and_sun", + "angle_between_sat_spin_and_z_axis", + "angle_between_sat_spin_and_yz_plane", + ], ) _OrbitPrediction = namedtuple( - '_OrbitPrediction', + "_OrbitPrediction", [ - 'prediction_times', - 'greenwich_sidereal_time', - 'declination_from_sat_to_sun', - 'right_ascension_from_sat_to_sun', - 'sat_position_earth_fixed_x', - 'sat_position_earth_fixed_y', - 'sat_position_earth_fixed_z', - 'nutation_precession', - ] + "prediction_times", + "greenwich_sidereal_time", + "declination_from_sat_to_sun", + "right_ascension_from_sat_to_sun", + "sat_position_earth_fixed_x", + 
"sat_position_earth_fixed_y", + "sat_position_earth_fixed_z", + "nutation_precession", + ], ) @@ -97,19 +98,25 @@ class AttitudePrediction(object): layer avoids usage of jitclasses and having to re-implement np.unwrap in numba. """ - def __init__(self, - prediction_times, - angle_between_earth_and_sun, - angle_between_sat_spin_and_z_axis, - angle_between_sat_spin_and_yz_plane - ): + + def __init__( + self, + prediction_times, + angle_between_earth_and_sun, + angle_between_sat_spin_and_z_axis, + angle_between_sat_spin_and_yz_plane, + ): # In order to accelerate interpolation, the 2-pi periodicity of angles # is unwrapped here already (that means phase jumps greater than pi # are wrapped to their 2*pi complement). self.prediction_times = prediction_times self.angle_between_earth_and_sun = np.unwrap(angle_between_earth_and_sun) - self.angle_between_sat_spin_and_z_axis = np.unwrap(angle_between_sat_spin_and_z_axis) - self.angle_between_sat_spin_and_yz_plane = np.unwrap(angle_between_sat_spin_and_yz_plane) + self.angle_between_sat_spin_and_z_axis = np.unwrap( + angle_between_sat_spin_and_z_axis + ) + self.angle_between_sat_spin_and_yz_plane = np.unwrap( + angle_between_sat_spin_and_yz_plane + ) def to_numba(self): """Convert to numba-compatible type.""" @@ -117,7 +124,7 @@ def to_numba(self): prediction_times=self.prediction_times, angle_between_earth_and_sun=self.angle_between_earth_and_sun, angle_between_sat_spin_and_z_axis=self.angle_between_sat_spin_and_z_axis, - angle_between_sat_spin_and_yz_plane=self.angle_between_sat_spin_and_yz_plane + angle_between_sat_spin_and_yz_plane=self.angle_between_sat_spin_and_yz_plane, ) @@ -128,23 +135,27 @@ class OrbitPrediction(object): layer avoids usage of jitclasses and having to re-implement np.unwrap in numba. """ - def __init__(self, - prediction_times, - greenwich_sidereal_time, - declination_from_sat_to_sun, - right_ascension_from_sat_to_sun, - sat_position_earth_fixed_x, - sat_position_earth_fixed_y, - sat_position_earth_fixed_z, - nutation_precession - ): + + def __init__( + self, + prediction_times, + greenwich_sidereal_time, + declination_from_sat_to_sun, + right_ascension_from_sat_to_sun, + sat_position_earth_fixed_x, + sat_position_earth_fixed_y, + sat_position_earth_fixed_z, + nutation_precession, + ): # In order to accelerate interpolation, the 2-pi periodicity of angles # is unwrapped here already (that means phase jumps greater than pi # are wrapped to their 2*pi complement). self.prediction_times = prediction_times self.greenwich_sidereal_time = np.unwrap(greenwich_sidereal_time) self.declination_from_sat_to_sun = np.unwrap(declination_from_sat_to_sun) - self.right_ascension_from_sat_to_sun = np.unwrap(right_ascension_from_sat_to_sun) + self.right_ascension_from_sat_to_sun = np.unwrap( + right_ascension_from_sat_to_sun + ) self.sat_position_earth_fixed_x = sat_position_earth_fixed_x self.sat_position_earth_fixed_y = sat_position_earth_fixed_y self.sat_position_earth_fixed_z = sat_position_earth_fixed_z @@ -160,7 +171,7 @@ def to_numba(self): sat_position_earth_fixed_x=self.sat_position_earth_fixed_x, sat_position_earth_fixed_y=self.sat_position_earth_fixed_y, sat_position_earth_fixed_z=self.sat_position_earth_fixed_z, - nutation_precession=self.nutation_precession + nutation_precession=self.nutation_precession, ) @@ -187,7 +198,7 @@ def _get_map_blocks_kwargs(chunks): # with a function that returns two arguments. 
return { "new_axis": 0, - "chunks": (2, ) + chunks, + "chunks": (2,) + chunks, "dtype": np.float32, } @@ -201,9 +212,7 @@ def _get_lons_lats_numba(lines_2d, pixels_2d, static_params, predicted_params): for j in range(shape[1]): point = (lines_2d[i, j], pixels_2d[i, j]) nav_params = _get_navigation_parameters( - point, - static_params, - predicted_params + point, static_params, predicted_params ) lon, lat = get_lon_lat(point, nav_params) lons[i, j] = lon @@ -214,10 +223,7 @@ def _get_lons_lats_numba(lines_2d, pixels_2d, static_params, predicted_params): @numba.njit -def _get_navigation_parameters( - point, - static_params, - predicted_params): +def _get_navigation_parameters(point, static_params, predicted_params): scan_params, proj_params = static_params attitude_prediction, orbit_prediction = predicted_params obs_time = get_observation_time(point, scan_params) @@ -247,16 +253,10 @@ def _get_relative_observation_time(point, scan_params): @numba.njit def interpolate_navigation_prediction( - attitude_prediction, - orbit_prediction, - observation_time + attitude_prediction, orbit_prediction, observation_time ): - attitude = interpolate_attitude_prediction( - attitude_prediction, observation_time - ) - orbit = interpolate_orbit_prediction( - orbit_prediction, observation_time - ) + attitude = interpolate_attitude_prediction(attitude_prediction, observation_time) + orbit = interpolate_orbit_prediction(orbit_prediction, observation_time) return attitude, orbit @@ -273,13 +273,10 @@ def get_lon_lat(point, nav_params): """ attitude, orbit, proj_params = nav_params scan_angles = transform_image_coords_to_scanning_angles( - point, - _get_image_offset(proj_params), - _get_sampling(proj_params) + point, _get_image_offset(proj_params), _get_sampling(proj_params) ) view_vector_sat = transform_scanning_angles_to_satellite_coords( - scan_angles, - proj_params.misalignment + scan_angles, proj_params.misalignment ) view_vector_earth_fixed = transform_satellite_to_earth_fixed_coords( view_vector_sat, @@ -287,16 +284,13 @@ def get_lon_lat(point, nav_params): _get_sat_sun_angles(orbit), attitude.angle_between_earth_and_sun, _get_spin_angles(attitude), - orbit.nutation_precession + orbit.nutation_precession, ) point_on_earth = intersect_with_earth( - view_vector_earth_fixed, - _get_sat_pos(orbit), - _get_ellipsoid(proj_params) + view_vector_earth_fixed, _get_sat_pos(orbit), _get_ellipsoid(proj_params) ) lon, lat = transform_earth_fixed_to_geodetic_coords( - point_on_earth, - proj_params.earth_flattening + point_on_earth, proj_params.earth_flattening ) return lon, lat @@ -313,27 +307,31 @@ def _get_sampling(proj_params): @numba.njit def _get_sat_sun_angles(orbit): - return (orbit.declination_from_sat_to_sun, - orbit.right_ascension_from_sat_to_sun) + return (orbit.declination_from_sat_to_sun, orbit.right_ascension_from_sat_to_sun) @numba.njit def _get_spin_angles(attitude): - return (attitude.angle_between_sat_spin_and_z_axis, - attitude.angle_between_sat_spin_and_yz_plane) + return ( + attitude.angle_between_sat_spin_and_z_axis, + attitude.angle_between_sat_spin_and_yz_plane, + ) @numba.njit def _get_sat_pos(orbit): - return np.array((orbit.sat_position_earth_fixed_x, + return np.array( + ( + orbit.sat_position_earth_fixed_x, orbit.sat_position_earth_fixed_y, - orbit.sat_position_earth_fixed_z)) + orbit.sat_position_earth_fixed_z, + ) + ) @numba.njit def _get_ellipsoid(proj_params): - return (proj_params.earth_equatorial_radius, - proj_params.earth_flattening) + return (proj_params.earth_equatorial_radius, 
proj_params.earth_flattening) @@ -367,9 +365,7 @@ def transform_scanning_angles_to_satellite_coords(angles, misalignment): Returns: View vector (x, y, z) in satellite angular momentum coordinates. """ - rotation, vector = _get_transforms_from_scanning_angles_to_satellite_coords( - angles - ) + rotation, vector = _get_transforms_from_scanning_angles_to_satellite_coords(angles) return np.dot(rotation, np.dot(misalignment, vector)) @@ -378,21 +374,19 @@ def _get_transforms_from_scanning_angles_to_satellite_coords(angles): x, y = angles cos_x = np.cos(x) sin_x = np.sin(x) - rot = np.array(((cos_x, -sin_x, 0), - (sin_x, cos_x, 0), - (0, 0, 1))) + rot = np.array(((cos_x, -sin_x, 0), (sin_x, cos_x, 0), (0, 0, 1))) vec = np.array([np.cos(y), 0, np.sin(y)]) return rot, vec @numba.njit def transform_satellite_to_earth_fixed_coords( - point, - greenwich_sidereal_time, - sat_sun_angles, - earth_sun_angle, - spin_angles, - nutation_precession + point, + greenwich_sidereal_time, + sat_sun_angles, + earth_sun_angle, + spin_angles, + nutation_precession, ): """Transform from satellite angular momentum to earth-fixed coordinates. @@ -414,18 +408,18 @@ def transform_satellite_to_earth_fixed_coords( sat_sun_angles, earth_sun_angle, spin_angles, - nutation_precession + nutation_precession, ) return np.dot(sat_unit_vectors, point) @numba.njit def _get_satellite_unit_vectors( - greenwich_sidereal_time, - sat_sun_angles, - earth_sun_angle, - spin_angles, - nutation_precession + greenwich_sidereal_time, + sat_sun_angles, + earth_sun_angle, + spin_angles, + nutation_precession, ): unit_vector_z = _get_satellite_unit_vector_z( spin_angles, greenwich_sidereal_time, nutation_precession ) @@ -438,7 +432,9 @@ -def _get_satellite_unit_vector_z(spin_angles, greenwich_sidereal_time, nutation_precession): +def _get_satellite_unit_vector_z( + spin_angles, greenwich_sidereal_time, nutation_precession +): sat_z_axis_1950 = _get_satellite_z_axis_1950(spin_angles) rotation = _get_transform_from_1950_to_earth_fixed(greenwich_sidereal_time) z_vec = np.dot(rotation, np.dot(nutation_precession, sat_z_axis_1950)) @@ -460,22 +456,18 @@ def _get_transform_from_1950_to_earth_fixed(greenwich_sidereal_time): cos = np.cos(greenwich_sidereal_time) sin = np.sin(greenwich_sidereal_time) - return np.array( - ((cos, sin, 0), - (-sin, cos, 0), - (0, 0, 1)) - ) + return np.array(((cos, sin, 0), (-sin, cos, 0), (0, 0, 1))) @numba.njit -def _get_satellite_unit_vector_x(earth_sun_angle, sat_sun_angles, - sat_unit_vector_z): +def _get_satellite_unit_vector_x(earth_sun_angle, sat_sun_angles, sat_unit_vector_z): beta = earth_sun_angle sat_sun_vector = _get_vector_from_satellite_to_sun(sat_sun_angles) z_cross_satsun = np.cross(sat_unit_vector_z, sat_sun_vector) z_cross_satsun = normalize_vector(z_cross_satsun) - x_vec = z_cross_satsun * np.sin(beta) + \ - np.cross(z_cross_satsun, sat_unit_vector_z) * np.cos(beta) + x_vec = z_cross_satsun * np.sin(beta) + np.cross( + z_cross_satsun, sat_unit_vector_z + ) * np.cos(beta) return normalize_vector(x_vec) @@ -507,11 +499,7 @@ def intersect_with_earth(view_vector, sat_pos, ellipsoid): Returns: Intersection (x', y', z') with the earth's surface.
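    The intersection follows from inserting the line of sight
    p(d) = sat_pos + d * view_vector into the ellipsoid equation
    flat2 * (x**2 + y**2) + z**2 = flat2 * r_eq**2, with
    flat2 = (1 - earth_flattening)**2, which yields the quadratic
    a * d**2 + 2 * b * d + c = 0 whose coefficients are computed in
    _get_distances_to_intersections below. Of the two roots
    (-b +/- sqrt(b**2 - a * c)) / a, the smaller one is the near-side
    intersection facing the satellite.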
""" - distance = _get_distance_to_intersection( - view_vector, - sat_pos, - ellipsoid - ) + distance = _get_distance_to_intersection(view_vector, sat_pos, ellipsoid) return sat_pos + distance * view_vector @@ -535,10 +523,10 @@ def _get_distances_to_intersections(view_vector, sat_pos, ellipsoid): x, y, z = sat_pos a = flat2 * (ux**2 + uy**2) + uz**2 - b = flat2 * (x*ux + y*uy) + z*uz + b = flat2 * (x * ux + y * uy) + z * uz c = flat2 * (x**2 + y**2 - equatorial_radius**2) + z**2 - tmp = np.sqrt((b**2 - a*c)) + tmp = np.sqrt((b**2 - a * c)) dist_1 = (-b + tmp) / a dist_2 = (-b - tmp) / a return dist_1, dist_2 @@ -558,7 +546,7 @@ def transform_earth_fixed_to_geodetic_coords(point, earth_flattening): x, y, z = point f = earth_flattening lon = np.arctan2(y, x) - lat = np.arctan2(z, ((1 - f)**2 * np.sqrt(x**2 + y**2))) + lat = np.arctan2(z, ((1 - f) ** 2 * np.sqrt(x**2 + y**2))) return np.rad2deg(lon), np.rad2deg(lat) @@ -573,37 +561,37 @@ def interpolate_orbit_prediction(orbit_prediction, observation_time): greenwich_sidereal_time = interpolate_angles( observation_time, orbit_prediction.prediction_times, - orbit_prediction.greenwich_sidereal_time + orbit_prediction.greenwich_sidereal_time, ) declination_from_sat_to_sun = interpolate_angles( observation_time, orbit_prediction.prediction_times, - orbit_prediction.declination_from_sat_to_sun + orbit_prediction.declination_from_sat_to_sun, ) right_ascension_from_sat_to_sun = interpolate_angles( observation_time, orbit_prediction.prediction_times, - orbit_prediction.right_ascension_from_sat_to_sun + orbit_prediction.right_ascension_from_sat_to_sun, ) sat_position_earth_fixed_x = interpolate_continuous( observation_time, orbit_prediction.prediction_times, - orbit_prediction.sat_position_earth_fixed_x + orbit_prediction.sat_position_earth_fixed_x, ) sat_position_earth_fixed_y = interpolate_continuous( observation_time, orbit_prediction.prediction_times, - orbit_prediction.sat_position_earth_fixed_y + orbit_prediction.sat_position_earth_fixed_y, ) sat_position_earth_fixed_z = interpolate_continuous( observation_time, orbit_prediction.prediction_times, - orbit_prediction.sat_position_earth_fixed_z + orbit_prediction.sat_position_earth_fixed_z, ) nutation_precession = interpolate_nearest( observation_time, orbit_prediction.prediction_times, - orbit_prediction.nutation_precession + orbit_prediction.nutation_precession, ) return Orbit( greenwich_sidereal_time, @@ -612,7 +600,7 @@ def interpolate_orbit_prediction(orbit_prediction, observation_time): sat_position_earth_fixed_x, sat_position_earth_fixed_y, sat_position_earth_fixed_z, - nutation_precession + nutation_precession, ) @@ -621,22 +609,22 @@ def interpolate_attitude_prediction(attitude_prediction, observation_time): angle_between_earth_and_sun = interpolate_angles( observation_time, attitude_prediction.prediction_times, - attitude_prediction.angle_between_earth_and_sun + attitude_prediction.angle_between_earth_and_sun, ) angle_between_sat_spin_and_z_axis = interpolate_angles( observation_time, attitude_prediction.prediction_times, - attitude_prediction.angle_between_sat_spin_and_z_axis + attitude_prediction.angle_between_sat_spin_and_z_axis, ) angle_between_sat_spin_and_yz_plane = interpolate_angles( observation_time, attitude_prediction.prediction_times, - attitude_prediction.angle_between_sat_spin_and_yz_plane + attitude_prediction.angle_between_sat_spin_and_yz_plane, ) return Attitude( angle_between_earth_and_sun, angle_between_sat_spin_and_z_axis, - angle_between_sat_spin_and_yz_plane + 
angle_between_sat_spin_and_yz_plane, ) @@ -658,8 +646,8 @@ def interpolate_continuous(x, x_sample, y_sample): def _interpolate(x, x_sample, y_sample): i = _find_enclosing_index(x, x_sample) offset = y_sample[i] - x_diff = x_sample[i+1] - x_sample[i] - y_diff = y_sample[i+1] - y_sample[i] + x_diff = x_sample[i + 1] - x_sample[i] + y_diff = y_sample[i + 1] - y_sample[i] slope = y_diff / x_diff dist = x - x_sample[i] return offset + slope * dist @@ -669,9 +657,9 @@ def _interpolate(x, x_sample, y_sample): def _find_enclosing_index(x, x_sample): """Find where x_sample encloses x.""" for i in range(len(x_sample) - 1): - if x_sample[i] <= x < x_sample[i+1]: + if x_sample[i] <= x < x_sample[i + 1]: return i - raise Exception('x not enclosed by x_sample') + raise Exception("x not enclosed by x_sample") @numba.njit @@ -715,4 +703,3 @@ def _interpolate_nearest(x, x_sample, y_sample): - Finish Documentation - Call find_enclosing_index only once for all predictions """ - diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index b2a93241ac..be64a97f5b 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -190,13 +190,11 @@ class TestSinglePixelNavigation: """Test navigation of a single pixel.""" @pytest.mark.parametrize( - 'point,nav_params,expected', + "point,nav_params,expected", [ - ((ref['line'], ref['pixel']), - ref['nav_params'], - (ref['lon'], ref['lat'])) + ((ref["line"], ref["pixel"]), ref["nav_params"], (ref["lon"], ref["lat"])) for ref in NAVIGATION_REFERENCE - ] + ], ) def test_get_lon_lat(self, point, nav_params, expected): """Test getting lon/lat coordinates for a given pixel.""" @@ -208,7 +206,7 @@ def test_transform_image_coords_to_scanning_angles(self): angles = nav.transform_image_coords_to_scanning_angles( point=np.array([199, 99]), offset=np.array([100, 200]), - sampling=np.array([0.01, 0.02]) + sampling=np.array([0.01, 0.02]), ) np.testing.assert_allclose(angles, [-2, 1]) @@ -219,7 +217,7 @@ def test_transform_scanning_angles_to_satellite_coords(self): point_sat = nav.transform_scanning_angles_to_satellite_coords( scanning_angles, misalignment ) - np.testing.assert_allclose(point_sat, [0, 0, 3], atol=1E-12) + np.testing.assert_allclose(point_sat, [0, 0, 3], atol=1e-12) def test_transform_satellite_to_earth_fixed_coords(self): """Test transformation from satellite to earth-fixed coordinates.""" @@ -235,7 +233,7 @@ def test_transform_satellite_to_earth_fixed_coords(self): sat_sun_angles, earth_sun_angle, spin_angles, - nutation_precession + nutation_precession, ) np.testing.assert_allclose(res, [-3, 1, -2]) @@ -246,28 +244,25 @@ def test_intersect_view_vector_with_earth(self): eq_radius = 6371 * 1000 flattening = 0.003 ellipsoid = np.array([eq_radius, flattening]) - point = nav.intersect_with_earth( - view_vector, sat_pos, ellipsoid - ) + point = nav.intersect_with_earth(view_vector, sat_pos, ellipsoid) np.testing.assert_allclose(point, [eq_radius, 0, 0]) @pytest.mark.parametrize( - 'point_earth_fixed,point_geodetic_exp', + "point_earth_fixed,point_geodetic_exp", [ ([0, 0, 1], [0, 90]), ([0, 0, -1], [0, -90]), ([1, 0, 0], [0, 0]), ([-1, 0, 0], [180, 0]), ([1, 1, 1], [45, 35.426852]), - ] + ], ) def test_transform_earth_fixed_to_geodetic_coords( - self, point_earth_fixed, point_geodetic_exp + self, point_earth_fixed, point_geodetic_exp ): """Test transformation from earth-fixed to geodetic coordinates.""" point_geodetic = 
nav.transform_earth_fixed_to_geodetic_coords( - np.array(point_earth_fixed), - 0.003 + np.array(point_earth_fixed), 0.003 ) np.testing.assert_allclose(point_geodetic, point_geodetic_exp) @@ -280,12 +275,9 @@ def test_normalize_vector(self): class TestImageNavigation: """Test navigation of an entire image.""" + def test_get_lons_lats( - self, - scan_params, - attitude_prediction, - orbit_prediction, - proj_params + self, scan_params, attitude_prediction, orbit_prediction, proj_params ): """Test getting lon/lat coordinates.""" # fmt: off @@ -300,7 +292,7 @@ def test_get_lons_lats( lines=np.array([1000, 1500, 2000]), pixels=np.array([1000, 1500, 2000]), static_params=(scan_params, proj_params), - predicted_params=(attitude_prediction, orbit_prediction) + predicted_params=(attitude_prediction, orbit_prediction), ) np.testing.assert_allclose(lons, lons_exp) np.testing.assert_allclose(lats, lats_exp) @@ -308,6 +300,7 @@ def test_get_lons_lats( class TestEarthMask: """Test getting the earth mask.""" + def test_get_earth_mask(self): """Test getting the earth mask.""" first_earth_pixels = np.array([-1, 1, 0, -1]) @@ -329,84 +322,65 @@ class TestPredictionInterpolation: """Test interpolation of orbit and attitude predictions.""" @pytest.mark.parametrize( - 'obs_time,expected', - [ - (-1, np.nan), - (1.5, 2.5), - (5, np.nan) - ] + "obs_time,expected", [(-1, np.nan), (1.5, 2.5), (5, np.nan)] ) def test_interpolate_continuous(self, obs_time, expected): """Test interpolation of continuous variables.""" prediction_times = np.array([0, 1, 2, 3]) predicted_values = np.array([1, 2, 3, 4]) - res = nav.interpolate_continuous( - obs_time, - prediction_times, - predicted_values - ) + res = nav.interpolate_continuous(obs_time, prediction_times, predicted_values) np.testing.assert_allclose(res, expected) @pytest.mark.parametrize( - 'obs_time,expected', + "obs_time,expected", [ (-1, np.nan), - (1.5, 0.75*np.pi), - (2.5, -0.75*np.pi), - (3.5, -0.25*np.pi), + (1.5, 0.75 * np.pi), + (2.5, -0.75 * np.pi), + (3.5, -0.25 * np.pi), (5, np.nan), - ] + ], ) def test_interpolate_angles(self, obs_time, expected): """Test interpolation of periodic angles.""" prediction_times = np.array([0, 1, 2, 3, 4]) predicted_angles = np.array( - [0, 0.5*np.pi, np.pi, 1.5*np.pi, 2*np.pi] + [0, 0.5 * np.pi, np.pi, 1.5 * np.pi, 2 * np.pi] ) # already unwrapped - res = nav.interpolate_angles( - obs_time, - prediction_times, - predicted_angles - ) + res = nav.interpolate_angles(obs_time, prediction_times, predicted_angles) np.testing.assert_allclose(res, expected) @pytest.mark.parametrize( - 'obs_time,expected', + "obs_time,expected", [ (-1, np.nan * np.ones((2, 2))), (1.5, [[1, 0], [0, 2]]), (3, np.nan * np.ones((2, 2))), - ] + ], ) def test_interpolate_nearest(self, obs_time, expected): """Test nearest neighbour interpolation.""" prediction_times = np.array([0, 1, 2]) - predicted_angles = np.array([ - np.zeros((2, 2)), - np.diag((1, 2)), - np.zeros((2, 2)) - ]) - res = nav.interpolate_nearest( - obs_time, - prediction_times, - predicted_angles + predicted_angles = np.array( + [np.zeros((2, 2)), np.diag((1, 2)), np.zeros((2, 2))] ) + res = nav.interpolate_nearest(obs_time, prediction_times, predicted_angles) np.testing.assert_allclose(res, expected) - def test_interpolate_orbit_prediction(self, obs_time, orbit_prediction, orbit_expected): + def test_interpolate_orbit_prediction( + self, obs_time, orbit_prediction, orbit_expected + ): """Test interpolating orbit prediction.""" orbit_prediction = orbit_prediction.to_numba() - orbit = 
nav.interpolate_orbit_prediction( - orbit_prediction, obs_time - ) + orbit = nav.interpolate_orbit_prediction(orbit_prediction, obs_time) assert_namedtuple_close(orbit, orbit_expected) - def test_interpolate_attitude_prediction(self, obs_time, attitude_prediction, attitude_expected): + def test_interpolate_attitude_prediction( + self, obs_time, attitude_prediction, attitude_expected + ): """Test interpolating attitude prediction.""" attitude_prediction = attitude_prediction.to_numba() - attitude = nav.interpolate_attitude_prediction( - attitude_prediction, obs_time - ) + attitude = nav.interpolate_attitude_prediction(attitude_prediction, obs_time) assert_namedtuple_close(attitude, attitude_expected) @pytest.fixture @@ -424,7 +398,7 @@ def orbit_expected(self): sat_position_earth_fixed_x=1.8, sat_position_earth_fixed_y=1.9, sat_position_earth_fixed_z=2.0, - nutation_precession=1.6 * np.identity(3) + nutation_precession=1.6 * np.identity(3), ) @pytest.fixture @@ -450,7 +424,7 @@ def scan_params(sampling_angle): start_time_of_scan=0, spinning_rate=0.5, num_sensors=1, - sampling_angle=sampling_angle + sampling_angle=sampling_angle, ) @@ -478,12 +452,12 @@ def orbit_prediction(): sat_position_earth_fixed_z=np.array([0.5, 1.5, 2.5, 3.5]), nutation_precession=np.array( [ - 0.6*np.identity(3), - 1.6*np.identity(3), - 2.6*np.identity(3), - 3.6*np.identity(3) + 0.6 * np.identity(3), + 1.6 * np.identity(3), + 2.6 * np.identity(3), + 3.6 * np.identity(3), ] - ) + ), ) @@ -497,7 +471,7 @@ def proj_params(sampling_angle): sampling_angle=sampling_angle, misalignment=np.identity(3).astype(np.float64), earth_flattening=0.003352813177897, - earth_equatorial_radius=6378136 + earth_equatorial_radius=6378136, ) @@ -507,7 +481,7 @@ def test_get_observation_time(): start_time_of_scan=50000.0, spinning_rate=100, num_sensors=1, - sampling_angle=0.01 + sampling_angle=0.01, ) point = np.array([11, 100]) obs_time = nav.get_observation_time(point, scan_params) @@ -521,7 +495,7 @@ def assert_namedtuple_close(a, b): np.testing.assert_allclose( getattr(a, attr), getattr(b, attr), - err_msg='{} attribute {} differs'.format(a.__class__, attr) + err_msg="{} attribute {} differs".format(a.__class__, attr), ) @@ -532,48 +506,52 @@ class TestFileHandler: def patch_number_of_pixels_per_scanline(self, monkeypatch): """Patch data types so that each scanline has two pixels.""" num_pixels = 2 - IMAGE_DATA_BLOCK_IR = np.dtype([('LCW', vissr.LINE_CONTROL_WORD), - ('DOC', vissr.U1, (256,)), - ('image_data', vissr.U1, num_pixels)]) - IMAGE_DATA_BLOCK_VIS = np.dtype([('LCW', vissr.LINE_CONTROL_WORD), - ('DOC', vissr.U1, (64,)), - ('image_data', vissr.U1, (num_pixels,))]) + IMAGE_DATA_BLOCK_IR = np.dtype( + [ + ("LCW", vissr.LINE_CONTROL_WORD), + ("DOC", vissr.U1, (256,)), + ("image_data", vissr.U1, num_pixels), + ] + ) + IMAGE_DATA_BLOCK_VIS = np.dtype( + [ + ("LCW", vissr.LINE_CONTROL_WORD), + ("DOC", vissr.U1, (64,)), + ("image_data", vissr.U1, (num_pixels,)), + ] + ) IMAGE_DATA = { vissr.VIS_CHANNEL: { - 'offset': 6 * vissr.BLOCK_SIZE_VIS, - 'dtype': IMAGE_DATA_BLOCK_VIS, + "offset": 6 * vissr.BLOCK_SIZE_VIS, + "dtype": IMAGE_DATA_BLOCK_VIS, }, vissr.IR_CHANNEL: { - 'offset': 18 * vissr.BLOCK_SIZE_IR, - 'dtype': IMAGE_DATA_BLOCK_IR - } + "offset": 18 * vissr.BLOCK_SIZE_IR, + "dtype": IMAGE_DATA_BLOCK_IR, + }, } monkeypatch.setattr( - "satpy.readers.gms5_vissr_l1b.IMAGE_DATA_BLOCK_IR", - IMAGE_DATA_BLOCK_IR + "satpy.readers.gms5_vissr_l1b.IMAGE_DATA_BLOCK_IR", IMAGE_DATA_BLOCK_IR ) monkeypatch.setattr( - 
"satpy.readers.gms5_vissr_l1b.IMAGE_DATA_BLOCK_VIS", - IMAGE_DATA_BLOCK_VIS - ) - monkeypatch.setattr( - "satpy.readers.gms5_vissr_l1b.IMAGE_DATA", - IMAGE_DATA + "satpy.readers.gms5_vissr_l1b.IMAGE_DATA_BLOCK_VIS", IMAGE_DATA_BLOCK_VIS ) + monkeypatch.setattr("satpy.readers.gms5_vissr_l1b.IMAGE_DATA", IMAGE_DATA) - @pytest.fixture(params=[ - make_dataid(name="VIS", calibration="reflectance", resolution=1250), - make_dataid(name='IR1', calibration="brightness_temperature", resolution=5000), - make_dataid(name='IR1', calibration="counts", resolution=5000) - ]) + @pytest.fixture( + params=[ + make_dataid(name="VIS", calibration="reflectance", resolution=1250), + make_dataid( + name="IR1", calibration="brightness_temperature", resolution=5000 + ), + make_dataid(name="IR1", calibration="counts", resolution=5000), + ] + ) def dataset_id(self, request): """Get dataset ID.""" return request.param - @pytest.fixture(params=[ - True, - False - ]) + @pytest.fixture(params=[True, False]) def mask_space(self, request): """Mask space pixels.""" return request.param @@ -603,16 +581,13 @@ def file_contents(self, control_block, image_parameters, image_data): return { "control_block": control_block, "image_parameters": image_parameters, - "image_data": image_data + "image_data": image_data, } @pytest.fixture def control_block(self, dataset_id): """Get VISSR control block.""" - block_size = { - "IR1": 16, - "VIS": 4 - } + block_size = {"IR1": 16, "VIS": 4} ctrl_block = np.zeros(1, dtype=vissr.CONTROL_BLOCK) ctrl_block["parameter_block_size"] = block_size[dataset_id["name"]] ctrl_block["available_block_size_of_image_data"] = 2 @@ -643,14 +618,14 @@ def image_parameters( "ir1_calibration": ir1_calibration, "ir2_calibration": ir2_calibration, "wv_calibration": wv_calibration, - "simple_coordinate_conversion_table": simple_coordinate_conversion_table + "simple_coordinate_conversion_table": simple_coordinate_conversion_table, } @pytest.fixture def mode_block(self): """Get VISSR mode block.""" mode = np.zeros(1, dtype=vissr.MODE_BLOCK) - mode["satellite_name"] = b'GMS-5 ' + mode["satellite_name"] = b"GMS-5 " mode["spin_rate"] = 99.21774 mode["observation_time_mjd"] = 50000.0 mode["ssp_longitude"] = 140.0 @@ -713,93 +688,829 @@ def coordinate_conversion(self): def attitude_prediction(self): """Get attitude prediction.""" att_pred = np.zeros(1, dtype=vissr.ATTITUDE_PREDICTION) - # fmt: off - att_pred["data"] = np.array([ - (50130.93055556, (19960217, 222000), 3.14911863, 0.00054604, 4.3324597 , 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.93402778, (19960217, 222500), 3.14911863, 0.00054604, 4.31064812, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.9375 , (19960217, 223000), 3.14911863, 0.00054604, 4.28883633, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.94097222, (19960217, 223500), 3.14911863, 0.00054604, 4.26702432, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.94444444, (19960217, 224000), 3.14911863, 0.00054604, 4.2452121 , 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.94791667, (19960217, 224500), 3.14911863, 0.00054604, 4.22339966, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.95138889, (19960217, 225000), 3.14911863, 0.00054604, 4.201587 , 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.95486111, (19960217, 225500), 3.14911863, 0.00054604, 4.17977411, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.95833333, (19960217, 230000), 3.14911863, 0.00054604, 4.157961 , 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - 
(50130.96180556, (19960217, 230500), 3.14911863, 0.00054604, 4.13614765, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.96527778, (19960217, 231000), 3.14911863, 0.00054604, 4.11433408, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.96875 , (19960217, 231500), 3.14911863, 0.00054604, 4.09252027, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.97222222, (19960217, 232000), 3.14911863, 0.00054604, 4.07070622, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.97569444, (19960217, 232500), 3.14911863, 0.00054604, 4.04889193, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.97916667, (19960217, 233000), 3.14911863, 0.00054604, 4.02707741, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.98263889, (19960217, 233500), 3.14911863, 0.00054604, 4.00526265, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.98611111, (19960217, 234000), 3.14911863, 0.00054604, 3.98344765, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.98958333, (19960217, 234500), 3.14911863, 0.00054604, 3.96163241, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.99305556, (19960217, 235000), 3.14911863, 0.00054604, 3.93981692, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50130.99652778, (19960217, 235500), 3.14911863, 0.00054604, 3.9180012 , 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131. , (19960218, 0), 3.14911863, 0.00054604, 3.89618523, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131.00347222, (19960218, 500), 3.14911863, 0.00054604, 3.87436903, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131.00694444, (19960218, 1000), 3.14911863, 0.00054604, 3.85255258, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131.01041667, (19960218, 1500), 3.14911863, 0.00054604, 3.8307359 , 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131.01388889, (19960218, 2000), 3.14911863, 0.00054604, 3.80891898, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131.01736111, (19960218, 2500), 3.14911863, 0.00054604, 3.78710182, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131.02083333, (19960218, 3000), 3.14911863, 0.00054604, 3.76528442, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131.02430556, (19960218, 3500), 3.14911863, 0.00054604, 3.74346679, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131.02777778, (19960218, 4000), 3.14911863, 0.00054604, 3.72164893, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131.03125 , (19960218, 4500), 3.14911863, 0.00054604, 3.69983084, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131.03472222, (19960218, 5000), 3.14911863, 0.00054604, 3.67801252, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131.03819444, (19960218, 5500), 3.14911863, 0.00054604, 3.65619398, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0), - (50131.04166667, (19960218, 10000), 3.14911863, 0.00054604, 3.63437521, 99.21774527, 0.97415452, -1.56984055, 0., 0, 0)], - dtype=vissr.ATTITUDE_PREDICTION_DATA + att_pred["data"] = np.array( + [ + ( + 50130.93055556, + (19960217, 222000), + 3.14911863, + 0.00054604, + 4.3324597, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.93402778, + (19960217, 222500), + 3.14911863, + 0.00054604, + 4.31064812, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.9375, + (19960217, 223000), + 3.14911863, + 0.00054604, + 4.28883633, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.94097222, + (19960217, 223500), + 3.14911863, + 0.00054604, + 4.26702432, + 
99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.94444444, + (19960217, 224000), + 3.14911863, + 0.00054604, + 4.2452121, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.94791667, + (19960217, 224500), + 3.14911863, + 0.00054604, + 4.22339966, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.95138889, + (19960217, 225000), + 3.14911863, + 0.00054604, + 4.201587, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.95486111, + (19960217, 225500), + 3.14911863, + 0.00054604, + 4.17977411, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.95833333, + (19960217, 230000), + 3.14911863, + 0.00054604, + 4.157961, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.96180556, + (19960217, 230500), + 3.14911863, + 0.00054604, + 4.13614765, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.96527778, + (19960217, 231000), + 3.14911863, + 0.00054604, + 4.11433408, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.96875, + (19960217, 231500), + 3.14911863, + 0.00054604, + 4.09252027, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.97222222, + (19960217, 232000), + 3.14911863, + 0.00054604, + 4.07070622, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.97569444, + (19960217, 232500), + 3.14911863, + 0.00054604, + 4.04889193, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.97916667, + (19960217, 233000), + 3.14911863, + 0.00054604, + 4.02707741, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.98263889, + (19960217, 233500), + 3.14911863, + 0.00054604, + 4.00526265, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.98611111, + (19960217, 234000), + 3.14911863, + 0.00054604, + 3.98344765, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.98958333, + (19960217, 234500), + 3.14911863, + 0.00054604, + 3.96163241, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.99305556, + (19960217, 235000), + 3.14911863, + 0.00054604, + 3.93981692, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.99652778, + (19960217, 235500), + 3.14911863, + 0.00054604, + 3.9180012, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.0, + (19960218, 0), + 3.14911863, + 0.00054604, + 3.89618523, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.00347222, + (19960218, 500), + 3.14911863, + 0.00054604, + 3.87436903, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.00694444, + (19960218, 1000), + 3.14911863, + 0.00054604, + 3.85255258, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.01041667, + (19960218, 1500), + 3.14911863, + 0.00054604, + 3.8307359, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.01388889, + (19960218, 2000), + 3.14911863, + 0.00054604, + 3.80891898, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.01736111, + (19960218, 2500), + 3.14911863, + 0.00054604, + 3.78710182, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.02083333, + (19960218, 3000), + 3.14911863, + 0.00054604, + 3.76528442, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.02430556, + 
(19960218, 3500), + 3.14911863, + 0.00054604, + 3.74346679, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.02777778, + (19960218, 4000), + 3.14911863, + 0.00054604, + 3.72164893, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.03125, + (19960218, 4500), + 3.14911863, + 0.00054604, + 3.69983084, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.03472222, + (19960218, 5000), + 3.14911863, + 0.00054604, + 3.67801252, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.03819444, + (19960218, 5500), + 3.14911863, + 0.00054604, + 3.65619398, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.04166667, + (19960218, 10000), + 3.14911863, + 0.00054604, + 3.63437521, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ], + dtype=vissr.ATTITUDE_PREDICTION_DATA, ) - # fmt: on return att_pred @pytest.fixture def orbit_prediction_1(self): """Get first block of orbit prediction data.""" orb_pred = np.zeros(1, dtype=vissr.ORBIT_PREDICTION) - # fmt: off - orb_pred["data"] = np.array([ - (50130.96180556, (960217, 230500), [ 2247604.14185506, -42110997.39399951, -276688.79765022], [3069.77904265, 164.12584895, 3.65437628], [-32392525.09983424, 27002204.93121811, -263873.25702763], [ 0.81859376, 0.6760037 , 17.44588753], 133.46391815, (330.12326803, -12.19424863), (197.27884747, -11.96904141), [[ 9.99936382e-01, 1.03449318e-02, 4.49611916e-03], [-1.03447475e-02, 9.99946490e-01, -6.42483646e-05], [-4.49654321e-03, 1.77330598e-05, 9.99989890e-01]], [ 2.46885475e+08, -2.07840219e+08, -7.66028692e+07], (-0.35887085, 140.18562594, 35793706.31768975), 0, 0), - (50130.96527778, (960217, 231000), [ 3167927.33749398, -42051692.51095297, -275526.52514815], [3065.46435995, 231.22434208, 4.09379482], [-32392279.4626506 , 27002405.27592725, -258576.96255205], [ 0.81939962, 0.66017389, 17.86159393], 134.71734048, (330.12643276, -12.19310271), (196.02858456, -11.9678881 ), [[ 9.99936382e-01, 1.03449336e-02, 4.49611993e-03], [-1.03447493e-02, 9.99946490e-01, -6.42473793e-05], [-4.49654398e-03, 1.77320586e-05, 9.99989890e-01]], [ 2.46204142e+08, -2.07689897e+08, -7.65268207e+07], (-0.35166851, 140.18520316, 35793613.0815237 ), 0, 0), - (50130.96875 , (960217, 231500), [ 4086736.12968183, -41972273.80964861, -274232.7185828 ], [3059.68341675, 298.21262775, 4.53123515], [-32392033.65156128, 27002600.83510851, -253157.23498394], [ 0.81975174, 0.6441 , 18.26873686], 135.97076281, (330.12959087, -12.19195587), (194.77831505, -11.96673388), [[ 9.99936382e-01, 1.03449353e-02, 4.49612071e-03], [-1.03447510e-02, 9.99946490e-01, -6.42463940e-05], [-4.49654474e-03, 1.77310575e-05, 9.99989890e-01]], [ 2.45524133e+08, -2.07559497e+08, -7.64508451e+07], (-0.3442983 , 140.18478523, 35793516.57370046), 0, 0), - (50130.97222222, (960217, 232000), [ 5003591.03339227, -41872779.15809826, -272808.0027587 ], [3052.43895532, 365.05867777, 4.9664885 ], [-32391787.80234722, 27002791.53735474, -247616.67261456], [ 0.81965461, 0.62779672, 18.66712192], 137.22418515, (330.13274246, -12.19080808), (193.52803902, -11.9655787 ), [[ 9.99936382e-01, 1.03449371e-02, 4.49612148e-03], [-1.03447528e-02, 9.99946490e-01, -6.42454089e-05], [-4.49654551e-03, 1.77300565e-05, 9.99989890e-01]], [ 2.44845888e+08, -2.07448982e+08, -7.63749418e+07], (-0.33676374, 140.18437233, 35793416.91561355), 0, 0), - (50130.97569444, (960217, 232500), [ 5918053.49286455, -41753256.02295399, -271253.06495935], [3043.73441705, 
431.73053079, 5.39934712], [-32391542.0492856 , 27002977.3157848 , -241957.93142027], [ 0.81911313, 0.61127876, 19.05655891], 138.47760748, (330.13588763, -12.1896593 ), (192.27775657, -11.96442254), [[ 9.99936382e-01, 1.03449388e-02, 4.49612225e-03], [-1.03447545e-02, 9.99946490e-01, -6.42444238e-05], [-4.49654627e-03, 1.77290557e-05, 9.99989890e-01]], [ 2.44169846e+08, -2.07358303e+08, -7.62991102e+07], (-0.32906846, 140.18396465, 35793314.23041636), 0, 0), - (50130.97916667, (960217, 233000), [ 6829686.08751574, -41613761.44760592, -269568.65462124], [3033.5739409 , 498.19630731, 5.82960444], [-32391296.52466749, 27003158.10847847, -236183.72381214], [ 0.81813262, 0.59456087, 19.43686189], 139.73102981, (330.1390265 , -12.18850951), (191.02746783, -11.96326537), [[ 9.99936382e-01, 1.03449406e-02, 4.49612302e-03], [-1.03447563e-02, 9.99946490e-01, -6.42434389e-05], [-4.49654703e-03, 1.77280550e-05, 9.99989890e-01]], [ 2.43496443e+08, -2.07287406e+08, -7.62233495e+07], (-0.32121612, 140.18356238, 35793208.6428103 ), 0, 0), - (50130.98263889, (960217, 233500), [ 7738052.74476409, -41454362.02480648, -267755.58296603], [3021.96236148, 564.42422513, 6.25705512], [-32391051.35918404, 27003333.85786499, -230296.81731314], [ 0.81671881, 0.57765777, 19.80784932], 140.98445214, (330.14215916, -12.18735869), (189.77717289, -11.96210717), [[ 9.99936381e-01, 1.03449423e-02, 4.49612379e-03], [-1.03447580e-02, 9.99946489e-01, -6.42424541e-05], [-4.49654778e-03, 1.77270545e-05, 9.99989890e-01]], [ 2.42826115e+08, -2.07236222e+08, -7.61476592e+07], (-0.3132105 , 140.18316567, 35793100.27882991), 0, 0), - (50130.98611111, (960217, 234000), [ 8642718.9445816 , -41275133.86582235, -265814.72261683], [3008.90520686, 630.38261431, 6.68149519], [-32390806.68247503, 27003504.50991426, -224300.03325666], [ 0.81487783, 0.56058415, 20.16934411], 142.23787447, (330.14528573, -12.18620679), (188.52687186, -11.9609479 ), [[ 9.99936381e-01, 1.03449440e-02, 4.49612456e-03], [-1.03447598e-02, 9.99946489e-01, -6.42414694e-05], [-4.49654854e-03, 1.77260540e-05, 9.99989890e-01]], [ 2.42159297e+08, -2.07204676e+08, -7.60720382e+07], (-0.30505542, 140.18277471, 35792989.2656269 ), 0, 0), - (50130.98958333, (960217, 234500), [ 9543251.93095296, -41076162.56379041, -263747.00717057], [2994.40869593, 696.03993248, 7.10272213], [-32390562.62077149, 27003670.01680953, -218196.24541058], [ 0.81261619, 0.54335463, 20.52117372], 143.4912968 , (330.14840632, -12.18505381), (187.27656486, -11.95978754), [[ 9.99936381e-01, 1.03449458e-02, 4.49612532e-03], [-1.03447615e-02, 9.99946489e-01, -6.42404848e-05], [-4.49654930e-03, 1.77250538e-05, 9.99989890e-01]], [ 2.41496422e+08, -2.07192684e+08, -7.59964859e+07], (-0.29675479, 140.18238966, 35792875.73125207), 0, 0)], - dtype=vissr.ORBIT_PREDICTION_DATA + orb_pred["data"] = np.array( + [ + ( + 50130.96180556, + (960217, 230500), + [2247604.14185506, -42110997.39399951, -276688.79765022], + [3069.77904265, 164.12584895, 3.65437628], + [-32392525.09983424, 27002204.93121811, -263873.25702763], + [0.81859376, 0.6760037, 17.44588753], + 133.46391815, + (330.12326803, -12.19424863), + (197.27884747, -11.96904141), + [ + [9.99936382e-01, 1.03449318e-02, 4.49611916e-03], + [-1.03447475e-02, 9.99946490e-01, -6.42483646e-05], + [-4.49654321e-03, 1.77330598e-05, 9.99989890e-01], + ], + [2.46885475e08, -2.07840219e08, -7.66028692e07], + (-0.35887085, 140.18562594, 35793706.31768975), + 0, + 0, + ), + ( + 50130.96527778, + (960217, 231000), + [3167927.33749398, -42051692.51095297, -275526.52514815], 
+ [3065.46435995, 231.22434208, 4.09379482], + [-32392279.4626506, 27002405.27592725, -258576.96255205], + [0.81939962, 0.66017389, 17.86159393], + 134.71734048, + (330.12643276, -12.19310271), + (196.02858456, -11.9678881), + [ + [9.99936382e-01, 1.03449336e-02, 4.49611993e-03], + [-1.03447493e-02, 9.99946490e-01, -6.42473793e-05], + [-4.49654398e-03, 1.77320586e-05, 9.99989890e-01], + ], + [2.46204142e08, -2.07689897e08, -7.65268207e07], + (-0.35166851, 140.18520316, 35793613.0815237), + 0, + 0, + ), + ( + 50130.96875, + (960217, 231500), + [4086736.12968183, -41972273.80964861, -274232.7185828], + [3059.68341675, 298.21262775, 4.53123515], + [-32392033.65156128, 27002600.83510851, -253157.23498394], + [0.81975174, 0.6441, 18.26873686], + 135.97076281, + (330.12959087, -12.19195587), + (194.77831505, -11.96673388), + [ + [9.99936382e-01, 1.03449353e-02, 4.49612071e-03], + [-1.03447510e-02, 9.99946490e-01, -6.42463940e-05], + [-4.49654474e-03, 1.77310575e-05, 9.99989890e-01], + ], + [2.45524133e08, -2.07559497e08, -7.64508451e07], + (-0.3442983, 140.18478523, 35793516.57370046), + 0, + 0, + ), + ( + 50130.97222222, + (960217, 232000), + [5003591.03339227, -41872779.15809826, -272808.0027587], + [3052.43895532, 365.05867777, 4.9664885], + [-32391787.80234722, 27002791.53735474, -247616.67261456], + [0.81965461, 0.62779672, 18.66712192], + 137.22418515, + (330.13274246, -12.19080808), + (193.52803902, -11.9655787), + [ + [9.99936382e-01, 1.03449371e-02, 4.49612148e-03], + [-1.03447528e-02, 9.99946490e-01, -6.42454089e-05], + [-4.49654551e-03, 1.77300565e-05, 9.99989890e-01], + ], + [2.44845888e08, -2.07448982e08, -7.63749418e07], + (-0.33676374, 140.18437233, 35793416.91561355), + 0, + 0, + ), + ( + 50130.97569444, + (960217, 232500), + [5918053.49286455, -41753256.02295399, -271253.06495935], + [3043.73441705, 431.73053079, 5.39934712], + [-32391542.0492856, 27002977.3157848, -241957.93142027], + [0.81911313, 0.61127876, 19.05655891], + 138.47760748, + (330.13588763, -12.1896593), + (192.27775657, -11.96442254), + [ + [9.99936382e-01, 1.03449388e-02, 4.49612225e-03], + [-1.03447545e-02, 9.99946490e-01, -6.42444238e-05], + [-4.49654627e-03, 1.77290557e-05, 9.99989890e-01], + ], + [2.44169846e08, -2.07358303e08, -7.62991102e07], + (-0.32906846, 140.18396465, 35793314.23041636), + 0, + 0, + ), + ( + 50130.97916667, + (960217, 233000), + [6829686.08751574, -41613761.44760592, -269568.65462124], + [3033.5739409, 498.19630731, 5.82960444], + [-32391296.52466749, 27003158.10847847, -236183.72381214], + [0.81813262, 0.59456087, 19.43686189], + 139.73102981, + (330.1390265, -12.18850951), + (191.02746783, -11.96326537), + [ + [9.99936382e-01, 1.03449406e-02, 4.49612302e-03], + [-1.03447563e-02, 9.99946490e-01, -6.42434389e-05], + [-4.49654703e-03, 1.77280550e-05, 9.99989890e-01], + ], + [2.43496443e08, -2.07287406e08, -7.62233495e07], + (-0.32121612, 140.18356238, 35793208.6428103), + 0, + 0, + ), + ( + 50130.98263889, + (960217, 233500), + [7738052.74476409, -41454362.02480648, -267755.58296603], + [3021.96236148, 564.42422513, 6.25705512], + [-32391051.35918404, 27003333.85786499, -230296.81731314], + [0.81671881, 0.57765777, 19.80784932], + 140.98445214, + (330.14215916, -12.18735869), + (189.77717289, -11.96210717), + [ + [9.99936381e-01, 1.03449423e-02, 4.49612379e-03], + [-1.03447580e-02, 9.99946489e-01, -6.42424541e-05], + [-4.49654778e-03, 1.77270545e-05, 9.99989890e-01], + ], + [2.42826115e08, -2.07236222e08, -7.61476592e07], + (-0.3132105, 140.18316567, 35793100.27882991), + 0, + 0, + ), 
+ ( + 50130.98611111, + (960217, 234000), + [8642718.9445816, -41275133.86582235, -265814.72261683], + [3008.90520686, 630.38261431, 6.68149519], + [-32390806.68247503, 27003504.50991426, -224300.03325666], + [0.81487783, 0.56058415, 20.16934411], + 142.23787447, + (330.14528573, -12.18620679), + (188.52687186, -11.9609479), + [ + [9.99936381e-01, 1.03449440e-02, 4.49612456e-03], + [-1.03447598e-02, 9.99946489e-01, -6.42414694e-05], + [-4.49654854e-03, 1.77260540e-05, 9.99989890e-01], + ], + [2.42159297e08, -2.07204676e08, -7.60720382e07], + (-0.30505542, 140.18277471, 35792989.2656269), + 0, + 0, + ), + ( + 50130.98958333, + (960217, 234500), + [9543251.93095296, -41076162.56379041, -263747.00717057], + [2994.40869593, 696.03993248, 7.10272213], + [-32390562.62077149, 27003670.01680953, -218196.24541058], + [0.81261619, 0.54335463, 20.52117372], + 143.4912968, + (330.14840632, -12.18505381), + (187.27656486, -11.95978754), + [ + [9.99936381e-01, 1.03449458e-02, 4.49612532e-03], + [-1.03447615e-02, 9.99946489e-01, -6.42404848e-05], + [-4.49654930e-03, 1.77250538e-05, 9.99989890e-01], + ], + [2.41496422e08, -2.07192684e08, -7.59964859e07], + (-0.29675479, 140.18238966, 35792875.73125207), + 0, + 0, + ), + ], + dtype=vissr.ORBIT_PREDICTION_DATA, ) - # fmt: on return orb_pred @pytest.fixture def orbit_prediction_2(self): """Get second block of orbit prediction data.""" orb_pred = np.zeros(1, dtype=vissr.ORBIT_PREDICTION) - # fmt: off - orb_pred["data"] = np.array([ - (50130.99305556, (960217, 235000), [ 10439220.91492008, -40857543.15396438, -261553.43075696], [2978.47973561, 761.36477969, 7.52053495], [-32390319.30020279, 27003830.33282405, -211988.37862591], [ 0.80994076, 0.52598377, 20.86317023], 144.74471913, (330.15152105, -12.1838997 ), (186.026252 , -11.95862606), [[ 9.99936381e-01, 1.03449475e-02, 4.49612609e-03], [-1.03447632e-02, 9.99946489e-01, -6.42395003e-05], [-4.49655005e-03, 1.77240537e-05, 9.99989890e-01]], [ 2.40837919e+08, -2.07200148e+08, -7.59210011e+07], (-0.28831259, 140.18201066, 35792759.80443729), 0, 0), - (50130.99652778, (960217, 235500), [ 11330197.2840407 , -40619380.06793167, -259235.04755252], [2961.12591755, 826.32591367, 7.93473432], [-32390076.84311398, 27003985.41857829, -205679.40741202], [ 0.80685878, 0.50848599, 21.19517045], 145.99814147, (330.15463004, -12.18274445), (184.77593341, -11.95746344), [[ 9.99936381e-01, 1.03449492e-02, 4.49612685e-03], [-1.03447650e-02, 9.99946489e-01, -6.42385159e-05], [-4.49655080e-03, 1.77230537e-05, 9.99989890e-01]], [ 2.40184218e+08, -2.07226967e+08, -7.58455830e+07], (-0.27973286, 140.18163787, 35792641.6143761 ), 0, 0), - (50131. 
, (960218, 0), [ 12215754.80493221, -40361787.08463053, -256792.97127933], [2942.35551459, 890.89226454, 8.34512262], [-32389835.37113104, 27004135.23720251, -199272.35452792], [ 0.8033778 , 0.49087558, 21.51701595], 147.2515638 , (330.15773341, -12.18158803), (183.5256092 , -11.95629965), [[ 9.99936381e-01, 1.03449510e-02, 4.49612761e-03], [-1.03447667e-02, 9.99946489e-01, -6.42375317e-05], [-4.49655155e-03, 1.77220539e-05, 9.99989890e-01]], [ 2.39535744e+08, -2.07273025e+08, -7.57702305e+07], (-0.2710197 , 140.18127143, 35792521.29050537), 0, 0), - (50131.00347222, (960218, 500), [ 13095469.82708225, -40084887.27645436, -254228.37467049], [2922.17747695, 955.03294974, 8.75150409], [-32389595.00191828, 27004279.7580633 , -192770.28953487], [ 0.79950572, 0.47316669, 21.82855319], 148.50498613, (330.16083128, -12.18043041), (182.27527951, -11.95513466), [[ 9.99936381e-01, 1.03449527e-02, 4.49612837e-03], [-1.03447684e-02, 9.99946489e-01, -6.42365476e-05], [-4.49655230e-03, 1.77210542e-05, 9.99989890e-01]], [ 2.38892921e+08, -2.07338200e+08, -7.56949425e+07], (-0.26217728, 140.18091148, 35792398.96228714), 0, 0), - (50131.00694444, (960218, 1000), [ 13968921.48773305, -39788812.95011112, -251542.48890031], [2900.60142795, 1018.71728887, 9.15368488], [-32389355.85220329, 27004418.95297137, -186176.32730922], [ 0.79525074, 0.45537327, 22.12963356], 149.75840846, (330.16392379, -12.17927157), (181.02494445, -11.95396845), [[ 9.99936381e-01, 1.03449544e-02, 4.49612913e-03], [-1.03447701e-02, 9.99946489e-01, -6.42355636e-05], [-4.49655305e-03, 1.77200547e-05, 9.99989890e-01]], [ 2.38256170e+08, -2.07422360e+08, -7.56197178e+07], (-0.25320985, 140.18055815, 35792274.75899146), 0, 0), - (50131.01041667, (960218, 1500), [ 14835691.90970188, -39473705.58489136, -248736.60300345], [2877.63765957, 1081.9148182 , 9.55147314], [-32389118.03536845, 27004552.79890675, -179493.62657611], [ 0.79062131, 0.43750908, 22.42011344], 151.01183079, (330.16701107, -12.17811148), (179.77462147, -11.952801 ), [[ 9.99936381e-01, 1.03449561e-02, 4.49612989e-03], [-1.03447719e-02, 9.99946489e-01, -6.42345798e-05], [-4.49655380e-03, 1.77190553e-05, 9.99989890e-01]], [ 2.37625908e+08, -2.07525364e+08, -7.55445552e+07], (-0.24412169, 140.18021156, 35792148.80948149), 0, 0), - (50131.01388889, (960218, 2000), [ 15695366.40490882, -39139715.76420763, -245812.06324505], [2853.29712752, 1144.59530548, 9.94467917], [-32388881.66227116, 27004681.27687033, -172725.38836895], [ 0.7856262 , 0.41958762, 22.69985431], 152.26525312, (330.17009324, -12.17695013), (178.52427609, -11.95163228), [[ 9.99936381e-01, 1.03449578e-02, 4.49613064e-03], [-1.03447736e-02, 9.99946489e-01, -6.42335961e-05], [-4.49655455e-03, 1.77180562e-05, 9.99989890e-01]], [ 2.37002549e+08, -2.07647061e+08, -7.54694534e+07], (-0.23491716, 140.17987182, 35792021.2420001 ), 0, 0), - (50131.01736111, (960218, 2500), [ 16547533.6691137 , -38787003.10533711, -242770.27248672], [2827.5914462 , 1206.72876414, 10.33311542], [-32388646.84104986, 27004804.37195345, -165874.85452439], [ 0.78027439, 0.40162218, 22.96872279], 153.51867545, (330.17317044, -12.17578748), (177.27392574, -11.95046228), [[ 9.99936381e-01, 1.03449595e-02, 4.49613140e-03], [-1.03447753e-02, 9.99946489e-01, -6.42326125e-05], [-4.49655529e-03, 1.77170571e-05, 9.99989890e-01]], [ 2.36386506e+08, -2.07787291e+08, -7.53944111e+07], (-0.22560065, 140.17953905, 35791892.18395986), 0, 0), - (50131.02083333, (960218, 3000), [ 17391785.98229151, -38415736.18212036, -239612.68950141], [2800.53288309, 
1268.28546791, 10.71659666], [-32388413.67874206, 27004922.07123395, -158945.30610131], [ 0.77457509, 0.38362576, 23.2265907 ], 154.77209777, (330.17624281, -12.17462353), (176.02357057, -11.94929096), [[ 9.99936381e-01, 1.03449612e-02, 4.49613215e-03], [-1.03447770e-02, 9.99946489e-01, -6.42316291e-05], [-4.49655603e-03, 1.77160583e-05, 9.99989890e-01]], [ 2.35778185e+08, -2.07945887e+08, -7.53194268e+07], (-0.21617663, 140.17921335, 35791761.76173551), 0, 0)], - dtype=vissr.ORBIT_PREDICTION_DATA + orb_pred["data"] = np.array( + [ + ( + 50130.99305556, + (960217, 235000), + [10439220.91492008, -40857543.15396438, -261553.43075696], + [2978.47973561, 761.36477969, 7.52053495], + [-32390319.30020279, 27003830.33282405, -211988.37862591], + [0.80994076, 0.52598377, 20.86317023], + 144.74471913, + (330.15152105, -12.1838997), + (186.026252, -11.95862606), + [ + [9.99936381e-01, 1.03449475e-02, 4.49612609e-03], + [-1.03447632e-02, 9.99946489e-01, -6.42395003e-05], + [-4.49655005e-03, 1.77240537e-05, 9.99989890e-01], + ], + [2.40837919e08, -2.07200148e08, -7.59210011e07], + (-0.28831259, 140.18201066, 35792759.80443729), + 0, + 0, + ), + ( + 50130.99652778, + (960217, 235500), + [11330197.2840407, -40619380.06793167, -259235.04755252], + [2961.12591755, 826.32591367, 7.93473432], + [-32390076.84311398, 27003985.41857829, -205679.40741202], + [0.80685878, 0.50848599, 21.19517045], + 145.99814147, + (330.15463004, -12.18274445), + (184.77593341, -11.95746344), + [ + [9.99936381e-01, 1.03449492e-02, 4.49612685e-03], + [-1.03447650e-02, 9.99946489e-01, -6.42385159e-05], + [-4.49655080e-03, 1.77230537e-05, 9.99989890e-01], + ], + [2.40184218e08, -2.07226967e08, -7.58455830e07], + (-0.27973286, 140.18163787, 35792641.6143761), + 0, + 0, + ), + ( + 50131.0, + (960218, 0), + [12215754.80493221, -40361787.08463053, -256792.97127933], + [2942.35551459, 890.89226454, 8.34512262], + [-32389835.37113104, 27004135.23720251, -199272.35452792], + [0.8033778, 0.49087558, 21.51701595], + 147.2515638, + (330.15773341, -12.18158803), + (183.5256092, -11.95629965), + [ + [9.99936381e-01, 1.03449510e-02, 4.49612761e-03], + [-1.03447667e-02, 9.99946489e-01, -6.42375317e-05], + [-4.49655155e-03, 1.77220539e-05, 9.99989890e-01], + ], + [2.39535744e08, -2.07273025e08, -7.57702305e07], + (-0.2710197, 140.18127143, 35792521.29050537), + 0, + 0, + ), + ( + 50131.00347222, + (960218, 500), + [13095469.82708225, -40084887.27645436, -254228.37467049], + [2922.17747695, 955.03294974, 8.75150409], + [-32389595.00191828, 27004279.7580633, -192770.28953487], + [0.79950572, 0.47316669, 21.82855319], + 148.50498613, + (330.16083128, -12.18043041), + (182.27527951, -11.95513466), + [ + [9.99936381e-01, 1.03449527e-02, 4.49612837e-03], + [-1.03447684e-02, 9.99946489e-01, -6.42365476e-05], + [-4.49655230e-03, 1.77210542e-05, 9.99989890e-01], + ], + [2.38892921e08, -2.07338200e08, -7.56949425e07], + (-0.26217728, 140.18091148, 35792398.96228714), + 0, + 0, + ), + ( + 50131.00694444, + (960218, 1000), + [13968921.48773305, -39788812.95011112, -251542.48890031], + [2900.60142795, 1018.71728887, 9.15368488], + [-32389355.85220329, 27004418.95297137, -186176.32730922], + [0.79525074, 0.45537327, 22.12963356], + 149.75840846, + (330.16392379, -12.17927157), + (181.02494445, -11.95396845), + [ + [9.99936381e-01, 1.03449544e-02, 4.49612913e-03], + [-1.03447701e-02, 9.99946489e-01, -6.42355636e-05], + [-4.49655305e-03, 1.77200547e-05, 9.99989890e-01], + ], + [2.38256170e08, -2.07422360e08, -7.56197178e07], + (-0.25320985, 140.18055815, 
35792274.75899146), + 0, + 0, + ), + ( + 50131.01041667, + (960218, 1500), + [14835691.90970188, -39473705.58489136, -248736.60300345], + [2877.63765957, 1081.9148182, 9.55147314], + [-32389118.03536845, 27004552.79890675, -179493.62657611], + [0.79062131, 0.43750908, 22.42011344], + 151.01183079, + (330.16701107, -12.17811148), + (179.77462147, -11.952801), + [ + [9.99936381e-01, 1.03449561e-02, 4.49612989e-03], + [-1.03447719e-02, 9.99946489e-01, -6.42345798e-05], + [-4.49655380e-03, 1.77190553e-05, 9.99989890e-01], + ], + [2.37625908e08, -2.07525364e08, -7.55445552e07], + (-0.24412169, 140.18021156, 35792148.80948149), + 0, + 0, + ), + ( + 50131.01388889, + (960218, 2000), + [15695366.40490882, -39139715.76420763, -245812.06324505], + [2853.29712752, 1144.59530548, 9.94467917], + [-32388881.66227116, 27004681.27687033, -172725.38836895], + [0.7856262, 0.41958762, 22.69985431], + 152.26525312, + (330.17009324, -12.17695013), + (178.52427609, -11.95163228), + [ + [9.99936381e-01, 1.03449578e-02, 4.49613064e-03], + [-1.03447736e-02, 9.99946489e-01, -6.42335961e-05], + [-4.49655455e-03, 1.77180562e-05, 9.99989890e-01], + ], + [2.37002549e08, -2.07647061e08, -7.54694534e07], + (-0.23491716, 140.17987182, 35792021.2420001), + 0, + 0, + ), + ( + 50131.01736111, + (960218, 2500), + [16547533.6691137, -38787003.10533711, -242770.27248672], + [2827.5914462, 1206.72876414, 10.33311542], + [-32388646.84104986, 27004804.37195345, -165874.85452439], + [0.78027439, 0.40162218, 22.96872279], + 153.51867545, + (330.17317044, -12.17578748), + (177.27392574, -11.95046228), + [ + [9.99936381e-01, 1.03449595e-02, 4.49613140e-03], + [-1.03447753e-02, 9.99946489e-01, -6.42326125e-05], + [-4.49655529e-03, 1.77170571e-05, 9.99989890e-01], + ], + [2.36386506e08, -2.07787291e08, -7.53944111e07], + (-0.22560065, 140.17953905, 35791892.18395986), + 0, + 0, + ), + ( + 50131.02083333, + (960218, 3000), + [17391785.98229151, -38415736.18212036, -239612.68950141], + [2800.53288309, 1268.28546791, 10.71659666], + [-32388413.67874206, 27004922.07123395, -158945.30610131], + [0.77457509, 0.38362576, 23.2265907], + 154.77209777, + (330.17624281, -12.17462353), + (176.02357057, -11.94929096), + [ + [9.99936381e-01, 1.03449612e-02, 4.49613215e-03], + [-1.03447770e-02, 9.99946489e-01, -6.42316291e-05], + [-4.49655603e-03, 1.77160583e-05, 9.99989890e-01], + ], + [2.35778185e08, -2.07945887e08, -7.53194268e07], + (-0.21617663, 140.17921335, 35791761.76173551), + 0, + 0, + ), + ], + dtype=vissr.ORBIT_PREDICTION_DATA, ) - # fmt: on return orb_pred @pytest.fixture def vis_calibration(self): """Get VIS calibration block.""" vis_cal = np.zeros(1, dtype=vissr.VIS_CALIBRATION) - table = vis_cal["vis1_calibration_table"][ - "brightness_albedo_conversion_table" - ] + table = vis_cal["vis1_calibration_table"]["brightness_albedo_conversion_table"] table[0, 0:4] = np.array([0, 0.25, 0.5, 1]) return vis_cal @@ -833,10 +1544,7 @@ def simple_coordinate_conversion_table(self): @pytest.fixture def image_data(self, dataset_id, image_data_ir1, image_data_vis): """Get VISSR image data.""" - data = { - "IR1": image_data_ir1, - "VIS": image_data_vis - } + data = {"IR1": image_data_ir1, "VIS": image_data_vis} return data[dataset_id["name"]] @pytest.fixture @@ -886,13 +1594,16 @@ def vis_refl_exp(self, mask_space, lons_lats_exp): data = [[0, 0.25], [0.5, 1]] return xr.DataArray( data, - dims=('y', 'x'), + dims=("y", "x"), coords={ "lon": lons, "lat": lats, - 'acq_time': ('y', [dt.datetime(1995, 10, 10), dt.datetime(1995, 10, 10)]), - 'line_number': 
('y', [2744, 8356]) - } + "acq_time": ( + "y", + [dt.datetime(1995, 10, 10), dt.datetime(1995, 10, 10)], + ), + "line_number": ("y", [2744, 8356]), + }, ) @pytest.fixture @@ -901,14 +1612,16 @@ def ir1_counts_exp(self, lons_lats_exp): lons, lats = lons_lats_exp return xr.DataArray( [[0, 1], [2, 3]], - dims=('y', 'x'), + dims=("y", "x"), coords={ "lon": lons, "lat": lats, - 'acq_time': ('y', [dt.datetime(1995, 10, 10), - dt.datetime(1995, 10, 10)]), - 'line_number': ('y', [686, 2089]) - } + "acq_time": ( + "y", + [dt.datetime(1995, 10, 10), dt.datetime(1995, 10, 10)], + ), + "line_number": ("y", [686, 2089]), + }, ) @pytest.fixture @@ -917,14 +1630,16 @@ def ir1_bt_exp(self, lons_lats_exp): lons, lats = lons_lats_exp return xr.DataArray( [[0, 100], [200, 300]], - dims=('y', 'x'), + dims=("y", "x"), coords={ "lon": lons, "lat": lats, - 'acq_time': ( - 'y', [dt.datetime(1995, 10, 10), dt.datetime(1995, 10, 10)]), - 'line_number': ('y', [686, 2089]) - } + "acq_time": ( + "y", + [dt.datetime(1995, 10, 10), dt.datetime(1995, 10, 10)], + ), + "line_number": ("y", [686, 2089]), + }, ) @pytest.fixture @@ -969,12 +1684,16 @@ def lons_lats_exp(self, dataset_id): def dataset_exp(self, dataset_id, ir1_counts_exp, ir1_bt_exp, vis_refl_exp): """Get expected dataset.""" ir1_counts_id = make_dataid(name="IR1", calibration="counts", resolution=5000) - ir1_bt_id = make_dataid(name="IR1", calibration="brightness_temperature", resolution=5000) - vis_refl_id = make_dataid(name="VIS", calibration="reflectance", resolution=1250) + ir1_bt_id = make_dataid( + name="IR1", calibration="brightness_temperature", resolution=5000 + ) + vis_refl_id = make_dataid( + name="VIS", calibration="reflectance", resolution=1250 + ) expectations = { ir1_counts_id: ir1_counts_exp, ir1_bt_id: ir1_bt_exp, - vis_refl_id: vis_refl_exp + vis_refl_id: vis_refl_exp, } return expectations[dataset_id] @@ -983,20 +1702,40 @@ def area_def_exp(self, dataset_id): """Get expected area definition.""" if dataset_id["name"] == "IR1": resol = 5 - extent = (-8.641922536247211, -8.641922536247211, 25.925767608741637, 25.925767608741637) + extent = ( + -8.641922536247211, + -8.641922536247211, + 25.925767608741637, + 25.925767608741637, + ) else: resol = 1 - extent = (-2.1604801323784297, -2.1604801323784297, 6.481440397135289, 6.481440397135289) + extent = ( + -2.1604801323784297, + -2.1604801323784297, + 6.481440397135289, + 6.481440397135289, + ) area_id = f"gms-5_vissr_western-pacific_{resol}km" desc = f"GMS-5 VISSR Western Pacific area definition with {resol} km resolution" return AreaDefinition( area_id=area_id, description=desc, proj_id=area_id, - projection={'ellps': 'SGS85', 'h': '123456', 'lon_0': '140', 'no_defs': 'None', 'proj': 'geos', 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, + projection={ + "ellps": "SGS85", + "h": "123456", + "lon_0": "140", + "no_defs": "None", + "proj": "geos", + "type": "crs", + "units": "m", + "x_0": "0", + "y_0": "0", + }, area_extent=extent, width=2, - height=2 + height=2, ) @pytest.fixture @@ -1007,28 +1746,31 @@ def attrs_exp(self, area_def_exp): "sensor": "VISSR", "time_parameters": { "nominal_start_time": dt.datetime(1995, 10, 10), - "nominal_end_time": dt.datetime(1995, 10, 10, 0, 25) + "nominal_end_time": dt.datetime(1995, 10, 10, 0, 25), }, "orbital_parameters": { - 'satellite_nominal_longitude': 140.0, - 'satellite_nominal_latitude': 0.0, - 'satellite_nominal_altitude': 123456.0, - 'satellite_actual_longitude': 141.0, - 'satellite_actual_latitude': 1.0, - 'satellite_actual_altitude': 123457.0 + 
"satellite_nominal_longitude": 140.0, + "satellite_nominal_latitude": 0.0, + "satellite_nominal_altitude": 123456.0, + "satellite_actual_longitude": 141.0, + "satellite_actual_latitude": 1.0, + "satellite_actual_altitude": 123457.0, }, - "area_def_uniform_sampling": area_def_exp + "area_def_uniform_sampling": area_def_exp, } def test_get_dataset(self, file_handler, dataset_id, dataset_exp, attrs_exp): """Test getting the dataset.""" dataset = file_handler.get_dataset(dataset_id, None) - xr.testing.assert_allclose(dataset.compute(), dataset_exp, atol=1E-6) + xr.testing.assert_allclose(dataset.compute(), dataset_exp, atol=1e-6) self._assert_attrs_equal(dataset.attrs, attrs_exp) def test_time_attributes(self, file_handler, attrs_exp): """Test the file handler's time attributes.""" - assert file_handler.start_time == attrs_exp["time_parameters"]["nominal_start_time"] + assert ( + file_handler.start_time + == attrs_exp["time_parameters"]["nominal_start_time"] + ) assert file_handler.end_time == attrs_exp["time_parameters"]["nominal_end_time"] def _assert_attrs_equal(self, attrs_tst, attrs_exp): From dff8e6f6ea73a86bd3a11b7492736367663f85c7 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 31 May 2023 10:13:55 +0000 Subject: [PATCH 0223/1416] Sort imports --- satpy/readers/gms5_vissr_l1b.py | 7 ++++--- satpy/readers/gms5_vissr_navigation.py | 1 + satpy/tests/reader_tests/test_gms5_vissr_l1b.py | 8 ++++---- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index 5eec1557d6..940bdeb7ba 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -136,18 +136,19 @@ """ +import datetime as dt + import dask.array as da +import numba import numpy as np import xarray as xr -import numba -import datetime as dt import satpy.readers._geos_area as geos_area import satpy.readers.gms5_vissr_navigation as nav from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.hrit_jma import mjd2datetime64 -from satpy.utils import get_legacy_chunk_size from satpy.readers.utils import generic_open +from satpy.utils import get_legacy_chunk_size CHUNK_SIZE = get_legacy_chunk_size() diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index 3a9e08f340..f8c95a0010 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -12,6 +12,7 @@ import dask.array as da import numba import numpy as np + from satpy.utils import get_legacy_chunk_size CHUNK_SIZE = get_legacy_chunk_size() diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index be64a97f5b..393800c003 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -1,18 +1,18 @@ """Unit tests for GMS-5 VISSR reader.""" -import gzip import datetime as dt +import gzip +import fsspec import numpy as np import pytest import xarray as xr - from pyresample.geometry import AreaDefinition + import satpy.readers.gms5_vissr_l1b as vissr import satpy.readers.gms5_vissr_navigation as nav -from satpy.tests.utils import make_dataid from satpy.readers import FSFile -import fsspec +from satpy.tests.utils import make_dataid # Navigation references computed with JMA's Msial library (files # VISSR_19960217_2331_IR1.A.IMG and VISSR_19960217_2331_VIS.A.IMG). 
The VIS

From d704a12681f8752cbf72990f378aa966d8af19c5 Mon Sep 17 00:00:00 2001
From: youva Aoun
Date: Wed, 31 May 2023 10:22:58 +0000
Subject: [PATCH 0224/1416] Fix exp_attrs to reflect the change in nominal
 start/end time attrs with the removal of the deprecation warning

---
 satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py | 10 ++--------
 1 file changed, 2 insertions(+), 8 deletions(-)

diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py
index de718042d2..841d45b943 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py
@@ -231,14 +231,8 @@ def get_attrs_exp(projection_longitude=0.0):
             'satellite_actual_latitude': -0.5711243456528018,
             'satellite_actual_altitude': 35783296.150123544},
         'georef_offset_corrected': True,
-        'nominal_start_time': """
-            Deprecation warning: nominal_start_time should be accessed via the time_parameters attrs
-            nominal_start_time is also available directly via start_time
-        """,
-        'nominal_end_time': """
-            deprecation warning: nominal_end_time should be accessed via the time_parameters attrs
-            nominal_end_time is also available directly via end_time
-        """,
+        'nominal_start_time': (datetime(2006, 1, 1, 12, 15),),
+        'nominal_end_time': (datetime(2006, 1, 1, 12, 30),),
         'time_parameters': {
             'nominal_start_time': datetime(2006, 1, 1, 12, 15),
             'nominal_end_time': datetime(2006, 1, 1, 12, 30),

From 531e910923aa4f88bbae5a6cdd974d95c46264ea Mon Sep 17 00:00:00 2001
From: youva Aoun
Date: Wed, 31 May 2023 10:33:46 +0000
Subject: [PATCH 0225/1416] Clean up mention of tres in the test for seviri
 l1b native reader

---
 satpy/tests/reader_tests/test_seviri_l1b_native.py | 5 -----
 1 file changed, 5 deletions(-)

diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py
index 8669f35a96..da075e3aed 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_native.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py
@@ -682,9 +682,6 @@ def prepare_area_defs(self, test_dict):
         fh.header = header
         fh.trailer = trailer
         fh.image_boundaries = ImageBoundaries(header, trailer, fh.mda)
-        fh.tres = 15  # base RC duration
-        if fh.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1:
-            fh.tres = 5
         calc_area_def = fh.get_area_def(dataset_id)
         return calc_area_def, expected_area_def

@@ -1035,7 +1032,6 @@ def file_handler(self):
         fh = NativeMSGFileHandler(filename='', filename_info=dict(), filetype_info=None)
         fh.header = header
         fh.trailer = trailer
-        fh.tres = 15
         fh.platform_id = self.platform_id
         return fh

@@ -1129,7 +1125,6 @@ def file_handler(self):
         fh = NativeMSGFileHandler(filename='', filename_info=dict(), filetype_info=None)
         fh.header = header
         fh.trailer = trailer
-        fh.tres = 15
         fh.mda = mda
         fh.dask_array = da.from_array(data)
         fh.platform_id = 324

From dea08ab2739811fe25888a4c76f1d6fc477b9f2e Mon Sep 17 00:00:00 2001
From: youva Aoun
Date: Wed, 31 May 2023 10:35:02 +0000
Subject: [PATCH 0226/1416] Clean up mention of tres in the test for seviri
 l1b hrit reader

---
 satpy/tests/reader_tests/test_seviri_l1b_hrit.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py
index 7fdec8bef2..1492463506 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py
@@ -434,7 +434,6 @@ def file_handler(self):
         fh.mda
= mda fh.prologue = prolog fh.epilogue = epilog - fh.tres = 15 return fh @pytest.mark.parametrize( From a2a05cfd9d4de64456f5ac282c83595c5b5bec68 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 31 May 2023 12:31:45 +0000 Subject: [PATCH 0227/1416] Make flake8 happy --- satpy/readers/gms5_vissr_l1b.py | 27 +++++++++- satpy/readers/gms5_vissr_navigation.py | 68 +++++++++++++++++++------- 2 files changed, 75 insertions(+), 20 deletions(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index 940bdeb7ba..211988e32c 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -563,6 +563,14 @@ class GMS5VISSRFileHandler(BaseFileHandler): """File handler for GMS-5 VISSR data in VISSR archive format.""" def __init__(self, filename, filename_info, filetype_info, mask_space=True): + """Initialize the file handler. + + Args: + filename: Name of file to be read + filename_info: Information obtained from filename + filetype_info: Information about file type + mask_space: Mask space pixels. + """ super(GMS5VISSRFileHandler, self).__init__( filename, filename_info, filetype_info ) @@ -922,6 +930,11 @@ class Calibrator: """ def __init__(self, calib_table): + """Initialize the calibrator. + + Args: + calib_table: Calibration table + """ self._calib_table = calib_table def calibrate(self, counts, calibration): @@ -953,6 +966,12 @@ class SpaceMasker: _fill_value = -1 # scanline not intersecting the earth def __init__(self, image_data, channel): + """Initialize the space masker. + + Args: + image_data: Image data + channel: Channel name + """ self._image_data = image_data self._channel = channel self._shape = image_data["image_data"].shape @@ -1002,7 +1021,7 @@ def get_earth_mask(shape, earth_edges, fill_value=-1): last = last_earth_pixels[line] if first == fill_value or last == fill_value: continue - mask[line, first : last + 1] = 1 + mask[line, first:last+1] = 1 return mask @@ -1015,6 +1034,12 @@ class AreaDefEstimator: """Estimate area definition for VISSR images.""" def __init__(self, coord_conv_params, metadata): + """Initialize the area definition estimator. + + Args: + coord_conv_params: Coordinate conversion parameters + metadata: VISSR file metadata + """ self.coord_conv = coord_conv_params self.metadata = metadata diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index f8c95a0010..7d7a37cb1d 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -1,6 +1,5 @@ """GMS-5 VISSR Navigation. - Reference: `GMS User Guide`_, Appendix E, S-VISSR Mapping. .. _GMS User Guide: @@ -92,7 +91,7 @@ ) -class AttitudePrediction(object): +class AttitudePrediction: """Attitude prediction. Use .to_numba() to pass this object to jitted methods. This extra @@ -107,9 +106,20 @@ def __init__( angle_between_sat_spin_and_z_axis, angle_between_sat_spin_and_yz_plane, ): - # In order to accelerate interpolation, the 2-pi periodicity of angles - # is unwrapped here already (that means phase jumps greater than pi - # are wrapped to their 2*pi complement). + """Initialize attitude prediction. + + In order to accelerate interpolation, the 2-pi periodicity of angles + is unwrapped here already (that means phase jumps greater than pi + are wrapped to their 2*pi complement). 
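+        For example, ``np.unwrap([6.2, 0.1])`` gives roughly ``[6.2, 6.38]``,
+        so linear interpolation across the 2*pi wrap-around stays continuous.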
+ + Args: + prediction_times: Timestamps of predicted attitudes + angle_between_earth_and_sun: Angle between earth and sun + angle_between_sat_spin_and_z_axis: Angle between satellite's + spin-axis and the z-axis of the coordinate system + angle_between_sat_spin_and_yz_plane: Angle between satellite's + spin-axis and the yz-plane of the coordinate system + """ self.prediction_times = prediction_times self.angle_between_earth_and_sun = np.unwrap(angle_between_earth_and_sun) self.angle_between_sat_spin_and_z_axis = np.unwrap( @@ -129,7 +139,7 @@ def to_numba(self): ) -class OrbitPrediction(object): +class OrbitPrediction: """Orbit prediction. Use .to_numba() to pass this object to jitted methods. This extra @@ -148,9 +158,26 @@ def __init__( sat_position_earth_fixed_z, nutation_precession, ): - # In order to accelerate interpolation, the 2-pi periodicity of angles - # is unwrapped here already (that means phase jumps greater than pi - # are wrapped to their 2*pi complement). + """Initialize orbit prediction. + + In order to accelerate interpolation, the 2-pi periodicity of angles + is unwrapped here already (that means phase jumps greater than pi + are wrapped to their 2*pi complement). + + Args: + prediction_times: Timestamps of orbit prediction. + greenwich_sidereal_time: Greenwich sidereal time + declination_from_sat_to_sun: Declination from satellite to sun + right_ascension_from_sat_to_sun: Right ascension from satellite to + sun + sat_position_earth_fixed_x: Satellite position in earth fixed + coordinates (x-component) + sat_position_earth_fixed_y: Satellite position in earth fixed + coordinates (y-component) + sat_position_earth_fixed_z: Satellite position in earth fixed + coordinates (z-component) + nutation_precession: Nutation and precession matrix. + """ self.prediction_times = prediction_times self.greenwich_sidereal_time = np.unwrap(greenwich_sidereal_time) self.declination_from_sat_to_sun = np.unwrap(declination_from_sat_to_sun) @@ -177,6 +204,14 @@ def to_numba(self): def get_lons_lats(lines, pixels, static_params, predicted_params): + """Compute lon/lat coordinates given VISSR image coordinates. 
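+
+    The computation is dispatched chunk-wise to ``_get_lons_lats_numba``
+    via ``da.map_blocks``, so ``lons`` and ``lats`` are returned as lazy
+    dask arrays.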
+ + Args: + lines: VISSR image lines + pixels: VISSR image pixels + static_params: Static navigation parameters + predicted_params: Predicted time-dependent navigation parameters + """ pixels_2d, lines_2d = da.meshgrid(pixels, lines) lons, lats = da.map_blocks( _get_lons_lats_numba, @@ -256,6 +291,7 @@ def _get_relative_observation_time(point, scan_params): def interpolate_navigation_prediction( attitude_prediction, orbit_prediction, observation_time ): + """Interpolate predicted navigation parameters.""" attitude = interpolate_attitude_prediction(attitude_prediction, observation_time) orbit = interpolate_orbit_prediction(orbit_prediction, observation_time) return attitude, orbit @@ -559,6 +595,7 @@ def normalize_vector(v): @numba.njit def interpolate_orbit_prediction(orbit_prediction, observation_time): + """Interpolate orbit prediction.""" greenwich_sidereal_time = interpolate_angles( observation_time, orbit_prediction.prediction_times, @@ -607,6 +644,7 @@ def interpolate_orbit_prediction(orbit_prediction, observation_time): @numba.njit def interpolate_attitude_prediction(attitude_prediction, observation_time): + """Interpolate attitude prediction.""" angle_between_earth_and_sun = interpolate_angles( observation_time, attitude_prediction.prediction_times, @@ -638,7 +676,7 @@ def interpolate_continuous(x, x_sample, y_sample): """ try: return _interpolate(x, x_sample, y_sample) - except: + except Exception: # Numba cannot distinguish exception types return np.nan @@ -688,7 +726,7 @@ def interpolate_nearest(x, x_sample, y_sample): """Nearest neighbour interpolation.""" try: return _interpolate_nearest(x, x_sample, y_sample) - except: + except Exception: return np.nan * np.ones_like(y_sample[0]) @@ -696,11 +734,3 @@ def interpolate_nearest(x, x_sample, y_sample): def _interpolate_nearest(x, x_sample, y_sample): i = _find_enclosing_index(x, x_sample) return y_sample[i] - - -# TODO -""" -- Code formatting -- Finish Documentation -- Call find_enclosing_index only once for all predictions -""" From 09aff64e15507ea529da5477816119df02f5e666 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 31 May 2023 12:54:33 +0000 Subject: [PATCH 0228/1416] Fix test misses --- satpy/readers/gms5_vissr_l1b.py | 5 ++- .../tests/reader_tests/test_gms5_vissr_l1b.py | 43 ++++++++++++++++--- 2 files changed, 42 insertions(+), 6 deletions(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index 211988e32c..218d76b3c7 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -596,7 +596,10 @@ def _get_channel_type(parameter_block_size): return VIS_CHANNEL elif parameter_block_size == 16: return IR_CHANNEL - raise ValueError("Cannot determine channel type: Unknown parameter block size.") + raise ValueError( + f"Cannot determine channel type, possibly corrupt file " + f"(unknown parameter block size: {parameter_block_size})" + ) def _read_control_block(self, file_obj): ctrl_block = read_from_file_obj(file_obj, dtype=CONTROL_BLOCK, count=1) diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index 393800c003..151060b741 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -1767,11 +1767,10 @@ def test_get_dataset(self, file_handler, dataset_id, dataset_exp, attrs_exp): def test_time_attributes(self, file_handler, attrs_exp): """Test the file handler's time attributes.""" - assert ( - file_handler.start_time - == 
attrs_exp["time_parameters"]["nominal_start_time"] - ) - assert file_handler.end_time == attrs_exp["time_parameters"]["nominal_end_time"] + start_time_exp = attrs_exp["time_parameters"]["nominal_start_time"] + end_time_exp = attrs_exp["time_parameters"]["nominal_end_time"] + assert file_handler.start_time == start_time_exp + assert file_handler.end_time == end_time_exp def _assert_attrs_equal(self, attrs_tst, attrs_exp): area_tst = attrs_tst.pop("area_def_uniform_sampling") @@ -1786,10 +1785,44 @@ def _assert_areas_close(self, area_tst, area_exp): np.testing.assert_allclose(lats_tst, lats_exp) +class TestCorruptFile: + """Test reading corrupt files.""" + + @pytest.fixture + def file_contents(self): + """Get corrupt file contents (all zero).""" + control_block = np.zeros(1, dtype=vissr.CONTROL_BLOCK) + image_data = np.zeros(1, dtype=vissr.IMAGE_DATA_BLOCK_IR) + return { + "control_block": control_block, + "image_parameters": {}, + "image_data": image_data, + } + + @pytest.fixture + def corrupt_file(self, file_contents, tmp_path): + """Write corrupt VISSR file to disk.""" + filename = tmp_path / "my_vissr_file" + writer = VissrFileWriter(ch_type="VIS", open_function=open) + writer.write(filename, file_contents) + return filename + + def test_corrupt_file(self, corrupt_file): + """Test reading a corrupt file.""" + with pytest.raises(ValueError, match=r'.* corrupt .*'): + vissr.GMS5VISSRFileHandler(corrupt_file, {}, {}) + + class VissrFileWriter: """Write data in VISSR archive format.""" def __init__(self, ch_type, open_function): + """Initialize the writer. + + Args: + ch_type: Channel type (VIS or IR) + open_function: Open function to be used (e.g. open or gzip.open) + """ self.ch_type = ch_type self.open_function = open_function From 5e553180322b2dbefe4215b71ceb7d0f4b76be01 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 31 May 2023 22:12:13 +0800 Subject: [PATCH 0229/1416] Update __init__.py --- satpy/composites/__init__.py | 33 ++++++++++++--------------------- 1 file changed, 12 insertions(+), 21 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index d8c31125d0..e965f82a61 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1105,19 +1105,21 @@ def _get_and_sharpen_rgb_data_arrays_and_meta(self, datasets, optional_datasets) new_attrs.setdefault('resolution', high_res.attrs['resolution']) colors = ['red', 'green', 'blue', None] low_resolution_index = colors.index(self.high_resolution_color) - high_resolution_index = low_resolution_index neutral_resolution_index = colors.index(self.neutral_resolution_color) - neutral_res = datasets[neutral_resolution_index] if neutral_resolution_index is not None else None + neutral_res = datasets[neutral_resolution_index] if self.neutral_resolution_color is not None else None else: LOG.debug("No sharpening band specified for ratio sharpening") high_res = None neutral_res = None low_resolution_index = 0 - high_resolution_index = 0 neutral_resolution_index = 0 if high_res is not None: + bands = locals() + colors = ['red', 'green', 'blue'] + bands["low_res_" + self.high_resolution_color] = high_res + colors.remove(self.high_resolution_color) low_res = (low_res_red, low_res_green, low_res_blue)[low_resolution_index] ratio = da.map_blocks( _get_sharpening_ratio, @@ -1129,27 +1131,16 @@ def _get_and_sharpen_rgb_data_arrays_and_meta(self, datasets, optional_datasets) ) with xr.set_options(keep_attrs=True): if neutral_res is not None: - if high_resolution_index == 0: - low_res_red = high_res - 
low_res_green = neutral_res if neutral_resolution_index == 1 else low_res_green * ratio - low_res_blue = neutral_res if neutral_resolution_index == 2 else low_res_blue * ratio + if low_resolution_index != neutral_resolution_index: + colors.remove(self.neutral_resolution_color) - elif high_resolution_index == 1: - low_res_red = neutral_res if neutral_resolution_index == 0 else low_res_red * ratio - low_res_green = high_res - low_res_blue = neutral_res if neutral_resolution_index == 2 else low_res_blue * ratio + for color in colors: + bands["low_res_" + color] = bands["low_res_" + color] * ratio - elif high_resolution_index == 2: - low_res_red = neutral_res if neutral_resolution_index == 0 else low_res_red * ratio - low_res_green = neutral_res if neutral_resolution_index == 1 else low_res_green * ratio - low_res_blue = high_res + return bands["low_res_red"], bands["low_res_green"], bands["low_res_blue"], new_attrs - else: - low_res_red = high_res if high_resolution_index == 0 else low_res_red * ratio - low_res_green = high_res if high_resolution_index == 1 else low_res_green * ratio - low_res_blue = high_res if high_resolution_index == 2 else low_res_blue * ratio - - return low_res_red, low_res_green, low_res_blue, new_attrs + else: + return low_res_red, low_res_green, low_res_blue, new_attrs def _combined_sharpened_info(self, info, new_attrs): combined_info = {} From 886f803b9a3a9bd14e0f2afe81f4b38bb48313a7 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 31 May 2023 14:33:21 +0000 Subject: [PATCH 0230/1416] Run tests with jit enabled and disabled --- .../tests/reader_tests/test_gms5_vissr_l1b.py | 17 +++++++++++++++++ satpy/tests/reader_tests/utils.py | 18 ++++++++++++++++++ 2 files changed, 35 insertions(+) diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index 151060b741..f78e930160 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -12,6 +12,7 @@ import satpy.readers.gms5_vissr_l1b as vissr import satpy.readers.gms5_vissr_navigation as nav from satpy.readers import FSFile +from satpy.tests.reader_tests.utils import get_jit_methods from satpy.tests.utils import make_dataid # Navigation references computed with JMA's Msial library (files @@ -186,6 +187,22 @@ NAVIGATION_REFERENCE = VIS_NAVIGATION_REFERENCE + IR_NAVIGATION_REFERENCE +@pytest.fixture(params=[False, True], autouse=True) +def disable_jit(request, monkeypatch): + """Run tests with jit enabled and disabled. + + Reason: Coverage report is only accurate with jit disabled. + """ + if request.param: + jit_methods = get_jit_methods(nav) + jit_methods.update(get_jit_methods(vissr)) + for name, method in jit_methods.items(): + monkeypatch.setattr( + name, + method.py_func + ) + + class TestSinglePixelNavigation: """Test navigation of a single pixel.""" diff --git a/satpy/tests/reader_tests/utils.py b/satpy/tests/reader_tests/utils.py index dd5b09c86a..9415ac56ec 100644 --- a/satpy/tests/reader_tests/utils.py +++ b/satpy/tests/reader_tests/utils.py @@ -17,6 +17,8 @@ # satpy. If not, see . 
"""Utilities for reader tests.""" +import inspect + def default_attr_processor(root, attr): """Do not change the attribute.""" @@ -43,3 +45,19 @@ def fill_h5(root, contents, attr_processor=default_attr_processor): if "attrs" in val: for attr_name, attr_val in val["attrs"].items(): root[key].attrs[attr_name] = attr_processor(root, attr_val) + + +def get_jit_methods(module): + """Get all jit-compiled methods in a module.""" + res = {} + module_name = module.__name__ + members = inspect.getmembers(module) + for member_name, obj in members: + if _is_jit_method(obj): + full_name = f"{module_name}.{member_name}" + res[full_name] = obj + return res + + +def _is_jit_method(obj): + return hasattr(obj, "py_func") From 9279a3828b0ece4391e0047b4c2a9ec47224dc41 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Wed, 31 May 2023 14:44:08 +0000 Subject: [PATCH 0231/1416] Add test of the start/end and observation time --- satpy/tests/reader_tests/test_hrit_base.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_hrit_base.py b/satpy/tests/reader_tests/test_hrit_base.py index 0f46d94882..f8ad241532 100644 --- a/satpy/tests/reader_tests/test_hrit_base.py +++ b/satpy/tests/reader_tests/test_hrit_base.py @@ -21,7 +21,7 @@ import gzip import os import unittest -from datetime import datetime +from datetime import datetime, timedelta from tempfile import NamedTemporaryFile, gettempdir from unittest import mock @@ -268,6 +268,13 @@ def test_read_band_gzip_stream(self, stub_gzipped_hrit_file): res = self.reader.read_band('VIS006', None) assert res.compute().shape == (464, 3712) + def test_start_end_time(self): + """Test reading and converting start/end time.""" + assert self.reader.start_time == datetime(2016, 3, 3, 0, 0) + assert self.reader.start_time == self.reader.observation_start_time + assert self.reader.end_time == datetime(2016, 3, 3, 0, 0) + timedelta(minutes=15) + assert self.reader.end_time == self.reader.observation_end_time + def fake_decompress(infile, outdir='.'): """Fake decompression.""" From 9f26360d886a4f94dfc9cd8d5a13b5fbcc247f59 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 31 May 2023 14:45:13 +0000 Subject: [PATCH 0232/1416] Add numba to rtd environment --- doc/rtd_environment.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/rtd_environment.yml b/doc/rtd_environment.yml index 82168df77d..e37233906b 100644 --- a/doc/rtd_environment.yml +++ b/doc/rtd_environment.yml @@ -11,6 +11,7 @@ dependencies: # 2.19.1 seems to cause library linking issues - eccodes>=2.20 - graphviz + - numba - numpy - pillow - pooch From 3df9d15fda5b8f513c290dc93c21f368f98c321c Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 31 May 2023 15:10:44 +0000 Subject: [PATCH 0233/1416] Split unit tests into multiple files --- .../reader_tests/test_gms5_vissr_data.py | 812 +++++++++++ .../tests/reader_tests/test_gms5_vissr_l1b.py | 1276 +---------------- .../test_gms5_vissr_navigation.py | 486 +++++++ 3 files changed, 1303 insertions(+), 1271 deletions(-) create mode 100644 satpy/tests/reader_tests/test_gms5_vissr_data.py create mode 100644 satpy/tests/reader_tests/test_gms5_vissr_navigation.py diff --git a/satpy/tests/reader_tests/test_gms5_vissr_data.py b/satpy/tests/reader_tests/test_gms5_vissr_data.py new file mode 100644 index 0000000000..dafe4e80b5 --- /dev/null +++ b/satpy/tests/reader_tests/test_gms5_vissr_data.py @@ -0,0 +1,812 @@ +"""Real world test data for GMS-5 VISSR unit tests.""" + +import numpy as np + +import 
satpy.readers.gms5_vissr_l1b as vissr + +ATTITUDE_PREDICTION = np.array( + [ + ( + 50130.93055556, + (19960217, 222000), + 3.14911863, + 0.00054604, + 4.3324597, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.93402778, + (19960217, 222500), + 3.14911863, + 0.00054604, + 4.31064812, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.9375, + (19960217, 223000), + 3.14911863, + 0.00054604, + 4.28883633, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.94097222, + (19960217, 223500), + 3.14911863, + 0.00054604, + 4.26702432, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.94444444, + (19960217, 224000), + 3.14911863, + 0.00054604, + 4.2452121, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.94791667, + (19960217, 224500), + 3.14911863, + 0.00054604, + 4.22339966, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.95138889, + (19960217, 225000), + 3.14911863, + 0.00054604, + 4.201587, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.95486111, + (19960217, 225500), + 3.14911863, + 0.00054604, + 4.17977411, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.95833333, + (19960217, 230000), + 3.14911863, + 0.00054604, + 4.157961, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.96180556, + (19960217, 230500), + 3.14911863, + 0.00054604, + 4.13614765, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.96527778, + (19960217, 231000), + 3.14911863, + 0.00054604, + 4.11433408, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.96875, + (19960217, 231500), + 3.14911863, + 0.00054604, + 4.09252027, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.97222222, + (19960217, 232000), + 3.14911863, + 0.00054604, + 4.07070622, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.97569444, + (19960217, 232500), + 3.14911863, + 0.00054604, + 4.04889193, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.97916667, + (19960217, 233000), + 3.14911863, + 0.00054604, + 4.02707741, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.98263889, + (19960217, 233500), + 3.14911863, + 0.00054604, + 4.00526265, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.98611111, + (19960217, 234000), + 3.14911863, + 0.00054604, + 3.98344765, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.98958333, + (19960217, 234500), + 3.14911863, + 0.00054604, + 3.96163241, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.99305556, + (19960217, 235000), + 3.14911863, + 0.00054604, + 3.93981692, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50130.99652778, + (19960217, 235500), + 3.14911863, + 0.00054604, + 3.9180012, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.0, + (19960218, 0), + 3.14911863, + 0.00054604, + 3.89618523, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.00347222, + (19960218, 500), + 3.14911863, + 0.00054604, + 3.87436903, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.00694444, + (19960218, 1000), + 3.14911863, + 0.00054604, + 3.85255258, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 
50131.01041667, + (19960218, 1500), + 3.14911863, + 0.00054604, + 3.8307359, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.01388889, + (19960218, 2000), + 3.14911863, + 0.00054604, + 3.80891898, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.01736111, + (19960218, 2500), + 3.14911863, + 0.00054604, + 3.78710182, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.02083333, + (19960218, 3000), + 3.14911863, + 0.00054604, + 3.76528442, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.02430556, + (19960218, 3500), + 3.14911863, + 0.00054604, + 3.74346679, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.02777778, + (19960218, 4000), + 3.14911863, + 0.00054604, + 3.72164893, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.03125, + (19960218, 4500), + 3.14911863, + 0.00054604, + 3.69983084, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.03472222, + (19960218, 5000), + 3.14911863, + 0.00054604, + 3.67801252, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.03819444, + (19960218, 5500), + 3.14911863, + 0.00054604, + 3.65619398, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ( + 50131.04166667, + (19960218, 10000), + 3.14911863, + 0.00054604, + 3.63437521, + 99.21774527, + 0.97415452, + -1.56984055, + 0.0, + 0, + 0, + ), + ], + dtype=vissr.ATTITUDE_PREDICTION_DATA, +) + +ORBIT_PREDICTION_1 = np.array( + [ + ( + 50130.96180556, + (960217, 230500), + [2247604.14185506, -42110997.39399951, -276688.79765022], + [3069.77904265, 164.12584895, 3.65437628], + [-32392525.09983424, 27002204.93121811, -263873.25702763], + [0.81859376, 0.6760037, 17.44588753], + 133.46391815, + (330.12326803, -12.19424863), + (197.27884747, -11.96904141), + [ + [9.99936382e-01, 1.03449318e-02, 4.49611916e-03], + [-1.03447475e-02, 9.99946490e-01, -6.42483646e-05], + [-4.49654321e-03, 1.77330598e-05, 9.99989890e-01], + ], + [2.46885475e08, -2.07840219e08, -7.66028692e07], + (-0.35887085, 140.18562594, 35793706.31768975), + 0, + 0, + ), + ( + 50130.96527778, + (960217, 231000), + [3167927.33749398, -42051692.51095297, -275526.52514815], + [3065.46435995, 231.22434208, 4.09379482], + [-32392279.4626506, 27002405.27592725, -258576.96255205], + [0.81939962, 0.66017389, 17.86159393], + 134.71734048, + (330.12643276, -12.19310271), + (196.02858456, -11.9678881), + [ + [9.99936382e-01, 1.03449336e-02, 4.49611993e-03], + [-1.03447493e-02, 9.99946490e-01, -6.42473793e-05], + [-4.49654398e-03, 1.77320586e-05, 9.99989890e-01], + ], + [2.46204142e08, -2.07689897e08, -7.65268207e07], + (-0.35166851, 140.18520316, 35793613.0815237), + 0, + 0, + ), + ( + 50130.96875, + (960217, 231500), + [4086736.12968183, -41972273.80964861, -274232.7185828], + [3059.68341675, 298.21262775, 4.53123515], + [-32392033.65156128, 27002600.83510851, -253157.23498394], + [0.81975174, 0.6441, 18.26873686], + 135.97076281, + (330.12959087, -12.19195587), + (194.77831505, -11.96673388), + [ + [9.99936382e-01, 1.03449353e-02, 4.49612071e-03], + [-1.03447510e-02, 9.99946490e-01, -6.42463940e-05], + [-4.49654474e-03, 1.77310575e-05, 9.99989890e-01], + ], + [2.45524133e08, -2.07559497e08, -7.64508451e07], + (-0.3442983, 140.18478523, 35793516.57370046), + 0, + 0, + ), + ( + 50130.97222222, + (960217, 232000), + [5003591.03339227, -41872779.15809826, -272808.0027587], + [3052.43895532, 365.05867777, 4.9664885], + 
[-32391787.80234722, 27002791.53735474, -247616.67261456], + [0.81965461, 0.62779672, 18.66712192], + 137.22418515, + (330.13274246, -12.19080808), + (193.52803902, -11.9655787), + [ + [9.99936382e-01, 1.03449371e-02, 4.49612148e-03], + [-1.03447528e-02, 9.99946490e-01, -6.42454089e-05], + [-4.49654551e-03, 1.77300565e-05, 9.99989890e-01], + ], + [2.44845888e08, -2.07448982e08, -7.63749418e07], + (-0.33676374, 140.18437233, 35793416.91561355), + 0, + 0, + ), + ( + 50130.97569444, + (960217, 232500), + [5918053.49286455, -41753256.02295399, -271253.06495935], + [3043.73441705, 431.73053079, 5.39934712], + [-32391542.0492856, 27002977.3157848, -241957.93142027], + [0.81911313, 0.61127876, 19.05655891], + 138.47760748, + (330.13588763, -12.1896593), + (192.27775657, -11.96442254), + [ + [9.99936382e-01, 1.03449388e-02, 4.49612225e-03], + [-1.03447545e-02, 9.99946490e-01, -6.42444238e-05], + [-4.49654627e-03, 1.77290557e-05, 9.99989890e-01], + ], + [2.44169846e08, -2.07358303e08, -7.62991102e07], + (-0.32906846, 140.18396465, 35793314.23041636), + 0, + 0, + ), + ( + 50130.97916667, + (960217, 233000), + [6829686.08751574, -41613761.44760592, -269568.65462124], + [3033.5739409, 498.19630731, 5.82960444], + [-32391296.52466749, 27003158.10847847, -236183.72381214], + [0.81813262, 0.59456087, 19.43686189], + 139.73102981, + (330.1390265, -12.18850951), + (191.02746783, -11.96326537), + [ + [9.99936382e-01, 1.03449406e-02, 4.49612302e-03], + [-1.03447563e-02, 9.99946490e-01, -6.42434389e-05], + [-4.49654703e-03, 1.77280550e-05, 9.99989890e-01], + ], + [2.43496443e08, -2.07287406e08, -7.62233495e07], + (-0.32121612, 140.18356238, 35793208.6428103), + 0, + 0, + ), + ( + 50130.98263889, + (960217, 233500), + [7738052.74476409, -41454362.02480648, -267755.58296603], + [3021.96236148, 564.42422513, 6.25705512], + [-32391051.35918404, 27003333.85786499, -230296.81731314], + [0.81671881, 0.57765777, 19.80784932], + 140.98445214, + (330.14215916, -12.18735869), + (189.77717289, -11.96210717), + [ + [9.99936381e-01, 1.03449423e-02, 4.49612379e-03], + [-1.03447580e-02, 9.99946489e-01, -6.42424541e-05], + [-4.49654778e-03, 1.77270545e-05, 9.99989890e-01], + ], + [2.42826115e08, -2.07236222e08, -7.61476592e07], + (-0.3132105, 140.18316567, 35793100.27882991), + 0, + 0, + ), + ( + 50130.98611111, + (960217, 234000), + [8642718.9445816, -41275133.86582235, -265814.72261683], + [3008.90520686, 630.38261431, 6.68149519], + [-32390806.68247503, 27003504.50991426, -224300.03325666], + [0.81487783, 0.56058415, 20.16934411], + 142.23787447, + (330.14528573, -12.18620679), + (188.52687186, -11.9609479), + [ + [9.99936381e-01, 1.03449440e-02, 4.49612456e-03], + [-1.03447598e-02, 9.99946489e-01, -6.42414694e-05], + [-4.49654854e-03, 1.77260540e-05, 9.99989890e-01], + ], + [2.42159297e08, -2.07204676e08, -7.60720382e07], + (-0.30505542, 140.18277471, 35792989.2656269), + 0, + 0, + ), + ( + 50130.98958333, + (960217, 234500), + [9543251.93095296, -41076162.56379041, -263747.00717057], + [2994.40869593, 696.03993248, 7.10272213], + [-32390562.62077149, 27003670.01680953, -218196.24541058], + [0.81261619, 0.54335463, 20.52117372], + 143.4912968, + (330.14840632, -12.18505381), + (187.27656486, -11.95978754), + [ + [9.99936381e-01, 1.03449458e-02, 4.49612532e-03], + [-1.03447615e-02, 9.99946489e-01, -6.42404848e-05], + [-4.49654930e-03, 1.77250538e-05, 9.99989890e-01], + ], + [2.41496422e08, -2.07192684e08, -7.59964859e07], + (-0.29675479, 140.18238966, 35792875.73125207), + 0, + 0, + ), + ], + 
dtype=vissr.ORBIT_PREDICTION_DATA, +) + +ORBIT_PREDICTION_2 = np.array( + [ + ( + 50130.99305556, + (960217, 235000), + [10439220.91492008, -40857543.15396438, -261553.43075696], + [2978.47973561, 761.36477969, 7.52053495], + [-32390319.30020279, 27003830.33282405, -211988.37862591], + [0.80994076, 0.52598377, 20.86317023], + 144.74471913, + (330.15152105, -12.1838997), + (186.026252, -11.95862606), + [ + [9.99936381e-01, 1.03449475e-02, 4.49612609e-03], + [-1.03447632e-02, 9.99946489e-01, -6.42395003e-05], + [-4.49655005e-03, 1.77240537e-05, 9.99989890e-01], + ], + [2.40837919e08, -2.07200148e08, -7.59210011e07], + (-0.28831259, 140.18201066, 35792759.80443729), + 0, + 0, + ), + ( + 50130.99652778, + (960217, 235500), + [11330197.2840407, -40619380.06793167, -259235.04755252], + [2961.12591755, 826.32591367, 7.93473432], + [-32390076.84311398, 27003985.41857829, -205679.40741202], + [0.80685878, 0.50848599, 21.19517045], + 145.99814147, + (330.15463004, -12.18274445), + (184.77593341, -11.95746344), + [ + [9.99936381e-01, 1.03449492e-02, 4.49612685e-03], + [-1.03447650e-02, 9.99946489e-01, -6.42385159e-05], + [-4.49655080e-03, 1.77230537e-05, 9.99989890e-01], + ], + [2.40184218e08, -2.07226967e08, -7.58455830e07], + (-0.27973286, 140.18163787, 35792641.6143761), + 0, + 0, + ), + ( + 50131.0, + (960218, 0), + [12215754.80493221, -40361787.08463053, -256792.97127933], + [2942.35551459, 890.89226454, 8.34512262], + [-32389835.37113104, 27004135.23720251, -199272.35452792], + [0.8033778, 0.49087558, 21.51701595], + 147.2515638, + (330.15773341, -12.18158803), + (183.5256092, -11.95629965), + [ + [9.99936381e-01, 1.03449510e-02, 4.49612761e-03], + [-1.03447667e-02, 9.99946489e-01, -6.42375317e-05], + [-4.49655155e-03, 1.77220539e-05, 9.99989890e-01], + ], + [2.39535744e08, -2.07273025e08, -7.57702305e07], + (-0.2710197, 140.18127143, 35792521.29050537), + 0, + 0, + ), + ( + 50131.00347222, + (960218, 500), + [13095469.82708225, -40084887.27645436, -254228.37467049], + [2922.17747695, 955.03294974, 8.75150409], + [-32389595.00191828, 27004279.7580633, -192770.28953487], + [0.79950572, 0.47316669, 21.82855319], + 148.50498613, + (330.16083128, -12.18043041), + (182.27527951, -11.95513466), + [ + [9.99936381e-01, 1.03449527e-02, 4.49612837e-03], + [-1.03447684e-02, 9.99946489e-01, -6.42365476e-05], + [-4.49655230e-03, 1.77210542e-05, 9.99989890e-01], + ], + [2.38892921e08, -2.07338200e08, -7.56949425e07], + (-0.26217728, 140.18091148, 35792398.96228714), + 0, + 0, + ), + ( + 50131.00694444, + (960218, 1000), + [13968921.48773305, -39788812.95011112, -251542.48890031], + [2900.60142795, 1018.71728887, 9.15368488], + [-32389355.85220329, 27004418.95297137, -186176.32730922], + [0.79525074, 0.45537327, 22.12963356], + 149.75840846, + (330.16392379, -12.17927157), + (181.02494445, -11.95396845), + [ + [9.99936381e-01, 1.03449544e-02, 4.49612913e-03], + [-1.03447701e-02, 9.99946489e-01, -6.42355636e-05], + [-4.49655305e-03, 1.77200547e-05, 9.99989890e-01], + ], + [2.38256170e08, -2.07422360e08, -7.56197178e07], + (-0.25320985, 140.18055815, 35792274.75899146), + 0, + 0, + ), + ( + 50131.01041667, + (960218, 1500), + [14835691.90970188, -39473705.58489136, -248736.60300345], + [2877.63765957, 1081.9148182, 9.55147314], + [-32389118.03536845, 27004552.79890675, -179493.62657611], + [0.79062131, 0.43750908, 22.42011344], + 151.01183079, + (330.16701107, -12.17811148), + (179.77462147, -11.952801), + [ + [9.99936381e-01, 1.03449561e-02, 4.49612989e-03], + [-1.03447719e-02, 9.99946489e-01, 
-6.42345798e-05], + [-4.49655380e-03, 1.77190553e-05, 9.99989890e-01], + ], + [2.37625908e08, -2.07525364e08, -7.55445552e07], + (-0.24412169, 140.18021156, 35792148.80948149), + 0, + 0, + ), + ( + 50131.01388889, + (960218, 2000), + [15695366.40490882, -39139715.76420763, -245812.06324505], + [2853.29712752, 1144.59530548, 9.94467917], + [-32388881.66227116, 27004681.27687033, -172725.38836895], + [0.7856262, 0.41958762, 22.69985431], + 152.26525312, + (330.17009324, -12.17695013), + (178.52427609, -11.95163228), + [ + [9.99936381e-01, 1.03449578e-02, 4.49613064e-03], + [-1.03447736e-02, 9.99946489e-01, -6.42335961e-05], + [-4.49655455e-03, 1.77180562e-05, 9.99989890e-01], + ], + [2.37002549e08, -2.07647061e08, -7.54694534e07], + (-0.23491716, 140.17987182, 35792021.2420001), + 0, + 0, + ), + ( + 50131.01736111, + (960218, 2500), + [16547533.6691137, -38787003.10533711, -242770.27248672], + [2827.5914462, 1206.72876414, 10.33311542], + [-32388646.84104986, 27004804.37195345, -165874.85452439], + [0.78027439, 0.40162218, 22.96872279], + 153.51867545, + (330.17317044, -12.17578748), + (177.27392574, -11.95046228), + [ + [9.99936381e-01, 1.03449595e-02, 4.49613140e-03], + [-1.03447753e-02, 9.99946489e-01, -6.42326125e-05], + [-4.49655529e-03, 1.77170571e-05, 9.99989890e-01], + ], + [2.36386506e08, -2.07787291e08, -7.53944111e07], + (-0.22560065, 140.17953905, 35791892.18395986), + 0, + 0, + ), + ( + 50131.02083333, + (960218, 3000), + [17391785.98229151, -38415736.18212036, -239612.68950141], + [2800.53288309, 1268.28546791, 10.71659666], + [-32388413.67874206, 27004922.07123395, -158945.30610131], + [0.77457509, 0.38362576, 23.2265907], + 154.77209777, + (330.17624281, -12.17462353), + (176.02357057, -11.94929096), + [ + [9.99936381e-01, 1.03449612e-02, 4.49613215e-03], + [-1.03447770e-02, 9.99946489e-01, -6.42316291e-05], + [-4.49655603e-03, 1.77160583e-05, 9.99989890e-01], + ], + [2.35778185e08, -2.07945887e08, -7.53194268e07], + (-0.21617663, 140.17921335, 35791761.76173551), + 0, + 0, + ), + ], + dtype=vissr.ORBIT_PREDICTION_DATA, +) diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index f78e930160..282cc694cb 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -10,182 +10,11 @@ from pyresample.geometry import AreaDefinition import satpy.readers.gms5_vissr_l1b as vissr -import satpy.readers.gms5_vissr_navigation as nav +import satpy.tests.reader_tests.test_gms5_vissr_data as real_world from satpy.readers import FSFile from satpy.tests.reader_tests.utils import get_jit_methods from satpy.tests.utils import make_dataid -# Navigation references computed with JMA's Msial library (files -# VISSR_19960217_2331_IR1.A.IMG and VISSR_19960217_2331_VIS.A.IMG). The VIS -# navigation is slightly off (< 0.01 deg) compared to JMA's reference. -# This is probably due to precision problems with the copied numbers. 
-# fmt: off -IR_NAVIGATION_REFERENCE = [ - { - 'line': 686, - 'pixel': 1680, - 'lon': 139.990380, - 'lat': 35.047056, - 'nav_params': ( - nav.Attitude( - angle_between_earth_and_sun=3.997397917902958, - angle_between_sat_spin_and_z_axis=3.149118633034304, - angle_between_sat_spin_and_yz_plane=0.000546042025980, - ), - nav.Orbit( - greenwich_sidereal_time=2.468529732418296, - declination_from_sat_to_sun=-0.208770861178982, - right_ascension_from_sat_to_sun=3.304369303579407, - sat_position_earth_fixed_x=-32390963.148471601307392, - sat_position_earth_fixed_y=27003395.381247851997614, - sat_position_earth_fixed_z=-228134.860026293463307, - nutation_precession=np.array( - [[0.999936381496146, -0.010344758016410, -0.004496547784299], - [0.010344942303489, 0.999946489495557, 0.000017727054455], - [0.004496123789670, -0.000064242454080, 0.999989890320785]] - ), - ), - nav.ProjectionParameters( - line_offset=1378.5, - pixel_offset=1672.5, - stepping_angle=0.000140000047395, - sampling_angle=0.000095719995443, - misalignment=np.array( - [[0.999999165534973, 0.000510364072397, 0.001214201096445], - [-0.000511951977387, 0.999999046325684, 0.001307720085606], - [-0.001213532872498, -0.001308340579271, 0.999998450279236]] - ), - earth_flattening=0.003352813177897, - earth_equatorial_radius=6378136.0 - ), - ) - }, - { - 'line': 2089, - 'pixel': 1793, - 'lon': 144.996967, - 'lat': -34.959853, - 'nav_params': ( - nav.Attitude( - angle_between_earth_and_sun=3.935707944355762, - angle_between_sat_spin_and_z_axis=3.149118633034304, - angle_between_sat_spin_and_yz_plane=0.000546042025980, - ), - nav.Orbit( - greenwich_sidereal_time=2.530392320846865, - declination_from_sat_to_sun=-0.208713576872247, - right_ascension_from_sat_to_sun=3.242660398458377, - sat_position_earth_fixed_x=-32390273.633551981300116, - sat_position_earth_fixed_y=27003859.543135114014149, - sat_position_earth_fixed_z=-210800.087589388160268, - nutation_precession=np.array( - [[0.999936381432029, -0.010344763228876, -0.004496550050695], - [0.010344947502662, 0.999946489441823, 0.000017724053657], - [0.004496126086653, -0.000064239500295, 0.999989890310647]] - ), - ), - nav.ProjectionParameters( - line_offset=1378.5, - pixel_offset=1672.5, - stepping_angle=0.000140000047395, - sampling_angle=0.000095719995443, - misalignment=np.array( - [[0.999999165534973, 0.000510364072397, 0.001214201096445], - [-0.000511951977387, 0.999999046325684, 0.001307720085606], - [-0.001213532872498, -0.001308340579271, 0.999998450279236]] - ), - earth_flattening=0.003352813177897, - earth_equatorial_radius=6378136 - ), - ) - } -] - - -VIS_NAVIGATION_REFERENCE = [ - { - 'line': 2744, - 'pixel': 6720, - 'lon': 139.975527, - 'lat': 35.078028, - 'nav_params': ( - nav.Attitude( - angle_between_earth_and_sun=3.997397918405798, - angle_between_sat_spin_and_z_axis=3.149118633034304, - angle_between_sat_spin_and_yz_plane=0.000546042025980, - ), - nav.Orbit( - greenwich_sidereal_time=2.468529731914041, - declination_from_sat_to_sun=-0.208770861179448, - right_ascension_from_sat_to_sun=3.304369304082406, - sat_position_earth_fixed_x=-32390963.148477241396904, - sat_position_earth_fixed_y=27003395.381243918091059, - sat_position_earth_fixed_z=-228134.860164520738181, - nutation_precession=np.array( - [[0.999936381496146, -0.010344758016410, -0.004496547784299], - [0.010344942303489, 0.999946489495557, 0.000017727054455], - [0.004496123789670, -0.000064242454080, 0.999989890320785]] - ), - ), - nav.ProjectionParameters( - line_offset=5513.0, - pixel_offset=6688.5, - 
stepping_angle=0.000035000004573, - sampling_angle=0.000023929998861, - misalignment=np.array( - [[0.999999165534973, 0.000510364072397, 0.001214201096445], - [-0.000511951977387, 0.999999046325684, 0.001307720085606], - [-0.001213532872498, -0.001308340579271, 0.999998450279236]] - ), - earth_flattening=0.003352813177897, - earth_equatorial_radius=6378136 - ), - ) - }, - { - 'line': 8356, - 'pixel': 7172, - 'lon': 144.980104, - 'lat': -34.929123, - 'nav_params': ( - nav.Attitude( - angle_between_earth_and_sun=3.935707944858620, - angle_between_sat_spin_and_z_axis=3.149118633034304, - angle_between_sat_spin_and_yz_plane=0.000546042025980, - ), - nav.Orbit( - greenwich_sidereal_time=2.530392320342610, - declination_from_sat_to_sun=-0.208713576872715, - right_ascension_from_sat_to_sun=3.242660398961383, - sat_position_earth_fixed_x=-32390273.633557569235563, - sat_position_earth_fixed_y=27003859.543131537735462, - sat_position_earth_fixed_z=-210800.087734811415430, - nutation_precession=np.array( - [[0.999936381432029, -0.010344763228876, -0.004496550050695], - [0.010344947502662, 0.999946489441823, 0.000017724053657], - [0.004496126086653, -0.000064239500295, 0.999989890310647]] - ), - ), - nav.ProjectionParameters( - line_offset=5513.0, - pixel_offset=6688.5, - stepping_angle=0.000035000004573, - sampling_angle=0.000023929998861, - misalignment=np.array( - [[0.999999165534973, 0.000510364072397, 0.001214201096445], - [-0.000511951977387, 0.999999046325684, 0.001307720085606], - [-0.001213532872498, -0.001308340579271, 0.999998450279236]] - ), - earth_flattening=0.003352813177897, - earth_equatorial_radius=6378136 - ), - ) - }, -] -# fmt: on - -NAVIGATION_REFERENCE = VIS_NAVIGATION_REFERENCE + IR_NAVIGATION_REFERENCE - @pytest.fixture(params=[False, True], autouse=True) def disable_jit(request, monkeypatch): @@ -194,8 +23,7 @@ def disable_jit(request, monkeypatch): Reason: Coverage report is only accurate with jit disabled. 
""" if request.param: - jit_methods = get_jit_methods(nav) - jit_methods.update(get_jit_methods(vissr)) + jit_methods = get_jit_methods(vissr) for name, method in jit_methods.items(): monkeypatch.setattr( name, @@ -203,118 +31,6 @@ def disable_jit(request, monkeypatch): ) -class TestSinglePixelNavigation: - """Test navigation of a single pixel.""" - - @pytest.mark.parametrize( - "point,nav_params,expected", - [ - ((ref["line"], ref["pixel"]), ref["nav_params"], (ref["lon"], ref["lat"])) - for ref in NAVIGATION_REFERENCE - ], - ) - def test_get_lon_lat(self, point, nav_params, expected): - """Test getting lon/lat coordinates for a given pixel.""" - lon, lat = nav.get_lon_lat(point, nav_params) - np.testing.assert_allclose((lon, lat), expected) - - def test_transform_image_coords_to_scanning_angles(self): - """Test transformation from image coordinates to scanning angles.""" - angles = nav.transform_image_coords_to_scanning_angles( - point=np.array([199, 99]), - offset=np.array([100, 200]), - sampling=np.array([0.01, 0.02]), - ) - np.testing.assert_allclose(angles, [-2, 1]) - - def test_transform_scanning_angles_to_satellite_coords(self): - """Test transformation from scanning angles to satellite coordinates.""" - scanning_angles = np.array([np.pi, np.pi / 2]) - misalignment = np.diag([1, 2, 3]).astype(float) - point_sat = nav.transform_scanning_angles_to_satellite_coords( - scanning_angles, misalignment - ) - np.testing.assert_allclose(point_sat, [0, 0, 3], atol=1e-12) - - def test_transform_satellite_to_earth_fixed_coords(self): - """Test transformation from satellite to earth-fixed coordinates.""" - point_sat = np.array([1, 2, 3], dtype=float) - greenwich_sidereal_time = np.pi - sat_sun_angles = np.array([np.pi, np.pi / 2]) - earth_sun_angle = np.pi - spin_angles = np.array([np.pi, np.pi / 2]) - nutation_precession = np.diag([1, 2, 3]).astype(float) - res = nav.transform_satellite_to_earth_fixed_coords( - point_sat, - greenwich_sidereal_time, - sat_sun_angles, - earth_sun_angle, - spin_angles, - nutation_precession, - ) - np.testing.assert_allclose(res, [-3, 1, -2]) - - def test_intersect_view_vector_with_earth(self): - """Test intersection of a view vector with the earth's surface.""" - view_vector = np.array([-1, 0, 0], dtype=float) - sat_pos = np.array([36000 * 1000, 0, 0], dtype=float) - eq_radius = 6371 * 1000 - flattening = 0.003 - ellipsoid = np.array([eq_radius, flattening]) - point = nav.intersect_with_earth(view_vector, sat_pos, ellipsoid) - np.testing.assert_allclose(point, [eq_radius, 0, 0]) - - @pytest.mark.parametrize( - "point_earth_fixed,point_geodetic_exp", - [ - ([0, 0, 1], [0, 90]), - ([0, 0, -1], [0, -90]), - ([1, 0, 0], [0, 0]), - ([-1, 0, 0], [180, 0]), - ([1, 1, 1], [45, 35.426852]), - ], - ) - def test_transform_earth_fixed_to_geodetic_coords( - self, point_earth_fixed, point_geodetic_exp - ): - """Test transformation from earth-fixed to geodetic coordinates.""" - point_geodetic = nav.transform_earth_fixed_to_geodetic_coords( - np.array(point_earth_fixed), 0.003 - ) - np.testing.assert_allclose(point_geodetic, point_geodetic_exp) - - def test_normalize_vector(self): - """Test vector normalization.""" - v = np.array([1, 2, 3], dtype=float) - normed = nav.normalize_vector(v) - np.testing.assert_allclose(normed, v / np.sqrt(14)) - - -class TestImageNavigation: - """Test navigation of an entire image.""" - - def test_get_lons_lats( - self, scan_params, attitude_prediction, orbit_prediction, proj_params - ): - """Test getting lon/lat coordinates.""" - # fmt: off - 
lons_exp = [[-114.56923, -112.096837, -109.559702], - [8.33221, 8.793893, 9.22339], - [15.918476, 16.268354, 16.6332]] - lats_exp = [[-23.078721, -24.629845, -26.133314], - [-42.513409, -39.790231, -37.06392], - [3.342834, 6.07043, 8.795932]] - # fmt: on - lons, lats = nav.get_lons_lats( - lines=np.array([1000, 1500, 2000]), - pixels=np.array([1000, 1500, 2000]), - static_params=(scan_params, proj_params), - predicted_params=(attitude_prediction, orbit_prediction), - ) - np.testing.assert_allclose(lons, lons_exp) - np.testing.assert_allclose(lats, lats_exp) - - class TestEarthMask: """Test getting the earth mask.""" @@ -335,187 +51,6 @@ def test_get_earth_mask(self): np.testing.assert_equal(mask, mask_exp) -class TestPredictionInterpolation: - """Test interpolation of orbit and attitude predictions.""" - - @pytest.mark.parametrize( - "obs_time,expected", [(-1, np.nan), (1.5, 2.5), (5, np.nan)] - ) - def test_interpolate_continuous(self, obs_time, expected): - """Test interpolation of continuous variables.""" - prediction_times = np.array([0, 1, 2, 3]) - predicted_values = np.array([1, 2, 3, 4]) - res = nav.interpolate_continuous(obs_time, prediction_times, predicted_values) - np.testing.assert_allclose(res, expected) - - @pytest.mark.parametrize( - "obs_time,expected", - [ - (-1, np.nan), - (1.5, 0.75 * np.pi), - (2.5, -0.75 * np.pi), - (3.5, -0.25 * np.pi), - (5, np.nan), - ], - ) - def test_interpolate_angles(self, obs_time, expected): - """Test interpolation of periodic angles.""" - prediction_times = np.array([0, 1, 2, 3, 4]) - predicted_angles = np.array( - [0, 0.5 * np.pi, np.pi, 1.5 * np.pi, 2 * np.pi] - ) # already unwrapped - res = nav.interpolate_angles(obs_time, prediction_times, predicted_angles) - np.testing.assert_allclose(res, expected) - - @pytest.mark.parametrize( - "obs_time,expected", - [ - (-1, np.nan * np.ones((2, 2))), - (1.5, [[1, 0], [0, 2]]), - (3, np.nan * np.ones((2, 2))), - ], - ) - def test_interpolate_nearest(self, obs_time, expected): - """Test nearest neighbour interpolation.""" - prediction_times = np.array([0, 1, 2]) - predicted_angles = np.array( - [np.zeros((2, 2)), np.diag((1, 2)), np.zeros((2, 2))] - ) - res = nav.interpolate_nearest(obs_time, prediction_times, predicted_angles) - np.testing.assert_allclose(res, expected) - - def test_interpolate_orbit_prediction( - self, obs_time, orbit_prediction, orbit_expected - ): - """Test interpolating orbit prediction.""" - orbit_prediction = orbit_prediction.to_numba() - orbit = nav.interpolate_orbit_prediction(orbit_prediction, obs_time) - assert_namedtuple_close(orbit, orbit_expected) - - def test_interpolate_attitude_prediction( - self, obs_time, attitude_prediction, attitude_expected - ): - """Test interpolating attitude prediction.""" - attitude_prediction = attitude_prediction.to_numba() - attitude = nav.interpolate_attitude_prediction(attitude_prediction, obs_time) - assert_namedtuple_close(attitude, attitude_expected) - - @pytest.fixture - def obs_time(self): - """Get observation time.""" - return 2.5 - - @pytest.fixture - def orbit_expected(self): - """Get expected orbit.""" - return nav.Orbit( - greenwich_sidereal_time=1.5, - declination_from_sat_to_sun=1.6, - right_ascension_from_sat_to_sun=1.7, - sat_position_earth_fixed_x=1.8, - sat_position_earth_fixed_y=1.9, - sat_position_earth_fixed_z=2.0, - nutation_precession=1.6 * np.identity(3), - ) - - @pytest.fixture - def attitude_expected(self): - """Get expected attitude.""" - return nav.Attitude( - angle_between_earth_and_sun=1.5, - 
angle_between_sat_spin_and_z_axis=1.6, - angle_between_sat_spin_and_yz_plane=1.7, - ) - - -@pytest.fixture -def sampling_angle(): - """Get sampling angle.""" - return 0.000095719995443 - - -@pytest.fixture -def scan_params(sampling_angle): - """Get scanning parameters.""" - return nav.ScanningParameters( - start_time_of_scan=0, - spinning_rate=0.5, - num_sensors=1, - sampling_angle=sampling_angle, - ) - - -@pytest.fixture -def attitude_prediction(): - """Get attitude prediction.""" - return nav.AttitudePrediction( - prediction_times=np.array([1.0, 2.0, 3.0]), - angle_between_earth_and_sun=np.array([0.0, 1.0, 2.0]), - angle_between_sat_spin_and_z_axis=np.array([0.1, 1.1, 2.1]), - angle_between_sat_spin_and_yz_plane=np.array([0.2, 1.2, 2.2]), - ) - - -@pytest.fixture -def orbit_prediction(): - """Get orbit prediction.""" - return nav.OrbitPrediction( - prediction_times=np.array([1.0, 2.0, 3.0, 4.0]), - greenwich_sidereal_time=np.array([0.0, 1.0, 2.0, 3.0]), - declination_from_sat_to_sun=np.array([0.1, 1.1, 2.1, 3.1]), - right_ascension_from_sat_to_sun=np.array([0.2, 1.2, 2.2, 3.2]), - sat_position_earth_fixed_x=np.array([0.3, 1.3, 2.3, 3.3]), - sat_position_earth_fixed_y=np.array([0.4, 1.4, 2.4, 3.4]), - sat_position_earth_fixed_z=np.array([0.5, 1.5, 2.5, 3.5]), - nutation_precession=np.array( - [ - 0.6 * np.identity(3), - 1.6 * np.identity(3), - 2.6 * np.identity(3), - 3.6 * np.identity(3), - ] - ), - ) - - -@pytest.fixture -def proj_params(sampling_angle): - """Get projection parameters.""" - return nav.ProjectionParameters( - line_offset=1378.5, - pixel_offset=1672.5, - stepping_angle=0.000140000047395, - sampling_angle=sampling_angle, - misalignment=np.identity(3).astype(np.float64), - earth_flattening=0.003352813177897, - earth_equatorial_radius=6378136, - ) - - -def test_get_observation_time(): - """Test getting a pixel's observation time.""" - scan_params = nav.ScanningParameters( - start_time_of_scan=50000.0, - spinning_rate=100, - num_sensors=1, - sampling_angle=0.01, - ) - point = np.array([11, 100]) - obs_time = nav.get_observation_time(point, scan_params) - np.testing.assert_allclose(obs_time, 50000.0000705496871047) - - -def assert_namedtuple_close(a, b): - """Assert that two numba namedtuples are approximately equal.""" - assert a.__class__ == b.__class__ - for attr in a._fields: - np.testing.assert_allclose( - getattr(a, attr), - getattr(b, attr), - err_msg="{} attribute {} differs".format(a.__class__, attr), - ) - - class TestFileHandler: """Test VISSR file handler.""" @@ -705,822 +240,21 @@ def coordinate_conversion(self): def attitude_prediction(self): """Get attitude prediction.""" att_pred = np.zeros(1, dtype=vissr.ATTITUDE_PREDICTION) - att_pred["data"] = np.array( - [ - ( - 50130.93055556, - (19960217, 222000), - 3.14911863, - 0.00054604, - 4.3324597, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50130.93402778, - (19960217, 222500), - 3.14911863, - 0.00054604, - 4.31064812, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50130.9375, - (19960217, 223000), - 3.14911863, - 0.00054604, - 4.28883633, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50130.94097222, - (19960217, 223500), - 3.14911863, - 0.00054604, - 4.26702432, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50130.94444444, - (19960217, 224000), - 3.14911863, - 0.00054604, - 4.2452121, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50130.94791667, - (19960217, 224500), - 3.14911863, - 
0.00054604, - 4.22339966, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50130.95138889, - (19960217, 225000), - 3.14911863, - 0.00054604, - 4.201587, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50130.95486111, - (19960217, 225500), - 3.14911863, - 0.00054604, - 4.17977411, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50130.95833333, - (19960217, 230000), - 3.14911863, - 0.00054604, - 4.157961, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50130.96180556, - (19960217, 230500), - 3.14911863, - 0.00054604, - 4.13614765, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50130.96527778, - (19960217, 231000), - 3.14911863, - 0.00054604, - 4.11433408, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50130.96875, - (19960217, 231500), - 3.14911863, - 0.00054604, - 4.09252027, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50130.97222222, - (19960217, 232000), - 3.14911863, - 0.00054604, - 4.07070622, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50130.97569444, - (19960217, 232500), - 3.14911863, - 0.00054604, - 4.04889193, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50130.97916667, - (19960217, 233000), - 3.14911863, - 0.00054604, - 4.02707741, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50130.98263889, - (19960217, 233500), - 3.14911863, - 0.00054604, - 4.00526265, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50130.98611111, - (19960217, 234000), - 3.14911863, - 0.00054604, - 3.98344765, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50130.98958333, - (19960217, 234500), - 3.14911863, - 0.00054604, - 3.96163241, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50130.99305556, - (19960217, 235000), - 3.14911863, - 0.00054604, - 3.93981692, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50130.99652778, - (19960217, 235500), - 3.14911863, - 0.00054604, - 3.9180012, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50131.0, - (19960218, 0), - 3.14911863, - 0.00054604, - 3.89618523, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50131.00347222, - (19960218, 500), - 3.14911863, - 0.00054604, - 3.87436903, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50131.00694444, - (19960218, 1000), - 3.14911863, - 0.00054604, - 3.85255258, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50131.01041667, - (19960218, 1500), - 3.14911863, - 0.00054604, - 3.8307359, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50131.01388889, - (19960218, 2000), - 3.14911863, - 0.00054604, - 3.80891898, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50131.01736111, - (19960218, 2500), - 3.14911863, - 0.00054604, - 3.78710182, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50131.02083333, - (19960218, 3000), - 3.14911863, - 0.00054604, - 3.76528442, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50131.02430556, - (19960218, 3500), - 3.14911863, - 0.00054604, - 3.74346679, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50131.02777778, - (19960218, 4000), - 3.14911863, - 0.00054604, - 3.72164893, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( 
- 50131.03125, - (19960218, 4500), - 3.14911863, - 0.00054604, - 3.69983084, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50131.03472222, - (19960218, 5000), - 3.14911863, - 0.00054604, - 3.67801252, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50131.03819444, - (19960218, 5500), - 3.14911863, - 0.00054604, - 3.65619398, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ( - 50131.04166667, - (19960218, 10000), - 3.14911863, - 0.00054604, - 3.63437521, - 99.21774527, - 0.97415452, - -1.56984055, - 0.0, - 0, - 0, - ), - ], - dtype=vissr.ATTITUDE_PREDICTION_DATA, - ) + att_pred["data"] = real_world.ATTITUDE_PREDICTION return att_pred @pytest.fixture def orbit_prediction_1(self): """Get first block of orbit prediction data.""" orb_pred = np.zeros(1, dtype=vissr.ORBIT_PREDICTION) - orb_pred["data"] = np.array( - [ - ( - 50130.96180556, - (960217, 230500), - [2247604.14185506, -42110997.39399951, -276688.79765022], - [3069.77904265, 164.12584895, 3.65437628], - [-32392525.09983424, 27002204.93121811, -263873.25702763], - [0.81859376, 0.6760037, 17.44588753], - 133.46391815, - (330.12326803, -12.19424863), - (197.27884747, -11.96904141), - [ - [9.99936382e-01, 1.03449318e-02, 4.49611916e-03], - [-1.03447475e-02, 9.99946490e-01, -6.42483646e-05], - [-4.49654321e-03, 1.77330598e-05, 9.99989890e-01], - ], - [2.46885475e08, -2.07840219e08, -7.66028692e07], - (-0.35887085, 140.18562594, 35793706.31768975), - 0, - 0, - ), - ( - 50130.96527778, - (960217, 231000), - [3167927.33749398, -42051692.51095297, -275526.52514815], - [3065.46435995, 231.22434208, 4.09379482], - [-32392279.4626506, 27002405.27592725, -258576.96255205], - [0.81939962, 0.66017389, 17.86159393], - 134.71734048, - (330.12643276, -12.19310271), - (196.02858456, -11.9678881), - [ - [9.99936382e-01, 1.03449336e-02, 4.49611993e-03], - [-1.03447493e-02, 9.99946490e-01, -6.42473793e-05], - [-4.49654398e-03, 1.77320586e-05, 9.99989890e-01], - ], - [2.46204142e08, -2.07689897e08, -7.65268207e07], - (-0.35166851, 140.18520316, 35793613.0815237), - 0, - 0, - ), - ( - 50130.96875, - (960217, 231500), - [4086736.12968183, -41972273.80964861, -274232.7185828], - [3059.68341675, 298.21262775, 4.53123515], - [-32392033.65156128, 27002600.83510851, -253157.23498394], - [0.81975174, 0.6441, 18.26873686], - 135.97076281, - (330.12959087, -12.19195587), - (194.77831505, -11.96673388), - [ - [9.99936382e-01, 1.03449353e-02, 4.49612071e-03], - [-1.03447510e-02, 9.99946490e-01, -6.42463940e-05], - [-4.49654474e-03, 1.77310575e-05, 9.99989890e-01], - ], - [2.45524133e08, -2.07559497e08, -7.64508451e07], - (-0.3442983, 140.18478523, 35793516.57370046), - 0, - 0, - ), - ( - 50130.97222222, - (960217, 232000), - [5003591.03339227, -41872779.15809826, -272808.0027587], - [3052.43895532, 365.05867777, 4.9664885], - [-32391787.80234722, 27002791.53735474, -247616.67261456], - [0.81965461, 0.62779672, 18.66712192], - 137.22418515, - (330.13274246, -12.19080808), - (193.52803902, -11.9655787), - [ - [9.99936382e-01, 1.03449371e-02, 4.49612148e-03], - [-1.03447528e-02, 9.99946490e-01, -6.42454089e-05], - [-4.49654551e-03, 1.77300565e-05, 9.99989890e-01], - ], - [2.44845888e08, -2.07448982e08, -7.63749418e07], - (-0.33676374, 140.18437233, 35793416.91561355), - 0, - 0, - ), - ( - 50130.97569444, - (960217, 232500), - [5918053.49286455, -41753256.02295399, -271253.06495935], - [3043.73441705, 431.73053079, 5.39934712], - [-32391542.0492856, 27002977.3157848, -241957.93142027], - [0.81911313, 
0.61127876, 19.05655891], - 138.47760748, - (330.13588763, -12.1896593), - (192.27775657, -11.96442254), - [ - [9.99936382e-01, 1.03449388e-02, 4.49612225e-03], - [-1.03447545e-02, 9.99946490e-01, -6.42444238e-05], - [-4.49654627e-03, 1.77290557e-05, 9.99989890e-01], - ], - [2.44169846e08, -2.07358303e08, -7.62991102e07], - (-0.32906846, 140.18396465, 35793314.23041636), - 0, - 0, - ), - ( - 50130.97916667, - (960217, 233000), - [6829686.08751574, -41613761.44760592, -269568.65462124], - [3033.5739409, 498.19630731, 5.82960444], - [-32391296.52466749, 27003158.10847847, -236183.72381214], - [0.81813262, 0.59456087, 19.43686189], - 139.73102981, - (330.1390265, -12.18850951), - (191.02746783, -11.96326537), - [ - [9.99936382e-01, 1.03449406e-02, 4.49612302e-03], - [-1.03447563e-02, 9.99946490e-01, -6.42434389e-05], - [-4.49654703e-03, 1.77280550e-05, 9.99989890e-01], - ], - [2.43496443e08, -2.07287406e08, -7.62233495e07], - (-0.32121612, 140.18356238, 35793208.6428103), - 0, - 0, - ), - ( - 50130.98263889, - (960217, 233500), - [7738052.74476409, -41454362.02480648, -267755.58296603], - [3021.96236148, 564.42422513, 6.25705512], - [-32391051.35918404, 27003333.85786499, -230296.81731314], - [0.81671881, 0.57765777, 19.80784932], - 140.98445214, - (330.14215916, -12.18735869), - (189.77717289, -11.96210717), - [ - [9.99936381e-01, 1.03449423e-02, 4.49612379e-03], - [-1.03447580e-02, 9.99946489e-01, -6.42424541e-05], - [-4.49654778e-03, 1.77270545e-05, 9.99989890e-01], - ], - [2.42826115e08, -2.07236222e08, -7.61476592e07], - (-0.3132105, 140.18316567, 35793100.27882991), - 0, - 0, - ), - ( - 50130.98611111, - (960217, 234000), - [8642718.9445816, -41275133.86582235, -265814.72261683], - [3008.90520686, 630.38261431, 6.68149519], - [-32390806.68247503, 27003504.50991426, -224300.03325666], - [0.81487783, 0.56058415, 20.16934411], - 142.23787447, - (330.14528573, -12.18620679), - (188.52687186, -11.9609479), - [ - [9.99936381e-01, 1.03449440e-02, 4.49612456e-03], - [-1.03447598e-02, 9.99946489e-01, -6.42414694e-05], - [-4.49654854e-03, 1.77260540e-05, 9.99989890e-01], - ], - [2.42159297e08, -2.07204676e08, -7.60720382e07], - (-0.30505542, 140.18277471, 35792989.2656269), - 0, - 0, - ), - ( - 50130.98958333, - (960217, 234500), - [9543251.93095296, -41076162.56379041, -263747.00717057], - [2994.40869593, 696.03993248, 7.10272213], - [-32390562.62077149, 27003670.01680953, -218196.24541058], - [0.81261619, 0.54335463, 20.52117372], - 143.4912968, - (330.14840632, -12.18505381), - (187.27656486, -11.95978754), - [ - [9.99936381e-01, 1.03449458e-02, 4.49612532e-03], - [-1.03447615e-02, 9.99946489e-01, -6.42404848e-05], - [-4.49654930e-03, 1.77250538e-05, 9.99989890e-01], - ], - [2.41496422e08, -2.07192684e08, -7.59964859e07], - (-0.29675479, 140.18238966, 35792875.73125207), - 0, - 0, - ), - ], - dtype=vissr.ORBIT_PREDICTION_DATA, - ) + orb_pred["data"] = real_world.ORBIT_PREDICTION_1 return orb_pred @pytest.fixture def orbit_prediction_2(self): """Get second block of orbit prediction data.""" orb_pred = np.zeros(1, dtype=vissr.ORBIT_PREDICTION) - orb_pred["data"] = np.array( - [ - ( - 50130.99305556, - (960217, 235000), - [10439220.91492008, -40857543.15396438, -261553.43075696], - [2978.47973561, 761.36477969, 7.52053495], - [-32390319.30020279, 27003830.33282405, -211988.37862591], - [0.80994076, 0.52598377, 20.86317023], - 144.74471913, - (330.15152105, -12.1838997), - (186.026252, -11.95862606), - [ - [9.99936381e-01, 1.03449475e-02, 4.49612609e-03], - [-1.03447632e-02, 9.99946489e-01, 
-6.42395003e-05], - [-4.49655005e-03, 1.77240537e-05, 9.99989890e-01], - ], - [2.40837919e08, -2.07200148e08, -7.59210011e07], - (-0.28831259, 140.18201066, 35792759.80443729), - 0, - 0, - ), - ( - 50130.99652778, - (960217, 235500), - [11330197.2840407, -40619380.06793167, -259235.04755252], - [2961.12591755, 826.32591367, 7.93473432], - [-32390076.84311398, 27003985.41857829, -205679.40741202], - [0.80685878, 0.50848599, 21.19517045], - 145.99814147, - (330.15463004, -12.18274445), - (184.77593341, -11.95746344), - [ - [9.99936381e-01, 1.03449492e-02, 4.49612685e-03], - [-1.03447650e-02, 9.99946489e-01, -6.42385159e-05], - [-4.49655080e-03, 1.77230537e-05, 9.99989890e-01], - ], - [2.40184218e08, -2.07226967e08, -7.58455830e07], - (-0.27973286, 140.18163787, 35792641.6143761), - 0, - 0, - ), - ( - 50131.0, - (960218, 0), - [12215754.80493221, -40361787.08463053, -256792.97127933], - [2942.35551459, 890.89226454, 8.34512262], - [-32389835.37113104, 27004135.23720251, -199272.35452792], - [0.8033778, 0.49087558, 21.51701595], - 147.2515638, - (330.15773341, -12.18158803), - (183.5256092, -11.95629965), - [ - [9.99936381e-01, 1.03449510e-02, 4.49612761e-03], - [-1.03447667e-02, 9.99946489e-01, -6.42375317e-05], - [-4.49655155e-03, 1.77220539e-05, 9.99989890e-01], - ], - [2.39535744e08, -2.07273025e08, -7.57702305e07], - (-0.2710197, 140.18127143, 35792521.29050537), - 0, - 0, - ), - ( - 50131.00347222, - (960218, 500), - [13095469.82708225, -40084887.27645436, -254228.37467049], - [2922.17747695, 955.03294974, 8.75150409], - [-32389595.00191828, 27004279.7580633, -192770.28953487], - [0.79950572, 0.47316669, 21.82855319], - 148.50498613, - (330.16083128, -12.18043041), - (182.27527951, -11.95513466), - [ - [9.99936381e-01, 1.03449527e-02, 4.49612837e-03], - [-1.03447684e-02, 9.99946489e-01, -6.42365476e-05], - [-4.49655230e-03, 1.77210542e-05, 9.99989890e-01], - ], - [2.38892921e08, -2.07338200e08, -7.56949425e07], - (-0.26217728, 140.18091148, 35792398.96228714), - 0, - 0, - ), - ( - 50131.00694444, - (960218, 1000), - [13968921.48773305, -39788812.95011112, -251542.48890031], - [2900.60142795, 1018.71728887, 9.15368488], - [-32389355.85220329, 27004418.95297137, -186176.32730922], - [0.79525074, 0.45537327, 22.12963356], - 149.75840846, - (330.16392379, -12.17927157), - (181.02494445, -11.95396845), - [ - [9.99936381e-01, 1.03449544e-02, 4.49612913e-03], - [-1.03447701e-02, 9.99946489e-01, -6.42355636e-05], - [-4.49655305e-03, 1.77200547e-05, 9.99989890e-01], - ], - [2.38256170e08, -2.07422360e08, -7.56197178e07], - (-0.25320985, 140.18055815, 35792274.75899146), - 0, - 0, - ), - ( - 50131.01041667, - (960218, 1500), - [14835691.90970188, -39473705.58489136, -248736.60300345], - [2877.63765957, 1081.9148182, 9.55147314], - [-32389118.03536845, 27004552.79890675, -179493.62657611], - [0.79062131, 0.43750908, 22.42011344], - 151.01183079, - (330.16701107, -12.17811148), - (179.77462147, -11.952801), - [ - [9.99936381e-01, 1.03449561e-02, 4.49612989e-03], - [-1.03447719e-02, 9.99946489e-01, -6.42345798e-05], - [-4.49655380e-03, 1.77190553e-05, 9.99989890e-01], - ], - [2.37625908e08, -2.07525364e08, -7.55445552e07], - (-0.24412169, 140.18021156, 35792148.80948149), - 0, - 0, - ), - ( - 50131.01388889, - (960218, 2000), - [15695366.40490882, -39139715.76420763, -245812.06324505], - [2853.29712752, 1144.59530548, 9.94467917], - [-32388881.66227116, 27004681.27687033, -172725.38836895], - [0.7856262, 0.41958762, 22.69985431], - 152.26525312, - (330.17009324, -12.17695013), - (178.52427609, 
-11.95163228), - [ - [9.99936381e-01, 1.03449578e-02, 4.49613064e-03], - [-1.03447736e-02, 9.99946489e-01, -6.42335961e-05], - [-4.49655455e-03, 1.77180562e-05, 9.99989890e-01], - ], - [2.37002549e08, -2.07647061e08, -7.54694534e07], - (-0.23491716, 140.17987182, 35792021.2420001), - 0, - 0, - ), - ( - 50131.01736111, - (960218, 2500), - [16547533.6691137, -38787003.10533711, -242770.27248672], - [2827.5914462, 1206.72876414, 10.33311542], - [-32388646.84104986, 27004804.37195345, -165874.85452439], - [0.78027439, 0.40162218, 22.96872279], - 153.51867545, - (330.17317044, -12.17578748), - (177.27392574, -11.95046228), - [ - [9.99936381e-01, 1.03449595e-02, 4.49613140e-03], - [-1.03447753e-02, 9.99946489e-01, -6.42326125e-05], - [-4.49655529e-03, 1.77170571e-05, 9.99989890e-01], - ], - [2.36386506e08, -2.07787291e08, -7.53944111e07], - (-0.22560065, 140.17953905, 35791892.18395986), - 0, - 0, - ), - ( - 50131.02083333, - (960218, 3000), - [17391785.98229151, -38415736.18212036, -239612.68950141], - [2800.53288309, 1268.28546791, 10.71659666], - [-32388413.67874206, 27004922.07123395, -158945.30610131], - [0.77457509, 0.38362576, 23.2265907], - 154.77209777, - (330.17624281, -12.17462353), - (176.02357057, -11.94929096), - [ - [9.99936381e-01, 1.03449612e-02, 4.49613215e-03], - [-1.03447770e-02, 9.99946489e-01, -6.42316291e-05], - [-4.49655603e-03, 1.77160583e-05, 9.99989890e-01], - ], - [2.35778185e08, -2.07945887e08, -7.53194268e07], - (-0.21617663, 140.17921335, 35791761.76173551), - 0, - 0, - ), - ], - dtype=vissr.ORBIT_PREDICTION_DATA, - ) + orb_pred["data"] = real_world.ORBIT_PREDICTION_2 return orb_pred @pytest.fixture diff --git a/satpy/tests/reader_tests/test_gms5_vissr_navigation.py b/satpy/tests/reader_tests/test_gms5_vissr_navigation.py new file mode 100644 index 0000000000..c34914ba8f --- /dev/null +++ b/satpy/tests/reader_tests/test_gms5_vissr_navigation.py @@ -0,0 +1,486 @@ +"""Unit tests for GMS-5 VISSR navigation.""" + +import numpy as np +import pytest + +import satpy.readers.gms5_vissr_navigation as nav +from satpy.tests.reader_tests.utils import get_jit_methods + +# Navigation references computed with JMA's Msial library (files +# VISSR_19960217_2331_IR1.A.IMG and VISSR_19960217_2331_VIS.A.IMG). The VIS +# navigation is slightly off (< 0.01 deg) compared to JMA's reference. +# This is probably due to precision problems with the copied numbers. 
+# fmt: off +IR_NAVIGATION_REFERENCE = [ + { + 'line': 686, + 'pixel': 1680, + 'lon': 139.990380, + 'lat': 35.047056, + 'nav_params': ( + nav.Attitude( + angle_between_earth_and_sun=3.997397917902958, + angle_between_sat_spin_and_z_axis=3.149118633034304, + angle_between_sat_spin_and_yz_plane=0.000546042025980, + ), + nav.Orbit( + greenwich_sidereal_time=2.468529732418296, + declination_from_sat_to_sun=-0.208770861178982, + right_ascension_from_sat_to_sun=3.304369303579407, + sat_position_earth_fixed_x=-32390963.148471601307392, + sat_position_earth_fixed_y=27003395.381247851997614, + sat_position_earth_fixed_z=-228134.860026293463307, + nutation_precession=np.array( + [[0.999936381496146, -0.010344758016410, -0.004496547784299], + [0.010344942303489, 0.999946489495557, 0.000017727054455], + [0.004496123789670, -0.000064242454080, 0.999989890320785]] + ), + ), + nav.ProjectionParameters( + line_offset=1378.5, + pixel_offset=1672.5, + stepping_angle=0.000140000047395, + sampling_angle=0.000095719995443, + misalignment=np.array( + [[0.999999165534973, 0.000510364072397, 0.001214201096445], + [-0.000511951977387, 0.999999046325684, 0.001307720085606], + [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + ), + earth_flattening=0.003352813177897, + earth_equatorial_radius=6378136.0 + ), + ) + }, + { + 'line': 2089, + 'pixel': 1793, + 'lon': 144.996967, + 'lat': -34.959853, + 'nav_params': ( + nav.Attitude( + angle_between_earth_and_sun=3.935707944355762, + angle_between_sat_spin_and_z_axis=3.149118633034304, + angle_between_sat_spin_and_yz_plane=0.000546042025980, + ), + nav.Orbit( + greenwich_sidereal_time=2.530392320846865, + declination_from_sat_to_sun=-0.208713576872247, + right_ascension_from_sat_to_sun=3.242660398458377, + sat_position_earth_fixed_x=-32390273.633551981300116, + sat_position_earth_fixed_y=27003859.543135114014149, + sat_position_earth_fixed_z=-210800.087589388160268, + nutation_precession=np.array( + [[0.999936381432029, -0.010344763228876, -0.004496550050695], + [0.010344947502662, 0.999946489441823, 0.000017724053657], + [0.004496126086653, -0.000064239500295, 0.999989890310647]] + ), + ), + nav.ProjectionParameters( + line_offset=1378.5, + pixel_offset=1672.5, + stepping_angle=0.000140000047395, + sampling_angle=0.000095719995443, + misalignment=np.array( + [[0.999999165534973, 0.000510364072397, 0.001214201096445], + [-0.000511951977387, 0.999999046325684, 0.001307720085606], + [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + ), + earth_flattening=0.003352813177897, + earth_equatorial_radius=6378136 + ), + ) + } +] + + +VIS_NAVIGATION_REFERENCE = [ + { + 'line': 2744, + 'pixel': 6720, + 'lon': 139.975527, + 'lat': 35.078028, + 'nav_params': ( + nav.Attitude( + angle_between_earth_and_sun=3.997397918405798, + angle_between_sat_spin_and_z_axis=3.149118633034304, + angle_between_sat_spin_and_yz_plane=0.000546042025980, + ), + nav.Orbit( + greenwich_sidereal_time=2.468529731914041, + declination_from_sat_to_sun=-0.208770861179448, + right_ascension_from_sat_to_sun=3.304369304082406, + sat_position_earth_fixed_x=-32390963.148477241396904, + sat_position_earth_fixed_y=27003395.381243918091059, + sat_position_earth_fixed_z=-228134.860164520738181, + nutation_precession=np.array( + [[0.999936381496146, -0.010344758016410, -0.004496547784299], + [0.010344942303489, 0.999946489495557, 0.000017727054455], + [0.004496123789670, -0.000064242454080, 0.999989890320785]] + ), + ), + nav.ProjectionParameters( + line_offset=5513.0, + pixel_offset=6688.5, + 
stepping_angle=0.000035000004573, + sampling_angle=0.000023929998861, + misalignment=np.array( + [[0.999999165534973, 0.000510364072397, 0.001214201096445], + [-0.000511951977387, 0.999999046325684, 0.001307720085606], + [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + ), + earth_flattening=0.003352813177897, + earth_equatorial_radius=6378136 + ), + ) + }, + { + 'line': 8356, + 'pixel': 7172, + 'lon': 144.980104, + 'lat': -34.929123, + 'nav_params': ( + nav.Attitude( + angle_between_earth_and_sun=3.935707944858620, + angle_between_sat_spin_and_z_axis=3.149118633034304, + angle_between_sat_spin_and_yz_plane=0.000546042025980, + ), + nav.Orbit( + greenwich_sidereal_time=2.530392320342610, + declination_from_sat_to_sun=-0.208713576872715, + right_ascension_from_sat_to_sun=3.242660398961383, + sat_position_earth_fixed_x=-32390273.633557569235563, + sat_position_earth_fixed_y=27003859.543131537735462, + sat_position_earth_fixed_z=-210800.087734811415430, + nutation_precession=np.array( + [[0.999936381432029, -0.010344763228876, -0.004496550050695], + [0.010344947502662, 0.999946489441823, 0.000017724053657], + [0.004496126086653, -0.000064239500295, 0.999989890310647]] + ), + ), + nav.ProjectionParameters( + line_offset=5513.0, + pixel_offset=6688.5, + stepping_angle=0.000035000004573, + sampling_angle=0.000023929998861, + misalignment=np.array( + [[0.999999165534973, 0.000510364072397, 0.001214201096445], + [-0.000511951977387, 0.999999046325684, 0.001307720085606], + [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + ), + earth_flattening=0.003352813177897, + earth_equatorial_radius=6378136 + ), + ) + }, +] +# fmt: on + +NAVIGATION_REFERENCE = VIS_NAVIGATION_REFERENCE + IR_NAVIGATION_REFERENCE + + +@pytest.fixture(params=[False, True], autouse=True) +def disable_jit(request, monkeypatch): + """Run tests with jit enabled and disabled. + + Reason: Coverage report is only accurate with jit disabled. 
+ """ + if request.param: + jit_methods = get_jit_methods(nav) + for name, method in jit_methods.items(): + monkeypatch.setattr( + name, + method.py_func + ) + + +class TestSinglePixelNavigation: + """Test navigation of a single pixel.""" + + @pytest.mark.parametrize( + "point,nav_params,expected", + [ + ((ref["line"], ref["pixel"]), ref["nav_params"], (ref["lon"], ref["lat"])) + for ref in NAVIGATION_REFERENCE + ], + ) + def test_get_lon_lat(self, point, nav_params, expected): + """Test getting lon/lat coordinates for a given pixel.""" + lon, lat = nav.get_lon_lat(point, nav_params) + np.testing.assert_allclose((lon, lat), expected) + + def test_transform_image_coords_to_scanning_angles(self): + """Test transformation from image coordinates to scanning angles.""" + angles = nav.transform_image_coords_to_scanning_angles( + point=np.array([199, 99]), + offset=np.array([100, 200]), + sampling=np.array([0.01, 0.02]), + ) + np.testing.assert_allclose(angles, [-2, 1]) + + def test_transform_scanning_angles_to_satellite_coords(self): + """Test transformation from scanning angles to satellite coordinates.""" + scanning_angles = np.array([np.pi, np.pi / 2]) + misalignment = np.diag([1, 2, 3]).astype(float) + point_sat = nav.transform_scanning_angles_to_satellite_coords( + scanning_angles, misalignment + ) + np.testing.assert_allclose(point_sat, [0, 0, 3], atol=1e-12) + + def test_transform_satellite_to_earth_fixed_coords(self): + """Test transformation from satellite to earth-fixed coordinates.""" + point_sat = np.array([1, 2, 3], dtype=float) + greenwich_sidereal_time = np.pi + sat_sun_angles = np.array([np.pi, np.pi / 2]) + earth_sun_angle = np.pi + spin_angles = np.array([np.pi, np.pi / 2]) + nutation_precession = np.diag([1, 2, 3]).astype(float) + res = nav.transform_satellite_to_earth_fixed_coords( + point_sat, + greenwich_sidereal_time, + sat_sun_angles, + earth_sun_angle, + spin_angles, + nutation_precession, + ) + np.testing.assert_allclose(res, [-3, 1, -2]) + + def test_intersect_view_vector_with_earth(self): + """Test intersection of a view vector with the earth's surface.""" + view_vector = np.array([-1, 0, 0], dtype=float) + sat_pos = np.array([36000 * 1000, 0, 0], dtype=float) + eq_radius = 6371 * 1000 + flattening = 0.003 + ellipsoid = np.array([eq_radius, flattening]) + point = nav.intersect_with_earth(view_vector, sat_pos, ellipsoid) + np.testing.assert_allclose(point, [eq_radius, 0, 0]) + + @pytest.mark.parametrize( + "point_earth_fixed,point_geodetic_exp", + [ + ([0, 0, 1], [0, 90]), + ([0, 0, -1], [0, -90]), + ([1, 0, 0], [0, 0]), + ([-1, 0, 0], [180, 0]), + ([1, 1, 1], [45, 35.426852]), + ], + ) + def test_transform_earth_fixed_to_geodetic_coords( + self, point_earth_fixed, point_geodetic_exp + ): + """Test transformation from earth-fixed to geodetic coordinates.""" + point_geodetic = nav.transform_earth_fixed_to_geodetic_coords( + np.array(point_earth_fixed), 0.003 + ) + np.testing.assert_allclose(point_geodetic, point_geodetic_exp) + + def test_normalize_vector(self): + """Test vector normalization.""" + v = np.array([1, 2, 3], dtype=float) + normed = nav.normalize_vector(v) + np.testing.assert_allclose(normed, v / np.sqrt(14)) + + +class TestImageNavigation: + """Test navigation of an entire image.""" + + def test_get_lons_lats( + self, scan_params, attitude_prediction, orbit_prediction, proj_params + ): + """Test getting lon/lat coordinates.""" + # fmt: off + lons_exp = [[-114.56923, -112.096837, -109.559702], + [8.33221, 8.793893, 9.22339], + [15.918476, 16.268354, 
16.6332]] + lats_exp = [[-23.078721, -24.629845, -26.133314], + [-42.513409, -39.790231, -37.06392], + [3.342834, 6.07043, 8.795932]] + # fmt: on + lons, lats = nav.get_lons_lats( + lines=np.array([1000, 1500, 2000]), + pixels=np.array([1000, 1500, 2000]), + static_params=(scan_params, proj_params), + predicted_params=(attitude_prediction, orbit_prediction), + ) + np.testing.assert_allclose(lons, lons_exp) + np.testing.assert_allclose(lats, lats_exp) + + +class TestPredictionInterpolation: + """Test interpolation of orbit and attitude predictions.""" + + @pytest.mark.parametrize( + "obs_time,expected", [(-1, np.nan), (1.5, 2.5), (5, np.nan)] + ) + def test_interpolate_continuous(self, obs_time, expected): + """Test interpolation of continuous variables.""" + prediction_times = np.array([0, 1, 2, 3]) + predicted_values = np.array([1, 2, 3, 4]) + res = nav.interpolate_continuous(obs_time, prediction_times, predicted_values) + np.testing.assert_allclose(res, expected) + + @pytest.mark.parametrize( + "obs_time,expected", + [ + (-1, np.nan), + (1.5, 0.75 * np.pi), + (2.5, -0.75 * np.pi), + (3.5, -0.25 * np.pi), + (5, np.nan), + ], + ) + def test_interpolate_angles(self, obs_time, expected): + """Test interpolation of periodic angles.""" + prediction_times = np.array([0, 1, 2, 3, 4]) + predicted_angles = np.array( + [0, 0.5 * np.pi, np.pi, 1.5 * np.pi, 2 * np.pi] + ) # already unwrapped + res = nav.interpolate_angles(obs_time, prediction_times, predicted_angles) + np.testing.assert_allclose(res, expected) + + @pytest.mark.parametrize( + "obs_time,expected", + [ + (-1, np.nan * np.ones((2, 2))), + (1.5, [[1, 0], [0, 2]]), + (3, np.nan * np.ones((2, 2))), + ], + ) + def test_interpolate_nearest(self, obs_time, expected): + """Test nearest neighbour interpolation.""" + prediction_times = np.array([0, 1, 2]) + predicted_angles = np.array( + [np.zeros((2, 2)), np.diag((1, 2)), np.zeros((2, 2))] + ) + res = nav.interpolate_nearest(obs_time, prediction_times, predicted_angles) + np.testing.assert_allclose(res, expected) + + def test_interpolate_orbit_prediction( + self, obs_time, orbit_prediction, orbit_expected + ): + """Test interpolating orbit prediction.""" + orbit_prediction = orbit_prediction.to_numba() + orbit = nav.interpolate_orbit_prediction(orbit_prediction, obs_time) + assert_namedtuple_close(orbit, orbit_expected) + + def test_interpolate_attitude_prediction( + self, obs_time, attitude_prediction, attitude_expected + ): + """Test interpolating attitude prediction.""" + attitude_prediction = attitude_prediction.to_numba() + attitude = nav.interpolate_attitude_prediction(attitude_prediction, obs_time) + assert_namedtuple_close(attitude, attitude_expected) + + @pytest.fixture + def obs_time(self): + """Get observation time.""" + return 2.5 + + @pytest.fixture + def orbit_expected(self): + """Get expected orbit.""" + return nav.Orbit( + greenwich_sidereal_time=1.5, + declination_from_sat_to_sun=1.6, + right_ascension_from_sat_to_sun=1.7, + sat_position_earth_fixed_x=1.8, + sat_position_earth_fixed_y=1.9, + sat_position_earth_fixed_z=2.0, + nutation_precession=1.6 * np.identity(3), + ) + + @pytest.fixture + def attitude_expected(self): + """Get expected attitude.""" + return nav.Attitude( + angle_between_earth_and_sun=1.5, + angle_between_sat_spin_and_z_axis=1.6, + angle_between_sat_spin_and_yz_plane=1.7, + ) + + +@pytest.fixture +def sampling_angle(): + """Get sampling angle.""" + return 0.000095719995443 + + +@pytest.fixture +def scan_params(sampling_angle): + """Get scanning parameters.""" + 
return nav.ScanningParameters( + start_time_of_scan=0, + spinning_rate=0.5, + num_sensors=1, + sampling_angle=sampling_angle, + ) + + +@pytest.fixture +def attitude_prediction(): + """Get attitude prediction.""" + return nav.AttitudePrediction( + prediction_times=np.array([1.0, 2.0, 3.0]), + angle_between_earth_and_sun=np.array([0.0, 1.0, 2.0]), + angle_between_sat_spin_and_z_axis=np.array([0.1, 1.1, 2.1]), + angle_between_sat_spin_and_yz_plane=np.array([0.2, 1.2, 2.2]), + ) + + +@pytest.fixture +def orbit_prediction(): + """Get orbit prediction.""" + return nav.OrbitPrediction( + prediction_times=np.array([1.0, 2.0, 3.0, 4.0]), + greenwich_sidereal_time=np.array([0.0, 1.0, 2.0, 3.0]), + declination_from_sat_to_sun=np.array([0.1, 1.1, 2.1, 3.1]), + right_ascension_from_sat_to_sun=np.array([0.2, 1.2, 2.2, 3.2]), + sat_position_earth_fixed_x=np.array([0.3, 1.3, 2.3, 3.3]), + sat_position_earth_fixed_y=np.array([0.4, 1.4, 2.4, 3.4]), + sat_position_earth_fixed_z=np.array([0.5, 1.5, 2.5, 3.5]), + nutation_precession=np.array( + [ + 0.6 * np.identity(3), + 1.6 * np.identity(3), + 2.6 * np.identity(3), + 3.6 * np.identity(3), + ] + ), + ) + + +@pytest.fixture +def proj_params(sampling_angle): + """Get projection parameters.""" + return nav.ProjectionParameters( + line_offset=1378.5, + pixel_offset=1672.5, + stepping_angle=0.000140000047395, + sampling_angle=sampling_angle, + misalignment=np.identity(3).astype(np.float64), + earth_flattening=0.003352813177897, + earth_equatorial_radius=6378136, + ) + + +def test_get_observation_time(): + """Test getting a pixel's observation time.""" + scan_params = nav.ScanningParameters( + start_time_of_scan=50000.0, + spinning_rate=100, + num_sensors=1, + sampling_angle=0.01, + ) + point = np.array([11, 100]) + obs_time = nav.get_observation_time(point, scan_params) + np.testing.assert_allclose(obs_time, 50000.0000705496871047) + + +def assert_namedtuple_close(a, b): + """Assert that two numba namedtuples are approximately equal.""" + assert a.__class__ == b.__class__ + for attr in a._fields: + np.testing.assert_allclose( + getattr(a, attr), + getattr(b, attr), + err_msg="{} attribute {} differs".format(a.__class__, attr), + ) From f1037e98748a28cc95a0738a0a92f211ff940f55 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Wed, 31 May 2023 15:17:18 +0000 Subject: [PATCH 0234/1416] Add test of the start/end time and the repeat_cycle_duration --- satpy/tests/reader_tests/test_seviri_l1b_hrit.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py index 1492463506..90785ffdbf 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py @@ -226,6 +226,19 @@ def test_get_dataset(self, calibrate, parent_get_dataset): res.attrs, setup.get_attrs_exp(self.projection_longitude) ) + # testing start/end time + self.assertEqual(datetime(2006, 1, 1, 12, 15, 9, 304888), self.reader.observation_start_time) + self.assertEqual(datetime(2006, 1, 1, 12, 15,), self.reader.start_time) + self.assertEqual(self.reader.start_time, self.reader.nominal_start_time) + + self.assertEqual(datetime(2006, 1, 1, 12, 27, 39), self.reader.observation_end_time) + self.assertEqual(self.reader.end_time, self.reader.nominal_end_time) + self.assertEqual(datetime(2006, 1, 1, 12, 30,), self.reader.end_time) + # test repeat cycle duration + self.assertEqual(15, self.reader._repeat_cycle_duration) + # Change the reducescan scenario to test the 
repeat cycle duration handling
+        self.reader.epilogue['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] = 1
+        self.assertEqual(5, self.reader._repeat_cycle_duration)
 
     @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset')
     @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate')

From 933df07be79b2d45378f78e542faa6a455554ba2 Mon Sep 17 00:00:00 2001
From: youva Aoun
Date: Wed, 31 May 2023 15:36:12 +0000
Subject: [PATCH 0235/1416] Add tests for start/end time handling and repeat cycle duration

---
 .../reader_tests/test_seviri_l1b_native.py | 17 +++++++++++++++++
 1 file changed, 17 insertions(+)

diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py
index da075e3aed..a7de60dcc2 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_native.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py
@@ -1205,6 +1205,23 @@ def test_get_dataset(self, file_handler):
         assert file_handler.end_time == datetime(2006, 1, 1, 12, 30, 0)
         assert_attrs_equal(dataset.attrs, expected.attrs, tolerance=1e-4)
 
+    def test_time(self, file_handler):
+        """Test start/end nominal/observation time handling."""
+        assert datetime(2006, 1, 1, 12, 15, 9, 304888) == file_handler.observation_start_time
+        assert datetime(2006, 1, 1, 12, 15,) == file_handler.start_time
+        assert file_handler.start_time == file_handler.nominal_start_time
+
+        assert datetime(2006, 1, 1, 12, 27, 9, 304888) == file_handler.observation_end_time
+        assert file_handler.end_time == file_handler.nominal_end_time
+        assert datetime(2006, 1, 1, 12, 30,) == file_handler.end_time
+
+    def test_repeat_cycle_duration(self, file_handler):
+        """Test repeat cycle handling for FD or ReducedScan."""
+        assert 15 == file_handler._repeat_cycle_duration
+        # Change the reducescan scenario to test the repeat cycle duration handling
+        file_handler.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] = 1
+        assert 5 == file_handler._repeat_cycle_duration
+
     @staticmethod
     def _exp_data_array():
         expected = xr.DataArray(

From eb8b15828b53b71e4e799519295d34cb0bca88b1 Mon Sep 17 00:00:00 2001
From: youva Aoun
Date: Wed, 31 May 2023 15:50:27 +0000
Subject: [PATCH 0236/1416] Add tests for start/end time handling and repeat cycle duration

---
 .../tests/reader_tests/test_seviri_l1b_nc.py | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)

diff --git a/satpy/tests/reader_tests/test_seviri_l1b_nc.py b/satpy/tests/reader_tests/test_seviri_l1b_nc.py
index adb2089fd3..f85e9f5aae 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_nc.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_nc.py
@@ -350,6 +350,25 @@ def test_get_dataset(self, file_handler, channel, calibration, mask_bad_quality_
         res.attrs.pop(key, None)
         assert_attrs_equal(res.attrs, expected.attrs, tolerance=1e-4)
 
+    def test_time(self, file_handler):
+        """Test start/end nominal/observation time handling."""
+        assert datetime(2020, 1, 1, 0, 0) == file_handler.observation_start_time
+        assert datetime(2020, 1, 1, 0, 0) == file_handler.start_time
+        assert file_handler.start_time == file_handler.nominal_start_time
+
+        assert datetime(2020, 1, 1, 0, 0) == file_handler.observation_end_time
+        assert file_handler.end_time == file_handler.nominal_end_time
+        assert datetime(2020, 1, 1, 0, 0) == file_handler.end_time
+
+    def test_repeat_cycle_duration(self, file_handler):
+        """Test repeat cycle handling for FD or ReducedScan."""
+        assert 15 == file_handler._repeat_cycle_duration
+        # Change the reducescan scenario to test the repeat cycle duration handling
+        file_handler.nc.attrs['nominal_image_scanning'] = ''
+        file_handler.nc.attrs['reduced_scanning'] = 'T'
+        # file_handler.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] = 1
+        assert 5 == file_handler._repeat_cycle_duration
+
     def test_satpos_no_valid_orbit_polynomial(self, file_handler):
         """Test satellite position if there is no valid orbit polynomial."""
         dataset_id = make_dataid(name='VIS006', calibration='counts')

From 7e149aca6fb116b195d4b5dc3ddc9c2df596cd2d Mon Sep 17 00:00:00 2001
From: yukaribbba
Date: Thu, 1 Jun 2023 12:19:41 +0800
Subject: [PATCH 0237/1416] Update test_composites.py

---
 satpy/tests/test_composites.py | 261 +++++++++++++++++----------
 1 file changed, 131 insertions(+), 130 deletions(-)

diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py
index c293d551bf..cf3898ad72 100644
--- a/satpy/tests/test_composites.py
+++ b/satpy/tests/test_composites.py
@@ -31,6 +31,7 @@
 
 import satpy
 
+
 # NOTE:
 # The following fixtures are not defined in this file, but are used and injected by Pytest:
 # - tmp_path
@@ -129,7 +130,7 @@ def test_nondimensional_coords(self):
         self.assertNotIn('acq_time', ret_datasets[0].coords)
 
 
-class TestRatioSharpenedCompositors(unittest.TestCase):
+class TestRatioSharpenedCompositors:
     """Test RatioSharpenedRGB and SelfSharpenedRGB compositors."""
 
     def setUp(self):
@@ -150,17 +151,16 @@ def setUp(self):
         ds1 = xr.DataArray(da.from_array(low_res_data, chunks=2),
                            attrs=attrs, dims=('y', 'x'),
                            coords={'y': [0, 1], 'x': [0, 1]})
-        self.ds1 = ds1
+
         ds2 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 2,
                            attrs=attrs, dims=('y', 'x'),
                            coords={'y': [0, 1], 'x': [0, 1]})
         ds2.attrs['name'] += '2'
-        self.ds2 = ds2
+
         ds3 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 3,
                            attrs=attrs, dims=('y', 'x'),
                            coords={'y': [0, 1], 'x': [0, 1]})
         ds3.attrs['name'] += '3'
-        self.ds3 = ds3
 
         # high resolution version
         high_res_data = np.ones((2, 2), dtype=np.float64)
@@ -170,146 +170,147 @@ def setUp(self):
                            coords={'y': [0, 1], 'x': [0, 1]})
         ds4.attrs['name'] += '4'
         ds4.attrs['resolution'] = 500
-        self.ds4 = ds4
 
         # high resolution version - but too big
-        ds4 = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64),
-                           attrs=attrs.copy(), dims=('y', 'x'),
-                           coords={'y': [0, 1, 2, 3], 'x': [0, 1, 2, 3]})
-        ds4.attrs['name'] += '4'
-        ds4.attrs['resolution'] = 500
-        ds4.attrs['rows_per_scan'] = 1
-        ds4.attrs['area'] = AreaDefinition('test', 'test', 'test',
-                                           {'proj': 'merc'}, 4, 4,
-                                           (-2000, -2000, 2000, 2000))
-        self.ds4_big = ds4
+        ds4_big = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64),
+                               attrs=attrs.copy(), dims=('y', 'x'),
+                               coords={'y': [0, 1, 2, 3], 'x': [0, 1, 2, 3]})
+        ds4_big.attrs['name'] += '4'
+        ds4_big.attrs['resolution'] = 500
+        ds4_big.attrs['rows_per_scan'] = 1
+        ds4_big.attrs['area'] = AreaDefinition('test', 'test', 'test',
+                                               {'proj': 'merc'}, 4, 4,
+                                               (-2000, -2000, 2000, 2000))
+
+        return ds1, ds2, ds3, ds4, ds4_big
 
-    def test_high_bad_color(self):
-        """Test that only valid band colors can be provided."""
-        from satpy.composites import RatioSharpenedRGB
-        self.assertRaises(ValueError, RatioSharpenedRGB, name='true_color', high_resolution_band='bad')
+    @pytest.mark.parametrize(
+        ("case", "exp"),
+        [
+            ("high bad color", ValueError),
+            ("neutral bad color", ValueError),
+            ("match_data_arrays", satpy.composites.IncompatibleAreas),
+            ("more than three datasets", ValueError),
+            ("no high res band in self sharpened",
ValueError) + ] + ) + def test_errors(self, case, exp): + """Test errors under different cases.""" + from satpy.composites import RatioSharpenedRGB, SelfSharpenedRGB + ds1, ds2, ds3, ds4, ds4_big = self.setUp() + + if case == "high bad color": + with pytest.raises(exp): + RatioSharpenedRGB(name='true_color', high_resolution_band="bad", neutral_resolution_band="red") + + elif case == "neutral bad color": + with pytest.raises(exp): + RatioSharpenedRGB(name='true_color', high_resolution_band="red", neutral_resolution_band="bad") + + elif case == "match_data_arrays": + comp = RatioSharpenedRGB(name='true_color') + with pytest.raises(exp): + comp((ds1, ds2, ds3), optional_datasets=(ds4_big,)) + + elif case == "more than three datasets": + comp = RatioSharpenedRGB(name='true_color') + with pytest.raises(exp): + comp((ds1, ds2, ds3, ds1), optional_datasets=(ds4_big,)) + + elif case == "no high res band in self sharpened": + comp = SelfSharpenedRGB(name='true_color', high_resolution_band=None) + with pytest.raises(exp): + comp((ds1, ds2, ds3)) - def test_neutral_bad_color(self): - """Test that only valid band colors can be provided.""" + @pytest.mark.parametrize( + ("case", "exp"), + [ + ("without optional high res", (3, 2, 2)), + ("high res band is None", (3, 2, 2)) + ] + ) + def test_basic_function(self, case, exp): + """Test basic composite function without sharpening.""" from satpy.composites import RatioSharpenedRGB - self.assertRaises(ValueError, RatioSharpenedRGB, name='true_color', neutral_resolution_band='bad') + ds1, ds2, ds3, ds4, ds4_big = self.setUp() - def test_match_data_arrays(self): - """Test that all areas have to be the same resolution.""" - from satpy.composites import IncompatibleAreas, RatioSharpenedRGB - comp = RatioSharpenedRGB(name='true_color') - self.assertRaises(IncompatibleAreas, comp, (self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4_big,)) + if case == "without optional high res": + comp = RatioSharpenedRGB(name='true_color') + res = comp((ds1, ds2, ds3)) + elif case == "high res band is None": + comp = RatioSharpenedRGB(name='true_color', high_resolution_band=None) + res = comp((ds1, ds2, ds3), optional_datasets=(ds4,)) - def test_more_than_three_datasets(self): - """Test that only 3 datasets can be passed.""" - from satpy.composites import RatioSharpenedRGB - comp = RatioSharpenedRGB(name='true_color') - self.assertRaises(ValueError, comp, (self.ds1, self.ds2, self.ds3, self.ds1), - optional_datasets=(self.ds4_big,)) + assert res.shape == exp - def test_basic_no_high_res(self): - """Test that three datasets can be passed without optional high res.""" + @pytest.mark.parametrize( + ("high_resolution_band", "neutral_resolution_band", "exp_r", "exp_g", "exp_b"), + [ + ("red", None, + np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64), + np.array([[0.6, 0.6], [np.nan, 3.0]], dtype=np.float64), + np.array([[0.8, 0.8], [np.nan, 4.0]], dtype=np.float64)), + ("red", "green", + np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64), + np.array([[3.0, 3.0], [np.nan, 3.0]], dtype=np.float64), + np.array([[0.8, 0.8], [np.nan, 4.0]], dtype=np.float64)), + ("green", None, + np.array([[5 / 3, 5 / 3], [np.nan, 0.0]], dtype=np.float64), + np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64), + np.array([[4 / 3, 4 / 3], [np.nan, 4 / 3]], dtype=np.float64)), + ("green", "blue", + np.array([[5 / 3, 5 / 3], [np.nan, 0.0]], dtype=np.float64), + np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64), + np.array([[4.0, 4.0], [np.nan, 4.0]], dtype=np.float64)), + 
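# The remaining cases exercise "blue" as the high-resolution band,
+            # first without and then with a neutral band.
+            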
("blue", None, + np.array([[1.25, 1.25], [np.nan, 0.0]], dtype=np.float64), + np.array([[0.75, 0.75], [np.nan, 0.75]], dtype=np.float64), + np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64)), + ("blue", "red", + np.array([[5.0, 5.0], [np.nan, 0.0]], dtype=np.float64), + np.array([[0.75, 0.75], [np.nan, 0.75]], dtype=np.float64), + np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64)) + ] + ) + def test_ratio_sharpening(self, high_resolution_band, neutral_resolution_band, exp_r, exp_g, exp_b): + """Test RatioSharpenedRGB by different groups of high_resolution_band and neutral_resolution_band.""" from satpy.composites import RatioSharpenedRGB - comp = RatioSharpenedRGB(name='true_color') - res = comp((self.ds1, self.ds2, self.ds3)) - self.assertEqual(res.shape, (3, 2, 2)) + comp = RatioSharpenedRGB(name='true_color', high_resolution_band=high_resolution_band, + neutral_resolution_band=neutral_resolution_band) + ds1, ds2, ds3, ds4, ds4_big = self.setUp() + res = comp((ds1, ds2, ds3), optional_datasets=(ds4,)) - def test_basic_no_sharpen(self): - """Test that color None does no sharpening.""" - from satpy.composites import RatioSharpenedRGB - comp = RatioSharpenedRGB(name='true_color', high_resolution_band=None) - res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) - self.assertEqual(res.shape, (3, 2, 2)) + assert "units" not in res.attrs + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) - def test_basic_red_no_neutral(self): - """Test that basic high resolution red with no neutral band can be passed.""" - from satpy.composites import RatioSharpenedRGB - comp = RatioSharpenedRGB(name='true_color', neutral_resolution_band=None) - res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) - res = res.values - self.assertEqual(res.shape, (3, 2, 2)) - np.testing.assert_allclose(res[0], self.ds4.values) - np.testing.assert_allclose(res[1], np.array([[0.6, 0.6], [np.nan, 3.0]], dtype=np.float64)) - np.testing.assert_allclose(res[2], np.array([[0.8, 0.8], [np.nan, 4.0]], dtype=np.float64)) - - def test_basic_red_neutral_green(self): - """Test that basic high resolution red with green neutral band can be passed.""" - from satpy.composites import RatioSharpenedRGB - comp = RatioSharpenedRGB(name='true_color', neutral_resolution_band='green') - res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) - res = res.values - self.assertEqual(res.shape, (3, 2, 2)) - np.testing.assert_allclose(res[0], self.ds4.values) - np.testing.assert_allclose(res[1], np.array([[3.0, 3.0], [np.nan, 3.0]], dtype=np.float64)) - np.testing.assert_allclose(res[2], np.array([[0.8, 0.8], [np.nan, 4.0]], dtype=np.float64)) - - def test_high_green_no_neutral(self): - """Test that high resolution green with no neutral band can be passed.""" - from satpy.composites import RatioSharpenedRGB - comp = RatioSharpenedRGB(name='true_color', high_resolution_band='green', neutral_resolution_band=None) - res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) - res = res.values - self.assertEqual(res.shape, (3, 2, 2)) - np.testing.assert_allclose(res[0], np.array([[5/3, 5/3], [np.nan, 0.0]], dtype=np.float64)) - np.testing.assert_allclose(res[1], np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64)) - np.testing.assert_allclose(res[2], np.array([[4/3, 4/3], [np.nan, 4/3]], dtype=np.float64)) - - def test_high_green_neutral_blue(self): - """Test that high resolution green with blue neutral band can be passed.""" - from 
satpy.composites import RatioSharpenedRGB - comp = RatioSharpenedRGB(name='true_color', high_resolution_band='green', neutral_resolution_band='blue') - res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) - res = res.values - self.assertEqual(res.shape, (3, 2, 2)) - np.testing.assert_allclose(res[0], np.array([[5/3, 5/3], [np.nan, 0.0]], dtype=np.float64)) - np.testing.assert_allclose(res[1], np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64)) - np.testing.assert_allclose(res[2], np.array([[4.0, 4.0], [np.nan, 4.0]], dtype=np.float64)) - - def test_high_blue_no_neutral(self): - """Test that high resolution blue with no neutral band can be passed.""" - from satpy.composites import RatioSharpenedRGB - comp = RatioSharpenedRGB(name='true_color', high_resolution_band='blue', neutral_resolution_band=None) - res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) - res = res.values - self.assertEqual(res.shape, (3, 2, 2)) - np.testing.assert_allclose(res[0], np.array([[1.25, 1.25], [np.nan, 0.0]], dtype=np.float64)) - np.testing.assert_allclose(res[1], np.array([[0.75, 0.75], [np.nan, 0.75]], dtype=np.float64)) - np.testing.assert_allclose(res[2], np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64)) - - def test_high_blue_neutral_red(self): - """Test that high resolution blue with red neutral band can be passed.""" - from satpy.composites import RatioSharpenedRGB - comp = RatioSharpenedRGB(name='true_color', high_resolution_band='blue', neutral_resolution_band='red') - res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) - res = res.values - self.assertEqual(res.shape, (3, 2, 2)) - np.testing.assert_allclose(res[0], np.array([[5.0, 5.0], [np.nan, 0.0]], dtype=np.float64)) - np.testing.assert_allclose(res[1], np.array([[0.75, 0.75], [np.nan, 0.75]], dtype=np.float64)) - np.testing.assert_allclose(res[2], np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64)) - - def test_self_sharpened_no_high_res(self): - """Test for exception when no high res band is specified.""" - from satpy.composites import SelfSharpenedRGB - comp = SelfSharpenedRGB(name='true_color', high_resolution_band=None) - self.assertRaises(ValueError, comp, (self.ds1, self.ds2, self.ds3)) + data = res.values + np.testing.assert_allclose(data[0], exp_r, rtol=1e-5) + np.testing.assert_allclose(data[1], exp_g, rtol=1e-5) + np.testing.assert_allclose(data[2], exp_b, rtol=1e-5) - def test_self_sharpened_basic(self): + @pytest.mark.parametrize( + ("exp_shape", "exp_r", "exp_g", "exp_b"), + [ + ((3, 2, 2), + np.array([[5.0, 5.0], [5.0, 0]], dtype=np.float64), + np.array([[4.0, 4.0], [4.0, 0]], dtype=np.float64), + np.array([[16/3, 16/3], [16/3, 0]], dtype=np.float64)) + ] + ) + def test_self_sharpened_basic(self, exp_shape, exp_r, exp_g, exp_b): """Test that three datasets can be passed without optional high res.""" from satpy.composites import SelfSharpenedRGB comp = SelfSharpenedRGB(name='true_color') - res = comp((self.ds1, self.ds2, self.ds3)) - res = res.values - self.assertEqual(res.shape, (3, 2, 2)) - np.testing.assert_allclose(res[0], self.ds1.values) - np.testing.assert_allclose(res[1], np.array([[4, 4], [4, 0]], dtype=np.float64)) - np.testing.assert_allclose(res[2], np.array([[5.333333, 5.333333], [5.333333, 0]], dtype=np.float64)) - - def test_no_units(self): - """Test that the computed RGB has no units attribute.""" - from satpy.composites import RatioSharpenedRGB - comp = RatioSharpenedRGB(name='true_color') - res = comp((self.ds1, self.ds2, self.ds3)) - 
assert "units" not in res.attrs + + ds1, ds2, ds3, ds4, ds4_big = self.setUp() + res = comp((ds1, ds2, ds3)) + data = res.values + + assert data.shape == exp_shape + np.testing.assert_allclose(data[0], exp_r, rtol=1e-5) + np.testing.assert_allclose(data[1], exp_g, rtol=1e-5) + np.testing.assert_allclose(data[2], exp_b, rtol=1e-5) class TestDifferenceCompositor(unittest.TestCase): @@ -526,7 +527,7 @@ def test_day_only_area_with_alpha(self): """Test compositor with day portion with alpha_band when SZA data is not provided.""" from satpy.composites import DayNightCompositor comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=True) - res = comp((self.data_a, )) + res = comp((self.data_a,)) res = res.compute() expected_l_channel = np.array([[0., 0.33164983], [0.66835017, 1.]]) expected_alpha = np.array([[1., 1.], [1., 1.]]) From 5da6b6fe95a01f5c4676eb19d1f74ce98b4a0edb Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Thu, 1 Jun 2023 07:48:46 +0000 Subject: [PATCH 0238/1416] Fix code quality issues --- .../tests/reader_tests/test_gms5_vissr_l1b.py | 63 ++++++++++++++----- 1 file changed, 46 insertions(+), 17 deletions(-) diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index 282cc694cb..1621e3fb7f 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -25,10 +25,7 @@ def disable_jit(request, monkeypatch): if request.param: jit_methods = get_jit_methods(vissr) for name, method in jit_methods.items(): - monkeypatch.setattr( - name, - method.py_func - ) + monkeypatch.setattr(name, method.py_func) class TestEarthMask: @@ -146,31 +143,45 @@ def control_block(self, dataset_id): return ctrl_block @pytest.fixture - def image_parameters( + def image_parameters(self, mode_block, cal_params, nav_params): + """Get VISSR image parameters.""" + image_params = {"mode": mode_block} + image_params.update(cal_params) + image_params.update(nav_params) + return image_params + + @pytest.fixture + def nav_params( self, - mode_block, coordinate_conversion, attitude_prediction, orbit_prediction_1, orbit_prediction_2, - vis_calibration, - ir1_calibration, - ir2_calibration, - wv_calibration, simple_coordinate_conversion_table, ): - """Get VISSR image parameters.""" + """Get navigation parameters.""" return { - "mode": mode_block, "coordinate_conversion": coordinate_conversion, "attitude_prediction": attitude_prediction, "orbit_prediction_1": orbit_prediction_1, "orbit_prediction_2": orbit_prediction_2, + "simple_coordinate_conversion_table": simple_coordinate_conversion_table, + } + + @pytest.fixture + def cal_params( + self, + vis_calibration, + ir1_calibration, + ir2_calibration, + wv_calibration, + ): + """Get calibration parameters.""" + return { "vis_calibration": vis_calibration, "ir1_calibration": ir1_calibration, "ir2_calibration": ir2_calibration, "wv_calibration": wv_calibration, - "simple_coordinate_conversion_table": simple_coordinate_conversion_table, } @pytest.fixture @@ -560,13 +571,26 @@ def corrupt_file(self, file_contents, tmp_path): def test_corrupt_file(self, corrupt_file): """Test reading a corrupt file.""" - with pytest.raises(ValueError, match=r'.* corrupt .*'): + with pytest.raises(ValueError, match=r".* corrupt .*"): vissr.GMS5VISSRFileHandler(corrupt_file, {}, {}) class VissrFileWriter: """Write data in VISSR archive format.""" + image_params_order = [ + "mode", + "coordinate_conversion", + "attitude_prediction", + "orbit_prediction_1", 
+ "orbit_prediction_2", + "vis_calibration", + "ir1_calibration", + "ir2_calibration", + "wv_calibration", + "simple_coordinate_conversion_table", + ] + def __init__(self, ch_type, open_function): """Initialize the writer. @@ -588,9 +612,14 @@ def _write_control_block(self, fd, contents): self._write(fd, contents["control_block"]) def _write_image_parameters(self, fd, contents): - for key, im_param in contents["image_parameters"].items(): - offset = vissr.IMAGE_PARAMS[key]["offset"][self.ch_type] - self._write(fd, im_param, offset) + for name in self.image_params_order: + im_param = contents["image_parameters"].get(name) + if im_param: + self._write_image_parameter(fd, im_param, name) + + def _write_image_parameter(self, fd, im_param, name): + offset = vissr.IMAGE_PARAMS[name]["offset"][self.ch_type] + self._write(fd, im_param, offset) def _write_image_data(self, fd, contents): offset = vissr.IMAGE_DATA[self.ch_type]["offset"] From 6fbfb7446bc6df0911952daa2175e12f1770d201 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Thu, 1 Jun 2023 08:44:58 +0000 Subject: [PATCH 0239/1416] Refactor orbit prediction --- satpy/readers/gms5_vissr_l1b.py | 34 ++-- satpy/readers/gms5_vissr_navigation.py | 183 +++++++++--------- .../test_gms5_vissr_navigation.py | 141 ++++++++------ 3 files changed, 198 insertions(+), 160 deletions(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index 218d76b3c7..4994d5a921 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -844,8 +844,12 @@ def _get_static_navigation_params(self, dataset_id): def _get_predicted_navigation_params(self): """Get predictions of time-dependent navigation parameters.""" + attitude_prediction = self._get_attitude_prediction() + orbit_prediction = self._get_orbit_prediction() + return attitude_prediction, orbit_prediction + + def _get_attitude_prediction(self): att_pred = self._header["image_parameters"]["attitude_prediction"]["data"] - orb_pred = self._header["image_parameters"]["orbit_prediction"]["data"] attitude_prediction = nav.AttitudePrediction( prediction_times=att_pred["prediction_time_mjd"].astype(np.float64), angle_between_earth_and_sun=att_pred["sun_earth_angle"].astype(np.float64), @@ -856,8 +860,11 @@ def _get_predicted_navigation_params(self): "declination_of_attitude" ].astype(np.float64), ) - orbit_prediction = nav.OrbitPrediction( - prediction_times=orb_pred["prediction_time_mjd"].astype(np.float64), + return attitude_prediction + + def _get_orbit_prediction(self): + orb_pred = self._header["image_parameters"]["orbit_prediction"]["data"] + orbit_angles = nav.OrbitAngles( greenwich_sidereal_time=np.deg2rad( orb_pred["greenwich_sidereal_time"].astype(np.float64) ), @@ -867,20 +874,21 @@ def _get_predicted_navigation_params(self): right_ascension_from_sat_to_sun=np.deg2rad( orb_pred["sat_sun_vector_earth_fixed"]["azimuth"].astype(np.float64) ), - sat_position_earth_fixed_x=orb_pred["satellite_position_earth_fixed"][ - :, 0 - ].astype(np.float64), - sat_position_earth_fixed_y=orb_pred["satellite_position_earth_fixed"][ - :, 1 - ].astype(np.float64), - sat_position_earth_fixed_z=orb_pred["satellite_position_earth_fixed"][ - :, 2 - ].astype(np.float64), + ) + sat_position = nav.SatellitePositionEarthFixed( + x=orb_pred["satellite_position_earth_fixed"][:, 0].astype(np.float64), + y=orb_pred["satellite_position_earth_fixed"][:, 1].astype(np.float64), + z=orb_pred["satellite_position_earth_fixed"][:, 2].astype(np.float64), + ) + orbit_prediction = 
nav.OrbitPrediction( + prediction_times=orb_pred["prediction_time_mjd"].astype(np.float64), + angles=orbit_angles, + sat_position=sat_position, nutation_precession=np.ascontiguousarray( orb_pred["conversion_matrix"].transpose(0, 2, 1).astype(np.float64) ), ) - return attitude_prediction, orbit_prediction + return orbit_prediction def _make_lons_lats_data_array(self, lons, lats): lons = xr.DataArray( diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index 7d7a37cb1d..30f6b619a5 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -34,17 +34,34 @@ Orbit = namedtuple( "Orbit", + [ + "angles", + "sat_position", + "nutation_precession", + ], +) +Orbit.__doc__ = """Orbital Parameters + +Args: + angles (OrbitAngles): Orbit angles + sat_position (SatellitePositionEarthFixed): Satellite position + nutation_precession: Nutation and precession matrix (3x3) +""" + + +OrbitAngles = namedtuple( + "OrbitAngles", [ "greenwich_sidereal_time", "declination_from_sat_to_sun", "right_ascension_from_sat_to_sun", - "sat_position_earth_fixed_x", - "sat_position_earth_fixed_y", - "sat_position_earth_fixed_z", - "nutation_precession", ], ) +SatellitePositionEarthFixed = namedtuple( + "SatellitePositionEarthFixed", + ["x", "y", "z"], +) ScanningParameters = namedtuple( "ScanningParameters", @@ -80,12 +97,8 @@ "_OrbitPrediction", [ "prediction_times", - "greenwich_sidereal_time", - "declination_from_sat_to_sun", - "right_ascension_from_sat_to_sun", - "sat_position_earth_fixed_x", - "sat_position_earth_fixed_y", - "sat_position_earth_fixed_z", + "angles", + "sat_position", "nutation_precession", ], ) @@ -150,12 +163,8 @@ class OrbitPrediction: def __init__( self, prediction_times, - greenwich_sidereal_time, - declination_from_sat_to_sun, - right_ascension_from_sat_to_sun, - sat_position_earth_fixed_x, - sat_position_earth_fixed_y, - sat_position_earth_fixed_z, + angles, + sat_position, nutation_precession, ): """Initialize orbit prediction. @@ -166,39 +175,30 @@ def __init__( Args: prediction_times: Timestamps of orbit prediction. - greenwich_sidereal_time: Greenwich sidereal time - declination_from_sat_to_sun: Declination from satellite to sun - right_ascension_from_sat_to_sun: Right ascension from satellite to - sun - sat_position_earth_fixed_x: Satellite position in earth fixed - coordinates (x-component) - sat_position_earth_fixed_y: Satellite position in earth fixed - coordinates (y-component) - sat_position_earth_fixed_z: Satellite position in earth fixed - coordinates (z-component) + angles (OrbitAngles): Orbit angles + sat_position (SatellitePositionEarthFixed): Satellite position nutation_precession: Nutation and precession matrix. 
""" self.prediction_times = prediction_times - self.greenwich_sidereal_time = np.unwrap(greenwich_sidereal_time) - self.declination_from_sat_to_sun = np.unwrap(declination_from_sat_to_sun) - self.right_ascension_from_sat_to_sun = np.unwrap( - right_ascension_from_sat_to_sun - ) - self.sat_position_earth_fixed_x = sat_position_earth_fixed_x - self.sat_position_earth_fixed_y = sat_position_earth_fixed_y - self.sat_position_earth_fixed_z = sat_position_earth_fixed_z + self.angles = self._unwrap_angles(angles) + self.sat_position = sat_position self.nutation_precession = nutation_precession + def _unwrap_angles(self, angles): + return OrbitAngles( + greenwich_sidereal_time=np.unwrap(angles.greenwich_sidereal_time), + declination_from_sat_to_sun=np.unwrap(angles.declination_from_sat_to_sun), + right_ascension_from_sat_to_sun=np.unwrap( + angles.right_ascension_from_sat_to_sun + ), + ) + def to_numba(self): """Convert to numba-compatible type.""" return _OrbitPrediction( prediction_times=self.prediction_times, - greenwich_sidereal_time=self.greenwich_sidereal_time, - declination_from_sat_to_sun=self.declination_from_sat_to_sun, - right_ascension_from_sat_to_sun=self.right_ascension_from_sat_to_sun, - sat_position_earth_fixed_x=self.sat_position_earth_fixed_x, - sat_position_earth_fixed_y=self.sat_position_earth_fixed_y, - sat_position_earth_fixed_z=self.sat_position_earth_fixed_z, + angles=self.angles, + sat_position=self.sat_position, nutation_precession=self.nutation_precession, ) @@ -317,11 +317,9 @@ def get_lon_lat(point, nav_params): ) view_vector_earth_fixed = transform_satellite_to_earth_fixed_coords( view_vector_sat, - orbit.greenwich_sidereal_time, - _get_sat_sun_angles(orbit), + orbit, attitude.angle_between_earth_and_sun, _get_spin_angles(attitude), - orbit.nutation_precession, ) point_on_earth = intersect_with_earth( view_vector_earth_fixed, _get_sat_pos(orbit), _get_ellipsoid(proj_params) @@ -342,11 +340,6 @@ def _get_sampling(proj_params): return proj_params.stepping_angle, proj_params.sampling_angle -@numba.njit -def _get_sat_sun_angles(orbit): - return (orbit.declination_from_sat_to_sun, orbit.right_ascension_from_sat_to_sun) - - @numba.njit def _get_spin_angles(attitude): return ( @@ -359,9 +352,9 @@ def _get_spin_angles(attitude): def _get_sat_pos(orbit): return np.array( ( - orbit.sat_position_earth_fixed_x, - orbit.sat_position_earth_fixed_y, - orbit.sat_position_earth_fixed_z, + orbit.sat_position.x, + orbit.sat_position.y, + orbit.sat_position.z, ) ) @@ -419,50 +412,41 @@ def _get_transforms_from_scanning_angles_to_satellite_coords(angles): @numba.njit def transform_satellite_to_earth_fixed_coords( point, - greenwich_sidereal_time, - sat_sun_angles, + orbit, earth_sun_angle, spin_angles, - nutation_precession, ): """Transform from earth-fixed to satellite angular momentum coordinates. Args: point: Point (x, y, z) in satellite angular momentum coordinates. - greenwich_sidereal_time: True Greenwich sidereal time (rad). - sat_sun_angles: Declination from satellite to sun (rad), - right ascension from satellite to sun (rad) + orbit (Orbit): Orbital parameters earth_sun_angle: Angle between sun and earth center on the z-axis vertical plane (rad) spin_angles: Angle between satellite spin axis and z-axis (rad), angle between satellite spin axis and yz-plane - nutation_precession: Nutation and precession matrix (3x3) Returns: Point (x', y', z') in earth-fixed coordinates. 
""" sat_unit_vectors = _get_satellite_unit_vectors( - greenwich_sidereal_time, - sat_sun_angles, + orbit, earth_sun_angle, spin_angles, - nutation_precession, ) return np.dot(sat_unit_vectors, point) @numba.njit def _get_satellite_unit_vectors( - greenwich_sidereal_time, - sat_sun_angles, + orbit, earth_sun_angle, spin_angles, - nutation_precession, ): unit_vector_z = _get_satellite_unit_vector_z( - spin_angles, greenwich_sidereal_time, nutation_precession + spin_angles, orbit.angles.greenwich_sidereal_time, orbit.nutation_precession ) unit_vector_x = _get_satellite_unit_vector_x( - earth_sun_angle, sat_sun_angles, unit_vector_z + earth_sun_angle, orbit.angles, unit_vector_z ) unit_vector_y = _get_satellite_unit_vector_y(unit_vector_x, unit_vector_z) return np.stack((unit_vector_x, unit_vector_y, unit_vector_z), axis=-1) @@ -497,9 +481,9 @@ def _get_transform_from_1950_to_earth_fixed(greenwich_sidereal_time): @numba.njit -def _get_satellite_unit_vector_x(earth_sun_angle, sat_sun_angles, sat_unit_vector_z): +def _get_satellite_unit_vector_x(earth_sun_angle, orbit_angles, sat_unit_vector_z): beta = earth_sun_angle - sat_sun_vector = _get_vector_from_satellite_to_sun(sat_sun_angles) + sat_sun_vector = _get_vector_from_satellite_to_sun(orbit_angles) z_cross_satsun = np.cross(sat_unit_vector_z, sat_sun_vector) z_cross_satsun = normalize_vector(z_cross_satsun) x_vec = z_cross_satsun * np.sin(beta) + np.cross( @@ -509,8 +493,9 @@ def _get_satellite_unit_vector_x(earth_sun_angle, sat_sun_angles, sat_unit_vecto @numba.njit -def _get_vector_from_satellite_to_sun(sat_sun_angles): - declination, right_ascension = sat_sun_angles +def _get_vector_from_satellite_to_sun(orbit_angles): + declination = orbit_angles.declination_from_sat_to_sun + right_ascension = orbit_angles.right_ascension_from_sat_to_sun cos_declination = np.cos(declination) x = cos_declination * np.cos(right_ascension) y = cos_declination * np.sin(right_ascension) @@ -596,50 +581,62 @@ def normalize_vector(v): @numba.njit def interpolate_orbit_prediction(orbit_prediction, observation_time): """Interpolate orbit prediction.""" - greenwich_sidereal_time = interpolate_angles( + angles = _interpolate_orbit_angles(observation_time, orbit_prediction) + sat_position = _interpolate_sat_position(observation_time, orbit_prediction) + nutation_precession = interpolate_nearest( observation_time, orbit_prediction.prediction_times, - orbit_prediction.greenwich_sidereal_time, + orbit_prediction.nutation_precession, ) - declination_from_sat_to_sun = interpolate_angles( + return Orbit( + angles=angles, + sat_position=sat_position, + nutation_precession=nutation_precession, + ) + + +@numba.njit +def _interpolate_orbit_angles(observation_time, orbit_prediction): + sidereal_time = interpolate_angles( observation_time, orbit_prediction.prediction_times, - orbit_prediction.declination_from_sat_to_sun, + orbit_prediction.angles.greenwich_sidereal_time, ) - right_ascension_from_sat_to_sun = interpolate_angles( + declination = interpolate_angles( observation_time, orbit_prediction.prediction_times, - orbit_prediction.right_ascension_from_sat_to_sun, + orbit_prediction.angles.declination_from_sat_to_sun, ) - sat_position_earth_fixed_x = interpolate_continuous( + right_ascension = interpolate_angles( observation_time, orbit_prediction.prediction_times, - orbit_prediction.sat_position_earth_fixed_x, + orbit_prediction.angles.right_ascension_from_sat_to_sun, ) - sat_position_earth_fixed_y = interpolate_continuous( + return OrbitAngles( + 
greenwich_sidereal_time=sidereal_time, + declination_from_sat_to_sun=declination, + right_ascension_from_sat_to_sun=right_ascension, + ) + + +@numba.njit +def _interpolate_sat_position(observation_time, orbit_prediction): + x = interpolate_continuous( observation_time, orbit_prediction.prediction_times, - orbit_prediction.sat_position_earth_fixed_y, + orbit_prediction.sat_position.x, ) - sat_position_earth_fixed_z = interpolate_continuous( + y = interpolate_continuous( observation_time, orbit_prediction.prediction_times, - orbit_prediction.sat_position_earth_fixed_z, + orbit_prediction.sat_position.y, ) - nutation_precession = interpolate_nearest( + z = interpolate_continuous( observation_time, orbit_prediction.prediction_times, - orbit_prediction.nutation_precession, - ) - return Orbit( - greenwich_sidereal_time, - declination_from_sat_to_sun, - right_ascension_from_sat_to_sun, - sat_position_earth_fixed_x, - sat_position_earth_fixed_y, - sat_position_earth_fixed_z, - nutation_precession, + orbit_prediction.sat_position.z, ) + return SatellitePositionEarthFixed(x, y, z) @numba.njit diff --git a/satpy/tests/reader_tests/test_gms5_vissr_navigation.py b/satpy/tests/reader_tests/test_gms5_vissr_navigation.py index c34914ba8f..b663d7b9e6 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_navigation.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_navigation.py @@ -24,12 +24,16 @@ angle_between_sat_spin_and_yz_plane=0.000546042025980, ), nav.Orbit( - greenwich_sidereal_time=2.468529732418296, - declination_from_sat_to_sun=-0.208770861178982, - right_ascension_from_sat_to_sun=3.304369303579407, - sat_position_earth_fixed_x=-32390963.148471601307392, - sat_position_earth_fixed_y=27003395.381247851997614, - sat_position_earth_fixed_z=-228134.860026293463307, + angles=nav.OrbitAngles( + greenwich_sidereal_time=2.468529732418296, + declination_from_sat_to_sun=-0.208770861178982, + right_ascension_from_sat_to_sun=3.304369303579407, + ), + sat_position=nav.SatellitePositionEarthFixed( + x=-32390963.148471601307392, + y=27003395.381247851997614, + z=-228134.860026293463307, + ), nutation_precession=np.array( [[0.999936381496146, -0.010344758016410, -0.004496547784299], [0.010344942303489, 0.999946489495557, 0.000017727054455], @@ -63,12 +67,16 @@ angle_between_sat_spin_and_yz_plane=0.000546042025980, ), nav.Orbit( - greenwich_sidereal_time=2.530392320846865, - declination_from_sat_to_sun=-0.208713576872247, - right_ascension_from_sat_to_sun=3.242660398458377, - sat_position_earth_fixed_x=-32390273.633551981300116, - sat_position_earth_fixed_y=27003859.543135114014149, - sat_position_earth_fixed_z=-210800.087589388160268, + angles=nav.OrbitAngles( + greenwich_sidereal_time=2.530392320846865, + declination_from_sat_to_sun=-0.208713576872247, + right_ascension_from_sat_to_sun=3.242660398458377, + ), + sat_position=nav.SatellitePositionEarthFixed( + x=-32390273.633551981300116, + y=27003859.543135114014149, + z=-210800.087589388160268, + ), nutation_precession=np.array( [[0.999936381432029, -0.010344763228876, -0.004496550050695], [0.010344947502662, 0.999946489441823, 0.000017724053657], @@ -106,12 +114,16 @@ angle_between_sat_spin_and_yz_plane=0.000546042025980, ), nav.Orbit( - greenwich_sidereal_time=2.468529731914041, - declination_from_sat_to_sun=-0.208770861179448, - right_ascension_from_sat_to_sun=3.304369304082406, - sat_position_earth_fixed_x=-32390963.148477241396904, - sat_position_earth_fixed_y=27003395.381243918091059, - sat_position_earth_fixed_z=-228134.860164520738181, + 
angles=nav.OrbitAngles( + greenwich_sidereal_time=2.468529731914041, + declination_from_sat_to_sun=-0.208770861179448, + right_ascension_from_sat_to_sun=3.304369304082406, + ), + sat_position=nav.SatellitePositionEarthFixed( + x=-32390963.148477241396904, + y=27003395.381243918091059, + z=-228134.860164520738181, + ), nutation_precession=np.array( [[0.999936381496146, -0.010344758016410, -0.004496547784299], [0.010344942303489, 0.999946489495557, 0.000017727054455], @@ -145,12 +157,16 @@ angle_between_sat_spin_and_yz_plane=0.000546042025980, ), nav.Orbit( - greenwich_sidereal_time=2.530392320342610, - declination_from_sat_to_sun=-0.208713576872715, - right_ascension_from_sat_to_sun=3.242660398961383, - sat_position_earth_fixed_x=-32390273.633557569235563, - sat_position_earth_fixed_y=27003859.543131537735462, - sat_position_earth_fixed_z=-210800.087734811415430, + angles=nav.OrbitAngles( + greenwich_sidereal_time=2.530392320342610, + declination_from_sat_to_sun=-0.208713576872715, + right_ascension_from_sat_to_sun=3.242660398961383, + ), + sat_position=nav.SatellitePositionEarthFixed( + x=-32390273.633557569235563, + y=27003859.543131537735462, + z=-210800.087734811415430, + ), nutation_precession=np.array( [[0.999936381432029, -0.010344763228876, -0.004496550050695], [0.010344947502662, 0.999946489441823, 0.000017724053657], @@ -187,10 +203,7 @@ def disable_jit(request, monkeypatch): if request.param: jit_methods = get_jit_methods(nav) for name, method in jit_methods.items(): - monkeypatch.setattr( - name, - method.py_func - ) + monkeypatch.setattr(name, method.py_func) class TestSinglePixelNavigation: @@ -229,18 +242,22 @@ def test_transform_scanning_angles_to_satellite_coords(self): def test_transform_satellite_to_earth_fixed_coords(self): """Test transformation from satellite to earth-fixed coordinates.""" point_sat = np.array([1, 2, 3], dtype=float) - greenwich_sidereal_time = np.pi - sat_sun_angles = np.array([np.pi, np.pi / 2]) earth_sun_angle = np.pi spin_angles = np.array([np.pi, np.pi / 2]) - nutation_precession = np.diag([1, 2, 3]).astype(float) + orbit = nav.Orbit( + angles=nav.OrbitAngles( + greenwich_sidereal_time=np.pi, + declination_from_sat_to_sun=np.pi, + right_ascension_from_sat_to_sun=np.pi / 2, + ), + sat_position=nav.SatellitePositionEarthFixed(-999, -999, -999), + nutation_precession=np.diag([1, 2, 3]).astype(float), + ) res = nav.transform_satellite_to_earth_fixed_coords( point_sat, - greenwich_sidereal_time, - sat_sun_angles, + orbit, earth_sun_angle, spin_angles, - nutation_precession, ) np.testing.assert_allclose(res, [-3, 1, -2]) @@ -360,7 +377,7 @@ def test_interpolate_orbit_prediction( """Test interpolating orbit prediction.""" orbit_prediction = orbit_prediction.to_numba() orbit = nav.interpolate_orbit_prediction(orbit_prediction, obs_time) - assert_namedtuple_close(orbit, orbit_expected) + _assert_namedtuple_close(orbit, orbit_expected) def test_interpolate_attitude_prediction( self, obs_time, attitude_prediction, attitude_expected @@ -368,7 +385,7 @@ def test_interpolate_attitude_prediction( """Test interpolating attitude prediction.""" attitude_prediction = attitude_prediction.to_numba() attitude = nav.interpolate_attitude_prediction(attitude_prediction, obs_time) - assert_namedtuple_close(attitude, attitude_expected) + _assert_namedtuple_close(attitude, attitude_expected) @pytest.fixture def obs_time(self): @@ -379,12 +396,16 @@ def obs_time(self): def orbit_expected(self): """Get expected orbit.""" return nav.Orbit( - greenwich_sidereal_time=1.5, - 
declination_from_sat_to_sun=1.6, - right_ascension_from_sat_to_sun=1.7, - sat_position_earth_fixed_x=1.8, - sat_position_earth_fixed_y=1.9, - sat_position_earth_fixed_z=2.0, + angles=nav.OrbitAngles( + greenwich_sidereal_time=1.5, + declination_from_sat_to_sun=1.6, + right_ascension_from_sat_to_sun=1.7, + ), + sat_position=nav.SatellitePositionEarthFixed( + x=1.8, + y=1.9, + z=2.0, + ), nutation_precession=1.6 * np.identity(3), ) @@ -431,12 +452,16 @@ def orbit_prediction(): """Get orbit prediction.""" return nav.OrbitPrediction( prediction_times=np.array([1.0, 2.0, 3.0, 4.0]), - greenwich_sidereal_time=np.array([0.0, 1.0, 2.0, 3.0]), - declination_from_sat_to_sun=np.array([0.1, 1.1, 2.1, 3.1]), - right_ascension_from_sat_to_sun=np.array([0.2, 1.2, 2.2, 3.2]), - sat_position_earth_fixed_x=np.array([0.3, 1.3, 2.3, 3.3]), - sat_position_earth_fixed_y=np.array([0.4, 1.4, 2.4, 3.4]), - sat_position_earth_fixed_z=np.array([0.5, 1.5, 2.5, 3.5]), + angles=nav.OrbitAngles( + greenwich_sidereal_time=np.array([0.0, 1.0, 2.0, 3.0]), + declination_from_sat_to_sun=np.array([0.1, 1.1, 2.1, 3.1]), + right_ascension_from_sat_to_sun=np.array([0.2, 1.2, 2.2, 3.2]), + ), + sat_position=nav.SatellitePositionEarthFixed( + x=np.array([0.3, 1.3, 2.3, 3.3]), + y=np.array([0.4, 1.4, 2.4, 3.4]), + z=np.array([0.5, 1.5, 2.5, 3.5]), + ), nutation_precession=np.array( [ 0.6 * np.identity(3), @@ -475,12 +500,20 @@ def test_get_observation_time(): np.testing.assert_allclose(obs_time, 50000.0000705496871047) -def assert_namedtuple_close(a, b): - """Assert that two numba namedtuples are approximately equal.""" +def _assert_namedtuple_close(a, b): + cls_name = b.__class__.__name__ assert a.__class__ == b.__class__ - for attr in a._fields: + for attr in b._fields: + a_attr = getattr(a, attr) + b_attr = getattr(b, attr) + if _is_namedtuple(b_attr): + _assert_namedtuple_close(a_attr, b_attr) np.testing.assert_allclose( - getattr(a, attr), - getattr(b, attr), - err_msg="{} attribute {} differs".format(a.__class__, attr), + a_attr, + b_attr, + err_msg=f"{cls_name} attribute {attr} differs" ) + + +def _is_namedtuple(obj): + return hasattr(obj, "_fields") From fe7212d2d9acb72aef67282a0ebd27709ae5a1ca Mon Sep 17 00:00:00 2001 From: ghiggi Date: Thu, 1 Jun 2023 12:08:36 +0200 Subject: [PATCH 0240/1416] Add callable to aggregate --- satpy/scene.py | 9 ++++++--- satpy/tests/scene_tests/test_resampling.py | 11 +++++++++++ 2 files changed, 17 insertions(+), 3 deletions(-) diff --git a/satpy/scene.py b/satpy/scene.py index d43c9d80d2..76ec327239 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -757,7 +757,8 @@ def aggregate(self, dataset_ids=None, boundary='trim', side='left', func='mean', `Scene`. Defaults to all datasets. func (string): Function to apply on each aggregation window. One of 'mean', 'sum', 'min', 'max', 'median', 'argmin', - 'argmax', 'prod', 'std', 'var'. + 'argmax', 'prod', 'std', 'var' strings or a custom + function (callable). 'mean' is the default. boundary: See :meth:`xarray.DataArray.coarsen`, 'trim' by default. side: See :meth:`xarray.DataArray.coarsen`, 'left' by default. 
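

A note on the dispatch introduced in the next hunk: when ``func`` is a
string it is looked up as a method on the object returned by xarray's
``coarsen``, and when it is a callable it is passed to that object's
``reduce`` method. A minimal standalone sketch of the same pattern with
plain xarray (variable names are illustrative, this is not satpy code):

    import numpy as np
    import xarray as xr

    data = xr.DataArray(np.arange(16.0).reshape(4, 4), dims=("y", "x"))
    coarse = data.coarsen(y=2, x=2, boundary="trim", side="left")
    agg_mean = coarse.mean()         # string func: getattr(coarse, "mean")()
    agg_sum = coarse.reduce(np.sum)  # callable func: coarse.reduce(func)
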
@@ -789,8 +790,10 @@ def aggregate(self, dataset_ids=None, boundary='trim', side='left', func='mean', resolution = max(target_area.lats.resolution, target_area.lons.resolution) for ds_id in ds_ids: res = self[ds_id].coarsen(boundary=boundary, side=side, **dim_kwargs) - - new_scn._datasets[ds_id] = getattr(res, func)() + if callable(func): + new_scn._datasets[ds_id] = res.reduce(func) + else: + new_scn._datasets[ds_id] = getattr(res, func)() new_scn._datasets[ds_id].attrs = self[ds_id].attrs.copy() new_scn._datasets[ds_id].attrs['area'] = target_area new_scn._datasets[ds_id].attrs['resolution'] = resolution diff --git a/satpy/tests/scene_tests/test_resampling.py b/satpy/tests/scene_tests/test_resampling.py index 19268f9e19..39f9a50092 100644 --- a/satpy/tests/scene_tests/test_resampling.py +++ b/satpy/tests/scene_tests/test_resampling.py @@ -560,6 +560,17 @@ def test_aggregate(self): expected_aggregated_shape = (y_size / 2, x_size / 2) self._check_aggregation_results(expected_aggregated_shape, scene1, scene2, x_size, y_size) + def test_custom_aggregate(self): + """Test the aggregate method with custom function.""" + x_size = 3712 + y_size = 3712 + + scene1 = self._create_test_data(x_size, y_size) + + scene2 = scene1.aggregate(func=np.sum, x=2, y=2) + expected_aggregated_shape = (y_size / 2, x_size / 2) + self._check_aggregation_results(expected_aggregated_shape, scene1, scene2, x_size, y_size) + @staticmethod def _create_test_data(x_size, y_size): from pyresample.geometry import AreaDefinition From 1c2f1d016c6c2381e4834622d79e968257403293 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Thu, 1 Jun 2023 10:28:32 +0000 Subject: [PATCH 0241/1416] Refactor attitude prediction --- satpy/readers/gms5_vissr_l1b.py | 10 ++- satpy/readers/gms5_vissr_navigation.py | 87 +++++++------------ .../test_gms5_vissr_navigation.py | 18 ++-- 3 files changed, 49 insertions(+), 66 deletions(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index 4994d5a921..e5ef6cb10d 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -850,9 +850,9 @@ def _get_predicted_navigation_params(self): def _get_attitude_prediction(self): att_pred = self._header["image_parameters"]["attitude_prediction"]["data"] - attitude_prediction = nav.AttitudePrediction( - prediction_times=att_pred["prediction_time_mjd"].astype(np.float64), - angle_between_earth_and_sun=att_pred["sun_earth_angle"].astype(np.float64), + attitudes = nav.Attitude( + angle_between_earth_and_sun=att_pred["sun_earth_angle"].astype( + np.float64), angle_between_sat_spin_and_z_axis=att_pred[ "right_ascension_of_attitude" ].astype(np.float64), @@ -860,6 +860,10 @@ def _get_attitude_prediction(self): "declination_of_attitude" ].astype(np.float64), ) + attitude_prediction = nav.AttitudePrediction( + prediction_times=att_pred["prediction_time_mjd"].astype(np.float64), + attitude=attitudes + ) return attitude_prediction def _get_orbit_prediction(self): diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index 30f6b619a5..5086493891 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -86,9 +86,7 @@ "_AttitudePrediction", [ "prediction_times", - "angle_between_earth_and_sun", - "angle_between_sat_spin_and_z_axis", - "angle_between_sat_spin_and_yz_plane", + "attitude" ], ) @@ -115,9 +113,7 @@ class AttitudePrediction: def __init__( self, prediction_times, - angle_between_earth_and_sun, - 
angle_between_sat_spin_and_z_axis",
-        "angle_between_sat_spin_and_yz_plane",
+        "attitude"
     ],
 )
 
@@ -115,9 +113,7 @@ class AttitudePrediction:
     def __init__(
         self,
         prediction_times,
-        angle_between_earth_and_sun,
-        angle_between_sat_spin_and_z_axis,
-        angle_between_sat_spin_and_yz_plane,
+        attitude
     ):
         """Initialize attitude prediction.
 
@@ -127,28 +123,23 @@ def __init__(
 
         Args:
             prediction_times: Timestamps of predicted attitudes
-            angle_between_earth_and_sun: Angle between earth and sun
-            angle_between_sat_spin_and_z_axis: Angle between satellite's
-                spin-axis and the z-axis of the coordinate system
-            angle_between_sat_spin_and_yz_plane: Angle between satellite's
-                spin-axis and the yz-plane of the coordinate system
+            attitude (Attitude): Attitude angles
         """
         self.prediction_times = prediction_times
-        self.angle_between_earth_and_sun = np.unwrap(angle_between_earth_and_sun)
-        self.angle_between_sat_spin_and_z_axis = np.unwrap(
-            angle_between_sat_spin_and_z_axis
-        )
-        self.angle_between_sat_spin_and_yz_plane = np.unwrap(
-            angle_between_sat_spin_and_yz_plane
+        self.attitude = self._unwrap_angles(attitude)
+
+    def _unwrap_angles(self, attitude):
+        return Attitude(
+            np.unwrap(attitude.angle_between_earth_and_sun),
+            np.unwrap(attitude.angle_between_sat_spin_and_z_axis),
+            np.unwrap(attitude.angle_between_sat_spin_and_yz_plane),
         )
 
     def to_numba(self):
         """Convert to numba-compatible type."""
         return _AttitudePrediction(
             prediction_times=self.prediction_times,
-            angle_between_earth_and_sun=self.angle_between_earth_and_sun,
-            angle_between_sat_spin_and_z_axis=self.angle_between_sat_spin_and_z_axis,
-            angle_between_sat_spin_and_yz_plane=self.angle_between_sat_spin_and_yz_plane,
+            attitude=self.attitude
         )
 
 
@@ -318,8 +309,7 @@ def get_lon_lat(point, nav_params):
     view_vector_earth_fixed = transform_satellite_to_earth_fixed_coords(
         view_vector_sat,
         orbit,
-        attitude.angle_between_earth_and_sun,
-        _get_spin_angles(attitude),
+        attitude
     )
     point_on_earth = intersect_with_earth(
         view_vector_earth_fixed, _get_sat_pos(orbit), _get_ellipsoid(proj_params)
@@ -413,59 +403,44 @@ def _get_transforms_from_scanning_angles_to_satellite_coords(angles):
 def transform_satellite_to_earth_fixed_coords(
     point,
     orbit,
-    earth_sun_angle,
-    spin_angles,
+    attitude
 ):
     """Transform from satellite angular momentum to earth-fixed coordinates.
 
     Args:
         point: Point (x, y, z) in satellite angular momentum coordinates.
         orbit (Orbit): Orbital parameters
-        earth_sun_angle: Angle between sun and earth center on the z-axis
-            vertical plane (rad)
-        spin_angles: Angle between satellite spin axis and z-axis (rad),
-            angle between satellite spin axis and yz-plane
+        attitude (Attitude): Attitude parameters
 
     Returns:
        Point (x', y', z') in earth-fixed coordinates.
""" - sat_unit_vectors = _get_satellite_unit_vectors( - orbit, - earth_sun_angle, - spin_angles, - ) + sat_unit_vectors = _get_satellite_unit_vectors(orbit, attitude) return np.dot(sat_unit_vectors, point) @numba.njit -def _get_satellite_unit_vectors( - orbit, - earth_sun_angle, - spin_angles, -): - unit_vector_z = _get_satellite_unit_vector_z( - spin_angles, orbit.angles.greenwich_sidereal_time, orbit.nutation_precession - ) +def _get_satellite_unit_vectors(orbit, attitude): + unit_vector_z = _get_satellite_unit_vector_z(attitude, orbit) unit_vector_x = _get_satellite_unit_vector_x( - earth_sun_angle, orbit.angles, unit_vector_z + attitude, orbit, unit_vector_z ) unit_vector_y = _get_satellite_unit_vector_y(unit_vector_x, unit_vector_z) return np.stack((unit_vector_x, unit_vector_y, unit_vector_z), axis=-1) @numba.njit -def _get_satellite_unit_vector_z( - spin_angles, greenwich_sidereal_time, nutation_precession -): - sat_z_axis_1950 = _get_satellite_z_axis_1950(spin_angles) - rotation = _get_transform_from_1950_to_earth_fixed(greenwich_sidereal_time) - z_vec = np.dot(rotation, np.dot(nutation_precession, sat_z_axis_1950)) +def _get_satellite_unit_vector_z(attitude, orbit): + sat_z_axis_1950 = _get_satellite_z_axis_1950(attitude) + rotation = _get_transform_from_1950_to_earth_fixed(orbit.angles.greenwich_sidereal_time) + z_vec = np.dot(rotation, np.dot(orbit.nutation_precession, sat_z_axis_1950)) return normalize_vector(z_vec) @numba.njit -def _get_satellite_z_axis_1950(spin_angles): +def _get_satellite_z_axis_1950(attitude): """Get satellite z-axis (spin) in mean of 1950 coordinates.""" - alpha, delta = spin_angles + alpha = attitude.angle_between_sat_spin_and_z_axis + delta = attitude.angle_between_sat_spin_and_yz_plane cos_delta = np.cos(delta) x = np.sin(delta) y = -cos_delta * np.sin(alpha) @@ -481,9 +456,9 @@ def _get_transform_from_1950_to_earth_fixed(greenwich_sidereal_time): @numba.njit -def _get_satellite_unit_vector_x(earth_sun_angle, orbit_angles, sat_unit_vector_z): - beta = earth_sun_angle - sat_sun_vector = _get_vector_from_satellite_to_sun(orbit_angles) +def _get_satellite_unit_vector_x(attitude, orbit, sat_unit_vector_z): + beta = attitude.angle_between_earth_and_sun + sat_sun_vector = _get_vector_from_satellite_to_sun(orbit.angles) z_cross_satsun = np.cross(sat_unit_vector_z, sat_sun_vector) z_cross_satsun = normalize_vector(z_cross_satsun) x_vec = z_cross_satsun * np.sin(beta) + np.cross( @@ -645,17 +620,17 @@ def interpolate_attitude_prediction(attitude_prediction, observation_time): angle_between_earth_and_sun = interpolate_angles( observation_time, attitude_prediction.prediction_times, - attitude_prediction.angle_between_earth_and_sun, + attitude_prediction.attitude.angle_between_earth_and_sun, ) angle_between_sat_spin_and_z_axis = interpolate_angles( observation_time, attitude_prediction.prediction_times, - attitude_prediction.angle_between_sat_spin_and_z_axis, + attitude_prediction.attitude.angle_between_sat_spin_and_z_axis, ) angle_between_sat_spin_and_yz_plane = interpolate_angles( observation_time, attitude_prediction.prediction_times, - attitude_prediction.angle_between_sat_spin_and_yz_plane, + attitude_prediction.attitude.angle_between_sat_spin_and_yz_plane, ) return Attitude( angle_between_earth_and_sun, diff --git a/satpy/tests/reader_tests/test_gms5_vissr_navigation.py b/satpy/tests/reader_tests/test_gms5_vissr_navigation.py index b663d7b9e6..2a8bec4ae1 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_navigation.py +++ 
b/satpy/tests/reader_tests/test_gms5_vissr_navigation.py
@@ -242,8 +242,11 @@ def test_transform_scanning_angles_to_satellite_coords(self):
     def test_transform_satellite_to_earth_fixed_coords(self):
         """Test transformation from satellite to earth-fixed coordinates."""
         point_sat = np.array([1, 2, 3], dtype=float)
-        earth_sun_angle = np.pi
-        spin_angles = np.array([np.pi, np.pi / 2])
+        attitude = nav.Attitude(
+            angle_between_earth_and_sun=np.pi,
+            angle_between_sat_spin_and_z_axis=np.pi,
+            angle_between_sat_spin_and_yz_plane=np.pi / 2
+        )
         orbit = nav.Orbit(
             angles=nav.OrbitAngles(
                 greenwich_sidereal_time=np.pi,
@@ -256,8 +259,7 @@ def test_transform_satellite_to_earth_fixed_coords(self):
         res = nav.transform_satellite_to_earth_fixed_coords(
             point_sat,
             orbit,
-            earth_sun_angle,
-            spin_angles,
+            attitude
         )
         np.testing.assert_allclose(res, [-3, 1, -2])
 
@@ -441,9 +443,11 @@ def attitude_prediction():
     """Get attitude prediction."""
     return nav.AttitudePrediction(
         prediction_times=np.array([1.0, 2.0, 3.0]),
-        angle_between_earth_and_sun=np.array([0.0, 1.0, 2.0]),
-        angle_between_sat_spin_and_z_axis=np.array([0.1, 1.1, 2.1]),
-        angle_between_sat_spin_and_yz_plane=np.array([0.2, 1.2, 2.2]),
+        attitude=nav.Attitude(
+            angle_between_earth_and_sun=np.array([0.0, 1.0, 2.0]),
+            angle_between_sat_spin_and_z_axis=np.array([0.1, 1.1, 2.1]),
+            angle_between_sat_spin_and_yz_plane=np.array([0.2, 1.2, 2.2]),
+        )
     )
 

From 53b11a6306bdabeeaef21a582f00deb0bc47550d Mon Sep 17 00:00:00 2001
From: ghiggi
Date: Thu, 1 Jun 2023 14:17:11 +0200
Subject: [PATCH 0242/1416] Add numpy docstring

---
 satpy/scene.py | 56 +++++++++++++++++++++++++++++++-------------------
 1 file changed, 35 insertions(+), 21 deletions(-)

diff --git a/satpy/scene.py b/satpy/scene.py
index 76ec327239..452ad7e299 100644
--- a/satpy/scene.py
+++ b/satpy/scene.py
@@ -752,27 +752,41 @@ def crop(self, area=None, ll_bbox=None, xy_bbox=None, dataset_ids=None):
     def aggregate(self, dataset_ids=None, boundary='trim', side='left', func='mean', **dim_kwargs):
         """Create an aggregated version of the Scene.
 
-        Args:
-            dataset_ids (iterable): DataIDs to include in the returned
-                `Scene`. Defaults to all datasets.
-            func (string): Function to apply on each aggregation window. One of
-                'mean', 'sum', 'min', 'max', 'median', 'argmin',
-                'argmax', 'prod', 'std', 'var' strings or a custom
-                function (callable).
-                'mean' is the default.
-            boundary: See :meth:`xarray.DataArray.coarsen`, 'trim' by default.
-            side: See :meth:`xarray.DataArray.coarsen`, 'left' by default.
-            dim_kwargs: the size of the windows to aggregate.
-
-        Returns:
-            A new aggregated scene
-
-        See also:
-            xarray.DataArray.coarsen
-
-        Example:
-            `scn.aggregate(func='min', x=2, y=2)` will apply the `min` function
-            across a window of size 2 pixels.
+        Parameters
+        ----------
+        dataset_ids : iterable, optional
+            DataIDs to include in the returned `Scene`.
+            If None (the default), all datasets are included.
+        boundary : str, optional
+            See :meth:`xarray.DataArray.coarsen`.
+            The default is 'trim'.
+        side : str, optional
+            See :meth:`xarray.DataArray.coarsen`.
+            The default is 'left'.
+        func : str or callable, optional
+            Function to apply on each aggregation window.
+            One of 'mean', 'sum', 'min', 'max', 'median', 'argmin', 'argmax',
+            'prod', 'std', 'var' strings or a custom function (callable).
+            The default is 'mean'.
+        **dim_kwargs
+            The size of the windows to aggregate.
+            For example: x=2, y=2
+
+        Returns
+        -------
+        new_scn : satpy.Scene
+            A new aggregated scene.
+
+        See Also
+        --------
+        xarray.DataArray.coarsen
+            The underlying xarray method doing the window aggregation.
+
+        Examples
+        --------
+        Apply the `min` function across a window of size 2 pixels:
+
+        >>> scn.aggregate(func='min', x=2, y=2)
         """
         new_scn = self.copy(datasets=dataset_ids)
 

From 31b795cdd3c77560f3843b19483895b733ef239c Mon Sep 17 00:00:00 2001
From: ghiggi
Date: Thu, 1 Jun 2023 14:26:54 +0200
Subject: [PATCH 0243/1416] Refactor in small functions

---
 satpy/scene.py | 35 +++++++++++++++++++++++++----------
 1 file changed, 25 insertions(+), 10 deletions(-)

diff --git a/satpy/scene.py b/satpy/scene.py
index 452ad7e299..eb9ed66b92 100644
--- a/satpy/scene.py
+++ b/satpy/scene.py
@@ -41,6 +41,25 @@
 LOG = logging.getLogger(__name__)
 
 
+def _get_area_resolution(area):
+    """Attempt to retrieve resolution from AreaDefinition."""
+    try:
+        resolution = max(area.pixel_size_x, area.pixel_size_y)
+    except AttributeError:
+        resolution = max(area.lats.resolution, area.lons.resolution)
+    return resolution
+
+
+def _aggregate_data_array(data_array, func, boundary, side, **dim_kwargs):
+    """Aggregate xr.DataArray."""
+    res = data_array.coarsen(boundary=boundary, side=side, **dim_kwargs)
+    if callable(func):
+        out = res.reduce(func)
+    else:
+        out = getattr(res, func)()
+    return out
+
+
 class DelayedGeneration(KeyError):
     """Mark that a dataset can't be generated without further modification."""
 
@@ -798,20 +817,16 @@ def aggregate(self, dataset_ids=None, boundary='trim', side='left', func='mean',
             continue
 
         target_area = src_area.aggregate(boundary=boundary, **dim_kwargs)
-        try:
-            resolution = max(target_area.pixel_size_x, target_area.pixel_size_y)
-        except AttributeError:
-            resolution = max(target_area.lats.resolution, target_area.lons.resolution)
+        resolution = _get_area_resolution(target_area)
         for ds_id in ds_ids:
-            res = self[ds_id].coarsen(boundary=boundary, side=side, **dim_kwargs)
-            if callable(func):
-                new_scn._datasets[ds_id] = res.reduce(func)
-            else:
-                new_scn._datasets[ds_id] = getattr(res, func)()
+            new_scn._datasets[ds_id] = _aggregate_data_array(self[ds_id],
+                                                             func=func,
+                                                             boundary=boundary,
+                                                             side=side,
+                                                             **dim_kwargs)
             new_scn._datasets[ds_id].attrs = self[ds_id].attrs.copy()
             new_scn._datasets[ds_id].attrs['area'] = target_area
             new_scn._datasets[ds_id].attrs['resolution'] = resolution
-
         return new_scn

From 22724089655c3e486bc49223fe3d1644dd775b03 Mon Sep 17 00:00:00 2001
From: Stephan Finkensieper
Date: Thu, 1 Jun 2023 12:45:54 +0000
Subject: [PATCH 0244/1416] Refactor projection parameters

---
 satpy/readers/gms5_vissr_l1b.py               |  89 ++++++----
 satpy/readers/gms5_vissr_navigation.py        |  97 +++++++----
 .../test_gms5_vissr_navigation.py             | 153 +++++++++++-------
 3 files changed, 226 insertions(+), 113 deletions(-)

diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py
index e5ef6cb10d..7f7bc4b905 100644
--- a/satpy/readers/gms5_vissr_l1b.py
+++ b/satpy/readers/gms5_vissr_l1b.py
@@ -657,9 +657,8 @@ def _get_nominal_shape(self):
         return frame_params["number_of_lines"], frame_params["number_of_pixels"]
 
     def _get_mda(self):
-        mode_block = self._header["image_parameters"]["mode"]
         return {
-            "platform": mode_block["satellite_name"].decode().strip().upper(),
+            "platform": self._mode_block["satellite_name"].decode().strip().upper(),
             "sensor": "VISSR",
             "time_parameters": self._get_time_parameters(),
             "orbital_parameters": self._get_orbital_parameters(),
@@ -682,8 +681,7 @@ def _get_orbital_parameters(self):
         }
 
     def _get_time_parameters(self):
-        mode_block = 
self._header["image_parameters"]["mode"] - start_time = mjd2datetime64(mode_block["observation_time_mjd"]) + start_time = mjd2datetime64(self._mode_block["observation_time_mjd"]) start_time = start_time.astype(dt.datetime).replace(second=0, microsecond=0) end_time = start_time + dt.timedelta( minutes=25 @@ -808,39 +806,60 @@ def _get_static_navigation_params(self, dataset_id): IR2: 1378.7 IR3: 1379.1001 """ - alt_ch_name = ALT_CHANNEL_NAMES[dataset_id["name"]] - mode_block = self._header["image_parameters"]["mode"] - coord_conv = self._header["image_parameters"]["coordinate_conversion"] - center_line_vissr_frame = coord_conv["central_line_number_of_vissr_frame"][ + alt_ch_name = _get_alternative_channel_name(dataset_id) + scan_params = nav.ScanningParameters( + start_time_of_scan=self._coord_conv["scheduled_observation_time"], + spinning_rate=self._mode_block["spin_rate"], + num_sensors=self._coord_conv["number_of_sensor_elements"][alt_ch_name], + sampling_angle=self._coord_conv["sampling_angle_along_pixel"][alt_ch_name], + ) + proj_params = self._get_proj_params(dataset_id) + return scan_params, proj_params + + def _get_proj_params(self, dataset_id): + proj_params = nav.ProjectionParameters( + image_offset=self._get_image_offset(dataset_id), + scanning_angles=self._get_scanning_angles(dataset_id), + earth_ellipsoid=self._get_earth_ellipsoid() + ) + return proj_params + + def _get_earth_ellipsoid(self): + # Use earth radius and flattening from JMA's Msial library, because + # the values in the data seem to be pretty old. For example the + # equatorial radius is from the Bessel Ellipsoid (1841). + return nav.EarthEllipsoid( + flattening=nav.EARTH_FLATTENING, + equatorial_radius=nav.EARTH_EQUATORIAL_RADIUS, + ) + + def _get_scanning_angles(self, dataset_id): + alt_ch_name = _get_alternative_channel_name(dataset_id) + misalignment = np.ascontiguousarray( + self._coord_conv["matrix_of_misalignment"].transpose().astype(np.float64) + ) + return nav.ScanningAngles( + stepping_angle=self._coord_conv["stepping_angle_along_line"][alt_ch_name], + sampling_angle=self._coord_conv["sampling_angle_along_pixel"][ + alt_ch_name], + misalignment=misalignment + ) + + def _get_image_offset(self, dataset_id): + alt_ch_name = _get_alternative_channel_name(dataset_id) + center_line_vissr_frame = self._coord_conv["central_line_number_of_vissr_frame"][ alt_ch_name ] - center_pixel_vissr_frame = coord_conv["central_pixel_number_of_vissr_frame"][ + center_pixel_vissr_frame = self._coord_conv["central_pixel_number_of_vissr_frame"][ alt_ch_name ] - pixel_offset = coord_conv[ + pixel_offset = self._coord_conv[ "pixel_difference_of_vissr_center_from_normal_position" ][alt_ch_name] - scan_params = nav.ScanningParameters( - start_time_of_scan=coord_conv["scheduled_observation_time"], - spinning_rate=mode_block["spin_rate"], - num_sensors=coord_conv["number_of_sensor_elements"][alt_ch_name], - sampling_angle=coord_conv["sampling_angle_along_pixel"][alt_ch_name], - ) - # Use earth radius and flattening from JMA's Msial library, because - # the values in the data seem to be pretty old. For example the - # equatorial radius is from the Bessel Ellipsoid (1841). 
- proj_params = nav.ProjectionParameters( + return nav.ImageOffset( line_offset=center_line_vissr_frame, pixel_offset=center_pixel_vissr_frame + pixel_offset, - stepping_angle=coord_conv["stepping_angle_along_line"][alt_ch_name], - sampling_angle=coord_conv["sampling_angle_along_pixel"][alt_ch_name], - misalignment=np.ascontiguousarray( - coord_conv["matrix_of_misalignment"].transpose().astype(np.float64) - ), - earth_flattening=nav.EARTH_FLATTENING, - earth_equatorial_radius=nav.EARTH_EQUATORIAL_RADIUS, ) - return scan_params, proj_params def _get_predicted_navigation_params(self): """Get predictions of time-dependent navigation parameters.""" @@ -923,6 +942,18 @@ def end_time(self): """Nominal end time of the dataset.""" return self._mda["time_parameters"]["nominal_end_time"] + @property + def _coord_conv(self): + return self._header["image_parameters"]["coordinate_conversion"] + + @property + def _mode_block(self): + return self._header["image_parameters"]["mode"] + + +def _get_alternative_channel_name(dataset_id): + return ALT_CHANNEL_NAMES[dataset_id["name"]] + def read_from_file_obj(file_obj, dtype, count, offset=0): """Read data from file object. @@ -1110,7 +1141,7 @@ def _get_shape_dict(self, original_shape, dataset_id): # angle) to the horizontal dimension to obtain a square area definition # with uniform sampling. num_lines, _ = original_shape - alt_ch_name = ALT_CHANNEL_NAMES[dataset_id["name"]] + alt_ch_name = _get_alternative_channel_name(dataset_id) stepping_angle = self.coord_conv["stepping_angle_along_line"][alt_ch_name] uniform_size = num_lines uniform_line_pixel_offset = 0.5 * num_lines diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index 5086493891..48b2a4edb1 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -30,6 +30,7 @@ "angle_between_sat_spin_and_yz_plane", ], ) +"""Units: radians""" Orbit = namedtuple( @@ -57,29 +58,65 @@ "right_ascension_from_sat_to_sun", ], ) +"""Units: radians""" + SatellitePositionEarthFixed = namedtuple( "SatellitePositionEarthFixed", ["x", "y", "z"], ) +"""Units: meters""" + ScanningParameters = namedtuple( "ScanningParameters", ["start_time_of_scan", "spinning_rate", "num_sensors", "sampling_angle"], ) + ProjectionParameters = namedtuple( "ProjectionParameters", + [ + "image_offset", + "scanning_angles", + "earth_ellipsoid", + ], +) + + +ImageOffset = namedtuple( + "ImageOffset", [ "line_offset", "pixel_offset", + ] +) + + +ScanningAngles = namedtuple( + "ScanningAngles", + [ "stepping_angle", "sampling_angle", - "misalignment", - "earth_flattening", - "earth_equatorial_radius", - ], + "misalignment" + ] ) +ScanningAngles.__doc__ = """Scanning angles + +Args: + stepping_angle: Scanning angle along line (rad) + sampling_angle: Scanning angle along pixel (rad) + misalignment: Misalignment matrix (3x3) +""" + +EarthEllipsoid = namedtuple( + "EarthEllipsoid", + [ + "flattening", + "equatorial_radius" + ] +) +"""Units: meters""" _AttitudePrediction = namedtuple( @@ -301,10 +338,13 @@ def get_lon_lat(point, nav_params): """ attitude, orbit, proj_params = nav_params scan_angles = transform_image_coords_to_scanning_angles( - point, _get_image_offset(proj_params), _get_sampling(proj_params) + point, + proj_params.image_offset, + proj_params.scanning_angles ) view_vector_sat = transform_scanning_angles_to_satellite_coords( - scan_angles, proj_params.misalignment + scan_angles, + proj_params.scanning_angles.misalignment ) view_vector_earth_fixed = 
transform_satellite_to_earth_fixed_coords( view_vector_sat, @@ -312,10 +352,10 @@ def get_lon_lat(point, nav_params): attitude ) point_on_earth = intersect_with_earth( - view_vector_earth_fixed, _get_sat_pos(orbit), _get_ellipsoid(proj_params) + view_vector_earth_fixed, orbit.sat_position, proj_params.earth_ellipsoid ) lon, lat = transform_earth_fixed_to_geodetic_coords( - point_on_earth, proj_params.earth_flattening + point_on_earth, proj_params.earth_ellipsoid.flattening ) return lon, lat @@ -339,36 +379,32 @@ def _get_spin_angles(attitude): @numba.njit -def _get_sat_pos(orbit): +def _get_sat_pos_vector(sat_position): return np.array( ( - orbit.sat_position.x, - orbit.sat_position.y, - orbit.sat_position.z, + sat_position.x, + sat_position.y, + sat_position.z, ) ) @numba.njit -def _get_ellipsoid(proj_params): - return (proj_params.earth_equatorial_radius, proj_params.earth_flattening) - - -@numba.njit -def transform_image_coords_to_scanning_angles(point, offset, sampling): +def transform_image_coords_to_scanning_angles(point, image_offset, scanning_angles): """Transform image coordinates to scanning angles. Args: point: Point (line, pixel) in image coordinates. - offset: Offset (line, pixel) from image center. - sampling: Stepping angle (along line) and sampling angle (along pixels) - in radians. + image_offset (ImageOffset): Image offset. + scanning_angles (ScanningAngles): Scanning angles. Returns: Scanning angles (x, y) at the pixel center (rad). """ line, pixel = point - line_offset, pixel_offset = offset - stepping_angle, sampling_angle = sampling + line_offset = image_offset.line_offset + pixel_offset = image_offset.pixel_offset + stepping_angle = scanning_angles.stepping_angle + sampling_angle = scanning_angles.sampling_angle x = sampling_angle * (pixel + 1 - pixel_offset) y = stepping_angle * (line + 1 - line_offset) return np.array([x, y]) @@ -491,13 +527,15 @@ def intersect_with_earth(view_vector, sat_pos, ellipsoid): Args: view_vector: Instrument viewing vector (x, y, z) in earth-fixed coordinates. - sat_pos: Satellite position (x, y, z) in earth-fixed coordinates. - ellipsoid: Flattening and equatorial radius of the earth. + sat_pos (SatellitePositionEarthFixed): Satellite position in + earth-fixed coordinates. + ellipsoid (EarthEllipsoid): Earth ellipsoid. Returns: Intersection (x', y', z') with the earth's surface. 
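+
+    Note:
+        The distance ``d`` to the intersection is obtained by solving the
+        quadratic ``a*d**2 + 2*b*d + c = 0`` along the view vector, where
+        ``a``, ``b`` and ``c`` are built from the flattening-scaled view
+        vector and satellite position (see
+        ``_get_distances_to_intersections`` below).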
""" distance = _get_distance_to_intersection(view_vector, sat_pos, ellipsoid) - return sat_pos + distance * view_vector + sat_pos_vec = _get_sat_pos_vector(sat_pos) + return sat_pos_vec + distance * view_vector @numba.njit @@ -514,14 +552,13 @@ def _get_distance_to_intersection(view_vector, sat_pos, ellipsoid): @numba.njit def _get_distances_to_intersections(view_vector, sat_pos, ellipsoid): - equatorial_radius, flattening = ellipsoid - flat2 = (1 - flattening) ** 2 + flat2 = (1 - ellipsoid.flattening) ** 2 ux, uy, uz = view_vector - x, y, z = sat_pos + x, y, z = sat_pos.x, sat_pos.y, sat_pos.z a = flat2 * (ux**2 + uy**2) + uz**2 b = flat2 * (x * ux + y * uy) + z * uz - c = flat2 * (x**2 + y**2 - equatorial_radius**2) + z**2 + c = flat2 * (x**2 + y**2 - ellipsoid.equatorial_radius**2) + z**2 tmp = np.sqrt((b**2 - a * c)) dist_1 = (-b + tmp) / a diff --git a/satpy/tests/reader_tests/test_gms5_vissr_navigation.py b/satpy/tests/reader_tests/test_gms5_vissr_navigation.py index 2a8bec4ae1..72d593aaa0 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_navigation.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_navigation.py @@ -41,17 +41,23 @@ ), ), nav.ProjectionParameters( - line_offset=1378.5, - pixel_offset=1672.5, - stepping_angle=0.000140000047395, - sampling_angle=0.000095719995443, - misalignment=np.array( - [[0.999999165534973, 0.000510364072397, 0.001214201096445], - [-0.000511951977387, 0.999999046325684, 0.001307720085606], - [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + image_offset=nav.ImageOffset( + line_offset=1378.5, + pixel_offset=1672.5, ), - earth_flattening=0.003352813177897, - earth_equatorial_radius=6378136.0 + scanning_angles=nav.ScanningAngles( + stepping_angle=0.000140000047395, + sampling_angle=0.000095719995443, + misalignment=np.array( + [[0.999999165534973, 0.000510364072397, 0.001214201096445], + [-0.000511951977387, 0.999999046325684, 0.001307720085606], + [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + ) + ), + earth_ellipsoid=nav.EarthEllipsoid( + flattening=0.003352813177897, + equatorial_radius=6378136.0 + ) ), ) }, @@ -84,17 +90,23 @@ ), ), nav.ProjectionParameters( - line_offset=1378.5, - pixel_offset=1672.5, - stepping_angle=0.000140000047395, - sampling_angle=0.000095719995443, - misalignment=np.array( - [[0.999999165534973, 0.000510364072397, 0.001214201096445], - [-0.000511951977387, 0.999999046325684, 0.001307720085606], - [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + image_offset=nav.ImageOffset( + line_offset=1378.5, + pixel_offset=1672.5, + ), + scanning_angles=nav.ScanningAngles( + stepping_angle=0.000140000047395, + sampling_angle=0.000095719995443, + misalignment=np.array( + [[0.999999165534973, 0.000510364072397, 0.001214201096445], + [-0.000511951977387, 0.999999046325684, 0.001307720085606], + [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + ), ), - earth_flattening=0.003352813177897, - earth_equatorial_radius=6378136 + earth_ellipsoid=nav.EarthEllipsoid( + flattening=0.003352813177897, + equatorial_radius=6378136 + ) ), ) } @@ -131,17 +143,23 @@ ), ), nav.ProjectionParameters( - line_offset=5513.0, - pixel_offset=6688.5, - stepping_angle=0.000035000004573, - sampling_angle=0.000023929998861, - misalignment=np.array( - [[0.999999165534973, 0.000510364072397, 0.001214201096445], - [-0.000511951977387, 0.999999046325684, 0.001307720085606], - [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + image_offset=nav.ImageOffset( + line_offset=5513.0, + pixel_offset=6688.5, + 
), + scanning_angles=nav.ScanningAngles( + stepping_angle=0.000035000004573, + sampling_angle=0.000023929998861, + misalignment=np.array( + [[0.999999165534973, 0.000510364072397, 0.001214201096445], + [-0.000511951977387, 0.999999046325684, 0.001307720085606], + [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + ), ), - earth_flattening=0.003352813177897, - earth_equatorial_radius=6378136 + earth_ellipsoid=nav.EarthEllipsoid( + flattening=0.003352813177897, + equatorial_radius=6378136 + ) ), ) }, @@ -174,17 +192,23 @@ ), ), nav.ProjectionParameters( - line_offset=5513.0, - pixel_offset=6688.5, - stepping_angle=0.000035000004573, - sampling_angle=0.000023929998861, - misalignment=np.array( - [[0.999999165534973, 0.000510364072397, 0.001214201096445], - [-0.000511951977387, 0.999999046325684, 0.001307720085606], - [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + image_offset=nav.ImageOffset( + line_offset=5513.0, + pixel_offset=6688.5, ), - earth_flattening=0.003352813177897, - earth_equatorial_radius=6378136 + scanning_angles=nav.ScanningAngles( + stepping_angle=0.000035000004573, + sampling_angle=0.000023929998861, + misalignment=np.array( + [[0.999999165534973, 0.000510364072397, 0.001214201096445], + [-0.000511951977387, 0.999999046325684, 0.001307720085606], + [-0.001213532872498, -0.001308340579271, 0.999998450279236]] + ), + ), + earth_ellipsoid=nav.EarthEllipsoid( + flattening=0.003352813177897, + equatorial_radius=6378136 + ) ), ) }, @@ -223,10 +247,19 @@ def test_get_lon_lat(self, point, nav_params, expected): def test_transform_image_coords_to_scanning_angles(self): """Test transformation from image coordinates to scanning angles.""" + offset = nav.ImageOffset( + line_offset=100, + pixel_offset=200 + ) + scanning_angles = nav.ScanningAngles( + stepping_angle=0.01, + sampling_angle=0.02, + misalignment=-999 + ) angles = nav.transform_image_coords_to_scanning_angles( point=np.array([199, 99]), - offset=np.array([100, 200]), - sampling=np.array([0.01, 0.02]), + image_offset=offset, + scanning_angles=scanning_angles ) np.testing.assert_allclose(angles, [-2, 1]) @@ -266,12 +299,18 @@ def test_transform_satellite_to_earth_fixed_coords(self): def test_intersect_view_vector_with_earth(self): """Test intersection of a view vector with the earth's surface.""" view_vector = np.array([-1, 0, 0], dtype=float) - sat_pos = np.array([36000 * 1000, 0, 0], dtype=float) - eq_radius = 6371 * 1000 - flattening = 0.003 - ellipsoid = np.array([eq_radius, flattening]) + ellipsoid = nav.EarthEllipsoid( + equatorial_radius=6371 * 1000, + flattening=0.003 + ) + sat_pos = nav.SatellitePositionEarthFixed( + x=36000 * 1000.0, + y=0.0, + z=0.0 + ) point = nav.intersect_with_earth(view_vector, sat_pos, ellipsoid) - np.testing.assert_allclose(point, [eq_radius, 0, 0]) + exp = [ellipsoid.equatorial_radius, 0, 0] + np.testing.assert_allclose(point, exp) @pytest.mark.parametrize( "point_earth_fixed,point_geodetic_exp", @@ -481,13 +520,19 @@ def orbit_prediction(): def proj_params(sampling_angle): """Get projection parameters.""" return nav.ProjectionParameters( - line_offset=1378.5, - pixel_offset=1672.5, - stepping_angle=0.000140000047395, - sampling_angle=sampling_angle, - misalignment=np.identity(3).astype(np.float64), - earth_flattening=0.003352813177897, - earth_equatorial_radius=6378136, + image_offset=nav.ImageOffset( + line_offset=1378.5, + pixel_offset=1672.5, + ), + scanning_angles=nav.ScanningAngles( + stepping_angle=0.000140000047395, + sampling_angle=sampling_angle, + 
misalignment=np.identity(3).astype(np.float64), + ), + earth_ellipsoid=nav.EarthEllipsoid( + flattening=0.003352813177897, + equatorial_radius=6378136, + ) ) From e23778d6c08f16d3897b1b7144f98278740abb40 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Thu, 1 Jun 2023 16:12:58 +0200 Subject: [PATCH 0245/1416] Revert docstring and resolve CodeScene warning --- satpy/scene.py | 60 +++++++++++++++++++------------------------------- 1 file changed, 23 insertions(+), 37 deletions(-) diff --git a/satpy/scene.py b/satpy/scene.py index eb9ed66b92..87254918d3 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -50,9 +50,10 @@ def _get_area_resolution(area): return resolution -def _aggregate_data_array(data_array, func, boundary, side, **dim_kwargs): +def _aggregate_data_array(data_array, **coarsen_kwargs): """Aggregate xr.DataArray.""" - res = data_array.coarsen(boundary=boundary, side=side, **dim_kwargs) + func = coarsen_kwargs.pop("func") + res = data_array.coarsen(**coarsen_kwargs) if callable(func): out = res.reduce(func) else: @@ -771,41 +772,26 @@ def crop(self, area=None, ll_bbox=None, xy_bbox=None, dataset_ids=None): def aggregate(self, dataset_ids=None, boundary='trim', side='left', func='mean', **dim_kwargs): """Create an aggregated version of the Scene. - Parameters - ---------- - dataset_ids : iterable, optional - DataIDs to include in the returned `Scene`. - If None, defaults to all datasets. The default is None. - boundary : str, optional - See :meth:`xarray.DataArray.coarsen`. - The default is 'trim'. - side : TYPE, optional - See :meth:`xarray.DataArray.coarsen`. - The default is 'left'. - func : str or callable, optional - Function to apply on each aggregation window. - One of 'mean', 'sum', 'min', 'max', 'median', 'argmin', 'argmax', - 'prod', 'std', 'var' strings or a custom function (callable). - The default is 'mean'. - **dim_kwargs - The size of the windows to aggregate. - For example: x=2, y=2 - - Returns - ------- - new_scn : satpy.Scene - A new aggregated scene. - - See Also - -------- - xarray.DataArray.coarsen` - A somewhat long description of the arguments. - - Example - ------- - Apply the `min` function across a window of size 2 pixels: - - >> scn.aggregate(func='min', x=2, y=2) + Args: + dataset_ids (iterable): DataIDs to include in the returned + `Scene`. Defaults to all datasets. + func (string, callable): Function to apply on each aggregation window. One of + 'mean', 'sum', 'min', 'max', 'median', 'argmin', + 'argmax', 'prod', 'std', 'var' strings or a custom + function. 'mean' is the default. + boundary: See :meth:`xarray.DataArray.coarsen`, 'trim' by default. + side: See :meth:`xarray.DataArray.coarsen`, 'left' by default. + dim_kwargs: the size of the windows to aggregate. + + Returns: + A new aggregated scene + + See also: + xarray.DataArray.coarsen + + Example: + `scn.aggregate(func='min', x=2, y=2)` will apply the `min` function + across a window of size 2 pixels. 
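+
+            A custom callable can be passed as well, for example (a sketch
+            assuming the datasets have `y` and `x` dimensions):
+            `scn.aggregate(func=np.nanmean, x=2, y=2)`.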
""" new_scn = self.copy(datasets=dataset_ids) From 9f733297ed3bc6e53cdfc46f68b91482d9308a94 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Thu, 1 Jun 2023 14:13:12 +0000 Subject: [PATCH 0246/1416] Refactor navigation parameters --- satpy/readers/gms5_vissr_l1b.py | 28 +-- satpy/readers/gms5_vissr_navigation.py | 168 ++++++++++++++---- .../test_gms5_vissr_navigation.py | 82 ++++++--- 3 files changed, 204 insertions(+), 74 deletions(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index 7f7bc4b905..e5c2a68538 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -131,7 +131,7 @@ Metadata -------- -Dataset attributes include metadata such as time or orbital parameters, +Dataset attributes include metadata such as time and orbital parameters, see :ref:`dataset_metadata`. """ @@ -780,14 +780,8 @@ def _attach_lons_lats(self, dataset, dataset_id): def _get_lons_lats(self, dataset, dataset_id): lines, pixels = self._get_image_coords(dataset) - static_params = self._get_static_navigation_params(dataset_id) - predicted_params = self._get_predicted_navigation_params() - lons, lats = nav.get_lons_lats( - lines=lines, - pixels=pixels, - static_params=static_params, - predicted_params=predicted_params, - ) + nav_params = self._get_navigation_parameters(dataset_id) + lons, lats = nav.get_lons_lats(lines, pixels, nav_params) return self._make_lons_lats_data_array(lons, lats) def _get_image_coords(self, data): @@ -795,6 +789,12 @@ def _get_image_coords(self, data): pixels = np.arange(data.shape[1]) return lines.astype(np.float64), pixels.astype(np.float64) + def _get_navigation_parameters(self, dataset_id): + return nav.ImageNavigationParameters( + static=self._get_static_navigation_params(dataset_id), + predicted=self._get_predicted_navigation_params() + ) + def _get_static_navigation_params(self, dataset_id): """Get static navigation parameters. @@ -814,7 +814,10 @@ def _get_static_navigation_params(self, dataset_id): sampling_angle=self._coord_conv["sampling_angle_along_pixel"][alt_ch_name], ) proj_params = self._get_proj_params(dataset_id) - return scan_params, proj_params + return nav.StaticNavigationParameters( + proj_params=proj_params, + scan_params=scan_params + ) def _get_proj_params(self, dataset_id): proj_params = nav.ProjectionParameters( @@ -865,7 +868,10 @@ def _get_predicted_navigation_params(self): """Get predictions of time-dependent navigation parameters.""" attitude_prediction = self._get_attitude_prediction() orbit_prediction = self._get_orbit_prediction() - return attitude_prediction, orbit_prediction + return nav.PredictedNavigationParameters( + attitude=attitude_prediction, + orbit=orbit_prediction + ) def _get_attitude_prediction(self): att_pred = self._header["image_parameters"]["attitude_prediction"]["data"] diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index 48b2a4edb1..8bdb5d267c 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -30,7 +30,10 @@ "angle_between_sat_spin_and_yz_plane", ], ) -"""Units: radians""" +"""Attitude parameters. + +Units: radians +""" Orbit = namedtuple( @@ -41,7 +44,7 @@ "nutation_precession", ], ) -Orbit.__doc__ = """Orbital Parameters +"""Orbital Parameters Args: angles (OrbitAngles): Orbit angles @@ -58,19 +61,87 @@ "right_ascension_from_sat_to_sun", ], ) -"""Units: radians""" +"""Orbit angles. 
+ +Units: radians +""" SatellitePositionEarthFixed = namedtuple( "SatellitePositionEarthFixed", ["x", "y", "z"], ) -"""Units: meters""" +"""Satellite position in earth-fixed coordinates. + +Units: meters +""" + + +ImageNavigationParameters = namedtuple( + "ImageNavigationParameters", + ["static", "predicted"] +) +"""Navigation parameters for the entire image. + +Args: + static (StaticNavigationParameters): Static parameters. + predicted (PredictedNavigationParameters): Predicted time-dependent parameters. +""" + + +PixelNavigationParameters = namedtuple( + "PixelNavigationParameters", + ["attitude", "orbit", "proj_params"] +) +"""Navigation parameters for a single pixel. + +Args: + attitude (Attitude): Attitude parameters + orbit (Orbit): Orbit parameters + proj_params (ProjectionParameters): Projection parameters +""" + + +StaticNavigationParameters = namedtuple( + "StaticNavigationParameters", + [ + "proj_params", + "scan_params" + ] +) +"""Navigation parameters which are constant for the entire scan. + +Args: + proj_params (ProjectionParameters): Projection parameters + scan_params (ScanningParameters): Scanning parameters +""" + + +PredictedNavigationParameters = namedtuple( + "PredictedNavigationParameters", + [ + "attitude", + "orbit" + ] +) +"""Predictions of time-dependent navigation parameters. + +They need to be evaluated for each pixel. + +Args: + attitude (AttitudePrediction): Attitude prediction + orbit (OrbitPrediction): Orbit prediction +""" ScanningParameters = namedtuple( "ScanningParameters", - ["start_time_of_scan", "spinning_rate", "num_sensors", "sampling_angle"], + [ + "start_time_of_scan", + "spinning_rate", + "num_sensors", + "sampling_angle" + ], ) @@ -82,6 +153,13 @@ "earth_ellipsoid", ], ) +"""Projection parameters. + +Args: + image_offset (ImageOffset): Image offset + scanning_angles (ScanningAngles): Scanning angles + earth_ellipsoid (EarthEllipsoid): Earth ellipsoid +""" ImageOffset = namedtuple( @@ -91,6 +169,12 @@ "pixel_offset", ] ) +"""Image offset + +Args: + line_offset: Line offset from image center + pixel_offset: Pixel offset from image center +""" ScanningAngles = namedtuple( @@ -101,7 +185,7 @@ "misalignment" ] ) -ScanningAngles.__doc__ = """Scanning angles +"""Scanning angles Args: stepping_angle: Scanning angle along line (rad) @@ -109,6 +193,7 @@ misalignment: Misalignment matrix (3x3) """ + EarthEllipsoid = namedtuple( "EarthEllipsoid", [ @@ -116,7 +201,12 @@ "equatorial_radius" ] ) -"""Units: meters""" +"""Earth ellipsoid. + +Args: + flattening: Ellipsoid flattening + equatorial_radius: Equatorial radius (meters) +""" _AttitudePrediction = namedtuple( @@ -160,7 +250,7 @@ def __init__( Args: prediction_times: Timestamps of predicted attitudes - attitude (Attitude): Attitude angles + attitude (Attitude): Attitudes at prediction times """ self.prediction_times = prediction_times self.attitude = self._unwrap_angles(attitude) @@ -231,30 +321,31 @@ def to_numba(self): ) -def get_lons_lats(lines, pixels, static_params, predicted_params): +def get_lons_lats(lines, pixels, nav_params): """Compute lon/lat coordinates given VISSR image coordinates. 
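+    The lon/lat computation is mapped over the dask chunks of the image
+    with :func:`dask.array.map_blocks`; within each chunk, every pixel is
+    navigated individually in a numba-compiled kernel.
+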
Args: lines: VISSR image lines pixels: VISSR image pixels - static_params: Static navigation parameters - predicted_params: Predicted time-dependent navigation parameters + nav_params: Image navigation parameters """ pixels_2d, lines_2d = da.meshgrid(pixels, lines) lons, lats = da.map_blocks( _get_lons_lats_numba, lines_2d, pixels_2d, - static_params=static_params, - predicted_params=_make_predicted_params_numba_compatible(predicted_params), + nav_params=_make_nav_params_numba_compatible(nav_params), **_get_map_blocks_kwargs(pixels_2d.chunks) ) return lons, lats -def _make_predicted_params_numba_compatible(predicted_params): - att_pred, orb_pred = predicted_params - return att_pred.to_numba(), orb_pred.to_numba() +def _make_nav_params_numba_compatible(nav_params): + predicted = PredictedNavigationParameters( + attitude=nav_params.predicted.attitude.to_numba(), + orbit=nav_params.predicted.orbit.to_numba() + ) + return ImageNavigationParameters(nav_params.static, predicted) def _get_map_blocks_kwargs(chunks): @@ -268,17 +359,17 @@ def _get_map_blocks_kwargs(chunks): @numba.njit -def _get_lons_lats_numba(lines_2d, pixels_2d, static_params, predicted_params): +def _get_lons_lats_numba(lines_2d, pixels_2d, nav_params): shape = lines_2d.shape lons = np.zeros(shape, dtype=np.float32) lats = np.zeros(shape, dtype=np.float32) for i in range(shape[0]): for j in range(shape[1]): point = (lines_2d[i, j], pixels_2d[i, j]) - nav_params = _get_navigation_parameters( - point, static_params, predicted_params + nav_params_pix = _get_pixel_navigation_parameters( + point, nav_params ) - lon, lat = get_lon_lat(point, nav_params) + lon, lat = get_lon_lat(point, nav_params_pix) lons[i, j] = lon lats[i, j] = lat # Stack lons and lats because da.map_blocks doesn't support multiple @@ -287,14 +378,18 @@ def _get_lons_lats_numba(lines_2d, pixels_2d, static_params, predicted_params): @numba.njit -def _get_navigation_parameters(point, static_params, predicted_params): - scan_params, proj_params = static_params - attitude_prediction, orbit_prediction = predicted_params - obs_time = get_observation_time(point, scan_params) +def _get_pixel_navigation_parameters(point, im_nav_params): + obs_time = get_observation_time(point, im_nav_params.static.scan_params) attitude, orbit = interpolate_navigation_prediction( - attitude_prediction, orbit_prediction, obs_time + attitude_prediction=im_nav_params.predicted.attitude, + orbit_prediction=im_nav_params.predicted.orbit, + observation_time=obs_time + ) + return PixelNavigationParameters( + attitude=attitude, + orbit=orbit, + proj_params=im_nav_params.static.proj_params ) - return attitude, orbit, proj_params @numba.njit @@ -331,31 +426,32 @@ def get_lon_lat(point, nav_params): Args: point: Point (line, pixel) in image coordinates. - nav_params: Navigation parameters (Attitude, Orbit, Projection - Parameters) + nav_params (PixelNavigationParameters): Navigation parameters for a + single pixel. Returns: Longitude and latitude in degrees. 
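+
+    Note:
+        The navigation chain is: image coordinates -> scanning angles ->
+        satellite-fixed viewing vector -> earth-fixed viewing vector ->
+        intersection with the earth ellipsoid -> geodetic lon/lat.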
""" - attitude, orbit, proj_params = nav_params scan_angles = transform_image_coords_to_scanning_angles( point, - proj_params.image_offset, - proj_params.scanning_angles + nav_params.proj_params.image_offset, + nav_params.proj_params.scanning_angles ) view_vector_sat = transform_scanning_angles_to_satellite_coords( scan_angles, - proj_params.scanning_angles.misalignment + nav_params.proj_params.scanning_angles.misalignment ) view_vector_earth_fixed = transform_satellite_to_earth_fixed_coords( view_vector_sat, - orbit, - attitude + nav_params.orbit, + nav_params.attitude ) point_on_earth = intersect_with_earth( - view_vector_earth_fixed, orbit.sat_position, proj_params.earth_ellipsoid + view_vector_earth_fixed, + nav_params.orbit.sat_position, + nav_params.proj_params.earth_ellipsoid ) lon, lat = transform_earth_fixed_to_geodetic_coords( - point_on_earth, proj_params.earth_ellipsoid.flattening + point_on_earth, nav_params.proj_params.earth_ellipsoid.flattening ) return lon, lat diff --git a/satpy/tests/reader_tests/test_gms5_vissr_navigation.py b/satpy/tests/reader_tests/test_gms5_vissr_navigation.py index 72d593aaa0..80ce00c6cd 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_navigation.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_navigation.py @@ -17,13 +17,13 @@ 'pixel': 1680, 'lon': 139.990380, 'lat': 35.047056, - 'nav_params': ( - nav.Attitude( + 'nav_params': nav.PixelNavigationParameters( + attitude=nav.Attitude( angle_between_earth_and_sun=3.997397917902958, angle_between_sat_spin_and_z_axis=3.149118633034304, angle_between_sat_spin_and_yz_plane=0.000546042025980, ), - nav.Orbit( + orbit=nav.Orbit( angles=nav.OrbitAngles( greenwich_sidereal_time=2.468529732418296, declination_from_sat_to_sun=-0.208770861178982, @@ -40,7 +40,7 @@ [0.004496123789670, -0.000064242454080, 0.999989890320785]] ), ), - nav.ProjectionParameters( + proj_params=nav.ProjectionParameters( image_offset=nav.ImageOffset( line_offset=1378.5, pixel_offset=1672.5, @@ -66,13 +66,13 @@ 'pixel': 1793, 'lon': 144.996967, 'lat': -34.959853, - 'nav_params': ( - nav.Attitude( + 'nav_params': nav.PixelNavigationParameters( + attitude=nav.Attitude( angle_between_earth_and_sun=3.935707944355762, angle_between_sat_spin_and_z_axis=3.149118633034304, angle_between_sat_spin_and_yz_plane=0.000546042025980, ), - nav.Orbit( + orbit=nav.Orbit( angles=nav.OrbitAngles( greenwich_sidereal_time=2.530392320846865, declination_from_sat_to_sun=-0.208713576872247, @@ -89,7 +89,7 @@ [0.004496126086653, -0.000064239500295, 0.999989890310647]] ), ), - nav.ProjectionParameters( + proj_params=nav.ProjectionParameters( image_offset=nav.ImageOffset( line_offset=1378.5, pixel_offset=1672.5, @@ -119,13 +119,13 @@ 'pixel': 6720, 'lon': 139.975527, 'lat': 35.078028, - 'nav_params': ( - nav.Attitude( + 'nav_params': nav.PixelNavigationParameters( + attitude=nav.Attitude( angle_between_earth_and_sun=3.997397918405798, angle_between_sat_spin_and_z_axis=3.149118633034304, angle_between_sat_spin_and_yz_plane=0.000546042025980, ), - nav.Orbit( + orbit=nav.Orbit( angles=nav.OrbitAngles( greenwich_sidereal_time=2.468529731914041, declination_from_sat_to_sun=-0.208770861179448, @@ -142,7 +142,7 @@ [0.004496123789670, -0.000064242454080, 0.999989890320785]] ), ), - nav.ProjectionParameters( + proj_params=nav.ProjectionParameters( image_offset=nav.ImageOffset( line_offset=5513.0, pixel_offset=6688.5, @@ -168,13 +168,13 @@ 'pixel': 7172, 'lon': 144.980104, 'lat': -34.929123, - 'nav_params': ( - nav.Attitude( + 'nav_params': nav.PixelNavigationParameters( 
+ attitude=nav.Attitude( angle_between_earth_and_sun=3.935707944858620, angle_between_sat_spin_and_z_axis=3.149118633034304, angle_between_sat_spin_and_yz_plane=0.000546042025980, ), - nav.Orbit( + orbit=nav.Orbit( angles=nav.OrbitAngles( greenwich_sidereal_time=2.530392320342610, declination_from_sat_to_sun=-0.208713576872715, @@ -191,7 +191,7 @@ [0.004496126086653, -0.000064239500295, 0.999989890310647]] ), ), - nav.ProjectionParameters( + proj_params=nav.ProjectionParameters( image_offset=nav.ImageOffset( line_offset=5513.0, pixel_offset=6688.5, @@ -341,26 +341,30 @@ def test_normalize_vector(self): class TestImageNavigation: """Test navigation of an entire image.""" - def test_get_lons_lats( - self, scan_params, attitude_prediction, orbit_prediction, proj_params - ): - """Test getting lon/lat coordinates.""" + @pytest.fixture + def expected(self): + """Get expected coordinates.""" # fmt: off - lons_exp = [[-114.56923, -112.096837, -109.559702], + exp = { + "lon": [[-114.56923, -112.096837, -109.559702], [8.33221, 8.793893, 9.22339], - [15.918476, 16.268354, 16.6332]] - lats_exp = [[-23.078721, -24.629845, -26.133314], + [15.918476, 16.268354, 16.6332]], + "lat": [[-23.078721, -24.629845, -26.133314], [-42.513409, -39.790231, -37.06392], [3.342834, 6.07043, 8.795932]] + } # fmt: on + return exp + + def test_get_lons_lats(self, navigation_params, expected): + """Test getting lon/lat coordinates.""" lons, lats = nav.get_lons_lats( lines=np.array([1000, 1500, 2000]), pixels=np.array([1000, 1500, 2000]), - static_params=(scan_params, proj_params), - predicted_params=(attitude_prediction, orbit_prediction), + nav_params=navigation_params ) - np.testing.assert_allclose(lons, lons_exp) - np.testing.assert_allclose(lats, lats_exp) + np.testing.assert_allclose(lons, expected["lon"]) + np.testing.assert_allclose(lats, expected["lat"]) class TestPredictionInterpolation: @@ -536,6 +540,30 @@ def proj_params(sampling_angle): ) +@pytest.fixture +def static_nav_params(proj_params, scan_params): + """Get static navigation parameters.""" + return nav.StaticNavigationParameters( + proj_params, scan_params + ) + + +@pytest.fixture +def predicted_nav_params(attitude_prediction, orbit_prediction): + """Get predicted navigation parameters.""" + return nav.PredictedNavigationParameters( + attitude_prediction, orbit_prediction + ) + + +@pytest.fixture +def navigation_params(static_nav_params, predicted_nav_params): + """Get image navigation parameters.""" + return nav.ImageNavigationParameters( + static_nav_params, predicted_nav_params + ) + + def test_get_observation_time(): """Test getting a pixel's observation time.""" scan_params = nav.ScanningParameters( From 1bc03e04bf1c62ea04599bbd0adb9cb9be91c977 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Thu, 1 Jun 2023 14:34:17 +0000 Subject: [PATCH 0247/1416] Move format definition to a separate file --- satpy/readers/gms5_vissr_format.py | 399 +++++++++++++++++ satpy/readers/gms5_vissr_l1b.py | 405 +----------------- .../reader_tests/test_gms5_vissr_data.py | 8 +- .../tests/reader_tests/test_gms5_vissr_l1b.py | 63 +-- 4 files changed, 443 insertions(+), 432 deletions(-) create mode 100644 satpy/readers/gms5_vissr_format.py diff --git a/satpy/readers/gms5_vissr_format.py b/satpy/readers/gms5_vissr_format.py new file mode 100644 index 0000000000..cfd7241192 --- /dev/null +++ b/satpy/readers/gms5_vissr_format.py @@ -0,0 +1,399 @@ +"""GMS-5 VISSR archive data format. + +Reference: `VISSR Format Description`_ + +.. 
_VISSR Format Description: + https://www.data.jma.go.jp/mscweb/en/operation/fig/VISSR_FORMAT_GMS-5.pdf +""" + +import numpy as np + +U1 = ">u1" +I2 = ">i2" +I4 = ">i4" +R4 = ">f4" +R8 = ">f8" + +VIS_CHANNEL = "VIS" +IR_CHANNEL = "IR" +CHANNEL_TYPES = { + "VIS": VIS_CHANNEL, + "IR1": IR_CHANNEL, + "IR2": IR_CHANNEL, + "IR3": IR_CHANNEL, + "WV": IR_CHANNEL, +} +ALT_CHANNEL_NAMES = {"VIS": "VIS", "IR1": "IR1", "IR2": "IR2", "IR3": "WV"} +BLOCK_SIZE_VIS = 13504 +BLOCK_SIZE_IR = 3664 + +IMAGE_PARAM_ITEM_SIZE = 2688 +TIME = [("date", I4), ("time", I4)] +CHANNELS = [("VIS", R4), ("IR1", R4), ("IR2", R4), ("WV", R4)] +VISIR_SOLAR = [("VIS", R4), ("IR", R4)] + +# fmt: off +CONTROL_BLOCK = np.dtype([('control_block_size', I2), + ('head_block_number_of_parameter_block', I2), + ('parameter_block_size', I2), + ('head_block_number_of_image_data', I2), + ('total_block_size_of_image_data', I2), + ('available_block_size_of_image_data', I2), + ('head_valid_line_number', I2), + ('final_valid_line_number', I2), + ('final_data_block_number', I2)]) + +MODE_BLOCK_FRAME_PARAMETERS = [('bit_length', I4), + ('number_of_lines', I4), + ('number_of_pixels', I4), + ('stepping_angle', R4), + ('sampling_angle', R4), + ('lcw_pixel_size', I4), + ('doc_pixel_size', I4), + ('reserved', I4)] + +MODE_BLOCK = np.dtype([('satellite_number', I4), + ('satellite_name', '|S12'), + ('observation_time_ad', '|S16'), + ('observation_time_mjd', R8), + ('gms_operation_mode', I4), + ('dpc_operation_mode', I4), + ('vissr_observation_mode', I4), + ('scanner_selection', I4), + ('sensor_selection', I4), + ('sensor_mode', I4), + ('scan_frame_mode', I4), + ('scan_mode', I4), + ('upper_limit_of_scan_number', I4), + ('lower_limit_of_scan_number', I4), + ('equatorial_scan_line_number', I4), + ('spin_rate', R4), + ('vis_frame_parameters', MODE_BLOCK_FRAME_PARAMETERS), + ('ir_frame_parameters', MODE_BLOCK_FRAME_PARAMETERS), + ('satellite_height', R4), + ('earth_radius', R4), + ('ssp_longitude', R4), + ('reserved_1', I4, 9), + ('table_of_sensor_trouble', I4, 14), + ('reserved_2', I4, 36), + ('status_tables_of_data_relative_address_segment', I4, 60)]) + +COORDINATE_CONVERSION_PARAMETERS = np.dtype([ + ('data_segment', I4), + ('data_validity', I4), + ('data_generation_time', TIME), + ('scheduled_observation_time', R8), + ('stepping_angle_along_line', CHANNELS), + ('sampling_angle_along_pixel', CHANNELS), + ('central_line_number_of_vissr_frame', CHANNELS), + ('central_pixel_number_of_vissr_frame', CHANNELS), + ('pixel_difference_of_vissr_center_from_normal_position', CHANNELS), + ('number_of_sensor_elements', CHANNELS), + ('total_number_of_vissr_frame_lines', CHANNELS), + ('total_number_of_vissr_frame_pixels', CHANNELS), + ('vissr_misalignment', R4, (3,)), + ('matrix_of_misalignment', R4, (3, 3)), + ('parameters', [('judgement_of_observation_convergence_time', R4), + ('judgement_of_line_convergence', R4), + ('east_west_angle_of_sun_light_condense_prism', R4), + ('north_south_angle_of_sun_light_condense_prism', R4), + ('pi', R4), + ('pi_divided_by_180', R4), + ('180_divided_by_pi', R4), + ('equatorial_radius', R4), + ('oblateness_of_earth', R4), + ('eccentricity_of_earth_orbit', R4), + ('first_angle_of_vissr_observation_in_sdb', R4), + ('upper_limited_line_of_2nd_prism_for_vis_solar_observation', R4), + ('lower_limited_line_of_1st_prism_for_vis_solar_observation', R4), + ('upper_limited_line_of_3rd_prism_for_vis_solar_observation', R4), + ('lower_limited_line_of_2nd_prism_for_vis_solar_observation', R4)]), + ('solar_stepping_angle_along_line', 
VISIR_SOLAR), + ('solar_sampling_angle_along_pixel', VISIR_SOLAR), + ('solar_center_line_of_vissr_frame', VISIR_SOLAR), + ('solar_center_pixel_of_vissr_frame', VISIR_SOLAR), + ('solar_pixel_difference_of_vissr_center_from_normal_position', VISIR_SOLAR), + ('solar_number_of_sensor_elements', VISIR_SOLAR), + ('solar_total_number_of_vissr_frame_lines', VISIR_SOLAR), + ('solar_total_number_of_vissr_frame_pixels', VISIR_SOLAR), + ('reserved_1', I4, 19), + ('orbital_parameters', [('epoch_time', R8), + ('semi_major_axis', R8), + ('eccentricity', R8), + ('orbital_inclination', R8), + ('longitude_of_ascending_node', R8), + ('argument_of_perigee', R8), + ('mean_anomaly', R8), + ('longitude_of_ssp', R8), + ('latitude_of_ssp', R8)]), + ('reserved_2', I4, 2), + ('attitude_parameters', [('epoch_time', R8), + ('angle_between_z_axis_and_satellite_spin_axis_at_epoch_time', R8), + ('angle_change_rate_between_spin_axis_and_z_axis', R8), + ('angle_between_spin_axis_and_zy_axis', R8), + ('angle_change_rate_between_spin_axis_and_zt_axis', R8), + ('daily_mean_of_spin_rate', R8)]), + ('reserved_3', I4, 529), + ('correction_of_image_distortion', [('stepping_angle_along_line_of_ir1', R4), + ('stepping_angle_along_line_of_ir2', R4), + ('stepping_angle_along_line_of_wv', R4), + ('stepping_angle_along_line_of_vis', R4), + ('sampling_angle_along_pixel_of_ir1', R4), + ('sampling_angle_along_pixel_of_ir2', R4), + ('sampling_angle_along_pixel_of_wv', R4), + ('sampling_angle_along_pixel_of_vis', R4), + ('x_component_vissr_misalignment', R4), + ('y_component_vissr_misalignment', R4)]) +]) + +ATTITUDE_PREDICTION_DATA = np.dtype([('prediction_time_mjd', R8), + ('prediction_time_utc', TIME), + ('right_ascension_of_attitude', R8), + ('declination_of_attitude', R8), + ('sun_earth_angle', R8), + ('spin_rate', R8), + ('right_ascension_of_orbital_plane', R8), + ('declination_of_orbital_plane', R8), + ('reserved', R8), + ('eclipse_flag', I4), + ('spin_axis_flag', I4)]) + +ATTITUDE_PREDICTION = np.dtype([('data_segment', I4), + ('data_validity', I4), + ('data_generation_time', TIME), + ('start_time', R8), + ('end_time', R8), + ('prediction_interval_time', R8), + ('number_of_prediction', I4), + ('data_size', I4), + ('data', ATTITUDE_PREDICTION_DATA, (33,))]) + +ORBIT_PREDICTION_DATA = [('prediction_time_mjd', R8), + ('prediction_time_utc', TIME), + ('satellite_position_1950', R8, (3,)), + ('satellite_velocity_1950', R8, (3,)), + ('satellite_position_earth_fixed', R8, (3,)), + ('satellite_velocity_earth_fixed', R8, (3,)), + ('greenwich_sidereal_time', R8), + ('sat_sun_vector_1950', [('azimuth', R8), + ('elevation', R8)]), + ('sat_sun_vector_earth_fixed', [('azimuth', R8), + ('elevation', R8)]), + ('conversion_matrix', R8, (3, 3)), + ('moon_directional_vector', R8, (3,)), + ('satellite_position', [('ssp_longitude', R8), + ('ssp_latitude', R8), + ('satellite_height', R8)]), + ('eclipse_period_flag', I4), + ('reserved', I4)] + +ORBIT_PREDICTION = np.dtype([('data_segment', I4), + ('data_validity', I4), + ('data_generation_time', TIME), + ('start_time', R8), + ('end_time', R8), + ('prediction_interval_time', R8), + ('number_of_prediction', I4), + ('data_size', I4), + ('data', ORBIT_PREDICTION_DATA, (9,))]) + +VIS_CALIBRATION_TABLE = np.dtype([ + ('channel_number', I4), + ('data_validity', I4), + ('updated_time', TIME), + ('table_id', I4), + ('brightness_albedo_conversion_table', R4, (64,)), + ('vis_channel_staircase_brightness_data', R4, (6,)), + ('coefficients_table_of_vis_staircase_regression_curve', R4, (10,)), + 
('brightness_table_for_calibration', [('universal_space_brightness', R4), + ('solar_brightness', R4)]), + ('calibration_uses_brightness_correspondence_voltage_chart', [('universal_space_voltage', R4), + ('solar_voltage', R4)]), + ('calibration_coefficients_of_radiation_observation', [('G', R4), ('V0', R4)]), + ('reserved', I4, (9,)) + ]) + +VIS_CALIBRATION = np.dtype([('data_segment', I4), + ('data_validity', I4), + ('data_generation_time', TIME), + ('sensor_group', I4), + ('vis1_calibration_table', VIS_CALIBRATION_TABLE), + ('vis2_calibration_table', VIS_CALIBRATION_TABLE), + ('vis3_calibration_table', VIS_CALIBRATION_TABLE), + ('reserved', I4, (267,))]) + +TELEMETRY_DATA = np.dtype([ + ('shutter_temp', R4), + ('redundant_mirror_temp', R4), + ('primary_mirror_temp', R4), + ('baffle_fw_temp', R4), + ('baffle_af_temp', R4), + ('15_volt_auxiliary_power_supply', R4), + ('radiative_cooler_temp_1', R4), + ('radiative_cooler_temp_2', R4), + ('electronics_module_temp', R4), + ('scan_mirror_temp', R4), + ('shutter_cavity_temp', R4), + ('primary_mirror_sealed_temp', R4), + ('redundant_mirror_sealed_temp', R4), + ('shutter_temp_2', R4), + ('reserved', R4, (2,)) +]) + +IR_CALIBRATION = np.dtype([ + ('data_segment', I4), + ('data_validity', I4), + ('updated_time', TIME), + ('sensor_group', I4), + ('table_id', I4), + ('reserved_1', I4, (2,)), + ('conversion_table_of_equivalent_black_body_radiation', R4, (256,)), + ('conversion_table_of_equivalent_black_body_temperature', R4, (256,)), + ('staircase_brightness_data', R4, (6,)), + ('coefficients_table_of_staircase_regression_curve', R4, (10,)), + ('brightness_data_for_calibration', [('brightness_of_space', R4), + ('brightness_of_black_body_shutter', R4), + ('reserved', R4)]), + ('voltage_table_for_brightness_of_calibration', [('voltage_of_space', R4), + ('voltage_of_black_body_shutter', R4), + ('reserved', R4)]), + ('calibration_coefficients_of_radiation_observation', [('G', R4), ('V0', R4)]), + ('valid_shutter_temperature', R4), + ('valid_shutter_radiation', R4), + ('telemetry_data_table', TELEMETRY_DATA), + ('flag_of_calid_shutter_temperature_calculation', I4), + ('reserved_2', I4, (109,)) +]) + +SIMPLE_COORDINATE_CONVERSION_TABLE = np.dtype([ + ('coordinate_conversion_table', I2, (1250,)), + ('earth_equator_radius', R4), + ('satellite_height', R4), + ('stepping_angle', R4), + ('sampling_angle', R4), + ('ssp_latitude', R4), + ('ssp_longitude', R4), + ('ssp_line_number', R4), + ('ssp_pixel_number', R4), + ('pi', R4), + ('line_correction_ir1_vis', R4), + ('pixel_correction_ir1_vis', R4), + ('line_correction_ir1_ir2', R4), + ('pixel_correction_ir1_ir2', R4), + ('line_correction_ir1_wv', R4), + ('pixel_correction_ir1_wv', R4), + ('reserved', R4, (32,)), +]) + +IMAGE_PARAMS = { + 'mode': { + 'dtype': MODE_BLOCK, + 'offset': { + VIS_CHANNEL: 2 * BLOCK_SIZE_VIS, + IR_CHANNEL: 2 * BLOCK_SIZE_IR + } + }, + 'coordinate_conversion': { + 'dtype': COORDINATE_CONVERSION_PARAMETERS, + 'offset': { + VIS_CHANNEL: 2 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE, + IR_CHANNEL: 4 * BLOCK_SIZE_IR + } + }, + 'attitude_prediction': { + 'dtype': ATTITUDE_PREDICTION, + 'offset': { + VIS_CHANNEL: 2 * BLOCK_SIZE_VIS + 3 * IMAGE_PARAM_ITEM_SIZE, + IR_CHANNEL: 5 * BLOCK_SIZE_IR + }, + 'preserve': 'data' + }, + 'orbit_prediction_1': { + 'dtype': ORBIT_PREDICTION, + 'offset': { + VIS_CHANNEL: 3 * BLOCK_SIZE_VIS, + IR_CHANNEL: 6 * BLOCK_SIZE_IR + }, + 'preserve': 'data' + }, + 'orbit_prediction_2': { + 'dtype': ORBIT_PREDICTION, + 'offset': { + VIS_CHANNEL: 3 * BLOCK_SIZE_VIS + 1 * 
IMAGE_PARAM_ITEM_SIZE, + IR_CHANNEL: 7 * BLOCK_SIZE_IR + }, + 'preserve': 'data' + }, + 'vis_calibration': { + 'dtype': VIS_CALIBRATION, + 'offset': { + VIS_CHANNEL: 3 * BLOCK_SIZE_VIS + 3 * IMAGE_PARAM_ITEM_SIZE, + IR_CHANNEL: 9 * BLOCK_SIZE_IR + }, + 'preserve': 'data' + }, + 'ir1_calibration': { + 'dtype': IR_CALIBRATION, + 'offset': { + VIS_CHANNEL: 4 * BLOCK_SIZE_VIS, + IR_CHANNEL: 10 * BLOCK_SIZE_IR + }, + }, + 'ir2_calibration': { + 'dtype': IR_CALIBRATION, + 'offset': { + VIS_CHANNEL: 4 * BLOCK_SIZE_VIS + IMAGE_PARAM_ITEM_SIZE, + IR_CHANNEL: 11 * BLOCK_SIZE_IR + }, + }, + 'wv_calibration': { + 'dtype': IR_CALIBRATION, + 'offset': { + VIS_CHANNEL: 4 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE, + IR_CHANNEL: 12 * BLOCK_SIZE_IR + }, + }, + 'simple_coordinate_conversion_table': { + 'dtype': SIMPLE_COORDINATE_CONVERSION_TABLE, + 'offset': { + VIS_CHANNEL: 5 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE, + IR_CHANNEL: 16 * BLOCK_SIZE_IR + }, + } +} + +LINE_CONTROL_WORD = np.dtype([ + ('data_id', U1, (4, )), + ('line_number', I4), + ('line_name', I4), + ('error_line_flag', I4), + ('error_message', I4), + ('mode_error_flag', I4), + ('scan_time', R8), + ('beta_angle', R4), + ('west_side_earth_edge', I4), + ('east_side_earth_edge', I4), + ('received_time_1', R8), # Typo in format description (I*4) + ('received_time_2', I4), + ('reserved', U1, (8, )) +]) + +IMAGE_DATA_BLOCK_IR = np.dtype([('LCW', LINE_CONTROL_WORD), + ('DOC', U1, (256,)), # Omitted + ('image_data', U1, 3344)]) + +IMAGE_DATA_BLOCK_VIS = np.dtype([('LCW', LINE_CONTROL_WORD), + ('DOC', U1, (64,)), # Omitted + ('image_data', U1, (13376,))]) + +IMAGE_DATA = { + VIS_CHANNEL: { + 'offset': 6 * BLOCK_SIZE_VIS, + 'dtype': IMAGE_DATA_BLOCK_VIS, + }, + IR_CHANNEL: { + 'offset': 18 * BLOCK_SIZE_IR, + 'dtype': IMAGE_DATA_BLOCK_IR + } +} +# fmt: on diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index e5c2a68538..347f93afa3 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -144,6 +144,7 @@ import xarray as xr import satpy.readers._geos_area as geos_area +import satpy.readers.gms5_vissr_format as fmt import satpy.readers.gms5_vissr_navigation as nav from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.hrit_jma import mjd2datetime64 @@ -152,396 +153,6 @@ CHUNK_SIZE = get_legacy_chunk_size() -U1 = ">u1" -I2 = ">i2" -I4 = ">i4" -R4 = ">f4" -R8 = ">f8" - -VIS_CHANNEL = "VIS" -IR_CHANNEL = "IR" -CHANNEL_TYPES = { - "VIS": VIS_CHANNEL, - "IR1": IR_CHANNEL, - "IR2": IR_CHANNEL, - "IR3": IR_CHANNEL, - "WV": IR_CHANNEL, -} -ALT_CHANNEL_NAMES = {"VIS": "VIS", "IR1": "IR1", "IR2": "IR2", "IR3": "WV"} -BLOCK_SIZE_VIS = 13504 -BLOCK_SIZE_IR = 3664 - -IMAGE_PARAM_ITEM_SIZE = 2688 -TIME = [("date", I4), ("time", I4)] -CHANNELS = [("VIS", R4), ("IR1", R4), ("IR2", R4), ("WV", R4)] -VISIR_SOLAR = [("VIS", R4), ("IR", R4)] - -# fmt: off -CONTROL_BLOCK = np.dtype([('control_block_size', I2), - ('head_block_number_of_parameter_block', I2), - ('parameter_block_size', I2), - ('head_block_number_of_image_data', I2), - ('total_block_size_of_image_data', I2), - ('available_block_size_of_image_data', I2), - ('head_valid_line_number', I2), - ('final_valid_line_number', I2), - ('final_data_block_number', I2)]) - -MODE_BLOCK_FRAME_PARAMETERS = [('bit_length', I4), - ('number_of_lines', I4), - ('number_of_pixels', I4), - ('stepping_angle', R4), - ('sampling_angle', R4), - ('lcw_pixel_size', I4), - ('doc_pixel_size', I4), - ('reserved', I4)] - -MODE_BLOCK = 
np.dtype([('satellite_number', I4), - ('satellite_name', '|S12'), - ('observation_time_ad', '|S16'), - ('observation_time_mjd', R8), - ('gms_operation_mode', I4), - ('dpc_operation_mode', I4), - ('vissr_observation_mode', I4), - ('scanner_selection', I4), - ('sensor_selection', I4), - ('sensor_mode', I4), - ('scan_frame_mode', I4), - ('scan_mode', I4), - ('upper_limit_of_scan_number', I4), - ('lower_limit_of_scan_number', I4), - ('equatorial_scan_line_number', I4), - ('spin_rate', R4), - ('vis_frame_parameters', MODE_BLOCK_FRAME_PARAMETERS), - ('ir_frame_parameters', MODE_BLOCK_FRAME_PARAMETERS), - ('satellite_height', R4), - ('earth_radius', R4), - ('ssp_longitude', R4), - ('reserved_1', I4, 9), - ('table_of_sensor_trouble', I4, 14), - ('reserved_2', I4, 36), - ('status_tables_of_data_relative_address_segment', I4, 60)]) - -COORDINATE_CONVERSION_PARAMETERS = np.dtype([ - ('data_segment', I4), - ('data_validity', I4), - ('data_generation_time', TIME), - ('scheduled_observation_time', R8), - ('stepping_angle_along_line', CHANNELS), - ('sampling_angle_along_pixel', CHANNELS), - ('central_line_number_of_vissr_frame', CHANNELS), - ('central_pixel_number_of_vissr_frame', CHANNELS), - ('pixel_difference_of_vissr_center_from_normal_position', CHANNELS), - ('number_of_sensor_elements', CHANNELS), - ('total_number_of_vissr_frame_lines', CHANNELS), - ('total_number_of_vissr_frame_pixels', CHANNELS), - ('vissr_misalignment', R4, (3,)), - ('matrix_of_misalignment', R4, (3, 3)), - ('parameters', [('judgement_of_observation_convergence_time', R4), - ('judgement_of_line_convergence', R4), - ('east_west_angle_of_sun_light_condense_prism', R4), - ('north_south_angle_of_sun_light_condense_prism', R4), - ('pi', R4), - ('pi_divided_by_180', R4), - ('180_divided_by_pi', R4), - ('equatorial_radius', R4), - ('oblateness_of_earth', R4), - ('eccentricity_of_earth_orbit', R4), - ('first_angle_of_vissr_observation_in_sdb', R4), - ('upper_limited_line_of_2nd_prism_for_vis_solar_observation', R4), - ('lower_limited_line_of_1st_prism_for_vis_solar_observation', R4), - ('upper_limited_line_of_3rd_prism_for_vis_solar_observation', R4), - ('lower_limited_line_of_2nd_prism_for_vis_solar_observation', R4)]), - ('solar_stepping_angle_along_line', VISIR_SOLAR), - ('solar_sampling_angle_along_pixel', VISIR_SOLAR), - ('solar_center_line_of_vissr_frame', VISIR_SOLAR), - ('solar_center_pixel_of_vissr_frame', VISIR_SOLAR), - ('solar_pixel_difference_of_vissr_center_from_normal_position', VISIR_SOLAR), - ('solar_number_of_sensor_elements', VISIR_SOLAR), - ('solar_total_number_of_vissr_frame_lines', VISIR_SOLAR), - ('solar_total_number_of_vissr_frame_pixels', VISIR_SOLAR), - ('reserved_1', I4, 19), - ('orbital_parameters', [('epoch_time', R8), - ('semi_major_axis', R8), - ('eccentricity', R8), - ('orbital_inclination', R8), - ('longitude_of_ascending_node', R8), - ('argument_of_perigee', R8), - ('mean_anomaly', R8), - ('longitude_of_ssp', R8), - ('latitude_of_ssp', R8)]), - ('reserved_2', I4, 2), - ('attitude_parameters', [('epoch_time', R8), - ('angle_between_z_axis_and_satellite_spin_axis_at_epoch_time', R8), - ('angle_change_rate_between_spin_axis_and_z_axis', R8), - ('angle_between_spin_axis_and_zy_axis', R8), - ('angle_change_rate_between_spin_axis_and_zt_axis', R8), - ('daily_mean_of_spin_rate', R8)]), - ('reserved_3', I4, 529), - ('correction_of_image_distortion', [('stepping_angle_along_line_of_ir1', R4), - ('stepping_angle_along_line_of_ir2', R4), - ('stepping_angle_along_line_of_wv', R4), - 
('stepping_angle_along_line_of_vis', R4), - ('sampling_angle_along_pixel_of_ir1', R4), - ('sampling_angle_along_pixel_of_ir2', R4), - ('sampling_angle_along_pixel_of_wv', R4), - ('sampling_angle_along_pixel_of_vis', R4), - ('x_component_vissr_misalignment', R4), - ('y_component_vissr_misalignment', R4)]) -]) - -ATTITUDE_PREDICTION_DATA = np.dtype([('prediction_time_mjd', R8), - ('prediction_time_utc', TIME), - ('right_ascension_of_attitude', R8), - ('declination_of_attitude', R8), - ('sun_earth_angle', R8), - ('spin_rate', R8), - ('right_ascension_of_orbital_plane', R8), - ('declination_of_orbital_plane', R8), - ('reserved', R8), - ('eclipse_flag', I4), - ('spin_axis_flag', I4)]) - -ATTITUDE_PREDICTION = np.dtype([('data_segment', I4), - ('data_validity', I4), - ('data_generation_time', TIME), - ('start_time', R8), - ('end_time', R8), - ('prediction_interval_time', R8), - ('number_of_prediction', I4), - ('data_size', I4), - ('data', ATTITUDE_PREDICTION_DATA, (33,))]) - -ORBIT_PREDICTION_DATA = [('prediction_time_mjd', R8), - ('prediction_time_utc', TIME), - ('satellite_position_1950', R8, (3,)), - ('satellite_velocity_1950', R8, (3,)), - ('satellite_position_earth_fixed', R8, (3,)), - ('satellite_velocity_earth_fixed', R8, (3,)), - ('greenwich_sidereal_time', R8), - ('sat_sun_vector_1950', [('azimuth', R8), - ('elevation', R8)]), - ('sat_sun_vector_earth_fixed', [('azimuth', R8), - ('elevation', R8)]), - ('conversion_matrix', R8, (3, 3)), - ('moon_directional_vector', R8, (3,)), - ('satellite_position', [('ssp_longitude', R8), - ('ssp_latitude', R8), - ('satellite_height', R8)]), - ('eclipse_period_flag', I4), - ('reserved', I4)] - -ORBIT_PREDICTION = np.dtype([('data_segment', I4), - ('data_validity', I4), - ('data_generation_time', TIME), - ('start_time', R8), - ('end_time', R8), - ('prediction_interval_time', R8), - ('number_of_prediction', I4), - ('data_size', I4), - ('data', ORBIT_PREDICTION_DATA, (9,))]) - -VIS_CALIBRATION_TABLE = np.dtype([ - ('channel_number', I4), - ('data_validity', I4), - ('updated_time', TIME), - ('table_id', I4), - ('brightness_albedo_conversion_table', R4, (64,)), - ('vis_channel_staircase_brightness_data', R4, (6,)), - ('coefficients_table_of_vis_staircase_regression_curve', R4, (10,)), - ('brightness_table_for_calibration', [('universal_space_brightness', R4), - ('solar_brightness', R4)]), - ('calibration_uses_brightness_correspondence_voltage_chart', [('universal_space_voltage', R4), - ('solar_voltage', R4)]), - ('calibration_coefficients_of_radiation_observation', [('G', R4), ('V0', R4)]), - ('reserved', I4, (9,)) - ]) - -VIS_CALIBRATION = np.dtype([('data_segment', I4), - ('data_validity', I4), - ('data_generation_time', TIME), - ('sensor_group', I4), - ('vis1_calibration_table', VIS_CALIBRATION_TABLE), - ('vis2_calibration_table', VIS_CALIBRATION_TABLE), - ('vis3_calibration_table', VIS_CALIBRATION_TABLE), - ('reserved', I4, (267,))]) - -TELEMETRY_DATA = np.dtype([ - ('shutter_temp', R4), - ('redundant_mirror_temp', R4), - ('primary_mirror_temp', R4), - ('baffle_fw_temp', R4), - ('baffle_af_temp', R4), - ('15_volt_auxiliary_power_supply', R4), - ('radiative_cooler_temp_1', R4), - ('radiative_cooler_temp_2', R4), - ('electronics_module_temp', R4), - ('scan_mirror_temp', R4), - ('shutter_cavity_temp', R4), - ('primary_mirror_sealed_temp', R4), - ('redundant_mirror_sealed_temp', R4), - ('shutter_temp_2', R4), - ('reserved', R4, (2,)) -]) - -IR_CALIBRATION = np.dtype([ - ('data_segment', I4), - ('data_validity', I4), - ('updated_time', TIME), - 
('sensor_group', I4), - ('table_id', I4), - ('reserved_1', I4, (2,)), - ('conversion_table_of_equivalent_black_body_radiation', R4, (256,)), - ('conversion_table_of_equivalent_black_body_temperature', R4, (256,)), - ('staircase_brightness_data', R4, (6,)), - ('coefficients_table_of_staircase_regression_curve', R4, (10,)), - ('brightness_data_for_calibration', [('brightness_of_space', R4), - ('brightness_of_black_body_shutter', R4), - ('reserved', R4)]), - ('voltage_table_for_brightness_of_calibration', [('voltage_of_space', R4), - ('voltage_of_black_body_shutter', R4), - ('reserved', R4)]), - ('calibration_coefficients_of_radiation_observation', [('G', R4), ('V0', R4)]), - ('valid_shutter_temperature', R4), - ('valid_shutter_radiation', R4), - ('telemetry_data_table', TELEMETRY_DATA), - ('flag_of_calid_shutter_temperature_calculation', I4), - ('reserved_2', I4, (109,)) -]) - -SIMPLE_COORDINATE_CONVERSION_TABLE = np.dtype([ - ('coordinate_conversion_table', I2, (1250,)), - ('earth_equator_radius', R4), - ('satellite_height', R4), - ('stepping_angle', R4), - ('sampling_angle', R4), - ('ssp_latitude', R4), - ('ssp_longitude', R4), - ('ssp_line_number', R4), - ('ssp_pixel_number', R4), - ('pi', R4), - ('line_correction_ir1_vis', R4), - ('pixel_correction_ir1_vis', R4), - ('line_correction_ir1_ir2', R4), - ('pixel_correction_ir1_ir2', R4), - ('line_correction_ir1_wv', R4), - ('pixel_correction_ir1_wv', R4), - ('reserved', R4, (32,)), -]) - -IMAGE_PARAMS = { - 'mode': { - 'dtype': MODE_BLOCK, - 'offset': { - VIS_CHANNEL: 2 * BLOCK_SIZE_VIS, - IR_CHANNEL: 2 * BLOCK_SIZE_IR - } - }, - 'coordinate_conversion': { - 'dtype': COORDINATE_CONVERSION_PARAMETERS, - 'offset': { - VIS_CHANNEL: 2 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE, - IR_CHANNEL: 4 * BLOCK_SIZE_IR - } - }, - 'attitude_prediction': { - 'dtype': ATTITUDE_PREDICTION, - 'offset': { - VIS_CHANNEL: 2 * BLOCK_SIZE_VIS + 3 * IMAGE_PARAM_ITEM_SIZE, - IR_CHANNEL: 5 * BLOCK_SIZE_IR - }, - 'preserve': 'data' - }, - 'orbit_prediction_1': { - 'dtype': ORBIT_PREDICTION, - 'offset': { - VIS_CHANNEL: 3 * BLOCK_SIZE_VIS, - IR_CHANNEL: 6 * BLOCK_SIZE_IR - }, - 'preserve': 'data' - }, - 'orbit_prediction_2': { - 'dtype': ORBIT_PREDICTION, - 'offset': { - VIS_CHANNEL: 3 * BLOCK_SIZE_VIS + 1 * IMAGE_PARAM_ITEM_SIZE, - IR_CHANNEL: 7 * BLOCK_SIZE_IR - }, - 'preserve': 'data' - }, - 'vis_calibration': { - 'dtype': VIS_CALIBRATION, - 'offset': { - VIS_CHANNEL: 3 * BLOCK_SIZE_VIS + 3 * IMAGE_PARAM_ITEM_SIZE, - IR_CHANNEL: 9 * BLOCK_SIZE_IR - }, - 'preserve': 'data' - }, - 'ir1_calibration': { - 'dtype': IR_CALIBRATION, - 'offset': { - VIS_CHANNEL: 4 * BLOCK_SIZE_VIS, - IR_CHANNEL: 10 * BLOCK_SIZE_IR - }, - }, - 'ir2_calibration': { - 'dtype': IR_CALIBRATION, - 'offset': { - VIS_CHANNEL: 4 * BLOCK_SIZE_VIS + IMAGE_PARAM_ITEM_SIZE, - IR_CHANNEL: 11 * BLOCK_SIZE_IR - }, - }, - 'wv_calibration': { - 'dtype': IR_CALIBRATION, - 'offset': { - VIS_CHANNEL: 4 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE, - IR_CHANNEL: 12 * BLOCK_SIZE_IR - }, - }, - 'simple_coordinate_conversion_table': { - 'dtype': SIMPLE_COORDINATE_CONVERSION_TABLE, - 'offset': { - VIS_CHANNEL: 5 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE, - IR_CHANNEL: 16 * BLOCK_SIZE_IR - }, - } -} - -LINE_CONTROL_WORD = np.dtype([ - ('data_id', U1, (4, )), - ('line_number', I4), - ('line_name', I4), - ('error_line_flag', I4), - ('error_message', I4), - ('mode_error_flag', I4), - ('scan_time', R8), - ('beta_angle', R4), - ('west_side_earth_edge', I4), - ('east_side_earth_edge', I4), - ('received_time_1', R8), # 
Typo in format description (I*4) - ('received_time_2', I4), - ('reserved', U1, (8, )) -]) - -IMAGE_DATA_BLOCK_IR = np.dtype([('LCW', LINE_CONTROL_WORD), - ('DOC', U1, (256,)), # Omitted - ('image_data', U1, 3344)]) - -IMAGE_DATA_BLOCK_VIS = np.dtype([('LCW', LINE_CONTROL_WORD), - ('DOC', U1, (64,)), # Omitted - ('image_data', U1, (13376,))]) - -IMAGE_DATA = { - VIS_CHANNEL: { - 'offset': 6 * BLOCK_SIZE_VIS, - 'dtype': IMAGE_DATA_BLOCK_VIS, - }, - IR_CHANNEL: { - 'offset': 18 * BLOCK_SIZE_IR, - 'dtype': IMAGE_DATA_BLOCK_IR - } -} -# fmt: on - def _recarr2dict(arr, preserve=None): if not preserve: @@ -593,22 +204,22 @@ def _read_header(self, filename): @staticmethod def _get_channel_type(parameter_block_size): if parameter_block_size == 4: - return VIS_CHANNEL + return fmt.VIS_CHANNEL elif parameter_block_size == 16: - return IR_CHANNEL + return fmt.IR_CHANNEL raise ValueError( f"Cannot determine channel type, possibly corrupt file " f"(unknown parameter block size: {parameter_block_size})" ) def _read_control_block(self, file_obj): - ctrl_block = read_from_file_obj(file_obj, dtype=CONTROL_BLOCK, count=1) + ctrl_block = read_from_file_obj(file_obj, dtype=fmt.CONTROL_BLOCK, count=1) return _recarr2dict(ctrl_block[0]) def _read_image_params(self, file_obj, channel_type): """Read image parameters from the header.""" image_params = {} - for name, param in IMAGE_PARAMS.items(): + for name, param in fmt.IMAGE_PARAMS.items(): image_params[name] = self._read_image_param(file_obj, param, channel_type) image_params["orbit_prediction"] = self._concat_orbit_prediction( @@ -639,7 +250,7 @@ def _concat_orbit_prediction(orb_pred_1, orb_pred_2): return orb_pred def _get_frame_parameters_key(self): - if self._channel_type == VIS_CHANNEL: + if self._channel_type == fmt.VIS_CHANNEL: return "vis_frame_parameters" return "ir_frame_parameters" @@ -715,7 +326,7 @@ def _read_image_data(self): ) def _get_image_data_type_specs(self): - return IMAGE_DATA[self._channel_type] + return fmt.IMAGE_DATA[self._channel_type] def _get_counts(self, image_data): return self._make_counts_data_array(image_data) @@ -958,7 +569,7 @@ def _mode_block(self): def _get_alternative_channel_name(dataset_id): - return ALT_CHANNEL_NAMES[dataset_id["name"]] + return fmt.ALT_CHANNEL_NAMES[dataset_id["name"]] def read_from_file_obj(file_obj, dtype, count, offset=0): diff --git a/satpy/tests/reader_tests/test_gms5_vissr_data.py b/satpy/tests/reader_tests/test_gms5_vissr_data.py index dafe4e80b5..754cab59da 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_data.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_data.py @@ -2,7 +2,7 @@ import numpy as np -import satpy.readers.gms5_vissr_l1b as vissr +import satpy.readers.gms5_vissr_format as fmt ATTITUDE_PREDICTION = np.array( [ @@ -436,7 +436,7 @@ 0, ), ], - dtype=vissr.ATTITUDE_PREDICTION_DATA, + dtype=fmt.ATTITUDE_PREDICTION_DATA, ) ORBIT_PREDICTION_1 = np.array( @@ -622,7 +622,7 @@ 0, ), ], - dtype=vissr.ORBIT_PREDICTION_DATA, + dtype=fmt.ORBIT_PREDICTION_DATA, ) ORBIT_PREDICTION_2 = np.array( @@ -808,5 +808,5 @@ 0, ), ], - dtype=vissr.ORBIT_PREDICTION_DATA, + dtype=fmt.ORBIT_PREDICTION_DATA, ) diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index 1621e3fb7f..bda7109936 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -9,6 +9,7 @@ import xarray as xr from pyresample.geometry import AreaDefinition +import satpy.readers.gms5_vissr_format as fmt import 
satpy.readers.gms5_vissr_l1b as vissr import satpy.tests.reader_tests.test_gms5_vissr_data as real_world from satpy.readers import FSFile @@ -57,35 +58,35 @@ def patch_number_of_pixels_per_scanline(self, monkeypatch): num_pixels = 2 IMAGE_DATA_BLOCK_IR = np.dtype( [ - ("LCW", vissr.LINE_CONTROL_WORD), - ("DOC", vissr.U1, (256,)), - ("image_data", vissr.U1, num_pixels), + ("LCW", fmt.LINE_CONTROL_WORD), + ("DOC", fmt.U1, (256,)), + ("image_data", fmt.U1, num_pixels), ] ) IMAGE_DATA_BLOCK_VIS = np.dtype( [ - ("LCW", vissr.LINE_CONTROL_WORD), - ("DOC", vissr.U1, (64,)), - ("image_data", vissr.U1, (num_pixels,)), + ("LCW", fmt.LINE_CONTROL_WORD), + ("DOC", fmt.U1, (64,)), + ("image_data", fmt.U1, (num_pixels,)), ] ) IMAGE_DATA = { - vissr.VIS_CHANNEL: { - "offset": 6 * vissr.BLOCK_SIZE_VIS, + fmt.VIS_CHANNEL: { + "offset": 6 * fmt.BLOCK_SIZE_VIS, "dtype": IMAGE_DATA_BLOCK_VIS, }, - vissr.IR_CHANNEL: { - "offset": 18 * vissr.BLOCK_SIZE_IR, + fmt.IR_CHANNEL: { + "offset": 18 * fmt.BLOCK_SIZE_IR, "dtype": IMAGE_DATA_BLOCK_IR, }, } monkeypatch.setattr( - "satpy.readers.gms5_vissr_l1b.IMAGE_DATA_BLOCK_IR", IMAGE_DATA_BLOCK_IR + "satpy.readers.gms5_vissr_format.IMAGE_DATA_BLOCK_IR", IMAGE_DATA_BLOCK_IR ) monkeypatch.setattr( - "satpy.readers.gms5_vissr_l1b.IMAGE_DATA_BLOCK_VIS", IMAGE_DATA_BLOCK_VIS + "satpy.readers.gms5_vissr_format.IMAGE_DATA_BLOCK_VIS", IMAGE_DATA_BLOCK_VIS ) - monkeypatch.setattr("satpy.readers.gms5_vissr_l1b.IMAGE_DATA", IMAGE_DATA) + monkeypatch.setattr("satpy.readers.gms5_vissr_format.IMAGE_DATA", IMAGE_DATA) @pytest.fixture( params=[ @@ -119,7 +120,7 @@ def open_function(self, with_compression): def vissr_file(self, dataset_id, file_contents, open_function, tmp_path): """Get test VISSR file.""" filename = tmp_path / "vissr_file" - ch_type = vissr.CHANNEL_TYPES[dataset_id["name"]] + ch_type = fmt.CHANNEL_TYPES[dataset_id["name"]] writer = VissrFileWriter(ch_type, open_function) writer.write(filename, file_contents) return filename @@ -137,7 +138,7 @@ def file_contents(self, control_block, image_parameters, image_data): def control_block(self, dataset_id): """Get VISSR control block.""" block_size = {"IR1": 16, "VIS": 4} - ctrl_block = np.zeros(1, dtype=vissr.CONTROL_BLOCK) + ctrl_block = np.zeros(1, dtype=fmt.CONTROL_BLOCK) ctrl_block["parameter_block_size"] = block_size[dataset_id["name"]] ctrl_block["available_block_size_of_image_data"] = 2 return ctrl_block @@ -187,7 +188,7 @@ def cal_params( @pytest.fixture def mode_block(self): """Get VISSR mode block.""" - mode = np.zeros(1, dtype=vissr.MODE_BLOCK) + mode = np.zeros(1, dtype=fmt.MODE_BLOCK) mode["satellite_name"] = b"GMS-5 " mode["spin_rate"] = 99.21774 mode["observation_time_mjd"] = 50000.0 @@ -208,7 +209,7 @@ def coordinate_conversion(self): Otherwise, all pixels would be in space. 
""" # fmt: off - conv = np.zeros(1, dtype=vissr.COORDINATE_CONVERSION_PARAMETERS) + conv = np.zeros(1, dtype=fmt.COORDINATE_CONVERSION_PARAMETERS) cline = conv["central_line_number_of_vissr_frame"] cline["IR1"] = 1378.5 @@ -250,28 +251,28 @@ def coordinate_conversion(self): @pytest.fixture def attitude_prediction(self): """Get attitude prediction.""" - att_pred = np.zeros(1, dtype=vissr.ATTITUDE_PREDICTION) + att_pred = np.zeros(1, dtype=fmt.ATTITUDE_PREDICTION) att_pred["data"] = real_world.ATTITUDE_PREDICTION return att_pred @pytest.fixture def orbit_prediction_1(self): """Get first block of orbit prediction data.""" - orb_pred = np.zeros(1, dtype=vissr.ORBIT_PREDICTION) + orb_pred = np.zeros(1, dtype=fmt.ORBIT_PREDICTION) orb_pred["data"] = real_world.ORBIT_PREDICTION_1 return orb_pred @pytest.fixture def orbit_prediction_2(self): """Get second block of orbit prediction data.""" - orb_pred = np.zeros(1, dtype=vissr.ORBIT_PREDICTION) + orb_pred = np.zeros(1, dtype=fmt.ORBIT_PREDICTION) orb_pred["data"] = real_world.ORBIT_PREDICTION_2 return orb_pred @pytest.fixture def vis_calibration(self): """Get VIS calibration block.""" - vis_cal = np.zeros(1, dtype=vissr.VIS_CALIBRATION) + vis_cal = np.zeros(1, dtype=fmt.VIS_CALIBRATION) table = vis_cal["vis1_calibration_table"]["brightness_albedo_conversion_table"] table[0, 0:4] = np.array([0, 0.25, 0.5, 1]) return vis_cal @@ -279,7 +280,7 @@ def vis_calibration(self): @pytest.fixture def ir1_calibration(self): """Get IR1 calibration block.""" - cal = np.zeros(1, dtype=vissr.IR_CALIBRATION) + cal = np.zeros(1, dtype=fmt.IR_CALIBRATION) table = cal["conversion_table_of_equivalent_black_body_temperature"] table[0, 0:4] = np.array([0, 100, 200, 300]) return cal @@ -287,19 +288,19 @@ def ir1_calibration(self): @pytest.fixture def ir2_calibration(self): """Get IR2 calibration block.""" - cal = np.zeros(1, dtype=vissr.IR_CALIBRATION) + cal = np.zeros(1, dtype=fmt.IR_CALIBRATION) return cal @pytest.fixture def wv_calibration(self): """Get WV calibration block.""" - cal = np.zeros(1, dtype=vissr.IR_CALIBRATION) + cal = np.zeros(1, dtype=fmt.IR_CALIBRATION) return cal @pytest.fixture def simple_coordinate_conversion_table(self): """Get simple coordinate conversion table.""" - table = np.zeros(1, dtype=vissr.SIMPLE_COORDINATE_CONVERSION_TABLE) + table = np.zeros(1, dtype=fmt.SIMPLE_COORDINATE_CONVERSION_TABLE) table["satellite_height"] = 123457.0 return table @@ -312,7 +313,7 @@ def image_data(self, dataset_id, image_data_ir1, image_data_vis): @pytest.fixture def image_data_ir1(self): """Get IR1 image data.""" - image_data = np.zeros(2, vissr.IMAGE_DATA_BLOCK_IR) + image_data = np.zeros(2, fmt.IMAGE_DATA_BLOCK_IR) image_data["LCW"]["line_number"] = [686, 2089] image_data["LCW"]["scan_time"] = [50000, 50000] image_data["LCW"]["west_side_earth_edge"] = [0, 0] @@ -323,7 +324,7 @@ def image_data_ir1(self): @pytest.fixture def image_data_vis(self): """Get VIS image data.""" - image_data = np.zeros(2, vissr.IMAGE_DATA_BLOCK_VIS) + image_data = np.zeros(2, fmt.IMAGE_DATA_BLOCK_VIS) image_data["LCW"]["line_number"] = [2744, 8356] image_data["LCW"]["scan_time"] = [50000, 50000] image_data["LCW"]["west_side_earth_edge"] = [-1, 0] @@ -553,8 +554,8 @@ class TestCorruptFile: @pytest.fixture def file_contents(self): """Get corrupt file contents (all zero).""" - control_block = np.zeros(1, dtype=vissr.CONTROL_BLOCK) - image_data = np.zeros(1, dtype=vissr.IMAGE_DATA_BLOCK_IR) + control_block = np.zeros(1, dtype=fmt.CONTROL_BLOCK) + image_data = np.zeros(1, 
dtype=fmt.IMAGE_DATA_BLOCK_IR) return { "control_block": control_block, "image_parameters": {}, @@ -618,11 +619,11 @@ def _write_image_parameters(self, fd, contents): self._write_image_parameter(fd, im_param, name) def _write_image_parameter(self, fd, im_param, name): - offset = vissr.IMAGE_PARAMS[name]["offset"][self.ch_type] + offset = fmt.IMAGE_PARAMS[name]["offset"][self.ch_type] self._write(fd, im_param, offset) def _write_image_data(self, fd, contents): - offset = vissr.IMAGE_DATA[self.ch_type]["offset"] + offset = fmt.IMAGE_DATA[self.ch_type]["offset"] self._write(fd, contents["image_data"], offset) def _write(self, fd, data, offset=None): From 49fd0ddb82d138979dfef7726e96a4f61b84037c Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Thu, 1 Jun 2023 14:35:02 +0000 Subject: [PATCH 0248/1416] Reformat tests --- .../test_gms5_vissr_navigation.py | 52 +++++-------------- 1 file changed, 14 insertions(+), 38 deletions(-) diff --git a/satpy/tests/reader_tests/test_gms5_vissr_navigation.py b/satpy/tests/reader_tests/test_gms5_vissr_navigation.py index 80ce00c6cd..8cc73b87b0 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_navigation.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_navigation.py @@ -247,19 +247,14 @@ def test_get_lon_lat(self, point, nav_params, expected): def test_transform_image_coords_to_scanning_angles(self): """Test transformation from image coordinates to scanning angles.""" - offset = nav.ImageOffset( - line_offset=100, - pixel_offset=200 - ) + offset = nav.ImageOffset(line_offset=100, pixel_offset=200) scanning_angles = nav.ScanningAngles( - stepping_angle=0.01, - sampling_angle=0.02, - misalignment=-999 + stepping_angle=0.01, sampling_angle=0.02, misalignment=-999 ) angles = nav.transform_image_coords_to_scanning_angles( point=np.array([199, 99]), image_offset=offset, - scanning_angles=scanning_angles + scanning_angles=scanning_angles, ) np.testing.assert_allclose(angles, [-2, 1]) @@ -278,7 +273,7 @@ def test_transform_satellite_to_earth_fixed_coords(self): attitude = nav.Attitude( angle_between_earth_and_sun=np.pi, angle_between_sat_spin_and_z_axis=np.pi, - angle_between_sat_spin_and_yz_plane=np.pi / 2 + angle_between_sat_spin_and_yz_plane=np.pi / 2, ) orbit = nav.Orbit( angles=nav.OrbitAngles( @@ -289,25 +284,14 @@ def test_transform_satellite_to_earth_fixed_coords(self): sat_position=nav.SatellitePositionEarthFixed(-999, -999, -999), nutation_precession=np.diag([1, 2, 3]).astype(float), ) - res = nav.transform_satellite_to_earth_fixed_coords( - point_sat, - orbit, - attitude - ) + res = nav.transform_satellite_to_earth_fixed_coords(point_sat, orbit, attitude) np.testing.assert_allclose(res, [-3, 1, -2]) def test_intersect_view_vector_with_earth(self): """Test intersection of a view vector with the earth's surface.""" view_vector = np.array([-1, 0, 0], dtype=float) - ellipsoid = nav.EarthEllipsoid( - equatorial_radius=6371 * 1000, - flattening=0.003 - ) - sat_pos = nav.SatellitePositionEarthFixed( - x=36000 * 1000.0, - y=0.0, - z=0.0 - ) + ellipsoid = nav.EarthEllipsoid(equatorial_radius=6371 * 1000, flattening=0.003) + sat_pos = nav.SatellitePositionEarthFixed(x=36000 * 1000.0, y=0.0, z=0.0) point = nav.intersect_with_earth(view_vector, sat_pos, ellipsoid) exp = [ellipsoid.equatorial_radius, 0, 0] np.testing.assert_allclose(point, exp) @@ -361,7 +345,7 @@ def test_get_lons_lats(self, navigation_params, expected): lons, lats = nav.get_lons_lats( lines=np.array([1000, 1500, 2000]), pixels=np.array([1000, 1500, 2000]), - nav_params=navigation_params + 
nav_params=navigation_params, ) np.testing.assert_allclose(lons, expected["lon"]) np.testing.assert_allclose(lats, expected["lat"]) @@ -490,7 +474,7 @@ def attitude_prediction(): angle_between_earth_and_sun=np.array([0.0, 1.0, 2.0]), angle_between_sat_spin_and_z_axis=np.array([0.1, 1.1, 2.1]), angle_between_sat_spin_and_yz_plane=np.array([0.2, 1.2, 2.2]), - ) + ), ) @@ -536,32 +520,26 @@ def proj_params(sampling_angle): earth_ellipsoid=nav.EarthEllipsoid( flattening=0.003352813177897, equatorial_radius=6378136, - ) + ), ) @pytest.fixture def static_nav_params(proj_params, scan_params): """Get static navigation parameters.""" - return nav.StaticNavigationParameters( - proj_params, scan_params - ) + return nav.StaticNavigationParameters(proj_params, scan_params) @pytest.fixture def predicted_nav_params(attitude_prediction, orbit_prediction): """Get predicted navigation parameters.""" - return nav.PredictedNavigationParameters( - attitude_prediction, orbit_prediction - ) + return nav.PredictedNavigationParameters(attitude_prediction, orbit_prediction) @pytest.fixture def navigation_params(static_nav_params, predicted_nav_params): """Get image navigation parameters.""" - return nav.ImageNavigationParameters( - static_nav_params, predicted_nav_params - ) + return nav.ImageNavigationParameters(static_nav_params, predicted_nav_params) def test_get_observation_time(): @@ -586,9 +564,7 @@ def _assert_namedtuple_close(a, b): if _is_namedtuple(b_attr): _assert_namedtuple_close(a_attr, b_attr) np.testing.assert_allclose( - a_attr, - b_attr, - err_msg=f"{cls_name} attribute {attr} differs" + a_attr, b_attr, err_msg=f"{cls_name} attribute {attr} differs" ) From b4ff93e984d93a57ea5d1876921d62590bebcbd4 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Thu, 1 Jun 2023 15:06:59 +0000 Subject: [PATCH 0249/1416] Reduce code complexity --- satpy/readers/gms5_vissr_navigation.py | 47 +++++++++---------- .../tests/reader_tests/test_gms5_vissr_l1b.py | 38 ++++++++++----- 2 files changed, 47 insertions(+), 38 deletions(-) diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index 8bdb5d267c..62f4d3ed81 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -456,24 +456,6 @@ def get_lon_lat(point, nav_params): return lon, lat -@numba.njit -def _get_image_offset(proj_params): - return proj_params.line_offset, proj_params.pixel_offset - - -@numba.njit -def _get_sampling(proj_params): - return proj_params.stepping_angle, proj_params.sampling_angle - - -@numba.njit -def _get_spin_angles(attitude): - return ( - attitude.angle_between_sat_spin_and_z_axis, - attitude.angle_between_sat_spin_and_yz_plane, - ) - - @numba.njit def _get_sat_pos_vector(sat_position): return np.array( @@ -620,6 +602,8 @@ def _get_satellite_unit_vector_y(sat_unit_vector_x, sat_unit_vector_z): def intersect_with_earth(view_vector, sat_pos, ellipsoid): """Intersect instrument viewing vector with the earth's surface. + Reference: Appendix E, section 2.11 in the GMS user guide. + Args: view_vector: Instrument viewing vector (x, y, z) in earth-fixed coordinates. 
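The hunk below factors the line-ellipsoid intersection into _get_abc_helper and _get_distances_to_intersections: the distances d along the view vector solve the half-coefficient quadratic a*d**2 + 2*b*d + c = 0, so d = (-b +/- sqrt(b**2 - a*c)) / a. A minimal standalone numpy sketch of the same math (illustrative only, not the numba-compiled satpy functions):

import numpy as np

def intersection_distances(sat_pos, view_vector, equatorial_radius, flattening):
    # flat2 folds the ellipsoid's polar compression into the x/y terms,
    # so the oblate ellipsoid is handled like a sphere of the equatorial radius.
    flat2 = (1 - flattening) ** 2
    x, y, z = sat_pos
    ux, uy, uz = view_vector
    a = flat2 * (ux ** 2 + uy ** 2) + uz ** 2
    b = flat2 * (x * ux + y * uy) + z * uz
    c = flat2 * (x ** 2 + y ** 2 - equatorial_radius ** 2) + z ** 2
    tmp = np.sqrt(b ** 2 - a * c)  # negative discriminant (NaN here) = ray misses the earth
    return (-b + tmp) / a, (-b - tmp) / a

# Looking straight down the x-axis from 36000 km, the near intersection is the
# sub-satellite point one equatorial radius from the origin (this mirrors
# test_intersect_view_vector_with_earth above).
d1, d2 = intersection_distances((36000e3, 0.0, 0.0), (-1.0, 0.0, 0.0), 6371e3, 0.003)
print(min(d1, d2))  # 29629000.0 == 36000e3 - 6371e3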
@@ -648,20 +632,33 @@ def _get_distance_to_intersection(view_vector, sat_pos, ellipsoid): @numba.njit def _get_distances_to_intersections(view_vector, sat_pos, ellipsoid): - flat2 = (1 - ellipsoid.flattening) ** 2 - ux, uy, uz = view_vector - x, y, z = sat_pos.x, sat_pos.y, sat_pos.z - - a = flat2 * (ux**2 + uy**2) + uz**2 - b = flat2 * (x * ux + y * uy) + z * uz - c = flat2 * (x**2 + y**2 - ellipsoid.equatorial_radius**2) + z**2 + """Get distances to intersections with the earth's surface. + Returns: + Distances to two intersections with the surface. + """ + a, b, c = _get_abc_helper(view_vector, sat_pos, ellipsoid) tmp = np.sqrt((b**2 - a * c)) dist_1 = (-b + tmp) / a dist_2 = (-b - tmp) / a return dist_1, dist_2 +@numba.njit +def _get_abc_helper(view_vector, sat_pos, ellipsoid): + """Get a,b,c helper variables. + + Reference: Appendix E, Equation (26) in the GMS user guide. + """ + flat2 = (1 - ellipsoid.flattening) ** 2 + ux, uy, uz = view_vector + x, y, z = sat_pos.x, sat_pos.y, sat_pos.z + a = flat2 * (ux ** 2 + uy ** 2) + uz ** 2 + b = flat2 * (x * ux + y * uy) + z * uz + c = flat2 * (x ** 2 + y ** 2 - ellipsoid.equatorial_radius ** 2) + z ** 2 + return a, b, c + + @numba.njit def transform_earth_fixed_to_geodetic_coords(point, earth_flattening): """Transform from earth-fixed to geodetic coordinates. diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index bda7109936..4eff8d2220 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -156,18 +156,14 @@ def nav_params( self, coordinate_conversion, attitude_prediction, - orbit_prediction_1, - orbit_prediction_2, - simple_coordinate_conversion_table, + orbit_prediction, ): """Get navigation parameters.""" - return { - "coordinate_conversion": coordinate_conversion, - "attitude_prediction": attitude_prediction, - "orbit_prediction_1": orbit_prediction_1, - "orbit_prediction_2": orbit_prediction_2, - "simple_coordinate_conversion_table": simple_coordinate_conversion_table, - } + nav_params = {} + nav_params.update(attitude_prediction) + nav_params.update(orbit_prediction) + nav_params.update(coordinate_conversion) + return nav_params @pytest.fixture def cal_params( @@ -201,7 +197,15 @@ def mode_block(self): return mode @pytest.fixture - def coordinate_conversion(self): + def coordinate_conversion(self, coord_conv, simple_coord_conv_table): + """Get all coordinate conversion parameters.""" + return { + "coordinate_conversion": coord_conv, + "simple_coordinate_conversion_table": simple_coord_conv_table + } + + @pytest.fixture + def coord_conv(self): """Get parameters for coordinate conversions. Adjust pixel offset so that the first column is at the image center. 
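The test-side half of this refactor (the nav_params hunk above) composes fixtures by merging small dicts instead of threading every sub-fixture through one signature. A minimal standalone sketch of the pattern, with hypothetical fixture names:

import pytest

@pytest.fixture
def attitude_prediction_params():
    # Each sub-fixture returns a dict keyed like the final parameter set.
    return {"attitude_prediction": "att"}

@pytest.fixture
def orbit_prediction_params():
    return {"orbit_prediction_1": "orb1", "orbit_prediction_2": "orb2"}

@pytest.fixture
def nav_params(attitude_prediction_params, orbit_prediction_params):
    # The top-level fixture only merges; adding a new parameter group later
    # does not change any test signatures.
    params = {}
    params.update(attitude_prediction_params)
    params.update(orbit_prediction_params)
    return params

def test_nav_params_complete(nav_params):
    assert {"attitude_prediction", "orbit_prediction_1"} <= nav_params.keys()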
@@ -253,7 +257,15 @@ def attitude_prediction(self): """Get attitude prediction.""" att_pred = np.zeros(1, dtype=fmt.ATTITUDE_PREDICTION) att_pred["data"] = real_world.ATTITUDE_PREDICTION - return att_pred + return {"attitude_prediction": att_pred} + + @pytest.fixture + def orbit_prediction(self, orbit_prediction_1, orbit_prediction_2): + """Get predictions of orbital parameters.""" + return { + "orbit_prediction_1": orbit_prediction_1, + "orbit_prediction_2": orbit_prediction_2 + } @pytest.fixture def orbit_prediction_1(self): @@ -298,7 +310,7 @@ def wv_calibration(self): return cal @pytest.fixture - def simple_coordinate_conversion_table(self): + def simple_coord_conv_table(self): """Get simple coordinate conversion table.""" table = np.zeros(1, dtype=fmt.SIMPLE_COORDINATE_CONVERSION_TABLE) table["satellite_height"] = 123457.0 From 5900b10efd9b818c50d72ebf0f7556e56fbc3f3f Mon Sep 17 00:00:00 2001 From: ghiggi Date: Thu, 1 Jun 2023 17:40:13 +0200 Subject: [PATCH 0250/1416] Start implementation --- satpy/_config.py | 1 + satpy/readers/abi_base.py | 7 ++++++- satpy/readers/abi_l1b.py | 16 +++++++++++++++- 3 files changed, 22 insertions(+), 2 deletions(-) diff --git a/satpy/_config.py b/satpy/_config.py index 2b583c435c..1582cac6e7 100644 --- a/satpy/_config.py +++ b/satpy/_config.py @@ -62,6 +62,7 @@ def impr_files(module_name: str) -> Path: 'demo_data_dir': '.', 'download_aux': True, 'sensor_angles_position_preference': 'actual', + 'clip_negative_radiances': False, } # Satpy main configuration object diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py index 5f55170b6f..18f89102b6 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -25,6 +25,7 @@ import xarray as xr from pyresample import geometry +import satpy from satpy._compat import cached_property from satpy.readers import open_file_or_filename from satpy.readers.file_handlers import BaseFileHandler @@ -44,7 +45,7 @@ class NC_ABI_BASE(BaseFileHandler): """Base reader for ABI L1B L2+ NetCDF4 files.""" - def __init__(self, filename, filename_info, filetype_info): + def __init__(self, filename, filename_info, filetype_info, clip_negative_radiances=None): """Open the NetCDF file with xarray and prepare the Dataset for reading.""" super(NC_ABI_BASE, self).__init__(filename, filename_info, filetype_info) @@ -56,6 +57,10 @@ def __init__(self, filename, filename_info, filetype_info): self.coords = {} + if clip_negative_radiances is None: + clip_negative_radiances = satpy.config.get("clip_negative_radiances") + self.clip_negative_radiances = clip_negative_radiances + @cached_property def nc(self): """Get the xarray dataset for this file.""" diff --git a/satpy/readers/abi_l1b.py b/satpy/readers/abi_l1b.py index d1ed730792..07cfbfc6c6 100644 --- a/satpy/readers/abi_l1b.py +++ b/satpy/readers/abi_l1b.py @@ -22,7 +22,6 @@ https://www.goes-r.gov/users/docs/PUG-L1b-vol3.pdf """ - import logging import numpy as np @@ -38,6 +37,7 @@ class NC_ABI_L1B(NC_ABI_BASE): def get_dataset(self, key, info): """Load a dataset.""" logger.debug('Reading in get_dataset %s.', key['name']) + # For raw cal, don't apply scale and offset, return raw file counts if key['calibration'] == 'counts': radiances = self.nc['Rad'].copy() @@ -139,6 +139,16 @@ def _vis_calibrate(self, data): res.attrs['standard_name'] = 'toa_bidirectional_reflectance' return res + def _get_minimum_radiance(self, data): + """Estimate minimum radiance from Rad DataArray.""" + attrs = data.attrs + scale_factor = attrs["scale_factor"] + add_offset = 
attrs["add_offset"] + count_zero_rad = - add_offset / scale_factor + count_pos = np.ceil(count_zero_rad) + min_rad = count_pos * scale_factor + add_offset + return min_rad + def _ir_calibrate(self, data): """Calibrate IR channels to BT.""" fk1 = float(self["planck_fk1"]) @@ -146,6 +156,10 @@ def _ir_calibrate(self, data): bc1 = float(self["planck_bc1"]) bc2 = float(self["planck_bc2"]) + if self.clip_negative_radiances: + min_rad = self._get_minimum_radiance(data) + data.data[data.data < min_rad] = min_rad + res = (fk2 / np.log(fk1 / data + 1) - bc1) / bc2 res.attrs = data.attrs res.attrs['units'] = 'K' From b1f7b7afa1160d076b0770417fa94e5e86536ec9 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Thu, 1 Jun 2023 15:50:31 +0000 Subject: [PATCH 0251/1416] Add yaml info to dataset attributes --- satpy/etc/readers/gms5-vissr_l1b.yaml | 6 +++--- satpy/readers/gms5_vissr_l1b.py | 5 +++-- satpy/tests/reader_tests/test_gms5_vissr_l1b.py | 3 ++- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/satpy/etc/readers/gms5-vissr_l1b.yaml b/satpy/etc/readers/gms5-vissr_l1b.yaml index 6b18579463..5381833686 100644 --- a/satpy/etc/readers/gms5-vissr_l1b.yaml +++ b/satpy/etc/readers/gms5-vissr_l1b.yaml @@ -67,7 +67,7 @@ datasets: units: 1 brightness_temperature: standard_name: toa_brightness_temperature - units: "K" + units: K file_type: gms5_vissr_ir1 IR2: @@ -81,7 +81,7 @@ datasets: units: 1 brightness_temperature: standard_name: toa_brightness_temperature - units: "K" + units: K file_type: gms5_vissr_ir2 IR3: @@ -95,5 +95,5 @@ datasets: units: 1 brightness_temperature: standard_name: toa_brightness_temperature - units: "K" + units: K file_type: gms5_vissr_ir3 diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index 347f93afa3..07704f4ff5 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -310,7 +310,7 @@ def get_dataset(self, dataset_id, ds_info): space_masker = SpaceMasker(image_data, dataset_id["name"]) dataset = self._mask_space_pixels(dataset, space_masker) self._attach_lons_lats(dataset, dataset_id) - self._update_attrs(dataset, dataset_id) + self._update_attrs(dataset, dataset_id, ds_info) return dataset def _get_image_data(self): @@ -543,7 +543,8 @@ def _make_lons_lats_data_array(self, lons, lats): ) return lons, lats - def _update_attrs(self, dataset, dataset_id): + def _update_attrs(self, dataset, dataset_id, ds_info): + dataset.attrs.update(ds_info) dataset.attrs.update(self._mda) dataset.attrs[ "area_def_uniform_sampling" diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index 4eff8d2220..405bdb11ee 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -517,6 +517,7 @@ def area_def_exp(self, dataset_id): def attrs_exp(self, area_def_exp): """Get expected dataset attributes.""" return { + "yaml": "info", "platform": "GMS-5", "sensor": "VISSR", "time_parameters": { @@ -536,7 +537,7 @@ def attrs_exp(self, area_def_exp): def test_get_dataset(self, file_handler, dataset_id, dataset_exp, attrs_exp): """Test getting the dataset.""" - dataset = file_handler.get_dataset(dataset_id, None) + dataset = file_handler.get_dataset(dataset_id, {"yaml": "info"}) xr.testing.assert_allclose(dataset.compute(), dataset_exp, atol=1e-6) self._assert_attrs_equal(dataset.attrs, attrs_exp) From 71f2ee0b4bc877962a41a03e88fe80b82d8182f1 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Thu, 1 Jun 2023 15:56:58 
From b1f7b7afa1160d076b0770417fa94e5e86536ec9 Mon Sep 17 00:00:00 2001
From: Stephan Finkensieper
Date: Thu, 1 Jun 2023 15:50:31 +0000
Subject: [PATCH 0251/1416] Add yaml info to dataset attributes

---
 satpy/etc/readers/gms5-vissr_l1b.yaml           | 6 +++---
 satpy/readers/gms5_vissr_l1b.py                 | 5 +++--
 satpy/tests/reader_tests/test_gms5_vissr_l1b.py | 3 ++-
 3 files changed, 8 insertions(+), 6 deletions(-)

diff --git a/satpy/etc/readers/gms5-vissr_l1b.yaml b/satpy/etc/readers/gms5-vissr_l1b.yaml
index 6b18579463..5381833686 100644
--- a/satpy/etc/readers/gms5-vissr_l1b.yaml
+++ b/satpy/etc/readers/gms5-vissr_l1b.yaml
@@ -67,7 +67,7 @@ datasets:
         units: 1
       brightness_temperature:
         standard_name: toa_brightness_temperature
-        units: "K"
+        units: K
     file_type: gms5_vissr_ir1

   IR2:
@@ -81,7 +81,7 @@ datasets:
         units: 1
       brightness_temperature:
         standard_name: toa_brightness_temperature
-        units: "K"
+        units: K
     file_type: gms5_vissr_ir2

   IR3:
@@ -95,5 +95,5 @@ datasets:
         units: 1
       brightness_temperature:
         standard_name: toa_brightness_temperature
-        units: "K"
+        units: K
     file_type: gms5_vissr_ir3
diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py
index 347f93afa3..07704f4ff5 100644
--- a/satpy/readers/gms5_vissr_l1b.py
+++ b/satpy/readers/gms5_vissr_l1b.py
@@ -310,7 +310,7 @@ def get_dataset(self, dataset_id, ds_info):
         space_masker = SpaceMasker(image_data, dataset_id["name"])
         dataset = self._mask_space_pixels(dataset, space_masker)
         self._attach_lons_lats(dataset, dataset_id)
-        self._update_attrs(dataset, dataset_id)
+        self._update_attrs(dataset, dataset_id, ds_info)
         return dataset

     def _get_image_data(self):
@@ -543,7 +543,8 @@ def _make_lons_lats_data_array(self, lons, lats):
         )
         return lons, lats

-    def _update_attrs(self, dataset, dataset_id):
+    def _update_attrs(self, dataset, dataset_id, ds_info):
+        dataset.attrs.update(ds_info)
         dataset.attrs.update(self._mda)
         dataset.attrs[
             "area_def_uniform_sampling"
diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py
index 4eff8d2220..405bdb11ee 100644
--- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py
+++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py
@@ -517,6 +517,7 @@ def area_def_exp(self, dataset_id):
     def attrs_exp(self, area_def_exp):
         """Get expected dataset attributes."""
         return {
+            "yaml": "info",
             "platform": "GMS-5",
             "sensor": "VISSR",
             "time_parameters": {
@@ -536,7 +537,7 @@ def attrs_exp(self, area_def_exp):
     def test_get_dataset(self, file_handler, dataset_id, dataset_exp, attrs_exp):
         """Test getting the dataset."""
-        dataset = file_handler.get_dataset(dataset_id, None)
+        dataset = file_handler.get_dataset(dataset_id, {"yaml": "info"})
         xr.testing.assert_allclose(dataset.compute(), dataset_exp, atol=1e-6)
         self._assert_attrs_equal(dataset.attrs, attrs_exp)

From 71f2ee0b4bc877962a41a03e88fe80b82d8182f1 Mon Sep 17 00:00:00 2001
From: Stephan Finkensieper
Date: Thu, 1 Jun 2023 15:56:58 +0000
Subject: [PATCH 0252/1416] Fix reflectance units

---
 satpy/readers/gms5_vissr_l1b.py                 | 16 ++++++++++++++--
 satpy/tests/reader_tests/test_gms5_vissr_l1b.py |  4 ++--
 2 files changed, 16 insertions(+), 4 deletions(-)

diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py
index 07704f4ff5..0d40838247 100644
--- a/satpy/readers/gms5_vissr_l1b.py
+++ b/satpy/readers/gms5_vissr_l1b.py
@@ -605,13 +605,25 @@ def calibrate(self, counts, calibration):
         """Transform counts to given calibration level."""
         if calibration == "counts":
             return counts
-        res = da.map_blocks(
+        res = self._calibrate(counts)
+        res = self._postproc(res, calibration)
+        return self._make_data_array(res, counts)
+
+    def _calibrate(self, counts):
+        return da.map_blocks(
             self._lookup_calib_table,
             counts.data,
             calib_table=self._calib_table,
             dtype=np.float32,
         )
-        return self._make_data_array(res, counts)
+
+    def _postproc(self, res, calibration):
+        if calibration == "reflectance":
+            res = self._convert_to_percent(res)
+        return res
+
+    def _convert_to_percent(self, res):
+        return res * 100

     def _make_data_array(self, interp, counts):
         return xr.DataArray(
diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py
index 405bdb11ee..e9accadea5 100644
--- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py
+++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py
@@ -364,9 +364,9 @@ def vis_refl_exp(self, mask_space, lons_lats_exp):
         """Get expected VIS reflectance."""
         lons, lats = lons_lats_exp
         if mask_space:
-            data = [[np.nan, np.nan], [0.5, 1]]
+            data = [[np.nan, np.nan], [50, 100]]
         else:
-            data = [[0, 0.25], [0.5, 1]]
+            data = [[0, 25], [50, 100]]
         return xr.DataArray(
             data,
             dims=("y", "x"),
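The _calibrate helper above keeps the count-to-physical lookup lazy by mapping the table over each dask block. A standalone sketch of the technique with an illustrative 256-entry table (not the real VISSR calibration tables):

import dask.array as da
import numpy as np

def _lookup(counts_block, calib_table):
    # Counts index directly into the per-channel table, one block at a time.
    return calib_table[counts_block]

calib_table = np.linspace(0.0, 1.0, 256, dtype=np.float32)  # counts -> albedo
counts = da.from_array(np.array([[0, 63], [127, 255]], dtype=np.uint8), chunks=1)
albedo = da.map_blocks(_lookup, counts, calib_table=calib_table, dtype=np.float32)
print((albedo * 100).compute())  # percent, cf. _convert_to_percent above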
From be34f0371fa48ef5405249ae32eddb84c1681f2e Mon Sep 17 00:00:00 2001
From: ghiggi
Date: Fri, 2 Jun 2023 10:27:27 +0200
Subject: [PATCH 0253/1416] Working implementation

---
 satpy/readers/abi_base.py                |  7 +------
 satpy/readers/abi_l1b.py                 | 12 +++++++++++-
 satpy/tests/reader_tests/test_abi_l1b.py | 21 +++++++++++++++++++--
 3 files changed, 31 insertions(+), 9 deletions(-)

diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py
index 18f89102b6..5f55170b6f 100644
--- a/satpy/readers/abi_base.py
+++ b/satpy/readers/abi_base.py
@@ -25,7 +25,6 @@
 import xarray as xr
 from pyresample import geometry

-import satpy
 from satpy._compat import cached_property
 from satpy.readers import open_file_or_filename
 from satpy.readers.file_handlers import BaseFileHandler
@@ -45,7 +44,7 @@ class NC_ABI_BASE(BaseFileHandler):
     """Base reader for ABI L1B L2+ NetCDF4 files."""

-    def __init__(self, filename, filename_info, filetype_info, clip_negative_radiances=None):
+    def __init__(self, filename, filename_info, filetype_info):
         """Open the NetCDF file with xarray and prepare the Dataset for reading."""
         super(NC_ABI_BASE, self).__init__(filename, filename_info, filetype_info)
@@ -57,10 +56,6 @@ def __init__(self, filename, filename_info, filetype_info, clip_negative_radianc
         self.coords = {}

-        if clip_negative_radiances is None:
-            clip_negative_radiances = satpy.config.get("clip_negative_radiances")
-        self.clip_negative_radiances = clip_negative_radiances
-
     @cached_property
     def nc(self):
         """Get the xarray dataset for this file."""
diff --git a/satpy/readers/abi_l1b.py b/satpy/readers/abi_l1b.py
index 07cfbfc6c6..9883a5919b 100644
--- a/satpy/readers/abi_l1b.py
+++ b/satpy/readers/abi_l1b.py
@@ -26,6 +26,7 @@
 import numpy as np

+import satpy
 from satpy.readers.abi_base import NC_ABI_BASE

 logger = logging.getLogger(__name__)
@@ -34,6 +35,13 @@ class NC_ABI_L1B(NC_ABI_BASE):
     """File reader for individual ABI L1B NetCDF4 files."""

+    def __init__(self, filename, filename_info, filetype_info, clip_negative_radiances=None):
+        """Open the NetCDF file with xarray and prepare the Dataset for reading."""
+        super(NC_ABI_L1B, self).__init__(filename, filename_info, filetype_info)
+        if clip_negative_radiances is None:
+            clip_negative_radiances = satpy.config.get("clip_negative_radiances")
+        self.clip_negative_radiances = clip_negative_radiances
+
     def get_dataset(self, key, info):
         """Load a dataset."""
         logger.debug('Reading in get_dataset %s.', key['name'])
@@ -157,8 +165,10 @@ def _ir_calibrate(self, data):
         bc2 = float(self["planck_bc2"])

         if self.clip_negative_radiances:
+            print(self.clip_negative_radiances)
             min_rad = self._get_minimum_radiance(data)
-            data.data[data.data < min_rad] = min_rad
+            clip_mask = np.logical_and(data < min_rad, ~np.isnan(data))
+            data = data.where(~clip_mask, min_rad)

         res = (fk2 / np.log(fk1 / data + 1) - bc1) / bc2
         res.attrs = data.attrs
diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py
index b8ad4400cb..fd1015856e 100644
--- a/satpy/tests/reader_tests/test_abi_l1b.py
+++ b/satpy/tests/reader_tests/test_abi_l1b.py
@@ -218,14 +218,31 @@ def setUp(self):
         )
         super(Test_NC_ABI_L1B_ir_cal, self).setUp(rad=rad)

-    def test_ir_calibrate(self):
+    def test_ir_calibrate_unclipped(self):
         """Test IR calibration."""
         res = self.reader.get_dataset(
             make_dataid(name='C05', calibration='brightness_temperature'), {})

         expected = np.array([[267.55572248, 305.15576503, 332.37383249, 354.73895301, 374.19710115],
                              [391.68679226, 407.74064808, 422.69329105, 436.77021913, np.nan]])
-        self.assertTrue(np.allclose(res.data, expected, equal_nan=True))
+        assert np.allclose(res.data, expected, equal_nan=True)
+
+        # make sure the attributes from the file are in the data array
+        self.assertNotIn('scale_factor', res.attrs)
+        self.assertNotIn('_FillValue', res.attrs)
+        self.assertEqual(res.attrs['standard_name'],
+                         'toa_brightness_temperature')
+        self.assertEqual(res.attrs['long_name'], 'Brightness Temperature')
+
+    def test_ir_calibrate_clipped(self):
+        """Test IR calibration."""
+        res = self.reader.get_dataset(
+            make_dataid(name='C05', calibration='brightness_temperature'), {})
+
+        expected = np.array([[267.55572248, 305.15576503, 332.37383249, 354.73895301, 374.19710115],
+                             [391.68679226, 407.74064808, 422.69329105, 436.77021913, np.nan]])
+
satpy.readers.abi_l1b import NC_ABI_L1B @@ -113,7 +113,8 @@ def setUp(self, xr_, rad=None): {'platform_shortname': 'G16', 'observation_type': 'Rad', 'suffix': 'custom', 'scene_abbr': 'C', 'scan_mode': 'M3'}, - {'filetype': 'info'}) + {'filetype': 'info'}, + clip_negative_radiances=False) class TestABIYAML: @@ -199,7 +200,7 @@ def test_get_area_def(self, adef): np.testing.assert_allclose(call_args[6], (-2, -2, 8, 2)) -class Test_NC_ABI_L1B_ir_cal(Test_NC_ABI_L1B_Base): +class Test_NC_ABI_L1B_unclipped_ir_cal(Test_NC_ABI_L1B_Base): """Test the NC_ABI_L1B reader's IR calibration.""" def setUp(self): @@ -213,10 +214,10 @@ def setUp(self): attrs={ 'scale_factor': 0.5, 'add_offset': -1., - '_FillValue': 1002, + '_FillValue': 1002, # last rad_data value } ) - super(Test_NC_ABI_L1B_ir_cal, self).setUp(rad=rad) + super(Test_NC_ABI_L1B_unclipped_ir_cal, self).setUp(rad=rad) def test_ir_calibrate_unclipped(self): """Test IR calibration.""" @@ -234,12 +235,35 @@ def test_ir_calibrate_unclipped(self): 'toa_brightness_temperature') self.assertEqual(res.attrs['long_name'], 'Brightness Temperature') - def test_ir_calibrate_clipped(self): + +class Test_NC_ABI_L1B_clipped_ir_cal(Test_NC_ABI_L1B_Base): + """Test the NC_ABI_L1B reader's IR calibration.""" + + def setUp(self): + """Create fake data for the tests.""" + values = np.arange(10.) + values[0] = -0.0001 # introduce below minimum expected radiance + rad_data = (values.reshape((2, 5)) + 1.) * 50. + rad_data = (rad_data + 1.) / 0.5 + rad_data = rad_data.astype(np.int16) + rad = xr.DataArray( + rad_data, + dims=('y', 'x'), + attrs={ + 'scale_factor': 0.5, + 'add_offset': -1., + '_FillValue': 1002, + } + ) + super(Test_NC_ABI_L1B_clipped_ir_cal, self).setUp(rad=rad, clip_negative_radiances=True) + + def test_ir_calibrate_unclipped(self): """Test IR calibration.""" res = self.reader.get_dataset( - make_dataid(name='C05', calibration='brightness_temperature'), {}) + make_dataid(name='C07', calibration='brightness_temperature'), {}) - expected = np.array([[267.55572248, 305.15576503, 332.37383249, 354.73895301, 374.19710115], + clipped_ir = 267.07775531 + expected = np.array([[clipped_ir, 305.15576503, 332.37383249, 354.73895301, 374.19710115], [391.68679226, 407.74064808, 422.69329105, 436.77021913, np.nan]]) assert np.allclose(res.data, expected, equal_nan=True) From 6a8f09bec146bf2e987c827112a49581f280afc4 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Fri, 2 Jun 2023 11:34:36 +0200 Subject: [PATCH 0255/1416] Add missing test --- satpy/readers/abi_l1b.py | 1 - satpy/tests/reader_tests/test_abi_l1b.py | 12 ++++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/satpy/readers/abi_l1b.py b/satpy/readers/abi_l1b.py index 9883a5919b..35043bbede 100644 --- a/satpy/readers/abi_l1b.py +++ b/satpy/readers/abi_l1b.py @@ -165,7 +165,6 @@ def _ir_calibrate(self, data): bc2 = float(self["planck_bc2"]) if self.clip_negative_radiances: - print(self.clip_negative_radiances) min_rad = self._get_minimum_radiance(data) clip_mask = np.logical_and(data < min_rad, ~np.isnan(data)) data = data.where(~clip_mask, min_rad) diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index 0a25a30358..00222332d2 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -257,6 +257,18 @@ def setUp(self): ) super(Test_NC_ABI_L1B_clipped_ir_cal, self).setUp(rad=rad, clip_negative_radiances=True) + def test_get_minimum_radiance(self): + """Test get_minimum_radiance from Rad DataArray.""" + 
from satpy.readers.abi_l1b import NC_ABI_L1B + data = xr.DataArray( + attrs={ + 'scale_factor': 0.5, + 'add_offset': -1., + '_FillValue': 1002, + } + ) + np.testing.assert_allclose(NC_ABI_L1B._get_minimum_radiance(NC_ABI_L1B, data), 0.0) + def test_ir_calibrate_unclipped(self): """Test IR calibration.""" res = self.reader.get_dataset( From 0871b7dc0be1902d7d972d8d819c10c173fe38c3 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Fri, 2 Jun 2023 12:09:34 +0200 Subject: [PATCH 0256/1416] Make CodeScene happy --- satpy/tests/reader_tests/test_abi_l1b.py | 222 ++++++++++++----------- 1 file changed, 120 insertions(+), 102 deletions(-) diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index 00222332d2..8d9f02bc8c 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -27,6 +27,90 @@ from satpy.tests.utils import make_dataid +def _create_fake_rad_dataarray(rad=None): + x_image = xr.DataArray(0.) + y_image = xr.DataArray(0.) + time = xr.DataArray(0.) + if rad is None: + rad_data = (np.arange(10.).reshape((2, 5)) + 1.) * 50. + rad_data = (rad_data + 1.) / 0.5 + rad_data = rad_data.astype(np.int16) + rad = xr.DataArray( + rad_data, + dims=('y', 'x'), + attrs={ + 'scale_factor': 0.5, + 'add_offset': -1., + '_FillValue': 1002, + 'units': 'W m-2 um-1 sr-1', + 'valid_range': (0, 4095), + } + ) + rad.coords['t'] = time + rad.coords['x_image'] = x_image + rad.coords['y_image'] = y_image + return rad + + +def _create_fake_rad_dataset(rad=None): + rad = _create_fake_rad_dataarray(rad=rad) + + x__ = xr.DataArray( + range(5), + attrs={'scale_factor': 2., 'add_offset': -1.}, + dims=('x',) + ) + y__ = xr.DataArray( + range(2), + attrs={'scale_factor': -2., 'add_offset': 1.}, + dims=('y',) + ) + proj = xr.DataArray( + [], + attrs={ + 'semi_major_axis': 1., + 'semi_minor_axis': 1., + 'perspective_point_height': 1., + 'longitude_of_projection_origin': -90., + 'latitude_of_projection_origin': 0., + 'sweep_angle_axis': u'x' + } + ) + + fake_dataset = xr.Dataset( + data_vars={ + 'Rad': rad, + 'band_id': np.array(8), + # 'x': x__, + # 'y': y__, + 'x_image': xr.DataArray(0.), + 'y_image': xr.DataArray(0.), + 'goes_imager_projection': proj, + 'yaw_flip_flag': np.array([1]), + "planck_fk1": np.array(13432.1), + "planck_fk2": np.array(1497.61), + "planck_bc1": np.array(0.09102), + "planck_bc2": np.array(0.99971), + "esun": np.array(2017), + "nominal_satellite_subpoint_lat": np.array(0.0), + "nominal_satellite_subpoint_lon": np.array(-89.5), + "nominal_satellite_height": np.array(35786.02), + "earth_sun_distance_anomaly_in_AU": np.array(0.99) + }, + coords={ + 't': rad.coords['t'], + 'x': x__, + 'y': y__, + + }, + attrs={ + "time_coverage_start": "2017-09-20T17:30:40.8Z", + "time_coverage_end": "2017-09-20T17:41:17.5Z", + }, + ) + return fake_dataset + + class Test_NC_ABI_L1B_Base(unittest.TestCase): """Common setup for NC_ABI_L1B tests.""" @@ -35,86 +119,13 @@ def setUp(self, xr_, rad=None, clip_negative_radiances=False): """Create a fake dataset using the given radiance data.""" from satpy.readers.abi_l1b import NC_ABI_L1B - x_image = xr.DataArray(0.) - y_image = xr.DataArray(0.) - time = xr.DataArray(0.) - if rad is None: - rad_data = (np.arange(10.).reshape((2, 5)) + 1.) * 50. - rad_data = (rad_data + 1.) 
/ 0.5 - rad_data = rad_data.astype(np.int16) - rad = xr.DataArray( - rad_data, - dims=('y', 'x'), - attrs={ - 'scale_factor': 0.5, - 'add_offset': -1., - '_FillValue': 1002, - 'units': 'W m-2 um-1 sr-1', - 'valid_range': (0, 4095), - } - ) - rad.coords['t'] = time - rad.coords['x_image'] = x_image - rad.coords['y_image'] = y_image - x__ = xr.DataArray( - range(5), - attrs={'scale_factor': 2., 'add_offset': -1.}, - dims=('x',) - ) - y__ = xr.DataArray( - range(2), - attrs={'scale_factor': -2., 'add_offset': 1.}, - dims=('y',) - ) - proj = xr.DataArray( - [], - attrs={ - 'semi_major_axis': 1., - 'semi_minor_axis': 1., - 'perspective_point_height': 1., - 'longitude_of_projection_origin': -90., - 'latitude_of_projection_origin': 0., - 'sweep_angle_axis': u'x' - } - ) - fake_dataset = xr.Dataset( - data_vars={ - 'Rad': rad, - 'band_id': np.array(8), - # 'x': x__, - # 'y': y__, - 'x_image': x_image, - 'y_image': y_image, - 'goes_imager_projection': proj, - 'yaw_flip_flag': np.array([1]), - "planck_fk1": np.array(13432.1), - "planck_fk2": np.array(1497.61), - "planck_bc1": np.array(0.09102), - "planck_bc2": np.array(0.99971), - "esun": np.array(2017), - "nominal_satellite_subpoint_lat": np.array(0.0), - "nominal_satellite_subpoint_lon": np.array(-89.5), - "nominal_satellite_height": np.array(35786.02), - "earth_sun_distance_anomaly_in_AU": np.array(0.99) - }, - coords={ - 't': rad.coords['t'], - 'x': x__, - 'y': y__, - - }, - attrs={ - "time_coverage_start": "2017-09-20T17:30:40.8Z", - "time_coverage_end": "2017-09-20T17:41:17.5Z", - }, - ) - xr_.open_dataset.return_value = fake_dataset + xr_.open_dataset.return_value = _create_fake_rad_dataset(rad=rad) self.reader = NC_ABI_L1B('filename', {'platform_shortname': 'G16', 'observation_type': 'Rad', 'suffix': 'custom', 'scene_abbr': 'C', 'scan_mode': 'M3'}, {'filetype': 'info'}, - clip_negative_radiances=False) + clip_negative_radiances=clip_negative_radiances) class TestABIYAML: @@ -200,8 +211,8 @@ def test_get_area_def(self, adef): np.testing.assert_allclose(call_args[6], (-2, -2, 8, 2)) -class Test_NC_ABI_L1B_unclipped_ir_cal(Test_NC_ABI_L1B_Base): - """Test the NC_ABI_L1B reader's IR calibration.""" +class Test_NC_ABI_L1B_ir_cal(Test_NC_ABI_L1B_Base): + """Test the NC_ABI_L1B reader's default IR calibration.""" def setUp(self): """Create fake data for the tests.""" @@ -217,17 +228,13 @@ def setUp(self): '_FillValue': 1002, # last rad_data value } ) - super(Test_NC_ABI_L1B_unclipped_ir_cal, self).setUp(rad=rad) + super(Test_NC_ABI_L1B_ir_cal, self).setUp(rad=rad) - def test_ir_calibrate_unclipped(self): - """Test IR calibration.""" + def test_ir_calibration_attrs(self): + """Test IR calibrated DataArray attributes.""" res = self.reader.get_dataset( make_dataid(name='C05', calibration='brightness_temperature'), {}) - expected = np.array([[267.55572248, 305.15576503, 332.37383249, 354.73895301, 374.19710115], - [391.68679226, 407.74064808, 422.69329105, 436.77021913, np.nan]]) - assert np.allclose(res.data, expected, equal_nan=True) - # make sure the attributes from the file are in the data array self.assertNotIn('scale_factor', res.attrs) self.assertNotIn('_FillValue', res.attrs) @@ -235,9 +242,22 @@ def test_ir_calibrate_unclipped(self): 'toa_brightness_temperature') self.assertEqual(res.attrs['long_name'], 'Brightness Temperature') + def test_clip_negative_radiances_attribute(self): + """Assert that clip_negative_radiances is set to False.""" + assert not self.reader.clip_negative_radiances + + def test_ir_calibrate(self): + """Test IR 
calibration.""" + res = self.reader.get_dataset( + make_dataid(name='C05', calibration='brightness_temperature'), {}) + + expected = np.array([[267.55572248, 305.15576503, 332.37383249, 354.73895301, 374.19710115], + [391.68679226, 407.74064808, 422.69329105, 436.77021913, np.nan]]) + assert np.allclose(res.data, expected, equal_nan=True) + class Test_NC_ABI_L1B_clipped_ir_cal(Test_NC_ABI_L1B_Base): - """Test the NC_ABI_L1B reader's IR calibration.""" + """Test the NC_ABI_L1B reader's IR calibration (clipping negative radiance).""" def setUp(self): """Create fake data for the tests.""" @@ -255,8 +275,23 @@ def setUp(self): '_FillValue': 1002, } ) + super(Test_NC_ABI_L1B_clipped_ir_cal, self).setUp(rad=rad, clip_negative_radiances=True) + def test_clip_negative_radiances_attribute(self): + """Assert that clip_negative_radiances has been set to True.""" + assert self.reader.clip_negative_radiances + + def test_ir_calibrate(self): + """Test IR calibration.""" + res = self.reader.get_dataset( + make_dataid(name='C07', calibration='brightness_temperature'), {}) + + clipped_ir = 267.07775531 + expected = np.array([[clipped_ir, 305.15576503, 332.37383249, 354.73895301, 374.19710115], + [391.68679226, 407.74064808, 422.69329105, 436.77021913, np.nan]]) + assert np.allclose(res.data, expected, equal_nan=True) + def test_get_minimum_radiance(self): """Test get_minimum_radiance from Rad DataArray.""" from satpy.readers.abi_l1b import NC_ABI_L1B @@ -269,23 +304,6 @@ def test_get_minimum_radiance(self): ) np.testing.assert_allclose(NC_ABI_L1B._get_minimum_radiance(NC_ABI_L1B, data), 0.0) - def test_ir_calibrate_unclipped(self): - """Test IR calibration.""" - res = self.reader.get_dataset( - make_dataid(name='C07', calibration='brightness_temperature'), {}) - - clipped_ir = 267.07775531 - expected = np.array([[clipped_ir, 305.15576503, 332.37383249, 354.73895301, 374.19710115], - [391.68679226, 407.74064808, 422.69329105, 436.77021913, np.nan]]) - assert np.allclose(res.data, expected, equal_nan=True) - - # make sure the attributes from the file are in the data array - self.assertNotIn('scale_factor', res.attrs) - self.assertNotIn('_FillValue', res.attrs) - self.assertEqual(res.attrs['standard_name'], - 'toa_brightness_temperature') - self.assertEqual(res.attrs['long_name'], 'Brightness Temperature') - class Test_NC_ABI_L1B_vis_cal(Test_NC_ABI_L1B_Base): """Test the NC_ABI_L1B reader.""" From b1bacff103225385faea670283ae16a589392a61 Mon Sep 17 00:00:00 2001 From: BENR0 Date: Mon, 5 Jun 2023 11:46:12 +0200 Subject: [PATCH 0257/1416] feat: add searchable reader table --- doc/source/conf.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index f45a83333b..82098eeb0a 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -26,8 +26,8 @@ sys.path.append(os.path.abspath('../../')) sys.path.append(os.path.abspath(os.path.dirname(__file__))) -from pyresample.area_config import generate_area_def_rst_list # noqa: E402 -from reader_table import generate_reader_table # noqa: E402 +from pyresample.area_config import _read_yaml_area_file_content, generate_area_def_rst_list # noqa: E402 +from reader_table import generate_reader_table, rst_table_header, rst_table_row # noqa: E402 from satpy.resample import get_area_file # noqa: E402 @@ -83,8 +83,20 @@ def __getattr__(cls, name): with open("reader_table.rst", mode="w") as f: f.write(generate_reader_table()) +# create table from area definition yaml file area_file = get_area_file()[0] + 
+area_dict = _read_yaml_area_file_content(area_file) +area_table = [rst_table_header("Area Definitions", header=["Name", "Description", "Projection"], + widths=[45, 60, 10], class_name="area-table")] + +for aname, params in area_dict.items(): + area_table.append(rst_table_row([f"`{aname}`_", params.get("description", ""), + params.get("projection").get("proj")])) + with open("area_def_list.rst", mode="w") as f: + f.write("".join(area_table)) + f.write("\n\n") f.write(generate_area_def_rst_list(area_file)) # -- General configuration ----------------------------------------------------- From f7664571219b487277e1d9c7e27dd743f5fab0eb Mon Sep 17 00:00:00 2001 From: BENR0 Date: Mon, 5 Jun 2023 11:46:37 +0200 Subject: [PATCH 0258/1416] fix: areas yaml --- satpy/etc/areas.yaml | 97 ++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 93 insertions(+), 4 deletions(-) diff --git a/satpy/etc/areas.yaml b/satpy/etc/areas.yaml index 6abbc18c82..946b73c6db 100644 --- a/satpy/etc/areas.yaml +++ b/satpy/etc/areas.yaml @@ -301,7 +301,7 @@ EastEurope: lower_left_xy: [654112.8864287604, 2989901.7547366405] upper_right_xy: [4553111.804127298, 5390224.287390241] -AfHorn: +AfHorn_geos: description: Eastern disk MSG image 0 degrees projection: proj: geos @@ -316,7 +316,7 @@ AfHorn: lower_left_xy: [2263804.1886089267, -1327678.4008740226] upper_right_xy: [5564247.671007627, 3472966.6644331776] -SouthAmerica: +SouthAmerica_geos: description: Lower West part of Southern disk MSG image 0 degrees projection: proj: geos @@ -716,6 +716,7 @@ australia: area_extent: lower_left_xy: [-2504688.5428486555, -5591295.9185533915] upper_right_xy: [2504688.5428486555, -1111475.102852225] + mali: description: mali projection: @@ -729,6 +730,7 @@ mali: area_extent: lower_left_xy: [-1224514.3987260093, 1111475.1028522244] upper_right_xy: [1224514.3987260093, 3228918.5790461157] + mali_eqc: description: mali projection: @@ -742,6 +744,7 @@ mali_eqc: area_extent: lower_left_xy: [-1224514.3987260093, -1001875.4171394627] upper_right_xy: [1224514.3987260093, 1001875.4171394617] + sve: description: Sweden and baltic sea @@ -753,6 +756,7 @@ sve: area_extent: lower_left_xy: [-342379.698, 6032580.06] upper_right_xy: [1423701.52, 8029648.75] + brazil2: description: brazil, platecarree projection: @@ -765,6 +769,7 @@ brazil2: lower_left_xy: [-7792364.355529149, -4452779.631730943] upper_right_xy: [-2226389.8158654715, 1669792.3618991035] units: m + sudeste: description: sudeste, platecarree projection: @@ -777,6 +782,7 @@ sudeste: lower_left_xy: [-6122571.993630046, -3005626.251418386] upper_right_xy: [-4230140.650144396, -1447153.3803125564] units: m + SouthAmerica_flat: description: South America flat projection: @@ -790,6 +796,7 @@ SouthAmerica_flat: lower_left_xy: [-8326322.82790897, -4609377.085697311] upper_right_xy: [-556597.4539663679, 1535833.8895192828] units: m + south_america: description: south_america, platecarree projection: @@ -802,6 +809,7 @@ south_america: lower_left_xy: [-8126322.82790897, -5009377.085697311] upper_right_xy: [-556597.4539663679, 1335833.8895192828] units: m + brazil: description: brazil, platecarree projection: @@ -814,6 +822,7 @@ brazil: lower_left_xy: [-8348961.809495518, -3896182.1777645745] upper_right_xy: [-3784862.6869713017, 1001875.4171394621] units: m + worldeqc3km70: description: World in 3km, platecarree projection: @@ -826,6 +835,7 @@ worldeqc3km70: lower_left_xy: [-20037508.3428, -7792364.355533333] upper_right_xy: [20037508.3428, 7792364.355533333] units: m + worldeqc30km70: 
description: World in 3km, platecarree projection: @@ -838,6 +848,7 @@ worldeqc30km70: lower_left_xy: [-20037508.3428, -7792364.355533333] upper_right_xy: [20037508.3428, 7792364.355533333] units: m + worldeqc3km73: description: World in 3km, platecarree projection: @@ -850,6 +861,7 @@ worldeqc3km73: lower_left_xy: [-20037508.3428, -8181982.573309999] upper_right_xy: [20037508.3428, 8181982.573309999] units: m + worldeqc3km: description: World in 3km, platecarree projection: @@ -862,6 +874,7 @@ worldeqc3km: lower_left_xy: [-20037508.3428, -10018754.1714] upper_right_xy: [20037508.3428, 10018754.1714] units: m + worldeqc30km: description: World in 3km, platecarree projection: @@ -874,6 +887,7 @@ worldeqc30km: lower_left_xy: [-20037508.3428, -10018754.1714] upper_right_xy: [20037508.3428, 10018754.1714] units: m + libya: description: libya area projection: @@ -888,6 +902,7 @@ libya: lower_left_xy: [-1921632.0902750609, 1725320.2028891125] upper_right_xy: [1918367.9097249391, 4797320.202889113] units: m + phil: description: kuwait area projection: @@ -903,6 +918,7 @@ phil: lower_left_xy: [-2200000.0, 0.0] upper_right_xy: [2200000.0, 2200000.0] units: m + phil_small: description: kuwait area projection: @@ -918,6 +934,7 @@ phil_small: lower_left_xy: [-600000.0, 0.0] upper_right_xy: [1600000.0, 2200000.0] units: m + kuwait: description: kuwait area projection: @@ -932,6 +949,7 @@ kuwait: lower_left_xy: [-1280000.0, 1820000.0] upper_right_xy: [1280000.0, 4380000.0] units: m + afghanistan: description: Afghanistan projection: @@ -960,7 +978,8 @@ maspalomas: area_extent: lower_left_xy: [-1200000.0, 2900000.0] upper_right_xy: [900000.0, 4000000.0] -afhorn: + +afhorn_merc: description: Africa horn 3km resolution projection: proj: merc @@ -972,6 +991,7 @@ afhorn: area_extent: lower_left_xy: [-2432000.0, -1130348.139543] upper_right_xy: [2432000.0, 3733651.860457] + spain: description: Spain projection: @@ -988,6 +1008,7 @@ spain: area_extent: lower_left_xy: [-500000.0, -500000.0] upper_right_xy: [500000.0, 500000.0] + germ: description: Germany projection: @@ -1004,6 +1025,7 @@ germ: area_extent: lower_left_xy: [-155100.436345, -4441495.37946] upper_right_xy: [868899.563655, -3417495.37946] + germ2: description: Germany projection: @@ -1020,6 +1042,7 @@ germ2: area_extent: lower_left_xy: [-165100.436345, -4441495.37946] upper_right_xy: [878899.563655, -3417495.37946] + euro4: description: Euro 4km area - Europe projection: @@ -1034,6 +1057,7 @@ euro4: area_extent: lower_left_xy: [-2717181.7304994687, -5571048.14031214] upper_right_xy: [1378818.2695005313, -1475048.1403121399] + euro1: description: Euro 4km area - Europe projection: @@ -1048,6 +1072,7 @@ euro1: area_extent: lower_left_xy: [-2717181.7304994687, -5571048.14031214] upper_right_xy: [1378818.2695005313, -1475048.1403121399] + scan: description: Scandinavia projection: @@ -1062,6 +1087,7 @@ scan: area_extent: lower_left_xy: [-1268854.126638295, -4150234.8425892727] upper_right_xy: [779145.8733617051, -2102234.8425892727] + scan2: description: Scandinavia - 2km area projection: @@ -1076,6 +1102,7 @@ scan2: area_extent: lower_left_xy: [-1268854.126638295, -4150234.8425892727] upper_right_xy: [779145.8733617051, -2102234.8425892727] + scan1: description: Scandinavia - 1km area projection: @@ -1090,6 +1117,7 @@ scan1: area_extent: lower_left_xy: [-1268854.126638295, -4150234.8425892727] upper_right_xy: [779145.8733617051, -2062234.8425892727] + scan500m: description: Scandinavia - 500m area projection: @@ -1104,6 +1132,7 @@ scan500m: 
area_extent: lower_left_xy: [-1268854.126638295, -4150234.8425892727] upper_right_xy: [779145.8733617051, -2062234.8425892727] + mesanX: description: Mesan-X rotated lon/lat 1.8km projection: @@ -1121,6 +1150,7 @@ mesanX: area_extent: lower_left_xy: [1067435.7598983962, -1278764.890341909] upper_right_xy: [3791765.9965939857, 1690140.6680267097] + mesanE: description: Europe Mesan rotated lon/lat 1.8km projection: @@ -1138,6 +1168,7 @@ mesanE: area_extent: lower_left_xy: [289083.0005619671, -2957836.6467769896] upper_right_xy: [5381881.121371055, 3335826.68502126] + baws: description: BAWS projection: @@ -1152,6 +1183,7 @@ baws: area_extent: lower_left_xy: [-475617.0, 5324430.0] upper_right_xy: [924383.0, 6724430.0] + eurotv: description: Europe TV - 6.2x5.0km projection: @@ -1168,6 +1200,7 @@ eurotv: area_extent: lower_left_xy: [-3503748.8201907813, -6589593.134058789] upper_right_xy: [2842567.6359087573, -1499856.5846593212] + eurotv4n: description: Europe TV4 - 4.1x4.1km projection: @@ -1184,6 +1217,7 @@ eurotv4n: area_extent: lower_left_xy: [-5103428.678666952, -6772478.60053407] upper_right_xy: [3293371.321333048, -2049278.6005340703] + eurol: description: Euro 3.0km area - Europe projection: @@ -1198,6 +1232,7 @@ eurol: area_extent: lower_left_xy: [-3780000.0, -7644000.0] upper_right_xy: [3900000.0, -1500000.0] + eurol1: description: Euro 3.0km area - Europe projection: @@ -1212,6 +1247,7 @@ eurol1: area_extent: lower_left_xy: [-3780000.0, -7644000.0] upper_right_xy: [3900000.0, -1500000.0] + scanl: description: Scandinavia - Large projection: @@ -1226,6 +1262,7 @@ scanl: area_extent: lower_left_xy: [-900000.0, -4500000.0] upper_right_xy: [2000000.0, -1600000.0] + euron1: description: Northern Europe - 1km projection: @@ -1240,6 +1277,7 @@ euron1: area_extent: lower_left_xy: [-1000000.0, -4500000.0] upper_right_xy: [2072000.0, -1428000.0] + euron0250: description: Northern Europe - 1km projection: @@ -1254,6 +1292,7 @@ euron0250: area_extent: lower_left_xy: [-1000000.0, -4500000.0] upper_right_xy: [2072000.0, -1428000.0] + nsea: description: North Baltic Sea projection: @@ -1267,6 +1306,7 @@ nsea: area_extent: lower_left_xy: [-322789.07638000086, 7784901.986829306] upper_right_xy: [1725210.923619999, 9832901.986829307] + ssea: description: South Baltic Sea projection: @@ -1280,6 +1320,7 @@ ssea: area_extent: lower_left_xy: [-801407.3620468981, 7003690.663643802] upper_right_xy: [1246592.637953102, 9051690.663643802] + nsea250: description: North Baltic Sea projection: @@ -1293,6 +1334,7 @@ nsea250: area_extent: lower_left_xy: [-322789.07638000086, 7784901.986829306] upper_right_xy: [1725210.923619999, 9832901.986829307] + ssea250: description: South Baltic Sea projection: @@ -1306,6 +1348,7 @@ ssea250: area_extent: lower_left_xy: [-801407.3620468981, 7003690.663643802] upper_right_xy: [1246592.637953102, 9051690.663643802] + bsea250: description: South Baltic Sea projection: @@ -1319,6 +1362,7 @@ bsea250: area_extent: lower_left_xy: [512000.0, 3525000.0] upper_right_xy: [1700000.0, 4933000.0] + test250: description: South Baltic Sea projection: @@ -1332,6 +1376,7 @@ test250: area_extent: lower_left_xy: [512000.0, 3525000.0] upper_right_xy: [1700000.0, 4933000.0] + bsea1000: description: South Baltic Sea projection: @@ -1345,6 +1390,7 @@ bsea1000: area_extent: lower_left_xy: [512000.0, 3525000.0] upper_right_xy: [1700000.0, 4933000.0] + euro: description: Euro area - Europe projection: @@ -1359,6 +1405,7 @@ euro: area_extent: lower_left_xy: [-2717181.7304994687, -5571048.14031214] 
upper_right_xy: [1378818.2695005313, -1475048.1403121399] + baltrad_lambert: description: Baltrad Lambert projection: @@ -1373,6 +1420,7 @@ baltrad_lambert: area_extent: lower_left_xy: [-994211.85388, -1291605.15396] upper_right_xy: [635788.14612, 1098394.84604] + eport: description: eport projection: @@ -1387,6 +1435,7 @@ eport: lower_left_xy: [-5283418.625834752, -5283418.625834753] upper_right_xy: [5283418.625834753, 5283418.625834752] units: m + eport1: description: eport projection: @@ -1401,6 +1450,7 @@ eport1: lower_left_xy: [-5283418.625834752, -5283418.625834753] upper_right_xy: [5283418.625834753, 5283418.625834752] units: m + eport10: description: eport reduced resolution projection: @@ -1415,6 +1465,7 @@ eport10: lower_left_xy: [-5283418.625834752, -5283418.625834753] upper_right_xy: [5283418.625834753, 5283418.625834752] units: m + eport4: description: eport reduced resolution projection: @@ -1429,6 +1480,7 @@ eport4: lower_left_xy: [-5283418.625834752, -5283418.625834753] upper_right_xy: [5283418.625834753, 5283418.625834752] units: m + eport2: description: eport reduced resolution projection: @@ -1443,6 +1495,7 @@ eport2: lower_left_xy: [-5283418.625834752, -5283418.625834753] upper_right_xy: [5283418.625834753, 5283418.625834752] units: m + npp_sample_m: description: North America - NPP sample data - M-bands projection: @@ -1457,6 +1510,7 @@ npp_sample_m: area_extent: lower_left_xy: [-1700000.0, -1400000.0] upper_right_xy: [1100000.0, 1400000.0] + arctic_europe_1km: description: Arctic and Europe projection: @@ -1471,6 +1525,7 @@ arctic_europe_1km: area_extent: lower_left_xy: [-3100000.0, -7100000.0] upper_right_xy: [6000000.0, 2000000.0] + arctic_europe_9km: description: Arctic and Europe projection: @@ -1485,6 +1540,7 @@ arctic_europe_9km: area_extent: lower_left_xy: [-3100000.0, -7100000.0] upper_right_xy: [6000000.0, 2000000.0] + sswe: description: Southern Sweden projection: @@ -1501,6 +1557,7 @@ sswe: area_extent: lower_left_xy: [-400884.23045, -3946631.71387] upper_right_xy: [623115.76955, -2922631.71387] + nswe: description: Northern Sweden projection: @@ -1517,6 +1574,7 @@ nswe: area_extent: lower_left_xy: [-392288.010506, -3105279.35252] upper_right_xy: [631711.989494, -2081279.35252] + sval: description: Svalbard projection: @@ -1531,6 +1589,7 @@ sval: area_extent: lower_left_xy: [-287554.9489620461, -1630805.15418955] upper_right_xy: [736445.0510379539, -606805.1541895501] + ease_sh: description: Antarctic EASE grid projection: @@ -1546,6 +1605,7 @@ ease_sh: lower_left_xy: [-5326849.0625, -5326849.0625] upper_right_xy: [5326849.0625, 5326849.0625] units: m + ease_nh: description: Arctic EASE grid projection: @@ -1561,6 +1621,7 @@ ease_nh: lower_left_xy: [-5326849.0625, -5326849.0625] upper_right_xy: [5326849.0625, 5326849.0625] units: m + barents_sea: description: Barents and Greenland seas projection: @@ -1575,6 +1636,7 @@ barents_sea: area_extent: lower_left_xy: [-1600000.0, -2000000.0] upper_right_xy: [1400000.0, -300000.0] + antarctica: description: Antarctica - 1km projection: @@ -1589,6 +1651,7 @@ antarctica: area_extent: lower_left_xy: [-2858899.2042342643, -2858899.204234264] upper_right_xy: [2858899.204234264, 2858899.2042342643] + arctica: description: arctica - 1km projection: @@ -1603,6 +1666,7 @@ arctica: area_extent: lower_left_xy: [-1458899.2042342643, -1458899.2042342639] upper_right_xy: [1458899.2042342639, 1458899.2042342643] + euroasia: description: Euroasia - Global 1km USGS Landuse database projection: @@ -1617,6 +1681,7 @@ euroasia: 
area_extent: lower_left_xy: [-3000000.0, -4999000.0] upper_right_xy: [9999000.0, 8000000.0] + euroasia_10km: description: Euroasia - Global 1km USGS Landuse database projection: @@ -1631,6 +1696,7 @@ euroasia_10km: area_extent: lower_left_xy: [-3000000.0, -4999000.0] upper_right_xy: [9999000.0, 8000000.0] + euroasia_asia: description: Euroasia - optimised for Asia - @@ -1647,6 +1713,7 @@ euroasia_asia: area_extent: lower_left_xy: [-8000000.0, -5499000.0] upper_right_xy: [4999000.0, 6500000.0] + euroasia_asia_10km: description: Euroasia - optimised for Asia - Global 1km USGS Landuse database projection: @@ -1661,6 +1728,7 @@ euroasia_asia_10km: area_extent: lower_left_xy: [-8000000.0, -5499000.0] upper_right_xy: [4999000.0, 6500000.0] + australia_pacific: description: Austalia/Pacific - Global 1km USGS Landuse database projection: @@ -1675,6 +1743,7 @@ australia_pacific: area_extent: lower_left_xy: [-5000000.0, -3944890.0] upper_right_xy: [4299000.0, 4054110.0] + australia_pacific_10km: description: Austalia/Pacific - Global 1km USGS Landuse database projection: @@ -1689,6 +1758,7 @@ australia_pacific_10km: area_extent: lower_left_xy: [-5000000.0, -3944890.0] upper_right_xy: [4299000.0, 4054110.0] + africa: description: Africa - Global 1km USGS Landuse database projection: @@ -1703,6 +1773,7 @@ africa: area_extent: lower_left_xy: [-4458000.0, -4795000.0] upper_right_xy: [3891000.0, 4480000.0] + africa_10km: description: Africa - Global 1km USGS Landuse database projection: @@ -1717,7 +1788,8 @@ africa_10km: area_extent: lower_left_xy: [-4458000.0, -4795000.0] upper_right_xy: [3891000.0, 4480000.0] -southamerica: + +southamerica_laea: description: South America - Global 1km USGS Landuse database projection: proj: laea @@ -1731,6 +1803,7 @@ southamerica: area_extent: lower_left_xy: [-3000000.0, -4899000.0] upper_right_xy: [2999000.0, 3100000.0] + southamerica_10km: description: South America - Global 1km USGS Landuse database projection: @@ -1745,6 +1818,7 @@ southamerica_10km: area_extent: lower_left_xy: [-3000000.0, -4899000.0] upper_right_xy: [2999000.0, 3100000.0] + northamerica: description: North America - Global 1km USGS Landuse database projection: @@ -1759,6 +1833,7 @@ northamerica: area_extent: lower_left_xy: [-4487000.0, -4515000.0] upper_right_xy: [4735000.0, 4480000.0] + northamerica_10km: description: North America - Global 1km USGS Landuse database projection: @@ -1773,6 +1848,7 @@ northamerica_10km: area_extent: lower_left_xy: [-4487000.0, -4515000.0] upper_right_xy: [4735000.0, 4480000.0] + romania: description: Romania - 3km projection: @@ -1787,6 +1863,7 @@ romania: area_extent: lower_left_xy: [-2226837.662574135, -1684219.2829063328] upper_right_xy: [2299196.337425865, 881436.7170936672] + stere_asia_test: description: stere projection: @@ -1799,6 +1876,7 @@ stere_asia_test: area_extent: lower_left_xy: [-3589072.840299738, -3568228.07278016] upper_right_xy: [3611014.256314698, 3594111.7022882444] + bocheng_test: description: stere projection: @@ -1811,6 +1889,7 @@ bocheng_test: area_extent: lower_left_xy: [-3589072.840299738, -3568228.07278016] upper_right_xy: [3611014.256314698, 3594111.7022882444] + nsper_swe: description: nsper_swe projection: @@ -1825,6 +1904,7 @@ nsper_swe: area_extent: lower_left_xy: [-5000000.0, -5000000.0] upper_right_xy: [5000000.0, 5000000.0] + new_bsea250: description: new_bsea250 projection: @@ -1838,6 +1918,7 @@ new_bsea250: area_extent: lower_left_xy: [-638072.2772287376, -680339.8397175331] upper_right_xy: [638072.277228737, 
757253.9342263378] + scanice: description: Scandinavia and Iceland projection: @@ -1852,6 +1933,7 @@ scanice: area_extent: lower_left_xy: [-1920000.0, -1536000.0] upper_right_xy: [1920000.0, 1536000.0] + baws250: description: BAWS, 250m resolution projection: @@ -1866,6 +1948,7 @@ baws250: area_extent: lower_left_xy: [-475617.0, 5324430.0] upper_right_xy: [924383.0, 6724430.0] + moll: description: moll projection: @@ -1879,6 +1962,7 @@ moll: area_extent: lower_left_xy: [-18040095.696147293, -9020047.848073646] upper_right_xy: [18040095.696147293, 9020047.848073646] + robinson: description: robinson projection: @@ -1912,6 +1996,7 @@ met07globe: # obsolete platform number area_extent: lower_left_xy: [-5621225.237846375, -5621225.237846375] upper_right_xy: [5621225.237846375, 5621225.237846375] + met09globe: # obsolete platform number description: Cropped disk MSG image 0 degrees projection: @@ -1926,6 +2011,7 @@ met09globe: # obsolete platform number area_extent: lower_left_xy: [-5432229.931711678, -5429229.528545862] upper_right_xy: [5429229.528545862, 5432229.931711678] + met09globeFull: # superseded by msg_seviri_fes_3km description: Full disk MSG image 0 degrees projection: @@ -1940,6 +2026,7 @@ met09globeFull: # superseded by msg_seviri_fes_3km area_extent: lower_left_xy: [-5570248.477339261, -5567248.074173444] upper_right_xy: [5567248.074173444, 5570248.477339261] + seviri_0deg: # superseded by msg_seviri_fes_3km description: Full disk MSG image 0 degrees projection: @@ -1954,6 +2041,7 @@ seviri_0deg: # superseded by msg_seviri_fes_3km area_extent: lower_left_xy: [-5570248.686685662, -5567248.28340708] upper_right_xy: [5567248.28340708, 5570248.686685662] + seviri_iodc: # superseded by msg_seviri_iodc_3km description: Full disk MSG image 41.5 degrees projection: @@ -1968,6 +2056,7 @@ seviri_iodc: # superseded by msg_seviri_iodc_3km area_extent: lower_left_xy: [-5570248.686685662, -5567248.28340708] upper_right_xy: [5567248.28340708, 5570248.686685662] + msg_resample_area: description: Full disk MSG image 20.75 degrees projection: From 6a7e1c07f5f225fdc6e8bd76494e6c6da413b32e Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Mon, 5 Jun 2023 09:14:28 -0500 Subject: [PATCH 0259/1416] Trying to make sure valid range is given for non-flag variables (this is important for AWIPS) It also seems important that integer (flag data) does not have a valid range provided. Add tests to flag this problem if it appears again. 
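The intended behavior is easiest to see in isolation: scale the packed valid range with the same factor and offset as the data, mask everything outside it, and attach the scaled range only to float variables. A minimal sketch with plain xarray (the packed counts and valid range below are invented for illustration; the reader's own ``_scale_data`` applies the same linear scaling):

import numpy as np
import xarray as xr

factor, offset = 0.01, 0.0                       # linear scaling from the file attributes
valid_range = (0, 20000)                         # packed-count range, made up for this sketch
packed = xr.DataArray(np.array([[-10, 50], [15000, 32767]], dtype=np.int16))
valid_min = valid_range[0] * factor + offset
valid_max = valid_range[1] * factor + offset
data = packed * factor + offset
data = data.where((data >= valid_min) & (data <= valid_max))  # out-of-range values become NaN
data.attrs["valid_range"] = [valid_min, valid_max]            # scaled range kept for AWIPS
# integer flag variables (e.g. quality flags) get no valid_range attribute at all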
--- satpy/readers/clavrx.py | 1 + satpy/tests/reader_tests/test_clavrx.py | 1 + satpy/tests/reader_tests/test_clavrx_nc.py | 3 +++ 3 files changed, 5 insertions(+) diff --git a/satpy/readers/clavrx.py b/satpy/readers/clavrx.py index fdcd5ff8cf..23255adda9 100644 --- a/satpy/readers/clavrx.py +++ b/satpy/readers/clavrx.py @@ -171,6 +171,7 @@ def _get_data(data, dataset_id: dict) -> xr.DataArray: valid_min = _scale_data(valid_range[0], factor, offset) valid_max = _scale_data(valid_range[1], factor, offset) data = data.where((data >= valid_min) & (data <= valid_max)) + attrs['valid_range'] = [valid_min, valid_max] data.attrs = _CLAVRxHelper._remove_attributes(attrs) diff --git a/satpy/tests/reader_tests/test_clavrx.py b/satpy/tests/reader_tests/test_clavrx.py index f7c8f1f1cd..71d666b93d 100644 --- a/satpy/tests/reader_tests/test_clavrx.py +++ b/satpy/tests/reader_tests/test_clavrx.py @@ -421,6 +421,7 @@ def test_load_all_old_donor(self): else: self.assertNotIn('_FillValue', v.attrs) if v.attrs["name"] == 'refl_1_38um_nom': + self.assertIn("valid_range", v.attrs) self.assertIsInstance(v.attrs["valid_range"], list) else: self.assertNotIn('valid_range', v.attrs) diff --git a/satpy/tests/reader_tests/test_clavrx_nc.py b/satpy/tests/reader_tests/test_clavrx_nc.py index 0a6bdddfec..8487db48c7 100644 --- a/satpy/tests/reader_tests/test_clavrx_nc.py +++ b/satpy/tests/reader_tests/test_clavrx_nc.py @@ -206,15 +206,18 @@ def test_load_all_new_donor(self, filenames, loadable_ids): assert "valid_range" not in datasets["variable1"].attrs assert "_FillValue" not in datasets["variable1"].attrs assert np.float64 == datasets["variable1"].dtype + assert "valid_range" not in datasets["variable1"].attrs assert np.issubdtype(datasets["var_flags"].dtype, np.integer) assert datasets['var_flags'].attrs.get('flag_meanings') is not None assert '' == datasets['var_flags'].attrs.get('flag_meanings') assert np.issubdtype(datasets["out_of_range_flags"].dtype, np.integer) + assert "valid_range" not in datasets["out_of_range_flags"].attrs assert isinstance(datasets["refl_0_65um_nom"].valid_range, list) assert np.float64 == datasets["refl_0_65um_nom"].dtype assert "_FillValue" not in datasets["refl_0_65um_nom"].attrs + assert "valid_range" in datasets["refl_0_65um_nom"].attrs assert "refl_0_65um_nom" == datasets["C02"].file_key assert "_FillValue" not in datasets["C02"].attrs From e973b0663b09ca389a4702145cfba31a04a9cccd Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Tue, 6 Jun 2023 08:23:30 +0000 Subject: [PATCH 0260/1416] Update documentation --- satpy/readers/gms5_vissr_l1b.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index 0d40838247..ef7dff2855 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -108,6 +108,19 @@ constant. +Performance +~~~~~~~~~~~ + +Navigation of VISSR images is computationally expensive, because for each pixel +the view vector of the (rotating) instrument needs to be intersected with the +earth, including interpolation of attitude and orbit prediction. + +Currently, navigation takes about 20 seconds for IR channels and 8 minutes for +the VIS channel. Although the navigation module is jit-compiled using numba, +JMA's C library ``Msial`` is still four times faster. So there's certainly room +for optimization. 
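The expensive inner step of that navigation is the per-pixel intersection of the instrument view vector with the earth. A toy version of just the geometry, for a spherical earth and made-up vectors (the reader's real code additionally interpolates attitude and orbit data for each pixel's scan time):

import numpy as np

def intersect_view_vector(sat_pos, view_dir, earth_radius=6371.0):
    """Return the first intersection of a view ray with a spherical earth, in km."""
    view_dir = view_dir / np.linalg.norm(view_dir)
    b = 2.0 * np.dot(sat_pos, view_dir)
    c = np.dot(sat_pos, sat_pos) - earth_radius ** 2
    disc = b ** 2 - 4.0 * c
    if disc < 0:
        return None  # the ray misses the earth (a space pixel)
    t = (-b - np.sqrt(disc)) / 2.0  # smaller root is the earth-facing intersection
    return sat_pos + t * view_dir

# geostationary-like satellite position, looking straight at the sub-satellite point
print(intersect_view_vector(np.array([42164.0, 0.0, 0.0]), np.array([-1.0, 0.0, 0.0])))

Repeating this, together with the attitude and orbit interpolation, for every pixel of a full VIS image is what makes the timings above so large.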
+ + Space Pixels ------------ From 25f2def8a39dc46556b0e0ed0812c27dcbbe247b Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Tue, 6 Jun 2023 10:37:22 +0000 Subject: [PATCH 0261/1416] Add reference to Pygac documentation --- satpy/readers/avhrr_l1b_gaclac.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/satpy/readers/avhrr_l1b_gaclac.py b/satpy/readers/avhrr_l1b_gaclac.py index 6dbcc84895..e520b29b30 100644 --- a/satpy/readers/avhrr_l1b_gaclac.py +++ b/satpy/readers/avhrr_l1b_gaclac.py @@ -17,11 +17,16 @@ # satpy. If not, see . """Reading and calibrating GAC and LAC AVHRR data. +Uses Pygac under the hood. See the `Pygac Documentation`_ for supported data +formats as well as calibration and navigation methods. + .. todo:: Fine grained calibration Radiance output +.. _Pygac Documentation: + https://pygac.readthedocs.io/en/stable """ import logging From e16b7db6b4c1b26eed246a37ae8a4df0ea4475cd Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 7 Jun 2023 00:55:43 +0000 Subject: [PATCH 0262/1416] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- satpy/tests/test_composites.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index cf3898ad72..e6751835f7 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -31,7 +31,6 @@ import satpy - # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path From 9c03b1ac4804d841f0b121ce510679a9021f7daf Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 7 Jun 2023 19:06:15 +0800 Subject: [PATCH 0263/1416] some clean-ups --- satpy/composites/__init__.py | 29 +++------- satpy/tests/test_composites.py | 102 ++++++++++++++++++++------------- 2 files changed, 72 insertions(+), 59 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index e965f82a61..1fdb727627 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1103,41 +1103,30 @@ def _get_and_sharpen_rgb_data_arrays_and_meta(self, datasets, optional_datasets) if 'rows_per_scan' in high_res.attrs: new_attrs.setdefault('rows_per_scan', high_res.attrs['rows_per_scan']) new_attrs.setdefault('resolution', high_res.attrs['resolution']) - colors = ['red', 'green', 'blue', None] - low_resolution_index = colors.index(self.high_resolution_color) - neutral_resolution_index = colors.index(self.neutral_resolution_color) - neutral_res = datasets[neutral_resolution_index] if self.neutral_resolution_color is not None else None else: LOG.debug("No sharpening band specified for ratio sharpening") high_res = None - neutral_res = None - low_resolution_index = 0 - neutral_resolution_index = 0 if high_res is not None: - bands = locals() - colors = ['red', 'green', 'blue'] - bands["low_res_" + self.high_resolution_color] = high_res - colors.remove(self.high_resolution_color) - low_res = (low_res_red, low_res_green, low_res_blue)[low_resolution_index] + bands = {'red': low_res_red, 'green': low_res_green, 'blue': low_res_blue} + ratio = da.map_blocks( _get_sharpening_ratio, high_res.data, - low_res.data, + bands[self.high_resolution_color].data, meta=np.array((), dtype=high_res.dtype), dtype=high_res.dtype, chunks=high_res.chunks, ) - with xr.set_options(keep_attrs=True): - if neutral_res is not None: - if low_resolution_index != neutral_resolution_index: - 
colors.remove(self.neutral_resolution_color) - for color in colors: - bands["low_res_" + color] = bands["low_res_" + color] * ratio + bands[self.high_resolution_color] = high_res - return bands["low_res_red"], bands["low_res_green"], bands["low_res_blue"], new_attrs + with xr.set_options(keep_attrs=True): + for color in bands.keys(): + if color != self.neutral_resolution_color and color != self.high_resolution_color: + bands[color] = bands[color] * ratio + return bands['red'], bands['green'], bands['blue'], new_attrs else: return low_res_red, low_res_green, low_res_blue, new_attrs diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index e6751835f7..83da02462f 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -132,7 +132,7 @@ def test_nondimensional_coords(self): class TestRatioSharpenedCompositors: """Test RatioSharpenedRGB and SelfSharpendRGB compositors.""" - def setUp(self): + def setup_method(self): """Create test data.""" from pyresample.geometry import AreaDefinition area = AreaDefinition('test', 'test', 'test', @@ -150,16 +150,19 @@ def setUp(self): ds1 = xr.DataArray(da.from_array(low_res_data, chunks=2), attrs=attrs, dims=('y', 'x'), coords={'y': [0, 1], 'x': [0, 1]}) + self.ds1 = ds1 ds2 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 2, attrs=attrs, dims=('y', 'x'), coords={'y': [0, 1], 'x': [0, 1]}) ds2.attrs['name'] += '2' + self.ds2 = ds2 ds3 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 3, attrs=attrs, dims=('y', 'x'), coords={'y': [0, 1], 'x': [0, 1]}) ds3.attrs['name'] += '3' + self.ds3 = ds3 # high resolution version high_res_data = np.ones((2, 2), dtype=np.float64) @@ -169,6 +172,7 @@ def setUp(self): coords={'y': [0, 1], 'x': [0, 1]}) ds4.attrs['name'] += '4' ds4.attrs['resolution'] = 500 + self.ds4 = ds4 # high resolution version - but too big ds4_big = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64), @@ -180,46 +184,70 @@ def setUp(self): ds4_big.attrs['area'] = AreaDefinition('test', 'test', 'test', {'proj': 'merc'}, 4, 4, (-2000, -2000, 2000, 2000)) - - return ds1, ds2, ds3, ds4, ds4_big + self.ds4_big = ds4_big @pytest.mark.parametrize( - ("case", "exp"), + "exp", [ - ("high bad color", ValueError), - ("neutral bad color", ValueError), - ("match_data_arrays", satpy.composites.IncompatibleAreas), - ("more than three datasets", ValueError), - ("no high res band in self sharpened", ValueError) + ValueError, ] ) - def test_errors(self, case, exp): - """Test errors under different cases.""" + def test_high_bad_color(self, exp): + """Test that only valid band colors can be provided.""" from satpy.composites import RatioSharpenedRGB, SelfSharpenedRGB - ds1, ds2, ds3, ds4, ds4_big = self.setUp() + with pytest.raises(exp): + RatioSharpenedRGB(name='true_color', high_resolution_band="bad", neutral_resolution_band="red") - if case == "high bad color": - with pytest.raises(exp): - RatioSharpenedRGB(name='true_color', high_resolution_band="bad", neutral_resolution_band="red") - - elif case == "neutral bad color": - with pytest.raises(exp): - RatioSharpenedRGB(name='true_color', high_resolution_band="red", neutral_resolution_band="bad") + @pytest.mark.parametrize( + "exp", + [ + ValueError, + ] + ) + def test_neutral_bad_color(self, exp): + """Test that only valid band colors can be provided.""" + from satpy.composites import RatioSharpenedRGB, SelfSharpenedRGB + with pytest.raises(exp): + RatioSharpenedRGB(name='true_color', high_resolution_band="red", neutral_resolution_band="bad") 
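# For orientation, the sharpening scheme the refactored code above implements,
# reduced to plain NumPy (values invented; "red" plays the high-resolution band,
# and the real composite also guards the ratio against zeros and extremes):
import numpy as np

high_red = np.array([[4.0, 4.0], [4.0, 4.0]])   # high-resolution red band
low_red = np.array([[2.0, 2.0], [2.0, 2.0]])    # low-resolution red band
low_green = np.array([[3.0, 3.0], [3.0, 3.0]])  # low-resolution green band
ratio = np.where(low_red != 0, high_red / low_red, 1.0)
sharp_green = low_green * ratio                 # non-neutral bands are scaled by the ratio
sharp_red = high_red                            # the high-res band replaces its low-res input
# a band configured as neutral_resolution_band keeps its original values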
- elif case == "match_data_arrays": - comp = RatioSharpenedRGB(name='true_color') - with pytest.raises(exp): - comp((ds1, ds2, ds3), optional_datasets=(ds4_big,)) + @pytest.mark.parametrize( + "exp", + [ + satpy.composites.IncompatibleAreas, + ] + ) + def test_match_data_arrays(self, exp): + """Test that all areas have to be the same resolution.""" + from satpy.composites import RatioSharpenedRGB, SelfSharpenedRGB + comp = RatioSharpenedRGB(name='true_color') + with pytest.raises(exp): + comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4_big,)) - elif case == "more than three datasets": - comp = RatioSharpenedRGB(name='true_color') - with pytest.raises(exp): - comp((ds1, ds2, ds3, ds1), optional_datasets=(ds4_big,)) + @pytest.mark.parametrize( + "exp", + [ + ValueError, + ] + ) + def test_more_than_three_datasets(self, exp): + """Test that only 3 datasets can be passed.""" + from satpy.composites import RatioSharpenedRGB, SelfSharpenedRGB + comp = SelfSharpenedRGB(name='true_color', high_resolution_band=None) + with pytest.raises(exp): + comp((self.ds1, self.ds2, self.ds3)) - elif case == "no high res band in self sharpened": - comp = SelfSharpenedRGB(name='true_color', high_resolution_band=None) - with pytest.raises(exp): - comp((ds1, ds2, ds3)) + @pytest.mark.parametrize( + "exp", + [ + ValueError, + ] + ) + def test_self_sharpened_no_high_res(self, exp): + """Test for exception when no high_res band is specified.""" + from satpy.composites import RatioSharpenedRGB, SelfSharpenedRGB + comp = SelfSharpenedRGB(name='true_color', high_resolution_band=None) + with pytest.raises(exp): + comp((self.ds1, self.ds2, self.ds3)) @pytest.mark.parametrize( ("case", "exp"), @@ -231,14 +259,13 @@ def test_errors(self, case, exp): def test_basic_function(self, case, exp): """Test basic composite function without sharpening.""" from satpy.composites import RatioSharpenedRGB - ds1, ds2, ds3, ds4, ds4_big = self.setUp() if case == "without optional high res": comp = RatioSharpenedRGB(name='true_color') - res = comp((ds1, ds2, ds3)) + res = comp((self.ds1, self.ds2, self.ds3)) elif case == "high res band is None": comp = RatioSharpenedRGB(name='true_color', high_resolution_band=None) - res = comp((ds1, ds2, ds3), optional_datasets=(ds4,)) + res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) assert res.shape == exp @@ -276,8 +303,7 @@ def test_ratio_sharpening(self, high_resolution_band, neutral_resolution_band, e from satpy.composites import RatioSharpenedRGB comp = RatioSharpenedRGB(name='true_color', high_resolution_band=high_resolution_band, neutral_resolution_band=neutral_resolution_band) - ds1, ds2, ds3, ds4, ds4_big = self.setUp() - res = comp((ds1, ds2, ds3), optional_datasets=(ds4,)) + res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) assert "units" not in res.attrs assert isinstance(res, xr.DataArray) @@ -301,9 +327,7 @@ def test_self_sharpened_basic(self, exp_shape, exp_r, exp_g, exp_b): """Test that three datasets can be passed without optional high res.""" from satpy.composites import SelfSharpenedRGB comp = SelfSharpenedRGB(name='true_color') - - ds1, ds2, ds3, ds4, ds4_big = self.setUp() - res = comp((ds1, ds2, ds3)) + res = comp((self.ds1, self.ds2, self.ds3)) data = res.values assert data.shape == exp_shape From 44cd12572d64182f886652b808fadf34e89c951b Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 7 Jun 2023 19:09:39 +0800 Subject: [PATCH 0264/1416] Update test_composites.py --- satpy/tests/test_composites.py | 10 +++++----- 
1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 83da02462f..b8bb0981b6 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -194,7 +194,7 @@ def setup_method(self): ) def test_high_bad_color(self, exp): """Test that only valid band colors can be provided.""" - from satpy.composites import RatioSharpenedRGB, SelfSharpenedRGB + from satpy.composites import RatioSharpenedRGB with pytest.raises(exp): RatioSharpenedRGB(name='true_color', high_resolution_band="bad", neutral_resolution_band="red") @@ -206,7 +206,7 @@ def test_high_bad_color(self, exp): ) def test_neutral_bad_color(self, exp): """Test that only valid band colors can be provided.""" - from satpy.composites import RatioSharpenedRGB, SelfSharpenedRGB + from satpy.composites import RatioSharpenedRGB with pytest.raises(exp): RatioSharpenedRGB(name='true_color', high_resolution_band="red", neutral_resolution_band="bad") @@ -218,7 +218,7 @@ def test_neutral_bad_color(self, exp): ) def test_match_data_arrays(self, exp): """Test that all areas have to be the same resolution.""" - from satpy.composites import RatioSharpenedRGB, SelfSharpenedRGB + from satpy.composites import RatioSharpenedRGB comp = RatioSharpenedRGB(name='true_color') with pytest.raises(exp): comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4_big,)) @@ -231,7 +231,7 @@ def test_match_data_arrays(self, exp): ) def test_more_than_three_datasets(self, exp): """Test that only 3 datasets can be passed.""" - from satpy.composites import RatioSharpenedRGB, SelfSharpenedRGB + from satpy.composites import SelfSharpenedRGB comp = SelfSharpenedRGB(name='true_color', high_resolution_band=None) with pytest.raises(exp): comp((self.ds1, self.ds2, self.ds3)) @@ -244,7 +244,7 @@ def test_more_than_three_datasets(self, exp): ) def test_self_sharpened_no_high_res(self, exp): """Test for exception when no high_res band is specified.""" - from satpy.composites import RatioSharpenedRGB, SelfSharpenedRGB + from satpy.composites import SelfSharpenedRGB comp = SelfSharpenedRGB(name='true_color', high_resolution_band=None) with pytest.raises(exp): comp((self.ds1, self.ds2, self.ds3)) From ac32288c43b1f28c013a80b55cde45cc98db3f28 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Wed, 7 Jun 2023 14:56:12 +0200 Subject: [PATCH 0265/1416] Add clip method and docs --- doc/source/config.rst | 17 +++++++++++++++++ satpy/readers/abi_l1b.py | 3 +-- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/doc/source/config.rst b/doc/source/config.rst index 63da03dac7..78f022984b 100644 --- a/doc/source/config.rst +++ b/doc/source/config.rst @@ -258,6 +258,23 @@ as part of the :func:`~satpy.modifiers.angles.get_angles` and used by multiple modifiers and composites including the default rayleigh correction. +Clipping Negative Infrared Radiances +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* **Environment variable**: ``SATPY_CLIP_NEGATIVE_RADIANCES`` +* **YAML/Config Key**: ``clip_negative_radiances`` +* **Default**: False + +Whether to clip negative infrared radiances to the minimum possible value before +computing the brightness temperature. +If ``clip_negative_radiances=False``, pixels with negative radiances will have +``np.nan`` brightness temperatures. 
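Numerically, the effect is easy to demonstrate with the reader's brightness-temperature formula (the Planck coefficients and minimum radiance below are placeholders, not real ABI values):

import numpy as np

fk1, fk2, bc1, bc2 = 2.0e5, 3.7e3, 0.5, 0.99   # placeholder Planck coefficients
min_rad = 1e-6                                  # stand-in for the band's minimum valid radiance
rad = np.array([-0.5, 10.0, 80.0])
with np.errstate(invalid="ignore"):
    bt = (fk2 / np.log(fk1 / rad + 1) - bc1) / bc2                            # negative radiance -> NaN
bt_clipped = (fk2 / np.log(fk1 / rad.clip(min=min_rad) + 1) - bc1) / bc2      # finite everywhere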
+ +Clipping of negative radiances is currently implemented for the following readers: + +* `abi_l1b` + + Temporary Directory ^^^^^^^^^^^^^^^^^^^ diff --git a/satpy/readers/abi_l1b.py b/satpy/readers/abi_l1b.py index 35043bbede..aa32955fef 100644 --- a/satpy/readers/abi_l1b.py +++ b/satpy/readers/abi_l1b.py @@ -166,8 +166,7 @@ def _ir_calibrate(self, data): if self.clip_negative_radiances: min_rad = self._get_minimum_radiance(data) - clip_mask = np.logical_and(data < min_rad, ~np.isnan(data)) - data = data.where(~clip_mask, min_rad) + data = data.clip(min=min_rad) res = (fk2 / np.log(fk1 / data + 1) - bc1) / bc2 res.attrs = data.attrs From 56af5613b17c8b795381b4470e6a0819fe88582f Mon Sep 17 00:00:00 2001 From: Gionata Ghiggi Date: Wed, 7 Jun 2023 14:58:37 +0200 Subject: [PATCH 0266/1416] Update doc/source/config.rst --- doc/source/config.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/config.rst b/doc/source/config.rst index 78f022984b..ee6a86cf30 100644 --- a/doc/source/config.rst +++ b/doc/source/config.rst @@ -272,7 +272,7 @@ If ``clip_negative_radiances=False``, pixels with negative radiances will have Clipping of negative radiances is currently implemented for the following readers: -* `abi_l1b` +* ``abi_l1b`` Temporary Directory From afe356d8c9edcdf48295cab88bad37b9af2a73c0 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 8 Jun 2023 10:22:42 +0200 Subject: [PATCH 0267/1416] Remove unneeded calibration keys for data products --- satpy/etc/readers/olci_l2.yaml | 80 +++++++++++----------------------- 1 file changed, 25 insertions(+), 55 deletions(-) diff --git a/satpy/etc/readers/olci_l2.yaml b/satpy/etc/readers/olci_l2.yaml index 5da6e0b1ce..c34ff76ce3 100644 --- a/satpy/etc/readers/olci_l2.yaml +++ b/satpy/etc/readers/olci_l2.yaml @@ -356,10 +356,8 @@ datasets: name: chl_nn sensor: olci resolution: 300 - calibration: - reflectance: - standard_name: algal_pigment_concentration - units: "lg(re mg.m-3)" + standard_name: algal_pigment_concentration + units: "lg(re mg.m-3)" coordinates: [longitude, latitude] file_type: esa_l2_chl_nn nc_key: CHL_NN @@ -368,10 +366,8 @@ datasets: name: iop_nn sensor: olci resolution: 300 - calibration: - reflectance: - standard_name: cdm_absorption_coefficient - units: "lg(re m-l)" + standard_name: cdm_absorption_coefficient + units: "lg(re m-l)" coordinates: [longitude, latitude] file_type: esa_l2_iop_nn nc_key: ADG443_NN @@ -380,10 +376,8 @@ datasets: name: trsp sensor: olci resolution: 300 - calibration: - reflectance: - standard_name: diffuse_attenuation_coefficient - units: "lg(re m-l)" + standard_name: diffuse_attenuation_coefficient + units: "lg(re m-l)" coordinates: [longitude, latitude] file_type: esa_l2_trsp nc_key: KD490_M07 @@ -392,10 +386,8 @@ datasets: name: tsm_nn sensor: olci resolution: 300 - calibration: - reflectance: - standard_name: total_suspended_matter_concentration - units: "lg(re g.m-3)" + standard_name: total_suspended_matter_concentration + units: "lg(re g.m-3)" coordinates: [longitude, latitude] file_type: esa_l2_tsm_nn nc_key: TSM_NN @@ -412,10 +404,8 @@ datasets: name: iwv sensor: olci resolution: 300 - calibration: - reflectance: - standard_name: integrated_water_vapour_column - units: "kg.m-2" + standard_name: integrated_water_vapour_column + units: "kg.m-2" coordinates: [longitude, latitude] file_type: esa_l2_iwv nc_key: IWV @@ -424,10 +414,8 @@ datasets: name: iwv_unc sensor: olci resolution: 300 - calibration: - reflectance: - standard_name: 
uncertainty_estimate_integrated_water_vapour_column - units: "kg.m-2" + standard_name: uncertainty_estimate_integrated_water_vapour_column + units: "kg.m-2" coordinates: [longitude, latitude] file_type: esa_l2_iwv nc_key: IWV_unc @@ -436,9 +424,7 @@ datasets: name: otci sensor: olci resolution: 300 - calibration: - reflectance: - standard_name: terrestrial_chlorophyll_index + standard_name: terrestrial_chlorophyll_index coordinates: [longitude, latitude] file_type: esa_l2_otci nc_key: OTCI @@ -447,9 +433,7 @@ datasets: name: otci_unc sensor: olci resolution: 300 - calibration: - reflectance: - standard_name: uncertainty_estimate_terrestrial_chlorophyll_index + standard_name: uncertainty_estimate_terrestrial_chlorophyll_index coordinates: [longitude, latitude] file_type: esa_l2_otci nc_key: OTCI_unc @@ -458,9 +442,7 @@ datasets: name: otci_quality_flags sensor: olci resolution: 300 - calibration: - reflectance: - standard_name: quality_flags_for_terrestrial_chlorophyll_index + standard_name: quality_flags_for_terrestrial_chlorophyll_index coordinates: [longitude, latitude] file_type: esa_l2_otci nc_key: OTCI_quality_flags @@ -469,9 +451,7 @@ datasets: name: gifapar sensor: olci resolution: 300 - calibration: - reflectance: - standard_name: green_instantaneous_fraction_of_absorbed_photosynthetically_available_radiation + standard_name: green_instantaneous_fraction_of_absorbed_photosynthetically_available_radiation coordinates: [longitude, latitude] file_type: esa_l2_gifapar nc_key: GIFAPAR @@ -480,9 +460,7 @@ datasets: name: gifapar_unc sensor: olci resolution: 300 - calibration: - reflectance: - standard_name: uncertainty_in_green_instantaneous_fraction_of_absorbed_photosynthetically_available_radiation + standard_name: uncertainty_in_green_instantaneous_fraction_of_absorbed_photosynthetically_available_radiation coordinates: [longitude, latitude] file_type: esa_l2_gifapar nc_key: GIFAPAR_unc @@ -491,10 +469,8 @@ datasets: name: rc_gifapar_oa10 sensor: olci resolution: 300 - calibration: - reflectance: - standard_name: rectified_reflectance_for_band_oa10 - units: 'mW.m-2.sr-1.nm-1' + standard_name: rectified_reflectance_for_band_oa10 + units: 'mW.m-2.sr-1.nm-1' coordinates: [longitude, latitude] file_type: esa_l2_rc_gifapar nc_key: RC681 @@ -503,10 +479,8 @@ datasets: name: rc_gifapar_oa10_unc sensor: olci resolution: 300 - calibration: - reflectance: - standard_name: ucnertainty_in_rectified_reflectance_for_band_oa10 - units: 'mW.m-2.sr-1.nm-1' + standard_name: uncertainty_in_rectified_reflectance_for_band_oa10 + units: 'mW.m-2.sr-1.nm-1' coordinates: [longitude, latitude] file_type: esa_l2_rc_gifapar nc_key: RC681_unc @@ -515,10 +489,8 @@ datasets: name: rc_gifapar_oa17 sensor: olci resolution: 300 - calibration: - reflectance: - standard_name: rectified_reflectance_for_band_oa17 - units: 'mW.m-2.sr-1.nm-1' + standard_name: rectified_reflectance_for_band_oa17 + units: 'mW.m-2.sr-1.nm-1' coordinates: [longitude, latitude] file_type: esa_l2_rc_gifapar nc_key: RC865 @@ -527,10 +499,8 @@ datasets: name: rc_gifapar_oa17_unc sensor: olci resolution: 300 - calibration: - reflectance: - standard_name: ucnertainty_in_rectified_reflectance_for_band_oa17 - units: 'mW.m-2.sr-1.nm-1' + standard_name: uncertainty_in_rectified_reflectance_for_band_oa17 + units: 'mW.m-2.sr-1.nm-1' coordinates: [longitude, latitude] file_type: esa_l2_rc_gifapar nc_key: RC865_unc From 71773534296fcc2b971b5eee9a194e53f9ff037a Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 8 Jun 2023 12:38:32 +0200 Subject: [PATCH
0268/1416] Add delogging for olci channels --- satpy/readers/olci_nc.py | 21 ++++++++++++-- satpy/tests/reader_tests/test_olci_nc.py | 37 ++++++++++++++++++++++++ 2 files changed, 56 insertions(+), 2 deletions(-) diff --git a/satpy/readers/olci_nc.py b/satpy/readers/olci_nc.py index 9aafb981fb..eba0cef59f 100644 --- a/satpy/readers/olci_nc.py +++ b/satpy/readers/olci_nc.py @@ -100,7 +100,7 @@ class NCOLCIBase(BaseFileHandler): cols_name = "columns" def __init__(self, filename, filename_info, filetype_info, - engine=None): + engine=None, **kwargs): """Init the olci reader base.""" super().__init__(filename, filename_info, filetype_info) self._engine = engine @@ -203,6 +203,11 @@ def get_dataset(self, key, info): class NCOLCI2(NCOLCIChannelBase): """File handler for OLCI l2.""" + def __init__(self, filename, filename_info, filetype_info, engine=None, unlog=False): + """Init the file handler.""" + super().__init__(filename, filename_info, filetype_info, engine) + self.unlog = unlog + def get_dataset(self, key, info): """Load a dataset.""" if self.channel is not None and self.channel != key['name']: @@ -221,8 +226,20 @@ def get_dataset(self, key, info): dataset.attrs['platform_name'] = self.platform_name dataset.attrs['sensor'] = self.sensor dataset.attrs.update(key.to_dict()) + if self.unlog: + dataset = self.delog(dataset) + return dataset + def delog(self, data_array): + """Remove log10 from the units and values.""" + units = data_array.attrs["units"] + + if units.startswith("lg("): + data_array = 10 ** data_array + data_array.attrs["units"] = units.split("lg(re ")[1].strip(")") + return data_array + def getbitmask(self, wqsf, items=None): """Get the bitmask.""" if items is None: @@ -240,7 +257,7 @@ class NCOLCILowResData(NCOLCIBase): cols_name = "tie_columns" def __init__(self, filename, filename_info, filetype_info, - engine=None): + engine=None, **kwargs): """Init the file handler.""" super().__init__(filename, filename_info, filetype_info, engine) self.l_step = self.nc.attrs['al_subsampling_factor'] diff --git a/satpy/tests/reader_tests/test_olci_nc.py b/satpy/tests/reader_tests/test_olci_nc.py index b0196eb3b8..e415f152b1 100644 --- a/satpy/tests/reader_tests/test_olci_nc.py +++ b/satpy/tests/reader_tests/test_olci_nc.py @@ -16,6 +16,7 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
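# The delogging added above, reduced to a standalone example with a CHL_NN-like
# array (values invented; the unit handling mirrors the delog method):
import numpy as np
import xarray as xr

chl = xr.DataArray(np.array([-1.0, 0.0, 2.0]), attrs={"units": "lg(re mg.m-3)"})
units = chl.attrs["units"]
if units.startswith("lg("):                      # only lg-scaled products are unlogged
    chl = 10 ** chl                              # -> 0.1, 1.0, 100.0
    chl.attrs["units"] = units.split("lg(re ")[1].strip(")")  # -> "mg.m-3"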
"""Module for testing the satpy.readers.olci_nc module.""" +import datetime import unittest import unittest.mock as mock @@ -177,6 +178,42 @@ def test_olci_meteo(self, mocked_dataset): mocked_dataset.assert_called() mocked_dataset.reset_mock() + @mock.patch("xarray.open_dataset") + def test_chl_nn(self, mocked_dataset): + """Test unlogging the chl_nn product.""" + import numpy as np + import xarray as xr + + from satpy.readers.olci_nc import NCOLCI2 + from satpy.tests.utils import make_dataid + attr_dict = { + 'ac_subsampling_factor': 64, + 'al_subsampling_factor': 1, + } + data = {'CHL_NN': (['rows', 'columns'], + np.arange(30).reshape(5, 6).astype(float), + {"units": "lg(re mg.m-3)"})} + mocked_dataset.return_value = xr.Dataset(data, + coords={'rows': np.arange(5), + 'columns': np.arange(6)}, + attrs=attr_dict) + ds_info = {'name': 'chl_nn', 'sensor': 'olci', 'resolution': 300, + 'standard_name': 'algal_pigment_concentration', 'units': 'lg(re mg.m-3)', + 'coordinates': ('longitude', 'latitude'), 'file_type': 'esa_l2_chl_nn', 'nc_key': 'CHL_NN', + 'modifiers': ()} + filename_info = {'mission_id': 'S3A', 'datatype_id': 'WFR', + 'start_time': datetime.datetime(2019, 9, 24, 9, 29, 39), + 'end_time': datetime.datetime(2019, 9, 24, 9, 32, 39), + 'creation_time': datetime.datetime(2019, 9, 24, 11, 40, 26), 'duration': 179, 'cycle': 49, + 'relative_orbit': 307, 'frame': 1800, 'centre': 'MAR', 'mode': 'O', 'timeliness': 'NR', + 'collection': '002'} + ds_id = make_dataid(name='chl_nn') + file_handler = NCOLCI2('somedir/somefile.nc', filename_info, None, unlog=True) + res = file_handler.get_dataset(ds_id, ds_info) + + assert res.attrs["units"] == "mg.m-3" + assert res.values[-1, -1] == 1e29 + class TestBitFlags(unittest.TestCase): """Test the bitflag reading.""" From 0d75dd96fca64bdf42a9461dbb27845677c1828d Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 8 Jun 2023 12:40:45 +0200 Subject: [PATCH 0269/1416] Allow custom masking items --- satpy/readers/olci_nc.py | 14 +++++++----- satpy/tests/reader_tests/test_olci_nc.py | 28 +++++++++++++++++++++++- 2 files changed, 35 insertions(+), 7 deletions(-) diff --git a/satpy/readers/olci_nc.py b/satpy/readers/olci_nc.py index eba0cef59f..112f5455ac 100644 --- a/satpy/readers/olci_nc.py +++ b/satpy/readers/olci_nc.py @@ -51,6 +51,10 @@ from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import angle2xyz, get_legacy_chunk_size, xyz2angle +DEFAULT_MASK_ITEMS = ["INVALID", "SNOW_ICE", "INLAND_WATER", "SUSPECT", + "AC_FAIL", "CLOUD", "HISOLZEN", "OCNN_FAIL", + "CLOUD_MARGIN", "CLOUD_AMBIGUOUS", "LOWRW", "LAND"] + logger = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() @@ -203,10 +207,11 @@ def get_dataset(self, key, info): class NCOLCI2(NCOLCIChannelBase): """File handler for OLCI l2.""" - def __init__(self, filename, filename_info, filetype_info, engine=None, unlog=False): + def __init__(self, filename, filename_info, filetype_info, engine=None, unlog=False, mask_items=None): """Init the file handler.""" super().__init__(filename, filename_info, filetype_info, engine) self.unlog = unlog + self.mask_items = mask_items def get_dataset(self, key, info): """Load a dataset.""" @@ -221,8 +226,7 @@ def get_dataset(self, key, info): if key['name'] == 'wqsf': dataset.attrs['_FillValue'] = 1 elif key['name'] == 'mask': - dataset = self.getbitmask(dataset) - + dataset = self.getbitmask(dataset, self.mask_items) dataset.attrs['platform_name'] = self.platform_name dataset.attrs['sensor'] = self.sensor 
dataset.attrs.update(key.to_dict()) @@ -243,9 +247,7 @@ def delog(self, data_array): def getbitmask(self, wqsf, items=None): """Get the bitmask.""" if items is None: - items = ["INVALID", "SNOW_ICE", "INLAND_WATER", "SUSPECT", - "AC_FAIL", "CLOUD", "HISOLZEN", "OCNN_FAIL", - "CLOUD_MARGIN", "CLOUD_AMBIGUOUS", "LOWRW", "LAND"] + items = DEFAULT_MASK_ITEMS bflags = BitFlags(wqsf) return reduce(np.logical_or, [bflags[item] for item in items]) diff --git a/satpy/tests/reader_tests/test_olci_nc.py b/satpy/tests/reader_tests/test_olci_nc.py index e415f152b1..6761511cf5 100644 --- a/satpy/tests/reader_tests/test_olci_nc.py +++ b/satpy/tests/reader_tests/test_olci_nc.py @@ -94,7 +94,7 @@ def test_open_file_objects(self, mocked_open_dataset): open_file.open.return_value == mocked_open_dataset.call_args[1].get('filename_or_obj')) @mock.patch('xarray.open_dataset') - def test_get_dataset(self, mocked_dataset): + def test_get_mask(self, mocked_dataset): """Test reading datasets.""" import numpy as np import xarray as xr @@ -110,6 +110,32 @@ def test_get_dataset(self, mocked_dataset): test = NCOLCI2('somedir/somefile.nc', filename_info, 'c') res = test.get_dataset(ds_id, {'nc_key': 'mask'}) self.assertEqual(res.dtype, np.dtype('bool')) + expected = np.array([[True, False, True, True, True, True], + [False, False, True, True, False, False], + [False, False, False, False, False, True], + [False, True, False, False, False, True], + [True, False, False, True, False, False]]) + np.testing.assert_array_equal(res.values, expected) + + @mock.patch('xarray.open_dataset') + def test_get_mask_with_alternative_items(self, mocked_dataset): + """Test reading datasets.""" + import numpy as np + import xarray as xr + + from satpy.readers.olci_nc import NCOLCI2 + from satpy.tests.utils import make_dataid + mocked_dataset.return_value = xr.Dataset({'mask': (['rows', 'columns'], + np.array([1 << x for x in range(30)]).reshape(5, 6))}, + coords={'rows': np.arange(5), + 'columns': np.arange(6)}) + ds_id = make_dataid(name='mask') + filename_info = {'mission_id': 'S3A', 'dataset_name': 'mask', 'start_time': 0, 'end_time': 0} + test = NCOLCI2('somedir/somefile.nc', filename_info, 'c', mask_items=["INVALID"]) + res = test.get_dataset(ds_id, {'nc_key': 'mask'}) + self.assertEqual(res.dtype, np.dtype('bool')) + expected = np.array([True] + [False] * 29).reshape(5, 6) + np.testing.assert_array_equal(res.values, expected) @mock.patch('xarray.open_dataset') def test_olci_angles(self, mocked_dataset): From 8d0ae2451781d5cb15ed522e948c75ef58f14a1f Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 8 Jun 2023 12:53:18 +0200 Subject: [PATCH 0270/1416] Fix erraneous units --- satpy/etc/readers/olci_l2.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/etc/readers/olci_l2.yaml b/satpy/etc/readers/olci_l2.yaml index c34ff76ce3..0750d96f68 100644 --- a/satpy/etc/readers/olci_l2.yaml +++ b/satpy/etc/readers/olci_l2.yaml @@ -367,7 +367,7 @@ datasets: sensor: olci resolution: 300 standard_name: cdm_absorption_coefficient - units: "lg(re m-l)" + units: "lg(re m-1)" coordinates: [longitude, latitude] file_type: esa_l2_iop_nn nc_key: ADG443_NN @@ -377,7 +377,7 @@ datasets: sensor: olci resolution: 300 standard_name: diffuse_attenuation_coefficient - units: "lg(re m-l)" + units: "lg(re m-1)" coordinates: [longitude, latitude] file_type: esa_l2_trsp nc_key: KD490_M07 From 71773534296fcc2b971b5eee9a194e53f9ff037a Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Thu, 8 Jun 2023 23:36:15 +0800 Subject: [PATCH 
0271/1416] loose ends --- satpy/composites/__init__.py | 6 ++---- satpy/tests/test_composites.py | 24 +++++++----------------- 2 files changed, 9 insertions(+), 21 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 1fdb727627..afe7400bc3 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1108,9 +1108,8 @@ def _get_and_sharpen_rgb_data_arrays_and_meta(self, datasets, optional_datasets) LOG.debug("No sharpening band specified for ratio sharpening") high_res = None + bands = {'red': low_res_red, 'green': low_res_green, 'blue': low_res_blue} if high_res is not None: - bands = {'red': low_res_red, 'green': low_res_green, 'blue': low_res_blue} - ratio = da.map_blocks( _get_sharpening_ratio, high_res.data, @@ -1128,8 +1127,7 @@ def _get_and_sharpen_rgb_data_arrays_and_meta(self, datasets, optional_datasets) bands[color] = bands[color] * ratio return bands['red'], bands['green'], bands['blue'], new_attrs - else: - return low_res_red, low_res_green, low_res_blue, new_attrs + return bands['red'], bands['green'], bands['blue'], new_attrs def _combined_sharpened_info(self, info, new_attrs): combined_info = {} diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index b8bb0981b6..70457b83df 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -187,28 +187,18 @@ def setup_method(self): self.ds4_big = ds4_big @pytest.mark.parametrize( - "exp", - [ - ValueError, - ] - ) - def test_high_bad_color(self, exp): - """Test that only valid band colors can be provided.""" - from satpy.composites import RatioSharpenedRGB - with pytest.raises(exp): - RatioSharpenedRGB(name='true_color', high_resolution_band="bad", neutral_resolution_band="red") - - @pytest.mark.parametrize( - "exp", + "init_kwarg", [ - ValueError, + "bad", + "bad" ] ) - def test_neutral_bad_color(self, exp): + def test_bad_colors(self, init_kwarg): """Test that only valid band colors can be provided.""" from satpy.composites import RatioSharpenedRGB - with pytest.raises(exp): - RatioSharpenedRGB(name='true_color', high_resolution_band="red", neutral_resolution_band="bad") + with pytest.raises(ValueError): + RatioSharpenedRGB(name='true_color', high_resolution_band="red", neutral_resolution_band=init_kwarg) + RatioSharpenedRGB(name='true_color', high_resolution_band=init_kwarg, neutral_resolution_band="red") @pytest.mark.parametrize( "exp", From 13e6da046eac85a6d510ef6831aeb5f8fed5f720 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Fri, 9 Jun 2023 23:19:15 +0800 Subject: [PATCH 0272/1416] Update test_composites.py --- satpy/tests/test_composites.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 70457b83df..d1b8415ca4 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -187,18 +187,16 @@ def setup_method(self): self.ds4_big = ds4_big @pytest.mark.parametrize( - "init_kwarg", + "init_kwargs", [ - "bad", - "bad" + {'name': "true_color", "high_resolution_band": "bad", "neutral_resolution_band": "bad"}, ] ) - def test_bad_colors(self, init_kwarg): + def test_bad_colors(self, init_kwargs): """Test that only valid band colors can be provided.""" from satpy.composites import RatioSharpenedRGB with pytest.raises(ValueError): - RatioSharpenedRGB(name='true_color', high_resolution_band="red", neutral_resolution_band=init_kwarg) - RatioSharpenedRGB(name='true_color', high_resolution_band=init_kwarg, 
neutral_resolution_band="red") + RatioSharpenedRGB(**init_kwargs) @pytest.mark.parametrize( "exp", From 7ff8e1c4d6de971962701d983dac6d7f4fe4c22c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 9 Jun 2023 21:10:42 -0500 Subject: [PATCH 0273/1416] Convert ABI L2 AOD test to pytest --- satpy/tests/reader_tests/test_abi_l2_nc.py | 46 ++++++++++++++-------- 1 file changed, 29 insertions(+), 17 deletions(-) diff --git a/satpy/tests/reader_tests/test_abi_l2_nc.py b/satpy/tests/reader_tests/test_abi_l2_nc.py index 63014685f9..5cdfb2e776 100644 --- a/satpy/tests/reader_tests/test_abi_l2_nc.py +++ b/satpy/tests/reader_tests/test_abi_l2_nc.py @@ -15,7 +15,7 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . """The abi_l2_nc reader tests package.""" - +import contextlib import unittest from unittest import mock @@ -281,13 +281,11 @@ def test_get_area_def_latlon(self, adef): np.testing.assert_allclose(call_args[6], (-85.0, -20.0, -65.0, 20)) -class Test_NC_ABI_L2_area_AOD(unittest.TestCase): +class Test_NC_ABI_L2_area_AOD: """Test the NC_ABI_L2 reader for the AOD product.""" - @mock.patch('satpy.readers.abi_base.xr') - def setUp(self, xr_): + def setup_method(self, xr_): """Create fake data for the tests.""" - from satpy.readers.abi_l2_nc import NC_ABI_L2 proj = xr.DataArray( [], attrs={'semi_major_axis': 1., @@ -326,22 +324,36 @@ def setUp(self, xr_): 'RSR': xr.DataArray(np.ones((2, 2)), dims=('y', 'x')), }, ) - xr_.open_dataset.return_value = fake_dataset - - self.reader = NC_ABI_L2('filename', - {'platform_shortname': 'G16', 'observation_type': 'RSR', - 'scene_abbr': 'C', 'scan_mode': 'M3'}, - {'filetype': 'info'}) + self.fake_dataset = fake_dataset @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_xy(self, adef): """Test the area generation.""" - self.reader.get_area_def(None) + with _create_reader_for_fake_data(self.fake_dataset) as reader: + reader.get_area_def(None) - self.assertEqual(adef.call_count, 1) + assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] - self.assertDictEqual(call_args[3], {'proj': 'latlong', 'a': 1.0, 'b': 1.0, 'fi': 1.0, 'pm': 0.0, - 'lon_0': -75.0, 'lat_0': 0.0}) - self.assertEqual(call_args[4], self.reader.ncols) - self.assertEqual(call_args[5], self.reader.nlines) + assert call_args[3] == {'proj': 'latlong', 'a': 1.0, 'b': 1.0, 'fi': 1.0, 'pm': 0.0, + 'lon_0': -75.0, 'lat_0': 0.0} + assert call_args[4] == reader.ncols + assert call_args[5] == reader.nlines np.testing.assert_allclose(call_args[6], (-85.0, -20.0, -65.0, 20)) + + +@contextlib.contextmanager +def _create_reader_for_fake_data(fake_dataset: xr.Dataset): + from satpy.readers.abi_l2_nc import NC_ABI_L2 + + reader_args = ( + "filename", + { + 'platform_shortname': 'G16', 'observation_type': 'RSR', + 'scene_abbr': 'C', 'scan_mode': 'M3' + }, + {'filetype': 'info'}, + ) + with mock.patch('satpy.readers.abi_base.xr') as xr_: + xr_.open_dataset.return_value = fake_dataset + reader = NC_ABI_L2(*reader_args) + yield reader From 6415c583b0579cf43136a6213fa53ed862b88aca Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 9 Jun 2023 21:21:33 -0500 Subject: [PATCH 0274/1416] Rewrite all ABI L2 tests with pytest --- satpy/tests/reader_tests/test_abi_l2_nc.py | 54 +++++++++------------- 1 file changed, 22 insertions(+), 32 deletions(-) diff --git a/satpy/tests/reader_tests/test_abi_l2_nc.py b/satpy/tests/reader_tests/test_abi_l2_nc.py index 5cdfb2e776..b08b0a2f62 100644 --- 
a/satpy/tests/reader_tests/test_abi_l2_nc.py +++ b/satpy/tests/reader_tests/test_abi_l2_nc.py @@ -16,7 +16,6 @@ # along with this program. If not, see . """The abi_l2_nc reader tests package.""" import contextlib -import unittest from unittest import mock import numpy as np @@ -94,10 +93,10 @@ def _create_mcmip_dataset(): return fake_dataset -class Test_NC_ABI_L2_base(unittest.TestCase): - """Test the NC_ABI_L2 reader.""" +class Test_NC_ABI_L2_get_dataset: + """Test get dataset function of the NC_ABI_L2 reader.""" - def setUp(self): + def setup_method(self): """Create fake data for the tests.""" from satpy.readers.abi_l2_nc import NC_ABI_L2 fake_cmip_dataset = _create_cmip_dataset() @@ -116,10 +115,6 @@ def setUp(self): }, ) - -class Test_NC_ABI_L2_get_dataset(Test_NC_ABI_L2_base): - """Test get dataset function of the NC_ABI_L2 reader.""" - def test_get_dataset(self): """Test basic L2 load.""" from satpy.tests.utils import make_dataid @@ -144,7 +139,7 @@ def test_get_dataset(self): 'timeline_ID': None, 'units': 'm'} - self.assertTrue(np.allclose(res.data, exp_data, equal_nan=True)) + np.testing.assert_allclose(res.data, exp_data, equal_nan=True) _compare_subdict(res.attrs, exp_attrs) _assert_orbital_parameters(res.attrs['orbital_parameters']) @@ -198,30 +193,29 @@ def test_mcmip_get_dataset(self, xr_): _assert_orbital_parameters(res.attrs['orbital_parameters']) -class Test_NC_ABI_L2_area_fixedgrid(Test_NC_ABI_L2_base): +class Test_NC_ABI_L2_area_fixedgrid: """Test the NC_ABI_L2 reader.""" @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_fixedgrid(self, adef): """Test the area generation.""" - self.reader.get_area_def(None) + with _create_reader_for_fake_data(_create_cmip_dataset()) as reader: + reader.get_area_def(None) - self.assertEqual(adef.call_count, 1) + assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] - self.assertDictEqual(call_args[3], {'a': 1.0, 'b': 1.0, 'h': 1.0, 'lon_0': -90.0, - 'proj': 'geos', 'sweep': 'x', 'units': 'm'}) - self.assertEqual(call_args[4], self.reader.ncols) - self.assertEqual(call_args[5], self.reader.nlines) + assert call_args[3] == {'a': 1.0, 'b': 1.0, 'h': 1.0, 'lon_0': -90.0, + 'proj': 'geos', 'sweep': 'x', 'units': 'm'} + assert call_args[4] == reader.ncols + assert call_args[5] == reader.nlines np.testing.assert_allclose(call_args[6], (-2., -2., 2., 2.)) -class Test_NC_ABI_L2_area_latlon(unittest.TestCase): +class Test_NC_ABI_L2_area_latlon: """Test the NC_ABI_L2 reader.""" - @mock.patch('satpy.readers.abi_base.xr') - def setUp(self, xr_): + def setup_method(self): """Create fake data for the tests.""" - from satpy.readers.abi_l2_nc import NC_ABI_L2 proj = xr.DataArray( [], attrs={'semi_major_axis': 1., @@ -260,24 +254,20 @@ def setUp(self, xr_): 'RSR': xr.DataArray(np.ones((2, 2)), dims=('lat', 'lon')), }, ) - xr_.open_dataset.return_value = fake_dataset - - self.reader = NC_ABI_L2('filename', - {'platform_shortname': 'G16', 'observation_type': 'RSR', - 'scene_abbr': 'C', 'scan_mode': 'M3'}, - {'filetype': 'info'}) + self.fake_dataset = fake_dataset @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_latlon(self, adef): """Test the area generation.""" - self.reader.get_area_def(None) + with _create_reader_for_fake_data(self.fake_dataset) as reader: + reader.get_area_def(None) - self.assertEqual(adef.call_count, 1) + assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] - self.assertDictEqual(call_args[3], {'proj': 'latlong', 'a': 1.0, 'b': 1.0, 'fi': 1.0, 
'pm': 0.0, - 'lon_0': -75.0, 'lat_0': 0.0}) - self.assertEqual(call_args[4], self.reader.ncols) - self.assertEqual(call_args[5], self.reader.nlines) + assert call_args[3] == {'proj': 'latlong', 'a': 1.0, 'b': 1.0, 'fi': 1.0, 'pm': 0.0, + 'lon_0': -75.0, 'lat_0': 0.0} + assert call_args[4] == reader.ncols + assert call_args[5] == reader.nlines np.testing.assert_allclose(call_args[6], (-85.0, -20.0, -65.0, 20)) From 91494eec23548af754c4b69402d285c9de879875 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 9 Jun 2023 21:48:14 -0500 Subject: [PATCH 0275/1416] More ABI L2 test cleanup --- satpy/tests/reader_tests/test_abi_l2_nc.py | 36 ++++++---------------- 1 file changed, 9 insertions(+), 27 deletions(-) diff --git a/satpy/tests/reader_tests/test_abi_l2_nc.py b/satpy/tests/reader_tests/test_abi_l2_nc.py index b08b0a2f62..2d566770ae 100644 --- a/satpy/tests/reader_tests/test_abi_l2_nc.py +++ b/satpy/tests/reader_tests/test_abi_l2_nc.py @@ -96,30 +96,12 @@ def _create_mcmip_dataset(): class Test_NC_ABI_L2_get_dataset: """Test get dataset function of the NC_ABI_L2 reader.""" - def setup_method(self): - """Create fake data for the tests.""" - from satpy.readers.abi_l2_nc import NC_ABI_L2 - fake_cmip_dataset = _create_cmip_dataset() - with mock.patch('satpy.readers.abi_base.xr') as xr_: - xr_.open_dataset.return_value = fake_cmip_dataset - self.reader = NC_ABI_L2( - 'filename', - { - 'platform_shortname': 'G16', - 'scan_mode': 'M3', - 'scene_abbr': 'M1', - }, - { - 'file_type': 'info', - 'observation_type': 'ACHA', - }, - ) - def test_get_dataset(self): """Test basic L2 load.""" from satpy.tests.utils import make_dataid key = make_dataid(name='HT') - res = self.reader.get_dataset(key, {'file_key': 'HT'}) + with _create_reader_for_fake_data("ACHA", _create_cmip_dataset()) as reader: + res = reader.get_dataset(key, {'file_key': 'HT'}) exp_data = np.array([[2 * 0.3052037, np.nan], [32768 * 0.3052037, 32767 * 0.3052037]]) @@ -133,7 +115,7 @@ def test_get_dataset(self): 'platform_shortname': 'G16', 'production_site': None, 'scan_mode': 'M3', - 'scene_abbr': 'M1', + 'scene_abbr': 'C', 'scene_id': None, 'sensor': 'abi', 'timeline_ID': None, @@ -199,7 +181,7 @@ class Test_NC_ABI_L2_area_fixedgrid: @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_fixedgrid(self, adef): """Test the area generation.""" - with _create_reader_for_fake_data(_create_cmip_dataset()) as reader: + with _create_reader_for_fake_data("RSR", _create_cmip_dataset()) as reader: reader.get_area_def(None) assert adef.call_count == 1 @@ -259,7 +241,7 @@ def setup_method(self): @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_latlon(self, adef): """Test the area generation.""" - with _create_reader_for_fake_data(self.fake_dataset) as reader: + with _create_reader_for_fake_data("RSR", self.fake_dataset) as reader: reader.get_area_def(None) assert adef.call_count == 1 @@ -319,7 +301,7 @@ def setup_method(self, xr_): @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') def test_get_area_def_xy(self, adef): """Test the area generation.""" - with _create_reader_for_fake_data(self.fake_dataset) as reader: + with _create_reader_for_fake_data("RSR", self.fake_dataset) as reader: reader.get_area_def(None) assert adef.call_count == 1 @@ -332,16 +314,16 @@ def test_get_area_def_xy(self, adef): @contextlib.contextmanager -def _create_reader_for_fake_data(fake_dataset: xr.Dataset): +def _create_reader_for_fake_data(observation_type: str, fake_dataset: xr.Dataset): from 
satpy.readers.abi_l2_nc import NC_ABI_L2 reader_args = ( "filename", { - 'platform_shortname': 'G16', 'observation_type': 'RSR', + 'platform_shortname': 'G16', 'scene_abbr': 'C', 'scan_mode': 'M3' }, - {'filetype': 'info'}, + {'file_type': 'info', 'observation_type': observation_type}, ) with mock.patch('satpy.readers.abi_base.xr') as xr_: xr_.open_dataset.return_value = fake_dataset From ab5ea465e114f28c54ff1ee18d7d19e00c3cf5a9 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sat, 10 Jun 2023 14:42:25 +0800 Subject: [PATCH 0276/1416] Update __init__.py --- satpy/composites/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index afe7400bc3..0812f5c2b0 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1125,7 +1125,6 @@ def _get_and_sharpen_rgb_data_arrays_and_meta(self, datasets, optional_datasets) for color in bands.keys(): if color != self.neutral_resolution_color and color != self.high_resolution_color: bands[color] = bands[color] * ratio - return bands['red'], bands['green'], bands['blue'], new_attrs return bands['red'], bands['green'], bands['blue'], new_attrs From 727a6f99e7901bf3b5f2f01898c2fb98f24e6f02 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sat, 10 Jun 2023 19:31:28 +0800 Subject: [PATCH 0277/1416] Update test_composites.py --- satpy/tests/test_composites.py | 68 ++++++++++++---------------------- 1 file changed, 23 insertions(+), 45 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index d1b8415ca4..01ad6d0647 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -31,6 +31,7 @@ import satpy + # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path @@ -189,73 +190,50 @@ def setup_method(self): @pytest.mark.parametrize( "init_kwargs", [ - {'name': "true_color", "high_resolution_band": "bad", "neutral_resolution_band": "bad"}, + {'high_resolution_band': "bad", 'neutral_resolution_band': "red"}, + {'high_resolution_band': "red", 'neutral_resolution_band': "bad"} ] ) def test_bad_colors(self, init_kwargs): """Test that only valid band colors can be provided.""" from satpy.composites import RatioSharpenedRGB with pytest.raises(ValueError): - RatioSharpenedRGB(**init_kwargs) + RatioSharpenedRGB(name='true_color', **init_kwargs) - @pytest.mark.parametrize( - "exp", - [ - satpy.composites.IncompatibleAreas, - ] - ) - def test_match_data_arrays(self, exp): + def test_match_data_arrays(self): """Test that all areas have to be the same resolution.""" - from satpy.composites import RatioSharpenedRGB + from satpy.composites import RatioSharpenedRGB, IncompatibleAreas comp = RatioSharpenedRGB(name='true_color') - with pytest.raises(exp): + with pytest.raises(IncompatibleAreas): comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4_big,)) - @pytest.mark.parametrize( - "exp", - [ - ValueError, - ] - ) - def test_more_than_three_datasets(self, exp): + def test_more_than_three_datasets(self): """Test that only 3 datasets can be passed.""" from satpy.composites import SelfSharpenedRGB comp = SelfSharpenedRGB(name='true_color', high_resolution_band=None) - with pytest.raises(exp): + with pytest.raises(ValueError): comp((self.ds1, self.ds2, self.ds3)) - @pytest.mark.parametrize( - "exp", - [ - ValueError, - ] - ) - def test_self_sharpened_no_high_res(self, exp): + def test_self_sharpened_no_high_res(self): """Test for exception when no high_res band is 
specified.""" from satpy.composites import SelfSharpenedRGB comp = SelfSharpenedRGB(name='true_color', high_resolution_band=None) - with pytest.raises(exp): + with pytest.raises(ValueError): comp((self.ds1, self.ds2, self.ds3)) - @pytest.mark.parametrize( - ("case", "exp"), - [ - ("without optional high res", (3, 2, 2)), - ("high res band is None", (3, 2, 2)) - ] - ) - def test_basic_function(self, case, exp): - """Test basic composite function without sharpening.""" + def test_basic_no_high_res(self): + """Test that three datasets can be passed without optional high res.""" from satpy.composites import RatioSharpenedRGB + comp = RatioSharpenedRGB(name="true_color") + res = comp((self.ds1, self.ds2, self.ds3)) + assert res.shape == (3, 2, 2) - if case == "without optional high res": - comp = RatioSharpenedRGB(name='true_color') - res = comp((self.ds1, self.ds2, self.ds3)) - elif case == "high res band is None": - comp = RatioSharpenedRGB(name='true_color', high_resolution_band=None) - res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) - - assert res.shape == exp + def test_basic_no_sharpen(self): + """Test that color None does no sharpening.""" + from satpy.composites import RatioSharpenedRGB + comp = RatioSharpenedRGB(name="true_color", high_resolution_band=None) + res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) + assert res.shape == (3, 2, 2) @pytest.mark.parametrize( ("high_resolution_band", "neutral_resolution_band", "exp_r", "exp_g", "exp_b"), @@ -308,7 +286,7 @@ def test_ratio_sharpening(self, high_resolution_band, neutral_resolution_band, e ((3, 2, 2), np.array([[5.0, 5.0], [5.0, 0]], dtype=np.float64), np.array([[4.0, 4.0], [4.0, 0]], dtype=np.float64), - np.array([[16/3, 16/3], [16/3, 0]], dtype=np.float64)) + np.array([[16 / 3, 16 / 3], [16 / 3, 0]], dtype=np.float64)) ] ) def test_self_sharpened_basic(self, exp_shape, exp_r, exp_g, exp_b): From 35e4fc5a701a46d5dac8ac173d420cf8fec03fc2 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 12 Jun 2023 11:28:28 +0000 Subject: [PATCH 0278/1416] Improve geolocation performance --- satpy/readers/gms5_vissr_l1b.py | 10 +- satpy/readers/gms5_vissr_navigation.py | 314 ++++++++++++------ .../test_gms5_vissr_navigation.py | 51 ++- 3 files changed, 232 insertions(+), 143 deletions(-) diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index ef7dff2855..347ccadf3e 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -113,12 +113,8 @@ Navigation of VISSR images is computationally expensive, because for each pixel the view vector of the (rotating) instrument needs to be intersected with the -earth, including interpolation of attitude and orbit prediction. - -Currently, navigation takes about 20 seconds for IR channels and 8 minutes for -the VIS channel. Although the navigation module is jit-compiled using numba, -JMA's C library ``Msial`` is still four times faster. So there's certainly room -for optimization. +earth, including interpolation of attitude and orbit prediction. For IR channels +this takes about 10 seconds, for VIS channels about 160 seconds. 
Space Pixels @@ -528,7 +524,7 @@ def _get_orbit_prediction(self): orb_pred["sat_sun_vector_earth_fixed"]["azimuth"].astype(np.float64) ), ) - sat_position = nav.SatellitePositionEarthFixed( + sat_position = nav.Satpos( x=orb_pred["satellite_position_earth_fixed"][:, 0].astype(np.float64), y=orb_pred["satellite_position_earth_fixed"][:, 1].astype(np.float64), z=orb_pred["satellite_position_earth_fixed"][:, 2].astype(np.float64), diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms5_vissr_navigation.py index 62f4d3ed81..8a811b2210 100644 --- a/satpy/readers/gms5_vissr_navigation.py +++ b/satpy/readers/gms5_vissr_navigation.py @@ -22,6 +22,33 @@ """Constants taken from JMA's Msial library.""" +Pixel = namedtuple( + "Pixel", + ["line", "pixel"] +) +"""A VISSR pixel.""" + +Vector2D = namedtuple( + "Vector2D", + ["x", "y"] +) +"""A 2D vector.""" + + +Vector3D = namedtuple( + "Vector3D", + ["x", "y", "z"] +) +"""A 3D vector.""" + + +Satpos = namedtuple( + "Satpos", + ["x", "y", "z"] +) +"""A 3D vector.""" + + Attitude = namedtuple( "Attitude", [ @@ -48,7 +75,7 @@ Args: angles (OrbitAngles): Orbit angles - sat_position (SatellitePositionEarthFixed): Satellite position + sat_position (Vector3D): Satellite position nutation_precession: Nutation and precession matrix (3x3) """ @@ -67,16 +94,6 @@ """ -SatellitePositionEarthFixed = namedtuple( - "SatellitePositionEarthFixed", - ["x", "y", "z"], -) -"""Satellite position in earth-fixed coordinates. - -Units: meters -""" - - ImageNavigationParameters = namedtuple( "ImageNavigationParameters", ["static", "predicted"] @@ -294,7 +311,7 @@ def __init__( Args: prediction_times: Timestamps of orbit prediction. angles (OrbitAngles): Orbit angles - sat_position (SatellitePositionEarthFixed): Satellite position + sat_position (Vector3D): Satellite position nutation_precession: Nutation and precession matrix. """ self.prediction_times = prediction_times @@ -365,11 +382,11 @@ def _get_lons_lats_numba(lines_2d, pixels_2d, nav_params): lats = np.zeros(shape, dtype=np.float32) for i in range(shape[0]): for j in range(shape[1]): - point = (lines_2d[i, j], pixels_2d[i, j]) + pixel = Pixel(lines_2d[i, j], pixels_2d[i, j]) nav_params_pix = _get_pixel_navigation_parameters( - point, nav_params + pixel, nav_params ) - lon, lat = get_lon_lat(point, nav_params_pix) + lon, lat = get_lon_lat(pixel, nav_params_pix) lons[i, j] = lon lats[i, j] = lat # Stack lons and lats because da.map_blocks doesn't support multiple @@ -421,18 +438,18 @@ def interpolate_navigation_prediction( @numba.njit -def get_lon_lat(point, nav_params): +def get_lon_lat(pixel, nav_params): """Get longitude and latitude coordinates for a given image pixel. Args: - point: Point (line, pixel) in image coordinates. + pixel (Pixel): Point in image coordinates. nav_params (PixelNavigationParameters): Navigation parameters for a single pixel. Returns: Longitude and latitude in degrees. """ scan_angles = transform_image_coords_to_scanning_angles( - point, + pixel, nav_params.proj_params.image_offset, nav_params.proj_params.scanning_angles ) @@ -456,36 +473,24 @@ def get_lon_lat(point, nav_params): return lon, lat -@numba.njit -def _get_sat_pos_vector(sat_position): - return np.array( - ( - sat_position.x, - sat_position.y, - sat_position.z, - ) - ) - - @numba.njit def transform_image_coords_to_scanning_angles(point, image_offset, scanning_angles): """Transform image coordinates to scanning angles. Args: - point: Point (line, pixel) in image coordinates. + point (Pixel): Point in image coordinates. 
image_offset (ImageOffset): Image offset. scanning_angles (ScanningAngles): Scanning angles. Returns: Scanning angles (x, y) at the pixel center (rad). """ - line, pixel = point line_offset = image_offset.line_offset pixel_offset = image_offset.pixel_offset stepping_angle = scanning_angles.stepping_angle sampling_angle = scanning_angles.sampling_angle - x = sampling_angle * (pixel + 1 - pixel_offset) - y = stepping_angle * (line + 1 - line_offset) - return np.array([x, y]) + x = sampling_angle * (point.pixel + 1 - pixel_offset) + y = stepping_angle * (point.line + 1 - line_offset) + return Vector2D(x, y) @numba.njit @@ -493,24 +498,26 @@ def transform_scanning_angles_to_satellite_coords(angles, misalignment): """Transform scanning angles to satellite angular momentum coordinates. Args: - angles: Scanning angles (x, y) in radians. + angles (Vector2D): Scanning angles in radians. misalignment: Misalignment matrix (3x3) Returns: - View vector (x, y, z) in satellite angular momentum coordinates. + View vector (Vector3D) in satellite angular momentum coordinates. """ - rotation, vector = _get_transforms_from_scanning_angles_to_satellite_coords(angles) - return np.dot(rotation, np.dot(misalignment, vector)) + x, y = angles.x, angles.y + sin_x = np.sin(x) + cos_x = np.cos(x) + view = Vector3D(np.cos(y), 0.0, np.sin(y)) + # Correct for misalignment + view = matrix_vector(misalignment, view) -@numba.njit -def _get_transforms_from_scanning_angles_to_satellite_coords(angles): - x, y = angles - cos_x = np.cos(x) - sin_x = np.sin(x) - rot = np.array(((cos_x, -sin_x, 0), (sin_x, cos_x, 0), (0, 0, 1))) - vec = np.array([np.cos(y), 0, np.sin(y)]) - return rot, vec + # Rotate around z-axis + return Vector3D( + cos_x * view.x - sin_x * view.y, + sin_x * view.x + cos_x * view.y, + view.z + ) @numba.njit @@ -522,80 +529,141 @@ def transform_satellite_to_earth_fixed_coords( """Transform from earth-fixed to satellite angular momentum coordinates. Args: - point: Point (x, y, z) in satellite angular momentum coordinates. + point (Vector3D): Point in satellite angular momentum coordinates. orbit (Orbit): Orbital parameters attitude (Attitude): Attitude parameters Returns: - Point (x', y', z') in earth-fixed coordinates. + Point (Vector3D) in earth-fixed coordinates. 
""" - sat_unit_vectors = _get_satellite_unit_vectors(orbit, attitude) - return np.dot(sat_unit_vectors, point) - - -@numba.njit -def _get_satellite_unit_vectors(orbit, attitude): unit_vector_z = _get_satellite_unit_vector_z(attitude, orbit) - unit_vector_x = _get_satellite_unit_vector_x( - attitude, orbit, unit_vector_z - ) + unit_vector_x = _get_satellite_unit_vector_x(unit_vector_z, attitude, orbit) unit_vector_y = _get_satellite_unit_vector_y(unit_vector_x, unit_vector_z) - return np.stack((unit_vector_x, unit_vector_y, unit_vector_z), axis=-1) + return _get_earth_fixed_coords( + point, + unit_vector_x, + unit_vector_y, + unit_vector_z + ) @numba.njit def _get_satellite_unit_vector_z(attitude, orbit): - sat_z_axis_1950 = _get_satellite_z_axis_1950(attitude) - rotation = _get_transform_from_1950_to_earth_fixed(orbit.angles.greenwich_sidereal_time) - z_vec = np.dot(rotation, np.dot(orbit.nutation_precession, sat_z_axis_1950)) - return normalize_vector(z_vec) + v1950 = _get_satellite_z_axis_1950( + attitude.angle_between_sat_spin_and_z_axis, + attitude.angle_between_sat_spin_and_yz_plane + ) + vcorr = _correct_nutation_precession( + v1950, + orbit.nutation_precession + ) + return _rotate_to_greenwich( + vcorr, + orbit.angles.greenwich_sidereal_time + ) @numba.njit -def _get_satellite_z_axis_1950(attitude): +def _get_satellite_z_axis_1950( + angle_between_sat_spin_and_z_axis, + angle_between_sat_spin_and_yz_plane +): """Get satellite z-axis (spin) in mean of 1950 coordinates.""" - alpha = attitude.angle_between_sat_spin_and_z_axis - delta = attitude.angle_between_sat_spin_and_yz_plane + alpha = angle_between_sat_spin_and_z_axis + delta = angle_between_sat_spin_and_yz_plane cos_delta = np.cos(delta) - x = np.sin(delta) - y = -cos_delta * np.sin(alpha) - z = cos_delta * np.cos(alpha) - return np.array([x, y, z]) + return Vector3D( + x=np.sin(delta), + y=-cos_delta * np.sin(alpha), + z=cos_delta * np.cos(alpha) + ) @numba.njit -def _get_transform_from_1950_to_earth_fixed(greenwich_sidereal_time): - cos = np.cos(greenwich_sidereal_time) - sin = np.sin(greenwich_sidereal_time) - return np.array(((cos, sin, 0), (-sin, cos, 0), (0, 0, 1))) +def _correct_nutation_precession(vector, nutation_precession): + return matrix_vector(nutation_precession, vector) + + +@numba.njit +def _rotate_to_greenwich(vector, greenwich_sidereal_time): + cos_sid = np.cos(greenwich_sidereal_time) + sin_sid = np.sin(greenwich_sidereal_time) + rotated = Vector3D( + x=cos_sid * vector.x + sin_sid * vector.y, + y=-sin_sid * vector.x + cos_sid * vector.y, + z=vector.z + ) + return normalize_vector(rotated) @numba.njit -def _get_satellite_unit_vector_x(attitude, orbit, sat_unit_vector_z): - beta = attitude.angle_between_earth_and_sun - sat_sun_vector = _get_vector_from_satellite_to_sun(orbit.angles) - z_cross_satsun = np.cross(sat_unit_vector_z, sat_sun_vector) - z_cross_satsun = normalize_vector(z_cross_satsun) - x_vec = z_cross_satsun * np.sin(beta) + np.cross( - z_cross_satsun, sat_unit_vector_z - ) * np.cos(beta) - return normalize_vector(x_vec) +def _get_satellite_unit_vector_x(unit_vector_z, attitude, orbit): + sat_sun_vec = _get_vector_from_satellite_to_sun( + orbit.angles.declination_from_sat_to_sun, + orbit.angles.right_ascension_from_sat_to_sun + ) + return _get_unit_vector_x( + sat_sun_vec, + unit_vector_z, + attitude.angle_between_earth_and_sun + ) @numba.njit -def _get_vector_from_satellite_to_sun(orbit_angles): - declination = orbit_angles.declination_from_sat_to_sun - right_ascension = 
orbit_angles.right_ascension_from_sat_to_sun +def _get_vector_from_satellite_to_sun( + declination_from_sat_to_sun, + right_ascension_from_sat_to_sun +): + declination = declination_from_sat_to_sun + right_ascension = right_ascension_from_sat_to_sun cos_declination = np.cos(declination) - x = cos_declination * np.cos(right_ascension) - y = cos_declination * np.sin(right_ascension) - z = np.sin(declination) - return np.array([x, y, z]) + return Vector3D( + x=cos_declination * np.cos(right_ascension), + y=cos_declination * np.sin(right_ascension), + z=np.sin(declination) + ) + + +@numba.njit +def _get_unit_vector_x( + sat_sun_vec, + unit_vector_z, + angle_between_earth_and_sun + +): + beta = angle_between_earth_and_sun + sin_beta = np.sin(beta) + cos_beta = np.cos(beta) + cross1 = _get_uz_cross_satsun(unit_vector_z, sat_sun_vec) + cross2 = cross_product(cross1, unit_vector_z) + unit_vector_x = Vector3D( + x=sin_beta * cross1.x + cos_beta * cross2.x, + y=sin_beta * cross1.y + cos_beta * cross2.y, + z=sin_beta * cross1.z + cos_beta * cross2.z + ) + return normalize_vector(unit_vector_x) @numba.njit -def _get_satellite_unit_vector_y(sat_unit_vector_x, sat_unit_vector_z): - y_vec = np.cross(sat_unit_vector_z, sat_unit_vector_x) - return normalize_vector(y_vec) +def _get_uz_cross_satsun(unit_vector_z, sat_sun_vec): + res = cross_product(unit_vector_z, sat_sun_vec) + return normalize_vector(res) + + +@numba.njit +def _get_satellite_unit_vector_y(unit_vector_x, unit_vector_z): + res = cross_product(unit_vector_z, unit_vector_x) + return normalize_vector(res) + + +@numba.njit +def _get_earth_fixed_coords(point, unit_vector_x, unit_vector_y, unit_vector_z): + ux, uy, uz = unit_vector_x, unit_vector_y, unit_vector_z + # Multiply with matrix of satellite unit vectors [ux, uy, uz] + return Vector3D( + x=ux.x * point.x + uy.x * point.y + uz.x * point.z, + y=ux.y * point.x + uy.y * point.y + uz.y * point.z, + z=ux.z * point.x + uy.z * point.y + uz.z * point.z + ) @numba.njit @@ -605,17 +673,19 @@ def intersect_with_earth(view_vector, sat_pos, ellipsoid): Reference: Appendix E, section 2.11 in the GMS user guide. Args: - view_vector: Instrument viewing vector (x, y, z) in earth-fixed + view_vector (Vector3D): Instrument viewing vector in earth-fixed coordinates. - sat_pos (SatellitePositionEarthFixed): Satellite position in - earth-fixed coordinates. + sat_pos (Vector3D): Satellite position in earth-fixed coordinates. ellipsoid (EarthEllipsoid): Earth ellipsoid. Returns: - Intersection (x', y', z') with the earth's surface. + Intersection (Vector3D) with the earth's surface. """ distance = _get_distance_to_intersection(view_vector, sat_pos, ellipsoid) - sat_pos_vec = _get_sat_pos_vector(sat_pos) - return sat_pos_vec + distance * view_vector + return Vector3D( + sat_pos.x + distance * view_vector.x, + sat_pos.y + distance * view_vector.y, + sat_pos.z + distance * view_vector.z + ) @numba.njit @@ -651,7 +721,7 @@ def _get_abc_helper(view_vector, sat_pos, ellipsoid): Reference: Appendix E, Equation (26) in the GMS user guide. """ flat2 = (1 - ellipsoid.flattening) ** 2 - ux, uy, uz = view_vector + ux, uy, uz = view_vector.x, view_vector.y, view_vector.z x, y, z = sat_pos.x, sat_pos.y, sat_pos.z a = flat2 * (ux ** 2 + uy ** 2) + uz ** 2 b = flat2 * (x * ux + y * uy) + z * uz @@ -664,28 +734,22 @@ def transform_earth_fixed_to_geodetic_coords(point, earth_flattening): """Transform from earth-fixed to geodetic coordinates. Args: - point: Point (x, y, z) in earth-fixed coordinates. 
+ point (Vector3D): Point in earth-fixed coordinates. earth_flattening: Flattening of the earth. Returns: Geodetic longitude and latitude (degrees). """ - x, y, z = point + x, y, z = point.x, point.y, point.z f = earth_flattening lon = np.arctan2(y, x) lat = np.arctan2(z, ((1 - f) ** 2 * np.sqrt(x**2 + y**2))) return np.rad2deg(lon), np.rad2deg(lat) -@numba.njit -def normalize_vector(v): - """Normalize the given vector.""" - return v / np.sqrt(np.dot(v, v)) - - @numba.njit def interpolate_orbit_prediction(orbit_prediction, observation_time): - """Interpolate orbit prediction.""" + """Interpolate orbit prediction at the given observation time.""" angles = _interpolate_orbit_angles(observation_time, orbit_prediction) sat_position = _interpolate_sat_position(observation_time, orbit_prediction) nutation_precession = interpolate_nearest( @@ -741,12 +805,12 @@ def _interpolate_sat_position(observation_time, orbit_prediction): orbit_prediction.prediction_times, orbit_prediction.sat_position.z, ) - return SatellitePositionEarthFixed(x, y, z) + return Vector3D(x, y, z) @numba.njit def interpolate_attitude_prediction(attitude_prediction, observation_time): - """Interpolate attitude prediction.""" + """Interpolate attitude prediction at given observation time.""" angle_between_earth_and_sun = interpolate_angles( observation_time, attitude_prediction.prediction_times, @@ -836,3 +900,33 @@ def interpolate_nearest(x, x_sample, y_sample): def _interpolate_nearest(x, x_sample, y_sample): i = _find_enclosing_index(x, x_sample) return y_sample[i] + + +@numba.njit +def matrix_vector(m, v): + """Multiply (3,3)-matrix and Vector3D.""" + x = m[0, 0] * v.x + m[0, 1] * v.y + m[0, 2] * v.z + y = m[1, 0] * v.x + m[1, 1] * v.y + m[1, 2] * v.z + z = m[2, 0] * v.x + m[2, 1] * v.y + m[2, 2] * v.z + return Vector3D(x, y, z) + + +@numba.njit +def cross_product(a, b): + """Compute vector product a x b.""" + return Vector3D( + x=a.y * b.z - a.z * b.y, + y=a.z * b.x - a.x * b.z, + z=a.x * b.y - a.y * b.x + ) + + +@numba.njit +def normalize_vector(v): + """Normalize a Vector3D.""" + norm = np.sqrt(v.x**2 + v.y**2 + v.z**2) + return Vector3D( + v.x / norm, + v.y / norm, + v.z / norm + ) diff --git a/satpy/tests/reader_tests/test_gms5_vissr_navigation.py b/satpy/tests/reader_tests/test_gms5_vissr_navigation.py index 8cc73b87b0..ec564e7ab8 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_navigation.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_navigation.py @@ -13,8 +13,7 @@ # fmt: off IR_NAVIGATION_REFERENCE = [ { - 'line': 686, - 'pixel': 1680, + "pixel": nav.Pixel(line=686, pixel=1680), 'lon': 139.990380, 'lat': 35.047056, 'nav_params': nav.PixelNavigationParameters( @@ -29,7 +28,7 @@ declination_from_sat_to_sun=-0.208770861178982, right_ascension_from_sat_to_sun=3.304369303579407, ), - sat_position=nav.SatellitePositionEarthFixed( + sat_position=nav.Vector3D( x=-32390963.148471601307392, y=27003395.381247851997614, z=-228134.860026293463307, @@ -62,8 +61,7 @@ ) }, { - 'line': 2089, - 'pixel': 1793, + "pixel": nav.Pixel(line=2089, pixel=1793), 'lon': 144.996967, 'lat': -34.959853, 'nav_params': nav.PixelNavigationParameters( @@ -78,7 +76,7 @@ declination_from_sat_to_sun=-0.208713576872247, right_ascension_from_sat_to_sun=3.242660398458377, ), - sat_position=nav.SatellitePositionEarthFixed( + sat_position=nav.Vector3D( x=-32390273.633551981300116, y=27003859.543135114014149, z=-210800.087589388160268, @@ -115,8 +113,7 @@ VIS_NAVIGATION_REFERENCE = [ { - 'line': 2744, - 'pixel': 6720, + "pixel": nav.Pixel(line=2744, 
pixel=6720), 'lon': 139.975527, 'lat': 35.078028, 'nav_params': nav.PixelNavigationParameters( @@ -131,7 +128,7 @@ declination_from_sat_to_sun=-0.208770861179448, right_ascension_from_sat_to_sun=3.304369304082406, ), - sat_position=nav.SatellitePositionEarthFixed( + sat_position=nav.Vector3D( x=-32390963.148477241396904, y=27003395.381243918091059, z=-228134.860164520738181, @@ -164,8 +161,7 @@ ) }, { - 'line': 8356, - 'pixel': 7172, + "pixel": nav.Pixel(line=8356, pixel=7172), 'lon': 144.980104, 'lat': -34.929123, 'nav_params': nav.PixelNavigationParameters( @@ -180,7 +176,7 @@ declination_from_sat_to_sun=-0.208713576872715, right_ascension_from_sat_to_sun=3.242660398961383, ), - sat_position=nav.SatellitePositionEarthFixed( + sat_position=nav.Vector3D( x=-32390273.633557569235563, y=27003859.543131537735462, z=-210800.087734811415430, @@ -236,7 +232,7 @@ class TestSinglePixelNavigation: @pytest.mark.parametrize( "point,nav_params,expected", [ - ((ref["line"], ref["pixel"]), ref["nav_params"], (ref["lon"], ref["lat"])) + (ref["pixel"], ref["nav_params"], (ref["lon"], ref["lat"])) for ref in NAVIGATION_REFERENCE ], ) @@ -252,7 +248,7 @@ def test_transform_image_coords_to_scanning_angles(self): stepping_angle=0.01, sampling_angle=0.02, misalignment=-999 ) angles = nav.transform_image_coords_to_scanning_angles( - point=np.array([199, 99]), + point=nav.Pixel(199, 99), image_offset=offset, scanning_angles=scanning_angles, ) @@ -260,7 +256,7 @@ def test_transform_image_coords_to_scanning_angles(self): def test_transform_scanning_angles_to_satellite_coords(self): """Test transformation from scanning angles to satellite coordinates.""" - scanning_angles = np.array([np.pi, np.pi / 2]) + scanning_angles = nav.Vector2D(np.pi, np.pi / 2) misalignment = np.diag([1, 2, 3]).astype(float) point_sat = nav.transform_scanning_angles_to_satellite_coords( scanning_angles, misalignment @@ -269,7 +265,7 @@ def test_transform_scanning_angles_to_satellite_coords(self): def test_transform_satellite_to_earth_fixed_coords(self): """Test transformation from satellite to earth-fixed coordinates.""" - point_sat = np.array([1, 2, 3], dtype=float) + point_sat = nav.Vector3D(1, 2, 3) attitude = nav.Attitude( angle_between_earth_and_sun=np.pi, angle_between_sat_spin_and_z_axis=np.pi, @@ -281,7 +277,7 @@ def test_transform_satellite_to_earth_fixed_coords(self): declination_from_sat_to_sun=np.pi, right_ascension_from_sat_to_sun=np.pi / 2, ), - sat_position=nav.SatellitePositionEarthFixed(-999, -999, -999), + sat_position=nav.Vector3D(-999, -999, -999), nutation_precession=np.diag([1, 2, 3]).astype(float), ) res = nav.transform_satellite_to_earth_fixed_coords(point_sat, orbit, attitude) @@ -289,9 +285,9 @@ def test_transform_satellite_to_earth_fixed_coords(self): def test_intersect_view_vector_with_earth(self): """Test intersection of a view vector with the earth's surface.""" - view_vector = np.array([-1, 0, 0], dtype=float) + view_vector = nav.Vector3D(-1, 0, 0) ellipsoid = nav.EarthEllipsoid(equatorial_radius=6371 * 1000, flattening=0.003) - sat_pos = nav.SatellitePositionEarthFixed(x=36000 * 1000.0, y=0.0, z=0.0) + sat_pos = nav.Vector3D(x=36000 * 1000.0, y=0.0, z=0.0) point = nav.intersect_with_earth(view_vector, sat_pos, ellipsoid) exp = [ellipsoid.equatorial_radius, 0, 0] np.testing.assert_allclose(point, exp) @@ -311,15 +307,18 @@ def test_transform_earth_fixed_to_geodetic_coords( ): """Test transformation from earth-fixed to geodetic coordinates.""" point_geodetic = nav.transform_earth_fixed_to_geodetic_coords( - 
np.array(point_earth_fixed), 0.003 + nav.Vector3D(*point_earth_fixed), + 0.003 ) np.testing.assert_allclose(point_geodetic, point_geodetic_exp) def test_normalize_vector(self): """Test vector normalization.""" - v = np.array([1, 2, 3], dtype=float) + v = nav.Vector3D(1, 2, 3) + norm = np.sqrt(14) + exp = nav.Vector3D(1 / norm, 2 / norm, 3 / norm) normed = nav.normalize_vector(v) - np.testing.assert_allclose(normed, v / np.sqrt(14)) + np.testing.assert_allclose(normed, exp) class TestImageNavigation: @@ -430,7 +429,7 @@ def orbit_expected(self): declination_from_sat_to_sun=1.6, right_ascension_from_sat_to_sun=1.7, ), - sat_position=nav.SatellitePositionEarthFixed( + sat_position=nav.Vector3D( x=1.8, y=1.9, z=2.0, @@ -488,7 +487,7 @@ def orbit_prediction(): declination_from_sat_to_sun=np.array([0.1, 1.1, 2.1, 3.1]), right_ascension_from_sat_to_sun=np.array([0.2, 1.2, 2.2, 3.2]), ), - sat_position=nav.SatellitePositionEarthFixed( + sat_position=nav.Vector3D( x=np.array([0.3, 1.3, 2.3, 3.3]), y=np.array([0.4, 1.4, 2.4, 3.4]), z=np.array([0.5, 1.5, 2.5, 3.5]), @@ -550,8 +549,8 @@ def test_get_observation_time(): num_sensors=1, sampling_angle=0.01, ) - point = np.array([11, 100]) - obs_time = nav.get_observation_time(point, scan_params) + pixel = nav.Pixel(11, 100) + obs_time = nav.get_observation_time(pixel, scan_params) np.testing.assert_allclose(obs_time, 50000.0000705496871047) From ebd1cfebf3bab5a19cbc4a16e4c9ccf80e30572e Mon Sep 17 00:00:00 2001 From: Lloyd Haydn Hughes Date: Mon, 12 Jun 2023 16:01:13 +0200 Subject: [PATCH 0279/1416] Adding support for S3 L2 files which do not specify a frame_id. This is the case with some older <2019 imagery which is supplied by EUMETSAT --- satpy/etc/readers/olci_l2.yaml | 52 +++++++++++++++++++++++++--------- 1 file changed, 39 insertions(+), 13 deletions(-) diff --git a/satpy/etc/readers/olci_l2.yaml b/satpy/etc/readers/olci_l2.yaml index 0750d96f68..bbaa60a5bc 100644 --- a/satpy/etc/readers/olci_l2.yaml +++ b/satpy/etc/readers/olci_l2.yaml @@ -13,43 +13,69 @@ file_types: esa_l2_reflectance: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_reflectance.nc'] + file_patterns: + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_reflectance.nc' + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_reflectance.nc' esa_l2_chl_nn: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_nn.nc'] + file_patterns: + - 
'{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_nn.nc' + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_nn.nc' esa_l2_chl_oc4me: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_oc4me.nc'] + file_patterns: + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_oc4me.nc' + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_oc4me.nc' esa_l2_iop_nn: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/iop_nn.nc'] + file_patterns: + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/iop_nn.nc' + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/iop_nn.nc' esa_l2_trsp: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/trsp.nc'] + file_patterns: + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/trsp.nc' + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/trsp.nc' esa_l2_tsm_nn: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tsm_nn.nc'] + file_patterns: + - 
'{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tsm_nn.nc' + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tsm_nn.nc' esa_l2_wqsf: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/wqsf.nc'] + file_patterns: + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/wqsf.nc' + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/wqsf.nc' esa_l2_gifapar: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/gifapar.nc'] + file_patterns: + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/gifapar.nc' + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/gifapar.nc' esa_l2_rc_gifapar: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/rc_gifapar.nc'] + file_patterns: + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/rc_gifapar.nc' + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/rc_gifapar.nc' esa_l2_iwv: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/iwv.nc'] + file_patterns: + - 
'{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/iwv.nc' + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/iwv.nc' esa_l2_otci: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/otci.nc'] + file_patterns: + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/otci.nc' + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/otci.nc' esa_angles: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCIAngles - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tie_geometries.nc'] + file_patterns: + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tie_geometries.nc' + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tie_geometries.nc' esa_geo: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCIGeo - file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geo_coordinates.nc'] + file_patterns: + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geo_coordinates.nc' + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geo_coordinates.nc' datasets: From 34215211b5b806ae99928a67dd4142cd859571a4 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Mon, 12 Jun 2023 22:59:45 +0800 Subject: [PATCH 0280/1416] fix --- satpy/composites/__init__.py | 7 +++++-- satpy/tests/test_composites.py | 6 +++--- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 0812f5c2b0..d23f056eaf 100644 --- a/satpy/composites/__init__.py +++ 
b/satpy/composites/__init__.py @@ -1109,6 +1109,11 @@ def _get_and_sharpen_rgb_data_arrays_and_meta(self, datasets, optional_datasets) high_res = None bands = {'red': low_res_red, 'green': low_res_green, 'blue': low_res_blue} + self._sharpen_bands_with_high_res(bands, high_res) + + return bands['red'], bands['green'], bands['blue'], new_attrs + + def _sharpen_bands_with_high_res(self, bands, high_res): if high_res is not None: ratio = da.map_blocks( _get_sharpening_ratio, @@ -1126,8 +1131,6 @@ def _get_and_sharpen_rgb_data_arrays_and_meta(self, datasets, optional_datasets) if color != self.neutral_resolution_color and color != self.high_resolution_color: bands[color] = bands[color] * ratio - return bands['red'], bands['green'], bands['blue'], new_attrs - def _combined_sharpened_info(self, info, new_attrs): combined_info = {} combined_info.update(info) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 01ad6d0647..e5a857035d 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -209,10 +209,10 @@ def test_match_data_arrays(self): def test_more_than_three_datasets(self): """Test that only 3 datasets can be passed.""" - from satpy.composites import SelfSharpenedRGB - comp = SelfSharpenedRGB(name='true_color', high_resolution_band=None) + from satpy.composites import RatioSharpenedRGB + comp = RatioSharpenedRGB(name='true_color') with pytest.raises(ValueError): - comp((self.ds1, self.ds2, self.ds3)) + comp((self.ds1, self.ds2, self.ds3, self.ds1), optional_datasets=(self.ds4_big,)) def test_self_sharpened_no_high_res(self): """Test for exception when no high_res band is specified.""" From 1fd6bafde06728e0884e334a7d9a98c3a4bea719 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 12 Jun 2023 15:31:15 +0000 Subject: [PATCH 0281/1416] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- satpy/tests/test_composites.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index e5a857035d..f27c73d849 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -31,7 +31,6 @@ import satpy - # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path @@ -202,7 +201,7 @@ def test_bad_colors(self, init_kwargs): def test_match_data_arrays(self): """Test that all areas have to be the same resolution.""" - from satpy.composites import RatioSharpenedRGB, IncompatibleAreas + from satpy.composites import IncompatibleAreas, RatioSharpenedRGB comp = RatioSharpenedRGB(name='true_color') with pytest.raises(IncompatibleAreas): comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4_big,)) From 2cc1edcaa84f50230a75ddc148a5f4dc3212f266 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Tue, 13 Jun 2023 15:22:40 +0800 Subject: [PATCH 0282/1416] Update __init__.py --- satpy/composites/__init__.py | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index d23f056eaf..da4d1a9e5c 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1109,27 +1109,27 @@ def _get_and_sharpen_rgb_data_arrays_and_meta(self, datasets, optional_datasets) high_res = None bands = {'red': low_res_red, 'green': low_res_green, 'blue': low_res_blue} - 
self._sharpen_bands_with_high_res(bands, high_res) + if high_res is not None: + self._sharpen_bands_with_high_res(bands, high_res) return bands['red'], bands['green'], bands['blue'], new_attrs def _sharpen_bands_with_high_res(self, bands, high_res): - if high_res is not None: - ratio = da.map_blocks( - _get_sharpening_ratio, - high_res.data, - bands[self.high_resolution_color].data, - meta=np.array((), dtype=high_res.dtype), - dtype=high_res.dtype, - chunks=high_res.chunks, - ) + ratio = da.map_blocks( + _get_sharpening_ratio, + high_res.data, + bands[self.high_resolution_color].data, + meta=np.array((), dtype=high_res.dtype), + dtype=high_res.dtype, + chunks=high_res.chunks, + ) - bands[self.high_resolution_color] = high_res + bands[self.high_resolution_color] = high_res - with xr.set_options(keep_attrs=True): - for color in bands.keys(): - if color != self.neutral_resolution_color and color != self.high_resolution_color: - bands[color] = bands[color] * ratio + with xr.set_options(keep_attrs=True): + for color in bands.keys(): + if color != self.neutral_resolution_color and color != self.high_resolution_color: + bands[color] = bands[color] * ratio def _combined_sharpened_info(self, info, new_attrs): combined_info = {} From 5cf3d68c6ac0c2b61e7e0f02be75aa91cbdbddd6 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Tue, 13 Jun 2023 15:43:32 +0800 Subject: [PATCH 0283/1416] Update __init__.py --- satpy/composites/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index da4d1a9e5c..3750bd9c64 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1114,7 +1114,7 @@ def _get_and_sharpen_rgb_data_arrays_and_meta(self, datasets, optional_datasets) return bands['red'], bands['green'], bands['blue'], new_attrs - def _sharpen_bands_with_high_res(self, bands, high_res): + def _sharpen_bands_with_high_res(self, bands, high_res): ratio = da.map_blocks( _get_sharpening_ratio, high_res.data, From 7a0375171725c6e2263d554d3c84ef8c40148226 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Tue, 13 Jun 2023 15:43:44 +0800 Subject: [PATCH 0284/1416] Update __init__.py --- satpy/composites/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 3750bd9c64..da4d1a9e5c 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1114,7 +1114,7 @@ def _get_and_sharpen_rgb_data_arrays_and_meta(self, datasets, optional_datasets) return bands['red'], bands['green'], bands['blue'], new_attrs - def _sharpen_bands_with_high_res(self, bands, high_res): + def _sharpen_bands_with_high_res(self, bands, high_res): ratio = da.map_blocks( _get_sharpening_ratio, high_res.data, From 1195b31f98106477edd3940b68870bf8524e4778 Mon Sep 17 00:00:00 2001 From: Lloyd Haydn Hughes Date: Tue, 13 Jun 2023 11:46:09 +0200 Subject: [PATCH 0285/1416] Updated AUTHORS.md --- AUTHORS.md | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS.md b/AUTHORS.md index 688740270f..f67f579e95 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -85,3 +85,4 @@ The following people have made contributions to this project: - [Xin Zhang (zxdawn)](https://github.com/zxdawn) - [Yufei Zhu (yufeizhu600)](https://github.com/yufeizhu600) - [Youva Aoun (YouvaEUMex)](https://github.com/YouvaEUMex) +- [Lloyd Hughes (system123)](https://github.com/system123) From 5ca5498347147ad4aa491e7ae713fe9eda591d56 Mon Sep 17 00:00:00 2001 From: Lloyd Haydn Hughes Date: 
Tue, 13 Jun 2023 11:54:27 +0200 Subject: [PATCH 0286/1416] Cleanup trailing whitespaces from refactoring --- satpy/etc/readers/olci_l2.yaml | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/satpy/etc/readers/olci_l2.yaml b/satpy/etc/readers/olci_l2.yaml index bbaa60a5bc..110bb11a2e 100644 --- a/satpy/etc/readers/olci_l2.yaml +++ b/satpy/etc/readers/olci_l2.yaml @@ -13,67 +13,67 @@ file_types: esa_l2_reflectance: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: + file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_reflectance.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_reflectance.nc' esa_l2_chl_nn: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: + file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_nn.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_nn.nc' esa_l2_chl_oc4me: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: + file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_oc4me.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_oc4me.nc' esa_l2_iop_nn: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: + file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/iop_nn.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/iop_nn.nc' esa_l2_trsp: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: + file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/trsp.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/trsp.nc' esa_l2_tsm_nn: file_reader: 
!!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: + file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tsm_nn.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tsm_nn.nc' esa_l2_wqsf: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: + file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/wqsf.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/wqsf.nc' esa_l2_gifapar: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: + file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/gifapar.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/gifapar.nc' esa_l2_rc_gifapar: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: + file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/rc_gifapar.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/rc_gifapar.nc' esa_l2_iwv: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: + file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/iwv.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/iwv.nc' esa_l2_otci: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 - file_patterns: + file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/otci.nc' - 
'{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/otci.nc' esa_angles: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCIAngles - file_patterns: + file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tie_geometries.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tie_geometries.nc' esa_geo: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCIGeo - file_patterns: + file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geo_coordinates.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geo_coordinates.nc' From 1c5bbbb8d9e395ff72c5bd839fa5f285d3cb75b0 Mon Sep 17 00:00:00 2001 From: Lloyd Haydn Hughes Date: Tue, 13 Jun 2023 13:21:07 +0200 Subject: [PATCH 0287/1416] Alphabetical ordering by last name --- AUTHORS.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/AUTHORS.md b/AUTHORS.md index f67f579e95..e5511d2666 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -35,6 +35,7 @@ The following people have made contributions to this project: - [Gerrit Holl (gerritholl)](https://github.com/gerritholl) - Deutscher Wetterdienst - [David Hoese (djhoese)](https://github.com/djhoese) - [Marc Honnorat (honnorat)](https://github.com/honnorat) +- [Lloyd Hughes (system123)](https://github.com/system123) - [Mikhail Itkin (mitkin)](https://github.com/mitkin) - [Tommy Jasmin (tommyjasmin)](https://github.com/tommyjasmin) - [Jactry Zeng](https://github.com/jactry) @@ -85,4 +86,4 @@ The following people have made contributions to this project: - [Xin Zhang (zxdawn)](https://github.com/zxdawn) - [Yufei Zhu (yufeizhu600)](https://github.com/yufeizhu600) - [Youva Aoun (YouvaEUMex)](https://github.com/YouvaEUMex) -- [Lloyd Hughes (system123)](https://github.com/system123) + From 26c23d9dc3bbbe9e031eae5e3623193a0d12a35b Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Tue, 13 Jun 2023 11:47:08 +0000 Subject: [PATCH 0288/1416] Add support for partial scans --- satpy/etc/readers/gms5-vissr_l1b.yaml | 16 +++--- satpy/readers/gms5_vissr_l1b.py | 55 +++++++++++-------- .../tests/reader_tests/test_gms5_vissr_l1b.py | 38 ++++--------- 3 files changed, 49 insertions(+), 60 deletions(-) diff --git a/satpy/etc/readers/gms5-vissr_l1b.yaml b/satpy/etc/readers/gms5-vissr_l1b.yaml index 5381833686..1b81d0f8b0 100644 --- a/satpy/etc/readers/gms5-vissr_l1b.yaml +++ b/satpy/etc/readers/gms5-vissr_l1b.yaml @@ -18,27 +18,27 @@ file_types: gms5_vissr_vis: file_reader: !!python/name:satpy.readers.gms5_vissr_l1b.GMS5VISSRFileHandler file_patterns: - - 'VISSR_{start_time:%Y%m%d_%H%M}_VIS.A.IMG' - - 'VISSR_{start_time:%Y%m%d_%H%M}_VIS.A.IMG.gz' + - 
'VISSR_{start_time:%Y%m%d_%H%M}_VIS.{mode}.IMG' + - 'VISSR_{start_time:%Y%m%d_%H%M}_VIS.{mode}.IMG.gz' gms5_vissr_ir1: file_reader: !!python/name:satpy.readers.gms5_vissr_l1b.GMS5VISSRFileHandler file_patterns: - - 'VISSR_{start_time:%Y%m%d_%H%M}_IR1.A.IMG' - - 'VISSR_{start_time:%Y%m%d_%H%M}_IR1.A.IMG.gz' + - 'VISSR_{start_time:%Y%m%d_%H%M}_IR1.{mode}.IMG' + - 'VISSR_{start_time:%Y%m%d_%H%M}_IR1.{mode}.IMG.gz' gms5_vissr_ir2: file_reader: !!python/name:satpy.readers.gms5_vissr_l1b.GMS5VISSRFileHandler file_patterns: - - 'VISSR_{start_time:%Y%m%d_%H%M}_IR2.A.IMG' - - 'VISSR_{start_time:%Y%m%d_%H%M}_IR2.A.IMG.gz' + - 'VISSR_{start_time:%Y%m%d_%H%M}_IR2.{mode}.IMG' + - 'VISSR_{start_time:%Y%m%d_%H%M}_IR2.{mode}.IMG.gz' gms5_vissr_ir3: file_reader: !!python/name:satpy.readers.gms5_vissr_l1b.GMS5VISSRFileHandler file_patterns: - - 'VISSR_{start_time:%Y%m%d_%H%M}_IR3.A.IMG' - - 'VISSR_{start_time:%Y%m%d_%H%M}_IR3.A.IMG.gz' + - 'VISSR_{start_time:%Y%m%d_%H%M}_IR3.{mode}.IMG' + - 'VISSR_{start_time:%Y%m%d_%H%M}_IR3.{mode}.IMG.gz' datasets: diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms5_vissr_l1b.py index 347ccadf3e..c5e98fd753 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms5_vissr_l1b.py @@ -86,7 +86,7 @@ This cannot be represented by a pyresample area definition, so each dataset is accompanied by 2-dimensional longitude and latitude coordinates. For -resampling purpose a square area definition with uniform sampling is provided +resampling purposes a full disc area definition with uniform sampling is provided via .. code-block:: python @@ -143,6 +143,12 @@ Dataset attributes include metadata such as time and orbital parameters, see :ref:`dataset_metadata`. +Partial Scans +------------- + +Between 2001 and 2003 VISSR also recorded partial scans of the northern +hemisphere. On demand, a special Typhoon schedule would be activated between +03:00 and 05:00 UTC. """ import datetime as dt @@ -384,9 +390,7 @@ def _get_area_def_uniform_sampling(self, dataset_id): coord_conv_params=self._header["image_parameters"]["coordinate_conversion"], metadata=self._mda, ) - return a.get_area_def_uniform_sampling( - original_shape=self._get_actual_shape(), dataset_id=dataset_id - ) + return a.get_area_def_uniform_sampling(dataset_id) def _mask_space_pixels(self, dataset, space_masker): if self._mask_space: @@ -718,6 +722,11 @@ def is_vis_channel(channel_name): class AreaDefEstimator: """Estimate area definition for VISSR images.""" + full_disk_size = { + "IR": 2366, + "VIS": 9464, + } + def __init__(self, coord_conv_params, metadata): """Initialize the area definition estimator. @@ -728,22 +737,21 @@ def __init__(self, coord_conv_params, metadata): self.coord_conv = coord_conv_params self.metadata = metadata - def get_area_def_uniform_sampling(self, original_shape, dataset_id): - """Get square area definition with uniform sampling. + def get_area_def_uniform_sampling(self, dataset_id): + """Get full disk area definition with uniform sampling. Args: - original_shape: Shape of the oversampled VISSR image. dataset_id: ID of the corresponding dataset.
""" - proj_dict = self._get_proj_dict(dataset_id, original_shape) + proj_dict = self._get_proj_dict(dataset_id) extent = geos_area.get_area_extent(proj_dict) return geos_area.get_area_definition(proj_dict, extent) - def _get_proj_dict(self, dataset_id, original_shape): + def _get_proj_dict(self, dataset_id): proj_dict = {} proj_dict.update(self._get_name_dict(dataset_id)) proj_dict.update(self._get_proj4_dict()) - proj_dict.update(self._get_shape_dict(original_shape, dataset_id)) + proj_dict.update(self._get_shape_dict(dataset_id)) return proj_dict def _get_name_dict(self, dataset_id): @@ -775,22 +783,21 @@ def _get_proj4_dict( "h": self.metadata["orbital_parameters"]["satellite_nominal_altitude"], } - def _get_shape_dict(self, original_shape, dataset_id): - # Apply parameters from the vertical dimension (num lines, stepping - # angle) to the horizontal dimension to obtain a square area definition - # with uniform sampling. - num_lines, _ = original_shape + def _get_shape_dict(self, dataset_id): + # Apply sampling from the vertical dimension to the horizontal + # dimension to obtain a square area definition with uniform sampling. + ch_type = fmt.CHANNEL_TYPES[dataset_id["name"]] alt_ch_name = _get_alternative_channel_name(dataset_id) stepping_angle = self.coord_conv["stepping_angle_along_line"][alt_ch_name] - uniform_size = num_lines - uniform_line_pixel_offset = 0.5 * num_lines - uniform_sampling_angle = geos_area.sampling_to_lfac_cfac(stepping_angle) + size = self.full_disk_size[ch_type] + line_pixel_offset = 0.5 * size + lfac_cfac = geos_area.sampling_to_lfac_cfac(stepping_angle) return { - "nlines": uniform_size, - "ncols": uniform_size, - "lfac": uniform_sampling_angle, - "cfac": uniform_sampling_angle, - "coff": uniform_line_pixel_offset, - "loff": uniform_line_pixel_offset, + "nlines": size, + "ncols": size, + "lfac": lfac_cfac, + "cfac": lfac_cfac, + "coff": line_pixel_offset, + "loff": line_pixel_offset, "scandir": "N2S", } diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py index e9accadea5..85c14d1a02 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/test_gms5_vissr_l1b.py @@ -11,6 +11,7 @@ import satpy.readers.gms5_vissr_format as fmt import satpy.readers.gms5_vissr_l1b as vissr +import satpy.readers.gms5_vissr_navigation as nav import satpy.tests.reader_tests.test_gms5_vissr_data as real_world from satpy.readers import FSFile from satpy.tests.reader_tests.utils import get_jit_methods @@ -477,20 +478,12 @@ def area_def_exp(self, dataset_id): """Get expected area definition.""" if dataset_id["name"] == "IR1": resol = 5 - extent = ( - -8.641922536247211, - -8.641922536247211, - 25.925767608741637, - 25.925767608741637, - ) + size = 2366 + extent = (-20438.1468, -20438.1468, 20455.4306, 20455.4306) else: resol = 1 - extent = ( - -2.1604801323784297, - -2.1604801323784297, - 6.481440397135289, - 6.481440397135289, - ) + size = 9464 + extent = (-20444.6235, -20444.6235, 20448.9445, 20448.9445) area_id = f"gms-5_vissr_western-pacific_{resol}km" desc = f"GMS-5 VISSR Western Pacific area definition with {resol} km resolution" return AreaDefinition( @@ -498,7 +491,8 @@ def area_def_exp(self, dataset_id): description=desc, proj_id=area_id, projection={ - "ellps": "SGS85", + "a": nav.EARTH_EQUATORIAL_RADIUS, + "b": nav.EARTH_POLAR_RADIUS, "h": "123456", "lon_0": "140", "no_defs": "None", @@ -509,8 +503,8 @@ def area_def_exp(self, dataset_id): "y_0": "0", }, area_extent=extent, - width=2, - 
height=2, + width=size, + height=size, ) @pytest.fixture @@ -539,7 +533,7 @@ def test_get_dataset(self, file_handler, dataset_id, dataset_exp, attrs_exp): """Test getting the dataset.""" dataset = file_handler.get_dataset(dataset_id, {"yaml": "info"}) xr.testing.assert_allclose(dataset.compute(), dataset_exp, atol=1e-6) - self._assert_attrs_equal(dataset.attrs, attrs_exp) + assert dataset.attrs == attrs_exp def test_time_attributes(self, file_handler, attrs_exp): """Test the file handler's time attributes.""" @@ -548,18 +542,6 @@ def test_time_attributes(self, file_handler, attrs_exp): assert file_handler.start_time == start_time_exp assert file_handler.end_time == end_time_exp - def _assert_attrs_equal(self, attrs_tst, attrs_exp): - area_tst = attrs_tst.pop("area_def_uniform_sampling") - area_exp = attrs_exp.pop("area_def_uniform_sampling") - assert attrs_tst == attrs_exp - self._assert_areas_close(area_tst, area_exp) - - def _assert_areas_close(self, area_tst, area_exp): - lons_tst, lats_tst = area_tst.get_lonlats() - lons_exp, lats_exp = area_exp.get_lonlats() - np.testing.assert_allclose(lons_tst, lons_exp) - np.testing.assert_allclose(lats_tst, lats_exp) - class TestCorruptFile: """Test reading corrupt files.""" From b0cabedd0d0575d4794ca9d4e6d5fe7cca50f0cf Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 13 Jun 2023 20:58:25 -0500 Subject: [PATCH 0289/1416] Add Low Cloud and Fog products to ABI L2 reader --- satpy/etc/readers/abi_l2_nc.yaml | 31 +++++++++++++++++++++ satpy/readers/abi_base.py | 3 ++ satpy/readers/abi_l2_nc.py | 2 +- satpy/tests/reader_tests/test_abi_l2_nc.py | 32 ++++++++++++++-------- 4 files changed, 55 insertions(+), 13 deletions(-) diff --git a/satpy/etc/readers/abi_l2_nc.yaml b/satpy/etc/readers/abi_l2_nc.yaml index 9bf69a1b7f..7c0f1a75ac 100644 --- a/satpy/etc/readers/abi_l2_nc.yaml +++ b/satpy/etc/readers/abi_l2_nc.yaml @@ -331,6 +331,27 @@ datasets: file_type: abi_l2_nav file_key: Latitude +# ---Low Cloud and Fog (GFLS) Products --- + mvfr_prob: + name: MVFR_Fog_Prob + file_type: abi_l2_gfls + file_key: MVFR_Fog_Prob + + ifr_prob: + name: IFR_Fog_Prob + file_type: abi_l2_gfls + file_key: IFR_Fog_Prob + + lifr_prob: + name: LIFR_Fog_Prob + file_type: abi_l2_gfls + file_key: LIFR_Fog_Prob + + fog_depth: + name: Fog_Depth + file_type: abi_l2_gfls + file_key: Fog_Depth + # ---- file_types: @@ -550,3 +571,13 @@ file_types: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-NAV{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "NAV" + + # Low Cloud and Fog + abi_l2_gfls: + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: + # AIT scheme: GOES16_ABI_2KM_MESO_2019147_1800_48_AVIATION_FOG_EN.nc + - '{platform_shortname:s}_{mission_id:3s}_2KM_{scene_abbr:s}_{start_time:%Y%j_%H%M}_{file_product:s}_{algorithm_type:2s}.nc' + # NDE scheme: ABI-L2-GFLSC-M6_v3r1_g16_s202306071931181_e202306071933554_c202306071934440.nc + - '{mission_id:3s}-L2-GFLS{scene_abbr:s}-{scan_mode:2s}_v{sw_version:d}r{sw_revision:d}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' + observation_type: "GFLS" diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py index 5f55170b6f..5f4cf506fe 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -38,6 +38,9 @@ 'G17': 'GOES-17', 'G18': 'GOES-18', 'G19': 
'GOES-19', + 'GOES16': 'GOES-16', + 'GOES17': 'GOES-17', + 'GOES18': 'GOES-18', } diff --git a/satpy/readers/abi_l2_nc.py b/satpy/readers/abi_l2_nc.py index 1e83d2e7ef..a152790197 100644 --- a/satpy/readers/abi_l2_nc.py +++ b/satpy/readers/abi_l2_nc.py @@ -61,7 +61,7 @@ def _update_data_arr_with_filename_attrs(self, variable): # add in information from the filename that may be useful to the user for attr in ('scene_abbr', 'scan_mode', 'platform_shortname'): - variable.attrs[attr] = self.filename_info[attr] + variable.attrs[attr] = self.filename_info.get(attr) # add in information hardcoded in the filetype YAML for attr in ('observation_type',): diff --git a/satpy/tests/reader_tests/test_abi_l2_nc.py b/satpy/tests/reader_tests/test_abi_l2_nc.py index 2d566770ae..b05ceb0f64 100644 --- a/satpy/tests/reader_tests/test_abi_l2_nc.py +++ b/satpy/tests/reader_tests/test_abi_l2_nc.py @@ -16,13 +16,14 @@ # along with this program. If not, see . """The abi_l2_nc reader tests package.""" import contextlib +from typing import Optional from unittest import mock import numpy as np import xarray as xr -def _create_cmip_dataset(): +def _create_cmip_dataset(data_variable: str = "HT"): proj = xr.DataArray( [], attrs={ @@ -57,7 +58,7 @@ def _create_cmip_dataset(): 'goes_imager_projection': proj, 'x': x__, 'y': y__, - 'HT': ht_da, + data_variable: ht_da, "nominal_satellite_subpoint_lat": np.array(0.0), "nominal_satellite_subpoint_lon": np.array(-89.5), "nominal_satellite_height": np.array(35786.02), @@ -86,11 +87,7 @@ def _assert_orbital_parameters(orb_params): def _create_mcmip_dataset(): - fake_dataset = _create_cmip_dataset() - fake_dataset = fake_dataset.copy(deep=True) - fake_dataset['CMI_C14'] = fake_dataset['HT'] - del fake_dataset['HT'] - return fake_dataset + return _create_cmip_dataset("CMI_C14") class Test_NC_ABI_L2_get_dataset: @@ -125,6 +122,15 @@ def test_get_dataset(self): _compare_subdict(res.attrs, exp_attrs) _assert_orbital_parameters(res.attrs['orbital_parameters']) + def test_get_dataset_gfls(self): + """Test that Low Cloud and Fog filenames work.""" + from satpy.tests.utils import make_dataid + filename_info = {'platform_shortname': 'GOES16', 'scene_abbr': 'FD'} + key = make_dataid(name='MVFR_Fog_Prob') + with _create_reader_for_fake_data("GFLS", _create_cmip_dataset("MVFR_Fog_Prob"), filename_info) as reader: + res = reader.get_dataset(key, {'file_key': 'MVFR_Fog_Prob'}) + assert res.attrs["platform_name"] == "GOES-16" + class TestMCMIPReading: """Test cases of the MCMIP file format.""" @@ -314,15 +320,17 @@ def test_get_area_def_xy(self, adef): @contextlib.contextmanager -def _create_reader_for_fake_data(observation_type: str, fake_dataset: xr.Dataset): +def _create_reader_for_fake_data(observation_type: str, fake_dataset: xr.Dataset, filename_info: Optional[dict] = None): from satpy.readers.abi_l2_nc import NC_ABI_L2 - reader_args = ( - "filename", - { + if filename_info is None: + filename_info = { 'platform_shortname': 'G16', 'scene_abbr': 'C', 'scan_mode': 'M3' - }, + } + reader_args = ( + "filename", + filename_info, {'file_type': 'info', 'observation_type': observation_type}, ) with mock.patch('satpy.readers.abi_base.xr') as xr_: From a6340840e1b1a46ee9cf044dbbc3de6e94f3b860 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 14 Jun 2023 08:41:03 +0000 Subject: [PATCH 0290/1416] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- AUTHORS.md | 1 - 1 file changed, 1 
deletion(-) diff --git a/AUTHORS.md b/AUTHORS.md index e5511d2666..fa5e0272d0 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -86,4 +86,3 @@ The following people have made contributions to this project: - [Xin Zhang (zxdawn)](https://github.com/zxdawn) - [Yufei Zhu (yufeizhu600)](https://github.com/yufeizhu600) - [Youva Aoun (YouvaEUMex)](https://github.com/YouvaEUMex) - From 502efc8def459cdd2f1ee73c22fb6f0c52a70607 Mon Sep 17 00:00:00 2001 From: "Nina.Hakansson" Date: Wed, 14 Jun 2023 14:26:59 +0200 Subject: [PATCH 0291/1416] Add ancillary datasets to nwcsaf-pps yaml file This will remove the warnings: [2023-06-12 12:58:33,423 WARNING satpy.readers.yaml_reader] Can't load ancillary dataset cmic_dcwp [2023-06-12 12:58:33,424 WARNING satpy.readers.yaml_reader] Can't load ancillary dataset cmic_dcwp [2023-06-12 12:58:33,425 WARNING satpy.readers.yaml_reader] Can't load ancillary dataset cmic_dcot [2023-06-12 12:58:33,426 WARNING satpy.readers.yaml_reader] Can't load ancillary dataset cma_conditions [2023-06-12 12:58:33,427 WARNING satpy.readers.yaml_reader] Can't load ancillary dataset cma_quality [2023-06-12 12:58:33,428 WARNING satpy.readers.yaml_reader] Can't load ancillary dataset cma_status_flag [2023-06-12 12:58:33,429 WARNING satpy.readers.yaml_reader] Can't load ancillary dataset cmic_dcwp [2023-06-12 12:58:33,431 WARNING satpy.readers.yaml_reader] Can't load ancillary dataset cmic_dcre --- satpy/etc/readers/nwcsaf-pps_nc.yaml | 36 ++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/satpy/etc/readers/nwcsaf-pps_nc.yaml b/satpy/etc/readers/nwcsaf-pps_nc.yaml index d3a61e80ef..67031d99dd 100644 --- a/satpy/etc/readers/nwcsaf-pps_nc.yaml +++ b/satpy/etc/readers/nwcsaf-pps_nc.yaml @@ -85,6 +85,24 @@ datasets: name: cma_extended_pal file_type: nc_nwcsaf_cma + cma_conditions: + name: cma_conditions + file_type: nc_nwcsaf_cma + coordinates: [lon, lat] + standard_name: cma_conditions + + cma_quality: + name: cma_quality + file_type: nc_nwcsaf_cma + coordinates: [lon, lat] + standard_name: cma_quality + + cma_status_flag: + name: cma_status_flag + file_type: nc_nwcsaf_cma + coordinates: [lon, lat] + standard_name: cma_status_flag + cmaprob: name: cmaprob file_type: nc_nwcsaf_cmaprob @@ -315,3 +333,21 @@ datasets: file_key: quality file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] coordinates: [lon, lat] + + cmic_dcwp: + name: cmic_dcwp + file_key: dcwp + file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] + coordinates: [lon, lat] + + cmic_dcre: + name: cmic_dcre + file_key: dcre + file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] + coordinates: [lon, lat] + + cmic_dcot: + name: cmic_dcot + file_key: dcot + file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] + coordinates: [lon, lat] \ No newline at end of file From 5c3cc8c3a80fd509ae6a4b1d51d532659b5f55b0 Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Thu, 15 Jun 2023 13:46:44 -0500 Subject: [PATCH 0292/1416] BUG FIX: added back the handled_variables set, which was lost when trying to reduce complexity.
--- satpy/readers/clavrx.py | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/satpy/readers/clavrx.py b/satpy/readers/clavrx.py index 23255adda9..39b3afc007 100644 --- a/satpy/readers/clavrx.py +++ b/satpy/readers/clavrx.py @@ -348,8 +348,9 @@ def _available_aliases(self, ds_info, current_var): new_info.update(alias_info) yield True, new_info - def _supplement_configured(self, configured_datasets=None): + def available_datasets(self, configured_datasets=None): """Add more information if this reader can provide it.""" + handled_variables = set() for is_avail, ds_info in (configured_datasets or []): # some other file handler knows how to load this if is_avail is not None: @@ -362,6 +363,7 @@ def _supplement_configured(self, configured_datasets=None): # we can confidently say that we can provide this dataset and can # provide more info if matches and var_name in self and this_res != self.resolution: + handled_variables.add(var_name) new_info['resolution'] = self.resolution if self._is_polar(): new_info['coordinates'] = ds_info.get('coordinates', ('longitude', 'latitude')) @@ -371,6 +373,9 @@ def _supplement_configured(self, configured_datasets=None): # then we should keep it going down the chain yield is_avail, ds_info + # get data from file dynamically + yield from self._dynamic_datasets() + def _dynamic_datasets(self): """Get data from file and build aliases.""" for var_name, val in self.file_content.items(): @@ -390,14 +395,6 @@ def _dynamic_datasets(self): # yield any associated aliases yield from self._available_aliases(ds_info, var_name) - def available_datasets(self, configured_datasets=None): - """Automatically determine datasets provided by this file.""" - # update previously configured datasets - yield from self._supplement_configured(configured_datasets) - - # get data from file dynamically - yield from self._dynamic_datasets() - def get_shape(self, dataset_id, ds_info): """Get the shape.""" var_name = ds_info.get('file_key', dataset_id['name']) From 1d37638662ca2fa32f0d16eed789bc4ac732c809 Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Mon, 19 Jun 2023 19:58:02 -0500 Subject: [PATCH 0293/1416] Fix HDF4 support in geocat reader with hardcoded engine (#2507) Co-authored-by: David Hoese --- satpy/etc/readers/nwcsaf-pps_nc.yaml | 2 +- satpy/readers/geocat.py | 25 ++++++++++++++++++++++++- satpy/tests/reader_tests/test_geocat.py | 16 ++++++++++++++-- 3 files changed, 39 insertions(+), 4 deletions(-) diff --git a/satpy/etc/readers/nwcsaf-pps_nc.yaml b/satpy/etc/readers/nwcsaf-pps_nc.yaml index 67031d99dd..29fabf304c 100644 --- a/satpy/etc/readers/nwcsaf-pps_nc.yaml +++ b/satpy/etc/readers/nwcsaf-pps_nc.yaml @@ -350,4 +350,4 @@ datasets: name: cmic_dcot file_key: dcot file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] - coordinates: [lon, lat] \ No newline at end of file + coordinates: [lon, lat] diff --git a/satpy/readers/geocat.py b/satpy/readers/geocat.py index 3343e25533..5086cd899b 100644 --- a/satpy/readers/geocat.py +++ b/satpy/readers/geocat.py @@ -56,7 +56,30 @@ class GEOCATFileHandler(NetCDF4FileHandler): - """GEOCAT netCDF4 file handler.""" + """GEOCAT netCDF4 file handler. + + **Loading data with decode_times=True** + + By default, this reader will use ``xarray_kwargs={"engine": "netcdf4", "decode_times": False}`` to match the behavior of xarray when the geocat reader was first written.
To use different options, + pass ``reader_kwargs`` when loading the Scene:: + + scene = satpy.Scene(filenames, + reader='geocat', + reader_kwargs={'xarray_kwargs': {'engine': 'netcdf4', 'decode_times': True}}) + """ + + def __init__(self, filename, filename_info, filetype_info, + **kwargs): + """Open and perform initial investigation of NetCDF file.""" + kwargs.setdefault('xarray_kwargs', {}).setdefault( + 'engine', "netcdf4") + kwargs.setdefault('xarray_kwargs', {}).setdefault( + 'decode_times', False) + + super(GEOCATFileHandler, self).__init__( + filename, filename_info, filetype_info, + xarray_kwargs=kwargs["xarray_kwargs"]) sensors = { 'goes': 'goes_imager', diff --git a/satpy/tests/reader_tests/test_geocat.py b/satpy/tests/reader_tests/test_geocat.py index b9323a39e2..91de6a4265 100644 --- a/satpy/tests/reader_tests/test_geocat.py +++ b/satpy/tests/reader_tests/test_geocat.py @@ -130,10 +130,22 @@ def test_init(self): loadables = r.select_files_from_pathnames([ 'geocatL2.GOES-13.2015143.234500.nc', ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers + + def test_init_with_kwargs(self): + """Test basic init with extra parameters.""" + from satpy.readers import load_reader + r = load_reader(self.reader_configs, xarray_kwargs={"decode_times": True}) + loadables = r.select_files_from_pathnames([ + 'geocatL2.GOES-13.2015143.234500.nc', + ]) + assert len(loadables) == 1 + r.create_filehandlers(loadables, fh_kwargs={"xarray_kwargs": {'decode_times': True}}) + # make sure we have some files + assert r.file_handlers def test_load_all_old_goes(self): """Test loading all test datasets from old GOES files.""" From 0384cd30750c76566ff00e8854b5bca94b750f98 Mon Sep 17 00:00:00 2001 From: marty-sullivan Date: Tue, 20 Jun 2023 02:12:51 +0000 Subject: [PATCH 0294/1416] fix argument name in DayNightCompositor example document --- doc/source/composites.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/composites.rst b/doc/source/composites.rst index 4804aba0df..d0c494e414 100644 --- a/doc/source/composites.rst +++ b/doc/source/composites.rst @@ -173,7 +173,7 @@ In the case below, the image shows its day portion and day/night transition with night portion blacked-out instead of transparent:: >>> from satpy.composites import DayNightCompositor - >>> compositor = DayNightCompositor("dnc", lim_low=85., lim_high=88., day_night="day_only", need_alpha=False) + >>> compositor = DayNightCompositor("dnc", lim_low=85., lim_high=88., day_night="day_only", include_alpha=False) >>> composite = compositor([local_scene['true_color']) RealisticColors From 1e3c0b52ff9d3e24924331f61829ef95f495bcdd Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 21 Jun 2023 10:20:25 +0000 Subject: [PATCH 0295/1416] Move reader and tests to dedicated sub-packages --- satpy/etc/readers/gms5-vissr_l1b.yaml | 8 ++++---- satpy/readers/gms/__init__.py | 1 + satpy/readers/{ => gms}/gms5_vissr_format.py | 0 satpy/readers/{ => gms}/gms5_vissr_l1b.py | 4 ++-- satpy/readers/{ => gms}/gms5_vissr_navigation.py | 0 satpy/tests/reader_tests/gms/__init__.py | 1 + .../reader_tests/{ => gms}/test_gms5_vissr_data.py | 2 +- .../reader_tests/{ => gms}/test_gms5_vissr_l1b.py | 14 +++++++------- .../{ => gms}/test_gms5_vissr_navigation.py | 2 +- 9 files changed, 17 insertions(+), 15 deletions(-) create mode 100644 satpy/readers/gms/__init__.py rename satpy/readers/{ =>
gms}/gms5_vissr_format.py (100%) rename satpy/readers/{ => gms}/gms5_vissr_l1b.py (99%) rename satpy/readers/{ => gms}/gms5_vissr_navigation.py (100%) create mode 100644 satpy/tests/reader_tests/gms/__init__.py rename satpy/tests/reader_tests/{ => gms}/test_gms5_vissr_data.py (99%) rename satpy/tests/reader_tests/{ => gms}/test_gms5_vissr_l1b.py (97%) rename satpy/tests/reader_tests/{ => gms}/test_gms5_vissr_navigation.py (99%) diff --git a/satpy/etc/readers/gms5-vissr_l1b.yaml b/satpy/etc/readers/gms5-vissr_l1b.yaml index 1b81d0f8b0..7bcca57399 100644 --- a/satpy/etc/readers/gms5-vissr_l1b.yaml +++ b/satpy/etc/readers/gms5-vissr_l1b.yaml @@ -16,26 +16,26 @@ reader: file_types: gms5_vissr_vis: - file_reader: !!python/name:satpy.readers.gms5_vissr_l1b.GMS5VISSRFileHandler + file_reader: !!python/name:satpy.readers.gms.gms5_vissr_l1b.GMS5VISSRFileHandler file_patterns: - 'VISSR_{start_time:%Y%m%d_%H%M}_VIS.{mode}.IMG' - 'VISSR_{start_time:%Y%m%d_%H%M}_VIS.{mode}.IMG.gz' gms5_vissr_ir1: - file_reader: !!python/name:satpy.readers.gms5_vissr_l1b.GMS5VISSRFileHandler + file_reader: !!python/name:satpy.readers.gms.gms5_vissr_l1b.GMS5VISSRFileHandler file_patterns: - 'VISSR_{start_time:%Y%m%d_%H%M}_IR1.{mode}.IMG' - 'VISSR_{start_time:%Y%m%d_%H%M}_IR1.{mode}.IMG.gz' gms5_vissr_ir2: - file_reader: !!python/name:satpy.readers.gms5_vissr_l1b.GMS5VISSRFileHandler + file_reader: !!python/name:satpy.readers.gms.gms5_vissr_l1b.GMS5VISSRFileHandler file_patterns: - 'VISSR_{start_time:%Y%m%d_%H%M}_IR2.{mode}.IMG' - 'VISSR_{start_time:%Y%m%d_%H%M}_IR2.{mode}.IMG.gz' gms5_vissr_ir3: - file_reader: !!python/name:satpy.readers.gms5_vissr_l1b.GMS5VISSRFileHandler + file_reader: !!python/name:satpy.readers.gms.gms5_vissr_l1b.GMS5VISSRFileHandler file_patterns: - 'VISSR_{start_time:%Y%m%d_%H%M}_IR3.{mode}.IMG' - 'VISSR_{start_time:%Y%m%d_%H%M}_IR3.{mode}.IMG.gz' diff --git a/satpy/readers/gms/__init__.py b/satpy/readers/gms/__init__.py new file mode 100644 index 0000000000..7b1f2041c3 --- /dev/null +++ b/satpy/readers/gms/__init__.py @@ -0,0 +1 @@ +"""GMS reader module.""" diff --git a/satpy/readers/gms5_vissr_format.py b/satpy/readers/gms/gms5_vissr_format.py similarity index 100% rename from satpy/readers/gms5_vissr_format.py rename to satpy/readers/gms/gms5_vissr_format.py diff --git a/satpy/readers/gms5_vissr_l1b.py b/satpy/readers/gms/gms5_vissr_l1b.py similarity index 99% rename from satpy/readers/gms5_vissr_l1b.py rename to satpy/readers/gms/gms5_vissr_l1b.py index c5e98fd753..a5648f8f99 100644 --- a/satpy/readers/gms5_vissr_l1b.py +++ b/satpy/readers/gms/gms5_vissr_l1b.py @@ -159,8 +159,8 @@ import xarray as xr import satpy.readers._geos_area as geos_area -import satpy.readers.gms5_vissr_format as fmt -import satpy.readers.gms5_vissr_navigation as nav +import satpy.readers.gms.gms5_vissr_format as fmt +import satpy.readers.gms.gms5_vissr_navigation as nav from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.hrit_jma import mjd2datetime64 from satpy.readers.utils import generic_open diff --git a/satpy/readers/gms5_vissr_navigation.py b/satpy/readers/gms/gms5_vissr_navigation.py similarity index 100% rename from satpy/readers/gms5_vissr_navigation.py rename to satpy/readers/gms/gms5_vissr_navigation.py diff --git a/satpy/tests/reader_tests/gms/__init__.py b/satpy/tests/reader_tests/gms/__init__.py new file mode 100644 index 0000000000..d37bb755ca --- /dev/null +++ b/satpy/tests/reader_tests/gms/__init__.py @@ -0,0 +1 @@ +"""Unit tests for GMS reader.""" diff --git 
a/satpy/tests/reader_tests/test_gms5_vissr_data.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_data.py similarity index 99% rename from satpy/tests/reader_tests/test_gms5_vissr_data.py rename to satpy/tests/reader_tests/gms/test_gms5_vissr_data.py index 754cab59da..5ddf13438e 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_data.py +++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_data.py @@ -2,7 +2,7 @@ import numpy as np -import satpy.readers.gms5_vissr_format as fmt +import satpy.readers.gms.gms5_vissr_format as fmt ATTITUDE_PREDICTION = np.array( [ diff --git a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py similarity index 97% rename from satpy/tests/reader_tests/test_gms5_vissr_l1b.py rename to satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py index 85c14d1a02..4244973072 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py @@ -9,10 +9,10 @@ import xarray as xr from pyresample.geometry import AreaDefinition -import satpy.readers.gms5_vissr_format as fmt -import satpy.readers.gms5_vissr_l1b as vissr -import satpy.readers.gms5_vissr_navigation as nav -import satpy.tests.reader_tests.test_gms5_vissr_data as real_world +import satpy.readers.gms.gms5_vissr_format as fmt +import satpy.readers.gms.gms5_vissr_l1b as vissr +import satpy.readers.gms.gms5_vissr_navigation as nav +import satpy.tests.reader_tests.gms.test_gms5_vissr_data as real_world from satpy.readers import FSFile from satpy.tests.reader_tests.utils import get_jit_methods from satpy.tests.utils import make_dataid @@ -82,12 +82,12 @@ def patch_number_of_pixels_per_scanline(self, monkeypatch): }, } monkeypatch.setattr( - "satpy.readers.gms5_vissr_format.IMAGE_DATA_BLOCK_IR", IMAGE_DATA_BLOCK_IR + "satpy.readers.gms.gms5_vissr_format.IMAGE_DATA_BLOCK_IR", IMAGE_DATA_BLOCK_IR ) monkeypatch.setattr( - "satpy.readers.gms5_vissr_format.IMAGE_DATA_BLOCK_VIS", IMAGE_DATA_BLOCK_VIS + "satpy.readers.gms.gms5_vissr_format.IMAGE_DATA_BLOCK_VIS", IMAGE_DATA_BLOCK_VIS ) - monkeypatch.setattr("satpy.readers.gms5_vissr_format.IMAGE_DATA", IMAGE_DATA) + monkeypatch.setattr("satpy.readers.gms.gms5_vissr_format.IMAGE_DATA", IMAGE_DATA) @pytest.fixture( params=[ diff --git a/satpy/tests/reader_tests/test_gms5_vissr_navigation.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py similarity index 99% rename from satpy/tests/reader_tests/test_gms5_vissr_navigation.py rename to satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py index ec564e7ab8..ef767899eb 100644 --- a/satpy/tests/reader_tests/test_gms5_vissr_navigation.py +++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py @@ -3,7 +3,7 @@ import numpy as np import pytest -import satpy.readers.gms5_vissr_navigation as nav +import satpy.readers.gms.gms5_vissr_navigation as nav from satpy.tests.reader_tests.utils import get_jit_methods # Navigation references computed with JMA's Msial library (files From 2b67b8fcd9068f81a536e08881b8e1c405072fac Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 21 Jun 2023 10:23:21 +0000 Subject: [PATCH 0296/1416] Fix trailing whitespace --- satpy/readers/gms/gms5_vissr_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/gms/gms5_vissr_l1b.py b/satpy/readers/gms/gms5_vissr_l1b.py index a5648f8f99..f3c6898f65 100644 --- a/satpy/readers/gms/gms5_vissr_l1b.py +++ b/satpy/readers/gms/gms5_vissr_l1b.py @@ -723,7 +723,7 @@ class AreaDefEstimator: """Estimate area 
definition for VISSR images.""" full_disk_size = { - "IR": 2366, + "IR": 2366, "VIS": 9464, } From 408a981cce324ca7243ed09a98a8fe5274986b87 Mon Sep 17 00:00:00 2001 From: andream Date: Wed, 21 Jun 2023 16:51:33 +0200 Subject: [PATCH 0297/1416] add test for checking that code doesn't fail if header handling is removed --- satpy/tests/reader_tests/test_seviri_l1b_native.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py index a7de60dcc2..aaf8dc07e2 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_native.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py @@ -1423,6 +1423,14 @@ def test_header_warning(): with pytest.warns(UserWarning, match=exp_warning): NativeMSGFileHandler('myfile', {}, None) + # check that without Main Header the code doesn't crash + header_missing = header_good.copy() + header_missing.pop('15_MAIN_PRODUCT_HEADER') + fromfile.return_value = header_missing + with warnings.catch_warnings(): + warnings.simplefilter("error") + NativeMSGFileHandler('myfile', {}, None) + @pytest.mark.parametrize( "starts_with, expected", From 7941c2256c2982fa6d1bb40646da9279043e7c51 Mon Sep 17 00:00:00 2001 From: andream Date: Wed, 21 Jun 2023 16:54:06 +0200 Subject: [PATCH 0298/1416] add handling of missing main header by issuing a log message --- satpy/readers/seviri_l1b_native.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index 89d54ab3d0..4f230c6526 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -370,13 +370,16 @@ def _read_header(self): self.mda['hrv_number_of_lines'] = int(sec15hd["NumberLinesHRV"]['Value']) self.mda['hrv_number_of_columns'] = cols_hrv - if self.header['15_MAIN_PRODUCT_HEADER']['QQOV']['Value'] == 'NOK': - warnings.warn( - "The quality flag for this file indicates not OK. " - "Use this data with caution!", - UserWarning, - stacklevel=2 - ) + if '15_MAIN_PRODUCT_HEADER' not in self.header: + logger.info("Quality flag check was not possible due to missing 15_MAIN_PRODUCT_HEADER.") + else: + if self.header['15_MAIN_PRODUCT_HEADER']['QQOV']['Value'] == 'NOK': + warnings.warn( + "The quality flag for this file indicates not OK. " + "Use this data with caution!", + UserWarning, + stacklevel=2 + ) def _read_trailer(self): From 7432312089d20acdd9c9e97754d6993aa65084cf Mon Sep 17 00:00:00 2001 From: andream Date: Wed, 21 Jun 2023 17:53:13 +0200 Subject: [PATCH 0299/1416] add test --- satpy/readers/seviri_l1b_native.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index 4f230c6526..cdad865f0c 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -372,14 +372,13 @@ def _read_header(self): if '15_MAIN_PRODUCT_HEADER' not in self.header: logger.info("Quality flag check was not possible due to missing 15_MAIN_PRODUCT_HEADER.") - else: - if self.header['15_MAIN_PRODUCT_HEADER']['QQOV']['Value'] == 'NOK': - warnings.warn( - "The quality flag for this file indicates not OK. " - "Use this data with caution!", - UserWarning, - stacklevel=2 - ) + elif self.header['15_MAIN_PRODUCT_HEADER']['QQOV']['Value'] == 'NOK': + warnings.warn( + "The quality flag for this file indicates not OK. 
" + "Use this data with caution!", + UserWarning, + stacklevel=2 + ) def _read_trailer(self): From 6d881054f1e3a799c95e2fec27d5765c26027599 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 21 Jun 2023 11:19:46 -0500 Subject: [PATCH 0300/1416] Add default enhancements for low cloud/fog products --- satpy/etc/enhancements/abi.yaml | 38 +++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/satpy/etc/enhancements/abi.yaml b/satpy/etc/enhancements/abi.yaml index c1ef573d07..b89d2d0785 100644 --- a/satpy/etc/enhancements/abi.yaml +++ b/satpy/etc/enhancements/abi.yaml @@ -235,3 +235,41 @@ enhancements: min_value: 0.0, max_value: 1.0, } + + # L2 low cloud/fog products + mvfr_prob: + name: MVFR_Fog_Prob + operations: + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: crude + min_stretch: 0 + max_stretch: 100 + ifr_prob: + name: IFR_Fog_Prob + operations: + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: crude + min_stretch: 0 + max_stretch: 100 + lifr_prob: + name: LIFR_Fog_Prob + operations: + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: crude + min_stretch: 0 + max_stretch: 100 + fog_depth: + name: Fog_Depth + operations: + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: crude + min_stretch: 0 + max_stretch: 500 From c3205c68fba7cbebf368d5d0c0506d69201494fd Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 21 Jun 2023 21:44:17 +0100 Subject: [PATCH 0301/1416] Include documentation of how to add text to an image. --- doc/source/writers.rst | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/doc/source/writers.rst b/doc/source/writers.rst index f453f4d5a5..64be014461 100644 --- a/doc/source/writers.rst +++ b/doc/source/writers.rst @@ -136,3 +136,34 @@ and save them all at once. ... compute=False) >>> results = [res1, res2] >>> compute_writer_results(results) + + +Adding text to images +===================== + +Satpy, via `pydecorate`, can add text to images when they're being saved. +To use this functionality, you must create a dictionary describing the text +to be added. + +:: + + >>> decodict = {'decorate': [{'text': {'txt': f' {my_text}', + >>> 'align': {'top_bottom': 'top', 'left_right': 'left'}, + >>> 'font': , + >>> 'font_size': 48, + >>> 'line': 'white', + >>> 'bg_opacity': 255, + >>> 'bg': 'black', + >>> 'height': 30, + >>> }}]} + +Where `my_text` is the text you wish to add and `` is the +location of the font file you wish to use, often in `/usr/share/fonts/` + +This dictionary can then be passed to the `save_dataset` or `save_datasets` command. 
+ +:: + + >>> scene.save_dataset(my_dataset, writer='simple_image', fill_value=False, + >>> decorate=decodict) + From 93a1f0f2426719909ac37c669ae47a37c67c0b5b Mon Sep 17 00:00:00 2001 From: Gionata Ghiggi Date: Thu, 22 Jun 2023 09:39:28 +0200 Subject: [PATCH 0302/1416] Update satpy/readers/abi_l1b.py Co-authored-by: Martin Raspaud --- satpy/readers/abi_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/abi_l1b.py b/satpy/readers/abi_l1b.py index aa32955fef..bde34b5d82 100644 --- a/satpy/readers/abi_l1b.py +++ b/satpy/readers/abi_l1b.py @@ -37,7 +37,7 @@ class NC_ABI_L1B(NC_ABI_BASE): def __init__(self, filename, filename_info, filetype_info, clip_negative_radiances=None): """Open the NetCDF file with xarray and prepare the Dataset for reading.""" - super(NC_ABI_L1B, self).__init__(filename, filename_info, filetype_info) + super().__init__(filename, filename_info, filetype_info) if clip_negative_radiances is None: clip_negative_radiances = satpy.config.get("clip_negative_radiances") self.clip_negative_radiances = clip_negative_radiances From 864d3cd1fe5e321de800db98e3e6392debbe9485 Mon Sep 17 00:00:00 2001 From: Gionata Ghiggi Date: Thu, 22 Jun 2023 09:42:48 +0200 Subject: [PATCH 0303/1416] Update satpy/tests/reader_tests/test_abi_l1b.py Co-authored-by: Martin Raspaud --- satpy/tests/reader_tests/test_abi_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index 8d9f02bc8c..344f918f8f 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -276,7 +276,7 @@ def setUp(self): } ) - super(Test_NC_ABI_L1B_clipped_ir_cal, self).setUp(rad=rad, clip_negative_radiances=True) + super().setUp(rad=rad, clip_negative_radiances=True) def test_clip_negative_radiances_attribute(self): """Assert that clip_negative_radiances has been set to True.""" From b41ab921a2607e71d6d581325cc2874ed68eecb2 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Thu, 22 Jun 2023 11:12:26 +0200 Subject: [PATCH 0304/1416] Change config name --- doc/source/config.rst | 6 +++--- satpy/_config.py | 2 +- satpy/readers/abi_l1b.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/source/config.rst b/doc/source/config.rst index ee6a86cf30..b1777c9751 100644 --- a/doc/source/config.rst +++ b/doc/source/config.rst @@ -261,11 +261,11 @@ correction. Clipping Negative Infrared Radiances ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -* **Environment variable**: ``SATPY_CLIP_NEGATIVE_RADIANCES`` -* **YAML/Config Key**: ``clip_negative_radiances`` +* **Environment variable**: ``SATPY_READERS__CLIP_NEGATIVE_RADIANCES`` +* **YAML/Config Key**: ``readers.clip_negative_radiances`` * **Default**: False -Whether to clip negative infrared radiances to the minimum possible value before +Whether to clip negative infrared radiances to the minimum allowable value before computing the brightness temperature. If ``clip_negative_radiances=False``, pixels with negative radiances will have ``np.nan`` brightness temperatures. 
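For example, the option can be enabled at runtime through Satpy's donfig-based configuration object (a minimal sketch; ``my_files`` stands in for a hypothetical list of ABI L1b filenames, and ``C14`` is one of the ABI infrared channels):

.. code-block:: python

    import satpy

    # Scoped alternative to exporting SATPY_READERS__CLIP_NEGATIVE_RADIANCES=true:
    # inside this block, negative IR radiances are clipped to the minimum
    # allowable value before the brightness temperature calculation instead of
    # producing NaN brightness temperatures.
    with satpy.config.set({"readers.clip_negative_radiances": True}):
        scn = satpy.Scene(filenames=my_files, reader="abi_l1b")
        scn.load(["C14"])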
diff --git a/satpy/_config.py b/satpy/_config.py index 1582cac6e7..ae5a52fbd3 100644 --- a/satpy/_config.py +++ b/satpy/_config.py @@ -62,7 +62,7 @@ def impr_files(module_name: str) -> Path: 'demo_data_dir': '.', 'download_aux': True, 'sensor_angles_position_preference': 'actual', - 'clip_negative_radiances': False, + 'readers.clip_negative_radiances': False, } # Satpy main configuration object diff --git a/satpy/readers/abi_l1b.py b/satpy/readers/abi_l1b.py index bde34b5d82..dafdc8a373 100644 --- a/satpy/readers/abi_l1b.py +++ b/satpy/readers/abi_l1b.py @@ -39,7 +39,7 @@ def __init__(self, filename, filename_info, filetype_info, clip_negative_radianc """Open the NetCDF file with xarray and prepare the Dataset for reading.""" super().__init__(filename, filename_info, filetype_info) if clip_negative_radiances is None: - clip_negative_radiances = satpy.config.get("clip_negative_radiances") + clip_negative_radiances = satpy.config.get("readers.clip_negative_radiances") self.clip_negative_radiances = clip_negative_radiances def get_dataset(self, key, info): From 3e358a3f54a953320c8676005f35e1fe349ebf0b Mon Sep 17 00:00:00 2001 From: ghiggi Date: Thu, 22 Jun 2023 11:18:02 +0200 Subject: [PATCH 0305/1416] Add func as arguement --- satpy/scene.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/satpy/scene.py b/satpy/scene.py index 87254918d3..0f945d45b5 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -50,9 +50,8 @@ def _get_area_resolution(area): return resolution -def _aggregate_data_array(data_array, **coarsen_kwargs): +def _aggregate_data_array(data_array, func, **coarsen_kwargs): """Aggregate xr.DataArray.""" - func = coarsen_kwargs.pop("func") res = data_array.coarsen(**coarsen_kwargs) if callable(func): out = res.reduce(func) From be2bd8756c18bb6d9fbf33339d7282454fe0b3cc Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Thu, 22 Jun 2023 10:29:38 +0100 Subject: [PATCH 0306/1416] Update docs describing text addition to saved images. --- doc/source/writers.rst | 25 ++++++++++++------------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/doc/source/writers.rst b/doc/source/writers.rst index 64be014461..12ee786f56 100644 --- a/doc/source/writers.rst +++ b/doc/source/writers.rst @@ -141,29 +141,28 @@ and save them all at once. Adding text to images ===================== -Satpy, via `pydecorate`, can add text to images when they're being saved. +Satpy, via :doc:`pydecorate `, can add text to images when they're being saved. To use this functionality, you must create a dictionary describing the text to be added. -:: +.. code-block:: python >>> decodict = {'decorate': [{'text': {'txt': f' {my_text}', - >>> 'align': {'top_bottom': 'top', 'left_right': 'left'}, - >>> 'font': , - >>> 'font_size': 48, - >>> 'line': 'white', - >>> 'bg_opacity': 255, - >>> 'bg': 'black', - >>> 'height': 30, - >>> }}]} + ... 'align': {'top_bottom': 'top', 'left_right': 'left'}, + ... 'font': , + ... 'font_size': 48, + ... 'line': 'white', + ... 'bg_opacity': 255, + ... 'bg': 'black', + ... 'height': 30, + ... }}]} Where `my_text` is the text you wish to add and `` is the location of the font file you wish to use, often in `/usr/share/fonts/` This dictionary can then be passed to the `save_dataset` or `save_datasets` command. -:: +.. code-block:: python >>> scene.save_dataset(my_dataset, writer='simple_image', fill_value=False, - >>> decorate=decodict) - + ... 
decorate=decodict) From 4d2b9d30fb3eccc21f928267ec7694811a120753 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Thu, 22 Jun 2023 16:05:20 +0200 Subject: [PATCH 0307/1416] Correct donfig config --- satpy/_config.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/satpy/_config.py b/satpy/_config.py index ae5a52fbd3..4abc00aba2 100644 --- a/satpy/_config.py +++ b/satpy/_config.py @@ -62,7 +62,9 @@ def impr_files(module_name: str) -> Path: 'demo_data_dir': '.', 'download_aux': True, 'sensor_angles_position_preference': 'actual', - 'readers.clip_negative_radiances': False, + 'readers': { + 'clip_negative_radiances': False, + }, } # Satpy main configuration object From dba604e5bd025bfb29c7ed75f99657c20bbc462e Mon Sep 17 00:00:00 2001 From: Gionata Ghiggi Date: Thu, 22 Jun 2023 16:41:31 +0200 Subject: [PATCH 0308/1416] Update for using attrs["resolution"] --- satpy/scene.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/scene.py b/satpy/scene.py index 0f945d45b5..7ace7432e8 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -46,7 +46,7 @@ def _get_area_resolution(area): try: resolution = max(area.pixel_size_x, area.pixel_size_y) except AttributeError: - resolution = max(area.lats.resolution, area.lons.resolution) + resolution = max(area.lats.attrs["resolution"], area.lons.attrs["resolution"]) return resolution From 3a920145eb31ad667811aec0c7e2b7488f69e588 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Thu, 22 Jun 2023 17:51:37 +0000 Subject: [PATCH 0309/1416] Remove formatting comments --- satpy/readers/gms/gms5_vissr_format.py | 2 -- satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py | 6 ------ satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py | 4 ---- 3 files changed, 12 deletions(-) diff --git a/satpy/readers/gms/gms5_vissr_format.py b/satpy/readers/gms/gms5_vissr_format.py index cfd7241192..a5052097eb 100644 --- a/satpy/readers/gms/gms5_vissr_format.py +++ b/satpy/readers/gms/gms5_vissr_format.py @@ -32,7 +32,6 @@ CHANNELS = [("VIS", R4), ("IR1", R4), ("IR2", R4), ("WV", R4)] VISIR_SOLAR = [("VIS", R4), ("IR", R4)] -# fmt: off CONTROL_BLOCK = np.dtype([('control_block_size', I2), ('head_block_number_of_parameter_block', I2), ('parameter_block_size', I2), @@ -396,4 +395,3 @@ 'dtype': IMAGE_DATA_BLOCK_IR } } -# fmt: on diff --git a/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py index 4244973072..31482f1e10 100644 --- a/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py @@ -38,14 +38,12 @@ def test_get_earth_mask(self): first_earth_pixels = np.array([-1, 1, 0, -1]) last_earth_pixels = np.array([-1, 3, 2, -1]) edges = first_earth_pixels, last_earth_pixels - # fmt: off mask_exp = np.array( [[0, 0, 0, 0], [0, 1, 1, 1], [1, 1, 1, 0], [0, 0, 0, 0]] ) - # fmt: on mask = vissr.get_earth_mask(mask_exp.shape, edges) np.testing.assert_equal(mask, mask_exp) @@ -213,7 +211,6 @@ def coord_conv(self): This has the advantage that we can test with very small 2x2 images. Otherwise, all pixels would be in space. 
""" - # fmt: off conv = np.zeros(1, dtype=fmt.COORDINATE_CONVERSION_PARAMETERS) cline = conv["central_line_number_of_vissr_frame"] @@ -250,7 +247,6 @@ def coord_conv(self): conv["orbital_parameters"]["longitude_of_ssp"] = 141.0 conv["orbital_parameters"]["latitude_of_ssp"] = 1.0 - # fmt: on return conv @pytest.fixture @@ -435,7 +431,6 @@ def lons_lats_exp(self, dataset_id): pix = [1672, 1672, 1673, 1673] lin = [686, 2089, 686, 2089] """ - # fmt: off expectations = { "IR1": { "lons": [[139.680120, 139.718902], @@ -450,7 +445,6 @@ def lons_lats_exp(self, dataset_id): [-34.940439, -34.940370]] } } - # fmt: on exp = expectations[dataset_id["name"]] lons = xr.DataArray(exp["lons"], dims=("y", "x")) lats = xr.DataArray(exp["lats"], dims=("y", "x")) diff --git a/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py index ef767899eb..47c5fd044c 100644 --- a/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py +++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py @@ -10,7 +10,6 @@ # VISSR_19960217_2331_IR1.A.IMG and VISSR_19960217_2331_VIS.A.IMG). The VIS # navigation is slightly off (< 0.01 deg) compared to JMA's reference. # This is probably due to precision problems with the copied numbers. -# fmt: off IR_NAVIGATION_REFERENCE = [ { "pixel": nav.Pixel(line=686, pixel=1680), @@ -209,7 +208,6 @@ ) }, ] -# fmt: on NAVIGATION_REFERENCE = VIS_NAVIGATION_REFERENCE + IR_NAVIGATION_REFERENCE @@ -327,7 +325,6 @@ class TestImageNavigation: @pytest.fixture def expected(self): """Get expected coordinates.""" - # fmt: off exp = { "lon": [[-114.56923, -112.096837, -109.559702], [8.33221, 8.793893, 9.22339], @@ -336,7 +333,6 @@ def expected(self): [-42.513409, -39.790231, -37.06392], [3.342834, 6.07043, 8.795932]] } - # fmt: on return exp def test_get_lons_lats(self, navigation_params, expected): From 059635a0a8ba8213114b756d54080f65a83ea0ad Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 27 Jun 2023 13:14:23 +0200 Subject: [PATCH 0310/1416] Add a median filter modifier --- satpy/etc/composites/visir.yaml | 5 ++++ satpy/modifiers/filters.py | 33 ++++++++++++++++++++ satpy/tests/modifier_tests/test_filters.py | 35 ++++++++++++++++++++++ 3 files changed, 73 insertions(+) create mode 100644 satpy/modifiers/filters.py create mode 100644 satpy/tests/modifier_tests/test_filters.py diff --git a/satpy/etc/composites/visir.yaml b/satpy/etc/composites/visir.yaml index 8bd61ff7a8..0bb177e9ff 100644 --- a/satpy/etc/composites/visir.yaml +++ b/satpy/etc/composites/visir.yaml @@ -96,6 +96,11 @@ modifiers: - solar_azimuth_angle - solar_zenith_angle + median5x5: + modifier: !!python/name:satpy.modifiers.filters.Median + median_filter_params: + size: 5 + composites: airmass: diff --git a/satpy/modifiers/filters.py b/satpy/modifiers/filters.py new file mode 100644 index 0000000000..d8c54c15f3 --- /dev/null +++ b/satpy/modifiers/filters.py @@ -0,0 +1,33 @@ +"""Tests for image filters.""" +import logging + +import xarray as xr +from dask_image.ndfilters import median_filter + +from satpy.modifiers import ModifierBase + +logger = logging.getLogger('filters') + + +class Median(ModifierBase): + """Apply a median filter to the band.""" + + def __init__(self, median_filter_params, **kwargs): + """Create the instance. + + Args: + median_filter_params: The arguments to pass to dask-image's median_filter function. For example, {size: 3} + makes give the median filter a kernel of size 3. 
+ + """ + self.median_filter_params = median_filter_params + super().__init__(**kwargs) + + def __call__(self, arrays, **info): + """Get the median filtered band.""" + data = arrays[0] + logger.debug(f"Apply median filtering with parameters {self.median_filter_params}.") + res = xr.DataArray(median_filter(data.data, **self.median_filter_params), + dims=data.dims, attrs=data.attrs, coords=data.coords) + self.apply_modifier_info(data, res) + return res diff --git a/satpy/tests/modifier_tests/test_filters.py b/satpy/tests/modifier_tests/test_filters.py new file mode 100644 index 0000000000..62e732d300 --- /dev/null +++ b/satpy/tests/modifier_tests/test_filters.py @@ -0,0 +1,35 @@ +"""Implementation of some image filters.""" + +import logging + +import dask.array as da +import numpy as np +import xarray as xr + +from satpy.modifiers.filters import Median + + +def test_median(caplog): + """Test the median filter modifier.""" + caplog.set_level(logging.DEBUG) + dims = "y", "x" + coordinates = dict(x=np.arange(6), y=np.arange(6)) + attrs = dict(units="K") + median_filter_params = dict(size=3) + name = "median_filter" + median_filter = Median(median_filter_params, name=name) + array = xr.DataArray(da.arange(36).reshape((6, 6)), coords=coordinates, dims=dims, attrs=attrs) + res = median_filter([array]) + filtered_array = np.array([[1, 2, 3, 4, 5, 5], + [6, 7, 8, 9, 10, 11], + [12, 13, 14, 15, 16, 17], + [18, 19, 20, 21, 22, 23], + [24, 25, 26, 27, 28, 29], + [30, 30, 31, 32, 33, 34]]) + np.testing.assert_allclose(res, filtered_array) + assert res.dims == dims + assert attrs.items() <= res.attrs.items() + assert res.attrs["name"] == name + np.testing.assert_equal(res.coords["x"], coordinates["x"]) + np.testing.assert_equal(res.coords["y"], coordinates["y"]) + assert "Apply median filtering with parameters {'size': 3}" in caplog.text From 47db52b89f5db6d71b27ebafbaf62288c202adea Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 27 Jun 2023 13:14:46 +0200 Subject: [PATCH 0311/1416] Add dask-image for tests and filters --- continuous_integration/environment.yaml | 1 + setup.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index 41fc50627a..192ed9e6e3 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -5,6 +5,7 @@ dependencies: - xarray!=2022.9.0 - dask - distributed + - dask-image - donfig - appdirs - toolz diff --git a/setup.py b/setup.py index 2ad639c6fa..995855b195 100644 --- a/setup.py +++ b/setup.py @@ -39,7 +39,7 @@ 'rasterio', 'geoviews', 'trollimage', 'fsspec', 'bottleneck', 'rioxarray', 'pytest', 'pytest-lazy-fixture', 'defusedxml', 's3fs', 'eccodes', 'h5netcdf', 'xarray-datatree', - 'skyfield', 'ephem', 'pint-xarray', 'astropy'] + 'skyfield', 'ephem', 'pint-xarray', 'astropy', 'dask-image'] extras_require = { # Readers: @@ -76,6 +76,7 @@ # Composites/Modifiers: 'rayleigh': ['pyspectral >= 0.10.1'], 'angles': ['pyorbital >= 1.3.1'], + 'filters': ['dask-image'], # MultiScene: 'animations': ['imageio'], # Documentation: From 2336dcbf374be0a6475598b1580d7112e54d6b3e Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 27 Jun 2023 13:25:19 +0200 Subject: [PATCH 0312/1416] Add dask-image for doc generation --- doc/rtd_environment.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/rtd_environment.yml b/doc/rtd_environment.yml index 82168df77d..d18466e5c9 100644 --- a/doc/rtd_environment.yml +++ b/doc/rtd_environment.yml @@ -6,6 
+6,7 @@ dependencies: - pip - appdirs - dask + - dask-image - defusedxml - donfig # 2.19.1 seems to cause library linking issues From 51c9c1c44d317a489ac380bec21230d5bcf8d56b Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 27 Jun 2023 13:48:45 +0200 Subject: [PATCH 0313/1416] Fix CF tests due to new xarray release This is due to https://github.com/pydata/xarray/issues/7388 not being solved yet. --- satpy/tests/writer_tests/test_cf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 2ddf51c1e3..79c270157a 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -1432,5 +1432,5 @@ def _should_use_compression_keyword(): versions = _get_backend_versions() return ( versions["libnetcdf"] >= Version("4.9.0") and - versions["xarray"] >= Version("2023.6") + versions["xarray"] >= Version("2023.7") ) From ad83f08396e086d8f86276613d4b505b8ffc2fef Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 27 Jun 2023 17:21:26 +0200 Subject: [PATCH 0314/1416] Replace self.assertEqual --- satpy/tests/writer_tests/test_cf.py | 92 ++++++++++++++--------------- 1 file changed, 44 insertions(+), 48 deletions(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 79c270157a..28ffad7a7a 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -74,10 +74,10 @@ def test_lonlat_storage(tmp_path): """Test correct storage for area with lon/lat units.""" from ..utils import make_fake_scene scn = make_fake_scene( - {"ketolysis": np.arange(25).reshape(5, 5)}, - daskify=True, - area=create_area_def("mavas", 4326, shape=(5, 5), - center=(0, 0), resolution=(1, 1))) + {"ketolysis": np.arange(25).reshape(5, 5)}, + daskify=True, + area=create_area_def("mavas", 4326, shape=(5, 5), + center=(0, 0), resolution=(1, 1))) filename = os.fspath(tmp_path / "test.nc") scn.save_datasets(filename=filename, writer="cf", include_lonlats=False) @@ -212,8 +212,7 @@ def test_save_array(self): with xr.open_dataset(filename) as f: np.testing.assert_array_equal(f['test-array'][:], [1, 2, 3]) expected_prereq = ("DataQuery(name='hej')") - self.assertEqual(f['test-array'].attrs['prerequisites'], - expected_prereq) + assert f['test-array'].attrs['prerequisites'] == expected_prereq def test_save_array_coords(self): """Test saving array with coordinates.""" @@ -245,8 +244,7 @@ def test_save_array_coords(self): self.assertNotIn('_FillValue', f['x'].attrs) self.assertNotIn('_FillValue', f['y'].attrs) expected_prereq = ("DataQuery(name='hej')") - self.assertEqual(f['test-array'].attrs['prerequisites'], - expected_prereq) + assert f['test-array'].attrs['prerequisites'] == expected_prereq def test_save_dataset_a_digit(self): """Test saving an array to netcdf/cf where dataset name starting with a digit.""" @@ -274,7 +272,7 @@ def test_save_dataset_a_digit_prefix_include_attr(self): scn.save_datasets(filename=filename, writer='cf', include_orig_name=True, numeric_name_prefix='TEST') with xr.open_dataset(filename) as f: np.testing.assert_array_equal(f['TEST1'][:], [1, 2, 3]) - self.assertEqual(f['TEST1'].attrs['original_name'], '1') + assert f['TEST1'].attrs['original_name'] == '1' def test_save_dataset_a_digit_no_prefix_include_attr(self): """Test saving an array to netcdf/cf dataset name starting with a digit with no prefix include orig name.""" @@ -303,10 +301,8 @@ def test_ancillary_variables(self): with TempFile() as filename: 
scn.save_datasets(filename=filename, writer='cf') with xr.open_dataset(filename) as f: - self.assertEqual(f['test-array-1'].attrs['ancillary_variables'], - 'test-array-2') - self.assertEqual(f['test-array-2'].attrs['ancillary_variables'], - 'test-array-1') + assert f['test-array-1'].attrs['ancillary_variables'] == 'test-array-2' + assert f['test-array-2'].attrs['ancillary_variables'] == 'test-array-1' def test_groups(self): """Test creating a file with groups.""" @@ -412,7 +408,7 @@ def test_bounds(self): with xr.open_dataset(filename, decode_cf=True) as f: bounds_exp = np.array([[start_time, end_time]], dtype='datetime64[m]') np.testing.assert_array_equal(f['time_bnds'], bounds_exp) - self.assertEqual(f['time'].attrs['bounds'], 'time_bnds') + assert f['time'].attrs['bounds'] == 'time_bnds' # Check raw time coordinates & bounds with xr.open_dataset(filename, decode_cf=False) as f: @@ -512,16 +508,16 @@ def test_header_attrs(self): writer='cf') with xr.open_dataset(filename) as f: self.assertIn('history', f.attrs) - self.assertEqual(f.attrs['sensor'], 'SEVIRI') - self.assertEqual(f.attrs['orbit'], 99999) + assert f.attrs['sensor'] == 'SEVIRI' + assert f.attrs['orbit'] == 99999 np.testing.assert_array_equal(f.attrs['list'], [1, 2, 3]) - self.assertEqual(f.attrs['set'], '{1, 2, 3}') - self.assertEqual(f.attrs['dict_a'], 1) - self.assertEqual(f.attrs['dict_b'], 2) - self.assertEqual(f.attrs['nested_outer_inner1'], 1) - self.assertEqual(f.attrs['nested_outer_inner2'], 2) - self.assertEqual(f.attrs['bool'], 'true') - self.assertEqual(f.attrs['bool_'], 'true') + assert f.attrs['set'] == '{1, 2, 3}' + assert f.attrs['dict_a'] == 1 + assert f.attrs['dict_b'] == 2 + assert f.attrs['nested_outer_inner1'] == 1 + assert f.attrs['nested_outer_inner2'] == 2 + assert f.attrs['bool'] == 'true' + assert f.attrs['bool_'] == 'true' self.assertTrue('none' not in f.attrs.keys()) def get_test_attrs(self): @@ -553,9 +549,9 @@ def get_test_attrs(self): 'dict': {'a': 1, 'b': 2}, 'nested_dict': {'l1': {'l2': {'l3': np.array([1, 2, 3], dtype='uint8')}}}, 'raw_metadata': OrderedDict([ - ('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])), - ('flag', np.bool_(True)), - ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))])) + ('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])), + ('flag', np.bool_(True)), + ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))])) ])} encoded = {'name': 'IR_108', 'start_time': '2018-01-01 00:00:00', @@ -613,12 +609,12 @@ def assertDictWithArraysEqual(self, d1, d2): val2 = d2[key] if isinstance(val1, np.ndarray): np.testing.assert_array_equal(val1, val2) - self.assertEqual(val1.dtype, val2.dtype) + assert val1.dtype == val2.dtype else: - self.assertEqual(val1, val2) + assert val1 == val2 if isinstance(val1, (np.floating, np.integer, np.bool_)): self.assertTrue(isinstance(val2, np.generic)) - self.assertEqual(val1.dtype, val2.dtype) + assert val1.dtype == val2.dtype def test_encode_attrs_nc(self): """Test attributes encoding.""" @@ -838,18 +834,18 @@ def test_area2cf(self): ds.attrs['area'] = geos res = area2cf(ds, include_lonlats=False) - self.assertEqual(len(res), 2) - self.assertEqual(res[0].size, 1) # grid mapping variable - self.assertEqual(res[0].name, res[1].attrs['grid_mapping']) + assert len(res) == 2 + assert res[0].size == 1 # grid mapping variable + assert res[0].name == res[1].attrs['grid_mapping'] # b) Area Definition and include_lonlats=False ds = ds_base.copy(deep=True) ds.attrs['area'] = geos res = area2cf(ds, 
include_lonlats=True) # same as above - self.assertEqual(len(res), 2) - self.assertEqual(res[0].size, 1) # grid mapping variable - self.assertEqual(res[0].name, res[1].attrs['grid_mapping']) + assert len(res) == 2 + assert res[0].size == 1 # grid mapping variable + assert res[0].name == res[1].attrs['grid_mapping'] # but now also have the lon/lats self.assertIn('longitude', res[1].coords) self.assertIn('latitude', res[1].coords) @@ -860,7 +856,7 @@ def test_area2cf(self): ds.attrs['area'] = swath res = area2cf(ds, include_lonlats=False) - self.assertEqual(len(res), 1) + assert len(res) == 1 self.assertIn('longitude', res[0].coords) self.assertIn('latitude', res[0].coords) self.assertNotIn('grid_mapping', res[0].attrs) @@ -874,7 +870,7 @@ def _gm_matches(gmapping, expected): for attr_key, attr_val in expected.attrs.items(): test_val = gmapping.attrs[attr_key] if attr_val is None or isinstance(attr_val, str): - self.assertEqual(test_val, attr_val) + assert test_val == attr_val else: np.testing.assert_almost_equal(test_val, attr_val, decimal=3) @@ -908,9 +904,9 @@ def _gm_matches(gmapping, expected): new_ds, grid_mapping = _add_grid_mapping(ds) if 'sweep_angle_axis' in grid_mapping.attrs: # older versions of pyproj might not include this - self.assertEqual(grid_mapping.attrs['sweep_angle_axis'], 'y') + assert grid_mapping.attrs['sweep_angle_axis'] == 'y' - self.assertEqual(new_ds.attrs['grid_mapping'], 'geos') + assert new_ds.attrs['grid_mapping'] == 'geos' _gm_matches(grid_mapping, geos_expected) # should not have been modified self.assertNotIn('grid_mapping', ds.attrs) @@ -937,7 +933,7 @@ def _gm_matches(gmapping, expected): self.assertIn('PARAMETER["lon_0",4.535', wkt) self.assertIn('PARAMETER["o_lat_p",90', wkt) self.assertIn('PARAMETER["o_lon_p",-5.465', wkt) - self.assertEqual(new_ds.attrs['grid_mapping'], 'cosmo7') + assert new_ds.attrs['grid_mapping'] == 'cosmo7' # c) Projection Transverse Mercator lat_0 = 36.5 @@ -963,7 +959,7 @@ def _gm_matches(gmapping, expected): ds = ds_base.copy() ds.attrs['area'] = tmerc new_ds, grid_mapping = _add_grid_mapping(ds) - self.assertEqual(new_ds.attrs['grid_mapping'], 'tmerc') + assert new_ds.attrs['grid_mapping'] == 'tmerc' _gm_matches(grid_mapping, tmerc_expected) # d) Projection that has a representation but no explicit a/b @@ -990,7 +986,7 @@ def _gm_matches(gmapping, expected): ds.attrs['area'] = geos new_ds, grid_mapping = _add_grid_mapping(ds) - self.assertEqual(new_ds.attrs['grid_mapping'], 'geos') + assert new_ds.attrs['grid_mapping'] == 'geos' _gm_matches(grid_mapping, geos_expected) # e) oblique Mercator @@ -1021,7 +1017,7 @@ def _gm_matches(gmapping, expected): ds.attrs['area'] = area new_ds, grid_mapping = _add_grid_mapping(ds) - self.assertEqual(new_ds.attrs['grid_mapping'], 'omerc_otf') + assert new_ds.attrs['grid_mapping'] == 'omerc_otf' _gm_matches(grid_mapping, omerc_expected) # f) Projection that has a representation but no explicit a/b @@ -1046,7 +1042,7 @@ def _gm_matches(gmapping, expected): ds.attrs['area'] = geos new_ds, grid_mapping = _add_grid_mapping(ds) - self.assertEqual(new_ds.attrs['grid_mapping'], 'geos') + assert new_ds.attrs['grid_mapping'] == 'geos' _gm_matches(grid_mapping, geos_expected) def test_add_lonlat_coords(self): @@ -1068,7 +1064,7 @@ def test_add_lonlat_coords(self): # original should be unmodified self.assertNotIn('longitude', dataarray.coords) - self.assertEqual(set(res.coords), {'longitude', 'latitude'}) + assert set(res.coords) == {'longitude', 'latitude'} lat = res['latitude'] lon = res['longitude'] 
np.testing.assert_array_equal(lat.data, lats_ref) @@ -1091,7 +1087,7 @@ def test_add_lonlat_coords(self): # original should be unmodified self.assertNotIn('longitude', dataarray.coords) - self.assertEqual(set(res.coords), {'longitude', 'latitude'}) + assert set(res.coords) == {'longitude', 'latitude'} lat = res['latitude'] lon = res['longitude'] np.testing.assert_array_equal(lat.data, lats_ref) @@ -1127,7 +1123,7 @@ def test_global_attr_default_history_and_Conventions(self): with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf') with xr.open_dataset(filename) as f: - self.assertEqual(f.attrs['Conventions'], 'CF-1.7') + assert f.attrs['Conventions'] == 'CF-1.7' self.assertIn('Created by pytroll/satpy on', f.attrs['history']) def test_global_attr_history_and_Conventions(self): @@ -1146,7 +1142,7 @@ def test_global_attr_history_and_Conventions(self): with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf', header_attrs=header_attrs) with xr.open_dataset(filename) as f: - self.assertEqual(f.attrs['Conventions'], 'CF-1.7, ACDD-1.3') + assert f.attrs['Conventions'] == 'CF-1.7, ACDD-1.3' self.assertIn('TEST add history\n', f.attrs['history']) self.assertIn('Created by pytroll/satpy on', f.attrs['history']) From dab75136a090d6b0c1407d6961ba6fd01692a54b Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 27 Jun 2023 17:46:35 +0200 Subject: [PATCH 0315/1416] Remove all self.assert statements --- satpy/tests/writer_tests/test_cf.py | 199 +++++++++++++++------------- 1 file changed, 106 insertions(+), 93 deletions(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 28ffad7a7a..6d9cd27703 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -240,9 +240,9 @@ def test_save_array_coords(self): np.testing.assert_array_equal(f['test-array'][:], [[1, 2, 3]]) np.testing.assert_array_equal(f['x'][:], [0, 1, 2]) np.testing.assert_array_equal(f['y'][:], [0]) - self.assertNotIn('crs', f) - self.assertNotIn('_FillValue', f['x'].attrs) - self.assertNotIn('_FillValue', f['y'].attrs) + assert 'crs' not in f + assert '_FillValue' not in f['x'].attrs + assert '_FillValue' not in f['y'].attrs expected_prereq = ("DataQuery(name='hej')") assert f['test-array'].attrs['prerequisites'] == expected_prereq @@ -282,7 +282,7 @@ def test_save_dataset_a_digit_no_prefix_include_attr(self): scn.save_datasets(filename=filename, writer='cf', include_orig_name=True, numeric_name_prefix='') with xr.open_dataset(filename) as f: np.testing.assert_array_equal(f['1'][:], [1, 2, 3]) - self.assertNotIn('original_name', f['1'].attrs) + assert 'original_name' not in f['1'].attrs def test_ancillary_variables(self): """Test ancillary_variables cited each other.""" @@ -339,14 +339,14 @@ def test_groups(self): pretty=True) nc_root = xr.open_dataset(filename) - self.assertIn('history', nc_root.attrs) - self.assertSetEqual(set(nc_root.variables.keys()), set()) + assert 'history' in nc_root.attrs + assert set(nc_root.variables.keys()) == set() nc_visir = xr.open_dataset(filename, group='visir') nc_hrv = xr.open_dataset(filename, group='hrv') - self.assertSetEqual(set(nc_visir.variables.keys()), {'VIS006', 'IR_108', 'y', 'x', 'VIS006_acq_time', - 'IR_108_acq_time'}) - self.assertSetEqual(set(nc_hrv.variables.keys()), {'HRV', 'y', 'x', 'acq_time'}) + assert set(nc_visir.variables.keys()) == {'VIS006', 'IR_108', + 'y', 'x', 'VIS006_acq_time', 'IR_108_acq_time'} + assert set(nc_hrv.variables.keys()) == {'HRV', 'y', 'x', 
'acq_time'} for tst, ref in zip([nc_visir['VIS006'], nc_visir['IR_108'], nc_hrv['HRV']], [scn['VIS006'], scn['IR_108'], scn['HRV']]): np.testing.assert_array_equal(tst.data, ref.data) @@ -356,7 +356,8 @@ def test_groups(self): # Different projection coordinates in one group are not supported with TempFile() as filename: - self.assertRaises(ValueError, scn.save_datasets, datasets=['VIS006', 'HRV'], filename=filename, writer='cf') + with pytest.raises(ValueError): + scn.save_datasets(datasets=['VIS006', 'HRV'], filename=filename, writer='cf') def test_single_time_value(self): """Test setting a single time value.""" @@ -482,7 +483,7 @@ def test_unlimited_dims_kwarg(self): with TempFile() as filename: scn.save_datasets(filename=filename, writer='cf', unlimited_dims=['time']) with xr.open_dataset(filename) as f: - self.assertSetEqual(f.encoding['unlimited_dims'], {'time'}) + assert set(f.encoding['unlimited_dims']) == {'time'} def test_header_attrs(self): """Check global attributes are set.""" @@ -507,7 +508,7 @@ def test_header_attrs(self): flatten_attrs=True, writer='cf') with xr.open_dataset(filename) as f: - self.assertIn('history', f.attrs) + assert 'history' in f.attrs assert f.attrs['sensor'] == 'SEVIRI' assert f.attrs['orbit'] == 99999 np.testing.assert_array_equal(f.attrs['list'], [1, 2, 3]) @@ -518,7 +519,7 @@ def test_header_attrs(self): assert f.attrs['nested_outer_inner2'] == 2 assert f.attrs['bool'] == 'true' assert f.attrs['bool_'] == 'true' - self.assertTrue('none' not in f.attrs.keys()) + assert 'none' not in f.attrs.keys() def get_test_attrs(self): """Create some dataset attributes for testing purpose. @@ -604,7 +605,7 @@ def get_test_attrs(self): def assertDictWithArraysEqual(self, d1, d2): """Check that dicts containing arrays are equal.""" - self.assertSetEqual(set(d1.keys()), set(d2.keys())) + assert set(d1.keys()) == set(d2.keys()) for key, val1 in d1.items(): val2 = d2[key] if isinstance(val1, np.ndarray): @@ -613,7 +614,7 @@ def assertDictWithArraysEqual(self, d1, d2): else: assert val1 == val2 if isinstance(val1, (np.floating, np.integer, np.bool_)): - self.assertTrue(isinstance(val2, np.generic)) + assert isinstance(val2, np.generic) assert val1.dtype == val2.dtype def test_encode_attrs_nc(self): @@ -630,10 +631,10 @@ def test_encode_attrs_nc(self): raw_md_roundtrip = {'recarray': [[0, 0], [0, 0], [0, 0]], 'flag': 'true', 'dict': {'a': 1, 'b': [1, 2, 3]}} - self.assertDictEqual(json.loads(encoded['raw_metadata']), raw_md_roundtrip) - self.assertListEqual(json.loads(encoded['array_3d']), [[[1, 2], [3, 4]], [[1, 2], [3, 4]]]) - self.assertDictEqual(json.loads(encoded['nested_dict']), {"l1": {"l2": {"l3": [1, 2, 3]}}}) - self.assertListEqual(json.loads(encoded['nested_list']), ["1", ["2", [3]]]) + assert json.loads(encoded['raw_metadata']) == raw_md_roundtrip + assert json.loads(encoded['array_3d']) == [[[1, 2], [3, 4]], [[1, 2], [3, 4]]] + assert json.loads(encoded['nested_dict']) == {"l1": {"l2": {"l3": [1, 2, 3]}}} + assert json.loads(encoded['nested_list']) == ["1", ["2", [3]]] def test_da2cf(self): """Test the conversion of a DataArray to a CF-compatible DataArray.""" @@ -664,8 +665,8 @@ def test_da2cf(self): np.testing.assert_array_equal(res['x'], arr['x']) np.testing.assert_array_equal(res['y'], arr['y']) np.testing.assert_array_equal(res['acq_time'], arr['acq_time']) - self.assertDictEqual(res['x'].attrs, {'units': 'm', 'standard_name': 'projection_x_coordinate'}) - self.assertDictEqual(res['y'].attrs, {'units': 'm', 'standard_name': 'projection_y_coordinate'}) + 
assert res['x'].attrs == {'units': 'm', 'standard_name': 'projection_x_coordinate'} + assert res['y'].attrs == {'units': 'm', 'standard_name': 'projection_y_coordinate'} self.assertDictWithArraysEqual(res.attrs, attrs_expected) # Test attribute kwargs @@ -732,7 +733,8 @@ def test_assert_xy_unique(self): assert_xy_unique(datas) datas['c'] = xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 3], 'x': [3, 4]}) - self.assertRaises(ValueError, assert_xy_unique, datas) + with pytest.raises(ValueError): + assert_xy_unique(datas) def test_link_coords(self): """Check that coordinates link has been established correctly.""" @@ -755,19 +757,19 @@ def test_link_coords(self): link_coords(datasets) # Check that link has been established correctly and 'coordinate' atrribute has been dropped - self.assertIn('lon', datasets['var1'].coords) - self.assertIn('lat', datasets['var1'].coords) + assert 'lon' in datasets['var1'].coords + assert 'lat' in datasets['var1'].coords np.testing.assert_array_equal(datasets['var1']['lon'].data, lon) np.testing.assert_array_equal(datasets['var1']['lat'].data, lat) - self.assertNotIn('coordinates', datasets['var1'].attrs) + assert 'coordinates' not in datasets['var1'].attrs # There should be no link if there was no 'coordinate' attribute - self.assertNotIn('lon', datasets['var2'].coords) - self.assertNotIn('lat', datasets['var2'].coords) + assert 'lon' not in datasets['var2'].coords + assert 'lat' not in datasets['var2'].coords - # The non-existant dimension or coordinate should be dropped - self.assertNotIn('time', datasets['var3'].coords) - self.assertNotIn('not_exist', datasets['var4'].coords) + # The non-existent dimension or coordinate should be dropped + assert 'time' not in datasets['var3'].coords + assert 'not_exist' not in datasets['var4'].coords def test_make_alt_coords_unique(self): """Test that created coordinate variables are unique.""" @@ -789,8 +791,8 @@ def test_make_alt_coords_unique(self): res = make_alt_coords_unique(datasets) np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) - self.assertNotIn('acq_time', res['var1'].coords) - self.assertNotIn('acq_time', res['var2'].coords) + assert 'acq_time' not in res['var1'].coords + assert 'acq_time' not in res['var2'].coords # Make sure nothing else is modified np.testing.assert_array_equal(res['var1']['x'], x) @@ -804,16 +806,16 @@ def test_make_alt_coords_unique(self): warn.assert_called() np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) - self.assertNotIn('acq_time', res['var1'].coords) - self.assertNotIn('acq_time', res['var2'].coords) + assert 'acq_time' not in res['var1'].coords + assert 'acq_time' not in res['var2'].coords # Coords unique and pretty=True -> Don't modify coordinate names datasets['var2']['acq_time'] = ('y', time1) res = make_alt_coords_unique(datasets, pretty=True) np.testing.assert_array_equal(res['var1']['acq_time'], time1) np.testing.assert_array_equal(res['var2']['acq_time'], time1) - self.assertNotIn('var1_acq_time', res['var1'].coords) - self.assertNotIn('var2_acq_time', res['var2'].coords) + assert 'var1_acq_time' not in res['var1'].coords + assert 'var2_acq_time' not in res['var2'].coords def test_area2cf(self): """Test the conversion of an area to CF standards.""" @@ -847,8 +849,8 @@ def test_area2cf(self): assert res[0].size == 1 # grid mapping variable assert res[0].name == res[1].attrs['grid_mapping'] # but 
now also have the lon/lats - self.assertIn('longitude', res[1].coords) - self.assertIn('latitude', res[1].coords) + assert 'longitude' in res[1].coords + assert 'latitude' in res[1].coords # c) Swath Definition swath = pyresample.geometry.SwathDefinition(lons=[[1, 1], [2, 2]], lats=[[1, 2], [1, 2]]) @@ -857,9 +859,9 @@ def test_area2cf(self): res = area2cf(ds, include_lonlats=False) assert len(res) == 1 - self.assertIn('longitude', res[0].coords) - self.assertIn('latitude', res[0].coords) - self.assertNotIn('grid_mapping', res[0].attrs) + assert 'longitude' in res[0].coords + assert 'latitude' in res[0].coords + assert 'grid_mapping' not in res[0].attrs def test__add_grid_mapping(self): """Test the conversion from pyresample area object to CF grid mapping.""" @@ -909,7 +911,7 @@ def _gm_matches(gmapping, expected): assert new_ds.attrs['grid_mapping'] == 'geos' _gm_matches(grid_mapping, geos_expected) # should not have been modified - self.assertNotIn('grid_mapping', ds.attrs) + assert 'grid_mapping' not in ds.attrs # b) Projection does not have a corresponding CF representation (COSMO) cosmo7 = pyresample.geometry.AreaDefinition( @@ -926,13 +928,13 @@ def _gm_matches(gmapping, expected): ds.attrs['area'] = cosmo7 new_ds, grid_mapping = _add_grid_mapping(ds) - self.assertIn('crs_wkt', grid_mapping.attrs) + assert 'crs_wkt' in grid_mapping.attrs wkt = grid_mapping.attrs['crs_wkt'] - self.assertIn('ELLIPSOID["WGS 84"', wkt) - self.assertIn('PARAMETER["lat_0",46', wkt) - self.assertIn('PARAMETER["lon_0",4.535', wkt) - self.assertIn('PARAMETER["o_lat_p",90', wkt) - self.assertIn('PARAMETER["o_lon_p",-5.465', wkt) + assert 'ELLIPSOID["WGS 84"' in wkt + assert 'PARAMETER["lat_0",46' in wkt + assert 'PARAMETER["lon_0",4.535' in wkt + assert 'PARAMETER["o_lat_p",90' in wkt + assert 'PARAMETER["o_lon_p",-5.465' in wkt assert new_ds.attrs['grid_mapping'] == 'cosmo7' # c) Projection Transverse Mercator @@ -1063,7 +1065,7 @@ def test_add_lonlat_coords(self): res = add_lonlat_coords(dataarray) # original should be unmodified - self.assertNotIn('longitude', dataarray.coords) + assert 'longitude' not in dataarray.coords assert set(res.coords) == {'longitude', 'latitude'} lat = res['latitude'] lon = res['longitude'] @@ -1086,7 +1088,7 @@ def test_add_lonlat_coords(self): res = add_lonlat_coords(dataarray) # original should be unmodified - self.assertNotIn('longitude', dataarray.coords) + assert 'longitude' not in dataarray.coords assert set(res.coords) == {'longitude', 'latitude'} lat = res['latitude'] lon = res['longitude'] @@ -1124,7 +1126,7 @@ def test_global_attr_default_history_and_Conventions(self): scn.save_datasets(filename=filename, writer='cf') with xr.open_dataset(filename) as f: assert f.attrs['Conventions'] == 'CF-1.7' - self.assertIn('Created by pytroll/satpy on', f.attrs['history']) + assert 'Created by pytroll/satpy on' in f.attrs['history'] def test_global_attr_history_and_Conventions(self): """Test saving global attributes history and Conventions.""" @@ -1143,8 +1145,8 @@ def test_global_attr_history_and_Conventions(self): scn.save_datasets(filename=filename, writer='cf', header_attrs=header_attrs) with xr.open_dataset(filename) as f: assert f.attrs['Conventions'] == 'CF-1.7, ACDD-1.3' - self.assertIn('TEST add history\n', f.attrs['history']) - self.assertIn('Created by pytroll/satpy on', f.attrs['history']) + assert 'TEST add history\n' in f.attrs['history'] + assert 'Created by pytroll/satpy on' in f.attrs['history'] class TestCFWriterData(unittest.TestCase): @@ -1188,16 +1190,16 @@ def 
test_is_lon_or_lat_dataarray(self): """Test the is_lon_or_lat_dataarray function.""" from satpy.writers.cf_writer import is_lon_or_lat_dataarray - self.assertTrue(is_lon_or_lat_dataarray(self.datasets['lat'])) - self.assertFalse(is_lon_or_lat_dataarray(self.datasets['var1'])) + assert is_lon_or_lat_dataarray(self.datasets['lat']) + assert not is_lon_or_lat_dataarray(self.datasets['var1']) def test_has_projection_coords(self): """Test the has_projection_coords function.""" from satpy.writers.cf_writer import has_projection_coords - self.assertTrue(has_projection_coords(self.datasets)) + assert has_projection_coords(self.datasets) self.datasets['lat'].attrs['standard_name'] = 'dummy' - self.assertFalse(has_projection_coords(self.datasets)) + assert not has_projection_coords(self.datasets) def test_collect_cf_dataarrays_with_latitude_named_lat(self, *mocks): """Test collecting CF datasets with latitude named lat.""" @@ -1247,11 +1249,14 @@ def test_dataset_name_digit(self): '2': {'dtype': 'float32'}}, 'other': 'kwargs'} enc, other_kwargs = update_encoding(ds, kwargs, numeric_name_prefix='CHANNEL_') - self.assertDictEqual(enc, {'y': {'_FillValue': None}, - 'x': {'_FillValue': None}, - 'CHANNEL_1': {'dtype': 'float32'}, - 'CHANNEL_2': {'dtype': 'float32'}}) - self.assertDictEqual(other_kwargs, {'other': 'kwargs'}) + expected_dict = { + 'y': {'_FillValue': None}, + 'x': {'_FillValue': None}, + 'CHANNEL_1': {'dtype': 'float32'}, + 'CHANNEL_2': {'dtype': 'float32'} + } + assert enc == expected_dict + assert other_kwargs == {'other': 'kwargs'} def test_without_time(self): """Test data with no time dimension.""" @@ -1262,22 +1267,28 @@ def test_without_time(self): kwargs = {'encoding': {'bar': {'chunksizes': (1, 1)}}, 'other': 'kwargs'} enc, other_kwargs = update_encoding(ds, kwargs) - self.assertDictEqual(enc, {'y': {'_FillValue': None}, - 'x': {'_FillValue': None}, - 'lon': {'chunksizes': (2, 2)}, - 'foo': {'chunksizes': (2, 2)}, - 'bar': {'chunksizes': (1, 1)}}) - self.assertDictEqual(other_kwargs, {'other': 'kwargs'}) + expected_dict = { + 'y': {'_FillValue': None}, + 'x': {'_FillValue': None}, + 'lon': {'chunksizes': (2, 2)}, + 'foo': {'chunksizes': (2, 2)}, + 'bar': {'chunksizes': (1, 1)} + } + assert enc == expected_dict + assert other_kwargs == {'other': 'kwargs'} # Chunksize may not exceed shape ds = self.ds.chunk(8) kwargs = {'encoding': {}, 'other': 'kwargs'} enc, other_kwargs = update_encoding(ds, kwargs) - self.assertDictEqual(enc, {'y': {'_FillValue': None}, - 'x': {'_FillValue': None}, - 'lon': {'chunksizes': (2, 2)}, - 'foo': {'chunksizes': (2, 2)}, - 'bar': {'chunksizes': (2, 2)}}) + expected_dict = { + 'y': {'_FillValue': None}, + 'x': {'_FillValue': None}, + 'lon': {'chunksizes': (2, 2)}, + 'foo': {'chunksizes': (2, 2)}, + 'bar': {'chunksizes': (2, 2)} + } + assert enc == expected_dict def test_with_time(self): """Test data with a time dimension.""" @@ -1288,26 +1299,28 @@ def test_with_time(self): kwargs = {'encoding': {'bar': {'chunksizes': (1, 1, 1)}}, 'other': 'kwargs'} enc, other_kwargs = update_encoding(ds, kwargs) - self.assertDictEqual(enc, {'y': {'_FillValue': None}, - 'x': {'_FillValue': None}, - 'lon': {'chunksizes': (2, 2)}, - 'foo': {'chunksizes': (1, 2, 2)}, - 'bar': {'chunksizes': (1, 1, 1)}, - 'time': {'_FillValue': None, - 'calendar': 'proleptic_gregorian', - 'units': 'days since 2009-07-01 12:15:00'}, - 'time_bnds': {'_FillValue': None, - 'calendar': 'proleptic_gregorian', - 'units': 'days since 2009-07-01 12:15:00'}}) - + expected_dict = { + 'y': {'_FillValue': 
None}, + 'x': {'_FillValue': None}, + 'lon': {'chunksizes': (2, 2)}, + 'foo': {'chunksizes': (1, 2, 2)}, + 'bar': {'chunksizes': (1, 1, 1)}, + 'time': {'_FillValue': None, + 'calendar': 'proleptic_gregorian', + 'units': 'days since 2009-07-01 12:15:00'}, + 'time_bnds': {'_FillValue': None, + 'calendar': 'proleptic_gregorian', + 'units': 'days since 2009-07-01 12:15:00'} + } + assert enc == expected_dict # User-defined encoding may not be altered - self.assertDictEqual(kwargs['encoding'], {'bar': {'chunksizes': (1, 1, 1)}}) + assert kwargs['encoding'] == {'bar': {'chunksizes': (1, 1, 1)}} class TestEncodingKwarg: """Test CF writer with 'encoding' keyword argument.""" - @pytest.fixture + @ pytest.fixture def scene(self): """Create a fake scene.""" scn = Scene() @@ -1318,12 +1331,12 @@ def scene(self): scn['test-array'] = xr.DataArray([1., 2, 3], attrs=attrs) return scn - @pytest.fixture(params=[True, False]) + @ pytest.fixture(params=[True, False]) def compression_on(self, request): """Get compression options.""" return request.param - @pytest.fixture + @ pytest.fixture def encoding(self, compression_on): """Get encoding.""" enc = { @@ -1339,19 +1352,19 @@ def encoding(self, compression_on): enc["test-array"].update(comp_params) return enc - @pytest.fixture + @ pytest.fixture def filename(self, tmp_path): """Get output filename.""" return str(tmp_path / "test.nc") - @pytest.fixture + @ pytest.fixture def complevel_exp(self, compression_on): """Get expected compression level.""" if compression_on: return 7 return 0 - @pytest.fixture + @ pytest.fixture def expected(self, complevel_exp): """Get expectated file contents.""" return { @@ -1399,7 +1412,7 @@ def test_no_warning_if_backends_match(self, scene, filename, monkeypatch): class TestEncodingAttribute(TestEncodingKwarg): """Test CF writer with 'encoding' dataset attribute.""" - @pytest.fixture + @ pytest.fixture def scene_with_encoding(self, scene, encoding): """Create scene with a dataset providing the 'encoding' attribute.""" scene["test-array"].encoding = encoding["test-array"] From 6d0f8c6e92f3200abdb732d8e19258bc96414dd0 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 27 Jun 2023 17:52:39 +0200 Subject: [PATCH 0316/1416] Remove unitest from TestCFWriter --- satpy/tests/writer_tests/test_cf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 6d9cd27703..f4d8d2e951 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -188,7 +188,7 @@ def test_empty_collect_cf_datasets(): collect_cf_datasets(list_dataarrays=[]) -class TestCFWriter(unittest.TestCase): +class TestCFWriter(): """Test case for CF writer.""" def test_init(self): From 22603193918e386595df669ad50e3fbca25156b9 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 27 Jun 2023 18:09:42 +0200 Subject: [PATCH 0317/1416] Remove unitest from EncodingUpdateTest --- satpy/tests/writer_tests/test_cf.py | 53 ++++++++++++++++------------- 1 file changed, 30 insertions(+), 23 deletions(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index f4d8d2e951..041e43a8c3 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -51,7 +51,7 @@ # - request -class TempFile(object): +class TempFile: """A temporary filename class.""" def __init__(self, suffix=".nc"): @@ -188,7 +188,7 @@ def test_empty_collect_cf_datasets(): collect_cf_datasets(list_dataarrays=[]) -class TestCFWriter(): +class 
TestCFWriter: """Test case for CF writer.""" def test_init(self): @@ -1223,32 +1223,39 @@ def test_collect_cf_dataarrays_with_latitude_named_lat(self, *mocks): assert ds2['var1']['longitude'].attrs['name'] == 'longitude' -class EncodingUpdateTest(unittest.TestCase): +class EncodingUpdateTest: """Test update of netCDF encoding.""" - def setUp(self): + @ pytest.fixture + def fake_ds(self): + """Create fake data for testing.""" + ds = xr.Dataset({'foo': (('y', 'x'), [[1, 2], [3, 4]]), + 'bar': (('y', 'x'), [[3, 4], [5, 6]])}, + coords={'y': [1, 2], + 'x': [3, 4], + 'lon': (('y', 'x'), [[7, 8], [9, 10]])}) + return ds + + @ pytest.fixture + def fake_ds_digit(self): """Create fake data for testing.""" - self.ds = xr.Dataset({'foo': (('y', 'x'), [[1, 2], [3, 4]]), - 'bar': (('y', 'x'), [[3, 4], [5, 6]])}, - coords={'y': [1, 2], - 'x': [3, 4], - 'lon': (('y', 'x'), [[7, 8], [9, 10]])}) - self.ds_digit = xr.Dataset({'CHANNEL_1': (('y', 'x'), [[1, 2], [3, 4]]), - 'CHANNEL_2': (('y', 'x'), [[3, 4], [5, 6]])}, - coords={'y': [1, 2], - 'x': [3, 4], - 'lon': (('y', 'x'), [[7, 8], [9, 10]])}) - - def test_dataset_name_digit(self): + ds_digit = xr.Dataset({'CHANNEL_1': (('y', 'x'), [[1, 2], [3, 4]]), + 'CHANNEL_2': (('y', 'x'), [[3, 4], [5, 6]])}, + coords={'y': [1, 2], + 'x': [3, 4], + 'lon': (('y', 'x'), [[7, 8], [9, 10]])}) + return ds_digit + + def test_dataset_name_digit(self, fake_ds_digit): """Test data with dataset name staring with a digit.""" from satpy.writers.cf_writer import update_encoding # Dataset with name staring with digit - ds = self.ds_digit + ds_digit = fake_ds_digit kwargs = {'encoding': {'1': {'dtype': 'float32'}, '2': {'dtype': 'float32'}}, 'other': 'kwargs'} - enc, other_kwargs = update_encoding(ds, kwargs, numeric_name_prefix='CHANNEL_') + enc, other_kwargs = update_encoding(ds_digit, kwargs, numeric_name_prefix='CHANNEL_') expected_dict = { 'y': {'_FillValue': None}, 'x': {'_FillValue': None}, @@ -1258,12 +1265,12 @@ def test_dataset_name_digit(self): assert enc == expected_dict assert other_kwargs == {'other': 'kwargs'} - def test_without_time(self): + def test_without_time(self, fake_ds): """Test data with no time dimension.""" from satpy.writers.cf_writer import update_encoding # Without time dimension - ds = self.ds.chunk(2) + ds = fake_ds.chunk(2) kwargs = {'encoding': {'bar': {'chunksizes': (1, 1)}}, 'other': 'kwargs'} enc, other_kwargs = update_encoding(ds, kwargs) @@ -1278,7 +1285,7 @@ def test_without_time(self): assert other_kwargs == {'other': 'kwargs'} # Chunksize may not exceed shape - ds = self.ds.chunk(8) + ds = fake_ds.chunk(8) kwargs = {'encoding': {}, 'other': 'kwargs'} enc, other_kwargs = update_encoding(ds, kwargs) expected_dict = { @@ -1290,12 +1297,12 @@ def test_without_time(self): } assert enc == expected_dict - def test_with_time(self): + def test_with_time(self, fake_ds): """Test data with a time dimension.""" from satpy.writers.cf_writer import update_encoding # With time dimension - ds = self.ds.chunk(8).expand_dims({'time': [datetime(2009, 7, 1, 12, 15)]}) + ds = fake_ds.chunk(8).expand_dims({'time': [datetime(2009, 7, 1, 12, 15)]}) kwargs = {'encoding': {'bar': {'chunksizes': (1, 1, 1)}}, 'other': 'kwargs'} enc, other_kwargs = update_encoding(ds, kwargs) From b2552c096e8b5431decfc921b9b0fbd494aaf7c3 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 27 Jun 2023 18:30:15 +0200 Subject: [PATCH 0318/1416] Remove unitest from TestCFWriterData --- satpy/tests/writer_tests/test_cf.py | 99 +++++++++++++++-------------- 1 file changed, 51 insertions(+), 48 
deletions(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 041e43a8c3..ad85ccdde1 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -21,11 +21,9 @@ import logging import os import tempfile -import unittest import warnings from collections import OrderedDict from datetime import datetime -from unittest import mock import dask.array as da import numpy as np @@ -49,6 +47,7 @@ # - tmp_path # - caplog # - request +# - mocker class TempFile: @@ -682,7 +681,7 @@ def test_da2cf_one_dimensional_array(self): coords={'y': [0, 1, 2, 3], 'acq_time': ('y', [0, 1, 2, 3])}) _ = CFWriter.da2cf(arr) - def test_collect_cf_dataarrays(self, *mocks): + def test_collect_cf_dataarrays(self): """Test collecting CF datasets from a DataArray objects.""" from satpy.writers.cf_writer import _collect_cf_dataset @@ -771,7 +770,8 @@ def test_link_coords(self): assert 'time' not in datasets['var3'].coords assert 'not_exist' not in datasets['var4'].coords - def test_make_alt_coords_unique(self): + @pytest.mark.usefixtures("mocker") + def test_make_alt_coords_unique(self, mocker): """Test that created coordinate variables are unique.""" from satpy.writers.cf_writer import make_alt_coords_unique @@ -801,13 +801,13 @@ def test_make_alt_coords_unique(self): np.testing.assert_array_equal(res['var2']['y'], y) # Coords not unique -> Dataset names must be prepended, even if pretty=True - with mock.patch('satpy.writers.cf_writer.warnings.warn') as warn: - res = make_alt_coords_unique(datasets, pretty=True) - warn.assert_called() - np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) - np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) - assert 'acq_time' not in res['var1'].coords - assert 'acq_time' not in res['var2'].coords + mocker.patch('satpy.writers.cf_writer.warnings.warn') + res = make_alt_coords_unique(datasets, pretty=True) + warnings.warn.assert_called() + np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) + np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) + assert 'acq_time' not in res['var1'].coords + assert 'acq_time' not in res['var2'].coords # Coords unique and pretty=True -> Don't modify coordinate names datasets['var2']['acq_time'] = ('y', time1) @@ -1149,11 +1149,12 @@ def test_global_attr_history_and_Conventions(self): assert 'Created by pytroll/satpy on' in f.attrs['history'] -class TestCFWriterData(unittest.TestCase): +class TestCFWriterData: """Test case for CF writer where data arrays are needed.""" - def setUp(self): - """Create some test data.""" + @ pytest.fixture + def datasets(self): + """Create test dataset.""" data = [[75, 2], [3, 4]] y = [1, 2] x = [1, 2] @@ -1164,53 +1165,55 @@ def setUp(self): projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, width=2, height=2, area_extent=[-1, -1, 1, 1]) - self.datasets = {'var1': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x}), - 'var2': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x}), - 'lat': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x}), - 'lon': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x})} - self.datasets['lat'].attrs['standard_name'] = 'latitude' - self.datasets['var1'].attrs['standard_name'] = 'dummy' - self.datasets['var2'].attrs['standard_name'] = 'dummy' - self.datasets['var2'].attrs['area'] = geos - self.datasets['var1'].attrs['area'] = geos - self.datasets['lat'].attrs['name'] = 
'lat' - self.datasets['var1'].attrs['name'] = 'var1' - self.datasets['var2'].attrs['name'] = 'var2' - self.datasets['lon'].attrs['name'] = 'lon' - - def test_is_lon_or_lat_dataarray(self): + datasets = { + 'var1': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x}), + 'var2': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x}), + 'lat': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x}), + 'lon': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x})} + datasets['lat'].attrs['standard_name'] = 'latitude' + datasets['var1'].attrs['standard_name'] = 'dummy' + datasets['var2'].attrs['standard_name'] = 'dummy' + datasets['var2'].attrs['area'] = geos + datasets['var1'].attrs['area'] = geos + datasets['lat'].attrs['name'] = 'lat' + datasets['var1'].attrs['name'] = 'var1' + datasets['var2'].attrs['name'] = 'var2' + datasets['lon'].attrs['name'] = 'lon' + return datasets + + def test_is_lon_or_lat_dataarray(self, datasets): """Test the is_lon_or_lat_dataarray function.""" from satpy.writers.cf_writer import is_lon_or_lat_dataarray - assert is_lon_or_lat_dataarray(self.datasets['lat']) - assert not is_lon_or_lat_dataarray(self.datasets['var1']) + assert is_lon_or_lat_dataarray(datasets['lat']) + assert not is_lon_or_lat_dataarray(datasets['var1']) - def test_has_projection_coords(self): + def test_has_projection_coords(self, datasets): """Test the has_projection_coords function.""" from satpy.writers.cf_writer import has_projection_coords - assert has_projection_coords(self.datasets) - self.datasets['lat'].attrs['standard_name'] = 'dummy' - assert not has_projection_coords(self.datasets) + assert has_projection_coords(datasets) + datasets['lat'].attrs['standard_name'] = 'dummy' + assert not has_projection_coords(datasets) - def test_collect_cf_dataarrays_with_latitude_named_lat(self, *mocks): + def test_collect_cf_dataarrays_with_latitude_named_lat(self, datasets): """Test collecting CF datasets with latitude named lat.""" from satpy.writers.cf_writer import _collect_cf_dataset - self.datasets_list = [self.datasets[key] for key in self.datasets] - self.datasets_list_no_latlon = [self.datasets[key] for key in ['var1', 'var2']] + datasets_list = [datasets[key] for key in datasets.keys()] + datasets_list_no_latlon = [datasets[key] for key in ['var1', 'var2']] # Collect datasets - ds = _collect_cf_dataset(self.datasets_list, include_lonlats=True) - ds2 = _collect_cf_dataset(self.datasets_list_no_latlon, include_lonlats=True) + ds = _collect_cf_dataset(datasets_list, include_lonlats=True) + ds2 = _collect_cf_dataset(datasets_list_no_latlon, include_lonlats=True) # Test results assert len(ds.keys()) == 5 From d806fd4e6df985edfd135f4d92416bb812be4d3f Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 27 Jun 2023 18:30:51 +0200 Subject: [PATCH 0319/1416] Update authors list --- AUTHORS.md | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS.md b/AUTHORS.md index fa5e0272d0..9078e441b4 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -27,6 +27,7 @@ The following people have made contributions to this project: - [Ulrik Egede (egede)](https://github.com/egede) - [Joleen Feltz (joleenf)](https://github.com/joleenf) - [Stephan Finkensieper (sfinkens)](https://github.com/sfinkens) - Deutscher Wetterdienst +- [Gionata Ghiggi (ghiggi)](https://github.com/ghiggi) - [Andrea Grillini (AppLEaDaY)](https://github.com/AppLEaDaY) - [Blanka Gvozdikova (gvozdikb)](https://github.com/gvozdikb) - [Nina Håkansson 
(ninahakansson)](https://github.com/ninahakansson) From 07e550a80e5954a7792a150e6d0f20942335f57e Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 27 Jun 2023 18:54:45 +0200 Subject: [PATCH 0320/1416] Fix tests --- satpy/tests/writer_tests/test_cf.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index ad85ccdde1..2446502e88 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -47,7 +47,6 @@ # - tmp_path # - caplog # - request -# - mocker class TempFile: @@ -770,8 +769,7 @@ def test_link_coords(self): assert 'time' not in datasets['var3'].coords assert 'not_exist' not in datasets['var4'].coords - @pytest.mark.usefixtures("mocker") - def test_make_alt_coords_unique(self, mocker): + def test_make_alt_coords_unique(self): """Test that created coordinate variables are unique.""" from satpy.writers.cf_writer import make_alt_coords_unique @@ -801,9 +799,8 @@ def test_make_alt_coords_unique(self, mocker): np.testing.assert_array_equal(res['var2']['y'], y) # Coords not unique -> Dataset names must be prepended, even if pretty=True - mocker.patch('satpy.writers.cf_writer.warnings.warn') - res = make_alt_coords_unique(datasets, pretty=True) - warnings.warn.assert_called() + with pytest.warns(UserWarning, match='Cannot pretty-format "acq_time"'): + res = make_alt_coords_unique(datasets, pretty=True) np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) assert 'acq_time' not in res['var1'].coords From a7f28e0b85735d457131f178b138f1847ad2cf72 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 27 Jun 2023 18:56:45 +0200 Subject: [PATCH 0321/1416] Remove space after decorator --- satpy/tests/writer_tests/test_cf.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 2446502e88..2b0a5dfc6c 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -1149,7 +1149,7 @@ def test_global_attr_history_and_Conventions(self): class TestCFWriterData: """Test case for CF writer where data arrays are needed.""" - @ pytest.fixture + @pytest.fixture def datasets(self): """Create test dataset.""" data = [[75, 2], [3, 4]] @@ -1226,7 +1226,7 @@ def test_collect_cf_dataarrays_with_latitude_named_lat(self, datasets): class EncodingUpdateTest: """Test update of netCDF encoding.""" - @ pytest.fixture + @pytest.fixture def fake_ds(self): """Create fake data for testing.""" ds = xr.Dataset({'foo': (('y', 'x'), [[1, 2], [3, 4]]), @@ -1236,7 +1236,7 @@ def fake_ds(self): 'lon': (('y', 'x'), [[7, 8], [9, 10]])}) return ds - @ pytest.fixture + @pytest.fixture def fake_ds_digit(self): """Create fake data for testing.""" ds_digit = xr.Dataset({'CHANNEL_1': (('y', 'x'), [[1, 2], [3, 4]]), @@ -1327,7 +1327,7 @@ def test_with_time(self, fake_ds): class TestEncodingKwarg: """Test CF writer with 'encoding' keyword argument.""" - @ pytest.fixture + @pytest.fixture def scene(self): """Create a fake scene.""" scn = Scene() @@ -1338,12 +1338,12 @@ def scene(self): scn['test-array'] = xr.DataArray([1., 2, 3], attrs=attrs) return scn - @ pytest.fixture(params=[True, False]) + @pytest.fixture(params=[True, False]) def compression_on(self, request): """Get compression options.""" return request.param - @ pytest.fixture + @pytest.fixture def encoding(self, compression_on): """Get 
encoding.""" enc = { @@ -1359,19 +1359,19 @@ def encoding(self, compression_on): enc["test-array"].update(comp_params) return enc - @ pytest.fixture + @pytest.fixture def filename(self, tmp_path): """Get output filename.""" return str(tmp_path / "test.nc") - @ pytest.fixture + @pytest.fixture def complevel_exp(self, compression_on): """Get expected compression level.""" if compression_on: return 7 return 0 - @ pytest.fixture + @pytest.fixture def expected(self, complevel_exp): """Get expectated file contents.""" return { @@ -1419,7 +1419,7 @@ def test_no_warning_if_backends_match(self, scene, filename, monkeypatch): class TestEncodingAttribute(TestEncodingKwarg): """Test CF writer with 'encoding' dataset attribute.""" - @ pytest.fixture + @pytest.fixture def scene_with_encoding(self, scene, encoding): """Create scene with a dataset providing the 'encoding' attribute.""" scene["test-array"].encoding = encoding["test-array"] From 7443bb0f517501432475590a524133ee97958be3 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 27 Jun 2023 03:34:51 +0000 Subject: [PATCH 0322/1416] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.3.0 → v1.4.1](https://github.com/pre-commit/mirrors-mypy/compare/v1.3.0...v1.4.1) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 66a4db273d..995f3035c4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -20,7 +20,7 @@ repos: - id: bandit args: [--ini, .bandit] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.3.0' # Use the sha / tag you want to point at + rev: 'v1.4.1' # Use the sha / tag you want to point at hooks: - id: mypy additional_dependencies: From 5ac92a774ce9f1cf84ae9e35d81e823c82ff906f Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 27 Jun 2023 22:17:22 +0200 Subject: [PATCH 0323/1416] Refactor area-related functions --- satpy/tests/writer_tests/cf_tests/__init__.py | 18 + .../tests/writer_tests/cf_tests/test_area.py | 401 ++++++++++++++++++ satpy/tests/writer_tests/test_cf.py | 379 +---------------- satpy/writers/cf/area.py | 192 +++++++++ satpy/writers/cf_writer.py | 178 +------- 5 files changed, 621 insertions(+), 547 deletions(-) create mode 100644 satpy/tests/writer_tests/cf_tests/__init__.py create mode 100644 satpy/tests/writer_tests/cf_tests/test_area.py create mode 100644 satpy/writers/cf/area.py diff --git a/satpy/tests/writer_tests/cf_tests/__init__.py b/satpy/tests/writer_tests/cf_tests/__init__.py new file mode 100644 index 0000000000..e654e26dcc --- /dev/null +++ b/satpy/tests/writer_tests/cf_tests/__init__.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. 
If not, see <http://www.gnu.org/licenses/>.
+"""The CF dataset tests package."""
diff --git a/satpy/tests/writer_tests/cf_tests/test_area.py b/satpy/tests/writer_tests/cf_tests/test_area.py
new file mode 100644
index 0000000000..e293ff39a6
--- /dev/null
+++ b/satpy/tests/writer_tests/cf_tests/test_area.py
@@ -0,0 +1,401 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2017-2023 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Tests for the CF Area."""
+import dask.array as da
+import numpy as np
+import pytest
+import xarray as xr
+from pyresample import AreaDefinition, SwathDefinition
+
+
+class TestCFArea:
+    """Test case for CF Area."""
+
+    def test_assert_xy_unique(self):
+        """Test that the x and y coordinates are unique."""
+        from satpy.writers.cf.area import assert_xy_unique
+
+        dummy = [[1, 2], [3, 4]]
+        datas = {'a': xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}),
+                 'b': xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}),
+                 'n': xr.DataArray(data=dummy, dims=('v', 'w'), coords={'v': [1, 2], 'w': [3, 4]})}
+        assert_xy_unique(datas)
+
+        datas['c'] = xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 3], 'x': [3, 4]})
+        with pytest.raises(ValueError):
+            assert_xy_unique(datas)
+
+    def test_link_coords(self):
+        """Check that coordinates link has been established correctly."""
+        from satpy.writers.cf.area import link_coords
+
+        data = [[1, 2], [3, 4]]
+        lon = np.zeros((2, 2))
+        lon2 = np.zeros((1, 2, 2))
+        lat = np.ones((2, 2))
+        datasets = {
+            'var1': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'lon lat'}),
+            'var2': xr.DataArray(data=data, dims=('y', 'x')),
+            'var3': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'lon2 lat'}),
+            'var4': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'not_exist lon lat'}),
+            'lon': xr.DataArray(data=lon, dims=('y', 'x')),
+            'lon2': xr.DataArray(data=lon2, dims=('time', 'y', 'x')),
+            'lat': xr.DataArray(data=lat, dims=('y', 'x'))
+        }
+
+        link_coords(datasets)
+
+        # Check that link has been established correctly and 'coordinate' attribute has been dropped
+        assert 'lon' in datasets['var1'].coords
+        assert 'lat' in datasets['var1'].coords
+        np.testing.assert_array_equal(datasets['var1']['lon'].data, lon)
+        np.testing.assert_array_equal(datasets['var1']['lat'].data, lat)
+        assert 'coordinates' not in datasets['var1'].attrs
+
+        # There should be no link if there was no 'coordinate' attribute
+        assert 'lon' not in datasets['var2'].coords
+        assert 'lat' not in datasets['var2'].coords
+
+        # The non-existent dimension or coordinate should be dropped
+        assert 'time' not in datasets['var3'].coords
+        assert 'not_exist' not in datasets['var4'].coords
+
+    def test_make_alt_coords_unique(self):
+        """Test that created coordinate variables are unique."""
+        from satpy.writers.cf.area import make_alt_coords_unique
+
+        data = [[1, 2], [3, 4]]
+        y = [1, 2]
+        x = [1, 2]
+        time1 = [1, 
2]
+        time2 = [3, 4]
+        datasets = {'var1': xr.DataArray(data=data,
+                                         dims=('y', 'x'),
+                                         coords={'y': y, 'x': x, 'acq_time': ('y', time1)}),
+                    'var2': xr.DataArray(data=data,
+                                         dims=('y', 'x'),
+                                         coords={'y': y, 'x': x, 'acq_time': ('y', time2)})}
+
+        # Test that dataset names are prepended to alternative coordinates
+        res = make_alt_coords_unique(datasets)
+        np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1)
+        np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2)
+        assert 'acq_time' not in res['var1'].coords
+        assert 'acq_time' not in res['var2'].coords
+
+        # Make sure nothing else is modified
+        np.testing.assert_array_equal(res['var1']['x'], x)
+        np.testing.assert_array_equal(res['var1']['y'], y)
+        np.testing.assert_array_equal(res['var2']['x'], x)
+        np.testing.assert_array_equal(res['var2']['y'], y)
+
+        # Coords not unique -> Dataset names must be prepended, even if pretty=True
+        with pytest.warns(UserWarning, match='Cannot pretty-format "acq_time"'):
+            res = make_alt_coords_unique(datasets, pretty=True)
+        np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1)
+        np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2)
+        assert 'acq_time' not in res['var1'].coords
+        assert 'acq_time' not in res['var2'].coords
+
+        # Coords unique and pretty=True -> Don't modify coordinate names
+        datasets['var2']['acq_time'] = ('y', time1)
+        res = make_alt_coords_unique(datasets, pretty=True)
+        np.testing.assert_array_equal(res['var1']['acq_time'], time1)
+        np.testing.assert_array_equal(res['var2']['acq_time'], time1)
+        assert 'var1_acq_time' not in res['var1'].coords
+        assert 'var2_acq_time' not in res['var2'].coords
+
+    def test_area2cf(self):
+        """Test the conversion of an area to CF standards."""
+        from satpy.writers.cf.area import area2cf
+
+        ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]},
+                               attrs={'name': 'var1'})
+
+        # a) Area Definition and include_lonlats=False
+        geos = AreaDefinition(
+            area_id='geos',
+            description='geos',
+            proj_id='geos',
+            projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8},
+            width=2, height=2,
+            area_extent=[-1, -1, 1, 1])
+        ds = ds_base.copy(deep=True)
+        ds.attrs['area'] = geos
+
+        res = area2cf(ds, include_lonlats=False)
+        assert len(res) == 2
+        assert res[0].size == 1  # grid mapping variable
+        assert res[0].name == res[1].attrs['grid_mapping']
+
+        # b) Area Definition and include_lonlats=True
+        ds = ds_base.copy(deep=True)
+        ds.attrs['area'] = geos
+        res = area2cf(ds, include_lonlats=True)
+        # same as above
+        assert len(res) == 2
+        assert res[0].size == 1  # grid mapping variable
+        assert res[0].name == res[1].attrs['grid_mapping']
+        # but now also have the lon/lats
+        assert 'longitude' in res[1].coords
+        assert 'latitude' in res[1].coords
+
+        # c) Swath Definition
+        swath = SwathDefinition(lons=[[1, 1], [2, 2]], lats=[[1, 2], [1, 2]])
+        ds = ds_base.copy(deep=True)
+        ds.attrs['area'] = swath
+
+        res = area2cf(ds, include_lonlats=False)
+        assert len(res) == 1
+        assert 'longitude' in res[0].coords
+        assert 'latitude' in res[0].coords
+        assert 'grid_mapping' not in res[0].attrs
+
+    def test__add_grid_mapping(self):
+        """Test the conversion from pyresample area object to CF grid mapping."""
+        from satpy.writers.cf.area import _add_grid_mapping
+
+        def _gm_matches(gmapping, expected):
+            """Assert that all keys in ``expected`` match the values in ``gmapping``."""
+            for attr_key, attr_val in expected.attrs.items():
+                test_val = gmapping.attrs[attr_key]
+                if 
attr_val is None or isinstance(attr_val, str): + assert test_val == attr_val + else: + np.testing.assert_almost_equal(test_val, attr_val, decimal=3) + + ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}, + attrs={'name': 'var1'}) + + # a) Projection has a corresponding CF representation (e.g. geos) + a = 6378169. + b = 6356583.8 + h = 35785831. + geos = AreaDefinition( + area_id='geos', + description='geos', + proj_id='geos', + projection={'proj': 'geos', 'h': h, 'a': a, 'b': b, + 'lat_0': 0, 'lon_0': 0}, + width=2, height=2, + area_extent=[-1, -1, 1, 1]) + geos_expected = xr.DataArray(data=0, + attrs={'perspective_point_height': h, + 'latitude_of_projection_origin': 0, + 'longitude_of_projection_origin': 0, + 'grid_mapping_name': 'geostationary', + 'semi_major_axis': a, + 'semi_minor_axis': b, + # 'sweep_angle_axis': None, + }) + + ds = ds_base.copy() + ds.attrs['area'] = geos + new_ds, grid_mapping = _add_grid_mapping(ds) + if 'sweep_angle_axis' in grid_mapping.attrs: + # older versions of pyproj might not include this + assert grid_mapping.attrs['sweep_angle_axis'] == 'y' + + assert new_ds.attrs['grid_mapping'] == 'geos' + _gm_matches(grid_mapping, geos_expected) + # should not have been modified + assert 'grid_mapping' not in ds.attrs + + # b) Projection does not have a corresponding CF representation (COSMO) + cosmo7 = AreaDefinition( + area_id='cosmo7', + description='cosmo7', + proj_id='cosmo7', + projection={'proj': 'ob_tran', 'ellps': 'WGS84', 'lat_0': 46, 'lon_0': 4.535, + 'o_proj': 'stere', 'o_lat_p': 90, 'o_lon_p': -5.465}, + width=597, height=510, + area_extent=[-1812933, -1003565, 814056, 1243448] + ) + + ds = ds_base.copy() + ds.attrs['area'] = cosmo7 + + new_ds, grid_mapping = _add_grid_mapping(ds) + assert 'crs_wkt' in grid_mapping.attrs + wkt = grid_mapping.attrs['crs_wkt'] + assert 'ELLIPSOID["WGS 84"' in wkt + assert 'PARAMETER["lat_0",46' in wkt + assert 'PARAMETER["lon_0",4.535' in wkt + assert 'PARAMETER["o_lat_p",90' in wkt + assert 'PARAMETER["o_lon_p",-5.465' in wkt + assert new_ds.attrs['grid_mapping'] == 'cosmo7' + + # c) Projection Transverse Mercator + lat_0 = 36.5 + lon_0 = 15.0 + + tmerc = AreaDefinition( + area_id='tmerc', + description='tmerc', + proj_id='tmerc', + projection={'proj': 'tmerc', 'ellps': 'WGS84', 'lat_0': 36.5, 'lon_0': 15.0}, + width=2, height=2, + area_extent=[-1, -1, 1, 1]) + + tmerc_expected = xr.DataArray(data=0, + attrs={'latitude_of_projection_origin': lat_0, + 'longitude_of_central_meridian': lon_0, + 'grid_mapping_name': 'transverse_mercator', + 'reference_ellipsoid_name': 'WGS 84', + 'false_easting': 0., + 'false_northing': 0., + }) + + ds = ds_base.copy() + ds.attrs['area'] = tmerc + new_ds, grid_mapping = _add_grid_mapping(ds) + assert new_ds.attrs['grid_mapping'] == 'tmerc' + _gm_matches(grid_mapping, tmerc_expected) + + # d) Projection that has a representation but no explicit a/b + h = 35785831. 
+ geos = AreaDefinition( + area_id='geos', + description='geos', + proj_id='geos', + projection={'proj': 'geos', 'h': h, 'datum': 'WGS84', 'ellps': 'GRS80', + 'lat_0': 0, 'lon_0': 0}, + width=2, height=2, + area_extent=[-1, -1, 1, 1]) + geos_expected = xr.DataArray(data=0, + attrs={'perspective_point_height': h, + 'latitude_of_projection_origin': 0, + 'longitude_of_projection_origin': 0, + 'grid_mapping_name': 'geostationary', + # 'semi_major_axis': 6378137.0, + # 'semi_minor_axis': 6356752.314, + # 'sweep_angle_axis': None, + }) + + ds = ds_base.copy() + ds.attrs['area'] = geos + new_ds, grid_mapping = _add_grid_mapping(ds) + + assert new_ds.attrs['grid_mapping'] == 'geos' + _gm_matches(grid_mapping, geos_expected) + + # e) oblique Mercator + area = AreaDefinition( + area_id='omerc_otf', + description='On-the-fly omerc area', + proj_id='omerc', + projection={'alpha': '9.02638777018478', 'ellps': 'WGS84', 'gamma': '0', 'k': '1', + 'lat_0': '-0.256794486098476', 'lonc': '13.7888658224205', + 'proj': 'omerc', 'units': 'm'}, + width=2837, + height=5940, + area_extent=[-1460463.0893, 3455291.3877, 1538407.1158, 9615788.8787] + ) + + omerc_dict = {'azimuth_of_central_line': 9.02638777018478, + 'false_easting': 0., + 'false_northing': 0., + # 'gamma': 0, # this is not CF compliant + 'grid_mapping_name': "oblique_mercator", + 'latitude_of_projection_origin': -0.256794486098476, + 'longitude_of_projection_origin': 13.7888658224205, + # 'prime_meridian_name': "Greenwich", + 'reference_ellipsoid_name': "WGS 84"} + omerc_expected = xr.DataArray(data=0, attrs=omerc_dict) + + ds = ds_base.copy() + ds.attrs['area'] = area + new_ds, grid_mapping = _add_grid_mapping(ds) + + assert new_ds.attrs['grid_mapping'] == 'omerc_otf' + _gm_matches(grid_mapping, omerc_expected) + + # f) Projection that has a representation but no explicit a/b + h = 35785831. 
+ geos = AreaDefinition( + area_id='geos', + description='geos', + proj_id='geos', + projection={'proj': 'geos', 'h': h, 'datum': 'WGS84', 'ellps': 'GRS80', + 'lat_0': 0, 'lon_0': 0}, + width=2, height=2, + area_extent=[-1, -1, 1, 1]) + geos_expected = xr.DataArray(data=0, + attrs={'perspective_point_height': h, + 'latitude_of_projection_origin': 0, + 'longitude_of_projection_origin': 0, + 'grid_mapping_name': 'geostationary', + 'reference_ellipsoid_name': 'WGS 84', + }) + + ds = ds_base.copy() + ds.attrs['area'] = geos + new_ds, grid_mapping = _add_grid_mapping(ds) + + assert new_ds.attrs['grid_mapping'] == 'geos' + _gm_matches(grid_mapping, geos_expected) + + def test_add_lonlat_coords(self): + """Test the conversion from areas to lon/lat.""" + from satpy.writers.cf.area import add_lonlat_coords + + area = AreaDefinition( + 'seviri', + 'Native SEVIRI grid', + 'geos', + "+a=6378169.0 +h=35785831.0 +b=6356583.8 +lon_0=0 +proj=geos", + 2, 2, + [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662] + ) + lons_ref, lats_ref = area.get_lonlats() + dataarray = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), attrs={'area': area}) + + res = add_lonlat_coords(dataarray) + + # original should be unmodified + assert 'longitude' not in dataarray.coords + assert set(res.coords) == {'longitude', 'latitude'} + lat = res['latitude'] + lon = res['longitude'] + np.testing.assert_array_equal(lat.data, lats_ref) + np.testing.assert_array_equal(lon.data, lons_ref) + assert {'name': 'latitude', 'standard_name': 'latitude', 'units': 'degrees_north'}.items() <= lat.attrs.items() + assert {'name': 'longitude', 'standard_name': 'longitude', 'units': 'degrees_east'}.items() <= lon.attrs.items() + + area = AreaDefinition( + 'seviri', + 'Native SEVIRI grid', + 'geos', + "+a=6378169.0 +h=35785831.0 +b=6356583.8 +lon_0=0 +proj=geos", + 10, 10, + [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662] + ) + lons_ref, lats_ref = area.get_lonlats() + dataarray = xr.DataArray(data=da.from_array(np.arange(3 * 10 * 10).reshape(3, 10, 10), chunks=(1, 5, 5)), + dims=('bands', 'y', 'x'), attrs={'area': area}) + res = add_lonlat_coords(dataarray) + + # original should be unmodified + assert 'longitude' not in dataarray.coords + assert set(res.coords) == {'longitude', 'latitude'} + lat = res['latitude'] + lon = res['longitude'] + np.testing.assert_array_equal(lat.data, lats_ref) + np.testing.assert_array_equal(lon.data, lons_ref) + assert {'name': 'latitude', 'standard_name': 'latitude', 'units': 'degrees_north'}.items() <= lat.attrs.items() + assert {'name': 'longitude', 'standard_name': 'longitude', 'units': 'degrees_east'}.items() <= lon.attrs.items() diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 2b0a5dfc6c..005509f165 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -25,7 +25,6 @@ from collections import OrderedDict from datetime import datetime -import dask.array as da import numpy as np import pyresample.geometry import pytest @@ -720,380 +719,6 @@ def test_collect_cf_dataarrays(self): assert 'grid_mapping' not in da_var2.attrs assert da_var2.attrs['long_name'] == 'variable 2' - def test_assert_xy_unique(self): - """Test that the x and y coordinates are unique.""" - from satpy.writers.cf_writer import assert_xy_unique - - dummy = [[1, 2], [3, 4]] - datas = {'a': xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}), - 'b': xr.DataArray(data=dummy, dims=('y', 'x'), 
coords={'y': [1, 2], 'x': [3, 4]}), - 'n': xr.DataArray(data=dummy, dims=('v', 'w'), coords={'v': [1, 2], 'w': [3, 4]})} - assert_xy_unique(datas) - - datas['c'] = xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 3], 'x': [3, 4]}) - with pytest.raises(ValueError): - assert_xy_unique(datas) - - def test_link_coords(self): - """Check that coordinates link has been established correctly.""" - from satpy.writers.cf_writer import link_coords - - data = [[1, 2], [3, 4]] - lon = np.zeros((2, 2)) - lon2 = np.zeros((1, 2, 2)) - lat = np.ones((2, 2)) - datasets = { - 'var1': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'lon lat'}), - 'var2': xr.DataArray(data=data, dims=('y', 'x')), - 'var3': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'lon2 lat'}), - 'var4': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'not_exist lon lat'}), - 'lon': xr.DataArray(data=lon, dims=('y', 'x')), - 'lon2': xr.DataArray(data=lon2, dims=('time', 'y', 'x')), - 'lat': xr.DataArray(data=lat, dims=('y', 'x')) - } - - link_coords(datasets) - - # Check that link has been established correctly and 'coordinate' atrribute has been dropped - assert 'lon' in datasets['var1'].coords - assert 'lat' in datasets['var1'].coords - np.testing.assert_array_equal(datasets['var1']['lon'].data, lon) - np.testing.assert_array_equal(datasets['var1']['lat'].data, lat) - assert 'coordinates' not in datasets['var1'].attrs - - # There should be no link if there was no 'coordinate' attribute - assert 'lon' not in datasets['var2'].coords - assert 'lat' not in datasets['var2'].coords - - # The non-existent dimension or coordinate should be dropped - assert 'time' not in datasets['var3'].coords - assert 'not_exist' not in datasets['var4'].coords - - def test_make_alt_coords_unique(self): - """Test that created coordinate variables are unique.""" - from satpy.writers.cf_writer import make_alt_coords_unique - - data = [[1, 2], [3, 4]] - y = [1, 2] - x = [1, 2] - time1 = [1, 2] - time2 = [3, 4] - datasets = {'var1': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x, 'acq_time': ('y', time1)}), - 'var2': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x, 'acq_time': ('y', time2)})} - - # Test that dataset names are prepended to alternative coordinates - res = make_alt_coords_unique(datasets) - np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) - np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) - assert 'acq_time' not in res['var1'].coords - assert 'acq_time' not in res['var2'].coords - - # Make sure nothing else is modified - np.testing.assert_array_equal(res['var1']['x'], x) - np.testing.assert_array_equal(res['var1']['y'], y) - np.testing.assert_array_equal(res['var2']['x'], x) - np.testing.assert_array_equal(res['var2']['y'], y) - - # Coords not unique -> Dataset names must be prepended, even if pretty=True - with pytest.warns(UserWarning, match='Cannot pretty-format "acq_time"'): - res = make_alt_coords_unique(datasets, pretty=True) - np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) - np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) - assert 'acq_time' not in res['var1'].coords - assert 'acq_time' not in res['var2'].coords - - # Coords unique and pretty=True -> Don't modify coordinate names - datasets['var2']['acq_time'] = ('y', time1) - res = make_alt_coords_unique(datasets, pretty=True) - np.testing.assert_array_equal(res['var1']['acq_time'], time1) - 
np.testing.assert_array_equal(res['var2']['acq_time'], time1) - assert 'var1_acq_time' not in res['var1'].coords - assert 'var2_acq_time' not in res['var2'].coords - - def test_area2cf(self): - """Test the conversion of an area to CF standards.""" - from satpy.writers.cf_writer import area2cf - - ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}, - attrs={'name': 'var1'}) - - # a) Area Definition and strict=False - geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, - width=2, height=2, - area_extent=[-1, -1, 1, 1]) - ds = ds_base.copy(deep=True) - ds.attrs['area'] = geos - - res = area2cf(ds, include_lonlats=False) - assert len(res) == 2 - assert res[0].size == 1 # grid mapping variable - assert res[0].name == res[1].attrs['grid_mapping'] - - # b) Area Definition and include_lonlats=False - ds = ds_base.copy(deep=True) - ds.attrs['area'] = geos - res = area2cf(ds, include_lonlats=True) - # same as above - assert len(res) == 2 - assert res[0].size == 1 # grid mapping variable - assert res[0].name == res[1].attrs['grid_mapping'] - # but now also have the lon/lats - assert 'longitude' in res[1].coords - assert 'latitude' in res[1].coords - - # c) Swath Definition - swath = pyresample.geometry.SwathDefinition(lons=[[1, 1], [2, 2]], lats=[[1, 2], [1, 2]]) - ds = ds_base.copy(deep=True) - ds.attrs['area'] = swath - - res = area2cf(ds, include_lonlats=False) - assert len(res) == 1 - assert 'longitude' in res[0].coords - assert 'latitude' in res[0].coords - assert 'grid_mapping' not in res[0].attrs - - def test__add_grid_mapping(self): - """Test the conversion from pyresample area object to CF grid mapping.""" - from satpy.writers.cf_writer import _add_grid_mapping - - def _gm_matches(gmapping, expected): - """Assert that all keys in ``expected`` match the values in ``gmapping``.""" - for attr_key, attr_val in expected.attrs.items(): - test_val = gmapping.attrs[attr_key] - if attr_val is None or isinstance(attr_val, str): - assert test_val == attr_val - else: - np.testing.assert_almost_equal(test_val, attr_val, decimal=3) - - ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}, - attrs={'name': 'var1'}) - - # a) Projection has a corresponding CF representation (e.g. geos) - a = 6378169. - b = 6356583.8 - h = 35785831. 
- geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': h, 'a': a, 'b': b, - 'lat_0': 0, 'lon_0': 0}, - width=2, height=2, - area_extent=[-1, -1, 1, 1]) - geos_expected = xr.DataArray(data=0, - attrs={'perspective_point_height': h, - 'latitude_of_projection_origin': 0, - 'longitude_of_projection_origin': 0, - 'grid_mapping_name': 'geostationary', - 'semi_major_axis': a, - 'semi_minor_axis': b, - # 'sweep_angle_axis': None, - }) - - ds = ds_base.copy() - ds.attrs['area'] = geos - new_ds, grid_mapping = _add_grid_mapping(ds) - if 'sweep_angle_axis' in grid_mapping.attrs: - # older versions of pyproj might not include this - assert grid_mapping.attrs['sweep_angle_axis'] == 'y' - - assert new_ds.attrs['grid_mapping'] == 'geos' - _gm_matches(grid_mapping, geos_expected) - # should not have been modified - assert 'grid_mapping' not in ds.attrs - - # b) Projection does not have a corresponding CF representation (COSMO) - cosmo7 = pyresample.geometry.AreaDefinition( - area_id='cosmo7', - description='cosmo7', - proj_id='cosmo7', - projection={'proj': 'ob_tran', 'ellps': 'WGS84', 'lat_0': 46, 'lon_0': 4.535, - 'o_proj': 'stere', 'o_lat_p': 90, 'o_lon_p': -5.465}, - width=597, height=510, - area_extent=[-1812933, -1003565, 814056, 1243448] - ) - - ds = ds_base.copy() - ds.attrs['area'] = cosmo7 - - new_ds, grid_mapping = _add_grid_mapping(ds) - assert 'crs_wkt' in grid_mapping.attrs - wkt = grid_mapping.attrs['crs_wkt'] - assert 'ELLIPSOID["WGS 84"' in wkt - assert 'PARAMETER["lat_0",46' in wkt - assert 'PARAMETER["lon_0",4.535' in wkt - assert 'PARAMETER["o_lat_p",90' in wkt - assert 'PARAMETER["o_lon_p",-5.465' in wkt - assert new_ds.attrs['grid_mapping'] == 'cosmo7' - - # c) Projection Transverse Mercator - lat_0 = 36.5 - lon_0 = 15.0 - - tmerc = pyresample.geometry.AreaDefinition( - area_id='tmerc', - description='tmerc', - proj_id='tmerc', - projection={'proj': 'tmerc', 'ellps': 'WGS84', 'lat_0': 36.5, 'lon_0': 15.0}, - width=2, height=2, - area_extent=[-1, -1, 1, 1]) - - tmerc_expected = xr.DataArray(data=0, - attrs={'latitude_of_projection_origin': lat_0, - 'longitude_of_central_meridian': lon_0, - 'grid_mapping_name': 'transverse_mercator', - 'reference_ellipsoid_name': 'WGS 84', - 'false_easting': 0., - 'false_northing': 0., - }) - - ds = ds_base.copy() - ds.attrs['area'] = tmerc - new_ds, grid_mapping = _add_grid_mapping(ds) - assert new_ds.attrs['grid_mapping'] == 'tmerc' - _gm_matches(grid_mapping, tmerc_expected) - - # d) Projection that has a representation but no explicit a/b - h = 35785831. 
- geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': h, 'datum': 'WGS84', 'ellps': 'GRS80', - 'lat_0': 0, 'lon_0': 0}, - width=2, height=2, - area_extent=[-1, -1, 1, 1]) - geos_expected = xr.DataArray(data=0, - attrs={'perspective_point_height': h, - 'latitude_of_projection_origin': 0, - 'longitude_of_projection_origin': 0, - 'grid_mapping_name': 'geostationary', - # 'semi_major_axis': 6378137.0, - # 'semi_minor_axis': 6356752.314, - # 'sweep_angle_axis': None, - }) - - ds = ds_base.copy() - ds.attrs['area'] = geos - new_ds, grid_mapping = _add_grid_mapping(ds) - - assert new_ds.attrs['grid_mapping'] == 'geos' - _gm_matches(grid_mapping, geos_expected) - - # e) oblique Mercator - area = pyresample.geometry.AreaDefinition( - area_id='omerc_otf', - description='On-the-fly omerc area', - proj_id='omerc', - projection={'alpha': '9.02638777018478', 'ellps': 'WGS84', 'gamma': '0', 'k': '1', - 'lat_0': '-0.256794486098476', 'lonc': '13.7888658224205', - 'proj': 'omerc', 'units': 'm'}, - width=2837, - height=5940, - area_extent=[-1460463.0893, 3455291.3877, 1538407.1158, 9615788.8787] - ) - - omerc_dict = {'azimuth_of_central_line': 9.02638777018478, - 'false_easting': 0., - 'false_northing': 0., - # 'gamma': 0, # this is not CF compliant - 'grid_mapping_name': "oblique_mercator", - 'latitude_of_projection_origin': -0.256794486098476, - 'longitude_of_projection_origin': 13.7888658224205, - # 'prime_meridian_name': "Greenwich", - 'reference_ellipsoid_name': "WGS 84"} - omerc_expected = xr.DataArray(data=0, attrs=omerc_dict) - - ds = ds_base.copy() - ds.attrs['area'] = area - new_ds, grid_mapping = _add_grid_mapping(ds) - - assert new_ds.attrs['grid_mapping'] == 'omerc_otf' - _gm_matches(grid_mapping, omerc_expected) - - # f) Projection that has a representation but no explicit a/b - h = 35785831. 
- geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': h, 'datum': 'WGS84', 'ellps': 'GRS80', - 'lat_0': 0, 'lon_0': 0}, - width=2, height=2, - area_extent=[-1, -1, 1, 1]) - geos_expected = xr.DataArray(data=0, - attrs={'perspective_point_height': h, - 'latitude_of_projection_origin': 0, - 'longitude_of_projection_origin': 0, - 'grid_mapping_name': 'geostationary', - 'reference_ellipsoid_name': 'WGS 84', - }) - - ds = ds_base.copy() - ds.attrs['area'] = geos - new_ds, grid_mapping = _add_grid_mapping(ds) - - assert new_ds.attrs['grid_mapping'] == 'geos' - _gm_matches(grid_mapping, geos_expected) - - def test_add_lonlat_coords(self): - """Test the conversion from areas to lon/lat.""" - from satpy.writers.cf_writer import add_lonlat_coords - - area = pyresample.geometry.AreaDefinition( - 'seviri', - 'Native SEVIRI grid', - 'geos', - "+a=6378169.0 +h=35785831.0 +b=6356583.8 +lon_0=0 +proj=geos", - 2, 2, - [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662] - ) - lons_ref, lats_ref = area.get_lonlats() - dataarray = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), attrs={'area': area}) - - res = add_lonlat_coords(dataarray) - - # original should be unmodified - assert 'longitude' not in dataarray.coords - assert set(res.coords) == {'longitude', 'latitude'} - lat = res['latitude'] - lon = res['longitude'] - np.testing.assert_array_equal(lat.data, lats_ref) - np.testing.assert_array_equal(lon.data, lons_ref) - assert {'name': 'latitude', 'standard_name': 'latitude', 'units': 'degrees_north'}.items() <= lat.attrs.items() - assert {'name': 'longitude', 'standard_name': 'longitude', 'units': 'degrees_east'}.items() <= lon.attrs.items() - - area = pyresample.geometry.AreaDefinition( - 'seviri', - 'Native SEVIRI grid', - 'geos', - "+a=6378169.0 +h=35785831.0 +b=6356583.8 +lon_0=0 +proj=geos", - 10, 10, - [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662] - ) - lons_ref, lats_ref = area.get_lonlats() - dataarray = xr.DataArray(data=da.from_array(np.arange(3 * 10 * 10).reshape(3, 10, 10), chunks=(1, 5, 5)), - dims=('bands', 'y', 'x'), attrs={'area': area}) - res = add_lonlat_coords(dataarray) - - # original should be unmodified - assert 'longitude' not in dataarray.coords - assert set(res.coords) == {'longitude', 'latitude'} - lat = res['latitude'] - lon = res['longitude'] - np.testing.assert_array_equal(lat.data, lats_ref) - np.testing.assert_array_equal(lon.data, lons_ref) - assert {'name': 'latitude', 'standard_name': 'latitude', 'units': 'degrees_north'}.items() <= lat.attrs.items() - assert {'name': 'longitude', 'standard_name': 'longitude', 'units': 'degrees_east'}.items() <= lon.attrs.items() - def test_load_module_with_old_pyproj(self): """Test that cf_writer can still be loaded with pyproj 1.9.6.""" import importlib @@ -1188,14 +813,14 @@ def datasets(self): def test_is_lon_or_lat_dataarray(self, datasets): """Test the is_lon_or_lat_dataarray function.""" - from satpy.writers.cf_writer import is_lon_or_lat_dataarray + from satpy.writers.cf.area import is_lon_or_lat_dataarray assert is_lon_or_lat_dataarray(datasets['lat']) assert not is_lon_or_lat_dataarray(datasets['var1']) def test_has_projection_coords(self, datasets): """Test the has_projection_coords function.""" - from satpy.writers.cf_writer import has_projection_coords + from satpy.writers.cf.area import has_projection_coords assert has_projection_coords(datasets) 
        datasets['lat'].attrs['standard_name'] = 'dummy'
diff --git a/satpy/writers/cf/area.py b/satpy/writers/cf/area.py
new file mode 100644
index 0000000000..68113c1ee2
--- /dev/null
+++ b/satpy/writers/cf/area.py
@@ -0,0 +1,192 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2017-2023 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""CF processing of pyresample area information."""
+import logging
+import warnings
+from collections import defaultdict
+
+import xarray as xr
+from dask.base import tokenize
+from packaging.version import Version
+from pyresample.geometry import AreaDefinition, SwathDefinition
+
+logger = logging.getLogger(__name__)
+
+
+def add_lonlat_coords(dataarray):
+    """Add 'longitude' and 'latitude' coordinates to DataArray."""
+    dataarray = dataarray.copy()
+    area = dataarray.attrs['area']
+    ignore_dims = {dim: 0 for dim in dataarray.dims if dim not in ['x', 'y']}
+    chunks = getattr(dataarray.isel(**ignore_dims), 'chunks', None)
+    lons, lats = area.get_lonlats(chunks=chunks)
+    dataarray['longitude'] = xr.DataArray(lons, dims=['y', 'x'],
+                                          attrs={'name': "longitude",
+                                                 'standard_name': "longitude",
+                                                 'units': 'degrees_east'},
+                                          name='longitude')
+    dataarray['latitude'] = xr.DataArray(lats, dims=['y', 'x'],
+                                         attrs={'name': "latitude",
+                                                'standard_name': "latitude",
+                                                'units': 'degrees_north'},
+                                         name='latitude')
+    return dataarray
+
+
+def _create_grid_mapping(area):
+    """Create the grid mapping instance for `area`."""
+    import pyproj
+
+    if Version(pyproj.__version__) < Version('2.4.1'):
+        # technically 2.2, but important bug fixes in 2.4.1
+        raise ImportError("'cf' writer requires pyproj 2.4.1 or greater")
+    # let pyproj do the heavy lifting (pyproj 2.0+ required)
+    grid_mapping = area.crs.to_cf()
+    return area.area_id, grid_mapping
+
+
+def _add_grid_mapping(dataarray):
+    """Convert an area to a CF grid mapping."""
+    dataarray = dataarray.copy()
+    area = dataarray.attrs['area']
+    gmapping_var_name, attrs = _create_grid_mapping(area)
+    dataarray.attrs['grid_mapping'] = gmapping_var_name
+    return dataarray, xr.DataArray(0, attrs=attrs, name=gmapping_var_name)
+
+
+def area2cf(dataarray, include_lonlats=False, got_lonlats=False):
+    """Convert an area to a CF grid mapping or lon/lat coordinates."""
+    res = []
+    if not got_lonlats and (isinstance(dataarray.attrs['area'], SwathDefinition) or include_lonlats):
+        dataarray = add_lonlat_coords(dataarray)
+    if isinstance(dataarray.attrs['area'], AreaDefinition):
+        dataarray, gmapping = _add_grid_mapping(dataarray)
+        res.append(gmapping)
+    res.append(dataarray)
+    return res
+
+
+def is_lon_or_lat_dataarray(dataarray):
+    """Check if the DataArray represents the latitude or longitude coordinate."""
+    if 'standard_name' in dataarray.attrs and dataarray.attrs['standard_name'] in ['longitude', 'latitude']:
+        return True
+    return False
+
+
+def has_projection_coords(ds_collection):
+    """Check if DataArray collection has a "longitude" 
or "latitude" DataArray.""" + for dataarray in ds_collection.values(): + if is_lon_or_lat_dataarray(dataarray): + return True + return False + + +def make_alt_coords_unique(datas, pretty=False): + """Make non-dimensional coordinates unique among all datasets. + + Non-dimensional (or alternative) coordinates, such as scanline timestamps, + may occur in multiple datasets with the same name and dimension + but different values. + + In order to avoid conflicts, prepend the dataset name to the coordinate name. + If a non-dimensional coordinate is unique among all datasets and ``pretty=True``, + its name will not be modified. + + Since all datasets must have the same projection coordinates, + this is not applied to latitude and longitude. + + Args: + datas (dict): + Dictionary of (dataset name, dataset) + pretty (bool): + Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. + + Returns: + Dictionary holding the updated datasets + + """ + # Determine which non-dimensional coordinates are unique + tokens = defaultdict(set) + for dataset in datas.values(): + for coord_name in dataset.coords: + if not is_lon_or_lat_dataarray(dataset[coord_name]) and coord_name not in dataset.dims: + tokens[coord_name].add(tokenize(dataset[coord_name].data)) + coords_unique = dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()]) + + # Prepend dataset name, if not unique or no pretty-format desired + new_datas = datas.copy() + for coord_name, unique in coords_unique.items(): + if not pretty or not unique: + if pretty: + warnings.warn( + 'Cannot pretty-format "{}" coordinates because they are ' + 'not identical among the given datasets'.format(coord_name), + stacklevel=2 + ) + for ds_name, dataset in datas.items(): + if coord_name in dataset.coords: + rename = {coord_name: '{}_{}'.format(ds_name, coord_name)} + new_datas[ds_name] = new_datas[ds_name].rename(rename) + + return new_datas + + +def assert_xy_unique(datas): + """Check that all datasets share the same projection coordinates x/y.""" + unique_x = set() + unique_y = set() + for dataset in datas.values(): + if 'y' in dataset.dims: + token_y = tokenize(dataset['y'].data) + unique_y.add(token_y) + if 'x' in dataset.dims: + token_x = tokenize(dataset['x'].data) + unique_x.add(token_x) + if len(unique_x) > 1 or len(unique_y) > 1: + raise ValueError('Datasets to be saved in one file (or one group) must have identical projection coordinates. ' + 'Please group them by area or save them in separate files.') + + +def link_coords(datas): + """Link dataarrays and coordinates. + + If the `coordinates` attribute of a data array links to other dataarrays in the scene, for example + `coordinates='lon lat'`, add them as coordinates to the data array and drop that attribute. In the final call to + `xr.Dataset.to_netcdf()` all coordinate relations will be resolved and the `coordinates` attributes be set + automatically. 
+ + """ + for da_name, data in datas.items(): + declared_coordinates = data.attrs.get('coordinates', []) + if isinstance(declared_coordinates, str): + declared_coordinates = declared_coordinates.split(' ') + for coord in declared_coordinates: + if coord not in data.coords: + try: + dimensions_not_in_data = list(set(datas[coord].dims) - set(data.dims)) + data[coord] = datas[coord].squeeze(dimensions_not_in_data, drop=True) + except KeyError: + warnings.warn( + 'Coordinate "{}" referenced by dataarray {} does not ' + 'exist, dropping reference.'.format(coord, da_name), + stacklevel=2 + ) + continue + + # Drop 'coordinates' attribute in any case to avoid conflicts in xr.Dataset.to_netcdf() + data.attrs.pop('coordinates', None) diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index b9a24b9292..4c672b70b6 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -164,9 +164,7 @@ import numpy as np import xarray as xr -from dask.base import tokenize from packaging.version import Version -from pyresample.geometry import AreaDefinition, SwathDefinition from xarray.coding.times import CFDatetimeCoder from satpy.writers import Writer @@ -228,174 +226,6 @@ def get_extra_ds(dataarray, keys=None): return ds_collection -# ###--------------------------------------------------------------------------. -# ### CF-Area - - -def add_lonlat_coords(dataarray): - """Add 'longitude' and 'latitude' coordinates to DataArray.""" - dataarray = dataarray.copy() - area = dataarray.attrs['area'] - ignore_dims = {dim: 0 for dim in dataarray.dims if dim not in ['x', 'y']} - chunks = getattr(dataarray.isel(**ignore_dims), 'chunks', None) - lons, lats = area.get_lonlats(chunks=chunks) - dataarray['longitude'] = xr.DataArray(lons, dims=['y', 'x'], - attrs={'name': "longitude", - 'standard_name': "longitude", - 'units': 'degrees_east'}, - name='longitude') - dataarray['latitude'] = xr.DataArray(lats, dims=['y', 'x'], - attrs={'name': "latitude", - 'standard_name': "latitude", - 'units': 'degrees_north'}, - name='latitude') - return dataarray - - -def _create_grid_mapping(area): - """Create the grid mapping instance for `area`.""" - import pyproj - - if Version(pyproj.__version__) < Version('2.4.1'): - # technically 2.2, but important bug fixes in 2.4.1 - raise ImportError("'cf' writer requires pyproj 2.4.1 or greater") - # let pyproj do the heavily lifting (pyproj 2.0+ required) - grid_mapping = area.crs.to_cf() - return area.area_id, grid_mapping - - -def _add_grid_mapping(dataarray): - """Convert an area to at CF grid mapping.""" - dataarray = dataarray.copy() - area = dataarray.attrs['area'] - gmapping_var_name, attrs = _create_grid_mapping(area) - dataarray.attrs['grid_mapping'] = gmapping_var_name - return dataarray, xr.DataArray(0, attrs=attrs, name=gmapping_var_name) - - -def area2cf(dataarray, include_lonlats=False, got_lonlats=False): - """Convert an area to at CF grid mapping or lon and lats.""" - res = [] - if not got_lonlats and (isinstance(dataarray.attrs['area'], SwathDefinition) or include_lonlats): - dataarray = add_lonlat_coords(dataarray) - if isinstance(dataarray.attrs['area'], AreaDefinition): - dataarray, gmapping = _add_grid_mapping(dataarray) - res.append(gmapping) - res.append(dataarray) - return res - - -def is_lon_or_lat_dataarray(dataarray): - """Check if the DataArray represents the latitude or longitude coordinate.""" - if 'standard_name' in dataarray.attrs and dataarray.attrs['standard_name'] in ['longitude', 'latitude']: - return True - return False - - -def 
has_projection_coords(ds_collection): - """Check if DataArray collection has a "longitude" or "latitude" DataArray.""" - for dataarray in ds_collection.values(): - if is_lon_or_lat_dataarray(dataarray): - return True - return False - - -def make_alt_coords_unique(datas, pretty=False): - """Make non-dimensional coordinates unique among all datasets. - - Non-dimensional (or alternative) coordinates, such as scanline timestamps, - may occur in multiple datasets with the same name and dimension - but different values. - - In order to avoid conflicts, prepend the dataset name to the coordinate name. - If a non-dimensional coordinate is unique among all datasets and ``pretty=True``, - its name will not be modified. - - Since all datasets must have the same projection coordinates, - this is not applied to latitude and longitude. - - Args: - datas (dict): - Dictionary of (dataset name, dataset) - pretty (bool): - Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. - - Returns: - Dictionary holding the updated datasets - - """ - # Determine which non-dimensional coordinates are unique - tokens = defaultdict(set) - for dataset in datas.values(): - for coord_name in dataset.coords: - if not is_lon_or_lat_dataarray(dataset[coord_name]) and coord_name not in dataset.dims: - tokens[coord_name].add(tokenize(dataset[coord_name].data)) - coords_unique = dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()]) - - # Prepend dataset name, if not unique or no pretty-format desired - new_datas = datas.copy() - for coord_name, unique in coords_unique.items(): - if not pretty or not unique: - if pretty: - warnings.warn( - 'Cannot pretty-format "{}" coordinates because they are ' - 'not identical among the given datasets'.format(coord_name), - stacklevel=2 - ) - for ds_name, dataset in datas.items(): - if coord_name in dataset.coords: - rename = {coord_name: '{}_{}'.format(ds_name, coord_name)} - new_datas[ds_name] = new_datas[ds_name].rename(rename) - - return new_datas - - -def assert_xy_unique(datas): - """Check that all datasets share the same projection coordinates x/y.""" - unique_x = set() - unique_y = set() - for dataset in datas.values(): - if 'y' in dataset.dims: - token_y = tokenize(dataset['y'].data) - unique_y.add(token_y) - if 'x' in dataset.dims: - token_x = tokenize(dataset['x'].data) - unique_x.add(token_x) - if len(unique_x) > 1 or len(unique_y) > 1: - raise ValueError('Datasets to be saved in one file (or one group) must have identical projection coordinates. ' - 'Please group them by area or save them in separate files.') - - -def link_coords(datas): - """Link dataarrays and coordinates. - - If the `coordinates` attribute of a data array links to other dataarrays in the scene, for example - `coordinates='lon lat'`, add them as coordinates to the data array and drop that attribute. In the final call to - `xr.Dataset.to_netcdf()` all coordinate relations will be resolved and the `coordinates` attributes be set - automatically. 
- - """ - for da_name, data in datas.items(): - declared_coordinates = data.attrs.get('coordinates', []) - if isinstance(declared_coordinates, str): - declared_coordinates = declared_coordinates.split(' ') - for coord in declared_coordinates: - if coord not in data.coords: - try: - dimensions_not_in_data = list(set(datas[coord].dims) - set(data.dims)) - data[coord] = datas[coord].squeeze(dimensions_not_in_data, drop=True) - except KeyError: - warnings.warn( - 'Coordinate "{}" referenced by dataarray {} does not ' - 'exist, dropping reference.'.format(coord, da_name), - stacklevel=2 - ) - continue - - # Drop 'coordinates' attribute in any case to avoid conflicts in xr.Dataset.to_netcdf() - data.attrs.pop('coordinates', None) - - # ###--------------------------------------------------------------------------. # ### CF-Time def add_time_bounds_dimension(ds, time="time"): @@ -864,6 +694,14 @@ def _collect_cf_dataset(list_dataarrays, ds : xr.Dataset A partially CF-compliant xr.Dataset """ + from satpy.writers.cf.area import ( + area2cf, + assert_xy_unique, + has_projection_coords, + link_coords, + make_alt_coords_unique, + ) + # Create dictionary of input datarrays # --> Since keys=None, it doesn't never retrieve ancillary variables !!! ds_collection = {} From 4b195664431d0eb4a0bcb7271afacabcdd6a5573 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 27 Jun 2023 23:10:01 +0200 Subject: [PATCH 0324/1416] Refactor attrs-related functions --- .../tests/writer_tests/cf_tests/test_attrs.py | 144 ++++++++++ .../writer_tests/cf_tests/test_time_coords.py | 44 +++ satpy/tests/writer_tests/test_cf.py | 44 +-- satpy/writers/cf/attrs.py | 220 +++++++++++++++ satpy/writers/cf/time.py | 62 +++++ satpy/writers/cf_writer.py | 254 +----------------- 6 files changed, 479 insertions(+), 289 deletions(-) create mode 100644 satpy/tests/writer_tests/cf_tests/test_attrs.py create mode 100644 satpy/tests/writer_tests/cf_tests/test_time_coords.py create mode 100644 satpy/writers/cf/attrs.py create mode 100644 satpy/writers/cf/time.py diff --git a/satpy/tests/writer_tests/cf_tests/test_attrs.py b/satpy/tests/writer_tests/cf_tests/test_attrs.py new file mode 100644 index 0000000000..a7b36837b4 --- /dev/null +++ b/satpy/tests/writer_tests/cf_tests/test_attrs.py @@ -0,0 +1,144 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""Tests for CF-compatible attributes encoding.""" +import datetime +import json +from collections import OrderedDict + +import numpy as np + + +class TestCFAttributeEncoding: + """Test case for CF attribute encodings.""" + + def get_test_attrs(self): + """Create some dataset attributes for testing purpose. 
+ + Returns: + Attributes, encoded attributes, encoded and flattened attributes + + """ + # TODO: this is also used by test_da2cf + attrs = {'name': 'IR_108', + 'start_time': datetime(2018, 1, 1, 0), + 'end_time': datetime(2018, 1, 1, 0, 15), + 'int': 1, + 'float': 1.0, + 'none': None, # should be dropped + 'numpy_int': np.uint8(1), + 'numpy_float': np.float32(1), + 'numpy_bool': True, + 'numpy_void': np.void(0), + 'numpy_bytes': np.bytes_('test'), + 'numpy_string': np.string_('test'), + 'list': [1, 2, np.float64(3)], + 'nested_list': ["1", ["2", [3]]], + 'bool': True, + 'array': np.array([1, 2, 3], dtype='uint8'), + 'array_bool': np.array([True, False, True]), + 'array_2d': np.array([[1, 2], [3, 4]]), + 'array_3d': np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), + 'dict': {'a': 1, 'b': 2}, + 'nested_dict': {'l1': {'l2': {'l3': np.array([1, 2, 3], dtype='uint8')}}}, + 'raw_metadata': OrderedDict([ + ('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])), + ('flag', np.bool_(True)), + ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))])) + ])} + encoded = {'name': 'IR_108', + 'start_time': '2018-01-01 00:00:00', + 'end_time': '2018-01-01 00:15:00', + 'int': 1, + 'float': 1.0, + 'numpy_int': np.uint8(1), + 'numpy_float': np.float32(1), + 'numpy_bool': 'true', + 'numpy_void': '[]', + 'numpy_bytes': 'test', + 'numpy_string': 'test', + 'list': [1, 2, np.float64(3)], + 'nested_list': '["1", ["2", [3]]]', + 'bool': 'true', + 'array': np.array([1, 2, 3], dtype='uint8'), + 'array_bool': ['true', 'false', 'true'], + 'array_2d': '[[1, 2], [3, 4]]', + 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', + 'dict': '{"a": 1, "b": 2}', + 'nested_dict': '{"l1": {"l2": {"l3": [1, 2, 3]}}}', + 'raw_metadata': '{"recarray": [[0, 0], [0, 0], [0, 0]], ' + '"flag": "true", "dict": {"a": 1, "b": [1, 2, 3]}}'} + encoded_flat = {'name': 'IR_108', + 'start_time': '2018-01-01 00:00:00', + 'end_time': '2018-01-01 00:15:00', + 'int': 1, + 'float': 1.0, + 'numpy_int': np.uint8(1), + 'numpy_float': np.float32(1), + 'numpy_bool': 'true', + 'numpy_void': '[]', + 'numpy_bytes': 'test', + 'numpy_string': 'test', + 'list': [1, 2, np.float64(3)], + 'nested_list': '["1", ["2", [3]]]', + 'bool': 'true', + 'array': np.array([1, 2, 3], dtype='uint8'), + 'array_bool': ['true', 'false', 'true'], + 'array_2d': '[[1, 2], [3, 4]]', + 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', + 'dict_a': 1, + 'dict_b': 2, + 'nested_dict_l1_l2_l3': np.array([1, 2, 3], dtype='uint8'), + 'raw_metadata_recarray': '[[0, 0], [0, 0], [0, 0]]', + 'raw_metadata_flag': 'true', + 'raw_metadata_dict_a': 1, + 'raw_metadata_dict_b': np.array([1, 2, 3], dtype='uint8')} + return attrs, encoded, encoded_flat + + def assertDictWithArraysEqual(self, d1, d2): + """Check that dicts containing arrays are equal.""" + # TODO: this is also used by test_da2cf + assert set(d1.keys()) == set(d2.keys()) + for key, val1 in d1.items(): + val2 = d2[key] + if isinstance(val1, np.ndarray): + np.testing.assert_array_equal(val1, val2) + assert val1.dtype == val2.dtype + else: + assert val1 == val2 + if isinstance(val1, (np.floating, np.integer, np.bool_)): + assert isinstance(val2, np.generic) + assert val1.dtype == val2.dtype + + def test_encode_attrs_nc(self): + """Test attributes encoding.""" + from satpy.writers.cf.attrs import encode_attrs_nc + + attrs, expected, _ = self.get_test_attrs() + + # Test encoding + encoded = encode_attrs_nc(attrs) + self.assertDictWithArraysEqual(expected, encoded) + + # Test decoding of json-encoded attributes + 
raw_md_roundtrip = {'recarray': [[0, 0], [0, 0], [0, 0]], + 'flag': 'true', + 'dict': {'a': 1, 'b': [1, 2, 3]}} + assert json.loads(encoded['raw_metadata']) == raw_md_roundtrip + assert json.loads(encoded['array_3d']) == [[[1, 2], [3, 4]], [[1, 2], [3, 4]]] + assert json.loads(encoded['nested_dict']) == {"l1": {"l2": {"l3": [1, 2, 3]}}} + assert json.loads(encoded['nested_list']) == ["1", ["2", [3]]] diff --git a/satpy/tests/writer_tests/cf_tests/test_time_coords.py b/satpy/tests/writer_tests/cf_tests/test_time_coords.py new file mode 100644 index 0000000000..ce7845dcca --- /dev/null +++ b/satpy/tests/writer_tests/cf_tests/test_time_coords.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""CF processing of time information (coordinates and dimensions).""" +import numpy as np +import xarray as xr + + +class TestCFtime: + """Test cases for CF time dimension and coordinates.""" + + def test_add_time_bounds_dimension(self): + """Test addition of CF-compliant time attributes.""" + from satpy.writers.cf.time import add_time_bounds_dimension + + test_array = np.array([[1, 2], [3, 4], [5, 6], [7, 8]]) + times = np.array(['2018-05-30T10:05:00', '2018-05-30T10:05:01', + '2018-05-30T10:05:02', '2018-05-30T10:05:03'], dtype=np.datetime64) + dataarray = xr.DataArray(test_array, + dims=['y', 'x'], + coords={'time': ('y', times)}, + attrs=dict(start_time=times[0], end_time=times[-1])) + ds = dataarray.to_dataset(name='test-array') + ds = add_time_bounds_dimension(ds) + + assert "bnds_1d" in ds.dims + assert ds.dims['bnds_1d'] == 2 + assert "time_bnds" in list(ds.data_vars) + assert "bounds" in ds["time"].attrs + assert "standard_name" in ds["time"].attrs diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 005509f165..ae55dcc1a2 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -17,7 +17,6 @@ # satpy. If not, see . 
"""Tests for the CF writer.""" -import json import logging import os import tempfile @@ -155,28 +154,6 @@ def test_preprocess_dataarray_name(): assert "original_name" not in out_da.attrs -def test_add_time_cf_attrs(): - """Test addition of CF-compliant time attributes.""" - from satpy import Scene - from satpy.writers.cf_writer import add_time_bounds_dimension - - scn = Scene() - test_array = np.array([[1, 2], [3, 4], [5, 6], [7, 8]]) - times = np.array(['2018-05-30T10:05:00', '2018-05-30T10:05:01', - '2018-05-30T10:05:02', '2018-05-30T10:05:03'], dtype=np.datetime64) - scn['test-array'] = xr.DataArray(test_array, - dims=['y', 'x'], - coords={'time': ('y', times)}, - attrs=dict(start_time=times[0], end_time=times[-1])) - ds = scn['test-array'].to_dataset(name='test-array') - ds = add_time_bounds_dimension(ds) - assert "bnds_1d" in ds.dims - assert ds.dims['bnds_1d'] == 2 - assert "time_bnds" in list(ds.data_vars) - assert "bounds" in ds["time"].attrs - assert "standard_name" in ds["time"].attrs - - def test_empty_collect_cf_datasets(): """Test that if no DataArrays, collect_cf_datasets raise error.""" from satpy.writers.cf_writer import collect_cf_datasets @@ -525,6 +502,7 @@ def get_test_attrs(self): Attributes, encoded attributes, encoded and flattened attributes """ + # TODO: also used by cf/test_attrs.py attrs = {'name': 'IR_108', 'start_time': datetime(2018, 1, 1, 0), 'end_time': datetime(2018, 1, 1, 0, 15), @@ -602,6 +580,7 @@ def get_test_attrs(self): def assertDictWithArraysEqual(self, d1, d2): """Check that dicts containing arrays are equal.""" + # TODO: also used by cf/test_attrs.py assert set(d1.keys()) == set(d2.keys()) for key, val1 in d1.items(): val2 = d2[key] @@ -614,25 +593,6 @@ def assertDictWithArraysEqual(self, d1, d2): assert isinstance(val2, np.generic) assert val1.dtype == val2.dtype - def test_encode_attrs_nc(self): - """Test attributes encoding.""" - from satpy.writers.cf_writer import encode_attrs_nc - - attrs, expected, _ = self.get_test_attrs() - - # Test encoding - encoded = encode_attrs_nc(attrs) - self.assertDictWithArraysEqual(expected, encoded) - - # Test decoding of json-encoded attributes - raw_md_roundtrip = {'recarray': [[0, 0], [0, 0], [0, 0]], - 'flag': 'true', - 'dict': {'a': 1, 'b': [1, 2, 3]}} - assert json.loads(encoded['raw_metadata']) == raw_md_roundtrip - assert json.loads(encoded['array_3d']) == [[[1, 2], [3, 4]], [[1, 2], [3, 4]]] - assert json.loads(encoded['nested_dict']) == {"l1": {"l2": {"l3": [1, 2, 3]}}} - assert json.loads(encoded['nested_list']) == ["1", ["2", [3]]] - def test_da2cf(self): """Test the conversion of a DataArray to a CF-compatible DataArray.""" from satpy.writers.cf_writer import CFWriter diff --git a/satpy/writers/cf/attrs.py b/satpy/writers/cf/attrs.py new file mode 100644 index 0000000000..7a9ecc33c8 --- /dev/null +++ b/satpy/writers/cf/attrs.py @@ -0,0 +1,220 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""CF processing of attributes."""
+
+import datetime
+import json
+from collections import OrderedDict
+
+import numpy as np
+
+from satpy.writers.utils import flatten_dict
+
+
+class AttributeEncoder(json.JSONEncoder):
+    """JSON encoder for dataset attributes."""
+
+    def default(self, obj):
+        """Return a json-serializable object for *obj*.
+
+        In order to facilitate decoding, elements in dictionaries, lists/tuples and multi-dimensional arrays are
+        encoded recursively.
+        """
+        if isinstance(obj, dict):
+            serialized = {}
+            for key, val in obj.items():
+                serialized[key] = self.default(val)
+            return serialized
+        elif isinstance(obj, (list, tuple, np.ndarray)):
+            return [self.default(item) for item in obj]
+        return self._encode(obj)
+
+    def _encode(self, obj):
+        """Encode the given object as a json-serializable datatype."""
+        if isinstance(obj, (bool, np.bool_)):
+            # Bool has to be checked first, because it is a subclass of int
+            return str(obj).lower()
+        elif isinstance(obj, (int, float, str)):
+            return obj
+        elif isinstance(obj, np.integer):
+            return int(obj)
+        elif isinstance(obj, np.floating):
+            return float(obj)
+        elif isinstance(obj, np.void):
+            return tuple(obj)
+        elif isinstance(obj, np.ndarray):
+            return obj.tolist()
+
+        return str(obj)
+
+
+def _encode_nc(obj):
+    """Try to encode `obj` as a netCDF/Zarr compatible datatype which most closely resembles the object's nature.
+
+    Raises:
+        ValueError if no such datatype could be found
+    """
+    from satpy.writers.cf_writer import NC4_DTYPES
+
+    if isinstance(obj, int) and not isinstance(obj, (bool, np.bool_)):
+        return obj
+    elif isinstance(obj, (float, str, np.integer, np.floating)):
+        return obj
+    elif isinstance(obj, np.ndarray):
+        # Only plain 1-d arrays are supported. Skip record arrays and multi-dimensional arrays.
+        is_plain_1d = not obj.dtype.fields and len(obj.shape) <= 1
+        if is_plain_1d:
+            if obj.dtype in NC4_DTYPES:
+                return obj
+            elif obj.dtype == np.bool_:
+                # Boolean arrays are not supported, convert to array of strings.
+                return [s.lower() for s in obj.astype(str)]
+            return obj.tolist()
+    raise ValueError('Unable to encode')
+
+
+def encode_nc(obj):
+    """Encode the given object as a netcdf compatible datatype."""
+    try:
+        return obj.to_cf()
+    except AttributeError:
+        return _encode_python_objects(obj)
+
+
+def _encode_python_objects(obj):
+    """Try to find the datatype which most closely resembles the object's nature.
+
+    On failure, encode as a string. Plain lists are encoded recursively.
+    """
+    if isinstance(obj, (list, tuple)) and all([not isinstance(item, (list, tuple)) for item in obj]):
+        return [encode_nc(item) for item in obj]
+    try:
+        dump = _encode_nc(obj)
+    except ValueError:
+        try:
+            # Decode byte-strings
+            decoded = obj.decode()
+        except AttributeError:
+            decoded = obj
+        dump = json.dumps(decoded, cls=AttributeEncoder).strip('"')
+    return dump
+
+
+def encode_attrs_nc(attrs):
+    """Encode dataset attributes in a netcdf compatible datatype.
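+
+    Nested structures are serialized as JSON strings, e.g.
+    ``{'dict': {'a': 1}}`` is encoded as ``{'dict': '{"a": 1}'}``.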
+ + Args: + attrs (dict): + Attributes to be encoded + Returns: + dict: Encoded (and sorted) attributes + + """ + encoded_attrs = [] + for key, val in sorted(attrs.items()): + if val is not None: + encoded_attrs.append((key, encode_nc(val))) + return OrderedDict(encoded_attrs) + + +def _add_ancillary_variables_attrs(dataarray): + """Replace ancillary_variables DataArray with a list of their name.""" + list_ancillary_variable_names = [da_ancillary.attrs['name'] + for da_ancillary in dataarray.attrs.get('ancillary_variables', [])] + if list_ancillary_variable_names: + dataarray.attrs['ancillary_variables'] = ' '.join(list_ancillary_variable_names) + else: + dataarray.attrs.pop("ancillary_variables", None) + return dataarray + + +def _drop_exclude_attrs(dataarray, exclude_attrs): + """Remove user-specified list of attributes.""" + if exclude_attrs is None: + exclude_attrs = [] + for key in exclude_attrs: + dataarray.attrs.pop(key, None) + return dataarray + + +def _remove_satpy_attrs(new_data): + """Remove _satpy attribute.""" + satpy_attrs = [key for key in new_data.attrs if key.startswith('_satpy')] + for satpy_attr in satpy_attrs: + new_data.attrs.pop(satpy_attr) + new_data.attrs.pop('_last_resampler', None) + return new_data + + +def _format_prerequisites_attrs(dataarray): + """Reformat prerequisites attribute value to string.""" + if 'prerequisites' in dataarray.attrs: + dataarray.attrs['prerequisites'] = [np.string_(str(prereq)) for prereq in dataarray.attrs['prerequisites']] + return dataarray + + +def _remove_none_attrs(dataarray): + """Remove attribute keys with None value.""" + for key, val in dataarray.attrs.copy().items(): + if val is None: + dataarray.attrs.pop(key) + return dataarray + + +def preprocess_datarray_attrs(dataarray, flatten_attrs, exclude_attrs): + """Preprocess DataArray attributes to be written into CF-compliant netCDF/Zarr.""" + dataarray = _remove_satpy_attrs(dataarray) + dataarray = _add_ancillary_variables_attrs(dataarray) + dataarray = _drop_exclude_attrs(dataarray, exclude_attrs) + dataarray = _format_prerequisites_attrs(dataarray) + dataarray = _remove_none_attrs(dataarray) + _ = dataarray.attrs.pop("area", None) + + if 'long_name' not in dataarray.attrs and 'standard_name' not in dataarray.attrs: + dataarray.attrs['long_name'] = dataarray.name + + if flatten_attrs: + dataarray.attrs = flatten_dict(dataarray.attrs) + + dataarray.attrs = encode_attrs_nc(dataarray.attrs) + + return dataarray + + +def _add_history(attrs): + """Add 'history' attribute to dictionary.""" + _history_create = 'Created by pytroll/satpy on {}'.format(datetime.utcnow()) + if 'history' in attrs: + if isinstance(attrs['history'], list): + attrs['history'] = ''.join(attrs['history']) + attrs['history'] += '\n' + _history_create + else: + attrs['history'] = _history_create + return attrs + + +def preprocess_header_attrs(header_attrs, flatten_attrs=False): + """Prepare file header attributes.""" + if header_attrs is not None: + if flatten_attrs: + header_attrs = flatten_dict(header_attrs) + header_attrs = encode_attrs_nc(header_attrs) # OrderedDict + else: + header_attrs = {} + header_attrs = _add_history(header_attrs) + return header_attrs diff --git a/satpy/writers/cf/time.py b/satpy/writers/cf/time.py new file mode 100644 index 0000000000..6308f42364 --- /dev/null +++ b/satpy/writers/cf/time.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2023 Satpy developers +# +# This file is part of satpy. 
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""CF processing of time dimension and coordinates."""
+import numpy as np
+import xarray as xr
+
+
+def add_time_bounds_dimension(ds, time="time"):
+    """Add time bound dimension to xr.Dataset."""
+    start_times = []
+    end_times = []
+    for _var_name, data_array in ds.items():
+        start_times.append(data_array.attrs.get("start_time", None))
+        end_times.append(data_array.attrs.get("end_time", None))
+
+    start_time = min(start_time for start_time in start_times
+                     if start_time is not None)
+    end_time = max(end_time for end_time in end_times
+                   if end_time is not None)
+    ds['time_bnds'] = xr.DataArray([[np.datetime64(start_time),
+                                     np.datetime64(end_time)]],
+                                   dims=['time', 'bnds_1d'])
+    ds[time].attrs['bounds'] = "time_bnds"
+    ds[time].attrs['standard_name'] = "time"
+    return ds
+
+
+def _process_time_coord(dataarray, epoch):
+    """Process the 'time' coordinate, if existing.
+
+    It expands the DataArray with a time dimension if one does not yet exist.
+
+    The function assumes
+
+        - that the x and y dimensions have a shape greater than 1
+        - the time coordinate has size 1
+
+    """
+    if 'time' in dataarray.coords:
+        dataarray['time'].encoding['units'] = epoch
+        dataarray['time'].attrs['standard_name'] = 'time'
+        dataarray['time'].attrs.pop('bounds', None)
+
+        if 'time' not in dataarray.dims and dataarray["time"].size not in dataarray.shape:
+            dataarray = dataarray.expand_dims('time')
+
+    return dataarray
diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py
index 4c672b70b6..a2edd70ab2 100644
--- a/satpy/writers/cf_writer.py
+++ b/satpy/writers/cf_writer.py
@@ -154,13 +154,10 @@
 .. _xarray encoding documentation:
     http://xarray.pydata.org/en/stable/user-guide/io.html?highlight=encoding#writing-encoded-data
 """
-
 import copy
-import json
 import logging
 import warnings
-from collections import OrderedDict, defaultdict
-from datetime import datetime
+from collections import defaultdict
 
 import numpy as np
 import xarray as xr
@@ -169,7 +166,6 @@
 
 from satpy.writers import Writer
 from satpy.writers.cf.coords_attrs import add_xy_coords_attrs
-from satpy.writers.utils import flatten_dict
 
 logger = logging.getLogger(__name__)
 
@@ -226,236 +222,6 @@ def get_extra_ds(dataarray, keys=None):
     return ds_collection
 
 
-# ###--------------------------------------------------------------------------.
-# ### CF-Time -def add_time_bounds_dimension(ds, time="time"): - """Add time bound dimension to xr.Dataset.""" - start_times = [] - end_times = [] - for _var_name, data_array in ds.items(): - start_times.append(data_array.attrs.get("start_time", None)) - end_times.append(data_array.attrs.get("end_time", None)) - - start_time = min(start_time for start_time in start_times - if start_time is not None) - end_time = min(end_time for end_time in end_times - if end_time is not None) - ds['time_bnds'] = xr.DataArray([[np.datetime64(start_time), - np.datetime64(end_time)]], - dims=['time', 'bnds_1d']) - ds[time].attrs['bounds'] = "time_bnds" - ds[time].attrs['standard_name'] = "time" - return ds - - -def _process_time_coord(dataarray, epoch): - """Process the 'time' coordinate, if existing. - - If expand the DataArray with a time dimension if does not yet exists. - - The function assumes - - - that x and y dimensions have at least shape > 1 - - the time coordinate has size 1 - - """ - if 'time' in dataarray.coords: - dataarray['time'].encoding['units'] = epoch - dataarray['time'].attrs['standard_name'] = 'time' - dataarray['time'].attrs.pop('bounds', None) - - if 'time' not in dataarray.dims and dataarray["time"].size not in dataarray.shape: - dataarray = dataarray.expand_dims('time') - - return dataarray - - -# --------------------------------------------------------------------------. -# ### Attributes - - -class AttributeEncoder(json.JSONEncoder): - """JSON encoder for dataset attributes.""" - - def default(self, obj): - """Return a json-serializable object for *obj*. - - In order to facilitate decoding, elements in dictionaries, lists/tuples and multi-dimensional arrays are - encoded recursively. - """ - if isinstance(obj, dict): - serialized = {} - for key, val in obj.items(): - serialized[key] = self.default(val) - return serialized - elif isinstance(obj, (list, tuple, np.ndarray)): - return [self.default(item) for item in obj] - return self._encode(obj) - - def _encode(self, obj): - """Encode the given object as a json-serializable datatype.""" - if isinstance(obj, (bool, np.bool_)): - # Bool has to be checked first, because it is a subclass of int - return str(obj).lower() - elif isinstance(obj, (int, float, str)): - return obj - elif isinstance(obj, np.integer): - return int(obj) - elif isinstance(obj, np.floating): - return float(obj) - elif isinstance(obj, np.void): - return tuple(obj) - elif isinstance(obj, np.ndarray): - return obj.tolist() - - return str(obj) - - -def _encode_nc(obj): - """Try to encode `obj` as a netcdf compatible datatype which most closely resembles the object's nature. - - Raises: - ValueError if no such datatype could be found - - """ - if isinstance(obj, int) and not isinstance(obj, (bool, np.bool_)): - return obj - elif isinstance(obj, (float, str, np.integer, np.floating)): - return obj - elif isinstance(obj, np.ndarray): - # Only plain 1-d arrays are supported. Skip record arrays and multi-dimensional arrays. - is_plain_1d = not obj.dtype.fields and len(obj.shape) <= 1 - if is_plain_1d: - if obj.dtype in NC4_DTYPES: - return obj - elif obj.dtype == np.bool_: - # Boolean arrays are not supported, convert to array of strings. 
- return [s.lower() for s in obj.astype(str)] - return obj.tolist() - - raise ValueError('Unable to encode') - - -def encode_nc(obj): - """Encode the given object as a netcdf compatible datatype.""" - try: - return obj.to_cf() - except AttributeError: - return _encode_python_objects(obj) - - -def _encode_python_objects(obj): - """Try to find the datatype which most closely resembles the object's nature. - - If on failure, encode as a string. Plain lists are encoded recursively. - """ - if isinstance(obj, (list, tuple)) and all([not isinstance(item, (list, tuple)) for item in obj]): - return [encode_nc(item) for item in obj] - try: - dump = _encode_nc(obj) - except ValueError: - try: - # Decode byte-strings - decoded = obj.decode() - except AttributeError: - decoded = obj - dump = json.dumps(decoded, cls=AttributeEncoder).strip('"') - return dump - - -def encode_attrs_nc(attrs): - """Encode dataset attributes in a netcdf compatible datatype. - - Args: - attrs (dict): - Attributes to be encoded - Returns: - dict: Encoded (and sorted) attributes - - """ - encoded_attrs = [] - for key, val in sorted(attrs.items()): - if val is not None: - encoded_attrs.append((key, encode_nc(val))) - return OrderedDict(encoded_attrs) - - -def _add_ancillary_variables_attrs(dataarray): - """Replace ancillary_variables DataArray with a list of their name.""" - list_ancillary_variable_names = [da_ancillary.attrs['name'] - for da_ancillary in dataarray.attrs.get('ancillary_variables', [])] - if list_ancillary_variable_names: - dataarray.attrs['ancillary_variables'] = ' '.join(list_ancillary_variable_names) - else: - dataarray.attrs.pop("ancillary_variables", None) - return dataarray - - -def _drop_exclude_attrs(dataarray, exclude_attrs): - """Remove user-specified list of attributes.""" - if exclude_attrs is None: - exclude_attrs = [] - for key in exclude_attrs: - dataarray.attrs.pop(key, None) - return dataarray - - -def _remove_satpy_attrs(new_data): - """Remove _satpy attribute.""" - satpy_attrs = [key for key in new_data.attrs if key.startswith('_satpy')] - for satpy_attr in satpy_attrs: - new_data.attrs.pop(satpy_attr) - new_data.attrs.pop('_last_resampler', None) - return new_data - - -def _format_prerequisites_attrs(dataarray): - """Reformat prerequisites attribute value to string.""" - if 'prerequisites' in dataarray.attrs: - dataarray.attrs['prerequisites'] = [np.string_(str(prereq)) for prereq in dataarray.attrs['prerequisites']] - return dataarray - - -def _remove_none_attrs(dataarray): - """Remove attribute keys with None value.""" - for key, val in dataarray.attrs.copy().items(): - if val is None: - dataarray.attrs.pop(key) - return dataarray - - -def preprocess_datarray_attrs(dataarray, flatten_attrs, exclude_attrs): - """Preprocess DataArray attributes to be written into CF-compliant netCDF/Zarr.""" - dataarray = _remove_satpy_attrs(dataarray) - dataarray = _add_ancillary_variables_attrs(dataarray) - dataarray = _drop_exclude_attrs(dataarray, exclude_attrs) - dataarray = _format_prerequisites_attrs(dataarray) - dataarray = _remove_none_attrs(dataarray) - _ = dataarray.attrs.pop("area", None) - - if 'long_name' not in dataarray.attrs and 'standard_name' not in dataarray.attrs: - dataarray.attrs['long_name'] = dataarray.name - - if flatten_attrs: - dataarray.attrs = flatten_dict(dataarray.attrs) - - dataarray.attrs = encode_attrs_nc(dataarray.attrs) - - return dataarray - - -def preprocess_header_attrs(header_attrs, flatten_attrs=False): - """Prepare file header attributes.""" - if header_attrs is not 
None: - if flatten_attrs: - header_attrs = flatten_dict(header_attrs) - header_attrs = encode_attrs_nc(header_attrs) # OrderedDict - else: - header_attrs = {} - header_attrs = _add_history(header_attrs) - return header_attrs - - # ###--------------------------------------------------------------------------. # ### netCDF encodings @@ -582,18 +348,6 @@ def _preprocess_dataarray_name(dataarray, numeric_name_prefix, include_orig_name return dataarray -def _add_history(attrs): - """Add 'history' attribute to dictionary.""" - _history_create = 'Created by pytroll/satpy on {}'.format(datetime.utcnow()) - if 'history' in attrs: - if isinstance(attrs['history'], list): - attrs['history'] = ''.join(attrs['history']) - attrs['history'] += '\n' + _history_create - else: - attrs['history'] = _history_create - return attrs - - def _get_groups(groups, list_datarrays): """Return a dictionary with the list of xr.DataArray associated to each group. @@ -645,6 +399,9 @@ def make_cf_dataarray(dataarray, CF-compliant xr.DataArray. """ + from satpy.writers.cf.attrs import preprocess_datarray_attrs + from satpy.writers.cf.time import _process_time_coord + dataarray = _preprocess_dataarray_name(dataarray=dataarray, numeric_name_prefix=numeric_name_prefix, include_orig_name=include_orig_name) @@ -822,6 +579,9 @@ def collect_cf_datasets(list_dataarrays, header_attrs : dict Global attributes to be attached to the xr.Dataset / netCDF4. """ + from satpy.writers.cf.attrs import preprocess_header_attrs + from satpy.writers.cf.time import add_time_bounds_dimension + if not list_dataarrays: raise RuntimeError("None of the requested datasets have been " "generated or could not be loaded. Requested " From 6c27d86ed22ffd5a697449c57a0ee0a616081cbc Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 27 Jun 2023 23:13:10 +0200 Subject: [PATCH 0325/1416] Fix datetime import --- satpy/tests/writer_tests/cf_tests/test_attrs.py | 4 ++-- satpy/writers/cf/attrs.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/tests/writer_tests/cf_tests/test_attrs.py b/satpy/tests/writer_tests/cf_tests/test_attrs.py index a7b36837b4..87cdfd173d 100644 --- a/satpy/tests/writer_tests/cf_tests/test_attrs.py +++ b/satpy/tests/writer_tests/cf_tests/test_attrs.py @@ -35,8 +35,8 @@ def get_test_attrs(self): """ # TODO: this is also used by test_da2cf attrs = {'name': 'IR_108', - 'start_time': datetime(2018, 1, 1, 0), - 'end_time': datetime(2018, 1, 1, 0, 15), + 'start_time': datetime.datetime(2018, 1, 1, 0), + 'end_time': datetime.datetime(2018, 1, 1, 0, 15), 'int': 1, 'float': 1.0, 'none': None, # should be dropped diff --git a/satpy/writers/cf/attrs.py b/satpy/writers/cf/attrs.py index 7a9ecc33c8..aac0f5f289 100644 --- a/satpy/writers/cf/attrs.py +++ b/satpy/writers/cf/attrs.py @@ -198,7 +198,7 @@ def preprocess_datarray_attrs(dataarray, flatten_attrs, exclude_attrs): def _add_history(attrs): """Add 'history' attribute to dictionary.""" - _history_create = 'Created by pytroll/satpy on {}'.format(datetime.utcnow()) + _history_create = 'Created by pytroll/satpy on {}'.format(datetime.datetime.utcnow()) if 'history' in attrs: if isinstance(attrs['history'], list): attrs['history'] = ''.join(attrs['history']) From 59f73ea592231a8379788cd7999273cf9be9dae1 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 27 Jun 2023 23:18:04 +0200 Subject: [PATCH 0326/1416] Replace deprecated CFWriter.da2cf with make_cf_dataarray --- satpy/tests/writer_tests/test_cf.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff 
--git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index ae55dcc1a2..35b454c87f 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -87,10 +87,10 @@ def test_lonlat_storage(tmp_path): np.testing.assert_allclose(ds["mavas"].attrs["inverse_flattening"], 298.257223563) -def test_da2cf_lonlat(): - """Test correct da2cf encoding for area with lon/lat units.""" +def test_make_cf_dataarray_lonlat(): + """Test correct CF encoding for area with lon/lat units.""" from satpy.resample import add_crs_xy_coords - from satpy.writers.cf_writer import CFWriter + from satpy.writers.cf_writer import make_cf_dataarray area = create_area_def("mavas", 4326, shape=(5, 5), center=(0, 0), resolution=(1, 1)) @@ -99,7 +99,7 @@ def test_da2cf_lonlat(): dims=("y", "x"), attrs={"area": area}) da = add_crs_xy_coords(da, area) - new_da = CFWriter.da2cf(da) + new_da = make_cf_dataarray(da) assert new_da["x"].attrs["units"] == "degrees_east" assert new_da["y"].attrs["units"] == "degrees_north" @@ -593,9 +593,9 @@ def assertDictWithArraysEqual(self, d1, d2): assert isinstance(val2, np.generic) assert val1.dtype == val2.dtype - def test_da2cf(self): + def test_make_cf_dataarray(self): """Test the conversion of a DataArray to a CF-compatible DataArray.""" - from satpy.writers.cf_writer import CFWriter + from satpy.writers.cf_writer import make_cf_dataarray # Create set of test attributes attrs, attrs_expected, attrs_expected_flat = self.get_test_attrs() @@ -618,7 +618,7 @@ def test_da2cf(self): coords={'y': [0, 1], 'x': [1, 2], 'acq_time': ('y', [3, 4])}) # Test conversion to something cf-compliant - res = CFWriter.da2cf(arr) + res = make_cf_dataarray(arr) np.testing.assert_array_equal(res['x'], arr['x']) np.testing.assert_array_equal(res['y'], arr['y']) np.testing.assert_array_equal(res['acq_time'], arr['acq_time']) @@ -627,17 +627,17 @@ def test_da2cf(self): self.assertDictWithArraysEqual(res.attrs, attrs_expected) # Test attribute kwargs - res_flat = CFWriter.da2cf(arr, flatten_attrs=True, exclude_attrs=['int']) + res_flat = make_cf_dataarray(arr, flatten_attrs=True, exclude_attrs=['int']) attrs_expected_flat.pop('int') self.assertDictWithArraysEqual(res_flat.attrs, attrs_expected_flat) - def test_da2cf_one_dimensional_array(self): + def test_make_cf_dataarray_one_dimensional_array(self): """Test the conversion of an 1d DataArray to a CF-compatible DataArray.""" - from satpy.writers.cf_writer import CFWriter + from satpy.writers.cf_writer import make_cf_dataarray arr = xr.DataArray(np.array([1, 2, 3, 4]), attrs={}, dims=('y',), coords={'y': [0, 1, 2, 3], 'acq_time': ('y', [0, 1, 2, 3])}) - _ = CFWriter.da2cf(arr) + _ = make_cf_dataarray(arr) def test_collect_cf_dataarrays(self): """Test collecting CF datasets from a DataArray objects.""" From 396700f0c04aeee05b381eeb16f0480d9ee657ac Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 27 Jun 2023 23:35:24 +0200 Subject: [PATCH 0327/1416] Refactor Dataset encodings --- .../writer_tests/cf_tests/test_encoding.py | 123 ++++++++++++++++++ satpy/tests/writer_tests/test_cf.py | 107 +-------------- satpy/writers/cf/encoding.py | 110 ++++++++++++++++ satpy/writers/cf_writer.py | 99 +------------- 4 files changed, 241 insertions(+), 198 deletions(-) create mode 100644 satpy/tests/writer_tests/cf_tests/test_encoding.py create mode 100644 satpy/writers/cf/encoding.py diff --git a/satpy/tests/writer_tests/cf_tests/test_encoding.py b/satpy/tests/writer_tests/cf_tests/test_encoding.py new file mode 100644 index 
0000000000..66f7c72a48
--- /dev/null
+++ b/satpy/tests/writer_tests/cf_tests/test_encoding.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2017-2023 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see .
+"""Tests for compatible netCDF/Zarr DataArray encodings."""
+import datetime
+
+import pytest
+import xarray as xr
+
+
+class TestUpdateDatasetEncodings:
+    """Test update of Dataset encodings."""
+
+    @pytest.fixture
+    def fake_ds(self):
+        """Create fake data for testing."""
+        ds = xr.Dataset({'foo': (('y', 'x'), [[1, 2], [3, 4]]),
+                         'bar': (('y', 'x'), [[3, 4], [5, 6]])},
+                        coords={'y': [1, 2],
+                                'x': [3, 4],
+                                'lon': (('y', 'x'), [[7, 8], [9, 10]])})
+        return ds
+
+    @pytest.fixture
+    def fake_ds_digit(self):
+        """Create fake data for testing."""
+        ds_digit = xr.Dataset({'CHANNEL_1': (('y', 'x'), [[1, 2], [3, 4]]),
+                               'CHANNEL_2': (('y', 'x'), [[3, 4], [5, 6]])},
+                              coords={'y': [1, 2],
+                                      'x': [3, 4],
+                                      'lon': (('y', 'x'), [[7, 8], [9, 10]])})
+        return ds_digit
+
+    def test_dataset_name_digit(self, fake_ds_digit):
+        """Test data with dataset name starting with a digit."""
+        from satpy.writers.cf.encoding import update_encoding
+
+        # Dataset with name starting with digit
+        ds_digit = fake_ds_digit
+        kwargs = {'encoding': {'1': {'dtype': 'float32'},
+                               '2': {'dtype': 'float32'}},
+                  'other': 'kwargs'}
+        enc, other_kwargs = update_encoding(ds_digit, kwargs, numeric_name_prefix='CHANNEL_')
+        expected_dict = {
+            'y': {'_FillValue': None},
+            'x': {'_FillValue': None},
+            'CHANNEL_1': {'dtype': 'float32'},
+            'CHANNEL_2': {'dtype': 'float32'}
+        }
+        assert enc == expected_dict
+        assert other_kwargs == {'other': 'kwargs'}
+
+    def test_without_time(self, fake_ds):
+        """Test data with no time dimension."""
+        from satpy.writers.cf.encoding import update_encoding
+
+        # Without time dimension
+        ds = fake_ds.chunk(2)
+        kwargs = {'encoding': {'bar': {'chunksizes': (1, 1)}},
+                  'other': 'kwargs'}
+        enc, other_kwargs = update_encoding(ds, kwargs)
+        expected_dict = {
+            'y': {'_FillValue': None},
+            'x': {'_FillValue': None},
+            'lon': {'chunksizes': (2, 2)},
+            'foo': {'chunksizes': (2, 2)},
+            'bar': {'chunksizes': (1, 1)}
+        }
+        assert enc == expected_dict
+        assert other_kwargs == {'other': 'kwargs'}
+
+        # Chunksize may not exceed shape
+        ds = fake_ds.chunk(8)
+        kwargs = {'encoding': {}, 'other': 'kwargs'}
+        enc, other_kwargs = update_encoding(ds, kwargs)
+        expected_dict = {
+            'y': {'_FillValue': None},
+            'x': {'_FillValue': None},
+            'lon': {'chunksizes': (2, 2)},
+            'foo': {'chunksizes': (2, 2)},
+            'bar': {'chunksizes': (2, 2)}
+        }
+        assert enc == expected_dict
+
+    def test_with_time(self, fake_ds):
+        """Test data with a time dimension."""
+        from satpy.writers.cf.encoding import update_encoding
+
+        # With time dimension
+        ds = fake_ds.chunk(8).expand_dims({'time': [datetime.datetime(2009, 7, 1, 12, 15)]})
+        kwargs = {'encoding': {'bar': {'chunksizes': (1, 1, 1)}},
+                  'other': 'kwargs'}
+        enc, other_kwargs 
= update_encoding(ds, kwargs) + expected_dict = { + 'y': {'_FillValue': None}, + 'x': {'_FillValue': None}, + 'lon': {'chunksizes': (2, 2)}, + 'foo': {'chunksizes': (1, 2, 2)}, + 'bar': {'chunksizes': (1, 1, 1)}, + 'time': {'_FillValue': None, + 'calendar': 'proleptic_gregorian', + 'units': 'days since 2009-07-01 12:15:00'}, + 'time_bnds': {'_FillValue': None, + 'calendar': 'proleptic_gregorian', + 'units': 'days since 2009-07-01 12:15:00'} + } + assert enc == expected_dict + # User-defined encoding may not be altered + assert kwargs['encoding'] == {'bar': {'chunksizes': (1, 1, 1)}} diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 35b454c87f..1d9e9c7650 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -808,109 +808,8 @@ def test_collect_cf_dataarrays_with_latitude_named_lat(self, datasets): assert ds2['var1']['longitude'].attrs['name'] == 'longitude' -class EncodingUpdateTest: - """Test update of netCDF encoding.""" - - @pytest.fixture - def fake_ds(self): - """Create fake data for testing.""" - ds = xr.Dataset({'foo': (('y', 'x'), [[1, 2], [3, 4]]), - 'bar': (('y', 'x'), [[3, 4], [5, 6]])}, - coords={'y': [1, 2], - 'x': [3, 4], - 'lon': (('y', 'x'), [[7, 8], [9, 10]])}) - return ds - - @pytest.fixture - def fake_ds_digit(self): - """Create fake data for testing.""" - ds_digit = xr.Dataset({'CHANNEL_1': (('y', 'x'), [[1, 2], [3, 4]]), - 'CHANNEL_2': (('y', 'x'), [[3, 4], [5, 6]])}, - coords={'y': [1, 2], - 'x': [3, 4], - 'lon': (('y', 'x'), [[7, 8], [9, 10]])}) - return ds_digit - - def test_dataset_name_digit(self, fake_ds_digit): - """Test data with dataset name staring with a digit.""" - from satpy.writers.cf_writer import update_encoding - - # Dataset with name staring with digit - ds_digit = fake_ds_digit - kwargs = {'encoding': {'1': {'dtype': 'float32'}, - '2': {'dtype': 'float32'}}, - 'other': 'kwargs'} - enc, other_kwargs = update_encoding(ds_digit, kwargs, numeric_name_prefix='CHANNEL_') - expected_dict = { - 'y': {'_FillValue': None}, - 'x': {'_FillValue': None}, - 'CHANNEL_1': {'dtype': 'float32'}, - 'CHANNEL_2': {'dtype': 'float32'} - } - assert enc == expected_dict - assert other_kwargs == {'other': 'kwargs'} - - def test_without_time(self, fake_ds): - """Test data with no time dimension.""" - from satpy.writers.cf_writer import update_encoding - - # Without time dimension - ds = fake_ds.chunk(2) - kwargs = {'encoding': {'bar': {'chunksizes': (1, 1)}}, - 'other': 'kwargs'} - enc, other_kwargs = update_encoding(ds, kwargs) - expected_dict = { - 'y': {'_FillValue': None}, - 'x': {'_FillValue': None}, - 'lon': {'chunksizes': (2, 2)}, - 'foo': {'chunksizes': (2, 2)}, - 'bar': {'chunksizes': (1, 1)} - } - assert enc == expected_dict - assert other_kwargs == {'other': 'kwargs'} - - # Chunksize may not exceed shape - ds = fake_ds.chunk(8) - kwargs = {'encoding': {}, 'other': 'kwargs'} - enc, other_kwargs = update_encoding(ds, kwargs) - expected_dict = { - 'y': {'_FillValue': None}, - 'x': {'_FillValue': None}, - 'lon': {'chunksizes': (2, 2)}, - 'foo': {'chunksizes': (2, 2)}, - 'bar': {'chunksizes': (2, 2)} - } - assert enc == expected_dict - - def test_with_time(self, fake_ds): - """Test data with a time dimension.""" - from satpy.writers.cf_writer import update_encoding - - # With time dimension - ds = fake_ds.chunk(8).expand_dims({'time': [datetime(2009, 7, 1, 12, 15)]}) - kwargs = {'encoding': {'bar': {'chunksizes': (1, 1, 1)}}, - 'other': 'kwargs'} - enc, other_kwargs = update_encoding(ds, 
kwargs) - expected_dict = { - 'y': {'_FillValue': None}, - 'x': {'_FillValue': None}, - 'lon': {'chunksizes': (2, 2)}, - 'foo': {'chunksizes': (1, 2, 2)}, - 'bar': {'chunksizes': (1, 1, 1)}, - 'time': {'_FillValue': None, - 'calendar': 'proleptic_gregorian', - 'units': 'days since 2009-07-01 12:15:00'}, - 'time_bnds': {'_FillValue': None, - 'calendar': 'proleptic_gregorian', - 'units': 'days since 2009-07-01 12:15:00'} - } - assert enc == expected_dict - # User-defined encoding may not be altered - assert kwargs['encoding'] == {'bar': {'chunksizes': (1, 1, 1)}} - - -class TestEncodingKwarg: - """Test CF writer with 'encoding' keyword argument.""" +class TestNETCDFEncodingKwargs: + """Test netCDF compression encodings.""" @pytest.fixture def scene(self): @@ -1001,7 +900,7 @@ def test_no_warning_if_backends_match(self, scene, filename, monkeypatch): warnings.simplefilter("error") -class TestEncodingAttribute(TestEncodingKwarg): +class TestEncodingAttribute(TestNETCDFEncodingKwargs): """Test CF writer with 'encoding' dataset attribute.""" @pytest.fixture diff --git a/satpy/writers/cf/encoding.py b/satpy/writers/cf/encoding.py new file mode 100644 index 0000000000..c8ea0f25f4 --- /dev/null +++ b/satpy/writers/cf/encoding.py @@ -0,0 +1,110 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""CF encoding.""" + +import numpy as np +import xarray as xr +from xarray.coding.times import CFDatetimeCoder + + +def _set_default_chunks(encoding, dataset): + """Update encoding to preserve current dask chunks. + + Existing user-defined chunks take precedence. + """ + for var_name, variable in dataset.variables.items(): + if variable.chunks: + chunks = tuple( + np.stack([variable.data.chunksize, + variable.shape]).min(axis=0) + ) # Chunksize may not exceed shape + encoding.setdefault(var_name, {}) + encoding[var_name].setdefault('chunksizes', chunks) + return encoding + + +def _set_default_fill_value(encoding, dataset): + """Set default fill values. + + Avoid _FillValue attribute being added to coordinate variables + (https://github.com/pydata/xarray/issues/1865). + """ + coord_vars = [] + for data_array in dataset.values(): + coord_vars.extend(set(data_array.dims).intersection(data_array.coords)) + for coord_var in coord_vars: + encoding.setdefault(coord_var, {}) + encoding[coord_var].update({'_FillValue': None}) + return encoding + + +def _set_default_time_encoding(encoding, dataset): + """Set default time encoding. + + Make sure time coordinates and bounds have the same units. + Default is xarray's CF datetime encoding, which can be overridden + by user-defined encoding. 
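The defaults sketched in these docstrings can be exercised end-to-end through update_encoding, defined later in this new module. As a minimal, hedged illustration (fabricated dataset values; it only assumes the satpy.writers.cf.encoding module introduced by this patch is importable):

    import datetime

    import numpy as np
    import xarray as xr

    from satpy.writers.cf.encoding import update_encoding

    # A small dask-backed dataset with a time dimension, mirroring the fixtures above.
    ds = xr.Dataset(
        {"foo": (("y", "x"), np.arange(4.).reshape(2, 2))},
        coords={"y": [1, 2], "x": [3, 4]},
    ).chunk(2).expand_dims({"time": [datetime.datetime(2009, 7, 1, 12, 15)]})

    encoding, other_kwargs = update_encoding(ds, {"encoding": {}, "engine": "netcdf4"})
    # 'encoding' now holds chunk sizes capped at each variable's shape,
    # '_FillValue': None for the coordinate variables, and matching
    # units/calendar entries for 'time' and 'time_bnds'; remaining kwargs
    # such as 'engine' are passed through unchanged in 'other_kwargs'.
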
+    """
+    if 'time' in dataset:
+        try:
+            dtnp64 = dataset['time'].data[0]
+        except IndexError:
+            dtnp64 = dataset['time'].data
+
+        default = CFDatetimeCoder().encode(xr.DataArray(dtnp64))
+        time_enc = {'units': default.attrs['units'], 'calendar': default.attrs['calendar']}
+        time_enc.update(encoding.get('time', {}))
+        bounds_enc = {'units': time_enc['units'],
+                      'calendar': time_enc['calendar'],
+                      '_FillValue': None}
+        encoding['time'] = time_enc
+        encoding['time_bnds'] = bounds_enc  # FUTURE: Not required anymore with xarray-0.14+
+    return encoding
+
+
+def _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix):
+    """Ensure variable names of the encoding dictionary account for numeric_name_prefix.
+
+    A lot of channel names in satpy start with a digit.
+    When preparing CF-compliant datasets, these channels are prefixed with numeric_name_prefix.
+
+    If variable names in the encoding dictionary are numeric digits, they are prefixed
+    with numeric_name_prefix.
+    """
+    for var_name in list(dataset.variables):
+        if not numeric_name_prefix or not var_name.startswith(numeric_name_prefix):
+            continue
+        orig_var_name = var_name.replace(numeric_name_prefix, '')
+        if orig_var_name in encoding:
+            encoding[var_name] = encoding.pop(orig_var_name)
+    return encoding
+
+
+def update_encoding(dataset, to_netcdf_kwargs, numeric_name_prefix='CHANNEL_'):
+    """Update encoding.
+
+    Preserve dask chunks, avoid fill values in coordinate variables and make sure that
+    time & time bounds have the same units.
+    """
+    other_to_netcdf_kwargs = to_netcdf_kwargs.copy()
+    encoding = other_to_netcdf_kwargs.pop('encoding', {}).copy()
+    encoding = _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix)
+    encoding = _set_default_chunks(encoding, dataset)
+    encoding = _set_default_fill_value(encoding, dataset)
+    encoding = _set_default_time_encoding(encoding, dataset)
+    return encoding, other_to_netcdf_kwargs
diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py
index a2edd70ab2..63f57f2e63 100644
--- a/satpy/writers/cf_writer.py
+++ b/satpy/writers/cf_writer.py
@@ -162,7 +162,6 @@
 import numpy as np
 import xarray as xr
 from packaging.version import Version
-from xarray.coding.times import CFDatetimeCoder
 
 from satpy.writers import Writer
 from satpy.writers.cf.coords_attrs import add_xy_coords_attrs
@@ -222,98 +221,6 @@ def get_extra_ds(dataarray, keys=None):
     return ds_collection
 
 
-# ###--------------------------------------------------------------------------.
-# ### netCDF encodings
-
-
-def _set_default_chunks(encoding, dataset):
-    """Update encoding to preserve current dask chunks.
-
-    Existing user-defined chunks take precedence.
-    """
-    for var_name, variable in dataset.variables.items():
-        if variable.chunks:
-            chunks = tuple(
-                np.stack([variable.data.chunksize,
-                          variable.shape]).min(axis=0)
-            )  # Chunksize may not exceed shape
-            encoding.setdefault(var_name, {})
-            encoding[var_name].setdefault('chunksizes', chunks)
-    return encoding
-
-
-def _set_default_fill_value(encoding, dataset):
-    """Set default fill values.
-
-    Avoid _FillValue attribute being added to coordinate variables
-    (https://github.com/pydata/xarray/issues/1865). 
- """ - coord_vars = [] - for data_array in dataset.values(): - coord_vars.extend(set(data_array.dims).intersection(data_array.coords)) - for coord_var in coord_vars: - encoding.setdefault(coord_var, {}) - encoding[coord_var].update({'_FillValue': None}) - return encoding - - -def _set_default_time_encoding(encoding, dataset): - """Set default time encoding. - - Make sure time coordinates and bounds have the same units. - Default is xarray's CF datetime encoding, which can be overridden - by user-defined encoding. - """ - if 'time' in dataset: - try: - dtnp64 = dataset['time'].data[0] - except IndexError: - dtnp64 = dataset['time'].data - - default = CFDatetimeCoder().encode(xr.DataArray(dtnp64)) - time_enc = {'units': default.attrs['units'], 'calendar': default.attrs['calendar']} - time_enc.update(encoding.get('time', {})) - bounds_enc = {'units': time_enc['units'], - 'calendar': time_enc['calendar'], - '_FillValue': None} - encoding['time'] = time_enc - encoding['time_bnds'] = bounds_enc # FUTURE: Not required anymore with xarray-0.14+ - return encoding - - -def _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix): - """Ensure variable names of the encoding dictionary account for numeric_name_prefix. - - A lot of channel names in satpy starts with a digit. - When preparing CF-compliant datasets, these channels are prefixed with numeric_name_prefix. - - If variables names in the encoding dictionary are numeric digits, their name is prefixed - with numeric_name_prefix - """ - for var_name in list(dataset.variables): - if not numeric_name_prefix or not var_name.startswith(numeric_name_prefix): - continue - orig_var_name = var_name.replace(numeric_name_prefix, '') - if orig_var_name in encoding: - encoding[var_name] = encoding.pop(orig_var_name) - return encoding - - -def update_encoding(dataset, to_netcdf_kwargs, numeric_name_prefix='CHANNEL_'): - """Update encoding. - - Preserve dask chunks, avoid fill values in coordinate variables and make sure that - time & time bounds have the same units. - """ - other_to_netcdf_kwargs = to_netcdf_kwargs.copy() - encoding = other_to_netcdf_kwargs.pop('encoding', {}).copy() - encoding = _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix) - encoding = _set_default_chunks(encoding, dataset) - encoding = _set_default_fill_value(encoding, dataset) - encoding = _set_default_time_encoding(encoding, dataset) - return encoding, other_to_netcdf_kwargs - - # ###--------------------------------------------------------------------------. # ### CF-conversion @@ -681,8 +588,10 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, @staticmethod def update_encoding(dataset, to_netcdf_kwargs): """Update encoding info (deprecated).""" + from satpy.writers.cf.encoding import update_encoding + warnings.warn('CFWriter.update_encoding is deprecated. ' - 'Use satpy.writers.cf_writer.update_encoding instead.', + 'Use satpy.writers.cf.encoding.update_encoding instead.', DeprecationWarning, stacklevel=3) return update_encoding(dataset, to_netcdf_kwargs) @@ -728,6 +637,8 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. 
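To make the numeric_name_prefix handling described here concrete, a small hedged sketch of how an encoding dictionary keyed by the original digit names is remapped (fabricated values; this mirrors the behaviour exercised by test_dataset_name_digit above):

    import xarray as xr

    from satpy.writers.cf.encoding import update_encoding

    # Channel '1' has already been renamed 'CHANNEL_1' in the CF-ready dataset,
    # but the user supplied its encoding under the original name '1'.
    ds = xr.Dataset({"CHANNEL_1": (("y", "x"), [[1, 2], [3, 4]])},
                    coords={"y": [1, 2], "x": [3, 4]})
    enc, _ = update_encoding(ds, {"encoding": {"1": {"dtype": "float32"}}},
                             numeric_name_prefix="CHANNEL_")
    assert enc["CHANNEL_1"] == {"dtype": "float32"}  # entry moved to the prefixed key
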
""" + from satpy.writers.cf.encoding import update_encoding + logger.info('Saving datasets to NetCDF4/CF.') _check_backend_versions() From f664c60925e7edf844c1e66c8bf7bf3e810f9db6 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 27 Jun 2023 23:56:48 +0200 Subject: [PATCH 0328/1416] Refactor CF-compliant DataArray creation --- .../writer_tests/cf_tests/test_dataaarray.py | 197 ++++++++++++++++++ satpy/tests/writer_tests/test_cf.py | 174 +--------------- satpy/writers/cf/dataarray.py | 97 +++++++++ satpy/writers/cf/time.py | 2 + satpy/writers/cf_writer.py | 119 ++--------- 5 files changed, 320 insertions(+), 269 deletions(-) create mode 100644 satpy/tests/writer_tests/cf_tests/test_dataaarray.py create mode 100644 satpy/writers/cf/dataarray.py diff --git a/satpy/tests/writer_tests/cf_tests/test_dataaarray.py b/satpy/tests/writer_tests/cf_tests/test_dataaarray.py new file mode 100644 index 0000000000..20c893d0a6 --- /dev/null +++ b/satpy/tests/writer_tests/cf_tests/test_dataaarray.py @@ -0,0 +1,197 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""Tests CF-compliant DataArray creation.""" + +import datetime +from collections import OrderedDict + +import numpy as np +import xarray as xr + +from satpy.tests.utils import make_dsq + + +def test_preprocess_dataarray_name(): + """Test saving an array to netcdf/cf where dataset name starting with a digit with prefix include orig name.""" + from satpy import Scene + from satpy.writers.cf.dataarray import _preprocess_dataarray_name + + scn = Scene() + scn['1'] = xr.DataArray([1, 2, 3]) + dataarray = scn['1'] + # If numeric_name_prefix is a string, test add the original_name attributes + out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix="TEST", include_orig_name=True) + assert out_da.attrs['original_name'] == '1' + + # If numeric_name_prefix is empty string, False or None, test do not add original_name attributes + out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix="", include_orig_name=True) + assert "original_name" not in out_da.attrs + + out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix=False, include_orig_name=True) + assert "original_name" not in out_da.attrs + + out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix=None, include_orig_name=True) + assert "original_name" not in out_da.attrs + + +class TestCFWriter: + """Test creation of CF DataArray.""" + + def get_test_attrs(self): + """Create some dataset attributes for testing purpose. 
+ + Returns: + Attributes, encoded attributes, encoded and flattened attributes + + """ + # TODO: also used by cf/test_attrs.py + attrs = {'name': 'IR_108', + 'start_time': datetime.datetime(2018, 1, 1, 0), + 'end_time': datetime.datetime(2018, 1, 1, 0, 15), + 'int': 1, + 'float': 1.0, + 'none': None, # should be dropped + 'numpy_int': np.uint8(1), + 'numpy_float': np.float32(1), + 'numpy_bool': True, + 'numpy_void': np.void(0), + 'numpy_bytes': np.bytes_('test'), + 'numpy_string': np.string_('test'), + 'list': [1, 2, np.float64(3)], + 'nested_list': ["1", ["2", [3]]], + 'bool': True, + 'array': np.array([1, 2, 3], dtype='uint8'), + 'array_bool': np.array([True, False, True]), + 'array_2d': np.array([[1, 2], [3, 4]]), + 'array_3d': np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), + 'dict': {'a': 1, 'b': 2}, + 'nested_dict': {'l1': {'l2': {'l3': np.array([1, 2, 3], dtype='uint8')}}}, + 'raw_metadata': OrderedDict([ + ('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])), + ('flag', np.bool_(True)), + ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))])) + ])} + encoded = {'name': 'IR_108', + 'start_time': '2018-01-01 00:00:00', + 'end_time': '2018-01-01 00:15:00', + 'int': 1, + 'float': 1.0, + 'numpy_int': np.uint8(1), + 'numpy_float': np.float32(1), + 'numpy_bool': 'true', + 'numpy_void': '[]', + 'numpy_bytes': 'test', + 'numpy_string': 'test', + 'list': [1, 2, np.float64(3)], + 'nested_list': '["1", ["2", [3]]]', + 'bool': 'true', + 'array': np.array([1, 2, 3], dtype='uint8'), + 'array_bool': ['true', 'false', 'true'], + 'array_2d': '[[1, 2], [3, 4]]', + 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', + 'dict': '{"a": 1, "b": 2}', + 'nested_dict': '{"l1": {"l2": {"l3": [1, 2, 3]}}}', + 'raw_metadata': '{"recarray": [[0, 0], [0, 0], [0, 0]], ' + '"flag": "true", "dict": {"a": 1, "b": [1, 2, 3]}}'} + encoded_flat = {'name': 'IR_108', + 'start_time': '2018-01-01 00:00:00', + 'end_time': '2018-01-01 00:15:00', + 'int': 1, + 'float': 1.0, + 'numpy_int': np.uint8(1), + 'numpy_float': np.float32(1), + 'numpy_bool': 'true', + 'numpy_void': '[]', + 'numpy_bytes': 'test', + 'numpy_string': 'test', + 'list': [1, 2, np.float64(3)], + 'nested_list': '["1", ["2", [3]]]', + 'bool': 'true', + 'array': np.array([1, 2, 3], dtype='uint8'), + 'array_bool': ['true', 'false', 'true'], + 'array_2d': '[[1, 2], [3, 4]]', + 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', + 'dict_a': 1, + 'dict_b': 2, + 'nested_dict_l1_l2_l3': np.array([1, 2, 3], dtype='uint8'), + 'raw_metadata_recarray': '[[0, 0], [0, 0], [0, 0]]', + 'raw_metadata_flag': 'true', + 'raw_metadata_dict_a': 1, + 'raw_metadata_dict_b': np.array([1, 2, 3], dtype='uint8')} + return attrs, encoded, encoded_flat + + def assertDictWithArraysEqual(self, d1, d2): + """Check that dicts containing arrays are equal.""" + # TODO: also used by cf/test_attrs.py + assert set(d1.keys()) == set(d2.keys()) + for key, val1 in d1.items(): + val2 = d2[key] + if isinstance(val1, np.ndarray): + np.testing.assert_array_equal(val1, val2) + assert val1.dtype == val2.dtype + else: + assert val1 == val2 + if isinstance(val1, (np.floating, np.integer, np.bool_)): + assert isinstance(val2, np.generic) + assert val1.dtype == val2.dtype + + def test_make_cf_dataarray(self): + """Test the conversion of a DataArray to a CF-compatible DataArray.""" + from satpy.writers.cf.dataarray import make_cf_dataarray + + # Create set of test attributes + attrs, attrs_expected, attrs_expected_flat = self.get_test_attrs() + attrs['area'] = 'some_area' + 
attrs['prerequisites'] = [make_dsq(name='hej')]
+        attrs['_satpy_id_name'] = 'myname'
+
+        # Adjust expected attributes
+        expected_prereq = ("DataQuery(name='hej')")
+        update = {'prerequisites': [expected_prereq], 'long_name': attrs['name']}
+
+        attrs_expected.update(update)
+        attrs_expected_flat.update(update)
+
+        attrs_expected.pop('name')
+        attrs_expected_flat.pop('name')
+
+        # Create test data array
+        arr = xr.DataArray(np.array([[1, 2], [3, 4]]), attrs=attrs, dims=('y', 'x'),
+                           coords={'y': [0, 1], 'x': [1, 2], 'acq_time': ('y', [3, 4])})
+
+        # Test conversion to something cf-compliant
+        res = make_cf_dataarray(arr)
+        np.testing.assert_array_equal(res['x'], arr['x'])
+        np.testing.assert_array_equal(res['y'], arr['y'])
+        np.testing.assert_array_equal(res['acq_time'], arr['acq_time'])
+        assert res['x'].attrs == {'units': 'm', 'standard_name': 'projection_x_coordinate'}
+        assert res['y'].attrs == {'units': 'm', 'standard_name': 'projection_y_coordinate'}
+        self.assertDictWithArraysEqual(res.attrs, attrs_expected)
+
+        # Test attribute kwargs
+        res_flat = make_cf_dataarray(arr, flatten_attrs=True, exclude_attrs=['int'])
+        attrs_expected_flat.pop('int')
+        self.assertDictWithArraysEqual(res_flat.attrs, attrs_expected_flat)
+
+    def test_make_cf_dataarray_one_dimensional_array(self):
+        """Test the conversion of a 1d DataArray to a CF-compatible DataArray."""
+        from satpy.writers.cf.dataarray import make_cf_dataarray
+
+        arr = xr.DataArray(np.array([1, 2, 3, 4]), attrs={}, dims=('y',),
+                           coords={'y': [0, 1, 2, 3], 'acq_time': ('y', [0, 1, 2, 3])})
+        _ = make_cf_dataarray(arr)
diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py
index 1d9e9c7650..96cc09069a 100644
--- a/satpy/tests/writer_tests/test_cf.py
+++ b/satpy/tests/writer_tests/test_cf.py
@@ -86,23 +86,6 @@ def test_lonlat_storage(tmp_path):
     np.testing.assert_allclose(ds["mavas"].attrs["inverse_flattening"], 298.257223563)
 
 
-def test_make_cf_dataarray_lonlat():
-    """Test correct CF encoding for area with lon/lat units."""
-    from satpy.resample import add_crs_xy_coords
-    from satpy.writers.cf.dataarray import make_cf_dataarray
-
-    area = create_area_def("mavas", 4326, shape=(5, 5),
-                           center=(0, 0), resolution=(1, 1))
-    da = xr.DataArray(
-        np.arange(25).reshape(5, 5),
-        dims=("y", "x"),
-        attrs={"area": area})
-    da = add_crs_xy_coords(da, area)
-    new_da = make_cf_dataarray(da)
-    assert new_da["x"].attrs["units"] == "degrees_east"
-    assert new_da["y"].attrs["units"] == "degrees_north"
-
-
 def test_is_projected(caplog):
     """Tests for private _is_projected function."""
     from satpy.writers.cf.crs import _is_projected
@@ -130,14 +113,6 @@ def test_is_projected(caplog):
     assert "Failed to tell if data are projected." 
in caplog.text -def test_preprocess_dataarray_name(): - """Test saving an array to netcdf/cf where dataset name starting with a digit with prefix include orig name.""" - from satpy import Scene - from satpy.writers.cf_writer import _preprocess_dataarray_name - - scn = Scene() - scn['1'] = xr.DataArray([1, 2, 3]) - dataarray = scn['1'] - # If numeric_name_prefix is a string, test add the original_name attributes - out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix="TEST", include_orig_name=True) - assert out_da.attrs['original_name'] == '1' - - # If numeric_name_prefix is empty string, False or None, test do not add original_name attributes - out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix="", include_orig_name=True) - assert "original_name" not in out_da.attrs - - out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix=False, include_orig_name=True) - assert "original_name" not in out_da.attrs - - out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix=None, include_orig_name=True) - assert "original_name" not in out_da.attrs - - def test_empty_collect_cf_datasets(): """Test that if no DataArrays, collect_cf_datasets raise error.""" from satpy.writers.cf_writer import collect_cf_datasets @@ -495,150 +471,6 @@ def test_header_attrs(self): assert f.attrs['bool_'] == 'true' assert 'none' not in f.attrs.keys() - def get_test_attrs(self): - """Create some dataset attributes for testing purpose. - - Returns: - Attributes, encoded attributes, encoded and flattened attributes - - """ - # TODO: also used by cf/test_attrs.py - attrs = {'name': 'IR_108', - 'start_time': datetime(2018, 1, 1, 0), - 'end_time': datetime(2018, 1, 1, 0, 15), - 'int': 1, - 'float': 1.0, - 'none': None, # should be dropped - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': True, - 'numpy_void': np.void(0), - 'numpy_bytes': np.bytes_('test'), - 'numpy_string': np.string_('test'), - 'list': [1, 2, np.float64(3)], - 'nested_list': ["1", ["2", [3]]], - 'bool': True, - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': np.array([True, False, True]), - 'array_2d': np.array([[1, 2], [3, 4]]), - 'array_3d': np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), - 'dict': {'a': 1, 'b': 2}, - 'nested_dict': {'l1': {'l2': {'l3': np.array([1, 2, 3], dtype='uint8')}}}, - 'raw_metadata': OrderedDict([ - ('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])), - ('flag', np.bool_(True)), - ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))])) - ])} - encoded = {'name': 'IR_108', - 'start_time': '2018-01-01 00:00:00', - 'end_time': '2018-01-01 00:15:00', - 'int': 1, - 'float': 1.0, - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': 'true', - 'numpy_void': '[]', - 'numpy_bytes': 'test', - 'numpy_string': 'test', - 'list': [1, 2, np.float64(3)], - 'nested_list': '["1", ["2", [3]]]', - 'bool': 'true', - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': ['true', 'false', 'true'], - 'array_2d': '[[1, 2], [3, 4]]', - 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', - 'dict': '{"a": 1, "b": 2}', - 'nested_dict': '{"l1": {"l2": {"l3": [1, 2, 3]}}}', - 'raw_metadata': '{"recarray": [[0, 0], [0, 0], [0, 0]], ' - '"flag": "true", "dict": {"a": 1, "b": [1, 2, 3]}}'} - encoded_flat = {'name': 'IR_108', - 'start_time': '2018-01-01 00:00:00', - 'end_time': '2018-01-01 00:15:00', - 'int': 1, - 'float': 1.0, - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': 'true', - 'numpy_void': 
'[]', - 'numpy_bytes': 'test', - 'numpy_string': 'test', - 'list': [1, 2, np.float64(3)], - 'nested_list': '["1", ["2", [3]]]', - 'bool': 'true', - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': ['true', 'false', 'true'], - 'array_2d': '[[1, 2], [3, 4]]', - 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', - 'dict_a': 1, - 'dict_b': 2, - 'nested_dict_l1_l2_l3': np.array([1, 2, 3], dtype='uint8'), - 'raw_metadata_recarray': '[[0, 0], [0, 0], [0, 0]]', - 'raw_metadata_flag': 'true', - 'raw_metadata_dict_a': 1, - 'raw_metadata_dict_b': np.array([1, 2, 3], dtype='uint8')} - return attrs, encoded, encoded_flat - - def assertDictWithArraysEqual(self, d1, d2): - """Check that dicts containing arrays are equal.""" - # TODO: also used by cf/test_attrs.py - assert set(d1.keys()) == set(d2.keys()) - for key, val1 in d1.items(): - val2 = d2[key] - if isinstance(val1, np.ndarray): - np.testing.assert_array_equal(val1, val2) - assert val1.dtype == val2.dtype - else: - assert val1 == val2 - if isinstance(val1, (np.floating, np.integer, np.bool_)): - assert isinstance(val2, np.generic) - assert val1.dtype == val2.dtype - - def test_make_cf_dataarray(self): - """Test the conversion of a DataArray to a CF-compatible DataArray.""" - from satpy.writers.cf_writer import make_cf_dataarray - - # Create set of test attributes - attrs, attrs_expected, attrs_expected_flat = self.get_test_attrs() - attrs['area'] = 'some_area' - attrs['prerequisites'] = [make_dsq(name='hej')] - attrs['_satpy_id_name'] = 'myname' - - # Adjust expected attributes - expected_prereq = ("DataQuery(name='hej')") - update = {'prerequisites': [expected_prereq], 'long_name': attrs['name']} - - attrs_expected.update(update) - attrs_expected_flat.update(update) - - attrs_expected.pop('name') - attrs_expected_flat.pop('name') - - # Create test data array - arr = xr.DataArray(np.array([[1, 2], [3, 4]]), attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [1, 2], 'acq_time': ('y', [3, 4])}) - - # Test conversion to something cf-compliant - res = make_cf_dataarray(arr) - np.testing.assert_array_equal(res['x'], arr['x']) - np.testing.assert_array_equal(res['y'], arr['y']) - np.testing.assert_array_equal(res['acq_time'], arr['acq_time']) - assert res['x'].attrs == {'units': 'm', 'standard_name': 'projection_x_coordinate'} - assert res['y'].attrs == {'units': 'm', 'standard_name': 'projection_y_coordinate'} - self.assertDictWithArraysEqual(res.attrs, attrs_expected) - - # Test attribute kwargs - res_flat = make_cf_dataarray(arr, flatten_attrs=True, exclude_attrs=['int']) - attrs_expected_flat.pop('int') - self.assertDictWithArraysEqual(res_flat.attrs, attrs_expected_flat) - - def test_make_cf_dataarray_one_dimensional_array(self): - """Test the conversion of an 1d DataArray to a CF-compatible DataArray.""" - from satpy.writers.cf_writer import make_cf_dataarray - - arr = xr.DataArray(np.array([1, 2, 3, 4]), attrs={}, dims=('y',), - coords={'y': [0, 1, 2, 3], 'acq_time': ('y', [0, 1, 2, 3])}) - _ = make_cf_dataarray(arr) - def test_collect_cf_dataarrays(self): """Test collecting CF datasets from a DataArray objects.""" from satpy.writers.cf_writer import _collect_cf_dataset @@ -808,7 +640,7 @@ def test_collect_cf_dataarrays_with_latitude_named_lat(self, datasets): assert ds2['var1']['longitude'].attrs['name'] == 'longitude' -class TestNETCDFEncodingKwargs: +class TestNetcdfEncodingKwargs: """Test netCDF compression encodings.""" @pytest.fixture @@ -900,7 +732,7 @@ def test_no_warning_if_backends_match(self, scene, filename, 
monkeypatch):
             warnings.simplefilter("error")
 
 
-class TestEncodingAttribute(TestNETCDFEncodingKwargs):
+class TestEncodingAttribute(TestNetcdfEncodingKwargs):
     """Test CF writer with 'encoding' dataset attribute."""
 
     @pytest.fixture
diff --git a/satpy/writers/cf/dataarray.py b/satpy/writers/cf/dataarray.py
new file mode 100644
index 0000000000..9ca90ae52f
--- /dev/null
+++ b/satpy/writers/cf/dataarray.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2017-2023 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see .
+"""Utility to generate a CF-compliant DataArray."""
+import warnings
+
+from satpy.writers.cf.attrs import preprocess_datarray_attrs
+from satpy.writers.cf.coords_attrs import add_xy_coords_attrs
+from satpy.writers.cf.time import EPOCH, _process_time_coord
+
+
+def _handle_dataarray_name(original_name, numeric_name_prefix):
+    if original_name[0].isdigit():
+        if numeric_name_prefix:
+            new_name = numeric_name_prefix + original_name
+        else:
+            warnings.warn(
+                f'Invalid NetCDF dataset name: {original_name} starts with a digit.',
+                stacklevel=5
+            )
+            new_name = original_name  # occurs when numeric_name_prefix = '', None or False
+    else:
+        new_name = original_name
+    return original_name, new_name
+
+
+def _preprocess_dataarray_name(dataarray, numeric_name_prefix, include_orig_name):
+    """Change the DataArray name by prepending numeric_name_prefix if the name is a digit."""
+    original_name = None
+    dataarray = dataarray.copy()
+    if 'name' in dataarray.attrs:
+        original_name = dataarray.attrs.pop('name')
+        original_name, new_name = _handle_dataarray_name(original_name, numeric_name_prefix)
+        dataarray = dataarray.rename(new_name)
+
+    if include_orig_name and numeric_name_prefix and original_name and original_name != new_name:
+        dataarray.attrs['original_name'] = original_name
+
+    return dataarray
+
+
+def make_cf_dataarray(dataarray,
+                      epoch=EPOCH,
+                      flatten_attrs=False,
+                      exclude_attrs=None,
+                      include_orig_name=True,
+                      numeric_name_prefix='CHANNEL_'):
+    """Make the xr.DataArray CF-compliant.
+
+    Parameters
+    ----------
+    dataarray : xr.DataArray
+        The data array to be made CF-compliant.
+    epoch : str, optional
+        Reference time for encoding of time coordinates.
+    flatten_attrs : bool, optional
+        If True, flatten dict-type attributes.
+        The default is False.
+    exclude_attrs : list, optional
+        List of dataset attributes to be excluded.
+        The default is None.
+    include_orig_name : bool, optional
+        Include the original dataset name in the netcdf variable attributes.
+        The default is True.
+    numeric_name_prefix : str, optional
+        Prepend dataset name with this if starting with a digit.
+        The default is ``"CHANNEL_"``.
+
+    Returns
+    -------
+    new_data : xr.DataArray
+        CF-compliant xr.DataArray. 
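A compact usage sketch for the relocated helper, under the behaviour documented in this docstring (fabricated DataArray; not part of the patch):

    import numpy as np
    import xarray as xr

    from satpy.writers.cf.dataarray import make_cf_dataarray

    arr = xr.DataArray(np.zeros((2, 2)), dims=("y", "x"),
                       attrs={"name": "1", "nested": {"a": 1}, "none_attr": None})
    cf_arr = make_cf_dataarray(arr, flatten_attrs=True)
    assert cf_arr.name == "CHANNEL_1"            # digit names get the default prefix
    assert cf_arr.attrs["original_name"] == "1"  # kept because include_orig_name=True
    assert cf_arr.attrs["nested_a"] == 1         # dict attributes were flattened
    assert "none_attr" not in cf_arr.attrs       # None-valued attributes are dropped
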
+ + """ + dataarray = _preprocess_dataarray_name(dataarray=dataarray, + numeric_name_prefix=numeric_name_prefix, + include_orig_name=include_orig_name) + dataarray = preprocess_datarray_attrs(dataarray=dataarray, + flatten_attrs=flatten_attrs, + exclude_attrs=exclude_attrs) + dataarray = add_xy_coords_attrs(dataarray) + dataarray = _process_time_coord(dataarray, epoch=epoch) + return dataarray diff --git a/satpy/writers/cf/time.py b/satpy/writers/cf/time.py index 6308f42364..821f7b47b0 100644 --- a/satpy/writers/cf/time.py +++ b/satpy/writers/cf/time.py @@ -19,6 +19,8 @@ import numpy as np import xarray as xr +EPOCH = u"seconds since 1970-01-01 00:00:00" + def add_time_bounds_dimension(ds, time="time"): """Add time bound dimension to xr.Dataset.""" diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 63f57f2e63..d7503860b4 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -164,12 +164,10 @@ from packaging.version import Version from satpy.writers import Writer -from satpy.writers.cf.coords_attrs import add_xy_coords_attrs +from satpy.writers.cf.time import EPOCH logger = logging.getLogger(__name__) -EPOCH = u"seconds since 1970-01-01 00:00:00" - # Check availability of either netCDF4 or h5netcdf package try: import netCDF4 @@ -225,101 +223,6 @@ def get_extra_ds(dataarray, keys=None): # ### CF-conversion -def _handle_dataarray_name(original_name, numeric_name_prefix): - if original_name[0].isdigit(): - if numeric_name_prefix: - new_name = numeric_name_prefix + original_name - else: - warnings.warn( - f'Invalid NetCDF dataset name: {original_name} starts with a digit.', - stacklevel=5 - ) - new_name = original_name # occurs when numeric_name_prefix = '', None or False - else: - new_name = original_name - return original_name, new_name - - -def _preprocess_dataarray_name(dataarray, numeric_name_prefix, include_orig_name): - """Change the DataArray name by prepending numeric_name_prefix if the name is a digit.""" - original_name = None - dataarray = dataarray.copy() - if 'name' in dataarray.attrs: - original_name = dataarray.attrs.pop('name') - original_name, new_name = _handle_dataarray_name(original_name, numeric_name_prefix) - dataarray = dataarray.rename(new_name) - - if include_orig_name and numeric_name_prefix and original_name and original_name != new_name: - dataarray.attrs['original_name'] = original_name - - return dataarray - - -def _get_groups(groups, list_datarrays): - """Return a dictionary with the list of xr.DataArray associated to each group. - - If no groups (groups=None), return all DataArray attached to a single None key. - Else, collect the DataArrays associated to each group. - """ - if groups is None: - grouped_dataarrays = {None: list_datarrays} - else: - grouped_dataarrays = defaultdict(list) - for datarray in list_datarrays: - for group_name, group_members in groups.items(): - if datarray.attrs['name'] in group_members: - grouped_dataarrays[group_name].append(datarray) - break - return grouped_dataarrays - - -def make_cf_dataarray(dataarray, - epoch=EPOCH, - flatten_attrs=False, - exclude_attrs=None, - include_orig_name=True, - numeric_name_prefix='CHANNEL_'): - """Make the xr.DataArray CF-compliant. - - Parameters - ---------- - dataarray : xr.DataArray - The data array to be made CF-compliant. - epoch : str, optional - Reference time for encoding of time coordinates. - flatten_attrs : bool, optional - If True, flatten dict-type attributes. - The default is False. 
- exclude_attrs : list, optional - List of dataset attributes to be excluded. - The default is None. - include_orig_name : bool, optional - Include the original dataset name in the netcdf variable attributes. - The default is True. - numeric_name_prefix : TYPE, optional - Prepend dataset name with this if starting with a digit. - The default is ``"CHANNEL_"``. - - Returns - ------- - new_data : xr.DataArray - CF-compliant xr.DataArray. - - """ - from satpy.writers.cf.attrs import preprocess_datarray_attrs - from satpy.writers.cf.time import _process_time_coord - - dataarray = _preprocess_dataarray_name(dataarray=dataarray, - numeric_name_prefix=numeric_name_prefix, - include_orig_name=include_orig_name) - dataarray = preprocess_datarray_attrs(dataarray=dataarray, - flatten_attrs=flatten_attrs, - exclude_attrs=exclude_attrs) - dataarray = add_xy_coords_attrs(dataarray) - dataarray = _process_time_coord(dataarray, epoch=epoch) - return dataarray - - def _collect_cf_dataset(list_dataarrays, epoch=EPOCH, flatten_attrs=False, @@ -365,6 +268,7 @@ def _collect_cf_dataset(list_dataarrays, link_coords, make_alt_coords_unique, ) + from satpy.writers.cf.dataarray import make_cf_dataarray # Create dictionary of input datarrays # --> Since keys=None, it doesn't never retrieve ancillary variables !!! @@ -534,6 +438,24 @@ def collect_cf_datasets(list_dataarrays, return grouped_datasets, header_attrs +def _get_groups(groups, list_datarrays): + """Return a dictionary with the list of xr.DataArray associated to each group. + + If no groups (groups=None), return all DataArray attached to a single None key. + Else, collect the DataArrays associated to each group. + """ + if groups is None: + grouped_dataarrays = {None: list_datarrays} + else: + grouped_dataarrays = defaultdict(list) + for datarray in list_datarrays: + for group_name, group_members in groups.items(): + if datarray.attrs['name'] in group_members: + grouped_dataarrays[group_name].append(datarray) + break + return grouped_dataarrays + + def _sanitize_writer_kwargs(writer_kwargs): """Remove satpy-specific kwargs.""" writer_kwargs = copy.deepcopy(writer_kwargs) @@ -575,6 +497,7 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, numeric_name_prefix (str): Prepend dataset name with this if starting with a digit """ + from satpy.writers.cf.dataarray import make_cf_dataarray warnings.warn('CFWriter.da2cf is deprecated.' 
'Use satpy.writers.cf_writer.make_cf_dataarray instead.', DeprecationWarning, stacklevel=3) From 3953319ca3c8f94886e67030d589a35a68491fcc Mon Sep 17 00:00:00 2001 From: ghiggi Date: Wed, 28 Jun 2023 00:19:13 +0200 Subject: [PATCH 0329/1416] Refactor CF-compliant Datasets creation --- .../writer_tests/cf_tests/test_dataaarray.py | 19 + .../writer_tests/cf_tests/test_datasets.py | 129 +++++++ satpy/tests/writer_tests/test_cf.py | 86 ----- satpy/writers/cf/datasets.py | 272 ++++++++++++++ satpy/writers/cf_writer.py | 340 +++--------------- 5 files changed, 466 insertions(+), 380 deletions(-) create mode 100644 satpy/tests/writer_tests/cf_tests/test_datasets.py create mode 100644 satpy/writers/cf/datasets.py diff --git a/satpy/tests/writer_tests/cf_tests/test_dataaarray.py b/satpy/tests/writer_tests/cf_tests/test_dataaarray.py index 20c893d0a6..13ed11474e 100644 --- a/satpy/tests/writer_tests/cf_tests/test_dataaarray.py +++ b/satpy/tests/writer_tests/cf_tests/test_dataaarray.py @@ -49,6 +49,25 @@ def test_preprocess_dataarray_name(): assert "original_name" not in out_da.attrs +def test_make_cf_dataarray_lonlat(): + """Test correct CF encoding for area with lon/lat units.""" + from pyresample import create_area_def + + from satpy.resample import add_crs_xy_coords + from satpy.writers.cf.dataarray import make_cf_dataarray + + area = create_area_def("mavas", 4326, shape=(5, 5), + center=(0, 0), resolution=(1, 1)) + da = xr.DataArray( + np.arange(25).reshape(5, 5), + dims=("y", "x"), + attrs={"area": area}) + da = add_crs_xy_coords(da, area) + new_da = make_cf_dataarray(da) + assert new_da["x"].attrs["units"] == "degrees_east" + assert new_da["y"].attrs["units"] == "degrees_north" + + class TestCFWriter: """Test creation of CF DataArray.""" diff --git a/satpy/tests/writer_tests/cf_tests/test_datasets.py b/satpy/tests/writer_tests/cf_tests/test_datasets.py new file mode 100644 index 0000000000..d6784cd78f --- /dev/null +++ b/satpy/tests/writer_tests/cf_tests/test_datasets.py @@ -0,0 +1,129 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . 
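Before the new tests, a short sketch of the grouping behaviour that moves into satpy/writers/cf/datasets.py in this commit (illustrative DataArrays; _get_groups is a private helper, so this mirrors its documented contract rather than a public API):

    import xarray as xr

    from satpy.writers.cf.datasets import _get_groups

    vis = xr.DataArray([1.0], dims=("x",), attrs={"name": "VIS006"})
    ir = xr.DataArray([2.0], dims=("x",), attrs={"name": "IR_108"})

    # groups=None attaches every DataArray to a single None key
    assert _get_groups(None, [vis, ir])[None] == [vis, ir]
    # otherwise DataArrays are bucketed by the group membership lists
    grouped = _get_groups({"visible": ["VIS006"], "infrared": ["IR_108"]}, [vis, ir])
    assert grouped["visible"][0] is vis
    assert grouped["infrared"][0] is ir
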
+"""Tests CF-compliant DataArray creation.""" +import datetime + +import pytest +import xarray as xr +from pyresample import AreaDefinition + + +def test_empty_collect_cf_datasets(): + """Test that if no DataArrays, collect_cf_datasets raise error.""" + from satpy.writers.cf.datasets import collect_cf_datasets + + with pytest.raises(RuntimeError): + collect_cf_datasets(list_dataarrays=[]) + + +class TestCollectCfDatasets: + """Test case for collect_cf_dataset.""" + + def test_collect_cf_dataarrays(self): + """Test collecting CF datasets from a DataArray objects.""" + from satpy.writers.cf.datasets import _collect_cf_dataset + + geos = AreaDefinition( + area_id='geos', + description='geos', + proj_id='geos', + projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, + width=2, height=2, + area_extent=[-1, -1, 1, 1]) + + # Define test datasets + data = [[1, 2], [3, 4]] + y = [1, 2] + x = [1, 2] + time = [1, 2] + tstart = datetime.datetime(2019, 4, 1, 12, 0) + tend = datetime.datetime(2019, 4, 1, 12, 15) + list_dataarrays = [xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, + attrs={'name': 'var1', 'start_time': tstart, 'end_time': tend, 'area': geos}), + xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, + attrs={'name': 'var2', 'long_name': 'variable 2'})] + + # Collect datasets + ds = _collect_cf_dataset(list_dataarrays, include_lonlats=True) + + # Test results + assert len(ds.keys()) == 3 + assert set(ds.keys()) == {'var1', 'var2', 'geos'} + + da_var1 = ds['var1'] + da_var2 = ds['var2'] + assert da_var1.name == 'var1' + assert da_var1.attrs['grid_mapping'] == 'geos' + assert da_var1.attrs['long_name'] == 'var1' + # variable 2 + assert 'grid_mapping' not in da_var2.attrs + assert da_var2.attrs['long_name'] == 'variable 2' + + def test_collect_cf_dataarrays_with_latitude_named_lat(self): + """Test collecting CF datasets with latitude named lat.""" + from satpy.writers.cf.datasets import _collect_cf_dataset + + data = [[75, 2], [3, 4]] + y = [1, 2] + x = [1, 2] + geos = AreaDefinition( + area_id='geos', + description='geos', + proj_id='geos', + projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, + width=2, height=2, + area_extent=[-1, -1, 1, 1]) + datasets = { + 'var1': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x}), + 'var2': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x}), + 'lat': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x}), + 'lon': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x})} + datasets['lat'].attrs['standard_name'] = 'latitude' + datasets['var1'].attrs['standard_name'] = 'dummy' + datasets['var2'].attrs['standard_name'] = 'dummy' + datasets['var2'].attrs['area'] = geos + datasets['var1'].attrs['area'] = geos + datasets['lat'].attrs['name'] = 'lat' + datasets['var1'].attrs['name'] = 'var1' + datasets['var2'].attrs['name'] = 'var2' + datasets['lon'].attrs['name'] = 'lon' + + datasets_list = [datasets[key] for key in datasets.keys()] + datasets_list_no_latlon = [datasets[key] for key in ['var1', 'var2']] + + # Collect datasets + ds = _collect_cf_dataset(datasets_list, include_lonlats=True) + ds2 = _collect_cf_dataset(datasets_list_no_latlon, include_lonlats=True) + + # Test results + assert len(ds.keys()) == 5 + assert set(ds.keys()) == {'var1', 'var2', 'lon', 'lat', 'geos'} + with pytest.raises(KeyError): + ds['var1'].attrs["latitude"] + with pytest.raises(KeyError): + 
ds['var1'].attrs["longitude"] + assert ds2['var1']['latitude'].attrs['name'] == 'latitude' + assert ds2['var1']['longitude'].attrs['name'] == 'longitude' diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 96cc09069a..628a32fed3 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -86,23 +86,6 @@ def test_lonlat_storage(tmp_path): np.testing.assert_allclose(ds["mavas"].attrs["inverse_flattening"], 298.257223563) -def test_make_cf_dataarray_lonlat(): - """Test correct CF encoding for area with lon/lat units.""" - from satpy.resample import add_crs_xy_coords - from satpy.writers.cf.dataarray import make_cf_dataarray - - area = create_area_def("mavas", 4326, shape=(5, 5), - center=(0, 0), resolution=(1, 1)) - da = xr.DataArray( - np.arange(25).reshape(5, 5), - dims=("y", "x"), - attrs={"area": area}) - da = add_crs_xy_coords(da, area) - new_da = make_cf_dataarray(da) - assert new_da["x"].attrs["units"] == "degrees_east" - assert new_da["y"].attrs["units"] == "degrees_north" - - def test_is_projected(caplog): """Tests for private _is_projected function.""" from satpy.writers.cf.crs import _is_projected @@ -130,14 +113,6 @@ def test_is_projected(caplog): assert "Failed to tell if data are projected." in caplog.text -def test_empty_collect_cf_datasets(): - """Test that if no DataArrays, collect_cf_datasets raise error.""" - from satpy.writers.cf_writer import collect_cf_datasets - - with pytest.raises(RuntimeError): - collect_cf_datasets(list_dataarrays=[]) - - class TestCFWriter: """Test case for CF writer.""" @@ -471,46 +446,6 @@ def test_header_attrs(self): assert f.attrs['bool_'] == 'true' assert 'none' not in f.attrs.keys() - def test_collect_cf_dataarrays(self): - """Test collecting CF datasets from a DataArray objects.""" - from satpy.writers.cf_writer import _collect_cf_dataset - - geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, - width=2, height=2, - area_extent=[-1, -1, 1, 1]) - - # Define test datasets - data = [[1, 2], [3, 4]] - y = [1, 2] - x = [1, 2] - time = [1, 2] - tstart = datetime(2019, 4, 1, 12, 0) - tend = datetime(2019, 4, 1, 12, 15) - list_dataarrays = [xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, - attrs={'name': 'var1', 'start_time': tstart, 'end_time': tend, 'area': geos}), - xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, - attrs={'name': 'var2', 'long_name': 'variable 2'})] - - # Collect datasets - ds = _collect_cf_dataset(list_dataarrays, include_lonlats=True) - - # Test results - assert len(ds.keys()) == 3 - assert set(ds.keys()) == {'var1', 'var2', 'geos'} - - da_var1 = ds['var1'] - da_var2 = ds['var2'] - assert da_var1.name == 'var1' - assert da_var1.attrs['grid_mapping'] == 'geos' - assert da_var1.attrs['long_name'] == 'var1' - # variable 2 - assert 'grid_mapping' not in da_var2.attrs - assert da_var2.attrs['long_name'] == 'variable 2' - def test_load_module_with_old_pyproj(self): """Test that cf_writer can still be loaded with pyproj 1.9.6.""" import importlib @@ -618,27 +553,6 @@ def test_has_projection_coords(self, datasets): datasets['lat'].attrs['standard_name'] = 'dummy' assert not has_projection_coords(datasets) - def test_collect_cf_dataarrays_with_latitude_named_lat(self, datasets): - """Test collecting CF datasets with latitude named lat.""" - from 
satpy.writers.cf_writer import _collect_cf_dataset
-
-        datasets_list = [datasets[key] for key in datasets.keys()]
-        datasets_list_no_latlon = [datasets[key] for key in ['var1', 'var2']]
-
-        # Collect datasets
-        ds = _collect_cf_dataset(datasets_list, include_lonlats=True)
-        ds2 = _collect_cf_dataset(datasets_list_no_latlon, include_lonlats=True)
-
-        # Test results
-        assert len(ds.keys()) == 5
-        assert set(ds.keys()) == {'var1', 'var2', 'lon', 'lat', 'geos'}
-        with pytest.raises(KeyError):
-            ds['var1'].attrs["latitude"]
-        with pytest.raises(KeyError):
-            ds['var1'].attrs["longitude"]
-        assert ds2['var1']['latitude'].attrs['name'] == 'latitude'
-        assert ds2['var1']['longitude'].attrs['name'] == 'longitude'
-
 
 class TestNetcdfEncodingKwargs:
     """Test netCDF compression encodings."""
diff --git a/satpy/writers/cf/datasets.py b/satpy/writers/cf/datasets.py
new file mode 100644
index 0000000000..09726c0e32
--- /dev/null
+++ b/satpy/writers/cf/datasets.py
@@ -0,0 +1,272 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2017-2023 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Utility to generate a CF-compliant Datasets."""
+import warnings
+from collections import defaultdict
+
+import xarray as xr
+
+from satpy.writers.cf.time import EPOCH
+from satpy.writers.cf_writer import CF_DTYPES, CF_VERSION
+
+
+def get_extra_ds(dataarray, keys=None):
+    """Get the ancillary_variables DataArrays associated to a dataset."""
+    ds_collection = {}
+    # Retrieve ancillary variable dataarrays
+    for ancillary_dataarray in dataarray.attrs.get('ancillary_variables', []):
+        ancillary_variable = ancillary_dataarray.name
+        if keys and ancillary_variable not in keys:
+            keys.append(ancillary_variable)
+            ds_collection.update(get_extra_ds(ancillary_dataarray, keys=keys))
+    # Add input dataarray
+    ds_collection[dataarray.attrs['name']] = dataarray
+    return ds_collection
+
+
+def _get_groups(groups, list_datarrays):
+    """Return a dictionary with the list of xr.DataArray associated with each group.
+
+    If groups is None, return all DataArrays attached to a single None key.
+    Otherwise, collect the DataArrays associated with each group.
+    """
+    if groups is None:
+        grouped_dataarrays = {None: list_datarrays}
+    else:
+        grouped_dataarrays = defaultdict(list)
+        for datarray in list_datarrays:
+            for group_name, group_members in groups.items():
+                if datarray.attrs['name'] in group_members:
+                    grouped_dataarrays[group_name].append(datarray)
+                    break
+    return grouped_dataarrays
+
+
+def _collect_cf_dataset(list_dataarrays,
+                        epoch=EPOCH,
+                        flatten_attrs=False,
+                        exclude_attrs=None,
+                        include_lonlats=True,
+                        pretty=False,
+                        include_orig_name=True,
+                        numeric_name_prefix='CHANNEL_'):
+    """Process a list of xr.DataArray and return a CF-compliant xr.Dataset.
+
+    Parameters
+    ----------
+    list_dataarrays : list
+        List of DataArrays to make CF compliant and merge into an xr.Dataset.
+    epoch : str
+        Reference time for encoding the time coordinates (if available).
+        Example format: "seconds since 1970-01-01 00:00:00".
+        If None, the default reference time is retrieved using `from satpy.writers.cf.time import EPOCH`
+    flatten_attrs : bool, optional
+        If True, flatten dict-type attributes.
+    exclude_attrs : list, optional
+        List of xr.DataArray attribute names to be excluded.
+    include_lonlats : bool, optional
+        If True, it includes 'latitude' and 'longitude' coordinates also for a satpy Scene defined on an AreaDefinition.
+        If the 'area' attribute is a SwathDefinition, it always includes latitude and longitude coordinates.
+    pretty : bool, optional
+        Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent.
+    include_orig_name : bool, optional
+        Include the original dataset name as a variable attribute in the xr.Dataset.
+    numeric_name_prefix : str, optional
+        Prefix to add to each variable whose name starts with a digit.
+        Use '' or None to leave this out.
+
+    Returns
+    -------
+    ds : xr.Dataset
+        A partially CF-compliant xr.Dataset
+    """
+    from satpy.writers.cf.area import (
+        area2cf,
+        assert_xy_unique,
+        has_projection_coords,
+        link_coords,
+        make_alt_coords_unique,
+    )
+    from satpy.writers.cf.dataarray import make_cf_dataarray
+
+    # Create dictionary of input dataarrays
+    # --> Since keys=None, it never retrieves ancillary variables !!!
+    ds_collection = {}
+    for dataarray in list_dataarrays:
+        ds_collection.update(get_extra_ds(dataarray))
+
+    # Check if one DataArray in the collection has 'longitude' or 'latitude'
+    got_lonlats = has_projection_coords(ds_collection)
+
+    # Sort dictionary by key names
+    ds_collection = dict(sorted(ds_collection.items()))
+
+    dict_dataarrays = {}
+    for dataarray in ds_collection.values():
+        dataarray_type = dataarray.dtype
+        if dataarray_type not in CF_DTYPES:
+            warnings.warn(
+                f'dtype {dataarray_type} not compatible with {CF_VERSION}.',
+                stacklevel=3
+            )
+        # Deep copy the dataarray since we are adding/modifying attributes and coordinates
+        dataarray = dataarray.copy(deep=True)
+
+        # Add CF-compliant area information from the pyresample area
+        # - If include_lonlats=True, add latitude and longitude coordinates
+        # - Add grid_mapping attribute to the DataArray
+        # - Return the CRS DataArray as first list element
+        # - Return the CF-compliant input DataArray as second list element
+        try:
+            list_new_dataarrays = area2cf(dataarray,
+                                          include_lonlats=include_lonlats,
+                                          got_lonlats=got_lonlats)
+        except KeyError:
+            list_new_dataarrays = [dataarray]
+
+        # Ensure each DataArray is CF-compliant
+        # --> NOTE: Here the CRS DataArray is repeatedly overwritten
+        # --> NOTE: If the input list_dataarrays have different pyresample areas with the same name,
+        #           area information can be lost here !!!
+        for new_dataarray in list_new_dataarrays:
+            new_dataarray = make_cf_dataarray(new_dataarray,
+                                              epoch=epoch,
+                                              flatten_attrs=flatten_attrs,
+                                              exclude_attrs=exclude_attrs,
+                                              include_orig_name=include_orig_name,
+                                              numeric_name_prefix=numeric_name_prefix)
+            dict_dataarrays[new_dataarray.name] = new_dataarray
+
+    # Check that all DataArrays have the same size
+    assert_xy_unique(dict_dataarrays)
+
+    # Deal with the 'coordinates' attributes indicating lat/lon coords
+    # NOTE: this currently is dropped by default !!!
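+    # A minimal sketch (hypothetical variable names) of the CF pattern handled
+    # by the call below: an input variable carrying
+    #     da.attrs["coordinates"] = "longitude latitude"
+    # declares 'longitude' and 'latitude' as its auxiliary coordinate
+    # variables, which link_coords resolves against the collected DataArrays.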
+    link_coords(dict_dataarrays)
+
+    # Ensure non-dimensional coordinates are unique across DataArrays
+    # --> If not unique, prepend the DataArray name to the coordinate
+    # --> If unique and pretty=True, the DataArray name is not prepended
+    # --> 'longitude' and 'latitude' coordinates are not prepended
+    dict_dataarrays = make_alt_coords_unique(dict_dataarrays, pretty=pretty)
+
+    # Create an xr.Dataset
+    ds = xr.Dataset(dict_dataarrays)
+    return ds
+
+
+def collect_cf_datasets(list_dataarrays,
+                        header_attrs=None,
+                        exclude_attrs=None,
+                        flatten_attrs=False,
+                        pretty=True,
+                        include_lonlats=True,
+                        epoch=EPOCH,
+                        include_orig_name=True,
+                        numeric_name_prefix='CHANNEL_',
+                        groups=None):
+    """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Datasets.
+
+    If the xr.DataArrays do not all share the same dimensions, it creates a collection
+    of xr.Datasets, each sharing the same dimensions.
+
+    Parameters
+    ----------
+    list_dataarrays (list):
+        List of DataArrays to make CF compliant and merge into groups of xr.Datasets.
+    header_attrs (dict):
+        Global attributes of the output xr.Dataset.
+    epoch (str):
+        Reference time for encoding the time coordinates (if available).
+        Example format: "seconds since 1970-01-01 00:00:00".
+        If None, the default reference time is retrieved using `from satpy.writers.cf.time import EPOCH`
+    flatten_attrs (bool):
+        If True, flatten dict-type attributes.
+    exclude_attrs (list):
+        List of xr.DataArray attribute names to be excluded.
+    include_lonlats (bool):
+        If True, it includes 'latitude' and 'longitude' coordinates also for a satpy Scene defined on an AreaDefinition.
+        If the 'area' attribute is a SwathDefinition, it always includes latitude and longitude coordinates.
+    pretty (bool):
+        Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent.
+    include_orig_name (bool):
+        Include the original dataset name as a variable attribute in the xr.Dataset.
+    numeric_name_prefix (str):
+        Prefix to add to each variable whose name starts with a digit.
+        Use '' or None to leave this out.
+    groups (dict):
+        Group datasets according to the given assignment:
+
+            `{'<group_name>': ['dataset_name1', 'dataset_name2', ...]}`
+
+        It is used to create grouped netCDFs using the CFWriter.
+        If None (the default), no groups will be created.
+
+    Returns
+    -------
+    grouped_datasets : dict
+        A dictionary of CF-compliant xr.Dataset: {group_name: xr.Dataset}
+    header_attrs : dict
+        Global attributes to be attached to the xr.Dataset / netCDF4.
+    """
+    from satpy.writers.cf.attrs import preprocess_header_attrs
+    from satpy.writers.cf.time import add_time_bounds_dimension
+
+    if not list_dataarrays:
+        raise RuntimeError("None of the requested datasets have been "
+                           "generated or could not be loaded. Requested "
+                           "composite inputs may need to have matching "
+                           "dimensions (eg. through resampling).")
+
+    header_attrs = preprocess_header_attrs(header_attrs=header_attrs,
+                                           flatten_attrs=flatten_attrs)
+
+    # Retrieve groups
+    # - If groups is None: {None: list_dataarrays}
+    # - If groups is not None: {group_name: [xr.DataArray, xr.DataArray, ...], ...}
+    # Note: if no dataset name matches any group, this behaves like groups=None !
+    grouped_dataarrays = _get_groups(groups, list_dataarrays)
+    is_grouped = len(grouped_dataarrays) >= 2
+
+    # If not grouped, add CF conventions.
+    # - If 'Conventions' key already present, do not overwrite !
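+    # e.g. a user-supplied header_attrs={"Conventions": "CF-1.7, ACDD-1.3"}
+    # (illustrative value) is kept as-is rather than replaced by CF_VERSION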
+ if "Conventions" not in header_attrs and not is_grouped: + header_attrs['Conventions'] = CF_VERSION + + # Create dictionary of group xr.Datasets + # --> If no groups (groups=None) --> group_name=None + grouped_datasets = {} + for group_name, group_dataarrays in grouped_dataarrays.items(): + ds = _collect_cf_dataset( + list_dataarrays=group_dataarrays, + epoch=epoch, + flatten_attrs=flatten_attrs, + exclude_attrs=exclude_attrs, + include_lonlats=include_lonlats, + pretty=pretty, + include_orig_name=include_orig_name, + numeric_name_prefix=numeric_name_prefix) + + if not is_grouped: + ds.attrs = header_attrs + + if 'time' in ds: + ds = add_time_bounds_dimension(ds, time="time") + + grouped_datasets[group_name] = ds + return grouped_datasets, header_attrs diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index d7503860b4..85c6fe999b 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -157,7 +157,6 @@ import copy import logging import warnings -from collections import defaultdict import numpy as np import xarray as xr @@ -183,6 +182,10 @@ if netCDF4 is None and h5netcdf is None: raise ImportError('Ensure that the netCDF4 or h5netcdf package is installed.') + +CF_VERSION = 'CF-1.7' + + # Numpy datatypes compatible with all netCDF4 backends. ``np.unicode_`` is # excluded because h5py (and thus h5netcdf) has problems with unicode, see # https://github.com/h5py/h5py/issues/624.""" @@ -202,259 +205,6 @@ np.dtype('float64'), np.string_] -CF_VERSION = 'CF-1.7' - - -def get_extra_ds(dataarray, keys=None): - """Get the ancillary_variables DataArrays associated to a dataset.""" - ds_collection = {} - # Retrieve ancillary variable datarrays - for ancillary_dataarray in dataarray.attrs.get('ancillary_variables', []): - ancillary_variable = ancillary_dataarray.name - if keys and ancillary_variable not in keys: - keys.append(ancillary_variable) - ds_collection.update(get_extra_ds(ancillary_dataarray, keys=keys)) - # Add input dataarray - ds_collection[dataarray.attrs['name']] = dataarray - return ds_collection - - -# ###--------------------------------------------------------------------------. -# ### CF-conversion - - -def _collect_cf_dataset(list_dataarrays, - epoch=EPOCH, - flatten_attrs=False, - exclude_attrs=None, - include_lonlats=True, - pretty=False, - include_orig_name=True, - numeric_name_prefix='CHANNEL_'): - """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Dataset. - - Parameters - ---------- - list_dataarrays : list - List of DataArrays to make CF compliant and merge into a xr.Dataset. - epoch : str - Reference time for encoding the time coordinates (if available). - Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using `from satpy.cf_writer import EPOCH` - flatten_attrs : bool, optional - If True, flatten dict-type attributes. - exclude_attrs : list, optional - List of xr.DataArray attribute names to be excluded. - include_lonlats : bool, optional - If True, it includes 'latitude' and 'longitude' coordinates also for satpy scene defined on an AreaDefinition. - If the 'area' attribute is a SwathDefinition, it always include latitude and longitude coordinates. - pretty : bool, optional - Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. - include_orig_name : bool, optional - Include the original dataset name as a variable attribute in the xr.Dataset. 
- numeric_name_prefix : str, optional - Prefix to add the each variable with name starting with a digit. - Use '' or None to leave this out. - - Returns - ------- - ds : xr.Dataset - A partially CF-compliant xr.Dataset - """ - from satpy.writers.cf.area import ( - area2cf, - assert_xy_unique, - has_projection_coords, - link_coords, - make_alt_coords_unique, - ) - from satpy.writers.cf.dataarray import make_cf_dataarray - - # Create dictionary of input datarrays - # --> Since keys=None, it doesn't never retrieve ancillary variables !!! - ds_collection = {} - for dataarray in list_dataarrays: - ds_collection.update(get_extra_ds(dataarray)) - - # Check if one DataArray in the collection has 'longitude' or 'latitude' - got_lonlats = has_projection_coords(ds_collection) - - # Sort dictionary by keys name - ds_collection = dict(sorted(ds_collection.items())) - - dict_dataarrays = {} - for dataarray in ds_collection.values(): - dataarray_type = dataarray.dtype - if dataarray_type not in CF_DTYPES: - warnings.warn( - f'dtype {dataarray_type} not compatible with {CF_VERSION}.', - stacklevel=3 - ) - # Deep copy the datarray since adding/modifying attributes and coordinates - dataarray = dataarray.copy(deep=True) - - # Add CF-compliant area information from the pyresample area - # - If include_lonlats=True, add latitude and longitude coordinates - # - Add grid_mapping attribute to the DataArray - # - Return the CRS DataArray as first list element - # - Return the CF-compliant input DataArray as second list element - try: - list_new_dataarrays = area2cf(dataarray, - include_lonlats=include_lonlats, - got_lonlats=got_lonlats) - except KeyError: - list_new_dataarrays = [dataarray] - - # Ensure each DataArray is CF-compliant - # --> NOTE: Here the CRS DataArray is repeatedly overwrited - # --> NOTE: If the input list_dataarrays have different pyresample areas with the same name - # area information can be lost here !!! - for new_dataarray in list_new_dataarrays: - new_dataarray = make_cf_dataarray(new_dataarray, - epoch=epoch, - flatten_attrs=flatten_attrs, - exclude_attrs=exclude_attrs, - include_orig_name=include_orig_name, - numeric_name_prefix=numeric_name_prefix) - dict_dataarrays[new_dataarray.name] = new_dataarray - - # Check all DataArray have same size - assert_xy_unique(dict_dataarrays) - - # Deal with the 'coordinates' attributes indicating lat/lon coords - # NOTE: this currently is dropped by default !!! - link_coords(dict_dataarrays) - - # Ensure non-dimensional coordinates to be unique across DataArrays - # --> If not unique, prepend the DataArray name to the coordinate - # --> If unique, does not prepend the DataArray name only if pretty=True - # --> 'longitude' and 'latitude' coordinates are not prepended - dict_dataarrays = make_alt_coords_unique(dict_dataarrays, pretty=pretty) - - # Create a xr.Dataset - ds = xr.Dataset(dict_dataarrays) - return ds - - -def collect_cf_datasets(list_dataarrays, - header_attrs=None, - exclude_attrs=None, - flatten_attrs=False, - pretty=True, - include_lonlats=True, - epoch=EPOCH, - include_orig_name=True, - numeric_name_prefix='CHANNEL_', - groups=None): - """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Datasets. - - If the xr.DataArrays does not share the same dimensions, it creates a collection - of xr.Datasets sharing the same dimensions. - - Parameters - ---------- - list_dataarrays (list): - List of DataArrays to make CF compliant and merge into groups of xr.Datasets. 
- header_attrs: (dict): - Global attributes of the output xr.Dataset. - epoch (str): - Reference time for encoding the time coordinates (if available). - Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using `from satpy.cf_writer import EPOCH` - flatten_attrs (bool): - If True, flatten dict-type attributes. - exclude_attrs (list): - List of xr.DataArray attribute names to be excluded. - include_lonlats (bool): - If True, it includes 'latitude' and 'longitude' coordinates also for satpy scene defined on an AreaDefinition. - If the 'area' attribute is a SwathDefinition, it always include latitude and longitude coordinates. - pretty (bool): - Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. - include_orig_name (bool). - Include the original dataset name as a variable attribute in the xr.Dataset. - numeric_name_prefix (str): - Prefix to add the each variable with name starting with a digit. - Use '' or None to leave this out. - groups (dict): - Group datasets according to the given assignment: - - `{'': ['dataset_name1', 'dataset_name2', ...]}` - - It is used to create grouped netCDFs using the CF_Writer. - If None (the default), no groups will be created. - - Returns - ------- - grouped_datasets : dict - A dictionary of CF-compliant xr.Dataset: {group_name: xr.Dataset} - header_attrs : dict - Global attributes to be attached to the xr.Dataset / netCDF4. - """ - from satpy.writers.cf.attrs import preprocess_header_attrs - from satpy.writers.cf.time import add_time_bounds_dimension - - if not list_dataarrays: - raise RuntimeError("None of the requested datasets have been " - "generated or could not be loaded. Requested " - "composite inputs may need to have matching " - "dimensions (eg. through resampling).") - - header_attrs = preprocess_header_attrs(header_attrs=header_attrs, - flatten_attrs=flatten_attrs) - - # Retrieve groups - # - If groups is None: {None: list_dataarrays} - # - if groups not None: {group_name: [xr.DataArray, xr.DataArray ,..], ...} - # Note: if all dataset names are wrong, behave like groups = None ! - grouped_dataarrays = _get_groups(groups, list_dataarrays) - is_grouped = len(grouped_dataarrays) >= 2 - - # If not grouped, add CF conventions. - # - If 'Conventions' key already present, do not overwrite ! - if "Conventions" not in header_attrs and not is_grouped: - header_attrs['Conventions'] = CF_VERSION - - # Create dictionary of group xr.Datasets - # --> If no groups (groups=None) --> group_name=None - grouped_datasets = {} - for group_name, group_dataarrays in grouped_dataarrays.items(): - ds = _collect_cf_dataset( - list_dataarrays=group_dataarrays, - epoch=epoch, - flatten_attrs=flatten_attrs, - exclude_attrs=exclude_attrs, - include_lonlats=include_lonlats, - pretty=pretty, - include_orig_name=include_orig_name, - numeric_name_prefix=numeric_name_prefix) - - if not is_grouped: - ds.attrs = header_attrs - - if 'time' in ds: - ds = add_time_bounds_dimension(ds, time="time") - - grouped_datasets[group_name] = ds - return grouped_datasets, header_attrs - - -def _get_groups(groups, list_datarrays): - """Return a dictionary with the list of xr.DataArray associated to each group. - - If no groups (groups=None), return all DataArray attached to a single None key. - Else, collect the DataArrays associated to each group. 
- """ - if groups is None: - grouped_dataarrays = {None: list_datarrays} - else: - grouped_dataarrays = defaultdict(list) - for datarray in list_datarrays: - for group_name, group_members in groups.items(): - if datarray.attrs['name'] in group_members: - grouped_dataarrays[group_name].append(datarray) - break - return grouped_dataarrays - def _sanitize_writer_kwargs(writer_kwargs): """Remove satpy-specific kwargs.""" @@ -478,46 +228,6 @@ def _initialize_root_netcdf(filename, engine, header_attrs, to_netcdf_kwargs): class CFWriter(Writer): """Writer producing NetCDF/CF compatible datasets.""" - @staticmethod - def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, - include_orig_name=True, numeric_name_prefix='CHANNEL_'): - """Convert the dataarray to something cf-compatible. - - Args: - dataarray (xr.DataArray): - The data array to be converted - epoch (str): - Reference time for encoding of time coordinates - flatten_attrs (bool): - If True, flatten dict-type attributes - exclude_attrs (list): - List of dataset attributes to be excluded - include_orig_name (bool): - Include the original dataset name in the netcdf variable attributes - numeric_name_prefix (str): - Prepend dataset name with this if starting with a digit - """ - from satpy.writers.cf.dataarray import make_cf_dataarray - warnings.warn('CFWriter.da2cf is deprecated.' - 'Use satpy.writers.cf_writer.make_cf_dataarray instead.', - DeprecationWarning, stacklevel=3) - return make_cf_dataarray(dataarray=dataarray, - epoch=epoch, - flatten_attrs=flatten_attrs, - exclude_attrs=exclude_attrs, - include_orig_name=include_orig_name, - numeric_name_prefix=numeric_name_prefix) - - @staticmethod - def update_encoding(dataset, to_netcdf_kwargs): - """Update encoding info (deprecated).""" - from satpy.writers.cf.encoding import update_encoding - - warnings.warn('CFWriter.update_encoding is deprecated. ' - 'Use satpy.writers.cf.encoding.update_encoding instead.', - DeprecationWarning, stacklevel=3) - return update_encoding(dataset, to_netcdf_kwargs) - def save_dataset(self, dataset, filename=None, fill_value=None, **kwargs): """Save the *dataset* to a given *filename*.""" return self.save_datasets([dataset], filename, **kwargs) @@ -560,6 +270,7 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. """ + from satpy.writers.cf.datasets import collect_cf_datasets from satpy.writers.cf.encoding import update_encoding logger.info('Saving datasets to NetCDF4/CF.') @@ -614,6 +325,47 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, written.append(res) return written + @staticmethod + def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, + include_orig_name=True, numeric_name_prefix='CHANNEL_'): + """Convert the dataarray to something cf-compatible. + + Args: + dataarray (xr.DataArray): + The data array to be converted + epoch (str): + Reference time for encoding of time coordinates + flatten_attrs (bool): + If True, flatten dict-type attributes + exclude_attrs (list): + List of dataset attributes to be excluded + include_orig_name (bool): + Include the original dataset name in the netcdf variable attributes + numeric_name_prefix (str): + Prepend dataset name with this if starting with a digit + """ + from satpy.writers.cf.dataarray import make_cf_dataarray + warnings.warn('CFWriter.da2cf is deprecated.' 
+ 'Use satpy.writers.cf_writer.make_cf_dataarray instead.', + DeprecationWarning, stacklevel=3) + return make_cf_dataarray(dataarray=dataarray, + epoch=epoch, + flatten_attrs=flatten_attrs, + exclude_attrs=exclude_attrs, + include_orig_name=include_orig_name, + numeric_name_prefix=numeric_name_prefix) + + @staticmethod + def update_encoding(dataset, to_netcdf_kwargs): + """Update encoding info (deprecated).""" + from satpy.writers.cf.encoding import update_encoding + + warnings.warn('CFWriter.update_encoding is deprecated. ' + 'Use satpy.writers.cf.encoding.update_encoding instead.', + DeprecationWarning, stacklevel=3) + return update_encoding(dataset, to_netcdf_kwargs) + + # --------------------------------------------------------------------------. # NetCDF version From 064558d25f460b2ba8c3a59082e0c9dee85d568a Mon Sep 17 00:00:00 2001 From: ghiggi Date: Wed, 28 Jun 2023 00:20:53 +0200 Subject: [PATCH 0330/1416] Fix changed imports --- satpy/_scene_converters.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/_scene_converters.py b/satpy/_scene_converters.py index 25fe728b9f..ba4432a58f 100644 --- a/satpy/_scene_converters.py +++ b/satpy/_scene_converters.py @@ -90,7 +90,8 @@ def to_xarray(scn, A CF-compliant xr.Dataset """ - from satpy.writers.cf_writer import EPOCH, collect_cf_datasets + from satpy.writers.cf.datasets import collect_cf_datasets + from satpy.writers.cf.time import EPOCH if epoch is None: epoch = EPOCH From 5762950332a450f4f3d36ff9af01bae26271e6e4 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Wed, 28 Jun 2023 00:44:41 +0200 Subject: [PATCH 0331/1416] Fix all writers tests --- .../tests/writer_tests/cf_tests/test_area.py | 87 ++++++++++++++ .../writer_tests/cf_tests/test_datasets.py | 23 +++- satpy/tests/writer_tests/test_cf.py | 108 ------------------ 3 files changed, 109 insertions(+), 109 deletions(-) diff --git a/satpy/tests/writer_tests/cf_tests/test_area.py b/satpy/tests/writer_tests/cf_tests/test_area.py index e293ff39a6..e3454b3170 100644 --- a/satpy/tests/writer_tests/cf_tests/test_area.py +++ b/satpy/tests/writer_tests/cf_tests/test_area.py @@ -16,12 +16,21 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Tests for the CF Area.""" +import logging + import dask.array as da import numpy as np import pytest import xarray as xr from pyresample import AreaDefinition, SwathDefinition +logger = logging.getLogger(__name__) + + +# NOTE: +# The following fixtures are not defined in this file, but are used and injected by Pytest: +# - caplog + class TestCFArea: """Test case for CF Area.""" @@ -399,3 +408,81 @@ def test_add_lonlat_coords(self): np.testing.assert_array_equal(lon.data, lons_ref) assert {'name': 'latitude', 'standard_name': 'latitude', 'units': 'degrees_north'}.items() <= lat.attrs.items() assert {'name': 'longitude', 'standard_name': 'longitude', 'units': 'degrees_east'}.items() <= lon.attrs.items() + + def test_is_projected(self, caplog): + """Tests for private _is_projected function.""" + from satpy.writers.cf.crs import _is_projected + + # test case with units but no area + da = xr.DataArray( + np.arange(25).reshape(5, 5), + dims=("y", "x"), + coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "m"}), + "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "m"})}) + assert _is_projected(da) + + da = xr.DataArray( + np.arange(25).reshape(5, 5), + dims=("y", "x"), + coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "degrees_east"}), + "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "degrees_north"})}) + assert not _is_projected(da) + + da = xr.DataArray( + np.arange(25).reshape(5, 5), + dims=("y", "x")) + with caplog.at_level(logging.WARNING): + assert _is_projected(da) + assert "Failed to tell if data are projected." in caplog.text + + @pytest.fixture + def datasets(self): + """Create test dataset.""" + data = [[75, 2], [3, 4]] + y = [1, 2] + x = [1, 2] + geos = AreaDefinition( + area_id='geos', + description='geos', + proj_id='geos', + projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, + width=2, height=2, + area_extent=[-1, -1, 1, 1]) + datasets = { + 'var1': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x}), + 'var2': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x}), + 'lat': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x}), + 'lon': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x})} + datasets['lat'].attrs['standard_name'] = 'latitude' + datasets['var1'].attrs['standard_name'] = 'dummy' + datasets['var2'].attrs['standard_name'] = 'dummy' + datasets['var2'].attrs['area'] = geos + datasets['var1'].attrs['area'] = geos + datasets['lat'].attrs['name'] = 'lat' + datasets['var1'].attrs['name'] = 'var1' + datasets['var2'].attrs['name'] = 'var2' + datasets['lon'].attrs['name'] = 'lon' + return datasets + + def test_is_lon_or_lat_dataarray(self, datasets): + """Test the is_lon_or_lat_dataarray function.""" + from satpy.writers.cf.area import is_lon_or_lat_dataarray + + assert is_lon_or_lat_dataarray(datasets['lat']) + assert not is_lon_or_lat_dataarray(datasets['var1']) + + def test_has_projection_coords(self, datasets): + """Test the has_projection_coords function.""" + from satpy.writers.cf.area import has_projection_coords + + assert has_projection_coords(datasets) + datasets['lat'].attrs['standard_name'] = 'dummy' + assert not has_projection_coords(datasets) diff --git a/satpy/tests/writer_tests/cf_tests/test_datasets.py b/satpy/tests/writer_tests/cf_tests/test_datasets.py index d6784cd78f..b094feecbc 100644 --- a/satpy/tests/writer_tests/cf_tests/test_datasets.py +++ b/satpy/tests/writer_tests/cf_tests/test_datasets.py @@ 
-18,9 +18,10 @@ """Tests CF-compliant DataArray creation.""" import datetime +import numpy as np import pytest import xarray as xr -from pyresample import AreaDefinition +from pyresample import AreaDefinition, create_area_def def test_empty_collect_cf_datasets(): @@ -127,3 +128,23 @@ def test_collect_cf_dataarrays_with_latitude_named_lat(self): ds['var1'].attrs["longitude"] assert ds2['var1']['latitude'].attrs['name'] == 'latitude' assert ds2['var1']['longitude'].attrs['name'] == 'longitude' + + def test_geographic_area_coords_attrs(self): + """Test correct storage for area with lon/lat units.""" + from satpy.tests.utils import make_fake_scene + from satpy.writers.cf.datasets import _collect_cf_dataset + + scn = make_fake_scene( + {"ketolysis": np.arange(25).reshape(5, 5)}, + daskify=True, + area=create_area_def("mavas", 4326, shape=(5, 5), + center=(0, 0), resolution=(1, 1))) + + ds = _collect_cf_dataset([scn["ketolysis"]], include_lonlats=False) + assert ds["ketolysis"].attrs["grid_mapping"] == "mavas" + assert ds["mavas"].attrs["grid_mapping_name"] == "latitude_longitude" + assert ds["x"].attrs["units"] == "degrees_east" + assert ds["y"].attrs["units"] == "degrees_north" + assert ds["mavas"].attrs["longitude_of_prime_meridian"] == 0.0 + np.testing.assert_allclose(ds["mavas"].attrs["semi_major_axis"], 6378137.0) + np.testing.assert_allclose(ds["mavas"].attrs["inverse_flattening"], 298.257223563) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 628a32fed3..046e689002 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -17,18 +17,15 @@ # satpy. If not, see . """Tests for the CF writer.""" -import logging import os import tempfile import warnings from datetime import datetime import numpy as np -import pyresample.geometry import pytest import xarray as xr from packaging.version import Version -from pyresample import create_area_def from satpy import Scene from satpy.tests.utils import make_dsq @@ -42,7 +39,6 @@ # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path -# - caplog # - request @@ -65,54 +61,6 @@ def __exit__(self, *args): os.remove(self.filename) -def test_lonlat_storage(tmp_path): - """Test correct storage for area with lon/lat units.""" - from ..utils import make_fake_scene - scn = make_fake_scene( - {"ketolysis": np.arange(25).reshape(5, 5)}, - daskify=True, - area=create_area_def("mavas", 4326, shape=(5, 5), - center=(0, 0), resolution=(1, 1))) - - filename = os.fspath(tmp_path / "test.nc") - scn.save_datasets(filename=filename, writer="cf", include_lonlats=False) - with xr.open_dataset(filename) as ds: - assert ds["ketolysis"].attrs["grid_mapping"] == "mavas" - assert ds["mavas"].attrs["grid_mapping_name"] == "latitude_longitude" - assert ds["x"].attrs["units"] == "degrees_east" - assert ds["y"].attrs["units"] == "degrees_north" - assert ds["mavas"].attrs["longitude_of_prime_meridian"] == 0.0 - np.testing.assert_allclose(ds["mavas"].attrs["semi_major_axis"], 6378137.0) - np.testing.assert_allclose(ds["mavas"].attrs["inverse_flattening"], 298.257223563) - - -def test_is_projected(caplog): - """Tests for private _is_projected function.""" - from satpy.writers.cf.crs import _is_projected - - # test case with units but no area - da = xr.DataArray( - np.arange(25).reshape(5, 5), - dims=("y", "x"), - coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "m"}), - "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": 
"m"})}) - assert _is_projected(da) - - da = xr.DataArray( - np.arange(25).reshape(5, 5), - dims=("y", "x"), - coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "degrees_east"}), - "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "degrees_north"})}) - assert not _is_projected(da) - - da = xr.DataArray( - np.arange(25).reshape(5, 5), - dims=("y", "x")) - with caplog.at_level(logging.WARNING): - assert _is_projected(da) - assert "Failed to tell if data are projected." in caplog.text - - class TestCFWriter: """Test case for CF writer.""" @@ -498,62 +446,6 @@ def test_global_attr_history_and_Conventions(self): assert 'Created by pytroll/satpy on' in f.attrs['history'] -class TestCFWriterData: - """Test case for CF writer where data arrays are needed.""" - - @pytest.fixture - def datasets(self): - """Create test dataset.""" - data = [[75, 2], [3, 4]] - y = [1, 2] - x = [1, 2] - geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, - width=2, height=2, - area_extent=[-1, -1, 1, 1]) - datasets = { - 'var1': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x}), - 'var2': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x}), - 'lat': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x}), - 'lon': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x})} - datasets['lat'].attrs['standard_name'] = 'latitude' - datasets['var1'].attrs['standard_name'] = 'dummy' - datasets['var2'].attrs['standard_name'] = 'dummy' - datasets['var2'].attrs['area'] = geos - datasets['var1'].attrs['area'] = geos - datasets['lat'].attrs['name'] = 'lat' - datasets['var1'].attrs['name'] = 'var1' - datasets['var2'].attrs['name'] = 'var2' - datasets['lon'].attrs['name'] = 'lon' - return datasets - - def test_is_lon_or_lat_dataarray(self, datasets): - """Test the is_lon_or_lat_dataarray function.""" - from satpy.writers.cf.area import is_lon_or_lat_dataarray - - assert is_lon_or_lat_dataarray(datasets['lat']) - assert not is_lon_or_lat_dataarray(datasets['var1']) - - def test_has_projection_coords(self, datasets): - """Test the has_projection_coords function.""" - from satpy.writers.cf.area import has_projection_coords - - assert has_projection_coords(datasets) - datasets['lat'].attrs['standard_name'] = 'dummy' - assert not has_projection_coords(datasets) - - class TestNetcdfEncodingKwargs: """Test netCDF compression encodings.""" From dabaa44223ed4f6acd634b9ab518ec149ca2fbfc Mon Sep 17 00:00:00 2001 From: ghiggi Date: Wed, 28 Jun 2023 00:49:10 +0200 Subject: [PATCH 0332/1416] Add logging on top-file --- satpy/tests/writer_tests/cf_tests/test_area.py | 3 --- satpy/writers/cf/attrs.py | 4 +++- satpy/writers/cf/dataarray.py | 3 +++ satpy/writers/cf/datasets.py | 3 +++ satpy/writers/cf/encoding.py | 3 +++ satpy/writers/cf/time.py | 5 +++++ 6 files changed, 17 insertions(+), 4 deletions(-) diff --git a/satpy/tests/writer_tests/cf_tests/test_area.py b/satpy/tests/writer_tests/cf_tests/test_area.py index e3454b3170..1dd82ddd9d 100644 --- a/satpy/tests/writer_tests/cf_tests/test_area.py +++ b/satpy/tests/writer_tests/cf_tests/test_area.py @@ -24,9 +24,6 @@ import xarray as xr from pyresample import AreaDefinition, SwathDefinition -logger = logging.getLogger(__name__) - - # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - caplog diff --git a/satpy/writers/cf/attrs.py 
b/satpy/writers/cf/attrs.py index aac0f5f289..153d645594 100644 --- a/satpy/writers/cf/attrs.py +++ b/satpy/writers/cf/attrs.py @@ -16,15 +16,17 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """CF processing of attributes.""" - import datetime import json +import logging from collections import OrderedDict import numpy as np from satpy.writers.utils import flatten_dict +logger = logging.getLogger(__name__) + class AttributeEncoder(json.JSONEncoder): """JSON encoder for dataset attributes.""" diff --git a/satpy/writers/cf/dataarray.py b/satpy/writers/cf/dataarray.py index 9ca90ae52f..fd9b20be5e 100644 --- a/satpy/writers/cf/dataarray.py +++ b/satpy/writers/cf/dataarray.py @@ -16,12 +16,15 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Utility to generate a CF-compliant DataArray.""" +import logging import warnings from satpy.writers.cf.attrs import preprocess_datarray_attrs from satpy.writers.cf.coords_attrs import add_xy_coords_attrs from satpy.writers.cf.time import EPOCH, _process_time_coord +logger = logging.getLogger(__name__) + def _handle_dataarray_name(original_name, numeric_name_prefix): if original_name[0].isdigit(): diff --git a/satpy/writers/cf/datasets.py b/satpy/writers/cf/datasets.py index 09726c0e32..4baabbc894 100644 --- a/satpy/writers/cf/datasets.py +++ b/satpy/writers/cf/datasets.py @@ -16,6 +16,7 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Utility to generate a CF-compliant Datasets.""" +import logging import warnings from collections import defaultdict @@ -24,6 +25,8 @@ from satpy.writers.cf.time import EPOCH from satpy.writers.cf_writer import CF_DTYPES, CF_VERSION +logger = logging.getLogger(__name__) + def get_extra_ds(dataarray, keys=None): """Get the ancillary_variables DataArrays associated to a dataset.""" diff --git a/satpy/writers/cf/encoding.py b/satpy/writers/cf/encoding.py index c8ea0f25f4..55a48f70fd 100644 --- a/satpy/writers/cf/encoding.py +++ b/satpy/writers/cf/encoding.py @@ -16,11 +16,14 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """CF encoding.""" +import logging import numpy as np import xarray as xr from xarray.coding.times import CFDatetimeCoder +logger = logging.getLogger(__name__) + def _set_default_chunks(encoding, dataset): """Update encoding to preserve current dask chunks. diff --git a/satpy/writers/cf/time.py b/satpy/writers/cf/time.py index 821f7b47b0..05b90c4641 100644 --- a/satpy/writers/cf/time.py +++ b/satpy/writers/cf/time.py @@ -16,9 +16,14 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""CF processing of time dimension and coordinates.""" +import logging + import numpy as np import xarray as xr +logger = logging.getLogger(__name__) + + EPOCH = u"seconds since 1970-01-01 00:00:00" From d22e117d8c0678cd7a1f57602205b70bd1f69567 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 28 Jun 2023 11:48:49 +0200 Subject: [PATCH 0333/1416] Fix median filter --- satpy/modifiers/filters.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/satpy/modifiers/filters.py b/satpy/modifiers/filters.py index d8c54c15f3..151082e723 100644 --- a/satpy/modifiers/filters.py +++ b/satpy/modifiers/filters.py @@ -2,11 +2,10 @@ import logging import xarray as xr -from dask_image.ndfilters import median_filter from satpy.modifiers import ModifierBase -logger = logging.getLogger('filters') +logger = logging.getLogger(__name__) class Median(ModifierBase): @@ -25,6 +24,8 @@ def __init__(self, median_filter_params, **kwargs): def __call__(self, arrays, **info): """Get the median filtered band.""" + from dask_image.ndfilters import median_filter + data = arrays[0] logger.debug(f"Apply median filtering with parameters {self.median_filter_params}.") res = xr.DataArray(median_filter(data.data, **self.median_filter_params), From ea12d9b348cac72cf86c3c3df6abe140fce5d42e Mon Sep 17 00:00:00 2001 From: Gionata Ghiggi Date: Wed, 28 Jun 2023 11:54:21 +0200 Subject: [PATCH 0334/1416] Update satpy/tests/writer_tests/cf_tests/test_dataaarray.py --- satpy/tests/writer_tests/cf_tests/test_dataaarray.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/writer_tests/cf_tests/test_dataaarray.py b/satpy/tests/writer_tests/cf_tests/test_dataaarray.py index 13ed11474e..896de5c55b 100644 --- a/satpy/tests/writer_tests/cf_tests/test_dataaarray.py +++ b/satpy/tests/writer_tests/cf_tests/test_dataaarray.py @@ -68,7 +68,7 @@ def test_make_cf_dataarray_lonlat(): assert new_da["y"].attrs["units"] == "degrees_north" -class TestCFWriter: +class TestCfDataArray: """Test creation of CF DataArray.""" def get_test_attrs(self): From 24d20a78da803e1c752dd3093be593b98c89391a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Jul 2023 10:47:47 +0000 Subject: [PATCH 0335/1416] Bump pypa/gh-action-pypi-publish from 1.8.6 to 1.8.7 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.6 to 1.8.7. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.6...v1.8.7) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- .github/workflows/deploy-sdist.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index a32d6a53dc..ab689698a4 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -19,7 +19,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.8.6 + uses: pypa/gh-action-pypi-publish@v1.8.7 with: user: __token__ password: ${{ secrets.pypi_password }} From 18288df8dc943fc07322f47c435c08113abb0b8a Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 3 Jul 2023 14:43:54 +0200 Subject: [PATCH 0336/1416] Update changelog for v0.43.0 --- CHANGELOG.md | 64 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6b6be918dd..799ae0a867 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,67 @@ +## Version 0.43.0 (2023/07/03) + +### Issues Closed + +* [Issue 2519](https://github.com/pytroll/satpy/issues/2519) - MSG Dust RGB adding coastilnes and grid to the image +* [Issue 2506](https://github.com/pytroll/satpy/issues/2506) - Add xarray_kwargs capability to the geocat reader ([PR 2507](https://github.com/pytroll/satpy/pull/2507) by [@joleenf](https://github.com/joleenf)) +* [Issue 2502](https://github.com/pytroll/satpy/issues/2502) - Cropping S3 image not working +* [Issue 2494](https://github.com/pytroll/satpy/issues/2494) - avhrr_l1b_gaclac fails to read most files from NOAA CLASS ([PR 2501](https://github.com/pytroll/satpy/pull/2501) by [@sfinkens](https://github.com/sfinkens)) +* [Issue 2490](https://github.com/pytroll/satpy/issues/2490) - ninjogeotiff writer adds offset/scale factor when this is not meaningful ([PR 2491](https://github.com/pytroll/satpy/pull/2491) by [@gerritholl](https://github.com/gerritholl)) +* [Issue 2483](https://github.com/pytroll/satpy/issues/2483) - Cacheing doesn't work with `scn.crop` ([PR 2485](https://github.com/pytroll/satpy/pull/2485) by [@djhoese](https://github.com/djhoese)) +* [Issue 2465](https://github.com/pytroll/satpy/issues/2465) - Possibility of dual licensing: GPL-3.0 & MIT +* [Issue 2464](https://github.com/pytroll/satpy/issues/2464) - MITIFF writer using pillow: turn off compression due to rowsperstrip issues +* [Issue 2463](https://github.com/pytroll/satpy/issues/2463) - seviri_l1b_native reader issue with reading remote files (azure) +* [Issue 2409](https://github.com/pytroll/satpy/issues/2409) - Inconsistent behavior of time attributes in EUM L1 GEO readers ([PR 2420](https://github.com/pytroll/satpy/pull/2420) by [@YouvaEUMex](https://github.com/YouvaEUMex)) +* [Issue 1749](https://github.com/pytroll/satpy/issues/1749) - Load from blended scene +* [Issue 859](https://github.com/pytroll/satpy/issues/859) - Doesn't recognize MODIS L2 file + +In this release 12 issues were closed. 
+ +### Pull Requests Merged + +#### Bugs fixed + +* [PR 2522](https://github.com/pytroll/satpy/pull/2522) - Fix CF tests due to new xarray release +* [PR 2516](https://github.com/pytroll/satpy/pull/2516) - Fix SEVIRI native reader failing when missing main header +* [PR 2510](https://github.com/pytroll/satpy/pull/2510) - Fix warnings from NWCSAF reader +* [PR 2507](https://github.com/pytroll/satpy/pull/2507) - Fix HDF4 support in geocat reader with hardcoded engine ([2506](https://github.com/pytroll/satpy/issues/2506)) +* [PR 2492](https://github.com/pytroll/satpy/pull/2492) - Fix xarray version for cf tests +* [PR 2491](https://github.com/pytroll/satpy/pull/2491) - Change logic for ninjogeotiff gradient/axisintercept tags ([2490](https://github.com/pytroll/satpy/issues/2490)) +* [PR 2485](https://github.com/pytroll/satpy/pull/2485) - Fix angle caching not handling a specific type of irregular chunking ([2483](https://github.com/pytroll/satpy/issues/2483)) +* [PR 2481](https://github.com/pytroll/satpy/pull/2481) - Fix NWCSAF reading for NOAA-21 + +#### Features added + +* [PR 2521](https://github.com/pytroll/satpy/pull/2521) - Add a median filter modifier +* [PR 2508](https://github.com/pytroll/satpy/pull/2508) - Add support for OLCI L2 files which are missing Frame_IDs +* [PR 2504](https://github.com/pytroll/satpy/pull/2504) - Improve flexibility of olci level2 reader +* [PR 2501](https://github.com/pytroll/satpy/pull/2501) - Add Pygac reference to avhrr_l1b_gaclac documentation ([2494](https://github.com/pytroll/satpy/issues/2494)) +* [PR 2499](https://github.com/pytroll/satpy/pull/2499) - Add option to clip negative ABI radiances +* [PR 2497](https://github.com/pytroll/satpy/pull/2497) - Enable to pass a custom function to Scene.aggregate +* [PR 2489](https://github.com/pytroll/satpy/pull/2489) - Add "neutral_resolution_band" kwarg to RatioSharpenedRGB/SelfSharpenedRGB +* [PR 2480](https://github.com/pytroll/satpy/pull/2480) - Add helper-function for reading SEVIRI L1.5 Native header. +* [PR 2449](https://github.com/pytroll/satpy/pull/2449) - Generalise the `true_color_reproduction` composite and enhancement +* [PR 2420](https://github.com/pytroll/satpy/pull/2420) - Fix inconsistent behavior of time attributes in EUM L1 GEO readers ([2409](https://github.com/pytroll/satpy/issues/2409)) +* [PR 2259](https://github.com/pytroll/satpy/pull/2259) - Refactor `CFWriter.save_datasets` and enable retrieval of equivalent xr.Dataset with `scn.to_xarray()` +* [PR 2117](https://github.com/pytroll/satpy/pull/2117) - Add reader for GMS-5 VISSR data + +#### Documentation changes + +* [PR 2514](https://github.com/pytroll/satpy/pull/2514) - Fix argument name in DayNightComposite example document +* [PR 2501](https://github.com/pytroll/satpy/pull/2501) - Add Pygac reference to avhrr_l1b_gaclac documentation ([2494](https://github.com/pytroll/satpy/issues/2494)) +* [PR 2478](https://github.com/pytroll/satpy/pull/2478) - Fix eccodes package names in setup.py, update documentation for setting up development environment. 
+* [PR 2474](https://github.com/pytroll/satpy/pull/2474) - Reorganize seviri_l2_grib.yaml file and add more documentation to seviri_l1b_native.py + +#### Clean ups + +* [PR 2523](https://github.com/pytroll/satpy/pull/2523) - Convert CF Writer tests to pytest +* [PR 2486](https://github.com/pytroll/satpy/pull/2486) - Fix leftover deprecated nosetest teardown methods +* [PR 2478](https://github.com/pytroll/satpy/pull/2478) - Fix eccodes package names in setup.py, update documentation for setting up development environment. +* [PR 2474](https://github.com/pytroll/satpy/pull/2474) - Reorganize seviri_l2_grib.yaml file and add more documentation to seviri_l1b_native.py + +In this release 28 pull requests were closed. + + ## Version 0.42.2 (2023/05/10) ### Issues Closed From da59d5b9c80faf34d94e259a7f7f4d975a85ff2b Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 6 Jul 2023 09:24:03 -0500 Subject: [PATCH 0337/1416] Update unstable download URL in CI --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 8f6816285c..faa0aea2cc 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -90,7 +90,7 @@ jobs: # may break the conda-forge libraries trying to use newer glibc versions run: | python -m pip install \ - --index-url https://pypi.anaconda.org/scipy-wheels-nightly/simple/ \ + --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple/ \ --trusted-host pypi.anaconda.org \ --no-deps --pre --upgrade \ matplotlib \ From 3aa03fce189e3aaf695271a8cb0c85fd0c0ab29b Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 6 Jul 2023 11:09:51 -0500 Subject: [PATCH 0338/1416] Add utility for checking if numba is available in unstable CI --- .../reader_tests/gms/test_gms5_vissr_l1b.py | 13 ++++++++---- .../gms/test_gms5_vissr_navigation.py | 9 ++++++-- satpy/tests/reader_tests/utils.py | 21 +++++++++++++++++++ 3 files changed, 37 insertions(+), 6 deletions(-) diff --git a/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py index 31482f1e10..e67e41c8d7 100644 --- a/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py @@ -9,14 +9,19 @@ import xarray as xr from pyresample.geometry import AreaDefinition -import satpy.readers.gms.gms5_vissr_format as fmt -import satpy.readers.gms.gms5_vissr_l1b as vissr -import satpy.readers.gms.gms5_vissr_navigation as nav import satpy.tests.reader_tests.gms.test_gms5_vissr_data as real_world from satpy.readers import FSFile -from satpy.tests.reader_tests.utils import get_jit_methods +from satpy.tests.reader_tests.utils import get_jit_methods, skip_numba_unstable_if_missing from satpy.tests.utils import make_dataid +try: + import satpy.readers.gms.gms5_vissr_format as fmt + import satpy.readers.gms.gms5_vissr_l1b as vissr + import satpy.readers.gms.gms5_vissr_navigation as nav +except ImportError: + if skip_numba_unstable_if_missing(): + pytest.skip("Numba is not compatible with unstable NumPy", allow_module_level=True) + @pytest.fixture(params=[False, True], autouse=True) def disable_jit(request, monkeypatch): diff --git a/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py index 47c5fd044c..f89f911e5e 100644 --- a/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py +++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py @@ -3,8 +3,13 @@ import numpy 
as np import pytest -import satpy.readers.gms.gms5_vissr_navigation as nav -from satpy.tests.reader_tests.utils import get_jit_methods +from satpy.tests.reader_tests.utils import get_jit_methods, skip_numba_unstable_if_missing + +try: + import satpy.readers.gms.gms5_vissr_navigation as nav +except ImportError: + if skip_numba_unstable_if_missing(): + pytest.skip("Numba is not compatible with unstable NumPy", allow_module_level=True) # Navigation references computed with JMA's Msial library (files # VISSR_19960217_2331_IR1.A.IMG and VISSR_19960217_2331_VIS.A.IMG). The VIS diff --git a/satpy/tests/reader_tests/utils.py b/satpy/tests/reader_tests/utils.py index 9415ac56ec..05e6d9cb18 100644 --- a/satpy/tests/reader_tests/utils.py +++ b/satpy/tests/reader_tests/utils.py @@ -18,6 +18,7 @@ """Utilities for reader tests.""" import inspect +import os def default_attr_processor(root, attr): @@ -61,3 +62,23 @@ def get_jit_methods(module): def _is_jit_method(obj): return hasattr(obj, "py_func") + + +def skip_numba_unstable_if_missing(): + """Determine if numba-based tests should be skipped during unstable CI tests. + + If numba fails to import it could be because numba is not compatible with + a newer version of numpy. This is very likely to happen in the + unstable/experimental CI environment. This function returns ``True`` if + numba-based tests should be skipped if ``numba`` could not + be imported *and* we're in the unstable environment. We determine if we're + in this CI environment by looking for the ``UNSTABLE="1"`` + environment variable. + + """ + try: + import numba + except ImportError: + numba = None + + return numba is None and os.environ.get("UNSTABLE", "0") in ("1", "true") From f8f6dd698e28805191230225b91748de54580c6a Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 7 Jul 2023 10:15:05 -0500 Subject: [PATCH 0339/1416] Reraise numba import errors for GMS5 tests --- satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py | 5 +++-- satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py index e67e41c8d7..f4908c0a2b 100644 --- a/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py @@ -18,9 +18,10 @@ import satpy.readers.gms.gms5_vissr_format as fmt import satpy.readers.gms.gms5_vissr_l1b as vissr import satpy.readers.gms.gms5_vissr_navigation as nav -except ImportError: +except ImportError as err: if skip_numba_unstable_if_missing(): - pytest.skip("Numba is not compatible with unstable NumPy", allow_module_level=True) + pytest.skip(f"Numba is not compatible with unstable NumPy: {err!s}", allow_module_level=True) + raise @pytest.fixture(params=[False, True], autouse=True) diff --git a/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py index f89f911e5e..144139a07a 100644 --- a/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py +++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py @@ -7,9 +7,10 @@ try: import satpy.readers.gms.gms5_vissr_navigation as nav -except ImportError: +except ImportError as err: if skip_numba_unstable_if_missing(): - pytest.skip("Numba is not compatible with unstable NumPy", allow_module_level=True) + pytest.skip(f"Numba is not compatible with unstable NumPy: {err!s}", allow_module_level=True) + raise # Navigation references computed with JMA's 
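+# NOTE: pytest.skip() raises, so the bare `raise` above only propagates import
+# errors unrelated to the numba/unstable-numpy incompatibility.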
Msial library (files # VISSR_19960217_2331_IR1.A.IMG and VISSR_19960217_2331_VIS.A.IMG). The VIS From 4a50b98012fba06323b5e48f213a82aec1bbbf7e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Jul 2023 10:45:03 +0000 Subject: [PATCH 0340/1416] Bump pypa/gh-action-pypi-publish from 1.8.7 to 1.8.8 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.7 to 1.8.8. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.7...v1.8.8) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/deploy-sdist.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index ab689698a4..68463b2b03 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -19,7 +19,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.8.7 + uses: pypa/gh-action-pypi-publish@v1.8.8 with: user: __token__ password: ${{ secrets.pypi_password }} From a935fe65a01222f7db9640f76fa8b79cf047206d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 17 Jul 2023 08:57:59 -0500 Subject: [PATCH 0341/1416] Bump expected xarray version in CF tests --- satpy/tests/writer_tests/test_cf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 2b0a5dfc6c..baeb45a4e4 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -1448,5 +1448,5 @@ def _should_use_compression_keyword(): versions = _get_backend_versions() return ( versions["libnetcdf"] >= Version("4.9.0") and - versions["xarray"] >= Version("2023.7") + versions["xarray"] >= Version("2023.8") ) From bc7001f75573a2a5815f8e6195085791113e5d7c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 18 Jul 2023 13:01:13 -0500 Subject: [PATCH 0342/1416] Rename viirs_l2_jrr reader to viirs_edr --- satpy/etc/readers/{viirs_l2_jrr.yaml => viirs_edr.yaml} | 4 ++-- satpy/readers/{viirs_l2_jrr.py => viirs_edr.py} | 7 ++++--- .../{test_viirs_l2_jrr.py => test_viirs_edr.py} | 2 +- 3 files changed, 7 insertions(+), 6 deletions(-) rename satpy/etc/readers/{viirs_l2_jrr.yaml => viirs_edr.yaml} (99%) rename satpy/readers/{viirs_l2_jrr.py => viirs_edr.py} (95%) rename satpy/tests/reader_tests/{test_viirs_l2_jrr.py => test_viirs_edr.py} (98%) diff --git a/satpy/etc/readers/viirs_l2_jrr.yaml b/satpy/etc/readers/viirs_edr.yaml similarity index 99% rename from satpy/etc/readers/viirs_l2_jrr.yaml rename to satpy/etc/readers/viirs_edr.yaml index f337909134..3f268b86b5 100644 --- a/satpy/etc/readers/viirs_l2_jrr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -1,6 +1,6 @@ reader: - description: VIIRS NOAA Enterprise L2 product reader - name: viirs_l2_jrr + description: VIIRS NOAA Enterprise EDR product reader + name: viirs_edr reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [viirs] group_keys: ['platform_shortname'] diff --git a/satpy/readers/viirs_l2_jrr.py b/satpy/readers/viirs_edr.py similarity index 95% rename from satpy/readers/viirs_l2_jrr.py rename to satpy/readers/viirs_edr.py index 8d07b3a7c5..80a4c347c3 100644 --- 
a/satpy/readers/viirs_l2_jrr.py +++ b/satpy/readers/viirs_edr.py @@ -46,8 +46,8 @@ import xarray as xr -from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler +from satpy.utils import get_legacy_chunk_size LOG = logging.getLogger(__name__) @@ -59,11 +59,12 @@ def __init__(self, filename, filename_info, filetype_info): """Initialize the geo filehandler.""" super(VIIRSJRRFileHandler, self).__init__(filename, filename_info, filetype_info) + chunk_size = get_legacy_chunk_size() self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, - chunks={'Columns': CHUNK_SIZE, - 'Rows': CHUNK_SIZE}) + chunks={'Columns': chunk_size, + 'Rows': chunk_size}) if 'columns' in self.nc.dims: self.nc = self.nc.rename({'Columns': 'x', 'Rows': 'y'}) elif 'Along_Track_375m' in self.nc.dims: diff --git a/satpy/tests/reader_tests/test_viirs_l2_jrr.py b/satpy/tests/reader_tests/test_viirs_edr.py similarity index 98% rename from satpy/tests/reader_tests/test_viirs_l2_jrr.py rename to satpy/tests/reader_tests/test_viirs_edr.py index a462ec1416..def2af7ad2 100644 --- a/satpy/tests/reader_tests/test_viirs_l2_jrr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -27,7 +27,7 @@ import xarray as xr -from satpy.readers.viirs_l2_jrr import VIIRSJRRFileHandler +from satpy.readers.viirs_edr import VIIRSJRRFileHandler class TestVIIRSJRRReader(unittest.TestCase): From 9ff96918c47647f81028b1135fe7b986089d73d4 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 18 Jul 2023 13:29:58 -0500 Subject: [PATCH 0343/1416] Convert VIIRS EDR tests to pytest --- satpy/tests/reader_tests/test_viirs_edr.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index def2af7ad2..cbd036639f 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -20,17 +20,17 @@ Note: This is adapted from the test_slstr_l2.py code. 
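
For orientation, a minimal usage sketch of the renamed reader; the granule
name below is a made-up example that matches the reader's file patterns:

    from satpy import Scene

    # load one surface reflectance band through the renamed "viirs_edr" reader
    scn = Scene(reader="viirs_edr",
                filenames=["SurfRefl_v1r2_npp_s202305301755410_e202305301757050_c202305302025590.nc"])
    scn.load(["surf_refl_I01"])
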
""" -import unittest from datetime import datetime from unittest import mock from unittest.mock import MagicMock +import pytest import xarray as xr from satpy.readers.viirs_edr import VIIRSJRRFileHandler -class TestVIIRSJRRReader(unittest.TestCase): +class TestVIIRSJRRReader: """Test the VIIRS JRR L2 reader.""" @mock.patch('xarray.open_dataset') @@ -62,7 +62,7 @@ def test_get_dataset(self, mocked_dataset): test.get_dataset('latitude', {'file_key': 'Latitude'}) test.get_dataset('smoke_concentration', {'file_key': 'smoke_concentration'}) test.get_dataset('fire_mask', {'file_key': 'fire_mask'}) - with self.assertRaises(KeyError): + with pytest.raises(KeyError): test.get_dataset('erroneous dataset', {'file_key': 'erroneous dataset'}) mocked_dataset.assert_called() mocked_dataset.reset_mock() @@ -78,8 +78,8 @@ def test_get_startend_times(self, mocked_dataset): tmp.rename.return_value = tmp xr.open_dataset.return_value = tmp hdl = VIIRSJRRFileHandler('somedir/somefile.nc', filename_info, None) - self.assertEqual(hdl.start_time, datetime(2021, 4, 3, 12, 0, 10)) - self.assertEqual(hdl.end_time, datetime(2021, 4, 3, 12, 4, 28)) + assert hdl.start_time == datetime(2021, 4, 3, 12, 0, 10) + assert hdl.end_time == datetime(2021, 4, 3, 12, 4, 28) @mock.patch('xarray.open_dataset') def test_get_platformname(self, mocked_dataset): @@ -88,8 +88,8 @@ def test_get_platformname(self, mocked_dataset): tmp.rename.return_value = tmp xr.open_dataset.return_value = tmp hdl = VIIRSJRRFileHandler('somedir/somefile.nc', {'platform_shortname': 'npp'}, None) - self.assertEqual(hdl.platform_name, 'Suomi-NPP') + assert hdl.platform_name == 'Suomi-NPP' hdl = VIIRSJRRFileHandler('somedir/somefile.nc', {'platform_shortname': 'JPSS-1'}, None) - self.assertEqual(hdl.platform_name, 'NOAA-20') + assert hdl.platform_name == 'NOAA-20' hdl = VIIRSJRRFileHandler('somedir/somefile.nc', {'platform_shortname': 'J01'}, None) - self.assertEqual(hdl.platform_name, 'NOAA-20') + assert hdl.platform_name == 'NOAA-20' From 60deee1b605e2d00ab99903d6b48c52a52383bdb Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 18 Jul 2023 15:11:36 -0500 Subject: [PATCH 0344/1416] Add surface reflectance specific tests to viirs_edr --- satpy/etc/readers/viirs_edr.yaml | 6 +- satpy/tests/reader_tests/test_viirs_edr.py | 103 +++++++++++++++------ 2 files changed, 79 insertions(+), 30 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 3f268b86b5..bf8a949de8 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -9,17 +9,17 @@ reader: file_types: jrr_cloudmask: - file_reader: !!python/name:satpy.readers.viirs_l2_jrr.VIIRSJRRFileHandler + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler variable_prefix: "" file_patterns: - 'JRR-CloudMask_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' jrr_aerosol_product: - file_reader: !!python/name:satpy.readers.viirs_l2_jrr.VIIRSJRRFileHandler + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler variable_prefix: "" file_patterns: - 'JRR-ADP_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' jrr_surfref_product: - file_reader: !!python/name:satpy.readers.viirs_l2_jrr.VIIRSJRRFileHandler + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler variable_prefix: "" file_patterns: - 
'SurfRefl_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index cbd036639f..c84fadf500 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -19,30 +19,89 @@ Note: This is adapted from the test_slstr_l2.py code. """ +from __future__ import annotations from datetime import datetime +from pathlib import Path from unittest import mock from unittest.mock import MagicMock +import numpy as np import pytest import xarray as xr +from pyresample import SwathDefinition from satpy.readers.viirs_edr import VIIRSJRRFileHandler +I_COLS = 64 # real-world 6400 +I_ROWS = 32 # one scan +M_COLS = 32 # real-world 3200 +M_ROWS = 16 # one scan +START_TIME = datetime(2023, 5, 30, 17, 55, 41, 0) +END_TIME = datetime(2023, 5, 30, 17, 57, 5, 0) + + +@pytest.fixture(scope="module") +def surface_reflectance_file(tmp_path_factory) -> Path: + """Generate fake surface reflectance EDR file.""" + tmp_path = tmp_path_factory.mktemp("viirs_edr_tmp") + fn = f"SurfRefl_v1r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202305302025590.nc" + file_path = tmp_path / fn + sr_vars = _create_surf_refl_variables() + ds = _create_fake_dataset(sr_vars) + ds.to_netcdf(file_path) + return file_path + + +def _create_fake_dataset(vars_dict: dict[str, xr.DataArray]) -> xr.Dataset: + ds = xr.Dataset( + vars_dict, + attrs={} + ) + return ds + + +def _create_surf_refl_variables() -> dict[str, xr.DataArray]: + dim_y_750 = "Along_Track_750m" + dim_x_750 = "Along_Scan_750m" + m_dims = (dim_y_750, dim_x_750) + dim_y_375 = "Along_Track_375m" + dim_x_375 = "Along_Scan_375m" + i_dims = (dim_y_375, dim_x_375) + + lon_attrs = {"standard_name": "longitude", "units": "degrees_east", "_FillValue": -999.9} + lat_attrs = {"standard_name": "latitude", "units": "degrees_north", "_FillValue": -999.9} + sr_attrs = {"units": "unitless", "_FillValue": -9999, "scale_factor": 0.0001, "add_offset": 0.0} + + i_data = np.zeros((I_ROWS, I_COLS), dtype=np.float32) + m_data = np.zeros((M_ROWS, M_COLS), dtype=np.float32) + data_arrs = { + "Longitude_at_375m_resolution": xr.DataArray(i_data, dims=i_dims, attrs=lon_attrs), + "Latitude_at_375m_resolution": xr.DataArray(i_data, dims=i_dims, attrs=lat_attrs), + "Longitude_at_750m_resolution": xr.DataArray(i_data, dims=i_dims, attrs=lon_attrs), + "Latitude_at_750m_resolution": xr.DataArray(i_data, dims=i_dims, attrs=lat_attrs), + "375m Surface Reflectance Band I1": xr.DataArray(i_data, dims=i_dims, attrs=sr_attrs), + "750m Surface Reflectance Band M1": xr.DataArray(m_data, dims=m_dims, attrs=sr_attrs), + } + for data_arr in data_arrs.values(): + if "scale_factor" not in data_arr.attrs: + continue + data_arr.encoding["dtype"] = np.int16 + return data_arrs + class TestVIIRSJRRReader: """Test the VIIRS JRR L2 reader.""" - @mock.patch('xarray.open_dataset') - def test_instantiate(self, mocked_dataset): - """Test initialization of file handlers.""" - filename_info = {'platform_shortname': 'npp'} - tmp = MagicMock(start_time='20191120T125002Z', stop_time='20191120T125002Z') - tmp.rename.return_value = tmp - xr.open_dataset.return_value = tmp - VIIRSJRRFileHandler('somedir/somefile.nc', filename_info, None) - mocked_dataset.assert_called() - mocked_dataset.reset_mock() + def test_get_dataset_surf_refl(self, surface_reflectance_file): + """Test retrieval of datasets.""" + from satpy import Scene + scn = 
Scene(reader="viirs_edr", filenames=[surface_reflectance_file]) + assert scn.start_time == START_TIME + assert scn.end_time == END_TIME + scn.load(["surf_refl_I01", "surf_refl_M01"]) + _check_surf_refl_data_arr(scn["surf_refl_I01"]) + _check_surf_refl_data_arr(scn["surf_refl_M01"]) @mock.patch('xarray.open_dataset') def test_get_dataset(self, mocked_dataset): @@ -55,8 +114,6 @@ def test_get_dataset(self, mocked_dataset): 'Latitude': xr.Dataset(), 'smoke_concentration': xr.Dataset(), 'fire_mask': xr.Dataset(), - 'surf_refl_I01': xr.Dataset(), - 'surf_refl_M05': xr.Dataset(), } test.get_dataset('longitude', {'file_key': 'Longitude'}) test.get_dataset('latitude', {'file_key': 'Latitude'}) @@ -65,21 +122,6 @@ def test_get_dataset(self, mocked_dataset): with pytest.raises(KeyError): test.get_dataset('erroneous dataset', {'file_key': 'erroneous dataset'}) mocked_dataset.assert_called() - mocked_dataset.reset_mock() - test.get_dataset('surf_refl_I01', {'file_key': 'surf_refl_I01'}) - - @mock.patch('xarray.open_dataset') - def test_get_startend_times(self, mocked_dataset): - """Test finding start and end times of granules.""" - filename_info = {'platform_shortname': 'npp', - 'start_time': datetime(2021, 4, 3, 12, 0, 10), - 'end_time': datetime(2021, 4, 3, 12, 4, 28)} - tmp = MagicMock() - tmp.rename.return_value = tmp - xr.open_dataset.return_value = tmp - hdl = VIIRSJRRFileHandler('somedir/somefile.nc', filename_info, None) - assert hdl.start_time == datetime(2021, 4, 3, 12, 0, 10) - assert hdl.end_time == datetime(2021, 4, 3, 12, 4, 28) @mock.patch('xarray.open_dataset') def test_get_platformname(self, mocked_dataset): @@ -93,3 +135,10 @@ def test_get_platformname(self, mocked_dataset): assert hdl.platform_name == 'NOAA-20' hdl = VIIRSJRRFileHandler('somedir/somefile.nc', {'platform_shortname': 'J01'}, None) assert hdl.platform_name == 'NOAA-20' + + +def _check_surf_refl_data_arr(data_arr: xr.DataArray) -> None: + assert data_arr.dims == ("y", "x") + assert isinstance(data_arr.attrs["area"], SwathDefinition) + assert np.issubdtype(data_arr.data.dtype, np.float32) + # TODO: More checks From dc1cf11020a9725dcdfe0aa785e5571628794628 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 18 Jul 2023 20:07:19 -0500 Subject: [PATCH 0345/1416] Comment out new VIIRS EDR test to debug Windows CI hanging --- satpy/readers/viirs_edr.py | 2 ++ satpy/tests/reader_tests/test_viirs_edr.py | 22 ++++++++++++---------- 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 80a4c347c3..a68961be97 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -84,6 +84,8 @@ def __init__(self, filename, filename_info, filetype_info): def get_dataset(self, dataset_id, info): """Get the dataset.""" ds = self.nc[info['file_key']] + if ds.attrs.get("units", None) == "unitless": + ds.attrs["units"] = "1" return ds diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index c84fadf500..95d7bc4563 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -93,15 +93,15 @@ def _create_surf_refl_variables() -> dict[str, xr.DataArray]: class TestVIIRSJRRReader: """Test the VIIRS JRR L2 reader.""" - def test_get_dataset_surf_refl(self, surface_reflectance_file): - """Test retrieval of datasets.""" - from satpy import Scene - scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_file]) - assert scn.start_time == START_TIME - assert 
scn.end_time == END_TIME - scn.load(["surf_refl_I01", "surf_refl_M01"]) - _check_surf_refl_data_arr(scn["surf_refl_I01"]) - _check_surf_refl_data_arr(scn["surf_refl_M01"]) + # def test_get_dataset_surf_refl(self, surface_reflectance_file): + # """Test retrieval of datasets.""" + # from satpy import Scene + # scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_file]) + # assert scn.start_time == START_TIME + # assert scn.end_time == END_TIME + # scn.load(["surf_refl_I01", "surf_refl_M01"]) + # _check_surf_refl_data_arr(scn["surf_refl_I01"]) + # _check_surf_refl_data_arr(scn["surf_refl_M01"]) @mock.patch('xarray.open_dataset') def test_get_dataset(self, mocked_dataset): @@ -141,4 +141,6 @@ def _check_surf_refl_data_arr(data_arr: xr.DataArray) -> None: assert data_arr.dims == ("y", "x") assert isinstance(data_arr.attrs["area"], SwathDefinition) assert np.issubdtype(data_arr.data.dtype, np.float32) - # TODO: More checks + assert data_arr.attrs["units"] == "1" + exp_shape = (M_ROWS, M_COLS) if "M" in data_arr.attrs["name"] else (I_ROWS, I_COLS) + assert data_arr.shape == exp_shape From 74d45a45ba0d0147c3ba0571d203bff96fa977ee Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 18 Jul 2023 20:45:43 -0500 Subject: [PATCH 0346/1416] Comment out module scoped fixture to see if it stops Windows CI hanging --- satpy/tests/reader_tests/test_viirs_edr.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 95d7bc4563..14e7c50bfe 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -41,16 +41,16 @@ END_TIME = datetime(2023, 5, 30, 17, 57, 5, 0) -@pytest.fixture(scope="module") -def surface_reflectance_file(tmp_path_factory) -> Path: - """Generate fake surface reflectance EDR file.""" - tmp_path = tmp_path_factory.mktemp("viirs_edr_tmp") - fn = f"SurfRefl_v1r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202305302025590.nc" - file_path = tmp_path / fn - sr_vars = _create_surf_refl_variables() - ds = _create_fake_dataset(sr_vars) - ds.to_netcdf(file_path) - return file_path +# @pytest.fixture(scope="module") +# def surface_reflectance_file(tmp_path_factory) -> Path: +# """Generate fake surface reflectance EDR file.""" +# tmp_path = tmp_path_factory.mktemp("viirs_edr_tmp") +# fn = f"SurfRefl_v1r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202305302025590.nc" +# file_path = tmp_path / fn +# sr_vars = _create_surf_refl_variables() +# ds = _create_fake_dataset(sr_vars) +# ds.to_netcdf(file_path) +# return file_path def _create_fake_dataset(vars_dict: dict[str, xr.DataArray]) -> xr.Dataset: From d0943807348be4d6815a964808899557fd325210 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 18 Jul 2023 20:48:19 -0500 Subject: [PATCH 0347/1416] Fix unused import due to commented out code --- satpy/tests/reader_tests/test_viirs_edr.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 14e7c50bfe..8f984a323d 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -22,7 +22,6 @@ from __future__ import annotations from datetime import datetime -from pathlib import Path from unittest import mock from unittest.mock import MagicMock From 2f0c6aa73d47fe04e1ab2e11dcbdf08a69043ab4 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 19 Jul 2023 18:42:41 
+0800 Subject: [PATCH 0348/1416] Update fy4_base.py --- satpy/readers/fy4_base.py | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/satpy/readers/fy4_base.py b/satpy/readers/fy4_base.py index ceb8ee75bd..9b6b364420 100644 --- a/satpy/readers/fy4_base.py +++ b/satpy/readers/fy4_base.py @@ -212,8 +212,12 @@ def get_area_def(self, key): # https://www.cgms-info.org/documents/cgms-lrit-hrit-global-specification-(v2-8-of-30-oct-2013).pdf res = key['resolution'] pdict = {} - pdict['coff'] = self._COFF_list[RESOLUTION_LIST.index(res)] - pdict['loff'] = self._LOFF_list[RESOLUTION_LIST.index(res)] + + begin_cols = float(self.file_content['/attr/Begin Pixel Number']) + end_lines = float(self.file_content['/attr/End Line Number']) + pdict['coff'] = self._COFF_list[RESOLUTION_LIST.index(res)] - begin_cols + 1 + pdict['loff'] = -self._LOFF_list[RESOLUTION_LIST.index(res)] + end_lines + 1 + pdict['cfac'] = self._CFAC_list[RESOLUTION_LIST.index(res)] pdict['lfac'] = self._LFAC_list[RESOLUTION_LIST.index(res)] try: @@ -240,21 +244,12 @@ def get_area_def(self, key): pdict['a_name'] = f'{self.filename_info["observation_type"]}_{res}m' pdict['p_id'] = f'FY-4, {res}m' - pdict['nlines'] = pdict['nlines'] - 1 - pdict['ncols'] = pdict['ncols'] - 1 - - pdict['coff'] = pdict['coff'] - 0.5 - pdict['loff'] = pdict['loff'] + 1 - area_extent = get_area_extent(pdict) area_extent = (area_extent[0], area_extent[1], area_extent[2], area_extent[3]) - pdict['nlines'] = pdict['nlines'] + 1 - pdict['ncols'] = pdict['ncols'] + 1 - area = get_area_definition(pdict, area_extent) return area From 4b63854d5ded42e0d7d21ce11da99cf40eb1607e Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 19 Jul 2023 20:47:14 +0800 Subject: [PATCH 0349/1416] Update test_agri_l1.py --- satpy/tests/reader_tests/test_agri_l1.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/satpy/tests/reader_tests/test_agri_l1.py b/satpy/tests/reader_tests/test_agri_l1.py index 722ea05a71..77c8e5e268 100644 --- a/satpy/tests/reader_tests/test_agri_l1.py +++ b/satpy/tests/reader_tests/test_agri_l1.py @@ -40,16 +40,16 @@ RESOLUTION_LIST = [500, 1000, 2000, 4000] AREA_EXTENTS_BY_RESOLUTION = {'FY4A': { - 500: (-5495271.006002, -5496021.008869, -5493270.998357, -5495521.006957), - 1000: (-5494521.070252, -5496021.076004, -5490521.054912, -5495021.072169), - 2000: (-5493021.198696, -5496021.210274, -5485021.167823, -5494021.202556), - 4000: (-5490021.187119, -5496021.210274, -5474021.125371, -5492021.194837) + 500: (-5496021.008869, 5495021.005046, -5493520.999312, 5496021.008869), + 1000: (-5496021.076004, 5494021.068334, -5491021.05683, 5496021.076004), + 2000: (-5496021.210274, 5492021.194837, -5486021.171682, 5496021.210274), + 4000: (-5496021.210274, 5488021.1794, -5476021.13309, 5496021.210274) }, 'FY4B': { - 500: (-5495271.006002, -5496021.008869, -5493270.998357, -5495521.006957), - 1000: (-5494521.070252, -5496021.076004, -5490521.054912, -5495021.072169), - 2000: (-5493021.198696, -5496021.210274, -5485021.167823, -5494021.202556), - 4000: (-5490021.187119, -5496021.210274, -5474021.125371, -5492021.194837) + 500: (-5496021.008869, 5495021.005046, -5493520.999312, 5496021.008869), + 1000: (-5496021.076004, 5494021.068334, -5491021.05683 , 5496021.076004), + 2000: (-5496021.210274, 5492021.194837, -5486021.171682, 5496021.210274), + 4000: (-5496021.210274, 5488021.1794, -5476021.13309, 5496021.210274) }} @@ -201,6 +201,8 @@ def get_test_content(self, filename, filename_info, 
filetype_info): '/attr/RegWidth': np.array(5.0), '/attr/Begin Line Number': np.array(0), '/attr/End Line Number': np.array(1), + '/attr/Begin Pixel Number': np.array(0), + '/attr/End Pixel Number': np.array(1), '/attr/Observing Beginning Date': '2019-06-03', '/attr/Observing Beginning Time': '00:30:01.807', '/attr/Observing Ending Date': '2019-06-03', '/attr/Observing Ending Time': '00:34:07.572', '/attr/Satellite Name': 'FY4A', '/attr/Sensor Identification Code': 'AGRI', '/attr/Sensor Name': 'AGRI', From 0d14a9e0f25de1231146ab033ad628e0c7ba321f Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 19 Jul 2023 20:49:31 +0800 Subject: [PATCH 0350/1416] Update test_agri_l1.py --- satpy/tests/reader_tests/test_agri_l1.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_agri_l1.py b/satpy/tests/reader_tests/test_agri_l1.py index 77c8e5e268..8bfc5057a7 100644 --- a/satpy/tests/reader_tests/test_agri_l1.py +++ b/satpy/tests/reader_tests/test_agri_l1.py @@ -47,7 +47,7 @@ }, 'FY4B': { 500: (-5496021.008869, 5495021.005046, -5493520.999312, 5496021.008869), - 1000: (-5496021.076004, 5494021.068334, -5491021.05683 , 5496021.076004), + 1000: (-5496021.076004, 5494021.068334, -5491021.05683, 5496021.076004), 2000: (-5496021.210274, 5492021.194837, -5486021.171682, 5496021.210274), 4000: (-5496021.210274, 5488021.1794, -5476021.13309, 5496021.210274) }} From 125a0b38b4748fa5df4cb455c88aa513ef0e449e Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 19 Jul 2023 20:50:56 +0800 Subject: [PATCH 0351/1416] Update test_agri_l1.py --- satpy/tests/reader_tests/test_agri_l1.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/satpy/tests/reader_tests/test_agri_l1.py b/satpy/tests/reader_tests/test_agri_l1.py index 8bfc5057a7..44de75a7eb 100644 --- a/satpy/tests/reader_tests/test_agri_l1.py +++ b/satpy/tests/reader_tests/test_agri_l1.py @@ -40,16 +40,16 @@ RESOLUTION_LIST = [500, 1000, 2000, 4000] AREA_EXTENTS_BY_RESOLUTION = {'FY4A': { - 500: (-5496021.008869, 5495021.005046, -5493520.999312, 5496021.008869), - 1000: (-5496021.076004, 5494021.068334, -5491021.05683, 5496021.076004), - 2000: (-5496021.210274, 5492021.194837, -5486021.171682, 5496021.210274), - 4000: (-5496021.210274, 5488021.1794, -5476021.13309, 5496021.210274) + 500: (-5496021.008869, 5495021.005046, -5493520.999312, 5496021.008869), + 1000: (-5496021.076004, 5494021.068334, -5491021.05683, 5496021.076004), + 2000: (-5496021.210274, 5492021.194837, -5486021.171682, 5496021.210274), + 4000: (-5496021.210274, 5488021.1794, -5476021.13309, 5496021.210274) }, 'FY4B': { - 500: (-5496021.008869, 5495021.005046, -5493520.999312, 5496021.008869), - 1000: (-5496021.076004, 5494021.068334, -5491021.05683, 5496021.076004), - 2000: (-5496021.210274, 5492021.194837, -5486021.171682, 5496021.210274), - 4000: (-5496021.210274, 5488021.1794, -5476021.13309, 5496021.210274) + 500: (-5496021.008869, 5495021.005046, -5493520.999312, 5496021.008869), + 1000: (-5496021.076004, 5494021.068334, -5491021.05683, 5496021.076004), + 2000: (-5496021.210274, 5492021.194837, -5486021.171682, 5496021.210274), + 4000: (-5496021.210274, 5488021.1794, -5476021.13309, 5496021.210274) }} From 192dac3d0cfd39b83585a5e000fa47871c2e8a5a Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 19 Jul 2023 20:51:44 +0800 Subject: [PATCH 0352/1416] Update test_agri_l1.py --- satpy/tests/reader_tests/test_agri_l1.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/satpy/tests/reader_tests/test_agri_l1.py b/satpy/tests/reader_tests/test_agri_l1.py index 44de75a7eb..12e30f2aac 100644 --- a/satpy/tests/reader_tests/test_agri_l1.py +++ b/satpy/tests/reader_tests/test_agri_l1.py @@ -43,7 +43,7 @@ 500: (-5496021.008869, 5495021.005046, -5493520.999312, 5496021.008869), 1000: (-5496021.076004, 5494021.068334, -5491021.05683, 5496021.076004), 2000: (-5496021.210274, 5492021.194837, -5486021.171682, 5496021.210274), - 4000: (-5496021.210274, 5488021.1794, -5476021.13309, 5496021.210274) + 4000: (-5496021.210274, 5488021.1794, -5476021.13309, 5496021.210274) }, 'FY4B': { 500: (-5496021.008869, 5495021.005046, -5493520.999312, 5496021.008869), From 17148bbffcfff9f837fabb9c8d5a15b5a4619f84 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 20 Jul 2023 12:16:16 -0500 Subject: [PATCH 0353/1416] Force libnetcdf to a non-hanging build (for now) --- continuous_integration/environment.yaml | 1 + satpy/tests/reader_tests/test_viirs_edr.py | 39 +++++++++++----------- 2 files changed, 21 insertions(+), 19 deletions(-) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index 48976401a2..46096d7846 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -24,6 +24,7 @@ dependencies: - coverage - codecov - behave + - libnetcdf=4.9.2=nompi_h5902ca5_107 # [win] - netcdf4 - h5py - h5netcdf diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 8f984a323d..43647c4aab 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -22,6 +22,7 @@ from __future__ import annotations from datetime import datetime +from pathlib import Path from unittest import mock from unittest.mock import MagicMock @@ -40,16 +41,16 @@ END_TIME = datetime(2023, 5, 30, 17, 57, 5, 0) -# @pytest.fixture(scope="module") -# def surface_reflectance_file(tmp_path_factory) -> Path: -# """Generate fake surface reflectance EDR file.""" -# tmp_path = tmp_path_factory.mktemp("viirs_edr_tmp") -# fn = f"SurfRefl_v1r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202305302025590.nc" -# file_path = tmp_path / fn -# sr_vars = _create_surf_refl_variables() -# ds = _create_fake_dataset(sr_vars) -# ds.to_netcdf(file_path) -# return file_path +@pytest.fixture(scope="module") +def surface_reflectance_file(tmp_path_factory) -> Path: + """Generate fake surface reflectance EDR file.""" + tmp_path = tmp_path_factory.mktemp("viirs_edr_tmp") + fn = f"SurfRefl_v1r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202305302025590.nc" + file_path = tmp_path / fn + sr_vars = _create_surf_refl_variables() + ds = _create_fake_dataset(sr_vars) + ds.to_netcdf(file_path) + return file_path def _create_fake_dataset(vars_dict: dict[str, xr.DataArray]) -> xr.Dataset: @@ -92,15 +93,15 @@ def _create_surf_refl_variables() -> dict[str, xr.DataArray]: class TestVIIRSJRRReader: """Test the VIIRS JRR L2 reader.""" - # def test_get_dataset_surf_refl(self, surface_reflectance_file): - # """Test retrieval of datasets.""" - # from satpy import Scene - # scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_file]) - # assert scn.start_time == START_TIME - # assert scn.end_time == END_TIME - # scn.load(["surf_refl_I01", "surf_refl_M01"]) - # _check_surf_refl_data_arr(scn["surf_refl_I01"]) - # _check_surf_refl_data_arr(scn["surf_refl_M01"]) + def test_get_dataset_surf_refl(self, surface_reflectance_file): + """Test retrieval of 
datasets.""" + from satpy import Scene + scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_file]) + assert scn.start_time == START_TIME + assert scn.end_time == END_TIME + scn.load(["surf_refl_I01", "surf_refl_M01"]) + _check_surf_refl_data_arr(scn["surf_refl_I01"]) + _check_surf_refl_data_arr(scn["surf_refl_M01"]) @mock.patch('xarray.open_dataset') def test_get_dataset(self, mocked_dataset): From c9377f768d20f4372050e1d2498c275044c6d801 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 20 Jul 2023 12:38:39 -0500 Subject: [PATCH 0354/1416] Fix yaml selector for libnetcdf build on windows --- continuous_integration/environment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index 46096d7846..0cf682e1bb 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -24,7 +24,7 @@ dependencies: - coverage - codecov - behave - - libnetcdf=4.9.2=nompi_h5902ca5_107 # [win] + - libnetcdf=4.9.2=nompi_h5902ca5_107 # [win] - netcdf4 - h5py - h5netcdf From 69645a392f47c6bb1ff97c0a4287d121d58955bf Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 20 Jul 2023 12:58:16 -0500 Subject: [PATCH 0355/1416] Try environment hack one more time --- .github/workflows/ci.yaml | 3 +++ continuous_integration/environment.yaml | 1 - 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index faa0aea2cc..1c18e4a5cc 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -82,6 +82,9 @@ jobs: - name: Update environment run: mamba env update -n test-environment -f continuous_integration/environment.yaml if: steps.cache.outputs.cache-hit != 'true' + - name: Update environment - libnetcdf + run: mamba install -y -n test-environment libnetcdf=4.9.2=nompi_h5902ca5_107 + if: runner.os == 'Windows' - name: Install unstable dependencies if: matrix.experimental == true diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index 0cf682e1bb..48976401a2 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -24,7 +24,6 @@ dependencies: - coverage - codecov - behave - - libnetcdf=4.9.2=nompi_h5902ca5_107 # [win] - netcdf4 - h5py - h5netcdf From fade8513d193168c61ff36aa2ccce634e20f2242 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 20 Jul 2023 14:06:28 -0500 Subject: [PATCH 0356/1416] Switch VIIRS EDR to modern chunk sizing --- satpy/readers/viirs_edr.py | 14 ++++++++++---- satpy/tests/reader_tests/test_viirs_edr.py | 10 ++++++++-- 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index a68961be97..f7817833fc 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -47,7 +47,7 @@ import xarray as xr from satpy.readers.file_handlers import BaseFileHandler -from satpy.utils import get_legacy_chunk_size +from satpy.utils import get_chunk_size_limit LOG = logging.getLogger(__name__) @@ -59,12 +59,18 @@ def __init__(self, filename, filename_info, filetype_info): """Initialize the geo filehandler.""" super(VIIRSJRRFileHandler, self).__init__(filename, filename_info, filetype_info) - chunk_size = get_legacy_chunk_size() + chunk_size = get_chunk_size_limit() // 4 # 32-bit floats self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, - chunks={'Columns': chunk_size, - 'Rows': chunk_size}) + chunks={ + 
'Columns': chunk_size, + 'Rows': chunk_size, + 'Along_Scan_375m': chunk_size, + 'Along_Track_375m': chunk_size, + 'Along_Scan_750m': chunk_size, + 'Along_Track_750m': chunk_size, + }) if 'columns' in self.nc.dims: self.nc = self.nc.rename({'Columns': 'x', 'Rows': 'y'}) elif 'Along_Track_375m' in self.nc.dims: diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 43647c4aab..f049a5e288 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -26,6 +26,8 @@ from unittest import mock from unittest.mock import MagicMock +import dask +import dask.array as da import numpy as np import pytest import xarray as xr @@ -96,10 +98,11 @@ class TestVIIRSJRRReader: def test_get_dataset_surf_refl(self, surface_reflectance_file): """Test retrieval of datasets.""" from satpy import Scene - scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_file]) + with dask.config.set({"array.chunk-size": "16B"}): + scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_file]) + scn.load(["surf_refl_I01", "surf_refl_M01"]) assert scn.start_time == START_TIME assert scn.end_time == END_TIME - scn.load(["surf_refl_I01", "surf_refl_M01"]) _check_surf_refl_data_arr(scn["surf_refl_I01"]) _check_surf_refl_data_arr(scn["surf_refl_M01"]) @@ -140,7 +143,10 @@ def test_get_platformname(self, mocked_dataset): def _check_surf_refl_data_arr(data_arr: xr.DataArray) -> None: assert data_arr.dims == ("y", "x") assert isinstance(data_arr.attrs["area"], SwathDefinition) + assert isinstance(data_arr.data, da.Array) assert np.issubdtype(data_arr.data.dtype, np.float32) + assert all(c == 4 for c in data_arr.chunks[0]) + assert all(c == 4 for c in data_arr.chunks[1]) assert data_arr.attrs["units"] == "1" exp_shape = (M_ROWS, M_COLS) if "M" in data_arr.attrs["name"] else (I_ROWS, I_COLS) assert data_arr.shape == exp_shape From af68e101d4701f5b2fbeab2d67787cc93d0de8bf Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 20 Jul 2023 14:20:42 -0500 Subject: [PATCH 0357/1416] Update VIIRS EDR chunking to be scan-based --- satpy/readers/viirs_edr.py | 16 +++++++++------- satpy/tests/reader_tests/test_viirs_edr.py | 13 ++++++++----- 2 files changed, 17 insertions(+), 12 deletions(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index f7817833fc..8389ea8019 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -59,17 +59,19 @@ def __init__(self, filename, filename_info, filetype_info): """Initialize the geo filehandler.""" super(VIIRSJRRFileHandler, self).__init__(filename, filename_info, filetype_info) - chunk_size = get_chunk_size_limit() // 4 # 32-bit floats + # use entire scans as chunks + row_chunks_m = max(get_chunk_size_limit() // 4 // 3200, 1) # 32-bit floats + row_chunks_i = row_chunks_m * 2 self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, chunks={ - 'Columns': chunk_size, - 'Rows': chunk_size, - 'Along_Scan_375m': chunk_size, - 'Along_Track_375m': chunk_size, - 'Along_Scan_750m': chunk_size, - 'Along_Track_750m': chunk_size, + 'Columns': -1, + 'Rows': row_chunks_i, + 'Along_Scan_375m': -1, + 'Along_Track_375m': row_chunks_i, + 'Along_Scan_750m': -1, + 'Along_Track_750m': row_chunks_m, }) if 'columns' in self.nc.dims: self.nc = self.nc.rename({'Columns': 'x', 'Rows': 'y'}) diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index f049a5e288..6ab31c6ff6 100644 --- 
a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -98,7 +98,8 @@ class TestVIIRSJRRReader: def test_get_dataset_surf_refl(self, surface_reflectance_file): """Test retrieval of datasets.""" from satpy import Scene - with dask.config.set({"array.chunk-size": "16B"}): + bytes_in_m_row = 4 * 3200 + with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}): scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_file]) scn.load(["surf_refl_I01", "surf_refl_M01"]) assert scn.start_time == START_TIME @@ -145,8 +146,10 @@ def _check_surf_refl_data_arr(data_arr: xr.DataArray) -> None: assert isinstance(data_arr.attrs["area"], SwathDefinition) assert isinstance(data_arr.data, da.Array) assert np.issubdtype(data_arr.data.dtype, np.float32) - assert all(c == 4 for c in data_arr.chunks[0]) - assert all(c == 4 for c in data_arr.chunks[1]) - assert data_arr.attrs["units"] == "1" - exp_shape = (M_ROWS, M_COLS) if "M" in data_arr.attrs["name"] else (I_ROWS, I_COLS) + is_m_band = "I" not in data_arr.attrs["name"] + exp_shape = (M_ROWS, M_COLS) if is_m_band else (I_ROWS, I_COLS) assert data_arr.shape == exp_shape + exp_row_chunks = 4 if is_m_band else 8 + assert all(c == exp_row_chunks for c in data_arr.chunks[0]) + assert data_arr.chunks[1] == (exp_shape[1],) + assert data_arr.attrs["units"] == "1" From ab555c2de2de52390ece06f78179a6aa251230d3 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 20 Jul 2023 15:00:05 -0500 Subject: [PATCH 0358/1416] Remove mocking of platform name VIIRS EDR test --- satpy/readers/viirs_edr.py | 1 + satpy/tests/reader_tests/test_viirs_edr.py | 31 +++++++++++++--------- 2 files changed, 19 insertions(+), 13 deletions(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 8389ea8019..d02d38f53e 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -94,6 +94,7 @@ def get_dataset(self, dataset_id, info): ds = self.nc[info['file_key']] if ds.attrs.get("units", None) == "unitless": ds.attrs["units"] = "1" + ds.attrs["platform_name"] = self.platform_name return ds diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 6ab31c6ff6..d1da58970d 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -21,6 +21,7 @@ """ from __future__ import annotations +import shutil from datetime import datetime from pathlib import Path from unittest import mock @@ -107,6 +108,23 @@ def test_get_dataset_surf_refl(self, surface_reflectance_file): _check_surf_refl_data_arr(scn["surf_refl_I01"]) _check_surf_refl_data_arr(scn["surf_refl_M01"]) + @pytest.mark.parametrize( + ("filename_platform", "exp_shortname"), + [ + ("npp", "Suomi-NPP"), + ("JPSS-1", "NOAA-20"), + ("J01", "NOAA-20") + ]) + def test_get_platformname(self, surface_reflectance_file, filename_platform, exp_shortname): + """Test finding start and end times of granules.""" + from satpy import Scene + new_name = str(surface_reflectance_file).replace("npp", filename_platform) + if new_name != str(surface_reflectance_file): + shutil.copy(surface_reflectance_file, new_name) + scn = Scene(reader="viirs_edr", filenames=[new_name]) + scn.load(["surf_refl_I01"]) + assert scn["surf_refl_I01"].attrs["platform_name"] == exp_shortname + @mock.patch('xarray.open_dataset') def test_get_dataset(self, mocked_dataset): """Test retrieval of datasets.""" @@ -127,19 +145,6 @@ def test_get_dataset(self, mocked_dataset): test.get_dataset('erroneous 
dataset', {'file_key': 'erroneous dataset'}) mocked_dataset.assert_called() - @mock.patch('xarray.open_dataset') - def test_get_platformname(self, mocked_dataset): - """Test finding start and end times of granules.""" - tmp = MagicMock() - tmp.rename.return_value = tmp - xr.open_dataset.return_value = tmp - hdl = VIIRSJRRFileHandler('somedir/somefile.nc', {'platform_shortname': 'npp'}, None) - assert hdl.platform_name == 'Suomi-NPP' - hdl = VIIRSJRRFileHandler('somedir/somefile.nc', {'platform_shortname': 'JPSS-1'}, None) - assert hdl.platform_name == 'NOAA-20' - hdl = VIIRSJRRFileHandler('somedir/somefile.nc', {'platform_shortname': 'J01'}, None) - assert hdl.platform_name == 'NOAA-20' - def _check_surf_refl_data_arr(data_arr: xr.DataArray) -> None: assert data_arr.dims == ("y", "x") From f3ea32f393dfee7ad2302c9ff7a1d8e1362a18ff Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 20 Jul 2023 15:02:00 -0500 Subject: [PATCH 0359/1416] Remove all-in-one VIIRS EDR test It wasn't doing much and they'll be readded later as new files are supported --- satpy/tests/reader_tests/test_viirs_edr.py | 24 ---------------------- 1 file changed, 24 deletions(-) diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index d1da58970d..a9a7d13a81 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -24,8 +24,6 @@ import shutil from datetime import datetime from pathlib import Path -from unittest import mock -from unittest.mock import MagicMock import dask import dask.array as da @@ -34,8 +32,6 @@ import xarray as xr from pyresample import SwathDefinition -from satpy.readers.viirs_edr import VIIRSJRRFileHandler - I_COLS = 64 # real-world 6400 I_ROWS = 32 # one scan M_COLS = 32 # real-world 3200 @@ -125,26 +121,6 @@ def test_get_platformname(self, surface_reflectance_file, filename_platform, exp scn.load(["surf_refl_I01"]) assert scn["surf_refl_I01"].attrs["platform_name"] == exp_shortname - @mock.patch('xarray.open_dataset') - def test_get_dataset(self, mocked_dataset): - """Test retrieval of datasets.""" - filename_info = {'platform_shortname': 'npp'} - tmp = MagicMock(start_time='20191120T125002Z', stop_time='20191120T125002Z') - xr.open_dataset.return_value = tmp - test = VIIRSJRRFileHandler('somedir/somefile.nc', filename_info, None) - test.nc = {'Longitude': xr.Dataset(), - 'Latitude': xr.Dataset(), - 'smoke_concentration': xr.Dataset(), - 'fire_mask': xr.Dataset(), - } - test.get_dataset('longitude', {'file_key': 'Longitude'}) - test.get_dataset('latitude', {'file_key': 'Latitude'}) - test.get_dataset('smoke_concentration', {'file_key': 'smoke_concentration'}) - test.get_dataset('fire_mask', {'file_key': 'fire_mask'}) - with pytest.raises(KeyError): - test.get_dataset('erroneous dataset', {'file_key': 'erroneous dataset'}) - mocked_dataset.assert_called() - def _check_surf_refl_data_arr(data_arr: xr.DataArray) -> None: assert data_arr.dims == ("y", "x") From a34c9c8079a2406c3232532cd6ac24acfb1e68d5 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 21 Jul 2023 08:58:42 -0500 Subject: [PATCH 0360/1416] Add NDVI/EVI optional datasets to viirs_edr reader --- satpy/etc/readers/viirs_edr.yaml | 15 ++++ satpy/readers/file_handlers.py | 4 +- satpy/readers/viirs_edr.py | 48 +++++++++-- satpy/tests/reader_tests/test_viirs_edr.py | 99 ++++++++++++++++++++++ 4 files changed, 159 insertions(+), 7 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 
bf8a949de8..ebcea3cd3d 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -409,3 +409,18 @@ datasets: coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 + # Swath-based vegetation indexes added to CSPP LEO output + NDVI: + name: NDVI + resolution: 375 + file_type: [jrr_surfref_product] + file_key: "NDVI" + coordinates: [longitude_375, latitude_375] + units: "1" + EVI: + name: EVI + resolution: 375 + file_type: [jrr_surfref_product] + file_key: "NDVI" + coordinates: [longitude_375, latitude_375] + units: "1" diff --git a/satpy/readers/file_handlers.py b/satpy/readers/file_handlers.py index cebab6e307..0c47553b0d 100644 --- a/satpy/readers/file_handlers.py +++ b/satpy/readers/file_handlers.py @@ -228,9 +228,9 @@ def available_datasets(self, configured_datasets=None): Args: configured_datasets (list): Series of (bool or None, dict) in the same way as is returned by this method (see below). The bool - is whether or not the dataset is available from at least one + is whether the dataset is available from at least one of the current file handlers. It can also be ``None`` if - no file handler knows before us knows how to handle it. + no file handler before us knows how to handle it. The dictionary is existing dataset metadata. The dictionaries are typically provided from a YAML configuration file and may be modified, updated, or used as a "template" for additional diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index d02d38f53e..3ddd685bab 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -91,12 +91,14 @@ def __init__(self, filename, filename_info, filetype_info): def get_dataset(self, dataset_id, info): """Get the dataset.""" - ds = self.nc[info['file_key']] - if ds.attrs.get("units", None) == "unitless": - ds.attrs["units"] = "1" - ds.attrs["platform_name"] = self.platform_name + data_arr = self.nc[info['file_key']] + if data_arr.attrs.get("units", None) == "unitless": + data_arr.attrs["units"] = "1" + if isinstance(data_arr.attrs.get('flag_meanings'), str): + data_arr.attrs['flag_meanings'] = [flag.strip() for flag in data_arr.attrs['flag_meanings'].split(' ')] + data_arr.attrs["platform_name"] = self.platform_name - return ds + return data_arr @property def start_time(self): @@ -118,3 +120,39 @@ def platform_name(self): 'JPSS-2': 'NOAA-21', 'J02': 'NOAA-21'} return platform_dict[platform_path.upper()] + + def available_datasets(self, configured_datasets=None): + """Get information of available datasets in this file. + + Args: + configured_datasets (list): Series of (bool or None, dict) in the + same way as is returned by this method (see below). The bool + is whether the dataset is available from at least one + of the current file handlers. It can also be ``None`` if + no file handler before us knows how to handle it. + The dictionary is existing dataset metadata. The dictionaries + are typically provided from a YAML configuration file and may + be modified, updated, or used as a "template" for additional + available datasets. This argument could be the result of a + previous file handler's implementation of this method. + + Returns: + Iterator of (bool or None, dict) pairs where dict is the + dataset's metadata. If the dataset is available in the current + file type then the boolean value should be ``True``, ``False`` + if we **know** about the dataset but it is unavailable, or + ``None`` if this file object is not responsible for it. 
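+
+        A rough sketch of how a caller could consume these pairs (the
+        ``handler`` and ``configured`` names are illustrative only)::
+
+            for is_avail, ds_info in handler.available_datasets(configured):
+                if is_avail:
+                    print(ds_info["name"], "is available in this file")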
+ + """ + for is_avail, ds_info in (configured_datasets or []): + if is_avail is not None: + # some other file handler said it has this dataset + # we don't know any more information than the previous + # file handler so let's yield early + yield is_avail, ds_info + continue + if self.file_type_matches(ds_info['file_type']) is None: + # this is not the file type for this dataset + yield None, ds_info + file_key = ds_info.get("file_key", ds_info["name"]) + yield file_key in self.nc, ds_info diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index a9a7d13a81..de95e946ac 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -31,6 +31,7 @@ import pytest import xarray as xr from pyresample import SwathDefinition +from pytest_lazyfixture import lazy_fixture I_COLS = 64 # real-world 6400 I_ROWS = 32 # one scan @@ -38,15 +39,52 @@ M_ROWS = 16 # one scan START_TIME = datetime(2023, 5, 30, 17, 55, 41, 0) END_TIME = datetime(2023, 5, 30, 17, 57, 5, 0) +QF1_FLAG_MEANINGS = """ +\tBits are listed from the MSB (bit 7) to the LSB (bit 0): +\tBit Description +\t6-7 SUN GLINT; +\t 00 -- none +\t 01 -- geometry based +\t 10 -- wind speed based +\t 11 -- geometry & wind speed based +\t5 low sun mask; +\t 0 -- high +\t 1 -- low +\t4 day/night; +\t 0 -- day +\t 1 -- night +\t2-3 cloud detection & confidence; +\t 00 -- confident clear +\t 01 -- probably clear +\t 10 -- probably cloudy +\t 11 -- confident cloudy +\t0-1 cloud mask quality; +\t 00 -- poor +\t 01 -- low +\t 10 -- medium +\t 11 -- high +""" @pytest.fixture(scope="module") def surface_reflectance_file(tmp_path_factory) -> Path: """Generate fake surface reflectance EDR file.""" + return _create_surface_reflectance_file(tmp_path_factory, include_veg_indices=False) + + +@pytest.fixture(scope="module") +def surface_reflectance_with_veg_indices_file(tmp_path_factory) -> Path: + """Generate fake surface reflectance EDR file with vegetation indexes included.""" + return _create_surface_reflectance_file(tmp_path_factory, include_veg_indices=True) + + +def _create_surface_reflectance_file(tmp_path_factory, include_veg_indices: bool = False) -> Path: tmp_path = tmp_path_factory.mktemp("viirs_edr_tmp") fn = f"SurfRefl_v1r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202305302025590.nc" file_path = tmp_path / fn sr_vars = _create_surf_refl_variables() + if include_veg_indices: + sr_vars.update(_create_veg_index_variables()) ds = _create_fake_dataset(sr_vars) ds.to_netcdf(file_path) return file_path @@ -89,6 +127,32 @@ def _create_surf_refl_variables() -> dict[str, xr.DataArray]: return data_arrs +def _create_veg_index_variables() -> dict[str, xr.DataArray]: + dim_y_750 = "Along_Track_750m" + dim_x_750 = "Along_Scan_750m" + m_dims = (dim_y_750, dim_x_750) + dim_y_375 = "Along_Track_375m" + dim_x_375 = "Along_Scan_375m" + i_dims = (dim_y_375, dim_x_375) + + i_data = np.zeros((I_ROWS, I_COLS), dtype=np.float32) + data_arrs = { + "NDVI": xr.DataArray(i_data, dims=i_dims, attrs={"units": "unitless"}), + "EVI": xr.DataArray(i_data, dims=i_dims, attrs={"units": "unitless"}), + } + data_arrs["NDVI"].encoding["dtype"] = np.float32 + data_arrs["EVI"].encoding["dtype"] = np.float32 + + # Quality Flags are from the Surface Reflectance data, but only used for VI products in the reader + qf_data = np.zeros((M_ROWS, M_COLS), dtype=np.uint8) + for qf_num in range(1, 8): + qf_name = f"QF{qf_num} Surface Reflectance" + data_arr = xr.DataArray(qf_data, 
dims=m_dims, attrs={"flag_meanings": QF1_FLAG_MEANINGS}) + data_arr.encoding["dtype"] = np.uint8 + data_arrs[qf_name] = data_arr + return data_arrs + + class TestVIIRSJRRReader: """Test the VIIRS JRR L2 reader.""" @@ -104,6 +168,34 @@ def test_get_dataset_surf_refl(self, surface_reflectance_file): _check_surf_refl_data_arr(scn["surf_refl_I01"]) _check_surf_refl_data_arr(scn["surf_refl_M01"]) + def test_get_dataset_surf_refl_with_veg_idx(self, surface_reflectance_with_veg_indices_file): + """Test retrieval of vegetation indices from surface reflectance files.""" + from satpy import Scene + scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_with_veg_indices_file]) + scn.load(["NDVI", "EVI", "surf_refl_qf1"]) + _check_surf_refl_qf_data_arr(scn["surf_refl_qf1"]) + # TODO: Check NDVI/EVI attributes/dims + # TODO: Check NDVI/EVI quality flag clearing + + @pytest.mark.parametrize( + ("data_file", "exp_available"), + [ + (lazy_fixture("surface_reflectance_file"), False), + (lazy_fixture("surface_reflectance_with_veg_indices_file"), True), + ] + ) + def test_availability_veg_idx(self, data_file, exp_available): + """Test that vegetation indexes aren't available when they aren't present.""" + from satpy import Scene + scn = Scene(reader="viirs_edr", filenames=[data_file]) + avail = scn.available_dataset_names() + if exp_available: + assert "NDVI" in avail + assert "EVI" in avail + else: + assert "NDVI" not in avail + assert "EVI" not in avail + @pytest.mark.parametrize( ("filename_platform", "exp_shortname"), [ @@ -134,3 +226,10 @@ def _check_surf_refl_data_arr(data_arr: xr.DataArray) -> None: assert all(c == exp_row_chunks for c in data_arr.chunks[0]) assert data_arr.chunks[1] == (exp_shape[1],) assert data_arr.attrs["units"] == "1" + + +def _check_surf_refl_qf_data_arr(data_arr: xr.DataArray) -> None: + assert data_arr.dims == ("y", "x") + assert isinstance(data_arr.attrs["area"], SwathDefinition) + assert isinstance(data_arr.data, da.Array) + assert np.issubdtype(data_arr.data.dtype, np.uint8) From 976c1466b45a2c496dd71749c0cbcdb3d39eca40 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 21 Jul 2023 09:10:46 -0500 Subject: [PATCH 0361/1416] Cleanup VIIRS EDR testing --- satpy/readers/viirs_edr.py | 14 ++++++++---- satpy/tests/reader_tests/test_viirs_edr.py | 25 +++++++++++----------- 2 files changed, 23 insertions(+), 16 deletions(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 3ddd685bab..29a3d414d1 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -92,14 +92,20 @@ def __init__(self, filename, filename_info, filetype_info): def get_dataset(self, dataset_id, info): """Get the dataset.""" data_arr = self.nc[info['file_key']] - if data_arr.attrs.get("units", None) == "unitless": + units = data_arr.attrs.get("units", None) + if units is None or units == "unitless": data_arr.attrs["units"] = "1" - if isinstance(data_arr.attrs.get('flag_meanings'), str): - data_arr.attrs['flag_meanings'] = [flag.strip() for flag in data_arr.attrs['flag_meanings'].split(' ')] + self._decode_flag_meanings(data_arr) data_arr.attrs["platform_name"] = self.platform_name - return data_arr + @staticmethod + def _decode_flag_meanings(data_arr: xr.DataArray): + flag_meanings = data_arr.attrs.get("flag_meanings", None) + if isinstance(flag_meanings, str) and "\n" not in flag_meanings: + # only handle CF-standard flag meanings + data_arr.attrs['flag_meanings'] = [flag for flag in data_arr.attrs['flag_meanings'].split(' ')] + @property def start_time(self): 
"""Get first date/time when observations were recorded.""" diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index de95e946ac..06bd49aba5 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -28,6 +28,7 @@ import dask import dask.array as da import numpy as np +import numpy.typing as npt import pytest import xarray as xr from pyresample import SwathDefinition @@ -171,10 +172,13 @@ def test_get_dataset_surf_refl(self, surface_reflectance_file): def test_get_dataset_surf_refl_with_veg_idx(self, surface_reflectance_with_veg_indices_file): """Test retrieval of vegetation indices from surface reflectance files.""" from satpy import Scene - scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_with_veg_indices_file]) - scn.load(["NDVI", "EVI", "surf_refl_qf1"]) + bytes_in_m_row = 4 * 3200 + with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}): + scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_with_veg_indices_file]) + scn.load(["NDVI", "EVI", "surf_refl_qf1"]) + _check_surf_refl_data_arr(scn["NDVI"]) + _check_surf_refl_data_arr(scn["EVI"]) _check_surf_refl_qf_data_arr(scn["surf_refl_qf1"]) - # TODO: Check NDVI/EVI attributes/dims # TODO: Check NDVI/EVI quality flag clearing @pytest.mark.parametrize( @@ -214,22 +218,19 @@ def test_get_platformname(self, surface_reflectance_file, filename_platform, exp assert scn["surf_refl_I01"].attrs["platform_name"] == exp_shortname -def _check_surf_refl_data_arr(data_arr: xr.DataArray) -> None: +def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.float32) -> None: assert data_arr.dims == ("y", "x") assert isinstance(data_arr.attrs["area"], SwathDefinition) assert isinstance(data_arr.data, da.Array) - assert np.issubdtype(data_arr.data.dtype, np.float32) - is_m_band = "I" not in data_arr.attrs["name"] - exp_shape = (M_ROWS, M_COLS) if is_m_band else (I_ROWS, I_COLS) + assert np.issubdtype(data_arr.data.dtype, dtype) + is_mband_res = "I" not in data_arr.attrs["name"] # includes NDVI and EVI + exp_shape = (M_ROWS, M_COLS) if is_mband_res else (I_ROWS, I_COLS) assert data_arr.shape == exp_shape - exp_row_chunks = 4 if is_m_band else 8 + exp_row_chunks = 4 if is_mband_res else 8 assert all(c == exp_row_chunks for c in data_arr.chunks[0]) assert data_arr.chunks[1] == (exp_shape[1],) assert data_arr.attrs["units"] == "1" def _check_surf_refl_qf_data_arr(data_arr: xr.DataArray) -> None: - assert data_arr.dims == ("y", "x") - assert isinstance(data_arr.attrs["area"], SwathDefinition) - assert isinstance(data_arr.data, da.Array) - assert np.issubdtype(data_arr.data.dtype, np.uint8) + _check_surf_refl_data_arr(data_arr, dtype=np.uint8) From 1db0e4855f2b82b2ea38456524fe346344f8f3f0 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 21 Jul 2023 09:47:35 -0500 Subject: [PATCH 0362/1416] Fix sensor in VIIRS EDR --- satpy/readers/viirs_edr.py | 1 + satpy/tests/reader_tests/test_viirs_edr.py | 2 ++ 2 files changed, 3 insertions(+) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 29a3d414d1..2d26b4dd35 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -97,6 +97,7 @@ def get_dataset(self, dataset_id, info): data_arr.attrs["units"] = "1" self._decode_flag_meanings(data_arr) data_arr.attrs["platform_name"] = self.platform_name + data_arr.attrs["sensor"] = self.sensor_name return data_arr @staticmethod diff --git a/satpy/tests/reader_tests/test_viirs_edr.py 
b/satpy/tests/reader_tests/test_viirs_edr.py index 06bd49aba5..be1cf8dc6a 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -229,7 +229,9 @@ def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.floa exp_row_chunks = 4 if is_mband_res else 8 assert all(c == exp_row_chunks for c in data_arr.chunks[0]) assert data_arr.chunks[1] == (exp_shape[1],) + assert data_arr.attrs["units"] == "1" + assert data_arr.attrs["sensor"] == "viirs" def _check_surf_refl_qf_data_arr(data_arr: xr.DataArray) -> None: From 7b183ca7147c970076a3fa7032002f878cbfdedf Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 21 Jul 2023 16:04:24 -0500 Subject: [PATCH 0363/1416] Fix fill value handling and add valid_range YAML handling for VI products --- satpy/etc/readers/viirs_edr.yaml | 11 +++------- satpy/readers/viirs_edr.py | 15 +++++++++++++ satpy/tests/reader_tests/test_viirs_edr.py | 25 ++++++++++++++-------- 3 files changed, 34 insertions(+), 17 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index ebcea3cd3d..23d07f4e07 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -360,7 +360,6 @@ datasets: file_key: "QF1 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' - _FillValue: -9999 surf_refl_qf2: name: surf_refl_qf2 resolution: 750 @@ -368,7 +367,6 @@ datasets: file_key: "QF2 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' - _FillValue: -9999 surf_refl_qf3: name: surf_refl_qf3 resolution: 750 @@ -376,7 +374,6 @@ datasets: file_key: "QF3 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' - _FillValue: -9999 surf_refl_qf4: name: surf_refl_qf4 resolution: 750 @@ -384,7 +381,6 @@ datasets: file_key: "QF4 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' - _FillValue: -9999 surf_refl_qf5: name: surf_refl_qf5 resolution: 750 @@ -392,7 +388,6 @@ datasets: file_key: "QF5 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' - _FillValue: -9999 surf_refl_qf6: name: surf_refl_qf6 resolution: 750 @@ -400,7 +395,6 @@ datasets: file_key: "QF6 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' - _FillValue: -9999 surf_refl_qf7: name: surf_refl_qf7 resolution: 750 @@ -408,7 +402,6 @@ datasets: file_key: "QF7 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' - _FillValue: -9999 # Swath-based vegetation indexes added to CSPP LEO output NDVI: name: NDVI @@ -417,10 +410,12 @@ datasets: file_key: "NDVI" coordinates: [longitude_375, latitude_375] units: "1" + valid_range: [-1.0, 1.0] EVI: name: EVI resolution: 375 file_type: [jrr_surfref_product] - file_key: "NDVI" + file_key: "EVI" coordinates: [longitude_375, latitude_375] units: "1" + valid_range: [-1.0, 1.0] diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 2d26b4dd35..dc58cd406e 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -44,6 +44,7 @@ import logging +import numpy as np import xarray as xr from satpy.readers.file_handlers import BaseFileHandler @@ -92,6 +93,7 @@ def __init__(self, filename, filename_info, filetype_info): def get_dataset(self, dataset_id, info): """Get the dataset.""" data_arr = self.nc[info['file_key']] + data_arr = self._mask_invalid(data_arr, info) units = data_arr.attrs.get("units", None) if units is None or units == "unitless": data_arr.attrs["units"] = "1" @@ -100,6 +102,19 
@@ def get_dataset(self, dataset_id, info): data_arr.attrs["sensor"] = self.sensor_name return data_arr + def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: + fill_value = data_arr.encoding.get("_FillValue") + if fill_value is not None and not np.isnan(fill_value): + # xarray auto mask and scale handled this + return data_arr + yaml_fill = ds_info.get("_FillValue") + if yaml_fill is not None: + return data_arr.where(data_arr != yaml_fill) + valid_range = ds_info.get("valid_range", data_arr.attrs.get("valid_range")) + if valid_range is not None: + return data_arr.where((valid_range[0] <= data_arr) & (data_arr <= valid_range[1])) + return data_arr + @staticmethod def _decode_flag_meanings(data_arr: xr.DataArray): flag_meanings = data_arr.attrs.get("flag_meanings", None) diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index be1cf8dc6a..69d03f6db9 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -136,10 +136,11 @@ def _create_veg_index_variables() -> dict[str, xr.DataArray]: dim_x_375 = "Along_Scan_375m" i_dims = (dim_y_375, dim_x_375) - i_data = np.zeros((I_ROWS, I_COLS), dtype=np.float32) + vi_data = np.zeros((I_ROWS, I_COLS), dtype=np.float32) + vi_data[0, :7] = [-2.0, -1.0, -0.5, 0.0, 0.5, 1.0, 1.5] data_arrs = { - "NDVI": xr.DataArray(i_data, dims=i_dims, attrs={"units": "unitless"}), - "EVI": xr.DataArray(i_data, dims=i_dims, attrs={"units": "unitless"}), + "NDVI": xr.DataArray(vi_data, dims=i_dims, attrs={"units": "unitless"}), + "EVI": xr.DataArray(vi_data, dims=i_dims, attrs={"units": "unitless"}), } data_arrs["NDVI"].encoding["dtype"] = np.float32 data_arrs["EVI"].encoding["dtype"] = np.float32 @@ -176,8 +177,8 @@ def test_get_dataset_surf_refl_with_veg_idx(self, surface_reflectance_with_veg_i with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}): scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_with_veg_indices_file]) scn.load(["NDVI", "EVI", "surf_refl_qf1"]) - _check_surf_refl_data_arr(scn["NDVI"]) - _check_surf_refl_data_arr(scn["EVI"]) + _check_vi_data_arr(scn["NDVI"]) + _check_vi_data_arr(scn["EVI"]) _check_surf_refl_qf_data_arr(scn["surf_refl_qf1"]) # TODO: Check NDVI/EVI quality flag clearing @@ -218,6 +219,16 @@ def test_get_platformname(self, surface_reflectance_file, filename_platform, exp assert scn["surf_refl_I01"].attrs["platform_name"] == exp_shortname +def _check_surf_refl_qf_data_arr(data_arr: xr.DataArray) -> None: + _check_surf_refl_data_arr(data_arr, dtype=np.uint8) + + +def _check_vi_data_arr(data_arr: xr.DataArray) -> None: + _check_surf_refl_data_arr(data_arr) + data = data_arr.data.compute() + np.testing.assert_allclose(data[0, :7], [np.nan, -1.0, -0.5, 0.0, 0.5, 1.0, np.nan]) + + def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.float32) -> None: assert data_arr.dims == ("y", "x") assert isinstance(data_arr.attrs["area"], SwathDefinition) @@ -232,7 +243,3 @@ def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.floa assert data_arr.attrs["units"] == "1" assert data_arr.attrs["sensor"] == "viirs" - - -def _check_surf_refl_qf_data_arr(data_arr: xr.DataArray) -> None: - _check_surf_refl_data_arr(data_arr, dtype=np.uint8) From 2864f98e2c37f3acc868c09008ac43d5688eafb7 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sat, 22 Jul 2023 14:15:57 -0500 Subject: [PATCH 0364/1416] Add basic vegetation quality masking --- satpy/readers/viirs_edr.py | 35 
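The ``encoding`` check in ``_mask_invalid`` leans on how xarray's mask-and-scale decoding works: on read, ``_FillValue`` and ``scale_factor`` move from ``attrs`` into ``encoding`` and fill pixels become NaN. A small demonstration, independent of the reader::

    import numpy as np
    import xarray as xr

    raw = xr.DataArray(np.array([10000, 20000, -9999], dtype=np.int16),
                       attrs={"_FillValue": -9999, "scale_factor": 0.0001, "add_offset": 0.0})
    decoded = xr.decode_cf(xr.Dataset({"NDVI": raw}))["NDVI"]
    print(decoded.values)                  # [1. 2. nan]
    print(decoded.encoding["_FillValue"])  # -9999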
++++++++++++++++++++++ satpy/tests/reader_tests/test_viirs_edr.py | 20 +++++++++++-- 2 files changed, 53 insertions(+), 2 deletions(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index dc58cd406e..df151a1fd9 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -94,6 +94,9 @@ def get_dataset(self, dataset_id, info): """Get the dataset.""" data_arr = self.nc[info['file_key']] data_arr = self._mask_invalid(data_arr, info) + if info["file_key"] in ("NDVI", "EVI"): + good_mask = self._get_veg_index_good_mask() + data_arr = data_arr.where(good_mask) units = data_arr.attrs.get("units", None) if units is None or units == "unitless": data_arr.attrs["units"] = "1" @@ -115,6 +118,38 @@ def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: return data_arr.where((valid_range[0] <= data_arr) & (data_arr <= valid_range[1])) return data_arr + def _get_veg_index_good_mask(self) -> xr.DataArray: + # each mask array should be TRUE when pixels are UNACCEPTABLE + qf1 = self.nc['QF1 Surface Reflectance'] + has_sun_glint = (qf1 & 0b11000000) > 0 + is_cloudy = (qf1 & 0b00001100) > 0 # mask everything but "confident clear" + cloud_quality = (qf1 & 0b00000011) < 0b10 + + qf2 = self.nc['QF2 Surface Reflectance'] + has_snow_or_ice = (qf2 & 0b00100000) > 0 + has_cloud_shadow = (qf2 & 0b00001000) > 0 + water_mask = (qf2 & 0b00000111) + has_water = (water_mask <= 0b010) | (water_mask == 0b101) # shallow water, deep ocean, arctic + + qf7 = self.nc['QF7 Surface Reflectance'] + has_aerosols = (qf7 & 0b00001100) > 0b1000 # high aerosol quantity + adjacent_to_cloud = (qf7 & 0b00000010) > 0 + + bad_mask = ( + has_sun_glint | + is_cloudy | + cloud_quality | + has_snow_or_ice | + has_cloud_shadow | + has_water | + has_aerosols | + adjacent_to_cloud + ) + # upscale from M-band resolution to I-band resolution + bad_mask_iband_dask = bad_mask.data.repeat(2, axis=1).repeat(2, axis=0) + good_mask_iband = xr.DataArray(~bad_mask_iband_dask, dims=qf1.dims) + return good_mask_iband + @staticmethod def _decode_flag_meanings(data_arr: xr.DataArray): flag_meanings = data_arr.attrs.get("flag_meanings", None) diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 69d03f6db9..6d380cd017 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -146,9 +146,24 @@ def _create_veg_index_variables() -> dict[str, xr.DataArray]: data_arrs["EVI"].encoding["dtype"] = np.float32 # Quality Flags are from the Surface Reflectance data, but only used for VI products in the reader - qf_data = np.zeros((M_ROWS, M_COLS), dtype=np.uint8) for qf_num in range(1, 8): qf_name = f"QF{qf_num} Surface Reflectance" + qf_data = np.zeros((M_ROWS, M_COLS), dtype=np.uint8) + bad_qf_start = 4 # 0.5x the last test pixel set in "vi_data" above (I-band versus M-band index) + if qf_num == 1: + qf_data[:, :] |= 0b00000010 # medium cloud mask quality everywhere + qf_data[0, bad_qf_start] |= 0b11000000 # sun glint + qf_data[0, bad_qf_start + 1] |= 0b00001100 # cloudy + qf_data[0, bad_qf_start + 2] = 0b00000001 # low cloud mask quality + elif qf_num == 2: + qf_data[:, :] |= 0b00000011 # desert everywhere + qf_data[0, bad_qf_start + 3] |= 0b00100000 # snow or ice + qf_data[0, bad_qf_start + 4] |= 0b00001000 # cloud shadow + qf_data[0, bad_qf_start + 5] = 0b00000001 # deep ocean + elif qf_num == 7: + qf_data[0, bad_qf_start + 6] |= 0b00001100 # high aerosol + qf_data[0, bad_qf_start + 7] |= 0b00000010 # 
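Each quality-flag byte packs several two- or three-bit fields, so the mask terms above are plain bitwise tests. A toy decoding of QF1 with hand-picked byte values::

    import numpy as np

    qf1 = np.array([0b00000011, 0b11000010, 0b00001101], dtype=np.uint8)
    has_sun_glint = (qf1 & 0b11000000) > 0   # bits 7-6: any sun glint
    is_cloudy = (qf1 & 0b00001100) > 0       # bits 3-2: anything but "confident clear"
    low_quality = (qf1 & 0b00000011) < 0b10  # bits 1-0: cloud mask quality
    print(has_sun_glint)  # [False  True False]
    print(is_cloudy)      # [False False  True]
    print(low_quality)    # [False False  True]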
adjacent to cloud + data_arr = xr.DataArray(qf_data, dims=m_dims, attrs={"flag_meanings": QF1_FLAG_MEANINGS}) data_arr.encoding["dtype"] = np.uint8 data_arrs[qf_name] = data_arr @@ -180,7 +195,6 @@ def test_get_dataset_surf_refl_with_veg_idx(self, surface_reflectance_with_veg_i _check_vi_data_arr(scn["NDVI"]) _check_vi_data_arr(scn["EVI"]) _check_surf_refl_qf_data_arr(scn["surf_refl_qf1"]) - # TODO: Check NDVI/EVI quality flag clearing @pytest.mark.parametrize( ("data_file", "exp_available"), [ @@ -227,6 +241,8 @@ def _check_vi_data_arr(data_arr: xr.DataArray) -> None: _check_surf_refl_data_arr(data_arr) data = data_arr.data.compute() np.testing.assert_allclose(data[0, :7], [np.nan, -1.0, -0.5, 0.0, 0.5, 1.0, np.nan]) + np.testing.assert_allclose(data[0, 8:8 + 16], np.nan) + np.testing.assert_allclose(data[0, 8 + 16:], 0.0) def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.float32) -> None: From b9a5f4c321a33db1ad06f3c261680e03ad1891e7 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sat, 22 Jul 2023 14:29:12 -0500 Subject: [PATCH 0365/1416] Refactor viirs edr reader so surface reflectance is separate --- satpy/etc/readers/viirs_edr.yaml | 2 +- satpy/readers/viirs_edr.py | 78 ++++++++++++++++++-------------- 2 files changed, 44 insertions(+), 36 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 23d07f4e07..9e362c18af 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -19,7 +19,7 @@ file_types: file_patterns: - 'JRR-ADP_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' jrr_surfref_product: - file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSSurfaceReflectanceWithVIHandler variable_prefix: "" file_patterns: - 'SurfRefl_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index df151a1fd9..9ad841754d 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -94,9 +94,6 @@ def get_dataset(self, dataset_id, info): """Get the dataset.""" data_arr = self.nc[info['file_key']] data_arr = self._mask_invalid(data_arr, info) - if info["file_key"] in ("NDVI", "EVI"): - good_mask = self._get_veg_index_good_mask() - data_arr = data_arr.where(good_mask) units = data_arr.attrs.get("units", None) if units is None or units == "unitless": data_arr.attrs["units"] = "1" @@ -118,38 +115,6 @@ def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: return data_arr.where((valid_range[0] <= data_arr) & (data_arr <= valid_range[1])) return data_arr - def _get_veg_index_good_mask(self) -> xr.DataArray: - # each mask array should be TRUE when pixels are UNACCEPTABLE - qf1 = self.nc['QF1 Surface Reflectance'] - has_sun_glint = (qf1 & 0b11000000) > 0 - is_cloudy = (qf1 & 0b00001100) > 0 # mask everything but "confident clear" - cloud_quality = (qf1 & 0b00000011) < 0b10 - - qf2 = self.nc['QF2 Surface Reflectance'] - has_snow_or_ice = (qf2 & 0b00100000) > 0 - has_cloud_shadow = (qf2 & 0b00001000) > 0 - water_mask = (qf2 & 0b00000111) - has_water = (water_mask <= 0b010) | (water_mask == 0b101) # shallow water, deep ocean, arctic - - qf7 = self.nc['QF7 Surface Reflectance'] - has_aerosols = (qf7 & 0b00001100) > 0b1000 # high aerosol quantity - adjacent_to_cloud = (qf7 & 0b00000010) > 0 - - bad_mask = ( - has_sun_glint
| - is_cloudy | - cloud_quality | - has_snow_or_ice | - has_cloud_shadow | - has_water | - has_aerosols | - adjacent_to_cloud - ) - # upscale from M-band resolution to I-band resolution - bad_mask_iband_dask = bad_mask.data.repeat(2, axis=1).repeat(2, axis=0) - good_mask_iband = xr.DataArray(~bad_mask_iband_dask, dims=qf1.dims) - return good_mask_iband - @staticmethod def _decode_flag_meanings(data_arr: xr.DataArray): flag_meanings = data_arr.attrs.get("flag_meanings", None) @@ -213,3 +178,46 @@ def available_datasets(self, configured_datasets=None): yield None, ds_info file_key = ds_info.get("file_key", ds_info["name"]) yield file_key in self.nc, ds_info + + +class VIIRSSurfaceReflectanceWithVIHandler(VIIRSJRRFileHandler): + """File handler for surface reflectance files with optional vegetation indexes.""" + + def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: + new_data_arr = super()._mask_invalid(data_arr, ds_info) + if ds_info["file_key"] in ("NDVI", "EVI"): + good_mask = self._get_veg_index_good_mask() + new_data_arr = new_data_arr.where(good_mask) + return new_data_arr + + def _get_veg_index_good_mask(self) -> xr.DataArray: + # each mask array should be TRUE when pixels are UNACCEPTABLE + qf1 = self.nc['QF1 Surface Reflectance'] + has_sun_glint = (qf1 & 0b11000000) > 0 + is_cloudy = (qf1 & 0b00001100) > 0 # mask everything but "confident clear" + cloud_quality = (qf1 & 0b00000011) < 0b10 + + qf2 = self.nc['QF2 Surface Reflectance'] + has_snow_or_ice = (qf2 & 0b00100000) > 0 + has_cloud_shadow = (qf2 & 0b00001000) > 0 + water_mask = (qf2 & 0b00000111) + has_water = (water_mask <= 0b010) | (water_mask == 0b101) # shallow water, deep ocean, arctic + + qf7 = self.nc['QF7 Surface Reflectance'] + has_aerosols = (qf7 & 0b00001100) > 0b1000 # high aerosol quantity + adjacent_to_cloud = (qf7 & 0b00000010) > 0 + + bad_mask = ( + has_sun_glint | + is_cloudy | + cloud_quality | + has_snow_or_ice | + has_cloud_shadow | + has_water | + has_aerosols | + adjacent_to_cloud + ) + # upscale from M-band resolution to I-band resolution + bad_mask_iband_dask = bad_mask.data.repeat(2, axis=1).repeat(2, axis=0) + good_mask_iband = xr.DataArray(~bad_mask_iband_dask, dims=qf1.dims) + return good_mask_iband From 5b1573fc6f7cf794fb45c388362e9b86ef59d6b8 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sat, 22 Jul 2023 15:26:22 -0500 Subject: [PATCH 0366/1416] Add rows_per_scan to viirs_edr metadata --- satpy/readers/viirs_edr.py | 5 +++++ satpy/tests/reader_tests/test_viirs_edr.py | 5 +++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 9ad841754d..472e4c29ea 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -90,6 +90,10 @@ def __init__(self, filename, filename_info, filetype_info): self.algorithm_version = filename_info['platform_shortname'] self.sensor_name = 'viirs' + def rows_per_scans(self, data_arr: xr.DataArray) -> int: + """Get number of array rows per instrument scan based on data resolution.""" + return 32 if data_arr.shape[1] == 6400 else 16 + def get_dataset(self, dataset_id, info): """Get the dataset.""" data_arr = self.nc[info['file_key']] @@ -100,6 +104,7 @@ def get_dataset(self, dataset_id, info): self._decode_flag_meanings(data_arr) data_arr.attrs["platform_name"] = self.platform_name data_arr.attrs["sensor"] = self.sensor_name + data_arr.attrs["rows_per_scan"] = self.rows_per_scans(data_arr) return data_arr def _mask_invalid(self, data_arr: xr.DataArray, ds_info: 
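The last step of the mask builder doubles the M-band mask up to I-band geometry because every 750 m pixel covers a 2x2 block of 375 m pixels. The same ``repeat`` idiom on a tiny numpy array (dask arrays support it identically)::

    import numpy as np

    m_mask = np.array([[True, False],
                       [False, True]])
    i_mask = m_mask.repeat(2, axis=1).repeat(2, axis=0)
    print(i_mask.astype(int))
    # [[1 1 0 0]
    #  [1 1 0 0]
    #  [0 0 1 1]
    #  [0 0 1 1]]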
dict) -> xr.DataArray: diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 6d380cd017..7f2659ec61 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -34,9 +34,9 @@ from pyresample import SwathDefinition from pytest_lazyfixture import lazy_fixture -I_COLS = 64 # real-world 6400 +I_COLS = 6400 I_ROWS = 32 # one scan -M_COLS = 32 # real-world 3200 +M_COLS = 3200 M_ROWS = 16 # one scan START_TIME = datetime(2023, 5, 30, 17, 55, 41, 0) END_TIME = datetime(2023, 5, 30, 17, 57, 5, 0) @@ -259,3 +259,4 @@ def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.floa assert data_arr.attrs["units"] == "1" assert data_arr.attrs["sensor"] == "viirs" + assert data_arr.attrs["rows_per_scan"] == 16 if is_mband_res else 32 From d7121bb1315360ac44c384435784785d32cb5530 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 23 Jul 2023 06:43:34 -0500 Subject: [PATCH 0367/1416] Add standard_names from YAML --- satpy/etc/readers/viirs_edr.yaml | 22 ++++++++++++++++++++ satpy/readers/viirs_edr.py | 5 ++++- satpy/tests/reader_tests/test_viirs_edr.py | 24 +++++++++++++++++++--- 3 files changed, 47 insertions(+), 4 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 9e362c18af..b93c2fdd3d 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -254,6 +254,7 @@ datasets: coordinates: [longitude_375, latitude_375] units: '1' _FillValue: -9999 + standard_name: "surface_bidirectional_reflectance" surf_refl_I02: name: surf_refl_I02 resolution: 375 @@ -263,6 +264,7 @@ datasets: coordinates: [longitude_375, latitude_375] units: '1' _FillValue: -9999 + standard_name: "surface_bidirectional_reflectance" surf_refl_I03: name: surf_refl_I03 resolution: 375 @@ -272,6 +274,7 @@ datasets: coordinates: [longitude_375, latitude_375] units: '1' _FillValue: -9999 + standard_name: "surface_bidirectional_reflectance" surf_refl_M01: name: surf_refl_M01 resolution: 750 @@ -281,6 +284,7 @@ datasets: coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 + standard_name: "surface_bidirectional_reflectance" surf_refl_M02: name: surf_refl_M02 resolution: 750 @@ -290,6 +294,7 @@ datasets: coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 + standard_name: "surface_bidirectional_reflectance" surf_refl_M03: name: surf_refl_M03 resolution: 750 @@ -299,6 +304,7 @@ datasets: coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 + standard_name: "surface_bidirectional_reflectance" surf_refl_M04: name: surf_refl_M04 resolution: 750 @@ -308,6 +314,7 @@ datasets: coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 + standard_name: "surface_bidirectional_reflectance" surf_refl_M05: name: surf_refl_M05 resolution: 750 @@ -317,6 +324,7 @@ datasets: coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 + standard_name: "surface_bidirectional_reflectance" surf_refl_M06: name: surf_refl_M06 resolution: 750 @@ -326,6 +334,7 @@ datasets: coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 + standard_name: "surface_bidirectional_reflectance" surf_refl_M07: name: surf_refl_M07 resolution: 750 @@ -335,6 +344,7 @@ datasets: coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 + standard_name: "surface_bidirectional_reflectance" surf_refl_M08: name: surf_refl_M08 resolution: 750 @@ -344,6 +354,7 @@ datasets: coordinates: 
[longitude_750, latitude_750] units: '1' _FillValue: -9999 + standard_name: "surface_bidirectional_reflectance" surf_refl_M10: name: surf_refl_M10 resolution: 750 @@ -353,6 +364,7 @@ datasets: coordinates: [longitude_750, latitude_750] units: '1' _FillValue: -9999 + standard_name: "surface_bidirectional_reflectance" surf_refl_qf1: name: surf_refl_qf1 resolution: 750 @@ -360,6 +372,7 @@ datasets: file_key: "QF1 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' + standard_name: "quality_flag" surf_refl_qf2: name: surf_refl_qf2 resolution: 750 @@ -367,6 +380,7 @@ datasets: file_key: "QF2 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' + standard_name: "quality_flag" surf_refl_qf3: name: surf_refl_qf3 resolution: 750 @@ -374,6 +388,7 @@ datasets: file_key: "QF3 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' + standard_name: "quality_flag" surf_refl_qf4: name: surf_refl_qf4 resolution: 750 @@ -381,6 +396,7 @@ datasets: file_key: "QF4 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' + standard_name: "quality_flag" surf_refl_qf5: name: surf_refl_qf5 resolution: 750 @@ -388,6 +404,7 @@ datasets: file_key: "QF5 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' + standard_name: "quality_flag" surf_refl_qf6: name: surf_refl_qf6 resolution: 750 @@ -395,6 +412,7 @@ datasets: file_key: "QF6 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' + standard_name: "quality_flag" surf_refl_qf7: name: surf_refl_qf7 resolution: 750 @@ -402,6 +420,8 @@ datasets: file_key: "QF7 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' + standard_name: "quality_flag" + # Swath-based vegetation indexes added to CSPP LEO output NDVI: name: NDVI @@ -411,6 +431,7 @@ datasets: coordinates: [longitude_375, latitude_375] units: "1" valid_range: [-1.0, 1.0] + standard_name: "normalized_difference_vegetation_index" EVI: name: EVI resolution: 375 @@ -419,3 +440,4 @@ datasets: coordinates: [longitude_375, latitude_375] units: "1" valid_range: [-1.0, 1.0] + standard_name: "normalized_difference_vegetation_index" diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 472e4c29ea..ee9ba66953 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -98,9 +98,12 @@ def get_dataset(self, dataset_id, info): """Get the dataset.""" data_arr = self.nc[info['file_key']] data_arr = self._mask_invalid(data_arr, info) - units = data_arr.attrs.get("units", None) + units = info.get("units", data_arr.attrs.get("units", None)) if units is None or units == "unitless": data_arr.attrs["units"] = "1" + data_arr.attrs["units"] = units + if "standard_name" in info: + data_arr.attrs["standard_name"] = info["standard_name"] self._decode_flag_meanings(data_arr) data_arr.attrs["platform_name"] = self.platform_name data_arr.attrs["sensor"] = self.sensor_name diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 7f2659ec61..ff1b646737 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -234,11 +234,16 @@ def test_get_platformname(self, surface_reflectance_file, filename_platform, exp def _check_surf_refl_qf_data_arr(data_arr: xr.DataArray) -> None: - _check_surf_refl_data_arr(data_arr, dtype=np.uint8) + _array_checks(data_arr, dtype=np.uint8) + _shared_metadata_checks(data_arr) + assert data_arr.attrs["standard_name"] == "quality_flag" def 
_check_vi_data_arr(data_arr: xr.DataArray) -> None: - _check_surf_refl_data_arr(data_arr) + _array_checks(data_arr) + _shared_metadata_checks(data_arr) + assert data_arr.attrs["standard_name"] == "normalized_difference_vegetation_index" + data = data_arr.data.compute() np.testing.assert_allclose(data[0, :7], [np.nan, -1.0, -0.5, 0.0, 0.5, 1.0, np.nan]) np.testing.assert_allclose(data[0, 8:8 + 16], np.nan) @@ -246,17 +251,30 @@ def _check_vi_data_arr(data_arr: xr.DataArray) -> None: def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.float32) -> None: + _array_checks(data_arr, dtype) + _shared_metadata_checks(data_arr) + assert data_arr.attrs["standard_name"] == "surface_bidirectional_reflectance" + + +def _array_checks(data_arr: xr.DataArray, dtype: npt.Dtype = np.float32) -> None: assert data_arr.dims == ("y", "x") assert isinstance(data_arr.attrs["area"], SwathDefinition) assert isinstance(data_arr.data, da.Array) assert np.issubdtype(data_arr.data.dtype, dtype) - is_mband_res = "I" not in data_arr.attrs["name"] # includes NDVI and EVI + is_mband_res = _is_mband_res(data_arr) exp_shape = (M_ROWS, M_COLS) if is_mband_res else (I_ROWS, I_COLS) assert data_arr.shape == exp_shape exp_row_chunks = 4 if is_mband_res else 8 assert all(c == exp_row_chunks for c in data_arr.chunks[0]) assert data_arr.chunks[1] == (exp_shape[1],) + +def _shared_metadata_checks(data_arr: xr.DataArray) -> None: + is_mband_res = _is_mband_res(data_arr) assert data_arr.attrs["units"] == "1" assert data_arr.attrs["sensor"] == "viirs" assert data_arr.attrs["rows_per_scan"] == 16 if is_mband_res else 32 + + +def _is_mband_res(data_arr: xr.DataArray) -> bool: + return "I" not in data_arr.attrs["name"] # includes NDVI and EVI From 891c4c70ae5f0f4267c7e3132835478a427d902f Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 23 Jul 2023 07:06:57 -0500 Subject: [PATCH 0368/1416] Change surface reflectances to percentage for consistency --- satpy/etc/enhancements/generic.yaml | 9 ++++++++ satpy/etc/readers/viirs_edr.yaml | 24 +++++++++++----------- satpy/readers/viirs_edr.py | 9 +++++--- satpy/tests/reader_tests/test_viirs_edr.py | 13 +++++++++--- 4 files changed, 37 insertions(+), 18 deletions(-) diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 37b375f36c..967f47e2f1 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -15,6 +15,15 @@ enhancements: - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.5} + surface_reflectance_default: + standard_name: surface_bidirectional_reflectance + operations: + - name: linear_stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 100.} + - name: gamma + method: !!python/name:satpy.enhancements.gamma + kwargs: {gamma: 1.5} true_color_default: standard_name: true_color operations: diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index b93c2fdd3d..fc6d49d2cf 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -252,7 +252,7 @@ datasets: file_type: [jrr_surfref_product] file_key: "375m Surface Reflectance Band I1" coordinates: [longitude_375, latitude_375] - units: '1' + units: '%' _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_I02: @@ -262,7 +262,7 @@ datasets: file_type: [jrr_surfref_product] file_key: "375m Surface Reflectance Band I2" coordinates: [longitude_375, latitude_375] - units: '1' + 
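The new ``surface_reflectance_default`` enhancement maps 0-100 % reflectance linearly onto 0-1 and then brightens with a gamma of 1.5. Approximately, in plain numpy (satpy applies these steps through trollimage; clipping is added here for simplicity)::

    import numpy as np

    def stretch_and_gamma(percent_refl, max_stretch=100.0, gamma=1.5):
        scaled = np.clip(percent_refl / max_stretch, 0.0, 1.0)  # crude linear stretch
        return scaled ** (1.0 / gamma)                          # gamma brightening

    print(stretch_and_gamma(np.array([0.0, 25.0, 100.0])))  # [0.     0.3969 1.    ]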
units: '%' _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_I03: @@ -272,7 +272,7 @@ datasets: file_type: [jrr_surfref_product] file_key: "375m Surface Reflectance Band I3" coordinates: [longitude_375, latitude_375] - units: '1' + units: '%' _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M01: @@ -282,7 +282,7 @@ datasets: file_type: [jrr_surfref_product] file_key: "750m Surface Reflectance Band M1" coordinates: [longitude_750, latitude_750] - units: '1' + units: '%' _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M02: @@ -292,7 +292,7 @@ datasets: file_type: [jrr_surfref_product] file_key: "750m Surface Reflectance Band M2" coordinates: [longitude_750, latitude_750] - units: '1' + units: '%' _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M03: @@ -302,7 +302,7 @@ datasets: file_type: [jrr_surfref_product] file_key: "750m Surface Reflectance Band M3" coordinates: [longitude_750, latitude_750] - units: '1' + units: '%' _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M04: @@ -312,7 +312,7 @@ datasets: file_type: [jrr_surfref_product] file_key: "750m Surface Reflectance Band M4" coordinates: [longitude_750, latitude_750] - units: '1' + units: '%' _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M05: @@ -322,7 +322,7 @@ datasets: file_type: [jrr_surfref_product] file_key: "750m Surface Reflectance Band M5" coordinates: [longitude_750, latitude_750] - units: '1' + units: '%' _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M06: @@ -332,7 +332,7 @@ datasets: file_type: [jrr_surfref_product] file_key: "750m Surface Reflectance Band M6" coordinates: [longitude_750, latitude_750] - units: '1' + units: '%' _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M07: @@ -342,7 +342,7 @@ datasets: file_type: [jrr_surfref_product] file_key: "750m Surface Reflectance Band M7" coordinates: [longitude_750, latitude_750] - units: '1' + units: '%' _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M08: @@ -352,7 +352,7 @@ datasets: file_type: [jrr_surfref_product] file_key: "750m Surface Reflectance Band M8" coordinates: [longitude_750, latitude_750] - units: '1' + units: '%' _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M10: @@ -362,7 +362,7 @@ datasets: file_type: [jrr_surfref_product] file_key: "750m Surface Reflectance Band M10" coordinates: [longitude_750, latitude_750] - units: '1' + units: '%' _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_qf1: diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index ee9ba66953..64edbd48c1 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -47,6 +47,7 @@ import numpy as np import xarray as xr +from satpy import DataID from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_chunk_size_limit @@ -94,13 +95,15 @@ def rows_per_scans(self, data_arr: xr.DataArray) -> int: """Get number of array rows per instrument scan based on data resolution.""" return 32 if data_arr.shape[1] == 6400 else 16 - def get_dataset(self, dataset_id, info): + def get_dataset(self, dataset_id: DataID, info: dict) -> xr.DataArray: """Get the dataset.""" data_arr = self.nc[info['file_key']] data_arr = self._mask_invalid(data_arr, info) - units = info.get("units", data_arr.attrs.get("units", None)) + units 
= info.get("units", data_arr.attrs.get("units")) if units is None or units == "unitless": - data_arr.attrs["units"] = "1" + units = "1" + if units == "%" and data_arr.attrs.get("units") in ("1", "unitless"): + data_arr *= 100.0 # turn into percentages data_arr.attrs["units"] = units if "standard_name" in info: data_arr.attrs["standard_name"] = info["standard_name"] diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index ff1b646737..6a40778505 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -111,8 +111,8 @@ def _create_surf_refl_variables() -> dict[str, xr.DataArray]: lat_attrs = {"standard_name": "latitude", "units": "degrees_north", "_FillValue": -999.9} sr_attrs = {"units": "unitless", "_FillValue": -9999, "scale_factor": 0.0001, "add_offset": 0.0} - i_data = np.zeros((I_ROWS, I_COLS), dtype=np.float32) - m_data = np.zeros((M_ROWS, M_COLS), dtype=np.float32) + i_data = np.random.random_sample((I_ROWS, I_COLS)).astype(np.float32) + m_data = np.random.random_sample((M_ROWS, M_COLS)).astype(np.float32) data_arrs = { "Longitude_at_375m_resolution": xr.DataArray(i_data, dims=i_dims, attrs=lon_attrs), "Latitude_at_375m_resolution": xr.DataArray(i_data, dims=i_dims, attrs=lat_attrs), @@ -125,6 +125,8 @@ def _create_surf_refl_variables() -> dict[str, xr.DataArray]: if "scale_factor" not in data_arr.attrs: continue data_arr.encoding["dtype"] = np.int16 + data_arr.encoding["scale_factor"] = data_arr.attrs.pop("scale_factor") + data_arr.encoding["add_offset"] = data_arr.attrs.pop("add_offset") return data_arrs @@ -236,12 +238,14 @@ def test_get_platformname(self, surface_reflectance_file, filename_platform, exp def _check_surf_refl_qf_data_arr(data_arr: xr.DataArray) -> None: _array_checks(data_arr, dtype=np.uint8) _shared_metadata_checks(data_arr) + assert data_arr.attrs["units"] == "1" assert data_arr.attrs["standard_name"] == "quality_flag" def _check_vi_data_arr(data_arr: xr.DataArray) -> None: _array_checks(data_arr) _shared_metadata_checks(data_arr) + assert data_arr.attrs["units"] == "1" assert data_arr.attrs["standard_name"] == "normalized_difference_vegetation_index" data = data_arr.data.compute() @@ -252,7 +256,11 @@ def _check_vi_data_arr(data_arr: xr.DataArray) -> None: def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.float32) -> None: _array_checks(data_arr, dtype) + data = data_arr.data.compute() + assert data.max() > 1.0 # random 0-1 test data multiplied by 100 + _shared_metadata_checks(data_arr) + assert data_arr.attrs["units"] == "%" assert data_arr.attrs["standard_name"] == "surface_bidirectional_reflectance" @@ -271,7 +279,6 @@ def _array_checks(data_arr: xr.DataArray, dtype: npt.Dtype = np.float32) -> None def _shared_metadata_checks(data_arr: xr.DataArray) -> None: is_mband_res = _is_mband_res(data_arr) - assert data_arr.attrs["units"] == "1" assert data_arr.attrs["sensor"] == "viirs" assert data_arr.attrs["rows_per_scan"] == 16 if is_mband_res else 32 From e4ee636ece198e976fc7cff34d49f8ab4be3b6a5 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 23 Jul 2023 20:29:23 -0500 Subject: [PATCH 0369/1416] Fix true color surface name Addresses reviewer comment --- satpy/etc/composites/viirs.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/composites/viirs.yaml b/satpy/etc/composites/viirs.yaml index 994a09c960..9c7269862b 100644 --- a/satpy/etc/composites/viirs.yaml +++ b/satpy/etc/composites/viirs.yaml @@ -322,7 
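The unit change itself is a single scaling: when the YAML requests ``%`` but the file stores reflectance as a 0-1 fraction, the data are multiplied by 100. Standalone::

    import numpy as np
    import xarray as xr

    refl = xr.DataArray(np.array([0.0, 0.25, 1.0], dtype=np.float32),
                        attrs={"units": "unitless"})
    yaml_units = "%"
    if yaml_units == "%" and refl.attrs.get("units") in ("1", "unitless"):
        refl = refl * 100.0
        refl.attrs["units"] = yaml_units
    print(refl.values, refl.attrs["units"])  # [  0.  25. 100.] %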
+322,7 @@ composites: - name: surf_refl_M05 standard_name: natural_color - true_color_mband_nocorr: + true_color_mband_surf_nocorr: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: surf_refl_M05 From fee489f5b58c85a44dc4851ab498eb94f0908932 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 23 Jul 2023 20:31:09 -0500 Subject: [PATCH 0370/1416] Remove redundant night_overview for VIIRS Closes #1964 --- satpy/etc/composites/viirs.yaml | 8 -------- 1 file changed, 8 deletions(-) diff --git a/satpy/etc/composites/viirs.yaml b/satpy/etc/composites/viirs.yaml index 9c7269862b..541c4dff10 100644 --- a/satpy/etc/composites/viirs.yaml +++ b/satpy/etc/composites/viirs.yaml @@ -352,14 +352,6 @@ composites: modifiers: [sunz_corrected] standard_name: true_color - night_overview: - compositor: !!python/name:satpy.composites.GenericCompositor - prerequisites: - - DNB - - DNB - - M15 - standard_name: night_overview - overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: From ca587f9e77ed209ed29e81cc4a534be34c7ecbe4 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 23 Jul 2023 21:10:24 -0500 Subject: [PATCH 0371/1416] Rename surface reflectance based composites and add sharpened true color --- satpy/etc/composites/viirs.yaml | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/satpy/etc/composites/viirs.yaml b/satpy/etc/composites/viirs.yaml index 541c4dff10..6fcdad4e1e 100644 --- a/satpy/etc/composites/viirs.yaml +++ b/satpy/etc/composites/viirs.yaml @@ -306,7 +306,7 @@ composites: modifiers: [sunz_corrected_iband] standard_name: natural_color - natural_color_iband_surf_nocorr: + natural_color_surf: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: surf_refl_I03 @@ -314,7 +314,7 @@ composites: - name: surf_refl_I01 standard_name: natural_color - natural_color_mband_surf_nocorr: + natural_color_lowres_surf: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: surf_refl_M10 @@ -322,7 +322,7 @@ composites: - name: surf_refl_M05 standard_name: natural_color - true_color_mband_surf_nocorr: + true_color_lowres_surf: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: surf_refl_M05 @@ -330,6 +330,17 @@ composites: - name: surf_refl_M03 standard_name: true_color + true_color_surf: + compositor: !!python/name:satpy.composites.RatioSharpenedRGB + prerequisites: + - name: surf_refl_M05 + - name: surf_refl_M04 + - name: surf_refl_M03 + optional_prerequisites: + - name: surf_refl_I01 + standard_name: true_color + high_resolution_band: red + natural_color_sun_lowres: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: From a913e73cd161ff819a4856e167630cfb55989a6d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 24 Jul 2023 10:38:52 -0500 Subject: [PATCH 0372/1416] Remove inaccurate VIIRS EDR mask information --- satpy/etc/readers/viirs_edr.yaml | 60 ++------------------------------ 1 file changed, 2 insertions(+), 58 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index fc6d49d2cf..3ef014a88e 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -77,172 +77,116 @@ datasets: file_type: jrr_cloudmask file_key: "CloudMask" coordinates: [longitude, latitude] - units: '1' - flag_meanings: ['Clear', 'Probably Clear', 'Probably Cloudy', 'Cloudy'] - flag_values: [0, 1, 2, 3] - _FillValue: -128 cloud_mask_binary: name: cloud_mask_binary 
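``RatioSharpenedRGB`` scales the two lower-resolution bands by the ratio of the high-resolution band to its low-resolution counterpart. A conceptual sketch that assumes the bands are already co-located on one grid (satpy additionally handles the upsampling and edge cases)::

    import numpy as np

    def ratio_sharpen(r_lo, g_lo, b_lo, r_hi):
        ratio = r_hi / r_lo  # per-pixel sharpening factor from the red band
        return r_hi, g_lo * ratio, b_lo * ratio

    r, g, b = ratio_sharpen(np.array([20.0]), np.array([15.0]),
                            np.array([10.0]), np.array([24.0]))
    print(r, g, b)  # [24.] [18.] [12.]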
resolution: 750 file_type: [jrr_cloudmask] file_key: "CloudMaskBinary" coordinates: [longitude, latitude] - units: '1' - flag_meanings: ['Clear', 'Cloudy'] - flag_values: [0, 1] - _FillValue: -128 cloud_probability: name: cloud_probability resolution: 750 file_type: [jrr_cloudmask] file_key: "CloudProbability" coordinates: [longitude, latitude] - units: '1' - _FillValue: -999. dust_mask: name: dust_mask resolution: 750 file_type: [jrr_cloudmask] file_key: "Dust_Mask" coordinates: [longitude, latitude] - units: '1' - flag_meanings: ['Clear', 'Dusty'] - flag_values: [0, 1] - _FillValue: -128 fire_mask: name: fire_mask resolution: 750 file_type: [jrr_cloudmask] file_key: "Fire_Mask" coordinates: [longitude, latitude] - units: '1' - flag_meanings: ['No fire', 'Fire'] - flag_values: [0, 1] - _FillValue: -128 smoke_mask: name: smoke_mask resolution: 750 file_type: [jrr_cloudmask] file_key: "Smoke_Mask" coordinates: [longitude, latitude] - units: '1' - flag_meanings: ['Clear', 'Smoky'] - flag_values: [0, 1] - _FillValue: -128 - # Aerosol optical depth product datasets + # Aerosol detection product datasets ash_mask: name: ash_mask resolution: 750 file_type: [jrr_aerosol_product] file_key: "Ash" coordinates: [longitude, latitude] - units: '1' - flag_meanings: ['Clear', 'Ash'] - flag_values: [0, 1] - _FillValue: -128 cloud_mask_adp: name: cloud_mask_adp resolution: 750 file_type: [jrr_aerosol_product] file_key: "Cloud" coordinates: [longitude, latitude] - units: '1' - flag_meanings: ['Clear', 'Probably Clear', 'Probably Cloudy', 'Cloudy'] - flag_values: [0, 1, 2, 3] - _FillValue: -128 dust_smoke_discrimination_index: name: dust_smoke_discrimination_index resolution: 750 file_type: [jrr_aerosol_product] file_key: "DSDI" coordinates: [longitude, latitude] - units: '1' - _FillValue: -999 nuc: name: nuc resolution: 750 file_type: [jrr_aerosol_product] file_key: "NUC" coordinates: [longitude, latitude] - units: '1' - flag_meanings: ['No', 'Yes'] - flag_values: [0, 1] - _FillValue: -128 pqi1: name: pqi1 resolution: 750 file_type: [jrr_aerosol_product] file_key: "PQI1" coordinates: [longitude, latitude] - units: '1' - _FillValue: -128 pqi2: name: pqi2 resolution: 750 file_type: [jrr_aerosol_product] file_key: "PQI2" coordinates: [longitude, latitude] - units: '1' - _FillValue: -128 pqi3: name: pqi3 resolution: 750 file_type: [jrr_aerosol_product] file_key: "PQI3" coordinates: [longitude, latitude] - units: '1' - _FillValue: -128 pqi4: name: pqi4 resolution: 750 file_type: [jrr_aerosol_product] file_key: "PQI4" coordinates: [longitude, latitude] - units: '1' - _FillValue: -128 qcflag: name: qcflag resolution: 750 file_type: [jrr_aerosol_product] file_key: "QC_Flag" coordinates: [longitude, latitude] - units: '1' - _FillValue: -128 saai: name: saai resolution: 750 file_type: [jrr_aerosol_product] file_key: "SAAI" coordinates: [longitude, latitude] - units: '1' - _FillValue: -999 smoke: name: smoke resolution: 750 file_type: [jrr_aerosol_product] file_key: "Smoke" coordinates: [longitude, latitude] - units: '1' - _FillValue: -999 smoke_concentration: name: smoke_concentration resolution: 750 file_type: [jrr_aerosol_product] file_key: "SmokeCon" coordinates: [longitude, latitude] - units: 'ug/m^3' - _FillValue: -999 snow_ice: name: snow_ice resolution: 750 file_type: [jrr_aerosol_product] file_key: "SnowIce" coordinates: [longitude, latitude] - units: '1' - flag_meanings: ['No', 'Yes'] - flag_values: [0, 1] - _FillValue: -128 # Surface reflectance products surf_refl_I01: @@ -422,7 +366,7 @@ datasets: units: 
'1' standard_name: "quality_flag" - # Swath-based vegetation indexes added to CSPP LEO output + # Swath-based vegetation indexes added to CSPP LEO surface reflectance files NDVI: name: NDVI resolution: 375 From a3f1f4e152dd712f69a192435d901d234341c59d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 24 Jul 2023 12:24:23 -0500 Subject: [PATCH 0373/1416] Removal of unnecessary fill values in YAML --- satpy/etc/readers/viirs_edr.yaml | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 3ef014a88e..645c22a898 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -197,7 +197,6 @@ datasets: file_key: "375m Surface Reflectance Band I1" coordinates: [longitude_375, latitude_375] units: '%' - _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_I02: name: surf_refl_I02 @@ -207,7 +206,6 @@ datasets: file_key: "375m Surface Reflectance Band I2" coordinates: [longitude_375, latitude_375] units: '%' - _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_I03: name: surf_refl_I03 @@ -217,7 +215,6 @@ datasets: file_key: "375m Surface Reflectance Band I3" coordinates: [longitude_375, latitude_375] units: '%' - _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M01: name: surf_refl_M01 @@ -227,7 +224,6 @@ datasets: file_key: "750m Surface Reflectance Band M1" coordinates: [longitude_750, latitude_750] units: '%' - _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M02: name: surf_refl_M02 @@ -237,7 +233,6 @@ datasets: file_key: "750m Surface Reflectance Band M2" coordinates: [longitude_750, latitude_750] units: '%' - _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M03: name: surf_refl_M03 @@ -247,7 +242,6 @@ datasets: file_key: "750m Surface Reflectance Band M3" coordinates: [longitude_750, latitude_750] units: '%' - _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M04: name: surf_refl_M04 @@ -257,7 +251,6 @@ datasets: file_key: "750m Surface Reflectance Band M4" coordinates: [longitude_750, latitude_750] units: '%' - _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M05: name: surf_refl_M05 @@ -267,7 +260,6 @@ datasets: file_key: "750m Surface Reflectance Band M5" coordinates: [longitude_750, latitude_750] units: '%' - _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M06: name: surf_refl_M06 @@ -277,7 +269,6 @@ datasets: file_key: "750m Surface Reflectance Band M6" coordinates: [longitude_750, latitude_750] units: '%' - _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M07: name: surf_refl_M07 @@ -287,7 +278,6 @@ datasets: file_key: "750m Surface Reflectance Band M7" coordinates: [longitude_750, latitude_750] units: '%' - _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M08: name: surf_refl_M08 @@ -297,7 +287,6 @@ datasets: file_key: "750m Surface Reflectance Band M8" coordinates: [longitude_750, latitude_750] units: '%' - _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_M10: name: surf_refl_M10 @@ -307,7 +296,6 @@ datasets: file_key: "750m Surface Reflectance Band M10" coordinates: [longitude_750, latitude_750] units: '%' - _FillValue: -9999 standard_name: "surface_bidirectional_reflectance" surf_refl_qf1: name: surf_refl_qf1 From c6f6cc616c1b3d74d4dcc6619d822677703eb752 
Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 24 Jul 2023 13:12:52 -0500 Subject: [PATCH 0374/1416] Add missing M11 surface reflectance product and Polar2Grid false_color_surf product --- satpy/etc/composites/viirs.yaml | 16 ++++++++++++++-- satpy/etc/readers/viirs_edr.yaml | 9 +++++++++ 2 files changed, 23 insertions(+), 2 deletions(-) diff --git a/satpy/etc/composites/viirs.yaml b/satpy/etc/composites/viirs.yaml index 6fcdad4e1e..bebf6c5833 100644 --- a/satpy/etc/composites/viirs.yaml +++ b/satpy/etc/composites/viirs.yaml @@ -231,10 +231,10 @@ composites: - name: M05 modifiers: [sunz_corrected, rayleigh_corrected] optional_prerequisites: - - name: I01 + - name: I02 modifiers: [sunz_corrected_iband, rayleigh_corrected_iband] standard_name: false_color - high_resolution_band: blue + high_resolution_band: green fire_temperature: # CIRA: Original VIIRS @@ -322,6 +322,18 @@ composites: - name: surf_refl_M05 standard_name: natural_color + false_color_surf: + compositor: !!python/name:satpy.composites.RatioSharpenedRGB + prerequisites: + - name: surf_refl_M11 + - name: surf_refl_M07 + - name: surf_refl_M05 + optional_prerequisites: + - name: surf_refl_I02 + standard_name: false_color + high_resolution_band: green + + true_color_lowres_surf: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 645c22a898..953c508660 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -297,6 +297,15 @@ datasets: coordinates: [longitude_750, latitude_750] units: '%' standard_name: "surface_bidirectional_reflectance" + surf_refl_M11: + name: surf_refl_M11 + resolution: 750 + wavelength: [2.225, 2.250, 2.275] + file_type: [jrr_surfref_product] + file_key: "750m Surface Reflectance Band M11" + coordinates: [longitude_750, latitude_750] + units: '%' + standard_name: "surface_bidirectional_reflectance" surf_refl_qf1: name: surf_refl_qf1 resolution: 750 From 25b4f51207fb055f7d37ac63be7adbecb7a0e753 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 24 Jul 2023 14:03:48 -0500 Subject: [PATCH 0375/1416] Add a few cloud height VIIRS EDR products --- satpy/etc/readers/viirs_edr.yaml | 29 ++++++++- satpy/readers/viirs_edr.py | 4 +- satpy/tests/reader_tests/test_viirs_edr.py | 69 +++++++++++++++++++--- 3 files changed, 90 insertions(+), 12 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 953c508660..8bcc094b3b 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -23,6 +23,11 @@ file_types: variable_prefix: "" file_patterns: - 'SurfRefl_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + jrr_cloudheight_product: + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler + variable_prefix: "" + file_patterns: + - 'JRR-CloudHeight_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' datasets: @@ -30,14 +35,14 @@ datasets: longitude: name: longitude standard_name: longitude - file_type: [jrr_cloudmask, jrr_aerosol_product] + file_type: [jrr_cloudmask, jrr_aerosol_product, jrr_cloudheight_product] file_key: "Longitude" units: 'degrees_east' resolution: 750 latitude: name: latitude standard_name: latitude - file_type: [jrr_cloudmask, jrr_aerosol_product] + file_type: [jrr_cloudmask, jrr_aerosol_product, jrr_cloudheight_product] file_key: "Latitude" units: 'degrees_north' 
resolution: 750 @@ -382,3 +387,23 @@ datasets: units: "1" valid_range: [-1.0, 1.0] standard_name: "normalized_difference_vegetation_index" + + # Cloud Height products + cloud_top_temp: + name: CldTopTemp + file_key: "CldTopTemp" + file_type: [jrr_cloudheight_product] + resolution: 750 + coordinates: [longitude, latitude] + cloud_top_height: + name: CldTopHght + file_key: "CldTopHght" + file_type: [jrr_cloudheight_product] + resolution: 750 + coordinates: [longitude, latitude] + cloud_top_pressure: + name: CldTopPres + file_key: "CldTopPres" + file_type: [jrr_cloudheight_product] + resolution: 750 + coordinates: [longitude, latitude] diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 64edbd48c1..6dececa9c1 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -69,13 +69,13 @@ def __init__(self, filename, filename_info, filetype_info): mask_and_scale=True, chunks={ 'Columns': -1, - 'Rows': row_chunks_i, + 'Rows': row_chunks_m, 'Along_Scan_375m': -1, 'Along_Track_375m': row_chunks_i, 'Along_Scan_750m': -1, 'Along_Track_750m': row_chunks_m, }) - if 'columns' in self.nc.dims: + if 'Columns' in self.nc.dims: self.nc = self.nc.rename({'Columns': 'x', 'Rows': 'y'}) elif 'Along_Track_375m' in self.nc.dims: self.nc = self.nc.rename({'Along_Scan_375m': 'x', 'Along_Track_375m': 'y'}) diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 6a40778505..459f058495 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -91,14 +91,6 @@ def _create_surface_reflectance_file(tmp_path_factory, include_veg_indices: bool return file_path -def _create_fake_dataset(vars_dict: dict[str, xr.DataArray]) -> xr.Dataset: - ds = xr.Dataset( - vars_dict, - attrs={} - ) - return ds - - def _create_surf_refl_variables() -> dict[str, xr.DataArray]: dim_y_750 = "Along_Track_750m" dim_x_750 = "Along_Scan_750m" @@ -172,6 +164,51 @@ def _create_veg_index_variables() -> dict[str, xr.DataArray]: return data_arrs +@pytest.fixture(scope="module") +def cloud_height_file(tmp_path_factory) -> Path: + """Generate fake CloudHeight VIIRS EDR file.""" + tmp_path = tmp_path_factory.mktemp("viirs_edr_tmp") + fn = f"JRR-CloudHeight_v3r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202307231023395.nc" + file_path = tmp_path / fn + ch_vars = _create_cloud_height_variables() + ds = _create_fake_dataset(ch_vars) + ds.to_netcdf(file_path) + return file_path + + +def _create_cloud_height_variables() -> dict[str, xr.DataArray]: + dims = ("Rows", "Columns") + + lon_attrs = {"standard_name": "longitude", "units": "degrees_east", "_FillValue": -999.9} + lat_attrs = {"standard_name": "latitude", "units": "degrees_north", "_FillValue": -999.9} + cont_attrs = {"units": "Kelvin", "_FillValue": -9999, "scale_factor": 0.0001, "add_offset": 0.0} + + m_data = np.random.random_sample((M_ROWS, M_COLS)).astype(np.float32) + data_arrs = { + "Longitude": xr.DataArray(m_data, dims=dims, attrs=lon_attrs), + "Latitude": xr.DataArray(m_data, dims=dims, attrs=lat_attrs), + } + for var_name in ("CldTopTemp", "CldTopHght", "CldTopPres"): + data_arrs[var_name] = xr.DataArray(m_data, dims=dims, attrs=cont_attrs) + for data_arr in data_arrs.values(): + if "_FillValue" in data_arr.attrs: + data_arr.encoding["_FillValue"] = data_arr.attrs.pop("_FillValue") + if "scale_factor" not in data_arr.attrs: + continue + data_arr.encoding["dtype"] = np.int16 + data_arr.encoding["scale_factor"] = 
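Besides the row-chunk correction, the rename fix matters because the files use capitalized ``Rows``/``Columns`` dimensions; the old lowercase ``'columns'`` check never matched, so these products were never renamed to ``y``/``x``. The corrected behavior in isolation::

    import numpy as np
    import xarray as xr

    nc = xr.Dataset({"CldTopHght": (("Rows", "Columns"), np.zeros((16, 3200), np.float32))})
    if "Columns" in nc.dims:  # case-sensitive, matching the file's dimension names
        nc = nc.rename({"Columns": "x", "Rows": "y"})
    print(nc["CldTopHght"].dims)  # ('y', 'x')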
data_arr.attrs.pop("scale_factor") + data_arr.encoding["add_offset"] = data_arr.attrs.pop("add_offset") + return data_arrs + + +def _create_fake_dataset(vars_dict: dict[str, xr.DataArray]) -> xr.Dataset: + ds = xr.Dataset( + vars_dict, + attrs={} + ) + return ds + + class TestVIIRSJRRReader: """Test the VIIRS JRR L2 reader.""" @@ -198,6 +235,17 @@ def test_get_dataset_surf_refl_with_veg_idx(self, surface_reflectance_with_veg_i _check_vi_data_arr(scn["EVI"]) _check_surf_refl_qf_data_arr(scn["surf_refl_qf1"]) + def test_get_dataset_cloud_height(self, cloud_height_file): + """Test datasets from cloud height files.""" + from satpy import Scene + bytes_in_m_row = 4 * 3200 + with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}): + scn = Scene(reader="viirs_edr", filenames=[cloud_height_file]) + scn.load(["CldTopTemp", "CldTopHght", "CldTopPres"]) + _check_cloud_height_data_arr(scn["CldTopTemp"]) + _check_cloud_height_data_arr(scn["CldTopHght"]) + _check_cloud_height_data_arr(scn["CldTopPres"]) + @pytest.mark.parametrize( ("data_file", "exp_available"), [ @@ -312,7 +331,7 @@ def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.floa assert data_arr.attrs["standard_name"] == "surface_bidirectional_reflectance" +def _check_cloud_height_data_arr(data_arr: xr.DataArray) -> None: + _array_checks(data_arr) + _shared_metadata_checks(data_arr) + From c0b6e83c7316b1d9e78d96d874012148c8f667cf Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 24 Jul 2023 15:08:18 -0500 Subject: [PATCH 0376/1416] Add VIIRS EDR AOD product --- satpy/etc/readers/viirs_edr.yaml | 17 ++++++- satpy/tests/reader_tests/test_viirs_edr.py | 59 ++++++++++++++-------- 2 files changed, 54 insertions(+), 22 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 8bcc094b3b..0004751f5d 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -28,6 +28,11 @@ file_types: variable_prefix: "" file_patterns: - 'JRR-CloudHeight_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + jrr_aod_product: + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler + variable_prefix: "" + file_patterns: + - 'JRR-AOD_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' datasets: @@ -35,14 +40,14 @@ datasets: longitude: name: longitude standard_name: longitude - file_type: [jrr_cloudmask, jrr_aerosol_product, jrr_cloudheight_product] + file_type: [jrr_cloudmask, jrr_aerosol_product, jrr_cloudheight_product, jrr_aod_product] file_key: "Longitude" units: 'degrees_east' resolution: 750 latitude: name: latitude standard_name: latitude - file_type: [jrr_cloudmask, jrr_aerosol_product, jrr_cloudheight_product] + file_type: [jrr_cloudmask, jrr_aerosol_product, jrr_cloudheight_product, jrr_aod_product] file_key: "Latitude" units: 'degrees_north' resolution: 750 @@ -407,3 +412,11 @@ datasets: file_type: [jrr_cloudheight_product] resolution: 750 coordinates: [longitude, latitude] + + # Aerosol Optical Depth products + aod550: + name: AOD550 + file_key: AOD550 + file_type: [jrr_aod_product] + resolution: 750 + coordinates: [longitude, latitude] diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index
459f058495..53d24e7d11 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -24,6 +24,7 @@ import shutil from datetime import datetime from pathlib import Path +from typing import Iterable import dask import dask.array as da import numpy as np @@ -80,15 +81,11 @@ def surface_reflectance_with_veg_indices_file(tmp_path_factory) -> Path: def _create_surface_reflectance_file(tmp_path_factory, include_veg_indices: bool = False) -> Path: - tmp_path = tmp_path_factory.mktemp("viirs_edr_tmp") fn = f"SurfRefl_v1r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202305302025590.nc" - file_path = tmp_path / fn sr_vars = _create_surf_refl_variables() if include_veg_indices: sr_vars.update(_create_veg_index_variables()) - ds = _create_fake_dataset(sr_vars) - ds.to_netcdf(file_path) - return file_path + return _create_fake_file(tmp_path_factory, fn, sr_vars) def _create_surf_refl_variables() -> dict[str, xr.DataArray]: @@ -167,16 +164,24 @@ def _create_veg_index_variables() -> dict[str, xr.DataArray]: @pytest.fixture(scope="module") def cloud_height_file(tmp_path_factory) -> Path: """Generate fake CloudHeight VIIRS EDR file.""" - tmp_path = tmp_path_factory.mktemp("viirs_edr_tmp") fn = f"JRR-CloudHeight_v3r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202307231023395.nc" - file_path = tmp_path / fn - ch_vars = _create_cloud_height_variables() - ds = _create_fake_dataset(ch_vars) - ds.to_netcdf(file_path) - return file_path + data_vars = _create_continuous_variables( + ("CldTopTemp", "CldTopHght", "CldTopPres") + ) + return _create_fake_file(tmp_path_factory, fn, data_vars) + + +@pytest.fixture(scope="module") +def aod_file(tmp_path_factory) -> Path: + """Generate fake AOD VIIRS EDR file.""" + fn = f"JRR-AOD_v3r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202307231023395.nc" + data_vars = _create_continuous_variables( + ("AOD550",) + ) + return _create_fake_file(tmp_path_factory, fn, data_vars) -def _create_cloud_height_variables() -> dict[str, xr.DataArray]: +def _create_continuous_variables(var_names: Iterable[str]) -> dict[str, xr.DataArray]: dims = ("Rows", "Columns") lon_attrs = {"standard_name": "longitude", "units": "degrees_east", "_FillValue": -999.9} @@ -188,7 +193,7 @@ def _create_cloud_height_variables() ... { "Longitude": xr.DataArray(m_data, dims=dims, attrs=lon_attrs), "Latitude": xr.DataArray(m_data, dims=dims, attrs=lat_attrs), } - for var_name in ("CldTopTemp", "CldTopHght", "CldTopPres"): + for var_name in var_names: data_arrs[var_name] = xr.DataArray(m_data, dims=dims, attrs=cont_attrs) for data_arr in data_arrs.values(): if "_FillValue" in data_arr.attrs: @@ -201,6 +206,14 @@ ... return data_arrs +def _create_fake_file(tmp_path_factory, filename: str, data_arrs: dict[str, xr.DataArray]) -> Path: + tmp_path = tmp_path_factory.mktemp("viirs_edr_tmp") + file_path = tmp_path / filename + ds = _create_fake_dataset(data_arrs) + ds.to_netcdf(file_path) + return file_path + + def _create_fake_dataset(vars_dict: dict[str, xr.DataArray]) -> xr.Dataset: ds = xr.Dataset( vars_dict, @@ -235,16 +248,22 @@ def test_get_dataset_surf_refl_with_veg_idx(self, surface_reflectance_with_veg_i _check_vi_data_arr(scn["EVI"]) _check_surf_refl_qf_data_arr(scn["surf_refl_qf1"]) - def test_get_dataset_cloud_height(self, cloud_height_file): + @pytest.mark.parametrize( + ("var_names", "data_file"), + [ + (("CldTopTemp", "CldTopHght", "CldTopPres"),
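Placing ``dtype``/``scale_factor``/``add_offset`` in ``encoding`` makes the fixtures write scaled integers, like the real products, which decode back to floats on read. A self-contained round trip with illustrative values and file name::

    import numpy as np
    import xarray as xr

    temp = xr.DataArray(np.array([[280.0, 285.5]], dtype=np.float32),
                        dims=("Rows", "Columns"))
    temp.encoding.update({"dtype": np.int16, "scale_factor": 0.01,
                          "add_offset": 250.0, "_FillValue": -9999})
    xr.Dataset({"CldTopTemp": temp}).to_netcdf("fake_cloudheight.nc")
    back = xr.open_dataset("fake_cloudheight.nc")["CldTopTemp"]
    print(back.values)  # [[280.  285.5]] -- unscaled automatically on read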
lazy_fixture("cloud_height_file")), + (("AOD550",), lazy_fixture("aod_file")), + ] + ) + def test_get_dataset_generic(self, var_names, data_file): """Test datasets from cloud height files.""" from satpy import Scene bytes_in_m_row = 4 * 3200 with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}): - scn = Scene(reader="viirs_edr", filenames=[cloud_height_file]) - scn.load(["CldTopTemp", "CldTopHght", "CldTopPres"]) - _check_cloud_height_data_arr(scn["CldTopTemp"]) - _check_cloud_height_data_arr(scn["CldTopHght"]) - _check_cloud_height_data_arr(scn["CldTopPres"]) + scn = Scene(reader="viirs_edr", filenames=[data_file]) + scn.load(var_names) + for var_name in var_names: + _check_continuous_data_arr(scn[var_name]) @pytest.mark.parametrize( ("data_file", "exp_available"), @@ -312,7 +331,7 @@ def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.floa assert data_arr.attrs["standard_name"] == "surface_bidirectional_reflectance" -def _check_cloud_height_data_arr(data_arr: xr.DataArray) -> None: +def _check_continuous_data_arr(data_arr: xr.DataArray) -> None: _array_checks(data_arr) _shared_metadata_checks(data_arr) From 31988b63ae75427fed7b3e96e05c33dcf10a276b Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 26 Jul 2023 06:55:37 -0500 Subject: [PATCH 0377/1416] Update viirs_edr module docstring --- satpy/readers/viirs_edr.py | 33 +++++++++++++++++++-------------- 1 file changed, 19 insertions(+), 14 deletions(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 6dececa9c1..58458b925c 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2022 Satpy developers +# Copyright (c) 2022-2023 Satpy developers # # This file is part of satpy. # @@ -15,30 +15,35 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . -"""VIIRS NOAA enterprise L2 product reader. +"""VIIRS NOAA enterprise EDR product reader. This module defines the :class:`VIIRSJRRFileHandler` file handler, to -be used for reading VIIRS Level 2 products generated by the NOAA enterprise -suite, which are downloadable via NOAA CLASS. -A wide variety of such products exist and, at present, only three are -supported here, showing example filenames: +be used for reading VIIRS EDR products generated by the NOAA enterprise +suite, which are downloadable via NOAA CLASS or on NOAA's AWS buckets. + +A wide variety of such products exist and, at present, only a subset are supported. 
+ - Cloud mask: JRR-CloudMask_v2r3_j01_s202112250807275_e202112250808520_c202112250837300.nc - - Aerosol properties: JRR-ADP_v2r3_j01_s202112250807275_e202112250808520_c202112250839550.nc + - Cloud products: JRR-CloudHeight_v2r3_j01_s202112250807275_e202112250808520_c202112250837300.nc + - Aerosol detection: JRR-ADP_v2r3_j01_s202112250807275_e202112250808520_c202112250839550.nc + - Aerosol optical depth: JRR-AOD_v2r3_j01_s202112250807275_e202112250808520_c202112250839550.nc - Surface reflectance: SurfRefl_v1r1_j01_s202112250807275_e202112250808520_c202112250845080.nc -All products use the same base reader `viirs_l2_jrr` and can be read through satpy with:: + +All products use the same base reader ``viirs_edr`` and can be read through satpy with:: import satpy import glob filenames = glob.glob('JRR-ADP*.nc') - scene = satpy.Scene(filenames, - reader='viirs_l2_jrr') + scene = satpy.Scene(filenames, reader='viirs_edr') scene.load(['smoke_concentration']) -NOTE: -Multiple products contain datasets with the same name! For example, both the cloud mask -and aerosol files contain a cloud mask, but these are not identical. -For clarity, the aerosol file cloudmask is named `cloud_mask_adp` in this reader. +.. note:: + + Multiple products contain datasets with the same name! For example, both the cloud mask + and aerosol detection files contain a cloud mask, but these are not identical. + For clarity, the aerosol file cloudmask is named `cloud_mask_adp` in this reader. + """ From 8957d02c18065dd6bfaefe79e32f0c790a93d484 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 27 Jul 2023 09:22:00 -0500 Subject: [PATCH 0378/1416] Add LST support --- satpy/etc/readers/viirs_edr.yaml | 21 +++++++---- satpy/readers/viirs_edr.py | 16 +++++++++ satpy/tests/reader_tests/test_viirs_edr.py | 41 ++++++++++++++++++---- 3 files changed, 65 insertions(+), 13 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 0004751f5d..0aa39b69db 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -10,29 +10,28 @@ reader: file_types: jrr_cloudmask: file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler - variable_prefix: "" file_patterns: - 'JRR-CloudMask_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' jrr_aerosol_product: file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler - variable_prefix: "" file_patterns: - 'JRR-ADP_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' jrr_surfref_product: file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSSurfaceReflectanceWithVIHandler - variable_prefix: "" file_patterns: - 'SurfRefl_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' jrr_cloudheight_product: file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler - variable_prefix: "" file_patterns: - 'JRR-CloudHeight_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' jrr_aod_product: file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler - variable_prefix: "" file_patterns: - 'JRR-AOD_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + jrr_lst_product: + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSLSTHandler + file_patterns: + - 
'LST_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' datasets: @@ -40,14 +39,14 @@ datasets: longitude: name: longitude standard_name: longitude - file_type: [jrr_cloudmask, jrr_aerosol_product, jrr_cloudheight_product, jrr_aod_product] + file_type: [jrr_cloudmask, jrr_aerosol_product, jrr_cloudheight_product, jrr_aod_product, jrr_lst_product] file_key: "Longitude" units: 'degrees_east' resolution: 750 latitude: name: latitude standard_name: latitude - file_type: [jrr_cloudmask, jrr_aerosol_product, jrr_cloudheight_product, jrr_aod_product] + file_type: [jrr_cloudmask, jrr_aerosol_product, jrr_cloudheight_product, jrr_aod_product, jrr_lst_product] file_key: "Latitude" units: 'degrees_north' resolution: 750 @@ -420,3 +419,11 @@ datasets: file_type: [jrr_aod_product] resolution: 750 coordinates: [longitude, latitude] + + # Land Surface Temperature + vlst: + name: VLST + file_key: VLST + file_type: [jrr_lst_product] + resolution: 750 + coordinates: [longitude, latitude] diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 58458b925c..d90b0359f6 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -28,6 +28,7 @@ - Aerosol detection: JRR-ADP_v2r3_j01_s202112250807275_e202112250808520_c202112250839550.nc - Aerosol optical depth: JRR-AOD_v2r3_j01_s202112250807275_e202112250808520_c202112250839550.nc - Surface reflectance: SurfRefl_v1r1_j01_s202112250807275_e202112250808520_c202112250845080.nc + - Land Surface Temperature: LST_v2r0_npp_s202307241724558_e202307241726200_c202307241854058.nc All products use the same base reader ``viirs_edr`` and can be read through satpy with:: @@ -237,3 +238,18 @@ def _get_veg_index_good_mask(self) -> xr.DataArray: bad_mask_iband_dask = bad_mask.data.repeat(2, axis=1).repeat(2, axis=0) good_mask_iband = xr.DataArray(~bad_mask_iband_dask, dims=qf1.dims) return good_mask_iband + + +class VIIRSLSTHandler(VIIRSJRRFileHandler): + """File handler to handle LST file scale factor and offset weirdness.""" + + def __init__(self, *args, **kwargs): + """Initialize the file handler and unscale necessary variables.""" + super().__init__(*args, **kwargs) + + # Update variables with external scale factor and offset + lst_data_arr = self.nc["VLST"] + scale_factor = self.nc["LST_ScaleFact"] + add_offset = self.nc["LST_Offset"] + lst_data_arr.data = lst_data_arr.data * scale_factor.data + add_offset.data + self.nc["VLST"] = lst_data_arr diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 53d24e7d11..cc06e16647 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -33,6 +33,7 @@ import pytest import xarray as xr from pyresample import SwathDefinition +from pytest import TempPathFactory from pytest_lazyfixture import lazy_fixture I_COLS = 6400 @@ -69,18 +70,18 @@ @pytest.fixture(scope="module") -def surface_reflectance_file(tmp_path_factory) -> Path: +def surface_reflectance_file(tmp_path_factory: TempPathFactory) -> Path: """Generate fake surface reflectance EDR file.""" return _create_surface_reflectance_file(tmp_path_factory, include_veg_indices=False) @pytest.fixture(scope="module") -def surface_reflectance_with_veg_indices_file(tmp_path_factory) -> Path: +def surface_reflectance_with_veg_indices_file(tmp_path_factory: TempPathFactory) -> Path: """Generate fake surface reflectance EDR file with vegetation indexes included.""" return 
_create_surface_reflectance_file(tmp_path_factory, include_veg_indices=True)


-def _create_surface_reflectance_file(tmp_path_factory, include_veg_indices: bool = False) -> Path:
+def _create_surface_reflectance_file(tmp_path_factory: TempPathFactory, include_veg_indices: bool = False) -> Path:
     fn = f"SurfRefl_v1r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202305302025590.nc"
     sr_vars = _create_surf_refl_variables()
     if include_veg_indices:
@@ -162,7 +163,7 @@ def _create_veg_index_variables() -> dict[str, xr.DataArray]:

 @pytest.fixture(scope="module")
-def cloud_height_file(tmp_path_factory) -> Path:
+def cloud_height_file(tmp_path_factory: TempPathFactory) -> Path:
     """Generate fake CloudHeight VIIRS EDR file."""
     fn = f"JRR-CloudHeight_v3r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202307231023395.nc"
     data_vars = _create_continuous_variables(
@@ -172,7 +173,7 @@ def cloud_height_file(tmp_path_factory) -> Path:

 @pytest.fixture(scope="module")
-def aod_file(tmp_path_factory) -> Path:
+def aod_file(tmp_path_factory: TempPathFactory) -> Path:
     """Generate fake AOD VIIRS EDR file."""
     fn = f"JRR-AOD_v3r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202307231023395.nc"
     data_vars = _create_continuous_variables(
@@ -181,6 +182,27 @@ def aod_file(tmp_path_factory) -> Path:
     return _create_fake_file(tmp_path_factory, fn, data_vars)


+@pytest.fixture(scope="module")
+def lst_file(tmp_path_factory: TempPathFactory) -> Path:
+    """Generate fake VLST EDR file."""
+    fn = f"LST_v2r0_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202307241854058.nc"
+    data_vars = _create_lst_variables()
+    return _create_fake_file(tmp_path_factory, fn, data_vars)
+
+
+def _create_lst_variables() -> dict[str, xr.DataArray]:
+    data_vars = _create_continuous_variables(("VLST",))
+
+    # VLST scale factors
+    data_vars["VLST"].data = (data_vars["VLST"].data / 0.0001).astype(np.int16)
+    data_vars["VLST"].encoding.pop("scale_factor")
+    data_vars["VLST"].encoding.pop("add_offset")
+    data_vars["LST_ScaleFact"] = xr.DataArray(np.float32(0.0001))
+    data_vars["LST_Offset"] = xr.DataArray(np.float32(0.0))
+
+    return data_vars
+
+
 def _create_continuous_variables(var_names: Iterable[str]) -> dict[str, xr.DataArray]:
     dims = ("Rows", "Columns")

@@ -206,7 +228,7 @@ def _create_continuous_variables(var_names: Iterable[str]) -> dict[str, xr.DataA
     return data_arrs


-def _create_fake_file(tmp_path_factory, filename: str, data_arrs: dict[str, xr.DataArray]) -> Path:
+def _create_fake_file(tmp_path_factory: TempPathFactory, filename: str, data_arrs: dict[str, xr.DataArray]) -> Path:
     tmp_path = tmp_path_factory.mktemp("viirs_edr_tmp")
     file_path = tmp_path / filename
     ds = _create_fake_dataset(data_arrs)
@@ -253,6 +275,7 @@ def test_get_dataset_surf_refl_with_veg_idx(self, surface_reflectance_with_veg_i
         [
             (("CldTopTemp", "CldTopHght", "CldTopPres"), lazy_fixture("cloud_height_file")),
             (("AOD550",), lazy_fixture("aod_file")),
+            (("VLST",), lazy_fixture("lst_file")),
         ]
     )
     def test_get_dataset_generic(self, var_names, data_file):
@@ -333,6 +356,12 @@ def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.floa

 def _check_continuous_data_arr(data_arr: xr.DataArray) -> None:
     _array_checks(data_arr)
+
+    # random sample data should only fall between 0 and 1 if the scale factor/offset were applied
+    data = data_arr.data.compute()
+    assert not (data < 0).any()
+    assert not (data > 1).any()
+
     _shared_metadata_checks(data_arr)


From 06cee8d8b77ae0135212540af5c6fdc21cf85716 Mon Sep 17 00:00:00 2001
From: youva Aoun
Date: Fri, 28 Jul 2023 15:22:26 +0000
Subject: [PATCH 0379/1416] Fix mismatch in shape for lat/lon when reading AMV BUFR

---
 satpy/etc/readers/seviri_l2_bufr.yaml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/satpy/etc/readers/seviri_l2_bufr.yaml b/satpy/etc/readers/seviri_l2_bufr.yaml
index 1c11927707..e0c51c4370 100644
--- a/satpy/etc/readers/seviri_l2_bufr.yaml
+++ b/satpy/etc/readers/seviri_l2_bufr.yaml
@@ -62,7 +62,7 @@ datasets:

   latitude:
     name: latitude
-    key: 'latitude'
+    key: '#1#latitude'
     long_name: Latitude
     standard_name: latitude
     resolution: [48006.450653072,9001.209497451,72009.675979608]
@@ -72,7 +72,7 @@

   longitude:
     name: longitude
-    key: 'longitude'
+    key: '#1#longitude'
     resolution: [48006.450653072,9001.209497451,72009.675979608]
     file_type: [seviri_l2_bufr_asr,seviri_l2_bufr_cla,seviri_l2_bufr_csr,seviri_l2_bufr_gii,seviri_l2_bufr_thu,seviri_l2_bufr_toz,seviri_l2_bufr_amv]
     long_name: Longitude

From e8f4629905639202bfdc4b815316236aa6dcbe11 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Tue, 1 Aug 2023 06:42:31 +0000
Subject: [PATCH 0380/1416] [pre-commit.ci] pre-commit autoupdate
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

updates:
- [github.com/PyCQA/flake8: 6.0.0 → 6.1.0](https://github.com/PyCQA/flake8/compare/6.0.0...6.1.0)

---
 .pre-commit-config.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 995f3035c4..d100d73ca7 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -2,7 +2,7 @@ exclude: '^$'
 fail_fast: false
 repos:
   - repo: https://github.com/PyCQA/flake8
-    rev: 6.0.0
+    rev: 6.1.0
    hooks:
      - id: flake8
        additional_dependencies: [flake8-docstrings, flake8-debugger, flake8-bugbear, mccabe]

From 5ddc035b2f610e16b84156f95aa8e926a9dfb094 Mon Sep 17 00:00:00 2001
From: David Hoese
Date: Tue, 1 Aug 2023 06:54:36 -0500
Subject: [PATCH 0381/1416] Fix flake8 whitespace issues

---
 satpy/tests/scene_tests/test_load.py         | 2 +-
 satpy/tests/test_writers.py                  | 4 ++--
 satpy/tests/writer_tests/test_awips_tiled.py | 2 +-
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/satpy/tests/scene_tests/test_load.py b/satpy/tests/scene_tests/test_load.py
index 2c44995076..6eefbc0080 100644
--- a/satpy/tests/scene_tests/test_load.py
+++ b/satpy/tests/scene_tests/test_load.py
@@ -501,7 +501,7 @@ def test_load_dataset_after_composite(self):
         from satpy.tests.utils import FakeCompositor
         load_mock = spy_decorator(FileYAMLReader.load)
         comp_mock = spy_decorator(FakeCompositor.__call__)
-        with mock.patch.object(FileYAMLReader, 'load', load_mock),\
+        with mock.patch.object(FileYAMLReader, 'load', load_mock), \
                 mock.patch.object(FakeCompositor, '__call__', comp_mock):
             lmock = load_mock.mock
             scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
diff --git a/satpy/tests/test_writers.py b/satpy/tests/test_writers.py
index 032481e830..986687b0d6 100644
--- a/satpy/tests/test_writers.py
+++ b/satpy/tests/test_writers.py
@@ -779,8 +779,8 @@ def setUp(self):
                 'extend': False, 'width': 1670, 'height': 110,
                 'tick_marks': 5, 'minor_tick_marks': 1,
-                'cursor': [0, 0], 'bg':'white',
-                'title':'TEST TITLE OF SCALE',
+                'cursor': [0, 0], 'bg': 'white',
+                'title': 'TEST TITLE OF SCALE',
                 'fontsize': 110, 'align': 'cc'
             }}
         ]
diff --git a/satpy/tests/writer_tests/test_awips_tiled.py b/satpy/tests/writer_tests/test_awips_tiled.py
index ac75ed4069..a47552a708 100644
---
a/satpy/tests/writer_tests/test_awips_tiled.py +++ b/satpy/tests/writer_tests/test_awips_tiled.py @@ -236,7 +236,7 @@ def test_basic_numbered_tiles(self, tile_count, tile_size, tmp_path): ) should_error = tile_count is None and tile_size is None if should_error: - with dask.config.set(scheduler=CustomScheduler(0)),\ + with dask.config.set(scheduler=CustomScheduler(0)), \ pytest.raises(ValueError, match=r'Either.*tile_count.*'): w.save_datasets([input_data_arr], **save_kwargs) else: From 15f5e86c31862919630b1b5ac43b1fa8262e3f1c Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Wed, 2 Aug 2023 08:22:15 +0000 Subject: [PATCH 0382/1416] Update test with fix for the key for the mockup bufr for lat/lon --- satpy/tests/reader_tests/test_seviri_l2_bufr.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_l2_bufr.py b/satpy/tests/reader_tests/test_seviri_l2_bufr.py index 89f973fd45..3578645e5b 100644 --- a/satpy/tests/reader_tests/test_seviri_l2_bufr.py +++ b/satpy/tests/reader_tests/test_seviri_l2_bufr.py @@ -51,13 +51,13 @@ DATASET_INFO_LAT = { 'name': 'latitude', - 'key': 'latitude', + 'key': '#1#latitude', 'fill_value': -1.e+100 } DATASET_INFO_LON = { 'name': 'longitude', - 'key': 'longitude', + 'key': '#1#longitude', 'fill_value': -1.e+100 } @@ -128,10 +128,10 @@ def __init__(self, filename, with_adef=False, rect_lon='default'): # write the bufr test data twice as we want to read in and then concatenate the data in the reader # 55 id corresponds to METEOSAT 8` ec.codes_set(self.buf1, 'satelliteIdentifier', 56) - ec.codes_set_array(self.buf1, 'latitude', LAT) - ec.codes_set_array(self.buf1, 'latitude', LAT) - ec.codes_set_array(self.buf1, 'longitude', LON) - ec.codes_set_array(self.buf1, 'longitude', LON) + ec.codes_set_array(self.buf1, '#1#latitude', LAT) + ec.codes_set_array(self.buf1, '#1#latitude', LAT) + ec.codes_set_array(self.buf1, '#1#longitude', LON) + ec.codes_set_array(self.buf1, '#1#longitude', LON) ec.codes_set_array(self.buf1, '#1#brightnessTemperature', DATA) ec.codes_set_array(self.buf1, '#1#brightnessTemperature', DATA) From db09792900673e45c5b5fa9f11cef12f8a757a90 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 9 Aug 2023 10:17:33 -0500 Subject: [PATCH 0383/1416] Restructure LST scaling to be more flexible --- satpy/readers/viirs_edr.py | 24 +++++++++++++++++++----- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index d90b0359f6..1c7ba034ef 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -243,13 +243,27 @@ def _get_veg_index_good_mask(self) -> xr.DataArray: class VIIRSLSTHandler(VIIRSJRRFileHandler): """File handler to handle LST file scale factor and offset weirdness.""" + _manual_scalings = { + "VLST": ("LST_ScaleFact", "LST_Offset"), + "emis_m15": ("LSE_ScaleFact", "LSE_Offset"), + "emis_m16": ("LSE_ScaleFact", "LSE_Offset"), + "emis_bbe": ("LSE_ScaleFact", "LSE_Offset"), + "Satellite_Azimuth_Angle": ("AZI_ScaleFact", "AZI_Offset"), + } + def __init__(self, *args, **kwargs): """Initialize the file handler and unscale necessary variables.""" super().__init__(*args, **kwargs) # Update variables with external scale factor and offset - lst_data_arr = self.nc["VLST"] - scale_factor = self.nc["LST_ScaleFact"] - add_offset = self.nc["LST_Offset"] - lst_data_arr.data = lst_data_arr.data * scale_factor.data + add_offset.data - self.nc["VLST"] = lst_data_arr + self._scale_data() + + def _scale_data(self): + for 
var_name in list(self.nc.variables.keys()): + if var_name not in self._manual_scalings: + continue + data_arr = self.nc[var_name] + scale_factor = self.nc[self._manual_scalings[var_name][0]] + add_offset = self.nc[self._manual_scalings[var_name][1]] + data_arr.data = data_arr.data * scale_factor.data + add_offset.data + self.nc[var_name] = data_arr From 4e56372d4605ec486b240adc19b54dd2349e22fe Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 9 Aug 2023 13:03:37 -0500 Subject: [PATCH 0384/1416] Switch "viirs_edr" reader to dynamic variable loading --- satpy/etc/readers/viirs_edr.yaml | 240 ++++----------------- satpy/readers/viirs_edr.py | 44 +++- satpy/tests/reader_tests/test_viirs_edr.py | 5 +- 3 files changed, 84 insertions(+), 205 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 0aa39b69db..4f33bcc184 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -12,197 +12,71 @@ file_types: file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler file_patterns: - 'JRR-CloudMask_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' - jrr_aerosol_product: + jrr_aerosol: file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler file_patterns: - 'JRR-ADP_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' - jrr_surfref_product: + jrr_surfref: file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSSurfaceReflectanceWithVIHandler file_patterns: - 'SurfRefl_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' - jrr_cloudheight_product: + jrr_cloudheight: file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler file_patterns: - 'JRR-CloudHeight_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' - jrr_aod_product: + jrr_aod: file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler file_patterns: - 'JRR-AOD_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' - jrr_lst_product: + jrr_lst: file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSLSTHandler file_patterns: - 'LST_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' datasets: - # Geolocation datasets - longitude: - name: longitude - standard_name: longitude - file_type: [jrr_cloudmask, jrr_aerosol_product, jrr_cloudheight_product, jrr_aod_product, jrr_lst_product] - file_key: "Longitude" - units: 'degrees_east' - resolution: 750 - latitude: - name: latitude - standard_name: latitude - file_type: [jrr_cloudmask, jrr_aerosol_product, jrr_cloudheight_product, jrr_aod_product, jrr_lst_product] - file_key: "Latitude" - units: 'degrees_north' - resolution: 750 + # NOTE: All non-surface reflectance file variables are dynamically loaded + # from the variable names inside the file. All 2D variables are + # supported and use the exact name of the variable in the NetCDF file. + # Files mentioned above in "file_types" are supported. + # To see a full list of loadable variables, create a Scene object with + # data files and run ``scn.available_dataset_names()``. 
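+  #
+  # A minimal sketch of that discovery step (the granule name below is
+  # illustrative only, not a real file):
+  #
+  #   from satpy import Scene
+  #   scn = Scene(reader="viirs_edr", filenames=["JRR-AOD_v3r2_npp_s202307231023000_e202307231024000_c202307231023395.nc"])
+  #   print(scn.available_dataset_names())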
+ + # Surface reflectance products longitude_375: name: longitude_375 standard_name: longitude - file_type: jrr_surfref_product + file_type: jrr_surfref file_key: "Longitude_at_375m_resolution" units: 'degrees_east' resolution: 375 latitude_375: name: latitude_375 standard_name: latitude - file_type: jrr_surfref_product + file_type: jrr_surfref file_key: "Latitude_at_375m_resolution" units: 'degrees_north' resolution: 375 longitude_750: name: longitude_750 standard_name: longitude - file_type: jrr_surfref_product + file_type: jrr_surfref file_key: "Longitude_at_750m_resolution" units: 'degrees_east' resolution: 750 latitude_750: name: latitude_750 standard_name: latitude - file_type: jrr_surfref_product + file_type: jrr_surfref file_key: "Latitude_at_750m_resolution" units: 'degrees_north' resolution: 750 - # Cloudmask product datasets - cloud_mask: - name: cloud_mask - resolution: 750 - file_type: jrr_cloudmask - file_key: "CloudMask" - coordinates: [longitude, latitude] - cloud_mask_binary: - name: cloud_mask_binary - resolution: 750 - file_type: [jrr_cloudmask] - file_key: "CloudMaskBinary" - coordinates: [longitude, latitude] - cloud_probability: - name: cloud_probability - resolution: 750 - file_type: [jrr_cloudmask] - file_key: "CloudProbability" - coordinates: [longitude, latitude] - dust_mask: - name: dust_mask - resolution: 750 - file_type: [jrr_cloudmask] - file_key: "Dust_Mask" - coordinates: [longitude, latitude] - fire_mask: - name: fire_mask - resolution: 750 - file_type: [jrr_cloudmask] - file_key: "Fire_Mask" - coordinates: [longitude, latitude] - smoke_mask: - name: smoke_mask - resolution: 750 - file_type: [jrr_cloudmask] - file_key: "Smoke_Mask" - coordinates: [longitude, latitude] - - # Aerosol detection product datasets - ash_mask: - name: ash_mask - resolution: 750 - file_type: [jrr_aerosol_product] - file_key: "Ash" - coordinates: [longitude, latitude] - cloud_mask_adp: - name: cloud_mask_adp - resolution: 750 - file_type: [jrr_aerosol_product] - file_key: "Cloud" - coordinates: [longitude, latitude] - dust_smoke_discrimination_index: - name: dust_smoke_discrimination_index - resolution: 750 - file_type: [jrr_aerosol_product] - file_key: "DSDI" - coordinates: [longitude, latitude] - nuc: - name: nuc - resolution: 750 - file_type: [jrr_aerosol_product] - file_key: "NUC" - coordinates: [longitude, latitude] - pqi1: - name: pqi1 - resolution: 750 - file_type: [jrr_aerosol_product] - file_key: "PQI1" - coordinates: [longitude, latitude] - pqi2: - name: pqi2 - resolution: 750 - file_type: [jrr_aerosol_product] - file_key: "PQI2" - coordinates: [longitude, latitude] - pqi3: - name: pqi3 - resolution: 750 - file_type: [jrr_aerosol_product] - file_key: "PQI3" - coordinates: [longitude, latitude] - pqi4: - name: pqi4 - resolution: 750 - file_type: [jrr_aerosol_product] - file_key: "PQI4" - coordinates: [longitude, latitude] - qcflag: - name: qcflag - resolution: 750 - file_type: [jrr_aerosol_product] - file_key: "QC_Flag" - coordinates: [longitude, latitude] - saai: - name: saai - resolution: 750 - file_type: [jrr_aerosol_product] - file_key: "SAAI" - coordinates: [longitude, latitude] - smoke: - name: smoke - resolution: 750 - file_type: [jrr_aerosol_product] - file_key: "Smoke" - coordinates: [longitude, latitude] - smoke_concentration: - name: smoke_concentration - resolution: 750 - file_type: [jrr_aerosol_product] - file_key: "SmokeCon" - coordinates: [longitude, latitude] - snow_ice: - name: snow_ice - resolution: 750 - file_type: [jrr_aerosol_product] - file_key: 
"SnowIce" - coordinates: [longitude, latitude] - - # Surface reflectance products surf_refl_I01: name: surf_refl_I01 resolution: 375 wavelength: [0.600, 0.640, 0.680] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "375m Surface Reflectance Band I1" coordinates: [longitude_375, latitude_375] units: '%' @@ -211,7 +85,7 @@ datasets: name: surf_refl_I02 resolution: 375 wavelength: [0.845, 0.865, 0.884] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "375m Surface Reflectance Band I2" coordinates: [longitude_375, latitude_375] units: '%' @@ -220,7 +94,7 @@ datasets: name: surf_refl_I03 resolution: 375 wavelength: [1.580, 1.610, 1.640] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "375m Surface Reflectance Band I3" coordinates: [longitude_375, latitude_375] units: '%' @@ -229,7 +103,7 @@ datasets: name: surf_refl_M01 resolution: 750 wavelength: [0.402, 0.412, 0.422] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "750m Surface Reflectance Band M1" coordinates: [longitude_750, latitude_750] units: '%' @@ -238,7 +112,7 @@ datasets: name: surf_refl_M02 resolution: 750 wavelength: [0.436, 0.445, 0.454] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "750m Surface Reflectance Band M2" coordinates: [longitude_750, latitude_750] units: '%' @@ -247,7 +121,7 @@ datasets: name: surf_refl_M03 resolution: 750 wavelength: [0.478, 0.488, 0.498] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "750m Surface Reflectance Band M3" coordinates: [longitude_750, latitude_750] units: '%' @@ -256,7 +130,7 @@ datasets: name: surf_refl_M04 resolution: 750 wavelength: [0.545, 0.555, 0.565] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "750m Surface Reflectance Band M4" coordinates: [longitude_750, latitude_750] units: '%' @@ -265,7 +139,7 @@ datasets: name: surf_refl_M05 resolution: 750 wavelength: [0.662, 0.672, 0.682] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "750m Surface Reflectance Band M5" coordinates: [longitude_750, latitude_750] units: '%' @@ -274,7 +148,7 @@ datasets: name: surf_refl_M06 resolution: 750 wavelength: [0.739, 0.746, 0.754] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "750m Surface Reflectance Band M6" coordinates: [longitude_750, latitude_750] units: '%' @@ -283,7 +157,7 @@ datasets: name: surf_refl_M07 resolution: 750 wavelength: [0.846, 0.865, 0.885] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "750m Surface Reflectance Band M7" coordinates: [longitude_750, latitude_750] units: '%' @@ -292,7 +166,7 @@ datasets: name: surf_refl_M08 resolution: 750 wavelength: [1.230, 1.240, 1.250] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "750m Surface Reflectance Band M8" coordinates: [longitude_750, latitude_750] units: '%' @@ -301,7 +175,7 @@ datasets: name: surf_refl_M10 resolution: 750 wavelength: [1.580, 1.610, 1.640] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "750m Surface Reflectance Band M10" coordinates: [longitude_750, latitude_750] units: '%' @@ -310,7 +184,7 @@ datasets: name: surf_refl_M11 resolution: 750 wavelength: [2.225, 2.250, 2.275] - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "750m Surface Reflectance Band M11" coordinates: [longitude_750, latitude_750] units: '%' @@ -318,7 +192,7 @@ datasets: surf_refl_qf1: name: surf_refl_qf1 resolution: 750 - file_type: 
[jrr_surfref_product] + file_type: [jrr_surfref] file_key: "QF1 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' @@ -326,7 +200,7 @@ datasets: surf_refl_qf2: name: surf_refl_qf2 resolution: 750 - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "QF2 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' @@ -334,7 +208,7 @@ datasets: surf_refl_qf3: name: surf_refl_qf3 resolution: 750 - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "QF3 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' @@ -342,7 +216,7 @@ datasets: surf_refl_qf4: name: surf_refl_qf4 resolution: 750 - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "QF4 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' @@ -350,7 +224,7 @@ datasets: surf_refl_qf5: name: surf_refl_qf5 resolution: 750 - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "QF5 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' @@ -358,7 +232,7 @@ datasets: surf_refl_qf6: name: surf_refl_qf6 resolution: 750 - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "QF6 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' @@ -366,7 +240,7 @@ datasets: surf_refl_qf7: name: surf_refl_qf7 resolution: 750 - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "QF7 Surface Reflectance" coordinates: [longitude_750, latitude_750] units: '1' @@ -376,7 +250,7 @@ datasets: NDVI: name: NDVI resolution: 375 - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "NDVI" coordinates: [longitude_375, latitude_375] units: "1" @@ -385,45 +259,9 @@ datasets: EVI: name: EVI resolution: 375 - file_type: [jrr_surfref_product] + file_type: [jrr_surfref] file_key: "EVI" coordinates: [longitude_375, latitude_375] units: "1" valid_range: [-1.0, 1.0] standard_name: "normalized_difference_vegetation_index" - - # Cloud Height products - cloud_top_temp: - name: CldTopTemp - file_key: "CldTopTemp" - file_type: [jrr_cloudheight_product] - resolution: 750 - coordinates: [longitude, latitude] - cloud_top_height: - name: CldTopHght - file_key: "CldTopHght" - file_type: [jrr_cloudheight_product] - resolution: 750 - coordinates: [longitude, latitude] - cloud_top_pressure: - name: CldTopPres - file_key: "CldTopPres" - file_type: [jrr_cloudheight_product] - resolution: 750 - coordinates: [longitude, latitude] - - # Aerosol Optical Depth products - aod550: - name: AOD550 - file_key: AOD550 - file_type: [jrr_aod_product] - resolution: 750 - coordinates: [longitude, latitude] - - # Land Surface Temperature - vlst: - name: VLST - file_key: VLST - file_type: [jrr_lst_product] - resolution: 750 - coordinates: [longitude, latitude] diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 1c7ba034ef..d1ef69db96 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -58,6 +58,7 @@ from satpy.utils import get_chunk_size_limit LOG = logging.getLogger(__name__) +M_COLS = 3200 class VIIRSJRRFileHandler(BaseFileHandler): @@ -68,7 +69,7 @@ def __init__(self, filename, filename_info, filetype_info): super(VIIRSJRRFileHandler, self).__init__(filename, filename_info, filetype_info) # use entire scans as chunks - row_chunks_m = max(get_chunk_size_limit() // 4 // 3200, 1) # 32-bit floats + row_chunks_m = max(get_chunk_size_limit() // 4 // M_COLS, 1) # 32-bit floats row_chunks_i = row_chunks_m * 2 self.nc = 
xr.open_dataset(self.filename, decode_cf=True, @@ -99,7 +100,7 @@ def __init__(self, filename, filename_info, filetype_info): def rows_per_scans(self, data_arr: xr.DataArray) -> int: """Get number of array rows per instrument scan based on data resolution.""" - return 32 if data_arr.shape[1] == 6400 else 16 + return 16 if data_arr.shape[1] == M_COLS else 32 def get_dataset(self, dataset_id: DataID, info: dict) -> xr.DataArray: """Get the dataset.""" @@ -183,6 +184,9 @@ def available_datasets(self, configured_datasets=None): ``None`` if this file object is not responsible for it. """ + # keep track of what variables the YAML has configured, so we don't + # duplicate entries for them in the dynamic portion + handled_var_names = set() for is_avail, ds_info in (configured_datasets or []): if is_avail is not None: # some other file handler said it has this dataset @@ -194,8 +198,44 @@ def available_datasets(self, configured_datasets=None): # this is not the file type for this dataset yield None, ds_info file_key = ds_info.get("file_key", ds_info["name"]) + handled_var_names.add(file_key) yield file_key in self.nc, ds_info + ftype = self.filetype_info["file_type"] + m_lon_name = f"longitude_{ftype}" + m_lat_name = f"latitude_{ftype}" + m_coords = (m_lon_name, m_lat_name) + i_lon_name = f"longitude_i_{ftype}" + i_lat_name = f"latitude_i_{ftype}" + i_coords = (i_lon_name, i_lat_name) + for var_name, data_arr in self.nc.items(): + is_lon = "longitude" in var_name.lower() + is_lat = "latitude" in var_name.lower() + if var_name in handled_var_names and not (is_lon or is_lat): + # skip variables that YAML had configured, but allow lon/lats + # to be reprocessed due to our dynamic coordinate naming + continue + if data_arr.ndim != 2: + # only 2D arrays supported at this time + continue + res = 750 if data_arr.shape[1] == M_COLS else 375 + ds_info = { + "file_key": var_name, + "file_type": ftype, + "name": var_name, + "resolution": res, + "coordinates": m_coords if res == 750 else i_coords, + } + if is_lon: + ds_info["standard_name"] = "longitude" + ds_info["units"] = "degrees_east" + ds_info["name"] = m_lon_name if res == 750 else i_lon_name + elif is_lat: + ds_info["standard_name"] = "latitude" + ds_info["units"] = "degrees_north" + ds_info["name"] = m_lat_name if res == 750 else i_lat_name + yield True, ds_info + class VIIRSSurfaceReflectanceWithVIHandler(VIIRSJRRFileHandler): """File handler for surface reflectance files with optional vegetation indexes.""" diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index cc06e16647..09cc2769b6 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -106,8 +106,8 @@ def _create_surf_refl_variables() -> dict[str, xr.DataArray]: data_arrs = { "Longitude_at_375m_resolution": xr.DataArray(i_data, dims=i_dims, attrs=lon_attrs), "Latitude_at_375m_resolution": xr.DataArray(i_data, dims=i_dims, attrs=lat_attrs), - "Longitude_at_750m_resolution": xr.DataArray(i_data, dims=i_dims, attrs=lon_attrs), - "Latitude_at_750m_resolution": xr.DataArray(i_data, dims=i_dims, attrs=lat_attrs), + "Longitude_at_750m_resolution": xr.DataArray(m_data, dims=m_dims, attrs=lon_attrs), + "Latitude_at_750m_resolution": xr.DataArray(m_data, dims=m_dims, attrs=lat_attrs), "375m Surface Reflectance Band I1": xr.DataArray(i_data, dims=i_dims, attrs=sr_attrs), "750m Surface Reflectance Band M1": xr.DataArray(m_data, dims=m_dims, attrs=sr_attrs), } @@ -368,6 +368,7 @@ def 
_check_continuous_data_arr(data_arr: xr.DataArray) -> None: def _array_checks(data_arr: xr.DataArray, dtype: npt.Dtype = np.float32) -> None: assert data_arr.dims == ("y", "x") assert isinstance(data_arr.attrs["area"], SwathDefinition) + assert data_arr.attrs["area"].shape == data_arr.shape assert isinstance(data_arr.data, da.Array) assert np.issubdtype(data_arr.data.dtype, dtype) is_mband_res = _is_mband_res(data_arr) From 65511ecf98a89346ab161c749e135ddebfcd38fe Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 11 Aug 2023 14:02:29 -0500 Subject: [PATCH 0385/1416] Deprecate "viirs_l2_cloud_mask_nc" reader --- satpy/readers/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index 7efab8d904..2b1bbc37ba 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -37,7 +37,7 @@ # Old Name -> New Name -PENDING_OLD_READER_NAMES = {'fci_l1c_fdhsi': 'fci_l1c_nc'} +PENDING_OLD_READER_NAMES = {'fci_l1c_fdhsi': 'fci_l1c_nc', 'viirs_l2_cloud_mask_nc': 'viirs_edr'} OLD_READER_NAMES: dict[str, str] = {} From fbd2802c0747adde8a7582894d3c18f495bfc7ce Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 11 Aug 2023 14:15:55 -0500 Subject: [PATCH 0386/1416] Add flag to control QF filtering of vegetation indexes --- satpy/readers/viirs_edr.py | 18 +++++++++++++++++- satpy/tests/reader_tests/test_viirs_edr.py | 22 ++++++++++++++-------- 2 files changed, 31 insertions(+), 9 deletions(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index d1ef69db96..9b73d272ce 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -45,6 +45,17 @@ and aerosol detection files contain a cloud mask, but these are not identical. For clarity, the aerosol file cloudmask is named `cloud_mask_adp` in this reader. +Vegetation Indexes +^^^^^^^^^^^^^^^^^^ + +The NDVI and EVI products can be loaded from CSPP-produced Surface Reflectance +files. By default, these products are filtered based on the Surface Reflectance +Quality Flags. This is used to remove/mask pixels in certain cloud or water +regions. This behavior can be disabled by providing the reader keyword argument +``filter_veg`` and setting it to ``False``. 
For example:: + + scene = satpy.Scene(filenames, reader='viirs_edr', reader_kwargs={"filter_veg": False}) + """ @@ -240,9 +251,14 @@ def available_datasets(self, configured_datasets=None): class VIIRSSurfaceReflectanceWithVIHandler(VIIRSJRRFileHandler): """File handler for surface reflectance files with optional vegetation indexes.""" + def __init__(self, *args, filter_veg: bool = True, **kwargs) -> None: + """Initialize file handler and keep track of vegetation index filtering.""" + super().__init__(*args, **kwargs) + self._filter_veg = filter_veg + def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: new_data_arr = super()._mask_invalid(data_arr, ds_info) - if ds_info["file_key"] in ("NDVI", "EVI"): + if ds_info["file_key"] in ("NDVI", "EVI") and self._filter_veg: good_mask = self._get_veg_index_good_mask() new_data_arr = new_data_arr.where(good_mask) return new_data_arr diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 09cc2769b6..c9caf3ab85 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -259,15 +259,17 @@ def test_get_dataset_surf_refl(self, surface_reflectance_file): _check_surf_refl_data_arr(scn["surf_refl_I01"]) _check_surf_refl_data_arr(scn["surf_refl_M01"]) - def test_get_dataset_surf_refl_with_veg_idx(self, surface_reflectance_with_veg_indices_file): + @pytest.mark.parametrize("filter_veg", [False, True]) + def test_get_dataset_surf_refl_with_veg_idx(self, surface_reflectance_with_veg_indices_file, filter_veg): """Test retrieval of vegetation indices from surface reflectance files.""" from satpy import Scene bytes_in_m_row = 4 * 3200 with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}): - scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_with_veg_indices_file]) + scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_with_veg_indices_file], + reader_kwargs={"filter_veg": filter_veg}) scn.load(["NDVI", "EVI", "surf_refl_qf1"]) - _check_vi_data_arr(scn["NDVI"]) - _check_vi_data_arr(scn["EVI"]) + _check_vi_data_arr(scn["NDVI"], filter_veg) + _check_vi_data_arr(scn["EVI"], filter_veg) _check_surf_refl_qf_data_arr(scn["surf_refl_qf1"]) @pytest.mark.parametrize( @@ -332,16 +334,20 @@ def _check_surf_refl_qf_data_arr(data_arr: xr.DataArray) -> None: assert data_arr.attrs["standard_name"] == "quality_flag" -def _check_vi_data_arr(data_arr: xr.DataArray) -> None: +def _check_vi_data_arr(data_arr: xr.DataArray, is_filtered: bool) -> None: _array_checks(data_arr) _shared_metadata_checks(data_arr) assert data_arr.attrs["units"] == "1" assert data_arr.attrs["standard_name"] == "normalized_difference_vegetation_index" data = data_arr.data.compute() - np.testing.assert_allclose(data[0, :7], [np.nan, -1.0, -0.5, 0.0, 0.5, 1.0, np.nan]) - np.testing.assert_allclose(data[0, 8:8 + 16], np.nan) - np.testing.assert_allclose(data[0, 8 + 16:], 0.0) + if is_filtered: + np.testing.assert_allclose(data[0, :7], [np.nan, -1.0, -0.5, 0.0, 0.5, 1.0, np.nan]) + np.testing.assert_allclose(data[0, 8:8 + 16], np.nan) + np.testing.assert_allclose(data[0, 8 + 16:], 0.0) + else: + np.testing.assert_allclose(data[0, :7], [np.nan, -1.0, -0.5, 0.0, 0.5, 1.0, np.nan]) + np.testing.assert_allclose(data[0, 8:], 0.0) def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.float32) -> None: From e01b8a597d5b28569c969cb35ae0a151720c8c97 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 14 Aug 2023 10:32:41 +0000 Subject: [PATCH 0387/1416] Bump pypa/gh-action-pypi-publish from 1.8.8 to 1.8.10 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.8 to 1.8.10. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.8...v1.8.10) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/deploy-sdist.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index 68463b2b03..73db646c62 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -19,7 +19,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.8.8 + uses: pypa/gh-action-pypi-publish@v1.8.10 with: user: __token__ password: ${{ secrets.pypi_password }} From d6ec23e9a3f9b3ccbeb85e40beffcc96073cd9e4 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 14 Aug 2023 11:17:03 -0500 Subject: [PATCH 0388/1416] Remove unstable numpy in CI to test hanging --- .github/workflows/ci.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index faa0aea2cc..078d1f9df0 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -94,7 +94,6 @@ jobs: --trusted-host pypi.anaconda.org \ --no-deps --pre --upgrade \ matplotlib \ - numpy \ pandas \ scipy; \ python -m pip install \ From b7efb9dda139c385de7f493ff8f10514ae35afbc Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 14 Aug 2023 11:44:17 -0500 Subject: [PATCH 0389/1416] Limit unstable environment to numpy <2 --- .github/workflows/ci.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 078d1f9df0..b4d33026db 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -94,6 +94,7 @@ jobs: --trusted-host pypi.anaconda.org \ --no-deps --pre --upgrade \ matplotlib \ + "numpy<2" \ pandas \ scipy; \ python -m pip install \ From 036fa9fd005202f0d9a2ede365d358fd70d5ec16 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 14 Aug 2023 13:56:11 -0500 Subject: [PATCH 0390/1416] Install numpy 1.26b1 from PyPI in unstable CI --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index b4d33026db..0ebbe68f0c 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -89,12 +89,12 @@ jobs: # We must get LD_PRELOAD for stdlibc++ or else the manylinux wheels # may break the conda-forge libraries trying to use newer glibc versions run: | + python -m pip install --pre --upgrade --no-deps numpy; \ python -m pip install \ --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple/ \ --trusted-host pypi.anaconda.org \ --no-deps --pre --upgrade \ matplotlib \ - "numpy<2" \ pandas \ scipy; \ python -m pip install \ From 984c092501e4130921c48aa9ca62f682461330f1 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 15 Aug 2023 05:40:10 +0000 Subject: [PATCH 0391/1416] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.4.1 → v1.5.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.4.1...v1.5.0) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d100d73ca7..1b7889838a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -20,7 +20,7 @@ repos: - id: bandit args: [--ini, .bandit] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.4.1' # Use the sha / tag you want to point at + rev: 'v1.5.0' # Use the sha / tag you want to point at hooks: - id: mypy additional_dependencies: From f4b90e9da64be3713b3d9abfaa3e57b97be6eb6c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 15 Aug 2023 09:22:00 -0500 Subject: [PATCH 0392/1416] Slim down CI for easier debugging --- .github/workflows/ci.yaml | 66 +++------------------------------------ 1 file changed, 4 insertions(+), 62 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 0ebbe68f0c..477930cba6 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -11,36 +11,15 @@ env: CACHE_NUMBER: 1 jobs: - lint: - name: lint and style checks - runs-on: ubuntu-latest - steps: - - name: Checkout source - uses: actions/checkout@v3 - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: 3.9 - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install flake8 flake8-docstrings flake8-debugger flake8-bugbear pytest - - name: Install Satpy - run: | - pip install -e . - - name: Run linting - run: | - flake8 satpy/ test: runs-on: ${{ matrix.os }} continue-on-error: ${{ matrix.experimental }} - needs: [lint] strategy: fail-fast: true matrix: - os: ["windows-latest", "ubuntu-latest", "macos-latest"] - python-version: ["3.9", "3.10", "3.11"] + os: ["ubuntu-latest"] + python-version: ["3.11"] experimental: [false] include: - python-version: "3.11" @@ -89,12 +68,12 @@ jobs: # We must get LD_PRELOAD for stdlibc++ or else the manylinux wheels # may break the conda-forge libraries trying to use newer glibc versions run: | - python -m pip install --pre --upgrade --no-deps numpy; \ python -m pip install \ --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple/ \ --trusted-host pypi.anaconda.org \ --no-deps --pre --upgrade \ matplotlib \ + numpy \ pandas \ scipy; \ python -m pip install \ @@ -119,41 +98,4 @@ jobs: shell: bash -l {0} run: | export LD_PRELOAD=${{ env.LD_PRELOAD }}; - pytest --cov=satpy satpy/tests --cov-report=xml --cov-report= - - - name: Upload unittest coverage to Codecov - uses: codecov/codecov-action@v3 - with: - flags: unittests - file: ./coverage.xml - env_vars: OS,PYTHON_VERSION,UNSTABLE - - - name: Coveralls Parallel - uses: AndreMiras/coveralls-python-action@develop - with: - flag-name: run-${{ matrix.test_number }} - parallel: true - if: runner.os == 'Linux' - - - name: Run behaviour tests - shell: bash -l {0} - run: | - export LD_PRELOAD=${{ env.LD_PRELOAD }}; - coverage run --source=satpy -m behave satpy/tests/features --tags=-download - coverage xml - - - name: Upload behaviour test coverage to Codecov - uses: codecov/codecov-action@v3 - with: - flags: behaviourtests - file: ./coverage.xml - env_vars: OS,PYTHON_VERSION,UNSTABLE - - coveralls: - needs: [test] - runs-on: ubuntu-latest - steps: - - name: Coveralls Finished - uses: AndreMiras/coveralls-python-action@develop - with: - parallel-finished: true + pytest 
satpy/tests From 5705f4db4e1e67eb8a37028442da63a7e7641d9f Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 15 Aug 2023 10:10:16 -0500 Subject: [PATCH 0393/1416] Try just viirs compact tests --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 477930cba6..03f0a88016 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -98,4 +98,4 @@ jobs: shell: bash -l {0} run: | export LD_PRELOAD=${{ env.LD_PRELOAD }}; - pytest satpy/tests + pytest satpy/tests/reader_tests/test_viirs_compact.py From 326b4df7e440cc5188832db9a29de47bc72a5264 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 15 Aug 2023 10:18:58 -0500 Subject: [PATCH 0394/1416] Remove bokeh upper limit from CI environment --- continuous_integration/environment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index 48976401a2..e1b52b384e 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -55,7 +55,7 @@ dependencies: - xarray-datatree - pint-xarray - ephem - - bokeh<3 + - bokeh - pip: - trollsift - trollimage>=1.20 From 91310cc8e57f106d8b92cefb119b615891c0561c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 15 Aug 2023 10:46:28 -0500 Subject: [PATCH 0395/1416] Revert changes to ci.yaml --- .github/workflows/ci.yaml | 64 +++++++++++++++++++++++++++++++++++++-- 1 file changed, 61 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 03f0a88016..faa0aea2cc 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -11,15 +11,36 @@ env: CACHE_NUMBER: 1 jobs: + lint: + name: lint and style checks + runs-on: ubuntu-latest + steps: + - name: Checkout source + uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: 3.9 + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install flake8 flake8-docstrings flake8-debugger flake8-bugbear pytest + - name: Install Satpy + run: | + pip install -e . 
+ - name: Run linting + run: | + flake8 satpy/ test: runs-on: ${{ matrix.os }} continue-on-error: ${{ matrix.experimental }} + needs: [lint] strategy: fail-fast: true matrix: - os: ["ubuntu-latest"] - python-version: ["3.11"] + os: ["windows-latest", "ubuntu-latest", "macos-latest"] + python-version: ["3.9", "3.10", "3.11"] experimental: [false] include: - python-version: "3.11" @@ -98,4 +119,41 @@ jobs: shell: bash -l {0} run: | export LD_PRELOAD=${{ env.LD_PRELOAD }}; - pytest satpy/tests/reader_tests/test_viirs_compact.py + pytest --cov=satpy satpy/tests --cov-report=xml --cov-report= + + - name: Upload unittest coverage to Codecov + uses: codecov/codecov-action@v3 + with: + flags: unittests + file: ./coverage.xml + env_vars: OS,PYTHON_VERSION,UNSTABLE + + - name: Coveralls Parallel + uses: AndreMiras/coveralls-python-action@develop + with: + flag-name: run-${{ matrix.test_number }} + parallel: true + if: runner.os == 'Linux' + + - name: Run behaviour tests + shell: bash -l {0} + run: | + export LD_PRELOAD=${{ env.LD_PRELOAD }}; + coverage run --source=satpy -m behave satpy/tests/features --tags=-download + coverage xml + + - name: Upload behaviour test coverage to Codecov + uses: codecov/codecov-action@v3 + with: + flags: behaviourtests + file: ./coverage.xml + env_vars: OS,PYTHON_VERSION,UNSTABLE + + coveralls: + needs: [test] + runs-on: ubuntu-latest + steps: + - name: Coveralls Finished + uses: AndreMiras/coveralls-python-action@develop + with: + parallel-finished: true From 4d54512565ebd3ffaf9f4c5e35252295ac29ac42 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 15 Aug 2023 10:56:21 -0500 Subject: [PATCH 0396/1416] Reset CI cache number to force updating packages --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index faa0aea2cc..b2b53c691b 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -8,7 +8,7 @@ concurrency: on: [push, pull_request] env: - CACHE_NUMBER: 1 + CACHE_NUMBER: 0 jobs: lint: From d5b1caca16a9442f6ddb3b40bd1f83041530f086 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 15 Aug 2023 20:35:25 -0500 Subject: [PATCH 0397/1416] Refactor viirs_edr available_datasets --- satpy/readers/viirs_edr.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 9b73d272ce..f95f6a901c 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -57,9 +57,10 @@ scene = satpy.Scene(filenames, reader='viirs_edr', reader_kwargs={"filter_veg": False}) """ - +from __future__ import annotations import logging +from typing import Iterable import numpy as np import xarray as xr @@ -212,6 +213,9 @@ def available_datasets(self, configured_datasets=None): handled_var_names.add(file_key) yield file_key in self.nc, ds_info + yield from self._dynamic_variables_from_file(handled_var_names) + + def _dynamic_variables_from_file(self, handled_var_names: set) -> Iterable[tuple[bool, dict]]: ftype = self.filetype_info["file_type"] m_lon_name = f"longitude_{ftype}" m_lat_name = f"latitude_{ftype}" From aa3f904cc0795eb4a1af2da3d1794e7bca5037cd Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 16 Aug 2023 10:57:00 -0500 Subject: [PATCH 0398/1416] Fix coordinate variables not being dynamically included --- satpy/readers/viirs_edr.py | 3 ++- satpy/tests/reader_tests/test_viirs_edr.py | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/satpy/readers/viirs_edr.py 
b/satpy/readers/viirs_edr.py index f95f6a901c..8b3cf99d27 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -223,7 +223,8 @@ def _dynamic_variables_from_file(self, handled_var_names: set) -> Iterable[tuple i_lon_name = f"longitude_i_{ftype}" i_lat_name = f"latitude_i_{ftype}" i_coords = (i_lon_name, i_lat_name) - for var_name, data_arr in self.nc.items(): + for var_name in self.nc.variables.keys(): + data_arr = self.nc[var_name] is_lon = "longitude" in var_name.lower() is_lat = "latitude" in var_name.lower() if var_name in handled_var_names and not (is_lon or is_lat): diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index c9caf3ab85..f4f5799444 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -225,6 +225,7 @@ def _create_continuous_variables(var_names: Iterable[str]) -> dict[str, xr.DataA data_arr.encoding["dtype"] = np.int16 data_arr.encoding["scale_factor"] = data_arr.attrs.pop("scale_factor") data_arr.encoding["add_offset"] = data_arr.attrs.pop("add_offset") + data_arr.encoding["coordinates"] = "Longitude Latitude" return data_arrs From 4fbeeaa72cfbd1704ac422c662c07631d7defa1e Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 16 Aug 2023 11:17:41 -0500 Subject: [PATCH 0399/1416] Fix YAML definitions being ignored when multiple files are provided --- satpy/readers/viirs_edr.py | 4 +- satpy/tests/reader_tests/test_viirs_edr.py | 63 ++++++++++++++++------ 2 files changed, 48 insertions(+), 19 deletions(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 8b3cf99d27..b007e710b3 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -200,6 +200,8 @@ def available_datasets(self, configured_datasets=None): # duplicate entries for them in the dynamic portion handled_var_names = set() for is_avail, ds_info in (configured_datasets or []): + file_key = ds_info.get("file_key", ds_info["name"]) + handled_var_names.add(file_key) if is_avail is not None: # some other file handler said it has this dataset # we don't know any more information than the previous @@ -209,8 +211,6 @@ def available_datasets(self, configured_datasets=None): if self.file_type_matches(ds_info['file_type']) is None: # this is not the file type for this dataset yield None, ds_info - file_key = ds_info.get("file_key", ds_info["name"]) - handled_var_names.add(file_key) yield file_key in self.nc, ds_info yield from self._dynamic_variables_from_file(handled_var_names) diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index f4f5799444..5286ed3461 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -245,33 +245,57 @@ def _create_fake_dataset(vars_dict: dict[str, xr.DataArray]) -> xr.Dataset: return ds +def _copy_to_second_granule(first_granule_path: Path) -> Path: + # hack to make multiple time steps + second_fn = Path(str(first_granule_path).replace("0.nc", "1.nc")) + shutil.copy(first_granule_path, second_fn) + return second_fn + + class TestVIIRSJRRReader: """Test the VIIRS JRR L2 reader.""" - def test_get_dataset_surf_refl(self, surface_reflectance_file): + @pytest.mark.parametrize("multiple_files", [False, True]) + def test_get_dataset_surf_refl(self, surface_reflectance_file, multiple_files): """Test retrieval of datasets.""" from satpy import Scene + + files = [surface_reflectance_file] + if multiple_files: + 
files.append(_copy_to_second_granule(surface_reflectance_file)) + bytes_in_m_row = 4 * 3200 with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}): - scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_file]) + scn = Scene(reader="viirs_edr", filenames=files) scn.load(["surf_refl_I01", "surf_refl_M01"]) assert scn.start_time == START_TIME assert scn.end_time == END_TIME - _check_surf_refl_data_arr(scn["surf_refl_I01"]) - _check_surf_refl_data_arr(scn["surf_refl_M01"]) + _check_surf_refl_data_arr(scn["surf_refl_I01"], multiple_files=multiple_files) + _check_surf_refl_data_arr(scn["surf_refl_M01"], multiple_files=multiple_files) @pytest.mark.parametrize("filter_veg", [False, True]) - def test_get_dataset_surf_refl_with_veg_idx(self, surface_reflectance_with_veg_indices_file, filter_veg): + @pytest.mark.parametrize("multiple_files", [False, True]) + def test_get_dataset_surf_refl_with_veg_idx( + self, + surface_reflectance_with_veg_indices_file, + filter_veg, + multiple_files + ): """Test retrieval of vegetation indices from surface reflectance files.""" from satpy import Scene + + files = [surface_reflectance_with_veg_indices_file] + if multiple_files: + files.append(_copy_to_second_granule(surface_reflectance_with_veg_indices_file)) + bytes_in_m_row = 4 * 3200 with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}): - scn = Scene(reader="viirs_edr", filenames=[surface_reflectance_with_veg_indices_file], + scn = Scene(reader="viirs_edr", filenames=files, reader_kwargs={"filter_veg": filter_veg}) scn.load(["NDVI", "EVI", "surf_refl_qf1"]) - _check_vi_data_arr(scn["NDVI"], filter_veg) - _check_vi_data_arr(scn["EVI"], filter_veg) - _check_surf_refl_qf_data_arr(scn["surf_refl_qf1"]) + _check_vi_data_arr(scn["NDVI"], filter_veg, multiple_files) + _check_vi_data_arr(scn["EVI"], filter_veg, multiple_files) + _check_surf_refl_qf_data_arr(scn["surf_refl_qf1"], multiple_files) @pytest.mark.parametrize( ("var_names", "data_file"), @@ -328,15 +352,15 @@ def test_get_platformname(self, surface_reflectance_file, filename_platform, exp assert scn["surf_refl_I01"].attrs["platform_name"] == exp_shortname -def _check_surf_refl_qf_data_arr(data_arr: xr.DataArray) -> None: - _array_checks(data_arr, dtype=np.uint8) +def _check_surf_refl_qf_data_arr(data_arr: xr.DataArray, multiple_files: bool) -> None: + _array_checks(data_arr, dtype=np.uint8, multiple_files=multiple_files) _shared_metadata_checks(data_arr) assert data_arr.attrs["units"] == "1" assert data_arr.attrs["standard_name"] == "quality_flag" -def _check_vi_data_arr(data_arr: xr.DataArray, is_filtered: bool) -> None: - _array_checks(data_arr) +def _check_vi_data_arr(data_arr: xr.DataArray, is_filtered: bool, multiple_files: bool) -> None: + _array_checks(data_arr, multiple_files=multiple_files) _shared_metadata_checks(data_arr) assert data_arr.attrs["units"] == "1" assert data_arr.attrs["standard_name"] == "normalized_difference_vegetation_index" @@ -351,8 +375,12 @@ def _check_vi_data_arr(data_arr: xr.DataArray, is_filtered: bool) -> None: np.testing.assert_allclose(data[0, 8:], 0.0) -def _check_surf_refl_data_arr(data_arr: xr.DataArray, dtype: npt.DType = np.float32) -> None: - _array_checks(data_arr, dtype) +def _check_surf_refl_data_arr( + data_arr: xr.DataArray, + dtype: npt.DType = np.float32, + multiple_files: bool = False +) -> None: + _array_checks(data_arr, dtype, multiple_files=multiple_files) data = data_arr.data.compute() assert data.max() > 1.0 # random 0-1 test data multiplied by 100 @@ -372,14 +400,15 
@@ def _check_continuous_data_arr(data_arr: xr.DataArray) -> None: _shared_metadata_checks(data_arr) -def _array_checks(data_arr: xr.DataArray, dtype: npt.Dtype = np.float32) -> None: +def _array_checks(data_arr: xr.DataArray, dtype: npt.Dtype = np.float32, multiple_files: bool = False) -> None: assert data_arr.dims == ("y", "x") assert isinstance(data_arr.attrs["area"], SwathDefinition) assert data_arr.attrs["area"].shape == data_arr.shape assert isinstance(data_arr.data, da.Array) assert np.issubdtype(data_arr.data.dtype, dtype) is_mband_res = _is_mband_res(data_arr) - exp_shape = (M_ROWS, M_COLS) if is_mband_res else (I_ROWS, I_COLS) + shape_multiplier = 1 + int(multiple_files) + exp_shape = (M_ROWS * shape_multiplier, M_COLS) if is_mband_res else (I_ROWS * shape_multiplier, I_COLS) assert data_arr.shape == exp_shape exp_row_chunks = 4 if is_mband_res else 8 assert all(c == exp_row_chunks for c in data_arr.chunks[0]) From 8d664a654bb219dd8a85ff5bc60b50f37c16dceb Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 16 Aug 2023 11:18:38 -0500 Subject: [PATCH 0400/1416] Remove old VIIRS L2 Cloud Mask reader --- satpy/etc/readers/viirs_l2_cloud_mask_nc.yaml | 50 -------- satpy/readers/viirs_l2.py | 120 ------------------ 2 files changed, 170 deletions(-) delete mode 100644 satpy/etc/readers/viirs_l2_cloud_mask_nc.yaml delete mode 100644 satpy/readers/viirs_l2.py diff --git a/satpy/etc/readers/viirs_l2_cloud_mask_nc.yaml b/satpy/etc/readers/viirs_l2_cloud_mask_nc.yaml deleted file mode 100644 index 0f2650bdc1..0000000000 --- a/satpy/etc/readers/viirs_l2_cloud_mask_nc.yaml +++ /dev/null @@ -1,50 +0,0 @@ -reader: - name: viirs_l2_cloud_mask_nc - short_name: VIIRS CSPP Cloud Mask - long_name: VIIRS CSPP Cloud Mask data in NetCDF4 format - description: VIIRS CSPP Cloud Mask reader - status: beta - supports_fsspec: false - reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader - sensors: [viirs] - -file_types: - cspp_cloud_mask_file: - file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSCloudMaskFileHandler - file_patterns: ['JRR-CloudMask_{delivery_package:4s}_{platform_shortname:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc'] - # Example filenames - # JRR-CloudMask_v3r0_npp_s202212070726217_e202212070727459_c202212071917430.nc - -datasets: - longitude: - name: longitude - resolution: 750 - file_type: cspp_cloud_mask_file - file_key: Longitude - file_units: "degrees_east" - standard_name: longitude - coordinates: [longitude, latitude] - latitude: - name: latitude - resolution: 750 - file_type: cspp_cloud_mask_file - file_key: Latitude - file_units: "degrees_north" - standard_name: latitude - coordinates: [longitude, latitude] - cloud_mask: - name: cloud_mask - resolution: 750 - file_type: cspp_cloud_mask_file - file_key: CloudMask - file_units: "1" - standard_name: cloud_mask - coordinates: [longitude, latitude] - cloud_mask_binary: - name: cloud_mask_binary - resolution: 750 - file_type: cspp_cloud_mask_file - file_key: CloudMaskBinary - file_units: "1" - standard_name: cloud_mask_binary - coordinates: [longitude, latitude] diff --git a/satpy/readers/viirs_l2.py b/satpy/readers/viirs_l2.py deleted file mode 100644 index 90d272504a..0000000000 --- a/satpy/readers/viirs_l2.py +++ /dev/null @@ -1,120 +0,0 @@ -# Copyright (c) 2022-2023 Satpy developers -# -# This file is part of satpy. 
-# -# satpy is free software: you can redistribute it and/or modify it under the -# terms of the GNU General Public License as published by the Free Software -# Foundation, either version 3 of the License, or (at your option) any later -# version. -# -# satpy is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# satpy. If not, see . -"""Interface to VIIRS L2 files.""" - -from datetime import datetime - -from satpy.readers.netcdf_utils import NetCDF4FileHandler - - -class VIIRSCloudMaskFileHandler(NetCDF4FileHandler): - """VIIRS L2 Cloud Mask reader.""" - - def __init__(self, filename, filename_info, filetype_info): - """Initialize the file handler.""" - super().__init__(filename, filename_info, filetype_info, cache_handle=True) - - def _parse_datetime(self, datestr): - """Parse datetime.""" - return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ") - - @property - def start_orbit_number(self): - """Get start orbit number.""" - return int(self['/attr/start_orbit_number']) - - @property - def end_orbit_number(self): - """Get end orbit number.""" - return int(self['/attr/end_orbit_number']) - - @property - def platform_name(self): - """Get platform name.""" - res = self.filename_info['platform_shortname'] - - return { - 'npp': 'Suomi-NPP', - 'j01': 'NOAA-20', - 'j02': 'NOAA-21', - }.get(res, res) - - @property - def sensor_name(self): - """Get sensor name.""" - return self['/attr/instrument_name'].lower() - - def get_shape(self, ds_id, ds_info): - """Get shape.""" - return self.get(ds_id['name'] + '/shape', 1) - - @property - def start_time(self): - """Get start time.""" - return self._parse_datetime(self['/attr/time_coverage_start']) - - @property - def end_time(self): - """Get end time.""" - return self._parse_datetime(self['/attr/time_coverage_end']) - - def get_metadata(self, dataset_id, ds_info): - """Get metadata.""" - var_path = ds_info['file_key'] - shape = self.get_shape(dataset_id, ds_info) - file_units = ds_info.get('file_units') - - attr = getattr(self[var_path], 'attrs', {}) - attr.update(ds_info) - attr.update(dataset_id.to_dict()) - attr.update({ - "shape": shape, - "units": ds_info.get("units", file_units), - "file_units": file_units, - "platform_name": self.platform_name, - "sensor": self.sensor_name, - "start_orbit": self.start_orbit_number, - "end_orbit": self.end_orbit_number, - }) - attr.update(dataset_id.to_dict()) - return attr - - def get_dataset(self, dataset_id, ds_info): - """Get dataset.""" - var_path = ds_info['file_key'] - metadata = self.get_metadata(dataset_id, ds_info) - - valid_min, valid_max = self._get_dataset_valid_range(var_path) - data = self[var_path] - data.attrs.update(metadata) - - if valid_min is not None and valid_max is not None: - data = data.where((data >= valid_min) & (data <= valid_max)) - - if isinstance(data.attrs.get('flag_meanings'), str): - data.attrs['flag_meanings'] = data.attrs['flag_meanings'].split(' ') - - # rename dimensions to correspond to satpy's 'y' and 'x' standard - if 'Rows' in data.dims: - data = data.rename({'Rows': 'y', 'Columns': 'x'}) - return data - - def _get_dataset_valid_range(self, var_path): - valid_range = self.get(var_path + '/attr/valid_range') - valid_min = valid_range[0] - valid_max = valid_range[1] - - return valid_min, valid_max From 
f7b60b457fdd166796e1aec28451bc59589e95f4 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 16 Aug 2023 11:20:09 -0500 Subject: [PATCH 0401/1416] Remove old VIIRS L2 reader tests --- satpy/tests/reader_tests/test_viirs_l2.py | 142 ---------------------- 1 file changed, 142 deletions(-) delete mode 100644 satpy/tests/reader_tests/test_viirs_l2.py diff --git a/satpy/tests/reader_tests/test_viirs_l2.py b/satpy/tests/reader_tests/test_viirs_l2.py deleted file mode 100644 index 5d6e8ffb4f..0000000000 --- a/satpy/tests/reader_tests/test_viirs_l2.py +++ /dev/null @@ -1,142 +0,0 @@ -# Copyright (c) 2022 Satpy developers -# -# This file is part of satpy. -# -# satpy is free software: you can redistribute it and/or modify it under the -# terms of the GNU General Public License as published by the Free Software -# Foundation, either version 3 of the License, or (at your option) any later -# version. -# -# satpy is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# satpy. If not, see . -"""Tests for the VIIRS CSPP L2 readers.""" - -import numpy as np -import pytest -import xarray as xr - -from satpy import Scene - -# NOTE: -# The following Pytest fixtures are not defined in this file, but are used and injected by Pytest: -# - tmp_path - -CLOUD_MASK_FILE = "JRR-CloudMask_v3r0_npp_s202212070905565_e202212070907207_c202212071932513.nc" -NUM_COLUMNS = 3200 -NUM_ROWS = 768 -DATASETS = ['Latitude', 'Longitude', 'CloudMask', 'CloudMaskBinary'] - - -@pytest.fixture -def cloud_mask_file(tmp_path): - """Create a temporary JRR CloudMask file as a fixture.""" - file_path = tmp_path / CLOUD_MASK_FILE - _write_cloud_mask_file(file_path) - yield file_path - - -def _write_cloud_mask_file(file_path): - dset = xr.Dataset() - dset.attrs = _get_global_attrs() - dset['Latitude'] = _get_lat_arr() - dset['Longitude'] = _get_lon_arr() - dset['CloudMask'] = _get_cloud_mask_arr() - dset['CloudMaskBinary'] = _get_cloud_mask_binary_arr() - dset.to_netcdf(file_path, 'w') - - -def _get_global_attrs(): - return { - 'time_coverage_start': '2022-12-07T09:05:56Z', - 'time_coverage_end': '2022-12-07T09:07:20Z', - 'start_orbit_number': np.array(57573), - 'end_orbit_number': np.array(57573), - 'instrument_name': 'VIIRS', - } - - -def _get_lat_arr(): - arr = np.zeros((NUM_ROWS, NUM_COLUMNS), dtype=np.float32) - attrs = { - 'long_name': 'Latitude', - 'units': 'degrees_north', - 'valid_range': np.array([-90, 90], dtype=np.float32), - '_FillValue': -999. - } - return xr.DataArray(arr, attrs=attrs, dims=('Rows', 'Columns')) - - -def _get_lon_arr(): - arr = np.zeros((NUM_ROWS, NUM_COLUMNS), dtype=np.float32) - attrs = { - 'long_name': 'Longitude', - 'units': 'degrees_east', - 'valid_range': np.array([-180, 180], dtype=np.float32), - '_FillValue': -999. 
- } - return xr.DataArray(arr, attrs=attrs, dims=('Rows', 'Columns')) - - -def _get_cloud_mask_arr(): - arr = np.random.randint(0, 4, (NUM_ROWS, NUM_COLUMNS), dtype=np.byte) - attrs = { - 'long_name': 'Cloud Mask', - '_FillValue': np.byte(-128), - 'valid_range': np.array([0, 3], dtype=np.byte), - 'units': '1', - 'flag_values': np.array([0, 1, 2, 3], dtype=np.byte), - 'flag_meanings': 'clear probably_clear probably_cloudy cloudy', - } - return xr.DataArray(arr, attrs=attrs, dims=('Rows', 'Columns')) - - -def _get_cloud_mask_binary_arr(): - arr = np.random.randint(0, 2, (NUM_ROWS, NUM_COLUMNS), dtype=np.byte) - attrs = { - 'long_name': 'Cloud Mask Binary', - '_FillValue': np.byte(-128), - 'valid_range': np.array([0, 1], dtype=np.byte), - 'units': '1', - } - return xr.DataArray(arr, attrs=attrs, dims=('Rows', 'Columns')) - - -def test_cloud_mask_read_latitude(cloud_mask_file): - """Test reading latitude dataset.""" - data = _read_viirs_l2_cloud_mask_nc_data(cloud_mask_file, 'latitude') - _assert_common(data) - - -def test_cloud_mask_read_longitude(cloud_mask_file): - """Test reading longitude dataset.""" - data = _read_viirs_l2_cloud_mask_nc_data(cloud_mask_file, 'longitude') - _assert_common(data) - - -def test_cloud_mask_read_cloud_mask(cloud_mask_file): - """Test reading cloud mask dataset.""" - data = _read_viirs_l2_cloud_mask_nc_data(cloud_mask_file, 'cloud_mask') - _assert_common(data) - np.testing.assert_equal(data.attrs['flag_values'], [0, 1, 2, 3]) - assert data.attrs['flag_meanings'] == ['clear', 'probably_clear', 'probably_cloudy', 'cloudy'] - - -def test_cloud_mas_read_binary_cloud_mask(cloud_mask_file): - """Test reading binary cloud mask dataset.""" - data = _read_viirs_l2_cloud_mask_nc_data(cloud_mask_file, 'cloud_mask_binary') - _assert_common(data) - - -def _read_viirs_l2_cloud_mask_nc_data(fname, dset_name): - scn = Scene(reader="viirs_l2_cloud_mask_nc", filenames=[fname]) - scn.load([dset_name]) - return scn[dset_name] - - -def _assert_common(data): - assert data.dims == ('y', 'x') - assert "units" in data.attrs From 9e813f62f3678ce59373778085ea545ca22304b3 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 16 Aug 2023 13:21:36 -0500 Subject: [PATCH 0402/1416] Change multiple files tests to use a pytest fixture --- satpy/tests/reader_tests/test_viirs_edr.py | 97 +++++++++++++++------- 1 file changed, 65 insertions(+), 32 deletions(-) diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 5286ed3461..2acbd0d55a 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -22,7 +22,7 @@ from __future__ import annotations import shutil -from datetime import datetime +from datetime import datetime, timedelta from pathlib import Path from typing import Iterable @@ -72,17 +72,48 @@ @pytest.fixture(scope="module") def surface_reflectance_file(tmp_path_factory: TempPathFactory) -> Path: """Generate fake surface reflectance EDR file.""" - return _create_surface_reflectance_file(tmp_path_factory, include_veg_indices=False) + return _create_surface_reflectance_file(tmp_path_factory, START_TIME, include_veg_indices=False) + + +@pytest.fixture(scope="module") +def surface_reflectance_file2(tmp_path_factory: TempPathFactory) -> Path: + """Generate fake surface reflectance EDR file.""" + return _create_surface_reflectance_file(tmp_path_factory, START_TIME + timedelta(minutes=5), + include_veg_indices=False) + + +@pytest.fixture(scope="module") +def 
multiple_surface_reflectance_files(surface_reflectance_file, surface_reflectance_file2) -> list[Path]:
+    """Get two surface reflectance files."""
+    return [surface_reflectance_file, surface_reflectance_file2]
 
 
 @pytest.fixture(scope="module")
 def surface_reflectance_with_veg_indices_file(tmp_path_factory: TempPathFactory) -> Path:
     """Generate fake surface reflectance EDR file with vegetation indexes included."""
-    return _create_surface_reflectance_file(tmp_path_factory, include_veg_indices=True)
+    return _create_surface_reflectance_file(tmp_path_factory, START_TIME, include_veg_indices=True)
+
+
+@pytest.fixture(scope="module")
+def surface_reflectance_with_veg_indices_file2(tmp_path_factory: TempPathFactory) -> Path:
+    """Generate fake surface reflectance EDR file with vegetation indexes included."""
+    return _create_surface_reflectance_file(tmp_path_factory, START_TIME + timedelta(minutes=5),
+                                            include_veg_indices=True)
 
 
-def _create_surface_reflectance_file(tmp_path_factory: TempPathFactory, include_veg_indices: bool = False) -> Path:
-    fn = f"SurfRefl_v1r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202305302025590.nc"
+@pytest.fixture(scope="module")
+def multiple_surface_reflectance_files_with_veg_indices(surface_reflectance_with_veg_indices_file,
+                                                        surface_reflectance_with_veg_indices_file2) -> list[Path]:
+    """Get two surface reflectance files with vegetation indexes included."""
+    return [surface_reflectance_with_veg_indices_file, surface_reflectance_with_veg_indices_file2]
+
+
+def _create_surface_reflectance_file(
+    tmp_path_factory: TempPathFactory,
+    start_time: datetime,
+    include_veg_indices: bool = False,
+) -> Path:
+    fn = f"SurfRefl_v1r2_npp_s{start_time:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202305302025590.nc"
     sr_vars = _create_surf_refl_variables()
     if include_veg_indices:
         sr_vars.update(_create_veg_index_variables())
@@ -245,57 +276,59 @@ def _create_fake_dataset(vars_dict: dict[str, xr.DataArray]) -> xr.Dataset:
     return ds
 
 
-def _copy_to_second_granule(first_granule_path: Path) -> Path:
-    # hack to make multiple time steps
-    second_fn = Path(str(first_granule_path).replace("0.nc", "1.nc"))
-    shutil.copy(first_granule_path, second_fn)
-    return second_fn
-
-
 class TestVIIRSJRRReader:
     """Test the VIIRS JRR L2 reader."""
 
-    @pytest.mark.parametrize("multiple_files", [False, True])
-    def test_get_dataset_surf_refl(self, surface_reflectance_file, multiple_files):
+    @pytest.mark.parametrize(
+        "data_files",
+        [
+            lazy_fixture("surface_reflectance_file"),
+            lazy_fixture("multiple_surface_reflectance_files"),
+        ],
+    )
+    def test_get_dataset_surf_refl(self, data_files):
         """Test retrieval of datasets."""
         from satpy import Scene
 
-        files = [surface_reflectance_file]
-        if multiple_files:
-            files.append(_copy_to_second_granule(surface_reflectance_file))
-
+        if not isinstance(data_files, list):
+            data_files = [data_files]
+        is_multiple = len(data_files) > 1
        bytes_in_m_row = 4 * 3200
         with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}):
-            scn = Scene(reader="viirs_edr", filenames=files)
+            scn = Scene(reader="viirs_edr", filenames=data_files)
            scn.load(["surf_refl_I01", "surf_refl_M01"])
         assert scn.start_time == START_TIME
         assert scn.end_time == END_TIME
-        _check_surf_refl_data_arr(scn["surf_refl_I01"], multiple_files=multiple_files)
-        _check_surf_refl_data_arr(scn["surf_refl_M01"],
multiple_files=is_multiple) @pytest.mark.parametrize("filter_veg", [False, True]) - @pytest.mark.parametrize("multiple_files", [False, True]) + @pytest.mark.parametrize( + "data_files", + [ + lazy_fixture("surface_reflectance_with_veg_indices_file2"), + lazy_fixture("multiple_surface_reflectance_files_with_veg_indices"), + ], + ) def test_get_dataset_surf_refl_with_veg_idx( self, - surface_reflectance_with_veg_indices_file, + data_files, filter_veg, - multiple_files ): """Test retrieval of vegetation indices from surface reflectance files.""" from satpy import Scene - files = [surface_reflectance_with_veg_indices_file] - if multiple_files: - files.append(_copy_to_second_granule(surface_reflectance_with_veg_indices_file)) - + if not isinstance(data_files, list): + data_files = [data_files] + is_multiple = len(data_files) > 1 bytes_in_m_row = 4 * 3200 with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}): - scn = Scene(reader="viirs_edr", filenames=files, + scn = Scene(reader="viirs_edr", filenames=data_files, reader_kwargs={"filter_veg": filter_veg}) scn.load(["NDVI", "EVI", "surf_refl_qf1"]) - _check_vi_data_arr(scn["NDVI"], filter_veg, multiple_files) - _check_vi_data_arr(scn["EVI"], filter_veg, multiple_files) - _check_surf_refl_qf_data_arr(scn["surf_refl_qf1"], multiple_files) + _check_vi_data_arr(scn["NDVI"], filter_veg, is_multiple) + _check_vi_data_arr(scn["EVI"], filter_veg, is_multiple) + _check_surf_refl_qf_data_arr(scn["surf_refl_qf1"], is_multiple) @pytest.mark.parametrize( ("var_names", "data_file"), From e5be4e3887f67c865a83590d04c5582a81bad270 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 16 Aug 2023 15:15:41 -0500 Subject: [PATCH 0403/1416] Fix old colormap definition for VIIRS water detection --- satpy/etc/enhancements/viirs.yaml | 129 +++++++++++++++--------------- 1 file changed, 65 insertions(+), 64 deletions(-) diff --git a/satpy/etc/enhancements/viirs.yaml b/satpy/etc/enhancements/viirs.yaml index c740a2a6e6..8b3751167d 100644 --- a/satpy/etc/enhancements/viirs.yaml +++ b/satpy/etc/enhancements/viirs.yaml @@ -14,67 +14,68 @@ enhancements: - name: WaterDetection method: !!python/name:satpy.enhancements.viirs.water_detection kwargs: - palettes: {colors: - [[14, [0.0, 0.0, 0.0]], - [15, [0.0, 0.0, 0.39215686274509803]], - [16, [0.7686274509803922, 0.6352941176470588, 0.4470588235294118]], - [17, [0.7686274509803922, 0.6352941176470588, 0.4470588235294118]], - [18, [0.0, 0.0, 1.0]], - [20, [1.0, 1.0, 1.0]], - [27, [0.0, 1.0, 1.0]], - [30, [0.7843137254901961, 0.7843137254901961, 0.7843137254901961]], - [31, [0.39215686274509803, 0.39215686274509803, 0.39215686274509803]], - [88, [0.7058823529411765, 0.0, 0.9019607843137255]], - [100, [0.19607843137254902, 1.0, 0.39215686274509803]], - [120, [0.19607843137254902, 1.0, 0.39215686274509803]], - [121, [0.0, 1.0, 0.0]], - [130, [0.0, 1.0, 0.0]], - [131, [0.7843137254901961, 1.0, 0.0]], - [140, [0.7843137254901961, 1.0, 0.0]], - [141, [1.0, 1.0, 0.5882352941176471]], - [150, [1.0, 1.0, 0.5882352941176471]], - [151, [1.0, 1.0, 0.0]], - [160, [1.0, 1.0, 0.0]], - [161, [1.0, 0.7843137254901961, 0.0]], - [170, [1.0, 0.7843137254901961, 0.0]], - [171, [1.0, 0.5882352941176471, 0.19607843137254902]], - [180, [1.0, 0.5882352941176471, 0.19607843137254902]], - [181, [1.0, 0.39215686274509803, 0.0]], - [190, [1.0, 0.39215686274509803, 0.0]], - [191, [1.0, 0.0, 0.0]], - [200, [1.0, 0.0, 0.0]], - [201, [0.0, 0.0, 0.0]]], - min_value: 0, - max_value: 201} -# palettes: {colors: -# [[14, [0.0, 0.0, 0.0, 0.0]], -# [15, 
[0.0, 0.0, 0.39215686274509803, 1.0]], -# [16, [0.7686274509803922, 0.6352941176470588, 0.4470588235294118, 1.0]], -# [17, [0.7686274509803922, 0.6352941176470588, 0.4470588235294118, 1.0]], -# [18, [0.0, 0.0, 1.0, 1.0]], -# [20, [1.0, 1.0, 1.0, 1.0]], -# [27, [0.0, 1.0, 1.0, 1.0]], -# [30, [0.7843137254901961, 0.7843137254901961, 0.7843137254901961, 1.0]], -# [31, [0.39215686274509803, 0.39215686274509803, 0.39215686274509803, 1.0]], -# [88, [0.7058823529411765, 0.0, 0.9019607843137255, 1.0]], -# [100, [0.19607843137254902, 1.0, 0.39215686274509803, 1.0]], -# [120, [0.19607843137254902, 1.0, 0.39215686274509803, 1.0]], -# [121, [0.0, 1.0, 0.0, 1.0]], -# [130, [0.0, 1.0, 0.0, 1.0]], -# [131, [0.7843137254901961, 1.0, 0.0, 1.0]], -# [140, [0.7843137254901961, 1.0, 0.0, 1.0]], -# [141, [1.0, 1.0, 0.5882352941176471, 1.0]], -# [150, [1.0, 1.0, 0.5882352941176471, 1.0]], -# [151, [1.0, 1.0, 0.0, 1.0]], -# [160, [1.0, 1.0, 0.0, 1.0]], -# [161, [1.0, 0.7843137254901961, 0.0, 1.0]], -# [170, [1.0, 0.7843137254901961, 0.0, 1.0]], -# [171, [1.0, 0.5882352941176471, 0.19607843137254902, 1.0]], -# [180, [1.0, 0.5882352941176471, 0.19607843137254902, 1.0]], -# [181, [1.0, 0.39215686274509803, 0.0, 1.0]], -# [190, [1.0, 0.39215686274509803, 0.0, 1.0]], -# [191, [1.0, 0.0, 0.0, 1.0]], -# [200, [1.0, 0.0, 0.0, 1.0]], -# [201, [0.0, 0.0, 0.0, 0.0]]], -# min_value: 0, -# max_value: 201} + palettes: { + values: [ + 14, + 15, + 16, + 17, + 18, + 20, + 27, + 30, + 31, + 88, + 100, + 120, + 121, + 130, + 131, + 140, + 141, + 150, + 151, + 160, + 161, + 170, + 171, + 180, + 181, + 190, + 191, + 200, + 201, + ], + colors: [ + [0.0, 0.0, 0.0], + [0.0, 0.0, 0.39215686274509803], + [0.7686274509803922, 0.6352941176470588, 0.4470588235294118], + [0.7686274509803922, 0.6352941176470588, 0.4470588235294118], + [0.0, 0.0, 1.0], + [1.0, 1.0, 1.0], + [0.0, 1.0, 1.0], + [0.7843137254901961, 0.7843137254901961, 0.7843137254901961], + [0.39215686274509803, 0.39215686274509803, 0.39215686274509803], + [0.7058823529411765, 0.0, 0.9019607843137255], + [0.19607843137254902, 1.0, 0.39215686274509803], + [0.19607843137254902, 1.0, 0.39215686274509803], + [0.0, 1.0, 0.0], + [0.0, 1.0, 0.0], + [0.7843137254901961, 1.0, 0.0], + [0.7843137254901961, 1.0, 0.0], + [1.0, 1.0, 0.5882352941176471], + [1.0, 1.0, 0.5882352941176471], + [1.0, 1.0, 0.0], + [1.0, 1.0, 0.0], + [1.0, 0.7843137254901961, 0.0], + [1.0, 0.7843137254901961, 0.0], + [1.0, 0.5882352941176471, 0.19607843137254902], + [1.0, 0.5882352941176471, 0.19607843137254902], + [1.0, 0.39215686274509803, 0.0], + [1.0, 0.39215686274509803, 0.0], + [1.0, 0.0, 0.0], + [1.0, 0.0, 0.0], + [0.0, 0.0, 0.0], + ], + min_value: 0, + max_value: 201} From 04fe3e05a025892c52b450f1a3139ba853c00b1e Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 16 Aug 2023 15:16:24 -0500 Subject: [PATCH 0404/1416] Remove non-existent M06 surface reflectance product --- satpy/etc/readers/viirs_edr.yaml | 9 --------- 1 file changed, 9 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 4f33bcc184..c078e754aa 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -144,15 +144,6 @@ datasets: coordinates: [longitude_750, latitude_750] units: '%' standard_name: "surface_bidirectional_reflectance" - surf_refl_M06: - name: surf_refl_M06 - resolution: 750 - wavelength: [0.739, 0.746, 0.754] - file_type: [jrr_surfref] - file_key: "750m Surface Reflectance Band M6" - coordinates: [longitude_750, latitude_750] - units: '%' - 
standard_name: "surface_bidirectional_reflectance" surf_refl_M07: name: surf_refl_M07 resolution: 750 From 44c5b3970ea927e2a72f95096042a51ed5fe4e82 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 17 Aug 2023 13:03:40 +0200 Subject: [PATCH 0405/1416] Use normalize_chunks for ahi hsd chunk sizes --- satpy/readers/ahi_hsd.py | 10 ++- satpy/tests/reader_tests/test_ahi_hsd.py | 85 ++++++++++++++++++++++++ 2 files changed, 92 insertions(+), 3 deletions(-) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index 86e9aa9e0c..e06f7ebc50 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -78,9 +78,8 @@ np2str, unzip_file, ) -from satpy.utils import get_legacy_chunk_size +from satpy.utils import get_chunk_size_limit -CHUNK_SIZE = get_legacy_chunk_size() AHI_CHANNEL_NAMES = ("1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16") @@ -620,9 +619,14 @@ def _read_data(self, fp_, header): """Read data block.""" nlines = int(header["block2"]['number_of_lines'][0]) ncols = int(header["block2"]['number_of_columns'][0]) + chunks = da.core.normalize_chunks("auto", + shape=(nlines, ncols), + limit=get_chunk_size_limit(), + dtype='f8', + previous_chunks=(550, 550)) return da.from_array(np.memmap(self.filename, offset=fp_.tell(), dtype=' Date: Thu, 17 Aug 2023 09:24:57 -0500 Subject: [PATCH 0406/1416] Fix swath definitions not having all lon/lat metadata --- satpy/readers/viirs_edr.py | 13 +++++++++++++ satpy/tests/reader_tests/test_viirs_edr.py | 8 +++++++- 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index b007e710b3..da61114fca 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -130,6 +130,11 @@ def get_dataset(self, dataset_id: DataID, info: dict) -> xr.DataArray: data_arr.attrs["platform_name"] = self.platform_name data_arr.attrs["sensor"] = self.sensor_name data_arr.attrs["rows_per_scan"] = self.rows_per_scans(data_arr) + if data_arr.attrs.get("standard_name") in ("longitude", "latitude"): + # recursive swath definitions are a problem for the base reader right now + # delete the coordinates here so the base reader doesn't try to + # make a SwathDefinition + data_arr = data_arr.reset_coords(drop=True) return data_arr def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: @@ -201,6 +206,10 @@ def available_datasets(self, configured_datasets=None): handled_var_names = set() for is_avail, ds_info in (configured_datasets or []): file_key = ds_info.get("file_key", ds_info["name"]) + # we must add all variables here even if another file handler has + # claimed the variable. It could be another instance of this file + # type and we don't want to add that variable dynamically if the + # other file handler defined it by the YAML definition. 
handled_var_names.add(file_key) if is_avail is not None: # some other file handler said it has this dataset @@ -246,10 +255,14 @@ def _dynamic_variables_from_file(self, handled_var_names: set) -> Iterable[tuple ds_info["standard_name"] = "longitude" ds_info["units"] = "degrees_east" ds_info["name"] = m_lon_name if res == 750 else i_lon_name + # recursive coordinate/SwathDefinitions are not currently handled well in the base reader + del ds_info["coordinates"] elif is_lat: ds_info["standard_name"] = "latitude" ds_info["units"] = "degrees_north" ds_info["name"] = m_lat_name if res == 750 else i_lat_name + # recursive coordinate/SwathDefinitions are not currently handled well in the base reader + del ds_info["coordinates"] yield True, ds_info diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 2acbd0d55a..146a0cd2c4 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -450,8 +450,14 @@ def _array_checks(data_arr: xr.DataArray, dtype: npt.Dtype = np.float32, multipl def _shared_metadata_checks(data_arr: xr.DataArray) -> None: is_mband_res = _is_mband_res(data_arr) + exp_rps = 16 if is_mband_res else 32 assert data_arr.attrs["sensor"] == "viirs" - assert data_arr.attrs["rows_per_scan"] == 16 if is_mband_res else 32 + assert data_arr.attrs["rows_per_scan"] == exp_rps + + lons = data_arr.attrs["area"].lons + lats = data_arr.attrs["area"].lats + assert lons.attrs["rows_per_scan"] == exp_rps + assert lats.attrs["rows_per_scan"] == exp_rps def _is_mband_res(data_arr: xr.DataArray) -> bool: From 3bb0920fe68b50274baa58051ddb67e4a2f09381 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 21 Aug 2023 09:46:47 +0200 Subject: [PATCH 0407/1416] Allow using s3 files for AMI reader --- satpy/readers/ami_l1b.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/satpy/readers/ami_l1b.py b/satpy/readers/ami_l1b.py index 4569b996fa..9adeaf76f1 100644 --- a/satpy/readers/ami_l1b.py +++ b/satpy/readers/ami_l1b.py @@ -26,6 +26,7 @@ import xarray as xr from pyspectral.blackbody import blackbody_wn_rad2temp as rad2temp +from satpy.readers import open_file_or_filename from satpy.readers._geos_area import get_area_definition, get_area_extent from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import apply_rad_correction, get_user_calibration_factors @@ -93,7 +94,8 @@ def __init__(self, filename, filename_info, filetype_info, user_calibration=None): """Open the NetCDF file with xarray and prepare the Dataset for reading.""" super(AMIL1bNetCDF, self).__init__(filename, filename_info, filetype_info) - self.nc = xr.open_dataset(self.filename, + f_obj = open_file_or_filename(self.filename) + self.nc = xr.open_dataset(f_obj, decode_cf=True, mask_and_scale=False, chunks={'dim_image_x': CHUNK_SIZE, 'dim_image_y': CHUNK_SIZE}) From 54e5d19aaf09b574fc749f0bd7a6657817fe295b Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 21 Aug 2023 09:54:59 +0200 Subject: [PATCH 0408/1416] Fix doc --- satpy/etc/readers/ami_l1b.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/readers/ami_l1b.yaml b/satpy/etc/readers/ami_l1b.yaml index 8366f12117..e1990f9871 100644 --- a/satpy/etc/readers/ami_l1b.yaml +++ b/satpy/etc/readers/ami_l1b.yaml @@ -8,7 +8,7 @@ reader: `here `_. 
sensors: [ami] status: Beta - supports_fsspec: false + supports_fsspec: true default_channels: reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader # file pattern keys to sort files by with 'satpy.utils.group_files' From aef188b283164f5fcf12fd94fb71702fed61c235 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 21 Aug 2023 08:20:01 -0500 Subject: [PATCH 0409/1416] Bump xarray expected version in test_cf.py --- satpy/tests/writer_tests/test_cf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index baeb45a4e4..54770b9176 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -1448,5 +1448,5 @@ def _should_use_compression_keyword(): versions = _get_backend_versions() return ( versions["libnetcdf"] >= Version("4.9.0") and - versions["xarray"] >= Version("2023.8") + versions["xarray"] >= Version("2023.9") ) From bd1c7f3e9e56b389dfff5738e1f7961b233f1b71 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 22 Aug 2023 05:52:31 +0000 Subject: [PATCH 0410/1416] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.5.0 → v1.5.1](https://github.com/pre-commit/mirrors-mypy/compare/v1.5.0...v1.5.1) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1b7889838a..3999be8b04 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -20,7 +20,7 @@ repos: - id: bandit args: [--ini, .bandit] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.5.0' # Use the sha / tag you want to point at + rev: 'v1.5.1' # Use the sha / tag you want to point at hooks: - id: mypy additional_dependencies: From 6cd7f5ca4c98b9b925cbcb7a2709082164b5f55a Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 22 Aug 2023 10:50:33 +0100 Subject: [PATCH 0411/1416] Add OLCI enhancement YAML to cope with the `mask` dataset. --- satpy/etc/enhancements/olci.yaml | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 satpy/etc/enhancements/olci.yaml diff --git a/satpy/etc/enhancements/olci.yaml b/satpy/etc/enhancements/olci.yaml new file mode 100644 index 0000000000..9724b986aa --- /dev/null +++ b/satpy/etc/enhancements/olci.yaml @@ -0,0 +1,4 @@ +enhancements: + mask: + name: mask + operations: [] From 38bda954fc2a7b80bf394ea6bf71a4616bfb332f Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Wed, 23 Aug 2023 14:28:32 +0200 Subject: [PATCH 0412/1416] Implement non-linearity term for NDVI-weighted hybrid-green correction when converting NDVI to blend fraction. --- satpy/composites/spectral.py | 27 ++++++++++++++++--- satpy/tests/compositor_tests/test_spectral.py | 20 ++++++++++---- 2 files changed, 38 insertions(+), 9 deletions(-) diff --git a/satpy/composites/spectral.py b/satpy/composites/spectral.py index c0ccaff64f..5e6e03c148 100644 --- a/satpy/composites/spectral.py +++ b/satpy/composites/spectral.py @@ -114,8 +114,8 @@ class NDVIHybridGreen(SpectralBlender): This green band correction follows the same approach as the HybridGreen compositor, but with a dynamic blend factor `f` that depends on the pixel-level Normalized Differece Vegetation Index (NDVI). 
The higher the NDVI, the
-    smaller the contribution from the nir channel will be, following a liner relationship between the two ranges
-    `[ndvi_min, ndvi_max]` and `limits`.
+    smaller the contribution from the nir channel will be, following a linear (default) or non-linear relationship
+    between the two ranges `[ndvi_min, ndvi_max]` and `limits`.
 
     As an example, a new green channel using e.g. FCI data and the NDVIHybridGreen compositor can be defined like::
 
@@ -124,6 +124,7 @@ class NDVIHybridGreen(SpectralBlender):
         ndvi_min: 0.0
         ndvi_max: 1.0
         limits: [0.15, 0.05]
+        strength: 1.0
         prerequisites:
           - name: vis_05
             modifiers: [sunz_corrected, rayleigh_corrected]
@@ -138,17 +139,29 @@ class NDVIHybridGreen(SpectralBlender):
     pixels with NDVI=1.0 will be a weighted average with 5% contribution from the near-infrared vis_08 channel
     and the remaining 95% from the native green vis_05 channel. For other values of NDVI a linear interpolation
     between these values will be performed.
+
+    A strength larger or smaller than 1.0 will introduce a non-linear relationship between the two ranges
+    `[ndvi_min, ndvi_max]` and `limits`. Hence, a higher strength (> 1.0) will result in a slower transition
+    to higher/lower fractions at the NDVI extremes. Similarly, a lower strength (< 1.0) will result in a
+    faster transition to higher/lower fractions at the NDVI extremes.
     """
 
-    def __init__(self, *args, ndvi_min=0.0, ndvi_max=1.0, limits=(0.15, 0.05), **kwargs):
-        """Initialize class and set the NDVI limits and the corresponding blending fraction limits."""
+    def __init__(self, *args, ndvi_min=0.0, ndvi_max=1.0, limits=(0.15, 0.05), strength=1.0, **kwargs):
+        """Initialize class and set the NDVI limits, blending fraction limits and strength."""
+        if strength <= 0.0:
+            raise ValueError(f"Expected strength greater than 0.0, got {strength}.")
+
         self.ndvi_min = ndvi_min
         self.ndvi_max = ndvi_max
         self.limits = limits
+        self.strength = strength
         super().__init__(*args, **kwargs)
 
     def __call__(self, projectables, optional_datasets=None, **attrs):
         """Construct the hybrid green channel weighted by NDVI."""
+        LOG.info(f"Applying NDVI-weighted hybrid-green correction with limits [{self.limits[0]}, "
+                 f"{self.limits[1]}] and strength {self.strength}.")
+
         ndvi_input = self.match_data_arrays([projectables[1], projectables[2]])
 
         ndvi = (ndvi_input[1] - ndvi_input[0]) / (ndvi_input[1] + ndvi_input[0])
@@ -156,6 +169,12 @@ def __call__(self, projectables, optional_datasets=None, **attrs):
         ndvi.data = da.where(ndvi > self.ndvi_min, ndvi, self.ndvi_min)
         ndvi.data = da.where(ndvi < self.ndvi_max, ndvi, self.ndvi_max)
 
+        # Apply non-linearity to the ndvi for a non-linear conversion from ndvi to fraction. This can be used for a
+        # slower or faster transition to higher/lower fractions at the ndvi extremes. If strength equals 1.0, this
+        # operation has no effect on the ndvi.
+        ndvi = ndvi ** self.strength / (ndvi ** self.strength + (1 - ndvi) ** self.strength)
+
+        # Compute blending fraction from ndvi
         fraction = (ndvi - self.ndvi_min) / (self.ndvi_max - self.ndvi_min) * (self.limits[1] - self.limits[0]) \
             + self.limits[0]
         self.fractions = (1 - fraction, fraction)
diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py
index 467adf119b..03e51a5043 100644
--- a/satpy/tests/compositor_tests/test_spectral.py
+++ b/satpy/tests/compositor_tests/test_spectral.py
@@ -14,7 +14,6 @@
 # You should have received a copy of the GNU General Public License along with
 # satpy. If not, see .
"""Tests for spectral correction compositors.""" -import warnings import dask.array as da import numpy as np @@ -78,6 +77,7 @@ def test_ndvi_hybrid_green(self): comp = NDVIHybridGreen('ndvi_hybrid_green', limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), standard_name='toa_bidirectional_reflectance') + # Test General functionality with linear strength (=1.0) res = comp((self.c01, self.c02, self.c03)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) @@ -86,12 +86,22 @@ def test_ndvi_hybrid_green(self): data = res.values np.testing.assert_array_almost_equal(data, np.array([[0.2633, 0.3071], [0.2115, 0.3420]]), decimal=4) + # Test invalid strength + with pytest.raises(ValueError): + _ = NDVIHybridGreen('ndvi_hybrid_green', strength=0.0, prerequisites=(0.51, 0.65, 0.85), + standard_name='toa_bidirectional_reflectance') + + # Test non-linear strength + comp = NDVIHybridGreen('ndvi_hybrid_green', limits=(0.15, 0.05), strength=2.0, prerequisites=(0.51, 0.65, 0.85), + standard_name='toa_bidirectional_reflectance') + + res = comp((self.c01, self.c02, self.c03)) + np.testing.assert_array_almost_equal(res.values, np.array([[0.2646, 0.3075], [0.2120, 0.3471]]), decimal=4) + def test_green_corrector(self): """Test the deprecated class for green corrections.""" - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", category=UserWarning, message=r'.*deprecated.*') - comp = GreenCorrector('blended_channel', fractions=(0.85, 0.15), prerequisites=(0.51, 0.85), - standard_name='toa_bidirectional_reflectance') + comp = GreenCorrector('blended_channel', fractions=(0.85, 0.15), prerequisites=(0.51, 0.85), + standard_name='toa_bidirectional_reflectance') res = comp((self.c01, self.c03)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) From 478a3b5ef6e81bbfb8489cca5df50ef78698670e Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Wed, 23 Aug 2023 14:32:47 +0200 Subject: [PATCH 0413/1416] Modify default strength for FCI green band correction to be in line with EUMETSAT recipe used for first public images. --- satpy/etc/composites/fci.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml index 193415656b..30860fbb28 100644 --- a/satpy/etc/composites/fci.yaml +++ b/satpy/etc/composites/fci.yaml @@ -7,10 +7,11 @@ composites: The FCI green band at 0.51 µm deliberately misses the chlorophyll band, such that the signal comes from aerosols and ash rather than vegetation. An effect is that vegetation in a true colour RGB looks rather brown than green and barren rather red. Mixing in - some part of the NIR 0.8 channel reduced this effect. Note that the fractions + some part of the NIR 0.8 channel reduced this effect. Note that the fractions and non-linear strength currently implemented are experimental and may change in future versions of Satpy. compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen limits: [0.15, 0.05] + strength: 3.0 prerequisites: - name: vis_05 modifiers: [sunz_corrected, rayleigh_corrected] @@ -25,6 +26,7 @@ composites: Alternative to ndvi_hybrid_green, but without solar zenith or rayleigh correction. 
compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen limits: [0.15, 0.05] + strength: 3.0 prerequisites: - name: vis_05 - name: vis_06 From 2c0406010d49b69add2de73621c7b5c77adeb680 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Wed, 23 Aug 2023 16:19:14 +0200 Subject: [PATCH 0414/1416] Specify name of GeoColor enhancements. Add enhancement for GeoColor day-night blend. --- satpy/etc/enhancements/generic.yaml | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 362625fa15..fd2c02fe48 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -838,8 +838,8 @@ enhancements: kwargs: weight: 1.0 - ir_high_cloud: - standard_name: ir_high_cloud + geo_color_high_clouds: + standard_name: geo_color_high_clouds operations: - name: inverse method: !!python/name:satpy.enhancements.invert @@ -849,11 +849,9 @@ enhancements: method: !!python/name:satpy.enhancements.stretch kwargs: stretch: linear - - name: 3d - method: !!python/name:satpy.enhancements.three_d_effect - ir_low_cloud: - standard_name: ir_low_cloud + geo_color_low_clouds: + standard_name: geo_color_low_clouds operations: - name: inverse method: !!python/name:satpy.enhancements.invert @@ -869,6 +867,16 @@ enhancements: palettes: - {colors: [[140.25, 191.25, 249.9]]} + geo_color_day_night_blend: + standard_name: geo_color_day_night_blend + operations: + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: crude + min_stretch: [ 0,0,0 ] + max_stretch: [ 1,1,1 ] + colorized_ir_clouds: standard_name: colorized_ir_clouds operations: From 2c4f086e2a5d65ea4bf712bc745d930b611e5a7b Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Wed, 23 Aug 2023 16:20:15 +0200 Subject: [PATCH 0415/1416] Add FCI GeoColor recipes. 
--- satpy/etc/composites/fci.yaml | 47 +++++++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml index 4e450a9779..4c091bd012 100644 --- a/satpy/etc/composites/fci.yaml +++ b/satpy/etc/composites/fci.yaml @@ -64,3 +64,50 @@ composites: - name: ndvi_hybrid_green_raw - name: vis_04 standard_name: true_color_raw + + # GeoColor + geo_color: + compositor: !!python/name:satpy.composites.DayNightCompositor + lim_low: 73 + lim_high: 82 + standard_name: geo_color_day_night_blend + prerequisites: + - true_color + - geo_color_night + + # GeoColor Night-time + geo_color_high_clouds: + standard_name: geo_color_high_clouds + compositor: !!python/name:satpy.composites.HighCloudCompositor + prerequisites: + - name: ir_105 + + geo_color_low_clouds: + standard_name: geo_color_low_clouds + compositor: !!python/name:satpy.composites.LowCloudCompositor + values_sea: 0 + values_land: 100 + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: ir_105 + - name: ir_38 + - name: ir_105 + - compositor: !!python/name:satpy.composites.StaticImageCompositor + standard_name: land_sea_mask + # TODO Change filename + filename: "/tcenas/proj/optcalimg/strandgren/GeoColor/static_data/gshhs_land_sea_mask_3km_i.tif" + + geo_color_background_with_low_clouds: + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background + prerequisites: + - geo_color_low_clouds + - _night_background_hires + + geo_color_night: + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background + prerequisites: + - geo_color_high_clouds + - geo_color_background_with_low_clouds From b0a6172c39a7517b44569d4b0d46c20fb028135c Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Wed, 23 Aug 2023 16:29:41 +0200 Subject: [PATCH 0416/1416] Align AHI GeoColor recipes with FCI developments/recipes. --- satpy/etc/composites/ahi.yaml | 53 ++++++++--------------------------- 1 file changed, 11 insertions(+), 42 deletions(-) diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index 5c73eea7e9..c0008366cc 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -14,22 +14,6 @@ modifiers: - solar_azimuth_angle - solar_zenith_angle - geo_color_rayleigh_corrected: - modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance - atmosphere: us-standard - aerosol_type: rayleigh_only - reduce_lim_low: 70 - reduce_lim_high: 105 - reduce_strength: 1.5 - prerequisites: - - name: B03 - modifiers: [sunz_corrected] - optional_prerequisites: - - satellite_azimuth_angle - - satellite_zenith_angle - - solar_azimuth_angle - - solar_zenith_angle - composites: green: deprecation_warning: "'green' is a deprecated composite. Use the equivalent 'hybrid_green' instead." 
@@ -488,30 +472,22 @@ composites: geo_color: compositor: !!python/name:satpy.composites.DayNightCompositor - lim_low: 80 - lim_high: 88 - standard_name: true_color_with_night_ir + lim_low: 73 + lim_high: 82 + standard_name: geo_color_day_night_blend prerequisites: - geo_color_true_color - geo_color_night - geo_color_without_background: - compositor: !!python/name:satpy.composites.DayNightCompositor - lim_low: 80 - lim_high: 88 - standard_name: true_color_with_night_ir - prerequisites: - - geo_color_true_color - - geo_color_night_without_background - # GeoColor Daytime geo_color_green: compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen + limits: [0.15, 0.05] prerequisites: - name: B02 - modifiers: [ sunz_corrected, geo_color_rayleigh_corrected ] + modifiers: [ sunz_corrected, rayleigh_corrected ] - name: B03 - modifiers: [ sunz_corrected, geo_color_rayleigh_corrected ] + modifiers: [ sunz_corrected, rayleigh_corrected ] - name: B04 modifiers: [ sunz_corrected ] standard_name: toa_bidirectional_reflectance @@ -520,22 +496,22 @@ composites: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: B03 - modifiers: [ sunz_corrected, geo_color_rayleigh_corrected ] + modifiers: [ sunz_corrected, rayleigh_corrected ] - name: geo_color_green - name: B01 - modifiers: [ sunz_corrected, geo_color_rayleigh_corrected ] + modifiers: [ sunz_corrected, rayleigh_corrected ] high_resolution_band: red standard_name: true_color # GeoColor Night-time geo_color_high_clouds: - standard_name: ir_high_cloud + standard_name: geo_color_high_clouds compositor: !!python/name:satpy.composites.HighCloudCompositor prerequisites: - name: B13 geo_color_low_clouds: - standard_name: ir_low_cloud + standard_name: geo_color_low_clouds compositor: !!python/name:satpy.composites.LowCloudCompositor values_sea: 0 values_land: 100 @@ -548,7 +524,7 @@ composites: - compositor: !!python/name:satpy.composites.StaticImageCompositor standard_name: land_sea_mask # TODO Change filename - filename: "/tcenas/scratch/strandgren/GeoColor/gshhs_land_sea_mask_3km_i.tif" + filename: "/tcenas/proj/optcalimg/strandgren/GeoColor/static_data/gshhs_land_sea_mask_3km_i.tif" geo_color_background_with_low_clouds: compositor: !!python/name:satpy.composites.BackgroundCompositor @@ -563,10 +539,3 @@ composites: prerequisites: - geo_color_high_clouds - geo_color_background_with_low_clouds - - geo_color_night_without_background: - compositor: !!python/name:satpy.composites.BackgroundCompositor - standard_name: night_ir - prerequisites: - - geo_color_low_clouds - - geo_color_high_clouds From c8aa8aeebce0e1fbba99d4d6117582d0bb0792de Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Wed, 23 Aug 2023 16:47:54 +0200 Subject: [PATCH 0417/1416] Modify thresholds of FCI GeoColor low cloud detection in order to reduce false alarms with early FCI data. 
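
For reference, the thresholds scale the IR10.5 - IR3.8 brightness temperature
difference into a low-cloud weight roughly as in this simplified sketch (a
stand-in for the actual LowCloudCompositor scaling, shown only to make the
effect of the new ranges concrete):

    import numpy as np

    def btd_weight(btd, value_range):
        # Linearly rescale the brightness temperature difference (K) to 0-1,
        # clipping values outside the threshold range.
        low, high = value_range
        return np.clip((btd - low) / (high - low), 0.0, 1.0)

    # With the new ranges, land pixels need a larger BTD than sea pixels
    # before they are weighted as low cloud.
    print(btd_weight(np.array([2.0, 5.0, 7.0]), (4.35, 6.75)))  # land
    print(btd_weight(np.array([2.0, 5.0, 7.0]), (1.35, 5.0)))   # sea
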
---
 satpy/etc/composites/fci.yaml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml
index 4c091bd012..caa8858734 100644
--- a/satpy/etc/composites/fci.yaml
+++ b/satpy/etc/composites/fci.yaml
@@ -87,6 +87,8 @@ composites:
     compositor: !!python/name:satpy.composites.LowCloudCompositor
     values_sea: 0
     values_land: 100
+    range_land: [4.35, 6.75]
+    range_sea: [1.35, 5.0]
     prerequisites:
       - compositor: !!python/name:satpy.composites.DifferenceCompositor
         prerequisites:

From 6e410f4aa8483b79cdd5900e5e64a93cbcff8882 Mon Sep 17 00:00:00 2001
From: Johan Strandgren
Date: Thu, 24 Aug 2023 08:33:12 +0200
Subject: [PATCH 0418/1416] Add non-linearity term to AHI NDVI green correction.

---
 satpy/etc/composites/ahi.yaml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml
index 9e36bf7d7f..9f771b712c 100644
--- a/satpy/etc/composites/ahi.yaml
+++ b/satpy/etc/composites/ahi.yaml
@@ -105,6 +105,8 @@ composites:
   ndvi_hybrid_green:
     compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen
+    limits: [0.15, 0.05]
+    strength: 3.0
     prerequisites:
       - name: B02
         modifiers: [sunz_corrected, rayleigh_corrected]

From 1d26f79489149b37e61be97d5adaf2209651d1e8 Mon Sep 17 00:00:00 2001
From: Simon Proud
Date: Thu, 24 Aug 2023 11:45:48 +0100
Subject: [PATCH 0419/1416] Update AMI `true_color_reproduction` composites to
 use the hybrid NDVI green method rather than the deprecated green method.

---
 satpy/etc/composites/ami.yaml | 33 +++++++++++++++++++++++++++++++--
 1 file changed, 31 insertions(+), 2 deletions(-)

diff --git a/satpy/etc/composites/ami.yaml b/satpy/etc/composites/ami.yaml
index 55466ea7e1..5a8608d795 100644
--- a/satpy/etc/composites/ami.yaml
+++ b/satpy/etc/composites/ami.yaml
@@ -60,6 +60,35 @@ composites:
     standard_name: toa_reflectance
     fraction: 0.15

+  ndvi_hybrid_green:
+    description: >
+      The AMI green band at 0.51 µm deliberately misses the chlorophyll band, such that
+      the signal comes from aerosols and ash rather than vegetation. An effect
+      is that vegetation in a true colour RGB looks rather brown than green and barren rather red. Mixing in
+      some part of the NIR 0.8 channel reduced this effect. Note that the fractions
+      currently implemented are experimental and may change in future versions of Satpy.
+    compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen
+    limits: [0.15, 0.05]
+    prerequisites:
+      - name: VI005
+        modifiers: [sunz_corrected, rayleigh_corrected]
+      - name: VI006
+        modifiers: [sunz_corrected, rayleigh_corrected]
+      - name: VI008
+        modifiers: [sunz_corrected ]
+    standard_name: toa_bidirectional_reflectance
+
+  ndvi_hybrid_green_raw:
+    description: >
+      Alternative to ndvi_hybrid_green, but without solar zenith or rayleigh correction.
+ compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen + limits: [0.15, 0.05] + prerequisites: + - name: VI005 + - name: VI006 + - name: VI008 + standard_name: toa_bidirectional_reflectance + true_color_raw: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: @@ -278,7 +307,7 @@ composites: prerequisites: - name: VI006 modifiers: [sunz_corrected, rayleigh_corrected] - - name: green + - name: ndvi_hybrid_green - name: VI004 modifiers: [sunz_corrected, rayleigh_corrected] standard_name: true_color_reproduction_color_stretch @@ -288,6 +317,6 @@ composites: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: VI006 - - name: green_nocorr + - name: ndvi_hybrid_green_raw - name: VI004 standard_name: true_color_reproduction_color_stretch From 813845f863a0ddfd61b85e24978262f7a0183166 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 24 Aug 2023 17:16:17 +0200 Subject: [PATCH 0420/1416] Restore NDVI true color recipe and use in GeoColor. --- satpy/etc/composites/ahi.yaml | 48 +++++++++++++++++------------------ 1 file changed, 23 insertions(+), 25 deletions(-) diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index c0008366cc..a4e0994a9c 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -103,6 +103,17 @@ composites: - wavelength: 0.85 standard_name: toa_reflectance + ndvi_hybrid_green: + compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen + prerequisites: + - name: B02 + modifiers: [ sunz_corrected, rayleigh_corrected ] + - name: B03 + modifiers: [ sunz_corrected, rayleigh_corrected ] + - name: B04 + modifiers: [ sunz_corrected ] + standard_name: toa_bidirectional_reflectance + airmass: # PDF slides: https://www.eumetsat.int/website/home/News/ConferencesandEvents/DAT_2833302.html # Under session 2 by Akihiro Shimizu (JMA) @@ -260,6 +271,17 @@ composites: high_resolution_band: red standard_name: true_color + true_color_ndvi_green: + compositor: !!python/name:satpy.composites.SelfSharpenedRGB + prerequisites: + - name: B03 + modifiers: [ sunz_corrected, rayleigh_corrected ] + - name: ndvi_hybrid_green + - name: B01 + modifiers: [ sunz_corrected, rayleigh_corrected ] + high_resolution_band: red + standard_name: true_color + natural_color_nocorr: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: @@ -476,33 +498,9 @@ composites: lim_high: 82 standard_name: geo_color_day_night_blend prerequisites: - - geo_color_true_color + - true_color_ndvi_green - geo_color_night - # GeoColor Daytime - geo_color_green: - compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen - limits: [0.15, 0.05] - prerequisites: - - name: B02 - modifiers: [ sunz_corrected, rayleigh_corrected ] - - name: B03 - modifiers: [ sunz_corrected, rayleigh_corrected ] - - name: B04 - modifiers: [ sunz_corrected ] - standard_name: toa_bidirectional_reflectance - - geo_color_true_color: - compositor: !!python/name:satpy.composites.SelfSharpenedRGB - prerequisites: - - name: B03 - modifiers: [ sunz_corrected, rayleigh_corrected ] - - name: geo_color_green - - name: B01 - modifiers: [ sunz_corrected, rayleigh_corrected ] - high_resolution_band: red - standard_name: true_color - # GeoColor Night-time geo_color_high_clouds: standard_name: geo_color_high_clouds From fcbae549dd85353d282004f956860978fa8c7d08 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Thu, 24 Aug 2023 21:30:56 +0100 Subject: [PATCH 0421/1416] Initial commit for Himawari L2 NOAA enterprise cloud 
products. --- satpy/etc/readers/ahi_l2_nc.yaml | 21 ++++++ satpy/readers/ahi_l2_nc.py | 112 +++++++++++++++++++++++++++++++ 2 files changed, 133 insertions(+) create mode 100644 satpy/etc/readers/ahi_l2_nc.yaml create mode 100644 satpy/readers/ahi_l2_nc.py diff --git a/satpy/etc/readers/ahi_l2_nc.yaml b/satpy/etc/readers/ahi_l2_nc.yaml new file mode 100644 index 0000000000..b1f0461838 --- /dev/null +++ b/satpy/etc/readers/ahi_l2_nc.yaml @@ -0,0 +1,21 @@ +reader: + name: ahi_l2_nc + short_name: AHI L2 NetCDF4 + long_name: Himawari-8/9 AHI Level 2 products in netCDF4 format from NOAA enterprise + status: Beta + supports_fsspec: true + sensors: ['ahi'] + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + + +file_types: + ahi_l2_cloudmask: + file_reader: !!python/name:satpy.readers.ahi_l2_nc.HIML2NCFileHandler + file_patterns: + - '{sensor:3s}-{product:_4s}_{version:4s}_{platform:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc' + +datasets: + cloudmask: + name: cloudmask + file_key: CloudMask + file_type: [ ahi_l2_cloudmask ] \ No newline at end of file diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py new file mode 100644 index 0000000000..68c4f55a38 --- /dev/null +++ b/satpy/readers/ahi_l2_nc.py @@ -0,0 +1,112 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2018 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . 
+"""Reader for Himawari L2 cloud products from NOAA's big data programme.""" + +import logging +from datetime import datetime + +import xarray as xr + +from satpy._compat import cached_property +from satpy.readers._geos_area import get_area_definition, get_area_extent +from satpy.readers.file_handlers import BaseFileHandler +from satpy.utils import get_legacy_chunk_size + +logger = logging.getLogger(__name__) + +CHUNK_SIZE = get_legacy_chunk_size() + +EXPECTED_DATA_AREA = 'Full Disk' + + +class HIML2NCFileHandler(BaseFileHandler): + """File handler for Himawari L2 NOAA enterprise data in netCDF format.""" + + def __init__(self, filename, filename_info, filetype_info, geo_data=None): + """Initialize the reader.""" + super(HIML2NCFileHandler, self).__init__(filename, filename_info, + filetype_info) + self.nc = xr.open_dataset(self.filename, + decode_cf=True, + mask_and_scale=False, + chunks={'xc': CHUNK_SIZE, 'yc': CHUNK_SIZE}) + + # Check that file is a full disk scene, we don't know the area for anything else + if self.nc.attrs['cdm_data_type'] != EXPECTED_DATA_AREA: + raise ValueError('File is not a full disk scene') + + self.sensor = self.nc.attrs['instrument_name'].lower() + self.nlines = self.nc.dims['Columns'] + self.ncols = self.nc.dims['Rows'] + self.platform_name = self.nc.attrs['satellite_name'] + self.platform_shortname = filename_info['platform'] + self._meta = None + + @property + def start_time(self): + """Start timestamp of the dataset.""" + dt = self.nc.attrs['time_coverage_start'] + return datetime.strptime(dt, '%Y-%m-%dT%H:%M:%SZ') + + @property + def end_time(self): + """End timestamp of the dataset.""" + dt = self.nc.attrs['time_coverage_end'] + return datetime.strptime(dt, '%Y-%m-%dT%H:%M:%SZ') + + def get_dataset(self, key, info): + """Load a dataset.""" + var = info['file_key'] + logger.debug('Reading in get_dataset %s.', var) + variable = self.nc[var] + variable.attrs.update(key.to_dict()) + return variable + + @cached_property + def area(self): + """Get AreaDefinition representing this file's data.""" + return self._get_area_def() + + def get_area_def(self, dsid): + """Get the area definition.""" + del dsid + return self.area + + def _get_area_def(self): + logger.warning('This product misses metadata required to produce an appropriate area definition.' 
+ 'Assuming standard Himawari-8/9 full disk projection.') + pdict = {} + pdict['cfac'] = 20466275 + pdict['lfac'] = 20466275 + pdict['coff'] = 2750.5 + pdict['loff'] = 2750.5 + pdict['a'] = 6378137.0 + pdict['h'] = 35785863.0 + pdict['b'] = 6356752.3 + pdict['ssp_lon'] = 140.7 + pdict['nlines'] = self.nlines + pdict['ncols'] = self.ncols + pdict['scandir'] = 'N2S' + + aex = get_area_extent(pdict) + + pdict['a_name'] = 'Himawari_Area' + pdict['a_desc'] = "AHI Full Disk area" + pdict['p_id'] = f'geos{self.platform_shortname}' + + return get_area_definition(pdict, aex) \ No newline at end of file From fa491a6816cf218f0e65a7cb1dd3c1e47d46f6fa Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 24 Aug 2023 21:01:44 +0000 Subject: [PATCH 0422/1416] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- satpy/etc/readers/ahi_l2_nc.yaml | 2 +- satpy/readers/ahi_l2_nc.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/etc/readers/ahi_l2_nc.yaml b/satpy/etc/readers/ahi_l2_nc.yaml index b1f0461838..04d0571f79 100644 --- a/satpy/etc/readers/ahi_l2_nc.yaml +++ b/satpy/etc/readers/ahi_l2_nc.yaml @@ -18,4 +18,4 @@ datasets: cloudmask: name: cloudmask file_key: CloudMask - file_type: [ ahi_l2_cloudmask ] \ No newline at end of file + file_type: [ ahi_l2_cloudmask ] diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index 68c4f55a38..76823017d3 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -109,4 +109,4 @@ def _get_area_def(self): pdict['a_desc'] = "AHI Full Disk area" pdict['p_id'] = f'geos{self.platform_shortname}' - return get_area_definition(pdict, aex) \ No newline at end of file + return get_area_definition(pdict, aex) From 865c7f634c05e1aa2dd3147da0228b766cc655b4 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 25 Aug 2023 08:49:28 +0100 Subject: [PATCH 0423/1416] Correct typo in AHI L2 code. --- satpy/readers/ahi_l2_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index 68c4f55a38..f795eec748 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -106,7 +106,7 @@ def _get_area_def(self): aex = get_area_extent(pdict) pdict['a_name'] = 'Himawari_Area' - pdict['a_desc'] = "AHI Full Disk area" + pdict['a_desc'] = "AHI Full Disk area" pdict['p_id'] = f'geos{self.platform_shortname}' return get_area_definition(pdict, aex) \ No newline at end of file From ad89b9a4ad9a514ab8f2cb2ad593a0c0858eac59 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 25 Aug 2023 08:54:46 +0100 Subject: [PATCH 0424/1416] Add basic check to ensure we're working on a full disk scene. --- satpy/readers/ahi_l2_nc.py | 19 +++++++------------ 1 file changed, 7 insertions(+), 12 deletions(-) diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index 3167896e66..3db9d1528c 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -90,18 +90,13 @@ def get_area_def(self, dsid): def _get_area_def(self): logger.warning('This product misses metadata required to produce an appropriate area definition.' 
'Assuming standard Himawari-8/9 full disk projection.') - pdict = {} - pdict['cfac'] = 20466275 - pdict['lfac'] = 20466275 - pdict['coff'] = 2750.5 - pdict['loff'] = 2750.5 - pdict['a'] = 6378137.0 - pdict['h'] = 35785863.0 - pdict['b'] = 6356752.3 - pdict['ssp_lon'] = 140.7 - pdict['nlines'] = self.nlines - pdict['ncols'] = self.ncols - pdict['scandir'] = 'N2S' + + # Basic check to ensure we're processing a full disk (2km) scene. + if self.nlines != 5500 or self.ncols != 5500: + raise ValueError("Input L2 file is not a full disk Himawari scene. Only full disk data is supported.") + + pdict = {'cfac': 20466275, 'lfac': 20466275, 'coff': 2750.5, 'loff': 2750.5, 'a': 6378137.0, 'h': 35785863.0, + 'b': 6356752.3, 'ssp_lon': 140.7, 'nlines': self.nlines, 'ncols': self.ncols, 'scandir': 'N2S'} aex = get_area_extent(pdict) From 70f30e68bd539e113e2555a7c9bc0b99c2f2b69d Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Fri, 25 Aug 2023 12:18:40 +0200 Subject: [PATCH 0425/1416] Add Geocolor recipes for ABI. --- satpy/etc/composites/abi.yaml | 47 +++++++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) diff --git a/satpy/etc/composites/abi.yaml b/satpy/etc/composites/abi.yaml index 489ef1f210..2f5d68e09e 100644 --- a/satpy/etc/composites/abi.yaml +++ b/satpy/etc/composites/abi.yaml @@ -705,3 +705,50 @@ composites: prerequisites: - name: C14 standard_name: highlighted_toa_brightness_temperature + + # GeoColor + geo_color: + compositor: !!python/name:satpy.composites.DayNightCompositor + lim_low: 73 + lim_high: 82 + standard_name: geo_color_day_night_blend + prerequisites: + - true_color + - geo_color_night + + # GeoColor Night-time + geo_color_high_clouds: + standard_name: geo_color_high_clouds + compositor: !!python/name:satpy.composites.HighCloudCompositor + prerequisites: + - name: C13 + + geo_color_low_clouds: + standard_name: geo_color_low_clouds + compositor: !!python/name:satpy.composites.LowCloudCompositor + values_sea: 0 + values_land: 100 + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: C13 + - name: C07 + - name: C13 + - compositor: !!python/name:satpy.composites.StaticImageCompositor + standard_name: land_sea_mask + # TODO Change filename + filename: "/tcenas/proj/optcalimg/strandgren/GeoColor/static_data/gshhs_land_sea_mask_3km_i.tif" + + geo_color_background_with_low_clouds: + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background + prerequisites: + - geo_color_low_clouds + - _night_background_hires + + geo_color_night: + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background + prerequisites: + - geo_color_high_clouds + - geo_color_background_with_low_clouds From 81bb4c8b2b45c36cc6b372504c6205754fb4bdce Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 25 Aug 2023 11:47:52 +0100 Subject: [PATCH 0426/1416] Add tests for AHI L2 reader. 
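
The tests synthesise a small fake full-disk product with h5netcdf instead of
shipping real data, then check the timestamps, the hardcoded area definition
and the data values. For reference, the expected extent asserted below can be
reproduced with the same helper the reader itself uses; a minimal sketch,
assuming satpy is importable (the pdict values mirror the ones hardcoded in
the reader's _get_area_def()):

    from satpy.readers._geos_area import get_area_extent

    # Parameters copied from the reader's hardcoded full disk projection
    pdict = {'cfac': 20466275, 'lfac': 20466275, 'coff': 2750.5, 'loff': 2750.5,
             'a': 6378137.0, 'h': 35785863.0, 'b': 6356752.3, 'ssp_lon': 140.7,
             'nlines': 5500, 'ncols': 5500, 'scandir': 'N2S'}
    # Should print values close to (-5499999.9012, -5499999.9012, 5499999.9012, 5499999.9012)
    print(get_area_extent(pdict))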
--- satpy/tests/reader_tests/test_ahi_l2_nc.py | 110 +++++++++++++++++++++ 1 file changed, 110 insertions(+) create mode 100644 satpy/tests/reader_tests/test_ahi_l2_nc.py diff --git a/satpy/tests/reader_tests/test_ahi_l2_nc.py b/satpy/tests/reader_tests/test_ahi_l2_nc.py new file mode 100644 index 0000000000..68f8c3a420 --- /dev/null +++ b/satpy/tests/reader_tests/test_ahi_l2_nc.py @@ -0,0 +1,110 @@ +"""Tests for the Himawari L2 netCDF reader.""" + +from datetime import datetime + +import h5netcdf +import numpy as np +import pytest + +from satpy.readers.ahi_l2_nc import HIML2NCFileHandler +from satpy.tests.utils import make_dataid + +rng = np.random.default_rng() +clmk_data = rng.integers(0, 3, (5500, 5500), dtype=np.uint16) +cprob_data = rng.uniform(0, 1, (5500, 5500)) + +start_time = datetime(2023, 8, 24, 5, 40, 21) +end_time = datetime(2023, 8, 24, 5, 49, 40) + +dimensions = {'Columns': 5500, 'Rows': 5500} + +exp_ext = (-5499999.9012, -5499999.9012, 5499999.9012, 5499999.9012) + +global_attrs = {"time_coverage_start": start_time.strftime("%Y-%m-%dT%H:%M:%SZ"), + "time_coverage_end": end_time.strftime("%Y-%m-%dT%H:%M:%SZ"), + "instrument_name": "AHI", + "satellite_name": "Himawari-9", + "cdm_data_type": "Full Disk", + } + +badarea_attrs = global_attrs.copy() +badarea_attrs['cdm_data_type'] = 'bad_area' + + +def ahil2_filehandler(fname, platform='h09'): + """Instantiate a Filehandler.""" + fileinfo = {'platform': platform} + filetype = None + fh = HIML2NCFileHandler(fname, fileinfo, filetype) + return fh + + +@pytest.fixture(scope="session") +def himl2_filename(tmp_path_factory): + """Create a fake himawari l2 file.""" + fname = f'{tmp_path_factory.mktemp("data")}/AHI-CMSK_v1r1_h09_s202308240540213_e202308240549407_c202308240557548.nc' + with h5netcdf.File(fname, mode="w") as h5f: + h5f.dimensions = dimensions + h5f.attrs.update(global_attrs) + var = h5f.create_variable("CloudMask", ("Rows", "Columns"), np.uint16, chunks=(200, 200)) + var[:] = clmk_data + + return fname + + +@pytest.fixture(scope="session") +def himl2_filename_bad(tmp_path_factory): + """Create a fake himawari l2 file.""" + fname = f'{tmp_path_factory.mktemp("data")}/AHI-CMSK_v1r1_h09_s202308240540213_e202308240549407_c202308240557548.nc' + with h5netcdf.File(fname, mode="w") as h5f: + h5f.dimensions = dimensions + h5f.attrs.update(badarea_attrs) + var = h5f.create_variable("CloudMask", ("Rows", "Columns"), np.uint16, chunks=(200, 200)) + var[:] = clmk_data + + return fname + + +def test_startend(himl2_filename): + """Test start and end times are set correctly.""" + fh = ahil2_filehandler(himl2_filename) + assert fh.start_time == start_time + assert fh.end_time == end_time + + +def test_ahi_l2_area_def(himl2_filename, caplog): + """Test reader handles area definition correctly.""" + warntxt = "This product misses metadata" + ps = '+proj=geos +lon_0=140.7 +h=35785863 +x_0=0 +y_0=0 +a=6378137 +rf=298.257024882273 +units=m +no_defs +type=crs' + + # Check case where input data is correct size. + fh = ahil2_filehandler(himl2_filename) + clmk_id = make_dataid(name="cloudmask") + area_def = fh.get_area_def(clmk_id) + assert area_def.width == dimensions['Columns'] + assert area_def.height == dimensions['Rows'] + assert np.allclose(area_def.area_extent, exp_ext) + assert area_def.proj4_string == ps + assert warntxt in caplog.text + + # Check case where input data is incorrect size. 
+ with pytest.raises(ValueError): + fh = ahil2_filehandler(himl2_filename) + fh.nlines = 3000 + fh.get_area_def(clmk_id) + + +def test_bad_area_name(himl2_filename_bad): + """Check case where area name is not correct.""" + global_attrs['cdm_data_type'] = 'bad_area' + with pytest.raises(ValueError): + ahil2_filehandler(himl2_filename_bad) + global_attrs['cdm_data_type'] = 'Full Disk' + + +def test_load_data(himl2_filename): + """Test that data is loaded successfully.""" + fh = ahil2_filehandler(himl2_filename) + clmk_id = make_dataid(name="cloudmask") + clmk = fh.get_dataset(clmk_id, {'file_key': 'CloudMask'}) + assert np.allclose(clmk.data, clmk_data) From c7ce9735a6d48806f1d3de1cd19ce117776dea0d Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 25 Aug 2023 11:48:40 +0100 Subject: [PATCH 0427/1416] Remove unneeded variable from AHI L2 NC reader. --- satpy/readers/ahi_l2_nc.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index 3db9d1528c..3675f4f419 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -37,7 +37,7 @@ class HIML2NCFileHandler(BaseFileHandler): """File handler for Himawari L2 NOAA enterprise data in netCDF format.""" - def __init__(self, filename, filename_info, filetype_info, geo_data=None): + def __init__(self, filename, filename_info, filetype_info): """Initialize the reader.""" super(HIML2NCFileHandler, self).__init__(filename, filename_info, filetype_info) @@ -91,7 +91,7 @@ def _get_area_def(self): logger.warning('This product misses metadata required to produce an appropriate area definition.' 'Assuming standard Himawari-8/9 full disk projection.') - # Basic check to ensure we're processing a full disk (2km) scene. + # Basic check to ensure we're processing a full disk (2km) scene.n if self.nlines != 5500 or self.ncols != 5500: raise ValueError("Input L2 file is not a full disk Himawari scene. Only full disk data is supported.") From 71476306cda0cd82345752ea4355c64445fd2bca Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 25 Aug 2023 12:30:35 +0100 Subject: [PATCH 0428/1416] Add additional AHI L2 netcdf datasets to the YAML file. 
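
Each new entry simply maps a Satpy dataset name onto the corresponding netCDF
variable via `file_key`, so the products become loadable through the usual
Scene interface. A rough usage sketch, assuming a downloaded cloud height file
(the filename below is a placeholder, not a real file):

    from satpy import Scene

    scn = Scene(reader='ahi_l2_nc',
                filenames=['AHI-CHGT_v1r1_h09_s2023...nc'])  # placeholder name
    scn.load(['cloud_top_temperature', 'cloud_top_pressure'])
    print(scn['cloud_top_temperature'])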
--- satpy/etc/readers/ahi_l2_nc.yaml | 164 ++++++++++++++++++++++++++++++- 1 file changed, 159 insertions(+), 5 deletions(-) diff --git a/satpy/etc/readers/ahi_l2_nc.yaml b/satpy/etc/readers/ahi_l2_nc.yaml index 04d0571f79..955d41bbcd 100644 --- a/satpy/etc/readers/ahi_l2_nc.yaml +++ b/satpy/etc/readers/ahi_l2_nc.yaml @@ -9,13 +9,167 @@ reader: file_types: - ahi_l2_cloudmask: + ahi_l2_mask: file_reader: !!python/name:satpy.readers.ahi_l2_nc.HIML2NCFileHandler file_patterns: - - '{sensor:3s}-{product:_4s}_{version:4s}_{platform:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc' + - '{sensor:3s}-CMSK_{version:4s}_{platform:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc' + + ahi_l2_type: + file_reader: !!python/name:satpy.readers.ahi_l2_nc.HIML2NCFileHandler + file_patterns: + - '{sensor:3s}-CPHS_{version:4s}_{platform:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc' + + ahi_l2_height: + file_reader: !!python/name:satpy.readers.ahi_l2_nc.HIML2NCFileHandler + file_patterns: + - '{sensor:3s}-CHGT_{version:4s}_{platform:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc' datasets: - cloudmask: - name: cloudmask + # Products from the cloud mask files + cloud_mask: + name: cloud_mask file_key: CloudMask - file_type: [ ahi_l2_cloudmask ] + file_type: [ ahi_l2_mask ] + + cloud_mask_binary: + name: cloud_mask_binary + file_key: CloudMaskBinary + file_type: [ ahi_l2_mask ] + + cloud_probability: + name: cloud_probability + file_key: CloudProbability + file_type: [ ahi_l2_mask ] + + ice_cloud_probability: + name: ice_cloud_probability + file_key: IceCloudProbability + file_type: [ ahi_l2_mask ] + + phase_uncertainty: + name: phase_uncertainty + file_key: PhaseUncertainty + file_type: [ ahi_l2_mask ] + + dust_mask: + name: dust_mask + file_key: Dust_Mask + file_type: [ ahi_l2_mask ] + + fire_mask: + name: fire_mask + file_key: Fire_Mask + file_type: [ ahi_l2_mask ] + + smoke_mask: + name: smoke_mask + file_key: Smoke_Mask + file_type: [ ahi_l2_mask ] + + # Products from the cloud phase / type files + cloud_phase: + name: cloud_phase + file_key: CloudPhase + file_type: [ ahi_l2_type ] + + cloud_phase_flag: + name: cloud_phase_flag + file_key: CloudPhaseFlag + file_type: [ ahi_l2_type ] + + cloud_type: + name: cloud_type + file_key: CloudType + file_type: [ ahi_l2_type ] + + # Products from the cloud height files + cloud_optical_depth: + name: cloud_optical_depth + file_key: CldOptDpth + file_type: [ ahi_l2_height ] + + cloud_top_emissivity: + name: cloud_top_emissivity + file_key: CldTopEmss + file_type: [ ahi_l2_height ] + + cloud_top_pressure: + name: cloud_top_pressure + file_key: CldTopPres + file_type: [ ahi_l2_height ] + + cloud_top_pressure_low: + name: cloud_top_pressure_low + file_key: CldTopPresLow + file_type: [ ahi_l2_height ] + + cloud_top_temperature: + name: cloud_top_temperature + file_key: CldTopTemp + file_type: [ ahi_l2_height ] + + cloud_top_temperature_low: + name: cloud_top_temperature_low + file_key: CldTopTempLow + file_type: [ ahi_l2_height ] + + cloud_height_quality: + name: cloud_height_quality + file_key: CloudHgtQF + file_type: [ ahi_l2_height ] + + retrieval_cost: + name: retrieval_cost + file_key: Cost + file_type: [ ahi_l2_height ] + + inversion_flag: + name: inversion_flag + file_key: InverFlag + file_type: [ ahi_l2_height ] + + latitude_parallax_corrected: + name: 
latitude_parallax_corrected + file_key: Latitude_Pc + file_type: [ ahi_l2_height ] + + longitude_parallax_corrected: + name: longitude_parallax_corrected + file_key: Longitude_Pc + file_type: [ ahi_l2_height ] + + cloud_top_pressure_error: + name: cloud_top_pressure_error + file_key: PcError + file_type: [ ahi_l2_height ] + + processing_order: + name: processing_order + file_key: ProcOrder + file_type: [ ahi_l2_height ] + + shadow_mask: + name: shadow_mask + file_key: Shadow_Mask + file_type: [ ahi_l2_height ] + + cloud_top_temperature_error: + name: cloud_top_temperature_error + file_key: TcError + file_type: [ ahi_l2_height ] + + cloud_top_height_error: + name: cloud_top_height_error + file_key: ZcError + file_type: [ ahi_l2_height ] + + # Datasets in all three file types + latitude: + name: latitude + file_key: Latitude + file_type: [ ahi_l2_height, ahi_l2_type, ahi_l2_mask ] + + longitude: + name: longitude + file_key: Longitude + file_type: [ ahi_l2_height, ahi_l2_type, ahi_l2_mask ] From 46a2f2919901a8432e60e8d914fcfcd2b07b3e7c Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 25 Aug 2023 12:35:57 +0100 Subject: [PATCH 0429/1416] Add some extra documentation to the AHI L2 nc reader. --- satpy/readers/ahi_l2_nc.py | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index 3675f4f419..51d73af11f 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -15,7 +15,28 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . -"""Reader for Himawari L2 cloud products from NOAA's big data programme.""" +"""Reader for Himawari L2 cloud products from NOAA's big data programme. + +These products are generated by the NOAA enterprise cloud suite and have filenames like: +AHI-CMSK_v1r1_h09_s202308240540213_e202308240549407_c202308240557548.nc + +The second letter grouping (CMSK above) indicates the product type: + CMSK - Cloud mask + CHGT - Cloud height + CPHS - Cloud type and phase +These products are generated from the AHI sensor on Himawari-8 and Himawari-9, and are +produced at the native instrument resolution for the IR channels (2km at nadir). + +NOTE: This reader is currently only compatible with full disk scenes. Unlike level 1 himawari +data, the netCDF files do not contain the required metadata to produce an appropriate area +definition for the data contents, and hence the area definition is hardcoded into the reader. + +A warning is displayed to the user highlighting this. The assumed area definition is a full +disk image at the nominal subsatellite longitude of 140.7 degrees East. + +All the simple data products are supported here, but multidimensional products are not yet +supported. These include the CldHgtFlag and the CloudMaskPacked variables. +""" import logging from datetime import datetime From 09051c8dca2344305e06290d576693c4d538a0b3 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 25 Aug 2023 12:46:49 +0100 Subject: [PATCH 0430/1416] Update area definition warning message for AHI L2 NC reader. 
---
 satpy/readers/ahi_l2_nc.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py
index 51d73af11f..a238f1bb73 100644
--- a/satpy/readers/ahi_l2_nc.py
+++ b/satpy/readers/ahi_l2_nc.py
@@ -109,8 +109,8 @@ def get_area_def(self, dsid):
         return self.area

     def _get_area_def(self):
-        logger.warning('This product misses metadata required to produce an appropriate area definition.'
-                       'Assuming standard Himawari-8/9 full disk projection.')
+        logger.warning('The AHI L2 cloud products do not have the metadata required to produce an area definition.'
+                       ' Assuming standard Himawari-8/9 full disk projection.')

         # Basic check to ensure we're processing a full disk (2km) scene.n
         if self.nlines != 5500 or self.ncols != 5500:

From 425256777dff3bd9deae07af916c4451c6d2fe23 Mon Sep 17 00:00:00 2001
From: Martin Raspaud
Date: Fri, 25 Aug 2023 13:49:53 +0200
Subject: [PATCH 0431/1416] Remove previous chunks

---
 satpy/readers/ahi_hsd.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py
index e06f7ebc50..be0ef31ca0 100644
--- a/satpy/readers/ahi_hsd.py
+++ b/satpy/readers/ahi_hsd.py
@@ -622,8 +622,7 @@ def _read_data(self, fp_, header):
         chunks = da.core.normalize_chunks("auto",
                                           shape=(nlines, ncols),
                                           limit=get_chunk_size_limit(),
-                                          dtype='f8',
-                                          previous_chunks=(550, 550))
+                                          dtype='f8')
         return da.from_array(np.memmap(self.filename, offset=fp_.tell(),
                                        dtype='<u2', shape=(nlines, ncols), mode='r'),
                              chunks=chunks)

From: Simon Proud
Date: Fri, 25 Aug 2023 13:20:06 +0100
Subject: [PATCH 0432/1416] Fix AHI L2 NC tests for warning message.

---
 satpy/tests/reader_tests/test_ahi_l2_nc.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/satpy/tests/reader_tests/test_ahi_l2_nc.py b/satpy/tests/reader_tests/test_ahi_l2_nc.py
index 68f8c3a420..84b3c667d7 100644
--- a/satpy/tests/reader_tests/test_ahi_l2_nc.py
+++ b/satpy/tests/reader_tests/test_ahi_l2_nc.py
@@ -74,7 +74,7 @@ def test_startend(himl2_filename):

 def test_ahi_l2_area_def(himl2_filename, caplog):
     """Test reader handles area definition correctly."""
-    warntxt = "This product misses metadata"
+    warntxt = "The AHI L2 cloud products do not have the metadata"
     ps = '+proj=geos +lon_0=140.7 +h=35785863 +x_0=0 +y_0=0 +a=6378137 +rf=298.257024882273 +units=m +no_defs +type=crs'

     # Check case where input data is correct size.

From c447480cb0444e506c219b3d654f83768cbcb0a0 Mon Sep 17 00:00:00 2001
From: Simon Proud
Date: Fri, 25 Aug 2023 16:35:16 +0100
Subject: [PATCH 0433/1416] Fix AHI L2 NC documentation.

---
 satpy/readers/ahi_l2_nc.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py
index a238f1bb73..e44c882898 100644
--- a/satpy/readers/ahi_l2_nc.py
+++ b/satpy/readers/ahi_l2_nc.py
@@ -22,8 +22,11 @@
 The second letter grouping (CMSK above) indicates the product type:
     CMSK - Cloud mask
+
     CHGT - Cloud height
+
     CPHS - Cloud type and phase
+
 These products are generated from the AHI sensor on Himawari-8 and Himawari-9, and are
 produced at the native instrument resolution for the IR channels (2km at nadir).
From ed890dc2de9d156e286d988f207bacf5dbec21b6 Mon Sep 17 00:00:00 2001
From: David Hoese
Date: Mon, 28 Aug 2023 19:45:52 -0500
Subject: [PATCH 0434/1416] Allow for more platform shortnames in ABI base
 reader

---
 satpy/readers/abi_base.py | 16 ++++++++--------
 satpy/readers/ahi_hsd.py  |  9 +++++----
 2 files changed, 13 insertions(+), 12 deletions(-)

diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py
index 5f4cf506fe..4a6bf069c1 100644
--- a/satpy/readers/abi_base.py
+++ b/satpy/readers/abi_base.py
@@ -34,13 +34,13 @@
 CHUNK_SIZE = get_legacy_chunk_size()

 PLATFORM_NAMES = {
-    'G16': 'GOES-16',
-    'G17': 'GOES-17',
-    'G18': 'GOES-18',
-    'G19': 'GOES-19',
-    'GOES16': 'GOES-16',
-    'GOES17': 'GOES-17',
-    'GOES18': 'GOES-18',
+    'g16': 'GOES-16',
+    'g17': 'GOES-17',
+    'g18': 'GOES-18',
+    'g19': 'GOES-19',
+    'goes16': 'GOES-16',
+    'goes17': 'GOES-17',
+    'goes18': 'GOES-18',
 }

@@ -52,7 +52,7 @@ def __init__(self, filename, filename_info, filetype_info):
         super(NC_ABI_BASE, self).__init__(filename, filename_info, filetype_info)

         platform_shortname = filename_info['platform_shortname']
-        self.platform_name = PLATFORM_NAMES.get(platform_shortname)
+        self.platform_name = PLATFORM_NAMES.get(platform_shortname.lower())

         self.nlines = self.nc['y'].size
         self.ncols = self.nc['x'].size
diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py
index 86e9aa9e0c..8a2c82ac42 100644
--- a/satpy/readers/ahi_hsd.py
+++ b/satpy/readers/ahi_hsd.py
@@ -616,13 +616,14 @@
         return header

-    def _read_data(self, fp_, header):
+    def _read_data(self, fp_, header, resolution):
         """Read data block."""
         nlines = int(header["block2"]['number_of_lines'][0])
         ncols = int(header["block2"]['number_of_columns'][0])
+        chunk_size = CHUNK_SIZE * (500 / resolution)
         return da.from_array(np.memmap(self.filename, offset=fp_.tell(),
                                        dtype='<u2', shape=(nlines, ncols), mode='r'),
-                             chunks=(CHUNK_SIZE, CHUNK_SIZE))
+                             chunks=(chunk_size, chunk_size))

From: David Hoese
Date: Mon, 28 Aug 2023 19:50:34 -0500
Subject: [PATCH 0435/1416] Fix ABI GFLS test to be more accurate

---
 satpy/tests/reader_tests/test_abi_l2_nc.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/satpy/tests/reader_tests/test_abi_l2_nc.py b/satpy/tests/reader_tests/test_abi_l2_nc.py
index b05ceb0f64..2f2131461e 100644
--- a/satpy/tests/reader_tests/test_abi_l2_nc.py
+++ b/satpy/tests/reader_tests/test_abi_l2_nc.py
@@ -125,7 +125,7 @@ def test_get_dataset(self):
     def test_get_dataset_gfls(self):
         """Test that Low Cloud and Fog filenames work."""
         from satpy.tests.utils import make_dataid
-        filename_info = {'platform_shortname': 'GOES16', 'scene_abbr': 'FD'}
+        filename_info = {'platform_shortname': 'g16', 'scene_abbr': 'FD'}
         key = make_dataid(name='MVFR_Fog_Prob')
         with _create_reader_for_fake_data("GFLS", _create_cmip_dataset("MVFR_Fog_Prob"), filename_info) as reader:
             res = reader.get_dataset(key, {'file_key': 'MVFR_Fog_Prob'})

From 238f4796cb4e4703a5a37f5394451e6b9fb1b1e7 Mon Sep 17 00:00:00 2001
From: Simon Proud
Date: Tue, 29 Aug 2023 15:17:09 +0100
Subject: [PATCH 0436/1416] Add Himawari full disk areas to the default areas
 YAML.
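
These area definitions use the same nominal scan geometry (sub-satellite
longitude 140.7 degrees East) that is hardcoded in the new ahi_l2_nc reader,
so resampled L1 and L2 data end up on matching grids. A quick way to
sanity-check an entry, as a sketch:

    from satpy.resample import get_area_def

    area = get_area_def('himawari_ahi_fes_2km')
    print(area.shape)        # expected (5500, 5500)
    print(area.area_extent)  # expected close to (-5499999.9012, ..., 5499999.9012)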
--- satpy/etc/areas.yaml | 48 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/satpy/etc/areas.yaml b/satpy/etc/areas.yaml index 6abbc18c82..28129f72d9 100644 --- a/satpy/etc/areas.yaml +++ b/satpy/etc/areas.yaml @@ -268,6 +268,54 @@ msg_seviri_iodc_48km: lower_left_xy: [-5570248.686685662, -5567248.28340708] upper_right_xy: [5567248.28340708, 5570248.686685662] +himawari_ahi_fes_500m: + description: + Himawari-8/9 full disk area definition at 500m resolution + projection: + proj: geos + lon_0: 140.7 + a: 6378137.0 + rf: 298.257024882273 + h: 35785863.0 + shape: + height: 22000 + width: 22000 + area_extent: + lower_left_xy: [-5499999.9684, -5499999.9684] + upper_right_xy: [5499999.9684, 5499999.9684] + +himawari_ahi_fes_1km: + description: + Himawari-8/9 full disk area definition at 1km resolution + projection: + proj: geos + lon_0: 140.7 + a: 6378137.0 + rf: 298.257024882273 + h: 35785863.0 + shape: + height: 11000 + width: 11000 + area_extent: + lower_left_xy: [-5500000.0355, -5500000.0355] + upper_right_xy: [5500000.0355, 5500000.0355] + + himawari_ahi_fes_2km: + description: + Himawari-8/9 full disk area definition at 2km resolution + projection: + proj: geos + lon_0: 140.7 + a: 6378137.0 + rf: 298.257024882273 + h: 35785863.0 + shape: + height: 5500 + width: 5500 + area_extent: + lower_left_xy: [ -5499999.9012, -5499999.9012 ] + upper_right_xy: [ 5499999.9012, 5499999.9012 ] + # Regional From 5fa71c40e5a99f65799c53f3ec3f942405ade416 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 29 Aug 2023 15:18:44 +0100 Subject: [PATCH 0437/1416] Fix Himawari area indentation. --- satpy/etc/areas.yaml | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/satpy/etc/areas.yaml b/satpy/etc/areas.yaml index 28129f72d9..4f71368375 100644 --- a/satpy/etc/areas.yaml +++ b/satpy/etc/areas.yaml @@ -300,21 +300,21 @@ himawari_ahi_fes_1km: lower_left_xy: [-5500000.0355, -5500000.0355] upper_right_xy: [5500000.0355, 5500000.0355] - himawari_ahi_fes_2km: - description: - Himawari-8/9 full disk area definition at 2km resolution - projection: - proj: geos - lon_0: 140.7 - a: 6378137.0 - rf: 298.257024882273 - h: 35785863.0 - shape: - height: 5500 - width: 5500 - area_extent: - lower_left_xy: [ -5499999.9012, -5499999.9012 ] - upper_right_xy: [ 5499999.9012, 5499999.9012 ] +himawari_ahi_fes_2km: + description: + Himawari-8/9 full disk area definition at 2km resolution + projection: + proj: geos + lon_0: 140.7 + a: 6378137.0 + rf: 298.257024882273 + h: 35785863.0 + shape: + height: 5500 + width: 5500 + area_extent: + lower_left_xy: [ -5499999.9012, -5499999.9012 ] + upper_right_xy: [ 5499999.9012, 5499999.9012 ] # Regional From 4a54a55f0ea756df36a4ed21c5bbdc00a294e524 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 29 Aug 2023 18:37:56 -0500 Subject: [PATCH 0438/1416] Fix invalid AVHRR product names in AWIPS config --- satpy/etc/writers/awips_tiled.yaml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/satpy/etc/writers/awips_tiled.yaml b/satpy/etc/writers/awips_tiled.yaml index 8700b63ad5..f0ee9c1d3b 100644 --- a/satpy/etc/writers/awips_tiled.yaml +++ b/satpy/etc/writers/awips_tiled.yaml @@ -233,42 +233,42 @@ templates: # AVHRR L1B products avhrr_band1_vis: - name: band1_vis + name: "1" var_name: data attributes: physical_element: raw_value: 0.63 um units: {} avhrr_band2_vis: - name: band2_vis + name: "2" var_name: data attributes: physical_element: raw_value: 0.86 um units: {} 
avhrr_band3a_vis: - name: band3a_vis + name: "3a" var_name: data attributes: physical_element: raw_value: 1.61 um units: {} avhrr_band3b_bt: - name: band3b_bt + name: "3b" var_name: data attributes: physical_element: raw_value: 3.74 um units: {} avhrr_band4_bt: - name: band4_bt + name: "4" var_name: data attributes: physical_element: raw_value: 10.8 um units: {} avhrr_band5_bt: - name: band5_bt + name: "5" var_name: data attributes: physical_element: From d75842dfb7088b389c16ed3848b223f871315b34 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 29 Aug 2023 18:43:54 -0500 Subject: [PATCH 0439/1416] Move MODIS test fixtures to MODIS-specific sub-directory Closes #2559 --- satpy/tests/reader_tests/conftest.py | 17 --------- .../reader_tests/modis_tests/__init__.py | 21 +++++++++++ .../{ => modis_tests}/_modis_fixtures.py | 0 .../reader_tests/modis_tests/conftest.py | 35 +++++++++++++++++++ .../{ => modis_tests}/test_modis_l1b.py | 2 +- .../{ => modis_tests}/test_modis_l2.py | 2 +- setup.cfg | 1 + 7 files changed, 59 insertions(+), 19 deletions(-) create mode 100644 satpy/tests/reader_tests/modis_tests/__init__.py rename satpy/tests/reader_tests/{ => modis_tests}/_modis_fixtures.py (100%) create mode 100644 satpy/tests/reader_tests/modis_tests/conftest.py rename satpy/tests/reader_tests/{ => modis_tests}/test_modis_l1b.py (99%) rename satpy/tests/reader_tests/{ => modis_tests}/test_modis_l2.py (99%) diff --git a/satpy/tests/reader_tests/conftest.py b/satpy/tests/reader_tests/conftest.py index ca9e3fc66e..8f6f572494 100644 --- a/satpy/tests/reader_tests/conftest.py +++ b/satpy/tests/reader_tests/conftest.py @@ -16,20 +16,3 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Setup and configuration for all reader tests.""" - -from ._modis_fixtures import ( - modis_l1b_imapp_1000m_file, - modis_l1b_imapp_geo_file, - modis_l1b_nasa_1km_mod03_files, - modis_l1b_nasa_mod02hkm_file, - modis_l1b_nasa_mod02qkm_file, - modis_l1b_nasa_mod03_file, - modis_l1b_nasa_mod021km_file, - modis_l2_imapp_mask_byte1_file, - modis_l2_imapp_mask_byte1_geo_files, - modis_l2_imapp_snowmask_file, - modis_l2_imapp_snowmask_geo_files, - modis_l2_nasa_mod06_file, - modis_l2_nasa_mod35_file, - modis_l2_nasa_mod35_mod03_files, -) diff --git a/satpy/tests/reader_tests/modis_tests/__init__.py b/satpy/tests/reader_tests/modis_tests/__init__.py new file mode 100644 index 0000000000..45c8d67254 --- /dev/null +++ b/satpy/tests/reader_tests/modis_tests/__init__.py @@ -0,0 +1,21 @@ +# Copyright (c) 2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""Unit tests for MODIS readers. + +This subdirectory mostly exists to have MODIS-based pytest fixtures only loaded +for MODIS tests. 
+ +""" diff --git a/satpy/tests/reader_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py similarity index 100% rename from satpy/tests/reader_tests/_modis_fixtures.py rename to satpy/tests/reader_tests/modis_tests/_modis_fixtures.py diff --git a/satpy/tests/reader_tests/modis_tests/conftest.py b/satpy/tests/reader_tests/modis_tests/conftest.py new file mode 100644 index 0000000000..09f98049db --- /dev/null +++ b/satpy/tests/reader_tests/modis_tests/conftest.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""Setup and configuration for all reader tests.""" + +from ._modis_fixtures import ( + modis_l1b_imapp_1000m_file, + modis_l1b_imapp_geo_file, + modis_l1b_nasa_1km_mod03_files, + modis_l1b_nasa_mod02hkm_file, + modis_l1b_nasa_mod02qkm_file, + modis_l1b_nasa_mod03_file, + modis_l1b_nasa_mod021km_file, + modis_l2_imapp_mask_byte1_file, + modis_l2_imapp_mask_byte1_geo_files, + modis_l2_imapp_snowmask_file, + modis_l2_imapp_snowmask_geo_files, + modis_l2_nasa_mod06_file, + modis_l2_nasa_mod35_file, + modis_l2_nasa_mod35_mod03_files, +) diff --git a/satpy/tests/reader_tests/test_modis_l1b.py b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py similarity index 99% rename from satpy/tests/reader_tests/test_modis_l1b.py rename to satpy/tests/reader_tests/modis_tests/test_modis_l1b.py index 3d8d569ca8..56e8687844 100644 --- a/satpy/tests/reader_tests/test_modis_l1b.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py @@ -25,8 +25,8 @@ from pytest_lazyfixture import lazy_fixture from satpy import Scene, available_readers +from satpy.tests.utils import CustomScheduler, make_dataid -from ..utils import CustomScheduler, make_dataid from ._modis_fixtures import ( AVAILABLE_1KM_PRODUCT_NAMES, AVAILABLE_HKM_PRODUCT_NAMES, diff --git a/satpy/tests/reader_tests/test_modis_l2.py b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py similarity index 99% rename from satpy/tests/reader_tests/test_modis_l2.py rename to satpy/tests/reader_tests/modis_tests/test_modis_l2.py index a1870fe390..222f365d87 100644 --- a/satpy/tests/reader_tests/test_modis_l2.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py @@ -26,8 +26,8 @@ from pytest_lazyfixture import lazy_fixture from satpy import Scene, available_readers +from satpy.tests.utils import CustomScheduler, make_dataid -from ..utils import CustomScheduler, make_dataid from ._modis_fixtures import _shape_for_resolution # NOTE: diff --git a/setup.cfg b/setup.cfg index e2ef375dc0..594f9dc8cd 100644 --- a/setup.cfg +++ b/setup.cfg @@ -20,6 +20,7 @@ exclude = satpy/tests/features per-file-ignores = satpy/tests/*/conftest.py:F401 + satpy/tests/*/*/conftest.py:F401 doc/source/doi_role.py:D103 satpy/tests/features/steps/*.py:F811 From 52de91b02a573e2fece5b8cec9ff4cf1c3b90d46 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 30 Aug 
2023 09:32:02 +0200
Subject: [PATCH 0440/1416] Put back previous chunks

---
 satpy/readers/ahi_hsd.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py
index be0ef31ca0..e06f7ebc50 100644
--- a/satpy/readers/ahi_hsd.py
+++ b/satpy/readers/ahi_hsd.py
@@ -622,7 +622,8 @@ def _read_data(self, fp_, header):
         chunks = da.core.normalize_chunks("auto",
                                           shape=(nlines, ncols),
                                           limit=get_chunk_size_limit(),
-                                          dtype='f8')
+                                          dtype='f8',
+                                          previous_chunks=(550, 550))
         return da.from_array(np.memmap(self.filename, offset=fp_.tell(),
                                        dtype='<u2', shape=(nlines, ncols), mode='r'),
                              chunks=chunks)

From: David Hoese
Date: Thu, 31 Aug 2023 09:08:34 -0500
Subject: [PATCH 0441/1416] Fix VIIRS EDR handling of multiple fill values in
 geolocation

---
 satpy/readers/viirs_edr.py                 | 10 ++++-----
 satpy/tests/reader_tests/test_viirs_edr.py | 25 ++++++++++++++++------
 2 files changed, 23 insertions(+), 12 deletions(-)

diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py
index da61114fca..30aeaa6d52 100644
--- a/satpy/readers/viirs_edr.py
+++ b/satpy/readers/viirs_edr.py
@@ -62,7 +62,6 @@
 import logging
 from typing import Iterable

-import numpy as np
 import xarray as xr

 from satpy import DataID
@@ -138,14 +137,13 @@ def get_dataset(self, dataset_id: DataID, info: dict) -> xr.DataArray:
         return data_arr

     def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray:
-        fill_value = data_arr.encoding.get("_FillValue")
-        if fill_value is not None and not np.isnan(fill_value):
-            # xarray auto mask and scale handled this
-            return data_arr
         yaml_fill = ds_info.get("_FillValue")
+        # xarray auto mask and scale handled any fills from the file
         if yaml_fill is not None:
-            return data_arr.where(data_arr != yaml_fill)
+            data_arr = data_arr.where(data_arr != yaml_fill)
         valid_range = ds_info.get("valid_range", data_arr.attrs.get("valid_range"))
+        if "valid_min" in data_arr.attrs and valid_range is None:
+            valid_range = (data_arr.attrs["valid_min"], data_arr.attrs["valid_max"])
         if valid_range is not None:
             return data_arr.where((valid_range[0] <= data_arr) & (data_arr <= valid_range[1]))
         return data_arr
diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py
index 146a0cd2c4..da6dc9a55b 100644
--- a/satpy/tests/reader_tests/test_viirs_edr.py
+++ b/satpy/tests/reader_tests/test_viirs_edr.py
@@ -128,17 +128,26 @@ def _create_surf_refl_variables() -> dict[str, xr.DataArray]:
     dim_x_375 = "Along_Scan_375m"
     i_dims = (dim_y_375, dim_x_375)

-    lon_attrs = {"standard_name": "longitude", "units": "degrees_east", "_FillValue": -999.9}
-    lat_attrs = {"standard_name": "latitude", "units": "degrees_north", "_FillValue": -999.9}
+    lon_attrs = {"standard_name": "longitude", "units": "degrees_east", "_FillValue": -999.9,
+                 "valid_min": -180.0, "valid_max": 180.0}
+    lat_attrs = {"standard_name": "latitude", "units": "degrees_north", "_FillValue": -999.9,
+                 "valid_min": -90.0, "valid_max": 90.0}
     sr_attrs = {"units": "unitless", "_FillValue": -9999,
                 "scale_factor": 0.0001, "add_offset": 0.0}

     i_data = np.random.random_sample((I_ROWS, I_COLS)).astype(np.float32)
     m_data = np.random.random_sample((M_ROWS, M_COLS)).astype(np.float32)
+    lon_i_data = (i_data * 360) - 180.0
+    lon_m_data = (m_data * 360) - 180.0
+    lat_i_data = (i_data * 180) - 90.0
+    lat_m_data = (m_data * 180) - 90.0
+    for geo_var in (lon_i_data, lon_m_data, lat_i_data, lat_m_data):
+        geo_var[0, 0] = -999.9
+        geo_var[0, 1] = -999.3
     data_arrs = {
-        "Longitude_at_375m_resolution": xr.DataArray(i_data, dims=i_dims, attrs=lon_attrs),
-        "Latitude_at_375m_resolution": xr.DataArray(i_data, dims=i_dims, attrs=lat_attrs),
-        "Longitude_at_750m_resolution": xr.DataArray(m_data, dims=m_dims, attrs=lon_attrs),
-        "Latitude_at_750m_resolution": xr.DataArray(m_data, dims=m_dims, attrs=lat_attrs),
+        "Longitude_at_375m_resolution": xr.DataArray(lon_i_data, dims=i_dims, attrs=lon_attrs),
+        "Latitude_at_375m_resolution": xr.DataArray(lat_i_data, dims=i_dims, attrs=lat_attrs),
+        "Longitude_at_750m_resolution": xr.DataArray(lon_m_data, dims=m_dims, attrs=lon_attrs),
+        "Latitude_at_750m_resolution": xr.DataArray(lat_m_data, dims=m_dims, attrs=lat_attrs),
         "375m Surface Reflectance Band I1": xr.DataArray(i_data, dims=i_dims, attrs=sr_attrs),
         "750m Surface Reflectance Band M1": xr.DataArray(m_data, dims=m_dims, attrs=sr_attrs),
     }
@@ -458,6 +467,10 @@ def _shared_metadata_checks(data_arr: xr.DataArray) -> None:
     lats = data_arr.attrs["area"].lats
     assert lons.attrs["rows_per_scan"] == exp_rps
     assert lats.attrs["rows_per_scan"] == exp_rps
+    assert lons.min() >= -180.0
+    assert lons.max() <= 180.0
+    assert lats.min() >= -90.0
+    assert lats.max() <= 90.0


 def _is_mband_res(data_arr: xr.DataArray) -> bool:

From 99a63d5d3fac72e2f8637549e5e4b060d7c07a4f Mon Sep 17 00:00:00 2001
From: David Hoese
Date: Thu, 31 Aug 2023 09:46:39 -0500
Subject: [PATCH 0442/1416] Remove unnecessary _FillValue from YAML handling

---
 satpy/readers/viirs_edr.py | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py
index 30aeaa6d52..a8c6c934b2 100644
--- a/satpy/readers/viirs_edr.py
+++ b/satpy/readers/viirs_edr.py
@@ -137,10 +137,7 @@ def get_dataset(self, dataset_id: DataID, info: dict) -> xr.DataArray:
         return data_arr

     def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray:
-        yaml_fill = ds_info.get("_FillValue")
         # xarray auto mask and scale handled any fills from the file
-        if yaml_fill is not None:
-            data_arr = data_arr.where(data_arr != yaml_fill)
         valid_range = ds_info.get("valid_range", data_arr.attrs.get("valid_range"))
         if "valid_min" in data_arr.attrs and valid_range is None:
             valid_range = (data_arr.attrs["valid_min"], data_arr.attrs["valid_max"])

From 1686cdd68fc7b26a9238874da52c9b512a3e3ac3 Mon Sep 17 00:00:00 2001
From: David Hoese
Date: Thu, 31 Aug 2023 10:07:42 -0500
Subject: [PATCH 0443/1416] Revert accidental commit of AHI experimental
 resolution-based chunking

---
 satpy/readers/ahi_hsd.py | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py
index 8a2c82ac42..86e9aa9e0c 100644
--- a/satpy/readers/ahi_hsd.py
+++ b/satpy/readers/ahi_hsd.py
@@ -616,14 +616,13 @@ def _read_header(self, fp_):
         return header

-    def _read_data(self, fp_, header, resolution):
+    def _read_data(self, fp_, header):
         """Read data block."""
         nlines = int(header["block2"]['number_of_lines'][0])
         ncols = int(header["block2"]['number_of_columns'][0])
-        chunk_size = CHUNK_SIZE * (500 / resolution)
         return da.from_array(np.memmap(self.filename, offset=fp_.tell(),
                                        dtype='<u2', shape=(nlines, ncols), mode='r'),
-                             chunks=(chunk_size, chunk_size))
+                             chunks=(CHUNK_SIZE, CHUNK_SIZE))

From: David Hoese
Date: Thu, 31 Aug 2023 10:11:56 -0500
Subject: [PATCH 0444/1416] Remove unused low cloud fog file pattern

CSPP Geo is no longer producing this filenaming scheme
---
 satpy/etc/readers/abi_l2_nc.yaml | 2 --
 1 file changed, 2 deletions(-)

diff --git a/satpy/etc/readers/abi_l2_nc.yaml b/satpy/etc/readers/abi_l2_nc.yaml
index 7c0f1a75ac..3c3a94cc40 100644
--- a/satpy/etc/readers/abi_l2_nc.yaml
+++ b/satpy/etc/readers/abi_l2_nc.yaml
@@ -576,8 +576,6 @@ file_types:
   abi_l2_gfls:
    file_reader:
!!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: - # AIT scheme: GOES16_ABI_2KM_MESO_2019147_1800_48_AVIATION_FOG_EN.nc - - '{platform_shortname:s}_{mission_id:3s}_2KM_{scene_abbr:s}_{start_time:%Y%j_%H%M}_{file_product:s}_{algorithm_type:2s}.nc' # NDE scheme: ABI-L2-GFLSC-M6_v3r1_g16_s202306071931181_e202306071933554_c202306071934440.nc - '{mission_id:3s}-L2-GFLS{scene_abbr:s}-{scan_mode:2s}_v{sw_version:d}r{sw_revision:d}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' observation_type: "GFLS" From b8bdcf05e9f9cb8b8dc857a8708db2bee7a31a45 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 1 Sep 2023 10:19:03 -0500 Subject: [PATCH 0445/1416] Fix floating point edge case in elevation indexing in CREFL --- satpy/modifiers/_crefl_utils.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/satpy/modifiers/_crefl_utils.py b/satpy/modifiers/_crefl_utils.py index ddde8c9765..c8d6920056 100644 --- a/satpy/modifiers/_crefl_utils.py +++ b/satpy/modifiers/_crefl_utils.py @@ -390,17 +390,21 @@ def _runner_class_for_sensor(sensor_name: str) -> Type[_CREFLRunner]: def _space_mask_height(lon, lat, avg_elevation): - lat[(lat <= -90) | (lat >= 90)] = np.nan - lon[(lon <= -180) | (lon >= 180)] = np.nan - row = ((90.0 - lat) * avg_elevation.shape[0] / 180.0).astype(np.int32) - col = ((lon + 180.0) * avg_elevation.shape[1] / 360.0).astype(np.int32) - space_mask = np.isnan(lon) | np.isnan(lat) - row[space_mask] = 0 - col[space_mask] = 0 + row = ((90.0 - lat) * avg_elevation.shape[0] / 180.0) + col = ((lon + 180.0) * avg_elevation.shape[1] / 360.0) + np.clip(row, 0, avg_elevation.shape[0] - 1, out=row) + np.clip(col, 0, avg_elevation.shape[1] - 1, out=col) + row = row.astype(np.int32) + col = col.astype(np.int32) + # conditions need to be this way to include NaNs + bad_mask = ~((lon >= -180) | (lon <= 180) | (lat >= -90) | (lat <= 90)) + # convert any NaNs to valid indexes + row[bad_mask] = 0 + col[bad_mask] = 0 height = avg_elevation[row, col] # negative heights aren't allowed, clip to 0 - height[(height < 0.0) | np.isnan(height) | space_mask] = 0.0 + height[(height < 0.0) | np.isnan(height) | bad_mask] = 0.0 return height From bbbd70e15273c696e4ea6a8187de78520e0cfaa0 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 6 Sep 2023 10:29:11 -0500 Subject: [PATCH 0446/1416] Fix readthedocs creating a dirty git environment --- .readthedocs.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.readthedocs.yml b/.readthedocs.yml index 7a15d5578b..065634995b 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -13,5 +13,8 @@ build: os: "ubuntu-20.04" tools: python: "mambaforge-4.10" + jobs: + pre_install: + - git update-index --assume-unchanged doc/rtd_environment.yml doc/source/conf.py conda: environment: doc/rtd_environment.yml From 8ed4f4a135c92cdf18f29bf9abe0f4dbe3ba6793 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 6 Sep 2023 10:51:30 -0500 Subject: [PATCH 0447/1416] Debug RTD git state --- .readthedocs.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.readthedocs.yml b/.readthedocs.yml index 065634995b..59229176ec 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -15,6 +15,7 @@ build: python: "mambaforge-4.10" jobs: pre_install: + - git status - git update-index --assume-unchanged doc/rtd_environment.yml doc/source/conf.py conda: environment: doc/rtd_environment.yml From 344beac625d2a9f7a54c2dd797f8c8a164682f5b Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 6 
Sep 2023 17:07:10 +0100 Subject: [PATCH 0448/1416] Fix AHI L2 nc coordinate names --- satpy/readers/ahi_l2_nc.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index e44c882898..ef3b7611aa 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -98,6 +98,13 @@ def get_dataset(self, key, info): var = info['file_key'] logger.debug('Reading in get_dataset %s.', var) variable = self.nc[var] + + # Data has 'Latitude' and 'Longitude' coords, these must be replaced. + variable = variable.rename({'Rows': 'y', 'Columns': 'x'}) + + variable = variable.drop('Latitude') + variable = variable.drop('Longitude') + variable.attrs.update(key.to_dict()) return variable From 641a738c41e22b96973a30f6f485fbc0d1baa31f Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 6 Sep 2023 11:08:13 -0500 Subject: [PATCH 0449/1416] Debug RTD git state --- .readthedocs.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.readthedocs.yml b/.readthedocs.yml index 59229176ec..d16daabffc 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -17,5 +17,8 @@ build: pre_install: - git status - git update-index --assume-unchanged doc/rtd_environment.yml doc/source/conf.py + - git status + post_install: + - python -m pip install --no-deps -e . conda: environment: doc/rtd_environment.yml From cce6349ae1bd0da06b8d58f8f0e6b496f7bcf503 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 6 Sep 2023 17:09:26 +0100 Subject: [PATCH 0450/1416] Update AHI tests for new coord names. --- satpy/tests/reader_tests/test_ahi_l2_nc.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/tests/reader_tests/test_ahi_l2_nc.py b/satpy/tests/reader_tests/test_ahi_l2_nc.py index 84b3c667d7..d2e9c24489 100644 --- a/satpy/tests/reader_tests/test_ahi_l2_nc.py +++ b/satpy/tests/reader_tests/test_ahi_l2_nc.py @@ -16,7 +16,7 @@ start_time = datetime(2023, 8, 24, 5, 40, 21) end_time = datetime(2023, 8, 24, 5, 49, 40) -dimensions = {'Columns': 5500, 'Rows': 5500} +dimensions = {'X': 5500, 'Y': 5500} exp_ext = (-5499999.9012, -5499999.9012, 5499999.9012, 5499999.9012) @@ -46,7 +46,7 @@ def himl2_filename(tmp_path_factory): with h5netcdf.File(fname, mode="w") as h5f: h5f.dimensions = dimensions h5f.attrs.update(global_attrs) - var = h5f.create_variable("CloudMask", ("Rows", "Columns"), np.uint16, chunks=(200, 200)) + var = h5f.create_variable("CloudMask", ("Y", "X"), np.uint16, chunks=(200, 200)) var[:] = clmk_data return fname @@ -59,7 +59,7 @@ def himl2_filename_bad(tmp_path_factory): with h5netcdf.File(fname, mode="w") as h5f: h5f.dimensions = dimensions h5f.attrs.update(badarea_attrs) - var = h5f.create_variable("CloudMask", ("Rows", "Columns"), np.uint16, chunks=(200, 200)) + var = h5f.create_variable("CloudMask", ("Y", "X"), np.uint16, chunks=(200, 200)) var[:] = clmk_data return fname From 1e208e97c6d4ba2fdbc4485b00d6478cdd39693e Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 6 Sep 2023 13:34:40 -0500 Subject: [PATCH 0451/1416] Fix colormap-based tests with new version of trollimage --- .../enhancement_tests/test_enhancements.py | 20 +++++++------------ satpy/tests/test_composites.py | 14 ++++++------- 2 files changed, 14 insertions(+), 20 deletions(-) diff --git a/satpy/tests/enhancement_tests/test_enhancements.py b/satpy/tests/enhancement_tests/test_enhancements.py index 4420f4ea9b..e95c55a362 100644 --- a/satpy/tests/enhancement_tests/test_enhancements.py +++ b/satpy/tests/enhancement_tests/test_enhancements.py @@ -149,19 
+149,13 @@ def test_colorize(self): from trollimage.colormap import brbg from satpy.enhancements import colorize - expected = np.array([[ - [np.nan, 3.29409498e-01, 3.29409498e-01, - 4.35952940e-06, 4.35952940e-06], - [4.35952940e-06, 4.35952940e-06, 4.35952940e-06, - 4.35952940e-06, 4.35952940e-06]], - [[np.nan, 1.88249866e-01, 1.88249866e-01, - 2.35302110e-01, 2.35302110e-01], - [2.35302110e-01, 2.35302110e-01, 2.35302110e-01, - 2.35302110e-01, 2.35302110e-01]], - [[np.nan, 1.96102817e-02, 1.96102817e-02, - 1.88238767e-01, 1.88238767e-01], - [1.88238767e-01, 1.88238767e-01, 1.88238767e-01, - 1.88238767e-01, 1.88238767e-01]]]) + expected = np.array([ + [[np.nan, 3.29411723e-01, 3.29411723e-01, 3.21825881e-08, 3.21825881e-08], + [3.21825881e-08, 3.21825881e-08, 3.21825881e-08, 3.21825881e-08, 3.21825881e-08]], + [[np.nan, 1.88235327e-01, 1.88235327e-01, 2.35294109e-01, 2.35294109e-01], + [2.35294109e-01, 2.35294109e-01, 2.35294109e-01, 2.35294109e-01, 2.35294109e-01]], + [[np.nan, 1.96078164e-02, 1.96078164e-02, 1.88235281e-01, 1.88235281e-01], + [1.88235281e-01, 1.88235281e-01, 1.88235281e-01, 1.88235281e-01, 1.88235281e-01]]]) run_and_check_enhancement(colorize, self.ch1, expected, palettes=brbg) def test_palettize(self): diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index f27c73d849..f056d2fa93 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -773,13 +773,13 @@ def test_colorize_with_interpolation(self): dims=['y', 'x'], attrs={'valid_range': np.array([2, 4])}) res = colormap_composite([data, palette]) - exp = np.array([[[1.0000149, 0.49804664, 0.24907766], - [0., 0.59844028, 1.0000149]], - [[1.00005405, 0.49806613, 0.24902255], - [0., 0.59846373, 1.00005405]], - [[1.00001585, 0.49804711, 0.24896771], - [0., 0.59844073, 1.00001585]]]) - self.assertTrue(np.allclose(res, exp, atol=1e-4)) + exp = np.array([[[1.0, 0.498039, 0.246575], + [0., 0.59309977, 1.0]], + [[1.0, 0.49803924, 0.24657543], + [0., 0.59309983, 1.0]], + [[1.0, 0.4980392, 0.24657541], + [0., 0.59309978, 1.0]]]) + np.testing.assert_allclose(res, exp, atol=1e-4) class TestCloudCompositorWithoutCloudfree: From c83a60aafd578518879b080102a205869186213f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Sep 2023 10:44:20 +0000 Subject: [PATCH 0452/1416] Bump actions/checkout from 3 to 4 Bumps [actions/checkout](https://github.com/actions/checkout) from 3 to 4. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v3...v4) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/ci.yaml | 4 ++-- .github/workflows/deploy-sdist.yaml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 178138fe3e..9897f8886a 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -16,7 +16,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout source - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Python uses: actions/setup-python@v4 with: @@ -55,7 +55,7 @@ jobs: steps: - name: Checkout source - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Conda Environment uses: conda-incubator/setup-miniconda@v2 diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index 73db646c62..ba8653a9f2 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -11,7 +11,7 @@ jobs: steps: - name: Checkout source - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Create sdist shell: bash -l {0} From 62e8ac3b21d9f9ab5cf2e29aefc35852a7a3e0e7 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 13 Sep 2023 11:39:18 -0500 Subject: [PATCH 0453/1416] Rename "night_microphsyics_abi" composite to "night_microphysics" --- satpy/etc/composites/abi.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/composites/abi.yaml b/satpy/etc/composites/abi.yaml index d34e86313b..783bd5187a 100644 --- a/satpy/etc/composites/abi.yaml +++ b/satpy/etc/composites/abi.yaml @@ -537,7 +537,7 @@ composites: - name: C13 standard_name: snow - night_microphysics_abi: + night_microphysics: description: > Nighttime Microphysics RGB, for GOESR: NASA, NOAA references: From 9ad3995849bc0688b973895faf20f1c8674f8000 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 14 Sep 2023 10:42:06 -0500 Subject: [PATCH 0454/1416] Fetch all tags in RTD for versioning --- .readthedocs.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index d16daabffc..a04fbdda78 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -14,11 +14,11 @@ build: tools: python: "mambaforge-4.10" jobs: + post_checkout: + - git fetch --tags pre_install: - - git status - git update-index --assume-unchanged doc/rtd_environment.yml doc/source/conf.py - - git status - post_install: - - python -m pip install --no-deps -e . + # post_install: + # - python -m pip install --no-deps -e . conda: environment: doc/rtd_environment.yml From 6d4fcc2898237dc4e07f24aaa188c4b2649a25c1 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 14 Sep 2023 10:50:34 -0500 Subject: [PATCH 0455/1416] Remove commented out pip install in RTD config --- .readthedocs.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index a04fbdda78..9f3d7bd1b5 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -18,7 +18,5 @@ build: - git fetch --tags pre_install: - git update-index --assume-unchanged doc/rtd_environment.yml doc/source/conf.py - # post_install: - # - python -m pip install --no-deps -e . conda: environment: doc/rtd_environment.yml From ed1c4a171e703c10dc8d9fa1e4e9260a2bde3abd Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Mon, 18 Sep 2023 16:03:28 +0200 Subject: [PATCH 0456/1416] Add non-linear strength for AMI ndvi_hybrid_green band. 
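
With a strength s, the NDVI (already clipped to [ndvi_min, ndvi_max],
[0, 1] by default) is rescaled as ndvi**s / (ndvi**s + (1 - ndvi)**s) before
being mapped linearly onto the blend fraction range, which leaves the
endpoints untouched while steepening the transition around 0.5. A small
numeric sketch of the effect for strength 3.0:

    import numpy as np

    ndvi = np.array([0.0, 0.25, 0.5, 0.75, 1.0])
    s = 3.0
    # Rescaling applied by NDVIHybridGreen before the linear ndvi -> fraction mapping
    print(ndvi**s / (ndvi**s + (1 - ndvi)**s))
    # -> approximately [0.0, 0.0357, 0.5, 0.9643, 1.0]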
---
 satpy/etc/composites/ami.yaml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/satpy/etc/composites/ami.yaml b/satpy/etc/composites/ami.yaml
index 5a8608d795..b0d943c5ca 100644
--- a/satpy/etc/composites/ami.yaml
+++ b/satpy/etc/composites/ami.yaml
@@ -69,6 +69,7 @@ composites:
       currently implemented are experimental and may change in future versions of Satpy.
     compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen
     limits: [0.15, 0.05]
+    strength: 3.0
     prerequisites:
       - name: VI005
         modifiers: [sunz_corrected, rayleigh_corrected]
@@ -83,6 +84,7 @@ composites:
       Alternative to ndvi_hybrid_green, but without solar zenith or rayleigh correction.
     compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen
     limits: [0.15, 0.05]
+    strength: 3.0
     prerequisites:
       - name: VI005
       - name: VI006

From 049a2a3690e07166aa301d68e9d7e9ee569e0d2d Mon Sep 17 00:00:00 2001
From: Johan Strandgren
Date: Mon, 18 Sep 2023 16:49:49 +0200
Subject: [PATCH 0457/1416] Refactor code for applying non-linearity and
 computing blend fractions.

---
 satpy/composites/spectral.py | 34 ++++++++++++++++++++++++++++------
 1 file changed, 28 insertions(+), 6 deletions(-)

diff --git a/satpy/composites/spectral.py b/satpy/composites/spectral.py
index 5e6e03c148..7d05a000d6 100644
--- a/satpy/composites/spectral.py
+++ b/satpy/composites/spectral.py
@@ -169,17 +169,39 @@ def __call__(self, projectables, optional_datasets=None, **attrs):
         ndvi.data = da.where(ndvi > self.ndvi_min, ndvi, self.ndvi_min)
         ndvi.data = da.where(ndvi < self.ndvi_max, ndvi, self.ndvi_max)
 
-        # Apply non-linearity to the ndvi for a non-linear conversion from ndvi to fraction. This can be used for a
-        # slower or faster transision to higher/lower fractions at the ndvi extremes. If strength equals 1.0, this
-        # operation has no effect on the ndvi.
+        # Introduce non-linearity to ndvi for non-linear scaling to NIR blend fraction
+        if self.strength != 1.0:  # self._apply_strength() has no effect if strength = 1.0 -> no non-linear behaviour
+            ndvi = self._apply_strength(ndvi)
+
+        # Compute pixel-level NIR blend fractions from ndvi
+        fraction = self._compute_blend_fraction(ndvi)
+
+        # Prepare input as required by parent class (SpectralBlender)
+        self.fractions = (1 - fraction, fraction)
+
+        return super().__call__([projectables[0], projectables[2]], **attrs)
+
+    def _apply_strength(self, ndvi):
+        """Introduce non-linearity by applying strength factor.
+
+        The method introduces non-linearity to the ndvi for a non-linear scaling from ndvi to blend fraction in
+        `_compute_blend_fraction`. This can be used for a slower or faster transition to higher/lower fractions
+        at the ndvi extremes. If strength equals 1.0, this operation has no effect on the ndvi.
+        """
         ndvi = ndvi ** self.strength / (ndvi ** self.strength + (1 - ndvi) ** self.strength)
 
-        # Compute blending fraction from ndvi
+        return ndvi
+
+    def _compute_blend_fraction(self, ndvi):
+        """Compute pixel-level fraction of NIR signal to blend with native green signal.
+
+        This method linearly scales the input ndvi values to pixel-level blend fractions within the range
+        `[limits[0], limits[1]]` following this implementation.
+        """
         fraction = (ndvi - self.ndvi_min) / (self.ndvi_max - self.ndvi_min) * (self.limits[1] - self.limits[0]) \
             + self.limits[0]
 
-        self.fractions = (1 - fraction, fraction)
 
-        return super().__call__([projectables[0], projectables[2]], **attrs)
+        return fraction
 
 
 class GreenCorrector(SpectralBlender):

From 3767f61a4b2545843232a97c2b7c02dd04448c33 Mon Sep 17 00:00:00 2001
From: Johan Strandgren
Date: Mon, 18 Sep 2023 17:03:03 +0200
Subject: [PATCH 0458/1416] Refactor unit tests for NDVI-weighted hybrid green
 correction.

---
 satpy/tests/compositor_tests/test_spectral.py | 46 +++++++++++--------
 1 file changed, 27 insertions(+), 19 deletions(-)

diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py
index 03e51a5043..b6460c911b 100644
--- a/satpy/tests/compositor_tests/test_spectral.py
+++ b/satpy/tests/compositor_tests/test_spectral.py
@@ -65,8 +65,24 @@ def test_hybrid_green(self):
         data = res.compute()
         np.testing.assert_allclose(data, 0.23)
 
-    def test_ndvi_hybrid_green(self):
-        """Test NDVI-scaled hybrid green correction of 'green' band."""
+    def test_green_corrector(self):
+        """Test the deprecated class for green corrections."""
+        comp = GreenCorrector('blended_channel', fractions=(0.85, 0.15), prerequisites=(0.51, 0.85),
+                              standard_name='toa_bidirectional_reflectance')
+        res = comp((self.c01, self.c03))
+        assert isinstance(res, xr.DataArray)
+        assert isinstance(res.data, da.Array)
+        assert res.attrs['name'] == 'blended_channel'
+        assert res.attrs['standard_name'] == 'toa_bidirectional_reflectance'
+        data = res.compute()
+        np.testing.assert_allclose(data, 0.23)
+
+
+class TestNdviHybridGreenCompositor:
+    """Test NDVI-weighted hybrid green correction of green band."""
+
+    def setup_method(self):
+        """Initialize channels."""
         self.c01 = xr.DataArray(da.from_array([[0.25, 0.30], [0.20, 0.30]], chunks=25),
                                 dims=('y', 'x'), attrs={'name': 'C02'})
         self.c02 = xr.DataArray(da.from_array([[0.25, 0.30], [0.25, 0.35]], chunks=25),
@@ -74,6 +90,8 @@
         self.c03 = xr.DataArray(da.from_array([[0.35, 0.35], [0.28, 0.65]], chunks=25),
                                 dims=('y', 'x'), attrs={'name': 'C04'})
 
+    def test_ndvi_hybrid_green(self):
+        """Test general functionality with linear scaling from ndvi to blend fraction."""
         comp = NDVIHybridGreen('ndvi_hybrid_green', limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85),
                                standard_name='toa_bidirectional_reflectance')
 
@@ -86,26 +104,16 @@
         data = res.values
         np.testing.assert_array_almost_equal(data, np.array([[0.2633, 0.3071], [0.2115, 0.3420]]), decimal=4)
 
-        # Test invalid strength
-        with pytest.raises(ValueError):
-            _ = NDVIHybridGreen('ndvi_hybrid_green', strength=0.0, prerequisites=(0.51, 0.65, 0.85),
-                                standard_name='toa_bidirectional_reflectance')
-
-        # Test non-linear strength
+    def test_nonlinear_scaling(self):
+        """Test non-linear scaling using `strength` term."""
         comp = NDVIHybridGreen('ndvi_hybrid_green', limits=(0.15, 0.05), strength=2.0,
                                prerequisites=(0.51, 0.65, 0.85),
                                standard_name='toa_bidirectional_reflectance')
         res = comp((self.c01, self.c02, self.c03))
         np.testing.assert_array_almost_equal(res.values, np.array([[0.2646, 0.3075], [0.2120, 0.3471]]), decimal=4)
 
-    def test_green_corrector(self):
-        """Test the deprecated class for green corrections."""
-        comp = GreenCorrector('blended_channel', fractions=(0.85, 0.15), prerequisites=(0.51, 0.85),
-                              standard_name='toa_bidirectional_reflectance')
-        res = comp((self.c01, self.c03))
-        assert isinstance(res, 
xr.DataArray)
-        assert isinstance(res.data, da.Array)
-        assert res.attrs['name'] == 'blended_channel'
-        assert res.attrs['standard_name'] == 'toa_bidirectional_reflectance'
-        data = res.compute()
-        np.testing.assert_allclose(data, 0.23)
+    def test_invalid_strength(self):
+        """Test using invalid `strength` term for non-linear scaling."""
+        with pytest.raises(ValueError):
+            _ = NDVIHybridGreen('ndvi_hybrid_green', strength=0.0, prerequisites=(0.51, 0.65, 0.85),
+                                standard_name='toa_bidirectional_reflectance')

From 08e833eafbcb0d44a1f436a2eb7fbac12c7739f0 Mon Sep 17 00:00:00 2001
From: Johan Strandgren
Date: Thu, 24 Aug 2023 14:59:41 +0200
Subject: [PATCH 0459/1416] Add modifier for reducing measurement signal
 within a defined sunz interval.

---
 satpy/modifiers/__init__.py |  1 +
 satpy/modifiers/angles.py   | 46 +++++++++++++++++++++++++++++++++++++
 satpy/modifiers/geometry.py | 41 ++++++++++++++++++++++++++++++++-
 3 files changed, 87 insertions(+), 1 deletion(-)

diff --git a/satpy/modifiers/__init__.py b/satpy/modifiers/__init__.py
index a0888167b3..c8d32f246e 100644
--- a/satpy/modifiers/__init__.py
+++ b/satpy/modifiers/__init__.py
@@ -25,5 +25,6 @@
 from .atmosphere import PSPRayleighReflectance  # noqa: F401
 from .geometry import EffectiveSolarPathLengthCorrector  # noqa: F401
 from .geometry import SunZenithCorrector  # noqa: F401
+from .geometry import SunZenithReducer  # noqa: F401
 from .spectral import NIREmissivePartFromReflectance  # noqa: F401
 from .spectral import NIRReflectance  # noqa: F401
diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py
index 28adb60028..a914863b04 100644
--- a/satpy/modifiers/angles.py
+++ b/satpy/modifiers/angles.py
@@ -235,6 +235,7 @@ def cache_to_zarr_if(
     out old entries. It is up to the user to manage the size of the cache.
 
     """
+
     def _decorator(func: Callable) -> Callable:
         zarr_cacher = ZarrCacheHelper(func,
                                       cache_config_key,
@@ -242,6 +243,7 @@ def _decorator(func: Callable) -> Callable:
                                       sanitize_args_func)
         wrapper = update_wrapper(zarr_cacher, func)
         return wrapper
+
     return _decorator
 
 
@@ -542,3 +544,47 @@ def _sunzen_corr_cos_ndarray(data: np.ndarray,
     # Force "night" pixels to 0 (where SZA is invalid)
     corr[np.isnan(cos_zen)] = 0
     return data * corr
+
+
+def sunzen_reduction(data: da.Array,
+                     sunz: da.Array,
+                     limit: float = 65.,
+                     max_sza: float = 95.,
+                     strength: float = 2.) -> da.Array:
+    """Reduce the strength of the signal at high sun zenith angles."""
+    return da.map_blocks(_sunzen_reduction_ndarray, data, sunz, limit, max_sza, strength,
+                         meta=np.array((), dtype=data.dtype), chunks=data.chunks)
+
+
+def _sunzen_reduction_ndarray(data: np.ndarray,
+                              sunz: np.ndarray,
+                              limit: float,
+                              max_sza: float,
+                              strength: float) -> np.ndarray:
+    if max_sza is None:
+        raise ValueError("`max_sza` must be defined when using the SunZenithReducer.")
+
+    # compute reduction factor (0.0 - 1.0) between limit and max_sza
+    reduction_factor = (sunz - limit) / (max_sza - limit)
+    reduction_factor = reduction_factor.clip(0., 1.)
+
+    # invert the reduction factor such that minimum reduction is done at `limit` and gradually increases towards
+    # max_sza
+    with np.errstate(invalid='ignore'):  # we expect space pixels to be invalid
+        reduction_factor = 1. - np.log(reduction_factor + 1) / np.log(2)
+
+    # apply non-linearity to the reduction factor for a non-linear reduction of the signal. This can be used for a
+    # slower or faster transition to higher/lower reduction at the interval extremes. If strength equals 1.0, this
+    # operation has no effect on the reduction_factor.
+ reduction_factor = reduction_factor ** strength / ( + reduction_factor ** strength + (1 - reduction_factor) ** strength) + + # compute final correction term, with no reduction for angles < limit + corr = np.where(sunz < limit, 1.0, reduction_factor) + + # force "night" pixels to 0 (where SZA is invalid) + corr[np.isnan(sunz)] = 0 + + # reduce data signal with correction term + res = data * corr + + return res diff --git a/satpy/modifiers/geometry.py b/satpy/modifiers/geometry.py index ecd83f80e5..0022a487a4 100644 --- a/satpy/modifiers/geometry.py +++ b/satpy/modifiers/geometry.py @@ -24,7 +24,7 @@ import numpy as np from satpy.modifiers import ModifierBase -from satpy.modifiers.angles import sunzen_corr_cos +from satpy.modifiers.angles import sunzen_corr_cos, sunzen_reduction from satpy.utils import atmospheric_path_length_correction logger = logging.getLogger(__name__) @@ -159,3 +159,42 @@ def __init__(self, correction_limit=88., **kwargs): def _apply_correction(self, proj, coszen): logger.debug("Apply the effective solar atmospheric path length correction method by Li and Shibata") return atmospheric_path_length_correction(proj, coszen, limit=self.correction_limit, max_sza=self.max_sza) + + +class SunZenithReducer(SunZenithCorrectorBase): + """Reduce signal strength at large sun zenith angles. + + Within a given sunz interval [correction_limit, max_sza] the strength of the signal is reduced following the + formula: + + res = signal * reduction_factor + + where reduction_factor is a pixel-level value ranging from 0 to 1 within the sunz interval. + + The `strength` parameter can be used for a non-linear reduction within the sunz interval. A strength larger + than 1.0 will decelerate the signal reduction towards the sunz interval extremes, whereas a strength + smaller than 1.0 will accelerate the signal reduction towards the sunz interval extremes. + + """ + + def __init__(self, correction_limit=60., strength=2.0, **kwargs): + """Collect custom configuration values. + + Args: + correction_limit (float): Maximum solar zenith angle to apply the correction in degrees. Default 60. + strength (float): The strength of the non-linear signal reduction. Default 2.0 + + """ + self.correction_limit = correction_limit + self.strength = strength + super(SunZenithReducer, self).__init__(**kwargs) + + def _apply_correction(self, proj, coszen): + logger.debug("Apply sun-zenith signal reduction") + res = proj.copy() + sunz = np.rad2deg(np.arccos(coszen.data)) + res.data = sunzen_reduction(proj.data, sunz, + limit=self.correction_limit, + max_sza=self.max_sza, + strength=self.strength) + return res From f3970cd780f41457e7c6485ebc834703b700ed85 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 24 Aug 2023 16:28:44 +0200 Subject: [PATCH 0460/1416] Modify default values and add default modifier recipe in visir.yaml. 
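
For reference, the reduction implied by the new defaults
(correction_limit=55, max_sza=90, strength=1.5) behaves roughly as in this
standalone sketch, which re-implements the steps of
``_sunzen_reduction_ndarray`` for illustration only:

    import numpy as np

    def reduction(sunz, limit=55., max_sza=90., strength=1.5):
        # linear ramp between limit and max_sza, clipped to [0, 1]
        rf = np.clip((sunz - limit) / (max_sza - limit), 0., 1.)
        # invert so the reduction grows from `limit` towards `max_sza`
        rf = 1. - np.log(rf + 1) / np.log(2)
        # non-linear shaping controlled by `strength`
        rf = rf ** strength / (rf ** strength + (1 - rf) ** strength)
        # no reduction below the correction limit
        return np.where(sunz < limit, 1.0, rf)

    print(reduction(np.array([30., 55., 70., 85., 90.])))
    # -> approximately [1.0, 1.0, 0.48, 0.04, 0.0]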
---
 satpy/etc/composites/visir.yaml |  5 +++++
 satpy/modifiers/geometry.py     | 10 ++++++----
 2 files changed, 11 insertions(+), 4 deletions(-)

diff --git a/satpy/etc/composites/visir.yaml b/satpy/etc/composites/visir.yaml
index 0bb177e9ff..d9798057a2 100644
--- a/satpy/etc/composites/visir.yaml
+++ b/satpy/etc/composites/visir.yaml
@@ -16,6 +16,11 @@ modifiers:
     optional_prerequisites:
     - solar_zenith_angle
 
+  sunz_reduced:
+    modifier: !!python/name:satpy.modifiers.SunZenithReducer
+    optional_prerequisites:
+    - solar_zenith_angle
+
   co2_corrected:
     modifier: !!python/name:satpy.modifiers.CO2Corrector
     prerequisites:
diff --git a/satpy/modifiers/geometry.py b/satpy/modifiers/geometry.py
index 0022a487a4..e93b34e1a8 100644
--- a/satpy/modifiers/geometry.py
+++ b/satpy/modifiers/geometry.py
@@ -177,17 +177,19 @@ class SunZenithReducer(SunZenithCorrectorBase):
 
     """
 
-    def __init__(self, correction_limit=60., strength=2.0, **kwargs):
+    def __init__(self, correction_limit=55., max_sza=90, strength=1.5, **kwargs):
         """Collect custom configuration values.
 
         Args:
-            correction_limit (float): Maximum solar zenith angle to apply the correction in degrees. Default 60.
-            strength (float): The strength of the non-linear signal reduction. Default 2.0
+            correction_limit (float): Solar zenith angle in degrees where to start the signal reduction. Default 55.
+            max_sza (float): Maximum solar zenith angle in degrees where to apply the signal reduction. Beyond
+                this solar zenith angle the signal will become zero. Default 90.
+            strength (float): The strength of the non-linear signal reduction. Default 1.5
 
         """
         self.correction_limit = correction_limit
         self.strength = strength
-        super(SunZenithReducer, self).__init__(**kwargs)
+        super(SunZenithReducer, self).__init__(max_sza=max_sza, **kwargs)
 
     def _apply_correction(self, proj, coszen):
         logger.debug("Apply sun-zenith signal reduction")

From 2dff5696e2f71128790c919886173cd32923d422 Mon Sep 17 00:00:00 2001
From: Johan Strandgren
Date: Thu, 24 Aug 2023 16:30:18 +0200
Subject: [PATCH 0461/1416] Add satz reducer modifier in FCI true color
 recipes.

---
 satpy/etc/composites/fci.yaml | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml
index 193415656b..55876cb98d 100644
--- a/satpy/etc/composites/fci.yaml
+++ b/satpy/etc/composites/fci.yaml
@@ -13,11 +13,11 @@ composites:
     limits: [0.15, 0.05]
     prerequisites:
       - name: vis_05
-        modifiers: [sunz_corrected, rayleigh_corrected]
+        modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced]
       - name: vis_06
-        modifiers: [sunz_corrected, rayleigh_corrected]
+        modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced]
       - name: vis_08
-        modifiers: [sunz_corrected ]
+        modifiers: [sunz_corrected, sunz_reduced]
     standard_name: toa_bidirectional_reflectance
 
   ndvi_hybrid_green_raw:
@@ -48,10 +48,10 @@ composites:
       of the ndvi_hybrid_green composites for details.
     prerequisites:
      - name: vis_06
-        modifiers: [sunz_corrected, rayleigh_corrected]
+        modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced]
      - name: ndvi_hybrid_green
      - name: vis_04
-        modifiers: [sunz_corrected, rayleigh_corrected]
+        modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced]
     standard_name: true_color
 
   true_color_raw_with_corrected_green:

From d2d82cf2d4815c76a312ac1403d410f72a89390b Mon Sep 17 00:00:00 2001
From: Johan Strandgren
Date: Thu, 24 Aug 2023 16:42:39 +0200
Subject: [PATCH 0462/1416] Add satz reducer modifier in AHI NDVI-based true
 color recipe.
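
No user code changes are needed once ``sunz_reduced`` is part of a band's
modifier chain; loading the composite applies the reduction automatically.
A hypothetical usage sketch (the file glob and composite name below are
placeholders, not taken from this change):

    from glob import glob

    from satpy import Scene

    scn = Scene(reader="ahi_hsd", filenames=glob("HS_H08_*FLDK*"))
    scn.load(["true_color_ndvi_green"])  # band recipes run sunz_reduced
    scn.show("true_color_ndvi_green")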
---
 satpy/etc/composites/ahi.yaml | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml
index 9e36bf7d7f..5b000689dc 100644
--- a/satpy/etc/composites/ahi.yaml
+++ b/satpy/etc/composites/ahi.yaml
@@ -107,11 +107,11 @@ composites:
     compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen
     prerequisites:
      - name: B02
-        modifiers: [sunz_corrected, rayleigh_corrected]
+        modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced]
      - name: B03
-        modifiers: [sunz_corrected, rayleigh_corrected]
+        modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced]
      - name: B04
-        modifiers: [sunz_corrected]
+        modifiers: [sunz_corrected, sunz_reduced]
     standard_name: toa_bidirectional_reflectance
 
   airmass:
@@ -275,10 +275,10 @@ composites:
     compositor: !!python/name:satpy.composites.SelfSharpenedRGB
     prerequisites:
      - name: B03
-        modifiers: [sunz_corrected, rayleigh_corrected]
+        modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced]
      - name: ndvi_hybrid_green
      - name: B01
-        modifiers: [sunz_corrected, rayleigh_corrected]
+        modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced]
     high_resolution_band: red
     standard_name: true_color

From 25a77c2dbe3ed841115839a3a2d2d969045ff945 Mon Sep 17 00:00:00 2001
From: Johan Strandgren
Date: Fri, 25 Aug 2023 09:58:50 +0200
Subject: [PATCH 0463/1416] Align default values for sunz reduction.

---
 satpy/modifiers/angles.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py
index a914863b04..103ea7362b 100644
--- a/satpy/modifiers/angles.py
+++ b/satpy/modifiers/angles.py
@@ -548,9 +548,9 @@ def _sunzen_corr_cos_ndarray(data: np.ndarray,
 
 
 def sunzen_reduction(data: da.Array,
                      sunz: da.Array,
-                     limit: float = 65.,
-                     max_sza: float = 95.,
-                     strength: float = 2.) -> da.Array:
+                     limit: float = 55.,
+                     max_sza: float = 90.,
+                     strength: float = 1.5) -> da.Array:
     """Reduce the strength of the signal at high sun zenith angles."""
     return da.map_blocks(_sunzen_reduction_ndarray, data, sunz, limit, max_sza, strength,

From 1a22f1c8d43d3840374c818d6763b31ef623f49d Mon Sep 17 00:00:00 2001
From: Johan Strandgren
Date: Fri, 25 Aug 2023 10:03:52 +0200
Subject: [PATCH 0464/1416] Move check for max_sza=None to class init method.

---
 satpy/modifiers/angles.py   | 3 ---
 satpy/modifiers/geometry.py | 2 ++
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py
index 103ea7362b..391278bf8f 100644
--- a/satpy/modifiers/angles.py
+++ b/satpy/modifiers/angles.py
@@ -561,9 +561,6 @@ def _sunzen_reduction_ndarray(data: np.ndarray,
                               limit: float,
                               max_sza: float,
                               strength: float) -> np.ndarray:
-    if max_sza is None:
-        raise ValueError("`max_sza` must be defined when using the SunZenithReducer.")
-
     # compute reduction factor (0.0 - 1.0) between limit and max_sza
     reduction_factor = (sunz - limit) / (max_sza - limit)
     reduction_factor = reduction_factor.clip(0., 1.)
diff --git a/satpy/modifiers/geometry.py b/satpy/modifiers/geometry.py index e93b34e1a8..61c406adb6 100644 --- a/satpy/modifiers/geometry.py +++ b/satpy/modifiers/geometry.py @@ -190,6 +190,8 @@ def __init__(self, correction_limit=55., max_sza=90, strength=1.5, **kwargs): self.correction_limit = correction_limit self.strength = strength super(SunZenithReducer, self).__init__(max_sza=max_sza, **kwargs) + if self.max_sza is None: + raise ValueError("`max_sza` must be defined when using the SunZenithReducer.") def _apply_correction(self, proj, coszen): logger.debug("Apply sun-zenith signal reduction") From a4c9c18a3eb3b2a5e0127c5c8799ee1b4320ca92 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Fri, 25 Aug 2023 10:06:38 +0200 Subject: [PATCH 0465/1416] Add unit tests for SunZenithReducer modifier. --- satpy/tests/test_modifiers.py | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index c21a514808..0ba788b4d1 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -156,6 +156,35 @@ def test_imcompatible_areas(self, sunz_ds2, sunz_sza): comp((sunz_ds2, sunz_sza), test_attr='test') +class TestSunZenithReducer: + """Test case for the sun zenith reducer.""" + + def test_basic_default_provided(self, sunz_ds1, sunz_sza): + """Test default settings with sza data available.""" + from satpy.modifiers.geometry import SunZenithReducer + comp = SunZenithReducer(name='sza_reduction_test_default', modifiers=tuple()) + res = comp((sunz_ds1, sunz_sza), test_attr='test') + np.testing.assert_allclose(res.values, + np.array([[0.00242814, 0.00235669], [0.00245885, 0.00238707]]), + rtol=1e-5) + + def test_basic_lims_provided(self, sunz_ds1, sunz_sza): + """Test custom settings with sza data available.""" + from satpy.modifiers.geometry import SunZenithReducer + comp = SunZenithReducer(name='sza_reduction_test_custom', modifiers=tuple(), + correction_limit=70, max_sza=95, strength=3.0) + res = comp((sunz_ds1, sunz_sza), test_attr='test') + np.testing.assert_allclose(res.values, + np.array([[0.01041319, 0.01030033], [0.01046164, 0.01034834]]), + rtol=1e-5) + + def test_invalid_max_sza(self, sunz_ds1, sunz_sza): + """Test invalid max_sza with sza data available.""" + from satpy.modifiers.geometry import SunZenithReducer + with pytest.raises(ValueError): + SunZenithReducer(name='sza_reduction_test_invalid', modifiers=tuple(), max_sza=None) + + class TestNIRReflectance(unittest.TestCase): """Test NIR reflectance compositor.""" From ba655df5cd61265a16dfa4d45a9a70fc7ea89385 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Fri, 25 Aug 2023 10:45:35 +0200 Subject: [PATCH 0466/1416] Refactor test to reduce code duplication. 
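
The refactor follows the usual pytest pattern: build the objects under test
once per test class instead of once per test method. A generic sketch with
hypothetical names:

    import pytest

    class TestReducerVariants:
        @classmethod
        def setup_class(cls):
            # runs once for the whole class; values are shared read-only
            cls.default = {"limit": 55.0}
            cls.custom = {"limit": 70.0}

        def test_default(self):
            assert self.default["limit"] == pytest.approx(55.0)

        def test_custom(self):
            assert self.custom["limit"] == pytest.approx(70.0)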
---
 satpy/tests/test_modifiers.py | 21 ++++++++++++---------
 1 file changed, 12 insertions(+), 9 deletions(-)

diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py
index 0ba788b4d1..f7cc37391d 100644
--- a/satpy/tests/test_modifiers.py
+++ b/satpy/tests/test_modifiers.py
@@ -159,21 +159,24 @@ def test_imcompatible_areas(self, sunz_ds2, sunz_sza):
 class TestSunZenithReducer:
     """Test case for the sun zenith reducer."""
 
-    def test_basic_default_provided(self, sunz_ds1, sunz_sza):
-        """Test default settings with sza data available."""
+    @classmethod
+    def setup_class(cls):
+        """Initialize SunZenithReducer classes that shall be tested."""
         from satpy.modifiers.geometry import SunZenithReducer
-        comp = SunZenithReducer(name='sza_reduction_test_default', modifiers=tuple())
-        res = comp((sunz_ds1, sunz_sza), test_attr='test')
+        cls.default = SunZenithReducer(name='sza_reduction_test_default', modifiers=tuple())
+        cls.custom = SunZenithReducer(name='sza_reduction_test_custom', modifiers=tuple(),
+                                      correction_limit=70, max_sza=95, strength=3.0)
+
+    def test_default_settings(self, sunz_ds1, sunz_sza):
+        """Test default settings with sza data available."""
+        res = self.default((sunz_ds1, sunz_sza), test_attr='test')
         np.testing.assert_allclose(res.values,
                                    np.array([[0.00242814, 0.00235669], [0.00245885, 0.00238707]]),
                                    rtol=1e-5)
 
-    def test_basic_lims_provided(self, sunz_ds1, sunz_sza):
+    def test_custom_settings(self, sunz_ds1, sunz_sza):
         """Test custom settings with sza data available."""
-        from satpy.modifiers.geometry import SunZenithReducer
-        comp = SunZenithReducer(name='sza_reduction_test_custom', modifiers=tuple(),
-                                correction_limit=70, max_sza=95, strength=3.0)
-        res = comp((sunz_ds1, sunz_sza), test_attr='test')
+        res = self.custom((sunz_ds1, sunz_sza), test_attr='test')
         np.testing.assert_allclose(res.values,
                                    np.array([[0.01041319, 0.01030033], [0.01046164, 0.01034834]]),
                                    rtol=1e-5)

From 2fa3dc871608169d2ea7efb20415d623be309cd0 Mon Sep 17 00:00:00 2001
From: David Hoese
Date: Wed, 20 Sep 2023 10:07:03 -0500
Subject: [PATCH 0467/1416] Remove use of deprecated setuptools_scm_git_archive
 build package

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index e27dcfd9e2..64c68d60eb 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,5 +1,5 @@
 [build-system]
-requires = ["setuptools>=45", "wheel", "setuptools_scm[toml]>=6.2", 'setuptools_scm_git_archive']
+requires = ["setuptools>=60", "wheel", "setuptools_scm[toml]>=8.0"]
 build-backend = "setuptools.build_meta"
 
 [tool.setuptools_scm]

From 0fc4c911b77f60285e2a5d97c7e5a262a696cbf9 Mon Sep 17 00:00:00 2001
From: David Hoese
Date: Wed, 20 Sep 2023 10:46:13 -0500
Subject: [PATCH 0468/1416] Remove setuptools_scm hack in setup.py and exclude
 more files in sdist

---
 MANIFEST.in | 11 ++++++++---
 setup.py    |  8 --------
 2 files changed, 8 insertions(+), 11 deletions(-)

diff --git a/MANIFEST.in b/MANIFEST.in
index 7c8ea0e146..1b2aca456e 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,6 +1,11 @@
-include doc/Makefile
-include doc/source/*
-include doc/examples/*.py
+prune *
+exclude *
+graft doc
+recursive-exclude doc/build *
+graft satpy
 include LICENSE.txt
 include README.rst
+include AUTHORS.md
+include CHANGELOG.md
 include satpy/version.py
+global-exclude *.py[cod]
diff --git a/setup.py b/setup.py
index 2e6154ea92..b3eb4c5577 100644
--- a/setup.py
+++ b/setup.py
@@ -22,14 +22,6 @@
 
 from setuptools import find_packages, setup
 
-try:
-    # HACK: 
https://github.com/pypa/setuptools_scm/issues/190#issuecomment-351181286 - # Stop setuptools_scm from including all repository files - import setuptools_scm.integration - setuptools_scm.integration.find_files = lambda _: [] -except ImportError: - pass - requires = ['numpy >=1.13', 'pillow', 'pyresample >=1.24.0', 'trollsift', 'trollimage >=1.20', 'pykdtree', 'pyyaml >=5.1', 'xarray >=0.10.1, !=0.13.0', 'dask[array] >=0.17.1', 'pyproj>=2.2', 'zarr', 'donfig', 'appdirs', From a6d7b3a492e88266ab68000793c3f60bfe46f049 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 20 Sep 2023 13:47:35 -0500 Subject: [PATCH 0469/1416] Run sdist CI on pushes and PRs --- .github/workflows/deploy-sdist.yaml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index ba8653a9f2..53168e95a8 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -1,6 +1,8 @@ name: Deploy sdist on: + push: + pull_request: release: types: - published @@ -12,10 +14,14 @@ jobs: steps: - name: Checkout source uses: actions/checkout@v4 + with: + fetch-tags: true - name: Create sdist shell: bash -l {0} - run: python setup.py sdist + run: | + python -m pip install -q build + python -m build -s - name: Publish package to PyPI if: github.event.action == 'published' From a87020477c3eca459799561662be7f590a0d68e9 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 20 Sep 2023 14:26:53 -0500 Subject: [PATCH 0470/1416] Remove unnecessary fetch tags --- .github/workflows/deploy-sdist.yaml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index 53168e95a8..4ed63fefdd 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -14,8 +14,6 @@ jobs: steps: - name: Checkout source uses: actions/checkout@v4 - with: - fetch-tags: true - name: Create sdist shell: bash -l {0} From 72bdca2920ba367c95c3d197aa3fc6e5a4ee3047 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 20 Sep 2023 14:41:19 -0500 Subject: [PATCH 0471/1416] Add missing python package files in sdist --- MANIFEST.in | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/MANIFEST.in b/MANIFEST.in index 1b2aca456e..3a7cdb0b43 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -7,5 +7,10 @@ include LICENSE.txt include README.rst include AUTHORS.md include CHANGELOG.md +include SECURITY.md +include CITATION include satpy/version.py +include pyproject.toml +include setup.py +include setup.cfg global-exclude *.py[cod] From 1b154fbdd89667f26fdc48f35d104e7cee9c9c6c Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 21 Sep 2023 08:33:33 +0200 Subject: [PATCH 0472/1416] Remove unneeded performance tracker --- satpy/readers/seviri_l1b_hrit.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 1fedadb0e2..2b153edfcc 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -214,7 +214,7 @@ import copy import logging -from datetime import datetime, timedelta +from datetime import timedelta import dask.array as da import numpy as np @@ -721,7 +721,6 @@ def pad_hrv_data(self, res): def calibrate(self, data, calibration): """Calibrate the data.""" - tic = datetime.now() calib = SEVIRICalibrationHandler( platform_id=self.platform_id, channel_name=self.channel_name, @@ -730,7 +729,6 @@ def calibrate(self, data, calibration): 
scan_time=self.observation_start_time ) res = calib.calibrate(data, calibration) - logger.debug("Calibration time " + str(datetime.now() - tic)) return res def _mask_bad_quality(self, data): From c980ab2b2306b3a7e3490e7398c4bfab89a13369 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 21 Sep 2023 08:47:52 +0200 Subject: [PATCH 0473/1416] Fix cf tests for numpy 2 --- satpy/tests/writer_tests/test_cf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 54770b9176..82919dc65c 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -537,7 +537,7 @@ def get_test_attrs(self): 'numpy_bool': True, 'numpy_void': np.void(0), 'numpy_bytes': np.bytes_('test'), - 'numpy_string': np.string_('test'), + 'numpy_string': np.str_('test'), 'list': [1, 2, np.float64(3)], 'nested_list': ["1", ["2", [3]]], 'bool': True, From 3bfcc345b61fa7c3c6c72acfa26e5367f34b341b Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 21 Sep 2023 09:08:34 +0200 Subject: [PATCH 0474/1416] Replace np.string_ with np.bytes_ for numpy 2 in cf writer --- satpy/writers/cf_writer.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index b9a24b9292..ed149391bb 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -200,7 +200,7 @@ np.dtype('int32'), np.dtype('uint32'), np.dtype('int64'), np.dtype('uint64'), np.dtype('float32'), np.dtype('float64'), - np.string_] + np.bytes_] # Unsigned and int64 isn't CF 1.7 compatible # Note: Unsigned and int64 are CF 1.9 compatible @@ -209,7 +209,7 @@ np.dtype('int32'), np.dtype('float32'), np.dtype('float64'), - np.string_] + np.bytes_] CF_VERSION = 'CF-1.7' @@ -582,7 +582,7 @@ def _remove_satpy_attrs(new_data): def _format_prerequisites_attrs(dataarray): """Reformat prerequisites attribute value to string.""" if 'prerequisites' in dataarray.attrs: - dataarray.attrs['prerequisites'] = [np.string_(str(prereq)) for prereq in dataarray.attrs['prerequisites']] + dataarray.attrs['prerequisites'] = [np.bytes_(str(prereq)) for prereq in dataarray.attrs['prerequisites']] return dataarray From 88379e47ccd631a80654c4dbf60dc6fccdf7667a Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 21 Sep 2023 10:10:00 +0200 Subject: [PATCH 0475/1416] Remove other usages of np.string_ --- satpy/readers/hdf4_utils.py | 2 +- satpy/readers/utils.py | 2 +- satpy/tests/reader_tests/test_utils.py | 12 ++++++------ 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/satpy/readers/hdf4_utils.py b/satpy/readers/hdf4_utils.py index acc86fd64d..f8bf4ade79 100644 --- a/satpy/readers/hdf4_utils.py +++ b/satpy/readers/hdf4_utils.py @@ -69,7 +69,7 @@ def __init__(self, filename, filename_info, filetype_info): def _collect_attrs(self, name, attrs): for key, value in attrs.items(): value = np.squeeze(value) - if issubclass(value.dtype.type, (np.string_, np.unicode_)) and not value.shape: + if issubclass(value.dtype.type, (np.bytes_, np.unicode_)) and not value.shape: value = value.item() # convert to scalar if not isinstance(value, str): # python 3 - was scalar numpy array of bytes diff --git a/satpy/readers/utils.py b/satpy/readers/utils.py index 31f6dea6d9..e2035af479 100644 --- a/satpy/readers/utils.py +++ b/satpy/readers/utils.py @@ -54,7 +54,7 @@ def np2str(value): """ if hasattr(value, 'dtype') and \ - issubclass(value.dtype.type, (np.str_, np.string_, np.object_)) \ + 
issubclass(value.dtype.type, (np.str_, np.bytes_, np.object_)) \ and value.size == 1: value = value.item() if not isinstance(value, str): diff --git a/satpy/tests/reader_tests/test_utils.py b/satpy/tests/reader_tests/test_utils.py index 54b156e4c5..fc38e36c88 100644 --- a/satpy/tests/reader_tests/test_utils.py +++ b/satpy/tests/reader_tests/test_utils.py @@ -202,20 +202,20 @@ def test_sub_area(self, adef): def test_np2str(self): """Test the np2str function.""" # byte object - npstring = np.string_('hej') - self.assertEqual(hf.np2str(npstring), 'hej') + npbytes = np.bytes_('hej') + self.assertEqual(hf.np2str(npbytes), 'hej') # single element numpy array - np_arr = np.array([npstring]) + np_arr = np.array([npbytes]) self.assertEqual(hf.np2str(np_arr), 'hej') # scalar numpy array - np_arr = np.array(npstring) + np_arr = np.array(npbytes) self.assertEqual(hf.np2str(np_arr), 'hej') # multi-element array - npstring = np.array([npstring, npstring]) - self.assertRaises(ValueError, hf.np2str, npstring) + npbytes = np.array([npbytes, npbytes]) + self.assertRaises(ValueError, hf.np2str, npbytes) # non-array self.assertRaises(ValueError, hf.np2str, 5) From 18da8fdd79b7f3b516ac3408981efb6ad0df9c35 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 21 Sep 2023 10:58:52 +0200 Subject: [PATCH 0476/1416] Use np.float64 instead of np.float_ --- satpy/readers/aapp_mhs_amsub_l1c.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/aapp_mhs_amsub_l1c.py b/satpy/readers/aapp_mhs_amsub_l1c.py index 39216431f4..f5765545f3 100644 --- a/satpy/readers/aapp_mhs_amsub_l1c.py +++ b/satpy/readers/aapp_mhs_amsub_l1c.py @@ -152,7 +152,7 @@ def _calibrate(data, if calib_type == 'counts': return channel - channel = channel.astype(np.float_) + channel = channel.astype(np.float64) return da.where(mask, channel, np.nan) From 6f77d18191cd1a6816caf114a13ea33636a63202 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 21 Sep 2023 11:01:20 +0200 Subject: [PATCH 0477/1416] Use np.str_ instead of np.unicode_ --- satpy/readers/hdf4_utils.py | 2 +- satpy/writers/cf_writer.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/hdf4_utils.py b/satpy/readers/hdf4_utils.py index f8bf4ade79..fb20c0ce11 100644 --- a/satpy/readers/hdf4_utils.py +++ b/satpy/readers/hdf4_utils.py @@ -69,7 +69,7 @@ def __init__(self, filename, filename_info, filetype_info): def _collect_attrs(self, name, attrs): for key, value in attrs.items(): value = np.squeeze(value) - if issubclass(value.dtype.type, (np.bytes_, np.unicode_)) and not value.shape: + if issubclass(value.dtype.type, (np.bytes_, np.str_)) and not value.shape: value = value.item() # convert to scalar if not isinstance(value, str): # python 3 - was scalar numpy array of bytes diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index ed149391bb..702e25c2fa 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -192,7 +192,7 @@ if netCDF4 is None and h5netcdf is None: raise ImportError('Ensure that the netCDF4 or h5netcdf package is installed.') -# Numpy datatypes compatible with all netCDF4 backends. ``np.unicode_`` is +# Numpy datatypes compatible with all netCDF4 backends. 
``np.str_`` is # excluded because h5py (and thus h5netcdf) has problems with unicode, see # https://github.com/h5py/h5py/issues/624.""" NC4_DTYPES = [np.dtype('int8'), np.dtype('uint8'), From d1a433d6fc39dec0c28713cd1e96e6acd89a9a35 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 26 Sep 2023 10:44:17 -0500 Subject: [PATCH 0478/1416] Remove unused chunk size in modis readers --- satpy/readers/hdfeos_base.py | 2 -- satpy/readers/modis_l1b.py | 2 -- 2 files changed, 4 deletions(-) diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index 172fd6d808..8ad2944dd6 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -33,10 +33,8 @@ from satpy import DataID from satpy.readers.file_handlers import BaseFileHandler -from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) -CHUNK_SIZE = get_legacy_chunk_size() def interpolate(clons, clats, csatz, src_resolution, dst_resolution): diff --git a/satpy/readers/modis_l1b.py b/satpy/readers/modis_l1b.py index 4b11560936..1d0e209d57 100644 --- a/satpy/readers/modis_l1b.py +++ b/satpy/readers/modis_l1b.py @@ -78,10 +78,8 @@ from satpy.readers.hdf4_utils import from_sds from satpy.readers.hdfeos_base import HDFEOSBaseFileReader, HDFEOSGeoReader -from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) -CHUNK_SIZE = get_legacy_chunk_size() class HDFEOSBandReader(HDFEOSBaseFileReader): From d4ce372ba7a0048cf1dfd70911ca71e4096d6ed2 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 26 Sep 2023 15:34:55 -0500 Subject: [PATCH 0479/1416] Fix chunk checks in MODIS L1b tests --- satpy/tests/reader_tests/modis_tests/test_modis_l1b.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py index 92d7962cef..53f0ca46ce 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py @@ -195,7 +195,8 @@ def test_load_vis_saturation(self, mask_saturated, modis_l1b_nasa_mod021km_file) scene = Scene(reader='modis_l1b', filenames=modis_l1b_nasa_mod021km_file, reader_kwargs={"mask_saturated": mask_saturated}) dataset_name = '2' - scene.load([dataset_name]) + with dask.config.set({'array.chunk-size': '1 MiB'}): + scene.load([dataset_name]) dataset = scene[dataset_name] assert dataset.shape == _shape_for_resolution(1000) assert dataset.attrs['resolution'] == 1000 From 0300434fbcf20a9cc8e1d0ef55e13090e24b3c09 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 27 Sep 2023 14:04:00 -0500 Subject: [PATCH 0480/1416] Remove compatibility with Python 3.8 and below --- .pre-commit-config.yaml | 2 +- satpy/_compat.py | 71 +---------------------------------------- satpy/_config.py | 13 +------- satpy/resample.py | 9 +----- 4 files changed, 4 insertions(+), 91 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3999be8b04..3ce81859ae 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -28,7 +28,7 @@ repos: - types-pkg-resources - types-PyYAML - types-requests - args: ["--python-version", "3.8", "--ignore-missing-imports"] + args: ["--python-version", "3.9", "--ignore-missing-imports"] - repo: https://github.com/pycqa/isort rev: 5.12.0 hooks: diff --git a/satpy/_compat.py b/satpy/_compat.py index b49b5a961b..aad2009db3 100644 --- a/satpy/_compat.py +++ b/satpy/_compat.py @@ -17,70 +17,7 @@ # satpy. If not, see . 
"""Backports and compatibility fixes for satpy.""" -from threading import RLock - -_NOT_FOUND = object() - - -class CachedPropertyBackport: - """Backport of cached_property from Python-3.8. - - Source: https://github.com/python/cpython/blob/v3.8.0/Lib/functools.py#L930 - """ - - def __init__(self, func): # noqa - self.func = func - self.attrname = None - self.__doc__ = func.__doc__ - self.lock = RLock() - - def __set_name__(self, owner, name): # noqa - if self.attrname is None: - self.attrname = name - elif name != self.attrname: - raise TypeError( - "Cannot assign the same cached_property to two different names " - f"({self.attrname!r} and {name!r})." - ) - - def __get__(self, instance, owner=None): # noqa - if instance is None: - return self - if self.attrname is None: - raise TypeError( - "Cannot use cached_property instance without calling __set_name__ on it.") - try: - cache = instance.__dict__ # noqa - except AttributeError: # not all objects have __dict__ (e.g. class defines slots) - msg = ( - f"No '__dict__' attribute on {type(instance).__name__!r} " - f"instance to cache {self.attrname!r} property." - ) - raise TypeError(msg) from None - val = cache.get(self.attrname, _NOT_FOUND) - if val is _NOT_FOUND: - with self.lock: - # check if another thread filled cache while we awaited lock - val = cache.get(self.attrname, _NOT_FOUND) - if val is _NOT_FOUND: - val = self.func(instance) - try: - cache[self.attrname] = val - except TypeError: - msg = ( - f"The '__dict__' attribute on {type(instance).__name__!r} instance " - f"does not support item assignment for caching {self.attrname!r} property." - ) - raise TypeError(msg) from None - return val - - -try: - from functools import cached_property # type: ignore -except ImportError: - # for python < 3.8 - cached_property = CachedPropertyBackport # type: ignore - +from functools import cache, cached_property # noqa try: from numpy.typing import ArrayLike, DTypeLike # noqa @@ -88,9 +25,3 @@ def __get__(self, instance, owner=None): # noqa # numpy <1.20 from numpy import dtype as DTypeLike # noqa from numpy import ndarray as ArrayLike # noqa - - -try: - from functools import cache # type: ignore -except ImportError: - from functools import lru_cache as cache # noqa diff --git a/satpy/_config.py b/satpy/_config.py index 4abc00aba2..7a0d7aaac3 100644 --- a/satpy/_config.py +++ b/satpy/_config.py @@ -26,20 +26,9 @@ import tempfile from collections import OrderedDict from importlib.metadata import EntryPoint, entry_points -from pathlib import Path +from importlib.resources import files as impr_files from typing import Iterable -try: - from importlib.resources import files as impr_files # type: ignore -except ImportError: - # Python 3.8 - def impr_files(module_name: str) -> Path: - """Get path to module as a backport for Python 3.8.""" - from importlib.resources import path as impr_path - - with impr_path(module_name, "__init__.py") as pkg_init_path: - return pkg_init_path.parent - import appdirs from donfig import Config diff --git a/satpy/resample.py b/satpy/resample.py index b124c84933..289371d8cb 100644 --- a/satpy/resample.py +++ b/satpy/resample.py @@ -143,6 +143,7 @@ import os import warnings from logging import getLogger +from math import lcm # type: ignore from weakref import WeakValueDictionary import dask @@ -157,14 +158,6 @@ from satpy.utils import PerformanceWarning, get_legacy_chunk_size -try: - from math import lcm # type: ignore -except ImportError: - def lcm(a, b): - """Get 'Least Common Multiple' with Python 3.8 compatibility.""" - 
from math import gcd - return abs(a * b) // gcd(a, b) - try: from pyresample.resampler import BaseResampler as PRBaseResampler except ImportError: From 482f3e27bacb2b77fa2100d6682e0eecf832fa0d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 27 Sep 2023 14:09:16 -0500 Subject: [PATCH 0481/1416] Add initial utility function for resolution-based chunking --- satpy/readers/hdfeos_base.py | 24 +++----- satpy/utils.py | 106 +++++++++++++++++++++++++++++++++++ 2 files changed, 114 insertions(+), 16 deletions(-) diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index 8ad2944dd6..751d286828 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -25,7 +25,6 @@ from contextlib import suppress from datetime import datetime -import dask.array.core import numpy as np import xarray as xr from pyhdf.error import HDF4Error @@ -33,6 +32,7 @@ from satpy import DataID from satpy.readers.file_handlers import BaseFileHandler +from satpy.utils import chunks_by_resolution logger = logging.getLogger(__name__) @@ -227,21 +227,13 @@ def _chunks_for_variable(self, hdf_dataset): scan_length_250m = 40 var_shape = hdf_dataset.info()[2] res_multiplier = self._get_res_multiplier(var_shape) - non_yx_chunks = tuple() - if len(var_shape) == 3: - # assume (band, y, x) - non_yx_chunks = ((1,) * var_shape[0],) - var_shape = var_shape[-2:] - elif len(var_shape) != 2: - # don't guess - return dask.array.core.normalize_chunks("auto", shape=var_shape, dtype=np.float32) - shape_for_250m = tuple(dim_size * res_multiplier for dim_size in var_shape) - chunks_for_250m = dask.array.core.normalize_chunks(("auto", -1), shape=shape_for_250m, dtype=np.float32) - row_chunks_for_250m = chunks_for_250m[0][0] - scanbased_row_chunks_for_250m = np.round(row_chunks_for_250m / scan_length_250m) * scan_length_250m - var_row_chunks = scanbased_row_chunks_for_250m / res_multiplier - var_row_chunks = max(var_row_chunks, scan_length_250m / res_multiplier) # avoid getting 0 chunk size - return non_yx_chunks + (var_row_chunks, -1) + return chunks_by_resolution( + var_shape, + np.float32, + scan_length_250m, + res_multiplier, + whole_scan_width=True + ) @staticmethod def _get_res_multiplier(var_shape): diff --git a/satpy/utils.py b/satpy/utils.py index a9785a544a..20bd604104 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -35,6 +35,8 @@ import yaml from yaml import BaseLoader, UnsafeLoader +from satpy._compat import DTypeLike + _is_logging_on = False TRACE_LEVEL = 5 @@ -631,6 +633,110 @@ def _get_pytroll_chunk_size(): return None +def chunks_by_resolution( + input_shape: tuple[int, ...], + input_dtype: DTypeLike, + num_high_res_elements: int, + low_res_multiplier: int, + whole_scan_width: bool = False, +) -> tuple[int, ...]: + """Compute dask chunk sizes based on data resolution. + + First, chunks are computed for the highest resolution version of the data. + This is done by multiplying the input array shape by the + ``low_res_multiplier`` and then using Dask's utility functions and + configuration to produce a chunk size to fit into a specific number of + bytes. See :ref:`dask:automatic-chunking` for more information. + Next, the same multiplier is used to reduce the high resolution chunk sizes + to the lower resolution of the input data. The end result of reading + multiple resolutions of data is that each dask chunk covers the same + geographic region. This also means replicating or aggregating one + resolution and then combining arrays should not require any rechunking. + + .. 
note:: + + Only 2 or 3-dimensional shapes are supported. In the case of 3D arrays + the first dimension is assumed to be "bands" and is given a chunk + size of 1. For shapes with other numbers of dimensions, the chunk size + for the entire array is determined by dask's "auto" chunking and + resolution is ignored. + + Args: + input_shape: Shape of the array to compute dask chunk size for. + input_dtype: Dtype for the final unscaled array. This is usually + 32-bit float (``np.float32``) or 64-bit float (``np.float64``) + for non-category data. If this doesn't represent the final data + type of the data then the final size of chunks in memory will not + match the user's request via dask's ``array.chunk-size`` + configuration. + num_high_res_elements: Smallest number of high (fine) resolution + elements that make up a single "unit" or chunk of data. This could + be a multiple or factor of the scan size for some instruments and/or + could be based on the on-disk chunk size. This value ensures that + chunks are aligned to the underlying data structure for best + performance. + low_res_multiplier: Number of high (fine) resolution pixels that fit + in a single low (coarse) resolution pixel. + whole_scan_width: To create the entire width (x dimension) of the + array as a single chunk. This is useful in cases when future + operations will operate on entire instrument scans of data at + a time. For example, polar-orbiter scan geolocation being + interpolated from low resolution to high resolution. + + Returns: + A tuple where each element is the chunk size for that axis/dimension. + + """ + if len(input_shape) not in (2, 3): + # we're not sure about this shape so don't guess + return dask.array.core.normalize_chunks("auto", shape=input_shape, dtype=input_dtype) + + pre_non_yx_chunks, yx_shape, post_non_yx_chunks = _split_non_yx_chunks(input_shape) + high_res_shape = tuple(dim_size * low_res_multiplier for dim_size in yx_shape) + col_chunks = -1 if whole_scan_width else "auto" + chunks_for_high_res = dask.array.core.normalize_chunks( + ("auto", col_chunks), + shape=high_res_shape, + dtype=input_dtype + ) + var_row_chunks = _low_res_chunks_from_high_res( + chunks_for_high_res[0][0], + num_high_res_elements, + low_res_multiplier + ) + var_col_chunks = -1 + if not whole_scan_width: + var_col_chunks = _low_res_chunks_from_high_res( + chunks_for_high_res[1][0], + num_high_res_elements, + low_res_multiplier + ) + return pre_non_yx_chunks + (var_row_chunks, var_col_chunks) + post_non_yx_chunks + + +def _split_non_yx_chunks( + input_shape: tuple[int, ...], +) -> tuple[tuple[int, ...] | tuple[()], tuple[int, int], tuple[int, ...] | tuple[()]]: + pre_non_yx_chunks: tuple[int, ...] = tuple() + post_non_yx_chunks: tuple[int, ...] 
= tuple() + yx_shape = (input_shape[-2], input_shape[-1]) + if len(input_shape) == 3: + # assume (band, y, x) + pre_non_yx_chunks = (1,) + return pre_non_yx_chunks, yx_shape, post_non_yx_chunks + + +def _low_res_chunks_from_high_res( + chunk_size_for_high_res: int, + num_high_res_elements: int, + low_res_multiplier: int +) -> int: + aligned_chunk_size = np.round(chunk_size_for_high_res / num_high_res_elements) * num_high_res_elements + low_res_chunk_size = aligned_chunk_size / low_res_multiplier + # avoid getting 0 chunk size + return max(low_res_chunk_size, num_high_res_elements / low_res_multiplier) + + def convert_remote_files_to_fsspec(filenames, storage_options=None): """Check filenames for transfer protocols, convert to FSFile objects if possible.""" if storage_options is None: From eba9c8221d29ffd7090941f459a52bd3c85c52aa Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 28 Sep 2023 11:18:51 +0200 Subject: [PATCH 0482/1416] Add mastodon link --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index b3eb4c5577..612db4fa05 100644 --- a/setup.py +++ b/setup.py @@ -142,6 +142,7 @@ def _config_data_files(base_dirs, extensions=(".cfg", )): "Slack": "https://pytroll.slack.com/", "Twitter": "https://twitter.com/hashtag/satpy?src=hashtag_click", "Release Notes": "https://github.com/pytroll/satpy/blob/main/CHANGELOG.md", + "Mastodon": "https://fosstodon.org/tags/satpy", }, packages=find_packages(), # Always use forward '/', even on Windows From 2125eb26706c6a1d6a5193dcb3e6496e90471449 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 28 Sep 2023 14:30:25 +0200 Subject: [PATCH 0483/1416] Fix cf tests for new xarray release --- satpy/tests/writer_tests/test_cf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 54770b9176..a325cb9cc8 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -1448,5 +1448,5 @@ def _should_use_compression_keyword(): versions = _get_backend_versions() return ( versions["libnetcdf"] >= Version("4.9.0") and - versions["xarray"] >= Version("2023.9") + versions["xarray"] >= Version("2023.10") ) From dcb87ffce0363221eff189cce575b908aed9f69d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 28 Sep 2023 08:40:58 -0500 Subject: [PATCH 0484/1416] Remove tests for removed compatibility code --- satpy/tests/test_compat.py | 48 -------------------------------------- 1 file changed, 48 deletions(-) delete mode 100644 satpy/tests/test_compat.py diff --git a/satpy/tests/test_compat.py b/satpy/tests/test_compat.py deleted file mode 100644 index f084f88e53..0000000000 --- a/satpy/tests/test_compat.py +++ /dev/null @@ -1,48 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# Copyright (c) 2022 Satpy developers -# -# This file is part of satpy. -# -# satpy is free software: you can redistribute it and/or modify it under the -# terms of the GNU General Public License as published by the Free Software -# Foundation, either version 3 of the License, or (at your option) any later -# version. -# -# satpy is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# satpy. If not, see . 
-"""Test backports and compatibility fixes.""" - -import gc - -from satpy._compat import CachedPropertyBackport - - -class ClassWithCachedProperty: # noqa - def __init__(self, x): # noqa - self.x = x - - @CachedPropertyBackport - def property(self): # noqa - return 2 * self.x - - -def test_cached_property_backport(): - """Test cached property backport.""" - c = ClassWithCachedProperty(1) - assert c.property == 2 - - -def test_cached_property_backport_releases_memory(): - """Test that cached property backport releases memory.""" - c1 = ClassWithCachedProperty(2) - del c1 - instances = [ - obj for obj in gc.get_objects() - if isinstance(obj, ClassWithCachedProperty) - ] - assert len(instances) == 0 From e0a8ea4bff608ee316c61f7de58161d20e5e2921 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 28 Sep 2023 13:48:18 -0500 Subject: [PATCH 0485/1416] Convert some utility tests to pytest --- satpy/tests/test_utils.py | 255 +++++++++++++------------------------- 1 file changed, 87 insertions(+), 168 deletions(-) diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index 56dbe25324..5babc29804 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -21,6 +21,7 @@ import typing import unittest import warnings +from math import sqrt from unittest import mock import dask.array as da @@ -44,182 +45,100 @@ # - caplog -class TestUtils(unittest.TestCase): - """Testing utils.""" +class TestGeoUtils: + """Testing geo-related utility functions.""" - def test_lonlat2xyz(self): - """Test the lonlat2xyz function.""" - x__, y__, z__ = lonlat2xyz(0, 0) - self.assertAlmostEqual(x__, 1) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 0) - - x__, y__, z__ = lonlat2xyz(90, 0) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 1) - self.assertAlmostEqual(z__, 0) - - x__, y__, z__ = lonlat2xyz(0, 90) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 1) - - x__, y__, z__ = lonlat2xyz(180, 0) - self.assertAlmostEqual(x__, -1) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 0) - - x__, y__, z__ = lonlat2xyz(-90, 0) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, -1) - self.assertAlmostEqual(z__, 0) - - x__, y__, z__ = lonlat2xyz(0, -90) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, -1) - - x__, y__, z__ = lonlat2xyz(0, 45) - self.assertAlmostEqual(x__, np.sqrt(2) / 2) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, np.sqrt(2) / 2) - - x__, y__, z__ = lonlat2xyz(0, 60) - self.assertAlmostEqual(x__, np.sqrt(1) / 2) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, np.sqrt(3) / 2) - - def test_angle2xyz(self): + @pytest.mark.parametrize( + ("lonlat", "xyz"), + [ + ((0, 0), (1, 0, 0)), + ((90, 0), (0, 1, 0)), + ((0, 90), (0, 0, 1)), + ((180, 0), (-1, 0, 0)), + ((-90, 0), (0, -1, 0)), + ((0, -90), (0, 0, -1)), + ((0, 45), (sqrt(2) / 2, 0, sqrt(2) / 2)), + ((0, 60), (sqrt(1) / 2, 0, sqrt(3) / 2)), + ], + ) + def test_lonlat2xyz(self, lonlat, xyz): """Test the lonlat2xyz function.""" - x__, y__, z__ = angle2xyz(0, 0) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 1) - - x__, y__, z__ = angle2xyz(90, 0) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 1) - - x__, y__, z__ = angle2xyz(0, 90) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 1) - self.assertAlmostEqual(z__, 0) - - x__, y__, z__ = angle2xyz(180, 
0) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 1) - - x__, y__, z__ = angle2xyz(-90, 0) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 1) - - x__, y__, z__ = angle2xyz(0, -90) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, -1) - self.assertAlmostEqual(z__, 0) - - x__, y__, z__ = angle2xyz(90, 90) - self.assertAlmostEqual(x__, 1) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 0) - - x__, y__, z__ = angle2xyz(-90, 90) - self.assertAlmostEqual(x__, -1) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 0) - - x__, y__, z__ = angle2xyz(180, 90) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, -1) - self.assertAlmostEqual(z__, 0) - - x__, y__, z__ = angle2xyz(0, -90) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, -1) - self.assertAlmostEqual(z__, 0) - - x__, y__, z__ = angle2xyz(0, 45) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, np.sqrt(2) / 2) - self.assertAlmostEqual(z__, np.sqrt(2) / 2) - - x__, y__, z__ = angle2xyz(0, 60) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, np.sqrt(3) / 2) - self.assertAlmostEqual(z__, np.sqrt(1) / 2) - - def test_xyz2lonlat(self): - """Test xyz2lonlat.""" - lon, lat = xyz2lonlat(1, 0, 0) - self.assertAlmostEqual(lon, 0) - self.assertAlmostEqual(lat, 0) - - lon, lat = xyz2lonlat(0, 1, 0) - self.assertAlmostEqual(lon, 90) - self.assertAlmostEqual(lat, 0) - - lon, lat = xyz2lonlat(0, 0, 1, asin=True) - self.assertAlmostEqual(lon, 0) - self.assertAlmostEqual(lat, 90) + x__, y__, z__ = lonlat2xyz(*lonlat) + assert x__ == pytest.approx(xyz[0]) + assert y__ == pytest.approx(xyz[1]) + assert z__ == pytest.approx(xyz[2]) - lon, lat = xyz2lonlat(0, 0, 1) - self.assertAlmostEqual(lon, 0) - self.assertAlmostEqual(lat, 90) + @pytest.mark.parametrize( + ("azizen", "xyz"), + [ + ((0, 0), (0, 0, 1)), + ((90, 0), (0, 0, 1)), + ((0, 90), (0, 1, 0)), + ((180, 0), (0, 0, 1)), + ((-90, 0), (0, 0, 1)), + ((0, -90), (0, -1, 0)), + ((90, 90), (1, 0, 0)), + ((-90, 90), (-1, 0, 0)), + ((180, 90), (0, -1, 0)), + ((0, -90), (0, -1, 0)), + ((0, 45), (0, sqrt(2) / 2, sqrt(2) / 2)), + ((0, 60), (0, sqrt(3) / 2, sqrt(1) / 2)), + ], + ) + def test_angle2xyz(self, azizen, xyz): + """Test the angle2xyz function.""" + x__, y__, z__ = angle2xyz(*azizen) + assert x__ == pytest.approx(xyz[0]) + assert y__ == pytest.approx(xyz[1]) + assert z__ == pytest.approx(xyz[2]) - lon, lat = xyz2lonlat(np.sqrt(2) / 2, np.sqrt(2) / 2, 0) - self.assertAlmostEqual(lon, 45) - self.assertAlmostEqual(lat, 0) + @pytest.mark.parametrize( + ("xyz", "asin", "lonlat"), + [ + ((1, 0, 0), False, (0, 0)), + ((0, 1, 0), False, (90, 0)), + ((0, 0, 1), True, (0, 90)), + ((0, 0, 1), False, (0, 90)), + ((sqrt(2) / 2, sqrt(2) / 2, 0), False, (45, 0)), + ], + ) + def test_xyz2lonlat(self, xyz, asin, lonlat): + """Test xyz2lonlat.""" + lon, lat = xyz2lonlat(*xyz, asin=asin) + assert lon == pytest.approx(lonlat[0]) + assert lat == pytest.approx(lonlat[1]) - def test_xyz2angle(self): + @pytest.mark.parametrize( + ("xyz", "acos", "azizen"), + [ + ((1, 0, 0), False, (90, 90)), + ((0, 1, 0), False, (0, 90)), + ((0, 0, 1), False, (0, 0)), + ((0, 0, 1), True, (0, 0)), + ((sqrt(2) / 2, sqrt(2) / 2, 0), False, (45, 90)), + ((-1, 0, 0), False, (-90, 90)), + ((0, -1, 0), False, (180, 90)), + ], + ) + def test_xyz2angle(self, xyz, acos, azizen): """Test xyz2angle.""" - azi, zen = xyz2angle(1, 0, 0) - self.assertAlmostEqual(azi, 90) - 
self.assertAlmostEqual(zen, 90)
-
-        azi, zen = xyz2angle(0, 1, 0)
-        self.assertAlmostEqual(azi, 0)
-        self.assertAlmostEqual(zen, 90)
-
-        azi, zen = xyz2angle(0, 0, 1)
-        self.assertAlmostEqual(azi, 0)
-        self.assertAlmostEqual(zen, 0)
-
-        azi, zen = xyz2angle(0, 0, 1, acos=True)
-        self.assertAlmostEqual(azi, 0)
-        self.assertAlmostEqual(zen, 0)
+        azi, zen = xyz2angle(*xyz, acos=acos)
+        assert azi == pytest.approx(azizen[0])
+        assert zen == pytest.approx(azizen[1])
 
-        azi, zen = xyz2angle(np.sqrt(2) / 2, np.sqrt(2) / 2, 0)
-        self.assertAlmostEqual(azi, 45)
-        self.assertAlmostEqual(zen, 90)
-
-        azi, zen = xyz2angle(-1, 0, 0)
-        self.assertAlmostEqual(azi, -90)
-        self.assertAlmostEqual(zen, 90)
-
-        azi, zen = xyz2angle(0, -1, 0)
-        self.assertAlmostEqual(azi, 180)
-        self.assertAlmostEqual(zen, 90)
-
-    def test_proj_units_to_meters(self):
+    @pytest.mark.parametrize(
+        ("prj", "exp_prj"),
+        [
+            ("+asd=123123123123", "+asd=123123123123"),
+            ("+a=6378.137", "+a=6378137.000"),
+            ("+a=6378.137 +units=km", "+a=6378137.000"),
+            ("+a=6378.137 +b=6378.137", "+a=6378137.000 +b=6378137.000"),
+            ("+a=6378.137 +b=6378.137 +h=35785.863", "+a=6378137.000 +b=6378137.000 +h=35785863.000"),
+        ],
+    )
+    def test_proj_units_to_meters(self, prj, exp_prj):
         """Test proj units to meters conversion."""
-        prj = '+asd=123123123123'
-        res = proj_units_to_meters(prj)
-        self.assertEqual(res, prj)
-        prj = '+a=6378.137'
-        res = proj_units_to_meters(prj)
-        self.assertEqual(res, '+a=6378137.000')
-        prj = '+a=6378.137 +units=km'
-        res = proj_units_to_meters(prj)
-        self.assertEqual(res, '+a=6378137.000')
-        prj = '+a=6378.137 +b=6378.137'
-        res = proj_units_to_meters(prj)
-        self.assertEqual(res, '+a=6378137.000 +b=6378137.000')
-        prj = '+a=6378.137 +b=6378.137 +h=35785.863'
-        res = proj_units_to_meters(prj)
-        self.assertEqual(res, '+a=6378137.000 +b=6378137.000 +h=35785863.000')
+        assert proj_units_to_meters(prj) == exp_prj
 
 
 class TestGetSatPos:

From 37170e11f7f9b9ba1d2271c02c7b268aa2a6cc3a Mon Sep 17 00:00:00 2001
From: David Hoese
Date: Fri, 29 Sep 2023 13:23:33 -0500
Subject: [PATCH 0486/1416] Add tests for resolution-based chunking utilities

---
 satpy/tests/test_utils.py | 36 ++++++++++++++++++++++++++++++++++++
 satpy/utils.py            |  8 +++++---
 2 files changed, 41 insertions(+), 3 deletions(-)

diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py
index 5babc29804..b45885f312 100644
--- a/satpy/tests/test_utils.py
+++ b/satpy/tests/test_utils.py
@@ -416,6 +416,42 @@ def test_get_legacy_chunk_size():
     assert get_legacy_chunk_size() == 2048
 
 
+@pytest.mark.parametrize(
+    ("shape", "chunk_dtype", "num_hr", "lr_mult", "scan_width", "exp_result"),
+    [
+        ((1000, 3200), np.float32, 40, 4, True, (160, -1)),  # 1km swath
+        ((1000 // 5, 3200 // 5), np.float32, 40, 20, True, (160 // 5, -1)),  # 5km swath
+        ((1000 * 4, 3200 * 4), np.float32, 40, 1, True, (160 * 4, -1)),  # 250m swath
+        ((21696 // 2, 21696 // 2), np.float32, 226, 2, False, (1469, 1469)),  # 1km area (ABI chunk 226)
+        ((21696 // 2, 21696 // 2), np.float64, 226, 2, False, (1017, 1017)),  # 1km area (64-bit)
+        ((21696 // 3, 21696 // 3), np.float32, 226, 6, False, (1469 // 3, 1469 // 3)),  # 3km area
+        ((21696, 21696), np.float32, 226, 1, False, (1469 * 2, 1469 * 2)),  # 500m area
+        ((7, 1000 * 4, 3200 * 4), np.float32, 40, 1, True, (1, 160 * 4, -1)),  # 250m swath with bands
+        ((1, 7, 1000, 3200), np.float32, 40, 1, True, ((1,), (7,), (1000,), (1198, 1198, 804))),  # lots of dimensions
+    ],
+)
+def test_resolution_chunking(shape, chunk_dtype, num_hr, lr_mult, scan_width, exp_result):
+    """Test 
chunks_by_resolution helper function.""" + import dask.config + + from satpy.utils import chunks_by_resolution + + with dask.config.set({"array.chunk-size": "32MiB"}): + chunk_results = chunks_by_resolution( + shape, + chunk_dtype, + num_hr, + lr_mult, + whole_scan_width=scan_width, + ) + assert chunk_results == exp_result + for chunk_size in chunk_results: + assert isinstance(chunk_size[0], int) if isinstance(chunk_size, tuple) else isinstance(chunk_size, int) + + # make sure the chunks are understandable by dask + da.zeros(shape, dtype=chunk_dtype, chunks=chunk_results) + + def test_convert_remote_files_to_fsspec_local_files(): """Test convertion of remote files to fsspec objects. diff --git a/satpy/utils.py b/satpy/utils.py index 20bd604104..28212c6510 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -639,7 +639,7 @@ def chunks_by_resolution( num_high_res_elements: int, low_res_multiplier: int, whole_scan_width: bool = False, -) -> tuple[int, ...]: +) -> tuple[int | tuple[int, ...], ...]: """Compute dask chunk sizes based on data resolution. First, chunks are computed for the highest resolution version of the data. @@ -668,7 +668,9 @@ def chunks_by_resolution( for non-category data. If this doesn't represent the final data type of the data then the final size of chunks in memory will not match the user's request via dask's ``array.chunk-size`` - configuration. + configuration. Sometimes it is useful to keep this as a single + dtype for all reading functionality (ex. ``np.float32``) in order + to keep all read variable chunks the same size regardless of dtype. num_high_res_elements: Smallest number of high (fine) resolution elements that make up a single "unit" or chunk of data. This could be a multiple or factor of the scan size for some instruments and/or @@ -734,7 +736,7 @@ def _low_res_chunks_from_high_res( aligned_chunk_size = np.round(chunk_size_for_high_res / num_high_res_elements) * num_high_res_elements low_res_chunk_size = aligned_chunk_size / low_res_multiplier # avoid getting 0 chunk size - return max(low_res_chunk_size, num_high_res_elements / low_res_multiplier) + return int(max(low_res_chunk_size, num_high_res_elements / low_res_multiplier)) def convert_remote_files_to_fsspec(filenames, storage_options=None): From 4954d9607e85d345c0119936e6024e7b5490955a Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 3 Oct 2023 15:51:00 -0500 Subject: [PATCH 0487/1416] Fix reference to dask documentation --- satpy/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/utils.py b/satpy/utils.py index 28212c6510..94f7fbf86e 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -646,7 +646,7 @@ def chunks_by_resolution( This is done by multiplying the input array shape by the ``low_res_multiplier`` and then using Dask's utility functions and configuration to produce a chunk size to fit into a specific number of - bytes. See :ref:`dask:automatic-chunking` for more information. + bytes. See :doc:`dask:array-chunks` for more information. Next, the same multiplier is used to reduce the high resolution chunk sizes to the lower resolution of the input data. 
The end result of reading multiple resolutions of data is that each dask chunk covers the same From 82c57c74dec625a339c633c785a32e57375f9757 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 3 Oct 2023 20:46:14 -0500 Subject: [PATCH 0488/1416] Add night_microphysics_eum alias for ABI composites --- satpy/etc/composites/abi.yaml | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/satpy/etc/composites/abi.yaml b/satpy/etc/composites/abi.yaml index 783bd5187a..1437b91df4 100644 --- a/satpy/etc/composites/abi.yaml +++ b/satpy/etc/composites/abi.yaml @@ -555,6 +555,22 @@ composites: - name: C13 standard_name: night_microphysics + night_microphysics_eum: + description: > + Nighttime Microphysics RGB following the EUMETSAT recipe + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: C15 + - name: C14 + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: C14 + - name: C07 + - name: C14 + standard_name: night_microphysics + fire_temperature_awips: description: > Fire Temperature RGB, for GOESR: NASA, NOAA From 04cfbd521d905a3bfe72b9f51044654610a3b00b Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 4 Oct 2023 09:12:02 +0200 Subject: [PATCH 0489/1416] Make caching fail if one of the args is unhashable --- satpy/modifiers/angles.py | 2 +- satpy/tests/modifier_tests/test_angles.py | 12 ++++++++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index 28adb60028..2f904a1a02 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -250,7 +250,7 @@ def _hash_args(*args, unhashable_types=DEFAULT_UNCACHE_TYPES): hashable_args = [] for arg in args: if isinstance(arg, unhashable_types): - continue + raise TypeError(f"Unhashable type in function signature ({type(arg)}), cannot be cached.") if isinstance(arg, HASHABLE_GEOMETRIES): arg = hash(arg) elif isinstance(arg, datetime): diff --git a/satpy/tests/modifier_tests/test_angles.py b/satpy/tests/modifier_tests/test_angles.py index cd5082a5b7..46a8a8443f 100644 --- a/satpy/tests/modifier_tests/test_angles.py +++ b/satpy/tests/modifier_tests/test_angles.py @@ -322,6 +322,18 @@ def _fake_func(shape, chunks): satpy.config.set(cache_lonlats=True, cache_dir=str(tmp_path)): _fake_func((5, 5), ((5,), (5,))) + def test_caching_with_array_in_args_fails(self, tmp_path): + """Test that trying to cache with non-dask arrays fails.""" + from satpy.modifiers.angles import cache_to_zarr_if + + @cache_to_zarr_if("cache_lonlats") + def _fake_func(array): + return array + 1 + + with pytest.raises(TypeError), \ + satpy.config.set(cache_lonlats=True, cache_dir=str(tmp_path)): + _fake_func(da.zeros(100)) + def test_no_cache_dir_fails(self, tmp_path): """Test that 'cache_dir' not being set fails.""" from satpy.modifiers.angles import _get_sensor_angles_from_sat_pos, get_angles From 9ad6b161219f87cf94df251390a0dfa2aeab811b Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 4 Oct 2023 09:15:35 +0200 Subject: [PATCH 0490/1416] Remove redundant hashability check --- satpy/modifiers/angles.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index 2f904a1a02..01e7c1ff2d 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -163,8 +163,6 @@ def __call__(self, *args, cache_dir: Optional[str] = None) -> Any: def 
_get_should_cache_and_cache_dir(self, args, cache_dir: Optional[str]) -> tuple[bool, str]: should_cache: bool = satpy.config.get(self._cache_config_key, False) - can_cache = not any(isinstance(arg, self._uncacheable_arg_types) for arg in args) - should_cache = should_cache and can_cache cache_dir = self._get_cache_dir_from_config(cache_dir) return should_cache, cache_dir From 6e87d199e202f14e3f6460f68f92da86489a2909 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 4 Oct 2023 09:33:18 +0200 Subject: [PATCH 0491/1416] Replace flake8 with ruff in pre-commit --- .github/workflows/ci.yaml | 4 ++-- .pre-commit-config.yaml | 10 +++++----- pyproject.toml | 15 +++++++++++++++ 3 files changed, 22 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 9897f8886a..9635d9efb8 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -24,13 +24,13 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install flake8 flake8-docstrings flake8-debugger flake8-bugbear pytest + pip install ruff pytest - name: Install Satpy run: | pip install -e . - name: Run linting run: | - flake8 satpy/ + ruff satpy/ test: runs-on: ${{ matrix.os }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3999be8b04..b5b21a52fa 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,12 +1,12 @@ exclude: '^$' fail_fast: false repos: - - repo: https://github.com/PyCQA/flake8 - rev: 6.1.0 + - repo: https://github.com/charliermarsh/ruff-pre-commit + # Ruff version. + rev: 'v0.0.247' hooks: - - id: flake8 - additional_dependencies: [flake8-docstrings, flake8-debugger, flake8-bugbear, mccabe] - args: [--max-complexity, "10"] + - id: ruff + args: [--fix, --exit-non-zero-on-fix] - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.4.0 hooks: diff --git a/pyproject.toml b/pyproject.toml index 64c68d60eb..7bed2a2fdd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,3 +12,18 @@ skip_gitignore = true default_section = "THIRDPARTY" known_first_party = "satpy" line_length = 120 + +[tool.ruff] +select = ["E", "W", "F", "I", "D", "S", "B", "A", "PT", "Q", "TID", "C90", "T10"] +ignore = ["B905"] # only available from python 3.10 +line-length = 120 + +[tool.ruff.per-file-ignores] +"satpy/tests/*" = ["S101"] # assert allowed in tests + +[tool.ruff.pydocstyle] +convention = "google" + +[tool.ruff.mccabe] +# Unlike Flake8, default to a complexity level of 10. 
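+# max-complexity is the C901 threshold: functions whose cyclomatic complexity
+# exceeds this value are flagged by the C90 (mccabe) checks selected above.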
+max-complexity = 10 From 6a4ae2c2e9609182f67bf23b7c3c1a80cfebe8bf Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 4 Oct 2023 10:10:49 +0200 Subject: [PATCH 0492/1416] Add pep8-naming checks --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 7bed2a2fdd..d0eac01fe0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,7 @@ known_first_party = "satpy" line_length = 120 [tool.ruff] -select = ["E", "W", "F", "I", "D", "S", "B", "A", "PT", "Q", "TID", "C90", "T10"] +select = ["E", "W", "F", "I", "D", "S", "B", "A", "PT", "Q", "TID", "C90", "T10", "N"] ignore = ["B905"] # only available from python 3.10 line-length = 120 From c6454b5deaf7993cdc66b58ea3a86f242bf5a874 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 4 Oct 2023 09:27:20 -0500 Subject: [PATCH 0493/1416] Remove libnetcdf specific build from CI env --- .github/workflows/ci.yaml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 9897f8886a..0bd05f273d 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -82,9 +82,6 @@ jobs: - name: Update environment run: mamba env update -n test-environment -f continuous_integration/environment.yaml if: steps.cache.outputs.cache-hit != 'true' - - name: Update environment - libnetcdf - run: mamba install -y -n test-environment libnetcdf=4.9.2=nompi_h5902ca5_107 - if: runner.os == 'Windows' - name: Install unstable dependencies if: matrix.experimental == true From 86997cc5facdfe1afc2d61e8cdba7346357412b9 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 4 Oct 2023 17:41:59 +0200 Subject: [PATCH 0494/1416] Remove linting from github actions Rely on pre-commit entirely now. --- .github/workflows/ci.yaml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 9635d9efb8..ad7a7f96bf 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -28,9 +28,6 @@ jobs: - name: Install Satpy run: | pip install -e . - - name: Run linting - run: | - ruff satpy/ test: runs-on: ${{ matrix.os }} From 014edb6a6e4a3bde4fa5389f7544e83e8391b834 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Tue, 12 Sep 2023 15:12:53 +0200 Subject: [PATCH 0495/1416] add reader for GERB high-resolution HDF5 files --- satpy/etc/readers/gerb_l2_hr_h5.yaml | 31 ++++++++ satpy/readers/gerb_l2_hr_h5.py | 110 +++++++++++++++++++++++++++ 2 files changed, 141 insertions(+) create mode 100644 satpy/etc/readers/gerb_l2_hr_h5.yaml create mode 100644 satpy/readers/gerb_l2_hr_h5.py diff --git a/satpy/etc/readers/gerb_l2_hr_h5.yaml b/satpy/etc/readers/gerb_l2_hr_h5.yaml new file mode 100644 index 0000000000..b05c662dc9 --- /dev/null +++ b/satpy/etc/readers/gerb_l2_hr_h5.yaml @@ -0,0 +1,31 @@ +reader: + name: gerb_l2_hr_h5 + short_name: GERB HR + long_name: Meteosat Second Generation Geostationary Earth Radiation Budget L2 High-Resolution + description: Reader for the HR product of the Geostationary Earth Radiation Budget instrument + status: Beta + supports_fsspec: false + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + sensors: [gerb] + +file_types: + gerb_l2_hr_h5: + file_reader: !!python/name:satpy.readers.gerb_l2_hr_h5.GERB_HR_FileHandler + file_patterns: ['{sensor_name}_{seviri_name}_L20_HR_SOL_TH_{sensing_time:%Y%m%d_%H%M%S}_{gerb_version}.hdf'] + +datasets: + Solar_Flux: + name: Solar Flux + sensor: gerb + wavelength: [0.3, 3., 4.] 
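+    # Satpy wavelength convention: [min, central, max] in micrometres.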
+ units: W m-2 + standard_name: toa_outgoing_shortwave_flux + file_type: gerb_l2_hr_h5 + + Thermal_Flux: + name: Thermal Flux + sensor: gerb + wavelength: [4., 22., 40.] + units: W m-2 + standard_name: toa_outgoing_longwave_flux + file_type: gerb_l2_hr_h5 diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py new file mode 100644 index 0000000000..b5e4d0d5e8 --- /dev/null +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -0,0 +1,110 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2023 +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . + + +"""A reader for the Top of Atmosphere outgoing fluxes from the Geostationary Earth Radiation +Budget instrument aboard the Meteosat Second Generation satellites.""" + + +import logging +from datetime import timedelta + +import dask.array as da +import h5py +import numpy as np +import xarray as xr + +from satpy.readers.file_handlers import BaseFileHandler +from satpy.resample import get_area_def + +LOG = logging.getLogger(__name__) + +def gerb_get_dataset(hfile, name): + """ + Load a GERB dataset in memory from a HDF5 file + + The routine takes into account the quantisation factor and fill values. + """ + ds = hfile[name] + if 'Quantisation Factor' in ds.attrs and 'Unit' in ds.attrs: + ds_real = ds[...]*ds.attrs['Quantisation Factor'] + else: + ds_real = ds[...]*1. 
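+    # A negative dataset minimum is taken to mark the fill value; those
+    # pixels are masked out as NaN below.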
+ ds_min = ds[...].min() + if ds_min < 0: + mask = ds == ds_min + ds_real[mask] = np.nan + return ds_real + + +class GERB_HR_FileHandler(BaseFileHandler): + """File handler for GERB L2 High Resolution H5 files.""" + + def __init__(self, filename, filename_info, filetype_info): + """Init the file handler.""" + super(GERB_HR_FileHandler, self).__init__(filename, + filename_info, + filetype_info) + self._h5fh = h5py.File(self.filename, 'r') + self.ssp_lon = self._h5fh["Geolocation"].attrs["Nominal Satellite Longitude (degrees)"][()] + + @property + def end_time(self): + """Get end time.""" + return self.start_time + timedelta(minutes=14, seconds=59) + + + @property + def start_time(self): + """Get start time.""" + return self.filename_info['sensing_time'] + + + def _get_dataset(self, ds_name): + """Access the GERB dataset from the HDF5 file.""" + if ds_name in ['Solar Flux', 'Thermal Flux', 'Solar Radiance', 'Thermal Radiance']: + return gerb_get_dataset(self._h5fh, f'Radiometry/{ds_name}') + else: + raise ValueError + + + def get_dataset(self, ds_id, ds_info): + """Read a HDF5 file into an xarray DataArray.""" + ds = self._get_dataset(ds_id['name']) + ds_info = {} + + ds_info['start_time'] = self.start_time + ds_info['data_time'] = self.start_time + ds_info['end_time'] = self.end_time + + data = da.from_array(ds) + return xr.DataArray(data, attrs=ds_info, dims=('y', 'x')) + + + def get_area_def(self, dsid): + """Area definition for the GERB product""" + + if abs(self.ssp_lon) < 1e-6: + return get_area_def("msg_seviri_fes_9km") + elif abs(self.ssp_lon - 9.5) < 1e-6: + return get_area_def("msg_seviri_fes_9km") + elif abs(self.ssp_lon - 45.5) < 1e-6: + return get_area_def("msg_seviri_iodc_9km") + else: + raise ValueError + From 045c1ea1914d0cbded44f5764908fb75b487e066 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 13 Sep 2023 13:12:57 +0200 Subject: [PATCH 0496/1416] fix style --- satpy/readers/gerb_l2_hr_h5.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index b5e4d0d5e8..686b283907 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -17,8 +17,10 @@ # satpy. If not, see . -"""A reader for the Top of Atmosphere outgoing fluxes from the Geostationary Earth Radiation -Budget instrument aboard the Meteosat Second Generation satellites.""" +""" +A reader for the Top of Atmosphere outgoing fluxes from the Geostationary Earth Radiation +Budget instrument aboard the Meteosat Second Generation satellites. 
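+
+A minimal loading sketch (the file name here is only illustrative)::
+
+    from satpy import Scene
+
+    scn = Scene(reader='gerb_l2_hr_h5',
+                filenames=['G4_SEV4_L20_HR_SOL_TH_20190606_130000_V000.hdf'])
+    scn.load(['Solar Flux', 'Thermal Flux'])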
+""" import logging @@ -34,6 +36,7 @@ LOG = logging.getLogger(__name__) + def gerb_get_dataset(hfile, name): """ Load a GERB dataset in memory from a HDF5 file @@ -68,13 +71,11 @@ def end_time(self): """Get end time.""" return self.start_time + timedelta(minutes=14, seconds=59) - @property def start_time(self): """Get start time.""" return self.filename_info['sensing_time'] - def _get_dataset(self, ds_name): """Access the GERB dataset from the HDF5 file.""" if ds_name in ['Solar Flux', 'Thermal Flux', 'Solar Radiance', 'Thermal Radiance']: @@ -82,7 +83,6 @@ def _get_dataset(self, ds_name): else: raise ValueError - def get_dataset(self, ds_id, ds_info): """Read a HDF5 file into an xarray DataArray.""" ds = self._get_dataset(ds_id['name']) @@ -95,7 +95,6 @@ def get_dataset(self, ds_id, ds_info): data = da.from_array(ds) return xr.DataArray(data, attrs=ds_info, dims=('y', 'x')) - def get_area_def(self, dsid): """Area definition for the GERB product""" @@ -107,4 +106,3 @@ def get_area_def(self, dsid): return get_area_def("msg_seviri_iodc_9km") else: raise ValueError - From 9a6ba0dde134f34ee6fef59bbe103a33ffe089aa Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 13 Sep 2023 13:19:08 +0200 Subject: [PATCH 0497/1416] flake8 --- satpy/readers/gerb_l2_hr_h5.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index 686b283907..59db186a6f 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -17,7 +17,8 @@ # satpy. If not, see . -""" +"""GERB L2 HR HDF5 reader. + A reader for the Top of Atmosphere outgoing fluxes from the Geostationary Earth Radiation Budget instrument aboard the Meteosat Second Generation satellites. """ @@ -39,7 +40,7 @@ def gerb_get_dataset(hfile, name): """ - Load a GERB dataset in memory from a HDF5 file + Load a GERB dataset in memory from a HDF5 file. The routine takes into account the quantisation factor and fill values. """ @@ -96,8 +97,7 @@ def get_dataset(self, ds_id, ds_info): return xr.DataArray(data, attrs=ds_info, dims=('y', 'x')) def get_area_def(self, dsid): - """Area definition for the GERB product""" - + """Area definition for the GERB product.""" if abs(self.ssp_lon) < 1e-6: return get_area_def("msg_seviri_fes_9km") elif abs(self.ssp_lon - 9.5) < 1e-6: From 05038af0f63c2e516dec5f1c826f2306090ff43f Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Thu, 14 Sep 2023 11:15:37 +0200 Subject: [PATCH 0498/1416] remove wavelength entry for GERB reader --- satpy/etc/readers/gerb_l2_hr_h5.yaml | 2 -- 1 file changed, 2 deletions(-) diff --git a/satpy/etc/readers/gerb_l2_hr_h5.yaml b/satpy/etc/readers/gerb_l2_hr_h5.yaml index b05c662dc9..a501fec32c 100644 --- a/satpy/etc/readers/gerb_l2_hr_h5.yaml +++ b/satpy/etc/readers/gerb_l2_hr_h5.yaml @@ -17,7 +17,6 @@ datasets: Solar_Flux: name: Solar Flux sensor: gerb - wavelength: [0.3, 3., 4.] units: W m-2 standard_name: toa_outgoing_shortwave_flux file_type: gerb_l2_hr_h5 @@ -25,7 +24,6 @@ datasets: Thermal_Flux: name: Thermal Flux sensor: gerb - wavelength: [4., 22., 40.] 
units: W m-2 standard_name: toa_outgoing_longwave_flux file_type: gerb_l2_hr_h5 From 078f541b26cdb974b7993b04638f278eb2c30476 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Thu, 14 Sep 2023 11:48:27 +0200 Subject: [PATCH 0499/1416] use fill_value for GERB reader fluxes --- satpy/etc/readers/gerb_l2_hr_h5.yaml | 2 ++ satpy/readers/gerb_l2_hr_h5.py | 28 ++++++++++++++-------------- 2 files changed, 16 insertions(+), 14 deletions(-) diff --git a/satpy/etc/readers/gerb_l2_hr_h5.yaml b/satpy/etc/readers/gerb_l2_hr_h5.yaml index a501fec32c..2d47c19b8f 100644 --- a/satpy/etc/readers/gerb_l2_hr_h5.yaml +++ b/satpy/etc/readers/gerb_l2_hr_h5.yaml @@ -18,6 +18,7 @@ datasets: name: Solar Flux sensor: gerb units: W m-2 + fill_value: -32767 standard_name: toa_outgoing_shortwave_flux file_type: gerb_l2_hr_h5 @@ -25,5 +26,6 @@ datasets: name: Thermal Flux sensor: gerb units: W m-2 + fill_value: -32767 standard_name: toa_outgoing_longwave_flux file_type: gerb_l2_hr_h5 diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index 59db186a6f..4673db4887 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -38,22 +38,22 @@ LOG = logging.getLogger(__name__) -def gerb_get_dataset(hfile, name): +def gerb_get_dataset(hfile, name, ds_info): """ Load a GERB dataset in memory from a HDF5 file. The routine takes into account the quantisation factor and fill values. """ - ds = hfile[name] - if 'Quantisation Factor' in ds.attrs and 'Unit' in ds.attrs: - ds_real = ds[...]*ds.attrs['Quantisation Factor'] + ds = hfile[name][...] + ds_attrs = hfile[name].attrs + ds_fill = ds_info['fill_value'] + fill_mask = ds == ds_fill + if 'Quantisation Factor' in ds_attrs and 'Unit' in ds_attrs: + ds = ds*ds_attrs['Quantisation Factor'] else: - ds_real = ds[...]*1. - ds_min = ds[...].min() - if ds_min < 0: - mask = ds == ds_min - ds_real[mask] = np.nan - return ds_real + ds = ds*1. 
+ ds[fill_mask] = np.nan + return ds class GERB_HR_FileHandler(BaseFileHandler): @@ -77,16 +77,16 @@ def start_time(self): """Get start time.""" return self.filename_info['sensing_time'] - def _get_dataset(self, ds_name): + def _get_dataset(self, ds_name, ds_info): """Access the GERB dataset from the HDF5 file.""" if ds_name in ['Solar Flux', 'Thermal Flux', 'Solar Radiance', 'Thermal Radiance']: - return gerb_get_dataset(self._h5fh, f'Radiometry/{ds_name}') + return gerb_get_dataset(self._h5fh, f'Radiometry/{ds_name}', ds_info) else: - raise ValueError + raise KeyError(f"{ds_name} is an unknown dataset for this reader.") def get_dataset(self, ds_id, ds_info): """Read a HDF5 file into an xarray DataArray.""" - ds = self._get_dataset(ds_id['name']) + ds = self._get_dataset(ds_id['name'], ds_info) ds_info = {} ds_info['start_time'] = self.start_time From 58eaddc78e872842b4ddfd8de95cd2ee0ae34029 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Thu, 14 Sep 2023 12:07:48 +0200 Subject: [PATCH 0500/1416] Add message for ValueError in get_area_def for GERB reader --- satpy/readers/gerb_l2_hr_h5.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index 4673db4887..470db89478 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -105,4 +105,4 @@ def get_area_def(self, dsid): elif abs(self.ssp_lon - 45.5) < 1e-6: return get_area_def("msg_seviri_iodc_9km") else: - raise ValueError + raise ValueError(f"There is no matching grid for SSP longitude {self.ssp_lon}") From 0ab1693154c155759caa005e6328a10528627b23 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Thu, 14 Sep 2023 23:02:16 +0200 Subject: [PATCH 0501/1416] use 15 mins instead of 14:59 in GERB reader --- satpy/readers/gerb_l2_hr_h5.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index 470db89478..c56dc0e9a5 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -70,7 +70,7 @@ def __init__(self, filename, filename_info, filetype_info): @property def end_time(self): """Get end time.""" - return self.start_time + timedelta(minutes=14, seconds=59) + return self.start_time + timedelta(minutes=15) @property def start_time(self): From 3c2e5b0e30c9c8af307ee0acce43c5e69a2d9c93 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Thu, 14 Sep 2023 23:02:47 +0200 Subject: [PATCH 0502/1416] Use xarray "where" instead of NumPy masking --- satpy/readers/gerb_l2_hr_h5.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index c56dc0e9a5..c7ce8f9f37 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -44,15 +44,15 @@ def gerb_get_dataset(hfile, name, ds_info): The routine takes into account the quantisation factor and fill values. """ - ds = hfile[name][...] + ds = xr.DataArray(hfile[name][...]) ds_attrs = hfile[name].attrs ds_fill = ds_info['fill_value'] - fill_mask = ds == ds_fill + fill_mask = ds != ds_fill if 'Quantisation Factor' in ds_attrs and 'Unit' in ds_attrs: ds = ds*ds_attrs['Quantisation Factor'] else: ds = ds*1. 
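+    # where() keeps the pixels flagged as valid by fill_mask and inserts NaN
+    # for the fill pixels, avoiding in-place assignment on the DataArray.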
- ds[fill_mask] = np.nan + ds = ds.where(fill_mask) return ds From ffe5f7f17c0fa6d4ba1d3f0e7d011e941aa3f386 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Mon, 18 Sep 2023 09:29:08 +0200 Subject: [PATCH 0503/1416] use HDF5FileHandler for GERB reader --- satpy/readers/gerb_l2_hr_h5.py | 29 ++++++++++++++--------------- 1 file changed, 14 insertions(+), 15 deletions(-) diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index c7ce8f9f37..245bab1405 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -32,7 +32,7 @@ import numpy as np import xarray as xr -from satpy.readers.file_handlers import BaseFileHandler +from satpy.readers.hdf5_utils import HDF5FileHandler from satpy.resample import get_area_def LOG = logging.getLogger(__name__) @@ -40,7 +40,7 @@ def gerb_get_dataset(hfile, name, ds_info): """ - Load a GERB dataset in memory from a HDF5 file. + Load a GERB dataset in memory from a HDF5 file or HDF5FileHandler. The routine takes into account the quantisation factor and fill values. """ @@ -56,7 +56,7 @@ def gerb_get_dataset(hfile, name, ds_info): return ds -class GERB_HR_FileHandler(BaseFileHandler): +class GERB_HR_FileHandler(HDF5FileHandler): """File handler for GERB L2 High Resolution H5 files.""" def __init__(self, filename, filename_info, filetype_info): @@ -65,7 +65,6 @@ def __init__(self, filename, filename_info, filetype_info): filename_info, filetype_info) self._h5fh = h5py.File(self.filename, 'r') - self.ssp_lon = self._h5fh["Geolocation"].attrs["Nominal Satellite Longitude (degrees)"][()] @property def end_time(self): @@ -77,16 +76,14 @@ def start_time(self): """Get start time.""" return self.filename_info['sensing_time'] - def _get_dataset(self, ds_name, ds_info): - """Access the GERB dataset from the HDF5 file.""" - if ds_name in ['Solar Flux', 'Thermal Flux', 'Solar Radiance', 'Thermal Radiance']: - return gerb_get_dataset(self._h5fh, f'Radiometry/{ds_name}', ds_info) - else: - raise KeyError(f"{ds_name} is an unknown dataset for this reader.") - def get_dataset(self, ds_id, ds_info): """Read a HDF5 file into an xarray DataArray.""" - ds = self._get_dataset(ds_id['name'], ds_info) + + ds_name = ds_id['name'] + if ds_name not in ['Solar Flux', 'Thermal Flux', 'Solar Radiance', 'Thermal Radiance']: + raise KeyError(f"{ds_name} is an unknown dataset for this reader.") + + ds = gerb_get_dataset(self, f'Radiometry/{ds_name}', ds_info) ds_info = {} ds_info['start_time'] = self.start_time @@ -98,11 +95,13 @@ def get_dataset(self, ds_id, ds_info): def get_area_def(self, dsid): """Area definition for the GERB product.""" - if abs(self.ssp_lon) < 1e-6: + ssp_lon = self.file_content["Geolocation/attr/Nominal Satellite Longitude (degrees)"] + + if abs(ssp_lon) < 1e-6: return get_area_def("msg_seviri_fes_9km") - elif abs(self.ssp_lon - 9.5) < 1e-6: + elif abs(ssp_lon - 9.5) < 1e-6: return get_area_def("msg_seviri_fes_9km") - elif abs(self.ssp_lon - 45.5) < 1e-6: + elif abs(ssp_lon - 45.5) < 1e-6: return get_area_def("msg_seviri_iodc_9km") else: raise ValueError(f"There is no matching grid for SSP longitude {self.ssp_lon}") From 929972aa3d8b7fd4c58df26efdf722d63b2c5702 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Mon, 18 Sep 2023 15:19:41 +0200 Subject: [PATCH 0504/1416] flake8 --- satpy/readers/gerb_l2_hr_h5.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index 245bab1405..4dad36f0e8 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ 
b/satpy/readers/gerb_l2_hr_h5.py @@ -29,7 +29,6 @@ import dask.array as da import h5py -import numpy as np import xarray as xr from satpy.readers.hdf5_utils import HDF5FileHandler @@ -78,7 +77,6 @@ def start_time(self): def get_dataset(self, ds_id, ds_info): """Read a HDF5 file into an xarray DataArray.""" - ds_name = ds_id['name'] if ds_name not in ['Solar Flux', 'Thermal Flux', 'Solar Radiance', 'Thermal Radiance']: raise KeyError(f"{ds_name} is an unknown dataset for this reader.") From 2f8365491064d2bb19f5a222c07636fc5fb837ef Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 4 Oct 2023 12:50:37 +0200 Subject: [PATCH 0505/1416] add test for GERB L2 HR HDF5 reader --- .../tests/reader_tests/test_gerb_l2_hr_h5.py | 208 ++++++++++++++++++ 1 file changed, 208 insertions(+) create mode 100644 satpy/tests/reader_tests/test_gerb_l2_hr_h5.py diff --git a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py new file mode 100644 index 0000000000..102317937e --- /dev/null +++ b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py @@ -0,0 +1,208 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2018 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . 
+"""Unit tests for GERB L2 HR HDF5 reader.""" + +import numpy as np +import pytest +import h5py +from satpy import Scene + +FNAME = "G4_SEV4_L20_HR_SOL_TH_20190606_130000_V000.hdf" + + +@pytest.fixture(scope="session") +def gerb_l2_hr_h5_dummy_file(tmp_path_factory): + """Create a dummy HDF5 file for the GERB L2 HR product.""" + filename = tmp_path_factory.mktemp("data") / FNAME + + with h5py.File(filename, 'w') as fid: + fid.create_group('/Angles') + fid['/Angles/Relative Azimuth'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Angles/Relative Azimuth'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(7) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Angles/Relative Azimuth'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'Degree', dtype='|S7')) + fid['/Angles/Solar Zenith'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Angles/Solar Zenith'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(7) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Angles/Solar Zenith'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'Degree', dtype='|S7')) + fid['/Angles/Viewing Azimuth'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Angles/Viewing Azimuth'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(7) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Angles/Viewing Azimuth'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'Degree', dtype='|S7')) + fid['/Angles/Viewing Zenith'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Angles/Viewing Zenith'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(7) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Angles/Viewing Zenith'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'Degree', dtype='|S7')) + fid.create_group('/GERB') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(3) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/GERB'].id, b'Instrument Identifier', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'G4', dtype='|S3')) + fid.create_group('/GGSPS') + fid['/GGSPS'].attrs['L1.5 NANRG Product Version'] = np.array(-1, dtype='int32') + fid.create_group('/Geolocation') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(44) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Geolocation'].id, b'Geolocation File Name', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'G4_SEV4_L20_HR_GEO_20180111_181500_V010.hdf', dtype='|S44')) + fid['/Geolocation'].attrs['Line of Sight North-South Speed'] = np.array(0.0, dtype='float64') + fid['/Geolocation'].attrs['Nominal Satellite Longitude (degrees)'] = np.array(0.0, dtype='float64') + fid.create_group('/Geolocation/Rectified Grid') + fid['/Geolocation/Rectified Grid'].attrs['Grid Orientation'] = np.array(0.0, dtype='float64') + fid['/Geolocation/Rectified Grid'].attrs['Lap'] = np.array(0.0, dtype='float64') + fid['/Geolocation/Rectified Grid'].attrs['Lop'] = np.array(0.0, dtype='float64') + fid['/Geolocation/Rectified Grid'].attrs['Nr'] = np.array(6.610674630916804, dtype='float64') + fid['/Geolocation/Rectified Grid'].attrs['Nx'] = np.array(1237, 
dtype='int32') + fid['/Geolocation/Rectified Grid'].attrs['Ny'] = np.array(1237, dtype='int32') + fid['/Geolocation/Rectified Grid'].attrs['Xp'] = np.array(618.3333333333334, dtype='float64') + fid['/Geolocation/Rectified Grid'].attrs['Yp'] = np.array(617.6666666666666, dtype='float64') + fid['/Geolocation/Rectified Grid'].attrs['dx'] = np.array(1207.4379446281002, dtype='float64') + fid['/Geolocation/Rectified Grid'].attrs['dy'] = np.array(1203.3201568249945, dtype='float64') + fid.create_group('/Geolocation/Rectified Grid/Resolution Flags') + fid['/Geolocation/Rectified Grid/Resolution Flags'].attrs['East West'] = np.array(0.014411607, dtype='float64') + fid['/Geolocation/Rectified Grid/Resolution Flags'].attrs['North South'] = np.array(0.014411607, dtype='float64') + fid.create_group('/Imager') + fid['/Imager'].attrs['Instrument Identifier'] = np.array(4, dtype='int32') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(7) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Imager'].id, b'Type', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'SEVIRI', dtype='|S7')) + fid.create_group('/RMIB') + fid['/RMIB'].attrs['Product Version'] = np.array(10, dtype='int32') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(16) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/RMIB'].id, b'Software Identifier', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'20220812_151631', dtype='|S16')) + fid.create_group('/Radiometry') + fid['/Radiometry'].attrs['SEVIRI Radiance Definition Flag'] = np.array(2, dtype='int32') + fid['/Radiometry/A Values (per GERB detector cell)'] = np.ones(shape=(256,), dtype=np.dtype('>f8')) + fid['/Radiometry/C Values (per GERB detector cell)'] = np.ones(shape=(256,), dtype=np.dtype('>f8')) + fid['/Radiometry/Longwave Correction'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Radiometry/Longwave Correction'].attrs['Offset'] = np.array(1.0, dtype='float64') + fid['/Radiometry/Longwave Correction'].attrs['Quantisation Factor'] = np.array(0.005, dtype='float64') + fid['/Radiometry/Shortwave Correction'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Radiometry/Shortwave Correction'].attrs['Offset'] = np.array(1.0, dtype='float64') + fid['/Radiometry/Shortwave Correction'].attrs['Quantisation Factor'] = np.array(0.005, dtype='float64') + fid['/Radiometry/Solar Flux'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Radiometry/Solar Flux'].attrs['Quantisation Factor'] = np.array(0.25, dtype='float64') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(22) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Radiometry/Solar Flux'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'Watt per square meter', dtype='|S22')) + fid['/Radiometry/Solar Radiance'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Radiometry/Solar Radiance'].attrs['Quantisation Factor'] = np.array(0.05, dtype='float64') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(36) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Radiometry/Solar Radiance'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'Watt per square meter per steradian', dtype='|S36')) + fid['/Radiometry/Thermal Flux'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Radiometry/Thermal Flux'].attrs['Quantisation Factor'] = np.array(0.25, dtype='float64') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(22) + 
dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Radiometry/Thermal Flux'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'Watt per square meter', dtype='|S22')) + fid['/Radiometry/Thermal Radiance'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Radiometry/Thermal Radiance'].attrs['Quantisation Factor'] = np.array(0.05, dtype='float64') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(36) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Radiometry/Thermal Radiance'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'Watt per square meter per steradian', dtype='|S36')) + fid.create_group('/Scene Identification') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(13) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Scene Identification'].id, b'Solar Angular Dependency Models Set Version', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'CERES_TRMM.1', dtype='|S13')) + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(7) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Scene Identification'].id, b'Thermal Angular Dependency Models Set Version', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'RMIB.3', dtype='|S7')) + fid['/Scene Identification/Aerosol Optical Depth IR 1.6'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Scene Identification/Aerosol Optical Depth IR 1.6'].attrs['Quantisation Factor'] = np.array(0.001, dtype='float64') + fid['/Scene Identification/Aerosol Optical Depth VIS 0.6'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Scene Identification/Aerosol Optical Depth VIS 0.6'].attrs['Quantisation Factor'] = np.array(0.001, dtype='float64') + fid['/Scene Identification/Aerosol Optical Depth VIS 0.8'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Scene Identification/Aerosol Optical Depth VIS 0.8'].attrs['Quantisation Factor'] = np.array(0.001, dtype='float64') + fid['/Scene Identification/Cloud Cover'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) + fid['/Scene Identification/Cloud Cover'].attrs['Quantisation Factor'] = np.array(0.01, dtype='float64') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(8) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Scene Identification/Cloud Cover'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'Percent', dtype='|S8')) + fid['/Scene Identification/Cloud Optical Depth (logarithm)'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Scene Identification/Cloud Optical Depth (logarithm)'].attrs['Quantisation Factor'] = np.array(0.00025, dtype='float64') + fid['/Scene Identification/Cloud Phase'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) + fid['/Scene Identification/Cloud Phase'].attrs['Quantisation Factor'] = np.array(0.01, dtype='float64') + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(34) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + at = h5py.h5a.create(fid['/Scene Identification/Cloud Phase'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(b'Percent (Water=0%,Mixed,Ice=100%)', dtype='|S34')) + fid['/Scene Identification/Dust Detection'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) + fid['/Scene Identification/Dust Detection'].attrs['Quantisation Factor'] = np.array(0.01, dtype='float64') + fid['/Scene Identification/Solar Angular Dependency Model'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + 
fid['/Scene Identification/Surface Type'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) + fid['/Scene Identification/Thermal Angular Dependency Model'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) + fid.create_group('/Times') + fid['/Times/Time (per row)'] = np.ones(shape=(1237,), dtype=np.dtype('|S22')) + + return filename + + +def test_gerb_solar_flux_dataset(gerb_l2_hr_h5_dummy_file): + """Test the GERB L2 HR HDF5 file. + + Load the solar flux component. + """ + scene = Scene(reader='gerb_l2_hr_h5', filenames=[gerb_l2_hr_h5_dummy_file]) + scene.load(['Solar Flux']) + assert scene['Solar Flux'].shape == (1237, 1237) + assert np.nanmax((scene['Solar Flux'].to_numpy().flatten() - 0.25)) < 1e-6 + + +def test_gerb_thermal_flux_dataset(gerb_l2_hr_h5_dummy_file): + """Test the GERB L2 HR HDF5 file. + + Load the thermal flux component. + """ + scene = Scene(reader='gerb_l2_hr_h5', filenames=[gerb_l2_hr_h5_dummy_file]) + scene.load(['Thermal Flux']) + assert scene['Thermal Flux'].shape == (1237, 1237) + assert np.nanmax((scene['Thermal Flux'].to_numpy().flatten() - 0.25)) < 1e-6 From 7de9d8024cdd8b33956c2f63b618699a8b11b751 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 4 Oct 2023 14:56:34 +0200 Subject: [PATCH 0506/1416] flake8 --- .../tests/reader_tests/test_gerb_l2_hr_h5.py | 27 ++++++++++++------- 1 file changed, 18 insertions(+), 9 deletions(-) diff --git a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py index 102317937e..188c9f1141 100644 --- a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py +++ b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py @@ -89,7 +89,8 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory): fid['/Geolocation/Rectified Grid'].attrs['dy'] = np.array(1203.3201568249945, dtype='float64') fid.create_group('/Geolocation/Rectified Grid/Resolution Flags') fid['/Geolocation/Rectified Grid/Resolution Flags'].attrs['East West'] = np.array(0.014411607, dtype='float64') - fid['/Geolocation/Rectified Grid/Resolution Flags'].attrs['North South'] = np.array(0.014411607, dtype='float64') + fid['/Geolocation/Rectified Grid/Resolution Flags'].attrs['North South'] = \ + np.array(0.014411607, dtype='float64') fid.create_group('/Imager') fid['/Imager'].attrs['Instrument Identifier'] = np.array(4, dtype='int32') dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) @@ -146,19 +147,24 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory): dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) dt.set_size(13) dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Scene Identification'].id, b'Solar Angular Dependency Models Set Version', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at = h5py.h5a.create(fid['/Scene Identification'].id, b'Solar Angular Dependency Models Set Version', dt, + h5py.h5s.create(h5py.h5s.SCALAR)) at.write(np.array(b'CERES_TRMM.1', dtype='|S13')) dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) dt.set_size(7) dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Scene Identification'].id, b'Thermal Angular Dependency Models Set Version', dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at = h5py.h5a.create(fid['/Scene Identification'].id, b'Thermal Angular Dependency Models Set Version', dt, + h5py.h5s.create(h5py.h5s.SCALAR)) at.write(np.array(b'RMIB.3', dtype='|S7')) fid['/Scene Identification/Aerosol Optical Depth IR 1.6'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Scene Identification/Aerosol Optical Depth IR 1.6'].attrs['Quantisation Factor'] = np.array(0.001, dtype='float64') + fid['/Scene 
Identification/Aerosol Optical Depth IR 1.6'].attrs['Quantisation Factor'] = \ + np.array(0.001, dtype='float64') fid['/Scene Identification/Aerosol Optical Depth VIS 0.6'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Scene Identification/Aerosol Optical Depth VIS 0.6'].attrs['Quantisation Factor'] = np.array(0.001, dtype='float64') + fid['/Scene Identification/Aerosol Optical Depth VIS 0.6'].attrs['Quantisation Factor'] = \ + np.array(0.001, dtype='float64') fid['/Scene Identification/Aerosol Optical Depth VIS 0.8'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Scene Identification/Aerosol Optical Depth VIS 0.8'].attrs['Quantisation Factor'] = np.array(0.001, dtype='float64') + fid['/Scene Identification/Aerosol Optical Depth VIS 0.8'].attrs['Quantisation Factor'] = \ + np.array(0.001, dtype='float64') fid['/Scene Identification/Cloud Cover'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) fid['/Scene Identification/Cloud Cover'].attrs['Quantisation Factor'] = np.array(0.01, dtype='float64') dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) @@ -166,8 +172,10 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory): dt.set_strpad(h5py.h5t.STR_NULLTERM) at = h5py.h5a.create(fid['/Scene Identification/Cloud Cover'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) at.write(np.array(b'Percent', dtype='|S8')) - fid['/Scene Identification/Cloud Optical Depth (logarithm)'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Scene Identification/Cloud Optical Depth (logarithm)'].attrs['Quantisation Factor'] = np.array(0.00025, dtype='float64') + fid['/Scene Identification/Cloud Optical Depth (logarithm)'] = \ + np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) + fid['/Scene Identification/Cloud Optical Depth (logarithm)'].attrs['Quantisation Factor'] = \ + np.array(0.00025, dtype='float64') fid['/Scene Identification/Cloud Phase'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) fid['/Scene Identification/Cloud Phase'].attrs['Quantisation Factor'] = np.array(0.01, dtype='float64') dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) @@ -179,7 +187,8 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory): fid['/Scene Identification/Dust Detection'].attrs['Quantisation Factor'] = np.array(0.01, dtype='float64') fid['/Scene Identification/Solar Angular Dependency Model'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) fid['/Scene Identification/Surface Type'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) - fid['/Scene Identification/Thermal Angular Dependency Model'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) + fid['/Scene Identification/Thermal Angular Dependency Model'] = \ + np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) fid.create_group('/Times') fid['/Times/Time (per row)'] = np.ones(shape=(1237,), dtype=np.dtype('|S22')) From 1b16ffe505e2bc980b3deb74cf7000511edfc8f3 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 4 Oct 2023 19:56:48 +0200 Subject: [PATCH 0507/1416] use pytest.mark.parametrize for GERB reader Co-authored-by: David Hoese --- .../tests/reader_tests/test_gerb_l2_hr_h5.py | 26 +++++-------------- 1 file changed, 6 insertions(+), 20 deletions(-) diff --git a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py index 188c9f1141..b6a5deda13 100644 --- a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py +++ b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py @@ -194,24 +194,10 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory): return filename - -def 
test_gerb_solar_flux_dataset(gerb_l2_hr_h5_dummy_file):
-    """Test the GERB L2 HR HDF5 file.
-
-    Load the solar flux component.
-    """
-    scene = Scene(reader='gerb_l2_hr_h5', filenames=[gerb_l2_hr_h5_dummy_file])
-    scene.load(['Solar Flux'])
-    assert scene['Solar Flux'].shape == (1237, 1237)
-    assert np.nanmax((scene['Solar Flux'].to_numpy().flatten() - 0.25)) < 1e-6
-
-
-def test_gerb_thermal_flux_dataset(gerb_l2_hr_h5_dummy_file):
-    """Test the GERB L2 HR HDF5 file.
-
-    Load the thermal flux component.
-    """
+@pytest.mark.parametrize("name", ["Solar Flux", "Thermal Flux"])
+def test_dataset_load(gerb_l2_hr_h5_dummy_file, name):
+    """Test loading the solar and thermal flux components."""
     scene = Scene(reader='gerb_l2_hr_h5', filenames=[gerb_l2_hr_h5_dummy_file])
-    scene.load(['Thermal Flux'])
-    assert scene['Thermal Flux'].shape == (1237, 1237)
-    assert np.nanmax((scene['Thermal Flux'].to_numpy().flatten() - 0.25)) < 1e-6
+    scene.load([name])
+    assert scene[name].shape == (1237, 1237)
+    assert np.nanmax((scene[name].to_numpy().flatten() - 0.25)) < 1e-6

From 1e858b185b088135c4d087e0abe33d85cdb5d410 Mon Sep 17 00:00:00 2001
From: Pierre de Buyl
Date: Wed, 4 Oct 2023 20:23:21 +0200
Subject: [PATCH 0508/1416] flake8

---
 satpy/tests/reader_tests/test_gerb_l2_hr_h5.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py
index b6a5deda13..3eb115759f 100644
--- a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py
+++ b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py
@@ -194,6 +194,7 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory):
     return filename
 
+
 @pytest.mark.parametrize("name", ["Solar Flux", "Thermal Flux"])
 def test_dataset_load(gerb_l2_hr_h5_dummy_file, name):
     """Test loading the solar and thermal flux components."""

From ee26d3765830d9257021da57ae461951b4eea6ba Mon Sep 17 00:00:00 2001
From: Pierre de Buyl
Date: Wed, 4 Oct 2023 20:54:22 +0200
Subject: [PATCH 0509/1416] Reduce code size

1. Use function for common HDF5 operations
2. 
Remove unused parts of the dummy file --- .../tests/reader_tests/test_gerb_l2_hr_h5.py | 146 +++++------------- 1 file changed, 35 insertions(+), 111 deletions(-) diff --git a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py index 3eb115759f..65c8583b19 100644 --- a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py +++ b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py @@ -25,6 +25,23 @@ FNAME = "G4_SEV4_L20_HR_SOL_TH_20190606_130000_V000.hdf" +def make_h5_null_string(length): + """Make a HDF5 type for a NULL terminated string of fixed length.""" + dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) + dt.set_size(7) + dt.set_strpad(h5py.h5t.STR_NULLTERM) + return dt + + +def write_h5_null_string_att(loc_id, name, s): + """Write a NULL terminated string attribute at loc_id.""" + dt = make_h5_null_string(length=7) + name = bytes(name.encode('ascii')) + s = bytes(s.encode('ascii')) + at = h5py.h5a.create(loc_id, name, dt, h5py.h5s.create(h5py.h5s.SCALAR)) + at.write(np.array(s, dtype=f'|S{len(s)+1}')) + + @pytest.fixture(scope="session") def gerb_l2_hr_h5_dummy_file(tmp_path_factory): """Create a dummy HDF5 file for the GERB L2 HR product.""" @@ -34,77 +51,30 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory): fid.create_group('/Angles') fid['/Angles/Relative Azimuth'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) fid['/Angles/Relative Azimuth'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(7) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Angles/Relative Azimuth'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'Degree', dtype='|S7')) fid['/Angles/Solar Zenith'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) fid['/Angles/Solar Zenith'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(7) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Angles/Solar Zenith'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'Degree', dtype='|S7')) + write_h5_null_string_att(fid['/Angles/Relative Azimuth'].id, 'Unit', 'Degree') fid['/Angles/Viewing Azimuth'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) fid['/Angles/Viewing Azimuth'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(7) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Angles/Viewing Azimuth'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'Degree', dtype='|S7')) + write_h5_null_string_att(fid['/Angles/Viewing Azimuth'].id, 'Unit', 'Degree') fid['/Angles/Viewing Zenith'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) fid['/Angles/Viewing Zenith'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(7) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Angles/Viewing Zenith'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'Degree', dtype='|S7')) + write_h5_null_string_att(fid['/Angles/Viewing Zenith'].id, 'Unit', 'Degree') fid.create_group('/GERB') dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) dt.set_size(3) dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/GERB'].id, b'Instrument Identifier', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'G4', dtype='|S3')) + write_h5_null_string_att(fid['/GERB'].id, 'Instrument Identifier', 
'G4') fid.create_group('/GGSPS') fid['/GGSPS'].attrs['L1.5 NANRG Product Version'] = np.array(-1, dtype='int32') fid.create_group('/Geolocation') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(44) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Geolocation'].id, b'Geolocation File Name', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'G4_SEV4_L20_HR_GEO_20180111_181500_V010.hdf', dtype='|S44')) - fid['/Geolocation'].attrs['Line of Sight North-South Speed'] = np.array(0.0, dtype='float64') + write_h5_null_string_att(fid['/Geolocation'].id, 'Geolocation File Name', + 'G4_SEV4_L20_HR_GEO_20180111_181500_V010.hdf') fid['/Geolocation'].attrs['Nominal Satellite Longitude (degrees)'] = np.array(0.0, dtype='float64') - fid.create_group('/Geolocation/Rectified Grid') - fid['/Geolocation/Rectified Grid'].attrs['Grid Orientation'] = np.array(0.0, dtype='float64') - fid['/Geolocation/Rectified Grid'].attrs['Lap'] = np.array(0.0, dtype='float64') - fid['/Geolocation/Rectified Grid'].attrs['Lop'] = np.array(0.0, dtype='float64') - fid['/Geolocation/Rectified Grid'].attrs['Nr'] = np.array(6.610674630916804, dtype='float64') - fid['/Geolocation/Rectified Grid'].attrs['Nx'] = np.array(1237, dtype='int32') - fid['/Geolocation/Rectified Grid'].attrs['Ny'] = np.array(1237, dtype='int32') - fid['/Geolocation/Rectified Grid'].attrs['Xp'] = np.array(618.3333333333334, dtype='float64') - fid['/Geolocation/Rectified Grid'].attrs['Yp'] = np.array(617.6666666666666, dtype='float64') - fid['/Geolocation/Rectified Grid'].attrs['dx'] = np.array(1207.4379446281002, dtype='float64') - fid['/Geolocation/Rectified Grid'].attrs['dy'] = np.array(1203.3201568249945, dtype='float64') - fid.create_group('/Geolocation/Rectified Grid/Resolution Flags') - fid['/Geolocation/Rectified Grid/Resolution Flags'].attrs['East West'] = np.array(0.014411607, dtype='float64') - fid['/Geolocation/Rectified Grid/Resolution Flags'].attrs['North South'] = \ - np.array(0.014411607, dtype='float64') fid.create_group('/Imager') fid['/Imager'].attrs['Instrument Identifier'] = np.array(4, dtype='int32') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(7) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Imager'].id, b'Type', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'SEVIRI', dtype='|S7')) + write_h5_null_string_att(fid['/Imager'].id, 'Type', 'SEVIRI') fid.create_group('/RMIB') - fid['/RMIB'].attrs['Product Version'] = np.array(10, dtype='int32') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(16) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/RMIB'].id, b'Software Identifier', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'20220812_151631', dtype='|S16')) fid.create_group('/Radiometry') fid['/Radiometry'].attrs['SEVIRI Radiance Definition Flag'] = np.array(2, dtype='int32') fid['/Radiometry/A Values (per GERB detector cell)'] = np.ones(shape=(256,), dtype=np.dtype('>f8')) @@ -117,78 +87,32 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory): fid['/Radiometry/Shortwave Correction'].attrs['Quantisation Factor'] = np.array(0.005, dtype='float64') fid['/Radiometry/Solar Flux'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) fid['/Radiometry/Solar Flux'].attrs['Quantisation Factor'] = np.array(0.25, dtype='float64') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(22) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Radiometry/Solar Flux'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - 
at.write(np.array(b'Watt per square meter', dtype='|S22')) + write_h5_null_string_att(fid['/Radiometry/Solar Flux'].id, 'Unit', 'Watt per square meter') fid['/Radiometry/Solar Radiance'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) fid['/Radiometry/Solar Radiance'].attrs['Quantisation Factor'] = np.array(0.05, dtype='float64') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(36) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Radiometry/Solar Radiance'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'Watt per square meter per steradian', dtype='|S36')) + write_h5_null_string_att(fid['/Radiometry/Solar Radiance'].id, 'Unit', 'Watt per square meter per steradian') fid['/Radiometry/Thermal Flux'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) fid['/Radiometry/Thermal Flux'].attrs['Quantisation Factor'] = np.array(0.25, dtype='float64') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(22) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Radiometry/Thermal Flux'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'Watt per square meter', dtype='|S22')) + write_h5_null_string_att(fid['/Radiometry/Thermal Flux'].id, 'Unit', 'Watt per square meter') fid['/Radiometry/Thermal Radiance'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) fid['/Radiometry/Thermal Radiance'].attrs['Quantisation Factor'] = np.array(0.05, dtype='float64') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(36) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Radiometry/Thermal Radiance'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'Watt per square meter per steradian', dtype='|S36')) + write_h5_null_string_att(fid['/Radiometry/Thermal Radiance'].id, 'Unit', 'Watt per square meter per steradian') fid.create_group('/Scene Identification') - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(13) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Scene Identification'].id, b'Solar Angular Dependency Models Set Version', dt, - h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'CERES_TRMM.1', dtype='|S13')) - dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) - dt.set_size(7) - dt.set_strpad(h5py.h5t.STR_NULLTERM) - at = h5py.h5a.create(fid['/Scene Identification'].id, b'Thermal Angular Dependency Models Set Version', dt, - h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(b'RMIB.3', dtype='|S7')) - fid['/Scene Identification/Aerosol Optical Depth IR 1.6'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Scene Identification/Aerosol Optical Depth IR 1.6'].attrs['Quantisation Factor'] = \ - np.array(0.001, dtype='float64') - fid['/Scene Identification/Aerosol Optical Depth VIS 0.6'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Scene Identification/Aerosol Optical Depth VIS 0.6'].attrs['Quantisation Factor'] = \ - np.array(0.001, dtype='float64') - fid['/Scene Identification/Aerosol Optical Depth VIS 0.8'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Scene Identification/Aerosol Optical Depth VIS 0.8'].attrs['Quantisation Factor'] = \ - np.array(0.001, dtype='float64') + write_h5_null_string_att(fid['/Scene Identification'].id, + 'Solar Angular Dependency Models Set Version', 'CERES_TRMM.1') + write_h5_null_string_att(fid['/Scene Identification'].id, + 'Thermal Angular Dependency Models Set Version', 'RMIB.3') fid['/Scene Identification/Cloud Cover'] = np.ones(shape=(1237, 1237), 
dtype=np.dtype('uint8'))
     fid['/Scene Identification/Cloud Cover'].attrs['Quantisation Factor'] = np.array(0.01, dtype='float64')
-    dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1)
-    dt.set_size(8)
-    dt.set_strpad(h5py.h5t.STR_NULLTERM)
-    at = h5py.h5a.create(fid['/Scene Identification/Cloud Cover'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR))
-    at.write(np.array(b'Percent', dtype='|S8'))
+    write_h5_null_string_att(fid['/Scene Identification/Cloud Cover'].id, 'Unit', 'Percent')
     fid['/Scene Identification/Cloud Optical Depth (logarithm)'] = \
         np.ones(shape=(1237, 1237), dtype=np.dtype('>i2'))
     fid['/Scene Identification/Cloud Optical Depth (logarithm)'].attrs['Quantisation Factor'] = \
         np.array(0.00025, dtype='float64')
     fid['/Scene Identification/Cloud Phase'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8'))
     fid['/Scene Identification/Cloud Phase'].attrs['Quantisation Factor'] = np.array(0.01, dtype='float64')
-    dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1)
-    dt.set_size(34)
-    dt.set_strpad(h5py.h5t.STR_NULLTERM)
-    at = h5py.h5a.create(fid['/Scene Identification/Cloud Phase'].id, b'Unit', dt, h5py.h5s.create(h5py.h5s.SCALAR))
-    at.write(np.array(b'Percent (Water=0%,Mixed,Ice=100%)', dtype='|S34'))
-    fid['/Scene Identification/Dust Detection'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8'))
-    fid['/Scene Identification/Dust Detection'].attrs['Quantisation Factor'] = np.array(0.01, dtype='float64')
-    fid['/Scene Identification/Solar Angular Dependency Model'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2'))
-    fid['/Scene Identification/Surface Type'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8'))
-    fid['/Scene Identification/Thermal Angular Dependency Model'] = \
-        np.ones(shape=(1237, 1237), dtype=np.dtype('uint8'))
+    write_h5_null_string_att(fid['/Scene Identification/Cloud Phase'].id, 'Unit',
+                             'Percent (Water=0%,Mixed,Ice=100%)')
     fid.create_group('/Times')
     fid['/Times/Time (per row)'] = np.ones(shape=(1237,), dtype=np.dtype('|S22'))

From b29ce40d18a3a7c4a761fcade8c209977e8111ec Mon Sep 17 00:00:00 2001
From: Pierre de Buyl
Date: Wed, 4 Oct 2023 21:01:32 +0200
Subject: [PATCH 0510/1416] add radiance datasets to GERB reader

---
 satpy/etc/readers/gerb_l2_hr_h5.yaml           | 14 ++++++++++++++
 satpy/tests/reader_tests/test_gerb_l2_hr_h5.py |  2 +-
 2 files changed, 15 insertions(+), 1 deletion(-)

diff --git a/satpy/etc/readers/gerb_l2_hr_h5.yaml b/satpy/etc/readers/gerb_l2_hr_h5.yaml
index 2d47c19b8f..0f53a6c92c 100644
--- a/satpy/etc/readers/gerb_l2_hr_h5.yaml
+++ b/satpy/etc/readers/gerb_l2_hr_h5.yaml
@@ -29,3 +29,17 @@ datasets:
     fill_value: -32767
     standard_name: toa_outgoing_longwave_flux
     file_type: gerb_l2_hr_h5
+
+  Solar_Radiance:
+    name: Solar Radiance
+    sensor: gerb
+    units: W m-2
+    fill_value: -32767
+    file_type: gerb_l2_hr_h5
+
+  Thermal_Radiance:
+    name: Thermal Radiance
+    sensor: gerb
+    units: W m-2
+    fill_value: -32767
+    file_type: gerb_l2_hr_h5

diff --git a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py
index 65c8583b19..3dadccb11a 100644
--- a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py
+++ b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py
@@ -119,7 +119,7 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory):
     return filename
 
 
-@pytest.mark.parametrize("name", ["Solar Flux", "Thermal Flux"])
+@pytest.mark.parametrize("name", ["Solar Flux", "Thermal Flux", "Solar Radiance", "Thermal Radiance"])
 def test_dataset_load(gerb_l2_hr_h5_dummy_file, name):
     """Test loading a GERB L2 HR dataset."""
     scene =
Scene(reader='gerb_l2_hr_h5', filenames=[gerb_l2_hr_h5_dummy_file]) From e094049cf4c57fb5d63d574aff8013af9f7b6410 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 4 Oct 2023 21:04:34 +0200 Subject: [PATCH 0511/1416] add missing "sr-1" in units of radiance for GERB reader --- satpy/etc/readers/gerb_l2_hr_h5.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/etc/readers/gerb_l2_hr_h5.yaml b/satpy/etc/readers/gerb_l2_hr_h5.yaml index 0f53a6c92c..7e1ec13fe9 100644 --- a/satpy/etc/readers/gerb_l2_hr_h5.yaml +++ b/satpy/etc/readers/gerb_l2_hr_h5.yaml @@ -33,13 +33,13 @@ datasets: Solar_Radiance: name: Solar Radiance sensor: gerb - units: W m-2 + units: W m-2 sr-1 fill_value: -32767 file_type: gerb_l2_hr_h5 Thermal_Radiance: name: Thermal Radiance sensor: gerb - units: W m-2 + units: W m-2 sr-1 fill_value: -32767 file_type: gerb_l2_hr_h5 From 1123c4d18edb3e9d5926b31d186e867fcd966c22 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 4 Oct 2023 22:10:53 +0200 Subject: [PATCH 0512/1416] fix import order (isort) --- satpy/tests/reader_tests/test_gerb_l2_hr_h5.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py index 3dadccb11a..eb06362831 100644 --- a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py +++ b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py @@ -17,9 +17,10 @@ # satpy. If not, see . """Unit tests for GERB L2 HR HDF5 reader.""" +import h5py import numpy as np import pytest -import h5py + from satpy import Scene FNAME = "G4_SEV4_L20_HR_SOL_TH_20190606_130000_V000.hdf" From 9c3094087408d574246d44feedd185c832545917 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Thu, 5 Oct 2023 09:37:58 +0200 Subject: [PATCH 0513/1416] use xarray dataset from HDF5FileHandler properly in GERB reader --- satpy/readers/gerb_l2_hr_h5.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index 4dad36f0e8..c9a5986894 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -43,7 +43,7 @@ def gerb_get_dataset(hfile, name, ds_info): The routine takes into account the quantisation factor and fill values. 
""" - ds = xr.DataArray(hfile[name][...]) + ds = hfile[name] ds_attrs = hfile[name].attrs ds_fill = ds_info['fill_value'] fill_mask = ds != ds_fill @@ -82,14 +82,10 @@ def get_dataset(self, ds_id, ds_info): raise KeyError(f"{ds_name} is an unknown dataset for this reader.") ds = gerb_get_dataset(self, f'Radiometry/{ds_name}', ds_info) - ds_info = {} - ds_info['start_time'] = self.start_time - ds_info['data_time'] = self.start_time - ds_info['end_time'] = self.end_time + ds.attrs.update({'start_time': self.start_time, 'data_time': self.start_time, 'end_time': self.end_time}) - data = da.from_array(ds) - return xr.DataArray(data, attrs=ds_info, dims=('y', 'x')) + return ds def get_area_def(self, dsid): """Area definition for the GERB product.""" From 2c79989e64b21ed5cefb11741878430b224846e9 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Thu, 5 Oct 2023 10:08:06 +0200 Subject: [PATCH 0514/1416] flake8 --- satpy/readers/gerb_l2_hr_h5.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index c9a5986894..2cd99d359d 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -27,9 +27,7 @@ import logging from datetime import timedelta -import dask.array as da import h5py -import xarray as xr from satpy.readers.hdf5_utils import HDF5FileHandler from satpy.resample import get_area_def From da74ce4d8d0159ca78dfed9ad243452b25243621 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 5 Oct 2023 06:09:31 -0500 Subject: [PATCH 0515/1416] Refactor resolution-based chunking to be more flexible --- satpy/readers/hdfeos_base.py | 11 ++-- satpy/tests/test_utils.py | 98 +++++++++++++++++++++--------------- satpy/utils.py | 89 +++++++++++++++----------------- 3 files changed, 103 insertions(+), 95 deletions(-) diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index 751d286828..56b15b626d 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -32,7 +32,7 @@ from satpy import DataID from satpy.readers.file_handlers import BaseFileHandler -from satpy.utils import chunks_by_resolution +from satpy.utils import normalize_low_res_chunks logger = logging.getLogger(__name__) @@ -227,12 +227,13 @@ def _chunks_for_variable(self, hdf_dataset): scan_length_250m = 40 var_shape = hdf_dataset.info()[2] res_multiplier = self._get_res_multiplier(var_shape) - return chunks_by_resolution( + num_nonyx_dims = len(var_shape) - 2 + return normalize_low_res_chunks( + (1,) * num_nonyx_dims + ("auto", -1), var_shape, + (1,) * num_nonyx_dims + (scan_length_250m, -1), + (1,) * num_nonyx_dims + (res_multiplier, res_multiplier), np.float32, - scan_length_250m, - res_multiplier, - whole_scan_width=True ) @staticmethod diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index b45885f312..987091a16e 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -192,7 +192,7 @@ def test_get_satpos(self, included_prefixes, preference, expected_result): "attrs", ( {}, - {'orbital_parameters': {'projection_longitude': 1}}, + {'orbital_parameters': {'projection_longitude': 1}}, {'satellite_altitude': 1} ) ) @@ -207,16 +207,17 @@ def test_get_satpos_from_satname(self, caplog): import pyorbital.tlefile data_arr = xr.DataArray( - (), - attrs={ - "platform_name": "Meteosat-42", - "sensor": "irives", - "start_time": datetime.datetime(2031, 11, 20, 19, 18, 17)}) + (), + attrs={ + "platform_name": "Meteosat-42", + "sensor": "irives", + "start_time": datetime.datetime(2031, 11, 20, 19, 
18, 17) + }) with mock.patch("pyorbital.tlefile.read") as plr: plr.return_value = pyorbital.tlefile.Tle( - "Meteosat-42", - line1="1 40732U 15034A 22011.84285506 .00000004 00000+0 00000+0 0 9995", - line2="2 40732 0.2533 325.0106 0000976 118.8734 330.4058 1.00272123 23817") + "Meteosat-42", + line1="1 40732U 15034A 22011.84285506 .00000004 00000+0 00000+0 0 9995", + line2="2 40732 0.2533 325.0106 0000976 118.8734 330.4058 1.00272123 23817") with caplog.at_level(logging.WARNING): (lon, lat, alt) = get_satpos(data_arr, use_tle=True) assert "Orbital parameters missing from metadata" in caplog.text @@ -238,13 +239,15 @@ def test_make_fake_scene(): assert make_fake_scene({}).keys() == [] sc = make_fake_scene({ - "six": np.arange(25).reshape(5, 5)}) + "six": np.arange(25).reshape(5, 5) + }) assert len(sc.keys()) == 1 assert sc.keys().pop()['name'] == "six" assert sc["six"].attrs["area"].shape == (5, 5) sc = make_fake_scene({ - "seven": np.arange(3*7).reshape(3, 7), - "eight": np.arange(3*8).reshape(3, 8)}, + "seven": np.arange(3 * 7).reshape(3, 7), + "eight": np.arange(3 * 8).reshape(3, 8) + }, daskify=True, area=False, common_attrs={"repetency": "fourteen hundred per centimetre"}) @@ -254,9 +257,10 @@ def test_make_fake_scene(): assert isinstance(sc["seven"].data, da.Array) sc = make_fake_scene({ "nine": xr.DataArray( - np.arange(2*9).reshape(2, 9), + np.arange(2 * 9).reshape(2, 9), dims=("y", "x"), - attrs={"please": "preserve", "answer": 42})}, + attrs={"please": "preserve", "answer": 42}) + }, common_attrs={"bad words": "semprini bahnhof veerooster winterbanden"}) assert sc["nine"].attrs.keys() >= {"please", "answer", "bad words", "area"} @@ -295,6 +299,7 @@ def depwarn(): DeprecationWarning, stacklevel=2 ) + warnings.filterwarnings("ignore", category=DeprecationWarning) debug_on(False) filts_before = warnings.filters.copy() @@ -417,32 +422,41 @@ def test_get_legacy_chunk_size(): @pytest.mark.parametrize( - ("shape", "chunk_dtype", "num_hr", "lr_mult", "scan_width", "exp_result"), + ("chunks", "shape", "previous_chunks", "lr_mult", "chunk_dtype", "exp_result"), [ - ((1000, 3200), np.float32, 40, 4, True, (160, -1)), # 1km swath - ((1000 // 5, 3200 // 5), np.float32, 40, 20, True, (160 // 5, -1)), # 5km swath - ((1000 * 4, 3200 * 4), np.float32, 40, 1, True, (160 * 4, -1)), # 250m swath - ((21696 // 2, 21696 // 2), np.float32, 226, 2, False, (1469, 1469)), # 1km area (ABI chunk 226) - ((21696 // 2, 21696 // 2), np.float64, 226, 2, False, (1017, 1017)), # 1km area (64-bit) - ((21696 // 3, 21696 // 3), np.float32, 226, 6, False, (1469 // 3, 1469 // 3)), # 3km area - ((21696, 21696), np.float32, 226, 1, False, (1469 * 2, 1469 * 2)), # 500m area - ((7, 1000 * 4, 3200 * 4), np.float32, 40, 1, True, (1, 160 * 4, -1)), # 250m swath with bands - ((1, 7, 1000, 3200), np.float32, 40, 1, True, ((1,), (7,), (1000,), (1198, 1198, 804))), # lots of dimensions + # 1km swath + (("auto", -1), (1000, 3200), (40, 40), (4, 4), np.float32, (160, -1)), + # 5km swath + (("auto", -1), (1000 // 5, 3200 // 5), (40, 40), (20, 20), np.float32, (160 // 5, -1)), + # 250m swath + (("auto", -1), (1000 * 4, 3200 * 4), (40, 40), (1, 1), np.float32, (160 * 4, -1)), + # 1km area (ABI chunk 226): + (("auto", "auto"), (21696 // 2, 21696 // 2), (226, 226), (2, 2), np.float32, (1469, 1469)), + # 1km area (64-bit) + (("auto", "auto"), (21696 // 2, 21696 // 2), (226, 226), (2, 2), np.float64, (1017, 1017)), + # 3km area + (("auto", "auto"), (21696 // 3, 21696 // 3), (226, 226), (6, 6), np.float32, (1469 // 3, 1469 // 3)), + # 500m 
area + (("auto", "auto"), (21696, 21696), (226, 226), (1, 1), np.float32, (1469 * 2, 1469 * 2)), + # 250m swath with bands: + ((1, "auto", -1), (7, 1000 * 4, 3200 * 4), (1, 40, 40), (1, 1, 1), np.float32, (1, 160 * 4, -1)), + # lots of dimensions: + ((1, 1, "auto", -1), (1, 7, 1000, 3200), (1, 1, 40, 40), (1, 1, 1, 1), np.float32, (1, 1, 1000, -1)), ], ) -def test_resolution_chunking(shape, chunk_dtype, num_hr, lr_mult, scan_width, exp_result): - """Test chunks_by_resolution helper function.""" +def test_resolution_chunking(chunks, shape, previous_chunks, lr_mult, chunk_dtype, exp_result): + """Test normalize_low_res_chunks helper function.""" import dask.config - from satpy.utils import chunks_by_resolution + from satpy.utils import normalize_low_res_chunks with dask.config.set({"array.chunk-size": "32MiB"}): - chunk_results = chunks_by_resolution( + chunk_results = normalize_low_res_chunks( + chunks, shape, - chunk_dtype, - num_hr, + previous_chunks, lr_mult, - whole_scan_width=scan_width, + chunk_dtype, ) assert chunk_results == exp_result for chunk_size in chunk_results: @@ -570,19 +584,21 @@ def test_find_in_ancillary(): """Test finding a dataset in ancillary variables.""" from satpy.utils import find_in_ancillary index_finger = xr.DataArray( - data=np.arange(25).reshape(5, 5), - dims=("y", "x"), - attrs={"name": "index-finger"}) + data=np.arange(25).reshape(5, 5), + dims=("y", "x"), + attrs={"name": "index-finger"}) ring_finger = xr.DataArray( - data=np.arange(25).reshape(5, 5), - dims=("y", "x"), - attrs={"name": "ring-finger"}) + data=np.arange(25).reshape(5, 5), + dims=("y", "x"), + attrs={"name": "ring-finger"}) hand = xr.DataArray( - data=np.arange(25).reshape(5, 5), - dims=("y", "x"), - attrs={"name": "hand", - "ancillary_variables": [index_finger, index_finger, ring_finger]}) + data=np.arange(25).reshape(5, 5), + dims=("y", "x"), + attrs={ + "name": "hand", + "ancillary_variables": [index_finger, index_finger, ring_finger] + }) assert find_in_ancillary(hand, "ring-finger") is ring_finger with pytest.raises( diff --git a/satpy/utils.py b/satpy/utils.py index 94f7fbf86e..6b005026d2 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -26,7 +26,7 @@ import warnings from contextlib import contextmanager from copy import deepcopy -from typing import Mapping, Optional +from typing import Literal, Mapping, Optional from urllib.parse import urlparse import dask.utils @@ -633,13 +633,13 @@ def _get_pytroll_chunk_size(): return None -def chunks_by_resolution( +def normalize_low_res_chunks( + chunks: tuple[int | Literal["auto"], ...], input_shape: tuple[int, ...], + previous_chunks: tuple[int, ...], + low_res_multipliers: tuple[int, ...], input_dtype: DTypeLike, - num_high_res_elements: int, - low_res_multiplier: int, - whole_scan_width: bool = False, -) -> tuple[int | tuple[int, ...], ...]: +) -> tuple[int, ...]: """Compute dask chunk sizes based on data resolution. First, chunks are computed for the highest resolution version of the data. @@ -653,16 +653,22 @@ def chunks_by_resolution( geographic region. This also means replicating or aggregating one resolution and then combining arrays should not require any rechunking. - .. note:: - - Only 2 or 3-dimensional shapes are supported. In the case of 3D arrays - the first dimension is assumed to be "bands" and is given a chunk - size of 1. For shapes with other numbers of dimensions, the chunk size - for the entire array is determined by dask's "auto" chunking and - resolution is ignored. 
- Args: + chunks: Requested chunk size for each dimension. This is passed + directly to dask. Use ``"auto"`` for dimensions that should have + chunks determined for them, ``-1`` for dimensions that should be + whole (not chunked), and ``1`` or any other positive integer for + dimensions that have a known chunk size beforehand. input_shape: Shape of the array to compute dask chunk size for. + previous_chunks: Any previous chunking or structure of the data. This + can also be thought of as the smallest number of high (fine) resolution + elements that make up a single "unit" or chunk of data. This could + be a multiple or factor of the scan size for some instruments and/or + could be based on the on-disk chunk size. This value ensures that + chunks are aligned to the underlying data structure for best + performance. + low_res_multipliers: Number of high (fine) resolution pixels that fit + in a single low (coarse) resolution pixel. input_dtype: Dtype for the final unscaled array. This is usually 32-bit float (``np.float32``) or 64-bit float (``np.float64``) for non-category data. If this doesn't represent the final data @@ -671,49 +677,34 @@ def chunks_by_resolution( configuration. Sometimes it is useful to keep this as a single dtype for all reading functionality (ex. ``np.float32``) in order to keep all read variable chunks the same size regardless of dtype. - num_high_res_elements: Smallest number of high (fine) resolution - elements that make up a single "unit" or chunk of data. This could - be a multiple or factor of the scan size for some instruments and/or - could be based on the on-disk chunk size. This value ensures that - chunks are aligned to the underlying data structure for best - performance. - low_res_multiplier: Number of high (fine) resolution pixels that fit - in a single low (coarse) resolution pixel. - whole_scan_width: To create the entire width (x dimension) of the - array as a single chunk. This is useful in cases when future - operations will operate on entire instrument scans of data at - a time. For example, polar-orbiter scan geolocation being - interpolated from low resolution to high resolution. Returns: A tuple where each element is the chunk size for that axis/dimension. 
""" - if len(input_shape) not in (2, 3): - # we're not sure about this shape so don't guess - return dask.array.core.normalize_chunks("auto", shape=input_shape, dtype=input_dtype) - - pre_non_yx_chunks, yx_shape, post_non_yx_chunks = _split_non_yx_chunks(input_shape) - high_res_shape = tuple(dim_size * low_res_multiplier for dim_size in yx_shape) - col_chunks = -1 if whole_scan_width else "auto" + if any(len(input_shape) != len(param) for param in (low_res_multipliers, chunks, previous_chunks)): + raise ValueError("Input shape, low res multipliers, chunks, and previous chunks must all be the same size") + high_res_shape = tuple(dim_size * lr_mult for dim_size, lr_mult in zip(input_shape, low_res_multipliers)) chunks_for_high_res = dask.array.core.normalize_chunks( - ("auto", col_chunks), + chunks, shape=high_res_shape, - dtype=input_dtype - ) - var_row_chunks = _low_res_chunks_from_high_res( - chunks_for_high_res[0][0], - num_high_res_elements, - low_res_multiplier + dtype=input_dtype, ) - var_col_chunks = -1 - if not whole_scan_width: - var_col_chunks = _low_res_chunks_from_high_res( - chunks_for_high_res[1][0], - num_high_res_elements, - low_res_multiplier - ) - return pre_non_yx_chunks + (var_row_chunks, var_col_chunks) + post_non_yx_chunks + low_res_chunks: list[int] = [] + for req_chunks, hr_chunks, prev_chunks, lr_mult in zip( + chunks, + chunks_for_high_res, + previous_chunks, low_res_multipliers + ): + if req_chunks != "auto": + low_res_chunks.append(req_chunks) + continue + low_res_chunks.append(_low_res_chunks_from_high_res( + hr_chunks[0], + prev_chunks, + lr_mult, + )) + return tuple(low_res_chunks) def _split_non_yx_chunks( From 785f1c5b68c14b986c022d18fce41e00a8c3bdbe Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Tue, 3 Oct 2023 23:04:50 +0200 Subject: [PATCH 0516/1416] add meirink calibration method --- satpy/readers/seviri_base.py | 69 +++++++++++++++++++++++++++++++- satpy/readers/seviri_l1b_hrit.py | 1 + 2 files changed, 68 insertions(+), 2 deletions(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 131fe39ad4..c1d288f4df 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -166,7 +166,7 @@ """ import warnings -from datetime import timedelta +from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -353,6 +353,62 @@ 'ALPHA': 0.9981, 'BETA': 0.5635}} +# Calibration coefficients from Meirink, J.F., R.A. Roebeling and P. Stammes, 2013: +# Inter-calibration of polar imager solar channels using SEVIRI, Atm. Meas. Tech., 6, +# 2495-2508, doi:10.5194/amt-6-2495-2013 + +# To obtain the slope for the calibration, one should use the routine get_seviri_meirink_slope + +# Epoch for the MEIRINK re-calibration +DATE_2000 = datetime(2000, 1, 1) + +MEIRINK_COEFS = {} + +# Meteosat-8 + +MEIRINK_COEFS[321] = {'VIS006': (24.346, 0.3739), + 'VIS008': (30.989, 0.3111), + 'IR_016': (22.869, 0.0065) + } + +# Meteosat-9 + +MEIRINK_COEFS[322] = {'VIS006': (21.026, 0.3739), + 'VIS008': (26.875, 0.3111), + 'IR_016': (21.394, 0.0065) + } + +# Meteosat-10 + +MEIRINK_COEFS[323] = {'VIS006': (19.829, 0.5856), + 'VIS008': (25.284, 0.6787), + 'IR_016': (23.066, -0.0286) + } + +# Meteosat-11 + +MEIRINK_COEFS[324] = {'VIS006': (20.515, 0.3600), + 'VIS008': (25.803, 0.4844), + 'IR_016': (22.354, -0.0187) + } + + +def get_meirink_slope(meirink_coefs, acquisition_time): + """Compute the slope for the visible channel calibration according to Meirink 2013. 
+ + S = A + B * 1.e-3* Day + + S is here in µW m-2 sr-1 (cm-1)-1 + + EUMETSAT calibration is given in mW m-2 sr-1 (cm-1)-1, so an extra factor of 1/1000 must + be applied. + """ + A = meirink_coefs[0] + B = meirink_coefs[1] + delta_t = (acquisition_time - DATE_2000).total_seconds() + S = A + B * delta_t / (3600*24) / 1000. + return S/1000 + def get_cds_time(days, msecs): """Compute timestamp given the days since epoch and milliseconds of the day. @@ -559,6 +615,11 @@ def __init__(self, platform_id, channel_name, coefs, calib_mode, scan_time): self._platform_id = platform_id self._channel_name = channel_name self._coefs = coefs + if channel_name in ['VIS006', 'VIS008', 'IR_016']: + self._coefs['coefs']['MEIRINK'] = MEIRINK_COEFS[platform_id][channel_name] + else: + self._coefs['coefs']['MEIRINK'] = None + self._calib_mode = calib_mode.upper() self._scan_time = scan_time self._algo = SEVIRICalibrationAlgorithm( @@ -566,7 +627,7 @@ def __init__(self, platform_id, channel_name, coefs, calib_mode, scan_time): scan_time=self._scan_time ) - valid_modes = ('NOMINAL', 'GSICS') + valid_modes = ('NOMINAL', 'GSICS', 'MEIRINK') if self._calib_mode not in valid_modes: raise ValueError( 'Invalid calibration mode: {}. Choose one of {}'.format( @@ -622,6 +683,10 @@ def get_gain_offset(self): internal_gain = gsics_gain internal_offset = gsics_offset + if self._calib_mode == 'MEIRINK': + if coefs['MEIRINK'] is not None: + internal_gain = get_meirink_slope(coefs['MEIRINK'], self._scan_time) + # Override with external coefficients, if any. gain = coefs['EXTERNAL'].get('gain', internal_gain) offset = coefs['EXTERNAL'].get('offset', internal_offset) diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 2b153edfcc..4480afdbfb 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -247,6 +247,7 @@ mask_bad_quality, pad_data_horizontally, round_nom_time, + MEIRINK_CALIB, ) from satpy.readers.seviri_l1b_native_hdr import hrit_epilogue, hrit_prologue, impf_configuration from satpy.utils import get_legacy_chunk_size From dfe1dc9d169d08aac16d63acffad1c4ce81c3acc Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 5 Oct 2023 06:51:37 -0500 Subject: [PATCH 0517/1416] Ensure resolution chunks are disk-chunk aligned --- satpy/tests/test_utils.py | 6 +++--- satpy/utils.py | 18 ++---------------- 2 files changed, 5 insertions(+), 19 deletions(-) diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index 987091a16e..4b22d51f93 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -431,13 +431,13 @@ def test_get_legacy_chunk_size(): # 250m swath (("auto", -1), (1000 * 4, 3200 * 4), (40, 40), (1, 1), np.float32, (160 * 4, -1)), # 1km area (ABI chunk 226): - (("auto", "auto"), (21696 // 2, 21696 // 2), (226, 226), (2, 2), np.float32, (1469, 1469)), + (("auto", "auto"), (21696 // 2, 21696 // 2), (226, 226), (2, 2), np.float32, (1356, 1356)), # 1km area (64-bit) (("auto", "auto"), (21696 // 2, 21696 // 2), (226, 226), (2, 2), np.float64, (1017, 1017)), # 3km area - (("auto", "auto"), (21696 // 3, 21696 // 3), (226, 226), (6, 6), np.float32, (1469 // 3, 1469 // 3)), + (("auto", "auto"), (21696 // 3, 21696 // 3), (226, 226), (6, 6), np.float32, (1356 // 3, 1356 // 3)), # 500m area - (("auto", "auto"), (21696, 21696), (226, 226), (1, 1), np.float32, (1469 * 2, 1469 * 2)), + (("auto", "auto"), (21696, 21696), (226, 226), (1, 1), np.float32, (1356 * 2, 1356 * 2)), # 250m swath with bands: ((1, "auto", -1), (7, 1000 * 4, 3200 * 4), (1, 
40, 40), (1, 1, 1), np.float32, (1, 160 * 4, -1)), # lots of dimensions: diff --git a/satpy/utils.py b/satpy/utils.py index 6b005026d2..73257d0e4f 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -689,6 +689,7 @@ def normalize_low_res_chunks( chunks, shape=high_res_shape, dtype=input_dtype, + previous_chunks=previous_chunks, ) low_res_chunks: list[int] = [] for req_chunks, hr_chunks, prev_chunks, lr_mult in zip( @@ -699,11 +700,7 @@ def normalize_low_res_chunks( if req_chunks != "auto": low_res_chunks.append(req_chunks) continue - low_res_chunks.append(_low_res_chunks_from_high_res( - hr_chunks[0], - prev_chunks, - lr_mult, - )) + low_res_chunks.append(int(max(hr_chunks[0] / lr_mult, prev_chunks / lr_mult))) return tuple(low_res_chunks) @@ -719,17 +716,6 @@ def _split_non_yx_chunks( return pre_non_yx_chunks, yx_shape, post_non_yx_chunks -def _low_res_chunks_from_high_res( - chunk_size_for_high_res: int, - num_high_res_elements: int, - low_res_multiplier: int -) -> int: - aligned_chunk_size = np.round(chunk_size_for_high_res / num_high_res_elements) * num_high_res_elements - low_res_chunk_size = aligned_chunk_size / low_res_multiplier - # avoid getting 0 chunk size - return int(max(low_res_chunk_size, num_high_res_elements / low_res_multiplier)) - - def convert_remote_files_to_fsspec(filenames, storage_options=None): """Check filenames for transfer protocols, convert to FSFile objects if possible.""" if storage_options is None: From 04b0161f3239fcc608faa5a885307e17031db3ba Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 5 Oct 2023 06:56:52 -0500 Subject: [PATCH 0518/1416] Add lower-limit of on-disk chunk size for resolution-based chunking --- satpy/tests/test_utils.py | 4 ++-- satpy/utils.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index 4b22d51f93..d25b307675 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -426,8 +426,8 @@ def test_get_legacy_chunk_size(): [ # 1km swath (("auto", -1), (1000, 3200), (40, 40), (4, 4), np.float32, (160, -1)), - # 5km swath - (("auto", -1), (1000 // 5, 3200 // 5), (40, 40), (20, 20), np.float32, (160 // 5, -1)), + # 5km swath - 160 / 5 == 32 which is smaller than our on-disk chunk size of 40 + (("auto", -1), (1000 // 5, 3200 // 5), (40, 40), (20, 20), np.float32, (40, -1)), # 250m swath (("auto", -1), (1000 * 4, 3200 * 4), (40, 40), (1, 1), np.float32, (160 * 4, -1)), # 1km area (ABI chunk 226): diff --git a/satpy/utils.py b/satpy/utils.py index 73257d0e4f..7587617165 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -700,7 +700,7 @@ def normalize_low_res_chunks( if req_chunks != "auto": low_res_chunks.append(req_chunks) continue - low_res_chunks.append(int(max(hr_chunks[0] / lr_mult, prev_chunks / lr_mult))) + low_res_chunks.append(int(max(hr_chunks[0] / lr_mult, prev_chunks))) return tuple(low_res_chunks) From 984a7331ecccabb5fd976d8f5899e7eb91ea5e7a Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Thu, 5 Oct 2023 15:00:56 +0200 Subject: [PATCH 0519/1416] add test for the slope of the Meirink coefficients --- satpy/tests/reader_tests/test_seviri_base.py | 32 ++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index 157ed88bbf..f7c8525ab7 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -37,6 +37,9 @@ pad_data_horizontally, pad_data_vertically, round_nom_time, + 
SEVIRICalibrationHandler, + MEIRINK_COEFS, + DATE_2000, ) from satpy.utils import get_legacy_chunk_size @@ -358,3 +361,32 @@ def test_get_orbit_polynomial_exceptions(self, orbit_polynomials, time): finder = OrbitPolynomialFinder(orbit_polynomials) with pytest.raises(NoValidOrbitParams): finder.get_orbit_polynomial(time=time) + + +class TestMeirinkSlope: + """Unit tests for the slope of Meirink calibration.""" + + @pytest.mark.parametrize('platform_id', [321, 322, 323, 324]) + @pytest.mark.parametrize('channel_name', ['VIS006', 'VIS008', 'IR_016']) + def test_get_meirink_slope_epoch(self, platform_id, channel_name): + """Test the value of the slope of the Meirink calibration on 2000-01-01.""" + coefs = {'coefs': {}} + coefs['coefs']['NOMINAL'] = {'gain': -1, 'offset': -1} + coefs['coefs']['EXTERNAL'] = {} + calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK', DATE_2000) + assert calibration_handler.get_gain_offset()[0] == MEIRINK_COEFS[platform_id][channel_name][0]/1000. + + @pytest.mark.parametrize('platform_id', [321, 322, 323, 324]) + @pytest.mark.parametrize('channel_name', ['VIS006', 'VIS008', 'IR_016']) + def test_get_meirink_slope_2020(self, platform_id, channel_name): + """Test the value of the slope of the Meirink calibration on 2020-01-01.""" + DATE_2020 = datetime(2020, 1, 1) + coefs = {'coefs': {}} + coefs['coefs']['NOMINAL'] = {'gain': -1, 'offset': -1} + coefs['coefs']['EXTERNAL'] = {} + calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK', DATE_2020) + A, B = MEIRINK_COEFS[platform_id][channel_name] + delta_t = (DATE_2020 - DATE_2000).total_seconds() + S = A + B * delta_t / (3600*24) / 1000. + S = S/1000 + assert calibration_handler.get_gain_offset()[0] == S From a9521ef3f9af36a7bd7aa8686443649c8f2e12a5 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Thu, 5 Oct 2023 16:45:28 +0200 Subject: [PATCH 0520/1416] remove un-necessary import --- satpy/readers/seviri_l1b_hrit.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 4480afdbfb..2b153edfcc 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -247,7 +247,6 @@ mask_bad_quality, pad_data_horizontally, round_nom_time, - MEIRINK_CALIB, ) from satpy.readers.seviri_l1b_native_hdr import hrit_epilogue, hrit_prologue, impf_configuration from satpy.utils import get_legacy_chunk_size From 2036251d4b868bda8b51b13c8bf3a8a1e6e881dd Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 5 Oct 2023 13:03:17 -0500 Subject: [PATCH 0521/1416] Add info about handling on-disk chunking --- .../modis_tests/_modis_fixtures.py | 2 +- satpy/tests/test_utils.py | 14 +++++++----- satpy/utils.py | 22 +++++++------------ 3 files changed, 17 insertions(+), 21 deletions(-) diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index dfc8f0aec6..49331f5421 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -62,7 +62,7 @@ def _shape_for_resolution(resolution: int) -> tuple[int, int]: return factor * shape_1km[0], factor * shape_1km[1] -def _generate_lonlat_data(resolution: int) -> np.ndarray: +def _generate_lonlat_data(resolution: int) -> tuple[np.ndarray, np.ndarray]: shape = _shape_for_resolution(resolution) lat = np.repeat(np.linspace(35., 45., shape[0])[:, None], shape[1], 1) lat *= np.linspace(0.9, 1.1, shape[1]) 
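
The chunk alignment this patch documents can be sketched with the values from
the updated test table below. This is an illustration only, assuming the
32 MiB dask chunk limit that the tests set; normalize_low_res_chunks and its
argument order are those defined in satpy/utils.py earlier in this series.

    import dask.config
    import numpy as np

    from satpy.utils import normalize_low_res_chunks

    # A 1 km grid of 10848 x 10848 pixels whose files are stored with
    # 226-row chunks at the 500 m base resolution: the on-disk chunk size
    # is pre-multiplied by the largest low-res multiplier (4).
    with dask.config.set({"array.chunk-size": "32MiB"}):
        chunks = normalize_low_res_chunks(
            ("auto", "auto"),          # let dask size both dimensions
            (21696 // 2, 21696 // 2),  # input shape of the 1 km array
            (226 * 4, 226 * 4),        # previous (on-disk) chunks, pre-scaled
            (2, 2),                    # two 500 m pixels per 1 km pixel
            np.float32,
        )
    assert chunks == (1356, 1356)      # three aligned chunks of 226 * 4 / 2 rows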
diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index d25b307675..ef6a359cdd 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -426,18 +426,20 @@ def test_get_legacy_chunk_size(): [ # 1km swath (("auto", -1), (1000, 3200), (40, 40), (4, 4), np.float32, (160, -1)), - # 5km swath - 160 / 5 == 32 which is smaller than our on-disk chunk size of 40 - (("auto", -1), (1000 // 5, 3200 // 5), (40, 40), (20, 20), np.float32, (40, -1)), + # 5km swath + (("auto", -1), (1000 // 5, 3200 // 5), (40, 40), (20, 20), np.float32, (160 / 5, -1)), # 250m swath (("auto", -1), (1000 * 4, 3200 * 4), (40, 40), (1, 1), np.float32, (160 * 4, -1)), # 1km area (ABI chunk 226): - (("auto", "auto"), (21696 // 2, 21696 // 2), (226, 226), (2, 2), np.float32, (1356, 1356)), + (("auto", "auto"), (21696 // 2, 21696 // 2), (226*4, 226*4), (2, 2), np.float32, (1356, 1356)), # 1km area (64-bit) - (("auto", "auto"), (21696 // 2, 21696 // 2), (226, 226), (2, 2), np.float64, (1017, 1017)), + (("auto", "auto"), (21696 // 2, 21696 // 2), (226*4, 226*4), (2, 2), np.float64, (904, 904)), # 3km area - (("auto", "auto"), (21696 // 3, 21696 // 3), (226, 226), (6, 6), np.float32, (1356 // 3, 1356 // 3)), + (("auto", "auto"), (21696 // 3, 21696 // 3), (226*4, 226*4), (6, 6), np.float32, (452, 452)), # 500m area - (("auto", "auto"), (21696, 21696), (226, 226), (1, 1), np.float32, (1356 * 2, 1356 * 2)), + (("auto", "auto"), (21696, 21696), (226*4, 226*4), (1, 1), np.float32, (1356 * 2, 1356 * 2)), + # 500m area (64-bit) + (("auto", "auto"), (21696, 21696), (226*4, 226*4), (1, 1), np.float64, (904 * 2, 904 * 2)), # 250m swath with bands: ((1, "auto", -1), (7, 1000 * 4, 3200 * 4), (1, 40, 40), (1, 1, 1), np.float32, (1, 160 * 4, -1)), # lots of dimensions: diff --git a/satpy/utils.py b/satpy/utils.py index 7587617165..67150fed9d 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -666,7 +666,13 @@ def normalize_low_res_chunks( be a multiple or factor of the scan size for some instruments and/or could be based on the on-disk chunk size. This value ensures that chunks are aligned to the underlying data structure for best - performance. + performance. On-disk chunk sizes should be multiplied by the + largest low resolution multiplier if it is the same between all + files (ex. 500m file has 226 chunk size, 1km file has 226 chunk + size, etc).. Otherwise, the resulting low resolution chunks may + not be aligned to the on-disk chunks. For example, if dask decides + on a chunk size of 226 * 3 for 500m data, that becomes 226 * 3 / 2 + for 1km data which is not aligned to the on-disk chunk size of 226. low_res_multipliers: Number of high (fine) resolution pixels that fit in a single low (coarse) resolution pixel. input_dtype: Dtype for the final unscaled array. This is usually @@ -700,22 +706,10 @@ def normalize_low_res_chunks( if req_chunks != "auto": low_res_chunks.append(req_chunks) continue - low_res_chunks.append(int(max(hr_chunks[0] / lr_mult, prev_chunks))) + low_res_chunks.append(round(max(hr_chunks[0] / lr_mult, prev_chunks / lr_mult))) return tuple(low_res_chunks) -def _split_non_yx_chunks( - input_shape: tuple[int, ...], -) -> tuple[tuple[int, ...] | tuple[()], tuple[int, int], tuple[int, ...] | tuple[()]]: - pre_non_yx_chunks: tuple[int, ...] = tuple() - post_non_yx_chunks: tuple[int, ...] 
= tuple()
-    yx_shape = (input_shape[-2], input_shape[-1])
-    if len(input_shape) == 3:
-        # assume (band, y, x)
-        pre_non_yx_chunks = (1,)
-    return pre_non_yx_chunks, yx_shape, post_non_yx_chunks
-
-
 def convert_remote_files_to_fsspec(filenames, storage_options=None):
     """Check filenames for transfer protocols, convert to FSFile objects if possible."""
     if storage_options is None:

From f5ddc6e11af8924c7a859651dd5ec1243e05408d Mon Sep 17 00:00:00 2001
From: Pierre de Buyl
Date: Thu, 5 Oct 2023 23:41:17 +0200
Subject: [PATCH 0522/1416] simplify GERB reader

---
 satpy/readers/gerb_l2_hr_h5.py | 14 +++-----------
 1 file changed, 3 insertions(+), 11 deletions(-)

diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py
index 2cd99d359d..64f3fb8d71 100644
--- a/satpy/readers/gerb_l2_hr_h5.py
+++ b/satpy/readers/gerb_l2_hr_h5.py
@@ -35,14 +35,13 @@
 LOG = logging.getLogger(__name__)
 
 
-def gerb_get_dataset(hfile, name, ds_info):
+def gerb_get_dataset(ds, ds_info):
     """
-    Load a GERB dataset in memory from a HDF5 file or HDF5FileHandler.
+    Load a GERB dataset in memory from an HDF5 dataset.
 
     The routine takes into account the quantisation factor and fill values.
     """
-    ds = hfile[name]
-    ds_attrs = hfile[name].attrs
+    ds_attrs = ds.attrs
     ds_fill = ds_info['fill_value']
     fill_mask = ds != ds_fill
@@ -56,13 +55,6 @@ class GERB_HR_FileHandler(HDF5FileHandler):
     """File handler for GERB L2 High Resolution H5 files."""
 
-    def __init__(self, filename, filename_info, filetype_info):
-        """Init the file handler."""
-        super(GERB_HR_FileHandler, self).__init__(filename,
-                                                  filename_info,
-                                                  filetype_info)
-        self._h5fh = h5py.File(self.filename, 'r')
-
     @property
     def end_time(self):
         """Get end time."""
@@ -79,7 +71,7 @@ def get_dataset(self, ds_id, ds_info):
         if ds_name not in ['Solar Flux', 'Thermal Flux', 'Solar Radiance', 'Thermal Radiance']:
             raise KeyError(f"{ds_name} is an unknown dataset for this reader.")
 
-        ds = gerb_get_dataset(self, f'Radiometry/{ds_name}', ds_info)
+        ds = gerb_get_dataset(self[f'Radiometry/{ds_name}'], ds_info)
 
         ds.attrs.update({'start_time': self.start_time, 'data_time': self.start_time, 'end_time': self.end_time})
 
         return ds

From 39c7d56df8d010ffa37895883bf8a8c351cd48df Mon Sep 17 00:00:00 2001
From: Pierre de Buyl
Date: Thu, 5 Oct 2023 23:45:25 +0200
Subject: [PATCH 0523/1416] flake8

---
 satpy/readers/gerb_l2_hr_h5.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py
index 64f3fb8d71..f663b3040f 100644
--- a/satpy/readers/gerb_l2_hr_h5.py
+++ b/satpy/readers/gerb_l2_hr_h5.py
@@ -27,8 +27,6 @@
 import logging
 from datetime import timedelta
 
-import h5py
-
 from satpy.readers.hdf5_utils import HDF5FileHandler
 from satpy.resample import get_area_def

From e6e5c6e5f0085c8c8216951b80a46a468b0f75a2 Mon Sep 17 00:00:00 2001
From: David Hoese
Date: Mon, 2 Oct 2023 15:44:47 -0500
Subject: [PATCH 0524/1416] Fix sunz correction converting 32-bit floats to
 64-bit floats

Also fixes that input data types were inconsistent between dask arrays
and computed numpy results.
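
The pattern at the heart of this fix can be shown in isolation. A minimal
sketch, assuming only numpy: the cosine of the solar zenith angle is kept in
float64 (the lon/lat precision requires it), so the correction factor is cast
back to the image dtype before it multiplies the data.

    import numpy as np

    data = np.ones((2, 2), dtype=np.float32)           # 32-bit image data
    cos_zen = np.full((2, 2), 0.5, dtype=np.float64)   # 64-bit angle values

    # Without the cast, data * (1. / cos_zen) silently becomes float64.
    corr = (1. / cos_zen).astype(data.dtype, copy=False)
    result = data * corr
    assert result.dtype == np.float32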
--- satpy/modifiers/angles.py | 9 +++++++-- satpy/tests/test_modifiers.py | 10 ++++++++-- 2 files changed, 15 insertions(+), 4 deletions(-) diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index 28adb60028..47aedcea4d 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -399,6 +399,7 @@ def get_cos_sza(data_arr: xr.DataArray) -> xr.DataArray: @cache_to_zarr_if("cache_lonlats", sanitize_args_func=_sanitize_args_with_chunks) def _get_valid_lonlats(area: PRGeometry, chunks: Union[int, str, tuple] = "auto") -> tuple[da.Array, da.Array]: with ignore_invalid_float_warnings(): + # NOTE: This defaults to 64-bit floats due to needed precision for X/Y coordinates lons, lats = area.get_lonlats(chunks=chunks) lons = da.where(lons >= 1e30, np.nan, lons) lats = da.where(lats >= 1e30, np.nan, lats) @@ -526,7 +527,7 @@ def _sunzen_corr_cos_ndarray(data: np.ndarray, max_sza_rad = np.deg2rad(max_sza) if max_sza is not None else max_sza # Cosine correction - corr = 1. / cos_zen + corr = (1. / cos_zen).astype(data.dtype, copy=False) if max_sza is not None: # gradually fall off for larger zenith angle grad_factor = (np.arccos(cos_zen) - limit_rad) / (max_sza_rad - limit_rad) @@ -538,7 +539,11 @@ def _sunzen_corr_cos_ndarray(data: np.ndarray, else: # Use constant value (the limit) for larger zenith angles grad_factor = 1. - corr = np.where(cos_zen > limit_cos, corr, grad_factor / limit_cos) + corr = np.where( + cos_zen > limit_cos, + corr, + (grad_factor / limit_cos).astype(data.dtype, copy=False) + ) # Force "night" pixels to 0 (where SZA is invalid) corr[np.isnan(cos_zen)] = 0 return data * corr diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index c21a514808..ec69a7e05e 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -110,9 +110,13 @@ def sunz_sza(): class TestSunZenithCorrector: """Test case for the zenith corrector.""" - def test_basic_default_not_provided(self, sunz_ds1): + @pytest.mark.parametrize("as_32bit", [False, True]) + def test_basic_default_not_provided(self, sunz_ds1, as_32bit): """Test default limits when SZA isn't provided.""" from satpy.modifiers.geometry import SunZenithCorrector + + if as_32bit: + sunz_ds1 = sunz_ds1.astype(np.float32) comp = SunZenithCorrector(name='sza_test', modifiers=tuple()) res = comp((sunz_ds1,), test_attr='test') np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) @@ -120,7 +124,9 @@ def test_basic_default_not_provided(self, sunz_ds1): assert 'x' in res.coords ds1 = sunz_ds1.copy().drop_vars(('y', 'x')) res = comp((ds1,), test_attr='test') - np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) + res_np = res.compute() + np.testing.assert_allclose(res_np.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) + assert res.dtype == res_np.dtype assert 'y' not in res.coords assert 'x' not in res.coords From 72379235805731ded6b28665eab9d7c86e6c7de2 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 2 Oct 2023 15:57:24 -0500 Subject: [PATCH 0525/1416] Update AHI HSD reader to have resolution-based chunking --- satpy/readers/ahi_hsd.py | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index e06f7ebc50..12032e8395 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -78,7 +78,7 @@ np2str, unzip_file, ) -from satpy.utils import get_chunk_size_limit +from 
satpy.utils import chunks_by_resolution AHI_CHANNEL_NAMES = ("1", "2", "3", "4", "5", "6", "7", "8", "9", "10", @@ -615,15 +615,16 @@ def _read_header(self, fp_): return header - def _read_data(self, fp_, header): + def _read_data(self, fp_, header, resolution): """Read data block.""" nlines = int(header["block2"]['number_of_lines'][0]) ncols = int(header["block2"]['number_of_columns'][0]) - chunks = da.core.normalize_chunks("auto", - shape=(nlines, ncols), - limit=get_chunk_size_limit(), - dtype='f8', - previous_chunks=(550, 550)) + chunks = chunks_by_resolution( + (nlines, ncols), + np.float32, + 550, + int(resolution / 500), + ) return da.from_array(np.memmap(self.filename, offset=fp_.tell(), dtype=' Date: Mon, 2 Oct 2023 20:02:59 -0500 Subject: [PATCH 0526/1416] Fix AHI tests now that resolution is properly used --- satpy/tests/reader_tests/test_ahi_hsd.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 9832c3eb29..82ef5b905b 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -293,11 +293,12 @@ def test_bad_calibration(self): def test_actual_satellite_position(self, round_actual_position, expected_result): """Test that rounding of the actual satellite position can be controlled.""" with _fake_hsd_handler(fh_kwargs={"round_actual_position": round_actual_position}) as fh: - ds_id = make_dataid(name="B01") + ds_id = make_dataid(name="B01", resolution=1000) ds_info = { "units": "%", "standard_name": "some_name", "wavelength": (0.1, 0.2, 0.3), + "resolution": 1000, } metadata = fh._get_metadata(ds_id, ds_info) orb_params = metadata["orbital_parameters"] @@ -365,10 +366,17 @@ def test_read_band_from_actual_file(self, hsd_file_jp01): filename_info = {"segment": 1, "total_segments": 1} filetype_info = {"file_type": "blahB01"} fh = AHIHSDFileHandler(hsd_file_jp01, filename_info, filetype_info) - key = {"name": "B01", "calibration": "counts"} + key = {"name": "B01", "calibration": "counts", "resolution": 1000} import dask with dask.config.set({"array.chunk-size": "16MiB"}): - data = fh.read_band(key, {"units": "%", "standard_name": "toa_bidirectional_reflectance", "wavelength": 2}) + data = fh.read_band( + key, + { + "units": "%", + "standard_name": "toa_bidirectional_reflectance", + "wavelength": 2, + "resolution": 1000, + }) assert data.chunks == ((1100,) * 10, (1100,) * 10) @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._read_data') From 9169abe37891bde09adca693cef2f21b7e9b2d77 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 2 Oct 2023 20:52:28 -0500 Subject: [PATCH 0527/1416] Add tests for dtype checks --- satpy/tests/reader_tests/test_ahi_hsd.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 82ef5b905b..1a0e44ff4e 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -378,6 +378,8 @@ def test_read_band_from_actual_file(self, hsd_file_jp01): "resolution": 1000, }) assert data.chunks == ((1100,) * 10, (1100,) * 10) + assert data.dtype == data.compute().dtype + assert data.dtype == np.float32 @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._read_data') @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._mask_invalid') @@ -503,8 +505,8 @@ def setUp(self, *mocks): 'cali_offset_count2rad_conversion': [self.upd_cali[1]]}, } - 
self.counts = da.array(np.array([[0., 1000.], - [2000., 5000.]])) + self.counts = da.array(np.array([[0, 1000], + [2000, 5000]], dtype=np.uint16)) self.fh = fh def test_default_calibrate(self, *mocks): @@ -572,7 +574,10 @@ def test_user_calibration(self): self.fh.user_calibration = {'B13': {'slope': 0.95, 'offset': -0.1}} self.fh.band_name = 'B13' - rad = self.fh.calibrate(data=self.counts, calibration='radiance').compute() + rad = self.fh.calibrate(data=self.counts, calibration='radiance') + rad_np = rad.compute() + assert rad.dtype == rad_np.dtype + assert rad.dtype == np.float32 rad_exp = np.array([[16.10526316, 12.21052632], [8.31578947, -3.36842105]]) self.assertTrue(np.allclose(rad, rad_exp)) @@ -582,7 +587,10 @@ def test_user_calibration(self): 'offset': 15.20}, 'type': 'DN'} self.fh.band_name = 'B13' - rad = self.fh.calibrate(data=self.counts, calibration='radiance').compute() + rad = self.fh.calibrate(data=self.counts, calibration='radiance') + rad_np = rad.compute() + assert rad.dtype == rad_np.dtype + assert rad.dtype == np.float32 rad_exp = np.array([[15.2, 12.], [8.8, -0.8]]) self.assertTrue(np.allclose(rad, rad_exp)) From ec20ba9341439a72d77ba9edf3668095baad6755 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 5 Oct 2023 19:23:54 -0500 Subject: [PATCH 0528/1416] Update AHI tests to use new resolution-based chunking normalization --- satpy/readers/ahi_hsd.py | 10 ++++++---- satpy/tests/reader_tests/test_ahi_hsd.py | 2 +- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index 12032e8395..dffafaa97b 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -78,7 +78,7 @@ np2str, unzip_file, ) -from satpy.utils import chunks_by_resolution +from satpy.utils import normalize_low_res_chunks AHI_CHANNEL_NAMES = ("1", "2", "3", "4", "5", "6", "7", "8", "9", "10", @@ -619,11 +619,13 @@ def _read_data(self, fp_, header, resolution): """Read data block.""" nlines = int(header["block2"]['number_of_lines'][0]) ncols = int(header["block2"]['number_of_columns'][0]) - chunks = chunks_by_resolution( + chunks = normalize_low_res_chunks( + ("auto", "auto"), (nlines, ncols), + # 1100 minimum chunk size for 500m, 550 for 1km, 225 for 2km + (1100, 1100), + (int(resolution / 500), int(resolution / 500)), np.float32, - 550, - int(resolution / 500), ) return da.from_array(np.memmap(self.filename, offset=fp_.tell(), dtype=' Date: Fri, 6 Oct 2023 11:18:56 +0200 Subject: [PATCH 0529/1416] fix MEIRINK calibration coefficients (typos from copying the coefficients over) --- satpy/readers/seviri_base.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index c1d288f4df..c54a7586dc 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -373,9 +373,9 @@ # Meteosat-9 -MEIRINK_COEFS[322] = {'VIS006': (21.026, 0.3739), - 'VIS008': (26.875, 0.3111), - 'IR_016': (21.394, 0.0065) +MEIRINK_COEFS[322] = {'VIS006': (21.026, 0.2556), + 'VIS008': (26.875, 0.1835), + 'IR_016': (21.394, 0.0498) } # Meteosat-10 From 0abbb9a10376271c228839f53e59d70037aec413 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Fri, 6 Oct 2023 14:04:45 +0200 Subject: [PATCH 0530/1416] used fixed values to test meirink calibration --- satpy/tests/reader_tests/test_seviri_base.py | 26 +++++++++++--------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py 
index f7c8525ab7..b924929d84 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -376,17 +376,21 @@ def test_get_meirink_slope_epoch(self, platform_id, channel_name): calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK', DATE_2000) assert calibration_handler.get_gain_offset()[0] == MEIRINK_COEFS[platform_id][channel_name][0]/1000. - @pytest.mark.parametrize('platform_id', [321, 322, 323, 324]) - @pytest.mark.parametrize('channel_name', ['VIS006', 'VIS008', 'IR_016']) - def test_get_meirink_slope_2020(self, platform_id, channel_name): - """Test the value of the slope of the Meirink calibration on 2020-01-01.""" - DATE_2020 = datetime(2020, 1, 1) + @pytest.mark.parametrize('platform_id,time,expected', ( + (321, datetime(2005, 1, 18, 0, 0), [0.0250354716, 0.0315626684, 0.022880986]), + (321, datetime(2010, 12, 31, 0, 0), [0.0258479563, 0.0322386887, 0.022895110500000003]), + (322, datetime(2010, 1, 18, 0, 0), [0.021964051999999998, 0.027548445, 0.021576766]), + (322, datetime(2015, 6, 1, 0, 0), [0.022465028, 0.027908105, 0.021674373999999996]), + (323, datetime(2005, 1, 18, 0, 0), [0.0209088464, 0.0265355228, 0.0230132616]), + (323, datetime(2010, 12, 31, 0, 0), [0.022181355200000002, 0.0280103379, 0.0229511138]), + (324, datetime(2010, 1, 18, 0, 0), [0.0218362, 0.027580748, 0.022285370999999998]), + (324, datetime(2015, 6, 1, 0, 0), [0.0225418, 0.028530172, 0.022248718999999997]), + )) + def test_get_meirink_slope_2020(self, platform_id, time, expected): + """Test the value of the slope of the Meirink calibration.""" coefs = {'coefs': {}} coefs['coefs']['NOMINAL'] = {'gain': -1, 'offset': -1} coefs['coefs']['EXTERNAL'] = {} - calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK', DATE_2020) - A, B = MEIRINK_COEFS[platform_id][channel_name] - delta_t = (DATE_2020 - DATE_2000).total_seconds() - S = A + B * delta_t / (3600*24) / 1000. 
- S = S/1000 - assert calibration_handler.get_gain_offset()[0] == S + for i, channel_name in enumerate(['VIS006', 'VIS008', 'IR_016']): + calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK', time) + assert abs(calibration_handler.get_gain_offset()[0] - expected[i]) < 1e-6 From 909fb05bac0235ef80b58b11646495c12f44540e Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Fri, 6 Oct 2023 14:24:41 +0200 Subject: [PATCH 0531/1416] isort --- satpy/tests/reader_tests/test_seviri_base.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index b924929d84..f5ba38cea5 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -26,9 +26,12 @@ import xarray as xr from satpy.readers.seviri_base import ( + DATE_2000, + MEIRINK_COEFS, NoValidOrbitParams, OrbitPolynomial, OrbitPolynomialFinder, + SEVIRICalibrationHandler, chebyshev, dec10216, get_cds_time, @@ -37,9 +40,6 @@ pad_data_horizontally, pad_data_vertically, round_nom_time, - SEVIRICalibrationHandler, - MEIRINK_COEFS, - DATE_2000, ) from satpy.utils import get_legacy_chunk_size From 5c2234223f9e9fb44739830ffb24ce1ac9cf520f Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Fri, 6 Oct 2023 15:22:46 +0200 Subject: [PATCH 0532/1416] fix "multiple spaces after ','" for codefactor --- satpy/tests/reader_tests/test_seviri_base.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index f5ba38cea5..252da43e75 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -377,14 +377,14 @@ def test_get_meirink_slope_epoch(self, platform_id, channel_name): assert calibration_handler.get_gain_offset()[0] == MEIRINK_COEFS[platform_id][channel_name][0]/1000. 
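For reference, the expected slopes hard-coded in the parametrized test (introduced a few commits above and re-indented just below) follow directly from the Meirink formula; one case, Meteosat-9 VIS006 (platform 322) on 2010-01-18, can be cross-checked by hand with the A/B coefficients fixed earlier in this series. Illustrative only, not part of the patch:

from datetime import datetime

A, B = 21.026, 0.2556  # MEIRINK_COEFS[322]['VIS006'] after the coefficient fix above
delta_t = (datetime(2010, 1, 18) - datetime(2000, 1, 1)).total_seconds()
slope = (A + B * delta_t / (3600 * 24) / 1000.) / 1000.
assert abs(slope - 0.021964052) < 1e-9  # matches the (322, 2010-01-18) VIS006 entry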
@pytest.mark.parametrize('platform_id,time,expected', ( - (321, datetime(2005, 1, 18, 0, 0), [0.0250354716, 0.0315626684, 0.022880986]), - (321, datetime(2010, 12, 31, 0, 0), [0.0258479563, 0.0322386887, 0.022895110500000003]), - (322, datetime(2010, 1, 18, 0, 0), [0.021964051999999998, 0.027548445, 0.021576766]), - (322, datetime(2015, 6, 1, 0, 0), [0.022465028, 0.027908105, 0.021674373999999996]), - (323, datetime(2005, 1, 18, 0, 0), [0.0209088464, 0.0265355228, 0.0230132616]), - (323, datetime(2010, 12, 31, 0, 0), [0.022181355200000002, 0.0280103379, 0.0229511138]), - (324, datetime(2010, 1, 18, 0, 0), [0.0218362, 0.027580748, 0.022285370999999998]), - (324, datetime(2015, 6, 1, 0, 0), [0.0225418, 0.028530172, 0.022248718999999997]), + (321, datetime(2005, 1, 18, 0, 0), [0.0250354716, 0.0315626684, 0.022880986]), + (321, datetime(2010, 12, 31, 0, 0), [0.0258479563, 0.0322386887, 0.022895110500000003]), + (322, datetime(2010, 1, 18, 0, 0), [0.021964051999999998, 0.027548445, 0.021576766]), + (322, datetime(2015, 6, 1, 0, 0), [0.022465028, 0.027908105, 0.021674373999999996]), + (323, datetime(2005, 1, 18, 0, 0), [0.0209088464, 0.0265355228, 0.0230132616]), + (323, datetime(2010, 12, 31, 0, 0), [0.022181355200000002, 0.0280103379, 0.0229511138]), + (324, datetime(2010, 1, 18, 0, 0), [0.0218362, 0.027580748, 0.022285370999999998]), + (324, datetime(2015, 6, 1, 0, 0), [0.0225418, 0.028530172, 0.022248718999999997]), )) def test_get_meirink_slope_2020(self, platform_id, time, expected): """Test the value of the slope of the Meirink calibration.""" From 09efba8ef643ae2a0af9ba61778a9bf4c6912a5d Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Mon, 9 Oct 2023 14:18:58 +0200 Subject: [PATCH 0533/1416] add comment with the units of the Meirink coefficients --- satpy/readers/seviri_base.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index c54a7586dc..3f548aa8ea 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -356,7 +356,10 @@ # Calibration coefficients from Meirink, J.F., R.A. Roebeling and P. Stammes, 2013: # Inter-calibration of polar imager solar channels using SEVIRI, Atm. Meas. 
Tech., 6, # 2495-2508, doi:10.5194/amt-6-2495-2013 - +# +# The coefficients are stored in pairs of A, B (see function `get_meirink_slope`) where the +# units of A are µW m-2 sr-1 (cm-1)-1 and those of B are µW m-2 sr-1 (cm-1)-1 (86400 s)-1 +# # To obtain the slope for the calibration, one should use the routine get_seviri_meirink_slope # Epoch for the MEIRINK re-calibration From 3ad23ec0c3a6b84be337c873409520d261e45329 Mon Sep 17 00:00:00 2001 From: "Nina.Hakansson" Date: Mon, 9 Oct 2023 15:07:56 +0200 Subject: [PATCH 0534/1416] Read also proj_time0 --- satpy/etc/readers/viirs_vgac_l1c_nc.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/satpy/etc/readers/viirs_vgac_l1c_nc.yaml b/satpy/etc/readers/viirs_vgac_l1c_nc.yaml index bf3c254f80..039f9629ce 100644 --- a/satpy/etc/readers/viirs_vgac_l1c_nc.yaml +++ b/satpy/etc/readers/viirs_vgac_l1c_nc.yaml @@ -252,3 +252,8 @@ datasets: resolution: 5000 file_type: vgac_nc nc_key: time + + proj_time0: + name: proj_time0 + file_type: vgac_nc + nc_key: proj_time0 \ No newline at end of file From 8193298786f788c81b3fddc8b161679fb982c4d1 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Mon, 9 Oct 2023 16:11:57 +0200 Subject: [PATCH 0535/1416] Use MeirinkCalibrationHandler to manage the coefficients Co-authored-by: Stephan Finkensieper --- satpy/readers/seviri_base.py | 61 +++++++++++++++++++++--------------- 1 file changed, 36 insertions(+), 25 deletions(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 3f548aa8ea..394540cb8c 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -366,34 +366,35 @@ DATE_2000 = datetime(2000, 1, 1) MEIRINK_COEFS = {} +MEIRINK_COEFS['2013'] = {} # Meteosat-8 -MEIRINK_COEFS[321] = {'VIS006': (24.346, 0.3739), - 'VIS008': (30.989, 0.3111), - 'IR_016': (22.869, 0.0065) - } +MEIRINK_COEFS['2013'][321] = {'VIS006': (24.346, 0.3739), + 'VIS008': (30.989, 0.3111), + 'IR_016': (22.869, 0.0065) + } # Meteosat-9 -MEIRINK_COEFS[322] = {'VIS006': (21.026, 0.2556), - 'VIS008': (26.875, 0.1835), - 'IR_016': (21.394, 0.0498) - } +MEIRINK_COEFS['2013'][322] = {'VIS006': (21.026, 0.2556), + 'VIS008': (26.875, 0.1835), + 'IR_016': (21.394, 0.0498) + } # Meteosat-10 -MEIRINK_COEFS[323] = {'VIS006': (19.829, 0.5856), - 'VIS008': (25.284, 0.6787), - 'IR_016': (23.066, -0.0286) - } +MEIRINK_COEFS['2013'][323] = {'VIS006': (19.829, 0.5856), + 'VIS008': (25.284, 0.6787), + 'IR_016': (23.066, -0.0286) + } # Meteosat-11 -MEIRINK_COEFS[324] = {'VIS006': (20.515, 0.3600), - 'VIS008': (25.803, 0.4844), - 'IR_016': (22.354, -0.0187) - } +MEIRINK_COEFS['2013'][324] = {'VIS006': (20.515, 0.3600), + 'VIS008': (25.803, 0.4844), + 'IR_016': (22.354, -0.0187) + } def get_meirink_slope(meirink_coefs, acquisition_time): @@ -413,6 +414,21 @@ def get_meirink_slope(meirink_coefs, acquisition_time): return S/1000 +class MeirinkCalibrationHandler: + """Re-calibration of the SEVIRI visible channels slop (see Meirink 2013).""" + + def __init__(self, coefs=MEIRINK_COEFS, calib_mode=None): + """Initialize the calibration handler.""" + if calib_mode is None: + raise ValueError("Missing calib_mode") + self.coefs = coefs[calib_mode.split('-')[1]] + + def get_slope(self, platform, channel, time): + """Return the slope using the provided calibration coefficients.""" + coefs = self.coefs[platform][channel] + return get_meirink_slope(coefs, time) + + def get_cds_time(days, msecs): """Compute timestamp given the days since epoch and milliseconds of the day. 
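A usage sketch for the new handler (illustrative, assuming the module as patched above): the calib_mode string is expected in the form 'MEIRINK-year', and the suffix selects the coefficient set in MEIRINK_COEFS, leaving room for future re-calibrations to coexist:

from datetime import datetime
from satpy.readers.seviri_base import MeirinkCalibrationHandler

handler = MeirinkCalibrationHandler(calib_mode='MEIRINK-2013')
# platform 322 is Meteosat-9; the result is the time-dependent calibration gain
slope = handler.get_slope(322, 'VIS006', datetime(2015, 6, 1))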
@@ -618,11 +634,6 @@ def __init__(self, platform_id, channel_name, coefs, calib_mode, scan_time): self._platform_id = platform_id self._channel_name = channel_name self._coefs = coefs - if channel_name in ['VIS006', 'VIS008', 'IR_016']: - self._coefs['coefs']['MEIRINK'] = MEIRINK_COEFS[platform_id][channel_name] - else: - self._coefs['coefs']['MEIRINK'] = None - self._calib_mode = calib_mode.upper() self._scan_time = scan_time self._algo = SEVIRICalibrationAlgorithm( @@ -630,7 +641,7 @@ def __init__(self, platform_id, channel_name, coefs, calib_mode, scan_time): scan_time=self._scan_time ) - valid_modes = ('NOMINAL', 'GSICS', 'MEIRINK') + valid_modes = ('NOMINAL', 'GSICS', 'MEIRINK-2013') if self._calib_mode not in valid_modes: raise ValueError( 'Invalid calibration mode: {}. Choose one of {}'.format( @@ -686,9 +697,9 @@ def get_gain_offset(self): internal_gain = gsics_gain internal_offset = gsics_offset - if self._calib_mode == 'MEIRINK': - if coefs['MEIRINK'] is not None: - internal_gain = get_meirink_slope(coefs['MEIRINK'], self._scan_time) + if "MEIRINK" in self._calib_mode and self._channel_name in ['VIS006', 'VIS008', 'IR_016']: + meirink = MeirinkCalibrationHandler(calib_mode=self._calib_mode) + internal_gain = meirink.get_slope(self._platform_id, self._channel_name, self._scan_time) # Override with external coefficients, if any. gain = coefs['EXTERNAL'].get('gain', internal_gain) From 55cba4f24904bef98f56f21f17cf777568575be1 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Mon, 9 Oct 2023 20:56:50 +0100 Subject: [PATCH 0536/1416] Update AHI L2 NOAA tests. --- satpy/tests/reader_tests/test_ahi_l2_nc.py | 27 +++++++++++----------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/satpy/tests/reader_tests/test_ahi_l2_nc.py b/satpy/tests/reader_tests/test_ahi_l2_nc.py index d2e9c24489..ff2b5a3d53 100644 --- a/satpy/tests/reader_tests/test_ahi_l2_nc.py +++ b/satpy/tests/reader_tests/test_ahi_l2_nc.py @@ -2,9 +2,9 @@ from datetime import datetime -import h5netcdf import numpy as np import pytest +import xarray as xr from satpy.readers.ahi_l2_nc import HIML2NCFileHandler from satpy.tests.utils import make_dataid @@ -12,11 +12,13 @@ rng = np.random.default_rng() clmk_data = rng.integers(0, 3, (5500, 5500), dtype=np.uint16) cprob_data = rng.uniform(0, 1, (5500, 5500)) +lat_data = rng.uniform(-90, 90, (5500, 5500)) +lon_data = rng.uniform(-180, 180, (5500, 5500)) start_time = datetime(2023, 8, 24, 5, 40, 21) end_time = datetime(2023, 8, 24, 5, 49, 40) -dimensions = {'X': 5500, 'Y': 5500} +dimensions = {'Columns': 5500, 'Rows': 5500} exp_ext = (-5499999.9012, -5499999.9012, 5499999.9012, 5499999.9012) @@ -43,12 +45,11 @@ def ahil2_filehandler(fname, platform='h09'): def himl2_filename(tmp_path_factory): """Create a fake himawari l2 file.""" fname = f'{tmp_path_factory.mktemp("data")}/AHI-CMSK_v1r1_h09_s202308240540213_e202308240549407_c202308240557548.nc' - with h5netcdf.File(fname, mode="w") as h5f: - h5f.dimensions = dimensions - h5f.attrs.update(global_attrs) - var = h5f.create_variable("CloudMask", ("Y", "X"), np.uint16, chunks=(200, 200)) - var[:] = clmk_data - + ds = xr.Dataset({'CloudMask': (['Rows', 'Columns'], clmk_data)}, + coords={'Latitude': (['Rows', 'Columns'], lat_data), + 'Longitude': (['Rows', 'Columns'], lon_data)}, + attrs=global_attrs) + ds.to_netcdf(fname) return fname @@ -56,11 +57,11 @@ def himl2_filename(tmp_path_factory): def himl2_filename_bad(tmp_path_factory): """Create a fake himawari l2 file.""" fname = 
f'{tmp_path_factory.mktemp("data")}/AHI-CMSK_v1r1_h09_s202308240540213_e202308240549407_c202308240557548.nc' - with h5netcdf.File(fname, mode="w") as h5f: - h5f.dimensions = dimensions - h5f.attrs.update(badarea_attrs) - var = h5f.create_variable("CloudMask", ("Y", "X"), np.uint16, chunks=(200, 200)) - var[:] = clmk_data + ds = xr.Dataset({'CloudMask': (['Rows', 'Columns'], clmk_data)}, + coords={'Latitude': (['Rows', 'Columns'], lat_data), + 'Longitude': (['Rows', 'Columns'], lon_data)}, + attrs=badarea_attrs) + ds.to_netcdf(fname) return fname From f2410b55ae1c48c733c24ece3e585d682fe5f797 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 9 Oct 2023 20:27:20 +0000 Subject: [PATCH 0537/1416] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/pre-commit-hooks: v4.4.0 → v4.5.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.4.0...v4.5.0) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3ce81859ae..cd26d096fe 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,7 +8,7 @@ repos: additional_dependencies: [flake8-docstrings, flake8-debugger, flake8-bugbear, mccabe] args: [--max-complexity, "10"] - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.5.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer From ad1b690d01609b2b50692dda3cebca14de2adf12 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Mon, 9 Oct 2023 23:22:17 +0200 Subject: [PATCH 0538/1416] Move criterion for application of meirink calibration in method Also fix typo --- satpy/readers/seviri_base.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 394540cb8c..f41b3cec40 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -414,8 +414,14 @@ def get_meirink_slope(meirink_coefs, acquisition_time): return S/1000 +def should_apply_meirink(calib_mode, channel_name): + """Decide whether to use the Meirink calibration coefficients.""" + + return "MEIRINK" in calib_mode and channel_name in ['VIS006', 'VIS008', 'IR_016'] + + class MeirinkCalibrationHandler: - """Re-calibration of the SEVIRI visible channels slop (see Meirink 2013).""" + """Re-calibration of the SEVIRI visible channels slope (see Meirink 2013).""" def __init__(self, coefs=MEIRINK_COEFS, calib_mode=None): """Initialize the calibration handler.""" @@ -697,7 +703,7 @@ def get_gain_offset(self): internal_gain = gsics_gain internal_offset = gsics_offset - if "MEIRINK" in self._calib_mode and self._channel_name in ['VIS006', 'VIS008', 'IR_016']: + if should_apply_meirink(self._calib_mode, self._channel_name): meirink = MeirinkCalibrationHandler(calib_mode=self._calib_mode) internal_gain = meirink.get_slope(self._platform_id, self._channel_name, self._scan_time) From 65503a00ae7cfa7492f811fd6eac7a5de2012676 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Tue, 10 Oct 2023 10:12:56 +0200 Subject: [PATCH 0539/1416] flake8 --- satpy/readers/seviri_base.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index f41b3cec40..bd4ce962ad 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -416,7 +416,6 @@ def get_meirink_slope(meirink_coefs, 
acquisition_time): def should_apply_meirink(calib_mode, channel_name): """Decide whether to use the Meirink calibration coefficients.""" - return "MEIRINK" in calib_mode and channel_name in ['VIS006', 'VIS008', 'IR_016'] From 0976ba3ed536292091a39b827926aafa99c3ca3f Mon Sep 17 00:00:00 2001 From: "Nina.Hakansson" Date: Tue, 10 Oct 2023 14:42:52 +0200 Subject: [PATCH 0540/1416] Rename time to scanline_timestamp and decode it --- satpy/etc/readers/viirs_vgac_l1c_nc.yaml | 4 ++-- satpy/readers/viirs_vgac_l1c_nc.py | 27 ++++++++++++++++++++++++ 2 files changed, 29 insertions(+), 2 deletions(-) diff --git a/satpy/etc/readers/viirs_vgac_l1c_nc.yaml b/satpy/etc/readers/viirs_vgac_l1c_nc.yaml index 039f9629ce..875cdfa2c5 100644 --- a/satpy/etc/readers/viirs_vgac_l1c_nc.yaml +++ b/satpy/etc/readers/viirs_vgac_l1c_nc.yaml @@ -247,8 +247,8 @@ datasets: units: degrees_east nc_key: lon - time: - name: time + scanline_timestamps: + name: scanline_timestamps resolution: 5000 file_type: vgac_nc nc_key: time diff --git a/satpy/readers/viirs_vgac_l1c_nc.py b/satpy/readers/viirs_vgac_l1c_nc.py index e4a29c27f1..d85c44d04f 100644 --- a/satpy/readers/viirs_vgac_l1c_nc.py +++ b/satpy/readers/viirs_vgac_l1c_nc.py @@ -73,6 +73,32 @@ def set_time_attrs(self, data): self._end_time = data.attrs["end_time"] self._start_time = data.attrs["start_time"] + def dt64_to_datetime(self, dt64): + """Conversion of numpy.datetime64 to datetime objects.""" + # https://stackoverflow.com/questions/13703720/converting-between-datetime-timestamp-and-datetime64/46921593#46921593 + if type(dt64) == np.datetime64: + unix_epoch = np.datetime64(0, 's') + one_second = np.timedelta64(1, 's') + seconds_since_epoch = (dt64 - unix_epoch) / one_second + dt = datetime.utcfromtimestamp(seconds_since_epoch) + return dt + return dt64 + + def decode_time_variable(self, data, nc): + """Decode time variable.""" + if data.units == "hours since proj_time0": + reference_time = np.datetime64(datetime.strptime(nc['proj_time0'].attrs["units"], + 'days since %d/%m/%YT%H:%M:%S')) + delta_days = float(nc['proj_time0'].values) * np.timedelta64(1, 'D').astype('timedelta64[ms]') + delta_hours = data.values * np.timedelta64(1, 'h').astype('timedelta64[ms]') + time_data = xr.DataArray(reference_time + delta_days + delta_hours, + coords=data.coords, attrs={"long_name": "Scanline time"}) + self._start_time = self.dt64_to_datetime(time_data[0].values) + self._end_time = self.dt64_to_datetime(time_data[-1].values) + return time_data + else: + return data + def get_dataset(self, key, yaml_info): """Get dataset.""" logger.debug("Getting data for: %s", yaml_info['name']) @@ -82,6 +108,7 @@ def get_dataset(self, key, yaml_info): file_key = yaml_info.get('nc_key', name) data = nc[file_key] data = self.calibrate(data, yaml_info, file_key, nc) + data = self.decode_time_variable(data, nc) data.attrs.update(nc.attrs) # For now add global attributes to all datasets data.attrs.update(yaml_info) self.set_time_attrs(data) From 50361aca8fd9433771fedb5acb414c54d1f66cdb Mon Sep 17 00:00:00 2001 From: "Nina.Hakansson" Date: Tue, 10 Oct 2023 15:14:42 +0200 Subject: [PATCH 0541/1416] Fix tests --- satpy/readers/viirs_vgac_l1c_nc.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/satpy/readers/viirs_vgac_l1c_nc.py b/satpy/readers/viirs_vgac_l1c_nc.py index d85c44d04f..445313a807 100644 --- a/satpy/readers/viirs_vgac_l1c_nc.py +++ b/satpy/readers/viirs_vgac_l1c_nc.py @@ -63,7 +63,7 @@ def convert_to_bt(self, data, data_lut, scale_factor): def 
fix_radiances_not_in_percent(self, data): """Scale radiances to percent. This was not done in first version of data.""" - return 100*data + return 100 * data def set_time_attrs(self, data): """Set time from attributes.""" @@ -86,7 +86,7 @@ def dt64_to_datetime(self, dt64): def decode_time_variable(self, data, nc): """Decode time variable.""" - if data.units == "hours since proj_time0": + if data.attrs["units"] == "hours since proj_time0": reference_time = np.datetime64(datetime.strptime(nc['proj_time0'].attrs["units"], 'days since %d/%m/%YT%H:%M:%S')) delta_days = float(nc['proj_time0'].values) * np.timedelta64(1, 'D').astype('timedelta64[ms]') @@ -108,7 +108,8 @@ def get_dataset(self, key, yaml_info): file_key = yaml_info.get('nc_key', name) data = nc[file_key] data = self.calibrate(data, yaml_info, file_key, nc) - data = self.decode_time_variable(data, nc) + if file_key == "time": + data = self.decode_time_variable(data, nc) data.attrs.update(nc.attrs) # For now add global attributes to all datasets data.attrs.update(yaml_info) self.set_time_attrs(data) From 6a59c85b7b2408f7350a2803ce2f19faed1f30b4 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 10 Oct 2023 15:22:36 +0200 Subject: [PATCH 0542/1416] Make caching using wrong types a warning instead of an error --- satpy/modifiers/angles.py | 3 ++- satpy/tests/modifier_tests/test_angles.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index 01e7c1ff2d..02ffadfa87 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -248,7 +248,8 @@ def _hash_args(*args, unhashable_types=DEFAULT_UNCACHE_TYPES): hashable_args = [] for arg in args: if isinstance(arg, unhashable_types): - raise TypeError(f"Unhashable type in function signature ({type(arg)}), cannot be cached.") + warnings.warn(f"Unhashable type in function signature ({type(arg)}), cannot be cached.", stacklevel=2) + continue if isinstance(arg, HASHABLE_GEOMETRIES): arg = hash(arg) elif isinstance(arg, datetime): diff --git a/satpy/tests/modifier_tests/test_angles.py b/satpy/tests/modifier_tests/test_angles.py index 46a8a8443f..4c7e295e14 100644 --- a/satpy/tests/modifier_tests/test_angles.py +++ b/satpy/tests/modifier_tests/test_angles.py @@ -330,7 +330,7 @@ def test_caching_with_array_in_args_fails(self, tmp_path): def _fake_func(array): return array + 1 - with pytest.raises(TypeError), \ + with pytest.warns(UserWarning), \ satpy.config.set(cache_lonlats=True, cache_dir=str(tmp_path)): _fake_func(da.zeros(100)) From b729ca36fefa57a1699ddce97cfe5819cedc161e Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 10 Oct 2023 15:24:19 +0200 Subject: [PATCH 0543/1416] Require numpy > 1.20 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 2e6154ea92..f5c81ee34c 100644 --- a/setup.py +++ b/setup.py @@ -30,7 +30,7 @@ except ImportError: pass -requires = ['numpy >=1.13', 'pillow', 'pyresample >=1.24.0', 'trollsift', +requires = ['numpy >1.20', 'pillow', 'pyresample >=1.24.0', 'trollsift', 'trollimage >=1.20', 'pykdtree', 'pyyaml >=5.1', 'xarray >=0.10.1, !=0.13.0', 'dask[array] >=0.17.1', 'pyproj>=2.2', 'zarr', 'donfig', 'appdirs', 'packaging', 'pooch', 'pyorbital'] From dbefc0fbe67270ba04a42aae6f71426c95a010b6 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Tue, 10 Oct 2023 16:00:08 +0200 Subject: [PATCH 0544/1416] rename DATE_2000 to MEIRINK_EPOCH --- satpy/readers/seviri_base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) 
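For reference, the VGAC scanline-timestamp decoding added a few commits above combines a day offset (proj_time0) with fractional-hour offsets per scanline; a standalone sketch of that arithmetic, with made-up values standing in for the actual file variables:

import numpy as np

# made-up stand-ins for the netCDF contents: proj_time0 holds fractional days
# since the reference date given in its units attribute, and 'time' holds
# fractional hours since proj_time0
reference_time = np.datetime64('2023-03-28T00:00:00')
delta_days = 10.5 * np.timedelta64(1, 'D').astype('timedelta64[ms]')
delta_hours = np.array([0.0, 0.0005, 0.0010]) * np.timedelta64(1, 'h').astype('timedelta64[ms]')
scanline_timestamps = reference_time + delta_days + delta_hours  # one datetime64[ms] per scanline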
diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index bd4ce962ad..88994d3f18 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -363,7 +363,7 @@ # To obtain the slope for the calibration, one should use the routine get_seviri_meirink_slope # Epoch for the MEIRINK re-calibration -DATE_2000 = datetime(2000, 1, 1) +MEIRINK_EPOCH = datetime(2000, 1, 1) MEIRINK_COEFS = {} MEIRINK_COEFS['2013'] = {} @@ -409,7 +409,7 @@ def get_meirink_slope(meirink_coefs, acquisition_time): """ A = meirink_coefs[0] B = meirink_coefs[1] - delta_t = (acquisition_time - DATE_2000).total_seconds() + delta_t = (acquisition_time - MEIRINK_EPOCH).total_seconds() S = A + B * delta_t / (3600*24) / 1000. return S/1000 From 60bb30f76fb13fa95b957e6168a1ec1e1c938403 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Tue, 10 Oct 2023 16:01:26 +0200 Subject: [PATCH 0545/1416] remove coef argument in MeirinkCalibrationHandler --- satpy/readers/seviri_base.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 88994d3f18..6471c42639 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -422,11 +422,9 @@ def should_apply_meirink(calib_mode, channel_name): class MeirinkCalibrationHandler: """Re-calibration of the SEVIRI visible channels slope (see Meirink 2013).""" - def __init__(self, coefs=MEIRINK_COEFS, calib_mode=None): + def __init__(self, calib_mode): """Initialize the calibration handler.""" - if calib_mode is None: - raise ValueError("Missing calib_mode") - self.coefs = coefs[calib_mode.split('-')[1]] + self.coefs = MEIRINK_COEFS[calib_mode.split('-')[1]] def get_slope(self, platform, channel, time): """Return the slope using the provided calibration coefficients.""" From fe43fc8e1be7047aaebbe23e82a40cfbd771b5f1 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 10 Oct 2023 16:43:20 +0200 Subject: [PATCH 0546/1416] Dummy changes --- satpy/tests/writer_tests/cf_tests/test_attrs.py | 2 +- satpy/tests/writer_tests/cf_tests/test_dataaarray.py | 2 +- satpy/writers/cf/attrs.py | 2 +- satpy/writers/cf/encoding.py | 8 ++++---- satpy/writers/cf_writer.py | 10 +++++----- 5 files changed, 12 insertions(+), 12 deletions(-) diff --git a/satpy/tests/writer_tests/cf_tests/test_attrs.py b/satpy/tests/writer_tests/cf_tests/test_attrs.py index 87cdfd173d..5895f115c3 100644 --- a/satpy/tests/writer_tests/cf_tests/test_attrs.py +++ b/satpy/tests/writer_tests/cf_tests/test_attrs.py @@ -45,7 +45,7 @@ def get_test_attrs(self): 'numpy_bool': True, 'numpy_void': np.void(0), 'numpy_bytes': np.bytes_('test'), - 'numpy_string': np.string_('test'), + 'numpy_string': np.str_('test'), 'list': [1, 2, np.float64(3)], 'nested_list': ["1", ["2", [3]]], 'bool': True, diff --git a/satpy/tests/writer_tests/cf_tests/test_dataaarray.py b/satpy/tests/writer_tests/cf_tests/test_dataaarray.py index 896de5c55b..a67cae9ca2 100644 --- a/satpy/tests/writer_tests/cf_tests/test_dataaarray.py +++ b/satpy/tests/writer_tests/cf_tests/test_dataaarray.py @@ -90,7 +90,7 @@ def get_test_attrs(self): 'numpy_bool': True, 'numpy_void': np.void(0), 'numpy_bytes': np.bytes_('test'), - 'numpy_string': np.string_('test'), + 'numpy_string': np.str_('test'), 'list': [1, 2, np.float64(3)], 'nested_list': ["1", ["2", [3]]], 'bool': True, diff --git a/satpy/writers/cf/attrs.py b/satpy/writers/cf/attrs.py index 153d645594..fad60fe97e 100644 --- a/satpy/writers/cf/attrs.py +++ b/satpy/writers/cf/attrs.py @@ -166,7 +166,7 @@ def 
_remove_satpy_attrs(new_data): def _format_prerequisites_attrs(dataarray): """Reformat prerequisites attribute value to string.""" if 'prerequisites' in dataarray.attrs: - dataarray.attrs['prerequisites'] = [np.string_(str(prereq)) for prereq in dataarray.attrs['prerequisites']] + dataarray.attrs['prerequisites'] = [np.bytes_(str(prereq)) for prereq in dataarray.attrs['prerequisites']] return dataarray diff --git a/satpy/writers/cf/encoding.py b/satpy/writers/cf/encoding.py index 55a48f70fd..4e244e82f9 100644 --- a/satpy/writers/cf/encoding.py +++ b/satpy/writers/cf/encoding.py @@ -98,16 +98,16 @@ def _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix): return encoding -def update_encoding(dataset, to_netcdf_kwargs, numeric_name_prefix='CHANNEL_'): +def update_encoding(dataset, to_engine_kwargs, numeric_name_prefix='CHANNEL_'): """Update encoding. Preserve dask chunks, avoid fill values in coordinate variables and make sure that time & time bounds have the same units. """ - other_to_netcdf_kwargs = to_netcdf_kwargs.copy() - encoding = other_to_netcdf_kwargs.pop('encoding', {}).copy() + other_to_engine_kwargs = to_engine_kwargs.copy() + encoding = other_to_engine_kwargs.pop('encoding', {}).copy() encoding = _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix) encoding = _set_default_chunks(encoding, dataset) encoding = _set_default_fill_value(encoding, dataset) encoding = _set_default_time_encoding(encoding, dataset) - return encoding, other_to_netcdf_kwargs + return encoding, other_to_engine_kwargs diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 85c6fe999b..30ca7e784e 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -186,7 +186,7 @@ CF_VERSION = 'CF-1.7' -# Numpy datatypes compatible with all netCDF4 backends. ``np.unicode_`` is +# Numpy datatypes compatible with all netCDF4 backends. ``np.str_`` is # excluded because h5py (and thus h5netcdf) has problems with unicode, see # https://github.com/h5py/h5py/issues/624.""" NC4_DTYPES = [np.dtype('int8'), np.dtype('uint8'), @@ -194,7 +194,7 @@ np.dtype('int32'), np.dtype('uint32'), np.dtype('int64'), np.dtype('uint64'), np.dtype('float32'), np.dtype('float64'), - np.string_] + np.bytes_] # Unsigned and int64 isn't CF 1.7 compatible # Note: Unsigned and int64 are CF 1.9 compatible @@ -203,7 +203,7 @@ np.dtype('int32'), np.dtype('float32'), np.dtype('float64'), - np.string_] + np.bytes_] def _sanitize_writer_kwargs(writer_kwargs): @@ -314,7 +314,7 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, # - If single netCDF, it write directly for group_name, ds in grouped_datasets.items(): encoding, other_to_netcdf_kwargs = update_encoding(ds, - to_netcdf_kwargs=to_netcdf_kwargs, + to_engine_kwargs=to_netcdf_kwargs, numeric_name_prefix=numeric_name_prefix) res = ds.to_netcdf(filename, engine=engine, @@ -346,7 +346,7 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, """ from satpy.writers.cf.dataarray import make_cf_dataarray warnings.warn('CFWriter.da2cf is deprecated.' 
- 'Use satpy.writers.cf_writer.make_cf_dataarray instead.', + 'Use satpy.writers.cf.dataarray.make_cf_dataarray instead.', DeprecationWarning, stacklevel=3) return make_cf_dataarray(dataarray=dataarray, epoch=epoch, From c37fcb763110e0c3fc4bbfc5bb72df249a4b6017 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 10 Oct 2023 17:08:21 +0200 Subject: [PATCH 0547/1416] Set private functions --- satpy/readers/amsr2_l2_gaasp.py | 4 ++-- satpy/tests/writer_tests/cf_tests/test_area.py | 18 +++++++++--------- .../tests/writer_tests/cf_tests/test_attrs.py | 6 +++--- satpy/writers/cf/area.py | 10 +++++----- satpy/writers/cf/attrs.py | 16 ++++++++-------- satpy/writers/cf/dataarray.py | 4 ++-- satpy/writers/cf/datasets.py | 6 +++--- satpy/writers/cf/time.py | 2 +- 8 files changed, 33 insertions(+), 33 deletions(-) diff --git a/satpy/readers/amsr2_l2_gaasp.py b/satpy/readers/amsr2_l2_gaasp.py index 5f91e2d965..4f045057b4 100644 --- a/satpy/readers/amsr2_l2_gaasp.py +++ b/satpy/readers/amsr2_l2_gaasp.py @@ -189,7 +189,7 @@ def _available_if_this_file_type(self, configured_datasets): continue yield self.file_type_matches(ds_info['file_type']), ds_info - def _add_lonlat_coords(self, data_arr, ds_info): + def __add_lonlat_coords(self, data_arr, ds_info): lat_coord = None lon_coord = None for coord_name in data_arr.coords: @@ -209,7 +209,7 @@ def _get_ds_info_for_data_arr(self, var_name, data_arr): if x_dim_name in self.dim_resolutions: ds_info['resolution'] = self.dim_resolutions[x_dim_name] if not self.is_gridded and data_arr.coords: - self._add_lonlat_coords(data_arr, ds_info) + self.__add_lonlat_coords(data_arr, ds_info) return ds_info def _is_2d_yx_data_array(self, data_arr): diff --git a/satpy/tests/writer_tests/cf_tests/test_area.py b/satpy/tests/writer_tests/cf_tests/test_area.py index 1dd82ddd9d..92088f6d68 100644 --- a/satpy/tests/writer_tests/cf_tests/test_area.py +++ b/satpy/tests/writer_tests/cf_tests/test_area.py @@ -356,9 +356,9 @@ def _gm_matches(gmapping, expected): assert new_ds.attrs['grid_mapping'] == 'geos' _gm_matches(grid_mapping, geos_expected) - def test_add_lonlat_coords(self): + def test__add_lonlat_coords(self): """Test the conversion from areas to lon/lat.""" - from satpy.writers.cf.area import add_lonlat_coords + from satpy.writers.cf.area import _add_lonlat_coords area = AreaDefinition( 'seviri', @@ -371,7 +371,7 @@ def test_add_lonlat_coords(self): lons_ref, lats_ref = area.get_lonlats() dataarray = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), attrs={'area': area}) - res = add_lonlat_coords(dataarray) + res = _add_lonlat_coords(dataarray) # original should be unmodified assert 'longitude' not in dataarray.coords @@ -394,7 +394,7 @@ def test_add_lonlat_coords(self): lons_ref, lats_ref = area.get_lonlats() dataarray = xr.DataArray(data=da.from_array(np.arange(3 * 10 * 10).reshape(3, 10, 10), chunks=(1, 5, 5)), dims=('bands', 'y', 'x'), attrs={'area': area}) - res = add_lonlat_coords(dataarray) + res = _add_lonlat_coords(dataarray) # original should be unmodified assert 'longitude' not in dataarray.coords @@ -469,12 +469,12 @@ def datasets(self): datasets['lon'].attrs['name'] = 'lon' return datasets - def test_is_lon_or_lat_dataarray(self, datasets): - """Test the is_lon_or_lat_dataarray function.""" - from satpy.writers.cf.area import is_lon_or_lat_dataarray + def test__is_lon_or_lat_dataarray(self, datasets): + """Test the _is_lon_or_lat_dataarray function.""" + from satpy.writers.cf.area import _is_lon_or_lat_dataarray - assert is_lon_or_lat_dataarray(datasets['lat']) - 
assert not is_lon_or_lat_dataarray(datasets['var1']) + assert _is_lon_or_lat_dataarray(datasets['lat']) + assert not _is_lon_or_lat_dataarray(datasets['var1']) def test_has_projection_coords(self, datasets): """Test the has_projection_coords function.""" diff --git a/satpy/tests/writer_tests/cf_tests/test_attrs.py b/satpy/tests/writer_tests/cf_tests/test_attrs.py index 5895f115c3..a969765181 100644 --- a/satpy/tests/writer_tests/cf_tests/test_attrs.py +++ b/satpy/tests/writer_tests/cf_tests/test_attrs.py @@ -124,14 +124,14 @@ def assertDictWithArraysEqual(self, d1, d2): assert isinstance(val2, np.generic) assert val1.dtype == val2.dtype - def test_encode_attrs_nc(self): + def test__encode_attrs_nc(self): """Test attributes encoding.""" - from satpy.writers.cf.attrs import encode_attrs_nc + from satpy.writers.cf.attrs import _encode_attrs_nc attrs, expected, _ = self.get_test_attrs() # Test encoding - encoded = encode_attrs_nc(attrs) + encoded = _encode_attrs_nc(attrs) self.assertDictWithArraysEqual(expected, encoded) # Test decoding of json-encoded attributes diff --git a/satpy/writers/cf/area.py b/satpy/writers/cf/area.py index 68113c1ee2..5916083d62 100644 --- a/satpy/writers/cf/area.py +++ b/satpy/writers/cf/area.py @@ -28,7 +28,7 @@ logger = logging.getLogger(__name__) -def add_lonlat_coords(dataarray): +def _add_lonlat_coords(dataarray): """Add 'longitude' and 'latitude' coordinates to DataArray.""" dataarray = dataarray.copy() area = dataarray.attrs['area'] @@ -73,7 +73,7 @@ def area2cf(dataarray, include_lonlats=False, got_lonlats=False): """Convert an area to at CF grid mapping or lon and lats.""" res = [] if not got_lonlats and (isinstance(dataarray.attrs['area'], SwathDefinition) or include_lonlats): - dataarray = add_lonlat_coords(dataarray) + dataarray = _add_lonlat_coords(dataarray) if isinstance(dataarray.attrs['area'], AreaDefinition): dataarray, gmapping = _add_grid_mapping(dataarray) res.append(gmapping) @@ -81,7 +81,7 @@ def area2cf(dataarray, include_lonlats=False, got_lonlats=False): return res -def is_lon_or_lat_dataarray(dataarray): +def _is_lon_or_lat_dataarray(dataarray): """Check if the DataArray represents the latitude or longitude coordinate.""" if 'standard_name' in dataarray.attrs and dataarray.attrs['standard_name'] in ['longitude', 'latitude']: return True @@ -91,7 +91,7 @@ def is_lon_or_lat_dataarray(dataarray): def has_projection_coords(ds_collection): """Check if DataArray collection has a "longitude" or "latitude" DataArray.""" for dataarray in ds_collection.values(): - if is_lon_or_lat_dataarray(dataarray): + if _is_lon_or_lat_dataarray(dataarray): return True return False @@ -124,7 +124,7 @@ def make_alt_coords_unique(datas, pretty=False): tokens = defaultdict(set) for dataset in datas.values(): for coord_name in dataset.coords: - if not is_lon_or_lat_dataarray(dataset[coord_name]) and coord_name not in dataset.dims: + if not _is_lon_or_lat_dataarray(dataset[coord_name]) and coord_name not in dataset.dims: tokens[coord_name].add(tokenize(dataset[coord_name].data)) coords_unique = dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()]) diff --git a/satpy/writers/cf/attrs.py b/satpy/writers/cf/attrs.py index fad60fe97e..d4a41f2bfc 100644 --- a/satpy/writers/cf/attrs.py +++ b/satpy/writers/cf/attrs.py @@ -65,7 +65,7 @@ def _encode(self, obj): return str(obj) -def _encode_nc(obj): +def __encode_nc(obj): """Try to encode `obj` as a netCDF/Zarr compatible datatype which most closely resembles the object's nature. 
Raises: @@ -90,7 +90,7 @@ def _encode_nc(obj): raise ValueError('Unable to encode') -def encode_nc(obj): +def _encode_nc(obj): """Encode the given object as a netcdf compatible datatype.""" try: return obj.to_cf() @@ -104,9 +104,9 @@ def _encode_python_objects(obj): If on failure, encode as a string. Plain lists are encoded recursively. """ if isinstance(obj, (list, tuple)) and all([not isinstance(item, (list, tuple)) for item in obj]): - return [encode_nc(item) for item in obj] + return [_encode_nc(item) for item in obj] try: - dump = _encode_nc(obj) + dump = __encode_nc(obj) except ValueError: try: # Decode byte-strings @@ -117,7 +117,7 @@ def _encode_python_objects(obj): return dump -def encode_attrs_nc(attrs): +def _encode_attrs_nc(attrs): """Encode dataset attributes in a netcdf compatible datatype. Args: @@ -130,7 +130,7 @@ def encode_attrs_nc(attrs): encoded_attrs = [] for key, val in sorted(attrs.items()): if val is not None: - encoded_attrs.append((key, encode_nc(val))) + encoded_attrs.append((key, _encode_nc(val))) return OrderedDict(encoded_attrs) @@ -193,7 +193,7 @@ def preprocess_datarray_attrs(dataarray, flatten_attrs, exclude_attrs): if flatten_attrs: dataarray.attrs = flatten_dict(dataarray.attrs) - dataarray.attrs = encode_attrs_nc(dataarray.attrs) + dataarray.attrs = _encode_attrs_nc(dataarray.attrs) return dataarray @@ -215,7 +215,7 @@ def preprocess_header_attrs(header_attrs, flatten_attrs=False): if header_attrs is not None: if flatten_attrs: header_attrs = flatten_dict(header_attrs) - header_attrs = encode_attrs_nc(header_attrs) # OrderedDict + header_attrs = _encode_attrs_nc(header_attrs) # OrderedDict else: header_attrs = {} header_attrs = _add_history(header_attrs) diff --git a/satpy/writers/cf/dataarray.py b/satpy/writers/cf/dataarray.py index fd9b20be5e..a5322cfee4 100644 --- a/satpy/writers/cf/dataarray.py +++ b/satpy/writers/cf/dataarray.py @@ -21,7 +21,7 @@ from satpy.writers.cf.attrs import preprocess_datarray_attrs from satpy.writers.cf.coords_attrs import add_xy_coords_attrs -from satpy.writers.cf.time import EPOCH, _process_time_coord +from satpy.writers.cf.time import EPOCH, process_time_coord logger = logging.getLogger(__name__) @@ -96,5 +96,5 @@ def make_cf_dataarray(dataarray, flatten_attrs=flatten_attrs, exclude_attrs=exclude_attrs) dataarray = add_xy_coords_attrs(dataarray) - dataarray = _process_time_coord(dataarray, epoch=epoch) + dataarray = process_time_coord(dataarray, epoch=epoch) return dataarray diff --git a/satpy/writers/cf/datasets.py b/satpy/writers/cf/datasets.py index 4baabbc894..c87e6673d4 100644 --- a/satpy/writers/cf/datasets.py +++ b/satpy/writers/cf/datasets.py @@ -28,7 +28,7 @@ logger = logging.getLogger(__name__) -def get_extra_ds(dataarray, keys=None): +def _get_extra_ds(dataarray, keys=None): """Get the ancillary_variables DataArrays associated to a dataset.""" ds_collection = {} # Retrieve ancillary variable datarrays @@ -36,7 +36,7 @@ def get_extra_ds(dataarray, keys=None): ancillary_variable = ancillary_dataarray.name if keys and ancillary_variable not in keys: keys.append(ancillary_variable) - ds_collection.update(get_extra_ds(ancillary_dataarray, keys=keys)) + ds_collection.update(_get_extra_ds(ancillary_dataarray, keys=keys)) # Add input dataarray ds_collection[dataarray.attrs['name']] = dataarray return ds_collection @@ -111,7 +111,7 @@ def _collect_cf_dataset(list_dataarrays, # --> Since keys=None, it doesn't never retrieve ancillary variables !!! 
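A quick sketch of what the now-private attribute encoder produces (illustrative; the example values are not from the patch, and the JSON fallback is assumed from the helpers shown above):

from satpy.writers.cf.attrs import _encode_attrs_nc

encoded = _encode_attrs_nc({'flags': {'a': 1, 'b': 2}, 'valid': True, 'comment': None})
# dicts fall back to JSON strings ('{"a": 1, "b": 2}'), booleans encode as
# 'true'/'false', and attributes whose value is None are dropped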
ds_collection = {} for dataarray in list_dataarrays: - ds_collection.update(get_extra_ds(dataarray)) + ds_collection.update(_get_extra_ds(dataarray)) # Check if one DataArray in the collection has 'longitude' or 'latitude' got_lonlats = has_projection_coords(ds_collection) diff --git a/satpy/writers/cf/time.py b/satpy/writers/cf/time.py index 05b90c4641..4c5cbf5bc9 100644 --- a/satpy/writers/cf/time.py +++ b/satpy/writers/cf/time.py @@ -47,7 +47,7 @@ def add_time_bounds_dimension(ds, time="time"): return ds -def _process_time_coord(dataarray, epoch): +def process_time_coord(dataarray, epoch): """Process the 'time' coordinate, if existing. It expand the DataArray with a time dimension if does not yet exists. From 22a8d097b698dd87039a13d36a336ccb21fcf277 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 10 Oct 2023 18:11:46 +0200 Subject: [PATCH 0548/1416] Reorg files --- satpy/_scene_converters.py | 4 +- satpy/scene.py | 2 +- satpy/tests/utils.py | 15 ++ .../tests/writer_tests/cf_tests/test_area.py | 32 ----- .../tests/writer_tests/cf_tests/test_attrs.py | 18 +-- .../{test_time_coords.py => test_coords.py} | 41 +++++- .../writer_tests/cf_tests/test_dataaarray.py | 22 +-- .../writer_tests/cf_tests/test_datasets.py | 27 ++-- .../writer_tests/cf_tests/test_encoding.py | 4 +- satpy/writers/cf/__init__.py | 2 + satpy/writers/cf/coords.py | 128 ++++++++++++++++++ satpy/writers/cf/coords_attrs.py | 46 ------- satpy/writers/cf/crs.py | 47 ------- satpy/writers/cf/dataarray.py | 4 +- satpy/writers/cf/datasets.py | 8 +- satpy/writers/cf/time.py | 69 ---------- satpy/writers/cf_writer.py | 2 +- 17 files changed, 219 insertions(+), 252 deletions(-) rename satpy/tests/writer_tests/cf_tests/{test_time_coords.py => test_coords.py} (56%) create mode 100644 satpy/writers/cf/coords.py delete mode 100644 satpy/writers/cf/coords_attrs.py delete mode 100644 satpy/writers/cf/crs.py delete mode 100644 satpy/writers/cf/time.py diff --git a/satpy/_scene_converters.py b/satpy/_scene_converters.py index ba4432a58f..c5c0b1c896 100644 --- a/satpy/_scene_converters.py +++ b/satpy/_scene_converters.py @@ -66,7 +66,7 @@ def to_xarray(scn, epoch (str): Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using "from satpy.cf_writer import EPOCH" + If None, the default reference time is retrieved using "from satpy.writers.cf import EPOCH" flatten_attrs (bool): If True, flatten dict-type attributes. exclude_attrs (list): @@ -90,8 +90,8 @@ def to_xarray(scn, A CF-compliant xr.Dataset """ + from satpy.writers.cf import EPOCH from satpy.writers.cf.datasets import collect_cf_datasets - from satpy.writers.cf.time import EPOCH if epoch is None: epoch = EPOCH diff --git a/satpy/scene.py b/satpy/scene.py index e3e71811e9..52580d14e8 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1128,7 +1128,7 @@ def to_xarray(self, epoch (str): Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using "from satpy.cf_writer import EPOCH" + If None, the default reference time is retrieved using "from satpy.writers.cf import EPOCH" flatten_attrs (bool): If True, flatten dict-type attributes. 
exclude_attrs (list): diff --git a/satpy/tests/utils.py b/satpy/tests/utils.py index c87cd1055c..155916aca1 100644 --- a/satpy/tests/utils.py +++ b/satpy/tests/utils.py @@ -407,3 +407,18 @@ def assert_attrs_equal(attrs, attrs_exp, tolerance=0): ) except TypeError: assert attrs[key] == attrs_exp[key], err_msg + + +def assert_dict_array_equality(d1, d2): + """Check that dicts containing arrays are equal.""" + assert set(d1.keys()) == set(d2.keys()) + for key, val1 in d1.items(): + val2 = d2[key] + if isinstance(val1, np.ndarray): + np.testing.assert_array_equal(val1, val2) + assert val1.dtype == val2.dtype + else: + assert val1 == val2 + if isinstance(val1, (np.floating, np.integer, np.bool_)): + assert isinstance(val2, np.generic) + assert val1.dtype == val2.dtype diff --git a/satpy/tests/writer_tests/cf_tests/test_area.py b/satpy/tests/writer_tests/cf_tests/test_area.py index 92088f6d68..5b7dd86d38 100644 --- a/satpy/tests/writer_tests/cf_tests/test_area.py +++ b/satpy/tests/writer_tests/cf_tests/test_area.py @@ -16,18 +16,12 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the CF Area.""" -import logging - import dask.array as da import numpy as np import pytest import xarray as xr from pyresample import AreaDefinition, SwathDefinition -# NOTE: -# The following fixtures are not defined in this file, but are used and injected by Pytest: -# - caplog - class TestCFArea: """Test case for CF Area.""" @@ -406,32 +400,6 @@ def test__add_lonlat_coords(self): assert {'name': 'latitude', 'standard_name': 'latitude', 'units': 'degrees_north'}.items() <= lat.attrs.items() assert {'name': 'longitude', 'standard_name': 'longitude', 'units': 'degrees_east'}.items() <= lon.attrs.items() - def test_is_projected(self, caplog): - """Tests for private _is_projected function.""" - from satpy.writers.cf.crs import _is_projected - - # test case with units but no area - da = xr.DataArray( - np.arange(25).reshape(5, 5), - dims=("y", "x"), - coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "m"}), - "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "m"})}) - assert _is_projected(da) - - da = xr.DataArray( - np.arange(25).reshape(5, 5), - dims=("y", "x"), - coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "degrees_east"}), - "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "degrees_north"})}) - assert not _is_projected(da) - - da = xr.DataArray( - np.arange(25).reshape(5, 5), - dims=("y", "x")) - with caplog.at_level(logging.WARNING): - assert _is_projected(da) - assert "Failed to tell if data are projected." 
in caplog.text - @pytest.fixture def datasets(self): """Create test dataset.""" diff --git a/satpy/tests/writer_tests/cf_tests/test_attrs.py b/satpy/tests/writer_tests/cf_tests/test_attrs.py index a969765181..6988e761ee 100644 --- a/satpy/tests/writer_tests/cf_tests/test_attrs.py +++ b/satpy/tests/writer_tests/cf_tests/test_attrs.py @@ -109,30 +109,16 @@ def get_test_attrs(self): 'raw_metadata_dict_b': np.array([1, 2, 3], dtype='uint8')} return attrs, encoded, encoded_flat - def assertDictWithArraysEqual(self, d1, d2): - """Check that dicts containing arrays are equal.""" - # TODO: this is also used by test_da2cf - assert set(d1.keys()) == set(d2.keys()) - for key, val1 in d1.items(): - val2 = d2[key] - if isinstance(val1, np.ndarray): - np.testing.assert_array_equal(val1, val2) - assert val1.dtype == val2.dtype - else: - assert val1 == val2 - if isinstance(val1, (np.floating, np.integer, np.bool_)): - assert isinstance(val2, np.generic) - assert val1.dtype == val2.dtype - def test__encode_attrs_nc(self): """Test attributes encoding.""" + from satpy.tests.utils import assert_dict_array_equality from satpy.writers.cf.attrs import _encode_attrs_nc attrs, expected, _ = self.get_test_attrs() # Test encoding encoded = _encode_attrs_nc(attrs) - self.assertDictWithArraysEqual(expected, encoded) + assert_dict_array_equality(expected, encoded) # Test decoding of json-encoded attributes raw_md_roundtrip = {'recarray': [[0, 0], [0, 0], [0, 0]], diff --git a/satpy/tests/writer_tests/cf_tests/test_time_coords.py b/satpy/tests/writer_tests/cf_tests/test_coords.py similarity index 56% rename from satpy/tests/writer_tests/cf_tests/test_time_coords.py rename to satpy/tests/writer_tests/cf_tests/test_coords.py index ce7845dcca..9e9d8c4607 100644 --- a/satpy/tests/writer_tests/cf_tests/test_time_coords.py +++ b/satpy/tests/writer_tests/cf_tests/test_coords.py @@ -16,16 +16,22 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""CF processing of time information (coordinates and dimensions).""" +import logging + import numpy as np import xarray as xr +# NOTE: +# The following fixtures are not defined in this file, but are used and injected by Pytest: +# - caplog + class TestCFtime: """Test cases for CF time dimension and coordinates.""" def test_add_time_bounds_dimension(self): """Test addition of CF-compliant time attributes.""" - from satpy.writers.cf.time import add_time_bounds_dimension + from satpy.writers.cf.coords import add_time_bounds_dimension test_array = np.array([[1, 2], [3, 4], [5, 6], [7, 8]]) times = np.array(['2018-05-30T10:05:00', '2018-05-30T10:05:01', @@ -42,3 +48,36 @@ def test_add_time_bounds_dimension(self): assert "time_bnds" in list(ds.data_vars) assert "bounds" in ds["time"].attrs assert "standard_name" in ds["time"].attrs + + +class TestCFcoords: + """Test cases for CF spatial dimension and coordinates.""" + + def test_is_projected(self, caplog): + """Tests for private _is_projected function.""" + from satpy.writers.cf.coords import _is_projected + + # test case with units but no area + da = xr.DataArray( + np.arange(25).reshape(5, 5), + dims=("y", "x"), + coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "m"}), + "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "m"})}) + assert _is_projected(da) + + da = xr.DataArray( + np.arange(25).reshape(5, 5), + dims=("y", "x"), + coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "degrees_east"}), + "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "degrees_north"})}) + assert not _is_projected(da) + + da = xr.DataArray( + np.arange(25).reshape(5, 5), + dims=("y", "x")) + with caplog.at_level(logging.WARNING): + assert _is_projected(da) + assert "Failed to tell if data are projected." 
in caplog.text + + # add_xy_coords_attrs + # process_time_coord diff --git a/satpy/tests/writer_tests/cf_tests/test_dataaarray.py b/satpy/tests/writer_tests/cf_tests/test_dataaarray.py index a67cae9ca2..43b87cfc20 100644 --- a/satpy/tests/writer_tests/cf_tests/test_dataaarray.py +++ b/satpy/tests/writer_tests/cf_tests/test_dataaarray.py @@ -154,23 +154,9 @@ def get_test_attrs(self): 'raw_metadata_dict_b': np.array([1, 2, 3], dtype='uint8')} return attrs, encoded, encoded_flat - def assertDictWithArraysEqual(self, d1, d2): - """Check that dicts containing arrays are equal.""" - # TODO: also used by cf/test_attrs.py - assert set(d1.keys()) == set(d2.keys()) - for key, val1 in d1.items(): - val2 = d2[key] - if isinstance(val1, np.ndarray): - np.testing.assert_array_equal(val1, val2) - assert val1.dtype == val2.dtype - else: - assert val1 == val2 - if isinstance(val1, (np.floating, np.integer, np.bool_)): - assert isinstance(val2, np.generic) - assert val1.dtype == val2.dtype - def test_make_cf_dataarray(self): """Test the conversion of a DataArray to a CF-compatible DataArray.""" + from satpy.tests.utils import assert_dict_array_equality from satpy.writers.cf.dataarray import make_cf_dataarray # Create set of test attributes @@ -200,12 +186,12 @@ def test_make_cf_dataarray(self): np.testing.assert_array_equal(res['acq_time'], arr['acq_time']) assert res['x'].attrs == {'units': 'm', 'standard_name': 'projection_x_coordinate'} assert res['y'].attrs == {'units': 'm', 'standard_name': 'projection_y_coordinate'} - self.assertDictWithArraysEqual(res.attrs, attrs_expected) + assert_dict_array_equality(res.attrs, attrs_expected) # Test attribute kwargs res_flat = make_cf_dataarray(arr, flatten_attrs=True, exclude_attrs=['int']) attrs_expected_flat.pop('int') - self.assertDictWithArraysEqual(res_flat.attrs, attrs_expected_flat) + assert_dict_array_equality(res_flat.attrs, attrs_expected_flat) def test_make_cf_dataarray_one_dimensional_array(self): """Test the conversion of an 1d DataArray to a CF-compatible DataArray.""" @@ -214,3 +200,5 @@ def test_make_cf_dataarray_one_dimensional_array(self): arr = xr.DataArray(np.array([1, 2, 3, 4]), attrs={}, dims=('y',), coords={'y': [0, 1, 2, 3], 'acq_time': ('y', [0, 1, 2, 3])}) _ = make_cf_dataarray(arr) + + # _handle_dataarray_name diff --git a/satpy/tests/writer_tests/cf_tests/test_datasets.py b/satpy/tests/writer_tests/cf_tests/test_datasets.py index b094feecbc..d92099e869 100644 --- a/satpy/tests/writer_tests/cf_tests/test_datasets.py +++ b/satpy/tests/writer_tests/cf_tests/test_datasets.py @@ -15,7 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
-"""Tests CF-compliant DataArray creation.""" +"""Tests CF-compliant Dataset(s) creation.""" import datetime import numpy as np @@ -24,18 +24,10 @@ from pyresample import AreaDefinition, create_area_def -def test_empty_collect_cf_datasets(): - """Test that if no DataArrays, collect_cf_datasets raise error.""" - from satpy.writers.cf.datasets import collect_cf_datasets - - with pytest.raises(RuntimeError): - collect_cf_datasets(list_dataarrays=[]) - - -class TestCollectCfDatasets: +class TestCollectCfDataset: """Test case for collect_cf_dataset.""" - def test_collect_cf_dataarrays(self): + def test_collect_cf_dataset(self): """Test collecting CF datasets from a DataArray objects.""" from satpy.writers.cf.datasets import _collect_cf_dataset @@ -75,7 +67,7 @@ def test_collect_cf_dataarrays(self): assert 'grid_mapping' not in da_var2.attrs assert da_var2.attrs['long_name'] == 'variable 2' - def test_collect_cf_dataarrays_with_latitude_named_lat(self): + def test_collect_cf_dataset_with_latitude_named_lat(self): """Test collecting CF datasets with latitude named lat.""" from satpy.writers.cf.datasets import _collect_cf_dataset @@ -148,3 +140,14 @@ def test_geographic_area_coords_attrs(self): assert ds["mavas"].attrs["longitude_of_prime_meridian"] == 0.0 np.testing.assert_allclose(ds["mavas"].attrs["semi_major_axis"], 6378137.0) np.testing.assert_allclose(ds["mavas"].attrs["inverse_flattening"], 298.257223563) + + +class TestCollectCfDatasets: + """Test case for collect_cf_datasets.""" + + def test_empty_collect_cf_datasets(self): + """Test that if no DataArrays, collect_cf_datasets raise error.""" + from satpy.writers.cf.datasets import collect_cf_datasets + + with pytest.raises(RuntimeError): + collect_cf_datasets(list_dataarrays=[]) diff --git a/satpy/tests/writer_tests/cf_tests/test_encoding.py b/satpy/tests/writer_tests/cf_tests/test_encoding.py index 66f7c72a48..125c7eec94 100644 --- a/satpy/tests/writer_tests/cf_tests/test_encoding.py +++ b/satpy/tests/writer_tests/cf_tests/test_encoding.py @@ -22,8 +22,8 @@ import xarray as xr -class TestUpdateDatasetEncodings: - """Test update of Dataset encodings.""" +class TestUpdateEncoding: + """Test update of dataset encodings.""" @pytest.fixture def fake_ds(self): diff --git a/satpy/writers/cf/__init__.py b/satpy/writers/cf/__init__.py index f597a9264c..c48acebcf9 100644 --- a/satpy/writers/cf/__init__.py +++ b/satpy/writers/cf/__init__.py @@ -1,3 +1,5 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- """Code for generation of CF-compliant datasets.""" + +EPOCH = u"seconds since 1970-01-01 00:00:00" diff --git a/satpy/writers/cf/coords.py b/satpy/writers/cf/coords.py new file mode 100644 index 0000000000..dee28952b5 --- /dev/null +++ b/satpy/writers/cf/coords.py @@ -0,0 +1,128 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +"""Set CF-compliant spatial and temporal coordinates.""" + +import logging +from contextlib import suppress + +import numpy as np +import xarray as xr +from pyresample.geometry import AreaDefinition, SwathDefinition + +logger = logging.getLogger(__name__) + + +def add_xy_coords_attrs(dataarray): + """Add relevant attributes to x, y coordinates.""" + # If there are no coords, return dataarray + if not dataarray.coords.keys() & {"x", "y", "crs"}: + return dataarray + # If projected area + if _is_projected(dataarray): + dataarray = _add_xy_projected_coords_attrs(dataarray) + else: + dataarray = _add_xy_geographic_coords_attrs(dataarray) + if 'crs' in dataarray.coords: + dataarray = dataarray.drop_vars('crs') + return 
dataarray + + +def _is_projected(dataarray): + """Guess whether data are projected or not.""" + crs = _try_to_get_crs(dataarray) + if crs: + return crs.is_projected + units = _try_get_units_from_coords(dataarray) + if units: + if units.endswith("m"): + return True + if units.startswith("degrees"): + return False + logger.warning("Failed to tell if data are projected. Assuming yes.") + return True + + +def _try_to_get_crs(dataarray): + """Try to get a CRS from attributes.""" + if "area" in dataarray.attrs: + if isinstance(dataarray.attrs["area"], AreaDefinition): + return dataarray.attrs["area"].crs + if not isinstance(dataarray.attrs["area"], SwathDefinition): + logger.warning( + f"Could not tell CRS from area of type {type(dataarray.attrs['area']).__name__:s}. " + "Assuming projected CRS.") + if "crs" in dataarray.coords: + return dataarray.coords["crs"].item() + + +def _try_get_units_from_coords(dataarray): + """Try to retrieve coordinate x/y units.""" + for c in ["x", "y"]: + with suppress(KeyError): + # If the data has only 1 dimension, it has only one of x or y coords + if "units" in dataarray.coords[c].attrs: + return dataarray.coords[c].attrs["units"] + + +def _add_xy_projected_coords_attrs(dataarray, x='x', y='y'): + """Add relevant attributes to x, y coordinates of a projected CRS.""" + if x in dataarray.coords: + dataarray[x].attrs['standard_name'] = 'projection_x_coordinate' + dataarray[x].attrs['units'] = 'm' + if y in dataarray.coords: + dataarray[y].attrs['standard_name'] = 'projection_y_coordinate' + dataarray[y].attrs['units'] = 'm' + return dataarray + + +def _add_xy_geographic_coords_attrs(dataarray, x='x', y='y'): + """Add relevant attributes to x, y coordinates of a geographic CRS.""" + if x in dataarray.coords: + dataarray[x].attrs['standard_name'] = 'longitude' + dataarray[x].attrs['units'] = 'degrees_east' + if y in dataarray.coords: + dataarray[y].attrs['standard_name'] = 'latitude' + dataarray[y].attrs['units'] = 'degrees_north' + return dataarray + + +def add_time_bounds_dimension(ds, time="time"): + """Add time bound dimension to xr.Dataset.""" + start_times = [] + end_times = [] + for _var_name, data_array in ds.items(): + start_times.append(data_array.attrs.get("start_time", None)) + end_times.append(data_array.attrs.get("end_time", None)) + + start_time = min(start_time for start_time in start_times + if start_time is not None) + end_time = min(end_time for end_time in end_times + if end_time is not None) + ds['time_bnds'] = xr.DataArray([[np.datetime64(start_time), + np.datetime64(end_time)]], + dims=['time', 'bnds_1d']) + ds[time].attrs['bounds'] = "time_bnds" + ds[time].attrs['standard_name'] = "time" + return ds + + +def process_time_coord(dataarray, epoch): + """Process the 'time' coordinate, if existing. + + It expand the DataArray with a time dimension if does not yet exists. 
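A minimal sketch of how the projected/geographic decision above plays out when only coordinate units are available (no usable "area" attribute and no "crs" coordinate), using the functions just introduced; the unit strings are the two documented fallback cases:

import numpy as np
import xarray as xr

data = np.zeros((2, 2))

# Units ending in "m" -> treated as projected.
projected = xr.DataArray(data, dims=("y", "x"),
                         coords={"y": [0.0, 1.0], "x": [0.0, 1.0]})
projected["x"].attrs["units"] = "m"
projected = add_xy_coords_attrs(projected)
assert projected["x"].attrs["standard_name"] == "projection_x_coordinate"

# Units starting with "degrees" -> treated as geographic.
geographic = xr.DataArray(data, dims=("y", "x"),
                          coords={"y": [0.0, 1.0], "x": [0.0, 1.0]})
geographic["x"].attrs["units"] = "degrees_east"
geographic = add_xy_coords_attrs(geographic)
assert geographic["x"].attrs["standard_name"] == "longitude"
assert geographic["y"].attrs["units"] == "degrees_north"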
+ + The function assumes + + - that x and y dimensions have at least shape > 1 + - the time coordinate has size 1 + + """ + if 'time' in dataarray.coords: + dataarray['time'].encoding['units'] = epoch + dataarray['time'].attrs['standard_name'] = 'time' + dataarray['time'].attrs.pop('bounds', None) + + if 'time' not in dataarray.dims and dataarray["time"].size not in dataarray.shape: + dataarray = dataarray.expand_dims('time') + + return dataarray diff --git a/satpy/writers/cf/coords_attrs.py b/satpy/writers/cf/coords_attrs.py deleted file mode 100644 index c7e559adc2..0000000000 --- a/satpy/writers/cf/coords_attrs.py +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -"""Set CF-compliant attributes to x and y spatial dimensions.""" - -import logging - -from satpy.writers.cf.crs import _is_projected - -logger = logging.getLogger(__name__) - - -def add_xy_coords_attrs(dataarray): - """Add relevant attributes to x, y coordinates.""" - # If there are no coords, return dataarray - if not dataarray.coords.keys() & {"x", "y", "crs"}: - return dataarray - # If projected area - if _is_projected(dataarray): - dataarray = _add_xy_projected_coords_attrs(dataarray) - else: - dataarray = _add_xy_geographic_coords_attrs(dataarray) - if 'crs' in dataarray.coords: - dataarray = dataarray.drop_vars('crs') - return dataarray - - -def _add_xy_projected_coords_attrs(dataarray, x='x', y='y'): - """Add relevant attributes to x, y coordinates of a projected CRS.""" - if x in dataarray.coords: - dataarray[x].attrs['standard_name'] = 'projection_x_coordinate' - dataarray[x].attrs['units'] = 'm' - if y in dataarray.coords: - dataarray[y].attrs['standard_name'] = 'projection_y_coordinate' - dataarray[y].attrs['units'] = 'm' - return dataarray - - -def _add_xy_geographic_coords_attrs(dataarray, x='x', y='y'): - """Add relevant attributes to x, y coordinates of a geographic CRS.""" - if x in dataarray.coords: - dataarray[x].attrs['standard_name'] = 'longitude' - dataarray[x].attrs['units'] = 'degrees_east' - if y in dataarray.coords: - dataarray[y].attrs['standard_name'] = 'latitude' - dataarray[y].attrs['units'] = 'degrees_north' - return dataarray diff --git a/satpy/writers/cf/crs.py b/satpy/writers/cf/crs.py deleted file mode 100644 index e6952a484f..0000000000 --- a/satpy/writers/cf/crs.py +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -"""CRS utility.""" - -import logging -from contextlib import suppress - -from pyresample.geometry import AreaDefinition, SwathDefinition - -logger = logging.getLogger(__name__) - - -def _is_projected(dataarray): - """Guess whether data are projected or not.""" - crs = _try_to_get_crs(dataarray) - if crs: - return crs.is_projected - units = _try_get_units_from_coords(dataarray) - if units: - if units.endswith("m"): - return True - if units.startswith("degrees"): - return False - logger.warning("Failed to tell if data are projected. Assuming yes.") - return True - - -def _try_to_get_crs(dataarray): - """Try to get a CRS from attributes.""" - if "area" in dataarray.attrs: - if isinstance(dataarray.attrs["area"], AreaDefinition): - return dataarray.attrs["area"].crs - if not isinstance(dataarray.attrs["area"], SwathDefinition): - logger.warning( - f"Could not tell CRS from area of type {type(dataarray.attrs['area']).__name__:s}. 
" - "Assuming projected CRS.") - if "crs" in dataarray.coords: - return dataarray.coords["crs"].item() - - -def _try_get_units_from_coords(dataarray): - """Try to retrieve coordinate x/y units.""" - for c in ["x", "y"]: - with suppress(KeyError): - # If the data has only 1 dimension, it has only one of x or y coords - if "units" in dataarray.coords[c].attrs: - return dataarray.coords[c].attrs["units"] diff --git a/satpy/writers/cf/dataarray.py b/satpy/writers/cf/dataarray.py index a5322cfee4..df52406f96 100644 --- a/satpy/writers/cf/dataarray.py +++ b/satpy/writers/cf/dataarray.py @@ -19,9 +19,9 @@ import logging import warnings +from satpy.writers.cf import EPOCH from satpy.writers.cf.attrs import preprocess_datarray_attrs -from satpy.writers.cf.coords_attrs import add_xy_coords_attrs -from satpy.writers.cf.time import EPOCH, process_time_coord +from satpy.writers.cf.coords import add_xy_coords_attrs, process_time_coord logger = logging.getLogger(__name__) diff --git a/satpy/writers/cf/datasets.py b/satpy/writers/cf/datasets.py index c87e6673d4..0cdf2b8210 100644 --- a/satpy/writers/cf/datasets.py +++ b/satpy/writers/cf/datasets.py @@ -22,7 +22,7 @@ import xarray as xr -from satpy.writers.cf.time import EPOCH +from satpy.writers.cf import EPOCH from satpy.writers.cf_writer import CF_DTYPES, CF_VERSION logger = logging.getLogger(__name__) @@ -77,7 +77,7 @@ def _collect_cf_dataset(list_dataarrays, epoch : str Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using `from satpy.cf_writer import EPOCH` + If None, the default reference time is retrieved using `from satpy.writers.cf import EPOCH` flatten_attrs : bool, optional If True, flatten dict-type attributes. exclude_attrs : list, optional @@ -197,7 +197,7 @@ def collect_cf_datasets(list_dataarrays, epoch (str): Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using `from satpy.cf_writer import EPOCH` + If None, the default reference time is retrieved using `from satpy.writers.cf import EPOCH` flatten_attrs (bool): If True, flatten dict-type attributes. exclude_attrs (list): @@ -228,7 +228,7 @@ def collect_cf_datasets(list_dataarrays, Global attributes to be attached to the xr.Dataset / netCDF4. """ from satpy.writers.cf.attrs import preprocess_header_attrs - from satpy.writers.cf.time import add_time_bounds_dimension + from satpy.writers.cf.coords import add_time_bounds_dimension if not list_dataarrays: raise RuntimeError("None of the requested datasets have been " diff --git a/satpy/writers/cf/time.py b/satpy/writers/cf/time.py deleted file mode 100644 index 4c5cbf5bc9..0000000000 --- a/satpy/writers/cf/time.py +++ /dev/null @@ -1,69 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# Copyright (c) 2017-2023 Satpy developers -# -# This file is part of satpy. -# -# satpy is free software: you can redistribute it and/or modify it under the -# terms of the GNU General Public License as published by the Free Software -# Foundation, either version 3 of the License, or (at your option) any later -# version. -# -# satpy is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
-# -# You should have received a copy of the GNU General Public License along with -# satpy. If not, see . -"""CF processing of time dimension and coordinates.""" -import logging - -import numpy as np -import xarray as xr - -logger = logging.getLogger(__name__) - - -EPOCH = u"seconds since 1970-01-01 00:00:00" - - -def add_time_bounds_dimension(ds, time="time"): - """Add time bound dimension to xr.Dataset.""" - start_times = [] - end_times = [] - for _var_name, data_array in ds.items(): - start_times.append(data_array.attrs.get("start_time", None)) - end_times.append(data_array.attrs.get("end_time", None)) - - start_time = min(start_time for start_time in start_times - if start_time is not None) - end_time = min(end_time for end_time in end_times - if end_time is not None) - ds['time_bnds'] = xr.DataArray([[np.datetime64(start_time), - np.datetime64(end_time)]], - dims=['time', 'bnds_1d']) - ds[time].attrs['bounds'] = "time_bnds" - ds[time].attrs['standard_name'] = "time" - return ds - - -def process_time_coord(dataarray, epoch): - """Process the 'time' coordinate, if existing. - - It expand the DataArray with a time dimension if does not yet exists. - - The function assumes - - - that x and y dimensions have at least shape > 1 - - the time coordinate has size 1 - - """ - if 'time' in dataarray.coords: - dataarray['time'].encoding['units'] = epoch - dataarray['time'].attrs['standard_name'] = 'time' - dataarray['time'].attrs.pop('bounds', None) - - if 'time' not in dataarray.dims and dataarray["time"].size not in dataarray.shape: - dataarray = dataarray.expand_dims('time') - - return dataarray diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 30ca7e784e..096293e2b7 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -163,7 +163,7 @@ from packaging.version import Version from satpy.writers import Writer -from satpy.writers.cf.time import EPOCH +from satpy.writers.cf import EPOCH logger = logging.getLogger(__name__) From e3df20e32171257eeb999bcf272de5d0ae08c7bf Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 10 Oct 2023 18:27:48 +0200 Subject: [PATCH 0549/1416] Unest cf directories --- doc/source/writers.rst | 4 ++-- satpy/_scene_converters.py | 6 ++--- satpy/{writers => }/cf/__init__.py | 0 satpy/{writers => }/cf/area.py | 0 satpy/{writers => }/cf/attrs.py | 2 +- satpy/{writers => }/cf/coords.py | 0 satpy/{writers => }/cf/dataarray.py | 6 ++--- satpy/{writers => }/cf/datasets.py | 22 +++++++------------ satpy/{writers => }/cf/encoding.py | 0 satpy/etc/writers/cf.yaml | 2 +- satpy/scene.py | 2 +- .../{writer_tests => }/cf_tests/__init__.py | 0 .../{writer_tests => }/cf_tests/test_area.py | 16 +++++++------- .../{writer_tests => }/cf_tests/test_attrs.py | 2 +- .../cf_tests/test_coords.py | 4 ++-- .../cf_tests/test_dataaarray.py | 8 +++---- .../cf_tests/test_datasets.py | 8 +++---- .../cf_tests/test_encoding.py | 6 ++--- satpy/tests/writer_tests/test_cf.py | 8 +++---- satpy/writers/cf_writer.py | 14 ++++++------ 20 files changed, 52 insertions(+), 58 deletions(-) rename satpy/{writers => }/cf/__init__.py (100%) rename satpy/{writers => }/cf/area.py (100%) rename satpy/{writers => }/cf/attrs.py (99%) rename satpy/{writers => }/cf/coords.py (100%) rename satpy/{writers => }/cf/dataarray.py (95%) rename satpy/{writers => }/cf/datasets.py (95%) rename satpy/{writers => }/cf/encoding.py (100%) rename satpy/tests/{writer_tests => }/cf_tests/__init__.py (100%) rename satpy/tests/{writer_tests => }/cf_tests/test_area.py (97%) rename 
satpy/tests/{writer_tests => }/cf_tests/test_attrs.py (99%) rename satpy/tests/{writer_tests => }/cf_tests/test_coords.py (96%) rename satpy/tests/{writer_tests => }/cf_tests/test_dataaarray.py (97%) rename satpy/tests/{writer_tests => }/cf_tests/test_datasets.py (95%) rename satpy/tests/{writer_tests => }/cf_tests/test_encoding.py (96%) diff --git a/doc/source/writers.rst b/doc/source/writers.rst index f453f4d5a5..e5c33ecba2 100644 --- a/doc/source/writers.rst +++ b/doc/source/writers.rst @@ -44,9 +44,9 @@ One common parameter across almost all Writers is ``filename`` and - Deprecated from NinJo 7 (use ninjogeotiff) - * - NetCDF (Standard CF) - - :class:`cf ` + - :class:`cf ` - Beta - - :mod:`Usage example ` + - :mod:`Usage example ` * - AWIPS II Tiled NetCDF4 - :class:`awips_tiled ` - Beta diff --git a/satpy/_scene_converters.py b/satpy/_scene_converters.py index c5c0b1c896..a620817236 100644 --- a/satpy/_scene_converters.py +++ b/satpy/_scene_converters.py @@ -66,7 +66,7 @@ def to_xarray(scn, epoch (str): Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using "from satpy.writers.cf import EPOCH" + If None, the default reference time is retrieved using "from satpy.cf import EPOCH" flatten_attrs (bool): If True, flatten dict-type attributes. exclude_attrs (list): @@ -90,8 +90,8 @@ def to_xarray(scn, A CF-compliant xr.Dataset """ - from satpy.writers.cf import EPOCH - from satpy.writers.cf.datasets import collect_cf_datasets + from satpy.cf import EPOCH + from satpy.cf.datasets import collect_cf_datasets if epoch is None: epoch = EPOCH diff --git a/satpy/writers/cf/__init__.py b/satpy/cf/__init__.py similarity index 100% rename from satpy/writers/cf/__init__.py rename to satpy/cf/__init__.py diff --git a/satpy/writers/cf/area.py b/satpy/cf/area.py similarity index 100% rename from satpy/writers/cf/area.py rename to satpy/cf/area.py diff --git a/satpy/writers/cf/attrs.py b/satpy/cf/attrs.py similarity index 99% rename from satpy/writers/cf/attrs.py rename to satpy/cf/attrs.py index d4a41f2bfc..28cc41be98 100644 --- a/satpy/writers/cf/attrs.py +++ b/satpy/cf/attrs.py @@ -71,7 +71,7 @@ def __encode_nc(obj): Raises: ValueError if no such datatype could be found """ - from satpy.writers.cf_writer import NC4_DTYPES + from satpy.cf_writer import NC4_DTYPES if isinstance(obj, int) and not isinstance(obj, (bool, np.bool_)): return obj diff --git a/satpy/writers/cf/coords.py b/satpy/cf/coords.py similarity index 100% rename from satpy/writers/cf/coords.py rename to satpy/cf/coords.py diff --git a/satpy/writers/cf/dataarray.py b/satpy/cf/dataarray.py similarity index 95% rename from satpy/writers/cf/dataarray.py rename to satpy/cf/dataarray.py index df52406f96..661981681b 100644 --- a/satpy/writers/cf/dataarray.py +++ b/satpy/cf/dataarray.py @@ -19,9 +19,9 @@ import logging import warnings -from satpy.writers.cf import EPOCH -from satpy.writers.cf.attrs import preprocess_datarray_attrs -from satpy.writers.cf.coords import add_xy_coords_attrs, process_time_coord +from satpy.cf import EPOCH +from satpy.cf.attrs import preprocess_datarray_attrs +from satpy.cf.coords import add_xy_coords_attrs, process_time_coord logger = logging.getLogger(__name__) diff --git a/satpy/writers/cf/datasets.py b/satpy/cf/datasets.py similarity index 95% rename from satpy/writers/cf/datasets.py rename to satpy/cf/datasets.py index 0cdf2b8210..bcfe622f18 100644 --- a/satpy/writers/cf/datasets.py +++ 
b/satpy/cf/datasets.py @@ -22,8 +22,8 @@ import xarray as xr -from satpy.writers.cf import EPOCH -from satpy.writers.cf_writer import CF_DTYPES, CF_VERSION +from satpy.cf import EPOCH +from satpy.cf_writer import CF_DTYPES, CF_VERSION logger = logging.getLogger(__name__) @@ -77,7 +77,7 @@ def _collect_cf_dataset(list_dataarrays, epoch : str Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using `from satpy.writers.cf import EPOCH` + If None, the default reference time is retrieved using `from satpy.cf import EPOCH` flatten_attrs : bool, optional If True, flatten dict-type attributes. exclude_attrs : list, optional @@ -98,14 +98,8 @@ def _collect_cf_dataset(list_dataarrays, ds : xr.Dataset A partially CF-compliant xr.Dataset """ - from satpy.writers.cf.area import ( - area2cf, - assert_xy_unique, - has_projection_coords, - link_coords, - make_alt_coords_unique, - ) - from satpy.writers.cf.dataarray import make_cf_dataarray + from satpy.cf.area import area2cf, assert_xy_unique, has_projection_coords, link_coords, make_alt_coords_unique + from satpy.cf.dataarray import make_cf_dataarray # Create dictionary of input datarrays # --> Since keys=None, it doesn't never retrieve ancillary variables !!! @@ -197,7 +191,7 @@ def collect_cf_datasets(list_dataarrays, epoch (str): Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using `from satpy.writers.cf import EPOCH` + If None, the default reference time is retrieved using `from satpy.cf import EPOCH` flatten_attrs (bool): If True, flatten dict-type attributes. exclude_attrs (list): @@ -227,8 +221,8 @@ def collect_cf_datasets(list_dataarrays, header_attrs : dict Global attributes to be attached to the xr.Dataset / netCDF4. """ - from satpy.writers.cf.attrs import preprocess_header_attrs - from satpy.writers.cf.coords import add_time_bounds_dimension + from satpy.cf.attrs import preprocess_header_attrs + from satpy.cf.coords import add_time_bounds_dimension if not list_dataarrays: raise RuntimeError("None of the requested datasets have been " diff --git a/satpy/writers/cf/encoding.py b/satpy/cf/encoding.py similarity index 100% rename from satpy/writers/cf/encoding.py rename to satpy/cf/encoding.py diff --git a/satpy/etc/writers/cf.yaml b/satpy/etc/writers/cf.yaml index 5c4a875bec..b141a68c09 100644 --- a/satpy/etc/writers/cf.yaml +++ b/satpy/etc/writers/cf.yaml @@ -1,7 +1,7 @@ writer: name: cf description: Generic netCDF4/CF Writer - writer: !!python/name:satpy.writers.cf_writer.CFWriter + writer: !!python/name:satpy.cf_writer.CFWriter filename: '{name}_{start_time:%Y%m%d_%H%M%S}.nc' compress: DEFLATE zlevel: 6 diff --git a/satpy/scene.py b/satpy/scene.py index 52580d14e8..cebf57253b 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1128,7 +1128,7 @@ def to_xarray(self, epoch (str): Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using "from satpy.writers.cf import EPOCH" + If None, the default reference time is retrieved using "from satpy.cf import EPOCH" flatten_attrs (bool): If True, flatten dict-type attributes. 
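For illustration, the relocated conversion entry point documented above could be exercised like this; the reader name, filename and channel are placeholders, and only keyword arguments listed in the docstrings above are used:

from satpy import Scene

scn = Scene(filenames=["my_seviri_file"], reader="seviri_l1b_hrit")  # placeholder inputs
scn.load(["IR_108"])
# Encode time coordinates against a non-default epoch and flatten dict attrs.
ds = scn.to_xarray(epoch="seconds since 2000-01-01 00:00:00",
                   flatten_attrs=True)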
exclude_attrs (list): diff --git a/satpy/tests/writer_tests/cf_tests/__init__.py b/satpy/tests/cf_tests/__init__.py similarity index 100% rename from satpy/tests/writer_tests/cf_tests/__init__.py rename to satpy/tests/cf_tests/__init__.py diff --git a/satpy/tests/writer_tests/cf_tests/test_area.py b/satpy/tests/cf_tests/test_area.py similarity index 97% rename from satpy/tests/writer_tests/cf_tests/test_area.py rename to satpy/tests/cf_tests/test_area.py index 5b7dd86d38..352bf35a2e 100644 --- a/satpy/tests/writer_tests/cf_tests/test_area.py +++ b/satpy/tests/cf_tests/test_area.py @@ -28,7 +28,7 @@ class TestCFArea: def test_assert_xy_unique(self): """Test that the x and y coordinates are unique.""" - from satpy.writers.cf.area import assert_xy_unique + from satpy.cf.area import assert_xy_unique dummy = [[1, 2], [3, 4]] datas = {'a': xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}), @@ -42,7 +42,7 @@ def test_assert_xy_unique(self): def test_link_coords(self): """Check that coordinates link has been established correctly.""" - from satpy.writers.cf.area import link_coords + from satpy.cf.area import link_coords data = [[1, 2], [3, 4]] lon = np.zeros((2, 2)) @@ -77,7 +77,7 @@ def test_link_coords(self): def test_make_alt_coords_unique(self): """Test that created coordinate variables are unique.""" - from satpy.writers.cf.area import make_alt_coords_unique + from satpy.cf.area import make_alt_coords_unique data = [[1, 2], [3, 4]] y = [1, 2] @@ -122,7 +122,7 @@ def test_make_alt_coords_unique(self): def test_area2cf(self): """Test the conversion of an area to CF standards.""" - from satpy.writers.cf.area import area2cf + from satpy.cf.area import area2cf ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}, attrs={'name': 'var1'}) @@ -168,7 +168,7 @@ def test_area2cf(self): def test__add_grid_mapping(self): """Test the conversion from pyresample area object to CF grid mapping.""" - from satpy.writers.cf.area import _add_grid_mapping + from satpy.cf.area import _add_grid_mapping def _gm_matches(gmapping, expected): """Assert that all keys in ``expected`` match the values in ``gmapping``.""" @@ -352,7 +352,7 @@ def _gm_matches(gmapping, expected): def test__add_lonlat_coords(self): """Test the conversion from areas to lon/lat.""" - from satpy.writers.cf.area import _add_lonlat_coords + from satpy.cf.area import _add_lonlat_coords area = AreaDefinition( 'seviri', @@ -439,14 +439,14 @@ def datasets(self): def test__is_lon_or_lat_dataarray(self, datasets): """Test the _is_lon_or_lat_dataarray function.""" - from satpy.writers.cf.area import _is_lon_or_lat_dataarray + from satpy.cf.area import _is_lon_or_lat_dataarray assert _is_lon_or_lat_dataarray(datasets['lat']) assert not _is_lon_or_lat_dataarray(datasets['var1']) def test_has_projection_coords(self, datasets): """Test the has_projection_coords function.""" - from satpy.writers.cf.area import has_projection_coords + from satpy.cf.area import has_projection_coords assert has_projection_coords(datasets) datasets['lat'].attrs['standard_name'] = 'dummy' diff --git a/satpy/tests/writer_tests/cf_tests/test_attrs.py b/satpy/tests/cf_tests/test_attrs.py similarity index 99% rename from satpy/tests/writer_tests/cf_tests/test_attrs.py rename to satpy/tests/cf_tests/test_attrs.py index 6988e761ee..7eb86e172b 100644 --- a/satpy/tests/writer_tests/cf_tests/test_attrs.py +++ b/satpy/tests/cf_tests/test_attrs.py @@ -111,8 +111,8 @@ def get_test_attrs(self): def 
test__encode_attrs_nc(self): """Test attributes encoding.""" + from satpy.cf.attrs import _encode_attrs_nc from satpy.tests.utils import assert_dict_array_equality - from satpy.writers.cf.attrs import _encode_attrs_nc attrs, expected, _ = self.get_test_attrs() diff --git a/satpy/tests/writer_tests/cf_tests/test_coords.py b/satpy/tests/cf_tests/test_coords.py similarity index 96% rename from satpy/tests/writer_tests/cf_tests/test_coords.py rename to satpy/tests/cf_tests/test_coords.py index 9e9d8c4607..1361c0d5a4 100644 --- a/satpy/tests/writer_tests/cf_tests/test_coords.py +++ b/satpy/tests/cf_tests/test_coords.py @@ -31,7 +31,7 @@ class TestCFtime: def test_add_time_bounds_dimension(self): """Test addition of CF-compliant time attributes.""" - from satpy.writers.cf.coords import add_time_bounds_dimension + from satpy.cf.coords import add_time_bounds_dimension test_array = np.array([[1, 2], [3, 4], [5, 6], [7, 8]]) times = np.array(['2018-05-30T10:05:00', '2018-05-30T10:05:01', @@ -55,7 +55,7 @@ class TestCFcoords: def test_is_projected(self, caplog): """Tests for private _is_projected function.""" - from satpy.writers.cf.coords import _is_projected + from satpy.cf.coords import _is_projected # test case with units but no area da = xr.DataArray( diff --git a/satpy/tests/writer_tests/cf_tests/test_dataaarray.py b/satpy/tests/cf_tests/test_dataaarray.py similarity index 97% rename from satpy/tests/writer_tests/cf_tests/test_dataaarray.py rename to satpy/tests/cf_tests/test_dataaarray.py index 43b87cfc20..0fd3a9d41c 100644 --- a/satpy/tests/writer_tests/cf_tests/test_dataaarray.py +++ b/satpy/tests/cf_tests/test_dataaarray.py @@ -29,7 +29,7 @@ def test_preprocess_dataarray_name(): """Test saving an array to netcdf/cf where dataset name starting with a digit with prefix include orig name.""" from satpy import Scene - from satpy.writers.cf.dataarray import _preprocess_dataarray_name + from satpy.cf.dataarray import _preprocess_dataarray_name scn = Scene() scn['1'] = xr.DataArray([1, 2, 3]) @@ -53,8 +53,8 @@ def test_make_cf_dataarray_lonlat(): """Test correct CF encoding for area with lon/lat units.""" from pyresample import create_area_def + from satpy.cf.dataarray import make_cf_dataarray from satpy.resample import add_crs_xy_coords - from satpy.writers.cf.dataarray import make_cf_dataarray area = create_area_def("mavas", 4326, shape=(5, 5), center=(0, 0), resolution=(1, 1)) @@ -156,8 +156,8 @@ def get_test_attrs(self): def test_make_cf_dataarray(self): """Test the conversion of a DataArray to a CF-compatible DataArray.""" + from satpy.cf.dataarray import make_cf_dataarray from satpy.tests.utils import assert_dict_array_equality - from satpy.writers.cf.dataarray import make_cf_dataarray # Create set of test attributes attrs, attrs_expected, attrs_expected_flat = self.get_test_attrs() @@ -195,7 +195,7 @@ def test_make_cf_dataarray(self): def test_make_cf_dataarray_one_dimensional_array(self): """Test the conversion of an 1d DataArray to a CF-compatible DataArray.""" - from satpy.writers.cf.dataarray import make_cf_dataarray + from satpy.cf.dataarray import make_cf_dataarray arr = xr.DataArray(np.array([1, 2, 3, 4]), attrs={}, dims=('y',), coords={'y': [0, 1, 2, 3], 'acq_time': ('y', [0, 1, 2, 3])}) diff --git a/satpy/tests/writer_tests/cf_tests/test_datasets.py b/satpy/tests/cf_tests/test_datasets.py similarity index 95% rename from satpy/tests/writer_tests/cf_tests/test_datasets.py rename to satpy/tests/cf_tests/test_datasets.py index d92099e869..dc30d1b9d1 100644 --- 
a/satpy/tests/writer_tests/cf_tests/test_datasets.py +++ b/satpy/tests/cf_tests/test_datasets.py @@ -29,7 +29,7 @@ class TestCollectCfDataset: def test_collect_cf_dataset(self): """Test collecting CF datasets from a DataArray objects.""" - from satpy.writers.cf.datasets import _collect_cf_dataset + from satpy.cf.datasets import _collect_cf_dataset geos = AreaDefinition( area_id='geos', @@ -69,7 +69,7 @@ def test_collect_cf_dataset(self): def test_collect_cf_dataset_with_latitude_named_lat(self): """Test collecting CF datasets with latitude named lat.""" - from satpy.writers.cf.datasets import _collect_cf_dataset + from satpy.cf.datasets import _collect_cf_dataset data = [[75, 2], [3, 4]] y = [1, 2] @@ -123,8 +123,8 @@ def test_collect_cf_dataset_with_latitude_named_lat(self): def test_geographic_area_coords_attrs(self): """Test correct storage for area with lon/lat units.""" + from satpy.cf.datasets import _collect_cf_dataset from satpy.tests.utils import make_fake_scene - from satpy.writers.cf.datasets import _collect_cf_dataset scn = make_fake_scene( {"ketolysis": np.arange(25).reshape(5, 5)}, @@ -147,7 +147,7 @@ class TestCollectCfDatasets: def test_empty_collect_cf_datasets(self): """Test that if no DataArrays, collect_cf_datasets raise error.""" - from satpy.writers.cf.datasets import collect_cf_datasets + from satpy.cf.datasets import collect_cf_datasets with pytest.raises(RuntimeError): collect_cf_datasets(list_dataarrays=[]) diff --git a/satpy/tests/writer_tests/cf_tests/test_encoding.py b/satpy/tests/cf_tests/test_encoding.py similarity index 96% rename from satpy/tests/writer_tests/cf_tests/test_encoding.py rename to satpy/tests/cf_tests/test_encoding.py index 125c7eec94..5d49ebc647 100644 --- a/satpy/tests/writer_tests/cf_tests/test_encoding.py +++ b/satpy/tests/cf_tests/test_encoding.py @@ -47,7 +47,7 @@ def fake_ds_digit(self): def test_dataset_name_digit(self, fake_ds_digit): """Test data with dataset name staring with a digit.""" - from satpy.writers.cf.encoding import update_encoding + from satpy.cf.encoding import update_encoding # Dataset with name staring with digit ds_digit = fake_ds_digit @@ -66,7 +66,7 @@ def test_dataset_name_digit(self, fake_ds_digit): def test_without_time(self, fake_ds): """Test data with no time dimension.""" - from satpy.writers.cf.encoding import update_encoding + from satpy.cf.encoding import update_encoding # Without time dimension ds = fake_ds.chunk(2) @@ -98,7 +98,7 @@ def test_without_time(self, fake_ds): def test_with_time(self, fake_ds): """Test data with a time dimension.""" - from satpy.writers.cf.encoding import update_encoding + from satpy.cf.encoding import update_encoding # With time dimension ds = fake_ds.chunk(8).expand_dims({'time': [datetime.datetime(2009, 7, 1, 12, 15)]}) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index f50a8e99d3..ba00ae4545 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -28,8 +28,8 @@ from packaging.version import Version from satpy import Scene +from satpy.cf_writer import _get_backend_versions from satpy.tests.utils import make_dsq -from satpy.writers.cf_writer import _get_backend_versions try: from pyproj import CRS @@ -66,8 +66,8 @@ class TestCFWriter: def test_init(self): """Test initializing the CFWriter class.""" + from satpy.cf_writer import CFWriter from satpy.writers import configs_for_writer - from satpy.writers.cf_writer import CFWriter CFWriter(config_files=list(configs_for_writer('cf'))[0]) @@ -403,11 
+403,11 @@ def test_load_module_with_old_pyproj(self): old_version = sys.modules['pyproj'].__version__ sys.modules['pyproj'].__version__ = "1.9.6" try: - importlib.reload(sys.modules['satpy.writers.cf_writer']) + importlib.reload(sys.modules['satpy.cf_writer']) finally: # Tear down sys.modules['pyproj'].__version__ = old_version - importlib.reload(sys.modules['satpy.writers.cf_writer']) + importlib.reload(sys.modules['satpy.cf_writer']) def test_global_attr_default_history_and_Conventions(self): """Test saving global attributes history and Conventions.""" diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 096293e2b7..4093b7877b 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -162,8 +162,8 @@ import xarray as xr from packaging.version import Version +from satpy.cf import EPOCH from satpy.writers import Writer -from satpy.writers.cf import EPOCH logger = logging.getLogger(__name__) @@ -270,8 +270,8 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. """ - from satpy.writers.cf.datasets import collect_cf_datasets - from satpy.writers.cf.encoding import update_encoding + from satpy.cf.datasets import collect_cf_datasets + from satpy.cf.encoding import update_encoding logger.info('Saving datasets to NetCDF4/CF.') _check_backend_versions() @@ -344,9 +344,9 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, numeric_name_prefix (str): Prepend dataset name with this if starting with a digit """ - from satpy.writers.cf.dataarray import make_cf_dataarray + from satpy.cf.dataarray import make_cf_dataarray warnings.warn('CFWriter.da2cf is deprecated.' - 'Use satpy.writers.cf.dataarray.make_cf_dataarray instead.', + 'Use satpy.cf.dataarray.make_cf_dataarray instead.', DeprecationWarning, stacklevel=3) return make_cf_dataarray(dataarray=dataarray, epoch=epoch, @@ -358,10 +358,10 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, @staticmethod def update_encoding(dataset, to_netcdf_kwargs): """Update encoding info (deprecated).""" - from satpy.writers.cf.encoding import update_encoding + from satpy.cf.encoding import update_encoding warnings.warn('CFWriter.update_encoding is deprecated. 
' - 'Use satpy.writers.cf.encoding.update_encoding instead.', + 'Use satpy.cf.encoding.update_encoding instead.', DeprecationWarning, stacklevel=3) return update_encoding(dataset, to_netcdf_kwargs) From bf337195456cab9c541d633bf0b35d6d866149d2 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 10 Oct 2023 18:50:16 +0200 Subject: [PATCH 0550/1416] Fix imports error --- doc/source/writers.rst | 4 ++-- satpy/cf/attrs.py | 2 +- satpy/cf/datasets.py | 2 +- satpy/readers/amsr2_l2_gaasp.py | 4 ++-- satpy/tests/writer_tests/test_cf.py | 4 ++-- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/doc/source/writers.rst b/doc/source/writers.rst index e5c33ecba2..f453f4d5a5 100644 --- a/doc/source/writers.rst +++ b/doc/source/writers.rst @@ -44,9 +44,9 @@ One common parameter across almost all Writers is ``filename`` and - Deprecated from NinJo 7 (use ninjogeotiff) - * - NetCDF (Standard CF) - - :class:`cf ` + - :class:`cf ` - Beta - - :mod:`Usage example ` + - :mod:`Usage example ` * - AWIPS II Tiled NetCDF4 - :class:`awips_tiled ` - Beta diff --git a/satpy/cf/attrs.py b/satpy/cf/attrs.py index 28cc41be98..d4a41f2bfc 100644 --- a/satpy/cf/attrs.py +++ b/satpy/cf/attrs.py @@ -71,7 +71,7 @@ def __encode_nc(obj): Raises: ValueError if no such datatype could be found """ - from satpy.cf_writer import NC4_DTYPES + from satpy.writers.cf_writer import NC4_DTYPES if isinstance(obj, int) and not isinstance(obj, (bool, np.bool_)): return obj diff --git a/satpy/cf/datasets.py b/satpy/cf/datasets.py index bcfe622f18..c2799ab8d1 100644 --- a/satpy/cf/datasets.py +++ b/satpy/cf/datasets.py @@ -23,7 +23,7 @@ import xarray as xr from satpy.cf import EPOCH -from satpy.cf_writer import CF_DTYPES, CF_VERSION +from satpy.writers.cf_writer import CF_DTYPES, CF_VERSION logger = logging.getLogger(__name__) diff --git a/satpy/readers/amsr2_l2_gaasp.py b/satpy/readers/amsr2_l2_gaasp.py index 4f045057b4..5f91e2d965 100644 --- a/satpy/readers/amsr2_l2_gaasp.py +++ b/satpy/readers/amsr2_l2_gaasp.py @@ -189,7 +189,7 @@ def _available_if_this_file_type(self, configured_datasets): continue yield self.file_type_matches(ds_info['file_type']), ds_info - def __add_lonlat_coords(self, data_arr, ds_info): + def _add_lonlat_coords(self, data_arr, ds_info): lat_coord = None lon_coord = None for coord_name in data_arr.coords: @@ -209,7 +209,7 @@ def _get_ds_info_for_data_arr(self, var_name, data_arr): if x_dim_name in self.dim_resolutions: ds_info['resolution'] = self.dim_resolutions[x_dim_name] if not self.is_gridded and data_arr.coords: - self.__add_lonlat_coords(data_arr, ds_info) + self._add_lonlat_coords(data_arr, ds_info) return ds_info def _is_2d_yx_data_array(self, data_arr): diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index ba00ae4545..31e59a2fb7 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -28,8 +28,8 @@ from packaging.version import Version from satpy import Scene -from satpy.cf_writer import _get_backend_versions from satpy.tests.utils import make_dsq +from satpy.writers.cf_writer import _get_backend_versions try: from pyproj import CRS @@ -66,8 +66,8 @@ class TestCFWriter: def test_init(self): """Test initializing the CFWriter class.""" - from satpy.cf_writer import CFWriter from satpy.writers import configs_for_writer + from satpy.writers.cf_writer import CFWriter CFWriter(config_files=list(configs_for_writer('cf'))[0]) From 0dae7746f5180d7f7ab9f960dd7150e63b710ef5 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 10 Oct 2023 
20:23:25 +0200 Subject: [PATCH 0551/1416] Rename functions and refactor --- CHANGELOG.md | 2 +- satpy/cf/area.py | 114 ---------------- satpy/cf/coords.py | 168 ++++++++++++++++++++---- satpy/cf/dataarray.py | 5 +- satpy/cf/datasets.py | 45 ++++--- satpy/etc/writers/cf.yaml | 2 +- satpy/tests/cf_tests/_test_data.py | 111 ++++++++++++++++ satpy/tests/cf_tests/test_area.py | 147 --------------------- satpy/tests/cf_tests/test_attrs.py | 90 +------------ satpy/tests/cf_tests/test_coords.py | 151 ++++++++++++++++++++- satpy/tests/cf_tests/test_dataaarray.py | 90 +------------ 11 files changed, 442 insertions(+), 483 deletions(-) create mode 100644 satpy/tests/cf_tests/_test_data.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 799ae0a867..12c8e50194 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1143,7 +1143,7 @@ In this release 6 issues were closed. * [PR 1606](https://github.com/pytroll/satpy/pull/1606) - Make FSFile hashable again ([1605](https://github.com/pytroll/satpy/issues/1605), [1604](https://github.com/pytroll/satpy/issues/1604)) * [PR 1603](https://github.com/pytroll/satpy/pull/1603) - Update slstr_l2.yaml * [PR 1600](https://github.com/pytroll/satpy/pull/1600) - When setting `upper_right_corner` make sure that all dataset coordinates are flipped -* [PR 1588](https://github.com/pytroll/satpy/pull/1588) - Bugfix of link_coords ([1493](https://github.com/pytroll/satpy/issues/1493)) +* [PR 1588](https://github.com/pytroll/satpy/pull/1588) - Bugfix of add_coordinates_attrs_coords ([1493](https://github.com/pytroll/satpy/issues/1493)) #### Features added diff --git a/satpy/cf/area.py b/satpy/cf/area.py index 5916083d62..2eec0efd4a 100644 --- a/satpy/cf/area.py +++ b/satpy/cf/area.py @@ -17,11 +17,8 @@ # satpy. If not, see . """CF processing of pyresample area information.""" import logging -import warnings -from collections import defaultdict import xarray as xr -from dask.base import tokenize from packaging.version import Version from pyresample.geometry import AreaDefinition, SwathDefinition @@ -79,114 +76,3 @@ def area2cf(dataarray, include_lonlats=False, got_lonlats=False): res.append(gmapping) res.append(dataarray) return res - - -def _is_lon_or_lat_dataarray(dataarray): - """Check if the DataArray represents the latitude or longitude coordinate.""" - if 'standard_name' in dataarray.attrs and dataarray.attrs['standard_name'] in ['longitude', 'latitude']: - return True - return False - - -def has_projection_coords(ds_collection): - """Check if DataArray collection has a "longitude" or "latitude" DataArray.""" - for dataarray in ds_collection.values(): - if _is_lon_or_lat_dataarray(dataarray): - return True - return False - - -def make_alt_coords_unique(datas, pretty=False): - """Make non-dimensional coordinates unique among all datasets. - - Non-dimensional (or alternative) coordinates, such as scanline timestamps, - may occur in multiple datasets with the same name and dimension - but different values. - - In order to avoid conflicts, prepend the dataset name to the coordinate name. - If a non-dimensional coordinate is unique among all datasets and ``pretty=True``, - its name will not be modified. - - Since all datasets must have the same projection coordinates, - this is not applied to latitude and longitude. - - Args: - datas (dict): - Dictionary of (dataset name, dataset) - pretty (bool): - Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. 
- - Returns: - Dictionary holding the updated datasets - - """ - # Determine which non-dimensional coordinates are unique - tokens = defaultdict(set) - for dataset in datas.values(): - for coord_name in dataset.coords: - if not _is_lon_or_lat_dataarray(dataset[coord_name]) and coord_name not in dataset.dims: - tokens[coord_name].add(tokenize(dataset[coord_name].data)) - coords_unique = dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()]) - - # Prepend dataset name, if not unique or no pretty-format desired - new_datas = datas.copy() - for coord_name, unique in coords_unique.items(): - if not pretty or not unique: - if pretty: - warnings.warn( - 'Cannot pretty-format "{}" coordinates because they are ' - 'not identical among the given datasets'.format(coord_name), - stacklevel=2 - ) - for ds_name, dataset in datas.items(): - if coord_name in dataset.coords: - rename = {coord_name: '{}_{}'.format(ds_name, coord_name)} - new_datas[ds_name] = new_datas[ds_name].rename(rename) - - return new_datas - - -def assert_xy_unique(datas): - """Check that all datasets share the same projection coordinates x/y.""" - unique_x = set() - unique_y = set() - for dataset in datas.values(): - if 'y' in dataset.dims: - token_y = tokenize(dataset['y'].data) - unique_y.add(token_y) - if 'x' in dataset.dims: - token_x = tokenize(dataset['x'].data) - unique_x.add(token_x) - if len(unique_x) > 1 or len(unique_y) > 1: - raise ValueError('Datasets to be saved in one file (or one group) must have identical projection coordinates. ' - 'Please group them by area or save them in separate files.') - - -def link_coords(datas): - """Link dataarrays and coordinates. - - If the `coordinates` attribute of a data array links to other dataarrays in the scene, for example - `coordinates='lon lat'`, add them as coordinates to the data array and drop that attribute. In the final call to - `xr.Dataset.to_netcdf()` all coordinate relations will be resolved and the `coordinates` attributes be set - automatically. - - """ - for da_name, data in datas.items(): - declared_coordinates = data.attrs.get('coordinates', []) - if isinstance(declared_coordinates, str): - declared_coordinates = declared_coordinates.split(' ') - for coord in declared_coordinates: - if coord not in data.coords: - try: - dimensions_not_in_data = list(set(datas[coord].dims) - set(data.dims)) - data[coord] = datas[coord].squeeze(dimensions_not_in_data, drop=True) - except KeyError: - warnings.warn( - 'Coordinate "{}" referenced by dataarray {} does not ' - 'exist, dropping reference.'.format(coord, da_name), - stacklevel=2 - ) - continue - - # Drop 'coordinates' attribute in any case to avoid conflicts in xr.Dataset.to_netcdf() - data.attrs.pop('coordinates', None) diff --git a/satpy/cf/coords.py b/satpy/cf/coords.py index dee28952b5..0c5acc7df4 100644 --- a/satpy/cf/coords.py +++ b/satpy/cf/coords.py @@ -3,10 +3,13 @@ """Set CF-compliant spatial and temporal coordinates.""" import logging +import warnings +from collections import defaultdict from contextlib import suppress import numpy as np import xarray as xr +from dask.base import tokenize from pyresample.geometry import AreaDefinition, SwathDefinition logger = logging.getLogger(__name__) @@ -86,6 +89,149 @@ def _add_xy_geographic_coords_attrs(dataarray, x='x', y='y'): return dataarray +def set_cf_time_info(dataarray, epoch): + """Set CF time attributes and encoding. + + It expand the DataArray with a time dimension if does not yet exists. 
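The renamed `set_cf_time_info` (its docstring continues below) can be sketched in use as follows; the assertions reflect the described behaviour of promoting a size-1 time coordinate to a leading dimension:

import numpy as np
import xarray as xr

arr = xr.DataArray(np.zeros((2, 2)), dims=("y", "x"),
                   coords={"time": np.datetime64("2018-01-01T00:00:00")})
arr = set_cf_time_info(arr, epoch="seconds since 1970-01-01 00:00:00")
# The scalar time coordinate was expanded into a size-1 leading dimension,
# and its encoding now carries the epoch as CF units.
assert arr.dims == ("time", "y", "x")
assert arr["time"].attrs["standard_name"] == "time"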
+ + The function assumes + + - that x and y dimensions have at least shape > 1 + - the time coordinate has size 1 + + """ + dataarray['time'].encoding['units'] = epoch + dataarray['time'].attrs['standard_name'] = 'time' + dataarray['time'].attrs.pop('bounds', None) + + if 'time' not in dataarray.dims and dataarray["time"].size not in dataarray.shape: + dataarray = dataarray.expand_dims('time') + + return dataarray + + +def _is_lon_or_lat_dataarray(dataarray): + """Check if the DataArray represents the latitude or longitude coordinate.""" + if 'standard_name' in dataarray.attrs and dataarray.attrs['standard_name'] in ['longitude', 'latitude']: + return True + return False + + +def has_projection_coords(dict_datarrays): + """Check if DataArray collection has a "longitude" or "latitude" DataArray.""" + for dataarray in dict_datarrays.values(): + if _is_lon_or_lat_dataarray(dataarray): + return True + return False + + +def ensure_unique_nondimensional_coords(dict_dataarrays, pretty=False): + """Make non-dimensional coordinates unique among all datasets. + + Non-dimensional coordinates, such as scanline timestamps, + may occur in multiple datasets with the same name and dimension + but different values. + + In order to avoid conflicts, prepend the dataset name to the coordinate name. + If a non-dimensional coordinate is unique among all datasets and ``pretty=True``, + its name will not be modified. + + Since all datasets must have the same projection coordinates, + this is not applied to latitude and longitude. + + Args: + datas (dict): + Dictionary of (dataset name, dataset) + pretty (bool): + Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. + + Returns: + Dictionary holding the updated datasets + + """ + # Determine which non-dimensional coordinates are unique + # - coords_unique has structure: {coord_name: True/False} + tokens = defaultdict(set) + for dataarray in dict_dataarrays.values(): + for coord_name in dataarray.coords: + if not _is_lon_or_lat_dataarray(dataarray[coord_name]) and coord_name not in dataarray.dims: + tokens[coord_name].add(tokenize(dataarray[coord_name].data)) + coords_unique = dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()]) + + # Prepend dataset name, if not unique or no pretty-format desired + new_dict_dataarrays = dict_dataarrays.copy() + for coord_name, unique in coords_unique.items(): + if not pretty or not unique: + if pretty: + warnings.warn( + 'Cannot pretty-format "{}" coordinates because they are ' + 'not identical among the given datasets'.format(coord_name), + stacklevel=2 + ) + for name, dataarray in dict_dataarrays.items(): + if coord_name in dataarray.coords: + rename = {coord_name: '{}_{}'.format(name, coord_name)} + new_dict_dataarrays[name] = new_dict_dataarrays[name].rename(rename) + + return new_dict_dataarrays + + +def check_unique_projection_coords(dict_dataarrays): + """Check that all datasets share the same projection coordinates x/y.""" + unique_x = set() + unique_y = set() + for dataarray in dict_dataarrays.values(): + if 'y' in dataarray.dims: + token_y = tokenize(dataarray['y'].data) + unique_y.add(token_y) + if 'x' in dataarray.dims: + token_x = tokenize(dataarray['x'].data) + unique_x.add(token_x) + if len(unique_x) > 1 or len(unique_y) > 1: + raise ValueError('Datasets to be saved in one file (or one group) must have identical projection coordinates. 
' + 'Please group them by area or save them in separate files.') + + +def add_coordinates_attrs_coords(dict_dataarrays): + """Add to DataArrays the coordinates specified in the 'coordinates' attribute. + + It deal with the 'coordinates' attributes indicating lat/lon coords + The 'coordinates' attribute is dropped from each DataArray + + If the `coordinates` attribute of a data array links to other dataarrays in the scene, for example + `coordinates='lon lat'`, add them as coordinates to the data array and drop that attribute. + + In the final call to `xr.Dataset.to_netcdf()` all coordinate relations will be resolved + and the `coordinates` attributes be set automatically. + """ + for da_name, dataarray in dict_dataarrays.items(): + declared_coordinates = _get_coordinates_list(dataarray) + for coord in declared_coordinates: + if coord not in dataarray.coords: + try: + dimensions_not_in_data = list(set(dict_dataarrays[coord].dims) - set(dataarray.dims)) + dataarray[coord] = dict_dataarrays[coord].squeeze(dimensions_not_in_data, drop=True) + except KeyError: + warnings.warn( + 'Coordinate "{}" referenced by dataarray {} does not ' + 'exist, dropping reference.'.format(coord, da_name), + stacklevel=2 + ) + continue + + # Drop 'coordinates' attribute in any case to avoid conflicts in xr.Dataset.to_netcdf() + dataarray.attrs.pop('coordinates', None) + return dict_dataarrays + + +def _get_coordinates_list(dataarray): + """Return a list with the coordinates names specified in the 'coordinates' attribute.""" + declared_coordinates = dataarray.attrs.get('coordinates', []) + if isinstance(declared_coordinates, str): + declared_coordinates = declared_coordinates.split(' ') + return declared_coordinates + + def add_time_bounds_dimension(ds, time="time"): """Add time bound dimension to xr.Dataset.""" start_times = [] @@ -104,25 +250,3 @@ def add_time_bounds_dimension(ds, time="time"): ds[time].attrs['bounds'] = "time_bnds" ds[time].attrs['standard_name'] = "time" return ds - - -def process_time_coord(dataarray, epoch): - """Process the 'time' coordinate, if existing. - - It expand the DataArray with a time dimension if does not yet exists. 
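In use, the renamed `add_coordinates_attrs_coords` above resolves a declared coordinates='lon lat' attribute into real coordinates and drops the attribute, mirroring the behaviour of the former link_coords:

import numpy as np
import xarray as xr

dict_dataarrays = {
    "var1": xr.DataArray(np.zeros((2, 2)), dims=("y", "x"),
                         attrs={"coordinates": "lon lat"}),
    "lon": xr.DataArray(np.zeros((2, 2)), dims=("y", "x")),
    "lat": xr.DataArray(np.ones((2, 2)), dims=("y", "x")),
}
dict_dataarrays = add_coordinates_attrs_coords(dict_dataarrays)
assert "lon" in dict_dataarrays["var1"].coords
assert "lat" in dict_dataarrays["var1"].coords
assert "coordinates" not in dict_dataarrays["var1"].attrs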
- - The function assumes - - - that x and y dimensions have at least shape > 1 - - the time coordinate has size 1 - - """ - if 'time' in dataarray.coords: - dataarray['time'].encoding['units'] = epoch - dataarray['time'].attrs['standard_name'] = 'time' - dataarray['time'].attrs.pop('bounds', None) - - if 'time' not in dataarray.dims and dataarray["time"].size not in dataarray.shape: - dataarray = dataarray.expand_dims('time') - - return dataarray diff --git a/satpy/cf/dataarray.py b/satpy/cf/dataarray.py index 661981681b..8a3c78a476 100644 --- a/satpy/cf/dataarray.py +++ b/satpy/cf/dataarray.py @@ -21,7 +21,7 @@ from satpy.cf import EPOCH from satpy.cf.attrs import preprocess_datarray_attrs -from satpy.cf.coords import add_xy_coords_attrs, process_time_coord +from satpy.cf.coords import add_xy_coords_attrs, set_cf_time_info logger = logging.getLogger(__name__) @@ -96,5 +96,6 @@ def make_cf_dataarray(dataarray, flatten_attrs=flatten_attrs, exclude_attrs=exclude_attrs) dataarray = add_xy_coords_attrs(dataarray) - dataarray = process_time_coord(dataarray, epoch=epoch) + if 'time' in dataarray.coords: + dataarray = set_cf_time_info(dataarray, epoch=epoch) return dataarray diff --git a/satpy/cf/datasets.py b/satpy/cf/datasets.py index c2799ab8d1..3dcbb33985 100644 --- a/satpy/cf/datasets.py +++ b/satpy/cf/datasets.py @@ -30,16 +30,16 @@ def _get_extra_ds(dataarray, keys=None): """Get the ancillary_variables DataArrays associated to a dataset.""" - ds_collection = {} + dict_datarrays = {} # Retrieve ancillary variable datarrays for ancillary_dataarray in dataarray.attrs.get('ancillary_variables', []): ancillary_variable = ancillary_dataarray.name if keys and ancillary_variable not in keys: keys.append(ancillary_variable) - ds_collection.update(_get_extra_ds(ancillary_dataarray, keys=keys)) + dict_datarrays.update(_get_extra_ds(ancillary_dataarray, keys=keys)) # Add input dataarray - ds_collection[dataarray.attrs['name']] = dataarray - return ds_collection + dict_datarrays[dataarray.attrs['name']] = dataarray + return dict_datarrays def _get_groups(groups, list_datarrays): @@ -98,23 +98,29 @@ def _collect_cf_dataset(list_dataarrays, ds : xr.Dataset A partially CF-compliant xr.Dataset """ - from satpy.cf.area import area2cf, assert_xy_unique, has_projection_coords, link_coords, make_alt_coords_unique + from satpy.cf.area import area2cf + from satpy.cf.coords import ( + add_coordinates_attrs_coords, + check_unique_projection_coords, + ensure_unique_nondimensional_coords, + has_projection_coords, + ) from satpy.cf.dataarray import make_cf_dataarray # Create dictionary of input datarrays # --> Since keys=None, it doesn't never retrieve ancillary variables !!! 
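The renaming above does not change the collection logic of `_get_extra_ds`: DataArrays listed under ancillary_variables are gathered recursively and keyed by their 'name' attribute. A short sketch with hypothetical variable names:

import numpy as np
import xarray as xr

quality = xr.DataArray(np.zeros(3), dims=("x",), name="quality",
                       attrs={"name": "quality"})
main = xr.DataArray(np.zeros(3), dims=("x",),
                    attrs={"name": "var1",
                           "ancillary_variables": [quality]})
collected = _get_extra_ds(main)
assert set(collected) == {"var1", "quality"}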
- ds_collection = {} + dict_dataarrays = {} for dataarray in list_dataarrays: - ds_collection.update(_get_extra_ds(dataarray)) + dict_dataarrays.update(_get_extra_ds(dataarray)) # Check if one DataArray in the collection has 'longitude' or 'latitude' - got_lonlats = has_projection_coords(ds_collection) + got_lonlats = has_projection_coords(dict_dataarrays) # Sort dictionary by keys name - ds_collection = dict(sorted(ds_collection.items())) + dict_dataarrays = dict(sorted(dict_dataarrays.items())) - dict_dataarrays = {} - for dataarray in ds_collection.values(): + dict_cf_dataarrays = {} + for dataarray in dict_dataarrays.values(): dataarray_type = dataarray.dtype if dataarray_type not in CF_DTYPES: warnings.warn( @@ -147,23 +153,24 @@ def _collect_cf_dataset(list_dataarrays, exclude_attrs=exclude_attrs, include_orig_name=include_orig_name, numeric_name_prefix=numeric_name_prefix) - dict_dataarrays[new_dataarray.name] = new_dataarray + dict_cf_dataarrays[new_dataarray.name] = new_dataarray - # Check all DataArray have same size - assert_xy_unique(dict_dataarrays) + # Check all DataArrays have same projection coordinates + check_unique_projection_coords(dict_cf_dataarrays) - # Deal with the 'coordinates' attributes indicating lat/lon coords - # NOTE: this currently is dropped by default !!! - link_coords(dict_dataarrays) + # Add to DataArrays the coordinates specified in the 'coordinates' attribute + # - Deal with the 'coordinates' attributes indicating lat/lon coords + # - The 'coordinates' attribute is dropped from each DataArray + dict_cf_dataarrays = add_coordinates_attrs_coords(dict_cf_dataarrays) # Ensure non-dimensional coordinates to be unique across DataArrays # --> If not unique, prepend the DataArray name to the coordinate # --> If unique, does not prepend the DataArray name only if pretty=True # --> 'longitude' and 'latitude' coordinates are not prepended - dict_dataarrays = make_alt_coords_unique(dict_dataarrays, pretty=pretty) + dict_cf_dataarrays = ensure_unique_nondimensional_coords(dict_cf_dataarrays, pretty=pretty) # Create a xr.Dataset - ds = xr.Dataset(dict_dataarrays) + ds = xr.Dataset(dict_cf_dataarrays) return ds diff --git a/satpy/etc/writers/cf.yaml b/satpy/etc/writers/cf.yaml index b141a68c09..5c4a875bec 100644 --- a/satpy/etc/writers/cf.yaml +++ b/satpy/etc/writers/cf.yaml @@ -1,7 +1,7 @@ writer: name: cf description: Generic netCDF4/CF Writer - writer: !!python/name:satpy.cf_writer.CFWriter + writer: !!python/name:satpy.writers.cf_writer.CFWriter filename: '{name}_{start_time:%Y%m%d_%H%M%S}.nc' compress: DEFLATE zlevel: 6 diff --git a/satpy/tests/cf_tests/_test_data.py b/satpy/tests/cf_tests/_test_data.py new file mode 100644 index 0000000000..2ea13afd2e --- /dev/null +++ b/satpy/tests/cf_tests/_test_data.py @@ -0,0 +1,111 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . 
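The new shared test-data module is presumably consumed by the attribute and dataarray tests along these lines (a hypothetical import; the function itself is shown in full below):

from satpy.tests.cf_tests._test_data import get_test_attrs

attrs, encoded, encoded_flat = get_test_attrs()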
+"""Functions and fixture to test CF code.""" +import datetime +from collections import OrderedDict + +import numpy as np + + +def get_test_attrs(): + """Create some dataset attributes for testing purpose. + + Returns: + Attributes, encoded attributes, encoded and flattened attributes + + """ + attrs = { + 'name': 'IR_108', + 'start_time': datetime.datetime(2018, 1, 1, 0), + 'end_time': datetime.datetime(2018, 1, 1, 0, 15), + 'int': 1, + 'float': 1.0, + 'none': None, # should be dropped + 'numpy_int': np.uint8(1), + 'numpy_float': np.float32(1), + 'numpy_bool': True, + 'numpy_void': np.void(0), + 'numpy_bytes': np.bytes_('test'), + 'numpy_string': np.str_('test'), + 'list': [1, 2, np.float64(3)], + 'nested_list': ["1", ["2", [3]]], + 'bool': True, + 'array': np.array([1, 2, 3], dtype='uint8'), + 'array_bool': np.array([True, False, True]), + 'array_2d': np.array([[1, 2], [3, 4]]), + 'array_3d': np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), + 'dict': {'a': 1, 'b': 2}, + 'nested_dict': {'l1': {'l2': {'l3': np.array([1, 2, 3], dtype='uint8')}}}, + 'raw_metadata': OrderedDict([ + ('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])), + ('flag', np.bool_(True)), + ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))])) + ]) + } + encoded = { + 'name': 'IR_108', + 'start_time': '2018-01-01 00:00:00', + 'end_time': '2018-01-01 00:15:00', + 'int': 1, + 'float': 1.0, + 'numpy_int': np.uint8(1), + 'numpy_float': np.float32(1), + 'numpy_bool': 'true', + 'numpy_void': '[]', + 'numpy_bytes': 'test', + 'numpy_string': 'test', + 'list': [1, 2, np.float64(3)], + 'nested_list': '["1", ["2", [3]]]', + 'bool': 'true', + 'array': np.array([1, 2, 3], dtype='uint8'), + 'array_bool': ['true', 'false', 'true'], + 'array_2d': '[[1, 2], [3, 4]]', + 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', + 'dict': '{"a": 1, "b": 2}', + 'nested_dict': '{"l1": {"l2": {"l3": [1, 2, 3]}}}', + 'raw_metadata': '{"recarray": [[0, 0], [0, 0], [0, 0]], ' + '"flag": "true", "dict": {"a": 1, "b": [1, 2, 3]}}' + } + encoded_flat = { + 'name': 'IR_108', + 'start_time': '2018-01-01 00:00:00', + 'end_time': '2018-01-01 00:15:00', + 'int': 1, + 'float': 1.0, + 'numpy_int': np.uint8(1), + 'numpy_float': np.float32(1), + 'numpy_bool': 'true', + 'numpy_void': '[]', + 'numpy_bytes': 'test', + 'numpy_string': 'test', + 'list': [1, 2, np.float64(3)], + 'nested_list': '["1", ["2", [3]]]', + 'bool': 'true', + 'array': np.array([1, 2, 3], dtype='uint8'), + 'array_bool': ['true', 'false', 'true'], + 'array_2d': '[[1, 2], [3, 4]]', + 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', + 'dict_a': 1, + 'dict_b': 2, + 'nested_dict_l1_l2_l3': np.array([1, 2, 3], dtype='uint8'), + 'raw_metadata_recarray': '[[0, 0], [0, 0], [0, 0]]', + 'raw_metadata_flag': 'true', + 'raw_metadata_dict_a': 1, + 'raw_metadata_dict_b': np.array([1, 2, 3], dtype='uint8') + } + return attrs, encoded, encoded_flat diff --git a/satpy/tests/cf_tests/test_area.py b/satpy/tests/cf_tests/test_area.py index 352bf35a2e..e34118c7cb 100644 --- a/satpy/tests/cf_tests/test_area.py +++ b/satpy/tests/cf_tests/test_area.py @@ -18,7 +18,6 @@ """Tests for the CF Area.""" import dask.array as da import numpy as np -import pytest import xarray as xr from pyresample import AreaDefinition, SwathDefinition @@ -26,100 +25,6 @@ class TestCFArea: """Test case for CF Area.""" - def test_assert_xy_unique(self): - """Test that the x and y coordinates are unique.""" - from satpy.cf.area import assert_xy_unique - - dummy = [[1, 2], [3, 4]] - datas = {'a': 
xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}), - 'b': xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}), - 'n': xr.DataArray(data=dummy, dims=('v', 'w'), coords={'v': [1, 2], 'w': [3, 4]})} - assert_xy_unique(datas) - - datas['c'] = xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 3], 'x': [3, 4]}) - with pytest.raises(ValueError): - assert_xy_unique(datas) - - def test_link_coords(self): - """Check that coordinates link has been established correctly.""" - from satpy.cf.area import link_coords - - data = [[1, 2], [3, 4]] - lon = np.zeros((2, 2)) - lon2 = np.zeros((1, 2, 2)) - lat = np.ones((2, 2)) - datasets = { - 'var1': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'lon lat'}), - 'var2': xr.DataArray(data=data, dims=('y', 'x')), - 'var3': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'lon2 lat'}), - 'var4': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'not_exist lon lat'}), - 'lon': xr.DataArray(data=lon, dims=('y', 'x')), - 'lon2': xr.DataArray(data=lon2, dims=('time', 'y', 'x')), - 'lat': xr.DataArray(data=lat, dims=('y', 'x')) - } - - link_coords(datasets) - - # Check that link has been established correctly and 'coordinate' atrribute has been dropped - assert 'lon' in datasets['var1'].coords - assert 'lat' in datasets['var1'].coords - np.testing.assert_array_equal(datasets['var1']['lon'].data, lon) - np.testing.assert_array_equal(datasets['var1']['lat'].data, lat) - assert 'coordinates' not in datasets['var1'].attrs - - # There should be no link if there was no 'coordinate' attribute - assert 'lon' not in datasets['var2'].coords - assert 'lat' not in datasets['var2'].coords - - # The non-existent dimension or coordinate should be dropped - assert 'time' not in datasets['var3'].coords - assert 'not_exist' not in datasets['var4'].coords - - def test_make_alt_coords_unique(self): - """Test that created coordinate variables are unique.""" - from satpy.cf.area import make_alt_coords_unique - - data = [[1, 2], [3, 4]] - y = [1, 2] - x = [1, 2] - time1 = [1, 2] - time2 = [3, 4] - datasets = {'var1': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x, 'acq_time': ('y', time1)}), - 'var2': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x, 'acq_time': ('y', time2)})} - - # Test that dataset names are prepended to alternative coordinates - res = make_alt_coords_unique(datasets) - np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) - np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) - assert 'acq_time' not in res['var1'].coords - assert 'acq_time' not in res['var2'].coords - - # Make sure nothing else is modified - np.testing.assert_array_equal(res['var1']['x'], x) - np.testing.assert_array_equal(res['var1']['y'], y) - np.testing.assert_array_equal(res['var2']['x'], x) - np.testing.assert_array_equal(res['var2']['y'], y) - - # Coords not unique -> Dataset names must be prepended, even if pretty=True - with pytest.warns(UserWarning, match='Cannot pretty-format "acq_time"'): - res = make_alt_coords_unique(datasets, pretty=True) - np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) - np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) - assert 'acq_time' not in res['var1'].coords - assert 'acq_time' not in res['var2'].coords - - # Coords unique and pretty=True -> Don't modify coordinate names - datasets['var2']['acq_time'] = ('y', time1) - res = make_alt_coords_unique(datasets, 
pretty=True) - np.testing.assert_array_equal(res['var1']['acq_time'], time1) - np.testing.assert_array_equal(res['var2']['acq_time'], time1) - assert 'var1_acq_time' not in res['var1'].coords - assert 'var2_acq_time' not in res['var2'].coords - def test_area2cf(self): """Test the conversion of an area to CF standards.""" from satpy.cf.area import area2cf @@ -399,55 +304,3 @@ def test__add_lonlat_coords(self): np.testing.assert_array_equal(lon.data, lons_ref) assert {'name': 'latitude', 'standard_name': 'latitude', 'units': 'degrees_north'}.items() <= lat.attrs.items() assert {'name': 'longitude', 'standard_name': 'longitude', 'units': 'degrees_east'}.items() <= lon.attrs.items() - - @pytest.fixture - def datasets(self): - """Create test dataset.""" - data = [[75, 2], [3, 4]] - y = [1, 2] - x = [1, 2] - geos = AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, - width=2, height=2, - area_extent=[-1, -1, 1, 1]) - datasets = { - 'var1': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x}), - 'var2': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x}), - 'lat': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x}), - 'lon': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x})} - datasets['lat'].attrs['standard_name'] = 'latitude' - datasets['var1'].attrs['standard_name'] = 'dummy' - datasets['var2'].attrs['standard_name'] = 'dummy' - datasets['var2'].attrs['area'] = geos - datasets['var1'].attrs['area'] = geos - datasets['lat'].attrs['name'] = 'lat' - datasets['var1'].attrs['name'] = 'var1' - datasets['var2'].attrs['name'] = 'var2' - datasets['lon'].attrs['name'] = 'lon' - return datasets - - def test__is_lon_or_lat_dataarray(self, datasets): - """Test the _is_lon_or_lat_dataarray function.""" - from satpy.cf.area import _is_lon_or_lat_dataarray - - assert _is_lon_or_lat_dataarray(datasets['lat']) - assert not _is_lon_or_lat_dataarray(datasets['var1']) - - def test_has_projection_coords(self, datasets): - """Test the has_projection_coords function.""" - from satpy.cf.area import has_projection_coords - - assert has_projection_coords(datasets) - datasets['lat'].attrs['standard_name'] = 'dummy' - assert not has_projection_coords(datasets) diff --git a/satpy/tests/cf_tests/test_attrs.py b/satpy/tests/cf_tests/test_attrs.py index 7eb86e172b..787d1dc82d 100644 --- a/satpy/tests/cf_tests/test_attrs.py +++ b/satpy/tests/cf_tests/test_attrs.py @@ -16,105 +16,19 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for CF-compatible attributes encoding.""" -import datetime import json -from collections import OrderedDict - -import numpy as np class TestCFAttributeEncoding: """Test case for CF attribute encodings.""" - def get_test_attrs(self): - """Create some dataset attributes for testing purpose. 
- - Returns: - Attributes, encoded attributes, encoded and flattened attributes - - """ - # TODO: this is also used by test_da2cf - attrs = {'name': 'IR_108', - 'start_time': datetime.datetime(2018, 1, 1, 0), - 'end_time': datetime.datetime(2018, 1, 1, 0, 15), - 'int': 1, - 'float': 1.0, - 'none': None, # should be dropped - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': True, - 'numpy_void': np.void(0), - 'numpy_bytes': np.bytes_('test'), - 'numpy_string': np.str_('test'), - 'list': [1, 2, np.float64(3)], - 'nested_list': ["1", ["2", [3]]], - 'bool': True, - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': np.array([True, False, True]), - 'array_2d': np.array([[1, 2], [3, 4]]), - 'array_3d': np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), - 'dict': {'a': 1, 'b': 2}, - 'nested_dict': {'l1': {'l2': {'l3': np.array([1, 2, 3], dtype='uint8')}}}, - 'raw_metadata': OrderedDict([ - ('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])), - ('flag', np.bool_(True)), - ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))])) - ])} - encoded = {'name': 'IR_108', - 'start_time': '2018-01-01 00:00:00', - 'end_time': '2018-01-01 00:15:00', - 'int': 1, - 'float': 1.0, - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': 'true', - 'numpy_void': '[]', - 'numpy_bytes': 'test', - 'numpy_string': 'test', - 'list': [1, 2, np.float64(3)], - 'nested_list': '["1", ["2", [3]]]', - 'bool': 'true', - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': ['true', 'false', 'true'], - 'array_2d': '[[1, 2], [3, 4]]', - 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', - 'dict': '{"a": 1, "b": 2}', - 'nested_dict': '{"l1": {"l2": {"l3": [1, 2, 3]}}}', - 'raw_metadata': '{"recarray": [[0, 0], [0, 0], [0, 0]], ' - '"flag": "true", "dict": {"a": 1, "b": [1, 2, 3]}}'} - encoded_flat = {'name': 'IR_108', - 'start_time': '2018-01-01 00:00:00', - 'end_time': '2018-01-01 00:15:00', - 'int': 1, - 'float': 1.0, - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': 'true', - 'numpy_void': '[]', - 'numpy_bytes': 'test', - 'numpy_string': 'test', - 'list': [1, 2, np.float64(3)], - 'nested_list': '["1", ["2", [3]]]', - 'bool': 'true', - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': ['true', 'false', 'true'], - 'array_2d': '[[1, 2], [3, 4]]', - 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', - 'dict_a': 1, - 'dict_b': 2, - 'nested_dict_l1_l2_l3': np.array([1, 2, 3], dtype='uint8'), - 'raw_metadata_recarray': '[[0, 0], [0, 0], [0, 0]]', - 'raw_metadata_flag': 'true', - 'raw_metadata_dict_a': 1, - 'raw_metadata_dict_b': np.array([1, 2, 3], dtype='uint8')} - return attrs, encoded, encoded_flat - def test__encode_attrs_nc(self): """Test attributes encoding.""" from satpy.cf.attrs import _encode_attrs_nc + from satpy.tests.cf_tests._test_data import get_test_attrs from satpy.tests.utils import assert_dict_array_equality - attrs, expected, _ = self.get_test_attrs() + attrs, expected, _ = get_test_attrs() # Test encoding encoded = _encode_attrs_nc(attrs) diff --git a/satpy/tests/cf_tests/test_coords.py b/satpy/tests/cf_tests/test_coords.py index 1361c0d5a4..0d0b5436cd 100644 --- a/satpy/tests/cf_tests/test_coords.py +++ b/satpy/tests/cf_tests/test_coords.py @@ -19,7 +19,9 @@ import logging import numpy as np +import pytest import xarray as xr +from pyresample import AreaDefinition # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: @@ -49,10 +51,106 @@ def 
test_add_time_bounds_dimension(self):
         assert "bounds" in ds["time"].attrs
         assert "standard_name" in ds["time"].attrs
 
+    # set_cf_time_info
+
 
 class TestCFcoords:
     """Test cases for CF spatial dimension and coordinates."""
 
+    def test_check_unique_projection_coords(self):
+        """Test that the x and y coordinates are unique."""
+        from satpy.cf.coords import check_unique_projection_coords
+
+        dummy = [[1, 2], [3, 4]]
+        datas = {'a': xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}),
+                 'b': xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}),
+                 'n': xr.DataArray(data=dummy, dims=('v', 'w'), coords={'v': [1, 2], 'w': [3, 4]})}
+        check_unique_projection_coords(datas)
+
+        datas['c'] = xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 3], 'x': [3, 4]})
+        with pytest.raises(ValueError):
+            check_unique_projection_coords(datas)
+
+    def test_add_coordinates_attrs_coords(self):
+        """Check that the coordinates link has been established correctly."""
+        from satpy.cf.coords import add_coordinates_attrs_coords
+
+        data = [[1, 2], [3, 4]]
+        lon = np.zeros((2, 2))
+        lon2 = np.zeros((1, 2, 2))
+        lat = np.ones((2, 2))
+        datasets = {
+            'var1': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'lon lat'}),
+            'var2': xr.DataArray(data=data, dims=('y', 'x')),
+            'var3': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'lon2 lat'}),
+            'var4': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'not_exist lon lat'}),
+            'lon': xr.DataArray(data=lon, dims=('y', 'x')),
+            'lon2': xr.DataArray(data=lon2, dims=('time', 'y', 'x')),
+            'lat': xr.DataArray(data=lat, dims=('y', 'x'))
+        }
+
+        datasets = add_coordinates_attrs_coords(datasets)
+
+        # Check that the link has been established correctly and the 'coordinates' attribute has been dropped
+        assert 'lon' in datasets['var1'].coords
+        assert 'lat' in datasets['var1'].coords
+        np.testing.assert_array_equal(datasets['var1']['lon'].data, lon)
+        np.testing.assert_array_equal(datasets['var1']['lat'].data, lat)
+        assert 'coordinates' not in datasets['var1'].attrs
+
+        # There should be no link if there was no 'coordinates' attribute
+        assert 'lon' not in datasets['var2'].coords
+        assert 'lat' not in datasets['var2'].coords
+
+        # The non-existent dimension or coordinate should be dropped
+        assert 'time' not in datasets['var3'].coords
+        assert 'not_exist' not in datasets['var4'].coords
+
+    def test_ensure_unique_nondimensional_coords(self):
+        """Test that created coordinate variables are unique."""
+        from satpy.cf.coords import ensure_unique_nondimensional_coords
+
+        data = [[1, 2], [3, 4]]
+        y = [1, 2]
+        x = [1, 2]
+        time1 = [1, 2]
+        time2 = [3, 4]
+        datasets = {'var1': xr.DataArray(data=data,
+                                         dims=('y', 'x'),
+                                         coords={'y': y, 'x': x, 'acq_time': ('y', time1)}),
+                    'var2': xr.DataArray(data=data,
+                                         dims=('y', 'x'),
+                                         coords={'y': y, 'x': x, 'acq_time': ('y', time2)})}
+
+        # Test that dataset names are prepended to alternative coordinates
+        res = ensure_unique_nondimensional_coords(datasets)
+        np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1)
+        np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2)
+        assert 'acq_time' not in res['var1'].coords
+        assert 'acq_time' not in res['var2'].coords
+
+        # Make sure nothing else is modified
+        np.testing.assert_array_equal(res['var1']['x'], x)
+        np.testing.assert_array_equal(res['var1']['y'], y)
+        np.testing.assert_array_equal(res['var2']['x'], x)
+        np.testing.assert_array_equal(res['var2']['y'], y)
+
+        # Coords not unique -> Dataset 
names must be prepended, even if pretty=True + with pytest.warns(UserWarning, match='Cannot pretty-format "acq_time"'): + res = ensure_unique_nondimensional_coords(datasets, pretty=True) + np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) + np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) + assert 'acq_time' not in res['var1'].coords + assert 'acq_time' not in res['var2'].coords + + # Coords unique and pretty=True -> Don't modify coordinate names + datasets['var2']['acq_time'] = ('y', time1) + res = ensure_unique_nondimensional_coords(datasets, pretty=True) + np.testing.assert_array_equal(res['var1']['acq_time'], time1) + np.testing.assert_array_equal(res['var2']['acq_time'], time1) + assert 'var1_acq_time' not in res['var1'].coords + assert 'var2_acq_time' not in res['var2'].coords + def test_is_projected(self, caplog): """Tests for private _is_projected function.""" from satpy.cf.coords import _is_projected @@ -79,5 +177,56 @@ def test_is_projected(self, caplog): assert _is_projected(da) assert "Failed to tell if data are projected." in caplog.text + @pytest.fixture + def datasets(self): + """Create test dataset.""" + data = [[75, 2], [3, 4]] + y = [1, 2] + x = [1, 2] + geos = AreaDefinition( + area_id='geos', + description='geos', + proj_id='geos', + projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, + width=2, height=2, + area_extent=[-1, -1, 1, 1]) + datasets = { + 'var1': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x}), + 'var2': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x}), + 'lat': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x}), + 'lon': xr.DataArray(data=data, + dims=('y', 'x'), + coords={'y': y, 'x': x})} + datasets['lat'].attrs['standard_name'] = 'latitude' + datasets['var1'].attrs['standard_name'] = 'dummy' + datasets['var2'].attrs['standard_name'] = 'dummy' + datasets['var2'].attrs['area'] = geos + datasets['var1'].attrs['area'] = geos + datasets['lat'].attrs['name'] = 'lat' + datasets['var1'].attrs['name'] = 'var1' + datasets['var2'].attrs['name'] = 'var2' + datasets['lon'].attrs['name'] = 'lon' + return datasets + + def test__is_lon_or_lat_dataarray(self, datasets): + """Test the _is_lon_or_lat_dataarray function.""" + from satpy.cf.coords import _is_lon_or_lat_dataarray + + assert _is_lon_or_lat_dataarray(datasets['lat']) + assert not _is_lon_or_lat_dataarray(datasets['var1']) + + def test_has_projection_coords(self, datasets): + """Test the has_projection_coords function.""" + from satpy.cf.coords import has_projection_coords + + assert has_projection_coords(datasets) + datasets['lat'].attrs['standard_name'] = 'dummy' + assert not has_projection_coords(datasets) + # add_xy_coords_attrs - # process_time_coord diff --git a/satpy/tests/cf_tests/test_dataaarray.py b/satpy/tests/cf_tests/test_dataaarray.py index 0fd3a9d41c..90fbae4558 100644 --- a/satpy/tests/cf_tests/test_dataaarray.py +++ b/satpy/tests/cf_tests/test_dataaarray.py @@ -16,10 +16,6 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests CF-compliant DataArray creation.""" - -import datetime -from collections import OrderedDict - import numpy as np import xarray as xr @@ -71,96 +67,14 @@ def test_make_cf_dataarray_lonlat(): class TestCfDataArray: """Test creation of CF DataArray.""" - def get_test_attrs(self): - """Create some dataset attributes for testing purpose. 
- - Returns: - Attributes, encoded attributes, encoded and flattened attributes - - """ - # TODO: also used by cf/test_attrs.py - attrs = {'name': 'IR_108', - 'start_time': datetime.datetime(2018, 1, 1, 0), - 'end_time': datetime.datetime(2018, 1, 1, 0, 15), - 'int': 1, - 'float': 1.0, - 'none': None, # should be dropped - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': True, - 'numpy_void': np.void(0), - 'numpy_bytes': np.bytes_('test'), - 'numpy_string': np.str_('test'), - 'list': [1, 2, np.float64(3)], - 'nested_list': ["1", ["2", [3]]], - 'bool': True, - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': np.array([True, False, True]), - 'array_2d': np.array([[1, 2], [3, 4]]), - 'array_3d': np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), - 'dict': {'a': 1, 'b': 2}, - 'nested_dict': {'l1': {'l2': {'l3': np.array([1, 2, 3], dtype='uint8')}}}, - 'raw_metadata': OrderedDict([ - ('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])), - ('flag', np.bool_(True)), - ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))])) - ])} - encoded = {'name': 'IR_108', - 'start_time': '2018-01-01 00:00:00', - 'end_time': '2018-01-01 00:15:00', - 'int': 1, - 'float': 1.0, - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': 'true', - 'numpy_void': '[]', - 'numpy_bytes': 'test', - 'numpy_string': 'test', - 'list': [1, 2, np.float64(3)], - 'nested_list': '["1", ["2", [3]]]', - 'bool': 'true', - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': ['true', 'false', 'true'], - 'array_2d': '[[1, 2], [3, 4]]', - 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', - 'dict': '{"a": 1, "b": 2}', - 'nested_dict': '{"l1": {"l2": {"l3": [1, 2, 3]}}}', - 'raw_metadata': '{"recarray": [[0, 0], [0, 0], [0, 0]], ' - '"flag": "true", "dict": {"a": 1, "b": [1, 2, 3]}}'} - encoded_flat = {'name': 'IR_108', - 'start_time': '2018-01-01 00:00:00', - 'end_time': '2018-01-01 00:15:00', - 'int': 1, - 'float': 1.0, - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': 'true', - 'numpy_void': '[]', - 'numpy_bytes': 'test', - 'numpy_string': 'test', - 'list': [1, 2, np.float64(3)], - 'nested_list': '["1", ["2", [3]]]', - 'bool': 'true', - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': ['true', 'false', 'true'], - 'array_2d': '[[1, 2], [3, 4]]', - 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', - 'dict_a': 1, - 'dict_b': 2, - 'nested_dict_l1_l2_l3': np.array([1, 2, 3], dtype='uint8'), - 'raw_metadata_recarray': '[[0, 0], [0, 0], [0, 0]]', - 'raw_metadata_flag': 'true', - 'raw_metadata_dict_a': 1, - 'raw_metadata_dict_b': np.array([1, 2, 3], dtype='uint8')} - return attrs, encoded, encoded_flat - def test_make_cf_dataarray(self): """Test the conversion of a DataArray to a CF-compatible DataArray.""" from satpy.cf.dataarray import make_cf_dataarray + from satpy.tests.cf_tests._test_data import get_test_attrs from satpy.tests.utils import assert_dict_array_equality # Create set of test attributes - attrs, attrs_expected, attrs_expected_flat = self.get_test_attrs() + attrs, attrs_expected, attrs_expected_flat = get_test_attrs() attrs['area'] = 'some_area' attrs['prerequisites'] = [make_dsq(name='hej')] attrs['_satpy_id_name'] = 'myname' From 57afb82959ac68129e89ba4179880fecc9f1fa59 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 10 Oct 2023 20:45:32 +0200 Subject: [PATCH 0552/1416] Fix cf_writer module path --- satpy/tests/writer_tests/test_cf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff 
--git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 31e59a2fb7..f50a8e99d3 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -403,11 +403,11 @@ def test_load_module_with_old_pyproj(self): old_version = sys.modules['pyproj'].__version__ sys.modules['pyproj'].__version__ = "1.9.6" try: - importlib.reload(sys.modules['satpy.cf_writer']) + importlib.reload(sys.modules['satpy.writers.cf_writer']) finally: # Tear down sys.modules['pyproj'].__version__ = old_version - importlib.reload(sys.modules['satpy.cf_writer']) + importlib.reload(sys.modules['satpy.writers.cf_writer']) def test_global_attr_default_history_and_Conventions(self): """Test saving global attributes history and Conventions.""" From 2474b9860ed44903887a9f26a2eb431f1102534d Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 10 Oct 2023 20:00:43 +0100 Subject: [PATCH 0553/1416] Update doc/source/writers.rst Co-authored-by: Martin Raspaud --- doc/source/writers.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/writers.rst b/doc/source/writers.rst index 12ee786f56..e70f6ffb2b 100644 --- a/doc/source/writers.rst +++ b/doc/source/writers.rst @@ -160,7 +160,7 @@ to be added. Where `my_text` is the text you wish to add and `` is the location of the font file you wish to use, often in `/usr/share/fonts/` -This dictionary can then be passed to the `save_dataset` or `save_datasets` command. +This dictionary can then be passed to the :meth:`~satpy.scene.Scene.save_dataset` or :meth:`~satpy.scene.Scene.save_datasets` command. .. code-block:: python From 8cbe04a411ed3cffdf5aa5309623fefef486b860 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 10 Oct 2023 20:07:26 +0100 Subject: [PATCH 0554/1416] Minor updates to AHI L2 reader and the tests. --- satpy/readers/ahi_l2_nc.py | 16 +++++++--------- satpy/tests/reader_tests/test_ahi_l2_nc.py | 2 -- 2 files changed, 7 insertions(+), 11 deletions(-) diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index ef3b7611aa..5159931819 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2018 Satpy developers +# Copyright (c) 2023 Satpy developers # # This file is part of satpy. # @@ -17,6 +17,8 @@ # satpy. If not, see . """Reader for Himawari L2 cloud products from NOAA's big data programme. +For more information about the data, see: . 
+
+
 These products are generated by the NOAA enterprise cloud suite and have
 filenames like:
 AHI-CMSK_v1r1_h09_s202308240540213_e202308240549407_c202308240557548.nc
@@ -49,12 +51,9 @@
 from satpy._compat import cached_property
 from satpy.readers._geos_area import get_area_definition, get_area_extent
 from satpy.readers.file_handlers import BaseFileHandler
-from satpy.utils import get_legacy_chunk_size
 
 logger = logging.getLogger(__name__)
 
-CHUNK_SIZE = get_legacy_chunk_size()
-
 EXPECTED_DATA_AREA = 'Full Disk'
 
 
@@ -63,12 +62,11 @@ class HIML2NCFileHandler(BaseFileHandler):
 
     def __init__(self, filename, filename_info, filetype_info):
         """Initialize the reader."""
-        super(HIML2NCFileHandler, self).__init__(filename, filename_info,
-                                                 filetype_info)
+        super().__init__(filename, filename_info, filetype_info)
 
         self.nc = xr.open_dataset(self.filename,
                                   decode_cf=True,
                                   mask_and_scale=False,
-                                  chunks={'xc': CHUNK_SIZE, 'yc': CHUNK_SIZE})
+                                  chunks={"xc": "auto", "yc": "auto"})
 
         # Check that file is a full disk scene, we don't know the area for anything else
         if self.nc.attrs['cdm_data_type'] != EXPECTED_DATA_AREA:
@@ -119,8 +117,8 @@ def get_area_def(self, dsid):
         return self.area
 
     def _get_area_def(self):
-        logger.warning('The AHI L2 cloud products do not have the metadata required to produce an area definition.'
-                       ' Assuming standard Himawari-8/9 full disk projection.')
+        logger.info('The AHI L2 cloud products do not have the metadata required to produce an area definition.'
+                    ' Assuming standard Himawari-8/9 full disk projection.')
 
         # Basic check to ensure we're processing a full disk (2km) scene.
         if self.nlines != 5500 or self.ncols != 5500:
diff --git a/satpy/tests/reader_tests/test_ahi_l2_nc.py b/satpy/tests/reader_tests/test_ahi_l2_nc.py
index ff2b5a3d53..39de4e1053 100644
--- a/satpy/tests/reader_tests/test_ahi_l2_nc.py
+++ b/satpy/tests/reader_tests/test_ahi_l2_nc.py
@@ -75,7 +75,6 @@ def test_startend(himl2_filename):
 
 def test_ahi_l2_area_def(himl2_filename, caplog):
     """Test reader handles area definition correctly."""
-    warntxt = "The AHI L2 cloud products do not have the metadata"
     ps = '+proj=geos +lon_0=140.7 +h=35785863 +x_0=0 +y_0=0 +a=6378137 +rf=298.257024882273 +units=m +no_defs +type=crs'
 
     # Check case where input data is correct size.
@@ -86,7 +85,6 @@ def test_ahi_l2_area_def(himl2_filename, caplog):
     assert area_def.height == dimensions['Rows']
     assert np.allclose(area_def.area_extent, exp_ext)
     assert area_def.proj4_string == ps
-    assert warntxt in caplog.text
 
     # Check case where input data is incorrect size.
     with pytest.raises(ValueError):

From e0d7a2a9373143ce14509dcc98bd5ac89c4f8abb Mon Sep 17 00:00:00 2001
From: Simon Proud
Date: Tue, 10 Oct 2023 20:14:08 +0100
Subject: [PATCH 0555/1416] Change names of `readers` and `writers` in the
 docs to `reading` and `writing`.

---
 doc/source/{readers.rst => reading.rst} | 10 +++++-----
 doc/source/{writers.rst => writing.rst} |  8 ++++----
 2 files changed, 9 insertions(+), 9 deletions(-)
 rename doc/source/{readers.rst => reading.rst} (97%)
 rename doc/source/{writers.rst => writing.rst} (95%)

diff --git a/doc/source/readers.rst b/doc/source/reading.rst
similarity index 97%
rename from doc/source/readers.rst
rename to doc/source/reading.rst
index fa7cfecea1..eefa4ccba1 100644
--- a/doc/source/readers.rst
+++ b/doc/source/reading.rst
@@ -1,5 +1,5 @@
 =======
-Readers
+Reading
 =======
 
 .. todo::
 
     How to read cloud products from NWCSAF software. (separate document?)
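Stepping back for a moment to the AHI L2 reader change a few hunks up: the effect of dropping the legacy chunk constant can be sketched as below. This is illustrative only; the `open_dataset` call is taken verbatim from the diff, and the filename is the example pattern from the reader docstring rather than a real file.

```python
import xarray as xr

# "auto" lets dask derive chunk sizes from the dtype and its own
# array.chunk-size configuration, replacing satpy's legacy CHUNK_SIZE constant.
nc = xr.open_dataset(
    "AHI-CMSK_v1r1_h09_s202308240540213_e202308240549407_c202308240557548.nc",
    decode_cf=True,
    mask_and_scale=False,
    chunks={"xc": "auto", "yc": "auto"},
)
print(nc.chunks)
```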
Satpy supports reading and loading data from many input file formats and
-schemes. The :class:`~satpy.scene.Scene` object provides a simple interface
-around all the complexity of these various formats through its ``load``
-method. The following sections describe the different way data can be loaded,
-requested, or added to a Scene object.
+schemes through the concept of *readers*. Each reader supports a specific type of input data.
+The :class:`~satpy.scene.Scene` object provides a simple interface around all the complexity of
+these various formats through its ``load`` method.
+The following sections describe the different ways data can be loaded, requested, or added to a Scene object.
 
 Available Readers
 =================
diff --git a/doc/source/writers.rst b/doc/source/writing.rst
similarity index 95%
rename from doc/source/writers.rst
rename to doc/source/writing.rst
index 12ee786f56..19518b2755 100644
--- a/doc/source/writers.rst
+++ b/doc/source/writing.rst
@@ -1,10 +1,10 @@
 =======
-Writers
+Writing
 =======
 
-Satpy makes it possible to save datasets in multiple formats. For details
-on additional arguments and features available for a specific Writer see
-the table below. Most use cases will want to save datasets using the
+Satpy makes it possible to save datasets in multiple formats, with *writers* designed to save in a given format.
+For details on additional arguments and features available for a specific Writer see the table below.
+Most use cases will want to save datasets using the
 :meth:`~satpy.scene.Scene.save_datasets` method::
 
     >>> scn.save_datasets(writer='simple_image')

From 700ac96958c381da0eb2ff4c6d4a0eaa5b494687 Mon Sep 17 00:00:00 2001
From: Martin Raspaud
Date: Tue, 10 Oct 2023 22:00:13 +0200
Subject: [PATCH 0556/1416] Update setup.py

Co-authored-by: David Hoese
---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index 7b321ecbcd..555f299b19 100644
--- a/setup.py
+++ b/setup.py
@@ -22,7 +22,7 @@
 
 from setuptools import find_packages, setup
 
-requires = ['numpy >1.20', 'pillow', 'pyresample >=1.24.0', 'trollsift',
+requires = ['numpy >=1.21', 'pillow', 'pyresample >=1.24.0', 'trollsift',
             'trollimage >=1.20', 'pykdtree', 'pyyaml >=5.1', 'xarray >=0.10.1, !=0.13.0',
             'dask[array] >=0.17.1', 'pyproj>=2.2', 'zarr', 'donfig', 'appdirs',
             'packaging', 'pooch', 'pyorbital']

From 69c5751dacd76501569ddef56cc37bf7c52faa33 Mon Sep 17 00:00:00 2001
From: Pierre de Buyl
Date: Tue, 10 Oct 2023 22:13:01 +0200
Subject: [PATCH 0557/1416] add documentation about Meirink calibration

---
 satpy/readers/seviri_base.py | 34 +++++++++++++++++++++++++++-------
 1 file changed, 27 insertions(+), 7 deletions(-)

diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py
index 6471c42639..73a1454cb8 100644
--- a/satpy/readers/seviri_base.py
+++ b/satpy/readers/seviri_base.py
@@ -61,14 +61,22 @@
         reader_kwargs={'calib_mode': 'GSICS'})
     scene.load(['VIS006', 'IR_108'])
 
-Furthermore, it is possible to specify external calibration coefficients
-for the conversion from counts to radiances. External coefficients take
-precedence over internal coefficients, but you can also mix internal and
-external coefficients: If external calibration coefficients are specified
-for only a subset of channels, the remaining channels will be calibrated
-using the chosen file-internal coefficients (nominal or GSICS).
+In addition, two other calibration methods are available:
+
+1. 
It is possible to specify external calibration coefficients for the + conversion from counts to radiances. External coefficients take + precedence over internal coefficients and over the Meirink + coefficients, but you can also mix internal and external coefficients: + If external calibration coefficients are specified for only a subset + of channels, the remaining channels will be calibrated using the + chosen file-internal coefficients (nominal or GSICS). Calibration + coefficients must be specified in [mW m-2 sr-1 (cm-1)-1]. + +2. The calibration mode ``meirink-2013`` uses coefficients based on an + intercalibration with Aqua-MODIS for the visible channels, as found in + `Inter-calibration of polar imager solar channels using SEVIRI`_ + (2013) by J. F. Meirink, R. A. Roebeling, and P. Stammes. -Calibration coefficients must be specified in [mW m-2 sr-1 (cm-1)-1]. In the following example we use external calibration coefficients for the ``VIS006`` & ``IR_108`` channels, and nominal coefficients for the @@ -93,6 +101,15 @@ 'ext_calib_coefs': coefs}) scene.load(['VIS006', 'VIS008', 'IR_108', 'IR_120']) +In the next example we use the mode ``meirink-2013`` calibration +coefficients for all visible channels and nominal coefficients for the +rest:: + + scene = satpy.Scene(filenames, + reader='seviri_l1b_...', + reader_kwargs={'calib_mode': 'meirink-2013'}) + scene.load(['VIS006', 'VIS008', 'IR_016']) + Calibration to reflectance ^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -163,6 +180,9 @@ .. _Radiometric Calibration of MSG SEVIRI Level 1.5 Image Data in Equivalent Spectral Blackbody Radiance: https://www-cdn.eumetsat.int/files/2020-04/pdf_ten_msg_seviri_rad_calib.pdf +.. _Inter-calibration of polar imager solar channels using SEVIRI: + http://dx.doi.org/10.5194/amt-6-2495-2013 + """ import warnings From 714130cba99751d3b604e9ff8252662b9334dddb Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 11 Oct 2023 09:59:30 +0200 Subject: [PATCH 0558/1416] Update test for changes in Meirink calibration --- satpy/tests/reader_tests/test_seviri_base.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index 252da43e75..3b357512e4 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -26,7 +26,7 @@ import xarray as xr from satpy.readers.seviri_base import ( - DATE_2000, + MEIRINK_EPOCH, MEIRINK_COEFS, NoValidOrbitParams, OrbitPolynomial, @@ -373,8 +373,8 @@ def test_get_meirink_slope_epoch(self, platform_id, channel_name): coefs = {'coefs': {}} coefs['coefs']['NOMINAL'] = {'gain': -1, 'offset': -1} coefs['coefs']['EXTERNAL'] = {} - calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK', DATE_2000) - assert calibration_handler.get_gain_offset()[0] == MEIRINK_COEFS[platform_id][channel_name][0]/1000. + calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK-2013', MEIRINK_EPOCH) + assert calibration_handler.get_gain_offset()[0] == MEIRINK_COEFS['2013'][platform_id][channel_name][0]/1000. 
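To make the time dependence behind these assertions concrete, here is a sketch of the slope computation implied by the tabulated coefficients and the expected values; a later patch's module comment names a `get_meirink_slope` function, but the division by 1000 applied to B below is inferred from the test expectations rather than quoted from satpy, so treat it as an assumption.

```python
from datetime import datetime

MEIRINK_EPOCH = datetime(2000, 1, 1)


def meirink_slope_sketch(coefs, acquisition_time):
    """Return the visible-channel gain in mW m-2 sr-1 (cm-1)-1 per count."""
    a, b = coefs  # A and B as tabulated, e.g. MEIRINK_COEFS[321]['VIS006'] == (24.346, 0.3739)
    days = (acquisition_time - MEIRINK_EPOCH).total_seconds() / 86400
    return (a + b * days / 1000) / 1000  # at the epoch this reduces to a / 1000


# Reproduces the Meteosat-8 VIS006 value expected by test_get_meirink_slope_2020:
assert abs(meirink_slope_sketch((24.346, 0.3739), datetime(2005, 1, 18)) - 0.0250354716) < 1e-6
```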
@pytest.mark.parametrize('platform_id,time,expected', ( (321, datetime(2005, 1, 18, 0, 0), [0.0250354716, 0.0315626684, 0.022880986]), @@ -392,5 +392,5 @@ def test_get_meirink_slope_2020(self, platform_id, time, expected): coefs['coefs']['NOMINAL'] = {'gain': -1, 'offset': -1} coefs['coefs']['EXTERNAL'] = {} for i, channel_name in enumerate(['VIS006', 'VIS008', 'IR_016']): - calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK', time) + calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK-2013', time) assert abs(calibration_handler.get_gain_offset()[0] - expected[i]) < 1e-6 From b2ef4b4dd549d47e35e73f44af4a986d79a20f84 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 11 Oct 2023 10:27:16 +0200 Subject: [PATCH 0559/1416] remove setuptools_scm_git_archive from requirement The package's functionality is now found in setuptools_scm (see https://github.com/pytroll/satpy/issues/2549 and https://github.com/Changaco/setuptools_scm_git_archive ). Minimal changes done: 1. Remove setuptools_scm_git_archive from setup.cfg and doc/rtd_environment.yml 2. Change the content of .git_archival.txt according to https://setuptools-scm.readthedocs.io/en/latest/usage/#builtin-mechanisms-for-obtaining-version-numbers fixes #2549 --- .git_archival.txt | 3 +++ doc/rtd_environment.yml | 1 - setup.cfg | 1 - 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.git_archival.txt b/.git_archival.txt index 95cb3eea4e..3994ec0a83 100644 --- a/.git_archival.txt +++ b/.git_archival.txt @@ -1 +1,4 @@ +node: $Format:%H$ +node-date: $Format:%cI$ +describe-name: $Format:%(describe:tags=true)$ ref-names: $Format:%D$ diff --git a/doc/rtd_environment.yml b/doc/rtd_environment.yml index ce147a1644..1e40cbb73a 100644 --- a/doc/rtd_environment.yml +++ b/doc/rtd_environment.yml @@ -25,7 +25,6 @@ dependencies: - rioxarray - setuptools - setuptools_scm - - setuptools_scm_git_archive - sphinx - sphinx_rtd_theme - sphinxcontrib-apidoc diff --git a/setup.cfg b/setup.cfg index 594f9dc8cd..3e09909a6a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,7 +1,6 @@ [options] setup_requires = setuptools_scm - setuptools_scm_git_archive [bdist_rpm] requires=h5py pyresample python2-numexpr pyhdf xarray dask h5netcdf From 92910cec18d96416f43f2482ce69dc7d6da33da1 Mon Sep 17 00:00:00 2001 From: Nina Date: Wed, 11 Oct 2023 10:45:58 +0200 Subject: [PATCH 0560/1416] Update satpy/readers/viirs_vgac_l1c_nc.py Co-authored-by: Martin Raspaud --- satpy/readers/viirs_vgac_l1c_nc.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/satpy/readers/viirs_vgac_l1c_nc.py b/satpy/readers/viirs_vgac_l1c_nc.py index 445313a807..09c361f9a7 100644 --- a/satpy/readers/viirs_vgac_l1c_nc.py +++ b/satpy/readers/viirs_vgac_l1c_nc.py @@ -75,13 +75,8 @@ def set_time_attrs(self, data): def dt64_to_datetime(self, dt64): """Conversion of numpy.datetime64 to datetime objects.""" - # https://stackoverflow.com/questions/13703720/converting-between-datetime-timestamp-and-datetime64/46921593#46921593 - if type(dt64) == np.datetime64: - unix_epoch = np.datetime64(0, 's') - one_second = np.timedelta64(1, 's') - seconds_since_epoch = (dt64 - unix_epoch) / one_second - dt = datetime.utcfromtimestamp(seconds_since_epoch) - return dt + if isinstance(dt64, np.datetime64): + return dt64.astype(datetime) return dt64 def decode_time_variable(self, data, nc): From aaec89a6f3a3edbbf34eeb1488915d99347f07ce Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 11 Oct 2023 11:05:28 +0200 
Subject: [PATCH 0561/1416] Issue warning for caching only when caching is requested --- satpy/modifiers/angles.py | 44 +++++++++++++---------- satpy/tests/modifier_tests/test_angles.py | 14 +++++++- 2 files changed, 39 insertions(+), 19 deletions(-) diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index 02ffadfa87..8a3fe4910a 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -138,27 +138,36 @@ def _zarr_pattern(self, arg_hash, cache_version: Union[None, int, str] = None) - def __call__(self, *args, cache_dir: Optional[str] = None) -> Any: """Call the decorated function.""" - new_args = self._sanitize_args_func(*args) if self._sanitize_args_func is not None else args - arg_hash = _hash_args(*new_args, unhashable_types=self._uncacheable_arg_types) - should_cache, cache_dir = self._get_should_cache_and_cache_dir(new_args, cache_dir) + sanitized_args = self._sanitize_args_func(*args) if self._sanitize_args_func is not None else args + should_cache, cache_dir = self._get_should_cache_and_cache_dir(sanitized_args, cache_dir) + if should_cache: + try: + arg_hash = _hash_args(*sanitized_args, unhashable_types=self._uncacheable_arg_types) + except TypeError as err: + warnings.warn("Cannot cache function because of unhashable argument: " + str(err), stacklevel=2) + should_cache = False + + if not should_cache: + return self._func(*args) + zarr_fn = self._zarr_pattern(arg_hash) zarr_format = os.path.join(cache_dir, zarr_fn) zarr_paths = glob(zarr_format.format("*")) - if not should_cache or not zarr_paths: - # use sanitized arguments if we are caching, otherwise use original arguments - args_to_use = new_args if should_cache else args + + if not zarr_paths: + # use sanitized arguments + args_to_use = sanitized_args res = self._func(*args_to_use) - if should_cache and not zarr_paths: - self._warn_if_irregular_input_chunks(args, args_to_use) - self._cache_results(res, zarr_format) + self._warn_if_irregular_input_chunks(args, args_to_use) + self._cache_results(res, zarr_format) + # if we did any caching, let's load from the zarr files - if should_cache: - # re-calculate the cached paths - zarr_paths = sorted(glob(zarr_format.format("*"))) - if not zarr_paths: - raise RuntimeError("Data was cached to disk but no files were found") - new_chunks = _get_output_chunks_from_func_arguments(args) - res = tuple(da.from_zarr(zarr_path, chunks=new_chunks) for zarr_path in zarr_paths) + # re-calculate the cached paths + zarr_paths = sorted(glob(zarr_format.format("*"))) + if not zarr_paths: + raise RuntimeError("Data was cached to disk but no files were found") + new_chunks = _get_output_chunks_from_func_arguments(args) + res = tuple(da.from_zarr(zarr_path, chunks=new_chunks) for zarr_path in zarr_paths) return res def _get_should_cache_and_cache_dir(self, args, cache_dir: Optional[str]) -> tuple[bool, str]: @@ -248,8 +257,7 @@ def _hash_args(*args, unhashable_types=DEFAULT_UNCACHE_TYPES): hashable_args = [] for arg in args: if isinstance(arg, unhashable_types): - warnings.warn(f"Unhashable type in function signature ({type(arg)}), cannot be cached.", stacklevel=2) - continue + raise TypeError(f"Unhashable type ({type(arg)}).") if isinstance(arg, HASHABLE_GEOMETRIES): arg = hash(arg) elif isinstance(arg, datetime): diff --git a/satpy/tests/modifier_tests/test_angles.py b/satpy/tests/modifier_tests/test_angles.py index 4c7e295e14..f51ab229c8 100644 --- a/satpy/tests/modifier_tests/test_angles.py +++ b/satpy/tests/modifier_tests/test_angles.py @@ -322,7 +322,7 @@ def 
_fake_func(shape, chunks):
                 satpy.config.set(cache_lonlats=True, cache_dir=str(tmp_path)):
             _fake_func((5, 5), ((5,), (5,)))
 
-    def test_caching_with_array_in_args_fails(self, tmp_path):
+    def test_caching_with_array_in_args_warns(self, tmp_path):
         """Test that trying to cache with non-dask arrays fails."""
         from satpy.modifiers.angles import cache_to_zarr_if
 
@@ -334,6 +334,18 @@ def _fake_func(array):
                 satpy.config.set(cache_lonlats=True, cache_dir=str(tmp_path)):
             _fake_func(da.zeros(100))
 
+    def test_caching_with_array_in_args_does_not_warn_when_caching_is_not_enabled(self, tmp_path, recwarn):
+        """Test that non-dask array arguments do not warn when caching is not enabled."""
+        from satpy.modifiers.angles import cache_to_zarr_if
+
+        @cache_to_zarr_if("cache_lonlats")
+        def _fake_func(array):
+            return array + 1
+
+        with satpy.config.set(cache_lonlats=False, cache_dir=str(tmp_path)):
+            _fake_func(da.zeros(100))
+        assert len(recwarn) == 0
+
     def test_no_cache_dir_fails(self, tmp_path):
         """Test that 'cache_dir' not being set fails."""
         from satpy.modifiers.angles import _get_sensor_angles_from_sat_pos, get_angles

From 51a3331206c158cdf464eab219da121da34cd869 Mon Sep 17 00:00:00 2001
From: Pierre de Buyl
Date: Wed, 11 Oct 2023 11:56:39 +0200
Subject: [PATCH 0562/1416] isort

---
 satpy/tests/reader_tests/test_seviri_base.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py
index 3b357512e4..910623bcd5 100644
--- a/satpy/tests/reader_tests/test_seviri_base.py
+++ b/satpy/tests/reader_tests/test_seviri_base.py
@@ -26,8 +26,8 @@
 import xarray as xr
 
 from satpy.readers.seviri_base import (
-    MEIRINK_EPOCH,
     MEIRINK_COEFS,
+    MEIRINK_EPOCH,
     NoValidOrbitParams,
     OrbitPolynomial,
     OrbitPolynomialFinder,

From 3b465a0dcbac71f3e007a8e825e1b0c1aa5b6910 Mon Sep 17 00:00:00 2001
From: Pierre de Buyl
Date: Wed, 11 Oct 2023 14:06:12 +0200
Subject: [PATCH 0563/1416] Change version of Meirink coefficients to 2023.

The coefficients on the webpage
https://msgcpp.knmi.nl/solar-channel-calibration.html were updated in place.
The current set of coefficients was obtained in 2023; the code now reflects
this correctly.
---
 satpy/readers/seviri_base.py                 | 21 +++++++++++---------
 satpy/tests/reader_tests/test_seviri_base.py |  6 +++---
 2 files changed, 15 insertions(+), 12 deletions(-)

diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py
index 73a1454cb8..83977bf40c 100644
--- a/satpy/readers/seviri_base.py
+++ b/satpy/readers/seviri_base.py
@@ -72,7 +72,7 @@
    chosen file-internal coefficients (nominal or GSICS). Calibration
    coefficients must be specified in [mW m-2 sr-1 (cm-1)-1].
 
-2. The calibration mode ``meirink-2013`` uses coefficients based on an
+2. The calibration mode ``meirink-2023`` uses coefficients based on an
    intercalibration with Aqua-MODIS for the visible channels, as found in
    `Inter-calibration of polar imager solar channels using SEVIRI`_
    (2013) by J. F. Meirink, R. A. Roebeling, and P. Stammes.
@@ -101,13 +101,13 @@
                          'ext_calib_coefs': coefs})
     scene.load(['VIS006', 'VIS008', 'IR_108', 'IR_120'])
 
-In the next example we use the mode ``meirink-2013`` calibration
+In the next example we use the mode ``meirink-2023`` calibration
 coefficients for all visible channels and nominal coefficients for the
 rest::
 
     scene = satpy.Scene(filenames,
                         reader='seviri_l1b_...',
-                        reader_kwargs={'calib_mode': 'meirink-2013'})
+                        reader_kwargs={'calib_mode': 'meirink-2023'})
     scene.load(['VIS006', 'VIS008', 'IR_016'])
 
@@ -377,6 +377,9 @@
 # Inter-calibration of polar imager solar channels using SEVIRI, Atm. Meas. Tech., 6,
 # 2495-2508, doi:10.5194/amt-6-2495-2013
 #
+# The coefficients in the 2023 entry have been obtained from the webpage
+# https://msgcpp.knmi.nl/solar-channel-calibration.html on 2023-10-11.
+#
 # The coefficients are stored in pairs of A, B (see function `get_meirink_slope`) where the
 # units of A are µW m-2 sr-1 (cm-1)-1 and those of B are µW m-2 sr-1 (cm-1)-1 (86400 s)-1
 #
@@ -386,32 +389,32 @@
 MEIRINK_EPOCH = datetime(2000, 1, 1)
 
 MEIRINK_COEFS = {}
-MEIRINK_COEFS['2013'] = {}
+MEIRINK_COEFS['2023'] = {}
 
 # Meteosat-8
 
-MEIRINK_COEFS['2013'][321] = {'VIS006': (24.346, 0.3739),
+MEIRINK_COEFS['2023'][321] = {'VIS006': (24.346, 0.3739),
                               'VIS008': (30.989, 0.3111),
                               'IR_016': (22.869, 0.0065)
                               }
 
 # Meteosat-9
 
-MEIRINK_COEFS['2013'][322] = {'VIS006': (21.026, 0.2556),
+MEIRINK_COEFS['2023'][322] = {'VIS006': (21.026, 0.2556),
                               'VIS008': (26.875, 0.1835),
                               'IR_016': (21.394, 0.0498)
                               }
 
 # Meteosat-10
 
-MEIRINK_COEFS['2013'][323] = {'VIS006': (19.829, 0.5856),
+MEIRINK_COEFS['2023'][323] = {'VIS006': (19.829, 0.5856),
                               'VIS008': (25.284, 0.6787),
                               'IR_016': (23.066, -0.0286)
                               }
 
 # Meteosat-11
 
-MEIRINK_COEFS['2013'][324] = {'VIS006': (20.515, 0.3600),
+MEIRINK_COEFS['2023'][324] = {'VIS006': (20.515, 0.3600),
                               'VIS008': (25.803, 0.4844),
                               'IR_016': (22.354, -0.0187)
                               }
@@ -664,7 +667,7 @@ def __init__(self, platform_id, channel_name, coefs, calib_mode, scan_time):
             scan_time=self._scan_time
         )
 
-        valid_modes = ('NOMINAL', 'GSICS', 'MEIRINK-2013')
+        valid_modes = ('NOMINAL', 'GSICS', 'MEIRINK-2023')
         if self._calib_mode not in valid_modes:
             raise ValueError(
                 'Invalid calibration mode: {}. Choose one of {}'.format(
diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py
index 910623bcd5..32918ea45b 100644
--- a/satpy/tests/reader_tests/test_seviri_base.py
+++ b/satpy/tests/reader_tests/test_seviri_base.py
@@ -373,8 +373,8 @@ def test_get_meirink_slope_epoch(self, platform_id, channel_name):
         coefs = {'coefs': {}}
         coefs['coefs']['NOMINAL'] = {'gain': -1, 'offset': -1}
         coefs['coefs']['EXTERNAL'] = {}
-        calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK-2013', MEIRINK_EPOCH)
-        assert calibration_handler.get_gain_offset()[0] == MEIRINK_COEFS['2013'][platform_id][channel_name][0]/1000.
+        calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK-2023', MEIRINK_EPOCH)
+        assert calibration_handler.get_gain_offset()[0] == MEIRINK_COEFS['2023'][platform_id][channel_name][0]/1000.
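With the extra version level introduced above, coefficient look-ups now pick the table via the suffix of the calibration mode string. A minimal sketch follows; the `split('-')` keying is one plausible reading of the new `MEIRINK-2023` mode, not necessarily satpy's exact code.

```python
from datetime import datetime

# Excerpt of the re-keyed table above (Meteosat-8 VIS006 only).
MEIRINK_COEFS = {'2023': {321: {'VIS006': (24.346, 0.3739)}}}

calib_mode = 'MEIRINK-2023'
version = calib_mode.split('-')[1]  # -> '2023'
a, b = MEIRINK_COEFS[version][321]['VIS006']
days = (datetime(2005, 1, 18) - datetime(2000, 1, 1)).total_seconds() / 86400
gain = (a + b * days / 1000) / 1000
assert abs(gain - 0.0250354716) < 1e-6  # matches the parametrized expectation below
```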
@pytest.mark.parametrize('platform_id,time,expected', ( (321, datetime(2005, 1, 18, 0, 0), [0.0250354716, 0.0315626684, 0.022880986]), @@ -392,5 +392,5 @@ def test_get_meirink_slope_2020(self, platform_id, time, expected): coefs['coefs']['NOMINAL'] = {'gain': -1, 'offset': -1} coefs['coefs']['EXTERNAL'] = {} for i, channel_name in enumerate(['VIS006', 'VIS008', 'IR_016']): - calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK-2013', time) + calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK-2023', time) assert abs(calibration_handler.get_gain_offset()[0] - expected[i]) < 1e-6 From 761d273385808a20696af3baaa95cd95c5d314d7 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 11 Oct 2023 14:39:17 +0200 Subject: [PATCH 0564/1416] Update pyproject.toml Co-authored-by: Gerrit Holl --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index d0eac01fe0..e3a3a2efbe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,6 +14,7 @@ known_first_party = "satpy" line_length = 120 [tool.ruff] +# See https://docs.astral.sh/ruff/rules/ select = ["E", "W", "F", "I", "D", "S", "B", "A", "PT", "Q", "TID", "C90", "T10", "N"] ignore = ["B905"] # only available from python 3.10 line-length = 120 From 49a156804b12faa92fd266659acaad5aea948963 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 11 Oct 2023 09:18:01 -0500 Subject: [PATCH 0565/1416] Remove unused setup section in setup.cfg --- setup.cfg | 4 ---- 1 file changed, 4 deletions(-) diff --git a/setup.cfg b/setup.cfg index 3e09909a6a..205f924b33 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,7 +1,3 @@ -[options] -setup_requires = - setuptools_scm - [bdist_rpm] requires=h5py pyresample python2-numexpr pyhdf xarray dask h5netcdf release=1 From 056a3a9da5c8de9d811655441c234f735482f1a8 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 11 Oct 2023 10:47:00 -0500 Subject: [PATCH 0566/1416] Fix type annotations in seviri_base.py --- satpy/readers/seviri_base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 83977bf40c..5e6d69ea68 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -184,6 +184,7 @@ http://dx.doi.org/10.5194/amt-6-2495-2013 """ +from __future__ import annotations import warnings from datetime import datetime, timedelta @@ -388,7 +389,7 @@ # Epoch for the MEIRINK re-calibration MEIRINK_EPOCH = datetime(2000, 1, 1) -MEIRINK_COEFS = {} +MEIRINK_COEFS: dict[str, dict[int, dict[str, tuple[float, float]]]] = {} MEIRINK_COEFS['2023'] = {} # Meteosat-8 From 1d63b698e0ce7126434a72b562f61d353a639032 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 11 Oct 2023 19:11:14 +0200 Subject: [PATCH 0567/1416] Improve comment --- satpy/modifiers/angles.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index 8a3fe4910a..a11a3d2cd7 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -161,7 +161,7 @@ def __call__(self, *args, cache_dir: Optional[str] = None) -> Any: self._warn_if_irregular_input_chunks(args, args_to_use) self._cache_results(res, zarr_format) - # if we did any caching, let's load from the zarr files + # if we did any caching, let's load from the zarr files, so that future calls have the same name # re-calculate the cached paths zarr_paths = sorted(glob(zarr_format.format("*"))) if not zarr_paths: From 
e1ea7cedb6b945bfca31a8078fd0e55c74c3abc0 Mon Sep 17 00:00:00 2001
From: Simon Proud
Date: Thu, 12 Oct 2023 08:14:12 +0100
Subject: [PATCH 0568/1416] Update documentation index for renamed files and
 replace some single quotes with double quotes.

---
 doc/source/index.rst   |  4 ++--
 doc/source/writing.rst | 30 +++++++++++++++---------------
 2 files changed, 17 insertions(+), 17 deletions(-)

diff --git a/doc/source/index.rst b/doc/source/index.rst
index 413f7864a1..052a7e2d03 100644
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -63,12 +63,12 @@ Documentation
    data_download
    examples/index
    quickstart
-   readers
+   reading
    remote_reading
    composites
    resample
    enhancements
-   writers
+   writing
    multiscene
    dev_guide/index
 
diff --git a/doc/source/writing.rst b/doc/source/writing.rst
index 779a550761..426be0b458 100644
--- a/doc/source/writing.rst
+++ b/doc/source/writing.rst
@@ -7,20 +7,20 @@ For details on additional arguments and features available for a specific Writer
 Most use cases will want to save datasets using the
 :meth:`~satpy.scene.Scene.save_datasets` method::
 
-    >>> scn.save_datasets(writer='simple_image')
+    >>> scn.save_datasets(writer="simple_image")
 
 The ``writer`` parameter defaults to using the ``geotiff`` writer.
 One common parameter across almost all Writers is ``filename`` and
 ``base_dir`` to help automate saving files with custom filenames::
 
     >>> scn.save_datasets(
-    ...     filename='{name}_{start_time:%Y%m%d_%H%M%S}.tif',
-    ...     base_dir='/tmp/my_ouput_dir')
+    ...     filename="{name}_{start_time:%Y%m%d_%H%M%S}.tif",
+    ...     base_dir="/tmp/my_output_dir")
 
 .. versionchanged:: 0.10
 
     The `file_pattern` keyword argument was renamed to `filename` to match
     the `save_dataset` method's keyword argument.
@@ -129,10 +129,10 @@ and save them all at once.
 
     >>> from satpy.writers import compute_writer_results
     >>> res1 = scn.save_datasets(filename="/tmp/{name}.png",
-    ...                          writer='simple_image',
+    ...                          writer="simple_image",
     ...                          compute=False)
     >>> res2 = scn.save_datasets(filename="/tmp/{name}.tif",
-    ...                          writer='geotiff',
+    ...                          writer="geotiff",
     ...                          compute=False)
     >>> results = [res1, res2]
     >>> compute_writer_results(results)
@@ -147,14 +147,14 @@ to be added.
 
 .. code-block:: python
 
-    >>> decodict = {'decorate': [{'text': {'txt': f' {my_text}',
-    ...                                    'align': {'top_bottom': 'top', 'left_right': 'left'},
-    ...                                    'font': <path to font>,
-    ...                                    'font_size': 48,
-    ...                                    'line': 'white',
-    ...                                    'bg_opacity': 255,
-    ...                                    'bg': 'black',
-    ...                                    'height': 30,
-    ...                                    }}]}
+    >>> decodict = {"decorate": [{"text": {"txt": f" {my_text}",
+    ...                                    "align": {"top_bottom": "top", "left_right": "left"},
+    ...                                    "font": <path to font>,
+    ...                                    "font_size": 48,
+    ...                                    "line": "white",
+    ...                                    "bg_opacity": 255,
+    ...                                    "bg": "black",
+    ...                                    "height": 30,
+    ...                                    }}]}
 
 Where `my_text` is the text you wish to add and `<path to font>` is the
 location of the font file you wish to use, often in `/usr/share/fonts/`
 
 This dictionary can then be passed to the :meth:`~satpy.scene.Scene.save_dataset` or :meth:`~satpy.scene.Scene.save_datasets` command.
 
 .. code-block:: python
 
-    >>> scene.save_dataset(my_dataset, writer='simple_image', fill_value=False,
+    >>> scene.save_dataset(my_dataset, writer="simple_image", fill_value=False,
     ...                    
decorate=decodict)

From c9c3459e1e1e1e272d889ae65ba7a6324d56fda6 Mon Sep 17 00:00:00 2001
From: "Nina.Hakansson"
Date: Thu, 12 Oct 2023 09:21:17 +0200
Subject: [PATCH 0569/1416] Split full_days and part_of_days; raise an
 exception when units are not of the expected format; use us instead of ms
 for better accuracy

---
 satpy/readers/viirs_vgac_l1c_nc.py | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/satpy/readers/viirs_vgac_l1c_nc.py b/satpy/readers/viirs_vgac_l1c_nc.py
index 09c361f9a7..5fc9fd0415 100644
--- a/satpy/readers/viirs_vgac_l1c_nc.py
+++ b/satpy/readers/viirs_vgac_l1c_nc.py
@@ -84,15 +84,17 @@ def decode_time_variable(self, data, nc):
         if data.attrs["units"] == "hours since proj_time0":
             reference_time = np.datetime64(datetime.strptime(nc['proj_time0'].attrs["units"],
                                                              'days since %d/%m/%YT%H:%M:%S'))
-            delta_days = float(nc['proj_time0'].values) * np.timedelta64(1, 'D').astype('timedelta64[ms]')
-            delta_hours = data.values * np.timedelta64(1, 'h').astype('timedelta64[ms]')
-            time_data = xr.DataArray(reference_time + delta_days + delta_hours,
-                                     coords=data.coords, attrs={"long_name": "Scanline time"})
+            delta_part_of_day, delta_full_days = np.modf(nc['proj_time0'].values)
+            delta_full_days = np.timedelta64(int(delta_full_days), 'D')
+            delta_part_of_day = delta_part_of_day * np.timedelta64(1, 'D').astype('timedelta64[us]')
+            delta_hours = data.values * np.timedelta64(1, 'h').astype('timedelta64[us]')
+            time_data = xr.DataArray(reference_time + delta_full_days + delta_part_of_day + delta_hours,
+                                     coords=data.coords, attrs={'long_name': 'Scanline time'})
             self._start_time = self.dt64_to_datetime(time_data[0].values)
             self._end_time = self.dt64_to_datetime(time_data[-1].values)
             return time_data
         else:
-            return data
+            raise AttributeError('Unit of time variable in VGAC nc file is not "hours since proj_time0"')

From 4facdafe90250e46fd43f742a8c3efff16576521 Mon Sep 17 00:00:00 2001
From: "Nina.Hakansson"
Date: Thu, 12 Oct 2023 09:21:36 +0200
Subject: [PATCH 0570/1416] Also test reading scanline_timestamps

---
 .../reader_tests/test_viirs_vgac_l1c_nc.py    | 28 ++++++++++++++++++-
 1 file changed, 27 insertions(+), 1 deletion(-)

diff --git a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py
index 2f926a0e47..d74e3725fb 100644
--- a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py
+++ b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py
@@ -42,6 +42,7 @@ def _nc_filename(tmp_path):
     nc.createDimension('npix', npix)
     nc.createDimension('nscn', nscn)
     nc.createDimension('n_lut', n_lut)
+    nc.createDimension('one', 1)
     nc.StartTime = "2023-03-28T09:08:07"
     nc.EndTime = "2023-03-28T10:11:12"
     for ind in range(1, 11, 1):
@@ -60,6 +61,23 @@ def _nc_filename(tmp_path):
             setattr(tb_b, attr, attrs[attr])
         tb_lut = nc.createVariable(ch_name + "_LUT", np.float32, dimensions=('n_lut'))
         tb_lut[:] = np.array(range(0, n_lut)) * 0.5
+        setattr(tb_lut, 'units', "Kelvin")
+    reference_time = np.datetime64("2010-01-01T00:00:00")
+    start_time = np.datetime64("2023-03-28T09:08:07") + np.timedelta64(123, "ms")
+    delta_days = start_time - reference_time
+    delta_full_days = delta_days.astype('timedelta64[D]')
+    hidden_reference_time = reference_time + delta_full_days
+    delta_part_of_days = start_time - hidden_reference_time
+    proj_time0 = nc.createVariable('proj_time0', np.float64, ("one",))
+    proj_time0[:] = (delta_full_days.astype(int) +
+                     0.000001 * 
delta_part_of_days.astype('timedelta64[us]').astype(np.int64) / (60 * 60 * 24))
+    proj_time0.units = 'days since 01/01/2010T00:00:00'
+    time_v = nc.createVariable('time', np.float64, ('nscn',))
+    delta_h = np.datetime64(nc.EndTime) - start_time
+    delta_hours = 0.000001 * delta_h.astype('timedelta64[us]').astype(int) / (60 * 60)
+    time_v[:] = np.linspace(0, delta_hours, num=nscn)
+    time_v.units = 'hours since proj_time0'
+
     return filename_str
 
 
@@ -74,7 +92,15 @@ def test_read_vgac(self, _nc_filename):
         scn_ = Scene(
             reader='viirs_vgac_l1c_nc',
             filenames=[_nc_filename])
-        scn_.load(["M05", "M15"])
+        scn_.load(["M05", "M15", "scanline_timestamps"])
+        assert ((scn_["scanline_timestamps"][0] -
+                 np.datetime64("2023-03-28T09:08:07") - np.timedelta64(123, "ms")) < np.timedelta64(5, "us"))
+        assert ((scn_["scanline_timestamps"][-1] - np.datetime64("2023-03-28T10:11:12")) < np.timedelta64(5, "us"))
+        assert ((np.datetime64("2023-03-28T09:08:07") + np.timedelta64(123, "ms") -
+                 scn_["scanline_timestamps"][0]) < np.timedelta64(5, "us"))
+        assert ((np.datetime64("2023-03-28T10:11:12") - scn_["scanline_timestamps"][-1]) < np.timedelta64(5, "us"))
+
         assert (scn_["M05"][0, 0] == 100)
         assert (scn_["M15"][0, 0] == 400)
         assert scn_.start_time == datetime.datetime(year=2023, month=3, day=28,

From d130317b2cac98d7ba25c335646c856c1055862a Mon Sep 17 00:00:00 2001
From: Simon Proud
Date: Thu, 12 Oct 2023 08:50:06 +0100
Subject: [PATCH 0571/1416] Add some global EPSG 4326 gridded lat/lon areas.

---
 satpy/etc/areas.yaml | 67 ++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 67 insertions(+)

diff --git a/satpy/etc/areas.yaml b/satpy/etc/areas.yaml
index 4f71368375..68f771d1d9 100644
--- a/satpy/etc/areas.yaml
+++ b/satpy/etc/areas.yaml
@@ -2030,3 +2030,70 @@ msg_resample_area:
   area_extent:
     lower_left_xy: [-5570248.477339261, -5567248.074173444]
     upper_right_xy: [5567248.074173444, 5570248.477339261]
+
+# Global lat / lon gridded areas
+EPSG_4326_36000x18000:
+  description: Global equal latitude/longitude grid at 0.01 degree resolution
+  projection:
+    EPSG:4326
+  shape:
+    height: 18000
+    width: 36000
+  area_extent:
+    lower_left_xy: [-180.0, -90.0]
+    upper_right_xy: [180.0, 90.0]
+
+EPSG_4326_7200x3600:
+  description: Global equal latitude/longitude grid at 0.05 degree resolution
+  projection:
+    EPSG:4326
+  shape:
+    height: 3600
+    width: 7200
+  area_extent:
+    lower_left_xy: [-180.0, -90.0]
+    upper_right_xy: [180.0, 90.0]
+
+EPSG_4326_1440x720:
+  description: Global equal latitude/longitude grid at 0.25 degree resolution
+  projection:
+    EPSG:4326
+  shape:
+    height: 720
+    width: 1440
+  area_extent:
+    lower_left_xy: [-180.0, -90.0]
+    upper_right_xy: [180.0, 90.0]
+
+EPSG_4326_720x360:
+  description: Global equal latitude/longitude grid at 0.5 degree resolution
+  projection:
+    EPSG:4326
+  shape:
+    height: 360
+    width: 720
+  area_extent:
+    lower_left_xy: [-180.0, -90.0]
+    upper_right_xy: [180.0, 90.0]
+
+EPSG_4326_3600x1800:
+  description: Global equal latitude/longitude grid at 0.1 degree resolution
+  projection:
+    EPSG:4326
+  shape:
+    height: 1800
+    width: 3600
+  area_extent:
+    lower_left_xy: [-180.0, -90.0]
+    upper_right_xy: [180.0, 90.0]
+
+EPSG_4326_360x180:
+  description: Global equal latitude/longitude grid at 1 degree resolution
+  projection:
+    EPSG:4326
+  shape:
+    height: 180
+    width: 360
+  area_extent:
+    lower_left_xy: [-180.0, -90.0]
+    upper_right_xy: [180.0, 90.0]
\ No newline at end of file

From d5ab7e47dc58953b4b554dab67f85a7aff9da640 Mon Sep 17 00:00:00 2001 
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 12 Oct 2023 07:54:05 +0000 Subject: [PATCH 0572/1416] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- satpy/etc/areas.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/areas.yaml b/satpy/etc/areas.yaml index 68f771d1d9..ec5cc5d254 100644 --- a/satpy/etc/areas.yaml +++ b/satpy/etc/areas.yaml @@ -2096,4 +2096,4 @@ EPSG_4326_360x180: width: 360 area_extent: lower_left_xy: [-180.0, -90.0] - upper_right_xy: [180.0, 90.0] \ No newline at end of file + upper_right_xy: [180.0, 90.0] From 81af83893c483900c37898ef35a06241bdf88040 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Thu, 12 Oct 2023 09:00:09 +0100 Subject: [PATCH 0573/1416] Update areas.yaml Re-order some of the area definitions. --- satpy/etc/areas.yaml | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/satpy/etc/areas.yaml b/satpy/etc/areas.yaml index ec5cc5d254..28805855d9 100644 --- a/satpy/etc/areas.yaml +++ b/satpy/etc/areas.yaml @@ -2054,6 +2054,17 @@ EPSG_4326_7200x3600: lower_left_xy: [-180.0, -90.0] upper_right_xy: [180.0, 90.0] +EPSG_4326_3600x1800: + description: Global equal latitude/longitude grid at 0.1 degree resolution + projection: + EPSG:4326 + shape: + height: 1800 + width: 3600 + area_extent: + lower_left_xy: [-180.0, -90.0] + upper_right_xy: [180.0, 90.0] + EPSG_4326_1440x720: description: Global equal latitude/longitude grid at 0.25 degree resolution projection: @@ -2076,17 +2087,6 @@ EPSG_4326_720x360: lower_left_xy: [-180.0, -90.0] upper_right_xy: [180.0, 90.0] -EPSG_4326_3600x1800: - description: Global equal latitude/longitude grid at 0.1 degree resolution - projection: - EPSG:4326 - shape: - height: 1800 - width: 3600 - area_extent: - lower_left_xy: [-180.0, -90.0] - upper_right_xy: [180.0, 90.0] - EPSG_4326_360x180: description: Global equal latitude/longitude grid at 1 degree resolution projection: From 06ad2e573285e535cca3731761af4358e7240bd9 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Thu, 12 Oct 2023 11:35:15 +0100 Subject: [PATCH 0574/1416] Fix error in reading.rst file. --- doc/source/reading.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/reading.rst b/doc/source/reading.rst index eefa4ccba1..8553b52e76 100644 --- a/doc/source/reading.rst +++ b/doc/source/reading.rst @@ -116,7 +116,7 @@ Starting with Satpy version 0.25.1 with supported readers it is possible to load data from remote file systems like ``s3fs`` or ``fsspec``. For example: -:: +.. code-block:: python >>> from satpy import Scene >>> from satpy.readers import FSFile From d012c2180d1b76d83dc2b7e96a131ff104e8957c Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Thu, 12 Oct 2023 11:42:57 +0100 Subject: [PATCH 0575/1416] Rename 'readers' page in documentation within two satpy readers. --- satpy/readers/modis_l2.py | 2 +- satpy/readers/seviri_base.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/modis_l2.py b/satpy/readers/modis_l2.py index 1a7fc3ae38..ac1522dfe9 100644 --- a/satpy/readers/modis_l2.py +++ b/satpy/readers/modis_l2.py @@ -28,7 +28,7 @@ - m[o/y]d35_l2: cloud_mask dataset - some datasets in m[o/y]d06 files -To get a list of the available datasets for a given file refer to the "Load data" section in :doc:`../readers`. +To get a list of the available datasets for a given file refer to the "Load data" section in :doc:`../reading`. 
Geolocation files diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 131fe39ad4..7870e591a0 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -122,7 +122,7 @@ * The ``orbital_parameters`` attribute provides the nominal and actual satellite position, as well as the projection centre. See the `Metadata` section in - the :doc:`../readers` chapter for more information. + the :doc:`../reading` chapter for more information. * The ``acq_time`` coordinate provides the mean acquisition time for each scanline. Use a ``MultiIndex`` to enable selection by acquisition time: From e96db8158e087d5c3fc6a987362714d506a1e3c6 Mon Sep 17 00:00:00 2001 From: "Nina.Hakansson" Date: Thu, 12 Oct 2023 13:27:13 +0200 Subject: [PATCH 0576/1416] Try to make codebeat happy --- satpy/readers/viirs_vgac_l1c_nc.py | 35 +++++++++++-------- .../reader_tests/test_viirs_vgac_l1c_nc.py | 2 +- 2 files changed, 21 insertions(+), 16 deletions(-) diff --git a/satpy/readers/viirs_vgac_l1c_nc.py b/satpy/readers/viirs_vgac_l1c_nc.py index 5fc9fd0415..e34d95fb92 100644 --- a/satpy/readers/viirs_vgac_l1c_nc.py +++ b/satpy/readers/viirs_vgac_l1c_nc.py @@ -79,20 +79,26 @@ def dt64_to_datetime(self, dt64): return dt64.astype(datetime) return dt64 - def decode_time_variable(self, data, nc): - """Decode time variable.""" + def extract_time_data(self, data, nc): + """Decode time data.""" + reference_time = np.datetime64(datetime.strptime(nc['proj_time0'].attrs["units"], + 'days since %d/%m/%YT%H:%M:%S')) + delta_part_of_day, delta_full_days = np.modf(nc['proj_time0'].values) + delta_full_days = np.timedelta64(int(delta_full_days), 'D') + delta_part_of_day = delta_part_of_day * np.timedelta64(1, 'D').astype('timedelta64[us]') + delta_hours = data.values * np.timedelta64(1, 'h').astype('timedelta64[us]') + time_data = xr.DataArray(reference_time + delta_full_days + delta_part_of_day + delta_hours, + coords=data.coords, attrs={'long_name': 'Scanline time'}) + self._start_time = self.dt64_to_datetime(time_data[0].values) + self._end_time = self.dt64_to_datetime(time_data[-1].values) + return time_data + + def decode_time_variable(self, data, file_key, nc): + """Decide if time data should be decoded.""" + if file_key != "time": + return data if data.attrs["units"] == "hours since proj_time0": - reference_time = np.datetime64(datetime.strptime(nc['proj_time0'].attrs["units"], - 'days since %d/%m/%YT%H:%M:%S')) - delta_part_of_day, delta_full_days = np.modf(nc['proj_time0'].values) - delta_full_days = np.timedelta64(int(delta_full_days), 'D') - delta_part_of_day = delta_part_of_day * np.timedelta64(1, 'D').astype('timedelta64[us]') - delta_hours = data.values * np.timedelta64(1, 'h').astype('timedelta64[us]') - time_data = xr.DataArray(reference_time + delta_full_days + delta_part_of_day + delta_hours, - coords=data.coords, attrs={'long_name': 'Scanline time'}) - self._start_time = self.dt64_to_datetime(time_data[0].values) - self._end_time = self.dt64_to_datetime(time_data[-1].values) - return time_data + return self.extract_time_data(data, nc) else: raise AttributeError('Unit of time variable in VGAC nc file is not "hours since proj_time0"') @@ -105,8 +111,7 @@ def get_dataset(self, key, yaml_info): file_key = yaml_info.get('nc_key', name) data = nc[file_key] data = self.calibrate(data, yaml_info, file_key, nc) - if file_key == "time": - data = self.decode_time_variable(data, nc) + data = self.decode_time_variable(data, file_key, nc) data.attrs.update(nc.attrs) # For now add global 
attributes to all datasets data.attrs.update(yaml_info) self.set_time_attrs(data) diff --git a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py index d74e3725fb..2159af3864 100644 --- a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py +++ b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py @@ -61,7 +61,7 @@ def _nc_filename(tmp_path): setattr(tb_b, attr, attrs[attr]) tb_lut = nc.createVariable(ch_name + "_LUT", np.float32, dimensions=('n_lut')) tb_lut[:] = np.array(range(0, n_lut)) * 0.5 - setattr(tb_lut, 'units', "Kelvin") + tb_lut.units = "Kelvin" reference_time = np.datetime64("2010-01-01T00:00:00") start_time = np.datetime64("2023-03-28T09:08:07") + np.timedelta64(123, "ms") delta_days = start_time - reference_time From 16ab6869e551f84090a00de6b04056c87ced79e0 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 12 Oct 2023 09:33:52 -0500 Subject: [PATCH 0577/1416] Fix typo in doc/source/reading.rst --- doc/source/reading.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/reading.rst b/doc/source/reading.rst index 8553b52e76..b7264eeb6e 100644 --- a/doc/source/reading.rst +++ b/doc/source/reading.rst @@ -9,7 +9,7 @@ Reading Satpy supports reading and loading data from many input file formats and schemes through the concept of *readers*. Each reader supports a specific type of input data. The :class:`~satpy.scene.Scene` object provides a simple interface around all the complexity of -these various formats through its ``load``method. +these various formats through its ``load`` method. The following sections describe the different way data can be loaded, requested, or added to a Scene object. Available Readers From 67d4b415a3b7490b381b49652903b448f6ab4737 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Mon, 16 Oct 2023 20:13:03 +0100 Subject: [PATCH 0578/1416] Change names of `readers` and `writers` in the docs to `reading` and `writing`. --- doc/source/overview.rst | 6 +++--- doc/source/quickstart.rst | 4 ++-- satpy/dataset/dataid.py | 1 + satpy/scene.py | 4 ++-- 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/doc/source/overview.rst b/doc/source/overview.rst index 8282c40aa3..73923a63cd 100644 --- a/doc/source/overview.rst +++ b/doc/source/overview.rst @@ -127,7 +127,7 @@ Reading One of the biggest advantages of using Satpy is the large number of input file formats that it can read. It encapsulates this functionality into -individual :doc:`readers`. Satpy Readers handle all of the complexity of +individual :doc:`reading`. Satpy Readers handle all of the complexity of reading whatever format they represent. Meteorological Satellite file formats can be extremely complex and formats are rarely reused across satellites or instruments. No matter the format, Satpy's Reader interface is meant to @@ -174,7 +174,7 @@ should look. Satpy tries to hide the complexity of all the possible enhancement methods from the user and just provide the best looking image by default. Satpy still makes it possible to customize these procedures, but in most cases it shouldn't be necessary. See the documentation on -:doc:`writers` for more information on what's possible for output formats +:doc:`writing` for more information on what's possible for output formats and enhancing images. Writing @@ -187,4 +187,4 @@ users to save data in image formats like PNG or GeoTIFF as well as data file formats like NetCDF. 
Each format's complexity is hidden behind the interface of individual Writer objects and includes keyword arguments for accessing specific format features like compression and output data type. See the -:doc:`writers` documentation for the available writers and how to use them. +:doc:`writing` documentation for the available writers and how to use them. diff --git a/doc/source/quickstart.rst b/doc/source/quickstart.rst index 9f9885a750..83ca60770b 100644 --- a/doc/source/quickstart.rst +++ b/doc/source/quickstart.rst @@ -122,7 +122,7 @@ To access the loaded data use the wavelength or name: >>> print(global_scene[0.8]) For more information on loading datasets by resolution, calibration, or other -advanced loading methods see the :doc:`readers` documentation. +advanced loading methods see the :doc:`reading` documentation. Calculating measurement values and navigation coordinates @@ -255,7 +255,7 @@ Or to save an individual dataset: Datasets are automatically scaled or "enhanced" to be compatible with the output format and to provide the best looking image. For more information on saving datasets and customizing enhancements see the documentation on -:doc:`writers`. +:doc:`writing`. Slicing and subsetting scenes diff --git a/satpy/dataset/dataid.py b/satpy/dataset/dataid.py index ded6cec146..135b2af35b 100644 --- a/satpy/dataset/dataid.py +++ b/satpy/dataset/dataid.py @@ -253,6 +253,7 @@ def __hash__(self): 'reflectance', 'brightness_temperature', 'radiance', + 'radiance_wavenumber', 'counts' ], 'transitive': True, diff --git a/satpy/scene.py b/satpy/scene.py index d43c9d80d2..308f02163d 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1119,7 +1119,7 @@ def save_dataset(self, dataset_id, filename=None, writer=None, :doc:`dask:delayed` object or two lists to be passed to a `dask.array.store` call. See return values below for more details. - kwargs: Additional writer arguments. See :doc:`../writers` for more + kwargs: Additional writer arguments. See :doc:`../writing` for more information. Returns: @@ -1173,7 +1173,7 @@ def save_datasets(self, writer=None, filename=None, datasets=None, compute=True, :doc:`dask:delayed` object or two lists to be passed to a `dask.array.store` call. See return values below for more details. - kwargs: Additional writer arguments. See :doc:`../writers` for more + kwargs: Additional writer arguments. See :doc:`../writing` for more information. 
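        As a hedged illustration of that passthrough (``scn`` is an already-loaded
        Scene and the paths and values are invented), extra keywords go straight
        to the selected writer:

            >>> scn.save_datasets(writer="geotiff", base_dir="/tmp", fill_value=0)
            >>> scn.save_datasets(writer="cf", filename="scene.nc")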
Returns: From 97de220c1014684824ae558a404cbbadc02a28a7 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 16 Oct 2023 19:58:13 +0000 Subject: [PATCH 0579/1416] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.5.1 → v1.6.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.5.1...v1.6.0) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cd26d096fe..07c9509616 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -20,7 +20,7 @@ repos: - id: bandit args: [--ini, .bandit] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.5.1' # Use the sha / tag you want to point at + rev: 'v1.6.0' # Use the sha / tag you want to point at hooks: - id: mypy additional_dependencies: From 5038b50a52d15445eb62134edd85ff60d864bd8a Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 17 Oct 2023 13:02:00 +0200 Subject: [PATCH 0580/1416] Refactor cache helper's call --- satpy/modifiers/angles.py | 38 ++++++++++++++++++++------------------ 1 file changed, 20 insertions(+), 18 deletions(-) diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index a11a3d2cd7..f4146b60d5 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -138,42 +138,44 @@ def _zarr_pattern(self, arg_hash, cache_version: Union[None, int, str] = None) - def __call__(self, *args, cache_dir: Optional[str] = None) -> Any: """Call the decorated function.""" - sanitized_args = self._sanitize_args_func(*args) if self._sanitize_args_func is not None else args - should_cache, cache_dir = self._get_should_cache_and_cache_dir(sanitized_args, cache_dir) - if should_cache: - try: - arg_hash = _hash_args(*sanitized_args, unhashable_types=self._uncacheable_arg_types) - except TypeError as err: - warnings.warn("Cannot cache function because of unhashable argument: " + str(err), stacklevel=2) - should_cache = False - + should_cache: bool = satpy.config.get(self._cache_config_key, False) if not should_cache: return self._func(*args) - zarr_fn = self._zarr_pattern(arg_hash) - zarr_format = os.path.join(cache_dir, zarr_fn) + try: + return self._cache_and_read(args, cache_dir) + except TypeError as err: + warnings.warn("Cannot cache function because of unhashable argument: " + str(err), stacklevel=2) + return self._func(*args) + + def _cache_and_read(self, args, cache_dir): + sanitized_args = self._sanitize_args_func(*args) if self._sanitize_args_func is not None else args + + zarr_format = self._get_zarr_format(sanitized_args, cache_dir) zarr_paths = glob(zarr_format.format("*")) if not zarr_paths: # use sanitized arguments - args_to_use = sanitized_args - res = self._func(*args_to_use) - self._warn_if_irregular_input_chunks(args, args_to_use) - self._cache_results(res, zarr_format) + self._warn_if_irregular_input_chunks(args, sanitized_args) + res_to_cache = self._func(*(sanitized_args)) + self._cache_results(res_to_cache, zarr_format) # if we did any caching, let's load from the zarr files, so that future calls have the same name # re-calculate the cached paths zarr_paths = sorted(glob(zarr_format.format("*"))) if not zarr_paths: raise RuntimeError("Data was cached to disk but no files were found") + new_chunks = _get_output_chunks_from_func_arguments(args) res = tuple(da.from_zarr(zarr_path, 
chunks=new_chunks) for zarr_path in zarr_paths) return res - def _get_should_cache_and_cache_dir(self, args, cache_dir: Optional[str]) -> tuple[bool, str]: - should_cache: bool = satpy.config.get(self._cache_config_key, False) + def _get_zarr_format(self, sanitized_args, cache_dir): + arg_hash = _hash_args(*sanitized_args, unhashable_types=self._uncacheable_arg_types) + zarr_filename = self._zarr_pattern(arg_hash) cache_dir = self._get_cache_dir_from_config(cache_dir) - return should_cache, cache_dir + zarr_format = os.path.join(cache_dir, zarr_filename) + return zarr_format @staticmethod def _get_cache_dir_from_config(cache_dir: Optional[str]) -> str: From 3078273c33bf3113dff9643c686da44ececa7b91 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 17 Oct 2023 13:05:29 +0200 Subject: [PATCH 0581/1416] Update changelog for v0.44.0 --- CHANGELOG.md | 53 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 53 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 799ae0a867..4b9ab4e1b7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,56 @@ +## Version 0.44.0 (2023/10/17) + +### Issues Closed + +* [Issue 2593](https://github.com/pytroll/satpy/issues/2593) - FY4A REGC data resampling return all nan +* [Issue 2591](https://github.com/pytroll/satpy/issues/2591) - Is there a corresponding reader for S3A_SL_2_WST? +* [Issue 2581](https://github.com/pytroll/satpy/issues/2581) - Can reader 'modis_l1b' correct MODIS Bow Tie Effect? +* [Issue 2580](https://github.com/pytroll/satpy/issues/2580) - Does python3.8 and below seem to fail to install via the command line "conda install -c conda-forge satpy"? +* [Issue 2571](https://github.com/pytroll/satpy/issues/2571) - Add Calibration by Meirink et al for SEVIRI ([PR 2589](https://github.com/pytroll/satpy/pull/2589) by [@pdebuyl](https://github.com/pdebuyl)) +* [Issue 2549](https://github.com/pytroll/satpy/issues/2549) - setuptools-scm-git-archive is obsolete -- use setuptools-scm >= 7 ([PR 2598](https://github.com/pytroll/satpy/pull/2598) by [@pdebuyl](https://github.com/pdebuyl)) +* [Issue 2266](https://github.com/pytroll/satpy/issues/2266) - AGRI data fails with `native` resampling` + +In this release 7 issues were closed. + +### Pull Requests Merged + +#### Bugs fixed + +* [PR 2598](https://github.com/pytroll/satpy/pull/2598) - remove setuptools_scm_git_archive from requirement ([2549](https://github.com/pytroll/satpy/issues/2549), [2549](https://github.com/pytroll/satpy/issues/2549)) +* [PR 2579](https://github.com/pytroll/satpy/pull/2579) - Adapt satpy to numpy 2 +* [PR 2575](https://github.com/pytroll/satpy/pull/2575) - Remove use of deprecated setuptools_scm_git_archive build package + +#### Features added + +* [PR 2600](https://github.com/pytroll/satpy/pull/2600) - Add some global EPSG 4326 gridded lat/lon areas. +* [PR 2589](https://github.com/pytroll/satpy/pull/2589) - Add meirink calib ([2571](https://github.com/pytroll/satpy/issues/2571)) +* [PR 2584](https://github.com/pytroll/satpy/pull/2584) - Convert AHI HSD dask chunking to be based on band resolution +* [PR 2574](https://github.com/pytroll/satpy/pull/2574) - Rename ABI "night_microphysics_abi" composite to "night_microphysics" +* [PR 2572](https://github.com/pytroll/satpy/pull/2572) - Add reader for GERB high-resolution HDF5 files +* [PR 2558](https://github.com/pytroll/satpy/pull/2558) - New reader for Himawari L2 NOAA enterprise cloud products. 
+* [PR 2556](https://github.com/pytroll/satpy/pull/2556) - Implement modifier for reducing signal as a function of sunz angle +* [PR 2554](https://github.com/pytroll/satpy/pull/2554) - Implement non-linear scaling for NDVI hybrid green correction +* [PR 2488](https://github.com/pytroll/satpy/pull/2488) - Add a blend method to create temporal RGB from MultiScene +* [PR 2052](https://github.com/pytroll/satpy/pull/2052) - Add resolution dependent chunk sizing to 'modis_l1b' reader + +#### Documentation changes + +* [PR 2582](https://github.com/pytroll/satpy/pull/2582) - Add mastodon link +* [PR 2517](https://github.com/pytroll/satpy/pull/2517) - Add documentation on putting text onto images + +#### Backward incompatible changes + +* [PR 2574](https://github.com/pytroll/satpy/pull/2574) - Rename ABI "night_microphysics_abi" composite to "night_microphysics" + +#### Clean ups + +* [PR 2587](https://github.com/pytroll/satpy/pull/2587) - Remove libnetcdf specific build from CI env +* [PR 2578](https://github.com/pytroll/satpy/pull/2578) - Remove unneeded performance tracker in seviri reader +* [PR 2575](https://github.com/pytroll/satpy/pull/2575) - Remove use of deprecated setuptools_scm_git_archive build package + +In this release 19 pull requests were closed. + + ## Version 0.43.0 (2023/07/03) ### Issues Closed From e09e047cfeeacf3dceba57eb021a59ca9ffe6d4d Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 20 Oct 2023 14:50:31 +0100 Subject: [PATCH 0582/1416] Change platform name for EPIC (DSCOVR) to upper case. --- satpy/readers/epic_l1b_h5.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/epic_l1b_h5.py b/satpy/readers/epic_l1b_h5.py index 55c020ee21..d83bf6893d 100644 --- a/satpy/readers/epic_l1b_h5.py +++ b/satpy/readers/epic_l1b_h5.py @@ -69,7 +69,7 @@ def __init__(self, filename, filename_info, filetype_info): super(DscovrEpicL1BH5FileHandler, self).__init__(filename, filename_info, filetype_info) self.sensor = 'epic' - self.platform_name = 'dscovr' + self.platform_name = 'DSCOVR' @property def start_time(self): From 58ddfaa671b0d74c9b23bffc3146bbf1337e4e4e Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 20 Oct 2023 10:11:06 -0500 Subject: [PATCH 0583/1416] Bump xarray expected versions in test_cf.py --- satpy/tests/writer_tests/test_cf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index a034038663..7fdcaeb553 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -1448,5 +1448,5 @@ def _should_use_compression_keyword(): versions = _get_backend_versions() return ( versions["libnetcdf"] >= Version("4.9.0") and - versions["xarray"] >= Version("2023.10") + versions["xarray"] >= Version("2023.11") ) From 7495e06d49d26f5259b631437bcdc09067bc4de5 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 22 Oct 2023 19:56:16 -0500 Subject: [PATCH 0584/1416] Fix ABI L2 reader to produce reflectances as percentages --- satpy/readers/abi_l2_nc.py | 6 ++ satpy/tests/reader_tests/test_abi_l2_nc.py | 69 +++++++++++++--------- 2 files changed, 48 insertions(+), 27 deletions(-) diff --git a/satpy/readers/abi_l2_nc.py b/satpy/readers/abi_l2_nc.py index a152790197..62aad5d08c 100644 --- a/satpy/readers/abi_l2_nc.py +++ b/satpy/readers/abi_l2_nc.py @@ -41,6 +41,12 @@ def get_dataset(self, key, info): variable.attrs.update(key.to_dict()) self._update_data_arr_with_filename_attrs(variable) self._remove_problem_attrs(variable) + + # 
convert to satpy standard units + if variable.attrs['units'] == '1' and key['calibration'] != 'counts': + variable *= 100.0 + variable.attrs['units'] = '%' + return variable def _update_data_arr_with_filename_attrs(self, variable): diff --git a/satpy/tests/reader_tests/test_abi_l2_nc.py b/satpy/tests/reader_tests/test_abi_l2_nc.py index 2f2131461e..a0ec5b92e4 100644 --- a/satpy/tests/reader_tests/test_abi_l2_nc.py +++ b/satpy/tests/reader_tests/test_abi_l2_nc.py @@ -20,6 +20,7 @@ from unittest import mock import numpy as np +import pytest import xarray as xr @@ -87,7 +88,12 @@ def _assert_orbital_parameters(orb_params): def _create_mcmip_dataset(): - return _create_cmip_dataset("CMI_C14") + ds1 = _create_cmip_dataset("CMI_C01") + ds2 = _create_cmip_dataset("CMI_C14") + ds1["CMI_C01"].attrs["units"] = "1" + ds2["CMI_C14"].attrs["units"] = "K" + ds1["CMI_C14"] = ds2["CMI_C14"] + return ds1 class Test_NC_ABI_L2_get_dataset: @@ -135,46 +141,55 @@ def test_get_dataset_gfls(self): class TestMCMIPReading: """Test cases of the MCMIP file format.""" + @pytest.mark.parametrize( + ("product", "exp_metadata"), + [ + ("C14", {"calibration": "brightness_temperature", "wavelength": (10.8, 11.2, 11.6), "units": "K"}), + ("C01", {"calibration": "reflectance", "wavelength": (0.45, 0.47, 0.49), "units": "%"}), + ] + ) @mock.patch('satpy.readers.abi_base.xr') - def test_mcmip_get_dataset(self, xr_): + def test_mcmip_get_dataset(self, xr_, product, exp_metadata): """Test getting channel from MCMIP file.""" from datetime import datetime from pyresample.geometry import AreaDefinition from satpy import Scene - from satpy.dataset.dataid import WavelengthRange - xr_.open_dataset.return_value = _create_mcmip_dataset() + fake_ds = _create_mcmip_dataset() + xr_.open_dataset.return_value = fake_ds fn = "OR_ABI-L2-MCMIPF-M6_G16_s20192600241149_e20192600243534_c20192600245360.nc" scn = Scene(reader='abi_l2_nc', filenames=[fn]) - scn.load(['C14']) + scn.load([product]) exp_data = np.array([[2 * 0.3052037, np.nan], [32768 * 0.3052037, 32767 * 0.3052037]]) + if "C01" in product: + exp_data *= 100 + + exp_attrs = { + 'instrument_ID': None, + 'modifiers': (), + 'name': product, + 'observation_type': 'MCMIP', + 'orbital_slot': None, + 'reader': 'abi_l2_nc', + 'platform_name': 'GOES-16', + 'platform_shortname': 'G16', + 'production_site': None, + 'scan_mode': 'M6', + 'scene_abbr': 'F', + 'scene_id': None, + 'sensor': 'abi', + 'timeline_ID': None, + 'start_time': datetime(2017, 9, 20, 17, 30, 40, 800000), + 'end_time': datetime(2017, 9, 20, 17, 41, 17, 500000), + 'ancillary_variables': [], + } + exp_attrs.update(exp_metadata) - exp_attrs = {'instrument_ID': None, - 'modifiers': (), - 'name': 'C14', - 'observation_type': 'MCMIP', - 'orbital_slot': None, - 'reader': 'abi_l2_nc', - 'platform_name': 'GOES-16', - 'platform_shortname': 'G16', - 'production_site': None, - 'scan_mode': 'M6', - 'scene_abbr': 'F', - 'scene_id': None, - 'sensor': 'abi', - 'timeline_ID': None, - 'start_time': datetime(2017, 9, 20, 17, 30, 40, 800000), - 'end_time': datetime(2017, 9, 20, 17, 41, 17, 500000), - 'calibration': 'brightness_temperature', - 'ancillary_variables': [], - 'wavelength': WavelengthRange(10.8, 11.2, 11.6, unit='µm'), - 'units': 'm'} - - res = scn['C14'] + res = scn[product] np.testing.assert_allclose(res.data, exp_data, equal_nan=True) assert isinstance(res.attrs['area'], AreaDefinition) _compare_subdict(res.attrs, exp_attrs) From bd43ad6be06844eb44e114f8e78a94154a481bec Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 23 Oct 2023 
06:43:53 -0500 Subject: [PATCH 0585/1416] Fix ABI L2 to only convert reflectances to percentages --- satpy/readers/abi_l2_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/abi_l2_nc.py b/satpy/readers/abi_l2_nc.py index 62aad5d08c..d63ba354a6 100644 --- a/satpy/readers/abi_l2_nc.py +++ b/satpy/readers/abi_l2_nc.py @@ -43,7 +43,7 @@ def get_dataset(self, key, info): self._remove_problem_attrs(variable) # convert to satpy standard units - if variable.attrs['units'] == '1' and key['calibration'] != 'counts': + if variable.attrs['units'] == '1' and key['calibration'] == 'reflectance': variable *= 100.0 variable.attrs['units'] = '%' From dc3ee7d30151584caaae05e6fb21401702614cfd Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 23 Oct 2023 13:08:30 -0500 Subject: [PATCH 0586/1416] Remove legacy resampler code Satpy depends on a new enough version of pyresample that all of theses imports are guaranteed. --- satpy/resample.py | 340 ++-------------------------------------------- 1 file changed, 12 insertions(+), 328 deletions(-) diff --git a/satpy/resample.py b/satpy/resample.py index 289371d8cb..d6c90b5bcf 100644 --- a/satpy/resample.py +++ b/satpy/resample.py @@ -146,32 +146,19 @@ from math import lcm # type: ignore from weakref import WeakValueDictionary -import dask import dask.array as da import numpy as np import pyresample import xarray as xr import zarr from packaging import version -from pyresample.ewa import fornav, ll2cr +from pyresample.ewa import DaskEWAResampler, LegacyDaskEWAResampler from pyresample.geometry import SwathDefinition - -from satpy.utils import PerformanceWarning, get_legacy_chunk_size - -try: - from pyresample.resampler import BaseResampler as PRBaseResampler -except ImportError: - PRBaseResampler = None -try: - from pyresample.gradient import GradientSearchResampler -except ImportError: - GradientSearchResampler = None -try: - from pyresample.ewa import DaskEWAResampler, LegacyDaskEWAResampler -except ImportError: - DaskEWAResampler = LegacyDaskEWAResampler = None +from pyresample.gradient import GradientSearchResampler +from pyresample.resampler import BaseResampler as PRBaseResampler from satpy._config import config_search_paths, get_config_path +from satpy.utils import PerformanceWarning, get_legacy_chunk_size LOG = getLogger(__name__) @@ -355,100 +342,7 @@ def update_resampled_coords(old_data, new_data, new_area): return new_data -class BaseResampler(object): - """Base abstract resampler class.""" - - def __init__(self, source_geo_def, target_geo_def): - """Initialize resampler with geolocation information. - - Args: - source_geo_def (SwathDefinition, AreaDefinition): - Geolocation definition for the data to be resampled - target_geo_def (CoordinateDefinition, AreaDefinition): - Geolocation definition for the area to resample data to. - - """ - self.source_geo_def = source_geo_def - self.target_geo_def = target_geo_def - - def get_hash(self, source_geo_def=None, target_geo_def=None, **kwargs): - """Get hash for the current resample with the given *kwargs*.""" - if source_geo_def is None: - source_geo_def = self.source_geo_def - if target_geo_def is None: - target_geo_def = self.target_geo_def - the_hash = source_geo_def.update_hash() - target_geo_def.update_hash(the_hash) - hash_dict(kwargs, the_hash) - return the_hash.hexdigest() - - def precompute(self, **kwargs): - """Do the precomputation. 
- - This is an optional step if the subclass wants to implement more - complex features like caching or can share some calculations - between multiple datasets to be processed. - - """ - return None - - def compute(self, data, **kwargs): - """Do the actual resampling. - - This must be implemented by subclasses. - - """ - raise NotImplementedError - - def resample(self, data, cache_dir=None, mask_area=None, **kwargs): - """Resample `data` by calling `precompute` and `compute` methods. - - Only certain resampling classes may use `cache_dir` and the `mask` - provided when `mask_area` is True. The return value of calling the - `precompute` method is passed as the `cache_id` keyword argument - of the `compute` method, but may not be used directly for caching. It - is up to the individual resampler subclasses to determine how this - is used. - - Args: - data (xarray.DataArray): Data to be resampled - cache_dir (str): directory to cache precomputed results - (default False, optional) - mask_area (bool): Mask geolocation data where data values are - invalid. This should be used when data values - may affect what neighbors are considered valid. - - Returns (xarray.DataArray): Data resampled to the target area - - """ - # default is to mask areas for SwathDefinitions - if mask_area is None and isinstance( - self.source_geo_def, SwathDefinition): - mask_area = True - - if mask_area: - if isinstance(self.source_geo_def, SwathDefinition): - geo_dims = self.source_geo_def.lons.dims - else: - geo_dims = ('y', 'x') - flat_dims = [dim for dim in data.dims if dim not in geo_dims] - if np.issubdtype(data.dtype, np.integer): - kwargs['mask'] = data == data.attrs.get('_FillValue', np.iinfo(data.dtype.type).max) - else: - kwargs['mask'] = data.isnull() - kwargs['mask'] = kwargs['mask'].all(dim=flat_dims) - - cache_id = self.precompute(cache_dir=cache_dir, **kwargs) - return self.compute(data, cache_id=cache_id, **kwargs) - - def _create_cache_filename(self, cache_dir, prefix='', - fmt='.zarr', **kwargs): - """Create filename for the cached resampling parameters.""" - hash_str = self.get_hash(**kwargs) - return os.path.join(cache_dir, prefix + hash_str + fmt) - - -class KDTreeResampler(BaseResampler): +class KDTreeResampler(PRBaseResampler): """Resample using a KDTree-based nearest neighbor algorithm. This resampler implements on-disk caching when the `cache_dir` argument @@ -636,209 +530,7 @@ def compute(self, data, weight_funcs=None, fill_value=np.nan, return update_resampled_coords(data, res, self.target_geo_def) -class _LegacySatpyEWAResampler(BaseResampler): - """Resample using an elliptical weighted averaging algorithm. - - This algorithm does **not** use caching or any externally provided data - mask (unlike the 'nearest' resampler). - - This algorithm works under the assumption that the data is observed - one scan line at a time. However, good results can still be achieved - for non-scan based data provided `rows_per_scan` is set to the - number of rows in the entire swath or by setting it to `None`. - - Args: - rows_per_scan (int, None): - Number of data rows for every observed scanline. If None then the - entire swath is treated as one large scanline. - weight_count (int): - number of elements to create in the gaussian weight table. - Default is 10000. Must be at least 2 - weight_min (float): - the minimum value to store in the last position of the - weight table. Default is 0.01, which, with a - `weight_distance_max` of 1.0 produces a weight of 0.01 - at a grid cell distance of 1.0. 
Must be greater than 0. - weight_distance_max (float): - distance in grid cell units at which to - apply a weight of `weight_min`. Default is - 1.0. Must be greater than 0. - weight_delta_max (float): - maximum distance in grid cells in each grid - dimension over which to distribute a single swath cell. - Default is 10.0. - weight_sum_min (float): - minimum weight sum value. Cells whose weight sums - are less than `weight_sum_min` are set to the grid fill value. - Default is EPSILON. - maximum_weight_mode (bool): - If False (default), a weighted average of - all swath cells that map to a particular grid cell is used. - If True, the swath cell having the maximum weight of all - swath cells that map to a particular grid cell is used. This - option should be used for coded/category data, i.e. snow cover. - - """ - - def __init__(self, source_geo_def, target_geo_def): - """Init _LegacySatpyEWAResampler.""" - warnings.warn( - "A new version of pyresample is available. Please " - "upgrade to get access to a newer 'ewa' and " - "'ewa_legacy' resampler.", - stacklevel=2 - ) - super(_LegacySatpyEWAResampler, self).__init__(source_geo_def, target_geo_def) - self.cache = {} - - def resample(self, *args, **kwargs): - """Run precompute and compute methods. - - .. note:: - - This sets the default of 'mask_area' to False since it is - not needed in EWA resampling currently. - - """ - kwargs.setdefault('mask_area', False) - return super(_LegacySatpyEWAResampler, self).resample(*args, **kwargs) - - def _call_ll2cr(self, lons, lats, target_geo_def, swath_usage=0): - """Wrap ll2cr() for handling dask delayed calls better.""" - new_src = SwathDefinition(lons, lats) - - swath_points_in_grid, cols, rows = ll2cr(new_src, target_geo_def) - # FIXME: How do we check swath usage/coverage if we only do this - # per-block - # # Determine if enough of the input swath was used - # grid_name = getattr(self.target_geo_def, "name", "N/A") - # fraction_in = swath_points_in_grid / float(lons.size) - # swath_used = fraction_in > swath_usage - # if not swath_used: - # LOG.info("Data does not fit in grid %s because it only %f%% of " - # "the swath is used" % - # (grid_name, fraction_in * 100)) - # raise RuntimeError("Data does not fit in grid %s" % (grid_name,)) - # else: - # LOG.debug("Data fits in grid %s and uses %f%% of the swath", - # grid_name, fraction_in * 100) - - return np.stack([cols, rows], axis=0) - - def precompute(self, cache_dir=None, swath_usage=0, **kwargs): - """Generate row and column arrays and store it for later use.""" - if self.cache: - # this resampler should be used for one SwathDefinition - # no need to recompute ll2cr output again - return None - - if kwargs.get('mask') is not None: - LOG.warning("'mask' parameter has no affect during EWA " - "resampling") - - del kwargs - source_geo_def = self.source_geo_def - target_geo_def = self.target_geo_def - - if cache_dir: - LOG.warning("'cache_dir' is not used by EWA resampling") - - # Satpy/PyResample don't support dynamic grids out of the box yet - lons, lats = source_geo_def.get_lonlats() - if isinstance(lons, xr.DataArray): - # get dask arrays - lons = lons.data - lats = lats.data - # we are remapping to a static unchanging grid/area with all of - # its parameters specified - chunks = (2,) + lons.chunks - res = da.map_blocks(self._call_ll2cr, lons, lats, - target_geo_def, swath_usage, - dtype=lons.dtype, chunks=chunks, new_axis=[0]) - cols = res[0] - rows = res[1] - - # save the dask arrays in the class instance cache - # the on-disk cache will store 
the numpy arrays - self.cache = { - "rows": rows, - "cols": cols, - } - - return None - - def _call_fornav(self, cols, rows, target_geo_def, data, - grid_coverage=0, **kwargs): - """Wrap fornav() to run as a dask delayed.""" - num_valid_points, res = fornav(cols, rows, target_geo_def, - data, **kwargs) - - if isinstance(data, tuple): - # convert 'res' from tuple of arrays to one array - res = np.stack(res) - num_valid_points = sum(num_valid_points) - - grid_covered_ratio = num_valid_points / float(res.size) - grid_covered = grid_covered_ratio > grid_coverage - if not grid_covered: - msg = "EWA resampling only found %f%% of the grid covered " \ - "(need %f%%)" % (grid_covered_ratio * 100, - grid_coverage * 100) - raise RuntimeError(msg) - LOG.debug("EWA resampling found %f%% of the grid covered" % - (grid_covered_ratio * 100)) - - return res - - def compute(self, data, cache_id=None, fill_value=0, weight_count=10000, - weight_min=0.01, weight_distance_max=1.0, - weight_delta_max=1.0, weight_sum_min=-1.0, - maximum_weight_mode=False, grid_coverage=0, **kwargs): - """Resample the data according to the precomputed X/Y coordinates.""" - rows = self.cache["rows"] - cols = self.cache["cols"] - - # if the data is scan based then check its metadata or the passed - # kwargs otherwise assume the entire input swath is one large - # "scanline" - rows_per_scan = kwargs.get('rows_per_scan', - data.attrs.get("rows_per_scan", - data.shape[0])) - - if data.ndim == 3 and 'bands' in data.dims: - data_in = tuple(data.sel(bands=band).data - for band in data['bands']) - elif data.ndim == 2: - data_in = data.data - else: - raise ValueError("Unsupported data shape for EWA resampling.") - - res = dask.delayed(self._call_fornav)( - cols, rows, self.target_geo_def, data_in, - grid_coverage=grid_coverage, - rows_per_scan=rows_per_scan, weight_count=weight_count, - weight_min=weight_min, weight_distance_max=weight_distance_max, - weight_delta_max=weight_delta_max, weight_sum_min=weight_sum_min, - maximum_weight_mode=maximum_weight_mode) - if isinstance(data_in, tuple): - new_shape = (len(data_in),) + self.target_geo_def.shape - else: - new_shape = self.target_geo_def.shape - data_arr = da.from_delayed(res, new_shape, data.dtype) - # from delayed creates one large chunk, break it up a bit if we can - data_arr = data_arr.rechunk([CHUNK_SIZE] * data_arr.ndim) - if data.ndim == 3 and data.dims[0] == 'bands': - dims = ('bands', 'y', 'x') - elif data.ndim == 2: - dims = ('y', 'x') - else: - dims = data.dims - - res = xr.DataArray(data_arr, dims=dims, attrs=data.attrs.copy()) - return update_resampled_coords(data, res, self.target_geo_def) - - -class BilinearResampler(BaseResampler): +class BilinearResampler(PRBaseResampler): """Resample using bilinear interpolation. This resampler implements on-disk caching when the `cache_dir` argument @@ -978,7 +670,7 @@ def _repeat_by_factor(data, block_info=None): return out_data -class NativeResampler(BaseResampler): +class NativeResampler(PRBaseResampler): """Expand or reduce input datasets to be the same shape. 
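    A minimal usage sketch for context (``scn`` is assumed to be a Scene holding
    channels at more than one resolution):

        >>> native_scn = scn.resample(scn.coarsest_area(), resampler="native")  # aggregate down
        >>> native_scn = scn.resample(scn.finest_area(), resampler="native")    # or replicate up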
If data is higher resolution (more pixels) than the destination area @@ -1139,7 +831,7 @@ def _get_arg_to_pass_for_skipna_handling(**kwargs): return kwargs -class BucketResamplerBase(BaseResampler): +class BucketResamplerBase(PRBaseResampler): """Base class for bucket resampling which implements averaging.""" def __init__(self, source_geo_def, target_geo_def): @@ -1353,17 +1045,9 @@ def compute(self, data, fill_value=np.nan, categories=None, **kwargs): "bucket_sum": BucketSum, "bucket_count": BucketCount, "bucket_fraction": BucketFraction, + "ewa": DaskEWAResampler, + "ewa_legacy": LegacyDaskEWAResampler, } -if DaskEWAResampler is not None: - RESAMPLERS['ewa'] = DaskEWAResampler - RESAMPLERS['ewa_legacy'] = LegacyDaskEWAResampler -else: - RESAMPLERS['ewa'] = _LegacySatpyEWAResampler - - -# deepcode ignore PythonSameEvalBinaryExpressiontrue: PRBaseResampler is None only on import errors -if PRBaseResampler is None: - PRBaseResampler = BaseResampler # TODO: move this to pyresample @@ -1373,7 +1057,7 @@ def prepare_resampler(source_area, destination_area, resampler=None, **resample_ LOG.info("Using default KDTree resampler") resampler = 'kd_tree' - if isinstance(resampler, (BaseResampler, PRBaseResampler)): + if isinstance(resampler, PRBaseResampler): raise ValueError("Trying to create a resampler when one already " "exists.") if isinstance(resampler, str): @@ -1403,7 +1087,7 @@ def prepare_resampler(source_area, destination_area, resampler=None, **resample_ def resample(source_area, data, destination_area, resampler=None, **kwargs): """Do the resampling.""" - if not isinstance(resampler, (BaseResampler, PRBaseResampler)): + if not isinstance(resampler, PRBaseResampler): # we don't use the first argument (cache key) _, resampler_instance = prepare_resampler(source_area, destination_area, From 59c8b94a7495589911dba96649936afe322a07ac Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 23 Oct 2023 15:05:42 -0500 Subject: [PATCH 0587/1416] Remove long deprecated numpy resampling cache helper --- satpy/resample.py | 31 ------------------------------- 1 file changed, 31 deletions(-) diff --git a/satpy/resample.py b/satpy/resample.py index d6c90b5bcf..d011b20aa2 100644 --- a/satpy/resample.py +++ b/satpy/resample.py @@ -436,41 +436,10 @@ def _apply_cached_index(self, val, idx_name, persist=False): setattr(self.resampler, idx_name, val) return val - def _check_numpy_cache(self, cache_dir, mask=None, - **kwargs): - """Check if there's Numpy cache file and convert it to zarr.""" - if cache_dir is None: - return - fname_np = self._create_cache_filename(cache_dir, - prefix='resample_lut-', - mask=mask, fmt='.npz', - **kwargs) - fname_zarr = self._create_cache_filename(cache_dir, prefix='nn_lut-', - mask=mask, fmt='.zarr', - **kwargs) - LOG.debug("Check if %s exists", fname_np) - if os.path.exists(fname_np) and not os.path.exists(fname_zarr): - import warnings - warnings.warn( - "Using Numpy files as resampling cache is deprecated.", - stacklevel=3 - ) - LOG.warning("Converting resampling LUT from .npz to .zarr") - zarr_out = xr.Dataset() - with np.load(fname_np, 'r') as fid: - for idx_name, coord in NN_COORDINATES.items(): - zarr_out[idx_name] = (coord, fid[idx_name]) - - # Write indices to Zarr file - zarr_out.to_zarr(fname_zarr) - LOG.debug("Resampling LUT saved to %s", fname_zarr) - def load_neighbour_info(self, cache_dir, mask=None, **kwargs): """Read index arrays from either the in-memory or disk cache.""" mask_name = getattr(mask, 'name', None) cached = {} - self._check_numpy_cache(cache_dir, 
mask=mask_name, **kwargs) - for idx_name in NN_COORDINATES: if mask_name in self._index_caches: cached[idx_name] = self._apply_cached_index( From f273695efc19ffeb07e5a24165912087171036df Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 23 Oct 2023 20:21:59 +0000 Subject: [PATCH 0588/1416] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.6.0 → v1.6.1](https://github.com/pre-commit/mirrors-mypy/compare/v1.6.0...v1.6.1) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 07c9509616..5bf64d25da 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -20,7 +20,7 @@ repos: - id: bandit args: [--ini, .bandit] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.6.0' # Use the sha / tag you want to point at + rev: 'v1.6.1' # Use the sha / tag you want to point at hooks: - id: mypy additional_dependencies: From b9189dd10ca16dfb4b2d26265607be2c4a7bb12d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 23 Oct 2023 15:26:18 -0500 Subject: [PATCH 0589/1416] Remove tests for removed and deprecated functionality --- satpy/tests/test_resample.py | 40 +----------------------------------- 1 file changed, 1 insertion(+), 39 deletions(-) diff --git a/satpy/tests/test_resample.py b/satpy/tests/test_resample.py index aa9063b95c..f96b98ecc0 100644 --- a/satpy/tests/test_resample.py +++ b/satpy/tests/test_resample.py @@ -137,13 +137,12 @@ def test_type_preserve(self): class TestKDTreeResampler(unittest.TestCase): """Test the kd-tree resampler.""" - @mock.patch('satpy.resample.KDTreeResampler._check_numpy_cache') @mock.patch('satpy.resample.xr.Dataset') @mock.patch('satpy.resample.zarr.open') @mock.patch('satpy.resample.KDTreeResampler._create_cache_filename') @mock.patch('pyresample.kd_tree.XArrayResamplerNN') def test_kd_resampling(self, xr_resampler, create_filename, zarr_open, - xr_dset, cnc): + xr_dset): """Test the kd resampler.""" from satpy.resample import KDTreeResampler data, source_area, swath_data, source_swath, target_area = get_test_data() @@ -157,7 +156,6 @@ def test_kd_resampling(self, xr_resampler, create_filename, zarr_open, # swath definitions should not be cached self.assertFalse(len(mock_dset.to_zarr.mock_calls), 0) resampler.resampler.reset_mock() - cnc.assert_called_once() resampler = KDTreeResampler(source_area, target_area) resampler.precompute() @@ -216,42 +214,6 @@ def astype(self, dtype): resampler.compute(data, fill_value=fill_value) resampler.resampler.get_sample_from_neighbour_info.assert_called_with(data, fill_value) - @mock.patch('satpy.resample.np.load') - @mock.patch('satpy.resample.xr.Dataset') - def test_check_numpy_cache(self, xr_Dataset, np_load): - """Test that cache stored in .npz is converted to zarr.""" - from satpy.resample import KDTreeResampler - - data, source_area, swath_data, source_swath, target_area = get_test_data() - resampler = KDTreeResampler(source_area, target_area) - - zarr_out = mock.MagicMock() - xr_Dataset.return_value = zarr_out - - try: - the_dir = tempfile.mkdtemp() - kwargs = {} - np_path = resampler._create_cache_filename(the_dir, - prefix='resample_lut-', - fmt='.npz', - mask=None, - **kwargs) - zarr_path = resampler._create_cache_filename(the_dir, - prefix='nn_lut-', - fmt='.zarr', - mask=None, - **kwargs) - 
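            # Hedged illustration (``scn`` and ``my_area`` are hypothetical names): the
            # supported on-disk cache is the zarr-based one selected with the ``cache_dir``
            # keyword, e.g. scn.resample(my_area, resampler="nearest", cache_dir="/tmp/satpy_cache"),
            # and repeated calls with the same geometries reuse the stored neighbour indices.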
resampler._check_numpy_cache(the_dir) - np_load.assert_not_called() - zarr_out.to_zarr.assert_not_called() - with open(np_path, 'w') as fid: - fid.write("42") - resampler._check_numpy_cache(the_dir) - np_load.assert_called_once_with(np_path, 'r') - zarr_out.to_zarr.assert_called_once_with(zarr_path) - finally: - shutil.rmtree(the_dir) - @unittest.skipIf(LegacyDaskEWAResampler is not None, "Deprecated EWA resampler is now in pyresample. " From e6261e751cc642e4e4e334a1a1235e0668bd299a Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 23 Oct 2023 15:50:17 -0500 Subject: [PATCH 0590/1416] Remove more tests for removed code --- satpy/tests/test_resample.py | 114 ----------------------------------- 1 file changed, 114 deletions(-) diff --git a/satpy/tests/test_resample.py b/satpy/tests/test_resample.py index f96b98ecc0..ca9dd409cd 100644 --- a/satpy/tests/test_resample.py +++ b/satpy/tests/test_resample.py @@ -28,11 +28,6 @@ import xarray as xr from pyproj import CRS -try: - from pyresample.ewa import LegacyDaskEWAResampler -except ImportError: - LegacyDaskEWAResampler = None - from satpy.resample import NativeResampler @@ -215,115 +210,6 @@ def astype(self, dtype): resampler.resampler.get_sample_from_neighbour_info.assert_called_with(data, fill_value) -@unittest.skipIf(LegacyDaskEWAResampler is not None, - "Deprecated EWA resampler is now in pyresample. " - "No need to test in Satpy.") -class TestEWAResampler(unittest.TestCase): - """Test EWA resampler class.""" - - @mock.patch('satpy.resample.fornav') - @mock.patch('satpy.resample.ll2cr') - @mock.patch('satpy.resample.SwathDefinition.get_lonlats') - def test_2d_ewa(self, get_lonlats, ll2cr, fornav): - """Test EWA with a 2D dataset.""" - import numpy as np - import xarray as xr - - from satpy.resample import resample_dataset - ll2cr.return_value = (100, - np.zeros((10, 10), dtype=np.float32), - np.zeros((10, 10), dtype=np.float32)) - fornav.return_value = (100 * 200, - np.zeros((200, 100), dtype=np.float32)) - _, _, swath_data, source_swath, target_area = get_test_data() - get_lonlats.return_value = (source_swath.lons, source_swath.lats) - swath_data.data = swath_data.data.astype(np.float32) - num_chunks = len(source_swath.lons.chunks[0]) * len(source_swath.lons.chunks[1]) - - new_data = resample_dataset(swath_data, target_area, resampler='ewa') - self.assertTupleEqual(new_data.shape, (200, 100)) - self.assertEqual(new_data.dtype, np.float32) - self.assertEqual(new_data.attrs['test'], 'test') - self.assertIs(new_data.attrs['area'], target_area) - # make sure we can actually compute everything - new_data.compute() - lonlat_calls = get_lonlats.call_count - ll2cr_calls = ll2cr.call_count - - # resample a different dataset and make sure cache is used - data = xr.DataArray( - swath_data.data, - dims=('y', 'x'), attrs={'area': source_swath, 'test': 'test2', - 'name': 'test2'}) - new_data = resample_dataset(data, target_area, resampler='ewa') - new_data.compute() - # ll2cr will be called once more because of the computation - self.assertEqual(ll2cr.call_count, ll2cr_calls + num_chunks) - # but we should already have taken the lonlats from the SwathDefinition - self.assertEqual(get_lonlats.call_count, lonlat_calls) - self.assertIn('y', new_data.coords) - self.assertIn('x', new_data.coords) - self.assertIn('crs', new_data.coords) - self.assertIsInstance(new_data.coords['crs'].item(), CRS) - self.assertIn('lambert', new_data.coords['crs'].item().coordinate_operation.method_name.lower()) - self.assertEqual(new_data.coords['y'].attrs['units'], 'meter') 
- self.assertEqual(new_data.coords['x'].attrs['units'], 'meter') - self.assertEqual(target_area.crs, new_data.coords['crs'].item()) - - @mock.patch('satpy.resample.fornav') - @mock.patch('satpy.resample.ll2cr') - @mock.patch('satpy.resample.SwathDefinition.get_lonlats') - def test_3d_ewa(self, get_lonlats, ll2cr, fornav): - """Test EWA with a 3D dataset.""" - import numpy as np - import xarray as xr - - from satpy.resample import resample_dataset - _, _, swath_data, source_swath, target_area = get_test_data( - input_shape=(3, 200, 100), input_dims=('bands', 'y', 'x')) - swath_data.data = swath_data.data.astype(np.float32) - ll2cr.return_value = (100, - np.zeros((10, 10), dtype=np.float32), - np.zeros((10, 10), dtype=np.float32)) - fornav.return_value = ([100 * 200] * 3, - [np.zeros((200, 100), dtype=np.float32)] * 3) - get_lonlats.return_value = (source_swath.lons, source_swath.lats) - num_chunks = len(source_swath.lons.chunks[0]) * len(source_swath.lons.chunks[1]) - - new_data = resample_dataset(swath_data, target_area, resampler='ewa') - self.assertTupleEqual(new_data.shape, (3, 200, 100)) - self.assertEqual(new_data.dtype, np.float32) - self.assertEqual(new_data.attrs['test'], 'test') - self.assertIs(new_data.attrs['area'], target_area) - # make sure we can actually compute everything - new_data.compute() - lonlat_calls = get_lonlats.call_count - ll2cr_calls = ll2cr.call_count - - # resample a different dataset and make sure cache is used - swath_data = xr.DataArray( - swath_data.data, - dims=('bands', 'y', 'x'), coords={'bands': ['R', 'G', 'B']}, - attrs={'area': source_swath, 'test': 'test'}) - new_data = resample_dataset(swath_data, target_area, resampler='ewa') - new_data.compute() - # ll2cr will be called once more because of the computation - self.assertEqual(ll2cr.call_count, ll2cr_calls + num_chunks) - # but we should already have taken the lonlats from the SwathDefinition - self.assertEqual(get_lonlats.call_count, lonlat_calls) - self.assertIn('y', new_data.coords) - self.assertIn('x', new_data.coords) - self.assertIn('bands', new_data.coords) - self.assertIn('crs', new_data.coords) - self.assertIsInstance(new_data.coords['crs'].item(), CRS) - self.assertIn('lambert', new_data.coords['crs'].item().coordinate_operation.method_name.lower()) - self.assertEqual(new_data.coords['y'].attrs['units'], 'meter') - self.assertEqual(new_data.coords['x'].attrs['units'], 'meter') - np.testing.assert_equal(new_data.coords['bands'].values, - ['R', 'G', 'B']) - self.assertEqual(target_area.crs, new_data.coords['crs'].item()) - - class TestNativeResampler: """Tests for the 'native' resampling method.""" From 3fc285570ea5356fb773089482fc43c3aa5bd3be Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 24 Oct 2023 13:40:57 +0200 Subject: [PATCH 0591/1416] Make bandit happy --- pyproject.toml | 3 +- satpy/demo/_google_cloud_platform.py | 6 ++-- satpy/demo/abi_l1b.py | 32 ++++++++++--------- satpy/demo/fci.py | 2 +- satpy/demo/utils.py | 2 +- satpy/readers/xmlformat.py | 8 ++--- utils/fetch_avhrr_calcoeffs.py | 48 ++++++++++++++-------------- 7 files changed, 51 insertions(+), 50 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index e3a3a2efbe..d2059e68af 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,7 +15,8 @@ line_length = 120 [tool.ruff] # See https://docs.astral.sh/ruff/rules/ -select = ["E", "W", "F", "I", "D", "S", "B", "A", "PT", "Q", "TID", "C90", "T10", "N"] +# select = ["E", "W", "F", "I", "D", "S", "B", "A", "PT", "Q", "TID", "C90", "T10", "N"] +select = [] ignore 
= ["B905"] # only available from python 3.10 line-length = 120 diff --git a/satpy/demo/_google_cloud_platform.py b/satpy/demo/_google_cloud_platform.py index c1b7016388..970fd9cfa3 100644 --- a/satpy/demo/_google_cloud_platform.py +++ b/satpy/demo/_google_cloud_platform.py @@ -32,7 +32,7 @@ def is_google_cloud_instance(): """Check if we are on a GCP virtual machine.""" try: - return urlopen('http://metadata.google.internal').headers.get('Metadata-Flavor') == 'Google' + return urlopen("http://metadata.google.internal").headers.get("Metadata-Flavor") == "Google" # nosec except URLError: return False @@ -68,7 +68,7 @@ def get_bucket_files(glob_pattern, base_dir, force=False, pattern_slice=None): if isinstance(glob_pattern, str): glob_pattern = [glob_pattern] - fs = gcsfs.GCSFileSystem(token='anon') + fs = gcsfs.GCSFileSystem(token="anon") # nosec filenames = [] for gp in glob_pattern: # handle multiple glob patterns being treated as one pattern @@ -98,5 +98,5 @@ def _download_gcs_files(globbed_files, fs, base_dir, force): LOG.info("Found existing: {}".format(ondisk_pathname)) continue LOG.info("Downloading: {}".format(ondisk_pathname)) - fs.get('gs://' + fn, ondisk_pathname) + fs.get("gs://" + fn, ondisk_pathname) return filenames diff --git a/satpy/demo/abi_l1b.py b/satpy/demo/abi_l1b.py index e223238767..8583c2580b 100644 --- a/satpy/demo/abi_l1b.py +++ b/satpy/demo/abi_l1b.py @@ -36,19 +36,20 @@ def get_us_midlatitude_cyclone_abi(base_dir=None, method=None, force=False): Total size: ~110MB """ - base_dir = base_dir or config.get('demo_data_dir', '.') + base_dir = base_dir or config.get("demo_data_dir", ".") if method is None: - method = 'gcsfs' - if method not in ['gcsfs']: + method = "gcsfs" + if method not in ["gcsfs"]: raise NotImplementedError("Demo data download method '{}' not " "implemented yet.".format(method)) from ._google_cloud_platform import get_bucket_files - patterns = ['gs://gcp-public-data-goes-16/ABI-L1b-RadC/2019/073/00/*s20190730002*.nc'] - subdir = os.path.join(base_dir, 'abi_l1b', '20190314_us_midlatitude_cyclone') + patterns = ["gs://gcp-public-data-goes-16/ABI-L1b-RadC/2019/073/00/*s20190730002*.nc"] + subdir = os.path.join(base_dir, "abi_l1b", "20190314_us_midlatitude_cyclone") os.makedirs(subdir, exist_ok=True) filenames = get_bucket_files(patterns, subdir, force=force) - assert len(filenames) == 16, "Not all files could be downloaded" + if len(filenames) != 16: + raise RuntimeError("Not all files could be downloaded") return filenames @@ -76,12 +77,12 @@ def get_hurricane_florence_abi(base_dir=None, method=None, force=False, Total size (240 frames, all channels): ~3.5GB """ - base_dir = base_dir or config.get('demo_data_dir', '.') + base_dir = base_dir or config.get("demo_data_dir", ".") if channels is None: channels = range(1, 17) if method is None: - method = 'gcsfs' - if method not in ['gcsfs']: + method = "gcsfs" + if method not in ["gcsfs"]: raise NotImplementedError("Demo data download method '{}' not " "implemented yet.".format(method)) if isinstance(num_frames, (int, float)): @@ -96,16 +97,17 @@ def get_hurricane_florence_abi(base_dir=None, method=None, force=False, # patterns += ['gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/1[3456]/' # '*C{:02d}*s20182541[3456]*.nc'.format(channel)] patterns += [( - 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/13/*RadM1*C{:02d}*s201825413*.nc'.format(channel), - 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/14/*RadM1*C{:02d}*s201825414*.nc'.format(channel), - 
'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/15/*RadM1*C{:02d}*s201825415*.nc'.format(channel), - 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/16/*RadM1*C{:02d}*s201825416*.nc'.format(channel), + "gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/13/*RadM1*C{:02d}*s201825413*.nc".format(channel), + "gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/14/*RadM1*C{:02d}*s201825414*.nc".format(channel), + "gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/15/*RadM1*C{:02d}*s201825415*.nc".format(channel), + "gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/16/*RadM1*C{:02d}*s201825416*.nc".format(channel), )] - subdir = os.path.join(base_dir, 'abi_l1b', '20180911_hurricane_florence_abi_l1b') + subdir = os.path.join(base_dir, "abi_l1b", "20180911_hurricane_florence_abi_l1b") os.makedirs(subdir, exist_ok=True) filenames = get_bucket_files(patterns, subdir, force=force, pattern_slice=frame_slice) actual_slice = frame_slice.indices(240) # 240 max frames num_frames = int((actual_slice[1] - actual_slice[0]) / actual_slice[2]) - assert len(filenames) == len(channels) * num_frames, "Not all files could be downloaded" + if len(filenames) != len(channels) * num_frames: + raise RuntimeError("Not all files could be downloaded") return filenames diff --git a/satpy/demo/fci.py b/satpy/demo/fci.py index 7c4160b203..7a2abe5d20 100644 --- a/satpy/demo/fci.py +++ b/satpy/demo/fci.py @@ -53,5 +53,5 @@ def _unpack_tarfile_to(filename, subdir): """Unpack content of tarfile in filename to subdir.""" with tarfile.open(filename, mode="r:gz") as tf: contents = tf.getnames() - tf.extractall(path=subdir) + tf.extractall(path=subdir) # nosec return contents diff --git a/satpy/demo/utils.py b/satpy/demo/utils.py index 0fd1d1d1a7..63ccbd007f 100644 --- a/satpy/demo/utils.py +++ b/satpy/demo/utils.py @@ -22,7 +22,7 @@ def download_url(source, target): """Download a url in stream mode.""" - with requests.get(source, stream=True) as r: + with requests.get(source, stream=True, timeout=10) as r: r.raise_for_status() with open(target, "wb") as f: for chunk in r.iter_content(chunk_size=8192): diff --git a/satpy/readers/xmlformat.py b/satpy/readers/xmlformat.py index 0c46a3595e..969c30113a 100644 --- a/satpy/readers/xmlformat.py +++ b/satpy/readers/xmlformat.py @@ -19,9 +19,8 @@ from __future__ import annotations -from xml.etree.ElementTree import ElementTree - import numpy as np +from defusedxml.ElementTree import parse VARIABLES: dict[str, str] = {} @@ -141,8 +140,7 @@ def to_scales(val): def parse_format(xml_file): """Parse the xml file to create types, scaling factor types, and scales.""" - tree = ElementTree() - tree.parse(xml_file) + tree = parse(xml_file) for param in tree.find("parameters"): VARIABLES[param.get("name")] = param.get("value") @@ -204,5 +202,5 @@ def apply_scales(self, array): return _apply_scales(array, *self.translator[array.dtype]) -if __name__ == '__main__': +if __name__ == "__main__": pass diff --git a/utils/fetch_avhrr_calcoeffs.py b/utils/fetch_avhrr_calcoeffs.py index 7bc49ba8db..f73975df95 100644 --- a/utils/fetch_avhrr_calcoeffs.py +++ b/utils/fetch_avhrr_calcoeffs.py @@ -53,32 +53,32 @@ def get_page(url): """Retrieve the given page.""" - return urllib2.urlopen(url).read() + return urllib2.urlopen(url).read() # nosec def get_coeffs(page): """Parse coefficients from the page.""" coeffs = {} - coeffs['datetime'] = [] - coeffs['slope1'] = [] - coeffs['intercept1'] = [] - coeffs['slope2'] = [] - coeffs['intercept2'] = [] + coeffs["datetime"] = [] + coeffs["slope1"] = [] + 
coeffs["intercept1"] = [] + coeffs["slope2"] = [] + coeffs["intercept2"] = [] slope1_idx, intercept1_idx, slope2_idx, intercept2_idx = \ None, None, None, None date_idx = 0 - for row in page.lower().split('\n'): + for row in page.lower().split("\n"): row = row.split() if len(row) == 0: continue - if row[0] == 'update': + if row[0] == "update": # Get the column indices from the header line - slope1_idx = row.index('slope_lo') - intercept1_idx = row.index('int_lo') - slope2_idx = row.index('slope_hi') - intercept2_idx = row.index('int_hi') + slope1_idx = row.index("slope_lo") + intercept1_idx = row.index("int_lo") + slope2_idx = row.index("slope_hi") + intercept2_idx = row.index("int_hi") continue if slope1_idx is None: @@ -94,11 +94,11 @@ def get_coeffs(page): except ValueError: continue - coeffs['datetime'].append([dat.year, dat.month, dat.day]) - coeffs['slope1'].append(float(row[slope1_idx])) - coeffs['intercept1'].append(float(row[intercept1_idx])) - coeffs['slope2'].append(float(row[slope2_idx])) - coeffs['intercept2'].append(float(row[intercept2_idx])) + coeffs["datetime"].append([dat.year, dat.month, dat.day]) + coeffs["slope1"].append(float(row[slope1_idx])) + coeffs["intercept1"].append(float(row[intercept1_idx])) + coeffs["slope2"].append(float(row[slope2_idx])) + coeffs["intercept2"].append(float(row[intercept2_idx])) return coeffs @@ -119,19 +119,19 @@ def get_all_coeffs(): return coeffs -def save_coeffs(coeffs, out_dir=''): +def save_coeffs(coeffs, out_dir=""): """Save calibration coefficients to HDF5 files.""" for platform in coeffs.keys(): fname = os.path.join(out_dir, "%s_calibration_data.h5" % platform) - fid = h5py.File(fname, 'w') + fid = h5py.File(fname, "w") for chan in coeffs[platform].keys(): fid.create_group(chan) - fid[chan]['datetime'] = coeffs[platform][chan]['datetime'] - fid[chan]['slope1'] = coeffs[platform][chan]['slope1'] - fid[chan]['intercept1'] = coeffs[platform][chan]['intercept1'] - fid[chan]['slope2'] = coeffs[platform][chan]['slope2'] - fid[chan]['intercept2'] = coeffs[platform][chan]['intercept2'] + fid[chan]["datetime"] = coeffs[platform][chan]["datetime"] + fid[chan]["slope1"] = coeffs[platform][chan]["slope1"] + fid[chan]["intercept1"] = coeffs[platform][chan]["intercept1"] + fid[chan]["slope2"] = coeffs[platform][chan]["slope2"] + fid[chan]["intercept2"] = coeffs[platform][chan]["intercept2"] fid.close() print("Calibration coefficients saved for %s" % platform) From 4f78a7e3ae83b0c6de7e54f69f4912d496e904f5 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 24 Oct 2023 13:45:46 +0200 Subject: [PATCH 0592/1416] Turn single quotes into double quotes --- .pre-commit-config.yaml | 1 - benchmarks/abi_l1b_benchmarks.py | 2 +- benchmarks/ahi_hsd_benchmarks.py | 4 +- benchmarks/seviri_hrit_benchmarks.py | 2 +- benchmarks/utils.py | 2 +- benchmarks/viirs_sdr_benchmarks.py | 8 +- doc/source/conf.py | 104 +- doc/source/doi_role.py | 20 +- pyproject.toml | 2 +- satpy/__init__.py | 2 +- satpy/_config.py | 54 +- satpy/_scene_converters.py | 2 +- satpy/aux_download.py | 50 +- satpy/composites/__init__.py | 298 ++-- satpy/composites/cloud_products.py | 16 +- satpy/composites/config_loader.py | 52 +- satpy/composites/glm.py | 8 +- satpy/composites/viirs.py | 8 +- satpy/dataset/anc_vars.py | 10 +- satpy/dataset/data_dict.py | 16 +- satpy/dataset/dataid.py | 116 +- satpy/dataset/metadata.py | 4 +- satpy/demo/ahi_hsd.py | 6 +- satpy/dependency_tree.py | 12 +- satpy/enhancements/__init__.py | 50 +- satpy/enhancements/mimic.py | 14 +- satpy/enhancements/viirs.py | 
6 +- satpy/modifiers/_crefl.py | 2 +- satpy/modifiers/_crefl_utils.py | 4 +- satpy/modifiers/angles.py | 8 +- satpy/modifiers/atmosphere.py | 30 +- satpy/modifiers/geometry.py | 4 +- satpy/modifiers/spectral.py | 20 +- satpy/multiscene/_blend_funcs.py | 14 +- satpy/multiscene/_multiscene.py | 28 +- satpy/node.py | 10 +- satpy/plugin_base.py | 2 +- satpy/readers/__init__.py | 32 +- satpy/readers/_geos_area.py | 72 +- satpy/readers/aapp_l1b.py | 142 +- satpy/readers/aapp_mhs_amsub_l1c.py | 54 +- satpy/readers/abi_base.py | 144 +- satpy/readers/abi_l1b.py | 84 +- satpy/readers/abi_l2_nc.py | 50 +- satpy/readers/acspo.py | 74 +- satpy/readers/agri_l1.py | 36 +- satpy/readers/ahi_hsd.py | 220 +-- satpy/readers/ahi_l1b_gridded_bin.py | 86 +- satpy/readers/ami_l1b.py | 132 +- satpy/readers/amsr2_l1b.py | 22 +- satpy/readers/amsr2_l2.py | 8 +- satpy/readers/amsr2_l2_gaasp.py | 90 +- satpy/readers/ascat_l2_soilmoisture_bufr.py | 46 +- satpy/readers/atms_l1b_nc.py | 10 +- satpy/readers/atms_sdr_hdf5.py | 20 +- satpy/readers/avhrr_l1b_gaclac.py | 104 +- satpy/readers/caliop_l2_cloud.py | 18 +- satpy/readers/clavrx.py | 192 +-- satpy/readers/cmsaf_claas2.py | 2 +- satpy/readers/electrol_hrit.py | 172 +-- satpy/readers/epic_l1b_h5.py | 40 +- satpy/readers/eps_l1b.py | 60 +- satpy/readers/eum_base.py | 36 +- satpy/readers/fci_l1c_nc.py | 238 ++-- satpy/readers/fci_l2_nc.py | 138 +- satpy/readers/file_handlers.py | 44 +- satpy/readers/fy4_base.py | 130 +- satpy/readers/generic_image.py | 40 +- satpy/readers/geocat.py | 126 +- satpy/readers/ghi_l1.py | 76 +- satpy/readers/ghrsst_l2.py | 28 +- satpy/readers/ghrsst_l3c_sst.py | 48 +- satpy/readers/glm_l2.py | 72 +- satpy/readers/gms/gms5_vissr_format.py | 572 ++++---- satpy/readers/goes_imager_hrit.py | 252 ++-- satpy/readers/goes_imager_nc.py | 876 ++++++------ satpy/readers/gpm_imerg.py | 42 +- satpy/readers/grib.py | 112 +- satpy/readers/hdf4_utils.py | 8 +- satpy/readers/hdf5_utils.py | 12 +- satpy/readers/hdfeos_base.py | 84 +- satpy/readers/hrit_base.py | 144 +- satpy/readers/hrit_jma.py | 182 +-- satpy/readers/hrpt.py | 88 +- satpy/readers/hsaf_grib.py | 72 +- satpy/readers/hsaf_h5.py | 42 +- satpy/readers/hy2_scat_l2b_h5.py | 96 +- satpy/readers/iasi_l2.py | 90 +- satpy/readers/iasi_l2_so2_bufr.py | 46 +- satpy/readers/ici_l1b_nc.py | 116 +- satpy/readers/insat3d_img_l1b_h5.py | 34 +- satpy/readers/li_base_nc.py | 152 +- satpy/readers/li_l2_nc.py | 12 +- satpy/readers/maia.py | 72 +- satpy/readers/meris_nc_sen3.py | 34 +- satpy/readers/mersi_l1b.py | 70 +- satpy/readers/mimic_TPW2_nc.py | 56 +- satpy/readers/mirs.py | 104 +- satpy/readers/modis_l1b.py | 56 +- satpy/readers/modis_l2.py | 48 +- satpy/readers/msi_safe.py | 90 +- satpy/readers/msu_gsa_l1b.py | 34 +- satpy/readers/mviri_l1b_fiduceo_nc.py | 238 ++-- satpy/readers/mws_l1b.py | 132 +- satpy/readers/netcdf_utils.py | 18 +- satpy/readers/nucaps.py | 122 +- satpy/readers/nwcsaf_msg2013_hdf5.py | 52 +- satpy/readers/nwcsaf_nc.py | 226 +-- satpy/readers/oceancolorcci_l3_nc.py | 64 +- satpy/readers/olci_nc.py | 148 +- satpy/readers/omps_edr.py | 56 +- satpy/readers/safe_sar_l2_ocn.py | 64 +- satpy/readers/sar_c_safe.py | 126 +- satpy/readers/satpy_cf_nc.py | 44 +- satpy/readers/scatsat1_l2b.py | 34 +- satpy/readers/scmi.py | 158 +-- satpy/readers/seadas_l2.py | 6 +- satpy/readers/seviri_base.py | 462 +++---- satpy/readers/seviri_l1b_hrit.py | 368 ++--- satpy/readers/seviri_l1b_icare.py | 130 +- satpy/readers/seviri_l1b_native.py | 386 +++--- satpy/readers/seviri_l1b_native_hdr.py | 1226 
++++++++--------- satpy/readers/seviri_l1b_nc.py | 224 +-- satpy/readers/seviri_l2_bufr.py | 90 +- satpy/readers/seviri_l2_grib.py | 80 +- satpy/readers/slstr_l1b.py | 216 +-- satpy/readers/smos_l2_wind.py | 62 +- satpy/readers/tropomi_l2.py | 76 +- satpy/readers/utils.py | 44 +- satpy/readers/vaisala_gld360.py | 24 +- satpy/readers/vii_base_nc.py | 82 +- satpy/readers/vii_l1b_nc.py | 26 +- satpy/readers/vii_l2_nc.py | 2 +- satpy/readers/viirs_atms_sdr_base.py | 124 +- satpy/readers/viirs_compact.py | 118 +- satpy/readers/viirs_edr.py | 62 +- satpy/readers/viirs_edr_active_fires.py | 40 +- satpy/readers/viirs_edr_flood.py | 48 +- satpy/readers/viirs_l1b.py | 132 +- satpy/readers/viirs_sdr.py | 86 +- satpy/readers/viirs_vgac_l1c_nc.py | 12 +- satpy/readers/virr_l1b.py | 70 +- satpy/readers/yaml_reader.py | 270 ++-- satpy/resample.py | 210 +-- satpy/scene.py | 96 +- satpy/tests/compositor_tests/test_abi.py | 30 +- satpy/tests/compositor_tests/test_agri.py | 26 +- satpy/tests/compositor_tests/test_ahi.py | 2 +- satpy/tests/compositor_tests/test_glm.py | 26 +- satpy/tests/compositor_tests/test_sar.py | 48 +- satpy/tests/compositor_tests/test_spectral.py | 50 +- satpy/tests/compositor_tests/test_viirs.py | 88 +- satpy/tests/conftest.py | 2 +- satpy/tests/enhancement_tests/test_abi.py | 2 +- .../enhancement_tests/test_enhancements.py | 210 +-- satpy/tests/enhancement_tests/test_viirs.py | 8 +- satpy/tests/features/steps/steps-load.py | 46 +- .../steps/steps-real-load-process-write.py | 24 +- satpy/tests/features/steps/steps-save.py | 8 +- satpy/tests/modifier_tests/test_angles.py | 6 +- satpy/tests/modifier_tests/test_crefl.py | 312 ++--- satpy/tests/modifier_tests/test_parallax.py | 2 +- satpy/tests/multiscene_tests/test_blend.py | 200 +-- satpy/tests/multiscene_tests/test_misc.py | 40 +- .../multiscene_tests/test_save_animation.py | 206 +-- satpy/tests/multiscene_tests/test_utils.py | 62 +- satpy/tests/reader_tests/_li_test_utils.py | 296 ++-- .../reader_tests/gms/test_gms5_vissr_l1b.py | 2 +- .../gms/test_gms5_vissr_navigation.py | 24 +- .../modis_tests/_modis_fixtures.py | 262 ++-- .../modis_tests/test_modis_l1b.py | 56 +- .../reader_tests/modis_tests/test_modis_l2.py | 62 +- satpy/tests/reader_tests/test_aapp_l1b.py | 172 +-- .../reader_tests/test_aapp_mhs_amsub_l1c.py | 86 +- satpy/tests/reader_tests/test_abi_l1b.py | 246 ++-- satpy/tests/reader_tests/test_abi_l2_nc.py | 238 ++-- satpy/tests/reader_tests/test_acspo.py | 100 +- satpy/tests/reader_tests/test_agri_l1.py | 204 +-- satpy/tests/reader_tests/test_ahi_hrit.py | 200 +-- satpy/tests/reader_tests/test_ahi_hsd.py | 420 +++--- .../reader_tests/test_ahi_l1b_gridded_bin.py | 160 +-- satpy/tests/reader_tests/test_ami_l1b.py | 246 ++-- satpy/tests/reader_tests/test_amsr2_l1b.py | 148 +- satpy/tests/reader_tests/test_amsr2_l2.py | 52 +- .../tests/reader_tests/test_amsr2_l2_gaasp.py | 202 +-- .../test_ascat_l2_soilmoisture_bufr.py | 116 +- .../tests/reader_tests/test_atms_sdr_hdf5.py | 84 +- .../tests/reader_tests/test_avhrr_l0_hrpt.py | 24 +- .../reader_tests/test_avhrr_l1b_gaclac.py | 346 ++--- satpy/tests/reader_tests/test_clavrx.py | 286 ++-- satpy/tests/reader_tests/test_clavrx_nc.py | 124 +- satpy/tests/reader_tests/test_cmsaf_claas.py | 4 +- .../tests/reader_tests/test_electrol_hrit.py | 148 +- satpy/tests/reader_tests/test_epic_l1b_h5.py | 64 +- satpy/tests/reader_tests/test_eps_l1b.py | 130 +- satpy/tests/reader_tests/test_eum_base.py | 116 +- satpy/tests/reader_tests/test_fci_l1c_nc.py | 386 +++--- 
satpy/tests/reader_tests/test_fci_l2_nc.py | 350 ++--- satpy/tests/reader_tests/test_fy4_base.py | 12 +- .../tests/reader_tests/test_generic_image.py | 208 +-- satpy/tests/reader_tests/test_geocat.py | 158 +-- satpy/tests/reader_tests/test_geos_area.py | 126 +- satpy/tests/reader_tests/test_ghi_l1.py | 190 +-- satpy/tests/reader_tests/test_ghrsst_l2.py | 58 +- satpy/tests/reader_tests/test_glm_l2.py | 198 +-- .../reader_tests/test_goes_imager_hrit.py | 146 +- .../reader_tests/test_goes_imager_nc_eum.py | 64 +- .../reader_tests/test_goes_imager_nc_noaa.py | 228 +-- satpy/tests/reader_tests/test_gpm_imerg.py | 56 +- satpy/tests/reader_tests/test_grib.py | 90 +- satpy/tests/reader_tests/test_hdf4_utils.py | 50 +- satpy/tests/reader_tests/test_hdf5_utils.py | 96 +- satpy/tests/reader_tests/test_hdfeos_base.py | 230 ++-- satpy/tests/reader_tests/test_hrit_base.py | 128 +- satpy/tests/reader_tests/test_hsaf_grib.py | 46 +- satpy/tests/reader_tests/test_hsaf_h5.py | 18 +- .../reader_tests/test_hy2_scat_l2b_h5.py | 402 +++--- satpy/tests/reader_tests/test_iasi_l2.py | 228 +-- .../reader_tests/test_iasi_l2_so2_bufr.py | 158 +-- satpy/tests/reader_tests/test_ici_l1b_nc.py | 232 ++-- .../reader_tests/test_insat3d_img_l1b_h5.py | 4 +- satpy/tests/reader_tests/test_li_l2_nc.py | 286 ++-- satpy/tests/reader_tests/test_meris_nc.py | 118 +- satpy/tests/reader_tests/test_mersi_l1b.py | 568 ++++---- .../reader_tests/test_mimic_TPW2_lowres.py | 104 +- .../tests/reader_tests/test_mimic_TPW2_nc.py | 64 +- satpy/tests/reader_tests/test_mirs.py | 176 +-- satpy/tests/reader_tests/test_msi_safe.py | 2 +- satpy/tests/reader_tests/test_msu_gsa_l1b.py | 92 +- .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 318 ++--- satpy/tests/reader_tests/test_mws_l1b_nc.py | 154 +-- satpy/tests/reader_tests/test_netcdf_utils.py | 140 +- satpy/tests/reader_tests/test_nucaps.py | 338 ++--- satpy/tests/reader_tests/test_nwcsaf_msg.py | 18 +- satpy/tests/reader_tests/test_nwcsaf_nc.py | 148 +- .../reader_tests/test_oceancolorcci_l3_nc.py | 80 +- satpy/tests/reader_tests/test_olci_nc.py | 174 +-- satpy/tests/reader_tests/test_omps_edr.py | 252 ++-- .../reader_tests/test_safe_sar_l2_ocn.py | 42 +- satpy/tests/reader_tests/test_sar_c_safe.py | 32 +- satpy/tests/reader_tests/test_satpy_cf_nc.py | 346 ++--- satpy/tests/reader_tests/test_scmi.py | 176 +-- satpy/tests/reader_tests/test_seadas_l2.py | 34 +- satpy/tests/reader_tests/test_seviri_base.py | 64 +- .../test_seviri_l1b_calibration.py | 152 +- .../reader_tests/test_seviri_l1b_hrit.py | 268 ++-- .../test_seviri_l1b_hrit_setup.py | 194 +-- .../reader_tests/test_seviri_l1b_icare.py | 136 +- .../reader_tests/test_seviri_l1b_native.py | 1114 +++++++-------- .../tests/reader_tests/test_seviri_l1b_nc.py | 304 ++-- .../tests/reader_tests/test_seviri_l2_bufr.py | 144 +- .../tests/reader_tests/test_seviri_l2_grib.py | 100 +- satpy/tests/reader_tests/test_slstr_l1b.py | 212 +-- satpy/tests/reader_tests/test_smos_l2_wind.py | 112 +- satpy/tests/reader_tests/test_tropomi_l2.py | 120 +- satpy/tests/reader_tests/test_utils.py | 216 +-- .../tests/reader_tests/test_vaisala_gld360.py | 20 +- satpy/tests/reader_tests/test_vii_base_nc.py | 228 +-- satpy/tests/reader_tests/test_vii_l1b_nc.py | 66 +- satpy/tests/reader_tests/test_vii_l2_nc.py | 32 +- satpy/tests/reader_tests/test_vii_wv_nc.py | 32 +- .../reader_tests/test_viirs_atms_utils.py | 16 +- .../tests/reader_tests/test_viirs_compact.py | 18 +- .../test_viirs_edr_active_fires.py | 186 +-- .../reader_tests/test_viirs_edr_flood.py | 58 +- 
satpy/tests/reader_tests/test_viirs_l1b.py | 292 ++-- satpy/tests/reader_tests/test_viirs_sdr.py | 616 ++++----- .../reader_tests/test_viirs_vgac_l1c_nc.py | 22 +- satpy/tests/reader_tests/test_virr_l1b.py | 162 +-- satpy/tests/scene_tests/test_conversions.py | 50 +- satpy/tests/scene_tests/test_data_access.py | 194 +-- satpy/tests/scene_tests/test_init.py | 116 +- satpy/tests/scene_tests/test_load.py | 338 ++--- satpy/tests/scene_tests/test_resampling.py | 372 ++--- satpy/tests/scene_tests/test_saving.py | 52 +- satpy/tests/test_composites.py | 874 ++++++------ satpy/tests/test_config.py | 52 +- satpy/tests/test_data_download.py | 66 +- satpy/tests/test_dataset.py | 516 +++---- satpy/tests/test_demo.py | 72 +- satpy/tests/test_dependency_tree.py | 68 +- satpy/tests/test_file_handlers.py | 148 +- satpy/tests/test_modifiers.py | 324 ++--- satpy/tests/test_node.py | 2 +- satpy/tests/test_readers.py | 408 +++--- satpy/tests/test_regressions.py | 194 +-- satpy/tests/test_resample.py | 390 +++--- satpy/tests/test_utils.py | 60 +- satpy/tests/test_writers.py | 298 ++-- satpy/tests/test_yaml_reader.py | 918 ++++++------ satpy/tests/utils.py | 78 +- satpy/tests/writer_tests/test_awips_tiled.py | 228 +-- satpy/tests/writer_tests/test_cf.py | 1060 +++++++------- satpy/tests/writer_tests/test_geotiff.py | 54 +- satpy/tests/writer_tests/test_mitiff.py | 946 ++++++------- satpy/tests/writer_tests/test_ninjogeotiff.py | 2 +- satpy/tests/writer_tests/test_ninjotiff.py | 54 +- satpy/tests/writer_tests/test_simple_image.py | 6 +- satpy/tests/writer_tests/test_utils.py | 10 +- satpy/utils.py | 80 +- satpy/writers/__init__.py | 132 +- satpy/writers/awips_tiled.py | 382 ++--- satpy/writers/cf/coords_attrs.py | 24 +- satpy/writers/cf_writer.py | 216 +-- satpy/writers/geotiff.py | 8 +- satpy/writers/mitiff.py | 394 +++--- satpy/writers/utils.py | 2 +- setup.py | 138 +- utils/convert_to_ninjotiff.py | 38 +- utils/coord2area_def.py | 12 +- 310 files changed, 20763 insertions(+), 20764 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b5b21a52fa..376f37b95e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,7 +6,6 @@ repos: rev: 'v0.0.247' hooks: - id: ruff - args: [--fix, --exit-non-zero-on-fix] - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.4.0 hooks: diff --git a/benchmarks/abi_l1b_benchmarks.py b/benchmarks/abi_l1b_benchmarks.py index b52cb46abd..936e0dc514 100644 --- a/benchmarks/abi_l1b_benchmarks.py +++ b/benchmarks/abi_l1b_benchmarks.py @@ -43,7 +43,7 @@ def setup_cache(self): if len(get_filenames(self.subdir)) != 16: raise RuntimeError("Existing data files do not match the expected number of files.") download_rsr() - download_luts(aerosol_type='rayleigh_only') + download_luts(aerosol_type="rayleigh_only") def setup(self): """Set up the benchmarks.""" diff --git a/benchmarks/ahi_hsd_benchmarks.py b/benchmarks/ahi_hsd_benchmarks.py index 9b78ae8ac5..361934168a 100644 --- a/benchmarks/ahi_hsd_benchmarks.py +++ b/benchmarks/ahi_hsd_benchmarks.py @@ -33,7 +33,7 @@ class HimawariHSD(GeoBenchmarks): timeout = 600 data_files: list[str] = [] subdir = os.path.join("ahi_hsd", "20210417_0500_typhoon_surigae") - reader = 'ahi_hsd' + reader = "ahi_hsd" def setup_cache(self): """Fetch the data files.""" @@ -43,7 +43,7 @@ def setup_cache(self): except ImportError: assert len(get_filenames(self.subdir)) == 4 # nosec download_rsr() - download_luts(aerosol_type='rayleigh_only') + download_luts(aerosol_type="rayleigh_only") def setup(self): """Set up the 
benchmarks.""" diff --git a/benchmarks/seviri_hrit_benchmarks.py b/benchmarks/seviri_hrit_benchmarks.py index 177d929adf..9851dbdac9 100644 --- a/benchmarks/seviri_hrit_benchmarks.py +++ b/benchmarks/seviri_hrit_benchmarks.py @@ -43,7 +43,7 @@ def setup_cache(self): except ImportError: assert len(get_filenames(self.subdir)) == 114 # nosec download_rsr() - download_luts(aerosol_type='rayleigh_only') + download_luts(aerosol_type="rayleigh_only") def setup(self): """Set up the benchmarks.""" diff --git a/benchmarks/utils.py b/benchmarks/utils.py index 54338d4eac..67b88025b9 100644 --- a/benchmarks/utils.py +++ b/benchmarks/utils.py @@ -64,7 +64,7 @@ def save_composite_as_geotiff(self, composite, resampler="native", area=None, filenames=None): """Save a composite to disk as geotiff.""" lscn = self.load_and_resample(composite, resampler, area, filenames) - lscn.save_dataset(composite, filename='test.tif', tiled=True) + lscn.save_dataset(composite, filename="test.tif", tiled=True) def compute_channel(self, channel, filenames=None): """Load and compute one channel.""" diff --git a/benchmarks/viirs_sdr_benchmarks.py b/benchmarks/viirs_sdr_benchmarks.py index 940c2d524b..68db5c6682 100644 --- a/benchmarks/viirs_sdr_benchmarks.py +++ b/benchmarks/viirs_sdr_benchmarks.py @@ -42,7 +42,7 @@ def setup_cache(self): except ImportError: assert len(self.get_filenames()) == 6 * 3 # nosec download_rsr() - download_luts(aerosol_type='rayleigh_only') + download_luts(aerosol_type="rayleigh_only") def setup(self, name): """Set up the benchmarks.""" @@ -58,14 +58,14 @@ def get_filenames(self): def load(self, composite): """Load one composite.""" from satpy import Scene - scn = Scene(filenames=self.data_files, reader='viirs_sdr') + scn = Scene(filenames=self.data_files, reader="viirs_sdr") scn.load([composite]) return scn def load_and_native_resample(self, composite): """Load and native resample a composite.""" scn = self.load(composite) - lscn = scn.resample(resampler='native') + lscn = scn.resample(resampler="native") return lscn @@ -119,4 +119,4 @@ def compute_composite(self, name): def save_composite_as_geotiff(self, name): """Save a composite to disk as geotiff.""" lscn = self.load_and_native_resample(name) - lscn.save_dataset(name, filename='test.tif', tiled=True) + lscn.save_dataset(name, filename="test.tif", tiled=True) diff --git a/doc/source/conf.py b/doc/source/conf.py index 4af8d63b4a..3bef218f89 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -23,7 +23,7 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.append(os.path.abspath('../../')) +sys.path.append(os.path.abspath("../../")) sys.path.append(os.path.abspath(os.path.dirname(__file__))) from reader_table import generate_reader_table # noqa: E402 @@ -33,10 +33,10 @@ # built documents. # # get version using setuptools-scm -release = get_distribution('satpy').version +release = get_distribution("satpy").version # The full version, including alpha/beta/rc tags. 
# for example take major/minor -version = '.'.join(release.split('.')[:2]) +version = ".".join(release.split(".")[:2]) class Mock(object): # noqa @@ -53,8 +53,8 @@ def __call__(self, *args, **kwargs): @classmethod def __getattr__(cls, name): """Mock common module attributes used in documentation.""" - if name in ('__file__', '__path__'): - return '/dev/null' + if name in ("__file__", "__path__"): + return "/dev/null" elif name[0] == name[0].upper(): mockType = type(name, (), {}) mockType.__module__ = __name__ @@ -66,15 +66,15 @@ def __getattr__(cls, name): # https://github.com/sphinx-doc/sphinx/issues/3920 -MOCK_MODULES = ['h5py'] +MOCK_MODULES = ["h5py"] for mod_name in MOCK_MODULES: sys.modules[mod_name] = Mock() # type: ignore -autodoc_mock_imports = ['cf', 'glymur', 'h5netcdf', 'imageio', 'mipp', 'netCDF4', - 'pygac', 'pygrib', 'pyhdf', 'pyninjotiff', - 'pyorbital', 'pyspectral', 'rasterio', 'trollimage', - 'zarr'] -autoclass_content = 'both' # append class __init__ docstring to the class docstring +autodoc_mock_imports = ["cf", "glymur", "h5netcdf", "imageio", "mipp", "netCDF4", + "pygac", "pygrib", "pyhdf", "pyninjotiff", + "pyorbital", "pyspectral", "rasterio", "trollimage", + "zarr"] +autoclass_content = "both" # append class __init__ docstring to the class docstring # auto generate reader table from reader config files with open("reader_table.rst", mode="w") as f: @@ -84,19 +84,19 @@ def __getattr__(cls, name): # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', - 'sphinx.ext.doctest', 'sphinx.ext.napoleon', 'sphinx.ext.autosummary', 'doi_role', - 'sphinx.ext.viewcode', 'sphinxcontrib.apidoc', - 'sphinx.ext.mathjax'] +extensions = ["sphinx.ext.autodoc", "sphinx.ext.intersphinx", "sphinx.ext.todo", "sphinx.ext.coverage", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.autosummary", "doi_role", + "sphinx.ext.viewcode", "sphinxcontrib.apidoc", + "sphinx.ext.mathjax"] # API docs apidoc_module_dir = "../../satpy" apidoc_output_dir = "api" apidoc_excluded_paths = [ - 'readers/caliop_l2_cloud.py', - 'readers/ghrsst_l3c_sst.py', - 'readers/li_l2.py', - 'readers/scatsat1_l2b.py', + "readers/caliop_l2_cloud.py", + "readers/ghrsst_l3c_sst.py", + "readers/li_l2.py", + "readers/scatsat1_l2b.py", ] apidoc_separate_modules = True apidoc_extra_args = [ @@ -104,20 +104,20 @@ def __getattr__(cls, name): ] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. # source_encoding = 'utf-8' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = u'Satpy' -copyright = u'2009-{}, The PyTroll Team'.format(datetime.utcnow().strftime("%Y")) +project = u"Satpy" +copyright = u"2009-{}, The PyTroll Team".format(datetime.utcnow().strftime("%Y")) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -151,7 +151,7 @@ def __getattr__(cls, name): # show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. 
# modindex_common_prefix = [] @@ -161,7 +161,7 @@ def __getattr__(cls, name): # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. -html_theme = 'sphinx_rtd_theme' +html_theme = "sphinx_rtd_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -190,16 +190,16 @@ def __getattr__(cls, name): # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +html_static_path = ["_static"] html_css_files = [ - 'theme_overrides.css', # override wide tables in RTD theme - 'https://cdn.datatables.net/1.10.23/css/jquery.dataTables.min.css', + "theme_overrides.css", # override wide tables in RTD theme + "https://cdn.datatables.net/1.10.23/css/jquery.dataTables.min.css", ] html_js_files = [ - 'https://cdn.datatables.net/1.10.23/js/jquery.dataTables.min.js', - 'main.js', + "https://cdn.datatables.net/1.10.23/js/jquery.dataTables.min.js", + "main.js", ] @@ -239,7 +239,7 @@ def __getattr__(cls, name): # html_file_suffix = '' # Output file base name for HTML help builder. -htmlhelp_basename = 'NWCSAFMSGPPdoc' +htmlhelp_basename = "NWCSAFMSGPPdoc" # -- Options for LaTeX output -------------------------------------------------- @@ -253,8 +253,8 @@ def __getattr__(cls, name): # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ - ('index', 'satpy.tex', 'Satpy Documentation', - 'Satpy Developers', 'manual'), + ("index", "satpy.tex", "Satpy Documentation", + "Satpy Developers", "manual"), ] # The name of an image file (relative to this directory) to place at the top of @@ -277,22 +277,22 @@ def __getattr__(cls, name): # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { - 'dask': ('https://docs.dask.org/en/latest', None), - 'geoviews': ('http://geoviews.org', None), - 'jobqueue': ('https://jobqueue.dask.org/en/latest', None), - 'numpy': ('https://numpy.org/doc/stable', None), - 'pydecorate': ('https://pydecorate.readthedocs.io/en/stable', None), - 'pyorbital': ('https://pyorbital.readthedocs.io/en/stable', None), - 'pyproj': ('https://pyproj4.github.io/pyproj/dev', None), - 'pyresample': ('https://pyresample.readthedocs.io/en/stable', None), - 'pytest': ('https://docs.pytest.org/en/stable/', None), - 'python': ('https://docs.python.org/3', None), - 'scipy': ('http://scipy.github.io/devdocs', None), - 'trollimage': ('https://trollimage.readthedocs.io/en/stable', None), - 'trollsift': ('https://trollsift.readthedocs.io/en/stable', None), - 'xarray': ('https://xarray.pydata.org/en/stable', None), - 'rasterio': ('https://rasterio.readthedocs.io/en/latest', None), - 'donfig': ('https://donfig.readthedocs.io/en/latest', None), - 'pooch': ('https://www.fatiando.org/pooch/latest/', None), - 'fsspec': ('https://filesystem-spec.readthedocs.io/en/latest/', None), + "dask": ("https://docs.dask.org/en/latest", None), + "geoviews": ("http://geoviews.org", None), + "jobqueue": ("https://jobqueue.dask.org/en/latest", None), + "numpy": ("https://numpy.org/doc/stable", None), + "pydecorate": ("https://pydecorate.readthedocs.io/en/stable", None), + "pyorbital": ("https://pyorbital.readthedocs.io/en/stable", None), + "pyproj": ("https://pyproj4.github.io/pyproj/dev", None), + "pyresample": ("https://pyresample.readthedocs.io/en/stable", None), + "pytest": ("https://docs.pytest.org/en/stable/", None), + "python": ("https://docs.python.org/3", None), + "scipy": ("http://scipy.github.io/devdocs", None), + "trollimage": ("https://trollimage.readthedocs.io/en/stable", None), + "trollsift": ("https://trollsift.readthedocs.io/en/stable", None), + "xarray": ("https://xarray.pydata.org/en/stable", None), + "rasterio": ("https://rasterio.readthedocs.io/en/latest", None), + "donfig": ("https://donfig.readthedocs.io/en/latest", None), + "pooch": ("https://www.fatiando.org/pooch/latest/", None), + "fsspec": ("https://filesystem-spec.readthedocs.io/en/latest/", None), } diff --git a/doc/source/doi_role.py b/doc/source/doi_role.py index b7c64a14ac..115e7895c6 100644 --- a/doc/source/doi_role.py +++ b/doc/source/doi_role.py @@ -26,9 +26,9 @@ def doi_role(typ, rawtext, text, lineno, inliner, options=None, content=None): content = [] text = utils.unescape(text) has_explicit_title, title, part = split_explicit_title(text) - full_url = 'https://doi.org/' + part + full_url = "https://doi.org/" + part if not has_explicit_title: - title = 'DOI:' + part + title = "DOI:" + part pnode = nodes.reference(title, title, internal=False, refuri=full_url) return [pnode], [] @@ -40,20 +40,20 @@ def arxiv_role(typ, rawtext, text, lineno, inliner, options=None, content=None): content = [] text = utils.unescape(text) has_explicit_title, title, part = split_explicit_title(text) - full_url = 'https://arxiv.org/abs/' + part + full_url = "https://arxiv.org/abs/" + part if not has_explicit_title: - title = 'arXiv:' + part + title = "arXiv:" + part pnode = nodes.reference(title, title, internal=False, refuri=full_url) return [pnode], [] def setup_link_role(app): - app.add_role('doi', doi_role, override=True) - app.add_role('DOI', doi_role, override=True) - app.add_role('arXiv', arxiv_role, override=True) - app.add_role('arxiv', arxiv_role, override=True) + app.add_role("doi", doi_role, 
override=True) + app.add_role("DOI", doi_role, override=True) + app.add_role("arXiv", arxiv_role, override=True) + app.add_role("arxiv", arxiv_role, override=True) def setup(app): - app.connect('builder-inited', setup_link_role) - return {'version': '0.1', 'parallel_read_safe': True} + app.connect("builder-inited", setup_link_role) + return {"version": "0.1", "parallel_read_safe": True} diff --git a/pyproject.toml b/pyproject.toml index d2059e68af..01b0272e89 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ line_length = 120 [tool.ruff] # See https://docs.astral.sh/ruff/rules/ # select = ["E", "W", "F", "I", "D", "S", "B", "A", "PT", "Q", "TID", "C90", "T10", "N"] -select = [] +select = ["Q"] ignore = ["B905"] # only available from python 3.10 line-length = 120 diff --git a/satpy/__init__.py b/satpy/__init__.py index 5392e0d9dd..d90f65d892 100644 --- a/satpy/__init__.py +++ b/satpy/__init__.py @@ -35,4 +35,4 @@ from satpy.utils import get_logger # noqa from satpy.writers import available_writers # noqa -log = get_logger('satpy') +log = get_logger("satpy") diff --git a/satpy/_config.py b/satpy/_config.py index 4abc00aba2..bcbd909aae 100644 --- a/satpy/_config.py +++ b/satpy/_config.py @@ -49,21 +49,21 @@ def impr_files(module_name: str) -> Path: BASE_PATH = os.path.dirname(os.path.realpath(__file__)) # FIXME: Use package_resources? -PACKAGE_CONFIG_PATH = os.path.join(BASE_PATH, 'etc') +PACKAGE_CONFIG_PATH = os.path.join(BASE_PATH, "etc") -_satpy_dirs = appdirs.AppDirs(appname='satpy', appauthor='pytroll') +_satpy_dirs = appdirs.AppDirs(appname="satpy", appauthor="pytroll") _CONFIG_DEFAULTS = { - 'tmp_dir': tempfile.gettempdir(), - 'cache_dir': _satpy_dirs.user_cache_dir, - 'cache_lonlats': False, - 'cache_sensor_angles': False, - 'config_path': [], - 'data_dir': _satpy_dirs.user_data_dir, - 'demo_data_dir': '.', - 'download_aux': True, - 'sensor_angles_position_preference': 'actual', - 'readers': { - 'clip_negative_radiances': False, + "tmp_dir": tempfile.gettempdir(), + "cache_dir": _satpy_dirs.user_cache_dir, + "cache_lonlats": False, + "cache_sensor_angles": False, + "config_path": [], + "data_dir": _satpy_dirs.user_data_dir, + "demo_data_dir": ".", + "download_aux": True, + "sensor_angles_position_preference": "actual", + "readers": { + "clip_negative_radiances": False, }, } @@ -79,17 +79,17 @@ def impr_files(module_name: str) -> Path: # 5. ~/.satpy/satpy.yaml # 6. $SATPY_CONFIG_PATH/satpy.yaml if present (colon separated) _CONFIG_PATHS = [ - os.path.join(PACKAGE_CONFIG_PATH, 'satpy.yaml'), - os.getenv('SATPY_ROOT_CONFIG', os.path.join('/etc', 'satpy', 'satpy.yaml')), - os.path.join(sys.prefix, 'etc', 'satpy', 'satpy.yaml'), - os.path.join(_satpy_dirs.user_config_dir, 'satpy.yaml'), - os.path.join(os.path.expanduser('~'), '.satpy', 'satpy.yaml'), + os.path.join(PACKAGE_CONFIG_PATH, "satpy.yaml"), + os.getenv("SATPY_ROOT_CONFIG", os.path.join("/etc", "satpy", "satpy.yaml")), + os.path.join(sys.prefix, "etc", "satpy", "satpy.yaml"), + os.path.join(_satpy_dirs.user_config_dir, "satpy.yaml"), + os.path.join(os.path.expanduser("~"), ".satpy", "satpy.yaml"), ] # The above files can also be directories. If directories all files # with `.yaml`., `.yml`, or `.json` extensions will be used. 
-_ppp_config_dir = os.getenv('PPP_CONFIG_DIR', None) -_satpy_config_path = os.getenv('SATPY_CONFIG_PATH', None) +_ppp_config_dir = os.getenv("PPP_CONFIG_DIR", None) +_satpy_config_path = os.getenv("SATPY_CONFIG_PATH", None) if _ppp_config_dir is not None and _satpy_config_path is None: LOG.warning("'PPP_CONFIG_DIR' is deprecated. Please use 'SATPY_CONFIG_PATH' instead.") @@ -105,22 +105,22 @@ def impr_files(module_name: str) -> Path: # i.e. last-applied/highest priority to first-applied/lowest priority _satpy_config_path_list = _satpy_config_path.split(os.pathsep) - os.environ['SATPY_CONFIG_PATH'] = repr(_satpy_config_path_list) + os.environ["SATPY_CONFIG_PATH"] = repr(_satpy_config_path_list) for config_dir in _satpy_config_path_list: - _CONFIG_PATHS.append(os.path.join(config_dir, 'satpy.yaml')) + _CONFIG_PATHS.append(os.path.join(config_dir, "satpy.yaml")) -_ancpath = os.getenv('SATPY_ANCPATH', None) -_data_dir = os.getenv('SATPY_DATA_DIR', None) +_ancpath = os.getenv("SATPY_ANCPATH", None) +_data_dir = os.getenv("SATPY_DATA_DIR", None) if _ancpath is not None and _data_dir is None: LOG.warning("'SATPY_ANCPATH' is deprecated. Please use 'SATPY_DATA_DIR' instead.") - os.environ['SATPY_DATA_DIR'] = _ancpath + os.environ["SATPY_DATA_DIR"] = _ancpath config = Config("satpy", defaults=[_CONFIG_DEFAULTS], paths=_CONFIG_PATHS) def get_config_path_safe(): """Get 'config_path' and check for proper 'list' type.""" - config_path = config.get('config_path') + config_path = config.get("config_path") if not isinstance(config_path, list): raise ValueError("Satpy config option 'config_path' must be a " "list, not '{}'".format(type(config_path))) @@ -136,7 +136,7 @@ def get_entry_points_config_dirs(group_name: str, include_config_path: bool = Tr if not dirs or dirs[-1] != new_dir: dirs.append(new_dir) if include_config_path: - dirs.extend(config.get('config_path')[::-1]) + dirs.extend(config.get("config_path")[::-1]) return dirs diff --git a/satpy/_scene_converters.py b/satpy/_scene_converters.py index 25fe728b9f..0aa903d2f8 100644 --- a/satpy/_scene_converters.py +++ b/satpy/_scene_converters.py @@ -45,7 +45,7 @@ def to_xarray(scn, include_lonlats=True, epoch=None, include_orig_name=True, - numeric_name_prefix='CHANNEL_'): + numeric_name_prefix="CHANNEL_"): """Merge all xr.DataArray(s) of a satpy.Scene to a CF-compliant xarray object. If all Scene DataArrays are on the same area, it returns an xr.Dataset. 
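
The hunk above only re-quotes the default of `numeric_name_prefix`; since '...' and "..." are identical string literals in Python, the converter's behaviour is unchanged. For orientation, a minimal usage sketch of this converter (assuming `Scene.to_xarray` forwards to it; the filename, reader name and dataset name below are placeholders, not from the patch):

    from satpy import Scene

    # Placeholder input; any file whose datasets all share one area works.
    scn = Scene(filenames=["scene.nc"], reader="satpy_cf_nc")
    scn.load(["4"])  # a purely numeric dataset name
    # With all DataArrays on the same area an xr.Dataset is returned, and the
    # numeric name is prefixed with numeric_name_prefix, e.g. "CHANNEL_4".
    ds = scn.to_xarray(numeric_name_prefix="CHANNEL_")
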
diff --git a/satpy/aux_download.py b/satpy/aux_download.py index 82095737f0..5d9f0630e3 100644 --- a/satpy/aux_download.py +++ b/satpy/aux_download.py @@ -68,14 +68,14 @@ def _generate_filename(filename, component_type): return None path = filename if component_type: - path = '/'.join([component_type, path]) + path = "/".join([component_type, path]) return path def _retrieve_offline(data_dir, cache_key): - logger.debug('Downloading auxiliary files is turned off, will check ' - 'local files.') - local_file = os.path.join(data_dir, *cache_key.split('/')) + logger.debug("Downloading auxiliary files is turned off, will check " + "local files.") + local_file = os.path.join(data_dir, *cache_key.split("/")) if not os.path.isfile(local_file): raise RuntimeError("Satpy 'download_aux' setting is False meaning " "no new files will be downloaded and the local " @@ -85,7 +85,7 @@ def _retrieve_offline(data_dir, cache_key): def _should_download(cache_key): """Check if we're running tests and can download this file.""" - return not RUNNING_TESTS or 'README' in cache_key + return not RUNNING_TESTS or "README" in cache_key def retrieve(cache_key, pooch_kwargs=None): @@ -107,8 +107,8 @@ def retrieve(cache_key, pooch_kwargs=None): """ pooch_kwargs = pooch_kwargs or {} - path = satpy.config.get('data_dir') - if not satpy.config.get('download_aux'): + path = satpy.config.get("data_dir") + if not satpy.config.get("download_aux"): return _retrieve_offline(path, cache_key) if not _should_download(cache_key): raise RuntimeError("Auxiliary data download is not allowed during " @@ -123,7 +123,7 @@ def retrieve(cache_key, pooch_kwargs=None): def _retrieve_all_with_pooch(pooch_kwargs): if pooch_kwargs is None: pooch_kwargs = {} - path = satpy.config.get('data_dir') + path = satpy.config.get("data_dir") pooch_obj = pooch.create(path, path, registry=_FILE_REGISTRY, urls=_FILE_URLS) for fname in _FILE_REGISTRY: @@ -153,7 +153,7 @@ def retrieve_all(readers=None, writers=None, composite_sensors=None, ``fetch``. """ - if not satpy.config.get('download_aux'): + if not satpy.config.get("download_aux"): raise RuntimeError("Satpy 'download_aux' setting is False so no files " "will be downloaded.") @@ -305,11 +305,11 @@ def __init__(self, *args, **kwargs): """ DATA_FILE_COMPONENTS = { - 'reader': 'readers', - 'writer': 'writers', - 'composit': 'composites', - 'modifi': 'modifiers', - 'corr': 'modifiers', + "reader": "readers", + "writer": "writers", + "composit": "composites", + "modifi": "modifiers", + "corr": "modifiers", } @property @@ -318,7 +318,7 @@ def _data_file_component_type(self): for cls_name_sub, comp_type in self.DATA_FILE_COMPONENTS.items(): if cls_name_sub in cls_name: return comp_type - return 'other' + return "other" def register_data_files(self, data_files=None): """Register a series of files that may be downloaded later. 
@@ -330,8 +330,8 @@ def register_data_files(self, data_files=None): """ comp_type = self._data_file_component_type if data_files is None: - df_parent = getattr(self, 'info', self.config) - data_files = df_parent.get('data_files', []) + df_parent = getattr(self, "info", self.config) + data_files = df_parent.get("data_files", []) cache_keys = [] for data_file_entry in data_files: cache_key = self._register_data_file(data_file_entry, comp_type) @@ -340,9 +340,9 @@ def register_data_files(self, data_files=None): @staticmethod def _register_data_file(data_file_entry, comp_type): - url = data_file_entry['url'] - filename = data_file_entry.get('filename', os.path.basename(url)) - known_hash = data_file_entry.get('known_hash') + url = data_file_entry["url"] + filename = data_file_entry.get("filename", os.path.basename(url)) + known_hash = data_file_entry.get("known_hash") return register_file(url, filename, component_type=comp_type, known_hash=known_hash) @@ -351,20 +351,20 @@ def retrieve_all_cmd(argv=None): """Call 'retrieve_all' function from console script 'satpy_retrieve_all'.""" import argparse parser = argparse.ArgumentParser(description="Download auxiliary data files used by Satpy.") - parser.add_argument('--data-dir', + parser.add_argument("--data-dir", help="Override 'SATPY_DATA_DIR' for destination of " "downloaded files. This does NOT change the " "directory Satpy will look at when searching " "for files outside of this script.") - parser.add_argument('--composite-sensors', nargs="*", + parser.add_argument("--composite-sensors", nargs="*", help="Limit loaded composites for the specified " "sensors. If specified with no arguments, " "no composite files will be downloaded.") - parser.add_argument('--readers', nargs="*", + parser.add_argument("--readers", nargs="*", help="Limit searching to these readers. If specified " "with no arguments, no reader files will be " "downloaded.") - parser.add_argument('--writers', nargs="*", + parser.add_argument("--writers", nargs="*", help="Limit searching to these writers. If specified " "with no arguments, no writer files will be " "downloaded.") @@ -373,7 +373,7 @@ def retrieve_all_cmd(argv=None): logging.basicConfig(level=logging.INFO) if args.data_dir is None: - args.data_dir = satpy.config.get('data_dir') + args.data_dir = satpy.config.get("data_dir") with satpy.config.set(data_dir=args.data_dir): retrieve_all(readers=args.readers, writers=args.writers, diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index da4d1a9e5c..f6b1b13150 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -34,12 +34,12 @@ LOG = logging.getLogger(__name__) -NEGLIGIBLE_COORDS = ['time'] +NEGLIGIBLE_COORDS = ["time"] """Keywords identifying non-dimensional coordinates to be ignored during composite generation.""" -MASKING_COMPOSITOR_METHODS = ['less', 'less_equal', 'equal', 'greater_equal', - 'greater', 'not_equal', 'isnan', 'isfinite', - 'isneginf', 'isposinf'] +MASKING_COMPOSITOR_METHODS = ["less", "less_equal", "equal", "greater_equal", + "greater", "not_equal", "isnan", "isfinite", + "isneginf", "isposinf"] class IncompatibleAreas(Exception): @@ -55,8 +55,8 @@ def check_times(projectables): times = [] for proj in projectables: try: - if proj['time'].size and proj['time'][0] != 0: - times.append(proj['time'][0].values) + if proj["time"].size and proj["time"][0] != 0: + times.append(proj["time"][0].values) else: break # right? 
except KeyError: @@ -64,13 +64,13 @@ def check_times(projectables): break except IndexError: # time is a scalar - if proj['time'].values != 0: - times.append(proj['time'].values) + if proj["time"].values != 0: + times.append(proj["time"].values) else: break else: # Is there a more gracious way to handle this ? - if np.max(times) - np.min(times) > np.timedelta64(1, 's'): + if np.max(times) - np.min(times) > np.timedelta64(1, "s"): raise IncompatibleTimes mid_time = (np.max(times) - np.min(times)) / 2 + np.min(times) return mid_time @@ -79,9 +79,9 @@ def check_times(projectables): def sub_arrays(proj1, proj2): """Substract two DataArrays and combine their attrs.""" attrs = combine_metadata(proj1.attrs, proj2.attrs) - if (attrs.get('area') is None - and proj1.attrs.get('area') is not None - and proj2.attrs.get('area') is not None): + if (attrs.get("area") is None + and proj1.attrs.get("area") is not None + and proj2.attrs.get("area") is not None): raise IncompatibleAreas res = proj1 - proj2 res.attrs = attrs @@ -114,9 +114,9 @@ def __init__(self, name, prerequisites=None, optional_prerequisites=None, **kwar def id(self): """Return the DataID of the object.""" try: - return self.attrs['_satpy_id'] + return self.attrs["_satpy_id"] except KeyError: - id_keys = self.attrs.get('_satpy_id_keys', minimal_default_keys_config) + id_keys = self.attrs.get("_satpy_id_keys", minimal_default_keys_config) return DataID(id_keys, **self.attrs) def __call__(self, datasets, optional_datasets=None, **info): @@ -135,15 +135,15 @@ def __repr__(self): def apply_modifier_info(self, origin, destination): """Apply the modifier info from *origin* to *destination*.""" - o = getattr(origin, 'attrs', origin) - d = getattr(destination, 'attrs', destination) + o = getattr(origin, "attrs", origin) + d = getattr(destination, "attrs", destination) try: - dataset_keys = self.attrs['_satpy_id'].id_keys.keys() + dataset_keys = self.attrs["_satpy_id"].id_keys.keys() except KeyError: - dataset_keys = ['name', 'modifiers'] + dataset_keys = ["name", "modifiers"] for k in dataset_keys: - if k == 'modifiers' and k in self.attrs: + if k == "modifiers" and k in self.attrs: d[k] = self.attrs[k] elif d.get(k) is None: if self.attrs.get(k) is not None: @@ -225,16 +225,16 @@ def check_geolocation(self, data_arrays): if len(data_arrays) == 1: return - if 'x' in data_arrays[0].dims and \ - not all(x.sizes['x'] == data_arrays[0].sizes['x'] + if "x" in data_arrays[0].dims and \ + not all(x.sizes["x"] == data_arrays[0].sizes["x"] for x in data_arrays[1:]): raise IncompatibleAreas("X dimension has different sizes") - if 'y' in data_arrays[0].dims and \ - not all(x.sizes['y'] == data_arrays[0].sizes['y'] + if "y" in data_arrays[0].dims and \ + not all(x.sizes["y"] == data_arrays[0].sizes["y"] for x in data_arrays[1:]): raise IncompatibleAreas("Y dimension has different sizes") - areas = [ds.attrs.get('area') for ds in data_arrays] + areas = [ds.attrs.get("area") for ds in data_arrays] if all(a is None for a in areas): return if any(a is None for a in areas): @@ -242,7 +242,7 @@ def check_geolocation(self, data_arrays): if not all(areas[0] == x for x in areas[1:]): LOG.debug("Not all areas are the same in " - "'{}'".format(self.attrs['name'])) + "'{}'".format(self.attrs["name"])) raise IncompatibleAreas("Areas are different") @@ -255,7 +255,7 @@ def __call__(self, projectables, nonprojectables=None, **attrs): raise ValueError("Expected 2 datasets, got %d" % (len(projectables),)) projectables = self.match_data_arrays(projectables) info = 
combine_metadata(*projectables) - info['name'] = self.attrs['name'] + info["name"] = self.attrs["name"] info.update(self.attrs) # attrs from YAML/__init__ info.update(attrs) # overwriting of DataID properties @@ -273,7 +273,7 @@ def __call__(self, projectables, nonprojectables=None, **info): raise ValueError("Expected 2 datasets, got %d" % (len(projectables),)) projectables = self.match_data_arrays(projectables) info = combine_metadata(*projectables) - info['name'] = self.attrs['name'] + info["name"] = self.attrs["name"] proj = projectables[0] / projectables[1] proj.attrs = info @@ -289,7 +289,7 @@ def __call__(self, projectables, nonprojectables=None, **info): raise ValueError("Expected 2 datasets, got %d" % (len(projectables),)) projectables = self.match_data_arrays(projectables) info = combine_metadata(*projectables) - info['name'] = self.attrs['name'] + info["name"] = self.attrs["name"] proj = projectables[0] + projectables[1] proj.attrs = info @@ -316,10 +316,10 @@ def __call__(self, projectables, nonprojectables=None, **attrs): data = projectables[0] new_attrs = data.attrs.copy() self._update_missing_metadata(new_attrs, attrs) - resolution = new_attrs.get('resolution', None) + resolution = new_attrs.get("resolution", None) new_attrs.update(self.attrs) if resolution is not None: - new_attrs['resolution'] = resolution + new_attrs["resolution"] = resolution return xr.DataArray(data=data.data, attrs=new_attrs, dims=data.dims, coords=data.coords) @@ -349,8 +349,8 @@ def __init__(self, name, lut=None, **kwargs): def _update_attrs(self, new_attrs): """Modify name and add LUT.""" - new_attrs['name'] = self.attrs['name'] - new_attrs['composite_lut'] = list(self.lut) + new_attrs["name"] = self.attrs["name"] + new_attrs["composite_lut"] = list(self.lut) @staticmethod def _getitem(block, lut): @@ -373,7 +373,7 @@ def __call__(self, projectables, **kwargs): class GenericCompositor(CompositeBase): """Basic colored composite builder.""" - modes = {1: 'L', 2: 'LA', 3: 'RGB', 4: 'RGBA'} + modes = {1: "L", 2: "LA", 3: "RGB", 4: "RGBA"} def __init__(self, name, common_channel_mask=True, **kwargs): """Collect custom configuration values. 
@@ -389,18 +389,18 @@ def __init__(self, name, common_channel_mask=True, **kwargs): @classmethod def infer_mode(cls, data_arr): """Guess at the mode for a particular DataArray.""" - if 'mode' in data_arr.attrs: - return data_arr.attrs['mode'] - if 'bands' not in data_arr.dims: + if "mode" in data_arr.attrs: + return data_arr.attrs["mode"] + if "bands" not in data_arr.dims: return cls.modes[1] - if 'bands' in data_arr.coords and isinstance(data_arr.coords['bands'][0].item(), str): - return ''.join(data_arr.coords['bands'].values) - return cls.modes[data_arr.sizes['bands']] + if "bands" in data_arr.coords and isinstance(data_arr.coords["bands"][0].item(), str): + return "".join(data_arr.coords["bands"].values) + return cls.modes[data_arr.sizes["bands"]] def _concat_datasets(self, projectables, mode): try: - data = xr.concat(projectables, 'bands', coords='minimal') - data['bands'] = list(mode) + data = xr.concat(projectables, "bands", coords="minimal") + data["bands"] = list(mode) except ValueError as e: LOG.debug("Original exception for incompatible areas: {}".format(str(e))) raise IncompatibleAreas @@ -424,15 +424,15 @@ def _get_sensors(self, projectables): def __call__(self, projectables, nonprojectables=None, **attrs): """Build the composite.""" - if 'deprecation_warning' in self.attrs: + if "deprecation_warning" in self.attrs: warnings.warn( - self.attrs['deprecation_warning'], + self.attrs["deprecation_warning"], UserWarning, stacklevel=2 ) - self.attrs.pop('deprecation_warning', None) + self.attrs.pop("deprecation_warning", None) num = len(projectables) - mode = attrs.get('mode') + mode = attrs.get("mode") if mode is None: # num may not be in `self.modes` so only check if we need to mode = self.modes[num] @@ -440,8 +440,8 @@ def __call__(self, projectables, nonprojectables=None, **attrs): projectables = self.match_data_arrays(projectables) data = self._concat_datasets(projectables, mode) # Skip masking if user wants it or a specific alpha channel is given. 
- if self.common_channel_mask and mode[-1] != 'A': - data = data.where(data.notnull().all(dim='bands')) + if self.common_channel_mask and mode[-1] != "A": + data = data.where(data.notnull().all(dim="bands")) else: data = projectables[0] @@ -450,23 +450,23 @@ def __call__(self, projectables, nonprojectables=None, **attrs): # time coordinate value if len(projectables) > 1: time = check_times(projectables) - if time is not None and 'time' in data.dims: - data['time'] = [time] + if time is not None and "time" in data.dims: + data["time"] = [time] new_attrs = combine_metadata(*projectables) # remove metadata that shouldn't make sense in a composite new_attrs["wavelength"] = None new_attrs.pop("units", None) - new_attrs.pop('calibration', None) - new_attrs.pop('modifiers', None) + new_attrs.pop("calibration", None) + new_attrs.pop("modifiers", None) new_attrs.update({key: val for (key, val) in attrs.items() if val is not None}) - resolution = new_attrs.get('resolution', None) + resolution = new_attrs.get("resolution", None) new_attrs.update(self.attrs) if resolution is not None: - new_attrs['resolution'] = resolution + new_attrs["resolution"] = resolution new_attrs["sensor"] = self._get_sensors(projectables) new_attrs["mode"] = mode @@ -505,8 +505,8 @@ def __call__(self, projectables, nonprojectables=None, **info): filled_projectable = projectables[0] for next_projectable in projectables[1:]: filled_projectable = filled_projectable.fillna(next_projectable) - if 'optional_datasets' in info.keys(): - for next_projectable in info['optional_datasets']: + if "optional_datasets" in info.keys(): + for next_projectable in info["optional_datasets"]: filled_projectable = filled_projectable.fillna(next_projectable) return super().__call__([filled_projectable], **info) @@ -604,16 +604,16 @@ def _create_composite_from_channels(self, channels, template): mask = self._get_mask_from_data(template) channels = [self._create_masked_dataarray_like(channel, template, mask) for channel in channels] res = super(ColormapCompositor, self).__call__(channels, **template.attrs) - res.attrs['_FillValue'] = np.nan + res.attrs["_FillValue"] = np.nan return res @staticmethod def _get_mask_from_data(data): - fill_value = data.attrs.get('_FillValue', np.nan) + fill_value = data.attrs.get("_FillValue", np.nan) if np.isnan(fill_value): mask = data.notnull() else: - mask = data != data.attrs['_FillValue'] + mask = data != data.attrs["_FillValue"] return mask @staticmethod @@ -770,8 +770,8 @@ def _get_data_for_combined_product(self, day_data, night_data): # L/RGB -> RGB/RGB # LA/RGB -> RGBA/RGBA # RGB/RGBA -> RGBA/RGBA - day_data = add_bands(day_data, night_data['bands']) - night_data = add_bands(night_data, day_data['bands']) + day_data = add_bands(day_data, night_data["bands"]) + night_data = add_bands(night_data, day_data["bands"]) # Get merged metadata attrs = combine_metadata(day_data, night_data) @@ -801,7 +801,7 @@ def _weight_data(self, day_data, night_data, weights, attrs): night_band = _get_single_band_data(night_data, b) # For day-only and night-only products only the alpha channel is weighted # If there's no alpha band, weight the actual data - if b == 'A' or "only" not in self.day_night or not self.include_alpha: + if b == "A" or "only" not in self.day_night or not self.include_alpha: day_band = day_band * weights night_band = night_band * (1 - weights) band = day_band + night_band @@ -812,9 +812,9 @@ def _weight_data(self, day_data, night_data, weights, attrs): def _get_band_names(day_data, night_data): try: - bands = 
day_data['bands'] + bands = day_data["bands"] except TypeError: - bands = night_data['bands'] + bands = night_data["bands"] return bands @@ -850,18 +850,18 @@ def add_alpha_bands(data): Add an alpha band to L or RGB composite as prerequisites for the following band matching to make the masked-out area transparent. """ - if 'A' not in data['bands'].data: - new_data = [data.sel(bands=band) for band in data['bands'].data] + if "A" not in data["bands"].data: + new_data = [data.sel(bands=band) for band in data["bands"].data] # Create alpha band based on a copy of the first "real" band alpha = new_data[0].copy() - alpha.data = da.ones((data.sizes['y'], - data.sizes['x']), + alpha.data = da.ones((data.sizes["y"], + data.sizes["x"]), chunks=new_data[0].chunks) # Rename band to indicate it's alpha - alpha['bands'] = 'A' + alpha["bands"] = "A" new_data.append(alpha) - new_data = xr.concat(new_data, dim='bands') - new_data.attrs['mode'] = data.attrs['mode'] + 'A' + new_data = xr.concat(new_data, dim="bands") + new_data.attrs["mode"] = data.attrs["mode"] + "A" data = new_data return data @@ -875,17 +875,17 @@ def enhance2dataset(dset, convert_p=False): data = _get_data_from_enhanced_image(dset, convert_p) data.attrs = attrs # remove 'mode' if it is specified since it may have been updated - data.attrs.pop('mode', None) + data.attrs.pop("mode", None) # update mode since it may have changed (colorized/palettize) - data.attrs['mode'] = GenericCompositor.infer_mode(data) + data.attrs["mode"] = GenericCompositor.infer_mode(data) return data def _get_data_from_enhanced_image(dset, convert_p): img = get_enhanced_image(dset) - if convert_p and img.mode == 'P': + if convert_p and img.mode == "P": img = _apply_palette_to_image(img) - if img.mode != 'P': + if img.mode != "P": data = img.data.clip(0.0, 1.0) else: data = img.data @@ -894,9 +894,9 @@ def _get_data_from_enhanced_image(dset, convert_p): def _apply_palette_to_image(img): if len(img.palette[0]) == 3: - img = img.convert('RGB') + img = img.convert("RGB") elif len(img.palette[0]) == 4: - img = img.convert('RGBA') + img = img.convert("RGBA") return img @@ -904,36 +904,36 @@ def add_bands(data, bands): """Add bands so that they match *bands*.""" # Add R, G and B bands, remove L band bands = bands.compute() - if 'P' in data['bands'].data or 'P' in bands.data: - raise NotImplementedError('Cannot mix datasets of mode P with other datasets at the moment.') - if 'L' in data['bands'].data and 'R' in bands.data: - lum = data.sel(bands='L') + if "P" in data["bands"].data or "P" in bands.data: + raise NotImplementedError("Cannot mix datasets of mode P with other datasets at the moment.") + if "L" in data["bands"].data and "R" in bands.data: + lum = data.sel(bands="L") # Keep 'A' if it was present - if 'A' in data['bands']: - alpha = data.sel(bands='A') + if "A" in data["bands"]: + alpha = data.sel(bands="A") new_data = (lum, lum, lum, alpha) - new_bands = ['R', 'G', 'B', 'A'] - mode = 'RGBA' + new_bands = ["R", "G", "B", "A"] + mode = "RGBA" else: new_data = (lum, lum, lum) - new_bands = ['R', 'G', 'B'] - mode = 'RGB' - data = xr.concat(new_data, dim='bands', coords={'bands': new_bands}) - data['bands'] = new_bands - data.attrs['mode'] = mode + new_bands = ["R", "G", "B"] + mode = "RGB" + data = xr.concat(new_data, dim="bands", coords={"bands": new_bands}) + data["bands"] = new_bands + data.attrs["mode"] = mode # Add alpha band - if 'A' not in data['bands'].data and 'A' in bands.data: - new_data = [data.sel(bands=band) for band in data['bands'].data] + if "A" not 
in data["bands"].data and "A" in bands.data: + new_data = [data.sel(bands=band) for band in data["bands"].data] # Create alpha band based on a copy of the first "real" band alpha = new_data[0].copy() - alpha.data = da.ones((data.sizes['y'], - data.sizes['x']), + alpha.data = da.ones((data.sizes["y"], + data.sizes["x"]), chunks=new_data[0].chunks) # Rename band to indicate it's alpha - alpha['bands'] = 'A' + alpha["bands"] = "A" new_data.append(alpha) - new_data = xr.concat(new_data, dim='bands') - new_data.attrs['mode'] = data.attrs['mode'] + 'A' + new_data = xr.concat(new_data, dim="bands") + new_data.attrs["mode"] = data.attrs["mode"] + "A" data = new_data return data @@ -1061,11 +1061,11 @@ def __init__(self, *args, **kwargs): """Instanciate the ration sharpener.""" self.high_resolution_color = kwargs.pop("high_resolution_band", "red") self.neutral_resolution_color = kwargs.pop("neutral_resolution_band", None) - if self.high_resolution_color not in ['red', 'green', 'blue', None]: + if self.high_resolution_color not in ["red", "green", "blue", None]: raise ValueError("RatioSharpenedRGB.high_resolution_band must " "be one of ['red', 'green', 'blue', None]. Not " "'{}'".format(self.high_resolution_color)) - if self.neutral_resolution_color not in ['red', 'green', 'blue', None]: + if self.neutral_resolution_color not in ["red", "green", "blue", None]: raise ValueError("RatioSharpenedRGB.neutral_resolution_band must " "be one of ['red', 'green', 'blue', None]. Not " "'{}'".format(self.neutral_resolution_color)) @@ -1081,8 +1081,8 @@ def __call__(self, datasets, optional_datasets=None, **info): if not all(x.shape == datasets[0].shape for x in datasets[1:]) or \ (optional_datasets and optional_datasets[0].shape != datasets[0].shape): - raise IncompatibleAreas('RatioSharpening requires datasets of ' - 'the same size. Must resample first.') + raise IncompatibleAreas("RatioSharpening requires datasets of " + "the same size. 
Must resample first.") optional_datasets = tuple() if optional_datasets is None else optional_datasets datasets = self.match_data_arrays(datasets + optional_datasets) @@ -1100,19 +1100,19 @@ def _get_and_sharpen_rgb_data_arrays_and_meta(self, datasets, optional_datasets) if optional_datasets and self.high_resolution_color is not None: LOG.debug("Sharpening image with high resolution {} band".format(self.high_resolution_color)) high_res = datasets[3] - if 'rows_per_scan' in high_res.attrs: - new_attrs.setdefault('rows_per_scan', high_res.attrs['rows_per_scan']) - new_attrs.setdefault('resolution', high_res.attrs['resolution']) + if "rows_per_scan" in high_res.attrs: + new_attrs.setdefault("rows_per_scan", high_res.attrs["rows_per_scan"]) + new_attrs.setdefault("resolution", high_res.attrs["resolution"]) else: LOG.debug("No sharpening band specified for ratio sharpening") high_res = None - bands = {'red': low_res_red, 'green': low_res_green, 'blue': low_res_blue} + bands = {"red": low_res_red, "green": low_res_green, "blue": low_res_blue} if high_res is not None: self._sharpen_bands_with_high_res(bands, high_res) - return bands['red'], bands['green'], bands['blue'], new_attrs + return bands["red"], bands["green"], bands["blue"], new_attrs def _sharpen_bands_with_high_res(self, bands, high_res): ratio = da.map_blocks( @@ -1170,9 +1170,9 @@ def _mean4(data, offset=(0, 0), block_id=None): rows2 = rows + row_offset + row_after cols2 = cols + col_offset + col_after - av_data = np.pad(data, pad, 'edge') + av_data = np.pad(data, pad, "edge") new_shape = (int(rows2 / 2.), 2, int(cols2 / 2.), 2) - with np.errstate(invalid='ignore'): + with np.errstate(invalid="ignore"): data_mean = np.nanmean(av_data.reshape(new_shape), axis=(1, 3)) data_mean = np.repeat(np.repeat(data_mean, 2, axis=0), 2, axis=1) data_mean = data_mean[row_offset:row_offset + rows, col_offset:col_offset + cols] @@ -1199,7 +1199,7 @@ class SelfSharpenedRGB(RatioSharpenedRGB): def four_element_average_dask(d): """Average every 4 elements (2x2) in a 2D array.""" try: - offset = d.attrs['area'].crop_offset + offset = d.attrs["area"].crop_offset except (KeyError, AttributeError): offset = (0, 0) @@ -1208,16 +1208,16 @@ def four_element_average_dask(d): def __call__(self, datasets, optional_datasets=None, **attrs): """Generate the composite.""" - colors = ['red', 'green', 'blue'] + colors = ["red", "green", "blue"] if self.high_resolution_color not in colors: raise ValueError("SelfSharpenedRGB requires at least one high resolution band, not " "'{}'".format(self.high_resolution_color)) high_res = datasets[colors.index(self.high_resolution_color)] high_mean = self.four_element_average_dask(high_res) - red = high_mean if self.high_resolution_color == 'red' else datasets[0] - green = high_mean if self.high_resolution_color == 'green' else datasets[1] - blue = high_mean if self.high_resolution_color == 'blue' else datasets[2] + red = high_mean if self.high_resolution_color == "red" else datasets[0] + green = high_mean if self.high_resolution_color == "green" else datasets[1] + blue = high_mean if self.high_resolution_color == "blue" else datasets[2] return super(SelfSharpenedRGB, self).__call__((red, green, blue), optional_datasets=(high_res,), **attrs) @@ -1273,7 +1273,7 @@ def __call__(self, projectables, *args, **kwargs): # Get the enhanced version of the RGB composite to be sharpened rgb_img = enhance2dataset(projectables[1]) # Ignore alpha band when applying luminance - rgb_img = rgb_img.where(rgb_img.bands == 'A', rgb_img * luminance) + 
rgb_img = rgb_img.where(rgb_img.bands == "A", rgb_img * luminance) return super(SandwichCompositor, self).__call__(rgb_img, *args, **kwargs) @@ -1378,7 +1378,7 @@ def __init__(self, name, filename=None, url=None, known_hash=None, area=None, @staticmethod def _check_relative_filename(filename): - data_dir = satpy.config.get('data_dir') + data_dir = satpy.config.get("data_dir") path = os.path.join(data_dir, filename) return path if os.path.exists(path) else filename @@ -1406,9 +1406,9 @@ def register_data_files(self, data_files): if os.path.isabs(self._cache_filename): return [None] return super().register_data_files([{ - 'url': self._url, - 'known_hash': self._known_hash, - 'filename': self._cache_filename, + "url": self._url, + "known_hash": self._known_hash, + "filename": self._cache_filename, }]) def _retrieve_data_file(self): @@ -1421,29 +1421,29 @@ def __call__(self, *args, **kwargs): """Call the compositor.""" from satpy import Scene local_file = self._retrieve_data_file() - scn = Scene(reader='generic_image', filenames=[local_file]) - scn.load(['image']) - img = scn['image'] + scn = Scene(reader="generic_image", filenames=[local_file]) + scn.load(["image"]) + img = scn["image"] # use compositor parameters as extra metadata # most important: set 'name' of the image img.attrs.update(self.attrs) # Check for proper area definition. Non-georeferenced images # do not have `area` in the attributes - if 'area' not in img.attrs: + if "area" not in img.attrs: if self.area is None: raise AttributeError("Area definition needs to be configured") - img.attrs['area'] = self.area - img.attrs['sensor'] = None - img.attrs['mode'] = ''.join(img.bands.data) - img.attrs.pop('modifiers', None) - img.attrs.pop('calibration', None) + img.attrs["area"] = self.area + img.attrs["sensor"] = None + img.attrs["mode"] = "".join(img.bands.data) + img.attrs.pop("modifiers", None) + img.attrs.pop("calibration", None) # Add start time if not present in the filename - if 'start_time' not in img.attrs or not img.attrs['start_time']: + if "start_time" not in img.attrs or not img.attrs["start_time"]: import datetime as dt - img.attrs['start_time'] = dt.datetime.utcnow() - if 'end_time' not in img.attrs or not img.attrs['end_time']: + img.attrs["start_time"] = dt.datetime.utcnow() + if "end_time" not in img.attrs or not img.attrs["end_time"]: import datetime as dt - img.attrs['end_time'] = dt.datetime.utcnow() + img.attrs["end_time"] = dt.datetime.utcnow() return img @@ -1461,8 +1461,8 @@ def __call__(self, projectables, *args, **kwargs): # L/RGB -> RGB/RGB # LA/RGB -> RGBA/RGBA # RGB/RGBA -> RGBA/RGBA - foreground = add_bands(foreground, background['bands']) - background = add_bands(background, foreground['bands']) + foreground = add_bands(foreground, background["bands"]) + background = add_bands(background, foreground["bands"]) attrs = self._combine_metadata_with_mode_and_sensor(foreground, background) data = self._get_merged_image_data(foreground, background) @@ -1479,18 +1479,18 @@ def _combine_metadata_with_mode_and_sensor(self, # 'mode' is no longer valid after we've remove the 'A' # let the base class __call__ determine mode attrs.pop("mode", None) - if attrs.get('sensor') is None: + if attrs.get("sensor") is None: # sensor can be a set - attrs['sensor'] = self._get_sensors([foreground, background]) + attrs["sensor"] = self._get_sensors([foreground, background]) return attrs @staticmethod def _get_merged_image_data(foreground: xr.DataArray, background: xr.DataArray ) -> list[xr.DataArray]: - if 'A' in 
foreground.attrs['mode']: + if "A" in foreground.attrs["mode"]: # Use alpha channel as weight and blend the two composites - alpha = foreground.sel(bands='A') + alpha = foreground.sel(bands="A") data = [] # NOTE: there's no alpha band in the output image, it will # be added by the data writer @@ -1503,7 +1503,7 @@ def _get_merged_image_data(foreground: xr.DataArray, else: data_arr = xr.where(foreground.isnull(), background, foreground) # Split to separate bands so the mode is correct - data = [data_arr.sel(bands=b) for b in data_arr['bands']] + data = [data_arr.sel(bands=b) for b in data_arr["bands"]] return data @@ -1577,9 +1577,9 @@ def __init__(self, name, transparency=None, conditions=None, mode="LA", "MaskingCompositor, use 'conditions' instead.") self.conditions = [] for key, transp in transparency.items(): - self.conditions.append({'method': 'equal', - 'value': key, - 'transparency': transp}) + self.conditions.append({"method": "equal", + "value": key, + "transparency": transp}) LOG.info("Converted 'transparency' to 'conditions': %s", str(self.conditions)) else: @@ -1643,8 +1643,8 @@ def _select_data_bands(self, data_in): From input data, select the bands that need to have masking applied. """ - if 'bands' in data_in.dims: - return [data_in.sel(bands=b) for b in data_in['bands'] if b != 'A'] + if "bands" in data_in.dims: + return [data_in.sel(bands=b) for b in data_in["bands"] if b != "A"] if self.mode == "RGBA": return [data_in, data_in, data_in] return [data_in] @@ -1656,16 +1656,16 @@ def _get_alpha_bands(self, data, mask_in, alpha_attrs): """ # Create alpha band mask_data = mask_in.data - alpha = da.ones((data[0].sizes['y'], - data[0].sizes['x']), + alpha = da.ones((data[0].sizes["y"], + data[0].sizes["x"]), chunks=data[0].chunks) for condition in self.conditions: - method = condition['method'] - value = condition.get('value', None) + method = condition["method"] + value = condition.get("value", None) if isinstance(value, str): value = _get_flag_value(mask_in, value) - transparency = condition['transparency'] + transparency = condition["transparency"] mask = self._get_mask(method, value, mask_data) if transparency == 100.0: @@ -1684,8 +1684,8 @@ def _get_flag_value(mask, val): NWC SAF GEO/PPS softwares. 
""" - flag_meanings = mask.attrs['flag_meanings'] - flag_values = mask.attrs['flag_values'] + flag_meanings = mask.attrs["flag_meanings"] + flag_values = mask.attrs["flag_values"] if isinstance(flag_meanings, str): flag_meanings = flag_meanings.split() diff --git a/satpy/composites/cloud_products.py b/satpy/composites/cloud_products.py index a05be8ad17..4dbc2e489f 100644 --- a/satpy/composites/cloud_products.py +++ b/satpy/composites/cloud_products.py @@ -31,7 +31,7 @@ def __call__(self, projectables, **info): raise ValueError("Expected 2 datasets, got %d" % (len(projectables), )) data, status = projectables - valid = status != status.attrs['_FillValue'] + valid = status != status.attrs["_FillValue"] status_cloud_free = status % 2 == 1 # bit 0 is set cloud_free = np.logical_and(valid, status_cloud_free) if "bad_optical_conditions" in status.attrs.get("flag_meanings", "") and data.name == "cmic_cre": @@ -43,7 +43,7 @@ def __call__(self, projectables, **info): # Keep cloudfree or valid product data = data.where(np.logical_or(cloud_free, data != data.attrs["scaled_FillValue"]), np.nan) res = SingleBandCompositor.__call__(self, [data], **data.attrs) - res.attrs['_FillValue'] = np.nan + res.attrs["_FillValue"] = np.nan return res @@ -56,15 +56,15 @@ def __call__(self, projectables, **info): raise ValueError("Expected 2 datasets, got %d" % (len(projectables), )) data, cma = projectables - valid_cma = cma != cma.attrs['_FillValue'] - valid_prod = data != data.attrs['_FillValue'] + valid_cma = cma != cma.attrs["_FillValue"] + valid_prod = data != data.attrs["_FillValue"] valid_prod = np.logical_and(valid_prod, np.logical_not(np.isnan(data))) # Update valid_cma and not valid_prod means: keep not valid cma or valid prod data = data.where(np.logical_or(np.logical_not(valid_cma), valid_prod), data.attrs["scaled_FillValue"]) data = data.where(np.logical_or(valid_prod, valid_cma), np.nan) res = SingleBandCompositor.__call__(self, [data], **data.attrs) - res.attrs['_FillValue'] = np.nan + res.attrs["_FillValue"] = np.nan return res @@ -95,15 +95,15 @@ def __call__(self, projectables, *args, **kwargs): scalef1 = 1.0 / maxs1 - 1 / 255.0 p1data = (light*scalef1).where(light != 0) - p1data = p1data.where(light != light.attrs['_FillValue']) + p1data = p1data.where(light != light.attrs["_FillValue"]) p1data.attrs = light.attrs data = moderate*scalef2 p2data = data.where(moderate != 0) - p2data = p2data.where(moderate != moderate.attrs['_FillValue']) + p2data = p2data.where(moderate != moderate.attrs["_FillValue"]) p2data.attrs = moderate.attrs data = intense*scalef3 p3data = data.where(intense != 0) - p3data = p3data.where(intense != intense.attrs['_FillValue']) + p3data = p3data.where(intense != intense.attrs["_FillValue"]) p3data.attrs = intense.attrs res = super(PrecipCloudsRGB, self).__call__((p3data, p2data, p1data), diff --git a/satpy/composites/config_loader.py b/satpy/composites/config_loader.py index e5af45355b..bffbee8a13 100644 --- a/satpy/composites/config_loader.py +++ b/satpy/composites/config_loader.py @@ -38,10 +38,10 @@ def _convert_dep_info_to_data_query(dep_info): key_item = dep_info.copy() - key_item.pop('prerequisites', None) - key_item.pop('optional_prerequisites', None) - if 'modifiers' in key_item: - key_item['modifiers'] = tuple(key_item['modifiers']) + key_item.pop("prerequisites", None) + key_item.pop("optional_prerequisites", None) + if "modifiers" in key_item: + key_item["modifiers"] = tuple(key_item["modifiers"]) key = DataQuery.from_dict(key_item) return key @@ -64,14 +64,14 @@ 
def _create_comp_from_info(self, composite_info, loader): def _handle_inline_comp_dep(self, dep_info, dep_num, parent_name): # Create an unique temporary name for the composite - sub_comp_name = '_' + parent_name + '_dep_{}'.format(dep_num) - dep_info['name'] = sub_comp_name + sub_comp_name = "_" + parent_name + "_dep_{}".format(dep_num) + dep_info["name"] = sub_comp_name self._load_config_composite(dep_info) @staticmethod def _get_compositor_loader_from_config(composite_name, composite_info): try: - loader = composite_info.pop('compositor') + loader = composite_info.pop("compositor") except KeyError: raise ValueError("'compositor' key missing or empty for '{}'. Option keys = {}".format( composite_name, str(composite_info.keys()))) @@ -79,22 +79,22 @@ def _get_compositor_loader_from_config(composite_name, composite_info): def _process_composite_deps(self, composite_info): dep_num = -1 - for prereq_type in ['prerequisites', 'optional_prerequisites']: + for prereq_type in ["prerequisites", "optional_prerequisites"]: prereqs = [] for dep_info in composite_info.get(prereq_type, []): dep_num += 1 if not isinstance(dep_info, dict): prereqs.append(dep_info) continue - elif 'compositor' in dep_info: + elif "compositor" in dep_info: self._handle_inline_comp_dep( - dep_info, dep_num, composite_info['name']) + dep_info, dep_num, composite_info["name"]) prereq_key = _convert_dep_info_to_data_query(dep_info) prereqs.append(prereq_key) composite_info[prereq_type] = prereqs def _load_config_composite(self, composite_info): - composite_name = composite_info['name'] + composite_name = composite_info["name"] loader = self._get_compositor_loader_from_config(composite_name, composite_info) self._process_composite_deps(composite_info) key, comp = self._create_comp_from_info(composite_info, loader) @@ -102,7 +102,7 @@ def _load_config_composite(self, composite_info): def _load_config_composites(self, configured_composites): for composite_name, composite_info in configured_composites.items(): - composite_info['name'] = composite_name + composite_info["name"] = composite_name self._load_config_composite(composite_info) def parse_config(self, configured_composites, composite_configs): @@ -128,9 +128,9 @@ def __init__(self, loaded_modifiers, sensor_id_keys): @staticmethod def _get_modifier_loader_from_config(modifier_name, modifier_info): try: - loader = modifier_info.pop('modifier', None) + loader = modifier_info.pop("modifier", None) if loader is None: - loader = modifier_info.pop('compositor') + loader = modifier_info.pop("compositor") warnings.warn( "Modifier '{}' uses deprecated 'compositor' " "key to point to Python class, replace " @@ -143,7 +143,7 @@ def _get_modifier_loader_from_config(modifier_name, modifier_info): return loader def _process_modifier_deps(self, modifier_info): - for prereq_type in ['prerequisites', 'optional_prerequisites']: + for prereq_type in ["prerequisites", "optional_prerequisites"]: prereqs = [] for dep_info in modifier_info.get(prereq_type, []): if not isinstance(dep_info, dict): @@ -154,14 +154,14 @@ def _process_modifier_deps(self, modifier_info): modifier_info[prereq_type] = prereqs def _load_config_modifier(self, modifier_info): - modifier_name = modifier_info['name'] + modifier_name = modifier_info["name"] loader = self._get_modifier_loader_from_config(modifier_name, modifier_info) self._process_modifier_deps(modifier_info) self.loaded_modifiers[modifier_name] = (loader, modifier_info) def _load_config_modifiers(self, configured_modifiers): for modifier_name, modifier_info in 
configured_modifiers.items(): - modifier_info['name'] = modifier_name + modifier_info["name"] = modifier_name self._load_config_modifier(modifier_info) def parse_config(self, configured_modifiers, composite_configs): @@ -179,10 +179,10 @@ def _load_config(composite_configs): conf = {} for composite_config in composite_configs: - with open(composite_config, 'r', encoding='utf-8') as conf_file: + with open(composite_config, "r", encoding="utf-8") as conf_file: conf = recursive_dict_update(conf, yaml.load(conf_file, Loader=UnsafeLoader)) try: - sensor_name = conf['sensor_name'] + sensor_name = conf["sensor_name"] except KeyError: logger.debug('No "sensor_name" tag found in %s, skipping.', composite_configs) @@ -192,7 +192,7 @@ def _load_config(composite_configs): sensor_modifiers = {} dep_id_keys = None - sensor_deps = sensor_name.split('/')[:-1] + sensor_deps = sensor_name.split("/")[:-1] if sensor_deps: # get dependent for sensor_dep in sensor_deps: @@ -203,18 +203,18 @@ def _load_config(composite_configs): id_keys = _get_sensor_id_keys(conf, dep_id_keys) mod_config_helper = _ModifierConfigHelper(sensor_modifiers, id_keys) - configured_modifiers = conf.get('modifiers', {}) + configured_modifiers = conf.get("modifiers", {}) mod_config_helper.parse_config(configured_modifiers, composite_configs) comp_config_helper = _CompositeConfigHelper(sensor_compositors, id_keys) - configured_composites = conf.get('composites', {}) + configured_composites = conf.get("composites", {}) comp_config_helper.parse_config(configured_composites, composite_configs) return sensor_compositors, sensor_modifiers, id_keys def _get_sensor_id_keys(conf, parent_id_keys): try: - id_keys = conf['composite_identification_keys'] + id_keys = conf["composite_identification_keys"] except KeyError: id_keys = parent_id_keys if not id_keys: @@ -270,7 +270,7 @@ def load_compositor_configs_for_sensor(sensor_name: str) -> tuple[dict[str, dict """ config_filename = sensor_name + ".yaml" logger.debug("Looking for composites config file %s", config_filename) - paths = get_entry_points_config_dirs('satpy.composites') + paths = get_entry_points_config_dirs("satpy.composites") composite_configs = config_search_paths( os.path.join("composites", config_filename), search_dirs=paths, check_exists=True) @@ -310,12 +310,12 @@ def load_compositor_configs_for_sensors(sensor_names: Iterable[str]) -> tuple[di def all_composite_sensors(): """Get all sensor names from available composite configs.""" - paths = get_entry_points_config_dirs('satpy.composites') + paths = get_entry_points_config_dirs("satpy.composites") composite_configs = glob_config( os.path.join("composites", "*.yaml"), search_dirs=paths) yaml_names = set([os.path.splitext(os.path.basename(fn))[0] for fn in composite_configs]) - non_sensor_yamls = ('visir',) + non_sensor_yamls = ("visir",) sensor_names = [x for x in yaml_names if x not in non_sensor_yamls] return sensor_names diff --git a/satpy/composites/glm.py b/satpy/composites/glm.py index 48fe6b922c..e9b6ef275e 100644 --- a/satpy/composites/glm.py +++ b/satpy/composites/glm.py @@ -74,7 +74,7 @@ def __init__(self, name, min_highlight=0.0, max_highlight=10.0, def _get_enhanced_background_data(background_layer): img = get_enhanced_image(background_layer) img.data = img.data.clip(0.0, 1.0) - img = img.convert('RGBA') + img = img.convert("RGBA") return img.data def _get_highlight_factor(self, highlight_data): @@ -93,10 +93,10 @@ def _apply_highlight_effect(self, background_data, factor): def _update_attrs(self, new_data, 
background_layer, highlight_layer): new_data.attrs = background_layer.attrs.copy() - new_data.attrs['units'] = 1 + new_data.attrs["units"] = 1 new_sensors = self._get_sensors((highlight_layer, background_layer)) new_data.attrs.update({ - 'sensor': new_sensors, + "sensor": new_sensors, }) def __call__(self, projectables, optional_datasets=None, **attrs): @@ -107,7 +107,7 @@ def __call__(self, projectables, optional_datasets=None, **attrs): # Adjust the colors of background by highlight layer factor = self._get_highlight_factor(highlight_product) new_channels = self._apply_highlight_effect(background_data, factor) - new_data = xr.concat(new_channels, dim='bands') + new_data = xr.concat(new_channels, dim="bands") self._update_attrs(new_data, background_layer, highlight_product) return super(HighlightCompositor, self).__call__((new_data,), **attrs) diff --git a/satpy/composites/viirs.py b/satpy/composites/viirs.py index 1dd0523053..5df2d482af 100644 --- a/satpy/composites/viirs.py +++ b/satpy/composites/viirs.py @@ -90,8 +90,8 @@ def _run_dnb_normalization(self, dnb_data, sza_data): """ # convert dask arrays to DataArray objects - dnb_data = xr.DataArray(dnb_data, dims=('y', 'x')) - sza_data = xr.DataArray(sza_data, dims=('y', 'x')) + dnb_data = xr.DataArray(dnb_data, dims=("y", "x")) + sza_data = xr.DataArray(sza_data, dims=("y", "x")) good_mask = ~(dnb_data.isnull() | sza_data.isnull()) output_dataset = dnb_data.where(good_mask) @@ -904,8 +904,8 @@ def __call__(self, datasets, **info): dnb_data += 2.6e-10 dnb_data *= gtot - mda['name'] = self.attrs['name'] - mda['standard_name'] = 'ncc_radiance' + mda["name"] = self.attrs["name"] + mda["standard_name"] = "ncc_radiance" dnb_data.attrs = mda return dnb_data diff --git a/satpy/dataset/anc_vars.py b/satpy/dataset/anc_vars.py index 071a21d786..90b2d7bd3c 100644 --- a/satpy/dataset/anc_vars.py +++ b/satpy/dataset/anc_vars.py @@ -27,7 +27,7 @@ def dataset_walker(datasets): """ for dataset in datasets: yield dataset, None - for anc_ds in dataset.attrs.get('ancillary_variables', []): + for anc_ds in dataset.attrs.get("ancillary_variables", []): try: anc_ds.attrs yield anc_ds, dataset @@ -40,12 +40,12 @@ def replace_anc(dataset, parent_dataset): if parent_dataset is None: return id_keys = parent_dataset.attrs.get( - '_satpy_id_keys', + "_satpy_id_keys", dataset.attrs.get( - '_satpy_id_keys', + "_satpy_id_keys", default_id_keys_config)) current_dataid = DataID(id_keys, **dataset.attrs) - for idx, ds in enumerate(parent_dataset.attrs['ancillary_variables']): + for idx, ds in enumerate(parent_dataset.attrs["ancillary_variables"]): if current_dataid == DataID(id_keys, **ds.attrs): - parent_dataset.attrs['ancillary_variables'][idx] = dataset + parent_dataset.attrs["ancillary_variables"][idx] = dataset return diff --git a/satpy/dataset/data_dict.py b/satpy/dataset/data_dict.py index eb9d8b9662..790d688b24 100644 --- a/satpy/dataset/data_dict.py +++ b/satpy/dataset/data_dict.py @@ -133,9 +133,9 @@ def keys(self, names=False, wavelengths=False): # sort keys so things are a little more deterministic (.keys() is not) keys = sorted(super(DatasetDict, self).keys()) if names: - return (k.get('name') for k in keys) + return (k.get("name") for k in keys) elif wavelengths: - return (k.get('wavelength') for k in keys) + return (k.get("wavelength") for k in keys) else: return keys @@ -181,7 +181,7 @@ def get(self, key, default=None): def __setitem__(self, key, value): """Support assigning 'Dataset' objects or dictionaries of metadata.""" - if hasattr(value, 'attrs'): + 
if hasattr(value, "attrs"): # xarray.DataArray objects value_info = value.attrs else: @@ -198,7 +198,7 @@ def __setitem__(self, key, value): if isinstance(value_info, dict): value_info.update(new_info) if isinstance(key, DataID): - value_info['_satpy_id'] = key + value_info["_satpy_id"] = key return super(DatasetDict, self).__setitem__(key, value) @@ -215,21 +215,21 @@ def _create_dataid_key(self, key, value_info): else: new_name = value_info.get("name") # this is a new key and it's not a full DataID tuple - if new_name is None and value_info.get('wavelength') is None: + if new_name is None and value_info.get("wavelength") is None: raise ValueError("One of 'name' or 'wavelength' attrs " "values should be set.") id_keys = self._create_id_keys_from_dict(value_info) - value_info['name'] = new_name + value_info["name"] = new_name key = DataID(id_keys, **value_info) return key def _create_id_keys_from_dict(self, value_info_dict): """Create id_keys from dict.""" try: - id_keys = value_info_dict['_satpy_id'].id_keys + id_keys = value_info_dict["_satpy_id"].id_keys except KeyError: try: - id_keys = value_info_dict['_satpy_id_keys'] + id_keys = value_info_dict["_satpy_id_keys"] except KeyError: id_keys = minimal_default_keys_config return id_keys diff --git a/satpy/dataset/dataid.py b/satpy/dataset/dataid.py index ded6cec146..f52c6494b7 100644 --- a/satpy/dataset/dataid.py +++ b/satpy/dataset/dataid.py @@ -34,10 +34,10 @@ def get_keys_from_config(common_id_keys, config): for key, val in common_id_keys.items(): if key in config: id_keys[key] = val - elif val is not None and (val.get('required') is True or val.get('default') is not None): + elif val is not None and (val.get("required") is True or val.get("default") is not None): id_keys[key] = val if not id_keys: - raise ValueError('Metadata does not contain enough information to create a DataID.') + raise ValueError("Metadata does not contain enough information to create a DataID.") return id_keys @@ -57,7 +57,7 @@ def convert(cls, value): try: return cls[value] except KeyError: - raise ValueError('{} invalid value for {}'.format(value, cls)) + raise ValueError("{} invalid value for {}".format(value, cls)) @classmethod def _unpickle(cls, enum_name, enum_members, enum_member): @@ -88,10 +88,10 @@ def __hash__(self): def __repr__(self): """Represent the values.""" - return '<' + str(self) + '>' + return "<" + str(self) + ">" -wlklass = namedtuple("WavelengthRange", "min central max unit", defaults=('µm',)) # type: ignore +wlklass = namedtuple("WavelengthRange", "min central max unit", defaults=("µm",)) # type: ignore class WavelengthRange(wlklass): @@ -196,7 +196,7 @@ def _read_cf_from_string_export(cls, blob): from trollsift import Parser parser = Parser(pattern) res_dict = parser.parse(blob) - res_dict.pop('unit2') + res_dict.pop("unit2") obj = cls(**res_dict) return obj @@ -239,45 +239,45 @@ def __hash__(self): #: Default ID keys DataArrays. 
-default_id_keys_config = {'name': { - 'required': True, +default_id_keys_config = {"name": { + "required": True, }, - 'wavelength': { - 'type': WavelengthRange, + "wavelength": { + "type": WavelengthRange, }, - 'resolution': { - 'transitive': False, + "resolution": { + "transitive": False, }, - 'calibration': { - 'enum': [ - 'reflectance', - 'brightness_temperature', - 'radiance', - 'counts' + "calibration": { + "enum": [ + "reflectance", + "brightness_temperature", + "radiance", + "counts" ], - 'transitive': True, + "transitive": True, }, - 'modifiers': { - 'default': ModifierTuple(), - 'type': ModifierTuple, + "modifiers": { + "default": ModifierTuple(), + "type": ModifierTuple, }, } #: Default ID keys for coordinate DataArrays. -default_co_keys_config = {'name': { - 'required': True, +default_co_keys_config = {"name": { + "required": True, }, - 'resolution': { - 'transitive': True, + "resolution": { + "transitive": True, } } #: Minimal ID keys for DataArrays, for example composites. -minimal_default_keys_config = {'name': { - 'required': True, +minimal_default_keys_config = {"name": { + "required": True, }, - 'resolution': { - 'transitive': True, + "resolution": { + "transitive": True, } } @@ -312,11 +312,11 @@ def fix_id_keys(id_keys): for key, val in id_keys.items(): if not val: continue - if 'enum' in val and 'type' in val: - raise ValueError('Cannot have both type and enum for the same id key.') + if "enum" in val and "type" in val: + raise ValueError("Cannot have both type and enum for the same id key.") new_val = copy(val) - if 'enum' in val: - new_val['type'] = ValueList(key, ' '.join(new_val.pop('enum'))) + if "enum" in val: + new_val["type"] = ValueList(key, " ".join(new_val.pop("enum"))) new_id_keys[key] = new_val return new_id_keys @@ -328,12 +328,12 @@ def convert_dict(self, keyvals): for key, val in self._id_keys.items(): if val is None: val = {} - if key in keyvals or val.get('default') is not None or val.get('required'): - curated_val = keyvals.get(key, val.get('default')) - if 'required' in val and curated_val is None: - raise ValueError('Required field {} missing.'.format(key)) - if 'type' in val: - curated[key] = val['type'].convert(curated_val) + if key in keyvals or val.get("default") is not None or val.get("required"): + curated_val = keyvals.get(key, val.get("default")) + if "required" in val and curated_val is None: + raise ValueError("Required field {} missing.".format(key)) + if "type" in val: + curated[key] = val["type"].convert(curated_val) elif curated_val is not None: curated[key] = curated_val @@ -355,17 +355,17 @@ def from_dict(self, keyvals): @classmethod def from_dataarray(cls, array, default_keys=minimal_default_keys_config): """Get the DataID using the dataarray attributes.""" - if '_satpy_id' in array.attrs: - return array.attrs['_satpy_id'] + if "_satpy_id" in array.attrs: + return array.attrs["_satpy_id"] return cls.new_id_from_dataarray(array, default_keys) @classmethod def new_id_from_dataarray(cls, array, default_keys=minimal_default_keys_config): """Create a new DataID from a dataarray's attributes.""" try: - id_keys = array.attrs['_satpy_id'].id_keys + id_keys = array.attrs["_satpy_id"].id_keys except KeyError: - id_keys = array.attrs.get('_satpy_id_keys', default_keys) + id_keys = array.attrs.get("_satpy_id_keys", default_keys) return cls(id_keys, **array.attrs) @property @@ -380,7 +380,7 @@ def create_filter_query_without_required_fields(self, query): except AttributeError: new_query = query.copy() for key, val in self._id_keys.items(): - if 
val and (val.get('transitive') is not True): + if val and (val.get("transitive") is not True): new_query.pop(key, None) return DataQuery.from_dict(new_query) @@ -430,7 +430,7 @@ def __hash__(self): def _immutable(self, *args, **kws) -> NoReturn: """Raise and error.""" - raise TypeError('Cannot change a DataID') + raise TypeError("Cannot change a DataID") def __lt__(self, other): """Check lesser than.""" @@ -468,7 +468,7 @@ def _find_modifiers_key(self): def create_less_modified_query(self): """Create a query with one less modifier.""" new_dict = self.to_dict() - new_dict['modifiers'] = tuple(new_dict['modifiers'][:-1]) + new_dict["modifiers"] = tuple(new_dict["modifiers"][:-1]) return DataQuery.from_dict(new_dict) def is_modified(self): @@ -535,7 +535,7 @@ def __hash__(self): fields = [] values = [] for field, value in sorted(self._dict.items()): - if value != '*': + if value != "*": fields.append(field) if isinstance(value, (list, set)): value = tuple(value) @@ -567,7 +567,7 @@ def to_dict(self, trim=True): def _to_trimmed_dict(self): return {key: val for key, val in self._dict.items() - if val != '*'} + if val != "*"} def __repr__(self): """Represent the query.""" @@ -594,7 +594,7 @@ def _shares_required_keys(self, dataid): """Check if dataid shares required keys with the current query.""" for key, val in dataid._id_keys.items(): try: - if val.get('required', False): + if val.get("required", False): if key in self._fields: return True except AttributeError: @@ -603,7 +603,7 @@ def _shares_required_keys(self, dataid): def _match_query_value(self, key, id_val): val = self._dict[key] - if val == '*': + if val == "*": return True if isinstance(id_val, tuple) and isinstance(val, (tuple, list)): return tuple(val) == id_val @@ -663,8 +663,8 @@ def sort_dataids(self, dataids): for key in keys: if distance == np.inf: break - val = self._dict.get(key, '*') - if val == '*': + val = self._dict.get(key, "*") + if val == "*": distance = self._add_absolute_distance(dataid, key, distance) else: try: @@ -710,12 +710,12 @@ def _add_distance_from_query(dataid_val, requested_val, distance): def create_less_modified_query(self): """Create a query with one less modifier.""" new_dict = self.to_dict() - new_dict['modifiers'] = tuple(new_dict['modifiers'][:-1]) + new_dict["modifiers"] = tuple(new_dict["modifiers"][:-1]) return DataQuery.from_dict(new_dict) def is_modified(self): """Check if this is modified.""" - return bool(self._dict.get('modifiers')) + return bool(self._dict.get("modifiers")) def create_filtered_query(dataset_key, filter_query): @@ -734,7 +734,7 @@ def create_filtered_query(dataset_key, filter_query): def _update_dict_with_filter_query(ds_dict, filter_query): if filter_query is not None: for key, value in filter_query.items(): - if value != '*': + if value != "*": ds_dict.setdefault(key, value) @@ -743,9 +743,9 @@ def _create_id_dict_from_any_key(dataset_key): ds_dict = dataset_key.to_dict() except AttributeError: if isinstance(dataset_key, str): - ds_dict = {'name': dataset_key} + ds_dict = {"name": dataset_key} elif isinstance(dataset_key, numbers.Number): - ds_dict = {'wavelength': dataset_key} + ds_dict = {"wavelength": dataset_key} else: raise TypeError("Don't know how to interpret a dataset_key of type {}".format(type(dataset_key))) return ds_dict diff --git a/satpy/dataset/metadata.py b/satpy/dataset/metadata.py index 4ba3cde1a1..46f6f622b8 100644 --- a/satpy/dataset/metadata.py +++ b/satpy/dataset/metadata.py @@ -79,7 +79,7 @@ def _combine_shared_info(shared_keys, info_dicts, 
average_times): shared_info = {} for key in shared_keys: values = [info[key] for info in info_dicts] - if 'time' in key and isinstance(values[0], datetime) and average_times: + if "time" in key and isinstance(values[0], datetime) and average_times: shared_info[key] = average_datetimes(values) elif _are_values_combinable(values): shared_info[key] = values[0] @@ -146,7 +146,7 @@ def _all_arrays_equal(arrays): If the arrays are lazy, just check if they have the same identity. """ - if hasattr(arrays[0], 'compute'): + if hasattr(arrays[0], "compute"): return _all_identical(arrays) return _all_values_equal(arrays) diff --git a/satpy/demo/ahi_hsd.py b/satpy/demo/ahi_hsd.py index 784d90719f..5731b23f01 100644 --- a/satpy/demo/ahi_hsd.py +++ b/satpy/demo/ahi_hsd.py @@ -29,7 +29,7 @@ def download_typhoon_surigae_ahi(base_dir=None, This scene shows the Typhoon Surigae. """ import s3fs - base_dir = base_dir or config.get('demo_data_dir', '.') + base_dir = base_dir or config.get("demo_data_dir", ".") channel_resolution = {1: 10, 2: 10, 3: 5, @@ -40,7 +40,7 @@ def download_typhoon_surigae_ahi(base_dir=None, for segment in segments: data_files.append(f"HS_H08_20210417_0500_B{channel:02d}_FLDK_R{resolution:02d}_S{segment:02d}10.DAT.bz2") - subdir = os.path.join(base_dir, 'ahi_hsd', '20210417_0500_typhoon_surigae') + subdir = os.path.join(base_dir, "ahi_hsd", "20210417_0500_typhoon_surigae") os.makedirs(subdir, exist_ok=True) fs = s3fs.S3FileSystem(anon=True) @@ -50,7 +50,7 @@ def download_typhoon_surigae_ahi(base_dir=None, result.append(destination_filename) if os.path.exists(destination_filename): continue - to_get = 'noaa-himawari8/AHI-L1b-FLDK/2021/04/17/0500/' + filename + to_get = "noaa-himawari8/AHI-L1b-FLDK/2021/04/17/0500/" + filename fs.get_file(to_get, destination_filename) return result diff --git a/satpy/dependency_tree.py b/satpy/dependency_tree.py index 331483cabc..d99fb536eb 100644 --- a/satpy/dependency_tree.py +++ b/satpy/dependency_tree.py @@ -431,7 +431,7 @@ def _find_compositor(self, dataset_key, query): compositor = self.get_modifier(dataset_key) except KeyError: raise KeyError("Can't find anything called {}".format(str(dataset_key))) - compositor.attrs['prerequisites'] = [implicit_dependency_node] + list(compositor.attrs['prerequisites']) + compositor.attrs["prerequisites"] = [implicit_dependency_node] + list(compositor.attrs["prerequisites"]) else: try: compositor = self.get_compositor(dataset_key) @@ -446,14 +446,14 @@ def _find_compositor(self, dataset_key, query): # Get the prerequisites LOG.trace("Looking for composite prerequisites for: {}".format(dataset_key)) prereqs = [create_filtered_query(prereq, prerequisite_filter) if not isinstance(prereq, Node) else prereq - for prereq in compositor.attrs['prerequisites']] + for prereq in compositor.attrs["prerequisites"]] prereqs = self._create_required_subtrees(root, prereqs, query=query) root.add_required_nodes(prereqs) # Get the optionals LOG.trace("Looking for optional prerequisites for: {}".format(dataset_key)) optionals = [create_filtered_query(prereq, prerequisite_filter) if not isinstance(prereq, Node) else prereq - for prereq in compositor.attrs['optional_prerequisites']] + for prereq in compositor.attrs["optional_prerequisites"]] optionals = self._create_optional_subtrees(root, optionals, query=query) root.add_optional_nodes(optionals) @@ -501,7 +501,7 @@ def get_compositor(self, key): def get_modifier(self, comp_id): """Get a modifer.""" # create a DataID for the compositor we are generating - modifier = 
comp_id['modifiers'][-1] + modifier = comp_id["modifiers"][-1] for sensor_name in sorted(self.modifiers): modifiers = self.modifiers[sensor_name] compositors = self.compositors[sensor_name] @@ -511,7 +511,7 @@ def get_modifier(self, comp_id): mloader, moptions = modifiers[modifier] moptions = moptions.copy() moptions.update(comp_id.to_dict()) - moptions['sensor'] = sensor_name + moptions["sensor"] = sensor_name compositors[comp_id] = mloader(_satpy_id=comp_id, **moptions) return compositors[comp_id] @@ -544,7 +544,7 @@ def _create_optional_subtrees(self, parent, prereqs, query=None): for prereq, unknowns in unknown_datasets.items(): u_str = ", ".join([str(x) for x in unknowns]) - LOG.debug('Skipping optional %s: Unknown dataset %s', + LOG.debug("Skipping optional %s: Unknown dataset %s", str(prereq), u_str) return prereq_nodes diff --git a/satpy/enhancements/__init__.py b/satpy/enhancements/__init__.py index 6f6a66654d..b74cc2c8bd 100644 --- a/satpy/enhancements/__init__.py +++ b/satpy/enhancements/__init__.py @@ -57,8 +57,8 @@ def exclude_alpha(func): @wraps(func) def wrapper(data, **kwargs): - bands = data.coords['bands'].values - exclude = ['A'] if 'A' in bands else [] + bands = data.coords["bands"].values + exclude = ["A"] if "A" in bands else [] band_data = data.sel(bands=[b for b in bands if b not in exclude]) band_data = func(band_data, **kwargs) @@ -67,7 +67,7 @@ def wrapper(data, **kwargs): attrs.update(band_data.attrs) # combine the new data with the excluded data new_data = xr.concat([band_data, data.sel(bands=exclude)], - dim='bands') + dim="bands") data.data = new_data.sel(bands=bands).data data.attrs = attrs return data @@ -92,12 +92,12 @@ def my_enhancement_function(data): def wrapper(data, **kwargs): attrs = data.attrs data_arrs = [] - for idx, band in enumerate(data.coords['bands'].values): + for idx, band in enumerate(data.coords["bands"].values): band_data = func(data.sel(bands=[band]), index=idx, **kwargs) data_arrs.append(band_data) # we assume that the func can add attrs attrs.update(band_data.attrs) - data.data = xr.concat(data_arrs, dim='bands').data + data.data = xr.concat(data_arrs, dim="bands").data data.attrs = attrs return data @@ -248,9 +248,9 @@ def reinhard_to_srgb(img, saturation=1.25, white=100, **kwargs): white /= 100 # extract color components - r = rgb.sel(bands='R').data - g = rgb.sel(bands='G').data - b = rgb.sel(bands='B').data + r = rgb.sel(bands="R").data + g = rgb.sel(bands="G").data + b = rgb.sel(bands="B").data # saturate luma = _compute_luminance_from_rgb(r, g, b) @@ -280,7 +280,7 @@ def _srgb_gamma(arr): def lookup(img, **kwargs): """Assign values to channels based on a table.""" - luts = np.array(kwargs['luts'], dtype=np.float32) / 255.0 + luts = np.array(kwargs["luts"], dtype=np.float32) / 255.0 return _lookup_table(img.data, luts=luts) @@ -352,7 +352,7 @@ def _merge_colormaps(kwargs, img=None): from trollimage.colormap import Colormap full_cmap = None - palette = kwargs['palettes'] + palette = kwargs["palettes"] if isinstance(palette, Colormap): full_cmap = palette else: @@ -457,11 +457,11 @@ def create_colormap(palette, img=None): information. 
""" - fname = palette.get('filename', None) - colors = palette.get('colors', None) + fname = palette.get("filename", None) + colors = palette.get("colors", None) dataset = palette.get("dataset", None) # are colors between 0-255 or 0-1 - color_scale = palette.get('color_scale', 255) + color_scale = palette.get("color_scale", 255) if fname: if not os.path.exists(fname): fname = get_config_path(fname) @@ -477,9 +477,9 @@ def create_colormap(palette, img=None): if palette.get("reverse", False): cmap.reverse() - if 'min_value' in palette and 'max_value' in palette: + if "min_value" in palette and "max_value" in palette: cmap.set_range(palette["min_value"], palette["max_value"]) - elif 'min_value' in palette or 'max_value' in palette: + elif "min_value" in palette or "max_value" in palette: raise ValueError("Both 'min_value' and 'max_value' must be specified (or neither)") return cmap @@ -498,12 +498,12 @@ def _create_colormap_from_dataset(img, dataset, color_scale): def three_d_effect(img, **kwargs): """Create 3D effect using convolution.""" - w = kwargs.get('weight', 1) + w = kwargs.get("weight", 1) LOG.debug("Applying 3D effect with weight %.2f", w) kernel = np.array([[-w, 0, w], [-w, 1, w], [-w, 0, w]]) - mode = kwargs.get('convolve_mode', 'same') + mode = kwargs.get("convolve_mode", "same") return _three_d_effect(img.data, kernel=kernel, mode=mode) @@ -582,7 +582,7 @@ def jma_true_color_reproduction(img): https://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html """ _jma_true_color_reproduction(img.data, - platform=img.data.attrs['platform_name']) + platform=img.data.attrs["platform_name"]) @exclude_alpha @@ -597,29 +597,29 @@ def _jma_true_color_reproduction(img_data, platform=None): """ # Conversion matrix dictionaries specifying sensor and platform. 
- ccm_dict = {'himawari-8': np.array([[1.1629, 0.1539, -0.2175], + ccm_dict = {"himawari-8": np.array([[1.1629, 0.1539, -0.2175], [-0.0252, 0.8725, 0.1300], [-0.0204, -0.1100, 1.0633]]), - 'himawari-9': np.array([[1.1619, 0.1542, -0.2168], + "himawari-9": np.array([[1.1619, 0.1542, -0.2168], [-0.0271, 0.8749, 0.1295], [-0.0202, -0.1103, 1.0634]]), - 'goes-16': np.array([[1.1425, 0.1819, -0.2250], + "goes-16": np.array([[1.1425, 0.1819, -0.2250], [-0.0951, 0.9363, 0.1360], [-0.0113, -0.1179, 1.0621]]), - 'goes-17': np.array([[1.1437, 0.1818, -0.2262], + "goes-17": np.array([[1.1437, 0.1818, -0.2262], [-0.0952, 0.9354, 0.1371], [-0.0113, -0.1178, 1.0620]]), - 'goes-18': np.array([[1.1629, 0.1539, -0.2175], + "goes-18": np.array([[1.1629, 0.1539, -0.2175], [-0.0252, 0.8725, 0.1300], [-0.0204, -0.1100, 1.0633]]), - 'mtg-i1': np.array([[0.9007, 0.2086, -0.0100], + "mtg-i1": np.array([[0.9007, 0.2086, -0.0100], [-0.0475, 1.0662, -0.0414], [-0.0123, -0.1342, 1.0794]]), - 'geo-kompsat-2a': np.array([[1.1661, 0.1489, -0.2157], + "geo-kompsat-2a": np.array([[1.1661, 0.1489, -0.2157], [-0.0255, 0.8745, 0.1282], [-0.0205, -0.1103, 1.0637]]), } diff --git a/satpy/enhancements/mimic.py b/satpy/enhancements/mimic.py index c2b028f6e8..3a72d5b247 100644 --- a/satpy/enhancements/mimic.py +++ b/satpy/enhancements/mimic.py @@ -296,11 +296,11 @@ def nrl_colors(img, **kwargs): [74.72527472527473, [218, 131, 185]], [75.0, [220, 135, 189]], ]} - kwargs['palettes'].update(nrl_tpw_colors) - palette = kwargs['palettes'] - palette['colors'] = tuple(map(tuple, palette['colors'])) + kwargs["palettes"].update(nrl_tpw_colors) + palette = kwargs["palettes"] + palette["colors"] = tuple(map(tuple, palette["colors"])) - cm = Colormap(*palette['colors']) + cm = Colormap(*palette["colors"]) img.palettize(cm) @@ -310,8 +310,8 @@ def total_precipitable_water(img, **kwargs): This modifies the image's data so the correct colors can be applied to it, and then palettizes the image. """ - palette = kwargs['palettes'] - palette['colors'] = tuple(map(tuple, palette['colors'])) + palette = kwargs["palettes"] + palette["colors"] = tuple(map(tuple, palette["colors"])) - cm = Colormap(*palette['colors']) + cm = Colormap(*palette["colors"]) img.palettize(cm) diff --git a/satpy/enhancements/viirs.py b/satpy/enhancements/viirs.py index 627fc80220..3ed5e2dd5f 100644 --- a/satpy/enhancements/viirs.py +++ b/satpy/enhancements/viirs.py @@ -27,11 +27,11 @@ def water_detection(img, **kwargs): This modifies the image's data so the correct colors can be applied to it, and then palettizes the image. 
""" - palette = kwargs['palettes'] - palette['colors'] = tuple(map(tuple, palette['colors'])) + palette = kwargs["palettes"] + palette["colors"] = tuple(map(tuple, palette["colors"])) _water_detection(img.data) - cm = Colormap(*palette['colors']) + cm = Colormap(*palette["colors"]) img.palettize(cm) diff --git a/satpy/modifiers/_crefl.py b/satpy/modifiers/_crefl.py index 3d34ab9d93..bc42228f26 100644 --- a/satpy/modifiers/_crefl.py +++ b/satpy/modifiers/_crefl.py @@ -70,7 +70,7 @@ def _get_registered_dem_cache_key(self): if not self.url: return reg_files = self.register_data_files([{ - 'url': self.url, 'known_hash': self.known_hash} + "url": self.url, "known_hash": self.known_hash} ]) return reg_files[0] diff --git a/satpy/modifiers/_crefl_utils.py b/satpy/modifiers/_crefl_utils.py index c8d6920056..e83d43b5e2 100644 --- a/satpy/modifiers/_crefl_utils.py +++ b/satpy/modifiers/_crefl_utils.py @@ -282,7 +282,7 @@ def run_crefl(refl, :param avg_elevation: average elevation (usually pre-calculated and stored in CMGDEM.hdf) """ - runner_cls = _runner_class_for_sensor(refl.attrs['sensor']) + runner_cls = _runner_class_for_sensor(refl.attrs["sensor"]) runner = runner_cls(refl) corr_refl = runner(sensor_azimuth, sensor_zenith, solar_azimuth, solar_zenith, avg_elevation) return corr_refl @@ -326,7 +326,7 @@ def _height_from_avg_elevation(self, avg_elevation: Optional[np.ndarray]) -> da. height = 0. else: LOG.debug("Using average elevation information provided to CREFL") - lon, lat = self._refl.attrs['area'].get_lonlats(chunks=self._refl.chunks) + lon, lat = self._refl.attrs["area"].get_lonlats(chunks=self._refl.chunks) height = da.map_blocks(_space_mask_height, lon, lat, avg_elevation, chunks=lon.chunks, dtype=avg_elevation.dtype) return height diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index 28adb60028..8adf32f3d0 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -257,7 +257,7 @@ def _hash_args(*args, unhashable_types=DEFAULT_UNCACHE_TYPES): arg = arg.isoformat(" ") hashable_args.append(arg) arg_hash = hashlib.sha1() # nosec - arg_hash.update(json.dumps(tuple(hashable_args)).encode('utf8')) + arg_hash.update(json.dumps(tuple(hashable_args)).encode("utf8")) return arg_hash.hexdigest() @@ -320,7 +320,7 @@ def _chunks_are_irregular(chunks_tuple: tuple) -> bool: def _geo_dask_to_data_array(arr: da.Array) -> xr.DataArray: - return xr.DataArray(arr, dims=('y', 'x')) + return xr.DataArray(arr, dims=("y", "x")) def compute_relative_azimuth(sat_azi: xr.DataArray, sun_azi: xr.DataArray) -> xr.DataArray: @@ -447,7 +447,7 @@ def _get_sun_azimuth_ndarray(lons: np.ndarray, lats: np.ndarray, start_time: dat def _get_sensor_angles(data_arr: xr.DataArray) -> tuple[xr.DataArray, xr.DataArray]: - preference = satpy.config.get('sensor_angles_position_preference', 'actual') + preference = satpy.config.get("sensor_angles_position_preference", "actual") sat_lon, sat_lat, sat_alt = get_satpos(data_arr, preference=preference) area_def = data_arr.attrs["area"] chunks = _geo_chunks_from_data_arr(data_arr) @@ -531,7 +531,7 @@ def _sunzen_corr_cos_ndarray(data: np.ndarray, # gradually fall off for larger zenith angle grad_factor = (np.arccos(cos_zen) - limit_rad) / (max_sza_rad - limit_rad) # invert the factor so maximum correction is done at `limit` and falls off later - with np.errstate(invalid='ignore'): # we expect space pixels to be invalid + with np.errstate(invalid="ignore"): # we expect space pixels to be invalid grad_factor = 1. 
- np.log(grad_factor + 1) / np.log(2) # make sure we don't make anything negative grad_factor = grad_factor.clip(0.) diff --git a/satpy/modifiers/atmosphere.py b/satpy/modifiers/atmosphere.py index e804982330..1c6225f42a 100644 --- a/satpy/modifiers/atmosphere.py +++ b/satpy/modifiers/atmosphere.py @@ -93,28 +93,28 @@ def __call__(self, projectables, optional_datasets=None, **info): ssadiff = compute_relative_azimuth(sata, suna) del sata, suna - atmosphere = self.attrs.get('atmosphere', 'us-standard') - aerosol_type = self.attrs.get('aerosol_type', 'marine_clean_aerosol') - reduce_lim_low = abs(self.attrs.get('reduce_lim_low', 70)) - reduce_lim_high = abs(self.attrs.get('reduce_lim_high', 105)) - reduce_strength = np.clip(self.attrs.get('reduce_strength', 0), 0, 1) + atmosphere = self.attrs.get("atmosphere", "us-standard") + aerosol_type = self.attrs.get("aerosol_type", "marine_clean_aerosol") + reduce_lim_low = abs(self.attrs.get("reduce_lim_low", 70)) + reduce_lim_high = abs(self.attrs.get("reduce_lim_high", 105)) + reduce_strength = np.clip(self.attrs.get("reduce_strength", 0), 0, 1) logger.info("Removing Rayleigh scattering with atmosphere '%s' and " "aerosol type '%s' for '%s'", - atmosphere, aerosol_type, vis.attrs['name']) - corrector = Rayleigh(vis.attrs['platform_name'], vis.attrs['sensor'], + atmosphere, aerosol_type, vis.attrs["name"]) + corrector = Rayleigh(vis.attrs["platform_name"], vis.attrs["sensor"], atmosphere=atmosphere, aerosol_type=aerosol_type) try: refl_cor_band = corrector.get_reflectance(sunz, satz, ssadiff, - vis.attrs['name'], + vis.attrs["name"], red.data) except (KeyError, IOError): - logger.warning("Could not get the reflectance correction using band name: %s", vis.attrs['name']) + logger.warning("Could not get the reflectance correction using band name: %s", vis.attrs["name"]) logger.warning("Will try use the wavelength, however, this may be ambiguous!") refl_cor_band = corrector.get_reflectance(sunz, satz, ssadiff, - vis.attrs['wavelength'][1], + vis.attrs["wavelength"][1], red.data) if reduce_strength > 0: @@ -155,13 +155,13 @@ def __call__(self, projectables, optional_datasets=None, **info): satz = get_satellite_zenith_angle(band) satz = satz.data # get dask array underneath - logger.info('Correction for limb cooling') - corrector = AtmosphericalCorrection(band.attrs['platform_name'], - band.attrs['sensor']) + logger.info("Correction for limb cooling") + corrector = AtmosphericalCorrection(band.attrs["platform_name"], + band.attrs["sensor"]) atm_corr = da.map_blocks(_call_mapped_correction, satz, band.data, corrector=corrector, - band_name=band.attrs['name'], + band_name=band.attrs["name"], meta=np.array((), dtype=band.dtype)) proj = xr.DataArray(atm_corr, attrs=band.attrs, dims=band.dims, coords=band.coords) @@ -187,7 +187,7 @@ class CO2Corrector(ModifierBase): def __call__(self, projectables, optional_datasets=None, **info): """Apply correction.""" ir_039, ir_108, ir_134 = projectables - logger.info('Applying CO2 correction') + logger.info("Applying CO2 correction") dt_co2 = (ir_108 - ir_134) / 4.0 rcorr = ir_108 ** 4 - (ir_108 - dt_co2) ** 4 t4_co2corr = (ir_039 ** 4 + rcorr).clip(0.0) ** 0.25 diff --git a/satpy/modifiers/geometry.py b/satpy/modifiers/geometry.py index ecd83f80e5..a43a530c0e 100644 --- a/satpy/modifiers/geometry.py +++ b/satpy/modifiers/geometry.py @@ -47,14 +47,14 @@ def __init__(self, max_sza=95.0, **kwargs): def __call__(self, projectables, **info): """Generate the composite.""" - projectables = 
self.match_data_arrays(list(projectables) + list(info.get('optional_datasets', []))) + projectables = self.match_data_arrays(list(projectables) + list(info.get("optional_datasets", []))) vis = projectables[0] if vis.attrs.get("sunz_corrected"): logger.debug("Sun zenith correction already applied") return vis logger.debug("Applying sun zen correction") - if not info.get('optional_datasets'): + if not info.get("optional_datasets"): # we were not given SZA, generate cos(SZA) logger.debug("Computing sun zenith angles.") from .angles import get_cos_sza diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index 6fbf695972..e3ea3214b8 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ -77,19 +77,19 @@ def _get_reflectance_as_dataarray(self, projectables, optional_datasets): da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets) da_sun_zenith = self._get_sun_zenith_from_provided_data(projectables, optional_datasets) - logger.info('Getting reflective part of %s', _nir.attrs['name']) + logger.info("Getting reflective part of %s", _nir.attrs["name"]) reflectance = self._get_reflectance_as_dask(da_nir, da_tb11, da_tb13_4, da_sun_zenith, _nir.attrs) proj = self._create_modified_dataarray(reflectance, base_dataarray=_nir) - proj.attrs['units'] = '%' + proj.attrs["units"] = "%" return proj @staticmethod def _get_tb13_4_from_optionals(optional_datasets): tb13_4 = None for dataset in optional_datasets: - wavelengths = dataset.attrs.get('wavelength', [100., 0, 0]) - if (dataset.attrs.get('units') == 'K' and + wavelengths = dataset.attrs.get("wavelength", [100., 0, 0]) + if (dataset.attrs.get("units") == "K" and wavelengths[0] <= 13.4 <= wavelengths[2]): tb13_4 = dataset.data return tb13_4 @@ -108,14 +108,14 @@ def _get_sun_zenith_from_provided_data(projectables, optional_datasets): raise ImportError("Module pyorbital.astronomy needed to compute sun zenith angles.") _nir = projectables[0] lons, lats = _nir.attrs["area"].get_lonlats(chunks=_nir.data.chunks) - sun_zenith = sun_zenith_angle(_nir.attrs['start_time'], lons, lats) + sun_zenith = sun_zenith_angle(_nir.attrs["start_time"], lons, lats) return sun_zenith def _create_modified_dataarray(self, reflectance, base_dataarray): proj = xr.DataArray(reflectance, dims=base_dataarray.dims, coords=base_dataarray.coords, attrs=base_dataarray.attrs.copy()) - proj.attrs['sun_zenith_threshold'] = self.sun_zenith_threshold - proj.attrs['sun_zenith_masking_limit'] = self.masking_limit + proj.attrs["sun_zenith_threshold"] = self.sun_zenith_threshold + proj.attrs["sun_zenith_masking_limit"] = self.masking_limit self.apply_modifier_info(base_dataarray, proj) return proj @@ -130,7 +130,7 @@ def _init_reflectance_calculator(self, metadata): logger.info("Couldn't load pyspectral") raise ImportError("No module named pyspectral.near_infrared_reflectance") - reflectance_3x_calculator = Calculator(metadata['platform_name'], metadata['sensor'], metadata['name'], + reflectance_3x_calculator = Calculator(metadata["platform_name"], metadata["sensor"], metadata["name"], sunz_threshold=self.sun_zenith_threshold, masking_limit=self.masking_limit) return reflectance_3x_calculator @@ -169,11 +169,11 @@ def _get_emissivity_as_dataarray(self, projectables, optional_datasets): da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets) da_sun_zenith = self._get_sun_zenith_from_provided_data(projectables, optional_datasets) - logger.info('Getting emissive part of %s', _nir.attrs['name']) + logger.info("Getting emissive part of %s", 
_nir.attrs["name"]) emissivity = self._get_emissivity_as_dask(da_nir, da_tb11, da_tb13_4, da_sun_zenith, _nir.attrs) proj = self._create_modified_dataarray(emissivity, base_dataarray=_nir) - proj.attrs['units'] = 'K' + proj.attrs["units"] = "K" return proj def _get_emissivity_as_dask(self, da_nir, da_tb11, da_tb13_4, da_sun_zenith, metadata): diff --git a/satpy/multiscene/_blend_funcs.py b/satpy/multiscene/_blend_funcs.py index 0210cef5cc..e77a7765fb 100644 --- a/satpy/multiscene/_blend_funcs.py +++ b/satpy/multiscene/_blend_funcs.py @@ -14,7 +14,7 @@ def stack( data_arrays: Sequence[xr.DataArray], weights: Optional[Sequence[xr.DataArray]] = None, combine_times: bool = True, - blend_type: str = 'select_with_weights' + blend_type: str = "select_with_weights" ) -> xr.DataArray: """Combine a series of datasets in different ways. @@ -143,7 +143,7 @@ def _stack_no_weights( def _combine_stacked_attrs(collected_attrs: Sequence[Mapping], combine_times: bool) -> dict: attrs = combine_metadata(*collected_attrs) - if combine_times and ('start_time' in attrs or 'end_time' in attrs): + if combine_times and ("start_time" in attrs or "end_time" in attrs): new_start, new_end = _get_combined_start_end_times(collected_attrs) if new_start: attrs["start_time"] = new_start @@ -157,10 +157,10 @@ def _get_combined_start_end_times(metadata_objects: Iterable[Mapping]) -> tuple[ start_time = None end_time = None for md_obj in metadata_objects: - if "start_time" in md_obj and (start_time is None or md_obj['start_time'] < start_time): - start_time = md_obj['start_time'] - if "end_time" in md_obj and (end_time is None or md_obj['end_time'] > end_time): - end_time = md_obj['end_time'] + if "start_time" in md_obj and (start_time is None or md_obj["start_time"] < start_time): + start_time = md_obj["start_time"] + if "end_time" in md_obj and (end_time is None or md_obj["end_time"] > end_time): + end_time = md_obj["end_time"] return start_time, end_time @@ -168,7 +168,7 @@ def timeseries(datasets): """Expand dataset with and concatenate by time dimension.""" expanded_ds = [] for ds in datasets: - if 'time' not in ds.dims: + if "time" not in ds.dims: tmp = ds.expand_dims("time") tmp.coords["time"] = pd.DatetimeIndex([ds.attrs["start_time"]]) else: diff --git a/satpy/multiscene/_multiscene.py b/satpy/multiscene/_multiscene.py index d803758b88..11c4a4888f 100644 --- a/satpy/multiscene/_multiscene.py +++ b/satpy/multiscene/_multiscene.py @@ -87,12 +87,12 @@ def _duplicate_dataset_with_group_alias(self, group_id, group_members): alias_id=group_id, ) elif len(member_ids) > 1: - raise ValueError('Cannot add multiple datasets from a scene ' - 'to the same group') + raise ValueError("Cannot add multiple datasets from a scene " + "to the same group") def _get_dataset_id_of_group_members_in_scene(self, group_members): return [ - self.scene[member].attrs['_satpy_id'] + self.scene[member].attrs["_satpy_id"] for member in group_members if member in self.scene ] @@ -281,7 +281,7 @@ def _all_same_area(self, dataset_ids): ds = scn.get(ds_id) if ds is None: continue - all_areas.append(ds.attrs.get('area')) + all_areas.append(ds.attrs.get("area")) all_areas = [area for area in all_areas if area is not None] return all(all_areas[0] == area for area in all_areas[1:]) @@ -314,15 +314,15 @@ def _generate_scene_func(self, gen, func_name, create_new_scene, *args, **kwargs def load(self, *args, **kwargs): """Load the required datasets from the multiple scenes.""" - self._generate_scene_func(self._scenes, 'load', False, *args, **kwargs) + 
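# Usage sketch for the start/end-time combination in
# _get_combined_start_end_times above: across blended scenes the
# earliest start_time and the latest end_time win. Plain dicts with
# made-up times stand in for dataset attrs here.
from datetime import datetime

scene_attrs = [
    {"start_time": datetime(2024, 1, 1, 12, 0), "end_time": datetime(2024, 1, 1, 12, 10)},
    {"start_time": datetime(2024, 1, 1, 11, 55), "end_time": datetime(2024, 1, 1, 12, 5)},
]
combined_start = min(md["start_time"] for md in scene_attrs if "start_time" in md)
combined_end = max(md["end_time"] for md in scene_attrs if "end_time" in md)
assert combined_start == datetime(2024, 1, 1, 11, 55)
assert combined_end == datetime(2024, 1, 1, 12, 10)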
self._generate_scene_func(self._scenes, "load", False, *args, **kwargs) def crop(self, *args, **kwargs): """Crop the multiscene and return a new cropped multiscene.""" - return self._generate_scene_func(self._scenes, 'crop', True, *args, **kwargs) + return self._generate_scene_func(self._scenes, "crop", True, *args, **kwargs) def resample(self, destination=None, **kwargs): """Resample the multiscene.""" - return self._generate_scene_func(self._scenes, 'resample', True, destination=destination, **kwargs) + return self._generate_scene_func(self._scenes, "resample", True, destination=destination, **kwargs) def blend( self, @@ -447,7 +447,7 @@ def save_datasets(self, client=True, batch_size=1, **kwargs): Note ``compute`` can not be provided. """ - if 'compute' in kwargs: + if "compute" in kwargs: raise ValueError("The 'compute' keyword argument can not be provided.") client = self._get_client(client=client) @@ -466,15 +466,15 @@ def _get_animation_info(self, all_datasets, filename, fill_value=None): first_img = get_enhanced_image(first_dataset) first_img_data = first_img.finalize(fill_value=fill_value)[0] shape = tuple(first_img_data.sizes.get(dim_name) - for dim_name in ('y', 'x', 'bands')) - if fill_value is None and filename.endswith('gif'): + for dim_name in ("y", "x", "bands")) + if fill_value is None and filename.endswith("gif"): log.warning("Forcing fill value to '0' for GIF Luminance images") fill_value = 0 shape = shape[:2] attrs = first_dataset.attrs.copy() - if 'end_time' in last_dataset.attrs: - attrs['end_time'] = last_dataset.attrs['end_time'] + if "end_time" in last_dataset.attrs: + attrs["end_time"] = last_dataset.attrs["end_time"] this_fn = filename.format(**attrs) return this_fn, shape, fill_value @@ -508,7 +508,7 @@ def _get_single_frame(self, ds, enh_args, fill_value): # assume all other shapes are (y, x) # we need arrays grouped by pixel so # transpose if needed - data = data.transpose('y', 'x', 'bands') + data = data.transpose("y", "x", "bands") return data def _get_animation_frames(self, all_datasets, shape, fill_value=None, @@ -603,7 +603,7 @@ def _get_writers_and_frames( first_scene = self.first_scene scenes = iter(self._scene_gen) info_scenes = [first_scene] - if 'end_time' in filename: + if "end_time" in filename: # if we need the last scene to generate the filename # then compute all the scenes so we can figure it out log.debug("Generating scenes to compute end_time for filename") diff --git a/satpy/node.py b/satpy/node.py index f1cf401057..191ec0bbcf 100644 --- a/satpy/node.py +++ b/satpy/node.py @@ -121,8 +121,8 @@ def display(self, previous=0, include_data=False): """Display the node.""" no_data = " (No Data)" if self.data is None else "" return ( - (" +" * previous) + str(self.name) + no_data + '\n' + - ''.join([child.display(previous + 1) for child in self.children])) + (" +" * previous) + str(self.name) + no_data + "\n" + + "".join([child.display(previous + 1) for child in self.children])) def leaves(self, unique=True): """Get the leaves of the tree starting at this root.""" @@ -204,12 +204,12 @@ class ReaderNode(Node): def __init__(self, unique_id, reader_name): """Set up the node.""" - super().__init__(unique_id, data={'reader_name': reader_name}) + super().__init__(unique_id, data={"reader_name": reader_name}) def _copy_name_and_data(self, node_cache): - return ReaderNode(self.name, self.data['reader_name']) + return ReaderNode(self.name, self.data["reader_name"]) @property def reader_name(self): """Get the name of the reader.""" - return 
self.data['reader_name'] + return self.data["reader_name"] diff --git a/satpy/plugin_base.py b/satpy/plugin_base.py index ee19341796..286b5fc335 100644 --- a/satpy/plugin_base.py +++ b/satpy/plugin_base.py @@ -60,5 +60,5 @@ def __init__(self, default_config_filename=None, config_files=None, **kwargs): def load_yaml_config(self, conf): """Load a YAML configuration file and recursively update the overall configuration.""" - with open(conf, 'r', encoding='utf-8') as fd: + with open(conf, "r", encoding="utf-8") as fd: self.config = recursive_dict_update(self.config, yaml.load(fd, Loader=UnsafeLoader)) diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index 2b1bbc37ba..81ebf2393b 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -37,7 +37,7 @@ # Old Name -> New Name -PENDING_OLD_READER_NAMES = {'fci_l1c_fdhsi': 'fci_l1c_nc', 'viirs_l2_cloud_mask_nc': 'viirs_edr'} +PENDING_OLD_READER_NAMES = {"fci_l1c_fdhsi": "fci_l1c_nc", "viirs_l2_cloud_mask_nc": "viirs_edr"} OLD_READER_NAMES: dict[str, str] = {} @@ -171,7 +171,7 @@ def _get_file_keys_for_reader_files(reader_files, group_keys=None): file_keys = {} for (reader_name, (reader_instance, files_to_sort)) in reader_files.items(): if group_keys is None: - group_keys = reader_instance.info.get('group_keys', ('start_time',)) + group_keys = reader_instance.info.get("group_keys", ("start_time",)) file_keys[reader_name] = [] # make a copy because filename_items_for_filetype will modify inplace files_to_sort = set(files_to_sort) @@ -301,7 +301,7 @@ def _get_keys_with_empty_values(grp): def read_reader_config(config_files, loader=UnsafeLoader): """Read the reader `config_files` and return the extracted reader metadata.""" reader_config = load_yaml_reader_configs(*config_files, loader=loader) - return reader_config['reader'] + return reader_config["reader"] def load_reader(reader_configs, **reader_kwargs): @@ -324,16 +324,16 @@ def configs_for_reader(reader=None): reader = get_valid_reader_names(reader) # given a config filename or reader name - config_files = [r if r.endswith('.yaml') else r + '.yaml' for r in reader] + config_files = [r if r.endswith(".yaml") else r + ".yaml" for r in reader] else: - paths = get_entry_points_config_dirs('satpy.readers') - reader_configs = glob_config(os.path.join('readers', '*.yaml'), search_dirs=paths) + paths = get_entry_points_config_dirs("satpy.readers") + reader_configs = glob_config(os.path.join("readers", "*.yaml"), search_dirs=paths) config_files = set(reader_configs) for config_file in config_files: config_basename = os.path.basename(config_file) reader_name = os.path.splitext(config_basename)[0] - paths = get_entry_points_config_dirs('satpy.readers') + paths = get_entry_points_config_dirs("satpy.readers") reader_configs = config_search_paths( os.path.join("readers", config_basename), search_dirs=paths, check_exists=True) @@ -393,9 +393,9 @@ def available_readers(as_dict=False, yaml_loader=UnsafeLoader): LOG.debug("Could not import reader config from: %s", reader_configs) LOG.debug("Error loading YAML", exc_info=True) continue - readers.append(reader_info if as_dict else reader_info['name']) + readers.append(reader_info if as_dict else reader_info["name"]) if as_dict: - readers = sorted(readers, key=lambda reader_info: reader_info['name']) + readers = sorted(readers, key=lambda reader_info: reader_info["name"]) else: readers = sorted(readers) return readers @@ -467,13 +467,13 @@ def find_files_and_readers(start_time=None, end_time=None, base_dir=None, """ reader_files = {} 
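# Hedged sketch of what the recursive config merge in load_yaml_config
# above amounts to; this illustrates the idea, it is not
# satpy.utils.recursive_dict_update itself. Nested mappings are merged
# key by key; everything else is overwritten.
def recursive_update(base, new):
    for key, value in new.items():
        if isinstance(value, dict) and isinstance(base.get(key), dict):
            recursive_update(base[key], value)
        else:
            base[key] = value
    return base

merged = recursive_update({"reader": {"name": "x", "sensors": ["a"]}},
                          {"reader": {"sensors": ["b"]}})
assert merged == {"reader": {"name": "x", "sensors": ["b"]}}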
reader_kwargs = reader_kwargs or {} - filter_parameters = filter_parameters or reader_kwargs.get('filter_parameters', {}) + filter_parameters = filter_parameters or reader_kwargs.get("filter_parameters", {}) sensor_supported = False if start_time or end_time: - filter_parameters['start_time'] = start_time - filter_parameters['end_time'] = end_time - reader_kwargs['filter_parameters'] = filter_parameters + filter_parameters["start_time"] = start_time + filter_parameters["end_time"] = end_time + reader_kwargs["filter_parameters"] = filter_parameters for reader_configs in configs_for_reader(reader): (reader_instance, loadables, this_sensor_supported) = _get_loadables_for_reader_config( @@ -509,7 +509,7 @@ def _get_loadables_for_reader_config(base_dir, reader, sensor, reader_configs, try: reader_instance = load_reader(reader_configs, **reader_kwargs) except (KeyError, IOError, yaml.YAMLError) as err: - LOG.info('Cannot use %s', str(reader_configs)) + LOG.info("Cannot use %s", str(reader_configs)) LOG.debug(str(err)) if reader and (isinstance(reader, str) or len(reader) == 1): # if it is a single reader then give a more usable error @@ -563,7 +563,7 @@ def load_readers(filenames=None, reader=None, reader_kwargs=None): reader_configs, **reader_kwargs[None if reader is None else reader[idx]]) except (KeyError, IOError, yaml.YAMLError) as err: - LOG.info('Cannot use %s', str(reader_configs)) + LOG.info("Cannot use %s", str(reader_configs)) LOG.debug(str(err)) continue @@ -644,7 +644,7 @@ def _get_reader_kwargs(reader, reader_kwargs): reader_kwargs_without_filter = {} for (k, v) in reader_kwargs.items(): reader_kwargs_without_filter[k] = v.copy() - reader_kwargs_without_filter[k].pop('filter_parameters', None) + reader_kwargs_without_filter[k].pop("filter_parameters", None) return (reader_kwargs, reader_kwargs_without_filter) diff --git a/satpy/readers/_geos_area.py b/satpy/readers/_geos_area.py index 0775e51381..e777d78ca7 100644 --- a/satpy/readers/_geos_area.py +++ b/satpy/readers/_geos_area.py @@ -79,7 +79,7 @@ def get_area_extent(pdict): # count starts at 1 cols = 1 - 0.5 - if pdict['scandir'] == 'S2N': + if pdict["scandir"] == "S2N": lines = 0.5 - 1 scanmult = -1 else: @@ -88,22 +88,22 @@ def get_area_extent(pdict): # Lower left x, y scanning angles in degrees ll_x, ll_y = get_xy_from_linecol(lines * scanmult, cols, - (pdict['loff'], pdict['coff']), - (pdict['lfac'], pdict['cfac'])) + (pdict["loff"], pdict["coff"]), + (pdict["lfac"], pdict["cfac"])) - cols += pdict['ncols'] - lines += pdict['nlines'] + cols += pdict["ncols"] + lines += pdict["nlines"] # Upper right x, y scanning angles in degrees ur_x, ur_y = get_xy_from_linecol(lines * scanmult, cols, - (pdict['loff'], pdict['coff']), - (pdict['lfac'], pdict['cfac'])) - if pdict['scandir'] == 'S2N': + (pdict["loff"], pdict["coff"]), + (pdict["lfac"], pdict["cfac"])) + if pdict["scandir"] == "S2N": ll_y *= -1 ur_y *= -1 # Convert degrees to radians and create area extent - aex = make_ext(ll_x=ll_x, ur_x=ur_x, ll_y=ll_y, ur_y=ur_y, h=pdict['h']) + aex = make_ext(ll_x=ll_x, ur_x=ur_x, ll_y=ll_y, ur_y=ur_y, h=pdict["h"]) return aex @@ -132,20 +132,20 @@ def get_area_definition(pdict, a_ext): The AreaDefinition `proj_id` attribute is being deprecated. 
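# Hedged sketch of the line/column -> extent arithmetic behind
# get_area_extent above, assuming the CGMS-style scaling implemented by
# get_xy_from_linecol and make_ext: CFAC/LFAC convert pixel counts to
# scanning angles in degrees, and the angle in radians times the
# satellite height h gives metres. All numbers are placeholders.
import numpy as np

coff, cfac = 2750.5, 20466275            # hypothetical offset and factor
h = 35785831.0                           # height above ellipsoid, metres

col = 1 - 0.5                            # left edge of the first column
x_deg = (col - coff) / (cfac / 2 ** 16)  # scanning angle in degrees
ll_x = np.deg2rad(x_deg) * h             # extent coordinate, ~ -5.5e6 m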
""" - proj_dict = {'a': float(pdict['a']), - 'b': float(pdict['b']), - 'lon_0': float(pdict['ssp_lon']), - 'h': float(pdict['h']), - 'proj': 'geos', - 'units': 'm'} + proj_dict = {"a": float(pdict["a"]), + "b": float(pdict["b"]), + "lon_0": float(pdict["ssp_lon"]), + "h": float(pdict["h"]), + "proj": "geos", + "units": "m"} a_def = geometry.AreaDefinition( - pdict['a_name'], - pdict['a_desc'], - pdict['p_id'], + pdict["a_name"], + pdict["a_desc"], + pdict["p_id"], proj_dict, - int(pdict['ncols']), - int(pdict['nlines']), + int(pdict["ncols"]), + int(pdict["nlines"]), a_ext) return a_def @@ -189,21 +189,21 @@ def get_geos_area_naming(input_dict): """ area_naming_dict = {} - resolution_strings = get_resolution_and_unit_strings(input_dict['resolution']) + resolution_strings = get_resolution_and_unit_strings(input_dict["resolution"]) - area_naming_dict['area_id'] = '{}_{}_{}_{}{}'.format(input_dict['platform_name'].lower(), - input_dict['instrument_name'].lower(), - input_dict['service_name'].lower(), - resolution_strings['value'], - resolution_strings['unit'] + area_naming_dict["area_id"] = "{}_{}_{}_{}{}".format(input_dict["platform_name"].lower(), + input_dict["instrument_name"].lower(), + input_dict["service_name"].lower(), + resolution_strings["value"], + resolution_strings["unit"] ) - area_naming_dict['description'] = '{} {} {} area definition ' \ - 'with {} {} resolution'.format(input_dict['platform_name'].upper(), - input_dict['instrument_name'].upper(), - input_dict['service_desc'], - resolution_strings['value'], - resolution_strings['unit'] + area_naming_dict["description"] = "{} {} {} area definition " \ + "with {} {} resolution".format(input_dict["platform_name"].upper(), + input_dict["instrument_name"].upper(), + input_dict["service_desc"], + resolution_strings["value"], + resolution_strings["unit"] ) return area_naming_dict @@ -222,8 +222,8 @@ def get_resolution_and_unit_strings(resolution): Dictionary with `value` and `unit` keys, values are strings. 
""" if resolution >= 1000: - return {'value': '{:.0f}'.format(resolution*1e-3), - 'unit': 'km'} + return {"value": "{:.0f}".format(resolution*1e-3), + "unit": "km"} - return {'value': '{:.0f}'.format(resolution), - 'unit': 'm'} + return {"value": "{:.0f}".format(resolution), + "unit": "m"} diff --git a/satpy/readers/aapp_l1b.py b/satpy/readers/aapp_l1b.py index ea3877e48a..e502a9da64 100644 --- a/satpy/readers/aapp_l1b.py +++ b/satpy/readers/aapp_l1b.py @@ -54,24 +54,24 @@ def get_aapp_chunks(shape): AVHRR_CHANNEL_NAMES = ["1", "2", "3a", "3b", "4", "5"] -AVHRR_ANGLE_NAMES = ['sensor_zenith_angle', - 'solar_zenith_angle', - 'sun_sensor_azimuth_difference_angle'] +AVHRR_ANGLE_NAMES = ["sensor_zenith_angle", + "solar_zenith_angle", + "sun_sensor_azimuth_difference_angle"] -AVHRR_PLATFORM_IDS2NAMES = {4: 'NOAA-15', - 2: 'NOAA-16', - 6: 'NOAA-17', - 7: 'NOAA-18', - 8: 'NOAA-19', - 11: 'Metop-B', - 12: 'Metop-A', - 13: 'Metop-C', - 14: 'Metop simulator'} +AVHRR_PLATFORM_IDS2NAMES = {4: "NOAA-15", + 2: "NOAA-16", + 6: "NOAA-17", + 7: "NOAA-18", + 8: "NOAA-19", + 11: "Metop-B", + 12: "Metop-A", + 13: "Metop-C", + 14: "Metop simulator"} def create_xarray(arr): """Create an `xarray.DataArray`.""" - res = xr.DataArray(arr, dims=['y', 'x']) + res = xr.DataArray(arr, dims=["y", "x"]) return res @@ -102,30 +102,30 @@ def _set_filedata_layout(self): @property def start_time(self): """Get the time of the first observation.""" - return datetime(self._data['scnlinyr'][0], 1, 1) + timedelta( - days=int(self._data['scnlindy'][0]) - 1, - milliseconds=int(self._data['scnlintime'][0])) + return datetime(self._data["scnlinyr"][0], 1, 1) + timedelta( + days=int(self._data["scnlindy"][0]) - 1, + milliseconds=int(self._data["scnlintime"][0])) @property def end_time(self): """Get the time of the final observation.""" - return datetime(self._data['scnlinyr'][-1], 1, 1) + timedelta( - days=int(self._data['scnlindy'][-1]) - 1, - milliseconds=int(self._data['scnlintime'][-1])) + return datetime(self._data["scnlinyr"][-1], 1, 1) + timedelta( + days=int(self._data["scnlindy"][-1]) - 1, + milliseconds=int(self._data["scnlintime"][-1])) def _update_dataset_attributes(self, dataset, key, info): - dataset.attrs.update({'platform_name': self.platform_name, - 'sensor': self.sensor}) + dataset.attrs.update({"platform_name": self.platform_name, + "sensor": self.sensor}) dataset.attrs.update(key.to_dict()) - for meta_key in ('standard_name', 'units'): + for meta_key in ("standard_name", "units"): if meta_key in info: dataset.attrs.setdefault(meta_key, info[meta_key]) def _get_platform_name(self, platform_names_lookup): """Get the platform name from the file header.""" - self.platform_name = platform_names_lookup.get(self._header['satid'][0], None) + self.platform_name = platform_names_lookup.get(self._header["satid"][0], None) if self.platform_name is None: - raise ValueError("Unsupported platform ID: %d" % self.header['satid']) + raise ValueError("Unsupported platform ID: %d" % self.header["satid"]) def read(self): """Read the data.""" @@ -143,17 +143,17 @@ def _calibrate_active_channel_data(self, key): def get_dataset(self, key, info): """Get a dataset from the file.""" - if key['name'] in self._channel_names: + if key["name"] in self._channel_names: dataset = self._calibrate_active_channel_data(key) if dataset is None: return None - elif key['name'] in ['longitude', 'latitude']: - dataset = self.navigate(key['name']) + elif key["name"] in ["longitude", "latitude"]: + dataset = self.navigate(key["name"]) dataset.attrs = info - 
elif key['name'] in self._angle_names: - dataset = self.get_angles(key['name']) + elif key["name"] in self._angle_names: + dataset = self.get_angles(key["name"]) else: - raise ValueError("Not a supported dataset: %s", key['name']) + raise ValueError("Not a supported dataset: %s", key["name"]) self._update_dataset_attributes(dataset, key, info) return dataset @@ -168,7 +168,7 @@ def __init__(self, filename, filename_info, filetype_info): filetype_info) self.channels = {i: None for i in AVHRR_CHANNEL_NAMES} - self.units = {i: 'counts' for i in AVHRR_CHANNEL_NAMES} + self.units = {i: "counts" for i in AVHRR_CHANNEL_NAMES} self._is3b = None self._is3a = None @@ -181,7 +181,7 @@ def __init__(self, filename, filename_info, filetype_info): self.active_channels = self._get_active_channels() self._get_platform_name(AVHRR_PLATFORM_IDS2NAMES) - self.sensor = 'avhrr-3' + self.sensor = "avhrr-3" self._get_all_interpolated_angles = functools.lru_cache(maxsize=10)( self._get_all_interpolated_angles_uncached @@ -202,25 +202,25 @@ def _get_active_channels(self): def _calibrate_active_channel_data(self, key): """Calibrate active channel data only.""" - if self.active_channels[key['name']]: + if self.active_channels[key["name"]]: return self.calibrate(key) return None def _get_channel_binary_status_from_header(self): - status = self._header['inststat1'].item() - change_line = self._header['statchrecnb'] + status = self._header["inststat1"].item() + change_line = self._header["statchrecnb"] if change_line > 0: - status |= self._header['inststat2'].item() + status |= self._header["inststat2"].item() return status @staticmethod def _convert_binary_channel_status_to_activation_dict(status): - bits_channels = ((13, '1'), - (12, '2'), - (11, '3a'), - (10, '3b'), - (9, '4'), - (8, '5')) + bits_channels = ((13, "1"), + (12, "2"), + (11, "3a"), + (10, "3b"), + (9, "4"), + (8, "5")) activated = dict() for bit, channel_name in bits_channels: activated[channel_name] = bool(status >> bit & 1) @@ -229,8 +229,8 @@ def _convert_binary_channel_status_to_activation_dict(status): def available_datasets(self, configured_datasets=None): """Get the available datasets.""" for _, mda in configured_datasets: - if mda['name'] in self._channel_names: - yield self.active_channels[mda['name']], mda + if mda["name"] in self._channel_names: + yield self.active_channels[mda["name"]], mda else: yield True, mda @@ -285,9 +285,9 @@ def _create_40km_interpolator(lines, *arrays_40km, geolocation=False): def navigate(self, coordinate_id): """Get the longitudes and latitudes of the scene.""" lons, lats = self._get_all_interpolated_coordinates() - if coordinate_id == 'longitude': + if coordinate_id == "longitude": return create_xarray(lons) - if coordinate_id == 'latitude': + if coordinate_id == "latitude": return create_xarray(lats) raise KeyError("Coordinate {} unknown.".format(coordinate_id)) @@ -309,49 +309,49 @@ def calibrate(self, if calib_coeffs is None: calib_coeffs = {} - units = {'reflectance': '%', - 'brightness_temperature': 'K', - 'counts': '', - 'radiance': 'W*m-2*sr-1*cm ?'} + units = {"reflectance": "%", + "brightness_temperature": "K", + "counts": "", + "radiance": "W*m-2*sr-1*cm ?"} - if dataset_id['name'] in ("3a", "3b") and self._is3b is None: + if dataset_id["name"] in ("3a", "3b") and self._is3b is None: # Is it 3a or 3b: line_chunks = get_aapp_chunks((self._data.shape[0], 2048))[0] - self._is3a = da.bitwise_and(da.from_array(self._data['scnlinbit'], + self._is3a = da.bitwise_and(da.from_array(self._data["scnlinbit"], 
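# Worked example of the instrument-status decoding in
# _convert_binary_channel_status_to_activation_dict above: each AVHRR
# channel has one activation bit in the status word. The status value
# here is hypothetical (a typical night configuration with 3a off).
bits_channels = ((13, "1"), (12, "2"), (11, "3a"),
                 (10, "3b"), (9, "4"), (8, "5"))
status = 0b11011100000000
active = {name: bool(status >> bit & 1) for bit, name in bits_channels}
assert active == {"1": True, "2": True, "3a": False,
                  "3b": True, "4": True, "5": True}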
chunks=line_chunks), 3) == 0 - self._is3b = da.bitwise_and(da.from_array(self._data['scnlinbit'], + self._is3b = da.bitwise_and(da.from_array(self._data["scnlinbit"], chunks=line_chunks), 3) == 1 try: - vis_idx = ['1', '2', '3a'].index(dataset_id['name']) + vis_idx = ["1", "2", "3a"].index(dataset_id["name"]) ir_idx = None except ValueError: vis_idx = None - ir_idx = ['3b', '4', '5'].index(dataset_id['name']) + ir_idx = ["3b", "4", "5"].index(dataset_id["name"]) mask = True if vis_idx is not None: - coeffs = calib_coeffs.get('ch' + dataset_id['name']) - if dataset_id['name'] == '3a': + coeffs = calib_coeffs.get("ch" + dataset_id["name"]) + if dataset_id["name"] == "3a": mask = self._is3a[:, None] ds = create_xarray( _vis_calibrate(self._data, vis_idx, - dataset_id['calibration'], + dataset_id["calibration"], pre_launch_coeffs, coeffs, mask=mask)) else: - if dataset_id['name'] == '3b': + if dataset_id["name"] == "3b": mask = self._is3b[:, None] ds = create_xarray( _ir_calibrate(self._header, self._data, ir_idx, - dataset_id['calibration'], + dataset_id["calibration"], mask=mask)) - ds.attrs['units'] = units[dataset_id['calibration']] + ds.attrs["units"] = units[dataset_id["calibration"]] ds.attrs.update(dataset_id._asdict()) return ds @@ -545,8 +545,8 @@ def _vis_calibrate(data, """ # Calibration count to albedo, the calibration is performed separately for # two value ranges. - if calib_type not in ['counts', 'radiance', 'reflectance']: - raise ValueError('Calibration ' + calib_type + ' unknown!') + if calib_type not in ["counts", "radiance", "reflectance"]: + raise ValueError("Calibration " + calib_type + " unknown!") channel_data = data["hrpt"][:, :, chn] chunks = get_aapp_chunks(channel_data.shape) @@ -554,12 +554,12 @@ def _vis_calibrate(data, channel = da.from_array(channel_data, chunks=chunks) mask &= channel != 0 - if calib_type == 'counts': + if calib_type == "counts": return channel channel = channel.astype(CHANNEL_DTYPE) - if calib_type == 'radiance': + if calib_type == "radiance": logger.info("Radiances are not yet supported for " + "the VIS/NIR channels!") @@ -630,9 +630,9 @@ def _ir_calibrate(header, data, irchn, calib_type, mask=True): mask &= count != 0 count = count.astype(CHANNEL_DTYPE) - k1_ = da.from_array(data['calir'][:, irchn, 0, 0], chunks=line_chunks) / 1.0e9 - k2_ = da.from_array(data['calir'][:, irchn, 0, 1], chunks=line_chunks) / 1.0e6 - k3_ = da.from_array(data['calir'][:, irchn, 0, 2], chunks=line_chunks) / 1.0e6 + k1_ = da.from_array(data["calir"][:, irchn, 0, 0], chunks=line_chunks) / 1.0e9 + k2_ = da.from_array(data["calir"][:, irchn, 0, 1], chunks=line_chunks) / 1.0e6 + k3_ = da.from_array(data["calir"][:, irchn, 0, 2], chunks=line_chunks) / 1.0e6 # Count to radiance conversion: rad = k1_[:, None] * count * count + k2_[:, None] * count + k3_[:, None] @@ -645,14 +645,14 @@ def _ir_calibrate(header, data, irchn, calib_type, mask=True): return da.where(mask, rad, np.nan) # Central wavenumber: - cwnum = header['radtempcnv'][0, irchn, 0] + cwnum = header["radtempcnv"][0, irchn, 0] if irchn == 0: cwnum = cwnum / 1.0e2 else: cwnum = cwnum / 1.0e3 - bandcor_2 = header['radtempcnv'][0, irchn, 1] / 1e5 - bandcor_3 = header['radtempcnv'][0, irchn, 2] / 1e6 + bandcor_2 = header["radtempcnv"][0, irchn, 1] / 1e5 + bandcor_3 = header["radtempcnv"][0, irchn, 2] / 1e6 ir_const_1 = 1.1910659e-5 ir_const_2 = 1.438833 diff --git a/satpy/readers/aapp_mhs_amsub_l1c.py b/satpy/readers/aapp_mhs_amsub_l1c.py index 39216431f4..18c054dd3b 100644 --- 
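# Hedged sketch of the IR calibration chain in _ir_calibrate above:
# counts become radiance through a per-line quadratic (k1, k2, k3), and
# radiance becomes brightness temperature through the inverse Planck
# function in wavenumber space plus a linear band correction. The
# band-correction step is shown in its simple form; AAPP has extra
# handling for legacy coefficient conventions. Inputs are placeholders.
import numpy as np

IR_CONST_1 = 1.1910659e-5   # first radiation constant, as in the hunk
IR_CONST_2 = 1.438833       # second radiation constant, as in the hunk

def counts_to_bt(count, k1, k2, k3, cwnum, bandcor_2, bandcor_3):
    rad = k1 * count ** 2 + k2 * count + k3
    t_planck = IR_CONST_2 * cwnum / np.log(1.0 + IR_CONST_1 * cwnum ** 3 / rad)
    return (t_planck - bandcor_2) / bandcor_3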
a/satpy/readers/aapp_mhs_amsub_l1c.py +++ b/satpy/readers/aapp_mhs_amsub_l1c.py @@ -36,21 +36,21 @@ CHUNK_SIZE = get_legacy_chunk_size() LINE_CHUNK = CHUNK_SIZE ** 2 // 90 -MHS_AMSUB_CHANNEL_NAMES = ['1', '2', '3', '4', '5'] -MHS_AMSUB_ANGLE_NAMES = ['sensor_zenith_angle', 'sensor_azimuth_angle', - 'solar_zenith_angle', 'solar_azimuth_difference_angle'] +MHS_AMSUB_CHANNEL_NAMES = ["1", "2", "3", "4", "5"] +MHS_AMSUB_ANGLE_NAMES = ["sensor_zenith_angle", "sensor_azimuth_angle", + "solar_zenith_angle", "solar_azimuth_difference_angle"] -MHS_AMSUB_PLATFORM_IDS2NAMES = {15: 'NOAA-15', - 16: 'NOAA-16', - 17: 'NOAA-17', - 18: 'NOAA-18', - 19: 'NOAA-19', - 1: 'Metop-B', - 2: 'Metop-A', - 3: 'Metop-C', - 4: 'Metop simulator'} +MHS_AMSUB_PLATFORM_IDS2NAMES = {15: "NOAA-15", + 16: "NOAA-16", + 17: "NOAA-17", + 18: "NOAA-18", + 19: "NOAA-19", + 1: "Metop-B", + 2: "Metop-A", + 3: "Metop-C", + 4: "Metop simulator"} -MHS_AMSUB_PLATFORMS = ['Metop-A', 'Metop-B', 'Metop-C', 'NOAA-18', 'NOAA-19'] +MHS_AMSUB_PLATFORMS = ["Metop-A", "Metop-B", "Metop-C", "NOAA-18", "NOAA-19"] class MHS_AMSUB_AAPPL1CFile(AAPPL1BaseFileHandler): @@ -61,7 +61,7 @@ def __init__(self, filename, filename_info, filetype_info): super().__init__(filename, filename_info, filetype_info) self.channels = {i: None for i in MHS_AMSUB_CHANNEL_NAMES} - self.units = {i: 'brightness_temperature' for i in MHS_AMSUB_CHANNEL_NAMES} + self.units = {i: "brightness_temperature" for i in MHS_AMSUB_CHANNEL_NAMES} self._channel_names = MHS_AMSUB_CHANNEL_NAMES self._angle_names = MHS_AMSUB_ANGLE_NAMES @@ -80,10 +80,10 @@ def _set_filedata_layout(self): def _get_sensorname(self): """Get the sensor name from the header.""" - if self._header['instrument'][0] == 11: - self.sensor = 'amsub' - elif self._header['instrument'][0] == 12: - self.sensor = 'mhs' + if self._header["instrument"][0] == 11: + self.sensor = "amsub" + elif self._header["instrument"][0] == 12: + self.sensor = "mhs" else: raise IOError("Sensor neither MHS nor AMSU-B!") @@ -101,9 +101,9 @@ def get_angles(self, angle_id): def navigate(self, coordinate_id): """Get the longitudes and latitudes of the scene.""" lons, lats = self._get_coordinates_in_degrees() - if coordinate_id == 'longitude': + if coordinate_id == "longitude": return create_xarray(lons) - if coordinate_id == 'latitude': + if coordinate_id == "latitude": return create_xarray(lats) raise KeyError("Coordinate {} unknown.".format(coordinate_id)) @@ -119,17 +119,17 @@ def _calibrate_active_channel_data(self, key): def calibrate(self, dataset_id): """Calibrate the data.""" - units = {'brightness_temperature': 'K'} + units = {"brightness_temperature": "K"} mask = True - idx = ['1', '2', '3', '4', '5'].index(dataset_id['name']) + idx = ["1", "2", "3", "4", "5"].index(dataset_id["name"]) ds = create_xarray( _calibrate(self._data, idx, - dataset_id['calibration'], + dataset_id["calibration"], mask=mask)) - ds.attrs['units'] = units[dataset_id['calibration']] + ds.attrs["units"] = units[dataset_id["calibration"]] ds.attrs.update(dataset_id._asdict()) return ds @@ -143,13 +143,13 @@ def _calibrate(data, *calib_type* in brightness_temperature. 
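# Worked example of the level-1c brightness-temperature decoding in
# _calibrate above: btemps are stored as integers in hundredths of a
# Kelvin, and zero marks missing data. Stored values are hypothetical.
import numpy as np

btemps_raw = np.array([27315, 25000, 0])
bt = btemps_raw / 100.0                        # -> Kelvin
bt = np.where(btemps_raw != 0, bt, np.nan)     # mask fill values
# array([273.15, 250.  ,    nan])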
""" - if calib_type not in ['brightness_temperature']: - raise ValueError('Calibration ' + calib_type + ' unknown!') + if calib_type not in ["brightness_temperature"]: + raise ValueError("Calibration " + calib_type + " unknown!") channel = da.from_array(data["btemps"][:, :, chn] / 100., chunks=(LINE_CHUNK, 90)) mask &= channel != 0 - if calib_type == 'counts': + if calib_type == "counts": return channel channel = channel.astype(np.float_) diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py index 4a6bf069c1..0b80045767 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -34,13 +34,13 @@ CHUNK_SIZE = get_legacy_chunk_size() PLATFORM_NAMES = { - 'g16': 'GOES-16', - 'g17': 'GOES-17', - 'g18': 'GOES-18', - 'g19': 'GOES-19', - 'goes16': 'GOES-16', - 'goes17': 'GOES-17', - 'goes18': 'GOES-18', + "g16": "GOES-16", + "g17": "GOES-17", + "g18": "GOES-18", + "g19": "GOES-19", + "goes16": "GOES-16", + "goes17": "GOES-17", + "goes18": "GOES-18", } @@ -51,11 +51,11 @@ def __init__(self, filename, filename_info, filetype_info): """Open the NetCDF file with xarray and prepare the Dataset for reading.""" super(NC_ABI_BASE, self).__init__(filename, filename_info, filetype_info) - platform_shortname = filename_info['platform_shortname'] + platform_shortname = filename_info["platform_shortname"] self.platform_name = PLATFORM_NAMES.get(platform_shortname.lower()) - self.nlines = self.nc['y'].size - self.ncols = self.nc['x'].size + self.nlines = self.nc["y"].size + self.ncols = self.nc["x"].size self.coords = {} @@ -67,28 +67,28 @@ def nc(self): nc = xr.open_dataset(f_obj, decode_cf=True, mask_and_scale=False, - chunks={'x': CHUNK_SIZE, 'y': CHUNK_SIZE}, ) + chunks={"x": CHUNK_SIZE, "y": CHUNK_SIZE}, ) except ValueError: nc = xr.open_dataset(f_obj, decode_cf=True, mask_and_scale=False, - chunks={'lon': CHUNK_SIZE, 'lat': CHUNK_SIZE}, ) + chunks={"lon": CHUNK_SIZE, "lat": CHUNK_SIZE}, ) nc = self._rename_dims(nc) return nc @staticmethod def _rename_dims(nc): - if 't' in nc.dims or 't' in nc.coords: - nc = nc.rename({'t': 'time'}) - if 'goes_lat_lon_projection' in nc: + if "t" in nc.dims or "t" in nc.coords: + nc = nc.rename({"t": "time"}) + if "goes_lat_lon_projection" in nc: with suppress(ValueError): - nc = nc.rename({'lon': 'x', 'lat': 'y'}) + nc = nc.rename({"lon": "x", "lat": "y"}) return nc @property def sensor(self): """Get sensor name for current file handler.""" - return 'abi' + return "abi" def __getitem__(self, item): """Wrap `self.nc[item]` for better floating point precision. @@ -110,21 +110,21 @@ def __getitem__(self, item): def _adjust_data(self, data, item): """Adjust data with typing, scaling and filling.""" - factor = data.attrs.get('scale_factor', 1) - offset = data.attrs.get('add_offset', 0) - fill = data.attrs.get('_FillValue') - unsigned = data.attrs.get('_Unsigned', None) + factor = data.attrs.get("scale_factor", 1) + offset = data.attrs.get("add_offset", 0) + fill = data.attrs.get("_FillValue") + unsigned = data.attrs.get("_Unsigned", None) def is_int(val): - return np.issubdtype(val.dtype, np.integer) if hasattr(val, 'dtype') else isinstance(val, int) + return np.issubdtype(val.dtype, np.integer) if hasattr(val, "dtype") else isinstance(val, int) # Ref. 
GOESR PUG-L1B-vol3, section 5.0.2 Unsigned Integer Processing - if unsigned is not None and unsigned.lower() == 'true': + if unsigned is not None and unsigned.lower() == "true": # cast the data from int to uint - data = data.astype('u%s' % data.dtype.itemsize) + data = data.astype("u%s" % data.dtype.itemsize) if fill is not None: - fill = fill.astype('u%s' % fill.dtype.itemsize) + fill = fill.astype("u%s" % fill.dtype.itemsize) if fill is not None: # Some backends (h5netcdf) may return attributes as shape (1,) # arrays rather than shape () scalars, which according to the netcdf @@ -138,7 +138,7 @@ def is_int(val): else: new_fill = np.nan data = data.where(data != fill, new_fill) - if factor != 1 and item in ('x', 'y'): + if factor != 1 and item in ("x", "y"): # be more precise with x/y coordinates # see get_area_def for more information data = data * np.round(float(factor), 6) + np.round(float(offset), 6) @@ -157,7 +157,7 @@ def _adjust_coords(self, data, item): # 'time' dimension causes issues in other processing # 'x_image' and 'y_image' are confusing to some users and unnecessary # 'x' and 'y' will be overwritten by base class AreaDefinition - for coord_name in ('x_image', 'y_image', 'time', 'x', 'y'): + for coord_name in ("x_image", "y_image", "time", "x", "y"): if coord_name in data.coords: data = data.drop_vars(coord_name) if item in data.coords: @@ -175,44 +175,44 @@ def get_dataset(self, key, info): def get_area_def(self, key): """Get the area definition of the data at hand.""" - if 'goes_imager_projection' in self.nc: + if "goes_imager_projection" in self.nc: return self._get_areadef_fixedgrid(key) - if 'goes_lat_lon_projection' in self.nc: + if "goes_lat_lon_projection" in self.nc: return self._get_areadef_latlon(key) - raise ValueError('Unsupported projection found in the dataset') + raise ValueError("Unsupported projection found in the dataset") def _get_areadef_latlon(self, key): """Get the area definition of the data at hand.""" projection = self.nc["goes_lat_lon_projection"] - a = projection.attrs['semi_major_axis'] - b = projection.attrs['semi_minor_axis'] - fi = projection.attrs['inverse_flattening'] - pm = projection.attrs['longitude_of_prime_meridian'] + a = projection.attrs["semi_major_axis"] + b = projection.attrs["semi_minor_axis"] + fi = projection.attrs["inverse_flattening"] + pm = projection.attrs["longitude_of_prime_meridian"] proj_ext = self.nc["geospatial_lat_lon_extent"] - w_lon = proj_ext.attrs['geospatial_westbound_longitude'] - e_lon = proj_ext.attrs['geospatial_eastbound_longitude'] - n_lat = proj_ext.attrs['geospatial_northbound_latitude'] - s_lat = proj_ext.attrs['geospatial_southbound_latitude'] + w_lon = proj_ext.attrs["geospatial_westbound_longitude"] + e_lon = proj_ext.attrs["geospatial_eastbound_longitude"] + n_lat = proj_ext.attrs["geospatial_northbound_latitude"] + s_lat = proj_ext.attrs["geospatial_southbound_latitude"] - lat_0 = proj_ext.attrs['geospatial_lat_center'] - lon_0 = proj_ext.attrs['geospatial_lon_center'] + lat_0 = proj_ext.attrs["geospatial_lat_center"] + lon_0 = proj_ext.attrs["geospatial_lon_center"] area_extent = (w_lon, s_lat, e_lon, n_lat) - proj_dict = {'proj': 'latlong', - 'lon_0': float(lon_0), - 'lat_0': float(lat_0), - 'a': float(a), - 'b': float(b), - 'fi': float(fi), - 'pm': float(pm)} + proj_dict = {"proj": "latlong", + "lon_0": float(lon_0), + "lat_0": float(lat_0), + "a": float(a), + "b": float(b), + "fi": float(fi), + "pm": float(pm)} ll_area_def = geometry.AreaDefinition( - self.nc.attrs.get('orbital_slot', 
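# Worked example of the "_Unsigned" handling in _adjust_data above: per
# the GOES-R PUG section referenced in the comment, signed NetCDF
# integers must be reinterpreted as unsigned before scaling. Values are
# hypothetical.
import numpy as np

data = np.array([-1, 0, 1], dtype=np.int16)        # stored as signed
unsigned = data.astype("u%s" % data.dtype.itemsize)
# array([65535,     0,     1], dtype=uint16)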
'abi_geos'), - self.nc.attrs.get('spatial_resolution', 'ABI file area'), - 'abi_latlon', + self.nc.attrs.get("orbital_slot", "abi_geos"), + self.nc.attrs.get("spatial_resolution", "ABI file area"), + "abi_latlon", proj_dict, self.ncols, self.nlines, @@ -231,17 +231,17 @@ def _get_areadef_fixedgrid(self, key): """ projection = self.nc["goes_imager_projection"] - a = projection.attrs['semi_major_axis'] - b = projection.attrs['semi_minor_axis'] - h = projection.attrs['perspective_point_height'] + a = projection.attrs["semi_major_axis"] + b = projection.attrs["semi_minor_axis"] + h = projection.attrs["perspective_point_height"] - lon_0 = projection.attrs['longitude_of_projection_origin'] - sweep_axis = projection.attrs['sweep_angle_axis'][0] + lon_0 = projection.attrs["longitude_of_projection_origin"] + sweep_axis = projection.attrs["sweep_angle_axis"][0] # compute x and y extents in m h = np.float64(h) - x = self['x'] - y = self['y'] + x = self["x"] + y = self["y"] x_l = x[0].values x_r = x[-1].values y_l = y[-1].values @@ -251,18 +251,18 @@ def _get_areadef_fixedgrid(self, key): area_extent = (x_l - x_half, y_l - y_half, x_r + x_half, y_u + y_half) area_extent = tuple(np.round(h * val, 6) for val in area_extent) - proj_dict = {'proj': 'geos', - 'lon_0': float(lon_0), - 'a': float(a), - 'b': float(b), - 'h': h, - 'units': 'm', - 'sweep': sweep_axis} + proj_dict = {"proj": "geos", + "lon_0": float(lon_0), + "a": float(a), + "b": float(b), + "h": h, + "units": "m", + "sweep": sweep_axis} fg_area_def = geometry.AreaDefinition( - self.nc.attrs.get('orbital_slot', 'abi_geos'), - self.nc.attrs.get('spatial_resolution', 'ABI file area'), - 'abi_fixed_grid', + self.nc.attrs.get("orbital_slot", "abi_geos"), + self.nc.attrs.get("spatial_resolution", "ABI file area"), + "abi_fixed_grid", proj_dict, self.ncols, self.nlines, @@ -273,19 +273,19 @@ def _get_areadef_fixedgrid(self, key): @property def start_time(self): """Start time of the current file's observations.""" - return datetime.strptime(self.nc.attrs['time_coverage_start'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["time_coverage_start"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """End time of the current file's observations.""" - return datetime.strptime(self.nc.attrs['time_coverage_end'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["time_coverage_end"], "%Y-%m-%dT%H:%M:%S.%fZ") def spatial_resolution_to_number(self): """Convert the 'spatial_resolution' global attribute to meters.""" - res = self.nc.attrs['spatial_resolution'].split(' ')[0] - if res.endswith('km'): + res = self.nc.attrs["spatial_resolution"].split(" ")[0] + if res.endswith("km"): res = int(float(res[:-2]) * 1000) - elif res.endswith('m'): + elif res.endswith("m"): res = int(res[:-1]) else: raise ValueError("Unexpected 'spatial_resolution' attribute '{}'".format(res)) diff --git a/satpy/readers/abi_l1b.py b/satpy/readers/abi_l1b.py index dafdc8a373..3a22397cde 100644 --- a/satpy/readers/abi_l1b.py +++ b/satpy/readers/abi_l1b.py @@ -44,70 +44,70 @@ def __init__(self, filename, filename_info, filetype_info, clip_negative_radianc def get_dataset(self, key, info): """Load a dataset.""" - logger.debug('Reading in get_dataset %s.', key['name']) + logger.debug("Reading in get_dataset %s.", key["name"]) # For raw cal, don't apply scale and offset, return raw file counts - if key['calibration'] == 'counts': - radiances = self.nc['Rad'].copy() + if key["calibration"] == "counts": + radiances = self.nc["Rad"].copy() else: - 
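# Illustration (with made-up numbers) of the fixed-grid extent step in
# _get_areadef_fixedgrid above: ABI's x/y coordinates are scanning
# angles in radians, so padding by half a pixel and scaling by the
# perspective height h turns the angular extent into metres.
import numpy as np

h = 35786023.0                                # perspective height, metres
x = np.linspace(-0.151844, 0.151844, 2500)    # hypothetical scan angles
x_half = (x[1] - x[0]) / 2.0                  # half a pixel, radians
right_edge_m = np.round(h * (x[-1] + x_half), 6)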
radiances = self['Rad'] + radiances = self["Rad"] # mapping of calibration types to calibration functions cal_dictionary = { - 'reflectance': self._vis_calibrate, - 'brightness_temperature': self._ir_calibrate, - 'radiance': self._rad_calibrate, - 'counts': self._raw_calibrate, + "reflectance": self._vis_calibrate, + "brightness_temperature": self._ir_calibrate, + "radiance": self._rad_calibrate, + "counts": self._raw_calibrate, } try: - func = cal_dictionary[key['calibration']] + func = cal_dictionary[key["calibration"]] res = func(radiances) except KeyError: - raise ValueError("Unknown calibration '{}'".format(key['calibration'])) + raise ValueError("Unknown calibration '{}'".format(key["calibration"])) # convert to satpy standard units - if res.attrs['units'] == '1' and key['calibration'] != 'counts': + if res.attrs["units"] == "1" and key["calibration"] != "counts": res *= 100 - res.attrs['units'] = '%' + res.attrs["units"] = "%" self._adjust_attrs(res, key) return res def _adjust_attrs(self, data, key): - data.attrs.update({'platform_name': self.platform_name, - 'sensor': self.sensor}) + data.attrs.update({"platform_name": self.platform_name, + "sensor": self.sensor}) # Add orbital parameters projection = self.nc["goes_imager_projection"] - data.attrs['orbital_parameters'] = { - 'projection_longitude': float(projection.attrs['longitude_of_projection_origin']), - 'projection_latitude': float(projection.attrs['latitude_of_projection_origin']), - 'projection_altitude': float(projection.attrs['perspective_point_height']), - 'satellite_nominal_latitude': float(self['nominal_satellite_subpoint_lat']), - 'satellite_nominal_longitude': float(self['nominal_satellite_subpoint_lon']), - 'satellite_nominal_altitude': float(self['nominal_satellite_height']) * 1000., - 'yaw_flip': bool(self['yaw_flip_flag']), + data.attrs["orbital_parameters"] = { + "projection_longitude": float(projection.attrs["longitude_of_projection_origin"]), + "projection_latitude": float(projection.attrs["latitude_of_projection_origin"]), + "projection_altitude": float(projection.attrs["perspective_point_height"]), + "satellite_nominal_latitude": float(self["nominal_satellite_subpoint_lat"]), + "satellite_nominal_longitude": float(self["nominal_satellite_subpoint_lon"]), + "satellite_nominal_altitude": float(self["nominal_satellite_height"]) * 1000., + "yaw_flip": bool(self["yaw_flip_flag"]), } data.attrs.update(key.to_dict()) # remove attributes that could be confusing later # if calibration type is raw counts, we leave them in - if key['calibration'] != 'counts': - data.attrs.pop('_FillValue', None) - data.attrs.pop('scale_factor', None) - data.attrs.pop('add_offset', None) - data.attrs.pop('_Unsigned', None) - data.attrs.pop('ancillary_variables', None) # Can't currently load DQF + if key["calibration"] != "counts": + data.attrs.pop("_FillValue", None) + data.attrs.pop("scale_factor", None) + data.attrs.pop("add_offset", None) + data.attrs.pop("_Unsigned", None) + data.attrs.pop("ancillary_variables", None) # Can't currently load DQF # although we could compute these, we'd have to update in calibration - data.attrs.pop('valid_range', None) + data.attrs.pop("valid_range", None) # add in information from the filename that may be useful to the user - for attr in ('observation_type', 'scene_abbr', 'scan_mode', 'platform_shortname', 'suffix'): + for attr in ("observation_type", "scene_abbr", "scan_mode", "platform_shortname", "suffix"): if attr in self.filename_info: data.attrs[attr] = self.filename_info[attr] # copy global 
attributes to metadata - for attr in ('scene_id', 'orbital_slot', 'instrument_ID', 'production_site', 'timeline_ID'): + for attr in ("scene_id", "orbital_slot", "instrument_ID", "production_site", "timeline_ID"): data.attrs[attr] = self.nc.attrs.get(attr) # only include these if they are present - for attr in ('fusion_args',): + for attr in ("fusion_args",): if attr in self.nc.attrs: data.attrs[attr] = self.nc.attrs[attr] @@ -128,23 +128,23 @@ def _raw_calibrate(self, data): """ res = data res.attrs = data.attrs - res.attrs['units'] = '1' - res.attrs['long_name'] = 'Raw Counts' - res.attrs['standard_name'] = 'counts' + res.attrs["units"] = "1" + res.attrs["long_name"] = "Raw Counts" + res.attrs["standard_name"] = "counts" return res def _vis_calibrate(self, data): """Calibrate visible channels to reflectance.""" - solar_irradiance = self['esun'] + solar_irradiance = self["esun"] esd = self["earth_sun_distance_anomaly_in_AU"].astype(float) factor = np.pi * esd * esd / solar_irradiance res = data * factor res.attrs = data.attrs - res.attrs['units'] = '1' - res.attrs['long_name'] = 'Bidirectional Reflectance' - res.attrs['standard_name'] = 'toa_bidirectional_reflectance' + res.attrs["units"] = "1" + res.attrs["long_name"] = "Bidirectional Reflectance" + res.attrs["standard_name"] = "toa_bidirectional_reflectance" return res def _get_minimum_radiance(self, data): @@ -170,7 +170,7 @@ def _ir_calibrate(self, data): res = (fk2 / np.log(fk1 / data + 1) - bc1) / bc2 res.attrs = data.attrs - res.attrs['units'] = 'K' - res.attrs['long_name'] = 'Brightness Temperature' - res.attrs['standard_name'] = 'toa_brightness_temperature' + res.attrs["units"] = "K" + res.attrs["long_name"] = "Brightness Temperature" + res.attrs["standard_name"] = "toa_brightness_temperature" return res diff --git a/satpy/readers/abi_l2_nc.py b/satpy/readers/abi_l2_nc.py index a152790197..ad87286f32 100644 --- a/satpy/readers/abi_l2_nc.py +++ b/satpy/readers/abi_l2_nc.py @@ -33,10 +33,10 @@ class NC_ABI_L2(NC_ABI_BASE): def get_dataset(self, key, info): """Load a dataset.""" - var = info['file_key'] - if self.filetype_info['file_type'] == 'abi_l2_mcmip': + var = info["file_key"] + if self.filetype_info["file_type"] == "abi_l2_mcmip": var += "_" + key["name"] - LOG.debug('Reading in get_dataset %s.', var) + LOG.debug("Reading in get_dataset %s.", var) variable = self[var] variable.attrs.update(key.to_dict()) self._update_data_arr_with_filename_attrs(variable) @@ -44,32 +44,32 @@ def get_dataset(self, key, info): return variable def _update_data_arr_with_filename_attrs(self, variable): - _units = variable.attrs['units'] if 'units' in variable.attrs else None + _units = variable.attrs["units"] if "units" in variable.attrs else None variable.attrs.update({ - 'platform_name': self.platform_name, - 'sensor': self.sensor, - 'units': _units, - 'orbital_parameters': { - 'satellite_nominal_latitude': float(self.nc['nominal_satellite_subpoint_lat']), - 'satellite_nominal_longitude': float(self.nc['nominal_satellite_subpoint_lon']), - 'satellite_nominal_altitude': float(self.nc['nominal_satellite_height']) * 1000., + "platform_name": self.platform_name, + "sensor": self.sensor, + "units": _units, + "orbital_parameters": { + "satellite_nominal_latitude": float(self.nc["nominal_satellite_subpoint_lat"]), + "satellite_nominal_longitude": float(self.nc["nominal_satellite_subpoint_lon"]), + "satellite_nominal_altitude": float(self.nc["nominal_satellite_height"]) * 1000., }, }) - if 'flag_meanings' in variable.attrs: - 
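# Worked sketch of the visible calibration in _vis_calibrate above:
# radiance L becomes bidirectional reflectance via
# rho = pi * L * d**2 / Esun, with d the Earth-Sun distance in AU; satpy
# then scales the 0-1 fraction to percent. Numbers are placeholders,
# not real ABI coefficients.
import numpy as np

esun = 441.868                         # hypothetical solar irradiance
esd = 0.9833                           # Earth-Sun distance anomaly, AU
radiance = np.array([50.0, 100.0])
reflectance = radiance * (np.pi * esd * esd / esun)  # 0-1 fraction
reflectance_pct = reflectance * 100                  # satpy convention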
variable.attrs['flag_meanings'] = variable.attrs['flag_meanings'].split(' ') + if "flag_meanings" in variable.attrs: + variable.attrs["flag_meanings"] = variable.attrs["flag_meanings"].split(" ") # add in information from the filename that may be useful to the user - for attr in ('scene_abbr', 'scan_mode', 'platform_shortname'): + for attr in ("scene_abbr", "scan_mode", "platform_shortname"): variable.attrs[attr] = self.filename_info.get(attr) # add in information hardcoded in the filetype YAML - for attr in ('observation_type',): + for attr in ("observation_type",): if attr in self.filetype_info: variable.attrs[attr] = self.filetype_info[attr] # copy global attributes to metadata - for attr in ('scene_id', 'orbital_slot', 'instrument_ID', 'production_site', 'timeline_ID'): + for attr in ("scene_id", "orbital_slot", "instrument_ID", "production_site", "timeline_ID"): variable.attrs[attr] = self.nc.attrs.get(attr) @staticmethod @@ -77,13 +77,13 @@ def _remove_problem_attrs(variable): # remove attributes that could be confusing later if not np.issubdtype(variable.dtype, np.integer): # integer fields keep the _FillValue - variable.attrs.pop('_FillValue', None) - variable.attrs.pop('scale_factor', None) - variable.attrs.pop('add_offset', None) - variable.attrs.pop('valid_range', None) - variable.attrs.pop('_Unsigned', None) - variable.attrs.pop('valid_range', None) - variable.attrs.pop('ancillary_variables', None) # Can't currently load DQF + variable.attrs.pop("_FillValue", None) + variable.attrs.pop("scale_factor", None) + variable.attrs.pop("add_offset", None) + variable.attrs.pop("valid_range", None) + variable.attrs.pop("_Unsigned", None) + variable.attrs.pop("valid_range", None) + variable.attrs.pop("ancillary_variables", None) # Can't currently load DQF def available_datasets(self, configured_datasets=None): """Add resolution to configured datasets.""" @@ -92,12 +92,12 @@ def available_datasets(self, configured_datasets=None): # don't override what they've done if is_avail is not None: yield is_avail, ds_info - matches = self.file_type_matches(ds_info['file_type']) + matches = self.file_type_matches(ds_info["file_type"]) if matches: # we have this dataset resolution = self.spatial_resolution_to_number() new_info = ds_info.copy() - new_info.setdefault('resolution', resolution) + new_info.setdefault("resolution", resolution) yield True, ds_info elif is_avail is None: # we don't know what to do with this diff --git a/satpy/readers/acspo.py b/satpy/readers/acspo.py index 14c8038b63..8a8262af33 100644 --- a/satpy/readers/acspo.py +++ b/satpy/readers/acspo.py @@ -33,9 +33,9 @@ ROWS_PER_SCAN = { - 'modis': 10, - 'viirs': 16, - 'avhrr': None, + "modis": 10, + "viirs": 16, + "avhrr": None, } @@ -45,7 +45,7 @@ class ACSPOFileHandler(NetCDF4FileHandler): @property def platform_name(self): """Get satellite name for this file's data.""" - res = self['/attr/platform'] + res = self["/attr/platform"] if isinstance(res, np.ndarray): return str(res.astype(str)) return res @@ -53,7 +53,7 @@ def platform_name(self): @property def sensor_name(self): """Get instrument name for this file's data.""" - res = self['/attr/sensor'] + res = self["/attr/sensor"] if isinstance(res, np.ndarray): res = str(res.astype(str)) return res.lower() @@ -69,12 +69,12 @@ def get_shape(self, ds_id, ds_info): tuple: (rows, cols) """ - var_path = ds_info.get('file_key', '{}'.format(ds_id['name'])) - if var_path + '/shape' not in self: + var_path = ds_info.get("file_key", "{}".format(ds_id["name"])) + if var_path + "/shape" not in 
self: # loading a scalar value shape = 1 else: - shape = self[var_path + '/shape'] + shape = self[var_path + "/shape"] if len(shape) == 3: if shape[0] != 1: raise ValueError("Not sure how to load 3D Dataset with more than 1 time") @@ -88,49 +88,49 @@ def _parse_datetime(datestr): @property def start_time(self): """Get first observation time of data.""" - return self._parse_datetime(self['/attr/time_coverage_start']) + return self._parse_datetime(self["/attr/time_coverage_start"]) @property def end_time(self): """Get final observation time of data.""" - return self._parse_datetime(self['/attr/time_coverage_end']) + return self._parse_datetime(self["/attr/time_coverage_end"]) def get_metadata(self, dataset_id, ds_info): """Collect various metadata about the specified dataset.""" - var_path = ds_info.get('file_key', '{}'.format(dataset_id['name'])) + var_path = ds_info.get("file_key", "{}".format(dataset_id["name"])) shape = self.get_shape(dataset_id, ds_info) - units = self[var_path + '/attr/units'] - info = getattr(self[var_path], 'attrs', {}) - standard_name = self[var_path + '/attr/standard_name'] - resolution = float(self['/attr/spatial_resolution'].split(' ')[0]) + units = self[var_path + "/attr/units"] + info = getattr(self[var_path], "attrs", {}) + standard_name = self[var_path + "/attr/standard_name"] + resolution = float(self["/attr/spatial_resolution"].split(" ")[0]) rows_per_scan = ROWS_PER_SCAN.get(self.sensor_name) or 0 info.update(dataset_id.to_dict()) info.update({ - 'shape': shape, - 'units': units, - 'platform_name': self.platform_name, - 'sensor': self.sensor_name, - 'standard_name': standard_name, - 'resolution': resolution, - 'rows_per_scan': rows_per_scan, - 'long_name': self.get(var_path + '/attr/long_name'), - 'comment': self.get(var_path + '/attr/comment'), + "shape": shape, + "units": units, + "platform_name": self.platform_name, + "sensor": self.sensor_name, + "standard_name": standard_name, + "resolution": resolution, + "rows_per_scan": rows_per_scan, + "long_name": self.get(var_path + "/attr/long_name"), + "comment": self.get(var_path + "/attr/comment"), }) return info def get_dataset(self, dataset_id, ds_info): """Load data array and metadata from file on disk.""" - var_path = ds_info.get('file_key', '{}'.format(dataset_id['name'])) + var_path = ds_info.get("file_key", "{}".format(dataset_id["name"])) metadata = self.get_metadata(dataset_id, ds_info) - shape = metadata['shape'] - file_shape = self[var_path + '/shape'] - metadata['shape'] = shape + shape = metadata["shape"] + file_shape = self[var_path + "/shape"] + metadata["shape"] = shape - valid_min = self[var_path + '/attr/valid_min'] - valid_max = self[var_path + '/attr/valid_max'] + valid_min = self[var_path + "/attr/valid_min"] + valid_max = self[var_path + "/attr/valid_max"] # no need to check fill value since we are using valid min/max - scale_factor = self.get(var_path + '/attr/scale_factor') - add_offset = self.get(var_path + '/attr/add_offset') + scale_factor = self.get(var_path + "/attr/scale_factor") + add_offset = self.get(var_path + "/attr/add_offset") data = self[var_path] data = data.rename({"ni": "x", "nj": "y"}) @@ -141,15 +141,15 @@ def get_dataset(self, dataset_id, ds_info): if scale_factor is not None: data = data * scale_factor + add_offset - if ds_info.get('cloud_clear', False): + if ds_info.get("cloud_clear", False): # clear-sky if bit 15-16 are 00 - clear_sky_mask = (self['l2p_flags'][0] & 0b1100000000000000) != 0 + clear_sky_mask = (self["l2p_flags"][0] & 0b1100000000000000) != 0 
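# Worked example of the ACSPO cloud masking just below in get_dataset:
# bits 15-16 of l2p_flags are zero for clear-sky pixels, so any set bit
# under the 0b11000... mask flags the pixel as not clear. Flag values
# here are hypothetical.
import numpy as np

l2p_flags = np.array([0b0000000000000000,   # clear
                      0b0100000000000000,   # not clear
                      0b1000000000000000])  # not clear
not_clear = (l2p_flags & 0b1100000000000000) != 0
# array([False,  True,  True])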
clear_sky_mask = clear_sky_mask.rename({"ni": "x", "nj": "y"}) data = data.where(~clear_sky_mask) data.attrs.update(metadata) # Remove these attributes since they are no longer valid and can cause invalid value filling. - data.attrs.pop('_FillValue', None) - data.attrs.pop('valid_max', None) - data.attrs.pop('valid_min', None) + data.attrs.pop("_FillValue", None) + data.attrs.pop("valid_max", None) + data.attrs.pop("valid_min", None) return data diff --git a/satpy/readers/agri_l1.py b/satpy/readers/agri_l1.py index 9612d016cd..381880cd5c 100644 --- a/satpy/readers/agri_l1.py +++ b/satpy/readers/agri_l1.py @@ -36,21 +36,21 @@ class HDF_AGRI_L1(FY4Base): def __init__(self, filename, filename_info, filetype_info): """Init filehandler.""" super(HDF_AGRI_L1, self).__init__(filename, filename_info, filetype_info) - self.sensor = 'AGRI' + self.sensor = "AGRI" def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" - ds_name = dataset_id['name'] - logger.debug('Reading in get_dataset %s.', ds_name) - file_key = ds_info.get('file_key', ds_name) - if self.PLATFORM_ID == 'FY-4B': + ds_name = dataset_id["name"] + logger.debug("Reading in get_dataset %s.", ds_name) + file_key = ds_info.get("file_key", ds_name) + if self.PLATFORM_ID == "FY-4B": if self.CHANS_ID in file_key: - file_key = f'Data/{file_key}' + file_key = f"Data/{file_key}" elif self.SUN_ID in file_key or self.SAT_ID in file_key: - file_key = f'Navigation/{file_key}' + file_key = f"Navigation/{file_key}" data = self.get(file_key) if data.ndim >= 2: - data = data.rename({data.dims[-2]: 'y', data.dims[-1]: 'x'}) + data = data.rename({data.dims[-2]: "y", data.dims[-1]: "x"}) data = self.calibrate(data, ds_info, ds_name, file_key) self.adjust_attrs(data, ds_info) @@ -59,15 +59,15 @@ def get_dataset(self, dataset_id, ds_info): def adjust_attrs(self, data, ds_info): """Adjust the attrs of the data.""" - satname = self.PLATFORM_NAMES.get(self['/attr/Satellite Name'], self['/attr/Satellite Name']) - data.attrs.update({'platform_name': satname, - 'sensor': self['/attr/Sensor Identification Code'].lower(), - 'orbital_parameters': { - 'satellite_nominal_latitude': self['/attr/NOMCenterLat'].item(), - 'satellite_nominal_longitude': self['/attr/NOMCenterLon'].item(), - 'satellite_nominal_altitude': self['/attr/NOMSatHeight'].item()}}) + satname = self.PLATFORM_NAMES.get(self["/attr/Satellite Name"], self["/attr/Satellite Name"]) + data.attrs.update({"platform_name": satname, + "sensor": self["/attr/Sensor Identification Code"].lower(), + "orbital_parameters": { + "satellite_nominal_latitude": self["/attr/NOMCenterLat"].item(), + "satellite_nominal_longitude": self["/attr/NOMCenterLon"].item(), + "satellite_nominal_altitude": self["/attr/NOMSatHeight"].item()}}) data.attrs.update(ds_info) # remove attributes that could be confusing later - data.attrs.pop('FillValue', None) - data.attrs.pop('Intercept', None) - data.attrs.pop('Slope', None) + data.attrs.pop("FillValue", None) + data.attrs.pop("Intercept", None) + data.attrs.pop("Slope", None) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index e06f7ebc50..681885dd51 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -84,7 +84,7 @@ "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16") -logger = logging.getLogger('ahi_hsd') +logger = logging.getLogger("ahi_hsd") # Basic information block: _BASIC_INFO_TYPE = np.dtype([("hblock_number", "u1"), @@ -350,14 +350,14 @@ class AHIHSDFileHandler(BaseFileHandler): """ def __init__(self, filename, 
filename_info, filetype_info, - mask_space=True, calib_mode='update', + mask_space=True, calib_mode="update", user_calibration=None, round_actual_position=True): """Initialize the reader.""" super(AHIHSDFileHandler, self).__init__(filename, filename_info, filetype_info) self.is_zipped = False - self._unzipped = unzip_file(self.filename, prefix=str(filename_info['segment']).zfill(2)) + self._unzipped = unzip_file(self.filename, prefix=str(filename_info["segment"]).zfill(2)) # Assume file is not zipped if self._unzipped: # But if it is, set the filename to point to unzipped temp file @@ -365,14 +365,14 @@ def __init__(self, filename, filename_info, filetype_info, self.filename = self._unzipped self.channels = dict([(i, None) for i in AHI_CHANNEL_NAMES]) - self.units = dict([(i, 'counts') for i in AHI_CHANNEL_NAMES]) + self.units = dict([(i, "counts") for i in AHI_CHANNEL_NAMES]) self._data = dict([(i, None) for i in AHI_CHANNEL_NAMES]) self._header = dict([(i, None) for i in AHI_CHANNEL_NAMES]) self.lons = None self.lats = None - self.segment_number = filename_info['segment'] - self.total_segments = filename_info['total_segments'] + self.segment_number = filename_info["segment"] + self.total_segments = filename_info["total_segments"] with open(self.filename) as fd: self.basic_info = np.fromfile(fd, @@ -387,14 +387,14 @@ def __init__(self, filename, filename_info, filetype_info, self.nav_info = np.fromfile(fd, dtype=_NAV_INFO_TYPE, count=1)[0] - self.platform_name = np2str(self.basic_info['satellite']) - self.observation_area = np2str(self.basic_info['observation_area']) - self.sensor = 'ahi' + self.platform_name = np2str(self.basic_info["satellite"]) + self.observation_area = np2str(self.basic_info["observation_area"]) + self.sensor = "ahi" self.mask_space = mask_space - self.band_name = filetype_info['file_type'][4:].upper() - calib_mode_choices = ('NOMINAL', 'UPDATE') + self.band_name = filetype_info["file_type"][4:].upper() + calib_mode_choices = ("NOMINAL", "UPDATE") if calib_mode.upper() not in calib_mode_choices: - raise ValueError('Invalid calibration mode: {}. Choose one of {}'.format( + raise ValueError("Invalid calibration mode: {}. Choose one of {}".format( calib_mode, calib_mode_choices)) self.calib_mode = calib_mode.upper() @@ -419,12 +419,12 @@ def end_time(self): @property def observation_start_time(self): """Get the observation start time.""" - return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info['observation_start_time'])) + return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_start_time"])) @property def observation_end_time(self): """Get the observation end time.""" - return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info['observation_end_time'])) + return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_end_time"])) @property def nominal_start_time(self): @@ -456,7 +456,7 @@ def _modify_observation_time_for_nominal(self, observation_time): 2.5 minutes apart, then the result should be 13:32:30. 
""" - timeline = "{:04d}".format(self.basic_info['observation_timeline'][0]) + timeline = "{:04d}".format(self.basic_info["observation_timeline"][0]) if not self._is_valid_timeline(timeline): warnings.warn( "Observation timeline is fill value, not rounding observation time.", @@ -464,10 +464,10 @@ def _modify_observation_time_for_nominal(self, observation_time): ) return observation_time - if self.observation_area == 'FLDK': + if self.observation_area == "FLDK": dt = 0 else: - observation_frequency_seconds = {'JP': 150, 'R3': 150, 'R4': 30, 'R5': 30}[self.observation_area[:2]] + observation_frequency_seconds = {"JP": 150, "R3": 150, "R4": 30, "R5": 30}[self.observation_area[:2]] dt = observation_frequency_seconds * (int(self.observation_area[2:]) - 1) return observation_time.replace( @@ -490,25 +490,25 @@ def get_area_def(self, dsid): def _get_area_def(self): pdict = {} - pdict['cfac'] = np.uint32(self.proj_info['CFAC']) - pdict['lfac'] = np.uint32(self.proj_info['LFAC']) - pdict['coff'] = np.float32(self.proj_info['COFF']) - pdict['loff'] = -np.float32(self.proj_info['LOFF']) + 1 - pdict['a'] = float(self.proj_info['earth_equatorial_radius'] * 1000) - pdict['h'] = float(self.proj_info['distance_from_earth_center'] * 1000 - pdict['a']) - pdict['b'] = float(self.proj_info['earth_polar_radius'] * 1000) - pdict['ssp_lon'] = float(self.proj_info['sub_lon']) - pdict['nlines'] = int(self.data_info['number_of_lines']) - pdict['ncols'] = int(self.data_info['number_of_columns']) - pdict['scandir'] = 'N2S' - - pdict['loff'] = pdict['loff'] + (self.segment_number * pdict['nlines']) + pdict["cfac"] = np.uint32(self.proj_info["CFAC"]) + pdict["lfac"] = np.uint32(self.proj_info["LFAC"]) + pdict["coff"] = np.float32(self.proj_info["COFF"]) + pdict["loff"] = -np.float32(self.proj_info["LOFF"]) + 1 + pdict["a"] = float(self.proj_info["earth_equatorial_radius"] * 1000) + pdict["h"] = float(self.proj_info["distance_from_earth_center"] * 1000 - pdict["a"]) + pdict["b"] = float(self.proj_info["earth_polar_radius"] * 1000) + pdict["ssp_lon"] = float(self.proj_info["sub_lon"]) + pdict["nlines"] = int(self.data_info["number_of_lines"]) + pdict["ncols"] = int(self.data_info["number_of_columns"]) + pdict["scandir"] = "N2S" + + pdict["loff"] = pdict["loff"] + (self.segment_number * pdict["nlines"]) aex = get_area_extent(pdict) - pdict['a_name'] = self.observation_area - pdict['a_desc'] = "AHI {} area".format(self.observation_area) - pdict['p_id'] = f'geosh{self.basic_info["satellite"][0].decode()[-1]}' + pdict["a_name"] = self.observation_area + pdict["a_desc"] = "AHI {} area".format(self.observation_area) + pdict["p_id"] = f'geosh{self.basic_info["satellite"][0].decode()[-1]}' return get_area_definition(pdict, aex) @@ -526,112 +526,112 @@ def _read_header(self, fp_): header = {} fpos = 0 - header['block1'] = np.fromfile( + header["block1"] = np.fromfile( fp_, dtype=_BASIC_INFO_TYPE, count=1) - fpos = fpos + int(header['block1']['blocklength']) - self._check_fpos(fp_, fpos, 0, 'block1') + fpos = fpos + int(header["block1"]["blocklength"]) + self._check_fpos(fp_, fpos, 0, "block1") fp_.seek(fpos, 0) header["block2"] = np.fromfile(fp_, dtype=_DATA_INFO_TYPE, count=1) - fpos = fpos + int(header['block2']['blocklength']) - self._check_fpos(fp_, fpos, 0, 'block2') + fpos = fpos + int(header["block2"]["blocklength"]) + self._check_fpos(fp_, fpos, 0, "block2") fp_.seek(fpos, 0) header["block3"] = np.fromfile(fp_, dtype=_PROJ_INFO_TYPE, count=1) - fpos = fpos + int(header['block3']['blocklength']) - self._check_fpos(fp_, 
fpos, 0, 'block3') + fpos = fpos + int(header["block3"]["blocklength"]) + self._check_fpos(fp_, fpos, 0, "block3") fp_.seek(fpos, 0) header["block4"] = np.fromfile(fp_, dtype=_NAV_INFO_TYPE, count=1) - fpos = fpos + int(header['block4']['blocklength']) - self._check_fpos(fp_, fpos, 0, 'block4') + fpos = fpos + int(header["block4"]["blocklength"]) + self._check_fpos(fp_, fpos, 0, "block4") fp_.seek(fpos, 0) header["block5"] = np.fromfile(fp_, dtype=_CAL_INFO_TYPE, count=1) logger.debug("Band number = " + - str(header["block5"]['band_number'][0])) - logger.debug('Time_interval: %s - %s', + str(header["block5"]["band_number"][0])) + logger.debug("Time_interval: %s - %s", str(self.start_time), str(self.end_time)) - band_number = header["block5"]['band_number'][0] + band_number = header["block5"]["band_number"][0] if band_number < 7: cal = np.fromfile(fp_, dtype=_VISCAL_INFO_TYPE, count=1) else: cal = np.fromfile(fp_, dtype=_IRCAL_INFO_TYPE, count=1) - fpos = fpos + int(header['block5']['blocklength']) - self._check_fpos(fp_, fpos, 0, 'block5') + fpos = fpos + int(header["block5"]["blocklength"]) + self._check_fpos(fp_, fpos, 0, "block5") fp_.seek(fpos, 0) - header['calibration'] = cal + header["calibration"] = cal header["block6"] = np.fromfile( fp_, dtype=_INTER_CALIBRATION_INFO_TYPE, count=1) - fpos = fpos + int(header['block6']['blocklength']) - self._check_fpos(fp_, fpos, 0, 'block6') + fpos = fpos + int(header["block6"]["blocklength"]) + self._check_fpos(fp_, fpos, 0, "block6") fp_.seek(fpos, 0) header["block7"] = np.fromfile( fp_, dtype=_SEGMENT_INFO_TYPE, count=1) - fpos = fpos + int(header['block7']['blocklength']) - self._check_fpos(fp_, fpos, 0, 'block7') + fpos = fpos + int(header["block7"]["blocklength"]) + self._check_fpos(fp_, fpos, 0, "block7") fp_.seek(fpos, 0) header["block8"] = np.fromfile( fp_, dtype=_NAVIGATION_CORRECTION_INFO_TYPE, count=1) # 8 The navigation corrections: - ncorrs = header["block8"]['numof_correction_info_data'][0] + ncorrs = header["block8"]["numof_correction_info_data"][0] corrections = [] for _i in range(ncorrs): corrections.append(np.fromfile(fp_, dtype=_NAVIGATION_CORRECTION_SUBINFO_TYPE, count=1)) - fpos = fpos + int(header['block8']['blocklength']) - self._check_fpos(fp_, fpos, 40, 'block8') + fpos = fpos + int(header["block8"]["blocklength"]) + self._check_fpos(fp_, fpos, 40, "block8") fp_.seek(fpos, 0) - header['navigation_corrections'] = corrections + header["navigation_corrections"] = corrections header["block9"] = np.fromfile(fp_, dtype=_OBSERVATION_TIME_INFO_TYPE, count=1) - numobstimes = header["block9"]['number_of_observation_times'][0] + numobstimes = header["block9"]["number_of_observation_times"][0] lines_and_times = [] for _i in range(numobstimes): lines_and_times.append(np.fromfile(fp_, dtype=_OBSERVATION_LINE_TIME_INFO_TYPE, count=1)) - header['observation_time_information'] = lines_and_times - fpos = fpos + int(header['block9']['blocklength']) - self._check_fpos(fp_, fpos, 40, 'block9') + header["observation_time_information"] = lines_and_times + fpos = fpos + int(header["block9"]["blocklength"]) + self._check_fpos(fp_, fpos, 40, "block9") fp_.seek(fpos, 0) header["block10"] = np.fromfile(fp_, dtype=_ERROR_INFO_TYPE, count=1) num_err_info_data = header["block10"][ - 'number_of_error_info_data'][0] + "number_of_error_info_data"][0] err_info_data = [] for _i in range(num_err_info_data): err_info_data.append(np.fromfile(fp_, dtype=_ERROR_LINE_INFO_TYPE, count=1)) - header['error_information_data'] = err_info_data - fpos = fpos + 
int(header['block10']['blocklength'])
-        self._check_fpos(fp_, fpos, 40, 'block10')
+        header["error_information_data"] = err_info_data
+        fpos = fpos + int(header["block10"]["blocklength"])
+        self._check_fpos(fp_, fpos, 40, "block10")
         fp_.seek(fpos, 0)
 
         header["block11"] = np.fromfile(fp_, dtype=_SPARE_TYPE, count=1)
-        fpos = fpos + int(header['block11']['blocklength'])
-        self._check_fpos(fp_, fpos, 0, 'block11')
+        fpos = fpos + int(header["block11"]["blocklength"])
+        self._check_fpos(fp_, fpos, 0, "block11")
         fp_.seek(fpos, 0)
 
         return header
 
     def _read_data(self, fp_, header):
         """Read data block."""
-        nlines = int(header["block2"]['number_of_lines'][0])
-        ncols = int(header["block2"]['number_of_columns'][0])
+        nlines = int(header["block2"]["number_of_lines"][0])
+        ncols = int(header["block2"]["number_of_columns"][0])
         chunks = da.core.normalize_chunks("auto",
                                           shape=(nlines, ncols),
                                           limit=get_chunk_size_limit(),
-                                          dtype='f8',
+                                          dtype="f8",
                                           previous_chunks=(550, 550))
         return da.from_array(np.memmap(self.filename,
                                        offset=fp_.tell(),
-                                       dtype='<u2',
+                                       dtype="<u2",
                                        shape=(nlines, ncols),
-                                       mode='r'),
+                                       mode="r"),
                              chunks=chunks)

[...]

         # no radiance -> no temperature
         data = da.where(data == 0, np.float32(np.nan), data)
 
-        cwl = self._header['block5']["central_wave_length"][0] * 1e-6
-        c__ = self._header['calibration']["speed_of_light"][0]
-        h__ = self._header['calibration']["planck_constant"][0]
-        k__ = self._header['calibration']["boltzmann_constant"][0]
+        cwl = self._header["block5"]["central_wave_length"][0] * 1e-6
+        c__ = self._header["calibration"]["speed_of_light"][0]
+        h__ = self._header["calibration"]["planck_constant"][0]
+        k__ = self._header["calibration"]["boltzmann_constant"][0]
 
         a__ = (h__ * c__) / (k__ * cwl)
         b__ = ((2 * h__ * c__ ** 2) / (data * 1.0e6 * cwl ** 5)) + 1
         Te_ = a__ / da.log(b__)
 
-        c0_ = self._header['calibration']["c0_rad2tb_conversion"][0]
-        c1_ = self._header['calibration']["c1_rad2tb_conversion"][0]
-        c2_ = self._header['calibration']["c2_rad2tb_conversion"][0]
+        c0_ = self._header["calibration"]["c0_rad2tb_conversion"][0]
+        c1_ = self._header["calibration"]["c1_rad2tb_conversion"][0]
+        c2_ = self._header["calibration"]["c2_rad2tb_conversion"][0]
 
         return (c0_ + c1_ * Te_ + c2_ * Te_ ** 2).clip(0)
diff --git a/satpy/readers/ahi_l1b_gridded_bin.py b/satpy/readers/ahi_l1b_gridded_bin.py
index 0270015950..33289aee11 100644
--- a/satpy/readers/ahi_l1b_gridded_bin.py
+++ b/satpy/readers/ahi_l1b_gridded_bin.py
@@ -47,32 +47,32 @@
 CHUNK_SIZE = get_legacy_chunk_size()
 
 # Hardcoded address of the reflectance and BT look-up tables
-AHI_REMOTE_LUTS = 'http://www.cr.chiba-u.jp/databases/GEO/H8_9/FD/count2tbb_v102.tgz'
+AHI_REMOTE_LUTS = "http://www.cr.chiba-u.jp/databases/GEO/H8_9/FD/count2tbb_v102.tgz"
 
 # Full disk image sizes for each spatial resolution
-AHI_FULLDISK_SIZES = {0.005: {'x_size': 24000,
-                              'y_size': 24000},
-                      0.01: {'x_size': 12000,
-                             'y_size': 12000},
-                      0.02: {'x_size': 6000,
-                             'y_size': 6000}}
+AHI_FULLDISK_SIZES = {0.005: {"x_size": 24000,
+                              "y_size": 24000},
+                      0.01: {"x_size": 12000,
+                             "y_size": 12000},
+                      0.02: {"x_size": 6000,
+                             "y_size": 6000}}
 
 # Geographic extent of the full disk area in degrees
 AHI_FULLDISK_EXTENT = [85., -60., 205., 60.]
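# A minimal sketch of how the full-disk constants above combine into an area
# definition (mirroring get_area_def further down): the extent divided by the
# grid size gives the per-pixel resolution, and an EPSG:4326 AreaDefinition
# describes the equirectangular grid. The area name and description strings
# are illustrative only.
from pyresample import geometry

extent = [85., -60., 205., 60.]           # lon_min, lat_min, lon_max, lat_max
x_size, y_size = 12000, 12000             # the 0.01-degree channels
res_x = (extent[2] - extent[0]) / x_size  # -> 0.01 degrees per pixel
res_y = (extent[3] - extent[1]) / y_size  # -> 0.01 degrees per pixel
area = geometry.AreaDefinition("gridded_himawari", "A gridded Himawari area",
                               "longlat", "EPSG:4326", x_size, y_size, extent)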
 # Resolutions of each channel type
-AHI_CHANNEL_RES = {'vis': 0.01,
-                   'ext': 0.005,
-                   'sir': 0.02,
-                   'tir': 0.02}
+AHI_CHANNEL_RES = {"vis": 0.01,
+                   "ext": 0.005,
+                   "sir": 0.02,
+                   "tir": 0.02}
 
 # List of LUT filenames
-AHI_LUT_NAMES = ['ext.01', 'vis.01', 'vis.02', 'vis.03',
-                 'sir.01', 'sir.02', 'tir.01', 'tir.02',
-                 'tir.03', 'tir.04', 'tir.05', 'tir.06',
-                 'tir.07', 'tir.08', 'tir.09', 'tir.10']
+AHI_LUT_NAMES = ["ext.01", "vis.01", "vis.02", "vis.03",
+                 "sir.01", "sir.02", "tir.01", "tir.02",
+                 "tir.03", "tir.04", "tir.05", "tir.06",
+                 "tir.07", "tir.08", "tir.09", "tir.10"]
 
-logger = logging.getLogger('ahi_grid')
+logger = logging.getLogger("ahi_grid")
 
 
 class AHIGriddedFileHandler(BaseFileHandler):
@@ -99,19 +99,19 @@ def __init__(self, filename, filename_info, filetype_info):
         # But if it is, set the filename to point to unzipped temp file
         self.filename = self._unzipped
         # Get the band name, needed for finding area and dimensions
-        self.product_name = filetype_info['file_type']
-        self.areaname = filename_info['area']
-        self.sensor = 'ahi'
+        self.product_name = filetype_info["file_type"]
+        self.areaname = filename_info["area"]
+        self.sensor = "ahi"
         self.res = AHI_CHANNEL_RES[self.product_name[:3]]
-        if self.areaname == 'fld':
-            self.nlines = AHI_FULLDISK_SIZES[self.res]['y_size']
-            self.ncols = AHI_FULLDISK_SIZES[self.res]['x_size']
+        if self.areaname == "fld":
+            self.nlines = AHI_FULLDISK_SIZES[self.res]["y_size"]
+            self.ncols = AHI_FULLDISK_SIZES[self.res]["x_size"]
         else:
             raise NotImplementedError("Only full disk data is supported.")
 
         # Set up directory path for the LUTs
-        app_dirs = AppDirs('ahi_gridded_luts', 'satpy', '1.0.2')
-        self.lut_dir = os.path.expanduser(app_dirs.user_data_dir) + '/'
+        app_dirs = AppDirs("ahi_gridded_luts", "satpy", "1.0.2")
+        self.lut_dir = os.path.expanduser(app_dirs.user_data_dir) + "/"
         self.area = None
 
     def __del__(self):
@@ -149,7 +149,7 @@ def _download_luts(file_name):
         # Set up a connection and download
         with urllib.request.urlopen(AHI_REMOTE_LUTS) as response:  # nosec
-            with open(file_name, 'wb') as out_file:
+            with open(file_name, "wb") as out_file:
                 shutil.copyfileobj(response, out_file)
 
     @staticmethod
@@ -174,14 +174,14 @@ def _get_luts(self):
         logger.info("Download AHI LUTs files and store in directory %s",
                     self.lut_dir)
         tempdir = config["tmp_dir"]
-        fname = os.path.join(tempdir, 'tmp.tgz')
+        fname = os.path.join(tempdir, "tmp.tgz")
         # Download the LUTs
         self._download_luts(fname)
 
         # The file is tarred; untar it and remove the downloaded file
         self._untar_luts(fname, tempdir)
-        lut_dl_dir = os.path.join(tempdir, 'count2tbb_v102/')
+        lut_dl_dir = os.path.join(tempdir, "count2tbb_v102/")
 
         # Loop over the LUTs and copy to the correct location
         for lutfile in AHI_LUT_NAMES:
@@ -198,16 +198,16 @@ def get_area_def(self, dsid):
 
         This is fixed, but not defined in the file. So we must
         generate it ourselves with some assumptions.
""" - if self.areaname == 'fld': + if self.areaname == "fld": area_extent = AHI_FULLDISK_EXTENT else: raise NotImplementedError("Reader only supports full disk data.") - proj_param = 'EPSG:4326' + proj_param = "EPSG:4326" - area = geometry.AreaDefinition('gridded_himawari', - 'A gridded Himawari area', - 'longlat', + area = geometry.AreaDefinition("gridded_himawari", + "A gridded Himawari area", + "longlat", proj_param, self.ncols, self.nlines, @@ -220,9 +220,9 @@ def _read_data(self, fp_): """Read raw binary data from file.""" return da.from_array(np.memmap(self.filename, offset=fp_.tell(), - dtype='>u2', + dtype=">u2", shape=(self.nlines, self.ncols), - mode='r'), + mode="r"), chunks=CHUNK_SIZE) def read_band(self, key, info): @@ -231,26 +231,26 @@ def read_band(self, key, info): res = self._read_data(fp_) # Calibrate - res = self.calibrate(res, key['calibration']) + res = self.calibrate(res, key["calibration"]) # Update metadata new_info = dict( - units=info['units'], - standard_name=info['standard_name'], - wavelength=info['wavelength'], - resolution=info['resolution'], + units=info["units"], + standard_name=info["standard_name"], + wavelength=info["wavelength"], + resolution=info["resolution"], id=key, - name=key['name'], + name=key["name"], sensor=self.sensor, ) - res = xr.DataArray(res, attrs=new_info, dims=['y', 'x']) + res = xr.DataArray(res, attrs=new_info, dims=["y", "x"]) return res def calibrate(self, data, calib): """Calibrate the data.""" - if calib == 'counts': + if calib == "counts": return data - if calib == 'reflectance' or calib == 'brightness_temperature': + if calib == "reflectance" or calib == "brightness_temperature": return self._calibrate(data) raise NotImplementedError("ERROR: Unsupported calibration.", "Only counts, reflectance and ", diff --git a/satpy/readers/ami_l1b.py b/satpy/readers/ami_l1b.py index 9adeaf76f1..db8c8444d8 100644 --- a/satpy/readers/ami_l1b.py +++ b/satpy/readers/ami_l1b.py @@ -36,8 +36,8 @@ CHUNK_SIZE = get_legacy_chunk_size() PLATFORM_NAMES = { - 'GK-2A': 'GEO-KOMPSAT-2A', - 'GK-2B': 'GEO-KOMPSAT-2B', + "GK-2A": "GEO-KOMPSAT-2A", + "GK-2B": "GEO-KOMPSAT-2B", } @@ -90,7 +90,7 @@ class AMIL1bNetCDF(BaseFileHandler): """ def __init__(self, filename, filename_info, filetype_info, - calib_mode='PYSPECTRAL', allow_conditional_pixels=False, + calib_mode="PYSPECTRAL", allow_conditional_pixels=False, user_calibration=None): """Open the NetCDF file with xarray and prepare the Dataset for reading.""" super(AMIL1bNetCDF, self).__init__(filename, filename_info, filetype_info) @@ -98,17 +98,17 @@ def __init__(self, filename, filename_info, filetype_info, self.nc = xr.open_dataset(f_obj, decode_cf=True, mask_and_scale=False, - chunks={'dim_image_x': CHUNK_SIZE, 'dim_image_y': CHUNK_SIZE}) - self.nc = self.nc.rename({'dim_image_x': 'x', 'dim_image_y': 'y'}) + chunks={"dim_image_x": CHUNK_SIZE, "dim_image_y": CHUNK_SIZE}) + self.nc = self.nc.rename({"dim_image_x": "x", "dim_image_y": "y"}) - platform_shortname = self.nc.attrs['satellite_name'] + platform_shortname = self.nc.attrs["satellite_name"] self.platform_name = PLATFORM_NAMES.get(platform_shortname) - self.sensor = 'ami' - self.band_name = filetype_info['file_type'].upper() + self.sensor = "ami" + self.band_name = filetype_info["file_type"].upper() self.allow_conditional_pixels = allow_conditional_pixels - calib_mode_choices = ('FILE', 'PYSPECTRAL', 'GSICS') + calib_mode_choices = ("FILE", "PYSPECTRAL", "GSICS") if calib_mode.upper() not in calib_mode_choices: - raise ValueError('Invalid calibration 
mode: {}. Choose one of {}'.format( + raise ValueError("Invalid calibration mode: {}. Choose one of {}".format( calib_mode, calib_mode_choices)) self.calib_mode = calib_mode.upper() @@ -118,36 +118,36 @@ def __init__(self, filename, filename_info, filetype_info, def start_time(self): """Get observation start time.""" base = datetime(2000, 1, 1, 12, 0, 0) - return base + timedelta(seconds=self.nc.attrs['observation_start_time']) + return base + timedelta(seconds=self.nc.attrs["observation_start_time"]) @property def end_time(self): """Get observation end time.""" base = datetime(2000, 1, 1, 12, 0, 0) - return base + timedelta(seconds=self.nc.attrs['observation_end_time']) + return base + timedelta(seconds=self.nc.attrs["observation_end_time"]) def get_area_def(self, dsid): """Get area definition for this file.""" pdict = {} - pdict['a'] = self.nc.attrs['earth_equatorial_radius'] - pdict['b'] = self.nc.attrs['earth_polar_radius'] - pdict['h'] = self.nc.attrs['nominal_satellite_height'] - pdict['a'] - pdict['ssp_lon'] = self.nc.attrs['sub_longitude'] * 180 / np.pi # it's in radians? - pdict['ncols'] = self.nc.attrs['number_of_columns'] - pdict['nlines'] = self.nc.attrs['number_of_lines'] - obs_mode = self.nc.attrs['observation_mode'] - resolution = self.nc.attrs['channel_spatial_resolution'] + pdict["a"] = self.nc.attrs["earth_equatorial_radius"] + pdict["b"] = self.nc.attrs["earth_polar_radius"] + pdict["h"] = self.nc.attrs["nominal_satellite_height"] - pdict["a"] + pdict["ssp_lon"] = self.nc.attrs["sub_longitude"] * 180 / np.pi # it's in radians? + pdict["ncols"] = self.nc.attrs["number_of_columns"] + pdict["nlines"] = self.nc.attrs["number_of_lines"] + obs_mode = self.nc.attrs["observation_mode"] + resolution = self.nc.attrs["channel_spatial_resolution"] # Example offset: 11000.5 # the 'get_area_extent' will handle this half pixel for us - pdict['cfac'] = self.nc.attrs['cfac'] - pdict['coff'] = self.nc.attrs['coff'] - pdict['lfac'] = -self.nc.attrs['lfac'] - pdict['loff'] = self.nc.attrs['loff'] - pdict['scandir'] = 'N2S' - pdict['a_name'] = 'ami_geos_{}'.format(obs_mode.lower()) - pdict['a_desc'] = 'AMI {} Area at {} resolution'.format(obs_mode, resolution) - pdict['p_id'] = 'ami_fixed_grid' + pdict["cfac"] = self.nc.attrs["cfac"] + pdict["coff"] = self.nc.attrs["coff"] + pdict["lfac"] = -self.nc.attrs["lfac"] + pdict["loff"] = self.nc.attrs["loff"] + pdict["scandir"] = "N2S" + pdict["a_name"] = "ami_geos_{}".format(obs_mode.lower()) + pdict["a_desc"] = "AMI {} Area at {} resolution".format(obs_mode, resolution) + pdict["p_id"] = "ami_fixed_grid" area_extent = get_area_extent(pdict) fg_area_def = get_area_definition(pdict, area_extent) @@ -155,12 +155,12 @@ def get_area_def(self, dsid): def get_orbital_parameters(self): """Collect orbital parameters for this file.""" - a = float(self.nc.attrs['earth_equatorial_radius']) - b = float(self.nc.attrs['earth_polar_radius']) + a = float(self.nc.attrs["earth_equatorial_radius"]) + b = float(self.nc.attrs["earth_polar_radius"]) # nominal_satellite_height seems to be from the center of the earth - h = float(self.nc.attrs['nominal_satellite_height']) - a - lon_0 = self.nc.attrs['sub_longitude'] * 180 / np.pi # it's in radians? - sc_position = self.nc['sc_position'].attrs['sc_position_center_pixel'] + h = float(self.nc.attrs["nominal_satellite_height"]) - a + lon_0 = self.nc.attrs["sub_longitude"] * 180 / np.pi # it's in radians? 
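# A minimal usage sketch of the ECEF-to-geodetic conversion applied just
# below: pyproj transforms a geocentric x/y/z position (metres; the values
# here are made up, roughly at the geostationary radius) into longitude,
# latitude and altitude above the ellipsoid.
import pyproj

a, b = 6378137.0, 6356752.3  # example ellipsoid radii [m]
ecef = pyproj.CRS.from_dict({"proj": "geocent", "a": a, "b": b})
lla = pyproj.CRS.from_dict({"proj": "latlong", "a": a, "b": b})
transformer = pyproj.Transformer.from_crs(ecef, lla)
lon, lat, alt = transformer.transform(-22161000.0, 35886000.0, 0.0)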
+ sc_position = self.nc["sc_position"].attrs["sc_position_center_pixel"] # convert ECEF coordinates to lon, lat, alt ecef = pyproj.CRS.from_dict({"proj": "geocent", "a": a, "b": b}) @@ -169,18 +169,18 @@ def get_orbital_parameters(self): sc_position = transformer.transform(sc_position[0], sc_position[1], sc_position[2]) orbital_parameters = { - 'projection_longitude': float(lon_0), - 'projection_latitude': 0.0, - 'projection_altitude': h, - 'satellite_actual_longitude': sc_position[0], - 'satellite_actual_latitude': sc_position[1], - 'satellite_actual_altitude': sc_position[2], # meters + "projection_longitude": float(lon_0), + "projection_latitude": 0.0, + "projection_altitude": h, + "satellite_actual_longitude": sc_position[0], + "satellite_actual_latitude": sc_position[1], + "satellite_actual_altitude": sc_position[2], # meters } return orbital_parameters def get_dataset(self, dataset_id, ds_info): """Load a dataset as a xarray DataArray.""" - file_key = ds_info.get('file_key', dataset_id['name']) + file_key = ds_info.get("file_key", dataset_id["name"]) data = self.nc[file_key] # hold on to attributes for later attrs = data.attrs @@ -195,47 +195,47 @@ def get_dataset(self, dataset_id, ds_info): qf = data & 0b1100000000000000 # mask DQF bits - bits = attrs['number_of_valid_bits_per_pixel'] + bits = attrs["number_of_valid_bits_per_pixel"] data &= 2**bits - 1 # only take "no error" pixels as valid data = data.where(qf == 0) # Calibration values from file, fall back to built-in if unavailable - gain = self.nc.attrs['DN_to_Radiance_Gain'] - offset = self.nc.attrs['DN_to_Radiance_Offset'] + gain = self.nc.attrs["DN_to_Radiance_Gain"] + offset = self.nc.attrs["DN_to_Radiance_Offset"] - if dataset_id['calibration'] in ('radiance', 'reflectance', 'brightness_temperature'): + if dataset_id["calibration"] in ("radiance", "reflectance", "brightness_temperature"): data = gain * data + offset - if self.calib_mode == 'GSICS': + if self.calib_mode == "GSICS": data = self._apply_gsics_rad_correction(data) elif isinstance(self.user_calibration, dict): data = self._apply_user_rad_correction(data) - if dataset_id['calibration'] == 'reflectance': + if dataset_id["calibration"] == "reflectance": # depends on the radiance calibration above - rad_to_alb = self.nc.attrs['Radiance_to_Albedo_c'] - if ds_info.get('units') == '%': + rad_to_alb = self.nc.attrs["Radiance_to_Albedo_c"] + if ds_info.get("units") == "%": rad_to_alb *= 100 data = data * rad_to_alb - elif dataset_id['calibration'] == 'brightness_temperature': + elif dataset_id["calibration"] == "brightness_temperature": data = self._calibrate_ir(dataset_id, data) - elif dataset_id['calibration'] not in ('counts', 'radiance'): - raise ValueError("Unknown calibration: '{}'".format(dataset_id['calibration'])) + elif dataset_id["calibration"] not in ("counts", "radiance"): + raise ValueError("Unknown calibration: '{}'".format(dataset_id["calibration"])) - for attr_name in ('standard_name', 'units'): + for attr_name in ("standard_name", "units"): attrs[attr_name] = ds_info[attr_name] attrs.update(dataset_id.to_dict()) - attrs['orbital_parameters'] = self.get_orbital_parameters() - attrs['platform_name'] = self.platform_name - attrs['sensor'] = self.sensor + attrs["orbital_parameters"] = self.get_orbital_parameters() + attrs["platform_name"] = self.platform_name + attrs["sensor"] = self.sensor data.attrs = attrs return data def _calibrate_ir(self, dataset_id, data): """Calibrate radiance data to BTs using either pyspectral or in-file coefficients.""" - if 
self.calib_mode == 'PYSPECTRAL': + if self.calib_mode == "PYSPECTRAL": # depends on the radiance calibration above # Convert um to m^-1 (SI units for pyspectral) - wn = 1 / (dataset_id['wavelength'][1] / 1e6) + wn = 1 / (dataset_id["wavelength"][1] / 1e6) # Convert cm^-1 (wavenumbers) and (mW/m^2)/(str/cm^-1) (radiance data) # to SI units m^-1, mW*m^-3*str^-1. bt_data = rad2temp(wn, data.data * 1e-5) @@ -248,17 +248,17 @@ def _calibrate_ir(self, dataset_id, data): else: # IR coefficients from the file # Channel specific - c0 = self.nc.attrs['Teff_to_Tbb_c0'] - c1 = self.nc.attrs['Teff_to_Tbb_c1'] - c2 = self.nc.attrs['Teff_to_Tbb_c2'] + c0 = self.nc.attrs["Teff_to_Tbb_c0"] + c1 = self.nc.attrs["Teff_to_Tbb_c1"] + c2 = self.nc.attrs["Teff_to_Tbb_c2"] # These should be fixed, but load anyway - cval = self.nc.attrs['light_speed'] - kval = self.nc.attrs['Boltzmann_constant_k'] - hval = self.nc.attrs['Plank_constant_h'] + cval = self.nc.attrs["light_speed"] + kval = self.nc.attrs["Boltzmann_constant_k"] + hval = self.nc.attrs["Plank_constant_h"] # Compute wavenumber as cm-1 - wn = (10000 / dataset_id['wavelength'][1]) * 100 + wn = (10000 / dataset_id["wavelength"][1]) * 100 # Convert radiance to effective brightness temperature e1 = (2 * hval * cval * cval) * np.power(wn, 3) e2 = (data.data * 1e-5) @@ -271,8 +271,8 @@ def _calibrate_ir(self, dataset_id, data): def _apply_gsics_rad_correction(self, data): """Retrieve GSICS factors from L1 file and apply to radiance.""" - rad_slope = self.nc['gsics_coeff_slope'][0] - rad_offset = self.nc['gsics_coeff_intercept'][0] + rad_slope = self.nc["gsics_coeff_slope"][0] + rad_offset = self.nc["gsics_coeff_intercept"][0] data = apply_rad_correction(data, rad_slope, rad_offset) return data diff --git a/satpy/readers/amsr2_l1b.py b/satpy/readers/amsr2_l1b.py index bd3a35c05d..29778c5f0d 100644 --- a/satpy/readers/amsr2_l1b.py +++ b/satpy/readers/amsr2_l1b.py @@ -25,8 +25,8 @@ class AMSR2L1BFileHandler(HDF5FileHandler): def get_metadata(self, ds_id, ds_info): """Get the metadata.""" - var_path = ds_info['file_key'] - info = getattr(self[var_path], 'attrs', {}) + var_path = ds_info["file_key"] + info = getattr(self[var_path], "attrs", {}) info.update(ds_info) info.update({ "shape": self.get_shape(ds_id, ds_info), @@ -41,23 +41,23 @@ def get_metadata(self, ds_id, ds_info): def get_shape(self, ds_id, ds_info): """Get output shape of specified dataset.""" - var_path = ds_info['file_key'] - shape = self[var_path + '/shape'] - if ((ds_info.get('standard_name') == "longitude" or ds_info.get('standard_name') == "latitude") and - ds_id['resolution'] == 10000): + var_path = ds_info["file_key"] + shape = self[var_path + "/shape"] + if ((ds_info.get("standard_name") == "longitude" or ds_info.get("standard_name") == "latitude") and + ds_id["resolution"] == 10000): return shape[0], int(shape[1] / 2) return shape def get_dataset(self, ds_id, ds_info): """Get output data and metadata of specified dataset.""" - var_path = ds_info['file_key'] - fill_value = ds_info.get('fill_value', 65535) + var_path = ds_info["file_key"] + fill_value = ds_info.get("fill_value", 65535) metadata = self.get_metadata(ds_id, ds_info) data = self[var_path] - if ((ds_info.get('standard_name') == "longitude" or - ds_info.get('standard_name') == "latitude") and - ds_id['resolution'] == 10000): + if ((ds_info.get("standard_name") == "longitude" or + ds_info.get("standard_name") == "latitude") and + ds_id["resolution"] == 10000): # FIXME: Lower frequency channels need CoRegistration parameters applied 
data = data[:, ::2] * self[var_path + "/attr/SCALE FACTOR"] else: diff --git a/satpy/readers/amsr2_l2.py b/satpy/readers/amsr2_l2.py index f241861c22..0797ad5bbd 100644 --- a/satpy/readers/amsr2_l2.py +++ b/satpy/readers/amsr2_l2.py @@ -25,7 +25,7 @@ class AMSR2L2FileHandler(AMSR2L1BFileHandler): def mask_dataset(self, ds_info, data): """Mask data with the fill value.""" - fill_value = ds_info.get('fill_value', 65535) + fill_value = ds_info.get("fill_value", 65535) return data.where(data != fill_value) def scale_dataset(self, var_path, data): @@ -34,14 +34,14 @@ def scale_dataset(self, var_path, data): def get_dataset(self, ds_id, ds_info): """Get output data and metadata of specified dataset.""" - var_path = ds_info['file_key'] + var_path = ds_info["file_key"] data = self[var_path].squeeze() data = self.mask_dataset(ds_info, data) data = self.scale_dataset(var_path, data) - if ds_info.get('name') == "ssw": - data = data.rename({'dim_0': 'y', 'dim_1': 'x'}) + if ds_info.get("name") == "ssw": + data = data.rename({"dim_0": "y", "dim_1": "x"}) metadata = self.get_metadata(ds_id, ds_info) data.attrs.update(metadata) return data diff --git a/satpy/readers/amsr2_l2_gaasp.py b/satpy/readers/amsr2_l2_gaasp.py index 5f91e2d965..54a3769747 100644 --- a/satpy/readers/amsr2_l2_gaasp.py +++ b/satpy/readers/amsr2_l2_gaasp.py @@ -58,19 +58,19 @@ class GAASPFileHandler(BaseFileHandler): """Generic file handler for GAASP output files.""" y_dims: Tuple[str, ...] = ( - 'Number_of_Scans', + "Number_of_Scans", ) x_dims: Tuple[str, ...] = ( - 'Number_of_hi_rez_FOVs', - 'Number_of_low_rez_FOVs', + "Number_of_hi_rez_FOVs", + "Number_of_low_rez_FOVs", ) time_dims = ( - 'Time_Dimension', + "Time_Dimension", ) is_gridded = False dim_resolutions = { - 'Number_of_hi_rez_FOVs': 5000, - 'Number_of_low_rez_FOVs': 10000, + "Number_of_hi_rez_FOVs": 5000, + "Number_of_low_rez_FOVs": 10000, } @cached_property @@ -84,39 +84,39 @@ def nc(self): chunks=chunks) if len(self.time_dims) == 1: - nc = nc.rename({self.time_dims[0]: 'time'}) + nc = nc.rename({self.time_dims[0]: "time"}) return nc @property def start_time(self): """Get start time of observation.""" try: - return self.filename_info['start_time'] + return self.filename_info["start_time"] except KeyError: - time_str = self.nc.attrs['time_coverage_start'] + time_str = self.nc.attrs["time_coverage_start"] return datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get end time of observation.""" try: - return self.filename_info['end_time'] + return self.filename_info["end_time"] except KeyError: - time_str = self.nc.attrs['time_coverage_end'] + time_str = self.nc.attrs["time_coverage_end"] return datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ") @property def sensor_names(self): """Sensors who have data in this file.""" - return {self.nc.attrs['instrument_name'].lower()} + return {self.nc.attrs["instrument_name"].lower()} @property def platform_name(self): """Name of the platform whose data is stored in this file.""" - return self.nc.attrs['platform_name'] + return self.nc.attrs["platform_name"] def _get_var_name_without_suffix(self, var_name): - var_suffix = self.filetype_info.get('var_suffix', "") + var_suffix = self.filetype_info.get("var_suffix", "") if var_suffix: var_name = var_name[:-len(var_suffix)] return var_name @@ -124,8 +124,8 @@ def _get_var_name_without_suffix(self, var_name): def _scale_data(self, data_arr, attrs): # handle scaling # take special care for integer/category fields - scale_factor = attrs.pop('scale_factor', 
1.) - add_offset = attrs.pop('add_offset', 0.) + scale_factor = attrs.pop("scale_factor", 1.) + add_offset = attrs.pop("add_offset", 0.) scaling_needed = not (scale_factor == 1 and add_offset == 0) if scaling_needed: data_arr = data_arr * scale_factor + add_offset @@ -138,19 +138,19 @@ def _nan_for_dtype(data_arr_dtype): if data_arr_dtype.type == np.float32: return np.float32(np.nan) if np.issubdtype(data_arr_dtype, np.timedelta64): - return np.timedelta64('NaT') + return np.timedelta64("NaT") if np.issubdtype(data_arr_dtype, np.datetime64): - return np.datetime64('NaT') + return np.datetime64("NaT") return np.nan def _fill_data(self, data_arr, attrs): - fill_value = attrs.pop('_FillValue', None) + fill_value = attrs.pop("_FillValue", None) is_int = np.issubdtype(data_arr.dtype, np.integer) - has_flag_comment = 'comment' in attrs + has_flag_comment = "comment" in attrs if is_int and has_flag_comment: # category product fill_out = fill_value - attrs['_FillValue'] = fill_out + attrs["_FillValue"] = fill_out else: fill_out = self._nan_for_dtype(data_arr.dtype) if fill_value is not None: @@ -159,19 +159,19 @@ def _fill_data(self, data_arr, attrs): def get_dataset(self, dataid, ds_info): """Load, scale, and collect metadata for the specified DataID.""" - orig_var_name = self._get_var_name_without_suffix(dataid['name']) + orig_var_name = self._get_var_name_without_suffix(dataid["name"]) data_arr = self.nc[orig_var_name].copy() attrs = data_arr.attrs.copy() data_arr, attrs = self._scale_data(data_arr, attrs) data_arr, attrs = self._fill_data(data_arr, attrs) attrs.update({ - 'platform_name': self.platform_name, - 'sensor': sorted(self.sensor_names)[0], - 'start_time': self.start_time, - 'end_time': self.end_time, + "platform_name": self.platform_name, + "sensor": sorted(self.sensor_names)[0], + "start_time": self.start_time, + "end_time": self.end_time, }) - dim_map = dict(zip(data_arr.dims, ('y', 'x'))) + dim_map = dict(zip(data_arr.dims, ("y", "x"))) # rename dims data_arr = data_arr.rename(**dim_map) # drop coords, the base reader will recreate these @@ -187,27 +187,27 @@ def _available_if_this_file_type(self, configured_datasets): # file handler so let's yield early yield is_avail, ds_info continue - yield self.file_type_matches(ds_info['file_type']), ds_info + yield self.file_type_matches(ds_info["file_type"]), ds_info def _add_lonlat_coords(self, data_arr, ds_info): lat_coord = None lon_coord = None for coord_name in data_arr.coords: - if 'longitude' in coord_name.lower(): + if "longitude" in coord_name.lower(): lon_coord = coord_name - if 'latitude' in coord_name.lower(): + if "latitude" in coord_name.lower(): lat_coord = coord_name - ds_info['coordinates'] = [lon_coord, lat_coord] + ds_info["coordinates"] = [lon_coord, lat_coord] def _get_ds_info_for_data_arr(self, var_name, data_arr): - var_suffix = self.filetype_info.get('var_suffix', "") + var_suffix = self.filetype_info.get("var_suffix", "") ds_info = { - 'file_type': self.filetype_info['file_type'], - 'name': var_name + var_suffix, + "file_type": self.filetype_info["file_type"], + "name": var_name + var_suffix, } x_dim_name = data_arr.dims[1] if x_dim_name in self.dim_resolutions: - ds_info['resolution'] = self.dim_resolutions[x_dim_name] + ds_info["resolution"] = self.dim_resolutions[x_dim_name] if not self.is_gridded and data_arr.coords: self._add_lonlat_coords(data_arr, ds_info) return ds_info @@ -245,13 +245,13 @@ class GAASPGriddedFileHandler(GAASPFileHandler): """GAASP file handler for gridded products like SEAICE.""" y_dims = 
(
-        'Number_of_Y_Dimension',
+        "Number_of_Y_Dimension",
     )
     x_dims = (
-        'Number_of_X_Dimension',
+        "Number_of_X_Dimension",
     )
     dim_resolutions = {
-        'Number_of_X_Dimension': 10000,
+        "Number_of_X_Dimension": 10000,
     }
     is_gridded = True
 
@@ -266,12 +266,12 @@ def _get_extents(data_shape, res):
 
     def get_area_def(self, dataid):
         """Create area definition for equirectangular projected data."""
-        var_suffix = self.filetype_info.get('var_suffix', '')
-        area_name = 'gaasp{}'.format(var_suffix)
-        orig_var_name = self._get_var_name_without_suffix(dataid['name'])
+        var_suffix = self.filetype_info.get("var_suffix", "")
+        area_name = "gaasp{}".format(var_suffix)
+        orig_var_name = self._get_var_name_without_suffix(dataid["name"])
         data_shape = self.nc[orig_var_name].shape
-        crs = CRS(self.filetype_info['grid_epsg'])
-        res = dataid['resolution']
+        crs = CRS(self.filetype_info["grid_epsg"])
+        res = dataid["resolution"]
         extent = self._get_extents(data_shape, res)
         area_def = AreaDefinition(
             area_name,
@@ -289,8 +289,8 @@ class GAASPLowResFileHandler(GAASPFileHandler):
     """GAASP file handler for files that only have low resolution products."""
 
     x_dims = (
-        'Number_of_low_rez_FOVs',
+        "Number_of_low_rez_FOVs",
     )
     dim_resolutions = {
-        'Number_of_low_rez_FOVs': 10000,
+        "Number_of_low_rez_FOVs": 10000,
     }
diff --git a/satpy/readers/ascat_l2_soilmoisture_bufr.py b/satpy/readers/ascat_l2_soilmoisture_bufr.py
index c1a974807d..a5f77fd7eb 100644
--- a/satpy/readers/ascat_l2_soilmoisture_bufr.py
+++ b/satpy/readers/ascat_l2_soilmoisture_bufr.py
@@ -38,7 +38,7 @@
 from satpy.readers.file_handlers import BaseFileHandler
 from satpy.utils import get_legacy_chunk_size
 
-logger = logging.getLogger('AscatSoilMoistureBufr')
+logger = logging.getLogger("AscatSoilMoistureBufr")
 
 CHUNK_SIZE = get_legacy_chunk_size()
 
@@ -53,34 +53,34 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs):
         start_time, end_time = self.get_start_end_date()
         self.metadata = {}
-        self.metadata['start_time'] = start_time
-        self.metadata['end_time'] = end_time
+        self.metadata["start_time"] = start_time
+        self.metadata["end_time"] = end_time
 
     @property
     def start_time(self):
         """Return the start time of data acquisition."""
-        return self.metadata['start_time']
+        return self.metadata["start_time"]
 
     @property
     def end_time(self):
         """Return the end time of data acquisition."""
-        return self.metadata['end_time']
+        return self.metadata["end_time"]
 
     @property
     def platform_name(self):
         """Return spacecraft name."""
-        return self.filename_info['platform']
+        return self.filename_info["platform"]
 
     def extract_msg_date_extremes(self, bufr, date_min=None, date_max=None):
         """Extract the minimum and maximum dates from a single bufr message."""
-        ec.codes_set(bufr, 'unpack', 1)
-        size = ec.codes_get(bufr, 'numberOfSubsets')
-        years = np.resize(ec.codes_get_array(bufr, 'year'), size)
-        months = np.resize(ec.codes_get_array(bufr, 'month'), size)
-        days = np.resize(ec.codes_get_array(bufr, 'day'), size)
-        hours = np.resize(ec.codes_get_array(bufr, 'hour'), size)
-        minutes = np.resize(ec.codes_get_array(bufr, 'minute'), size)
-        seconds = np.resize(ec.codes_get_array(bufr, 'second'), size)
+        ec.codes_set(bufr, "unpack", 1)
+        size = ec.codes_get(bufr, "numberOfSubsets")
+        years = np.resize(ec.codes_get_array(bufr, "year"), size)
+        months = np.resize(ec.codes_get_array(bufr, "month"), size)
+        days = np.resize(ec.codes_get_array(bufr, "day"), size)
+        hours = np.resize(ec.codes_get_array(bufr, "hour"), size)
+        minutes = np.resize(ec.codes_get_array(bufr, "minute"), size)
+
seconds = np.resize(ec.codes_get_array(bufr, "second"), size) for year, month, day, hour, minute, second in zip(years, months, days, hours, minutes, seconds): time_stamp = datetime(year, month, day, hour, minute, second) date_min = time_stamp if not date_min else min(date_min, time_stamp) @@ -89,7 +89,7 @@ def extract_msg_date_extremes(self, bufr, date_min=None, date_max=None): def get_start_end_date(self): """Get the first and last date from the bufr file.""" - with open(self.filename, 'rb') as fh: + with open(self.filename, "rb") as fh: date_min = None date_max = None while True: @@ -103,16 +103,16 @@ def get_start_end_date(self): def get_bufr_data(self, key): """Get BUFR data by key.""" attr = np.array([]) - with open(self.filename, 'rb') as fh: + with open(self.filename, "rb") as fh: while True: # get handle for message bufr = ec.codes_bufr_new_from_file(fh) if bufr is None: break - ec.codes_set(bufr, 'unpack', 1) + ec.codes_set(bufr, "unpack", 1) tmp = ec.codes_get_array(bufr, key, float) if len(tmp) == 1: - size = ec.codes_get(bufr, 'numberOfSubsets') + size = ec.codes_get(bufr, "numberOfSubsets") tmp = np.resize(tmp, size) attr = np.append(attr, tmp) ec.codes_release(bufr) @@ -120,12 +120,12 @@ def get_bufr_data(self, key): def get_dataset(self, dataset_id, dataset_info): """Get dataset using the BUFR key in dataset_info.""" - arr = self.get_bufr_data(dataset_info['key']) - if 'fill_value' in dataset_info: - arr[arr == dataset_info['fill_value']] = np.nan + arr = self.get_bufr_data(dataset_info["key"]) + if "fill_value" in dataset_info: + arr[arr == dataset_info["fill_value"]] = np.nan arr = da.from_array(arr, chunks=CHUNK_SIZE) - xarr = xr.DataArray(arr, dims=["y"], name=dataset_info['name']) - xarr.attrs['platform_name'] = self.platform_name + xarr = xr.DataArray(arr, dims=["y"], name=dataset_info["name"]) + xarr.attrs["platform_name"] = self.platform_name xarr.attrs.update(dataset_info) return xarr diff --git a/satpy/readers/atms_l1b_nc.py b/satpy/readers/atms_l1b_nc.py index 1ea61fe92c..95d48b81cd 100644 --- a/satpy/readers/atms_l1b_nc.py +++ b/satpy/readers/atms_l1b_nc.py @@ -28,7 +28,7 @@ logger = logging.getLogger(__name__) -DATE_FMT = '%Y-%m-%dT%H:%M:%SZ' +DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" class AtmsL1bNCFileHandler(NetCDF4FileHandler): @@ -43,12 +43,12 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): @property def start_time(self): """Get observation start time.""" - return datetime.strptime(self['/attr/time_coverage_start'], DATE_FMT) + return datetime.strptime(self["/attr/time_coverage_start"], DATE_FMT) @property def end_time(self): """Get observation end time.""" - return datetime.strptime(self['/attr/time_coverage_end'], DATE_FMT) + return datetime.strptime(self["/attr/time_coverage_end"], DATE_FMT) @property def platform_name(self): @@ -113,8 +113,8 @@ def _select_dataset(self, name): def get_dataset(self, dataset_id, ds_info): """Get dataset.""" - name = dataset_id['name'] - logger.debug(f'Reading in file to get dataset with name {name}.') + name = dataset_id["name"] + logger.debug(f"Reading in file to get dataset with name {name}.") dataset = self._select_dataset(name) dataset = self._merge_attributes(dataset, ds_info) dataset = self._drop_coords(dataset) diff --git a/satpy/readers/atms_sdr_hdf5.py b/satpy/readers/atms_sdr_hdf5.py index 26fd3d58e5..7f2d43bd71 100644 --- a/satpy/readers/atms_sdr_hdf5.py +++ b/satpy/readers/atms_sdr_hdf5.py @@ -46,8 +46,8 @@ LOG = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() 
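# A minimal sketch of the lazy-loading pattern used by __getitem__ below:
# wrap the h5py dataset in a dask array so nothing is read until compute
# time, then label the dimensions for xarray. The file path and variable
# key here are hypothetical.
import dask.array as da
import h5py
import xarray as xr

def lazy_hdf5_var(path, key, chunks=1024):
    dset = h5py.File(path, "r")[key]           # opens lazily, no data read yet
    data = da.from_array(dset, chunks=chunks)  # chunked, deferred reads
    dims = ["y", "x", "z"][:dset.ndim]         # name up to three dimensions
    return xr.DataArray(data, dims=dims, attrs=dict(dset.attrs))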
-ATMS_CHANNEL_NAMES = ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', - '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22'] +ATMS_CHANNEL_NAMES = ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", + "11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22"] class ATMS_SDR_FileHandler(JPSS_SDR_FileHandler): @@ -55,18 +55,18 @@ class ATMS_SDR_FileHandler(JPSS_SDR_FileHandler): def __init__(self, filename, filename_info, filetype_info, **kwargs): """Initialize file handler.""" - self.datasets = os.path.basename(filename).split('_')[0].split('-') + self.datasets = os.path.basename(filename).split("_")[0].split("-") super().__init__(filename, filename_info, filetype_info, **kwargs) def __getitem__(self, key): """Get item for given key.""" val = self.file_content[key] if isinstance(val, h5py.Dataset): - dset = h5py.File(self.filename, 'r')[key] + dset = h5py.File(self.filename, "r")[key] if dset.ndim == 3: dset_data = da.from_array(dset, chunks=CHUNK_SIZE) attrs = self._attrs_cache.get(key, dset.attrs) - return xr.DataArray(dset_data, dims=['y', 'x', 'z'], attrs=attrs) + return xr.DataArray(dset_data, dims=["y", "x", "z"], attrs=attrs) return super().__getitem__(key) @@ -78,11 +78,11 @@ def _get_atms_channel_index(self, ch_name): return None def _get_scans_per_granule(self, dataset_group): - number_of_granules_path = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateNumberGranules' + number_of_granules_path = "Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateNumberGranules" nb_granules_path = number_of_granules_path.format(dataset_group=DATASET_KEYS[dataset_group]) scans = [] for granule in range(self[nb_granules_path]): - scans_path = 'Data_Products/{dataset_group}/{dataset_group}_Gran_{granule}/attr/N_Number_Of_Scans' + scans_path = "Data_Products/{dataset_group}/{dataset_group}_Gran_{granule}/attr/N_Number_Of_Scans" scans_path = scans_path.format(dataset_group=DATASET_KEYS[dataset_group], granule=granule) scans.append(self[scans_path]) return scans @@ -99,15 +99,15 @@ def get_dataset(self, dataset_id, ds_info): scans actually sensed of course. 
""" - dataset_group = [ds_group for ds_group in ds_info['dataset_groups'] if ds_group in self.datasets] + dataset_group = [ds_group for ds_group in ds_info["dataset_groups"] if ds_group in self.datasets] if not dataset_group: return dataset_group = dataset_group[0] - ds_info['dataset_group'] = dataset_group + ds_info["dataset_group"] = dataset_group var_path = self._generate_file_key(dataset_id, ds_info) - ch_index = self._get_atms_channel_index(ds_info['name']) + ch_index = self._get_atms_channel_index(ds_info["name"]) data = self.concatenate_dataset(dataset_group, var_path, channel_index=ch_index) data = self.mask_fill_values(data, ds_info) diff --git a/satpy/readers/avhrr_l1b_gaclac.py b/satpy/readers/avhrr_l1b_gaclac.py index e520b29b30..c566175b8c 100644 --- a/satpy/readers/avhrr_l1b_gaclac.py +++ b/satpy/readers/avhrr_l1b_gaclac.py @@ -53,8 +53,8 @@ AVHRR3_CHANNEL_NAMES = {"1": 0, "2": 1, "3A": 2, "3B": 3, "4": 4, "5": 5} AVHRR2_CHANNEL_NAMES = {"1": 0, "2": 1, "3": 2, "4": 3, "5": 4} AVHRR_CHANNEL_NAMES = {"1": 0, "2": 1, "3": 2, "4": 3} -ANGLES = ('sensor_zenith_angle', 'sensor_azimuth_angle', 'solar_zenith_angle', - 'solar_azimuth_angle', 'sun_sensor_azimuth_difference_angle') +ANGLES = ("sensor_zenith_angle", "sensor_azimuth_angle", "solar_zenith_angle", + "solar_azimuth_angle", "sun_sensor_azimuth_difference_angle") class GACLACFile(BaseFileHandler): @@ -84,7 +84,7 @@ def __init__(self, filename, filename_info, filetype_info, self.strip_invalid_coords = strip_invalid_coords self.interpolate_coords = interpolate_coords self.reader_kwargs = reader_kwargs - self.creation_site = filename_info.get('creation_site') + self.creation_site = filename_info.get("creation_site") self.reader = None self.calib_channels = None self.counts = None @@ -92,34 +92,34 @@ def __init__(self, filename, filename_info, filetype_info, self.qual_flags = None self.first_valid_lat = None self.last_valid_lat = None - self._start_time = filename_info['start_time'] - self._end_time = datetime.combine(filename_info['start_time'].date(), - filename_info['end_time'].time()) + self._start_time = filename_info["start_time"] + self._end_time = datetime.combine(filename_info["start_time"].date(), + filename_info["end_time"].time()) if self._end_time < self._start_time: self._end_time += timedelta(days=1) - self.platform_id = filename_info['platform_id'] - if self.platform_id in ['NK', 'NL', 'NM', 'NN', 'NP', 'M1', 'M2', - 'M3']: - if filename_info.get('transfer_mode') == 'GHRR': + self.platform_id = filename_info["platform_id"] + if self.platform_id in ["NK", "NL", "NM", "NN", "NP", "M1", "M2", + "M3"]: + if filename_info.get("transfer_mode") == "GHRR": self.reader_class = GACKLMReader else: self.reader_class = LACKLMReader self.chn_dict = AVHRR3_CHANNEL_NAMES - self.sensor = 'avhrr-3' - elif self.platform_id in ['NC', 'ND', 'NF', 'NH', 'NJ']: - if filename_info.get('transfer_mode') == 'GHRR': + self.sensor = "avhrr-3" + elif self.platform_id in ["NC", "ND", "NF", "NH", "NJ"]: + if filename_info.get("transfer_mode") == "GHRR": self.reader_class = GACPODReader else: self.reader_class = LACPODReader self.chn_dict = AVHRR2_CHANNEL_NAMES - self.sensor = 'avhrr-2' + self.sensor = "avhrr-2" else: - if filename_info.get('transfer_mode') == 'GHRR': + if filename_info.get("transfer_mode") == "GHRR": self.reader_class = GACPODReader else: self.reader_class = LACPODReader self.chn_dict = AVHRR_CHANNEL_NAMES - self.sensor = 'avhrr' + self.sensor = "avhrr" self.filename_info = filename_info def read_raw_data(self): @@ -131,43 
+131,43 @@ def read_raw_data(self): **self.reader_kwargs) self.reader.read(self.filename) if np.all(self.reader.mask): - raise ValueError('All data is masked out') + raise ValueError("All data is masked out") def get_dataset(self, key, info): """Get the dataset.""" self.read_raw_data() - if key['name'] in ['latitude', 'longitude']: + if key["name"] in ["latitude", "longitude"]: # Lats/lons are buffered by the reader - if key['name'] == 'latitude': + if key["name"] == "latitude": _, data = self.reader.get_lonlat() else: data, _ = self.reader.get_lonlat() # If coordinate interpolation is disabled, only every eighth # pixel has a lat/lon coordinate - xdim = 'x' if self.interpolate_coords else 'x_every_eighth' + xdim = "x" if self.interpolate_coords else "x_every_eighth" xcoords = None - elif key['name'] in ANGLES: + elif key["name"] in ANGLES: data = self._get_angle(key) - xdim = 'x' if self.interpolate_coords else 'x_every_eighth' + xdim = "x" if self.interpolate_coords else "x_every_eighth" xcoords = None - elif key['name'] == 'qual_flags': + elif key["name"] == "qual_flags": data = self.reader.get_qual_flags() - xdim = 'num_flags' - xcoords = ['Scan line number', - 'Fatal error flag', - 'Insufficient data for calibration', - 'Insufficient data for calibration', - 'Solar contamination of blackbody in channels 3', - 'Solar contamination of blackbody in channels 4', - 'Solar contamination of blackbody in channels 5'] - elif key['name'].upper() in self.chn_dict: + xdim = "num_flags" + xcoords = ["Scan line number", + "Fatal error flag", + "Insufficient data for calibration", + "Insufficient data for calibration", + "Solar contamination of blackbody in channels 3", + "Solar contamination of blackbody in channels 4", + "Solar contamination of blackbody in channels 5"] + elif key["name"].upper() in self.chn_dict: # Read and calibrate channel data data = self._get_channel(key) - xdim = 'x' + xdim = "x" xcoords = None else: - raise ValueError('Unknown dataset: {}'.format(key['name'])) + raise ValueError("Unknown dataset: {}".format(key["name"])) # Update start/end time using the actual scanline timestamps times = self.reader.get_times() @@ -183,7 +183,7 @@ def get_dataset(self, key, info): chunk_cols = data.shape[1] chunk_lines = int((CHUNK_SIZE ** 2) / chunk_cols) res = xr.DataArray(da.from_array(data, chunks=(chunk_lines, chunk_cols)), - dims=['y', xdim], attrs=info) + dims=["y", xdim], attrs=info) if xcoords: res[xdim] = xcoords @@ -191,8 +191,8 @@ def get_dataset(self, key, info): self._update_attrs(res) # Add scanline acquisition times - res['acq_time'] = ('y', times) - res['acq_time'].attrs['long_name'] = 'Mean scanline acquisition time' + res["acq_time"] = ("y", times) + res["acq_time"].attrs["long_name"] = "Mean scanline acquisition time" return res @@ -253,19 +253,19 @@ def _slice(self, data): def _get_channel(self, key): """Get channel and buffer results.""" - name = key['name'] - calibration = key['calibration'] - if calibration == 'counts': + name = key["name"] + calibration = key["calibration"] + if calibration == "counts": if self.counts is None: counts = self.reader.get_counts() self.counts = counts channels = self.counts - elif calibration in ['reflectance', 'brightness_temperature']: + elif calibration in ["reflectance", "brightness_temperature"]: if self.calib_channels is None: self.calib_channels = self.reader.get_calibrated_channels() channels = self.calib_channels else: - raise ValueError('Unknown calibration: {}'.format(calibration)) + raise ValueError("Unknown calibration: 
{}".format(calibration)) return channels[:, :, self.chn_dict[name.upper()]] def _get_qual_flags(self): @@ -278,12 +278,12 @@ def _get_angle(self, key): """Get angles and buffer results.""" if self.angles is None: sat_azi, sat_zenith, sun_azi, sun_zenith, rel_azi = self.reader.get_angles() - self.angles = {'sensor_zenith_angle': sat_zenith, - 'sensor_azimuth_angle': sat_azi, - 'solar_zenith_angle': sun_zenith, - 'solar_azimuth_angle': sun_azi, - 'sun_sensor_azimuth_difference_angle': rel_azi} - return self.angles[key['name']] + self.angles = {"sensor_zenith_angle": sat_zenith, + "sensor_azimuth_angle": sat_azi, + "solar_zenith_angle": sun_zenith, + "solar_azimuth_angle": sun_azi, + "sun_sensor_azimuth_difference_angle": rel_azi} + return self.angles[key["name"]] def _strip_invalid_lat(self): """Strip scanlines with invalid coordinates in the beginning/end of the orbit. @@ -302,11 +302,11 @@ def _update_attrs(self, res): """Update dataset attributes.""" for attr in self.reader.meta_data: res.attrs[attr] = self.reader.meta_data[attr] - res.attrs['platform_name'] = self.reader.spacecraft_name - res.attrs['orbit_number'] = self.filename_info.get('orbit_number', None) - res.attrs['sensor'] = self.sensor + res.attrs["platform_name"] = self.reader.spacecraft_name + res.attrs["orbit_number"] = self.filename_info.get("orbit_number", None) + res.attrs["sensor"] = self.sensor try: - res.attrs['orbital_parameters'] = {'tle': self.reader.get_tle_lines()} + res.attrs["orbital_parameters"] = {"tle": self.reader.get_tle_lines()} except (IndexError, RuntimeError): pass diff --git a/satpy/readers/caliop_l2_cloud.py b/satpy/readers/caliop_l2_cloud.py index 0fc89ae548..54dd100ffc 100644 --- a/satpy/readers/caliop_l2_cloud.py +++ b/satpy/readers/caliop_l2_cloud.py @@ -46,15 +46,15 @@ def __init__(self, filename, filename_info, filetype_info): self.get_filehandle() - self._start_time = filename_info['start_time'] + self._start_time = filename_info["start_time"] - logger.debug('Retrieving end time from metadata array') + logger.debug("Retrieving end time from metadata array") self.get_end_time() def get_end_time(self): """Get observation end time from file metadata.""" mda_dict = self.filehandle.attributes() - core_mda = mda_dict['coremetadata'] + core_mda = mda_dict["coremetadata"] end_time_str = self.parse_metadata_string(core_mda) self._end_time = datetime.strptime(end_time_str, "%Y-%m-%dT%H:%M:%SZ") @@ -76,19 +76,19 @@ def get_filehandle(self): def get_dataset(self, key, info): """Read data from file and return the corresponding projectables.""" - if key['name'] in ['longitude', 'latitude']: - logger.debug('Reading coordinate arrays.') + if key["name"] in ["longitude", "latitude"]: + logger.debug("Reading coordinate arrays.") if self.lons is None or self.lats is None: self.lons, self.lats = self.get_lonlats() - if key['name'] == 'latitude': + if key["name"] == "latitude": proj = Dataset(self.lats, id=key, **info) else: proj = Dataset(self.lons, id=key, **info) else: - data = self.get_sds_variable(key['name']) + data = self.get_sds_variable(key["name"]) proj = Dataset(data, id=key, **info) return proj @@ -101,8 +101,8 @@ def get_sds_variable(self, name): def get_lonlats(self): """Get longitude and latitude arrays from the file.""" - longitudes = self.get_sds_variable('Longitude') - latitudes = self.get_sds_variable('Latitude') + longitudes = self.get_sds_variable("Longitude") + latitudes = self.get_sds_variable("Latitude") return longitudes, latitudes @property diff --git a/satpy/readers/clavrx.py 
b/satpy/readers/clavrx.py index fd8cd552ae..4303456c04 100644 --- a/satpy/readers/clavrx.py +++ b/satpy/readers/clavrx.py @@ -36,37 +36,37 @@ CHUNK_SIZE = get_legacy_chunk_size() CF_UNITS = { - 'none': '1', + "none": "1", } SENSORS = { - 'MODIS': 'modis', - 'VIIRS': 'viirs', - 'AVHRR': 'avhrr', - 'AHI': 'ahi', - 'ABI': 'abi', - 'GOES-RU-IMAGER': 'abi', + "MODIS": "modis", + "VIIRS": "viirs", + "AVHRR": "avhrr", + "AHI": "ahi", + "ABI": "abi", + "GOES-RU-IMAGER": "abi", } PLATFORMS = { - 'SNPP': 'npp', - 'HIM8': 'himawari8', - 'HIM9': 'himawari9', - 'H08': 'himawari8', - 'H09': 'himawari9', - 'G16': 'GOES-16', - 'G17': 'GOES-17', - 'G18': 'GOES-18', + "SNPP": "npp", + "HIM8": "himawari8", + "HIM9": "himawari9", + "H08": "himawari8", + "H09": "himawari9", + "G16": "GOES-16", + "G17": "GOES-17", + "G18": "GOES-18", } ROWS_PER_SCAN = { - 'viirs': 16, - 'modis': 10, + "viirs": 16, + "modis": 10, } NADIR_RESOLUTION = { - 'viirs': 742, - 'modis': 1000, - 'avhrr': 1050, - 'ahi': 2000, - 'abi': 2004, + "viirs": 742, + "modis": 1000, + "avhrr": 1050, + "ahi": 2000, + "abi": 2004, } @@ -100,8 +100,8 @@ class _CLAVRxHelper: @staticmethod def _remove_attributes(attrs: dict) -> dict: """Remove attributes that described data before scaling.""" - old_attrs = ['unscaled_missing', 'SCALED_MIN', 'SCALED_MAX', - 'SCALED_MISSING'] + old_attrs = ["unscaled_missing", "SCALED_MIN", "SCALED_MAX", + "SCALED_MISSING"] for attr_key in old_attrs: attrs.pop(attr_key, None) @@ -118,15 +118,15 @@ def _scale_data(data_arr: Union[xr.DataArray, int], scale_factor: float, add_off @staticmethod def _get_data(data, dataset_id: dict) -> xr.DataArray: """Get a dataset.""" - if dataset_id.get('resolution'): - data.attrs['resolution'] = dataset_id['resolution'] + if dataset_id.get("resolution"): + data.attrs["resolution"] = dataset_id["resolution"] attrs = data.attrs.copy() - fill = attrs.get('_FillValue') - factor = attrs.pop('scale_factor', (np.ones(1, dtype=data.dtype))[0]) - offset = attrs.pop('add_offset', (np.zeros(1, dtype=data.dtype))[0]) - valid_range = attrs.get('valid_range', [None]) + fill = attrs.get("_FillValue") + factor = attrs.pop("scale_factor", (np.ones(1, dtype=data.dtype))[0]) + offset = attrs.pop("add_offset", (np.zeros(1, dtype=data.dtype))[0]) + valid_range = attrs.get("valid_range", [None]) if isinstance(valid_range, np.ndarray): attrs["valid_range"] = valid_range.tolist() @@ -135,7 +135,7 @@ def _get_data(data, dataset_id: dict) -> xr.DataArray: data = data.where(data != fill) data = _CLAVRxHelper._scale_data(data, factor, offset) # don't need _FillValue if it has been applied. 
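# Illustration (not from the patch): the unpacking that _scale_data applies follows
# the usual CF packing convention, unpacked = packed * scale_factor + add_offset,
# evaluated after the fill value has been masked. A minimal standalone sketch with
# made-up values:
import numpy as np
import xarray as xr
packed = xr.DataArray(np.array([0, 100, 255], dtype=np.uint8))
fill, scale, offset = 255, 0.5, -20.0
unpacked = packed.where(packed != fill) * scale + offset  # -> [-20.0, 30.0, nan]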
- attrs.pop('_FillValue', None) + attrs.pop("_FillValue", None) if all(valid_range): valid_min = _CLAVRxHelper._scale_data(valid_range[0], factor, offset) @@ -144,7 +144,7 @@ def _get_data(data, dataset_id: dict) -> xr.DataArray: data = data.where((data >= valid_min) & (data <= valid_max), fill) else: data = data.where((data >= valid_min) & (data <= valid_max)) - attrs['valid_range'] = [valid_min, valid_max] + attrs["valid_range"] = [valid_min, valid_max] data.attrs = _CLAVRxHelper._remove_attributes(attrs) @@ -173,29 +173,29 @@ def _read_pug_fixed_grid(projection_coordinates: netCDF4.Variable, distance_mult lon_0 = projection_coordinates.longitude_of_projection_origin sweep_axis = projection_coordinates.sweep_angle_axis[0] - proj_dict = {'a': float(a) * distance_multiplier, - 'b': float(b) * distance_multiplier, - 'lon_0': float(lon_0), - 'h': float(h) * distance_multiplier, - 'proj': 'geos', - 'units': 'm', - 'sweep': sweep_axis} + proj_dict = {"a": float(a) * distance_multiplier, + "b": float(b) * distance_multiplier, + "lon_0": float(lon_0), + "h": float(h) * distance_multiplier, + "proj": "geos", + "units": "m", + "sweep": sweep_axis} return proj_dict @staticmethod def _find_input_nc(filename: str, l1b_base: str) -> str: dirname = os.path.dirname(filename) - l1b_filename = os.path.join(dirname, l1b_base + '.nc') + l1b_filename = os.path.join(dirname, l1b_base + ".nc") if os.path.exists(l1b_filename): return str(l1b_filename) - glob_pat = os.path.join(dirname, l1b_base + '*R20*.nc') + glob_pat = os.path.join(dirname, l1b_base + "*R20*.nc") LOG.debug("searching for {0}".format(glob_pat)) found_l1b_filenames = list(glob(glob_pat)) if len(found_l1b_filenames) == 0: raise IOError("Could not find navigation donor for {0}" " in same directory as CLAVR-x data".format(l1b_base)) - LOG.debug('Candidate nav donors: {0}'.format(repr(found_l1b_filenames))) + LOG.debug("Candidate nav donors: {0}".format(repr(found_l1b_filenames))) return found_l1b_filenames[0] @staticmethod @@ -231,14 +231,14 @@ def _read_axi_fixed_grid(filename: str, l1b_attr) -> geometry.AreaDefinition: if not proj: raise ValueError(f"Unable to recover projection information for {filename}") - h = float(proj['h']) - x, y = l1b['x'], l1b['y'] + h = float(proj["h"]) + x, y = l1b["x"], l1b["y"] area_extent, ncols, nlines = _CLAVRxHelper._area_extent(x, y, h) area = geometry.AreaDefinition( - 'ahi_geos', + "ahi_geos", "AHI L2 file area", - 'ahi_geos', + "ahi_geos", proj, ncols, nlines, @@ -253,24 +253,24 @@ def get_metadata(sensor: str, platform: str, attrs: dict, ds_info: dict) -> dict attr_info.update(attrs) attr_info.update(ds_info) - flag_meanings = attr_info.get('flag_meanings', None) - if not attr_info.get('SCALED', 1) and not flag_meanings: - attr_info['flag_meanings'] = '' - attr_info.setdefault('flag_values', [None]) - elif not attr_info.get('SCALED', 1) and isinstance(flag_meanings, str): + flag_meanings = attr_info.get("flag_meanings", None) + if not attr_info.get("SCALED", 1) and not flag_meanings: + attr_info["flag_meanings"] = "" + attr_info.setdefault("flag_values", [None]) + elif not attr_info.get("SCALED", 1) and isinstance(flag_meanings, str): attr_info["flag_meanings"] = flag_meanings.split(" ") - u = attr_info.get('units') + u = attr_info.get("units") if u in CF_UNITS: # CF compliance - attr_info['units'] = CF_UNITS[u] + attr_info["units"] = CF_UNITS[u] if u.lower() == "none": - attr_info['units'] = "1" - attr_info['sensor'] = sensor - attr_info['platform_name'] = platform + attr_info["units"] = "1" + 
attr_info["sensor"] = sensor + attr_info["platform_name"] = platform rps = _get_rows_per_scan(sensor) if rps: - attr_info['rows_per_scan'] = rps - attr_info['reader'] = 'clavrx' + attr_info["rows_per_scan"] = rps + attr_info["reader"] = "clavrx" return attr_info @@ -287,16 +287,16 @@ def __init__(self, filename, filename_info, filetype_info): @property def start_time(self): """Get the start time.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get the end time.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info.get("end_time", self.start_time) def get_dataset(self, dataset_id, ds_info): """Get a dataset.""" - var_name = ds_info.get('file_key', dataset_id['name']) + var_name = ds_info.get("file_key", dataset_id["name"]) data = self[var_name] data = _CLAVRxHelper._get_data(data, dataset_id) data.attrs = _CLAVRxHelper.get_metadata(self.sensor, self.platform, @@ -308,39 +308,39 @@ def get_nadir_resolution(self, sensor): for k, v in NADIR_RESOLUTION.items(): if sensor.startswith(k): return v - res = self.filename_info.get('resolution') - if res.endswith('m'): + res = self.filename_info.get("resolution") + if res.endswith("m"): return int(res[:-1]) elif res is not None: return int(res) def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" - self.sensor = _get_sensor(self.file_content.get('/attr/sensor')) - self.platform = _get_platform(self.file_content.get('/attr/platform')) + self.sensor = _get_sensor(self.file_content.get("/attr/sensor")) + self.platform = _get_platform(self.file_content.get("/attr/platform")) nadir_resolution = self.get_nadir_resolution(self.sensor) - coordinates = ('longitude', 'latitude') + coordinates = ("longitude", "latitude") handled_variables = set() # update previously configured datasets for is_avail, ds_info in (configured_datasets or []): - this_res = ds_info.get('resolution') - this_coords = ds_info.get('coordinates') + this_res = ds_info.get("resolution") + this_coords = ds_info.get("coordinates") # some other file handler knows how to load this if is_avail is not None: yield is_avail, ds_info - var_name = ds_info.get('file_key', ds_info['name']) - matches = self.file_type_matches(ds_info['file_type']) + var_name = ds_info.get("file_key", ds_info["name"]) + matches = self.file_type_matches(ds_info["file_type"]) # we can confidently say that we can provide this dataset and can # provide more info if matches and var_name in self and this_res != nadir_resolution: handled_variables.add(var_name) new_info = ds_info.copy() # don't mess up the above yielded - new_info['resolution'] = nadir_resolution + new_info["resolution"] = nadir_resolution if self._is_polar() and this_coords is None: - new_info['coordinates'] = coordinates + new_info["coordinates"] = coordinates yield True, new_info elif is_avail is None: # if we didn't know how to handle this dataset and no one else did @@ -351,31 +351,31 @@ def available_datasets(self, configured_datasets=None): for var_name, val in self.file_content.items(): if isinstance(val, SDS): ds_info = { - 'file_type': self.filetype_info['file_type'], - 'resolution': nadir_resolution, - 'name': var_name, + "file_type": self.filetype_info["file_type"], + "resolution": nadir_resolution, + "name": var_name, } if self._is_polar(): - ds_info['coordinates'] = ['longitude', 'latitude'] + ds_info["coordinates"] = ["longitude", "latitude"] yield True, ds_info def 
get_shape(self, dataset_id, ds_info): """Get the shape.""" - var_name = ds_info.get('file_key', dataset_id['name']) - return self[var_name + '/shape'] + var_name = ds_info.get("file_key", dataset_id["name"]) + return self[var_name + "/shape"] def _is_polar(self): - l1b_att, inst_att = (str(self.file_content.get('/attr/L1B', None)), - str(self.file_content.get('/attr/sensor', None))) + l1b_att, inst_att = (str(self.file_content.get("/attr/L1B", None)), + str(self.file_content.get("/attr/sensor", None))) - return (inst_att != 'AHI' and 'GOES' not in inst_att) or (l1b_att is None) + return (inst_att != "AHI" and "GOES" not in inst_att) or (l1b_att is None) def get_area_def(self, key): """Get the area definition of the data at hand.""" if self._is_polar(): # then it doesn't have a fixed grid return super(CLAVRXHDF4FileHandler, self).get_area_def(key) - l1b_att = str(self.file_content.get('/attr/L1B', None)) + l1b_att = str(self.file_content.get("/attr/L1B", None)) area_def = _CLAVRxHelper._read_axi_fixed_grid(self.filename, l1b_att) return area_def @@ -396,12 +396,12 @@ def __init__(self, filename, filename_info, filetype_info): decode_coords=True, chunks=CHUNK_SIZE) # y,x is used in satpy, bands rather than channel using in xrimage - self.nc = self.nc.rename_dims({'scan_lines_along_track_direction': "y", - 'pixel_elements_along_scan_direction': "x"}) + self.nc = self.nc.rename_dims({"scan_lines_along_track_direction": "y", + "pixel_elements_along_scan_direction": "x"}) self.platform = _get_platform( - self.filename_info.get('platform_shortname', None)) - self.sensor = _get_sensor(self.nc.attrs.get('sensor', None)) + self.filename_info.get("platform_shortname", None)) + self.sensor = _get_sensor(self.nc.attrs.get("sensor", None)) # coordinates need scaling and valid_range (mask_and_scale won't work on valid_range) self.nc.coords["latitude"] = _CLAVRxHelper._get_data(self.nc.coords["latitude"], {"name": "latitude"}) @@ -410,8 +410,8 @@ def __init__(self, filename, filename_info, filetype_info): def _get_ds_info_for_data_arr(self, var_name): ds_info = { - 'file_type': self.filetype_info['file_type'], - 'name': var_name, + "file_type": self.filetype_info["file_type"], + "name": var_name, } return ds_info @@ -451,28 +451,28 @@ def available_datasets(self, configured_datasets=None): # file handler so let's yield early yield is_avail, ds_info continue - if self.file_type_matches(ds_info['file_type']): - handled_vars.add(ds_info['name']) - yield self.file_type_matches(ds_info['file_type']), ds_info + if self.file_type_matches(ds_info["file_type"]): + handled_vars.add(ds_info["name"]) + yield self.file_type_matches(ds_info["file_type"]), ds_info yield from self._available_new_datasets(handled_vars) def _is_polar(self): - l1b_att, inst_att = (str(self.nc.attrs.get('L1B', None)), - str(self.nc.attrs.get('sensor', None))) + l1b_att, inst_att = (str(self.nc.attrs.get("L1B", None)), + str(self.nc.attrs.get("sensor", None))) - return (inst_att != 'AHI' and 'GOES' not in inst_att) or (l1b_att is None) + return (inst_att != "AHI" and "GOES" not in inst_att) or (l1b_att is None) def get_area_def(self, key): """Get the area definition of the data at hand.""" if self._is_polar(): # then it doesn't have a fixed grid return super(CLAVRXNetCDFFileHandler, self).get_area_def(key) - l1b_att = str(self.nc.attrs.get('L1B', None)) + l1b_att = str(self.nc.attrs.get("L1B", None)) return _CLAVRxHelper._read_axi_fixed_grid(self.filename, l1b_att) def get_dataset(self, dataset_id, ds_info): """Get a dataset.""" - var_name = 
ds_info.get('name', dataset_id['name']) + var_name = ds_info.get("name", dataset_id["name"]) data = self[var_name] data = _CLAVRxHelper._get_data(data, dataset_id) data.attrs = _CLAVRxHelper.get_metadata(self.sensor, self.platform, diff --git a/satpy/readers/cmsaf_claas2.py b/satpy/readers/cmsaf_claas2.py index f8f360623e..9bf3ca3deb 100644 --- a/satpy/readers/cmsaf_claas2.py +++ b/satpy/readers/cmsaf_claas2.py @@ -87,7 +87,7 @@ def _get_dsinfo(self, var): def get_dataset(self, dataset_id, info): """Get the dataset.""" - ds = self[dataset_id['name']] + ds = self[dataset_id["name"]] if "time" in ds.dims: return ds.squeeze(["time"]) diff --git a/satpy/readers/electrol_hrit.py b/satpy/readers/electrol_hrit.py index 53e69d42b4..c773850a73 100644 --- a/satpy/readers/electrol_hrit.py +++ b/satpy/readers/electrol_hrit.py @@ -40,34 +40,34 @@ time_cds_short, ) -logger = logging.getLogger('hrit_electrol') +logger = logging.getLogger("hrit_electrol") # goms implementation: -key_header = np.dtype([('key_number', 'u1'), - ('seed', '>f8')]) - -segment_identification = np.dtype([('GP_SC_ID', '>i2'), - ('spectral_channel_id', '>i1'), - ('segment_sequence_number', '>u2'), - ('planned_start_segment_number', '>u2'), - ('planned_end_segment_number', '>u2'), - ('data_field_representation', '>i1')]) - -image_segment_line_quality = np.dtype([('line_number_in_grid', '>i4'), - ('line_mean_acquisition', - [('days', '>u2'), - ('milliseconds', '>u4')]), - ('line_validity', 'u1'), - ('line_radiometric_quality', 'u1'), - ('line_geometric_quality', 'u1')]) +key_header = np.dtype([("key_number", "u1"), + ("seed", ">f8")]) + +segment_identification = np.dtype([("GP_SC_ID", ">i2"), + ("spectral_channel_id", ">i1"), + ("segment_sequence_number", ">u2"), + ("planned_start_segment_number", ">u2"), + ("planned_end_segment_number", ">u2"), + ("data_field_representation", ">i1")]) + +image_segment_line_quality = np.dtype([("line_number_in_grid", ">i4"), + ("line_mean_acquisition", + [("days", ">u2"), + ("milliseconds", ">u4")]), + ("line_validity", "u1"), + ("line_radiometric_quality", "u1"), + ("line_geometric_quality", "u1")]) goms_variable_length_headers = { - image_segment_line_quality: 'image_segment_line_quality'} + image_segment_line_quality: "image_segment_line_quality"} -goms_text_headers = {image_data_function: 'image_data_function', - annotation_header: 'annotation_header', - ancillary_text: 'ancillary_text'} +goms_text_headers = {image_data_function: "image_data_function", + annotation_header: "annotation_header", + ancillary_text: "ancillary_text"} goms_hdr_map = base_hdr_map.copy() goms_hdr_map.update({7: key_header, @@ -76,28 +76,28 @@ }) -orbit_coef = np.dtype([('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('X', '>f8', (8, )), - ('Y', '>f8', (8, )), - ('Z', '>f8', (8, )), - ('VX', '>f8', (8, )), - ('VY', '>f8', (8, )), - ('VZ', '>f8', (8, ))]) +orbit_coef = np.dtype([("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("X", ">f8", (8, )), + ("Y", ">f8", (8, )), + ("Z", ">f8", (8, )), + ("VX", ">f8", (8, )), + ("VY", ">f8", (8, )), + ("VZ", ">f8", (8, ))]) -attitude_coef = np.dtype([('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('XofSpinAxis', '>f8', (8, )), - ('YofSpinAxis', '>f8', (8, )), - ('ZofSpinAxis', '>f8', (8, ))]) +attitude_coef = np.dtype([("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("XofSpinAxis", ">f8", (8, )), + ("YofSpinAxis", ">f8", (8, )), + ("ZofSpinAxis", ">f8", (8, ))]) -cuc_time = np.dtype([('coarse', 'u1', (4, )), - ('fine', 
'u1', (3, ))]) +cuc_time = np.dtype([("coarse", "u1", (4, )), + ("fine", "u1", (3, ))]) -time_cds_expanded = np.dtype([('days', '>u2'), - ('milliseconds', '>u4'), - ('microseconds', '>u2'), - ('nanoseconds', '>u2')]) +time_cds_expanded = np.dtype([("days", ">u2"), + ("milliseconds", ">u4"), + ("microseconds", ">u2"), + ("nanoseconds", ">u2")]) satellite_status = np.dtype([("TagType", " 16777216: lut = lut.astype(np.float64) else: @@ -337,26 +337,26 @@ def _calibrate(self, data): def get_area_def(self, dsid): """Get the area definition of the band.""" pdict = {} - pdict['cfac'] = np.int32(self.mda['cfac']) - pdict['lfac'] = np.int32(self.mda['lfac']) - pdict['coff'] = np.float32(self.mda['coff']) - pdict['loff'] = np.float32(self.mda['loff']) + pdict["cfac"] = np.int32(self.mda["cfac"]) + pdict["lfac"] = np.int32(self.mda["lfac"]) + pdict["coff"] = np.float32(self.mda["coff"]) + pdict["loff"] = np.float32(self.mda["loff"]) - pdict['a'] = 6378169.00 - pdict['b'] = 6356583.80 - pdict['h'] = 35785831.00 - pdict['scandir'] = 'N2S' + pdict["a"] = 6378169.00 + pdict["b"] = 6356583.80 + pdict["h"] = 35785831.00 + pdict["scandir"] = "N2S" - pdict['ssp_lon'] = self.mda['projection_parameters']['SSP_longitude'] + pdict["ssp_lon"] = self.mda["projection_parameters"]["SSP_longitude"] - pdict['nlines'] = int(self.mda['number_of_lines']) - pdict['ncols'] = int(self.mda['number_of_columns']) + pdict["nlines"] = int(self.mda["number_of_lines"]) + pdict["ncols"] = int(self.mda["number_of_columns"]) - pdict['loff'] = pdict['nlines'] - pdict['loff'] + pdict["loff"] = pdict["nlines"] - pdict["loff"] - pdict['a_name'] = 'geosgoms' - pdict['a_desc'] = 'Electro-L/GOMS channel area' - pdict['p_id'] = 'goms' + pdict["a_name"] = "geosgoms" + pdict["a_desc"] = "Electro-L/GOMS channel area" + pdict["p_id"] = "goms" area_extent = get_area_extent(pdict) area = get_area_definition(pdict, area_extent) diff --git a/satpy/readers/epic_l1b_h5.py b/satpy/readers/epic_l1b_h5.py index 55c020ee21..4f6d66ebae 100644 --- a/satpy/readers/epic_l1b_h5.py +++ b/satpy/readers/epic_l1b_h5.py @@ -49,16 +49,16 @@ # Level 1b is given as counts. These factors convert to reflectance. 
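# Illustration (not from the patch): with coefficients like these, the count-to-
# reflectance conversion amounts to a plain multiplication,
# reflectance = counts * CALIB_COEFS[band]. Hypothetical example for band "B443":
counts = 1.2e5                    # made-up level-1b count value
reflectance = counts * 8.34e-6    # factor for "B443" in the table below -> ~1.0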
# Retrieved from: https://asdc.larc.nasa.gov/documents/dscovr/DSCOVR_EPIC_Calibration_Factors_V03.pdf -CALIB_COEFS = {'B317': 1.216e-4, - 'B325': 1.111e-4, - 'B340': 1.975e-5, - 'B388': 2.685e-5, - 'B443': 8.34e-6, - 'B551': 6.66e-6, - 'B680': 9.3e-6, - 'B688': 2.02e-5, - 'B764': 2.36e-5, - 'B780': 1.435e-5} +CALIB_COEFS = {"B317": 1.216e-4, + "B325": 1.111e-4, + "B340": 1.975e-5, + "B388": 2.685e-5, + "B443": 8.34e-6, + "B551": 6.66e-6, + "B680": 9.3e-6, + "B688": 2.02e-5, + "B764": 2.36e-5, + "B780": 1.435e-5} class DscovrEpicL1BH5FileHandler(HDF5FileHandler): @@ -68,19 +68,19 @@ def __init__(self, filename, filename_info, filetype_info): """Init filehandler.""" super(DscovrEpicL1BH5FileHandler, self).__init__(filename, filename_info, filetype_info) - self.sensor = 'epic' - self.platform_name = 'dscovr' + self.sensor = "epic" + self.platform_name = "dscovr" @property def start_time(self): """Get the start time.""" - start_time = datetime.strptime(self.file_content['/attr/begin_time'], '%Y-%m-%d %H:%M:%S') + start_time = datetime.strptime(self.file_content["/attr/begin_time"], "%Y-%m-%d %H:%M:%S") return start_time @property def end_time(self): """Get the end time.""" - end_time = datetime.strptime(self.file_content['/attr/end_time'], '%Y-%m-%d %H:%M:%S') + end_time = datetime.strptime(self.file_content["/attr/end_time"], "%Y-%m-%d %H:%M:%S") return end_time @staticmethod @@ -97,19 +97,19 @@ def calibrate(data, ds_name, calibration=None): def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" - ds_name = dataset_id['name'] + ds_name = dataset_id["name"] - logger.debug('Reading in get_dataset %s.', ds_name) - file_key = ds_info.get('file_key', ds_name) + logger.debug("Reading in get_dataset %s.", ds_name) + file_key = ds_info.get("file_key", ds_name) band = self._mask_infinite(self.get(file_key)) - band = self.calibrate(band, ds_name, calibration=dataset_id.get('calibration')) + band = self.calibrate(band, ds_name, calibration=dataset_id.get("calibration")) band = self._update_metadata(band) return band def _update_metadata(self, band): - band = band.rename({band.dims[0]: 'x', band.dims[1]: 'y'}) - band.attrs.update({'platform_name': self.platform_name, 'sensor': self.sensor}) + band = band.rename({band.dims[0]: "x", band.dims[1]: "y"}) + band.attrs.update({"platform_name": self.platform_name, "sensor": self.sensor}) return band diff --git a/satpy/readers/eps_l1b.py b/satpy/readers/eps_l1b.py index 1cc098a612..23e4ca712d 100644 --- a/satpy/readers/eps_l1b.py +++ b/satpy/readers/eps_l1b.py @@ -90,11 +90,11 @@ def read_records(filename): the_type = form.dtype((rec_class, sub_class)) # the_descr = grh_dtype.descr + the_type.descr except KeyError: - the_type = np.dtype([('unknown', 'V%d' % bare_size)]) + the_type = np.dtype([("unknown", "V%d" % bare_size)]) the_descr = grh_dtype.descr + the_type.descr the_type = np.dtype(the_descr) if the_type.itemsize < expected_size: - padding = [('unknown%d' % cnt, 'V%d' % (expected_size - the_type.itemsize))] + padding = [("unknown%d" % cnt, "V%d" % (expected_size - the_type.itemsize))] cnt += 1 the_descr += padding new_dtype = np.dtype(the_descr) @@ -112,14 +112,14 @@ def read_records(filename): offset = 0 for dtype, count, rec_class in zip(dtypes, counts, classes): fdes.seek(offset) - if rec_class == ('mdr', 2): - record = da.from_array(np.memmap(fdes, mode='r', dtype=dtype, shape=count, offset=offset), + if rec_class == ("mdr", 2): + record = da.from_array(np.memmap(fdes, mode="r", dtype=dtype, shape=count, offset=offset), 
chunks=(max_lines,)) else: record = np.fromfile(fdes, dtype=dtype, count=count) offset += dtype.itemsize * count if rec_class in sections: - logger.debug('Multiple records for ', str(rec_class)) + logger.debug("Multiple records for ", str(rec_class)) sections[rec_class] = np.hstack((sections[rec_class], record)) else: sections[rec_class] = record @@ -130,7 +130,7 @@ def read_records(filename): def create_xarray(arr): """Create xarray with correct dimensions.""" res = arr - res = xr.DataArray(res, dims=['y', 'x']) + res = xr.DataArray(res, dims=["y", "x"]) return res @@ -152,8 +152,8 @@ def __init__(self, filename, filename_info, filetype_info): filename, filename_info, filetype_info) self.area = None - self._start_time = filename_info['start_time'] - self._end_time = filename_info['end_time'] + self._start_time = filename_info["start_time"] + self._end_time = filename_info["end_time"] self.form = None self.scanlines = None self.pixels = None @@ -168,10 +168,10 @@ def __init__(self, filename, filename_info, filetype_info): def _read_all(self): logger.debug("Reading %s", self.filename) self.sections, self.form = read_records(self.filename) - self.scanlines = self['TOTAL_MDR'] - if self.scanlines != len(self.sections[('mdr', 2)]): + self.scanlines = self["TOTAL_MDR"] + if self.scanlines != len(self.sections[("mdr", 2)]): logger.warning("Number of declared records doesn't match number of scanlines in the file.") - self.scanlines = len(self.sections[('mdr', 2)]) + self.scanlines = len(self.sections[("mdr", 2)]) self.pixels = self["EARTH_VIEWS_PER_SCANLINE"] def __getitem__(self, key): @@ -287,24 +287,24 @@ def get_dataset(self, key, info): if self.sections is None: self._read_all() - if key['name'] in ['longitude', 'latitude']: + if key["name"] in ["longitude", "latitude"]: lons, lats = self.get_full_lonlats() - if key['name'] == 'longitude': + if key["name"] == "longitude": dataset = create_xarray(lons) else: dataset = create_xarray(lats) - elif key['name'] in ['solar_zenith_angle', 'solar_azimuth_angle', - 'satellite_zenith_angle', 'satellite_azimuth_angle']: + elif key["name"] in ["solar_zenith_angle", "solar_azimuth_angle", + "satellite_zenith_angle", "satellite_azimuth_angle"]: dataset = self._get_angle_dataarray(key) - elif key['name'] in ["1", "2", "3a", "3A", "3b", "3B", "4", "5"]: + elif key["name"] in ["1", "2", "3a", "3A", "3b", "3B", "4", "5"]: dataset = self._get_calibrated_dataarray(key) else: - logger.info("Can't load channel in eps_l1b: " + str(key['name'])) + logger.info("Can't load channel in eps_l1b: " + str(key["name"])) return - dataset.attrs['platform_name'] = self.platform_name - dataset.attrs['sensor'] = self.sensor_name + dataset.attrs["platform_name"] = self.platform_name + dataset.attrs["sensor"] = self.sensor_name if "calibration" in key: dataset.attrs["units"] = self.units[key["calibration"]] dataset.attrs.update(info) @@ -314,13 +314,13 @@ def get_dataset(self, key, info): def _get_angle_dataarray(self, key): """Get an angle dataarray.""" sun_azi, sun_zen, sat_azi, sat_zen = self.get_full_angles() - if key['name'] == 'solar_zenith_angle': + if key["name"] == "solar_zenith_angle": dataset = create_xarray(sun_zen) - elif key['name'] == 'solar_azimuth_angle': + elif key["name"] == "solar_azimuth_angle": dataset = create_xarray(sun_azi) - if key['name'] == 'satellite_zenith_angle': + if key["name"] == "satellite_zenith_angle": dataset = create_xarray(sat_zen) - elif key['name'] == 'satellite_azimuth_angle': + elif key["name"] == "satellite_azimuth_angle": dataset = 
create_xarray(sat_azi) return dataset @@ -336,26 +336,26 @@ def three_b_mask(self): def _get_calibrated_dataarray(self, key): """Get a calibrated dataarray.""" - if key['calibration'] not in ['reflectance', 'brightness_temperature', 'radiance']: - raise ValueError('calibration type ' + str(key['calibration']) + - ' is not supported!') + if key["calibration"] not in ["reflectance", "brightness_temperature", "radiance"]: + raise ValueError("calibration type " + str(key["calibration"]) + + " is not supported!") mask = None - channel_name = key['name'].upper() + channel_name = key["name"].upper() radiance_indices = {"1": 0, "2": 1, "3A": 2, "3B": 2, "4": 3, "5": 4} array = self["SCENE_RADIANCES"][:, radiance_indices[channel_name], :] if channel_name in ["1", "2", "3A"]: - if key['calibration'] == 'reflectance': + if key["calibration"] == "reflectance": array = radiance_to_refl(array, self[f"CH{channel_name}_SOLAR_FILTERED_IRRADIANCE"]) if channel_name == "3A": mask = self.three_a_mask[:, np.newaxis] if channel_name in ["3B", "4", "5"]: - if key['calibration'] == 'brightness_temperature': + if key["calibration"] == "brightness_temperature": array = radiance_to_bt(array, self[f"CH{channel_name}_CENTRAL_WAVENUMBER"], self[f"CH{channel_name}_CONSTANT1"], @@ -373,7 +373,7 @@ def get_lonlats(self): if self.area is None: lons, lats = self.get_full_lonlats() self.area = SwathDefinition(lons, lats) - self.area.name = '_'.join([self.platform_name, str(self.start_time), + self.area.name = "_".join([self.platform_name, str(self.start_time), str(self.end_time)]) return self.area diff --git a/satpy/readers/eum_base.py b/satpy/readers/eum_base.py index 76abcf035c..cc82ee008d 100644 --- a/satpy/readers/eum_base.py +++ b/satpy/readers/eum_base.py @@ -22,10 +22,10 @@ import numpy as np # 6 bytes, 8 bytes, 10 bytes -time_cds_short = [('Days', '>u2'), ('Milliseconds', '>u4')] -time_cds = time_cds_short + [('Microseconds', '>u2')] -time_cds_expanded = time_cds + [('Nanoseconds', '>u2')] -issue_revision = [('Issue', np.uint16), ('Revision', np.uint16)] +time_cds_short = [("Days", ">u2"), ("Milliseconds", ">u4")] +time_cds = time_cds_short + [("Microseconds", ">u2")] +time_cds_expanded = time_cds + [("Nanoseconds", ">u2")] +issue_revision = [("Issue", np.uint16), ("Revision", np.uint16)] def timecds2datetime(tcds): @@ -33,14 +33,14 @@ def timecds2datetime(tcds): Works both with a dictionary and a numpy record_array. """ - days = int(tcds['Days']) - milliseconds = int(tcds['Milliseconds']) + days = int(tcds["Days"]) + milliseconds = int(tcds["Milliseconds"]) try: - microseconds = int(tcds['Microseconds']) + microseconds = int(tcds["Microseconds"]) except (KeyError, ValueError): microseconds = 0 try: - microseconds += int(tcds['Nanoseconds']) / 1000. + microseconds += int(tcds["Nanoseconds"]) / 1000. 
except (KeyError, ValueError): pass @@ -71,14 +71,14 @@ def recarray2dict(arr): else: if data.size == 1: data = data[0] - if ntype[:2] == '|S': + if ntype[:2] == "|S": # Python2 and Python3 handle strings differently try: data = data.decode() except ValueError: data = None else: - data = data.split(':')[0].strip() + data = data.split(":")[0].strip() res[key] = data else: res[key] = data.squeeze() @@ -88,15 +88,15 @@ def recarray2dict(arr): def get_service_mode(instrument_name, ssp_lon): """Get information about service mode for a given instrument and subsatellite longitude.""" - service_modes = {'seviri': {'0.0': {'service_name': 'fes', 'service_desc': 'Full Earth Scanning service'}, - '9.5': {'service_name': 'rss', 'service_desc': 'Rapid Scanning Service'}, - '41.5': {'service_name': 'iodc', 'service_desc': 'Indian Ocean Data Coverage service'}, - '45.5': {'service_name': 'iodc', 'service_desc': 'Indian Ocean Data Coverage service'} + service_modes = {"seviri": {"0.0": {"service_name": "fes", "service_desc": "Full Earth Scanning service"}, + "9.5": {"service_name": "rss", "service_desc": "Rapid Scanning Service"}, + "41.5": {"service_name": "iodc", "service_desc": "Indian Ocean Data Coverage service"}, + "45.5": {"service_name": "iodc", "service_desc": "Indian Ocean Data Coverage service"} }, - 'fci': {'0.0': {'service_name': 'fdss', 'service_desc': 'Full Disk Scanning Service'}, - '9.5': {'service_name': 'rss', 'service_desc': 'Rapid Scanning Service'}, + "fci": {"0.0": {"service_name": "fdss", "service_desc": "Full Disk Scanning Service"}, + "9.5": {"service_name": "rss", "service_desc": "Rapid Scanning Service"}, }, } - unknown_modes = {'service_name': 'unknown', 'service_desc': 'unknown'} + unknown_modes = {"service_name": "unknown", "service_desc": "unknown"} - return service_modes.get(instrument_name, unknown_modes).get('{:.1f}'.format(ssp_lon), unknown_modes) + return service_modes.get(instrument_name, unknown_modes).get("{:.1f}".format(ssp_lon), unknown_modes) diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index 8e28219035..a405c86201 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -131,26 +131,26 @@ # dict containing all available auxiliary data parameters to be read using the index map. 
Keys are the # parameter name and values are the paths to the variable inside the netcdf AUX_DATA = { - 'subsatellite_latitude': 'state/platform/subsatellite_latitude', - 'subsatellite_longitude': 'state/platform/subsatellite_longitude', - 'platform_altitude': 'state/platform/platform_altitude', - 'subsolar_latitude': 'state/celestial/subsolar_latitude', - 'subsolar_longitude': 'state/celestial/subsolar_longitude', - 'earth_sun_distance': 'state/celestial/earth_sun_distance', - 'sun_satellite_distance': 'state/celestial/sun_satellite_distance', - 'time': 'time', - 'swath_number': 'data/swath_number', - 'swath_direction': 'data/swath_direction', + "subsatellite_latitude": "state/platform/subsatellite_latitude", + "subsatellite_longitude": "state/platform/subsatellite_longitude", + "platform_altitude": "state/platform/platform_altitude", + "subsolar_latitude": "state/celestial/subsolar_latitude", + "subsolar_longitude": "state/celestial/subsolar_longitude", + "earth_sun_distance": "state/celestial/earth_sun_distance", + "sun_satellite_distance": "state/celestial/sun_satellite_distance", + "time": "time", + "swath_number": "data/swath_number", + "swath_direction": "data/swath_direction", } -HIGH_RES_GRID_INFO = {'fci_l1c_hrfi': {'grid_type': '500m', - 'grid_width': 22272}, - 'fci_l1c_fdhsi': {'grid_type': '1km', - 'grid_width': 11136}} -LOW_RES_GRID_INFO = {'fci_l1c_hrfi': {'grid_type': '1km', - 'grid_width': 11136}, - 'fci_l1c_fdhsi': {'grid_type': '2km', - 'grid_width': 5568}} +HIGH_RES_GRID_INFO = {"fci_l1c_hrfi": {"grid_type": "500m", + "grid_width": 22272}, + "fci_l1c_fdhsi": {"grid_type": "1km", + "grid_width": 11136}} +LOW_RES_GRID_INFO = {"fci_l1c_hrfi": {"grid_type": "1km", + "grid_width": 11136}, + "fci_l1c_fdhsi": {"grid_type": "2km", + "grid_width": 5568}} def _get_aux_data_name_from_dsname(dsname): @@ -206,9 +206,9 @@ def __init__(self, filename, filename_info, filetype_info): filetype_info, cache_var_size=0, cache_handle=True) - logger.debug('Reading: {}'.format(self.filename)) - logger.debug('Start: {}'.format(self.start_time)) - logger.debug('End: {}'.format(self.end_time)) + logger.debug("Reading: {}".format(self.filename)) + logger.debug("Start: {}".format(self.start_time)) + logger.debug("End: {}".format(self.end_time)) self._cache = {} @@ -218,7 +218,7 @@ def rc_period_min(self): As RSS is not yet implemeted and error will be raised if RSS are to be read """ - if not self.filename_info['coverage'] == 'FD': + if not self.filename_info["coverage"] == "FD": raise NotImplementedError(f"coverage for {self.filename_info['coverage']} not supported by this reader") return 2.5 return 10 @@ -227,7 +227,7 @@ def rc_period_min(self): def nominal_start_time(self): """Get nominal start time.""" rc_date = self.observation_start_time.replace(hour=0, minute=0, second=0, microsecond=0) - return rc_date + timedelta(minutes=(self.filename_info['repeat_cycle_in_day']-1)*self.rc_period_min) + return rc_date + timedelta(minutes=(self.filename_info["repeat_cycle_in_day"]-1)*self.rc_period_min) @property def nominal_end_time(self): @@ -237,12 +237,12 @@ def nominal_end_time(self): @property def observation_start_time(self): """Get observation start time.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def observation_end_time(self): """Get observation end time.""" - return self.filename_info['end_time'] + return self.filename_info["end_time"] @property def start_time(self): @@ -256,9 +256,9 @@ def end_time(self): def 
get_channel_measured_group_path(self, channel): """Get the channel's measured group path.""" - if self.filetype_info['file_type'] == 'fci_l1c_hrfi': - channel += '_hr' - measured_group_path = 'data/{}/measured'.format(channel) + if self.filetype_info["file_type"] == "fci_l1c_hrfi": + channel += "_hr" + measured_group_path = "data/{}/measured".format(channel) return measured_group_path @@ -273,25 +273,25 @@ def get_segment_position_info(self): Note: in the FCI terminology, a segment is actually called "chunk". To avoid confusion with the dask concept of chunk, and to be consistent with SEVIRI, we opt to use the word segment. """ - vis_06_measured_path = self.get_channel_measured_group_path('vis_06') - ir_105_measured_path = self.get_channel_measured_group_path('ir_105') + vis_06_measured_path = self.get_channel_measured_group_path("vis_06") + ir_105_measured_path = self.get_channel_measured_group_path("ir_105") - file_type = self.filetype_info['file_type'] + file_type = self.filetype_info["file_type"] segment_position_info = { - HIGH_RES_GRID_INFO[file_type]['grid_type']: { - 'start_position_row': self.get_and_cache_npxr(vis_06_measured_path + '/start_position_row').item(), - 'end_position_row': self.get_and_cache_npxr(vis_06_measured_path + '/end_position_row').item(), - 'segment_height': self.get_and_cache_npxr(vis_06_measured_path + '/end_position_row').item() - - self.get_and_cache_npxr(vis_06_measured_path + '/start_position_row').item() + 1, - 'grid_width': HIGH_RES_GRID_INFO[file_type]['grid_width'] + HIGH_RES_GRID_INFO[file_type]["grid_type"]: { + "start_position_row": self.get_and_cache_npxr(vis_06_measured_path + "/start_position_row").item(), + "end_position_row": self.get_and_cache_npxr(vis_06_measured_path + "/end_position_row").item(), + "segment_height": self.get_and_cache_npxr(vis_06_measured_path + "/end_position_row").item() - + self.get_and_cache_npxr(vis_06_measured_path + "/start_position_row").item() + 1, + "grid_width": HIGH_RES_GRID_INFO[file_type]["grid_width"] }, - LOW_RES_GRID_INFO[file_type]['grid_type']: { - 'start_position_row': self.get_and_cache_npxr(ir_105_measured_path + '/start_position_row').item(), - 'end_position_row': self.get_and_cache_npxr(ir_105_measured_path + '/end_position_row').item(), - 'segment_height': self.get_and_cache_npxr(ir_105_measured_path + '/end_position_row').item() - - self.get_and_cache_npxr(ir_105_measured_path + '/start_position_row').item() + 1, - 'grid_width': LOW_RES_GRID_INFO[file_type]['grid_width'] + LOW_RES_GRID_INFO[file_type]["grid_type"]: { + "start_position_row": self.get_and_cache_npxr(ir_105_measured_path + "/start_position_row").item(), + "end_position_row": self.get_and_cache_npxr(ir_105_measured_path + "/end_position_row").item(), + "segment_height": self.get_and_cache_npxr(ir_105_measured_path + "/end_position_row").item() - + self.get_and_cache_npxr(ir_105_measured_path + "/start_position_row").item() + 1, + "grid_width": LOW_RES_GRID_INFO[file_type]["grid_width"] } } @@ -299,14 +299,14 @@ def get_segment_position_info(self): def get_dataset(self, key, info=None): """Load a dataset.""" - logger.debug('Reading {} from {}'.format(key['name'], self.filename)) - if "pixel_quality" in key['name']: - return self._get_dataset_quality(key['name']) - elif "index_map" in key['name']: - return self._get_dataset_index_map(key['name']) - elif _get_aux_data_name_from_dsname(key['name']) is not None: - return self._get_dataset_aux_data(key['name']) - elif any(lb in key['name'] for lb in {"vis_", "ir_", "nir_", "wv_"}): + 
logger.debug("Reading {} from {}".format(key["name"], self.filename)) + if "pixel_quality" in key["name"]: + return self._get_dataset_quality(key["name"]) + elif "index_map" in key["name"]: + return self._get_dataset_index_map(key["name"]) + elif _get_aux_data_name_from_dsname(key["name"]) is not None: + return self._get_dataset_aux_data(key["name"]) + elif any(lb in key["name"] for lb in {"vis_", "ir_", "nir_", "wv_"}): return self._get_dataset_measurand(key, info=info) else: raise ValueError("Unknown dataset key, not a channel, quality or auxiliary data: " @@ -321,7 +321,7 @@ def _get_dataset_measurand(self, key, info=None): """ # Get the dataset # Get metadata for given dataset - measured = self.get_channel_measured_group_path(key['name']) + measured = self.get_channel_measured_group_path(key["name"]) data = self[measured + "/effective_radiance"] attrs = dict(data.attrs).copy() @@ -332,7 +332,7 @@ def _get_dataset_measurand(self, key, info=None): "FillValue", default_fillvals.get(data.dtype.str[1:], np.nan)) vr = attrs.get("valid_range", [-np.inf, np.inf]) - if key['calibration'] == "counts": + if key["calibration"] == "counts": attrs["_FillValue"] = fv nfv = fv else: @@ -357,7 +357,7 @@ def _get_dataset_measurand(self, key, info=None): # https://github.com/pytroll/satpy/issues/1171. if "pixel_quality" in attrs["ancillary_variables"]: attrs["ancillary_variables"] = attrs["ancillary_variables"].replace( - "pixel_quality", key['name'] + "_pixel_quality") + "pixel_quality", key["name"] + "_pixel_quality") else: raise ValueError( "Unexpected value for attribute ancillary_variables, " @@ -373,20 +373,20 @@ def _get_dataset_measurand(self, key, info=None): self["attr/platform"], self["attr/platform"]) # remove unpacking parameters for calibrated data - if key['calibration'] in ['brightness_temperature', 'reflectance']: + if key["calibration"] in ["brightness_temperature", "reflectance"]: res.attrs.pop("add_offset") res.attrs.pop("warm_add_offset") res.attrs.pop("scale_factor") res.attrs.pop("warm_scale_factor") # remove attributes from original file which don't apply anymore - res.attrs.pop('long_name') + res.attrs.pop("long_name") # Add time_parameter attributes - res.attrs['time_parameters'] = { - 'nominal_start_time': self.nominal_start_time, - 'nominal_end_time': self.nominal_end_time, - 'observation_start_time': self.observation_start_time, - 'observation_end_time': self.observation_end_time, + res.attrs["time_parameters"] = { + "nominal_start_time": self.nominal_start_time, + "nominal_end_time": self.nominal_end_time, + "observation_start_time": self.observation_start_time, + "observation_end_time": self.observation_end_time, } res.attrs.update(self.orbital_param) @@ -395,9 +395,9 @@ def _get_dataset_measurand(self, key, info=None): @cached_property def orbital_param(self): """Compute the orbital parameters for the current segment.""" - actual_subsat_lon = float(np.nanmean(self._get_aux_data_lut_vector('subsatellite_longitude'))) - actual_subsat_lat = float(np.nanmean(self._get_aux_data_lut_vector('subsatellite_latitude'))) - actual_sat_alt = float(np.nanmean(self._get_aux_data_lut_vector('platform_altitude'))) + actual_subsat_lon = float(np.nanmean(self._get_aux_data_lut_vector("subsatellite_longitude"))) + actual_subsat_lat = float(np.nanmean(self._get_aux_data_lut_vector("subsatellite_latitude"))) + actual_sat_alt = float(np.nanmean(self._get_aux_data_lut_vector("platform_altitude"))) nominal_and_proj_subsat_lon = float( 
self.get_and_cache_npxr("data/mtg_geos_projection/attr/longitude_of_projection_origin")) nominal_and_proj_subsat_lat = 0 @@ -405,16 +405,16 @@ def orbital_param(self): self.get_and_cache_npxr("data/mtg_geos_projection/attr/perspective_point_height")) orb_param_dict = { - 'orbital_parameters': { - 'satellite_actual_longitude': actual_subsat_lon, - 'satellite_actual_latitude': actual_subsat_lat, - 'satellite_actual_altitude': actual_sat_alt, - 'satellite_nominal_longitude': nominal_and_proj_subsat_lon, - 'satellite_nominal_latitude': nominal_and_proj_subsat_lat, - 'satellite_nominal_altitude': nominal_and_proj_sat_alt, - 'projection_longitude': nominal_and_proj_subsat_lon, - 'projection_latitude': nominal_and_proj_subsat_lat, - 'projection_altitude': nominal_and_proj_sat_alt, + "orbital_parameters": { + "satellite_actual_longitude": actual_subsat_lon, + "satellite_actual_latitude": actual_subsat_lat, + "satellite_actual_altitude": actual_sat_alt, + "satellite_nominal_longitude": nominal_and_proj_subsat_lon, + "satellite_nominal_latitude": nominal_and_proj_subsat_lat, + "satellite_nominal_altitude": nominal_and_proj_sat_alt, + "projection_longitude": nominal_and_proj_subsat_lon, + "projection_latitude": nominal_and_proj_subsat_lat, + "projection_altitude": nominal_and_proj_sat_alt, }} return orb_param_dict @@ -432,7 +432,7 @@ def _get_dataset_index_map(self, dsname): dv_path = grp_path + "/index_map" data = self[dv_path] - data = data.where(data != data.attrs.get('_FillValue', 65535)) + data = data.where(data != data.attrs.get("_FillValue", 65535)) return data def _get_aux_data_lut_vector(self, aux_data_name): @@ -446,14 +446,14 @@ def _get_aux_data_lut_vector(self, aux_data_name): @staticmethod def _getitem(block, lut): - return lut[block.astype('uint16')] + return lut[block.astype("uint16")] def _get_dataset_aux_data(self, dsname): """Get the auxiliary data arrays using the index map.""" # get index map index_map = self._get_dataset_index_map(_get_channel_name_from_dsname(dsname)) # subtract minimum of index variable (index_offset) - index_map -= np.min(self.get_and_cache_npxr('index')) + index_map -= np.min(self.get_and_cache_npxr("index")) # get lut values from 1-d vector variable lut = self._get_aux_data_lut_vector(_get_aux_data_name_from_dsname(dsname)) @@ -472,14 +472,14 @@ def calc_area_extent(self, key): # if a user requests a pixel quality or index map before the channel data, the # yaml-reader will ask the area extent of the pixel quality/index map field, # which will ultimately end up here - channel_name = _get_channel_name_from_dsname(key['name']) + channel_name = _get_channel_name_from_dsname(key["name"]) # Get metadata for given dataset measured = self.get_channel_measured_group_path(channel_name) # Get start/end line and column of loaded swath. 
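# Illustration (not from the patch): the x/y coordinates read below are scan angles
# in radians, so a projection extent in metres follows from multiplying by the
# perspective point height h. Sketch with made-up numbers:
import numpy as np
h = 35786400.0                     # perspective point height in m (typical GEO value)
x_rad = np.array([-0.156, 0.156])  # made-up scan-angle limits in radians
extent_x = x_rad * h               # -> roughly +/- 5.58e6 m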
nlines, ncols = self[measured + "/effective_radiance/shape"] - logger.debug('Channel {} resolution: {}'.format(channel_name, ncols)) - logger.debug('Row/Cols: {} / {}'.format(nlines, ncols)) + logger.debug("Channel {} resolution: {}".format(channel_name, ncols)) + logger.debug("Row/Cols: {} / {}".format(nlines, ncols)) # Calculate full globe line extent h = float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/perspective_point_height")) @@ -489,16 +489,16 @@ def calc_area_extent(self, key): coord_radian = self.get_and_cache_npxr(measured + "/{:s}".format(coord)) # TODO remove this check when old versions of IDPF test data (<v4) are deprecated. - if coord == 'x' and coord_radian.attrs['scale_factor'] > 0: - coord_radian.attrs['scale_factor'] *= -1 + if coord == "x" and coord_radian.attrs["scale_factor"] > 0: + coord_radian.attrs["scale_factor"] *= -1 # TODO remove this check when old versions of IDPF test data (<v4) are deprecated. if sun_earth_distance < 0.9 or sun_earth_distance > 1.1: - logger.info('The variable state/celestial/earth_sun_distance contains unexpected values' '(mean value is {} AU). Defaulting to 1 AU for reflectance calculation.' ''.format(sun_earth_distance)) + logger.info("The variable state/celestial/earth_sun_distance contains unexpected values" "(mean value is {} AU). Defaulting to 1 AU for reflectance calculation." "".format(sun_earth_distance)) sun_earth_distance = 1 res = 100 * radiance * np.pi * sun_earth_distance ** 2 / cesi diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index cbb47b2c8c..c387326f89 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -41,18 +41,18 @@ class FciL2CommonFunctions(object): @property def spacecraft_name(self): """Return spacecraft name.""" - return self.nc.attrs['platform'] + return self.nc.attrs["platform"] @property def sensor_name(self): """Return instrument name.""" - return self.nc.attrs['data_source'] + return self.nc.attrs["data_source"] @property def ssp_lon(self): """Return longitude at subsatellite point.""" try: - return float(self.nc['mtg_geos_projection'].attrs['longitude_of_projection_origin']) + return float(self.nc["mtg_geos_projection"].attrs["longitude_of_projection_origin"]) except (KeyError, AttributeError): logger.warning(f"ssp_lon could not be obtained from file content, using default value " f"of {SSP_DEFAULT} degrees east instead") @@ -71,11 +71,11 @@ def _get_global_attributes(self): """ attributes = { - 'filename': self.filename, - 'spacecraft_name': self.spacecraft_name, - 'ssp_lon': self.ssp_lon, - 'sensor': self.sensor_name, - 'platform_name': self.spacecraft_name, + "filename": self.filename, + "spacecraft_name": self.spacecraft_name, + "ssp_lon": self.ssp_lon, + "sensor": self.sensor_name, + "platform_name": self.spacecraft_name, } return attributes @@ -86,10 +86,10 @@ def _set_attributes(self, variable, dataset_info, segmented=False): else: xdim, ydim = "number_of_columns", "number_of_rows" - if dataset_info['file_key'] not in ['product_quality', 'product_completeness', 'product_timeliness']: - variable = variable.rename({ydim: 'y', xdim: 'x'}) + if dataset_info["file_key"] not in ["product_quality", "product_completeness", "product_timeliness"]: + variable = variable.rename({ydim: "y", xdim: "x"}) - variable.attrs.setdefault('units', None) + variable.attrs.setdefault("units", None) variable.attrs.update(dataset_info) variable.attrs.update(self._get_global_attributes()) @@ -116,7 +116,7 @@ def _mask_data(variable, fill_value): fill_value = [fill_value] for val in fill_value: - variable = variable.where(variable != 
val).astype("float32") return variable @@ -139,8 +139,8 @@ def __init__(self, filename, filename_info, filetype_info, with_area_definition= decode_cf=True, mask_and_scale=True, chunks={ - 'number_of_columns': CHUNK_SIZE, - 'number_of_rows': CHUNK_SIZE + "number_of_columns": CHUNK_SIZE, + "number_of_rows": CHUNK_SIZE } ) @@ -148,10 +148,10 @@ def __init__(self, filename, filename_info, filetype_info, with_area_definition= logger.info("Setting `with_area_defintion=False` has no effect on pixel-based products.") # Read metadata which are common to all datasets - self.nlines = self.nc['y'].size - self.ncols = self.nc['x'].size - self._projection = self.nc['mtg_geos_projection'] - self.multi_dims = {'maximum_number_of_layers': 'layer', 'number_of_vis_channels': 'vis_channel_id'} + self.nlines = self.nc["y"].size + self.ncols = self.nc["x"].size + self._projection = self.nc["mtg_geos_projection"] + self.multi_dims = {"maximum_number_of_layers": "layer", "number_of_vis_channels": "vis_channel_id"} def get_area_def(self, key): """Return the area definition.""" @@ -162,9 +162,9 @@ def get_area_def(self, key): def get_dataset(self, dataset_id, dataset_info): """Get dataset using the file_key in dataset_info.""" - var_key = dataset_info['file_key'] - par_name = dataset_info['name'] - logger.debug('Reading in file to get dataset with key %s.', var_key) + var_key = dataset_info["file_key"] + par_name = dataset_info["name"] + logger.debug("Reading in file to get dataset with key %s.", var_key) try: variable = self.nc[var_key] @@ -173,20 +173,20 @@ def get_dataset(self, dataset_id, dataset_info): return None # Compute the area definition - if var_key not in ['product_quality', 'product_completeness', 'product_timeliness']: + if var_key not in ["product_quality", "product_completeness", "product_timeliness"]: self._area_def = self._compute_area_def(dataset_id) if any(dim_id in dataset_info.keys() for dim_id in self.multi_dims.values()): variable = self._slice_dataset(variable, dataset_info, self.multi_dims) - if par_name == 'retrieved_cloud_optical_thickness': + if par_name == "retrieved_cloud_optical_thickness": variable = self.get_total_cot(variable) - if dataset_info['file_type'] == 'nc_fci_test_clm': + if dataset_info["file_type"] == "nc_fci_test_clm": variable = self._decode_clm_test_data(variable, dataset_info) - if 'fill_value' in dataset_info: - variable = self._mask_data(variable, dataset_info['fill_value']) + if "fill_value" in dataset_info: + variable = self._mask_data(variable, dataset_info["fill_value"]) variable = self._set_attributes(variable, dataset_info) @@ -194,9 +194,9 @@ def get_dataset(self, dataset_id, dataset_info): @staticmethod def _decode_clm_test_data(variable, dataset_info): - if dataset_info['file_key'] != 'cloud_mask_cmrt6_test_result': - variable = variable.astype('uint32') - variable.values = (variable.values >> dataset_info['extract_byte'] << 31 >> 31).astype('int8') + if dataset_info["file_key"] != "cloud_mask_cmrt6_test_result": + variable = variable.astype("uint32") + variable.values = (variable.values >> dataset_info["extract_byte"] << 31 >> 31).astype("int8") return variable @@ -210,8 +210,8 @@ def _compute_area_def(self, dataset_id): area_extent = self._get_area_extent() area_naming, proj_dict = self._get_proj_area(dataset_id) area_def = geometry.AreaDefinition( - area_naming['area_id'], - area_naming['description'], + area_naming["area_id"], + area_naming["description"], "", proj_dict, self.ncols, @@ -223,15 +223,15 @@ def _compute_area_def(self, dataset_id): def 
_get_area_extent(self): """Calculate area extent of dataset.""" # Load and convert x/y coordinates to degrees as required by the make_ext function - x = self.nc['x'] - y = self.nc['y'] + x = self.nc["x"] + y = self.nc["y"] x_deg = np.degrees(x) y_deg = np.degrees(y) # Select the extreme points and calcualte area extent (not: these refer to pixel center) ll_x, ur_x = -x_deg.values[0], -x_deg.values[-1] ll_y, ur_y = y_deg.values[-1], y_deg.values[0] - h = float(self._projection.attrs['perspective_point_height']) + h = float(self._projection.attrs["perspective_point_height"]) area_extent_pixel_center = make_ext(ll_x, ur_x, ll_y, ur_y, h) # Shift area extent by half a pixel to get the area extent w.r.t. the dataset/pixel corners @@ -244,30 +244,30 @@ def _get_area_extent(self): def _get_proj_area(self, dataset_id): """Extract projection and area information.""" # Read the projection data from the mtg_geos_projection variable - a = float(self._projection.attrs['semi_major_axis']) - h = float(self._projection.attrs['perspective_point_height']) + a = float(self._projection.attrs["semi_major_axis"]) + h = float(self._projection.attrs["perspective_point_height"]) # Some L2PF test data files have a typo in the keyname for the inverse flattening parameter. Use a default value # as fallback until all L2PF test files are correctly formatted. - rf = float(self._projection.attrs.get('inverse_flattening', 298.257223563)) + rf = float(self._projection.attrs.get("inverse_flattening", 298.257223563)) res = dataset_id["resolution"] - area_naming_input_dict = {'platform_name': 'mtg', - 'instrument_name': 'fci', - 'resolution': res, + area_naming_input_dict = {"platform_name": "mtg", + "instrument_name": "fci", + "resolution": res, } area_naming = get_geos_area_naming({**area_naming_input_dict, - **get_service_mode('fci', self.ssp_lon)}) + **get_service_mode("fci", self.ssp_lon)}) - proj_dict = {'a': a, - 'lon_0': self.ssp_lon, - 'h': h, + proj_dict = {"a": a, + "lon_0": self.ssp_lon, + "h": h, "rf": rf, - 'proj': 'geos', - 'units': 'm', - "sweep": 'y'} + "proj": "geos", + "units": "m", + "sweep": "y"} return area_naming, proj_dict @@ -281,7 +281,7 @@ def get_total_cot(variable): attrs = variable.attrs variable = 10 ** variable variable = variable.fillna(0.) 
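# Illustration (not from the patch): the layer values are stored as log10(COT), so
# the total below is accumulated in linear space and converted back,
# total = log10(sum_k 10**cot_k). Tiny numeric check with made-up layers:
import numpy as np
layers = np.array([1.0, 1.0])             # log10(COT) for two made-up layers
total = np.log10(np.sum(10.0 ** layers))  # -> log10(20) ~ 1.301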
- variable = variable.sum(dim='maximum_number_of_layers', keep_attrs=True) + variable = variable.sum(dim="maximum_number_of_layers", keep_attrs=True) variable = variable.where(variable != 0., np.nan) variable = np.log10(variable) variable.attrs = attrs @@ -301,19 +301,19 @@ def __init__(self, filename, filename_info, filetype_info, with_area_definition= decode_cf=True, mask_and_scale=True, chunks={ - 'number_of_FoR_cols': CHUNK_SIZE, - 'number_of_FoR_rows': CHUNK_SIZE + "number_of_FoR_cols": CHUNK_SIZE, + "number_of_FoR_rows": CHUNK_SIZE } ) # Read metadata which are common to all datasets - self.nlines = self.nc['number_of_FoR_rows'].size - self.ncols = self.nc['number_of_FoR_cols'].size + self.nlines = self.nc["number_of_FoR_rows"].size + self.ncols = self.nc["number_of_FoR_cols"].size self.with_adef = with_area_definition self.multi_dims = { - 'number_of_categories': 'category_id', 'number_of_channels': 'channel_id', - 'number_of_vis_channels': 'vis_channel_id', 'number_of_ir_channels': 'ir_channel_id', - 'number_test': 'test_id', + "number_of_categories": "category_id", "number_of_channels": "channel_id", + "number_of_vis_channels": "vis_channel_id", "number_of_ir_channels": "ir_channel_id", + "number_test": "test_id", } def get_area_def(self, key): @@ -325,8 +325,8 @@ def get_area_def(self, key): def get_dataset(self, dataset_id, dataset_info): """Get dataset using the file_key in dataset_info.""" - var_key = dataset_info['file_key'] - logger.debug('Reading in file to get dataset with key %s.', var_key) + var_key = dataset_info["file_key"] + logger.debug("Reading in file to get dataset with key %s.", var_key) try: variable = self.nc[var_key] @@ -337,16 +337,16 @@ def get_dataset(self, dataset_id, dataset_info): if any(dim_id in dataset_info.keys() for dim_id in self.multi_dims.values()): variable = self._slice_dataset(variable, dataset_info, self.multi_dims) - if self.with_adef and var_key not in ['longitude', 'latitude', - 'product_quality', 'product_completeness', 'product_timeliness']: + if self.with_adef and var_key not in ["longitude", "latitude", + "product_quality", "product_completeness", "product_timeliness"]: self._area_def = self._construct_area_def(dataset_id) # coordinates are not relevant when returning data with an AreaDefinition - if 'coordinates' in dataset_info.keys(): - del dataset_info['coordinates'] + if "coordinates" in dataset_info.keys(): + del dataset_info["coordinates"] - if 'fill_value' in dataset_info: - variable = self._mask_data(variable, dataset_info['fill_value']) + if "fill_value" in dataset_info: + variable = self._mask_data(variable, dataset_info["fill_value"]) variable = self._set_attributes(variable, dataset_info, segmented=True) @@ -361,19 +361,19 @@ def _construct_area_def(self, dataset_id): """ res = dataset_id["resolution"] - area_naming_input_dict = {'platform_name': 'mtg', - 'instrument_name': 'fci', - 'resolution': res, + area_naming_input_dict = {"platform_name": "mtg", + "instrument_name": "fci", + "resolution": res, } area_naming = get_geos_area_naming({**area_naming_input_dict, - **get_service_mode('fci', self.ssp_lon)}) + **get_service_mode("fci", self.ssp_lon)}) # Construct area definition from standardized area definition. 
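# Illustration (not from the patch): the standardized area name combines platform,
# instrument and resolution with the service mode derived from the sub-satellite
# longitude via get_service_mode (see eum_base.py above), e.g.:
mode = get_service_mode("fci", 0.0)
# -> {"service_name": "fdss", "service_desc": "Full Disk Scanning Service"}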
@@ -361,19 +361,19 @@ def _construct_area_def(self, dataset_id):
         """
         res = dataset_id["resolution"]

-        area_naming_input_dict = {'platform_name': 'mtg',
-                                  'instrument_name': 'fci',
-                                  'resolution': res,
+        area_naming_input_dict = {"platform_name": "mtg",
+                                  "instrument_name": "fci",
+                                  "resolution": res,
                                   }

         area_naming = get_geos_area_naming({**area_naming_input_dict,
-                                            **get_service_mode('fci', self.ssp_lon)})
+                                            **get_service_mode("fci", self.ssp_lon)})

         # Construct area definition from standardized area definition.
-        stand_area_def = get_area_def(area_naming['area_id'])
+        stand_area_def = get_area_def(area_naming["area_id"])

         if (stand_area_def.x_size != self.ncols) | (stand_area_def.y_size != self.nlines):
-            raise NotImplementedError('Unrecognised AreaDefinition.')
+            raise NotImplementedError("Unrecognised AreaDefinition.")

         mod_area_extent = self._modify_area_extent(stand_area_def.area_extent)
diff --git a/satpy/readers/file_handlers.py b/satpy/readers/file_handlers.py
index 0c47553b0d..3fdeed1edc 100644
--- a/satpy/readers/file_handlers.py
+++ b/satpy/readers/file_handlers.py
@@ -112,16 +112,16 @@ def combine_info(self, all_infos):
         """
         combined_info = combine_metadata(*all_infos)

-        new_dict = self._combine(all_infos, min, 'start_time', 'start_orbit')
-        new_dict.update(self._combine(all_infos, max, 'end_time', 'end_orbit'))
+        new_dict = self._combine(all_infos, min, "start_time", "start_orbit")
+        new_dict.update(self._combine(all_infos, max, "end_time", "end_orbit"))
         new_dict.update(self._combine_orbital_parameters(all_infos))
         new_dict.update(self._combine_time_parameters(all_infos))

         try:
-            area = SwathDefinition(lons=np.ma.vstack([info['area'].lons for info in all_infos]),
-                                   lats=np.ma.vstack([info['area'].lats for info in all_infos]))
-            area.name = '_'.join([info['area'].name for info in all_infos])
-            combined_info['area'] = area
+            area = SwathDefinition(lons=np.ma.vstack([info["area"].lons for info in all_infos]),
+                                   lats=np.ma.vstack([info["area"].lats for info in all_infos]))
+            area.name = "_".join([info["area"].name for info in all_infos])
+            combined_info["area"] = area
         except KeyError:
             pass

@@ -129,7 +129,7 @@ def combine_info(self, all_infos):
         return new_dict

     def _combine_orbital_parameters(self, all_infos):
-        orb_params = [info.get('orbital_parameters', {}) for info in all_infos]
+        orb_params = [info.get("orbital_parameters", {}) for info in all_infos]
         if not all(orb_params):
             return {}
         # Collect all available keys
@@ -138,15 +138,15 @@
             orb_params_comb.update(d)

         # Average known keys
-        keys = ['projection_longitude', 'projection_latitude', 'projection_altitude',
-                'satellite_nominal_longitude', 'satellite_nominal_latitude',
-                'satellite_actual_longitude', 'satellite_actual_latitude', 'satellite_actual_altitude',
-                'nadir_longitude', 'nadir_latitude']
+        keys = ["projection_longitude", "projection_latitude", "projection_altitude",
+                "satellite_nominal_longitude", "satellite_nominal_latitude",
+                "satellite_actual_longitude", "satellite_actual_latitude", "satellite_actual_altitude",
+                "nadir_longitude", "nadir_latitude"]
         orb_params_comb.update(self._combine(orb_params, np.mean, *keys))
-        return {'orbital_parameters': orb_params_comb}
+        return {"orbital_parameters": orb_params_comb}

     def _combine_time_parameters(self, all_infos):
-        time_params = [info.get('time_parameters', {}) for info in all_infos]
+        time_params = [info.get("time_parameters", {}) for info in all_infos]
         if not all(time_params):
             return {}
         # Collect all available keys
@@ -155,26 +155,26 @@
             time_params_comb.update(d)

         start_keys = (
-            'nominal_start_time',
-            'observation_start_time',
+            "nominal_start_time",
+            "observation_start_time",
         )
         end_keys = (
-            'nominal_end_time',
-            'observation_end_time',
+            "nominal_end_time",
+            "observation_end_time",
         )
         time_params_comb.update(self._combine(time_params, min, *start_keys))
         time_params_comb.update(self._combine(time_params, max, *end_keys))
-        return {'time_parameters': time_params_comb}
+        return {"time_parameters": time_params_comb}
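# Aside: the _combine helper called throughout combine_info is not shown in this diff;
# the sketch below is an assumption about its behaviour, inferred only from how it is
# called (reduce a key across all info dicts with min, max or np.mean).
def _combine(infos, func, *keys):
    # Keep a key only if every info dict provides it, then reduce the values with func.
    return {key: func([info[key] for info in infos])
            for key in keys if all(key in info for info in infos)}

# e.g. _combine([{"start_time": 2}, {"start_time": 1}], min, "start_time") -> {"start_time": 1}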
{"time_parameters": time_params_comb} @property def start_time(self): """Get start time.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info.get("end_time", self.start_time) @property def sensor_names(self): @@ -197,7 +197,7 @@ def file_type_matches(self, ds_ftype): """ if not isinstance(ds_ftype, (list, tuple)): ds_ftype = [ds_ftype] - if self.filetype_info['file_type'] in ds_ftype: + if self.filetype_info["file_type"] in ds_ftype: return True return None @@ -295,4 +295,4 @@ def available_datasets(self, configured_datasets=None): # file handler so let's yield early yield is_avail, ds_info continue - yield self.file_type_matches(ds_info['file_type']), ds_info + yield self.file_type_matches(ds_info["file_type"]), ds_info diff --git a/satpy/readers/fy4_base.py b/satpy/readers/fy4_base.py index 9b6b364420..144e559858 100644 --- a/satpy/readers/fy4_base.py +++ b/satpy/readers/fy4_base.py @@ -46,7 +46,7 @@ def __init__(self, filename, filename_info, filetype_info): """Init filehandler.""" super(FY4Base, self).__init__(filename, filename_info, filetype_info) - self.sensor = filename_info['instrument'] + self.sensor = filename_info["instrument"] # info of 250m, 500m, 1km, 2km and 4km data self._COFF_list = [21983.5, 10991.5, 5495.5, 2747.5, 1373.5] @@ -55,17 +55,17 @@ def __init__(self, filename, filename_info, filetype_info): self._CFAC_list = [163730199.0, 81865099.0, 40932549.0, 20466274.0, 10233137.0] self._LFAC_list = [163730199.0, 81865099.0, 40932549.0, 20466274.0, 10233137.0] - self.PLATFORM_NAMES = {'FY4A': 'FY-4A', - 'FY4B': 'FY-4B', - 'FY4C': 'FY-4C'} + self.PLATFORM_NAMES = {"FY4A": "FY-4A", + "FY4B": "FY-4B", + "FY4C": "FY-4C"} try: - self.PLATFORM_ID = self.PLATFORM_NAMES[filename_info['platform_id']] + self.PLATFORM_ID = self.PLATFORM_NAMES[filename_info["platform_id"]] except KeyError: raise KeyError(f"Unsupported platform ID: {filename_info['platform_id']}") - self.CHANS_ID = 'NOMChannel' - self.SAT_ID = 'NOMSatellite' - self.SUN_ID = 'NOMSun' + self.CHANS_ID = "NOMChannel" + self.SAT_ID = "NOMSatellite" + self.SUN_ID = "NOMSun" @staticmethod def scale(dn, slope, offset): @@ -112,10 +112,10 @@ def _getitem(block, lut): def reflectance_coeffs(self): """Retrieve the reflectance calibration coefficients from the HDF file.""" # using the corresponding SCALE and OFFSET - if self.PLATFORM_ID == 'FY-4A': - cal_coef = 'CALIBRATION_COEF(SCALE+OFFSET)' - elif self.PLATFORM_ID == 'FY-4B': - cal_coef = 'Calibration/CALIBRATION_COEF(SCALE+OFFSET)' + if self.PLATFORM_ID == "FY-4A": + cal_coef = "CALIBRATION_COEF(SCALE+OFFSET)" + elif self.PLATFORM_ID == "FY-4B": + cal_coef = "Calibration/CALIBRATION_COEF(SCALE+OFFSET)" else: raise KeyError(f"Unsupported platform ID for calibration: {self.PLATFORM_ID}") return self.get(cal_coef).values @@ -123,58 +123,58 @@ def reflectance_coeffs(self): def calibrate(self, data, ds_info, ds_name, file_key): """Calibrate the data.""" # Check if calibration is present, if not assume dataset is an angle - calibration = ds_info.get('calibration') + calibration = ds_info.get("calibration") # Return raw data in case of counts or no calibration - if calibration in ('counts', None): - data.attrs['units'] = ds_info['units'] - ds_info['valid_range'] = data.attrs['valid_range'] - ds_info['fill_value'] = data.attrs['FillValue'].item() - elif calibration == 'reflectance': + if calibration in ("counts", 
@@ -123,58 +123,58 @@ def calibrate(self, data, ds_info, ds_name, file_key):
         """Calibrate the data."""
         # Check if calibration is present, if not assume dataset is an angle
-        calibration = ds_info.get('calibration')
+        calibration = ds_info.get("calibration")
         # Return raw data in case of counts or no calibration
-        if calibration in ('counts', None):
-            data.attrs['units'] = ds_info['units']
-            ds_info['valid_range'] = data.attrs['valid_range']
-            ds_info['fill_value'] = data.attrs['FillValue'].item()
-        elif calibration == 'reflectance':
+        if calibration in ("counts", None):
+            data.attrs["units"] = ds_info["units"]
+            ds_info["valid_range"] = data.attrs["valid_range"]
+            ds_info["fill_value"] = data.attrs["FillValue"].item()
+        elif calibration == "reflectance":
             channel_index = int(file_key[-2:]) - 1
             data = self.calibrate_to_reflectance(data, channel_index, ds_info)
-        elif calibration == 'brightness_temperature':
+        elif calibration == "brightness_temperature":
             data = self.calibrate_to_bt(data, ds_info, ds_name)
-        elif calibration == 'radiance':
+        elif calibration == "radiance":
             raise NotImplementedError("Calibration to radiance is not supported.")
         # Apply range limits, but not for counts: masking would convert the data to float!
-        if calibration != 'counts':
-            data = data.where((data >= min(data.attrs['valid_range'])) &
-                              (data <= max(data.attrs['valid_range'])))
+        if calibration != "counts":
+            data = data.where((data >= min(data.attrs["valid_range"])) &
+                              (data <= max(data.attrs["valid_range"])))
         else:
-            data.attrs['_FillValue'] = data.attrs['FillValue'].item()
+            data.attrs["_FillValue"] = data.attrs["FillValue"].item()

         return data

     def calibrate_to_reflectance(self, data, channel_index, ds_info):
         """Calibrate to reflectance [%]."""
         logger.debug("Calibrating to reflectances")
         # using the corresponding SCALE and OFFSET
-        if self.sensor != 'AGRI' and self.sensor != 'GHI':
-            raise ValueError(f'Unsupported sensor type: {self.sensor}')
+        if self.sensor != "AGRI" and self.sensor != "GHI":
+            raise ValueError(f"Unsupported sensor type: {self.sensor}")

         coeffs = self.reflectance_coeffs
         num_channel = coeffs.shape[0]

-        if self.sensor == 'AGRI' and num_channel == 1:
+        if self.sensor == "AGRI" and num_channel == 1:
             # only channel_2, resolution = 500 m
             channel_index = 0

-        data.data = da.where(data.data == data.attrs['FillValue'].item(), np.nan, data.data)
-        data.attrs['scale_factor'] = coeffs[channel_index, 0].item()
-        data.attrs['add_offset'] = coeffs[channel_index, 1].item()
-        data = self.scale(data, data.attrs['scale_factor'], data.attrs['add_offset'])
+        data.data = da.where(data.data == data.attrs["FillValue"].item(), np.nan, data.data)
+        data.attrs["scale_factor"] = coeffs[channel_index, 0].item()
+        data.attrs["add_offset"] = coeffs[channel_index, 1].item()
+        data = self.scale(data, data.attrs["scale_factor"], data.attrs["add_offset"])
         data *= 100

-        ds_info['valid_range'] = (data.attrs['valid_range'] * data.attrs['scale_factor'] + data.attrs['add_offset'])
-        ds_info['valid_range'] = ds_info['valid_range'] * 100
+        ds_info["valid_range"] = (data.attrs["valid_range"] * data.attrs["scale_factor"] + data.attrs["add_offset"])
+        ds_info["valid_range"] = ds_info["valid_range"] * 100
         return data

     def calibrate_to_bt(self, data, ds_info, ds_name):
         """Calibrate to Brightness Temperatures [K]."""
         logger.debug("Calibrating to brightness_temperature")

-        if self.sensor not in ['GHI', 'AGRI']:
+        if self.sensor not in ["GHI", "AGRI"]:
             raise ValueError("Error, sensor must be GHI or AGRI.")

         # The key is sometimes prefixed with `Calibration/` so we try both options here
-        lut_key = ds_info.get('lut_key', ds_name)
+        lut_key = ds_info.get("lut_key", ds_name)
         try:
             lut = self[lut_key]
         except KeyError:
@@ -183,66 +183,66 @@
         # the value of dn is the index of brightness_temperature
         data = self.apply_lut(data, lut)
-        ds_info['valid_range'] = lut.attrs['valid_range']
+        ds_info["valid_range"] = lut.attrs["valid_range"]
         return data
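# Aside: apply_lut() is not shown here, but the comment above says the DN value is used
# as an index into the brightness-temperature table. A sketch of that indexing with a
# made-up four-entry LUT:
import numpy as np

lut = np.array([200.0, 250.0, 280.0, 310.0])   # hypothetical BT values [K], one per DN
dn = np.array([[0, 3], [1, 2]])                # raw counts
bt = lut[dn]                                   # -> [[200., 310.], [250., 280.]]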
     @property
     def start_time(self):
         """Get the start time."""
-        start_time = self['/attr/Observing Beginning Date'] + 'T' + self['/attr/Observing Beginning Time'] + 'Z'
+        start_time = self["/attr/Observing Beginning Date"] + "T" + self["/attr/Observing Beginning Time"] + "Z"
         try:
-            return datetime.strptime(start_time, '%Y-%m-%dT%H:%M:%S.%fZ')
+            return datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ")
         except ValueError:
             # For some data there is no sub-second component
-            return datetime.strptime(start_time, '%Y-%m-%dT%H:%M:%SZ')
+            return datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%SZ")

     @property
     def end_time(self):
         """Get the end time."""
-        end_time = self['/attr/Observing Ending Date'] + 'T' + self['/attr/Observing Ending Time'] + 'Z'
+        end_time = self["/attr/Observing Ending Date"] + "T" + self["/attr/Observing Ending Time"] + "Z"
         try:
-            return datetime.strptime(end_time, '%Y-%m-%dT%H:%M:%S.%fZ')
+            return datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S.%fZ")
         except ValueError:
             # For some data there is no sub-second component
-            return datetime.strptime(end_time, '%Y-%m-%dT%H:%M:%SZ')
+            return datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%SZ")

     def get_area_def(self, key):
         """Get the area definition."""
         # Coordination Group for Meteorological Satellites LRIT/HRIT Global Specification
         # https://www.cgms-info.org/documents/cgms-lrit-hrit-global-specification-(v2-8-of-30-oct-2013).pdf
-        res = key['resolution']
+        res = key["resolution"]
         pdict = {}
-        begin_cols = float(self.file_content['/attr/Begin Pixel Number'])
-        end_lines = float(self.file_content['/attr/End Line Number'])
-        pdict['coff'] = self._COFF_list[RESOLUTION_LIST.index(res)] - begin_cols + 1
-        pdict['loff'] = -self._LOFF_list[RESOLUTION_LIST.index(res)] + end_lines + 1
+        begin_cols = float(self.file_content["/attr/Begin Pixel Number"])
+        end_lines = float(self.file_content["/attr/End Line Number"])
+        pdict["coff"] = self._COFF_list[RESOLUTION_LIST.index(res)] - begin_cols + 1
+        pdict["loff"] = -self._LOFF_list[RESOLUTION_LIST.index(res)] + end_lines + 1

-        pdict['cfac'] = self._CFAC_list[RESOLUTION_LIST.index(res)]
-        pdict['lfac'] = self._LFAC_list[RESOLUTION_LIST.index(res)]
+        pdict["cfac"] = self._CFAC_list[RESOLUTION_LIST.index(res)]
+        pdict["lfac"] = self._LFAC_list[RESOLUTION_LIST.index(res)]
         try:
-            pdict['a'] = float(self.file_content['/attr/Semimajor axis of ellipsoid'])
+            pdict["a"] = float(self.file_content["/attr/Semimajor axis of ellipsoid"])
         except KeyError:
-            pdict['a'] = float(self.file_content['/attr/dEA'])
-        if pdict['a'] < 10000:
-            pdict['a'] = pdict['a'] * 1E3  # equator radius (m)
+            pdict["a"] = float(self.file_content["/attr/dEA"])
+        if pdict["a"] < 10000:
+            pdict["a"] = pdict["a"] * 1E3  # equator radius (m)
         try:
-            pdict['b'] = float(self.file_content['/attr/Semiminor axis of ellipsoid'])
+            pdict["b"] = float(self.file_content["/attr/Semiminor axis of ellipsoid"])
         except KeyError:
-            pdict['b'] = pdict['a'] * (1 - 1 / self.file_content['/attr/dObRecFlat'])  # polar radius (m)
+            pdict["b"] = pdict["a"] * (1 - 1 / self.file_content["/attr/dObRecFlat"])  # polar radius (m)

-        pdict['h'] = self.file_content['/attr/NOMSatHeight']  # the altitude of satellite (m)
-        if pdict['h'] > 42000000.0:
-            pdict['h'] = pdict['h'] - pdict['a']
+        pdict["h"] = self.file_content["/attr/NOMSatHeight"]  # the altitude of satellite (m)
+        if pdict["h"] > 42000000.0:
+            pdict["h"] = pdict["h"] - pdict["a"]
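# Aside: an illustration of the NOMSatHeight normalisation above. Some files appear to
# store the distance from the Earth's centre rather than the height above the surface,
# so values above the 42,000 km threshold have the semi-major axis subtracted.
# Numbers below are illustrative.
a = 6378137.0       # equatorial radius (m)
h = 42164000.0      # distance from the Earth's centre (m), as found in some files
if h > 42000000.0:
    h = h - a       # -> 35785863.0 m above the surface, which is what proj's +h expects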
-        pdict['ssp_lon'] = float(self.file_content['/attr/NOMCenterLon'])
-        pdict['nlines'] = float(self.file_content['/attr/RegLength'])
-        pdict['ncols'] = float(self.file_content['/attr/RegWidth'])
+        pdict["ssp_lon"] = float(self.file_content["/attr/NOMCenterLon"])
+        pdict["nlines"] = float(self.file_content["/attr/RegLength"])
+        pdict["ncols"] = float(self.file_content["/attr/RegWidth"])

-        pdict['scandir'] = 'N2S'
-        pdict['a_desc'] = "FY-4 {} area".format(self.filename_info['observation_type'])
-        pdict['a_name'] = f'{self.filename_info["observation_type"]}_{res}m'
-        pdict['p_id'] = f'FY-4, {res}m'
+        pdict["scandir"] = "N2S"
+        pdict["a_desc"] = "FY-4 {} area".format(self.filename_info["observation_type"])
+        pdict["a_name"] = f'{self.filename_info["observation_type"]}_{res}m'
+        pdict["p_id"] = f"FY-4, {res}m"

         area_extent = get_area_extent(pdict)
         area_extent = (area_extent[0],
diff --git a/satpy/readers/generic_image.py b/satpy/readers/generic_image.py
index 3fb8c6b560..1ba160095f 100644
--- a/satpy/readers/generic_image.py
+++ b/satpy/readers/generic_image.py
@@ -40,13 +40,13 @@
 CHUNK_SIZE = get_legacy_chunk_size()

-BANDS = {1: ['L'],
-         2: ['L', 'A'],
-         3: ['R', 'G', 'B'],
-         4: ['R', 'G', 'B', 'A']}
+BANDS = {1: ["L"],
+         2: ["L", "A"],
+         3: ["R", "G", "B"],
+         4: ["R", "G", "B", "A"]}

-NODATA_HANDLING_FILLVALUE = 'fill_value'
-NODATA_HANDLING_NANMASK = 'nan_mask'
+NODATA_HANDLING_FILLVALUE = "fill_value"
+NODATA_HANDLING_NANMASK = "nan_mask"

 logger = logging.getLogger(__name__)

@@ -60,10 +60,10 @@ def __init__(self, filename, filename_info, filetype_info):
                                          filename, filename_info, filetype_info)
         self.finfo = filename_info
         try:
-            self.finfo['end_time'] = self.finfo['start_time']
+            self.finfo["end_time"] = self.finfo["start_time"]
         except KeyError:
             pass
-        self.finfo['filename'] = self.filename
+        self.finfo["filename"] = self.filename
         self.file_content = {}
         self.area = None
         self.dataset_name = None

     def read(self):
         """Read the image."""
-        dataset = rasterio.open(self.finfo['filename'])
+        dataset = rasterio.open(self.finfo["filename"])

         # Create area definition
-        if hasattr(dataset, 'crs') and dataset.crs is not None:
+        if hasattr(dataset, "crs") and dataset.crs is not None:
             self.area = utils.get_area_def_from_raster(dataset)

         data = xr.open_dataset(self.finfo["filename"], engine="rasterio",
@@ -90,13 +90,13 @@ def read(self):
         attrs = data.attrs.copy()

         # Rename to Satpy convention
-        data = data.rename({'band': 'bands'})
+        data = data.rename({"band": "bands"})

         # Rename bands to [R, G, B, A], or a subset of those
-        data['bands'] = BANDS[data.bands.size]
+        data["bands"] = BANDS[data.bands.size]

         data.attrs = attrs
-        self.dataset_name = 'image'
+        self.dataset_name = "image"
         self.file_content[self.dataset_name] = data

     def get_area_def(self, dsid):
@@ -108,16 +108,16 @@ def get_area_def(self, dsid):
     @property
     def start_time(self):
         """Return start time."""
-        return self.finfo['start_time']
+        return self.finfo["start_time"]

     @property
     def end_time(self):
         """Return end time."""
-        return self.finfo['end_time']
+        return self.finfo["end_time"]

     def get_dataset(self, key, info):
         """Get a dataset from the file."""
-        ds_name = self.dataset_name if self.dataset_name else key['name']
+        ds_name = self.dataset_name if self.dataset_name else key["name"]
         logger.debug("Reading '%s.'", ds_name)
         data = self.file_content[ds_name]
@@ -149,8 +149,8 @@ def _mask_image_data(data, info):
                                 for i in range(data.shape[0])])
         data.data = masked_data
         data = data.sel(bands=BANDS[data.bands.size - 1])
-    elif hasattr(data, 'nodatavals') and data.nodatavals:
-        data = _handle_nodatavals(data, info.get('nodata_handling', NODATA_HANDLING_FILLVALUE))
+    elif hasattr(data, "nodatavals") and data.nodatavals:
+        data = _handle_nodatavals(data, info.get("nodata_handling", NODATA_HANDLING_FILLVALUE))
     return data
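# Aside: a reduced sketch of the two nodata_handling modes dispatched above, for a
# single-band integer image; array values are illustrative.
import numpy as np

NODATA_HANDLING_FILLVALUE = "fill_value"   # keep raw values, record _FillValue
NODATA_HANDLING_NANMASK = "nan_mask"       # replace nodata with NaN (forces float)

img = np.array([[0, 5], [0, 7]])
nodataval = 0
nan_masked = np.where(img == nodataval, np.nan, img)  # nan_mask mode -> [[nan, 5.], [nan, 7.]]
# fill_value mode: leave img untouched and set attrs["_FillValue"] = nodataval instead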
@@ -162,7 +162,7 @@ def _handle_nodatavals(data, nodata_handling):
         masked_data = da.stack([da.where(data.data[i, :, :] == nodataval, np.nan, data.data[i, :, :])
                                 for i, nodataval in enumerate(data.nodatavals)])
         data.data = masked_data
-        data.attrs['_FillValue'] = np.nan
+        data.attrs["_FillValue"] = np.nan
     elif nodata_handling == NODATA_HANDLING_FILLVALUE:
         # keep data as it is but set _FillValue attribute to provided
         # nodatavalue (first one as it has to be the same for all bands at least
@@ -170,5 +170,5 @@ def _handle_nodatavals(data, nodata_handling):
         fill_value = data.nodatavals[0]
         if np.issubdtype(data.dtype, np.integer):
             fill_value = int(fill_value)
-        data.attrs['_FillValue'] = fill_value
+        data.attrs["_FillValue"] = fill_value
     return data
diff --git a/satpy/readers/geocat.py b/satpy/readers/geocat.py
index 5086cd899b..185e7d3c13 100644
--- a/satpy/readers/geocat.py
+++ b/satpy/readers/geocat.py
@@ -44,14 +44,14 @@

 CF_UNITS = {
-    'none': '1',
+    "none": "1",
 }

 # GEOCAT currently doesn't include projection information in its files
 GEO_PROJS = {
-    'GOES-16': '+proj=geos +lon_0={lon_0:0.02f} +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs',
-    'GOES-17': '+proj=geos +lon_0={lon_0:0.02f} +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs',
-    'HIMAWARI-8': '+proj=geos +over +lon_0=140.7 +h=35785863 +a=6378137 +b=6356752.299581327 +units=m +no_defs',
+    "GOES-16": "+proj=geos +lon_0={lon_0:0.02f} +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs",
+    "GOES-17": "+proj=geos +lon_0={lon_0:0.02f} +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs",
+    "HIMAWARI-8": "+proj=geos +over +lon_0=140.7 +h=35785863 +a=6378137 +b=6356752.299581327 +units=m +no_defs",
 }

@@ -72,29 +72,29 @@ class GEOCATFileHandler(NetCDF4FileHandler):

     def __init__(self, filename, filename_info, filetype_info, **kwargs):
         """Open and perform initial investigation of NetCDF file."""
-        kwargs.setdefault('xarray_kwargs', {}).setdefault(
-            'engine', "netcdf4")
-        kwargs.setdefault('xarray_kwargs', {}).setdefault(
-            'decode_times', False)
+        kwargs.setdefault("xarray_kwargs", {}).setdefault(
+            "engine", "netcdf4")
+        kwargs.setdefault("xarray_kwargs", {}).setdefault(
+            "decode_times", False)

         super(GEOCATFileHandler, self).__init__(
             filename, filename_info, filetype_info,
             xarray_kwargs=kwargs["xarray_kwargs"])

     sensors = {
-        'goes': 'goes_imager',
-        'himawari8': 'ahi',
-        'goes16': 'abi',  # untested
-        'goesr': 'abi',  # untested
+        "goes": "goes_imager",
+        "himawari8": "ahi",
+        "goes16": "abi",  # untested
+        "goesr": "abi",  # untested
     }
     platforms: dict[str, str] = {
     }
     resolutions = {
-        'abi': {
+        "abi": {
             1: 1002.0086577437705,
             2: 2004.0173154875411,
         },
-        'ahi': {
+        "ahi": {
             1: 999.9999820317674,  # assumption
             2: 1999.999964063535,
             4: 3999.99992812707,
@@ -121,7 +121,7 @@ def get_platform(self, platform):
         return platform

     def _get_proj(self, platform, ref_lon):
-        if platform == 'GOES-16' and -76. < ref_lon < -74.:
+        if platform == "GOES-16" and -76. < ref_lon < -74.:
             # geocat file holds the *actual* subsatellite point, not the
             # projection (-75.2 actual versus -75 projection)
             ref_lon = -75.
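# Aside: the GOES entries in GEO_PROJS are templates with a lon_0 placeholder; filling
# one in is plain str.format. The longitude below is illustrative.
proj_template = ("+proj=geos +lon_0={lon_0:0.02f} +h=35786023.0 +a=6378137.0 "
                 "+b=6356752.31414 +sweep=x +units=m +no_defs")
proj_str = proj_template.format(lon_0=-75.0)
# -> "+proj=geos +lon_0=-75.00 +h=35786023.0 ... +no_defs"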
@@ -130,33 +130,33 @@ def _get_proj(self, platform, ref_lon):

     @property
     def sensor_names(self):
         """Get sensor names."""
-        return [self.get_sensor(self['/attr/Sensor_Name'])]
+        return [self.get_sensor(self["/attr/Sensor_Name"])]

     @property
     def start_time(self):
         """Get start time."""
-        return self.filename_info['start_time']
+        return self.filename_info["start_time"]

     @property
     def end_time(self):
         """Get end time."""
-        return self.filename_info.get('end_time', self.start_time)
+        return self.filename_info.get("end_time", self.start_time)

     @property
     def is_geo(self):
         """Check platform."""
-        platform = self.get_platform(self['/attr/Platform_Name'])
+        platform = self.get_platform(self["/attr/Platform_Name"])
         return platform in GEO_PROJS

     @property
     def resolution(self):
         """Get resolution."""
-        elem_res = self['/attr/Element_Resolution']
+        elem_res = self["/attr/Element_Resolution"]
         return int(elem_res * 1000)

     def _calc_area_resolution(self, ds_res):
         elem_res = round(ds_res / 1000.)  # mimic 'Element_Resolution' attribute from above
-        sensor = self.get_sensor(self['/attr/Sensor_Name'])
+        sensor = self.get_sensor(self["/attr/Sensor_Name"])
         return self.resolutions.get(sensor, {}).get(int(elem_res),
                                                     elem_res * 1000.)

@@ -174,27 +174,27 @@ def available_datasets(self, configured_datasets=None):
         """
         res = self.resolution
-        coordinates = ('pixel_longitude', 'pixel_latitude')
+        coordinates = ("pixel_longitude", "pixel_latitude")
         handled_variables = set()

         # update previously configured datasets
         for is_avail, ds_info in (configured_datasets or []):
-            this_res = ds_info.get('resolution')
-            this_coords = ds_info.get('coordinates')
+            this_res = ds_info.get("resolution")
+            this_coords = ds_info.get("coordinates")
             # some other file handler knows how to load this
             if is_avail is not None:
                 yield is_avail, ds_info

-            var_name = ds_info.get('file_key', ds_info['name'])
-            matches = self.file_type_matches(ds_info['file_type'])
+            var_name = ds_info.get("file_key", ds_info["name"])
+            matches = self.file_type_matches(ds_info["file_type"])
             # we can confidently say that we can provide this dataset and can
             # provide more info
             if matches and var_name in self and this_res != res:
                 handled_variables.add(var_name)
                 new_info = ds_info.copy()  # don't mess up the above yielded
-                new_info['resolution'] = res
+                new_info["resolution"] = res
                 if not self.is_geo and this_coords is None:
-                    new_info['coordinates'] = coordinates
+                    new_info["coordinates"] = coordinates
                 yield True, new_info
             elif is_avail is None:
                 # if we didn't know how to handle this dataset and no one else did
@@ -207,21 +207,21 @@ def available_datasets(self, configured_datasets=None):
                 continue
             if isinstance(val, netCDF4.Variable):
                 ds_info = {
-                    'file_type': self.filetype_info['file_type'],
-                    'resolution': res,
-                    'name': var_name,
+                    "file_type": self.filetype_info["file_type"],
+                    "resolution": res,
+                    "name": var_name,
                 }
                 if not self.is_geo:
-                    ds_info['coordinates'] = coordinates
+                    ds_info["coordinates"] = coordinates
                 yield True, ds_info

     def get_shape(self, dataset_id, ds_info):
         """Get shape."""
-        var_name = ds_info.get('file_key', dataset_id['name'])
-        return self[var_name + '/shape']
+        var_name = ds_info.get("file_key", dataset_id["name"])
+        return self[var_name + "/shape"]

     def _first_good_nav(self, lon_arr, lat_arr):
-        if hasattr(lon_arr, 'mask'):
+        if hasattr(lon_arr, "mask"):
             good_indexes = np.nonzero(~lon_arr.mask)
         else:
             # no masked values found in auto maskandscale
@@ -247,9 +247,9 @@ def _get_extents(self, proj, res, lon_arr, lat_arr):

     def _load_nav(self, name):
         nav = self[name]
-        factor = self[name + '/attr/scale_factor']
-        offset = self[name + '/attr/add_offset']
-        fill = self[name + '/attr/_FillValue']
+        factor = self[name + "/attr/scale_factor"]
+        offset = self[name + "/attr/add_offset"]
+        fill = self[name + "/attr/_FillValue"]
         nav = nav[:]
         mask = nav == fill
         nav = np.ma.masked_array(nav * factor + offset, mask=mask)
@@ -260,15 +260,15 @@ def get_area_def(self, dsid):
         if not self.is_geo:
             raise NotImplementedError("Don't know how to get the Area Definition for this file")

-        platform = self.get_platform(self['/attr/Platform_Name'])
-        res = self._calc_area_resolution(dsid['resolution'])
-        proj = self._get_proj(platform, float(self['/attr/Subsatellite_Longitude']))
-        area_name = '{} {} Area at {}m'.format(
+        platform = self.get_platform(self["/attr/Platform_Name"])
+        res = self._calc_area_resolution(dsid["resolution"])
+        proj = self._get_proj(platform, float(self["/attr/Subsatellite_Longitude"]))
+        area_name = "{} {} Area at {}m".format(
             platform,
-            self.metadata.get('sector_id', ''),
+            self.metadata.get("sector_id", ""),
             int(res))
-        lon = self._load_nav('pixel_longitude')
-        lat = self._load_nav('pixel_latitude')
+        lon = self._load_nav("pixel_longitude")
+        lat = self._load_nav("pixel_latitude")
         extents = self._get_extents(proj, res, lon, lat)
         area_def = geometry.AreaDefinition(
             area_name,
@@ -283,36 +283,36 @@ def get_area_def(self, dsid):

     def get_metadata(self, dataset_id, ds_info):
         """Get metadata."""
-        var_name = ds_info.get('file_key', dataset_id['name'])
+        var_name = ds_info.get("file_key", dataset_id["name"])
         shape = self.get_shape(dataset_id, ds_info)
-        info = getattr(self[var_name], 'attrs', {})
-        info['shape'] = shape
+        info = getattr(self[var_name], "attrs", {})
+        info["shape"] = shape
         info.update(ds_info)

-        u = info.get('units')
+        u = info.get("units")
         if u in CF_UNITS:
             # CF compliance
-            info['units'] = CF_UNITS[u]
+            info["units"] = CF_UNITS[u]

-        info['sensor'] = self.get_sensor(self['/attr/Sensor_Name'])
-        info['platform_name'] = self.get_platform(self['/attr/Platform_Name'])
-        info['resolution'] = dataset_id['resolution']
-        if var_name == 'pixel_longitude':
-            info['standard_name'] = 'longitude'
-        elif var_name == 'pixel_latitude':
-            info['standard_name'] = 'latitude'
+        info["sensor"] = self.get_sensor(self["/attr/Sensor_Name"])
+        info["platform_name"] = self.get_platform(self["/attr/Platform_Name"])
+        info["resolution"] = dataset_id["resolution"]
+        if var_name == "pixel_longitude":
+            info["standard_name"] = "longitude"
+        elif var_name == "pixel_latitude":
+            info["standard_name"] = "latitude"

         return info

     def get_dataset(self, dataset_id, ds_info):
         """Get dataset."""
-        var_name = ds_info.get('file_key', dataset_id['name'])
+        var_name = ds_info.get("file_key", dataset_id["name"])
         # FUTURE: Metadata retrieval may be separate
         info = self.get_metadata(dataset_id, ds_info)
         data = self[var_name]
-        fill = self[var_name + '/attr/_FillValue']
-        factor = self.get(var_name + '/attr/scale_factor')
-        offset = self.get(var_name + '/attr/add_offset')
-        valid_range = self.get(var_name + '/attr/valid_range')
+        fill = self[var_name + "/attr/_FillValue"]
+        factor = self.get(var_name + "/attr/scale_factor")
+        offset = self.get(var_name + "/attr/add_offset")
+        valid_range = self.get(var_name + "/attr/valid_range")

         data = data.where(data != fill)
         if valid_range is not None:
@@ -321,5 +321,5 @@ def get_dataset(self, dataset_id, ds_info):
             data = data * factor + offset

         data.attrs.update(info)
-        data = data.rename({'lines': 'y', 'elements': 'x'})
+        data = data.rename({"lines": "y", "elements": "x"})
         return data
diff --git a/satpy/readers/ghi_l1.py b/satpy/readers/ghi_l1.py
index 2e26aeee24..3c085282c7 100644
--- a/satpy/readers/ghi_l1.py
+++ b/satpy/readers/ghi_l1.py
@@ -38,20 +38,20 @@ class HDF_GHI_L1(FY4Base):
     def __init__(self, filename, filename_info, filetype_info):
         """Init filehandler."""
         super(HDF_GHI_L1, self).__init__(filename, filename_info, filetype_info)
-        self.sensor = 'GHI'
+        self.sensor = "GHI"

     def get_dataset(self, dataset_id, ds_info):
         """Load a dataset."""
-        ds_name = dataset_id['name']
-        logger.debug('Reading in get_dataset %s.', ds_name)
-        file_key = ds_info.get('file_key', ds_name)
+        ds_name = dataset_id["name"]
+        logger.debug("Reading in get_dataset %s.", ds_name)
+        file_key = ds_info.get("file_key", ds_name)
         if self.CHANS_ID in file_key:
-            file_key = f'Data/{file_key}'
+            file_key = f"Data/{file_key}"
         elif self.SUN_ID in file_key or self.SAT_ID in file_key:
-            file_key = f'Navigation/{file_key}'
+            file_key = f"Navigation/{file_key}"
         data = self.get(file_key)
         if data.ndim >= 2:
-            data = data.rename({data.dims[-2]: 'y', data.dims[-1]: 'x'})
+            data = data.rename({data.dims[-2]: "y", data.dims[-1]: "x"})

         data = self.calibrate(data, ds_info, ds_name, file_key)

@@ -61,58 +61,58 @@ def get_dataset(self, dataset_id, ds_info):

     def adjust_attrs(self, data, ds_info):
         """Adjust the attrs of the data."""
-        satname = self.PLATFORM_NAMES.get(self['/attr/Satellite Name'], self['/attr/Satellite Name'])
-        data.attrs.update({'platform_name': satname,
-                           'sensor': self['/attr/Sensor Identification Code'].lower(),
-                           'orbital_parameters': {
-                               'satellite_nominal_latitude': self['/attr/NOMSubSatLat'].item(),
-                               'satellite_nominal_longitude': self['/attr/NOMSubSatLon'].item(),
-                               'satellite_nominal_altitude': self['/attr/NOMSatHeight'].item()}})
+        satname = self.PLATFORM_NAMES.get(self["/attr/Satellite Name"], self["/attr/Satellite Name"])
+        data.attrs.update({"platform_name": satname,
+                           "sensor": self["/attr/Sensor Identification Code"].lower(),
+                           "orbital_parameters": {
+                               "satellite_nominal_latitude": self["/attr/NOMSubSatLat"].item(),
+                               "satellite_nominal_longitude": self["/attr/NOMSubSatLon"].item(),
+                               "satellite_nominal_altitude": self["/attr/NOMSatHeight"].item()}})
         data.attrs.update(ds_info)

         # remove attributes that could be confusing later
-        data.attrs.pop('FillValue', None)
-        data.attrs.pop('Intercept', None)
-        data.attrs.pop('Slope', None)
+        data.attrs.pop("FillValue", None)
+        data.attrs.pop("Intercept", None)
+        data.attrs.pop("Slope", None)

     def get_area_def(self, key):
         """Get the area definition."""
         # Coordination Group for Meteorological Satellites LRIT/HRIT Global Specification
         # https://www.cgms-info.org/documents/cgms-lrit-hrit-global-specification-(v2-8-of-30-oct-2013).pdf
-        res = key['resolution']
+        res = key["resolution"]
         pdict = {}

-        c_lats = self.file_content['/attr/Corner-Point Latitudes']
-        c_lons = self.file_content['/attr/Corner-Point Longitudes']
+        c_lats = self.file_content["/attr/Corner-Point Latitudes"]
+        c_lons = self.file_content["/attr/Corner-Point Longitudes"]

         p1 = (c_lons[0], c_lats[0])
         p2 = (c_lons[1], c_lats[1])
         p3 = (c_lons[2], c_lats[2])
         p4 = (c_lons[3], c_lats[3])

-        pdict['a'] = self.file_content['/attr/Semi_major_axis'] * 1E3  # equator radius (m)
-        pdict['b'] = self.file_content['/attr/Semi_minor_axis'] * 1E3  # equator radius (m)
-        pdict['h'] = self.file_content['/attr/NOMSatHeight'] * 1E3  # the altitude of satellite (m)
+        pdict["a"] = self.file_content["/attr/Semi_major_axis"] * 1E3  # equator radius (m)
+        pdict["b"] = self.file_content["/attr/Semi_minor_axis"] * 1E3  # polar radius (m)
+        pdict["h"] = self.file_content["/attr/NOMSatHeight"] * 1E3  # the altitude of satellite (m)

-        pdict['h'] = pdict['h'] - pdict['a']
+        pdict["h"] = pdict["h"] - pdict["a"]

-        pdict['ssp_lon'] = float(self.file_content['/attr/NOMSubSatLon'])
-        pdict['nlines'] = float(self.file_content['/attr/RegLength'])
-        pdict['ncols'] = float(self.file_content['/attr/RegWidth'])
+        pdict["ssp_lon"] = float(self.file_content["/attr/NOMSubSatLon"])
+        pdict["nlines"] = float(self.file_content["/attr/RegLength"])
+        pdict["ncols"] = float(self.file_content["/attr/RegWidth"])

-        pdict['scandir'] = 'S2N'
+        pdict["scandir"] = "S2N"

-        pdict['a_desc'] = "FY-4 {} area".format(self.filename_info['observation_type'])
-        pdict['a_name'] = f'{self.filename_info["observation_type"]}_{res}m'
-        pdict['p_id'] = f'FY-4, {res}m'
+        pdict["a_desc"] = "FY-4 {} area".format(self.filename_info["observation_type"])
+        pdict["a_name"] = f'{self.filename_info["observation_type"]}_{res}m'
+        pdict["p_id"] = f"FY-4, {res}m"

-        proj_dict = {'a': pdict['a'],
-                     'b': pdict['b'],
-                     'lon_0': pdict['ssp_lon'],
-                     'h': pdict['h'],
-                     'proj': 'geos',
-                     'units': 'm',
-                     'sweep': 'y'}
+        proj_dict = {"a": pdict["a"],
+                     "b": pdict["b"],
+                     "lon_0": pdict["ssp_lon"],
+                     "h": pdict["h"],
+                     "proj": "geos",
+                     "units": "m",
+                     "sweep": "y"}

         p = Proj(proj_dict)
         o1 = (p(p1[0], p1[1]))  # Upper left
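# Aside: a minimal pyproj sketch of the corner-point transform done just above:
# forward-project a lon/lat corner into geos x/y metres, as p(p1[0], p1[1]) does.
# The projection parameters below are illustrative, not taken from a real GHI file.
from pyproj import Proj

p = Proj({"proj": "geos", "lon_0": 133.0, "h": 35786000.0,
          "a": 6378137.0, "b": 6356752.3, "units": "m", "sweep": "y"})
x, y = p(120.0, 30.0)              # lon/lat in degrees -> projection coordinates in metres
lon, lat = p(x, y, inverse=True)   # and back again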
diff --git a/satpy/readers/ghrsst_l2.py b/satpy/readers/ghrsst_l2.py
index 384bafa289..6c4005623e 100644
--- a/satpy/readers/ghrsst_l2.py
+++ b/satpy/readers/ghrsst_l2.py
@@ -39,15 +39,15 @@ def __init__(self, filename, filename_info, filetype_info, engine=None):
         self._engine = engine
         self._tarfile = None

-        self.filename_info['start_time'] = datetime.strptime(
-            self.nc.start_time, '%Y%m%dT%H%M%SZ')
-        self.filename_info['end_time'] = datetime.strptime(
-            self.nc.stop_time, '%Y%m%dT%H%M%SZ')
+        self.filename_info["start_time"] = datetime.strptime(
+            self.nc.start_time, "%Y%m%dT%H%M%SZ")
+        self.filename_info["end_time"] = datetime.strptime(
+            self.nc.stop_time, "%Y%m%dT%H%M%SZ")

     @cached_property
     def nc(self):
         """Get the xarray Dataset for the filename."""
-        if os.fspath(self.filename).endswith('tar'):
+        if os.fspath(self.filename).endswith("tar"):
             file_obj = self._open_tarfile()
         else:
             file_obj = self.filename
@@ -56,13 +56,13 @@
                              decode_cf=True,
                              mask_and_scale=True,
                              engine=self._engine,
-                             chunks={'ni': CHUNK_SIZE,
-                                     'nj': CHUNK_SIZE})
+                             chunks={"ni": CHUNK_SIZE,
+                                     "nj": CHUNK_SIZE})

-        return nc.rename({'ni': 'x', 'nj': 'y'})
+        return nc.rename({"ni": "x", "nj": "y"})

     def _open_tarfile(self):
-        self._tarfile = tarfile.open(name=self.filename, mode='r')
+        self._tarfile = tarfile.open(name=self.filename, mode="r")
         sst_filename = next((name for name in self._tarfile.getnames()
                              if self._is_sst_file(name)))
         file_obj = self._tarfile.extractfile(sst_filename)
@@ -71,27 +71,27 @@ def _open_tarfile(self):
     @staticmethod
     def _is_sst_file(name):
         """Check if file in the tar archive is a valid SST file."""
-        return name.endswith('nc') and 'GHRSST-SSTskin' in name
+        return name.endswith("nc") and "GHRSST-SSTskin" in name

     def get_dataset(self, key, info):
         """Get any available dataset."""
-        stdname = info.get('standard_name')
+        stdname = info.get("standard_name")
         return self.nc[stdname].squeeze()

     @property
     def start_time(self):
         """Get start time."""
-        return self.filename_info['start_time']
+        return self.filename_info["start_time"]

     @property
     def end_time(self):
         """Get end time."""
-        return self.filename_info['end_time']
+        return self.filename_info["end_time"]
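# Aside: the tar-member lookup used by _open_tarfile above, as a standalone sketch; the
# archive name is a placeholder. Note the archive must stay open while the returned
# file-like object is read (the handler closes it later, in __del__).
import tarfile

tf = tarfile.open("ghrsst_product.tar", mode="r")  # hypothetical archive name
sst_name = next(name for name in tf.getnames()
                if name.endswith("nc") and "GHRSST-SSTskin" in name)
file_obj = tf.extractfile(sst_name)                # file-like object, e.g. for xr.open_dataset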
     @property
     def sensor(self):
         """Get the sensor name."""
-        return self.nc.attrs['sensor'].lower()
+        return self.nc.attrs["sensor"].lower()

     def __del__(self):
         """Close the tarfile object."""
diff --git a/satpy/readers/ghrsst_l3c_sst.py b/satpy/readers/ghrsst_l3c_sst.py
index d35621d341..ef1dd220a9 100644
--- a/satpy/readers/ghrsst_l3c_sst.py
+++ b/satpy/readers/ghrsst_l3c_sst.py
@@ -28,16 +28,16 @@

 logger = logging.getLogger(__name__)

-PLATFORM_NAME = {'NPP': 'Suomi-NPP', }
-SENSOR_NAME = {'VIIRS': 'viirs',
-               'AVHRR': 'avhrr/3'}
+PLATFORM_NAME = {"NPP": "Suomi-NPP", }
+SENSOR_NAME = {"VIIRS": "viirs",
+               "AVHRR": "avhrr/3"}


 class GHRSST_OSISAFL2(NetCDF4FileHandler):
     """Reader for the OSISAF SST GHRSST format."""

     def _parse_datetime(self, datestr):
-        return datetime.strptime(datestr, '%Y%m%dT%H%M%SZ')
+        return datetime.strptime(datestr, "%Y%m%dT%H%M%SZ")
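# Aside: the compact GHRSST timestamp format parsed by _parse_datetime above, as a
# standalone example (the date string is made up):
from datetime import datetime

dt = datetime.strptime("20230515T123000Z", "%Y%m%dT%H%M%SZ")
# -> datetime.datetime(2023, 5, 15, 12, 30)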
     def get_area_def(self, area_id, area_info):
         """Override abstract baseclass method."""
@@ -45,21 +45,21 @@ def get_dataset(self, dataset_id, ds_info, out=None):
         """Load a dataset."""
-        var_path = ds_info.get('file_key', '{}'.format(dataset_id['name']))
-        dtype = ds_info.get('dtype', np.float32)
-        if var_path + '/shape' not in self:
+        var_path = ds_info.get("file_key", "{}".format(dataset_id["name"]))
+        dtype = ds_info.get("dtype", np.float32)
+        if var_path + "/shape" not in self:
             # loading a scalar value
             shape = 1
         else:
-            shape = self[var_path + '/shape']
+            shape = self[var_path + "/shape"]
             if shape[0] == 1:
                 # Remove the time dimension from dataset
                 shape = shape[1], shape[2]

-        file_units = ds_info.get('file_units')
+        file_units = ds_info.get("file_units")
         if file_units is None:
             try:
-                file_units = self[var_path + '/attr/units']
+                file_units = self[var_path + "/attr/units"]
                 # they were almost completely CF compliant...
                 if file_units == "none":
                     file_units = "1"
@@ -76,19 +76,19 @@ def get_dataset(self, dataset_id, ds_info, out=None):

         ds_info.update({
             "units": ds_info.get("units", file_units),
-            "platform_name": PLATFORM_NAME.get(self['/attr/platform'], self['/attr/platform']),
-            "sensor": SENSOR_NAME.get(self['/attr/sensor'], self['/attr/sensor']),
+            "platform_name": PLATFORM_NAME.get(self["/attr/platform"], self["/attr/platform"]),
+            "sensor": SENSOR_NAME.get(self["/attr/sensor"], self["/attr/sensor"]),
         })
         ds_info.update(dataset_id.to_dict())
         cls = ds_info.pop("container", Dataset)
         return cls(out, **ds_info)

     def _scale_and_mask_data(self, out, var_path):
-        valid_min = self[var_path + '/attr/valid_min']
-        valid_max = self[var_path + '/attr/valid_max']
+        valid_min = self[var_path + "/attr/valid_min"]
+        valid_max = self[var_path + "/attr/valid_max"]
         try:
-            scale_factor = self[var_path + '/attr/scale_factor']
-            scale_offset = self[var_path + '/attr/add_offset']
+            scale_factor = self[var_path + "/attr/scale_factor"]
+            scale_offset = self[var_path + "/attr/add_offset"]
         except KeyError:
             scale_factor = scale_offset = None
         if valid_min is not None and valid_max is not None:
@@ -100,16 +100,16 @@ def _scale_and_mask_data(self, out, var_path):

     def get_lonlats(self, navid, nav_info, lon_out=None, lat_out=None):
         """Load an area."""
-        lon_key = 'lon'
-        valid_min = self[lon_key + '/attr/valid_min']
-        valid_max = self[lon_key + '/attr/valid_max']
+        lon_key = "lon"
+        valid_min = self[lon_key + "/attr/valid_min"]
+        valid_max = self[lon_key + "/attr/valid_max"]

         lon_out.data[:] = self[lon_key][::-1]
         lon_out.mask[:] = (lon_out < valid_min) | (lon_out > valid_max)

-        lat_key = 'lat'
-        valid_min = self[lat_key + '/attr/valid_min']
-        valid_max = self[lat_key + '/attr/valid_max']
+        lat_key = "lat"
+        valid_min = self[lat_key + "/attr/valid_min"]
+        valid_max = self[lat_key + "/attr/valid_max"]
         lat_out.data[:] = self[lat_key][::-1]
         lat_out.mask[:] = (lat_out < valid_min) | (lat_out > valid_max)

@@ -119,9 +119,9 @@
     def start_time(self):
         """Get start time."""
         # return self.filename_info['start_time']
-        return self._parse_datetime(self['/attr/start_time'])
+        return self._parse_datetime(self["/attr/start_time"])

     @property
     def end_time(self):
         """Get end time."""
-        return self._parse_datetime(self['/attr/stop_time'])
+        return self._parse_datetime(self["/attr/stop_time"])
diff --git a/satpy/readers/glm_l2.py b/satpy/readers/glm_l2.py
index bfb2719b07..ceb11a33bc 100644
--- a/satpy/readers/glm_l2.py
+++ b/satpy/readers/glm_l2.py
@@ -33,9 +33,9 @@
 logger = logging.getLogger(__name__)

 PLATFORM_NAMES = {
-    'G16': 'GOES-16',
-    'G17': 'GOES-17',
-    'G18': 'GOES-18',
+    "G16": "GOES-16",
+    "G17": "GOES-17",
+    "G18": "GOES-18",
 }

 # class NC_GLM_L2_LCFA(BaseFileHandler): — add this with glmtools

@@ -47,45 +47,45 @@ class NCGriddedGLML2(NC_ABI_BASE):

     @property
     def sensor(self):
         """Get sensor name for current file handler."""
-        return 'glm'
+        return "glm"

     @property
     def start_time(self):
         """Start time of the current file's observations."""
-        return datetime.strptime(self.nc.attrs['time_coverage_start'], '%Y-%m-%dT%H:%M:%SZ')
+        return datetime.strptime(self.nc.attrs["time_coverage_start"], "%Y-%m-%dT%H:%M:%SZ")

     @property
     def end_time(self):
         """End time of the current file's observations."""
-        return datetime.strptime(self.nc.attrs['time_coverage_end'], '%Y-%m-%dT%H:%M:%SZ')
+        return datetime.strptime(self.nc.attrs["time_coverage_end"], "%Y-%m-%dT%H:%M:%SZ")

     def _is_category_product(self, data_arr):
         # if after autoscaling we still have an integer
         is_int = np.issubdtype(data_arr.dtype, np.integer)
         # and it has a fill value
-        has_fill = '_FillValue' in data_arr.attrs
+        has_fill = "_FillValue" in data_arr.attrs
         # or it has flag_meanings
-        has_meanings = 'flag_meanings' in data_arr.attrs
+        has_meanings = "flag_meanings" in data_arr.attrs
         # then it is likely a category product and we should keep the
         # _FillValue for satpy to use later
         return is_int and (has_fill or has_meanings)

     def get_dataset(self, key, info):
         """Load a dataset."""
-        logger.debug('Reading in get_dataset %s.', key['name'])
-        res = self[key['name']]
-        res.attrs.update({'platform_name': self.platform_name,
-                          'sensor': self.sensor})
+        logger.debug("Reading in get_dataset %s.", key["name"])
+        res = self[key["name"]]
+        res.attrs.update({"platform_name": self.platform_name,
+                          "sensor": self.sensor})
         res.attrs.update(self.filename_info)

         # Add orbital parameters
         projection = self.nc["goes_imager_projection"]
-        res.attrs['orbital_parameters'] = {
-            'projection_longitude': float(projection.attrs['longitude_of_projection_origin']),
-            'projection_latitude': float(projection.attrs['latitude_of_projection_origin']),
-            'projection_altitude': float(projection.attrs['perspective_point_height']),
-            'satellite_nominal_latitude': float(self['nominal_satellite_subpoint_lat']),
-            'satellite_nominal_longitude': float(self['nominal_satellite_subpoint_lon']),
+        res.attrs["orbital_parameters"] = {
+            "projection_longitude": float(projection.attrs["longitude_of_projection_origin"]),
+            "projection_latitude": float(projection.attrs["latitude_of_projection_origin"]),
+            "projection_altitude": float(projection.attrs["perspective_point_height"]),
+            "satellite_nominal_latitude": float(self["nominal_satellite_subpoint_lat"]),
+            "satellite_nominal_longitude": float(self["nominal_satellite_subpoint_lon"]),
             # 'satellite_nominal_altitude': float(self['nominal_satellite_height']),
         }

@@ -93,25 +93,25 @@ def get_dataset(self, key, info):

         # remove attributes that could be confusing later
         if not self._is_category_product(res):
-            res.attrs.pop('_FillValue', None)
-            res.attrs.pop('scale_factor', None)
-            res.attrs.pop('add_offset', None)
-            res.attrs.pop('_Unsigned', None)
-            res.attrs.pop('ancillary_variables', None)  # Can't currently load DQF
+            res.attrs.pop("_FillValue", None)
+            res.attrs.pop("scale_factor", None)
+            res.attrs.pop("add_offset", None)
+            res.attrs.pop("_Unsigned", None)
+            res.attrs.pop("ancillary_variables", None)  # Can't currently load DQF

         # add in information from the filename that may be useful to the user
         # for key in ('observation_type', 'scene_abbr', 'scan_mode', 'platform_shortname'):
-        for attr in ('scene_abbr', 'scan_mode', 'platform_shortname'):
+        for attr in ("scene_abbr", "scan_mode", "platform_shortname"):
             res.attrs[attr] = self.filename_info[attr]

         # copy global attributes to metadata
-        for attr in ('scene_id', 'orbital_slot', 'instrument_ID',
-                     'production_site', 'timeline_ID', 'spatial_resolution'):
+        for attr in ("scene_id", "orbital_slot", "instrument_ID",
+                     "production_site", "timeline_ID", "spatial_resolution"):
             res.attrs[attr] = self.nc.attrs.get(attr)

         return res

     def _is_2d_xy_var(self, data_arr):
         is_2d = data_arr.ndim == 2
-        has_x_dim = 'x' in data_arr.dims
-        has_y_dim = 'y' in data_arr.dims
+        has_x_dim = "x" in data_arr.dims
+        has_y_dim = "y" in data_arr.dims
         return is_2d and has_x_dim and has_y_dim

     def available_datasets(self, configured_datasets=None):
@@ -127,14 +127,14 @@ def available_datasets(self, configured_datasets=None):
             if is_avail is not None:
                 yield is_avail, ds_info

-            matches = self.file_type_matches(ds_info['file_type'])
-            if matches and ds_info.get('resolution') != res:
+            matches = self.file_type_matches(ds_info["file_type"])
+            if matches and ds_info.get("resolution") != res:
                 # we are meant to handle this dataset (file type matches)
                 # and the information we can provide isn't available yet
                 new_info = ds_info.copy()
-                new_info['resolution'] = res
-                exists = ds_info['name'] in self.nc
-                handled_vars.add(ds_info['name'])
+                new_info["resolution"] = res
+                exists = ds_info["name"] in self.nc
+                handled_vars.add(ds_info["name"])
                 yield exists, new_info
             elif is_avail is None:
                 # we don't know what to do with this
@@ -150,9 +150,9 @@ def available_datasets(self, configured_datasets=None):
                 continue

             new_info = {
-                'name': var_name,
-                'resolution': res,
-                'file_type': self.filetype_info['file_type']
+                "name": var_name,
+                "resolution": res,
+                "file_type": self.filetype_info["file_type"]
             }
             handled_vars.add(var_name)
             yield True, new_info
diff --git a/satpy/readers/gms/gms5_vissr_format.py b/satpy/readers/gms/gms5_vissr_format.py
index a5052097eb..a48fcde77f 100644
--- a/satpy/readers/gms/gms5_vissr_format.py
+++ b/satpy/readers/gms/gms5_vissr_format.py
@@ -32,329 +32,329 @@
 CHANNELS = [("VIS", R4), ("IR1", R4), ("IR2", R4), ("WV", R4)]

 VISIR_SOLAR = [("VIS", R4), ("IR", R4)]

-CONTROL_BLOCK = np.dtype([('control_block_size', I2),
-                          ('head_block_number_of_parameter_block', I2),
-                          ('parameter_block_size', I2),
-                          ('head_block_number_of_image_data', I2),
-                          ('total_block_size_of_image_data', I2),
-                          ('available_block_size_of_image_data', I2),
-                          ('head_valid_line_number', I2),
-                          ('final_valid_line_number', I2),
-                          ('final_data_block_number', I2)])
+CONTROL_BLOCK = np.dtype([("control_block_size", I2),
+                          ("head_block_number_of_parameter_block", I2),
+                          ("parameter_block_size", I2),
+                          ("head_block_number_of_image_data", I2),
+                          ("total_block_size_of_image_data", I2),
+                          ("available_block_size_of_image_data", I2),
+                          ("head_valid_line_number", I2),
+                          ("final_valid_line_number", I2),
+                          ("final_data_block_number", I2)])

-MODE_BLOCK_FRAME_PARAMETERS = [('bit_length', I4),
-                               ('number_of_lines', I4),
-                               ('number_of_pixels', I4),
-                               ('stepping_angle', R4),
-                               ('sampling_angle', R4),
-                               ('lcw_pixel_size', I4),
-                               ('doc_pixel_size', I4),
-                               ('reserved', I4)]
+MODE_BLOCK_FRAME_PARAMETERS = [("bit_length", I4),
+                               ("number_of_lines", I4),
+                               ("number_of_pixels", I4),
+                               ("stepping_angle", R4),
+                               ("sampling_angle", R4),
+                               ("lcw_pixel_size", I4),
+                               ("doc_pixel_size", I4),
+                               ("reserved", I4)]

-MODE_BLOCK = np.dtype([('satellite_number', I4),
-                       ('satellite_name', '|S12'),
-                       ('observation_time_ad', '|S16'),
-                       ('observation_time_mjd', R8),
-                       ('gms_operation_mode', I4),
-                       ('dpc_operation_mode', I4),
-                       ('vissr_observation_mode', I4),
-                       ('scanner_selection', I4),
-                       ('sensor_selection', I4),
-                       ('sensor_mode', I4),
-                       ('scan_frame_mode', I4),
-                       ('scan_mode', I4),
-                       ('upper_limit_of_scan_number', I4),
-                       ('lower_limit_of_scan_number', I4),
-                       ('equatorial_scan_line_number', I4),
-                       ('spin_rate', R4),
-                       ('vis_frame_parameters', MODE_BLOCK_FRAME_PARAMETERS),
-                       ('ir_frame_parameters', MODE_BLOCK_FRAME_PARAMETERS),
-                       ('satellite_height', R4),
-                       ('earth_radius', R4),
-                       ('ssp_longitude', R4),
-                       ('reserved_1', I4, 9),
-                       ('table_of_sensor_trouble', I4, 14),
-                       ('reserved_2', I4, 36),
-                       ('status_tables_of_data_relative_address_segment', I4, 60)])
+MODE_BLOCK = np.dtype([("satellite_number", I4),
+                       ("satellite_name", "|S12"),
+                       ("observation_time_ad", "|S16"),
+                       ("observation_time_mjd", R8),
+                       ("gms_operation_mode", I4),
+                       ("dpc_operation_mode", I4),
+                       ("vissr_observation_mode", I4),
+                       ("scanner_selection", I4),
+                       ("sensor_selection", I4),
+                       ("sensor_mode", I4),
+                       ("scan_frame_mode", I4),
+                       ("scan_mode", I4),
+                       ("upper_limit_of_scan_number", I4),
+                       ("lower_limit_of_scan_number", I4),
+                       ("equatorial_scan_line_number", I4),
+                       ("spin_rate", R4),
+                       ("vis_frame_parameters", MODE_BLOCK_FRAME_PARAMETERS),
+                       ("ir_frame_parameters", MODE_BLOCK_FRAME_PARAMETERS),
+                       ("satellite_height", R4),
+                       ("earth_radius", R4),
+                       ("ssp_longitude", R4),
+                       ("reserved_1", I4, 9),
+                       ("table_of_sensor_trouble", I4, 14),
+                       ("reserved_2", I4, 36),
+                       ("status_tables_of_data_relative_address_segment", I4, 60)])

 COORDINATE_CONVERSION_PARAMETERS = np.dtype([
-    ('data_segment', I4),
-    ('data_validity', I4),
-    ('data_generation_time', TIME),
-    ('scheduled_observation_time', R8),
-    ('stepping_angle_along_line', CHANNELS),
-    ('sampling_angle_along_pixel', CHANNELS),
-    ('central_line_number_of_vissr_frame', CHANNELS),
-    ('central_pixel_number_of_vissr_frame', CHANNELS),
-    ('pixel_difference_of_vissr_center_from_normal_position', CHANNELS),
-    ('number_of_sensor_elements', CHANNELS),
-    ('total_number_of_vissr_frame_lines', CHANNELS),
-    ('total_number_of_vissr_frame_pixels', CHANNELS),
-    ('vissr_misalignment', R4, (3,)),
-    ('matrix_of_misalignment', R4, (3, 3)),
-    ('parameters', [('judgement_of_observation_convergence_time', R4),
-                    ('judgement_of_line_convergence', R4),
-                    ('east_west_angle_of_sun_light_condense_prism', R4),
-                    ('north_south_angle_of_sun_light_condense_prism', R4),
-                    ('pi', R4),
-                    ('pi_divided_by_180', R4),
-                    ('180_divided_by_pi', R4),
-                    ('equatorial_radius', R4),
-                    ('oblateness_of_earth', R4),
-                    ('eccentricity_of_earth_orbit', R4),
-                    ('first_angle_of_vissr_observation_in_sdb', R4),
-                    ('upper_limited_line_of_2nd_prism_for_vis_solar_observation', R4),
-                    ('lower_limited_line_of_1st_prism_for_vis_solar_observation', R4),
-                    ('upper_limited_line_of_3rd_prism_for_vis_solar_observation', R4),
-                    ('lower_limited_line_of_2nd_prism_for_vis_solar_observation', R4)]),
-    ('solar_stepping_angle_along_line', VISIR_SOLAR),
-    ('solar_sampling_angle_along_pixel', VISIR_SOLAR),
-    ('solar_center_line_of_vissr_frame', VISIR_SOLAR),
-    ('solar_center_pixel_of_vissr_frame', VISIR_SOLAR),
-    ('solar_pixel_difference_of_vissr_center_from_normal_position', VISIR_SOLAR),
-    ('solar_number_of_sensor_elements', VISIR_SOLAR),
-    ('solar_total_number_of_vissr_frame_lines', VISIR_SOLAR),
-    ('solar_total_number_of_vissr_frame_pixels', VISIR_SOLAR),
-    ('reserved_1', I4, 19),
-    ('orbital_parameters', [('epoch_time', R8),
-                            ('semi_major_axis', R8),
-                            ('eccentricity', R8),
-                            ('orbital_inclination', R8),
-                            ('longitude_of_ascending_node', R8),
-                            ('argument_of_perigee', R8),
-                            ('mean_anomaly', R8),
-                            ('longitude_of_ssp', R8),
-                            ('latitude_of_ssp', R8)]),
-    ('reserved_2', I4, 2),
-    ('attitude_parameters', [('epoch_time', R8),
-                             ('angle_between_z_axis_and_satellite_spin_axis_at_epoch_time', R8),
-                             ('angle_change_rate_between_spin_axis_and_z_axis', R8),
-                             ('angle_between_spin_axis_and_zy_axis', R8),
-                             ('angle_change_rate_between_spin_axis_and_zt_axis', R8),
-                             ('daily_mean_of_spin_rate', R8)]),
-    ('reserved_3', I4, 529),
-    ('correction_of_image_distortion', [('stepping_angle_along_line_of_ir1', R4),
-                                        ('stepping_angle_along_line_of_ir2', R4),
-                                        ('stepping_angle_along_line_of_wv', R4),
-                                        ('stepping_angle_along_line_of_vis', R4),
-                                        ('sampling_angle_along_pixel_of_ir1', R4),
-                                        ('sampling_angle_along_pixel_of_ir2', R4),
-                                        ('sampling_angle_along_pixel_of_wv', R4),
-                                        ('sampling_angle_along_pixel_of_vis', R4),
-                                        ('x_component_vissr_misalignment', R4),
-                                        ('y_component_vissr_misalignment', R4)])
+    ("data_segment", I4),
+    ("data_validity", I4),
+    ("data_generation_time", TIME),
+    ("scheduled_observation_time", R8),
+    ("stepping_angle_along_line", CHANNELS),
+    ("sampling_angle_along_pixel", CHANNELS),
+    ("central_line_number_of_vissr_frame", CHANNELS),
+    ("central_pixel_number_of_vissr_frame", CHANNELS),
+    ("pixel_difference_of_vissr_center_from_normal_position", CHANNELS),
+    ("number_of_sensor_elements", CHANNELS),
+    ("total_number_of_vissr_frame_lines", CHANNELS),
+    ("total_number_of_vissr_frame_pixels", CHANNELS),
+    ("vissr_misalignment", R4, (3,)),
+    ("matrix_of_misalignment", R4, (3, 3)),
+    ("parameters", [("judgement_of_observation_convergence_time", R4),
+                    ("judgement_of_line_convergence", R4),
+                    ("east_west_angle_of_sun_light_condense_prism", R4),
+                    ("north_south_angle_of_sun_light_condense_prism", R4),
+                    ("pi", R4),
+                    ("pi_divided_by_180", R4),
+                    ("180_divided_by_pi", R4),
+                    ("equatorial_radius", R4),
+                    ("oblateness_of_earth", R4),
+                    ("eccentricity_of_earth_orbit", R4),
+                    ("first_angle_of_vissr_observation_in_sdb", R4),
+                    ("upper_limited_line_of_2nd_prism_for_vis_solar_observation", R4),
+                    ("lower_limited_line_of_1st_prism_for_vis_solar_observation", R4),
+                    ("upper_limited_line_of_3rd_prism_for_vis_solar_observation", R4),
+                    ("lower_limited_line_of_2nd_prism_for_vis_solar_observation", R4)]),
+    ("solar_stepping_angle_along_line", VISIR_SOLAR),
+    ("solar_sampling_angle_along_pixel", VISIR_SOLAR),
+    ("solar_center_line_of_vissr_frame", VISIR_SOLAR),
+    ("solar_center_pixel_of_vissr_frame", VISIR_SOLAR),
+    ("solar_pixel_difference_of_vissr_center_from_normal_position", VISIR_SOLAR),
+    ("solar_number_of_sensor_elements", VISIR_SOLAR),
+    ("solar_total_number_of_vissr_frame_lines", VISIR_SOLAR),
+    ("solar_total_number_of_vissr_frame_pixels", VISIR_SOLAR),
+    ("reserved_1", I4, 19),
+    ("orbital_parameters", [("epoch_time", R8),
+                            ("semi_major_axis", R8),
+                            ("eccentricity", R8),
+                            ("orbital_inclination", R8),
+                            ("longitude_of_ascending_node", R8),
+                            ("argument_of_perigee", R8),
+                            ("mean_anomaly", R8),
+                            ("longitude_of_ssp", R8),
+                            ("latitude_of_ssp", R8)]),
+    ("reserved_2", I4, 2),
+    ("attitude_parameters", [("epoch_time", R8),
+                             ("angle_between_z_axis_and_satellite_spin_axis_at_epoch_time", R8),
+                             ("angle_change_rate_between_spin_axis_and_z_axis", R8),
+                             ("angle_between_spin_axis_and_zy_axis", R8),
+                             ("angle_change_rate_between_spin_axis_and_zt_axis", R8),
+                             ("daily_mean_of_spin_rate", R8)]),
+    ("reserved_3", I4, 529),
+    ("correction_of_image_distortion", [("stepping_angle_along_line_of_ir1", R4),
+                                        ("stepping_angle_along_line_of_ir2", R4),
+                                        ("stepping_angle_along_line_of_wv", R4),
+                                        ("stepping_angle_along_line_of_vis", R4),
+                                        ("sampling_angle_along_pixel_of_ir1", R4),
+                                        ("sampling_angle_along_pixel_of_ir2", R4),
+                                        ("sampling_angle_along_pixel_of_wv", R4),
+                                        ("sampling_angle_along_pixel_of_vis", R4),
+                                        ("x_component_vissr_misalignment", R4),
+                                        ("y_component_vissr_misalignment", R4)])
 ])
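# Aside: these structured dtypes describe fixed binary block layouts; decoding is then a
# single np.frombuffer call. A sketch with a tiny made-up block (not one of the real
# VISSR blocks above), including local stand-ins for the I2/R4 type aliases:
import numpy as np

I2, R4 = ">i2", ">f4"   # big-endian int16 / float32, assumed to match the format module
demo_block = np.dtype([("block_size", I2), ("stepping_angle", R4)])
raw = np.array([(10, 0.0035)], dtype=demo_block).tobytes()   # stand-in for bytes read from a file
rec = np.frombuffer(raw, dtype=demo_block)[0]
print(rec["block_size"], rec["stepping_angle"])              # fields are accessed by name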
-ATTITUDE_PREDICTION_DATA = np.dtype([('prediction_time_mjd', R8),
-                                     ('prediction_time_utc', TIME),
-                                     ('right_ascension_of_attitude', R8),
-                                     ('declination_of_attitude', R8),
-                                     ('sun_earth_angle', R8),
-                                     ('spin_rate', R8),
-                                     ('right_ascension_of_orbital_plane', R8),
-                                     ('declination_of_orbital_plane', R8),
-                                     ('reserved', R8),
-                                     ('eclipse_flag', I4),
-                                     ('spin_axis_flag', I4)])
+ATTITUDE_PREDICTION_DATA = np.dtype([("prediction_time_mjd", R8),
+                                     ("prediction_time_utc", TIME),
+                                     ("right_ascension_of_attitude", R8),
+                                     ("declination_of_attitude", R8),
+                                     ("sun_earth_angle", R8),
+                                     ("spin_rate", R8),
+                                     ("right_ascension_of_orbital_plane", R8),
+                                     ("declination_of_orbital_plane", R8),
+                                     ("reserved", R8),
+                                     ("eclipse_flag", I4),
+                                     ("spin_axis_flag", I4)])

-ATTITUDE_PREDICTION = np.dtype([('data_segment', I4),
-                                ('data_validity', I4),
-                                ('data_generation_time', TIME),
-                                ('start_time', R8),
-                                ('end_time', R8),
-                                ('prediction_interval_time', R8),
-                                ('number_of_prediction', I4),
-                                ('data_size', I4),
-                                ('data', ATTITUDE_PREDICTION_DATA, (33,))])
+ATTITUDE_PREDICTION = np.dtype([("data_segment", I4),
+                                ("data_validity", I4),
+                                ("data_generation_time", TIME),
+                                ("start_time", R8),
+                                ("end_time", R8),
+                                ("prediction_interval_time", R8),
+                                ("number_of_prediction", I4),
+                                ("data_size", I4),
+                                ("data", ATTITUDE_PREDICTION_DATA, (33,))])

-ORBIT_PREDICTION_DATA = [('prediction_time_mjd', R8),
-                         ('prediction_time_utc', TIME),
-                         ('satellite_position_1950', R8, (3,)),
-                         ('satellite_velocity_1950', R8, (3,)),
-                         ('satellite_position_earth_fixed', R8, (3,)),
-                         ('satellite_velocity_earth_fixed', R8, (3,)),
-                         ('greenwich_sidereal_time', R8),
-                         ('sat_sun_vector_1950', [('azimuth', R8),
-                                                  ('elevation', R8)]),
-                         ('sat_sun_vector_earth_fixed', [('azimuth', R8),
-                                                         ('elevation', R8)]),
-                         ('conversion_matrix', R8, (3, 3)),
-                         ('moon_directional_vector', R8, (3,)),
-                         ('satellite_position', [('ssp_longitude', R8),
-                                                 ('ssp_latitude', R8),
-                                                 ('satellite_height', R8)]),
-                         ('eclipse_period_flag', I4),
-                         ('reserved', I4)]
+ORBIT_PREDICTION_DATA = [("prediction_time_mjd", R8),
+                         ("prediction_time_utc", TIME),
+                         ("satellite_position_1950", R8, (3,)),
+                         ("satellite_velocity_1950", R8, (3,)),
+                         ("satellite_position_earth_fixed", R8, (3,)),
+                         ("satellite_velocity_earth_fixed", R8, (3,)),
+                         ("greenwich_sidereal_time", R8),
+                         ("sat_sun_vector_1950", [("azimuth", R8),
+                                                  ("elevation", R8)]),
+                         ("sat_sun_vector_earth_fixed", [("azimuth", R8),
+                                                         ("elevation", R8)]),
+                         ("conversion_matrix", R8, (3, 3)),
+                         ("moon_directional_vector", R8, (3,)),
+                         ("satellite_position", [("ssp_longitude", R8),
+                                                 ("ssp_latitude", R8),
+                                                 ("satellite_height", R8)]),
+                         ("eclipse_period_flag", I4),
+                         ("reserved", I4)]

-ORBIT_PREDICTION = np.dtype([('data_segment', I4),
-                             ('data_validity', I4),
-                             ('data_generation_time', TIME),
-                             ('start_time', R8),
-                             ('end_time', R8),
-                             ('prediction_interval_time', R8),
-                             ('number_of_prediction', I4),
-                             ('data_size', I4),
-                             ('data', ORBIT_PREDICTION_DATA, (9,))])
+ORBIT_PREDICTION = np.dtype([("data_segment", I4),
+                             ("data_validity", I4),
+                             ("data_generation_time", TIME),
+                             ("start_time", R8),
+                             ("end_time", R8),
+                             ("prediction_interval_time", R8),
+                             ("number_of_prediction", I4),
+                             ("data_size", I4),
+                             ("data", ORBIT_PREDICTION_DATA, (9,))])

 VIS_CALIBRATION_TABLE = np.dtype([
-    ('channel_number', I4),
-    ('data_validity', I4),
-    ('updated_time', TIME),
-    ('table_id', I4),
-    ('brightness_albedo_conversion_table', R4, (64,)),
-    ('vis_channel_staircase_brightness_data', R4, (6,)),
-    ('coefficients_table_of_vis_staircase_regression_curve', R4, (10,)),
-    ('brightness_table_for_calibration', [('universal_space_brightness', R4),
-                                          ('solar_brightness', R4)]),
-    ('calibration_uses_brightness_correspondence_voltage_chart', [('universal_space_voltage', R4),
-                                                                  ('solar_voltage', R4)]),
-    ('calibration_coefficients_of_radiation_observation', [('G', R4), ('V0', R4)]),
-    ('reserved', I4, (9,))
+    ("channel_number", I4),
+    ("data_validity", I4),
+    ("updated_time", TIME),
+    ("table_id", I4),
+    ("brightness_albedo_conversion_table", R4, (64,)),
+    ("vis_channel_staircase_brightness_data", R4, (6,)),
+    ("coefficients_table_of_vis_staircase_regression_curve", R4, (10,)),
+    ("brightness_table_for_calibration", [("universal_space_brightness", R4),
+                                          ("solar_brightness", R4)]),
+    ("calibration_uses_brightness_correspondence_voltage_chart", [("universal_space_voltage", R4),
+                                                                  ("solar_voltage", R4)]),
+    ("calibration_coefficients_of_radiation_observation", [("G", R4), ("V0", R4)]),
+    ("reserved", I4, (9,))
 ])

-VIS_CALIBRATION = np.dtype([('data_segment', I4),
-                            ('data_validity', I4),
-                            ('data_generation_time', TIME),
-                            ('sensor_group', I4),
-                            ('vis1_calibration_table', VIS_CALIBRATION_TABLE),
-                            ('vis2_calibration_table', VIS_CALIBRATION_TABLE),
-                            ('vis3_calibration_table', VIS_CALIBRATION_TABLE),
-                            ('reserved', I4, (267,))])
+VIS_CALIBRATION = np.dtype([("data_segment", I4),
+                            ("data_validity", I4),
+                            ("data_generation_time", TIME),
+                            ("sensor_group", I4),
+                            ("vis1_calibration_table", VIS_CALIBRATION_TABLE),
+                            ("vis2_calibration_table", VIS_CALIBRATION_TABLE),
+                            ("vis3_calibration_table", VIS_CALIBRATION_TABLE),
+                            ("reserved", I4, (267,))])

 TELEMETRY_DATA = np.dtype([
-    ('shutter_temp', R4),
-    ('redundant_mirror_temp', R4),
-    ('primary_mirror_temp', R4),
-    ('baffle_fw_temp', R4),
-    ('baffle_af_temp', R4),
-    ('15_volt_auxiliary_power_supply', R4),
-    ('radiative_cooler_temp_1', R4),
-    ('radiative_cooler_temp_2', R4),
-    ('electronics_module_temp', R4),
-    ('scan_mirror_temp', R4),
-    ('shutter_cavity_temp', R4),
-    ('primary_mirror_sealed_temp', R4),
-    ('redundant_mirror_sealed_temp', R4),
-    ('shutter_temp_2', R4),
-    ('reserved', R4, (2,))
+    ("shutter_temp", R4),
+    ("redundant_mirror_temp", R4),
+    ("primary_mirror_temp", R4),
+    ("baffle_fw_temp", R4),
+    ("baffle_af_temp", R4),
+    ("15_volt_auxiliary_power_supply", R4),
+    ("radiative_cooler_temp_1", R4),
+    ("radiative_cooler_temp_2", R4),
+    ("electronics_module_temp", R4),
+    ("scan_mirror_temp", R4),
+    ("shutter_cavity_temp", R4),
+    ("primary_mirror_sealed_temp", R4),
+    ("redundant_mirror_sealed_temp", R4),
+    ("shutter_temp_2", R4),
+    ("reserved", R4, (2,))
 ])

 IR_CALIBRATION = np.dtype([
-    ('data_segment', I4),
-    ('data_validity', I4),
-    ('updated_time', TIME),
-    ('sensor_group', I4),
-    ('table_id', I4),
-    ('reserved_1', I4, (2,)),
-    ('conversion_table_of_equivalent_black_body_radiation', R4, (256,)),
-    ('conversion_table_of_equivalent_black_body_temperature', R4, (256,)),
-    ('staircase_brightness_data', R4, (6,)),
-    ('coefficients_table_of_staircase_regression_curve', R4, (10,)),
-    ('brightness_data_for_calibration', [('brightness_of_space', R4),
-                                         ('brightness_of_black_body_shutter', R4),
-                                         ('reserved', R4)]),
-    ('voltage_table_for_brightness_of_calibration', [('voltage_of_space', R4),
-                                                     ('voltage_of_black_body_shutter', R4),
-                                                     ('reserved', R4)]),
-    ('calibration_coefficients_of_radiation_observation', [('G', R4), ('V0', R4)]),
-    ('valid_shutter_temperature', R4),
-    ('valid_shutter_radiation', R4),
-    ('telemetry_data_table', TELEMETRY_DATA),
-    ('flag_of_calid_shutter_temperature_calculation', I4),
-    ('reserved_2', I4, (109,))
+    ("data_segment", I4),
+    ("data_validity", I4),
+    ("updated_time", TIME),
+    ("sensor_group", I4),
+    ("table_id", I4),
+    ("reserved_1", I4, (2,)),
+    ("conversion_table_of_equivalent_black_body_radiation", R4, (256,)),
+    ("conversion_table_of_equivalent_black_body_temperature", R4, (256,)),
+    ("staircase_brightness_data", R4, (6,)),
+    ("coefficients_table_of_staircase_regression_curve", R4, (10,)),
+    ("brightness_data_for_calibration", [("brightness_of_space", R4),
+                                         ("brightness_of_black_body_shutter", R4),
+                                         ("reserved", R4)]),
+    ("voltage_table_for_brightness_of_calibration", [("voltage_of_space", R4),
+                                                     ("voltage_of_black_body_shutter", R4),
+                                                     ("reserved", R4)]),
+    ("calibration_coefficients_of_radiation_observation", [("G", R4), ("V0", R4)]),
+    ("valid_shutter_temperature", R4),
+    ("valid_shutter_radiation", R4),
+    ("telemetry_data_table", TELEMETRY_DATA),
+    ("flag_of_calid_shutter_temperature_calculation", I4),
+    ("reserved_2", I4, (109,))
 ])

 SIMPLE_COORDINATE_CONVERSION_TABLE = np.dtype([
-    ('coordinate_conversion_table', I2, (1250,)),
-    ('earth_equator_radius', R4),
-    ('satellite_height', R4),
-    ('stepping_angle', R4),
-    ('sampling_angle', R4),
-    ('ssp_latitude', R4),
-    ('ssp_longitude', R4),
-    ('ssp_line_number', R4),
-    ('ssp_pixel_number', R4),
-    ('pi', R4),
-    ('line_correction_ir1_vis', R4),
-    ('pixel_correction_ir1_vis', R4),
-    ('line_correction_ir1_ir2', R4),
-    ('pixel_correction_ir1_ir2', R4),
-    ('line_correction_ir1_wv', R4),
-    ('pixel_correction_ir1_wv', R4),
-    ('reserved', R4, (32,)),
+    ("coordinate_conversion_table", I2, (1250,)),
+    ("earth_equator_radius", R4),
+    ("satellite_height", R4),
+    ("stepping_angle", R4),
+    ("sampling_angle", R4),
+    ("ssp_latitude", R4),
+    ("ssp_longitude", R4),
+    ("ssp_line_number", R4),
+    ("ssp_pixel_number", R4),
+    ("pi", R4),
+    ("line_correction_ir1_vis", R4),
+    ("pixel_correction_ir1_vis", R4),
+    ("line_correction_ir1_ir2", R4),
+    ("pixel_correction_ir1_ir2", R4),
+    ("line_correction_ir1_wv", R4),
+    ("pixel_correction_ir1_wv", R4),
+    ("reserved", R4, (32,)),
 ])

 IMAGE_PARAMS = {
-    'mode': {
-        'dtype': MODE_BLOCK,
-        'offset': {
+    "mode": {
+        "dtype": MODE_BLOCK,
+        "offset": {
             VIS_CHANNEL: 2 * BLOCK_SIZE_VIS,
             IR_CHANNEL: 2 * BLOCK_SIZE_IR
         }
     },
-    'coordinate_conversion': {
-        'dtype': COORDINATE_CONVERSION_PARAMETERS,
-        'offset': {
+    "coordinate_conversion": {
+        "dtype": COORDINATE_CONVERSION_PARAMETERS,
+        "offset": {
             VIS_CHANNEL: 2 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE,
             IR_CHANNEL: 4 * BLOCK_SIZE_IR
         }
     },
-    'attitude_prediction': {
-        'dtype': ATTITUDE_PREDICTION,
-        'offset': {
+    "attitude_prediction": {
+        "dtype": ATTITUDE_PREDICTION,
+        "offset": {
             VIS_CHANNEL: 2 * BLOCK_SIZE_VIS + 3 * IMAGE_PARAM_ITEM_SIZE,
             IR_CHANNEL: 5 * BLOCK_SIZE_IR
         },
-        'preserve': 'data'
+        "preserve": "data"
     },
-    'orbit_prediction_1': {
-        'dtype': ORBIT_PREDICTION,
-        'offset': {
+    "orbit_prediction_1": {
+        "dtype": ORBIT_PREDICTION,
+        "offset": {
             VIS_CHANNEL: 3 * BLOCK_SIZE_VIS,
             IR_CHANNEL: 6 * BLOCK_SIZE_IR
         },
-        'preserve': 'data'
+        "preserve": "data"
    },
-    'orbit_prediction_2': {
-        'dtype': ORBIT_PREDICTION,
-        'offset': {
+    "orbit_prediction_2": {
+        "dtype": ORBIT_PREDICTION,
+        "offset": {
             VIS_CHANNEL: 3 * BLOCK_SIZE_VIS + 1 * IMAGE_PARAM_ITEM_SIZE,
             IR_CHANNEL: 7 * BLOCK_SIZE_IR
         },
-        'preserve': 'data'
+        "preserve": "data"
     },
-    'vis_calibration': {
-        'dtype': VIS_CALIBRATION,
-        'offset': {
+    "vis_calibration": {
+        "dtype": VIS_CALIBRATION,
+        "offset": {
             VIS_CHANNEL: 3 * BLOCK_SIZE_VIS + 3 * IMAGE_PARAM_ITEM_SIZE,
             IR_CHANNEL: 9 * BLOCK_SIZE_IR
         },
-        'preserve': 'data'
+        "preserve": "data"
     },
-    'ir1_calibration': {
-        'dtype': IR_CALIBRATION,
-        'offset': {
+    "ir1_calibration": {
+        "dtype": IR_CALIBRATION,
+        "offset": {
             VIS_CHANNEL: 4 * BLOCK_SIZE_VIS,
             IR_CHANNEL: 10 * BLOCK_SIZE_IR
         },
     },
-    'ir2_calibration': {
-        'dtype': IR_CALIBRATION,
-        'offset': {
+    "ir2_calibration": {
+        "dtype":
IR_CALIBRATION, + "offset": { VIS_CHANNEL: 4 * BLOCK_SIZE_VIS + IMAGE_PARAM_ITEM_SIZE, IR_CHANNEL: 11 * BLOCK_SIZE_IR }, }, - 'wv_calibration': { - 'dtype': IR_CALIBRATION, - 'offset': { + "wv_calibration": { + "dtype": IR_CALIBRATION, + "offset": { VIS_CHANNEL: 4 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE, IR_CHANNEL: 12 * BLOCK_SIZE_IR }, }, - 'simple_coordinate_conversion_table': { - 'dtype': SIMPLE_COORDINATE_CONVERSION_TABLE, - 'offset': { + "simple_coordinate_conversion_table": { + "dtype": SIMPLE_COORDINATE_CONVERSION_TABLE, + "offset": { VIS_CHANNEL: 5 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE, IR_CHANNEL: 16 * BLOCK_SIZE_IR }, @@ -362,36 +362,36 @@ } LINE_CONTROL_WORD = np.dtype([ - ('data_id', U1, (4, )), - ('line_number', I4), - ('line_name', I4), - ('error_line_flag', I4), - ('error_message', I4), - ('mode_error_flag', I4), - ('scan_time', R8), - ('beta_angle', R4), - ('west_side_earth_edge', I4), - ('east_side_earth_edge', I4), - ('received_time_1', R8), # Typo in format description (I*4) - ('received_time_2', I4), - ('reserved', U1, (8, )) + ("data_id", U1, (4, )), + ("line_number", I4), + ("line_name", I4), + ("error_line_flag", I4), + ("error_message", I4), + ("mode_error_flag", I4), + ("scan_time", R8), + ("beta_angle", R4), + ("west_side_earth_edge", I4), + ("east_side_earth_edge", I4), + ("received_time_1", R8), # Typo in format description (I*4) + ("received_time_2", I4), + ("reserved", U1, (8, )) ]) -IMAGE_DATA_BLOCK_IR = np.dtype([('LCW', LINE_CONTROL_WORD), - ('DOC', U1, (256,)), # Omitted - ('image_data', U1, 3344)]) +IMAGE_DATA_BLOCK_IR = np.dtype([("LCW", LINE_CONTROL_WORD), + ("DOC", U1, (256,)), # Omitted + ("image_data", U1, 3344)]) -IMAGE_DATA_BLOCK_VIS = np.dtype([('LCW', LINE_CONTROL_WORD), - ('DOC', U1, (64,)), # Omitted - ('image_data', U1, (13376,))]) +IMAGE_DATA_BLOCK_VIS = np.dtype([("LCW", LINE_CONTROL_WORD), + ("DOC", U1, (64,)), # Omitted + ("image_data", U1, (13376,))]) IMAGE_DATA = { VIS_CHANNEL: { - 'offset': 6 * BLOCK_SIZE_VIS, - 'dtype': IMAGE_DATA_BLOCK_VIS, + "offset": 6 * BLOCK_SIZE_VIS, + "dtype": IMAGE_DATA_BLOCK_VIS, }, IR_CHANNEL: { - 'offset': 18 * BLOCK_SIZE_IR, - 'dtype': IMAGE_DATA_BLOCK_IR + "offset": 18 * BLOCK_SIZE_IR, + "dtype": IMAGE_DATA_BLOCK_IR } } diff --git a/satpy/readers/goes_imager_hrit.py b/satpy/readers/goes_imager_hrit.py index 05bcc513d7..457d5d809c 100644 --- a/satpy/readers/goes_imager_hrit.py +++ b/satpy/readers/goes_imager_hrit.py @@ -46,7 +46,7 @@ class CalibrationError(Exception): """Dummy error-class.""" -logger = logging.getLogger('hrit_goes') +logger = logging.getLogger("hrit_goes") # Geometric constants [meters] EQUATOR_RADIUS = 6378169.00 @@ -54,30 +54,30 @@ class CalibrationError(Exception): ALTITUDE = 35785831.00 # goes implementation: -key_header = np.dtype([('key_number', 'u1'), - ('seed', '>f8')]) - -segment_identification = np.dtype([('GP_SC_ID', '>i2'), - ('spectral_channel_id', '>i1'), - ('segment_sequence_number', '>u2'), - ('planned_start_segment_number', '>u2'), - ('planned_end_segment_number', '>u2'), - ('data_field_representation', '>i1')]) - -image_segment_line_quality = np.dtype([('line_number_in_grid', '>i4'), - ('line_mean_acquisition', - [('days', '>u2'), - ('milliseconds', '>u4')]), - ('line_validity', 'u1'), - ('line_radiometric_quality', 'u1'), - ('line_geometric_quality', 'u1')]) +key_header = np.dtype([("key_number", "u1"), + ("seed", ">f8")]) + +segment_identification = np.dtype([("GP_SC_ID", ">i2"), + ("spectral_channel_id", ">i1"), + ("segment_sequence_number", ">u2"), + 
("planned_start_segment_number", ">u2"), + ("planned_end_segment_number", ">u2"), + ("data_field_representation", ">i1")]) + +image_segment_line_quality = np.dtype([("line_number_in_grid", ">i4"), + ("line_mean_acquisition", + [("days", ">u2"), + ("milliseconds", ">u4")]), + ("line_validity", "u1"), + ("line_radiometric_quality", "u1"), + ("line_geometric_quality", "u1")]) goms_variable_length_headers = { - image_segment_line_quality: 'image_segment_line_quality'} + image_segment_line_quality: "image_segment_line_quality"} -goms_text_headers = {image_data_function: 'image_data_function', - annotation_header: 'annotation_header', - ancillary_text: 'ancillary_text'} +goms_text_headers = {image_data_function: "image_data_function", + annotation_header: "annotation_header", + ancillary_text: "ancillary_text"} goes_hdr_map = base_hdr_map.copy() goes_hdr_map.update({7: key_header, @@ -86,53 +86,53 @@ class CalibrationError(Exception): }) -orbit_coef = np.dtype([('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('X', '>f8', (8, )), - ('Y', '>f8', (8, )), - ('Z', '>f8', (8, )), - ('VX', '>f8', (8, )), - ('VY', '>f8', (8, )), - ('VZ', '>f8', (8, ))]) +orbit_coef = np.dtype([("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("X", ">f8", (8, )), + ("Y", ">f8", (8, )), + ("Z", ">f8", (8, )), + ("VX", ">f8", (8, )), + ("VY", ">f8", (8, )), + ("VZ", ">f8", (8, ))]) -attitude_coef = np.dtype([('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('XofSpinAxis', '>f8', (8, )), - ('YofSpinAxis', '>f8', (8, )), - ('ZofSpinAxis', '>f8', (8, ))]) +attitude_coef = np.dtype([("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("XofSpinAxis", ">f8", (8, )), + ("YofSpinAxis", ">f8", (8, )), + ("ZofSpinAxis", ">f8", (8, ))]) -cuc_time = np.dtype([('coarse', 'u1', (4, )), - ('fine', 'u1', (3, ))]) +cuc_time = np.dtype([("coarse", "u1", (4, )), + ("fine", "u1", (3, ))]) -sgs_time = np.dtype([('century', 'u1'), - ('year', 'u1'), - ('doy1', 'u1'), - ('doy_hours', 'u1'), - ('hours_mins', 'u1'), - ('mins_secs', 'u1'), - ('secs_msecs', 'u1'), - ('msecs', 'u1')]) +sgs_time = np.dtype([("century", "u1"), + ("year", "u1"), + ("doy1", "u1"), + ("doy_hours", "u1"), + ("hours_mins", "u1"), + ("mins_secs", "u1"), + ("secs_msecs", "u1"), + ("msecs", "u1")]) def make_sgs_time(sgs_time_array): """Make sgs time.""" - year = ((sgs_time_array['century'] >> 4) * 1000 + - (sgs_time_array['century'] & 15) * 100 + - (sgs_time_array['year'] >> 4) * 10 + - (sgs_time_array['year'] & 15)) - doy = ((sgs_time_array['doy1'] >> 4) * 100 + - (sgs_time_array['doy1'] & 15) * 10 + - (sgs_time_array['doy_hours'] >> 4)) - hours = ((sgs_time_array['doy_hours'] & 15) * 10 + - (sgs_time_array['hours_mins'] >> 4)) - mins = ((sgs_time_array['hours_mins'] & 15) * 10 + - (sgs_time_array['mins_secs'] >> 4)) - secs = ((sgs_time_array['mins_secs'] & 15) * 10 + - (sgs_time_array['secs_msecs'] >> 4)) - msecs = ((sgs_time_array['secs_msecs'] & 15) * 100 + - (sgs_time_array['msecs'] >> 4) * 10 + - (sgs_time_array['msecs'] & 15)) + year = ((sgs_time_array["century"] >> 4) * 1000 + + (sgs_time_array["century"] & 15) * 100 + + (sgs_time_array["year"] >> 4) * 10 + + (sgs_time_array["year"] & 15)) + doy = ((sgs_time_array["doy1"] >> 4) * 100 + + (sgs_time_array["doy1"] & 15) * 10 + + (sgs_time_array["doy_hours"] >> 4)) + hours = ((sgs_time_array["doy_hours"] & 15) * 10 + + (sgs_time_array["hours_mins"] >> 4)) + mins = ((sgs_time_array["hours_mins"] & 15) * 10 + + (sgs_time_array["mins_secs"] >> 4)) + secs = 
((sgs_time_array["mins_secs"] & 15) * 10 + + (sgs_time_array["secs_msecs"] >> 4)) + msecs = ((sgs_time_array["secs_msecs"] & 15) * 100 + + (sgs_time_array["msecs"] >> 4) * 10 + + (sgs_time_array["msecs"] & 15)) return (datetime(int(year), 1, 1) + timedelta(days=int(doy - 1), hours=int(hours), @@ -156,7 +156,7 @@ def make_sgs_time(sgs_time_array): - ("RelativeScanCount", '>u2'), - ("AbsoluteScanCount", '>u2'), - ("NorthernmostScanLine", '>u2'), - ("WesternmostPixel", '>u2'), - ("EasternmostPixel", '>u2'), - ("NorthernmostFrameLine", '>u2'), - ("SouthernmostFrameLine", '>u2'), - ("0Pixel", '>u2'), - ("0ScanLine", '>u2'), - ("0Scan", '>u2'), - ("SubSatScan", '>u2'), - ("SubSatPixel", '>u2'), + ("RelativeScanCount", ">u2"), + ("AbsoluteScanCount", ">u2"), + ("NorthernmostScanLine", ">u2"), + ("WesternmostPixel", ">u2"), + ("EasternmostPixel", ">u2"), + ("NorthernmostFrameLine", ">u2"), + ("SouthernmostFrameLine", ">u2"), + ("0Pixel", ">u2"), + ("0ScanLine", ">u2"), + ("0Scan", ">u2"), + ("SubSatScan", ">u2"), + ("SubSatPixel", ">u2"), ("SubSatLatitude", gvar_float), ("SubSatLongitude", gvar_float), ("Junk4", "u1", 96), # move to "word" 295 @@ -250,7 +250,7 @@ def __init__(self, filename, filename_info, filetype_info): def read_prologue(self): """Read the prologue metadata.""" with open(self.filename, "rb") as fp_: - fp_.seek(self.mda['total_header_length']) + fp_.seek(self.mda["total_header_length"]) data = np.fromfile(fp_, dtype=prologue, count=1) self.prologue.update(recarray2dict(data)) @@ -258,16 +258,16 @@ def read_prologue(self): def process_prologue(self): """Reprocess prologue to correct types.""" - for key in ['TCurr', 'TCHED', 'TCTRL', 'TLHED', 'TLTRL', 'TIPFS', - 'TINFS', 'TISPC', 'TIECL', 'TIBBC', 'TISTR', 'TLRAN', - 'TIIRT', 'TIVIT', 'TCLMT', 'TIONA']: + for key in ["TCurr", "TCHED", "TCTRL", "TLHED", "TLTRL", "TIPFS", + "TINFS", "TISPC", "TIECL", "TIBBC", "TISTR", "TLRAN", + "TIIRT", "TIVIT", "TCLMT", "TIONA"]: try: self.prologue[key] = make_sgs_time(self.prologue[key]) except ValueError: self.prologue.pop(key, None) logger.debug("Invalid data for %s", key) - for key in ['SubSatLatitude', "SubSatLongitude", "ReferenceLongitude", + for key in ["SubSatLatitude", "SubSatLongitude", "ReferenceLongitude", "ReferenceDistance", "ReferenceLatitude"]: self.prologue[key] = make_gvar_float(self.prologue[key]) @@ -352,7 +352,7 @@ def process_prologue(self): 14: "GOES-14", 15: "GOES-15"} -SENSOR_NAME = 'goes_imager' +SENSOR_NAME = "goes_imager" class HRITGOESFileHandler(HRITFileHandler): @@ -367,12 +367,12 @@ def __init__(self, filename, filename_info, filetype_info, goms_variable_length_headers, goms_text_headers)) self.prologue = prologue.prologue - self.chid = self.mda['spectral_channel_id'] + self.chid = self.mda["spectral_channel_id"] - sublon = self.prologue['SubSatLongitude'] - self.mda['projection_parameters']['SSP_longitude'] = sublon + sublon = self.prologue["SubSatLongitude"] + self.mda["projection_parameters"]["SSP_longitude"] = sublon - satellite_id = self.prologue['SatelliteID'] + satellite_id = self.prologue["SatelliteID"] self.platform_name = SPACECRAFTS[satellite_id] def get_dataset(self, key, info): @@ -380,17 +380,17 @@ def get_dataset(self, key, info): """Load dataset designated by the given key from file.""" logger.debug("Getting raw data") res = super(HRITGOESFileHandler, self).get_dataset(key, info) - self.mda['calibration_parameters'] = self._get_calibration_params() + self.mda["calibration_parameters"] = self._get_calibration_params() - res = self.calibrate(res, key['calibration']) + res = self.calibrate(res, key["calibration"]) new_attrs = 
info.copy() new_attrs.update(res.attrs) res.attrs = new_attrs - res.attrs['platform_name'] = self.platform_name - res.attrs['sensor'] = SENSOR_NAME - res.attrs['orbital_parameters'] = {'projection_longitude': self.mda['projection_parameters']['SSP_longitude'], - 'projection_latitude': 0.0, - 'projection_altitude': ALTITUDE} + res.attrs["platform_name"] = self.platform_name + res.attrs["sensor"] = SENSOR_NAME + res.attrs["orbital_parameters"] = {"projection_longitude": self.mda["projection_parameters"]["SSP_longitude"], + "projection_latitude": 0.0, + "projection_altitude": ALTITUDE} return res def _get_calibration_params(self): @@ -398,9 +398,9 @@ def _get_calibration_params(self): params = {} idx_table = [] val_table = [] - for elt in self.mda['image_data_function'].split(b'\r\n'): + for elt in self.mda["image_data_function"].split(b"\r\n"): try: - key, val = elt.split(b':=') + key, val = elt.split(b":=") try: idx_table.append(int(key)) val_table.append(float(val)) @@ -408,19 +408,19 @@ def _get_calibration_params(self): params[key] = val except ValueError: pass - params['indices'] = np.array(idx_table) - params['values'] = np.array(val_table, dtype=np.float32) + params["indices"] = np.array(idx_table) + params["values"] = np.array(val_table, dtype=np.float32) return params def calibrate(self, data, calibration): """Calibrate the data.""" logger.debug("Calibration") tic = datetime.now() - if calibration == 'counts': + if calibration == "counts": return data - if calibration == 'reflectance': + if calibration == "reflectance": res = self._calibrate(data) - elif calibration == 'brightness_temperature': + elif calibration == "brightness_temperature": res = self._calibrate(data) else: raise NotImplementedError("Don't know how to calibrate to " + @@ -431,17 +431,17 @@ def calibrate(self, data, calibration): def _calibrate(self, data): """Calibrate *data*.""" - idx = self.mda['calibration_parameters']['indices'] - val = self.mda['calibration_parameters']['values'] + idx = self.mda["calibration_parameters"]["indices"] + val = self.mda["calibration_parameters"]["values"] data.data = da.where(data.data == 0, np.nan, data.data) ddata = data.data.map_blocks(np.interp, idx, val, dtype=val.dtype) res = xr.DataArray(ddata, dims=data.dims, attrs=data.attrs, coords=data.coords) res = res.clip(min=0) - units = {b'percent': '%', b'degree Kelvin': 'K'} - unit = self.mda['calibration_parameters'][b'_UNIT'] - res.attrs['units'] = units.get(unit, unit) + units = {b"percent": "%", b"degree Kelvin": "K"} + unit = self.mda["calibration_parameters"][b"_UNIT"] + res.attrs["units"] = units.get(unit, unit) return res def get_area_def(self, dataset_id): @@ -453,32 +453,32 @@ def get_area_def(self, dataset_id): return area def _get_proj_dict(self, dataset_id): - loff = np.float32(self.mda['loff']) - nlines = np.int32(self.mda['number_of_lines']) + loff = np.float32(self.mda["loff"]) + nlines = np.int32(self.mda["number_of_lines"]) loff = nlines - loff name_dict = get_geos_area_naming({ - 'platform_name': self.platform_name, - 'instrument_name': SENSOR_NAME, + "platform_name": self.platform_name, + "instrument_name": SENSOR_NAME, # Partial scans are padded to full disk - 'service_name': 'FD', - 'service_desc': 'Full Disk', - 'resolution': dataset_id['resolution'] + "service_name": "FD", + "service_desc": "Full Disk", + "resolution": dataset_id["resolution"] }) return { - 'a': EQUATOR_RADIUS, - 'b': POLE_RADIUS, - 'ssp_lon': float(self.prologue['SubSatLongitude']), - 'h': ALTITUDE, - 'proj': 'geos', - 'units': 'm', - 
'a_name': name_dict['area_id'], - 'a_desc': name_dict['description'], - 'p_id': '', - 'nlines': nlines, - 'ncols': np.int32(self.mda['number_of_columns']), - 'cfac': np.int32(self.mda['cfac']), - 'lfac': np.int32(self.mda['lfac']), - 'coff': np.float32(self.mda['coff']), - 'loff': loff, - 'scandir': 'N2S' + "a": EQUATOR_RADIUS, + "b": POLE_RADIUS, + "ssp_lon": float(self.prologue["SubSatLongitude"]), + "h": ALTITUDE, + "proj": "geos", + "units": "m", + "a_name": name_dict["area_id"], + "a_desc": name_dict["description"], + "p_id": "", + "nlines": nlines, + "ncols": np.int32(self.mda["number_of_columns"]), + "cfac": np.int32(self.mda["cfac"]), + "lfac": np.int32(self.mda["lfac"]), + "coff": np.float32(self.mda["coff"]), + "loff": loff, + "scandir": "N2S" } diff --git a/satpy/readers/goes_imager_nc.py b/satpy/readers/goes_imager_nc.py index 68932531a8..c343b7f7c5 100644 --- a/satpy/readers/goes_imager_nc.py +++ b/satpy/readers/goes_imager_nc.py @@ -278,287 +278,287 @@ SCALE_13_3 = 5.5297 OFFSET_13_3 = 16.5892 CALIB_COEFS = { - 'GOES-15': {'00_7': {'slope': [5.851966E-1, 5.879772E-1, 5.856793E-1, + "GOES-15": {"00_7": {"slope": [5.851966E-1, 5.879772E-1, 5.856793E-1, 5.854250E-1, 5.866992E-1, 5.836241E-1, 5.846555E-1, 5.843753E-1], - 'offset': [-16.9707, -17.0513, -16.9847, -16.9773, + "offset": [-16.9707, -17.0513, -16.9847, -16.9773, -17.0143, -16.9251, -16.9550, -16.9469], - 'x0': 29, - 'k': 1.88852E-3}, - '03_9': {'scale': SCALE_03_9, - 'offset': OFFSET_03_9, - 'n': [2562.7905, 2562.7905], - 'a': [-1.5693377, -1.5693377], - 'b': [1.0025034, 1.0025034], - 'btmin': 205.0, - 'btmax': 340.0}, - '06_5': {'scale': SCALE_06_8, - 'offset': OFFSET_06_8, - 'n': [1521.1988, 1521.5277], - 'a': [-3.4706545, -3.4755568], - 'b': [1.0093296, 1.0092838], - 'btmin': 180.0, - 'btmax': 340.0}, - '10_7': {'scale': SCALE_10_7, - 'offset': OFFSET_10_7, - 'n': [935.89417, 935.78158], - 'a': [-0.36151367, -0.35316361], - 'b': [1.0012715, 1.0012570], - 'btmin': 180.0, - 'btmax': 340.0}, - '13_3': {'scale': SCALE_13_3, - 'offset': OFFSET_13_3, - 'n': [753.72229, 753.93403], - 'a': [-0.21475817, -0.24630068], - 'b': [1.0006485, 1.0007178], - 'btmin': 180.0, - 'btmax': 340.0} + "x0": 29, + "k": 1.88852E-3}, + "03_9": {"scale": SCALE_03_9, + "offset": OFFSET_03_9, + "n": [2562.7905, 2562.7905], + "a": [-1.5693377, -1.5693377], + "b": [1.0025034, 1.0025034], + "btmin": 205.0, + "btmax": 340.0}, + "06_5": {"scale": SCALE_06_8, + "offset": OFFSET_06_8, + "n": [1521.1988, 1521.5277], + "a": [-3.4706545, -3.4755568], + "b": [1.0093296, 1.0092838], + "btmin": 180.0, + "btmax": 340.0}, + "10_7": {"scale": SCALE_10_7, + "offset": OFFSET_10_7, + "n": [935.89417, 935.78158], + "a": [-0.36151367, -0.35316361], + "b": [1.0012715, 1.0012570], + "btmin": 180.0, + "btmax": 340.0}, + "13_3": {"scale": SCALE_13_3, + "offset": OFFSET_13_3, + "n": [753.72229, 753.93403], + "a": [-0.21475817, -0.24630068], + "b": [1.0006485, 1.0007178], + "btmin": 180.0, + "btmax": 340.0} }, # ITT RevH + STAR Correction - 'GOES-14': {'00_7': {'slope': [5.874693E-1, 5.865367E-1, 5.862807E-1, + "GOES-14": {"00_7": {"slope": [5.874693E-1, 5.865367E-1, 5.862807E-1, 5.864086E-1, 5.857146E-1, 5.852004E-1, 5.860814E-1, 5.841697E-1], - 'offset': [-17.037, -17.010, -17.002, -17.006, + "offset": [-17.037, -17.010, -17.002, -17.006, -16.986, -16.971, -16.996, -16.941], - 'x0': 29, - 'k': 1.88772E-3}, - '03_9': {'scale': SCALE_03_9, - 'offset': OFFSET_03_9, - 'n': [2577.3518, 2577.3518], - 'a': [-1.5297091, -1.5297091], - 'b': [1.0025608, 1.0025608], - 'btmin': 
205.0, - 'btmax': 340.0}, - '06_5': {'scale': SCALE_06_8, - 'offset': OFFSET_06_8, - 'n': [1519.3488, 1518.5610], - 'a': [-3.4647892, -3.4390527], - 'b': [1.0093656, 1.0094427], - 'btmin': 180.0, - 'btmax': 340.0}, - '10_7': {'scale': SCALE_10_7, - 'offset': OFFSET_10_7, - 'n': [933.98541, 934.19579], - 'a': [-0.29201763, -0.31824779], - 'b': [1.0012018, 1.0012303], - 'btmin': 180.0, - 'btmax': 340.0}, - '13_3': {'scale': SCALE_13_3, - 'offset': OFFSET_13_3, - 'n': [752.88143, 752.82392], - 'a': [-0.22508805, -0.21700982], - 'b': [1.0006686, 1.0006503], - 'btmin': 180.0, - 'btmax': 340.0} + "x0": 29, + "k": 1.88772E-3}, + "03_9": {"scale": SCALE_03_9, + "offset": OFFSET_03_9, + "n": [2577.3518, 2577.3518], + "a": [-1.5297091, -1.5297091], + "b": [1.0025608, 1.0025608], + "btmin": 205.0, + "btmax": 340.0}, + "06_5": {"scale": SCALE_06_8, + "offset": OFFSET_06_8, + "n": [1519.3488, 1518.5610], + "a": [-3.4647892, -3.4390527], + "b": [1.0093656, 1.0094427], + "btmin": 180.0, + "btmax": 340.0}, + "10_7": {"scale": SCALE_10_7, + "offset": OFFSET_10_7, + "n": [933.98541, 934.19579], + "a": [-0.29201763, -0.31824779], + "b": [1.0012018, 1.0012303], + "btmin": 180.0, + "btmax": 340.0}, + "13_3": {"scale": SCALE_13_3, + "offset": OFFSET_13_3, + "n": [752.88143, 752.82392], + "a": [-0.22508805, -0.21700982], + "b": [1.0006686, 1.0006503], + "btmin": 180.0, + "btmax": 340.0} }, # ITT RevH + STAR Correction - 'GOES-13': {'00_7': {'slope': [6.120196E-1, 6.118504E-1, 6.096360E-1, + "GOES-13": {"00_7": {"slope": [6.120196E-1, 6.118504E-1, 6.096360E-1, 6.087055E-1, 6.132860E-1, 6.118208E-1, 6.122307E-1, 6.066968E-1], - 'offset': [-17.749, -17.744, -17.769, -17.653, + "offset": [-17.749, -17.744, -17.769, -17.653, -17.785, -17.743, -17.755, -17.594], - 'x0': 29, - 'k': 1.89544E-3}, - '03_9': {'scale': SCALE_03_9, - 'offset': OFFSET_03_9, - 'n': [2561.74, 2561.74], - 'a': [-1.437204, -1.437204], - 'b': [1.002562, 1.002562], - 'btmin': 205.0, - 'btmax': 340.0}, - '06_5': {'scale': SCALE_06_8, - 'offset': OFFSET_06_8, - 'n': [1522.52, 1521.66], - 'a': [-3.625663, -3.607841], - 'b': [1.010018, 1.010010], - 'btmin': 180.0, - 'btmax': 340.0}, - '10_7': {'scale': SCALE_10_7, - 'offset': OFFSET_10_7, - 'n': [937.23, 937.27], - 'a': [-0.386043, -0.380113], - 'b': [1.001298, 1.001285], - 'btmin': 180.0, - 'btmax': 340.0}, - '13_3': {'scale': SCALE_13_3, - 'offset': OFFSET_13_3, - 'n': [749.83], - 'a': [-0.134801], - 'b': [1.000482], - 'btmin': 180.0, - 'btmax': 340.0} # Has only one detector on GOES-13 + "x0": 29, + "k": 1.89544E-3}, + "03_9": {"scale": SCALE_03_9, + "offset": OFFSET_03_9, + "n": [2561.74, 2561.74], + "a": [-1.437204, -1.437204], + "b": [1.002562, 1.002562], + "btmin": 205.0, + "btmax": 340.0}, + "06_5": {"scale": SCALE_06_8, + "offset": OFFSET_06_8, + "n": [1522.52, 1521.66], + "a": [-3.625663, -3.607841], + "b": [1.010018, 1.010010], + "btmin": 180.0, + "btmax": 340.0}, + "10_7": {"scale": SCALE_10_7, + "offset": OFFSET_10_7, + "n": [937.23, 937.27], + "a": [-0.386043, -0.380113], + "b": [1.001298, 1.001285], + "btmin": 180.0, + "btmax": 340.0}, + "13_3": {"scale": SCALE_13_3, + "offset": OFFSET_13_3, + "n": [749.83], + "a": [-0.134801], + "b": [1.000482], + "btmin": 180.0, + "btmax": 340.0} # Has only one detector on GOES-13 }, - 'GOES-12': {'00_7': {'slope': [5.771030E-1, 5.761764E-1, 5.775825E-1, + "GOES-12": {"00_7": {"slope": [5.771030E-1, 5.761764E-1, 5.775825E-1, 5.790699E-1, 5.787051E-1, 5.755969E-1, 5.753973E-1, 5.752099E-1], - 'offset': [-16.736, -16.709, -16.750, -16.793, + "offset": 
[-16.736, -16.709, -16.750, -16.793, -16.782, -16.692, -16.687, -16.681], - 'x0': 29, - 'k': 1.97658E-3}, - '03_9': {'scale': SCALE_03_9, - 'offset': OFFSET_03_9, - 'n': [2562.45, 2562.45], - 'a': [-0.650731, -0.650731], - 'b': [1.001520, 1.001520], - 'btmin': 205.0, - 'btmax': 340.0}, - '06_5': {'scale': SCALE_06_8, - 'offset': OFFSET_06_8, - 'n': [1536.43, 1536.94], - 'a': [-4.764728, -4.775517], - 'b': [1.012420, 1.012403], - 'btmin': 180.0, - 'btmax': 340.0}, - '10_7': {'scale': SCALE_10_7, - 'offset': OFFSET_10_7, - 'n': [933.21, 933.21], - 'a': [-0.360331, -0.360331], - 'b': [1.001306, 1.001306], - 'btmin': 180.0, - 'btmax': 340.0}, - '13_3': {'scale': SCALE_13_3, - 'offset': OFFSET_13_3, - 'n': [751.91], - 'a': [-0.253449], - 'b': [1.000743], - 'btmin': 180.0, - 'btmax': 340.0} # Has only one detector on GOES-12 + "x0": 29, + "k": 1.97658E-3}, + "03_9": {"scale": SCALE_03_9, + "offset": OFFSET_03_9, + "n": [2562.45, 2562.45], + "a": [-0.650731, -0.650731], + "b": [1.001520, 1.001520], + "btmin": 205.0, + "btmax": 340.0}, + "06_5": {"scale": SCALE_06_8, + "offset": OFFSET_06_8, + "n": [1536.43, 1536.94], + "a": [-4.764728, -4.775517], + "b": [1.012420, 1.012403], + "btmin": 180.0, + "btmax": 340.0}, + "10_7": {"scale": SCALE_10_7, + "offset": OFFSET_10_7, + "n": [933.21, 933.21], + "a": [-0.360331, -0.360331], + "b": [1.001306, 1.001306], + "btmin": 180.0, + "btmax": 340.0}, + "13_3": {"scale": SCALE_13_3, + "offset": OFFSET_13_3, + "n": [751.91], + "a": [-0.253449], + "b": [1.000743], + "btmin": 180.0, + "btmax": 340.0} # Has only one detector on GOES-12 }, - 'GOES-11': {'00_7': {'slope': [5.561568E-1, 5.552979E-1, 5.558981E-1, + "GOES-11": {"00_7": {"slope": [5.561568E-1, 5.552979E-1, 5.558981E-1, 5.577627E-1, 5.557238E-1, 5.587978E-1, 5.586530E-1, 5.528971E-1], - 'offset': [-16.129, -16.104, -16.121, -16.175, + "offset": [-16.129, -16.104, -16.121, -16.175, -16.116, -16.205, -16.201, -16.034], - 'x0': 29, - 'k': 2.01524E-3}, - '03_9': {'scale': SCALE_03_9, - 'offset': OFFSET_03_9, - 'n': [2562.07, 2562.07], - 'a': [-0.644790, -0.644790], - 'b': [1.000775, 1.000775], - 'btmin': 205.0, - 'btmax': 340.0}, - '06_8': {'scale': SCALE_06_8, - 'offset': OFFSET_06_8, - 'n': [1481.53], - 'a': [-0.543401], - 'b': [1.001495], - 'btmin': 180.0, - 'btmax': 340.0}, - '10_7': {'scale': SCALE_10_7, - 'offset': OFFSET_10_7, - 'n': [931.76, 931.76], - 'a': [-0.306809, -0.306809], - 'b': [1.001274, 1.001274], - 'btmin': 180.0, - 'btmax': 340.0}, - '12_0': {'scale': SCALE_12_0, - 'offset': OFFSET_12_0, - 'n': [833.67, 833.04], - 'a': [-0.333216, -0.315110], - 'b': [1.001000, 1.000967], - 'btmin': 180.0, - 'btmax': 340.0} + "x0": 29, + "k": 2.01524E-3}, + "03_9": {"scale": SCALE_03_9, + "offset": OFFSET_03_9, + "n": [2562.07, 2562.07], + "a": [-0.644790, -0.644790], + "b": [1.000775, 1.000775], + "btmin": 205.0, + "btmax": 340.0}, + "06_8": {"scale": SCALE_06_8, + "offset": OFFSET_06_8, + "n": [1481.53], + "a": [-0.543401], + "b": [1.001495], + "btmin": 180.0, + "btmax": 340.0}, + "10_7": {"scale": SCALE_10_7, + "offset": OFFSET_10_7, + "n": [931.76, 931.76], + "a": [-0.306809, -0.306809], + "b": [1.001274, 1.001274], + "btmin": 180.0, + "btmax": 340.0}, + "12_0": {"scale": SCALE_12_0, + "offset": OFFSET_12_0, + "n": [833.67, 833.04], + "a": [-0.333216, -0.315110], + "b": [1.001000, 1.000967], + "btmin": 180.0, + "btmax": 340.0} }, - 'GOES-10': {'00_7': {'slope': [5.605602E-1, 5.563529E-1, 5.566574E-1, + "GOES-10": {"00_7": {"slope": [5.605602E-1, 5.563529E-1, 5.566574E-1, 5.582154E-1, 5.583361E-1, 
5.571736E-1, 5.563135E-1, 5.613536E-1], - 'offset': [-16.256, -16.134, -16.143, -16.188, + "offset": [-16.256, -16.134, -16.143, -16.188, -16.192, -16.158, -16.133, -16.279], - 'x0': 29, - 'k': 1.98808E-3}, - '03_9': {'scale': SCALE_03_9, - 'offset': OFFSET_03_9, - 'n': [2552.9845, 2552.9845], - 'a': [-0.60584483, -0.60584483], - 'b': [1.0011017, 1.0011017], - 'btmin': 205.0, - 'btmax': 340.0}, - '06_8': {'scale': SCALE_06_8, - 'offset': OFFSET_06_8, - 'n': [1486.2212], - 'a': [-0.61653805], - 'b': [1.0014011], - 'btmin': 180.0, - 'btmax': 340.0}, - '10_7': {'scale': SCALE_10_7, - 'offset': OFFSET_10_7, - 'n': [936.10260, 935.98981], - 'a': [-0.27128884, -0.27064036], - 'b': [1.0009674, 1.0009687], - 'btmin': 180.0, - 'btmax': 340.0}, - '12_0': {'scale': SCALE_12_0, - 'offset': OFFSET_12_0, - 'n': [830.88473, 830.89691], - 'a': [-0.26505411, -0.26056452], - 'b': [1.0009087, 1.0008962], - 'btmin': 180.0, - 'btmax': 340.0} + "x0": 29, + "k": 1.98808E-3}, + "03_9": {"scale": SCALE_03_9, + "offset": OFFSET_03_9, + "n": [2552.9845, 2552.9845], + "a": [-0.60584483, -0.60584483], + "b": [1.0011017, 1.0011017], + "btmin": 205.0, + "btmax": 340.0}, + "06_8": {"scale": SCALE_06_8, + "offset": OFFSET_06_8, + "n": [1486.2212], + "a": [-0.61653805], + "b": [1.0014011], + "btmin": 180.0, + "btmax": 340.0}, + "10_7": {"scale": SCALE_10_7, + "offset": OFFSET_10_7, + "n": [936.10260, 935.98981], + "a": [-0.27128884, -0.27064036], + "b": [1.0009674, 1.0009687], + "btmin": 180.0, + "btmax": 340.0}, + "12_0": {"scale": SCALE_12_0, + "offset": OFFSET_12_0, + "n": [830.88473, 830.89691], + "a": [-0.26505411, -0.26056452], + "b": [1.0009087, 1.0008962], + "btmin": 180.0, + "btmax": 340.0} }, - 'GOES-9': {'00_7': {'slope': [0.5492361], - 'offset': [-15.928], - 'x0': 29, - 'k': 1.94180E-3}, - '03_9': {'scale': SCALE_03_9, - 'offset': OFFSET_03_9, - 'n': [2555.18, 2555.18], - 'a': [-0.579908, -0.579908], - 'b': [1.000942, 1.000942], - 'btmin': 205.0, - 'btmax': 340.0}, - '06_8': {'scale': SCALE_06_8, - 'offset': OFFSET_06_8, - 'n': [1481.82], - 'a': [-0.493016], - 'b': [1.001076], - 'btmin': 180.0, - 'btmax': 340.0}, - '10_7': {'scale': SCALE_10_7, - 'offset': OFFSET_10_7, - 'n': [934.59, 934.28], - 'a': [-0.384798, -0.363703], - 'b': [1.001293, 1.001272], - 'btmin': 180.0, - 'btmax': 340.0}, - '12_0': {'scale': SCALE_12_0, - 'offset': OFFSET_12_0, - 'n': [834.02, 834.09], - 'a': [-0.302995, -0.306838], - 'b': [1.000941, 1.000948], - 'btmin': 180.0, - 'btmax': 340.0} + "GOES-9": {"00_7": {"slope": [0.5492361], + "offset": [-15.928], + "x0": 29, + "k": 1.94180E-3}, + "03_9": {"scale": SCALE_03_9, + "offset": OFFSET_03_9, + "n": [2555.18, 2555.18], + "a": [-0.579908, -0.579908], + "b": [1.000942, 1.000942], + "btmin": 205.0, + "btmax": 340.0}, + "06_8": {"scale": SCALE_06_8, + "offset": OFFSET_06_8, + "n": [1481.82], + "a": [-0.493016], + "b": [1.001076], + "btmin": 180.0, + "btmax": 340.0}, + "10_7": {"scale": SCALE_10_7, + "offset": OFFSET_10_7, + "n": [934.59, 934.28], + "a": [-0.384798, -0.363703], + "b": [1.001293, 1.001272], + "btmin": 180.0, + "btmax": 340.0}, + "12_0": {"scale": SCALE_12_0, + "offset": OFFSET_12_0, + "n": [834.02, 834.09], + "a": [-0.302995, -0.306838], + "b": [1.000941, 1.000948], + "btmin": 180.0, + "btmax": 340.0} }, - 'GOES-8': {'00_7': {'slope': [0.5501873], - 'offset': [-15.955], - 'x0': 29, - 'k': 1.92979E-3}, - '03_9': {'scale': SCALE_03_9, - 'offset': OFFSET_03_9, - 'n': [2556.71, 2558.62], - 'a': [-0.578526, -0.581853], - 'b': [1.001512, 1.001532], - 'btmin': 205.0, - 'btmax': 
340.0}, - '06_8': {'scale': SCALE_06_8, - 'offset': OFFSET_06_8, - 'n': [1481.91], - 'a': [-0.593903], - 'b': [1.001418], - 'btmin': 180.0, - 'btmax': 340.0}, - '10_7': {'scale': SCALE_10_7, - 'offset': OFFSET_10_7, - 'n': [934.30, 935.38], - 'a': [-0.322585, -0.351889], - 'b': [1.001271, 1.001293], - 'btmin': 180.0, - 'btmax': 340.0}, - '12_0': {'scale': SCALE_12_0, - 'offset': OFFSET_12_0, - 'n': [837.06, 837.00], - 'a': [-0.422571, -0.466954], - 'b': [1.001170, 1.001257], - 'btmin': 180.0, - 'btmax': 340.0} + "GOES-8": {"00_7": {"slope": [0.5501873], + "offset": [-15.955], + "x0": 29, + "k": 1.92979E-3}, + "03_9": {"scale": SCALE_03_9, + "offset": OFFSET_03_9, + "n": [2556.71, 2558.62], + "a": [-0.578526, -0.581853], + "b": [1.001512, 1.001532], + "btmin": 205.0, + "btmax": 340.0}, + "06_8": {"scale": SCALE_06_8, + "offset": OFFSET_06_8, + "n": [1481.91], + "a": [-0.593903], + "b": [1.001418], + "btmin": 180.0, + "btmax": 340.0}, + "10_7": {"scale": SCALE_10_7, + "offset": OFFSET_10_7, + "n": [934.30, 935.38], + "a": [-0.322585, -0.351889], + "b": [1.001271, 1.001293], + "btmin": 180.0, + "btmax": 340.0}, + "12_0": {"scale": SCALE_12_0, + "offset": OFFSET_12_0, + "n": [837.06, 837.00], + "a": [-0.422571, -0.466954], + "b": [1.001170, 1.001257], + "btmin": 180.0, + "btmax": 340.0} } } @@ -569,12 +569,12 @@ SAMPLING_NS_IR = 112E-6 # Sector definitions. TODO: Add remaining sectors (PACUS, CONUS, ...) -FULL_DISC = 'Full Disc' -NORTH_HEMIS_EAST = 'Northern Hemisphere (GOES-East)' -SOUTH_HEMIS_EAST = 'Southern Hemisphere (GOES-East)' -NORTH_HEMIS_WEST = 'Northern Hemisphere (GOES-West)' -SOUTH_HEMIS_WEST = 'Southern Hemisphere (GOES-West)' -UNKNOWN_SECTOR = 'Unknown' +FULL_DISC = "Full Disc" +NORTH_HEMIS_EAST = "Northern Hemisphere (GOES-East)" +SOUTH_HEMIS_EAST = "Southern Hemisphere (GOES-East)" +NORTH_HEMIS_WEST = "Northern Hemisphere (GOES-West)" +SOUTH_HEMIS_WEST = "Southern Hemisphere (GOES-West)" +UNKNOWN_SECTOR = "Unknown" IR_SECTORS = { (2704, 5208): FULL_DISC, @@ -613,14 +613,14 @@ def __init__(self, filename, filename_info, filetype_info, geo_data=None): self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, - chunks={'xc': CHUNK_SIZE, 'yc': CHUNK_SIZE}) - self.sensor = 'goes_imager' - self.nlines = self.nc.dims['yc'] - self.ncols = self.nc.dims['xc'] + chunks={"xc": CHUNK_SIZE, "yc": CHUNK_SIZE}) + self.sensor = "goes_imager" + self.nlines = self.nc.dims["yc"] + self.ncols = self.nc.dims["xc"] self.platform_name = self._get_platform_name( - self.nc.attrs['Satellite Sensor']) - self.platform_shortname = self.platform_name.replace('-', '').lower() - self.gvar_channel = int(self.nc['bands'].values) + self.nc.attrs["Satellite Sensor"]) + self.platform_shortname = self.platform_name.replace("-", "").lower() + self.gvar_channel = int(self.nc["bands"].values) self.sector = self._get_sector(channel=self.gvar_channel, nlines=self.nlines, ncols=self.ncols) @@ -652,7 +652,7 @@ def ir_sectors(self): @staticmethod def _get_platform_name(ncattr): """Determine name of the platform.""" - match = re.match(r'G-(\d+)', ncattr) + match = re.match(r"G-(\d+)", ncattr) if match: return SPACECRAFTS.get(int(match.groups()[0])) @@ -681,7 +681,7 @@ def _get_earth_mask(lat): Returns: Mask (1=earth, 0=space) """ - logger.debug('Computing earth mask') + logger.debug("Computing earth mask") return np.fabs(lat) <= 90 @staticmethod @@ -695,7 +695,7 @@ def _get_nadir_pixel(earth_mask, sector): nadir row, nadir column """ if sector == FULL_DISC: - logger.debug('Computing nadir pixel') + 
logger.debug("Computing nadir pixel") # The earth is not centered in the image, compute bounding box # of the earth disc first @@ -711,7 +711,7 @@ def _get_nadir_pixel(earth_mask, sector): def _is_yaw_flip(self, lat): """Determine whether the satellite is yaw-flipped ('upside down').""" - logger.debug('Computing yaw flip flag') + logger.debug("Computing yaw flip flag") # In case of yaw-flip the data and coordinates in the netCDF files are # also flipped. Just check whether the latitude increases or decrases # with the line number. @@ -721,7 +721,7 @@ def _is_yaw_flip(self, lat): def _get_area_def_uniform_sampling(self, lon0, channel): """Get area definition with uniform sampling.""" - logger.debug('Computing area definition') + logger.debug("Computing area definition") if lon0 is not None: est = AreaDefEstimator(self.platform_name, channel) return est.get_area_def_with_uniform_sampling(lon0) @@ -730,7 +730,7 @@ def _get_area_def_uniform_sampling(self, lon0, channel): @property def start_time(self): """Start timestamp of the dataset.""" - dt = self.nc['time'].dt + dt = self.nc["time"].dt return datetime(year=int(dt.year), month=int(dt.month), day=int(dt.day), hour=int(dt.hour), minute=int(dt.minute), second=int(dt.second), microsecond=int(dt.microsecond)) @@ -757,7 +757,7 @@ def resolution(self): Returns: Spatial resolution in kilometers """ - return 1000. * self.nc['lineRes'].values + return 1000. * self.nc["lineRes"].values def get_shape(self, key, info): """Get the shape of the data. @@ -772,7 +772,7 @@ def meta(self): """Derive metadata from the coordinates.""" # Use buffered data if available if self._meta is None: - lat = self.geo_data['lat'] + lat = self.geo_data["lat"] earth_mask = self._get_earth_mask(lat) crow, ccol = self._get_nadir_pixel(earth_mask=earth_mask, sector=self.sector) @@ -780,55 +780,55 @@ def meta(self): yaw_flip = self._is_yaw_flip(lat) del lat - lon = self.geo_data['lon'] + lon = self.geo_data["lon"] lon0 = lon.values[crow, ccol] if crow is not None else None area_def_uni = self._get_area_def_uniform_sampling( lon0=lon0, channel=self.gvar_channel) del lon - self._meta = {'earth_mask': earth_mask, - 'yaw_flip': yaw_flip, - 'lat0': lat0, - 'lon0': lon0, - 'nadir_row': crow, - 'nadir_col': ccol, - 'area_def_uni': area_def_uni} + self._meta = {"earth_mask": earth_mask, + "yaw_flip": yaw_flip, + "lat0": lat0, + "lon0": lon0, + "nadir_row": crow, + "nadir_col": ccol, + "area_def_uni": area_def_uni} return self._meta def _counts2radiance(self, counts, coefs, channel): """Convert raw detector counts to radiance.""" - logger.debug('Converting counts to radiance') + logger.debug("Converting counts to radiance") if is_vis_channel(channel): # Since the scanline-detector assignment is unknown, use the average # coefficients for all scanlines. 
- slope = np.array(coefs['slope']).mean() - offset = np.array(coefs['offset']).mean() + slope = np.array(coefs["slope"]).mean() + offset = np.array(coefs["offset"]).mean() return self._viscounts2radiance(counts=counts, slope=slope, offset=offset) - return self._ircounts2radiance(counts=counts, scale=coefs['scale'], - offset=coefs['offset']) + return self._ircounts2radiance(counts=counts, scale=coefs["scale"], + offset=coefs["offset"]) def _calibrate(self, radiance, coefs, channel, calibration): """Convert radiance to reflectance or brightness temperature.""" if is_vis_channel(channel): - if not calibration == 'reflectance': - raise ValueError('Cannot calibrate VIS channel to ' - '{}'.format(calibration)) - return self._calibrate_vis(radiance=radiance, k=coefs['k']) + if not calibration == "reflectance": + raise ValueError("Cannot calibrate VIS channel to " + "{}".format(calibration)) + return self._calibrate_vis(radiance=radiance, k=coefs["k"]) else: - if not calibration == 'brightness_temperature': - raise ValueError('Cannot calibrate IR channel to ' - '{}'.format(calibration)) + if not calibration == "brightness_temperature": + raise ValueError("Cannot calibrate IR channel to " + "{}".format(calibration)) # Since the scanline-detector assignment is unknown, use the average # coefficients for all scanlines. - mean_coefs = {'a': np.array(coefs['a']).mean(), - 'b': np.array(coefs['b']).mean(), - 'n': np.array(coefs['n']).mean(), - 'btmin': coefs['btmin'], - 'btmax': coefs['btmax']} + mean_coefs = {"a": np.array(coefs["a"]).mean(), + "b": np.array(coefs["b"]).mean(), + "n": np.array(coefs["n"]).mean(), + "btmin": coefs["btmin"], + "btmax": coefs["btmax"]} return self._calibrate_ir(radiance=radiance, coefs=mean_coefs) @staticmethod @@ -866,16 +866,16 @@ def _calibrate_ir(radiance, coefs): Returns: Brightness temperature [K] """ - logger.debug('Calibrating to brightness temperature') + logger.debug("Calibrating to brightness temperature") # Compute brightness temperature using inverse Planck formula - n = coefs['n'] + n = coefs["n"] bteff = C2 * n / np.log(1 + C1 * n ** 3 / radiance.where(radiance > 0)) - bt = xr.DataArray(bteff * coefs['b'] + coefs['a']) + bt = xr.DataArray(bteff * coefs["b"] + coefs["a"]) # Apply BT threshold - return bt.where(np.logical_and(bt >= coefs['btmin'], - bt <= coefs['btmax'])) + return bt.where(np.logical_and(bt >= coefs["btmin"], + bt <= coefs["btmax"])) @staticmethod def _viscounts2radiance(counts, slope, offset): @@ -916,7 +916,7 @@ def _calibrate_vis(radiance, k): Returns: Reflectance [%] """ - logger.debug('Calibrating to reflectance') + logger.debug("Calibrating to reflectance") refl = 100 * k * radiance return refl.clip(min=0) @@ -928,28 +928,28 @@ def _update_metadata(self, data, ds_info): # If the file_type attribute is a list and the data is xarray # the concat of the dataset will not work. As the file_type is # not needed this will be popped here. - if 'file_type' in data.attrs: - data.attrs.pop('file_type') + if "file_type" in data.attrs: + data.attrs.pop("file_type") # Metadata discovered from the file. data.attrs.update( - {'platform_name': self.platform_name, - 'sensor': self.sensor, - 'sector': self.sector, - 'orbital_parameters': {'yaw_flip': self.meta['yaw_flip']}} + {"platform_name": self.platform_name, + "sensor": self.sensor, + "sector": self.sector, + "orbital_parameters": {"yaw_flip": self.meta["yaw_flip"]}} ) - if self.meta['lon0'] is not None: + if self.meta["lon0"] is not None: # Attributes only available for full disc images. 
YAML reader # doesn't like it if satellite_* is present but None data.attrs.update( - {'nadir_row': self.meta['nadir_row'], - 'nadir_col': self.meta['nadir_col'], - 'area_def_uniform_sampling': self.meta['area_def_uni']} + {"nadir_row": self.meta["nadir_row"], + "nadir_col": self.meta["nadir_col"], + "area_def_uniform_sampling": self.meta["area_def_uni"]} ) - data.attrs['orbital_parameters'].update( - {'projection_longitude': self.meta['lon0'], - 'projection_latitude': self.meta['lat0'], - 'projection_altitude': ALTITUDE} + data.attrs["orbital_parameters"].update( + {"projection_longitude": self.meta["lon0"], + "projection_latitude": self.meta["lat0"], + "projection_altitude": ALTITUDE} ) def __del__(self): @@ -977,10 +977,10 @@ def available_datasets(self, configured_datasets=None): if is_avail is not None: yield is_avail, ds_info - matches = self.file_type_matches(ds_info['file_type']) - if matches and ds_info.get('resolution') != res: + matches = self.file_type_matches(ds_info["file_type"]) + if matches and ds_info.get("resolution") != res: new_info = ds_info.copy() - new_info['resolution'] = res + new_info["resolution"] = res yield True, new_info elif is_avail is None: yield is_avail, ds_info @@ -989,10 +989,10 @@ def available_datasets(self, configured_datasets=None): def is_vis_channel(channel): """Determine whether the given channel is a visible channel.""" if isinstance(channel, str): - return channel == '00_7' + return channel == "00_7" if isinstance(channel, int): return channel == 1 - raise ValueError('Invalid channel') + raise ValueError("Invalid channel") class GOESNCFileHandler(GOESNCBaseFileHandler): @@ -1008,25 +1008,25 @@ def __init__(self, filename, filename_info, filetype_info): def get_dataset(self, key, info): """Load dataset designated by the given key from file.""" - logger.debug('Reading dataset {}'.format(key['name'])) + logger.debug("Reading dataset {}".format(key["name"])) # Read data from file and calibrate if necessary - if 'longitude' in key['name']: - data = self.geo_data['lon'] - elif 'latitude' in key['name']: - data = self.geo_data['lat'] + if "longitude" in key["name"]: + data = self.geo_data["lon"] + elif "latitude" in key["name"]: + data = self.geo_data["lat"] else: tic = datetime.now() - data = self.calibrate(self.nc['data'].isel(time=0), - calibration=key['calibration'], - channel=key['name']) - logger.debug('Calibration time: {}'.format(datetime.now() - tic)) + data = self.calibrate(self.nc["data"].isel(time=0), + calibration=key["calibration"], + channel=key["name"]) + logger.debug("Calibration time: {}".format(datetime.now() - tic)) # Mask space pixels - data = data.where(self.meta['earth_mask']) + data = data.where(self.meta["earth_mask"]) # Set proper dimension names - data = data.rename({'xc': 'x', 'yc': 'y'}) + data = data.rename({"xc": "x", "yc": "y"}) # Update metadata self._update_metadata(data, ds_info=info) @@ -1040,19 +1040,19 @@ def calibrate(self, counts, calibration, channel): counts = counts / 32. 
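# Worth spelling out, as an assumption inferred from the handler's behaviour:
# the division above is a bit-depth conversion rather than calibration. The
# netCDF files keep the original 10-bit GVAR counts scaled by 32 into the
# 16-bit range, so dividing by 32 restores the 10-bit scale:
assert (1023 * 32) / 32. == 1023.0  # a saturated 10-bit count round-trips intact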
coefs = CALIB_COEFS[self.platform_name][channel] - if calibration == 'counts': + if calibration == "counts": return counts - if calibration in ['radiance', 'reflectance', - 'brightness_temperature']: + if calibration in ["radiance", "reflectance", + "brightness_temperature"]: radiance = self._counts2radiance(counts=counts, coefs=coefs, channel=channel) - if calibration == 'radiance': + if calibration == "radiance": return radiance return self._calibrate(radiance=radiance, coefs=coefs, channel=channel, calibration=calibration) - raise ValueError('Unsupported calibration for channel {}: {}'.format(channel, calibration)) + raise ValueError("Unsupported calibration for channel {}: {}".format(channel, calibration)) class GOESEUMNCFileHandler(GOESNCBaseFileHandler): @@ -1072,20 +1072,20 @@ def __init__(self, filename, filename_info, filetype_info, geo_data): def get_dataset(self, key, info): """Load dataset designated by the given key from file.""" - logger.debug('Reading dataset {}'.format(key['name'])) + logger.debug("Reading dataset {}".format(key["name"])) tic = datetime.now() - data = self.calibrate(self.nc['data'].isel(time=0), - calibration=key['calibration'], - channel=key['name']) - logger.debug('Calibration time: {}'.format(datetime.now() - tic)) + data = self.calibrate(self.nc["data"].isel(time=0), + calibration=key["calibration"], + channel=key["name"]) + logger.debug("Calibration time: {}".format(datetime.now() - tic)) # Mask space pixels - data = data.where(self.meta['earth_mask']) + data = data.where(self.meta["earth_mask"]) # Set proper dimension names - data = data.rename({'xc': 'x', 'yc': 'y'}) - data = data.drop('time') + data = data.rename({"xc": "x", "yc": "y"}) + data = data.drop("time") # Update metadata self._update_metadata(data, ds_info=info) @@ -1098,15 +1098,15 @@ def calibrate(self, data, calibration, channel): is_vis = is_vis_channel(channel) # IR files provide radiances, VIS file provides reflectances - if is_vis and calibration == 'reflectance': + if is_vis and calibration == "reflectance": return data - if not is_vis and calibration == 'radiance': + if not is_vis and calibration == "radiance": return data - if not is_vis and calibration == 'brightness_temperature': + if not is_vis and calibration == "brightness_temperature": return self._calibrate(radiance=data, calibration=calibration, coefs=coefs, channel=channel) - raise ValueError('Unsupported calibration for channel {}: {}' + raise ValueError("Unsupported calibration for channel {}: {}" .format(channel, calibration)) @@ -1120,13 +1120,13 @@ def __init__(self, filename, filename_info, filetype_info): self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, - chunks={'xc': CHUNK_SIZE, 'yc': CHUNK_SIZE}) - self.sensor = 'goes_imager' - self.nlines = self.nc.dims['yc'] - self.ncols = self.nc.dims['xc'] + chunks={"xc": CHUNK_SIZE, "yc": CHUNK_SIZE}) + self.sensor = "goes_imager" + self.nlines = self.nc.dims["yc"] + self.ncols = self.nc.dims["xc"] self.platform_name = GOESNCBaseFileHandler._get_platform_name( - self.nc.attrs['Satellite Sensor']) - self.platform_shortname = self.platform_name.replace('-', '').lower() + self.nc.attrs["Satellite Sensor"]) + self.platform_shortname = self.platform_name.replace("-", "").lower() self._meta = None def __getitem__(self, item): @@ -1135,18 +1135,18 @@ def __getitem__(self, item): def get_dataset(self, key, info): """Load dataset designated by the given key from file.""" - logger.debug('Reading dataset {}'.format(key['name'])) + logger.debug("Reading 
dataset {}".format(key["name"])) # Read data from file and calibrate if necessary - if 'longitude' in key['name']: - data = self.nc['lon'] - elif 'latitude' in key['name']: - data = self.nc['lat'] + if "longitude" in key["name"]: + data = self.nc["lon"] + elif "latitude" in key["name"]: + data = self.nc["lat"] else: - raise KeyError("Unknown dataset: {}".format(key['name'])) + raise KeyError("Unknown dataset: {}".format(key["name"])) # Set proper dimension names - data = data.rename({'xc': 'x', 'yc': 'y'}) + data = data.rename({"xc": "x", "yc": "y"}) # Update metadata data.attrs.update(info) @@ -1165,36 +1165,36 @@ class GOESCoefficientReader(object): """Read GOES Imager calibration coefficients from NOAA reference HTMLs.""" gvar_channels = { - 'GOES-8': {'00_7': 1, '03_9': 2, '06_8': 3, '10_7': 4, '12_0': 5}, - 'GOES-9': {'00_7': 1, '03_9': 2, '06_8': 3, '10_7': 4, '12_0': 5}, - 'GOES-10': {'00_7': 1, '03_9': 2, '06_8': 3, '10_7': 4, '12_0': 5}, - 'GOES-11': {'00_7': 1, '03_9': 2, '06_8': 3, '10_7': 4, '12_0': 5}, - 'GOES-12': {'00_7': 1, '03_9': 2, '06_5': 3, '10_7': 4, '13_3': 6}, - 'GOES-13': {'00_7': 1, '03_9': 2, '06_5': 3, '10_7': 4, '13_3': 6}, - 'GOES-14': {'00_7': 1, '03_9': 2, '06_5': 3, '10_7': 4, '13_3': 6}, - 'GOES-15': {'00_7': 1, '03_9': 2, '06_5': 3, '10_7': 4, '13_3': 6}, + "GOES-8": {"00_7": 1, "03_9": 2, "06_8": 3, "10_7": 4, "12_0": 5}, + "GOES-9": {"00_7": 1, "03_9": 2, "06_8": 3, "10_7": 4, "12_0": 5}, + "GOES-10": {"00_7": 1, "03_9": 2, "06_8": 3, "10_7": 4, "12_0": 5}, + "GOES-11": {"00_7": 1, "03_9": 2, "06_8": 3, "10_7": 4, "12_0": 5}, + "GOES-12": {"00_7": 1, "03_9": 2, "06_5": 3, "10_7": 4, "13_3": 6}, + "GOES-13": {"00_7": 1, "03_9": 2, "06_5": 3, "10_7": 4, "13_3": 6}, + "GOES-14": {"00_7": 1, "03_9": 2, "06_5": 3, "10_7": 4, "13_3": 6}, + "GOES-15": {"00_7": 1, "03_9": 2, "06_5": 3, "10_7": 4, "13_3": 6}, } ir_tables = { - 'GOES-8': '2-1', - 'GOES-9': '2-2', - 'GOES-10': '2-3', - 'GOES-11': '2-4', - 'GOES-12': '2-5a', - 'GOES-13': '2-6', - 'GOES-14': '2-7c', - 'GOES-15': '2-8b' + "GOES-8": "2-1", + "GOES-9": "2-2", + "GOES-10": "2-3", + "GOES-11": "2-4", + "GOES-12": "2-5a", + "GOES-13": "2-6", + "GOES-14": "2-7c", + "GOES-15": "2-8b" } vis_tables = { - 'GOES-8': 'Table 1.', - 'GOES-9': 'Table 1.', - 'GOES-10': 'Table 2.', - 'GOES-11': 'Table 3.', - 'GOES-12': 'Table 4.', - 'GOES-13': 'Table 5.', - 'GOES-14': 'Table 6.', - 'GOES-15': 'Table 7.' + "GOES-8": "Table 1.", + "GOES-9": "Table 1.", + "GOES-10": "Table 2.", + "GOES-11": "Table 3.", + "GOES-12": "Table 4.", + "GOES-13": "Table 5.", + "GOES-14": "Table 6.", + "GOES-15": "Table 7." } def __init__(self, ir_url, vis_url): @@ -1217,13 +1217,13 @@ def _load_url_or_file(self, url): except (MissingSchema, requests.HTTPError): # Not a valid URL, is it a file? 
try: - return open(url, mode='r') + return open(url, mode="r") except IOError: - raise ValueError('Invalid URL or file: {}'.format(url)) + raise ValueError("Invalid URL or file: {}".format(url)) def get_coefs(self, platform, channel): """Get the coefs.""" - if channel == '00_7': + if channel == "00_7": return self._get_vis_coefs(platform=platform) return self._get_ir_coefs(platform=platform, channel=channel) @@ -1236,27 +1236,27 @@ def _get_ir_coefs(self, platform, channel): # Extract scale and offset for conversion counts->radiance from # Table 1-1 (same for all platforms, only depends on the channel) gvar_channel = self.gvar_channels[platform][channel] - table11 = self._get_table(root=self.ir_html, heading='Table 1-1', - heading_type='h3') + table11 = self._get_table(root=self.ir_html, heading="Table 1-1", + heading_type="h3") for row in table11: if int(row[0]) == gvar_channel: - coefs['scale'] = self._float(row[1]) - coefs['offset'] = self._float(row[2]) + coefs["scale"] = self._float(row[1]) + coefs["offset"] = self._float(row[2]) # Extract n,a,b (radiance -> BT) from the coefficient table for the # given platform table = self._get_table(root=self.ir_html, heading=self.ir_tables[platform], - heading_type='h3') - channel_regex = re.compile('^{}(?:/[a,b])?$'.format(gvar_channel)) + heading_type="h3") + channel_regex = re.compile("^{}(?:/[a,b])?$".format(gvar_channel)) for row in table: if channel_regex.match(row[0]): # Extract coefficients. Detector (a) always comes before (b) # in the table so that simply appending preserves the order. - coefs['n'].append(self._float(row[1])) - coefs['a'].append(self._float(row[2])) - coefs['b'].append(self._float(row[3])) + coefs["n"].append(self._float(row[1])) + coefs["a"].append(self._float(row[2])) + coefs["b"].append(self._float(row[3])) return coefs @@ -1266,28 +1266,28 @@ def _get_vis_coefs(self, platform): # Find calibration table table = self._get_table(root=self.vis_html, heading=self.vis_tables[platform], - heading_type='p') + heading_type="p") # Extract values coefs = defaultdict(list) - if platform in ('GOES-8', 'GOES-9'): + if platform in ("GOES-8", "GOES-9"): # GOES 8&9 coefficients are in the same table - col = 1 if platform == 'GOES-8' else 2 - coefs['slope'].append(self._float(table[1][col])) - coefs['x0'] = self._float(table[2][col]) - coefs['offset'].append(self._float(table[3][col])) - coefs['k'] = self._float(table[4][col]) + col = 1 if platform == "GOES-8" else 2 + coefs["slope"].append(self._float(table[1][col])) + coefs["x0"] = self._float(table[2][col]) + coefs["offset"].append(self._float(table[3][col])) + coefs["k"] = self._float(table[4][col]) else: # k and x0 appear in the first row only - coefs['slope'].append(self._float(table[0][1])) - coefs['x0'] = self._float(table[0][2]) - coefs['k'] = self._float(table[0][4]) - coefs['offset'].append(self._float(table[0][3])) + coefs["slope"].append(self._float(table[0][1])) + coefs["x0"] = self._float(table[0][2]) + coefs["k"] = self._float(table[0][4]) + coefs["offset"].append(self._float(table[0][3])) # Remaining rows for row in table[1:]: - coefs['slope'].append(self._float(row[1])) - coefs['offset'].append(self._float(row[2])) + coefs["slope"].append(self._float(row[1])) + coefs["offset"].append(self._float(row[2])) return coefs @@ -1296,7 +1296,7 @@ def _get_table(self, root, heading, heading_type, ): headings = [h for h in root.find_all(heading_type) if heading in h.text] if not headings: - raise ValueError('Cannot find a coefficient table matching text ' + raise 
ValueError("Cannot find a coefficient table matching text " '"{}"'.format(heading)) if len(headings) > 1: raise ValueError('Found multiple headings matching text "{}"' @@ -1305,14 +1305,14 @@ def _get_table(self, root, heading, heading_type, ): # Copy items to a list of lists tab = list() - for row in table.find_all('tr'): - cols = row.find_all('td') + for row in table.find_all("tr"): + cols = row.find_all("td") if cols: tab.append([c.text for c in cols]) return tab def _denoise(self, string): - return string.replace('\n', '').replace(' ', '') + return string.replace("\n", "").replace(" ", "") def _float(self, string): """Convert string to float. @@ -1320,11 +1320,11 @@ def _float(self, string): Take care of numbers in exponential format """ string = self._denoise(string) - exp_match = re.match(r'^[-.\d]+x10-(\d)$', string) + exp_match = re.match(r"^[-.\d]+x10-(\d)$", string) if exp_match: exp = int(exp_match.groups()[0]) fac = 10 ** -exp - string = string.replace('x10-{}'.format(exp), '') + string = string.replace("x10-{}".format(exp), "") else: fac = 1 @@ -1355,10 +1355,10 @@ def test_coefs(ir_url, vis_url): for cname in coefs_expected.keys(): if not np.allclose(coefs[cname], coefs_expected[cname]): raise ValueError( - 'Coefficient {} for {} channel {} does not match the ' - 'reference'.format(cname, platform, channel)) + "Coefficient {} for {} channel {} does not match the " + "reference".format(cname, platform, channel)) - logger.info('Coefficients OK') + logger.info("Coefficients OK") return True @@ -1384,12 +1384,12 @@ def get_area_def_with_uniform_sampling(self, projection_longitude): def _get_projection(self, projection_longitude): return { - 'a': EQUATOR_RADIUS, - 'b': POLE_RADIUS, - 'lon_0': projection_longitude, - 'h': ALTITUDE, - 'proj': 'geos', - 'units': 'm' + "a": EQUATOR_RADIUS, + "b": POLE_RADIUS, + "lon_0": projection_longitude, + "h": ALTITUDE, + "proj": "geos", + "units": "m" } def _get_area_extent_at_max_scan_angle(self, proj_dict): @@ -1398,9 +1398,9 @@ def _get_area_extent_at_max_scan_angle(self, proj_dict): def _get_max_scan_angle(self, proj_dict): dummy_area = pyresample.geometry.AreaDefinition( - area_id='dummy', - proj_id='dummy', - description='dummy', + area_id="dummy", + proj_id="dummy", + description="dummy", projection=proj_dict, width=2, height=2, @@ -1427,8 +1427,8 @@ def _get_uniform_pixel_size(self): def _create_area_def(self, projection, area_extent, shape): width, height = shape return pyresample.geometry.AreaDefinition( - area_id='goes_geos_uniform', - proj_id='goes_geos_uniform', + area_id="goes_geos_uniform", + proj_id="goes_geos_uniform", description=self._get_area_description(), projection=projection, width=width, @@ -1437,6 +1437,6 @@ def _create_area_def(self, projection, area_extent, shape): ) def _get_area_description(self): - return '{} geostationary projection (uniform sampling)'.format( + return "{} geostationary projection (uniform sampling)".format( self.platform_name ) diff --git a/satpy/readers/gpm_imerg.py b/satpy/readers/gpm_imerg.py index 3a68f8a9bb..7bc65ac4c6 100644 --- a/satpy/readers/gpm_imerg.py +++ b/satpy/readers/gpm_imerg.py @@ -49,34 +49,34 @@ def __init__(self, filename, filename_info, filetype_info): @property def start_time(self): """Find the start time from filename info.""" - return datetime(self.finfo['date'].year, - self.finfo['date'].month, - self.finfo['date'].day, - self.finfo['start_time'].hour, - self.finfo['start_time'].minute, - self.finfo['start_time'].second) + return datetime(self.finfo["date"].year, + 
self.finfo["date"].month, + self.finfo["date"].day, + self.finfo["start_time"].hour, + self.finfo["start_time"].minute, + self.finfo["start_time"].second) @property def end_time(self): """Find the end time from filename info.""" - return datetime(self.finfo['date'].year, - self.finfo['date'].month, - self.finfo['date'].day, - self.finfo['end_time'].hour, - self.finfo['end_time'].minute, - self.finfo['end_time'].second) + return datetime(self.finfo["date"].year, + self.finfo["date"].month, + self.finfo["date"].day, + self.finfo["end_time"].hour, + self.finfo["end_time"].minute, + self.finfo["end_time"].second) def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" - file_key = ds_info.get('file_key', dataset_id['name']) - dsname = 'Grid/' + file_key + file_key = ds_info.get("file_key", dataset_id["name"]) + dsname = "Grid/" + file_key data = self.get(dsname) data = data.squeeze().transpose() if data.ndim >= 2: - data = data.rename({data.dims[-2]: 'y', data.dims[-1]: 'x'}) + data = data.rename({data.dims[-2]: "y", data.dims[-1]: "x"}) data.data = da.flip(data.data, axis=0) - fill = data.attrs['_FillValue'] + fill = data.attrs["_FillValue"] data = data.where(data != fill) for key in list(data.attrs.keys()): @@ -89,8 +89,8 @@ def get_dataset(self, dataset_id, ds_info): def get_area_def(self, dsid): """Create area definition from the gridded lat/lon values.""" - lats = self.__getitem__('Grid/lat').values - lons = self.__getitem__('Grid/lon').values + lats = self.__getitem__("Grid/lat").values + lons = self.__getitem__("Grid/lon").values width = lons.shape[0] height = lats.shape[0] @@ -103,8 +103,8 @@ def get_area_def(self, dsid): area_extent = (lower_left_x, lower_left_y, upper_right_x, upper_right_y) description = "IMERG GPM Equirectangular Projection" - area_id = 'imerg' - proj_id = 'equirectangular' - proj_dict = {'proj': 'longlat', 'datum': 'WGS84', 'ellps': 'WGS84', } + area_id = "imerg" + proj_id = "equirectangular" + proj_dict = {"proj": "longlat", "datum": "WGS84", "ellps": "WGS84", } area_def = AreaDefinition(area_id, description, proj_id, proj_dict, width, height, area_extent, ) return area_def diff --git a/satpy/readers/grib.py b/satpy/readers/grib.py index 2d78792f2b..dadccce77a 100644 --- a/satpy/readers/grib.py +++ b/satpy/readers/grib.py @@ -41,7 +41,7 @@ CHUNK_SIZE = get_legacy_chunk_size() CF_UNITS = { - 'none': '1', + "none": "1", } @@ -60,46 +60,46 @@ def __init__(self, filename, filename_info, filetype_info): first_msg = grib_file.message(1) last_msg = grib_file.message(grib_file.messages) start_time = self._convert_datetime( - first_msg, 'validityDate', 'validityTime') + first_msg, "validityDate", "validityTime") end_time = self._convert_datetime( - last_msg, 'validityDate', 'validityTime') + last_msg, "validityDate", "validityTime") self._start_time = start_time self._end_time = end_time - if 'keys' not in filetype_info: + if "keys" not in filetype_info: self._analyze_messages(grib_file) self._idx = None else: - self._create_dataset_ids(filetype_info['keys']) + self._create_dataset_ids(filetype_info["keys"]) self._idx = pygrib.index(self.filename, - *filetype_info['keys'].keys()) + *filetype_info["keys"].keys()) except (RuntimeError, KeyError): raise IOError("Unknown GRIB file format: {}".format(self.filename)) def _analyze_messages(self, grib_file): grib_file.seek(0) for idx, msg in enumerate(grib_file): - msg_id = DataQuery(name=msg['shortName'], - level=msg['level'], + msg_id = DataQuery(name=msg["shortName"], + level=msg["level"], modifiers=tuple()) ds_info 
= { - 'message': idx + 1, - 'name': msg['shortName'], - 'level': msg['level'], - 'file_type': self.filetype_info['file_type'], + "message": idx + 1, + "name": msg["shortName"], + "level": msg["level"], + "file_type": self.filetype_info["file_type"], } self._msg_datasets[msg_id] = ds_info def _create_dataset_ids(self, keys): from itertools import product - ordered_keys = [k for k in keys.keys() if 'id_key' in keys[k]] - for id_vals in product(*[keys[k]['values'] for k in ordered_keys]): - id_keys = [keys[k]['id_key'] for k in ordered_keys] + ordered_keys = [k for k in keys.keys() if "id_key" in keys[k]] + for id_vals in product(*[keys[k]["values"] for k in ordered_keys]): + id_keys = [keys[k]["id_key"] for k in ordered_keys] msg_info = dict(zip(ordered_keys, id_vals)) ds_info = dict(zip(id_keys, id_vals)) msg_id = DataQuery(**ds_info) ds_info = msg_id.to_dict() ds_info.update(msg_info) - ds_info['file_type'] = self.filetype_info['file_type'] + ds_info["file_type"] = self.filetype_info["file_type"] self._msg_datasets[msg_id] = ds_info @staticmethod @@ -137,11 +137,11 @@ def available_datasets(self, configured_datasets=None): def _get_message(self, ds_info): with pygrib.open(self.filename) as grib_file: - if 'message' in ds_info: - msg_num = ds_info['message'] + if "message" in ds_info: + msg_num = ds_info["message"] msg = grib_file.message(msg_num) else: - msg_keys = self.filetype_info['keys'].keys() + msg_keys = self.filetype_info["keys"].keys() msg = self._idx(**{k: ds_info[k] for k in msg_keys})[0] return msg @@ -154,7 +154,7 @@ def _correct_cyl_minmax_xy(proj_params, min_lon, min_lat, max_lon, max_lat): # wrap around # make 180 longitude the prime meridian # assuming we are going from 0 to 360 longitude - proj_params['pm'] = 180 + proj_params["pm"] = 180 proj = Proj(**proj_params) # recompute x/y extents with this new projection min_x, min_y = proj(min_lon, min_lat) @@ -173,9 +173,9 @@ def _get_cyl_minmax_lonlat(lons, lats): return min_lon, min_lat, max_lon, max_lat def _get_cyl_area_info(self, msg, proj_params): - proj_params['proj'] = 'eqc' - lons = msg['distinctLongitudes'] - lats = msg['distinctLatitudes'] + proj_params["proj"] = "eqc" + lons = msg["distinctLongitudes"] + lats = msg["distinctLatitudes"] shape = (lats.shape[0], lons.shape[0]) minmax_lonlat = self._get_cyl_minmax_lonlat(lons, lats) proj_params, minmax_xy = self._correct_cyl_minmax_xy(proj_params, *minmax_lonlat) @@ -208,14 +208,14 @@ def _get_corner_lonlat(proj_params, lons, lats): # if we have longitudes over 180, assume 0-360 if (lons > 180).any(): # make 180 longitude the prime meridian - proj_params['pm'] = 180 + proj_params["pm"] = 180 return proj_params, lons, lats def _get_area_info(self, msg, proj_params): lats, lons = msg.latlons() shape = lats.shape - scans_positively = (msg.valid_key('jScansPositively') and - msg['jScansPositively'] == 1) + scans_positively = (msg.valid_key("jScansPositively") and + msg["jScansPositively"] == 1) proj_params, lons, lats = self._get_corner_lonlat( proj_params, lons, lats) minmax_xy = self._get_corner_xy(proj_params, lons, lats, scans_positively) @@ -225,7 +225,7 @@ def _get_area_info(self, msg, proj_params): @staticmethod def _correct_proj_params_over_prime_meridian(proj_params): # correct for longitudes over 180 - for lon_param in ['lon_0', 'lon_1', 'lon_2']: + for lon_param in ["lon_0", "lon_1", "lon_2"]: if proj_params.get(lon_param, 0) > 180: proj_params[lon_param] -= 360 return proj_params @@ -234,16 +234,16 @@ def _area_def_from_msg(self, msg): proj_params = 
msg.projparams.copy() proj_params = self._correct_proj_params_over_prime_meridian(proj_params) - if proj_params['proj'] in ('cyl', 'eqc'): + if proj_params["proj"] in ("cyl", "eqc"): # eqc projection that goes from 0 to 360 proj_params, shape, extents = self._get_cyl_area_info(msg, proj_params) else: proj_params, shape, extents = self._get_area_info(msg, proj_params) return geometry.AreaDefinition( - 'on-the-fly grib area', - 'on-the-fly grib area', - 'on-the-fly grib area', + "on-the-fly grib area", + "on-the-fly grib area", + "on-the-fly grib area", proj_params, shape[1], shape[0], @@ -264,41 +264,41 @@ def get_area_def(self, dsid): def get_metadata(self, msg, ds_info): """Get metadata.""" - model_time = self._convert_datetime(msg, 'dataDate', - 'dataTime') - start_time = self._convert_datetime(msg, 'validityDate', - 'validityTime') + model_time = self._convert_datetime(msg, "dataDate", + "dataTime") + start_time = self._convert_datetime(msg, "validityDate", + "validityTime") end_time = start_time try: - center_description = msg['centreDescription'] + center_description = msg["centreDescription"] except (RuntimeError, KeyError): center_description = None key_dicts = { - 'shortName': 'shortName', - 'long_name': 'name', - 'pressureUnits': 'pressureUnits', - 'typeOfLevel': 'typeOfLevel', - 'standard_name': 'cfName', - 'units': 'units', - 'modelName': 'modelName', - 'valid_min': 'minimum', - 'valid_max': 'maximum', - 'sensor': 'modelName'} + "shortName": "shortName", + "long_name": "name", + "pressureUnits": "pressureUnits", + "typeOfLevel": "typeOfLevel", + "standard_name": "cfName", + "units": "units", + "modelName": "modelName", + "valid_min": "minimum", + "valid_max": "maximum", + "sensor": "modelName"} ds_info.update({ - 'filename': self.filename, - 'model_time': model_time, - 'centreDescription': center_description, - 'start_time': start_time, - 'end_time': end_time, - 'platform_name': 'unknown'}) + "filename": self.filename, + "model_time": model_time, + "centreDescription": center_description, + "start_time": start_time, + "end_time": end_time, + "platform_name": "unknown"}) for key in key_dicts: if key_dicts[key] in msg.keys(): ds_info[key] = msg[key_dicts[key]] else: - ds_info[key] = 'unknown' + ds_info[key] = "unknown" return ds_info @@ -306,9 +306,9 @@ def get_dataset(self, dataset_id, ds_info): """Read a GRIB message into an xarray DataArray.""" msg = self._get_message(ds_info) ds_info = self.get_metadata(msg, ds_info) - fill = msg['missingValue'] + fill = msg["missingValue"] data = msg.values.astype(np.float32) - if msg.valid_key('jScansPositively') and msg['jScansPositively'] == 1: + if msg.valid_key("jScansPositively") and msg["jScansPositively"] == 1: data = data[::-1] if isinstance(data, np.ma.MaskedArray): @@ -318,4 +318,4 @@ def get_dataset(self, dataset_id, ds_info): data[data == fill] = np.nan data = da.from_array(data, chunks=CHUNK_SIZE) - return xr.DataArray(data, attrs=ds_info, dims=('y', 'x')) + return xr.DataArray(data, attrs=ds_info, dims=("y", "x")) diff --git a/satpy/readers/hdf4_utils.py b/satpy/readers/hdf4_utils.py index acc86fd64d..90fbc6cccc 100644 --- a/satpy/readers/hdf4_utils.py +++ b/satpy/readers/hdf4_utils.py @@ -47,9 +47,9 @@ def from_sds(var, *args, **kwargs): """Create a dask array from a SD dataset.""" - var.__dict__['dtype'] = np.dtype(HTYPE_TO_DTYPE[var.info()[3]]) + var.__dict__["dtype"] = np.dtype(HTYPE_TO_DTYPE[var.info()[3]]) shape = var.info()[2] - var.__dict__['shape'] = shape if isinstance(shape, (tuple, list)) else tuple(shape) + 
var.__dict__["shape"] = shape if isinstance(shape, (tuple, list)) else tuple(shape) return da.from_array(var, *args, **kwargs) @@ -61,7 +61,7 @@ def __init__(self, filename, filename_info, filetype_info): super(HDF4FileHandler, self).__init__(filename, filename_info, filetype_info) self.file_content = {} file_handle = SD(self.filename, SDC.READ) - self._collect_attrs('', file_handle.attributes()) + self._collect_attrs("", file_handle.attributes()) for k in file_handle.datasets().keys(): self.collect_metadata(k, file_handle.select(k)) del file_handle @@ -94,7 +94,7 @@ def _open_xarray_dataset(self, val, chunks=CHUNK_SIZE): """Read the band in blocks.""" dask_arr = from_sds(val, chunks=chunks) attrs = val.attributes() - return xr.DataArray(dask_arr, dims=('y', 'x'), + return xr.DataArray(dask_arr, dims=("y", "x"), attrs=attrs) def __getitem__(self, key): diff --git a/satpy/readers/hdf5_utils.py b/satpy/readers/hdf5_utils.py index 2a1c8c23bb..428d64e2f1 100644 --- a/satpy/readers/hdf5_utils.py +++ b/satpy/readers/hdf5_utils.py @@ -43,14 +43,14 @@ def __init__(self, filename, filename_info, filetype_info): self._attrs_cache = {} try: - file_handle = h5py.File(self.filename, 'r') + file_handle = h5py.File(self.filename, "r") except IOError: LOG.exception( - 'Failed reading file %s. Possibly corrupted file', self.filename) + "Failed reading file %s. Possibly corrupted file", self.filename) raise file_handle.visititems(self.collect_metadata) - self._collect_attrs('', file_handle.attrs) + self._collect_attrs("", file_handle.attrs) file_handle.close() def _collect_attrs(self, name, attrs): @@ -73,7 +73,7 @@ def _collect_attrs(self, name, attrs): def get_reference(self, name, key): """Get reference.""" - with h5py.File(self.filename, 'r') as hf: + with h5py.File(self.filename, "r") as hf: return self._get_reference(hf, hf[name].attrs[key]) def _get_reference(self, hf, ref): @@ -97,11 +97,11 @@ def __getitem__(self, key): val = self.file_content[key] if isinstance(val, h5py.Dataset): # these datasets are closed and inaccessible when the file is closed, need to reopen - dset = h5py.File(self.filename, 'r')[key] + dset = h5py.File(self.filename, "r")[key] dset_data = da.from_array(dset, chunks=CHUNK_SIZE) attrs = self._attrs_cache.get(key, dset.attrs) if dset.ndim == 2: - return xr.DataArray(dset_data, dims=['y', 'x'], attrs=attrs) + return xr.DataArray(dset_data, dims=["y", "x"], attrs=attrs) return xr.DataArray(dset_data, attrs=attrs) return val diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index f776256e89..91affbade6 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -119,7 +119,7 @@ def _load_all_metadata_attributes(self): @classmethod def read_mda(cls, attribute): """Read the EOS metadata.""" - line_iterator = iter(attribute.split('\n')) + line_iterator = iter(attribute.split("\n")) return cls._read_mda(line_iterator) @classmethod @@ -129,18 +129,18 @@ def _read_mda(cls, lines, element=None): for line in lines: if not line: continue - if line == 'END': + if line == "END": return current_dict key, val = cls._split_line(line, lines) - if key in ['GROUP', 'OBJECT']: + if key in ["GROUP", "OBJECT"]: current_dict[val] = cls._read_mda(lines, val) - elif key in ['END_GROUP', 'END_OBJECT']: + elif key in ["END_GROUP", "END_OBJECT"]: if val != element: raise SyntaxError("Non-matching end-tag") return current_dict - elif key in ['CLASS', 'NUM_VAL']: + elif key in ["CLASS", "NUM_VAL"]: pass else: current_dict[key] = val @@ -149,7 +149,7 @@ def 
_read_mda(cls, lines, element=None): @classmethod def _split_line(cls, line, lines): - key, val = line.split('=') + key, val = line.split("=") key = key.strip() val = val.strip() try: @@ -164,8 +164,8 @@ def metadata_platform_name(self): """Platform name from the internal file metadata.""" try: # Example: 'Terra' or 'Aqua' - return self.metadata['INVENTORYMETADATA']['ASSOCIATEDPLATFORMINSTRUMENTSENSOR'][ - 'ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER']['ASSOCIATEDPLATFORMSHORTNAME']['VALUE'] + return self.metadata["INVENTORYMETADATA"]["ASSOCIATEDPLATFORMINSTRUMENTSENSOR"][ + "ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER"]["ASSOCIATEDPLATFORMSHORTNAME"]["VALUE"] except KeyError: return self._platform_name_from_filename() @@ -181,9 +181,9 @@ def _platform_name_from_filename(self): def start_time(self): """Get the start time of the dataset.""" try: - date = (self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEBEGINNINGDATE']['VALUE'] + ' ' + - self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEBEGINNINGTIME']['VALUE']) - return datetime.strptime(date, '%Y-%m-%d %H:%M:%S.%f') + date = (self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEBEGINNINGDATE"]["VALUE"] + " " + + self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEBEGINNINGTIME"]["VALUE"]) + return datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f") except KeyError: return self._start_time_from_filename() @@ -194,9 +194,9 @@ def _start_time_from_filename(self): def end_time(self): """Get the end time of the dataset.""" try: - date = (self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEENDINGDATE']['VALUE'] + ' ' + - self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEENDINGTIME']['VALUE']) - return datetime.strptime(date, '%Y-%m-%d %H:%M:%S.%f') + date = (self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEENDINGDATE"]["VALUE"] + " " + + self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEENDINGTIME"]["VALUE"]) + return datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f") except KeyError: return self.start_time @@ -216,7 +216,7 @@ def load_dataset(self, dataset_name, is_category=False): dataset = self._read_dataset_in_file(dataset_name) dask_arr = from_sds(dataset, chunks=CHUNK_SIZE) - dims = ('y', 'x') if dask_arr.ndim == 2 else None + dims = ("y", "x") if dask_arr.ndim == 2 else None data = xr.DataArray(dask_arr, dims=dims, attrs=dataset.attributes()) data = self._scale_and_mask_data_array(data, is_category=is_category) @@ -236,8 +236,8 @@ def _scale_and_mask_data_array(self, data, is_category=False): """ good_mask, new_fill = self._get_good_data_mask(data, is_category=is_category) - scale_factor = data.attrs.pop('scale_factor', None) - add_offset = data.attrs.pop('add_offset', None) + scale_factor = data.attrs.pop("scale_factor", None) + add_offset = data.attrs.pop("add_offset", None) # don't scale category products, even though scale_factor may equal 1 # we still need to convert integers to floats if scale_factor is not None and not is_category: @@ -260,15 +260,15 @@ def _get_good_data_mask(self, data_arr, is_category=False): # no need to mask, the fill value is already what it needs to be return None, None new_fill = np.nan - data_arr.attrs.pop('_FillValue', None) + data_arr.attrs.pop("_FillValue", None) good_mask = data_arr != fill_value return good_mask, new_fill def _add_satpy_metadata(self, data_id: DataID, data_arr: xr.DataArray): """Add metadata that is specific to Satpy.""" new_attrs = { - 'platform_name': 'EOS-' + self.metadata_platform_name, - 'sensor': 'modis', + "platform_name": 
"EOS-" + self.metadata_platform_name, + "sensor": "modis", } res = data_id["resolution"] @@ -293,12 +293,12 @@ class HDFEOSGeoReader(HDFEOSBaseFileReader): # list of geographical datasets handled by the georeader # mapping to the default variable name if not specified in YAML DATASET_NAMES = { - 'longitude': 'Longitude', - 'latitude': 'Latitude', - 'satellite_azimuth_angle': ('SensorAzimuth', 'Sensor_Azimuth'), - 'satellite_zenith_angle': ('SensorZenith', 'Sensor_Zenith'), - 'solar_azimuth_angle': ('SolarAzimuth', 'SolarAzimuth'), - 'solar_zenith_angle': ('SolarZenith', 'Solar_Zenith'), + "longitude": "Longitude", + "latitude": "Latitude", + "satellite_azimuth_angle": ("SensorAzimuth", "Sensor_Azimuth"), + "satellite_zenith_angle": ("SensorZenith", "Sensor_Zenith"), + "solar_azimuth_angle": ("SolarAzimuth", "SolarAzimuth"), + "solar_zenith_angle": ("SolarZenith", "Solar_Zenith"), } def __init__(self, filename, filename_info, filetype_info, **kwargs): @@ -325,8 +325,8 @@ def read_geo_resolution(metadata): @staticmethod def _geo_resolution_for_l1b(metadata): - ds = metadata['INVENTORYMETADATA']['COLLECTIONDESCRIPTIONCLASS']['SHORTNAME']['VALUE'] - if ds.endswith('D03') or ds.endswith('HKM') or ds.endswith('QKM'): + ds = metadata["INVENTORYMETADATA"]["COLLECTIONDESCRIPTIONCLASS"]["SHORTNAME"]["VALUE"] + if ds.endswith("D03") or ds.endswith("HKM") or ds.endswith("QKM"): return 1000 # 1km files have 5km geolocation usually return 5000 @@ -336,10 +336,10 @@ def _geo_resolution_for_l2_l1b(metadata): # data files probably have this level 2 files # this does not work for L1B 1KM data files because they are listed # as 1KM data but the geo data inside is at 5km - latitude_dim = metadata['SwathStructure']['SWATH_1']['DimensionMap']['DimensionMap_2']['GeoDimension'] - resolution_regex = re.compile(r'(?P\d+)(km|KM)') + latitude_dim = metadata["SwathStructure"]["SWATH_1"]["DimensionMap"]["DimensionMap_2"]["GeoDimension"] + resolution_regex = re.compile(r"(?P\d+)(km|KM)") resolution_match = resolution_regex.search(latitude_dim) - return int(resolution_match.group('resolution')) * 1000 + return int(resolution_match.group("resolution")) * 1000 @property def geo_resolution(self): @@ -365,7 +365,7 @@ def get_interpolated_dataset(self, name1, name2, resolution, offset=0): result1 = self._load_ds_by_name(name1) result2 = self._load_ds_by_name(name2) - offset try: - sensor_zenith = self._load_ds_by_name('satellite_zenith_angle') + sensor_zenith = self._load_ds_by_name("satellite_zenith_angle") except KeyError: # no sensor zenith angle, do "simple" interpolation sensor_zenith = None @@ -380,11 +380,11 @@ def get_interpolated_dataset(self, name1, name2, resolution, offset=0): def get_dataset(self, dataset_id: DataID, dataset_info: dict) -> xr.DataArray: """Get the geolocation dataset.""" # Name of the dataset as it appears in the HDF EOS file - in_file_dataset_name = dataset_info.get('file_key') + in_file_dataset_name = dataset_info.get("file_key") # Name of the dataset in the YAML file - dataset_name = dataset_id['name'] + dataset_name = dataset_id["name"] # Resolution asked - resolution = dataset_id['resolution'] + resolution = dataset_id["resolution"] if in_file_dataset_name is not None: # if the YAML was configured with a specific name use that data = self.load_dataset(in_file_dataset_name) @@ -401,21 +401,21 @@ def get_dataset(self, dataset_id: DataID, dataset_info: dict) -> xr.DataArray: # The data must be interpolated logger.debug("Loading %s", dataset_name) - if dataset_name in ['longitude', 'latitude']: 
- self.get_interpolated_dataset('longitude', 'latitude', + if dataset_name in ["longitude", "latitude"]: + self.get_interpolated_dataset("longitude", "latitude", resolution) - elif dataset_name in ['satellite_azimuth_angle', 'satellite_zenith_angle']: + elif dataset_name in ["satellite_azimuth_angle", "satellite_zenith_angle"]: # Sensor dataset names differs between L1b and L2 products - self.get_interpolated_dataset('satellite_azimuth_angle', 'satellite_zenith_angle', + self.get_interpolated_dataset("satellite_azimuth_angle", "satellite_zenith_angle", resolution, offset=90) - elif dataset_name in ['solar_azimuth_angle', 'solar_zenith_angle']: + elif dataset_name in ["solar_azimuth_angle", "solar_zenith_angle"]: # Sensor dataset names differs between L1b and L2 products - self.get_interpolated_dataset('solar_azimuth_angle', 'solar_zenith_angle', + self.get_interpolated_dataset("solar_azimuth_angle", "solar_zenith_angle", resolution, offset=90) data = self.cache[dataset_name, resolution] - for key in ('standard_name', 'units'): + for key in ("standard_name", "units"): if key in dataset_info: data.attrs[key] = dataset_info[key] self._add_satpy_metadata(dataset_id, data) diff --git a/satpy/readers/hrit_base.py b/satpy/readers/hrit_base.py index c8b2287653..bf53d84a65 100644 --- a/satpy/readers/hrit_base.py +++ b/satpy/readers/hrit_base.py @@ -48,41 +48,41 @@ from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.seviri_base import dec10216 -logger = logging.getLogger('hrit_base') +logger = logging.getLogger("hrit_base") -common_hdr = np.dtype([('hdr_id', 'u1'), - ('record_length', '>u2')]) +common_hdr = np.dtype([("hdr_id", "u1"), + ("record_length", ">u2")]) -primary_header = np.dtype([('file_type', 'u1'), - ('total_header_length', '>u4'), - ('data_field_length', '>u8')]) +primary_header = np.dtype([("file_type", "u1"), + ("total_header_length", ">u4"), + ("data_field_length", ">u8")]) -image_structure = np.dtype([('number_of_bits_per_pixel', 'u1'), - ('number_of_columns', '>u2'), - ('number_of_lines', '>u2'), - ('compression_flag_for_data', 'u1')]) +image_structure = np.dtype([("number_of_bits_per_pixel", "u1"), + ("number_of_columns", ">u2"), + ("number_of_lines", ">u2"), + ("compression_flag_for_data", "u1")]) -image_navigation = np.dtype([('projection_name', 'S32'), - ('cfac', '>i4'), - ('lfac', '>i4'), - ('coff', '>i4'), - ('loff', '>i4')]) +image_navigation = np.dtype([("projection_name", "S32"), + ("cfac", ">i4"), + ("lfac", ">i4"), + ("coff", ">i4"), + ("loff", ">i4")]) -image_data_function = np.dtype([('function', '|S1')]) +image_data_function = np.dtype([("function", "|S1")]) -annotation_header = np.dtype([('annotation', '|S1')]) +annotation_header = np.dtype([("annotation", "|S1")]) -timestamp_record = np.dtype([('cds_p_field', 'u1'), - ('timestamp', time_cds_short)]) +timestamp_record = np.dtype([("cds_p_field", "u1"), + ("timestamp", time_cds_short)]) -ancillary_text = np.dtype([('ancillary', '|S1')]) +ancillary_text = np.dtype([("ancillary", "|S1")]) -key_header = np.dtype([('key', '|S1')]) +key_header = np.dtype([("key", "|S1")]) -base_text_headers = {image_data_function: 'image_data_function', - annotation_header: 'annotation_header', - ancillary_text: 'ancillary_text', - key_header: 'key_header'} +base_text_headers = {image_data_function: "image_data_function", + annotation_header: "annotation_header", + ancillary_text: "ancillary_text", + key_header: "key_header"} base_hdr_map = {0: primary_header, 1: image_structure, @@ -97,7 +97,7 @@ def 
get_xritdecompress_cmd(): """Find a valid binary for the xRITDecompress command.""" - cmd = os.environ.get('XRIT_DECOMPRESS_PATH', None) + cmd = os.environ.get("XRIT_DECOMPRESS_PATH", None) if not cmd: raise IOError("XRIT_DECOMPRESS_PATH is not defined (complete path to xRITDecompress)") @@ -112,20 +112,20 @@ def get_xritdecompress_cmd(): def get_xritdecompress_outfile(stdout): """Analyse the output of the xRITDecompress command call and return the file.""" - outfile = b'' + outfile = b"" for line in stdout: try: - k, v = [x.strip() for x in line.split(b':', 1)] + k, v = [x.strip() for x in line.split(b":", 1)] except ValueError: break - if k == b'Decompressed file': + if k == b"Decompressed file": outfile = v break return outfile -def decompress(infile, outdir='.'): +def decompress(infile, outdir="."): """Decompress an XRIT data file and return the path to the decompressed file. It expect to find Eumetsat's xRITDecompress through the environment variable @@ -149,7 +149,7 @@ def decompress(infile, outdir='.'): if not outfile: raise IOError("xrit_decompress '%s', failed, no output file is generated" % infile) - return os.path.join(outdir, outfile.decode('utf-8')) + return os.path.join(outdir, outfile.decode("utf-8")) def get_header_id(fp): @@ -175,20 +175,20 @@ def __init__(self, filename, filename_info, filetype_info, hdr_info): self.mda = {} self.hdr_info = hdr_info self._get_hd(self.hdr_info) - self._start_time = filename_info['start_time'] + self._start_time = filename_info["start_time"] self._end_time = self._start_time + timedelta(minutes=15) def _get_hd(self, hdr_info): """Open the file, read and get the basic file header info and set the mda dictionary.""" hdr_map, variable_length_headers, text_headers = hdr_info - with utils.generic_open(self.filename, mode='rb') as fp: + with utils.generic_open(self.filename, mode="rb") as fp: total_header_length = 16 while fp.tell() < total_header_length: hdr_id = get_header_id(fp) - the_type = hdr_map[hdr_id['hdr_id']] + the_type = hdr_map[hdr_id["hdr_id"]] if the_type in variable_length_headers: - field_length = int((hdr_id['record_length'] - 3) / + field_length = int((hdr_id["record_length"] - 3) / the_type.itemsize) current_hdr = get_header_content(fp, the_type, field_length) key = variable_length_headers[the_type] @@ -199,7 +199,7 @@ def _get_hd(self, hdr_info): else: self.mda[key] = current_hdr elif the_type in text_headers: - field_length = int((hdr_id['record_length'] - 3) / + field_length = int((hdr_id["record_length"] - 3) / the_type.itemsize) char = list(the_type.fields.values())[0][0].char new_type = np.dtype(char + str(field_length)) @@ -210,16 +210,16 @@ def _get_hd(self, hdr_info): self.mda.update( dict(zip(current_hdr.dtype.names, current_hdr))) - total_header_length = self.mda['total_header_length'] + total_header_length = self.mda["total_header_length"] - self.mda.setdefault('number_of_bits_per_pixel', 10) + self.mda.setdefault("number_of_bits_per_pixel", 10) - self.mda['projection_parameters'] = {'a': 6378169.00, - 'b': 6356583.80, - 'h': 35785831.00, + self.mda["projection_parameters"] = {"a": 6378169.00, + "b": 6356583.80, + "h": 35785831.00, # FIXME: find a reasonable SSP - 'SSP_longitude': 0.0} - self.mda['orbital_parameters'] = {} + "SSP_longitude": 0.0} + self.mda["orbital_parameters"] = {} @property def observation_start_time(self): @@ -247,7 +247,7 @@ def get_dataset(self, key, info): data = self.read_band(key, info) # Convert to xarray - xdata = xr.DataArray(data, dims=['y', 'x']) + xdata = xr.DataArray(data, 
dims=["y", "x"]) return xdata @@ -282,34 +282,34 @@ def get_area_extent(self, size, offsets, factors, platform_height): def get_area_def(self, dsid): """Get the area definition of the band.""" - cfac = np.int32(self.mda['cfac']) - lfac = np.int32(self.mda['lfac']) - coff = np.float32(self.mda['coff']) - loff = np.float32(self.mda['loff']) - - a = self.mda['projection_parameters']['a'] - b = self.mda['projection_parameters']['b'] - h = self.mda['projection_parameters']['h'] - lon_0 = self.mda['projection_parameters']['SSP_longitude'] - nlines = int(self.mda['number_of_lines']) - ncols = int(self.mda['number_of_columns']) + cfac = np.int32(self.mda["cfac"]) + lfac = np.int32(self.mda["lfac"]) + coff = np.float32(self.mda["coff"]) + loff = np.float32(self.mda["loff"]) + + a = self.mda["projection_parameters"]["a"] + b = self.mda["projection_parameters"]["b"] + h = self.mda["projection_parameters"]["h"] + lon_0 = self.mda["projection_parameters"]["SSP_longitude"] + nlines = int(self.mda["number_of_lines"]) + ncols = int(self.mda["number_of_columns"]) area_extent = self.get_area_extent((nlines, ncols), (loff, coff), (lfac, cfac), h) - proj_dict = {'a': float(a), - 'b': float(b), - 'lon_0': float(lon_0), - 'h': float(h), - 'proj': 'geos', - 'units': 'm'} + proj_dict = {"a": float(a), + "b": float(b), + "lon_0": float(lon_0), + "h": float(h), + "proj": "geos", + "units": "m"} area = geometry.AreaDefinition( - 'some_area_name', + "some_area_name", "On-the-fly area", - 'geosmsg', + "geosmsg", proj_dict, ncols, nlines, @@ -326,14 +326,14 @@ def read_band(self, key, info): dtype=output_dtype) def _get_output_info(self): - bpp = self.mda['number_of_bits_per_pixel'] + bpp = self.mda["number_of_bits_per_pixel"] if bpp in [10, 16]: output_dtype = np.uint16 elif bpp == 8: output_dtype = np.uint8 else: raise ValueError(f"Unexpected number of bits per pixel: {bpp}") - output_shape = (self.mda['number_of_lines'], self.mda['number_of_columns']) + output_shape = (self.mda["number_of_lines"], self.mda["number_of_columns"]) return output_dtype, output_shape @@ -361,12 +361,12 @@ def __init__(self, filename, mda): """Set up the segment.""" self.filename = filename self.mda = mda - self.lines = mda['number_of_lines'] - self.cols = mda['number_of_columns'] - self.bpp = mda['number_of_bits_per_pixel'] - self.compressed = mda['compression_flag_for_data'] == 1 - self.offset = mda['total_header_length'] - self.zipped = os.fspath(filename).endswith('.bz2') + self.lines = mda["number_of_lines"] + self.cols = mda["number_of_columns"] + self.bpp = mda["number_of_bits_per_pixel"] + self.compressed = mda["compression_flag_for_data"] == 1 + self.offset = mda["total_header_length"] + self.zipped = os.fspath(filename).endswith(".bz2") def read_data(self): """Read the data.""" @@ -410,7 +410,7 @@ def _get_input_info(self): total_bits = int(self.lines) * int(self.cols) * int(self.bpp) input_shape = int(np.ceil(total_bits / 8.)) if self.bpp == 16: - input_dtype = '>u2' + input_dtype = ">u2" input_shape //= 2 elif self.bpp in [8, 10]: input_dtype = np.uint8 diff --git a/satpy/readers/hrit_jma.py b/satpy/readers/hrit_jma.py index 4b06a3d707..2a85a95cd4 100644 --- a/satpy/readers/hrit_jma.py +++ b/satpy/readers/hrit_jma.py @@ -123,33 +123,33 @@ ) from satpy.readers.utils import get_geostationary_mask -logger = logging.getLogger('hrit_jma') +logger = logging.getLogger("hrit_jma") # JMA implementation: -key_header = np.dtype([('key_number', 'u4')]) +key_header = np.dtype([("key_number", "u4")]) -segment_identification = 
np.dtype([('image_segm_seq_no', '>u1'), - ('total_no_image_segm', '>u1'), - ('line_no_image_segm', '>u2')]) +segment_identification = np.dtype([("image_segm_seq_no", ">u1"), + ("total_no_image_segm", ">u1"), + ("line_no_image_segm", ">u2")]) -encryption_key_message = np.dtype([('station_number', '>u2')]) +encryption_key_message = np.dtype([("station_number", ">u2")]) -image_compensation_information = np.dtype([('compensation', '|S1')]) +image_compensation_information = np.dtype([("compensation", "|S1")]) -image_observation_time = np.dtype([('times', '|S1')]) +image_observation_time = np.dtype([("times", "|S1")]) -image_quality_information = np.dtype([('quality', '|S1')]) +image_quality_information = np.dtype([("quality", "|S1")]) jma_variable_length_headers: dict = {} -jma_text_headers = {image_data_function: 'image_data_function', - annotation_header: 'annotation_header', - ancillary_text: 'ancillary_text', - image_compensation_information: 'image_compensation_information', - image_observation_time: 'image_observation_time', - image_quality_information: 'image_quality_information'} +jma_text_headers = {image_data_function: "image_data_function", + annotation_header: "annotation_header", + ancillary_text: "ancillary_text", + image_compensation_information: "image_compensation_information", + image_observation_time: "image_observation_time", + image_quality_information: "image_quality_information"} jma_hdr_map = base_hdr_map.copy() jma_hdr_map.update({7: key_header, @@ -161,45 +161,45 @@ }) -cuc_time = np.dtype([('coarse', 'u1', (4, )), - ('fine', 'u1', (3, ))]) +cuc_time = np.dtype([("coarse", "u1", (4, )), + ("fine", "u1", (3, ))]) -time_cds_expanded = np.dtype([('days', '>u2'), - ('milliseconds', '>u4'), - ('microseconds', '>u2'), - ('nanoseconds', '>u2')]) +time_cds_expanded = np.dtype([("days", ">u2"), + ("milliseconds", ">u4"), + ("microseconds", ">u2"), + ("nanoseconds", ">u2")]) FULL_DISK = 1 NORTH_HEMIS = 2 SOUTH_HEMIS = 3 UNKNOWN_AREA = -1 -AREA_NAMES = {FULL_DISK: {'short': 'FLDK', 'long': 'Full Disk'}, - NORTH_HEMIS: {'short': 'NH', 'long': 'Northern Hemisphere'}, - SOUTH_HEMIS: {'short': 'SH', 'long': 'Southern Hemisphere'}, - UNKNOWN_AREA: {'short': 'UNKNOWN', 'long': 'Unknown Area'}} - -MTSAT1R = 'MTSAT-1R' -MTSAT2 = 'MTSAT-2' -HIMAWARI8 = 'Himawari-8' -UNKNOWN_PLATFORM = 'Unknown Platform' +AREA_NAMES = {FULL_DISK: {"short": "FLDK", "long": "Full Disk"}, + NORTH_HEMIS: {"short": "NH", "long": "Northern Hemisphere"}, + SOUTH_HEMIS: {"short": "SH", "long": "Southern Hemisphere"}, + UNKNOWN_AREA: {"short": "UNKNOWN", "long": "Unknown Area"}} + +MTSAT1R = "MTSAT-1R" +MTSAT2 = "MTSAT-2" +HIMAWARI8 = "Himawari-8" +UNKNOWN_PLATFORM = "Unknown Platform" PLATFORMS = { - 'GEOS(140.00)': MTSAT1R, - 'GEOS(140.25)': MTSAT1R, - 'GEOS(140.70)': HIMAWARI8, - 'GEOS(145.00)': MTSAT2, + "GEOS(140.00)": MTSAT1R, + "GEOS(140.25)": MTSAT1R, + "GEOS(140.70)": HIMAWARI8, + "GEOS(145.00)": MTSAT2, } SENSORS = { - MTSAT1R: 'jami', - MTSAT2: 'mtsat2_imager', - HIMAWARI8: 'ahi' + MTSAT1R: "jami", + MTSAT2: "mtsat2_imager", + HIMAWARI8: "ahi" } def mjd2datetime64(mjd): """Convert Modified Julian Day (MJD) to datetime64.""" - epoch = np.datetime64('1858-11-17 00:00') + epoch = np.datetime64("1858-11-17 00:00") day2usec = 24 * 3600 * 1E6 - mjd_usec = (mjd * day2usec).astype(np.int64).astype('timedelta64[us]') + mjd_usec = (mjd * day2usec).astype(np.int64).astype("timedelta64[us]") return epoch + mjd_usec @@ -242,20 +242,20 @@ def __init__(self, filename, filename_info, filetype_info, use_acquisition_time_ 
jma_text_headers)) self._use_acquisition_time_as_start_time = use_acquisition_time_as_start_time - self.mda['segment_sequence_number'] = self.mda['image_segm_seq_no'] - self.mda['planned_end_segment_number'] = self.mda['total_no_image_segm'] - self.mda['planned_start_segment_number'] = 1 + self.mda["segment_sequence_number"] = self.mda["image_segm_seq_no"] + self.mda["planned_end_segment_number"] = self.mda["total_no_image_segm"] + self.mda["planned_start_segment_number"] = 1 - items = self.mda['image_data_function'].decode().split('\r') - if items[0].startswith('$HALFTONE'): + items = self.mda["image_data_function"].decode().split("\r") + if items[0].startswith("$HALFTONE"): self.calibration_table = [] for item in items[1:]: - if item == '': + if item == "": continue - key, value = item.split(':=') - if key.startswith('_UNIT'): - self.mda['unit'] = item.split(':=')[1] - elif key.startswith('_NAME'): + key, value = item.split(":=") + if key.startswith("_UNIT"): + self.mda["unit"] = item.split(":=")[1] + elif key.startswith("_NAME"): pass elif key.isdigit(): key = int(key) @@ -264,12 +264,12 @@ def __init__(self, filename, filename_info, filetype_info, use_acquisition_time_ self.calibration_table = np.array(self.calibration_table) - self.projection_name = self.mda['projection_name'].decode().strip() - sublon = float(self.projection_name.split('(')[1][:-1]) - self.mda['projection_parameters']['SSP_longitude'] = sublon + self.projection_name = self.mda["projection_name"].decode().strip() + sublon = float(self.projection_name.split("(")[1][:-1]) + self.mda["projection_parameters"]["SSP_longitude"] = sublon self.platform = self._get_platform() - self.is_segmented = self.mda['segment_sequence_number'] > 0 - self.area_id = filename_info.get('area', UNKNOWN_AREA) + self.is_segmented = self.mda["segment_sequence_number"] > 0 + self.area_id = filename_info.get("area", UNKNOWN_AREA) if self.area_id not in AREA_NAMES: self.area_id = UNKNOWN_AREA self.area = self._get_area_def() @@ -304,7 +304,7 @@ def _get_platform(self): try: return PLATFORMS[self.projection_name] except KeyError: - logger.error('Unable to determine platform: Unknown projection ' + logger.error("Unable to determine platform: Unknown projection " 'name "{}"'.format(self.projection_name)) return UNKNOWN_PLATFORM @@ -320,8 +320,8 @@ def _check_sensor_platform_consistency(self, sensor): """ ref_sensor = SENSORS.get(self.platform, None) if ref_sensor and not sensor == ref_sensor: - logger.error('Sensor-Platform mismatch: {} is not a payload ' - 'of {}. Did you choose the correct reader?' + logger.error("Sensor-Platform mismatch: {} is not a payload " + "of {}. Did you choose the correct reader?" .format(sensor, self.platform)) def _get_line_offset(self): @@ -335,41 +335,41 @@ def _get_line_offset(self): because this is what get_geostationary_area_extent() expects. 
""" # Get line offset from the file - nlines = int(self.mda['number_of_lines']) - loff = np.float32(self.mda['loff']) + nlines = int(self.mda["number_of_lines"]) + loff = np.float32(self.mda["loff"]) # Adapt it to the current segment if self.is_segmented: # loff in the file specifies the offset of the full disk image # centre (1375/2750 for VIS/IR) - segment_number = self.mda['segment_sequence_number'] - 1 - loff -= (self.mda['total_no_image_segm'] - segment_number - 1) * nlines + segment_number = self.mda["segment_sequence_number"] - 1 + loff -= (self.mda["total_no_image_segm"] - segment_number - 1) * nlines elif self.area_id in (NORTH_HEMIS, SOUTH_HEMIS): # loff in the file specifies the start line of the half disk image # in the full disk image loff = nlines - loff elif self.area_id == UNKNOWN_AREA: - logger.error('Cannot compute line offset for unknown area') + logger.error("Cannot compute line offset for unknown area") return loff def _get_area_def(self): """Get the area definition of the band.""" pdict = { - 'cfac': np.int32(self.mda['cfac']), - 'lfac': np.int32(self.mda['lfac']), - 'coff': np.float32(self.mda['coff']), - 'loff': self._get_line_offset(), - 'ncols': int(self.mda['number_of_columns']), - 'nlines': int(self.mda['number_of_lines']), - 'scandir': 'N2S', - 'a': float(self.mda['projection_parameters']['a']), - 'b': float(self.mda['projection_parameters']['b']), - 'h': float(self.mda['projection_parameters']['h']), - 'ssp_lon': float(self.mda['projection_parameters']['SSP_longitude']), - 'a_name': AREA_NAMES[self.area_id]['short'], - 'a_desc': AREA_NAMES[self.area_id]['long'], - 'p_id': 'geosmsg' + "cfac": np.int32(self.mda["cfac"]), + "lfac": np.int32(self.mda["lfac"]), + "coff": np.float32(self.mda["coff"]), + "loff": self._get_line_offset(), + "ncols": int(self.mda["number_of_columns"]), + "nlines": int(self.mda["number_of_lines"]), + "scandir": "N2S", + "a": float(self.mda["projection_parameters"]["a"]), + "b": float(self.mda["projection_parameters"]["b"]), + "h": float(self.mda["projection_parameters"]["h"]), + "ssp_lon": float(self.mda["projection_parameters"]["SSP_longitude"]), + "a_name": AREA_NAMES[self.area_id]["short"], + "a_desc": AREA_NAMES[self.area_id]["long"], + "p_id": "geosmsg" } area_extent = get_area_extent(pdict) return get_area_definition(pdict, area_extent) @@ -385,22 +385,22 @@ def get_dataset(self, key, info): # Filenames of segmented data is identical for MTSAT-1R, MTSAT-2 # and Himawari-8/9. Make sure we have the correct reader for the data # at hand. 
- self._check_sensor_platform_consistency(info['sensor']) + self._check_sensor_platform_consistency(info["sensor"]) # Calibrate and mask space pixels res = self._mask_space(self.calibrate(res, key["calibration"])) # Add scanline acquisition time - res.coords['acq_time'] = ('y', self.acq_time) - res.coords['acq_time'].attrs['long_name'] = 'Scanline acquisition time' + res.coords["acq_time"] = ("y", self.acq_time) + res.coords["acq_time"].attrs["long_name"] = "Scanline acquisition time" # Update attributes res.attrs.update(info) - res.attrs['platform_name'] = self.platform - res.attrs['orbital_parameters'] = { - 'projection_longitude': float(self.mda['projection_parameters']['SSP_longitude']), - 'projection_latitude': 0., - 'projection_altitude': float(self.mda['projection_parameters']['h'])} + res.attrs["platform_name"] = self.platform + res.attrs["orbital_parameters"] = { + "projection_longitude": float(self.mda["projection_parameters"]["SSP_longitude"]), + "projection_latitude": 0., + "projection_altitude": float(self.mda["projection_parameters"]["h"])} return res @@ -419,17 +419,17 @@ def _get_acq_time(self): Missing timestamps in between are computed using linear interpolation. """ - buf_b = np.frombuffer(self.mda['image_observation_time'], + buf_b = np.frombuffer(self.mda["image_observation_time"], dtype=image_observation_time) # Replace \r by \n before encoding, otherwise encoding will drop all # elements except the last one - buf_s = b''.join(buf_b['times']).replace(b'\r', b'\n').decode() + buf_s = b"".join(buf_b["times"]).replace(b"\r", b"\n").decode() # Split into key:=value pairs; then extract line number and timestamp - splits = buf_s.strip().split('\n') - lines_sparse = [int(s.split(':=')[1]) for s in splits[0::2]] - times_sparse = [float(s.split(':=')[1]) for s in splits[1::2]] + splits = buf_s.strip().split("\n") + lines_sparse = [int(s.split(":=")[1]) for s in splits[0::2]] + times_sparse = [float(s.split(":=")[1]) for s in splits[1::2]] if self.platform == HIMAWARI8: # Only a couple of timestamps in the header, and only the first @@ -454,9 +454,9 @@ def calibrate(self, data, calibration): """Calibrate the data.""" tic = datetime.now() - if calibration == 'counts': + if calibration == "counts": return data - if calibration == 'radiance': + if calibration == "radiance": raise NotImplementedError("Can't calibrate to radiance.") cal = self.calibration_table diff --git a/satpy/readers/hrpt.py b/satpy/readers/hrpt.py index cbde23559c..2a54eed664 100644 --- a/satpy/readers/hrpt.py +++ b/satpy/readers/hrpt.py @@ -48,21 +48,21 @@ AVHRR_CHANNEL_NAMES = ("1", "2", "3a", "3b", "4", "5") -dtype = np.dtype([('frame_sync', '>u2', (6, )), - ('id', [('id', '>u2'), - ('spare', '>u2')]), - ('timecode', '>u2', (4, )), - ('telemetry', [("ramp_calibration", '>u2', (5, )), - ("PRT", '>u2', (3, )), - ("ch3_patch_temp", '>u2'), - ("spare", '>u2'), ]), - ('back_scan', '>u2', (10, 3)), - ('space_data', '>u2', (10, 5)), - ('sync', '>u2'), - ('TIP_data', '>u2', (520, )), - ('spare', '>u2', (127, )), - ('image_data', '>u2', (2048, 5)), - ('aux_sync', '>u2', (100, ))]) +dtype = np.dtype([("frame_sync", ">u2", (6, )), + ("id", [("id", ">u2"), + ("spare", ">u2")]), + ("timecode", ">u2", (4, )), + ("telemetry", [("ramp_calibration", ">u2", (5, )), + ("PRT", ">u2", (3, )), + ("ch3_patch_temp", ">u2"), + ("spare", ">u2"), ]), + ("back_scan", ">u2", (10, 3)), + ("space_data", ">u2", (10, 5)), + ("sync", ">u2"), + ("TIP_data", ">u2", (520, )), + ("spare", ">u2", (127, )), + ("image_data", ">u2", (2048, 5)), + 
("aux_sync", ">u2", (100, ))]) def time_seconds(tc_array, year): @@ -78,9 +78,9 @@ def time_seconds(tc_array, year): word = tc_array[:, 3] msecs += word & 1023 return (np.datetime64( - str(year) + '-01-01T00:00:00Z', 's') + - msecs[:].astype('timedelta64[ms]') + - (day - 1)[:].astype('timedelta64[D]')) + str(year) + "-01-01T00:00:00Z", "s") + + msecs[:].astype("timedelta64[ms]") + + (day - 1)[:].astype("timedelta64[D]")) def bfield(array, bit): @@ -111,13 +111,13 @@ def geo_interpolate(lons32km, lats32km): def _get_channel_index(key): """Get the avhrr channel index.""" - avhrr_channel_index = {'1': 0, - '2': 1, - '3a': 2, - '3b': 2, - '4': 3, - '5': 4} - index = avhrr_channel_index[key['name']] + avhrr_channel_index = {"1": 0, + "2": 1, + "3a": 2, + "3b": 2, + "4": 3, + "5": 4} + index = avhrr_channel_index[key["name"]] return index @@ -128,9 +128,9 @@ def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" super(HRPTFile, self).__init__(filename, filename_info, filetype_info) self.channels = {i: None for i in AVHRR_CHANNEL_NAMES} - self.units = {i: 'counts' for i in AVHRR_CHANNEL_NAMES} + self.units = {i: "counts" for i in AVHRR_CHANNEL_NAMES} - self.year = filename_info.get('start_time', datetime.utcnow()).year + self.year = filename_info.get("start_time", datetime.utcnow()).year @cached_property def times(self): @@ -151,7 +151,7 @@ def read(self): """Read the file.""" with open(self.filename, "rb") as fp_: data = np.memmap(fp_, dtype=dtype, mode="r") - if np.all(np.median(data['frame_sync'], axis=0) > 1024): + if np.all(np.median(data["frame_sync"], axis=0) > 1024): data = self._data.newbyteorder() return data @@ -163,32 +163,32 @@ def platform_name(self): def get_dataset(self, key, info): """Get the dataset.""" attrs = info.copy() - attrs['platform_name'] = self.platform_name + attrs["platform_name"] = self.platform_name - if key['name'] in ['latitude', 'longitude']: + if key["name"] in ["latitude", "longitude"]: data = self._get_navigation_data(key) else: data = self._get_channel_data(key) - result = xr.DataArray(data, dims=['y', 'x'], attrs=attrs) + result = xr.DataArray(data, dims=["y", "x"], attrs=attrs) mask = self._get_ch3_mask_or_true(key) return result.where(mask) def _get_channel_data(self, key): """Get channel data.""" data = da.from_array(self._data["image_data"][:, :, _get_channel_index(key)], chunks=self._chunks) - if key['calibration'] != 'counts': - if key['name'] in ['1', '2', '3a']: + if key["calibration"] != "counts": + if key["name"] in ["1", "2", "3a"]: data = self.calibrate_solar_channel(data, key) - if key['name'] in ['3b', '4', '5']: + if key["name"] in ["3b", "4", "5"]: data = self.calibrate_thermal_channel(data, key) return data def _get_navigation_data(self, key): """Get navigation data.""" lons, lats = self.lons_lats - if key['name'] == 'latitude': + if key["name"] == "latitude": data = da.from_array(lats, chunks=self._chunks) else: data = da.from_array(lons, chunks=self._chunks) @@ -196,9 +196,9 @@ def _get_navigation_data(self, key): def _get_ch3_mask_or_true(self, key): mask = True - if key['name'] == '3a': + if key["name"] == "3a": mask = np.tile(np.logical_not(self._is3b), (2048, 1)).T - elif key['name'] == '3b': + elif key["name"] == "3b": mask = np.tile(self._is3b, (2048, 1)).T return mask @@ -211,7 +211,7 @@ def calibrate_thermal_channel(self, data, key): from pygac.calibration import calibrate_thermal line_numbers = ( np.round((self.times - self.times[-1]) / - np.timedelta64(166666667, 'ns'))).astype(int) + 
np.timedelta64(166666667, "ns"))).astype(int) line_numbers -= line_numbers[0] prt, ict, space = self.telemetry index = _get_channel_index(key) @@ -224,8 +224,8 @@ def calibrate_solar_channel(self, data, key): """Calibrate a solar channel.""" from pygac.calibration import calibrate_solar julian_days = ((np.datetime64(self.start_time) - - np.datetime64(str(self.year) + '-01-01T00:00:00Z')) - / np.timedelta64(1, 'D')) + - np.datetime64(str(self.year) + "-01-01T00:00:00Z")) + / np.timedelta64(1, "D")) data = calibrate_solar(data, _get_channel_index(key), self.year, julian_days, self.calibrator) return data @@ -234,16 +234,16 @@ def calibrate_solar_channel(self, data, key): def calibrator(self): """Create a calibrator for the data.""" from pygac.calibration import Calibrator - pg_spacecraft = ''.join(self.platform_name.split()).lower() + pg_spacecraft = "".join(self.platform_name.split()).lower() return Calibrator(pg_spacecraft) @cached_property def telemetry(self): """Get the telemetry.""" # This isn't converted to dask arrays as it does not work with pygac - prt = np.mean(self._data["telemetry"]['PRT'], axis=1) - ict = np.mean(self._data['back_scan'], axis=1) - space = np.mean(self._data['space_data'][:, :], axis=1) + prt = np.mean(self._data["telemetry"]["PRT"], axis=1) + ict = np.mean(self._data["back_scan"], axis=1) + space = np.mean(self._data["space_data"][:, :], axis=1) return prt, ict, space diff --git a/satpy/readers/hsaf_grib.py b/satpy/readers/hsaf_grib.py index 69361cb648..a041bf0c73 100644 --- a/satpy/readers/hsaf_grib.py +++ b/satpy/readers/hsaf_grib.py @@ -39,7 +39,7 @@ CHUNK_SIZE = get_legacy_chunk_size() CF_UNITS = { - 'none': '1', + "none": "1", } @@ -67,7 +67,7 @@ def __init__(self, filename, filename_info, filetype_info): @staticmethod def _get_datetime(msg): - dtstr = str(msg['dataDate']) + str(msg['dataTime']).zfill(4) + dtstr = str(msg["dataDate"]) + str(msg["dataTime"]).zfill(4) return datetime.strptime(dtstr, "%Y%m%d%H%M") @property @@ -78,19 +78,19 @@ def analysis_time(self): def get_metadata(self, msg): """Get the metadata.""" try: - center_description = msg['centreDescription'] + center_description = msg["centreDescription"] except (RuntimeError, KeyError): center_description = None ds_info = { - 'filename': self.filename, - 'shortName': msg['shortName'], - 'long_name': msg['name'], - 'units': msg['units'], - 'centreDescription': center_description, - 'data_time': self._analysis_time, - 'nx': msg['Nx'], - 'ny': msg['Ny'], - 'projparams': msg.projparams + "filename": self.filename, + "shortName": msg["shortName"], + "long_name": msg["name"], + "units": msg["units"], + "centreDescription": center_description, + "data_time": self._analysis_time, + "nx": msg["Nx"], + "ny": msg["Ny"], + "projparams": msg.projparams } return ds_info @@ -106,28 +106,28 @@ def _get_area_def(self, msg): """Get the area definition of the datasets in the file.""" proj_param = msg.projparams.copy() - Rx = 2 * np.arcsin(1. / msg['NrInRadiusOfEarth']) / msg['dx'] - Ry = 2 * np.arcsin(1. / msg['NrInRadiusOfEarth']) / msg['dy'] + Rx = 2 * np.arcsin(1. / msg["NrInRadiusOfEarth"]) / msg["dx"] + Ry = 2 * np.arcsin(1. 
/ msg["NrInRadiusOfEarth"]) / msg["dy"] - x_0 = - msg['XpInGridLengths'] - x_1 = msg['Nx'] - msg['XpInGridLengths'] - y_0 = (msg['Ny'] - msg['YpInGridLengths']) * -1 - y_1 = msg['YpInGridLengths'] + x_0 = - msg["XpInGridLengths"] + x_1 = msg["Nx"] - msg["XpInGridLengths"] + y_0 = (msg["Ny"] - msg["YpInGridLengths"]) * -1 + y_1 = msg["YpInGridLengths"] - min_x = (x_0 * Rx) * proj_param['h'] - max_x = (x_1 * Rx) * proj_param['h'] + min_x = (x_0 * Rx) * proj_param["h"] + max_x = (x_1 * Rx) * proj_param["h"] - min_y = (y_0 * Ry) * proj_param['h'] - max_y = (y_1 * Ry) * proj_param['h'] + min_y = (y_0 * Ry) * proj_param["h"] + max_y = (y_1 * Ry) * proj_param["h"] area_extent = (min_x, min_y, max_x, max_y) - area = geometry.AreaDefinition('hsaf_region', - 'A region from H-SAF', - 'geos', + area = geometry.AreaDefinition("hsaf_region", + "A region from H-SAF", + "geos", proj_param, - msg['Nx'], - msg['Ny'], + msg["Nx"], + msg["Ny"], area_extent) return area @@ -139,24 +139,24 @@ def _get_message(self, idx): def get_dataset(self, ds_id, ds_info): """Read a GRIB message into an xarray DataArray.""" - if (ds_id['name'] not in self.filename): - raise IOError("File does not contain {} data".format(ds_id['name'])) + if (ds_id["name"] not in self.filename): + raise IOError("File does not contain {} data".format(ds_id["name"])) msg = self._get_message(1) ds_info = self.get_metadata(msg) - ds_info['end_time'] = ds_info['data_time'] + ds_info["end_time"] = ds_info["data_time"] - if (ds_id['name'] == 'h05' or ds_id['name'] == 'h05B'): + if (ds_id["name"] == "h05" or ds_id["name"] == "h05B"): flen = len(self.filename) timedelt = self.filename[flen-10:flen-8] - ds_info['start_time'] = (ds_info['end_time'] - + ds_info["start_time"] = (ds_info["end_time"] - timedelta(hours=int(timedelt))) else: - ds_info['start_time'] = ds_info['end_time'] - fill = msg['missingValue'] + ds_info["start_time"] = ds_info["end_time"] + fill = msg["missingValue"] data = msg.values.astype(np.float32) - if msg.valid_key('jScansPositively') and msg['jScansPositively'] == 1: + if msg.valid_key("jScansPositively") and msg["jScansPositively"] == 1: data = data[::-1] if isinstance(data, np.ma.MaskedArray): @@ -166,4 +166,4 @@ def get_dataset(self, ds_id, ds_info): data[data == fill] = np.nan data = da.from_array(data, chunks=CHUNK_SIZE) - return xr.DataArray(data, attrs=ds_info, dims=('y', 'x')) + return xr.DataArray(data, attrs=ds_info, dims=("y", "x")) diff --git a/satpy/readers/hsaf_h5.py b/satpy/readers/hsaf_h5.py index 73be63b29f..478b91ce2d 100644 --- a/satpy/readers/hsaf_h5.py +++ b/satpy/readers/hsaf_h5.py @@ -42,7 +42,7 @@ def __init__(self, filename, filename_info, filetype_info): super(HSAFFileHandler, self).__init__(filename, filename_info, filetype_info) - self._h5fh = h5py.File(self.filename, 'r') + self._h5fh = h5py.File(self.filename, "r") @property def end_time(self): @@ -52,21 +52,21 @@ def end_time(self): @property def start_time(self): """Get start time.""" - return self.filename_info['sensing_time'] + return self.filename_info["sensing_time"] def _prepare_variable_for_palette(self, dset, ds_info): colormap = np.array(dset) - return xr.DataArray(colormap, attrs=ds_info, dims=('idx', 'RGB')) + return xr.DataArray(colormap, attrs=ds_info, dims=("idx", "RGB")) def get_metadata(self, dset, name): """Get the metadata.""" - ds_info = {'name': name} - if name == 'SC': + ds_info = {"name": name} + if name == "SC": ds_info.update({ - 'filename': self.filename, - 'data_time': self.start_time, - 'nx': dset.shape[1], - 'ny': 
diff --git a/satpy/readers/hsaf_h5.py b/satpy/readers/hsaf_h5.py
index 73be63b29f..478b91ce2d 100644
--- a/satpy/readers/hsaf_h5.py
+++ b/satpy/readers/hsaf_h5.py
@@ -42,7 +42,7 @@ def __init__(self, filename, filename_info, filetype_info):
         super(HSAFFileHandler, self).__init__(filename,
                                               filename_info,
                                               filetype_info)
-        self._h5fh = h5py.File(self.filename, 'r')
+        self._h5fh = h5py.File(self.filename, "r")

     @property
     def end_time(self):
@@ -52,21 +52,21 @@ def end_time(self):
     @property
     def start_time(self):
         """Get start time."""
-        return self.filename_info['sensing_time']
+        return self.filename_info["sensing_time"]

     def _prepare_variable_for_palette(self, dset, ds_info):
         colormap = np.array(dset)
-        return xr.DataArray(colormap, attrs=ds_info, dims=('idx', 'RGB'))
+        return xr.DataArray(colormap, attrs=ds_info, dims=("idx", "RGB"))

     def get_metadata(self, dset, name):
         """Get the metadata."""
-        ds_info = {'name': name}
-        if name == 'SC':
+        ds_info = {"name": name}
+        if name == "SC":
             ds_info.update({
-                'filename': self.filename,
-                'data_time': self.start_time,
-                'nx': dset.shape[1],
-                'ny': dset.shape[0]
+                "filename": self.filename,
+                "data_time": self.start_time,
+                "nx": dset.shape[1],
+                "ny": dset.shape[0]
             })
         return ds_info

@@ -76,7 +76,7 @@ def get_area_def(self, dsid):

         Since it is not available in the HDF5 message,
         using hardcoded one (it's known).
         """
-        if dsid['name'] == 'SC':
+        if dsid["name"] == "SC":
             return self._get_area_def()
         raise NotImplementedError
@@ -109,31 +109,31 @@ def _get_area_def(self):
             units: m

         """
-        fd_def = get_area_def('msg_seviri_fes_3km')
+        fd_def = get_area_def("msg_seviri_fes_3km")
         hsaf_def = fd_def[AREA_Y_OFFSET:AREA_Y_OFFSET+916, AREA_X_OFFSET:AREA_X_OFFSET+1902]

         return hsaf_def

     def _get_dataset(self, ds_name):
-        if ds_name == 'SC_pal':
-            _ds_name = 'colormap'
+        if ds_name == "SC_pal":
+            _ds_name = "colormap"
         else:
             _ds_name = ds_name
         return self._h5fh.get(_ds_name)

     def get_dataset(self, ds_id, ds_info):
         """Read a HDF5 file into an xarray DataArray."""
-        ds = self._get_dataset(ds_id['name'])
-        ds_info = self.get_metadata(ds, ds_id['name'])
+        ds = self._get_dataset(ds_id["name"])
+        ds_info = self.get_metadata(ds, ds_id["name"])

-        if ds_id['name'] == 'SC':
-            ds_info['start_time'] = self.start_time
-            ds_info['data_time'] = self.start_time
-            ds_info['end_time'] = self.end_time
+        if ds_id["name"] == "SC":
+            ds_info["start_time"] = self.start_time
+            ds_info["data_time"] = self.start_time
+            ds_info["end_time"] = self.end_time

             data = da.from_array(ds, chunks=CHUNK_SIZE)
-            return xr.DataArray(data, attrs=ds_info, dims=('y', 'x'))
+            return xr.DataArray(data, attrs=ds_info, dims=("y", "x"))

-        elif ds_id['name'] == 'SC_pal':
+        elif ds_id["name"] == "SC_pal":
             return self._prepare_variable_for_palette(ds, ds_info)
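Rather than computing an extent, this HDF5 handler reuses a known full-disk definition and crops it by slicing: pyresample area definitions support `area[y_slice, x_slice]`, which is exactly what `_get_area_def` relies on. A short sketch (the offsets below are placeholders; the reader defines its own AREA_Y_OFFSET/AREA_X_OFFSET constants):

from satpy.resample import get_area_def

y_off, x_off = 62, 1211          # hypothetical offsets into the full disk
fd_def = get_area_def("msg_seviri_fes_3km")
hsaf_def = fd_def[y_off:y_off + 916, x_off:x_off + 1902]  # rows, cols
print(hsaf_def.shape)            # (916, 1902)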
self["/attr/Orbit_Inclination"], + "Orbit_Number": self["/attr/Orbit_Number"], + "Output_L2B_Filename": self["/attr/Output_L2B_Filename"], + "Production_Date_Time": self["/attr/Production_Date_Time"], + "L2B_Expected_WVC_Rows": self["/attr/L2B_Expected_WVC_Rows"] }) try: - info.update({"L2B_Number_WVC_cells": self['/attr/L2B_Number_WVC_cells']}) + info.update({"L2B_Number_WVC_cells": self["/attr/L2B_Number_WVC_cells"]}) except KeyError: - info.update({"L2B_Expected_WVC_Cells": self['/attr/L2B_Expected_WVC_Cells']}) + info.update({"L2B_Expected_WVC_Cells": self["/attr/L2B_Expected_WVC_Cells"]}) return info def get_metadata(self): """Get the metadata.""" - info = getattr(self, 'attrs', {}) + info = getattr(self, "attrs", {}) info.update({ - "WVC_Size": self['/attr/WVC_Size'], - "HDF_Version_Id": self['/attr/HDF_Version_Id'], - "Instrument_ShorName": self['/attr/Instrument_ShorName'], - "L2A_Inputdata_Version": self['/attr/L2A_Inputdata_Version'], - "L2B_Algorithm_Descriptor": self['/attr/L2B_Algorithm_Descriptor'], - "L2B_Data_Version": self['/attr/L2B_Data_Version'], - "L2B_Processing_Type": self['/attr/L2B_Processing_Type'], - "L2B_Processor_Name": self['/attr/L2B_Processor_Name'], - "L2B_Processor_Version": self['/attr/L2B_Processor_Version'], - "Long_Name": self['/attr/Long_Name'], - "Platform_LongName": self['/attr/Platform_LongName'], - "Platform_ShortName": self['/attr/Platform_ShortName'], - "Platform_Type": self['/attr/Platform_Type'], - "Producer_Agency": self['/attr/Producer_Agency'], - "Producer_Institution": self['/attr/Producer_Institution'], - "Rev_Orbit_Perio": self['/attr/Rev_Orbit_Period'], - "Short_Name": self['/attr/Short_Name'], - "Sigma0_Granularity": self['/attr/Sigma0_Granularity'], + "WVC_Size": self["/attr/WVC_Size"], + "HDF_Version_Id": self["/attr/HDF_Version_Id"], + "Instrument_ShorName": self["/attr/Instrument_ShorName"], + "L2A_Inputdata_Version": self["/attr/L2A_Inputdata_Version"], + "L2B_Algorithm_Descriptor": self["/attr/L2B_Algorithm_Descriptor"], + "L2B_Data_Version": self["/attr/L2B_Data_Version"], + "L2B_Processing_Type": self["/attr/L2B_Processing_Type"], + "L2B_Processor_Name": self["/attr/L2B_Processor_Name"], + "L2B_Processor_Version": self["/attr/L2B_Processor_Version"], + "Long_Name": self["/attr/Long_Name"], + "Platform_LongName": self["/attr/Platform_LongName"], + "Platform_ShortName": self["/attr/Platform_ShortName"], + "Platform_Type": self["/attr/Platform_Type"], + "Producer_Agency": self["/attr/Producer_Agency"], + "Producer_Institution": self["/attr/Producer_Institution"], + "Rev_Orbit_Perio": self["/attr/Rev_Orbit_Period"], + "Short_Name": self["/attr/Short_Name"], + "Sigma0_Granularity": self["/attr/Sigma0_Granularity"], }) return info def get_dataset(self, key, info): """Get the dataset.""" - dims = ['y', 'x'] - if self[key['name']].ndim == 3: - dims = ['y', 'x', 'selection'] - data = self[key['name']] + dims = ["y", "x"] + if self[key["name"]].ndim == 3: + dims = ["y", "x", "selection"] + data = self[key["name"]] if "valid range" in data.attrs: - data.attrs.update({'valid_range': data.attrs.pop('valid range')}) - if key['name'] in 'wvc_row_time': - data = data.rename({data.dims[0]: 'y'}) + data.attrs.update({"valid_range": data.attrs.pop("valid range")}) + if key["name"] in "wvc_row_time": + data = data.rename({data.dims[0]: "y"}) else: dim_map = {curr_dim: new_dim for curr_dim, new_dim in zip(data.dims, dims)} data = data.rename(dim_map) data = self._mask_data(data) data = self._scale_data(data) - if key['name'] in 'wvc_lon': + if 
key["name"] in "wvc_lon": _attrs = data.attrs data = xr.where(data > 180, data - 360., data) data.attrs.update(_attrs) @@ -118,17 +118,17 @@ def get_dataset(self, key, info): data.attrs.update(self.get_metadata()) data.attrs.update(self.get_variable_metadata()) if "Platform_ShortName" in data.attrs: - data.attrs.update({'platform_name': data.attrs['Platform_ShortName']}) + data.attrs.update({"platform_name": data.attrs["Platform_ShortName"]}) return data def _scale_data(self, data): - return data * data.attrs['scale_factor'] + data.attrs['add_offset'] + return data * data.attrs["scale_factor"] + data.attrs["add_offset"] def _mask_data(self, data): _attrs = data.attrs - valid_range = data.attrs['valid_range'] - data = xr.where(data == data.attrs['fill_value'], np.nan, data) + valid_range = data.attrs["valid_range"] + data = xr.where(data == data.attrs["fill_value"], np.nan, data) data = xr.where(data < valid_range[0], np.nan, data) data = xr.where(data > valid_range[1], np.nan, data) data.attrs.update(_attrs) diff --git a/satpy/readers/iasi_l2.py b/satpy/readers/iasi_l2.py index 64a060a789..8280416d8b 100644 --- a/satpy/readers/iasi_l2.py +++ b/satpy/readers/iasi_l2.py @@ -45,33 +45,33 @@ # Epoch for the dates EPOCH = dt.datetime(2000, 1, 1) -SHORT_NAMES = {'M01': 'Metop-B', - 'M02': 'Metop-A', - 'M03': 'Metop-C'} - -DSET_NAMES = {'ozone_mixing_ratio': 'O', - 'ozone_mixing_ratio_quality': 'QO', - 'pressure': 'P', - 'pressure_quality': 'QP', - 'temperature': 'T', - 'temperature_quality': 'QT', - 'water_mixing_ratio': 'W', - 'water_mixing_ratio_quality': 'QW', - 'water_total_column': 'WC', - 'ozone_total_column': 'OC', - 'surface_skin_temperature': 'Ts', - 'surface_skin_temperature_quality': 'QTs', - 'emissivity': 'E', - 'emissivity_quality': 'QE'} - -GEO_NAMES = {'latitude': 'Latitude', - 'longitude': 'Longitude', - 'satellite_azimuth_angle': 'SatAzimuth', - 'satellite_zenith_angle': 'SatZenith', - 'sensing_time': {'day': 'SensingTime_day', - 'msec': 'SensingTime_msec'}, - 'solar_azimuth_angle': 'SunAzimuth', - 'solar_zenith_angle': 'SunZenith'} +SHORT_NAMES = {"M01": "Metop-B", + "M02": "Metop-A", + "M03": "Metop-C"} + +DSET_NAMES = {"ozone_mixing_ratio": "O", + "ozone_mixing_ratio_quality": "QO", + "pressure": "P", + "pressure_quality": "QP", + "temperature": "T", + "temperature_quality": "QT", + "water_mixing_ratio": "W", + "water_mixing_ratio_quality": "QW", + "water_total_column": "WC", + "ozone_total_column": "OC", + "surface_skin_temperature": "Ts", + "surface_skin_temperature_quality": "QTs", + "emissivity": "E", + "emissivity_quality": "QE"} + +GEO_NAMES = {"latitude": "Latitude", + "longitude": "Longitude", + "satellite_azimuth_angle": "SatAzimuth", + "satellite_zenith_angle": "SatZenith", + "sensing_time": {"day": "SensingTime_day", + "msec": "SensingTime_msec"}, + "solar_azimuth_angle": "SunAzimuth", + "solar_zenith_angle": "SunZenith"} LOGGER = logging.getLogger(__name__) @@ -88,51 +88,51 @@ def __init__(self, filename, filename_info, filetype_info): self.finfo = filename_info self.lons = None self.lats = None - self.sensor = 'iasi' + self.sensor = "iasi" self.mda = {} - short_name = filename_info['platform_id'] - self.mda['platform_name'] = SHORT_NAMES.get(short_name, short_name) - self.mda['sensor'] = 'iasi' + short_name = filename_info["platform_id"] + self.mda["platform_name"] = SHORT_NAMES.get(short_name, short_name) + self.mda["sensor"] = "iasi" @property def start_time(self): """Get the start time.""" - return self.finfo['start_time'] + return self.finfo["start_time"] 
diff --git a/satpy/readers/iasi_l2.py b/satpy/readers/iasi_l2.py
index 64a060a789..8280416d8b 100644
--- a/satpy/readers/iasi_l2.py
+++ b/satpy/readers/iasi_l2.py
@@ -45,33 +45,33 @@
 # Epoch for the dates
 EPOCH = dt.datetime(2000, 1, 1)

-SHORT_NAMES = {'M01': 'Metop-B',
-               'M02': 'Metop-A',
-               'M03': 'Metop-C'}
-
-DSET_NAMES = {'ozone_mixing_ratio': 'O',
-              'ozone_mixing_ratio_quality': 'QO',
-              'pressure': 'P',
-              'pressure_quality': 'QP',
-              'temperature': 'T',
-              'temperature_quality': 'QT',
-              'water_mixing_ratio': 'W',
-              'water_mixing_ratio_quality': 'QW',
-              'water_total_column': 'WC',
-              'ozone_total_column': 'OC',
-              'surface_skin_temperature': 'Ts',
-              'surface_skin_temperature_quality': 'QTs',
-              'emissivity': 'E',
-              'emissivity_quality': 'QE'}
-
-GEO_NAMES = {'latitude': 'Latitude',
-             'longitude': 'Longitude',
-             'satellite_azimuth_angle': 'SatAzimuth',
-             'satellite_zenith_angle': 'SatZenith',
-             'sensing_time': {'day': 'SensingTime_day',
-                              'msec': 'SensingTime_msec'},
-             'solar_azimuth_angle': 'SunAzimuth',
-             'solar_zenith_angle': 'SunZenith'}
+SHORT_NAMES = {"M01": "Metop-B",
+               "M02": "Metop-A",
+               "M03": "Metop-C"}
+
+DSET_NAMES = {"ozone_mixing_ratio": "O",
+              "ozone_mixing_ratio_quality": "QO",
+              "pressure": "P",
+              "pressure_quality": "QP",
+              "temperature": "T",
+              "temperature_quality": "QT",
+              "water_mixing_ratio": "W",
+              "water_mixing_ratio_quality": "QW",
+              "water_total_column": "WC",
+              "ozone_total_column": "OC",
+              "surface_skin_temperature": "Ts",
+              "surface_skin_temperature_quality": "QTs",
+              "emissivity": "E",
+              "emissivity_quality": "QE"}
+
+GEO_NAMES = {"latitude": "Latitude",
+             "longitude": "Longitude",
+             "satellite_azimuth_angle": "SatAzimuth",
+             "satellite_zenith_angle": "SatZenith",
+             "sensing_time": {"day": "SensingTime_day",
+                              "msec": "SensingTime_msec"},
+             "solar_azimuth_angle": "SunAzimuth",
+             "solar_zenith_angle": "SunZenith"}

 LOGGER = logging.getLogger(__name__)

@@ -88,51 +88,51 @@ def __init__(self, filename, filename_info, filetype_info):
         self.finfo = filename_info
         self.lons = None
         self.lats = None
-        self.sensor = 'iasi'
+        self.sensor = "iasi"
         self.mda = {}
-        short_name = filename_info['platform_id']
-        self.mda['platform_name'] = SHORT_NAMES.get(short_name, short_name)
-        self.mda['sensor'] = 'iasi'
+        short_name = filename_info["platform_id"]
+        self.mda["platform_name"] = SHORT_NAMES.get(short_name, short_name)
+        self.mda["sensor"] = "iasi"

     @property
     def start_time(self):
         """Get the start time."""
-        return self.finfo['start_time']
+        return self.finfo["start_time"]

     @property
     def end_time(self):
         """Get the end time."""
         end_time = dt.datetime.combine(self.start_time.date(),
-                                       self.finfo['end_time'].time())
+                                       self.finfo["end_time"].time())
         if end_time < self.start_time:
             end_time += dt.timedelta(days=1)
         return end_time

     def get_dataset(self, key, info):
         """Load a dataset."""
-        with h5py.File(self.filename, 'r') as fid:
-            LOGGER.debug('Reading %s.', key['name'])
-            if key['name'] in DSET_NAMES:
+        with h5py.File(self.filename, "r") as fid:
+            LOGGER.debug("Reading %s.", key["name"])
+            if key["name"] in DSET_NAMES:
                 m_data = read_dataset(fid, key)
             else:
                 m_data = read_geo(fid, key)
         m_data.attrs.update(info)
-        m_data.attrs['sensor'] = self.sensor
+        m_data.attrs["sensor"] = self.sensor
         return m_data


 def read_dataset(fid, key):
     """Read dataset."""
-    dsid = DSET_NAMES[key['name']]
+    dsid = DSET_NAMES[key["name"]]
     dset = fid["/PWLR/" + dsid]
     if dset.ndim == 3:
-        dims = ['y', 'x', 'level']
+        dims = ["y", "x", "level"]
     else:
-        dims = ['y', 'x']
+        dims = ["y", "x"]
     data = xr.DataArray(da.from_array(dset[()], chunks=CHUNK_SIZE),
-                        name=key['name'], dims=dims).astype(np.float32)
+                        name=key["name"], dims=dims).astype(np.float32)
     data = xr.where(data > 1e30, np.nan, data)

     dset_attrs = dict(dset.attrs)
@@ -143,9 +143,9 @@ def read_dataset(fid, key):

 def read_geo(fid, key):
     """Read geolocation and related datasets."""
-    dsid = GEO_NAMES[key['name']]
+    dsid = GEO_NAMES[key["name"]]
     add_epoch = False
-    if "time" in key['name']:
+    if "time" in key["name"]:
         days = fid["/L1C/" + dsid["day"]][()]
         msecs = fid["/L1C/" + dsid["msec"]][()]
         data = _form_datetimes(days, msecs)
@@ -155,10 +155,10 @@ def read_geo(fid, key):
         data = fid["/L1C/" + dsid][()]
         dtype = np.float32
     data = xr.DataArray(da.from_array(data, chunks=CHUNK_SIZE),
-                        name=key['name'], dims=['y', 'x']).astype(dtype)
+                        name=key["name"], dims=["y", "x"]).astype(dtype)

     if add_epoch:
-        data.attrs['sensing_time_epoch'] = EPOCH
+        data.attrs["sensing_time_epoch"] = EPOCH

     return data
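GEO_NAMES above splits the sensing time into whole days and milliseconds relative to the 2000-01-01 EPOCH; `_form_datetimes` (defined elsewhere in this module, not shown in this hunk) combines the two. The arithmetic the split implies is plain datetime64 addition — a sketch with hypothetical values, not the module's actual helper:

import numpy as np

EPOCH = np.datetime64("2000-01-01T00:00:00")
days = np.array([8500, 8500])                 # hypothetical SensingTime_day
msecs = np.array([43_200_000, 43_260_000])    # hypothetical SensingTime_msec

# Whole days plus milliseconds since the epoch, one timestamp per pixel.
times = EPOCH + days.astype("timedelta64[D]") + msecs.astype("timedelta64[ms]")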
diff --git a/satpy/readers/iasi_l2_so2_bufr.py b/satpy/readers/iasi_l2_so2_bufr.py
index 1bb1fbf0e0..500c2b29df 100644
--- a/satpy/readers/iasi_l2_so2_bufr.py
+++ b/satpy/readers/iasi_l2_so2_bufr.py
@@ -101,9 +101,9 @@
 from satpy.readers.file_handlers import BaseFileHandler
 from satpy.utils import get_legacy_chunk_size

-logger = logging.getLogger('IASIL2SO2BUFR')
+logger = logging.getLogger("IASIL2SO2BUFR")
 CHUNK_SIZE = get_legacy_chunk_size()

-data_center_dict = {3: 'METOP-1', 4: 'METOP-2', 5: 'METOP-3'}
+data_center_dict = {3: "METOP-1", 4: "METOP-2", 5: "METOP-3"}


 class IASIL2SO2BUFR(BaseFileHandler):
@@ -115,27 +115,27 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs):

         start_time, end_time = self.get_start_end_date()

-        sc_id = self.get_attribute('satelliteIdentifier')
+        sc_id = self.get_attribute("satelliteIdentifier")

         self.metadata = {}
-        self.metadata['start_time'] = start_time
-        self.metadata['end_time'] = end_time
-        self.metadata['SpacecraftName'] = data_center_dict[sc_id]
+        self.metadata["start_time"] = start_time
+        self.metadata["end_time"] = end_time
+        self.metadata["SpacecraftName"] = data_center_dict[sc_id]

     @property
     def start_time(self):
         """Return the start time of data acqusition."""
-        return self.metadata['start_time']
+        return self.metadata["start_time"]

     @property
     def end_time(self):
         """Return the end time of data acquisition."""
-        return self.metadata['end_time']
+        return self.metadata["end_time"]

     @property
     def platform_name(self):
         """Return spacecraft name."""
-        return '{}'.format(self.metadata['SpacecraftName'])
+        return "{}".format(self.metadata["SpacecraftName"])

     def get_start_end_date(self):
         """Get the first and last date from the bufr file."""
@@ -146,13 +146,13 @@ def get_start_end_date(self):
                 bufr = ec.codes_bufr_new_from_file(fh)
                 if bufr is None:
                     break
-                ec.codes_set(bufr, 'unpack', 1)
-                year = ec.codes_get(bufr, 'year')
-                month = ec.codes_get(bufr, 'month')
-                day = ec.codes_get(bufr, 'day')
-                hour = ec.codes_get(bufr, 'hour')
-                minute = ec.codes_get(bufr, 'minute')
-                second = ec.codes_get(bufr, 'second')
+                ec.codes_set(bufr, "unpack", 1)
+                year = ec.codes_get(bufr, "year")
+                month = ec.codes_get(bufr, "month")
+                day = ec.codes_get(bufr, "day")
+                hour = ec.codes_get(bufr, "hour")
+                minute = ec.codes_get(bufr, "minute")
+                second = ec.codes_get(bufr, "second")

                 obs_time = datetime(year=year, month=month, day=day, hour=hour, minute=minute, second=second)

@@ -181,7 +181,7 @@ def get_attribute(self, key):
                 bufr = ec.codes_bufr_new_from_file(fh)
                 if bufr is None:
                     break
-                ec.codes_set(bufr, 'unpack', 1)
+                ec.codes_set(bufr, "unpack", 1)
                 attr = ec.codes_get(bufr, key)
                 ec.codes_release(bufr)

@@ -198,7 +198,7 @@ def get_array(self, key):
                 if bufr is None:
                     break

-                ec.codes_set(bufr, 'unpack', 1)
+                ec.codes_set(bufr, "unpack", 1)
                 values = ec.codes_get_array(
                     bufr, key, float)

@@ -225,12 +225,12 @@ def get_array(self, key):

     def get_dataset(self, dataset_id, dataset_info):
         """Get dataset using the BUFR key in dataset_info."""
-        arr = self.get_array(dataset_info['key'])
-        arr[arr == dataset_info['fill_value']] = np.nan
+        arr = self.get_array(dataset_info["key"])
+        arr[arr == dataset_info["fill_value"]] = np.nan

-        xarr = xr.DataArray(arr, dims=["y", "x"], name=dataset_info['name'])
-        xarr.attrs['sensor'] = 'IASI'
-        xarr.attrs['platform_name'] = self.platform_name
+        xarr = xr.DataArray(arr, dims=["y", "x"], name=dataset_info["name"])
+        xarr.attrs["sensor"] = "IASI"
+        xarr.attrs["platform_name"] = self.platform_name
         xarr.attrs.update(dataset_info)

         return xarr
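All three BUFR helpers above (`get_start_end_date`, `get_attribute`, `get_array`) share one eccodes pattern: pull messages off the open file handle until `codes_bufr_new_from_file` returns None, unpack, read, release. A minimal sketch of that loop for a single key, using only the eccodes calls already present in this handler:

import eccodes as ec

def iter_bufr_values(path, key):
    """Yield `key` from every BUFR message in `path` (sketch)."""
    with open(path, "rb") as fh:
        while True:
            bufr = ec.codes_bufr_new_from_file(fh)
            if bufr is None:
                break                         # no more messages
            try:
                ec.codes_set(bufr, "unpack", 1)   # decode the data section
                yield ec.codes_get(bufr, key)
            finally:
                ec.codes_release(bufr)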
diff --git a/satpy/readers/ici_l1b_nc.py b/satpy/readers/ici_l1b_nc.py
index a4f15c3c35..d6ebea0c56 100644
--- a/satpy/readers/ici_l1b_nc.py
+++ b/satpy/readers/ici_l1b_nc.py
@@ -65,26 +65,26 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs):
             filename, filename_info, filetype_info, auto_maskandscale=True,
         )
         # Read the variables which are required for the calibration
-        measurement = 'data/measurement_data'
-        self._bt_conversion_a = self[f'{measurement}/bt_conversion_a'].values
-        self._bt_conversion_b = self[f'{measurement}/bt_conversion_b'].values
-        self._channel_cw = self[f'{measurement}/centre_wavenumber'].values
+        measurement = "data/measurement_data"
+        self._bt_conversion_a = self[f"{measurement}/bt_conversion_a"].values
+        self._bt_conversion_b = self[f"{measurement}/bt_conversion_b"].values
+        self._channel_cw = self[f"{measurement}/centre_wavenumber"].values
         self._n_samples = self[measurement].n_samples.size
         self._filetype_info = filetype_info
-        self.orthorect = filetype_info.get('orthorect', True)
+        self.orthorect = filetype_info.get("orthorect", True)

     @property
     def start_time(self):
         """Get observation start time."""
         try:
             start_time = datetime.strptime(
-                self['/attr/sensing_start_time_utc'],
-                '%Y%m%d%H%M%S.%f',
+                self["/attr/sensing_start_time_utc"],
+                "%Y%m%d%H%M%S.%f",
             )
         except ValueError:
             start_time = datetime.strptime(
-                self['/attr/sensing_start_time_utc'],
-                '%Y-%m-%d %H:%M:%S.%f',
+                self["/attr/sensing_start_time_utc"],
+                "%Y-%m-%d %H:%M:%S.%f",
             )
         return start_time

@@ -93,25 +93,25 @@ def end_time(self):
         """Get observation end time."""
         try:
             end_time = datetime.strptime(
-                self['/attr/sensing_end_time_utc'],
-                '%Y%m%d%H%M%S.%f',
+                self["/attr/sensing_end_time_utc"],
+                "%Y%m%d%H%M%S.%f",
             )
         except ValueError:
             end_time = datetime.strptime(
-                self['/attr/sensing_end_time_utc'],
-                '%Y-%m-%d %H:%M:%S.%f',
+                self["/attr/sensing_end_time_utc"],
+                "%Y-%m-%d %H:%M:%S.%f",
             )
         return end_time

     @property
     def platform_name(self):
         """Return platform name."""
-        return self['/attr/spacecraft']
+        return self["/attr/spacecraft"]

     @property
     def sensor(self):
         """Return sensor."""
-        return self['/attr/instrument']
+        return self["/attr/instrument"]

     @property
     def ssp_lon(self):
@@ -208,7 +208,7 @@ def _interpolate_geo(
             lons_horn, lats_horn = satint.interpolate()
             lons[:, :, horn] = lons_horn
             lats[:, :, horn] = lats_horn
-        dims = ['y', 'x', third_dim_name]
+        dims = ["y", "x", third_dim_name]
         lon = xr.DataArray(
             lons,
             attrs=longitude.attrs,
@@ -258,16 +258,16 @@ def _interpolate(
         """Interpolate from tie points to pixel points."""
         try:
             if interpolation_type is InterpolationType.SOLAR_ANGLES:
-                var_key1 = self.filetype_info['solar_azimuth']
-                var_key2 = self.filetype_info['solar_zenith']
+                var_key1 = self.filetype_info["solar_azimuth"]
+                var_key2 = self.filetype_info["solar_zenith"]
                 interp_method = self._interpolate_viewing_angle
             elif interpolation_type is InterpolationType.OBSERVATION_ANGLES:
-                var_key1 = self.filetype_info['observation_azimuth']
-                var_key2 = self.filetype_info['observation_zenith']
+                var_key1 = self.filetype_info["observation_azimuth"]
+                var_key2 = self.filetype_info["observation_zenith"]
                 interp_method = self._interpolate_viewing_angle
             else:
-                var_key1 = self.filetype_info['longitude']
-                var_key2 = self.filetype_info['latitude']
+                var_key1 = self.filetype_info["longitude"]
+                var_key2 = self.filetype_info["latitude"]
                 interp_method = self._interpolate_geo
             return interp_method(
                 self[var_key1],
@@ -275,7 +275,7 @@ def _interpolate(
                 self._n_samples,
             )
         except KeyError:
-            logger.warning(f'Datasets for {interpolation_type.name} interpolation not correctly defined in YAML file')  # noqa: E501
+            logger.warning(f"Datasets for {interpolation_type.name} interpolation not correctly defined in YAML file")  # noqa: E501
             return None, None

     @staticmethod
@@ -308,18 +308,18 @@ def _calibrate(self, variable, dataset_info):
             original metadata.

         """
-        calibration_name = dataset_info['calibration']
-        if calibration_name == 'brightness_temperature':
-            chan_index = dataset_info['chan_index']
+        calibration_name = dataset_info["calibration"]
+        if calibration_name == "brightness_temperature":
+            chan_index = dataset_info["chan_index"]
             cw = self._channel_cw[chan_index]
             a = self._bt_conversion_a[chan_index]
             b = self._bt_conversion_b[chan_index]
             calibrated_variable = self._calibrate_bt(variable, cw, a, b)
             calibrated_variable.attrs = variable.attrs
-        elif calibration_name == 'radiance':
+        elif calibration_name == "radiance":
             calibrated_variable = variable
         else:
-            raise ValueError("Unknown calibration %s for dataset %s" % (calibration_name, dataset_info['name']))  # noqa: E501
+            raise ValueError("Unknown calibration %s for dataset %s" % (calibration_name, dataset_info["name"]))  # noqa: E501

         return calibrated_variable

@@ -345,18 +345,18 @@ def _orthorectify(self, variable, orthorect_data_name):
             orthorect_data = orthorect_data.sel({dim: variable[dim]})
             variable += np.degrees(orthorect_data.values / MEAN_EARTH_RADIUS)
         except KeyError:
-            logger.warning('Required dataset %s for orthorectification not available, skipping', orthorect_data_name)  # noqa: E501
+            logger.warning("Required dataset %s for orthorectification not available, skipping", orthorect_data_name)  # noqa: E501
         return variable

     @staticmethod
     def _standardize_dims(variable):
         """Standardize dims to y, x."""
-        if 'n_scan' in variable.dims:
-            variable = variable.rename({'n_scan': 'y'})
-        if 'n_samples' in variable.dims:
-            variable = variable.rename({'n_samples': 'x'})
-        if variable.dims[0] == 'x':
-            variable = variable.transpose('y', 'x')
+        if "n_scan" in variable.dims:
+            variable = variable.rename({"n_scan": "y"})
+        if "n_samples" in variable.dims:
+            variable = variable.rename({"n_samples": "x"})
+        if variable.dims[0] == "x":
+            variable = variable.transpose("y", "x")
         return variable

     def _filter_variable(self, variable, dataset_info):
@@ -385,12 +385,12 @@ def _get_third_dimension_name(variable):
     def _fetch_variable(self, var_key):
         """Fetch variable."""
         if var_key in [
-            'longitude',
-            'latitude',
-            'observation_zenith',
-            'observation_azimuth',
-            'solar_zenith',
-            'solar_azimuth',
+            "longitude",
+            "latitude",
+            "observation_zenith",
+            "observation_azimuth",
+            "solar_zenith",
+            "solar_azimuth",
         ] and getattr(self, var_key) is not None:
             variable = getattr(self, var_key).copy()
         else:
@@ -399,18 +399,18 @@ def _fetch_variable(self, var_key):

     def get_dataset(self, dataset_id, dataset_info):
         """Get dataset using file_key in dataset_info."""
-        var_key = dataset_info['file_key']
-        logger.debug(f'Reading in file to get dataset with key {var_key}.')
+        var_key = dataset_info["file_key"]
+        logger.debug(f"Reading in file to get dataset with key {var_key}.")
         try:
             variable = self._fetch_variable(var_key)
         except KeyError:
-            logger.warning(f'Could not find key {var_key} in NetCDF file, no valid Dataset created')  # noqa: E501
+            logger.warning(f"Could not find key {var_key} in NetCDF file, no valid Dataset created")  # noqa: E501
             return None
         variable = self._filter_variable(variable, dataset_info)
-        if dataset_info.get('calibration') is not None:
+        if dataset_info.get("calibration") is not None:
             variable = self._calibrate(variable, dataset_info)
         if self.orthorect:
-            orthorect_data_name = dataset_info.get('orthorect_data', None)
+            orthorect_data_name = dataset_info.get("orthorect_data", None)
             if orthorect_data_name is not None:
                 variable = self._orthorectify(variable, orthorect_data_name)
         variable = self._manage_attributes(variable, dataset_info)
@@ -420,7 +420,7 @@ def get_dataset(self, dataset_id, dataset_info):

     def _manage_attributes(self, variable, dataset_info):
         """Manage attributes of the dataset."""
-        variable.attrs.setdefault('units', None)
+        variable.attrs.setdefault("units", None)
         variable.attrs.update(dataset_info)
         variable.attrs.update(self._get_global_attributes())
         return variable
@@ -428,21 +428,21 @@ def _manage_attributes(self, variable, dataset_info):
     def _get_global_attributes(self):
         """Create a dictionary of global attributes."""
         return {
-            'filename': self.filename,
-            'start_time': self.start_time,
-            'end_time': self.end_time,
-            'spacecraft_name': self.platform_name,
-            'ssp_lon': self.ssp_lon,
-            'sensor': self.sensor,
-            'filename_start_time': self.filename_info['sensing_start_time'],
-            'filename_end_time': self.filename_info['sensing_end_time'],
-            'platform_name': self.platform_name,
-            'quality_group': self._get_quality_attributes(),
+            "filename": self.filename,
+            "start_time": self.start_time,
+            "end_time": self.end_time,
+            "spacecraft_name": self.platform_name,
+            "ssp_lon": self.ssp_lon,
+            "sensor": self.sensor,
+            "filename_start_time": self.filename_info["sensing_start_time"],
+            "filename_end_time": self.filename_info["sensing_end_time"],
+            "platform_name": self.platform_name,
+            "quality_group": self._get_quality_attributes(),
         }

     def _get_quality_attributes(self):
         """Get quality attributes."""
-        quality_group = self['quality']
+        quality_group = self["quality"]
         quality_dict = {}
         for key in quality_group:
             # Add the values (as Numpy array) of each variable in the group
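`_calibrate` above dispatches brightness-temperature calibration to `_calibrate_bt(variable, cw, a, b)` with the per-channel centre wavenumber and the file's bt_conversion coefficients; the method body is outside this hunk. For orientation only, the conventional form of such a conversion is an inverse Planck function followed by a linear band correction — the exact formula and units used by the reader may differ:

import numpy as np

H = 6.62607015e-34    # Planck constant [J s]
K = 1.380649e-23      # Boltzmann constant [J/K]
C = 2.99792458e8      # speed of light [m/s]

def radiance_to_bt(radiance, cw, a, b):
    """Sketch: spectral radiance -> brightness temperature.

    Assumes cw in m^-1 and radiance in W m^-2 sr^-1 (m^-1)^-1, with a
    linear a/b band correction; not the reader's actual implementation.
    """
    c1 = 2 * H * C ** 2
    c2 = H * C / K
    t_eff = c2 * cw / np.log1p(c1 * cw ** 3 / radiance)   # inverse Planck
    return a * t_eff + b                                  # band correction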
diff --git a/satpy/readers/insat3d_img_l1b_h5.py b/satpy/readers/insat3d_img_l1b_h5.py
index fb0697be45..a7dcf371cc 100644
--- a/satpy/readers/insat3d_img_l1b_h5.py
+++ b/satpy/readers/insat3d_img_l1b_h5.py
@@ -120,13 +120,13 @@ class Insat3DIMGL1BH5FileHandler(BaseFileHandler):
     @property
     def start_time(self):
         """Get the start time."""
-        start_time = datetime.strptime(self.datatree.attrs['Acquisition_Start_Time'], '%d-%b-%YT%H:%M:%S')
+        start_time = datetime.strptime(self.datatree.attrs["Acquisition_Start_Time"], "%d-%b-%YT%H:%M:%S")
         return start_time

     @property
     def end_time(self):
         """Get the end time."""
-        end_time = datetime.strptime(self.datatree.attrs['Acquisition_End_Time'], '%d-%b-%YT%H:%M:%S')
+        end_time = datetime.strptime(self.datatree.attrs["Acquisition_End_Time"], "%d-%b-%YT%H:%M:%S")
         return end_time

     @cached_property
@@ -154,7 +154,7 @@ def get_dataset(self, ds_id, ds_info):

         darr = ds["IMG_" + ds_id["name"] + calibration]

-        nlat, nlon = ds.attrs['Nominal_Central_Point_Coordinates(degrees)_Latitude_Longitude']
+        nlat, nlon = ds.attrs["Nominal_Central_Point_Coordinates(degrees)_Latitude_Longitude"]
         darr.attrs["orbital_parameters"] = dict(satellite_nominal_longitude=float(nlon),
                                                 satellite_nominal_latitude=float(nlat),
                                                 satellite_nominal_altitude=float(ds.attrs["Nominal_Altitude(km)"]),
@@ -183,20 +183,20 @@ def get_area_def(self, ds_id):
         b = 6356752.314245

         pdict = {
-            'cfac': cfac,
-            'lfac': lfac,
-            'coff': cols / 2,
-            'loff': lines / 2,
-            'ncols': cols,
-            'nlines': lines,
-            'scandir': 'N2S',
-            'a': a,
-            'b': b,
-            'h': h,
-            'ssp_lon': 82.0,
-            'a_name': "insat3d82",
-            'a_desc': "insat3d82",
-            'p_id': 'geosmsg'
+            "cfac": cfac,
+            "lfac": lfac,
+            "coff": cols / 2,
+            "loff": lines / 2,
+            "ncols": cols,
+            "nlines": lines,
+            "scandir": "N2S",
+            "a": a,
+            "b": b,
+            "h": h,
+            "ssp_lon": 82.0,
+            "a_name": "insat3d82",
+            "a_desc": "insat3d82",
+            "p_id": "geosmsg"
         }
         area_extent = get_area_extent(pdict)
         adef = get_area_definition(pdict, area_extent)
diff --git a/satpy/readers/li_base_nc.py b/satpy/readers/li_base_nc.py
index 57e234b910..848306e77c 100644
--- a/satpy/readers/li_base_nc.py
+++ b/satpy/readers/li_base_nc.py
@@ -210,14 +210,14 @@ def __init__(self, filename, filename_info, filetype_info, cache_handle=True):
         # Note: the default dict assignment is need to avoid error when using the fake
         # netcdf4 file handler in mock unit tests:
         self._xarray_kwargs = getattr(self, "_xarray_kwargs", {})
-        self._xarray_kwargs['decode_times'] = False
-        self._xarray_kwargs['mask_and_scale'] = False
+        self._xarray_kwargs["decode_times"] = False
+        self._xarray_kwargs["mask_and_scale"] = False

         # Processing level that should be set by derived classes.
-        self.processing_level = filetype_info.get('processing_level', 'L0')
+        self.processing_level = filetype_info.get("processing_level", "L0")

         # This class will only provide support for the LI sensor:
-        self.sensors = {'li'}
+        self.sensors = {"li"}

         # Set of dataset names explicitly provided by this file handler:
         # This set is required to filter the retrieval of datasets later in the
@@ -234,19 +234,19 @@ def __init__(self, filename, filename_info, filetype_info, cache_handle=True):
         # directly here:
         self.provided_datasets = set()

-        self.ds_desc = filetype_info['file_desc']
+        self.ds_desc = filetype_info["file_desc"]

         # Store the extra infos available on specific variables:
         # Write the correct product type here:
-        self.product_type = self.ds_desc['product_type']
+        self.product_type = self.ds_desc["product_type"]
         logger.debug("Product type is: %s", self.product_type)
-        self.variable_transforms = self.ds_desc.get('variable_transforms', {})
+        self.variable_transforms = self.ds_desc.get("variable_transforms", {})

         # Store the pattern for the default swath coordinates:
         # Note that we should always have this swath coordinates entry now:
-        self.swath_coordinates = self.ds_desc.get('swath_coordinates', {})
-        patterns = self.swath_coordinates.get('variable_patterns', [])
-        self.swath_coordinates['patterns'] = [re.compile(pstr) for pstr in patterns]
+        self.swath_coordinates = self.ds_desc.get("swath_coordinates", {})
+        patterns = self.swath_coordinates.get("variable_patterns", [])
+        self.swath_coordinates["patterns"] = [re.compile(pstr) for pstr in patterns]

         # check if the current product is in an accumulation grid
         self.prod_in_accumulation_grid = self.is_prod_in_accumulation_grid()
@@ -264,8 +264,8 @@ def __init__(self, filename, filename_info, filetype_info, cache_handle=True):

         # Ordered list of transform operations supported in this file handler:
         # those transforms are applied if requested in the 'apply_transforms' method below
-        self.transform_names = ['use_rescaling', 'seconds_to_timedelta', 'milliseconds_to_timedelta',
-                                'seconds_to_datetime', 'broadcast_to', 'accumulate_index_offset']
+        self.transform_names = ["use_rescaling", "seconds_to_timedelta", "milliseconds_to_timedelta",
+                                "seconds_to_datetime", "broadcast_to", "accumulate_index_offset"]

         # store internal variables
         self.internal_variables = {}
@@ -276,12 +276,12 @@ def __init__(self, filename, filename_info, filetype_info, cache_handle=True):
     @property
     def start_time(self):
         """Get the start time."""
-        return self.filename_info['start_time']
+        return self.filename_info["start_time"]

     @property
     def end_time(self):
         """Get the end time."""
-        return self.filename_info['end_time']
+        return self.filename_info["end_time"]

     @property
     def sensor_names(self):
@@ -290,7 +290,7 @@ def sensor_names(self):

     def is_prod_in_accumulation_grid(self):
         """Check if the current product is an accumulated product in geos grid."""
-        in_grid = self.swath_coordinates.get('projection', None) == 'mtg_geos_projection'
+        in_grid = self.swath_coordinates.get("projection", None) == "mtg_geos_projection"
         return in_grid

     def get_latlon_names(self):
@@ -298,14 +298,14 @@ def get_latlon_names(self):

         Use default 'latitude' / 'longitude' if not specified.
         """
-        lon_name = self.swath_coordinates.setdefault('longitude', 'longitude')
-        lat_name = self.swath_coordinates.setdefault('latitude', 'latitude')
+        lon_name = self.swath_coordinates.setdefault("longitude", "longitude")
+        lat_name = self.swath_coordinates.setdefault("latitude", "latitude")
         return lat_name, lon_name

     def get_projection_config(self):
         """Retrieve the projection configuration details."""
         # We retrieve the projection variable name directly from our swath settings:
-        proj_var = self.swath_coordinates['projection']
+        proj_var = self.swath_coordinates["projection"]

         geos_proj = self.get_measured_variable(proj_var, fill_value=None)
         # cast projection attributes to float/str:
@@ -317,12 +317,12 @@ def get_projection_config(self):
         sweep = str(geos_proj.attrs["sweep_angle_axis"])

         # use a (semi-major axis) and rf (reverse flattening) to define ellipsoid as recommended by EUM
-        proj_dict = {'a': major_axis,
-                     'lon_0': lon_0,
-                     'h': point_height,
+        proj_dict = {"a": major_axis,
+                     "lon_0": lon_0,
+                     "h": point_height,
                      "rf": inv_flattening,
-                     'proj': 'geos',
-                     'units': 'm',
+                     "proj": "geos",
+                     "units": "m",
                      "sweep": sweep}

         return proj_dict
@@ -330,10 +330,10 @@ def get_projection_config(self):
     def get_daskified_lon_lat(self, proj_dict):
         """Get daskified lon and lat array using map_blocks."""
         # Get our azimuth/elevation arrays,
-        azimuth = self.get_measured_variable(self.swath_coordinates['azimuth'])
+        azimuth = self.get_measured_variable(self.swath_coordinates["azimuth"])
         azimuth = self.apply_use_rescaling(azimuth)

-        elevation = self.get_measured_variable(self.swath_coordinates['elevation'])
+        elevation = self.get_measured_variable(self.swath_coordinates["elevation"])
         elevation = self.apply_use_rescaling(elevation)

         # Daskify inverse projection computation:
@@ -355,9 +355,9 @@ def generate_coords_from_scan_angles(self):
         # Finally, we should store those arrays as internal variables for later retrieval as
         # standard datasets:
         self.internal_variables[lon_name] = xr.DataArray(
-            da.asarray(lon), dims=['y'], attrs={'standard_name': 'longitude'})
+            da.asarray(lon), dims=["y"], attrs={"standard_name": "longitude"})
         self.internal_variables[lat_name] = xr.DataArray(
-            da.asarray(lat), dims=['y'], attrs={'standard_name': 'latitude'})
+            da.asarray(lat), dims=["y"], attrs={"standard_name": "latitude"})

     def inverse_projection(self, azimuth, elevation, proj_dict):
         """Compute inverse projection."""
@@ -365,7 +365,7 @@ def inverse_projection(self, azimuth, elevation, proj_dict):
         projection = Proj(proj_dict)

         # Retrieve the point height from the projection config:
-        point_height = proj_dict['h']
+        point_height = proj_dict["h"]

         # Convert scan angles to projection coordinates by multiplying with perspective point height
         azimuth = azimuth.values * point_height
@@ -444,7 +444,7 @@ def apply_fill_value(self, arr, fill_value):
         if fill_value is not None:
             if np.isnan(fill_value):
                 fill_value = np.float32(np.nan)
-            arr = arr.where(arr != arr.attrs.get('_FillValue'), fill_value)
+            arr = arr.where(arr != arr.attrs.get("_FillValue"), fill_value)
         return arr

     def get_variable_search_paths(self, var_paths):
@@ -461,25 +461,25 @@ def add_provided_dataset(self, ds_infos):
         """Add a provided dataset to our internal list."""
         # Check if we have extra infos for that variable:
         # Note that if available we should use the alias name instead here:
-        vname = ds_infos["alias_name"] if 'alias_name' in ds_infos else ds_infos["variable_name"]
+        vname = ds_infos["alias_name"] if "alias_name" in ds_infos else ds_infos["variable_name"]
         self.check_variable_extra_info(ds_infos, vname)

         # We check here if we should include the default coordinates on that dataset:
-        if self.swath_coordinates is not None and 'coordinates' not in ds_infos:
+        if self.swath_coordinates is not None and "coordinates" not in ds_infos:
             # Check if the variable corresponding to this dataset will match one of the valid patterns
             # for the swath usage:
-            if any([p.search(vname) is not None for p in self.swath_coordinates['patterns']]):
+            if any([p.search(vname) is not None for p in self.swath_coordinates["patterns"]]):
                 # Get the target coordinate names, applying the sector name as needed:
                 lat_coord_name, lon_coord_name = self.get_coordinate_names(ds_infos)

                 # Ensure we do not try to add the coordinates on the coordinates themself:
-                dname = ds_infos['name']
+                dname = ds_infos["name"]
                 if dname != lat_coord_name and dname != lon_coord_name:
-                    ds_infos['coordinates'] = [lon_coord_name, lat_coord_name]
+                    ds_infos["coordinates"] = [lon_coord_name, lat_coord_name]

         self.dataset_infos.append(ds_infos)
-        self.provided_datasets.add(ds_infos['name'])
+        self.provided_datasets.add(ds_infos["name"])

     def check_variable_extra_info(self, ds_infos, vname):
         """Check if we have extra infos for that variable."""
@@ -492,8 +492,8 @@ def check_variable_extra_info(self, ds_infos, vname):
     def get_coordinate_names(self, ds_infos):
         """Get the target coordinate names, applying the sector name as needed."""
         lat_coord_name, lon_coord_name = self.get_latlon_names()
-        if 'sector_name' in ds_infos:
-            sname = ds_infos['sector_name']
+        if "sector_name" in ds_infos:
+            sname = ds_infos["sector_name"]
             lat_coord_name = lat_coord_name.replace("{sector_name}", sname)
             lon_coord_name = lon_coord_name.replace("{sector_name}", sname)
         return lat_coord_name, lon_coord_name
@@ -501,7 +501,7 @@ def get_coordinate_names(self, ds_infos):
     def get_dataset_infos(self, dname):
         """Retrieve the dataset infos corresponding to one of the registered datasets."""
         for dsinfos in self.dataset_infos:
-            if dsinfos['name'] == dname:
+            if dsinfos["name"] == dname:
                 return dsinfos

         # nothing found.
@@ -514,15 +514,15 @@ def register_dataset(self, var_name, oc_name=None):
         ds_name = var_name if oc_name is None else f"{var_name}_{oc_name}_sector"

         ds_info = {
-            'name': ds_name,
-            'variable_name': var_name,
-            'sensor': 'li',
-            'file_type': self.filetype_info['file_type']
+            "name": ds_name,
+            "variable_name": var_name,
+            "sensor": "li",
+            "file_type": self.filetype_info["file_type"]
         }

         # add the sector name:
         if oc_name is not None:
-            ds_info['sector_name'] = oc_name
+            ds_info["sector_name"] = oc_name

         self.add_provided_dataset(ds_info)

@@ -535,7 +535,7 @@ def register_available_datasets(self):
         self.dataset_infos = []

         # Assign the search paths for this product type:
-        self.search_paths = self.ds_desc.get('search_paths', [])
+        self.search_paths = self.ds_desc.get("search_paths", [])

         # Register our coordinates from azimuth/elevation data
         # if the product is accumulated
@@ -553,17 +553,17 @@ def register_available_datasets(self):

     def register_variable_datasets(self):
         """Register all the available raw (i.e. not in sectors)."""
-        if 'variables' in self.ds_desc:
-            all_vars = self.ds_desc['variables']
+        if "variables" in self.ds_desc:
+            all_vars = self.ds_desc["variables"]
             # No sector to handle so we write simple datasets from the variables:
             for var_name in all_vars:
                 self.register_dataset(var_name)

     def register_sector_datasets(self):
         """Register all the available sector datasets."""
-        if 'sectors' in self.ds_desc:
-            sectors = self.ds_desc['sectors']
-            sector_vars = self.ds_desc['sector_variables']
+        if "sectors" in self.ds_desc:
+            sectors = self.ds_desc["sectors"]
+            sector_vars = self.ds_desc["sector_variables"]
             # We should generate the datasets per sector:
             for oc_name in sectors:
                 for var_name in sector_vars:
@@ -590,16 +590,16 @@ def apply_use_rescaling(self, data_array, ds_info=None):

         # Check if we have the scaling elements:
         attribs = data_array.attrs
-        if 'scale_factor' in attribs or 'scaling_factor' in attribs or 'add_offset' in attribs:
+        if "scale_factor" in attribs or "scaling_factor" in attribs or "add_offset" in attribs:
             # TODO remove scaling_factor fallback after issue in NetCDF is fixed
-            scale_factor = attribs.setdefault('scale_factor', attribs.get('scaling_factor', 1))
-            add_offset = attribs.setdefault('add_offset', 0)
+            scale_factor = attribs.setdefault("scale_factor", attribs.get("scaling_factor", 1))
+            add_offset = attribs.setdefault("add_offset", 0)
             data_array = (data_array * scale_factor) + add_offset

             # rescale the valid range accordingly
-            if 'valid_range' in attribs.keys():
-                attribs['valid_range'] = attribs['valid_range'] * scale_factor + add_offset
+            if "valid_range" in attribs.keys():
+                attribs["valid_range"] = attribs["valid_range"] * scale_factor + add_offset

             data_array.attrs.update(attribs)

@@ -607,11 +607,11 @@ def apply_use_rescaling(self, data_array, ds_info=None):

     def apply_broadcast_to(self, data_array, ds_info):
         """Apply the broadcast_to transform on a given array."""
-        ref_var = self.get_transform_reference('broadcast_to', ds_info)
+        ref_var = self.get_transform_reference("broadcast_to", ds_info)

-        logger.debug("Broascasting %s to shape %s", ds_info['name'], ref_var.shape)
+        logger.debug("Broadcasting %s to shape %s", ds_info["name"], ref_var.shape)
         new_array = da.broadcast_to(data_array, ref_var.shape)
-        dims = data_array.dims if data_array.ndim > 0 else ('y',)
+        dims = data_array.dims if data_array.ndim > 0 else ("y",)
         data_array = xr.DataArray(new_array, coords=data_array.coords, dims=dims,
                                   name=data_array.name, attrs=data_array.attrs)
         return data_array
@@ -621,42 +621,42 @@ def apply_accumulate_index_offset(self, data_array, ds_info):
         # retrieve the __index_offset here, or create it if missing:
         # And keep track of the shared ds_info dict to reset it later in combine_info()
         self.current_ds_info = ds_info
-        offset = ds_info.setdefault('__index_offset', 0)
+        offset = ds_info.setdefault("__index_offset", 0)

-        ref_var = self.get_transform_reference('accumulate_index_offset', ds_info)
+        ref_var = self.get_transform_reference("accumulate_index_offset", ds_info)

         # Apply the current index_offset already reached on the indices we have in the current dataset:
         data_array = data_array + offset

         # Now update the __index_offset adding the number of elements in the reference array:
-        ds_info['__index_offset'] = offset + ref_var.size
+        ds_info["__index_offset"] = offset + ref_var.size
         logger.debug("Adding %d elements for index offset, new value is: %d",
-                     ref_var.size, ds_info['__index_offset'])
+                     ref_var.size, ds_info["__index_offset"])

         return data_array

     def apply_seconds_to_datetime(self, data_array, ds_info):
         """Apply the seconds_to_datetime transform on a given array."""
         # Retrieve the epoch timestamp:
-        epoch_ts = np.datetime64('2000-01-01T00:00:00.000000')
+        epoch_ts = np.datetime64("2000-01-01T00:00:00.000000")

         # And add our values as delta times in seconds:
         # note that we use a resolution of 1ns here:
-        data_array = epoch_ts + (data_array * 1e9).astype('timedelta64[ns]')
+        data_array = epoch_ts + (data_array * 1e9).astype("timedelta64[ns]")

         return data_array

     def apply_seconds_to_timedelta(self, data_array, _ds_info):
         """Apply the seconds_to_timedelta transform on a given array."""
         # Apply the type conversion in place in the data_array:
         # note that we use a resolution of 1ns here:
-        data_array = (data_array * 1e9).astype('timedelta64[ns]')
+        data_array = (data_array * 1e9).astype("timedelta64[ns]")

         return data_array

     def apply_milliseconds_to_timedelta(self, data_array, _ds_info):
         """Apply the milliseconds_to_timedelta transform on a given array."""
         # Apply the type conversion in place in the data_array:
         # note that we use a resolution of 1ns here:
-        data_array = (data_array * 1e6).astype('timedelta64[ns]')
+        data_array = (data_array * 1e6).astype("timedelta64[ns]")

         return data_array

     def get_transform_reference(self, transform_name, ds_info):
@@ -665,7 +665,7 @@ def get_transform_reference(self, transform_name, ds_info):

         if "{sector_name}" in var_path:
             # We really expect to have a sector name for that variable:
-            var_path = var_path.replace("{sector_name}", ds_info['sector_name'])
+            var_path = var_path.replace("{sector_name}", ds_info["sector_name"])

         # get the variable on that path:
         ref_var = self.get_measured_variable(var_path)
@@ -679,7 +679,7 @@ def apply_transforms(self, data_array, ds_info):
         for tname in self.transform_names:
             if tname in ds_info:
                 # Retrieve the transform function:
-                transform = getattr(self, f'apply_{tname}')
+                transform = getattr(self, f"apply_{tname}")
                 # Apply the transformation on the dataset:
                 data_array = transform(data_array, ds_info)
         return data_array
@@ -690,7 +690,7 @@ def combine_info(self, all_infos):

        This is to be able to reset our __index_offset attribute in the shared ds_info currently being updated.
        """
         if self.current_ds_info is not None:
-            del self.current_ds_info['__index_offset']
+            del self.current_ds_info["__index_offset"]
             self.current_ds_info = None

         return super().combine_info(all_infos)
@@ -698,10 +698,10 @@ def combine_info(self, all_infos):
     def get_transformed_dataset(self, ds_info):
         """Retrieve a dataset with all transformations applied on it."""
         # Extract base variable name:
-        vname = ds_info['variable_name']
+        vname = ds_info["variable_name"]

         # Note that the sector name might be None below:
-        sname = ds_info.get('sector_name', None)
+        sname = ds_info.get("sector_name", None)

         # Use the sector name as prefix for the variable path if applicable:
         var_paths = vname if sname is None else f"{sname}/{vname}"
@@ -717,17 +717,17 @@ def validate_array_dimensions(self, data_array, ds_info=None):
         # in order to potentially support data array combination in a satpy scene:
         if data_array.ndim == 0:
             # If we have no dimension, we should force creating one here:
-            data_array = data_array.expand_dims({'y': 1})
+            data_array = data_array.expand_dims({"y": 1})

-        data_array = data_array.rename({data_array.dims[0]: 'y'})
+        data_array = data_array.rename({data_array.dims[0]: "y"})

         return data_array

     def update_array_attributes(self, data_array, ds_info):
         """Inject the attributes from the ds_info structure into the final data array, ignoring the internal entries."""
         # ignore some internal processing only entries:
-        ignored_attribs = ["__index_offset", "broadcast_to", 'accumulate_index_offset',
-                           'seconds_to_timedelta', 'seconds_to_datetime']
+        ignored_attribs = ["__index_offset", "broadcast_to", "accumulate_index_offset",
+                           "seconds_to_timedelta", "seconds_to_datetime"]
         for key, value in ds_info.items():
             if key not in ignored_attribs:
                 data_array.attrs[key] = value
@@ -738,13 +738,13 @@ def get_dataset(self, dataset_id, ds_info=None):
         """Get a dataset."""
         # Retrieve default infos if missing:
         if ds_info is None:
-            ds_info = self.get_dataset_infos(dataset_id['name'])
+            ds_info = self.get_dataset_infos(dataset_id["name"])

         # check for potential error:
         if ds_info is None:
             raise KeyError(f"No dataset registered for {dataset_id}")

-        ds_name = ds_info['name']
+        ds_name = ds_info["name"]
         # In case this dataset name is not explicitly provided by this file handler then we
         # should simply return None.
         if ds_name not in self.provided_datasets:
diff --git a/satpy/readers/li_l2_nc.py b/satpy/readers/li_l2_nc.py
index 569dc2bf51..4fe0826380 100644
--- a/satpy/readers/li_l2_nc.py
+++ b/satpy/readers/li_l2_nc.py
@@ -71,14 +71,14 @@ def get_area_def(self, dsid):
         """Compute area definition for a dataset, only supported for accumulated products."""
         var_with_swath_coord = self.is_var_with_swath_coord(dsid)
         if var_with_swath_coord and self.with_area_def:
-            return get_area_def('mtg_fci_fdss_2km')
+            return get_area_def("mtg_fci_fdss_2km")

-        raise NotImplementedError('Area definition is not supported for accumulated products.')
+        raise NotImplementedError("Area definition is not supported for accumulated products.")

     def is_var_with_swath_coord(self, dsid):
         """Check if the variable corresponding to this dataset is listed as variable with swath coordinates."""
         # since the patterns are compiled to regex we use the search() method below to find matches
-        with_swath_coords = any([p.search(dsid['name']) is not None for p in self.swath_coordinates['patterns']])
+        with_swath_coords = any([p.search(dsid["name"]) is not None for p in self.swath_coordinates["patterns"]])
         return with_swath_coords

     def get_array_on_fci_grid(self, data_array: xr.DataArray):
@@ -92,8 +92,8 @@ def get_array_on_fci_grid(self, data_array: xr.DataArray):

         # Note that x and y have origin in the south-west corner of the image
         # and start with index 1.
-        rows = self.get_measured_variable('y')
-        cols = self.get_measured_variable('x')
+        rows = self.get_measured_variable("y")
+        cols = self.get_measured_variable("x")

         attrs = data_array.attrs
         rows, cols = da.compute(rows, cols)
@@ -110,7 +110,7 @@ def get_array_on_fci_grid(self, data_array: xr.DataArray):
             flattened_result[rows * LI_GRID_SHAPE[0] + cols] = data_array
             # ... reshape to final 2D grid
             data_2d = da.reshape(flattened_result, LI_GRID_SHAPE)
-        xarr = xr.DataArray(da.asarray(data_2d, CHUNK_SIZE), dims=('y', 'x'))
+        xarr = xr.DataArray(da.asarray(data_2d, CHUNK_SIZE), dims=("y", "x"))
         xarr.attrs = attrs

         return xarr
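`get_array_on_fci_grid` above scatters a 1-D list of accumulated-product values into the 2-D FCI grid by computing flat indices from the file's x/y coordinates. The core of that scatter, stripped down to plain arrays (LI_GRID_SHAPE's value is assumed here, and indices are treated as 0-based for simplicity):

import numpy as np

GRID = (5568, 5568)                     # assumed stand-in for LI_GRID_SHAPE
rows = np.array([10, 10, 11])           # per-pixel grid row
cols = np.array([100, 101, 100])        # per-pixel grid column
values = np.array([1.0, 2.0, 3.0])

flat = np.full(GRID[0] * GRID[1], np.nan, dtype=np.float32)
flat[rows * GRID[0] + cols] = values    # same flat index as the reader (square grid)
data_2d = flat.reshape(GRID)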
diff --git a/satpy/readers/maia.py b/satpy/readers/maia.py
index 941bf34208..75591c59d5 100644
--- a/satpy/readers/maia.py
+++ b/satpy/readers/maia.py
@@ -48,69 +48,69 @@ def __init__(self, filename, filename_info, filetype_info):
                                               filename, filename_info,
                                               filetype_info)
         self.finfo = filename_info
         # set the day date part for end_time from the file name
-        self.finfo['end_time'] = self.finfo['end_time'].replace(
-            year=self.finfo['start_time'].year,
-            month=self.finfo['start_time'].month,
-            day=self.finfo['start_time'].day)
-        if self.finfo['end_time'] < self.finfo['start_time']:
-            myday = self.finfo['end_time'].day
-            self.finfo['end_time'] = self.finfo['end_time'].replace(
+        self.finfo["end_time"] = self.finfo["end_time"].replace(
+            year=self.finfo["start_time"].year,
+            month=self.finfo["start_time"].month,
+            day=self.finfo["start_time"].day)
+        if self.finfo["end_time"] < self.finfo["start_time"]:
+            myday = self.finfo["end_time"].day
+            self.finfo["end_time"] = self.finfo["end_time"].replace(
                 day=myday + 1)
         self.selected = None
         self.read(self.filename)

     def read(self, filename):
         """Read the file."""
-        self.h5 = h5py.File(filename, 'r')
+        self.h5 = h5py.File(filename, "r")
         missing = -9999.
-        self.Lat = da.from_array(self.h5[u'DATA/Latitude'], chunks=CHUNK_SIZE) / 10000.
-        self.Lon = da.from_array(self.h5[u'DATA/Longitude'], chunks=CHUNK_SIZE) / 10000.
+        self.Lat = da.from_array(self.h5[u"DATA/Latitude"], chunks=CHUNK_SIZE) / 10000.
+        self.Lon = da.from_array(self.h5[u"DATA/Longitude"], chunks=CHUNK_SIZE) / 10000.
         self.selected = (self.Lon > missing)
         self.file_content = {}
-        for key in self.h5['DATA'].keys():
-            self.file_content[key] = da.from_array(self.h5[u'DATA/' + key], chunks=CHUNK_SIZE)
-        for key in self.h5[u'HEADER'].keys():
-            self.file_content[key] = self.h5[u'HEADER/' + key][:]
+        for key in self.h5["DATA"].keys():
+            self.file_content[key] = da.from_array(self.h5[u"DATA/" + key], chunks=CHUNK_SIZE)
+        for key in self.h5[u"HEADER"].keys():
+            self.file_content[key] = self.h5[u"HEADER/" + key][:]

         # Cloud Mask on pixel
         mask = 2**0 + 2**1 + 2**2
-        lst = self.file_content[u'CloudMask'] & mask
+        lst = self.file_content[u"CloudMask"] & mask
         lst = lst / 2**0
         self.file_content[u"cma"] = lst

         # Cloud Mask confidence
         mask = 2**5 + 2**6
-        lst = self.file_content[u'CloudMask'] & mask
+        lst = self.file_content[u"CloudMask"] & mask
         lst = lst / 2**5
         self.file_content[u"cma_conf"] = lst

         # Cloud Mask Quality
         mask = 2**3 + 2**4
-        lst = self.file_content[u'CloudMask'] & mask
+        lst = self.file_content[u"CloudMask"] & mask
         lst = lst / 2**3
-        self.file_content[u'cma_qual'] = lst
+        self.file_content[u"cma_qual"] = lst

         # Opaque Cloud
         mask = 2**21
-        lst = self.file_content[u'CloudMask'] & mask
+        lst = self.file_content[u"CloudMask"] & mask
         lst = lst / 2**21
-        self.file_content[u'opaq_cloud'] = lst
+        self.file_content[u"opaq_cloud"] = lst

         # land /water Background
         mask = 2**15 + 2**16 + 2**17
-        lst = self.file_content[u'CloudMask'] & mask
+        lst = self.file_content[u"CloudMask"] & mask
         lst = lst / 2**15
-        self.file_content[u'land_water_background'] = lst
+        self.file_content[u"land_water_background"] = lst

         # CT (Actual CloudType)
         mask = 2**4 + 2**5 + 2**6 + 2**7 + 2**8
-        classif = self.file_content[u'CloudType'] & mask
+        classif = self.file_content[u"CloudType"] & mask
         classif = classif / 2**4
-        self.file_content['ct'] = classif.astype(np.uint8)
+        self.file_content["ct"] = classif.astype(np.uint8)

     def get_platform(self, platform):
         """Get the platform."""
-        if self.file_content['sat_id'] in (14,):
+        if self.file_content["sat_id"] in (14,):
             return "viirs"
         else:
             return "avhrr"
@@ -118,26 +118,26 @@ def get_platform(self, platform):
     @property
     def start_time(self):
         """Get the start time."""
-        return self.finfo['start_time']
+        return self.finfo["start_time"]

     @property
     def end_time(self):
         """Get the end time."""
-        return self.finfo['end_time']
+        return self.finfo["end_time"]

     def get_dataset(self, key, info, out=None):
         """Get a dataset from the file."""
-        logger.debug("Reading %s.", key['name'])
-        values = self.file_content[key['name']]
+        logger.debug("Reading %s.", key["name"])
+        values = self.file_content[key["name"]]
         selected = np.array(self.selected)
-        if key['name'] in ("Latitude", "Longitude"):
+        if key["name"] in ("Latitude", "Longitude"):
             values = values / 10000.
-        if key['name'] in ('Tsurf', 'CloudTopPres', 'CloudTopTemp'):
+        if key["name"] in ("Tsurf", "CloudTopPres", "CloudTopTemp"):
             goods = values > -9998.
             selected = np.array(selected & goods)
-            if key['name'] in ('Tsurf', "Alt_surface", "CloudTopTemp"):
+            if key["name"] in ("Tsurf", "Alt_surface", "CloudTopTemp"):
                 values = values / 100.
-            if key['name'] in ("CloudTopPres"):
+            if key["name"] in ("CloudTopPres"):
                 values = values / 10.
         else:
             selected = self.selected
@@ -145,10 +145,10 @@ def get_dataset(self, key, info, out=None):

         fill_value = np.nan

-        if key['name'] == 'ct':
+        if key["name"] == "ct":
             fill_value = 0
-            info['_FillValue'] = 0
-        ds = DataArray(values, dims=['y', 'x'], attrs=info).where(selected, fill_value)
+            info["_FillValue"] = 0
+        ds = DataArray(values, dims=["y", "x"], attrs=info).where(selected, fill_value)

         # update dataset info with file_info
         return ds
diff --git a/satpy/readers/meris_nc_sen3.py b/satpy/readers/meris_nc_sen3.py
index 61fc761f50..fa69dad2cc 100644
--- a/satpy/readers/meris_nc_sen3.py
+++ b/satpy/readers/meris_nc_sen3.py
@@ -40,7 +40,7 @@ class NCMERISCal(NCOLCIBase):
     def __init__(self, filename, filename_info, filetype_info):
         """Init the meris reader base."""
         super(NCMERISCal, self).__init__(filename, filename_info, filetype_info)
-        self.sensor = 'meris'
+        self.sensor = "meris"


 class NCMERISGeo(NCOLCIBase):
@@ -49,7 +49,7 @@ class NCMERISGeo(NCOLCIBase):
     def __init__(self, filename, filename_info, filetype_info):
         """Init the meris reader base."""
         super(NCMERISGeo, self).__init__(filename, filename_info, filetype_info)
-        self.sensor = 'meris'
+        self.sensor = "meris"


 class NCMERIS2(NCOLCI2):
@@ -58,24 +58,24 @@ class NCMERIS2(NCOLCI2):
     def __init__(self, filename, filename_info, filetype_info):
         """Init the file handler."""
         super(NCMERIS2, self).__init__(filename, filename_info, filetype_info)
-        self.sensor = 'meris'
-        self.reflectance_prefix = 'M'
-        self.reflectance_suffix = '_rho_w'
+        self.sensor = "meris"
+        self.reflectance_prefix = "M"
+        self.reflectance_suffix = "_rho_w"

     def getbitmask(self, wqsf, items=None):
         """Get the bitmask. Experimental default mask."""
-        items = items or ['SEA_ICE', 'MEGLINT', 'HIGHGLINT',
-                          'HAZE_OVER_WATER', 'WHITECAPS', 'AC_FAIL', 'WHITE_SCATT',
-                          'LOWRW', 'HIGHRW', 'OUT_OF_RANGE_AAC', 'OUT_OF_SCOPE_AAC',
-                          'OUT_OF_RANGE_OC_NN', 'OUT_OF_SCOPE_OC_NN',
-                          'OUT_OF_RANGE_CHL_OC4ME_INPUT', 'OUT_OF_RANGE_CHL_OC4ME']
+        items = items or ["SEA_ICE", "MEGLINT", "HIGHGLINT",
+                          "HAZE_OVER_WATER", "WHITECAPS", "AC_FAIL", "WHITE_SCATT",
+                          "LOWRW", "HIGHRW", "OUT_OF_RANGE_AAC", "OUT_OF_SCOPE_AAC",
+                          "OUT_OF_RANGE_OC_NN", "OUT_OF_SCOPE_OC_NN",
+                          "OUT_OF_RANGE_CHL_OC4ME_INPUT", "OUT_OF_RANGE_CHL_OC4ME"]
         bflags = BitFlags(
             wqsf,
-            flag_list=['SEA_ICE', 'MEGLINT', 'HIGHGLINT', 'CASE2_S', 'CASE2_ANOM',
-                       'HAZE_OVER_WATER', 'WHITECAPS', 'AC_FAIL', 'BPAC_ON', 'WHITE_SCATT',
-                       'LOWRW', 'HIGHRW', 'OUT_OF_RANGE_AAC', 'OUT_OF_SCOPE_AAC',
-                       'OUT_OF_RANGE_OC_NN', 'OUT_OF_SCOPE_OC_NN',
-                       'OUT_OF_RANGE_CHL_OC4ME_INPUT', 'OUT_OF_RANGE_CHL_OC4ME'],
+            flag_list=["SEA_ICE", "MEGLINT", "HIGHGLINT", "CASE2_S", "CASE2_ANOM",
+                       "HAZE_OVER_WATER", "WHITECAPS", "AC_FAIL", "BPAC_ON", "WHITE_SCATT",
+                       "LOWRW", "HIGHRW", "OUT_OF_RANGE_AAC", "OUT_OF_SCOPE_AAC",
+                       "OUT_OF_RANGE_OC_NN", "OUT_OF_SCOPE_OC_NN",
+                       "OUT_OF_RANGE_CHL_OC4ME_INPUT", "OUT_OF_RANGE_CHL_OC4ME"],
         )
         return reduce(np.logical_or, [bflags[item] for item in items])

@@ -86,7 +86,7 @@ class NCMERISAngles(NCOLCIAngles):
     def __init__(self, filename, filename_info, filetype_info):
         """Init the file handler."""
         super(NCMERISAngles, self).__init__(filename, filename_info, filetype_info)
-        self.sensor = 'meris'
+        self.sensor = "meris"


 class NCMERISMeteo(NCOLCIMeteo):
@@ -95,4 +95,4 @@ class NCMERISMeteo(NCOLCIMeteo):
     def __init__(self, filename, filename_info, filetype_info):
         """Init the file handler."""
         super(NCMERISMeteo, self).__init__(filename, filename_info, filetype_info)
-        self.sensor = 'meris'
+        self.sensor = "meris"
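Both the MAIA handler (masks like 2**0 + 2**1 + 2**2 followed by a division) and the MERIS `getbitmask` decode packed flag words; the MAIA idiom is a plain bit-field extraction. Generalized as a small helper (the field positions below are the cma/confidence fields from the MAIA code):

import numpy as np

def extract_bits(packed, first_bit, n_bits):
    """Extract an n_bits-wide field starting at first_bit (sketch)."""
    mask = sum(2 ** (first_bit + i) for i in range(n_bits))
    return (packed & mask) // 2 ** first_bit

cloud_mask = np.array([0b0000101, 0b1100011])
cma = extract_bits(cloud_mask, 0, 3)    # bits 0-2: cloud mask on pixel
conf = extract_bits(cloud_mask, 5, 2)   # bits 5-6: confidence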
a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py index b0225ebcb4..905db0654f 100644 --- a/satpy/readers/mersi_l1b.py +++ b/satpy/readers/mersi_l1b.py @@ -44,25 +44,25 @@ def _strptime(self, date_attr, time_attr): time = self[time_attr] # "18:27:39.720" # cuts off microseconds because of unknown meaning # is .720 == 720 microseconds or 720000 microseconds - return datetime.strptime(date + " " + time.split('.')[0], "%Y-%m-%d %H:%M:%S") + return datetime.strptime(date + " " + time.split(".")[0], "%Y-%m-%d %H:%M:%S") @property def start_time(self): """Time for first observation.""" - return self._strptime('/attr/Observing Beginning Date', '/attr/Observing Beginning Time') + return self._strptime("/attr/Observing Beginning Date", "/attr/Observing Beginning Time") @property def end_time(self): """Time for final observation.""" - return self._strptime('/attr/Observing Ending Date', '/attr/Observing Ending Time') + return self._strptime("/attr/Observing Ending Date", "/attr/Observing Ending Time") @property def sensor_name(self): """Map sensor name to Satpy 'standard' sensor names.""" - file_sensor = self['/attr/Sensor Identification Code'] + file_sensor = self["/attr/Sensor Identification Code"] sensor = { - 'MERSI': 'mersi-2', - 'MERSI LL': 'mersi-ll', + "MERSI": "mersi-2", + "MERSI LL": "mersi-ll", }.get(file_sensor, file_sensor) return sensor @@ -76,8 +76,8 @@ def _get_single_slope_intercept(self, slope, intercept, cal_index): def _get_coefficients(self, cal_key, cal_index): coeffs = self[cal_key][cal_index] - slope = coeffs.attrs.pop('Slope', None) - intercept = coeffs.attrs.pop('Intercept', None) + slope = coeffs.attrs.pop("Slope", None) + intercept = coeffs.attrs.pop("Intercept", None) if slope is not None: slope, intercept = self._get_single_slope_intercept( slope, intercept, cal_index) @@ -86,37 +86,37 @@ def _get_coefficients(self, cal_key, cal_index): def get_dataset(self, dataset_id, ds_info): """Load data variable and metadata and calibrate if needed.""" - file_key = ds_info.get('file_key', dataset_id['name']) - band_index = ds_info.get('band_index') + file_key = ds_info.get("file_key", dataset_id["name"]) + band_index = ds_info.get("band_index") data = self[file_key] if band_index is not None: data = data[band_index] if data.ndim >= 2: - data = data.rename({data.dims[-2]: 'y', data.dims[-1]: 'x'}) + data = data.rename({data.dims[-2]: "y", data.dims[-1]: "x"}) attrs = data.attrs.copy() # avoid contaminating other band loading attrs.update(ds_info) - if 'rows_per_scan' in self.filetype_info: - attrs.setdefault('rows_per_scan', self.filetype_info['rows_per_scan']) + if "rows_per_scan" in self.filetype_info: + attrs.setdefault("rows_per_scan", self.filetype_info["rows_per_scan"]) data = self._mask_data(data, dataset_id, attrs) - slope = attrs.pop('Slope', None) - intercept = attrs.pop('Intercept', None) - if slope is not None and dataset_id.get('calibration') != 'counts': + slope = attrs.pop("Slope", None) + intercept = attrs.pop("Intercept", None) + if slope is not None and dataset_id.get("calibration") != "counts": if band_index is not None: slope = slope[band_index] intercept = intercept[band_index] data = data * slope + intercept - if dataset_id.get('calibration') == "reflectance": - coeffs = self._get_coefficients(ds_info['calibration_key'], - ds_info['calibration_index']) + if dataset_id.get("calibration") == "reflectance": + coeffs = self._get_coefficients(ds_info["calibration_key"], + ds_info["calibration_index"]) data = coeffs[0] + coeffs[1] * data + coeffs[2] * data 
** 2 - elif dataset_id.get('calibration') == "brightness_temperature": - calibration_index = ds_info['calibration_index'] + elif dataset_id.get("calibration") == "brightness_temperature": + calibration_index = ds_info["calibration_index"] # Converts um^-1 (wavenumbers) and (mW/m^2)/(sr/cm^-1) (radiance data) # to SI units m^-1, mW*m^-3*sr^-1. - wave_number = 1. / (dataset_id['wavelength'][1] / 1e6) + wave_number = 1. / (dataset_id["wavelength"][1] / 1e6) data = self._get_bt_dataset(data, calibration_index, wave_number) @@ -125,29 +125,29 @@ def get_dataset(self, dataset_id, ds_info): for key, val in attrs.items(): # python 3 only if bytes is not str and isinstance(val, bytes): - data.attrs[key] = val.decode('utf8') + data.attrs[key] = val.decode("utf8") data.attrs.update({ - 'platform_name': self['/attr/Satellite Name'], - 'sensor': self.sensor_name, + "platform_name": self["/attr/Satellite Name"], + "sensor": self.sensor_name, }) return data def _mask_data(self, data, dataset_id, attrs): """Mask the data using fill_value and valid_range attributes.""" - fill_value = attrs.pop('FillValue', np.nan) # covered by valid_range - valid_range = attrs.pop('valid_range', None) - if dataset_id.get('calibration') == 'counts': + fill_value = attrs.pop("FillValue", np.nan) # covered by valid_range + valid_range = attrs.pop("valid_range", None) + if dataset_id.get("calibration") == "counts": # preserve integer type of counts if possible - attrs['_FillValue'] = fill_value + attrs["_FillValue"] = fill_value new_fill = fill_value else: new_fill = np.nan if valid_range is not None: # Due to a bug in the valid_range upper limit of the 10.8 (band 24) and 12.0 (band 25) channels # in the HDF data, this is hardcoded here. - if dataset_id['name'] in ['24', '25'] and valid_range[1] == 4095: + if dataset_id["name"] in ["24", "25"] and valid_range[1] == 4095: valid_range[1] = 25000 # typically bad_values == 65535, saturated == 65534 # dead detector == 65533 @@ -184,13 +184,13 @@ def _get_bt_dataset(self, data, calibration_index, wave_number): data = data.where(data != 0) # additional corrections from the file - if self.sensor_name == 'mersi-2': - corr_coeff_a = float(self['/attr/TBB_Trans_Coefficient_A'][calibration_index]) - corr_coeff_b = float(self['/attr/TBB_Trans_Coefficient_B'][calibration_index]) - elif self.sensor_name == 'mersi-ll': + if self.sensor_name == "mersi-2": + corr_coeff_a = float(self["/attr/TBB_Trans_Coefficient_A"][calibration_index]) + corr_coeff_b = float(self["/attr/TBB_Trans_Coefficient_B"][calibration_index]) + elif self.sensor_name == "mersi-ll": + # MERSI-LL stores these coefficients differently try: - coeffs = self['/attr/TBB_Trans_Coefficient'] + coeffs = self["/attr/TBB_Trans_Coefficient"] corr_coeff_a = coeffs[calibration_index] corr_coeff_b = coeffs[calibration_index + N_TOT_IR_CHANS_LL] except KeyError: diff --git a/satpy/readers/mimic_TPW2_nc.py b/satpy/readers/mimic_TPW2_nc.py index d4b7422ab1..8a22002cf4 100644 --- a/satpy/readers/mimic_TPW2_nc.py +++ b/satpy/readers/mimic_TPW2_nc.py @@ -54,8 +54,8 @@ def __init__(self, filename, filename_info, filetype_info): def available_datasets(self, configured_datasets=None): """Get datasets in file matching geolocation shape (lat/lon).""" - lat_shape = self.file_content.get('/dimension/lat') - lon_shape = self.file_content.get('/dimension/lon') + lat_shape = self.file_content.get("/dimension/lat") + lon_shape = self.file_content.get("/dimension/lon") # Read the lat/lon variables?
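The MERSI brightness-temperature hunk above converts the channel's central wavelength, given in micrometers, to a wavenumber in SI units before the brightness-temperature computation in _get_bt_dataset. A minimal worked example of that conversion, using a nominal 10.8 um channel value rather than anything read from a file:

wavelength_um = 10.8                       # nominal central wavelength in micrometers (assumed)
wave_number = 1. / (wavelength_um / 1e6)   # micrometers -> meters, then invert
print(wave_number)                         # ~92592.6 m^-1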
handled_variables = set() @@ -67,9 +67,9 @@ def available_datasets(self, configured_datasets=None): if is_avail is not None: yield is_avail, ds_info - var_name = ds_info.get('file_key', ds_info['name']) + var_name = ds_info.get("file_key", ds_info["name"]) # logger.debug("Evaluating previously configured variable: %s", var_name) - matches = self.file_type_matches(ds_info['file_type']) + matches = self.file_type_matches(ds_info["file_type"]) # we can confidently say that we can provide this dataset and can # provide more info if matches and var_name in self: @@ -98,35 +98,35 @@ def available_datasets(self, configured_datasets=None): handled_variables.add(var_name) # Create new ds_info object new_info = { - 'name': var_name, - 'file_key': var_name, - 'file_type': self.filetype_info['file_type'], + "name": var_name, + "file_key": var_name, + "file_type": self.filetype_info["file_type"], } logger.debug(var_name) yield True, new_info def get_dataset(self, ds_id, info): """Load dataset designated by the given key from file.""" - logger.debug("Getting data for: %s", ds_id['name']) - file_key = info.get('file_key', ds_id['name']) + logger.debug("Getting data for: %s", ds_id["name"]) + file_key = info.get("file_key", ds_id["name"]) data = np.flipud(self[file_key]) - data = xr.DataArray(data, dims=['y', 'x']) + data = xr.DataArray(data, dims=["y", "x"]) data.attrs = self.get_metadata(data, info) - if 'lon' in data.dims: - data.rename({'lon': 'x'}) - if 'lat' in data.dims: - data.rename({'lat': 'y'}) + if "lon" in data.dims: + data = data.rename({"lon": "x"}) + if "lat" in data.dims: + data = data.rename({"lat": "y"}) return data def get_area_def(self, dsid): """Flip data up/down and define equirectangular AreaDefinition.""" - flip_lat = np.flipud(self['latArr']) - latlon = np.meshgrid(self['lonArr'], flip_lat) + flip_lat = np.flipud(self["latArr"]) + latlon = np.meshgrid(self["lonArr"], flip_lat) - width = self['lonArr/shape'][0] - height = self['latArr/shape'][0] + width = self["lonArr/shape"][0] + height = self["latArr/shape"][0] lower_left_x = latlon[0][height-1][0] lower_left_y = latlon[1][height-1][0] @@ -136,9 +136,9 @@ def get_area_def(self, dsid): area_extent = (lower_left_x, lower_left_y, upper_right_x, upper_right_y) description = "MIMIC TPW WGS84" - area_id = 'mimic' - proj_id = 'World Geodetic System 1984' - projection = 'EPSG:4326' + area_id = "mimic" + proj_id = "World Geodetic System 1984" + projection = "EPSG:4326" area_def = AreaDefinition(area_id, description, proj_id, projection, width, height, area_extent, ) return area_def @@ -148,24 +148,24 @@ def get_metadata(self, data, info): metadata.update(data.attrs) metadata.update(info) metadata.update({ - 'platform_shortname': 'aggregated microwave', - 'sensor': 'mimic', - 'start_time': self.start_time, - 'end_time': self.end_time, + "platform_shortname": "aggregated microwave", + "sensor": "mimic", + "start_time": self.start_time, + "end_time": self.end_time, }) - metadata.update(self[info.get('file_key')].variable.attrs) + metadata.update(self[info.get("file_key")].variable.attrs) return metadata @property def start_time(self): """Start timestamp of the dataset determined from yaml.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """End timestamp of the dataset, same as start_time.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info.get("end_time", self.start_time) @property def sensor_name(self): diff --git a/satpy/readers/mirs.py
b/satpy/readers/mirs.py index de02b1dc36..1ee0912b0f 100644 --- a/satpy/readers/mirs.py +++ b/satpy/readers/mirs.py @@ -64,7 +64,7 @@ } SENSOR = {"n18": amsu, "n19": amsu, - "n20": 'atms', + "n20": "atms", "np": amsu, "m1": amsu, "m2": amsu, @@ -173,11 +173,11 @@ def get_coeff_by_sfc(coeff_fn, bt_data, idx): def limb_correct_atms_bt(bt_data, surf_type_mask, coeff_fns, ds_info): """Gather data needed for limb correction.""" - idx = ds_info['channel_index'] + idx = ds_info["channel_index"] LOG.info("Starting ATMS Limb Correction...") - sea_bt = get_coeff_by_sfc(coeff_fns['sea'], bt_data, idx) - land_bt = get_coeff_by_sfc(coeff_fns['land'], bt_data, idx) + sea_bt = get_coeff_by_sfc(coeff_fns["sea"], bt_data, idx) + land_bt = get_coeff_by_sfc(coeff_fns["land"], bt_data, idx) LOG.info("Finishing limb correction") is_sea = (surf_type_mask == 0) @@ -217,8 +217,8 @@ def __init__(self, filename, filename_info, filetype_info, decode_cf=True, mask_and_scale=False, decode_coords=True, - chunks={'Field_of_view': CHUNK_SIZE, - 'Scanline': CHUNK_SIZE}) + chunks={"Field_of_view": CHUNK_SIZE, + "Scanline": CHUNK_SIZE}) # y,x is used in satpy, bands rather than channel using in xrimage self.nc = self.nc.rename_dims({"Scanline": "y", "Field_of_view": "x"}) @@ -232,13 +232,13 @@ def __init__(self, filename, filename_info, filetype_info, @property def platform_shortname(self): """Get platform shortname.""" - return self.filename_info['platform_shortname'] + return self.filename_info["platform_shortname"] @property def _get_platform_name(self): """Get platform name.""" try: - res = PLATFORMS[self.filename_info['platform_shortname'].lower()] + res = PLATFORMS[self.filename_info["platform_shortname"].lower()] except KeyError: res = "mirs" return res.lower() @@ -296,13 +296,13 @@ def force_time(self, key): @property def _get_coeff_filenames(self): """Retrieve necessary files for coefficients if needed.""" - coeff_fn = {'sea': None, 'land': None} + coeff_fn = {"sea": None, "land": None} if self.platform_name == "noaa-20": - coeff_fn['land'] = retrieve("readers/limbcoef_atmsland_noaa20.txt") - coeff_fn['sea'] = retrieve("readers/limbcoef_atmssea_noaa20.txt") - if self.platform_name == 'npp': - coeff_fn['land'] = retrieve("readers/limbcoef_atmsland_snpp.txt") - coeff_fn['sea'] = retrieve("readers/limbcoef_atmssea_snpp.txt") + coeff_fn["land"] = retrieve("readers/limbcoef_atmsland_noaa20.txt") + coeff_fn["sea"] = retrieve("readers/limbcoef_atmssea_noaa20.txt") + if self.platform_name == "npp": + coeff_fn["land"] = retrieve("readers/limbcoef_atmsland_snpp.txt") + coeff_fn["sea"] = retrieve("readers/limbcoef_atmssea_snpp.txt") return coeff_fn @@ -311,10 +311,10 @@ def update_metadata(self, ds_info): metadata = {} metadata.update(ds_info) metadata.update({ - 'sensor': self.sensor, - 'platform_name': self.platform_name, - 'start_time': self.start_time, - 'end_time': self.end_time, + "sensor": self.sensor, + "platform_name": self.platform_name, + "start_time": self.start_time, + "end_time": self.end_time, }) return metadata @@ -325,9 +325,9 @@ def _nan_for_dtype(data_arr_dtype): if data_arr_dtype.type == np.float32: return np.float32(np.nan) if np.issubdtype(data_arr_dtype, np.timedelta64): - return np.timedelta64('NaT') + return np.timedelta64("NaT") if np.issubdtype(data_arr_dtype, np.datetime64): - return np.datetime64('NaT') + return np.datetime64("NaT") return np.nan @staticmethod @@ -375,19 +375,19 @@ def apply_attributes(self, data, ds_info): ds_info.update(data.attrs) # special cases - if ds_info['name'] in 
["latitude", "longitude"]: + if ds_info["name"] in ["latitude", "longitude"]: ds_info["standard_name"] = ds_info.get("standard_name", - ds_info['name']) + ds_info["name"]) # try to assign appropriate units (if "Kelvin" covert to K) units_convert = {"Kelvin": "K"} - data_unit = ds_info.get('units', None) - ds_info['units'] = units_convert.get(data_unit, data_unit) + data_unit = ds_info.get("units", None) + ds_info["units"] = units_convert.get(data_unit, data_unit) - scale = ds_info.pop('scale_factor', 1.0) - offset = ds_info.pop('add_offset', 0.) + scale = ds_info.pop("scale_factor", 1.0) + offset = ds_info.pop("add_offset", 0.) fill_value = ds_info.pop("_FillValue", global_attr_fill) - valid_range = ds_info.pop('valid_range', None) + valid_range = ds_info.pop("valid_range", None) data = self._scale_data(data, scale, offset) data = self._fill_data(data, fill_value, scale, offset) @@ -399,14 +399,14 @@ def apply_attributes(self, data, ds_info): def get_dataset(self, ds_id, ds_info): """Get datasets.""" - if 'dependencies' in ds_info.keys(): - idx = ds_info['channel_index'] - data = self['BT'] + if "dependencies" in ds_info.keys(): + idx = ds_info["channel_index"] + data = self["BT"] data = data.rename(new_name_or_name_dict=ds_info["name"]) data, ds_info = self.apply_attributes(data, ds_info) if self.sensor.lower() == "atms" and self.limb_correction: - sfc_type_mask = self['Sfc_type'] + sfc_type_mask = self["Sfc_type"] data = limb_correct_atms_bt(data, sfc_type_mask, self._get_coeff_filenames, ds_info) @@ -416,7 +416,7 @@ def get_dataset(self, ds_id, ds_info): LOG.info("No Limb Correction applied.") data = data[:, :, idx] else: - data = self[ds_id['name']] + data = self[ds_id["name"]] data, ds_info = self.apply_attributes(data, ds_info) data.attrs = self.update_metadata(ds_info) @@ -440,24 +440,24 @@ def available_datasets(self, configured_datasets=None): continue yaml_info = {} - if self.file_type_matches(ds_info['file_type']): - handled_vars.add(ds_info['name']) + if self.file_type_matches(ds_info["file_type"]): + handled_vars.add(ds_info["name"]) yaml_info = ds_info - if ds_info['name'] == 'BT': + if ds_info["name"] == "BT": yield from self._available_btemp_datasets(yaml_info) yield True, ds_info yield from self._available_new_datasets(handled_vars) def _count_channel_repeat_number(self): """Count channel/polarization pair repetition.""" - freq = self.nc.coords.get('Freq', self.nc.get('Freq')) - polo = self.nc['Polo'] + freq = self.nc.coords.get("Freq", self.nc.get("Freq")) + polo = self.nc["Polo"] chn_total = Counter() normals = [] for idx, (f, p) in enumerate(zip(freq, polo)): normal_f = str(int(f)) - normal_p = 'v' if p == POLO_V else 'h' + normal_p = "v" if p == POLO_V else "h" chn_total[normal_f + normal_p] += 1 normals.append((idx, f, p, normal_f, normal_p)) @@ -471,7 +471,7 @@ def _available_btemp_datasets(self, yaml_info): for idx, _f, _p, normal_f, normal_p in normals: chn_cnt[normal_f + normal_p] += 1 p_count = str(chn_cnt[normal_f + normal_p] - if chn_total[normal_f + normal_p] > 1 else '') + if chn_total[normal_f + normal_p] > 1 else "") new_name = "btemp_{}{}{}".format(normal_f, normal_p, p_count) @@ -479,22 +479,22 @@ def _available_btemp_datasets(self, yaml_info): desc_bt = desc_bt.format(idx, normal_f, normal_p, p_count) ds_info = yaml_info.copy() ds_info.update({ - 'file_type': self.filetype_info['file_type'], - 'name': new_name, - 'description': desc_bt, - 'channel_index': idx, - 'frequency': "{}GHz".format(normal_f), - 'polarization': normal_p, - 'dependencies': ('BT', 
'Sfc_type'), - 'coordinates': ['longitude', 'latitude'] + "file_type": self.filetype_info["file_type"], + "name": new_name, + "description": desc_bt, + "channel_index": idx, + "frequency": "{}GHz".format(normal_f), + "polarization": normal_p, + "dependencies": ("BT", "Sfc_type"), + "coordinates": ["longitude", "latitude"] }) yield True, ds_info def _get_ds_info_for_data_arr(self, var_name): ds_info = { - 'file_type': self.filetype_info['file_type'], - 'name': var_name, - 'coordinates': ["longitude", "latitude"] + "file_type": self.filetype_info["file_type"], + "name": var_name, + "coordinates": ["longitude", "latitude"] } return ds_info @@ -524,7 +524,7 @@ def __getitem__(self, item): data = self.nc[item] # 'Freq' dimension causes issues in other processing - if 'Freq' in data.coords: - data = data.drop_vars('Freq') + if "Freq" in data.coords: + data = data.drop_vars("Freq") return data diff --git a/satpy/readers/modis_l1b.py b/satpy/readers/modis_l1b.py index 5f0627b95d..d2bb1c9661 100644 --- a/satpy/readers/modis_l1b.py +++ b/satpy/readers/modis_l1b.py @@ -92,13 +92,13 @@ class HDFEOSBandReader(HDFEOSBaseFileReader): "H": 500} res_to_possible_variable_names = { - 1000: ['EV_250_Aggr1km_RefSB', - 'EV_500_Aggr1km_RefSB', - 'EV_1KM_RefSB', - 'EV_1KM_Emissive'], - 500: ['EV_250_Aggr500_RefSB', - 'EV_500_RefSB'], - 250: ['EV_250_RefSB'], + 1000: ["EV_250_Aggr1km_RefSB", + "EV_500_Aggr1km_RefSB", + "EV_1KM_RefSB", + "EV_1KM_Emissive"], + 500: ["EV_250_Aggr500_RefSB", + "EV_500_RefSB"], + 250: ["EV_250_RefSB"], } def __init__(self, filename, filename_info, filetype_info, mask_saturated=True, **kwargs): @@ -106,21 +106,21 @@ def __init__(self, filename, filename_info, filetype_info, mask_saturated=True, super().__init__(filename, filename_info, filetype_info, **kwargs) self._mask_saturated = mask_saturated - ds = self.metadata['INVENTORYMETADATA'][ - 'COLLECTIONDESCRIPTIONCLASS']['SHORTNAME']['VALUE'] + ds = self.metadata["INVENTORYMETADATA"][ + "COLLECTIONDESCRIPTIONCLASS"]["SHORTNAME"]["VALUE"] self.resolution = self.res[ds[-3]] def get_dataset(self, key, info): """Read data from file and return the corresponding projectables.""" - if self.resolution != key['resolution']: + if self.resolution != key["resolution"]: return var_name, band_index = self._get_band_variable_name_and_index(key["name"]) subdata = self.sd.select(var_name) var_attrs = subdata.attributes() uncertainty = self.sd.select(var_name + "_Uncert_Indexes") array = xr.DataArray(from_sds(subdata, chunks=CHUNK_SIZE)[band_index, :, :], - dims=['y', 'x']).astype(np.float32) - valid_range = var_attrs['valid_range'] + dims=["y", "x"]).astype(np.float32) + valid_range = var_attrs["valid_range"] valid_min = np.float32(valid_range[0]) valid_max = np.float32(valid_range[1]) if not self._mask_saturated: @@ -219,24 +219,24 @@ def _mask_uncertain_pixels(self, array, uncertainty, band_index): return array def _calibrate_data(self, key, info, array, var_attrs, index): - if key['calibration'] == 'brightness_temperature': - projectable = calibrate_bt(array, var_attrs, index, key['name']) - info.setdefault('units', 'K') - info.setdefault('standard_name', 'toa_brightness_temperature') - elif key['calibration'] == 'reflectance': + if key["calibration"] == "brightness_temperature": + projectable = calibrate_bt(array, var_attrs, index, key["name"]) + info.setdefault("units", "K") + info.setdefault("standard_name", "toa_brightness_temperature") + elif key["calibration"] == "reflectance": projectable = calibrate_refl(array, var_attrs, index) - 
info.setdefault('units', '%') - info.setdefault('standard_name', - 'toa_bidirectional_reflectance') - elif key['calibration'] == 'radiance': + info.setdefault("units", "%") + info.setdefault("standard_name", + "toa_bidirectional_reflectance") + elif key["calibration"] == "radiance": projectable = calibrate_radiance(array, var_attrs, index) - info.setdefault('units', var_attrs.get('radiance_units')) - info.setdefault('standard_name', - 'toa_outgoing_radiance_per_unit_wavelength') - elif key['calibration'] == 'counts': + info.setdefault("units", var_attrs.get("radiance_units")) + info.setdefault("standard_name", + "toa_outgoing_radiance_per_unit_wavelength") + elif key["calibration"] == "counts": projectable = calibrate_counts(array, var_attrs, index) - info.setdefault('units', 'counts') - info.setdefault('standard_name', 'counts') # made up + info.setdefault("units", "counts") + info.setdefault("standard_name", "counts") # made up else: raise ValueError("Unknown calibration for " "key: {}".format(key)) @@ -254,7 +254,7 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): def get_dataset(self, key, info): """Get the dataset.""" - if key['name'] in HDFEOSGeoReader.DATASET_NAMES: + if key["name"] in HDFEOSGeoReader.DATASET_NAMES: return HDFEOSGeoReader.get_dataset(self, key, info) return HDFEOSBandReader.get_dataset(self, key, info) diff --git a/satpy/readers/modis_l2.py b/satpy/readers/modis_l2.py index 1a7fc3ae38..1b526a07a2 100644 --- a/satpy/readers/modis_l2.py +++ b/satpy/readers/modis_l2.py @@ -113,30 +113,30 @@ def _select_hdf_dataset(self, hdf_dataset_name, byte_dimension): dataset = self.sd.select(hdf_dataset_name) dask_arr = from_sds(dataset, chunks=CHUNK_SIZE) attrs = dataset.attributes() - dims = ['y', 'x'] + dims = ["y", "x"] if byte_dimension == 0: - dims = ['i', 'y', 'x'] + dims = ["i", "y", "x"] dask_arr = dask_arr.astype(np.uint8) elif byte_dimension == 2: - dims = ['y', 'x', 'i'] + dims = ["y", "x", "i"] dask_arr = dask_arr.astype(np.uint8) dataset = xr.DataArray(dask_arr, dims=dims, attrs=attrs) - if 'i' in dataset.dims: + if "i" in dataset.dims: # Reorder dimensions for consistency - dataset = dataset.transpose('i', 'y', 'x') + dataset = dataset.transpose("i", "y", "x") return dataset def get_dataset(self, dataset_id, dataset_info): """Get DataArray for specified dataset.""" - dataset_name = dataset_id['name'] + dataset_name = dataset_id["name"] if self.is_geo_loadable_dataset(dataset_name): return HDFEOSGeoReader.get_dataset(self, dataset_id, dataset_info) - dataset_name_in_file = dataset_info['file_key'] + dataset_name_in_file = dataset_info["file_key"] if self.is_imapp_mask_byte1: - dataset_name_in_file = dataset_info.get('imapp_file_key', dataset_name_in_file) + dataset_name_in_file = dataset_info.get("imapp_file_key", dataset_name_in_file) # The dataset asked correspond to a given set of bits of the HDF EOS dataset - if 'byte' in dataset_info and 'byte_dimension' in dataset_info: + if "byte" in dataset_info and "byte_dimension" in dataset_info: dataset = self._extract_and_mask_category_dataset(dataset_id, dataset_info, dataset_name_in_file) else: # No byte manipulation required @@ -147,39 +147,39 @@ def get_dataset(self, dataset_id, dataset_info): def _extract_and_mask_category_dataset(self, dataset_id, dataset_info, var_name): # what dimension is per-byte - byte_dimension = None if self.is_imapp_mask_byte1 else dataset_info['byte_dimension'] + byte_dimension = None if self.is_imapp_mask_byte1 else dataset_info["byte_dimension"] dataset = 
self._select_hdf_dataset(var_name, byte_dimension) # category products always have factor=1/offset=0 so don't apply them # also remove them so they don't screw up future satpy processing - dataset.attrs.pop('scale_factor', None) - dataset.attrs.pop('add_offset', None) + dataset.attrs.pop("scale_factor", None) + dataset.attrs.pop("add_offset", None) # Don't do this byte work if we are using the IMAPP mask_byte1 file if self.is_imapp_mask_byte1: return dataset dataset = _extract_byte_mask(dataset, - dataset_info['byte'], - dataset_info['bit_start'], - dataset_info['bit_count']) + dataset_info["byte"], + dataset_info["bit_start"], + dataset_info["bit_count"]) dataset = self._mask_with_quality_assurance_if_needed(dataset, dataset_info, dataset_id) return dataset def _mask_with_quality_assurance_if_needed(self, dataset, dataset_info, dataset_id): - if not dataset_info.get('quality_assurance', False): + if not dataset_info.get("quality_assurance", False): return dataset # Get quality assurance dataset recursively quality_assurance_dataset_id = dataset_id.from_dict( - dict(name='quality_assurance', resolution=1000) + dict(name="quality_assurance", resolution=1000) ) quality_assurance_dataset_info = { - 'name': 'quality_assurance', - 'resolution': 1000, - 'byte_dimension': 2, - 'byte': 0, - 'bit_start': 0, - 'bit_count': 1, - 'file_key': 'Quality_Assurance' + "name": "quality_assurance", + "resolution": 1000, + "byte_dimension": 2, + "byte": 0, + "bit_start": 0, + "bit_count": 1, + "file_key": "Quality_Assurance" } quality_assurance = self.get_dataset( quality_assurance_dataset_id, quality_assurance_dataset_info diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index 517e096db8..1131e40a96 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -49,10 +49,10 @@ logger = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() -PLATFORMS = {'S2A': "Sentinel-2A", - 'S2B': "Sentinel-2B", - 'S2C': "Sentinel-2C", - 'S2D': "Sentinel-2D"} +PLATFORMS = {"S2A": "Sentinel-2A", + "S2B": "Sentinel-2B", + "S2C": "Sentinel-2C", + "S2D": "Sentinel-2D"} class SAFEMSIL1C(BaseFileHandler): @@ -63,23 +63,23 @@ def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, mask_s super(SAFEMSIL1C, self).__init__(filename, filename_info, filetype_info) del mask_saturated - self._start_time = filename_info['observation_time'] - self._end_time = filename_info['observation_time'] - self._channel = filename_info['band_name'] + self._start_time = filename_info["observation_time"] + self._end_time = filename_info["observation_time"] + self._channel = filename_info["band_name"] self._tile_mda = tile_mda self._mda = mda - self.platform_name = PLATFORMS[filename_info['fmission_id']] + self.platform_name = PLATFORMS[filename_info["fmission_id"]] def get_dataset(self, key, info): """Load a dataset.""" - if self._channel != key['name']: + if self._channel != key["name"]: return - logger.debug('Reading %s.', key['name']) + logger.debug("Reading %s.", key["name"]) proj = self._read_from_file(key) proj.attrs = info.copy() - proj.attrs['units'] = '%' - proj.attrs['platform_name'] = self.platform_name + proj.attrs["units"] = "%" + proj.attrs["platform_name"] = self.platform_name return proj def _read_from_file(self, key): @@ -102,7 +102,7 @@ def end_time(self): def get_area_def(self, dsid): """Get the area def.""" - if self._channel != dsid['name']: + if self._channel != dsid["name"]: return return self._tile_mda.get_area_def(dsid) @@ -113,11 +113,11 @@ class 
SAFEMSIXMLMetadata(BaseFileHandler): def __init__(self, filename, filename_info, filetype_info, mask_saturated=True): """Init the reader.""" super().__init__(filename, filename_info, filetype_info) - self._start_time = filename_info['observation_time'] - self._end_time = filename_info['observation_time'] + self._start_time = filename_info["observation_time"] + self._end_time = filename_info["observation_time"] self.root = ET.parse(self.filename) - self.tile = filename_info['dtile_number'] - self.platform_name = PLATFORMS[filename_info['fmission_id']] + self.tile = filename_info["dtile_number"] + self.platform_name = PLATFORMS[filename_info["fmission_id"]] self.mask_saturated = mask_saturated import bottleneck # noqa import geotiepoints # noqa @@ -138,7 +138,7 @@ class SAFEMSIMDXML(SAFEMSIXMLMetadata): def calibrate_to_reflectances(self, data, band_name): """Calibrate *data* using the radiometric information for the metadata.""" - quantification = int(self.root.find('.//QUANTIFICATION_VALUE').text) + quantification = int(self.root.find(".//QUANTIFICATION_VALUE").text) data = self._sanitize_data(data) return (data + self.band_offset(band_name)) / quantification * 100 @@ -163,14 +163,14 @@ def _band_index(self, band): @cached_property def band_indices(self): """Get the band indices from the metadata.""" - spectral_info = self.root.findall('.//Spectral_Information') + spectral_info = self.root.findall(".//Spectral_Information") band_indices = {spec.attrib["physicalBand"]: int(spec.attrib["bandId"]) for spec in spectral_info} return band_indices @cached_property def band_offsets(self): """Get the band offsets from the metadata.""" - offsets = self.root.find('.//Radiometric_Offset_List') + offsets = self.root.find(".//Radiometric_Offset_List") if offsets is not None: band_offsets = {int(off.attrib["band_id"]): float(off.text) for off in offsets} else: @@ -180,7 +180,7 @@ def band_offsets(self): @cached_property def special_values(self): """Get the special values from the metadata.""" - special_values = self.root.findall('.//Special_Values') + special_values = self.root.findall(".//Special_Values") special_values_dict = {value[0].text: float(value[1].text) for value in special_values} return special_values_dict @@ -214,11 +214,11 @@ def physical_gains(self): def _fill_swath_edges(angles): """Fill gaps at edges of swath.""" - darr = xr.DataArray(angles, dims=['y', 'x']) - darr = darr.bfill('x') - darr = darr.ffill('x') - darr = darr.bfill('y') - darr = darr.ffill('y') + darr = xr.DataArray(angles, dims=["y", "x"]) + darr = darr.bfill("x") + darr = darr.ffill("x") + darr = darr.bfill("y") + darr = darr.ffill("y") angles = darr.data return angles @@ -229,12 +229,12 @@ class SAFEMSITileMDXML(SAFEMSIXMLMetadata): def __init__(self, filename, filename_info, filetype_info, mask_saturated=True): """Init the reader.""" super().__init__(filename, filename_info, filetype_info, mask_saturated) - self.geocoding = self.root.find('.//Tile_Geocoding') + self.geocoding = self.root.find(".//Tile_Geocoding") def get_area_def(self, dsid): """Get the area definition of the dataset.""" - area_extent = self._area_extent(dsid['resolution']) - cols, rows = self._shape(dsid['resolution']) + area_extent = self._area_extent(dsid["resolution"]) + cols, rows = self._shape(dsid["resolution"]) area = geometry.AreaDefinition( self.tile, "On-the-fly area", @@ -249,16 +249,16 @@ def get_area_def(self, dsid): def projection(self): """Get the geographic projection.""" from pyproj import CRS - epsg = 
self.geocoding.find('HORIZONTAL_CS_CODE').text + epsg = self.geocoding.find("HORIZONTAL_CS_CODE").text return CRS(epsg) def _area_extent(self, resolution): cols, rows = self._shape(resolution) geoposition = self.geocoding.find('Geoposition[@resolution="' + str(resolution) + '"]') - ulx = float(geoposition.find('ULX').text) - uly = float(geoposition.find('ULY').text) - xdim = float(geoposition.find('XDIM').text) - ydim = float(geoposition.find('YDIM').text) + ulx = float(geoposition.find("ULX").text) + uly = float(geoposition.find("ULY").text) + xdim = float(geoposition.find("XDIM").text) + ydim = float(geoposition.find("YDIM").text) area_extent = (ulx, uly + rows * ydim, ulx + cols * xdim, uly) return area_extent @@ -292,30 +292,30 @@ def interpolate_angles(self, angles, resolution): def _get_coarse_dataset(self, key, info): """Get the coarse dataset referred to by `key` from the XML data.""" - angles = self.root.find('.//Tile_Angles') - if key['name'] in ['solar_zenith_angle', 'solar_azimuth_angle']: + angles = self.root.find(".//Tile_Angles") + if key["name"] in ["solar_zenith_angle", "solar_azimuth_angle"]: angles = self._get_solar_angles(angles, info) - elif key['name'] in ['satellite_zenith_angle', 'satellite_azimuth_angle']: + elif key["name"] in ["satellite_zenith_angle", "satellite_azimuth_angle"]: angles = self._get_satellite_angles(angles, info) else: angles = None return angles def _get_solar_angles(self, angles, info): - angles = self._get_values_from_tag(angles, info['xml_tag']) + angles = self._get_values_from_tag(angles, info["xml_tag"]) return angles @staticmethod def _get_values_from_tag(xml_tree, xml_tag): - elts = xml_tree.findall(xml_tag + '/Values_List/VALUES') + elts = xml_tree.findall(xml_tag + "/Values_List/VALUES") return np.array([[val for val in elt.text.split()] for elt in elts], dtype=np.float64) def _get_satellite_angles(self, angles, info): arrays = [] - elts = angles.findall(info['xml_tag'] + '[@bandId="1"]') + elts = angles.findall(info["xml_tag"] + '[@bandId="1"]') for elt in elts: - arrays.append(self._get_values_from_tag(elt, info['xml_item'])) + arrays.append(self._get_values_from_tag(elt, info["xml_item"])) angles = np.nanmean(np.dstack(arrays), -1) return angles @@ -327,10 +327,10 @@ def get_dataset(self, key, info): angles = _fill_swath_edges(angles) - res = self.interpolate_angles(angles, key['resolution']) + res = self.interpolate_angles(angles, key["resolution"]) - proj = xr.DataArray(res, dims=['y', 'x']) + proj = xr.DataArray(res, dims=["y", "x"]) proj.attrs = info.copy() - proj.attrs['units'] = 'degrees' - proj.attrs['platform_name'] = self.platform_name + proj.attrs["units"] = "degrees" + proj.attrs["platform_name"] = self.platform_name return proj diff --git a/satpy/readers/msu_gsa_l1b.py b/satpy/readers/msu_gsa_l1b.py index df06239b43..c4e45aa333 100644 --- a/satpy/readers/msu_gsa_l1b.py +++ b/satpy/readers/msu_gsa_l1b.py @@ -37,7 +37,7 @@ class MSUGSAFileHandler(HDF5FileHandler): @property def start_time(self): """Time for timeslot scan start.""" - dtstr = self['/attr/timestamp_without_timezone'] + dtstr = self["/attr/timestamp_without_timezone"] return datetime.strptime(dtstr, "%Y-%m-%dT%H:%M:%S") @property @@ -47,65 +47,65 @@ def satellite_altitude(self): There is no documentation but this appears to be height above surface in meters.
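The _area_extent hunk above builds a tile's extent from its upper-left corner and signed pixel dimensions; YDIM is stored as a negative step, so uly + rows * ydim yields the lower edge. A sketch with assumed values for a 10 m Sentinel-2 tile of 10980 x 10980 pixels:

# All numbers are assumed for illustration, not read from a real tile.
ulx, uly = 399960.0, 1900020.0   # upper-left corner in projection meters
xdim, ydim = 10.0, -10.0         # pixel size, with the negative y step
cols = rows = 10980
area_extent = (ulx, uly + rows * ydim, ulx + cols * xdim, uly)
print(area_extent)               # (399960.0, 1790220.0, 509760.0, 1900020.0)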
""" - return float(self['/attr/satellite_observation_point_height']) + return float(self["/attr/satellite_observation_point_height"]) @property def satellite_latitude(self): """Satellite latitude at time of scan.""" - return float(self['/attr/satellite_observation_point_latitude']) + return float(self["/attr/satellite_observation_point_latitude"]) @property def satellite_longitude(self): """Satellite longitude at time of scan.""" - return float(self['/attr/satellite_observation_point_longitude']) + return float(self["/attr/satellite_observation_point_longitude"]) @property def sensor_name(self): """Sensor name is hardcoded.""" - sensor = 'msu_gsa' + sensor = "msu_gsa" return sensor @property def platform_name(self): """Platform name is also hardcoded.""" - platform = 'Arctica-M-N1' + platform = "Arctica-M-N1" return platform @staticmethod def _apply_scale_offset(in_data): """Apply the scale and offset to data.""" - scl = in_data.attrs['scale'] - off = in_data.attrs['offset'] + scl = in_data.attrs["scale"] + off = in_data.attrs["offset"] return in_data * scl + off def get_dataset(self, dataset_id, ds_info): """Load data variable and metadata and calibrate if needed.""" - file_key = ds_info.get('file_key', dataset_id['name']) + file_key = ds_info.get("file_key", dataset_id["name"]) data = self[file_key] attrs = data.attrs.copy() # avoid contaminating other band loading attrs.update(ds_info) # The fill value also needs to be applied - fill_val = attrs.pop('fill_value') + fill_val = attrs.pop("fill_value") data = data.where(data != fill_val, np.nan) # Data has a scale and offset that we must apply data = self._apply_scale_offset(data) # Data is given as radiance values, we must convert if we want reflectance - if dataset_id.get('calibration') == "reflectance": - solconst = float(attrs.pop('F_solar_constant')) + if dataset_id.get("calibration") == "reflectance": + solconst = float(attrs.pop("F_solar_constant")) data = np.pi * data / solconst # Satpy expects reflectance values in 0-100 range data = data * 100. 
data.attrs = attrs data.attrs.update({ - 'platform_name': self.platform_name, - 'sensor': self.sensor_name, - 'sat_altitude': self.satellite_altitude, - 'sat_latitude': self.satellite_latitude, - 'sat_longitude': self.satellite_longitude, + "platform_name": self.platform_name, + "sensor": self.sensor_name, + "sat_altitude": self.satellite_altitude, + "sat_latitude": self.satellite_latitude, + "sat_longitude": self.satellite_longitude, }) return data diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index 121e7d7d1b..979483513a 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -173,16 +173,16 @@ MVIRI_FIELD_OF_VIEW = 18.0 """[Handbook] section 5.3.2.1.""" -CHANNELS = ['VIS', 'WV', 'IR'] +CHANNELS = ["VIS", "WV", "IR"] ANGLES = [ - 'solar_zenith_angle', - 'solar_azimuth_angle', - 'satellite_zenith_angle', - 'satellite_azimuth_angle' + "solar_zenith_angle", + "solar_azimuth_angle", + "satellite_zenith_angle", + "satellite_azimuth_angle" ] OTHER_REFLECTANCES = [ - 'u_independent_toa_bidirectional_reflectance', - 'u_structured_toa_bidirectional_reflectance' + "u_independent_toa_bidirectional_reflectance", + "u_structured_toa_bidirectional_reflectance" ] HIGH_RESOL = 2250 @@ -200,19 +200,19 @@ def __init__(self, coefs): def calibrate(self, counts, calibration): """Calibrate IR/WV counts to the given calibration.""" - if calibration == 'counts': + if calibration == "counts": return counts - elif calibration in ('radiance', 'brightness_temperature'): + elif calibration in ("radiance", "brightness_temperature"): return self._calibrate_rad_bt(counts, calibration) else: raise KeyError( - 'Invalid IR/WV calibration: {}'.format(calibration.name) + "Invalid IR/WV calibration: {}".format(calibration.name) ) def _calibrate_rad_bt(self, counts, calibration): """Calibrate counts to radiance or brightness temperature.""" rad = self._counts_to_radiance(counts) - if calibration == 'radiance': + if calibration == "radiance": return rad bt = self._radiance_to_brightness_temperature(rad) return bt @@ -222,7 +222,7 @@ def _counts_to_radiance(self, counts): Reference: [PUG], equations (4.1) and (4.2). """ - rad = self.coefs['a'] + self.coefs['b'] * counts + rad = self.coefs["a"] + self.coefs["b"] * counts return rad.where(rad > 0, np.float32(np.nan)) def _radiance_to_brightness_temperature(self, rad): @@ -230,7 +230,7 @@ def _radiance_to_brightness_temperature(self, rad): Reference: [PUG], equations (5.1) and (5.2). 
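Combining the two steps above, a structure-only sketch of the IR/WV calibration chain; the coefficients are read from the FCDR file in the reader, and the masking mirrors the where() calls shown in the hunks:

import numpy as np

def counts_to_bt(counts, a, b, bt_a, bt_b):
    """Sketch only; a, b, bt_a and bt_b come from the file, not from here."""
    rad = a + b * counts                  # [PUG] equations (4.1) and (4.2)
    rad = np.where(rad > 0, rad, np.nan)  # negative radiances are invalid
    bt = bt_b / (np.log(rad) - bt_a)      # [PUG] equations (5.1) and (5.2)
    return np.where(bt > 0, bt, np.nan)   # keep physical temperatures only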
""" - bt = self.coefs['bt_b'] / (np.log(rad) - self.coefs['bt_a']) + bt = self.coefs["bt_b"] / (np.log(rad) - self.coefs["bt_a"]) return bt.where(bt > 0, np.float32(np.nan)) @@ -252,19 +252,19 @@ def __init__(self, coefs, solar_zenith_angle=None): def calibrate(self, counts, calibration): """Calibrate VIS counts.""" - if calibration == 'counts': + if calibration == "counts": return counts - elif calibration in ('radiance', 'reflectance'): + elif calibration in ("radiance", "reflectance"): return self._calibrate_rad_refl(counts, calibration) else: raise KeyError( - 'Invalid VIS calibration: {}'.format(calibration.name) + "Invalid VIS calibration: {}".format(calibration.name) ) def _calibrate_rad_refl(self, counts, calibration): """Calibrate counts to radiance or reflectance.""" rad = self._counts_to_radiance(counts) - if calibration == 'radiance': + if calibration == "radiance": return rad refl = self._radiance_to_reflectance(rad) refl = self.update_refl_attrs(refl) @@ -275,11 +275,11 @@ def _counts_to_radiance(self, counts): Reference: [PUG], equations (7) and (8). """ - years_since_launch = self.coefs['years_since_launch'] - a_cf = (self.coefs['a0'] + - self.coefs['a1'] * years_since_launch + - self.coefs['a2'] * years_since_launch ** 2) - mean_count_space_vis = self.coefs['mean_count_space'] + years_since_launch = self.coefs["years_since_launch"] + a_cf = (self.coefs["a0"] + + self.coefs["a1"] * years_since_launch + + self.coefs["a2"] * years_since_launch ** 2) + mean_count_space_vis = self.coefs["mean_count_space"] rad = (counts - mean_count_space_vis) * a_cf return rad.where(rad > 0, np.float32(np.nan)) @@ -298,17 +298,17 @@ def _radiance_to_reflectance(self, rad): ) # direct illumination only cos_sza = np.cos(np.deg2rad(sza)) refl = ( - (np.pi * self.coefs['distance_sun_earth'] ** 2) / - (self.coefs['solar_irradiance'] * cos_sza) * + (np.pi * self.coefs["distance_sun_earth"] ** 2) / + (self.coefs["solar_irradiance"] * cos_sza) * rad ) return self.refl_factor_to_percent(refl) def update_refl_attrs(self, refl): """Update attributes of reflectance datasets.""" - refl.attrs['sun_earth_distance_correction_applied'] = True - refl.attrs['sun_earth_distance_correction_factor'] = self.coefs[ - 'distance_sun_earth'].item() + refl.attrs["sun_earth_distance_correction_applied"] = True + refl.attrs["sun_earth_distance_correction_factor"] = self.coefs[ + "distance_sun_earth"].item() return refl @staticmethod @@ -328,24 +328,24 @@ def get_area_def(self, im_size, projection_longitude): def _get_proj_params(self, im_size, projection_longitude): """Get projection parameters for the given settings.""" - area_name = 'geos_mviri_{0}x{0}'.format(im_size) + area_name = "geos_mviri_{0}x{0}".format(im_size) lfac, cfac, loff, coff = self._get_factors_offsets(im_size) return { - 'ssp_lon': projection_longitude, - 'a': EQUATOR_RADIUS, - 'b': POLE_RADIUS, - 'h': ALTITUDE, - 'units': 'm', - 'loff': loff - im_size, - 'coff': coff, - 'lfac': -lfac, - 'cfac': -cfac, - 'nlines': im_size, - 'ncols': im_size, - 'scandir': 'S2N', # Reference: [PUG] section 2. - 'p_id': area_name, - 'a_name': area_name, - 'a_desc': 'MVIRI Geostationary Projection' + "ssp_lon": projection_longitude, + "a": EQUATOR_RADIUS, + "b": POLE_RADIUS, + "h": ALTITUDE, + "units": "m", + "loff": loff - im_size, + "coff": coff, + "lfac": -lfac, + "cfac": -cfac, + "nlines": im_size, + "ncols": im_size, + "scandir": "S2N", # Reference: [PUG] section 2. 
+ "p_id": area_name, + "a_name": area_name, + "a_desc": "MVIRI Geostationary Projection" } def _get_factors_offsets(self, im_size): @@ -382,7 +382,7 @@ def interp_tiepoints(ds, target_x, target_y): # No tiepoint coordinates specified in the files. Use dimensions # to calculate tiepoint sampling and assign tiepoint coordinates # accordingly. - sampling = target_x.size // ds.coords['x'].size + sampling = target_x.size // ds.coords["x"].size ds = ds.assign_coords(x=target_x.values[::sampling], y=target_y.values[::sampling]) @@ -406,11 +406,11 @@ def interp_acq_time(time2d, target_y): Mean scanline acquisition timestamps """ # Compute mean timestamp per scanline - time = time2d.mean(dim='x') + time = time2d.mean(dim="x") # If required, repeat timestamps in y-direction to obtain higher # resolution - y = time.coords['y'].values + y = time.coords["y"].values if y.size < target_y.size: reps = target_y.size // y.size y_rep = np.repeat(y, reps) @@ -476,15 +476,15 @@ def __getitem__(self, item): def _should_dims_be_renamed(self, ds): """Determine whether dataset dimensions need to be renamed.""" - return 'y_ir_wv' in ds.dims or 'y_tie' in ds.dims + return "y_ir_wv" in ds.dims or "y_tie" in ds.dims def _rename_dims(self, ds): """Rename dataset dimensions to match satpy's expectations.""" new_names = { - 'y_ir_wv': 'y', - 'x_ir_wv': 'x', - 'y_tie': 'y', - 'x_tie': 'x' + "y_ir_wv": "y", + "x_ir_wv": "x", + "y_tie": "y", + "x_tie": "x" } for old_name, new_name in new_names.items(): if old_name in ds.dims: @@ -492,7 +492,7 @@ def _rename_dims(self, ds): return ds def _coordinates_not_assigned(self, ds): - return 'y' in ds.dims and 'y' not in ds.coords + return "y" in ds.dims and "y" not in ds.coords def _reassign_coords(self, ds): """Re-assign coordinates. @@ -500,14 +500,14 @@ def _reassign_coords(self, ds): For some reason xarray doesn't assign coordinates to all high resolution data variables. """ - return ds.assign_coords({'y': self.nc.coords['y'], - 'x': self.nc.coords['x']}) + return ds.assign_coords({"y": self.nc.coords["y"], + "x": self.nc.coords["x"]}) def _cleanup_attrs(self, ds): """Cleanup dataset attributes.""" # Remove ancillary_variables attribute to avoid downstream # satpy warnings. - ds.attrs.pop('ancillary_variables', None) + ds.attrs.pop("ancillary_variables", None) def get_time(self): """Get time coordinate. @@ -515,29 +515,29 @@ def get_time(self): Variable is sometimes named "time" and sometimes "time_ir_wv". 
""" try: - return self['time_ir_wv'] + return self["time_ir_wv"] except KeyError: - return self['time'] + return self["time"] def get_xy_coords(self, resolution): """Get x and y coordinates for the given resolution.""" if is_high_resol(resolution): - return self.nc.coords['x'], self.nc.coords['y'] - return self.nc.coords['x_ir_wv'], self.nc.coords['x_ir_wv'] + return self.nc.coords["x"], self.nc.coords["y"] + return self.nc.coords["x_ir_wv"], self.nc.coords["x_ir_wv"] def get_image_size(self, resolution): """Get image size for the given resolution.""" if is_high_resol(resolution): - return self.nc.coords['y'].size - return self.nc.coords['y_ir_wv'].size + return self.nc.coords["y"].size + return self.nc.coords["y_ir_wv"].size class FiduceoMviriBase(BaseFileHandler): """Baseclass for FIDUCEO MVIRI file handlers.""" nc_keys = { - 'WV': 'count_wv', - 'IR': 'count_ir' + "WV": "count_wv", + "IR": "count_ir" } def __init__(self, filename, filename_info, filetype_info, @@ -555,16 +555,16 @@ def __init__(self, filename, filename_info, filetype_info, self.mask_bad_quality = mask_bad_quality nc_raw = xr.open_dataset( filename, - chunks={'x': CHUNK_SIZE, - 'y': CHUNK_SIZE, - 'x_ir_wv': CHUNK_SIZE, - 'y_ir_wv': CHUNK_SIZE} + chunks={"x": CHUNK_SIZE, + "y": CHUNK_SIZE, + "x_ir_wv": CHUNK_SIZE, + "y_ir_wv": CHUNK_SIZE} ) self.nc = DatasetWrapper(nc_raw) # Projection longitude is not provided in the file, read it from the # filename. - self.projection_longitude = float(filename_info['projection_longitude']) + self.projection_longitude = float(filename_info["projection_longitude"]) self.calib_coefs = self._get_calib_coefs() self._get_angles = functools.lru_cache(maxsize=8)( @@ -576,12 +576,12 @@ def __init__(self, filename, filename_info, filetype_info, def get_dataset(self, dataset_id, dataset_info): """Get the dataset.""" - name = dataset_id['name'] - resolution = dataset_id['resolution'] + name = dataset_id["name"] + resolution = dataset_id["resolution"] if name in ANGLES: ds = self._get_angles(name, resolution) elif name in CHANNELS: - ds = self._get_channel(name, resolution, dataset_id['calibration']) + ds = self._get_channel(name, resolution, dataset_id["calibration"]) else: ds = self._get_other_dataset(name) ds = self._cleanup_coords(ds) @@ -590,7 +590,7 @@ def get_dataset(self, dataset_id, dataset_info): def get_area_def(self, dataset_id): """Get area definition of the given dataset.""" - im_size = self.nc.get_image_size(dataset_id['resolution']) + im_size = self.nc.get_image_size(dataset_id["resolution"]) nav = Navigator() return nav.get_area_def( im_size=im_size, @@ -605,13 +605,13 @@ def _get_channel(self, name, resolution, calibration): channel=name, calibration=calibration ) - if name == 'VIS': - qc = VisQualityControl(self.nc['quality_pixel_bitmask']) + if name == "VIS": + qc = VisQualityControl(self.nc["quality_pixel_bitmask"]) if self.mask_bad_quality: ds = qc.mask(ds) else: qc.check() - ds['acq_time'] = self._get_acq_time(resolution) + ds["acq_time"] = self._get_acq_time(resolution) return ds def _get_angles_uncached(self, name, resolution): @@ -638,10 +638,10 @@ def _get_other_dataset(self, name): def _update_attrs(self, ds, info): """Update dataset attributes.""" ds.attrs.update(info) - ds.attrs.update({'platform': self.filename_info['platform'], - 'sensor': self.filename_info['sensor']}) - ds.attrs['raw_metadata'] = self.nc.attrs - ds.attrs['orbital_parameters'] = self._get_orbital_parameters() + ds.attrs.update({"platform": self.filename_info["platform"], + "sensor": 
self.filename_info["sensor"]}) + ds.attrs["raw_metadata"] = self.nc.attrs + ds.attrs["orbital_parameters"] = self._get_orbital_parameters() def _cleanup_coords(self, ds): """Cleanup dataset coordinates. @@ -651,11 +651,11 @@ def _cleanup_coords(self, ds): can assign projection coordinates upstream (based on the area definition). """ - return ds.drop_vars(['y', 'x']) + return ds.drop_vars(["y", "x"]) def _calibrate(self, ds, channel, calibration): """Calibrate the given dataset.""" - if channel == 'VIS': + if channel == "VIS": return self._calibrate_vis(ds, channel, calibration) calib = IRWVCalibrator(self.calib_coefs[channel]) return calib.calibrate(ds, calibration) @@ -671,21 +671,21 @@ def _get_calib_coefs(self): Note: Only coefficients present in both file types. """ coefs = { - 'VIS': { - 'distance_sun_earth': self.nc['distance_sun_earth'], - 'solar_irradiance': self.nc['solar_irradiance_vis'] + "VIS": { + "distance_sun_earth": self.nc["distance_sun_earth"], + "solar_irradiance": self.nc["solar_irradiance_vis"] }, - 'IR': { - 'a': self.nc['a_ir'], - 'b': self.nc['b_ir'], - 'bt_a': self.nc['bt_a_ir'], - 'bt_b': self.nc['bt_b_ir'] + "IR": { + "a": self.nc["a_ir"], + "b": self.nc["b_ir"], + "bt_a": self.nc["bt_a_ir"], + "bt_b": self.nc["bt_b_ir"] }, - 'WV': { - 'a': self.nc['a_wv'], - 'b': self.nc['b_wv'], - 'bt_a': self.nc['bt_a_wv'], - 'bt_b': self.nc['bt_b_wv'] + "WV": { + "a": self.nc["a_wv"], + "b": self.nc["b_wv"], + "bt_a": self.nc["bt_a_wv"], + "bt_b": self.nc["bt_b_wv"] }, } @@ -710,15 +710,15 @@ def _get_acq_time_uncached(self, resolution): def _get_orbital_parameters(self): """Get the orbital parameters.""" orbital_parameters = { - 'projection_longitude': self.projection_longitude, - 'projection_latitude': 0.0, - 'projection_altitude': ALTITUDE + "projection_longitude": self.projection_longitude, + "projection_latitude": 0.0, + "projection_altitude": ALTITUDE } ssp_lon, ssp_lat = self._get_ssp_lonlat() if not np.isnan(ssp_lon) and not np.isnan(ssp_lat): orbital_parameters.update({ - 'satellite_actual_longitude': ssp_lon, - 'satellite_actual_latitude': ssp_lat, + "satellite_actual_longitude": ssp_lon, + "satellite_actual_latitude": ssp_lat, # altitude not available }) return orbital_parameters @@ -733,13 +733,13 @@ def _get_ssp_lonlat(self): Returns: Subsatellite longitude and latitude """ - ssp_lon = self._get_ssp('longitude') - ssp_lat = self._get_ssp('latitude') + ssp_lon = self._get_ssp("longitude") + ssp_lat = self._get_ssp("latitude") return ssp_lon, ssp_lat def _get_ssp(self, coord): - key_start = 'sub_satellite_{}_start'.format(coord) - key_end = 'sub_satellite_{}_end'.format(coord) + key_start = "sub_satellite_{}_start".format(coord) + key_end = "sub_satellite_{}_end".format(coord) try: sub_lonlat = np.nanmean( [self.nc[key_start].values, @@ -755,42 +755,42 @@ class FiduceoMviriEasyFcdrFileHandler(FiduceoMviriBase): """File handler for FIDUCEO MVIRI Easy FCDR.""" nc_keys = FiduceoMviriBase.nc_keys.copy() - nc_keys['VIS'] = 'toa_bidirectional_reflectance_vis' + nc_keys["VIS"] = "toa_bidirectional_reflectance_vis" def _calibrate_vis(self, ds, channel, calibration): """Calibrate VIS channel. Easy FCDR provides reflectance only, no counts or radiance. """ - if calibration == 'reflectance': + if calibration == "reflectance": coefs = self.calib_coefs[channel] cal = VISCalibrator(coefs) refl = cal.refl_factor_to_percent(ds) refl = cal.update_refl_attrs(refl) return refl - elif calibration in ('counts', 'radiance'): - raise ValueError('Cannot calibrate to {}. 
Easy FCDR provides ' - 'reflectance only.'.format(calibration.name)) + elif calibration in ("counts", "radiance"): + raise ValueError("Cannot calibrate to {}. Easy FCDR provides " + "reflectance only.".format(calibration.name)) else: - raise KeyError('Invalid calibration: {}'.format(calibration.name)) + raise KeyError("Invalid calibration: {}".format(calibration.name)) class FiduceoMviriFullFcdrFileHandler(FiduceoMviriBase): """File handler for FIDUCEO MVIRI Full FCDR.""" nc_keys = FiduceoMviriBase.nc_keys.copy() - nc_keys['VIS'] = 'count_vis' + nc_keys["VIS"] = "count_vis" def _get_calib_coefs(self): """Add additional VIS coefficients only present in full FCDR.""" coefs = super()._get_calib_coefs() - coefs['VIS'].update({ - 'years_since_launch': np.float32(self.nc['years_since_launch']), - 'a0': np.float32(self.nc['a0_vis']), - 'a1': np.float32(self.nc['a1_vis']), - 'a2': np.float32(self.nc['a2_vis']), - 'mean_count_space': np.float32( - self.nc['mean_count_space_vis'] + coefs["VIS"].update({ + "years_since_launch": np.float32(self.nc["years_since_launch"]), + "a0": np.float32(self.nc["a0_vis"]), + "a1": np.float32(self.nc["a1_vis"]), + "a2": np.float32(self.nc["a2_vis"]), + "mean_count_space": np.float32( + self.nc["mean_count_space_vis"] ) }) return coefs @@ -798,7 +798,7 @@ def _get_calib_coefs(self): def _calibrate_vis(self, ds, channel, calibration): """Calibrate VIS channel.""" sza = None - if calibration == 'reflectance': - sza = self._get_angles('solar_zenith_angle', HIGH_RESOL) + if calibration == "reflectance": + sza = self._get_angles("solar_zenith_angle", HIGH_RESOL) cal = VISCalibrator(self.calib_coefs[channel], sza) return cal.calibrate(ds, calibration) diff --git a/satpy/readers/mws_l1b.py b/satpy/readers/mws_l1b.py index 528675eeb5..372a59ac37 100644 --- a/satpy/readers/mws_l1b.py +++ b/satpy/readers/mws_l1b.py @@ -33,23 +33,23 @@ # parameter name and values are the paths to the variable inside the netcdf AUX_DATA = { - 'scantime_utc': 'data/navigation/mws_scantime_utc', - 'solar_azimuth': 'data/navigation/mws_solar_azimuth_angle', - 'solar_zenith': 'data/navigation/mws_solar_zenith_angle', - 'satellite_azimuth': 'data/navigation/mws_satellite_azimuth_angle', - 'satellite_zenith': 'data/navigation/mws_satellite_zenith_angle', - 'surface_type': 'data/navigation/mws_surface_type', - 'terrain_elevation': 'data/navigation/mws_terrain_elevation', - 'mws_lat': 'data/navigation/mws_lat', - 'mws_lon': 'data/navigation/mws_lon', + "scantime_utc": "data/navigation/mws_scantime_utc", + "solar_azimuth": "data/navigation/mws_solar_azimuth_angle", + "solar_zenith": "data/navigation/mws_solar_zenith_angle", + "satellite_azimuth": "data/navigation/mws_satellite_azimuth_angle", + "satellite_zenith": "data/navigation/mws_satellite_zenith_angle", + "surface_type": "data/navigation/mws_surface_type", + "terrain_elevation": "data/navigation/mws_terrain_elevation", + "mws_lat": "data/navigation/mws_lat", + "mws_lon": "data/navigation/mws_lon", } -MWS_CHANNEL_NAMES_TO_NUMBER = {'1': 1, '2': 2, '3': 3, '4': 4, - '5': 5, '6': 6, '7': 7, '8': 8, - '9': 9, '10': 10, '11': 11, '12': 12, - '13': 13, '14': 14, '15': 15, '16': 16, - '17': 17, '18': 18, '19': 19, '20': 20, - '21': 21, '22': 22, '23': 23, '24': 24} +MWS_CHANNEL_NAMES_TO_NUMBER = {"1": 1, "2": 2, "3": 3, "4": 4, + "5": 5, "6": 6, "7": 7, "8": 8, + "9": 9, "10": 10, "11": 11, "12": 12, + "13": 13, "14": 14, "15": 15, "16": 16, + "17": 17, "18": 18, "19": 19, "20": 20, + "21": 21, "22": 22, "23": 23, "24": 24} MWS_CHANNEL_NAMES = 
list(MWS_CHANNEL_NAMES_TO_NUMBER.keys()) MWS_CHANNELS = set(MWS_CHANNEL_NAMES) @@ -90,9 +90,9 @@ def __init__(self, filename, filename_info, filetype_info): filetype_info, cache_var_size=10000, cache_handle=True) - logger.debug('Reading: {}'.format(self.filename)) - logger.debug('Start: {}'.format(self.start_time)) - logger.debug('End: {}'.format(self.end_time)) + logger.debug("Reading: {}".format(self.filename)) + logger.debug("Start: {}".format(self.start_time)) + logger.debug("End: {}".format(self.end_time)) self._cache = {} @@ -101,57 +101,57 @@ def __init__(self, filename, filename_info, filetype_info): @property def start_time(self): """Get start time.""" - return datetime.strptime(self['/attr/sensing_start_time_utc'], - '%Y-%m-%d %H:%M:%S.%f') + return datetime.strptime(self["/attr/sensing_start_time_utc"], + "%Y-%m-%d %H:%M:%S.%f") @property def end_time(self): """Get end time.""" - return datetime.strptime(self['/attr/sensing_end_time_utc'], - '%Y-%m-%d %H:%M:%S.%f') + return datetime.strptime(self["/attr/sensing_end_time_utc"], + "%Y-%m-%d %H:%M:%S.%f") @property def sensor(self): """Get the sensor name.""" - return self['/attr/instrument'] + return self["/attr/instrument"] @property def platform_name(self): """Get the platform name.""" - return self._platform_name_translate.get(self['/attr/spacecraft']) + return self._platform_name_translate.get(self["/attr/spacecraft"]) @property def sub_satellite_longitude_start(self): """Get the longitude of sub-satellite point at start of the product.""" - return self['status/satellite/subsat_longitude_start'].data.item() + return self["status/satellite/subsat_longitude_start"].data.item() @property def sub_satellite_latitude_start(self): """Get the latitude of sub-satellite point at start of the product.""" - return self['status/satellite/subsat_latitude_start'].data.item() + return self["status/satellite/subsat_latitude_start"].data.item() @property def sub_satellite_longitude_end(self): """Get the longitude of sub-satellite point at end of the product.""" - return self['status/satellite/subsat_longitude_end'].data.item() + return self["status/satellite/subsat_longitude_end"].data.item() @property def sub_satellite_latitude_end(self): """Get the latitude of sub-satellite point at end of the product.""" - return self['status/satellite/subsat_latitude_end'].data.item() + return self["status/satellite/subsat_latitude_end"].data.item() def get_dataset(self, dataset_id, dataset_info): """Get dataset using file_key in dataset_info.""" - logger.debug('Reading {} from {}'.format(dataset_id['name'], self.filename)) + logger.debug("Reading {} from {}".format(dataset_id["name"], self.filename)) - var_key = dataset_info['file_key'] - if _get_aux_data_name_from_dsname(dataset_id['name']) is not None: - variable = self._get_dataset_aux_data(dataset_id['name']) - elif any(lb in dataset_id['name'] for lb in MWS_CHANNELS): - logger.debug(f'Reading in file to get dataset with key {var_key}.') + var_key = dataset_info["file_key"] + if _get_aux_data_name_from_dsname(dataset_id["name"]) is not None: + variable = self._get_dataset_aux_data(dataset_id["name"]) + elif any(lb in dataset_id["name"] for lb in MWS_CHANNELS): + logger.debug(f"Reading in file to get dataset with key {var_key}.") variable = self._get_dataset_channel(dataset_id, dataset_info) else: - logger.warning(f'Could not find key {var_key} in NetCDF file, no valid Dataset created') # noqa: E501 + logger.warning(f"Could not find key {var_key} in NetCDF file, no valid Dataset created") # noqa: E501 
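The sensing-time attributes above combine a space-separated date and time with fractional seconds. A made-up string in that format, parsed the same way the start_time and end_time properties do:

from datetime import datetime

sensing_start = "2023-02-01 10:23:45.123456"  # hypothetical attribute value
start = datetime.strptime(sensing_start, "%Y-%m-%d %H:%M:%S.%f")
print(start.isoformat())  # 2023-02-01T10:23:45.123456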
return None variable = self._manage_attributes(variable, dataset_info) @@ -162,10 +162,10 @@ def get_dataset(self, dataset_id, dataset_info): @staticmethod def _standardize_dims(variable): """Standardize dims to y, x.""" - if 'n_scans' in variable.dims: - variable = variable.rename({'n_fovs': 'x', 'n_scans': 'y'}) - if variable.dims[0] == 'x': - variable = variable.transpose('y', 'x') + if "n_scans" in variable.dims: + variable = variable.rename({"n_fovs": "x", "n_scans": "y"}) + if variable.dims[0] == "x": + variable = variable.transpose("y", "x") return variable @staticmethod @@ -178,7 +178,7 @@ def _drop_coords(variable): def _manage_attributes(self, variable, dataset_info): """Manage attributes of the dataset.""" - variable.attrs.setdefault('units', None) + variable.attrs.setdefault("units", None) variable.attrs.update(dataset_info) variable.attrs.update(self._get_global_attributes()) return variable @@ -192,8 +192,8 @@ def _get_dataset_channel(self, key, dataset_info): """ # Get the dataset # Get metadata for given dataset - grp_pth = dataset_info['file_key'] - channel_index = get_channel_index_from_name(key['name']) + grp_pth = dataset_info["file_key"] + channel_index = get_channel_index_from_name(key["name"]) data = self[grp_pth][:, :, channel_index] attrs = data.attrs.copy() @@ -203,7 +203,7 @@ def _get_dataset_channel(self, key, dataset_info): default_fillvals.get(data.dtype.str[1:], np.nan)) vr = attrs.get("valid_range", [-np.inf, np.inf]) - if key['calibration'] == "counts": + if key["calibration"] == "counts": attrs["_FillValue"] = fv nfv = fv else: @@ -212,18 +212,18 @@ def _get_dataset_channel(self, key, dataset_info): data = data.where(data <= vr[1], nfv) # Manage the attributes of the dataset - data.attrs.setdefault('units', None) + data.attrs.setdefault("units", None) data.attrs.update(dataset_info) - dataset_attrs = getattr(data, 'attrs', {}) + dataset_attrs = getattr(data, "attrs", {}) dataset_attrs.update(dataset_info) dataset_attrs.update({ "platform_name": self.platform_name, "sensor": self.sensor, - "orbital_parameters": {'sub_satellite_latitude_start': self.sub_satellite_latitude_start, - 'sub_satellite_longitude_start': self.sub_satellite_longitude_start, - 'sub_satellite_latitude_end': self.sub_satellite_latitude_end, - 'sub_satellite_longitude_end': self.sub_satellite_longitude_end}, + "orbital_parameters": {"sub_satellite_latitude_start": self.sub_satellite_latitude_start, + "sub_satellite_longitude_start": self.sub_satellite_longitude_start, + "sub_satellite_latitude_end": self.sub_satellite_latitude_end, + "sub_satellite_longitude_end": self.sub_satellite_longitude_end}, }) try: @@ -237,10 +237,10 @@ def _get_dataset_channel(self, key, dataset_info): def _get_dataset_aux_data(self, dsname): """Get the auxiliary data arrays using the index map.""" # Geolocation and navigation data: - if dsname in ['mws_lat', 'mws_lon', - 'solar_azimuth', 'solar_zenith', - 'satellite_azimuth', 'satellite_zenith', - 'surface_type', 'terrain_elevation']: + if dsname in ["mws_lat", "mws_lon", + "solar_azimuth", "solar_zenith", + "satellite_azimuth", "satellite_zenith", + "surface_type", "terrain_elevation"]: var_key = AUX_DATA.get(dsname) else: raise NotImplementedError(f"Dataset {dsname!r} not supported!") @@ -252,30 +252,30 @@ def _get_dataset_aux_data(self, dsname): raise # Scale the data: - if 'scale_factor' in variable.attrs and 'add_offset' in variable.attrs: - missing_value = variable.attrs['missing_value'] + if "scale_factor" in variable.attrs and "add_offset" in 
variable.attrs: + missing_value = variable.attrs["missing_value"] variable.data = da.where(variable.data == missing_value, np.nan, - variable.data * variable.attrs['scale_factor'] + variable.attrs['add_offset']) + variable.data * variable.attrs["scale_factor"] + variable.attrs["add_offset"]) return variable def _get_global_attributes(self): """Create a dictionary of global attributes.""" return { - 'filename': self.filename, - 'start_time': self.start_time, - 'end_time': self.end_time, - 'spacecraft_name': self.platform_name, - 'sensor': self.sensor, - 'filename_start_time': self.filename_info['start_time'], - 'filename_end_time': self.filename_info['end_time'], - 'platform_name': self.platform_name, - 'quality_group': self._get_quality_attributes(), + "filename": self.filename, + "start_time": self.start_time, + "end_time": self.end_time, + "spacecraft_name": self.platform_name, + "sensor": self.sensor, + "filename_start_time": self.filename_info["start_time"], + "filename_end_time": self.filename_info["end_time"], + "platform_name": self.platform_name, + "quality_group": self._get_quality_attributes(), } def _get_quality_attributes(self): """Get quality attributes.""" - quality_group = self['quality'] + quality_group = self["quality"] quality_dict = {} for key in quality_group: # Add the values (as Numpy array) of each variable in the group diff --git a/satpy/readers/netcdf_utils.py b/satpy/readers/netcdf_utils.py index ac98f81d8d..cb5c38d1cf 100644 --- a/satpy/readers/netcdf_utils.py +++ b/satpy/readers/netcdf_utils.py @@ -103,7 +103,7 @@ def __init__(self, filename, filename_info, filetype_info, file_handle = self._get_file_handle() except IOError: LOG.exception( - 'Failed reading file %s. Possibly corrupted file', self.filename) + "Failed reading file %s. Possibly corrupted file", self.filename) raise self._set_file_handle_auto_maskandscale(file_handle, auto_maskandscale) @@ -123,7 +123,7 @@ def __init__(self, filename, filename_info, filetype_info, file_handle.close() def _get_file_handle(self): - return netCDF4.Dataset(self.filename, 'r') + return netCDF4.Dataset(self.filename, "r") @staticmethod def _set_file_handle_auto_maskandscale(file_handle, auto_maskandscale): @@ -132,8 +132,8 @@ def _set_file_handle_auto_maskandscale(file_handle, auto_maskandscale): def _set_xarray_kwargs(self, xarray_kwargs, auto_maskandscale): self._xarray_kwargs = xarray_kwargs or {} - self._xarray_kwargs.setdefault('chunks', CHUNK_SIZE) - self._xarray_kwargs.setdefault('mask_and_scale', auto_maskandscale) + self._xarray_kwargs.setdefault("chunks", CHUNK_SIZE) + self._xarray_kwargs.setdefault("mask_and_scale", auto_maskandscale) def collect_metadata(self, name, obj): """Collect all file variables and attributes for the provided file object. 
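[Editorial example, not part of the patch.] The handlers touched above all rely on the slash-separated key convention of satpy's NetCDF4FileHandler: "group/variable" looks up a variable, "/attr/name" a global attribute, and "variable/shape" cached metadata. The following is a minimal sketch of that lookup convention only; the MiniHandler class and its file contents are made up for illustration and are not satpy API.

class MiniHandler:
    """Toy stand-in for the flat file_content mapping the real handler builds."""

    def __init__(self):
        # In the real handler, collect_metadata() walks the file and fills a
        # flat dict keyed by slash-separated paths like these:
        self.file_content = {
            "/attr/sensing_start_time_utc": "2021-01-01 12:00:00.000000",
            "data/measurement/shape": (100, 90),
        }

    def __getitem__(self, key):
        return self.file_content[key]


handler = MiniHandler()
print(handler["/attr/sensing_start_time_utc"])  # global attribute lookup
print(handler["data/measurement/shape"])        # per-variable metadata lookup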
@@ -171,11 +171,11 @@ def _collect_variable_info(self, var_name, var_obj):
     def _collect_listed_variables(self, file_handle, listed_variables):
         variable_name_replacements = self.filetype_info.get("variable_name_replacements")
         for itm in self._get_required_variable_names(listed_variables, variable_name_replacements):
-            parts = itm.split('/')
+            parts = itm.split("/")
             grp = file_handle
             for p in parts[:-1]:
                 if p == "attr":
-                    n = '/'.join(parts)
+                    n = "/".join(parts)
                     self.file_content[n] = self._get_attr_value(grp, parts[-1])
                     break
                 grp = grp[p]
@@ -188,7 +188,7 @@ def _collect_listed_variables(self, file_handle, listed_variables):
     def _get_required_variable_names(listed_variables, variable_name_replacements):
         variable_names = []
         for var in listed_variables:
-            if variable_name_replacements and '{' in var:
+            if variable_name_replacements and "{" in var:
                 _compose_replacement_names(variable_name_replacements, var, variable_names)
             else:
                 variable_names.append(var)
@@ -290,7 +290,7 @@ def _get_variable(self, key, val):
         # these datasets are closed and inaccessible when the file is
         # closed, need to reopen
         # TODO: Handle HDF4 versus NetCDF3 versus NetCDF4
-        parts = key.rsplit('/', 1)
+        parts = key.rsplit("/", 1)
         if len(parts) == 2:
             group, key = parts
         else:
@@ -392,7 +392,7 @@ def _get_file_handle(self):
         import h5netcdf
         f_obj = open_file_or_filename(self.filename)
         self._use_h5netcdf = True
-        return h5netcdf.File(f_obj, 'r')
+        return h5netcdf.File(f_obj, "r")

     def __getitem__(self, key):
         """Get item for given key."""
diff --git a/satpy/readers/nucaps.py b/satpy/readers/nucaps.py
index 806a20cece..2c9e2ba39f 100644
--- a/satpy/readers/nucaps.py
+++ b/satpy/readers/nucaps.py
@@ -70,10 +70,10 @@ class NUCAPSFileHandler(NetCDF4FileHandler):
     def __init__(self, *args, **kwargs):
         """Initialize file handler."""
         # remove kwargs that reader instance used that file handler does not
-        kwargs.pop('mask_surface', None)
-        kwargs.pop('mask_quality', None)
-        kwargs.setdefault('xarray_kwargs', {}).setdefault(
-            'decode_times', False)
+        kwargs.pop("mask_surface", None)
+        kwargs.pop("mask_quality", None)
+        kwargs.setdefault("xarray_kwargs", {}).setdefault(
+            "decode_times", False)
         super(NUCAPSFileHandler, self).__init__(*args, **kwargs)

     def __contains__(self, item):
@@ -88,25 +88,25 @@ def _parse_datetime(self, datestr):
     def start_time(self):
         """Get start time."""
         try:
-            return self._parse_datetime(self['/attr/time_coverage_start'])
+            return self._parse_datetime(self["/attr/time_coverage_start"])
         except KeyError:
             # If attribute not present, use time from file name
-            return self.filename_info['start_time']
+            return self.filename_info["start_time"]

     @property
     def end_time(self):
         """Get end time."""
         try:
-            return self._parse_datetime(self['/attr/time_coverage_end'])
+            return self._parse_datetime(self["/attr/time_coverage_end"])
         except KeyError:
             # If attribute not present, use time from file name
-            return self.filename_info['end_time']
+            return self.filename_info["end_time"]

     @property
     def start_orbit_number(self):
         """Return orbit number for the beginning of the swath."""
         try:
-            return int(self['/attr/start_orbit_number'])
+            return int(self["/attr/start_orbit_number"])
         except KeyError:
             return 0

@@ -114,7 +114,7 @@ def end_orbit_number(self):
     def end_orbit_number(self):
         """Return orbit number for the end of the swath."""
         try:
-            return int(self['/attr/end_orbit_number'])
+            return int(self["/attr/end_orbit_number"])
         except KeyError:
             return 0

@@ -122,29 +122,29 @@ def end_orbit_number(self):
     def platform_name(self):
         """Return standard platform name for the file's data."""
         try:
-            res = self['/attr/platform_name']
+            res = self["/attr/platform_name"]
             if isinstance(res, np.ndarray):
                 return str(res.astype(str))
             return res
         except KeyError:
-            return self.filename_info['platform_shortname']
+            return self.filename_info["platform_shortname"]

     @property
     def sensor_names(self):
         """Return standard sensor or instrument name for the file's data."""
         try:
-            res = self['/attr/instrument_name']
-            res = [x.strip() for x in res.split(',')]
+            res = self["/attr/instrument_name"]
+            res = [x.strip() for x in res.split(",")]
             if len(res) == 1:
                 return res[0].lower()
         except KeyError:
-            res = ['CrIS', 'ATMS', 'VIIRS']
+            res = ["CrIS", "ATMS", "VIIRS"]
         return set(name.lower() for name in res)

     def get_shape(self, ds_id, ds_info):
         """Return data array shape for item specified."""
-        var_path = ds_info.get('file_key', '{}'.format(ds_id['name']))
-        if var_path + '/shape' not in self:
+        var_path = ds_info.get("file_key", "{}".format(ds_id["name"]))
+        if var_path + "/shape" not in self:
             # loading a scalar value
             shape = 1
         else:
@@ -157,11 +157,11 @@ def get_shape(self, ds_id, ds_info):

     def get_metadata(self, dataset_id, ds_info):
         """Get metadata."""
-        var_path = ds_info.get('file_key', '{}'.format(dataset_id['name']))
+        var_path = ds_info.get("file_key", "{}".format(dataset_id["name"]))
         shape = self.get_shape(dataset_id, ds_info)
-        file_units = ds_info.get('file_units',
-                                 self.get(var_path + '/attr/units'))
-        ds_info.update(getattr(self[var_path], 'attrs', {}))
+        file_units = ds_info.get("file_units",
+                                 self.get(var_path + "/attr/units"))
+        ds_info.update(getattr(self[var_path], "attrs", {}))
         # don't overwrite information in the files attrs because the same
         # `.attrs` is used for each separate Temperature pressure level dataset
         # Plus, if someone gets metadata multiple times then we are screwed
@@ -176,22 +176,22 @@ def get_metadata(self, dataset_id, ds_info):
             "start_orbit": self.start_orbit_number,
             "end_orbit": self.end_orbit_number,
         })
-        if 'standard_name' not in info:
-            sname_path = var_path + '/attr/standard_name'
-            info['standard_name'] = self.get(sname_path)
-        if dataset_id['name'] != 'Quality_Flag':
-            anc_vars = info.get('ancillary_variables', [])
-            if 'Quality_Flag' not in anc_vars:
-                anc_vars.append('Quality_Flag')
-            info['ancillary_variables'] = anc_vars
+        if "standard_name" not in info:
+            sname_path = var_path + "/attr/standard_name"
+            info["standard_name"] = self.get(sname_path)
+        if dataset_id["name"] != "Quality_Flag":
+            anc_vars = info.get("ancillary_variables", [])
+            if "Quality_Flag" not in anc_vars:
+                anc_vars.append("Quality_Flag")
+            info["ancillary_variables"] = anc_vars
         return info

     def get_dataset(self, dataset_id, ds_info):
         """Load data array and metadata for specified dataset."""
-        var_path = ds_info.get('file_key', '{}'.format(dataset_id['name']))
+        var_path = ds_info.get("file_key", "{}".format(dataset_id["name"]))
         metadata = self.get_metadata(dataset_id, ds_info)
-        valid_min, valid_max = self[var_path + '/attr/valid_range']
-        fill_value = self.get(var_path + '/attr/_FillValue')
+        valid_min, valid_max = self[var_path + "/attr/valid_range"]
+        fill_value = self.get(var_path + "/attr/_FillValue")
         d_tmp = self[var_path]

         if "index" in ds_info:
@@ -200,19 +200,19 @@ def get_dataset(self, dataset_id, ds_info):
             d_tmp = d_tmp[..., int(ds_info["pressure_index"])]
             # this is a pressure based field
             # include surface_pressure as metadata
-            sp = self['Surface_Pressure']
+            sp = self["Surface_Pressure"]
             # Older format
-            if 'number_of_FORs' in sp.dims:
-                sp = sp.rename({'number_of_FORs': 'y'})
+            if "number_of_FORs" in sp.dims:
+                sp = sp.rename({"number_of_FORs": "y"})
             # Newer format
-            if 'Number_of_CrIS_FORs' in sp.dims:
-                sp = sp.rename({'Number_of_CrIS_FORs': 'y'})
-            if 'surface_pressure' in ds_info:
-                ds_info['surface_pressure'] = xr.concat((ds_info['surface_pressure'], sp), dim='y')
+            if "Number_of_CrIS_FORs" in sp.dims:
+                sp = sp.rename({"Number_of_CrIS_FORs": "y"})
+            if "surface_pressure" in ds_info:
+                ds_info["surface_pressure"] = xr.concat((ds_info["surface_pressure"], sp), dim="y")
             else:
-                ds_info['surface_pressure'] = sp
+                ds_info["surface_pressure"] = sp
             # include all the pressure levels
-            ds_info.setdefault('pressure_levels', self['Pressure'][0])
+            ds_info.setdefault("pressure_levels", self["Pressure"][0])

         data = d_tmp
         if valid_min is not None and valid_max is not None:
@@ -221,16 +221,16 @@ def get_dataset(self, dataset_id, ds_info):
         if fill_value is not None:
             data = data.where(data != fill_value)
             # this _FillValue is no longer valid
-            metadata.pop('_FillValue', None)
-            data.attrs.pop('_FillValue', None)
+            metadata.pop("_FillValue", None)
+            data.attrs.pop("_FillValue", None)
         data.attrs.update(metadata)

         # Older format
-        if 'number_of_FORs' in data.dims:
-            data = data.rename({'number_of_FORs': 'y'})
+        if "number_of_FORs" in data.dims:
+            data = data.rename({"number_of_FORs": "y"})
         # Newer format
-        if 'Number_of_CrIS_FORs' in data.dims:
-            data = data.rename({'Number_of_CrIS_FORs': 'y'})
+        if "Number_of_CrIS_FORs" in data.dims:
+            data = data.rename({"Number_of_CrIS_FORs": "y"})

         return data

@@ -248,8 +248,8 @@ def __init__(self, config_files, mask_surface=True, mask_quality=True, **kwargs)
         self.pressure_dataset_names = defaultdict(list)
         super(NUCAPSReader, self).__init__(config_files, **kwargs)
-        self.mask_surface = self.info.get('mask_surface', mask_surface)
-        self.mask_quality = self.info.get('mask_quality', mask_quality)
+        self.mask_surface = self.info.get("mask_surface", mask_surface)
+        self.mask_quality = self.info.get("mask_quality", mask_quality)

     def load_ds_ids_from_config(self):
         """Convert config dataset entries to DataIDs.
@@ -263,7 +263,7 @@ def load_ds_ids_from_config(self):
         super(NUCAPSReader, self).load_ds_ids_from_config()
         for ds_id in list(self.all_ids.keys()):
             ds_info = self.all_ids[ds_id]
-            if ds_info.get('pressure_based', False):
+            if ds_info.get("pressure_based", False):
                 for idx, lvl_num in enumerate(ALL_PRESSURE_LEVELS):
                     if lvl_num < 5.0:
                         suffix = "_{:0.03f}mb".format(lvl_num)
@@ -271,14 +271,14 @@ def load_ds_ids_from_config(self):
                         suffix = "_{:0.0f}mb".format(lvl_num)

                     new_info = ds_info.copy()
-                    new_info['pressure_level'] = lvl_num
-                    new_info['pressure_index'] = idx
-                    new_info['file_key'] = '{}'.format(ds_id['name'])
-                    new_info['name'] = ds_id['name'] + suffix
-                    new_ds_id = ds_id._replace(name=new_info['name'])
-                    new_info['id'] = new_ds_id
+                    new_info["pressure_level"] = lvl_num
+                    new_info["pressure_index"] = idx
+                    new_info["file_key"] = "{}".format(ds_id["name"])
+                    new_info["name"] = ds_id["name"] + suffix
+                    new_ds_id = ds_id._replace(name=new_info["name"])
+                    new_info["id"] = new_ds_id
                     self.all_ids[new_ds_id] = new_info
-                    self.pressure_dataset_names[ds_id['name']].append(new_info['name'])
+                    self.pressure_dataset_names[ds_id["name"]].append(new_info["name"])

     def load(self, dataset_keys, previous_datasets=None, pressure_levels=None):
         """Load data from one or more set of files.
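[Editorial example, not part of the patch.] The per-level dataset naming in load_ds_ids_from_config above can be exercised in isolation. This sketch reuses the suffix rule verbatim from the hunk; the example_levels values are made up for illustration and are not the real ALL_PRESSURE_LEVELS table.

example_levels = [0.016, 4.077, 100.0, 850.0]  # hypothetical pressure levels in mb
names = []
for lvl_num in example_levels:
    # same formatting rule as in load_ds_ids_from_config above
    if lvl_num < 5.0:
        suffix = "_{:0.03f}mb".format(lvl_num)
    else:
        suffix = "_{:0.0f}mb".format(lvl_num)
    names.append("Temperature" + suffix)
print(names)
# ['Temperature_0.016mb', 'Temperature_4.077mb', 'Temperature_100mb', 'Temperature_850mb']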
@@ -294,7 +294,7 @@ def load(self, dataset_keys, previous_datasets=None, pressure_levels=None):

         # Add pressure levels to the datasets to load if needed so
         # we can do further filtering after loading
-        plevels_ds_id = self.get_dataset_key('Pressure_Levels')
+        plevels_ds_id = self.get_dataset_key("Pressure_Levels")
         remove_plevels = False
         if plevels_ds_id not in dataset_keys:
             dataset_keys.add(plevels_ds_id)
@@ -353,7 +353,7 @@ def _remove_data_at_pressure_levels(datasets_loaded, plevels_ds, pressure_levels
         if cond is not None:
             datasets_loaded[ds_id] = ds_obj.where(cond, drop=True)
-        datasets_loaded[ds_id].attrs['pressure_levels'] = new_plevels
+        datasets_loaded[ds_id].attrs["pressure_levels"] = new_plevels


 def _get_pressure_level_condition(plevels_ds, pressure_levels):
@@ -399,8 +399,8 @@ def _mask_data_with_quality_flag(datasets_loaded, dataset_keys):
     for ds_id in sorted(dataset_keys):
         ds = datasets_loaded[ds_id]
         quality_flag = [
-            x for x in ds.attrs.get('ancillary_variables', [])
-            if x.attrs.get('name') == 'Quality_Flag']
+            x for x in ds.attrs.get("ancillary_variables", [])
+            if x.attrs.get("name") == "Quality_Flag"]

         if not quality_flag:
             continue
diff --git a/satpy/readers/nwcsaf_msg2013_hdf5.py b/satpy/readers/nwcsaf_msg2013_hdf5.py
index a8fdf45f3c..40a6441655 100644
--- a/satpy/readers/nwcsaf_msg2013_hdf5.py
+++ b/satpy/readers/nwcsaf_msg2013_hdf5.py
@@ -38,10 +38,10 @@
 logger = logging.getLogger(__name__)

-PLATFORM_NAMES = {'MSG1': 'Meteosat-8',
-                  'MSG2': 'Meteosat-9',
-                  'MSG3': 'Meteosat-10',
-                  'MSG4': 'Meteosat-11', }
+PLATFORM_NAMES = {"MSG1": "Meteosat-8",
+                  "MSG2": "Meteosat-9",
+                  "MSG3": "Meteosat-10",
+                  "MSG4": "Meteosat-11", }


 class Hdf5NWCSAF(HDF5FileHandler):
@@ -56,27 +56,27 @@ def __init__(self, filename, filename_info, filetype_info):

     def get_dataset(self, dataset_id, ds_info):
         """Load a dataset."""
-        file_key = ds_info.get('file_key', dataset_id['name'])
+        file_key = ds_info.get("file_key", dataset_id["name"])
         data = self[file_key]

         nodata = None
-        if 'SCALING_FACTOR' in data.attrs and 'OFFSET' in data.attrs:
+        if "SCALING_FACTOR" in data.attrs and "OFFSET" in data.attrs:
             dtype = np.dtype(data.data)
-            if dataset_id['name'] in ['ctth_alti']:
-                data.attrs['valid_range'] = (0, 27000)
-                data.attrs['_FillValue'] = np.nan
+            if dataset_id["name"] in ["ctth_alti"]:
+                data.attrs["valid_range"] = (0, 27000)
+                data.attrs["_FillValue"] = np.nan

-            if dataset_id['name'] in ['ctth_alti', 'ctth_pres', 'ctth_tempe', 'ctth_effective_cloudiness']:
-                dtype = np.dtype('float32')
+            if dataset_id["name"] in ["ctth_alti", "ctth_pres", "ctth_tempe", "ctth_effective_cloudiness"]:
+                dtype = np.dtype("float32")
                 nodata = 255

-            if dataset_id['name'] in ['ct']:
-                data.attrs['valid_range'] = (0, 20)
-                data.attrs['_FillValue'] = 255
+            if dataset_id["name"] in ["ct"]:
+                data.attrs["valid_range"] = (0, 20)
+                data.attrs["_FillValue"] = 255
                 # data.attrs['palette_meanings'] = list(range(21))

             attrs = data.attrs
-            scaled_data = (data * data.attrs['SCALING_FACTOR'] + data.attrs['OFFSET']).astype(dtype)
+            scaled_data = (data * data.attrs["SCALING_FACTOR"] + data.attrs["OFFSET"]).astype(dtype)
             if nodata:
                 scaled_data = scaled_data.where(data != nodata)
                 scaled_data = scaled_data.where(scaled_data >= 0)
@@ -92,18 +92,18 @@ def get_dataset(self, dataset_id, ds_info):

     def get_area_def(self, dsid):
         """Get the area definition of the datasets in the file."""
-        if dsid['name'].endswith('_pal'):
+        if dsid["name"].endswith("_pal"):
             raise NotImplementedError

-        cfac = self.file_content['/attr/CFAC']
-        lfac = self.file_content['/attr/LFAC']
-        coff = self.file_content['/attr/COFF']
-        loff = self.file_content['/attr/LOFF']
-        numcols = int(self.file_content['/attr/NC'])
-        numlines = int(self.file_content['/attr/NL'])
+        cfac = self.file_content["/attr/CFAC"]
+        lfac = self.file_content["/attr/LFAC"]
+        coff = self.file_content["/attr/COFF"]
+        loff = self.file_content["/attr/LOFF"]
+        numcols = int(self.file_content["/attr/NC"])
+        numlines = int(self.file_content["/attr/NL"])

         aex = get_area_extent(cfac, lfac, coff, loff, numcols, numlines)
-        pname = self.file_content['/attr/PROJECTION_NAME']
+        pname = self.file_content["/attr/PROJECTION_NAME"]
         proj = {}
         if pname.startswith("GEOS"):
             proj["proj"] = "geos"
@@ -114,8 +114,8 @@ def get_area_def(self, dsid):
         else:
             raise NotImplementedError("Only geos projection supported yet.")

-        area_def = AreaDefinition(self.file_content['/attr/REGION_NAME'],
-                                  self.file_content['/attr/REGION_NAME'],
+        area_def = AreaDefinition(self.file_content["/attr/REGION_NAME"],
+                                  self.file_content["/attr/REGION_NAME"],
                                   pname,
                                   proj,
                                   numcols,
@@ -127,7 +127,7 @@ def get_area_def(self, dsid):
     @property
     def start_time(self):
         """Return the start time of the object."""
-        return datetime.strptime(self.file_content['/attr/IMAGE_ACQUISITION_TIME'], '%Y%m%d%H%M')
+        return datetime.strptime(self.file_content["/attr/IMAGE_ACQUISITION_TIME"], "%Y%m%d%H%M")


 def get_area_extent(cfac, lfac, coff, loff, numcols, numlines):
diff --git a/satpy/readers/nwcsaf_nc.py b/satpy/readers/nwcsaf_nc.py
index 7ecc5f43f4..1b3d65cb96 100644
--- a/satpy/readers/nwcsaf_nc.py
+++ b/satpy/readers/nwcsaf_nc.py
@@ -42,36 +42,36 @@
 CHUNK_SIZE = get_legacy_chunk_size()

-SENSOR = {'NOAA-19': 'avhrr-3',
-          'NOAA-18': 'avhrr-3',
-          'NOAA-15': 'avhrr-3',
-          'Metop-A': 'avhrr-3',
-          'Metop-B': 'avhrr-3',
-          'Metop-C': 'avhrr-3',
-          'EOS-Aqua': 'modis',
-          'EOS-Terra': 'modis',
-          'Suomi-NPP': 'viirs',
-          'NOAA-20': 'viirs',
-          'NOAA-21': 'viirs',
-          'NOAA-22': 'viirs',
-          'NOAA-23': 'viirs',
-          'JPSS-1': 'viirs',
-          'Metop-SG-A1': 'metimage',
-          'Metop-SG-A2': 'metimage',
-          'Metop-SG-A3': 'metimage',
-          'GOES-16': 'abi',
-          'GOES-17': 'abi',
-          'Himawari-8': 'ahi',
-          'Himawari-9': 'ahi',
+SENSOR = {"NOAA-19": "avhrr-3",
+          "NOAA-18": "avhrr-3",
+          "NOAA-15": "avhrr-3",
+          "Metop-A": "avhrr-3",
+          "Metop-B": "avhrr-3",
+          "Metop-C": "avhrr-3",
+          "EOS-Aqua": "modis",
+          "EOS-Terra": "modis",
+          "Suomi-NPP": "viirs",
+          "NOAA-20": "viirs",
+          "NOAA-21": "viirs",
+          "NOAA-22": "viirs",
+          "NOAA-23": "viirs",
+          "JPSS-1": "viirs",
+          "Metop-SG-A1": "metimage",
+          "Metop-SG-A2": "metimage",
+          "Metop-SG-A3": "metimage",
+          "GOES-16": "abi",
+          "GOES-17": "abi",
+          "Himawari-8": "ahi",
+          "Himawari-9": "ahi",
           }

-PLATFORM_NAMES = {'MSG1': 'Meteosat-8',
-                  'MSG2': 'Meteosat-9',
-                  'MSG3': 'Meteosat-10',
-                  'MSG4': 'Meteosat-11',
-                  'GOES16': 'GOES-16',
-                  'GOES17': 'GOES-17',
+PLATFORM_NAMES = {"MSG1": "Meteosat-8",
+                  "MSG2": "Meteosat-9",
+                  "MSG3": "Meteosat-10",
+                  "MSG4": "Meteosat-11",
+                  "GOES16": "GOES-16",
+                  "GOES17": "GOES-17",
                   }

@@ -93,8 +93,8 @@ def __init__(self, filename, filename_info, filetype_info):
                                    mask_and_scale=False,
                                    chunks=CHUNK_SIZE)

-        self.nc = self.nc.rename({'nx': 'x', 'ny': 'y'})
-        self.sw_version = self.nc.attrs['source']
+        self.nc = self.nc.rename({"nx": "x", "ny": "y"})
+        self.sw_version = self.nc.attrs["source"]

         self.pps = False
         self.platform_name = None
@@ -104,12 +104,12 @@ def __init__(self, filename, filename_info, filetype_info):
         try:
             # NWCSAF/Geo:
             try:
-                kwrgs = {'sat_id': self.nc.attrs['satellite_identifier']}
+                kwrgs = {"sat_id": self.nc.attrs["satellite_identifier"]}
             except KeyError:
-                kwrgs = {'sat_id': self.nc.attrs['satellite_identifier'].astype(str)}
+                kwrgs = {"sat_id": self.nc.attrs["satellite_identifier"].astype(str)}
         except KeyError:
             # NWCSAF/PPS:
-            kwrgs = {'platform_name': self.nc.attrs['platform']}
+            kwrgs = {"platform_name": self.nc.attrs["platform"]}

         self.set_platform_and_sensor(**kwrgs)

@@ -121,17 +121,17 @@ def set_platform_and_sensor(self, **kwargs):
         """Set some metadata: platform_name, sensors, and pps (identifying PPS or Geo)."""
         try:
             # NWCSAF/Geo
-            self.platform_name = PLATFORM_NAMES.get(kwargs['sat_id'], kwargs['sat_id'])
+            self.platform_name = PLATFORM_NAMES.get(kwargs["sat_id"], kwargs["sat_id"])
         except KeyError:
             # NWCSAF/PPS
-            self.platform_name = kwargs['platform_name']
+            self.platform_name = kwargs["platform_name"]
             self.pps = True

-        self.sensor = set([SENSOR.get(self.platform_name, 'seviri')])
+        self.sensor = set([SENSOR.get(self.platform_name, "seviri")])

     def remove_timedim(self, var):
         """Remove time dimension from dataset."""
-        if self.pps and var.dims[0] == 'time':
+        if self.pps and var.dims[0] == "time":
             data = var[0, :, :]
             data.attrs = var.attrs
             var = data
@@ -140,19 +140,19 @@ def remove_timedim(self, var):

     def drop_xycoords(self, variable):
         """Drop x, y coords when y is scan line number."""
         try:
-            if variable.coords['y'].attrs['long_name'] == "scan line number":
-                return variable.drop_vars(['y', 'x'])
+            if variable.coords["y"].attrs["long_name"] == "scan line number":
+                return variable.drop_vars(["y", "x"])
         except KeyError:
             pass
         return variable

     def get_dataset(self, dsid, info):
         """Load a dataset."""
-        dsid_name = dsid['name']
+        dsid_name = dsid["name"]
         if dsid_name in self.cache:
-            logger.debug('Get the data set from cache: %s.', dsid_name)
+            logger.debug("Get the data set from cache: %s.", dsid_name)
             return self.cache[dsid_name]
-        if dsid_name in ['lon', 'lat'] and dsid_name not in self.nc:
+        if dsid_name in ["lon", "lat"] and dsid_name not in self.nc:
             # Get full resolution lon,lat from the reduced (tie points) grid
             lon, lat = self.upsample_geolocation()
             if dsid_name == "lon":
@@ -160,7 +160,7 @@ def get_dataset(self, dsid, info):
             else:
                 return lat

-        logger.debug('Reading %s.', dsid_name)
+        logger.debug("Reading %s.", dsid_name)
         file_key = self._get_filekeys(dsid_name, info)
         variable = self.nc[file_key]
         variable = self.remove_timedim(variable)
@@ -204,91 +204,91 @@ def scale_dataset(self, variable, info):
         """
         variable = remove_empties(variable)

-        scale = variable.attrs.get('scale_factor', np.array(1))
-        offset = variable.attrs.get('add_offset', np.array(0))
-        if '_FillValue' in variable.attrs:
-            variable.attrs['scaled_FillValue'] = variable.attrs['_FillValue'] * scale + offset
+        scale = variable.attrs.get("scale_factor", np.array(1))
+        offset = variable.attrs.get("add_offset", np.array(0))
+        if "_FillValue" in variable.attrs:
+            variable.attrs["scaled_FillValue"] = variable.attrs["_FillValue"] * scale + offset
         if np.issubdtype((scale + offset).dtype, np.floating) or np.issubdtype(variable.dtype, np.floating):
             variable = self._mask_variable(variable)
         attrs = variable.attrs.copy()
         variable = variable * scale + offset
         variable.attrs = attrs
-        if 'valid_range' in variable.attrs:
-            variable.attrs['valid_range'] = variable.attrs['valid_range'] * scale + offset
+        if "valid_range" in variable.attrs:
+            variable.attrs["valid_range"] = variable.attrs["valid_range"] * scale + offset

-        variable.attrs.pop('add_offset', None)
-        variable.attrs.pop('scale_factor', None)
+        variable.attrs.pop("add_offset", None)
+        variable.attrs.pop("scale_factor", None)

-        variable.attrs.update({'platform_name': self.platform_name,
-                               'sensor': self.sensor})
+        variable.attrs.update({"platform_name": self.platform_name,
+                               "sensor": self.sensor})

-        if not variable.attrs.get('standard_name', '').endswith('status_flag'):
+        if not variable.attrs.get("standard_name", "").endswith("status_flag"):
             # TODO: do we really need to add units to everything ?
-            variable.attrs.setdefault('units', '1')
+            variable.attrs.setdefault("units", "1")

-        ancillary_names = variable.attrs.get('ancillary_variables', '')
+        ancillary_names = variable.attrs.get("ancillary_variables", "")
         try:
-            variable.attrs['ancillary_variables'] = ancillary_names.split()
+            variable.attrs["ancillary_variables"] = ancillary_names.split()
         except AttributeError:
             pass

-        if 'palette_meanings' in variable.attrs:
+        if "palette_meanings" in variable.attrs:
             variable = self._prepare_variable_for_palette(variable, info)

-        if 'standard_name' in info:
-            variable.attrs.setdefault('standard_name', info['standard_name'])
+        if "standard_name" in info:
+            variable.attrs.setdefault("standard_name", info["standard_name"])
         variable = self._adjust_variable_for_legacy_software(variable)

         return variable

     @staticmethod
     def _mask_variable(variable):
-        if '_FillValue' in variable.attrs:
+        if "_FillValue" in variable.attrs:
             variable = variable.where(
-                variable != variable.attrs['_FillValue'])
-            variable.attrs['_FillValue'] = np.nan
-        if 'valid_range' in variable.attrs:
+                variable != variable.attrs["_FillValue"])
+            variable.attrs["_FillValue"] = np.nan
+        if "valid_range" in variable.attrs:
             variable = variable.where(
-                variable <= variable.attrs['valid_range'][1])
+                variable <= variable.attrs["valid_range"][1])
             variable = variable.where(
-                variable >= variable.attrs['valid_range'][0])
-        if 'valid_max' in variable.attrs:
+                variable >= variable.attrs["valid_range"][0])
+        if "valid_max" in variable.attrs:
             variable = variable.where(
-                variable <= variable.attrs['valid_max'])
-        if 'valid_min' in variable.attrs:
+                variable <= variable.attrs["valid_max"])
+        if "valid_min" in variable.attrs:
             variable = variable.where(
-                variable >= variable.attrs['valid_min'])
+                variable >= variable.attrs["valid_min"])
         return variable

     def _prepare_variable_for_palette(self, variable, info):
         try:
-            so_dataset = self.nc[self._get_varname_in_file(info, info_type='scale_offset_dataset')]
+            so_dataset = self.nc[self._get_varname_in_file(info, info_type="scale_offset_dataset")]
         except KeyError:
             scale = 1
             offset = 0
             fill_value = 255
         else:
-            scale = so_dataset.attrs['scale_factor']
-            offset = so_dataset.attrs['add_offset']
-            fill_value = so_dataset.attrs['_FillValue']
-        variable.attrs['palette_meanings'] = [int(val)
-                                              for val in variable.attrs['palette_meanings'].split()]
-
-        if fill_value not in variable.attrs['palette_meanings'] and 'fill_value_color' in variable.attrs:
-            variable.attrs['palette_meanings'] = [fill_value] + variable.attrs['palette_meanings']
-            variable = xr.DataArray(da.vstack((np.array(variable.attrs['fill_value_color']), variable.data)),
+            scale = so_dataset.attrs["scale_factor"]
+            offset = so_dataset.attrs["add_offset"]
+            fill_value = so_dataset.attrs["_FillValue"]
+        variable.attrs["palette_meanings"] = [int(val)
+                                              for val in variable.attrs["palette_meanings"].split()]
+
+        if fill_value not in variable.attrs["palette_meanings"] and "fill_value_color" in variable.attrs:
+            variable.attrs["palette_meanings"] = [fill_value] + variable.attrs["palette_meanings"]
+            variable = xr.DataArray(da.vstack((np.array(variable.attrs["fill_value_color"]), variable.data)),
                                     coords=variable.coords, dims=variable.dims, attrs=variable.attrs)
-        val, idx = np.unique(variable.attrs['palette_meanings'], return_index=True)
-        variable.attrs['palette_meanings'] = val * scale + offset
+        val, idx = np.unique(variable.attrs["palette_meanings"], return_index=True)
+        variable.attrs["palette_meanings"] = val * scale + offset
         variable = variable[idx]

         return variable

     def _adjust_variable_for_legacy_software(self, variable):
-        if self.sw_version == 'NWC/PPS version v2014' and variable.attrs.get('standard_name') == 'cloud_top_altitude':
+        if self.sw_version == "NWC/PPS version v2014" and variable.attrs.get("standard_name") == "cloud_top_altitude":
             # pps 2014 valid range and palette don't match
-            variable.attrs['valid_range'] = (0., 9000.)
-        if (self.sw_version == 'NWC/PPS version v2014' and
-                variable.attrs.get('long_name') == 'RGB Palette for ctth_alti'):
+            variable.attrs["valid_range"] = (0., 9000.)
+        if (self.sw_version == "NWC/PPS version v2014" and
+                variable.attrs.get("long_name") == "RGB Palette for ctth_alti"):
             # pps 2014 palette has the nodata color (black) first
             variable = variable[1:, :]
         return variable
@@ -298,12 +298,12 @@ def _upsample_geolocation_uncached(self):
         from geotiepoints import SatelliteInterpolator

         # Read the fields needed:
-        col_indices = self.nc['nx_reduced'].values
-        row_indices = self.nc['ny_reduced'].values
-        lat_reduced = self.scale_dataset(self.nc['lat_reduced'], {})
-        lon_reduced = self.scale_dataset(self.nc['lon_reduced'], {})
+        col_indices = self.nc["nx_reduced"].values
+        row_indices = self.nc["ny_reduced"].values
+        lat_reduced = self.scale_dataset(self.nc["lat_reduced"], {})
+        lon_reduced = self.scale_dataset(self.nc["lon_reduced"], {})

-        shape = (self.nc['y'].shape[0], self.nc['x'].shape[0])
+        shape = (self.nc["y"].shape[0], self.nc["x"].shape[0])

         cols_full = np.arange(shape[1])
         rows_full = np.arange(shape[0])
@@ -313,8 +313,8 @@ def _upsample_geolocation_uncached(self):
                                        (rows_full, cols_full))
         lons, lats = satint.interpolate()
-        lon = xr.DataArray(lons, attrs=lon_reduced.attrs, dims=['y', 'x'])
-        lat = xr.DataArray(lats, attrs=lat_reduced.attrs, dims=['y', 'x'])
+        lon = xr.DataArray(lons, attrs=lon_reduced.attrs, dims=["y", "x"])
+        lat = xr.DataArray(lats, attrs=lat_reduced.attrs, dims=["y", "x"])
         lat = self.drop_xycoords(lat)
         lon = self.drop_xycoords(lon)
         return lon, lat
@@ -328,15 +328,15 @@ def get_area_def(self, dsid):
             # PPS:
             raise NotImplementedError

-        if dsid['name'].endswith('_pal'):
+        if dsid["name"].endswith("_pal"):
             raise NotImplementedError

         crs, area_extent = self._get_projection()
         crs, area_extent = self._ensure_crs_extents_in_meters(crs, area_extent)
-        nlines, ncols = self.nc[dsid['name']].shape
-        area = AreaDefinition('some_area_name',
+        nlines, ncols = self.nc[dsid["name"]].shape
+        area = AreaDefinition("some_area_name",
                               "On-the-fly area",
-                              'geosmsg',
+                              "geosmsg",
                               crs,
                               ncols,
                               nlines,
@@ -347,7 +347,7 @@ def get_area_def(self, dsid):
     @staticmethod
     def _ensure_crs_extents_in_meters(crs, area_extent):
         """Fix units in Earth shape, satellite altitude and 'units' attribute."""
-        if 'kilo' in crs.axis_info[0].unit_name:
+        if "kilo" in crs.axis_info[0].unit_name:
             proj_dict = crs.to_dict()
             proj_dict["units"] = "m"
             if "a" in proj_dict:
@@ -372,12 +372,12 @@ def __del__(self):
     @property
     def start_time(self):
         """Return the start time of the object."""
-        return read_nwcsaf_time(self.nc.attrs['time_coverage_start'])
+        return read_nwcsaf_time(self.nc.attrs["time_coverage_start"])

     @property
     def end_time(self):
         """Return the end time of the object."""
-        return read_nwcsaf_time(self.nc.attrs['time_coverage_end'])
+        return read_nwcsaf_time(self.nc.attrs["time_coverage_end"])

     @property
     def sensor_names(self):
@@ -387,26 +387,26 @@ def sensor_names(self):
     def _get_projection(self):
         """Get projection from the NetCDF4 attributes."""
         try:
-            proj_str = self.nc.attrs['gdal_projection']
+            proj_str = self.nc.attrs["gdal_projection"]
         except TypeError:
-            proj_str = self.nc.attrs['gdal_projection'].decode()
+            proj_str = self.nc.attrs["gdal_projection"].decode()

         # Check the a/b/h units
-        radius_a = proj_str.split('+a=')[-1].split()[0]
+        radius_a = proj_str.split("+a=")[-1].split()[0]
         if float(radius_a) > 10e3:
-            units = 'm'
+            units = "m"
             scale = 1.0
         else:
-            units = 'km'
+            units = "km"
             scale = 1e3

-        if 'units' not in proj_str:
-            proj_str = proj_str + ' +units=' + units
+        if "units" not in proj_str:
+            proj_str = proj_str + " +units=" + units

-        area_extent = (float(self.nc.attrs['gdal_xgeo_up_left']) / scale,
-                       float(self.nc.attrs['gdal_ygeo_low_right']) / scale,
-                       float(self.nc.attrs['gdal_xgeo_low_right']) / scale,
-                       float(self.nc.attrs['gdal_ygeo_up_left']) / scale)
+        area_extent = (float(self.nc.attrs["gdal_xgeo_up_left"]) / scale,
+                       float(self.nc.attrs["gdal_ygeo_low_right"]) / scale,
+                       float(self.nc.attrs["gdal_xgeo_low_right"]) / scale,
+                       float(self.nc.attrs["gdal_ygeo_up_left"]) / scale)

         crs = CRS.from_string(proj_str)
         return crs, area_extent
@@ -427,9 +427,9 @@ def read_nwcsaf_time(time_value):
     try:
         # MSG:
         try:
-            return datetime.strptime(time_value, '%Y-%m-%dT%H:%M:%SZ')
+            return datetime.strptime(time_value, "%Y-%m-%dT%H:%M:%SZ")
         except TypeError:
             # Remove this in summer 2024 (this is not needed since h5netcdf 0.14)
-            return datetime.strptime(time_value.astype(str), '%Y-%m-%dT%H:%M:%SZ')
+            return datetime.strptime(time_value.astype(str), "%Y-%m-%dT%H:%M:%SZ")
     except ValueError:
         # PPS:
-        return datetime.strptime(time_value, '%Y%m%dT%H%M%S%fZ')
+        return datetime.strptime(time_value, "%Y%m%dT%H%M%S%fZ")
diff --git a/satpy/readers/oceancolorcci_l3_nc.py b/satpy/readers/oceancolorcci_l3_nc.py
index b4ce1f7772..075e885b36 100644
--- a/satpy/readers/oceancolorcci_l3_nc.py
+++ b/satpy/readers/oceancolorcci_l3_nc.py
@@ -46,48 +46,48 @@ def _parse_datetime(datestr):
     @property
     def start_time(self):
         """Get the start time."""
-        return self._parse_datetime(self['/attr/time_coverage_start'])
+        return self._parse_datetime(self["/attr/time_coverage_start"])

     @property
     def end_time(self):
         """Get the end time."""
-        return self._parse_datetime(self['/attr/time_coverage_end'])
+        return self._parse_datetime(self["/attr/time_coverage_end"])

     @property
     def composite_period(self):
         """Determine composite period from filename information."""
-        comp1 = self.filename_info['composite_period_1']
-        comp2 = self.filename_info['composite_period_2']
-        if comp2 == 'MONTHLY' and comp1 == "1M":
-            return 'monthly'
-        elif comp1 == '1D':
-            return 'daily'
-        elif comp1 == '5D':
-            return '5-day'
-        elif comp1 == '8D':
-            return '8-day'
+        comp1 = self.filename_info["composite_period_1"]
+        comp2 = self.filename_info["composite_period_2"]
+        if comp2 == "MONTHLY" and comp1 == "1M":
+            return "monthly"
+        elif comp1 == "1D":
+            return "daily"
+        elif comp1 == "5D":
+            return "5-day"
+        elif comp1 == "8D":
+            return "8-day"
         else:
             raise ValueError(f"Unknown data compositing period: {comp1}_{comp2}")

     def _update_attrs(self, dataset, dataset_info):
         """Update dataset attributes."""
-        dataset.attrs.update(self[dataset_info['nc_key']].attrs)
+        dataset.attrs.update(self[dataset_info["nc_key"]].attrs)
         dataset.attrs.update(dataset_info)
-        dataset.attrs['sensor'] = 'merged'
-        dataset.attrs['composite_period'] = self.composite_period
+        dataset.attrs["sensor"] = "merged"
+        dataset.attrs["composite_period"] = self.composite_period
         # remove attributes from original file which don't apply anymore
         dataset.attrs.pop("nc_key")

     def get_dataset(self, dataset_id, ds_info):
         """Get dataset."""
-        dataset = da.squeeze(self[ds_info['nc_key']])
-        if '_FillValue' in dataset.attrs:
-            dataset.data = da.where(dataset.data == dataset.attrs['_FillValue'], np.nan, dataset.data)
+        dataset = da.squeeze(self[ds_info["nc_key"]])
+        if "_FillValue" in dataset.attrs:
+            dataset.data = da.where(dataset.data == dataset.attrs["_FillValue"], np.nan, dataset.data)
         self._update_attrs(dataset, ds_info)
-        if 'lat' in dataset.dims:
-            dataset = dataset.rename({'lat': 'y'})
-        if 'lon' in dataset.dims:
-            dataset = dataset.rename({'lon': 'x'})
+        if "lat" in dataset.dims:
+            dataset = dataset.rename({"lat": "y"})
+        if "lon" in dataset.dims:
+            dataset = dataset.rename({"lon": "x"})
         return dataset

     def get_area_def(self, dsid):
@@ -96,23 +96,23 @@ def get_area_def(self, dsid):
         There is no area definition in the file itself, so we have to compute
         it from the metadata, which specifies the area extent and pixel
         resolution.
         """
-        proj_param = 'EPSG:4326'
+        proj_param = "EPSG:4326"

-        lon_res = float(self['/attr/geospatial_lon_resolution'])
-        lat_res = float(self['/attr/geospatial_lat_resolution'])
+        lon_res = float(self["/attr/geospatial_lon_resolution"])
+        lat_res = float(self["/attr/geospatial_lat_resolution"])

-        min_lon = self['/attr/geospatial_lon_min']
-        max_lon = self['/attr/geospatial_lon_max']
-        min_lat = self['/attr/geospatial_lat_min']
-        max_lat = self['/attr/geospatial_lat_max']
+        min_lon = self["/attr/geospatial_lon_min"]
+        max_lon = self["/attr/geospatial_lon_max"]
+        min_lat = self["/attr/geospatial_lat_min"]
+        max_lat = self["/attr/geospatial_lat_max"]

         area_extent = (min_lon, min_lat, max_lon, max_lat)
         lon_size = np.round((max_lon - min_lon) / lon_res).astype(int)
         lat_size = np.round((max_lat - min_lat) / lat_res).astype(int)

-        area = geometry.AreaDefinition('gridded_occci',
-                                       'Full globe gridded area',
-                                       'longlat',
+        area = geometry.AreaDefinition("gridded_occci",
+                                       "Full globe gridded area",
+                                       "longlat",
                                        proj_param,
                                        lon_size,
                                        lat_size,
diff --git a/satpy/readers/olci_nc.py b/satpy/readers/olci_nc.py
index 112f5455ac..84b21c3284 100644
--- a/satpy/readers/olci_nc.py
+++ b/satpy/readers/olci_nc.py
@@ -59,9 +59,9 @@
 CHUNK_SIZE = get_legacy_chunk_size()

-PLATFORM_NAMES = {'S3A': 'Sentinel-3A',
-                  'S3B': 'Sentinel-3B',
-                  'ENV': 'Environmental Satellite'}
+PLATFORM_NAMES = {"S3A": "Sentinel-3A",
+                  "S3B": "Sentinel-3B",
+                  "ENV": "Environmental Satellite"}


 class BitFlags:
@@ -70,16 +70,16 @@ class BitFlags:
     def __init__(self, value, flag_list=None):
         """Init the flags."""
         self._value = value
-        flag_list = flag_list or ['INVALID', 'WATER', 'LAND', 'CLOUD', 'SNOW_ICE',
-                                  'INLAND_WATER', 'TIDAL', 'COSMETIC', 'SUSPECT',
-                                  'HISOLZEN', 'SATURATED', 'MEGLINT', 'HIGHGLINT',
-                                  'WHITECAPS', 'ADJAC', 'WV_FAIL', 'PAR_FAIL',
-                                  'AC_FAIL', 'OC4ME_FAIL', 'OCNN_FAIL',
-                                  'Extra_1',
-                                  'KDM_FAIL',
-                                  'Extra_2',
-                                  'CLOUD_AMBIGUOUS', 'CLOUD_MARGIN', 'BPAC_ON', 'WHITE_SCATT',
-                                  'LOWRW', 'HIGHRW']
+        flag_list = flag_list or ["INVALID", "WATER", "LAND", "CLOUD", "SNOW_ICE",
+                                  "INLAND_WATER", "TIDAL", "COSMETIC", "SUSPECT",
+                                  "HISOLZEN", "SATURATED", "MEGLINT", "HIGHGLINT",
+                                  "WHITECAPS", "ADJAC", "WV_FAIL", "PAR_FAIL",
+                                  "AC_FAIL", "OC4ME_FAIL", "OCNN_FAIL",
+                                  "Extra_1",
+                                  "KDM_FAIL",
+                                  "Extra_2",
+                                  "CLOUD_AMBIGUOUS", "CLOUD_MARGIN", "BPAC_ON", "WHITE_SCATT",
+                                  "LOWRW", "HIGHRW"]
         self.meaning = {f: i for i, f in enumerate(flag_list)}

     def __getitem__(self, item):
@@ -108,11 +108,11 @@ def __init__(self, filename, filename_info, filetype_info,
         """Init the olci reader base."""
         super().__init__(filename, filename_info, filetype_info)
         self._engine = engine
-        self._start_time = filename_info['start_time']
-        self._end_time = filename_info['end_time']
+        self._start_time = filename_info["start_time"]
+        self._end_time = filename_info["end_time"]
         # TODO: get metadata from the manifest file (xfdumanifest.xml)
-        self.platform_name = PLATFORM_NAMES[filename_info['mission_id']]
-        self.sensor = 'olci'
+        self.platform_name = PLATFORM_NAMES[filename_info["mission_id"]]
+        self.sensor = "olci"

     @cached_property
     def nc(self):
@@ -124,7 +124,7 @@ def nc(self):
                                     engine=self._engine,
                                     chunks={self.cols_name: CHUNK_SIZE,
                                             self.rows_name: CHUNK_SIZE})
-        return dataset.rename({self.cols_name: 'x', self.rows_name: 'y'})
+        return dataset.rename({self.cols_name: "x", self.rows_name: "y"})

     @property
     def start_time(self):
@@ -138,8 +138,8 @@ def end_time(self):

     def get_dataset(self, key, info):
         """Load a dataset."""
-        logger.debug('Reading %s.', key['name'])
-        variable = self.nc[key['name']]
+        logger.debug("Reading %s.", key["name"])
+        variable = self.nc[key["name"]]

         return variable

@@ -158,9 +158,9 @@ class NCOLCIChannelBase(NCOLCIBase):
     def __init__(self, filename, filename_info, filetype_info, engine=None):
         """Init the file handler."""
         super().__init__(filename, filename_info, filetype_info, engine)
-        self.channel = filename_info.get('dataset_name')
-        self.reflectance_prefix = 'Oa'
-        self.reflectance_suffix = '_reflectance'
+        self.channel = filename_info.get("dataset_name")
+        self.reflectance_prefix = "Oa"
+        self.reflectance_suffix = "_reflectance"


 class NCOLCI1B(NCOLCIChannelBase):
@@ -178,28 +178,28 @@ def _get_items(idx, solar_flux):

     def _get_solar_flux(self, band):
         """Get the solar flux for the band."""
-        solar_flux = self.cal['solar_flux'].isel(bands=band).values
-        d_index = self.cal['detector_index'].fillna(0).astype(int)
+        solar_flux = self.cal["solar_flux"].isel(bands=band).values
+        d_index = self.cal["detector_index"].fillna(0).astype(int)

         return da.map_blocks(self._get_items, d_index.data,
                              solar_flux=solar_flux, dtype=solar_flux.dtype)

     def get_dataset(self, key, info):
         """Load a dataset."""
-        if self.channel != key['name']:
+        if self.channel != key["name"]:
             return
-        logger.debug('Reading %s.', key['name'])
+        logger.debug("Reading %s.", key["name"])

-        radiances = self.nc[self.channel + '_radiance']
+        radiances = self.nc[self.channel + "_radiance"]

-        if key['calibration'] == 'reflectance':
-            idx = int(key['name'][2:]) - 1
+        if key["calibration"] == "reflectance":
+            idx = int(key["name"][2:]) - 1
             sflux = self._get_solar_flux(idx)
             radiances = radiances / sflux * np.pi * 100
-            radiances.attrs['units'] = '%'
+            radiances.attrs["units"] = "%"

-        radiances.attrs['platform_name'] = self.platform_name
-        radiances.attrs['sensor'] = self.sensor
+        radiances.attrs["platform_name"] = self.platform_name
+        radiances.attrs["sensor"] = self.sensor
         radiances.attrs.update(key.to_dict())
         return radiances

@@ -215,20 +215,20 @@ def __init__(self, filename, filename_info, filetype_info, engine=None, unlog=Fa

     def get_dataset(self, key, info):
         """Load a dataset."""
-        if self.channel is not None and self.channel != key['name']:
+        if self.channel is not None and self.channel != key["name"]:
             return
-        logger.debug('Reading %s.', key['name'])
+        logger.debug("Reading %s.", key["name"])
         if self.channel is not None and self.channel.startswith(self.reflectance_prefix):
             dataset = self.nc[self.channel + self.reflectance_suffix]
         else:
-            dataset = self.nc[info['nc_key']]
+            dataset = self.nc[info["nc_key"]]

-        if key['name'] == 'wqsf':
-            dataset.attrs['_FillValue'] = 1
-        elif key['name'] == 'mask':
+        if key["name"] == "wqsf":
+            dataset.attrs["_FillValue"] = 1
+        elif key["name"] == "mask":
             dataset = self.getbitmask(dataset, self.mask_items)

-        dataset.attrs['platform_name'] = self.platform_name
-        dataset.attrs['sensor'] = self.sensor
+        dataset.attrs["platform_name"] = self.platform_name
+        dataset.attrs["sensor"] = self.sensor
         dataset.attrs.update(key.to_dict())
         if self.unlog:
             dataset = self.delog(dataset)
@@ -262,8 +262,8 @@ def __init__(self, filename, filename_info, filetype_info,
                  engine=None, **kwargs):
         """Init the file handler."""
         super().__init__(filename, filename_info, filetype_info, engine)
-        self.l_step = self.nc.attrs['al_subsampling_factor']
-        self.c_step = self.nc.attrs['ac_subsampling_factor']
+        self.l_step = self.nc.attrs["al_subsampling_factor"]
+        self.c_step = self.nc.attrs["ac_subsampling_factor"]

     def _do_interpolate(self, data):
@@ -287,7 +287,7 @@ def _do_interpolate(self, data):
         int_data = satint.interpolate()

         return [xr.DataArray(da.from_array(x, chunks=(CHUNK_SIZE, CHUNK_SIZE)),
-                             dims=['y', 'x']) for x in int_data]
+                             dims=["y", "x"]) for x in int_data]

     @property
     def _need_interpolation(self):
@@ -297,37 +297,37 @@ def _need_interpolation(self):
 class NCOLCIAngles(NCOLCILowResData):
     """File handler for the OLCI angles."""

-    datasets = {'satellite_azimuth_angle': 'OAA',
-                'satellite_zenith_angle': 'OZA',
-                'solar_azimuth_angle': 'SAA',
-                'solar_zenith_angle': 'SZA'}
+    datasets = {"satellite_azimuth_angle": "OAA",
+                "satellite_zenith_angle": "OZA",
+                "solar_azimuth_angle": "SAA",
+                "solar_zenith_angle": "SZA"}

     def get_dataset(self, key, info):
         """Load a dataset."""
-        if key['name'] not in self.datasets:
+        if key["name"] not in self.datasets:
             return
-        logger.debug('Reading %s.', key['name'])
+        logger.debug("Reading %s.", key["name"])

         if self._need_interpolation:
-            if key['name'].startswith('satellite'):
+            if key["name"].startswith("satellite"):
                 azi, zen = self.satellite_angles
-            elif key['name'].startswith('solar'):
+            elif key["name"].startswith("solar"):
                 azi, zen = self.sun_angles
             else:
-                raise NotImplementedError("Don't know how to read " + key['name'])
+                raise NotImplementedError("Don't know how to read " + key["name"])

-            if 'zenith' in key['name']:
+            if "zenith" in key["name"]:
                 values = zen
-            elif 'azimuth' in key['name']:
+            elif "azimuth" in key["name"]:
                 values = azi
             else:
-                raise NotImplementedError("Don't know how to read " + key['name'])
+                raise NotImplementedError("Don't know how to read " + key["name"])
         else:
-            values = self.nc[self.datasets[key['name']]]
+            values = self.nc[self.datasets[key["name"]]]

-        values.attrs['platform_name'] = self.platform_name
-        values.attrs['sensor'] = self.sensor
+        values.attrs["platform_name"] = self.platform_name
+        values.attrs["sensor"] = self.sensor

         values.attrs.update(key.to_dict())
         return values
@@ -335,16 +335,16 @@ def get_dataset(self, key, info):
     @cached_property
     def sun_angles(self):
         """Return the sun angles."""
-        zen = self.nc[self.datasets['solar_zenith_angle']]
-        azi = self.nc[self.datasets['solar_azimuth_angle']]
+        zen = self.nc[self.datasets["solar_zenith_angle"]]
+        azi = self.nc[self.datasets["solar_azimuth_angle"]]
         azi, zen = self._interpolate_angles(azi, zen)
         return azi, zen

     @cached_property
     def satellite_angles(self):
         """Return the satellite angles."""
-        zen = self.nc[self.datasets['satellite_zenith_angle']]
-        azi = self.nc[self.datasets['satellite_azimuth_angle']]
+        zen = self.nc[self.datasets["satellite_zenith_angle"]]
+        azi = self.nc[self.datasets["satellite_azimuth_angle"]]
         azi, zen = self._interpolate_angles(azi, zen)
         return azi, zen

@@ -362,7 +362,7 @@ def _interpolate_angles(self, azi, zen):
 class NCOLCIMeteo(NCOLCILowResData):
     """File handler for the OLCI meteo data."""

-    datasets = ['humidity', 'sea_level_pressure', 'total_columnar_water_vapour', 'total_ozone']
+    datasets = ["humidity", "sea_level_pressure", "total_columnar_water_vapour", "total_ozone"]

     def __init__(self, filename, filename_info, filetype_info,
                  engine=None):
@@ -377,27 +377,27 @@ def __init__(self, filename, filename_info,

     def get_dataset(self, key, info):
         """Load a dataset."""
-        if key['name'] not in self.datasets:
+        if key["name"] not in self.datasets:
             return
-        logger.debug('Reading %s.', key['name'])
+        logger.debug("Reading %s.", key["name"])

-        if self._need_interpolation and self.cache.get(key['name']) is None:
+        if self._need_interpolation and self.cache.get(key["name"]) is None:

-            data = self.nc[key['name']]
+            data = self.nc[key["name"]]

             values, = self._do_interpolate(data)
             values.attrs = data.attrs

-            self.cache[key['name']] = values
+            self.cache[key["name"]] = values

-        elif key['name'] in self.cache:
-            values = self.cache[key['name']]
+        elif key["name"] in self.cache:
+            values = self.cache[key["name"]]
         else:
-            values = self.nc[key['name']]
+            values = self.nc[key["name"]]

-        values.attrs['platform_name'] = self.platform_name
-        values.attrs['sensor'] = self.sensor
+        values.attrs["platform_name"] = self.platform_name
+        values.attrs["sensor"] = self.sensor

         values.attrs.update(key.to_dict())
         return values
diff --git a/satpy/readers/omps_edr.py b/satpy/readers/omps_edr.py
index 9de71d4efa..5421ae2cd2 100644
--- a/satpy/readers/omps_edr.py
+++ b/satpy/readers/omps_edr.py
@@ -36,26 +36,26 @@ class EDRFileHandler(HDF5FileHandler):
     @property
     def start_orbit_number(self):
         """Get the start orbit number."""
-        return self.filename_info['orbit']
+        return self.filename_info["orbit"]

     @property
     def end_orbit_number(self):
         """Get the end orbit number."""
-        return self.filename_info['orbit']
+        return self.filename_info["orbit"]

     @property
     def platform_name(self):
         """Get the platform name."""
-        return self.filename_info['platform_shortname']
+        return self.filename_info["platform_shortname"]

     @property
     def sensor_name(self):
         """Get the sensor name."""
-        return self.filename_info['instrument_shortname']
+        return self.filename_info["instrument_shortname"]

     def get_shape(self, ds_id, ds_info):
         """Get the shape."""
-        return self[ds_info['file_key'] + '/shape']
+        return self[ds_info["file_key"] + "/shape"]

     def adjust_scaling_factors(self, factors, file_units, output_units):
         """Adjust scaling factors."""
@@ -68,20 +68,20 @@ def adjust_scaling_factors(self, factors, file_units, output_units):

     def get_metadata(self, dataset_id, ds_info):
         """Get the metadata."""
-        var_path = ds_info.get('file_key', '{}'.format(dataset_id['name']))
-        info = getattr(self[var_path], 'attrs', {}).copy()
-        info.pop('DIMENSION_LIST', None)
+        var_path = ds_info.get("file_key", "{}".format(dataset_id["name"]))
+        info = getattr(self[var_path], "attrs", {}).copy()
+        info.pop("DIMENSION_LIST", None)
         info.update(ds_info)

-        file_units = ds_info.get('file_units')
+        file_units = ds_info.get("file_units")
         if file_units is None:
-            file_units = self.get(var_path + '/attr/units', self.get(var_path + '/attr/Units'))
+            file_units = self.get(var_path + "/attr/units", self.get(var_path + "/attr/Units"))
         if file_units is None:
             raise KeyError("File variable '{}' has no units attribute".format(var_path))
-        if file_units == 'deg':
-            file_units = 'degrees'
-        elif file_units == 'Unitless':
-            file_units = '1'
+        if file_units == "deg":
+            file_units = "degrees"
+        elif file_units == "Unitless":
+            file_units = "1"

         info.update({
             "shape": self.get_shape(dataset_id, ds_info),
@@ -93,32 +93,32 @@ def get_metadata(self, dataset_id, ds_info):
             "end_orbit": self.end_orbit_number,
         })
         info.update(dataset_id.to_dict())
-        if 'standard_name' not in ds_info:
-            info['standard_name'] = self.get(var_path + '/attr/Title', dataset_id['name'])
+        if "standard_name" not in ds_info:
+            info["standard_name"] = self.get(var_path + "/attr/Title", dataset_id["name"])
         return info

     def get_dataset(self, dataset_id, ds_info):
         """Get the dataset."""
-        var_path = ds_info.get('file_key', '{}'.format(dataset_id['name']))
+        var_path = ds_info.get("file_key", "{}".format(dataset_id["name"]))
         metadata = self.get_metadata(dataset_id, ds_info)
-        valid_min, valid_max = self.get(var_path + '/attr/valid_range',
-                                        self.get(var_path + '/attr/ValidRange', (None, None)))
+        valid_min, valid_max = self.get(var_path + "/attr/valid_range",
+                                        self.get(var_path + "/attr/ValidRange", (None, None)))
         if valid_min is None or valid_max is None:
-            valid_min = self.get(var_path + '/attr/valid_min', None)
-            valid_max = self.get(var_path + '/attr/valid_max', None)
+            valid_min = self.get(var_path + "/attr/valid_min", None)
+            valid_max = self.get(var_path + "/attr/valid_max", None)
             if valid_min is None or valid_max is None:
                 raise KeyError("File variable '{}' has no valid range attribute".format(var_path))
-        fill_name = var_path + '/attr/{}'.format(self._fill_name)
+        fill_name = var_path + "/attr/{}".format(self._fill_name)
         if fill_name in self:
             fill_value = self[fill_name]
         else:
             fill_value = None

         data = self[var_path]
-        scale_factor_path = var_path + '/attr/ScaleFactor'
+        scale_factor_path = var_path + "/attr/ScaleFactor"
         if scale_factor_path in self:
             scale_factor = self[scale_factor_path]
-            scale_offset = self[var_path + '/attr/Offset']
+            scale_offset = self[var_path + "/attr/Offset"]
         else:
             scale_factor = None
             scale_offset = None
@@ -130,14 +130,14 @@ def get_dataset(self, dataset_id, ds_info):
             data = data.where(data != fill_value)

         factors = (scale_factor, scale_offset)
-        factors = self.adjust_scaling_factors(factors, metadata['file_units'], ds_info.get("units"))
+        factors = self.adjust_scaling_factors(factors, metadata["file_units"], ds_info.get("units"))
         if factors[0] != 1 or factors[1] != 0:
             data = data * factors[0] + factors[1]

         data.attrs.update(metadata)
-        if 'DIMENSION_LIST' in data.attrs:
-            data.attrs.pop('DIMENSION_LIST')
-            dimensions = self.get_reference(var_path, 'DIMENSION_LIST')
+        if "DIMENSION_LIST" in data.attrs:
+            data.attrs.pop("DIMENSION_LIST")
+            dimensions = self.get_reference(var_path, "DIMENSION_LIST")
             for dim, coord in zip(data.dims, dimensions):
                 data.coords[dim] = coord[0]
         return data
diff --git a/satpy/readers/safe_sar_l2_ocn.py b/satpy/readers/safe_sar_l2_ocn.py
index 267f8683f8..c5b63dd8be 100644
--- a/satpy/readers/safe_sar_l2_ocn.py
+++ b/satpy/readers/safe_sar_l2_ocn.py
@@ -45,15 +45,15 @@ def __init__(self, filename, filename_info, filetype_info):
         super(SAFENC, self).__init__(filename, filename_info,
                                      filetype_info)

-        self._start_time = filename_info['start_time']
-        self._end_time = filename_info['end_time']
+        self._start_time = filename_info["start_time"]
+        self._end_time = filename_info["end_time"]
         # For some SAFE packages, fstart_time differs, but start_time is the same.
         # To avoid overwriting an existing file with the same start_time, a solution is to
         # use fstart_time
-        self._fstart_time = filename_info['fstart_time']
-        self._fend_time = filename_info['fend_time']
+        self._fstart_time = filename_info["fstart_time"]
+        self._fend_time = filename_info["fend_time"]

-        self._polarization = filename_info['polarization']
+        self._polarization = filename_info["polarization"]

         self.lats = None
         self.lons = None
@@ -63,19 +63,19 @@ def __init__(self, filename, filename_info, filetype_info):
         self.nc = xr.open_dataset(filename,
                                   decode_cf=True,
                                   mask_and_scale=False,
-                                  chunks={'owiAzSize': CHUNK_SIZE,
-                                          'owiRaSize': CHUNK_SIZE})
-        self.nc = self.nc.rename({'owiAzSize': 'y'})
-        self.nc = self.nc.rename({'owiRaSize': 'x'})
+                                  chunks={"owiAzSize": CHUNK_SIZE,
+                                          "owiRaSize": CHUNK_SIZE})
+        self.nc = self.nc.rename({"owiAzSize": "y"})
+        self.nc = self.nc.rename({"owiRaSize": "x"})
         self.filename = filename

     def get_dataset(self, key, info):
         """Load a dataset."""
-        if key['name'] in ['owiLat', 'owiLon']:
+        if key["name"] in ["owiLat", "owiLon"]:
             if self.lons is None or self.lats is None:
-                self.lons = self.nc['owiLon']
-                self.lats = self.nc['owiLat']
-            if key['name'] == 'owiLat':
+                self.lons = self.nc["owiLon"]
+                self.lats = self.nc["owiLat"]
+            if key["name"] == "owiLat":
                 res = self.lats
             else:
                 res = self.lons
@@ -83,11 +83,11 @@ def get_dataset(self, key, info):
         else:
             res = self._get_data_channels(key, info)

-        if 'missionName' in self.nc.attrs:
-            res.attrs.update({'platform_name': self.nc.attrs['missionName']})
+        if "missionName" in self.nc.attrs:
+            res.attrs.update({"platform_name": self.nc.attrs["missionName"]})

-        res.attrs.update({'fstart_time': self._fstart_time})
-        res.attrs.update({'fend_time': self._fend_time})
+        res.attrs.update({"fstart_time": self._fstart_time})
+        res.attrs.update({"fend_time": self._fend_time})

         if not self._shape:
             self._shape = res.shape
@@ -95,23 +95,23 @@ def get_dataset(self, key, info):
         return res

     def _get_data_channels(self, key, info):
-        res = self.nc[key['name']]
-        if key['name'] in ['owiHs', 'owiWl', 'owiDirmet']:
-            res = xr.DataArray(res, dims=['y', 'x', 'oswPartitions'])
-        elif key['name'] in ['owiNrcs', 'owiNesz', 'owiNrcsNeszCorr']:
-            res = xr.DataArray(res, dims=['y', 'x', 'oswPolarisation'])
-        elif key['name'] in ['owiPolarisationName']:
-            res = xr.DataArray(res, dims=['owiPolarisation'])
-        elif key['name'] in ['owiCalConstObsi', 'owiCalConstInci']:
-            res = xr.DataArray(res, dims=['owiIncSize'])
-        elif key['name'].startswith('owi'):
-            res = xr.DataArray(res, dims=['y', 'x'])
+        res = self.nc[key["name"]]
+        if key["name"] in ["owiHs", "owiWl", "owiDirmet"]:
+            res = xr.DataArray(res, dims=["y", "x", "oswPartitions"])
+        elif key["name"] in ["owiNrcs", "owiNesz", "owiNrcsNeszCorr"]:
+            res = xr.DataArray(res, dims=["y", "x", "oswPolarisation"])
+        elif key["name"] in ["owiPolarisationName"]:
+            res = xr.DataArray(res, dims=["owiPolarisation"])
+        elif key["name"] in ["owiCalConstObsi", "owiCalConstInci"]:
+            res = xr.DataArray(res, dims=["owiIncSize"])
+        elif key["name"].startswith("owi"):
+            res = xr.DataArray(res, dims=["y", "x"])
         else:
-            res = xr.DataArray(res, dims=['y', 'x'])
+            res = xr.DataArray(res, dims=["y", "x"])

         res.attrs.update(info)
-        if '_FillValue' in res.attrs:
-            res = res.where(res != res.attrs['_FillValue'])
-            res.attrs['_FillValue'] = np.nan
+        if "_FillValue" in res.attrs:
+            res = res.where(res != res.attrs["_FillValue"])
+            res.attrs["_FillValue"] = np.nan
         return res

     @property
diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py
index 4b2d214187..19e5396b61 100644
--- a/satpy/readers/sar_c_safe.py
+++ b/satpy/readers/sar_c_safe.py
@@ -80,11 +80,11 @@ def _dictify(r):

 def _get_calibration_name(calibration):
     """Get the proper calibration name."""
-    calibration_name = getattr(calibration, "name", calibration) or 'gamma'
-    if calibration_name == 'sigma_nought':
-        calibration_name = 'sigmaNought'
-    elif calibration_name == 'beta_nought':
-        calibration_name = 'betaNought'
+    calibration_name = getattr(calibration, "name", calibration) or "gamma"
+    if calibration_name == "sigma_nought":
+        calibration_name = "sigmaNought"
+    elif calibration_name == "beta_nought":
+        calibration_name = "betaNought"

     return calibration_name

@@ -96,17 +96,17 @@ def __init__(self, filename, filename_info, filetype_info,
         """Init the xml filehandler."""
         super(SAFEXML, self).__init__(filename, filename_info, filetype_info)

-        self._start_time = filename_info['start_time']
-        self._end_time = filename_info['end_time']
-        self._polarization = filename_info['polarization']
+        self._start_time = filename_info["start_time"]
+        self._end_time = filename_info["end_time"]
+        self._polarization = filename_info["polarization"]
         self.root = ET.parse(self.filename)
         self.hdr = {}
         if header_file is not None:
             self.hdr = header_file.get_metadata()
         else:
             self.hdr = self.get_metadata()
-        self._image_shape = (self.hdr['product']['imageAnnotation']['imageInformation']['numberOfLines'],
-                             self.hdr['product']['imageAnnotation']['imageInformation']['numberOfSamples'])
+        self._image_shape = (self.hdr["product"]["imageAnnotation"]["imageInformation"]["numberOfLines"],
+                             self.hdr["product"]["imageAnnotation"]["imageInformation"]["numberOfSamples"])

     def get_metadata(self):
         """Convert the xml metadata to dict."""
@@ -169,7 +169,7 @@ def get_dataset(self, key, info, chunks=None):

     def get_calibration_constant(self):
         """Load the calibration constant."""
-        return float(self.root.find('.//absoluteCalibrationConstant').text)
+        return float(self.root.find(".//absoluteCalibrationConstant").text)

     def _get_calibration_uncached(self, calibration, chunks=None):
         """Get the calibration array."""
@@ -280,9 +280,9 @@ def _assemble_azimuth_noise_blocks(self, chunks):
         # relying mostly on dask arrays.
         slices = self._create_dask_slices_from_blocks(chunks)
         populated_array = da.vstack(slices).rechunk(chunks)
-        populated_array = xr.DataArray(populated_array, dims=['y', 'x'],
-                                       coords={'x': np.arange(self._image_shape[1]),
-                                               'y': np.arange(self._image_shape[0])})
+        populated_array = xr.DataArray(populated_array, dims=["y", "x"],
+                                       coords={"x": np.arange(self._image_shape[1]),
+                                               "y": np.arange(self._image_shape[0])})
         return populated_array

     def _create_dask_slices_from_blocks(self, chunks):
@@ -306,7 +306,7 @@ def _create_dask_slice_from_block_line(self, current_line, chunks):
     def _get_array_pieces_for_current_line(self, current_line):
         """Get the array pieces that cover the current line."""
         current_blocks = self._find_blocks_covering_line(current_line)
-        current_blocks.sort(key=(lambda x: x.coords['x'][0]))
+        current_blocks.sort(key=(lambda x: x.coords["x"][0]))
         next_line = self._get_next_start_line(current_blocks, current_line)
         current_y = np.arange(current_line, next_line)
         pieces = [arr.sel(y=current_y) for arr in current_blocks]
@@ -316,12 +316,12 @@ def _find_blocks_covering_line(self, current_line):
         """Find the blocks covering a given line."""
         current_blocks = []
         for block in self.blocks:
-            if block.coords['y'][0] <= current_line <= block.coords['y'][-1]:
+            if block.coords["y"][0] <= current_line <= block.coords["y"][-1]:
                 current_blocks.append(block)
         return current_blocks

     def _get_next_start_line(self, current_blocks, current_line):
-        next_line = min((arr.coords['y'][-1] for arr in current_blocks)) + 1
+        next_line = min((arr.coords["y"][-1] for arr in current_blocks)) + 1
         blocks_starting_soon = [block for block in self.blocks if current_line < block.coords["y"][0] < next_line]
         if blocks_starting_soon:
             next_start_line = min((arr.coords["y"][0] for arr in blocks_starting_soon))
@@ -330,21 +330,21 @@ def _get_next_start_line(self, current_blocks, current_line):

     def _get_padded_dask_pieces(self, pieces, chunks):
         """Get the padded pieces of a slice."""
-        pieces = sorted(pieces, key=(lambda x: x.coords['x'][0]))
+        pieces = sorted(pieces, key=(lambda x: x.coords["x"][0]))
         dask_pieces = []
         previous_x_end = -1
         piece = pieces[0]
-        next_x_start = piece.coords['x'][0].item()
-        y_shape = len(piece.coords['y'])
+        next_x_start = piece.coords["x"][0].item()
+        y_shape = len(piece.coords["y"])

         x_shape = (next_x_start - previous_x_end - 1)
         self._fill_dask_pieces(dask_pieces, (y_shape, x_shape), chunks)

         for i, piece in enumerate(pieces):
             dask_pieces.append(piece.data)
-            previous_x_end = piece.coords['x'][-1].item()
+            previous_x_end = piece.coords["x"][-1].item()
             try:
-                next_x_start = pieces[i + 1].coords['x'][0].item()
+                next_x_start = pieces[i + 1].coords["x"][0].item()
             except IndexError:
                 next_x_start = self._image_shape[1]

@@ -405,35 +405,35 @@ def expand(self, chunks):
         new_arr = (da.ones((len(y_coord), len(x_coord)), chunks=chunks) *
                    np.interp(y_coord, self.lines, data)[:, np.newaxis])
         new_arr = xr.DataArray(new_arr,
-                               dims=['y', 'x'],
-                               coords={'x': x_coord,
-                                       'y': y_coord})
+                               dims=["y", "x"],
+                               coords={"x": x_coord,
+                                       "y": y_coord})
         return new_arr

     @property
     def first_pixel(self):
-        return int(self.element.find('firstRangeSample').text)
+        return int(self.element.find("firstRangeSample").text)

     @property
     def last_pixel(self):
-        return int(self.element.find('lastRangeSample').text)
+        return int(self.element.find("lastRangeSample").text)

     @property
     def first_line(self):
-        return int(self.element.find('firstAzimuthLine').text)
+        return int(self.element.find("firstAzimuthLine").text)

     @property
     def last_line(self):
-        return int(self.element.find('lastAzimuthLine').text)
+        return int(self.element.find("lastAzimuthLine").text)

     @property
     def lines(self):
-        lines = self.element.find('line').text.split()
+        lines = self.element.find("line").text.split()
         return np.array(lines).astype(int)

     @property
     def lut(self):
-        lut = self.element.find('noiseAzimuthLut').text.split()
+        lut = self.element.find("noiseAzimuthLut").text.split()
         return np.array(lut).astype(float)

@@ -458,8 +458,8 @@ def _read_xml_array(self):
         x = []
         data = []
         for elt in elements:
-            new_x = elt.find('pixel').text.split()
-            y += [int(elt.find('line').text)] * len(new_x)
+            new_x = elt.find("pixel").text.split()
+            y += [int(elt.find("line").text)] * len(new_x)
             x += [int(val) for val in new_x]
             data += [float(val)
                      for val in elt.find(self.element_tag).text.split()]
@@ -488,7 +488,7 @@ def interpolate_xarray(xpoints, ypoints, values, shape,
     hchunks = range(0, shape[1], blocksize)

     token = tokenize(blocksize, xpoints, ypoints, values, shape)
-    name = 'interpolate-' + token
+    name = "interpolate-" + token

     spline = RectBivariateSpline(xpoints, ypoints, values.T)

@@ -507,7 +507,7 @@ def interpolator(xnew, ynew):
     res = da.Array(dskx, name, shape=list(shape),
                    chunks=(blocksize, blocksize),
                    dtype=values.dtype)
-    return DataArray(res, dims=('y', 'x'))
+    return DataArray(res, dims=("y", "x"))


 def intp(grid_x, grid_y, interpolator):
@@ -536,7 +536,7 @@ def interpolate_xarray_linear(xpoints, ypoints, values, shape, chunks=CHUNK_SIZE
         interpolator((0, 0))

     res = da.map_blocks(intp, grid_x, grid_y, interpolator=interpolator)

-    return DataArray(res, dims=('y', 'x'))
+    return DataArray(res, dims=("y", "x"))


 class SAFEGRD(BaseFileHandler):
@@ -552,19 +552,19 @@ def __init__(self, filename, filename_info, filetype_info, calfh, noisefh, annot

         super(SAFEGRD, self).__init__(filename, filename_info,
                                       filetype_info)

-        self._start_time = filename_info['start_time']
-        self._end_time = filename_info['end_time']
+        self._start_time = filename_info["start_time"]
+        self._end_time = filename_info["end_time"]

-        self._polarization = filename_info['polarization']
+        self._polarization = filename_info["polarization"]

-        self._mission_id = filename_info['mission_id']
+        self._mission_id = filename_info["mission_id"]

         self.calibration = calfh
         self.noise = noisefh
         self.annotation = annotationfh
         self.read_lock = Lock()

-        self.filehandle = rasterio.open(self.filename, 'r', sharing=False)
+        self.filehandle = rasterio.open(self.filename, "r", sharing=False)
         self.get_lonlatalts = functools.lru_cache(maxsize=2)(
             self._get_lonlatalts_uncached
         )
@@ -574,37 +574,37 @@ def get_dataset(self, key, info):
         if self._polarization != key["polarization"]:
             return

-        logger.debug('Reading %s.', key['name'])
+        logger.debug("Reading %s.", key["name"])

-        if key['name'] in ['longitude', 'latitude', 'altitude']:
-            logger.debug('Constructing coordinate arrays.')
+        if key["name"] in ["longitude", "latitude", "altitude"]:
+            logger.debug("Constructing coordinate arrays.")
             arrays = dict()
-            arrays['longitude'], arrays['latitude'], arrays['altitude'] = self.get_lonlatalts()
+            arrays["longitude"], arrays["latitude"], arrays["altitude"] = self.get_lonlatalts()

-            data = arrays[key['name']]
+            data = arrays[key["name"]]
             data.attrs.update(info)

         else:
             data = xr.open_dataset(self.filename, engine="rasterio",
                                    chunks={"band": 1, "y": CHUNK_SIZE, "x": CHUNK_SIZE})["band_data"].squeeze()
-            data = data.assign_coords(x=np.arange(len(data.coords['x'])),
-                                      y=np.arange(len(data.coords['y'])))
+            data = 
data.assign_coords(x=np.arange(len(data.coords["x"])), + y=np.arange(len(data.coords["y"]))) data = self._calibrate_and_denoise(data, key) data.attrs.update(info) - data.attrs.update({'platform_name': self._mission_id}) + data.attrs.update({"platform_name": self._mission_id}) - data = self._change_quantity(data, key['quantity']) + data = self._change_quantity(data, key["quantity"]) return data @staticmethod def _change_quantity(data, quantity): """Change quantity to dB if needed.""" - if quantity == 'dB': + if quantity == "dB": data.data = 10 * np.log10(data.data) - data.attrs['units'] = 'dB' + data.attrs["units"] = "dB" else: - data.attrs['units'] = '1' + data.attrs["units"] = "1" return data @@ -627,17 +627,17 @@ def _get_digital_number(self, data): def _denoise(self, dn, chunks): """Denoise the data.""" - logger.debug('Reading noise data.') + logger.debug("Reading noise data.") noise = self.noise.get_noise_correction(chunks=chunks).fillna(0) dn = dn - noise return dn def _calibrate(self, dn, chunks, key): """Calibrate the data.""" - logger.debug('Reading calibration data.') - cal = self.calibration.get_calibration(key['calibration'], chunks=chunks) + logger.debug("Reading calibration data.") + cal = self.calibration.get_calibration(key["calibration"], chunks=chunks) cal_constant = self.calibration.get_calibration_constant() - logger.debug('Calibrating.') + logger.debug("Calibrating.") data = ((dn + cal_constant) / (cal ** 2)).clip(min=0) return data @@ -661,12 +661,12 @@ def _get_lonlatalts_uncached(self): latitudes = interpolate_xarray(xpoints, ypoints, gcp_lats, band.shape) altitudes = interpolate_xarray(xpoints, ypoints, gcp_alts, band.shape) - longitudes.attrs['gcps'] = gcps - longitudes.attrs['crs'] = crs - latitudes.attrs['gcps'] = gcps - latitudes.attrs['crs'] = crs - altitudes.attrs['gcps'] = gcps - altitudes.attrs['crs'] = crs + longitudes.attrs["gcps"] = gcps + longitudes.attrs["crs"] = crs + latitudes.attrs["gcps"] = gcps + latitudes.attrs["crs"] = crs + altitudes.attrs["gcps"] = gcps + altitudes.attrs["crs"] = crs return longitudes, latitudes, altitudes diff --git a/satpy/readers/satpy_cf_nc.py b/satpy/readers/satpy_cf_nc.py index 7add1df265..cf99b57e7d 100644 --- a/satpy/readers/satpy_cf_nc.py +++ b/satpy/readers/satpy_cf_nc.py @@ -196,7 +196,7 @@ class SatpyCFFileHandler(BaseFileHandler): """File handler for Satpy's CF netCDF files.""" - def __init__(self, filename, filename_info, filetype_info, numeric_name_prefix='CHANNEL_'): + def __init__(self, filename, filename_info, filetype_info, numeric_name_prefix="CHANNEL_"): """Initialize file handler.""" super().__init__(filename, filename_info, filetype_info) self.engine = None @@ -205,12 +205,12 @@ def __init__(self, filename, filename_info, filetype_info, numeric_name_prefix=' @property def start_time(self): """Get start time.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info.get("end_time", self.start_time) @property def sensor_names(self): @@ -239,11 +239,11 @@ def _existing_datasets(self, configured_datasets=None): def fix_modifier_attr(self, ds_info): """Fix modifiers attribute.""" # Empty modifiers are read as [], which causes problems later - if 'modifiers' in ds_info and not ds_info['modifiers']: - ds_info['modifiers'] = () + if "modifiers" in ds_info and not ds_info["modifiers"]: + ds_info["modifiers"] = () try: try: - ds_info['modifiers'] = 
tuple(ds_info['modifiers'].split(' ')) + ds_info["modifiers"] = tuple(ds_info["modifiers"].split(" ")) except AttributeError: pass except KeyError: @@ -252,14 +252,14 @@ def fix_modifier_attr(self, ds_info): def _assign_ds_info(self, var_name, val): """Assign ds_info.""" ds_info = dict(val.attrs) - ds_info['file_type'] = self.filetype_info['file_type'] - ds_info['name'] = ds_info['nc_store_name'] = var_name - if 'original_name' in ds_info: - ds_info['name'] = ds_info['original_name'] + ds_info["file_type"] = self.filetype_info["file_type"] + ds_info["name"] = ds_info["nc_store_name"] = var_name + if "original_name" in ds_info: + ds_info["name"] = ds_info["original_name"] elif self._numeric_name_prefix and var_name.startswith(self._numeric_name_prefix): - ds_info['name'] = var_name.replace(self._numeric_name_prefix, '') + ds_info["name"] = var_name.replace(self._numeric_name_prefix, "") try: - ds_info['wavelength'] = WavelengthRange.from_cf(ds_info['wavelength']) + ds_info["wavelength"] = WavelengthRange.from_cf(ds_info["wavelength"]) except KeyError: pass return ds_info @@ -278,15 +278,15 @@ def _coordinate_datasets(self, configured_datasets=None): nc = xr.open_dataset(self.filename, engine=self.engine) for var_name, val in nc.coords.items(): ds_info = dict(val.attrs) - ds_info['file_type'] = self.filetype_info['file_type'] - ds_info['name'] = var_name + ds_info["file_type"] = self.filetype_info["file_type"] + ds_info["name"] = var_name self.fix_modifier_attr(ds_info) yield True, ds_info def _compare_attr(self, _ds_id_dict, key, data): - if key in ['name', 'modifiers']: + if key in ["name", "modifiers"]: return True - elif key == 'wavelength': + elif key == "wavelength": return _ds_id_dict[key] == WavelengthRange.from_cf(data.attrs[key]) else: return data.attrs[key] == _ds_id_dict[key] @@ -303,15 +303,15 @@ def _dataid_attrs_equal(self, ds_id, data): def get_dataset(self, ds_id, ds_info): """Get dataset.""" - logger.debug("Getting data for: %s", ds_id['name']) + logger.debug("Getting data for: %s", ds_id["name"]) nc = xr.open_dataset(self.filename, engine=self.engine, - chunks={'y': CHUNK_SIZE, 'x': CHUNK_SIZE}) - name = ds_info.get('nc_store_name', ds_id['name']) - data = nc[ds_info.get('file_key', name)] + chunks={"y": CHUNK_SIZE, "x": CHUNK_SIZE}) + name = ds_info.get("nc_store_name", ds_id["name"]) + data = nc[ds_info.get("file_key", name)] if not self._dataid_attrs_equal(ds_id, data): return - if name != ds_id['name']: - data = data.rename(ds_id['name']) + if name != ds_id["name"]: + data = data.rename(ds_id["name"]) data.attrs.update(nc.attrs) # For now add global attributes to all datasets if "orbital_parameters" in data.attrs: data.attrs["orbital_parameters"] = _str2dict(data.attrs["orbital_parameters"]) diff --git a/satpy/readers/scatsat1_l2b.py b/satpy/readers/scatsat1_l2b.py index e507cdb2bc..9989bf3d86 100644 --- a/satpy/readers/scatsat1_l2b.py +++ b/satpy/readers/scatsat1_l2b.py @@ -30,38 +30,38 @@ class SCATSAT1L2BFileHandler(BaseFileHandler): def __init__(self, filename, filename_info, filetype_info): super(SCATSAT1L2BFileHandler, self).__init__(filename, filename_info, filetype_info) self.h5f = h5py.File(self.filename, "r") - h5data = self.h5f['science_data'] + h5data = self.h5f["science_data"] - self.filename_info['start_time'] = datetime.strptime(h5data.attrs['Range Beginning Date'], '%Y-%jT%H:%M:%S.%f') - self.filename_info['end_time'] = datetime.strptime(h5data.attrs['Range Ending Date'], '%Y-%jT%H:%M:%S.%f') + self.filename_info["start_time"] = 
datetime.strptime(h5data.attrs["Range Beginning Date"], "%Y-%jT%H:%M:%S.%f") + self.filename_info["end_time"] = datetime.strptime(h5data.attrs["Range Ending Date"], "%Y-%jT%H:%M:%S.%f") self.lons = None self.lats = None - self.wind_speed_scale = float(h5data.attrs['Wind Speed Selection Scale']) - self.wind_direction_scale = float(h5data.attrs['Wind Direction Selection Scale']) - self.latitude_scale = float(h5data.attrs['Latitude Scale']) - self.longitude_scale = float(h5data.attrs['Longitude Scale']) + self.wind_speed_scale = float(h5data.attrs["Wind Speed Selection Scale"]) + self.wind_direction_scale = float(h5data.attrs["Wind Direction Selection Scale"]) + self.latitude_scale = float(h5data.attrs["Latitude Scale"]) + self.longitude_scale = float(h5data.attrs["Longitude Scale"]) def get_dataset(self, key, info): - h5data = self.h5f['science_data'] - stdname = info.get('standard_name') + h5data = self.h5f["science_data"] + stdname = info.get("standard_name") - if stdname in ['latitude', 'longitude']: + if stdname in ["latitude", "longitude"]: if self.lons is None or self.lats is None: - self.lons = h5data['Longitude'][:]*self.longitude_scale - self.lats = h5data['Latitude'][:]*self.latitude_scale + self.lons = h5data["Longitude"][:]*self.longitude_scale + self.lats = h5data["Latitude"][:]*self.latitude_scale - if info['standard_name'] == 'longitude': + if info["standard_name"] == "longitude": return Dataset(self.lons, id=key, **info) else: return Dataset(self.lats, id=key, **info) - if stdname in ['wind_speed']: - windspeed = h5data['Wind_speed_selection'][:, :] * self.wind_speed_scale + if stdname in ["wind_speed"]: + windspeed = h5data["Wind_speed_selection"][:, :] * self.wind_speed_scale return Dataset(windspeed, id=key, **info) - if stdname in ['wind_direction']: - wind_direction = h5data['Wind_direction_selection'][:, :] * self.wind_direction_scale + if stdname in ["wind_direction"]: + wind_direction = h5data["Wind_direction_selection"][:, :] * self.wind_direction_scale return Dataset(wind_direction, id=key, **info) diff --git a/satpy/readers/scmi.py b/satpy/readers/scmi.py index f53073c751..a4b8620f8b 100644 --- a/satpy/readers/scmi.py +++ b/satpy/readers/scmi.py @@ -54,7 +54,7 @@ CHUNK_SIZE = get_legacy_chunk_size() # NetCDF doesn't support multi-threaded reading, trick it by opening # as one whole chunk then split it up before we do any calculations -LOAD_CHUNK_SIZE = int(os.getenv('PYTROLL_LOAD_CHUNK_SIZE', -1)) +LOAD_CHUNK_SIZE = int(os.getenv("PYTROLL_LOAD_CHUNK_SIZE", -1)) logger = logging.getLogger(__name__) @@ -69,20 +69,20 @@ def __init__(self, filename, filename_info, filetype_info): self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, - chunks={'x': LOAD_CHUNK_SIZE, 'y': LOAD_CHUNK_SIZE}) - self.platform_name = self.nc.attrs['satellite_id'] + chunks={"x": LOAD_CHUNK_SIZE, "y": LOAD_CHUNK_SIZE}) + self.platform_name = self.nc.attrs["satellite_id"] self.sensor = self._get_sensor() - self.nlines = self.nc.dims['y'] - self.ncols = self.nc.dims['x'] + self.nlines = self.nc.dims["y"] + self.ncols = self.nc.dims["x"] self.coords = {} def _get_sensor(self): """Determine the sensor for this file.""" # sometimes Himawari-8 (or 9) data is stored in SCMI format - is_h8 = 'H8' in self.platform_name - is_h9 = 'H9' in self.platform_name + is_h8 = "H8" in self.platform_name + is_h9 = "H9" in self.platform_name is_ahi = is_h8 or is_h9 - return 'ahi' if is_ahi else 'abi' + return "ahi" if is_ahi else "abi" @property def sensor_names(self): @@ -99,9 +99,9 @@ def 
__getitem__(self, item): """ data = self.nc[item] attrs = data.attrs - factor = data.attrs.get('scale_factor') - offset = data.attrs.get('add_offset') - fill = data.attrs.get('_FillValue') + factor = data.attrs.get("scale_factor") + offset = data.attrs.get("add_offset") + fill = data.attrs.get("_FillValue") if fill is not None: data = data.where(data != fill) if factor is not None: @@ -114,8 +114,8 @@ def __getitem__(self, item): # handle coordinates (and recursive fun) new_coords = {} # 'time' dimension causes issues in other processing - if 'time' in data.coords: - data = data.drop_vars('time') + if "time" in data.coords: + data = data.drop_vars("time") if item in data.coords: self.coords[item] = data for coord_name in data.coords.keys(): @@ -131,60 +131,60 @@ def get_shape(self, key, info): def get_dataset(self, key, info): """Load a dataset.""" - logger.debug('Reading in get_dataset %s.', key['name']) - var_name = info.get('file_key', self.filetype_info.get('file_key')) + logger.debug("Reading in get_dataset %s.", key["name"]) + var_name = info.get("file_key", self.filetype_info.get("file_key")) if var_name: data = self[var_name] - elif 'Sectorized_CMI' in self.nc: - data = self['Sectorized_CMI'] - elif 'data' in self.nc: - data = self['data'] + elif "Sectorized_CMI" in self.nc: + data = self["Sectorized_CMI"] + elif "data" in self.nc: + data = self["data"] # NetCDF doesn't support multi-threaded reading, trick it by opening # as one whole chunk then split it up before we do any calculations - data = data.chunk({'x': CHUNK_SIZE, 'y': CHUNK_SIZE}) + data = data.chunk({"x": CHUNK_SIZE, "y": CHUNK_SIZE}) # convert to satpy standard units - factor = data.attrs.pop('scale_factor', 1) - offset = data.attrs.pop('add_offset', 0) - units = data.attrs.get('units', 1) + factor = data.attrs.pop("scale_factor", 1) + offset = data.attrs.pop("add_offset", 0) + units = data.attrs.get("units", 1) # the '*1' unit is some weird convention added/needed by AWIPS - if units in ['1', '*1'] and key['calibration'] == 'reflectance': + if units in ["1", "*1"] and key["calibration"] == "reflectance": data *= 100 factor *= 100 # used for valid_min/max - data.attrs['units'] = '%' + data.attrs["units"] = "%" # set up all the attributes that might be useful to the user/satpy - data.attrs.update({'platform_name': self.platform_name, - 'sensor': data.attrs.get('sensor', self.sensor), + data.attrs.update({"platform_name": self.platform_name, + "sensor": data.attrs.get("sensor", self.sensor), }) - if 'satellite_longitude' in self.nc.attrs: - data.attrs['orbital_parameters'] = { - 'projection_longitude': self.nc.attrs['satellite_longitude'], - 'projection_latitude': self.nc.attrs['satellite_latitude'], - 'projection_altitude': self.nc.attrs['satellite_altitude'], + if "satellite_longitude" in self.nc.attrs: + data.attrs["orbital_parameters"] = { + "projection_longitude": self.nc.attrs["satellite_longitude"], + "projection_latitude": self.nc.attrs["satellite_latitude"], + "projection_altitude": self.nc.attrs["satellite_altitude"], } - scene_id = self.nc.attrs.get('scene_id') + scene_id = self.nc.attrs.get("scene_id") if scene_id is not None: - data.attrs['scene_id'] = scene_id + data.attrs["scene_id"] = scene_id data.attrs.update(key.to_dict()) - data.attrs.pop('_FillValue', None) - if 'valid_min' in data.attrs: - vmin = data.attrs.pop('valid_min') - vmax = data.attrs.pop('valid_max') + data.attrs.pop("_FillValue", None) + if "valid_min" in data.attrs: + vmin = data.attrs.pop("valid_min") + vmax = 
data.attrs.pop("valid_max") vmin = vmin * factor + offset vmax = vmax * factor + offset - data.attrs['valid_min'] = vmin - data.attrs['valid_max'] = vmax + data.attrs["valid_min"] = vmin + data.attrs["valid_max"] = vmax return data def _get_cf_grid_mapping_var(self): """Figure out which grid mapping should be used.""" - gmaps = ['fixedgrid_projection', 'goes_imager_projection', - 'lambert_projection', 'polar_projection', - 'mercator_projection'] - if 'grid_mapping' in self.filename_info: - gmaps = [self.filename_info.get('grid_mapping')] + gmaps + gmaps = ["fixedgrid_projection", "goes_imager_projection", + "lambert_projection", "polar_projection", + "mercator_projection"] + if "grid_mapping" in self.filename_info: + gmaps = [self.filename_info.get("grid_mapping")] + gmaps for grid_mapping in gmaps: if grid_mapping in self.nc: return self.nc[grid_mapping] @@ -192,12 +192,12 @@ def _get_cf_grid_mapping_var(self): def _get_proj4_name(self, projection): """Map CF projection name to PROJ.4 name.""" - gmap_name = projection.attrs['grid_mapping_name'] + gmap_name = projection.attrs["grid_mapping_name"] proj = { - 'geostationary': 'geos', - 'lambert_conformal_conic': 'lcc', - 'polar_stereographic': 'stere', - 'mercator': 'merc', + "geostationary": "geos", + "lambert_conformal_conic": "lcc", + "polar_stereographic": "stere", + "mercator": "merc", }.get(gmap_name, gmap_name) return proj @@ -205,42 +205,42 @@ def _get_proj_specific_params(self, projection): """Convert CF projection parameters to PROJ.4 dict.""" proj = self._get_proj4_name(projection) proj_dict = { - 'proj': proj, - 'a': float(projection.attrs['semi_major_axis']), - 'b': float(projection.attrs['semi_minor_axis']), - 'units': 'm', + "proj": proj, + "a": float(projection.attrs["semi_major_axis"]), + "b": float(projection.attrs["semi_minor_axis"]), + "units": "m", } - if proj == 'geos': - proj_dict['h'] = float(projection.attrs['perspective_point_height']) - proj_dict['sweep'] = projection.attrs.get('sweep_angle_axis', 'y') - proj_dict['lon_0'] = float(projection.attrs['longitude_of_projection_origin']) - proj_dict['lat_0'] = float(projection.attrs.get('latitude_of_projection_origin', 0.0)) - elif proj == 'lcc': - proj_dict['lat_0'] = float(projection.attrs['standard_parallel']) - proj_dict['lon_0'] = float(projection.attrs['longitude_of_central_meridian']) - proj_dict['lat_1'] = float(projection.attrs['latitude_of_projection_origin']) - elif proj == 'stere': - proj_dict['lat_ts'] = float(projection.attrs['standard_parallel']) - proj_dict['lon_0'] = float(projection.attrs['straight_vertical_longitude_from_pole']) - proj_dict['lat_0'] = float(projection.attrs['latitude_of_projection_origin']) - elif proj == 'merc': - proj_dict['lat_ts'] = float(projection.attrs['standard_parallel']) - proj_dict['lat_0'] = proj_dict['lat_ts'] - proj_dict['lon_0'] = float(projection.attrs['longitude_of_projection_origin']) + if proj == "geos": + proj_dict["h"] = float(projection.attrs["perspective_point_height"]) + proj_dict["sweep"] = projection.attrs.get("sweep_angle_axis", "y") + proj_dict["lon_0"] = float(projection.attrs["longitude_of_projection_origin"]) + proj_dict["lat_0"] = float(projection.attrs.get("latitude_of_projection_origin", 0.0)) + elif proj == "lcc": + proj_dict["lat_0"] = float(projection.attrs["standard_parallel"]) + proj_dict["lon_0"] = float(projection.attrs["longitude_of_central_meridian"]) + proj_dict["lat_1"] = float(projection.attrs["latitude_of_projection_origin"]) + elif proj == "stere": + proj_dict["lat_ts"] = 
float(projection.attrs["standard_parallel"]) + proj_dict["lon_0"] = float(projection.attrs["straight_vertical_longitude_from_pole"]) + proj_dict["lat_0"] = float(projection.attrs["latitude_of_projection_origin"]) + elif proj == "merc": + proj_dict["lat_ts"] = float(projection.attrs["standard_parallel"]) + proj_dict["lat_0"] = proj_dict["lat_ts"] + proj_dict["lon_0"] = float(projection.attrs["longitude_of_projection_origin"]) else: raise ValueError("Can't handle projection '{}'".format(proj)) return proj_dict def _calc_extents(self, proj_dict): """Calculate area extents from x/y variables.""" - h = float(proj_dict.get('h', 1.)) # force to 64-bit float - x = self['x'] - y = self['y'] - x_units = x.attrs.get('units', 'rad') - if x_units == 'meters': + h = float(proj_dict.get("h", 1.)) # force to 64-bit float + x = self["x"] + y = self["y"] + x_units = x.attrs.get("units", "rad") + if x_units == "meters": h_factor = 1. factor = 1. - elif x_units == 'microradian': + elif x_units == "microradian": h_factor = h factor = 1e6 else: # radians @@ -260,7 +260,7 @@ def get_area_def(self, key): projection = self._get_cf_grid_mapping_var() proj_dict = self._get_proj_specific_params(projection) area_extent = self._calc_extents(proj_dict) - area_name = '{}_{}'.format(self.sensor, proj_dict['proj']) + area_name = "{}_{}".format(self.sensor, proj_dict["proj"]) return geometry.AreaDefinition( area_name, "SCMI file area", @@ -273,7 +273,7 @@ def get_area_def(self, key): @property def start_time(self): """Get the start time.""" - return datetime.strptime(self.nc.attrs['start_date_time'], '%Y%j%H%M%S') + return datetime.strptime(self.nc.attrs["start_date_time"], "%Y%j%H%M%S") @property def end_time(self): diff --git a/satpy/readers/seadas_l2.py b/satpy/readers/seadas_l2.py index 281a0132af..03fa648330 100644 --- a/satpy/readers/seadas_l2.py +++ b/satpy/readers/seadas_l2.py @@ -57,9 +57,9 @@ def _rows_per_scan(self): def _platform_name(self): platform = self[self.platform_attr_name] - platform_dict = {'NPP': 'Suomi-NPP', - 'JPSS-1': 'NOAA-20', - 'JPSS-2': 'NOAA-21'} + platform_dict = {"NPP": "Suomi-NPP", + "JPSS-1": "NOAA-20", + "JPSS-2": "NOAA-21"} return platform_dict.get(platform, platform) @property diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 131fe39ad4..0df97d83a7 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -179,14 +179,14 @@ CHUNK_SIZE = get_legacy_chunk_size() PLATFORM_DICT = { - 'MET08': 'Meteosat-8', - 'MET09': 'Meteosat-9', - 'MET10': 'Meteosat-10', - 'MET11': 'Meteosat-11', - 'MSG1': 'Meteosat-8', - 'MSG2': 'Meteosat-9', - 'MSG3': 'Meteosat-10', - 'MSG4': 'Meteosat-11', + "MET08": "Meteosat-8", + "MET09": "Meteosat-9", + "MET10": "Meteosat-10", + "MET11": "Meteosat-11", + "MSG1": "Meteosat-8", + "MSG2": "Meteosat-9", + "MSG3": "Meteosat-10", + "MSG4": "Meteosat-11", } REPEAT_CYCLE_DURATION = 15 @@ -212,19 +212,19 @@ 11: "IR_134", 12: "HRV"} -VIS_CHANNELS = ['HRV', 'VIS006', 'VIS008', 'IR_016'] +VIS_CHANNELS = ["HRV", "VIS006", "VIS008", "IR_016"] # Polynomial coefficients for spectral-effective BT fits BTFIT = dict() # [A, B, C] -BTFIT['IR_039'] = [0.0, 1.011751900, -3.550400] -BTFIT['WV_062'] = [0.00001805700, 1.000255533, -1.790930] -BTFIT['WV_073'] = [0.00000231818, 1.000668281, -0.456166] -BTFIT['IR_087'] = [-0.00002332000, 1.011803400, -1.507390] -BTFIT['IR_097'] = [-0.00002055330, 1.009370670, -1.030600] -BTFIT['IR_108'] = [-0.00007392770, 1.032889800, -3.296740] -BTFIT['IR_120'] = [-0.00007009840, 1.031314600, -3.181090] 
-BTFIT['IR_134'] = [-0.00007293450, 1.030424800, -2.645950] +BTFIT["IR_039"] = [0.0, 1.011751900, -3.550400] +BTFIT["WV_062"] = [0.00001805700, 1.000255533, -1.790930] +BTFIT["WV_073"] = [0.00000231818, 1.000668281, -0.456166] +BTFIT["IR_087"] = [-0.00002332000, 1.011803400, -1.507390] +BTFIT["IR_097"] = [-0.00002055330, 1.009370670, -1.030600] +BTFIT["IR_108"] = [-0.00007392770, 1.032889800, -3.296740] +BTFIT["IR_120"] = [-0.00007009840, 1.031314600, -3.181090] +BTFIT["IR_134"] = [-0.00007293450, 1.030424800, -2.645950] SATNUM = {321: "8", 322: "9", @@ -234,124 +234,124 @@ CALIB = dict() # Meteosat 8 -CALIB[321] = {'HRV': {'F': 78.7599}, - 'VIS006': {'F': 65.2296}, - 'VIS008': {'F': 73.0127}, - 'IR_016': {'F': 62.3715}, - 'IR_039': {'VC': 2567.33, - 'ALPHA': 0.9956, - 'BETA': 3.41}, - 'WV_062': {'VC': 1598.103, - 'ALPHA': 0.9962, - 'BETA': 2.218}, - 'WV_073': {'VC': 1362.081, - 'ALPHA': 0.9991, - 'BETA': 0.478}, - 'IR_087': {'VC': 1149.069, - 'ALPHA': 0.9996, - 'BETA': 0.179}, - 'IR_097': {'VC': 1034.343, - 'ALPHA': 0.9999, - 'BETA': 0.06}, - 'IR_108': {'VC': 930.647, - 'ALPHA': 0.9983, - 'BETA': 0.625}, - 'IR_120': {'VC': 839.66, - 'ALPHA': 0.9988, - 'BETA': 0.397}, - 'IR_134': {'VC': 752.387, - 'ALPHA': 0.9981, - 'BETA': 0.578}} +CALIB[321] = {"HRV": {"F": 78.7599}, + "VIS006": {"F": 65.2296}, + "VIS008": {"F": 73.0127}, + "IR_016": {"F": 62.3715}, + "IR_039": {"VC": 2567.33, + "ALPHA": 0.9956, + "BETA": 3.41}, + "WV_062": {"VC": 1598.103, + "ALPHA": 0.9962, + "BETA": 2.218}, + "WV_073": {"VC": 1362.081, + "ALPHA": 0.9991, + "BETA": 0.478}, + "IR_087": {"VC": 1149.069, + "ALPHA": 0.9996, + "BETA": 0.179}, + "IR_097": {"VC": 1034.343, + "ALPHA": 0.9999, + "BETA": 0.06}, + "IR_108": {"VC": 930.647, + "ALPHA": 0.9983, + "BETA": 0.625}, + "IR_120": {"VC": 839.66, + "ALPHA": 0.9988, + "BETA": 0.397}, + "IR_134": {"VC": 752.387, + "ALPHA": 0.9981, + "BETA": 0.578}} # Meteosat 9 -CALIB[322] = {'HRV': {'F': 79.0113}, - 'VIS006': {'F': 65.2065}, - 'VIS008': {'F': 73.1869}, - 'IR_016': {'F': 61.9923}, - 'IR_039': {'VC': 2568.832, - 'ALPHA': 0.9954, - 'BETA': 3.438}, - 'WV_062': {'VC': 1600.548, - 'ALPHA': 0.9963, - 'BETA': 2.185}, - 'WV_073': {'VC': 1360.330, - 'ALPHA': 0.9991, - 'BETA': 0.47}, - 'IR_087': {'VC': 1148.620, - 'ALPHA': 0.9996, - 'BETA': 0.179}, - 'IR_097': {'VC': 1035.289, - 'ALPHA': 0.9999, - 'BETA': 0.056}, - 'IR_108': {'VC': 931.7, - 'ALPHA': 0.9983, - 'BETA': 0.64}, - 'IR_120': {'VC': 836.445, - 'ALPHA': 0.9988, - 'BETA': 0.408}, - 'IR_134': {'VC': 751.792, - 'ALPHA': 0.9981, - 'BETA': 0.561}} +CALIB[322] = {"HRV": {"F": 79.0113}, + "VIS006": {"F": 65.2065}, + "VIS008": {"F": 73.1869}, + "IR_016": {"F": 61.9923}, + "IR_039": {"VC": 2568.832, + "ALPHA": 0.9954, + "BETA": 3.438}, + "WV_062": {"VC": 1600.548, + "ALPHA": 0.9963, + "BETA": 2.185}, + "WV_073": {"VC": 1360.330, + "ALPHA": 0.9991, + "BETA": 0.47}, + "IR_087": {"VC": 1148.620, + "ALPHA": 0.9996, + "BETA": 0.179}, + "IR_097": {"VC": 1035.289, + "ALPHA": 0.9999, + "BETA": 0.056}, + "IR_108": {"VC": 931.7, + "ALPHA": 0.9983, + "BETA": 0.64}, + "IR_120": {"VC": 836.445, + "ALPHA": 0.9988, + "BETA": 0.408}, + "IR_134": {"VC": 751.792, + "ALPHA": 0.9981, + "BETA": 0.561}} # Meteosat 10 -CALIB[323] = {'HRV': {'F': 78.9416}, - 'VIS006': {'F': 65.5148}, - 'VIS008': {'F': 73.1807}, - 'IR_016': {'F': 62.0208}, - 'IR_039': {'VC': 2547.771, - 'ALPHA': 0.9915, - 'BETA': 2.9002}, - 'WV_062': {'VC': 1595.621, - 'ALPHA': 0.9960, - 'BETA': 2.0337}, - 'WV_073': {'VC': 1360.337, - 'ALPHA': 0.9991, - 'BETA': 0.4340}, - 'IR_087': {'VC': 
1148.130, - 'ALPHA': 0.9996, - 'BETA': 0.1714}, - 'IR_097': {'VC': 1034.715, - 'ALPHA': 0.9999, - 'BETA': 0.0527}, - 'IR_108': {'VC': 929.842, - 'ALPHA': 0.9983, - 'BETA': 0.6084}, - 'IR_120': {'VC': 838.659, - 'ALPHA': 0.9988, - 'BETA': 0.3882}, - 'IR_134': {'VC': 750.653, - 'ALPHA': 0.9982, - 'BETA': 0.5390}} +CALIB[323] = {"HRV": {"F": 78.9416}, + "VIS006": {"F": 65.5148}, + "VIS008": {"F": 73.1807}, + "IR_016": {"F": 62.0208}, + "IR_039": {"VC": 2547.771, + "ALPHA": 0.9915, + "BETA": 2.9002}, + "WV_062": {"VC": 1595.621, + "ALPHA": 0.9960, + "BETA": 2.0337}, + "WV_073": {"VC": 1360.337, + "ALPHA": 0.9991, + "BETA": 0.4340}, + "IR_087": {"VC": 1148.130, + "ALPHA": 0.9996, + "BETA": 0.1714}, + "IR_097": {"VC": 1034.715, + "ALPHA": 0.9999, + "BETA": 0.0527}, + "IR_108": {"VC": 929.842, + "ALPHA": 0.9983, + "BETA": 0.6084}, + "IR_120": {"VC": 838.659, + "ALPHA": 0.9988, + "BETA": 0.3882}, + "IR_134": {"VC": 750.653, + "ALPHA": 0.9982, + "BETA": 0.5390}} # Meteosat 11 -CALIB[324] = {'HRV': {'F': 79.0035}, - 'VIS006': {'F': 65.2656}, - 'VIS008': {'F': 73.1692}, - 'IR_016': {'F': 61.9416}, - 'IR_039': {'VC': 2555.280, - 'ALPHA': 0.9916, - 'BETA': 2.9438}, - 'WV_062': {'VC': 1596.080, - 'ALPHA': 0.9959, - 'BETA': 2.0780}, - 'WV_073': {'VC': 1361.748, - 'ALPHA': 0.9990, - 'BETA': 0.4929}, - 'IR_087': {'VC': 1147.433, - 'ALPHA': 0.9996, - 'BETA': 0.1731}, - 'IR_097': {'VC': 1034.851, - 'ALPHA': 0.9998, - 'BETA': 0.0597}, - 'IR_108': {'VC': 931.122, - 'ALPHA': 0.9983, - 'BETA': 0.6256}, - 'IR_120': {'VC': 839.113, - 'ALPHA': 0.9988, - 'BETA': 0.4002}, - 'IR_134': {'VC': 748.585, - 'ALPHA': 0.9981, - 'BETA': 0.5635}} +CALIB[324] = {"HRV": {"F": 79.0035}, + "VIS006": {"F": 65.2656}, + "VIS008": {"F": 73.1692}, + "IR_016": {"F": 61.9416}, + "IR_039": {"VC": 2555.280, + "ALPHA": 0.9916, + "BETA": 2.9438}, + "WV_062": {"VC": 1596.080, + "ALPHA": 0.9959, + "BETA": 2.0780}, + "WV_073": {"VC": 1361.748, + "ALPHA": 0.9990, + "BETA": 0.4929}, + "IR_087": {"VC": 1147.433, + "ALPHA": 0.9996, + "BETA": 0.1731}, + "IR_097": {"VC": 1034.851, + "ALPHA": 0.9998, + "BETA": 0.0597}, + "IR_108": {"VC": 931.122, + "ALPHA": 0.9983, + "BETA": 0.6256}, + "IR_120": {"VC": 839.113, + "ALPHA": 0.9988, + "BETA": 0.4002}, + "IR_134": {"VC": 748.585, + "ALPHA": 0.9981, + "BETA": 0.5635}} def get_cds_time(days, msecs): @@ -370,12 +370,12 @@ def get_cds_time(days, msecs): """ if np.isscalar(days): - days = np.array([days], dtype='int64') - msecs = np.array([msecs], dtype='int64') + days = np.array([days], dtype="int64") + msecs = np.array([msecs], dtype="int64") - time = np.datetime64('1958-01-01').astype('datetime64[ms]') + \ - days.astype('timedelta64[D]') + msecs.astype('timedelta64[ms]') - time[time == np.datetime64('1958-01-01 00:00')] = np.datetime64("NaT") + time = np.datetime64("1958-01-01").astype("datetime64[ms]") + \ + days.astype("timedelta64[D]") + msecs.astype("timedelta64[ms]") + time[time == np.datetime64("1958-01-01 00:00")] = np.datetime64("NaT") if len(time) == 1: return time[0] @@ -384,9 +384,9 @@ def get_cds_time(days, msecs): def add_scanline_acq_time(dataset, acq_time): """Add scanline acquisition time to the given dataset.""" - dataset.coords['acq_time'] = ('y', acq_time) - dataset.coords['acq_time'].attrs[ - 'long_name'] = 'Mean scanline acquisition time' + dataset.coords["acq_time"] = ("y", acq_time) + dataset.coords["acq_time"].attrs[ + "long_name"] = "Mean scanline acquisition time" def dec10216(inbuf): @@ -436,53 +436,53 @@ class MpefProductHeader(object): def get(self): """Return numpy record_array 
for MPEF product header.""" record = [ - ('MPEF_File_Id', np.int16), - ('MPEF_Header_Version', np.uint8), - ('ManualDissAuthRequest', bool), - ('ManualDisseminationAuth', bool), - ('DisseminationAuth', bool), - ('NominalTime', time_cds_short), - ('ProductQuality', np.uint8), - ('ProductCompleteness', np.uint8), - ('ProductTimeliness', np.uint8), - ('ProcessingInstanceId', np.int8), - ('ImagesUsed', self.images_used, (4,)), - ('BaseAlgorithmVersion', + ("MPEF_File_Id", np.int16), + ("MPEF_Header_Version", np.uint8), + ("ManualDissAuthRequest", bool), + ("ManualDisseminationAuth", bool), + ("DisseminationAuth", bool), + ("NominalTime", time_cds_short), + ("ProductQuality", np.uint8), + ("ProductCompleteness", np.uint8), + ("ProductTimeliness", np.uint8), + ("ProcessingInstanceId", np.int8), + ("ImagesUsed", self.images_used, (4,)), + ("BaseAlgorithmVersion", issue_revision), - ('ProductAlgorithmVersion', + ("ProductAlgorithmVersion", issue_revision), - ('InstanceServerName', 'S2'), - ('SpacecraftName', 'S2'), - ('Mission', 'S3'), - ('RectificationLongitude', 'S5'), - ('Encoding', 'S1'), - ('TerminationSpace', 'S1'), - ('EncodingVersion', np.uint16), - ('Channel', np.uint8), - ('ImageLocation', 'S3'), - ('GsicsCalMode', np.bool_), - ('GsicsCalValidity', np.bool_), - ('Padding', 'S2'), - ('OffsetToData', np.uint32), - ('Padding2', 'S9'), - ('RepeatCycle', 'S15'), + ("InstanceServerName", "S2"), + ("SpacecraftName", "S2"), + ("Mission", "S3"), + ("RectificationLongitude", "S5"), + ("Encoding", "S1"), + ("TerminationSpace", "S1"), + ("EncodingVersion", np.uint16), + ("Channel", np.uint8), + ("ImageLocation", "S3"), + ("GsicsCalMode", np.bool_), + ("GsicsCalValidity", np.bool_), + ("Padding", "S2"), + ("OffsetToData", np.uint32), + ("Padding2", "S9"), + ("RepeatCycle", "S15"), ] - return np.dtype(record).newbyteorder('>') + return np.dtype(record).newbyteorder(">") @property def images_used(self): """Return structure for images_used.""" record = [ - ('Padding1', 'S2'), - ('ExpectedImage', time_cds_short), - ('ImageReceived', bool), - ('Padding2', 'S1'), - ('UsedImageStart_Day', np.uint16), - ('UsedImageStart_Millsec', np.uint32), - ('Padding3', 'S2'), - ('UsedImageEnd_Day', np.uint16), - ('UsedImageEndt_Millsec', np.uint32), + ("Padding1", "S2"), + ("ExpectedImage", time_cds_short), + ("ImageReceived", bool), + ("Padding2", "S1"), + ("UsedImageStart_Day", np.uint16), + ("UsedImageStart_Millsec", np.uint32), + ("Padding3", "S2"), + ("UsedImageEnd_Day", np.uint16), + ("UsedImageEndt_Millsec", np.uint32), ] return record @@ -522,7 +522,7 @@ def ir_calibrate(self, data, channel_name, cal_type): # effective radiances return self._erads2bt(data, channel_name) else: - raise NotImplementedError('Unknown calibration type') + raise NotImplementedError("Unknown calibration type") def _srads2bt(self, data, channel_name): """Convert spectral radiance to brightness temperature.""" @@ -566,36 +566,36 @@ def __init__(self, platform_id, channel_name, coefs, calib_mode, scan_time): scan_time=self._scan_time ) - valid_modes = ('NOMINAL', 'GSICS') + valid_modes = ("NOMINAL", "GSICS") if self._calib_mode not in valid_modes: raise ValueError( - 'Invalid calibration mode: {}. Choose one of {}'.format( + "Invalid calibration mode: {}. 
Choose one of {}".format( self._calib_mode, valid_modes) ) def calibrate(self, data, calibration): """Calibrate the given data.""" - if calibration == 'counts': + if calibration == "counts": res = data - elif calibration in ['radiance', 'reflectance', - 'brightness_temperature']: + elif calibration in ["radiance", "reflectance", + "brightness_temperature"]: gain, offset = self.get_gain_offset() res = self._algo.convert_to_radiance( data.astype(np.float32), gain, offset ) else: raise ValueError( - 'Invalid calibration {} for channel {}'.format( + "Invalid calibration {} for channel {}".format( calibration, self._channel_name ) ) - if calibration == 'reflectance': + if calibration == "reflectance": solar_irradiance = CALIB[self._platform_id][self._channel_name]["F"] res = self._algo.vis_calibrate(res, solar_irradiance) - elif calibration == 'brightness_temperature': + elif calibration == "brightness_temperature": res = self._algo.ir_calibrate( - res, self._channel_name, self._coefs['radiance_type'] + res, self._channel_name, self._coefs["radiance_type"] ) return res @@ -608,14 +608,14 @@ def get_gain_offset(self): nominal coefficients. External coefficients take precedence over internal coefficients. """ - coefs = self._coefs['coefs'] + coefs = self._coefs["coefs"] # Select internal coefficients for the given calibration mode - internal_gain = coefs['NOMINAL']['gain'] - internal_offset = coefs['NOMINAL']['offset'] - if self._calib_mode == 'GSICS': - gsics_gain = coefs['GSICS']['gain'] - gsics_offset = coefs['GSICS']['offset'] * gsics_gain + internal_gain = coefs["NOMINAL"]["gain"] + internal_offset = coefs["NOMINAL"]["offset"] + if self._calib_mode == "GSICS": + gsics_gain = coefs["GSICS"]["gain"] + gsics_offset = coefs["GSICS"]["offset"] * gsics_gain if gsics_gain != 0 and gsics_offset != 0: # If no GSICS coefficients are available for a certain channel, # they are set to zero in the file. @@ -623,8 +623,8 @@ def get_gain_offset(self): internal_offset = gsics_offset # Override with external coefficients, if any. 
- gain = coefs['EXTERNAL'].get('gain', internal_gain) - offset = coefs['EXTERNAL'].get('offset', internal_offset) + gain = coefs["EXTERNAL"].get("gain", internal_gain) + offset = coefs["EXTERNAL"].get("offset", internal_offset) return gain, offset @@ -689,9 +689,9 @@ def evaluate(self, time): Returns: Earth-centered cartesian coordinates (x, y, z) in meters """ - domain = [np.datetime64(self.start_time).astype('int64'), - np.datetime64(self.end_time).astype('int64')] - time = np.datetime64(time).astype('int64') + domain = [np.datetime64(self.start_time).astype("int64"), + np.datetime64(self.end_time).astype("int64")] + time = np.datetime64(time).astype("int64") x, y, z = chebyshev_3d(self.coefs, time, domain) return x * 1000, y * 1000, z * 1000 # km -> m @@ -718,10 +718,10 @@ def get_satpos(orbit_polynomial, time, semi_major_axis, semi_minor_axis): """ x, y, z = orbit_polynomial.evaluate(time) geocent = pyproj.CRS( - proj='geocent', a=semi_major_axis, b=semi_minor_axis, units='m' + proj="geocent", a=semi_major_axis, b=semi_minor_axis, units="m" ) latlong = pyproj.CRS( - proj='latlong', a=semi_major_axis, b=semi_minor_axis, units='m' + proj="latlong", a=semi_major_axis, b=semi_minor_axis, units="m" ) transformer = pyproj.Transformer.from_crs(geocent, latlong) lon, lat, alt = transformer.transform(x, y, z) @@ -750,10 +750,10 @@ def __init__(self, orbit_polynomials): self.orbit_polynomials = orbit_polynomials # Left/right boundaries of time intervals for which the polynomials are # valid. - self.valid_from = orbit_polynomials['StartTime'][0, :].astype( - 'datetime64[us]') - self.valid_to = orbit_polynomials['EndTime'][0, :].astype( - 'datetime64[us]') + self.valid_from = orbit_polynomials["StartTime"][0, :].astype( + "datetime64[us]") + self.valid_to = orbit_polynomials["EndTime"][0, :].astype( + "datetime64[us]") def get_orbit_polynomial(self, time, max_delta=6): """Get orbit polynomial valid for the given time. @@ -782,16 +782,16 @@ def get_orbit_polynomial(self, time, max_delta=6): match = self._get_enclosing_interval(time) except ValueError: warnings.warn( - 'No orbit polynomial valid for {}. Using closest ' - 'match.'.format(time), + "No orbit polynomial valid for {}. 
Using closest " + "match.".format(time), stacklevel=2 ) match = self._get_closest_interval_within(time, max_delta) return OrbitPolynomial( coefs=( - self.orbit_polynomials['X'][match], - self.orbit_polynomials['Y'][match], - self.orbit_polynomials['Z'][match] + self.orbit_polynomials["X"][match], + self.orbit_polynomials["Y"][match], + self.orbit_polynomials["Z"][match] ), start_time=self.valid_from[match], end_time=self.valid_to[match] @@ -819,12 +819,12 @@ def _get_closest_interval_within(self, time, threshold): Index of closest interval """ closest_match, distance = self._get_closest_interval(time) - threshold_diff = np.timedelta64(threshold, 'h') + threshold_diff = np.timedelta64(threshold, "h") if distance < threshold_diff: return closest_match raise NoValidOrbitParams( - 'Unable to find orbit coefficients valid for {} +/- {}' - 'hours'.format(time, threshold) + "Unable to find orbit coefficients valid for {} +/- {}" + "hours".format(time, threshold) ) def _get_closest_interval(self, time): @@ -836,7 +836,7 @@ def _get_closest_interval(self, time): intervals_centre = self.valid_from + 0.5 * ( self.valid_to - self.valid_from ) - diffs_us = (time - intervals_centre).astype('i8') + diffs_us = (time - intervals_centre).astype("i8") closest_match = np.argmin(np.fabs(diffs_us)) distance = abs(intervals_centre[closest_match] - time) return closest_match, distance @@ -864,15 +864,15 @@ def calculate_area_extent(area_dict): # For Earth model 2 and full disk VISIR, (center_point - west - 0.5 + we_offset) must be -1856.5 . # See MSG Level 1.5 Image Data Format Description Figure 7 - Alignment and numbering of the non-HRV pixels. """ - center_point = area_dict['center_point'] - east = area_dict['east'] - west = area_dict['west'] - south = area_dict['south'] - north = area_dict['north'] - column_step = area_dict['column_step'] - line_step = area_dict['line_step'] - column_offset = area_dict.get('column_offset', 0) - line_offset = area_dict.get('line_offset', 0) + center_point = area_dict["center_point"] + east = area_dict["east"] + west = area_dict["west"] + south = area_dict["south"] + north = area_dict["north"] + column_step = area_dict["column_step"] + line_step = area_dict["line_step"] + column_offset = area_dict.get("column_offset", 0) + line_offset = area_dict.get("line_offset", 0) ll_c = (center_point - east + 0.5 + column_offset) * column_step ll_l = (north - center_point + 0.5 + line_offset) * line_step @@ -885,18 +885,18 @@ def calculate_area_extent(area_dict): def create_coef_dict(coefs_nominal, coefs_gsics, radiance_type, ext_coefs): """Create coefficient dictionary expected by calibration class.""" return { - 'coefs': { - 'NOMINAL': { - 'gain': coefs_nominal[0], - 'offset': coefs_nominal[1], + "coefs": { + "NOMINAL": { + "gain": coefs_nominal[0], + "offset": coefs_nominal[1], }, - 'GSICS': { - 'gain': coefs_gsics[0], - 'offset': coefs_gsics[1] + "GSICS": { + "gain": coefs_gsics[0], + "offset": coefs_gsics[1] }, - 'EXTERNAL': ext_coefs + "EXTERNAL": ext_coefs }, - 'radiance_type': radiance_type + "radiance_type": radiance_type } @@ -916,7 +916,7 @@ def pad_data_horizontally(data, final_size, east_bound, west_bound): """Pad the data given east and west bounds and the desired size.""" nlines = final_size[0] if west_bound - east_bound != data.shape[1] - 1: - raise IndexError('East and west bounds do not match data shape') + raise IndexError("East and west bounds do not match data shape") padding_east = get_padding_area((nlines, east_bound - 1), data.dtype) padding_west = 
get_padding_area((nlines, (final_size[1] - west_bound)), data.dtype) @@ -928,7 +928,7 @@ def pad_data_vertically(data, final_size, south_bound, north_bound): """Pad the data given south and north bounds and the desired size.""" ncols = final_size[1] if north_bound - south_bound != data.shape[0] - 1: - raise IndexError('South and north bounds do not match data shape') + raise IndexError("South and north bounds do not match data shape") padding_south = get_padding_area((south_bound - 1, ncols), data.dtype) padding_north = get_padding_area(((final_size[0] - north_bound), ncols), data.dtype) diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 2b153edfcc..83fc82f687 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -252,33 +252,33 @@ from satpy.utils import get_legacy_chunk_size CHUNK_SIZE = get_legacy_chunk_size() -logger = logging.getLogger('hrit_msg') +logger = logging.getLogger("hrit_msg") # MSG implementation: -key_header = np.dtype([('key_number', 'u1'), - ('seed', '>f8')]) - -segment_identification = np.dtype([('GP_SC_ID', '>i2'), - ('spectral_channel_id', '>i1'), - ('segment_sequence_number', '>u2'), - ('planned_start_segment_number', '>u2'), - ('planned_end_segment_number', '>u2'), - ('data_field_representation', '>i1')]) - -image_segment_line_quality = np.dtype([('line_number_in_grid', '>i4'), - ('line_mean_acquisition', - [('days', '>u2'), - ('milliseconds', '>u4')]), - ('line_validity', 'u1'), - ('line_radiometric_quality', 'u1'), - ('line_geometric_quality', 'u1')]) +key_header = np.dtype([("key_number", "u1"), + ("seed", ">f8")]) + +segment_identification = np.dtype([("GP_SC_ID", ">i2"), + ("spectral_channel_id", ">i1"), + ("segment_sequence_number", ">u2"), + ("planned_start_segment_number", ">u2"), + ("planned_end_segment_number", ">u2"), + ("data_field_representation", ">i1")]) + +image_segment_line_quality = np.dtype([("line_number_in_grid", ">i4"), + ("line_mean_acquisition", + [("days", ">u2"), + ("milliseconds", ">u4")]), + ("line_validity", "u1"), + ("line_radiometric_quality", "u1"), + ("line_geometric_quality", "u1")]) msg_variable_length_headers = { - image_segment_line_quality: 'image_segment_line_quality'} + image_segment_line_quality: "image_segment_line_quality"} -msg_text_headers = {image_data_function: 'image_data_function', - annotation_header: 'annotation_header', - ancillary_text: 'ancillary_text'} +msg_text_headers = {image_data_function: "image_data_function", + annotation_header: "annotation_header", + ancillary_text: "ancillary_text"} msg_hdr_map = base_hdr_map.copy() msg_hdr_map.update({7: key_header, @@ -287,23 +287,23 @@ }) -orbit_coef = np.dtype([('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('X', '>f8', (8, )), - ('Y', '>f8', (8, )), - ('Z', '>f8', (8, )), - ('VX', '>f8', (8, )), - ('VY', '>f8', (8, )), - ('VZ', '>f8', (8, ))]) +orbit_coef = np.dtype([("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("X", ">f8", (8, )), + ("Y", ">f8", (8, )), + ("Z", ">f8", (8, )), + ("VX", ">f8", (8, )), + ("VY", ">f8", (8, )), + ("VZ", ">f8", (8, ))]) -attitude_coef = np.dtype([('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('XofSpinAxis', '>f8', (8, )), - ('YofSpinAxis', '>f8', (8, )), - ('ZofSpinAxis', '>f8', (8, ))]) +attitude_coef = np.dtype([("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("XofSpinAxis", ">f8", (8, )), + ("YofSpinAxis", ">f8", (8, )), + ("ZofSpinAxis", ">f8", (8, ))]) -cuc_time = np.dtype([('coarse', 'u1', 
(4, )), - ('fine', 'u1', (3, ))]) +cuc_time = np.dtype([("coarse", "u1", (4, )), + ("fine", "u1", (3, ))]) class HRITMSGPrologueEpilogueBase(HRITFileHandler): @@ -328,7 +328,7 @@ def reduce(self, max_size): class HRITMSGPrologueFileHandler(HRITMSGPrologueEpilogueBase): """SEVIRI HRIT prologue reader.""" - def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal', + def __init__(self, filename, filename_info, filetype_info, calib_mode="nominal", ext_calib_coefs=None, include_raw_metadata=False, mda_max_array_size=None, fill_hrv=None, mask_bad_quality_scan_lines=None): """Initialize the reader.""" @@ -340,22 +340,22 @@ def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal', self.prologue = {} self.read_prologue() - service = filename_info['service'] - if service == '': - self.mda['service'] = '0DEG' + service = filename_info["service"] + if service == "": + self.mda["service"] = "0DEG" else: - self.mda['service'] = service + self.mda["service"] = service def read_prologue(self): """Read the prologue metadata.""" with utils.generic_open(self.filename, mode="rb") as fp_: - fp_.seek(self.mda['total_header_length']) + fp_.seek(self.mda["total_header_length"]) data = np.frombuffer(fp_.read(hrit_prologue.itemsize), dtype=hrit_prologue, count=1) self.prologue.update(recarray2dict(data)) try: impf = np.frombuffer(fp_.read(impf_configuration.itemsize), dtype=impf_configuration, count=1)[0] except ValueError: - logger.info('No IMPF configuration field found in prologue.') + logger.info("No IMPF configuration field found in prologue.") else: self.prologue.update(recarray2dict(impf)) @@ -368,8 +368,8 @@ def satpos(self): Returns: Longitude [deg east], Latitude [deg north] and Altitude [m] """ a, b = self.get_earth_radii() - poly_finder = OrbitPolynomialFinder(self.prologue['SatelliteStatus'][ - 'Orbit']['OrbitPolynomial']) + poly_finder = OrbitPolynomialFinder(self.prologue["SatelliteStatus"][ + "Orbit"]["OrbitPolynomial"]) orbit_polynomial = poly_finder.get_orbit_polynomial(self.observation_start_time) return get_satpos( orbit_polynomial=orbit_polynomial, @@ -385,10 +385,10 @@ def get_earth_radii(self): Equatorial radius, polar radius [m] """ - earth_model = self.prologue['GeometricProcessing']['EarthModel'] - a = earth_model['EquatorialRadius'] * 1000 - b = (earth_model['NorthPolarRadius'] + - earth_model['SouthPolarRadius']) / 2.0 * 1000 + earth_model = self.prologue["GeometricProcessing"]["EarthModel"] + a = earth_model["EquatorialRadius"] * 1000 + b = (earth_model["NorthPolarRadius"] + + earth_model["SouthPolarRadius"]) / 2.0 * 1000 return a, b def reduce(self, max_size): @@ -399,7 +399,7 @@ def reduce(self, max_size): class HRITMSGEpilogueFileHandler(HRITMSGPrologueEpilogueBase): """SEVIRI HRIT epilogue reader.""" - def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal', + def __init__(self, filename, filename_info, filetype_info, calib_mode="nominal", ext_calib_coefs=None, include_raw_metadata=False, mda_max_array_size=None, fill_hrv=None, mask_bad_quality_scan_lines=None): """Initialize the reader.""" @@ -411,16 +411,16 @@ def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal', self.epilogue = {} self.read_epilogue() - service = filename_info['service'] - if service == '': - self.mda['service'] = '0DEG' + service = filename_info["service"] + if service == "": + self.mda["service"] = "0DEG" else: - self.mda['service'] = service + self.mda["service"] = service def read_epilogue(self): """Read the 
epilogue metadata.""" with utils.generic_open(self.filename, mode="rb") as fp_: - fp_.seek(self.mda['total_header_length']) + fp_.seek(self.mda["total_header_length"]) data = np.frombuffer(fp_.read(hrit_epilogue.itemsize), dtype=hrit_epilogue, count=1) self.epilogue.update(recarray2dict(data)) @@ -454,7 +454,7 @@ class HRITMSGFileHandler(HRITFileHandler): """ def __init__(self, filename, filename_info, filetype_info, - prologue, epilogue, calib_mode='nominal', + prologue, epilogue, calib_mode="nominal", ext_calib_coefs=None, include_raw_metadata=False, mda_max_array_size=100, fill_hrv=True, mask_bad_quality_scan_lines=True): @@ -480,27 +480,27 @@ def __init__(self, filename, filename_info, filetype_info, def _get_header(self): """Read the header info, and fill the metadata dictionary.""" - earth_model = self.prologue['GeometricProcessing']['EarthModel'] - self.mda['offset_corrected'] = earth_model['TypeOfEarthModel'] == 2 + earth_model = self.prologue["GeometricProcessing"]["EarthModel"] + self.mda["offset_corrected"] = earth_model["TypeOfEarthModel"] == 2 # Projection a, b = self.prologue_.get_earth_radii() - self.mda['projection_parameters']['a'] = a - self.mda['projection_parameters']['b'] = b - ssp = self.prologue['ImageDescription'][ - 'ProjectionDescription']['LongitudeOfSSP'] - self.mda['projection_parameters']['SSP_longitude'] = ssp - self.mda['projection_parameters']['SSP_latitude'] = 0.0 + self.mda["projection_parameters"]["a"] = a + self.mda["projection_parameters"]["b"] = b + ssp = self.prologue["ImageDescription"][ + "ProjectionDescription"]["LongitudeOfSSP"] + self.mda["projection_parameters"]["SSP_longitude"] = ssp + self.mda["projection_parameters"]["SSP_latitude"] = 0.0 # Orbital parameters - self.mda['orbital_parameters']['satellite_nominal_longitude'] = self.prologue['SatelliteStatus'][ - 'SatelliteDefinition']['NominalLongitude'] - self.mda['orbital_parameters']['satellite_nominal_latitude'] = 0.0 + self.mda["orbital_parameters"]["satellite_nominal_longitude"] = self.prologue["SatelliteStatus"][ + "SatelliteDefinition"]["NominalLongitude"] + self.mda["orbital_parameters"]["satellite_nominal_latitude"] = 0.0 try: actual_lon, actual_lat, actual_alt = self.prologue_.satpos - self.mda['orbital_parameters']['satellite_actual_longitude'] = actual_lon - self.mda['orbital_parameters']['satellite_actual_latitude'] = actual_lat - self.mda['orbital_parameters']['satellite_actual_altitude'] = actual_alt + self.mda["orbital_parameters"]["satellite_actual_longitude"] = actual_lon + self.mda["orbital_parameters"]["satellite_actual_latitude"] = actual_lat + self.mda["orbital_parameters"]["satellite_actual_altitude"] = actual_alt except NoValidOrbitParams as err: logger.warning(err) @@ -508,46 +508,46 @@ def _get_header(self): self.platform_id = self.prologue["SatelliteStatus"][ "SatelliteDefinition"]["SatelliteId"] self.platform_name = "Meteosat-" + SATNUM[self.platform_id] - self.mda['platform_name'] = self.platform_name - service = self._filename_info['service'] - if service == '': - self.mda['service'] = '0DEG' + self.mda["platform_name"] = self.platform_name + service = self._filename_info["service"] + if service == "": + self.mda["service"] = "0DEG" else: - self.mda['service'] = service - self.channel_name = CHANNEL_NAMES[self.mda['spectral_channel_id']] + self.mda["service"] = service + self.channel_name = CHANNEL_NAMES[self.mda["spectral_channel_id"]] @property def _repeat_cycle_duration(self): """Get repeat cycle duration from epilogue.""" - if 
self.epilogue['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1: + if self.epilogue["ImageProductionStats"]["ActualScanningSummary"]["ReducedScan"] == 1: return 5 return REPEAT_CYCLE_DURATION @property def nominal_start_time(self): """Get the start time and round it according to scan law.""" - tm = self.prologue['ImageAcquisition'][ - 'PlannedAcquisitionTime']['TrueRepeatCycleStart'] + tm = self.prologue["ImageAcquisition"][ + "PlannedAcquisitionTime"]["TrueRepeatCycleStart"] return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) @property def nominal_end_time(self): """Get the end time and round it according to scan law.""" - tm = self.prologue['ImageAcquisition'][ - 'PlannedAcquisitionTime']['PlannedRepeatCycleEnd'] + tm = self.prologue["ImageAcquisition"][ + "PlannedAcquisitionTime"]["PlannedRepeatCycleEnd"] return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) @property def observation_start_time(self): """Get the observation start time.""" - return self.epilogue['ImageProductionStats'][ - 'ActualScanningSummary']['ForwardScanStart'] + return self.epilogue["ImageProductionStats"][ + "ActualScanningSummary"]["ForwardScanStart"] @property def observation_end_time(self): """Get the observation end time.""" - return self.epilogue['ImageProductionStats'][ - 'ActualScanningSummary']['ForwardScanEnd'] + return self.epilogue["ImageProductionStats"][ + "ActualScanningSummary"]["ForwardScanEnd"] @property def start_time(self): @@ -572,7 +572,7 @@ def _get_area_extent(self, pdict): """ aex = get_area_extent(pdict) - if not self.mda['offset_corrected']: + if not self.mda["offset_corrected"]: # Geo-referencing offset present. Adjust area extent to match the shifted data. Note that we have to adjust # the corners in the *opposite* direction, i.e. S-E. Think of it as if the coastlines were fixed, and you # dragged the image to S-E until coastlines and data area aligned correctly. 
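
The two nominal-time properties above snap the planned acquisition times onto the repeat-cycle grid: a 5 minute cycle for reduced (rapid) scans, REPEAT_CYCLE_DURATION otherwise. A minimal sketch of that kind of rounding, using a hypothetical round_to_slot helper for illustration (the reader itself relies on the round_nom_time utility it imports):

from datetime import datetime, timedelta

def round_to_slot(tm, time_delta):
    # Round tm to the nearest multiple of time_delta since midnight.
    step = time_delta.total_seconds()
    midnight = tm.replace(hour=0, minute=0, second=0, microsecond=0)
    elapsed = (tm - midnight).total_seconds()
    return midnight + timedelta(seconds=round(elapsed / step) * step)

round_to_slot(datetime(2020, 2, 28, 12, 8), timedelta(minutes=15))
# -> datetime(2020, 2, 28, 12, 15), the nearest 15 minute slot
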
@@ -589,80 +589,80 @@ def _get_area_extent(self, pdict): def get_area_def(self, dsid): """Get the area definition of the band.""" # Common parameters for both HRV and other channels - nlines = int(self.mda['number_of_lines']) - loff = np.float32(self.mda['loff']) + nlines = int(self.mda["number_of_lines"]) + loff = np.float32(self.mda["loff"]) pdict = dict() - pdict['cfac'] = np.int32(self.mda['cfac']) - pdict['lfac'] = np.int32(self.mda['lfac']) - pdict['coff'] = np.float32(self.mda['coff']) - - pdict['a'] = self.mda['projection_parameters']['a'] - pdict['b'] = self.mda['projection_parameters']['b'] - pdict['h'] = self.mda['projection_parameters']['h'] - pdict['ssp_lon'] = self.mda['projection_parameters']['SSP_longitude'] - - pdict['nlines'] = nlines - pdict['ncols'] = int(self.mda['number_of_columns']) - if (self.prologue['ImageDescription']['Level15ImageProduction'] - ['ImageProcDirection'] == 0): - pdict['scandir'] = 'N2S' + pdict["cfac"] = np.int32(self.mda["cfac"]) + pdict["lfac"] = np.int32(self.mda["lfac"]) + pdict["coff"] = np.float32(self.mda["coff"]) + + pdict["a"] = self.mda["projection_parameters"]["a"] + pdict["b"] = self.mda["projection_parameters"]["b"] + pdict["h"] = self.mda["projection_parameters"]["h"] + pdict["ssp_lon"] = self.mda["projection_parameters"]["SSP_longitude"] + + pdict["nlines"] = nlines + pdict["ncols"] = int(self.mda["number_of_columns"]) + if (self.prologue["ImageDescription"]["Level15ImageProduction"] + ["ImageProcDirection"] == 0): + pdict["scandir"] = "N2S" else: - pdict['scandir'] = 'S2N' + pdict["scandir"] = "S2N" - area_naming_input_dict = {'platform_name': 'msg', - 'instrument_name': 'seviri', - 'resolution': int(dsid['resolution']) + area_naming_input_dict = {"platform_name": "msg", + "instrument_name": "seviri", + "resolution": int(dsid["resolution"]) } area_naming = get_geos_area_naming({**area_naming_input_dict, - **get_service_mode('seviri', pdict['ssp_lon'])}) + **get_service_mode("seviri", pdict["ssp_lon"])}) # Compute area definition for non-HRV channels: - if dsid['name'] != 'HRV': - pdict['loff'] = loff - nlines + if dsid["name"] != "HRV": + pdict["loff"] = loff - nlines aex = self._get_area_extent(pdict) - pdict['a_name'] = area_naming['area_id'] - pdict['a_desc'] = area_naming['description'] - pdict['p_id'] = "" + pdict["a_name"] = area_naming["area_id"] + pdict["a_desc"] = area_naming["description"] + pdict["p_id"] = "" area = get_area_definition(pdict, aex) self.area = area return self.area - segment_number = self.mda['segment_sequence_number'] + segment_number = self.mda["segment_sequence_number"] current_first_line = ((segment_number - - self.mda['planned_start_segment_number']) - * pdict['nlines']) + self.mda["planned_start_segment_number"]) + * pdict["nlines"]) # Or, if we are processing HRV: - pdict['a_name'] = area_naming['area_id'] - pdict['p_id'] = "" - bounds = self.epilogue['ImageProductionStats']['ActualL15CoverageHRV'].copy() + pdict["a_name"] = area_naming["area_id"] + pdict["p_id"] = "" + bounds = self.epilogue["ImageProductionStats"]["ActualL15CoverageHRV"].copy() if self.fill_hrv: - bounds['UpperEastColumnActual'] = 1 - bounds['UpperWestColumnActual'] = HRV_NUM_COLUMNS - bounds['LowerEastColumnActual'] = 1 - bounds['LowerWestColumnActual'] = HRV_NUM_COLUMNS - pdict['ncols'] = HRV_NUM_COLUMNS + bounds["UpperEastColumnActual"] = 1 + bounds["UpperWestColumnActual"] = HRV_NUM_COLUMNS + bounds["LowerEastColumnActual"] = 1 + bounds["LowerWestColumnActual"] = HRV_NUM_COLUMNS + pdict["ncols"] = HRV_NUM_COLUMNS 
upper_south_line = bounds[ - 'LowerNorthLineActual'] - current_first_line - 1 - upper_south_line = min(max(upper_south_line, 0), pdict['nlines']) - lower_coff = (5566 - bounds['LowerEastColumnActual'] + 1) - upper_coff = (5566 - bounds['UpperEastColumnActual'] + 1) + "LowerNorthLineActual"] - current_first_line - 1 + upper_south_line = min(max(upper_south_line, 0), pdict["nlines"]) + lower_coff = (5566 - bounds["LowerEastColumnActual"] + 1) + upper_coff = (5566 - bounds["UpperEastColumnActual"] + 1) # First we look at the lower window - pdict['nlines'] = upper_south_line - pdict['loff'] = loff - upper_south_line - pdict['coff'] = lower_coff - pdict['a_desc'] = area_naming['description'] + pdict["nlines"] = upper_south_line + pdict["loff"] = loff - upper_south_line + pdict["coff"] = lower_coff + pdict["a_desc"] = area_naming["description"] lower_area_extent = self._get_area_extent(pdict) lower_area = get_area_definition(pdict, lower_area_extent) # Now the upper window - pdict['nlines'] = nlines - upper_south_line - pdict['loff'] = loff - pdict['nlines'] - upper_south_line - pdict['coff'] = upper_coff - pdict['a_desc'] = area_naming['description'] + pdict["nlines"] = nlines - upper_south_line + pdict["loff"] = loff - pdict["nlines"] - upper_south_line + pdict["coff"] = upper_coff + pdict["a_desc"] = area_naming["description"] upper_area_extent = self._get_area_extent(pdict) upper_area = get_area_definition(pdict, upper_area_extent) @@ -674,13 +674,13 @@ def get_area_def(self, dsid): def get_dataset(self, key, info): """Get the dataset.""" res = super(HRITMSGFileHandler, self).get_dataset(key, info) - res = self.calibrate(res, key['calibration']) + res = self.calibrate(res, key["calibration"]) - is_calibration = key['calibration'] in ['radiance', 'reflectance', 'brightness_temperature'] + is_calibration = key["calibration"] in ["radiance", "reflectance", "brightness_temperature"] if is_calibration and self.mask_bad_quality_scan_lines: # noqa: E129 res = self._mask_bad_quality(res) - if key['name'] == 'HRV' and self.fill_hrv: + if key["name"] == "HRV" and self.fill_hrv: res = self.pad_hrv_data(res) self._update_attrs(res, info) self._add_scanline_acq_time(res) @@ -688,17 +688,17 @@ def get_dataset(self, key, info): def pad_hrv_data(self, res): """Add empty pixels around the HRV.""" - logger.debug('Padding HRV data to full disk') - nlines = int(self.mda['number_of_lines']) + logger.debug("Padding HRV data to full disk") + nlines = int(self.mda["number_of_lines"]) - segment_number = self.mda['segment_sequence_number'] + segment_number = self.mda["segment_sequence_number"] current_first_line = (segment_number - - self.mda['planned_start_segment_number']) * nlines - bounds = self.epilogue['ImageProductionStats']['ActualL15CoverageHRV'] + - self.mda["planned_start_segment_number"]) * nlines + bounds = self.epilogue["ImageProductionStats"]["ActualL15CoverageHRV"] upper_south_line = bounds[ - 'LowerNorthLineActual'] - current_first_line - 1 + "LowerNorthLineActual"] - current_first_line - 1 upper_south_line = min(max(upper_south_line, 0), nlines) data_list = list() @@ -706,18 +706,18 @@ def pad_hrv_data(self, res): # we have some of the lower window data_lower = pad_data_horizontally(res[:upper_south_line, :].data, (upper_south_line, HRV_NUM_COLUMNS), - bounds['LowerEastColumnActual'], - bounds['LowerWestColumnActual']) + bounds["LowerEastColumnActual"], + bounds["LowerWestColumnActual"]) data_list.append(data_lower) if upper_south_line < nlines: # we have some of the upper window data_upper = 
pad_data_horizontally(res[upper_south_line:, :].data,
                                                (nlines - upper_south_line, HRV_NUM_COLUMNS),
-                                               bounds['UpperEastColumnActual'],
-                                               bounds['UpperWestColumnActual'])
+                                               bounds["UpperEastColumnActual"],
+                                               bounds["UpperWestColumnActual"])
             data_list.append(data_upper)
 
-        return xr.DataArray(da.vstack(data_list), dims=('y', 'x'), attrs=res.attrs.copy())
+        return xr.DataArray(da.vstack(data_list), dims=("y", "x"), attrs=res.attrs.copy())
 
     def calibrate(self, data, calibration):
         """Calibrate the data."""
@@ -733,9 +733,9 @@ def calibrate(self, data, calibration):
 
     def _mask_bad_quality(self, data):
         """Mask scanlines with bad quality."""
-        line_validity = self.mda['image_segment_line_quality']['line_validity']
-        line_radiometric_quality = self.mda['image_segment_line_quality']['line_radiometric_quality']
-        line_geometric_quality = self.mda['image_segment_line_quality']['line_geometric_quality']
+        line_validity = self.mda["image_segment_line_quality"]["line_validity"]
+        line_radiometric_quality = self.mda["image_segment_line_quality"]["line_radiometric_quality"]
+        line_geometric_quality = self.mda["image_segment_line_quality"]["line_geometric_quality"]
         data = mask_bad_quality(data, line_validity, line_geometric_quality, line_radiometric_quality)
         return data
 
@@ -743,7 +743,7 @@ def _get_raw_mda(self):
         """Compile raw metadata to be included in the dataset attributes."""
         # Metadata from segment header (excluding items which vary among the different segments)
         raw_mda = copy.deepcopy(self.mda)
-        for key in ('image_segment_line_quality', 'segment_sequence_number', 'annotation_header', 'loff'):
+        for key in ("image_segment_line_quality", "segment_sequence_number", "annotation_header", "loff"):
             raw_mda.pop(key, None)
 
         # Metadata from prologue and epilogue (large arrays removed)
@@ -754,50 +754,50 @@ def _add_scanline_acq_time(self, dataset):
         """Add scanline acquisition time to the given dataset."""
-        tline = self.mda['image_segment_line_quality']['line_mean_acquisition']
-        acq_time = get_cds_time(days=tline['days'], msecs=tline['milliseconds'])
+        tline = self.mda["image_segment_line_quality"]["line_mean_acquisition"]
+        acq_time = get_cds_time(days=tline["days"], msecs=tline["milliseconds"])
         add_scanline_acq_time(dataset, acq_time)
 
     def _update_attrs(self, res, info):
         """Update dataset attributes."""
-        res.attrs['units'] = info['units']
-        res.attrs['wavelength'] = info['wavelength']
-        res.attrs['standard_name'] = info['standard_name']
-        res.attrs['platform_name'] = self.platform_name
-        res.attrs['sensor'] = 'seviri'
-        res.attrs['nominal_start_time'] = self.nominal_start_time,
-        res.attrs['nominal_end_time'] = self.nominal_end_time,
-        res.attrs['time_parameters'] = {
-            'nominal_start_time': self.nominal_start_time,
-            'nominal_end_time': self.nominal_end_time,
-            'observation_start_time': self.observation_start_time,
-            'observation_end_time': self.observation_end_time,
+        res.attrs["units"] = info["units"]
+        res.attrs["wavelength"] = info["wavelength"]
+        res.attrs["standard_name"] = info["standard_name"]
+        res.attrs["platform_name"] = self.platform_name
+        res.attrs["sensor"] = "seviri"
+        res.attrs["nominal_start_time"] = self.nominal_start_time
+        res.attrs["nominal_end_time"] = self.nominal_end_time
+        res.attrs["time_parameters"] = {
+            "nominal_start_time": self.nominal_start_time,
+            "nominal_end_time": self.nominal_end_time,
+            "observation_start_time": self.observation_start_time,
+            "observation_end_time": self.observation_end_time,
         }
-        res.attrs['orbital_parameters'] = {
- 
'projection_longitude': self.mda['projection_parameters']['SSP_longitude'], - 'projection_latitude': self.mda['projection_parameters']['SSP_latitude'], - 'projection_altitude': self.mda['projection_parameters']['h']} - res.attrs['orbital_parameters'].update(self.mda['orbital_parameters']) - res.attrs['georef_offset_corrected'] = self.mda['offset_corrected'] + res.attrs["orbital_parameters"] = { + "projection_longitude": self.mda["projection_parameters"]["SSP_longitude"], + "projection_latitude": self.mda["projection_parameters"]["SSP_latitude"], + "projection_altitude": self.mda["projection_parameters"]["h"]} + res.attrs["orbital_parameters"].update(self.mda["orbital_parameters"]) + res.attrs["georef_offset_corrected"] = self.mda["offset_corrected"] if self.include_raw_metadata: - res.attrs['raw_metadata'] = self._get_raw_mda() + res.attrs["raw_metadata"] = self._get_raw_mda() def _get_calib_coefs(self, channel_name): """Get coefficients for calibration from counts to radiance.""" - band_idx = self.mda['spectral_channel_id'] - 1 + band_idx = self.mda["spectral_channel_id"] - 1 coefs_nominal = self.prologue["RadiometricProcessing"][ "Level15ImageCalibration"] - coefs_gsics = self.prologue["RadiometricProcessing"]['MPEFCalFeedback'] - radiance_types = self.prologue['ImageDescription'][ - 'Level15ImageProduction']['PlannedChanProcessing'] + coefs_gsics = self.prologue["RadiometricProcessing"]["MPEFCalFeedback"] + radiance_types = self.prologue["ImageDescription"][ + "Level15ImageProduction"]["PlannedChanProcessing"] return create_coef_dict( coefs_nominal=( - coefs_nominal['CalSlope'][band_idx], - coefs_nominal['CalOffset'][band_idx] + coefs_nominal["CalSlope"][band_idx], + coefs_nominal["CalOffset"][band_idx] ), coefs_gsics=( - coefs_gsics['GSICSCalCoeff'][band_idx], - coefs_gsics['GSICSOffsetCount'][band_idx] + coefs_gsics["GSICSCalCoeff"][band_idx], + coefs_gsics["GSICSOffsetCount"][band_idx] ), ext_coefs=self.ext_calib_coefs.get(channel_name, {}), radiance_type=radiance_types[band_idx] @@ -808,7 +808,7 @@ def pad_data(data, final_size, east_bound, west_bound): """Pad the data given east and west bounds and the desired size.""" nlines = final_size[0] if west_bound - east_bound != data.shape[1] - 1: - raise IndexError('East and west bounds do not match data shape') + raise IndexError("East and west bounds do not match data shape") padding_east = da.zeros((nlines, east_bound - 1), dtype=data.dtype, chunks=CHUNK_SIZE) padding_west = da.zeros((nlines, (final_size[1] - west_bound)), diff --git a/satpy/readers/seviri_l1b_icare.py b/satpy/readers/seviri_l1b_icare.py index 38c4360744..b2ceb30313 100644 --- a/satpy/readers/seviri_l1b_icare.py +++ b/satpy/readers/seviri_l1b_icare.py @@ -86,18 +86,18 @@ def __init__(self, filename, filename_info, filetype_info): filename_info, filetype_info) # These are VIS bands - self.ref_bands = ['HRV', 'VIS006', 'VIS008', 'IR_016'] + self.ref_bands = ["HRV", "VIS006", "VIS008", "IR_016"] # And these are IR bands - self.bt_bands = ['IR_039', 'IR_062', 'IR_073', - 'IR_087', 'IR_097', 'IR_108', - 'IR_120', 'IR_134', - 'WV_062', 'WV_073'] + self.bt_bands = ["IR_039", "IR_062", "IR_073", + "IR_087", "IR_097", "IR_108", + "IR_120", "IR_134", + "WV_062", "WV_073"] @property def sensor_name(self): """Get the sensor name.""" # the sensor and platform names are stored together, eg: MSG1/SEVIRI - attr = self['/attr/Sensors'] + attr = self["/attr/Sensors"] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)).lower() else: @@ -105,14 +105,14 @@ def sensor_name(self): 
plat = attr[0:4] sens = attr[5:] # icare uses non-standard platform names - if plat == 'msg1': - plat = 'Meteosat-08' - elif plat == 'msg2': - plat = 'Meteosat-09' - elif plat == 'msg3': - plat = 'Meteosat-10' - elif plat == 'msg4': - plat = 'Meteosat-11' + if plat == "msg1": + plat = "Meteosat-08" + elif plat == "msg2": + plat = "Meteosat-09" + elif plat == "msg3": + plat = "Meteosat-10" + elif plat == "msg4": + plat = "Meteosat-11" else: raise NameError("Unsupported satellite platform:"+plat) return [plat, sens] @@ -120,7 +120,7 @@ def sensor_name(self): @property def satlon(self): """Get the satellite longitude.""" - attr = self['/attr/Sub_Satellite_Longitude'] + attr = self["/attr/Sub_Satellite_Longitude"] if isinstance(attr, np.ndarray): attr = float(attr.astype(str)) return attr @@ -128,7 +128,7 @@ def satlon(self): @property def projlon(self): """Get the projection longitude.""" - attr = self['/attr/Projection_Longitude'] + attr = self["/attr/Projection_Longitude"] if isinstance(attr, np.ndarray): attr = float(attr.astype(str)) return attr @@ -136,11 +136,11 @@ def projlon(self): @property def projection(self): """Get the projection.""" - attr = self['/attr/Geographic_Projection'] + attr = self["/attr/Geographic_Projection"] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)) attr = attr.lower() - if attr != 'geos': + if attr != "geos": raise NotImplementedError("Only the GEOS projection is supported.\ This is:", attr) return attr @@ -148,7 +148,7 @@ def projection(self): @property def zone(self): """Get the zone.""" - attr = self['/attr/Zone'] + attr = self["/attr/Zone"] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)).lower() return attr @@ -156,7 +156,7 @@ def zone(self): @property def res(self): """Get the resolution.""" - attr = self['/attr/Nadir_Pixel_Size'] + attr = self["/attr/Nadir_Pixel_Size"] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)).lower() return float(attr) @@ -164,7 +164,7 @@ def res(self): @property def end_time(self): """Get the end time.""" - attr = self['/attr/End_Acquisition_Date'] + attr = self["/attr/End_Acquisition_Date"] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)) # In some versions milliseconds are present, sometimes not. @@ -177,7 +177,7 @@ def end_time(self): @property def start_time(self): """Get the start time.""" - attr = self['/attr/Beginning_Acquisition_Date'] + attr = self["/attr/Beginning_Acquisition_Date"] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)) # In some versions milliseconds are present, sometimes not. 
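
As the comments in both start_time and end_time note, the ICARE acquisition-date attributes sometimes carry milliseconds and sometimes do not, so the parsing has to tolerate both. A sketch of one tolerant approach, trying the longer format first; the exact timestamp pattern below is assumed for illustration and is not taken from the ICARE format specification:

from datetime import datetime

def parse_acq_date(attr):
    # Try with milliseconds first, then without (patterns are assumed).
    for fmt in ("%Y-%m-%dT%H:%M:%S.%fZ", "%Y-%m-%dT%H:%M:%SZ"):
        try:
            return datetime.strptime(attr, fmt)
        except ValueError:
            continue
    raise ValueError("Unrecognised acquisition date: %s" % attr)
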
@@ -190,7 +190,7 @@ def start_time(self):
     @property
     def alt(self):
         """Get the altitude."""
-        attr = self['/attr/Altitude']
+        attr = self["/attr/Altitude"]
         if isinstance(attr, np.ndarray):
             attr = attr.astype(str)
         attr = float(attr)
@@ -201,7 +201,7 @@ def alt(self):
     @property
     def geoloc(self):
         """Get the geolocation."""
-        attr = self['/attr/Geolocation']
+        attr = self["/attr/Geolocation"]
         if isinstance(attr, np.ndarray):
             attr = attr.astype(str)
         cfac = float(attr[0])
@@ -217,32 +217,32 @@ def get_metadata(self, data, ds_info):
         mda.update(ds_info)
         geoloc = self.geoloc
         mda.update({
-            'start_time': self.start_time,
-            'end_time': self.end_time,
-            'platform_name': self.sensor_name[0],
-            'sensor': self.sensor_name[1],
-            'zone': self.zone,
-            'projection_altitude': self.alt,
-            'cfac': geoloc[0],
-            'lfac': geoloc[1],
-            'coff': geoloc[2],
-            'loff': geoloc[3],
-            'resolution': self.res,
-            'satellite_actual_longitude': self.satlon,
-            'projection_longitude': self.projlon,
-            'projection_type': self.projection
+            "start_time": self.start_time,
+            "end_time": self.end_time,
+            "platform_name": self.sensor_name[0],
+            "sensor": self.sensor_name[1],
+            "zone": self.zone,
+            "projection_altitude": self.alt,
+            "cfac": geoloc[0],
+            "lfac": geoloc[1],
+            "coff": geoloc[2],
+            "loff": geoloc[3],
+            "resolution": self.res,
+            "satellite_actual_longitude": self.satlon,
+            "projection_longitude": self.projlon,
+            "projection_type": self.projection
         })
         return mda
 
     def _get_dsname(self, ds_id):
         """Return the correct dataset name based on requested band."""
-        if ds_id['name'] in self.ref_bands:
-            ds_get_name = 'Normalized_Radiance'
-        elif ds_id['name'] in self.bt_bands:
-            ds_get_name = 'Brightness_Temperature'
+        if ds_id["name"] in self.ref_bands:
+            ds_get_name = "Normalized_Radiance"
+        elif ds_id["name"] in self.bt_bands:
+            ds_get_name = "Brightness_Temperature"
         else:
-            raise NameError("Datset type "+ds_id['name']+" is not supported.")
+            raise NameError("Dataset type "+ds_id["name"]+" is not supported.")
         return ds_get_name
 
     def get_dataset(self, ds_id, ds_info):
@@ -250,50 +250,50 @@ def get_dataset(self, ds_id, ds_info):
         ds_get_name = self._get_dsname(ds_id)
         data = self[ds_get_name]
         data.attrs = self.get_metadata(data, ds_info)
-        fill = data.attrs.pop('_FillValue')
-        offset = data.attrs.get('add_offset')
-        scale_factor = data.attrs.get('scale_factor')
+        fill = data.attrs.pop("_FillValue")
+        offset = data.attrs.get("add_offset")
+        scale_factor = data.attrs.get("scale_factor")
         data = data.where(data != fill)
         data = data.astype(np.float32)
         if scale_factor is not None and offset is not None:
             data = data * scale_factor
             data = data + offset
 
         # Now we correct range from 0-1 to 0-100 for VIS:
-        if ds_id['name'] in self.ref_bands:
+        if ds_id["name"] in self.ref_bands:
             data = data * 100.
 
         return data
 
     def get_area_def(self, ds_id):
         """Get the area def."""
         ds_get_name = self._get_dsname(ds_id)
-        ds_shape = self[ds_get_name + '/shape']
+        ds_shape = self[ds_get_name + "/shape"]
         geoloc = self.geoloc
 
         pdict = {}
-        pdict['cfac'] = np.int32(geoloc[0])
-        pdict['lfac'] = np.int32(geoloc[1])
-        pdict['coff'] = np.float32(geoloc[2])
-        pdict['loff'] = -np.float32(geoloc[3])
+        pdict["cfac"] = np.int32(geoloc[0])
+        pdict["lfac"] = np.int32(geoloc[1])
+        pdict["coff"] = np.float32(geoloc[2])
+        pdict["loff"] = -np.float32(geoloc[3])
 
         # Unfortunately this dataset does not store a, b or h.
# We assume a and b here, and calculate h from altitude # a and b are from SEVIRI data HRIT header (201912101300) - pdict['a'] = 6378169 - pdict['b'] = 6356583.8 - pdict['h'] = self.alt - pdict['a'] - pdict['ssp_lon'] = self.projlon - pdict['ncols'] = int(ds_shape[0]) - pdict['nlines'] = int(ds_shape[1]) + pdict["a"] = 6378169 + pdict["b"] = 6356583.8 + pdict["h"] = self.alt - pdict["a"] + pdict["ssp_lon"] = self.projlon + pdict["ncols"] = int(ds_shape[0]) + pdict["nlines"] = int(ds_shape[1]) # Force scandir to SEVIRI default, not known from file - pdict['scandir'] = 'S2N' - pdict['a_name'] = 'geosmsg' - if ds_id['name'] == 'HRV': - pdict['a_desc'] = 'MSG/SEVIRI HRV channel area' - pdict['p_id'] = 'msg_hires' + pdict["scandir"] = "S2N" + pdict["a_name"] = "geosmsg" + if ds_id["name"] == "HRV": + pdict["a_desc"] = "MSG/SEVIRI HRV channel area" + pdict["p_id"] = "msg_hires" else: - pdict['a_desc'] = 'MSG/SEVIRI low resolution channel area' - pdict['p_id'] = 'msg_lowres' + pdict["a_desc"] = "MSG/SEVIRI low resolution channel area" + pdict["p_id"] = "msg_lowres" aex = get_area_extent(pdict) area = get_area_definition(pdict, aex) diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index cdad865f0c..81f2d01300 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -140,9 +140,9 @@ from satpy.readers.utils import reduce_mda from satpy.utils import get_legacy_chunk_size -logger = logging.getLogger('native_msg') +logger = logging.getLogger("native_msg") CHUNK_SIZE = get_legacy_chunk_size() -ASCII_STARTSWITH = b'FormatName : NATIVE' +ASCII_STARTSWITH = b"FormatName : NATIVE" class NativeMSGFileHandler(BaseFileHandler): @@ -170,7 +170,7 @@ class NativeMSGFileHandler(BaseFileHandler): """ def __init__(self, filename, filename_info, filetype_info, - calib_mode='nominal', fill_disk=False, ext_calib_coefs=None, + calib_mode="nominal", fill_disk=False, ext_calib_coefs=None, include_raw_metadata=False, mda_max_array_size=100): """Initialize the reader.""" super(NativeMSGFileHandler, self).__init__(filename, @@ -199,33 +199,33 @@ def __init__(self, filename, filename_info, filetype_info, @property def _repeat_cycle_duration(self): """Get repeat cycle duration from the trailer.""" - if self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1: + if self.trailer["15TRAILER"]["ImageProductionStats"]["ActualScanningSummary"]["ReducedScan"] == 1: return 5 return REPEAT_CYCLE_DURATION @property def nominal_start_time(self): """Get the repeat cycle nominal start time from file header and round it to expected nominal time slot.""" - tm = self.header['15_DATA_HEADER']['ImageAcquisition']['PlannedAcquisitionTime']['TrueRepeatCycleStart'] + tm = self.header["15_DATA_HEADER"]["ImageAcquisition"]["PlannedAcquisitionTime"]["TrueRepeatCycleStart"] return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) @property def nominal_end_time(self): """Get the repeat cycle nominal end time from file header and round it to expected nominal time slot.""" - tm = self.header['15_DATA_HEADER']['ImageAcquisition']['PlannedAcquisitionTime']['PlannedRepeatCycleEnd'] + tm = self.header["15_DATA_HEADER"]["ImageAcquisition"]["PlannedAcquisitionTime"]["PlannedRepeatCycleEnd"] return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) @property def observation_start_time(self): """Get observation start time from trailer.""" - return self.trailer['15TRAILER']['ImageProductionStats'][ - 
'ActualScanningSummary']['ForwardScanStart'] + return self.trailer["15TRAILER"]["ImageProductionStats"][ + "ActualScanningSummary"]["ForwardScanStart"] @property def observation_end_time(self): """Get observation end time from trailer.""" - return self.trailer['15TRAILER']['ImageProductionStats'][ - 'ActualScanningSummary']['ForwardScanEnd'] + return self.trailer["15TRAILER"]["ImageProductionStats"][ + "ActualScanningSummary"]["ForwardScanEnd"] @property def start_time(self): @@ -240,8 +240,8 @@ def end_time(self): def _get_data_dtype(self): """Get the dtype of the file based on the actual available channels.""" pkhrec = [ - ('GP_PK_HEADER', GSDTRecords.gp_pk_header), - ('GP_PK_SH1', GSDTRecords.gp_pk_sh1) + ("GP_PK_HEADER", GSDTRecords.gp_pk_header), + ("GP_PK_SH1", GSDTRecords.gp_pk_sh1) ] pk_head_dtype = np.dtype(pkhrec) @@ -264,14 +264,14 @@ def get_lrec(cols): # each pixel is 10-bits -> one line of data has 25% more bytes # than the number of columns suggest (10/8 = 1.25) - visir_rec = get_lrec(int(self.mda['number_of_columns'] * 1.25)) + visir_rec = get_lrec(int(self.mda["number_of_columns"] * 1.25)) number_of_visir_channels = len( - [s for s in self.mda['channel_list'] if not s == 'HRV']) - drec = [('visir', (visir_rec, number_of_visir_channels))] + [s for s in self.mda["channel_list"] if not s == "HRV"]) + drec = [("visir", (visir_rec, number_of_visir_channels))] - if self.mda['available_channels']['HRV']: - hrv_rec = get_lrec(int(self.mda['hrv_number_of_columns'] * 1.25)) - drec.append(('hrv', (hrv_rec, 3))) + if self.mda["available_channels"]["HRV"]: + hrv_rec = get_lrec(int(self.mda["hrv_number_of_columns"] * 1.25)) + drec.append(("hrv", (hrv_rec, 3))) return np.dtype(drec) @@ -282,51 +282,51 @@ def _get_memmap(self): hdr_size = self.header_type.itemsize return np.memmap(fp, dtype=data_dtype, - shape=(self.mda['number_of_lines'],), + shape=(self.mda["number_of_lines"],), offset=hdr_size, mode="r") def _read_header(self): """Read the header info.""" self.header.update(read_header(self.filename)) - if '15_SECONDARY_PRODUCT_HEADER' not in self.header: + if "15_SECONDARY_PRODUCT_HEADER" not in self.header: # No archive header, that means we have a complete file # including all channels. 
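
The 10/8 = 1.25 factor in _get_data_dtype above reflects the packed storage: four 10-bit counts occupy five bytes, so a line of N columns takes N * 1.25 bytes on disk. A quick check of that arithmetic:

def packed_line_bytes(ncols):
    # Four 10-bit counts per five bytes, i.e. 1.25 bytes per pixel.
    return ncols * 10 // 8

packed_line_bytes(3712)  # -> 4640 bytes for one full VIS/IR line
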
- self.header['15_SECONDARY_PRODUCT_HEADER'] = DEFAULT_15_SECONDARY_PRODUCT_HEADER + self.header["15_SECONDARY_PRODUCT_HEADER"] = DEFAULT_15_SECONDARY_PRODUCT_HEADER - data15hd = self.header['15_DATA_HEADER'] - sec15hd = self.header['15_SECONDARY_PRODUCT_HEADER'] + data15hd = self.header["15_DATA_HEADER"] + sec15hd = self.header["15_SECONDARY_PRODUCT_HEADER"] # Set the list of available channels: - self.mda['available_channels'] = get_available_channels(self.header) - self.mda['channel_list'] = [i for i in CHANNEL_NAMES.values() - if self.mda['available_channels'][i]] + self.mda["available_channels"] = get_available_channels(self.header) + self.mda["channel_list"] = [i for i in CHANNEL_NAMES.values() + if self.mda["available_channels"][i]] self.platform_id = data15hd[ - 'SatelliteStatus']['SatelliteDefinition']['SatelliteId'] - self.mda['platform_name'] = "Meteosat-" + SATNUM[self.platform_id] - self.mda['offset_corrected'] = data15hd['GeometricProcessing'][ - 'EarthModel']['TypeOfEarthModel'] == 2 + "SatelliteStatus"]["SatelliteDefinition"]["SatelliteId"] + self.mda["platform_name"] = "Meteosat-" + SATNUM[self.platform_id] + self.mda["offset_corrected"] = data15hd["GeometricProcessing"][ + "EarthModel"]["TypeOfEarthModel"] == 2 - equator_radius = data15hd['GeometricProcessing'][ - 'EarthModel']['EquatorialRadius'] * 1000. + equator_radius = data15hd["GeometricProcessing"][ + "EarthModel"]["EquatorialRadius"] * 1000. north_polar_radius = data15hd[ - 'GeometricProcessing']['EarthModel']['NorthPolarRadius'] * 1000. + "GeometricProcessing"]["EarthModel"]["NorthPolarRadius"] * 1000. south_polar_radius = data15hd[ - 'GeometricProcessing']['EarthModel']['SouthPolarRadius'] * 1000. + "GeometricProcessing"]["EarthModel"]["SouthPolarRadius"] * 1000. polar_radius = (north_polar_radius + south_polar_radius) * 0.5 - ssp_lon = data15hd['ImageDescription'][ - 'ProjectionDescription']['LongitudeOfSSP'] + ssp_lon = data15hd["ImageDescription"][ + "ProjectionDescription"]["LongitudeOfSSP"] - self.mda['projection_parameters'] = {'a': equator_radius, - 'b': polar_radius, - 'h': 35785831.00, - 'ssp_longitude': ssp_lon} + self.mda["projection_parameters"] = {"a": equator_radius, + "b": polar_radius, + "h": 35785831.00, + "ssp_longitude": ssp_lon} - north = int(sec15hd['NorthLineSelectedRectangle']['Value']) - east = int(sec15hd['EastColumnSelectedRectangle']['Value']) - south = int(sec15hd['SouthLineSelectedRectangle']['Value']) - west = int(sec15hd['WestColumnSelectedRectangle']['Value']) + north = int(sec15hd["NorthLineSelectedRectangle"]["Value"]) + east = int(sec15hd["EastColumnSelectedRectangle"]["Value"]) + south = int(sec15hd["SouthLineSelectedRectangle"]["Value"]) + west = int(sec15hd["WestColumnSelectedRectangle"]["Value"]) ncolumns = west - east + 1 nrows = north - south + 1 @@ -335,9 +335,9 @@ def _read_header(self): # the maximum, if so it is a rapid scanning service # or region of interest file if (nrows < VISIR_NUM_LINES) or (ncolumns < VISIR_NUM_COLUMNS): - self.mda['is_full_disk'] = False + self.mda["is_full_disk"] = False else: - self.mda['is_full_disk'] = True + self.mda["is_full_disk"] = True # If the number of columns in the file is not divisible by 4, # UMARF will add extra columns to the file @@ -349,7 +349,7 @@ def _read_header(self): # Check the VISIR calculated column dimension against # the header information - cols_visir_hdr = int(sec15hd['NumberColumnsVISIR']['Value']) + cols_visir_hdr = int(sec15hd["NumberColumnsVISIR"]["Value"]) if cols_visir_hdr != cols_visir: logger.warning( 
"Number of VISIR columns from the header is incorrect!") @@ -358,21 +358,21 @@ def _read_header(self): # HRV Channel - check if the area is reduced in east west # direction as this affects the number of columns in the file - cols_hrv_hdr = int(sec15hd['NumberColumnsHRV']['Value']) + cols_hrv_hdr = int(sec15hd["NumberColumnsHRV"]["Value"]) if ncolumns < VISIR_NUM_COLUMNS: cols_hrv = cols_hrv_hdr else: cols_hrv = int(cols_hrv_hdr / 2) # self.mda represents the 16bit dimensions not 10bit - self.mda['number_of_lines'] = int(sec15hd['NumberLinesVISIR']['Value']) - self.mda['number_of_columns'] = cols_visir - self.mda['hrv_number_of_lines'] = int(sec15hd["NumberLinesHRV"]['Value']) - self.mda['hrv_number_of_columns'] = cols_hrv + self.mda["number_of_lines"] = int(sec15hd["NumberLinesVISIR"]["Value"]) + self.mda["number_of_columns"] = cols_visir + self.mda["hrv_number_of_lines"] = int(sec15hd["NumberLinesHRV"]["Value"]) + self.mda["hrv_number_of_columns"] = cols_hrv - if '15_MAIN_PRODUCT_HEADER' not in self.header: + if "15_MAIN_PRODUCT_HEADER" not in self.header: logger.info("Quality flag check was not possible due to missing 15_MAIN_PRODUCT_HEADER.") - elif self.header['15_MAIN_PRODUCT_HEADER']['QQOV']['Value'] == 'NOK': + elif self.header["15_MAIN_PRODUCT_HEADER"]["QQOV"]["Value"] == "NOK": warnings.warn( "The quality flag for this file indicates not OK. " "Use this data with caution!", @@ -384,7 +384,7 @@ def _read_trailer(self): hdr_size = self.header_type.itemsize data_size = (self._get_data_dtype().itemsize * - self.mda['number_of_lines']) + self.mda["number_of_lines"]) with open(self.filename) as fp: fp.seek(hdr_size + data_size) @@ -417,27 +417,27 @@ def get_area_def(self, dataset_id): """ pdict = dict() - pdict['a'] = self.mda['projection_parameters']['a'] - pdict['b'] = self.mda['projection_parameters']['b'] - pdict['h'] = self.mda['projection_parameters']['h'] - pdict['ssp_lon'] = self.mda['projection_parameters']['ssp_longitude'] - - area_naming_input_dict = {'platform_name': 'msg', - 'instrument_name': 'seviri', - 'resolution': int(dataset_id['resolution']) + pdict["a"] = self.mda["projection_parameters"]["a"] + pdict["b"] = self.mda["projection_parameters"]["b"] + pdict["h"] = self.mda["projection_parameters"]["h"] + pdict["ssp_lon"] = self.mda["projection_parameters"]["ssp_longitude"] + + area_naming_input_dict = {"platform_name": "msg", + "instrument_name": "seviri", + "resolution": int(dataset_id["resolution"]) } area_naming = get_geos_area_naming({**area_naming_input_dict, - **get_service_mode('seviri', pdict['ssp_lon'])}) + **get_service_mode("seviri", pdict["ssp_lon"])}) - pdict['a_name'] = area_naming['area_id'] - pdict['a_desc'] = area_naming['description'] - pdict['p_id'] = "" + pdict["a_name"] = area_naming["area_id"] + pdict["a_desc"] = area_naming["description"] + pdict["p_id"] = "" area_extent = self.get_area_extent(dataset_id) areas = list() - for aex, nlines, ncolumns in zip(area_extent['area_extent'], area_extent['nlines'], area_extent['ncolumns']): - pdict['nlines'] = nlines - pdict['ncols'] = ncolumns + for aex, nlines, ncolumns in zip(area_extent["area_extent"], area_extent["nlines"], area_extent["ncolumns"]): + pdict["nlines"] = nlines + pdict["ncols"] = ncolumns areas.append(get_area_definition(pdict, aex)) if len(areas) == 2: @@ -459,80 +459,80 @@ def get_area_extent(self, dataset_id): of the area extent is documented in a `developer's memo `_. 
""" - data15hd = self.header['15_DATA_HEADER'] + data15hd = self.header["15_DATA_HEADER"] # check for Earth model as this affects the north-south and # west-east offsets # section 3.1.4.2 of MSG Level 1.5 Image Data Format Description - earth_model = data15hd['GeometricProcessing']['EarthModel'][ - 'TypeOfEarthModel'] + earth_model = data15hd["GeometricProcessing"]["EarthModel"][ + "TypeOfEarthModel"] if earth_model == 2: ns_offset = 0 we_offset = 0 elif earth_model == 1: ns_offset = -0.5 we_offset = 0.5 - if dataset_id['name'] == 'HRV': + if dataset_id["name"] == "HRV": ns_offset = -1.5 we_offset = 1.5 else: raise NotImplementedError( - 'Unrecognised Earth model: {}'.format(earth_model) + "Unrecognised Earth model: {}".format(earth_model) ) - if dataset_id['name'] == 'HRV': - grid_origin = data15hd['ImageDescription']['ReferenceGridHRV']['GridOrigin'] + if dataset_id["name"] == "HRV": + grid_origin = data15hd["ImageDescription"]["ReferenceGridHRV"]["GridOrigin"] center_point = (HRV_NUM_COLUMNS / 2) - 2 - column_step = data15hd['ImageDescription']['ReferenceGridHRV']['ColumnDirGridStep'] * 1000.0 - line_step = data15hd['ImageDescription']['ReferenceGridHRV']['LineDirGridStep'] * 1000.0 + column_step = data15hd["ImageDescription"]["ReferenceGridHRV"]["ColumnDirGridStep"] * 1000.0 + line_step = data15hd["ImageDescription"]["ReferenceGridHRV"]["LineDirGridStep"] * 1000.0 nlines_fulldisk = HRV_NUM_LINES ncolumns_fulldisk = HRV_NUM_COLUMNS else: - grid_origin = data15hd['ImageDescription']['ReferenceGridVIS_IR']['GridOrigin'] + grid_origin = data15hd["ImageDescription"]["ReferenceGridVIS_IR"]["GridOrigin"] center_point = VISIR_NUM_COLUMNS / 2 - column_step = data15hd['ImageDescription']['ReferenceGridVIS_IR']['ColumnDirGridStep'] * 1000.0 - line_step = data15hd['ImageDescription']['ReferenceGridVIS_IR']['LineDirGridStep'] * 1000.0 + column_step = data15hd["ImageDescription"]["ReferenceGridVIS_IR"]["ColumnDirGridStep"] * 1000.0 + line_step = data15hd["ImageDescription"]["ReferenceGridVIS_IR"]["LineDirGridStep"] * 1000.0 nlines_fulldisk = VISIR_NUM_LINES ncolumns_fulldisk = VISIR_NUM_COLUMNS # Calculations assume grid origin is south-east corner # section 7.2.4 of MSG Level 1.5 Image Data Format Description - origins = {0: 'NW', 1: 'SW', 2: 'SE', 3: 'NE'} + origins = {0: "NW", 1: "SW", 2: "SE", 3: "NE"} if grid_origin != 2: - msg = 'Grid origin not supported number: {}, {} corner'.format( + msg = "Grid origin not supported number: {}, {} corner".format( grid_origin, origins[grid_origin] ) raise NotImplementedError(msg) - aex_data = {'area_extent': [], 'nlines': [], 'ncolumns': []} + aex_data = {"area_extent": [], "nlines": [], "ncolumns": []} img_bounds = self.image_boundaries.get_img_bounds(dataset_id, self.is_roi()) for south_bound, north_bound, east_bound, west_bound in zip(*img_bounds.values()): if self.fill_disk: east_bound, west_bound = 1, ncolumns_fulldisk - if not self.mda['is_full_disk']: + if not self.mda["is_full_disk"]: south_bound, north_bound = 1, nlines_fulldisk nlines = north_bound - south_bound + 1 ncolumns = west_bound - east_bound + 1 - area_dict = {'center_point': center_point, - 'east': east_bound, - 'west': west_bound, - 'south': south_bound, - 'north': north_bound, - 'column_step': column_step, - 'line_step': line_step, - 'column_offset': we_offset, - 'line_offset': ns_offset + area_dict = {"center_point": center_point, + "east": east_bound, + "west": west_bound, + "south": south_bound, + "north": north_bound, + "column_step": column_step, + "line_step": line_step, + 
"column_offset": we_offset, + "line_offset": ns_offset } aex = calculate_area_extent(area_dict) - aex_data['area_extent'].append(aex) - aex_data['nlines'].append(nlines) - aex_data['ncolumns'].append(ncolumns) + aex_data["area_extent"].append(aex) + aex_data["nlines"].append(nlines) + aex_data["ncolumns"].append(ncolumns) return aex_data @@ -543,28 +543,28 @@ def is_roi(self): of the SEVIRI disk. Hence, if the data does not cover the full disk, nor the standard RSS region in RSS mode, it's assumed to be ROI data. """ - is_rapid_scan = self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] + is_rapid_scan = self.trailer["15TRAILER"]["ImageProductionStats"]["ActualScanningSummary"]["ReducedScan"] # Standard RSS data is assumed to cover the three northmost segments, thus consisting of all 3712 columns and # the 1392 northmost lines - nlines = int(self.mda['number_of_lines']) - ncolumns = int(self.mda['number_of_columns']) - north_bound = int(self.header['15_SECONDARY_PRODUCT_HEADER']['NorthLineSelectedRectangle']['Value']) + nlines = int(self.mda["number_of_lines"]) + ncolumns = int(self.mda["number_of_columns"]) + north_bound = int(self.header["15_SECONDARY_PRODUCT_HEADER"]["NorthLineSelectedRectangle"]["Value"]) is_top3segments = (ncolumns == VISIR_NUM_COLUMNS and nlines == 1392 and north_bound == VISIR_NUM_LINES) - return not self.mda['is_full_disk'] and not (is_rapid_scan and is_top3segments) + return not self.mda["is_full_disk"] and not (is_rapid_scan and is_top3segments) def get_dataset(self, dataset_id, dataset_info): """Get the dataset.""" - if dataset_id['name'] not in self.mda['channel_list']: - raise KeyError('Channel % s not available in the file' % dataset_id['name']) - elif dataset_id['name'] not in ['HRV']: + if dataset_id["name"] not in self.mda["channel_list"]: + raise KeyError("Channel % s not available in the file" % dataset_id["name"]) + elif dataset_id["name"] not in ["HRV"]: data = self._get_visir_channel(dataset_id) else: data = self._get_hrv_channel() - xarr = xr.DataArray(data, dims=['y', 'x']).where(data != 0).astype(np.float32) + xarr = xr.DataArray(data, dims=["y", "x"]).where(data != 0).astype(np.float32) if xarr is None: return None @@ -573,34 +573,34 @@ def get_dataset(self, dataset_id, dataset_info): self._add_scanline_acq_time(dataset, dataset_id) self._update_attrs(dataset, dataset_info) - if self.fill_disk and not (dataset_id['name'] != 'HRV' and self.mda['is_full_disk']): + if self.fill_disk and not (dataset_id["name"] != "HRV" and self.mda["is_full_disk"]): padder = Padder(dataset_id, self.image_boundaries.get_img_bounds(dataset_id, self.is_roi()), - self.mda['is_full_disk']) + self.mda["is_full_disk"]) dataset = padder.pad_data(dataset) return dataset def _get_visir_channel(self, dataset_id): - shape = (self.mda['number_of_lines'], self.mda['number_of_columns']) + shape = (self.mda["number_of_lines"], self.mda["number_of_columns"]) # Check if there is only 1 channel in the list as a change # is needed in the array assignment ie channel id is not present - if len(self.mda['channel_list']) == 1: - raw = self.dask_array['visir']['line_data'] + if len(self.mda["channel_list"]) == 1: + raw = self.dask_array["visir"]["line_data"] else: - i = self.mda['channel_list'].index(dataset_id['name']) - raw = self.dask_array['visir']['line_data'][:, i, :] + i = self.mda["channel_list"].index(dataset_id["name"]) + raw = self.dask_array["visir"]["line_data"][:, i, :] data = dec10216(raw.flatten()) data = data.reshape(shape) return data 
def _get_hrv_channel(self): - shape = (self.mda['hrv_number_of_lines'], self.mda['hrv_number_of_columns']) - shape_layer = (self.mda['number_of_lines'], self.mda['hrv_number_of_columns']) + shape = (self.mda["hrv_number_of_lines"], self.mda["hrv_number_of_columns"]) + shape_layer = (self.mda["number_of_lines"], self.mda["hrv_number_of_columns"]) data_list = [] for i in range(3): - raw = self.dask_array['hrv']['line_data'][:, i, :] + raw = self.dask_array["hrv"]["line_data"][:, i, :] data = dec10216(raw.flatten()) data = data.reshape(shape_layer) data_list.append(data) @@ -610,7 +610,7 @@ def _get_hrv_channel(self): def calibrate(self, data, dataset_id): """Calibrate the data.""" tic = datetime.now() - channel_name = dataset_id['name'] + channel_name = dataset_id["name"] calib = SEVIRICalibrationHandler( platform_id=self.platform_id, channel_name=channel_name, @@ -618,7 +618,7 @@ def calibrate(self, data, dataset_id): calib_mode=self.calib_mode, scan_time=self.observation_start_time ) - res = calib.calibrate(data, dataset_id['calibration']) + res = calib.calibrate(data, dataset_id["calibration"]) logger.debug("Calibration time " + str(datetime.now() - tic)) return res @@ -629,20 +629,20 @@ def _get_calib_coefs(self, channel_name): # hence, this channel index needs to refer to full channel list band_idx = list(CHANNEL_NAMES.values()).index(channel_name) - coefs_nominal = self.header['15_DATA_HEADER'][ - 'RadiometricProcessing']['Level15ImageCalibration'] - coefs_gsics = self.header['15_DATA_HEADER'][ - 'RadiometricProcessing']['MPEFCalFeedback'] - radiance_types = self.header['15_DATA_HEADER']['ImageDescription'][ - 'Level15ImageProduction']['PlannedChanProcessing'] + coefs_nominal = self.header["15_DATA_HEADER"][ + "RadiometricProcessing"]["Level15ImageCalibration"] + coefs_gsics = self.header["15_DATA_HEADER"][ + "RadiometricProcessing"]["MPEFCalFeedback"] + radiance_types = self.header["15_DATA_HEADER"]["ImageDescription"][ + "Level15ImageProduction"]["PlannedChanProcessing"] return create_coef_dict( coefs_nominal=( - coefs_nominal['CalSlope'][band_idx], - coefs_nominal['CalOffset'][band_idx] + coefs_nominal["CalSlope"][band_idx], + coefs_nominal["CalOffset"][band_idx] ), coefs_gsics=( - coefs_gsics['GSICSCalCoeff'][band_idx], - coefs_gsics['GSICSOffsetCount'][band_idx] + coefs_gsics["GSICSCalCoeff"][band_idx], + coefs_gsics["GSICSOffsetCount"][band_idx] ), ext_coefs=self.ext_calib_coefs.get(channel_name, {}), radiance_type=radiance_types[band_idx] @@ -650,69 +650,69 @@ def _get_calib_coefs(self, channel_name): def _add_scanline_acq_time(self, dataset, dataset_id): """Add scanline acquisition time to the given dataset.""" - if dataset_id['name'] == 'HRV': + if dataset_id["name"] == "HRV": tline = self._get_acq_time_hrv() else: tline = self._get_acq_time_visir(dataset_id) - acq_time = get_cds_time(days=tline['Days'], msecs=tline['Milliseconds']) + acq_time = get_cds_time(days=tline["Days"], msecs=tline["Milliseconds"]) add_scanline_acq_time(dataset, acq_time) def _get_acq_time_hrv(self): """Get raw acquisition time for HRV channel.""" - tline = self.dask_array['hrv']['acq_time'] + tline = self.dask_array["hrv"]["acq_time"] tline0 = tline[:, 0] tline1 = tline[:, 1] tline2 = tline[:, 2] return da.stack((tline0, tline1, tline2), axis=1).reshape( - self.mda['hrv_number_of_lines']).compute() + self.mda["hrv_number_of_lines"]).compute() def _get_acq_time_visir(self, dataset_id): """Get raw acquisition time for VIS/IR channels.""" # Check if there is only 1 channel in the list as a change # is 
needed in the array assignment, i.e. channel id is not present - if len(self.mda['channel_list']) == 1: - return self.dask_array['visir']['acq_time'].compute() - i = self.mda['channel_list'].index(dataset_id['name']) - return self.dask_array['visir']['acq_time'][:, i].compute() + if len(self.mda["channel_list"]) == 1: + return self.dask_array["visir"]["acq_time"].compute() + i = self.mda["channel_list"].index(dataset_id["name"]) + return self.dask_array["visir"]["acq_time"][:, i].compute() def _update_attrs(self, dataset, dataset_info): """Update dataset attributes.""" - dataset.attrs['units'] = dataset_info['units'] - dataset.attrs['wavelength'] = dataset_info['wavelength'] - dataset.attrs['standard_name'] = dataset_info['standard_name'] - dataset.attrs['platform_name'] = self.mda['platform_name'] - dataset.attrs['sensor'] = 'seviri' - dataset.attrs['georef_offset_corrected'] = self.mda[ - 'offset_corrected'] - dataset.attrs['time_parameters'] = { - 'nominal_start_time': self.nominal_start_time, - 'nominal_end_time': self.nominal_end_time, - 'observation_start_time': self.observation_start_time, - 'observation_end_time': self.observation_end_time, + dataset.attrs["units"] = dataset_info["units"] + dataset.attrs["wavelength"] = dataset_info["wavelength"] + dataset.attrs["standard_name"] = dataset_info["standard_name"] + dataset.attrs["platform_name"] = self.mda["platform_name"] + dataset.attrs["sensor"] = "seviri" + dataset.attrs["georef_offset_corrected"] = self.mda[ + "offset_corrected"] + dataset.attrs["time_parameters"] = { + "nominal_start_time": self.nominal_start_time, + "nominal_end_time": self.nominal_end_time, + "observation_start_time": self.observation_start_time, + "observation_end_time": self.observation_end_time, } - dataset.attrs['orbital_parameters'] = self._get_orbital_parameters() + dataset.attrs["orbital_parameters"] = self._get_orbital_parameters() if self.include_raw_metadata: - dataset.attrs['raw_metadata'] = reduce_mda( + dataset.attrs["raw_metadata"] = reduce_mda( self.header, max_size=self.mda_max_array_size ) def _get_orbital_parameters(self): orbital_parameters = { - 'projection_longitude': self.mda['projection_parameters'][ - 'ssp_longitude'], - 'projection_latitude': 0., - 'projection_altitude': self.mda['projection_parameters']['h'], - 'satellite_nominal_longitude': self.header['15_DATA_HEADER'][ - 'SatelliteStatus']['SatelliteDefinition'][ - 'NominalLongitude'], - 'satellite_nominal_latitude': 0.0 + "projection_longitude": self.mda["projection_parameters"][ + "ssp_longitude"], + "projection_latitude": 0., + "projection_altitude": self.mda["projection_parameters"]["h"], + "satellite_nominal_longitude": self.header["15_DATA_HEADER"][ + "SatelliteStatus"]["SatelliteDefinition"][ + "NominalLongitude"], + "satellite_nominal_latitude": 0.0 } try: actual_lon, actual_lat, actual_alt = self.satpos orbital_parameters.update({ - 'satellite_actual_longitude': actual_lon, - 'satellite_actual_latitude': actual_lat, - 'satellite_actual_altitude': actual_alt + "satellite_actual_longitude": actual_lon, + "satellite_actual_latitude": actual_lat, + "satellite_actual_altitude": actual_alt }) except NoValidOrbitParams as err: logger.warning(err) @@ -726,14 +726,14 @@ def satpos(self): Returns: Longitude [deg east], Latitude [deg north] and Altitude [m] """ - poly_finder = OrbitPolynomialFinder(self.header['15_DATA_HEADER'][ - 'SatelliteStatus']['Orbit']['OrbitPolynomial']) + poly_finder = OrbitPolynomialFinder(self.header["15_DATA_HEADER"][ + 
"SatelliteStatus"]["Orbit"]["OrbitPolynomial"]) orbit_polynomial = poly_finder.get_orbit_polynomial(self.start_time) return get_satpos( orbit_polynomial=orbit_polynomial, time=self.observation_start_time, - semi_major_axis=self.mda['projection_parameters']['a'], - semi_minor_axis=self.mda['projection_parameters']['b'] + semi_major_axis=self.mda["projection_parameters"]["a"], + semi_minor_axis=self.mda["projection_parameters"]["b"] ) @@ -755,7 +755,7 @@ def get_img_bounds(self, dataset_id, is_roi): Lists (rather than scalars) are returned since the HRV data in FES mode contain data from two windows/areas. """ - if dataset_id['name'] == 'HRV' and not is_roi: + if dataset_id["name"] == "HRV" and not is_roi: img_bounds = self._get_hrv_actual_img_bounds() else: img_bounds = self._get_selected_img_bounds(dataset_id) @@ -766,28 +766,28 @@ def get_img_bounds(self, dataset_id, is_roi): def _get_hrv_actual_img_bounds(self): """Get HRV (if not ROI) image boundaries from the ActualL15CoverageHRV information stored in the trailer.""" - hrv_bounds = self._trailer['15TRAILER']['ImageProductionStats']['ActualL15CoverageHRV'] + hrv_bounds = self._trailer["15TRAILER"]["ImageProductionStats"]["ActualL15CoverageHRV"] - img_bounds = {'south_bound': [], 'north_bound': [], 'east_bound': [], 'west_bound': []} - for hrv_window in ['Lower', 'Upper']: - img_bounds['south_bound'].append(hrv_bounds['%sSouthLineActual' % hrv_window]) - img_bounds['north_bound'].append(hrv_bounds['%sNorthLineActual' % hrv_window]) - img_bounds['east_bound'].append(hrv_bounds['%sEastColumnActual' % hrv_window]) - img_bounds['west_bound'].append(hrv_bounds['%sWestColumnActual' % hrv_window]) + img_bounds = {"south_bound": [], "north_bound": [], "east_bound": [], "west_bound": []} + for hrv_window in ["Lower", "Upper"]: + img_bounds["south_bound"].append(hrv_bounds["%sSouthLineActual" % hrv_window]) + img_bounds["north_bound"].append(hrv_bounds["%sNorthLineActual" % hrv_window]) + img_bounds["east_bound"].append(hrv_bounds["%sEastColumnActual" % hrv_window]) + img_bounds["west_bound"].append(hrv_bounds["%sWestColumnActual" % hrv_window]) # Data from the upper hrv window are only available in FES mode - if not self._mda['is_full_disk']: + if not self._mda["is_full_disk"]: break return img_bounds def _get_selected_img_bounds(self, dataset_id): """Get VISIR and HRV (if ROI) image boundaries from the SelectedRectangle information stored in the header.""" - sec15hd = self._header['15_SECONDARY_PRODUCT_HEADER'] - south_bound = int(sec15hd['SouthLineSelectedRectangle']['Value']) - east_bound = int(sec15hd['EastColumnSelectedRectangle']['Value']) + sec15hd = self._header["15_SECONDARY_PRODUCT_HEADER"] + south_bound = int(sec15hd["SouthLineSelectedRectangle"]["Value"]) + east_bound = int(sec15hd["EastColumnSelectedRectangle"]["Value"]) - if dataset_id['name'] == 'HRV': + if dataset_id["name"] == "HRV": nlines, ncolumns = self._get_hrv_img_shape() south_bound = self._convert_visir_bound_to_hrv(south_bound) east_bound = self._convert_visir_bound_to_hrv(east_bound) @@ -797,19 +797,19 @@ def _get_selected_img_bounds(self, dataset_id): north_bound = south_bound + nlines - 1 west_bound = east_bound + ncolumns - 1 - img_bounds = {'south_bound': [south_bound], 'north_bound': [north_bound], - 'east_bound': [east_bound], 'west_bound': [west_bound]} + img_bounds = {"south_bound": [south_bound], "north_bound": [north_bound], + "east_bound": [east_bound], "west_bound": [west_bound]} return img_bounds def _get_hrv_img_shape(self): - nlines = 
int(self._mda['hrv_number_of_lines']) - ncolumns = int(self._mda['hrv_number_of_columns']) + nlines = int(self._mda["hrv_number_of_lines"]) + ncolumns = int(self._mda["hrv_number_of_columns"]) return nlines, ncolumns def _get_visir_img_shape(self): - nlines = int(self._mda['number_of_lines']) - ncolumns = int(self._mda['number_of_columns']) + nlines = int(self._mda["number_of_lines"]) + ncolumns = int(self._mda["number_of_columns"]) return nlines, ncolumns @staticmethod @@ -824,7 +824,7 @@ def _check_for_valid_bounds(img_bounds): no_empty = (min(len_img_bounds) > 0) if not (same_lengths and no_empty): - raise ValueError('Invalid image boundaries') + raise ValueError("Invalid image boundaries") class Padder: @@ -835,14 +835,14 @@ def __init__(self, dataset_id, img_bounds, is_full_disk): self._img_bounds = img_bounds self._is_full_disk = is_full_disk - if dataset_id['name'] == 'HRV': + if dataset_id["name"] == "HRV": self._final_shape = (HRV_NUM_LINES, HRV_NUM_COLUMNS) else: self._final_shape = (VISIR_NUM_LINES, VISIR_NUM_COLUMNS) def pad_data(self, dataset): """Pad data to full disk with empty pixels.""" - logger.debug('Padding data to full disk') + logger.debug("Padding data to full disk") data_list = [] for south_bound, north_bound, east_bound, west_bound in zip(*self._img_bounds.values()): @@ -857,7 +857,7 @@ def pad_data(self, dataset): if not self._is_full_disk: padded_data = pad_data_vertically(padded_data, self._final_shape, south_bound, north_bound) - return xr.DataArray(padded_data, dims=('y', 'x'), attrs=dataset.attrs.copy()) + return xr.DataArray(padded_data, dims=("y", "x"), attrs=dataset.attrs.copy()) def _extract_data_to_pad(self, dataset, south_bound, north_bound): """Extract the data that shall be padded. @@ -875,19 +875,19 @@ def _extract_data_to_pad(self, dataset, south_bound, north_bound): def get_available_channels(header): """Get the available channels from the header information.""" - channels_str = header['15_SECONDARY_PRODUCT_HEADER'][ - 'SelectedBandIDs']['Value'] + channels_str = header["15_SECONDARY_PRODUCT_HEADER"][ + "SelectedBandIDs"]["Value"] available_channels = {} for idx, char in zip(range(12), channels_str): - available_channels[CHANNEL_NAMES[idx + 1]] = (char == 'X') + available_channels[CHANNEL_NAMES[idx + 1]] = (char == "X") return available_channels def has_archive_header(filename): """Check whether the file includes an ASCII archive header.""" - with open(filename, mode='rb') as istream: + with open(filename, mode="rb") as istream: return istream.read(36) == ASCII_STARTSWITH diff --git a/satpy/readers/seviri_l1b_native_hdr.py b/satpy/readers/seviri_l1b_native_hdr.py index 8c0212a6f2..56c5c0c3c9 100644 --- a/satpy/readers/seviri_l1b_native_hdr.py +++ b/satpy/readers/seviri_l1b_native_hdr.py @@ -39,37 +39,37 @@ class GSDTRecords(object): # 4 bytes gp_cpu_address = [ - ('Qualifier_1', np.uint8), - ('Qualifier_2', np.uint8), - ('Qualifier_3', np.uint8), - ('Qualifier_4', np.uint8) + ("Qualifier_1", np.uint8), + ("Qualifier_2", np.uint8), + ("Qualifier_3", np.uint8), + ("Qualifier_4", np.uint8) ] # 22 bytes gp_pk_header = [ - ('HeaderVersionNo', np.uint8), - ('PacketType', np.uint8), - ('SubHeaderType', np.uint8), - ('SourceFacilityId', gp_fac_id), - ('SourceEnvId', gp_fac_env), - ('SourceInstanceId', np.uint8), - ('SourceSUId', gp_su_id), - ('SourceCPUId', gp_cpu_address), - ('DestFacilityId', gp_fac_id), - ('DestEnvId', gp_fac_env), - ('SequenceCount', np.uint16), - ('PacketLength', np.int32) + ("HeaderVersionNo", np.uint8), + ("PacketType", np.uint8), + 
("SubHeaderType", np.uint8), + ("SourceFacilityId", gp_fac_id), + ("SourceEnvId", gp_fac_env), + ("SourceInstanceId", np.uint8), + ("SourceSUId", gp_su_id), + ("SourceCPUId", gp_cpu_address), + ("DestFacilityId", gp_fac_id), + ("DestEnvId", gp_fac_env), + ("SequenceCount", np.uint16), + ("PacketLength", np.int32) ] # 16 bytes gp_pk_sh1 = [ - ('SubHeaderVersionNo', np.uint8), - ('ChecksumFlag', bool), - ('Acknowledgement', (np.uint8, 4)), - ('ServiceType', gp_svce_type), - ('ServiceSubtype', np.uint8), - ('PacketTime', time_cds_short), - ('SpacecraftId', gp_sc_id) + ("SubHeaderVersionNo", np.uint8), + ("ChecksumFlag", bool), + ("Acknowledgement", (np.uint8, 4)), + ("ServiceType", gp_svce_type), + ("ServiceSubtype", np.uint8), + ("PacketTime", time_cds_short), + ("SpacecraftId", gp_sc_id) ] @@ -83,17 +83,17 @@ def get(self, with_archive_header): record = [] if with_archive_header: record += [ - ('15_MAIN_PRODUCT_HEADER', L15MainProductHeaderRecord().get()), - ('15_SECONDARY_PRODUCT_HEADER', + ("15_MAIN_PRODUCT_HEADER", L15MainProductHeaderRecord().get()), + ("15_SECONDARY_PRODUCT_HEADER", L15SecondaryProductHeaderRecord().get()), ] record += [ - ('GP_PK_HEADER', GSDTRecords.gp_pk_header), - ('GP_PK_SH1', GSDTRecords.gp_pk_sh1), - ('15_DATA_HEADER', L15DataHeaderRecord().get()) + ("GP_PK_HEADER", GSDTRecords.gp_pk_header), + ("GP_PK_SH1", GSDTRecords.gp_pk_sh1), + ("15_DATA_HEADER", L15DataHeaderRecord().get()) ] - return np.dtype(record).newbyteorder('>') + return np.dtype(record).newbyteorder(">") class L15PhData(object): @@ -101,8 +101,8 @@ class L15PhData(object): # 80 bytes l15_ph_data = [ - ('Name', 'S30'), - ('Value', 'S50') + ("Name", "S30"), + ("Value", "S50") ] @@ -118,39 +118,39 @@ def get(self): l15_ph_data = L15PhData.l15_ph_data l15_ph_data_identification = [ - ('Name', 'S30'), - ('Size', 'S16'), - ('Address', 'S16') + ("Name", "S30"), + ("Size", "S16"), + ("Address", "S16") ] # 3674 bytes record = [ - ('FormatName', l15_ph_data), - ('FormatDocumentName', l15_ph_data), - ('FormatDocumentMajorVersion', l15_ph_data), - ('FormatDocumentMinorVersion', l15_ph_data), - ('CreationDateTime', l15_ph_data), - ('CreatingCentre', l15_ph_data), - ('DataSetIdentification', (l15_ph_data_identification, 27)), - ('TotalFileSize', l15_ph_data), - ('GORT', l15_ph_data), - ('ASTI', l15_ph_data), - ('LLOS', l15_ph_data), - ('SNIT', l15_ph_data), - ('AIID', l15_ph_data), - ('SSBT', l15_ph_data), - ('SSST', l15_ph_data), - ('RRCC', l15_ph_data), - ('RRBT', l15_ph_data), - ('RRST', l15_ph_data), - ('PPRC', l15_ph_data), - ('PPDT', l15_ph_data), - ('GPLV', l15_ph_data), - ('APNM', l15_ph_data), - ('AARF', l15_ph_data), - ('UUDT', l15_ph_data), - ('QQOV', l15_ph_data), - ('UDSP', l15_ph_data) + ("FormatName", l15_ph_data), + ("FormatDocumentName", l15_ph_data), + ("FormatDocumentMajorVersion", l15_ph_data), + ("FormatDocumentMinorVersion", l15_ph_data), + ("CreationDateTime", l15_ph_data), + ("CreatingCentre", l15_ph_data), + ("DataSetIdentification", (l15_ph_data_identification, 27)), + ("TotalFileSize", l15_ph_data), + ("GORT", l15_ph_data), + ("ASTI", l15_ph_data), + ("LLOS", l15_ph_data), + ("SNIT", l15_ph_data), + ("AIID", l15_ph_data), + ("SSBT", l15_ph_data), + ("SSST", l15_ph_data), + ("RRCC", l15_ph_data), + ("RRBT", l15_ph_data), + ("RRST", l15_ph_data), + ("PPRC", l15_ph_data), + ("PPDT", l15_ph_data), + ("GPLV", l15_ph_data), + ("APNM", l15_ph_data), + ("AARF", l15_ph_data), + ("UUDT", l15_ph_data), + ("QQOV", l15_ph_data), + ("UDSP", l15_ph_data) ] return record @@ -169,24 +169,24 @@ def 
get(self): # 1440 bytes record = [ - ('ABID', l15_ph_data), - ('SMOD', l15_ph_data), - ('APXS', l15_ph_data), - ('AVPA', l15_ph_data), - ('LSCD', l15_ph_data), - ('LMAP', l15_ph_data), - ('QDLC', l15_ph_data), - ('QDLP', l15_ph_data), - ('QQAI', l15_ph_data), - ('SelectedBandIDs', l15_ph_data), - ('SouthLineSelectedRectangle', l15_ph_data), - ('NorthLineSelectedRectangle', l15_ph_data), - ('EastColumnSelectedRectangle', l15_ph_data), - ('WestColumnSelectedRectangle', l15_ph_data), - ('NumberLinesVISIR', l15_ph_data), - ('NumberColumnsVISIR', l15_ph_data), - ('NumberLinesHRV', l15_ph_data), - ('NumberColumnsHRV', l15_ph_data) + ("ABID", l15_ph_data), + ("SMOD", l15_ph_data), + ("APXS", l15_ph_data), + ("AVPA", l15_ph_data), + ("LSCD", l15_ph_data), + ("LMAP", l15_ph_data), + ("QDLC", l15_ph_data), + ("QDLP", l15_ph_data), + ("QQAI", l15_ph_data), + ("SelectedBandIDs", l15_ph_data), + ("SouthLineSelectedRectangle", l15_ph_data), + ("NorthLineSelectedRectangle", l15_ph_data), + ("EastColumnSelectedRectangle", l15_ph_data), + ("WestColumnSelectedRectangle", l15_ph_data), + ("NumberLinesVISIR", l15_ph_data), + ("NumberColumnsVISIR", l15_ph_data), + ("NumberLinesHRV", l15_ph_data), + ("NumberColumnsHRV", l15_ph_data) ] return record @@ -203,14 +203,14 @@ def get(self): """Get header record data.""" # 445248 bytes record = [ - ('15HeaderVersion', np.uint8), - ('SatelliteStatus', self.satellite_status), - ('ImageAcquisition', self.image_acquisition), - ('CelestialEvents', self.celestial_events), - ('ImageDescription', self.image_description), - ('RadiometricProcessing', self.radiometric_processing), - ('GeometricProcessing', self.geometric_processing), - ('IMPFConfiguration', self.impf_configuration)] + ("15HeaderVersion", np.uint8), + ("SatelliteStatus", self.satellite_status), + ("ImageAcquisition", self.image_acquisition), + ("CelestialEvents", self.celestial_events), + ("ImageDescription", self.image_description), + ("RadiometricProcessing", self.radiometric_processing), + ("GeometricProcessing", self.geometric_processing), + ("IMPFConfiguration", self.impf_configuration)] return record @@ -219,72 +219,72 @@ def satellite_status(self): """Get satellite status data.""" # 7 bytes satellite_definition = [ - ('SatelliteId', np.uint16), - ('NominalLongitude', np.float32), - ('SatelliteStatus', np.uint8)] + ("SatelliteId", np.uint16), + ("NominalLongitude", np.float32), + ("SatelliteStatus", np.uint8)] # 28 bytes satellite_operations = [ - ('LastManoeuvreFlag', bool), - ('LastManoeuvreStartTime', time_cds_short), - ('LastManoeuvreEndTime', time_cds_short), - ('LastManoeuvreType', np.uint8), - ('NextManoeuvreFlag', bool), - ('NextManoeuvreStartTime', time_cds_short), - ('NextManoeuvreEndTime', time_cds_short), - ('NextManoeuvreType', np.uint8)] + ("LastManoeuvreFlag", bool), + ("LastManoeuvreStartTime", time_cds_short), + ("LastManoeuvreEndTime", time_cds_short), + ("LastManoeuvreType", np.uint8), + ("NextManoeuvreFlag", bool), + ("NextManoeuvreStartTime", time_cds_short), + ("NextManoeuvreEndTime", time_cds_short), + ("NextManoeuvreType", np.uint8)] # 396 bytes orbit_coeff = [ - ('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('X', (np.float64, 8)), - ('Y', (np.float64, 8)), - ('Z', (np.float64, 8)), - ('VX', (np.float64, 8)), - ('VY', (np.float64, 8)), - ('VZ', (np.float64, 8))] + ("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("X", (np.float64, 8)), + ("Y", (np.float64, 8)), + ("Z", (np.float64, 8)), + ("VX", (np.float64, 8)), + ("VY", (np.float64, 8)), + ("VZ", 
(np.float64, 8))] # 39612 bytes orbit = [ - ('PeriodStartTime', time_cds_short), - ('PeriodEndTime', time_cds_short), - ('OrbitPolynomial', (orbit_coeff, 100))] + ("PeriodStartTime", time_cds_short), + ("PeriodEndTime", time_cds_short), + ("OrbitPolynomial", (orbit_coeff, 100))] # 204 bytes attitude_coeff = [ - ('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('XofSpinAxis', (np.float64, 8)), - ('YofSpinAxis', (np.float64, 8)), - ('ZofSpinAxis', (np.float64, 8))] + ("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("XofSpinAxis", (np.float64, 8)), + ("YofSpinAxis", (np.float64, 8)), + ("ZofSpinAxis", (np.float64, 8))] # 20420 bytes attitude = [ - ('PeriodStartTime', time_cds_short), - ('PeriodEndTime', time_cds_short), - ('PrincipleAxisOffsetAngle', np.float64), - ('AttitudePolynomial', (attitude_coeff, 100))] + ("PeriodStartTime", time_cds_short), + ("PeriodEndTime", time_cds_short), + ("PrincipleAxisOffsetAngle", np.float64), + ("AttitudePolynomial", (attitude_coeff, 100))] # 59 bytes utc_correlation = [ - ('PeriodStartTime', time_cds_short), - ('PeriodEndTime', time_cds_short), - ('OnBoardTimeStart', (np.uint8, 7)), - ('VarOnBoardTimeStart', np.float64), - ('A1', np.float64), - ('VarA1', np.float64), - ('A2', np.float64), - ('VarA2', np.float64)] + ("PeriodStartTime", time_cds_short), + ("PeriodEndTime", time_cds_short), + ("OnBoardTimeStart", (np.uint8, 7)), + ("VarOnBoardTimeStart", np.float64), + ("A1", np.float64), + ("VarA1", np.float64), + ("A2", np.float64), + ("VarA2", np.float64)] # 60134 bytes record = [ - ('SatelliteDefinition', satellite_definition), - ('SatelliteOperations', satellite_operations), - ('Orbit', orbit), - ('Attitude', attitude), - ('SpinRetreatRCStart', np.float64), - ('UTCCorrelation', utc_correlation)] + ("SatelliteDefinition", satellite_definition), + ("SatelliteOperations", satellite_operations), + ("Orbit", orbit), + ("Attitude", attitude), + ("SpinRetreatRCStart", np.float64), + ("UTCCorrelation", utc_correlation)] return record @@ -292,72 +292,72 @@ def satellite_status(self): def image_acquisition(self): """Get image acquisition data.""" planned_acquisition_time = [ - ('TrueRepeatCycleStart', time_cds_expanded), - ('PlanForwardScanEnd', time_cds_expanded), - ('PlannedRepeatCycleEnd', time_cds_expanded)] + ("TrueRepeatCycleStart", time_cds_expanded), + ("PlanForwardScanEnd", time_cds_expanded), + ("PlannedRepeatCycleEnd", time_cds_expanded)] radiometer_status = [ - ('ChannelStatus', (np.uint8, 12)), - ('DetectorStatus', (np.uint8, 42))] + ("ChannelStatus", (np.uint8, 12)), + ("DetectorStatus", (np.uint8, 42))] hrv_frame_offsets = [ - ('MDUNomHRVDelay1', np.uint16), - ('MDUNomHRVDelay2', np.uint16), - ('Spare', np.uint16), - ('MDUNomHRVBreakLine', np.uint16)] + ("MDUNomHRVDelay1", np.uint16), + ("MDUNomHRVDelay2", np.uint16), + ("Spare", np.uint16), + ("MDUNomHRVBreakLine", np.uint16)] operation_parameters = [ - ('L0_LineCounter', np.uint16), - ('K1_RetraceLines', np.uint16), - ('K2_PauseDeciseconds', np.uint16), - ('K3_RetraceLines', np.uint16), - ('K4_PauseDeciseconds', np.uint16), - ('K5_RetraceLines', np.uint16), - ('XDeepSpaceWindowPosition', np.uint8)] + ("L0_LineCounter", np.uint16), + ("K1_RetraceLines", np.uint16), + ("K2_PauseDeciseconds", np.uint16), + ("K3_RetraceLines", np.uint16), + ("K4_PauseDeciseconds", np.uint16), + ("K5_RetraceLines", np.uint16), + ("XDeepSpaceWindowPosition", np.uint8)] radiometer_settings = [ - ('MDUSamplingDelays', (np.uint16, 42)), - ('HRVFrameOffsets', hrv_frame_offsets), - 
('DHSSSynchSelection', np.uint8), - ('MDUOutGain', (np.uint16, 42)), - ('MDUCoarseGain', (np.uint8, 42)), - ('MDUFineGain', (np.uint16, 42)), - ('MDUNumericalOffset', (np.uint16, 42)), - ('PUGain', (np.uint16, 42)), - ('PUOffset', (np.uint16, 27)), - ('PUBias', (np.uint16, 15)), - ('OperationParameters', operation_parameters), - ('RefocusingLines', np.uint16), - ('RefocusingDirection', np.uint8), - ('RefocusingPosition', np.uint16), - ('ScanRefPosFlag', bool), - ('ScanRefPosNumber', np.uint16), - ('ScanRefPosVal', np.float32), - ('ScanFirstLine', np.uint16), - ('ScanLastLine', np.uint16), - ('RetraceStartLine', np.uint16)] + ("MDUSamplingDelays", (np.uint16, 42)), + ("HRVFrameOffsets", hrv_frame_offsets), + ("DHSSSynchSelection", np.uint8), + ("MDUOutGain", (np.uint16, 42)), + ("MDUCoarseGain", (np.uint8, 42)), + ("MDUFineGain", (np.uint16, 42)), + ("MDUNumericalOffset", (np.uint16, 42)), + ("PUGain", (np.uint16, 42)), + ("PUOffset", (np.uint16, 27)), + ("PUBias", (np.uint16, 15)), + ("OperationParameters", operation_parameters), + ("RefocusingLines", np.uint16), + ("RefocusingDirection", np.uint8), + ("RefocusingPosition", np.uint16), + ("ScanRefPosFlag", bool), + ("ScanRefPosNumber", np.uint16), + ("ScanRefPosVal", np.float32), + ("ScanFirstLine", np.uint16), + ("ScanLastLine", np.uint16), + ("RetraceStartLine", np.uint16)] decontamination = [ - ('DecontaminationNow', bool), - ('DecontaminationStart', time_cds_short), - ('DecontaminationEnd', time_cds_short)] + ("DecontaminationNow", bool), + ("DecontaminationStart", time_cds_short), + ("DecontaminationEnd", time_cds_short)] radiometer_operations = [ - ('LastGainChangeFlag', bool), - ('LastGainChangeTime', time_cds_short), - ('Decontamination', decontamination), - ('BBCalScheduled', bool), - ('BBCalibrationType', np.uint8), - ('BBFirstLine', np.uint16), - ('BBLastLine', np.uint16), - ('ColdFocalPlaneOpTemp', np.uint16), - ('WarmFocalPlaneOpTemp', np.uint16)] + ("LastGainChangeFlag", bool), + ("LastGainChangeTime", time_cds_short), + ("Decontamination", decontamination), + ("BBCalScheduled", bool), + ("BBCalibrationType", np.uint8), + ("BBFirstLine", np.uint16), + ("BBLastLine", np.uint16), + ("ColdFocalPlaneOpTemp", np.uint16), + ("WarmFocalPlaneOpTemp", np.uint16)] record = [ - ('PlannedAcquisitionTime', planned_acquisition_time), - ('RadiometerStatus', radiometer_status), - ('RadiometerSettings', radiometer_settings), - ('RadiometerOperations', radiometer_operations)] + ("PlannedAcquisitionTime", planned_acquisition_time), + ("RadiometerStatus", radiometer_status), + ("RadiometerSettings", radiometer_settings), + ("RadiometerOperations", radiometer_operations)] return record @@ -365,39 +365,39 @@ def image_acquisition(self): def celestial_events(self): """Get celestial events data.""" earth_moon_sun_coeff = [ - ('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('AlphaCoef', (np.float64, 8)), - ('BetaCoef', (np.float64, 8))] + ("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("AlphaCoef", (np.float64, 8)), + ("BetaCoef", (np.float64, 8))] star_coeff = [ - ('StarId', np.uint16), - ('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('AlphaCoef', (np.float64, 8)), - ('BetaCoef', (np.float64, 8))] + ("StarId", np.uint16), + ("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("AlphaCoef", (np.float64, 8)), + ("BetaCoef", (np.float64, 8))] ephemeris = [ - ('PeriodTimeStart', time_cds_short), - ('PeriodTimeEnd', time_cds_short), - ('RelatedOrbitFileTime', 'S15'), - 
('RelatedAttitudeFileTime', 'S15'), - ('EarthEphemeris', (earth_moon_sun_coeff, 100)), - ('MoonEphemeris', (earth_moon_sun_coeff, 100)), - ('SunEphemeris', (earth_moon_sun_coeff, 100)), - ('StarEphemeris', (star_coeff, (20, 100)))] + ("PeriodTimeStart", time_cds_short), + ("PeriodTimeEnd", time_cds_short), + ("RelatedOrbitFileTime", "S15"), + ("RelatedAttitudeFileTime", "S15"), + ("EarthEphemeris", (earth_moon_sun_coeff, 100)), + ("MoonEphemeris", (earth_moon_sun_coeff, 100)), + ("SunEphemeris", (earth_moon_sun_coeff, 100)), + ("StarEphemeris", (star_coeff, (20, 100)))] relation_to_image = [ - ('TypeOfEclipse', np.uint8), - ('EclipseStartTime', time_cds_short), - ('EclipseEndTime', time_cds_short), - ('VisibleBodiesInImage', np.uint8), - ('BodiesCloseToFOV', np.uint8), - ('ImpactOnImageQuality', np.uint8)] + ("TypeOfEclipse", np.uint8), + ("EclipseStartTime", time_cds_short), + ("EclipseEndTime", time_cds_short), + ("VisibleBodiesInImage", np.uint8), + ("BodiesCloseToFOV", np.uint8), + ("ImpactOnImageQuality", np.uint8)] record = [ - ('CelestialBodiesPosition', ephemeris), - ('RelationToImage', relation_to_image)] + ("CelestialBodiesPosition", ephemeris), + ("RelationToImage", relation_to_image)] return record @@ -405,44 +405,44 @@ def celestial_events(self): def image_description(self): """Get image description data.""" projection_description = [ - ('TypeOfProjection', np.uint8), - ('LongitudeOfSSP', np.float32)] + ("TypeOfProjection", np.uint8), + ("LongitudeOfSSP", np.float32)] reference_grid = [ - ('NumberOfLines', np.int32), - ('NumberOfColumns', np.int32), - ('LineDirGridStep', np.float32), - ('ColumnDirGridStep', np.float32), - ('GridOrigin', np.uint8)] + ("NumberOfLines", np.int32), + ("NumberOfColumns", np.int32), + ("LineDirGridStep", np.float32), + ("ColumnDirGridStep", np.float32), + ("GridOrigin", np.uint8)] planned_coverage_vis_ir = [ - ('SouthernLinePlanned', np.int32), - ('NorthernLinePlanned', np.int32), - ('EasternColumnPlanned', np.int32), - ('WesternColumnPlanned', np.int32)] + ("SouthernLinePlanned", np.int32), + ("NorthernLinePlanned", np.int32), + ("EasternColumnPlanned", np.int32), + ("WesternColumnPlanned", np.int32)] planned_coverage_hrv = [ - ('LowerSouthLinePlanned', np.int32), - ('LowerNorthLinePlanned', np.int32), - ('LowerEastColumnPlanned', np.int32), - ('LowerWestColumnPlanned', np.int32), - ('UpperSouthLinePlanned', np.int32), - ('UpperNorthLinePlanned', np.int32), - ('UpperEastColumnPlanned', np.int32), - ('UpperWestColumnPlanned', np.int32)] + ("LowerSouthLinePlanned", np.int32), + ("LowerNorthLinePlanned", np.int32), + ("LowerEastColumnPlanned", np.int32), + ("LowerWestColumnPlanned", np.int32), + ("UpperSouthLinePlanned", np.int32), + ("UpperNorthLinePlanned", np.int32), + ("UpperEastColumnPlanned", np.int32), + ("UpperWestColumnPlanned", np.int32)] level_15_image_production = [ - ('ImageProcDirection', np.uint8), - ('PixelGenDirection', np.uint8), - ('PlannedChanProcessing', (np.uint8, 12))] + ("ImageProcDirection", np.uint8), + ("PixelGenDirection", np.uint8), + ("PlannedChanProcessing", (np.uint8, 12))] record = [ - ('ProjectionDescription', projection_description), - ('ReferenceGridVIS_IR', reference_grid), - ('ReferenceGridHRV', reference_grid), - ('PlannedCoverageVIS_IR', planned_coverage_vis_ir), - ('PlannedCoverageHRV', planned_coverage_hrv), - ('Level15ImageProduction', level_15_image_production)] + ("ProjectionDescription", projection_description), + ("ReferenceGridVIS_IR", reference_grid), + ("ReferenceGridHRV", reference_grid), + 
("PlannedCoverageVIS_IR", planned_coverage_vis_ir), + ("PlannedCoverageHRV", planned_coverage_hrv), + ("Level15ImageProduction", level_15_image_production)] return record @@ -450,122 +450,122 @@ def image_description(self): def radiometric_processing(self): """Get radiometric processing data.""" rp_summary = [ - ('RadianceLinearization', (bool, 12)), - ('DetectorEqualization', (bool, 12)), - ('OnboardCalibrationResult', (bool, 12)), - ('MPEFCalFeedback', (bool, 12)), - ('MTFAdaptation', (bool, 12)), - ('StrayLightCorrection', (bool, 12))] + ("RadianceLinearization", (bool, 12)), + ("DetectorEqualization", (bool, 12)), + ("OnboardCalibrationResult", (bool, 12)), + ("MPEFCalFeedback", (bool, 12)), + ("MTFAdaptation", (bool, 12)), + ("StrayLightCorrection", (bool, 12))] level_15_image_calibration = [ - ('CalSlope', np.float64), - ('CalOffset', np.float64)] + ("CalSlope", np.float64), + ("CalOffset", np.float64)] time_cuc_size = [ - ('CT1', np.uint8), - ('CT2', np.uint8), - ('CT3', np.uint8), - ('CT4', np.uint8), - ('FT1', np.uint8), - ('FT2', np.uint8), - ('FT3', np.uint8)] + ("CT1", np.uint8), + ("CT2", np.uint8), + ("CT3", np.uint8), + ("CT4", np.uint8), + ("FT1", np.uint8), + ("FT2", np.uint8), + ("FT3", np.uint8)] cold_fp_temperature = [ - ('FCUNominalColdFocalPlaneTemp', np.uint16), - ('FCURedundantColdFocalPlaneTemp', np.uint16)] + ("FCUNominalColdFocalPlaneTemp", np.uint16), + ("FCURedundantColdFocalPlaneTemp", np.uint16)] warm_fp_temperature = [ - ('FCUNominalWarmFocalPlaneVHROTemp', np.uint16), - ('FCURedundantWarmFocalPlaneVHROTemp', np.uint16)] + ("FCUNominalWarmFocalPlaneVHROTemp", np.uint16), + ("FCURedundantWarmFocalPlaneVHROTemp", np.uint16)] scan_mirror_temperature = [ - ('FCUNominalScanMirrorSensor1Temp', np.uint16), - ('FCURedundantScanMirrorSensor1Temp', np.uint16), - ('FCUNominalScanMirrorSensor2Temp', np.uint16), - ('FCURedundantScanMirrorSensor2Temp', np.uint16)] + ("FCUNominalScanMirrorSensor1Temp", np.uint16), + ("FCURedundantScanMirrorSensor1Temp", np.uint16), + ("FCUNominalScanMirrorSensor2Temp", np.uint16), + ("FCURedundantScanMirrorSensor2Temp", np.uint16)] m1m2m3_temperature = [ - ('FCUNominalM1MirrorSensor1Temp', np.uint16), - ('FCURedundantM1MirrorSensor1Temp', np.uint16), - ('FCUNominalM1MirrorSensor2Temp', np.uint16), - ('FCURedundantM1MirrorSensor2Temp', np.uint16), - ('FCUNominalM23AssemblySensor1Temp', np.uint8), - ('FCURedundantM23AssemblySensor1Temp', np.uint8), - ('FCUNominalM23AssemblySensor2Temp', np.uint8), - ('FCURedundantM23AssemblySensor2Temp', np.uint8)] + ("FCUNominalM1MirrorSensor1Temp", np.uint16), + ("FCURedundantM1MirrorSensor1Temp", np.uint16), + ("FCUNominalM1MirrorSensor2Temp", np.uint16), + ("FCURedundantM1MirrorSensor2Temp", np.uint16), + ("FCUNominalM23AssemblySensor1Temp", np.uint8), + ("FCURedundantM23AssemblySensor1Temp", np.uint8), + ("FCUNominalM23AssemblySensor2Temp", np.uint8), + ("FCURedundantM23AssemblySensor2Temp", np.uint8)] baffle_temperature = [ - ('FCUNominalM1BaffleTemp', np.uint16), - ('FCURedundantM1BaffleTemp', np.uint16)] + ("FCUNominalM1BaffleTemp", np.uint16), + ("FCURedundantM1BaffleTemp", np.uint16)] blackbody_temperature = [ - ('FCUNominalBlackBodySensorTemp', np.uint16), - ('FCURedundantBlackBodySensorTemp', np.uint16)] + ("FCUNominalBlackBodySensorTemp", np.uint16), + ("FCURedundantBlackBodySensorTemp", np.uint16)] fcu_mode = [ - ('FCUNominalSMMStatus', 'S2'), - ('FCURedundantSMMStatus', 'S2')] + ("FCUNominalSMMStatus", "S2"), + ("FCURedundantSMMStatus", "S2")] extracted_bb_data = [ - ('NumberOfPixelsUsed', 
np.uint32), - ('MeanCount', np.float32), - ('RMS', np.float32), - ('MaxCount', np.uint16), - ('MinCount', np.uint16), - ('BB_Processing_Slope', np.float64), - ('BB_Processing_Offset', np.float64)] + ("NumberOfPixelsUsed", np.uint32), + ("MeanCount", np.float32), + ("RMS", np.float32), + ("MaxCount", np.uint16), + ("MinCount", np.uint16), + ("BB_Processing_Slope", np.float64), + ("BB_Processing_Offset", np.float64)] bb_related_data = [ - ('OnBoardBBTime', time_cuc_size), - ('MDUOutGain', (np.uint16, 42)), - ('MDUCoarseGain', (np.uint8, 42)), - ('MDUFineGain', (np.uint16, 42)), - ('MDUNumericalOffset', (np.uint16, 42)), - ('PUGain', (np.uint16, 42)), - ('PUOffset', (np.uint16, 27)), - ('PUBias', (np.uint16, 15)), - ('DCRValues', (np.uint8, 63)), - ('X_DeepSpaceWindowPosition', np.int8), - ('ColdFPTemperature', cold_fp_temperature), - ('WarmFPTemperature', warm_fp_temperature), - ('ScanMirrorTemperature', scan_mirror_temperature), - ('M1M2M3Temperature', m1m2m3_temperature), - ('BaffleTemperature', baffle_temperature), - ('BlackBodyTemperature', blackbody_temperature), - ('FCUMode', fcu_mode), - ('ExtractedBBData', (extracted_bb_data, 12))] + ("OnBoardBBTime", time_cuc_size), + ("MDUOutGain", (np.uint16, 42)), + ("MDUCoarseGain", (np.uint8, 42)), + ("MDUFineGain", (np.uint16, 42)), + ("MDUNumericalOffset", (np.uint16, 42)), + ("PUGain", (np.uint16, 42)), + ("PUOffset", (np.uint16, 27)), + ("PUBias", (np.uint16, 15)), + ("DCRValues", (np.uint8, 63)), + ("X_DeepSpaceWindowPosition", np.int8), + ("ColdFPTemperature", cold_fp_temperature), + ("WarmFPTemperature", warm_fp_temperature), + ("ScanMirrorTemperature", scan_mirror_temperature), + ("M1M2M3Temperature", m1m2m3_temperature), + ("BaffleTemperature", baffle_temperature), + ("BlackBodyTemperature", blackbody_temperature), + ("FCUMode", fcu_mode), + ("ExtractedBBData", (extracted_bb_data, 12))] black_body_data_used = [ - ('BBObservationUTC', time_cds_expanded), - ('BBRelatedData', bb_related_data)] + ("BBObservationUTC", time_cds_expanded), + ("BBRelatedData", bb_related_data)] impf_cal_data = [ - ('ImageQualityFlag', np.uint8), - ('ReferenceDataFlag', np.uint8), - ('AbsCalMethod', np.uint8), - ('Pad1', 'S1'), - ('AbsCalWeightVic', np.float32), - ('AbsCalWeightXsat', np.float32), - ('AbsCalCoeff', np.float32), - ('AbsCalError', np.float32), - ('GSICSCalCoeff', np.float32), - ('GSICSCalError', np.float32), - ('GSICSOffsetCount', np.float32)] + ("ImageQualityFlag", np.uint8), + ("ReferenceDataFlag", np.uint8), + ("AbsCalMethod", np.uint8), + ("Pad1", "S1"), + ("AbsCalWeightVic", np.float32), + ("AbsCalWeightXsat", np.float32), + ("AbsCalCoeff", np.float32), + ("AbsCalError", np.float32), + ("GSICSCalCoeff", np.float32), + ("GSICSCalError", np.float32), + ("GSICSOffsetCount", np.float32)] rad_proc_mtf_adaptation = [ - ('VIS_IRMTFCorrectionE_W', (np.float32, (33, 16))), - ('VIS_IRMTFCorrectionN_S', (np.float32, (33, 16))), - ('HRVMTFCorrectionE_W', (np.float32, (9, 16))), - ('HRVMTFCorrectionN_S', (np.float32, (9, 16))), - ('StraylightCorrection', (np.float32, (12, 8, 8)))] + ("VIS_IRMTFCorrectionE_W", (np.float32, (33, 16))), + ("VIS_IRMTFCorrectionN_S", (np.float32, (33, 16))), + ("HRVMTFCorrectionE_W", (np.float32, (9, 16))), + ("HRVMTFCorrectionN_S", (np.float32, (9, 16))), + ("StraylightCorrection", (np.float32, (12, 8, 8)))] record = [ - ('RPSummary', rp_summary), - ('Level15ImageCalibration', (level_15_image_calibration, 12)), - ('BlackBodyDataUsed', black_body_data_used), - ('MPEFCalFeedback', (impf_cal_data, 12)), - ('RadTransform', 
(np.float32, (42, 64))), - ('RadProcMTFAdaptation', rad_proc_mtf_adaptation)] + ("RPSummary", rp_summary), + ("Level15ImageCalibration", (level_15_image_calibration, 12)), + ("BlackBodyDataUsed", black_body_data_used), + ("MPEFCalFeedback", (impf_cal_data, 12)), + ("RadTransform", (np.float32, (42, 64))), + ("RadProcMTFAdaptation", rad_proc_mtf_adaptation)] return record @@ -573,20 +573,20 @@ def radiometric_processing(self): def geometric_processing(self): """Get geometric processing data.""" opt_axis_distances = [ - ('E-WFocalPlane', (np.float32, 42)), - ('N_SFocalPlane', (np.float32, 42))] + ("E-WFocalPlane", (np.float32, 42)), + ("N_SFocalPlane", (np.float32, 42))] earth_model = [ - ('TypeOfEarthModel', np.uint8), - ('EquatorialRadius', np.float64), - ('NorthPolarRadius', np.float64), - ('SouthPolarRadius', np.float64)] + ("TypeOfEarthModel", np.uint8), + ("EquatorialRadius", np.float64), + ("NorthPolarRadius", np.float64), + ("SouthPolarRadius", np.float64)] record = [ - ('OptAxisDistances', opt_axis_distances), - ('EarthModel', earth_model), - ('AtmosphericModel', (np.float32, (12, 360))), - ('ResamplingFunctions', (np.uint8, 12))] + ("OptAxisDistances", opt_axis_distances), + ("EarthModel", earth_model), + ("AtmosphericModel", (np.float32, (12, 360))), + ("ResamplingFunctions", (np.uint8, 12))] return record @@ -594,8 +594,8 @@ def geometric_processing(self): def impf_configuration(self): """Get impf configuration information.""" overall_configuration = [ - ('Issue', np.uint16), - ('Revision', np.uint16) + ("Issue", np.uint16), + ("Revision", np.uint16) ] sw_version = overall_configuration @@ -603,82 +603,82 @@ def impf_configuration(self): info_base_versions = sw_version su_configuration = [ - ('SWVersion', sw_version), - ('InfoBaseVersions', (info_base_versions, 10)) + ("SWVersion", sw_version), + ("InfoBaseVersions", (info_base_versions, 10)) ] su_details = [ - ('SUId', GSDTRecords.gp_su_id), - ('SUIdInstance', np.int8), - ('SUMode', np.uint8), - ('SUState', np.uint8), - ('SUConfiguration', su_configuration) + ("SUId", GSDTRecords.gp_su_id), + ("SUIdInstance", np.int8), + ("SUMode", np.uint8), + ("SUState", np.uint8), + ("SUConfiguration", su_configuration) ] equalisation_params = [ - ('ConstCoeff', np.float32), - ('LinearCoeff', np.float32), - ('QuadraticCoeff', np.float32) + ("ConstCoeff", np.float32), + ("LinearCoeff", np.float32), + ("QuadraticCoeff", np.float32) ] black_body_data_for_warm_start = [ - ('GTotalForMethod1', (np.float64, 12)), - ('GTotalForMethod2', (np.float64, 12)), - ('GTotalForMethod3', (np.float64, 12)), - ('GBackForMethod1', (np.float64, 12)), - ('GBackForMethod2', (np.float64, 12)), - ('GBackForMethod3', (np.float64, 12)), - ('RatioGTotalToGBack', (np.float64, 12)), - ('GainInFrontOpticsCont', (np.float64, 12)), - ('CalibrationConstants', (np.float32, 12)), - ('maxIncidentRadiance', (np.float64, 12)), - ('TimeOfColdObsSeconds', np.float64), - ('TimeOfColdObsNanoSecs', np.float64), - ('IncidenceRadiance', (np.float64, 12)), - ('TempCal', np.float64), - ('TempM1', np.float64), - ('TempScan', np.float64), - ('TempM1Baf', np.float64), - ('TempCalSurround', np.float64) + ("GTotalForMethod1", (np.float64, 12)), + ("GTotalForMethod2", (np.float64, 12)), + ("GTotalForMethod3", (np.float64, 12)), + ("GBackForMethod1", (np.float64, 12)), + ("GBackForMethod2", (np.float64, 12)), + ("GBackForMethod3", (np.float64, 12)), + ("RatioGTotalToGBack", (np.float64, 12)), + ("GainInFrontOpticsCont", (np.float64, 12)), + ("CalibrationConstants", (np.float32, 12)), + 
("maxIncidentRadiance", (np.float64, 12)), + ("TimeOfColdObsSeconds", np.float64), + ("TimeOfColdObsNanoSecs", np.float64), + ("IncidenceRadiance", (np.float64, 12)), + ("TempCal", np.float64), + ("TempM1", np.float64), + ("TempScan", np.float64), + ("TempM1Baf", np.float64), + ("TempCalSurround", np.float64) ] mirror_parameters = [ - ('MaxFeedbackVoltage', np.float64), - ('MinFeedbackVoltage', np.float64), - ('MirrorSlipEstimate', np.float64) + ("MaxFeedbackVoltage", np.float64), + ("MinFeedbackVoltage", np.float64), + ("MirrorSlipEstimate", np.float64) ] hktm_parameters = [ - ('TimeS0Packet', time_cds_short), - ('TimeS1Packet', time_cds_short), - ('TimeS2Packet', time_cds_short), - ('TimeS3Packet', time_cds_short), - ('TimeS4Packet', time_cds_short), - ('TimeS5Packet', time_cds_short), - ('TimeS6Packet', time_cds_short), - ('TimeS7Packet', time_cds_short), - ('TimeS8Packet', time_cds_short), - ('TimeS9Packet', time_cds_short), - ('TimeSYPacket', time_cds_short), - ('TimePSPacket', time_cds_short) + ("TimeS0Packet", time_cds_short), + ("TimeS1Packet", time_cds_short), + ("TimeS2Packet", time_cds_short), + ("TimeS3Packet", time_cds_short), + ("TimeS4Packet", time_cds_short), + ("TimeS5Packet", time_cds_short), + ("TimeS6Packet", time_cds_short), + ("TimeS7Packet", time_cds_short), + ("TimeS8Packet", time_cds_short), + ("TimeS9Packet", time_cds_short), + ("TimeSYPacket", time_cds_short), + ("TimePSPacket", time_cds_short) ] warm_start_params = [ - ('ScanningLaw', (np.float64, 1527)), - ('RadFramesAlignment', (np.float64, 3)), - ('ScanningLawVariation', (np.float32, 2)), - ('EqualisationParams', (equalisation_params, 42)), - ('BlackBodyDataForWarmStart', black_body_data_for_warm_start), - ('MirrorParameters', mirror_parameters), - ('LastSpinPeriod', np.float64), - ('HKTMParameters', hktm_parameters), - ('WSPReserved', (np.uint8, 3312)) + ("ScanningLaw", (np.float64, 1527)), + ("RadFramesAlignment", (np.float64, 3)), + ("ScanningLawVariation", (np.float32, 2)), + ("EqualisationParams", (equalisation_params, 42)), + ("BlackBodyDataForWarmStart", black_body_data_for_warm_start), + ("MirrorParameters", mirror_parameters), + ("LastSpinPeriod", np.float64), + ("HKTMParameters", hktm_parameters), + ("WSPReserved", (np.uint8, 3312)) ] record = [ - ('OverallConfiguration', overall_configuration), - ('SUDetails', (su_details, 50)), - ('WarmStartParams', warm_start_params) + ("OverallConfiguration", overall_configuration), + ("SUDetails", (su_details, 50)), + ("WarmStartParams", warm_start_params) ] return record @@ -695,23 +695,23 @@ def get(self): """Get header record data.""" # 380363 bytes record = [ - ('GP_PK_HEADER', GSDTRecords.gp_pk_header), - ('GP_PK_SH1', GSDTRecords.gp_pk_sh1), - ('15TRAILER', self.seviri_l15_trailer) + ("GP_PK_HEADER", GSDTRecords.gp_pk_header), + ("GP_PK_SH1", GSDTRecords.gp_pk_sh1), + ("15TRAILER", self.seviri_l15_trailer) ] - return np.dtype(record).newbyteorder('>') + return np.dtype(record).newbyteorder(">") @property def seviri_l15_trailer(self): """Get file trailer data.""" record = [ - ('15TrailerVersion', np.uint8), - ('ImageProductionStats', self.image_production_stats), - ('NavigationExtractionResults', self.navigation_extraction_results), - ('RadiometricQuality', self.radiometric_quality), - ('GeometricQuality', self.geometric_quality), - ('TimelinessAndCompleteness', self.timeliness_and_completeness) + ("15TrailerVersion", np.uint8), + ("ImageProductionStats", self.image_production_stats), + ("NavigationExtractionResults", self.navigation_extraction_results), + 
("RadiometricQuality", self.radiometric_quality), + ("GeometricQuality", self.geometric_quality), + ("TimelinessAndCompleteness", self.timeliness_and_completeness) ] return record @@ -721,69 +721,69 @@ def image_production_stats(self): gp_sc_id = GSDTRecords.gp_sc_id actual_scanning_summary = [ - ('NominalImageScanning', np.uint8), - ('ReducedScan', np.uint8), - ('ForwardScanStart', time_cds_short), - ('ForwardScanEnd', time_cds_short) + ("NominalImageScanning", np.uint8), + ("ReducedScan", np.uint8), + ("ForwardScanStart", time_cds_short), + ("ForwardScanEnd", time_cds_short) ] radiometric_behaviour = [ - ('NominalBehaviour', np.uint8), - ('RadScanIrregularity', np.uint8), - ('RadStoppage', np.uint8), - ('RepeatCycleNotCompleted', np.uint8), - ('GainChangeTookPlace', np.uint8), - ('DecontaminationTookPlace', np.uint8), - ('NoBBCalibrationAchieved', np.uint8), - ('IncorrectTemperature', np.uint8), - ('InvalidBBData', np.uint8), - ('InvalidAuxOrHKTMData', np.uint8), - ('RefocusingMechanismActuated', np.uint8), - ('MirrorBackToReferencePos', np.uint8) + ("NominalBehaviour", np.uint8), + ("RadScanIrregularity", np.uint8), + ("RadStoppage", np.uint8), + ("RepeatCycleNotCompleted", np.uint8), + ("GainChangeTookPlace", np.uint8), + ("DecontaminationTookPlace", np.uint8), + ("NoBBCalibrationAchieved", np.uint8), + ("IncorrectTemperature", np.uint8), + ("InvalidBBData", np.uint8), + ("InvalidAuxOrHKTMData", np.uint8), + ("RefocusingMechanismActuated", np.uint8), + ("MirrorBackToReferencePos", np.uint8) ] reception_summary_stats = [ - ('PlannedNumberOfL10Lines', (np.uint32, 12)), - ('NumberOfMissingL10Lines', (np.uint32, 12)), - ('NumberOfCorruptedL10Lines', (np.uint32, 12)), - ('NumberOfReplacedL10Lines', (np.uint32, 12)) + ("PlannedNumberOfL10Lines", (np.uint32, 12)), + ("NumberOfMissingL10Lines", (np.uint32, 12)), + ("NumberOfCorruptedL10Lines", (np.uint32, 12)), + ("NumberOfReplacedL10Lines", (np.uint32, 12)) ] l15_image_validity = [ - ('NominalImage', np.uint8), - ('NonNominalBecauseIncomplete', np.uint8), - ('NonNominalRadiometricQuality', np.uint8), - ('NonNominalGeometricQuality', np.uint8), - ('NonNominalTimeliness', np.uint8), - ('IncompleteL15', np.uint8), + ("NominalImage", np.uint8), + ("NonNominalBecauseIncomplete", np.uint8), + ("NonNominalRadiometricQuality", np.uint8), + ("NonNominalGeometricQuality", np.uint8), + ("NonNominalTimeliness", np.uint8), + ("IncompleteL15", np.uint8), ] actual_l15_coverage_vis_ir = [ - ('SouthernLineActual', np.int32), - ('NorthernLineActual', np.int32), - ('EasternColumnActual', np.int32), - ('WesternColumnActual', np.int32) + ("SouthernLineActual", np.int32), + ("NorthernLineActual", np.int32), + ("EasternColumnActual", np.int32), + ("WesternColumnActual", np.int32) ] actual_l15_coverage_hrv = [ - ('LowerSouthLineActual', np.int32), - ('LowerNorthLineActual', np.int32), - ('LowerEastColumnActual', np.int32), - ('LowerWestColumnActual', np.int32), - ('UpperSouthLineActual', np.int32), - ('UpperNorthLineActual', np.int32), - ('UpperEastColumnActual', np.int32), - ('UpperWestColumnActual', np.int32), + ("LowerSouthLineActual", np.int32), + ("LowerNorthLineActual", np.int32), + ("LowerEastColumnActual", np.int32), + ("LowerWestColumnActual", np.int32), + ("UpperSouthLineActual", np.int32), + ("UpperNorthLineActual", np.int32), + ("UpperEastColumnActual", np.int32), + ("UpperWestColumnActual", np.int32), ] record = [ - ('SatelliteId', gp_sc_id), - ('ActualScanningSummary', actual_scanning_summary), - ('RadiometricBehaviour', radiometric_behaviour), - 
('ReceptionSummaryStats', reception_summary_stats), - ('L15ImageValidity', (l15_image_validity, 12)), - ('ActualL15CoverageVIS_IR', actual_l15_coverage_vis_ir), - ('ActualL15CoverageHRV', actual_l15_coverage_hrv) + ("SatelliteId", gp_sc_id), + ("ActualScanningSummary", actual_scanning_summary), + ("RadiometricBehaviour", radiometric_behaviour), + ("ReceptionSummaryStats", reception_summary_stats), + ("L15ImageValidity", (l15_image_validity, 12)), + ("ActualL15CoverageVIS_IR", actual_l15_coverage_vis_ir), + ("ActualL15CoverageHRV", actual_l15_coverage_hrv) ] return record @@ -792,47 +792,47 @@ def image_production_stats(self): def navigation_extraction_results(self): """Get navigation extraction data.""" horizon_observation = [ - ('HorizonId', np.uint8), - ('Alpha', np.float64), - ('AlphaConfidence', np.float64), - ('Beta', np.float64), - ('BetaConfidence', np.float64), - ('ObservationTime', time_cds), - ('SpinRate', np.float64), - ('AlphaDeviation', np.float64), - ('BetaDeviation', np.float64) + ("HorizonId", np.uint8), + ("Alpha", np.float64), + ("AlphaConfidence", np.float64), + ("Beta", np.float64), + ("BetaConfidence", np.float64), + ("ObservationTime", time_cds), + ("SpinRate", np.float64), + ("AlphaDeviation", np.float64), + ("BetaDeviation", np.float64) ] star_observation = [ - ('StarId', np.uint16), - ('Alpha', np.float64), - ('AlphaConfidence', np.float64), - ('Beta', np.float64), - ('BetaConfidence', np.float64), - ('ObservationTime', time_cds), - ('SpinRate', np.float64), - ('AlphaDeviation', np.float64), - ('BetaDeviation', np.float64) + ("StarId", np.uint16), + ("Alpha", np.float64), + ("AlphaConfidence", np.float64), + ("Beta", np.float64), + ("BetaConfidence", np.float64), + ("ObservationTime", time_cds), + ("SpinRate", np.float64), + ("AlphaDeviation", np.float64), + ("BetaDeviation", np.float64) ] landmark_observation = [ - ('LandmarkId', np.uint16), - ('LandmarkLongitude', np.float64), - ('LandmarkLatitude', np.float64), - ('Alpha', np.float64), - ('AlphaConfidence', np.float64), - ('Beta', np.float64), - ('BetaConfidence', np.float64), - ('ObservationTime', time_cds), - ('SpinRate', np.float64), - ('AlphaDeviation', np.float64), - ('BetaDeviation', np.float64) + ("LandmarkId", np.uint16), + ("LandmarkLongitude", np.float64), + ("LandmarkLatitude", np.float64), + ("Alpha", np.float64), + ("AlphaConfidence", np.float64), + ("Beta", np.float64), + ("BetaConfidence", np.float64), + ("ObservationTime", time_cds), + ("SpinRate", np.float64), + ("AlphaDeviation", np.float64), + ("BetaDeviation", np.float64) ] record = [ - ('ExtractedHorizons', (horizon_observation, 4)), - ('ExtractedStars', (star_observation, 20)), - ('ExtractedLandmarks', (landmark_observation, 50)) + ("ExtractedHorizons", (horizon_observation, 4)), + ("ExtractedStars", (star_observation, 20)), + ("ExtractedLandmarks", (landmark_observation, 50)) ] return record @@ -841,85 +841,85 @@ def navigation_extraction_results(self): def radiometric_quality(self): """Get radiometric quality record data.""" l10_rad_quality = [ - ('FullImageMinimumCount', np.uint16), - ('FullImageMaximumCount', np.uint16), - ('EarthDiskMinimumCount', np.uint16), - ('EarthDiskMaximumCount', np.uint16), - ('MoonMinimumCount', np.uint16), - ('MoonMaximumCount', np.uint16), - ('FullImageMeanCount', np.float32), - ('FullImageStandardDeviation', np.float32), - ('EarthDiskMeanCount', np.float32), - ('EarthDiskStandardDeviation', np.float32), - ('MoonMeanCount', np.float32), - ('MoonStandardDeviation', np.float32), - ('SpaceMeanCount', 
np.float32), - ('SpaceStandardDeviation', np.float32), - ('SESpaceCornerMeanCount', np.float32), - ('SESpaceCornerStandardDeviation', np.float32), - ('SWSpaceCornerMeanCount', np.float32), - ('SWSpaceCornerStandardDeviation', np.float32), - ('NESpaceCornerMeanCount', np.float32), - ('NESpaceCornerStandardDeviation', np.float32), - ('NWSpaceCornerMeanCount', np.float32), - ('NWSpaceCornerStandardDeviation', np.float32), - ('4SpaceCornersMeanCount', np.float32), - ('4SpaceCornersStandardDeviation', np.float32), - ('FullImageHistogram', (np.uint32, 256)), - ('EarthDiskHistogram', (np.uint32, 256)), - ('ImageCentreSquareHistogram', (np.uint32, 256)), - ('SESpaceCornerHistogram', (np.uint32, 128)), - ('SWSpaceCornerHistogram', (np.uint32, 128)), - ('NESpaceCornerHistogram', (np.uint32, 128)), - ('NWSpaceCornerHistogram', (np.uint32, 128)), - ('FullImageEntropy', (np.float32, 3)), - ('EarthDiskEntropy', (np.float32, 3)), - ('ImageCentreSquareEntropy', (np.float32, 3)), - ('SESpaceCornerEntropy', (np.float32, 3)), - ('SWSpaceCornerEntropy', (np.float32, 3)), - ('NESpaceCornerEntropy', (np.float32, 3)), - ('NWSpaceCornerEntropy', (np.float32, 3)), - ('4SpaceCornersEntropy', (np.float32, 3)), - ('ImageCentreSquarePSD_EW', (np.float32, 128)), - ('FullImagePSD_EW', (np.float32, 128)), - ('ImageCentreSquarePSD_NS', (np.float32, 128)), - ('FullImagePSD_NS', (np.float32, 128)) + ("FullImageMinimumCount", np.uint16), + ("FullImageMaximumCount", np.uint16), + ("EarthDiskMinimumCount", np.uint16), + ("EarthDiskMaximumCount", np.uint16), + ("MoonMinimumCount", np.uint16), + ("MoonMaximumCount", np.uint16), + ("FullImageMeanCount", np.float32), + ("FullImageStandardDeviation", np.float32), + ("EarthDiskMeanCount", np.float32), + ("EarthDiskStandardDeviation", np.float32), + ("MoonMeanCount", np.float32), + ("MoonStandardDeviation", np.float32), + ("SpaceMeanCount", np.float32), + ("SpaceStandardDeviation", np.float32), + ("SESpaceCornerMeanCount", np.float32), + ("SESpaceCornerStandardDeviation", np.float32), + ("SWSpaceCornerMeanCount", np.float32), + ("SWSpaceCornerStandardDeviation", np.float32), + ("NESpaceCornerMeanCount", np.float32), + ("NESpaceCornerStandardDeviation", np.float32), + ("NWSpaceCornerMeanCount", np.float32), + ("NWSpaceCornerStandardDeviation", np.float32), + ("4SpaceCornersMeanCount", np.float32), + ("4SpaceCornersStandardDeviation", np.float32), + ("FullImageHistogram", (np.uint32, 256)), + ("EarthDiskHistogram", (np.uint32, 256)), + ("ImageCentreSquareHistogram", (np.uint32, 256)), + ("SESpaceCornerHistogram", (np.uint32, 128)), + ("SWSpaceCornerHistogram", (np.uint32, 128)), + ("NESpaceCornerHistogram", (np.uint32, 128)), + ("NWSpaceCornerHistogram", (np.uint32, 128)), + ("FullImageEntropy", (np.float32, 3)), + ("EarthDiskEntropy", (np.float32, 3)), + ("ImageCentreSquareEntropy", (np.float32, 3)), + ("SESpaceCornerEntropy", (np.float32, 3)), + ("SWSpaceCornerEntropy", (np.float32, 3)), + ("NESpaceCornerEntropy", (np.float32, 3)), + ("NWSpaceCornerEntropy", (np.float32, 3)), + ("4SpaceCornersEntropy", (np.float32, 3)), + ("ImageCentreSquarePSD_EW", (np.float32, 128)), + ("FullImagePSD_EW", (np.float32, 128)), + ("ImageCentreSquarePSD_NS", (np.float32, 128)), + ("FullImagePSD_NS", (np.float32, 128)) ] l15_rad_quality = [ - ('FullImageMinimumCount', np.uint16), - ('FullImageMaximumCount', np.uint16), - ('EarthDiskMinimumCount', np.uint16), - ('EarthDiskMaximumCount', np.uint16), - ('FullImageMeanCount', np.float32), - ('FullImageStandardDeviation', np.float32), - 
('EarthDiskMeanCount', np.float32), - ('EarthDiskStandardDeviation', np.float32), - ('SpaceMeanCount', np.float32), - ('SpaceStandardDeviation', np.float32), - ('FullImageHistogram', (np.uint32, 256)), - ('EarthDiskHistogram', (np.uint32, 256)), - ('ImageCentreSquareHistogram', (np.uint32, 256)), - ('FullImageEntropy', (np.float32, 3)), - ('EarthDiskEntropy', (np.float32, 3)), - ('ImageCentreSquareEntropy', (np.float32, 3)), - ('ImageCentreSquarePSD_EW', (np.float32, 128)), - ('FullImagePSD_EW', (np.float32, 128)), - ('ImageCentreSquarePSD_NS', (np.float32, 128)), - ('FullImagePSD_NS', (np.float32, 128)), - ('SESpaceCornerL15_RMS', np.float32), - ('SESpaceCornerL15_Mean', np.float32), - ('SWSpaceCornerL15_RMS', np.float32), - ('SWSpaceCornerL15_Mean', np.float32), - ('NESpaceCornerL15_RMS', np.float32), - ('NESpaceCornerL15_Mean', np.float32), - ('NWSpaceCornerL15_RMS', np.float32), - ('NWSpaceCornerL15_Mean', np.float32) + ("FullImageMinimumCount", np.uint16), + ("FullImageMaximumCount", np.uint16), + ("EarthDiskMinimumCount", np.uint16), + ("EarthDiskMaximumCount", np.uint16), + ("FullImageMeanCount", np.float32), + ("FullImageStandardDeviation", np.float32), + ("EarthDiskMeanCount", np.float32), + ("EarthDiskStandardDeviation", np.float32), + ("SpaceMeanCount", np.float32), + ("SpaceStandardDeviation", np.float32), + ("FullImageHistogram", (np.uint32, 256)), + ("EarthDiskHistogram", (np.uint32, 256)), + ("ImageCentreSquareHistogram", (np.uint32, 256)), + ("FullImageEntropy", (np.float32, 3)), + ("EarthDiskEntropy", (np.float32, 3)), + ("ImageCentreSquareEntropy", (np.float32, 3)), + ("ImageCentreSquarePSD_EW", (np.float32, 128)), + ("FullImagePSD_EW", (np.float32, 128)), + ("ImageCentreSquarePSD_NS", (np.float32, 128)), + ("FullImagePSD_NS", (np.float32, 128)), + ("SESpaceCornerL15_RMS", np.float32), + ("SESpaceCornerL15_Mean", np.float32), + ("SWSpaceCornerL15_RMS", np.float32), + ("SWSpaceCornerL15_Mean", np.float32), + ("NESpaceCornerL15_RMS", np.float32), + ("NESpaceCornerL15_Mean", np.float32), + ("NWSpaceCornerL15_RMS", np.float32), + ("NWSpaceCornerL15_Mean", np.float32) ] record = [ - ('L10RadQuality', (l10_rad_quality, 42)), - ('L15RadQuality', (l15_rad_quality, 12)) + ("L10RadQuality", (l10_rad_quality, 42)), + ("L15RadQuality", (l15_rad_quality, 12)) ] return record @@ -928,19 +928,19 @@ def radiometric_quality(self): def geometric_quality(self): """Get geometric quality record data.""" absolute_accuracy = [ - ('QualityInfoValidity', np.uint8), - ('EastWestAccuracyRMS', np.float32), - ('NorthSouthAccuracyRMS', np.float32), - ('MagnitudeRMS', np.float32), - ('EastWestUncertaintyRMS', np.float32), - ('NorthSouthUncertaintyRMS', np.float32), - ('MagnitudeUncertaintyRMS', np.float32), - ('EastWestMaxDeviation', np.float32), - ('NorthSouthMaxDeviation', np.float32), - ('MagnitudeMaxDeviation', np.float32), - ('EastWestUncertaintyMax', np.float32), - ('NorthSouthUncertaintyMax', np.float32), - ('MagnitudeUncertaintyMax', np.float32) + ("QualityInfoValidity", np.uint8), + ("EastWestAccuracyRMS", np.float32), + ("NorthSouthAccuracyRMS", np.float32), + ("MagnitudeRMS", np.float32), + ("EastWestUncertaintyRMS", np.float32), + ("NorthSouthUncertaintyRMS", np.float32), + ("MagnitudeUncertaintyRMS", np.float32), + ("EastWestMaxDeviation", np.float32), + ("NorthSouthMaxDeviation", np.float32), + ("MagnitudeMaxDeviation", np.float32), + ("EastWestUncertaintyMax", np.float32), + ("NorthSouthUncertaintyMax", np.float32), + ("MagnitudeUncertaintyMax", np.float32) ] relative_accuracy = 
absolute_accuracy @@ -948,35 +948,35 @@ def geometric_quality(self): pixels_16_relative_accuracy = absolute_accuracy misregistration_residuals = [ - ('QualityInfoValidity', np.uint8), - ('EastWestResidual', np.float32), - ('NorthSouthResidual', np.float32), - ('EastWestUncertainty', np.float32), - ('NorthSouthUncertainty', np.float32), - ('EastWestRMS', np.float32), - ('NorthSouthRMS', np.float32), - ('EastWestMagnitude', np.float32), - ('NorthSouthMagnitude', np.float32), - ('EastWestMagnitudeUncertainty', np.float32), - ('NorthSouthMagnitudeUncertainty', np.float32) + ("QualityInfoValidity", np.uint8), + ("EastWestResidual", np.float32), + ("NorthSouthResidual", np.float32), + ("EastWestUncertainty", np.float32), + ("NorthSouthUncertainty", np.float32), + ("EastWestRMS", np.float32), + ("NorthSouthRMS", np.float32), + ("EastWestMagnitude", np.float32), + ("NorthSouthMagnitude", np.float32), + ("EastWestMagnitudeUncertainty", np.float32), + ("NorthSouthMagnitudeUncertainty", np.float32) ] geometric_quality_status = [ - ('QualityNominal', np.uint8), - ('NominalAbsolute', np.uint8), - ('NominalRelativeToPreviousImage', np.uint8), - ('NominalForREL500', np.uint8), - ('NominalForREL16', np.uint8), - ('NominalForResMisreg', np.uint8) + ("QualityNominal", np.uint8), + ("NominalAbsolute", np.uint8), + ("NominalRelativeToPreviousImage", np.uint8), + ("NominalForREL500", np.uint8), + ("NominalForREL16", np.uint8), + ("NominalForResMisreg", np.uint8) ] record = [ - ('AbsoluteAccuracy', (absolute_accuracy, 12)), - ('RelativeAccuracy', (relative_accuracy, 12)), - ('500PixelsRelativeAccuracy', (pixels_500_relative_accuracy, 12)), - ('16PixelsRelativeAccuracy', (pixels_16_relative_accuracy, 12)), - ('MisregistrationResiduals', (misregistration_residuals, 12)), - ('GeometricQualityStatus', (geometric_quality_status, 12)) + ("AbsoluteAccuracy", (absolute_accuracy, 12)), + ("RelativeAccuracy", (relative_accuracy, 12)), + ("500PixelsRelativeAccuracy", (pixels_500_relative_accuracy, 12)), + ("16PixelsRelativeAccuracy", (pixels_16_relative_accuracy, 12)), + ("MisregistrationResiduals", (misregistration_residuals, 12)), + ("GeometricQualityStatus", (geometric_quality_status, 12)) ] return record @@ -985,22 +985,22 @@ def geometric_quality(self): def timeliness_and_completeness(self): """Get time and completeness record data.""" timeliness = [ - ('MaxDelay', np.float32), - ('MinDelay', np.float32), - ('MeanDelay', np.float32) + ("MaxDelay", np.float32), + ("MinDelay", np.float32), + ("MeanDelay", np.float32) ] completeness = [ - ('PlannedL15ImageLines', np.uint16), - ('GeneratedL15ImageLines', np.uint16), - ('ValidL15ImageLines', np.uint16), - ('DummyL15ImageLines', np.uint16), - ('CorruptedL15ImageLines', np.uint16) + ("PlannedL15ImageLines", np.uint16), + ("GeneratedL15ImageLines", np.uint16), + ("ValidL15ImageLines", np.uint16), + ("DummyL15ImageLines", np.uint16), + ("CorruptedL15ImageLines", np.uint16) ] record = [ - ('Timeliness', timeliness), - ('Completeness', (completeness, 12)) + ("Timeliness", timeliness), + ("Completeness", (completeness, 12)) ] return record @@ -1013,15 +1013,15 @@ def get(self): """Get record data array.""" # X bytes record = [ - ('SatelliteStatus', self.satellite_status), - ('ImageAcquisition', self.image_acquisition), - ('CelestialEvents', self.celestial_events), - ('ImageDescription', self.image_description), - ('RadiometricProcessing', self.radiometric_processing), - ('GeometricProcessing', self.geometric_processing) + ("SatelliteStatus", self.satellite_status), + 
("ImageAcquisition", self.image_acquisition), + ("CelestialEvents", self.celestial_events), + ("ImageDescription", self.image_description), + ("RadiometricProcessing", self.radiometric_processing), + ("GeometricProcessing", self.geometric_processing) ] - return np.dtype(record).newbyteorder('>') + return np.dtype(record).newbyteorder(">") def get_native_header(with_archive_header=True): @@ -1035,22 +1035,22 @@ def get_native_header(with_archive_header=True): DEFAULT_15_SECONDARY_PRODUCT_HEADER = { - 'NorthLineSelectedRectangle': {'Value': VISIR_NUM_LINES}, - 'SouthLineSelectedRectangle': {'Value': 1}, - 'EastColumnSelectedRectangle': {'Value': 1}, - 'WestColumnSelectedRectangle': {'Value': VISIR_NUM_COLUMNS}, - 'NumberColumnsVISIR': {'Value': VISIR_NUM_COLUMNS}, - 'NumberLinesVISIR': {'Value': VISIR_NUM_LINES}, - 'NumberColumnsHRV': {'Value': HRV_NUM_COLUMNS}, - 'NumberLinesHRV': {'Value': HRV_NUM_LINES}, - 'SelectedBandIDs': {'Value': 'XXXXXXXXXXXX'} + "NorthLineSelectedRectangle": {"Value": VISIR_NUM_LINES}, + "SouthLineSelectedRectangle": {"Value": 1}, + "EastColumnSelectedRectangle": {"Value": 1}, + "WestColumnSelectedRectangle": {"Value": VISIR_NUM_COLUMNS}, + "NumberColumnsVISIR": {"Value": VISIR_NUM_COLUMNS}, + "NumberLinesVISIR": {"Value": VISIR_NUM_LINES}, + "NumberColumnsHRV": {"Value": HRV_NUM_COLUMNS}, + "NumberLinesHRV": {"Value": HRV_NUM_LINES}, + "SelectedBandIDs": {"Value": "XXXXXXXXXXXX"} } """Default secondary product header for files containing all channels.""" hrit_epilogue = np.dtype( - Msg15NativeTrailerRecord().seviri_l15_trailer).newbyteorder('>') + Msg15NativeTrailerRecord().seviri_l15_trailer).newbyteorder(">") hrit_prologue = HritPrologue().get() impf_configuration = np.dtype( - L15DataHeaderRecord().impf_configuration).newbyteorder('>') + L15DataHeaderRecord().impf_configuration).newbyteorder(">") native_trailer = Msg15NativeTrailerRecord().get() diff --git a/satpy/readers/seviri_l1b_nc.py b/satpy/readers/seviri_l1b_nc.py index ae56053114..82e3b15297 100644 --- a/satpy/readers/seviri_l1b_nc.py +++ b/satpy/readers/seviri_l1b_nc.py @@ -41,7 +41,7 @@ ) from satpy.utils import get_legacy_chunk_size -logger = logging.getLogger('nc_msg') +logger = logging.getLogger("nc_msg") CHUNK_SIZE = get_legacy_chunk_size() @@ -73,9 +73,9 @@ def __init__(self, filename, filename_info, filetype_info, @property def _repeat_cycle_duration(self): """Get repeat cycle duration from the metadata.""" - if self.nc.attrs['nominal_image_scanning'] == 'T': + if self.nc.attrs["nominal_image_scanning"] == "T": return 15 - elif self.nc.attrs['reduced_scanning'] == 'T': + elif self.nc.attrs["reduced_scanning"] == "T": return 5 @property @@ -114,55 +114,55 @@ def end_time(self): def nc(self): """Read the file.""" return open_dataset(self.filename, decode_cf=True, mask_and_scale=False, - chunks=CHUNK_SIZE).rename({'num_columns_vis_ir': 'x', - 'num_rows_vis_ir': 'y'}) + chunks=CHUNK_SIZE).rename({"num_columns_vis_ir": "x", + "num_rows_vis_ir": "y"}) def get_metadata(self): """Get metadata.""" # Obtain some area definition attributes - equatorial_radius = self.nc.attrs['equatorial_radius'] * 1000. 
- polar_radius = (self.nc.attrs['north_polar_radius'] * 1000 + self.nc.attrs['south_polar_radius'] * 1000) * 0.5 - ssp_lon = self.nc.attrs['longitude_of_SSP'] - self.mda['vis_ir_grid_origin'] = self.nc.attrs['vis_ir_grid_origin'] - self.mda['vis_ir_column_dir_grid_step'] = self.nc.attrs['vis_ir_column_dir_grid_step'] * 1000.0 - self.mda['vis_ir_line_dir_grid_step'] = self.nc.attrs['vis_ir_line_dir_grid_step'] * 1000.0 + equatorial_radius = self.nc.attrs["equatorial_radius"] * 1000. + polar_radius = (self.nc.attrs["north_polar_radius"] * 1000 + self.nc.attrs["south_polar_radius"] * 1000) * 0.5 + ssp_lon = self.nc.attrs["longitude_of_SSP"] + self.mda["vis_ir_grid_origin"] = self.nc.attrs["vis_ir_grid_origin"] + self.mda["vis_ir_column_dir_grid_step"] = self.nc.attrs["vis_ir_column_dir_grid_step"] * 1000.0 + self.mda["vis_ir_line_dir_grid_step"] = self.nc.attrs["vis_ir_line_dir_grid_step"] * 1000.0 # if FSFile is used h5netcdf engine is used which outputs arrays instead of floats for attributes if isinstance(equatorial_radius, np.ndarray): equatorial_radius = equatorial_radius.item() polar_radius = polar_radius.item() ssp_lon = ssp_lon.item() - self.mda['vis_ir_column_dir_grid_step'] = self.mda['vis_ir_column_dir_grid_step'].item() - self.mda['vis_ir_line_dir_grid_step'] = self.mda['vis_ir_line_dir_grid_step'].item() + self.mda["vis_ir_column_dir_grid_step"] = self.mda["vis_ir_column_dir_grid_step"].item() + self.mda["vis_ir_line_dir_grid_step"] = self.mda["vis_ir_line_dir_grid_step"].item() - self.mda['projection_parameters'] = {'a': equatorial_radius, - 'b': polar_radius, - 'h': 35785831.00, - 'ssp_longitude': ssp_lon} + self.mda["projection_parameters"] = {"a": equatorial_radius, + "b": polar_radius, + "h": 35785831.00, + "ssp_longitude": ssp_lon} - self.mda['number_of_lines'] = int(self.nc.dims['y']) - self.mda['number_of_columns'] = int(self.nc.dims['x']) + self.mda["number_of_lines"] = int(self.nc.dims["y"]) + self.mda["number_of_columns"] = int(self.nc.dims["x"]) # only needed for HRV channel which is not implemented yet # self.mda['hrv_number_of_lines'] = int(self.nc.dims['num_rows_hrv']) # self.mda['hrv_number_of_columns'] = int(self.nc.dims['num_columns_hrv']) self.deltaSt = self.reference + datetime.timedelta( - days=int(self.nc.attrs['true_repeat_cycle_start_day']), - milliseconds=int(self.nc.attrs['true_repeat_cycle_start_mi_sec'])) + days=int(self.nc.attrs["true_repeat_cycle_start_day"]), + milliseconds=int(self.nc.attrs["true_repeat_cycle_start_mi_sec"])) self.deltaEnd = self.reference + datetime.timedelta( - days=int(self.nc.attrs['planned_repeat_cycle_end_day']), - milliseconds=int(self.nc.attrs['planned_repeat_cycle_end_mi_sec'])) + days=int(self.nc.attrs["planned_repeat_cycle_end_day"]), + milliseconds=int(self.nc.attrs["planned_repeat_cycle_end_mi_sec"])) - self.north = int(self.nc.attrs['north_most_line']) - self.east = int(self.nc.attrs['east_most_pixel']) - self.west = int(self.nc.attrs['west_most_pixel']) - self.south = int(self.nc.attrs['south_most_line']) - self.platform_id = int(self.nc.attrs['satellite_id']) + self.north = int(self.nc.attrs["north_most_line"]) + self.east = int(self.nc.attrs["east_most_pixel"]) + self.west = int(self.nc.attrs["west_most_pixel"]) + self.south = int(self.nc.attrs["south_most_line"]) + self.platform_id = int(self.nc.attrs["satellite_id"]) def get_dataset(self, dataset_id, dataset_info): """Get the dataset.""" - dataset = self.nc[dataset_info['nc_key']] + dataset = self.nc[dataset_info["nc_key"]] # Correct for the scan line order # 
TODO: Move _add_scanline_acq_time() call to the end of the method @@ -171,7 +171,7 @@ def get_dataset(self, dataset_id, dataset_info): dataset = dataset.sel(y=slice(None, None, -1)) dataset = self.calibrate(dataset, dataset_id) - is_calibration = dataset_id['calibration'] in ['radiance', 'reflectance', 'brightness_temperature'] + is_calibration = dataset_id["calibration"] in ["radiance", "reflectance", "brightness_temperature"] if (is_calibration and self.mask_bad_quality_scan_lines): # noqa: E129 dataset = self._mask_bad_quality(dataset, dataset_info) @@ -180,17 +180,17 @@ def get_dataset(self, dataset_id, dataset_info): def calibrate(self, dataset, dataset_id): """Calibrate the data.""" - channel = dataset_id['name'] - calibration = dataset_id['calibration'] + channel = dataset_id["name"] + calibration = dataset_id["calibration"] - if dataset_id['calibration'] == 'counts': - dataset.attrs['_FillValue'] = 0 + if dataset_id["calibration"] == "counts": + dataset.attrs["_FillValue"] = 0 calib = SEVIRICalibrationHandler( platform_id=int(self.platform_id), channel_name=channel, coefs=self._get_calib_coefs(dataset, channel), - calib_mode='NOMINAL', + calib_mode="NOMINAL", scan_time=self.observation_start_time ) @@ -199,59 +199,59 @@ def calibrate(self, dataset, dataset_id): def _get_calib_coefs(self, dataset, channel): """Get coefficients for calibration from counts to radiance.""" band_idx = list(CHANNEL_NAMES.values()).index(channel) - offset = dataset.attrs['add_offset'].astype('float32') - gain = dataset.attrs['scale_factor'].astype('float32') + offset = dataset.attrs["add_offset"].astype("float32") + gain = dataset.attrs["scale_factor"].astype("float32") # Only one calibration available here return { - 'coefs': { - 'NOMINAL': { - 'gain': gain, - 'offset': offset + "coefs": { + "NOMINAL": { + "gain": gain, + "offset": offset }, - 'EXTERNAL': self.ext_calib_coefs.get(channel, {}) + "EXTERNAL": self.ext_calib_coefs.get(channel, {}) }, - 'radiance_type': self.nc['planned_chan_processing'].values[band_idx] + "radiance_type": self.nc["planned_chan_processing"].values[band_idx] } def _mask_bad_quality(self, dataset, dataset_info): """Mask scanlines with bad quality.""" - ch_number = int(dataset_info['nc_key'][2:]) - line_validity = self.nc['channel_data_visir_data_line_validity'][:, ch_number - 1].data - line_geometric_quality = self.nc['channel_data_visir_data_line_geometric_quality'][:, ch_number - 1].data - line_radiometric_quality = self.nc['channel_data_visir_data_line_radiometric_quality'][:, ch_number - 1].data + ch_number = int(dataset_info["nc_key"][2:]) + line_validity = self.nc["channel_data_visir_data_line_validity"][:, ch_number - 1].data + line_geometric_quality = self.nc["channel_data_visir_data_line_geometric_quality"][:, ch_number - 1].data + line_radiometric_quality = self.nc["channel_data_visir_data_line_radiometric_quality"][:, ch_number - 1].data return mask_bad_quality(dataset, line_validity, line_geometric_quality, line_radiometric_quality) def _update_attrs(self, dataset, dataset_info): """Update dataset attributes.""" - dataset.attrs.update(self.nc[dataset_info['nc_key']].attrs) + dataset.attrs.update(self.nc[dataset_info["nc_key"]].attrs) dataset.attrs.update(dataset_info) - dataset.attrs['platform_name'] = "Meteosat-" + SATNUM[self.platform_id] - dataset.attrs['sensor'] = 'seviri' - dataset.attrs['orbital_parameters'] = { - 'projection_longitude': self.mda['projection_parameters']['ssp_longitude'], - 'projection_latitude': 0., - 'projection_altitude': 
self.mda['projection_parameters']['h'], - 'satellite_nominal_longitude': float( - self.nc.attrs['nominal_longitude'] + dataset.attrs["platform_name"] = "Meteosat-" + SATNUM[self.platform_id] + dataset.attrs["sensor"] = "seviri" + dataset.attrs["orbital_parameters"] = { + "projection_longitude": self.mda["projection_parameters"]["ssp_longitude"], + "projection_latitude": 0., + "projection_altitude": self.mda["projection_parameters"]["h"], + "satellite_nominal_longitude": float( + self.nc.attrs["nominal_longitude"] ), - 'satellite_nominal_latitude': 0.0, + "satellite_nominal_latitude": 0.0, } - dataset.attrs['time_parameters'] = { - 'nominal_start_time': self.nominal_start_time, - 'nominal_end_time': self.nominal_end_time, - 'observation_start_time': self.observation_start_time, - 'observation_end_time': self.observation_end_time, + dataset.attrs["time_parameters"] = { + "nominal_start_time": self.nominal_start_time, + "nominal_end_time": self.nominal_end_time, + "observation_start_time": self.observation_start_time, + "observation_end_time": self.observation_end_time, } try: actual_lon, actual_lat, actual_alt = self.satpos - dataset.attrs['orbital_parameters'].update({ - 'satellite_actual_longitude': actual_lon, - 'satellite_actual_latitude': actual_lat, - 'satellite_actual_altitude': actual_alt, + dataset.attrs["orbital_parameters"].update({ + "satellite_actual_longitude": actual_lon, + "satellite_actual_latitude": actual_lat, + "satellite_actual_altitude": actual_alt, }) except NoValidOrbitParams as err: logger.warning(err) - dataset.attrs['georef_offset_corrected'] = self._get_earth_model() == 2 + dataset.attrs["georef_offset_corrected"] = self._get_earth_model() == 2 # remove attributes from original file which don't apply anymore strip_attrs = ["comment", "long_name", "nc_key", "scale_factor", "add_offset", "valid_min", "valid_max"] @@ -277,30 +277,30 @@ def get_area_def(self, dataset_id): """ pdict = {} - pdict['a'] = self.mda['projection_parameters']['a'] - pdict['b'] = self.mda['projection_parameters']['b'] - pdict['h'] = self.mda['projection_parameters']['h'] - pdict['ssp_lon'] = self.mda['projection_parameters']['ssp_longitude'] - - area_naming_input_dict = {'platform_name': 'msg', - 'instrument_name': 'seviri', - 'resolution': int(dataset_id['resolution']) + pdict["a"] = self.mda["projection_parameters"]["a"] + pdict["b"] = self.mda["projection_parameters"]["b"] + pdict["h"] = self.mda["projection_parameters"]["h"] + pdict["ssp_lon"] = self.mda["projection_parameters"]["ssp_longitude"] + + area_naming_input_dict = {"platform_name": "msg", + "instrument_name": "seviri", + "resolution": int(dataset_id["resolution"]) } area_naming = get_geos_area_naming({**area_naming_input_dict, - **get_service_mode('seviri', pdict['ssp_lon'])}) - - if dataset_id['name'] == 'HRV': - pdict['nlines'] = self.mda['hrv_number_of_lines'] - pdict['ncols'] = self.mda['hrv_number_of_columns'] - pdict['a_name'] = area_naming['area_id'] - pdict['a_desc'] = area_naming['description'] - pdict['p_id'] = "" + **get_service_mode("seviri", pdict["ssp_lon"])}) + + if dataset_id["name"] == "HRV": + pdict["nlines"] = self.mda["hrv_number_of_lines"] + pdict["ncols"] = self.mda["hrv_number_of_columns"] + pdict["a_name"] = area_naming["area_id"] + pdict["a_desc"] = area_naming["description"] + pdict["p_id"] = "" else: - pdict['nlines'] = self.mda['number_of_lines'] - pdict['ncols'] = self.mda['number_of_columns'] - pdict['a_name'] = area_naming['area_id'] - pdict['a_desc'] = area_naming['description'] - pdict['p_id'] = 
"" + pdict["nlines"] = self.mda["number_of_lines"] + pdict["ncols"] = self.mda["number_of_columns"] + pdict["a_name"] = area_naming["area_id"] + pdict["a_desc"] = area_naming["description"] + pdict["p_id"] = "" area = get_area_definition(pdict, self.get_area_extent(dataset_id)) @@ -310,20 +310,20 @@ def get_area_extent(self, dsid): """Get the area extent.""" # following calculations assume grid origin is south-east corner # section 7.2.4 of MSG Level 1.5 Image Data Format Description - origins = {0: 'NW', 1: 'SW', 2: 'SE', 3: 'NE'} - grid_origin = self.mda['vis_ir_grid_origin'] + origins = {0: "NW", 1: "SW", 2: "SE", 3: "NE"} + grid_origin = self.mda["vis_ir_grid_origin"] grid_origin = int(grid_origin, 16) if grid_origin != 2: raise NotImplementedError( - 'Grid origin not supported number: {}, {} corner' + "Grid origin not supported number: {}, {} corner" .format(grid_origin, origins[grid_origin]) ) center_point = 3712 / 2 - column_step = self.mda['vis_ir_column_dir_grid_step'] + column_step = self.mda["vis_ir_column_dir_grid_step"] - line_step = self.mda['vis_ir_line_dir_grid_step'] + line_step = self.mda["vis_ir_line_dir_grid_step"] # check for Earth model as this affects the north-south and # west-east offsets @@ -337,7 +337,7 @@ def get_area_extent(self, dsid): we_offset = 0.5 # west +ve else: raise NotImplementedError( - 'unrecognised earth model: {}'.format(earth_model) + "unrecognised earth model: {}".format(earth_model) ) # section 3.1.5 of MSG Level 1.5 Image Data Format Description ll_c = (center_point - self.west - 0.5 + we_offset) * column_step @@ -349,7 +349,7 @@ def get_area_extent(self, dsid): return area_extent def _add_scanline_acq_time(self, dataset, dataset_id): - if dataset_id['name'] == 'HRV': + if dataset_id["name"] == "HRV": # TODO: Enable once HRV reading has been fixed. 
return # days, msecs = self._get_acq_time_hrv() @@ -359,16 +359,16 @@ def _add_scanline_acq_time(self, dataset, dataset_id): add_scanline_acq_time(dataset, acq_time) def _get_acq_time_hrv(self): - day_key = 'channel_data_hrv_data_l10_line_mean_acquisition_time_day' - msec_key = 'channel_data_hrv_data_l10_line_mean_acquisition_msec' + day_key = "channel_data_hrv_data_l10_line_mean_acquisition_time_day" + msec_key = "channel_data_hrv_data_l10_line_mean_acquisition_msec" days = self.nc[day_key].isel(channels_hrv_dim=0) msecs = self.nc[msec_key].isel(channels_hrv_dim=0) return days, msecs def _get_acq_time_visir(self, dataset_id): - band_idx = list(CHANNEL_NAMES.values()).index(dataset_id['name']) - day_key = 'channel_data_visir_data_l10_line_mean_acquisition_time_day' - msec_key = 'channel_data_visir_data_l10_line_mean_acquisition_msec' + band_idx = list(CHANNEL_NAMES.values()).index(dataset_id["name"]) + day_key = "channel_data_visir_data_l10_line_mean_acquisition_time_day" + msec_key = "channel_data_visir_data_l10_line_mean_acquisition_msec" days = self.nc[day_key].isel(channels_vis_ir_dim=band_idx) msecs = self.nc[msec_key].isel(channels_vis_ir_dim=band_idx) return days, msecs @@ -382,31 +382,31 @@ def satpos(self): Returns: Longitude [deg east], Latitude [deg north] and Altitude [m] """ start_times_poly = get_cds_time( - days=self.nc['orbit_polynomial_start_time_day'].values, - msecs=self.nc['orbit_polynomial_start_time_msec'].values + days=self.nc["orbit_polynomial_start_time_day"].values, + msecs=self.nc["orbit_polynomial_start_time_msec"].values ) end_times_poly = get_cds_time( - days=self.nc['orbit_polynomial_end_time_day'].values, - msecs=self.nc['orbit_polynomial_end_time_msec'].values + days=self.nc["orbit_polynomial_end_time_day"].values, + msecs=self.nc["orbit_polynomial_end_time_msec"].values ) orbit_polynomials = { - 'StartTime': np.array([start_times_poly]), - 'EndTime': np.array([end_times_poly]), - 'X': self.nc['orbit_polynomial_x'].values, - 'Y': self.nc['orbit_polynomial_y'].values, - 'Z': self.nc['orbit_polynomial_z'].values, + "StartTime": np.array([start_times_poly]), + "EndTime": np.array([end_times_poly]), + "X": self.nc["orbit_polynomial_x"].values, + "Y": self.nc["orbit_polynomial_y"].values, + "Z": self.nc["orbit_polynomial_z"].values, } poly_finder = OrbitPolynomialFinder(orbit_polynomials) orbit_polynomial = poly_finder.get_orbit_polynomial(self.start_time) return get_satpos( orbit_polynomial=orbit_polynomial, time=self.start_time, - semi_major_axis=self.mda['projection_parameters']['a'], - semi_minor_axis=self.mda['projection_parameters']['b'], + semi_major_axis=self.mda["projection_parameters"]["a"], + semi_minor_axis=self.mda["projection_parameters"]["b"], ) def _get_earth_model(self): - return int(self.nc.attrs['type_of_earth_model'], 16) + return int(self.nc.attrs["type_of_earth_model"], 16) class NCSEVIRIHRVFileHandler(NCSEVIRIFileHandler, SEVIRICalibrationHandler): diff --git a/satpy/readers/seviri_l2_bufr.py b/satpy/readers/seviri_l2_bufr.py index cb38f44acf..3b7188945c 100644 --- a/satpy/readers/seviri_l2_bufr.py +++ b/satpy/readers/seviri_l2_bufr.py @@ -44,15 +44,15 @@ "Missing eccodes-python and/or eccodes C-library installation. 
Use conda to install eccodes") CHUNK_SIZE = get_legacy_chunk_size() -logger = logging.getLogger('SeviriL2Bufr') +logger = logging.getLogger("SeviriL2Bufr") -data_center_dict = {55: {'ssp': 'E0415', 'name': '08'}, 56: {'ssp': 'E0455', 'name': '09'}, - 57: {'ssp': 'E0095', 'name': '10'}, 70: {'ssp': 'E0000', 'name': '11'}} +data_center_dict = {55: {"ssp": "E0415", "name": "08"}, 56: {"ssp": "E0455", "name": "09"}, + 57: {"ssp": "E0095", "name": "10"}, 70: {"ssp": "E0000", "name": "11"}} -seg_size_dict = {'seviri_l2_bufr_asr': 16, 'seviri_l2_bufr_cla': 16, - 'seviri_l2_bufr_csr': 16, 'seviri_l2_bufr_gii': 3, - 'seviri_l2_bufr_thu': 16, 'seviri_l2_bufr_toz': 3, - 'seviri_l2_bufr_amv': 24} +seg_size_dict = {"seviri_l2_bufr_asr": 16, "seviri_l2_bufr_cla": 16, + "seviri_l2_bufr_csr": 16, "seviri_l2_bufr_gii": 3, + "seviri_l2_bufr_thu": 16, "seviri_l2_bufr_toz": 3, + "seviri_l2_bufr_amv": 24} class SeviriL2BufrFileHandler(BaseFileHandler): @@ -83,39 +83,39 @@ class SeviriL2BufrFileHandler(BaseFileHandler): """ def __init__(self, filename, filename_info, filetype_info, with_area_definition=False, - rectification_longitude='default', **kwargs): + rectification_longitude="default", **kwargs): """Initialise the file handler for SEVIRI L2 BUFR data.""" super(SeviriL2BufrFileHandler, self).__init__(filename, filename_info, filetype_info) - if ('server' in filename_info): + if ("server" in filename_info): # EUMETSAT Offline Bufr product self.mpef_header = self._read_mpef_header() else: # Product was retrieved from the EUMETSAT Data Center - timeStr = self.get_attribute('typicalDate')+self.get_attribute('typicalTime') + timeStr = self.get_attribute("typicalDate")+self.get_attribute("typicalTime") buf_start_time = datetime.strptime(timeStr, "%Y%m%d%H%M%S") - sc_id = self.get_attribute('satelliteIdentifier') + sc_id = self.get_attribute("satelliteIdentifier") self.mpef_header = {} - self.mpef_header['NominalTime'] = buf_start_time - self.mpef_header['SpacecraftName'] = data_center_dict[sc_id]['name'] - self.mpef_header['RectificationLongitude'] = data_center_dict[sc_id]['ssp'] + self.mpef_header["NominalTime"] = buf_start_time + self.mpef_header["SpacecraftName"] = data_center_dict[sc_id]["name"] + self.mpef_header["RectificationLongitude"] = data_center_dict[sc_id]["ssp"] - if rectification_longitude != 'default': - self.mpef_header['RectificationLongitude'] = f'E{int(rectification_longitude * 10):04d}' + if rectification_longitude != "default": + self.mpef_header["RectificationLongitude"] = f"E{int(rectification_longitude * 10):04d}" self.with_adef = with_area_definition - if self.with_adef and filetype_info['file_type'] == 'seviri_l2_bufr_amv': + if self.with_adef and filetype_info["file_type"] == "seviri_l2_bufr_amv": logging.warning("AMV BUFR data cannot be loaded with an area definition. Setting self.with_def = False.") self.with_adef = False - self.seg_size = seg_size_dict[filetype_info['file_type']] + self.seg_size = seg_size_dict[filetype_info["file_type"]] @property def start_time(self): """Return the repeat cycle start time.""" - return self.mpef_header['NominalTime'] + return self.mpef_header["NominalTime"] @property def end_time(self): @@ -125,13 +125,13 @@ def end_time(self): @property def platform_name(self): """Return spacecraft name.""" - return 'MET{}'.format(self.mpef_header['SpacecraftName']) + return "MET{}".format(self.mpef_header["SpacecraftName"]) @property def ssp_lon(self): """Return subsatellite point longitude.""" # e.g. 
E0415
-        ssp_lon = self.mpef_header['RectificationLongitude']
+        ssp_lon = self.mpef_header["RectificationLongitude"]
         return float(ssp_lon[1:])/10.
 
     def get_area_def(self, key):
@@ -157,7 +157,7 @@ def get_attribute(self, key):
             bufr = ec.codes_bufr_new_from_file(fh)
             if bufr is None:
                 break
-            ec.codes_set(bufr, 'unpack', 1)
+            ec.codes_set(bufr, "unpack", 1)
             attr = ec.codes_get(bufr, key)
             ec.codes_release(bufr)
 
@@ -173,7 +173,7 @@ def get_array(self, key):
                 if bufr is None:
                     break
 
-                ec.codes_set(bufr, 'unpack', 1)
+                ec.codes_set(bufr, "unpack", 1)
 
                 # if this is the first message, initialise our final array
                 if (msgCount == 0):
@@ -199,18 +199,18 @@ def get_dataset(self, dataset_id, dataset_info):
         and create the dataset with or without an AreaDefinition.
 
         """
-        arr = self.get_array(dataset_info['key'])
+        arr = self.get_array(dataset_info["key"])
 
         if self.with_adef:
             xarr = self.get_dataset_with_area_def(arr, dataset_id)
             # coordinates are not relevant when returning data with an AreaDefinition
-            if 'coordinates' in dataset_info.keys():
-                del dataset_info['coordinates']
+            if "coordinates" in dataset_info.keys():
+                del dataset_info["coordinates"]
         else:
             xarr = xr.DataArray(arr, dims=["y"])
 
-        if 'fill_value' in dataset_info:
-            xarr = xarr.where(xarr != dataset_info['fill_value'])
+        if "fill_value" in dataset_info:
+            xarr = xarr.where(xarr != dataset_info["fill_value"])
 
         self._add_attributes(xarr, dataset_info)
 
@@ -218,8 +218,8 @@ def get_dataset_with_area_def(self, arr, dataset_id):
         """Get dataset with an AreaDefinition."""
-        if dataset_id['name'] in ['latitude', 'longitude']:
-            self.__setattr__(dataset_id['name'], arr)
+        if dataset_id["name"] in ["latitude", "longitude"]:
+            self.__setattr__(dataset_id["name"], arr)
             xarr = xr.DataArray(arr, dims=["y"])
         else:
             lons_1d, lats_1d, data_1d = da.compute(self.longitude, self.latitude, arr)
@@ -231,13 +231,13 @@
             data_2d[:] = np.nan
             data_2d[irow.compressed(), icol.compressed()] = data_1d[~irow.mask]
 
-            xarr = xr.DataArray(da.from_array(data_2d, CHUNK_SIZE), dims=('y', 'x'))
+            xarr = xr.DataArray(da.from_array(data_2d, CHUNK_SIZE), dims=("y", "x"))
 
             ntotal = len(icol)
             nvalid = len(icol.compressed())
             if nvalid < ntotal:
-                logging.warning(f'{ntotal-nvalid} out of {ntotal} data points could not be put on '
-                                f'the grid {self._area_def.area_id}.')
+                logging.warning(f"{ntotal-nvalid} out of {ntotal} data points could not be put on "
+                                f"the grid {self._area_def.area_id}.")
 
         return xarr
 
@@ -248,31 +248,31 @@ def _construct_area_def(self, dataset_id):
             AreaDefinition: A pyresample AreaDefinition object containing the area definition.
 
         """
-        res = dataset_id['resolution']
+        res = dataset_id["resolution"]
 
-        area_naming_input_dict = {'platform_name': 'msg',
-                                  'instrument_name': 'seviri',
-                                  'resolution': res,
+        area_naming_input_dict = {"platform_name": "msg",
+                                  "instrument_name": "seviri",
+                                  "resolution": res,
                                   }
 
         area_naming = get_geos_area_naming({**area_naming_input_dict,
-                                            **get_service_mode('seviri', self.ssp_lon)})
+                                            **get_service_mode("seviri", self.ssp_lon)})
 
         # Datasets with a segment size of 3 pixels extend outside the original SEVIRI 3km grid (with 1238 x 1238
        # segments of 3 pixels each). Hence, we need to use corresponding area definitions in areas.yaml
         if self.seg_size == 3:
-            area_naming['area_id'] += '_ext'
-            area_naming['description'] += ' (extended outside original 3km grid)'
+            area_naming["area_id"] += "_ext"
+            area_naming["description"] += " (extended outside original 3km grid)"
 
         # Construct AreaDefinition from standardized area definition in areas.yaml.
-        stand_area_def = get_area_def(area_naming['area_id'])
+        stand_area_def = get_area_def(area_naming["area_id"])
 
         return stand_area_def
 
     def _add_attributes(self, xarr, dataset_info):
         """Add dataset attributes to xarray."""
-        xarr.attrs['sensor'] = 'SEVIRI'
-        xarr.attrs['platform_name'] = self.platform_name
-        xarr.attrs['ssp_lon'] = self.ssp_lon
-        xarr.attrs['seg_size'] = self.seg_size
+        xarr.attrs["sensor"] = "SEVIRI"
+        xarr.attrs["platform_name"] = self.platform_name
+        xarr.attrs["ssp_lon"] = self.ssp_lon
+        xarr.attrs["seg_size"] = self.seg_size
 
         xarr.attrs.update(dataset_info)
 
diff --git a/satpy/readers/seviri_l2_grib.py b/satpy/readers/seviri_l2_grib.py
index 214193039d..ca021a7bc0 100644
--- a/satpy/readers/seviri_l2_grib.py
+++ b/satpy/readers/seviri_l2_grib.py
@@ -57,7 +57,7 @@ def __init__(self, filename, filename_info, filetype_info):
     @property
     def start_time(self):
         """Return the sensing start time."""
-        return self.filename_info['start_time']
+        return self.filename_info["start_time"]
 
     @property
     def end_time(self):
@@ -66,8 +66,8 @@ def get_area_def(self, dataset_id):
         """Return the area definition for a dataset."""
-        self._area_dict['column_step'] = dataset_id["resolution"]
-        self._area_dict['line_step'] = dataset_id["resolution"]
+        self._area_dict["column_step"] = dataset_id["resolution"]
+        self._area_dict["line_step"] = dataset_id["resolution"]
 
         area_extent = calculate_area_extent(self._area_dict)
 
@@ -86,12 +86,12 @@ def get_dataset(self, dataset_id, dataset_info):
         dimensions within a given message if the file was only partly read
         (not looping over all messages) in an earlier instance.
 
""" - logger.debug('Reading in file to get dataset with parameter number %d.', - dataset_info['parameter_number']) + logger.debug("Reading in file to get dataset with parameter number %d.", + dataset_info["parameter_number"]) xarr = None message_found = False - with open(self.filename, 'rb') as fh: + with open(self.filename, "rb") as fh: # Iterate over all messages and fetch data when the correct parameter number is found while True: @@ -101,19 +101,19 @@ def get_dataset(self, dataset_id, dataset_info): if not message_found: # Could not obtain a valid message ID from the grib file logger.warning("Could not find parameter_number %d in GRIB file, no valid Dataset created", - dataset_info['parameter_number']) + dataset_info["parameter_number"]) break # Check if the parameter number in the GRIB message corresponds to the required key - parameter_number = self._get_from_msg(gid, 'parameterNumber') + parameter_number = self._get_from_msg(gid, "parameterNumber") - if parameter_number == dataset_info['parameter_number']: + if parameter_number == dataset_info["parameter_number"]: self._res = dataset_id["resolution"] self._read_attributes(gid) # Read the missing value - missing_value = self._get_from_msg(gid, 'missingValue') + missing_value = self._get_from_msg(gid, "missingValue") # Retrieve values and metadata from the GRIB message, masking the values equal to missing_value xarr = self._get_xarray_from_msg(gid) @@ -137,11 +137,11 @@ def get_dataset(self, dataset_id, dataset_info): def _read_attributes(self, gid): """Read the parameter attributes from the message and create the projection and area dictionaries.""" # Read SSP and date/time - self._ssp_lon = self._get_from_msg(gid, 'longitudeOfSubSatellitePointInDegrees') + self._ssp_lon = self._get_from_msg(gid, "longitudeOfSubSatellitePointInDegrees") # Read number of points on the x and y axes - self._nrows = self._get_from_msg(gid, 'Ny') - self._ncols = self._get_from_msg(gid, 'Nx') + self._nrows = self._get_from_msg(gid, "Ny") + self._ncols = self._get_from_msg(gid, "Nx") # Creates the projection and area dictionaries self._pdict, self._area_dict = self._get_proj_area(gid) @@ -171,45 +171,45 @@ def _get_proj_area(self, gid): south: coodinate of the south limit """ # Get name of area definition - area_naming_input_dict = {'platform_name': 'msg', - 'instrument_name': 'seviri', - 'resolution': self._res, + area_naming_input_dict = {"platform_name": "msg", + "instrument_name": "seviri", + "resolution": self._res, } area_naming = get_geos_area_naming({**area_naming_input_dict, - **get_service_mode('seviri', self._ssp_lon)}) + **get_service_mode("seviri", self._ssp_lon)}) # Read all projection and area parameters from the message - earth_major_axis_in_meters = self._get_from_msg(gid, 'earthMajorAxis') * 1000.0 # [m] - earth_minor_axis_in_meters = self._get_from_msg(gid, 'earthMinorAxis') * 1000.0 # [m] + earth_major_axis_in_meters = self._get_from_msg(gid, "earthMajorAxis") * 1000.0 # [m] + earth_minor_axis_in_meters = self._get_from_msg(gid, "earthMinorAxis") * 1000.0 # [m] earth_major_axis_in_meters = self._scale_earth_axis(earth_major_axis_in_meters) earth_minor_axis_in_meters = self._scale_earth_axis(earth_minor_axis_in_meters) - nr_in_radius_of_earth = self._get_from_msg(gid, 'NrInRadiusOfEarth') - xp_in_grid_lengths = self._get_from_msg(gid, 'XpInGridLengths') + nr_in_radius_of_earth = self._get_from_msg(gid, "NrInRadiusOfEarth") + xp_in_grid_lengths = self._get_from_msg(gid, "XpInGridLengths") h_in_meters = earth_major_axis_in_meters * 
(nr_in_radius_of_earth - 1.0) # [m] # Create the dictionary with the projection data pdict = { - 'a': earth_major_axis_in_meters, - 'b': earth_minor_axis_in_meters, - 'h': h_in_meters, - 'ssp_lon': self._ssp_lon, - 'nlines': self._ncols, - 'ncols': self._nrows, - 'a_name': area_naming['area_id'], - 'a_desc': area_naming['description'], - 'p_id': "", + "a": earth_major_axis_in_meters, + "b": earth_minor_axis_in_meters, + "h": h_in_meters, + "ssp_lon": self._ssp_lon, + "nlines": self._ncols, + "ncols": self._nrows, + "a_name": area_naming["area_id"], + "a_desc": area_naming["description"], + "p_id": "", } # Compute the dictionary with the area extension area_dict = { - 'center_point': xp_in_grid_lengths, - 'north': self._nrows, - 'east': 1, - 'west': self._ncols, - 'south': 1, + "center_point": xp_in_grid_lengths, + "north": self._nrows, + "east": 1, + "west": self._ncols, + "south": 1, } return pdict, area_dict @@ -237,7 +237,7 @@ def _get_xarray_from_msg(self, gid): """ # Data from GRIB message are read into an Xarray... xarr = xr.DataArray(da.from_array(ec.codes_get_values( - gid).reshape(self._nrows, self._ncols), CHUNK_SIZE), dims=('y', 'x')) + gid).reshape(self._nrows, self._ncols), CHUNK_SIZE), dims=("y", "x")) return xarr @@ -251,13 +251,13 @@ def _get_attributes(self): platform_name: name of the platform """ orbital_parameters = { - 'projection_longitude': self._ssp_lon + "projection_longitude": self._ssp_lon } attributes = { - 'orbital_parameters': orbital_parameters, - 'sensor': 'seviri', - 'platform_name': PLATFORM_DICT[self.filename_info['spacecraft']] + "orbital_parameters": orbital_parameters, + "sensor": "seviri", + "platform_name": PLATFORM_DICT[self.filename_info["spacecraft"]] } return attributes diff --git a/satpy/readers/slstr_l1b.py b/satpy/readers/slstr_l1b.py index 5024d3d9a1..02aae9f72b 100644 --- a/satpy/readers/slstr_l1b.py +++ b/satpy/readers/slstr_l1b.py @@ -34,30 +34,30 @@ CHUNK_SIZE = get_legacy_chunk_size() -PLATFORM_NAMES = {'S3A': 'Sentinel-3A', - 'S3B': 'Sentinel-3B'} +PLATFORM_NAMES = {"S3A": "Sentinel-3A", + "S3B": "Sentinel-3B"} # These are the default channel adjustment factors. 
# Defined in the product notice: S3.PN-SLSTR-L1.08 # https://sentinel.esa.int/documents/247904/2731673/Sentinel-3A-and-3B-SLSTR-Product-Notice-Level-1B-SL-1-RBT-at-NRT-and-NTC.pdf -CHANCALIB_FACTORS = {'S1_nadir': 0.97, - 'S2_nadir': 0.98, - 'S3_nadir': 0.98, - 'S4_nadir': 1.0, - 'S5_nadir': 1.11, - 'S6_nadir': 1.13, - 'S7_nadir': 1.0, - 'S8_nadir': 1.0, - 'S9_nadir': 1.0, - 'S1_oblique': 0.94, - 'S2_oblique': 0.95, - 'S3_oblique': 0.95, - 'S4_oblique': 1.0, - 'S5_oblique': 1.04, - 'S6_oblique': 1.07, - 'S7_oblique': 1.0, - 'S8_oblique': 1.0, - 'S9_oblique': 1.0, } +CHANCALIB_FACTORS = {"S1_nadir": 0.97, + "S2_nadir": 0.98, + "S3_nadir": 0.98, + "S4_nadir": 1.0, + "S5_nadir": 1.11, + "S6_nadir": 1.13, + "S7_nadir": 1.0, + "S8_nadir": 1.0, + "S9_nadir": 1.0, + "S1_oblique": 0.94, + "S2_oblique": 0.95, + "S3_oblique": 0.95, + "S4_oblique": 1.0, + "S5_oblique": 1.04, + "S6_oblique": 1.07, + "S7_oblique": 1.0, + "S8_oblique": 1.0, + "S9_oblique": 1.0, } class NCSLSTRGeo(BaseFileHandler): @@ -70,17 +70,17 @@ def __init__(self, filename, filename_info, filetype_info): self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, - chunks={'columns': CHUNK_SIZE, - 'rows': CHUNK_SIZE}) - self.nc = self.nc.rename({'columns': 'x', 'rows': 'y'}) + chunks={"columns": CHUNK_SIZE, + "rows": CHUNK_SIZE}) + self.nc = self.nc.rename({"columns": "x", "rows": "y"}) self.cache = {} def get_dataset(self, key, info): """Load a dataset.""" - logger.debug('Reading %s.', key['name']) - file_key = info['file_key'].format(view=key['view'].name[0], - stripe=key['stripe'].name) + logger.debug("Reading %s.", key["name"]) + file_key = info["file_key"].format(view=key["view"].name[0], + stripe=key["stripe"].name) try: variable = self.nc[file_key] except KeyError: @@ -95,12 +95,12 @@ def get_dataset(self, key, info): @property def start_time(self): """Get the start time.""" - return datetime.strptime(self.nc.attrs['start_time'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get the end time.""" - return datetime.strptime(self.nc.attrs['stop_time'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") class NCSLSTR1B(BaseFileHandler): @@ -132,29 +132,29 @@ def __init__(self, filename, filename_info, filetype_info, self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, - chunks={'columns': CHUNK_SIZE, - 'rows': CHUNK_SIZE}) - self.nc = self.nc.rename({'columns': 'x', 'rows': 'y'}) - self.channel = filename_info['dataset_name'] - self.stripe = filename_info['stripe'] - views = {'n': 'nadir', 'o': 'oblique'} - self.view = views[filename_info['view']] - cal_file = os.path.join(os.path.dirname(self.filename), 'viscal.nc') + chunks={"columns": CHUNK_SIZE, + "rows": CHUNK_SIZE}) + self.nc = self.nc.rename({"columns": "x", "rows": "y"}) + self.channel = filename_info["dataset_name"] + self.stripe = filename_info["stripe"] + views = {"n": "nadir", "o": "oblique"} + self.view = views[filename_info["view"]] + cal_file = os.path.join(os.path.dirname(self.filename), "viscal.nc") self.cal = xr.open_dataset(cal_file, decode_cf=True, mask_and_scale=True, - chunks={'views': CHUNK_SIZE}) + chunks={"views": CHUNK_SIZE}) indices_file = os.path.join(os.path.dirname(self.filename), - 'indices_{}{}.nc'.format(self.stripe, self.view[0])) + "indices_{}{}.nc".format(self.stripe, self.view[0])) self.indices = xr.open_dataset(indices_file, decode_cf=True, 
mask_and_scale=True, - chunks={'columns': CHUNK_SIZE, - 'rows': CHUNK_SIZE}) - self.indices = self.indices.rename({'columns': 'x', 'rows': 'y'}) + chunks={"columns": CHUNK_SIZE, + "rows": CHUNK_SIZE}) + self.indices = self.indices.rename({"columns": "x", "rows": "y"}) - self.platform_name = PLATFORM_NAMES[filename_info['mission_id']] - self.sensor = 'slstr' + self.platform_name = PLATFORM_NAMES[filename_info["mission_id"]] + self.sensor = "slstr" if isinstance(user_calibration, dict): self.usercalib = user_calibration else: @@ -162,7 +162,7 @@ def __init__(self, filename, filename_info, filetype_info, def _apply_radiance_adjustment(self, radiances): """Adjust SLSTR radiances with default or user supplied values.""" - chan_name = self.channel + '_' + self.view + chan_name = self.channel + "_" + self.view adjust_fac = None if self.usercalib is not None: # If user supplied adjustment, use it. @@ -189,26 +189,26 @@ def _cal_rad(rad, didx, solar_flux=None): def get_dataset(self, key, info): """Load a dataset.""" - if (self.channel not in key['name'] or - self.stripe != key['stripe'].name or - self.view != key['view'].name): + if (self.channel not in key["name"] or + self.stripe != key["stripe"].name or + self.view != key["view"].name): return - logger.debug('Reading %s.', key['name']) - if key['calibration'] == 'brightness_temperature': - variable = self.nc['{}_BT_{}{}'.format(self.channel, self.stripe, self.view[0])] + logger.debug("Reading %s.", key["name"]) + if key["calibration"] == "brightness_temperature": + variable = self.nc["{}_BT_{}{}".format(self.channel, self.stripe, self.view[0])] else: - variable = self.nc['{}_radiance_{}{}'.format(self.channel, self.stripe, self.view[0])] + variable = self.nc["{}_radiance_{}{}".format(self.channel, self.stripe, self.view[0])] radiances = self._apply_radiance_adjustment(variable) - units = variable.attrs['units'] - if key['calibration'] == 'reflectance': + units = variable.attrs["units"] + if key["calibration"] == "reflectance": # TODO take into account sun-earth distance - solar_flux = self.cal[re.sub('_[^_]*$', '', key['name']) + '_solar_irradiances'] - d_index = self.indices['detector_{}{}'.format(self.stripe, self.view[0])] - idx = 0 if self.view[0] == 'n' else 1 # 0: Nadir view, 1: oblique (check). + solar_flux = self.cal[re.sub("_[^_]*$", "", key["name"]) + "_solar_irradiances"] + d_index = self.indices["detector_{}{}".format(self.stripe, self.view[0])] + idx = 0 if self.view[0] == "n" else 1 # 0: Nadir view, 1: oblique (check). 
radiances.data = da.map_blocks( self._cal_rad, radiances.data, d_index.data, solar_flux=solar_flux[:, idx].values) radiances *= np.pi * 100 - units = '%' + units = "%" info = info.copy() info.update(radiances.attrs) @@ -224,12 +224,12 @@ def get_dataset(self, key, info): @property def start_time(self): """Get the start time.""" - return datetime.strptime(self.nc.attrs['start_time'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get the end time.""" - return datetime.strptime(self.nc.attrs['stop_time'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") class NCSLSTRAngles(BaseFileHandler): @@ -240,8 +240,8 @@ def _loadcart(self, fname): cartf = xr.open_dataset(fname, decode_cf=True, mask_and_scale=True, - chunks={'columns': CHUNK_SIZE, - 'rows': CHUNK_SIZE}) + chunks={"columns": CHUNK_SIZE, + "rows": CHUNK_SIZE}) return cartf def __init__(self, filename, filename_info, filetype_info): @@ -252,57 +252,57 @@ def __init__(self, filename, filename_info, filetype_info): self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, - chunks={'columns': CHUNK_SIZE, - 'rows': CHUNK_SIZE}) + chunks={"columns": CHUNK_SIZE, + "rows": CHUNK_SIZE}) # TODO: get metadata from the manifest file (xfdumanifest.xml) - self.platform_name = PLATFORM_NAMES[filename_info['mission_id']] - self.sensor = 'slstr' - self.view = filename_info['view'] - self._start_time = filename_info['start_time'] - self._end_time = filename_info['end_time'] + self.platform_name = PLATFORM_NAMES[filename_info["mission_id"]] + self.sensor = "slstr" + self.view = filename_info["view"] + self._start_time = filename_info["start_time"] + self._end_time = filename_info["end_time"] carta_file = os.path.join( - os.path.dirname(self.filename), 'cartesian_a{}.nc'.format(self.view[0])) + os.path.dirname(self.filename), "cartesian_a{}.nc".format(self.view[0])) carti_file = os.path.join( - os.path.dirname(self.filename), 'cartesian_i{}.nc'.format(self.view[0])) + os.path.dirname(self.filename), "cartesian_i{}.nc".format(self.view[0])) cartx_file = os.path.join( - os.path.dirname(self.filename), 'cartesian_tx.nc') + os.path.dirname(self.filename), "cartesian_tx.nc") self.carta = self._loadcart(carta_file) self.carti = self._loadcart(carti_file) self.cartx = self._loadcart(cartx_file) def get_dataset(self, key, info): """Load a dataset.""" - if not key['view'].name.startswith(self.view[0]): + if not key["view"].name.startswith(self.view[0]): return - logger.debug('Reading %s.', key['name']) + logger.debug("Reading %s.", key["name"]) # Check if file_key is specified in the yaml - file_key = info['file_key'].format(view=key['view'].name[0]) + file_key = info["file_key"].format(view=key["view"].name[0]) variable = self.nc[file_key] - l_step = self.nc.attrs.get('al_subsampling_factor', 1) - c_step = self.nc.attrs.get('ac_subsampling_factor', 16) + l_step = self.nc.attrs.get("al_subsampling_factor", 1) + c_step = self.nc.attrs.get("ac_subsampling_factor", 16) - if key.get('resolution', 1000) == 500: + if key.get("resolution", 1000) == 500: l_step *= 2 c_step *= 2 if c_step != 1 or l_step != 1: - logger.debug('Interpolating %s.', key['name']) + logger.debug("Interpolating %s.", key["name"]) # TODO: do it in cartesian coordinates ! 
problems at the date line are
            # possible
-            tie_x = self.cartx['x_tx'].data[0, :][::-1]
-            tie_y = self.cartx['y_tx'].data[:, 0]
-            if key.get('resolution', 1000) == 500:
-                full_x = self.carta['x_a' + self.view[0]].data
-                full_y = self.carta['y_a' + self.view[0]].data
+            tie_x = self.cartx["x_tx"].data[0, :][::-1]
+            tie_y = self.cartx["y_tx"].data[:, 0]
+            if key.get("resolution", 1000) == 500:
+                full_x = self.carta["x_a" + self.view[0]].data
+                full_y = self.carta["y_a" + self.view[0]].data
             else:
-                full_x = self.carti['x_i' + self.view[0]].data
-                full_y = self.carti['y_i' + self.view[0]].data
+                full_x = self.carti["x_i" + self.view[0]].data
+                full_y = self.carti["y_i" + self.view[0]].data
 
             variable = variable.fillna(0)
-            variable.attrs['resolution'] = key.get('resolution', 1000)
+            variable.attrs["resolution"] = key.get("resolution", 1000)
 
             from scipy.interpolate import RectBivariateSpline
             spl = RectBivariateSpline(
@@ -311,13 +311,13 @@
             values = spl.ev(full_y, full_x)
             variable = xr.DataArray(da.from_array(values, chunks=(CHUNK_SIZE, CHUNK_SIZE)),
-                                    dims=['y', 'x'], attrs=variable.attrs)
+                                    dims=["y", "x"], attrs=variable.attrs)
 
-        variable.attrs['platform_name'] = self.platform_name
-        variable.attrs['sensor'] = self.sensor
+        variable.attrs["platform_name"] = self.platform_name
+        variable.attrs["sensor"] = self.sensor
 
-        if 'units' not in variable.attrs:
-            variable.attrs['units'] = 'degrees'
+        if "units" not in variable.attrs:
+            variable.attrs["units"] = "degrees"
 
         variable.attrs.update(key.to_dict())
 
@@ -326,12 +326,12 @@ def start_time(self):
         """Get the start time."""
-        return datetime.strptime(self.nc.attrs['start_time'], '%Y-%m-%dT%H:%M:%S.%fZ')
+        return datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ")
 
     @property
     def end_time(self):
         """Get the end time."""
-        return datetime.strptime(self.nc.attrs['stop_time'], '%Y-%m-%dT%H:%M:%S.%fZ')
+        return datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ")
 
 
 class NCSLSTRFlag(BaseFileHandler):
@@ -344,24 +344,24 @@ def __init__(self, filename, filename_info, filetype_info):
         self.nc = xr.open_dataset(self.filename,
                                   decode_cf=True,
                                   mask_and_scale=True,
-                                  chunks={'columns': CHUNK_SIZE,
-                                          'rows': CHUNK_SIZE})
-        self.nc = self.nc.rename({'columns': 'x', 'rows': 'y'})
-        self.stripe = filename_info['stripe']
-        views = {'n': 'nadir', 'o': 'oblique'}
-        self.view = views[filename_info['view']]
+                                  chunks={"columns": CHUNK_SIZE,
+                                          "rows": CHUNK_SIZE})
+        self.nc = self.nc.rename({"columns": "x", "rows": "y"})
+        self.stripe = filename_info["stripe"]
+        views = {"n": "nadir", "o": "oblique"}
+        self.view = views[filename_info["view"]]
         # TODO: get metadata from the manifest file (xfdumanifest.xml)
-        self.platform_name = PLATFORM_NAMES[filename_info['mission_id']]
-        self.sensor = 'slstr'
+        self.platform_name = PLATFORM_NAMES[filename_info["mission_id"]]
+        self.sensor = "slstr"
 
     def get_dataset(self, key, info):
         """Load a dataset."""
-        if (self.stripe != key['stripe'].name or
-                self.view != key['view'].name):
+        if (self.stripe != key["stripe"].name or
+                self.view != key["view"].name):
             return
-        logger.debug('Reading %s.', key['name'])
-        file_key = info['file_key'].format(view=key['view'].name[0],
-                                           stripe=key['stripe'].name)
+        logger.debug("Reading %s.", key["name"])
+        file_key = info["file_key"].format(view=key["view"].name[0],
+                                           stripe=key["stripe"].name)
         variable = self.nc[file_key]
 
         info = info.copy()
@@ -376,9 +376,9 @@ def get_dataset(self, key, info):
 
     @property
    def 
start_time(self): """Get the start time.""" - return datetime.strptime(self.nc.attrs['start_time'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get the end time.""" - return datetime.strptime(self.nc.attrs['stop_time'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") diff --git a/satpy/readers/smos_l2_wind.py b/satpy/readers/smos_l2_wind.py index 116ac39756..c982397c3c 100644 --- a/satpy/readers/smos_l2_wind.py +++ b/satpy/readers/smos_l2_wind.py @@ -41,22 +41,22 @@ class SMOSL2WINDFileHandler(NetCDF4FileHandler): @property def start_time(self): """Get start time.""" - return datetime.strptime(self['/attr/time_coverage_start'], "%Y-%m-%dT%H:%M:%S Z") + return datetime.strptime(self["/attr/time_coverage_start"], "%Y-%m-%dT%H:%M:%S Z") @property def end_time(self): """Get end time.""" - return datetime.strptime(self['/attr/time_coverage_end'], "%Y-%m-%dT%H:%M:%S Z") + return datetime.strptime(self["/attr/time_coverage_end"], "%Y-%m-%dT%H:%M:%S Z") @property def platform_shortname(self): """Get platform shortname.""" - return self.filename_info['platform_shortname'] + return self.filename_info["platform_shortname"] @property def platform_name(self): """Get platform.""" - return self['/attr/platform'] + return self["/attr/platform"] def get_metadata(self, data, ds_info): """Get metadata.""" @@ -64,12 +64,12 @@ def get_metadata(self, data, ds_info): metadata.update(data.attrs) metadata.update(ds_info) metadata.update({ - 'platform_shortname': self.platform_shortname, - 'platform_name': self.platform_name, - 'sensor': self['/attr/instrument'], - 'start_time': self.start_time, - 'end_time': self.end_time, - 'level': self['/attr/processing_level'], + "platform_shortname": self.platform_shortname, + "platform_name": self.platform_name, + "sensor": self["/attr/instrument"], + "start_time": self.start_time, + "end_time": self.end_time, + "level": self["/attr/processing_level"], }) return metadata @@ -88,16 +88,16 @@ def available_datasets(self, configured_datasets=None): continue handled_variables.add(var_name) new_info = { - 'name': var_name, - 'file_type': self.filetype_info['file_type'], + "name": var_name, + "file_type": self.filetype_info["file_type"], } yield True, new_info def _mask_dataset(self, data): """Mask out fill values.""" try: - fill = data.attrs['_FillValue'] - data.attrs['_FillValue'] = np.nan + fill = data.attrs["_FillValue"] + data.attrs["_FillValue"] = np.nan return data.where(data != fill) except KeyError: return data @@ -110,11 +110,11 @@ def _adjust_lon_coord(self, data): def _rename_coords(self, data): """Rename coords.""" rename_dict = {} - if 'lon' in data.dims: + if "lon" in data.dims: data = self._adjust_lon_coord(data) - rename_dict['lon'] = 'x' - if 'lat' in data.dims: - rename_dict['lat'] = 'y' + rename_dict["lon"] = "x" + if "lat" in data.dims: + rename_dict["lat"] = "y" # Rename the coordinates to x and y return data.rename(rename_dict) @@ -123,39 +123,39 @@ def _remove_time_coordinate(self, data): # Remove dimension where size is 1, eg. 
time
         data = data.squeeze()
 
         # Remove the time coordinate if it exists
-        if 'time' in data.coords:
-            data = data.drop_vars('time')
+        if "time" in data.coords:
+            data = data.drop_vars("time")
 
         return data
 
     def _roll_dataset_lon_coord(self, data):
         """Roll dataset along the lon coordinate."""
-        if 'lon' in data.dims:
+        if "lon" in data.dims:
             data = data.roll(lon=720, roll_coords=True)
         return data
 
     def get_dataset(self, ds_id, ds_info):
         """Get dataset."""
-        data = self[ds_id['name']]
+        data = self[ds_id["name"]]
         data.attrs = self.get_metadata(data, ds_info)
         data = self._remove_time_coordinate(data)
         data = self._roll_dataset_lon_coord(data)
         data = self._rename_coords(data)
         data = self._mask_dataset(data)
-        if len(data.dims) >= 2 and all([dim in data.dims for dim in ['x', 'y']]):
+        if len(data.dims) >= 2 and all([dim in data.dims for dim in ["x", "y"]]):
             # Remove the first and last row as these values extend beyond +-90 latitude
             # if the dataset contains the y dimension.
             # As this is data over open sea, these rows have no values.
             data = data.where((data.y > -90.0) & (data.y < 90.0), drop=True)
-        elif len(data.dims) == 1 and 'y' in data.dims:
+        elif len(data.dims) == 1 and "y" in data.dims:
             data = data.where((data.y > 0) & (data.y < len(data.y) - 1), drop=True)
 
         return data
 
     def _create_area_extent(self, width, height):
         """Create area extent."""
         # Create a meshgrid; not strictly needed, but it makes it easy to find the extremes
-        _lon = self._adjust_lon_coord(self['lon'])
+        _lon = self._adjust_lon_coord(self["lon"])
         _lon = self._roll_dataset_lon_coord(_lon)
-        latlon = np.meshgrid(_lon, self['lat'][1:self['lat/shape'][0] - 1])
+        latlon = np.meshgrid(_lon, self["lat"][1:self["lat/shape"][0] - 1])
 
         lower_left_x = latlon[0][height - 1][0] - 0.125
         lower_left_y = latlon[1][height - 1][0] + 0.125
         upper_right_y = latlon[1][1][width - 1] - 0.125
@@ -164,12 +164,12 @@ def get_area_def(self, dsid):
         """Define AreaDefinition."""
-        width = self['lon/shape'][0]
-        height = self['lat/shape'][0] - 2
+        width = self["lon/shape"][0]
+        height = self["lat/shape"][0] - 2
         area_extent = self._create_area_extent(width, height)
         description = "SMOS L2 Wind Equirectangular Projection"
-        area_id = 'smos_eqc'
-        proj_id = 'equirectangular'
-        proj_dict = {'init': self['/attr/geospatial_bounds_vertical_crs']}
+        area_id = "smos_eqc"
+        proj_id = "equirectangular"
+        proj_dict = {"init": self["/attr/geospatial_bounds_vertical_crs"]}
         area_def = AreaDefinition(area_id, description, proj_id, proj_dict,
                                   width, height, area_extent, )
         return area_def
diff --git a/satpy/readers/tropomi_l2.py b/satpy/readers/tropomi_l2.py
index c6dda4bd89..768ca70948 100644
--- a/satpy/readers/tropomi_l2.py
+++ b/satpy/readers/tropomi_l2.py
@@ -40,7 +40,7 @@
 from satpy.utils import get_legacy_chunk_size
 
 logger = logging.getLogger(__name__)
-DATE_FMT = '%Y-%m-%dT%H:%M:%SZ'
+DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"
 
 CHUNK_SIZE = get_legacy_chunk_size()
 
@@ -50,32 +50,32 @@ class TROPOMIL2FileHandler(NetCDF4FileHandler):
     @property
     def start_time(self):
         """Get start time."""
-        return self.filename_info['start_time']
+        return self.filename_info["start_time"]
 
     @property
     def end_time(self):
         """Get end time."""
-        return self.filename_info.get('end_time', self.start_time)
+        return self.filename_info.get("end_time", self.start_time)
 
     @property
     def platform_shortname(self):
         """Get platform shortname."""
-        return self.filename_info['platform_shortname']
+        return self.filename_info["platform_shortname"]
 
     @property
     def time_coverage_start(self):
         """Get 
time_coverage_start.""" - return datetime.strptime(self['/attr/time_coverage_start'], DATE_FMT) + return datetime.strptime(self["/attr/time_coverage_start"], DATE_FMT) @property def time_coverage_end(self): """Get time_coverage_end.""" - return datetime.strptime(self['/attr/time_coverage_end'], DATE_FMT) + return datetime.strptime(self["/attr/time_coverage_end"], DATE_FMT) @property def sensor(self): """Get sensor.""" - res = self['/attr/sensor'] + res = self["/attr/sensor"] if isinstance(res, np.ndarray): return str(res.astype(str)).lower() return res.lower() @@ -93,7 +93,7 @@ def available_datasets(self, configured_datasets=None): lat_shape = None for var_name, _val in self.file_content.items(): # Could probably avoid this hardcoding, will think on it - if (var_name == 'PRODUCT/latitude'): + if (var_name == "PRODUCT/latitude"): lat_shape = self[var_name + "/shape"] break @@ -102,19 +102,19 @@ def available_datasets(self, configured_datasets=None): # update previously configured datasets logger.debug("Starting previously configured variables loop...") # if bounds exists, we can assemble them later - bounds_exist = 'latitude_bounds' in self and 'longitude_bounds' in self + bounds_exist = "latitude_bounds" in self and "longitude_bounds" in self for is_avail, ds_info in (configured_datasets or []): # some other file handler knows how to load this if is_avail is not None: yield is_avail, ds_info - var_name = ds_info.get('file_key', ds_info['name']) + var_name = ds_info.get("file_key", ds_info["name"]) # logger.debug("Evaluating previously configured variable: %s", var_name) - matches = self.file_type_matches(ds_info['file_type']) + matches = self.file_type_matches(ds_info["file_type"]) # we can confidently say that we can provide this dataset and can # provide more info - assembled = var_name in ['assembled_lat_bounds', 'assembled_lon_bounds'] + assembled = var_name in ["assembled_lat_bounds", "assembled_lon_bounds"] if (matches and var_name in self) or (assembled and bounds_exist): logger.debug("Handling previously configured variable: %s", var_name) if not assembled: @@ -150,20 +150,20 @@ def _iterate_over_dataset_contents(self, handled_variables, shape): logger.debug("Already handled, skipping: %s", var_name) continue handled_variables.add(var_name) - last_index_separator = var_name.rindex('/') + last_index_separator = var_name.rindex("/") last_index_separator = last_index_separator + 1 var_name_no_path = var_name[last_index_separator:] logger.debug("Using short name of: %s", var_name_no_path) # Create new ds_info object - if var_name_no_path in ['latitude_bounds', 'longitude_bounds']: + if var_name_no_path in ["latitude_bounds", "longitude_bounds"]: coordinates = [] else: - coordinates = ['longitude', 'latitude'] + coordinates = ["longitude", "latitude"] new_info = { - 'name': var_name_no_path, - 'file_key': var_name, - 'coordinates': coordinates, - 'file_type': self.filetype_info['file_type'], + "name": var_name_no_path, + "file_key": var_name, + "coordinates": coordinates, + "file_type": self.filetype_info["file_type"], } yield True, new_info @@ -173,12 +173,12 @@ def get_metadata(self, data, ds_info): metadata.update(data.attrs) metadata.update(ds_info) metadata.update({ - 'platform_shortname': self.platform_shortname, - 'sensor': self.sensor, - 'start_time': self.start_time, - 'end_time': self.end_time, - 'time_coverage_start': self.time_coverage_start, - 'time_coverage_end': self.time_coverage_end, + "platform_shortname": self.platform_shortname, + "sensor": self.sensor, + 
"start_time": self.start_time, + "end_time": self.end_time, + "time_coverage_start": self.time_coverage_start, + "time_coverage_end": self.time_coverage_end, }) return metadata @@ -186,10 +186,10 @@ def get_metadata(self, data, ds_info): def _rename_dims(self, data_arr): """Normalize dimension names with the rest of Satpy.""" dims_dict = {} - if 'ground_pixel' in data_arr.dims: - dims_dict['ground_pixel'] = 'x' - if 'scanline' in data_arr.dims: - dims_dict['scanline'] = 'y' + if "ground_pixel" in data_arr.dims: + dims_dict["ground_pixel"] = "x" + if "scanline" in data_arr.dims: + dims_dict["scanline"] = "y" return data_arr.rename(dims_dict) def prepare_geo(self, bounds_data): @@ -220,18 +220,18 @@ def prepare_geo(self, bounds_data): # Convert to DataArray dask_dest = da.from_array(dest, chunks=CHUNK_SIZE) dest = xr.DataArray(dask_dest, - dims=('y_bounds', 'x_bounds'), + dims=("y_bounds", "x_bounds"), attrs=bounds_data.attrs ) return dest def get_dataset(self, ds_id, ds_info): """Get dataset.""" - logger.debug("Getting data for: %s", ds_id['name']) - file_key = ds_info.get('file_key', ds_id['name']) + logger.debug("Getting data for: %s", ds_id["name"]) + file_key = ds_info.get("file_key", ds_id["name"]) data = self[file_key] data.attrs = self.get_metadata(data, ds_info) - fill_value = data.attrs.get('_FillValue', np.float32(np.nan)) + fill_value = data.attrs.get("_FillValue", np.float32(np.nan)) data = data.squeeze() # preserve integer data types if possible @@ -239,11 +239,11 @@ def get_dataset(self, ds_id, ds_info): new_fill = fill_value else: new_fill = np.float32(np.nan) - data.attrs.pop('_FillValue', None) + data.attrs.pop("_FillValue", None) good_mask = data != fill_value - scale_factor = data.attrs.get('scale_factor') - add_offset = data.attrs.get('add_offset') + scale_factor = data.attrs.get("scale_factor") + add_offset = data.attrs.get("add_offset") if scale_factor is not None: data = data * scale_factor + add_offset @@ -251,11 +251,11 @@ def get_dataset(self, ds_id, ds_info): data = self._rename_dims(data) # drop coords whose units are not meters - drop_list = ['y', 'x', 'layer', 'vertices'] + drop_list = ["y", "x", "layer", "vertices"] coords_exist = [coord for coord in drop_list if coord in data.coords] if coords_exist: data = data.drop_vars(coords_exist) - if ds_id['name'] in ['assembled_lat_bounds', 'assembled_lon_bounds']: + if ds_id["name"] in ["assembled_lat_bounds", "assembled_lon_bounds"]: data = self.prepare_geo(data) return data diff --git a/satpy/readers/utils.py b/satpy/readers/utils.py index 31f6dea6d9..892c93acaf 100644 --- a/satpy/readers/utils.py +++ b/satpy/readers/utils.py @@ -53,7 +53,7 @@ def np2str(value): type `numpy.string_` or it is not a numpy array """ - if hasattr(value, 'dtype') and \ + if hasattr(value, "dtype") and \ issubclass(value.dtype.type, (np.str_, np.string_, np.object_)) \ and value.size == 1: value = value.item() @@ -68,13 +68,13 @@ def np2str(value): def _get_geostationary_height(geos_area): params = geos_area.crs.coordinate_operation.params - h_param = [p for p in params if 'satellite height' in p.name.lower()][0] + h_param = [p for p in params if "satellite height" in p.name.lower()][0] return h_param.value def _get_geostationary_reference_longitude(geos_area): params = geos_area.crs.coordinate_operation.params - lon_0_params = [p for p in params if 'longitude of natural origin' in p.name.lower()] + lon_0_params = [p for p in params if "longitude of natural origin" in p.name.lower()] if not lon_0_params: return 0 elif len(lon_0_params) 
!= 1: @@ -232,7 +232,7 @@ def _unzip_local_file(filename: str, prefix=None): Temporary filename path for decompressed file or None. """ - if not os.fspath(filename).endswith('bz2'): + if not os.fspath(filename).endswith("bz2"): return None fdn, tmpfilepath = tempfile.mkstemp(prefix=prefix, dir=config["tmp_dir"]) @@ -248,19 +248,19 @@ def _unzip_local_file(filename: str, prefix=None): def _unzip_with_pbzip(filename, tmpfilepath, fdn): # try pbzip2 - pbzip = which('pbzip2') + pbzip = which("pbzip2") if pbzip is None: return None # Run external pbzip2 - n_thr = os.environ.get('OMP_NUM_THREADS') + n_thr = os.environ.get("OMP_NUM_THREADS") if n_thr: runner = [pbzip, - '-dc', - '-p'+str(n_thr), + "-dc", + "-p"+str(n_thr), filename] else: runner = [pbzip, - '-dc', + "-dc", filename] p = Popen(runner, stdout=PIPE, stderr=PIPE) # nosec stdout = BytesIO(p.communicate()[0]) @@ -268,7 +268,7 @@ def _unzip_with_pbzip(filename, tmpfilepath, fdn): if status != 0: raise IOError("pbzip2 error '%s', failed, status=%d" % (filename, status)) - with closing(os.fdopen(fdn, 'wb')) as ofpt: + with closing(os.fdopen(fdn, "wb")) as ofpt: try: stdout.seek(0) shutil.copyfileobj(stdout, ofpt) @@ -291,7 +291,7 @@ def _unzip_with_bz2(filename, tmpfilepath): def _write_uncompressed_file(content, fdn, filename, tmpfilepath): - with closing(os.fdopen(fdn, 'wb')) as ofpt: + with closing(os.fdopen(fdn, "wb")) as ofpt: try: ofpt.write(content) except IOError: @@ -348,7 +348,7 @@ def generic_open(filename, *args, **kwargs): Returns a file-like object. """ - if os.fspath(filename).endswith('.bz2'): + if os.fspath(filename).endswith(".bz2"): fp = bz2.open(filename, *args, **kwargs) else: try: @@ -413,8 +413,8 @@ def get_user_calibration_factors(band_name, correction_dict): """Retrieve radiance correction factors from user-supplied dict.""" if band_name in correction_dict: try: - slope = correction_dict[band_name]['slope'] - offset = correction_dict[band_name]['offset'] + slope = correction_dict[band_name]["slope"] + offset = correction_dict[band_name]["offset"] except KeyError: raise KeyError("Incorrect correction factor dictionary. 
You must " "supply 'slope' and 'offset' keys.") @@ -440,13 +440,13 @@ def get_array_date(scn_data, utc_date=None): """Get start time from a channel data array.""" if utc_date is None: try: - utc_date = scn_data.attrs['start_time'] + utc_date = scn_data.attrs["start_time"] except KeyError: try: - utc_date = scn_data.attrs['scheduled_time'] + utc_date = scn_data.attrs["scheduled_time"] except KeyError: - raise KeyError('Scene has no start_time ' - 'or scheduled_time attribute.') + raise KeyError("Scene has no start_time " + "or scheduled_time attribute.") return utc_date @@ -456,8 +456,8 @@ def apply_earthsun_distance_correction(reflectance, utc_date=None): utc_date = get_array_date(reflectance, utc_date) sun_earth_dist = sun_earth_distance_correction(utc_date) - reflectance.attrs['sun_earth_distance_correction_applied'] = True - reflectance.attrs['sun_earth_distance_correction_factor'] = sun_earth_dist + reflectance.attrs["sun_earth_distance_correction_applied"] = True + reflectance.attrs["sun_earth_distance_correction_factor"] = sun_earth_dist with xr.set_options(keep_attrs=True): reflectance = reflectance * sun_earth_dist * sun_earth_dist return reflectance @@ -469,8 +469,8 @@ def remove_earthsun_distance_correction(reflectance, utc_date=None): utc_date = get_array_date(reflectance, utc_date) sun_earth_dist = sun_earth_distance_correction(utc_date) - reflectance.attrs['sun_earth_distance_correction_applied'] = False - reflectance.attrs['sun_earth_distance_correction_factor'] = sun_earth_dist + reflectance.attrs["sun_earth_distance_correction_applied"] = False + reflectance.attrs["sun_earth_distance_correction_factor"] = sun_earth_dist with xr.set_options(keep_attrs=True): reflectance = reflectance / (sun_earth_dist * sun_earth_dist) return reflectance diff --git a/satpy/readers/vaisala_gld360.py b/satpy/readers/vaisala_gld360.py index 735eb6b3c9..b0bde01573 100644 --- a/satpy/readers/vaisala_gld360.py +++ b/satpy/readers/vaisala_gld360.py @@ -48,11 +48,11 @@ def __init__(self, filename, filename_info, filetype_info): """Initialize VaisalaGLD360TextFileHandler.""" super(VaisalaGLD360TextFileHandler, self).__init__(filename, filename_info, filetype_info) - names = ['gld360_date', 'gld360_time', 'latitude', 'longitude', 'power', 'unit'] - types = ['str', 'str', 'float', 'float', 'float', 'str'] + names = ["gld360_date", "gld360_time", "latitude", "longitude", "power", "unit"] + types = ["str", "str", "float", "float", "float", "str"] dtypes = dict(zip(names, types)) # Combine 'date' and 'time' into a datetime object - parse_dates = {'time': ['gld360_date', 'gld360_time']} + parse_dates = {"time": ["gld360_date", "gld360_time"]} self.data = pd.read_csv(filename, delim_whitespace=True, header=None, names=names, dtype=dtypes, parse_dates=parse_dates) @@ -60,28 +60,28 @@ def __init__(self, filename, filename_info, filetype_info): @property def start_time(self): """Get start time.""" - return self.data['time'].iloc[0] + return self.data["time"].iloc[0] @property def end_time(self): """Get end time.""" - return self.data['time'].iloc[-1] + return self.data["time"].iloc[-1] def get_dataset(self, dataset_id, dataset_info): """Load a dataset.""" - xarr = xr.DataArray(da.from_array(self.data[dataset_id['name']], + xarr = xr.DataArray(da.from_array(self.data[dataset_id["name"]], chunks=CHUNK_SIZE), dims=["y"]) # Add time, longitude, and latitude as non-dimensional y-coordinates - xarr['time'] = ('y', self.data['time']) - xarr['longitude'] = ('y', self.data['longitude']) - xarr['latitude'] = ('y', 
self.data['latitude']) + xarr["time"] = ("y", self.data["time"]) + xarr["longitude"] = ("y", self.data["longitude"]) + xarr["latitude"] = ("y", self.data["latitude"]) - if dataset_id['name'] == 'power': + if dataset_id["name"] == "power": # Check that units in the file match the unit specified in the # reader yaml-file - if not (self.data.unit == dataset_info['units']).all(): - raise ValueError('Inconsistent units found in file!') + if not (self.data.unit == dataset_info["units"]).all(): + raise ValueError("Inconsistent units found in file!") xarr.attrs.update(dataset_info) return xarr diff --git a/satpy/readers/vii_base_nc.py b/satpy/readers/vii_base_nc.py index e51024ba56..83056189dc 100644 --- a/satpy/readers/vii_base_nc.py +++ b/satpy/readers/vii_base_nc.py @@ -46,14 +46,14 @@ def __init__(self, filename, filename_info, filetype_info, orthorect=False): super().__init__(filename, filename_info, filetype_info, auto_maskandscale=True) # Saves the orthorectification flag - self.orthorect = orthorect and filetype_info.get('orthorect', True) + self.orthorect = orthorect and filetype_info.get("orthorect", True) # Saves the interpolation flag - self.interpolate = filetype_info.get('interpolate', True) + self.interpolate = filetype_info.get("interpolate", True) try: - longitude = self[filetype_info['cached_longitude']] - latitude = self[filetype_info['cached_latitude']] + longitude = self[filetype_info["cached_longitude"]] + latitude = self[filetype_info["cached_latitude"]] if self.interpolate: self.longitude, self.latitude = self._perform_geo_interpolation(longitude, latitude) @@ -66,22 +66,22 @@ def __init__(self, filename, filename_info, filetype_info, orthorect=False): def _standardize_dims(self, variable): """Standardize dims to y, x.""" - if 'num_pixels' in variable.dims: - variable = variable.rename({'num_pixels': 'x', 'num_lines': 'y'}) - if 'num_points_act' in variable.dims: - variable = variable.rename({'num_points_act': 'x', 'num_points_alt': 'y'}) - if variable.dims[0] == 'x': - variable = variable.transpose('y', 'x') + if "num_pixels" in variable.dims: + variable = variable.rename({"num_pixels": "x", "num_lines": "y"}) + if "num_points_act" in variable.dims: + variable = variable.rename({"num_points_act": "x", "num_points_alt": "y"}) + if variable.dims[0] == "x": + variable = variable.transpose("y", "x") return variable def get_dataset(self, dataset_id, dataset_info): """Get dataset using file_key in dataset_info.""" - var_key = dataset_info['file_key'] - logger.debug('Reading in file to get dataset with key %s.', var_key) + var_key = dataset_info["file_key"] + logger.debug("Reading in file to get dataset with key %s.", var_key) - if var_key == 'cached_longitude' and self.longitude is not None: + if var_key == "cached_longitude" and self.longitude is not None: variable = self.longitude.copy() - elif var_key == 'cached_latitude' and self.latitude is not None: + elif var_key == "cached_latitude" and self.latitude is not None: variable = self.latitude.copy() else: try: @@ -91,21 +91,21 @@ def get_dataset(self, dataset_id, dataset_info): return None # If the dataset is marked for interpolation, perform the interpolation from tie points to pixels - if dataset_info.get('interpolate', False) and self.interpolate: + if dataset_info.get("interpolate", False) and self.interpolate: variable = self._perform_interpolation(variable) # Perform the calibration if required - if dataset_info.get('calibration') is not None: + if dataset_info.get("calibration") is not None: variable = 
self._perform_calibration(variable, dataset_info) # Perform the orthorectification if required if self.orthorect: - orthorect_data_name = dataset_info.get('orthorect_data', None) + orthorect_data_name = dataset_info.get("orthorect_data", None) if orthorect_data_name is not None: variable = self._perform_orthorectification(variable, orthorect_data_name) # Manage the attributes of the dataset - variable.attrs.setdefault('units', None) + variable.attrs.setdefault("units", None) variable.attrs.update(dataset_info) variable.attrs.update(self._get_global_attributes()) @@ -130,8 +130,8 @@ def _perform_interpolation(variable): TIE_POINTS_FACTOR )[0] new_variable = interpolated_values.rename( - num_tie_points_act='num_pixels', - num_tie_points_alt='num_lines' + num_tie_points_act="num_pixels", + num_tie_points_alt="num_lines" ) new_variable.name = variable.name new_variable.attrs = variable.attrs @@ -157,14 +157,14 @@ def _perform_geo_interpolation(longitude, latitude): TIE_POINTS_FACTOR ) new_longitude = interpolated_longitude.rename( - num_tie_points_act='num_pixels', - num_tie_points_alt='num_lines' + num_tie_points_act="num_pixels", + num_tie_points_alt="num_lines" ) new_longitude.name = longitude.name new_longitude.attrs = longitude.attrs new_latitude = interpolated_latitude.rename( - num_tie_points_act='num_pixels', - num_tie_points_alt='num_lines' + num_tie_points_act="num_pixels", + num_tie_points_alt="num_lines" ) new_latitude.name = latitude.name new_latitude.attrs = latitude.attrs @@ -181,20 +181,20 @@ def _perform_calibration(self, variable, dataset_info): def _get_global_attributes(self): """Create a dictionary of global attributes to be added to all datasets.""" attributes = { - 'filename': self.filename, - 'start_time': self.start_time, - 'end_time': self.end_time, - 'spacecraft_name': self.spacecraft_name, - 'ssp_lon': self.ssp_lon, - 'sensor': self.sensor, - 'filename_start_time': self.filename_info['sensing_start_time'], - 'filename_end_time': self.filename_info['sensing_end_time'], - 'platform_name': self.spacecraft_name, + "filename": self.filename, + "start_time": self.start_time, + "end_time": self.end_time, + "spacecraft_name": self.spacecraft_name, + "ssp_lon": self.ssp_lon, + "sensor": self.sensor, + "filename_start_time": self.filename_info["sensing_start_time"], + "filename_end_time": self.filename_info["sensing_end_time"], + "platform_name": self.spacecraft_name, } # Add a "quality_group" item to the dictionary with all the variables and attributes # which are found in the 'quality' group of the VII product - quality_group = self['quality'] + quality_group = self["quality"] quality_dict = {} for key in quality_group: # Add the values (as Numpy array) of each variable in the group where possible @@ -205,7 +205,7 @@ def _get_global_attributes(self): # Add the attributes of the quality group quality_dict.update(quality_group.attrs) - attributes['quality_group'] = quality_dict + attributes["quality_group"] = quality_dict return attributes @@ -213,29 +213,29 @@ def _get_global_attributes(self): def start_time(self): """Get observation start time.""" try: - start_time = datetime.strptime(self['/attr/sensing_start_time_utc'], '%Y%m%d%H%M%S.%f') + start_time = datetime.strptime(self["/attr/sensing_start_time_utc"], "%Y%m%d%H%M%S.%f") except ValueError: - start_time = datetime.strptime(self['/attr/sensing_start_time_utc'], '%Y-%m-%d %H:%M:%S.%f') + start_time = datetime.strptime(self["/attr/sensing_start_time_utc"], "%Y-%m-%d %H:%M:%S.%f") return start_time @property def 
end_time(self): """Get observation end time.""" try: - end_time = datetime.strptime(self['/attr/sensing_end_time_utc'], '%Y%m%d%H%M%S.%f') + end_time = datetime.strptime(self["/attr/sensing_end_time_utc"], "%Y%m%d%H%M%S.%f") except ValueError: - end_time = datetime.strptime(self['/attr/sensing_end_time_utc'], '%Y-%m-%d %H:%M:%S.%f') + end_time = datetime.strptime(self["/attr/sensing_end_time_utc"], "%Y-%m-%d %H:%M:%S.%f") return end_time @property def spacecraft_name(self): """Return spacecraft name.""" - return self['/attr/spacecraft'] + return self["/attr/spacecraft"] @property def sensor(self): """Return sensor.""" - return self['/attr/instrument'] + return self["/attr/instrument"] @property def ssp_lon(self): diff --git a/satpy/readers/vii_l1b_nc.py b/satpy/readers/vii_l1b_nc.py index 2e66c3deb0..2dbcb63eda 100644 --- a/satpy/readers/vii_l1b_nc.py +++ b/satpy/readers/vii_l1b_nc.py @@ -44,14 +44,14 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): super().__init__(filename, filename_info, filetype_info, **kwargs) # Read the variables which are required for the calibration - self._bt_conversion_a = self['data/calibration_data/bt_conversion_a'].values - self._bt_conversion_b = self['data/calibration_data/bt_conversion_b'].values - self._channel_cw_thermal = self['data/calibration_data/channel_cw_thermal'].values - self._integrated_solar_irradiance = self['data/calibration_data/Band_averaged_solar_irradiance'].values + self._bt_conversion_a = self["data/calibration_data/bt_conversion_a"].values + self._bt_conversion_b = self["data/calibration_data/bt_conversion_b"].values + self._channel_cw_thermal = self["data/calibration_data/channel_cw_thermal"].values + self._integrated_solar_irradiance = self["data/calibration_data/Band_averaged_solar_irradiance"].values # Computes the angle factor for reflectance calibration as inverse of cosine of solar zenith angle # (the values in the product file are on tie points and in degrees, # therefore interpolation and conversion to radians are required) - solar_zenith_angle = self['data/measurement_data/solar_zenith'] + solar_zenith_angle = self["data/measurement_data/solar_zenith"] solar_zenith_angle_on_pixels = self._perform_interpolation(solar_zenith_angle) solar_zenith_angle_on_pixels_radians = np.radians(solar_zenith_angle_on_pixels) self.angle_factor = 1.0 / (np.cos(solar_zenith_angle_on_pixels_radians)) @@ -67,27 +67,27 @@ def _perform_calibration(self, variable, dataset_info): DataArray: array containing the calibrated values and all the original metadata. 
""" - calibration_name = dataset_info['calibration'] - if calibration_name == 'brightness_temperature': + calibration_name = dataset_info["calibration"] + if calibration_name == "brightness_temperature": # Extract the values of calibration coefficients for the current channel - chan_index = dataset_info['chan_thermal_index'] + chan_index = dataset_info["chan_thermal_index"] cw = self._channel_cw_thermal[chan_index] a = self._bt_conversion_a[chan_index] b = self._bt_conversion_b[chan_index] # Perform the calibration calibrated_variable = self._calibrate_bt(variable, cw, a, b) calibrated_variable.attrs = variable.attrs - elif calibration_name == 'reflectance': + elif calibration_name == "reflectance": # Extract the values of calibration coefficients for the current channel - chan_index = dataset_info['chan_solar_index'] + chan_index = dataset_info["chan_solar_index"] isi = self._integrated_solar_irradiance[chan_index] # Perform the calibration calibrated_variable = self._calibrate_refl(variable, self.angle_factor.data, isi) calibrated_variable.attrs = variable.attrs - elif calibration_name == 'radiance': + elif calibration_name == "radiance": calibrated_variable = variable else: - raise ValueError("Unknown calibration %s for dataset %s" % (calibration_name, dataset_info['name'])) + raise ValueError("Unknown calibration %s for dataset %s" % (calibration_name, dataset_info["name"])) return calibrated_variable @@ -108,7 +108,7 @@ def _perform_orthorectification(self, variable, orthorect_data_name): # based on the simplified formula using mean Earth radius variable += np.degrees(orthorect_data / MEAN_EARTH_RADIUS) except KeyError: - logger.warning('Required dataset %s for orthorectification not available, skipping', orthorect_data_name) + logger.warning("Required dataset %s for orthorectification not available, skipping", orthorect_data_name) return variable @staticmethod diff --git a/satpy/readers/vii_l2_nc.py b/satpy/readers/vii_l2_nc.py index 3ce3926674..276d77f668 100644 --- a/satpy/readers/vii_l2_nc.py +++ b/satpy/readers/vii_l2_nc.py @@ -43,5 +43,5 @@ def _perform_orthorectification(self, variable, orthorect_data_name): orthorect_data = self[orthorect_data_name] variable += orthorect_data except KeyError: - logger.warning('Required dataset %s for orthorectification not available, skipping', orthorect_data_name) + logger.warning("Required dataset %s for orthorectification not available, skipping", orthorect_data_name) return variable diff --git a/satpy/readers/viirs_atms_sdr_base.py b/satpy/readers/viirs_atms_sdr_base.py index 048c601f84..be0a7a0d65 100644 --- a/satpy/readers/viirs_atms_sdr_base.py +++ b/satpy/readers/viirs_atms_sdr_base.py @@ -32,37 +32,37 @@ LOG = logging.getLogger(__name__) -VIIRS_DATASET_KEYS = {'GDNBO': 'VIIRS-DNB-GEO', - 'SVDNB': 'VIIRS-DNB-SDR', - 'GITCO': 'VIIRS-IMG-GEO-TC', - 'GIMGO': 'VIIRS-IMG-GEO', - 'SVI01': 'VIIRS-I1-SDR', - 'SVI02': 'VIIRS-I2-SDR', - 'SVI03': 'VIIRS-I3-SDR', - 'SVI04': 'VIIRS-I4-SDR', - 'SVI05': 'VIIRS-I5-SDR', - 'GMTCO': 'VIIRS-MOD-GEO-TC', - 'GMODO': 'VIIRS-MOD-GEO', - 'SVM01': 'VIIRS-M1-SDR', - 'SVM02': 'VIIRS-M2-SDR', - 'SVM03': 'VIIRS-M3-SDR', - 'SVM04': 'VIIRS-M4-SDR', - 'SVM05': 'VIIRS-M5-SDR', - 'SVM06': 'VIIRS-M6-SDR', - 'SVM07': 'VIIRS-M7-SDR', - 'SVM08': 'VIIRS-M8-SDR', - 'SVM09': 'VIIRS-M9-SDR', - 'SVM10': 'VIIRS-M10-SDR', - 'SVM11': 'VIIRS-M11-SDR', - 'SVM12': 'VIIRS-M12-SDR', - 'SVM13': 'VIIRS-M13-SDR', - 'SVM14': 'VIIRS-M14-SDR', - 'SVM15': 'VIIRS-M15-SDR', - 'SVM16': 'VIIRS-M16-SDR', - 'IVCDB': 'VIIRS-DualGain-Cal-IP'} 
-ATMS_DATASET_KEYS = {'SATMS': 'ATMS-SDR',
-                     'GATMO': 'ATMS-SDR-GEO',
-                     'TATMS': 'ATMS-TDR'}
+VIIRS_DATASET_KEYS = {"GDNBO": "VIIRS-DNB-GEO",
+                      "SVDNB": "VIIRS-DNB-SDR",
+                      "GITCO": "VIIRS-IMG-GEO-TC",
+                      "GIMGO": "VIIRS-IMG-GEO",
+                      "SVI01": "VIIRS-I1-SDR",
+                      "SVI02": "VIIRS-I2-SDR",
+                      "SVI03": "VIIRS-I3-SDR",
+                      "SVI04": "VIIRS-I4-SDR",
+                      "SVI05": "VIIRS-I5-SDR",
+                      "GMTCO": "VIIRS-MOD-GEO-TC",
+                      "GMODO": "VIIRS-MOD-GEO",
+                      "SVM01": "VIIRS-M1-SDR",
+                      "SVM02": "VIIRS-M2-SDR",
+                      "SVM03": "VIIRS-M3-SDR",
+                      "SVM04": "VIIRS-M4-SDR",
+                      "SVM05": "VIIRS-M5-SDR",
+                      "SVM06": "VIIRS-M6-SDR",
+                      "SVM07": "VIIRS-M7-SDR",
+                      "SVM08": "VIIRS-M8-SDR",
+                      "SVM09": "VIIRS-M9-SDR",
+                      "SVM10": "VIIRS-M10-SDR",
+                      "SVM11": "VIIRS-M11-SDR",
+                      "SVM12": "VIIRS-M12-SDR",
+                      "SVM13": "VIIRS-M13-SDR",
+                      "SVM14": "VIIRS-M14-SDR",
+                      "SVM15": "VIIRS-M15-SDR",
+                      "SVM16": "VIIRS-M16-SDR",
+                      "IVCDB": "VIIRS-DualGain-Cal-IP"}
+ATMS_DATASET_KEYS = {"SATMS": "ATMS-SDR",
+                     "GATMO": "ATMS-SDR-GEO",
+                     "TATMS": "ATMS-TDR"}
 
 DATASET_KEYS = {}
 DATASET_KEYS.update(VIIRS_DATASET_KEYS)
@@ -106,7 +106,7 @@ def _parse_datetime(self, datestr, timestr):
         datetime_str = (str(datestr.data.compute().astype(str)) +
                         str(timestr.data.compute().astype(str)))
-        time_val = datetime.strptime(datetime_str, '%Y%m%d%H%M%S.%fZ')
+        time_val = datetime.strptime(datetime_str, "%Y%m%d%H%M%S.%fZ")
         if abs(time_val - NO_DATE) < EPSILON_TIME:
             # catch rare case when SDR files have incorrect date
             raise ValueError("Datetime invalid {}".format(time_val))
@@ -140,29 +140,29 @@ def end_orbit_number(self):
 
     def _get_aggr_path(self, fileinfo_key, aggr_default):
         dataset_group = DATASET_KEYS[self.datasets[0]]
-        default = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/' + aggr_default
+        default = "Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/" + aggr_default
         return self.filetype_info.get(fileinfo_key, default).format(dataset_group=dataset_group)
 
     @property
     def platform_name(self):
         """Get platform name."""
-        default = '/attr/Platform_Short_Name'
+        default = "/attr/Platform_Short_Name"
         platform_path = self.filetype_info.get(
-            'platform_name', default).format(**self.filetype_info)
-        platform_dict = {'NPP': 'Suomi-NPP',
-                         'JPSS-1': 'NOAA-20',
-                         'J01': 'NOAA-20',
-                         'JPSS-2': 'NOAA-21',
-                         'J02': 'NOAA-21'}
+            "platform_name", default).format(**self.filetype_info)
+        platform_dict = {"NPP": "Suomi-NPP",
+                         "JPSS-1": "NOAA-20",
+                         "J01": "NOAA-20",
+                         "JPSS-2": "NOAA-21",
+                         "J02": "NOAA-21"}
         return platform_dict.get(self[platform_path], self[platform_path])
 
     @property
     def sensor_name(self):
         """Get sensor name."""
         dataset_group = DATASET_KEYS[self.datasets[0]]
-        default = 'Data_Products/{dataset_group}/attr/Instrument_Short_Name'
+        default = "Data_Products/{dataset_group}/attr/Instrument_Short_Name"
         sensor_path = self.filetype_info.get(
-            'sensor_name', default).format(dataset_group=dataset_group)
+            "sensor_name", default).format(dataset_group=dataset_group)
         return self[sensor_path].lower()
 
     def scale_swath_data(self, data, scaling_factors, dataset_group):
@@ -178,7 +178,7 @@ def scale_swath_data(self, data, scaling_factors, dataset_group):
 
     def scale_data_to_specified_unit(self, data, dataset_id, ds_info):
         """Get scale and offset factors and convert/scale data to given physical unit."""
         var_path = self._generate_file_key(dataset_id, ds_info)
-        dataset_group = ds_info['dataset_group']
+        dataset_group = ds_info["dataset_group"]
 
         file_units = _get_file_units(dataset_id, ds_info)
         output_units = ds_info.get("units", file_units)
@@ -241,38 +241,38 @@ def expand_single_values(var, 
scans): else: expanded = np.repeat(var, scans) expanded.attrs = var.attrs - expanded.rename({expanded.dims[0]: 'y'}) + expanded.rename({expanded.dims[0]: "y"}) return expanded def _scan_size(self, dataset_group_name): """Get how many rows of data constitute one scanline.""" - if 'ATM' in dataset_group_name: + if "ATM" in dataset_group_name: scan_size = 1 - elif 'I' in dataset_group_name: + elif "I" in dataset_group_name: scan_size = 32 else: scan_size = 16 return scan_size def _generate_file_key(self, ds_id, ds_info, factors=False): - var_path = ds_info.get('file_key', 'All_Data/{dataset_group}_All/{calibration}') + var_path = ds_info.get("file_key", "All_Data/{dataset_group}_All/{calibration}") calibration = { - 'radiance': 'Radiance', - 'reflectance': 'Reflectance', - 'brightness_temperature': 'BrightnessTemperature', - }.get(ds_id.get('calibration')) - var_path = var_path.format(calibration=calibration, dataset_group=DATASET_KEYS[ds_info['dataset_group']]) - if ds_id['name'] in ['dnb_longitude', 'dnb_latitude']: + "radiance": "Radiance", + "reflectance": "Reflectance", + "brightness_temperature": "BrightnessTemperature", + }.get(ds_id.get("calibration")) + var_path = var_path.format(calibration=calibration, dataset_group=DATASET_KEYS[ds_info["dataset_group"]]) + if ds_id["name"] in ["dnb_longitude", "dnb_latitude"]: if self.use_tc is True: - return var_path + '_TC' - if self.use_tc is None and var_path + '_TC' in self.file_content: - return var_path + '_TC' + return var_path + "_TC" + if self.use_tc is None and var_path + "_TC" in self.file_content: + return var_path + "_TC" return var_path def _update_data_attributes(self, data, dataset_id, ds_info): file_units = _get_file_units(dataset_id, ds_info) output_units = ds_info.get("units", file_units) - i = getattr(data, 'attrs', {}) + i = getattr(data, "attrs", {}) i.update(ds_info) i.update({ "platform_name": self.platform_name, @@ -280,7 +280,7 @@ def _update_data_attributes(self, data, dataset_id, ds_info): "start_orbit": self.start_orbit_number, "end_orbit": self.end_orbit_number, "units": output_units, - "rows_per_scan": self._scan_size(ds_info['dataset_group']), + "rows_per_scan": self._scan_size(ds_info["dataset_group"]), }) i.update(dataset_id.to_dict()) data.attrs.update(i) @@ -304,7 +304,7 @@ def concatenate_dataset(self, dataset_group, var_path, **kwargs): data_chunks.append(variable.isel(y=slice(start_scan, start_scan + gscans * scan_size))) start_scan += gscans * scan_size - return xr.concat(data_chunks, 'y') + return xr.concat(data_chunks, "y") else: # This is not tested - Not sure this code is ever going to be used? A. 
Dybbroe # Mon Jan 2 13:31:21 2023 @@ -316,11 +316,11 @@ def _get_rows_per_granule(self, dataset_group): return [scan_size * gran_scans for gran_scans in scans_per_gran] def _get_scans_per_granule(self, dataset_group): - number_of_granules_path = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateNumberGranules' + number_of_granules_path = "Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateNumberGranules" nb_granules_path = number_of_granules_path.format(dataset_group=DATASET_KEYS[dataset_group]) scans = [] for granule in range(self[nb_granules_path]): - scans_path = 'Data_Products/{dataset_group}/{dataset_group}_Gran_{granule}/attr/N_Number_Of_Scans' + scans_path = "Data_Products/{dataset_group}/{dataset_group}_Gran_{granule}/attr/N_Number_Of_Scans" scans_path = scans_path.format(dataset_group=DATASET_KEYS[dataset_group], granule=granule) scans.append(self[scans_path]) return scans @@ -350,7 +350,7 @@ def available_datasets(self, configured_datasets=None): if is_avail is not None: yield is_avail, ds_info continue - dataset_group = [ds_group for ds_group in ds_info['dataset_groups'] if ds_group in self.datasets] + dataset_group = [ds_group for ds_group in ds_info["dataset_groups"] if ds_group in self.datasets] if dataset_group: yield True, ds_info elif is_avail is None: diff --git a/satpy/readers/viirs_compact.py b/satpy/readers/viirs_compact.py index b9f83e4287..af3a4ce766 100644 --- a/satpy/readers/viirs_compact.py +++ b/satpy/readers/viirs_compact.py @@ -67,9 +67,9 @@ h = 6.6260755e-34 # m2kg.s-1 k = 1.380658e-23 # m2kg.s-2.K-1 -short_names = {'NPP': 'Suomi-NPP', - 'J01': 'NOAA-20', - 'J02': 'NOAA-21'} +short_names = {"NPP": "Suomi-NPP", + "J01": "NOAA-20", + "J02": "NOAA-21"} class VIIRSCompactFileHandler(BaseFileHandler): @@ -83,28 +83,28 @@ def __init__(self, filename, filename_info, filetype_info): self.finfo = filename_info self.lons = None self.lats = None - if filetype_info['file_type'] == 'compact_m': - self.ch_type = 'MOD' - elif filetype_info['file_type'] == 'compact_dnb': - self.ch_type = 'DNB' + if filetype_info["file_type"] == "compact_m": + self.ch_type = "MOD" + elif filetype_info["file_type"] == "compact_dnb": + self.ch_type = "DNB" else: - raise IOError('Compact Viirs file type not recognized.') + raise IOError("Compact Viirs file type not recognized.") geo_data = self.h5f["Data_Products"]["VIIRS-%s-GEO" % self.ch_type]["VIIRS-%s-GEO_Gran_0" % self.ch_type] - self.min_lat = geo_data.attrs['South_Bounding_Coordinate'].item() - self.max_lat = geo_data.attrs['North_Bounding_Coordinate'].item() - self.min_lon = geo_data.attrs['West_Bounding_Coordinate'].item() - self.max_lon = geo_data.attrs['East_Bounding_Coordinate'].item() + self.min_lat = geo_data.attrs["South_Bounding_Coordinate"].item() + self.max_lat = geo_data.attrs["North_Bounding_Coordinate"].item() + self.min_lon = geo_data.attrs["West_Bounding_Coordinate"].item() + self.max_lon = geo_data.attrs["East_Bounding_Coordinate"].item() self.switch_to_cart = ((abs(self.max_lon - self.min_lon) > 90) or (max(abs(self.min_lat), abs(self.max_lat)) > 60)) self.scans = self.h5f["All_Data"]["NumberOfScans"][0] - self.geography = self.h5f["All_Data"]['VIIRS-%s-GEO_All' % self.ch_type] + self.geography = self.h5f["All_Data"]["VIIRS-%s-GEO_All" % self.ch_type] for key in self.h5f["All_Data"].keys(): if key.startswith("VIIRS") and key.endswith("SDR_All"): - channel = key.split('-')[1] + channel = key.split("-")[1] break # This supposes there is only one tiepoint zone in the track direction. 
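# A minimal sketch (assumed behaviour, simplified from the reader) of the idea behind
# the switch_to_cart flag above: plain interpolation of raw longitudes breaks across
# the antimeridian and degrades near the poles, so wide or high-latitude granules are
# interpolated in cartesian space and converted back afterwards.
import numpy as np

def lonlat_to_cart(lon, lat):
    """Map degrees lon/lat to x, y, z on the unit sphere."""
    lon_r, lat_r = np.deg2rad(lon), np.deg2rad(lat)
    return np.cos(lat_r) * np.cos(lon_r), np.cos(lat_r) * np.sin(lon_r), np.sin(lat_r)

def cart_to_lonlat(x, y, z):
    """Map x, y, z back to degrees lon/lat (inputs need not be exactly unit length)."""
    lon = np.rad2deg(np.arctan2(y, x))
    lat = np.rad2deg(np.arcsin(z / np.sqrt(x * x + y * y + z * z)))
    return lon, lat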
@@ -134,9 +134,9 @@ def __init__(self, filename, filename_info, filetype_info): self.cache = {} self.mda = {} - short_name = np2str(self.h5f.attrs['Platform_Short_Name']) - self.mda['platform_name'] = short_names.get(short_name, short_name) - self.mda['sensor'] = 'viirs' + short_name = np2str(self.h5f.attrs["Platform_Short_Name"]) + self.mda["platform_name"] = short_names.get(short_name, short_name) + self.mda["sensor"] = "viirs" def __del__(self): """Close file handlers when we are done.""" @@ -145,75 +145,75 @@ def __del__(self): def get_dataset(self, key, info): """Load a dataset.""" - logger.debug('Reading %s.', key['name']) - if key['name'] in _channels_dict: + logger.debug("Reading %s.", key["name"]) + if key["name"] in _channels_dict: m_data = self.read_dataset(key, info) else: m_data = self.read_geo(key, info) m_data.attrs.update(info) - m_data.attrs['rows_per_scan'] = self.scan_size + m_data.attrs["rows_per_scan"] = self.scan_size return m_data def get_bounding_box(self): """Get the bounding box of the data.""" for key in self.h5f["Data_Products"].keys(): if key.startswith("VIIRS") and key.endswith("GEO"): - lats = self.h5f["Data_Products"][key][key + '_Gran_0'].attrs['G-Ring_Latitude'][()] - lons = self.h5f["Data_Products"][key][key + '_Gran_0'].attrs['G-Ring_Longitude'][()] + lats = self.h5f["Data_Products"][key][key + "_Gran_0"].attrs["G-Ring_Latitude"][()] + lons = self.h5f["Data_Products"][key][key + "_Gran_0"].attrs["G-Ring_Longitude"][()] break else: - raise KeyError('Cannot find bounding coordinates!') + raise KeyError("Cannot find bounding coordinates!") return lons.ravel(), lats.ravel() @property def start_time(self): """Get the start time.""" - return self.finfo['start_time'] + return self.finfo["start_time"] @property def end_time(self): """Get the end time.""" end_time = datetime.combine(self.start_time.date(), - self.finfo['end_time'].time()) + self.finfo["end_time"].time()) if end_time < self.start_time: end_time += timedelta(days=1) return end_time def read_geo(self, key, info): """Read angles.""" - pairs = {('satellite_azimuth_angle', 'satellite_zenith_angle'): + pairs = {("satellite_azimuth_angle", "satellite_zenith_angle"): ("SatelliteAzimuthAngle", "SatelliteZenithAngle"), - ('solar_azimuth_angle', 'solar_zenith_angle'): + ("solar_azimuth_angle", "solar_zenith_angle"): ("SolarAzimuthAngle", "SolarZenithAngle"), - ('dnb_solar_azimuth_angle', 'dnb_solar_zenith_angle'): + ("dnb_solar_azimuth_angle", "dnb_solar_zenith_angle"): ("SolarAzimuthAngle", "SolarZenithAngle"), - ('dnb_lunar_azimuth_angle', 'dnb_lunar_zenith_angle'): + ("dnb_lunar_azimuth_angle", "dnb_lunar_zenith_angle"): ("LunarAzimuthAngle", "LunarZenithAngle"), } if self.lons is None or self.lats is None: self.lons, self.lats = self.navigate() for pair, fkeys in pairs.items(): - if key['name'] in pair: + if key["name"] in pair: if (self.cache.get(pair[0]) is None or self.cache.get(pair[1]) is None): angles = self.angles(*fkeys) self.cache[pair[0]], self.cache[pair[1]] = angles - if key['name'] == pair[0]: - return xr.DataArray(self.cache[pair[0]], name=key['name'], - attrs=self.mda, dims=('y', 'x')) + if key["name"] == pair[0]: + return xr.DataArray(self.cache[pair[0]], name=key["name"], + attrs=self.mda, dims=("y", "x")) else: - return xr.DataArray(self.cache[pair[1]], name=key['name'], - attrs=self.mda, dims=('y', 'x')) + return xr.DataArray(self.cache[pair[1]], name=key["name"], + attrs=self.mda, dims=("y", "x")) - if info.get('standard_name') in ['latitude', 'longitude']: + if 
info.get("standard_name") in ["latitude", "longitude"]: mda = self.mda.copy() mda.update(info) - if info['standard_name'] == 'longitude': - return xr.DataArray(self.lons, attrs=mda, dims=('y', 'x')) + if info["standard_name"] == "longitude": + return xr.DataArray(self.lons, attrs=mda, dims=("y", "x")) else: - return xr.DataArray(self.lats, attrs=mda, dims=('y', 'x')) + return xr.DataArray(self.lats, attrs=mda, dims=("y", "x")) - if key['name'] == 'dnb_moon_illumination_fraction': + if key["name"] == "dnb_moon_illumination_fraction": mda = self.mda.copy() mda.update(info) return xr.DataArray(da.from_array(self.geography["MoonIllumFraction"]), @@ -222,7 +222,7 @@ def read_geo(self, key, info): def read_dataset(self, dataset_key, info): """Read a dataset.""" h5f = self.h5f - channel = _channels_dict[dataset_key['name']] + channel = _channels_dict[dataset_key["name"]] chan_dict = dict([(key.split("-")[1], key) for key in h5f["All_Data"].keys() if key.startswith("VIIRS")]) @@ -230,39 +230,39 @@ def read_dataset(self, dataset_key, info): h5rads = h5f["All_Data"][chan_dict[channel]]["Radiance"] chunks = h5rads.chunks or CHUNK_SIZE rads = xr.DataArray(da.from_array(h5rads, chunks=chunks), - name=dataset_key['name'], - dims=['y', 'x']).astype(np.float32) + name=dataset_key["name"], + dims=["y", "x"]).astype(np.float32) h5attrs = h5rads.attrs scans = h5f["All_Data"]["NumberOfScans"][0] rads = rads[:scans * 16, :] rads = rads.where(rads <= 65526) try: - rads = xr.where(rads <= h5attrs['Threshold'], - rads * h5attrs['RadianceScaleLow'] + - h5attrs['RadianceOffsetLow'], - rads * h5attrs['RadianceScaleHigh'] + - h5attrs['RadianceOffsetHigh']) + rads = xr.where(rads <= h5attrs["Threshold"], + rads * h5attrs["RadianceScaleLow"] + + h5attrs["RadianceOffsetLow"], + rads * h5attrs["RadianceScaleHigh"] + + h5attrs["RadianceOffsetHigh"]) except (KeyError, AttributeError): logger.info("Missing attribute for scaling of %s.", channel) pass unit = "W m-2 sr-1 μm-1" - if dataset_key['calibration'] == 'counts': + if dataset_key["calibration"] == "counts": raise NotImplementedError("Can't get counts from this data") - if dataset_key['calibration'] in ['reflectance', 'brightness_temperature']: + if dataset_key["calibration"] in ["reflectance", "brightness_temperature"]: # do calibrate try: # First guess: VIS or NIR data - a_vis = h5attrs['EquivalentWidth'] - b_vis = h5attrs['IntegratedSolarIrradiance'] - dse = h5attrs['EarthSunDistanceNormalised'] + a_vis = h5attrs["EquivalentWidth"] + b_vis = h5attrs["IntegratedSolarIrradiance"] + dse = h5attrs["EarthSunDistanceNormalised"] rads *= 100 * np.pi * a_vis / b_vis * (dse**2) unit = "%" except KeyError: # Maybe it's IR data? 
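# (A hedged aside on the branch below: for thermal channels the reader inverts the
# Planck function, T = h*c / (k*lambda_c*ln(1 + 2*h*c**2 / (lambda_c**5 * L))), with
# lambda_c the central wavelength in metres and L the spectral radiance in W m-3 sr-1,
# before applying the band-correction coefficients a_ir/b_ir read from the file.
# A self-contained sketch with the constants spelled out:)
import numpy as np

def inverse_planck(rad, lambda_c, h=6.6260755e-34, c=2.99792458e8, k=1.380658e-23):
    """Brightness temperature (K) from spectral radiance (W m-3 sr-1) at wavelength lambda_c (m)."""
    return (h * c) / (k * lambda_c * np.log(1 + (2 * h * c ** 2) / (lambda_c ** 5 * rad)))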
try: - a_ir = h5attrs['BandCorrectionCoefficientA'] - b_ir = h5attrs['BandCorrectionCoefficientB'] - lambda_c = h5attrs['CentralWaveLength'] + a_ir = h5attrs["BandCorrectionCoefficientA"] + b_ir = h5attrs["BandCorrectionCoefficientB"] + lambda_c = h5attrs["CentralWaveLength"] rads *= 1e6 rads = (h * c) / (k * lambda_c * np.log(1 + @@ -274,12 +274,12 @@ def read_dataset(self, dataset_key, info): except KeyError: logger.warning("Calibration failed.") - elif dataset_key['calibration'] != 'radiance': + elif dataset_key["calibration"] != "radiance": raise ValueError("Calibration parameter should be radiance, " "reflectance or brightness_temperature") rads = rads.clip(min=0) rads.attrs = self.mda - rads.attrs['units'] = unit + rads.attrs["units"] = unit return rads def expand_angle_and_nav(self, arrays): @@ -326,7 +326,7 @@ def navigate(self): return expanded def _get_geographical_chunks(self): - shape = self.geography['Longitude'].shape + shape = self.geography["Longitude"].shape horizontal_chunks = (self.nb_tiepoint_zones + 1).compute() chunks = (shape[0], tuple(horizontal_chunks)) return chunks diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index a8c6c934b2..646d7e0d17 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -86,28 +86,28 @@ def __init__(self, filename, filename_info, filetype_info): decode_cf=True, mask_and_scale=True, chunks={ - 'Columns': -1, - 'Rows': row_chunks_m, - 'Along_Scan_375m': -1, - 'Along_Track_375m': row_chunks_i, - 'Along_Scan_750m': -1, - 'Along_Track_750m': row_chunks_m, + "Columns": -1, + "Rows": row_chunks_m, + "Along_Scan_375m": -1, + "Along_Track_375m": row_chunks_i, + "Along_Scan_750m": -1, + "Along_Track_750m": row_chunks_m, }) - if 'Columns' in self.nc.dims: - self.nc = self.nc.rename({'Columns': 'x', 'Rows': 'y'}) - elif 'Along_Track_375m' in self.nc.dims: - self.nc = self.nc.rename({'Along_Scan_375m': 'x', 'Along_Track_375m': 'y'}) - self.nc = self.nc.rename({'Along_Scan_750m': 'x', 'Along_Track_750m': 'y'}) + if "Columns" in self.nc.dims: + self.nc = self.nc.rename({"Columns": "x", "Rows": "y"}) + elif "Along_Track_375m" in self.nc.dims: + self.nc = self.nc.rename({"Along_Scan_375m": "x", "Along_Track_375m": "y"}) + self.nc = self.nc.rename({"Along_Scan_750m": "x", "Along_Track_750m": "y"}) # For some reason, no 'standard_name' is defined in some netCDF files, so # here we manually make the definitions. 
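# (For illustration only — a hypothetical, generalised form of the two checks below,
# which patch a CF standard_name onto coordinate variables when the file omits it:)
def ensure_standard_names(dataset, names=("Latitude", "Longitude")):
    """Set attrs['standard_name'] on the given variables if it is missing."""
    for var in names:
        if var in dataset:
            dataset[var].attrs.setdefault("standard_name", var.lower())
    return dataset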
- if 'Latitude' in self.nc: - self.nc['Latitude'].attrs.update({'standard_name': 'latitude'}) - if 'Longitude' in self.nc: - self.nc['Longitude'].attrs.update({'standard_name': 'longitude'}) + if "Latitude" in self.nc: + self.nc["Latitude"].attrs.update({"standard_name": "latitude"}) + if "Longitude" in self.nc: + self.nc["Longitude"].attrs.update({"standard_name": "longitude"}) - self.algorithm_version = filename_info['platform_shortname'] - self.sensor_name = 'viirs' + self.algorithm_version = filename_info["platform_shortname"] + self.sensor_name = "viirs" def rows_per_scans(self, data_arr: xr.DataArray) -> int: """Get number of array rows per instrument scan based on data resolution.""" @@ -115,7 +115,7 @@ def rows_per_scans(self, data_arr: xr.DataArray) -> int: def get_dataset(self, dataset_id: DataID, info: dict) -> xr.DataArray: """Get the dataset.""" - data_arr = self.nc[info['file_key']] + data_arr = self.nc[info["file_key"]] data_arr = self._mask_invalid(data_arr, info) units = info.get("units", data_arr.attrs.get("units")) if units is None or units == "unitless": @@ -150,27 +150,27 @@ def _decode_flag_meanings(data_arr: xr.DataArray): flag_meanings = data_arr.attrs.get("flag_meanings", None) if isinstance(flag_meanings, str) and "\n" not in flag_meanings: # only handle CF-standard flag meanings - data_arr.attrs['flag_meanings'] = [flag for flag in data_arr.attrs['flag_meanings'].split(' ')] + data_arr.attrs["flag_meanings"] = [flag for flag in data_arr.attrs["flag_meanings"].split(" ")] @property def start_time(self): """Get first date/time when observations were recorded.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get last date/time when observations were recorded.""" - return self.filename_info['end_time'] + return self.filename_info["end_time"] @property def platform_name(self): """Get platform name.""" - platform_path = self.filename_info['platform_shortname'] - platform_dict = {'NPP': 'Suomi-NPP', - 'JPSS-1': 'NOAA-20', - 'J01': 'NOAA-20', - 'JPSS-2': 'NOAA-21', - 'J02': 'NOAA-21'} + platform_path = self.filename_info["platform_shortname"] + platform_dict = {"NPP": "Suomi-NPP", + "JPSS-1": "NOAA-20", + "J01": "NOAA-20", + "JPSS-2": "NOAA-21", + "J02": "NOAA-21"} return platform_dict[platform_path.upper()] def available_datasets(self, configured_datasets=None): @@ -212,7 +212,7 @@ def available_datasets(self, configured_datasets=None): # file handler so let's yield early yield is_avail, ds_info continue - if self.file_type_matches(ds_info['file_type']) is None: + if self.file_type_matches(ds_info["file_type"]) is None: # this is not the file type for this dataset yield None, ds_info yield file_key in self.nc, ds_info @@ -278,18 +278,18 @@ def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: def _get_veg_index_good_mask(self) -> xr.DataArray: # each mask array should be TRUE when pixels are UNACCEPTABLE - qf1 = self.nc['QF1 Surface Reflectance'] + qf1 = self.nc["QF1 Surface Reflectance"] has_sun_glint = (qf1 & 0b11000000) > 0 is_cloudy = (qf1 & 0b00001100) > 0 # mask everything but "confident clear" cloud_quality = (qf1 & 0b00000011) < 0b10 - qf2 = self.nc['QF2 Surface Reflectance'] + qf2 = self.nc["QF2 Surface Reflectance"] has_snow_or_ice = (qf2 & 0b00100000) > 0 has_cloud_shadow = (qf2 & 0b00001000) > 0 water_mask = (qf2 & 0b00000111) has_water = (water_mask <= 0b010) | (water_mask == 0b101) # shallow water, deep ocean, arctic - qf7 = self.nc['QF7 Surface Reflectance'] + 
qf7 = self.nc["QF7 Surface Reflectance"] has_aerosols = (qf7 & 0b00001100) > 0b1000 # high aerosol quantity adjacent_to_cloud = (qf7 & 0b00000010) > 0 diff --git a/satpy/readers/viirs_edr_active_fires.py b/satpy/readers/viirs_edr_active_fires.py index f1bcf4d3cc..bd8f3f6d69 100644 --- a/satpy/readers/viirs_edr_active_fires.py +++ b/satpy/readers/viirs_edr_active_fires.py @@ -44,7 +44,7 @@ def __init__(self, filename, filename_info, filetype_info, super(VIIRSActiveFiresFileHandler, self).__init__( filename, filename_info, filetype_info, auto_maskandscale=auto_maskandscale, xarray_kwargs=xarray_kwargs) - self.prefix = filetype_info.get('variable_prefix') + self.prefix = filetype_info.get("variable_prefix") def get_dataset(self, dsid, dsinfo): """Get requested data as DataArray. @@ -57,24 +57,24 @@ def get_dataset(self, dsid, dsinfo): Dask DataArray: Data """ - key = dsinfo.get('file_key', dsid['name']).format(variable_prefix=self.prefix) + key = dsinfo.get("file_key", dsid["name"]).format(variable_prefix=self.prefix) data = self[key] # rename "phoney dims" - data = data.rename(dict(zip(data.dims, ['y', 'x']))) + data = data.rename(dict(zip(data.dims, ["y", "x"]))) # handle attributes from YAML - for key in ('units', 'standard_name', 'flag_meanings', 'flag_values', '_FillValue'): + for key in ("units", "standard_name", "flag_meanings", "flag_values", "_FillValue"): # we only want to add information that isn't present already if key in dsinfo and key not in data.attrs: data.attrs[key] = dsinfo[key] - if isinstance(data.attrs.get('flag_meanings'), str): - data.attrs['flag_meanings'] = data.attrs['flag_meanings'].split(' ') + if isinstance(data.attrs.get("flag_meanings"), str): + data.attrs["flag_meanings"] = data.attrs["flag_meanings"].split(" ") # use more common CF standard units - if data.attrs.get('units') == 'kelvins': - data.attrs['units'] = 'K' + if data.attrs.get("units") == "kelvins": + data.attrs["units"] = "K" - data.attrs["platform_name"] = PLATFORM_MAP.get(self.filename_info['satellite_name'].upper(), "unknown") + data.attrs["platform_name"] = PLATFORM_MAP.get(self.filename_info["satellite_name"].upper(), "unknown") data.attrs["sensor"] = self.sensor_name return data @@ -82,12 +82,12 @@ def get_dataset(self, dsid, dsinfo): @property def start_time(self): """Get first date/time when observations were recorded.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get last date/time when observations were recorded.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info.get("end_time", self.start_time) @property def sensor_name(self): @@ -112,33 +112,33 @@ def __init__(self, filename, filename_info, filetype_info): filetype_info: Filetype information """ - skip_rows = filetype_info.get('skip_rows', 15) - columns = filetype_info['columns'] + skip_rows = filetype_info.get("skip_rows", 15) + columns = filetype_info["columns"] self.file_content = dd.read_csv(filename, skiprows=skip_rows, header=None, names=columns) super(VIIRSActiveFiresTextFileHandler, self).__init__(filename, filename_info, filetype_info) - self.platform_name = PLATFORM_MAP.get(self.filename_info['satellite_name'].upper(), "unknown") + self.platform_name = PLATFORM_MAP.get(self.filename_info["satellite_name"].upper(), "unknown") def get_dataset(self, dsid, dsinfo): """Get requested data as DataArray.""" - ds = self[dsid['name']].to_dask_array(lengths=True) + ds = self[dsid["name"]].to_dask_array(lengths=True) data = 
xr.DataArray(ds, dims=("y",), attrs={"platform_name": self.platform_name, "sensor": "VIIRS"}) - for key in ('units', 'standard_name', 'flag_meanings', 'flag_values', '_FillValue'): + for key in ("units", "standard_name", "flag_meanings", "flag_values", "_FillValue"): # we only want to add information that isn't present already if key in dsinfo and key not in data.attrs: data.attrs[key] = dsinfo[key] - if isinstance(data.attrs.get('flag_meanings'), str): - data.attrs['flag_meanings'] = data.attrs['flag_meanings'].split(' ') + if isinstance(data.attrs.get("flag_meanings"), str): + data.attrs["flag_meanings"] = data.attrs["flag_meanings"].split(" ") return data @property def start_time(self): """Get first date/time when observations were recorded.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get last date/time when observations were recorded.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info.get("end_time", self.start_time) def __getitem__(self, key): """Get file content for 'key'.""" diff --git a/satpy/readers/viirs_edr_flood.py b/satpy/readers/viirs_edr_flood.py index 2d9c319656..2625d6d8fc 100644 --- a/satpy/readers/viirs_edr_flood.py +++ b/satpy/readers/viirs_edr_flood.py @@ -29,17 +29,17 @@ class VIIRSEDRFlood(HDF4FileHandler): @property def start_time(self): """Get start time.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info.get("end_time", self.start_time) @property def sensor_name(self): """Get sensor name.""" - sensor = self['/attr/SensorIdentifyCode'] + sensor = self["/attr/SensorIdentifyCode"] if isinstance(sensor, np.ndarray): return str(sensor.astype(str)).lower() return sensor.lower() @@ -47,7 +47,7 @@ def sensor_name(self): @property def platform_name(self): """Get platform name.""" - platform_name = self['/attr/Satellitename'] + platform_name = self["/attr/Satellitename"] if isinstance(platform_name, np.ndarray): return str(platform_name.astype(str)).lower() return platform_name.lower() @@ -58,23 +58,23 @@ def get_metadata(self, data, ds_info): metadata.update(data.attrs) metadata.update(ds_info) metadata.update({ - 'sensor': self.sensor_name, - 'platform_name': self.platform_name, - 'start_time': self.start_time, - 'end_time': self.end_time, + "sensor": self.sensor_name, + "platform_name": self.platform_name, + "start_time": self.start_time, + "end_time": self.end_time, }) return metadata def get_dataset(self, ds_id, ds_info): """Get dataset.""" - data = self[ds_id['name']] + data = self[ds_id["name"]] data.attrs = self.get_metadata(data, ds_info) - fill = data.attrs.pop('_Fillvalue') - offset = data.attrs.get('add_offset') - scale_factor = data.attrs.get('scale_factor') + fill = data.attrs.pop("_Fillvalue") + offset = data.attrs.get("add_offset") + scale_factor = data.attrs.get("scale_factor") data = data.where(data != fill) if scale_factor is not None and offset is not None: @@ -85,25 +85,25 @@ def get_dataset(self, ds_id, ds_info): def get_area_def(self, ds_id): """Get area definition.""" - data = self[ds_id['name']] + data = self[ds_id["name"]] proj_dict = { - 'proj': 'latlong', - 'datum': 'WGS84', - 'ellps': 'WGS84', - 'no_defs': True + "proj": "latlong", + "datum": "WGS84", + "ellps": "WGS84", + "no_defs": True } - area_extent = [data.attrs.get('ProjectionMinLongitude'), 
data.attrs.get('ProjectionMinLatitude'), - data.attrs.get('ProjectionMaxLongitude'), data.attrs.get('ProjectionMaxLatitude')] + area_extent = [data.attrs.get("ProjectionMinLongitude"), data.attrs.get("ProjectionMinLatitude"), + data.attrs.get("ProjectionMaxLongitude"), data.attrs.get("ProjectionMaxLatitude")] area = geometry.AreaDefinition( - 'viirs_flood_area', - 'name_of_proj', - 'id_of_proj', + "viirs_flood_area", + "name_of_proj", + "id_of_proj", proj_dict, - int(self.filename_info['dim0']), - int(self.filename_info['dim1']), + int(self.filename_info["dim0"]), + int(self.filename_info["dim1"]), np.asarray(area_extent) ) diff --git a/satpy/readers/viirs_l1b.py b/satpy/readers/viirs_l1b.py index a265bb1f82..510a37165d 100644 --- a/satpy/readers/viirs_l1b.py +++ b/satpy/readers/viirs_l1b.py @@ -38,39 +38,39 @@ def _parse_datetime(self, datestr): def start_orbit_number(self): """Get start orbit number.""" try: - return int(self['/attr/orbit_number']) + return int(self["/attr/orbit_number"]) except KeyError: - return int(self['/attr/OrbitNumber']) + return int(self["/attr/OrbitNumber"]) @property def end_orbit_number(self): """Get end orbit number.""" try: - return int(self['/attr/orbit_number']) + return int(self["/attr/orbit_number"]) except KeyError: - return int(self['/attr/OrbitNumber']) + return int(self["/attr/OrbitNumber"]) @property def platform_name(self): """Get platform name.""" try: - res = self.get('/attr/platform', - self.filename_info['platform_shortname']) + res = self.get("/attr/platform", + self.filename_info["platform_shortname"]) except KeyError: - res = 'Unknown' + res = "Unknown" return { - 'JPSS-1': 'NOAA-20', - 'NP': 'Suomi-NPP', - 'J1': 'NOAA-20', - 'J2': 'NOAA-21', - 'JPSS-2': 'NOAA-21', + "JPSS-1": "NOAA-20", + "NP": "Suomi-NPP", + "J1": "NOAA-20", + "J2": "NOAA-21", + "JPSS-2": "NOAA-21", }.get(res, res) @property def sensor_name(self): """Get sensor name.""" - return self['/attr/instrument'].lower() + return self["/attr/instrument"].lower() def adjust_scaling_factors(self, factors, file_units, output_units): """Adjust scaling factors.""" @@ -96,35 +96,35 @@ def adjust_scaling_factors(self, factors, file_units, output_units): def get_shape(self, ds_id, ds_info): """Get shape.""" - var_path = self._dataset_name_to_var_path(ds_id['name'], ds_info) - return self.get(var_path + '/shape', 1) + var_path = self._dataset_name_to_var_path(ds_id["name"], ds_info) + return self.get(var_path + "/shape", 1) @property def start_time(self): """Get start time.""" - return self._parse_datetime(self['/attr/time_coverage_start']) + return self._parse_datetime(self["/attr/time_coverage_start"]) @property def end_time(self): """Get end time.""" - return self._parse_datetime(self['/attr/time_coverage_end']) + return self._parse_datetime(self["/attr/time_coverage_end"]) def _get_dataset_file_units(self, dataset_id, ds_info, var_path): - file_units = ds_info.get('file_units') + file_units = ds_info.get("file_units") if file_units is None: - file_units = self.get(var_path + '/attr/units') + file_units = self.get(var_path + "/attr/units") # they were almost completely CF compliant... 
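# (A compact sketch, not from the patch, of the unit clean-up this method performs:
# map the file's ad-hoc unit spellings onto the CF-style strings used in the YAML.)
UNIT_ALIASES = {
    "none": "1",  # "unitless" reflectances
    "Watts/meter^2/steradian/micrometer": "W m-2 um-1 sr-1",
}

def normalize_unit(unit):
    """Return the CF-style spelling for a file unit, or the unit unchanged."""
    return UNIT_ALIASES.get(unit, unit)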
if file_units == "none": file_units = "1" - if dataset_id.get('calibration') == 'radiance' and ds_info['units'] == 'W m-2 um-1 sr-1': - rad_units_path = var_path + '/attr/radiance_units' + if dataset_id.get("calibration") == "radiance" and ds_info["units"] == "W m-2 um-1 sr-1": + rad_units_path = var_path + "/attr/radiance_units" if rad_units_path in self: if file_units is None: - file_units = self[var_path + '/attr/radiance_units'] - if file_units == 'Watts/meter^2/steradian/micrometer': - file_units = 'W m-2 um-1 sr-1' - elif ds_info.get('units') == '%' and file_units is None: + file_units = self[var_path + "/attr/radiance_units"] + if file_units == "Watts/meter^2/steradian/micrometer": + file_units = "W m-2 um-1 sr-1" + elif ds_info.get("units") == "%" and file_units is None: # v1.1 and above of level 1 processing removed 'units' attribute # for all reflectance channels file_units = "1" @@ -132,54 +132,54 @@ def _get_dataset_file_units(self, dataset_id, ds_info, var_path): return file_units def _get_dataset_valid_range(self, dataset_id, ds_info, var_path): - if dataset_id.get('calibration') == 'radiance' and ds_info['units'] == 'W m-2 um-1 sr-1': - rad_units_path = var_path + '/attr/radiance_units' + if dataset_id.get("calibration") == "radiance" and ds_info["units"] == "W m-2 um-1 sr-1": + rad_units_path = var_path + "/attr/radiance_units" if rad_units_path in self: # we are getting a reflectance band but we want the radiance values # special scaling parameters - scale_factor = self[var_path + '/attr/radiance_scale_factor'] - scale_offset = self[var_path + '/attr/radiance_add_offset'] + scale_factor = self[var_path + "/attr/radiance_scale_factor"] + scale_offset = self[var_path + "/attr/radiance_add_offset"] else: # we are getting a btemp band but we want the radiance values # these are stored directly in the primary variable - scale_factor = self[var_path + '/attr/scale_factor'] - scale_offset = self[var_path + '/attr/add_offset'] - valid_min = self[var_path + '/attr/valid_min'] - valid_max = self[var_path + '/attr/valid_max'] - elif ds_info.get('units') == '%': + scale_factor = self[var_path + "/attr/scale_factor"] + scale_offset = self[var_path + "/attr/add_offset"] + valid_min = self[var_path + "/attr/valid_min"] + valid_max = self[var_path + "/attr/valid_max"] + elif ds_info.get("units") == "%": # normal reflectance - valid_min = self[var_path + '/attr/valid_min'] - valid_max = self[var_path + '/attr/valid_max'] - scale_factor = self[var_path + '/attr/scale_factor'] - scale_offset = self[var_path + '/attr/add_offset'] - elif ds_info.get('units') == 'K': + valid_min = self[var_path + "/attr/valid_min"] + valid_max = self[var_path + "/attr/valid_max"] + scale_factor = self[var_path + "/attr/scale_factor"] + scale_offset = self[var_path + "/attr/add_offset"] + elif ds_info.get("units") == "K": # normal brightness temperature # use a special LUT to get the actual values - lut_var_path = ds_info.get('lut', var_path + '_brightness_temperature_lut') + lut_var_path = ds_info.get("lut", var_path + "_brightness_temperature_lut") # we get the BT values from a look up table using the scaled radiance integers - valid_min = self[lut_var_path + '/attr/valid_min'] - valid_max = self[lut_var_path + '/attr/valid_max'] + valid_min = self[lut_var_path + "/attr/valid_min"] + valid_max = self[lut_var_path + "/attr/valid_max"] scale_factor = scale_offset = None else: - valid_min = self.get(var_path + '/attr/valid_min') - valid_max = self.get(var_path + '/attr/valid_max') - scale_factor = 
self.get(var_path + '/attr/scale_factor') - scale_offset = self.get(var_path + '/attr/add_offset') + valid_min = self.get(var_path + "/attr/valid_min") + valid_max = self.get(var_path + "/attr/valid_max") + scale_factor = self.get(var_path + "/attr/scale_factor") + scale_offset = self.get(var_path + "/attr/add_offset") return valid_min, valid_max, scale_factor, scale_offset def get_metadata(self, dataset_id, ds_info): """Get metadata.""" - var_path = self._dataset_name_to_var_path(dataset_id['name'], ds_info) + var_path = self._dataset_name_to_var_path(dataset_id["name"], ds_info) shape = self.get_shape(dataset_id, ds_info) file_units = self._get_dataset_file_units(dataset_id, ds_info, var_path) # Get extra metadata if self._is_scan_based_array(shape): - rows_per_scan = int(shape[0] / self['/dimension/number_of_scans']) - ds_info.setdefault('rows_per_scan', rows_per_scan) + rows_per_scan = int(shape[0] / self["/dimension/number_of_scans"]) + ds_info.setdefault("rows_per_scan", rows_per_scan) - i = getattr(self[var_path], 'attrs', {}) + i = getattr(self[var_path], "attrs", {}) i.update(ds_info) i.update(dataset_id.to_dict()) i.update({ @@ -195,22 +195,22 @@ def get_metadata(self, dataset_id, ds_info): return i def _is_scan_based_array(self, shape): - return '/dimension/number_of_scans' in self and isinstance(shape, tuple) and shape + return "/dimension/number_of_scans" in self and isinstance(shape, tuple) and shape def get_dataset(self, dataset_id, ds_info): """Get dataset.""" - var_path = self._dataset_name_to_var_path(dataset_id['name'], ds_info) + var_path = self._dataset_name_to_var_path(dataset_id["name"], ds_info) metadata = self.get_metadata(dataset_id, ds_info) valid_min, valid_max, scale_factor, scale_offset = self._get_dataset_valid_range(dataset_id, ds_info, var_path) - if dataset_id.get('calibration') == 'radiance' and ds_info['units'] == 'W m-2 um-1 sr-1': + if dataset_id.get("calibration") == "radiance" and ds_info["units"] == "W m-2 um-1 sr-1": data = self[var_path] - elif ds_info.get('units') == '%': + elif ds_info.get("units") == "%": data = self[var_path] - elif ds_info.get('units') == 'K': + elif ds_info.get("units") == "K": # normal brightness temperature # use a special LUT to get the actual values - lut_var_path = ds_info.get('lut', var_path + '_brightness_temperature_lut') + lut_var_path = ds_info.get("lut", var_path + "_brightness_temperature_lut") data = self[var_path] # we get the BT values from a look up table using the scaled radiance integers index_arr = data.data.astype(int) @@ -223,21 +223,21 @@ def get_dataset(self, dataset_id, ds_info): if valid_min is not None and valid_max is not None: data = data.where((data >= valid_min) & (data <= valid_max)) - if data.attrs.get('units') in ['%', 'K', '1', 'W m-2 um-1 sr-1'] and \ - 'flag_meanings' in data.attrs: + if data.attrs.get("units") in ["%", "K", "1", "W m-2 um-1 sr-1"] and \ + "flag_meanings" in data.attrs: # flag meanings don't mean anything anymore for these variables # these aren't category products - data.attrs.pop('flag_meanings', None) - data.attrs.pop('flag_values', None) + data.attrs.pop("flag_meanings", None) + data.attrs.pop("flag_values", None) factors = (scale_factor, scale_offset) - factors = self.adjust_scaling_factors(factors, metadata['file_units'], ds_info.get("units")) + factors = self.adjust_scaling_factors(factors, metadata["file_units"], ds_info.get("units")) if factors[0] != 1 or factors[1] != 0: data *= factors[0] data += factors[1] # rename dimensions to correspond to satpy's 'y' and 
'x' standard - if 'number_of_lines' in data.dims: - data = data.rename({'number_of_lines': 'y', 'number_of_pixels': 'x'}) + if "number_of_lines" in data.dims: + data = data.rename({"number_of_lines": "y", "number_of_pixels": "x"}) return data def available_datasets(self, configured_datasets=None): @@ -255,11 +255,11 @@ def available_datasets(self, configured_datasets=None): # file handler so let's yield early yield is_avail, ds_info continue - ft_matches = self.file_type_matches(ds_info['file_type']) - var_path = self._dataset_name_to_var_path(ds_info['name'], ds_info) + ft_matches = self.file_type_matches(ds_info["file_type"]) + var_path = self._dataset_name_to_var_path(ds_info["name"], ds_info) is_in_file = var_path in self yield ft_matches and is_in_file, ds_info @staticmethod def _dataset_name_to_var_path(dataset_name: str, ds_info: dict) -> str: - return ds_info.get('file_key', 'observation_data/{}'.format(dataset_name)) + return ds_info.get("file_key", "observation_data/{}".format(dataset_name)) diff --git a/satpy/readers/viirs_sdr.py b/satpy/readers/viirs_sdr.py index 71379b2066..db9ba9ba10 100644 --- a/satpy/readers/viirs_sdr.py +++ b/satpy/readers/viirs_sdr.py @@ -83,18 +83,18 @@ class VIIRSSDRFileHandler(JPSS_SDR_FileHandler): def __init__(self, filename, filename_info, filetype_info, use_tc=None, **kwargs): """Initialize file handler.""" - self.datasets = filename_info['datasets'].split('-') + self.datasets = filename_info["datasets"].split("-") self.use_tc = use_tc super().__init__(filename, filename_info, filetype_info, **kwargs) def __getitem__(self, item): """Get item.""" - if '*' in item: + if "*" in item: # this is an aggregated field that can't easily be loaded, need to # join things together idx = 0 base_item = item - item = base_item.replace('*', str(idx)) + item = base_item.replace("*", str(idx)) result = [] while True: try: @@ -106,7 +106,7 @@ def __getitem__(self, item): break idx += 1 - item = base_item.replace('*', str(idx)) + item = base_item.replace("*", str(idx)) return result else: return super().__getitem__(item) @@ -120,11 +120,11 @@ def get_dataset(self, dataset_id, ds_info): scans for each granule is read from: ``Data_Products/...Gran_x/N_Number_Of_Scans``. 
""" - dataset_group = [ds_group for ds_group in ds_info['dataset_groups'] if ds_group in self.datasets] + dataset_group = [ds_group for ds_group in ds_info["dataset_groups"] if ds_group in self.datasets] if not dataset_group: return dataset_group = dataset_group[0] - ds_info['dataset_group'] = dataset_group + ds_info["dataset_group"] = dataset_group var_path = self._generate_file_key(dataset_id, ds_info) data = self.concatenate_dataset(dataset_group, var_path) @@ -138,17 +138,17 @@ def get_dataset(self, dataset_id, ds_info): def get_bounding_box(self): """Get the bounding box of this file.""" from pyproj import Geod - geod = Geod(ellps='WGS84') + geod = Geod(ellps="WGS84") dataset_group = DATASET_KEYS[self.datasets[0]] idx = 0 lons_ring = None lats_ring = None while True: - path = 'Data_Products/{dataset_group}/{dataset_group}_Gran_{idx}/attr/' + path = "Data_Products/{dataset_group}/{dataset_group}_Gran_{idx}/attr/" prefix = path.format(dataset_group=dataset_group, idx=idx) try: - lats = self.file_content[prefix + 'G-Ring_Latitude'] - lons = self.file_content[prefix + 'G-Ring_Longitude'] + lats = self.file_content[prefix + "G-Ring_Latitude"] + lons = self.file_content[prefix + "G-Ring_Longitude"] if lons_ring is None: lons_ring = lons lats_ring = lats @@ -215,16 +215,16 @@ def filter_filenames_by_info(self, filename_items): geo_del = [] viirs_del = [] for filename, filename_info in filename_items: - datasets = filename_info['datasets'].split('-') + datasets = filename_info["datasets"].split("-") if not self._is_viirs_dataset(datasets): viirs_del.append(filename) - if ('GITCO' in datasets) or ('GMTCO' in datasets): + if ("GITCO" in datasets) or ("GMTCO" in datasets): if self.use_tc is False: geo_del.append(filename) else: geo_keep.append(filename) - elif ('GIMGO' in datasets) or ('GMODO' in datasets): + elif ("GIMGO" in datasets) or ("GMODO" in datasets): if self.use_tc is True: geo_del.append(filename) else: @@ -240,20 +240,20 @@ def _remove_non_viirs_datasets_from_files(self, filename_items, files_to_edit): return self._remove_datasets_from_files(filename_items, files_to_edit, no_viirs) def _remove_geo_datasets_from_files(self, filename_items, files_to_edit): - datasets_to_consider = ['GITCO', 'GMTCO', 'GIMGO', 'GMODO'] + datasets_to_consider = ["GITCO", "GMTCO", "GIMGO", "GMODO"] return self._remove_datasets_from_files(filename_items, files_to_edit, datasets_to_consider) def _remove_datasets_from_files(self, filename_items, files_to_edit, considered_datasets): fdict = dict(filename_items) for to_del in files_to_edit: - fdict[to_del]['datasets'] = fdict[to_del]['datasets'].split('-') + fdict[to_del]["datasets"] = fdict[to_del]["datasets"].split("-") for dataset in considered_datasets: with suppress(ValueError): - fdict[to_del]['datasets'].remove(dataset) - if not fdict[to_del]['datasets']: + fdict[to_del]["datasets"].remove(dataset) + if not fdict[to_del]["datasets"]: del fdict[to_del] else: - fdict[to_del]['datasets'] = "-".join(fdict[to_del]['datasets']) + fdict[to_del]["datasets"] = "-".join(fdict[to_del]["datasets"]) filename_items = fdict.items() return filename_items @@ -269,15 +269,15 @@ def _load_filenames_from_geo_ref(self, dsid): try: # get the filename and remove the creation time # which is often wrong - fn = fh['/attr/N_GEO_Ref'][:46] + '*.h5' + fn = fh["/attr/N_GEO_Ref"][:46] + "*.h5" fns.extend(glob(os.path.join(base_dir, fn))) # usually is non-terrain corrected file, add the terrain # corrected file too - if fn[:5] == 'GIMGO': - fn = 'GITCO' + fn[5:] - elif fn[:5] == 
'GMODO': - fn = 'GMTCO' + fn[5:] + if fn[:5] == "GIMGO": + fn = "GITCO" + fn[5:] + elif fn[:5] == "GMODO": + fn = "GMTCO" + fn[5:] else: continue fns.extend(glob(os.path.join(base_dir, fn))) @@ -288,22 +288,22 @@ def _load_filenames_from_geo_ref(self, dsid): def _get_primary_secondary_geo_groups(self, ds_info): """Find out which geolocation files are needed.""" - if ds_info['dataset_groups'][0].startswith('GM'): + if ds_info["dataset_groups"][0].startswith("GM"): if self.use_tc is False: - prime_geo = 'GMODO' - second_geo = 'GMTCO' + prime_geo = "GMODO" + second_geo = "GMTCO" else: - prime_geo = 'GMTCO' - second_geo = 'GMODO' - elif ds_info['dataset_groups'][0].startswith('GI'): + prime_geo = "GMTCO" + second_geo = "GMODO" + elif ds_info["dataset_groups"][0].startswith("GI"): if self.use_tc is False: - prime_geo = 'GIMGO' - second_geo = 'GITCO' + prime_geo = "GIMGO" + second_geo = "GITCO" else: - prime_geo = 'GITCO' - second_geo = 'GIMGO' + prime_geo = "GITCO" + second_geo = "GIMGO" else: - raise ValueError('Unknown dataset group %s' % ds_info['dataset_groups'][0]) + raise ValueError("Unknown dataset group %s" % ds_info["dataset_groups"][0]) return prime_geo, second_geo def get_right_geo_fhs(self, dsid, fhs): @@ -313,7 +313,7 @@ def get_right_geo_fhs(self, dsid, fhs): desired, other = split_desired_other(fhs, prime_geo, second_geo) if desired: try: - ds_info['dataset_groups'].remove(second_geo) + ds_info["dataset_groups"].remove(second_geo) except ValueError: pass return desired @@ -324,13 +324,13 @@ def _get_file_handlers(self, dsid): """Get the file handler to load this dataset.""" ds_info = self.all_ids[dsid] - fhs = [fh for fh in self.file_handlers['generic_file'] - if set(fh.datasets) & set(ds_info['dataset_groups'])] + fhs = [fh for fh in self.file_handlers["generic_file"] + if set(fh.datasets) & set(ds_info["dataset_groups"])] if not fhs: LOG.warning("Required file type '%s' not found or loaded for " - "'%s'", ds_info['file_type'], dsid['name']) + "'%s'", ds_info["file_type"], dsid["name"]) else: - if len(set(ds_info['dataset_groups']) & {'GITCO', 'GIMGO', 'GMTCO', 'GMODO'}) > 1: + if len(set(ds_info["dataset_groups"]) & {"GITCO", "GIMGO", "GMTCO", "GMODO"}) > 1: fhs = self.get_right_geo_fhs(dsid, fhs) return fhs @@ -351,12 +351,12 @@ def _get_coordinates_for_dataset_key(self, dsid): # check the dataset file for the geolocation filename geo_filenames = self._load_filenames_from_geo_ref(dsid) self._create_new_geo_file_handlers(geo_filenames) - self._remove_not_loaded_geo_dataset_group(c_info['dataset_groups'], prime_geo, second_geo) + self._remove_not_loaded_geo_dataset_group(c_info["dataset_groups"], prime_geo, second_geo) return coords def _geo_dataset_groups(self, c_info): - if len(c_info['dataset_groups']) == 1: # filtering already done + if len(c_info["dataset_groups"]) == 1: # filtering already done return None, None try: prime_geo, second_geo = self._get_primary_secondary_geo_groups(c_info) @@ -365,12 +365,12 @@ def _geo_dataset_groups(self, c_info): return None, None def _create_new_geo_file_handlers(self, geo_filenames): - existing_filenames = set([fh.filename for fh in self.file_handlers['generic_file']]) + existing_filenames = set([fh.filename for fh in self.file_handlers["generic_file"]]) geo_filenames = set(geo_filenames) - existing_filenames self.create_filehandlers(geo_filenames) def _remove_not_loaded_geo_dataset_group(self, c_dataset_groups, prime_geo, second_geo): - all_fhs = self.file_handlers['generic_file'] + all_fhs = self.file_handlers["generic_file"] desired, 
other = split_desired_other(all_fhs, prime_geo, second_geo) group_to_remove = second_geo if desired else prime_geo c_dataset_groups.remove(group_to_remove) diff --git a/satpy/readers/viirs_vgac_l1c_nc.py b/satpy/readers/viirs_vgac_l1c_nc.py index e4a29c27f1..0fa8ddf782 100644 --- a/satpy/readers/viirs_vgac_l1c_nc.py +++ b/satpy/readers/viirs_vgac_l1c_nc.py @@ -37,9 +37,9 @@ def __init__(self, filename, filename_info, filetype_info): filename, filename_info, filetype_info) self.engine = "h5netcdf" - self._start_time = filename_info['start_time'] + self._start_time = filename_info["start_time"] self._end_time = None - self.sensor = 'viirs' + self.sensor = "viirs" self.filename_info = filename_info def calibrate(self, data, yaml_info, file_key, nc): @@ -75,11 +75,11 @@ def set_time_attrs(self, data): def get_dataset(self, key, yaml_info): """Get dataset.""" - logger.debug("Getting data for: %s", yaml_info['name']) + logger.debug("Getting data for: %s", yaml_info["name"]) nc = xr.open_dataset(self.filename, engine=self.engine, decode_times=False, - chunks={'y': CHUNK_SIZE, 'x': 800}) - name = yaml_info.get('nc_store_name', yaml_info['name']) - file_key = yaml_info.get('nc_key', name) + chunks={"y": CHUNK_SIZE, "x": 800}) + name = yaml_info.get("nc_store_name", yaml_info["name"]) + file_key = yaml_info.get("nc_key", name) data = nc[file_key] data = self.calibrate(data, yaml_info, file_key, nc) data.attrs.update(nc.attrs) # For now add global attributes to all datasets diff --git a/satpy/readers/virr_l1b.py b/satpy/readers/virr_l1b.py index 0ffe7251cb..260666ff8b 100644 --- a/satpy/readers/virr_l1b.py +++ b/satpy/readers/virr_l1b.py @@ -77,24 +77,24 @@ class VIRR_L1B(HDF5FileHandler): def __init__(self, filename, filename_info, filetype_info): """Open file and perform initial setup.""" super(VIRR_L1B, self).__init__(filename, filename_info, filetype_info) - LOG.debug('day/night flag for {0}: {1}'.format(filename, self['/attr/Day Or Night Flag'])) - self.geolocation_prefix = filetype_info['geolocation_prefix'] - self.platform_id = filename_info['platform_id'] - self.l1b_prefix = 'Data/' - self.wave_number = 'Emissive_Centroid_Wave_Number' + LOG.debug("day/night flag for {0}: {1}".format(filename, self["/attr/Day Or Night Flag"])) + self.geolocation_prefix = filetype_info["geolocation_prefix"] + self.platform_id = filename_info["platform_id"] + self.l1b_prefix = "Data/" + self.wave_number = "Emissive_Centroid_Wave_Number" # Else filename_info['platform_id'] == FY3C. 
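# (An illustrative sketch, with a hypothetical helper name, of the platform switch the
# lines below implement: FY3B files keep datasets at the file root and carry the
# historically misspelled "Emmisive_..." attribute, while newer platforms nest data
# under "Data/".)
def virr_l1b_keys(platform_id):
    """Return (dataset prefix, emissive wavenumber attribute) for an FY-3 platform."""
    if platform_id == "FY3B":
        return "", "Emmisive_Centroid_Wave_Number"
    return "Data/", "Emissive_Centroid_Wave_Number"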
- if filename_info['platform_id'] == 'FY3B': - self.l1b_prefix = '' - self.wave_number = 'Emmisive_Centroid_Wave_Number' + if filename_info["platform_id"] == "FY3B": + self.l1b_prefix = "" + self.wave_number = "Emmisive_Centroid_Wave_Number" def get_dataset(self, dataset_id, ds_info): """Create DataArray from file content for `dataset_id`.""" - file_key = self.geolocation_prefix + ds_info.get('file_key', dataset_id['name']) - if self.platform_id == 'FY3B': - file_key = file_key.replace('Data/', '') + file_key = self.geolocation_prefix + ds_info.get("file_key", dataset_id["name"]) + if self.platform_id == "FY3B": + file_key = file_key.replace("Data/", "") data = self[file_key] - band_index = ds_info.get('band_index') - valid_range = data.attrs.pop('valid_range', None) + band_index = ds_info.get("band_index") + valid_range = data.attrs.pop("valid_range", None) if isinstance(valid_range, np.ndarray): valid_range = valid_range.tolist() if band_index is not None: @@ -102,50 +102,50 @@ def get_dataset(self, dataset_id, ds_info): if valid_range: data = data.where((data >= valid_range[0]) & (data <= valid_range[1])) - if 'Emissive' in file_key: + if "Emissive" in file_key: self._calibrate_emissive(data, band_index) - elif 'RefSB' in file_key: + elif "RefSB" in file_key: data = self._calibrate_reflective(data, band_index) else: - slope = self._correct_slope(self[file_key + '/attr/Slope']) - intercept = self[file_key + '/attr/Intercept'] + slope = self._correct_slope(self[file_key + "/attr/Slope"]) + intercept = self[file_key + "/attr/Intercept"] if valid_range: data = data.where((data >= valid_range[0]) & (data <= valid_range[1])) data = data * slope + intercept - new_dims = {old: new for old, new in zip(data.dims, ('y', 'x'))} + new_dims = {old: new for old, new in zip(data.dims, ("y", "x"))} data = data.rename(new_dims) # use lowercase sensor name to be consistent with the rest of satpy - data.attrs.update({'platform_name': self['/attr/Satellite Name'], - 'sensor': self['/attr/Sensor Identification Code'].lower()}) + data.attrs.update({"platform_name": self["/attr/Satellite Name"], + "sensor": self["/attr/Sensor Identification Code"].lower()}) data.attrs.update(ds_info) - units = self.get(file_key + '/attr/units') - if units is not None and str(units).lower() != 'none': - data.attrs.update({'units': self.get(file_key + '/attr/units')}) - elif data.attrs.get('calibration') == 'reflectance': - data.attrs.update({'units': '%'}) + units = self.get(file_key + "/attr/units") + if units is not None and str(units).lower() != "none": + data.attrs.update({"units": self.get(file_key + "/attr/units")}) + elif data.attrs.get("calibration") == "reflectance": + data.attrs.update({"units": "%"}) else: - data.attrs.update({'units': '1'}) + data.attrs.update({"units": "1"}) return data def _calibrate_reflective(self, data, band_index): - if self.platform_id == 'FY3B': + if self.platform_id == "FY3B": coeffs = da.from_array(FY3B_REF_COEFFS, chunks=-1) else: - coeffs = self['/attr/RefSB_Cal_Coefficients'] + coeffs = self["/attr/RefSB_Cal_Coefficients"] slope = self._correct_slope(coeffs[0::2]) intercept = coeffs[1::2] data = data * slope[band_index] + intercept[band_index] return data def _calibrate_emissive(self, data, band_index): - slope = self._correct_slope(self[self.l1b_prefix + 'Emissive_Radiance_Scales']. + slope = self._correct_slope(self[self.l1b_prefix + "Emissive_Radiance_Scales"]. 
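
The masking and scaling pattern used by get_dataset above can be summarized in a few lines. This is a minimal sketch with made-up numbers, not the reader itself: counts outside valid_range become NaN, then a linear slope/intercept calibration is applied.

import numpy as np
import xarray as xr

def mask_and_scale(data, valid_range=None, slope=1.0, intercept=0.0):
    if valid_range is not None:
        data = data.where((data >= valid_range[0]) & (data <= valid_range[1]))
    return data * slope + intercept

counts = xr.DataArray(np.array([[0, 500], [1023, 4095]], dtype="uint16"))
# 4095 falls outside the valid range and is masked before calibration
rad = mask_and_scale(counts, valid_range=[0, 1023], slope=0.01, intercept=-0.1)
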
data[:, band_index][:, np.newaxis]) - intercept = self[self.l1b_prefix + 'Emissive_Radiance_Offsets'].data[:, band_index][:, np.newaxis] + intercept = self[self.l1b_prefix + "Emissive_Radiance_Offsets"].data[:, band_index][:, np.newaxis] # Converts cm^-1 (wavenumbers) and (mW/m^2)/(str/cm^-1) (radiance data) # to SI units m^-1, mW*m^-3*str^-1. - wave_number = self['/attr/' + self.wave_number][band_index] * 100 + wave_number = self["/attr/" + self.wave_number][band_index] * 100 bt_data = rad2temp(wave_number, (data.data * slope + intercept) * 1e-5) if isinstance(bt_data, np.ndarray): # old versions of pyspectral produce numpy arrays @@ -161,11 +161,11 @@ def _correct_slope(self, slope): @property def start_time(self): """Get starting observation time.""" - start_time = self['/attr/Observing Beginning Date'] + 'T' + self['/attr/Observing Beginning Time'] + 'Z' - return datetime.strptime(start_time, '%Y-%m-%dT%H:%M:%S.%fZ') + start_time = self["/attr/Observing Beginning Date"] + "T" + self["/attr/Observing Beginning Time"] + "Z" + return datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get ending observation time.""" - end_time = self['/attr/Observing Ending Date'] + 'T' + self['/attr/Observing Ending Time'] + 'Z' - return datetime.strptime(end_time, '%Y-%m-%dT%H:%M:%S.%fZ') + end_time = self["/attr/Observing Ending Date"] + "T" + self["/attr/Observing Ending Time"] + "Z" + return datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S.%fZ") diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py index 1c4e68d621..ff3599052a 100644 --- a/satpy/readers/yaml_reader.py +++ b/satpy/readers/yaml_reader.py @@ -87,13 +87,13 @@ def _match_filenames(filenames, pattern): def _verify_reader_info_assign_config_files(config, config_files): try: - reader_info = config['reader'] + reader_info = config["reader"] except KeyError: raise KeyError( "Malformed config file {}: missing reader 'reader'".format( config_files)) else: - reader_info['config_files'] = config_files + reader_info["config_files"] = config_files def load_yaml_configs(*config_files, loader=Loader): @@ -113,9 +113,9 @@ def load_yaml_configs(*config_files, loader=Loader): """ config = {} - logger.debug('Reading %s', str(config_files)) + logger.debug("Reading %s", str(config_files)) for config_file in config_files: - with open(config_file, 'r', encoding='utf-8') as fd: + with open(config_file, "r", encoding="utf-8") as fd: config = recursive_dict_update(config, yaml.load(fd, Loader=loader)) _verify_reader_info_assign_config_files(config, config_files) return config @@ -136,23 +136,23 @@ def __init__(self, config_dict): "deprecated. 
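
The unit conversion in _calibrate_emissive above is worth spelling out: the 100 factor turns wavenumbers from cm^-1 into m^-1, and the 1e-5 factor on the radiances combines mW->W (1e-3) with per-cm^-1 -> per-m^-1 (1e-2). A hedged scalar illustration, with made-up channel values, assuming the same pyspectral import the reader uses:

from pyspectral.blackbody import blackbody_wn_rad2temp as rad2temp

wave_number = 869.57 * 100   # cm^-1 -> m^-1 (~11.5 um channel, hypothetical)
radiance = 9.5 * 1e-5        # (mW/m^2)/(sr cm^-1) -> SI, hypothetical value
bt = rad2temp(wave_number, radiance)  # brightness temperature in kelvin
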
Use ReaderClass.from_config_files " "instead.") self.config = config_dict - self.info = self.config['reader'] - self.name = self.info['name'] + self.info = self.config["reader"] + self.name = self.info["name"] self.file_patterns = [] - for file_type, filetype_info in self.config['file_types'].items(): - filetype_info.setdefault('file_type', file_type) + for file_type, filetype_info in self.config["file_types"].items(): + filetype_info.setdefault("file_type", file_type) # correct separator if needed - file_patterns = [os.path.join(*pattern.split('/')) - for pattern in filetype_info['file_patterns']] - filetype_info['file_patterns'] = file_patterns + file_patterns = [os.path.join(*pattern.split("/")) + for pattern in filetype_info["file_patterns"]] + filetype_info["file_patterns"] = file_patterns self.file_patterns.extend(file_patterns) - if 'sensors' in self.info and not isinstance(self.info['sensors'], (list, tuple)): - self.info['sensors'] = [self.info['sensors']] - self.datasets = self.config.get('datasets', {}) - self._id_keys = self.info.get('data_identification_keys', default_id_keys_config) - self._co_keys = self.info.get('coord_identification_keys', default_co_keys_config) - self.info['filenames'] = [] + if "sensors" in self.info and not isinstance(self.info["sensors"], (list, tuple)): + self.info["sensors"] = [self.info["sensors"]] + self.datasets = self.config.get("datasets", {}) + self._id_keys = self.info.get("data_identification_keys", default_id_keys_config) + self._co_keys = self.info.get("coord_identification_keys", default_co_keys_config) + self.info["filenames"] = [] self.all_ids = {} self.load_ds_ids_from_config() @@ -160,12 +160,12 @@ def __init__(self, config_dict): def from_config_files(cls, *config_files, **reader_kwargs): """Create a reader instance from one or more YAML configuration files.""" config_dict = load_yaml_configs(*config_files) - return config_dict['reader']['reader'](config_dict, **reader_kwargs) + return config_dict["reader"]["reader"](config_dict, **reader_kwargs) @property def sensor_names(self): """Names of sensors whose data is being loaded by this reader.""" - return self.info['sensors'] or [] + return self.info["sensors"] or [] @property def all_dataset_ids(self): @@ -176,7 +176,7 @@ def all_dataset_ids(self): def all_dataset_names(self): """Get names of all datasets known to this reader.""" # remove the duplicates from various calibration and resolutions - return set(ds_id['name'] for ds_id in self.all_dataset_ids) + return set(ds_id["name"] for ds_id in self.all_dataset_ids) @property def available_dataset_ids(self): @@ -188,7 +188,7 @@ def available_dataset_ids(self): @property def available_dataset_names(self): """Get names of datasets that are loadable by this reader.""" - return (ds_id['name'] for ds_id in self.available_dataset_ids) + return (ds_id["name"] for ds_id in self.available_dataset_ids) @property @abstractmethod @@ -243,7 +243,7 @@ def select_files_from_directory( """ filenames = set() if directory is None: - directory = '' + directory = "" # all the glob patterns that we are going to look at all_globs = {os.path.join(directory, globify(pattern)) for pattern in self.file_patterns} @@ -286,9 +286,9 @@ def load_ds_ids_from_config(self): for dataset in self.datasets.values(): # xarray doesn't like concatenating attributes that are lists # https://github.com/pydata/xarray/issues/2060 - if 'coordinates' in dataset and \ - isinstance(dataset['coordinates'], list): - dataset['coordinates'] = tuple(dataset['coordinates']) + if 
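
A tiny illustration of the separator correction performed on file_patterns above: patterns are written with "/" in the YAML and rebuilt with the OS separator at load time. The pattern string is a hypothetical example.

import os

pattern = "viirs/{platform}_{start_time:%Y%m%d}.h5"
native = os.path.join(*pattern.split("/"))
# unchanged on POSIX; becomes "viirs\\{platform}_{start_time:%Y%m%d}.h5" on Windows
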
"coordinates" in dataset and \ + isinstance(dataset["coordinates"], list): + dataset["coordinates"] = tuple(dataset["coordinates"]) id_keys = get_keys_from_config(self._id_keys, dataset) # Build each permutation/product of the dataset @@ -316,10 +316,10 @@ def _build_id_permutations(self, dataset, id_keys): """Build each permutation/product of the dataset.""" id_kwargs = [] for key, idval in id_keys.items(): - val = dataset.get(key, idval.get('default') if idval is not None else None) + val = dataset.get(key, idval.get("default") if idval is not None else None) val_type = None if idval is not None: - val_type = idval.get('type') + val_type = idval.get("type") if val_type is not None and issubclass(val_type, tuple): # special case: wavelength can be [min, nominal, max] # but is still considered 1 option @@ -363,7 +363,7 @@ def __init__(self, self.file_handlers = {} self.available_ids = {} - self.filter_filenames = self.info.get('filter_filenames', filter_filenames) + self.filter_filenames = self.info.get("filter_filenames", filter_filenames) self.filter_parameters = filter_parameters or {} self.register_data_files() @@ -371,7 +371,7 @@ def __init__(self, def sensor_names(self): """Names of sensors whose data is being loaded by this reader.""" if not self.file_handlers: - return self.info['sensors'] + return self.info["sensors"] file_handlers = (handlers[0] for handlers in self.file_handlers.values()) @@ -382,7 +382,7 @@ def sensor_names(self): except NotImplementedError: continue if not sensor_names: - return self.info['sensors'] + return self.info["sensors"] return sorted(sensor_names) @property @@ -453,11 +453,11 @@ def find_required_filehandlers(self, requirements, filename_info): def sorted_filetype_items(self): """Sort the instance's filetypes in using order.""" processed_types = [] - file_type_items = deque(self.config['file_types'].items()) + file_type_items = deque(self.config["file_types"].items()) while len(file_type_items): filetype, filetype_info = file_type_items.popleft() - requirements = filetype_info.get('requires') + requirements = filetype_info.get("requires") if requirements is not None: # requirements have not been processed yet -> wait missing = [req for req in requirements @@ -475,7 +475,7 @@ def filename_items_for_filetype(filenames, filetype_info): if not isinstance(filenames, set): # we perform set operations later on to improve performance filenames = set(filenames) - for pattern in filetype_info['file_patterns']: + for pattern in filetype_info["file_patterns"]: matched_files = set() matches = _match_filenames(filenames, pattern) for filename in matches: @@ -491,8 +491,8 @@ def filename_items_for_filetype(filenames, filetype_info): def _new_filehandler_instances(self, filetype_info, filename_items, fh_kwargs=None): """Generate new filehandler instances.""" - requirements = filetype_info.get('requires') - filetype_cls = filetype_info['file_reader'] + requirements = filetype_info.get("requires") + filetype_cls = filetype_info["file_reader"] if fh_kwargs is None: fh_kwargs = {} @@ -507,15 +507,15 @@ def _new_filehandler_instances(self, filetype_info, filename_items, fh_kwargs=No warnings.warn(msg, stacklevel=4) continue except RuntimeError as err: - warnings.warn(str(err) + ' for {}'.format(filename), stacklevel=4) + warnings.warn(str(err) + " for {}".format(filename), stacklevel=4) continue yield filetype_cls(filename, filename_info, filetype_info, *req_fh, **fh_kwargs) def time_matches(self, fstart, fend): """Check that a file's start and end time mtach 
filter_parameters of this reader.""" - start_time = self.filter_parameters.get('start_time') - end_time = self.filter_parameters.get('end_time') + start_time = self.filter_parameters.get("start_time") + end_time = self.filter_parameters.get("end_time") fend = fend or fstart if start_time and fend and fend < start_time: return False @@ -527,17 +527,17 @@ def metadata_matches(self, sample_dict, file_handler=None): """Check that file metadata matches filter_parameters of this reader.""" # special handling of start/end times if not self.time_matches( - sample_dict.get('start_time'), sample_dict.get('end_time')): + sample_dict.get("start_time"), sample_dict.get("end_time")): return False for key, val in self.filter_parameters.items(): - if key != 'area' and key not in sample_dict: + if key != "area" and key not in sample_dict: continue - if key in ['start_time', 'end_time']: + if key in ["start_time", "end_time"]: continue - elif key == 'area' and file_handler: + elif key == "area" and file_handler: if not self.check_file_covers_area(file_handler, val): - logger.info('Filtering out %s based on area', + logger.info("Filtering out %s based on area", file_handler.filename) break elif key in sample_dict and val != sample_dict[key]: @@ -556,22 +556,22 @@ def filter_filenames_by_info(self, filename_items): the requested end time. """ for filename, filename_info in filename_items: - fend = filename_info.get('end_time') - fstart = filename_info.setdefault('start_time', fend) + fend = filename_info.get("end_time") + fstart = filename_info.setdefault("start_time", fend) if fend and fend < fstart: # correct for filenames with 1 date and 2 times fend = fend.replace(year=fstart.year, month=fstart.month, day=fstart.day) - filename_info['end_time'] = fend + filename_info["end_time"] = fend if self.metadata_matches(filename_info): yield filename, filename_info def filter_fh_by_metadata(self, filehandlers): """Filter out filehandlers using provide filter parameters.""" for filehandler in filehandlers: - filehandler.metadata['start_time'] = filehandler.start_time - filehandler.metadata['end_time'] = filehandler.end_time + filehandler.metadata["start_time"] = filehandler.start_time + filehandler.metadata["end_time"] = filehandler.end_time if self.metadata_matches(filehandler.metadata, filehandler): yield filehandler @@ -606,9 +606,9 @@ def _new_filehandlers_for_filetype(self, filetype_info, filenames, fh_kwargs=Non def create_filehandlers(self, filenames, fh_kwargs=None): """Organize the filenames into file types and create file handlers.""" filenames = list(OrderedDict.fromkeys(filenames)) - logger.debug("Assigning to %s: %s", self.info['name'], filenames) + logger.debug("Assigning to %s: %s", self.info["name"], filenames) - self.info.setdefault('filenames', []).extend(filenames) + self.info.setdefault("filenames", []).extend(filenames) filename_set = set(filenames) created_fhs = {} # load files that we know about by creating the file handlers @@ -670,13 +670,13 @@ def update_ds_ids_from_file_handlers(self): new_ids = {} for is_avail, ds_info in avail_datasets: # especially from the yaml config - coordinates = ds_info.get('coordinates') + coordinates = ds_info.get("coordinates") if isinstance(coordinates, list): # xarray doesn't like concatenating attributes that are # lists: https://github.com/pydata/xarray/issues/2060 - ds_info['coordinates'] = tuple(ds_info['coordinates']) + ds_info["coordinates"] = tuple(ds_info["coordinates"]) - ds_info.setdefault('modifiers', tuple()) # default to no mods + 
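
The end-time correction in filter_filenames_by_info above handles file patterns that carry one date but two times: when the parsed end time lands before the start time, it inherits the start time's date. A self-contained sketch with hypothetical timestamps:

from datetime import datetime

def fix_end_time(fstart, fend):
    """Give the end time the start time's date when it parsed earlier."""
    if fend and fend < fstart:
        fend = fend.replace(year=fstart.year, month=fstart.month, day=fstart.day)
    return fend

start = datetime(2023, 1, 1, 23, 50)
end = datetime(1900, 1, 1, 23, 59)   # the pattern carried a time but no date
assert fix_end_time(start, end) == datetime(2023, 1, 1, 23, 59)
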
ds_info.setdefault("modifiers", tuple()) # default to no mods # Create DataID for this dataset ds_id = DataID(self._id_keys, **ds_info) @@ -690,7 +690,7 @@ def update_ds_ids_from_file_handlers(self): self.all_ids = new_ids @staticmethod - def _load_dataset(dsid, ds_info, file_handlers, dim='y', **kwargs): + def _load_dataset(dsid, ds_info, file_handlers, dim="y", **kwargs): """Load only a piece of the dataset.""" slice_list = [] failure = True @@ -723,9 +723,9 @@ def _load_dataset_data(self, file_handlers, dsid, **kwargs): proj = self._load_dataset(dsid, ds_info, file_handlers, **kwargs) # FIXME: areas could be concatenated here # Update the metadata - proj.attrs['start_time'] = file_handlers[0].start_time - proj.attrs['end_time'] = file_handlers[-1].end_time - proj.attrs['reader'] = self.name + proj.attrs["start_time"] = file_handlers[0].start_time + proj.attrs["end_time"] = file_handlers[-1].end_time + proj.attrs["reader"] = self.name return proj def _preferred_filetype(self, filetypes): @@ -750,10 +750,10 @@ def _get_file_handlers(self, dsid): """Get the file handler to load this dataset.""" ds_info = self.all_ids[dsid] - filetype = self._preferred_filetype(ds_info['file_type']) + filetype = self._preferred_filetype(ds_info["file_type"]) if filetype is None: logger.warning("Required file type '%s' not found or loaded for " - "'%s'", ds_info['file_type'], dsid['name']) + "'%s'", ds_info["file_type"], dsid["name"]) else: return self.file_handlers[filetype] @@ -786,12 +786,12 @@ def _get_lons_lats_from_coords(self, coords): """Get lons and lats from the coords list.""" lons, lats = None, None for coord in coords: - if coord.attrs.get('standard_name') == 'longitude': + if coord.attrs.get("standard_name") == "longitude": lons = coord - elif coord.attrs.get('standard_name') == 'latitude': + elif coord.attrs.get("standard_name") == "latitude": lats = coord if lons is None or lats is None: - raise ValueError('Missing longitude or latitude coordinate: ' + str(coords)) + raise ValueError("Missing longitude or latitude coordinate: " + str(coords)) return lons, lats def _make_swath_definition_from_lons_lats(self, lons, lats): @@ -804,11 +804,11 @@ def _make_swath_definition_from_lons_lats(self, lons, lats): sdef = None if sdef is None: sdef = SwathDefinition(lons, lats) - sensor_str = '_'.join(self.info['sensors']) - shape_str = '_'.join(map(str, lons.shape)) + sensor_str = "_".join(self.info["sensors"]) + shape_str = "_".join(map(str, lons.shape)) sdef.name = "{}_{}_{}_{}".format(sensor_str, shape_str, - lons.attrs.get('name', lons.name), - lats.attrs.get('name', lats.name)) + lons.attrs.get("name", lons.name), + lats.attrs.get("name", lats.name)) if key is not None: FileYAMLReader._coords_cache[key] = sdef return sdef @@ -830,7 +830,7 @@ def _load_dataset_with_area(self, dsid, coords, **kwargs): area = self._load_dataset_area(dsid, file_handlers, coords, **kwargs) if area is not None: - ds.attrs['area'] = area + ds.attrs["area"] = area ds = add_crs_xy_coords(ds, area) return ds @@ -840,7 +840,7 @@ def _assign_coords_from_dataarray(coords, ds): if not coords: coords = [] for coord in ds.coords.values(): - if coord.attrs.get('standard_name') in ['longitude', 'latitude']: + if coord.attrs.get("standard_name") in ["longitude", "latitude"]: coords.append(coord) return coords @@ -855,12 +855,12 @@ def _load_ancillary_variables(self, datasets, **kwargs): for dataset in datasets.values(): new_vars = [] - for av_id in dataset.attrs.get('ancillary_variables', []): + for av_id in 
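
The coordinate splitting done by _get_lons_lats_from_coords above reduces to a scan over CF standard_name attributes. A minimal sketch, with toy arrays standing in for real coordinate variables:

import numpy as np
import xarray as xr

def split_lons_lats(coords):
    lons = lats = None
    for coord in coords:
        if coord.attrs.get("standard_name") == "longitude":
            lons = coord
        elif coord.attrs.get("standard_name") == "latitude":
            lats = coord
    if lons is None or lats is None:
        raise ValueError("Missing longitude or latitude coordinate")
    return lons, lats

lon = xr.DataArray(np.zeros((2, 2)), attrs={"standard_name": "longitude"})
lat = xr.DataArray(np.zeros((2, 2)), attrs={"standard_name": "latitude"})
lons, lats = split_lons_lats([lat, lon])  # order in the list does not matter
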
dataset.attrs.get("ancillary_variables", []): if isinstance(av_id, DataID): new_vars.append(datasets[av_id]) else: new_vars.append(av_id) - dataset.attrs['ancillary_variables'] = new_vars + dataset.attrs["ancillary_variables"] = new_vars def _gather_ancillary_variables_ids(self, datasets): """Gather ancillary variables' ids. @@ -869,9 +869,9 @@ def _gather_ancillary_variables_ids(self, datasets): """ all_av_ids = set() for dataset in datasets.values(): - ancillary_variables = dataset.attrs.get('ancillary_variables', []) + ancillary_variables = dataset.attrs.get("ancillary_variables", []) if not isinstance(ancillary_variables, (list, tuple, set)): - ancillary_variables = ancillary_variables.split(' ') + ancillary_variables = ancillary_variables.split(" ") av_ids = [] for key in ancillary_variables: try: @@ -880,7 +880,7 @@ def _gather_ancillary_variables_ids(self, datasets): logger.warning("Can't load ancillary dataset %s", str(key)) all_av_ids |= set(av_ids) - dataset.attrs['ancillary_variables'] = av_ids + dataset.attrs["ancillary_variables"] = av_ids return all_av_ids def get_dataset_key(self, key, available_only=False, **kwargs): @@ -953,12 +953,12 @@ def _get_coordinates_for_dataset_key(self, dsid): """Get the coordinate dataset keys for *dsid*.""" ds_info = self.all_ids[dsid] cids = [] - for cinfo in ds_info.get('coordinates', []): + for cinfo in ds_info.get("coordinates", []): if not isinstance(cinfo, dict): - cinfo = {'name': cinfo} + cinfo = {"name": cinfo} for key in self._co_keys: - if key == 'name': + if key == "name": continue if key in ds_info: if ds_info[key] is not None: @@ -995,52 +995,52 @@ def _set_orientation(dataset, upper_right_corner): """ # do some checks and early returns - if upper_right_corner == 'native': + if upper_right_corner == "native": logger.debug("Requested orientation for Dataset {} is 'native' (default). " - "No flipping is applied.".format(dataset.attrs.get('name'))) + "No flipping is applied.".format(dataset.attrs.get("name"))) return dataset - if upper_right_corner not in ['NW', 'NE', 'SE', 'SW', 'native']: + if upper_right_corner not in ["NW", "NE", "SE", "SW", "native"]: raise ValueError("Target orientation for Dataset {} not recognized. 
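
A compact restatement of the orientation bookkeeping used by _set_orientation around here: the requested upper-right corner maps to two booleans, and a flip is needed on each axis where the target disagrees with the orientation derived from the current area extents.

def target_flags(upper_right_corner):
    target_northup = upper_right_corner in ["NW", "NE"]
    target_eastright = upper_right_corner in ["NE", "SE"]
    return target_eastright, target_northup

assert target_flags("NE") == (True, True)    # north-up, east-right: no flips
assert target_flags("SW") == (False, False)  # flip both axes of a NE scene
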
" "Kwarg upper_right_corner should be " - "'NW', 'NE', 'SW', 'SE' or 'native'.".format(dataset.attrs.get('name', 'unknown_name'))) + "'NW', 'NE', 'SW', 'SE' or 'native'.".format(dataset.attrs.get("name", "unknown_name"))) - if 'area' not in dataset.attrs: + if "area" not in dataset.attrs: logger.info("Dataset {} is missing the area attribute " - "and will not be flipped.".format(dataset.attrs.get('name', 'unknown_name'))) + "and will not be flipped.".format(dataset.attrs.get("name", "unknown_name"))) return dataset - if isinstance(dataset.attrs['area'], SwathDefinition): + if isinstance(dataset.attrs["area"], SwathDefinition): logger.info("Dataset {} is in a SwathDefinition " - "and will not be flipped.".format(dataset.attrs.get('name', 'unknown_name'))) + "and will not be flipped.".format(dataset.attrs.get("name", "unknown_name"))) return dataset - projection_type = _get_projection_type(dataset.attrs['area']) - accepted_geos_proj_types = ['Geostationary Satellite (Sweep Y)', 'Geostationary Satellite (Sweep X)'] + projection_type = _get_projection_type(dataset.attrs["area"]) + accepted_geos_proj_types = ["Geostationary Satellite (Sweep Y)", "Geostationary Satellite (Sweep X)"] if projection_type not in accepted_geos_proj_types: logger.info("Dataset {} is not in one of the known geostationary projections {} " - "and cannot be flipped.".format(dataset.attrs.get('name', 'unknown_name'), + "and cannot be flipped.".format(dataset.attrs.get("name", "unknown_name"), accepted_geos_proj_types)) return dataset target_eastright, target_northup = _get_target_scene_orientation(upper_right_corner) - area_extents_to_update = _get_dataset_area_extents_array(dataset.attrs['area']) + area_extents_to_update = _get_dataset_area_extents_array(dataset.attrs["area"]) current_eastright, current_northup = _get_current_scene_orientation(area_extents_to_update) if target_northup == current_northup and target_eastright == current_eastright: logger.info("Dataset {} is already in the target orientation " - "and will not be flipped.".format(dataset.attrs.get('name', 'unknown_name'))) + "and will not be flipped.".format(dataset.attrs.get("name", "unknown_name"))) return dataset if target_northup != current_northup: dataset, area_extents_to_update = _flip_dataset_data_and_area_extents(dataset, area_extents_to_update, - 'upsidedown') + "upsidedown") if target_eastright != current_eastright: dataset, area_extents_to_update = _flip_dataset_data_and_area_extents(dataset, area_extents_to_update, - 'leftright') + "leftright") - dataset.attrs['area'] = _get_new_flipped_area_definition(dataset.attrs['area'], area_extents_to_update, + dataset.attrs["area"] = _get_new_flipped_area_definition(dataset.attrs["area"], area_extents_to_update, flip_areadef_stacking=target_northup != current_northup) return dataset @@ -1062,9 +1062,9 @@ def _get_target_scene_orientation(upper_right_corner): 'NE' corresponds to target_eastright and target_northup being True. 
""" - target_northup = upper_right_corner in ['NW', 'NE'] + target_northup = upper_right_corner in ["NW", "NE"] - target_eastright = upper_right_corner in ['NE', 'SE'] + target_eastright = upper_right_corner in ["NE", "SE"] return target_eastright, target_northup @@ -1091,11 +1091,11 @@ def _get_current_scene_orientation(area_extents_to_update): def _flip_dataset_data_and_area_extents(dataset, area_extents_to_update, flip_direction): """Flip the data and area extents array for a dataset.""" - logger.info("Flipping Dataset {} {}.".format(dataset.attrs.get('name', 'unknown_name'), flip_direction)) - if flip_direction == 'upsidedown': + logger.info("Flipping Dataset {} {}.".format(dataset.attrs.get("name", "unknown_name"), flip_direction)) + if flip_direction == "upsidedown": dataset = dataset[::-1, :] area_extents_to_update[:, [1, 3]] = area_extents_to_update[:, [3, 1]] - elif flip_direction == 'leftright': + elif flip_direction == "leftright": dataset = dataset[:, ::-1] area_extents_to_update[:, [0, 2]] = area_extents_to_update[:, [2, 0]] else: @@ -1128,7 +1128,7 @@ def _get_new_flipped_area_definition(dataset_area_attr, area_extents_to_update, class GEOFlippableFileYAMLReader(FileYAMLReader): """Reader for flippable geostationary data.""" - def _load_dataset_with_area(self, dsid, coords, upper_right_corner='native', **kwargs): + def _load_dataset_with_area(self, dsid, coords, upper_right_corner="native", **kwargs): ds = super(GEOFlippableFileYAMLReader, self)._load_dataset_with_area(dsid, coords, **kwargs) if ds is not None: @@ -1165,15 +1165,15 @@ def create_filehandlers(self, filenames, fh_kwargs=None): for fhs in created_fhs.values(): for fh in fhs: # check the filename for total_segments parameter as a fallback - ts = fh.filename_info.get('total_segments', 1) + ts = fh.filename_info.get("total_segments", 1) # if the YAML has segments explicitly specified then use that - fh.filetype_info.setdefault('expected_segments', ts) + fh.filetype_info.setdefault("expected_segments", ts) # add segment key-values for FCI filehandlers - if 'segment' not in fh.filename_info: - fh.filename_info['segment'] = fh.filename_info.get('count_in_repeat_cycle', 1) + if "segment" not in fh.filename_info: + fh.filename_info["segment"] = fh.filename_info.get("count_in_repeat_cycle", 1) return created_fhs - def _load_dataset(self, dsid, ds_info, file_handlers, dim='y', pad_data=True): + def _load_dataset(self, dsid, ds_info, file_handlers, dim="y", pad_data=True): """Load only a piece of the dataset.""" if not pad_data: return FileYAMLReader._load_dataset(dsid, ds_info, @@ -1186,7 +1186,7 @@ def _load_dataset(self, dsid, ds_info, file_handlers, dim='y', pad_data=True): raise KeyError( "Could not load {} from any provided files".format(dsid)) - filetype = file_handlers[0].filetype_info['file_type'] + filetype = file_handlers[0].filetype_info["file_type"] self.empty_segment = xr.full_like(projectable, np.nan) for i, sli in enumerate(slice_list): if sli is None: @@ -1230,9 +1230,9 @@ def _load_area_def_with_padding(self, dsid, file_handlers): def _pad_later_segments_area(self, file_handlers, dsid): """Pad area definitions for missing segments that are later in sequence than the first available.""" - expected_segments = file_handlers[0].filetype_info['expected_segments'] - filetype = file_handlers[0].filetype_info['file_type'] - available_segments = [int(fh.filename_info.get('segment', 1)) for + expected_segments = file_handlers[0].filetype_info["expected_segments"] + filetype = 
file_handlers[0].filetype_info["file_type"] + available_segments = [int(fh.filename_info.get("segment", 1)) for fh in file_handlers] area_defs = self._get_segments_areadef_with_later_padded(file_handlers, filetype, dsid, available_segments, @@ -1250,7 +1250,7 @@ def _get_segments_areadef_with_later_padded(self, file_handlers, filetype, dsid, fh = file_handlers[idx] area = fh.get_area_def(dsid) except ValueError: - area = self._get_new_areadef_for_padded_segment(area, filetype, seg_size, segment, padding_type='later') + area = self._get_new_areadef_for_padded_segment(area, filetype, seg_size, segment, padding_type="later") area_defs[segment] = area seg_size = area.shape @@ -1258,14 +1258,14 @@ def _get_segments_areadef_with_later_padded(self, file_handlers, filetype, dsid, def _pad_earlier_segments_area(self, file_handlers, dsid, area_defs): """Pad area definitions for missing segments that are earlier in sequence than the first available.""" - available_segments = [int(fh.filename_info.get('segment', 1)) for + available_segments = [int(fh.filename_info.get("segment", 1)) for fh in file_handlers] area = file_handlers[0].get_area_def(dsid) seg_size = area.shape - filetype = file_handlers[0].filetype_info['file_type'] + filetype = file_handlers[0].filetype_info["file_type"] for segment in range(available_segments[0] - 1, 0, -1): - area = self._get_new_areadef_for_padded_segment(area, filetype, seg_size, segment, padding_type='earlier') + area = self._get_new_areadef_for_padded_segment(area, filetype, seg_size, segment, padding_type="earlier") area_defs[segment] = area seg_size = area.shape @@ -1278,7 +1278,7 @@ def _get_new_areadef_for_padded_segment(self, area, filetype, seg_size, segment, fill_extent = (area.area_extent[0], new_ll_y, area.area_extent[2], new_ur_y) - area = AreaDefinition('fill', 'fill', 'fill', area.crs, + area = AreaDefinition("fill", "fill", "fill", area.crs, seg_size[1], new_height_px, fill_extent) return area @@ -1287,10 +1287,10 @@ def _get_y_area_extents_for_padded_segment(self, area, filetype, padding_type, s new_height_proj_coord, new_height_px = self._get_new_areadef_heights(area, seg_size, segment_n=segment, filetype=filetype) - if padding_type == 'later': + if padding_type == "later": new_ll_y = area.area_extent[1] + new_height_proj_coord new_ur_y = area.area_extent[1] - elif padding_type == 'earlier': + elif padding_type == "earlier": new_ll_y = area.area_extent[3] new_ur_y = area.area_extent[3] - new_height_proj_coord else: @@ -1324,13 +1324,13 @@ def _find_missing_segments(file_handlers, ds_info, dsid): expected_segments = 1 # get list of file handlers in segment order # (ex. 
first segment, second segment, etc) - handlers = sorted(file_handlers, key=lambda x: x.filename_info.get('segment', 1)) + handlers = sorted(file_handlers, key=lambda x: x.filename_info.get("segment", 1)) projectable = None for fh in handlers: - if fh.filetype_info['file_type'] in ds_info['file_type']: - expected_segments = fh.filetype_info['expected_segments'] + if fh.filetype_info["file_type"] in ds_info["file_type"]: + expected_segments = fh.filetype_info["expected_segments"] - while int(fh.filename_info.get('segment', 1)) > counter: + while int(fh.filename_info.get("segment", 1)) > counter: slice_list.append(None) counter += 1 try: @@ -1396,17 +1396,17 @@ def _collect_segment_position_infos(self, filetype): # collect the segment positioning infos for all available segments for fh in self.file_handlers[filetype]: chk_infos = fh.get_segment_position_info() - chk_infos.update({'segment_nr': fh.filename_info['segment'] - 1}) - self.segment_infos[filetype]['available_segment_infos'].append(chk_infos) + chk_infos.update({"segment_nr": fh.filename_info["segment"] - 1}) + self.segment_infos[filetype]["available_segment_infos"].append(chk_infos) def _initialise_segment_infos(self, filetype): # initialise the segment info for this filetype filetype_fhs_sample = self.file_handlers[filetype][0] - exp_segment_nr = filetype_fhs_sample.filetype_info['expected_segments'] + exp_segment_nr = filetype_fhs_sample.filetype_info["expected_segments"] grid_width_to_grid_type = _get_grid_width_to_grid_type(filetype_fhs_sample.get_segment_position_info()) - self.segment_infos.update({filetype: {'available_segment_infos': [], - 'expected_segments': exp_segment_nr, - 'grid_width_to_grid_type': grid_width_to_grid_type}}) + self.segment_infos.update({filetype: {"available_segment_infos": [], + "expected_segments": exp_segment_nr, + "grid_width_to_grid_type": grid_width_to_grid_type}}) def _get_empty_segment(self, dim=None, idx=None, filetype=None): grid_width = self.empty_segment.shape[1] @@ -1416,7 +1416,7 @@ def _get_empty_segment(self, dim=None, idx=None, filetype=None): def _segment_heights(self, filetype, grid_width): """Compute optimal padded segment heights (in number of pixels) based on the location of available segments.""" self._extract_segment_location_dicts(filetype) - grid_type = self.segment_infos[filetype]['grid_width_to_grid_type'][grid_width] + grid_type = self.segment_infos[filetype]["grid_width_to_grid_type"][grid_width] segment_heights = _compute_optimal_missing_segment_heights(self.segment_infos[filetype], grid_type, grid_width) return segment_heights @@ -1434,18 +1434,18 @@ def _get_new_areadef_heights(self, previous_area, previous_seg_size, segment_n=N def _get_grid_width_to_grid_type(seg_info): grid_width_to_grid_type = dict() for grid_type, grid_type_seg_info in seg_info.items(): - grid_width_to_grid_type.update({grid_type_seg_info['grid_width']: grid_type}) + grid_width_to_grid_type.update({grid_type_seg_info["grid_width"]: grid_type}) return grid_width_to_grid_type def _compute_optimal_missing_segment_heights(seg_infos, grid_type, expected_vertical_size): # initialise positioning arrays segment_start_rows, segment_end_rows, segment_heights = _init_positioning_arrays_for_variable_padding( - seg_infos['available_segment_infos'], grid_type, seg_infos['expected_segments']) + seg_infos["available_segment_infos"], grid_type, seg_infos["expected_segments"]) # populate start row of first segment and end row of last segment with known values segment_start_rows[0] = 1 - 
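
The gap detection performed by _find_missing_segments above can be sketched standalone: walk the handlers in segment order and emit None placeholders (later replaced by empty padding segments) wherever a segment is absent. Segment numbers below are made up for the demonstration.

def slices_with_gaps(available_segments, expected_segments):
    slice_list, counter = [], 1
    for seg in sorted(available_segments):
        while seg > counter:        # fill the gap before this segment
            slice_list.append(None)
            counter += 1
        slice_list.append(seg)      # stands in for a real file handler slice
        counter += 1
    while counter <= expected_segments:  # trailing gaps
        slice_list.append(None)
        counter += 1
    return slice_list

assert slices_with_gaps([2, 4], expected_segments=5) == [None, 2, None, 4, None]
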
segment_end_rows[seg_infos['expected_segments'] - 1] = expected_vertical_size + segment_end_rows[seg_infos["expected_segments"] - 1] = expected_vertical_size # find missing segments and group contiguous missing segments together missing_segments = np.where(segment_heights == 0)[0] @@ -1454,7 +1454,7 @@ def _compute_optimal_missing_segment_heights(seg_infos, grid_type, expected_vert for group in groups_missing_segments: _compute_positioning_data_for_missing_group(segment_start_rows, segment_end_rows, segment_heights, group) - return segment_heights.astype('int') + return segment_heights.astype("int") def _compute_positioning_data_for_missing_group(segment_start_rows, segment_end_rows, segment_heights, group): @@ -1513,20 +1513,20 @@ def _init_positioning_arrays_for_variable_padding(chk_infos, grid_type, exp_segm def _populate_positioning_arrays_with_available_segment_info(chk_infos, grid_type, segment_start_rows, segment_end_rows, segment_heights): for chk_info in chk_infos: - current_fh_segment_nr = chk_info['segment_nr'] - segment_heights[current_fh_segment_nr] = chk_info[grid_type]['segment_height'] - segment_start_rows[current_fh_segment_nr] = chk_info[grid_type]['start_position_row'] - segment_end_rows[current_fh_segment_nr] = chk_info[grid_type]['end_position_row'] + current_fh_segment_nr = chk_info["segment_nr"] + segment_heights[current_fh_segment_nr] = chk_info[grid_type]["segment_height"] + segment_start_rows[current_fh_segment_nr] = chk_info[grid_type]["start_position_row"] + segment_end_rows[current_fh_segment_nr] = chk_info[grid_type]["end_position_row"] def split_integer_in_most_equal_parts(x, n): """Split an integer number x in n parts that are as equally-sizes as possible.""" if x % n == 0: - return np.repeat(x // n, n).astype('int') + return np.repeat(x // n, n).astype("int") else: # split the remainder amount over the last remainder parts remainder = int(x % n) mod = int(x // n) ar = np.repeat(mod, n) ar[-remainder:] = mod + 1 - return ar.astype('int') + return ar.astype("int") diff --git a/satpy/resample.py b/satpy/resample.py index b124c84933..ebf5776267 100644 --- a/satpy/resample.py +++ b/satpy/resample.py @@ -184,16 +184,16 @@ def lcm(a, b): CHUNK_SIZE = get_legacy_chunk_size() CACHE_SIZE = 10 -NN_COORDINATES = {'valid_input_index': ('y1', 'x1'), - 'valid_output_index': ('y2', 'x2'), - 'index_array': ('y2', 'x2', 'z2')} -BIL_COORDINATES = {'bilinear_s': ('x1', ), - 'bilinear_t': ('x1', ), - 'slices_x': ('x1', 'n'), - 'slices_y': ('x1', 'n'), - 'mask_slices': ('x1', 'n'), - 'out_coords_x': ('x2', ), - 'out_coords_y': ('y2', )} +NN_COORDINATES = {"valid_input_index": ("y1", "x1"), + "valid_output_index": ("y2", "x2"), + "index_array": ("y2", "x2", "z2")} +BIL_COORDINATES = {"bilinear_s": ("x1", ), + "bilinear_t": ("x1", ), + "slices_x": ("x1", "n"), + "slices_y": ("x1", "n"), + "mask_slices": ("x1", "n"), + "out_coords_x": ("x2", ), + "out_coords_y": ("y2", )} resamplers_cache: "WeakValueDictionary[tuple, object]" = WeakValueDictionary() @@ -204,7 +204,7 @@ def hash_dict(the_dict, the_hash=None): """Calculate a hash for a dictionary.""" if the_hash is None: the_hash = hashlib.sha1() # nosec - the_hash.update(json.dumps(the_dict, sort_keys=True).encode('utf-8')) + the_hash.update(json.dumps(the_dict, sort_keys=True).encode("utf-8")) return the_hash @@ -213,11 +213,11 @@ def get_area_file(): The files are to be named `areas.yaml` or `areas.def`. 
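
split_integer_in_most_equal_parts, restated here standalone so its remainder placement can be sanity-checked: when the height does not divide evenly, the extra rows go to the *last* parts, one each.

import numpy as np

def split_integer_in_most_equal_parts(x, n):
    if x % n == 0:
        return np.repeat(x // n, n).astype("int")
    remainder = int(x % n)
    mod = int(x // n)
    ar = np.repeat(mod, n)
    ar[-remainder:] = mod + 1
    return ar.astype("int")

assert split_integer_in_most_equal_parts(10, 3).tolist() == [3, 3, 4]
assert split_integer_in_most_equal_parts(9, 3).tolist() == [3, 3, 3]
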
""" - paths = config_search_paths('areas.yaml') + paths = config_search_paths("areas.yaml") if paths: return paths else: - return get_config_path('areas.def') + return get_config_path("areas.def") def get_area_def(area_name): @@ -249,13 +249,13 @@ def add_xy_coords(data_arr, area, crs=None): Returns (xarray.DataArray): Updated DataArray object """ - if 'x' in data_arr.coords and 'y' in data_arr.coords: + if "x" in data_arr.coords and "y" in data_arr.coords: # x/y coords already provided return data_arr - if 'x' not in data_arr.dims or 'y' not in data_arr.dims: + if "x" not in data_arr.dims or "y" not in data_arr.dims: # no defined x and y dimensions return data_arr - if not hasattr(area, 'get_proj_vectors'): + if not hasattr(area, "get_proj_vectors"): return data_arr x, y = area.get_proj_vectors() @@ -265,15 +265,15 @@ def add_xy_coords(data_arr, area, crs=None): if crs is not None: units = crs.axis_info[0].unit_name # fix udunits/CF standard units - units = units.replace('metre', 'meter') - if units == 'degree': - y_attrs['units'] = 'degrees_north' - x_attrs['units'] = 'degrees_east' + units = units.replace("metre", "meter") + if units == "degree": + y_attrs["units"] = "degrees_north" + x_attrs["units"] = "degrees_east" else: - y_attrs['units'] = units - x_attrs['units'] = units - y = xr.DataArray(y, dims=('y',), attrs=y_attrs) - x = xr.DataArray(x, dims=('x',), attrs=x_attrs) + y_attrs["units"] = units + x_attrs["units"] = units + y = xr.DataArray(y, dims=("y",), attrs=y_attrs) + x = xr.DataArray(x, dims=("x",), attrs=x_attrs) return data_arr.assign_coords(y=y, x=x) @@ -303,10 +303,10 @@ def add_crs_xy_coords(data_arr, area): # default lat/lon projection latlon_proj = "+proj=latlong +datum=WGS84 +ellps=WGS84" # otherwise get it from the area definition - if hasattr(area, 'crs'): + if hasattr(area, "crs"): crs = area.crs else: - proj_str = getattr(area, 'proj_str', latlon_proj) + proj_str = getattr(area, "proj_str", latlon_proj) crs = CRS.from_string(proj_str) data_arr = data_arr.assign_coords(crs=crs) @@ -319,12 +319,12 @@ def add_crs_xy_coords(data_arr, area): # array). 
lons = area.lons lats = area.lats - lons.attrs.setdefault('standard_name', 'longitude') - lons.attrs.setdefault('long_name', 'longitude') - lons.attrs.setdefault('units', 'degrees_east') - lats.attrs.setdefault('standard_name', 'latitude') - lats.attrs.setdefault('long_name', 'latitude') - lats.attrs.setdefault('units', 'degrees_north') + lons.attrs.setdefault("standard_name", "longitude") + lons.attrs.setdefault("long_name", "longitude") + lons.attrs.setdefault("units", "degrees_east") + lats.attrs.setdefault("standard_name", "latitude") + lats.attrs.setdefault("long_name", "latitude") + lats.attrs.setdefault("units", "degrees_north") # See https://github.com/pydata/xarray/issues/3068 # data_arr = data_arr.assign_coords(longitude=lons, latitude=lats) else: @@ -347,7 +347,7 @@ def update_resampled_coords(old_data, new_data, new_area): # this *MUST* happen before we set 'crs' below otherwise any 'crs' # coordinate in the coordinate variables we are copying will overwrite the # 'crs' coordinate we just assigned to the data - ignore_coords = ('y', 'x', 'crs') + ignore_coords = ("y", "x", "crs") new_coords = {} for cname, cval in old_data.coords.items(): # we don't want coordinates that depended on the old x/y dimensions @@ -437,19 +437,19 @@ def resample(self, data, cache_dir=None, mask_area=None, **kwargs): if isinstance(self.source_geo_def, SwathDefinition): geo_dims = self.source_geo_def.lons.dims else: - geo_dims = ('y', 'x') + geo_dims = ("y", "x") flat_dims = [dim for dim in data.dims if dim not in geo_dims] if np.issubdtype(data.dtype, np.integer): - kwargs['mask'] = data == data.attrs.get('_FillValue', np.iinfo(data.dtype.type).max) + kwargs["mask"] = data == data.attrs.get("_FillValue", np.iinfo(data.dtype.type).max) else: - kwargs['mask'] = data.isnull() - kwargs['mask'] = kwargs['mask'].all(dim=flat_dims) + kwargs["mask"] = data.isnull() + kwargs["mask"] = kwargs["mask"].all(dim=flat_dims) cache_id = self.precompute(cache_dir=cache_dir, **kwargs) return self.compute(data, cache_id=cache_id, **kwargs) - def _create_cache_filename(self, cache_dir, prefix='', - fmt='.zarr', **kwargs): + def _create_cache_filename(self, cache_dir, prefix="", + fmt=".zarr", **kwargs): """Create filename for the cached resampling parameters.""" hash_str = self.get_hash(**kwargs) return os.path.join(cache_dir, prefix + hash_str + fmt) @@ -500,7 +500,7 @@ def precompute(self, mask=None, radius_of_influence=None, epsilon=0, "masked pixels. 
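
A hedged sketch of the invalid-pixel mask built in resample above: integer arrays compare against their _FillValue attribute (defaulting to the dtype maximum), float arrays use isnull(), and any non-geolocation dimensions are collapsed with all(). The array below is a toy stand-in.

import numpy as np
import xarray as xr

def build_mask(data, geo_dims=("y", "x")):
    flat_dims = [dim for dim in data.dims if dim not in geo_dims]
    if np.issubdtype(data.dtype, np.integer):
        fill = data.attrs.get("_FillValue", np.iinfo(data.dtype.type).max)
        mask = data == fill
    else:
        mask = data.isnull()
    return mask.all(dim=flat_dims)

arr = xr.DataArray(np.array([[[1, 65535]]], dtype="uint16"),
                   dims=("bands", "y", "x"))
assert build_mask(arr).values.tolist() == [[False, True]]
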
Will not cache results.") cache_dir = None - if radius_of_influence is None and not hasattr(self.source_geo_def, 'geocentric_resolution'): + if radius_of_influence is None and not hasattr(self.source_geo_def, "geocentric_resolution"): radius_of_influence = self._adjust_radius_of_influence(radius_of_influence) kwargs = dict(source_geo_def=self.source_geo_def, @@ -555,11 +555,11 @@ def _check_numpy_cache(self, cache_dir, mask=None, if cache_dir is None: return fname_np = self._create_cache_filename(cache_dir, - prefix='resample_lut-', - mask=mask, fmt='.npz', + prefix="resample_lut-", + mask=mask, fmt=".npz", **kwargs) - fname_zarr = self._create_cache_filename(cache_dir, prefix='nn_lut-', - mask=mask, fmt='.zarr', + fname_zarr = self._create_cache_filename(cache_dir, prefix="nn_lut-", + mask=mask, fmt=".zarr", **kwargs) LOG.debug("Check if %s exists", fname_np) if os.path.exists(fname_np) and not os.path.exists(fname_zarr): @@ -570,7 +570,7 @@ def _check_numpy_cache(self, cache_dir, mask=None, ) LOG.warning("Converting resampling LUT from .npz to .zarr") zarr_out = xr.Dataset() - with np.load(fname_np, 'r') as fid: + with np.load(fname_np, "r") as fid: for idx_name, coord in NN_COORDINATES.items(): zarr_out[idx_name] = (coord, fid[idx_name]) @@ -580,7 +580,7 @@ def _check_numpy_cache(self, cache_dir, mask=None, def load_neighbour_info(self, cache_dir, mask=None, **kwargs): """Read index arrays from either the in-memory or disk cache.""" - mask_name = getattr(mask, 'name', None) + mask_name = getattr(mask, "name", None) cached = {} self._check_numpy_cache(cache_dir, mask=mask_name, **kwargs) @@ -591,11 +591,11 @@ def load_neighbour_info(self, cache_dir, mask=None, **kwargs): elif cache_dir: try: filename = self._create_cache_filename( - cache_dir, prefix='nn_lut-', + cache_dir, prefix="nn_lut-", mask=mask_name, **kwargs) - fid = zarr.open(filename, 'r') + fid = zarr.open(filename, "r") cache = np.array(fid[idx_name]) - if idx_name == 'valid_input_index': + if idx_name == "valid_input_index": # valid input index array needs to be boolean cache = cache.astype(bool) except ValueError: @@ -609,11 +609,11 @@ def load_neighbour_info(self, cache_dir, mask=None, **kwargs): def save_neighbour_info(self, cache_dir, mask=None, **kwargs): """Cache resampler's index arrays if there is a cache dir.""" if cache_dir: - mask_name = getattr(mask, 'name', None) + mask_name = getattr(mask, "name", None) cache = self._read_resampler_attrs() filename = self._create_cache_filename( - cache_dir, prefix='nn_lut-', mask=mask_name, **kwargs) - LOG.info('Saving kd_tree neighbour info to %s', filename) + cache_dir, prefix="nn_lut-", mask=mask_name, **kwargs) + LOG.info("Saving kd_tree neighbour info to %s", filename) zarr_out = xr.Dataset() for idx_name, coord in NN_COORDINATES.items(): # update the cache in place with persisted dask arrays @@ -707,7 +707,7 @@ def resample(self, *args, **kwargs): not needed in EWA resampling currently. 
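
A hypothetical, minimal version of the cache round trip used by save_neighbour_info/load_neighbour_info above: persist an index array as an xarray Dataset in zarr, then restore it. The coordinate names follow NN_COORDINATES; the defensive astype(bool) mirrors what the loader above does for valid_input_index.

import numpy as np
import xarray as xr
import zarr

def save_cache(filename, valid_input_index):
    # persist one index array under the coordinate names used above
    ds = xr.Dataset({"valid_input_index": (("y1", "x1"), valid_input_index)})
    ds.to_zarr(filename)

def load_cache(filename):
    fid = zarr.open(filename, "r")
    # restore the boolean dtype, as load_neighbour_info does above
    return np.array(fid["valid_input_index"]).astype(bool)
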
""" - kwargs.setdefault('mask_area', False) + kwargs.setdefault("mask_area", False) return super(_LegacySatpyEWAResampler, self).resample(*args, **kwargs) def _call_ll2cr(self, lons, lats, target_geo_def, swath_usage=0): @@ -739,7 +739,7 @@ def precompute(self, cache_dir=None, swath_usage=0, **kwargs): # no need to recompute ll2cr output again return None - if kwargs.get('mask') is not None: + if kwargs.get("mask") is not None: LOG.warning("'mask' parameter has no affect during EWA " "resampling") @@ -808,13 +808,13 @@ def compute(self, data, cache_id=None, fill_value=0, weight_count=10000, # if the data is scan based then check its metadata or the passed # kwargs otherwise assume the entire input swath is one large # "scanline" - rows_per_scan = kwargs.get('rows_per_scan', + rows_per_scan = kwargs.get("rows_per_scan", data.attrs.get("rows_per_scan", data.shape[0])) - if data.ndim == 3 and 'bands' in data.dims: + if data.ndim == 3 and "bands" in data.dims: data_in = tuple(data.sel(bands=band).data - for band in data['bands']) + for band in data["bands"]) elif data.ndim == 2: data_in = data.data else: @@ -834,10 +834,10 @@ def compute(self, data, cache_id=None, fill_value=0, weight_count=10000, data_arr = da.from_delayed(res, new_shape, data.dtype) # from delayed creates one large chunk, break it up a bit if we can data_arr = data_arr.rechunk([CHUNK_SIZE] * data_arr.ndim) - if data.ndim == 3 and data.dims[0] == 'bands': - dims = ('bands', 'y', 'x') + if data.ndim == 3 and data.dims[0] == "bands": + dims = ("bands", "y", "x") elif data.ndim == 2: - dims = ('y', 'x') + dims = ("y", "x") else: dims = data.dims @@ -900,7 +900,7 @@ def load_bil_info(self, cache_dir, **kwargs): """Load bilinear resampling info from cache directory.""" if cache_dir: filename = self._create_cache_filename(cache_dir, - prefix='bil_lut-', + prefix="bil_lut-", **kwargs) try: self.resampler.load_resampling_info(filename) @@ -918,12 +918,12 @@ def save_bil_info(self, cache_dir, **kwargs): """Save bilinear resampling info to cache directory.""" if cache_dir: filename = self._create_cache_filename(cache_dir, - prefix='bil_lut-', + prefix="bil_lut-", **kwargs) # There are some old caches, move them out of the way if os.path.exists(filename): _move_existing_caches(cache_dir, filename) - LOG.info('Saving BIL neighbour info to %s', filename) + LOG.info("Saving BIL neighbour info to %s", filename) try: self.resampler.save_resampling_info(filename) except AttributeError: @@ -938,7 +938,7 @@ def compute(self, data, fill_value=None, **kwargs): del kwargs if fill_value is None: - fill_value = data.attrs.get('_FillValue') + fill_value = data.attrs.get("_FillValue") target_shape = self.target_geo_def.shape res = self.resampler.get_sample_from_bil_info(data, @@ -952,7 +952,7 @@ def _move_existing_caches(cache_dir, filename): """Move existing cache files out of the way.""" import os import shutil - old_cache_dir = os.path.join(cache_dir, 'moved_by_satpy') + old_cache_dir = os.path.join(cache_dir, "moved_by_satpy") try: os.makedirs(old_cache_dir) except FileExistsError: @@ -977,7 +977,7 @@ def _mean(data, y_size, x_size): def _repeat_by_factor(data, block_info=None): if block_info is None: return data - out_shape = block_info[None]['chunk-shape'] + out_shape = block_info[None]["chunk-shape"] out_data = data for axis, axis_size in enumerate(out_shape): in_size = data.shape[axis] @@ -1035,15 +1035,15 @@ def compute(self, data, expand=True, **kwargs): target_geo_def = self.target_geo_def # convert xarray backed with numpy array to dask 
array - if 'x' not in data.dims or 'y' not in data.dims: + if "x" not in data.dims or "y" not in data.dims: if data.ndim not in [2, 3]: raise ValueError("Can only handle 2D or 3D arrays without dimensions.") # assume rows is the second to last axis y_axis = data.ndim - 2 x_axis = data.ndim - 1 else: - y_axis = data.dims.index('y') - x_axis = data.dims.index('x') + y_axis = data.dims.index("y") + x_axis = data.dims.index("x") out_shape = target_geo_def.shape in_shape = data.shape @@ -1124,24 +1124,24 @@ def _get_arg_to_pass_for_skipna_handling(**kwargs): # FIXME this can be removed once Pyresample 1.18.0 is a Satpy requirement if PR_USE_SKIPNA: - if 'mask_all_nan' in kwargs: + if "mask_all_nan" in kwargs: warnings.warn( - 'Argument mask_all_nan is deprecated. Please use skipna for missing values handling. ' - 'Continuing with default skipna=True, if not provided differently.', + "Argument mask_all_nan is deprecated. Please use skipna for missing values handling. " + "Continuing with default skipna=True, if not provided differently.", DeprecationWarning, stacklevel=3 ) - kwargs.pop('mask_all_nan') + kwargs.pop("mask_all_nan") else: - if 'mask_all_nan' in kwargs: + if "mask_all_nan" in kwargs: warnings.warn( - 'Argument mask_all_nan is deprecated.' - 'Please update Pyresample and use skipna for missing values handling.', + "Argument mask_all_nan is deprecated." + "Please update Pyresample and use skipna for missing values handling.", DeprecationWarning, stacklevel=3 ) - kwargs.setdefault('mask_all_nan', False) - kwargs.pop('skipna') + kwargs.setdefault("mask_all_nan", False) + kwargs.pop("skipna") return kwargs @@ -1178,32 +1178,32 @@ def resample(self, data, **kwargs): Returns (xarray.DataArray): Data resampled to the target area """ - if not PR_USE_SKIPNA and 'skipna' in kwargs: - raise ValueError('You are trying to set the skipna argument but you are using an old version of' - ' Pyresample that does not support it.' - 'Please update Pyresample to 1.18.0 or higher to be able to use this argument.') + if not PR_USE_SKIPNA and "skipna" in kwargs: + raise ValueError("You are trying to set the skipna argument but you are using an old version of" + " Pyresample that does not support it." 
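
The block-wise upsampling pattern used by _repeat_by_factor above deserves a self-contained example: dask hands each chunk its target "chunk-shape" via block_info, and np.repeat stretches every axis to match. The 2x2 array and factor below are made up.

import dask.array as da
import numpy as np

def repeat_by_factor(data, block_info=None):
    if block_info is None:
        return data
    out_shape = block_info[None]["chunk-shape"]
    out_data = data
    for axis, axis_size in enumerate(out_shape):
        in_size = out_data.shape[axis]
        out_data = np.repeat(out_data, axis_size // in_size, axis=axis)
    return out_data

arr = da.arange(4, dtype=float).reshape(2, 2).rechunk((1, 2))
doubled = da.map_blocks(repeat_by_factor, arr, chunks=((2, 2), (4,)), dtype=float)
assert doubled.shape == (4, 4)
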
+ "Please update Pyresample to 1.18.0 or higher to be able to use this argument.") self.precompute(**kwargs) attrs = data.attrs.copy() data_arr = data.data - if data.ndim == 3 and data.dims[0] == 'bands': - dims = ('bands', 'y', 'x') + if data.ndim == 3 and data.dims[0] == "bands": + dims = ("bands", "y", "x") # Both one and two dimensional input data results in 2D output elif data.ndim in (1, 2): - dims = ('y', 'x') + dims = ("y", "x") else: dims = data.dims - LOG.debug("Resampling %s", str(data.attrs.get('_satpy_id', 'unknown'))) + LOG.debug("Resampling %s", str(data.attrs.get("_satpy_id", "unknown"))) result = self.compute(data_arr, **kwargs) coords = {} - if 'bands' in data.coords: - coords['bands'] = data.coords['bands'] + if "bands" in data.coords: + coords["bands"] = data.coords["bands"] # Fractions are returned in a dict elif isinstance(result, dict): - coords['categories'] = sorted(result.keys()) - dims = ('categories', 'y', 'x') + coords["categories"] = sorted(result.keys()) + dims = ("categories", "y", "x") new_result = [] - for cat in coords['categories']: + for cat in coords["categories"]: new_result.append(result[cat]) result = da.stack(new_result) if result.ndim > len(dims): @@ -1211,13 +1211,13 @@ def resample(self, data, **kwargs): # Adjust some attributes if "BucketFraction" in str(self): - attrs['units'] = '' - attrs['calibration'] = '' - attrs['standard_name'] = 'area_fraction' + attrs["units"] = "" + attrs["calibration"] = "" + attrs["standard_name"] = "area_fraction" elif "BucketCount" in str(self): - attrs['units'] = '' - attrs['calibration'] = '' - attrs['standard_name'] = 'number_of_observations' + attrs["units"] = "" + attrs["calibration"] = "" + attrs["standard_name"] = "number_of_observations" result = xr.DataArray(result, dims=dims, coords=coords, attrs=attrs) @@ -1362,10 +1362,10 @@ def compute(self, data, fill_value=np.nan, categories=None, **kwargs): "bucket_fraction": BucketFraction, } if DaskEWAResampler is not None: - RESAMPLERS['ewa'] = DaskEWAResampler - RESAMPLERS['ewa_legacy'] = LegacyDaskEWAResampler + RESAMPLERS["ewa"] = DaskEWAResampler + RESAMPLERS["ewa_legacy"] = LegacyDaskEWAResampler else: - RESAMPLERS['ewa'] = _LegacySatpyEWAResampler + RESAMPLERS["ewa"] = _LegacySatpyEWAResampler # deepcode ignore PythonSameEvalBinaryExpressiontrue: PRBaseResampler is None only on import errors @@ -1378,7 +1378,7 @@ def prepare_resampler(source_area, destination_area, resampler=None, **resample_ """Instantiate and return a resampler.""" if resampler is None: LOG.info("Using default KDTree resampler") - resampler = 'kd_tree' + resampler = "kd_tree" if isinstance(resampler, (BaseResampler, PRBaseResampler)): raise ValueError("Trying to create a resampler when one already " @@ -1388,7 +1388,7 @@ def prepare_resampler(source_area, destination_area, resampler=None, **resample_ if resampler_class is None: if resampler == "gradient_search": warnings.warn( - 'Gradient search resampler not available. Maybe missing `shapely`?', + "Gradient search resampler not available. 
Maybe missing `shapely`?", stacklevel=2 ) raise KeyError("Resampler '%s' not available" % resampler) @@ -1429,7 +1429,7 @@ def resample(source_area, data, destination_area, def get_fill_value(dataset): """Get the fill value of the *dataset*, defaulting to np.nan.""" if np.issubdtype(dataset.dtype, np.integer): - return dataset.attrs.get('_FillValue', np.nan) + return dataset.attrs.get("_FillValue", np.nan) return np.nan @@ -1453,11 +1453,11 @@ def resample_dataset(dataset, destination_area, **kwargs): source_area = dataset.attrs["area"] except KeyError: LOG.info("Cannot reproject dataset %s, missing area info", - dataset.attrs['name']) + dataset.attrs["name"]) return dataset - fill_value = kwargs.pop('fill_value', get_fill_value(dataset)) + fill_value = kwargs.pop("fill_value", get_fill_value(dataset)) new_data = resample(source_area, dataset, destination_area, fill_value=fill_value, **kwargs) new_attrs = new_data.attrs new_data.attrs = dataset.attrs.copy() diff --git a/satpy/scene.py b/satpy/scene.py index e3e71811e9..92a7c9d623 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -141,7 +141,7 @@ def __init__(self, filenames=None, reader=None, filter_parameters=None, cleaned_reader_kwargs = {} else: cleaned_reader_kwargs = cleaned_reader_kwargs.copy() - cleaned_reader_kwargs.setdefault('filter_parameters', {}).update(filter_parameters) + cleaned_reader_kwargs.setdefault("filter_parameters", {}).update(filter_parameters) if filenames and isinstance(filenames, str): raise ValueError("'filenames' must be a list of files: Scene(filenames=[filename])") @@ -163,7 +163,7 @@ def wishlist(self): return self._wishlist.copy() def _ipython_key_completions_(self): - return [x['name'] for x in self._datasets.keys()] + return [x["name"] for x in self._datasets.keys()] def _create_reader_instances(self, filenames=None, @@ -210,10 +210,10 @@ def start_time(self): will be consulted. """ - start_times = [data_arr.attrs['start_time'] for data_arr in self.values() - if 'start_time' in data_arr.attrs] + start_times = [data_arr.attrs["start_time"] for data_arr in self.values() + if "start_time" in data_arr.attrs] if not start_times: - start_times = self._reader_times('start_time') + start_times = self._reader_times("start_time") if not start_times: return None return min(start_times) @@ -227,10 +227,10 @@ def end_time(self): :attr:`Scene.start_time` is returned. 
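
A compact restatement of get_fill_value above, runnable on its own: integer datasets fall back to their _FillValue attribute, everything else resamples with NaN.

import numpy as np
import xarray as xr

def get_fill_value(dataset):
    if np.issubdtype(dataset.dtype, np.integer):
        return dataset.attrs.get("_FillValue", np.nan)
    return np.nan

ints = xr.DataArray(np.array([1, 2], dtype="int16"), attrs={"_FillValue": -999})
floats = xr.DataArray(np.array([1.0, 2.0]))
assert get_fill_value(ints) == -999
assert np.isnan(get_fill_value(floats))
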
""" - end_times = [data_arr.attrs['end_time'] for data_arr in self.values() - if 'end_time' in data_arr.attrs] + end_times = [data_arr.attrs["end_time"] for data_arr in self.values() + if "end_time" in data_arr.attrs] if not end_times: - end_times = self._reader_times('end_time') + end_times = self._reader_times("end_time") if not end_times: return self.start_time return max(end_times) @@ -309,7 +309,7 @@ def _gather_all_areas(self, datasets): continue elif not isinstance(ds, DataArray): ds = self[ds] - area = ds.attrs.get('area') + area = ds.attrs.get("area") areas.append(area) areas = [x for x in areas if x is not None] if not areas: @@ -439,7 +439,7 @@ def available_dataset_names(self, reader_name=None, composites=False): Returns: list of available dataset names """ - return sorted(set(x['name'] for x in self.available_dataset_ids( + return sorted(set(x["name"] for x in self.available_dataset_ids( reader_name=reader_name, composites=composites))) def all_dataset_ids(self, reader_name=None, composites=False): @@ -495,7 +495,7 @@ def all_dataset_names(self, reader_name=None, composites=False): Returns: list of all dataset names """ - return sorted(set(x['name'] for x in self.all_dataset_ids( + return sorted(set(x["name"] for x in self.all_dataset_ids( reader_name=reader_name, composites=composites))) def _check_known_composites(self, available_only=False): @@ -508,7 +508,7 @@ def _check_known_composites(self, available_only=False): dep_tree = DependencyTree(self._readers, sensor_comps, mods, available_only=available_only) # ignore inline compositor dependencies starting with '_' comps = (comp for comp_dict in sensor_comps.values() - for comp in comp_dict.keys() if not comp['name'].startswith('_')) + for comp in comp_dict.keys() if not comp["name"].startswith("_")) # make sure that these composites are even create-able by these readers all_comps = set(comps) # find_dependencies will update the all_comps set with DataIDs @@ -526,7 +526,7 @@ def available_composite_ids(self): def available_composite_names(self): """Names of all configured composites known to this Scene.""" - return sorted(set(x['name'] for x in self.available_composite_ids())) + return sorted(set(x["name"] for x in self.available_composite_ids())) def all_composite_ids(self): """Get all IDs for configured composites.""" @@ -534,7 +534,7 @@ def all_composite_ids(self): def all_composite_names(self): """Get all names for all configured composites.""" - return sorted(set(x['name'] for x in self.all_composite_ids())) + return sorted(set(x["name"] for x in self.all_composite_ids())) def all_modifier_names(self): """Get names of configured modifier objects.""" @@ -557,7 +557,7 @@ def iter_by_area(self): """ datasets_by_area = {} for ds in self: - a = ds.attrs.get('area') + a = ds.attrs.get("area") dsid = DataID.from_dataarray(ds) datasets_by_area.setdefault(a, []).append(dsid) @@ -597,14 +597,14 @@ def copy(self, datasets=None): @property def all_same_area(self): """All contained data arrays are on the same area.""" - all_areas = [x.attrs.get('area', None) for x in self.values()] + all_areas = [x.attrs.get("area", None) for x in self.values()] all_areas = [x for x in all_areas if x is not None] return all(all_areas[0] == x for x in all_areas[1:]) @property def all_same_proj(self): """All contained data array are in the same projection.""" - all_areas = [x.attrs.get('area', None) for x in self.values()] + all_areas = [x.attrs.get("area", None) for x in self.values()] all_areas = [x for x in all_areas if x is not None] return 
all(all_areas[0].crs == x.crs for x in all_areas[1:]) @@ -614,11 +614,11 @@ def _slice_area_from_bbox(src_area, dst_area, ll_bbox=None, """Slice the provided area using the bounds provided.""" if ll_bbox is not None: dst_area = AreaDefinition( - 'crop_area', 'crop_area', 'crop_latlong', - {'proj': 'latlong'}, 100, 100, ll_bbox) + "crop_area", "crop_area", "crop_latlong", + {"proj": "latlong"}, 100, 100, ll_bbox) elif xy_bbox is not None: dst_area = AreaDefinition( - 'crop_area', 'crop_area', 'crop_xy', + "crop_area", "crop_area", "crop_xy", src_area.crs, src_area.width, src_area.height, xy_bbox) x_slice, y_slice = src_area.get_area_slices(dst_area) @@ -638,7 +638,7 @@ def _slice_datasets(self, dataset_ids, slice_key, new_area, area_only=True): if ds_id in new_datasets: replace_anc(ds, pres) continue - if area_only and ds.attrs.get('area') is None: + if area_only and ds.attrs.get("area") is None: new_datasets[ds_id] = ds replace_anc(ds, pres) continue @@ -650,7 +650,7 @@ def _slice_datasets(self, dataset_ids, slice_key, new_area, area_only=True): key = slice_key new_ds = ds.isel(**key) if new_area is not None: - new_ds.attrs['area'] = new_area + new_ds.attrs["area"] = new_area new_datasets[ds_id] = new_ds if parent_ds is None: @@ -679,7 +679,7 @@ def slice(self, key): if area is not None: # assume dimensions for area are y and x one_ds = self[dataset_ids[0]] - area_key = tuple(sl for dim, sl in zip(one_ds.dims, key) if dim in ['y', 'x']) + area_key = tuple(sl for dim, sl in zip(one_ds.dims, key) if dim in ["y", "x"]) new_area = area[area_key] else: new_area = None @@ -759,7 +759,7 @@ def crop(self, area=None, ll_bbox=None, xy_bbox=None, dataset_ids=None): x_slice = slice(min_x_slice.start * x_factor, min_x_slice.stop * x_factor) new_area = src_area[y_slice, x_slice] - slice_key = {'y': y_slice, 'x': x_slice} + slice_key = {"y": y_slice, "x": x_slice} new_scn._slice_datasets(dataset_ids, slice_key, new_area) else: new_target_areas[src_area] = self._slice_area_from_bbox( @@ -768,7 +768,7 @@ def crop(self, area=None, ll_bbox=None, xy_bbox=None, dataset_ids=None): return new_scn - def aggregate(self, dataset_ids=None, boundary='trim', side='left', func='mean', **dim_kwargs): + def aggregate(self, dataset_ids=None, boundary="trim", side="left", func="mean", **dim_kwargs): """Create an aggregated version of the Scene. 
Args: @@ -810,8 +810,8 @@ def aggregate(self, dataset_ids=None, boundary='trim', side='left', func='mean', side=side, **dim_kwargs) new_scn._datasets[ds_id].attrs = self[ds_id].attrs.copy() - new_scn._datasets[ds_id].attrs['area'] = target_area - new_scn._datasets[ds_id].attrs['resolution'] = resolution + new_scn._datasets[ds_id].attrs["area"] = target_area + new_scn._datasets[ds_id].attrs["resolution"] = resolution return new_scn def get(self, key, default=None): @@ -846,11 +846,11 @@ def _slice_data(self, source_area, slices, dataset): """Slice the data to reduce it.""" slice_x, slice_y = slices dataset = dataset.isel(x=slice_x, y=slice_y) - if ('x', source_area.width) not in dataset.sizes.items(): + if ("x", source_area.width) not in dataset.sizes.items(): raise RuntimeError - if ('y', source_area.height) not in dataset.sizes.items(): + if ("y", source_area.height) not in dataset.sizes.items(): raise RuntimeError - dataset.attrs['area'] = source_area + dataset.attrs["area"] = source_area return dataset @@ -877,19 +877,19 @@ def _resampled_scene(self, new_scn, destination_area, reduce_data=True, if ds_id in new_scn._datasets: new_scn._datasets[ds_id] = new_datasets[ds_id] continue - if dataset.attrs.get('area') is None: + if dataset.attrs.get("area") is None: if parent_dataset is None: new_scn._datasets[ds_id] = dataset else: replace_anc(dataset, pres) continue LOG.debug("Resampling %s", ds_id) - source_area = dataset.attrs['area'] + source_area = dataset.attrs["area"] dataset, source_area = self._reduce_data(dataset, source_area, destination_area, reduce_data, reductions, resample_kwargs) self._prepare_resampler(source_area, destination_area, resamplers, resample_kwargs) kwargs = resample_kwargs.copy() - kwargs['resampler'] = resamplers[source_area] + kwargs["resampler"] = resamplers[source_area] res = resample_dataset(dataset, destination_area, **kwargs) new_datasets[ds_id] = res if ds_id in new_scn._datasets: @@ -900,7 +900,7 @@ def _resampled_scene(self, new_scn, destination_area, reduce_data=True, def _get_finalized_destination_area(self, destination_area, new_scn): if isinstance(destination_area, str): destination_area = get_area_def(destination_area) - if hasattr(destination_area, 'freeze'): + if hasattr(destination_area, "freeze"): try: finest_area = new_scn.finest_area() destination_area = destination_area.freeze(finest_area) @@ -923,8 +923,8 @@ def _reduce_data(self, dataset, source_area, destination_area, reduce_data, redu try: (slice_x, slice_y), source_area = reductions[key] except KeyError: - if resample_kwargs.get('resampler') == 'gradient_search': - factor = resample_kwargs.get('shape_divisible_by', 2) + if resample_kwargs.get("resampler") == "gradient_search": + factor = resample_kwargs.get("shape_divisible_by", 2) else: factor = None try: @@ -1051,7 +1051,7 @@ def to_geoviews(self, gvtype=None, datasets=None, kdims=None, vdims=None, dynami # by default select first data variable as display variable vdims = ds.data_vars[list(ds.data_vars.keys())[0]].name - if hasattr(ds, "area") and hasattr(ds.area, 'to_cartopy_crs'): + if hasattr(ds, "area") and hasattr(ds.area, "to_cartopy_crs"): dscrs = ds.area.to_cartopy_crs() gvds = gv.Dataset(ds, crs=dscrs) else: @@ -1083,17 +1083,17 @@ def to_xarray_dataset(self, datasets=None): if len(dataarrays) == 0: return xr.Dataset() - ds_dict = {i.attrs['name']: i.rename(i.attrs['name']) for i in dataarrays if i.attrs.get('area') is not None} + ds_dict = {i.attrs["name"]: i.rename(i.attrs["name"]) for i in dataarrays if i.attrs.get("area") 
is not None} mdata = combine_metadata(*tuple(i.attrs for i in dataarrays)) - if mdata.get('area') is None or not isinstance(mdata['area'], SwathDefinition): + if mdata.get("area") is None or not isinstance(mdata["area"], SwathDefinition): # either don't know what the area is or we have an AreaDefinition ds = xr.merge(ds_dict.values()) else: # we have a swath definition and should use lon/lat values - lons, lats = mdata['area'].get_lonlats() + lons, lats = mdata["area"].get_lonlats() if not isinstance(lons, DataArray): - lons = DataArray(lons, dims=('y', 'x')) - lats = DataArray(lats, dims=('y', 'x')) + lons = DataArray(lons, dims=("y", "x")) + lats = DataArray(lats, dims=("y", "x")) ds = xr.Dataset(ds_dict, coords={"latitude": lats, "longitude": lons}) @@ -1109,7 +1109,7 @@ def to_xarray(self, include_lonlats=True, epoch=None, include_orig_name=True, - numeric_name_prefix='CHANNEL_'): + numeric_name_prefix="CHANNEL_"): """Merge all xr.DataArray(s) of a satpy.Scene to a CF-compliant xarray object. If all Scene DataArrays are on the same area, it returns an xr.Dataset. @@ -1211,7 +1211,7 @@ def save_dataset(self, dataset_id, filename=None, writer=None, """ if writer is None and filename is None: - writer = 'geotiff' + writer = "geotiff" elif writer is None: writer = self._get_writer_by_ext(os.path.splitext(filename)[1]) @@ -1274,7 +1274,7 @@ def save_datasets(self, writer=None, filename=None, datasets=None, compute=True, "dimensions (eg. through resampling).") if writer is None: if filename is None: - writer = 'geotiff' + writer = "geotiff" else: writer = self._get_writer_by_ext(os.path.splitext(filename)[1]) writer, save_kwargs = load_writer(writer, @@ -1346,7 +1346,7 @@ def _get_writer_by_ext(extension): """ mapping = {".tiff": "geotiff", ".tif": "geotiff", ".nc": "cf", ".mitiff": "mitiff"} - return mapping.get(extension.lower(), 'simple_image') + return mapping.get(extension.lower(), "simple_image") def _remove_failed_datasets(self, keepables): """Remove the datasets that we couldn't create.""" @@ -1384,8 +1384,8 @@ def unload(self, keepables=None): LOG.debug("Unloading dataset: %r", ds_id) del self._datasets[ds_id] - def load(self, wishlist, calibration='*', resolution='*', - polarization='*', level='*', modifiers='*', generate=True, unload=True, + def load(self, wishlist, calibration="*", resolution="*", + polarization="*", level="*", modifiers="*", generate=True, unload=True, **kwargs): """Read and generate requested datasets. 
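The scene.py hunks above only restyle string quoting, so the public Scene methods they touch (load, resample, aggregate, save_datasets, start_time/end_time) behave exactly as before. As a minimal usage sketch of that API: the input path and the "euro4" area name below are hypothetical placeholders, while "sgli_l1b" and "VN8" come from the reader added earlier in this patch series.

    from satpy import Scene

    # Hypothetical input file; "sgli_l1b" is the reader this patch series adds.
    scn = Scene(filenames=["/data/GC1SG1_sample.h5"], reader="sgli_l1b")
    scn.load(["VN8"])                       # query fields default to "*"
    local = scn.resample("euro4")           # goes through _prepare_resampler()/resample_dataset()
    coarse = local.aggregate(x=2, y=2)      # defaults: boundary="trim", side="left", func="mean"
    coarse.save_datasets(writer="geotiff")  # writer falls back to "geotiff" when filename is None
    print(coarse.start_time, coarse.end_time)
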
diff --git a/satpy/tests/compositor_tests/test_abi.py b/satpy/tests/compositor_tests/test_abi.py index 93df810cf5..79c5ae99ed 100644 --- a/satpy/tests/compositor_tests/test_abi.py +++ b/satpy/tests/compositor_tests/test_abi.py @@ -26,7 +26,7 @@ class TestABIComposites(unittest.TestCase): def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors - load_compositor_configs_for_sensors(['abi']) + load_compositor_configs_for_sensors(["abi"]) def test_simulated_green(self): """Test creating a fake 'green' band.""" @@ -39,28 +39,28 @@ def test_simulated_green(self): rows = 5 cols = 10 area = AreaDefinition( - 'test', 'test', 'test', - {'proj': 'eqc', 'lon_0': 0.0, - 'lat_0': 0.0}, + "test", "test", "test", + {"proj": "eqc", "lon_0": 0.0, + "lat_0": 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) - comp = SimulatedGreen('green', prerequisites=('C01', 'C02', 'C03'), - standard_name='toa_bidirectional_reflectance') + comp = SimulatedGreen("green", prerequisites=("C01", "C02", "C03"), + standard_name="toa_bidirectional_reflectance") c01 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.25, - dims=('y', 'x'), - attrs={'name': 'C01', 'area': area}) + dims=("y", "x"), + attrs={"name": "C01", "area": area}) c02 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.30, - dims=('y', 'x'), - attrs={'name': 'C02', 'area': area}) + dims=("y", "x"), + attrs={"name": "C02", "area": area}) c03 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.35, - dims=('y', 'x'), - attrs={'name': 'C03', 'area': area}) + dims=("y", "x"), + attrs={"name": "C03", "area": area}) res = comp((c01, c02, c03)) self.assertIsInstance(res, xr.DataArray) self.assertIsInstance(res.data, da.Array) - self.assertEqual(res.attrs['name'], 'green') - self.assertEqual(res.attrs['standard_name'], - 'toa_bidirectional_reflectance') + self.assertEqual(res.attrs["name"], "green") + self.assertEqual(res.attrs["standard_name"], + "toa_bidirectional_reflectance") data = res.compute() np.testing.assert_allclose(data, 0.28025) diff --git a/satpy/tests/compositor_tests/test_agri.py b/satpy/tests/compositor_tests/test_agri.py index 32fcc72c61..27a566a82c 100644 --- a/satpy/tests/compositor_tests/test_agri.py +++ b/satpy/tests/compositor_tests/test_agri.py @@ -26,7 +26,7 @@ class TestAGRIComposites(unittest.TestCase): def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors - load_compositor_configs_for_sensors(['agri']) + load_compositor_configs_for_sensors(["agri"]) def test_simulated_red(self): """Test creating a fake 'red' band.""" @@ -39,25 +39,25 @@ def test_simulated_red(self): rows = 5 cols = 10 area = AreaDefinition( - 'test', 'test', 'test', - {'proj': 'eqc', 'lon_0': 0.0, - 'lat_0': 0.0}, + "test", "test", "test", + {"proj": "eqc", "lon_0": 0.0, + "lat_0": 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) - comp = SimulatedRed('red', prerequisites=('C01', 'C02', 'C03'), - standard_name='toa_bidirectional_reflectance') + comp = SimulatedRed("red", prerequisites=("C01", "C02", "C03"), + standard_name="toa_bidirectional_reflectance") c01 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.25, - dims=('y', 'x'), - attrs={'name': 'C01', 'area': area}) + dims=("y", "x"), + attrs={"name": "C01", "area": area}) c02 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.30, - dims=('y', 'x'), - 
attrs={'name': 'C02', 'area': area}) + dims=("y", "x"), + attrs={"name": "C02", "area": area}) res = comp((c01, c02)) self.assertIsInstance(res, xr.DataArray) self.assertIsInstance(res.data, da.Array) - self.assertEqual(res.attrs['name'], 'red') - self.assertEqual(res.attrs['standard_name'], - 'toa_bidirectional_reflectance') + self.assertEqual(res.attrs["name"], "red") + self.assertEqual(res.attrs["standard_name"], + "toa_bidirectional_reflectance") data = res.compute() np.testing.assert_allclose(data, 0.24252874) diff --git a/satpy/tests/compositor_tests/test_ahi.py b/satpy/tests/compositor_tests/test_ahi.py index ed485bd924..980f5a746b 100644 --- a/satpy/tests/compositor_tests/test_ahi.py +++ b/satpy/tests/compositor_tests/test_ahi.py @@ -26,4 +26,4 @@ class TestAHIComposites(unittest.TestCase): def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors - load_compositor_configs_for_sensors(['ahi']) + load_compositor_configs_for_sensors(["ahi"]) diff --git a/satpy/tests/compositor_tests/test_glm.py b/satpy/tests/compositor_tests/test_glm.py index 6b79f96678..13783905da 100644 --- a/satpy/tests/compositor_tests/test_glm.py +++ b/satpy/tests/compositor_tests/test_glm.py @@ -24,7 +24,7 @@ class TestGLMComposites: def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors - load_compositor_configs_for_sensors(['glm']) + load_compositor_configs_for_sensors(["glm"]) def test_highlight_compositor(self): """Test creating a highlight composite.""" @@ -37,34 +37,34 @@ def test_highlight_compositor(self): rows = 5 cols = 10 area = AreaDefinition( - 'test', 'test', 'test', - {'proj': 'eqc', 'lon_0': 0.0, - 'lat_0': 0.0}, + "test", "test", "test", + {"proj": "eqc", "lon_0": 0.0, + "lat_0": 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) comp = HighlightCompositor( - 'c14_highlight', - prerequisites=('flash_extent_density', 'C14'), + "c14_highlight", + prerequisites=("flash_extent_density", "C14"), min_hightlight=0.0, max_hightlight=1.0, ) flash_extent_density = xr.DataArray( da.zeros((rows, cols), chunks=25) + 0.5, - dims=('y', 'x'), - attrs={'name': 'flash_extent_density', 'area': area}) + dims=("y", "x"), + attrs={"name": "flash_extent_density", "area": area}) c14_data = np.repeat(np.arange(cols, dtype=np.float64)[None, :], rows, axis=0) c14 = xr.DataArray(da.from_array(c14_data, chunks=25) + 303.15, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'name': 'C14', - 'area': area, - 'standard_name': 'toa_brightness_temperature', + "name": "C14", + "area": area, + "standard_name": "toa_brightness_temperature", }) res = comp((flash_extent_density, c14)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'c14_highlight' + assert res.attrs["name"] == "c14_highlight" data = res.compute() np.testing.assert_almost_equal(data.values.min(), -0.04) np.testing.assert_almost_equal(data.values.max(), 1.04) diff --git a/satpy/tests/compositor_tests/test_sar.py b/satpy/tests/compositor_tests/test_sar.py index ed71e22730..d7cd2a9c80 100644 --- a/satpy/tests/compositor_tests/test_sar.py +++ b/satpy/tests/compositor_tests/test_sar.py @@ -33,25 +33,25 @@ def test_sar_ice(self): rows = 2 cols = 2 - comp = SARIce('sar_ice', prerequisites=('hh', 'hv'), - standard_name='sar-ice') + comp = SARIce("sar_ice", prerequisites=("hh", "hv"), + 
standard_name="sar-ice")
         hh = xr.DataArray(da.zeros((rows, cols), chunks=25) + 2000,
-                          dims=('y', 'x'),
-                          attrs={'name': 'hh'})
+                          dims=("y", "x"),
+                          attrs={"name": "hh"})
         hv = xr.DataArray(da.zeros((rows, cols), chunks=25) + 1000,
-                          dims=('y', 'x'),
-                          attrs={'name': 'hv'})
+                          dims=("y", "x"),
+                          attrs={"name": "hv"})
         res = comp((hh, hv))
         self.assertIsInstance(res, xr.DataArray)
         self.assertIsInstance(res.data, da.Array)
-        self.assertEqual(res.attrs['name'], 'sar_ice')
-        self.assertEqual(res.attrs['standard_name'],
-                         'sar-ice')
+        self.assertEqual(res.attrs["name"], "sar_ice")
+        self.assertEqual(res.attrs["standard_name"],
+                         "sar-ice")
         data = res.compute()
-        np.testing.assert_allclose(data.sel(bands='R'), 31.58280822)
-        np.testing.assert_allclose(data.sel(bands='G'), 159869.56789876)
-        np.testing.assert_allclose(data.sel(bands='B'), 44.68138191)
+        np.testing.assert_allclose(data.sel(bands="R"), 31.58280822)
+        np.testing.assert_allclose(data.sel(bands="G"), 159869.56789876)
+        np.testing.assert_allclose(data.sel(bands="B"), 44.68138191)

     def test_sar_ice_log(self):
         """Test creating the sar_ice_log composite."""
@@ -63,22 +63,22 @@ def test_sar_ice_log(self):
         rows = 2
         cols = 2
-        comp = SARIceLog('sar_ice_log', prerequisites=('hh', 'hv'),
-                         standard_name='sar-ice-log')
+        comp = SARIceLog("sar_ice_log", prerequisites=("hh", "hv"),
+                         standard_name="sar-ice-log")
         hh = xr.DataArray(da.zeros((rows, cols), chunks=25) - 10,
-                          dims=('y', 'x'),
-                          attrs={'name': 'hh'})
+                          dims=("y", "x"),
+                          attrs={"name": "hh"})
         hv = xr.DataArray(da.zeros((rows, cols), chunks=25) - 20,
-                          dims=('y', 'x'),
-                          attrs={'name': 'hv'})
+                          dims=("y", "x"),
+                          attrs={"name": "hv"})
         res = comp((hh, hv))
         self.assertIsInstance(res, xr.DataArray)
         self.assertIsInstance(res.data, da.Array)
-        self.assertEqual(res.attrs['name'], 'sar_ice_log')
-        self.assertEqual(res.attrs['standard_name'],
-                         'sar-ice-log')
+        self.assertEqual(res.attrs["name"], "sar_ice_log")
+        self.assertEqual(res.attrs["standard_name"],
+                         "sar-ice-log")
         data = res.compute()
-        np.testing.assert_allclose(data.sel(bands='R'), -20)
-        np.testing.assert_allclose(data.sel(bands='G'), -4.6)
-        np.testing.assert_allclose(data.sel(bands='B'), -10)
+        np.testing.assert_allclose(data.sel(bands="R"), -20)
+        np.testing.assert_allclose(data.sel(bands="G"), -4.6)
+        np.testing.assert_allclose(data.sel(bands="B"), -10)
diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py
index 467adf119b..4800f12a7b 100644
--- a/satpy/tests/compositor_tests/test_spectral.py
+++ b/satpy/tests/compositor_tests/test_spectral.py
@@ -31,71 +31,71 @@ def setup_method(self):
         """Initialize channels."""
         rows = 5
         cols = 10
-        self.c01 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.20, dims=('y', 'x'), attrs={'name': 'C02'})
-        self.c02 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.30, dims=('y', 'x'), attrs={'name': 'C03'})
-        self.c03 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.40, dims=('y', 'x'), attrs={'name': 'C04'})
+        self.c01 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.20, dims=("y", "x"), attrs={"name": "C02"})
+        self.c02 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.30, dims=("y", "x"), attrs={"name": "C03"})
+        self.c03 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.40, dims=("y", "x"), attrs={"name": "C04"})

     def test_bad_lengths(self):
         """Test that error is raised if the number of channels to blend does not match the number of weights."""
-        comp = SpectralBlender('blended_channel', fractions=(0.3, 0.7), 
prerequisites=(0.51, 0.85), - standard_name='toa_bidirectional_reflectance') + comp = SpectralBlender("blended_channel", fractions=(0.3, 0.7), prerequisites=(0.51, 0.85), + standard_name="toa_bidirectional_reflectance") with pytest.raises(ValueError): comp((self.c01, self.c02, self.c03)) def test_spectral_blender(self): """Test the base class for spectral blending of channels.""" - comp = SpectralBlender('blended_channel', fractions=(0.3, 0.4, 0.3), prerequisites=(0.51, 0.65, 0.85), - standard_name='toa_bidirectional_reflectance') + comp = SpectralBlender("blended_channel", fractions=(0.3, 0.4, 0.3), prerequisites=(0.51, 0.65, 0.85), + standard_name="toa_bidirectional_reflectance") res = comp((self.c01, self.c02, self.c03)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'blended_channel' - assert res.attrs['standard_name'] == 'toa_bidirectional_reflectance' + assert res.attrs["name"] == "blended_channel" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.compute() np.testing.assert_allclose(data, 0.3) def test_hybrid_green(self): """Test hybrid green correction of the 'green' band.""" - comp = HybridGreen('hybrid_green', fraction=0.15, prerequisites=(0.51, 0.85), - standard_name='toa_bidirectional_reflectance') + comp = HybridGreen("hybrid_green", fraction=0.15, prerequisites=(0.51, 0.85), + standard_name="toa_bidirectional_reflectance") res = comp((self.c01, self.c03)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'hybrid_green' - assert res.attrs['standard_name'] == 'toa_bidirectional_reflectance' + assert res.attrs["name"] == "hybrid_green" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.compute() np.testing.assert_allclose(data, 0.23) def test_ndvi_hybrid_green(self): """Test NDVI-scaled hybrid green correction of 'green' band.""" self.c01 = xr.DataArray(da.from_array([[0.25, 0.30], [0.20, 0.30]], chunks=25), - dims=('y', 'x'), attrs={'name': 'C02'}) + dims=("y", "x"), attrs={"name": "C02"}) self.c02 = xr.DataArray(da.from_array([[0.25, 0.30], [0.25, 0.35]], chunks=25), - dims=('y', 'x'), attrs={'name': 'C03'}) + dims=("y", "x"), attrs={"name": "C03"}) self.c03 = xr.DataArray(da.from_array([[0.35, 0.35], [0.28, 0.65]], chunks=25), - dims=('y', 'x'), attrs={'name': 'C04'}) + dims=("y", "x"), attrs={"name": "C04"}) - comp = NDVIHybridGreen('ndvi_hybrid_green', limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), - standard_name='toa_bidirectional_reflectance') + comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), + standard_name="toa_bidirectional_reflectance") res = comp((self.c01, self.c02, self.c03)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'ndvi_hybrid_green' - assert res.attrs['standard_name'] == 'toa_bidirectional_reflectance' + assert res.attrs["name"] == "ndvi_hybrid_green" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.values np.testing.assert_array_almost_equal(data, np.array([[0.2633, 0.3071], [0.2115, 0.3420]]), decimal=4) def test_green_corrector(self): """Test the deprecated class for green corrections.""" with warnings.catch_warnings(): - warnings.filterwarnings("ignore", category=UserWarning, message=r'.*deprecated.*') - comp = GreenCorrector('blended_channel', fractions=(0.85, 0.15), prerequisites=(0.51, 0.85), - 
standard_name='toa_bidirectional_reflectance') + warnings.filterwarnings("ignore", category=UserWarning, message=r".*deprecated.*") + comp = GreenCorrector("blended_channel", fractions=(0.85, 0.15), prerequisites=(0.51, 0.85), + standard_name="toa_bidirectional_reflectance") res = comp((self.c01, self.c03)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'blended_channel' - assert res.attrs['standard_name'] == 'toa_bidirectional_reflectance' + assert res.attrs["name"] == "blended_channel" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.compute() np.testing.assert_allclose(data, 0.23) diff --git a/satpy/tests/compositor_tests/test_viirs.py b/satpy/tests/compositor_tests/test_viirs.py index 66c5e8c426..969f4579ef 100644 --- a/satpy/tests/compositor_tests/test_viirs.py +++ b/satpy/tests/compositor_tests/test_viirs.py @@ -35,9 +35,9 @@ def area(self): rows = 5 cols = 10 area = AreaDefinition( - 'test', 'test', 'test', - {'proj': 'eqc', 'lon_0': 0.0, - 'lat_0': 0.0}, + "test", "test", "test", + {"proj": "eqc", "lon_0": 0.0, + "lat_0": 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) return area @@ -50,9 +50,9 @@ def dnb(self, area): dnb[4:, :] += 0.5 dnb = da.from_array(dnb, chunks=25) c01 = xr.DataArray(dnb, - dims=('y', 'x'), - attrs={'name': 'DNB', 'area': area, - 'start_time': datetime(2020, 1, 1, 12, 0, 0)}) + dims=("y", "x"), + attrs={"name": "DNB", "area": area, + "start_time": datetime(2020, 1, 1, 12, 0, 0)}) return c01 @pytest.fixture @@ -64,9 +64,9 @@ def sza(self, area): sza[:, 4:] += 45.0 sza = da.from_array(sza, chunks=25) c02 = xr.DataArray(sza, - dims=('y', 'x'), - attrs={'name': 'solar_zenith_angle', 'area': area, - 'start_time': datetime(2020, 1, 1, 12, 0, 0)}) + dims=("y", "x"), + attrs={"name": "solar_zenith_angle", "area": area, + "start_time": datetime(2020, 1, 1, 12, 0, 0)}) return c02 @pytest.fixture @@ -77,29 +77,29 @@ def lza(self, area): lza[:, 4:] += 45.0 lza = da.from_array(lza, chunks=25) c03 = xr.DataArray(lza, - dims=('y', 'x'), - attrs={'name': 'lunar_zenith_angle', 'area': area, - 'start_time': datetime(2020, 1, 1, 12, 0, 0) + dims=("y", "x"), + attrs={"name": "lunar_zenith_angle", "area": area, + "start_time": datetime(2020, 1, 1, 12, 0, 0) }) return c03 def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors - load_compositor_configs_for_sensors(['viirs']) + load_compositor_configs_for_sensors(["viirs"]) def test_histogram_dnb(self, dnb, sza): """Test the 'histogram_dnb' compositor.""" from satpy.composites.viirs import HistogramDNB - comp = HistogramDNB('histogram_dnb', prerequisites=('dnb',), - standard_name='toa_outgoing_radiance_per_' - 'unit_wavelength') + comp = HistogramDNB("histogram_dnb", prerequisites=("dnb",), + standard_name="toa_outgoing_radiance_per_" + "unit_wavelength") res = comp((dnb, sza)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'histogram_dnb' - assert res.attrs['standard_name'] == 'equalized_radiance' + assert res.attrs["name"] == "histogram_dnb" + assert res.attrs["standard_name"] == "equalized_radiance" data = res.compute() unique_values = np.unique(data) np.testing.assert_allclose(unique_values, [0.5994, 0.7992, 0.999], rtol=1e-3) @@ -108,14 +108,14 @@ def test_adaptive_dnb(self, dnb, sza): """Test the 'adaptive_dnb' compositor.""" from 
satpy.composites.viirs import AdaptiveDNB - comp = AdaptiveDNB('adaptive_dnb', prerequisites=('dnb',), - standard_name='toa_outgoing_radiance_per_' - 'unit_wavelength') + comp = AdaptiveDNB("adaptive_dnb", prerequisites=("dnb",), + standard_name="toa_outgoing_radiance_per_" + "unit_wavelength") res = comp((dnb, sza)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'adaptive_dnb' - assert res.attrs['standard_name'] == 'equalized_radiance' + assert res.attrs["name"] == "adaptive_dnb" + assert res.attrs["standard_name"] == "equalized_radiance" data = res.compute() np.testing.assert_allclose(data.data, 0.999, rtol=1e-4) @@ -123,17 +123,17 @@ def test_hncc_dnb(self, area, dnb, sza, lza): """Test the 'hncc_dnb' compositor.""" from satpy.composites.viirs import NCCZinke - comp = NCCZinke('hncc_dnb', prerequisites=('dnb',), - standard_name='toa_outgoing_radiance_per_' - 'unit_wavelength') + comp = NCCZinke("hncc_dnb", prerequisites=("dnb",), + standard_name="toa_outgoing_radiance_per_" + "unit_wavelength") mif = xr.DataArray(da.zeros((5,), chunks=5) + 0.1, - dims=('y',), - attrs={'name': 'moon_illumination_fraction', 'area': area}) + dims=("y",), + attrs={"name": "moon_illumination_fraction", "area": area}) res = comp((dnb, sza, lza, mif)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'hncc_dnb' - assert res.attrs['standard_name'] == 'ncc_radiance' + assert res.attrs["name"] == "hncc_dnb" + assert res.attrs["standard_name"] == "ncc_radiance" data = res.compute() unique = np.unique(data) np.testing.assert_allclose( @@ -148,14 +148,14 @@ def test_hncc_dnb_nomoonpha(self, area, dnb, sza, lza): """Test the 'hncc_dnb' compositor when no moon phase data is provided.""" from satpy.composites.viirs import NCCZinke - comp = NCCZinke('hncc_dnb', prerequisites=('dnb',), - standard_name='toa_outgoing_radiance_per_' - 'unit_wavelength') + comp = NCCZinke("hncc_dnb", prerequisites=("dnb",), + standard_name="toa_outgoing_radiance_per_" + "unit_wavelength") res = comp((dnb, sza, lza)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'hncc_dnb' - assert res.attrs['standard_name'] == 'ncc_radiance' + assert res.attrs["name"] == "hncc_dnb" + assert res.attrs["standard_name"] == "ncc_radiance" data = res.compute() unique = np.unique(data) np.testing.assert_allclose( @@ -169,10 +169,10 @@ def test_erf_dnb(self, dnb_units, saturation_correction, area, sza, lza): """Test the 'dynamic_dnb' or ERF DNB compositor.""" from satpy.composites.viirs import ERFDNB - comp = ERFDNB('dynamic_dnb', prerequisites=('dnb',), + comp = ERFDNB("dynamic_dnb", prerequisites=("dnb",), saturation_correction=saturation_correction, - standard_name='toa_outgoing_radiance_per_' - 'unit_wavelength') + standard_name="toa_outgoing_radiance_per_" + "unit_wavelength") # dnb is different from in the other tests, so don't use the fixture # here dnb = np.zeros(area.shape) + 0.25 @@ -184,16 +184,16 @@ def test_erf_dnb(self, dnb_units, saturation_correction, area, sza, lza): dnb /= 10000.0 dnb = da.from_array(dnb, chunks=25) c01 = xr.DataArray(dnb, - dims=('y', 'x'), - attrs={'name': 'DNB', 'area': area, 'units': dnb_units}) + dims=("y", "x"), + attrs={"name": "DNB", "area": area, "units": dnb_units}) mif = xr.DataArray(da.zeros((5,), chunks=5) + 0.1, - dims=('y',), - attrs={'name': 'moon_illumination_fraction', 'area': area}) + dims=("y",), + attrs={"name": "moon_illumination_fraction", 
"area": area}) res = comp((c01, sza, lza, mif)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'dynamic_dnb' - assert res.attrs['standard_name'] == 'equalized_radiance' + assert res.attrs["name"] == "dynamic_dnb" + assert res.attrs["standard_name"] == "equalized_radiance" data = res.compute() unique = np.unique(data) assert np.isnan(unique).any() diff --git a/satpy/tests/conftest.py b/satpy/tests/conftest.py index 8bcbea2093..842dade04e 100644 --- a/satpy/tests/conftest.py +++ b/satpy/tests/conftest.py @@ -26,7 +26,7 @@ import satpy -TEST_ETC_DIR = os.path.join(os.path.dirname(__file__), 'etc') +TEST_ETC_DIR = os.path.join(os.path.dirname(__file__), "etc") @pytest.fixture(autouse=True) diff --git a/satpy/tests/enhancement_tests/test_abi.py b/satpy/tests/enhancement_tests/test_abi.py index f7ebb853b4..4a878ce96c 100644 --- a/satpy/tests/enhancement_tests/test_abi.py +++ b/satpy/tests/enhancement_tests/test_abi.py @@ -30,7 +30,7 @@ class TestABIEnhancement(unittest.TestCase): def setUp(self): """Create fake data for the tests.""" data = da.linspace(0, 1, 16).reshape((4, 4)) - self.da = xr.DataArray(data, dims=('y', 'x'), attrs={'test': 'test'}) + self.da = xr.DataArray(data, dims=("y", "x"), attrs={"test": "test"}) def test_cimss_true_color_contrast(self): """Test the cimss_true_color_contrast enhancement.""" diff --git a/satpy/tests/enhancement_tests/test_enhancements.py b/satpy/tests/enhancement_tests/test_enhancements.py index e95c55a362..b518cc3f39 100644 --- a/satpy/tests/enhancement_tests/test_enhancements.py +++ b/satpy/tests/enhancement_tests/test_enhancements.py @@ -70,12 +70,12 @@ def setup_method(self): crefl_data /= 5.605 crefl_data[0, 0] = np.nan # one bad value for testing crefl_data[0, 1] = 0. 
- self.ch1 = xr.DataArray(da.from_array(data, chunks=2), dims=('y', 'x'), attrs={'test': 'test'}) - self.ch2 = xr.DataArray(da.from_array(crefl_data, chunks=2), dims=('y', 'x'), attrs={'test': 'test'}) + self.ch1 = xr.DataArray(da.from_array(data, chunks=2), dims=("y", "x"), attrs={"test": "test"}) + self.ch2 = xr.DataArray(da.from_array(crefl_data, chunks=2), dims=("y", "x"), attrs={"test": "test"}) rgb_data = np.stack([data, data, data]) self.rgb = xr.DataArray(da.from_array(rgb_data, chunks=(3, 2, 2)), - dims=('bands', 'y', 'x'), - coords={'bands': ['R', 'G', 'B']}) + dims=("bands", "y", "x"), + coords={"bands": ["R", "G", "B"]}) @pytest.mark.parametrize( ("decorator", "exp_call_cls"), @@ -208,20 +208,20 @@ def test_merge_colormaps(self): create_colormap_mock = mock.Mock(wraps=create_colormap) cmap1 = Colormap((1, (1., 1., 1.))) - kwargs = {'palettes': cmap1} + kwargs = {"palettes": cmap1} - with mock.patch('satpy.enhancements.create_colormap', create_colormap_mock): + with mock.patch("satpy.enhancements.create_colormap", create_colormap_mock): res = mcp(kwargs) assert res is cmap1 create_colormap_mock.assert_not_called() create_colormap_mock.reset_mock() ret_map.reset_mock() - cmap1 = {'colors': 'blues', 'min_value': 0, - 'max_value': 1} - kwargs = {'palettes': [cmap1]} - with mock.patch('satpy.enhancements.create_colormap', create_colormap_mock), \ - mock.patch('trollimage.colormap.blues', ret_map): + cmap1 = {"colors": "blues", "min_value": 0, + "max_value": 1} + kwargs = {"palettes": [cmap1]} + with mock.patch("satpy.enhancements.create_colormap", create_colormap_mock), \ + mock.patch("trollimage.colormap.blues", ret_map): _ = mcp(kwargs) create_colormap_mock.assert_called_once() ret_map.reverse.assert_not_called() @@ -229,18 +229,18 @@ def test_merge_colormaps(self): create_colormap_mock.reset_mock() ret_map.reset_mock() - cmap2 = {'colors': 'blues', 'min_value': 2, - 'max_value': 3, 'reverse': True} - kwargs = {'palettes': [cmap2]} - with mock.patch('trollimage.colormap.blues', ret_map): + cmap2 = {"colors": "blues", "min_value": 2, + "max_value": 3, "reverse": True} + kwargs = {"palettes": [cmap2]} + with mock.patch("trollimage.colormap.blues", ret_map): _ = mcp(kwargs) ret_map.reverse.assert_called_once() ret_map.set_range.assert_called_with(2, 3) create_colormap_mock.reset_mock() ret_map.reset_mock() - kwargs = {'palettes': [cmap1, cmap2]} - with mock.patch('trollimage.colormap.blues', ret_map): + kwargs = {"palettes": [cmap1, cmap2]} + with mock.patch("trollimage.colormap.blues", ret_map): _ = mcp(kwargs) ret_map.__add__.assert_called_once() @@ -342,7 +342,7 @@ def test_cmap_vrgb_as_rgba(self): with closed_named_temp_file(suffix=".npy") as cmap_filename: cmap_data = _generate_cmap_test_data(None, "VRGB") np.save(cmap_filename, cmap_data) - cmap = create_colormap({'filename': cmap_filename, 'colormap_mode': "RGBA"}) + cmap = create_colormap({"filename": cmap_filename, "colormap_mode": "RGBA"}) assert cmap.colors.shape[0] == 4 assert cmap.colors.shape[1] == 4 # RGBA np.testing.assert_equal(cmap.colors[0], [128 / 255., 1.0, 0, 0]) @@ -366,14 +366,14 @@ def test_cmap_bad_mode(self, real_mode, forced_mode, filename_suffix): _write_cmap_to_file(cmap_filename, cmap_data) # Force colormap_mode VRGBA to RGBA and we should see an exception with pytest.raises(ValueError): - create_colormap({'filename': cmap_filename, 'colormap_mode': forced_mode}) + create_colormap({"filename": cmap_filename, "colormap_mode": forced_mode}) def test_cmap_from_file_bad_shape(self): """Test that unknown 
array shape causes an error.""" from satpy.enhancements import create_colormap # create the colormap file on disk - with closed_named_temp_file(suffix='.npy') as cmap_filename: + with closed_named_temp_file(suffix=".npy") as cmap_filename: np.save(cmap_filename, np.array([ [0], [64], @@ -382,7 +382,7 @@ def test_cmap_from_file_bad_shape(self): ])) with pytest.raises(ValueError): - create_colormap({'filename': cmap_filename}) + create_colormap({"filename": cmap_filename}) def test_cmap_from_config_path(self, tmp_path): """Test loading a colormap relative to a config path.""" @@ -396,7 +396,7 @@ def test_cmap_from_config_path(self, tmp_path): np.save(cmap_filename, cmap_data) with satpy.config.set(config_path=[tmp_path]): rel_cmap_filename = os.path.join("colormaps", "my_colormap.npy") - cmap = create_colormap({'filename': rel_cmap_filename, 'colormap_mode': "RGBA"}) + cmap = create_colormap({"filename": rel_cmap_filename, "colormap_mode": "RGBA"}) assert cmap.colors.shape[0] == 4 assert cmap.colors.shape[1] == 4 # RGBA np.testing.assert_equal(cmap.colors[0], [128 / 255., 1.0, 0, 0]) @@ -407,7 +407,7 @@ def test_cmap_from_config_path(self, tmp_path): def test_cmap_from_trollimage(self): """Test that colormaps in trollimage can be loaded.""" from satpy.enhancements import create_colormap - cmap = create_colormap({'colors': 'pubu'}) + cmap = create_colormap({"colors": "pubu"}) from trollimage.colormap import pubu np.testing.assert_equal(cmap.colors, pubu.colors) np.testing.assert_equal(cmap.values, pubu.values) @@ -428,14 +428,14 @@ def test_cmap_list(self): [1, 1, 1], ] values = [2, 4, 6, 8] - cmap = create_colormap({'colors': colors, 'color_scale': 1}) + cmap = create_colormap({"colors": colors, "color_scale": 1}) assert cmap.colors.shape[0] == 4 np.testing.assert_equal(cmap.colors[0], [0.0, 0.0, 1.0]) assert cmap.values.shape[0] == 4 assert cmap.values[0] == 0 assert cmap.values[-1] == 1.0 - cmap = create_colormap({'colors': colors, 'color_scale': 1, 'values': values}) + cmap = create_colormap({"colors": colors, "color_scale": 1, "values": values}) assert cmap.colors.shape[0] == 4 np.testing.assert_equal(cmap.colors[0], [0.0, 0.0, 1.0]) assert cmap.values.shape[0] == 4 @@ -451,7 +451,7 @@ def func(array, index, gain=2): coords=array.coords, dims=array.dims, attrs=array.attrs) separate_func = on_separate_bands(func) - arr = xr.DataArray(np.zeros((3, 10, 10)), dims=['bands', 'y', 'x'], coords={"bands": ["R", "G", "B"]}) + arr = xr.DataArray(np.zeros((3, 10, 10)), dims=["bands", "y", "x"], coords={"bands": ["R", "G", "B"]}) assert separate_func(arr).shape == arr.shape assert all(separate_func(arr, gain=1).values[:, 0, 0] == [0, 1, 2]) @@ -460,11 +460,11 @@ def test_using_map_blocks(): """Test the `using_map_blocks` decorator.""" def func(np_array, block_info=None): - value = block_info[0]['chunk-location'][-1] + value = block_info[0]["chunk-location"][-1] return np.ones(np_array.shape) * value map_blocked_func = using_map_blocks(func) - arr = xr.DataArray(da.zeros((3, 10, 10), dtype=int, chunks=5), dims=['bands', 'y', 'x']) + arr = xr.DataArray(da.zeros((3, 10, 10), dtype=int, chunks=5), dims=["bands", "y", "x"]) res = map_blocked_func(arr) assert res.shape == arr.shape assert res[0, 0, 0].compute() != res[0, 9, 9].compute() @@ -479,7 +479,7 @@ def func(dask_array): return dask_array dask_func = on_dask_array(func) - arr = xr.DataArray(da.zeros((3, 10, 10), dtype=int, chunks=5), dims=['bands', 'y', 'x']) + arr = xr.DataArray(da.zeros((3, 10, 10), dtype=int, chunks=5), dims=["bands", "y", "x"]) 
res = dask_func(arr) assert res.shape == arr.shape @@ -492,82 +492,82 @@ def fake_area(): _nwcsaf_geo_props = { - 'cma_geo': ("geo", "cma", None, 'cma_pal', None, 'cloudmask', 'CMA', "uint8"), - 'cma_pps': ("pps", "cma", None, 'cma_pal', None, 'cloudmask', 'CMA', "uint8"), - 'cma_extended_pps': ("pps", "cma_extended", None, 'cma_extended_pal', None, - 'cloudmask_extended', 'CMA', "uint8"), - 'cmaprob_pps': ("pps", "cmaprob", None, 'cmaprob_pal', None, 'cloudmask_probability', - 'CMAPROB', "uint8"), - 'ct_geo': ("geo", "ct", None, 'ct_pal', None, 'cloudtype', 'CT', "uint8"), - 'ct_pps': ("pps", "ct", None, 'ct_pal', None, 'cloudtype', 'CT', "uint8"), - 'ctth_alti_geo': ("geo", "ctth_alti", None, 'ctth_alti_pal', None, 'cloud_top_height', - 'CTTH', "float64"), - 'ctth_alti_pps': ("pps", "ctth_alti", None, 'ctth_alti_pal', "ctth_status_flag", - 'cloud_top_height', 'CTTH', "float64"), - 'ctth_pres_geo': ("geo", "ctth_pres", None, 'ctth_pres_pal', None, 'cloud_top_pressure', - 'CTTH', "float64"), - 'ctth_pres_pps': ("pps", "ctth_pres", None, 'ctth_pres_pal', None, 'cloud_top_pressure', - 'CTTH', "float64"), - 'ctth_tempe_geo': ("geo", "ctth_tempe", None, 'ctth_tempe_pal', None, 'cloud_top_temperature', - 'CTTH', "float64"), - 'ctth_tempe_pps': ("pps", "ctth_tempe", None, 'ctth_tempe_pal', None, 'cloud_top_temperature', - 'CTTH', "float64"), - 'cmic_phase_geo': ("geo", "cmic_phase", None, 'cmic_phase_pal', None, 'cloud_top_phase', - 'CMIC', "uint8"), - 'cmic_phase_pps': ("pps", "cmic_phase", None, 'cmic_phase_pal', "cmic_status_flag", 'cloud_top_phase', - 'CMIC', "uint8"), - 'cmic_reff_geo': ("geo", "cmic_reff", None, 'cmic_reff_pal', None, 'cloud_drop_effective_radius', - 'CMIC', "float64"), - 'cmic_reff_pps': ("pps", "cmic_reff", "cmic_cre", 'cmic_cre_pal', "cmic_status_flag", - 'cloud_drop_effective_radius', 'CMIC', "float64"), - 'cmic_cot_geo': ("geo", "cmic_cot", None, 'cmic_cot_pal', None, 'cloud_optical_thickness', - 'CMIC', "float64"), - 'cmic_cot_pps': ("pps", "cmic_cot", None, 'cmic_cot_pal', None, 'cloud_optical_thickness', - 'CMIC', "float64"), - 'cmic_cwp_pps': ("pps", "cmic_cwp", None, 'cmic_cwp_pal', None, 'cloud_water_path', - 'CMIC', "float64"), - 'cmic_lwp_geo': ("geo", "cmic_lwp", None, 'cmic_lwp_pal', None, 'cloud_liquid_water_path', - 'CMIC', "float64"), - 'cmic_lwp_pps': ("pps", "cmic_lwp", None, 'cmic_lwp_pal', None, 'liquid_water_path', - 'CMIC', "float64"), - 'cmic_iwp_geo': ("geo", "cmic_iwp", None, 'cmic_iwp_pal', None, 'cloud_ice_water_path', - 'CMIC', "float64"), - 'cmic_iwp_pps': ("pps", "cmic_iwp", None, 'cmic_iwp_pal', None, 'ice_water_path', - 'CMIC', "float64"), - 'pc': ("geo", "pc", None, 'pc_pal', None, 'precipitation_probability', 'PC', "uint8"), - 'crr': ("geo", "crr", None, 'crr_pal', None, 'convective_rain_rate', 'CRR', "uint8"), - 'crr_accum': ("geo", "crr_accum", None, 'crr_pal', None, - 'convective_precipitation_hourly_accumulation', 'CRR', "uint8"), - 'ishai_tpw': ("geo", "ishai_tpw", None, 'ishai_tpw_pal', None, 'total_precipitable_water', - 'iSHAI', "float64"), - 'ishai_shw': ("geo", "ishai_shw", None, 'ishai_shw_pal', None, 'showalter_index', - 'iSHAI', "float64"), - 'ishai_li': ("geo", "ishai_li", None, 'ishai_li_pal', None, 'lifted_index', - 'iSHAI', "float64"), - 'ci_prob30': ("geo", "ci_prob30", None, 'ci_pal', None, 'convection_initiation_prob30', - 'CI', "float64"), - 'ci_prob60': ("geo", "ci_prob60", None, 'ci_pal', None, 'convection_initiation_prob60', - 'CI', "float64"), - 'ci_prob90': ("geo", "ci_prob90", None, 'ci_pal', None, 
'convection_initiation_prob90', - 'CI', "float64"), - 'asii_turb_trop_prob': ("geo", "asii_turb_trop_prob", None, 'asii_turb_prob_pal', None, - 'asii_prob', 'ASII-NG', "float64"), - 'MapCellCatType': ("geo", "MapCellCatType", None, 'MapCellCatType_pal', None, - 'rdt_cell_type', 'RDT-CW', "uint8"), + "cma_geo": ("geo", "cma", None, "cma_pal", None, "cloudmask", "CMA", "uint8"), + "cma_pps": ("pps", "cma", None, "cma_pal", None, "cloudmask", "CMA", "uint8"), + "cma_extended_pps": ("pps", "cma_extended", None, "cma_extended_pal", None, + "cloudmask_extended", "CMA", "uint8"), + "cmaprob_pps": ("pps", "cmaprob", None, "cmaprob_pal", None, "cloudmask_probability", + "CMAPROB", "uint8"), + "ct_geo": ("geo", "ct", None, "ct_pal", None, "cloudtype", "CT", "uint8"), + "ct_pps": ("pps", "ct", None, "ct_pal", None, "cloudtype", "CT", "uint8"), + "ctth_alti_geo": ("geo", "ctth_alti", None, "ctth_alti_pal", None, "cloud_top_height", + "CTTH", "float64"), + "ctth_alti_pps": ("pps", "ctth_alti", None, "ctth_alti_pal", "ctth_status_flag", + "cloud_top_height", "CTTH", "float64"), + "ctth_pres_geo": ("geo", "ctth_pres", None, "ctth_pres_pal", None, "cloud_top_pressure", + "CTTH", "float64"), + "ctth_pres_pps": ("pps", "ctth_pres", None, "ctth_pres_pal", None, "cloud_top_pressure", + "CTTH", "float64"), + "ctth_tempe_geo": ("geo", "ctth_tempe", None, "ctth_tempe_pal", None, "cloud_top_temperature", + "CTTH", "float64"), + "ctth_tempe_pps": ("pps", "ctth_tempe", None, "ctth_tempe_pal", None, "cloud_top_temperature", + "CTTH", "float64"), + "cmic_phase_geo": ("geo", "cmic_phase", None, "cmic_phase_pal", None, "cloud_top_phase", + "CMIC", "uint8"), + "cmic_phase_pps": ("pps", "cmic_phase", None, "cmic_phase_pal", "cmic_status_flag", "cloud_top_phase", + "CMIC", "uint8"), + "cmic_reff_geo": ("geo", "cmic_reff", None, "cmic_reff_pal", None, "cloud_drop_effective_radius", + "CMIC", "float64"), + "cmic_reff_pps": ("pps", "cmic_reff", "cmic_cre", "cmic_cre_pal", "cmic_status_flag", + "cloud_drop_effective_radius", "CMIC", "float64"), + "cmic_cot_geo": ("geo", "cmic_cot", None, "cmic_cot_pal", None, "cloud_optical_thickness", + "CMIC", "float64"), + "cmic_cot_pps": ("pps", "cmic_cot", None, "cmic_cot_pal", None, "cloud_optical_thickness", + "CMIC", "float64"), + "cmic_cwp_pps": ("pps", "cmic_cwp", None, "cmic_cwp_pal", None, "cloud_water_path", + "CMIC", "float64"), + "cmic_lwp_geo": ("geo", "cmic_lwp", None, "cmic_lwp_pal", None, "cloud_liquid_water_path", + "CMIC", "float64"), + "cmic_lwp_pps": ("pps", "cmic_lwp", None, "cmic_lwp_pal", None, "liquid_water_path", + "CMIC", "float64"), + "cmic_iwp_geo": ("geo", "cmic_iwp", None, "cmic_iwp_pal", None, "cloud_ice_water_path", + "CMIC", "float64"), + "cmic_iwp_pps": ("pps", "cmic_iwp", None, "cmic_iwp_pal", None, "ice_water_path", + "CMIC", "float64"), + "pc": ("geo", "pc", None, "pc_pal", None, "precipitation_probability", "PC", "uint8"), + "crr": ("geo", "crr", None, "crr_pal", None, "convective_rain_rate", "CRR", "uint8"), + "crr_accum": ("geo", "crr_accum", None, "crr_pal", None, + "convective_precipitation_hourly_accumulation", "CRR", "uint8"), + "ishai_tpw": ("geo", "ishai_tpw", None, "ishai_tpw_pal", None, "total_precipitable_water", + "iSHAI", "float64"), + "ishai_shw": ("geo", "ishai_shw", None, "ishai_shw_pal", None, "showalter_index", + "iSHAI", "float64"), + "ishai_li": ("geo", "ishai_li", None, "ishai_li_pal", None, "lifted_index", + "iSHAI", "float64"), + "ci_prob30": ("geo", "ci_prob30", None, "ci_pal", None, "convection_initiation_prob30", + "CI", 
"float64"), + "ci_prob60": ("geo", "ci_prob60", None, "ci_pal", None, "convection_initiation_prob60", + "CI", "float64"), + "ci_prob90": ("geo", "ci_prob90", None, "ci_pal", None, "convection_initiation_prob90", + "CI", "float64"), + "asii_turb_trop_prob": ("geo", "asii_turb_trop_prob", None, "asii_turb_prob_pal", None, + "asii_prob", "ASII-NG", "float64"), + "MapCellCatType": ("geo", "MapCellCatType", None, "MapCellCatType_pal", None, + "rdt_cell_type", "RDT-CW", "uint8"), } @pytest.mark.parametrize( "data", - ['cma_geo', 'cma_pps', 'cma_extended_pps', 'cmaprob_pps', 'ct_geo', - 'ct_pps', 'ctth_alti_geo', 'ctth_alti_pps', 'ctth_pres_geo', - 'ctth_pres_pps', 'ctth_tempe_geo', 'ctth_tempe_pps', - 'cmic_phase_geo', 'cmic_phase_pps', 'cmic_reff_geo', - 'cmic_reff_pps', 'cmic_cot_geo', 'cmic_cot_pps', 'cmic_cwp_pps', - 'cmic_lwp_geo', 'cmic_lwp_pps', 'cmic_iwp_geo', 'cmic_iwp_pps', - 'pc', 'crr', 'crr_accum', 'ishai_tpw', 'ishai_shw', 'ishai_li', - 'ci_prob30', 'ci_prob60', 'ci_prob90', 'asii_turb_trop_prob', - 'MapCellCatType'] + ["cma_geo", "cma_pps", "cma_extended_pps", "cmaprob_pps", "ct_geo", + "ct_pps", "ctth_alti_geo", "ctth_alti_pps", "ctth_pres_geo", + "ctth_pres_pps", "ctth_tempe_geo", "ctth_tempe_pps", + "cmic_phase_geo", "cmic_phase_pps", "cmic_reff_geo", + "cmic_reff_pps", "cmic_cot_geo", "cmic_cot_pps", "cmic_cwp_pps", + "cmic_lwp_geo", "cmic_lwp_pps", "cmic_iwp_geo", "cmic_iwp_pps", + "pc", "crr", "crr_accum", "ishai_tpw", "ishai_shw", "ishai_li", + "ci_prob30", "ci_prob60", "ci_prob90", "asii_turb_trop_prob", + "MapCellCatType"] ) def test_nwcsaf_comps(fake_area, tmp_path, data): """Test loading NWCSAF composites.""" @@ -645,9 +645,9 @@ def setup_method(self): """Create test data.""" data = da.arange(-100, 1000, 110).reshape(2, 5) rgb_data = np.stack([data, data, data]) - self.rgb = xr.DataArray(rgb_data, dims=('bands', 'y', 'x'), - coords={'bands': ['R', 'G', 'B']}, - attrs={'platform_name': 'Himawari-8'}) + self.rgb = xr.DataArray(rgb_data, dims=("bands", "y", "x"), + coords={"bands": ["R", "G", "B"]}, + attrs={"platform_name": "Himawari-8"}) def test_jma_true_color_reproduction(self): """Test the jma_true_color_reproduction enhancement.""" @@ -669,12 +669,12 @@ def test_jma_true_color_reproduction(self): np.testing.assert_almost_equal(img.data.compute(), expected) - self.rgb.attrs['platform_name'] = None + self.rgb.attrs["platform_name"] = None img = XRImage(self.rgb) with pytest.raises(ValueError, match="Missing platform name."): jma_true_color_reproduction(img) - self.rgb.attrs['platform_name'] = 'Fakesat' + self.rgb.attrs["platform_name"] = "Fakesat" img = XRImage(self.rgb) with pytest.raises(KeyError, match="No conversion matrix found for platform Fakesat"): jma_true_color_reproduction(img) diff --git a/satpy/tests/enhancement_tests/test_viirs.py b/satpy/tests/enhancement_tests/test_viirs.py index 5595266034..b73e5fc700 100644 --- a/satpy/tests/enhancement_tests/test_viirs.py +++ b/satpy/tests/enhancement_tests/test_viirs.py @@ -33,8 +33,8 @@ def setUp(self): """Create test data.""" data = np.arange(15, 301, 15).reshape(2, 10) data = da.from_array(data, chunks=(2, 10)) - self.da = xr.DataArray(data, dims=('y', 'x'), attrs={'test': 'test'}) - self.palette = {'colors': + self.da = xr.DataArray(data, dims=("y", "x"), attrs={"test": "test"}) + self.palette = {"colors": [[14, [0.0, 0.0, 0.0]], [15, [0.0, 0.0, 0.39215]], [16, [0.76862, 0.63529, 0.44705]], @@ -64,8 +64,8 @@ def setUp(self): [191, [1.0, 0.0, 0.0]], [200, [1.0, 0.0, 0.0]], [201, [0.0, 0.0, 0.0]]], - 'min_value': 
0, - 'max_value': 201} + "min_value": 0, + "max_value": 201} def test_viirs(self): """Test VIIRS flood enhancement.""" diff --git a/satpy/tests/features/steps/steps-load.py b/satpy/tests/features/steps/steps-load.py index 9dfe9eb9cc..7e2d1829a2 100644 --- a/satpy/tests/features/steps/steps-load.py +++ b/satpy/tests/features/steps/steps-load.py @@ -25,31 +25,31 @@ use_step_matcher("re") -@given(u'data is available') +@given(u"data is available") def step_impl_data_available(context): """Make data available.""" - if not os.path.exists('/tmp/SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5'): - response = urlopen('https://zenodo.org/record/16355/files/' - 'SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5') - with open('/tmp/SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5', + if not os.path.exists("/tmp/SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5"): + response = urlopen("https://zenodo.org/record/16355/files/" + "SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5") + with open("/tmp/SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5", mode="w") as fp: fp.write(response.read()) - if not os.path.exists('/tmp/GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5'): - response = urlopen('https://zenodo.org/record/16355/files/' - 'GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5') - with open('/tmp/GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5', + if not os.path.exists("/tmp/GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5"): + response = urlopen("https://zenodo.org/record/16355/files/" + "GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5") + with open("/tmp/GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5", mode="w") as fp: fp.write(response.read()) -@when(u'user loads the data without providing a config file') +@when(u"user loads the data without providing a config file") def step_impl_user_loads_no_config(context): """Load the data without a config.""" from datetime import datetime from satpy import Scene, find_files_and_readers os.chdir("/tmp/") - readers_files = find_files_and_readers(sensor='viirs', + readers_files = find_files_and_readers(sensor="viirs", start_time=datetime(2015, 3, 11, 11, 20), end_time=datetime(2015, 3, 11, 11, 26)) scn = Scene(filenames=readers_files) @@ -57,20 +57,20 @@ def step_impl_user_loads_no_config(context): context.scene = scn -@then(u'the data is available in a scene object') +@then(u"the data is available in a scene object") def step_impl_data_available_in_scene(context): """Check that the data is available in the scene.""" assert context.scene["M02"] is not None assert context.scene.get("M01") is None -@when(u'some items are not available') +@when(u"some items are not available") def step_impl_items_not_available(context): """Load some data.""" context.scene.load(["M01"]) -@when(u'user wants to know what data is available') +@when(u"user wants to know what data is available") def step_impl_user_checks_availability(context): """Check availability.""" from datetime import datetime @@ -84,7 +84,7 @@ def step_impl_user_checks_availability(context): context.available_dataset_ids = scn.available_dataset_ids() -@then(u'available datasets are returned') +@then(u"available datasets are returned") def 
step_impl_available_datasets_are_returned(context): """Check that datasets are returned.""" assert (len(context.available_dataset_ids) >= 5) @@ -98,13 +98,13 @@ def step_impl_datasets_with_same_name(context): from satpy import Scene from satpy.tests.utils import make_dataid scn = Scene() - scn[make_dataid(name='ds1', calibration='radiance')] = DataArray([[1, 2], [3, 4]]) - scn[make_dataid(name='ds1', resolution=500, calibration='reflectance')] = DataArray([[5, 6], [7, 8]]) - scn[make_dataid(name='ds1', resolution=250, calibration='reflectance')] = DataArray([[5, 6], [7, 8]]) - scn[make_dataid(name='ds1', resolution=1000, calibration='reflectance')] = DataArray([[5, 6], [7, 8]]) - scn[make_dataid(name='ds1', resolution=500, calibration='radiance', modifiers=('mod1',))] = \ + scn[make_dataid(name="ds1", calibration="radiance")] = DataArray([[1, 2], [3, 4]]) + scn[make_dataid(name="ds1", resolution=500, calibration="reflectance")] = DataArray([[5, 6], [7, 8]]) + scn[make_dataid(name="ds1", resolution=250, calibration="reflectance")] = DataArray([[5, 6], [7, 8]]) + scn[make_dataid(name="ds1", resolution=1000, calibration="reflectance")] = DataArray([[5, 6], [7, 8]]) + scn[make_dataid(name="ds1", resolution=500, calibration="radiance", modifiers=("mod1",))] = \ DataArray([[5, 6], [7, 8]]) - ds_id = make_dataid(name='ds1', resolution=1000, calibration='radiance', modifiers=('mod1', 'mod2')) + ds_id = make_dataid(name="ds1", resolution=1000, calibration="radiance", modifiers=("mod1", "mod2")) scn[ds_id] = DataArray([[5, 6], [7, 8]]) context.scene = scn @@ -112,10 +112,10 @@ def step_impl_datasets_with_same_name(context): @when("a dataset is retrieved by name") def step_impl_dataset_retrieved_by_name(context): """Use the Scene's getitem method to get a dataset.""" - context.returned_dataset = context.scene['ds1'] + context.returned_dataset = context.scene["ds1"] @then("the least modified version of the dataset is returned") def step_impl_least_modified_dataset_returned(context): """Check that the dataset should be one of the least modified datasets.""" - assert len(context.returned_dataset.attrs['modifiers']) == 0 + assert len(context.returned_dataset.attrs["modifiers"]) == 0 diff --git a/satpy/tests/features/steps/steps-real-load-process-write.py b/satpy/tests/features/steps/steps-real-load-process-write.py index d719d397e4..d99b167b97 100644 --- a/satpy/tests/features/steps/steps-real-load-process-write.py +++ b/satpy/tests/features/steps/steps-real-load-process-write.py @@ -80,10 +80,10 @@ def before_all(context): debug_on() -@given(u'{dformat} data is available') +@given(u"{dformat} data is available") def step_impl_input_files_exists(context, dformat): """Check that input data exists on disk.""" - data_path = os.path.join('test_data', dformat) + data_path = os.path.join("test_data", dformat) data_available = os.path.exists(data_path) if not data_available: context.scenario.skip(reason="No test data available for " + dformat) @@ -92,40 +92,40 @@ def step_impl_input_files_exists(context, dformat): context.data_path = data_path -@when(u'the user loads the {composite} composite') +@when(u"the user loads the {composite} composite") def step_impl_create_scene_and_load_single(context, composite): """Create a Scene and load a single composite.""" from satpy import Scene scn = Scene(reader=context.dformat, - filenames=get_all_files(os.path.join(context.data_path, 'data'), - '*')) + filenames=get_all_files(os.path.join(context.data_path, "data"), + "*")) scn.load([composite]) context.scn = scn 
context.composite = composite -@when(u'the user resamples the data to {area}') +@when(u"the user resamples the data to {area}") def step_impl_resample_scene(context, area): """Resample the scene to an area or use the native resampler.""" - if area != '-': + if area != "-": context.lscn = context.scn.resample(area) else: - context.lscn = context.scn.resample(resampler='native') + context.lscn = context.scn.resample(resampler="native") context.area = area -@when(u'the user saves the composite to disk') +@when(u"the user saves the composite to disk") def step_impl_save_to_png(context): """Call Scene.save_dataset to write a PNG image.""" - with NamedTemporaryFile(suffix='.png', delete=False) as tmp_file: + with NamedTemporaryFile(suffix=".png", delete=False) as tmp_file: context.lscn.save_dataset(context.composite, filename=tmp_file.name) context.new_filename = tmp_file.name -@then(u'the resulting image should match the reference image') +@then(u"the resulting image should match the reference image") def step_impl_compare_two_png_images(context): """Compare two PNG image files.""" - if context.area == '-': + if context.area == "-": ref_filename = context.composite + ".png" else: ref_filename = context.composite + "_" + context.area + ".png" diff --git a/satpy/tests/features/steps/steps-save.py b/satpy/tests/features/steps/steps-save.py index b42d8751a2..dbfd8040c0 100644 --- a/satpy/tests/features/steps/steps-save.py +++ b/satpy/tests/features/steps/steps-save.py @@ -36,7 +36,7 @@ def step_impl_create_scene_one_dataset(context): from satpy import Scene scn = Scene() - scn["MyDataset"] = DataArray([[1, 2], [3, 4]], dims=['y', 'x']) + scn["MyDataset"] = DataArray([[1, 2], [3, 4]], dims=["y", "x"]) context.scene = scn @@ -48,7 +48,7 @@ def step_impl_scene_show(context): context (behave.runner.Context): Test context """ - with patch('trollimage.xrimage.XRImage.show') as mock_show: + with patch("trollimage.xrimage.XRImage.show") as mock_show: context.scene.show("MyDataset") mock_show.assert_called_once_with() @@ -100,8 +100,8 @@ def step_impl_create_scene_two_datasets(context): from satpy import Scene scn = Scene() - scn["MyDataset"] = DataArray([[1, 2], [3, 4]], dims=['y', 'x']) - scn["MyDataset2"] = DataArray([[5, 6], [7, 8]], dims=['y', 'x']) + scn["MyDataset"] = DataArray([[1, 2], [3, 4]], dims=["y", "x"]) + scn["MyDataset2"] = DataArray([[5, 6], [7, 8]], dims=["y", "x"]) context.scene = scn diff --git a/satpy/tests/modifier_tests/test_angles.py b/satpy/tests/modifier_tests/test_angles.py index cd5082a5b7..7bea78b7d1 100644 --- a/satpy/tests/modifier_tests/test_angles.py +++ b/satpy/tests/modifier_tests/test_angles.py @@ -79,9 +79,9 @@ def _get_angle_test_data(area_def: Optional[Union[AreaDefinition, StackedAreaDef vis = xr.DataArray(data, dims=dims, attrs={ - 'area': area_def, - 'start_time': stime, - 'orbital_parameters': orb_params, + "area": area_def, + "start_time": stime, + "orbital_parameters": orb_params, }) return vis diff --git a/satpy/tests/modifier_tests/test_crefl.py b/satpy/tests/modifier_tests/test_crefl.py index c842df701e..e43d7bc3fa 100644 --- a/satpy/tests/modifier_tests/test_crefl.py +++ b/satpy/tests/modifier_tests/test_crefl.py @@ -57,7 +57,7 @@ def _mock_and_create_dem_file(tmpdir, url, var_name, fill_value=None): def _mock_dem_retrieve(tmpdir, url): - rmock_obj = mock.patch('satpy.modifiers._crefl.retrieve') + rmock_obj = mock.patch("satpy.modifiers._crefl.retrieve") rmock = rmock_obj.start() dem_fn = str(tmpdir.join(url)) rmock.return_value = dem_fn @@ -74,17 +74,17 @@ 
def _create_fake_dem_file(dem_fn, var_name, fill_value): h.end() -def _make_viirs_xarray(data, area, name, standard_name, wavelength=None, units='degrees', calibration=None): - return xr.DataArray(data, dims=('y', 'x'), +def _make_viirs_xarray(data, area, name, standard_name, wavelength=None, units="degrees", calibration=None): + return xr.DataArray(data, dims=("y", "x"), attrs={ - 'start_orbit': 1708, 'end_orbit': 1708, 'wavelength': wavelength, - 'modifiers': None, 'calibration': calibration, - 'resolution': 371, 'name': name, - 'standard_name': standard_name, 'platform_name': 'Suomi-NPP', - 'polarization': None, 'sensor': 'viirs', 'units': units, - 'start_time': datetime(2012, 2, 25, 18, 1, 24, 570942), - 'end_time': datetime(2012, 2, 25, 18, 11, 21, 175760), 'area': area, - 'ancillary_variables': [] + "start_orbit": 1708, "end_orbit": 1708, "wavelength": wavelength, + "modifiers": None, "calibration": calibration, + "resolution": 371, "name": name, + "standard_name": standard_name, "platform_name": "Suomi-NPP", + "polarization": None, "sensor": "viirs", "units": units, + "start_time": datetime(2012, 2, 25, 18, 1, 24, 570942), + "end_time": datetime(2012, 2, 25, 18, 11, 21, 175760), "area": area, + "ancillary_variables": [] }) @@ -97,9 +97,9 @@ def data_area_ref_corrector(): rows = 3 cols = 5 area = AreaDefinition( - 'some_area_name', 'On-the-fly area', 'geosabii', - {'a': '6378137.0', 'b': '6356752.31414', 'h': '35786023.0', 'lon_0': '-89.5', 'proj': 'geos', 'sweep': 'x', - 'units': 'm'}, + "some_area_name", "On-the-fly area", "geosabii", + {"a": "6378137.0", "b": "6356752.31414", "h": "35786023.0", "lon_0": "-89.5", "proj": "geos", "sweep": "x", + "units": "m"}, cols, rows, (-5434894.954752679, -5434894.964451744, 5434894.964451744, 5434894.954752679)) @@ -135,39 +135,39 @@ def test_reflectance_corrector_abi(self, name, wavelength, resolution, exp_mean, from satpy.modifiers._crefl import ReflectanceCorrector from satpy.tests.utils import make_dsq ref_cor = ReflectanceCorrector(optional_prerequisites=[ - make_dsq(name='satellite_azimuth_angle'), - make_dsq(name='satellite_zenith_angle'), - make_dsq(name='solar_azimuth_angle'), - make_dsq(name='solar_zenith_angle')], name=name, prerequisites=[], - wavelength=wavelength, resolution=resolution, calibration='reflectance', - modifiers=('sunz_corrected', 'rayleigh_corrected_crefl',), sensor='abi') - - assert ref_cor.attrs['modifiers'] == ('sunz_corrected', 'rayleigh_corrected_crefl') - assert ref_cor.attrs['calibration'] == 'reflectance' - assert ref_cor.attrs['wavelength'] == wavelength - assert ref_cor.attrs['name'] == name - assert ref_cor.attrs['resolution'] == resolution - assert ref_cor.attrs['sensor'] == 'abi' - assert ref_cor.attrs['prerequisites'] == [] - assert ref_cor.attrs['optional_prerequisites'] == [ - make_dsq(name='satellite_azimuth_angle'), - make_dsq(name='satellite_zenith_angle'), - make_dsq(name='solar_azimuth_angle'), - make_dsq(name='solar_zenith_angle')] + make_dsq(name="satellite_azimuth_angle"), + make_dsq(name="satellite_zenith_angle"), + make_dsq(name="solar_azimuth_angle"), + make_dsq(name="solar_zenith_angle")], name=name, prerequisites=[], + wavelength=wavelength, resolution=resolution, calibration="reflectance", + modifiers=("sunz_corrected", "rayleigh_corrected_crefl",), sensor="abi") + + assert ref_cor.attrs["modifiers"] == ("sunz_corrected", "rayleigh_corrected_crefl") + assert ref_cor.attrs["calibration"] == "reflectance" + assert ref_cor.attrs["wavelength"] == wavelength + assert ref_cor.attrs["name"] == 
name + assert ref_cor.attrs["resolution"] == resolution + assert ref_cor.attrs["sensor"] == "abi" + assert ref_cor.attrs["prerequisites"] == [] + assert ref_cor.attrs["optional_prerequisites"] == [ + make_dsq(name="satellite_azimuth_angle"), + make_dsq(name="satellite_zenith_angle"), + make_dsq(name="solar_azimuth_angle"), + make_dsq(name="solar_zenith_angle")] area, dnb = self.data_area_ref_corrector() c01 = xr.DataArray(dnb, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'platform_name': 'GOES-16', - 'calibration': 'reflectance', 'units': '%', 'wavelength': wavelength, - 'name': name, 'resolution': resolution, 'sensor': 'abi', - 'start_time': '2017-09-20 17:30:40.800000', 'end_time': '2017-09-20 17:41:17.500000', - 'area': area, 'ancillary_variables': [], - 'orbital_parameters': { - 'satellite_nominal_longitude': -89.5, - 'satellite_nominal_latitude': 0.0, - 'satellite_nominal_altitude': 35786023.4375, + "platform_name": "GOES-16", + "calibration": "reflectance", "units": "%", "wavelength": wavelength, + "name": name, "resolution": resolution, "sensor": "abi", + "start_time": "2017-09-20 17:30:40.800000", "end_time": "2017-09-20 17:41:17.500000", + "area": area, "ancillary_variables": [], + "orbital_parameters": { + "satellite_nominal_longitude": -89.5, + "satellite_nominal_latitude": 0.0, + "satellite_nominal_altitude": 35786023.4375, }, }) with assert_maximum_dask_computes(0): @@ -175,18 +175,18 @@ def test_reflectance_corrector_abi(self, name, wavelength, resolution, exp_mean, assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['modifiers'] == ('sunz_corrected', 'rayleigh_corrected_crefl') - assert res.attrs['platform_name'] == 'GOES-16' - assert res.attrs['calibration'] == 'reflectance' - assert res.attrs['units'] == '%' - assert res.attrs['wavelength'] == wavelength - assert res.attrs['name'] == name - assert res.attrs['resolution'] == resolution - assert res.attrs['sensor'] == 'abi' - assert res.attrs['start_time'] == '2017-09-20 17:30:40.800000' - assert res.attrs['end_time'] == '2017-09-20 17:41:17.500000' - assert res.attrs['area'] == area - assert res.attrs['ancillary_variables'] == [] + assert res.attrs["modifiers"] == ("sunz_corrected", "rayleigh_corrected_crefl") + assert res.attrs["platform_name"] == "GOES-16" + assert res.attrs["calibration"] == "reflectance" + assert res.attrs["units"] == "%" + assert res.attrs["wavelength"] == wavelength + assert res.attrs["name"] == name + assert res.attrs["resolution"] == resolution + assert res.attrs["sensor"] == "abi" + assert res.attrs["start_time"] == "2017-09-20 17:30:40.800000" + assert res.attrs["end_time"] == "2017-09-20 17:41:17.500000" + assert res.attrs["area"] == area + assert res.attrs["ancillary_variables"] == [] data = res.values unique = np.unique(data[~np.isnan(data)]) np.testing.assert_allclose(np.nanmean(data), exp_mean, rtol=1e-5) @@ -194,7 +194,7 @@ def test_reflectance_corrector_abi(self, name, wavelength, resolution, exp_mean, np.testing.assert_allclose(unique, exp_unique, rtol=1e-5) @pytest.mark.parametrize( - 'url,dem_mock_cm,dem_sds', + "url,dem_mock_cm,dem_sds", [ (None, mock_cmgdem, "average elevation"), ("CMGDEM.hdf", mock_cmgdem, "averaged elevation"), @@ -207,62 +207,62 @@ def test_reflectance_corrector_viirs(self, tmpdir, url, dem_mock_cm, dem_sds): ref_cor = ReflectanceCorrector( optional_prerequisites=[ - make_dsq(name='satellite_azimuth_angle'), - make_dsq(name='satellite_zenith_angle'), - make_dsq(name='solar_azimuth_angle'), - 
make_dsq(name='solar_zenith_angle') + make_dsq(name="satellite_azimuth_angle"), + make_dsq(name="satellite_zenith_angle"), + make_dsq(name="solar_azimuth_angle"), + make_dsq(name="solar_zenith_angle") ], - name='I01', + name="I01", prerequisites=[], wavelength=(0.6, 0.64, 0.68), resolution=371, - calibration='reflectance', - modifiers=('sunz_corrected_iband', 'rayleigh_corrected_crefl_iband'), - sensor='viirs', + calibration="reflectance", + modifiers=("sunz_corrected_iband", "rayleigh_corrected_crefl_iband"), + sensor="viirs", url=url, dem_sds=dem_sds, ) - assert ref_cor.attrs['modifiers'] == ('sunz_corrected_iband', 'rayleigh_corrected_crefl_iband') - assert ref_cor.attrs['calibration'] == 'reflectance' - assert ref_cor.attrs['wavelength'] == (0.6, 0.64, 0.68) - assert ref_cor.attrs['name'] == 'I01' - assert ref_cor.attrs['resolution'] == 371 - assert ref_cor.attrs['sensor'] == 'viirs' - assert ref_cor.attrs['prerequisites'] == [] - assert ref_cor.attrs['optional_prerequisites'] == [ - make_dsq(name='satellite_azimuth_angle'), - make_dsq(name='satellite_zenith_angle'), - make_dsq(name='solar_azimuth_angle'), - make_dsq(name='solar_zenith_angle')] + assert ref_cor.attrs["modifiers"] == ("sunz_corrected_iband", "rayleigh_corrected_crefl_iband") + assert ref_cor.attrs["calibration"] == "reflectance" + assert ref_cor.attrs["wavelength"] == (0.6, 0.64, 0.68) + assert ref_cor.attrs["name"] == "I01" + assert ref_cor.attrs["resolution"] == 371 + assert ref_cor.attrs["sensor"] == "viirs" + assert ref_cor.attrs["prerequisites"] == [] + assert ref_cor.attrs["optional_prerequisites"] == [ + make_dsq(name="satellite_azimuth_angle"), + make_dsq(name="satellite_zenith_angle"), + make_dsq(name="solar_azimuth_angle"), + make_dsq(name="solar_zenith_angle")] area, data = self.data_area_ref_corrector() - c01 = _make_viirs_xarray(data, area, 'I01', 'toa_bidirectional_reflectance', - wavelength=(0.6, 0.64, 0.68), units='%', - calibration='reflectance') - c02 = _make_viirs_xarray(data, area, 'satellite_azimuth_angle', 'sensor_azimuth_angle') - c03 = _make_viirs_xarray(data, area, 'satellite_zenith_angle', 'sensor_zenith_angle') - c04 = _make_viirs_xarray(data, area, 'solar_azimuth_angle', 'solar_azimuth_angle') - c05 = _make_viirs_xarray(data, area, 'solar_zenith_angle', 'solar_zenith_angle') + c01 = _make_viirs_xarray(data, area, "I01", "toa_bidirectional_reflectance", + wavelength=(0.6, 0.64, 0.68), units="%", + calibration="reflectance") + c02 = _make_viirs_xarray(data, area, "satellite_azimuth_angle", "sensor_azimuth_angle") + c03 = _make_viirs_xarray(data, area, "satellite_zenith_angle", "sensor_zenith_angle") + c04 = _make_viirs_xarray(data, area, "solar_azimuth_angle", "solar_azimuth_angle") + c05 = _make_viirs_xarray(data, area, "solar_zenith_angle", "solar_zenith_angle") with dem_mock_cm(tmpdir, url), assert_maximum_dask_computes(0): res = ref_cor([c01], [c02, c03, c04, c05]) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['wavelength'] == (0.6, 0.64, 0.68) - assert res.attrs['modifiers'] == ('sunz_corrected_iband', 'rayleigh_corrected_crefl_iband') - assert res.attrs['calibration'] == 'reflectance' - assert res.attrs['resolution'] == 371 - assert res.attrs['name'] == 'I01' - assert res.attrs['standard_name'] == 'toa_bidirectional_reflectance' - assert res.attrs['platform_name'] == 'Suomi-NPP' - assert res.attrs['sensor'] == 'viirs' - assert res.attrs['units'] == '%' - assert res.attrs['start_time'] == datetime(2012, 2, 25, 18, 1, 24, 570942) - assert 
res.attrs['end_time'] == datetime(2012, 2, 25, 18, 11, 21, 175760) - assert res.attrs['area'] == area - assert res.attrs['ancillary_variables'] == [] + assert res.attrs["wavelength"] == (0.6, 0.64, 0.68) + assert res.attrs["modifiers"] == ("sunz_corrected_iband", "rayleigh_corrected_crefl_iband") + assert res.attrs["calibration"] == "reflectance" + assert res.attrs["resolution"] == 371 + assert res.attrs["name"] == "I01" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" + assert res.attrs["platform_name"] == "Suomi-NPP" + assert res.attrs["sensor"] == "viirs" + assert res.attrs["units"] == "%" + assert res.attrs["start_time"] == datetime(2012, 2, 25, 18, 1, 24, 570942) + assert res.attrs["end_time"] == datetime(2012, 2, 25, 18, 11, 21, 175760) + assert res.attrs["area"] == area + assert res.attrs["ancillary_variables"] == [] data = res.values assert abs(np.mean(data) - 51.12750267805715) < 1e-6 assert data.shape == (3, 5) @@ -273,64 +273,64 @@ def test_reflectance_corrector_modis(self): """Test ReflectanceCorrector modifier with MODIS data.""" from satpy.modifiers._crefl import ReflectanceCorrector from satpy.tests.utils import make_dsq - sataa_did = make_dsq(name='satellite_azimuth_angle') - satza_did = make_dsq(name='satellite_zenith_angle') - solaa_did = make_dsq(name='solar_azimuth_angle') - solza_did = make_dsq(name='solar_zenith_angle') + sataa_did = make_dsq(name="satellite_azimuth_angle") + satza_did = make_dsq(name="satellite_zenith_angle") + solaa_did = make_dsq(name="solar_azimuth_angle") + solza_did = make_dsq(name="solar_zenith_angle") ref_cor = ReflectanceCorrector( - optional_prerequisites=[sataa_did, satza_did, solaa_did, solza_did], name='1', - prerequisites=[], wavelength=(0.62, 0.645, 0.67), resolution=250, calibration='reflectance', - modifiers=('sunz_corrected', 'rayleigh_corrected_crefl'), sensor='modis') - assert ref_cor.attrs['modifiers'] == ('sunz_corrected', 'rayleigh_corrected_crefl') - assert ref_cor.attrs['calibration'] == 'reflectance' - assert ref_cor.attrs['wavelength'] == (0.62, 0.645, 0.67) - assert ref_cor.attrs['name'] == '1' - assert ref_cor.attrs['resolution'] == 250 - assert ref_cor.attrs['sensor'] == 'modis' - assert ref_cor.attrs['prerequisites'] == [] - assert ref_cor.attrs['optional_prerequisites'] == [ - make_dsq(name='satellite_azimuth_angle'), - make_dsq(name='satellite_zenith_angle'), - make_dsq(name='solar_azimuth_angle'), - make_dsq(name='solar_zenith_angle')] + optional_prerequisites=[sataa_did, satza_did, solaa_did, solza_did], name="1", + prerequisites=[], wavelength=(0.62, 0.645, 0.67), resolution=250, calibration="reflectance", + modifiers=("sunz_corrected", "rayleigh_corrected_crefl"), sensor="modis") + assert ref_cor.attrs["modifiers"] == ("sunz_corrected", "rayleigh_corrected_crefl") + assert ref_cor.attrs["calibration"] == "reflectance" + assert ref_cor.attrs["wavelength"] == (0.62, 0.645, 0.67) + assert ref_cor.attrs["name"] == "1" + assert ref_cor.attrs["resolution"] == 250 + assert ref_cor.attrs["sensor"] == "modis" + assert ref_cor.attrs["prerequisites"] == [] + assert ref_cor.attrs["optional_prerequisites"] == [ + make_dsq(name="satellite_azimuth_angle"), + make_dsq(name="satellite_zenith_angle"), + make_dsq(name="solar_azimuth_angle"), + make_dsq(name="solar_zenith_angle")] area, dnb = self.data_area_ref_corrector() def make_xarray(name, calibration, wavelength=None, modifiers=None, resolution=1000): return xr.DataArray(dnb, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'wavelength': wavelength, 'level': 
None, 'modifiers': modifiers, - 'calibration': calibration, 'resolution': resolution, - 'name': name, 'coordinates': ['longitude', 'latitude'], - 'platform_name': 'EOS-Aqua', 'polarization': None, 'sensor': 'modis', - 'units': '%', 'start_time': datetime(2012, 8, 13, 18, 46, 1, 439838), - 'end_time': datetime(2012, 8, 13, 18, 57, 47, 746296), 'area': area, - 'ancillary_variables': [] + "wavelength": wavelength, "level": None, "modifiers": modifiers, + "calibration": calibration, "resolution": resolution, + "name": name, "coordinates": ["longitude", "latitude"], + "platform_name": "EOS-Aqua", "polarization": None, "sensor": "modis", + "units": "%", "start_time": datetime(2012, 8, 13, 18, 46, 1, 439838), + "end_time": datetime(2012, 8, 13, 18, 57, 47, 746296), "area": area, + "ancillary_variables": [] }) - c01 = make_xarray('1', 'reflectance', wavelength=(0.62, 0.645, 0.67), modifiers='sunz_corrected', + c01 = make_xarray("1", "reflectance", wavelength=(0.62, 0.645, 0.67), modifiers="sunz_corrected", resolution=500) - c02 = make_xarray('satellite_azimuth_angle', None) - c03 = make_xarray('satellite_zenith_angle', None) - c04 = make_xarray('solar_azimuth_angle', None) - c05 = make_xarray('solar_zenith_angle', None) + c02 = make_xarray("satellite_azimuth_angle", None) + c03 = make_xarray("satellite_zenith_angle", None) + c04 = make_xarray("solar_azimuth_angle", None) + c05 = make_xarray("solar_zenith_angle", None) res = ref_cor([c01], [c02, c03, c04, c05]) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['wavelength'] == (0.62, 0.645, 0.67) - assert res.attrs['modifiers'] == ('sunz_corrected', 'rayleigh_corrected_crefl',) - assert res.attrs['calibration'] == 'reflectance' - assert res.attrs['resolution'] == 500 - assert res.attrs['name'] == '1' - assert res.attrs['platform_name'] == 'EOS-Aqua' - assert res.attrs['sensor'] == 'modis' - assert res.attrs['units'] == '%' - assert res.attrs['start_time'] == datetime(2012, 8, 13, 18, 46, 1, 439838) - assert res.attrs['end_time'] == datetime(2012, 8, 13, 18, 57, 47, 746296) - assert res.attrs['area'] == area - assert res.attrs['ancillary_variables'] == [] + assert res.attrs["wavelength"] == (0.62, 0.645, 0.67) + assert res.attrs["modifiers"] == ("sunz_corrected", "rayleigh_corrected_crefl",) + assert res.attrs["calibration"] == "reflectance" + assert res.attrs["resolution"] == 500 + assert res.attrs["name"] == "1" + assert res.attrs["platform_name"] == "EOS-Aqua" + assert res.attrs["sensor"] == "modis" + assert res.attrs["units"] == "%" + assert res.attrs["start_time"] == datetime(2012, 8, 13, 18, 46, 1, 439838) + assert res.attrs["end_time"] == datetime(2012, 8, 13, 18, 57, 47, 746296) + assert res.attrs["area"] == area + assert res.attrs["ancillary_variables"] == [] data = res.values assert abs(np.mean(data) - 52.09372623964498) < 1e-6 assert data.shape == (3, 5) @@ -346,7 +346,7 @@ def test_reflectance_corrector_bad_prereqs(self): pytest.raises(ValueError, ref_cor, [], [1, 2, 3, 4]) @pytest.mark.parametrize( - 'url,dem_mock_cm,dem_sds', + "url,dem_mock_cm,dem_sds", [ (None, mock_cmgdem, "average elevation"), ("CMGDEM.hdf", mock_cmgdem, "averaged elevation"), @@ -364,31 +364,31 @@ def test_reflectance_corrector_different_chunks(self, tmpdir, url, dem_mock_cm, ref_cor = ReflectanceCorrector( optional_prerequisites=[ - make_dsq(name='satellite_azimuth_angle'), - make_dsq(name='satellite_zenith_angle'), - make_dsq(name='solar_azimuth_angle'), - make_dsq(name='solar_zenith_angle') + 
make_dsq(name="satellite_azimuth_angle"), + make_dsq(name="satellite_zenith_angle"), + make_dsq(name="solar_azimuth_angle"), + make_dsq(name="solar_zenith_angle") ], - name='I01', + name="I01", prerequisites=[], wavelength=(0.6, 0.64, 0.68), resolution=371, - calibration='reflectance', - modifiers=('sunz_corrected_iband', 'rayleigh_corrected_crefl_iband'), - sensor='viirs', + calibration="reflectance", + modifiers=("sunz_corrected_iband", "rayleigh_corrected_crefl_iband"), + sensor="viirs", url=url, dem_sds=dem_sds, ) area, data = self.data_area_ref_corrector() - c01 = _make_viirs_xarray(data, area, 'I01', 'toa_bidirectional_reflectance', - wavelength=(0.6, 0.64, 0.68), units='%', - calibration='reflectance') - c02 = _make_viirs_xarray(data, area, 'satellite_azimuth_angle', 'sensor_azimuth_angle') + c01 = _make_viirs_xarray(data, area, "I01", "toa_bidirectional_reflectance", + wavelength=(0.6, 0.64, 0.68), units="%", + calibration="reflectance") + c02 = _make_viirs_xarray(data, area, "satellite_azimuth_angle", "sensor_azimuth_angle") c02.data = c02.data.rechunk((1, -1)) - c03 = _make_viirs_xarray(data, area, 'satellite_zenith_angle', 'sensor_zenith_angle') - c04 = _make_viirs_xarray(data, area, 'solar_azimuth_angle', 'solar_azimuth_angle') - c05 = _make_viirs_xarray(data, area, 'solar_zenith_angle', 'solar_zenith_angle') + c03 = _make_viirs_xarray(data, area, "satellite_zenith_angle", "sensor_zenith_angle") + c04 = _make_viirs_xarray(data, area, "solar_azimuth_angle", "solar_azimuth_angle") + c05 = _make_viirs_xarray(data, area, "solar_zenith_angle", "solar_zenith_angle") with dem_mock_cm(tmpdir, url): res = ref_cor([c01], [c02, c03, c04, c05]) diff --git a/satpy/tests/modifier_tests/test_parallax.py b/satpy/tests/modifier_tests/test_parallax.py index 316192421c..04af43981f 100644 --- a/satpy/tests/modifier_tests/test_parallax.py +++ b/satpy/tests/modifier_tests/test_parallax.py @@ -516,7 +516,7 @@ def test_parallax_modifier_interface_with_cloud(self): w_cth = 25 h_cth = 15 - proj_dict = {'a': '6378137', 'h': '35785863', 'proj': 'geos', 'units': 'm'} + proj_dict = {"a": "6378137", "h": "35785863", "proj": "geos", "units": "m"} fake_area_cth = pyresample.create_area_def( area_id="test-area", projection=proj_dict, diff --git a/satpy/tests/multiscene_tests/test_blend.py b/satpy/tests/multiscene_tests/test_blend.py index 6b85dd9d79..c6e65d4615 100644 --- a/satpy/tests/multiscene_tests/test_blend.py +++ b/satpy/tests/multiscene_tests/test_blend.py @@ -42,15 +42,15 @@ def _get_expected_stack_select(scene1: Scene, scene2: Scene) -> xr.DataArray: - expected = scene2['polar-ct'] - expected[..., NUM_TEST_ROWS, :] = scene1['geo-ct'][..., NUM_TEST_ROWS, :] - expected[..., :, NUM_TEST_COLS] = scene1['geo-ct'][..., :, NUM_TEST_COLS] - expected[..., -1, :] = scene1['geo-ct'][..., -1, :] + expected = scene2["polar-ct"] + expected[..., NUM_TEST_ROWS, :] = scene1["geo-ct"][..., NUM_TEST_ROWS, :] + expected[..., :, NUM_TEST_COLS] = scene1["geo-ct"][..., :, NUM_TEST_COLS] + expected[..., -1, :] = scene1["geo-ct"][..., -1, :] return expected.compute() def _get_expected_stack_blend(scene1: Scene, scene2: Scene) -> xr.DataArray: - expected = scene2['polar-ct'].copy().compute().astype(np.float64) + expected = scene2["polar-ct"].copy().compute().astype(np.float64) expected[..., NUM_TEST_ROWS, :] = 5 / 3 # (1*2 + 3*1) / (2 + 1) expected[..., :, NUM_TEST_COLS] = 5 / 3 expected[..., -1, :] = np.nan # (1*0 + 0*1) / (0 + 1) @@ -88,21 +88,21 @@ def cloud_type_data_array1(test_area, data_type, image_mode): shape = 
DEFAULT_SHAPE if len(image_mode) == 0 else (len(image_mode),) + DEFAULT_SHAPE dims = ("y", "x") if len(image_mode) == 0 else ("bands", "y", "x") if data_type is np.int8: - data_arr = _create_test_int8_dataset(name='geo-ct', shape=shape, area=test_area, values=1, dims=dims) + data_arr = _create_test_int8_dataset(name="geo-ct", shape=shape, area=test_area, values=1, dims=dims) else: - data_arr = _create_test_dataset(name='geo-ct', shape=shape, area=test_area, values=1.0, dims=dims) - - data_arr.attrs['platform_name'] = 'Meteosat-11' - data_arr.attrs['sensor'] = {'seviri'} - data_arr.attrs['units'] = '1' - data_arr.attrs['long_name'] = 'NWC GEO CT Cloud Type' - data_arr.attrs['orbital_parameters'] = { - 'satellite_nominal_altitude': 35785863.0, - 'satellite_nominal_longitude': 0.0, - 'satellite_nominal_latitude': 0, + data_arr = _create_test_dataset(name="geo-ct", shape=shape, area=test_area, values=1.0, dims=dims) + + data_arr.attrs["platform_name"] = "Meteosat-11" + data_arr.attrs["sensor"] = {"seviri"} + data_arr.attrs["units"] = "1" + data_arr.attrs["long_name"] = "NWC GEO CT Cloud Type" + data_arr.attrs["orbital_parameters"] = { + "satellite_nominal_altitude": 35785863.0, + "satellite_nominal_longitude": 0.0, + "satellite_nominal_latitude": 0, } - data_arr.attrs['start_time'] = datetime(2023, 1, 16, 11, 9, 17) - data_arr.attrs['end_time'] = datetime(2023, 1, 16, 11, 12, 22) + data_arr.attrs["start_time"] = datetime(2023, 1, 16, 11, 9, 17) + data_arr.attrs["end_time"] = datetime(2023, 1, 16, 11, 12, 22) data_arr.attrs["_satpy_id"] = dsid1 return data_arr @@ -118,17 +118,17 @@ def cloud_type_data_array2(test_area, data_type, image_mode): shape = DEFAULT_SHAPE if len(image_mode) == 0 else (len(image_mode),) + DEFAULT_SHAPE dims = ("y", "x") if len(image_mode) == 0 else ("bands", "y", "x") if data_type is np.int8: - data_arr = _create_test_int8_dataset(name='polar-ct', shape=shape, area=test_area, values=3, dims=dims) - data_arr[..., -1, :] = data_arr.attrs['_FillValue'] + data_arr = _create_test_int8_dataset(name="polar-ct", shape=shape, area=test_area, values=3, dims=dims) + data_arr[..., -1, :] = data_arr.attrs["_FillValue"] else: - data_arr = _create_test_dataset(name='polar-ct', shape=shape, area=test_area, values=3.0, dims=dims) + data_arr = _create_test_dataset(name="polar-ct", shape=shape, area=test_area, values=3.0, dims=dims) data_arr[..., -1, :] = np.nan - data_arr.attrs['platform_name'] = 'NOAA-18' - data_arr.attrs['sensor'] = {'avhrr-3'} - data_arr.attrs['units'] = '1' - data_arr.attrs['long_name'] = 'SAFNWC PPS CT Cloud Type' - data_arr.attrs['start_time'] = datetime(2023, 1, 16, 11, 12, 57, 500000) - data_arr.attrs['end_time'] = datetime(2023, 1, 16, 11, 28, 1, 900000) + data_arr.attrs["platform_name"] = "NOAA-18" + data_arr.attrs["sensor"] = {"avhrr-3"} + data_arr.attrs["units"] = "1" + data_arr.attrs["long_name"] = "SAFNWC PPS CT Cloud Type" + data_arr.attrs["start_time"] = datetime(2023, 1, 16, 11, 12, 57, 500000) + data_arr.attrs["end_time"] = datetime(2023, 1, 16, 11, 28, 1, 900000) data_arr.attrs["_satpy_id"] = dsid1 return data_arr @@ -141,7 +141,7 @@ def scene1_with_weights(cloud_type_data_array1, test_area): scene = Scene() scene[cloud_type_data_array1.attrs["_satpy_id"]] = cloud_type_data_array1 - wgt1 = _create_test_dataset(name='geo-ct-wgt', area=test_area, values=0) + wgt1 = _create_test_dataset(name="geo-ct-wgt", area=test_area, values=0) wgt1[NUM_TEST_ROWS, :] = 2 wgt1[:, NUM_TEST_COLS] = 2 @@ -151,11 +151,11 @@ def scene1_with_weights(cloud_type_data_array1, 
test_area): resolution=3000, modifiers=() ) - scene[dsid2] = _create_test_int8_dataset(name='geo-cma', area=test_area, values=2) - scene[dsid2].attrs['start_time'] = datetime(2023, 1, 16, 11, 9, 17) - scene[dsid2].attrs['end_time'] = datetime(2023, 1, 16, 11, 12, 22) + scene[dsid2] = _create_test_int8_dataset(name="geo-cma", area=test_area, values=2) + scene[dsid2].attrs["start_time"] = datetime(2023, 1, 16, 11, 9, 17) + scene[dsid2].attrs["end_time"] = datetime(2023, 1, 16, 11, 12, 22) - wgt2 = _create_test_dataset(name='geo-cma-wgt', area=test_area, values=0) + wgt2 = _create_test_dataset(name="geo-cma-wgt", area=test_area, values=0) return scene, [wgt1, wgt2] @@ -168,18 +168,18 @@ def scene2_with_weights(cloud_type_data_array2, test_area): scene = Scene() scene[cloud_type_data_array2.attrs["_satpy_id"]] = cloud_type_data_array2 - wgt1 = _create_test_dataset(name='polar-ct-wgt', area=test_area, values=1) + wgt1 = _create_test_dataset(name="polar-ct-wgt", area=test_area, values=1) dsid2 = make_dataid( name="polar-cma", resolution=1000, modifiers=() ) - scene[dsid2] = _create_test_int8_dataset(name='polar-cma', area=test_area, values=4) - scene[dsid2].attrs['start_time'] = datetime(2023, 1, 16, 11, 12, 57, 500000) - scene[dsid2].attrs['end_time'] = datetime(2023, 1, 16, 11, 28, 1, 900000) + scene[dsid2] = _create_test_int8_dataset(name="polar-cma", area=test_area, values=4) + scene[dsid2].attrs["start_time"] = datetime(2023, 1, 16, 11, 12, 57, 500000) + scene[dsid2].attrs["end_time"] = datetime(2023, 1, 16, 11, 28, 1, 900000) - wgt2 = _create_test_dataset(name='polar-cma-wgt', area=test_area, values=1) + wgt2 = _create_test_dataset(name="polar-cma-wgt", area=test_area, values=1) return scene, [wgt1, wgt2] @@ -197,8 +197,8 @@ def multi_scene_and_weights(scene1_with_weights, scene2_with_weights): def groups(): """Get group definitions for the MultiScene.""" return { - DataQuery(name='CloudType'): ['geo-ct', 'polar-ct'], - DataQuery(name='CloudMask'): ['geo-cma', 'polar-cma'] + DataQuery(name="CloudType"): ["geo-ct", "polar-ct"], + DataQuery(name="CloudMask"): ["geo-cma", "polar-cma"] } @@ -216,15 +216,15 @@ def test_blend_two_scenes_using_stack(self, multi_scene_and_weights, groups, resampled = multi_scene stacked = resampled.blend(blend_function=stack) - result = stacked['CloudType'].compute() + result = stacked["CloudType"].compute() - expected = scene2['polar-ct'].copy() - expected[..., -1, :] = scene1['geo-ct'][..., -1, :] + expected = scene2["polar-ct"].copy() + expected[..., -1, :] = scene1["geo-ct"][..., -1, :] xr.testing.assert_equal(result, expected.compute()) _check_stacked_metadata(result, "CloudType") - assert result.attrs['start_time'] == datetime(2023, 1, 16, 11, 9, 17) - assert result.attrs['end_time'] == datetime(2023, 1, 16, 11, 28, 1, 900000) + assert result.attrs["start_time"] == datetime(2023, 1, 16, 11, 9, 17) + assert result.attrs["end_time"] == datetime(2023, 1, 16, 11, 28, 1, 900000) def test_blend_two_scenes_bad_blend_type(self, multi_scene_and_weights, groups): """Test exception is raised when bad 'blend_type' is used.""" @@ -232,7 +232,7 @@ def test_blend_two_scenes_bad_blend_type(self, multi_scene_and_weights, groups): multi_scene, weights = multi_scene_and_weights - simple_groups = {DataQuery(name='CloudType'): groups[DataQuery(name='CloudType')]} + simple_groups = {DataQuery(name="CloudType"): groups[DataQuery(name="CloudType")]} multi_scene.group(simple_groups) weights = [weights[0][0], weights[1][0]] @@ -263,7 +263,7 @@ def 
test_blend_two_scenes_using_stack_weighted(self, multi_scene_and_weights, gr scene1, weights1 = scene1_with_weights scene2, weights2 = scene2_with_weights - simple_groups = {DataQuery(name='CloudType'): groups[DataQuery(name='CloudType')]} + simple_groups = {DataQuery(name="CloudType"): groups[DataQuery(name="CloudType")]} multi_scene.group(simple_groups) weights = [weights[0][0], weights[1][0]] @@ -271,52 +271,52 @@ def test_blend_two_scenes_using_stack_weighted(self, multi_scene_and_weights, gr weighted_blend = multi_scene.blend(blend_function=stack_func) expected = exp_result_func(scene1, scene2) - result = weighted_blend['CloudType'].compute() + result = weighted_blend["CloudType"].compute() # result has NaNs and xarray's xr.testing.assert_equal doesn't support NaN comparison np.testing.assert_allclose(result.data, expected.data) _check_stacked_metadata(result, "CloudType") if combine_times: - assert result.attrs['start_time'] == datetime(2023, 1, 16, 11, 9, 17) - assert result.attrs['end_time'] == datetime(2023, 1, 16, 11, 28, 1, 900000) + assert result.attrs["start_time"] == datetime(2023, 1, 16, 11, 9, 17) + assert result.attrs["end_time"] == datetime(2023, 1, 16, 11, 28, 1, 900000) else: - assert result.attrs['start_time'] == datetime(2023, 1, 16, 11, 11, 7, 250000) - assert result.attrs['end_time'] == datetime(2023, 1, 16, 11, 20, 11, 950000) + assert result.attrs["start_time"] == datetime(2023, 1, 16, 11, 11, 7, 250000) + assert result.attrs["end_time"] == datetime(2023, 1, 16, 11, 20, 11, 950000) @pytest.fixture def datasets_and_weights(self): """X-Array datasets with area definition plus weights for input to tests.""" shape = (8, 12) - area = AreaDefinition('test', 'test', 'test', - {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0}, + area = AreaDefinition("test", "test", "test", + {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, shape[1], shape[0], [-200, -200, 200, 200]) - ds1 = xr.DataArray(da.ones(shape, chunks=-1), dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1, 0, 0, 0), 'area': area}) - ds2 = xr.DataArray(da.ones(shape, chunks=-1) * 2, dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1, 1, 0, 0), 'area': area}) - ds3 = xr.DataArray(da.ones(shape, chunks=-1) * 3, dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1, 1, 0, 0), 'area': area}) - - ds4 = xr.DataArray(da.zeros(shape, chunks=-1), dims=('y', 'time'), - attrs={'start_time': datetime(2018, 1, 1, 0, 0, 0), 'area': area}) - ds5 = xr.DataArray(da.zeros(shape, chunks=-1), dims=('y', 'time'), - attrs={'start_time': datetime(2018, 1, 1, 1, 0, 0), 'area': area}) - - wgt1 = xr.DataArray(da.ones(shape, chunks=-1), dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1, 0, 0, 0), 'area': area}) - wgt2 = xr.DataArray(da.zeros(shape, chunks=-1), dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1, 0, 0, 0), 'area': area}) - wgt3 = xr.DataArray(da.zeros(shape, chunks=-1), dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1, 0, 0, 0), 'area': area}) - - datastruct = {'shape': shape, - 'area': area, - 'datasets': [ds1, ds2, ds3, ds4, ds5], - 'weights': [wgt1, wgt2, wgt3]} + ds1 = xr.DataArray(da.ones(shape, chunks=-1), dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area}) + ds2 = xr.DataArray(da.ones(shape, chunks=-1) * 2, dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1, 1, 0, 0), "area": area}) + ds3 = xr.DataArray(da.ones(shape, chunks=-1) * 3, dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1, 1, 0, 0), "area": area}) + + 
ds4 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "time"),
+                           attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area})
+        ds5 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "time"),
+                           attrs={"start_time": datetime(2018, 1, 1, 1, 0, 0), "area": area})
+
+        wgt1 = xr.DataArray(da.ones(shape, chunks=-1), dims=("y", "x"),
+                            attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area})
+        wgt2 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "x"),
+                            attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area})
+        wgt3 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "x"),
+                            attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area})
+
+        datastruct = {"shape": shape,
+                      "area": area,
+                      "datasets": [ds1, ds2, ds3, ds4, ds5],
+                      "weights": [wgt1, wgt2, wgt3]}
         return datastruct
 
-    @pytest.mark.parametrize(('line', 'column',),
+    @pytest.mark.parametrize(("line", "column",),
                              [(2, 3), (4, 5)]
                              )
     def test_blend_function_stack_weighted(self, datasets_and_weights, line, column):
@@ -327,19 +327,19 @@ def test_blend_function_stack_weighted(self, datasets_and_weights, line, column)
 
         input_data = datasets_and_weights
 
-        input_data['weights'][1][line, :] = 2
-        input_data['weights'][2][:, column] = 2
+        input_data["weights"][1][line, :] = 2
+        input_data["weights"][2][:, column] = 2
 
-        stack_with_weights = partial(stack, weights=input_data['weights'], combine_times=False)
-        blend_result = stack_with_weights(input_data['datasets'][0:3])
+        stack_with_weights = partial(stack, weights=input_data["weights"], combine_times=False)
+        blend_result = stack_with_weights(input_data["datasets"][0:3])
 
-        ds1 = input_data['datasets'][0]
-        ds2 = input_data['datasets'][1]
-        ds3 = input_data['datasets'][2]
+        ds1 = input_data["datasets"][0]
+        ds2 = input_data["datasets"][1]
+        ds3 = input_data["datasets"][2]
         expected = ds1.copy()
         expected[:, column] = ds3[:, column]
         expected[line, :] = ds2[line, :]
-        expected.attrs = combine_metadata(*[x.attrs for x in input_data['datasets'][0:3]])
+        expected.attrs = combine_metadata(*[x.attrs for x in input_data["datasets"][0:3]])
 
         xr.testing.assert_equal(blend_result.compute(), expected.compute())
         assert expected.attrs == blend_result.attrs
@@ -348,8 +348,8 @@ def test_blend_function_stack(self, datasets_and_weights):
         """Test the 'stack' function."""
         input_data = datasets_and_weights
 
-        ds1 = input_data['datasets'][0]
-        ds2 = input_data['datasets'][1]
+        ds1 = input_data["datasets"][0]
+        ds2 = input_data["datasets"][1]
 
         res = stack([ds1, ds2])
         expected = ds2.copy()
@@ -362,11 +362,11 @@ def test_timeseries(self, datasets_and_weights):
         """Test the 'timeseries' function."""
         input_data = datasets_and_weights
 
-        ds1 = input_data['datasets'][0]
-        ds2 = input_data['datasets'][1]
-        ds4 = input_data['datasets'][2]
-        ds4 = input_data['datasets'][3]
-        ds5 = input_data['datasets'][4]
+        ds1 = input_data["datasets"][0]
+        ds2 = input_data["datasets"][1]
+        ds3 = input_data["datasets"][2]
+        ds4 = input_data["datasets"][3]
+        ds5 = input_data["datasets"][4]
 
         res = timeseries([ds1, ds2])
         res2 = timeseries([ds4, ds5])
@@ -377,16 +377,16 @@ def test_timeseries(self, datasets_and_weights):
 
 
 def _check_stacked_metadata(data_arr: xr.DataArray, exp_name: str) -> None:
-    assert data_arr.attrs['units'] == '1'
-    assert data_arr.attrs['name'] == exp_name
+    assert data_arr.attrs["units"] == "1"
+    assert data_arr.attrs["name"] == exp_name
 
     if "_FillValue" in data_arr.attrs:
-        assert data_arr.attrs['_FillValue'] == 255
-        assert data_arr.attrs['valid_range'] == [1, 15]
+        assert 
data_arr.attrs["_FillValue"] == 255 + assert data_arr.attrs["valid_range"] == [1, 15] expected_area = _create_test_area() - assert data_arr.attrs['area'] == expected_area + assert data_arr.attrs["area"] == expected_area # these metadata items don't match between all inputs - assert 'sensor' not in data_arr.attrs - assert 'platform_name' not in data_arr.attrs - assert 'long_name' not in data_arr.attrs + assert "sensor" not in data_arr.attrs + assert "platform_name" not in data_arr.attrs + assert "long_name" not in data_arr.attrs diff --git a/satpy/tests/multiscene_tests/test_misc.py b/satpy/tests/multiscene_tests/test_misc.py index 0cfedf226f..190045dad0 100644 --- a/satpy/tests/multiscene_tests/test_misc.py +++ b/satpy/tests/multiscene_tests/test_misc.py @@ -49,13 +49,13 @@ def test_properties(self): area = _create_test_area() scenes = _create_test_scenes(area=area) - ds1_id = make_dataid(name='ds1') - ds2_id = make_dataid(name='ds2') - ds3_id = make_dataid(name='ds3') - ds4_id = make_dataid(name='ds4') + ds1_id = make_dataid(name="ds1") + ds2_id = make_dataid(name="ds2") + ds3_id = make_dataid(name="ds3") + ds4_id = make_dataid(name="ds4") # Add a dataset to only one of the Scenes - scenes[1]['ds3'] = _create_test_dataset('ds3') + scenes[1]["ds3"] = _create_test_dataset("ds3") mscn = MultiScene(scenes) self.assertSetEqual(mscn.loaded_dataset_ids, @@ -64,7 +64,7 @@ def test_properties(self): self.assertTrue(mscn.all_same_area) bigger_area = _create_test_area(shape=(20, 40)) - scenes[0]['ds4'] = _create_test_dataset('ds4', shape=(20, 40), + scenes[0]["ds4"] = _create_test_dataset("ds4", shape=(20, 40), area=bigger_area) self.assertSetEqual(mscn.loaded_dataset_ids, @@ -93,14 +93,14 @@ def test_from_files(self): "OR_GLM-L2-GLMC-M3_G16_s20171171506000_e20171171507000_c20380190314080.nc", "OR_GLM-L2-GLMC-M3_G16_s20171171507000_e20171171508000_c20380190314080.nc", ] - with mock.patch('satpy.multiscene._multiscene.Scene') as scn_mock: + with mock.patch("satpy.multiscene._multiscene.Scene") as scn_mock: mscn = MultiScene.from_files( input_files_abi, - reader='abi_l1b', + reader="abi_l1b", scene_kwargs={"reader_kwargs": {}}) assert len(mscn.scenes) == 6 calls = [mock.call( - filenames={'abi_l1b': [in_file_abi]}, + filenames={"abi_l1b": [in_file_abi]}, reader_kwargs={}) for in_file_abi in input_files_abi] scn_mock.assert_has_calls(calls) @@ -109,13 +109,13 @@ def test_from_files(self): with pytest.warns(DeprecationWarning): mscn = MultiScene.from_files( input_files_abi + input_files_glm, - reader=('abi_l1b', "glm_l2"), + reader=("abi_l1b", "glm_l2"), group_keys=["start_time"], ensure_all_readers=True, time_threshold=30) assert len(mscn.scenes) == 2 calls = [mock.call( - filenames={'abi_l1b': [in_file_abi], 'glm_l2': [in_file_glm]}) + filenames={"abi_l1b": [in_file_abi], "glm_l2": [in_file_glm]}) for (in_file_abi, in_file_glm) in zip(input_files_abi[0:2], [input_files_glm[2]] + [input_files_glm[7]])] @@ -123,7 +123,7 @@ def test_from_files(self): scn_mock.reset_mock() mscn = MultiScene.from_files( input_files_abi + input_files_glm, - reader=('abi_l1b', "glm_l2"), + reader=("abi_l1b", "glm_l2"), group_keys=["start_time"], ensure_all_readers=False, time_threshold=30) @@ -144,14 +144,14 @@ def scene1(self): wavelength=(1, 2, 3), polarization="H" ) - scene[dsid1] = _create_test_dataset(name='ds1') + scene[dsid1] = _create_test_dataset(name="ds1") dsid2 = make_dataid( name="ds2", resolution=456, wavelength=(4, 5, 6), polarization="V" ) - scene[dsid2] = _create_test_dataset(name='ds2') + scene[dsid2] = 
_create_test_dataset(name="ds2") return scene @pytest.fixture @@ -165,14 +165,14 @@ def scene2(self): wavelength=(1.1, 2.1, 3.1), polarization="H" ) - scene[dsid1] = _create_test_dataset(name='ds3') + scene[dsid1] = _create_test_dataset(name="ds3") dsid2 = make_dataid( name="ds4", resolution=456.1, wavelength=(4.1, 5.1, 6.1), polarization="V" ) - scene[dsid2] = _create_test_dataset(name='ds4') + scene[dsid2] = _create_test_dataset(name="ds4") return scene @pytest.fixture @@ -185,8 +185,8 @@ def multi_scene(self, scene1, scene2): def groups(self): """Get group definitions for the MultiScene.""" return { - DataQuery(name='odd'): ['ds1', 'ds3'], - DataQuery(name='even'): ['ds2', 'ds4'] + DataQuery(name="odd"): ["ds1", "ds3"], + DataQuery(name="even"): ["ds2", "ds4"] } def test_multi_scene_grouping(self, multi_scene, groups, scene1): @@ -194,12 +194,12 @@ def test_multi_scene_grouping(self, multi_scene, groups, scene1): multi_scene.group(groups) shared_ids_exp = {make_dataid(name="odd"), make_dataid(name="even")} assert multi_scene.shared_dataset_ids == shared_ids_exp - assert DataQuery(name='odd') not in scene1 + assert DataQuery(name="odd") not in scene1 xr.testing.assert_allclose(multi_scene.scenes[0]["ds1"], scene1["ds1"]) def test_fails_to_add_multiple_datasets_from_the_same_scene_to_a_group(self, multi_scene): """Test that multiple datasets from the same scene in one group fails.""" - groups = {DataQuery(name='mygroup'): ['ds1', 'ds2']} + groups = {DataQuery(name="mygroup"): ["ds1", "ds2"]} multi_scene.group(groups) with pytest.raises(ValueError): next(multi_scene.scenes) diff --git a/satpy/tests/multiscene_tests/test_save_animation.py b/satpy/tests/multiscene_tests/test_save_animation.py index 5e5b4a1d63..2ea41f18f4 100644 --- a/satpy/tests/multiscene_tests/test_save_animation.py +++ b/satpy/tests/multiscene_tests/test_save_animation.py @@ -50,7 +50,7 @@ def tearDown(self): except OSError: pass - @mock.patch('satpy.multiscene._multiscene.get_enhanced_image', _fake_get_enhanced_image) + @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image) def test_save_mp4_distributed(self): """Save a series of fake scenes to an mp4 video.""" from satpy import MultiScene @@ -58,61 +58,61 @@ def test_save_mp4_distributed(self): scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes - scenes[1]['ds3'] = _create_test_dataset('ds3') + scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time - for ds_id in ['ds1', 'ds2', 'ds3']: - scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12) - if ds_id == 'ds3': + for ds_id in ["ds1", "ds2", "ds3"]: + scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + if ds_id == "ds3": continue - scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) fn = os.path.join( self.base_dir, - 'test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4') + "test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4") writer_mock = mock.MagicMock() client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v.compute() for v in x) client_mock.gather.side_effect = lambda x: x - with 
mock.patch('satpy.multiscene._multiscene.imageio.get_writer') as get_writer: + with mock.patch("satpy.multiscene._multiscene.imageio.get_writer") as get_writer: get_writer.return_value = writer_mock # force order of datasets by specifying them - mscn.save_animation(fn, client=client_mock, datasets=['ds1', 'ds2', 'ds3']) + mscn.save_animation(fn, client=client_mock, datasets=["ds1", "ds2", "ds3"]) # 2 saves for the first scene + 1 black frame # 3 for the second scene self.assertEqual(writer_mock.append_data.call_count, 3 + 3) filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] - self.assertEqual(filenames[0], 'test_save_mp4_ds1_20180101_00_20180102_12.mp4') - self.assertEqual(filenames[1], 'test_save_mp4_ds2_20180101_00_20180102_12.mp4') - self.assertEqual(filenames[2], 'test_save_mp4_ds3_20180102_00_20180102_12.mp4') + self.assertEqual(filenames[0], "test_save_mp4_ds1_20180101_00_20180102_12.mp4") + self.assertEqual(filenames[1], "test_save_mp4_ds2_20180101_00_20180102_12.mp4") + self.assertEqual(filenames[2], "test_save_mp4_ds3_20180102_00_20180102_12.mp4") # Test no distributed client found mscn = MultiScene(scenes) fn = os.path.join( self.base_dir, - 'test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4') + "test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4") writer_mock = mock.MagicMock() client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v.compute() for v in x) client_mock.gather.side_effect = lambda x: x - with mock.patch('satpy.multiscene._multiscene.imageio.get_writer') as get_writer, \ - mock.patch('satpy.multiscene._multiscene.get_client', mock.Mock(side_effect=ValueError("No client"))): + with mock.patch("satpy.multiscene._multiscene.imageio.get_writer") as get_writer, \ + mock.patch("satpy.multiscene._multiscene.get_client", mock.Mock(side_effect=ValueError("No client"))): get_writer.return_value = writer_mock # force order of datasets by specifying them - mscn.save_animation(fn, datasets=['ds1', 'ds2', 'ds3']) + mscn.save_animation(fn, datasets=["ds1", "ds2", "ds3"]) # 2 saves for the first scene + 1 black frame # 3 for the second scene self.assertEqual(writer_mock.append_data.call_count, 3 + 3) filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] - self.assertEqual(filenames[0], 'test_save_mp4_ds1_20180101_00_20180102_12.mp4') - self.assertEqual(filenames[1], 'test_save_mp4_ds2_20180101_00_20180102_12.mp4') - self.assertEqual(filenames[2], 'test_save_mp4_ds3_20180102_00_20180102_12.mp4') + self.assertEqual(filenames[0], "test_save_mp4_ds1_20180101_00_20180102_12.mp4") + self.assertEqual(filenames[1], "test_save_mp4_ds2_20180101_00_20180102_12.mp4") + self.assertEqual(filenames[2], "test_save_mp4_ds3_20180102_00_20180102_12.mp4") - @mock.patch('satpy.multiscene._multiscene.get_enhanced_image', _fake_get_enhanced_image) + @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image) def test_save_mp4_no_distributed(self): """Save a series of fake scenes to an mp4 video when distributed isn't available.""" from satpy import MultiScene @@ -120,39 +120,39 @@ def test_save_mp4_no_distributed(self): scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes - scenes[1]['ds3'] = _create_test_dataset('ds3') + scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time - for ds_id in ['ds1', 'ds2', 'ds3']: - scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs['end_time'] 
= datetime(2018, 1, 2, 12)
-            if ds_id == 'ds3':
+        for ds_id in ["ds1", "ds2", "ds3"]:
+            scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2)
+            scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12)
+            if ds_id == "ds3":
                 continue
-            scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1)
-            scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12)
+            scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1)
+            scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12)
 
         mscn = MultiScene(scenes)
         fn = os.path.join(
             self.base_dir,
-            'test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4')
+            "test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4")
         writer_mock = mock.MagicMock()
         client_mock = mock.MagicMock()
         client_mock.compute.side_effect = lambda x: tuple(v.compute() for v in x)
         client_mock.gather.side_effect = lambda x: x
-        with mock.patch('satpy.multiscene._multiscene.imageio.get_writer') as get_writer, \
-                mock.patch('satpy.multiscene._multiscene.get_client', None):
+        with mock.patch("satpy.multiscene._multiscene.imageio.get_writer") as get_writer, \
+                mock.patch("satpy.multiscene._multiscene.get_client", None):
             get_writer.return_value = writer_mock
             # force order of datasets by specifying them
-            mscn.save_animation(fn, datasets=['ds1', 'ds2', 'ds3'])
+            mscn.save_animation(fn, datasets=["ds1", "ds2", "ds3"])
 
         # 2 saves for the first scene + 1 black frame
         # 3 for the second scene
         self.assertEqual(writer_mock.append_data.call_count, 3 + 3)
         filenames = [os.path.basename(args[0][0])
                      for args in get_writer.call_args_list]
-        self.assertEqual(filenames[0], 'test_save_mp4_ds1_20180101_00_20180102_12.mp4')
-        self.assertEqual(filenames[1], 'test_save_mp4_ds2_20180101_00_20180102_12.mp4')
-        self.assertEqual(filenames[2], 'test_save_mp4_ds3_20180102_00_20180102_12.mp4')
+        self.assertEqual(filenames[0], "test_save_mp4_ds1_20180101_00_20180102_12.mp4")
+        self.assertEqual(filenames[1], "test_save_mp4_ds2_20180101_00_20180102_12.mp4")
+        self.assertEqual(filenames[2], "test_save_mp4_ds3_20180102_00_20180102_12.mp4")
 
-    @mock.patch('satpy.multiscene._multiscene.get_enhanced_image', _fake_get_enhanced_image)
+    @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image)
     def test_save_datasets_simple(self):
         """Save a series of fake scenes to PNG images."""
         from satpy import MultiScene
@@ -160,30 +160,30 @@ def test_save_datasets_simple(self):
         scenes = _create_test_scenes(area=area)
 
         # Add a dataset to only one of the Scenes
-        scenes[1]['ds3'] = _create_test_dataset('ds3')
+        scenes[1]["ds3"] = _create_test_dataset("ds3")
         # Add a start and end time
-        for ds_id in ['ds1', 'ds2', 'ds3']:
-            scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2)
-            scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12)
-            if ds_id == 'ds3':
+        for ds_id in ["ds1", "ds2", "ds3"]:
+            scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2)
+            scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12)
+            if ds_id == "ds3":
                 continue
-            scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1)
-            scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12)
+            scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1)
+            scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12)
 
         mscn = MultiScene(scenes)
         client_mock = mock.MagicMock()
         client_mock.compute.side_effect = lambda x: tuple(v for v in x)
         client_mock.gather.side_effect = lambda x: x
-        with mock.patch('satpy.multiscene._multiscene.Scene.save_datasets') as save_datasets:
+        with mock.patch("satpy.multiscene._multiscene.Scene.save_datasets") as save_datasets:
             save_datasets.return_value = [True]  # some arbitrary return value
             # force order of datasets by specifying them
-            mscn.save_datasets(base_dir=self.base_dir, client=False, datasets=['ds1', 'ds2', 'ds3'],
-                               writer='simple_image')
+            mscn.save_datasets(base_dir=self.base_dir, client=False, datasets=["ds1", "ds2", "ds3"],
+                               writer="simple_image")
         # 2 for each scene
         self.assertEqual(save_datasets.call_count, 2)
 
-    @mock.patch('satpy.multiscene._multiscene.get_enhanced_image', _fake_get_enhanced_image)
+    @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image)
     def test_save_datasets_distributed_delayed(self):
         """Test distributed save for writers returning delayed objects e.g. simple_image."""
         from dask.delayed import Delayed
@@ -193,15 +193,15 @@ def test_save_datasets_distributed_delayed(self):
         scenes = _create_test_scenes(area=area)
 
         # Add a dataset to only one of the Scenes
-        scenes[1]['ds3'] = _create_test_dataset('ds3')
+        scenes[1]["ds3"] = _create_test_dataset("ds3")
        # Add a start and end time
-        for ds_id in ['ds1', 'ds2', 'ds3']:
-            scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2)
-            scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12)
-            if ds_id == 'ds3':
+        for ds_id in ["ds1", "ds2", "ds3"]:
+            scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2)
+            scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12)
+            if ds_id == "ds3":
                 continue
-            scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1)
-            scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12)
+            scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1)
+            scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12)
 
         mscn = MultiScene(scenes)
         client_mock = mock.MagicMock()
@@ -209,16 +209,16 @@ def test_save_datasets_distributed_delayed(self):
         client_mock.gather.side_effect = lambda x: x
         future_mock = mock.MagicMock()
         future_mock.__class__ = Delayed
-        with mock.patch('satpy.multiscene._multiscene.Scene.save_datasets') as save_datasets:
+        with mock.patch("satpy.multiscene._multiscene.Scene.save_datasets") as save_datasets:
             save_datasets.return_value = [future_mock]  # some arbitrary return value
             # force order of datasets by specifying them
-            mscn.save_datasets(base_dir=self.base_dir, client=client_mock, datasets=['ds1', 'ds2', 'ds3'],
-                               writer='simple_image')
+            mscn.save_datasets(base_dir=self.base_dir, client=client_mock, datasets=["ds1", "ds2", "ds3"],
+                               writer="simple_image")
         # 2 for each scene
         self.assertEqual(save_datasets.call_count, 2)
 
-    @mock.patch('satpy.multiscene._multiscene.get_enhanced_image', _fake_get_enhanced_image)
+    @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image)
     def test_save_datasets_distributed_source_target(self):
         """Test distributed save for writers returning sources and targets e.g. 
geotiff writer.""" import dask.array as da @@ -228,15 +228,15 @@ def test_save_datasets_distributed_source_target(self): scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes - scenes[1]['ds3'] = _create_test_dataset('ds3') + scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time - for ds_id in ['ds1', 'ds2', 'ds3']: - scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12) - if ds_id == 'ds3': + for ds_id in ["ds1", "ds2", "ds3"]: + scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + if ds_id == "ds3": continue - scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) client_mock = mock.MagicMock() @@ -245,12 +245,12 @@ def test_save_datasets_distributed_source_target(self): source_mock = mock.MagicMock() source_mock.__class__ = da.Array target_mock = mock.MagicMock() - with mock.patch('satpy.multiscene._multiscene.Scene.save_datasets') as save_datasets: + with mock.patch("satpy.multiscene._multiscene.Scene.save_datasets") as save_datasets: save_datasets.return_value = [(source_mock, target_mock)] # some arbitrary return value # force order of datasets by specifying them with self.assertRaises(NotImplementedError): - mscn.save_datasets(base_dir=self.base_dir, client=client_mock, datasets=['ds1', 'ds2', 'ds3'], - writer='geotiff') + mscn.save_datasets(base_dir=self.base_dir, client=client_mock, datasets=["ds1", "ds2", "ds3"], + writer="geotiff") def test_crop(self): """Test the crop method.""" @@ -262,44 +262,44 @@ def test_crop(self): scene1 = Scene() area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927, 5570248.477339745) - proj_dict = {'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0, - 'lon_0': 0.0, 'proj': 'geos', 'units': 'm'} + proj_dict = {"a": 6378169.0, "b": 6356583.8, "h": 35785831.0, + "lon_0": 0.0, "proj": "geos", "units": "m"} x_size = 3712 y_size = 3712 area_def = AreaDefinition( - 'test', 'test', 'test', + "test", "test", "test", proj_dict, x_size, y_size, area_extent, ) area_def2 = AreaDefinition( - 'test2', 'test2', 'test2', proj_dict, + "test2", "test2", "test2", proj_dict, x_size // 2, y_size // 2, area_extent, ) scene1["1"] = DataArray(np.zeros((y_size, x_size))) - scene1["2"] = DataArray(np.zeros((y_size, x_size)), dims=('y', 'x')) - scene1["3"] = DataArray(np.zeros((y_size, x_size)), dims=('y', 'x'), - attrs={'area': area_def}) - scene1["4"] = DataArray(np.zeros((y_size // 2, x_size // 2)), dims=('y', 'x'), - attrs={'area': area_def2}) + scene1["2"] = DataArray(np.zeros((y_size, x_size)), dims=("y", "x")) + scene1["3"] = DataArray(np.zeros((y_size, x_size)), dims=("y", "x"), + attrs={"area": area_def}) + scene1["4"] = DataArray(np.zeros((y_size // 2, x_size // 2)), dims=("y", "x"), + attrs={"area": area_def2}) mscn = MultiScene([scene1]) # by lon/lat bbox new_mscn = mscn.crop(ll_bbox=(-20., -5., 0, 0)) new_scn1 = list(new_mscn.scenes)[0] - self.assertIn('1', new_scn1) - self.assertIn('2', new_scn1) - self.assertIn('3', new_scn1) - self.assertTupleEqual(new_scn1['1'].shape, (y_size, x_size)) - self.assertTupleEqual(new_scn1['2'].shape, (y_size, x_size)) - self.assertTupleEqual(new_scn1['3'].shape, (184, 714)) - 
self.assertTupleEqual(new_scn1['4'].shape, (92, 357)) + self.assertIn("1", new_scn1) + self.assertIn("2", new_scn1) + self.assertIn("3", new_scn1) + self.assertTupleEqual(new_scn1["1"].shape, (y_size, x_size)) + self.assertTupleEqual(new_scn1["2"].shape, (y_size, x_size)) + self.assertTupleEqual(new_scn1["3"].shape, (184, 714)) + self.assertTupleEqual(new_scn1["4"].shape, (92, 357)) -@mock.patch('satpy.multiscene._multiscene.get_enhanced_image') +@mock.patch("satpy.multiscene._multiscene.get_enhanced_image") def test_save_mp4(smg, tmp_path): """Save a series of fake scenes to an mp4 video.""" from satpy import MultiScene @@ -308,38 +308,38 @@ def test_save_mp4(smg, tmp_path): smg.side_effect = _fake_get_enhanced_image # Add a dataset to only one of the Scenes - scenes[1]['ds3'] = _create_test_dataset('ds3') + scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time - for ds_id in ['ds1', 'ds2', 'ds3']: - scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12) - if ds_id == 'ds3': + for ds_id in ["ds1", "ds2", "ds3"]: + scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + if ds_id == "ds3": continue - scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) fn = str(tmp_path / - 'test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4') + "test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4") writer_mock = mock.MagicMock() - with mock.patch('satpy.multiscene._multiscene.imageio.get_writer') as get_writer: + with mock.patch("satpy.multiscene._multiscene.imageio.get_writer") as get_writer: get_writer.return_value = writer_mock # force order of datasets by specifying them - mscn.save_animation(fn, datasets=['ds1', 'ds2', 'ds3'], client=False) + mscn.save_animation(fn, datasets=["ds1", "ds2", "ds3"], client=False) # 2 saves for the first scene + 1 black frame # 3 for the second scene assert writer_mock.append_data.call_count == 3 + 3 filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] - assert filenames[0] == 'test_save_mp4_ds1_20180101_00_20180102_12.mp4' - assert filenames[1] == 'test_save_mp4_ds2_20180101_00_20180102_12.mp4' - assert filenames[2] == 'test_save_mp4_ds3_20180102_00_20180102_12.mp4' + assert filenames[0] == "test_save_mp4_ds1_20180101_00_20180102_12.mp4" + assert filenames[1] == "test_save_mp4_ds2_20180101_00_20180102_12.mp4" + assert filenames[2] == "test_save_mp4_ds3_20180102_00_20180102_12.mp4" # make sure that not specifying datasets still saves all of them fn = str(tmp_path / - 'test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4') + "test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4") writer_mock = mock.MagicMock() - with mock.patch('satpy.multiscene._multiscene.imageio.get_writer') as get_writer: + with mock.patch("satpy.multiscene._multiscene.imageio.get_writer") as get_writer: get_writer.return_value = writer_mock # force order of datasets by specifying them mscn.save_animation(fn, client=False) @@ -353,9 +353,9 @@ def test_save_mp4(smg, tmp_path): # test decorating and enhancing fn = str(tmp_path / - 'test-{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}-rich.mp4') + 
"test-{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}-rich.mp4") writer_mock = mock.MagicMock() - with mock.patch('satpy.multiscene._multiscene.imageio.get_writer') as get_writer: + with mock.patch("satpy.multiscene._multiscene.imageio.get_writer") as get_writer: get_writer.return_value = writer_mock mscn.save_animation( fn, client=False, diff --git a/satpy/tests/multiscene_tests/test_utils.py b/satpy/tests/multiscene_tests/test_utils.py index 409eb9cf86..310d68c215 100644 --- a/satpy/tests/multiscene_tests/test_utils.py +++ b/satpy/tests/multiscene_tests/test_utils.py @@ -30,27 +30,27 @@ DEFAULT_SHAPE = (5, 10) -local_id_keys_config = {'name': { - 'required': True, +local_id_keys_config = {"name": { + "required": True, }, - 'wavelength': { - 'type': WavelengthRange, + "wavelength": { + "type": WavelengthRange, }, - 'resolution': None, - 'calibration': { - 'enum': [ - 'reflectance', - 'brightness_temperature', - 'radiance', - 'counts' + "resolution": None, + "calibration": { + "enum": [ + "reflectance", + "brightness_temperature", + "radiance", + "counts" ] }, - 'polarization': None, - 'level': None, - 'modifiers': { - 'required': True, - 'default': ModifierTuple(), - 'type': ModifierTuple, + "polarization": None, + "level": None, + "modifiers": { + "required": True, + "default": ModifierTuple(), + "type": ModifierTuple, }, } @@ -63,14 +63,14 @@ def _fake_get_enhanced_image(img, enhance=None, overlay=None, decorate=None): def _create_test_area(proj_str=None, shape=DEFAULT_SHAPE, extents=None): """Create a test area definition.""" if proj_str is None: - proj_str = '+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. ' \ - '+lat_0=25 +lat_1=25 +units=m +no_defs' + proj_str = "+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. " \ + "+lat_0=25 +lat_1=25 +units=m +no_defs" extents = extents or (-1000., -1500., 1000., 1500.) 
return AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_str, shape[1], shape[0], @@ -82,9 +82,9 @@ def _create_test_int8_dataset(name, shape=DEFAULT_SHAPE, area=None, values=None, """Create a test DataArray object.""" return xr.DataArray( da.ones(shape, dtype=np.uint8, chunks=shape) * values, dims=dims, - attrs={'_FillValue': 255, - 'valid_range': [1, 15], - 'name': name, 'area': area, '_satpy_id_keys': local_id_keys_config}) + attrs={"_FillValue": 255, + "valid_range": [1, 15], + "name": name, "area": area, "_satpy_id_keys": local_id_keys_config}) def _create_test_dataset(name, shape=DEFAULT_SHAPE, area=None, values=None, dims=("y", "x")): @@ -92,22 +92,22 @@ def _create_test_dataset(name, shape=DEFAULT_SHAPE, area=None, values=None, dims if values: return xr.DataArray( da.ones(shape, dtype=np.float32, chunks=shape) * values, dims=dims, - attrs={'name': name, 'area': area, '_satpy_id_keys': local_id_keys_config}) + attrs={"name": name, "area": area, "_satpy_id_keys": local_id_keys_config}) return xr.DataArray( da.zeros(shape, dtype=np.float32, chunks=shape), dims=dims, - attrs={'name': name, 'area': area, '_satpy_id_keys': local_id_keys_config}) + attrs={"name": name, "area": area, "_satpy_id_keys": local_id_keys_config}) def _create_test_scenes(num_scenes=2, shape=DEFAULT_SHAPE, area=None): """Create some test scenes for various test cases.""" from satpy import Scene - ds1 = _create_test_dataset('ds1', shape=shape, area=area) - ds2 = _create_test_dataset('ds2', shape=shape, area=area) + ds1 = _create_test_dataset("ds1", shape=shape, area=area) + ds2 = _create_test_dataset("ds2", shape=shape, area=area) scenes = [] for _ in range(num_scenes): scn = Scene() - scn['ds1'] = ds1.copy() - scn['ds2'] = ds2.copy() + scn["ds1"] = ds1.copy() + scn["ds2"] = ds2.copy() scenes.append(scn) return scenes diff --git a/satpy/tests/reader_tests/_li_test_utils.py b/satpy/tests/reader_tests/_li_test_utils.py index 837e653cc3..d6a32253f5 100644 --- a/satpy/tests/reader_tests/_li_test_utils.py +++ b/satpy/tests/reader_tests/_li_test_utils.py @@ -23,16 +23,16 @@ # mapping of netcdf type code to numpy data type: TYPE_MAP = { - 'i1': np.int8, - 'i2': np.int16, - 'i4': np.int32, - 'i8': np.int64, - 'u1': np.uint8, - 'u2': np.uint16, - 'u4': np.uint32, - 'u8': np.uint64, - 'f4': np.float32, - 'f8': np.float64, + "i1": np.int8, + "i2": np.int16, + "i4": np.int32, + "i8": np.int64, + "u1": np.uint8, + "u2": np.uint16, + "u4": np.uint32, + "u8": np.uint64, + "f4": np.float32, + "f8": np.float64, } @@ -47,55 +47,55 @@ def rand_u16(num): return np.random.randint(low=0, high=np.iinfo(np.uint16).max - 1, size=num, dtype=np.uint16) return { - 'providers': settings.get('providers', {}), - 'variable_path': settings.get('variable_path', 'data/'), - 'dimensions': { - 'unfiltered_events': nobs, - 'l1b_chunks': nchunks, - 'l1b_offsets': nchunks, - 'filters': nfilters, - 'scalar': 1, + "providers": settings.get("providers", {}), + "variable_path": settings.get("variable_path", "data/"), + "dimensions": { + "unfiltered_events": nobs, + "l1b_chunks": nchunks, + "l1b_offsets": nchunks, + "filters": nfilters, + "scalar": 1, }, - 'variables': {}, - 'sector_variables': { + "variables": {}, + "sector_variables": { "event_id": { "format": "u2", - "shape": ('unfiltered_events',), + "shape": ("unfiltered_events",), "fill_value": 65535, "long_name": "ID of LI L2 Event", "default_data": lambda: rand_u16(nobs) }, "group_id": { "format": "u2", - "shape": ('unfiltered_events',), + "shape": 
("unfiltered_events",), "fill_value": 65535, "long_name": "ID of associated LI L2 Group object", "default_data": lambda: rand_u16(nobs) }, "l1b_chunk_ids": { "format": "u4", - "shape": ('l1b_chunks',), + "shape": ("l1b_chunks",), "fill_value": 4294967295, "long_name": "Array of L1b event chunk IDs", "default_data": lambda: np.arange(nchunks) + 10000 }, "l1b_chunk_offsets": { "format": "u4", - "shape": ('l1b_offsets',), + "shape": ("l1b_offsets",), "fill_value": 4294967295, "long_name": "Array offset for L1b event chunk boundaries", "default_data": lambda: np.arange(nchunks) }, "l1b_window": { "format": "u4", - "shape": ('unfiltered_events',), + "shape": ("unfiltered_events",), "fill_value": 4294967295, "long_name": "window index of associated L1b event", "default_data": lambda: (np.arange(nobs) + 10000) }, "filter_values": { "format": "u1", - "shape": ('unfiltered_events', 'filters',), + "shape": ("unfiltered_events", "filters",), "fill_value": 255, "scale_factor": 0.004, "add_offset": 0.0, @@ -104,22 +104,22 @@ def rand_u16(num): }, "epoch_time": { "format": "f8", - "shape": ('scalar',), + "shape": ("scalar",), "fill_value": 9.96920996886869e36, "long_name": "Start time of integration frame", "default_data": lambda: 1.234, - 'precision': '1 millisecond', - 'time_standard': 'UTC', - 'standard_name': 'time', - 'units': 'seconds since 2000-01-01 00:00:00.0', + "precision": "1 millisecond", + "time_standard": "UTC", + "standard_name": "time", + "units": "seconds since 2000-01-01 00:00:00.0", }, "time_offset": { "format": "f4", - "shape": ('unfiltered_events',), + "shape": ("unfiltered_events",), "fill_value": 9.96921e36, "long_name": "Time offset from epoch time", "default_data": lambda: np.linspace(0.0, 1000.0, nobs), - 'units': 'seconds', + "units": "seconds", }, } } @@ -136,13 +136,13 @@ def l2_lef_schema(settings=None): nobs = settings.get("num_obs", 123) return { - 'providers': settings.get('providers', {}), - 'variable_path': settings.get('variable_path', 'data/'), - 'dimensions': { - 'events': nobs, - 'scalar': 1, + "providers": settings.get("providers", {}), + "variable_path": settings.get("variable_path", "data/"), + "dimensions": { + "events": nobs, + "scalar": 1, }, - 'variables': { + "variables": { "l1b_geolocation_warning": { "format": "i1", "shape": (), # test explicitly the scalar case @@ -150,47 +150,47 @@ def l2_lef_schema(settings=None): }, "l1b_missing_warning": { "format": "i1", - "shape": ('scalar',), + "shape": ("scalar",), "default_data": lambda: 0 }, "l1b_radiometric_warning": { "format": "i1", - "shape": ('scalar',), + "shape": ("scalar",), "default_data": lambda: 0 }, }, - 'sector_variables': { + "sector_variables": { "event_id": { "format": "u4", - "shape": ('events',), + "shape": ("events",), "fill_value": 65535, "long_name": "ID of LI L2 Event", "default_data": lambda: np.arange(1, nobs + 1) }, "group_id": { "format": "u4", - "shape": ('events',), + "shape": ("events",), "fill_value": 65535, "long_name": "ID of associated LI L2 Group object", "default_data": lambda: np.arange(1, nobs + 1) }, "flash_id": { "format": "u4", - "shape": ('events',), + "shape": ("events",), "fill_value": 65535, "long_name": "ID of associated LI L2 Flash object", "default_data": lambda: np.arange(1, nobs + 1) }, "detector": { "format": "u4", - "shape": ('scalar',), + "shape": ("scalar",), "fill_value": 65535, "long_name": "ID of detector for this group", "default_data": lambda: 1 }, "latitude": { "format": "i2", - "shape": ('events',), + "shape": ("events",), "fill_value": -32767, 
"long_name": "Latitude of group", "units": "degrees_north", @@ -199,7 +199,7 @@ def l2_lef_schema(settings=None): }, "longitude": { "format": "i2", - "shape": ('events',), + "shape": ("events",), "fill_value": -32767, "long_name": "Longitude of group", "units": "degrees_east", @@ -208,7 +208,7 @@ def l2_lef_schema(settings=None): }, "radiance": { "format": "u2", - "shape": ('events',), + "shape": ("events",), "long_name": "Radiance of Flash", "standard_name": "radiance", "units": "mW.m-2.sr-1", @@ -216,34 +216,34 @@ def l2_lef_schema(settings=None): }, "event_filter_qa": { "format": "u1", - "shape": ('events',), + "shape": ("events",), "long_name": "L2 event pre-filtering quality assurance value", "default_data": lambda: np.random.randint(1, 2 ** 8 - 1, nobs) }, "epoch_time": { "format": "f8", - "shape": ('scalar',), + "shape": ("scalar",), "long_name": "Start time of integration frame", "units": "seconds since 2000-01-01 00:00:00.0", "default_data": lambda: start_ts }, "time_offset": { "format": "f4", - "shape": ('events',), + "shape": ("events",), "long_name": "Time offset from epoch time", "units": "seconds", "default_data": lambda: np.random.uniform(1, 2 ** 31 - 1, nobs) }, "detector_row": { "format": "u2", - "shape": ('events',), + "shape": ("events",), "long_name": "Detector row position of event pixel", "units": "1", "default_data": lambda: np.random.randint(1, 1000, nobs) }, "detector_column": { "format": "u2", - "shape": ('events',), + "shape": ("events",), "long_name": "Detector column position of event pixel", "units": "1", "default_data": lambda: np.random.randint(1, 1000, nobs) @@ -258,22 +258,22 @@ def l2_lgr_schema(settings=None): ngrps = settings.get("num_groups", 120) return { - 'providers': settings.get('providers', {}), - 'variable_path': settings.get('variable_path', ''), - 'dimensions': { - 'groups': ngrps, + "providers": settings.get("providers", {}), + "variable_path": settings.get("variable_path", ""), + "dimensions": { + "groups": ngrps, }, - 'variables': { + "variables": { "latitude": { "format": "f4", - "shape": ('groups',), + "shape": ("groups",), "long_name": "Latitude of group", "units": "degrees_north", "default_data": lambda: np.linspace(-90, 90, ngrps) }, "longitude": { "format": "f4", - "shape": ('groups',), + "shape": ("groups",), "long_name": "Longitude of group", "units": "degrees_east", "default_data": lambda: np.linspace(-180, 80, ngrps) @@ -292,15 +292,15 @@ def l2_lfl_schema(settings=None): etime = (datetime(2019, 1, 2) - epoch).total_seconds() return { - 'providers': settings.get('providers', {}), - 'variable_path': settings.get('variable_path', ''), - 'dimensions': { - 'flashes': nobs, + "providers": settings.get("providers", {}), + "variable_path": settings.get("variable_path", ""), + "dimensions": { + "flashes": nobs, }, - 'variables': { + "variables": { "latitude": { "format": "i2", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Latitude of Flash", "standard_name": "latitude", "units": "degrees_north", @@ -312,7 +312,7 @@ def l2_lfl_schema(settings=None): }, "longitude": { "format": "i2", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Longitude of Flash", "standard_name": "longitude", "units": "degrees_east", @@ -324,7 +324,7 @@ def l2_lfl_schema(settings=None): }, "radiance": { "format": "u2", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Radiance of Flash", "standard_name": "radiance", "units": "mW.m-2.sr-1", @@ -332,7 +332,7 @@ def l2_lfl_schema(settings=None): }, "flash_duration": { 
"format": "u2", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Flash duration", "standard_name": "flash_duration", "units": "ms", @@ -340,56 +340,56 @@ def l2_lfl_schema(settings=None): }, "flash_filter_confidence": { "format": "i1", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "L2 filtered flash confidence", "standard_name": "flash_filter_confidence", "default_data": lambda: np.clip(np.round(np.random.normal(20, 10, nobs)), 1, 2 ** 7 - 1) }, "flash_footprint": { "format": "u2", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Flash footprint size", - "standard_name": 'flash_footprint', + "standard_name": "flash_footprint", "units": "L1 grid pixels", "default_data": lambda: np.maximum(1, np.round(np.random.normal(5, 3, nobs))) }, "flash_id": { "format": "u4", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Flash footprint size", - "standard_name": 'flash_id', + "standard_name": "flash_id", "default_data": lambda: np.arange(1, nobs + 1) }, "flash_time": { "format": "f8", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Nominal flash time", "units": "seconds since 2000-01-01 00:00:00.0", - "standard_name": 'time', + "standard_name": "time", "precision": "1 millisecond", "default_data": lambda: np.random.uniform(stime, etime, nobs) }, "l1b_geolocation_warning": { "format": "u8", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "L1b geolocation warning", "default_data": lambda: -127 }, "l1b_radiometric_warning": { "format": "u8", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "L1b radiometric warning", "default_data": lambda: -127 }, "number_of_events": { "format": "u2", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Number of events in each flash", "default_data": lambda: 1 }, "number_of_groups": { "format": "u4", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Number of flashes in each flash", "default_data": lambda: 1 }, @@ -403,45 +403,45 @@ def l2_af_schema(settings=None): nobs = settings.get("num_obs", 1234) return { - 'providers': settings.get('providers', {}), - 'variable_path': settings.get('variable_path', ''), - 'dimensions': accumulation_dimensions(1, nobs), - 'variables': { + "providers": settings.get("providers", {}), + "variable_path": settings.get("variable_path", ""), + "dimensions": accumulation_dimensions(1, nobs), + "variables": { "accumulation_offsets": { "format": "u4", - "shape": ('accumulations',), + "shape": ("accumulations",), "default_data": lambda: 0 }, "accumulation_start_times": { "format": "f8", - "shape": ('accumulations',), + "shape": ("accumulations",), "default_data": lambda: 4.25055600161e8 }, "l1b_geolocation_warning": { "format": "i1", - "shape": ('accumulations',), + "shape": ("accumulations",), "long_name": "L1b geolocation warning", "default_data": lambda: -127 }, "l1b_radiometric_warning": { "format": "i1", - "shape": ('accumulations',), + "shape": ("accumulations",), "long_name": "L1b radiometric warning", "default_data": lambda: -127 }, "average_flash_qa": { "format": "i1", - "shape": ('accumulations',), + "shape": ("accumulations",), "default_data": lambda: 23 }, "flash_accumulation": { "format": "u2", - "shape": ('pixels',), + "shape": ("pixels",), "default_data": lambda: np.clip(np.round(np.random.normal(1, 2, nobs)), 1, 2 ** 16 - 1) }, "mtg_geos_projection": mtg_geos_projection(), - "x": fci_grid_definition('X', nobs), - "y": fci_grid_definition('Y', nobs), + "x": fci_grid_definition("X", nobs), + "y": 
fci_grid_definition("Y", nobs), } } @@ -453,27 +453,27 @@ def l2_afa_schema(settings=None): nacc = settings.get("num_accumulations", 20) return { - 'providers': settings.get('providers', {}), - 'variable_path': settings.get('variable_path', ''), - 'dimensions': accumulation_dimensions(nacc, npix), - 'variables': { + "providers": settings.get("providers", {}), + "variable_path": settings.get("variable_path", ""), + "dimensions": accumulation_dimensions(nacc, npix), + "variables": { "accumulation_start_times": { "format": "f4", - "shape": ('accumulations',), + "shape": ("accumulations",), "long_name": "Accumulation start time", "units": "seconds since 2000-01-01 00:00:00.0", "default_data": lambda: np.linspace(0.0, 1.0, nacc) }, "accumulated_flash_area": { "format": "u4", - "shape": ('pixels',), + "shape": ("pixels",), "fill_value": 4294967295, "long_name": "Number of contributing unique flashes to each pixel", "default_data": lambda: np.mod(np.arange(npix), 10) + 1 }, "mtg_geos_projection": mtg_geos_projection(), - "x": fci_grid_definition('X', npix), - "y": fci_grid_definition('Y', npix), + "x": fci_grid_definition("X", npix), + "y": fci_grid_definition("Y", npix), } } @@ -485,13 +485,13 @@ def l2_afr_schema(settings=None): nacc = settings.get("num_accumulations", 20) return { - 'providers': settings.get('providers', {}), - 'variable_path': settings.get('variable_path', ''), - 'dimensions': accumulation_dimensions(nacc, nobs), - 'variables': { + "providers": settings.get("providers", {}), + "variable_path": settings.get("variable_path", ""), + "dimensions": accumulation_dimensions(nacc, nobs), + "variables": { "flash_radiance": { "format": "f4", - "shape": ('pixels',), + "shape": ("pixels",), "long_name": "Area averaged flash radiance accumulation", "grid_mapping": "mtg_geos_projection", "coordinate": "sparse: x y", @@ -499,14 +499,14 @@ def l2_afr_schema(settings=None): }, "accumulation_start_times": { "format": "f4", - "shape": ('accumulations',), + "shape": ("accumulations",), "long_name": "Accumulation start time", "units": "seconds since 2000-01-01 00:00:00.0", "default_data": lambda: 0 }, "mtg_geos_projection": mtg_geos_projection(), - "x": fci_grid_definition('X', nobs), - "y": fci_grid_definition('Y', nobs), + "x": fci_grid_definition("X", nobs), + "y": fci_grid_definition("Y", nobs), } } @@ -514,29 +514,29 @@ def l2_afr_schema(settings=None): def accumulation_dimensions(nacc, nobs): """Set dimensions for the accumulated products.""" return { - 'accumulations': nacc, - 'pixels': nobs, + "accumulations": nacc, + "pixels": nobs, } def fci_grid_definition(axis, nobs): """FCI grid definition on X or Y axis.""" - if axis == 'X': - long_name = 'azimuth angle encoded as column' - standard_name = 'projection_x_coordinate' + if axis == "X": + long_name = "azimuth angle encoded as column" + standard_name = "projection_x_coordinate" else: - long_name = 'zenith angle encoded as row' - standard_name = 'projection_y_coordinate' + long_name = "zenith angle encoded as row" + standard_name = "projection_y_coordinate" return { "format": "i2", - "shape": ('pixels',), + "shape": ("pixels",), "add_offset": -0.155619516, "axis": axis, "long_name": long_name, "scale_factor": 5.58878e-5, "standard_name": standard_name, - "units": 'radian', + "units": "radian", "valid_range": np.asarray([1, 5568]), "default_data": lambda: np.clip(np.round(np.random.normal(2000, 500, nobs)), 1, 2 ** 16 - 1) } @@ -546,49 +546,49 @@ def mtg_geos_projection(): """MTG geos projection definition.""" return { "format": "i4", - 
"shape": ('accumulations',), - "grid_mapping_name": 'geostationary', + "shape": ("accumulations",), + "grid_mapping_name": "geostationary", "inverse_flattening": 298.2572221, "latitude_of_projection_origin": 0, "longitude_of_projection_origin": 0, "perspective_point_height": 42164000, "semi_major_axis": 6378169, "semi_minor_axis": 6356583.8, - "sweep_angle_axis": 'y', - "long_name": 'MTG geostationary projection', + "sweep_angle_axis": "y", + "long_name": "MTG geostationary projection", "default_data": lambda: -2147483647 } products_dict = { - '2-LE': {'ftype': 'li_l2_le_nc', 'schema': l2_le_schema}, - '2-LEF': {'ftype': 'li_l2_lef_nc', 'schema': l2_lef_schema}, - '2-LGR': {'ftype': 'li_l2_lgr_nc', 'schema': l2_lgr_schema}, - '2-LFL': {'ftype': 'li_l2_lfl_nc', 'schema': l2_lfl_schema}, - '2-AF': {'ftype': 'li_l2_af_nc', 'schema': l2_af_schema}, - '2-AFA': {'ftype': 'li_l2_afa_nc', 'schema': l2_afa_schema}, - '2-AFR': {'ftype': 'li_l2_afr_nc', 'schema': l2_afr_schema}, + "2-LE": {"ftype": "li_l2_le_nc", "schema": l2_le_schema}, + "2-LEF": {"ftype": "li_l2_lef_nc", "schema": l2_lef_schema}, + "2-LGR": {"ftype": "li_l2_lgr_nc", "schema": l2_lgr_schema}, + "2-LFL": {"ftype": "li_l2_lfl_nc", "schema": l2_lfl_schema}, + "2-AF": {"ftype": "li_l2_af_nc", "schema": l2_af_schema}, + "2-AFA": {"ftype": "li_l2_afa_nc", "schema": l2_afa_schema}, + "2-AFR": {"ftype": "li_l2_afr_nc", "schema": l2_afr_schema}, } def get_product_schema(pname, settings=None): """Retrieve an LI product schema given its name.""" - return products_dict[pname]['schema'](settings) + return products_dict[pname]["schema"](settings) def extract_filetype_info(filetype_infos, filetype): """Extract Satpy-conform filetype_info from filetype_infos fixture.""" ftype_info = filetype_infos[filetype] - ftype_info['file_type'] = filetype + ftype_info["file_type"] = filetype return ftype_info def set_variable_path(var_path, desc, sname): """Replace variable default path if applicable and ensure trailing separator.""" - vpath = desc.get('path', var_path) + vpath = desc.get("path", var_path) # Ensure we have a trailing separator: - if vpath != "" and vpath[-1] != '/': - vpath += '/' + if vpath != "" and vpath[-1] != "/": + vpath += "/" if sname != "": vpath += sname + "/" return vpath @@ -606,9 +606,9 @@ def populate_dummy_data(data, names, details): # Otherwise we write the default data: if data.shape == (): # scalar case - data = desc['default_data']() + data = desc["default_data"]() else: - data[:] = desc['default_data']() + data[:] = desc["default_data"]() def add_attributes(attribs, ignored_attrs, desc): @@ -634,22 +634,22 @@ def get_variable_writer(self, dset, settings): var_path = settings.get("variable_path", "") # Also keep track of the potential providers: - providers = settings.get('providers', {}) + providers = settings.get("providers", {}) # list of ignored attribute names: ignored_attrs = ["path", "format", "shape", "default_data", "fill_value"] # dictionary of dimensions: - dims = settings.get('dimensions', {}) + dims = settings.get("dimensions", {}) def write_variable(vname, desc, sname=""): """Write a variable in our dataset.""" # get numeric shape: - shape_str = desc['shape'] + shape_str = desc["shape"] shape = tuple([dims[dname] for dname in shape_str]) # Get the desired data type: - dtype = TYPE_MAP[desc['format']] + dtype = TYPE_MAP[desc["format"]] # Prepare a numpy array with the appropriate shape and type: data = np.zeros(shape, dtype=dtype) @@ -665,8 +665,8 @@ def write_variable(vname, desc, sname=""): 
add_attributes(attribs, ignored_attrs, desc) # Rename the fill value attribute: - if 'fill_value' in desc: - attribs['_FillValue'] = desc['fill_value'] + if "fill_value" in desc: + attribs["_FillValue"] = desc["fill_value"] names = [vname, sname] details = [desc, providers, settings] @@ -692,7 +692,7 @@ def get_test_content(self, filename, filename_info, filetype_info): # Note: params *IS* callable below: params = params(filename, filename_info, filetype_info) # pylint: disable=not-callable - settings = get_product_schema(filetype_info['file_desc']['product_type'], params) + settings = get_product_schema(filetype_info["file_desc"]["product_type"], params) # Resulting dataset: dset = {} @@ -713,16 +713,16 @@ def get_test_content(self, filename, filename_info, filetype_info): def write_variables(self, settings, write_variable): """Write raw (i.e. not in sectors) variables.""" - if 'variables' in settings: - variables = settings.get('variables') + if "variables" in settings: + variables = settings.get("variables") for vname, desc in variables.items(): write_variable(vname, desc) def write_sector_variables(self, settings, write_variable): """Write the sector variables.""" - if 'sector_variables' in settings: - sector_vars = settings.get('sector_variables') - sectors = settings.get('sectors', ['north', 'east', 'south', 'west']) + if "sector_variables" in settings: + sector_vars = settings.get("sector_variables") + sectors = settings.get("sectors", ["north", "east", "south", "west"]) for sname in sectors: for vname, desc in sector_vars.items(): diff --git a/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py index f4908c0a2b..486eba370b 100644 --- a/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py @@ -227,7 +227,7 @@ def coord_conv(self): cpix["IR1"] = 0.5 # instead of 1672.5 cpix["VIS"] = 0.5 # instead of 6688.5 - conv['scheduled_observation_time'] = 50130.979089568464 + conv["scheduled_observation_time"] = 50130.979089568464 nsensors = conv["number_of_sensor_elements"] nsensors["IR1"] = 1 diff --git a/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py index 144139a07a..5b3c6117d4 100644 --- a/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py +++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py @@ -19,9 +19,9 @@ IR_NAVIGATION_REFERENCE = [ { "pixel": nav.Pixel(line=686, pixel=1680), - 'lon': 139.990380, - 'lat': 35.047056, - 'nav_params': nav.PixelNavigationParameters( + "lon": 139.990380, + "lat": 35.047056, + "nav_params": nav.PixelNavigationParameters( attitude=nav.Attitude( angle_between_earth_and_sun=3.997397917902958, angle_between_sat_spin_and_z_axis=3.149118633034304, @@ -67,9 +67,9 @@ }, { "pixel": nav.Pixel(line=2089, pixel=1793), - 'lon': 144.996967, - 'lat': -34.959853, - 'nav_params': nav.PixelNavigationParameters( + "lon": 144.996967, + "lat": -34.959853, + "nav_params": nav.PixelNavigationParameters( attitude=nav.Attitude( angle_between_earth_and_sun=3.935707944355762, angle_between_sat_spin_and_z_axis=3.149118633034304, @@ -119,9 +119,9 @@ VIS_NAVIGATION_REFERENCE = [ { "pixel": nav.Pixel(line=2744, pixel=6720), - 'lon': 139.975527, - 'lat': 35.078028, - 'nav_params': nav.PixelNavigationParameters( + "lon": 139.975527, + "lat": 35.078028, + "nav_params": nav.PixelNavigationParameters( attitude=nav.Attitude( angle_between_earth_and_sun=3.997397918405798, 
angle_between_sat_spin_and_z_axis=3.149118633034304, @@ -167,9 +167,9 @@ }, { "pixel": nav.Pixel(line=8356, pixel=7172), - 'lon': 144.980104, - 'lat': -34.929123, - 'nav_params': nav.PixelNavigationParameters( + "lon": 144.980104, + "lat": -34.929123, + "nav_params": nav.PixelNavigationParameters( attitude=nav.Attitude( angle_between_earth_and_sun=3.935707944858620, angle_between_sat_spin_and_z_axis=3.149118633034304, diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index dfc8f0aec6..efecd1aa53 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -32,12 +32,12 @@ # Level 1 Fixtures AVAILABLE_1KM_VIS_PRODUCT_NAMES = [str(x) for x in range(8, 13)] -AVAILABLE_1KM_VIS_PRODUCT_NAMES += ['13lo', '13hi', '14lo', '14hi'] +AVAILABLE_1KM_VIS_PRODUCT_NAMES += ["13lo", "13hi", "14lo", "14hi"] AVAILABLE_1KM_VIS_PRODUCT_NAMES += [str(x) for x in range(15, 20)] AVAILABLE_1KM_IR_PRODUCT_NAMES = [str(x) for x in range(20, 37)] AVAILABLE_1KM_PRODUCT_NAMES = AVAILABLE_1KM_VIS_PRODUCT_NAMES + AVAILABLE_1KM_IR_PRODUCT_NAMES AVAILABLE_HKM_PRODUCT_NAMES = [str(x) for x in range(3, 8)] -AVAILABLE_QKM_PRODUCT_NAMES = ['1', '2'] +AVAILABLE_QKM_PRODUCT_NAMES = ["1", "2"] SCAN_LEN_5KM = 6 # 3 scans of 5km data SCAN_WIDTH_5KM = 270 SCALE_FACTOR = 0.5 @@ -101,14 +101,14 @@ def _generate_visible_uncertainty_data(shape: tuple) -> np.ndarray: def _get_lonlat_variable_info(resolution: int) -> dict: lon_5km, lat_5km = _generate_lonlat_data(resolution) return { - 'Latitude': {'data': lat_5km, - 'type': SDC.FLOAT32, - 'fill_value': -999, - 'attrs': {'dim_labels': ['Cell_Along_Swath_5km:mod35', 'Cell_Across_Swath_5km:mod35']}}, - 'Longitude': {'data': lon_5km, - 'type': SDC.FLOAT32, - 'fill_value': -999, - 'attrs': {'dim_labels': ['Cell_Along_Swath_5km:mod35', 'Cell_Across_Swath_5km:mod35']}}, + "Latitude": {"data": lat_5km, + "type": SDC.FLOAT32, + "fill_value": -999, + "attrs": {"dim_labels": ["Cell_Along_Swath_5km:mod35", "Cell_Across_Swath_5km:mod35"]}}, + "Longitude": {"data": lon_5km, + "type": SDC.FLOAT32, + "fill_value": -999, + "attrs": {"dim_labels": ["Cell_Along_Swath_5km:mod35", "Cell_Across_Swath_5km:mod35"]}}, } @@ -116,19 +116,19 @@ def _get_angles_variable_info(resolution: int) -> dict: angle_data = _generate_angle_data(resolution) dim_factor = RES_TO_REPEAT_FACTOR[resolution] * 2 angle_info = { - 'data': angle_data, - 'type': SDC.INT16, - 'fill_value': -32767, - 'attrs': { - 'dim_labels': [ - f'{dim_factor}*nscans:MODIS_SWATH_Type_L1B', - '1KM_geo_dim:MODIS_SWATH_Type_L1B'], - 'scale_factor': 0.01, - 'add_offset': -0.01, + "data": angle_data, + "type": SDC.INT16, + "fill_value": -32767, + "attrs": { + "dim_labels": [ + f"{dim_factor}*nscans:MODIS_SWATH_Type_L1B", + "1KM_geo_dim:MODIS_SWATH_Type_L1B"], + "scale_factor": 0.01, + "add_offset": -0.01, }, } angles_info = {} - for var_name in ('SensorAzimuth', 'SensorZenith', 'SolarAzimuth', 'SolarZenith'): + for var_name in ("SensorAzimuth", "SensorZenith", "SolarAzimuth", "SolarZenith"): angles_info[var_name] = angle_info return angles_info @@ -139,30 +139,30 @@ def _get_visible_variable_info(var_name: str, resolution: int, bands: list[str]) uncertainty = _generate_visible_uncertainty_data(data.shape) dim_factor = RES_TO_REPEAT_FACTOR[resolution] * 2 band_dim_name = f"Band_{resolution}_{num_bands}_RefSB:MODIS_SWATH_Type_L1B" - row_dim_name = f'{dim_factor}*nscans:MODIS_SWATH_Type_L1B' - col_dim_name = 
'Max_EV_frames:MODIS_SWATH_Type_L1B' + row_dim_name = f"{dim_factor}*nscans:MODIS_SWATH_Type_L1B" + col_dim_name = "Max_EV_frames:MODIS_SWATH_Type_L1B" return { var_name: { - 'data': data, - 'type': SDC.UINT16, - 'fill_value': 0, - 'attrs': { + "data": data, + "type": SDC.UINT16, + "fill_value": 0, + "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files - 'dim_labels': [band_dim_name, + "dim_labels": [band_dim_name, row_dim_name, col_dim_name], - 'valid_range': (0, 32767), - 'reflectance_scales': (2.0,) * num_bands, - 'reflectance_offsets': (-0.5,) * num_bands, - 'band_names': ",".join(bands), + "valid_range": (0, 32767), + "reflectance_scales": (2.0,) * num_bands, + "reflectance_offsets": (-0.5,) * num_bands, + "band_names": ",".join(bands), }, }, - var_name + '_Uncert_Indexes': { - 'data': uncertainty, - 'type': SDC.UINT8, - 'fill_value': 255, - 'attrs': { - 'dim_labels': [band_dim_name, + var_name + "_Uncert_Indexes": { + "data": uncertainty, + "type": SDC.UINT8, + "fill_value": 255, + "attrs": { + "dim_labels": [band_dim_name, row_dim_name, col_dim_name], }, @@ -175,27 +175,27 @@ def _get_emissive_variable_info(var_name: str, resolution: int, bands: list[str] data = _generate_visible_data(resolution, len(bands)) dim_factor = RES_TO_REPEAT_FACTOR[resolution] * 2 band_dim_name = f"Band_{resolution}_{num_bands}_Emissive:MODIS_SWATH_Type_L1B" - row_dim_name = f'{dim_factor}*nscans:MODIS_SWATH_Type_L1B' - col_dim_name = 'Max_EV_frames:MODIS_SWATH_Type_L1B' + row_dim_name = f"{dim_factor}*nscans:MODIS_SWATH_Type_L1B" + col_dim_name = "Max_EV_frames:MODIS_SWATH_Type_L1B" return { var_name: { - 'data': data, - 'type': SDC.UINT16, - 'fill_value': 0, - 'attrs': { - 'dim_labels': [band_dim_name, + "data": data, + "type": SDC.UINT16, + "fill_value": 0, + "attrs": { + "dim_labels": [band_dim_name, row_dim_name, col_dim_name], - 'valid_range': (0, 32767), - 'band_names': ",".join(bands), + "valid_range": (0, 32767), + "band_names": ",".join(bands), }, }, - var_name + '_Uncert_Indexes': { - 'data': np.zeros(data.shape, dtype=np.uint8), - 'type': SDC.UINT8, - 'fill_value': 255, - 'attrs': { - 'dim_labels': [band_dim_name, + var_name + "_Uncert_Indexes": { + "data": np.zeros(data.shape, dtype=np.uint8), + "type": SDC.UINT8, + "fill_value": 255, + "attrs": { + "dim_labels": [band_dim_name, row_dim_name, col_dim_name], }, @@ -217,13 +217,13 @@ def _get_l1b_geo_variable_info(filename: str, def generate_nasa_l1b_filename(prefix): """Generate a filename that follows NASA MODIS L1b convention.""" now = datetime.now() - return f'{prefix}_A{now:%y%j_%H%M%S}_{now:%Y%j%H%M%S}.hdf' + return f"{prefix}_A{now:%y%j_%H%M%S}_{now:%Y%j%H%M%S}.hdf" def generate_imapp_filename(suffix): """Generate a filename that follows IMAPP MODIS L1b convention.""" now = datetime.now() - return f't1.{now:%y%j.%H%M}.{suffix}.hdf' + return f"t1.{now:%y%j.%H%M}.{suffix}.hdf" def create_hdfeos_test_file(filename: str, @@ -262,17 +262,17 @@ def create_hdfeos_test_file(filename: str, def _add_variable_to_file(h, var_name, var_info): - v = h.create(var_name, var_info['type'], var_info['data'].shape) - v[:] = var_info['data'] + v = h.create(var_name, var_info["type"], var_info["data"].shape) + v[:] = var_info["data"] dim_count = 0 - for dimension_name in var_info['attrs']['dim_labels']: + for dimension_name in var_info["attrs"]["dim_labels"]: v.dim(dim_count).setname(dimension_name) dim_count += 1 - v.setfillvalue(var_info['fill_value']) - v.scale_factor = var_info['attrs'].get('scale_factor', 
SCALE_FACTOR) - v.add_offset = var_info['attrs'].get('add_offset', ADD_OFFSET) - for attr_key, attr_val in var_info['attrs'].items(): - if attr_key == 'dim_labels': + v.setfillvalue(var_info["fill_value"]) + v.scale_factor = var_info["attrs"].get("scale_factor", SCALE_FACTOR) + v.add_offset = var_info["attrs"].get("add_offset", ADD_OFFSET) + for attr_key, attr_val in var_info["attrs"].items(): + if attr_key == "dim_labels": continue setattr(v, attr_key, attr_val) @@ -281,12 +281,12 @@ def _create_core_metadata(file_shortname: str) -> str: beginning_date = datetime.now() ending_date = beginning_date + timedelta(minutes=5) core_metadata_header = "GROUP = INVENTORYMETADATA\nGROUPTYPE = MASTERGROUP\n\n" \ - "GROUP = RANGEDATETIME\n\nOBJECT = RANGEBEGINNINGDATE\nNUM_VAL = 1\nVALUE = \"{}\"\n" \ + 'GROUP = RANGEDATETIME\n\nOBJECT = RANGEBEGINNINGDATE\nNUM_VAL = 1\nVALUE = "{}"\n' \ "END_OBJECT = RANGEBEGINNINGDATE\n\nOBJECT = RANGEBEGINNINGTIME\n" \ - "NUM_VAL = 1\nVALUE = \"{}\"\n" \ + 'NUM_VAL = 1\nVALUE = "{}"\n' \ "END_OBJECT = RANGEBEGINNINGTIME\n\nOBJECT = RANGEENDINGDATE\n" \ - "NUM_VAL = 1\nVALUE = \"{}\"\n" \ - "END_OBJECT = RANGEENDINGDATE\n\nOBJECT = RANGEENDINGTIME\nNUM_VAL = 1\nVALUE = \"{}\"\n" \ + 'NUM_VAL = 1\nVALUE = "{}"\n' \ + 'END_OBJECT = RANGEENDINGDATE\n\nOBJECT = RANGEENDINGTIME\nNUM_VAL = 1\nVALUE = "{}"\n' \ "END_OBJECT = RANGEENDINGTIME\nEND_GROUP = RANGEDATETIME" core_metadata_header = core_metadata_header.format( beginning_date.strftime("%Y-%m-%d"), @@ -295,13 +295,13 @@ def _create_core_metadata(file_shortname: str) -> str: ending_date.strftime("%H:%M:%S.%f") ) inst_metadata = "GROUP = ASSOCIATEDPLATFORMINSTRUMENTSENSOR\n\n" \ - "OBJECT = ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER\nCLASS = \"1\"\n\n" \ - "OBJECT = ASSOCIATEDSENSORSHORTNAME\nCLASS = \"1\"\nNUM_VAL = 1\n" \ - "VALUE = \"MODIS\"\nEND_OBJECT = ASSOCIATEDSENSORSHORTNAME\n\n" \ - "OBJECT = ASSOCIATEDPLATFORMSHORTNAME\nCLASS = \"1\"\nNUM_VAL = 1\n" \ - "VALUE = \"Terra\"\nEND_OBJECT = ASSOCIATEDPLATFORMSHORTNAME\n\n" \ - "OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME\nCLASS = \"1\"\nNUM_VAL = 1\n" \ - "VALUE = \"MODIS\"\nEND_OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME\n\n" \ + 'OBJECT = ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER\nCLASS = "1"\n\n' \ + 'OBJECT = ASSOCIATEDSENSORSHORTNAME\nCLASS = "1"\nNUM_VAL = 1\n' \ + 'VALUE = "MODIS"\nEND_OBJECT = ASSOCIATEDSENSORSHORTNAME\n\n' \ + 'OBJECT = ASSOCIATEDPLATFORMSHORTNAME\nCLASS = "1"\nNUM_VAL = 1\n' \ + 'VALUE = "Terra"\nEND_OBJECT = ASSOCIATEDPLATFORMSHORTNAME\n\n' \ + 'OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME\nCLASS = "1"\nNUM_VAL = 1\n' \ + 'VALUE = "MODIS"\nEND_OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME\n\n' \ "END_OBJECT = ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER\n\n" \ "END_GROUP = ASSOCIATEDPLATFORMINSTRUMENTSENSOR\n\n" collection_metadata = "GROUP = COLLECTIONDESCRIPTIONCLASS\n\nOBJECT = SHORTNAME\nNUM_VAL = 1\n" \ @@ -318,7 +318,7 @@ def _create_struct_metadata(geo_resolution: int) -> str: "GROUP=SWATH_1\n" \ "GROUP=DimensionMap\n" \ "OBJECT=DimensionMap_2\n" \ - f"GeoDimension=\"{geo_dim_factor}*nscans\"\n" \ + f'GeoDimension="{geo_dim_factor}*nscans"\n' \ "END_OBJECT=DimensionMap_2\n" \ "END_GROUP=DimensionMap\n" \ "END_GROUP=SWATH_1\n" \ @@ -413,20 +413,20 @@ def modis_l1b_nasa_1km_mod03_files(modis_l1b_nasa_mod021km_file, modis_l1b_nasa_ def _get_basic_variable_info(var_name: str, resolution: int) -> dict: shape = _shape_for_resolution(resolution) data = np.ones((shape[0], shape[1]), dtype=np.uint16) - row_dim_name = f'Cell_Along_Swath_{resolution}m:modl2' 
- col_dim_name = f'Cell_Across_Swath_{resolution}m:modl2' + row_dim_name = f"Cell_Along_Swath_{resolution}m:modl2" + col_dim_name = f"Cell_Across_Swath_{resolution}m:modl2" return { var_name: { - 'data': data, - 'type': SDC.UINT16, - 'fill_value': 0, - 'attrs': { + "data": data, + "type": SDC.UINT16, + "fill_value": 0, + "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files - 'dim_labels': [row_dim_name, + "dim_labels": [row_dim_name, col_dim_name], - 'valid_range': (0, 32767), - 'scale_factor': 2.0, - 'add_offset': -1.0, + "valid_range": (0, 32767), + "scale_factor": 2.0, + "add_offset": -1.0, }, }, } @@ -437,35 +437,35 @@ def _get_cloud_mask_variable_info(var_name: str, resolution: int) -> dict: shape = _shape_for_resolution(resolution) data = np.zeros((num_bytes, shape[0], shape[1]), dtype=np.int8) byte_dim_name = "Byte_Segment:mod35" - row_dim_name = 'Cell_Along_Swath_1km:mod35' - col_dim_name = 'Cell_Across_Swath_1km:mod35' + row_dim_name = "Cell_Along_Swath_1km:mod35" + col_dim_name = "Cell_Across_Swath_1km:mod35" return { var_name: { - 'data': data, - 'type': SDC.INT8, - 'fill_value': 0, - 'attrs': { + "data": data, + "type": SDC.INT8, + "fill_value": 0, + "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files - 'dim_labels': [byte_dim_name, + "dim_labels": [byte_dim_name, row_dim_name, col_dim_name], - 'valid_range': (0, -1), - 'scale_factor': 1., - 'add_offset': 0., + "valid_range": (0, -1), + "scale_factor": 1., + "add_offset": 0., }, }, - 'Quality_Assurance': { - 'data': np.ones((shape[0], shape[1], 10), dtype=np.int8), - 'type': SDC.INT8, - 'fill_value': 0, - 'attrs': { + "Quality_Assurance": { + "data": np.ones((shape[0], shape[1], 10), dtype=np.int8), + "type": SDC.INT8, + "fill_value": 0, + "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files - 'dim_labels': [row_dim_name, + "dim_labels": [row_dim_name, col_dim_name, - 'Quality_Dimension:mod35'], - 'valid_range': (0, -1), - 'scale_factor': 2., - 'add_offset': -0.5, + "Quality_Dimension:mod35"], + "valid_range": (0, -1), + "scale_factor": 2., + "add_offset": -0.5, }, }, } @@ -474,47 +474,47 @@ def _get_cloud_mask_variable_info(var_name: str, resolution: int) -> dict: def _get_mask_byte1_variable_info() -> dict: shape = _shape_for_resolution(1000) data = np.zeros((shape[0], shape[1]), dtype=np.uint16) - row_dim_name = 'Cell_Along_Swath_1km:mod35' - col_dim_name = 'Cell_Across_Swath_1km:mod35' + row_dim_name = "Cell_Along_Swath_1km:mod35" + col_dim_name = "Cell_Across_Swath_1km:mod35" return { "MODIS_Cloud_Mask": { - 'data': data, - 'type': SDC.UINT16, - 'fill_value': 9999, - 'attrs': { + "data": data, + "type": SDC.UINT16, + "fill_value": 9999, + "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files - 'dim_labels': [row_dim_name, + "dim_labels": [row_dim_name, col_dim_name], - 'valid_range': (0, 4), - 'scale_factor': 2, - 'add_offset': -1, + "valid_range": (0, 4), + "scale_factor": 2, + "add_offset": -1, }, }, "MODIS_Simple_LandSea_Mask": { - 'data': data, - 'type': SDC.UINT16, - 'fill_value': 9999, - 'attrs': { + "data": data, + "type": SDC.UINT16, + "fill_value": 9999, + "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files - 'dim_labels': [row_dim_name, + "dim_labels": [row_dim_name, col_dim_name], - 'valid_range': (0, 4), - 'scale_factor': 2, - 'add_offset': -1, + "valid_range": (0, 4), + 
"scale_factor": 2, + "add_offset": -1, }, }, "MODIS_Snow_Ice_Flag": { - 'data': data, - 'type': SDC.UINT16, - 'fill_value': 9999, - 'attrs': { + "data": data, + "type": SDC.UINT16, + "fill_value": 9999, + "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files - 'dim_labels': [row_dim_name, + "dim_labels": [row_dim_name, col_dim_name], - 'valid_range': (0, 2), - 'scale_factor': 2, - 'add_offset': -1, + "valid_range": (0, 2), + "scale_factor": 2, + "add_offset": -1, }, }, } @@ -523,7 +523,7 @@ def _get_mask_byte1_variable_info() -> dict: def generate_nasa_l2_filename(prefix: str) -> str: """Generate a file name that follows MODIS 35 L2 convention in a temporary directory.""" now = datetime.now() - return f'{prefix}_L2.A{now:%Y%j.%H%M}.061.{now:%Y%j%H%M%S}.hdf' + return f"{prefix}_L2.A{now:%Y%j.%H%M}.061.{now:%Y%j%H%M%S}.hdf" @pytest.fixture(scope="session") diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py index 56e8687844..85048de0af 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py @@ -50,7 +50,7 @@ def _check_shared_metadata(data_arr): assert data_arr.attrs["platform_name"] == "EOS-Terra" assert "rows_per_scan" in data_arr.attrs assert isinstance(data_arr.attrs["rows_per_scan"], int) - assert data_arr.attrs['reader'] == 'modis_l1b' + assert data_arr.attrs["reader"] == "modis_l1b" def _load_and_check_geolocation(scene, resolution, exp_res, exp_shape, has_res, @@ -79,30 +79,30 @@ class TestModisL1b: def test_available_reader(self): """Test that MODIS L1b reader is available.""" - assert 'modis_l1b' in available_readers() + assert "modis_l1b" in available_readers() @pytest.mark.parametrize( - ('input_files', 'expected_names', 'expected_data_res', 'expected_geo_res'), + ("input_files", "expected_names", "expected_data_res", "expected_geo_res"), [ - [lazy_fixture('modis_l1b_nasa_mod021km_file'), + [lazy_fixture("modis_l1b_nasa_mod021km_file"), AVAILABLE_1KM_PRODUCT_NAMES + AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, [1000], [5000, 1000]], - [lazy_fixture('modis_l1b_imapp_1000m_file'), + [lazy_fixture("modis_l1b_imapp_1000m_file"), AVAILABLE_1KM_PRODUCT_NAMES + AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, [1000], [5000, 1000]], - [lazy_fixture('modis_l1b_nasa_mod02hkm_file'), + [lazy_fixture("modis_l1b_nasa_mod02hkm_file"), AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, [500], [1000, 500, 250]], - [lazy_fixture('modis_l1b_nasa_mod02qkm_file'), + [lazy_fixture("modis_l1b_nasa_mod02qkm_file"), AVAILABLE_QKM_PRODUCT_NAMES, [250], [1000, 500, 250]], ] ) def test_scene_available_datasets(self, input_files, expected_names, expected_data_res, expected_geo_res): """Test that datasets are available.""" - scene = Scene(reader='modis_l1b', filenames=input_files) + scene = Scene(reader="modis_l1b", filenames=input_files) available_datasets = scene.available_dataset_names() assert len(available_datasets) > 0 - assert 'longitude' in available_datasets - assert 'latitude' in available_datasets + assert "longitude" in available_datasets + assert "latitude" in available_datasets for chan_name in expected_names: assert chan_name in available_datasets @@ -111,8 +111,8 @@ def test_scene_available_datasets(self, input_files, expected_names, expected_da available_geos = {x: [] for x in expected_geo_res} # Make sure that every resolution from the reader is what we expect for 
data_id in available_data_ids: - res = data_id['resolution'] - if data_id['name'] in ['longitude', 'latitude']: + res = data_id["resolution"] + if data_id["name"] in ["longitude", "latitude"]: assert res in expected_geo_res available_geos[res].append(data_id) else: @@ -126,23 +126,23 @@ def test_scene_available_datasets(self, input_files, expected_names, expected_da assert avail_id, f"Missing geo datasets for geo resolution {exp_res}" @pytest.mark.parametrize( - ('input_files', 'has_5km', 'has_500', 'has_250', 'default_res'), + ("input_files", "has_5km", "has_500", "has_250", "default_res"), [ - [lazy_fixture('modis_l1b_nasa_mod021km_file'), + [lazy_fixture("modis_l1b_nasa_mod021km_file"), True, False, False, 1000], - [lazy_fixture('modis_l1b_imapp_1000m_file'), + [lazy_fixture("modis_l1b_imapp_1000m_file"), True, False, False, 1000], - [lazy_fixture('modis_l1b_nasa_mod02hkm_file'), + [lazy_fixture("modis_l1b_nasa_mod02hkm_file"), False, True, True, 250], - [lazy_fixture('modis_l1b_nasa_mod02qkm_file'), + [lazy_fixture("modis_l1b_nasa_mod02qkm_file"), False, True, True, 250], - [lazy_fixture('modis_l1b_nasa_1km_mod03_files'), + [lazy_fixture("modis_l1b_nasa_1km_mod03_files"), True, True, True, 250], ] ) def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, default_res): """Test that longitude and latitude datasets are loaded correctly.""" - scene = Scene(reader='modis_l1b', filenames=input_files) + scene = Scene(reader="modis_l1b", filenames=input_files) shape_5km = _shape_for_resolution(5000) shape_500m = _shape_for_resolution(500) shape_250m = _shape_for_resolution(250) @@ -155,35 +155,35 @@ def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, d def test_load_sat_zenith_angle(self, modis_l1b_nasa_mod021km_file): """Test loading satellite zenith angle band.""" - scene = Scene(reader='modis_l1b', filenames=modis_l1b_nasa_mod021km_file) - dataset_name = 'satellite_zenith_angle' + scene = Scene(reader="modis_l1b", filenames=modis_l1b_nasa_mod021km_file) + dataset_name = "satellite_zenith_angle" scene.load([dataset_name]) dataset = scene[dataset_name] assert dataset.shape == _shape_for_resolution(1000) - assert dataset.attrs['resolution'] == 1000 + assert dataset.attrs["resolution"] == 1000 _check_shared_metadata(dataset) def test_load_vis(self, modis_l1b_nasa_mod021km_file): """Test loading visible band.""" - scene = Scene(reader='modis_l1b', filenames=modis_l1b_nasa_mod021km_file) - dataset_name = '1' + scene = Scene(reader="modis_l1b", filenames=modis_l1b_nasa_mod021km_file) + dataset_name = "1" scene.load([dataset_name]) dataset = scene[dataset_name] assert dataset[0, 0] == 300.0 assert dataset.shape == _shape_for_resolution(1000) - assert dataset.attrs['resolution'] == 1000 + assert dataset.attrs["resolution"] == 1000 _check_shared_metadata(dataset) @pytest.mark.parametrize("mask_saturated", [False, True]) def test_load_vis_saturation(self, mask_saturated, modis_l1b_nasa_mod021km_file): """Test loading a visible band with saturation handling.""" - scene = Scene(reader='modis_l1b', filenames=modis_l1b_nasa_mod021km_file, + scene = Scene(reader="modis_l1b", filenames=modis_l1b_nasa_mod021km_file, reader_kwargs={"mask_saturated": mask_saturated}) - dataset_name = '2' + dataset_name = "2" scene.load([dataset_name]) dataset = scene[dataset_name] assert dataset.shape == _shape_for_resolution(1000) - assert dataset.attrs['resolution'] == 1000 + assert dataset.attrs["resolution"] == 1000 _check_shared_metadata(dataset) # check saturation fill values diff --git 
a/satpy/tests/reader_tests/modis_tests/test_modis_l2.py b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py index 222f365d87..99c0890d30 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l2.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py @@ -46,11 +46,11 @@ def _check_shared_metadata(data_arr, expect_area=False): assert data_arr.attrs["platform_name"] == "EOS-Terra" assert "rows_per_scan" in data_arr.attrs assert isinstance(data_arr.attrs["rows_per_scan"], int) - assert data_arr.attrs['reader'] == 'modis_l2' + assert data_arr.attrs["reader"] == "modis_l2" if expect_area: - assert data_arr.attrs.get('area') is not None + assert data_arr.attrs.get("area") is not None else: - assert 'area' not in data_arr.attrs + assert "area" not in data_arr.attrs class TestModisL2: @@ -58,28 +58,28 @@ class TestModisL2: def test_available_reader(self): """Test that MODIS L2 reader is available.""" - assert 'modis_l2' in available_readers() + assert "modis_l2" in available_readers() def test_scene_available_datasets(self, modis_l2_nasa_mod35_file): """Test that datasets are available.""" - scene = Scene(reader='modis_l2', filenames=modis_l2_nasa_mod35_file) + scene = Scene(reader="modis_l2", filenames=modis_l2_nasa_mod35_file) available_datasets = scene.all_dataset_names() assert len(available_datasets) > 0 - assert 'cloud_mask' in available_datasets - assert 'latitude' in available_datasets - assert 'longitude' in available_datasets + assert "cloud_mask" in available_datasets + assert "latitude" in available_datasets + assert "longitude" in available_datasets @pytest.mark.parametrize( - ('input_files', 'has_5km', 'has_500', 'has_250', 'default_res'), + ("input_files", "has_5km", "has_500", "has_250", "default_res"), [ - [lazy_fixture('modis_l2_nasa_mod35_file'), + [lazy_fixture("modis_l2_nasa_mod35_file"), True, False, False, 1000], ] ) def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, default_res): """Test that longitude and latitude datasets are loaded correctly.""" from .test_modis_l1b import _load_and_check_geolocation - scene = Scene(reader='modis_l2', filenames=input_files) + scene = Scene(reader="modis_l2", filenames=input_files) shape_5km = _shape_for_resolution(5000) shape_500m = _shape_for_resolution(500) shape_250m = _shape_for_resolution(250) @@ -96,8 +96,8 @@ def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, d def test_load_quality_assurance(self, modis_l2_nasa_mod35_file): """Test loading quality assurance.""" - scene = Scene(reader='modis_l2', filenames=modis_l2_nasa_mod35_file) - dataset_name = 'quality_assurance' + scene = Scene(reader="modis_l2", filenames=modis_l2_nasa_mod35_file) + dataset_name = "quality_assurance" scene.load([dataset_name]) quality_assurance_id = make_dataid(name=dataset_name, resolution=1000) assert quality_assurance_id in scene @@ -106,19 +106,19 @@ def test_load_quality_assurance(self, modis_l2_nasa_mod35_file): _check_shared_metadata(quality_assurance, expect_area=True) @pytest.mark.parametrize( - ('input_files', 'loadables', 'request_resolution', 'exp_resolution', 'exp_area'), + ("input_files", "loadables", "request_resolution", "exp_resolution", "exp_area"), [ - [lazy_fixture('modis_l2_nasa_mod35_mod03_files'), + [lazy_fixture("modis_l2_nasa_mod35_mod03_files"), ["cloud_mask"], 1000, 1000, True], - [lazy_fixture('modis_l2_imapp_mask_byte1_geo_files'), + [lazy_fixture("modis_l2_imapp_mask_byte1_geo_files"), ["cloud_mask", "land_sea_mask", "snow_ice_mask"], None, 1000, True], ] 
) def test_load_category_dataset(self, input_files, loadables, request_resolution, exp_resolution, exp_area): """Test loading category products.""" - scene = Scene(reader='modis_l2', filenames=input_files) + scene = Scene(reader="modis_l2", filenames=input_files) kwargs = {"resolution": request_resolution} if request_resolution is not None else {} scene.load(loadables, **kwargs) for ds_name in loadables: @@ -129,23 +129,23 @@ def test_load_category_dataset(self, input_files, loadables, request_resolution, cat_data_arr = cat_data_arr.compute() assert cat_data_arr.shape == _shape_for_resolution(exp_resolution) assert cat_data_arr.values[0, 0] == 0.0 - assert cat_data_arr.attrs.get('resolution') == exp_resolution + assert cat_data_arr.attrs.get("resolution") == exp_resolution # mask variables should be integers assert np.issubdtype(cat_data_arr.dtype, np.integer) - assert cat_data_arr.attrs.get('_FillValue') is not None + assert cat_data_arr.attrs.get("_FillValue") is not None _check_shared_metadata(cat_data_arr, expect_area=exp_area) @pytest.mark.parametrize( - ('input_files', 'exp_area'), + ("input_files", "exp_area"), [ - [lazy_fixture('modis_l2_nasa_mod35_file'), False], - [lazy_fixture('modis_l2_nasa_mod35_mod03_files'), True], + [lazy_fixture("modis_l2_nasa_mod35_file"), False], + [lazy_fixture("modis_l2_nasa_mod35_mod03_files"), True], ] ) def test_load_250m_cloud_mask_dataset(self, input_files, exp_area): """Test loading 250m cloud mask.""" - scene = Scene(reader='modis_l2', filenames=input_files) - dataset_name = 'cloud_mask' + scene = Scene(reader="modis_l2", filenames=input_files) + dataset_name = "cloud_mask" scene.load([dataset_name], resolution=250) cloud_mask_id = make_dataid(name=dataset_name, resolution=250) assert cloud_mask_id in scene @@ -156,21 +156,21 @@ def test_load_250m_cloud_mask_dataset(self, input_files, exp_area): assert cloud_mask.values[0, 0] == 0.0 # mask variables should be integers assert np.issubdtype(cloud_mask.dtype, np.integer) - assert cloud_mask.attrs.get('_FillValue') is not None + assert cloud_mask.attrs.get("_FillValue") is not None _check_shared_metadata(cloud_mask, expect_area=exp_area) @pytest.mark.parametrize( - ('input_files', 'loadables', 'exp_resolution', 'exp_area', 'exp_value'), + ("input_files", "loadables", "exp_resolution", "exp_area", "exp_value"), [ - [lazy_fixture('modis_l2_nasa_mod06_file'), ["surface_pressure"], 5000, True, 4.0], + [lazy_fixture("modis_l2_nasa_mod06_file"), ["surface_pressure"], 5000, True, 4.0], # snow mask is considered a category product, factor/offset ignored - [lazy_fixture('modis_l2_imapp_snowmask_file'), ["snow_mask"], 1000, False, 1.0], - [lazy_fixture('modis_l2_imapp_snowmask_geo_files'), ["snow_mask"], 1000, True, 1.0], + [lazy_fixture("modis_l2_imapp_snowmask_file"), ["snow_mask"], 1000, False, 1.0], + [lazy_fixture("modis_l2_imapp_snowmask_geo_files"), ["snow_mask"], 1000, True, 1.0], ] ) def test_load_l2_dataset(self, input_files, loadables, exp_resolution, exp_area, exp_value): """Load and check an L2 variable.""" - scene = Scene(reader='modis_l2', filenames=input_files) + scene = Scene(reader="modis_l2", filenames=input_files) scene.load(loadables) for ds_name in loadables: assert ds_name in scene @@ -179,5 +179,5 @@ def test_load_l2_dataset(self, input_files, loadables, exp_resolution, exp_area, data_arr = data_arr.compute() assert data_arr.values[0, 0] == exp_value assert data_arr.shape == _shape_for_resolution(exp_resolution) - assert data_arr.attrs.get('resolution') == exp_resolution + assert 
data_arr.attrs.get("resolution") == exp_resolution _check_shared_metadata(data_arr, expect_area=exp_area) diff --git a/satpy/tests/reader_tests/test_aapp_l1b.py b/satpy/tests/reader_tests/test_aapp_l1b.py index c0f84c5a63..e9414ee521 100644 --- a/satpy/tests/reader_tests/test_aapp_l1b.py +++ b/satpy/tests/reader_tests/test_aapp_l1b.py @@ -36,26 +36,26 @@ class TestAAPPL1BAllChannelsPresent(unittest.TestCase): def setUp(self): """Set up the test case.""" self._header = np.zeros(1, dtype=_HEADERTYPE) - self._header['satid'][0] = 13 - self._header['radtempcnv'][0] = [[267194, -171669, 1002811], + self._header["satid"][0] = 13 + self._header["radtempcnv"][0] = [[267194, -171669, 1002811], [930310, -59084, 1001600], [828600, -37854, 1001147]] # first 3b is off, 3a is on - self._header['inststat1'][0] = 0b1111011100000000 + self._header["inststat1"][0] = 0b1111011100000000 # switch 3a off at position 1 - self._header['statchrecnb'][0] = 1 + self._header["statchrecnb"][0] = 1 # 3b is on, 3a is off - self._header['inststat2'][0] = 0b1111101100000000 + self._header["inststat2"][0] = 0b1111101100000000 self._data = np.zeros(3, dtype=_SCANTYPE) - self._data['scnlinyr'][:] = 2020 - self._data['scnlindy'][:] = 8 - self._data['scnlintime'][0] = 30195225 - self._data['scnlintime'][1] = 30195389 - self._data['scnlintime'][2] = 30195556 - self._data['scnlinbit'][0] = -16383 - self._data['scnlinbit'][1] = -16383 - self._data['scnlinbit'][2] = -16384 + self._data["scnlinyr"][:] = 2020 + self._data["scnlindy"][:] = 8 + self._data["scnlintime"][0] = 30195225 + self._data["scnlintime"][1] = 30195389 + self._data["scnlintime"][2] = 30195556 + self._data["scnlinbit"][0] = -16383 + self._data["scnlinbit"][1] = -16383 + self._data["scnlinbit"][2] = -16384 calvis = np.array([[[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [543489984, -21941870, 1592440064, -545027008, 499]], @@ -65,8 +65,8 @@ def setUp(self): [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [257550000, -10449420, 1812019968, -785690304, 499]]]) - self._data['calvis'][:] = calvis - self._data['calir'] = [[[[0, -2675, 2655265], + self._data["calvis"][:] = calvis + self._data["calir"] = [[[[0, -2675, 2655265], [0, 0, 0]], [[33605, -260786, 226818992], [0, 0, 0]], @@ -84,13 +84,13 @@ def setUp(self): [0, 0, 0]], [[13871, -249531, 234652640], [0, 0, 0]]]] - self._data['hrpt'] = np.ones_like(self._data['hrpt']) * (np.arange(2048) // 2)[np.newaxis, :, np.newaxis] + self._data["hrpt"] = np.ones_like(self._data["hrpt"]) * (np.arange(2048) // 2)[np.newaxis, :, np.newaxis] - self.filename_info = {'platform_shortname': 'metop03', 'start_time': datetime.datetime(2020, 1, 8, 8, 19), - 'orbit_number': 6071} - self.filetype_info = {'file_reader': AVHRRAAPPL1BFile, + self.filename_info = {"platform_shortname": "metop03", "start_time": datetime.datetime(2020, 1, 8, 8, 19), + "orbit_number": 6071} + self.filetype_info = {"file_reader": AVHRRAAPPL1BFile, 'file_patterns': ['hrpt_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1b'], # noqa - 'file_type': 'avhrr_aapp_l1b'} + "file_type": "avhrr_aapp_l1b"} def test_read(self): """Test the reading.""" @@ -103,22 +103,22 @@ def test_read(self): info = {} mins = [] maxs = [] - for name in ['1', '2', '3a']: - key = make_dataid(name=name, calibration='reflectance') + for name in ["1", "2", "3a"]: + key = make_dataid(name=name, calibration="reflectance") res = fh.get_dataset(key, info) assert res.min() == 0 assert res.max() >= 100 mins.append(res.min().values) maxs.append(res.max().values) - if name == '3a': + if name == "3a": assert 
np.all(np.isnan(res[:2, :])) - for name in ['3b', '4', '5']: - key = make_dataid(name=name, calibration='reflectance') + for name in ["3b", "4", "5"]: + key = make_dataid(name=name, calibration="reflectance") res = fh.get_dataset(key, info) mins.append(res.min().values) maxs.append(res.max().values) - if name == '3b': + if name == "3b": assert np.all(np.isnan(res[2:, :])) np.testing.assert_allclose(mins, [0., 0., 0., 204.10106939, 103.23477235, 106.42609758]) @@ -134,7 +134,7 @@ def test_angles(self): fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) info = {} - key = make_dataid(name='solar_zenith_angle') + key = make_dataid(name="solar_zenith_angle") res = fh.get_dataset(key, info) assert np.all(res == 0) @@ -147,10 +147,10 @@ def test_navigation(self): fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) info = {} - key = make_dataid(name='longitude') + key = make_dataid(name="longitude") res = fh.get_dataset(key, info) assert np.all(res == 0) - key = make_dataid(name='latitude') + key = make_dataid(name="latitude") res = fh.get_dataset(key, info) assert np.all(res == 0) @@ -286,25 +286,25 @@ class TestAAPPL1BChannel3AMissing(unittest.TestCase): def setUp(self): """Set up the test case.""" self._header = np.zeros(1, dtype=_HEADERTYPE) - self._header['satid'][0] = 13 - self._header['radtempcnv'][0] = [[267194, -171669, 1002811], + self._header["satid"][0] = 13 + self._header["radtempcnv"][0] = [[267194, -171669, 1002811], [930310, -59084, 1001600], [828600, -37854, 1001147]] # first 3a is off, 3b is on - self._header['inststat1'][0] = 0b1111011100000000 + self._header["inststat1"][0] = 0b1111011100000000 # valid for the whole pass - self._header['statchrecnb'][0] = 0 - self._header['inststat2'][0] = 0b0 + self._header["statchrecnb"][0] = 0 + self._header["inststat2"][0] = 0b0 self._data = np.zeros(3, dtype=_SCANTYPE) - self._data['scnlinyr'][:] = 2020 - self._data['scnlindy'][:] = 8 - self._data['scnlintime'][0] = 30195225 - self._data['scnlintime'][1] = 30195389 - self._data['scnlintime'][2] = 30195556 - self._data['scnlinbit'][0] = -16383 - self._data['scnlinbit'][1] = -16383 - self._data['scnlinbit'][2] = -16383 + self._data["scnlinyr"][:] = 2020 + self._data["scnlindy"][:] = 8 + self._data["scnlintime"][0] = 30195225 + self._data["scnlintime"][1] = 30195389 + self._data["scnlintime"][2] = 30195556 + self._data["scnlinbit"][0] = -16383 + self._data["scnlinbit"][1] = -16383 + self._data["scnlinbit"][2] = -16383 calvis = np.array([[[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [543489984, -21941870, 1592440064, -545027008, 499]], @@ -314,8 +314,8 @@ def setUp(self): [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [257550000, -10449420, 1812019968, -785690304, 499]]]) - self._data['calvis'][:] = calvis - self._data['calir'] = [[[[0, -2675, 2655265], + self._data["calvis"][:] = calvis + self._data["calir"] = [[[[0, -2675, 2655265], [0, 0, 0]], [[33605, -260786, 226818992], [0, 0, 0]], @@ -333,15 +333,15 @@ def setUp(self): [0, 0, 0]], [[13871, -249531, 234652640], [0, 0, 0]]]] - self._data['hrpt'] = np.ones_like(self._data['hrpt']) * (np.arange(2048) // 2)[np.newaxis, :, np.newaxis] + self._data["hrpt"] = np.ones_like(self._data["hrpt"]) * (np.arange(2048) // 2)[np.newaxis, :, np.newaxis] - self.filename_info = {'platform_shortname': 'metop03', 'start_time': datetime.datetime(2020, 1, 8, 8, 19), - 'orbit_number': 6071} - self.filetype_info = {'file_reader': AVHRRAAPPL1BFile, - 'file_patterns': [ - 'hrpt_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1b'], 
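The AAPP fixtures in these hunks fake an entire level-1b file by filling numpy structured arrays and dumping them with tofile, so the real AVHRRAAPPL1BFile parser runs against bytes the test fully controls. Below is a minimal, self-contained sketch of that round trip; the toy record layout is a stand-in for illustration, not the real _HEADERTYPE from satpy.readers.aapp_l1b.

    # Sketch of the fake-file pattern: fill a structured array, write it
    # with tofile, and read it back. The dtype here is illustrative only.
    import os
    import tempfile

    import numpy as np

    toy_header = np.dtype([("satid", "<i2"), ("inststat1", "<u2")])

    header = np.zeros(1, dtype=toy_header)
    header["satid"][0] = 13
    # one bit per channel; the 3a/3b bits are what the tests above toggle
    header["inststat1"][0] = 0b1111011100000000

    with tempfile.NamedTemporaryFile(mode="wb", delete=False) as tmpfile:
        header.tofile(tmpfile)

    round_tripped = np.fromfile(tmpfile.name, dtype=toy_header, count=1)
    assert round_tripped["inststat1"][0] == header["inststat1"][0]
    os.remove(tmpfile.name)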
+ self.filename_info = {"platform_shortname": "metop03", "start_time": datetime.datetime(2020, 1, 8, 8, 19), + "orbit_number": 6071} + self.filetype_info = {"file_reader": AVHRRAAPPL1BFile, + "file_patterns": [ + "hrpt_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1b"], # noqa - 'file_type': 'avhrr_aapp_l1b'} + "file_type": "avhrr_aapp_l1b"} def test_loading_missing_channels_returns_none(self): """Test that loading a missing channel raises a keyerror.""" @@ -352,7 +352,7 @@ def test_loading_missing_channels_returns_none(self): fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) info = {} - key = make_dataid(name='3a', calibration='reflectance') + key = make_dataid(name="3a", calibration="reflectance") assert fh.get_dataset(key, info) is None def test_available_datasets_miss_3a(self): @@ -363,16 +363,16 @@ def test_available_datasets_miss_3a(self): self._data.tofile(tmpfile) fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) - configured_datasets = [[None, {'name': '1'}], - [None, {'name': '2'}], - [None, {'name': '3a'}], - [None, {'name': '3b'}], - [None, {'name': '4'}], - [None, {'name': '5'}], + configured_datasets = [[None, {"name": "1"}], + [None, {"name": "2"}], + [None, {"name": "3a"}], + [None, {"name": "3b"}], + [None, {"name": "4"}], + [None, {"name": "5"}], ] available_datasets = fh.available_datasets(configured_datasets) for status, mda in available_datasets: - if mda['name'] == '3a': + if mda["name"] == "3a": assert status is False else: assert status is True @@ -397,9 +397,9 @@ def setUp(self): [[18214, -200932, 182150896], [0, 0, 0]], [[6761, -200105, 192092496], [0, 0, 0]]], dtype=" 0 def test_is_valid_time(self): """Test that valid times are correctly identified.""" - assert AHIHSDFileHandler._is_valid_timeline(FAKE_BASIC_INFO['observation_timeline']) - assert not AHIHSDFileHandler._is_valid_timeline('65526') + assert AHIHSDFileHandler._is_valid_timeline(FAKE_BASIC_INFO["observation_timeline"]) + assert not AHIHSDFileHandler._is_valid_timeline("65526") def test_time_rounding(self): """Test rounding of the nominal time.""" mocker = mock.MagicMock() in_date = datetime(2020, 1, 1, 12, 0, 0) - with mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._is_valid_timeline', mocker): + with mock.patch("satpy.readers.ahi_hsd.AHIHSDFileHandler._is_valid_timeline", mocker): with _fake_hsd_handler() as fh: mocker.return_value = True assert fh._modify_observation_time_for_nominal(in_date) == datetime(2020, 1, 1, 3, 0, 0) @@ -468,31 +468,31 @@ def test_time_rounding(self): class TestAHICalibration(unittest.TestCase): """Test case for various AHI calibration types.""" - @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler.__init__', + @mock.patch("satpy.readers.ahi_hsd.AHIHSDFileHandler.__init__", return_value=None) def setUp(self, *mocks): """Create fake data for testing.""" self.def_cali = [-0.0037, 15.20] self.upd_cali = [-0.0074, 30.40] self.bad_cali = [0.0, 0.0] - fh = AHIHSDFileHandler(filetype_info={'file_type': 'hsd_b01'}) - fh.calib_mode = 'NOMINAL' + fh = AHIHSDFileHandler(filetype_info={"file_type": "hsd_b01"}) + fh.calib_mode = "NOMINAL" fh.user_calibration = None fh.is_zipped = False fh._header = { - 'block5': {'band_number': [5], - 'gain_count2rad_conversion': [self.def_cali[0]], - 'offset_count2rad_conversion': [self.def_cali[1]], - 'central_wave_length': [10.4073], }, - 'calibration': {'coeff_rad2albedo_conversion': [0.0019255], - 'speed_of_light': [299792458.0], - 'planck_constant': [6.62606957e-34], - 
'boltzmann_constant': [1.3806488e-23], - 'c0_rad2tb_conversion': [-0.116127314574], - 'c1_rad2tb_conversion': [1.00099153832], - 'c2_rad2tb_conversion': [-1.76961091571e-06], - 'cali_gain_count2rad_conversion': [self.upd_cali[0]], - 'cali_offset_count2rad_conversion': [self.upd_cali[1]]}, + "block5": {"band_number": [5], + "gain_count2rad_conversion": [self.def_cali[0]], + "offset_count2rad_conversion": [self.def_cali[1]], + "central_wave_length": [10.4073], }, + "calibration": {"coeff_rad2albedo_conversion": [0.0019255], + "speed_of_light": [299792458.0], + "planck_constant": [6.62606957e-34], + "boltzmann_constant": [1.3806488e-23], + "c0_rad2tb_conversion": [-0.116127314574], + "c1_rad2tb_conversion": [1.00099153832], + "c2_rad2tb_conversion": [-1.76961091571e-06], + "cali_gain_count2rad_conversion": [self.upd_cali[0]], + "cali_offset_count2rad_conversion": [self.upd_cali[1]]}, } self.counts = da.array(np.array([[0., 1000.], @@ -504,56 +504,56 @@ def test_default_calibrate(self, *mocks): self.setUp() # Counts self.assertEqual(self.fh.calibrate(data=123, - calibration='counts'), + calibration="counts"), 123) # Radiance rad_exp = np.array([[15.2, 11.5], [7.8, -3.3]]) rad = self.fh.calibrate(data=self.counts, - calibration='radiance') + calibration="radiance") self.assertTrue(np.allclose(rad, rad_exp)) # Brightness Temperature bt_exp = np.array([[330.978979, 310.524688], [285.845017, np.nan]]) bt = self.fh.calibrate(data=self.counts, - calibration='brightness_temperature') + calibration="brightness_temperature") np.testing.assert_allclose(bt, bt_exp) # Reflectance refl_exp = np.array([[2.92676, 2.214325], [1.50189, 0.]]) refl = self.fh.calibrate(data=self.counts, - calibration='reflectance') + calibration="reflectance") self.assertTrue(np.allclose(refl, refl_exp)) def test_updated_calibrate(self): """Test updated in-file calibration modes.""" # Standard operation - self.fh.calib_mode = 'UPDATE' + self.fh.calib_mode = "UPDATE" rad_exp = np.array([[30.4, 23.0], [15.6, -6.6]]) - rad = self.fh.calibrate(data=self.counts, calibration='radiance') + rad = self.fh.calibrate(data=self.counts, calibration="radiance") self.assertTrue(np.allclose(rad, rad_exp)) # Case for no updated calibration available (older data) self.fh._header = { - 'block5': {'band_number': [5], - 'gain_count2rad_conversion': [self.def_cali[0]], - 'offset_count2rad_conversion': [self.def_cali[1]], - 'central_wave_length': [10.4073], }, - 'calibration': {'coeff_rad2albedo_conversion': [0.0019255], - 'speed_of_light': [299792458.0], - 'planck_constant': [6.62606957e-34], - 'boltzmann_constant': [1.3806488e-23], - 'c0_rad2tb_conversion': [-0.116127314574], - 'c1_rad2tb_conversion': [1.00099153832], - 'c2_rad2tb_conversion': [-1.76961091571e-06], - 'cali_gain_count2rad_conversion': [self.bad_cali[0]], - 'cali_offset_count2rad_conversion': [self.bad_cali[1]]}, + "block5": {"band_number": [5], + "gain_count2rad_conversion": [self.def_cali[0]], + "offset_count2rad_conversion": [self.def_cali[1]], + "central_wave_length": [10.4073], }, + "calibration": {"coeff_rad2albedo_conversion": [0.0019255], + "speed_of_light": [299792458.0], + "planck_constant": [6.62606957e-34], + "boltzmann_constant": [1.3806488e-23], + "c0_rad2tb_conversion": [-0.116127314574], + "c1_rad2tb_conversion": [1.00099153832], + "c2_rad2tb_conversion": [-1.76961091571e-06], + "cali_gain_count2rad_conversion": [self.bad_cali[0]], + "cali_offset_count2rad_conversion": [self.bad_cali[1]]}, } - rad = self.fh.calibrate(data=self.counts, calibration='radiance') + rad = 
self.fh.calibrate(data=self.counts, calibration="radiance") rad_exp = np.array([[15.2, 11.5], [7.8, -3.3]]) self.assertTrue(np.allclose(rad, rad_exp)) @@ -561,20 +561,20 @@ def test_updated_calibrate(self): def test_user_calibration(self): """Test user-defined calibration modes.""" # This is for radiance correction - self.fh.user_calibration = {'B13': {'slope': 0.95, - 'offset': -0.1}} - self.fh.band_name = 'B13' - rad = self.fh.calibrate(data=self.counts, calibration='radiance').compute() + self.fh.user_calibration = {"B13": {"slope": 0.95, + "offset": -0.1}} + self.fh.band_name = "B13" + rad = self.fh.calibrate(data=self.counts, calibration="radiance").compute() rad_exp = np.array([[16.10526316, 12.21052632], [8.31578947, -3.36842105]]) self.assertTrue(np.allclose(rad, rad_exp)) # This is for DN calibration - self.fh.user_calibration = {'B13': {'slope': -0.0032, - 'offset': 15.20}, - 'type': 'DN'} - self.fh.band_name = 'B13' - rad = self.fh.calibrate(data=self.counts, calibration='radiance').compute() + self.fh.user_calibration = {"B13": {"slope": -0.0032, + "offset": 15.20}, + "type": "DN"} + self.fh.band_name = "B13" + rad = self.fh.calibrate(data=self.counts, calibration="radiance").compute() rad_exp = np.array([[15.2, 12.], [8.8, -0.8]]) self.assertTrue(np.allclose(rad, rad_exp)) @@ -584,10 +584,10 @@ def test_user_calibration(self): def _fake_hsd_handler(fh_kwargs=None): """Create a test file handler.""" m = mock.mock_open() - with mock.patch('satpy.readers.ahi_hsd.np.fromfile', _custom_fromfile), \ - mock.patch('satpy.readers.ahi_hsd.unzip_file', mock.MagicMock(side_effect=_new_unzip)), \ - mock.patch('satpy.readers.ahi_hsd.open', m, create=True): - in_fname = 'test_file.bz2' + with mock.patch("satpy.readers.ahi_hsd.np.fromfile", _custom_fromfile), \ + mock.patch("satpy.readers.ahi_hsd.unzip_file", mock.MagicMock(side_effect=_new_unzip)), \ + mock.patch("satpy.readers.ahi_hsd.open", m, create=True): + in_fname = "test_file.bz2" fh = _create_fake_file_handler(in_fname, fh_kwargs=fh_kwargs) yield fh @@ -639,14 +639,14 @@ def _custom_fromfile(*args, **kwargs): def _create_fake_file_handler(in_fname, filename_info=None, filetype_info=None, fh_kwargs=None): if filename_info is None: - filename_info = {'segment': 8, 'total_segments': 10} + filename_info = {"segment": 8, "total_segments": 10} if filetype_info is None: - filetype_info = {'file_type': 'hsd_b01'} + filetype_info = {"file_type": "hsd_b01"} if fh_kwargs is None: fh_kwargs = {} fh = AHIHSDFileHandler(in_fname, filename_info, filetype_info, **fh_kwargs) # Check that the filename is altered and 2 digit segment prefix added for bz2 format files assert in_fname != fh.filename - assert str(filename_info['segment']).zfill(2) == fh.filename[0:2] + assert str(filename_info["segment"]).zfill(2) == fh.filename[0:2] return fh diff --git a/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py b/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py index e4ef6ec72f..9d1302ef41 100644 --- a/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py +++ b/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py @@ -35,65 +35,65 @@ class TestAHIGriddedArea(unittest.TestCase): def setUp(self): """Create fake data for testing.""" - self.FULLDISK_SIZES = {0.005: {'x_size': 24000, - 'y_size': 24000}, - 0.01: {'x_size': 12000, - 'y_size': 12000}, - 0.02: {'x_size': 6000, - 'y_size': 6000}} + self.FULLDISK_SIZES = {0.005: {"x_size": 24000, + "y_size": 24000}, + 0.01: {"x_size": 12000, + "y_size": 12000}, + 0.02: {"x_size": 6000, + "y_size": 6000}} 
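The three grid sizes just registered are not arbitrary: the gridded Himawari full disk spans 120 degrees in both longitude and latitude (the AHI_FULLDISK_EXTENT defined on the next line), so each pixel count is simply the angular span divided by the grid spacing. A quick sanity check:

    # extent = lon_min, lat_min, lon_max, lat_max; 85E..205E and 60S..60N
    extent = [85., -60., 205., 60.]
    span = extent[2] - extent[0]  # 120 degrees, same north-south

    for spacing, expected in [(0.005, 24000), (0.01, 12000), (0.02, 6000)]:
        assert round(span / spacing) == expected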
self.AHI_FULLDISK_EXTENT = [85., -60., 205., 60.] @staticmethod - def make_fh(filetype, area='fld'): + def make_fh(filetype, area="fld"): """Create a test file handler.""" m = mock.mock_open() - with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): - fh = AHIGriddedFileHandler('somefile', - {'area': area}, - filetype_info={'file_type': filetype}) + with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True): + fh = AHIGriddedFileHandler("somefile", + {"area": area}, + filetype_info={"file_type": filetype}) return fh def test_low_res(self): """Check size of the low resolution (2km) grid.""" - tmp_fh = self.make_fh('tir.01') - self.assertEqual(self.FULLDISK_SIZES[0.02]['x_size'], tmp_fh.ncols) - self.assertEqual(self.FULLDISK_SIZES[0.02]['y_size'], tmp_fh.nlines) + tmp_fh = self.make_fh("tir.01") + self.assertEqual(self.FULLDISK_SIZES[0.02]["x_size"], tmp_fh.ncols) + self.assertEqual(self.FULLDISK_SIZES[0.02]["y_size"], tmp_fh.nlines) def test_med_res(self): """Check size of the low resolution (1km) grid.""" - tmp_fh = self.make_fh('vis.02') - self.assertEqual(self.FULLDISK_SIZES[0.01]['x_size'], tmp_fh.ncols) - self.assertEqual(self.FULLDISK_SIZES[0.01]['y_size'], tmp_fh.nlines) + tmp_fh = self.make_fh("vis.02") + self.assertEqual(self.FULLDISK_SIZES[0.01]["x_size"], tmp_fh.ncols) + self.assertEqual(self.FULLDISK_SIZES[0.01]["y_size"], tmp_fh.nlines) def test_hi_res(self): """Check size of the low resolution (0.5km) grid.""" - tmp_fh = self.make_fh('ext.01') - self.assertEqual(self.FULLDISK_SIZES[0.005]['x_size'], tmp_fh.ncols) - self.assertEqual(self.FULLDISK_SIZES[0.005]['y_size'], tmp_fh.nlines) + tmp_fh = self.make_fh("ext.01") + self.assertEqual(self.FULLDISK_SIZES[0.005]["x_size"], tmp_fh.ncols) + self.assertEqual(self.FULLDISK_SIZES[0.005]["y_size"], tmp_fh.nlines) def test_area_def(self): """Check that a valid full disk area is produced.""" - good_area = AreaDefinition('gridded_himawari', - 'A gridded Himawari area', - 'longlat', - 'EPSG:4326', - self.FULLDISK_SIZES[0.01]['x_size'], - self.FULLDISK_SIZES[0.01]['y_size'], + good_area = AreaDefinition("gridded_himawari", + "A gridded Himawari area", + "longlat", + "EPSG:4326", + self.FULLDISK_SIZES[0.01]["x_size"], + self.FULLDISK_SIZES[0.01]["y_size"], self.AHI_FULLDISK_EXTENT) - tmp_fh = self.make_fh('vis.01') + tmp_fh = self.make_fh("vis.01") tmp_fh.get_area_def(None) self.assertEqual(tmp_fh.area, good_area) def test_bad_area(self): """Ensure an error is raised for an usupported area.""" - tmp_fh = self.make_fh('ext.01') - tmp_fh.areaname = 'scanning' + tmp_fh = self.make_fh("ext.01") + tmp_fh.areaname = "scanning" with self.assertRaises(NotImplementedError): tmp_fh.get_area_def(None) with self.assertRaises(NotImplementedError): - self.make_fh('ext.01', area='scanning') + self.make_fh("ext.01", area="scanning") class TestAHIGriddedFileCalibration(unittest.TestCase): @@ -102,16 +102,16 @@ class TestAHIGriddedFileCalibration(unittest.TestCase): def setUp(self): """Create a test file handler.""" m = mock.mock_open() - with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): - in_fname = 'test_file' + with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True): + in_fname = "test_file" fh = AHIGriddedFileHandler(in_fname, - {'area': 'fld'}, - filetype_info={'file_type': 'tir.01'}) + {"area": "fld"}, + filetype_info={"file_type": "tir.01"}) self.fh = fh - @mock.patch('satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler._get_luts') - 
@mock.patch('satpy.readers.ahi_l1b_gridded_bin.os.path.exists') - @mock.patch('satpy.readers.ahi_l1b_gridded_bin.np.loadtxt') + @mock.patch("satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler._get_luts") + @mock.patch("satpy.readers.ahi_l1b_gridded_bin.os.path.exists") + @mock.patch("satpy.readers.ahi_l1b_gridded_bin.np.loadtxt") def test_calibrate(self, np_loadtxt, os_exist, get_luts): """Test the calibration modes of AHI using the LUTs.""" load_return = np.squeeze(np.dstack([np.arange(0, 2048, 1), @@ -128,26 +128,26 @@ def test_calibrate(self, np_loadtxt, os_exist, get_luts): os_exist.return_value = False # Check that the LUT download is called if we don't have the LUTS - self.fh.calibrate(in_data, 'reflectance') + self.fh.calibrate(in_data, "reflectance") get_luts.assert_called() os_exist.return_value = True # Ensure results equal if no calibration applied - out_data = self.fh.calibrate(in_data, 'counts') + out_data = self.fh.calibrate(in_data, "counts") np.testing.assert_equal(in_data, out_data) # Now ensure results equal if LUT calibration applied - out_data = self.fh.calibrate(in_data, 'reflectance') + out_data = self.fh.calibrate(in_data, "reflectance") np.testing.assert_allclose(refl_out, out_data) # Check that exception is raised if bad calibration is passed with self.assertRaises(NotImplementedError): - self.fh.calibrate(in_data, 'lasers') + self.fh.calibrate(in_data, "lasers") # Check that exception is raised if no file is present np_loadtxt.side_effect = FileNotFoundError with self.assertRaises(FileNotFoundError): - self.fh.calibrate(in_data, 'reflectance') + self.fh.calibrate(in_data, "reflectance") class TestAHIGriddedFileHandler(unittest.TestCase): @@ -155,44 +155,44 @@ class TestAHIGriddedFileHandler(unittest.TestCase): def new_unzip(fname): """Fake unzipping.""" - if fname[-3:] == 'bz2': + if fname[-3:] == "bz2": return fname[:-4] - @mock.patch('satpy.readers.ahi_l1b_gridded_bin.unzip_file', + @mock.patch("satpy.readers.ahi_l1b_gridded_bin.unzip_file", mock.MagicMock(side_effect=new_unzip)) def setUp(self): """Create a test file handler.""" m = mock.mock_open() - with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): - in_fname = 'test_file.bz2' + with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True): + in_fname = "test_file.bz2" fh = AHIGriddedFileHandler(in_fname, - {'area': 'fld'}, - filetype_info={'file_type': 'tir.01'}) + {"area": "fld"}, + filetype_info={"file_type": "tir.01"}) # Check that the filename is altered for bz2 format files self.assertNotEqual(in_fname, fh.filename) self.fh = fh - key = {'calibration': 'counts', - 'name': 'vis.01'} - info = {'units': 'unitless', - 'standard_name': 'vis.01', - 'wavelength': 10.8, - 'resolution': 0.05} + key = {"calibration": "counts", + "name": "vis.01"} + info = {"units": "unitless", + "standard_name": "vis.01", + "wavelength": 10.8, + "resolution": 0.05} self.key = key self.info = info - @mock.patch('satpy.readers.ahi_l1b_gridded_bin.np.memmap') + @mock.patch("satpy.readers.ahi_l1b_gridded_bin.np.memmap") def test_dataread(self, memmap): """Check that a dask array is returned from the read function.""" test_arr = np.zeros((10, 10)) memmap.return_value = test_arr m = mock.mock_open() - with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): + with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True): res = self.fh._read_data(mock.MagicMock()) np.testing.assert_allclose(res, da.from_array(test_arr)) - 
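test_dataread above is the reusable recipe for binary readers: patch numpy's memmap where the reader resolves it, hand back an in-memory array, and assert on the dask wrapper that comes out. A generic, self-contained version of the same pattern follows; load_counts is a hypothetical stand-in for the handler's _read_data, not satpy API.

    from unittest import mock

    import dask.array as da
    import numpy as np


    def load_counts(path):
        # the real reader memmaps the file and wraps it in a dask array
        counts = np.memmap(path, dtype=np.float32, mode="r", shape=(10, 10))
        return da.from_array(counts)


    with mock.patch("numpy.memmap", return_value=np.zeros((10, 10), dtype=np.float32)):
        res = load_counts("not_a_real_file.dat")

    np.testing.assert_allclose(res.compute(), np.zeros((10, 10)))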
@mock.patch('satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler._read_data') + @mock.patch("satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler._read_data") def test_get_dataset(self, mocked_read): """Check that a good dataset is returned on request.""" m = mock.mock_open() @@ -200,17 +200,17 @@ def test_get_dataset(self, mocked_read): out_data = np.array([[100., 300., 500.], [800., 1500., 2040.]]) mocked_read.return_value = out_data - with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): + with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True): res = self.fh.get_dataset(self.key, self.info) mocked_read.assert_called() # Check output data is correct np.testing.assert_allclose(res.values, out_data) # Also check a couple of attributes - self.assertEqual(res.attrs['name'], self.key['name']) - self.assertEqual(res.attrs['wavelength'], self.info['wavelength']) + self.assertEqual(res.attrs["name"], self.key["name"]) + self.assertEqual(res.attrs["wavelength"], self.info["wavelength"]) - @mock.patch('os.path.exists', return_value=True) - @mock.patch('os.remove') + @mock.patch("os.path.exists", return_value=True) + @mock.patch("os.remove") def test_destructor(self, exist_patch, remove_patch): """Check that file handler deletes files if needed.""" del self.fh @@ -229,27 +229,27 @@ def mocked_ftp_dl(fname): with tarfile.open(fname, "w:gz") as tar_handle: for namer in AHI_LUT_NAMES: tmpf = os.path.join(tempfile.tempdir, namer) - with open(tmpf, 'w') as tmp_fid: + with open(tmpf, "w") as tmp_fid: tmp_fid.write("TEST\n") - tar_handle.add(tmpf, arcname='count2tbb_v102/'+namer) + tar_handle.add(tmpf, arcname="count2tbb_v102/"+namer) os.remove(tmpf) def setUp(self): """Create a test file handler.""" m = mock.mock_open() - with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): - in_fname = 'test_file' + with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True): + in_fname = "test_file" fh = AHIGriddedFileHandler(in_fname, - {'area': 'fld'}, - filetype_info={'file_type': 'tir.01'}) + {"area": "fld"}, + filetype_info={"file_type": "tir.01"}) self.fh = fh - key = {'calibration': 'counts', - 'name': 'vis.01'} - info = {'units': 'unitless', - 'standard_name': 'vis.01', - 'wavelength': 10.8, - 'resolution': 0.05} + key = {"calibration": "counts", + "name": "vis.01"} + info = {"units": "unitless", + "standard_name": "vis.01", + "wavelength": 10.8, + "resolution": 0.05} self.key = key self.info = info @@ -262,23 +262,23 @@ def tearDown(self): if os.path.isdir(self.fh.lut_dir): shutil.rmtree(self.fh.lut_dir) - @mock.patch('satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler._download_luts', + @mock.patch("satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler._download_luts", mock.MagicMock(side_effect=mocked_ftp_dl)) def test_get_luts(self): """Check that the function to download LUTs operates successfully.""" tempdir = tempfile.gettempdir() print(self.fh.lut_dir) self.fh._get_luts() - self.assertFalse(os.path.exists(os.path.join(tempdir, 'count2tbb_v102/'))) + self.assertFalse(os.path.exists(os.path.join(tempdir, "count2tbb_v102/"))) for lut_name in AHI_LUT_NAMES: self.assertTrue(os.path.isfile(os.path.join(self.fh.lut_dir, lut_name))) - @mock.patch('urllib.request.urlopen') - @mock.patch('shutil.copyfileobj') + @mock.patch("urllib.request.urlopen") + @mock.patch("shutil.copyfileobj") def test_download_luts(self, mock_dl, mock_shutil): """Test that the FTP library is called for downloading LUTS.""" m = mock.mock_open() - with 
mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): - self.fh._download_luts('/test_file') + with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True): + self.fh._download_luts("/test_file") mock_dl.assert_called() mock_shutil.assert_called() diff --git a/satpy/tests/reader_tests/test_ami_l1b.py b/satpy/tests/reader_tests/test_ami_l1b.py index 50f6f2af03..58a3612b49 100644 --- a/satpy/tests/reader_tests/test_ami_l1b.py +++ b/satpy/tests/reader_tests/test_ami_l1b.py @@ -56,7 +56,7 @@ def close(self): class TestAMIL1bNetCDFBase(unittest.TestCase): """Common setup for NC_ABI_L1B tests.""" - @mock.patch('satpy.readers.ami_l1b.xr') + @mock.patch("satpy.readers.ami_l1b.xr") def setUp(self, xr_, counts=None): """Create a fake dataset using the given counts data.""" from satpy.readers.ami_l1b import AMIL1bNetCDF @@ -66,35 +66,35 @@ def setUp(self, xr_, counts=None): rad_data = (rad_data + 1.) / 0.5 rad_data = rad_data.astype(np.int16) counts = xr.DataArray( - da.from_array(rad_data, chunks='auto'), - dims=('y', 'x'), + da.from_array(rad_data, chunks="auto"), + dims=("y", "x"), attrs={ - 'channel_name': "VI006", - 'detector_side': 2, - 'number_of_total_pixels': 484000000, - 'number_of_error_pixels': 113892451, - 'max_pixel_value': 32768, - 'min_pixel_value': 6, - 'average_pixel_value': 8228.98770845248, - 'stddev_pixel_value': 13621.130386551, - 'number_of_total_bits_per_pixel': 16, - 'number_of_data_quality_flag_bits_per_pixel': 2, - 'number_of_valid_bits_per_pixel': 12, - 'data_quality_flag_meaning': + "channel_name": "VI006", + "detector_side": 2, + "number_of_total_pixels": 484000000, + "number_of_error_pixels": 113892451, + "max_pixel_value": 32768, + "min_pixel_value": 6, + "average_pixel_value": 8228.98770845248, + "stddev_pixel_value": 13621.130386551, + "number_of_total_bits_per_pixel": 16, + "number_of_data_quality_flag_bits_per_pixel": 2, + "number_of_valid_bits_per_pixel": 12, + "data_quality_flag_meaning": "0:good_pixel, 1:conditionally_usable_pixel, 2:out_of_scan_area_pixel, 3:error_pixel", - 'ground_sample_distance_ew': 1.4e-05, - 'ground_sample_distance_ns': 1.4e-05, + "ground_sample_distance_ew": 1.4e-05, + "ground_sample_distance_ns": 1.4e-05, } ) sc_position = xr.DataArray(0., attrs={ - 'sc_position_center_pixel': [-26113466.1974016, 33100139.1630508, 3943.75470244799], + "sc_position_center_pixel": [-26113466.1974016, 33100139.1630508, 3943.75470244799], }) xr_.open_dataset.return_value = FakeDataset( { - 'image_pixel_values': counts, - 'sc_position': sc_position, - 'gsics_coeff_intercept': [0.1859369], - 'gsics_coeff_slope': [0.9967594], + "image_pixel_values": counts, + "sc_position": sc_position, + "gsics_coeff_intercept": [0.1859369], + "gsics_coeff_slope": [0.9967594], }, { "satellite_name": "GK-2A", @@ -125,9 +125,9 @@ def setUp(self, xr_, counts=None): } ) - self.reader = AMIL1bNetCDF('filename', - {'platform_shortname': 'gk2a'}, - {'file_type': 'ir087'},) + self.reader = AMIL1bNetCDF("filename", + {"platform_shortname": "gk2a"}, + {"file_type": "ir087"},) class TestAMIL1bNetCDF(TestAMIL1bNetCDFBase): @@ -136,12 +136,12 @@ class TestAMIL1bNetCDF(TestAMIL1bNetCDFBase): def _check_orbital_parameters(self, orb_params): """Check that orbital parameters match expected values.""" exp_params = { - 'projection_altitude': 35785863.0, - 'projection_latitude': 0.0, - 'projection_longitude': 128.2, - 'satellite_actual_altitude': 35782654.56070405, - 'satellite_actual_latitude': 0.005364927, - 'satellite_actual_longitude': 128.2707, + 
"projection_altitude": 35785863.0, + "projection_latitude": 0.0, + "projection_longitude": 128.2, + "satellite_actual_altitude": 35782654.56070405, + "satellite_actual_latitude": 0.005364927, + "satellite_actual_longitude": 128.2707, } for key, val in exp_params.items(): self.assertAlmostEqual(val, orb_params[key], places=3) @@ -150,25 +150,25 @@ def test_filename_grouping(self): """Test that filenames are grouped properly.""" from satpy.readers import group_files filenames = [ - 'gk2a_ami_le1b_ir087_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_ir096_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_ir105_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_ir112_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_ir123_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_ir133_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_nr013_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_nr016_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_sw038_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_vi004_fd010ge_201909300300.nc', - 'gk2a_ami_le1b_vi005_fd010ge_201909300300.nc', - 'gk2a_ami_le1b_vi006_fd005ge_201909300300.nc', - 'gk2a_ami_le1b_vi008_fd010ge_201909300300.nc', - 'gk2a_ami_le1b_wv063_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_wv069_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_wv073_fd020ge_201909300300.nc'] - groups = group_files(filenames, reader='ami_l1b') + "gk2a_ami_le1b_ir087_fd020ge_201909300300.nc", + "gk2a_ami_le1b_ir096_fd020ge_201909300300.nc", + "gk2a_ami_le1b_ir105_fd020ge_201909300300.nc", + "gk2a_ami_le1b_ir112_fd020ge_201909300300.nc", + "gk2a_ami_le1b_ir123_fd020ge_201909300300.nc", + "gk2a_ami_le1b_ir133_fd020ge_201909300300.nc", + "gk2a_ami_le1b_nr013_fd020ge_201909300300.nc", + "gk2a_ami_le1b_nr016_fd020ge_201909300300.nc", + "gk2a_ami_le1b_sw038_fd020ge_201909300300.nc", + "gk2a_ami_le1b_vi004_fd010ge_201909300300.nc", + "gk2a_ami_le1b_vi005_fd010ge_201909300300.nc", + "gk2a_ami_le1b_vi006_fd005ge_201909300300.nc", + "gk2a_ami_le1b_vi008_fd010ge_201909300300.nc", + "gk2a_ami_le1b_wv063_fd020ge_201909300300.nc", + "gk2a_ami_le1b_wv069_fd020ge_201909300300.nc", + "gk2a_ami_le1b_wv073_fd020ge_201909300300.nc"] + groups = group_files(filenames, reader="ami_l1b") self.assertEqual(len(groups), 1) - self.assertEqual(len(groups[0]['ami_l1b']), 16) + self.assertEqual(len(groups[0]["ami_l1b"]), 16) def test_basic_attributes(self): """Test getting basic file attributes.""" @@ -181,84 +181,84 @@ def test_basic_attributes(self): def test_get_dataset(self): """Test gettting radiance data.""" from satpy.tests.utils import make_dataid - key = make_dataid(name='VI006', calibration='radiance') + key = make_dataid(name="VI006", calibration="radiance") res = self.reader.get_dataset(key, { - 'file_key': 'image_pixel_values', - 'standard_name': 'toa_outgoing_radiance_per_unit_wavelength', - 'units': 'W m-2 um-1 sr-1', + "file_key": "image_pixel_values", + "standard_name": "toa_outgoing_radiance_per_unit_wavelength", + "units": "W m-2 um-1 sr-1", }) - exp = {'calibration': 'radiance', - 'modifiers': (), - 'platform_name': 'GEO-KOMPSAT-2A', - 'sensor': 'ami', - 'units': 'W m-2 um-1 sr-1'} + exp = {"calibration": "radiance", + "modifiers": (), + "platform_name": "GEO-KOMPSAT-2A", + "sensor": "ami", + "units": "W m-2 um-1 sr-1"} for key, val in exp.items(): self.assertEqual(val, res.attrs[key]) - self._check_orbital_parameters(res.attrs['orbital_parameters']) + self._check_orbital_parameters(res.attrs["orbital_parameters"]) def test_bad_calibration(self): """Test that asking for a bad calibration fails.""" from satpy.tests.utils import make_dataid with 
self.assertRaises(ValueError): - ds_id = make_dataid(name='VI006', calibration='_bad_') - ds_info = {'file_key': 'image_pixel_values', - 'standard_name': 'toa_outgoing_radiance_per_unit_wavelength', - 'units': 'W m-2 um-1 sr-1', + ds_id = make_dataid(name="VI006", calibration="_bad_") + ds_info = {"file_key": "image_pixel_values", + "standard_name": "toa_outgoing_radiance_per_unit_wavelength", + "units": "W m-2 um-1 sr-1", } self.reader.get_dataset(ds_id, ds_info) - @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') + @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def(self, adef): """Test the area generation.""" self.reader.get_area_def(None) self.assertEqual(adef.call_count, 1) call_args = tuple(adef.call_args)[0] - exp = {'a': 6378137.0, 'b': 6356752.3, 'h': 35785863.0, - 'lon_0': 128.2, 'proj': 'geos', 'units': 'm'} + exp = {"a": 6378137.0, "b": 6356752.3, "h": 35785863.0, + "lon_0": 128.2, "proj": "geos", "units": "m"} for key, val in exp.items(): self.assertIn(key, call_args[3]) self.assertAlmostEqual(val, call_args[3][key]) - self.assertEqual(call_args[4], self.reader.nc.attrs['number_of_columns']) - self.assertEqual(call_args[5], self.reader.nc.attrs['number_of_lines']) + self.assertEqual(call_args[4], self.reader.nc.attrs["number_of_columns"]) + self.assertEqual(call_args[5], self.reader.nc.attrs["number_of_lines"]) np.testing.assert_allclose(call_args[6], [-5511022.902, -5511022.902, 5511022.902, 5511022.902]) def test_get_dataset_vis(self): """Test get visible calibrated data.""" from satpy.tests.utils import make_dataid - key = make_dataid(name='VI006', calibration='reflectance') + key = make_dataid(name="VI006", calibration="reflectance") res = self.reader.get_dataset(key, { - 'file_key': 'image_pixel_values', - 'standard_name': 'toa_bidirectional_reflectance', - 'units': '%', + "file_key": "image_pixel_values", + "standard_name": "toa_bidirectional_reflectance", + "units": "%", }) - exp = {'calibration': 'reflectance', - 'modifiers': (), - 'platform_name': 'GEO-KOMPSAT-2A', - 'sensor': 'ami', - 'units': '%'} + exp = {"calibration": "reflectance", + "modifiers": (), + "platform_name": "GEO-KOMPSAT-2A", + "sensor": "ami", + "units": "%"} for key, val in exp.items(): self.assertEqual(val, res.attrs[key]) - self._check_orbital_parameters(res.attrs['orbital_parameters']) + self._check_orbital_parameters(res.attrs["orbital_parameters"]) def test_get_dataset_counts(self): """Test get counts data.""" from satpy.tests.utils import make_dataid - key = make_dataid(name='VI006', calibration='counts') + key = make_dataid(name="VI006", calibration="counts") res = self.reader.get_dataset(key, { - 'file_key': 'image_pixel_values', - 'standard_name': 'counts', - 'units': '1', + "file_key": "image_pixel_values", + "standard_name": "counts", + "units": "1", }) - exp = {'calibration': 'counts', - 'modifiers': (), - 'platform_name': 'GEO-KOMPSAT-2A', - 'sensor': 'ami', - 'units': '1'} + exp = {"calibration": "counts", + "modifiers": (), + "platform_name": "GEO-KOMPSAT-2A", + "sensor": "ami", + "units": "1"} for key, val in exp.items(): self.assertEqual(val, res.attrs[key]) - self._check_orbital_parameters(res.attrs['orbital_parameters']) + self._check_orbital_parameters(res.attrs["orbital_parameters"]) class TestAMIL1bNetCDFIRCal(TestAMIL1bNetCDFBase): @@ -270,53 +270,53 @@ def setUp(self): count_data = (np.arange(10).reshape((2, 5))) + 7000 count_data = count_data.astype(np.uint16) count = xr.DataArray( - da.from_array(count_data, chunks='auto'), - 
dims=('y', 'x'), + da.from_array(count_data, chunks="auto"), + dims=("y", "x"), attrs={ - 'channel_name': "IR087", - 'detector_side': 2, - 'number_of_total_pixels': 484000000, - 'number_of_error_pixels': 113892451, - 'max_pixel_value': 32768, - 'min_pixel_value': 6, - 'average_pixel_value': 8228.98770845248, - 'stddev_pixel_value': 13621.130386551, - 'number_of_total_bits_per_pixel': 16, - 'number_of_data_quality_flag_bits_per_pixel': 2, - 'number_of_valid_bits_per_pixel': 13, - 'data_quality_flag_meaning': + "channel_name": "IR087", + "detector_side": 2, + "number_of_total_pixels": 484000000, + "number_of_error_pixels": 113892451, + "max_pixel_value": 32768, + "min_pixel_value": 6, + "average_pixel_value": 8228.98770845248, + "stddev_pixel_value": 13621.130386551, + "number_of_total_bits_per_pixel": 16, + "number_of_data_quality_flag_bits_per_pixel": 2, + "number_of_valid_bits_per_pixel": 13, + "data_quality_flag_meaning": "0:good_pixel, 1:conditionally_usable_pixel, 2:out_of_scan_area_pixel, 3:error_pixel", - 'ground_sample_distance_ew': 1.4e-05, - 'ground_sample_distance_ns': 1.4e-05, + "ground_sample_distance_ew": 1.4e-05, + "ground_sample_distance_ns": 1.4e-05, } ) - self.ds_id = make_dataid(name='IR087', wavelength=[8.415, 8.59, 8.765], - calibration='brightness_temperature') + self.ds_id = make_dataid(name="IR087", wavelength=[8.415, 8.59, 8.765], + calibration="brightness_temperature") self.ds_info = { - 'file_key': 'image_pixel_values', - 'wavelength': [8.415, 8.59, 8.765], - 'standard_name': 'toa_brightness_temperature', - 'units': 'K', + "file_key": "image_pixel_values", + "wavelength": [8.415, 8.59, 8.765], + "standard_name": "toa_brightness_temperature", + "units": "K", } super(TestAMIL1bNetCDFIRCal, self).setUp(counts=count) def test_default_calibrate(self): """Test default (pyspectral) IR calibration.""" from satpy.readers.ami_l1b import rad2temp - with mock.patch('satpy.readers.ami_l1b.rad2temp', wraps=rad2temp) as r2t_mock: + with mock.patch("satpy.readers.ami_l1b.rad2temp", wraps=rad2temp) as r2t_mock: res = self.reader.get_dataset(self.ds_id, self.ds_info) r2t_mock.assert_called_once() expected = np.array([[238.34385135, 238.31443527, 238.28500087, 238.25554813, 238.22607701], [238.1965875, 238.16707956, 238.13755317, 238.10800829, 238.07844489]]) np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True) # make sure the attributes from the file are in the data array - self.assertEqual(res.attrs['standard_name'], 'toa_brightness_temperature') + self.assertEqual(res.attrs["standard_name"], "toa_brightness_temperature") def test_infile_calibrate(self): """Test IR calibration using in-file coefficients.""" from satpy.readers.ami_l1b import rad2temp - self.reader.calib_mode = 'FILE' - with mock.patch('satpy.readers.ami_l1b.rad2temp', wraps=rad2temp) as r2t_mock: + self.reader.calib_mode = "FILE" + with mock.patch("satpy.readers.ami_l1b.rad2temp", wraps=rad2temp) as r2t_mock: res = self.reader.get_dataset(self.ds_id, self.ds_info) r2t_mock.assert_not_called() expected = np.array([[238.34385135, 238.31443527, 238.28500087, 238.25554813, 238.22607701], @@ -324,34 +324,34 @@ def test_infile_calibrate(self): # file coefficients are pretty close, give some wiggle room np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True, atol=0.04) # make sure the attributes from the file are in the data array - self.assertEqual(res.attrs['standard_name'], 'toa_brightness_temperature') + self.assertEqual(res.attrs["standard_name"], "toa_brightness_temperature") def 
test_gsics_radiance_corr(self): """Test IR radiance adjustment using in-file GSICS coefs.""" from satpy.readers.ami_l1b import rad2temp - self.reader.calib_mode = 'GSICS' + self.reader.calib_mode = "GSICS" expected = np.array([[238.036797, 238.007106, 237.977396, 237.947668, 237.91792], [237.888154, 237.85837, 237.828566, 237.798743, 237.768902]]) - with mock.patch('satpy.readers.ami_l1b.rad2temp', wraps=rad2temp) as r2t_mock: + with mock.patch("satpy.readers.ami_l1b.rad2temp", wraps=rad2temp) as r2t_mock: res = self.reader.get_dataset(self.ds_id, self.ds_info) r2t_mock.assert_not_called() # file coefficients are pretty close, give some wiggle room np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True, atol=0.01) # make sure the attributes from the file are in the data array - self.assertEqual(res.attrs['standard_name'], 'toa_brightness_temperature') + self.assertEqual(res.attrs["standard_name"], "toa_brightness_temperature") def test_user_radiance_corr(self): """Test IR radiance adjustment using user-supplied coefs.""" from satpy.readers.ami_l1b import rad2temp - self.reader.calib_mode = 'FILE' - self.reader.user_calibration = {'IR087': {'slope': 0.99669, - 'offset': 0.16907}} + self.reader.calib_mode = "FILE" + self.reader.user_calibration = {"IR087": {"slope": 0.99669, + "offset": 0.16907}} expected = np.array([[238.073713, 238.044043, 238.014354, 237.984647, 237.954921], [237.925176, 237.895413, 237.865631, 237.835829, 237.806009]]) - with mock.patch('satpy.readers.ami_l1b.rad2temp', wraps=rad2temp) as r2t_mock: + with mock.patch("satpy.readers.ami_l1b.rad2temp", wraps=rad2temp) as r2t_mock: res = self.reader.get_dataset(self.ds_id, self.ds_info) r2t_mock.assert_not_called() # file coefficients are pretty close, give some wiggle room np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True, atol=0.01) # make sure the attributes from the file are in the data array - self.assertEqual(res.attrs['standard_name'], 'toa_brightness_temperature') + self.assertEqual(res.attrs["standard_name"], "toa_brightness_temperature") diff --git a/satpy/tests/reader_tests/test_amsr2_l1b.py b/satpy/tests/reader_tests/test_amsr2_l1b.py index f3e9de538f..b8e51b845b 100644 --- a/satpy/tests/reader_tests/test_amsr2_l1b.py +++ b/satpy/tests/reader_tests/test_amsr2_l1b.py @@ -43,56 +43,56 @@ class FakeHDF5FileHandler2(FakeHDF5FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { - '/attr/PlatformShortName': 'GCOM-W1', - '/attr/SensorShortName': 'AMSR2', - '/attr/StartOrbitNumber': '22210', - '/attr/StopOrbitNumber': '22210', + "/attr/PlatformShortName": "GCOM-W1", + "/attr/SensorShortName": "AMSR2", + "/attr/StartOrbitNumber": "22210", + "/attr/StopOrbitNumber": "22210", } for bt_chan in [ - '(10.7GHz,H)', - '(10.7GHz,V)', - '(18.7GHz,H)', - '(18.7GHz,V)', - '(23.8GHz,H)', - '(23.8GHz,V)', - '(36.5GHz,H)', - '(36.5GHz,V)', - '(6.9GHz,H)', - '(6.9GHz,V)', - '(7.3GHz,H)', - '(7.3GHz,V)', - '(89.0GHz-A,H)', - '(89.0GHz-A,V)', - '(89.0GHz-B,H)', - '(89.0GHz-B,V)', + "(10.7GHz,H)", + "(10.7GHz,V)", + "(18.7GHz,H)", + "(18.7GHz,V)", + "(23.8GHz,H)", + "(23.8GHz,V)", + "(36.5GHz,H)", + "(36.5GHz,V)", + "(6.9GHz,H)", + "(6.9GHz,V)", + "(7.3GHz,H)", + "(7.3GHz,V)", + "(89.0GHz-A,H)", + "(89.0GHz-A,V)", + "(89.0GHz-B,H)", + "(89.0GHz-B,V)", ]: - k = 'Brightness Temperature {}'.format(bt_chan) + k = "Brightness Temperature {}".format(bt_chan) file_content[k] = DEFAULT_FILE_DATA[:, ::2] - file_content[k + 
'/shape'] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2) - file_content[k + '/attr/UNIT'] = 'K' - file_content[k + '/attr/SCALE FACTOR'] = 0.01 + file_content[k + "/shape"] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2) + file_content[k + "/attr/UNIT"] = "K" + file_content[k + "/attr/SCALE FACTOR"] = 0.01 for bt_chan in [ - '(89.0GHz-A,H)', - '(89.0GHz-A,V)', - '(89.0GHz-B,H)', - '(89.0GHz-B,V)', + "(89.0GHz-A,H)", + "(89.0GHz-A,V)", + "(89.0GHz-B,H)", + "(89.0GHz-B,V)", ]: - k = 'Brightness Temperature {}'.format(bt_chan) + k = "Brightness Temperature {}".format(bt_chan) file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/UNIT'] = 'K' - file_content[k + '/attr/SCALE FACTOR'] = 0.01 - for nav_chan in ['89A', '89B']: - lon_k = 'Longitude of Observation Point for ' + nav_chan - lat_k = 'Latitude of Observation Point for ' + nav_chan + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/UNIT"] = "K" + file_content[k + "/attr/SCALE FACTOR"] = 0.01 + for nav_chan in ["89A", "89B"]: + lon_k = "Longitude of Observation Point for " + nav_chan + lat_k = "Latitude of Observation Point for " + nav_chan file_content[lon_k] = DEFAULT_LON_DATA - file_content[lon_k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[lon_k + '/attr/SCALE FACTOR'] = 1 - file_content[lon_k + '/attr/UNIT'] = 'deg' + file_content[lon_k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[lon_k + "/attr/SCALE FACTOR"] = 1 + file_content[lon_k + "/attr/UNIT"] = "deg" file_content[lat_k] = DEFAULT_LAT_DATA - file_content[lat_k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[lat_k + '/attr/SCALE FACTOR'] = 1 - file_content[lat_k + '/attr/UNIT'] = 'deg' + file_content[lat_k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[lat_k + "/attr/SCALE FACTOR"] = 1 + file_content[lat_k + "/attr/UNIT"] = "deg" convert_file_content_to_data_array(file_content) return file_content @@ -107,9 +107,9 @@ def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.amsr2_l1b import AMSR2L1BFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(AMSR2L1BFileHandler, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(AMSR2L1BFileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -122,7 +122,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5', + "GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -134,59 +134,59 @@ def test_load_basic(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5', + "GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) ds = r.load([ - 'btemp_10.7v', - 'btemp_10.7h', - 'btemp_6.9v', - 'btemp_6.9h', - 'btemp_7.3v', - 'btemp_7.3h', - 'btemp_18.7v', - 'btemp_18.7h', - 'btemp_23.8v', - 'btemp_23.8h', - 'btemp_36.5v', - 'btemp_36.5h', + "btemp_10.7v", + 
"btemp_10.7h", + "btemp_6.9v", + "btemp_6.9h", + "btemp_7.3v", + "btemp_7.3h", + "btemp_18.7v", + "btemp_18.7h", + "btemp_23.8v", + "btemp_23.8h", + "btemp_36.5v", + "btemp_36.5h", ]) self.assertEqual(len(ds), 12) for d in ds.values(): - self.assertEqual(d.attrs['calibration'], 'brightness_temperature') + self.assertEqual(d.attrs["calibration"], "brightness_temperature") self.assertTupleEqual(d.shape, (DEFAULT_FILE_SHAPE[0], int(DEFAULT_FILE_SHAPE[1] // 2))) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) - self.assertTupleEqual(d.attrs['area'].lons.shape, + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) + self.assertTupleEqual(d.attrs["area"].lons.shape, (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2)) - self.assertTupleEqual(d.attrs['area'].lats.shape, + self.assertTupleEqual(d.attrs["area"].lats.shape, (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2)) - assert d.attrs['sensor'] == 'amsr2' - assert d.attrs['platform_name'] == 'GCOM-W1' + assert d.attrs["sensor"] == "amsr2" + assert d.attrs["platform_name"] == "GCOM-W1" def test_load_89ghz(self): """Test loading of 89GHz channels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5', + "GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) ds = r.load([ - 'btemp_89.0av', - 'btemp_89.0ah', - 'btemp_89.0bv', - 'btemp_89.0bh', + "btemp_89.0av", + "btemp_89.0ah", + "btemp_89.0bv", + "btemp_89.0bh", ]) self.assertEqual(len(ds), 4) for d in ds.values(): - self.assertEqual(d.attrs['calibration'], 'brightness_temperature') + self.assertEqual(d.attrs["calibration"], "brightness_temperature") self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) - self.assertTupleEqual(d.attrs['area'].lons.shape, + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) + self.assertTupleEqual(d.attrs["area"].lons.shape, DEFAULT_FILE_SHAPE) - self.assertTupleEqual(d.attrs['area'].lats.shape, + self.assertTupleEqual(d.attrs["area"].lats.shape, DEFAULT_FILE_SHAPE) diff --git a/satpy/tests/reader_tests/test_amsr2_l2.py b/satpy/tests/reader_tests/test_amsr2_l2.py index 711754c989..106f558919 100644 --- a/satpy/tests/reader_tests/test_amsr2_l2.py +++ b/satpy/tests/reader_tests/test_amsr2_l2.py @@ -42,29 +42,29 @@ class FakeHDF5FileHandler2(FakeHDF5FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { - '/attr/PlatformShortName': 'GCOM-W1', - '/attr/SensorShortName': 'AMSR2', - '/attr/StartOrbitNumber': '22210', - '/attr/StopOrbitNumber': '22210', + "/attr/PlatformShortName": "GCOM-W1", + "/attr/SensorShortName": "AMSR2", + "/attr/StartOrbitNumber": "22210", + "/attr/StopOrbitNumber": "22210", } - k = 'Geophysical Data' + k = "Geophysical Data" file_content[k] = DEFAULT_FILE_DATA[:, :] - file_content[k + '/shape'] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) - file_content[k + '/attr/UNIT'] = 'K' - file_content[k + '/attr/SCALE FACTOR'] = 1 + file_content[k + "/shape"] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) + file_content[k + "/attr/UNIT"] = "K" + file_content[k + "/attr/SCALE FACTOR"] = 1 - k = 'Latitude of Observation Point' + k = "Latitude of Observation Point" file_content[k] = DEFAULT_FILE_DATA[:, :] - file_content[k + '/shape'] = 
(DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) - file_content[k + '/attr/UNIT'] = 'deg' - file_content[k + '/attr/SCALE FACTOR'] = 1 - k = 'Longitude of Observation Point' + file_content[k + "/shape"] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) + file_content[k + "/attr/UNIT"] = "deg" + file_content[k + "/attr/SCALE FACTOR"] = 1 + k = "Longitude of Observation Point" file_content[k] = DEFAULT_FILE_DATA[:, :] - file_content[k + '/shape'] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) - file_content[k + '/attr/UNIT'] = 'deg' - file_content[k + '/attr/SCALE FACTOR'] = 1 + file_content[k + "/shape"] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) + file_content[k + "/attr/UNIT"] = "deg" + file_content[k + "/attr/SCALE FACTOR"] = 1 - convert_file_content_to_data_array(file_content, dims=('dim_0', 'dim_1')) + convert_file_content_to_data_array(file_content, dims=("dim_0", "dim_1")) return file_content @@ -78,9 +78,9 @@ def setUp(self): from satpy._config import config_search_paths from satpy.readers.amsr2_l1b import AMSR2L1BFileHandler from satpy.readers.amsr2_l2 import AMSR2L2FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(AMSR2L2FileHandler, '__bases__', (FakeHDF5FileHandler2, + self.p = mock.patch.object(AMSR2L2FileHandler, "__bases__", (FakeHDF5FileHandler2, AMSR2L1BFileHandler)) self.fake_handler = self.p.start() self.p.is_local = True @@ -94,7 +94,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GW1AM2_202004160129_195B_L2SNSSWLB3300300.h5', + "GW1AM2_202004160129_195B_L2SNSSWLB3300300.h5", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -106,17 +106,17 @@ def test_load_basic(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GW1AM2_202004160129_195B_L2SNSSWLB3300300.h5', + "GW1AM2_202004160129_195B_L2SNSSWLB3300300.h5", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) - ds = r.load(['ssw']) + ds = r.load(["ssw"]) self.assertEqual(len(ds), 1) for d in ds.values(): self.assertTupleEqual(d.shape, (DEFAULT_FILE_SHAPE[0], int(DEFAULT_FILE_SHAPE[1]))) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) - self.assertTupleEqual(d.attrs['area'].lons.shape, + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) + self.assertTupleEqual(d.attrs["area"].lons.shape, (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1])) - self.assertTupleEqual(d.attrs['area'].lats.shape, + self.assertTupleEqual(d.attrs["area"].lats.shape, (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1])) diff --git a/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py b/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py index ac271e7206..2f1b3ad7b0 100644 --- a/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py +++ b/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py @@ -47,10 +47,10 @@ def _get_shared_global_attrs(filename): attrs = { - 'time_coverage_start': '2020-08-12T05:58:31.0Z', - 'time_coverage_end': '2020-08-12T06:07:01.0Z', - 'platform_name': 'GCOM-W1', - 'instrument_name': 'AMSR2', + "time_coverage_start": "2020-08-12T05:58:31.0Z", + "time_coverage_end": "2020-08-12T06:07:01.0Z", + "platform_name": "GCOM-W1", + 
"instrument_name": "AMSR2", } return attrs @@ -58,43 +58,43 @@ def _get_shared_global_attrs(filename): def _create_two_res_gaasp_dataset(filename): """Represent files with two resolution of variables in them (ex. OCEAN).""" lon_var_hi = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_hi_rez_FOVs'), - attrs={'standard_name': 'longitude'}) + dims=("Number_of_Scans", "Number_of_hi_rez_FOVs"), + attrs={"standard_name": "longitude"}) lat_var_hi = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_hi_rez_FOVs'), - attrs={'standard_name': 'latitude'}) + dims=("Number_of_Scans", "Number_of_hi_rez_FOVs"), + attrs={"standard_name": "latitude"}) lon_var_lo = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), - attrs={'standard_name': 'longitude'}) + dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), + attrs={"standard_name": "longitude"}) lat_var_lo = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), - attrs={'standard_name': 'latitude'}) + dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), + attrs={"standard_name": "latitude"}) swath_var1 = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_hi_rez_FOVs'), - coords={'some_longitude_hi': lon_var_hi, 'some_latitude_hi': lat_var_hi}, - attrs={'_FillValue': -9999., - 'scale_factor': 0.5, 'add_offset': 2.0}) + dims=("Number_of_Scans", "Number_of_hi_rez_FOVs"), + coords={"some_longitude_hi": lon_var_hi, "some_latitude_hi": lat_var_hi}, + attrs={"_FillValue": -9999., + "scale_factor": 0.5, "add_offset": 2.0}) swath_var2 = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), - coords={'some_longitude_lo': lon_var_lo, 'some_latitude_lo': lat_var_lo}, - attrs={'_FillValue': -9999.}) + dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), + coords={"some_longitude_lo": lon_var_lo, "some_latitude_lo": lat_var_lo}, + attrs={"_FillValue": -9999.}) swath_int_var = xr.DataArray(da.zeros((10, 10), dtype=np.uint16), - dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), - attrs={'_FillValue': 100, 'comment': 'Some comment'}) + dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), + attrs={"_FillValue": 100, "comment": "Some comment"}) not_xy_dim_var = xr.DataArray(da.zeros((10, 5), dtype=np.float32), - dims=('Number_of_Scans', 'Time_Dimension')) + dims=("Number_of_Scans", "Time_Dimension")) time_var = xr.DataArray(da.zeros((5,), dtype=np.float32), - dims=('Time_Dimension',)) + dims=("Time_Dimension",)) ds_vars = { - 'swath_var_hi': swath_var1, - 'swath_var_low': swath_var2, - 'swath_var_low_int': swath_int_var, - 'some_longitude_hi': lon_var_hi, - 'some_latitude_hi': lat_var_hi, - 'some_longitude_lo': lon_var_lo, - 'some_latitude_lo': lat_var_lo, - 'not_xy_dim_var': not_xy_dim_var, - 'time_var': time_var, + "swath_var_hi": swath_var1, + "swath_var_low": swath_var2, + "swath_var_low_int": swath_int_var, + "some_longitude_hi": lon_var_hi, + "some_latitude_hi": lat_var_hi, + "some_longitude_lo": lon_var_lo, + "some_latitude_lo": lat_var_lo, + "not_xy_dim_var": not_xy_dim_var, + "time_var": time_var, } attrs = _get_shared_global_attrs(filename) ds = xr.Dataset(ds_vars, attrs=attrs) @@ -104,22 +104,22 @@ def _create_two_res_gaasp_dataset(filename): def _create_gridded_gaasp_dataset(filename): """Represent files with gridded products.""" grid_var = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - 
dims=('Number_of_Y_Dimension', 'Number_of_X_Dimension'), + dims=("Number_of_Y_Dimension", "Number_of_X_Dimension"), attrs={ - '_FillValue': -9999., - 'scale_factor': 0.5, 'add_offset': 2.0 + "_FillValue": -9999., + "scale_factor": 0.5, "add_offset": 2.0 }) latency_var = xr.DataArray(da.zeros((10, 10), dtype=np.timedelta64), - dims=('Number_of_Y_Dimension', 'Number_of_X_Dimension'), + dims=("Number_of_Y_Dimension", "Number_of_X_Dimension"), attrs={ - '_FillValue': -9999, + "_FillValue": -9999, }) time_var = xr.DataArray(da.zeros((5,), dtype=np.float32), - dims=('Time_Dimension',)) + dims=("Time_Dimension",)) ds_vars = { - 'grid_var': grid_var, - 'latency_var': latency_var, - 'time_var': time_var, + "grid_var": grid_var, + "latency_var": latency_var, + "time_var": time_var, } attrs = _get_shared_global_attrs(filename) return xr.Dataset(ds_vars, attrs=attrs) @@ -128,29 +128,29 @@ def _create_gridded_gaasp_dataset(filename): def _create_one_res_gaasp_dataset(filename): """Represent files with one resolution of variables in them (ex. SOIL).""" lon_var_lo = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), - attrs={'standard_name': 'longitude'}) + dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), + attrs={"standard_name": "longitude"}) lat_var_lo = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), - attrs={'standard_name': 'latitude'}) + dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), + attrs={"standard_name": "latitude"}) swath_var2 = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), - coords={'some_longitude_lo': lon_var_lo, 'some_latitude_lo': lat_var_lo}, + dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), + coords={"some_longitude_lo": lon_var_lo, "some_latitude_lo": lat_var_lo}, attrs={ - '_FillValue': -9999., - 'scale_factor': 0.5, 'add_offset': 2.0 + "_FillValue": -9999., + "scale_factor": 0.5, "add_offset": 2.0 }) swath_int_var = xr.DataArray(da.zeros((10, 10), dtype=np.uint16), - dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), - attrs={'_FillValue': 100, 'comment': 'Some comment'}) + dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), + attrs={"_FillValue": 100, "comment": "Some comment"}) time_var = xr.DataArray(da.zeros((5,), dtype=np.float32), - dims=('Time_Dimension',)) + dims=("Time_Dimension",)) ds_vars = { - 'swath_var': swath_var2, - 'swath_var_int': swath_int_var, - 'some_longitude_lo': lon_var_lo, - 'some_latitude_lo': lat_var_lo, - 'time_var': time_var, + "swath_var": swath_var2, + "swath_var_int": swath_int_var, + "some_longitude_lo": lon_var_lo, + "some_latitude_lo": lat_var_lo, + "time_var": time_var, } attrs = _get_shared_global_attrs(filename) return xr.Dataset(ds_vars, attrs=attrs) @@ -168,12 +168,12 @@ def fake_open_dataset(filename, **kwargs): class TestGAASPReader: """Tests for the GAASP reader.""" - yaml_file = 'amsr2_l2_gaasp.yaml' + yaml_file = "amsr2_l2_gaasp.yaml" def setup_method(self): """Set up the reader configuration.""" from satpy._config import config_search_paths - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) @pytest.mark.parametrize( ("filenames", "expected_loadables"), [ @@ -191,7 +191,7 @@ def setup_method(self): def test_reader_creation(self, filenames, expected_loadables): """Test basic initialization.""" from satpy.readers import load_reader - with
mock.patch('satpy.readers.amsr2_l2_gaasp.xr.open_dataset') as od: + with mock.patch("satpy.readers.amsr2_l2_gaasp.xr.open_dataset") as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) @@ -203,26 +203,26 @@ def test_reader_creation(self, filenames, expected_loadables): @pytest.mark.parametrize( ("filenames", "expected_datasets"), [ - (EXAMPLE_FILENAMES, ['swath_var_hi', 'swath_var_low', - 'swath_var_low_int', 'swath_var', - 'swath_var_int', - 'grid_var_NH', 'grid_var_SH', - 'latency_var_NH', 'latency_var_SH']), - ([MBT_FILENAME], ['swath_var_hi', 'swath_var_low', - 'swath_var_low_int']), - ([PRECIP_FILENAME], ['swath_var_hi', 'swath_var_low', - 'swath_var_low_int']), - ([OCEAN_FILENAME], ['swath_var_hi', 'swath_var_low', - 'swath_var_low_int']), - ([SEAICE_NH_FILENAME], ['grid_var_NH', 'latency_var_NH']), - ([SEAICE_SH_FILENAME], ['grid_var_SH', 'latency_var_SH']), - ([SNOW_FILENAME], ['swath_var', 'swath_var_int']), - ([SOIL_FILENAME], ['swath_var', 'swath_var_int']), + (EXAMPLE_FILENAMES, ["swath_var_hi", "swath_var_low", + "swath_var_low_int", "swath_var", + "swath_var_int", + "grid_var_NH", "grid_var_SH", + "latency_var_NH", "latency_var_SH"]), + ([MBT_FILENAME], ["swath_var_hi", "swath_var_low", + "swath_var_low_int"]), + ([PRECIP_FILENAME], ["swath_var_hi", "swath_var_low", + "swath_var_low_int"]), + ([OCEAN_FILENAME], ["swath_var_hi", "swath_var_low", + "swath_var_low_int"]), + ([SEAICE_NH_FILENAME], ["grid_var_NH", "latency_var_NH"]), + ([SEAICE_SH_FILENAME], ["grid_var_SH", "latency_var_SH"]), + ([SNOW_FILENAME], ["swath_var", "swath_var_int"]), + ([SOIL_FILENAME], ["swath_var", "swath_var_int"]), ]) def test_available_datasets(self, filenames, expected_datasets): """Test that variables are dynamically discovered.""" from satpy.readers import load_reader - with mock.patch('satpy.readers.amsr2_l2_gaasp.xr.open_dataset') as od: + with mock.patch("satpy.readers.amsr2_l2_gaasp.xr.open_dataset") as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) @@ -230,24 +230,24 @@ def test_available_datasets(self, filenames, expected_datasets): avails = list(r.available_dataset_names) for var_name in expected_datasets: assert var_name in avails - assert 'not_xy_dim_var' not in expected_datasets + assert "not_xy_dim_var" not in expected_datasets @staticmethod def _check_area(data_id, data_arr): from pyresample.geometry import AreaDefinition, SwathDefinition - area = data_arr.attrs['area'] - if 'grid_var' in data_id['name'] or 'latency_var' in data_id['name']: + area = data_arr.attrs["area"] + if "grid_var" in data_id["name"] or "latency_var" in data_id["name"]: assert isinstance(area, AreaDefinition) else: assert isinstance(area, SwathDefinition) @staticmethod def _check_fill(data_id, data_arr): - if 'int' in data_id['name']: - assert data_arr.attrs['_FillValue'] == 100 + if "int" in data_id["name"]: + assert data_arr.attrs["_FillValue"] == 100 assert np.issubdtype(data_arr.dtype, np.integer) else: - assert '_FillValue' not in data_arr.attrs + assert "_FillValue" not in data_arr.attrs if np.issubdtype(data_arr.dtype, np.floating): # we started with float32, it should stay that way assert data_arr.dtype.type == np.float32 @@ -255,33 +255,33 @@ def _check_fill(data_id, data_arr): @staticmethod def _check_attrs(data_arr): attrs = data_arr.attrs - assert 'scale_factor' not in attrs - assert 'add_offset' not in attrs - assert attrs['platform_name'] 
== 'GCOM-W1' - assert attrs['sensor'] == 'amsr2' - assert attrs['start_time'] == datetime(2020, 8, 12, 5, 58, 31) - assert attrs['end_time'] == datetime(2020, 8, 12, 6, 7, 1) + assert "scale_factor" not in attrs + assert "add_offset" not in attrs + assert attrs["platform_name"] == "GCOM-W1" + assert attrs["sensor"] == "amsr2" + assert attrs["start_time"] == datetime(2020, 8, 12, 5, 58, 31) + assert attrs["end_time"] == datetime(2020, 8, 12, 6, 7, 1) @pytest.mark.parametrize( ("filenames", "loadable_ids"), [ - (EXAMPLE_FILENAMES, ['swath_var_hi', 'swath_var_low', - 'swath_var_low_int', 'swath_var', - 'swath_var_int', - 'grid_var_NH', 'grid_var_SH', - 'latency_var_NH', 'latency_var_SH']), - ([MBT_FILENAME], ['swath_var_hi', 'swath_var_low', 'swath_var_low_int']), - ([PRECIP_FILENAME], ['swath_var_hi', 'swath_var_low', 'swath_var_low_int']), - ([OCEAN_FILENAME], ['swath_var_hi', 'swath_var_low', 'swath_var_low_int']), - ([SEAICE_NH_FILENAME], ['grid_var_NH', 'latency_var_NH']), - ([SEAICE_SH_FILENAME], ['grid_var_SH', 'latency_var_SH']), - ([SNOW_FILENAME], ['swath_var', 'swath_var_int']), - ([SOIL_FILENAME], ['swath_var', 'swath_var_int']), + (EXAMPLE_FILENAMES, ["swath_var_hi", "swath_var_low", + "swath_var_low_int", "swath_var", + "swath_var_int", + "grid_var_NH", "grid_var_SH", + "latency_var_NH", "latency_var_SH"]), + ([MBT_FILENAME], ["swath_var_hi", "swath_var_low", "swath_var_low_int"]), + ([PRECIP_FILENAME], ["swath_var_hi", "swath_var_low", "swath_var_low_int"]), + ([OCEAN_FILENAME], ["swath_var_hi", "swath_var_low", "swath_var_low_int"]), + ([SEAICE_NH_FILENAME], ["grid_var_NH", "latency_var_NH"]), + ([SEAICE_SH_FILENAME], ["grid_var_SH", "latency_var_SH"]), + ([SNOW_FILENAME], ["swath_var", "swath_var_int"]), + ([SOIL_FILENAME], ["swath_var", "swath_var_int"]), ]) def test_basic_load(self, filenames, loadable_ids): """Test that variables are loaded properly.""" from satpy.readers import load_reader - with mock.patch('satpy.readers.amsr2_l2_gaasp.xr.open_dataset') as od: + with mock.patch("satpy.readers.amsr2_l2_gaasp.xr.open_dataset") as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) diff --git a/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py b/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py index a65d0638f5..17ac9f62de 100644 --- a/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py +++ b/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py @@ -42,42 +42,42 @@ def create_message(): surfaceSoilMoisture = np.round(rstate.rand(samples)*100, 1) surfaceSoilMoisture[0] = -1e+100 retmsg = { - 'inputDelayedDescriptorReplicationFactor': [8], - 'edition': 4, - 'masterTableNumber': 0, - 'bufrHeaderCentre': 254, - 'bufrHeaderSubCentre': 0, - 'updateSequenceNumber': 0, - 'dataCategory': 12, - 'internationalDataSubCategory': 255, - 'dataSubCategory': 190, - 'masterTablesVersionNumber': 13, - 'localTablesVersionNumber': 0, - 'typicalYear': 2020, - 'typicalMonth': 12, - 'typicalDay': 21, - 'typicalHour': 9, - 'typicalMinute': 33, - 'typicalSecond': 0, - 'numberOfSubsets': samples, - 'observedData': 1, - 'compressedData': 1, - 'unexpandedDescriptors': 312061, - 'centre': 254, - 'subCentre': 0, - '#1#softwareIdentification': 1000, - 'satelliteIdentifier': 4, - 'satelliteInstruments': 190, - 'year': 2020, - 'month': 12, - 'day': 21, - 'hour': 9, - 'minute': 33, - 'second': np.linspace(0, 59, samples), - 'latitude': lat, - 'longitude': lon, - 'surfaceSoilMoisture': 
surfaceSoilMoisture, - 'soilMoistureQuality': np.zeros(samples), + "inputDelayedDescriptorReplicationFactor": [8], + "edition": 4, + "masterTableNumber": 0, + "bufrHeaderCentre": 254, + "bufrHeaderSubCentre": 0, + "updateSequenceNumber": 0, + "dataCategory": 12, + "internationalDataSubCategory": 255, + "dataSubCategory": 190, + "masterTablesVersionNumber": 13, + "localTablesVersionNumber": 0, + "typicalYear": 2020, + "typicalMonth": 12, + "typicalDay": 21, + "typicalHour": 9, + "typicalMinute": 33, + "typicalSecond": 0, + "numberOfSubsets": samples, + "observedData": 1, + "compressedData": 1, + "unexpandedDescriptors": 312061, + "centre": 254, + "subCentre": 0, + "#1#softwareIdentification": 1000, + "satelliteIdentifier": 4, + "satelliteInstruments": 190, + "year": 2020, + "month": 12, + "day": 21, + "hour": 9, + "minute": 33, + "second": np.linspace(0, 59, samples), + "latitude": lat, + "longitude": lon, + "surfaceSoilMoisture": surfaceSoilMoisture, + "soilMoistureQuality": np.zeros(samples), } return retmsg @@ -85,22 +85,22 @@ def create_message(): MSG = create_message() # the notional filename that would contain the above test message data -FILENAME = 'W_XX-EUMETSAT-TEST,SOUNDING+SATELLITE,METOPA+ASCAT_C_EUMC_20201221093300_73545_eps_o_125_ssm_l2.bin' +FILENAME = "W_XX-EUMETSAT-TEST,SOUNDING+SATELLITE,METOPA+ASCAT_C_EUMC_20201221093300_73545_eps_o_125_ssm_l2.bin" # the information that would be extracted from the above filename according to the pattern in the .yaml FILENAME_INFO = { - 'reception_location': 'TEST', - 'platform': 'METOPA', - 'instrument': 'ASCAT', - 'start_time': '20201221093300', - 'perigee': '73545', - 'species': '125_ssm', - 'level': 'l2' + "reception_location": "TEST", + "platform": "METOPA", + "instrument": "ASCAT", + "start_time": "20201221093300", + "perigee": "73545", + "species": "125_ssm", + "level": "l2" } # file type info for the above file that is defined in the .yaml FILETYPE_INFO = { - 'file_type': 'ascat_l2_soilmoisture_bufr', - 'file_reader': 'AscatSoilMoistureBufr' + "file_type": "ascat_l2_soilmoisture_bufr", + "file_reader": "AscatSoilMoistureBufr" } @@ -110,14 +110,14 @@ def save_test_data(path): filepath = os.path.join(path, FILENAME) with open(filepath, "wb") as f: for m in [MSG]: - buf = ec.codes_bufr_new_from_samples('BUFR4_local_satellite') + buf = ec.codes_bufr_new_from_samples("BUFR4_local_satellite") for key in m: val = m[key] if np.isscalar(val): ec.codes_set(buf, key, val) else: ec.codes_set_array(buf, key, val) - ec.codes_set(buf, 'pack', 1) + ec.codes_set(buf, "pack", 1) ec.codes_write(buf, f) ec.codes_release(buf) return filepath @@ -145,41 +145,41 @@ def tearDown(self): except OSError: pass - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene(self): """Test scene creation.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) - scn = Scene(reader='ascat_l2_soilmoisture_bufr', filenames=[fname]) - self.assertTrue('scatterometer' in scn.sensor_names) + scn = Scene(reader="ascat_l2_soilmoisture_bufr", filenames=[fname]) + self.assertTrue("scatterometer" in scn.sensor_names) self.assertTrue(datetime(2020, 12, 21, 9, 33, 0) == scn.start_time) self.assertTrue(datetime(2020, 12, 21, 9, 33, 59) == scn.end_time) - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def 
test_scene_load_available_datasets(self): """Test that all datasets are available.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) - scn = Scene(reader='ascat_l2_soilmoisture_bufr', filenames=[fname]) - self.assertTrue('surface_soil_moisture' in scn.available_dataset_names()) + scn = Scene(reader="ascat_l2_soilmoisture_bufr", filenames=[fname]) + self.assertTrue("surface_soil_moisture" in scn.available_dataset_names()) scn.load(scn.available_dataset_names()) loaded = [dataset.name for dataset in scn] self.assertTrue(sorted(loaded) == sorted(scn.available_dataset_names())) - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene_dataset_values(self): """Test loading data.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) - scn = Scene(reader='ascat_l2_soilmoisture_bufr', filenames=[fname]) + scn = Scene(reader="ascat_l2_soilmoisture_bufr", filenames=[fname]) for name in scn.available_dataset_names(): scn.load([name]) loaded_values = scn[name].values - fill_value = scn[name].attrs['fill_value'] + fill_value = scn[name].attrs["fill_value"] # replace nans in data loaded from file with the fill value defined in the .yaml # to make them comparable loaded_values_nan_filled = np.nan_to_num(loaded_values, nan=fill_value) - key = scn[name].attrs['key'] + key = scn[name].attrs["key"] original_values = MSG[key] # this makes each assertion below a separate test from unittest's point of view # (note: if all subtests pass, they will count as one test) diff --git a/satpy/tests/reader_tests/test_atms_sdr_hdf5.py b/satpy/tests/reader_tests/test_atms_sdr_hdf5.py index 96fb4d7305..aa11e66d09 100644 --- a/satpy/tests/reader_tests/test_atms_sdr_hdf5.py +++ b/satpy/tests/reader_tests/test_atms_sdr_hdf5.py @@ -53,14 +53,14 @@ def __init__(self, filename, filename_info, filetype_info, include_factors=True) @staticmethod def _add_basic_metadata_to_file_content(file_content, filename_info, num_grans): - start_time = filename_info['start_time'] - end_time = filename_info['end_time'].replace(year=start_time.year, + start_time = filename_info["start_time"] + end_time = filename_info["end_time"].replace(year=start_time.year, month=start_time.month, day=start_time.day) - begin_date = start_time.strftime('%Y%m%d') - begin_time = start_time.strftime('%H%M%S.%fZ') - ending_date = end_time.strftime('%Y%m%d') - ending_time = end_time.strftime('%H%M%S.%fZ') + begin_date = start_time.strftime("%Y%m%d") + begin_time = start_time.strftime("%H%M%S.%fZ") + ending_date = end_time.strftime("%Y%m%d") + ending_time = end_time.strftime("%H%M%S.%fZ") new_file_content = { "{prefix2}/attr/AggregateNumberGranules": num_grans, "{prefix2}/attr/AggregateBeginningDate": begin_date, @@ -69,8 +69,8 @@ def _add_basic_metadata_to_file_content(file_content, filename_info, num_grans): "{prefix2}/attr/AggregateEndingTime": ending_time, "{prefix2}/attr/G-Ring_Longitude": np.array([0.0, 0.1, 0.2, 0.3]), "{prefix2}/attr/G-Ring_Latitude": np.array([0.0, 0.1, 0.2, 0.3]), - "{prefix2}/attr/AggregateBeginningOrbitNumber": "{0:d}".format(filename_info['orbit']), - "{prefix2}/attr/AggregateEndingOrbitNumber": "{0:d}".format(filename_info['orbit']), + "{prefix2}/attr/AggregateBeginningOrbitNumber": "{0:d}".format(filename_info["orbit"]), + "{prefix2}/attr/AggregateEndingOrbitNumber": "{0:d}".format(filename_info["orbit"]), "{prefix1}/attr/Instrument_Short_Name": "ATMS", 
"/attr/Platform_Short_Name": "J01", } @@ -82,13 +82,13 @@ def _add_granule_specific_info_to_file_content(self, file_content, dataset_group lats_lists = self._get_per_granule_lats() file_content["{prefix3}/NumberOfScans"] = np.array([1] * num_granules) for granule_idx in range(num_granules): - prefix_gran = '{prefix}/{dataset_group}_Gran_{idx}'.format(prefix=gran_group_prefix, + prefix_gran = "{prefix}/{dataset_group}_Gran_{idx}".format(prefix=gran_group_prefix, dataset_group=dataset_group, idx=granule_idx) num_scans = num_scans_per_granule[granule_idx] - file_content[prefix_gran + '/attr/N_Number_Of_Scans'] = num_scans - file_content[prefix_gran + '/attr/G-Ring_Longitude'] = lons_lists[granule_idx] - file_content[prefix_gran + '/attr/G-Ring_Latitude'] = lats_lists[granule_idx] + file_content[prefix_gran + "/attr/N_Number_Of_Scans"] = num_scans + file_content[prefix_gran + "/attr/G-Ring_Longitude"] = lons_lists[granule_idx] + file_content[prefix_gran + "/attr/G-Ring_Latitude"] = lats_lists[granule_idx] @staticmethod def _get_per_granule_lons(): @@ -152,7 +152,7 @@ def _add_data_info_to_file_content(self, file_content, filename, data_var_prefix # ATMS SDR files always produce data with 12 scans per granule even if there are less? FIXME! total_rows = DEFAULT_FILE_SHAPE[0] * 12 * num_grans new_shape = (total_rows, DEFAULT_FILE_SHAPE[1], self._num_of_bands) - key = 'BrightnessTemperature' + key = "BrightnessTemperature" key = data_var_prefix + "/" + key file_content[key] = np.repeat(DEFAULT_FILE_DATA.copy(), 12 * num_grans, axis=0) file_content[key] = np.repeat(file_content[key][:, :, np.newaxis], self._num_of_bands, axis=2) @@ -181,10 +181,10 @@ def _add_geolocation_info_to_file_content(file_content, filename, data_var_prefi file_content[k] = np.repeat([file_content[k]], total_rows, axis=0) file_content[k + "/shape"] = new_shape - angles = ['SolarZenithAngle', - 'SolarAzimuthAngle', - 'SatelliteZenithAngle', - 'SatelliteAzimuthAngle'] + angles = ["SolarZenithAngle", + "SolarAzimuthAngle", + "SatelliteZenithAngle", + "SatelliteAzimuthAngle"] for k in angles: k = data_var_prefix + "/" + k file_content[k] = lon_data # close enough to SZA @@ -193,8 +193,8 @@ def _add_geolocation_info_to_file_content(file_content, filename, data_var_prefi @staticmethod def _add_geo_ref(file_content, filename): - geo_prefix = 'GATMO' - file_content['/attr/N_GEO_Ref'] = geo_prefix + filename[5:] + geo_prefix = "GATMO" + file_content["/attr/N_GEO_Ref"] = geo_prefix + filename[5:] @staticmethod def _convert_numpy_content_to_dataarray(final_content): @@ -204,9 +204,9 @@ def _convert_numpy_content_to_dataarray(final_content): if isinstance(val, np.ndarray): val = da.from_array(val, chunks=val.shape) if val.ndim > 2: - final_content[key] = DataArray(val, dims=('y', 'x', 'z')) + final_content[key] = DataArray(val, dims=("y", "x", "z")) elif val.ndim > 1: - final_content[key] = DataArray(val, dims=('y', 'x')) + final_content[key] = DataArray(val, dims=("y", "x")) else: final_content[key] = DataArray(val) @@ -215,9 +215,9 @@ def get_test_content(self, filename, filename_info, filetype_info): final_content = {} for dataset in self.datasets: dataset_group = DATASET_KEYS[dataset] - prefix1 = 'Data_Products/{dataset_group}'.format(dataset_group=dataset_group) - prefix2 = '{prefix}/{dataset_group}_Aggr'.format(prefix=prefix1, dataset_group=dataset_group) - prefix3 = 'All_Data/{dataset_group}_All'.format(dataset_group=dataset_group) + prefix1 = "Data_Products/{dataset_group}".format(dataset_group=dataset_group) + prefix2 = 
"{prefix}/{dataset_group}_Aggr".format(prefix=prefix1, dataset_group=dataset_group) + prefix3 = "All_Data/{dataset_group}_All".format(dataset_group=dataset_group) file_content = {} self._add_basic_metadata_to_file_content(file_content, filename_info, self._num_test_granules) @@ -229,10 +229,10 @@ def get_test_content(self, filename, filename_info, filetype_info): for k, v in list(file_content.items()): file_content[k.format(prefix1=prefix1, prefix2=prefix2, prefix3=prefix3)] = v - if filename[:5] in ['SATMS', 'TATMS']: + if filename[:5] in ["SATMS", "TATMS"]: self._add_data_info_to_file_content(file_content, filename, prefix3, self._num_test_granules) - elif filename[0] == 'G': + elif filename[0] == "G": self._add_geolocation_info_to_file_content(file_content, filename, prefix3, self._num_test_granules) final_content.update(file_content) @@ -248,23 +248,23 @@ class TestATMS_SDR_Reader: def _assert_bt_properties(self, data_arr, num_scans=1, with_area=True): assert np.issubdtype(data_arr.dtype, np.float32) - assert data_arr.attrs['calibration'] == 'brightness_temperature' - assert data_arr.attrs['units'] == 'K' - assert data_arr.attrs['rows_per_scan'] == num_scans + assert data_arr.attrs["calibration"] == "brightness_temperature" + assert data_arr.attrs["units"] == "K" + assert data_arr.attrs["rows_per_scan"] == num_scans if with_area: - assert 'area' in data_arr.attrs - assert data_arr.attrs['area'] is not None - assert data_arr.attrs['area'].shape == data_arr.shape + assert "area" in data_arr.attrs + assert data_arr.attrs["area"] is not None + assert data_arr.attrs["area"].shape == data_arr.shape else: - assert 'area' not in data_arr.attrs + assert "area" not in data_arr.attrs def setup_method(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy.readers.viirs_atms_sdr_base import JPSS_SDR_FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(JPSS_SDR_FileHandler, '__bases__', (FakeHDF5_ATMS_SDR_FileHandler,)) + self.p = mock.patch.object(JPSS_SDR_FileHandler, "__bases__", (FakeHDF5_ATMS_SDR_FileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -277,7 +277,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - '/path/to/atms/sdr/data/SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5', + "/path/to/atms/sdr/data/SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) @@ -288,11 +288,11 @@ def test_init_start_end_time(self): """Test basic init with start and end times around the start/end times of the provided file.""" r = load_reader(self.reader_configs, filter_parameters={ - 'start_time': datetime(2022, 12, 19), - 'end_time': datetime(2022, 12, 21) + "start_time": datetime(2022, 12, 19), + "end_time": datetime(2022, 12, 21) }) loadables = r.select_files_from_pathnames([ - 'SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5', + "SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) @@ -300,10 +300,10 @@ def test_init_start_end_time(self): assert r.file_handlers 
@pytest.mark.parametrize("files, expected", - [(['SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5', - 'GATMO_j01_d20221220_t0910240_e0921356_b26361_c20221220100456680030_cspp_dev.h5'], + [(["SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5", + "GATMO_j01_d20221220_t0910240_e0921356_b26361_c20221220100456680030_cspp_dev.h5"], True), - (['SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5', ], + (["SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5", ], False)] ) def test_load_all_bands(self, files, expected): diff --git a/satpy/tests/reader_tests/test_avhrr_l0_hrpt.py b/satpy/tests/reader_tests/test_avhrr_l0_hrpt.py index e5241ba025..b1504e9014 100644 --- a/satpy/tests/reader_tests/test_avhrr_l0_hrpt.py +++ b/satpy/tests/reader_tests/test_avhrr_l0_hrpt.py @@ -44,7 +44,7 @@ def setUp(self) -> None: test_data["id"]["id"][:5] = 891 # Channel 3b test_data["id"]["id"][5:] = 890 - with NamedTemporaryFile(mode='w+', suffix='.hmf', delete=False) as hrpt_file: + with NamedTemporaryFile(mode="w+", suffix=".hmf", delete=False) as hrpt_file: self.filename = hrpt_file.name test_data.tofile(hrpt_file) @@ -71,7 +71,7 @@ class TestHRPTGetUncalibratedData(TestHRPTWithFile): """Test case for reading uncalibrated hrpt data.""" def _get_channel_1_counts(self): - return self._get_dataset(make_dataid(name='1', calibration='counts')) + return self._get_dataset(make_dataid(name="1", calibration="counts")) def test_get_dataset_returns_a_dataarray(self): """Test that get_dataset returns a dataarray.""" @@ -81,7 +81,7 @@ def test_get_dataset_returns_a_dataarray(self): def test_platform_name(self): """Test that the platform name is correct.""" result = self._get_channel_1_counts() - assert result.attrs['platform_name'] == 'NOAA 19' + assert result.attrs["platform_name"] == "NOAA 19" def test_no_calibration_values_are_1(self): """Test that the values of non-calibrated data is 1.""" @@ -137,7 +137,7 @@ class TestHRPTGetCalibratedReflectances(TestHRPTWithPatchedCalibratorAndFile): def _get_channel_1_reflectance(self): """Get the channel 1 reflectance.""" - dataset_id = make_dataid(name='1', calibration='reflectance') + dataset_id = make_dataid(name="1", calibration="reflectance") return self._get_dataset(dataset_id) def test_calibrated_reflectances_values(self): @@ -151,7 +151,7 @@ class TestHRPTGetCalibratedBT(TestHRPTWithPatchedCalibratorAndFile): def _get_channel_4_bt(self): """Get the channel 4 bt.""" - dataset_id = make_dataid(name='4', calibration='brightness_temperature') + dataset_id = make_dataid(name="4", calibration="brightness_temperature") return self._get_dataset(dataset_id) def test_calibrated_bt_values(self): @@ -165,17 +165,17 @@ class TestHRPTChannel3(TestHRPTWithPatchedCalibratorAndFile): def _get_channel_3b_bt(self): """Get the channel 4 bt.""" - dataset_id = make_dataid(name='3b', calibration='brightness_temperature') + dataset_id = make_dataid(name="3b", calibration="brightness_temperature") return self._get_dataset(dataset_id) def _get_channel_3a_reflectance(self): """Get the channel 4 bt.""" - dataset_id = make_dataid(name='3a', calibration='reflectance') + dataset_id = make_dataid(name="3a", calibration="reflectance") return self._get_dataset(dataset_id) def _get_channel_3a_counts(self): """Get the channel 4 bt.""" - dataset_id = make_dataid(name='3a', calibration='counts') + dataset_id = make_dataid(name="3a", calibration="counts") return self._get_dataset(dataset_id) def 
test_channel_3b_masking(self): @@ -212,7 +212,7 @@ def _prepare_mocks(self, Orbital, SatelliteInterpolator, get_lonlatalt): get_lonlatalt.return_value = (mock.MagicMock(), mock.MagicMock(), mock.MagicMock()) SatelliteInterpolator.return_value.interpolate.return_value = self.fake_lons, self.fake_lats - @mock.patch.multiple('satpy.readers.hrpt', + @mock.patch.multiple("satpy.readers.hrpt", Orbital=mock.DEFAULT, compute_pixels=mock.DEFAULT, get_lonlatalt=mock.DEFAULT, @@ -220,11 +220,11 @@ def _prepare_mocks(self, Orbital, SatelliteInterpolator, get_lonlatalt): def test_longitudes_are_returned(self, Orbital, compute_pixels, get_lonlatalt, SatelliteInterpolator): """Check that longitudes are returned properly.""" self._prepare_mocks(Orbital, SatelliteInterpolator, get_lonlatalt) - dataset_id = make_dataid(name='longitude') + dataset_id = make_dataid(name="longitude") result = self._get_dataset(dataset_id) assert (result == self.fake_lons).all() - @mock.patch.multiple('satpy.readers.hrpt', + @mock.patch.multiple("satpy.readers.hrpt", Orbital=mock.DEFAULT, compute_pixels=mock.DEFAULT, get_lonlatalt=mock.DEFAULT, @@ -232,6 +232,6 @@ def test_longitudes_are_returned(self, Orbital, compute_pixels, get_lonlatalt, S def test_latitudes_are_returned(self, Orbital, compute_pixels, get_lonlatalt, SatelliteInterpolator): """Check that latitudes are returned properly.""" self._prepare_mocks(Orbital, SatelliteInterpolator, get_lonlatalt) - dataset_id = make_dataid(name='latitude') + dataset_id = make_dataid(name="latitude") result = self._get_dataset(dataset_id) assert (result == self.fake_lats).all() diff --git a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py index 4a543b449c..076f89b0f2 100644 --- a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py +++ b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py @@ -27,49 +27,49 @@ GAC_PATTERN = '{creation_site:3s}.{transfer_mode:4s}.{platform_id:2s}.D{start_time:%y%j.S%H%M}.E{end_time:%H%M}.B{orbit_number:05d}{end_orbit_last_digits:02d}.{station:2s}' # noqa -GAC_POD_FILENAMES = ['NSS.GHRR.NA.D79184.S1150.E1337.B0008384.WI', - 'NSS.GHRR.NA.D79184.S2350.E0137.B0008384.WI', - 'NSS.GHRR.NA.D80021.S0927.E1121.B0295354.WI', - 'NSS.GHRR.NA.D80021.S1120.E1301.B0295455.WI', - 'NSS.GHRR.NA.D80021.S1256.E1450.B0295556.GC', - 'NSS.GHRR.NE.D83208.S1219.E1404.B0171819.WI', - 'NSS.GHRR.NG.D88002.S0614.E0807.B0670506.WI', - 'NSS.GHRR.TN.D79183.S1258.E1444.B0369697.GC', - 'NSS.GHRR.TN.D80003.S1147.E1332.B0630506.GC', - 'NSS.GHRR.TN.D80003.S1328.E1513.B0630507.GC', - 'NSS.GHRR.TN.D80003.S1509.E1654.B0630608.GC'] - -GAC_KLM_FILENAMES = ['NSS.GHRR.NK.D01235.S0252.E0446.B1703233.GC', - 'NSS.GHRR.NL.D01288.S2315.E0104.B0549495.GC', - 'NSS.GHRR.NM.D04111.S2305.E0050.B0947778.GC', - 'NSS.GHRR.NN.D13011.S0559.E0741.B3939192.WI', - 'NSS.GHRR.NP.D15361.S0121.E0315.B3547172.SV', - 'NSS.GHRR.M1.D15362.S0031.E0129.B1699697.SV', - 'NSS.GHRR.M2.D10178.S2359.E0142.B1914142.SV'] - -LAC_POD_FILENAMES = ['BRN.HRPT.ND.D95152.S1730.E1715.B2102323.UB', - 'BRN.HRPT.ND.D95152.S1910.E1857.B2102424.UB', - 'BRN.HRPT.NF.D85152.S1345.E1330.B0241414.UB', - 'BRN.HRPT.NJ.D95152.S1233.E1217.B0216060.UB'] - -LAC_KLM_FILENAMES = ['BRN.HRPT.M1.D14152.S0958.E1012.B0883232.UB', - 'BRN.HRPT.M1.D14152.S1943.E1958.B0883838.UB', - 'BRN.HRPT.M2.D12153.S0912.E0922.B2914747.UB', - 'BRN.HRPT.NN.D12153.S0138.E0152.B3622828.UB', - 'BRN.HRPT.NN.D12153.S0139.E0153.B3622828.UB', - 'BRN.HRPT.NN.D12153.S1309.E1324.B3623535.UB', - 'BRN.HRPT.NP.D12153.S0003.E0016.B1707272.UB', - 
'BRN.HRPT.NP.D12153.S1134.E1148.B1707979.UB', - 'BRN.HRPT.NP.D16184.S1256.E1311.B3813131.UB', - 'BRN.HRPT.NP.D16184.S1438.E1451.B3813232.UB', - 'BRN.HRPT.NP.D16184.S1439.E1451.B3813232.UB', - 'BRN.HRPT.NP.D16185.S1245.E1259.B3814545.UB', - 'BRN.HRPT.NP.D16185.S1427.E1440.B3814646.UB', - 'NSS.FRAC.M2.D12153.S1729.E1910.B2915354.SV', - 'NSS.LHRR.NP.D16306.S1803.E1814.B3985555.WI'] - - -@mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__', return_value=None) +GAC_POD_FILENAMES = ["NSS.GHRR.NA.D79184.S1150.E1337.B0008384.WI", + "NSS.GHRR.NA.D79184.S2350.E0137.B0008384.WI", + "NSS.GHRR.NA.D80021.S0927.E1121.B0295354.WI", + "NSS.GHRR.NA.D80021.S1120.E1301.B0295455.WI", + "NSS.GHRR.NA.D80021.S1256.E1450.B0295556.GC", + "NSS.GHRR.NE.D83208.S1219.E1404.B0171819.WI", + "NSS.GHRR.NG.D88002.S0614.E0807.B0670506.WI", + "NSS.GHRR.TN.D79183.S1258.E1444.B0369697.GC", + "NSS.GHRR.TN.D80003.S1147.E1332.B0630506.GC", + "NSS.GHRR.TN.D80003.S1328.E1513.B0630507.GC", + "NSS.GHRR.TN.D80003.S1509.E1654.B0630608.GC"] + +GAC_KLM_FILENAMES = ["NSS.GHRR.NK.D01235.S0252.E0446.B1703233.GC", + "NSS.GHRR.NL.D01288.S2315.E0104.B0549495.GC", + "NSS.GHRR.NM.D04111.S2305.E0050.B0947778.GC", + "NSS.GHRR.NN.D13011.S0559.E0741.B3939192.WI", + "NSS.GHRR.NP.D15361.S0121.E0315.B3547172.SV", + "NSS.GHRR.M1.D15362.S0031.E0129.B1699697.SV", + "NSS.GHRR.M2.D10178.S2359.E0142.B1914142.SV"] + +LAC_POD_FILENAMES = ["BRN.HRPT.ND.D95152.S1730.E1715.B2102323.UB", + "BRN.HRPT.ND.D95152.S1910.E1857.B2102424.UB", + "BRN.HRPT.NF.D85152.S1345.E1330.B0241414.UB", + "BRN.HRPT.NJ.D95152.S1233.E1217.B0216060.UB"] + +LAC_KLM_FILENAMES = ["BRN.HRPT.M1.D14152.S0958.E1012.B0883232.UB", + "BRN.HRPT.M1.D14152.S1943.E1958.B0883838.UB", + "BRN.HRPT.M2.D12153.S0912.E0922.B2914747.UB", + "BRN.HRPT.NN.D12153.S0138.E0152.B3622828.UB", + "BRN.HRPT.NN.D12153.S0139.E0153.B3622828.UB", + "BRN.HRPT.NN.D12153.S1309.E1324.B3623535.UB", + "BRN.HRPT.NP.D12153.S0003.E0016.B1707272.UB", + "BRN.HRPT.NP.D12153.S1134.E1148.B1707979.UB", + "BRN.HRPT.NP.D16184.S1256.E1311.B3813131.UB", + "BRN.HRPT.NP.D16184.S1438.E1451.B3813232.UB", + "BRN.HRPT.NP.D16184.S1439.E1451.B3813232.UB", + "BRN.HRPT.NP.D16185.S1245.E1259.B3814545.UB", + "BRN.HRPT.NP.D16185.S1427.E1440.B3814646.UB", + "NSS.FRAC.M2.D12153.S1729.E1910.B2915354.SV", + "NSS.LHRR.NP.D16306.S1803.E1814.B3985555.WI"] + + +@mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__", return_value=None) def _get_fh_mocked(init_mock, **attrs): """Create a mocked file handler with the given attributes.""" from satpy.readers.avhrr_l1b_gaclac import GACLACFile @@ -82,11 +82,11 @@ def _get_fh_mocked(init_mock, **attrs): def _get_reader_mocked(along_track=3): """Create a mocked reader.""" - reader = mock.MagicMock(spacecraft_name='spacecraft_name', - meta_data={'foo': 'bar'}) + reader = mock.MagicMock(spacecraft_name="spacecraft_name", + meta_data={"foo": "bar"}) reader.mask = [0, 0] reader.get_times.return_value = np.arange(along_track) - reader.get_tle_lines.return_value = 'tle' + reader.get_tle_lines.return_value = "tle" return reader @@ -98,16 +98,16 @@ def setUp(self): self.pygac = mock.MagicMock() self.fhs = mock.MagicMock() modules = { - 'pygac': self.pygac, - 'pygac.gac_klm': self.pygac.gac_klm, - 'pygac.gac_pod': self.pygac.gac_pod, - 'pygac.lac_klm': self.pygac.lac_klm, - 'pygac.lac_pod': self.pygac.lac_pod, - 'pygac.utils': self.pygac.utils, - 'pygac.calibration': self.pygac.calibration, + "pygac": self.pygac, + "pygac.gac_klm": self.pygac.gac_klm, + "pygac.gac_pod": self.pygac.gac_pod, + "pygac.lac_klm": 
self.pygac.lac_klm, + "pygac.lac_pod": self.pygac.lac_pod, + "pygac.utils": self.pygac.utils, + "pygac.calibration": self.pygac.calibration, } - self.module_patcher = mock.patch.dict('sys.modules', modules) + self.module_patcher = mock.patch.dict("sys.modules", modules) self.module_patcher.start() def tearDown(self): @@ -131,7 +131,7 @@ def setUp(self): class TestGACLACFile(GACLACFilePatcher): """Test the GACLAC file handler.""" - def _get_fh(self, filename='NSS.GHRR.NG.D88002.S0614.E0807.B0670506.WI', + def _get_fh(self, filename="NSS.GHRR.NG.D88002.S0614.E0807.B0670506.WI", **kwargs): """Create a file handler.""" from trollsift import parse @@ -145,15 +145,15 @@ def test_init(self): from pygac.lac_klm import LACKLMReader from pygac.lac_pod import LACPODReader - kwargs = {'start_line': 1, - 'end_line': 2, - 'strip_invalid_coords': True, - 'interpolate_coords': True, - 'adjust_clock_drift': True, - 'tle_dir': 'tle_dir', - 'tle_name': 'tle_name', - 'tle_thresh': 123, - 'calibration': 'calibration'} + kwargs = {"start_line": 1, + "end_line": 2, + "strip_invalid_coords": True, + "interpolate_coords": True, + "adjust_clock_drift": True, + "tle_dir": "tle_dir", + "tle_name": "tle_name", + "tle_thresh": 123, + "calibration": "calibration"} for filenames, reader_cls in zip([GAC_POD_FILENAMES, GAC_KLM_FILENAMES, LAC_POD_FILENAMES, LAC_KLM_FILENAMES], [GACPODReader, GACKLMReader, LACPODReader, LACKLMReader]): for filename in filenames: @@ -161,23 +161,23 @@ def test_init(self): self.assertLess(fh.start_time, fh.end_time, "Start time must precede end time.") self.assertIs(fh.reader_class, reader_cls, - 'Wrong reader class assigned to {}'.format(filename)) + "Wrong reader class assigned to {}".format(filename)) def test_read_raw_data(self): """Test raw data reading.""" fh = _get_fh_mocked(reader=None, - interpolate_coords='interpolate_coords', - creation_site='creation_site', - reader_kwargs={'foo': 'bar'}, - filename='myfile') + interpolate_coords="interpolate_coords", + creation_site="creation_site", + reader_kwargs={"foo": "bar"}, + filename="myfile") reader = mock.MagicMock(mask=[0]) reader_cls = mock.MagicMock(return_value=reader) fh.reader_class = reader_cls fh.read_raw_data() - reader_cls.assert_called_with(interpolate_coords='interpolate_coords', - creation_site='creation_site', - foo='bar') - reader.read.assert_called_with('myfile') + reader_cls.assert_called_with(interpolate_coords="interpolate_coords", + creation_site="creation_site", + foo="bar") + reader.read.assert_called_with("myfile") # Test exception if all data is masked reader.mask = [1] @@ -185,9 +185,9 @@ def test_read_raw_data(self): with self.assertRaises(ValueError): fh.read_raw_data() - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs') - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile.slice') - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_channel') + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs") + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.slice") + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_channel") def test_get_dataset_slice(self, get_channel, slc, *mocks): """Get a slice of a dataset.""" from satpy.tests.utils import make_dataid @@ -206,28 +206,28 @@ def slice_patched(data, times): acq = np.array([0, 1, 2, 3, 4]) slc.side_effect = slice_patched get_channel.return_value = ch - kwargs_list = [{'strip_invalid_coords': False, - 'start_line': 123, 'end_line': 456}, - {'strip_invalid_coords': True, - 'start_line': None, 'end_line': 
None}, - {'strip_invalid_coords': True, - 'start_line': 123, 'end_line': 456}] + kwargs_list = [{"strip_invalid_coords": False, + "start_line": 123, "end_line": 456}, + {"strip_invalid_coords": True, + "start_line": None, "end_line": None}, + {"strip_invalid_coords": True, + "start_line": 123, "end_line": 456}] for kwargs in kwargs_list: fh = _get_fh_mocked( reader=_get_reader_mocked(along_track=len(acq)), - chn_dict={'1': 0}, + chn_dict={"1": 0}, **kwargs ) - key = make_dataid(name='1', calibration='reflectance') - info = {'name': '1', 'standard_name': 'reflectance'} + key = make_dataid(name="1", calibration="reflectance") + info = {"name": "1", "standard_name": "reflectance"} res = fh.get_dataset(key, info) np.testing.assert_array_equal(res.data, ch[1:3, :]) - np.testing.assert_array_equal(res.coords['acq_time'].data, acq[1:3]) - np.testing.assert_array_equal(slc.call_args_list[-1][1]['times'], acq) - np.testing.assert_array_equal(slc.call_args_list[-1][1]['data'], ch) + np.testing.assert_array_equal(res.coords["acq_time"].data, acq[1:3]) + np.testing.assert_array_equal(slc.call_args_list[-1][1]["times"], acq) + np.testing.assert_array_equal(slc.call_args_list[-1][1]["data"], ch) - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs') + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs") def test_get_dataset_latlon(self, *mocks): """Test getting the latitudes and longitudes.""" from satpy.tests.utils import make_dataid @@ -245,26 +245,26 @@ def test_get_dataset_latlon(self, *mocks): ) # With interpolation of coordinates - for name, exp_data in zip(['longitude', 'latitude'], [lons, lats]): + for name, exp_data in zip(["longitude", "latitude"], [lons, lats]): key = make_dataid(name=name) - info = {'name': name, 'standard_name': 'my_standard_name'} + info = {"name": name, "standard_name": "my_standard_name"} res = fh.get_dataset(key=key, info=info) exp = xr.DataArray(exp_data, name=res.name, - dims=('y', 'x'), - coords={'acq_time': ('y', [0, 1, 2])}) + dims=("y", "x"), + coords={"acq_time": ("y", [0, 1, 2])}) xr.testing.assert_equal(res, exp) # Without interpolation of coordinates fh.interpolate_coords = False - for name, _exp_data in zip(['longitude', 'latitude'], [lons, lats]): + for name, _exp_data in zip(["longitude", "latitude"], [lons, lats]): key = make_dataid(name=name) - info = {'name': name, 'standard_name': 'my_standard_name'} + info = {"name": name, "standard_name": "my_standard_name"} res = fh.get_dataset(key=key, info=info) - self.assertTupleEqual(res.dims, ('y', 'x_every_eighth')) + self.assertTupleEqual(res.dims, ("y", "x_every_eighth")) - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs') - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_angle') + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs") + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_angle") def test_get_dataset_angles(self, get_angle, *mocks): """Test getting the angles.""" from satpy.readers.avhrr_l1b_gaclac import ANGLES @@ -284,23 +284,23 @@ def test_get_dataset_angles(self, get_angle, *mocks): # With interpolation of coordinates for angle in ANGLES: key = make_dataid(name=angle) - info = {'name': angle, 'standard_name': 'my_standard_name'} + info = {"name": angle, "standard_name": "my_standard_name"} res = fh.get_dataset(key=key, info=info) exp = xr.DataArray(ones, name=res.name, - dims=('y', 'x'), - coords={'acq_time': ('y', [0, 1, 2])}) + dims=("y", "x"), + coords={"acq_time": ("y", [0, 1, 2])}) 
xr.testing.assert_equal(res, exp) # Without interpolation of coordinates fh.interpolate_coords = False for angle in ANGLES: key = make_dataid(name=angle) - info = {'name': angle, 'standard_name': 'my_standard_name'} + info = {"name": angle, "standard_name": "my_standard_name"} res = fh.get_dataset(key=key, info=info) - self.assertTupleEqual(res.dims, ('y', 'x_every_eighth')) + self.assertTupleEqual(res.dims, ("y", "x_every_eighth")) - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs') + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs") def test_get_dataset_qual_flags(self, *mocks): """Test getting the quality flags.""" from satpy.tests.utils import make_dataid @@ -316,20 +316,20 @@ def test_get_dataset_qual_flags(self, *mocks): interpolate_coords=True ) - key = make_dataid(name='qual_flags') - info = {'name': 'qual_flags'} + key = make_dataid(name="qual_flags") + info = {"name": "qual_flags"} res = fh.get_dataset(key=key, info=info) exp = xr.DataArray(qual_flags, name=res.name, - dims=('y', 'num_flags'), - coords={'acq_time': ('y', [0, 1, 2]), - 'num_flags': ['Scan line number', - 'Fatal error flag', - 'Insufficient data for calibration', - 'Insufficient data for calibration', - 'Solar contamination of blackbody in channels 3', - 'Solar contamination of blackbody in channels 4', - 'Solar contamination of blackbody in channels 5']}) + dims=("y", "num_flags"), + coords={"acq_time": ("y", [0, 1, 2]), + "num_flags": ["Scan line number", + "Fatal error flag", + "Insufficient data for calibration", + "Insufficient data for calibration", + "Solar contamination of blackbody in channels 3", + "Solar contamination of blackbody in channels 4", + "Solar contamination of blackbody in channels 5"]}) xr.testing.assert_equal(res, exp) def test_get_channel(self): @@ -343,9 +343,9 @@ def test_get_channel(self): reader.get_counts.return_value = counts reader.get_calibrated_channels.return_value = calib_channels fh = _get_fh_mocked(reader=reader, counts=None, calib_channels=None, - chn_dict={'1': 0}) + chn_dict={"1": 0}) - key = make_dataid(name='1', calibration='counts') + key = make_dataid(name="1", calibration="counts") # Counts res = fh._get_channel(key=key) np.testing.assert_array_equal(res, [[1, 2, 3], @@ -353,8 +353,8 @@ def test_get_channel(self): np.testing.assert_array_equal(fh.counts, counts) # Reflectance and Brightness Temperature - for calib in ['reflectance', 'brightness_temperature']: - key = make_dataid(name='1', calibration=calib) + for calib in ["reflectance", "brightness_temperature"]: + key = make_dataid(name="1", calibration=calib) res = fh._get_channel(key=key) np.testing.assert_array_equal(res, [[2, 4, 6], [8, 10, 12]]) @@ -362,17 +362,17 @@ def test_get_channel(self): # Invalid with pytest.raises(ValueError): - key = make_dataid(name='7', calibration='coffee') + key = make_dataid(name="7", calibration="coffee") # Buffering reader.get_counts.reset_mock() - key = make_dataid(name='1', calibration='counts') + key = make_dataid(name="1", calibration="counts") fh._get_channel(key=key) reader.get_counts.assert_not_called() reader.get_calibrated_channels.reset_mock() - for calib in ['reflectance', 'brightness_temperature']: - key = make_dataid(name='1', calibration=calib) + for calib in ["reflectance", "brightness_temperature"]: + key = make_dataid(name="1", calibration=calib) fh._get_channel(key) reader.get_calibrated_channels.assert_not_called() @@ -385,17 +385,17 @@ def test_get_angle(self): fh = _get_fh_mocked(reader=reader, angles=None) # Test 
angle readout - key = make_dataid(name='sensor_zenith_angle') + key = make_dataid(name="sensor_zenith_angle") res = fh._get_angle(key) self.assertEqual(res, 2) - self.assertDictEqual(fh.angles, {'sensor_zenith_angle': 2, - 'sensor_azimuth_angle': 1, - 'solar_zenith_angle': 4, - 'solar_azimuth_angle': 3, - 'sun_sensor_azimuth_difference_angle': 5}) + self.assertDictEqual(fh.angles, {"sensor_zenith_angle": 2, + "sensor_azimuth_angle": 1, + "solar_zenith_angle": 4, + "solar_azimuth_angle": 3, + "sun_sensor_azimuth_difference_angle": 5}) # Test buffering - key = make_dataid(name='sensor_azimuth_angle') + key = make_dataid(name="sensor_azimuth_angle") fh._get_angle(key) reader.get_angles.assert_called_once() @@ -416,7 +416,7 @@ def test_strip_invalid_lat(self): fh._strip_invalid_lat() pygac.utils.strip_invalid_lat.assert_called_once() - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._slice') + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._slice") def test_slice(self, _slice): """Test slicing.""" @@ -425,7 +425,7 @@ def _slice_patched(data): _slice.side_effect = _slice_patched data = np.zeros((4, 2)) - times = np.array([1, 2, 3, 4], dtype='datetime64[us]') + times = np.array([1, 2, 3, 4], dtype="datetime64[us]") fh = _get_fh_mocked(start_line=1, end_line=3, strip_invalid_coords=False) data_slc, times_slc = fh.slice(data, times) @@ -434,22 +434,22 @@ def _slice_patched(data): self.assertEqual(fh.start_time, datetime(1970, 1, 1, 0, 0, 0, 2)) self.assertEqual(fh.end_time, datetime(1970, 1, 1, 0, 0, 0, 3)) - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_qual_flags') - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._strip_invalid_lat') + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_qual_flags") + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._strip_invalid_lat") def test__slice(self, strip_invalid_lat, get_qual_flags): """Test slicing.""" import pygac.utils pygac.utils.check_user_scanlines.return_value = 1, 2 - pygac.utils.slice_channel.return_value = 'sliced' + pygac.utils.slice_channel.return_value = "sliced" strip_invalid_lat.return_value = 3, 4 - get_qual_flags.return_value = 'qual_flags' + get_qual_flags.return_value = "qual_flags" data = np.zeros((2, 2)) # a) Only start/end line given fh = _get_fh_mocked(start_line=5, end_line=6, strip_invalid_coords=False) data_slc = fh._slice(data) - self.assertEqual(data_slc, 'sliced') + self.assertEqual(data_slc, "sliced") pygac.utils.check_user_scanlines.assert_called_with( start_line=5, end_line=6, first_valid_lat=None, last_valid_lat=None, along_track=2) @@ -472,9 +472,9 @@ def test__slice(self, strip_invalid_lat, get_qual_flags): first_valid_lat=3, last_valid_lat=4, along_track=2) # Test slicing with older pygac versions - pygac.utils.slice_channel.return_value = ('sliced', 'foo', 'bar') + pygac.utils.slice_channel.return_value = ("sliced", "foo", "bar") data_slc = fh._slice(data) - self.assertEqual(data_slc, 'sliced') + self.assertEqual(data_slc, "sliced") class TestGetDataset(GACLACFilePatcher): @@ -483,21 +483,21 @@ class TestGetDataset(GACLACFilePatcher): def setUp(self): """Set up the instance.""" self.exp = xr.DataArray(da.ones((3, 3)), - name='1', - dims=('y', 'x'), - coords={'acq_time': ('y', [0, 1, 2])}, - attrs={'name': '1', - 'platform_name': 'spacecraft_name', - 'orbit_number': 123, - 'sensor': 'sensor', - 'foo': 'bar', - 'standard_name': 'my_standard_name'}) - self.exp.coords['acq_time'].attrs['long_name'] = 'Mean scanline acquisition time' + name="1", + dims=("y", "x"), + 
coords={"acq_time": ("y", [0, 1, 2])}, + attrs={"name": "1", + "platform_name": "spacecraft_name", + "orbit_number": 123, + "sensor": "sensor", + "foo": "bar", + "standard_name": "my_standard_name"}) + self.exp.coords["acq_time"].attrs["long_name"] = "Mean scanline acquisition time" super().setUp() - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__', return_value=None) - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile.read_raw_data') - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_channel', return_value=np.ones((3, 3))) + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__", return_value=None) + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.read_raw_data") + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_channel", return_value=np.ones((3, 3))) def test_get_dataset_channels(self, get_channel, *mocks): """Test getting the channel datasets.""" pygac_reader = _get_reader_mocked() @@ -506,7 +506,7 @@ def test_get_dataset_channels(self, get_channel, *mocks): # Test calibration to reflectance as well as attributes. key, res = self._get_dataset(fh) exp = self._create_expected(res.name) - exp.attrs['orbital_parameters'] = {'tle': 'tle'} + exp.attrs["orbital_parameters"] = {"tle": "tle"} xr.testing.assert_identical(res, exp) get_channel.assert_called_with(key) @@ -517,8 +517,8 @@ def test_get_dataset_channels(self, get_channel, *mocks): def _get_dataset(fh): from satpy.tests.utils import make_dataid - key = make_dataid(name='1', calibration='reflectance') - info = {'name': '1', 'standard_name': 'my_standard_name'} + key = make_dataid(name="1", calibration="reflectance") + info = {"name": "1", "standard_name": "my_standard_name"} res = fh.get_dataset(key=key, info=info) return key, res @@ -527,12 +527,12 @@ def _create_file_handler(reader): """Mock reader and file handler.""" fh = _get_fh_mocked( reader=reader, - chn_dict={'1': 0, '5': 0}, + chn_dict={"1": 0, "5": 0}, start_line=None, end_line=None, strip_invalid_coords=False, - filename_info={'orbit_number': 123}, - sensor='sensor', + filename_info={"orbit_number": 123}, + sensor="sensor", ) return fh @@ -540,20 +540,20 @@ def _create_file_handler(reader): def _create_expected(name): exp = xr.DataArray(da.ones((3, 3)), name=name, - dims=('y', 'x'), - coords={'acq_time': ('y', [0, 1, 2])}, - attrs={'name': '1', - 'platform_name': 'spacecraft_name', - 'orbit_number': 123, - 'sensor': 'sensor', - 'foo': 'bar', - 'standard_name': 'my_standard_name'}) - exp.coords['acq_time'].attrs['long_name'] = 'Mean scanline acquisition time' + dims=("y", "x"), + coords={"acq_time": ("y", [0, 1, 2])}, + attrs={"name": "1", + "platform_name": "spacecraft_name", + "orbit_number": 123, + "sensor": "sensor", + "foo": "bar", + "standard_name": "my_standard_name"}) + exp.coords["acq_time"].attrs["long_name"] = "Mean scanline acquisition time" return exp - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__', return_value=None) - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile.read_raw_data') - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_channel', return_value=np.ones((3, 3))) + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__", return_value=None) + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.read_raw_data") + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_channel", return_value=np.ones((3, 3))) def test_get_dataset_no_tle(self, get_channel, *mocks): """Test getting the channel datasets when no TLEs are present.""" pygac_reader = 
_get_reader_mocked() @@ -575,7 +575,7 @@ def _check_get_channel_calls(fh, get_channel): """Check _get_channel() calls.""" from satpy.tests.utils import make_dataid - for key in [make_dataid(name='1', calibration='counts'), - make_dataid(name='5', calibration='brightness_temperature')]: - fh.get_dataset(key=key, info={'name': 1}) + for key in [make_dataid(name="1", calibration="counts"), + make_dataid(name="5", calibration="brightness_temperature")]: + fh.get_dataset(key=key, info={"name": 1}) get_channel.assert_called_with(key) diff --git a/satpy/tests/reader_tests/test_clavrx.py b/satpy/tests/reader_tests/test_clavrx.py index 86e0cf1fa7..66758d44dc 100644 --- a/satpy/tests/reader_tests/test_clavrx.py +++ b/satpy/tests/reader_tests/test_clavrx.py @@ -45,64 +45,64 @@ class FakeHDF4FileHandlerPolar(FakeHDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { - '/attr/platform': 'SNPP', - '/attr/sensor': 'VIIRS', + "/attr/platform": "SNPP", + "/attr/sensor": "VIIRS", } - file_content['longitude'] = xr.DataArray( + file_content["longitude"] = xr.DataArray( da.from_array(DEFAULT_LON_DATA, chunks=4096), attrs={ - '_FillValue': np.nan, - 'scale_factor': 1., - 'add_offset': 0., - 'standard_name': 'longitude', + "_FillValue": np.nan, + "scale_factor": 1., + "add_offset": 0., + "standard_name": "longitude", }) - file_content['longitude/shape'] = DEFAULT_FILE_SHAPE + file_content["longitude/shape"] = DEFAULT_FILE_SHAPE - file_content['latitude'] = xr.DataArray( + file_content["latitude"] = xr.DataArray( da.from_array(DEFAULT_LAT_DATA, chunks=4096), attrs={ - '_FillValue': np.nan, - 'scale_factor': 1., - 'add_offset': 0., - 'standard_name': 'latitude', + "_FillValue": np.nan, + "scale_factor": 1., + "add_offset": 0., + "standard_name": "latitude", }) - file_content['latitude/shape'] = DEFAULT_FILE_SHAPE + file_content["latitude/shape"] = DEFAULT_FILE_SHAPE - file_content['variable1'] = xr.DataArray( + file_content["variable1"] = xr.DataArray( da.from_array(DEFAULT_FILE_DATA, chunks=4096).astype(np.float32), attrs={ - '_FillValue': -1, - 'scale_factor': 1., - 'add_offset': 0., - 'units': '1', + "_FillValue": -1, + "scale_factor": 1., + "add_offset": 0., + "units": "1", }) - file_content['variable1/shape'] = DEFAULT_FILE_SHAPE + file_content["variable1/shape"] = DEFAULT_FILE_SHAPE # data with fill values - file_content['variable2'] = xr.DataArray( + file_content["variable2"] = xr.DataArray( da.from_array(DEFAULT_FILE_DATA, chunks=4096).astype(np.float32), attrs={ - '_FillValue': -1, - 'scale_factor': 1., - 'add_offset': 0., - 'units': '1', + "_FillValue": -1, + "scale_factor": 1., + "add_offset": 0., + "units": "1", }) - file_content['variable2/shape'] = DEFAULT_FILE_SHAPE - file_content['variable2'] = file_content['variable2'].where( - file_content['variable2'] % 2 != 0) + file_content["variable2/shape"] = DEFAULT_FILE_SHAPE + file_content["variable2"] = file_content["variable2"].where( + file_content["variable2"] % 2 != 0) # category - file_content['variable3'] = xr.DataArray( + file_content["variable3"] = xr.DataArray( da.from_array(DEFAULT_FILE_DATA, chunks=4096).astype(np.byte), attrs={ - 'SCALED': 0, - '_FillValue': -128, - 'flag_meanings': 'clear water supercooled mixed ice unknown', - 'flag_values': [0, 1, 2, 3, 4, 5], - 'units': 'none', + "SCALED": 0, + "_FillValue": -128, + "flag_meanings": "clear water supercooled mixed ice unknown", + "flag_values": [0, 1, 2, 3, 4, 5], + "units": "none", }) - 
file_content['variable3/shape'] = DEFAULT_FILE_SHAPE + file_content["variable3/shape"] = DEFAULT_FILE_SHAPE return file_content @@ -116,9 +116,9 @@ def setUp(self): """Wrap HDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.clavrx import CLAVRXHDF4FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(CLAVRXHDF4FileHandler, '__bases__', (FakeHDF4FileHandlerPolar,)) + self.p = mock.patch.object(CLAVRXHDF4FileHandler, "__bases__", (FakeHDF4FileHandlerPolar,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -131,7 +131,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf', + "clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -143,7 +143,7 @@ def test_available_datasets(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf', + "clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -152,57 +152,57 @@ def test_available_datasets(self): # mimic the YAML file being configured for more datasets fake_dataset_info = [ - (None, {'name': 'variable1', 'resolution': None, 'file_type': ['clavrx_hdf4']}), - (True, {'name': 'variable2', 'resolution': 742, 'file_type': ['clavrx_hdf4']}), - (True, {'name': 'variable2', 'resolution': 1, 'file_type': ['clavrx_hdf4']}), - (None, {'name': 'variable2', 'resolution': 1, 'file_type': ['clavrx_hdf4']}), - (None, {'name': '_fake1', 'file_type': ['clavrx_hdf4']}), - (None, {'name': 'variable1', 'file_type': ['level_fake']}), - (True, {'name': 'variable3', 'file_type': ['clavrx_hdf4']}), + (None, {"name": "variable1", "resolution": None, "file_type": ["clavrx_hdf4"]}), + (True, {"name": "variable2", "resolution": 742, "file_type": ["clavrx_hdf4"]}), + (True, {"name": "variable2", "resolution": 1, "file_type": ["clavrx_hdf4"]}), + (None, {"name": "variable2", "resolution": 1, "file_type": ["clavrx_hdf4"]}), + (None, {"name": "_fake1", "file_type": ["clavrx_hdf4"]}), + (None, {"name": "variable1", "file_type": ["level_fake"]}), + (True, {"name": "variable3", "file_type": ["clavrx_hdf4"]}), ] - new_ds_infos = list(r.file_handlers['clavrx_hdf4'][0].available_datasets( + new_ds_infos = list(r.file_handlers["clavrx_hdf4"][0].available_datasets( fake_dataset_info)) self.assertEqual(len(new_ds_infos), 9) # we have this and can provide the resolution self.assertTrue(new_ds_infos[0][0]) - self.assertEqual(new_ds_infos[0][1]['resolution'], 742) # hardcoded + self.assertEqual(new_ds_infos[0][1]["resolution"], 742) # hardcoded # we have this, but previous file handler said it knew about it # and it is producing the same resolution as what we have self.assertTrue(new_ds_infos[1][0]) - self.assertEqual(new_ds_infos[1][1]['resolution'], 742) + self.assertEqual(new_ds_infos[1][1]["resolution"], 742) # we have this, but don't want to change the resolution # because a previous handler said it has it 
         self.assertTrue(new_ds_infos[2][0])
-        self.assertEqual(new_ds_infos[2][1]['resolution'], 1)
+        self.assertEqual(new_ds_infos[2][1]["resolution"], 1)

         # even though the previous one was known we can still
         # produce it at our new resolution
         self.assertTrue(new_ds_infos[3][0])
-        self.assertEqual(new_ds_infos[3][1]['resolution'], 742)
+        self.assertEqual(new_ds_infos[3][1]["resolution"], 742)

         # we have this and can update the resolution since
         # no one else has claimed it
         self.assertTrue(new_ds_infos[4][0])
-        self.assertEqual(new_ds_infos[4][1]['resolution'], 742)
+        self.assertEqual(new_ds_infos[4][1]["resolution"], 742)

         # we don't have this variable, don't change it
         self.assertFalse(new_ds_infos[5][0])
-        self.assertIsNone(new_ds_infos[5][1].get('resolution'))
+        self.assertIsNone(new_ds_infos[5][1].get("resolution"))

         # we have this, but it isn't supposed to come from our file type
         self.assertIsNone(new_ds_infos[6][0])
-        self.assertIsNone(new_ds_infos[6][1].get('resolution'))
+        self.assertIsNone(new_ds_infos[6][1].get("resolution"))

         # we could have loaded this but some other file handler said it has this
         self.assertTrue(new_ds_infos[7][0])
-        self.assertIsNone(new_ds_infos[7][1].get('resolution'))
+        self.assertIsNone(new_ds_infos[7][1].get("resolution"))

         # we can add resolution to the previous dataset, so we do
         self.assertTrue(new_ds_infos[8][0])
-        self.assertEqual(new_ds_infos[8][1]['resolution'], 742)
+        self.assertEqual(new_ds_infos[8][1]["resolution"], 742)

     def test_load_all(self):
         """Test loading all test datasets."""
@@ -210,22 +210,22 @@ def test_load_all(self):
         from satpy.readers import load_reader

         r = load_reader(self.reader_configs)
-        with mock.patch('satpy.readers.clavrx.SDS', xr.DataArray):
+        with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray):
             loadables = r.select_files_from_pathnames([
-                'clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf',
+                "clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf",
             ])
             r.create_filehandlers(loadables)
-        var_list = ['variable1', 'variable2', 'variable3']
+        var_list = ["variable1", "variable2", "variable3"]
         datasets = r.load(var_list)
         self.assertEqual(len(datasets), len(var_list))
         for v in datasets.values():
-            self.assertEqual(v.attrs['units'], '1')
-            self.assertEqual(v.attrs['platform_name'], 'npp')
-            self.assertEqual(v.attrs['sensor'], 'viirs')
-            self.assertIsInstance(v.attrs['area'], SwathDefinition)
-            self.assertEqual(v.attrs['area'].lons.attrs['rows_per_scan'], 16)
-            self.assertEqual(v.attrs['area'].lats.attrs['rows_per_scan'], 16)
+            self.assertEqual(v.attrs["units"], "1")
+            self.assertEqual(v.attrs["platform_name"], "npp")
+            self.assertEqual(v.attrs["sensor"], "viirs")
+            self.assertIsInstance(v.attrs["area"], SwathDefinition)
+            self.assertEqual(v.attrs["area"].lons.attrs["rows_per_scan"], 16)
+            self.assertEqual(v.attrs["area"].lats.attrs["rows_per_scan"], 16)
         self.assertIsInstance(datasets["variable3"].attrs.get("flag_meanings"), list)

@@ -235,72 +235,72 @@ class FakeHDF4FileHandlerGeo(FakeHDF4FileHandler):
     def get_test_content(self, filename, filename_info, filetype_info):
         """Mimic reader input file content."""
         file_content = {
-            '/attr/platform': 'HIM8',
-            '/attr/sensor': 'AHI',
+            "/attr/platform": "HIM8",
+            "/attr/sensor": "AHI",
             # this is a Level 2 file that came from a L1B file
-            '/attr/L1B': 'clavrx_H08_20180806_1800',
+            "/attr/L1B": "clavrx_H08_20180806_1800",
         }

-        file_content['longitude'] = xr.DataArray(
+        file_content["longitude"] = xr.DataArray(
             DEFAULT_LON_DATA,
-            dims=('y', 'x'),
+            dims=("y", "x"),
             attrs={
-                '_FillValue': np.nan,
-                'scale_factor': 1.,
-                'add_offset': 0.,
-                'standard_name': 'longitude',
+                "_FillValue": np.nan,
+                "scale_factor": 1.,
+                "add_offset": 0.,
+                "standard_name": "longitude",
             })
-        file_content['longitude/shape'] = DEFAULT_FILE_SHAPE
+        file_content["longitude/shape"] = DEFAULT_FILE_SHAPE

-        file_content['latitude'] = xr.DataArray(
+        file_content["latitude"] = xr.DataArray(
             DEFAULT_LAT_DATA,
-            dims=('y', 'x'),
+            dims=("y", "x"),
             attrs={
-                '_FillValue': np.nan,
-                'scale_factor': 1.,
-                'add_offset': 0.,
-                'standard_name': 'latitude',
+                "_FillValue": np.nan,
+                "scale_factor": 1.,
+                "add_offset": 0.,
+                "standard_name": "latitude",
             })
-        file_content['latitude/shape'] = DEFAULT_FILE_SHAPE
+        file_content["latitude/shape"] = DEFAULT_FILE_SHAPE

-        file_content['variable1'] = xr.DataArray(
+        file_content["variable1"] = xr.DataArray(
             DEFAULT_FILE_DATA.astype(np.float32),
-            dims=('y', 'x'),
+            dims=("y", "x"),
             attrs={
-                '_FillValue': -1,
-                'scale_factor': 1.,
-                'add_offset': 0.,
-                'units': '1',
-                'valid_range': (-32767, 32767),
+                "_FillValue": -1,
+                "scale_factor": 1.,
+                "add_offset": 0.,
+                "units": "1",
+                "valid_range": (-32767, 32767),
             })
-        file_content['variable1/shape'] = DEFAULT_FILE_SHAPE
+        file_content["variable1/shape"] = DEFAULT_FILE_SHAPE

         # data with fill values
-        file_content['variable2'] = xr.DataArray(
+        file_content["variable2"] = xr.DataArray(
             DEFAULT_FILE_DATA.astype(np.float32),
-            dims=('y', 'x'),
+            dims=("y", "x"),
             attrs={
-                '_FillValue': -1,
-                'scale_factor': 1.,
-                'add_offset': 0.,
-                'units': '1',
+                "_FillValue": -1,
+                "scale_factor": 1.,
+                "add_offset": 0.,
+                "units": "1",
             })
-        file_content['variable2/shape'] = DEFAULT_FILE_SHAPE
-        file_content['variable2'] = file_content['variable2'].where(
-            file_content['variable2'] % 2 != 0)
+        file_content["variable2/shape"] = DEFAULT_FILE_SHAPE
+        file_content["variable2"] = file_content["variable2"].where(
+            file_content["variable2"] % 2 != 0)

         # category
-        file_content['variable3'] = xr.DataArray(
+        file_content["variable3"] = xr.DataArray(
             DEFAULT_FILE_DATA.astype(np.byte),
-            dims=('y', 'x'),
+            dims=("y", "x"),
             attrs={
-                'SCALED': 0,
-                '_FillValue': -128,
-                'flag_meanings': 'clear water supercooled mixed ice unknown',
-                'flag_values': [0, 1, 2, 3, 4, 5],
-                'units': '1',
+                "SCALED": 0,
+                "_FillValue": -128,
+                "flag_meanings": "clear water supercooled mixed ice unknown",
+                "flag_values": [0, 1, 2, 3, 4, 5],
+                "units": "1",
             })
-        file_content['variable3/shape'] = DEFAULT_FILE_SHAPE
+        file_content["variable3/shape"] = DEFAULT_FILE_SHAPE

         return file_content

@@ -314,9 +314,9 @@ def setUp(self):
         """Wrap HDF4 file handler with our own fake handler."""
         from satpy._config import config_search_paths
         from satpy.readers.clavrx import CLAVRXHDF4FileHandler
-        self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file))
+        self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file))
         # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library
-        self.p = mock.patch.object(CLAVRXHDF4FileHandler, '__bases__', (FakeHDF4FileHandlerGeo,))
+        self.p = mock.patch.object(CLAVRXHDF4FileHandler, "__bases__", (FakeHDF4FileHandlerGeo,))
         self.fake_handler = self.p.start()
         self.p.is_local = True

@@ -329,7 +329,7 @@ def test_init(self):
         from satpy.readers import load_reader
         r = load_reader(self.reader_configs)
         loadables = r.select_files_from_pathnames([
-            'clavrx_H08_20180806_1800.level2.hdf',
+            "clavrx_H08_20180806_1800.level2.hdf",
         ])
         self.assertEqual(len(loadables), 1)
         r.create_filehandlers(loadables)
@@ -342,12 +342,12 @@ def test_no_nav_donor(self):
         from satpy.readers import load_reader

         r = load_reader(self.reader_configs)
-        with mock.patch('satpy.readers.clavrx.SDS', xr.DataArray):
+        with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray):
             loadables = r.select_files_from_pathnames([
-                'clavrx_H08_20180806_1800.level2.hdf',
+                "clavrx_H08_20180806_1800.level2.hdf",
             ])
             r.create_filehandlers(loadables)
-            self.assertRaises(IOError, r.load, ['variable1', 'variable2', 'variable3'])
+            self.assertRaises(IOError, r.load, ["variable1", "variable2", "variable3"])

     def test_load_all_old_donor(self):
         """Test loading all test datasets with old donor."""
@@ -355,13 +355,13 @@ def test_load_all_old_donor(self):
         from satpy.readers import load_reader

         r = load_reader(self.reader_configs)
-        with mock.patch('satpy.readers.clavrx.SDS', xr.DataArray):
+        with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray):
             loadables = r.select_files_from_pathnames([
-                'clavrx_H08_20180806_1800.level2.hdf',
+                "clavrx_H08_20180806_1800.level2.hdf",
             ])
             r.create_filehandlers(loadables)
-        with mock.patch('satpy.readers.clavrx.glob') as g, mock.patch('satpy.readers.clavrx.netCDF4.Dataset') as d:
-            g.return_value = ['fake_donor.nc']
+        with mock.patch("satpy.readers.clavrx.glob") as g, mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d:
+            g.return_value = ["fake_donor.nc"]
             x = np.linspace(-0.1518, 0.1518, 300)
             y = np.linspace(0.1518, -0.1518, 10)
             proj = mock.Mock(
@@ -369,29 +369,29 @@ def test_load_all_old_donor(self):
                 semi_minor_axis=6356.7523142,
                 perspective_point_height=35791,
                 longitude_of_projection_origin=140.7,
-                sweep_angle_axis='y',
+                sweep_angle_axis="y",
             )
             d.return_value = fake_donor = mock.MagicMock(
-                variables={'Projection': proj, 'x': x, 'y': y},
+                variables={"Projection": proj, "x": x, "y": y},
             )
             fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key]
-            datasets = r.load(['variable1', 'variable2', 'variable3'])
+            datasets = r.load(["variable1", "variable2", "variable3"])
         self.assertEqual(len(datasets), 3)
         for v in datasets.values():
-            self.assertNotIn('calibration', v.attrs)
-            self.assertEqual(v.attrs['units'], '1')
-            self.assertIsInstance(v.attrs['area'], AreaDefinition)
+            self.assertNotIn("calibration", v.attrs)
+            self.assertEqual(v.attrs["units"], "1")
+            self.assertIsInstance(v.attrs["area"], AreaDefinition)
             if v.attrs.get("flag_values"):
-                self.assertIn('_FillValue', v.attrs)
+                self.assertIn("_FillValue", v.attrs)
             else:
-                self.assertNotIn('_FillValue', v.attrs)
-            if v.attrs["name"] == 'variable1':
+                self.assertNotIn("_FillValue", v.attrs)
+            if v.attrs["name"] == "variable1":
                 self.assertIsInstance(v.attrs["valid_range"], list)
             else:
-                self.assertNotIn('valid_range', v.attrs)
-            if 'flag_values' in v.attrs:
+                self.assertNotIn("valid_range", v.attrs)
+            if "flag_values" in v.attrs:
                 self.assertTrue(np.issubdtype(v.dtype, np.integer))
-                self.assertIsNotNone(v.attrs.get('flag_meanings'))
+                self.assertIsNotNone(v.attrs.get("flag_meanings"))

     def test_load_all_new_donor(self):
         """Test loading all test datasets with new donor."""
@@ -399,13 +399,13 @@ def test_load_all_new_donor(self):
         from satpy.readers import load_reader

         r = load_reader(self.reader_configs)
-        with mock.patch('satpy.readers.clavrx.SDS', xr.DataArray):
+        with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray):
             loadables = r.select_files_from_pathnames([
-                'clavrx_H08_20180806_1800.level2.hdf',
+                "clavrx_H08_20180806_1800.level2.hdf",
             ])
             r.create_filehandlers(loadables)
-        with mock.patch('satpy.readers.clavrx.glob') as g, mock.patch('satpy.readers.clavrx.netCDF4.Dataset') as d:
-            g.return_value = ['fake_donor.nc']
+        with mock.patch("satpy.readers.clavrx.glob") as g, mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d:
+            g.return_value = ["fake_donor.nc"]
             x = np.linspace(-0.1518, 0.1518, 300)
             y = np.linspace(0.1518, -0.1518, 10)
             proj = mock.Mock(
@@ -413,19 +413,19 @@ def test_load_all_new_donor(self):
                 semi_minor_axis=6356752.3142,
                 perspective_point_height=35791000,
                 longitude_of_projection_origin=140.7,
-                sweep_angle_axis='y',
+                sweep_angle_axis="y",
             )
             d.return_value = fake_donor = mock.MagicMock(
-                variables={'goes_imager_projection': proj, 'x': x, 'y': y},
+                variables={"goes_imager_projection": proj, "x": x, "y": y},
             )
             fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key]
-            datasets = r.load(['variable1', 'variable2', 'variable3'])
+            datasets = r.load(["variable1", "variable2", "variable3"])
         self.assertEqual(len(datasets), 3)
         for v in datasets.values():
-            self.assertNotIn('calibration', v.attrs)
-            self.assertEqual(v.attrs['units'], '1')
-            self.assertIsInstance(v.attrs['area'], AreaDefinition)
-            self.assertTrue(v.attrs['area'].is_geostationary)
-            self.assertEqual(v.attrs['platform_name'], 'himawari8')
-            self.assertEqual(v.attrs['sensor'], 'ahi')
-            self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings'))
+            self.assertNotIn("calibration", v.attrs)
+            self.assertEqual(v.attrs["units"], "1")
+            self.assertIsInstance(v.attrs["area"], AreaDefinition)
+            self.assertTrue(v.attrs["area"].is_geostationary)
+            self.assertEqual(v.attrs["platform_name"], "himawari8")
+            self.assertEqual(v.attrs["sensor"], "ahi")
+            self.assertIsNotNone(datasets["variable3"].attrs.get("flag_meanings"))
diff --git a/satpy/tests/reader_tests/test_clavrx_nc.py b/satpy/tests/reader_tests/test_clavrx_nc.py
index ea0dcaed9b..33be29078a 100644
--- a/satpy/tests/reader_tests/test_clavrx_nc.py
+++ b/satpy/tests/reader_tests/test_clavrx_nc.py
@@ -36,75 +36,75 @@
 DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0)
 DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
 DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0)
-AHI_FILE = 'clavrx_H08_20210603_1500_B01_FLDK_R.level2.nc'
+AHI_FILE = "clavrx_H08_20210603_1500_B01_FLDK_R.level2.nc"


 def fake_test_content(filename, **kwargs):
     """Mimic reader input file content."""
     attrs = {
-        'platform': 'HIM8',
-        'sensor': 'AHI',
+        "platform": "HIM8",
+        "sensor": "AHI",
         # this is a Level 2 file that came from a L1B file
-        'L1B': 'clavrx_H08_20210603_1500_B01_FLDK_R',
+        "L1B": "clavrx_H08_20210603_1500_B01_FLDK_R",
     }

     longitude = xr.DataArray(DEFAULT_LON_DATA,
-                             dims=('scan_lines_along_track_direction',
-                                   'pixel_elements_along_scan_direction'),
-                             attrs={'_FillValue': np.nan,
-                                    'scale_factor': 1.,
-                                    'add_offset': 0.,
-                                    'standard_name': 'longitude',
-                                    'units': 'degrees_east'
+                             dims=("scan_lines_along_track_direction",
+                                   "pixel_elements_along_scan_direction"),
+                             attrs={"_FillValue": np.nan,
+                                    "scale_factor": 1.,
+                                    "add_offset": 0.,
+                                    "standard_name": "longitude",
+                                    "units": "degrees_east"
                                     })

     latitude = xr.DataArray(DEFAULT_LAT_DATA,
-                            dims=('scan_lines_along_track_direction',
-                                  'pixel_elements_along_scan_direction'),
-                            attrs={'_FillValue': np.nan,
-                                   'scale_factor': 1.,
-                                   'add_offset': 0.,
-                                   'standard_name': 'latitude',
-                                   'units': 'degrees_south'
+                            dims=("scan_lines_along_track_direction",
+                                  "pixel_elements_along_scan_direction"),
+                            attrs={"_FillValue": np.nan,
+                                   "scale_factor": 1.,
+                                   "add_offset": 0.,
+                                   "standard_name": "latitude",
+                                   "units": "degrees_south"
                                    })

     variable1 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.float32),
-                             dims=('scan_lines_along_track_direction',
-                                   'pixel_elements_along_scan_direction'),
-                             attrs={'_FillValue': np.nan,
-                                    'scale_factor': 1.,
-                                    'add_offset': 0.,
-                                    'units': '1',
-                                    'valid_range': [-32767, 32767],
+                             dims=("scan_lines_along_track_direction",
+                                   "pixel_elements_along_scan_direction"),
+                             attrs={"_FillValue": np.nan,
+                                    "scale_factor": 1.,
+                                    "add_offset": 0.,
+                                    "units": "1",
+                                    "valid_range": [-32767, 32767],
                                     })

     # data with fill values
     variable2 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.float32),
-                             dims=('scan_lines_along_track_direction',
-                                   'pixel_elements_along_scan_direction'),
-                             attrs={'_FillValue': np.nan,
-                                    'scale_factor': 1.,
-                                    'add_offset': 0.,
-                                    'units': '1',
-                                    'valid_range': [-32767, 32767],
+                             dims=("scan_lines_along_track_direction",
+                                   "pixel_elements_along_scan_direction"),
+                             attrs={"_FillValue": np.nan,
+                                    "scale_factor": 1.,
+                                    "add_offset": 0.,
+                                    "units": "1",
+                                    "valid_range": [-32767, 32767],
                                     })
     variable2 = variable2.where(variable2 % 2 != 0)

     # category
     variable3 = xr.DataArray(DEFAULT_FILE_FLAGS,
-                             dims=('scan_lines_along_track_direction',
-                                   'pixel_elements_along_scan_direction'),
-                             attrs={'SCALED': 0,
-                                    '_FillValue': -127,
-                                    'units': '1',
-                                    'flag_values': [0, 1, 2, 3]})
+                             dims=("scan_lines_along_track_direction",
+                                   "pixel_elements_along_scan_direction"),
+                             attrs={"SCALED": 0,
+                                    "_FillValue": -127,
+                                    "units": "1",
+                                    "flag_values": [0, 1, 2, 3]})

     ds_vars = {
-        'longitude': longitude,
-        'latitude': latitude,
-        'variable1': variable1,
-        'variable2': variable2,
-        'variable3': variable3
+        "longitude": longitude,
+        "latitude": latitude,
+        "variable1": variable1,
+        "variable2": variable2,
+        "variable3": variable3
     }

     ds = xr.Dataset(ds_vars, attrs=attrs)
@@ -121,7 +121,7 @@ class TestCLAVRXReaderGeo:
     def setup_method(self):
         """Read fake data."""
         from satpy._config import config_search_paths
-        self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file))
+        self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file))

     @pytest.mark.parametrize(
         ("filenames", "expected_loadables"),
@@ -130,7 +130,7 @@ def setup_method(self):
     def test_reader_creation(self, filenames, expected_loadables):
         """Test basic initialization."""
         from satpy.readers import load_reader
-        with mock.patch('satpy.readers.clavrx.xr.open_dataset') as od:
+        with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od:
             od.side_effect = fake_test_content
             r = load_reader(self.reader_configs)
             loadables = r.select_files_from_pathnames(filenames)
@@ -141,12 +141,12 @@ def test_reader_creation(self, filenames, expected_loadables):
     @pytest.mark.parametrize(
         ("filenames", "expected_datasets"),
-        [([AHI_FILE], ['variable1', 'variable2', 'variable3']), ]
+        [([AHI_FILE], ["variable1", "variable2", "variable3"]), ]
     )
     def test_available_datasets(self, filenames, expected_datasets):
         """Test that variables are dynamically discovered."""
         from satpy.readers import load_reader
-        with mock.patch('satpy.readers.clavrx.xr.open_dataset') as od:
+        with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od:
             od.side_effect = fake_test_content
             r = load_reader(self.reader_configs)
             loadables = r.select_files_from_pathnames(filenames)
@@ -157,19 +157,19 @@ def test_available_datasets(self, filenames, expected_datasets):
     @pytest.mark.parametrize(
         ("filenames", "loadable_ids"),
-        [([AHI_FILE], ['variable1', 'variable2', 'variable3']), ]
+        [([AHI_FILE], ["variable1", "variable2", "variable3"]), ]
     )
     def test_load_all_new_donor(self, filenames, loadable_ids):
         """Test loading all test datasets with new donor."""
         from satpy.readers import load_reader
-        with mock.patch('satpy.readers.clavrx.xr.open_dataset') as od:
+        with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od:
             od.side_effect = fake_test_content
             r = load_reader(self.reader_configs)
             loadables = r.select_files_from_pathnames(filenames)
             r.create_filehandlers(loadables)
-            with mock.patch('satpy.readers.clavrx.glob') as g, \
-                    mock.patch('satpy.readers.clavrx.netCDF4.Dataset') as d:
-                g.return_value = ['fake_donor.nc']
+            with mock.patch("satpy.readers.clavrx.glob") as g, \
+                    mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d:
+                g.return_value = ["fake_donor.nc"]
                 x = np.linspace(-0.1518, 0.1518, 300)
                 y = np.linspace(0.1518, -0.1518, 10)
                 proj = mock.Mock(
@@ -177,26 +177,26 @@ def test_load_all_new_donor(self, filenames, loadable_ids):
                     semi_minor_axis=6356752.3142,
                     perspective_point_height=35791000,
                     longitude_of_projection_origin=140.7,
-                    sweep_angle_axis='y',
+                    sweep_angle_axis="y",
                 )
                 d.return_value = fake_donor = mock.MagicMock(
-                    variables={'goes_imager_projection': proj, 'x': x, 'y': y},
+                    variables={"goes_imager_projection": proj, "x": x, "y": y},
                 )
                 fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key]
                 datasets = r.load(loadable_ids)
                 assert len(datasets) == 3
                 for v in datasets.values():
-                    assert 'calibration' not in v.attrs
-                    assert v.attrs['units'] == '1'
-                    assert isinstance(v.attrs['area'], AreaDefinition)
-                    assert v.attrs['platform_name'] == 'himawari8'
-                    assert v.attrs['sensor'] == 'ahi'
-                    assert 'rows_per_scan' not in v.coords.get('longitude').attrs
+                    assert "calibration" not in v.attrs
+                    assert v.attrs["units"] == "1"
+                    assert isinstance(v.attrs["area"], AreaDefinition)
+                    assert v.attrs["platform_name"] == "himawari8"
+                    assert v.attrs["sensor"] == "ahi"
+                    assert "rows_per_scan" not in v.coords.get("longitude").attrs
                     if v.attrs["name"] in ["variable1", "variable2"]:
                         assert isinstance(v.attrs["valid_range"], list)
                         assert v.dtype == np.float32
                         assert "_FillValue" not in v.attrs.keys()
                     else:
-                        assert (datasets['variable3'].attrs.get('flag_meanings')) is not None
-                        assert (datasets['variable3'].attrs.get('flag_meanings') == '')
+                        assert (datasets["variable3"].attrs.get("flag_meanings")) is not None
+                        assert (datasets["variable3"].attrs.get("flag_meanings") == "")
                         assert np.issubdtype(v.dtype, np.integer)
diff --git a/satpy/tests/reader_tests/test_cmsaf_claas.py b/satpy/tests/reader_tests/test_cmsaf_claas.py
index 9c7c7e0089..7f5b728ba8 100644
--- a/satpy/tests/reader_tests/test_cmsaf_claas.py
+++ b/satpy/tests/reader_tests/test_cmsaf_claas.py
@@ -219,8 +219,8 @@ def test_get_area_def(self, file_handler, area_exp):
     @pytest.mark.parametrize(
         "ds_name,expected",
         [
-            ("ctt", xr.DataArray([[280, 290], [300, 310]], dims=('y', 'x'))),
-            ("cph", xr.DataArray([[0, 1], [2, 0]], dims=('y', 'x'))),
+            ("ctt", xr.DataArray([[280, 290], [300, 310]], dims=("y", "x"))),
+            ("cph", xr.DataArray([[0, 1], [2, 0]], dims=("y", "x"))),
         ]
     )
     def test_get_dataset(self, file_handler, ds_name, expected):
diff --git a/satpy/tests/reader_tests/test_electrol_hrit.py b/satpy/tests/reader_tests/test_electrol_hrit.py
index eca413d033..c555f377b1 100644
--- a/satpy/tests/reader_tests/test_electrol_hrit.py
+++ b/satpy/tests/reader_tests/test_electrol_hrit.py
@@ -48,15 +48,15 @@ class Testrecarray2dict(unittest.TestCase):
     def test_fun(self):
"""Test record array.""" - inner_st = np.dtype([('test_str', '02d}": {'channels': [4, 5, 6, 8, 9], - 'grid_type': '1km'}, - "nir_{:>02d}": {'channels': [13, 16, 22], - 'grid_type': '1km'}, - "ir_{:>02d}": {'channels': [38, 87, 97, 105, 123, 133], - 'grid_type': '2km'}, - "wv_{:>02d}": {'channels': [63, 73], - 'grid_type': '2km'}, + "vis_{:>02d}": {"channels": [4, 5, 6, 8, 9], + "grid_type": "1km"}, + "nir_{:>02d}": {"channels": [13, 16, 22], + "grid_type": "1km"}, + "ir_{:>02d}": {"channels": [38, 87, 97, 105, 123, 133], + "grid_type": "2km"}, + "wv_{:>02d}": {"channels": [63, 73], + "grid_type": "2km"}, } @@ -336,15 +336,15 @@ class FakeFCIFileHandlerWithBadIDPFData(FakeFCIFileHandlerFDHSI): def _get_test_content_all_channels(self): data = super()._get_test_content_all_channels() - data['data/vis_06/measured/x'].attrs['scale_factor'] *= -1 - data['data/vis_06/measured/x'].attrs['scale_factor'] = \ - np.float32(data['data/vis_06/measured/x'].attrs['scale_factor']) - data['data/vis_06/measured/x'].attrs['add_offset'] = \ - np.float32(data['data/vis_06/measured/x'].attrs['add_offset']) - data['data/vis_06/measured/y'].attrs['scale_factor'] = \ - np.float32(data['data/vis_06/measured/y'].attrs['scale_factor']) - data['data/vis_06/measured/y'].attrs['add_offset'] = \ - np.float32(data['data/vis_06/measured/y'].attrs['add_offset']) + data["data/vis_06/measured/x"].attrs["scale_factor"] *= -1 + data["data/vis_06/measured/x"].attrs["scale_factor"] = \ + np.float32(data["data/vis_06/measured/x"].attrs["scale_factor"]) + data["data/vis_06/measured/x"].attrs["add_offset"] = \ + np.float32(data["data/vis_06/measured/x"].attrs["add_offset"]) + data["data/vis_06/measured/y"].attrs["scale_factor"] = \ + np.float32(data["data/vis_06/measured/y"].attrs["scale_factor"]) + data["data/vis_06/measured/y"].attrs["add_offset"] = \ + np.float32(data["data/vis_06/measured/y"].attrs["add_offset"]) data["state/celestial/earth_sun_distance"] = xr.DataArray(da.repeat(da.array([30000000]), 6000)) return data @@ -354,12 +354,12 @@ class FakeFCIFileHandlerHRFI(FakeFCIFileHandlerBase): """Mock HRFI data.""" chan_patterns = { - "vis_{:>02d}_hr": {'channels': [6], - 'grid_type': '500m'}, - "nir_{:>02d}_hr": {'channels': [22], - 'grid_type': '500m'}, - "ir_{:>02d}_hr": {'channels': [38, 105], - 'grid_type': '1km'}, + "vis_{:>02d}_hr": {"channels": [6], + "grid_type": "500m"}, + "nir_{:>02d}_hr": {"channels": [22], + "grid_type": "500m"}, + "ir_{:>02d}_hr": {"channels": [38, 105], + "grid_type": "1km"}, } @@ -404,12 +404,12 @@ def clear_cache(reader): "terran": ["ir_38", "ir_105"], "terran_grid_type": ["1km"] * 2} -_test_filenames = {'fdhsi': [ +_test_filenames = {"fdhsi": [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc" ], - 'hrfi': [ + "hrfi": [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-HRFI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc" @@ -431,9 +431,9 @@ def FakeFCIFileHandlerFDHSI_fixture(): """Get a fixture for the fake FDHSI filehandler, including channel and file names.""" with mocked_basefilehandler(FakeFCIFileHandlerFDHSI): param_dict = { - 'filetype': 'fci_l1c_fdhsi', - 'channels': _chans_fdhsi, - 'filenames': _test_filenames['fdhsi'] + "filetype": "fci_l1c_fdhsi", + "channels": _chans_fdhsi, + "filenames": _test_filenames["fdhsi"] } yield param_dict @@ -443,9 +443,9 @@ def FakeFCIFileHandlerHRFI_fixture(): """Get a fixture for 
the fake HRFI filehandler, including channel and file names.""" with mocked_basefilehandler(FakeFCIFileHandlerHRFI): param_dict = { - 'filetype': 'fci_l1c_hrfi', - 'channels': _chans_hrfi, - 'filenames': _test_filenames['hrfi'] + "filetype": "fci_l1c_hrfi", + "channels": _chans_hrfi, + "filenames": _test_filenames["hrfi"] } yield param_dict @@ -458,12 +458,12 @@ def FakeFCIFileHandlerHRFI_fixture(): class TestFCIL1cNCReader: """Test FCI L1c NetCDF reader with nominal data.""" - fh_param_for_filetype = {'hrfi': {'channels': _chans_hrfi, - 'filenames': _test_filenames['hrfi']}, - 'fdhsi': {'channels': _chans_fdhsi, - 'filenames': _test_filenames['fdhsi']}} + fh_param_for_filetype = {"hrfi": {"channels": _chans_hrfi, + "filenames": _test_filenames["hrfi"]}, + "fdhsi": {"channels": _chans_fdhsi, + "filenames": _test_filenames["fdhsi"]}} - @pytest.mark.parametrize('filenames', [_test_filenames['fdhsi'], _test_filenames['hrfi']]) + @pytest.mark.parametrize("filenames", [_test_filenames["fdhsi"], _test_filenames["hrfi"]]) def test_file_pattern(self, reader_configs, filenames): """Test file pattern matching.""" from satpy.readers import load_reader @@ -472,8 +472,8 @@ def test_file_pattern(self, reader_configs, filenames): files = reader.select_files_from_pathnames(filenames) assert len(files) == 1 - @pytest.mark.parametrize('filenames', [_test_filenames['fdhsi'][0].replace('BODY', 'TRAIL'), - _test_filenames['hrfi'][0].replace('BODY', 'TRAIL')]) + @pytest.mark.parametrize("filenames", [_test_filenames["fdhsi"][0].replace("BODY", "TRAIL"), + _test_filenames["hrfi"][0].replace("BODY", "TRAIL")]) def test_file_pattern_for_TRAIL_file(self, reader_configs, filenames): """Test file pattern matching for TRAIL files, which should not be picked up.""" from satpy.readers import load_reader @@ -482,226 +482,226 @@ def test_file_pattern_for_TRAIL_file(self, reader_configs, filenames): files = reader.select_files_from_pathnames(filenames) assert len(files) == 0 - @pytest.mark.parametrize('fh_param,expected_res_n', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture'), 16), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'), 4)]) + @pytest.mark.parametrize("fh_param,expected_res_n", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)]) def test_load_counts(self, reader_configs, fh_param, expected_res_n): """Test loading with counts.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load( [make_dataid(name=name, calibration="counts") for name in - fh_param['channels']["solar"] + fh_param['channels']["terran"]], pad_data=False) + fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False) assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param['channels']["solar"] + fh_param['channels']["terran"], - fh_param['channels']["solar_grid_type"] + - fh_param['channels']["terran_grid_type"]): - assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['nrows'], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['ncols']) + for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"], + fh_param["channels"]["solar_grid_type"] + + fh_param["channels"]["terran_grid_type"]): + assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) assert res[ch].dtype == np.uint16 assert res[ch].attrs["calibration"] == 
"counts" assert res[ch].attrs["units"] == "count" - if ch == 'ir_38': + if ch == "ir_38": numpy.testing.assert_array_equal(res[ch][-1], 1) numpy.testing.assert_array_equal(res[ch][0], 5000) else: numpy.testing.assert_array_equal(res[ch], 1) - @pytest.mark.parametrize('fh_param,expected_res_n', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture'), 16), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'), 4)]) + @pytest.mark.parametrize("fh_param,expected_res_n", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)]) def test_load_radiance(self, reader_configs, fh_param, expected_res_n): """Test loading with radiance.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load( [make_dataid(name=name, calibration="radiance") for name in - fh_param['channels']["solar"] + fh_param['channels']["terran"]], pad_data=False) + fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False) assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param['channels']["solar"] + fh_param['channels']["terran"], - fh_param['channels']["solar_grid_type"] + - fh_param['channels']["terran_grid_type"]): - assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['nrows'], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['ncols']) + for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"], + fh_param["channels"]["solar_grid_type"] + + fh_param["channels"]["terran_grid_type"]): + assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) assert res[ch].dtype == np.float64 assert res[ch].attrs["calibration"] == "radiance" - assert res[ch].attrs["units"] == 'mW m-2 sr-1 (cm-1)-1' + assert res[ch].attrs["units"] == "mW m-2 sr-1 (cm-1)-1" assert res[ch].attrs["radiance_unit_conversion_coefficient"] == 1234.56 - if ch == 'ir_38': + if ch == "ir_38": numpy.testing.assert_array_equal(res[ch][-1], 15) numpy.testing.assert_array_equal(res[ch][0], 9700) else: numpy.testing.assert_array_equal(res[ch], 15) - @pytest.mark.parametrize('fh_param,expected_res_n', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture'), 8), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'), 2)]) + @pytest.mark.parametrize("fh_param,expected_res_n", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 8), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 2)]) def test_load_reflectance(self, reader_configs, fh_param, expected_res_n): """Test loading with reflectance.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load( [make_dataid(name=name, calibration="reflectance") for name in - fh_param['channels']["solar"]], pad_data=False) + fh_param["channels"]["solar"]], pad_data=False) assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param['channels']["solar"], fh_param['channels']["solar_grid_type"]): - assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['nrows'], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['ncols']) + for ch, grid_type in zip(fh_param["channels"]["solar"], fh_param["channels"]["solar_grid_type"]): + assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) assert res[ch].dtype == np.float64 assert 
res[ch].attrs["calibration"] == "reflectance" assert res[ch].attrs["units"] == "%" numpy.testing.assert_array_almost_equal(res[ch], 100 * 15 * 1 * np.pi / 50) - @pytest.mark.parametrize('fh_param,expected_res_n', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture'), 8), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'), 2)]) + @pytest.mark.parametrize("fh_param,expected_res_n", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 8), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 2)]) def test_load_bt(self, reader_configs, caplog, fh_param, expected_res_n): """Test loading with bt.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) with caplog.at_level(logging.WARNING): res = reader.load( [make_dataid(name=name, calibration="brightness_temperature") for - name in fh_param['channels']["terran"]], pad_data=False) + name in fh_param["channels"]["terran"]], pad_data=False) assert caplog.text == "" assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param['channels']["terran"], fh_param['channels']["terran_grid_type"]): - assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['nrows'], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['ncols']) + for ch, grid_type in zip(fh_param["channels"]["terran"], fh_param["channels"]["terran_grid_type"]): + assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) assert res[ch].dtype == np.float64 assert res[ch].attrs["calibration"] == "brightness_temperature" assert res[ch].attrs["units"] == "K" - if ch == 'ir_38': + if ch == "ir_38": numpy.testing.assert_array_almost_equal(res[ch][-1], 209.68274099) numpy.testing.assert_array_almost_equal(res[ch][0], 1888.851296) else: numpy.testing.assert_array_almost_equal(res[ch], 209.68274099) - @pytest.mark.parametrize('fh_param', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture')), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'))]) + @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))]) def test_orbital_parameters_attr(self, reader_configs, fh_param): """Test the orbital parameter attribute.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load( [make_dataid(name=name) for name in - fh_param['channels']["solar"] + fh_param['channels']["terran"]], pad_data=False) + fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False) - for ch in fh_param['channels']["solar"] + fh_param['channels']["terran"]: + for ch in fh_param["channels"]["solar"] + fh_param["channels"]["terran"]: assert res[ch].attrs["orbital_parameters"] == { - 'satellite_actual_longitude': np.mean(np.arange(6000)), - 'satellite_actual_latitude': np.mean(np.arange(6000)), - 'satellite_actual_altitude': np.mean(np.arange(6000)), - 'satellite_nominal_longitude': 0.0, - 'satellite_nominal_latitude': 0, - 'satellite_nominal_altitude': 35786400.0, - 'projection_longitude': 0.0, - 'projection_latitude': 0, - 'projection_altitude': 35786400.0, + "satellite_actual_longitude": np.mean(np.arange(6000)), + "satellite_actual_latitude": np.mean(np.arange(6000)), + "satellite_actual_altitude": np.mean(np.arange(6000)), + "satellite_nominal_longitude": 0.0, + "satellite_nominal_latitude": 0, + "satellite_nominal_altitude": 
35786400.0, + "projection_longitude": 0.0, + "projection_latitude": 0, + "projection_altitude": 35786400.0, } expected_pos_info_for_filetype = { - 'fdhsi': {'1km': {'start_position_row': 1, - 'end_position_row': 200, - 'segment_height': 200, - 'grid_width': 11136}, - '2km': {'start_position_row': 1, - 'end_position_row': 100, - 'segment_height': 100, - 'grid_width': 5568}}, - 'hrfi': {'500m': {'start_position_row': 1, - 'end_position_row': 400, - 'segment_height': 400, - 'grid_width': 22272}, - '1km': {'start_position_row': 1, - 'end_position_row': 200, - 'grid_width': 11136, - 'segment_height': 200}} + "fdhsi": {"1km": {"start_position_row": 1, + "end_position_row": 200, + "segment_height": 200, + "grid_width": 11136}, + "2km": {"start_position_row": 1, + "end_position_row": 100, + "segment_height": 100, + "grid_width": 5568}}, + "hrfi": {"500m": {"start_position_row": 1, + "end_position_row": 400, + "segment_height": 400, + "grid_width": 22272}, + "1km": {"start_position_row": 1, + "end_position_row": 200, + "grid_width": 11136, + "segment_height": 200}} } - @pytest.mark.parametrize('fh_param, expected_pos_info', [ - (lazy_fixture('FakeFCIFileHandlerFDHSI_fixture'), expected_pos_info_for_filetype['fdhsi']), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'), expected_pos_info_for_filetype['hrfi']) + @pytest.mark.parametrize("fh_param, expected_pos_info", [ + (lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), expected_pos_info_for_filetype["fdhsi"]), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), expected_pos_info_for_filetype["hrfi"]) ]) def test_get_segment_position_info(self, reader_configs, fh_param, expected_pos_info): """Test the segment position info method.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) for filetype_handler in list(reader.file_handlers.values())[0]: segpos_info = filetype_handler.get_segment_position_info() assert segpos_info == expected_pos_info - @pytest.mark.parametrize('fh_param,expected_res_n', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture'), 16), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'), 4)]) + @pytest.mark.parametrize("fh_param,expected_res_n", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)]) def test_load_index_map(self, reader_configs, fh_param, expected_res_n): """Test loading of index_map.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load( - [name + '_index_map' for name in - fh_param['channels']["solar"] + fh_param['channels']["terran"]], pad_data=False) + [name + "_index_map" for name in + fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False) assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param['channels']["solar"] + fh_param['channels']["terran"], - fh_param['channels']["solar_grid_type"] + - fh_param['channels']["terran_grid_type"]): - assert res[ch + '_index_map'].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['nrows'], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['ncols']) - numpy.testing.assert_array_equal(res[ch + '_index_map'][1, 1], 110) - - @pytest.mark.parametrize('fh_param', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture')), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'))]) + for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"], + 
fh_param["channels"]["solar_grid_type"] + + fh_param["channels"]["terran_grid_type"]): + assert res[ch + "_index_map"].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) + numpy.testing.assert_array_equal(res[ch + "_index_map"][1, 1], 110) + + @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))]) def test_load_aux_data(self, reader_configs, fh_param): """Test loading of auxiliary data.""" from satpy.readers.fci_l1c_nc import AUX_DATA - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) - res = reader.load([fh_param['channels']['solar'][0] + '_' + key for key in AUX_DATA.keys()], + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) + res = reader.load([fh_param["channels"]["solar"][0] + "_" + key for key in AUX_DATA.keys()], pad_data=False) - grid_type = fh_param['channels']['solar_grid_type'][0] - for aux in [fh_param['channels']['solar'][0] + '_' + key for key in AUX_DATA.keys()]: - assert res[aux].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['nrows'], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['ncols']) - if aux == fh_param['channels']['solar'][0] + '_earth_sun_distance': + grid_type = fh_param["channels"]["solar_grid_type"][0] + for aux in [fh_param["channels"]["solar"][0] + "_" + key for key in AUX_DATA.keys()]: + assert res[aux].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) + if aux == fh_param["channels"]["solar"][0] + "_earth_sun_distance": numpy.testing.assert_array_equal(res[aux][1, 1], 149597870.7) else: numpy.testing.assert_array_equal(res[aux][1, 1], 10) - @pytest.mark.parametrize('fh_param,expected_res_n', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture'), 16), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'), 4)]) + @pytest.mark.parametrize("fh_param,expected_res_n", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)]) def test_load_quality_only(self, reader_configs, fh_param, expected_res_n): """Test that loading quality only works.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load( - [name + '_pixel_quality' for name in - fh_param['channels']["solar"] + fh_param['channels']["terran"]], pad_data=False) + [name + "_pixel_quality" for name in + fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False) assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param['channels']["solar"] + fh_param['channels']["terran"], - fh_param['channels']["solar_grid_type"] + - fh_param['channels']["terran_grid_type"]): - assert res[ch + '_pixel_quality'].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['nrows'], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['ncols']) - numpy.testing.assert_array_equal(res[ch + '_pixel_quality'][1, 1], 3) - assert res[ch + '_pixel_quality'].attrs["name"] == ch + '_pixel_quality' - - @pytest.mark.parametrize('fh_param', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture')), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'))]) + for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"], + fh_param["channels"]["solar_grid_type"] + + fh_param["channels"]["terran_grid_type"]): + assert res[ch + "_pixel_quality"].shape == 
(GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) + numpy.testing.assert_array_equal(res[ch + "_pixel_quality"][1, 1], 3) + assert res[ch + "_pixel_quality"].attrs["name"] == ch + "_pixel_quality" + + @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))]) def test_platform_name(self, reader_configs, fh_param): """Test that platform name is exposed. Test that the FCI reader exposes the platform name. Corresponds to GH issue 1014. """ - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load(["vis_06"], pad_data=False) assert res["vis_06"].attrs["platform_name"] == "MTG-I1" - @pytest.mark.parametrize('fh_param, expected_area', [ - (lazy_fixture('FakeFCIFileHandlerFDHSI_fixture'), ['mtg_fci_fdss_1km', 'mtg_fci_fdss_2km']), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'), ['mtg_fci_fdss_500m', 'mtg_fci_fdss_1km']), + @pytest.mark.parametrize("fh_param, expected_area", [ + (lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), ["mtg_fci_fdss_1km", "mtg_fci_fdss_2km"]), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), ["mtg_fci_fdss_500m", "mtg_fci_fdss_1km"]), ]) def test_area_definition_computation(self, reader_configs, fh_param, expected_area): """Test that the geolocation computation is correct.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) - res = reader.load(['ir_105', 'vis_06'], pad_data=False) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) + res = reader.load(["ir_105", "vis_06"], pad_data=False) # test that area_ids are harmonisation-conform ___ - assert res['vis_06'].attrs['area'].area_id == expected_area[0] - assert res['ir_105'].attrs['area'].area_id == expected_area[1] + assert res["vis_06"].attrs["area"].area_id == expected_area[0] + assert res["ir_105"].attrs["area"].area_id == expected_area[1] - area_def = res['ir_105'].attrs['area'] + area_def = res["ir_105"].attrs["area"] # test area extents computation np.testing.assert_array_almost_equal(np.array(area_def.area_extent), np.array([-5567999.994203, -5367999.994411, @@ -709,23 +709,23 @@ def test_area_definition_computation(self, reader_configs, fh_param, expected_ar decimal=2) # check that the projection is read in properly - assert area_def.crs.coordinate_operation.method_name == 'Geostationary Satellite (Sweep Y)' + assert area_def.crs.coordinate_operation.method_name == "Geostationary Satellite (Sweep Y)" assert area_def.crs.coordinate_operation.params[0].value == 0.0 # projection origin longitude assert area_def.crs.coordinate_operation.params[1].value == 35786400.0 # projection height assert area_def.crs.ellipsoid.semi_major_metre == 6378137.0 assert area_def.crs.ellipsoid.inverse_flattening == 298.257223563 assert area_def.crs.ellipsoid.is_semi_minor_computed - @pytest.mark.parametrize('fh_param', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture')), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'))]) + @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))]) def test_excs(self, reader_configs, fh_param): """Test that exceptions are raised where expected.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], 
reader_configs) with pytest.raises(ValueError): - reader.file_handlers[fh_param['filetype']][0].get_dataset(make_dataid(name="invalid"), {}) + reader.file_handlers[fh_param["filetype"]][0].get_dataset(make_dataid(name="invalid"), {}) with pytest.raises(ValueError): - reader.file_handlers[fh_param['filetype']][0].get_dataset( + reader.file_handlers[fh_param["filetype"]][0].get_dataset( make_dataid(name="ir_123", calibration="unknown"), {"units": "unknown"}) @@ -736,7 +736,7 @@ def test_load_composite(self): # in the tests.compositor_tests package from satpy.composites.config_loader import load_compositor_configs_for_sensors - comps, mods = load_compositor_configs_for_sensors(['fci']) + comps, mods = load_compositor_configs_for_sensors(["fci"]) assert len(comps["fci"]) > 0 assert len(mods["fci"]) > 0 @@ -747,7 +747,7 @@ class TestFCIL1cNCReaderBadData: def test_handling_bad_data_ir(self, reader_configs, caplog): """Test handling of bad IR data.""" with mocked_basefilehandler(FakeFCIFileHandlerWithBadData): - reader = _get_reader_with_filehandlers(_test_filenames['fdhsi'], reader_configs) + reader = _get_reader_with_filehandlers(_test_filenames["fdhsi"], reader_configs) with caplog.at_level(logging.ERROR): reader.load([make_dataid( name="ir_105", @@ -757,7 +757,7 @@ def test_handling_bad_data_ir(self, reader_configs, caplog): def test_handling_bad_data_vis(self, reader_configs, caplog): """Test handling of bad VIS data.""" with mocked_basefilehandler(FakeFCIFileHandlerWithBadData): - reader = _get_reader_with_filehandlers(_test_filenames['fdhsi'], reader_configs) + reader = _get_reader_with_filehandlers(_test_filenames["fdhsi"], reader_configs) with caplog.at_level(logging.ERROR): reader.load([make_dataid( name="vis_06", @@ -771,7 +771,7 @@ class TestFCIL1cNCReaderBadDataFromIDPF: def test_handling_bad_earthsun_distance(self, reader_configs): """Test handling of bad earth-sun distance data.""" with mocked_basefilehandler(FakeFCIFileHandlerWithBadIDPFData): - reader = _get_reader_with_filehandlers(_test_filenames['fdhsi'], reader_configs) + reader = _get_reader_with_filehandlers(_test_filenames["fdhsi"], reader_configs) res = reader.load([make_dataid(name=["vis_06"], calibration="reflectance")], pad_data=False) numpy.testing.assert_array_almost_equal(res["vis_06"], 100 * 15 * 1 * np.pi / 50) @@ -779,10 +779,10 @@ def test_handling_bad_earthsun_distance(self, reader_configs): def test_bad_xy_coords(self, reader_configs): """Test that the geolocation computation is correct.""" with mocked_basefilehandler(FakeFCIFileHandlerWithBadIDPFData): - reader = _get_reader_with_filehandlers(_test_filenames['fdhsi'], reader_configs) - res = reader.load(['vis_06'], pad_data=False) + reader = _get_reader_with_filehandlers(_test_filenames["fdhsi"], reader_configs) + res = reader.load(["vis_06"], pad_data=False) - area_def = res['vis_06'].attrs['area'] + area_def = res["vis_06"].attrs["area"] # test area extents computation np.testing.assert_array_almost_equal(np.array(area_def.area_extent), np.array([-5568000.227139, -5368000.221262, diff --git a/satpy/tests/reader_tests/test_fci_l2_nc.py b/satpy/tests/reader_tests/test_fci_l2_nc.py index 9ebbdb32e7..114fa1d6d2 100644 --- a/satpy/tests/reader_tests/test_fci_l2_nc.py +++ b/satpy/tests/reader_tests/test_fci_l2_nc.py @@ -33,20 +33,20 @@ from satpy.tests.utils import make_dataid AREA_DEF = geometry.AreaDefinition( - 'mtg_fci_fdss_2km', - 'MTG FCI Full Disk Scanning Service area definition with 2 km resolution', + "mtg_fci_fdss_2km", + "MTG FCI Full Disk Scanning 
Service area definition with 2 km resolution", "", - {'h': 35786400., 'lon_0': 0.0, 'ellps': 'WGS84', 'proj': 'geos', 'units': 'm'}, + {"h": 35786400., "lon_0": 0.0, "ellps": "WGS84", "proj": "geos", "units": "m"}, 5568, 5568, (-5567999.9942, 5567999.9942, 5567999.9942, -5567999.9942) ) SEG_AREA_DEF = geometry.AreaDefinition( - 'mtg_fci_fdss_32km', - 'MTG FCI Full Disk Scanning Service area definition with 32 km resolution', + "mtg_fci_fdss_32km", + "MTG FCI Full Disk Scanning Service area definition with 32 km resolution", "", - {'h': 35786400., 'lon_0': 0.0, 'ellps': 'WGS84', 'proj': 'geos', 'units': 'm'}, + {"h": 35786400., "lon_0": 0.0, "ellps": "WGS84", "proj": "geos", "units": "m"}, 348, 348, (-5567999.9942, 5567999.9942, 5567999.9942, -5567999.9942) @@ -61,42 +61,42 @@ def setUp(self): # Easiest way to test the reader is to create a test netCDF file on the fly # Create unique filenames to prevent race conditions when tests are run in parallel self.test_file = str(uuid.uuid4()) + ".nc" - with Dataset(self.test_file, 'w') as nc: + with Dataset(self.test_file, "w") as nc: # Create dimensions - nc.createDimension('number_of_columns', 10) - nc.createDimension('number_of_rows', 100) - nc.createDimension('maximum_number_of_layers', 2) + nc.createDimension("number_of_columns", 10) + nc.createDimension("number_of_rows", 100) + nc.createDimension("maximum_number_of_layers", 2) # add global attributes - nc.data_source = 'test_data_source' - nc.platform = 'test_platform' + nc.data_source = "test_data_source" + nc.platform = "test_platform" # Add datasets - x = nc.createVariable('x', np.float32, dimensions=('number_of_columns',)) - x.standard_name = 'projection_x_coordinate' + x = nc.createVariable("x", np.float32, dimensions=("number_of_columns",)) + x.standard_name = "projection_x_coordinate" x[:] = np.arange(10) - y = nc.createVariable('y', np.float32, dimensions=('number_of_rows',)) - y.standard_name = 'projection_y_coordinate' + y = nc.createVariable("y", np.float32, dimensions=("number_of_rows",)) + y.standard_name = "projection_y_coordinate" y[:] = np.arange(100) - s = nc.createVariable('product_quality', np.int8) + s = nc.createVariable("product_quality", np.int8) s[:] = 99. - one_layer_dataset = nc.createVariable('test_one_layer', np.float32, - dimensions=('number_of_rows', 'number_of_columns')) + one_layer_dataset = nc.createVariable("test_one_layer", np.float32, + dimensions=("number_of_rows", "number_of_columns")) one_layer_dataset[:] = np.ones((100, 10)) - one_layer_dataset.test_attr = 'attr' - one_layer_dataset.units = 'test_units' + one_layer_dataset.test_attr = "attr" + one_layer_dataset.units = "test_units" - two_layers_dataset = nc.createVariable('test_two_layers', np.float32, - dimensions=('maximum_number_of_layers', - 'number_of_rows', - 'number_of_columns')) + two_layers_dataset = nc.createVariable("test_two_layers", np.float32, + dimensions=("maximum_number_of_layers", + "number_of_rows", + "number_of_columns")) two_layers_dataset[0, :, :] = np.ones((100, 10)) two_layers_dataset[1, :, :] = 2 * np.ones((100, 10)) - mtg_geos_projection = nc.createVariable('mtg_geos_projection', int, dimensions=()) + mtg_geos_projection = nc.createVariable("mtg_geos_projection", int, dimensions=()) mtg_geos_projection.longitude_of_projection_origin = 0.0 mtg_geos_projection.semi_major_axis = 6378137. 
             mtg_geos_projection.inverse_flattening = 298.257223563
@@ -114,89 +114,89 @@ def tearDown(self):
     def test_all_basic(self):
         """Test all basic functionalities."""
-        self.assertEqual(self.fh.spacecraft_name, 'test_platform')
-        self.assertEqual(self.fh.sensor_name, 'test_data_source')
+        self.assertEqual(self.fh.spacecraft_name, "test_platform")
+        self.assertEqual(self.fh.sensor_name, "test_data_source")
         self.assertEqual(self.fh.ssp_lon, 0.0)

         global_attributes = self.fh._get_global_attributes()
         expected_global_attributes = {
-            'filename': self.test_file,
-            'spacecraft_name': 'test_platform',
-            'ssp_lon': 0.0,
-            'sensor': 'test_data_source',
-            'platform_name': 'test_platform'
+            "filename": self.test_file,
+            "spacecraft_name": "test_platform",
+            "ssp_lon": 0.0,
+            "sensor": "test_data_source",
+            "platform_name": "test_platform"
         }
         self.assertEqual(global_attributes, expected_global_attributes)

-    @mock.patch('satpy.readers.fci_l2_nc.geometry.AreaDefinition')
-    @mock.patch('satpy.readers.fci_l2_nc.make_ext')
+    @mock.patch("satpy.readers.fci_l2_nc.geometry.AreaDefinition")
+    @mock.patch("satpy.readers.fci_l2_nc.make_ext")
     def test_area_definition(self, me_, gad_):
         """Test the area definition computation."""
-        self.fh._compute_area_def(make_dataid(name='test_area_def', resolution=2000))
+        self.fh._compute_area_def(make_dataid(name="test_area_def", resolution=2000))

         # Asserts that the make_ext function was called with the correct arguments
         me_.assert_called_once()
         args, kwargs = me_.call_args
         np.testing.assert_allclose(args, [-0.0, -515.6620, 5672.28217, 0.0, 35786400.])

-        proj_dict = {'a': 6378137.,
-                     'lon_0': 0.0,
-                     'h': 35786400,
+        proj_dict = {"a": 6378137.,
+                     "lon_0": 0.0,
+                     "h": 35786400,
                      "rf": 298.257223563,
-                     'proj': 'geos',
-                     'units': 'm',
-                     'sweep': 'y'}
+                     "proj": "geos",
+                     "units": "m",
+                     "sweep": "y"}

         # Asserts that the get_area_definition function was called with the correct arguments
         gad_.assert_called_once()
         args, kwargs = gad_.call_args
-        self.assertEqual(args[0], 'mtg_fci_fdss_2km')
-        self.assertEqual(args[1], 'MTG FCI Full Disk Scanning Service area definition with 2 km resolution')
-        self.assertEqual(args[2], '')
+        self.assertEqual(args[0], "mtg_fci_fdss_2km")
+        self.assertEqual(args[1], "MTG FCI Full Disk Scanning Service area definition with 2 km resolution")
+        self.assertEqual(args[2], "")
         self.assertEqual(args[3], proj_dict)
         self.assertEqual(args[4], 10)
         self.assertEqual(args[5], 100)

     def test_dataset(self):
         """Test the correct execution of the get_dataset function with a valid file_key."""
-        dataset = self.fh.get_dataset(make_dataid(name='test_one_layer', resolution=2000),
-                                      {'name': 'test_one_layer',
-                                       'file_key': 'test_one_layer',
-                                       'fill_value': -999,
-                                       'file_type': 'test_file_type'})
+        dataset = self.fh.get_dataset(make_dataid(name="test_one_layer", resolution=2000),
+                                      {"name": "test_one_layer",
+                                       "file_key": "test_one_layer",
+                                       "fill_value": -999,
+                                       "file_type": "test_file_type"})
         np.testing.assert_allclose(dataset.values, np.ones((100, 10)))
-        self.assertEqual(dataset.attrs['test_attr'], 'attr')
-        self.assertEqual(dataset.attrs['units'], 'test_units')
-        self.assertEqual(dataset.attrs['fill_value'], -999)
+        self.assertEqual(dataset.attrs["test_attr"], "attr")
+        self.assertEqual(dataset.attrs["units"], "test_units")
+        self.assertEqual(dataset.attrs["fill_value"], -999)

     def test_dataset_with_layer(self):
         """Check the correct execution of the get_dataset function with a valid file_key & layer."""
-        dataset = self.fh.get_dataset(make_dataid(name='test_two_layers', resolution=2000),
-                                      {'name': 'test_two_layers',
-                                       'file_key': 'test_two_layers', 'layer': 1,
-                                       'fill_value': -999,
-                                       'file_type': 'test_file_type'})
+        dataset = self.fh.get_dataset(make_dataid(name="test_two_layers", resolution=2000),
+                                      {"name": "test_two_layers",
+                                       "file_key": "test_two_layers", "layer": 1,
+                                       "fill_value": -999,
+                                       "file_type": "test_file_type"})
         np.testing.assert_allclose(dataset.values, 2 * np.ones((100, 10)))
-        self.assertEqual(dataset.attrs['units'], None)
-        self.assertEqual(dataset.attrs['spacecraft_name'], 'test_platform')
+        self.assertEqual(dataset.attrs["units"], None)
+        self.assertEqual(dataset.attrs["spacecraft_name"], "test_platform")

     def test_dataset_with_invalid_filekey(self):
         """Test the correct execution of the get_dataset function with an invalid file_key."""
-        invalid_dataset = self.fh.get_dataset(make_dataid(name='test_invalid', resolution=2000),
-                                              {'name': 'test_invalid',
-                                               'file_key': 'test_invalid',
-                                               'fill_value': -999,
-                                               'file_type': 'test_file_type'})
+        invalid_dataset = self.fh.get_dataset(make_dataid(name="test_invalid", resolution=2000),
+                                              {"name": "test_invalid",
+                                               "file_key": "test_invalid",
+                                               "fill_value": -999,
+                                               "file_type": "test_file_type"})
         self.assertEqual(invalid_dataset, None)

     def test_dataset_with_total_cot(self):
         """Test the correct execution of the get_dataset function for total COT (add contributions from two layers)."""
-        dataset = self.fh.get_dataset(make_dataid(name='retrieved_cloud_optical_thickness', resolution=2000),
-                                      {'name': 'retrieved_cloud_optical_thickness',
-                                       'file_key': 'test_two_layers',
-                                       'fill_value': -999,
-                                       'file_type': 'test_file_type'})
+        dataset = self.fh.get_dataset(make_dataid(name="retrieved_cloud_optical_thickness", resolution=2000),
+                                      {"name": "retrieved_cloud_optical_thickness",
+                                       "file_key": "test_two_layers",
+                                       "fill_value": -999,
+                                       "file_type": "test_file_type"})
         # Checks that the function returns None
         expected_sum = np.empty((100, 10))
         expected_sum[:] = np.log10(10**2 + 10**1)
@@ -205,10 +205,10 @@ def test_dataset_with_scalar(self):
         """Test the execution of the get_dataset function for scalar values."""
         # Checks returned scalar value
-        dataset = self.fh.get_dataset(make_dataid(name='test_scalar'),
-                                      {'name': 'product_quality',
-                                       'file_key': 'product_quality',
-                                       'file_type': 'test_file_type'})
+        dataset = self.fh.get_dataset(make_dataid(name="test_scalar"),
+                                      {"name": "product_quality",
+                                       "file_key": "product_quality",
+                                       "file_type": "test_file_type"})
         self.assertEqual(dataset.values, 99.)
        # Checks that no AreaDefinition is implemented for scalar values
@@ -223,44 +223,44 @@ def setUp(self):
        """Set up the test by creating a test file and opening it with the reader."""
        # Easiest way to test the reader is to create a test netCDF file on the fly
        self.seg_test_file = str(uuid.uuid4()) + ".nc"
-        with Dataset(self.seg_test_file, 'w') as nc:
+        with Dataset(self.seg_test_file, "w") as nc:
            # Create dimensions
-            nc.createDimension('number_of_FoR_cols', 348)
-            nc.createDimension('number_of_FoR_rows', 348)
-            nc.createDimension('number_of_channels', 8)
-            nc.createDimension('number_of_categories', 6)
+            nc.createDimension("number_of_FoR_cols", 348)
+            nc.createDimension("number_of_FoR_rows", 348)
+            nc.createDimension("number_of_channels", 8)
+            nc.createDimension("number_of_categories", 6)
            # add global attributes
-            nc.data_source = 'test_fci_data_source'
-            nc.platform = 'test_fci_platform'
+            nc.data_source = "test_fci_data_source"
+            nc.platform = "test_fci_platform"
            # Add datasets
-            x = nc.createVariable('x', np.float32, dimensions=('number_of_FoR_cols',))
-            x.standard_name = 'projection_x_coordinate'
+            x = nc.createVariable("x", np.float32, dimensions=("number_of_FoR_cols",))
+            x.standard_name = "projection_x_coordinate"
            x[:] = np.arange(348)
-            y = nc.createVariable('y', np.float32, dimensions=('number_of_FoR_rows',))
-            y.standard_name = 'projection_y_coordinate'
+            y = nc.createVariable("y", np.float32, dimensions=("number_of_FoR_rows",))
+            y.standard_name = "projection_y_coordinate"
            y[:] = np.arange(348)
-            s = nc.createVariable('product_quality', np.int8)
+            s = nc.createVariable("product_quality", np.int8)
            s[:] = 99.
-            chans = nc.createVariable('channels', np.float32, dimensions=('number_of_channels',))
-            chans.standard_name = 'fci_channels'
+            chans = nc.createVariable("channels", np.float32, dimensions=("number_of_channels",))
+            chans.standard_name = "fci_channels"
            chans[:] = np.arange(8)
-            cats = nc.createVariable('categories', np.float32, dimensions=('number_of_categories',))
-            cats.standard_name = 'product_categories'
+            cats = nc.createVariable("categories", np.float32, dimensions=("number_of_categories",))
+            cats.standard_name = "product_categories"
            cats[:] = np.arange(6)
-            test_dataset = nc.createVariable('test_values', np.float32,
-                                             dimensions=('number_of_FoR_rows', 'number_of_FoR_cols',
-                                                         'number_of_channels', 'number_of_categories'))
+            test_dataset = nc.createVariable("test_values", np.float32,
+                                             dimensions=("number_of_FoR_rows", "number_of_FoR_cols",
+                                                         "number_of_channels", "number_of_categories"))
            test_dataset[:] = self._get_unique_array(range(8), range(6))
-            test_dataset.test_attr = 'attr'
-            test_dataset.units = 'test_units'
+            test_dataset.test_attr = "attr"
+            test_dataset.units = "test_units"

    def tearDown(self):
        """Remove the previously created test file."""
@@ -274,18 +274,18 @@ def test_all_basic(self):
        """Test all basic functionalities."""
        self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={})
-        assert self.fh.spacecraft_name == 'test_fci_platform'
-        assert self.fh.sensor_name == 'test_fci_data_source'
+        assert self.fh.spacecraft_name == "test_fci_platform"
+        assert self.fh.sensor_name == "test_fci_data_source"
        assert self.fh.ssp_lon == 0.0
        global_attributes = self.fh._get_global_attributes()
        expected_global_attributes = {
-            'filename': self.seg_test_file,
-            'spacecraft_name': 'test_fci_platform',
-            'ssp_lon': 0.0,
-            'sensor': 'test_fci_data_source',
-            'platform_name': 'test_fci_platform'
+            "filename": self.seg_test_file,
"spacecraft_name": "test_fci_platform", + "ssp_lon": 0.0, + "sensor": "test_fci_data_source", + "platform_name": "test_fci_platform" } self.assertEqual(global_attributes, expected_global_attributes) @@ -294,15 +294,15 @@ def test_dataset(self): self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) # Checks the correct execution of the get_dataset function with a valid file_key - dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000), - {'name': 'test_values', - 'file_key': 'test_values', - 'fill_value': -999, }) + dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), + {"name": "test_values", + "file_key": "test_values", + "fill_value": -999, }) expected_dataset = self._get_unique_array(range(8), range(6)) np.testing.assert_allclose(dataset.values, expected_dataset) - self.assertEqual(dataset.attrs['test_attr'], 'attr') - self.assertEqual(dataset.attrs['units'], 'test_units') - self.assertEqual(dataset.attrs['fill_value'], -999) + self.assertEqual(dataset.attrs["test_attr"], "attr") + self.assertEqual(dataset.attrs["units"], "test_units") + self.assertEqual(dataset.attrs["fill_value"], -999) # Checks that no AreaDefintion is implemented with pytest.raises(NotImplementedError): @@ -313,10 +313,10 @@ def test_dataset_with_invalid_filekey(self): self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) # Checks the correct execution of the get_dataset function with an invalid file_key - invalid_dataset = self.fh.get_dataset(make_dataid(name='test_invalid', resolution=32000), - {'name': 'test_invalid', - 'file_key': 'test_invalid', - 'fill_value': -999, }) + invalid_dataset = self.fh.get_dataset(make_dataid(name="test_invalid", resolution=32000), + {"name": "test_invalid", + "file_key": "test_invalid", + "fill_value": -999, }) # Checks that the function returns None self.assertEqual(invalid_dataset, None) @@ -326,16 +326,16 @@ def test_dataset_with_adef(self): with_area_definition=True) # Checks the correct execution of the get_dataset function with a valid file_key - dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000), - {'name': 'test_values', - 'file_key': 'test_values', - 'fill_value': -999, - 'coordinates': ('test_lon', 'test_lat'), }) + dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), + {"name": "test_values", + "file_key": "test_values", + "fill_value": -999, + "coordinates": ("test_lon", "test_lat"), }) expected_dataset = self._get_unique_array(range(8), range(6)) np.testing.assert_allclose(dataset.values, expected_dataset) - self.assertEqual(dataset.attrs['test_attr'], 'attr') - self.assertEqual(dataset.attrs['units'], 'test_units') - self.assertEqual(dataset.attrs['fill_value'], -999) + self.assertEqual(dataset.attrs["test_attr"], "attr") + self.assertEqual(dataset.attrs["units"], "test_units") + self.assertEqual(dataset.attrs["fill_value"], -999) # Checks returned AreaDefinition against reference adef = self.fh.get_area_def(None) @@ -346,18 +346,18 @@ def test_dataset_with_adef_and_wrongs_dims(self): self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}, with_area_definition=True) with pytest.raises(NotImplementedError): - self.fh.get_dataset(make_dataid(name='test_wrong_dims', resolution=6000), - {'name': 'test_wrong_dims', 'file_key': 'test_values', 'fill_value': -999} + self.fh.get_dataset(make_dataid(name="test_wrong_dims", 
+                                {"name": "test_wrong_dims", "file_key": "test_values", "fill_value": -999}
                                )

    def test_dataset_with_scalar(self):
        """Test the execution of the get_dataset function for scalar values."""
        self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={})
        # Checks returned scalar value
-        dataset = self.fh.get_dataset(make_dataid(name='test_scalar'),
-                                      {'name': 'product_quality',
-                                       'file_key': 'product_quality',
-                                       'file_type': 'test_file_type'})
+        dataset = self.fh.get_dataset(make_dataid(name="test_scalar"),
+                                      {"name": "product_quality",
+                                       "file_key": "product_quality",
+                                       "file_type": "test_file_type"})
        self.assertEqual(dataset.values, 99.)
        # Checks that no AreaDefinition is implemented for scalar values
@@ -368,11 +368,11 @@ def test_dataset_slicing_catid(self):
        """Test the correct execution of the _slice_dataset function with 'category_id' set."""
        self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={})
-        dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000),
-                                      {'name': 'test_values',
-                                       'file_key': 'test_values',
-                                       'fill_value': -999,
-                                       'category_id': 5})
+        dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000),
+                                      {"name": "test_values",
+                                       "file_key": "test_values",
+                                       "fill_value": -999,
+                                       "category_id": 5})
        expected_dataset = self._get_unique_array(range(8), 5)
        np.testing.assert_allclose(dataset.values, expected_dataset)
@@ -380,11 +380,11 @@ def test_dataset_slicing_chid_catid(self):
        """Test the correct execution of the _slice_dataset function with 'channel_id' and 'category_id' set."""
        self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={})
-        dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000),
-                                      {'name': 'test_values',
-                                       'file_key': 'test_values',
-                                       'fill_value': -999,
-                                       'channel_id': 0, 'category_id': 1})
+        dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000),
+                                      {"name": "test_values",
+                                       "file_key": "test_values",
+                                       "fill_value": -999,
+                                       "channel_id": 0, "category_id": 1})
        expected_dataset = self._get_unique_array(0, 1)
        np.testing.assert_allclose(dataset.values, expected_dataset)
@@ -392,12 +392,12 @@ def test_dataset_slicing_visid_catid(self):
        """Test the correct execution of the _slice_dataset function with 'vis_channel_id' and 'category_id' set."""
        self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={})
-        self.fh.nc = self.fh.nc.rename_dims({'number_of_channels': 'number_of_vis_channels'})
-        dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000),
-                                      {'name': 'test_values',
-                                       'file_key': 'test_values',
-                                       'fill_value': -999,
-                                       'vis_channel_id': 3, 'category_id': 3})
+        self.fh.nc = self.fh.nc.rename_dims({"number_of_channels": "number_of_vis_channels"})
+        dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000),
+                                      {"name": "test_values",
+                                       "file_key": "test_values",
+                                       "fill_value": -999,
+                                       "vis_channel_id": 3, "category_id": 3})
        expected_dataset = self._get_unique_array(3, 3)
        np.testing.assert_allclose(dataset.values, expected_dataset)
@@ -405,21 +405,21 @@ def test_dataset_slicing_irid(self):
        """Test the correct execution of the _slice_dataset function with 'ir_channel_id' set."""
        self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={})
-        self.fh.nc = self.fh.nc.rename_dims({'number_of_channels': 'number_of_ir_channels'})
-        dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000),
-                                      {'name': 'test_values',
-                                       'file_key': 'test_values',
-                                       'fill_value': -999,
-                                       'ir_channel_id': 4})
+        self.fh.nc = self.fh.nc.rename_dims({"number_of_channels": "number_of_ir_channels"})
+        dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000),
+                                      {"name": "test_values",
+                                       "file_key": "test_values",
+                                       "fill_value": -999,
+                                       "ir_channel_id": 4})
        expected_dataset = self._get_unique_array(4, range(6))
        np.testing.assert_allclose(dataset.values, expected_dataset)

    @staticmethod
    def _get_unique_array(iarr, jarr):
-        if not hasattr(iarr, '__iter__'):
+        if not hasattr(iarr, "__iter__"):
            iarr = [iarr]
-        if not hasattr(jarr, '__iter__'):
+        if not hasattr(jarr, "__iter__"):
            jarr = [jarr]
        array = np.zeros((348, 348, 8, 6))
@@ -440,32 +440,32 @@ def setUp(self):
        """Set up the test by creating a test file and opening it with the reader."""
        # Easiest way to test the reader is to create a test netCDF file on the fly
        self.test_byte_file = str(uuid.uuid4()) + ".nc"
-        with Dataset(self.test_byte_file, 'w') as nc_byte:
+        with Dataset(self.test_byte_file, "w") as nc_byte:
            # Create dimensions
-            nc_byte.createDimension('number_of_columns', 1)
-            nc_byte.createDimension('number_of_rows', 1)
+            nc_byte.createDimension("number_of_columns", 1)
+            nc_byte.createDimension("number_of_rows", 1)
            # add global attributes
-            nc_byte.data_source = 'test_data_source'
-            nc_byte.platform = 'test_platform'
+            nc_byte.data_source = "test_data_source"
+            nc_byte.platform = "test_platform"
            # Add datasets
-            x = nc_byte.createVariable('x', np.float32, dimensions=('number_of_columns',))
-            x.standard_name = 'projection_x_coordinate'
+            x = nc_byte.createVariable("x", np.float32, dimensions=("number_of_columns",))
+            x.standard_name = "projection_x_coordinate"
            x[:] = np.arange(1)
-            y = nc_byte.createVariable('y', np.float32, dimensions=('number_of_rows',))
-            x.standard_name = 'projection_y_coordinate'
+            y = nc_byte.createVariable("y", np.float32, dimensions=("number_of_rows",))
+            y.standard_name = "projection_y_coordinate"
            y[:] = np.arange(1)
-            mtg_geos_projection = nc_byte.createVariable('mtg_geos_projection', int, dimensions=())
+            mtg_geos_projection = nc_byte.createVariable("mtg_geos_projection", int, dimensions=())
            mtg_geos_projection.longitude_of_projection_origin = 0.0
            mtg_geos_projection.semi_major_axis = 6378137.
            mtg_geos_projection.inverse_flattening = 298.257223563
            mtg_geos_projection.perspective_point_height = 35786400.
-        test_dataset = nc_byte.createVariable('cloud_mask_test_flag', np.float32,
-                                              dimensions=('number_of_rows', 'number_of_columns',))
+        test_dataset = nc_byte.createVariable("cloud_mask_test_flag", np.float32,
+                                              dimensions=("number_of_rows", "number_of_columns",))
        # This number was chosen as we know the expected byte values
        test_dataset[:] = 4544767
@@ -487,23 +487,23 @@ def tearDown(self):

    def test_byte_extraction(self):
        """Test the execution of the get_dataset function."""
        # Value of 1 is expected to be returned for this test
-        dataset = self.byte_reader.get_dataset(make_dataid(name='cloud_mask_test_flag', resolution=2000),
-                                               {'name': 'cloud_mask_test_flag',
-                                                'file_key': 'cloud_mask_test_flag',
-                                                'fill_value': -999,
-                                                'file_type': 'nc_fci_test_clm',
-                                                'extract_byte': 1,
+        dataset = self.byte_reader.get_dataset(make_dataid(name="cloud_mask_test_flag", resolution=2000),
+                                               {"name": "cloud_mask_test_flag",
+                                                "file_key": "cloud_mask_test_flag",
+                                                "fill_value": -999,
+                                                "file_type": "nc_fci_test_clm",
+                                                "extract_byte": 1,
                                                })
        self.assertEqual(dataset.values, 1)
        # Value of 0 is expected to be returned for this test
-        dataset = self.byte_reader.get_dataset(make_dataid(name='cloud_mask_test_flag', resolution=2000),
-                                               {'name': 'cloud_mask_test_flag',
-                                                'file_key': 'cloud_mask_test_flag',
-                                                'fill_value': -999, 'mask_value': 0.,
-                                                'file_type': 'nc_fci_test_clm',
-                                                'extract_byte': 23,
+        dataset = self.byte_reader.get_dataset(make_dataid(name="cloud_mask_test_flag", resolution=2000),
+                                               {"name": "cloud_mask_test_flag",
+                                                "file_key": "cloud_mask_test_flag",
+                                                "fill_value": -999, "mask_value": 0.,
+                                                "file_type": "nc_fci_test_clm",
+                                                "extract_byte": 23,
                                                })
        self.assertEqual(dataset.values, 0)
diff --git a/satpy/tests/reader_tests/test_fy4_base.py b/satpy/tests/reader_tests/test_fy4_base.py
index 432117e1ad..ae6df61195 100644
--- a/satpy/tests/reader_tests/test_fy4_base.py
+++ b/satpy/tests/reader_tests/test_fy4_base.py
@@ -30,11 +30,11 @@ class Test_FY4Base:
    def setup_method(self):
        """Initialise the tests."""
-        self.p = mock.patch.object(FY4Base, '__bases__', (FakeHDF5FileHandler2,))
+        self.p = mock.patch.object(FY4Base, "__bases__", (FakeHDF5FileHandler2,))
        self.fake_handler = self.p.start()
        self.p.is_local = True
-        self.file_type = {'file_type': 'agri_l1_0500m'}
+        self.file_type = {"file_type": "agri_l1_0500m"}

    def teardown_method(self):
        """Stop wrapping the HDF5 file handler."""
@@ -42,7 +42,7 @@ def test_badsensor(self):
        """Test case where we pass a bad sensor name, must be GHI or AGRI."""
-        fy4 = FY4Base(None, {'platform_id': 'FY4A', 'instrument': 'FCI'}, self.file_type)
+        fy4 = FY4Base(None, {"platform_id": "FY4A", "instrument": "FCI"}, self.file_type)
        with pytest.raises(ValueError):
            fy4.calibrate_to_reflectance(None, None, None)
        with pytest.raises(ValueError):
@@ -50,11 +50,11 @@ def test_badcalibration(self):
        """Test case where we pass a bad calibration type, radiance is not supported."""
-        fy4 = FY4Base(None, {'platform_id': 'FY4A', 'instrument': 'AGRI'}, self.file_type)
+        fy4 = FY4Base(None, {"platform_id": "FY4A", "instrument": "AGRI"}, self.file_type)
        with pytest.raises(NotImplementedError):
-            fy4.calibrate(None, {'calibration': 'radiance'}, None, None)
+            fy4.calibrate(None, {"calibration": "radiance"}, None, None)

    def test_badplatform(self):
        """Test case where we pass a bad platform ID, must be FY4A or FY4B."""
        with pytest.raises(KeyError):
-            FY4Base(None, {'platform_id': 'FY3D', 'instrument': 'AGRI'}, self.file_type)
+            FY4Base(None,
{"platform_id": "FY3D", "instrument": "AGRI"}, self.file_type) diff --git a/satpy/tests/reader_tests/test_generic_image.py b/satpy/tests/reader_tests/test_generic_image.py index 1477586205..dc658ab79a 100644 --- a/satpy/tests/reader_tests/test_generic_image.py +++ b/satpy/tests/reader_tests/test_generic_image.py @@ -41,12 +41,12 @@ def setUp(self): self.date = datetime(2018, 1, 1) # Create area definition - pcs_id = 'ETRS89 / LAEA Europe' + pcs_id = "ETRS89 / LAEA Europe" proj4_dict = "EPSG:3035" self.x_size = 100 self.y_size = 100 area_extent = (2426378.0132, 1528101.2618, 6293974.6215, 5446513.5222) - self.area_def = AreaDefinition('geotiff_area', pcs_id, pcs_id, + self.area_def = AreaDefinition("geotiff_area", pcs_id, pcs_id, proj4_dict, self.x_size, self.y_size, area_extent) @@ -65,56 +65,56 @@ def setUp(self): r_nan__[:10, :10] = np.nan r_nan__ = da.from_array(r_nan__, chunks=(50, 50)) - ds_l = xr.DataArray(da.stack([r__]), dims=('bands', 'y', 'x'), - attrs={'name': 'test_l', - 'start_time': self.date}) - ds_l['bands'] = ['L'] - ds_la = xr.DataArray(da.stack([r__, a__]), dims=('bands', 'y', 'x'), - attrs={'name': 'test_la', - 'start_time': self.date}) - ds_la['bands'] = ['L', 'A'] + ds_l = xr.DataArray(da.stack([r__]), dims=("bands", "y", "x"), + attrs={"name": "test_l", + "start_time": self.date}) + ds_l["bands"] = ["L"] + ds_la = xr.DataArray(da.stack([r__, a__]), dims=("bands", "y", "x"), + attrs={"name": "test_la", + "start_time": self.date}) + ds_la["bands"] = ["L", "A"] ds_rgb = xr.DataArray(da.stack([r__, g__, b__]), - dims=('bands', 'y', 'x'), - attrs={'name': 'test_rgb', - 'start_time': self.date}) - ds_rgb['bands'] = ['R', 'G', 'B'] + dims=("bands", "y", "x"), + attrs={"name": "test_rgb", + "start_time": self.date}) + ds_rgb["bands"] = ["R", "G", "B"] ds_rgba = xr.DataArray(da.stack([r__, g__, b__, a__]), - dims=('bands', 'y', 'x'), - attrs={'name': 'test_rgba', - 'start_time': self.date}) - ds_rgba['bands'] = ['R', 'G', 'B', 'A'] + dims=("bands", "y", "x"), + attrs={"name": "test_rgba", + "start_time": self.date}) + ds_rgba["bands"] = ["R", "G", "B", "A"] ds_l_nan = xr.DataArray(da.stack([r_nan__]), - dims=('bands', 'y', 'x'), - attrs={'name': 'test_l_nan', - 'start_time': self.date}) - ds_l_nan['bands'] = ['L'] + dims=("bands", "y", "x"), + attrs={"name": "test_l_nan", + "start_time": self.date}) + ds_l_nan["bands"] = ["L"] # Temp dir for the saved images self.base_dir = tempfile.mkdtemp() # Put the datasets to Scene for easy saving scn = Scene() - scn['l'] = ds_l - scn['l'].attrs['area'] = self.area_def - scn['la'] = ds_la - scn['la'].attrs['area'] = self.area_def - scn['rgb'] = ds_rgb - scn['rgb'].attrs['area'] = self.area_def - scn['rgba'] = ds_rgba - scn['rgba'].attrs['area'] = self.area_def - scn['l_nan'] = ds_l_nan - scn['l_nan'].attrs['area'] = self.area_def + scn["l"] = ds_l + scn["l"].attrs["area"] = self.area_def + scn["la"] = ds_la + scn["la"].attrs["area"] = self.area_def + scn["rgb"] = ds_rgb + scn["rgb"].attrs["area"] = self.area_def + scn["rgba"] = ds_rgba + scn["rgba"].attrs["area"] = self.area_def + scn["l_nan"] = ds_l_nan + scn["l_nan"].attrs["area"] = self.area_def # Save the images. 
Two images in PNG and four in GeoTIFF
-        scn.save_dataset('l', os.path.join(self.base_dir, 'test_l.png'), writer='simple_image')
-        scn.save_dataset('la', os.path.join(self.base_dir, '20180101_0000_test_la.png'), writer='simple_image')
-        scn.save_dataset('rgb', os.path.join(self.base_dir, '20180101_0000_test_rgb.tif'), writer='geotiff')
-        scn.save_dataset('rgba', os.path.join(self.base_dir, 'test_rgba.tif'), writer='geotiff')
-        scn.save_dataset('l_nan', os.path.join(self.base_dir, 'test_l_nan_fillvalue.tif'),
-                         writer='geotiff', fill_value=0)
-        scn.save_dataset('l_nan', os.path.join(self.base_dir, 'test_l_nan_nofillvalue.tif'),
-                         writer='geotiff')
+        scn.save_dataset("l", os.path.join(self.base_dir, "test_l.png"), writer="simple_image")
+        scn.save_dataset("la", os.path.join(self.base_dir, "20180101_0000_test_la.png"), writer="simple_image")
+        scn.save_dataset("rgb", os.path.join(self.base_dir, "20180101_0000_test_rgb.tif"), writer="geotiff")
+        scn.save_dataset("rgba", os.path.join(self.base_dir, "test_rgba.tif"), writer="geotiff")
+        scn.save_dataset("l_nan", os.path.join(self.base_dir, "test_l_nan_fillvalue.tif"),
+                         writer="geotiff", fill_value=0)
+        scn.save_dataset("l_nan", os.path.join(self.base_dir, "test_l_nan_nofillvalue.tif"),
+                         writer="geotiff")
        self.scn = scn
@@ -130,78 +130,78 @@ def test_png_scene(self):
        """Test reading PNG images via satpy.Scene()."""
        from satpy import Scene
-        fname = os.path.join(self.base_dir, 'test_l.png')
-        scn = Scene(reader='generic_image', filenames=[fname])
-        scn.load(['image'])
-        self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size))
-        self.assertEqual(scn.sensor_names, {'images'})
+        fname = os.path.join(self.base_dir, "test_l.png")
+        scn = Scene(reader="generic_image", filenames=[fname])
+        scn.load(["image"])
+        self.assertEqual(scn["image"].shape, (1, self.y_size, self.x_size))
+        self.assertEqual(scn.sensor_names, {"images"})
        self.assertEqual(scn.start_time, None)
        self.assertEqual(scn.end_time, None)
-        self.assertNotIn('area', scn['image'].attrs)
-
-        fname = os.path.join(self.base_dir, '20180101_0000_test_la.png')
-        scn = Scene(reader='generic_image', filenames=[fname])
-        scn.load(['image'])
-        data = da.compute(scn['image'].data)
-        self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size))
-        self.assertEqual(scn.sensor_names, {'images'})
+        self.assertNotIn("area", scn["image"].attrs)
+
+        fname = os.path.join(self.base_dir, "20180101_0000_test_la.png")
+        scn = Scene(reader="generic_image", filenames=[fname])
+        scn.load(["image"])
+        data = da.compute(scn["image"].data)
+        self.assertEqual(scn["image"].shape, (1, self.y_size, self.x_size))
+        self.assertEqual(scn.sensor_names, {"images"})
        self.assertEqual(scn.start_time, self.date)
        self.assertEqual(scn.end_time, self.date)
-        self.assertNotIn('area', scn['image'].attrs)
+        self.assertNotIn("area", scn["image"].attrs)
        self.assertEqual(np.sum(np.isnan(data)), 100)

    def test_geotiff_scene(self):
        """Test reading TIFF images via satpy.Scene()."""
        from satpy import Scene
-        fname = os.path.join(self.base_dir, '20180101_0000_test_rgb.tif')
-        scn = Scene(reader='generic_image', filenames=[fname])
-        scn.load(['image'])
-        self.assertEqual(scn['image'].shape, (3, self.y_size, self.x_size))
-        self.assertEqual(scn.sensor_names, {'images'})
+        fname = os.path.join(self.base_dir, "20180101_0000_test_rgb.tif")
+        scn = Scene(reader="generic_image", filenames=[fname])
+        scn.load(["image"])
+        self.assertEqual(scn["image"].shape, (3, self.y_size, self.x_size))
+        self.assertEqual(scn.sensor_names, {"images"})
self.assertEqual(scn.start_time, self.date) self.assertEqual(scn.end_time, self.date) - self.assertEqual(scn['image'].area, self.area_def) + self.assertEqual(scn["image"].area, self.area_def) - fname = os.path.join(self.base_dir, 'test_rgba.tif') - scn = Scene(reader='generic_image', filenames=[fname]) - scn.load(['image']) - self.assertEqual(scn['image'].shape, (3, self.y_size, self.x_size)) - self.assertEqual(scn.sensor_names, {'images'}) + fname = os.path.join(self.base_dir, "test_rgba.tif") + scn = Scene(reader="generic_image", filenames=[fname]) + scn.load(["image"]) + self.assertEqual(scn["image"].shape, (3, self.y_size, self.x_size)) + self.assertEqual(scn.sensor_names, {"images"}) self.assertEqual(scn.start_time, None) self.assertEqual(scn.end_time, None) - self.assertEqual(scn['image'].area, self.area_def) + self.assertEqual(scn["image"].area, self.area_def) def test_geotiff_scene_nan(self): """Test reading TIFF images originally containing NaN values via satpy.Scene().""" from satpy import Scene - fname = os.path.join(self.base_dir, 'test_l_nan_fillvalue.tif') - scn = Scene(reader='generic_image', filenames=[fname]) - scn.load(['image']) - self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size)) - self.assertEqual(np.sum(scn['image'].data[0][:10, :10].compute()), 0) + fname = os.path.join(self.base_dir, "test_l_nan_fillvalue.tif") + scn = Scene(reader="generic_image", filenames=[fname]) + scn.load(["image"]) + self.assertEqual(scn["image"].shape, (1, self.y_size, self.x_size)) + self.assertEqual(np.sum(scn["image"].data[0][:10, :10].compute()), 0) - fname = os.path.join(self.base_dir, 'test_l_nan_nofillvalue.tif') - scn = Scene(reader='generic_image', filenames=[fname]) - scn.load(['image']) - self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size)) - self.assertTrue(np.all(np.isnan(scn['image'].data[0][:10, :10].compute()))) + fname = os.path.join(self.base_dir, "test_l_nan_nofillvalue.tif") + scn = Scene(reader="generic_image", filenames=[fname]) + scn.load(["image"]) + self.assertEqual(scn["image"].shape, (1, self.y_size, self.x_size)) + self.assertTrue(np.all(np.isnan(scn["image"].data[0][:10, :10].compute()))) def test_GenericImageFileHandler(self): """Test direct use of the reader.""" from satpy.readers.generic_image import GenericImageFileHandler - fname = os.path.join(self.base_dir, 'test_rgba.tif') - fname_info = {'start_time': self.date} + fname = os.path.join(self.base_dir, "test_rgba.tif") + fname_info = {"start_time": self.date} ftype_info = {} reader = GenericImageFileHandler(fname, fname_info, ftype_info) - foo = make_dataid(name='image') + foo = make_dataid(name="image") self.assertTrue(reader.file_content) - self.assertEqual(reader.finfo['filename'], fname) - self.assertEqual(reader.finfo['start_time'], self.date) - self.assertEqual(reader.finfo['end_time'], self.date) + self.assertEqual(reader.finfo["filename"], fname) + self.assertEqual(reader.finfo["start_time"], self.date) + self.assertEqual(reader.finfo["end_time"], self.date) self.assertEqual(reader.area, self.area_def) self.assertEqual(reader.get_area_def(None), self.area_def) self.assertEqual(reader.start_time, self.date) @@ -209,7 +209,7 @@ def test_GenericImageFileHandler(self): dataset = reader.get_dataset(foo, {}) self.assertTrue(isinstance(dataset, xr.DataArray)) - self.assertIn('spatial_ref', dataset.coords) + self.assertIn("spatial_ref", dataset.coords) self.assertTrue(np.all(np.isnan(dataset.data[:, :10, :10].compute()))) def 
test_GenericImageFileHandler_masking_only_integer(self): @@ -225,59 +225,59 @@ def __init__(self, filename, filename_info, filetype_info, file_content, **kwarg self.dataset_name = None self.file_content.update(kwargs) - data = self.scn['rgba'] + data = self.scn["rgba"] # do nothing if not integer float_data = data / 255. reader = FakeGenericImageFileHandler("dummy", {}, {}, {"image": float_data}) - self.assertIs(reader.get_dataset(make_dataid(name='image'), {}), float_data) + self.assertIs(reader.get_dataset(make_dataid(name="image"), {}), float_data) # masking if integer data = data.astype(np.uint32) self.assertEqual(data.bands.size, 4) reader = FakeGenericImageFileHandler("dummy", {}, {}, {"image": data}) - ret_data = reader.get_dataset(make_dataid(name='image'), {}) + ret_data = reader.get_dataset(make_dataid(name="image"), {}) self.assertEqual(ret_data.bands.size, 3) def test_GenericImageFileHandler_datasetid(self): """Test direct use of the reader.""" from satpy.readers.generic_image import GenericImageFileHandler - fname = os.path.join(self.base_dir, 'test_rgba.tif') - fname_info = {'start_time': self.date} + fname = os.path.join(self.base_dir, "test_rgba.tif") + fname_info = {"start_time": self.date} ftype_info = {} reader = GenericImageFileHandler(fname, fname_info, ftype_info) - foo = make_dataid(name='image-custom') - self.assertTrue(reader.file_content, 'file_content should be set') + foo = make_dataid(name="image-custom") + self.assertTrue(reader.file_content, "file_content should be set") dataset = reader.get_dataset(foo, {}) - self.assertTrue(isinstance(dataset, xr.DataArray), 'dataset should be a xr.DataArray') + self.assertTrue(isinstance(dataset, xr.DataArray), "dataset should be a xr.DataArray") def test_GenericImageFileHandler_nodata(self): """Test nodata handling with direct use of the reader.""" from satpy.readers.generic_image import GenericImageFileHandler - fname = os.path.join(self.base_dir, 'test_l_nan_fillvalue.tif') - fname_info = {'start_time': self.date} + fname = os.path.join(self.base_dir, "test_l_nan_fillvalue.tif") + fname_info = {"start_time": self.date} ftype_info = {} reader = GenericImageFileHandler(fname, fname_info, ftype_info) - foo = make_dataid(name='image-custom') - self.assertTrue(reader.file_content, 'file_content should be set') - info = {'nodata_handling': 'nan_mask'} + foo = make_dataid(name="image-custom") + self.assertTrue(reader.file_content, "file_content should be set") + info = {"nodata_handling": "nan_mask"} dataset = reader.get_dataset(foo, info) - self.assertTrue(isinstance(dataset, xr.DataArray), 'dataset should be a xr.DataArray') - self.assertTrue(np.all(np.isnan(dataset.data[0][:10, :10].compute())), 'values should be np.nan') - self.assertTrue(np.isnan(dataset.attrs['_FillValue']), '_FillValue should be np.nan') + self.assertTrue(isinstance(dataset, xr.DataArray), "dataset should be a xr.DataArray") + self.assertTrue(np.all(np.isnan(dataset.data[0][:10, :10].compute())), "values should be np.nan") + self.assertTrue(np.isnan(dataset.attrs["_FillValue"]), "_FillValue should be np.nan") - info = {'nodata_handling': 'fill_value'} + info = {"nodata_handling": "fill_value"} dataset = reader.get_dataset(foo, info) - self.assertTrue(isinstance(dataset, xr.DataArray), 'dataset should be a xr.DataArray') + self.assertTrue(isinstance(dataset, xr.DataArray), "dataset should be a xr.DataArray") self.assertEqual(np.sum(dataset.data[0][:10, :10].compute()), 0) - self.assertEqual(dataset.attrs['_FillValue'], 0) + 
self.assertEqual(dataset.attrs["_FillValue"], 0) # default same as 'nodata_handling': 'fill_value' dataset = reader.get_dataset(foo, {}) - self.assertTrue(isinstance(dataset, xr.DataArray), 'dataset should be a xr.DataArray') + self.assertTrue(isinstance(dataset, xr.DataArray), "dataset should be a xr.DataArray") self.assertEqual(np.sum(dataset.data[0][:10, :10].compute()), 0) - self.assertEqual(dataset.attrs['_FillValue'], 0) + self.assertEqual(dataset.attrs["_FillValue"], 0) diff --git a/satpy/tests/reader_tests/test_geocat.py b/satpy/tests/reader_tests/test_geocat.py index 91de6a4265..91a6b7de37 100644 --- a/satpy/tests/reader_tests/test_geocat.py +++ b/satpy/tests/reader_tests/test_geocat.py @@ -43,64 +43,64 @@ class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { - '/attr/Platform_Name': filename_info['platform_shortname'], - '/attr/Element_Resolution': 2., - '/attr/Line_Resolution': 2., - '/attr/Subsatellite_Longitude': -70.2 if 'GOES' in filename_info['platform_shortname'] else 140.65, - 'pixel_longitude': DEFAULT_LON_DATA, - 'pixel_longitude/attr/scale_factor': 1., - 'pixel_longitude/attr/add_offset': 0., - 'pixel_longitude/shape': DEFAULT_FILE_SHAPE, - 'pixel_longitude/attr/_FillValue': np.nan, - 'pixel_latitude': DEFAULT_LAT_DATA, - 'pixel_latitude/attr/scale_factor': 1., - 'pixel_latitude/attr/add_offset': 0., - 'pixel_latitude/shape': DEFAULT_FILE_SHAPE, - 'pixel_latitude/attr/_FillValue': np.nan, + "/attr/Platform_Name": filename_info["platform_shortname"], + "/attr/Element_Resolution": 2., + "/attr/Line_Resolution": 2., + "/attr/Subsatellite_Longitude": -70.2 if "GOES" in filename_info["platform_shortname"] else 140.65, + "pixel_longitude": DEFAULT_LON_DATA, + "pixel_longitude/attr/scale_factor": 1., + "pixel_longitude/attr/add_offset": 0., + "pixel_longitude/shape": DEFAULT_FILE_SHAPE, + "pixel_longitude/attr/_FillValue": np.nan, + "pixel_latitude": DEFAULT_LAT_DATA, + "pixel_latitude/attr/scale_factor": 1., + "pixel_latitude/attr/add_offset": 0., + "pixel_latitude/shape": DEFAULT_FILE_SHAPE, + "pixel_latitude/attr/_FillValue": np.nan, } sensor = { - 'HIMAWARI-8': 'himawari8', - 'GOES-17': 'goesr', - 'GOES-16': 'goesr', - 'GOES-13': 'goes', - 'GOES-14': 'goes', - 'GOES-15': 'goes', - }[filename_info['platform_shortname']] - file_content['/attr/Sensor_Name'] = sensor - - if filename_info['platform_shortname'] == 'HIMAWARI-8': - file_content['pixel_longitude'] = DEFAULT_LON_DATA + 130. - - file_content['variable1'] = DEFAULT_FILE_DATA.astype(np.float32) - file_content['variable1/attr/_FillValue'] = -1 - file_content['variable1/attr/scale_factor'] = 1. - file_content['variable1/attr/add_offset'] = 0. - file_content['variable1/attr/units'] = '1' - file_content['variable1/shape'] = DEFAULT_FILE_SHAPE + "HIMAWARI-8": "himawari8", + "GOES-17": "goesr", + "GOES-16": "goesr", + "GOES-13": "goes", + "GOES-14": "goes", + "GOES-15": "goes", + }[filename_info["platform_shortname"]] + file_content["/attr/Sensor_Name"] = sensor + + if filename_info["platform_shortname"] == "HIMAWARI-8": + file_content["pixel_longitude"] = DEFAULT_LON_DATA + 130. + + file_content["variable1"] = DEFAULT_FILE_DATA.astype(np.float32) + file_content["variable1/attr/_FillValue"] = -1 + file_content["variable1/attr/scale_factor"] = 1. + file_content["variable1/attr/add_offset"] = 0. 
+ file_content["variable1/attr/units"] = "1" + file_content["variable1/shape"] = DEFAULT_FILE_SHAPE # data with fill values - file_content['variable2'] = np.ma.masked_array( + file_content["variable2"] = np.ma.masked_array( DEFAULT_FILE_DATA.astype(np.float32), mask=np.zeros_like(DEFAULT_FILE_DATA)) - file_content['variable2'].mask[::5, ::5] = True - file_content['variable2/attr/_FillValue'] = -1 - file_content['variable2/attr/scale_factor'] = 1. - file_content['variable2/attr/add_offset'] = 0. - file_content['variable2/attr/units'] = '1' - file_content['variable2/shape'] = DEFAULT_FILE_SHAPE + file_content["variable2"].mask[::5, ::5] = True + file_content["variable2/attr/_FillValue"] = -1 + file_content["variable2/attr/scale_factor"] = 1. + file_content["variable2/attr/add_offset"] = 0. + file_content["variable2/attr/units"] = "1" + file_content["variable2/shape"] = DEFAULT_FILE_SHAPE # category - file_content['variable3'] = DEFAULT_FILE_DATA.astype(np.byte) - file_content['variable3/attr/_FillValue'] = -128 - file_content['variable3/attr/flag_meanings'] = "clear water supercooled mixed ice unknown" - file_content['variable3/attr/flag_values'] = [0, 1, 2, 3, 4, 5] - file_content['variable3/attr/units'] = '1' - file_content['variable3/shape'] = DEFAULT_FILE_SHAPE - - attrs = ('_FillValue', 'flag_meanings', 'flag_values', 'units') + file_content["variable3"] = DEFAULT_FILE_DATA.astype(np.byte) + file_content["variable3/attr/_FillValue"] = -128 + file_content["variable3/attr/flag_meanings"] = "clear water supercooled mixed ice unknown" + file_content["variable3/attr/flag_values"] = [0, 1, 2, 3, 4, 5] + file_content["variable3/attr/units"] = "1" + file_content["variable3/shape"] = DEFAULT_FILE_SHAPE + + attrs = ("_FillValue", "flag_meanings", "flag_values", "units") convert_file_content_to_data_array( file_content, attrs=attrs, - dims=('z', 'lines', 'elements')) + dims=("z", "lines", "elements")) return file_content @@ -113,9 +113,9 @@ def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.geocat import GEOCATFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(GEOCATFileHandler, '__bases__', (FakeNetCDF4FileHandler2,)) + self.p = mock.patch.object(GEOCATFileHandler, "__bases__", (FakeNetCDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -128,7 +128,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'geocatL2.GOES-13.2015143.234500.nc', + "geocatL2.GOES-13.2015143.234500.nc", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) @@ -140,10 +140,10 @@ def test_init_with_kwargs(self): from satpy.readers import load_reader r = load_reader(self.reader_configs, xarray_kwargs={"decode_times": True}) loadables = r.select_files_from_pathnames([ - 'geocatL2.GOES-13.2015143.234500.nc', + "geocatL2.GOES-13.2015143.234500.nc", ]) assert len(loadables) == 1 - r.create_filehandlers(loadables, fh_kwargs={"xarray_kwargs": {'decode_times': True}}) + r.create_filehandlers(loadables, fh_kwargs={"xarray_kwargs": {"decode_times": True}}) # make sure we have some files assert r.file_handlers @@ -153,19 +153,19 @@ def test_load_all_old_goes(self): 
from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.geocat.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.geocat.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'geocatL2.GOES-13.2015143.234500.nc', + "geocatL2.GOES-13.2015143.234500.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['variable1', - 'variable2', - 'variable3']) + datasets = r.load(["variable1", + "variable2", + "variable3"]) self.assertEqual(len(datasets), 3) for v in datasets.values(): - assert 'calibration' not in v.attrs - self.assertEqual(v.attrs['units'], '1') - self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) + assert "calibration" not in v.attrs + self.assertEqual(v.attrs["units"], "1") + self.assertIsNotNone(datasets["variable3"].attrs.get("flag_meanings")) def test_load_all_himawari8(self): """Test loading all test datasets from H8 NetCDF file.""" @@ -174,20 +174,20 @@ def test_load_all_himawari8(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.geocat.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.geocat.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'geocatL2.HIMAWARI-8.2017092.210730.R304.R20.nc', + "geocatL2.HIMAWARI-8.2017092.210730.R304.R20.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['variable1', - 'variable2', - 'variable3']) + datasets = r.load(["variable1", + "variable2", + "variable3"]) self.assertEqual(len(datasets), 3) for v in datasets.values(): - assert 'calibration' not in v.attrs - self.assertEqual(v.attrs['units'], '1') - self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) - self.assertIsInstance(datasets['variable1'].attrs['area'], AreaDefinition) + assert "calibration" not in v.attrs + self.assertEqual(v.attrs["units"], "1") + self.assertIsNotNone(datasets["variable3"].attrs.get("flag_meanings")) + self.assertIsInstance(datasets["variable1"].attrs["area"], AreaDefinition) def test_load_all_goes17_hdf4(self): """Test loading all test datasets from GOES-17 HDF4 file.""" @@ -196,17 +196,17 @@ def test_load_all_goes17_hdf4(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.geocat.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.geocat.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'geocatL2.GOES-17.CONUS.2020041.163130.hdf', + "geocatL2.GOES-17.CONUS.2020041.163130.hdf", ]) r.create_filehandlers(loadables) - datasets = r.load(['variable1', - 'variable2', - 'variable3']) + datasets = r.load(["variable1", + "variable2", + "variable3"]) self.assertEqual(len(datasets), 3) for v in datasets.values(): - assert 'calibration' not in v.attrs - self.assertEqual(v.attrs['units'], '1') - self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) - self.assertIsInstance(datasets['variable1'].attrs['area'], AreaDefinition) + assert "calibration" not in v.attrs + self.assertEqual(v.attrs["units"], "1") + self.assertIsNotNone(datasets["variable3"].attrs.get("flag_meanings")) + self.assertIsInstance(datasets["variable1"].attrs["area"], AreaDefinition) diff --git a/satpy/tests/reader_tests/test_geos_area.py b/satpy/tests/reader_tests/test_geos_area.py index d7d4c2a510..76dc2d7b92 100644 --- a/satpy/tests/reader_tests/test_geos_area.py +++ b/satpy/tests/reader_tests/test_geos_area.py @@ -37,58 +37,58 @@ 
class TestGEOSProjectionUtil(unittest.TestCase): def make_pdict_ext(self, typ, scan): """Create a dictionary and extents to use in testing.""" if typ == 1: # Fulldisk - pdict = {'a': 6378169.00, - 'b': 6356583.80, - 'h': 35785831.00, - 'ssp_lon': 0.0, - 'nlines': 3712, - 'ncols': 3712, - 'a_name': 'geostest', - 'a_desc': 'test area', - 'p_id': 'test_area', - 'cfac': -13642337, - 'lfac': -13642337, - 'coff': 1856} - if scan == 'N2S': - pdict['scandir'] = 'N2S' - pdict['loff'] = 1856 + pdict = {"a": 6378169.00, + "b": 6356583.80, + "h": 35785831.00, + "ssp_lon": 0.0, + "nlines": 3712, + "ncols": 3712, + "a_name": "geostest", + "a_desc": "test area", + "p_id": "test_area", + "cfac": -13642337, + "lfac": -13642337, + "coff": 1856} + if scan == "N2S": + pdict["scandir"] = "N2S" + pdict["loff"] = 1856 extent = (5567248.28340708, 5567248.28340708, -5570248.686685662, -5570248.686685662) - if scan == 'S2N': - pdict['scandir'] = 'S2N' - pdict['loff'] = -1856 + if scan == "S2N": + pdict["scandir"] = "S2N" + pdict["loff"] = -1856 extent = (5567248.28340708, 5570248.686685662, -5570248.686685662, -5567248.28340708) if typ == 2: # One sector - pdict = {'a': 6378169.00, - 'b': 6356583.80, - 'h': 35785831.00, - 'ssp_lon': 0.0, - 'nlines': 464, - 'ncols': 3712, - 'a_name': 'geostest', - 'a_desc': 'test area', - 'p_id': 'test_area', - 'cfac': -13642337, - 'lfac': -13642337, - 'coff': 1856} - if scan == 'N2S': - pdict['scandir'] = 'N2S' - pdict['loff'] = 464 + pdict = {"a": 6378169.00, + "b": 6356583.80, + "h": 35785831.00, + "ssp_lon": 0.0, + "nlines": 464, + "ncols": 3712, + "a_name": "geostest", + "a_desc": "test area", + "p_id": "test_area", + "cfac": -13642337, + "lfac": -13642337, + "coff": 1856} + if scan == "N2S": + pdict["scandir"] = "N2S" + pdict["loff"] = 464 extent = (5567248.28340708, 1390686.9196223018, -5570248.686685662, -1500.2016392905093) - if scan == 'S2N': - pdict['scandir'] = 'S2N' - pdict['loff'] = 464 + if scan == "S2N": + pdict["scandir"] = "S2N" + pdict["loff"] = 464 extent = (5567248.28340708, -1390686.9196223018, -5570248.686685662, @@ -99,39 +99,39 @@ def make_pdict_ext(self, typ, scan): def test_geos_area(self): """Test area extent calculation with N->S scan then S->N scan.""" # North -> South full disk - pdict, extent = self.make_pdict_ext(1, 'N2S') + pdict, extent = self.make_pdict_ext(1, "N2S") aex = get_area_extent(pdict) np.testing.assert_allclose(aex, extent) # South -> North full disk - pdict, extent = self.make_pdict_ext(1, 'S2N') + pdict, extent = self.make_pdict_ext(1, "S2N") aex = get_area_extent(pdict) np.testing.assert_allclose(aex, extent) # North -> South one sector - pdict, extent = self.make_pdict_ext(2, 'N2S') + pdict, extent = self.make_pdict_ext(2, "N2S") aex = get_area_extent(pdict) np.testing.assert_allclose(aex, extent) # South -> North one sector - pdict, extent = self.make_pdict_ext(2, 'S2N') + pdict, extent = self.make_pdict_ext(2, "S2N") aex = get_area_extent(pdict) np.testing.assert_allclose(aex, extent) def test_get_xy_from_linecol(self): """Test the scan angle calculation.""" - pdict, extent = self.make_pdict_ext(1, 'S2N') + pdict, extent = self.make_pdict_ext(1, "S2N") good_xy = [0.2690166648133674, -10.837528496767087] - factors = (pdict['lfac'], pdict['cfac']) - offsets = (pdict['loff'], pdict['coff']) + factors = (pdict["lfac"], pdict["cfac"]) + offsets = (pdict["loff"], pdict["coff"]) x, y = get_xy_from_linecol(400, 1800, offsets, factors) np.testing.assert_approx_equal(x, good_xy[0]) np.testing.assert_approx_equal(y, good_xy[1]) - pdict, 
extent = self.make_pdict_ext(2, 'N2S') + pdict, extent = self.make_pdict_ext(2, "N2S") good_xy = [0.2690166648133674, 0.30744761692956274] - factors = (pdict['lfac'], pdict['cfac']) - offsets = (pdict['loff'], pdict['coff']) + factors = (pdict["lfac"], pdict["cfac"]) + offsets = (pdict["loff"], pdict["coff"]) x, y = get_xy_from_linecol(400, 1800, offsets, factors) np.testing.assert_approx_equal(x, good_xy[0]) np.testing.assert_approx_equal(y, good_xy[1]) @@ -139,18 +139,18 @@ def test_get_xy_from_linecol(self): def test_get_area_definition(self): """Test the retrieval of the area definition.""" from pyresample.utils import proj4_radius_parameters - pdict, extent = self.make_pdict_ext(1, 'N2S') + pdict, extent = self.make_pdict_ext(1, "N2S") good_res = (-3000.4032785810186, -3000.4032785810186) a_def = get_area_definition(pdict, extent) - self.assertEqual(a_def.area_id, pdict['a_name']) + self.assertEqual(a_def.area_id, pdict["a_name"]) self.assertEqual(a_def.resolution, good_res) - self.assertEqual(a_def.proj_dict['proj'], 'geos') - self.assertEqual(a_def.proj_dict['units'], 'm') + self.assertEqual(a_def.proj_dict["proj"], "geos") + self.assertEqual(a_def.proj_dict["units"], "m") a, b = proj4_radius_parameters(a_def.proj_dict) self.assertEqual(a, 6378169) self.assertEqual(b, 6356583.8) - self.assertEqual(a_def.proj_dict['h'], 35785831) + self.assertEqual(a_def.proj_dict["h"], 35785831) def test_sampling_to_lfac_cfac(self): """Test conversion from angular sampling to line/column offset.""" @@ -160,26 +160,26 @@ def test_sampling_to_lfac_cfac(self): def test_get_geos_area_naming(self): """Test the geos area naming function.""" - input_dict = {'platform_name': 'testplatform', - 'instrument_name': 'testinstrument', - 'resolution': 1000, - 'service_name': 'testservicename', - 'service_desc': 'testdesc'} + input_dict = {"platform_name": "testplatform", + "instrument_name": "testinstrument", + "resolution": 1000, + "service_name": "testservicename", + "service_desc": "testdesc"} output_dict = get_geos_area_naming(input_dict) - self.assertEqual(output_dict['area_id'], 'testplatform_testinstrument_testservicename_1km') - self.assertEqual(output_dict['description'], 'TESTPLATFORM TESTINSTRUMENT testdesc area definition' - ' with 1 km resolution') + self.assertEqual(output_dict["area_id"], "testplatform_testinstrument_testservicename_1km") + self.assertEqual(output_dict["description"], "TESTPLATFORM TESTINSTRUMENT testdesc area definition" + " with 1 km resolution") def test_get_resolution_and_unit_strings_in_km(self): """Test the resolution and unit strings function for a km resolution.""" out = get_resolution_and_unit_strings(1000) - self.assertEqual(out['value'], '1') - self.assertEqual(out['unit'], 'km') + self.assertEqual(out["value"], "1") + self.assertEqual(out["unit"], "km") def test_get_resolution_and_unit_strings_in_m(self): """Test the resolution and unit strings function for a m resolution.""" out = get_resolution_and_unit_strings(500) - self.assertEqual(out['value'], '500') - self.assertEqual(out['unit'], 'm') + self.assertEqual(out["value"], "500") + self.assertEqual(out["unit"], "m") diff --git a/satpy/tests/reader_tests/test_ghi_l1.py b/satpy/tests/reader_tests/test_ghi_l1.py index 79667ef37d..2b6ff4af54 100644 --- a/satpy/tests/reader_tests/test_ghi_l1.py +++ b/satpy/tests/reader_tests/test_ghi_l1.py @@ -33,7 +33,7 @@ CHANNELS_BY_RESOLUTION = {250: ["C01"], 500: ["C01", "C02", "C03", "C04", "C05", "C06"], 2000: ALL_BAND_NAMES, - 'GEO': 'solar_azimuth_angle' + "GEO": 
"solar_azimuth_angle" } AREA_EXTENTS_BY_RESOLUTION = { @@ -48,58 +48,58 @@ class FakeHDF5FileHandler2(FakeHDF5FileHandler): def make_test_data(self, cwl, ch, prefix, dims, file_type): """Make test data.""" - if prefix == 'CAL': + if prefix == "CAL": data = xr.DataArray( da.from_array((np.arange(10.) + 1.) / 10., [dims[0] * dims[1]]), attrs={ - 'Slope': np.array(1.), 'Intercept': np.array(0.), - 'FillValue': np.array(-65535.0), - 'units': 'NUL', - 'center_wavelength': '{}um'.format(cwl).encode('utf-8'), - 'band_names': 'band{}(band number is range from 1 to 14)' - .format(ch).encode('utf-8'), - 'long_name': 'Calibration table of {}um Channel'.format(cwl).encode('utf-8'), - 'valid_range': np.array([0, 1.5]), + "Slope": np.array(1.), "Intercept": np.array(0.), + "FillValue": np.array(-65535.0), + "units": "NUL", + "center_wavelength": "{}um".format(cwl).encode("utf-8"), + "band_names": "band{}(band number is range from 1 to 14)" + .format(ch).encode("utf-8"), + "long_name": "Calibration table of {}um Channel".format(cwl).encode("utf-8"), + "valid_range": np.array([0, 1.5]), }, - dims='_const') + dims="_const") - elif prefix == 'NOM': + elif prefix == "NOM": data = xr.DataArray( da.from_array(np.arange(10, dtype=np.uint16).reshape((2, 5)) + 1, [dim for dim in dims]), attrs={ - 'Slope': np.array(1.), 'Intercept': np.array(0.), - 'FillValue': np.array(65535), - 'units': 'DN', - 'center_wavelength': '{}um'.format(cwl).encode('utf-8'), - 'band_names': 'band{}(band number is range from 1 to 7)' - .format(ch).encode('utf-8'), - 'long_name': 'Calibration table of {}um Channel'.format(cwl).encode('utf-8'), - 'valid_range': np.array([0, 4095]), + "Slope": np.array(1.), "Intercept": np.array(0.), + "FillValue": np.array(65535), + "units": "DN", + "center_wavelength": "{}um".format(cwl).encode("utf-8"), + "band_names": "band{}(band number is range from 1 to 7)" + .format(ch).encode("utf-8"), + "long_name": "Calibration table of {}um Channel".format(cwl).encode("utf-8"), + "valid_range": np.array([0, 4095]), }, - dims=('_RegLength', '_RegWidth')) + dims=("_RegLength", "_RegWidth")) - elif prefix == 'GEO': + elif prefix == "GEO": data = xr.DataArray( da.from_array(np.arange(10, dtype=np.float32).reshape((2, 5)) + 1, [dim for dim in dims]), attrs={ - 'Slope': np.array(1.), 'Intercept': np.array(0.), - 'FillValue': np.array(65535.), - 'units': 'NUL', - 'band_names': 'NUL', - 'valid_range': np.array([0., 360.]), + "Slope": np.array(1.), "Intercept": np.array(0.), + "FillValue": np.array(65535.), + "units": "NUL", + "band_names": "NUL", + "valid_range": np.array([0., 360.]), }, - dims=('_RegLength', '_RegWidth')) + dims=("_RegLength", "_RegWidth")) - elif prefix == 'COEF': - if file_type == '250': + elif prefix == "COEF": + if file_type == "250": data = self._create_coeff_array(1) - elif file_type == '500': + elif file_type == "500": data = self._create_coeff_array(6) - elif file_type == '2000': + elif file_type == "2000": data = self._create_coeff_array(7) return data @@ -109,14 +109,14 @@ def _create_coeff_array(self, nb_channels): da.from_array((np.arange(nb_channels * 2).reshape((nb_channels, 2)) + 1.) 
/ np.array([1E4, 1E2]), [nb_channels, 2]), attrs={ - 'Slope': 1., 'Intercept': 0., - 'FillValue': 0, - 'units': 'NUL', - 'band_names': 'NUL', - 'long_name': b'Calibration coefficient (SCALE and OFFSET)', - 'valid_range': [-500, 500], + "Slope": 1., "Intercept": 0., + "FillValue": 0, + "units": "NUL", + "band_names": "NUL", + "long_name": b"Calibration coefficient (SCALE and OFFSET)", + "valid_range": [-500, 500], }, - dims=('_num_channel', '_coefs')) + dims=("_num_channel", "_coefs")) return data def _create_channel_data(self, chs, cwls, file_type): @@ -124,11 +124,11 @@ def _create_channel_data(self, chs, cwls, file_type): dim_1 = 5 data = {} for index, _cwl in enumerate(cwls): - data['Calibration/CALChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'CAL', + data["Calibration/CALChannel" + "%02d" % chs[index]] = self.make_test_data(cwls[index], chs[index], "CAL", [dim_0, dim_1], file_type) - data['Data/NOMChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'NOM', + data["Data/NOMChannel" + "%02d" % chs[index]] = self.make_test_data(cwls[index], chs[index], "NOM", [dim_0, dim_1], file_type) - data['Calibration/CALIBRATION_COEF(SCALE+OFFSET)'] = self.make_test_data(cwls[index], chs[index], 'COEF', + data["Calibration/CALIBRATION_COEF(SCALE+OFFSET)"] = self.make_test_data(cwls[index], chs[index], "COEF", [dim_0, dim_1], file_type) return data @@ -156,39 +156,39 @@ def _get_2km_data(self, file_type): def _get_geo_data(self, file_type): dim_0 = 2 dim_1 = 5 - data = {'Navigation/NOMSunAzimuth': self.make_test_data('NUL', 'NUL', 'GEO', + data = {"Navigation/NOMSunAzimuth": self.make_test_data("NUL", "NUL", "GEO", [dim_0, dim_1], file_type)} return data def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" global_attrs = { - '/attr/NOMSubSatLat': np.array(0.0), - '/attr/NOMSubSatLon': np.array(133.0), - '/attr/NOMSatHeight': np.array(3.5786E7), - '/attr/Semi_major_axis': np.array(6378.14), - '/attr/Semi_minor_axis': np.array(6353.28), - '/attr/OBIType': 'REGX', - '/attr/RegLength': np.array(2.0), - '/attr/RegWidth': np.array(5.0), - '/attr/Corner-Point Latitudes': np.array((4.1, 5.1, 4.1, 5.1)), - '/attr/Corner-Point Longitudes': np.array((141.1, 141.1, 141.1, 151.1)), - '/attr/Begin Line Number': np.array(0), - '/attr/End Line Number': np.array(1), - '/attr/Observing Beginning Date': '2019-06-03', '/attr/Observing Beginning Time': '00:30:01.807', - '/attr/Observing Ending Date': '2019-06-03', '/attr/Observing Ending Time': '00:34:07.572', - '/attr/Satellite Name': 'FY4B', '/attr/Sensor Identification Code': 'GHI', '/attr/Sensor Name': 'GHI', + "/attr/NOMSubSatLat": np.array(0.0), + "/attr/NOMSubSatLon": np.array(133.0), + "/attr/NOMSatHeight": np.array(3.5786E7), + "/attr/Semi_major_axis": np.array(6378.14), + "/attr/Semi_minor_axis": np.array(6353.28), + "/attr/OBIType": "REGX", + "/attr/RegLength": np.array(2.0), + "/attr/RegWidth": np.array(5.0), + "/attr/Corner-Point Latitudes": np.array((4.1, 5.1, 4.1, 5.1)), + "/attr/Corner-Point Longitudes": np.array((141.1, 141.1, 141.1, 151.1)), + "/attr/Begin Line Number": np.array(0), + "/attr/End Line Number": np.array(1), + "/attr/Observing Beginning Date": "2019-06-03", "/attr/Observing Beginning Time": "00:30:01.807", + "/attr/Observing Ending Date": "2019-06-03", "/attr/Observing Ending Time": "00:34:07.572", + "/attr/Satellite Name": "FY4B", "/attr/Sensor Identification Code": "GHI", "/attr/Sensor Name": "GHI", } data = {} - if 
self.filetype_info['file_type'] == 'ghi_l1_0250m': - data = self._get_250m_data('250') - elif self.filetype_info['file_type'] == 'ghi_l1_0500m': - data = self._get_500m_data('500') - elif self.filetype_info['file_type'] == 'ghi_l1_2000m': - data = self._get_2km_data('2000') - elif self.filetype_info['file_type'] == 'ghi_l1_2000m_geo': - data = self._get_geo_data('2000') + if self.filetype_info["file_type"] == "ghi_l1_0250m": + data = self._get_250m_data("250") + elif self.filetype_info["file_type"] == "ghi_l1_0500m": + data = self._get_500m_data("500") + elif self.filetype_info["file_type"] == "ghi_l1_2000m": + data = self._get_2km_data("2000") + elif self.filetype_info["file_type"] == "ghi_l1_2000m_geo": + data = self._get_geo_data("2000") test_content = {} test_content.update(global_attrs) @@ -199,7 +199,7 @@ def get_test_content(self, filename, filename_info, filetype_info): def _create_filenames_from_resolutions(*resolutions): """Create filenames from the given resolutions.""" - if 'GEO' in resolutions: + if "GEO" in resolutions: return ["FY4B-_GHI---_N_REGX_1330E_L1-_GEO-_MULT_NOM_20220613145300_20220613145359_2000M_V0001.HDF"] pattern = ("FY4B-_GHI---_N_REGX_1330E_L1-_FDI-_MULT_NOM_20220613145300_20220613145359_" "{resolution:04d}M_V0001.HDF") @@ -216,21 +216,21 @@ def setup_method(self): from satpy._config import config_search_paths from satpy.readers.fy4_base import FY4Base from satpy.readers.ghi_l1 import HDF_GHI_L1 - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.fy4 = mock.patch.object(FY4Base, '__bases__', (FakeHDF5FileHandler2,)) + self.fy4 = mock.patch.object(FY4Base, "__bases__", (FakeHDF5FileHandler2,)) self.p = mock.patch.object(HDF_GHI_L1.__class__, (self.fy4,)) self.fake_handler = self.fy4.start() self.p.is_local = True self.expected = { - 'C01': np.array([[2.01, 2.02, 2.03, 2.04, 2.05], [2.06, 2.07, 2.08, 2.09, 2.1]]), - 'C02': np.array([[4.03, 4.06, 4.09, 4.12, 4.15], [4.18, 4.21, 4.24, 4.27, 4.3]]), - 'C03': np.array([[6.05, 6.1, 6.15, 6.2, 6.25], [6.3, 6.35, 6.4, 6.45, 6.5]]), - 'C04': np.array([[8.07, 8.14, 8.21, 8.28, 8.35], [8.42, 8.49, 8.56, 8.63, 8.7]]), - 'C05': np.array([[10.09, 10.18, 10.27, 10.36, 10.45], [10.54, 10.63, 10.72, 10.81, 10.9]]), - 'C06': np.array([[12.11, 12.22, 12.33, 12.44, 12.55], [12.66, 12.77, 12.88, 12.99, 13.1]]), - 'C07': np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), + "C01": np.array([[2.01, 2.02, 2.03, 2.04, 2.05], [2.06, 2.07, 2.08, 2.09, 2.1]]), + "C02": np.array([[4.03, 4.06, 4.09, 4.12, 4.15], [4.18, 4.21, 4.24, 4.27, 4.3]]), + "C03": np.array([[6.05, 6.1, 6.15, 6.2, 6.25], [6.3, 6.35, 6.4, 6.45, 6.5]]), + "C04": np.array([[8.07, 8.14, 8.21, 8.28, 8.35], [8.42, 8.49, 8.56, 8.63, 8.7]]), + "C05": np.array([[10.09, 10.18, 10.27, 10.36, 10.45], [10.54, 10.63, 10.72, 10.81, 10.9]]), + "C06": np.array([[12.11, 12.22, 12.33, 12.44, 12.55], [12.66, 12.77, 12.88, 12.99, 13.1]]), + "C07": np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), } def teardown_method(self): @@ -266,12 +266,12 @@ def test_ghi_orbital_parameters_are_correct(self): res = reader.load(band_names) # check whether the data type of orbital_parameters is float - orbital_parameters = res[band_names[0]].attrs['orbital_parameters'] + orbital_parameters = res[band_names[0]].attrs["orbital_parameters"] for attr in 
orbital_parameters: assert isinstance(orbital_parameters[attr], float) - assert orbital_parameters['satellite_nominal_latitude'] == 0. - assert orbital_parameters['satellite_nominal_longitude'] == 133.0 - assert orbital_parameters['satellite_nominal_altitude'] == 3.5786E7 + assert orbital_parameters["satellite_nominal_latitude"] == 0. + assert orbital_parameters["satellite_nominal_longitude"] == 133.0 + assert orbital_parameters["satellite_nominal_altitude"] == 3.5786E7 @staticmethod def _check_keys_for_dsq(available_datasets, resolution_to_test): @@ -282,7 +282,7 @@ def _check_keys_for_dsq(available_datasets, resolution_to_test): for band_name in band_names: ds_q = make_dsq(name=band_name, resolution=resolution_to_test) res = get_key(ds_q, available_datasets, num_results=0, best=False) - if band_name < 'C07': + if band_name < "C07": assert len(res) == 2 else: assert len(res) == 3 @@ -295,21 +295,21 @@ def test_ghi_counts_calibration(self): ds_ids = [] band_names = CHANNELS_BY_RESOLUTION[2000] for band_name in band_names: - ds_ids.append(make_dsq(name=band_name, calibration='counts')) + ds_ids.append(make_dsq(name=band_name, calibration="counts")) res = reader.load(ds_ids) assert len(res) == 7 for band_name in band_names: assert res[band_name].shape == (2, 5) - assert res[band_name].attrs['calibration'] == "counts" + assert res[band_name].attrs["calibration"] == "counts" assert res[band_name].dtype == np.uint16 - assert res[band_name].attrs['units'] == "1" + assert res[band_name].attrs["units"] == "1" def test_ghi_geo(self): """Test loading data for angles.""" from satpy.tests.utils import make_dsq - reader = self._create_reader_for_resolutions('GEO') - band_name = 'solar_azimuth_angle' + reader = self._create_reader_for_resolutions("GEO") + band_name = "solar_azimuth_angle" ds_ids = [make_dsq(name=band_name)] res = reader.load(ds_ids) assert len(res) == 1 @@ -339,26 +339,26 @@ def test_ghi_for_one_resolution(self, resolution_to_test): assert len(res) == len(band_names) self._check_calibration_and_units(band_names, res) for band_name in band_names: - np.testing.assert_allclose(np.array(res[band_name].attrs['area'].area_extent), + np.testing.assert_allclose(np.array(res[band_name].attrs["area"].area_extent), np.array(AREA_EXTENTS_BY_RESOLUTION[resolution_to_test])) def _check_calibration_and_units(self, band_names, result): for band_name in band_names: - assert result[band_name].attrs['sensor'].islower() + assert result[band_name].attrs["sensor"].islower() assert result[band_name].shape == (2, 5) np.testing.assert_allclose(result[band_name].values, self.expected[band_name], equal_nan=True) self._check_units(band_name, result) @staticmethod def _check_units(band_name, result): - if band_name <= 'C06': - assert result[band_name].attrs['calibration'] == "reflectance" + if band_name <= "C06": + assert result[band_name].attrs["calibration"] == "reflectance" else: - assert result[band_name].attrs['calibration'] == 'brightness_temperature' - if band_name <= 'C06': - assert result[band_name].attrs['units'] == "%" + assert result[band_name].attrs["calibration"] == "brightness_temperature" + if band_name <= "C06": + assert result[band_name].attrs["units"] == "%" else: - assert result[band_name].attrs['units'] == "K" + assert result[band_name].attrs["units"] == "K" @staticmethod def _assert_which_channels_are_loaded(available_datasets, band_names, resolution_to_test): @@ -375,7 +375,7 @@ def _assert_which_channels_are_loaded(available_datasets, band_names, resolution ds_q = make_dsq(name=band_name, 
resolution=resolution_to_test) res = get_key(ds_q, available_datasets, num_results=0, best=False) - if band_name < 'C07': + if band_name < "C07": assert len(res) == 2 else: assert len(res) == 3 diff --git a/satpy/tests/reader_tests/test_ghrsst_l2.py b/satpy/tests/reader_tests/test_ghrsst_l2.py index e33cec467a..66c030e91d 100644 --- a/satpy/tests/reader_tests/test_ghrsst_l2.py +++ b/satpy/tests/reader_tests/test_ghrsst_l2.py @@ -39,35 +39,35 @@ def setup_method(self, tmp_path): self.lat_data = np.array(([43.43, 55.56, 61.25], [41.38, 50.28, 60.80])) self.lon = xr.DataArray( self.lon_data, - dims=('nj', 'ni'), - attrs={'standard_name': 'longitude', - 'units': 'degrees_east', + dims=("nj", "ni"), + attrs={"standard_name": "longitude", + "units": "degrees_east", } ) self.lat = xr.DataArray( self.lat_data, - dims=('nj', 'ni'), - attrs={'standard_name': 'latitude', - 'units': 'degrees_north', + dims=("nj", "ni"), + attrs={"standard_name": "latitude", + "units": "degrees_north", } ) self.sst = xr.DataArray( self.base_data, - dims=('nj', 'ni'), - attrs={'scale_factor': 0.01, 'add_offset': 273.15, - '_FillValue': -32768, 'units': 'kelvin', + dims=("nj", "ni"), + attrs={"scale_factor": 0.01, "add_offset": 273.15, + "_FillValue": -32768, "units": "kelvin", } ) self.fake_dataset = xr.Dataset( data_vars={ - 'sea_surface_temperature': self.sst, - 'longitude': self.lon, - 'latitude': self.lat, + "sea_surface_temperature": self.sst, + "longitude": self.lon, + "latitude": self.lat, }, attrs={ "start_time": "20220321T112640Z", "stop_time": "20220321T145711Z", - "platform": 'NOAA20', + "platform": "NOAA20", "sensor": "VIIRS", }, ) @@ -81,12 +81,12 @@ def _create_tarfile_with_testdata(self, mypath): slstrdir.mkdir(parents=True, exist_ok=True) tarfile_path = mypath / tarfile_fakename - ncfilename = slstrdir / 'L2P_GHRSST-SSTskin-202204131200.nc' + ncfilename = slstrdir / "L2P_GHRSST-SSTskin-202204131200.nc" self.fake_dataset.to_netcdf(os.fspath(ncfilename)) - xmlfile_path = slstrdir / 'xfdumanifest.xml' + xmlfile_path = slstrdir / "xfdumanifest.xml" xmlfile_path.touch() - with tarfile.open(name=tarfile_path, mode='w') as tar: + with tarfile.open(name=tarfile_path, mode="w") as tar: tar.add(os.fspath(ncfilename), arcname=Path(slstr_fakename) / ncfilename.name) tar.add(os.fspath(xmlfile_path), arcname=Path(slstr_fakename) / xmlfile_path.name) @@ -95,7 +95,7 @@ def _create_tarfile_with_testdata(self, mypath): def test_instantiate_single_netcdf_file(self, tmp_path): """Test initialization of file handlers - given a single netCDF file.""" filename_info = {} - tmp_filepath = tmp_path / 'fake_dataset.nc' + tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) GHRSSTL2FileHandler(os.fspath(tmp_filepath), filename_info, None) @@ -110,29 +110,29 @@ def test_instantiate_tarfile(self, tmp_path): def test_get_dataset(self, tmp_path): """Test retrieval of datasets.""" filename_info = {} - tmp_filepath = tmp_path / 'fake_dataset.nc' + tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) test = GHRSSTL2FileHandler(os.fspath(tmp_filepath), filename_info, None) - test.get_dataset('longitude', {'standard_name': 'longitude'}) - test.get_dataset('latitude', {'standard_name': 'latitude'}) - test.get_dataset('sea_surface_temperature', {'standard_name': 'sea_surface_temperature'}) + test.get_dataset("longitude", {"standard_name": "longitude"}) + test.get_dataset("latitude", {"standard_name": "latitude"}) + 
test.get_dataset("sea_surface_temperature", {"standard_name": "sea_surface_temperature"}) with pytest.raises(KeyError): - test.get_dataset('erroneous dataset', {'standard_name': 'erroneous dataset'}) + test.get_dataset("erroneous dataset", {"standard_name": "erroneous dataset"}) def test_get_sensor(self, tmp_path): """Test retrieval of the sensor name from the netCDF file.""" dt_valid = datetime(2022, 3, 21, 11, 26, 40) # 202203211200Z - filename_info = {'field_type': 'NARSST', 'generating_centre': 'FRA_', - 'satid': 'NOAA20_', 'valid_time': dt_valid} + filename_info = {"field_type": "NARSST", "generating_centre": "FRA_", + "satid": "NOAA20_", "valid_time": dt_valid} - tmp_filepath = tmp_path / 'fake_dataset.nc' + tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) test = GHRSSTL2FileHandler(os.fspath(tmp_filepath), filename_info, None) - assert test.sensor == 'viirs' + assert test.sensor == "viirs" def test_get_start_and_end_times(self, tmp_path): """Test retrieval of the sensor name from the netCDF file.""" @@ -140,10 +140,10 @@ def test_get_start_and_end_times(self, tmp_path): good_start_time = datetime(2022, 3, 21, 11, 26, 40) # 20220321T112640Z good_stop_time = datetime(2022, 3, 21, 14, 57, 11) # 20220321T145711Z - filename_info = {'field_type': 'NARSST', 'generating_centre': 'FRA_', - 'satid': 'NOAA20_', 'valid_time': dt_valid} + filename_info = {"field_type": "NARSST", "generating_centre": "FRA_", + "satid": "NOAA20_", "valid_time": dt_valid} - tmp_filepath = tmp_path / 'fake_dataset.nc' + tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) test = GHRSSTL2FileHandler(os.fspath(tmp_filepath), filename_info, None) diff --git a/satpy/tests/reader_tests/test_glm_l2.py b/satpy/tests/reader_tests/test_glm_l2.py index 57d324f0b1..3744f2c964 100644 --- a/satpy/tests/reader_tests/test_glm_l2.py +++ b/satpy/tests/reader_tests/test_glm_l2.py @@ -33,72 +33,72 @@ def setup_fake_dataset(): fed = fed.astype(np.int16) fed = xr.DataArray( fed, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'scale_factor': 0.5, - 'add_offset': -1., - '_FillValue': 0, - 'units': 'Count per nominal 3136 microradian^2 pixel per 1.0 min', - 'grid_mapping': 'goes_imager_projection', - 'standard_name': 'flash_extent_density', - 'long_name': 'Flash extent density', + "scale_factor": 0.5, + "add_offset": -1., + "_FillValue": 0, + "units": "Count per nominal 3136 microradian^2 pixel per 1.0 min", + "grid_mapping": "goes_imager_projection", + "standard_name": "flash_extent_density", + "long_name": "Flash extent density", } ) dqf = xr.DataArray( fed.data.copy().astype(np.uint8), - dims=('y', 'x'), + dims=("y", "x"), attrs={ - '_FillValue': -1, - 'units': '1', - 'grid_mapping': 'goes_imager_projection', - 'standard_name': 'status_flag', - 'long_name': 'GLM data quality flags', - 'flag_meanings': "valid invalid", + "_FillValue": -1, + "units": "1", + "grid_mapping": "goes_imager_projection", + "standard_name": "status_flag", + "long_name": "GLM data quality flags", + "flag_meanings": "valid invalid", } ) # create a variable that won't be configured to test available_datasets not_configured = xr.DataArray( fed.data.copy(), - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'scale_factor': 0.5, - 'add_offset': -1., - '_FillValue': 0, - 'units': '1', - 'grid_mapping': 'goes_imager_projection', - 'standard_name': 'test', - 'long_name': 'Test', + "scale_factor": 0.5, + "add_offset": -1., + "_FillValue": 0, + "units": "1", + "grid_mapping": 
"goes_imager_projection", + "standard_name": "test", + "long_name": "Test", } ) x__ = xr.DataArray( range(5), - attrs={'scale_factor': 2., 'add_offset': -1.}, - dims=('x',), + attrs={"scale_factor": 2., "add_offset": -1.}, + dims=("x",), ) y__ = xr.DataArray( range(2), - attrs={'scale_factor': -2., 'add_offset': 1.}, - dims=('y',), + attrs={"scale_factor": -2., "add_offset": 1.}, + dims=("y",), ) proj = xr.DataArray( [], attrs={ - 'semi_major_axis': 1., - 'semi_minor_axis': 1., - 'perspective_point_height': 1., - 'longitude_of_projection_origin': -90., - 'latitude_of_projection_origin': 0., - 'sweep_angle_axis': u'x' + "semi_major_axis": 1., + "semi_minor_axis": 1., + "perspective_point_height": 1., + "longitude_of_projection_origin": -90., + "latitude_of_projection_origin": 0., + "sweep_angle_axis": u"x" } ) fake_dataset = xr.Dataset( data_vars={ - 'flash_extent_density': fed, - 'not_configured': not_configured, - 'DQF': dqf, - 'x': x__, - 'y': y__, - 'goes_imager_projection': proj, + "flash_extent_density": fed, + "not_configured": not_configured, + "DQF": dqf, + "x": x__, + "y": y__, + "goes_imager_projection": proj, "nominal_satellite_subpoint_lat": np.array(0.0), "nominal_satellite_subpoint_lon": np.array(-89.5), "nominal_satellite_height": np.array(35786.02) @@ -115,16 +115,16 @@ def setup_fake_dataset(): class TestGLML2FileHandler(unittest.TestCase): """Tests for the GLM L2 reader.""" - @mock.patch('satpy.readers.abi_base.xr') + @mock.patch("satpy.readers.abi_base.xr") def setUp(self, xr_): """Create a fake file handler to test.""" from satpy.readers.glm_l2 import NCGriddedGLML2 fake_dataset = setup_fake_dataset() xr_.open_dataset.return_value = fake_dataset - self.reader = NCGriddedGLML2('filename', - {'platform_shortname': 'G16', - 'scene_abbr': 'C', 'scan_mode': 'M3'}, - {'filetype': 'glm_l2_imagery'}) + self.reader = NCGriddedGLML2("filename", + {"platform_shortname": "G16", + "scene_abbr": "C", "scan_mode": "M3"}, + {"filetype": "glm_l2_imagery"}) def test_basic_attributes(self): """Test getting basic file attributes.""" @@ -137,64 +137,64 @@ def test_basic_attributes(self): def test_get_dataset(self): """Test the get_dataset method.""" from satpy.tests.utils import make_dataid - key = make_dataid(name='flash_extent_density') - res = self.reader.get_dataset(key, {'info': 'info'}) - exp = {'instrument_ID': None, - 'modifiers': (), - 'name': 'flash_extent_density', - 'orbital_parameters': {'projection_altitude': 1.0, - 'projection_latitude': 0.0, - 'projection_longitude': -90.0, + key = make_dataid(name="flash_extent_density") + res = self.reader.get_dataset(key, {"info": "info"}) + exp = {"instrument_ID": None, + "modifiers": (), + "name": "flash_extent_density", + "orbital_parameters": {"projection_altitude": 1.0, + "projection_latitude": 0.0, + "projection_longitude": -90.0, # 'satellite_nominal_altitude': 35786.02, - 'satellite_nominal_latitude': 0.0, - 'satellite_nominal_longitude': -89.5}, - 'orbital_slot': None, - 'platform_name': 'GOES-16', - 'platform_shortname': 'G16', - 'production_site': None, - 'scan_mode': 'M3', - 'scene_abbr': 'C', - 'scene_id': None, + "satellite_nominal_latitude": 0.0, + "satellite_nominal_longitude": -89.5}, + "orbital_slot": None, + "platform_name": "GOES-16", + "platform_shortname": "G16", + "production_site": None, + "scan_mode": "M3", + "scene_abbr": "C", + "scene_id": None, "spatial_resolution": "2km at nadir", - 'sensor': 'glm', - 'timeline_ID': None, - 'grid_mapping': 'goes_imager_projection', - 'standard_name': 'flash_extent_density', - 
'long_name': 'Flash extent density', - 'units': 'Count per nominal 3136 microradian^2 pixel per 1.0 min'} + "sensor": "glm", + "timeline_ID": None, + "grid_mapping": "goes_imager_projection", + "standard_name": "flash_extent_density", + "long_name": "Flash extent density", + "units": "Count per nominal 3136 microradian^2 pixel per 1.0 min"} self.assertDictEqual(res.attrs, exp) def test_get_dataset_dqf(self): """Test the get_dataset method with special DQF var.""" from satpy.tests.utils import make_dataid - key = make_dataid(name='DQF') - res = self.reader.get_dataset(key, {'info': 'info'}) - exp = {'instrument_ID': None, - 'modifiers': (), - 'name': 'DQF', - 'orbital_parameters': {'projection_altitude': 1.0, - 'projection_latitude': 0.0, - 'projection_longitude': -90.0, + key = make_dataid(name="DQF") + res = self.reader.get_dataset(key, {"info": "info"}) + exp = {"instrument_ID": None, + "modifiers": (), + "name": "DQF", + "orbital_parameters": {"projection_altitude": 1.0, + "projection_latitude": 0.0, + "projection_longitude": -90.0, # 'satellite_nominal_altitude': 35786.02, - 'satellite_nominal_latitude': 0.0, - 'satellite_nominal_longitude': -89.5}, - 'orbital_slot': None, - 'platform_name': 'GOES-16', - 'platform_shortname': 'G16', - 'production_site': None, - 'scan_mode': 'M3', - 'scene_abbr': 'C', - 'scene_id': None, + "satellite_nominal_latitude": 0.0, + "satellite_nominal_longitude": -89.5}, + "orbital_slot": None, + "platform_name": "GOES-16", + "platform_shortname": "G16", + "production_site": None, + "scan_mode": "M3", + "scene_abbr": "C", + "scene_id": None, "spatial_resolution": "2km at nadir", - 'sensor': 'glm', - 'timeline_ID': None, - 'grid_mapping': 'goes_imager_projection', - 'units': '1', - '_FillValue': -1, - 'standard_name': 'status_flag', - 'long_name': 'GLM data quality flags', - 'flag_meanings': "valid invalid"} + "sensor": "glm", + "timeline_ID": None, + "grid_mapping": "goes_imager_projection", + "units": "1", + "_FillValue": -1, + "standard_name": "status_flag", + "long_name": "GLM data quality flags", + "flag_meanings": "valid invalid"} self.assertDictEqual(res.attrs, exp) self.assertTrue(np.issubdtype(res.dtype, np.integer)) @@ -205,18 +205,18 @@ class TestGLML2Reader(unittest.TestCase): yaml_file = "glm_l2.yaml" - @mock.patch('satpy.readers.abi_base.xr') + @mock.patch("satpy.readers.abi_base.xr") def setUp(self, xr_): """Create a fake reader to test.""" from satpy._config import config_search_paths from satpy.readers import load_reader - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) fake_dataset = setup_fake_dataset() xr_.open_dataset.return_value = fake_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'OR_GLM-L2-GLMC-M3_G16_s20192862159000_e20192862200000_c20192862200350.nc', - 'CSPP_CG_GLM-L2-GLMC-M3_G16_s20192862159000_e20192862200000_c20192862200350.nc', + "OR_GLM-L2-GLMC-M3_G16_s20192862159000_e20192862200000_c20192862200350.nc", + "CSPP_CG_GLM-L2-GLMC-M3_G16_s20192862159000_e20192862200000_c20192862200350.nc", ]) self.assertEqual(len(loadables), 2) r.create_filehandlers(loadables) @@ -230,7 +230,7 @@ def test_available_datasets(self): # flash_extent_density, DQF, and not_configured are available in our tests self.assertEqual(len(available_datasets), 3) for ds_id in available_datasets: - self.assertEqual(ds_id['resolution'], 2000) + self.assertEqual(ds_id["resolution"], 2000) # make sure 
not_configured was discovered - names = [dataid['name'] for dataid in available_datasets] - assert 'not_configured' in names + names = [dataid["name"] for dataid in available_datasets] + assert "not_configured" in names diff --git a/satpy/tests/reader_tests/test_goes_imager_hrit.py b/satpy/tests/reader_tests/test_goes_imager_hrit.py index 02b9632335..4339bef47d 100644 --- a/satpy/tests/reader_tests/test_goes_imager_hrit.py +++ b/satpy/tests/reader_tests/test_goes_imager_hrit.py @@ -51,7 +51,7 @@ def test_fun(self): (100.1640625, b"\x42\x64\x2a\x00")] for expected, str_val in test_data: - val = np.frombuffer(str_val, dtype='>i4') + val = np.frombuffer(str_val, dtype=">i4") self.assertEqual(expected, make_gvar_float(val)) @@ -66,63 +66,63 @@ def test_fun(self): self.assertEqual(make_sgs_time(tcds[0]), expected) -test_pro = {'TISTR': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TCurr': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TCLMT': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'SubSatLongitude': 100.1640625, - 'TCHED': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TLTRL': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TIPFS': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TISPC': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'ReferenceLatitude': 0.0, - 'TIIRT': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TLHED': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TIVIT': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'SubSatLatitude': 0.0, - 'TIECL': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'ReferenceLongitude': 100.1640625, - 'TCTRL': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TLRAN': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TINFS': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TIBBC': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TIONA': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'ReferenceDistance': 100.1640625, - 'SatelliteID': 15} +test_pro = {"TISTR": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TCurr": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TCLMT": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "SubSatLongitude": 100.1640625, + "TCHED": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TLTRL": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TIPFS": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TISPC": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "ReferenceLatitude": 0.0, + "TIIRT": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TLHED": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TIVIT": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "SubSatLatitude": 0.0, + "TIECL": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "ReferenceLongitude": 100.1640625, + "TCTRL": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TLRAN": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TINFS": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TIBBC": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TIONA": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "ReferenceDistance": 100.1640625, + "SatelliteID": 15} class TestHRITGOESPrologueFileHandler(unittest.TestCase): """Test the HRITFileHandler.""" - @mock.patch('satpy.readers.goes_imager_hrit.recarray2dict') - @mock.patch('satpy.readers.goes_imager_hrit.np.fromfile') - @mock.patch('satpy.readers.goes_imager_hrit.HRITFileHandler.__init__') + @mock.patch("satpy.readers.goes_imager_hrit.recarray2dict") + 
@mock.patch("satpy.readers.goes_imager_hrit.np.fromfile") + @mock.patch("satpy.readers.goes_imager_hrit.HRITFileHandler.__init__") def test_init(self, new_fh_init, fromfile, recarray2dict): """Setup the hrit file handler for testing.""" recarray2dict.side_effect = lambda x: x[0] - new_fh_init.return_value.filename = 'filename' - HRITGOESPrologueFileHandler.filename = 'filename' - HRITGOESPrologueFileHandler.mda = {'total_header_length': 1} + new_fh_init.return_value.filename = "filename" + HRITGOESPrologueFileHandler.filename = "filename" + HRITGOESPrologueFileHandler.mda = {"total_header_length": 1} ret = {} the_time = np.array([(32, 24, 18, 146, 19, 50, 121, 153)], dtype=sgs_time)[0] - for key in ['TCurr', 'TCHED', 'TCTRL', 'TLHED', 'TLTRL', 'TIPFS', - 'TINFS', 'TISPC', 'TIECL', 'TIBBC', 'TISTR', 'TLRAN', - 'TIIRT', 'TIVIT', 'TCLMT', 'TIONA']: + for key in ["TCurr", "TCHED", "TCTRL", "TLHED", "TLTRL", "TIPFS", + "TINFS", "TISPC", "TIECL", "TIBBC", "TISTR", "TLRAN", + "TIIRT", "TIVIT", "TCLMT", "TIONA"]: ret[key] = the_time - ret['SubSatLatitude'] = np.frombuffer(b"\x00\x00\x00\x00", dtype='>i4')[0] - ret['ReferenceLatitude'] = np.frombuffer(b"\x00\x00\x00\x00", dtype='>i4')[0] - ret['SubSatLongitude'] = np.frombuffer(b"\x42\x64\x2a\x00", dtype='>i4')[0] - ret['ReferenceLongitude'] = np.frombuffer(b"\x42\x64\x2a\x00", dtype='>i4')[0] - ret['ReferenceDistance'] = np.frombuffer(b"\x42\x64\x2a\x00", dtype='>i4')[0] - ret['SatelliteID'] = 15 + ret["SubSatLatitude"] = np.frombuffer(b"\x00\x00\x00\x00", dtype=">i4")[0] + ret["ReferenceLatitude"] = np.frombuffer(b"\x00\x00\x00\x00", dtype=">i4")[0] + ret["SubSatLongitude"] = np.frombuffer(b"\x42\x64\x2a\x00", dtype=">i4")[0] + ret["ReferenceLongitude"] = np.frombuffer(b"\x42\x64\x2a\x00", dtype=">i4")[0] + ret["ReferenceDistance"] = np.frombuffer(b"\x42\x64\x2a\x00", dtype=">i4")[0] + ret["SatelliteID"] = 15 fromfile.return_value = [ret] m = mock.mock_open() - with mock.patch('satpy.readers.goes_imager_hrit.open', m, create=True) as newopen: + with mock.patch("satpy.readers.goes_imager_hrit.open", m, create=True) as newopen: newopen.return_value.__enter__.return_value.seek.return_value = 1 self.reader = HRITGOESPrologueFileHandler( - 'filename', {'platform_shortname': 'GOES15', - 'start_time': datetime.datetime(2016, 3, 3, 0, 0), - 'service': 'test_service'}, - {'filetype': 'info'}) + "filename", {"platform_shortname": "GOES15", + "start_time": datetime.datetime(2016, 3, 3, 0, 0), + "service": "test_service"}, + {"filetype": "info"}) self.assertEqual(test_pro, self.reader.prologue) @@ -130,31 +130,31 @@ def test_init(self, new_fh_init, fromfile, recarray2dict): class TestHRITGOESFileHandler(unittest.TestCase): """Test the HRITFileHandler.""" - @mock.patch('satpy.readers.goes_imager_hrit.HRITFileHandler.__init__') + @mock.patch("satpy.readers.goes_imager_hrit.HRITFileHandler.__init__") def setUp(self, new_fh_init): """Set up the hrit file handler for testing.""" - blob = '$HALFTONE:=10\r\n_NAME:=albedo\r\n_UNIT:=percent\r\n0:=0.0\r\n1023:=100.0\r\n'.encode() - mda = {'projection_parameters': {'SSP_longitude': -123.0}, - 'spectral_channel_id': 1, - 'image_data_function': blob} - HRITGOESFileHandler.filename = 'filename' + blob = "$HALFTONE:=10\r\n_NAME:=albedo\r\n_UNIT:=percent\r\n0:=0.0\r\n1023:=100.0\r\n".encode() + mda = {"projection_parameters": {"SSP_longitude": -123.0}, + "spectral_channel_id": 1, + "image_data_function": blob} + HRITGOESFileHandler.filename = "filename" HRITGOESFileHandler.mda = mda self.prologue = mock.MagicMock() 
self.prologue.prologue = test_pro - self.reader = HRITGOESFileHandler('filename', {}, {}, self.prologue) + self.reader = HRITGOESFileHandler("filename", {}, {}, self.prologue) def test_init(self): """Test the init.""" - blob = '$HALFTONE:=10\r\n_NAME:=albedo\r\n_UNIT:=percent\r\n0:=0.0\r\n1023:=100.0\r\n'.encode() - mda = {'spectral_channel_id': 1, - 'projection_parameters': {'SSP_longitude': 100.1640625}, - 'image_data_function': blob} + blob = "$HALFTONE:=10\r\n_NAME:=albedo\r\n_UNIT:=percent\r\n0:=0.0\r\n1023:=100.0\r\n".encode() + mda = {"spectral_channel_id": 1, + "projection_parameters": {"SSP_longitude": 100.1640625}, + "image_data_function": blob} self.assertEqual(self.reader.mda, mda) - @mock.patch('satpy.readers.goes_imager_hrit.HRITFileHandler.get_dataset') + @mock.patch("satpy.readers.goes_imager_hrit.HRITFileHandler.get_dataset") def test_get_dataset(self, base_get_dataset): """Test get_dataset.""" - key = make_dataid(name="CH1", calibration='reflectance') + key = make_dataid(name="CH1", calibration="reflectance") base_get_dataset.return_value = DataArray(np.arange(25).reshape(5, 5)) res = self.reader.get_dataset(key, {}) expected = np.array([[np.nan, 0.097752, 0.195503, 0.293255, 0.391007], @@ -164,36 +164,36 @@ def test_get_dataset(self, base_get_dataset): [1.955034, 2.052786, 2.150538, 2.248289, 2.346041]]) self.assertTrue(np.allclose(res.values, expected, equal_nan=True)) - self.assertEqual(res.attrs['units'], '%') - self.assertDictEqual(res.attrs['orbital_parameters'], - {'projection_longitude': self.reader.mda['projection_parameters']['SSP_longitude'], - 'projection_latitude': 0.0, - 'projection_altitude': ALTITUDE}) + self.assertEqual(res.attrs["units"], "%") + self.assertDictEqual(res.attrs["orbital_parameters"], + {"projection_longitude": self.reader.mda["projection_parameters"]["SSP_longitude"], + "projection_latitude": 0.0, + "projection_altitude": ALTITUDE}) def test_get_area_def(self): """Test getting the area definition.""" self.reader.mda.update({ - 'cfac': 10216334, - 'lfac': 10216334, - 'coff': 1408.0, - 'loff': 944.0, - 'number_of_lines': 464, - 'number_of_columns': 2816 + "cfac": 10216334, + "lfac": 10216334, + "coff": 1408.0, + "loff": 944.0, + "number_of_lines": 464, + "number_of_columns": 2816 }) - dsid = make_dataid(name="CH1", calibration='reflectance', + dsid = make_dataid(name="CH1", calibration="reflectance", resolution=3000) area = self.reader.get_area_def(dsid) a, b = proj4_radius_parameters(area.proj_dict) assert a == EQUATOR_RADIUS assert b == POLE_RADIUS - assert area.proj_dict['h'] == ALTITUDE - assert area.proj_dict['lon_0'] == 100.1640625 - assert area.proj_dict['proj'] == 'geos' - assert area.proj_dict['units'] == 'm' + assert area.proj_dict["h"] == ALTITUDE + assert area.proj_dict["lon_0"] == 100.1640625 + assert area.proj_dict["proj"] == "geos" + assert area.proj_dict["units"] == "m" assert area.width == 2816 assert area.height == 464 - assert area.area_id == 'goes-15_goes_imager_fd_3km' + assert area.area_id == "goes-15_goes_imager_fd_3km" area_extent_exp = (-5639254.900260435, 1925159.4881528523, 5643261.475678028, 3784210.48191544) np.testing.assert_allclose(area.area_extent, area_extent_exp) diff --git a/satpy/tests/reader_tests/test_goes_imager_nc_eum.py b/satpy/tests/reader_tests/test_goes_imager_nc_eum.py index d72271f623..bac840a55b 100644 --- a/satpy/tests/reader_tests/test_goes_imager_nc_eum.py +++ b/satpy/tests/reader_tests/test_goes_imager_nc_eum.py @@ -30,12 +30,12 @@ class GOESNCEUMFileHandlerRadianceTest(unittest.TestCase): 
longMessage = True - @mock.patch('satpy.readers.goes_imager_nc.xr') + @mock.patch("satpy.readers.goes_imager_nc.xr") def setUp(self, xr_): """Set up the tests.""" from satpy.readers.goes_imager_nc import CALIB_COEFS, GOESEUMNCFileHandler - self.coefs = CALIB_COEFS['GOES-15'] + self.coefs = CALIB_COEFS["GOES-15"] self.all_coefs = CALIB_COEFS self.channels = sorted(self.coefs.keys()) self.ir_channels = sorted([ch for ch in self.channels @@ -51,19 +51,19 @@ def setUp(self, xr_): nrows, ncols) # Includes invalid values to be masked xr_.open_dataset.return_value = xr.Dataset( - {'data': xr.DataArray(data=self.radiance, dims=('time', 'yc', 'xc')), - 'time': xr.DataArray(data=np.array([0], dtype='datetime64[ms]'), - dims=('time',)), - 'bands': xr.DataArray(data=np.array([1]))}, - attrs={'Satellite Sensor': 'G-15'}) + {"data": xr.DataArray(data=self.radiance, dims=("time", "yc", "xc")), + "time": xr.DataArray(data=np.array([0], dtype="datetime64[ms]"), + dims=("time",)), + "bands": xr.DataArray(data=np.array([1]))}, + attrs={"Satellite Sensor": "G-15"}) geo_data = xr.Dataset( - {'lon': xr.DataArray(data=self.lon, dims=('yc', 'xc')), - 'lat': xr.DataArray(data=self.lat, dims=('yc', 'xc'))}, - attrs={'Satellite Sensor': 'G-15'}) + {"lon": xr.DataArray(data=self.lon, dims=("yc", "xc")), + "lat": xr.DataArray(data=self.lat, dims=("yc", "xc"))}, + attrs={"Satellite Sensor": "G-15"}) # Instantiate reader using the mocked open_dataset() method - self.reader = GOESEUMNCFileHandler(filename='dummy', filename_info={}, + self.reader = GOESEUMNCFileHandler(filename="dummy", filename_info={}, filetype_info={}, geo_data=geo_data) def test_get_dataset_radiance(self): @@ -71,20 +71,20 @@ def test_get_dataset_radiance(self): for ch in self.channels: if not is_vis_channel(ch): radiance = self.reader.get_dataset( - key=make_dataid(name=ch, calibration='radiance'), info={}) + key=make_dataid(name=ch, calibration="radiance"), info={}) # ... 
this only compares the valid (unmasked) elements self.assertTrue(np.all(self.radiance == radiance.to_masked_array()), - msg='get_dataset() returns invalid radiance for ' - 'channel {}'.format(ch)) + msg="get_dataset() returns invalid radiance for " + "channel {}".format(ch)) def test_calibrate(self): """Test whether the correct calibration methods are called.""" for ch in self.channels: if not is_vis_channel(ch): - calibs = {'brightness_temperature': '_calibrate_ir'} + calibs = {"brightness_temperature": "_calibrate_ir"} for calib, method in calibs.items(): with mock.patch.object(self.reader, method) as target_func: - self.reader.calibrate(data=self.reader.nc['data'], + self.reader.calibrate(data=self.reader.nc["data"], calibration=calib, channel=ch) target_func.assert_called() @@ -107,11 +107,11 @@ def test_get_sector(self): (123, 456): UNKNOWN_SECTOR } for (nlines, ncols), sector_ref in shapes.items(): - for channel in ('00_7', '10_7'): + for channel in ("00_7", "10_7"): sector = self.reader._get_sector(channel=channel, nlines=nlines, ncols=ncols) self.assertEqual(sector, sector_ref, - msg='Incorrect sector identification') + msg="Incorrect sector identification") class GOESNCEUMFileHandlerReflectanceTest(unittest.TestCase): @@ -119,12 +119,12 @@ class GOESNCEUMFileHandlerReflectanceTest(unittest.TestCase): longMessage = True - @mock.patch('satpy.readers.goes_imager_nc.xr') + @mock.patch("satpy.readers.goes_imager_nc.xr") def setUp(self, xr_): """Set up the tests.""" from satpy.readers.goes_imager_nc import CALIB_COEFS, GOESEUMNCFileHandler - self.coefs = CALIB_COEFS['GOES-15'] + self.coefs = CALIB_COEFS["GOES-15"] self.all_coefs = CALIB_COEFS self.channels = sorted(self.coefs.keys()) self.ir_channels = sorted([ch for ch in self.channels @@ -140,19 +140,19 @@ def setUp(self, xr_): nrows, ncols) # Includes invalid values to be masked xr_.open_dataset.return_value = xr.Dataset( - {'data': xr.DataArray(data=self.reflectance, dims=('time', 'yc', 'xc')), - 'time': xr.DataArray(data=np.array([0], dtype='datetime64[ms]'), - dims=('time',)), - 'bands': xr.DataArray(data=np.array([1]))}, - attrs={'Satellite Sensor': 'G-15'}) + {"data": xr.DataArray(data=self.reflectance, dims=("time", "yc", "xc")), + "time": xr.DataArray(data=np.array([0], dtype="datetime64[ms]"), + dims=("time",)), + "bands": xr.DataArray(data=np.array([1]))}, + attrs={"Satellite Sensor": "G-15"}) geo_data = xr.Dataset( - {'lon': xr.DataArray(data=self.lon, dims=('yc', 'xc')), - 'lat': xr.DataArray(data=self.lat, dims=('yc', 'xc'))}, - attrs={'Satellite Sensor': 'G-15'}) + {"lon": xr.DataArray(data=self.lon, dims=("yc", "xc")), + "lat": xr.DataArray(data=self.lat, dims=("yc", "xc"))}, + attrs={"Satellite Sensor": "G-15"}) # Instantiate reader using the mocked open_dataset() method - self.reader = GOESEUMNCFileHandler(filename='dummy', filename_info={}, + self.reader = GOESEUMNCFileHandler(filename="dummy", filename_info={}, filetype_info={}, geo_data=geo_data) def test_get_dataset_reflectance(self): @@ -160,8 +160,8 @@ def test_get_dataset_reflectance(self): for ch in self.channels: if is_vis_channel(ch): refl = self.reader.get_dataset( - key=make_dataid(name=ch, calibration='reflectance'), info={}) + key=make_dataid(name=ch, calibration="reflectance"), info={}) # ... 
this only compares the valid (unmasked) elements self.assertTrue(np.all(self.reflectance == refl.to_masked_array()), - msg='get_dataset() returns invalid reflectance for ' - 'channel {}'.format(ch)) + msg="get_dataset() returns invalid reflectance for " + "channel {}".format(ch)) diff --git a/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py b/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py index 6369568d1f..1238594d19 100644 --- a/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py +++ b/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py @@ -39,14 +39,14 @@ class GOESNCBaseFileHandlerTest(unittest.TestCase): longMessage = True - @mock.patch('satpy.readers.goes_imager_nc.xr') - @mock.patch.multiple('satpy.readers.goes_imager_nc.GOESNCBaseFileHandler', + @mock.patch("satpy.readers.goes_imager_nc.xr") + @mock.patch.multiple("satpy.readers.goes_imager_nc.GOESNCBaseFileHandler", _get_sector=mock.MagicMock()) def setUp(self, xr_): """Set up the tests.""" from satpy.readers.goes_imager_nc import CALIB_COEFS, GOESNCBaseFileHandler - self.coefs = CALIB_COEFS['GOES-15'] + self.coefs = CALIB_COEFS["GOES-15"] # Mock file access to return a fake dataset. self.time = datetime.datetime(2018, 8, 16, 16, 7) @@ -54,27 +54,27 @@ def setUp(self, xr_): self.dummy2d = np.zeros((2, 2)) self.band = 1 self.nc = xr.Dataset( - {'data': xr.DataArray(self.dummy3d, dims=('time', 'yc', 'xc')), - 'lon': xr.DataArray(data=self.dummy2d, dims=('yc', 'xc')), - 'lat': xr.DataArray(data=self.dummy2d, dims=('yc', 'xc')), - 'time': xr.DataArray(data=np.array([self.time], - dtype='datetime64[ms]'), - dims=('time',)), - 'bands': xr.DataArray(data=np.array([self.band]))}, - attrs={'Satellite Sensor': 'G-15'}) + {"data": xr.DataArray(self.dummy3d, dims=("time", "yc", "xc")), + "lon": xr.DataArray(data=self.dummy2d, dims=("yc", "xc")), + "lat": xr.DataArray(data=self.dummy2d, dims=("yc", "xc")), + "time": xr.DataArray(data=np.array([self.time], + dtype="datetime64[ms]"), + dims=("time",)), + "bands": xr.DataArray(data=np.array([self.band]))}, + attrs={"Satellite Sensor": "G-15"}) xr_.open_dataset.return_value = self.nc # Instantiate reader using the mocked open_dataset() method. Also, make # the reader believe all abstract methods have been implemented. 
- self.reader = GOESNCBaseFileHandler(filename='dummy', filename_info={}, + self.reader = GOESNCBaseFileHandler(filename="dummy", filename_info={}, filetype_info={}) def test_init(self): """Tests reader initialization.""" self.assertEqual(self.reader.nlines, self.dummy2d.shape[0]) self.assertEqual(self.reader.ncols, self.dummy2d.shape[1]) - self.assertEqual(self.reader.platform_name, 'GOES-15') - self.assertEqual(self.reader.platform_shortname, 'goes15') + self.assertEqual(self.reader.platform_name, "GOES-15") + self.assertEqual(self.reader.platform_shortname, "goes15") self.assertEqual(self.reader.gvar_channel, self.band) self.assertIsInstance(self.reader.geo_data, xr.Dataset) @@ -90,13 +90,13 @@ def test_get_nadir_pixel(self): nadir_row, nadir_col = self.reader._get_nadir_pixel( earth_mask=earth_mask, sector=FULL_DISC) self.assertEqual((nadir_row, nadir_col), (2, 1), - msg='Incorrect nadir pixel') + msg="Incorrect nadir pixel") def test_viscounts2radiance(self): """Test conversion from VIS counts to radiance.""" # Reference data is for detector #1 - slope = self.coefs['00_7']['slope'][0] - offset = self.coefs['00_7']['offset'][0] + slope = self.coefs["00_7"]["slope"][0] + offset = self.coefs["00_7"]["offset"][0] counts = xr.DataArray([0, 100, 200, 500, 1000, 1023]) rad_expected = xr.DataArray( [0., 41.54896, 100.06862, @@ -104,8 +104,8 @@ def test_viscounts2radiance(self): rad = self.reader._viscounts2radiance(counts=counts, slope=slope, offset=offset) self.assertTrue(np.allclose(rad.data, rad_expected.data, atol=1E-6), - msg='Incorrect conversion from VIS counts to ' - 'radiance') + msg="Incorrect conversion from VIS counts to " + "radiance") def test_ircounts2radiance(self): """Test conversion from IR counts to radiance.""" @@ -115,10 +115,10 @@ def test_ircounts2radiance(self): # Reference Radiance from NOAA lookup tables (same for detectors 1 and # 2, see [IR]) rad_expected = { - '03_9': np.array([0, 0.140, 1.899, 4.098, 4.199]), - '06_5': np.array([0, 1.825, 12.124, 24.998, 25.590]), - '10_7': np.array([0, 16.126, 92.630, 188.259, 192.658]), - '13_3': np.array([0, 15.084, 87.421, 177.842, 182.001]) + "03_9": np.array([0, 0.140, 1.899, 4.098, 4.199]), + "06_5": np.array([0, 1.825, 12.124, 24.998, 25.590]), + "10_7": np.array([0, 16.126, 92.630, 188.259, 192.658]), + "13_3": np.array([0, 15.084, 87.421, 177.842, 182.001]) } # The input counts are exact, but the accuracy of the output radiance is @@ -128,60 +128,60 @@ def test_ircounts2radiance(self): for ch in sorted(rad_expected.keys()): coefs = self.coefs[ch] rad = self.reader._ircounts2radiance( - counts=counts, scale=coefs['scale'], offset=coefs['offset']) + counts=counts, scale=coefs["scale"], offset=coefs["offset"]) self.assertTrue(np.allclose(rad.data, rad_expected[ch], atol=atol), - msg='Incorrect conversion from IR counts to ' - 'radiance in channel {}'.format(ch)) + msg="Incorrect conversion from IR counts to " + "radiance in channel {}".format(ch)) def test_calibrate_vis(self): """Test VIS calibration.""" rad = xr.DataArray([0, 1, 10, 100, 500]) refl_expected = xr.DataArray([0., 0.188852, 1.88852, 18.8852, 94.426]) refl = self.reader._calibrate_vis(radiance=rad, - k=self.coefs['00_7']['k']) + k=self.coefs["00_7"]["k"]) self.assertTrue(np.allclose(refl.data, refl_expected.data, atol=1E-6), - msg='Incorrect conversion from radiance to ' - 'reflectance') + msg="Incorrect conversion from radiance to " + "reflectance") def test_calibrate_ir(self): """Test IR calibration.""" # Test radiance values and corresponding BT from NOAA 
lookup tables # rev. H (see [IR]). rad = { - '03_9': xr.DataArray([0, 0.1, 2, 3.997, 4.199]), - '06_5': xr.DataArray([0, 0.821, 12.201, 25.590, 100]), - '10_7': xr.DataArray([0, 11.727, 101.810, 189.407, 192.658]), - '13_3': xr.DataArray([0, 22.679, 90.133, 182.001, 500]) + "03_9": xr.DataArray([0, 0.1, 2, 3.997, 4.199]), + "06_5": xr.DataArray([0, 0.821, 12.201, 25.590, 100]), + "10_7": xr.DataArray([0, 11.727, 101.810, 189.407, 192.658]), + "13_3": xr.DataArray([0, 22.679, 90.133, 182.001, 500]) } bt_expected = { - '03_9': np.array([[np.nan, 253.213, 319.451, 339.983, np.nan], + "03_9": np.array([[np.nan, 253.213, 319.451, 339.983, np.nan], [np.nan, 253.213, 319.451, 339.983, np.nan]]), - '06_5': np.array([[np.nan, 200.291, 267.860, 294.988, np.nan], + "06_5": np.array([[np.nan, 200.291, 267.860, 294.988, np.nan], [np.nan, 200.308, 267.879, 295.008, np.nan]]), - '10_7': np.array([[np.nan, 200.105, 294.437, 339.960, np.nan], + "10_7": np.array([[np.nan, 200.105, 294.437, 339.960, np.nan], [np.nan, 200.097, 294.429, 339.953, np.nan]]), - '13_3': np.array([[np.nan, 200.006, 267.517, 321.986, np.nan], + "13_3": np.array([[np.nan, 200.006, 267.517, 321.986, np.nan], [np.nan, 200.014, 267.524, 321.990, np.nan]]) } # first row is for detector 1, second for detector 2. # The accuracy of the input radiance is limited to 3 digits so that # the results differ slightly. - atol = {'03_9': 0.04, '06_5': 0.03, '10_7': 0.01, '13_3': 0.01} + atol = {"03_9": 0.04, "06_5": 0.03, "10_7": 0.01, "13_3": 0.01} for ch in sorted(rad.keys()): coefs = self.coefs[ch] for det in [0, 1]: bt = self.reader._calibrate_ir(radiance=rad[ch], - coefs={'a': coefs['a'][det], - 'b': coefs['b'][det], - 'n': coefs['n'][det], - 'btmin': coefs['btmin'], - 'btmax': coefs['btmax']}) + coefs={"a": coefs["a"][det], + "b": coefs["b"][det], + "n": coefs["n"][det], + "btmin": coefs["btmin"], + "btmax": coefs["btmax"]}) self.assertTrue( np.allclose(bt.data, bt_expected[ch][det], equal_nan=True, atol=atol[ch]), - msg='Incorrect conversion from radiance to brightness ' - 'temperature in channel {} detector {}'.format(ch, det)) + msg="Incorrect conversion from radiance to brightness " + "temperature in channel {} detector {}".format(ch, det)) def test_start_time(self): """Test dataset start time stamp.""" @@ -252,13 +252,13 @@ def dataset(self, lons_lats, channel_id): bands = xr.DataArray([channel_id], dims="bands") return xr.Dataset( { - 'data': data, - 'lon': lon, - 'lat': lat, - 'time': time, - 'bands': bands, + "data": data, + "lon": lon, + "lat": lat, + "time": time, + "bands": bands, }, - attrs={'Satellite Sensor': 'G-15'} + attrs={"Satellite Sensor": "G-15"} ) @pytest.fixture @@ -290,16 +290,16 @@ def geometry(self, channel_id, yaw_flip): def expected(self, geometry, earth_mask, yaw_flip): """Define expected metadata.""" proj_dict = { - 'a': '6378169', - 'h': '35785831', - 'lon_0': '0', - 'no_defs': 'None', - 'proj': 'geos', - 'rf': '295.488065897001', - 'type': 'crs', - 'units': 'm', - 'x_0': '0', - 'y_0': '0' + "a": "6378169", + "h": "35785831", + "lon_0": "0", + "no_defs": "None", + "proj": "geos", + "rf": "295.488065897001", + "type": "crs", + "units": "m", + "x_0": "0", + "y_0": "0" } area = AreaDefinition( area_id="goes_geos_uniform", @@ -329,7 +329,7 @@ def mocked_file_handler(self, dataset): GOESNCFileHandler.ir_sectors[(3, 4)] = FULL_DISC GOESNCFileHandler.yaw_flip_sampling_distance = 1 return GOESNCFileHandler( - filename='dummy', + filename="dummy", filename_info={}, filetype_info={}, ) @@ -351,12 +351,12 @@ class 
GOESNCFileHandlerTest(unittest.TestCase): longMessage = True - @mock.patch('satpy.readers.goes_imager_nc.xr') + @mock.patch("satpy.readers.goes_imager_nc.xr") def setUp(self, xr_): """Set up the tests.""" from satpy.readers.goes_imager_nc import CALIB_COEFS, GOESNCFileHandler - self.coefs = CALIB_COEFS['GOES-15'] + self.coefs = CALIB_COEFS["GOES-15"] self.all_coefs = CALIB_COEFS self.channels = sorted(self.coefs.keys()) self.ir_channels = sorted([ch for ch in self.channels @@ -374,58 +374,58 @@ def setUp(self, xr_): nrows, ncols) # Includes invalid values to be masked xr_.open_dataset.return_value = xr.Dataset( - {'data': xr.DataArray(data=self.counts, dims=('time', 'yc', 'xc')), - 'lon': xr.DataArray(data=self.lon, dims=('yc', 'xc')), - 'lat': xr.DataArray(data=self.lat, dims=('yc', 'xc')), - 'time': xr.DataArray(data=np.array([0], dtype='datetime64[ms]'), - dims=('time',)), - 'bands': xr.DataArray(data=np.array([1]))}, - attrs={'Satellite Sensor': 'G-15'}) + {"data": xr.DataArray(data=self.counts, dims=("time", "yc", "xc")), + "lon": xr.DataArray(data=self.lon, dims=("yc", "xc")), + "lat": xr.DataArray(data=self.lat, dims=("yc", "xc")), + "time": xr.DataArray(data=np.array([0], dtype="datetime64[ms]"), + dims=("time",)), + "bands": xr.DataArray(data=np.array([1]))}, + attrs={"Satellite Sensor": "G-15"}) # Instantiate reader using the mocked open_dataset() method - self.reader = GOESNCFileHandler(filename='dummy', filename_info={}, + self.reader = GOESNCFileHandler(filename="dummy", filename_info={}, filetype_info={}) def test_get_dataset_coords(self): """Test whether coordinates returned by get_dataset() are correct.""" - lon = self.reader.get_dataset(key=make_dataid(name='longitude'), + lon = self.reader.get_dataset(key=make_dataid(name="longitude"), info={}) - lat = self.reader.get_dataset(key=make_dataid(name='latitude'), + lat = self.reader.get_dataset(key=make_dataid(name="latitude"), info={}) # ... 
this only compares the valid (unmasked) elements self.assertTrue(np.all(lat.to_masked_array() == self.lat), - msg='get_dataset() returns invalid latitude') + msg="get_dataset() returns invalid latitude") self.assertTrue(np.all(lon.to_masked_array() == self.lon), - msg='get_dataset() returns invalid longitude') + msg="get_dataset() returns invalid longitude") def test_get_dataset_counts(self): """Test whether counts returned by get_dataset() are correct.""" from satpy.readers.goes_imager_nc import ALTITUDE, UNKNOWN_SECTOR - self.reader.meta.update({'lon0': -75.0, - 'lat0': 0.0, - 'sector': UNKNOWN_SECTOR, - 'nadir_row': 1, - 'nadir_col': 2, - 'area_def_uni': 'some_area'}) - attrs_exp = {'orbital_parameters': {'projection_longitude': -75.0, - 'projection_latitude': 0.0, - 'projection_altitude': ALTITUDE, - 'yaw_flip': True}, - 'platform_name': 'GOES-15', - 'sensor': 'goes_imager', - 'sector': UNKNOWN_SECTOR, - 'nadir_row': 1, - 'nadir_col': 2, - 'area_def_uniform_sampling': 'some_area'} + self.reader.meta.update({"lon0": -75.0, + "lat0": 0.0, + "sector": UNKNOWN_SECTOR, + "nadir_row": 1, + "nadir_col": 2, + "area_def_uni": "some_area"}) + attrs_exp = {"orbital_parameters": {"projection_longitude": -75.0, + "projection_latitude": 0.0, + "projection_altitude": ALTITUDE, + "yaw_flip": True}, + "platform_name": "GOES-15", + "sensor": "goes_imager", + "sector": UNKNOWN_SECTOR, + "nadir_row": 1, + "nadir_col": 2, + "area_def_uniform_sampling": "some_area"} for ch in self.channels: counts = self.reader.get_dataset( - key=make_dataid(name=ch, calibration='counts'), info={}) + key=make_dataid(name=ch, calibration="counts"), info={}) # ... this only compares the valid (unmasked) elements self.assertTrue(np.all(self.counts/32. == counts.to_masked_array()), - msg='get_dataset() returns invalid counts for ' - 'channel {}'.format(ch)) + msg="get_dataset() returns invalid counts for " + "channel {}".format(ch)) # Check attributes self.assertDictEqual(counts.attrs, attrs_exp) @@ -434,12 +434,12 @@ def test_get_dataset_masks(self): """Test whether data and coordinates are masked consistently.""" # Requires that no element has been masked due to invalid # radiance/reflectance/BT (see setUp()). 
- lon = self.reader.get_dataset(key=make_dataid(name='longitude'), + lon = self.reader.get_dataset(key=make_dataid(name="longitude"), info={}) lon_mask = lon.to_masked_array().mask for ch in self.channels: - for calib in ('counts', 'radiance', 'reflectance', - 'brightness_temperature'): + for calib in ("counts", "radiance", "reflectance", + "brightness_temperature"): try: data = self.reader.get_dataset( key=make_dataid(name=ch, calibration=calib), info={}) @@ -447,41 +447,41 @@ def test_get_dataset_masks(self): continue data_mask = data.to_masked_array().mask self.assertTrue(np.all(data_mask == lon_mask), - msg='get_dataset() returns inconsistently ' - 'masked {} in channel {}'.format(calib, ch)) + msg="get_dataset() returns inconsistently " + "masked {} in channel {}".format(calib, ch)) def test_get_dataset_invalid(self): """Test handling of invalid calibrations.""" # VIS -> BT - args = dict(key=make_dataid(name='00_7', - calibration='brightness_temperature'), + args = dict(key=make_dataid(name="00_7", + calibration="brightness_temperature"), info={}) self.assertRaises(ValueError, self.reader.get_dataset, **args) # IR -> Reflectance - args = dict(key=make_dataid(name='10_7', - calibration='reflectance'), + args = dict(key=make_dataid(name="10_7", + calibration="reflectance"), info={}) self.assertRaises(ValueError, self.reader.get_dataset, **args) # Unsupported calibration with pytest.raises(ValueError): - args = dict(key=make_dataid(name='10_7', - calibration='invalid'), + args = dict(key=make_dataid(name="10_7", + calibration="invalid"), info={}) def test_calibrate(self): """Test whether the correct calibration methods are called.""" for ch in self.channels: if is_vis_channel(ch): - calibs = {'radiance': '_viscounts2radiance', - 'reflectance': '_calibrate_vis'} + calibs = {"radiance": "_viscounts2radiance", + "reflectance": "_calibrate_vis"} else: - calibs = {'radiance': '_ircounts2radiance', - 'brightness_temperature': '_calibrate_ir'} + calibs = {"radiance": "_ircounts2radiance", + "brightness_temperature": "_calibrate_ir"} for calib, method in calibs.items(): with mock.patch.object(self.reader, method) as target_func: - self.reader.calibrate(counts=self.reader.nc['data'], + self.reader.calibrate(counts=self.reader.nc["data"], calibration=calib, channel=ch) target_func.assert_called() @@ -515,13 +515,13 @@ def test_get_sector(self): shapes.update(shapes_vis) for (nlines, ncols), sector_ref in shapes.items(): if (nlines, ncols) in shapes_vis: - channel = '00_7' + channel = "00_7" else: - channel = '10_7' + channel = "10_7" sector = self.reader._get_sector(channel=channel, nlines=nlines, ncols=ncols) self.assertEqual(sector, sector_ref, - msg='Incorrect sector identification') + msg="Incorrect sector identification") class TestChannelIdentification: diff --git a/satpy/tests/reader_tests/test_gpm_imerg.py b/satpy/tests/reader_tests/test_gpm_imerg.py index a91f6d300f..8fc8bb855b 100644 --- a/satpy/tests/reader_tests/test_gpm_imerg.py +++ b/satpy/tests/reader_tests/test_gpm_imerg.py @@ -41,31 +41,31 @@ class FakeHDF5FileHandler2(FakeHDF5FileHandler): def _get_geo_data(self, num_rows, num_cols): geo = { - 'Grid/lon': + "Grid/lon": xr.DataArray(DEFAULT_LON_DATA, - attrs={'units': 'degrees_east', }, - dims=('lon')), - 'Grid/lat': + attrs={"units": "degrees_east", }, + dims=("lon")), + "Grid/lat": xr.DataArray(DEFAULT_LAT_DATA, - attrs={'units': 'degrees_north', }, - dims=('lat')), + attrs={"units": "degrees_north", }, + dims=("lat")), } return geo def _get_precip_data(self, num_rows, 
num_cols): selection = { - 'Grid/IRprecipitation': + "Grid/IRprecipitation": xr.DataArray( da.ones((1, num_cols, num_rows), chunks=1024, dtype=np.float32), attrs={ - '_FillValue': -9999.9, - 'units': 'mm/hr', - 'Units': 'mm/hr', - 'badval': h5py.h5r.Reference(), - 'badvals': np.array([[h5py.h5r.Reference()]]) + "_FillValue": -9999.9, + "units": "mm/hr", + "Units": "mm/hr", + "badval": h5py.h5r.Reference(), + "badvals": np.array([[h5py.h5r.Reference()]]) }, - dims=('time', 'lon', 'lat')), + dims=("time", "lon", "lat")), } return selection @@ -93,9 +93,9 @@ def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.gpm_imerg import Hdf5IMERG - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(Hdf5IMERG, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(Hdf5IMERG, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -109,13 +109,13 @@ def test_load_data(self): # Filename to test, needed for start and end times filenames = [ - '3B-HHR.MS.MRG.3IMERG.20200131-S233000-E235959.1410.V06B.HDF5', ] + "3B-HHR.MS.MRG.3IMERG.20200131-S233000-E235959.1410.V06B.HDF5", ] # Expected projection in area def - pdict = {'proj': 'longlat', - 'datum': 'WGS84', - 'no_defs': None, - 'type': 'crs'} + pdict = {"proj": "longlat", + "datum": "WGS84", + "no_defs": None, + "type": "crs"} reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) @@ -123,19 +123,19 @@ def test_load_data(self): reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) - res = reader.load(['IRprecipitation']) + res = reader.load(["IRprecipitation"]) self.assertEqual(1, len(res)) - self.assertEqual(res['IRprecipitation'].start_time, + self.assertEqual(res["IRprecipitation"].start_time, datetime(2020, 1, 31, 23, 30, 0)) - self.assertEqual(res['IRprecipitation'].end_time, + self.assertEqual(res["IRprecipitation"].end_time, datetime(2020, 1, 31, 23, 59, 59)) - self.assertEqual(res['IRprecipitation'].resolution, + self.assertEqual(res["IRprecipitation"].resolution, 0.1) - self.assertEqual(res['IRprecipitation'].area.width, + self.assertEqual(res["IRprecipitation"].area.width, 3600) - self.assertEqual(res['IRprecipitation'].area.height, + self.assertEqual(res["IRprecipitation"].area.height, 1800) - self.assertEqual(res['IRprecipitation'].area.proj_dict, + self.assertEqual(res["IRprecipitation"].area.proj_dict, pdict) - np.testing.assert_almost_equal(res['IRprecipitation'].area.area_extent, + np.testing.assert_almost_equal(res["IRprecipitation"].area.area_extent, (-179.95, -89.95, 179.95, 89.95), 5) diff --git a/satpy/tests/reader_tests/test_grib.py b/satpy/tests/reader_tests/test_grib.py index b349e91169..dce6b3f557 100644 --- a/satpy/tests/reader_tests/test_grib.py +++ b/satpy/tests/reader_tests/test_grib.py @@ -28,14 +28,14 @@ from satpy.dataset import DataQuery # Parameterized cases -TEST_ARGS = ('proj_params', 'lon_corners', 'lat_corners') +TEST_ARGS = ("proj_params", "lon_corners", "lat_corners") TEST_PARAMS = ( (None, None, None), # cyl default case ( { - 'a': 6371229, 'b': 6371229, 'proj': 'lcc', - 'lon_0': 265.0, 'lat_0': 25.0, - 'lat_1': 25.0, 'lat_2': 25.0 + "a": 6371229, "b": 
6371229, "proj": "lcc", + "lon_0": 265.0, "lat_0": 25.0, + "lat_1": 25.0, "lat_2": 25.0 }, [-133.459, -65.12555139, -152.8786225, -49.41598659], [12.19, 14.34208538, 54.56534318, 57.32843565] @@ -78,7 +78,7 @@ def __init__(self, values, proj_params=None, latlons=None, **attrs): self.attrs = attrs self.values = values if proj_params is None: - proj_params = {'a': 6371229, 'b': 6371229, 'proj': 'cyl'} + proj_params = {"a": 6371229, "b": 6371229, "proj": "cyl"} self.projparams = proj_params self._latlons = latlons @@ -111,12 +111,12 @@ def __init__(self, messages=None, proj_params=None, latlons=None): self._messages = [ FakeMessage( values=fake_gribdata(), - name='TEST', - shortName='t', + name="TEST", + shortName="t", level=100, - pressureUnits='hPa', - cfName='air_temperature', - units='K', + pressureUnits="hPa", + cfName="air_temperature", + units="K", dataDate=20180504, dataTime=1200, validityDate=20180504, @@ -124,22 +124,22 @@ def __init__(self, messages=None, proj_params=None, latlons=None): distinctLongitudes=np.arange(5.), distinctLatitudes=np.arange(5.), missingValue=9999, - modelName='notknown', + modelName="notknown", minimum=100., maximum=200., - typeOfLevel='isobaricInhPa', + typeOfLevel="isobaricInhPa", jScansPositively=0, proj_params=proj_params, latlons=latlons, ), FakeMessage( values=fake_gribdata(), - name='TEST', - shortName='t', + name="TEST", + shortName="t", level=200, - pressureUnits='hPa', - cfName='air_temperature', - units='K', + pressureUnits="hPa", + cfName="air_temperature", + units="K", dataDate=20180504, dataTime=1200, validityDate=20180504, @@ -147,22 +147,22 @@ def __init__(self, messages=None, proj_params=None, latlons=None): distinctLongitudes=np.arange(5.), distinctLatitudes=np.arange(5.), missingValue=9999, - modelName='notknown', + modelName="notknown", minimum=100., maximum=200., - typeOfLevel='isobaricInhPa', + typeOfLevel="isobaricInhPa", jScansPositively=1, proj_params=proj_params, latlons=latlons, ), FakeMessage( values=fake_gribdata(), - name='TEST', - shortName='t', + name="TEST", + shortName="t", level=300, - pressureUnits='hPa', - cfName='air_temperature', - units='K', + pressureUnits="hPa", + cfName="air_temperature", + units="K", dataDate=20180504, dataTime=1200, validityDate=20180504, @@ -172,7 +172,7 @@ def __init__(self, messages=None, proj_params=None, latlons=None): missingValue=9999, minimum=100., maximum=200., - typeOfLevel='isobaricInhPa', + typeOfLevel="isobaricInhPa", jScansPositively=0, proj_params=proj_params, latlons=latlons, @@ -208,29 +208,29 @@ class TestGRIBReader: def setup_method(self): """Wrap pygrib to read fake data.""" from satpy._config import config_search_paths - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) try: import pygrib except ImportError: pygrib = None self.orig_pygrib = pygrib - sys.modules['pygrib'] = mock.MagicMock() + sys.modules["pygrib"] = mock.MagicMock() def teardown_method(self): """Re-enable pygrib import.""" - sys.modules['pygrib'] = self.orig_pygrib + sys.modules["pygrib"] = self.orig_pygrib def _get_test_datasets(self, dataids, fake_pygrib=None): from satpy.readers import load_reader if fake_pygrib is None: fake_pygrib = FakeGRIB() - with mock.patch('satpy.readers.grib.pygrib') as pg: + with mock.patch("satpy.readers.grib.pygrib") as pg: pg.open.return_value = fake_pygrib r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 
'gfs.t18z.sfluxgrbf106.grib2', + "gfs.t18z.sfluxgrbf106.grib2", ]) r.create_filehandlers(loadables) datasets = r.load(dataids) @@ -262,11 +262,11 @@ def _get_fake_pygrib(proj_params, lon_corners, lat_corners): def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader - with mock.patch('satpy.readers.grib.pygrib') as pg: + with mock.patch("satpy.readers.grib.pygrib") as pg: pg.open.return_value = FakeGRIB() r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'gfs.t18z.sfluxgrbf106.grib2', + "gfs.t18z.sfluxgrbf106.grib2", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) @@ -293,25 +293,25 @@ def test_load_all(self, proj_params, lon_corners, lat_corners): """Test loading all test datasets.""" fake_pygrib = self._get_fake_pygrib(proj_params, lon_corners, lat_corners) dataids = [ - DataQuery(name='t', level=100, modifiers=tuple()), - DataQuery(name='t', level=200, modifiers=tuple()), - DataQuery(name='t', level=300, modifiers=tuple()) + DataQuery(name="t", level=100, modifiers=tuple()), + DataQuery(name="t", level=200, modifiers=tuple()), + DataQuery(name="t", level=300, modifiers=tuple()) ] datasets = self._get_test_datasets(dataids, fake_pygrib) assert len(datasets) == 3 for v in datasets.values(): - assert v.attrs['units'] == 'K' + assert v.attrs["units"] == "K" assert isinstance(v, xr.DataArray) @pytest.mark.parametrize(TEST_ARGS, TEST_PARAMS) def test_area_def_crs(self, proj_params, lon_corners, lat_corners): """Check that the projection is accurate.""" fake_pygrib = self._get_fake_pygrib(proj_params, lon_corners, lat_corners) - dataids = [DataQuery(name='t', level=100, modifiers=tuple())] + dataids = [DataQuery(name="t", level=100, modifiers=tuple())] datasets = self._get_test_datasets(dataids, fake_pygrib) - area = datasets['t'].attrs['area'] - if not hasattr(area, 'crs'): + area = datasets["t"].attrs["area"] + if not hasattr(area, "crs"): pytest.skip("Can't test with pyproj < 2.0") _round_trip_projection_lonlat_check(area) @@ -321,12 +321,12 @@ def test_missing_attributes(self, proj_params, lon_corners, lat_corners): fake_pygrib = self._get_fake_pygrib(proj_params, lon_corners, lat_corners) # This has modelName - query_contains = DataQuery(name='t', level=100, modifiers=tuple()) + query_contains = DataQuery(name="t", level=100, modifiers=tuple()) # This does not have modelName - query_not_contains = DataQuery(name='t', level=300, modifiers=tuple()) + query_not_contains = DataQuery(name="t", level=300, modifiers=tuple()) dataset = self._get_test_datasets([query_contains, query_not_contains], fake_pygrib) - assert dataset[query_contains].attrs['modelName'] == 'notknown' - assert dataset[query_not_contains].attrs['modelName'] == 'unknown' + assert dataset[query_contains].attrs["modelName"] == "notknown" + assert dataset[query_not_contains].attrs["modelName"] == "unknown" @pytest.mark.parametrize(TEST_ARGS, TEST_PARAMS) def test_jscanspositively(self, proj_params, lon_corners, lat_corners): @@ -334,9 +334,9 @@ def test_jscanspositively(self, proj_params, lon_corners, lat_corners): fake_pygrib = self._get_fake_pygrib(proj_params, lon_corners, lat_corners) # This has no jScansPositively - query_not_contains = DataQuery(name='t', level=100, modifiers=tuple()) + query_not_contains = DataQuery(name="t", level=100, modifiers=tuple()) # This contains jScansPositively - query_contains = DataQuery(name='t', level=200, modifiers=tuple()) + query_contains = DataQuery(name="t", level=200, modifiers=tuple()) 
dataset = self._get_test_datasets([query_contains, query_not_contains], fake_pygrib) np.testing.assert_allclose(fake_gribdata(), dataset[query_not_contains].values) diff --git a/satpy/tests/reader_tests/test_hdf4_utils.py b/satpy/tests/reader_tests/test_hdf4_utils.py index 9a0773c2c1..1328fd3a29 100644 --- a/satpy/tests/reader_tests/test_hdf4_utils.py +++ b/satpy/tests/reader_tests/test_hdf4_utils.py @@ -67,20 +67,20 @@ class TestHDF4FileHandler(unittest.TestCase): def setUp(self): """Create a test HDF4 file.""" from pyhdf.SD import SD, SDC - h = SD('test.hdf', SDC.WRITE | SDC.CREATE | SDC.TRUNC) + h = SD("test.hdf", SDC.WRITE | SDC.CREATE | SDC.TRUNC) data = np.arange(10. * 100, dtype=np.float32).reshape((10, 100)) - v1 = h.create('ds1_f', SDC.FLOAT32, (10, 100)) + v1 = h.create("ds1_f", SDC.FLOAT32, (10, 100)) v1[:] = data - v2 = h.create('ds1_i', SDC.INT16, (10, 100)) + v2 = h.create("ds1_i", SDC.INT16, (10, 100)) v2[:] = data.astype(np.int16) # Add attributes - h.test_attr_str = 'test_string' + h.test_attr_str = "test_string" h.test_attr_int = 0 h.test_attr_float = 1.2 # h.test_attr_str_arr = np.array(b"test_string2") for d in [v1, v2]: - d.test_attr_str = 'test_string' + d.test_attr_str = "test_string" d.test_attr_int = 0 d.test_attr_float = 1.2 @@ -88,34 +88,34 @@ def setUp(self): def tearDown(self): """Remove the previously created test file.""" - os.remove('test.hdf') + os.remove("test.hdf") def test_all_basic(self): """Test everything about the HDF4 class.""" from satpy.readers.hdf4_utils import HDF4FileHandler - file_handler = HDF4FileHandler('test.hdf', {}, {}) + file_handler = HDF4FileHandler("test.hdf", {}, {}) - for ds in ('ds1_f', 'ds1_i'): - self.assertEqual(file_handler[ds + '/dtype'], np.float32 if ds.endswith('f') else np.int16) - self.assertTupleEqual(file_handler[ds + '/shape'], (10, 100)) + for ds in ("ds1_f", "ds1_i"): + self.assertEqual(file_handler[ds + "/dtype"], np.float32 if ds.endswith("f") else np.int16) + self.assertTupleEqual(file_handler[ds + "/shape"], (10, 100)) # make sure that the dtype is an instance, not the class - self.assertEqual(file_handler[ds].dtype.itemsize, 4 if ds.endswith('f') else 2) + self.assertEqual(file_handler[ds].dtype.itemsize, 4 if ds.endswith("f") else 2) attrs = file_handler[ds].attrs - self.assertEqual(attrs.get('test_attr_str'), 'test_string') - self.assertEqual(attrs.get('test_attr_int'), 0) - self.assertEqual(attrs.get('test_attr_float'), 1.2) + self.assertEqual(attrs.get("test_attr_str"), "test_string") + self.assertEqual(attrs.get("test_attr_int"), 0) + self.assertEqual(attrs.get("test_attr_float"), 1.2) - self.assertIsInstance(file_handler['/attr/test_attr_str'], str) - self.assertEqual(file_handler['/attr/test_attr_str'], 'test_string') + self.assertIsInstance(file_handler["/attr/test_attr_str"], str) + self.assertEqual(file_handler["/attr/test_attr_str"], "test_string") # self.assertEqual(file_handler['/attr/test_attr_str_arr'], 'test_string2') - self.assertIsInstance(file_handler['/attr/test_attr_int'], int) - self.assertEqual(file_handler['/attr/test_attr_int'], 0) - self.assertIsInstance(file_handler['/attr/test_attr_float'], float) - self.assertEqual(file_handler['/attr/test_attr_float'], 1.2) + self.assertIsInstance(file_handler["/attr/test_attr_int"], int) + self.assertEqual(file_handler["/attr/test_attr_int"], 0) + self.assertIsInstance(file_handler["/attr/test_attr_float"], float) + self.assertEqual(file_handler["/attr/test_attr_float"], 1.2) - self.assertIsInstance(file_handler.get('ds1_f'), xr.DataArray) - 
self.assertIsNone(file_handler.get('fake_ds')) - self.assertEqual(file_handler.get('fake_ds', 'test'), 'test') + self.assertIsInstance(file_handler.get("ds1_f"), xr.DataArray) + self.assertIsNone(file_handler.get("fake_ds")) + self.assertEqual(file_handler.get("fake_ds", "test"), "test") - self.assertTrue('ds1_f' in file_handler) - self.assertFalse('fake_ds' in file_handler) + self.assertTrue("ds1_f" in file_handler) + self.assertFalse("fake_ds" in file_handler) diff --git a/satpy/tests/reader_tests/test_hdf5_utils.py b/satpy/tests/reader_tests/test_hdf5_utils.py index 2c5fd2d19a..4e700a57f6 100644 --- a/satpy/tests/reader_tests/test_hdf5_utils.py +++ b/satpy/tests/reader_tests/test_hdf5_utils.py @@ -67,86 +67,86 @@ class TestHDF5FileHandler(unittest.TestCase): def setUp(self): """Create a test HDF5 file.""" import h5py - h = h5py.File('test.h5', 'w') + h = h5py.File("test.h5", "w") # Create Group - g1 = h.create_group('test_group') + g1 = h.create_group("test_group") # Add datasets - ds1_f = g1.create_dataset('ds1_f', + ds1_f = g1.create_dataset("ds1_f", shape=(10, 100), dtype=np.float32, data=np.arange(10. * 100).reshape((10, 100))) - ds1_i = g1.create_dataset('ds1_i', + ds1_i = g1.create_dataset("ds1_i", shape=(10, 100), dtype=np.int32, data=np.arange(10 * 100).reshape((10, 100))) - ds2_f = h.create_dataset('ds2_f', + ds2_f = h.create_dataset("ds2_f", shape=(10, 100), dtype=np.float32, data=np.arange(10. * 100).reshape((10, 100))) - ds2_i = h.create_dataset('ds2_i', + ds2_i = h.create_dataset("ds2_i", shape=(10, 100), dtype=np.int32, data=np.arange(10 * 100).reshape((10, 100))) # Add attributes # shows up as a scalar array of bytes (shape=(), size=1) - h.attrs['test_attr_str'] = 'test_string' - h.attrs['test_attr_byte'] = b'test_byte' - h.attrs['test_attr_int'] = 0 - h.attrs['test_attr_float'] = 1.2 + h.attrs["test_attr_str"] = "test_string" + h.attrs["test_attr_byte"] = b"test_byte" + h.attrs["test_attr_int"] = 0 + h.attrs["test_attr_float"] = 1.2 # shows up as a numpy bytes object - h.attrs['test_attr_str_arr'] = np.array(b"test_string2") - g1.attrs['test_attr_str'] = 'test_string' - g1.attrs['test_attr_byte'] = b'test_byte' - g1.attrs['test_attr_int'] = 0 - g1.attrs['test_attr_float'] = 1.2 + h.attrs["test_attr_str_arr"] = np.array(b"test_string2") + g1.attrs["test_attr_str"] = "test_string" + g1.attrs["test_attr_byte"] = b"test_byte" + g1.attrs["test_attr_int"] = 0 + g1.attrs["test_attr_float"] = 1.2 for d in [ds1_f, ds1_i, ds2_f, ds2_i]: - d.attrs['test_attr_str'] = 'test_string' - d.attrs['test_attr_byte'] = b'test_byte' - d.attrs['test_attr_int'] = 0 - d.attrs['test_attr_float'] = 1.2 - d.attrs['test_ref'] = d.ref + d.attrs["test_attr_str"] = "test_string" + d.attrs["test_attr_byte"] = b"test_byte" + d.attrs["test_attr_int"] = 0 + d.attrs["test_attr_float"] = 1.2 + d.attrs["test_ref"] = d.ref self.var_attrs = list(d.attrs.keys()) h.close() def tearDown(self): """Remove the previously created test file.""" - os.remove('test.h5') + os.remove("test.h5") def test_all_basic(self): """Test everything about the HDF5 class.""" import xarray as xr from satpy.readers.hdf5_utils import HDF5FileHandler - file_handler = HDF5FileHandler('test.h5', {}, {}) + file_handler = HDF5FileHandler("test.h5", {}, {}) - for ds_name in ('test_group/ds1_f', 'test_group/ds1_i', 'ds2_f', 'ds2_i'): + for ds_name in ("test_group/ds1_f", "test_group/ds1_i", "ds2_f", "ds2_i"): ds = file_handler[ds_name] attrs = ds.attrs - self.assertEqual(ds.dtype, np.float32 if ds_name.endswith('f') else np.int32) - 
self.assertTupleEqual(file_handler[ds_name + '/shape'], (10, 100)) - self.assertEqual(attrs['test_attr_str'], 'test_string') - self.assertEqual(attrs['test_attr_byte'], 'test_byte') - self.assertEqual(attrs['test_attr_int'], 0) - self.assertEqual(attrs['test_attr_float'], 1.2) - self.assertEqual(file_handler[ds_name + '/attr/test_attr_str'], 'test_string') - self.assertEqual(file_handler[ds_name + '/attr/test_attr_byte'], 'test_byte') - self.assertEqual(file_handler[ds_name + '/attr/test_attr_int'], 0) - self.assertEqual(file_handler[ds_name + '/attr/test_attr_float'], 1.2) - - self.assertEqual(file_handler['/attr/test_attr_str'], 'test_string') - self.assertEqual(file_handler['/attr/test_attr_byte'], 'test_byte') - self.assertEqual(file_handler['/attr/test_attr_str_arr'], 'test_string2') - self.assertEqual(file_handler['/attr/test_attr_int'], 0) - self.assertEqual(file_handler['/attr/test_attr_float'], 1.2) - - self.assertIsInstance(file_handler.get('ds2_f'), xr.DataArray) - self.assertIsNone(file_handler.get('fake_ds')) - self.assertEqual(file_handler.get('fake_ds', 'test'), 'test') - - self.assertTrue('ds2_f' in file_handler) - self.assertFalse('fake_ds' in file_handler) - - self.assertIsInstance(file_handler['ds2_f/attr/test_ref'], np.ndarray) + self.assertEqual(ds.dtype, np.float32 if ds_name.endswith("f") else np.int32) + self.assertTupleEqual(file_handler[ds_name + "/shape"], (10, 100)) + self.assertEqual(attrs["test_attr_str"], "test_string") + self.assertEqual(attrs["test_attr_byte"], "test_byte") + self.assertEqual(attrs["test_attr_int"], 0) + self.assertEqual(attrs["test_attr_float"], 1.2) + self.assertEqual(file_handler[ds_name + "/attr/test_attr_str"], "test_string") + self.assertEqual(file_handler[ds_name + "/attr/test_attr_byte"], "test_byte") + self.assertEqual(file_handler[ds_name + "/attr/test_attr_int"], 0) + self.assertEqual(file_handler[ds_name + "/attr/test_attr_float"], 1.2) + + self.assertEqual(file_handler["/attr/test_attr_str"], "test_string") + self.assertEqual(file_handler["/attr/test_attr_byte"], "test_byte") + self.assertEqual(file_handler["/attr/test_attr_str_arr"], "test_string2") + self.assertEqual(file_handler["/attr/test_attr_int"], 0) + self.assertEqual(file_handler["/attr/test_attr_float"], 1.2) + + self.assertIsInstance(file_handler.get("ds2_f"), xr.DataArray) + self.assertIsNone(file_handler.get("fake_ds")) + self.assertEqual(file_handler.get("fake_ds", "test"), "test") + + self.assertTrue("ds2_f" in file_handler) + self.assertFalse("fake_ds" in file_handler) + + self.assertIsInstance(file_handler["ds2_f/attr/test_ref"], np.ndarray) diff --git a/satpy/tests/reader_tests/test_hdfeos_base.py b/satpy/tests/reader_tests/test_hdfeos_base.py index 68b8928f2e..9ca6ee5fdd 100644 --- a/satpy/tests/reader_tests/test_hdfeos_base.py +++ b/satpy/tests/reader_tests/test_hdfeos_base.py @@ -19,7 +19,7 @@ import unittest -nrt_mda = '''GROUP = INVENTORYMETADATA +nrt_mda = """GROUP = INVENTORYMETADATA GROUPTYPE = MASTERGROUP GROUP = ECSDATAGRANULE @@ -439,175 +439,175 @@ END_GROUP = INVENTORYMETADATA -END''' # noqa: E501 +END""" # noqa: E501 nrt_mda_dict = { - 'INVENTORYMETADATA': { - 'ADDITIONALATTRIBUTES': { - 'ADDITIONALATTRIBUTESCONTAINER': { - 'ADDITIONALATTRIBUTENAME': { - 'VALUE': 'identifier_product_doi_authority' + "INVENTORYMETADATA": { + "ADDITIONALATTRIBUTES": { + "ADDITIONALATTRIBUTESCONTAINER": { + "ADDITIONALATTRIBUTENAME": { + "VALUE": "identifier_product_doi_authority" }, - 'INFORMATIONCONTENT': { - 'PARAMETERVALUE': { - 'VALUE': 'http://dx.doi.org' + 
"INFORMATIONCONTENT": { + "PARAMETERVALUE": { + "VALUE": "http://dx.doi.org" } } } }, - 'ASSOCIATEDPLATFORMINSTRUMENTSENSOR': { - 'ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER': { - 'ASSOCIATEDINSTRUMENTSHORTNAME': { - 'VALUE': 'MODIS' + "ASSOCIATEDPLATFORMINSTRUMENTSENSOR": { + "ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER": { + "ASSOCIATEDINSTRUMENTSHORTNAME": { + "VALUE": "MODIS" }, - 'ASSOCIATEDPLATFORMSHORTNAME': { - 'VALUE': 'Aqua' + "ASSOCIATEDPLATFORMSHORTNAME": { + "VALUE": "Aqua" }, - 'ASSOCIATEDSENSORSHORTNAME': { - 'VALUE': 'MODIS' + "ASSOCIATEDSENSORSHORTNAME": { + "VALUE": "MODIS" } } }, - 'COLLECTIONDESCRIPTIONCLASS': { - 'SHORTNAME': { - 'VALUE': 'MYD03' + "COLLECTIONDESCRIPTIONCLASS": { + "SHORTNAME": { + "VALUE": "MYD03" }, - 'VERSIONID': { - 'VALUE': 61 + "VERSIONID": { + "VALUE": 61 } }, - 'ECSDATAGRANULE': { - 'DAYNIGHTFLAG': { - 'VALUE': 'Day' + "ECSDATAGRANULE": { + "DAYNIGHTFLAG": { + "VALUE": "Day" }, - 'LOCALGRANULEID': { - 'VALUE': 'MYD03.A2019051.1225.061.2019051131153.NRT.hdf' + "LOCALGRANULEID": { + "VALUE": "MYD03.A2019051.1225.061.2019051131153.NRT.hdf" }, - 'LOCALVERSIONID': { - 'VALUE': '6.0.4' + "LOCALVERSIONID": { + "VALUE": "6.0.4" }, - 'PRODUCTIONDATETIME': { - 'VALUE': '2019-02-20T13:11:53.000Z' + "PRODUCTIONDATETIME": { + "VALUE": "2019-02-20T13:11:53.000Z" }, - 'REPROCESSINGACTUAL': { - 'VALUE': 'Near ' - 'Real ' - 'Time' + "REPROCESSINGACTUAL": { + "VALUE": "Near " + "Real " + "Time" }, - 'REPROCESSINGPLANNED': { - 'VALUE': 'further ' - 'update ' - 'is ' - 'anticipated' + "REPROCESSINGPLANNED": { + "VALUE": "further " + "update " + "is " + "anticipated" } }, - 'GROUPTYPE': 'MASTERGROUP', - 'INPUTGRANULE': { - 'INPUTPOINTER': { - 'VALUE': - ('MYD01.61.2019-051T12:25:00.000000Z.NA.29878844.500100_1.hdf', - 'MYD03LUT.coeff_V6.1.4', - 'PM1EPHND_NRT.A2019051.1220.061.2019051125628', - 'PM1EPHND_NRT.A2019051.1225.061.2019051125628', - 'PM1EPHND_NRT.A2019051.1230.061.2019051125628', ' ' - 'PM1ATTNR_NRT.A2019051.1220.061.2019051125628', - 'PM1ATTNR_NRT.A2019051.1225.061.2019051125628', - 'PM1ATTNR_NRT.A2019051.1230.061.2019051125628') + "GROUPTYPE": "MASTERGROUP", + "INPUTGRANULE": { + "INPUTPOINTER": { + "VALUE": + ("MYD01.61.2019-051T12:25:00.000000Z.NA.29878844.500100_1.hdf", + "MYD03LUT.coeff_V6.1.4", + "PM1EPHND_NRT.A2019051.1220.061.2019051125628", + "PM1EPHND_NRT.A2019051.1225.061.2019051125628", + "PM1EPHND_NRT.A2019051.1230.061.2019051125628", " " + "PM1ATTNR_NRT.A2019051.1220.061.2019051125628", + "PM1ATTNR_NRT.A2019051.1225.061.2019051125628", + "PM1ATTNR_NRT.A2019051.1230.061.2019051125628") } }, - 'MEASUREDPARAMETER': { - 'MEASUREDPARAMETERCONTAINER': { - 'PARAMETERNAME': { - 'VALUE': 'Geolocation' + "MEASUREDPARAMETER": { + "MEASUREDPARAMETERCONTAINER": { + "PARAMETERNAME": { + "VALUE": "Geolocation" }, - 'QAFLAGS': { - 'AUTOMATICQUALITYFLAG': { - 'VALUE': 'Passed' + "QAFLAGS": { + "AUTOMATICQUALITYFLAG": { + "VALUE": "Passed" }, - 'AUTOMATICQUALITYFLAGEXPLANATION': { - 'VALUE': - 'Set ' - 'to ' + "AUTOMATICQUALITYFLAGEXPLANATION": { + "VALUE": + "Set " + "to " "'Failed' " - 'if ' - 'processing ' - 'error ' - 'occurred, ' - 'set ' - 'to ' + "if " + "processing " + "error " + "occurred, " + "set " + "to " "'Passed' " - 'otherwise' + "otherwise" }, - 'SCIENCEQUALITYFLAG': { - 'VALUE': 'Not ' - 'Investigated' + "SCIENCEQUALITYFLAG": { + "VALUE": "Not " + "Investigated" } }, - 'QASTATS': { - 'QAPERCENTMISSINGDATA': { - 'VALUE': 0 + "QASTATS": { + "QAPERCENTMISSINGDATA": { + "VALUE": 0 }, - 'QAPERCENTOUTOFBOUNDSDATA': { - 'VALUE': 0 + 
"QAPERCENTOUTOFBOUNDSDATA": { + "VALUE": 0 } } } }, - 'ORBITCALCULATEDSPATIALDOMAIN': { - 'ORBITCALCULATEDSPATIALDOMAINCONTAINER': { - 'EQUATORCROSSINGDATE': { - 'VALUE': '2019-02-20' + "ORBITCALCULATEDSPATIALDOMAIN": { + "ORBITCALCULATEDSPATIALDOMAINCONTAINER": { + "EQUATORCROSSINGDATE": { + "VALUE": "2019-02-20" }, - 'EQUATORCROSSINGLONGITUDE': { - 'VALUE': -151.260740805733 + "EQUATORCROSSINGLONGITUDE": { + "VALUE": -151.260740805733 }, - 'EQUATORCROSSINGTIME': { - 'VALUE': '12:49:52.965727' + "EQUATORCROSSINGTIME": { + "VALUE": "12:49:52.965727" }, - 'ORBITNUMBER': { - 'VALUE': 89393 + "ORBITNUMBER": { + "VALUE": 89393 } } }, - 'PGEVERSIONCLASS': { - 'PGEVERSION': { - 'VALUE': '6.1.4' + "PGEVERSIONCLASS": { + "PGEVERSION": { + "VALUE": "6.1.4" } }, - 'RANGEDATETIME': { - 'RANGEBEGINNINGDATE': { - 'VALUE': '2019-02-20' + "RANGEDATETIME": { + "RANGEBEGINNINGDATE": { + "VALUE": "2019-02-20" }, - 'RANGEBEGINNINGTIME': { - 'VALUE': '12:25:00.000000' + "RANGEBEGINNINGTIME": { + "VALUE": "12:25:00.000000" }, - 'RANGEENDINGDATE': { - 'VALUE': '2019-02-20' + "RANGEENDINGDATE": { + "VALUE": "2019-02-20" }, - 'RANGEENDINGTIME': { - 'VALUE': '12:30:00.000000' + "RANGEENDINGTIME": { + "VALUE": "12:30:00.000000" } }, - 'SPATIALDOMAINCONTAINER': { - 'HORIZONTALSPATIALDOMAINCONTAINER': { - 'GPOLYGON': { - 'GPOLYGONCONTAINER': { - 'GRING': { - 'EXCLUSIONGRINGFLAG': { - 'VALUE': 'N' + "SPATIALDOMAINCONTAINER": { + "HORIZONTALSPATIALDOMAINCONTAINER": { + "GPOLYGON": { + "GPOLYGONCONTAINER": { + "GRING": { + "EXCLUSIONGRINGFLAG": { + "VALUE": "N" } }, - 'GRINGPOINT': { - 'GRINGPOINTLATITUDE': { - 'VALUE': (29.5170117594673, 26.1480434828114, + "GRINGPOINT": { + "GRINGPOINTLATITUDE": { + "VALUE": (29.5170117594673, 26.1480434828114, 43.2445462598877, 47.7959787025408) }, - 'GRINGPOINTLONGITUDE': { - 'VALUE': (25.3839329817764, 1.80418778807854, + "GRINGPOINTLONGITUDE": { + "VALUE": (25.3839329817764, 1.80418778807854, -6.50842421663422, 23.0260060198343) }, - 'GRINGPOINTSEQUENCENO': { - 'VALUE': (1, 2, 3, 4) + "GRINGPOINTSEQUENCENO": { + "VALUE": (1, 2, 3, 4) } } } diff --git a/satpy/tests/reader_tests/test_hrit_base.py b/satpy/tests/reader_tests/test_hrit_base.py index f8ad241532..4449c1a1a9 100644 --- a/satpy/tests/reader_tests/test_hrit_base.py +++ b/satpy/tests/reader_tests/test_hrit_base.py @@ -41,23 +41,23 @@ class TestHRITDecompress(unittest.TestCase): def test_xrit_cmd(self): """Test running the xrit decompress command.""" - old_env = os.environ.get('XRIT_DECOMPRESS_PATH', None) + old_env = os.environ.get("XRIT_DECOMPRESS_PATH", None) - os.environ['XRIT_DECOMPRESS_PATH'] = '/path/to/my/bin' + os.environ["XRIT_DECOMPRESS_PATH"] = "/path/to/my/bin" self.assertRaises(IOError, get_xritdecompress_cmd) - os.environ['XRIT_DECOMPRESS_PATH'] = gettempdir() + os.environ["XRIT_DECOMPRESS_PATH"] = gettempdir() self.assertRaises(IOError, get_xritdecompress_cmd) with NamedTemporaryFile() as fd: - os.environ['XRIT_DECOMPRESS_PATH'] = fd.name + os.environ["XRIT_DECOMPRESS_PATH"] = fd.name fname = fd.name res = get_xritdecompress_cmd() if old_env is not None: - os.environ['XRIT_DECOMPRESS_PATH'] = old_env + os.environ["XRIT_DECOMPRESS_PATH"] = old_env else: - os.environ.pop('XRIT_DECOMPRESS_PATH') + os.environ.pop("XRIT_DECOMPRESS_PATH") self.assertEqual(fname, res) @@ -65,53 +65,53 @@ def test_xrit_outfile(self): """Test the right decompression filename is used.""" stdout = [b"Decompressed file: bla.__\n"] outfile = get_xritdecompress_outfile(stdout) - self.assertEqual(outfile, b'bla.__') + 
self.assertEqual(outfile, b"bla.__") - @mock.patch('satpy.readers.hrit_base.Popen') + @mock.patch("satpy.readers.hrit_base.Popen") def test_decompress(self, popen): """Test decompression works.""" popen.return_value.returncode = 0 popen.return_value.communicate.return_value = [b"Decompressed file: bla.__\n"] - old_env = os.environ.get('XRIT_DECOMPRESS_PATH', None) + old_env = os.environ.get("XRIT_DECOMPRESS_PATH", None) with NamedTemporaryFile() as fd: - os.environ['XRIT_DECOMPRESS_PATH'] = fd.name - res = decompress('bla.C_') + os.environ["XRIT_DECOMPRESS_PATH"] = fd.name + res = decompress("bla.C_") if old_env is not None: - os.environ['XRIT_DECOMPRESS_PATH'] = old_env + os.environ["XRIT_DECOMPRESS_PATH"] = old_env else: - os.environ.pop('XRIT_DECOMPRESS_PATH') + os.environ.pop("XRIT_DECOMPRESS_PATH") - self.assertEqual(res, os.path.join('.', 'bla.__')) + self.assertEqual(res, os.path.join(".", "bla.__")) # From a compressed msg hrit file. # uncompressed data field length 17223680 # compressed data field length 1578312 -mda = {'file_type': 0, 'total_header_length': 6198, 'data_field_length': 17223680, 'number_of_bits_per_pixel': 10, - 'number_of_columns': 3712, 'number_of_lines': 464, 'compression_flag_for_data': 0, - 'projection_name': b'GEOS(+000.0) ', - 'cfac': -13642337, 'lfac': -13642337, 'coff': 1856, 'loff': 1856, - 'annotation_header': b'H-000-MSG4__-MSG4________-VIS006___-000001___-202208180730-C_', - 'cds_p_field': 64, 'timestamp': (23605, 27911151), 'GP_SC_ID': 324, - 'spectral_channel_id': 1, - 'segment_sequence_number': 1, 'planned_start_segment_number': 1, 'planned_end_segment_number': 8, - 'data_field_representation': 3, - 'image_segment_line_quality': np.array([(1, (0, 0), 1, 1, 0)] * 464, - dtype=[('line_number_in_grid', '>i4'), - ('line_mean_acquisition', [('days', '>u2'), - ('milliseconds', '>u4')]), - ('line_validity', 'u1'), - ('line_radiometric_quality', 'u1'), - ('line_geometric_quality', 'u1')]), - 'projection_parameters': {'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0, 'SSP_longitude': 0.0}, - 'orbital_parameters': {}} +mda = {"file_type": 0, "total_header_length": 6198, "data_field_length": 17223680, "number_of_bits_per_pixel": 10, + "number_of_columns": 3712, "number_of_lines": 464, "compression_flag_for_data": 0, + "projection_name": b"GEOS(+000.0) ", + "cfac": -13642337, "lfac": -13642337, "coff": 1856, "loff": 1856, + "annotation_header": b"H-000-MSG4__-MSG4________-VIS006___-000001___-202208180730-C_", + "cds_p_field": 64, "timestamp": (23605, 27911151), "GP_SC_ID": 324, + "spectral_channel_id": 1, + "segment_sequence_number": 1, "planned_start_segment_number": 1, "planned_end_segment_number": 8, + "data_field_representation": 3, + "image_segment_line_quality": np.array([(1, (0, 0), 1, 1, 0)] * 464, + dtype=[("line_number_in_grid", ">i4"), + ("line_mean_acquisition", [("days", ">u2"), + ("milliseconds", ">u4")]), + ("line_validity", "u1"), + ("line_radiometric_quality", "u1"), + ("line_geometric_quality", "u1")]), + "projection_parameters": {"a": 6378169.0, "b": 6356583.8, "h": 35785831.0, "SSP_longitude": 0.0}, + "orbital_parameters": {}} mda_compressed = mda.copy() mda_compressed["data_field_length"] = 1578312 -mda_compressed['compression_flag_for_data'] = 1 +mda_compressed["compression_flag_for_data"] = 1 def new_get_hd(instance, hdr_info): @@ -125,8 +125,8 @@ def new_get_hd(instance, hdr_info): def new_get_hd_compressed(instance, hdr_info): """Generate some metadata.""" instance.mda = mda.copy() - instance.mda['compression_flag_for_data'] = 1 - 
instance.mda['data_field_length'] = 1578312 + instance.mda["compression_flag_for_data"] = 1 + instance.mda["data_field_length"] = 1578312 @pytest.fixture @@ -139,15 +139,15 @@ def stub_hrit_file(tmp_path): def create_stub_hrit(filename, open_fun=open, meta=mda): """Create a stub hrit file.""" - nbits = meta['number_of_bits_per_pixel'] - lines = meta['number_of_lines'] - cols = meta['number_of_columns'] + nbits = meta["number_of_bits_per_pixel"] + lines = meta["number_of_lines"] + cols = meta["number_of_columns"] total_bits = lines * cols * nbits arr = np.random.randint(0, 256, size=int(total_bits / 8), dtype=np.uint8) with open_fun(filename, mode="wb") as fd: - fd.write(b" " * meta['total_header_length']) + fd.write(b" " * meta["total_header_length"]) bytes_data = arr.tobytes() fd.write(bytes_data) return filename @@ -184,19 +184,19 @@ def setup_method(self, method): """Set up the hrit file handler for testing.""" del method - with mock.patch.object(HRITFileHandler, '_get_hd', new=new_get_hd): - self.reader = HRITFileHandler('filename', - {'platform_shortname': 'MSG3', - 'start_time': datetime(2016, 3, 3, 0, 0)}, - {'filetype': 'info'}, + with mock.patch.object(HRITFileHandler, "_get_hd", new=new_get_hd): + self.reader = HRITFileHandler("filename", + {"platform_shortname": "MSG3", + "start_time": datetime(2016, 3, 3, 0, 0)}, + {"filetype": "info"}, [mock.MagicMock(), mock.MagicMock(), mock.MagicMock()]) - self.reader.mda['cfac'] = 5 - self.reader.mda['lfac'] = 5 - self.reader.mda['coff'] = 10 - self.reader.mda['loff'] = 10 - self.reader.mda['projection_parameters']['SSP_longitude'] = 44 + self.reader.mda["cfac"] = 5 + self.reader.mda["lfac"] = 5 + self.reader.mda["coff"] = 10 + self.reader.mda["loff"] = 10 + self.reader.mda["projection_parameters"]["SSP_longitude"] = 44 def test_get_xy_from_linecol(self): """Test get_xy_from_linecol.""" @@ -220,15 +220,15 @@ def test_get_area_extent(self): def test_get_area_def(self): """Test getting an area definition.""" from pyresample.utils import proj4_radius_parameters - area = self.reader.get_area_def('VIS06') + area = self.reader.get_area_def("VIS06") proj_dict = area.proj_dict a, b = proj4_radius_parameters(proj_dict) assert a == 6378169.0 assert b == 6356583.8 - assert proj_dict['h'] == 35785831.0 - assert proj_dict['lon_0'] == 44.0 - assert proj_dict['proj'] == 'geos' - assert proj_dict['units'] == 'm' + assert proj_dict["h"] == 35785831.0 + assert proj_dict["lon_0"] == 44.0 + assert proj_dict["proj"] == "geos" + assert proj_dict["units"] == "m" assert area.area_extent == (-77771774058.38356, -77771774058.38356, 30310525626438.438, 3720765401003.719) @@ -236,7 +236,7 @@ def test_read_band_filepath(self, stub_hrit_file): """Test reading a single band from a filepath.""" self.reader.filename = stub_hrit_file - res = self.reader.read_band('VIS006', None) + res = self.reader.read_band("VIS006", None) assert res.compute().shape == (464, 3712) def test_read_band_FSFile(self, stub_hrit_file): @@ -247,14 +247,14 @@ def test_read_band_FSFile(self, stub_hrit_file): fs_file = fsspec.open(filename) self.reader.filename = FSFile(fs_file) - res = self.reader.read_band('VIS006', None) + res = self.reader.read_band("VIS006", None) assert res.compute().shape == (464, 3712) def test_read_band_bzipped2_filepath(self, stub_bzipped_hrit_file): """Test reading a single band from a bzipped file.""" self.reader.filename = stub_bzipped_hrit_file - res = self.reader.read_band('VIS006', None) + res = self.reader.read_band("VIS006", None) assert res.compute().shape == 
(464, 3712) def test_read_band_gzip_stream(self, stub_gzipped_hrit_file): @@ -265,7 +265,7 @@ def test_read_band_gzip_stream(self, stub_gzipped_hrit_file): fs_file = fsspec.open(filename, compression="gzip") self.reader.filename = FSFile(fs_file) - res = self.reader.read_band('VIS006', None) + res = self.reader.read_band("VIS006", None) assert res.compute().shape == (464, 3712) def test_start_end_time(self): @@ -276,7 +276,7 @@ def test_start_end_time(self): assert self.reader.end_time == self.reader.observation_end_time -def fake_decompress(infile, outdir='.'): +def fake_decompress(infile, outdir="."): """Fake decompression.""" filename = os.fspath(infile)[:-3] return create_stub_hrit(filename) @@ -290,15 +290,15 @@ def test_read_band_filepath(self, stub_compressed_hrit_file): filename = stub_compressed_hrit_file with mock.patch("satpy.readers.hrit_base.decompress", side_effect=fake_decompress) as mock_decompress: - with mock.patch.object(HRITFileHandler, '_get_hd', side_effect=new_get_hd, autospec=True) as get_hd: + with mock.patch.object(HRITFileHandler, "_get_hd", side_effect=new_get_hd, autospec=True) as get_hd: self.reader = HRITFileHandler(filename, - {'platform_shortname': 'MSG3', - 'start_time': datetime(2016, 3, 3, 0, 0)}, - {'filetype': 'info'}, + {"platform_shortname": "MSG3", + "start_time": datetime(2016, 3, 3, 0, 0)}, + {"filetype": "info"}, [mock.MagicMock(), mock.MagicMock(), mock.MagicMock()]) - res = self.reader.read_band('VIS006', None) + res = self.reader.read_band("VIS006", None) assert get_hd.call_count == 1 assert mock_decompress.call_count == 0 assert res.compute().shape == (464, 3712) diff --git a/satpy/tests/reader_tests/test_hsaf_grib.py b/satpy/tests/reader_tests/test_hsaf_grib.py index bc8a2c2c73..b08aa81d06 100644 --- a/satpy/tests/reader_tests/test_hsaf_grib.py +++ b/satpy/tests/reader_tests/test_hsaf_grib.py @@ -36,8 +36,8 @@ def __init__(self, values, proj_params=None, latlons=None, **attrs): self.attrs = attrs self.values = values if proj_params is None: - proj_params = {'a': 6378140.0, 'b': 6356755.0, 'lat_0': 0.0, - 'lon_0': 0.0, 'proj': 'geos', 'h': 35785830.098} + proj_params = {"a": 6378140.0, "b": 6356755.0, "lat_0": 0.0, + "lon_0": 0.0, "proj": "geos", "h": 35785830.098} self.projparams = proj_params self._latlons = latlons @@ -66,15 +66,15 @@ def __init__(self, messages=None, proj_params=None, latlons=None): self._messages = [ FakeMessage( values=np.arange(25.).reshape((5, 5)), - name='Instantaneous rain rate', - shortName='irrate', - cfName='unknown', - units='kg m**-2 s**-1', + name="Instantaneous rain rate", + shortName="irrate", + cfName="unknown", + units="kg m**-2 s**-1", dataDate=20190603, dataTime=1645, missingValue=9999, - modelName='unknown', - centreDescription='Rome', + modelName="unknown", + centreDescription="Rome", minimum=0.0, maximum=0.01475, Nx=3712, @@ -121,51 +121,51 @@ def setUp(self): except ImportError: pygrib = None self.orig_pygrib = pygrib - sys.modules['pygrib'] = mock.MagicMock() + sys.modules["pygrib"] = mock.MagicMock() def tearDown(self): """Re-enable pygrib import.""" - sys.modules['pygrib'] = self.orig_pygrib + sys.modules["pygrib"] = self.orig_pygrib - @mock.patch('satpy.readers.hsaf_grib.pygrib.open', return_value=FakeGRIB()) + @mock.patch("satpy.readers.hsaf_grib.pygrib.open", return_value=FakeGRIB()) def test_init(self, pg): """Test the init function, ensure that the correct dates and metadata are returned.""" pg.open.return_value = FakeGRIB() correct_dt = datetime(2019, 6, 3, 16, 45, 0) from 
satpy.readers.hsaf_grib import HSAFFileHandler - fh = HSAFFileHandler('filename', mock.MagicMock(), mock.MagicMock()) + fh = HSAFFileHandler("filename", mock.MagicMock(), mock.MagicMock()) self.assertEqual(fh._analysis_time, correct_dt) - self.assertEqual(fh.metadata['projparams']['lat_0'], 0.0) - self.assertEqual(fh.metadata['shortName'], 'irrate') - self.assertEqual(fh.metadata['nx'], 3712) + self.assertEqual(fh.metadata["projparams"]["lat_0"], 0.0) + self.assertEqual(fh.metadata["shortName"], "irrate") + self.assertEqual(fh.metadata["nx"], 3712) - @mock.patch('satpy.readers.hsaf_grib.pygrib.open', return_value=FakeGRIB()) + @mock.patch("satpy.readers.hsaf_grib.pygrib.open", return_value=FakeGRIB()) def test_get_area_def(self, pg): """Test the area definition setup, checks the size and extent.""" pg.open.return_value = FakeGRIB() from satpy.readers.hsaf_grib import HSAFFileHandler - fh = HSAFFileHandler('filename', mock.MagicMock(), mock.MagicMock()) - area_def = HSAFFileHandler.get_area_def(fh, 'H03B') + fh = HSAFFileHandler("filename", mock.MagicMock(), mock.MagicMock()) + area_def = HSAFFileHandler.get_area_def(fh, "H03B") self.assertEqual(area_def.width, 3712) self.assertAlmostEqual(area_def.area_extent[0], -5569209.3026, places=3) self.assertAlmostEqual(area_def.area_extent[3], 5587721.9097, places=3) - @mock.patch('satpy.readers.hsaf_grib.pygrib.open', return_value=FakeGRIB()) + @mock.patch("satpy.readers.hsaf_grib.pygrib.open", return_value=FakeGRIB()) def test_get_dataset(self, pg): """Test reading the actual datasets from a grib file.""" pg.open.return_value = FakeGRIB() from satpy.readers.hsaf_grib import HSAFFileHandler # Instantaneous precipitation - fh = HSAFFileHandler('filename', mock.MagicMock(), mock.MagicMock()) + fh = HSAFFileHandler("filename", mock.MagicMock(), mock.MagicMock()) fh.filename = "H03B" - ds_id = make_dataid(name='H03B') + ds_id = make_dataid(name="H03B") data = fh.get_dataset(ds_id, mock.Mock()) np.testing.assert_array_equal(data.values, np.arange(25.).reshape((5, 5))) # Accumulated precipitation - fh = HSAFFileHandler('filename', mock.MagicMock(), mock.MagicMock()) + fh = HSAFFileHandler("filename", mock.MagicMock(), mock.MagicMock()) fh.filename = "H05B" - ds_id = make_dataid(name='H05B') + ds_id = make_dataid(name="H05B") data = fh.get_dataset(ds_id, mock.Mock()) np.testing.assert_array_equal(data.values, np.arange(25.).reshape((5, 5))) diff --git a/satpy/tests/reader_tests/test_hsaf_h5.py b/satpy/tests/reader_tests/test_hsaf_h5.py index 4d574b6eb4..49658e6727 100644 --- a/satpy/tests/reader_tests/test_hsaf_h5.py +++ b/satpy/tests/reader_tests/test_hsaf_h5.py @@ -21,42 +21,42 @@ def sc_h5_file(tmp_path_factory): """Create a fake HSAF SC HDF5 file.""" filename = tmp_path_factory.mktemp("data") / "h10_20221115_day_merged.H5" h5f = h5py.File(filename, mode="w") - h5f.create_dataset('SC', SHAPE_SC, dtype=np.uint8) - h5f.create_dataset('colormap', SHAPE_SC_COLORMAP, dtype=np.uint8) + h5f.create_dataset("SC", SHAPE_SC, dtype=np.uint8) + h5f.create_dataset("colormap", SHAPE_SC_COLORMAP, dtype=np.uint8) return str(filename) def _get_scene_with_loaded_sc_datasets(filename): """Return a scene with SC and SC_pal loaded.""" loaded_scene = Scene(filenames=[filename], reader="hsaf_h5") - loaded_scene.load(['SC', 'SC_pal']) + loaded_scene.load(["SC", "SC_pal"]) return loaded_scene def test_hsaf_sc_dataset(sc_h5_file): """Test the H-SAF SC dataset.""" loaded_scene = _get_scene_with_loaded_sc_datasets(sc_h5_file) - assert loaded_scene['SC'].shape == SHAPE_SC + assert 
loaded_scene["SC"].shape == SHAPE_SC def test_hsaf_sc_colormap_dataset(sc_h5_file): """Test the H-SAF SC_pal dataset.""" loaded_scene = _get_scene_with_loaded_sc_datasets(sc_h5_file) - assert loaded_scene['SC_pal'].shape == SHAPE_SC_COLORMAP + assert loaded_scene["SC_pal"].shape == SHAPE_SC_COLORMAP def test_hsaf_sc_datetime(sc_h5_file): """Test the H-SAF reference time.""" loaded_scene = _get_scene_with_loaded_sc_datasets(sc_h5_file) fname = os.path.basename(sc_h5_file) - dtstr = fname.split('_')[1] + dtstr = fname.split("_")[1] obs_time = datetime.strptime(dtstr, "%Y%m%d") - assert loaded_scene['SC'].attrs['data_time'] == obs_time + assert loaded_scene["SC"].attrs["data_time"] == obs_time def test_hsaf_sc_areadef(sc_h5_file): """Test the H-SAF SC area definition.""" loaded_scene = _get_scene_with_loaded_sc_datasets(sc_h5_file) - fd_def = get_area_def('msg_seviri_fes_3km') + fd_def = get_area_def("msg_seviri_fes_3km") hsaf_def = fd_def[AREA_Y_OFFSET:AREA_Y_OFFSET+SHAPE_SC[0], AREA_X_OFFSET:AREA_X_OFFSET+SHAPE_SC[1]] - assert loaded_scene['SC'].area == hsaf_def + assert loaded_scene["SC"].area == hsaf_def diff --git a/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py b/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py index b2a5d4d3e1..a0bca2143f 100644 --- a/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py +++ b/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py @@ -49,237 +49,237 @@ def __getitem__(self, key): def _get_geo_data(self, num_rows, num_cols): geo = { - 'wvc_lon': + "wvc_lon": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.float32), attrs={ - 'fill_value': 1.7e+38, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'degree', - 'valid range': [0, 359.99], + "fill_value": 1.7e+38, + "scale_factor": 1., + "add_offset": 0., + "units": "degree", + "valid range": [0, 359.99], }, - dims=('y', 'x')), - 'wvc_lat': + dims=("y", "x")), + "wvc_lat": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.float32), attrs={ - 'fill_value': 1.7e+38, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'degree', - 'valid range': [-90.0, 90.0], + "fill_value": 1.7e+38, + "scale_factor": 1., + "add_offset": 0., + "units": "degree", + "valid range": [-90.0, 90.0], }, - dims=('y', 'x')), + dims=("y", "x")), } return geo def _get_geo_data_nsoas(self, num_rows, num_cols): geo = { - 'wvc_lon': + "wvc_lon": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.float32), attrs={ - 'fill_value': 1.7e+38, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'degree', - 'valid_range': [0, 359.99], + "fill_value": 1.7e+38, + "scale_factor": 1., + "add_offset": 0., + "units": "degree", + "valid_range": [0, 359.99], }, - dims=('y', 'x')), - 'wvc_lat': + dims=("y", "x")), + "wvc_lat": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.float32), attrs={ - 'fill_value': 1.7e+38, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'degree', - 'valid_range': [-90.0, 90.0], + "fill_value": 1.7e+38, + "scale_factor": 1., + "add_offset": 0., + "units": "degree", + "valid_range": [-90.0, 90.0], }, - dims=('y', 'x')), + dims=("y", "x")), } return geo def _get_selection_data(self, num_rows, num_cols): selection = { - 'wvc_selection': + "wvc_selection": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ - 'fill_value': 0, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'count', - 'valid range': [1, 8], + "fill_value": 0, + "scale_factor": 1., + "add_offset": 0., + "units": "count", + "valid range": [1, 8], }, - dims=('y', 'x')), - 
'wind_speed_selection': + dims=("y", "x")), + "wind_speed_selection": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int16), attrs={ - 'fill_value': -32767, - 'scale_factor': 0.1, - 'add_offset': 0., - 'units': 'deg', - 'valid range': [0, 3599], + "fill_value": -32767, + "scale_factor": 0.1, + "add_offset": 0., + "units": "deg", + "valid range": [0, 3599], }, - dims=('y', 'x')), - 'wind_dir_selection': + dims=("y", "x")), + "wind_dir_selection": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int16), attrs={ - 'fill_value': -32767, - 'scale_factor': 0.01, - 'add_offset': 0., - 'units': 'm/s', - 'valid range': [0, 5000], + "fill_value": -32767, + "scale_factor": 0.01, + "add_offset": 0., + "units": "m/s", + "valid range": [0, 5000], }, - dims=('y', 'x')), - 'model_dir': + dims=("y", "x")), + "model_dir": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int16), attrs={ - 'fill_value': -32767, - 'scale_factor': 0.01, - 'add_offset': 0., - 'units': 'm/s', - 'valid range': [0, 5000], + "fill_value": -32767, + "scale_factor": 0.01, + "add_offset": 0., + "units": "m/s", + "valid range": [0, 5000], }, - dims=('y', 'x')), - 'model_speed': + dims=("y", "x")), + "model_speed": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int16), attrs={ - 'fill_value': -32767, - 'scale_factor': 0.1, - 'add_offset': 0., - 'units': 'deg', - 'valid range': [0, 3599], + "fill_value": -32767, + "scale_factor": 0.1, + "add_offset": 0., + "units": "deg", + "valid range": [0, 3599], }, - dims=('y', 'x')), - 'num_ambigs': + dims=("y", "x")), + "num_ambigs": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ - 'fill_value': 0, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'count', - 'valid range': [1, 8], + "fill_value": 0, + "scale_factor": 1., + "add_offset": 0., + "units": "count", + "valid range": [1, 8], }, - dims=('y', 'x')), - 'num_in_aft': + dims=("y", "x")), + "num_in_aft": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ - 'fill_value': 0, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'count', - 'valid range': [1, 127], + "fill_value": 0, + "scale_factor": 1., + "add_offset": 0., + "units": "count", + "valid range": [1, 127], }, - dims=('y', 'x')), - 'num_in_fore': + dims=("y", "x")), + "num_in_fore": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ - 'fill_value': 0, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'count', - 'valid range': [1, 127], + "fill_value": 0, + "scale_factor": 1., + "add_offset": 0., + "units": "count", + "valid range": [1, 127], }, - dims=('y', 'x')), - 'num_out_aft': + dims=("y", "x")), + "num_out_aft": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ - 'fill_value': 0, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'count', - 'valid range': [1, 127], + "fill_value": 0, + "scale_factor": 1., + "add_offset": 0., + "units": "count", + "valid range": [1, 127], }, - dims=('y', 'x')), - 'num_out_fore': + dims=("y", "x")), + "num_out_fore": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ - 'fill_value': 0, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'count', - 'valid range': [1, 127], + "fill_value": 0, + "scale_factor": 1., + "add_offset": 0., + "units": "count", + "valid range": [1, 127], }, - dims=('y', 'x')), - 'wvc_quality_flag': + dims=("y", "x")), + "wvc_quality_flag": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, 
dtype=np.uint16), attrs={ - 'fill_value': 2.14748e+09, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'na', - 'valid range': [1, 2.14748e+09], + "fill_value": 2.14748e+09, + "scale_factor": 1., + "add_offset": 0., + "units": "na", + "valid range": [1, 2.14748e+09], }, - dims=('y', 'x')), + dims=("y", "x")), } return selection def _get_all_ambiguities_data(self, num_rows, num_cols, num_amb): all_amb = { - 'max_likelihood_est': + "max_likelihood_est": xr.DataArray( da.ones((num_rows, num_cols, num_amb), chunks=1024, dtype=np.int16), attrs={ - 'fill_value': -32767, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'na', - 'valid range': [0, 32767], + "fill_value": -32767, + "scale_factor": 1., + "add_offset": 0., + "units": "na", + "valid range": [0, 32767], }, - dims=('y', 'x', 'selection')), - 'wind_dir': + dims=("y", "x", "selection")), + "wind_dir": xr.DataArray( da.ones((num_rows, num_cols, num_amb), chunks=1024, dtype=np.int16), attrs={ - 'fill_value': -32767, - 'scale_factor': 0.1, - 'add_offset': 0., - 'units': 'deg', - 'valid range': [0, 3599], + "fill_value": -32767, + "scale_factor": 0.1, + "add_offset": 0., + "units": "deg", + "valid range": [0, 3599], }, - dims=('y', 'x', 'selection')), - 'wind_speed': + dims=("y", "x", "selection")), + "wind_speed": xr.DataArray( da.ones((num_rows, num_cols, num_amb), chunks=1024, dtype=np.int16), attrs={ - 'fill_value': -32767, - 'scale_factor': 0.01, - 'add_offset': 0., - 'units': 'm/s', - 'valid range': [0, 5000], + "fill_value": -32767, + "scale_factor": 0.01, + "add_offset": 0., + "units": "m/s", + "valid range": [0, 5000], }, - dims=('y', 'x', 'selection')), + dims=("y", "x", "selection")), } return all_amb @@ -295,49 +295,49 @@ def _get_wvc_row_time(self, num_rows): "20200326T01:11:38.074", "20200326T01:11:41.887"] wvc_row_time = { - 'wvc_row_time': + "wvc_row_time": xr.DataArray(data, attrs={ - 'fill_value': "", + "fill_value": "", }, - dims=('y',)), + dims=("y",)), } return wvc_row_time def _get_global_attrs(self, num_rows, num_cols): return { - '/attr/Equator_Crossing_Longitude': '246.408397', - '/attr/Equator_Crossing_Time': '20200326T01:37:15.875', - '/attr/HDF_Version_Id': 'HDF5-1.8.16', - '/attr/Input_L2A_Filename': 'H2B_OPER_SCA_L2A_OR_20200326T010839_20200326T025757_07076_dps_250_20.h5', - '/attr/Instrument_ShorName': 'HSCAT-B', - '/attr/L2A_Inputdata_Version': '10', - '/attr/L2B_Actual_WVC_Rows': np.int32(num_rows), - '/attr/L2B_Algorithm_Descriptor': ('Wind retrieval processing uses the multiple solution scheme (MSS) for ' - 'wind inversion with the NSCAT-4 GMF,and a circular median filter ' - 'method (CMF) for ambiguity removal. 
The ECMWF/NCEP forescate data are ' - 'used as background winds in the CMF'), - '/attr/L2B_Data_Version': '10', - '/attr/L2B_Expected_WVC_Rows': np.int32(num_rows), - '/attr/L2B_Processing_Type': 'OPER', - '/attr/L2B_Processor_Name': 'hy2_sca_l2b_pro', - '/attr/L2B_Processor_Version': '01.00', - '/attr/Long_Name': 'HY-2B/SCAT Level 2B Ocean Wind Vectors in 25.0 km Swath Grid', - '/attr/Orbit_Inclination': np.float32(99.3401), - '/attr/Orbit_Number': '07076', - '/attr/Output_L2B_Filename': 'H2B_OPER_SCA_L2B_OR_20200326T011107_20200326T025540_07076_dps_250_20_owv.h5', - '/attr/Platform_LongName': 'Haiyang 2B Ocean Observing Satellite', - '/attr/Platform_ShortName': 'HY-2B', - '/attr/Platform_Type': 'spacecraft', - '/attr/Producer_Agency': 'Ministry of Natural Resources of the People\'s Republic of China', - '/attr/Producer_Institution': 'NSOAS', - '/attr/Production_Date_Time': '20200326T06:23:10', - '/attr/Range_Beginning_Time': '20200326T01:11:07', - '/attr/Range_Ending_Time': '20200326T02:55:40', - '/attr/Rev_Orbit_Period': '14 days', - '/attr/Short_Name': 'HY-2B SCAT-L2B-25km', - '/attr/Sigma0_Granularity': 'whole pulse', - '/attr/WVC_Size': '25000m*25000m', + "/attr/Equator_Crossing_Longitude": "246.408397", + "/attr/Equator_Crossing_Time": "20200326T01:37:15.875", + "/attr/HDF_Version_Id": "HDF5-1.8.16", + "/attr/Input_L2A_Filename": "H2B_OPER_SCA_L2A_OR_20200326T010839_20200326T025757_07076_dps_250_20.h5", + "/attr/Instrument_ShorName": "HSCAT-B", + "/attr/L2A_Inputdata_Version": "10", + "/attr/L2B_Actual_WVC_Rows": np.int32(num_rows), + "/attr/L2B_Algorithm_Descriptor": ("Wind retrieval processing uses the multiple solution scheme (MSS) for " + "wind inversion with the NSCAT-4 GMF,and a circular median filter " + "method (CMF) for ambiguity removal. 
The ECMWF/NCEP forescate data are " + "used as background winds in the CMF"), + "/attr/L2B_Data_Version": "10", + "/attr/L2B_Expected_WVC_Rows": np.int32(num_rows), + "/attr/L2B_Processing_Type": "OPER", + "/attr/L2B_Processor_Name": "hy2_sca_l2b_pro", + "/attr/L2B_Processor_Version": "01.00", + "/attr/Long_Name": "HY-2B/SCAT Level 2B Ocean Wind Vectors in 25.0 km Swath Grid", + "/attr/Orbit_Inclination": np.float32(99.3401), + "/attr/Orbit_Number": "07076", + "/attr/Output_L2B_Filename": "H2B_OPER_SCA_L2B_OR_20200326T011107_20200326T025540_07076_dps_250_20_owv.h5", + "/attr/Platform_LongName": "Haiyang 2B Ocean Observing Satellite", + "/attr/Platform_ShortName": "HY-2B", + "/attr/Platform_Type": "spacecraft", + "/attr/Producer_Agency": "Ministry of Natural Resources of the People\'s Republic of China", + "/attr/Producer_Institution": "NSOAS", + "/attr/Production_Date_Time": "20200326T06:23:10", + "/attr/Range_Beginning_Time": "20200326T01:11:07", + "/attr/Range_Ending_Time": "20200326T02:55:40", + "/attr/Rev_Orbit_Period": "14 days", + "/attr/Short_Name": "HY-2B SCAT-L2B-25km", + "/attr/Sigma0_Granularity": "whole pulse", + "/attr/WVC_Size": "25000m*25000m", } def get_test_content(self, filename, filename_info, filetype_info): @@ -349,11 +349,11 @@ def get_test_content(self, filename, filename_info, filetype_info): test_content = {} test_content.update(self._get_global_attrs(num_rows, num_cols)) data = {} - if 'OPER_SCA_L2B' in filename: - test_content.update({'/attr/L2B_Expected_WVC_Cells': np.int32(num_cols)}) + if "OPER_SCA_L2B" in filename: + test_content.update({"/attr/L2B_Expected_WVC_Cells": np.int32(num_cols)}) data = self._get_geo_data_nsoas(num_rows, num_cols) else: - test_content.update({'/attr/L2B_Number_WVC_cells': np.int32(num_cols)}) + test_content.update({"/attr/L2B_Number_WVC_cells": np.int32(num_cols)}) data = self._get_geo_data(num_rows, num_cols) test_content.update(data) @@ -377,9 +377,9 @@ def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.hy2_scat_l2b_h5 import HY2SCATL2BH5FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(HY2SCATL2BH5FileHandler, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(HY2SCATL2BH5FileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -391,7 +391,7 @@ def test_load_geo(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ - 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5', ] + "W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) @@ -400,14 +400,14 @@ def test_load_geo(self): # Make sure we have some files self.assertTrue(reader.file_handlers) - res = reader.load(['wvc_lon', 'wvc_lat']) + res = reader.load(["wvc_lon", "wvc_lat"]) self.assertEqual(2, len(res)) def test_load_geo_nsoas(self): """Test loading data from nsoas file.""" from satpy.readers import load_reader filenames = [ - 'H2B_OPER_SCA_L2B_OR_20210803T100304_20210803T104601_13905_pwp_250_07_owv.h5', ] + 
"H2B_OPER_SCA_L2B_OR_20210803T100304_20210803T104601_13905_pwp_250_07_owv.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) @@ -416,14 +416,14 @@ def test_load_geo_nsoas(self): # Make sure we have some files self.assertTrue(reader.file_handlers) - res = reader.load(['wvc_lon', 'wvc_lat']) + res = reader.load(["wvc_lon", "wvc_lat"]) self.assertEqual(2, len(res)) def test_load_data_selection(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ - 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5', ] + "W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) @@ -431,16 +431,16 @@ def test_load_data_selection(self): reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) - res = reader.load(['wind_speed_selection', - 'wind_dir_selection', - 'wvc_selection']) + res = reader.load(["wind_speed_selection", + "wind_dir_selection", + "wvc_selection"]) self.assertEqual(3, len(res)) def test_load_data_all_ambiguities(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ - 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5', ] + "W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) @@ -448,24 +448,24 @@ def test_load_data_all_ambiguities(self): reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) - res = reader.load(['wind_speed', - 'wind_dir', - 'max_likelihood_est', - 'model_dir', - 'model_speed', - 'num_ambigs', - 'num_in_aft', - 'num_in_fore', - 'num_out_aft', - 'num_out_fore', - 'wvc_quality_flag']) + res = reader.load(["wind_speed", + "wind_dir", + "max_likelihood_est", + "model_dir", + "model_speed", + "num_ambigs", + "num_in_aft", + "num_in_fore", + "num_out_aft", + "num_out_fore", + "wvc_quality_flag"]) self.assertEqual(11, len(res)) def test_load_data_row_times(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ - 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5', ] + "W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) @@ -473,38 +473,38 @@ def test_load_data_row_times(self): reader.create_filehandlers(files) # Make sure we have some files self.assertTrue(reader.file_handlers) - res = reader.load(['wvc_row_time']) + res = reader.load(["wvc_row_time"]) self.assertEqual(1, len(res)) def test_reading_attrs(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ - 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5', ] + "W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) reader.create_filehandlers(files) # Make sure we have some files - res = reader.load(['wvc_lon']) - self.assertEqual(res['wvc_lon'].attrs['L2B_Number_WVC_cells'], 10) + res = reader.load(["wvc_lon"]) + 
self.assertEqual(res["wvc_lon"].attrs["L2B_Number_WVC_cells"], 10) with self.assertRaises(KeyError): - self.assertEqual(res['wvc_lon'].attrs['L2B_Expected_WVC_Cells'], 10) + self.assertEqual(res["wvc_lon"].attrs["L2B_Expected_WVC_Cells"], 10) def test_reading_attrs_nsoas(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ - 'H2B_OPER_SCA_L2B_OR_20210803T100304_20210803T104601_13905_pwp_250_07_owv.h5', ] + "H2B_OPER_SCA_L2B_OR_20210803T100304_20210803T104601_13905_pwp_250_07_owv.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) reader.create_filehandlers(files) # Make sure we have some files - res = reader.load(['wvc_lon']) + res = reader.load(["wvc_lon"]) with self.assertRaises(KeyError): - self.assertEqual(res['wvc_lon'].attrs['L2B_Number_WVC_cells'], 10) - self.assertEqual(res['wvc_lon'].attrs['L2B_Expected_WVC_Cells'], 10) + self.assertEqual(res["wvc_lon"].attrs["L2B_Number_WVC_cells"], 10) + self.assertEqual(res["wvc_lon"].attrs["L2B_Expected_WVC_Cells"], 10) def test_properties(self): """Test platform_name.""" @@ -512,13 +512,13 @@ def test_properties(self): from satpy.readers import load_reader filenames = [ - 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5', ] + "W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) reader.create_filehandlers(files) # Make sure we have some files - res = reader.load(['wvc_lon']) - self.assertEqual(res['wvc_lon'].platform_name, 'HY-2B') - self.assertEqual(res['wvc_lon'].start_time, datetime(2020, 3, 26, 1, 11, 7)) - self.assertEqual(res['wvc_lon'].end_time, datetime(2020, 3, 26, 2, 55, 40)) + res = reader.load(["wvc_lon"]) + self.assertEqual(res["wvc_lon"].platform_name, "HY-2B") + self.assertEqual(res["wvc_lon"].start_time, datetime(2020, 3, 26, 1, 11, 7)) + self.assertEqual(res["wvc_lon"].end_time, datetime(2020, 3, 26, 2, 55, 40)) diff --git a/satpy/tests/reader_tests/test_iasi_l2.py b/satpy/tests/reader_tests/test_iasi_l2.py index 9dbfa7eef0..81ca28a5b6 100644 --- a/satpy/tests/reader_tests/test_iasi_l2.py +++ b/satpy/tests/reader_tests/test_iasi_l2.py @@ -32,85 +32,85 @@ # Structure for the test data, to be written to HDF5 file TEST_DATA = { # Not implemented in the reader - 'Amsu': { - 'FLG_AMSUBAD': {'data': np.zeros((NUM_SCANLINES, 30), dtype=np.uint8), - 'attrs': {}} + "Amsu": { + "FLG_AMSUBAD": {"data": np.zeros((NUM_SCANLINES, 30), dtype=np.uint8), + "attrs": {}} }, # Not implemented in the reader - 'INFO': { - 'OmC': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'long_name': "Cloud signal. Predicted average window channel 'Obs minus Calc", - 'units': 'K'}}, - 'mdist': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {}} + "INFO": { + "OmC": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"long_name": "Cloud signal. 
Predicted average window channel 'Obs minus Calc", + "units": "K"}}, + "mdist": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {}} }, - 'L1C': { - 'Latitude': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'units': 'degrees_north'}}, - 'Longitude': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'units': 'degrees_north'}}, - 'SatAzimuth': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'units': 'degrees'}}, - 'SatZenith': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'units': 'degrees'}}, - 'SensingTime_day': {'data': np.array([6472], dtype=np.uint16), - 'attrs': {}}, - 'SensingTime_msec': {'data': np.array([37337532], dtype=np.uint32), - 'attrs': {}}, - 'SunAzimuth': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'units': 'degrees'}}, - 'SunZenith': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'units': 'degrees'}}, + "L1C": { + "Latitude": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"units": "degrees_north"}}, + "Longitude": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"units": "degrees_north"}}, + "SatAzimuth": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"units": "degrees"}}, + "SatZenith": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"units": "degrees"}}, + "SensingTime_day": {"data": np.array([6472], dtype=np.uint16), + "attrs": {}}, + "SensingTime_msec": {"data": np.array([37337532], dtype=np.uint32), + "attrs": {}}, + "SunAzimuth": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"units": "degrees"}}, + "SunZenith": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"units": "degrees"}}, }, # Not implemented in the reader - 'Maps': { - 'Height': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'units': 'm'}}, - 'HeightStd': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'units': 'm'}}, + "Maps": { + "Height": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"units": "m"}}, + "HeightStd": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"units": "m"}}, }, # Not implemented in the reader - 'Mhs': { - 'FLG_MHSBAD': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.uint8), - 'attrs': {}} + "Mhs": { + "FLG_MHSBAD": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.uint8), + "attrs": {}} }, - 'PWLR': { - 'E': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH, 10), dtype=np.float32), - 'attrs': {'emissivity_wavenumbers': np.array([699.3, 826.4, + "PWLR": { + "E": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH, 10), dtype=np.float32), + "attrs": {"emissivity_wavenumbers": np.array([699.3, 826.4, 925.9, 1075.2, 1204.8, 1315.7, 1724.1, 2000.0, 2325.5, 2702.7], dtype=np.float32)}}, - 'O': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), - 'attrs': {'long_name': 'Ozone mixing ratio vertical profile', - 'units': 'kg/kg'}}, - 'OC': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {}}, - 'P': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), - 'attrs': {'long_name': 'Atmospheric pressures at which the vertical profiles are given. 
' - 'Last value is the surface pressure', - 'units': 'hpa'}}, - 'QE': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {}}, - 'QO': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {}}, - 'QP': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {}}, - 'QT': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {}}, - 'QTs': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {}}, - 'QW': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {}}, - 'T': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), - 'attrs': {'long_name': 'Temperature vertical profile', 'units': 'K'}}, - 'Ts': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'long_name': 'Surface skin temperature', 'units': 'K'}}, - 'W': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), - 'attrs': {'long_name': 'Water vapour mixing ratio vertical profile', 'units': 'kg/kg'}}, - 'WC': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'long_name': 'Water vapour total columnar amount', 'units': 'mm'}}, + "O": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), + "attrs": {"long_name": "Ozone mixing ratio vertical profile", + "units": "kg/kg"}}, + "OC": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {}}, + "P": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), + "attrs": {"long_name": "Atmospheric pressures at which the vertical profiles are given. " + "Last value is the surface pressure", + "units": "hpa"}}, + "QE": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {}}, + "QO": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {}}, + "QP": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {}}, + "QT": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {}}, + "QTs": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {}}, + "QW": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {}}, + "T": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), + "attrs": {"long_name": "Temperature vertical profile", "units": "K"}}, + "Ts": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"long_name": "Surface skin temperature", "units": "K"}}, + "W": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), + "attrs": {"long_name": "Water vapour mixing ratio vertical profile", "units": "kg/kg"}}, + "WC": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"long_name": "Water vapour total columnar amount", "units": "mm"}}, } } @@ -118,17 +118,17 @@ def save_test_data(path): """Save the test to the indicated directory.""" import h5py - with h5py.File(os.path.join(path, FNAME), 'w') as fid: + with h5py.File(os.path.join(path, FNAME), "w") as fid: # Create groups for grp in TEST_DATA: fid.create_group(grp) # Write datasets for dset in TEST_DATA[grp]: - fid[grp][dset] = TEST_DATA[grp][dset]['data'] + fid[grp][dset] = TEST_DATA[grp][dset]["data"] # Write dataset attributes - for attr in TEST_DATA[grp][dset]['attrs']: + for attr in TEST_DATA[grp][dset]["attrs"]: fid[grp][dset].attrs[attr] = \ - TEST_DATA[grp][dset]['attrs'][attr] + TEST_DATA[grp][dset]["attrs"][attr] 
class TestIasiL2(unittest.TestCase): @@ -144,16 +144,16 @@ def setUp(self): self.base_dir = tempfile.mkdtemp() save_test_data(self.base_dir) self.fname = os.path.join(self.base_dir, FNAME) - self.fname_info = {'start_time': dt.datetime(2017, 9, 20, 10, 22, 17), - 'end_time': dt.datetime(2017, 9, 20, 10, 29, 12), - 'processing_time': dt.datetime(2017, 9, 20, 10, 35, 59), - 'processing_location': 'kan', - 'long_platform_id': 'metopb', - 'instrument': 'iasi', - 'platform_id': 'M01'} - self.ftype_info = {'file_reader': IASIL2HDF5, - 'file_patterns': ['{fname}.hdf'], - 'file_type': 'iasi_l2_hdf5'} + self.fname_info = {"start_time": dt.datetime(2017, 9, 20, 10, 22, 17), + "end_time": dt.datetime(2017, 9, 20, 10, 29, 12), + "processing_time": dt.datetime(2017, 9, 20, 10, 35, 59), + "processing_location": "kan", + "long_platform_id": "metopb", + "instrument": "iasi", + "platform_id": "M01"} + self.ftype_info = {"file_reader": IASIL2HDF5, + "file_patterns": ["{fname}.hdf"], + "file_type": "iasi_l2_hdf5"} self.reader = IASIL2HDF5(self.fname, self.fname_info, self.ftype_info) def tearDown(self): @@ -168,44 +168,44 @@ def test_scene(self): """Test scene creation.""" from satpy import Scene fname = os.path.join(self.base_dir, FNAME) - scn = Scene(reader='iasi_l2', filenames=[fname]) + scn = Scene(reader="iasi_l2", filenames=[fname]) assert scn.start_time is not None assert scn.end_time is not None assert scn.sensor_names - assert 'iasi' in scn.sensor_names + assert "iasi" in scn.sensor_names def test_scene_load_available_datasets(self): """Test that all datasets are available.""" from satpy import Scene fname = os.path.join(self.base_dir, FNAME) - scn = Scene(reader='iasi_l2', filenames=[fname]) + scn = Scene(reader="iasi_l2", filenames=[fname]) scn.load(scn.available_dataset_names()) def test_scene_load_pressure(self): """Test loading pressure data.""" from satpy import Scene fname = os.path.join(self.base_dir, FNAME) - scn = Scene(reader='iasi_l2', filenames=[fname]) - scn.load(['pressure']) - pres = scn['pressure'].compute() + scn = Scene(reader="iasi_l2", filenames=[fname]) + scn.load(["pressure"]) + pres = scn["pressure"].compute() self.check_pressure(pres, scn.attrs) def test_scene_load_emissivity(self): """Test loading emissivity data.""" from satpy import Scene fname = os.path.join(self.base_dir, FNAME) - scn = Scene(reader='iasi_l2', filenames=[fname]) - scn.load(['emissivity']) - emis = scn['emissivity'].compute() + scn = Scene(reader="iasi_l2", filenames=[fname]) + scn.load(["emissivity"]) + emis = scn["emissivity"].compute() self.check_emissivity(emis) def test_scene_load_sensing_times(self): """Test loading sensing times.""" from satpy import Scene fname = os.path.join(self.base_dir, FNAME) - scn = Scene(reader='iasi_l2', filenames=[fname]) - scn.load(['sensing_time']) - times = scn['sensing_time'].compute() + scn = Scene(reader="iasi_l2", filenames=[fname]) + scn.load(["sensing_time"]) + times = scn["sensing_time"].compute() self.check_sensing_times(times) def test_init(self): @@ -214,8 +214,8 @@ def test_init(self): self.assertEqual(self.reader.finfo, self.fname_info) self.assertTrue(self.reader.lons is None) self.assertTrue(self.reader.lats is None) - self.assertEqual(self.reader.mda['platform_name'], 'Metop-B') - self.assertEqual(self.reader.mda['sensor'], 'iasi') + self.assertEqual(self.reader.mda["platform_name"], "Metop-B") + self.assertEqual(self.reader.mda["sensor"], "iasi") def test_time_properties(self): """Test time properties.""" @@ -226,16 +226,16 @@ def 
test_time_properties(self): def test_get_dataset(self): """Test get_dataset() for different datasets.""" from satpy.tests.utils import make_dataid - info = {'eggs': 'spam'} - key = make_dataid(name='pressure') + info = {"eggs": "spam"} + key = make_dataid(name="pressure") data = self.reader.get_dataset(key, info).compute() self.check_pressure(data) - self.assertTrue('eggs' in data.attrs) - self.assertEqual(data.attrs['eggs'], 'spam') - key = make_dataid(name='emissivity') + self.assertTrue("eggs" in data.attrs) + self.assertEqual(data.attrs["eggs"], "spam") + key = make_dataid(name="emissivity") data = self.reader.get_dataset(key, info).compute() self.check_emissivity(data) - key = make_dataid(name='sensing_time') + key = make_dataid(name="sensing_time") data = self.reader.get_dataset(key, info).compute() self.assertEqual(data.shape, (NUM_SCANLINES, SCAN_WIDTH)) @@ -249,10 +249,10 @@ def check_pressure(self, pres, attrs=None): self.assertEqual(pres.y.size, NUM_SCANLINES) self.assertEqual(pres.level.size, NUM_LEVELS) if attrs: - self.assertEqual(pres.attrs['start_time'], attrs['start_time']) - self.assertEqual(pres.attrs['end_time'], attrs['end_time']) - self.assertTrue('long_name' in pres.attrs) - self.assertTrue('units' in pres.attrs) + self.assertEqual(pres.attrs["start_time"], attrs["start_time"]) + self.assertEqual(pres.attrs["end_time"], attrs["end_time"]) + self.assertTrue("long_name" in pres.attrs) + self.assertTrue("units" in pres.attrs) def check_emissivity(self, emis): """Test reading emissivity dataset. @@ -262,7 +262,7 @@ def check_emissivity(self, emis): self.assertTrue(np.all(emis == 0.0)) self.assertEqual(emis.x.size, SCAN_WIDTH) self.assertEqual(emis.y.size, NUM_SCANLINES) - self.assertTrue('emissivity_wavenumbers' in emis.attrs) + self.assertTrue("emissivity_wavenumbers" in emis.attrs) def check_sensing_times(self, times): """Test reading sensing times. 
@@ -281,15 +281,15 @@ def test_read_dataset(self): from satpy.readers.iasi_l2 import read_dataset from satpy.tests.utils import make_dataid - with h5py.File(self.fname, 'r') as fid: - key = make_dataid(name='pressure') + with h5py.File(self.fname, "r") as fid: + key = make_dataid(name="pressure") data = read_dataset(fid, key).compute() self.check_pressure(data) - key = make_dataid(name='emissivity') + key = make_dataid(name="emissivity") data = read_dataset(fid, key).compute() self.check_emissivity(data) # This dataset doesn't have any attributes - key = make_dataid(name='ozone_total_column') + key = make_dataid(name="ozone_total_column") data = read_dataset(fid, key).compute() self.assertEqual(len(data.attrs), 0) @@ -299,19 +299,19 @@ def test_read_geo(self): from satpy.readers.iasi_l2 import read_geo from satpy.tests.utils import make_dataid - with h5py.File(self.fname, 'r') as fid: - key = make_dataid(name='sensing_time') + with h5py.File(self.fname, "r") as fid: + key = make_dataid(name="sensing_time") data = read_geo(fid, key).compute() self.assertEqual(data.shape, (NUM_SCANLINES, SCAN_WIDTH)) - key = make_dataid(name='latitude') + key = make_dataid(name="latitude") data = read_geo(fid, key).compute() self.assertEqual(data.shape, (NUM_SCANLINES, SCAN_WIDTH)) def test_form_datetimes(self): """Test _form_datetimes() function.""" from satpy.readers.iasi_l2 import _form_datetimes - days = TEST_DATA['L1C']['SensingTime_day']['data'] - msecs = TEST_DATA['L1C']['SensingTime_msec']['data'] + days = TEST_DATA["L1C"]["SensingTime_day"]["data"] + msecs = TEST_DATA["L1C"]["SensingTime_msec"]["data"] times = _form_datetimes(days, msecs) self.check_sensing_times(times) diff --git a/satpy/tests/reader_tests/test_iasi_l2_so2_bufr.py b/satpy/tests/reader_tests/test_iasi_l2_so2_bufr.py index 85df6b64ed..58aee7d1ba 100644 --- a/satpy/tests/reader_tests/test_iasi_l2_so2_bufr.py +++ b/satpy/tests/reader_tests/test_iasi_l2_so2_bufr.py @@ -29,46 +29,46 @@ # bufr file distributed over EUMETCAST msg = { - 'unpack': 1, - 'inputDelayedDescriptorReplicationFactor': 5, - 'edition': 4, - 'masterTableNumber': 0, - 'bufrHeaderCentre': 254, - 'bufrHeaderSubCentre': 0, - 'updateSequenceNumber': 0, - 'dataCategory': 3, - 'internationalDataSubCategory': 255, - 'dataSubCategory': 230, - 'masterTablesVersionNumber': 31, - 'localTablesVersionNumber': 0, - 'typicalYear': 2020, - 'typicalMonth': 2, - 'typicalDay': 4, - 'typicalHour': 8, - 'typicalMinute': 59, - 'typicalSecond': 0, - 'numberOfSubsets': 120, - 'observedData': 1, - 'compressedData': 1, - 'unexpandedDescriptors': np.array([ + "unpack": 1, + "inputDelayedDescriptorReplicationFactor": 5, + "edition": 4, + "masterTableNumber": 0, + "bufrHeaderCentre": 254, + "bufrHeaderSubCentre": 0, + "updateSequenceNumber": 0, + "dataCategory": 3, + "internationalDataSubCategory": 255, + "dataSubCategory": 230, + "masterTablesVersionNumber": 31, + "localTablesVersionNumber": 0, + "typicalYear": 2020, + "typicalMonth": 2, + "typicalDay": 4, + "typicalHour": 8, + "typicalMinute": 59, + "typicalSecond": 0, + "numberOfSubsets": 120, + "observedData": 1, + "compressedData": 1, + "unexpandedDescriptors": np.array([ 1007, 1031, 25060, 2019, 2020, 4001, 4002, 4003, 4004, 4005, 4006, 5040, 201133, 5041, 201000, 5001, 6001, 5043, 7024, 5021, 7025, 5022, 7007, 40068, 7002, 15045, 12080, 102000, 31001, 7007, 15045], dtype=int), - '#1#satelliteIdentifier': 4, - '#1#centre': 254, - '#1#softwareIdentification': 605, - '#1#satelliteInstruments': 221, - '#1#satelliteClassification': 61, - 
'#1#year': 2020, - '#1#month': 2, - '#1#day': 4, - '#1#hour': 9, - '#1#minute': 1, - '#1#second': 11, - '#1#orbitNumber': 68984, - '#1#scanLineNumber': 447, - '#1#latitude': np.array([ + "#1#satelliteIdentifier": 4, + "#1#centre": 254, + "#1#softwareIdentification": 605, + "#1#satelliteInstruments": 221, + "#1#satelliteClassification": 61, + "#1#year": 2020, + "#1#month": 2, + "#1#day": 4, + "#1#hour": 9, + "#1#minute": 1, + "#1#second": 11, + "#1#orbitNumber": 68984, + "#1#scanLineNumber": 447, + "#1#latitude": np.array([ -33.4055, -33.6659, -33.738, -33.4648, -33.263, -33.5027, -33.5673, -33.3172, -33.1332, -33.3564, -33.4151, -33.1832, -33.0132, -33.2232, -33.2771, -33.0596, -32.903, -33.1021, -33.1522, -32.9466, -32.7982, -32.9884, -33.0354, -32.8395, @@ -85,7 +85,7 @@ -30.4071, -30.6153, -30.7036, -30.4967, -30.146, -30.3672, -30.4712, -30.2521, -29.8276, -30.0649, -30.1911, -29.9569, -29.4268, -29.6844, -29.8436, -29.5903]), - '#1#longitude': np.array([ + "#1#longitude": np.array([ 2.53790e+00, 2.49440e+00, 3.08690e+00, 3.12690e+00, 1.15600e+00, 1.11230e+00, 1.59640e+00, 1.63750e+00, -3.70000e-03, -4.73000e-02, 3.61900e-01, 4.03500e-01, -1.00010e+00, -1.04340e+00, -6.88300e-01, @@ -111,7 +111,7 @@ -1.59045e+01, -1.58264e+01, -1.73549e+01, -1.74460e+01, -1.69944e+01, -1.69085e+01, -1.87277e+01, -1.88302e+01, -1.82832e+01, -1.81873e+01]), - '#1#fieldOfViewNumber': np.array([ + "#1#fieldOfViewNumber": np.array([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, @@ -120,7 +120,7 @@ 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120]), - '#1#satelliteZenithAngle': np.array([ + "#1#satelliteZenithAngle": np.array([ 56.64, 56.64, 58.38, 58.37, 52.15, 52.15, 53.8, 53.79, 47.84, 47.84, 49.42, 49.42, 43.67, 43.67, 45.21, 45.2, 39.59, 39.59, 41.1, 41.09, 35.59, 35.59, 37.08, 37.07, 31.65, 31.65, 33.11, 33.1, 27.75, 27.75, 29.2, 29.19, 23.89, 23.89, 25.33, 25.32, @@ -133,7 +133,7 @@ 49.52, 49.53, 47.94, 47.94, 53.89, 53.9, 52.25, 52.25, 58.48, 58.48, 56.74, 56.75]), - '#1#bearingOrAzimuth': np.array([ + "#1#bearingOrAzimuth": np.array([ 276.93, 278.61, 278.27, 276.61, 277.64, 279.42, 279.14, 277.38, 278.22, 280.11, 279.88, 278.01, 278.69, 280.72, 280.51, 278.51, 279.09, 281.3, 281.11, 278.94, 279.41, 281.83, 281.64, 279.28, 279.68, 282.36, 282.18, 279.58, 279.88, 282.9, @@ -147,7 +147,7 @@ 107.74, 105.67, 105.47, 107.64, 108.11, 106.2, 105.99, 107.98, 108.54, 106.76, 106.53, 108.38, 109.06, 107.39, 107.14, 108.87, 109.7, 108.13, 107.83, 109.46]), - '#1#solarZenithAngle': np.array([ + "#1#solarZenithAngle": np.array([ 44.36, 44.44, 43.98, 43.89, 45.47, 45.54, 45.16, 45.08, 46.4, 46.47, 46.14, 46.07, 47.21, 47.27, 46.99, 46.92, 47.92, 47.98, 47.73, 47.67, 48.56, 48.62, 48.39, 48.33, 49.15, 49.21, 49., 48.94, 49.7, 49.75, 49.55, 49.5, 50.21, 50.26, 50.07, 50.02, @@ -160,7 +160,7 @@ 59.98, 60.04, 59.70, 59.64, 60.98, 61.05, 60.65, 60.59, 62.20, 62.27, 61.78, 61.72]), - '#1#solarAzimuth': np.array([ + "#1#solarAzimuth": np.array([ 78.89, 78.66, 78.16, 78.41, 80.00, 79.80, 79.40, 79.62, 80.92, 80.74, 80.40, 80.6, 81.69, 81.53, 81.24, 81.42, 82.36, 82.21, 81.96, 82.12, 82.96, 82.82, 82.60, 82.74, 83.49, 83.36, 83.16, 83.3, 83.98, 83.86, 83.68, 83.80, 84.43, 84.32, 84.15, 84.27, @@ -172,11 +172,11 @@ 90.58, 90.49, 90.31, 90.4, 91.09, 91., 90.81, 90.89, 91.66, 
91.57, 91.35, 91.44, 92.29, 92.20, 91.95, 92.04, 93.02, 92.93, 92.64, 92.73, 93.87, 93.79, 93.45, 93.54]), - '#1#height': 83270, - '#1#generalRetrievalQualityFlagForSo2': 9, - '#2#height': -1e+100, - '#1#sulphurDioxide': -1e+100, - '#1#brightnessTemperatureRealPart': np.array([ + "#1#height": 83270, + "#1#generalRetrievalQualityFlagForSo2": 9, + "#2#height": -1e+100, + "#1#sulphurDioxide": -1e+100, + "#1#brightnessTemperatureRealPart": np.array([ 0.11, 0.11, -0.07, 0.08, 0.13, 0.15, 0.10, 0.06, -0.02, -0.03, 0.08, 0.17, -0.05, 0.12, 0.08, -0.06, 0.15, 0.08, -0.04, -0.01, 0.06, 0.17, -0.01, 0.15, 0.18, 0.05, 0.11, -0.03, 0.09, 0.02, 0.04, 0.10, 0.00, 0.00, 0.01, 0.18, @@ -188,8 +188,8 @@ 0.08, -0.05, -0.08, 0.41, -0.19, -0.22, -0.03, 0.11, -0.26, -0.33, -0.08, 0.03, -0.05, 0.02, 0.17, -0.10, 0.01, 0.01, 0.05, 0.01, 0.15, -0.06, -0.14, 0.38]), - '#3#height': 7000, - '#2#sulphurDioxide': np.array([ + "#3#height": 7000, + "#2#sulphurDioxide": np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, @@ -209,8 +209,8 @@ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]), - '#4#height': 10000, - '#3#sulphurDioxide': np.array([ + "#4#height": 10000, + "#3#sulphurDioxide": np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, @@ -227,8 +227,8 @@ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]), - '#5#height': 13000, - '#4#sulphurDioxide': np.array([ + "#5#height": 13000, + "#4#sulphurDioxide": np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, @@ -245,8 +245,8 @@ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]), - '#6#height': 16000, - '#5#sulphurDioxide': np.array([ + "#6#height": 16000, + "#5#sulphurDioxide": np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, @@ -263,8 +263,8 @@ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]), - '#7#height': 25000, - '#6#sulphurDioxide': np.array([ + "#7#height": 25000, + "#6#sulphurDioxide": np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, @@ -283,23 +283,23 @@ } # the notional filename that would contain the above test message data -FILENAME = 
'W_XX-EUMETSAT-Darmstadt,SOUNDING+SATELLITE,METOPA+IASI_C_EUMC_20200204091455_68977_eps_o_so2_l2.bin' +FILENAME = "W_XX-EUMETSAT-Darmstadt,SOUNDING+SATELLITE,METOPA+IASI_C_EUMC_20200204091455_68977_eps_o_so2_l2.bin" # the information that would be extracted from the above filename according to the pattern in the .yaml FILENAME_INFO = { - 'reception_location': 'EUMETSAT-Darmstadt', - 'platform': 'METOPA', - 'instrument': 'IASI', - 'start_time': '20200204091455', - 'perigee': '68977', - 'species': 'so2', - 'level': 'l2' + "reception_location": "EUMETSAT-Darmstadt", + "platform": "METOPA", + "instrument": "IASI", + "start_time": "20200204091455", + "perigee": "68977", + "species": "so2", + "level": "l2" } # file type info for the above file that is defined in the .yaml FILETYPE_INFO = { - 'file_type': 'iasi_l2_so2_bufr', - 'file_reader': 'IASIL2SO2BUFR' + "file_type": "iasi_l2_so2_bufr", + "file_reader": "IASIL2SO2BUFR" } # number of cross track samples in one IASI scan @@ -314,7 +314,7 @@ def save_test_data(path): for m in [msg]: - buf = ec.codes_bufr_new_from_samples('BUFR4_local_satellite') + buf = ec.codes_bufr_new_from_samples("BUFR4_local_satellite") for key in m: val = m[key] @@ -325,7 +325,7 @@ def save_test_data(path): else: ec.codes_set_array(buf, key, val) - ec.codes_set(buf, 'pack', 1) + ec.codes_set(buf, "pack", 1) ec.codes_write(buf, f) ec.codes_release(buf) @@ -354,36 +354,36 @@ def tearDown(self): except OSError: pass - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene(self): """Test scene creation.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) - scn = Scene(reader='iasi_l2_so2_bufr', filenames=[fname]) + scn = Scene(reader="iasi_l2_so2_bufr", filenames=[fname]) assert scn.start_time is not None assert scn.end_time is not None assert scn.sensor_names - assert 'iasi' in scn.sensor_names + assert "iasi" in scn.sensor_names - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene_load_available_datasets(self): """Test that all datasets are available.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) - scn = Scene(reader='iasi_l2_so2_bufr', filenames=[fname]) + scn = Scene(reader="iasi_l2_so2_bufr", filenames=[fname]) scn.load(scn.available_dataset_names()) - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene_dataset_values(self): """Test loading data.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) - scn = Scene(reader='iasi_l2_so2_bufr', filenames=[fname]) + scn = Scene(reader="iasi_l2_so2_bufr", filenames=[fname]) for name in scn.available_dataset_names(): @@ -391,13 +391,13 @@ def test_scene_dataset_values(self): loaded_values = scn[name].values - fill_value = scn[name].attrs['fill_value'] + fill_value = scn[name].attrs["fill_value"] # replace nans in data loaded from file with the fill value defined in the .yaml # to make them comparable loaded_values_nan_filled = np.nan_to_num(loaded_values, nan=fill_value) - key = scn[name].attrs['key'] + key = scn[name].attrs["key"] original_values = msg[key] @@ -407,7 +407,7 @@ def test_scene_dataset_values(self): 
self.assertTrue(np.allclose(original_values, loaded_values_nan_filled)) - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_init(self): """Test reader initialization.""" self.assertTrue(True) diff --git a/satpy/tests/reader_tests/test_ici_l1b_nc.py b/satpy/tests/reader_tests/test_ici_l1b_nc.py index d98da24f31..96c1ac2354 100644 --- a/satpy/tests/reader_tests/test_ici_l1b_nc.py +++ b/satpy/tests/reader_tests/test_ici_l1b_nc.py @@ -49,21 +49,21 @@ def reader(fake_file): return IciL1bNCFileHandler( filename=fake_file, filename_info={ - 'sensing_start_time': ( - datetime.fromisoformat('2000-01-01T01:00:00') + "sensing_start_time": ( + datetime.fromisoformat("2000-01-01T01:00:00") ), - 'sensing_end_time': ( - datetime.fromisoformat('2000-01-01T02:00:00') + "sensing_end_time": ( + datetime.fromisoformat("2000-01-01T02:00:00") ), - 'creation_time': ( - datetime.fromisoformat('2000-01-01T03:00:00') + "creation_time": ( + datetime.fromisoformat("2000-01-01T03:00:00") ), }, filetype_info={ - 'longitude': 'data/navigation_data/longitude', - 'latitude': 'data/navigation_data/latitude', - 'solar_azimuth': 'data/navigation_data/ici_solar_azimuth_angle', - 'solar_zenith': 'data/navigation_data/ici_solar_zenith_angle', + "longitude": "data/navigation_data/longitude", + "latitude": "data/navigation_data/latitude", + "solar_azimuth": "data/navigation_data/ici_solar_azimuth_angle", + "solar_zenith": "data/navigation_data/ici_solar_zenith_angle", } ) @@ -71,7 +71,7 @@ def reader(fake_file): @pytest.fixture def fake_file(tmp_path): """Return file path to level1b file.""" - file_path = tmp_path / 'test_file_ici_l1b_nc.nc' + file_path = tmp_path / "test_file_ici_l1b_nc.nc" writer = IciL1bFakeFileWriter(file_path) writer.write() yield file_path @@ -81,13 +81,13 @@ def fake_file(tmp_path): def dataset_info(): """Return dataset info.""" return { - 'name': '1', - 'file_type': 'nc_ici_l1b_rad', - 'file_key': 'data/measurement_data/ici_radiance_183', - 'coordinates': ['lat_pixels_horn_1', 'lon_pixels_horn_1'], - 'n_183': 0, - 'chan_index': 0, - 'calibration': 'brightness_temperature', + "name": "1", + "file_type": "nc_ici_l1b_rad", + "file_key": "data/measurement_data/ici_radiance_183", + "coordinates": ["lat_pixels_horn_1", "lon_pixels_horn_1"], + "n_183": 0, + "chan_index": 0, + "calibration": "brightness_temperature", } @@ -100,10 +100,10 @@ def __init__(self, file_path): def write(self): """Write fake data to file.""" - with Dataset(self.file_path, 'w') as dataset: + with Dataset(self.file_path, "w") as dataset: self._write_attributes(dataset) self._write_quality_group(dataset) - data_group = dataset.createGroup('data') + data_group = dataset.createGroup("data") self._write_measurement_data_group(data_group) self._write_navigation_data_group(data_group) @@ -118,59 +118,59 @@ def _write_attributes(dataset): @staticmethod def _write_quality_group(dataset): """Write the quality group.""" - group = dataset.createGroup('quality') + group = dataset.createGroup("quality") group.overall_quality_flag = 0 duration_of_product = group.createVariable( - 'duration_of_product', "f4" + "duration_of_product", "f4" ) duration_of_product[:] = 1000. 
@staticmethod def _write_navigation_data_group(dataset): """Write the navigation data group.""" - group = dataset.createGroup('navigation_data') - group.createDimension('n_scan', N_SCAN) - group.createDimension('n_samples', N_SAMPLES) - group.createDimension('n_subs', N_SUBS) - group.createDimension('n_horns', N_HORNS) - subs = group.createVariable('n_subs', "i4", dimensions=('n_subs',)) + group = dataset.createGroup("navigation_data") + group.createDimension("n_scan", N_SCAN) + group.createDimension("n_samples", N_SAMPLES) + group.createDimension("n_subs", N_SUBS) + group.createDimension("n_horns", N_HORNS) + subs = group.createVariable("n_subs", "i4", dimensions=("n_subs",)) subs[:] = np.arange(N_SUBS) - dimensions = ('n_scan', 'n_subs', 'n_horns') + dimensions = ("n_scan", "n_subs", "n_horns") shape = (N_SCAN, N_SUBS, N_HORNS) longitude = group.createVariable( - 'longitude', + "longitude", np.float32, dimensions=dimensions, ) longitude[:] = np.ones(shape) latitude = group.createVariable( - 'latitude', + "latitude", np.float32, dimensions=dimensions, ) latitude[:] = 2. * np.ones(shape) azimuth = group.createVariable( - 'ici_solar_azimuth_angle', + "ici_solar_azimuth_angle", np.float32, dimensions=dimensions, ) azimuth[:] = 3. * np.ones(shape) zenith = group.createVariable( - 'ici_solar_zenith_angle', + "ici_solar_zenith_angle", np.float32, dimensions=dimensions, ) zenith[:] = 4. * np.ones(shape) - dimensions = ('n_scan', 'n_samples', 'n_horns') + dimensions = ("n_scan", "n_samples", "n_horns") shape = (N_SCAN, N_SAMPLES, N_HORNS) delta_longitude = group.createVariable( - 'delta_longitude', + "delta_longitude", np.float32, dimensions=dimensions, ) delta_longitude[:] = 1000. * np.ones(shape) delta_latitude = group.createVariable( - 'delta_latitude', + "delta_latitude", np.float32, dimensions=dimensions, ) @@ -179,35 +179,35 @@ def _write_navigation_data_group(dataset): @staticmethod def _write_measurement_data_group(dataset): """Write the measurement data group.""" - group = dataset.createGroup('measurement_data') - group.createDimension('n_scan', N_SCAN) - group.createDimension('n_samples', N_SAMPLES) - group.createDimension('n_channels', N_CHANNELS) - group.createDimension('n_183', N_183) - scan = group.createVariable('n_scan', "i4", dimensions=('n_scan',)) + group = dataset.createGroup("measurement_data") + group.createDimension("n_scan", N_SCAN) + group.createDimension("n_samples", N_SAMPLES) + group.createDimension("n_channels", N_CHANNELS) + group.createDimension("n_183", N_183) + scan = group.createVariable("n_scan", "i4", dimensions=("n_scan",)) scan[:] = np.arange(N_SCAN) samples = group.createVariable( - 'n_samples', "i4", dimensions=('n_samples',) + "n_samples", "i4", dimensions=("n_samples",) ) samples[:] = np.arange(N_SAMPLES) bt_a = group.createVariable( - 'bt_conversion_a', np.float32, dimensions=('n_channels',) + "bt_conversion_a", np.float32, dimensions=("n_channels",) ) bt_a[:] = np.ones(N_CHANNELS) bt_b = group.createVariable( - 'bt_conversion_b', np.float32, dimensions=('n_channels',) + "bt_conversion_b", np.float32, dimensions=("n_channels",) ) bt_b[:] = np.zeros(N_CHANNELS) cw = group.createVariable( - 'centre_wavenumber', np.float32, dimensions=('n_channels',) + "centre_wavenumber", np.float32, dimensions=("n_channels",) ) cw[:] = np.array( [6.0] * 3 + [8.0] * 2 + [11.0] * 3 + [15.0] * 3 + [22.0] * 2 ) ici_radiance_183 = group.createVariable( - 'ici_radiance_183', + "ici_radiance_183", np.float32, - dimensions=('n_scan', 'n_samples', 'n_183'), + dimensions=("n_scan", 
"n_samples", "n_183"), ) ici_radiance_183[:] = 0.08 * np.ones((N_SCAN, N_SAMPLES, N_183)) @@ -254,11 +254,11 @@ def test_solar_zenith(self, reader): def test_calibrate_raises_for_unknown_calibration_method(self, reader): """Test perform calibration raises for unknown calibration method.""" variable = xr.DataArray(np.ones(3)) - dataset_info = {'calibration': 'unknown', 'name': 'radiance'} - with pytest.raises(ValueError, match='Unknown calibration'): + dataset_info = {"calibration": "unknown", "name": "radiance"} + with pytest.raises(ValueError, match="Unknown calibration"): reader._calibrate(variable, dataset_info) - @patch('satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._calibrate_bt') + @patch("satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._calibrate_bt") def test_calibrate_does_not_call_calibrate_bt_if_not_needed( self, mocked_calibrate, @@ -270,13 +270,13 @@ def test_calibrate_does_not_call_calibrate_bt_if_not_needed( [0.060, 0.065, 0.070, 0.075], [0.080, 0.085, 0.090, 0.095], ]), - dims=('n_scan', 'n_samples'), + dims=("n_scan", "n_samples"), ) - dataset_info = {'calibration': 'radiance'} + dataset_info = {"calibration": "radiance"} reader._calibrate(variable, dataset_info) mocked_calibrate.assert_not_called() - @patch('satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._calibrate_bt') + @patch("satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._calibrate_bt") def test_calibrate_calls_calibrate_bt( self, mocked_calibrate_bt, @@ -288,11 +288,11 @@ def test_calibrate_calls_calibrate_bt( [0.060, 0.065, 0.070, 0.075], [0.080, 0.085, 0.090, 0.095], ]), - dims=('n_scan', 'n_samples'), + dims=("n_scan", "n_samples"), ) dataset_info = { - 'calibration': 'brightness_temperature', - 'chan_index': 2, + "calibration": "brightness_temperature", + "chan_index": 2, } reader._calibrate(variable, dataset_info) mocked_calibrate_bt.assert_called_once_with( @@ -320,9 +320,9 @@ def test_calibrate_bt(self, reader): ]) np.testing.assert_allclose(bt, expected_bt) - @pytest.mark.parametrize('dims', ( - ('n_scan', 'n_samples'), - ('x', 'y'), + @pytest.mark.parametrize("dims", ( + ("n_scan", "n_samples"), + ("x", "y"), )) def test_standardize_dims(self, reader, dims): """Test standardize dims.""" @@ -331,11 +331,11 @@ def test_standardize_dims(self, reader, dims): dims=dims, ) standardized = reader._standardize_dims(variable) - assert standardized.dims == ('y', 'x') + assert standardized.dims == ("y", "x") - @pytest.mark.parametrize('dims,data_info,expect', ( - (('y', 'x', 'n_horns'), {"n_horns": 1}, 1), - (('y', 'x', 'n_183'), {"n_183": 2}, 2), + @pytest.mark.parametrize("dims,data_info,expect", ( + (("y", "x", "n_horns"), {"n_horns": 1}, 1), + (("y", "x", "n_183"), {"n_183": 2}, 2), )) def test_filter_variable(self, reader, dims, data_info, expect): """Test filter variable.""" @@ -345,7 +345,7 @@ def test_filter_variable(self, reader, dims, data_info, expect): dims=dims, ) filtered = reader._filter_variable(variable, data_info) - assert filtered.dims == ('y', 'x') + assert filtered.dims == ("y", "x") assert (filtered == data[:, :, expect]).all() def test_drop_coords(self, reader): @@ -353,7 +353,7 @@ def test_drop_coords(self, reader): coords = "dummy" data = xr.DataArray( np.ones(10), - dims=('y'), + dims=("y"), coords={coords: 0}, ) assert coords in data.coords @@ -362,22 +362,22 @@ def test_drop_coords(self, reader): def test_get_third_dimension_name(self, reader): """Test get third dimension name.""" - data = xr.DataArray(np.ones((1, 1, 1)), dims=('x', 'y', 'z')) - assert reader._get_third_dimension_name(data) == 'z' + 
data = xr.DataArray(np.ones((1, 1, 1)), dims=("x", "y", "z")) + assert reader._get_third_dimension_name(data) == "z" def test_get_third_dimension_name_return_none_for_2d_data(self, reader): """Test get third dimension name return none for 2d data.""" - data = xr.DataArray(np.ones((1, 1)), dims=('x', 'y')) + data = xr.DataArray(np.ones((1, 1)), dims=("x", "y")) assert reader._get_third_dimension_name(data) is None def test_get_dataset_return_none_if_data_not_exist(self, reader): """Test get dataset return none if data does not exist.""" - dataset_id = {'name': 'unknown'} - dataset_info = {'file_key': 'non/existing/data'} + dataset_id = {"name": "unknown"} + dataset_info = {"file_key": "non/existing/data"} dataset = reader.get_dataset(dataset_id, dataset_info) assert dataset is None - @patch('satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._calibrate_bt') + @patch("satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._calibrate_bt") def test_get_dataset_does_not_calibrate_if_not_desired( self, mocked_calibrate, @@ -385,10 +385,10 @@ def test_get_dataset_does_not_calibrate_if_not_desired( dataset_info, ): """Test get dataset does not calibrate if not desired.""" - dataset_id = {'name': '1'} - dataset_info.pop('calibration') + dataset_id = {"name": "1"} + dataset_info.pop("calibration") dataset = reader.get_dataset(dataset_id, dataset_info) - assert dataset.dims == ('y', 'x') + assert dataset.dims == ("y", "x") mocked_calibrate.assert_not_called() assert isinstance(dataset, xr.DataArray) @@ -397,15 +397,15 @@ def test_get_dataset_orthorectifies_if_orthorect_data_defined( reader, ): """Test get dataset orthorectifies if orthorect data is defined.""" - dataset_id = {'name': 'lon_pixels_horn_1'} + dataset_id = {"name": "lon_pixels_horn_1"} dataset_info = { - 'name': 'lon_pixels_horn_1', - 'file_type': 'nc_ici_l1b_rad', - 'file_key': 'longitude', - 'orthorect_data': 'data/navigation_data/delta_longitude', - 'standard_name': 'longitude', - 'n_horns': 0, - 'modifiers': (), + "name": "lon_pixels_horn_1", + "file_type": "nc_ici_l1b_rad", + "file_key": "longitude", + "orthorect_data": "data/navigation_data/delta_longitude", + "standard_name": "longitude", + "n_horns": 0, + "modifiers": (), } dataset = reader.get_dataset(dataset_id, dataset_info) np.testing.assert_allclose(dataset, 1.009139, atol=1e-6) @@ -416,7 +416,7 @@ def test_get_dataset_handles_calibration( self, dataset_info, ): """Test get dataset handles calibration.""" - dataset_id = {'name': '1'} + dataset_id = {"name": "1"} dataset = reader.get_dataset(dataset_id, dataset_info) assert dataset.attrs["calibration"] == "brightness_temperature" np.testing.assert_allclose(dataset, 272.73734) @@ -428,13 +428,13 @@ def test_interpolate_returns_none_if_dataset_not_exist(self, reader): ) assert azimuth is None and zenith is None - @patch('satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._interpolate_geo') + @patch("satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._interpolate_geo") def test_interpolate_calls_interpolate_geo(self, mock, reader): """Test interpolate calls interpolate_geo.""" reader._interpolate(InterpolationType.LONLAT) mock.assert_called_once() - @patch('satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._interpolate_viewing_angle') # noqa: E501 + @patch("satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._interpolate_viewing_angle") # noqa: E501 def test_interpolate_calls_interpolate_viewing_angles(self, mock, reader): """Test interpolate calls interpolate viewing_angles.""" reader._interpolate(InterpolationType.SOLAR_ANGLES) @@ -443,13 +443,13 @@ def 
test_interpolate_calls_interpolate_viewing_angles(self, mock, reader): def test_interpolate_geo(self, reader): """Test interpolate geographic coordinates.""" shape = (N_SCAN, N_SUBS, N_HORNS) - dims = ('n_scan', 'n_subs', 'n_horns') + dims = ("n_scan", "n_subs", "n_horns") longitude = xr.DataArray( 2. * np.ones(shape), dims=dims, coords={ - 'n_horns': np.arange(N_HORNS), - 'n_subs': np.arange(N_SUBS), + "n_horns": np.arange(N_HORNS), + "n_subs": np.arange(N_SUBS), }, ) latitude = xr.DataArray(np.ones(shape), dims=dims) @@ -467,13 +467,13 @@ def test_interpolate_geo(self, reader): def test_interpolate_viewing_angle(self, reader): """Test interpolate viewing angle.""" shape = (N_SCAN, N_SUBS, N_HORNS) - dims = ('n_scan', 'n_subs', 'n_horns') + dims = ("n_scan", "n_subs", "n_horns") azimuth = xr.DataArray( np.ones(shape), dims=dims, coords={ - 'n_horns': np.arange(N_HORNS), - 'n_subs': np.arange(N_SUBS), + "n_horns": np.arange(N_HORNS), + "n_subs": np.arange(N_SUBS), }, ) zenith = xr.DataArray(100. * np.ones(shape), dims=dims) @@ -492,11 +492,11 @@ def test_orthorectify(self, reader): """Test orthorectify.""" variable = xr.DataArray( np.ones((N_SCAN, N_SAMPLES, N_HORNS)), - dims=('y', 'x', 'n_horns'), - coords={'n_horns': np.arange(N_HORNS)} + dims=("y", "x", "n_horns"), + coords={"n_horns": np.arange(N_HORNS)} ) - variable = variable.sel({'n_horns': 0}) - orthorect_data_name = 'data/navigation_data/delta_longitude' + variable = variable.sel({"n_horns": 0}) + orthorect_data_name = "data/navigation_data/delta_longitude" orthorectified = reader._orthorectify( variable, orthorect_data_name, @@ -507,18 +507,18 @@ def test_get_global_attributes(self, reader): """Test get global attributes.""" attributes = reader._get_global_attributes() assert attributes == { - 'filename': reader.filename, - 'start_time': datetime(2000, 1, 2, 3, 4, 5), - 'end_time': datetime(2000, 1, 2, 4, 5, 6), - 'spacecraft_name': 'SGB', - 'ssp_lon': None, - 'sensor': 'ICI', - 'filename_start_time': datetime(2000, 1, 1, 1, 0), - 'filename_end_time': datetime(2000, 1, 1, 2, 0), - 'platform_name': 'SGB', - 'quality_group': { - 'duration_of_product': np.array(1000., dtype=np.float32), - 'overall_quality_flag': 0, + "filename": reader.filename, + "start_time": datetime(2000, 1, 2, 3, 4, 5), + "end_time": datetime(2000, 1, 2, 4, 5, 6), + "spacecraft_name": "SGB", + "ssp_lon": None, + "sensor": "ICI", + "filename_start_time": datetime(2000, 1, 1, 1, 0), + "filename_end_time": datetime(2000, 1, 1, 2, 0), + "platform_name": "SGB", + "quality_group": { + "duration_of_product": np.array(1000., dtype=np.float32), + "overall_quality_flag": 0, } } @@ -526,12 +526,12 @@ def test_get_quality_attributes(self, reader): """Test get quality attributes.""" attributes = reader._get_quality_attributes() assert attributes == { - 'duration_of_product': np.array(1000., dtype=np.float32), - 'overall_quality_flag': 0, + "duration_of_product": np.array(1000., dtype=np.float32), + "overall_quality_flag": 0, } @patch( - 'satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._get_global_attributes', + "satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._get_global_attributes", return_value={"mocked_global_attributes": True}, ) def test_manage_attributes(self, mock, reader): @@ -540,11 +540,11 @@ def test_manage_attributes(self, mock, reader): np.ones(N_SCAN), attrs={"season": "summer"}, ) - dataset_info = {'name': 'ici_1', 'units': 'K'} + dataset_info = {"name": "ici_1", "units": "K"} variable = reader._manage_attributes(variable, dataset_info) assert variable.attrs == { - 
'season': 'summer', - 'units': 'K', - 'name': 'ici_1', - 'mocked_global_attributes': True, + "season": "summer", + "units": "K", + "name": "ici_1", + "mocked_global_attributes": True, } diff --git a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py index 1c22ef515d..26c2a9a7f8 100644 --- a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py +++ b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py @@ -228,7 +228,7 @@ def test_filehandler_returns_masked_data_in_space(insat_filehandler): fh = insat_filehandler ds_info = None - ds_id = make_dataid(name="VIS", resolution=1000, calibration='reflectance') + ds_id = make_dataid(name="VIS", resolution=1000, calibration="reflectance") darr = fh.get_dataset(ds_id, ds_info) assert np.isnan(darr[0, 0]) @@ -238,7 +238,7 @@ def test_insat3d_has_orbital_parameters(insat_filehandler): fh = insat_filehandler ds_info = None - ds_id = make_dataid(name="VIS", resolution=1000, calibration='reflectance') + ds_id = make_dataid(name="VIS", resolution=1000, calibration="reflectance") darr = fh.get_dataset(ds_id, ds_info) assert "orbital_parameters" in darr.attrs diff --git a/satpy/tests/reader_tests/test_li_l2_nc.py b/satpy/tests/reader_tests/test_li_l2_nc.py index 40042aa1de..62eff6d18f 100644 --- a/satpy/tests/reader_tests/test_li_l2_nc.py +++ b/satpy/tests/reader_tests/test_li_l2_nc.py @@ -43,7 +43,7 @@ def std_filetype_infos(): cfg = load_yaml_configs(cpaths[0]) # get the li_l2 filetype: - ftypes = cfg['file_types'] + ftypes = cfg["file_types"] yield ftypes @@ -69,31 +69,31 @@ def _test_dataset_single_variable(self, vname, desc, settings, handler): """Check the validity of a given variable.""" dname = vname - dims = settings.get('dimensions', {}) + dims = settings.get("dimensions", {}) - var_path = settings.get('variable_path', '') + var_path = settings.get("variable_path", "") # Compute shape from dimensions: - if desc['shape'] == (): + if desc["shape"] == (): # scalar case, dim should have been added in the code by validate_array_dimensions shape = (1,) else: - shape = tuple([dims[dim_name] for dim_name in desc['shape']]) + shape = tuple([dims[dim_name] for dim_name in desc["shape"]]) dataset_info = { - 'name': dname, - 'variable_name': vname, - 'use_rescaling': False, + "name": dname, + "variable_name": vname, + "use_rescaling": False, } var_params = [dataset_info, desc, dname, handler, shape, var_path] self._test_dataset_variable(var_params) def _test_dataset_variables(self, settings, ds_desc, handler): """Check the loading of the non in sector variables.""" - assert 'variables' in ds_desc - all_vars = ds_desc['variables'] + assert "variables" in ds_desc + all_vars = ds_desc["variables"] - variables = settings.get('variables') + variables = settings.get("variables") for vname, desc in variables.items(): # variable should be in list of dataset: assert vname in all_vars @@ -105,17 +105,17 @@ def _test_dataset_single_sector_variable(self, names, desc, settings, handler): dname = f"{vname}_{sname}_sector" - dims = settings.get('dimensions', {}) + dims = settings.get("dimensions", {}) - var_path = settings.get('variable_path', '') + var_path = settings.get("variable_path", "") - shape = tuple([dims[dim_name] for dim_name in desc['shape']]) + shape = tuple([dims[dim_name] for dim_name in desc["shape"]]) dataset_info = { - 'name': dname, - 'variable_name': vname, - 'sector_name': sname, - 'use_rescaling': False, + "name": dname, + "variable_name": vname, + "sector_name": sname, + "use_rescaling": False, } 
var_params = [dataset_info, desc, vname, handler, shape, var_path] self._test_dataset_variable(var_params, sname=sname) @@ -125,7 +125,7 @@ def _test_dataset_variable(self, var_params, sname=""): dataset_info, desc, dname, handler, shape, var_path = var_params res = self.get_variable_dataset(dataset_info, dname, handler) assert res.shape == shape - assert res.dims[0] == 'y' + assert res.dims[0] == "y" # Should retrieve content with fullname key: full_name = self.create_fullname_key(desc, var_path, dname, sname=sname) # Note: 'content' is not recognized as a valid member of the class below @@ -140,23 +140,23 @@ def get_variable_dataset(self, dataset_info, dname, handler): res = handler.get_dataset(dataset_id, dataset_info) return res - def create_fullname_key(self, desc, var_path, vname, sname=''): + def create_fullname_key(self, desc, var_path, vname, sname=""): """Create full name key for sector/non-sector content retrieval.""" - vpath = desc.get('path', var_path) - if vpath != "" and vpath[-1] != '/': - vpath += '/' + vpath = desc.get("path", var_path) + if vpath != "" and vpath[-1] != "/": + vpath += "/" if sname != "": - sname += '/' + sname += "/" full_name = f"{vpath}{sname}{vname}" return full_name def _test_dataset_sector_variables(self, settings, ds_desc, handler): """Check the loading of the in sector variables.""" - sector_vars = settings.get('sector_variables') - sectors = settings.get('sectors', ['north', 'east', 'south', 'west']) + sector_vars = settings.get("sector_variables") + sectors = settings.get("sectors", ["north", "east", "south", "west"]) - assert 'sector_variables' in ds_desc - all_vars = ds_desc['sector_variables'] + assert "sector_variables" in ds_desc + all_vars = ds_desc["sector_variables"] for sname in sectors: for vname, desc in sector_vars.items(): @@ -168,33 +168,33 @@ def test_dataset_loading(self, filetype_infos): """Test loading of all datasets from all products.""" # Iterate on all the available product types: for ptype, pinfo in products_dict.items(): - ftype = pinfo['ftype'] + ftype = pinfo["ftype"] filename_info = { - 'start_time': "0000", - 'end_time': "1000" + "start_time": "0000", + "end_time": "1000" } - handler = LIL2NCFileHandler('filename', filename_info, extract_filetype_info(filetype_infos, ftype)) + handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, ftype)) ds_desc = handler.ds_desc # retrieve the schema that was used to generate the content for that product: settings = get_product_schema(ptype) # Now we check all the variables are available: - if 'variables' in settings: + if "variables" in settings: self._test_dataset_variables(settings, ds_desc, handler) # check the sector variables: - if 'sector_variables' in settings: + if "sector_variables" in settings: self._test_dataset_sector_variables(settings, ds_desc, handler) def test_unregistered_dataset_loading(self, filetype_infos): """Test loading of an unregistered dataset.""" # Iterate on all the available product types: - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_af_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc")) - dataset_id = make_dataid(name='test_dataset') + dataset_id = make_dataid(name="test_dataset") with pytest.raises(KeyError): handler.get_dataset(dataset_id) @@ -202,22 +202,22 @@ def test_dataset_not_in_provided_dataset(self, filetype_infos): """Test loading of a dataset that is not provided.""" # Iterate on all the available 
product types: - dataset_dict = {'name': 'test_dataset'} + dataset_dict = {"name": "test_dataset"} - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_af_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc")) - dataset_id = make_dataid(name='test_dataset') + dataset_id = make_dataid(name="test_dataset") assert handler.get_dataset(dataset_id, ds_info=dataset_dict) is None def test_filename_infos(self, filetype_infos): """Test settings retrieved from filename.""" filename_info = { - 'start_time': "20101112131415", - 'end_time': "20101112131416" + "start_time": "20101112131415", + "end_time": "20101112131416" } - handler = LIL2NCFileHandler('filename', filename_info, extract_filetype_info(filetype_infos, 'li_l2_af_nc')) + handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, "li_l2_af_nc")) # Start and end time should come from filename info: assert handler.start_time == "20101112131415" @@ -236,19 +236,19 @@ def test_filename_infos(self, filetype_infos): assert len(handler.provided_datasets) > 0 # Sensor names should be just 'li' - assert handler.sensor_names == {'li'} + assert handler.sensor_names == {"li"} # check product type: - assert handler.product_type == '2-AF' + assert handler.product_type == "2-AF" def test_var_path_exists(self, filetype_infos): """Test variable_path_exists from li reader.""" filename_info = { - 'start_time': "20101112131415", - 'end_time': "20101112131416", + "start_time": "20101112131415", + "end_time": "20101112131416", } - handler = LIL2NCFileHandler('filename', filename_info, extract_filetype_info(filetype_infos, 'li_l2_lef_nc')) + handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, "li_l2_lef_nc")) # Check variable paths: assert handler.variable_path_exists("dummy") is False @@ -265,11 +265,11 @@ def test_var_path_exists(self, filetype_infos): def test_get_first_valid_variable(self, filetype_infos): """Test get_first_valid_variable from li reader.""" filename_info = { - 'start_time': "20101112131415", - 'end_time': "20101112131416", + "start_time": "20101112131415", + "end_time": "20101112131416", } - handler = LIL2NCFileHandler('filename', filename_info, extract_filetype_info(filetype_infos, 'li_l2_lef_nc')) + handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, "li_l2_lef_nc")) # Check variable paths: var1 = handler.get_first_valid_variable(["dummy/path", "data/north/event_id"]) @@ -311,30 +311,30 @@ def test_get_first_valid_variable(self, filetype_infos): assert id(meas2) == id(var3) # We should have a fill value on those variables: - assert var1.attrs.get('_FillValue') == 65535 - assert var2.attrs.get('_FillValue') == 65535 + assert var1.attrs.get("_FillValue") == 65535 + assert var2.attrs.get("_FillValue") == 65535 def test_get_first_valid_variable_not_found(self, filetype_infos): """Test get_first_valid_variable from li reader if the variable is not found.""" filename_info = { - 'start_time': "20101112131415", - 'end_time': "20101112131416", + "start_time": "20101112131415", + "end_time": "20101112131416", } - handler = LIL2NCFileHandler('filename', filename_info, extract_filetype_info(filetype_infos, 'li_l2_lef_nc')) + handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, "li_l2_lef_nc")) with pytest.raises(KeyError): handler.get_first_valid_variable(["dummy/path", "data/test/test_var"]) def 
test_available_datasets(self, filetype_infos): """Test available_datasets from li reader.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_lef_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_lef_nc")) # get current ds_infos. These should all be returned by the available_datasets ds_infos_to_compare = handler.dataset_infos.copy() # now add a dummy configured dataset to make sure that it is included in the available_datasets output - ds_info_dummy = {'test': 'test'} + ds_info_dummy = {"test": "test"} conf_ds_dummy = [(True, ds_info_dummy)] ds_infos_to_compare.insert(0, ds_info_dummy) @@ -343,11 +343,11 @@ def test_available_datasets(self, filetype_infos): def test_variable_scaling(self, filetype_infos): """Test automatic rescaling with offset and scale attributes.""" filename_info = { - 'start_time': "20101112131415", - 'end_time': "20101112131416" + "start_time": "20101112131415", + "end_time": "20101112131416" } - handler = LIL2NCFileHandler('filename', filename_info, extract_filetype_info(filetype_infos, 'li_l2_lfl_nc')) + handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, "li_l2_lfl_nc")) # Get the raw variable without rescaling: vname = "latitude" @@ -355,9 +355,9 @@ def test_variable_scaling(self, filetype_infos): # Get the dataset without rescaling: dataset_info = { - 'name': vname, - 'variable_name': vname, - 'use_rescaling': False, + "name": vname, + "variable_name": vname, + "use_rescaling": False, } dataset_id = make_dataid(name=vname) @@ -365,7 +365,7 @@ def test_variable_scaling(self, filetype_infos): assert np.all(lat_noscale.values == rawlat) # Now get the dataset with scaling: - dataset_info['use_rescaling'] = True + dataset_info["use_rescaling"] = True lat_scaled = handler.get_dataset(dataset_id, dataset_info) # By default we write data in the ranges [-88.3/0.0027, 88.3/0.0027] for latitude and longitude: @@ -374,12 +374,12 @@ def test_variable_scaling(self, filetype_infos): def test_swath_coordinates(self, filetype_infos): """Test that swath coordinates are used correctly to assign coordinates to some datasets.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_lfl_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_lfl_nc")) # Check latitude: dsid = make_dataid(name="latitude") dset = handler.get_dataset(dsid) - assert 'coordinates' not in dset.attrs + assert "coordinates" not in dset.attrs # get_area_def should raise exception: with pytest.raises(NotImplementedError): @@ -388,21 +388,21 @@ def test_swath_coordinates(self, filetype_infos): # Check radiance: dsid = make_dataid(name="radiance") dset = handler.get_dataset(dsid) - assert 'coordinates' in dset.attrs - assert dset.attrs['coordinates'][0] == "longitude" - assert dset.attrs['coordinates'][1] == "latitude" + assert "coordinates" in dset.attrs + assert dset.attrs["coordinates"][0] == "longitude" + assert dset.attrs["coordinates"][1] == "latitude" with pytest.raises(NotImplementedError): handler.get_area_def(dsid) def test_report_datetimes(self, filetype_infos): """Should report time variables as numpy datetime64 type and time durations as timedelta64.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_le_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_le_nc")) # Check epoch_time: dsid = 
make_dataid(name="epoch_time_north_sector") dset = handler.get_dataset(dsid) - assert dset.values.dtype == np.dtype('datetime64[ns]') + assert dset.values.dtype == np.dtype("datetime64[ns]") # The default epoch_time should be 1.234 seconds after epoch: ref_time = np.datetime64(datetime(2000, 1, 1, 0, 0, 1, 234000)) @@ -411,14 +411,14 @@ def test_report_datetimes(self, filetype_infos): # Check time_offset: dsid = make_dataid(name="time_offset_east_sector") dset = handler.get_dataset(dsid) - assert dset.values.dtype == np.dtype('timedelta64[ns]') + assert dset.values.dtype == np.dtype("timedelta64[ns]") # The default time_offset should be: np.linspace(0.0, 1000.0, nobs) # but then we first multiply by 1e6 to generate us times: # Note that below no automatic transform to np.float64 is happening: nobs = dset.shape[0] ref_data = np.linspace(0.0, 1000.0, nobs).astype(np.float32) - ref_data = (ref_data * 1e9).astype('timedelta64[ns]') + ref_data = (ref_data * 1e9).astype("timedelta64[ns]") # And not absolutely sure why, but we always get the timedelta in ns from the dataset: # ref_data = (ref_data).astype('timedelta64[ns]') @@ -427,33 +427,33 @@ def test_report_datetimes(self, filetype_infos): def test_milliseconds_to_timedelta(self, filetype_infos): """Should covert milliseconds to timedelta.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_lfl_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_lfl_nc")) # Check flash_duration: dsid = make_dataid(name="flash_duration") dset = handler.get_dataset(dsid) - assert dset.values.dtype == np.dtype('timedelta64[ns]') + assert dset.values.dtype == np.dtype("timedelta64[ns]") nobs = dset.shape[0] - ref_data = np.linspace(0, 1000, nobs).astype('u2') - ref_data = (ref_data * 1e6).astype('timedelta64[ns]') + ref_data = np.linspace(0, 1000, nobs).astype("u2") + ref_data = (ref_data * 1e6).astype("timedelta64[ns]") assert np.all(dset.values == ref_data) def test_apply_accumulate_index_offset(self, filetype_infos): """Should accumulate index offsets.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_le_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_le_nc")) # Check time offset: dsid = make_dataid(name="l1b_chunk_offsets_north_sector") dset = handler.get_dataset(dsid) nobs = dset.shape[0] - ref_data = (np.arange(nobs)).astype('u4') + ref_data = (np.arange(nobs)).astype("u4") # check first execution without offset assert np.all(dset.values == ref_data) # check that the offset is being stored - assert handler.current_ds_info['__index_offset'] == 123 + assert handler.current_ds_info["__index_offset"] == 123 # check execution with offset value # this simulates the case where we are loading this variable from multiple files and concatenating it @@ -462,62 +462,62 @@ def test_apply_accumulate_index_offset(self, filetype_infos): def test_combine_info(self, filetype_infos): """Test overridden combine_info.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_le_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_le_nc")) # get a dataset including the index_offset in the ds_info dsid = make_dataid(name="l1b_chunk_offsets_north_sector") - ds_info = {'name': 'l1b_chunk_offsets_north_sector', - 'variable_name': 'l1b_chunk_offsets', - 'sector_name': 'north', - '__index_offset': 1000, - 
'accumulate_index_offset': "{sector_name}/l1b_window"} + ds_info = {"name": "l1b_chunk_offsets_north_sector", + "variable_name": "l1b_chunk_offsets", + "sector_name": "north", + "__index_offset": 1000, + "accumulate_index_offset": "{sector_name}/l1b_window"} dset = handler.get_dataset(dsid, ds_info=ds_info) handler.combine_info([dset.attrs]) # combine_info should have removed the index_offset key from the ds_info passed to get_dataset - assert '__index_offset' not in ds_info + assert "__index_offset" not in ds_info # and reset the current_ds_info dict, in order to avoid failures if we call combine_info again assert handler.current_ds_info is None def test_coordinates_projection(self, filetype_infos): """Should automatically generate lat/lon coords from projection data.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_af_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc")) dsid = make_dataid(name="flash_accumulation") dset = handler.get_dataset(dsid) - assert 'coordinates' in dset.attrs + assert "coordinates" in dset.attrs - assert dset.attrs['coordinates'][0] == "longitude" - assert dset.attrs['coordinates'][1] == "latitude" + assert dset.attrs["coordinates"][0] == "longitude" + assert dset.attrs["coordinates"][1] == "latitude" with pytest.raises(NotImplementedError): handler.get_area_def(dsid) - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_afr_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_afr_nc")) dsid = make_dataid(name="flash_radiance") dset = handler.get_dataset(dsid) - assert 'coordinates' in dset.attrs + assert "coordinates" in dset.attrs - assert dset.attrs['coordinates'][0] == "longitude" - assert dset.attrs['coordinates'][1] == "latitude" + assert dset.attrs["coordinates"][0] == "longitude" + assert dset.attrs["coordinates"][1] == "latitude" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_afa_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_afa_nc")) dsid = make_dataid(name="accumulated_flash_area") dset = handler.get_dataset(dsid) - assert 'coordinates' in dset.attrs + assert "coordinates" in dset.attrs - assert dset.attrs['coordinates'][0] == "longitude" - assert dset.attrs['coordinates'][1] == "latitude" + assert dset.attrs["coordinates"][0] == "longitude" + assert dset.attrs["coordinates"][1] == "latitude" def test_generate_coords_on_accumulated_prods(self, filetype_infos): """Test daskified generation of coords.""" - accumulated_products = ['li_l2_af_nc', 'li_l2_afr_nc', 'li_l2_afa_nc'] - coordinate_datasets = ['longitude', 'latitude'] + accumulated_products = ["li_l2_af_nc", "li_l2_afr_nc", "li_l2_afa_nc"] + coordinate_datasets = ["longitude", "latitude"] for accum_prod in accumulated_products: for ds_name in coordinate_datasets: - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, accum_prod)) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, accum_prod)) dsid = make_dataid(name=ds_name) dset = handler.get_dataset(dsid) # Check dataset type @@ -527,12 +527,12 @@ def test_generate_coords_on_accumulated_prods(self, filetype_infos): def test_generate_coords_on_lon_lat(self, filetype_infos): """Test getting lon/lat dataset on accumulated product.""" - accumulated_products = ['li_l2_af_nc', 'li_l2_afr_nc', 'li_l2_afa_nc'] - 
coordinate_datasets = ['longitude', 'latitude'] + accumulated_products = ["li_l2_af_nc", "li_l2_afr_nc", "li_l2_afa_nc"] + coordinate_datasets = ["longitude", "latitude"] for accum_prod in accumulated_products: for ds_name in coordinate_datasets: - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, accum_prod)) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, accum_prod)) dsid = make_dataid(name=ds_name) handler.generate_coords_from_scan_angles = mock.MagicMock( side_effect=handler.generate_coords_from_scan_angles) @@ -541,12 +541,12 @@ def test_generate_coords_on_lon_lat(self, filetype_infos): def test_generate_coords_inverse_proj(self, filetype_infos): """Test inverse_projection execution delayed until .values is called on the dataset.""" - accumulated_products = ['li_l2_af_nc', 'li_l2_afr_nc', 'li_l2_afa_nc'] - coordinate_datasets = ['longitude', 'latitude'] + accumulated_products = ["li_l2_af_nc", "li_l2_afr_nc", "li_l2_afa_nc"] + coordinate_datasets = ["longitude", "latitude"] for accum_prod in accumulated_products: for ds_name in coordinate_datasets: - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, accum_prod)) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, accum_prod)) dsid = make_dataid(name=ds_name) handler.inverse_projection = mock.MagicMock(side_effect=handler.inverse_projection) dset = handler.get_dataset(dsid) @@ -557,17 +557,17 @@ def test_generate_coords_inverse_proj(self, filetype_infos): def test_generate_coords_not_called_on_non_coord_dataset(self, filetype_infos): """Test that the method is not called when getting non-coord dataset.""" - handler = self.generate_coords(filetype_infos, 'li_l2_af_nc', 'flash_accumulation') + handler = self.generate_coords(filetype_infos, "li_l2_af_nc", "flash_accumulation") assert not handler.generate_coords_from_scan_angles.called def test_generate_coords_not_called_on_non_accum_dataset(self, filetype_infos): """Test that the method is not called when getting non-accum dataset.""" - handler = self.generate_coords(filetype_infos, 'li_l2_lef_nc', 'latitude_north_sector') + handler = self.generate_coords(filetype_infos, "li_l2_lef_nc", "latitude_north_sector") assert not handler.generate_coords_from_scan_angles.called def generate_coords(self, filetype_infos, file_type_name, variable_name): """Generate file handler and mimic coordinate generator call.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, file_type_name)) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, file_type_name)) dsid = make_dataid(name=variable_name) handler.generate_coords_from_scan_angles = mock.MagicMock( side_effect=handler.generate_coords_from_scan_angles) @@ -576,10 +576,10 @@ def generate_coords(self, filetype_infos, file_type_name, variable_name): def test_generate_coords_called_once(self, filetype_infos): """Test that the method is called only once.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_af_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc")) # check internal variable is empty assert len(handler.internal_variables) == 0 - coordinate_datasets = ['longitude', 'latitude'] + coordinate_datasets = ["longitude", "latitude"] handler.generate_coords_from_scan_angles = mock.MagicMock(side_effect=handler.generate_coords_from_scan_angles) for ds_name in 
coordinate_datasets: @@ -593,34 +593,34 @@ def test_generate_coords_called_once(self, filetype_infos): def test_coords_generation(self, filetype_infos): """Compare daskified coords generation results with non-daskified.""" # Prepare dummy (but somewhat realistic) arrays of azimuth/elevation values. - products = ['li_l2_af_nc', - 'li_l2_afr_nc', - 'li_l2_afa_nc'] + products = ["li_l2_af_nc", + "li_l2_afr_nc", + "li_l2_afa_nc"] for prod in products: - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, prod)) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, prod)) # Get azimuth/elevation arrays from handler - azimuth = handler.get_measured_variable(handler.swath_coordinates['azimuth']) + azimuth = handler.get_measured_variable(handler.swath_coordinates["azimuth"]) azimuth = handler.apply_use_rescaling(azimuth) - elevation = handler.get_measured_variable(handler.swath_coordinates['elevation']) + elevation = handler.get_measured_variable(handler.swath_coordinates["elevation"]) elevation = handler.apply_use_rescaling(elevation) # Initialize proj_dict - proj_var = handler.swath_coordinates['projection'] + proj_var = handler.swath_coordinates["projection"] geos_proj = handler.get_measured_variable(proj_var, fill_value=None) major_axis = float(geos_proj.attrs["semi_major_axis"]) point_height = 35786400.0 # float(geos_proj.attrs["perspective_point_height"]) inv_flattening = float(geos_proj.attrs["inverse_flattening"]) lon_0 = float(geos_proj.attrs["longitude_of_projection_origin"]) sweep = str(geos_proj.attrs["sweep_angle_axis"]) - proj_dict = {'a': major_axis, - 'lon_0': lon_0, - 'h': point_height, + proj_dict = {"a": major_axis, + "lon_0": lon_0, + "h": point_height, "rf": inv_flattening, - 'proj': 'geos', - 'units': 'm', + "proj": "geos", + "units": "m", "sweep": sweep} # Compute reference values @@ -633,8 +633,8 @@ def test_coords_generation(self, filetype_infos): lat_ref = lat_ref.astype(np.float32) handler.generate_coords_from_scan_angles() - lon = handler.internal_variables['longitude'].values - lat = handler.internal_variables['latitude'].values + lon = handler.internal_variables["longitude"].values + lat = handler.internal_variables["latitude"].values # Compare the arrays, should be the same: np.testing.assert_equal(lon, lon_ref) @@ -642,7 +642,7 @@ def test_get_area_def_acc_products(self, filetype_infos): """Test retrieval of area def for accumulated products.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_af_nc'), + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), with_area_definition=True) dsid = make_dataid(name="flash_accumulation") @@ -656,7 +656,7 @@ def test_get_area_def_non_acc_products(self, filetype_infos): """Test retrieval of area def for non-accumulated products.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_lgr_nc'), + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_lgr_nc"), with_area_definition=True) # Should throw for non-accum products: with pytest.raises(NotImplementedError): @@ -672,9 +672,9 @@ def write_flash_accum(_vname, _ocname, _settings): # We return the settings we want to use here to generate our custom/fixed product content: return { - 'num_obs': 1234, - 'providers': { - 'flash_accumulation': 
write_flash_accum, + "num_obs": 1234, + "providers": { + "flash_accumulation": write_flash_accum, } } @@ -682,7 +682,7 @@ def test_without_area_def(self, filetype_infos): """Test accumulated products data array without area definition.""" # without area definition handler_without_area_def = LIL2NCFileHandler( - 'filename', {}, extract_filetype_info(filetype_infos, 'li_l2_af_nc'), with_area_definition=False) + "filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), with_area_definition=False) dsid = make_dataid(name="flash_accumulation") @@ -692,7 +692,7 @@ def test_without_area_def(self, filetype_infos): def test_with_area_def(self, filetype_infos): """Test accumulated products data array with area definition.""" - handler = self.handler_with_area(filetype_infos, 'li_l2_af_nc') + handler = self.handler_with_area(filetype_infos, "li_l2_af_nc") dsid = make_dataid(name="flash_accumulation") # Retrieve the 2D array: arr = handler.get_dataset(dsid).values @@ -700,7 +700,7 @@ def test_with_area_def(self, filetype_infos): def test_get_on_fci_grid_exc(self, filetype_infos): """Test the execution of the get_on_fci_grid function for an accumulated gridded variable.""" - handler = self.handler_with_area(filetype_infos, 'li_l2_af_nc') + handler = self.handler_with_area(filetype_infos, "li_l2_af_nc") handler.get_array_on_fci_grid = mock.MagicMock(side_effect=handler.get_array_on_fci_grid) dsid = make_dataid(name="flash_accumulation") handler.get_dataset(dsid) @@ -708,7 +708,7 @@ def test_get_on_fci_grid_exc(self, filetype_infos): def test_get_on_fci_grid_exc_non_grid(self, filetype_infos): """Test the non-execution of the get_on_fci_grid function for an accumulated non-gridded variable.""" - handler = self.handler_with_area(filetype_infos, 'li_l2_af_nc') + handler = self.handler_with_area(filetype_infos, "li_l2_af_nc") handler.get_array_on_fci_grid = mock.MagicMock(side_effect=handler.get_array_on_fci_grid) dsid = make_dataid(name="accumulation_offsets") handler.get_dataset(dsid) @@ -716,7 +716,7 @@ def test_get_on_fci_grid_exc_non_grid(self, filetype_infos): def test_get_on_fci_grid_exc_non_accum(self, filetype_infos): """Test the non-execution of the get_on_fci_grid function for a non-accumulated variable.""" - handler = self.handler_with_area(filetype_infos, 'li_l2_lef_nc') + handler = self.handler_with_area(filetype_infos, "li_l2_lef_nc") handler.get_array_on_fci_grid = mock.MagicMock(side_effect=handler.get_array_on_fci_grid) dsid = make_dataid(name="radiance_north_sector") handler.get_dataset(dsid) @@ -724,7 +724,7 @@ def test_get_on_fci_grid_exc_non_accum(self, filetype_infos): def test_with_area_def_vars_with_no_pattern(self, filetype_infos): """Test accumulated products variable with no patterns and with area definition.""" - handler = self.handler_with_area(filetype_infos, 'li_l2_af_nc') + handler = self.handler_with_area(filetype_infos, "li_l2_af_nc") # variable with no patterns dsid = make_dataid(name="accumulation_offsets") assert handler.get_dataset(dsid).shape == (1,) @@ -734,7 +734,7 @@ def handler_with_area(self, filetype_infos, product_name): # Note: we need a test param provider here to ensure we write the same values for both handlers below: FakeLIFileHandlerBase.schema_parameters = TestLIL2.param_provider # with area definition - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, product_name), + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, product_name), with_area_definition=True) return handler @@ 
-743,7 +743,7 @@ def test_with_area_def_pixel_placement(self, filetype_infos): # with area definition FakeLIFileHandlerBase.schema_parameters = TestLIL2.param_provider - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_af_nc'), + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), with_area_definition=True) dsid = make_dataid(name="flash_accumulation") @@ -751,11 +751,11 @@ def test_with_area_def_pixel_placement(self, filetype_infos): arr = handler.get_dataset(dsid).values # Retrieve the x/y coordinates: - xarr = handler.get_measured_variable('x').values.astype(int) - yarr = handler.get_measured_variable('y').values.astype(int) + xarr = handler.get_measured_variable("x").values.astype(int) + yarr = handler.get_measured_variable("y").values.astype(int) handler_without_area_def = LIL2NCFileHandler( - 'filename', {}, extract_filetype_info(filetype_infos, 'li_l2_af_nc'), with_area_definition=False) + "filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), with_area_definition=False) FakeLIFileHandlerBase.schema_parameters = None diff --git a/satpy/tests/reader_tests/test_meris_nc.py b/satpy/tests/reader_tests/test_meris_nc.py index 926eccc672..0ab28b1fef 100644 --- a/satpy/tests/reader_tests/test_meris_nc.py +++ b/satpy/tests/reader_tests/test_meris_nc.py @@ -21,49 +21,49 @@ class TestMERISReader(unittest.TestCase): """Test various meris_nc_sen3 filehandlers.""" - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_instantiate(self, mocked_dataset): """Test initialization of file handlers.""" from satpy.readers.meris_nc_sen3 import NCMERIS2, NCMERISCal, NCMERISGeo from satpy.tests.utils import make_dataid - ds_id = make_dataid(name='M01', calibration='reflectance') - ds_id2 = make_dataid(name='wsqf', calibration='reflectance') - filename_info = {'mission_id': 'ENV', 'dataset_name': 'M01', 'start_time': 0, 'end_time': 0} + ds_id = make_dataid(name="M01", calibration="reflectance") + ds_id2 = make_dataid(name="wsqf", calibration="reflectance") + filename_info = {"mission_id": "ENV", "dataset_name": "M01", "start_time": 0, "end_time": 0} - test = NCMERISCal('somedir/somefile.nc', filename_info, 'c') + test = NCMERISCal("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() - test = NCMERISGeo('somedir/somefile.nc', filename_info, 'c') + test = NCMERISGeo("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() - test = NCMERIS2('somedir/somefile.nc', filename_info, 'c') - test.get_dataset(ds_id, {'nc_key': 'the_key'}) - test.get_dataset(ds_id2, {'nc_key': 'the_key'}) + test = NCMERIS2("somedir/somefile.nc", filename_info, "c") + test.get_dataset(ds_id, {"nc_key": "the_key"}) + test.get_dataset(ds_id2, {"nc_key": "the_key"}) mocked_dataset.assert_called() mocked_dataset.reset_mock() - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_open_file_objects(self, mocked_open_dataset): """Test initialization of file handlers.""" from satpy.readers.olci_nc import NCOLCIBase - filename_info = {'mission_id': 'ENV', 'dataset_name': 'M01', 'start_time': 0, 'end_time': 0} + filename_info = {"mission_id": "ENV", "dataset_name": "M01", "start_time": 0, "end_time": 0} open_file = mock.MagicMock() - file_handler = NCOLCIBase(open_file, filename_info, 'c') + file_handler = 
NCOLCIBase(open_file, filename_info, "c") # deepcode ignore W0104: This is a property that is actually a function call. file_handler.nc # pylint: disable=W0104 mocked_open_dataset.assert_called() open_file.open.assert_called() assert (open_file.open.return_value in mocked_open_dataset.call_args[0] or - open_file.open.return_value == mocked_open_dataset.call_args[1].get('filename_or_obj')) + open_file.open.return_value == mocked_open_dataset.call_args[1].get("filename_or_obj")) - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_get_dataset(self, mocked_dataset): """Test reading datasets.""" import numpy as np @@ -71,17 +71,17 @@ def test_get_dataset(self, mocked_dataset): from satpy.readers.meris_nc_sen3 import NCMERIS2 from satpy.tests.utils import make_dataid - mocked_dataset.return_value = xr.Dataset({'mask': (['rows', 'columns'], + mocked_dataset.return_value = xr.Dataset({"mask": (["rows", "columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))}, - coords={'rows': np.arange(5), - 'columns': np.arange(6)}) - ds_id = make_dataid(name='mask') - filename_info = {'mission_id': 'ENV', 'dataset_name': 'mask', 'start_time': 0, 'end_time': 0} - test = NCMERIS2('somedir/somefile.nc', filename_info, 'c') - res = test.get_dataset(ds_id, {'nc_key': 'mask'}) - self.assertEqual(res.dtype, np.dtype('bool')) - - @mock.patch('xarray.open_dataset') + coords={"rows": np.arange(5), + "columns": np.arange(6)}) + ds_id = make_dataid(name="mask") + filename_info = {"mission_id": "ENV", "dataset_name": "mask", "start_time": 0, "end_time": 0} + test = NCMERIS2("somedir/somefile.nc", filename_info, "c") + res = test.get_dataset(ds_id, {"nc_key": "mask"}) + self.assertEqual(res.dtype, np.dtype("bool")) + + @mock.patch("xarray.open_dataset") def test_meris_angles(self, mocked_dataset): """Test reading datasets.""" import numpy as np @@ -90,31 +90,31 @@ def test_meris_angles(self, mocked_dataset): from satpy.readers.meris_nc_sen3 import NCMERISAngles from satpy.tests.utils import make_dataid attr_dict = { - 'ac_subsampling_factor': 1, - 'al_subsampling_factor': 2, + "ac_subsampling_factor": 1, + "al_subsampling_factor": 2, } - mocked_dataset.return_value = xr.Dataset({'SAA': (['tie_rows', 'tie_columns'], + mocked_dataset.return_value = xr.Dataset({"SAA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'SZA': (['tie_rows', 'tie_columns'], + "SZA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'OAA': (['tie_rows', 'tie_columns'], + "OAA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'OZA': (['tie_rows', 'tie_columns'], + "OZA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))}, - coords={'rows': np.arange(5), - 'columns': np.arange(6)}, + coords={"rows": np.arange(5), + "columns": np.arange(6)}, attrs=attr_dict) - filename_info = {'mission_id': 'ENV', 'dataset_name': 'M01', 'start_time': 0, 'end_time': 0} + filename_info = {"mission_id": "ENV", "dataset_name": "M01", "start_time": 0, "end_time": 0} - ds_id = make_dataid(name='solar_azimuth_angle') - ds_id2 = make_dataid(name='satellite_zenith_angle') - test = NCMERISAngles('somedir/somefile.nc', filename_info, 'c') + ds_id = make_dataid(name="solar_azimuth_angle") + ds_id2 = make_dataid(name="satellite_zenith_angle") + test = NCMERISAngles("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) test.get_dataset(ds_id2, filename_info) 
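The angle test above hands NCMERISAngles its geometry on a sparse tie-point grid ("tie_rows"/"tie_columns") together with "ac_subsampling_factor" and "al_subsampling_factor" attributes, which state how many full-resolution columns (across-track) and rows (along-track) separate neighbouring tie points. Below is a minimal sketch of the tie-point expansion such a handler has to perform; the helper name expand_tie_grid and the use of scipy are illustrative assumptions, not satpy's actual implementation:

import numpy as np
from scipy.interpolate import RegularGridInterpolator  # assumption: scipy is available

def expand_tie_grid(tie, ac_factor, al_factor):
    """Interpolate a tie-point grid onto the full-resolution grid (sketch only)."""
    # Tie points sit every al_factor rows and every ac_factor columns of the full grid.
    tie_rows = np.arange(tie.shape[0]) * al_factor
    tie_cols = np.arange(tie.shape[1]) * ac_factor
    interp = RegularGridInterpolator((tie_rows, tie_cols), tie)
    rr, cc = np.meshgrid(np.arange(tie_rows[-1] + 1), np.arange(tie_cols[-1] + 1), indexing="ij")
    return interp(np.stack([rr, cc], axis=-1))

# With the factors used in the test (ac=1, al=2), a 5x6 tie grid covers a 9x6 full grid.
assert expand_tie_grid(np.arange(30.0).reshape(5, 6), ac_factor=1, al_factor=2).shape == (9, 6)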
mocked_dataset.assert_called() mocked_dataset.reset_mock() - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_meris_meteo(self, mocked_dataset): """Test reading datasets.""" import numpy as np @@ -123,26 +123,26 @@ def test_meris_meteo(self, mocked_dataset): from satpy.readers.meris_nc_sen3 import NCMERISMeteo from satpy.tests.utils import make_dataid attr_dict = { - 'ac_subsampling_factor': 1, - 'al_subsampling_factor': 2, + "ac_subsampling_factor": 1, + "al_subsampling_factor": 2, } - data = {'humidity': (['tie_rows', 'tie_columns'], + data = {"humidity": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'total_ozone': (['tie_rows', 'tie_columns'], + "total_ozone": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'sea_level_pressure': (['tie_rows', 'tie_columns'], + "sea_level_pressure": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'total_columnar_water_vapour': (['tie_rows', 'tie_columns'], + "total_columnar_water_vapour": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))} mocked_dataset.return_value = xr.Dataset(data, - coords={'rows': np.arange(5), - 'columns': np.arange(6)}, + coords={"rows": np.arange(5), + "columns": np.arange(6)}, attrs=attr_dict) - filename_info = {'mission_id': 'ENV', 'dataset_name': 'humidity', 'start_time': 0, 'end_time': 0} + filename_info = {"mission_id": "ENV", "dataset_name": "humidity", "start_time": 0, "end_time": 0} - ds_id = make_dataid(name='humidity') - ds_id2 = make_dataid(name='total_ozone') - test = NCMERISMeteo('somedir/somefile.nc', filename_info, 'c') + ds_id = make_dataid(name="humidity") + ds_id2 = make_dataid(name="total_ozone") + test = NCMERISMeteo("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) test.get_dataset(ds_id2, filename_info) mocked_dataset.assert_called() @@ -160,21 +160,21 @@ def test_bitflags(self): from satpy.readers.olci_nc import BitFlags - flag_list = ['SEA_ICE', 'MEGLINT', 'HIGHGLINT', 'CASE2_S', 'CASE2_ANOM', - 'HAZE_OVER_WATER', 'WHITECAPS', 'AC_FAIL', 'BPAC_ON', 'WHITE_SCATT', - 'LOWRW', 'HIGHRW', 'OUT_OF_RANGE_AAC', 'OUT_OF_SCOPE_AAC', - 'OUT_OF_RANGE_OC_NN', 'OUT_OF_SCOPE_OC_NN', - 'OUT_OF_RANGE_CHL_OC4ME_INPUT', 'OUT_OF_RANGE_CHL_OC4ME'] + flag_list = ["SEA_ICE", "MEGLINT", "HIGHGLINT", "CASE2_S", "CASE2_ANOM", + "HAZE_OVER_WATER", "WHITECAPS", "AC_FAIL", "BPAC_ON", "WHITE_SCATT", + "LOWRW", "HIGHRW", "OUT_OF_RANGE_AAC", "OUT_OF_SCOPE_AAC", + "OUT_OF_RANGE_OC_NN", "OUT_OF_SCOPE_OC_NN", + "OUT_OF_RANGE_CHL_OC4ME_INPUT", "OUT_OF_RANGE_CHL_OC4ME"] bits = np.array([1 << x for x in range(len(flag_list))]) bflags = BitFlags(bits, flag_list=flag_list) - items = ['SEA_ICE', 'MEGLINT', 'HIGHGLINT', - 'HAZE_OVER_WATER', 'WHITECAPS', 'AC_FAIL', 'WHITE_SCATT', - 'LOWRW', 'HIGHRW', 'OUT_OF_RANGE_AAC', 'OUT_OF_SCOPE_AAC', - 'OUT_OF_RANGE_OC_NN', 'OUT_OF_SCOPE_OC_NN', - 'OUT_OF_RANGE_CHL_OC4ME_INPUT', 'OUT_OF_RANGE_CHL_OC4ME'] + items = ["SEA_ICE", "MEGLINT", "HIGHGLINT", + "HAZE_OVER_WATER", "WHITECAPS", "AC_FAIL", "WHITE_SCATT", + "LOWRW", "HIGHRW", "OUT_OF_RANGE_AAC", "OUT_OF_SCOPE_AAC", + "OUT_OF_RANGE_OC_NN", "OUT_OF_SCOPE_OC_NN", + "OUT_OF_RANGE_CHL_OC4ME_INPUT", "OUT_OF_RANGE_CHL_OC4ME"] mask = reduce(np.logical_or, [bflags[item] for item in items]) expected = np.array([True, True, True, False, False, True, True, diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py 
index acccb7a28d..1df0d41f12 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -29,185 +29,185 @@ def _get_calibration(num_scans): calibration = { - 'Calibration/VIS_Cal_Coeff': + "Calibration/VIS_Cal_Coeff": xr.DataArray( da.ones((19, 3), chunks=1024), - attrs={'Slope': np.array([1.] * 19), 'Intercept': np.array([0.] * 19)}, - dims=('_bands', '_coeffs')), - 'Calibration/IR_Cal_Coeff': + attrs={"Slope": np.array([1.] * 19), "Intercept": np.array([0.] * 19)}, + dims=("_bands", "_coeffs")), + "Calibration/IR_Cal_Coeff": xr.DataArray( da.ones((6, 4, num_scans), chunks=1024), - attrs={'Slope': np.array([1.] * 6), 'Intercept': np.array([0.] * 6)}, - dims=('_bands', '_coeffs', '_scans')), + attrs={"Slope": np.array([1.] * 6), "Intercept": np.array([0.] * 6)}, + dims=("_bands", "_coeffs", "_scans")), } return calibration def _get_250m_data(num_scans, rows_per_scan, num_cols): # Set some default attributes - def_attrs = {'FillValue': 65535, - 'valid_range': [0, 4095], - 'Slope': np.array([1.] * 1), 'Intercept': np.array([0.] * 1) + def_attrs = {"FillValue": 65535, + "valid_range": [0, 4095], + "Slope": np.array([1.] * 1), "Intercept": np.array([0.] * 1) } - nounits_attrs = {**def_attrs, **{'units': 'NO'}} - radunits_attrs = {**def_attrs, **{'units': 'mW/ (m2 cm-1 sr)'}} + nounits_attrs = {**def_attrs, **{"units": "NO"}} + radunits_attrs = {**def_attrs, **{"units": "mW/ (m2 cm-1 sr)"}} data = { - 'Data/EV_250_RefSB_b1': + "Data/EV_250_RefSB_b1": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, - dims=('_rows', '_cols')), - 'Data/EV_250_RefSB_b2': + dims=("_rows", "_cols")), + "Data/EV_250_RefSB_b2": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, - dims=('_rows', '_cols')), - 'Data/EV_250_RefSB_b3': + dims=("_rows", "_cols")), + "Data/EV_250_RefSB_b3": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, - dims=('_rows', '_cols')), - 'Data/EV_250_RefSB_b4': + dims=("_rows", "_cols")), + "Data/EV_250_RefSB_b4": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, - dims=('_rows', '_cols')), - 'Data/EV_250_Emissive_b24': + dims=("_rows", "_cols")), + "Data/EV_250_Emissive_b24": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=radunits_attrs, - dims=('_rows', '_cols')), - 'Data/EV_250_Emissive_b25': + dims=("_rows", "_cols")), + "Data/EV_250_Emissive_b25": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=radunits_attrs, - dims=('_rows', '_cols')), + dims=("_rows", "_cols")), } return data def _get_1km_data(num_scans, rows_per_scan, num_cols): data = { - 'Data/EV_1KM_LL': + "Data/EV_1KM_LL": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'Slope': np.array([1.]), 'Intercept': np.array([0.]), - 'FillValue': 65535, - 'units': 'NO', - 'valid_range': [0, 4095], - 'long_name': b'1km Earth View Science Data', + "Slope": np.array([1.]), "Intercept": np.array([0.]), + "FillValue": 65535, + "units": "NO", + "valid_range": [0, 4095], + "long_name": b"1km Earth View Science Data", }, - dims=('_rows', '_cols')), - 'Data/EV_1KM_RefSB': + dims=("_rows", "_cols")), + "Data/EV_1KM_RefSB": xr.DataArray( da.ones((15, num_scans * rows_per_scan, num_cols), chunks=1024, 
dtype=np.uint16), attrs={ - 'Slope': np.array([1.] * 15), 'Intercept': np.array([0.] * 15), - 'FillValue': 65535, - 'units': 'NO', - 'valid_range': [0, 4095], - 'long_name': b'1km Earth View Science Data', + "Slope": np.array([1.] * 15), "Intercept": np.array([0.] * 15), + "FillValue": 65535, + "units": "NO", + "valid_range": [0, 4095], + "long_name": b"1km Earth View Science Data", }, - dims=('_ref_bands', '_rows', '_cols')), - 'Data/EV_1KM_Emissive': + dims=("_ref_bands", "_rows", "_cols")), + "Data/EV_1KM_Emissive": xr.DataArray( da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'Slope': np.array([1.] * 4), 'Intercept': np.array([0.] * 4), - 'FillValue': 65535, - 'units': 'mW/ (m2 cm-1 sr)', - 'valid_range': [0, 25000], - 'long_name': b'1km Emissive Bands Earth View ' - b'Science Data', + "Slope": np.array([1.] * 4), "Intercept": np.array([0.] * 4), + "FillValue": 65535, + "units": "mW/ (m2 cm-1 sr)", + "valid_range": [0, 25000], + "long_name": b"1km Emissive Bands Earth View " + b"Science Data", }, - dims=('_ir_bands', '_rows', '_cols')), - 'Data/EV_250_Aggr.1KM_RefSB': + dims=("_ir_bands", "_rows", "_cols")), + "Data/EV_250_Aggr.1KM_RefSB": xr.DataArray( da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'Slope': np.array([1.] * 4), 'Intercept': np.array([0.] * 4), - 'FillValue': 65535, - 'units': 'NO', - 'valid_range': [0, 4095], - 'long_name': b'250m Reflective Bands Earth View ' - b'Science Data Aggregated to 1 km' + "Slope": np.array([1.] * 4), "Intercept": np.array([0.] * 4), + "FillValue": 65535, + "units": "NO", + "valid_range": [0, 4095], + "long_name": b"250m Reflective Bands Earth View " + b"Science Data Aggregated to 1 km" }, - dims=('_ref250_bands', '_rows', '_cols')), - 'Data/EV_250_Aggr.1KM_Emissive': + dims=("_ref250_bands", "_rows", "_cols")), + "Data/EV_250_Aggr.1KM_Emissive": xr.DataArray( da.ones((2, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'Slope': np.array([1.] * 2), 'Intercept': np.array([0.] * 2), - 'FillValue': 65535, - 'units': 'mW/ (m2 cm-1 sr)', - 'valid_range': [0, 4095], - 'long_name': b'250m Emissive Bands Earth View ' - b'Science Data Aggregated to 1 km' + "Slope": np.array([1.] * 2), "Intercept": np.array([0.] 
* 2), + "FillValue": 65535, + "units": "mW/ (m2 cm-1 sr)", + "valid_range": [0, 4095], + "long_name": b"250m Emissive Bands Earth View " + b"Science Data Aggregated to 1 km" }, - dims=('_ir250_bands', '_rows', '_cols')), + dims=("_ir250_bands", "_rows", "_cols")), } return data def _get_250m_ll_data(num_scans, rows_per_scan, num_cols): # Set some default attributes - def_attrs = {'FillValue': 65535, - 'valid_range': [0, 4095], - 'Slope': np.array([1.]), 'Intercept': np.array([0.]), - 'long_name': b'250m Earth View Science Data', - 'units': 'mW/ (m2 cm-1 sr)', + def_attrs = {"FillValue": 65535, + "valid_range": [0, 4095], + "Slope": np.array([1.]), "Intercept": np.array([0.]), + "long_name": b"250m Earth View Science Data", + "units": "mW/ (m2 cm-1 sr)", } data = { - 'Data/EV_250_Emissive_b6': + "Data/EV_250_Emissive_b6": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=def_attrs, - dims=('_rows', '_cols')), - 'Data/EV_250_Emissive_b7': + dims=("_rows", "_cols")), + "Data/EV_250_Emissive_b7": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=def_attrs, - dims=('_rows', '_cols')), + dims=("_rows", "_cols")), } return data def _get_geo_data(num_scans, rows_per_scan, num_cols, prefix): geo = { - prefix + 'Longitude': + prefix + "Longitude": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), attrs={ - 'Slope': np.array([1.] * 1), 'Intercept': np.array([0.] * 1), - 'units': 'degree', - 'valid_range': [-90, 90], + "Slope": np.array([1.] * 1), "Intercept": np.array([0.] * 1), + "units": "degree", + "valid_range": [-90, 90], }, - dims=('_rows', '_cols')), - prefix + 'Latitude': + dims=("_rows", "_cols")), + prefix + "Latitude": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), attrs={ - 'Slope': np.array([1.] * 1), 'Intercept': np.array([0.] * 1), - 'units': 'degree', - 'valid_range': [-180, 180], + "Slope": np.array([1.] * 1), "Intercept": np.array([0.] * 1), + "units": "degree", + "valid_range": [-180, 180], }, - dims=('_rows', '_cols')), - prefix + 'SensorZenith': + dims=("_rows", "_cols")), + prefix + "SensorZenith": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), attrs={ - 'Slope': np.array([.01] * 1), 'Intercept': np.array([0.] * 1), - 'units': 'degree', - 'valid_range': [0, 28000], + "Slope": np.array([.01] * 1), "Intercept": np.array([0.] 
* 1), + "units": "degree", + "valid_range": [0, 28000], }, - dims=('_rows', '_cols')), + dims=("_rows", "_cols")), } return geo @@ -225,15 +225,15 @@ class FakeHDF5FileHandler2(FakeHDF5FileHandler): @property def _rows_per_scan(self): - return self.filetype_info.get('rows_per_scan', 10) + return self.filetype_info.get("rows_per_scan", 10) def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" global_attrs = { - '/attr/Observing Beginning Date': '2019-01-01', - '/attr/Observing Ending Date': '2019-01-01', - '/attr/Observing Beginning Time': '18:27:39.720', - '/attr/Observing Ending Time': '18:38:36.728', + "/attr/Observing Beginning Date": "2019-01-01", + "/attr/Observing Ending Date": "2019-01-01", + "/attr/Observing Beginning Time": "18:27:39.720", + "/attr/Observing Ending Time": "18:38:36.728", } global_attrs = self._set_sensor_attrs(global_attrs) @@ -247,12 +247,12 @@ def get_test_content(self, filename, filename_info, filetype_info): return test_content def _set_sensor_attrs(self, global_attrs): - if 'mersi2_l1b' in self.filetype_info['file_type']: - global_attrs['/attr/Satellite Name'] = 'FY-3D' - global_attrs['/attr/Sensor Identification Code'] = 'MERSI' - elif 'mersi_ll' in self.filetype_info['file_type']: - global_attrs['/attr/Satellite Name'] = 'FY-3E' - global_attrs['/attr/Sensor Identification Code'] = 'MERSI LL' + if "mersi2_l1b" in self.filetype_info["file_type"]: + global_attrs["/attr/Satellite Name"] = "FY-3D" + global_attrs["/attr/Sensor Identification Code"] = "MERSI" + elif "mersi_ll" in self.filetype_info["file_type"]: + global_attrs["/attr/Satellite Name"] = "FY-3E" + global_attrs["/attr/Sensor Identification Code"] = "MERSI LL" return global_attrs def _get_data_file_content(self): @@ -272,7 +272,7 @@ def _add_band_data_file_content(self): num_scans = self.num_scans rows_per_scan = self._rows_per_scan is_mersi2 = self.filetype_info["file_type"].startswith("mersi2_") - is_1km = "_1000" in self.filetype_info['file_type'] + is_1km = "_1000" in self.filetype_info["file_type"] data_func = _get_1km_data if is_1km else (_get_250m_data if is_mersi2 else _get_250m_ll_data) return data_func(num_scans, rows_per_scan, num_cols) @@ -280,12 +280,12 @@ def _add_tbb_coefficients(self, global_attrs): if not self.filetype_info["file_type"].startswith("mersi2_"): return - if "_1000" in self.filetype_info['file_type']: - global_attrs['/attr/TBB_Trans_Coefficient_A'] = np.array([1.0] * 6) - global_attrs['/attr/TBB_Trans_Coefficient_B'] = np.array([0.0] * 6) + if "_1000" in self.filetype_info["file_type"]: + global_attrs["/attr/TBB_Trans_Coefficient_A"] = np.array([1.0] * 6) + global_attrs["/attr/TBB_Trans_Coefficient_B"] = np.array([0.0] * 6) else: - global_attrs['/attr/TBB_Trans_Coefficient_A'] = np.array([0.0] * 6) - global_attrs['/attr/TBB_Trans_Coefficient_B'] = np.array([0.0] * 6) + global_attrs["/attr/TBB_Trans_Coefficient_A"] = np.array([0.0] * 6) + global_attrs["/attr/TBB_Trans_Coefficient_B"] = np.array([0.0] * 6) @property def _num_cols_for_file_type(self): @@ -298,18 +298,18 @@ def _geo_prefix_for_file_type(self): def _test_helper(res): """Remove test code duplication.""" - assert (2 * 40, 2048 * 2) == res['1'].shape - assert 'reflectance' == res['1'].attrs['calibration'] - assert '%' == res['1'].attrs['units'] - assert (2 * 40, 2048 * 2) == res['2'].shape - assert 'reflectance' == res['2'].attrs['calibration'] - assert '%' == res['2'].attrs['units'] - assert (2 * 40, 2048 * 2) == res['3'].shape - assert 'reflectance' == 
res['3'].attrs['calibration'] - assert '%' == res['3'].attrs['units'] - assert (2 * 40, 2048 * 2) == res['4'].shape - assert 'reflectance' == res['4'].attrs['calibration'] - assert '%' == res['4'].attrs['units'] + assert (2 * 40, 2048 * 2) == res["1"].shape + assert "reflectance" == res["1"].attrs["calibration"] + assert "%" == res["1"].attrs["units"] + assert (2 * 40, 2048 * 2) == res["2"].shape + assert "reflectance" == res["2"].attrs["calibration"] + assert "%" == res["2"].attrs["units"] + assert (2 * 40, 2048 * 2) == res["3"].shape + assert "reflectance" == res["3"].attrs["calibration"] + assert "%" == res["3"].attrs["units"] + assert (2 * 40, 2048 * 2) == res["4"].shape + assert "reflectance" == res["4"].attrs["calibration"] + assert "%" == res["4"].attrs["units"] class MERSIL1BTester: @@ -319,9 +319,9 @@ def setup_method(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.mersi_l1b import MERSIL1B - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(MERSIL1B, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(MERSIL1B, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -334,8 +334,8 @@ class TestMERSI2L1B(MERSIL1BTester): """Test the FY3D MERSI2 L1B reader.""" yaml_file = "mersi2_l1b.yaml" - filenames_1000m = ['tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_GEO1K_L1B.HDF'] - filenames_250m = ['tf2019071182739.FY3D-X_MERSI_0250M_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_GEOQK_L1B.HDF'] + filenames_1000m = ["tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF", "tf2019071182739.FY3D-X_MERSI_GEO1K_L1B.HDF"] + filenames_250m = ["tf2019071182739.FY3D-X_MERSI_0250M_L1B.HDF", "tf2019071182739.FY3D-X_MERSI_GEOQK_L1B.HDF"] filenames_all = filenames_1000m + filenames_250m def test_all_resolutions(self): @@ -355,8 +355,8 @@ def test_all_resolutions(self): # - Bands 1-4 (visible) # - Bands 24-25 (IR) available_datasets = reader.available_dataset_ids - for band_name in ('1', '2', '3', '4', '24', '25'): - if band_name in ('24', '25'): + for band_name in ("1", "2", "3", "4", "24", "25"): + if band_name in ("24", "25"): # don't know how to get radiance for IR bands num_results = 2 else: @@ -370,20 +370,20 @@ def test_all_resolutions(self): num_results=num_results, best=False) assert num_results == len(res) - res = reader.load(['1', '2', '3', '4', '5', '20', '24', '25']) + res = reader.load(["1", "2", "3", "4", "5", "20", "24", "25"]) assert len(res) == 8 - assert res['5'].shape == (2 * 10, 2048) - assert res['5'].attrs['calibration'] == 'reflectance' - assert res['5'].attrs['units'] == '%' - assert res['20'].shape == (2 * 10, 2048) - assert res['20'].attrs['calibration'] == 'brightness_temperature' - assert res['20'].attrs['units'] == 'K' - assert res['24'].shape == (2 * 40, 2048 * 2) - assert res['24'].attrs['calibration'] == 'brightness_temperature' - assert res['24'].attrs['units'] == 'K' - assert res['25'].shape == (2 * 40, 2048 * 2) - assert res['25'].attrs['calibration'] == 'brightness_temperature' - assert res['25'].attrs['units'] == 'K' + assert res["5"].shape == (2 * 10, 2048) + assert res["5"].attrs["calibration"] == "reflectance" + assert res["5"].attrs["units"] == "%" + assert res["20"].shape == 
(2 * 10, 2048) + assert res["20"].attrs["calibration"] == "brightness_temperature" + assert res["20"].attrs["units"] == "K" + assert res["24"].shape == (2 * 40, 2048 * 2) + assert res["24"].attrs["calibration"] == "brightness_temperature" + assert res["24"].attrs["units"] == "K" + assert res["25"].shape == (2 * 40, 2048 * 2) + assert res["25"].attrs["calibration"] == "brightness_temperature" + assert res["25"].attrs["units"] == "K" def test_counts_calib(self): """Test loading data at counts calibration.""" @@ -398,43 +398,43 @@ def test_counts_calib(self): assert reader.file_handlers ds_ids = [] - for band_name in ['1', '2', '3', '4', '5', '20', '24', '25']: - ds_ids.append(make_dataid(name=band_name, calibration='counts')) - ds_ids.append(make_dataid(name='satellite_zenith_angle')) + for band_name in ["1", "2", "3", "4", "5", "20", "24", "25"]: + ds_ids.append(make_dataid(name=band_name, calibration="counts")) + ds_ids.append(make_dataid(name="satellite_zenith_angle")) res = reader.load(ds_ids) assert len(res) == 9 - assert res['1'].shape == (2 * 40, 2048 * 2) - assert res['1'].attrs['calibration'] == 'counts' - assert res['1'].dtype == np.uint16 - assert res['1'].attrs['units'] == '1' - assert res['2'].shape == (2 * 40, 2048 * 2) - assert res['2'].attrs['calibration'] == 'counts' - assert res['2'].dtype == np.uint16 - assert res['2'].attrs['units'] == '1' - assert res['3'].shape == (2 * 40, 2048 * 2) - assert res['3'].attrs['calibration'] == 'counts' - assert res['3'].dtype == np.uint16 - assert res['3'].attrs['units'] == '1' - assert res['4'].shape == (2 * 40, 2048 * 2) - assert res['4'].attrs['calibration'] == 'counts' - assert res['4'].dtype == np.uint16 - assert res['4'].attrs['units'] == '1' - assert res['5'].shape == (2 * 10, 2048) - assert res['5'].attrs['calibration'] == 'counts' - assert res['5'].dtype == np.uint16 - assert res['5'].attrs['units'] == '1' - assert res['20'].shape == (2 * 10, 2048) - assert res['20'].attrs['calibration'] == 'counts' - assert res['20'].dtype == np.uint16 - assert res['20'].attrs['units'] == '1' - assert res['24'].shape == (2 * 40, 2048 * 2) - assert res['24'].attrs['calibration'] == 'counts' - assert res['24'].dtype == np.uint16 - assert res['24'].attrs['units'] == '1' - assert res['25'].shape == (2 * 40, 2048 * 2) - assert res['25'].attrs['calibration'] == 'counts' - assert res['25'].dtype == np.uint16 - assert res['25'].attrs['units'] == '1' + assert res["1"].shape == (2 * 40, 2048 * 2) + assert res["1"].attrs["calibration"] == "counts" + assert res["1"].dtype == np.uint16 + assert res["1"].attrs["units"] == "1" + assert res["2"].shape == (2 * 40, 2048 * 2) + assert res["2"].attrs["calibration"] == "counts" + assert res["2"].dtype == np.uint16 + assert res["2"].attrs["units"] == "1" + assert res["3"].shape == (2 * 40, 2048 * 2) + assert res["3"].attrs["calibration"] == "counts" + assert res["3"].dtype == np.uint16 + assert res["3"].attrs["units"] == "1" + assert res["4"].shape == (2 * 40, 2048 * 2) + assert res["4"].attrs["calibration"] == "counts" + assert res["4"].dtype == np.uint16 + assert res["4"].attrs["units"] == "1" + assert res["5"].shape == (2 * 10, 2048) + assert res["5"].attrs["calibration"] == "counts" + assert res["5"].dtype == np.uint16 + assert res["5"].attrs["units"] == "1" + assert res["20"].shape == (2 * 10, 2048) + assert res["20"].attrs["calibration"] == "counts" + assert res["20"].dtype == np.uint16 + assert res["20"].attrs["units"] == "1" + assert res["24"].shape == (2 * 40, 2048 * 2) + assert res["24"].attrs["calibration"] 
== "counts" + assert res["24"].dtype == np.uint16 + assert res["24"].attrs["units"] == "1" + assert res["25"].shape == (2 * 40, 2048 * 2) + assert res["25"].attrs["calibration"] == "counts" + assert res["25"].dtype == np.uint16 + assert res["25"].attrs["units"] == "1" def test_rad_calib(self): """Test loading data at radiance calibration.""" @@ -449,25 +449,25 @@ def test_rad_calib(self): assert reader.file_handlers ds_ids = [] - for band_name in ['1', '2', '3', '4', '5']: - ds_ids.append(make_dataid(name=band_name, calibration='radiance')) + for band_name in ["1", "2", "3", "4", "5"]: + ds_ids.append(make_dataid(name=band_name, calibration="radiance")) res = reader.load(ds_ids) assert len(res) == 5 - assert res['1'].shape == (2 * 40, 2048 * 2) - assert res['1'].attrs['calibration'] == 'radiance' - assert res['1'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['2'].shape == (2 * 40, 2048 * 2) - assert res['2'].attrs['calibration'] == 'radiance' - assert res['2'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['3'].shape == (2 * 40, 2048 * 2) - assert res['3'].attrs['calibration'] == 'radiance' - assert res['3'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['4'].shape == (2 * 40, 2048 * 2) - assert res['4'].attrs['calibration'] == 'radiance' - assert res['4'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['5'].shape == (2 * 10, 2048) - assert res['5'].attrs['calibration'] == 'radiance' - assert res['5'].attrs['units'] == 'mW/ (m2 cm-1 sr)' + assert res["1"].shape == (2 * 40, 2048 * 2) + assert res["1"].attrs["calibration"] == "radiance" + assert res["1"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["2"].shape == (2 * 40, 2048 * 2) + assert res["2"].attrs["calibration"] == "radiance" + assert res["2"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["3"].shape == (2 * 40, 2048 * 2) + assert res["3"].attrs["calibration"] == "radiance" + assert res["3"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["4"].shape == (2 * 40, 2048 * 2) + assert res["4"].attrs["calibration"] == "radiance" + assert res["4"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["5"].shape == (2 * 10, 2048) + assert res["5"].attrs["calibration"] == "radiance" + assert res["5"].attrs["units"] == "mW/ (m2 cm-1 sr)" def test_1km_resolutions(self): """Test loading data when only 1km resolutions are available.""" @@ -486,8 +486,8 @@ def test_1km_resolutions(self): # - Bands 1-4 (visible) # - Bands 24-25 (IR) available_datasets = reader.available_dataset_ids - for band_name in ('1', '2', '3', '4', '24', '25'): - if band_name in ('24', '25'): + for band_name in ("1", "2", "3", "4", "24", "25"): + if band_name in ("24", "25"): # don't know how to get radiance for IR bands num_results = 2 else: @@ -500,32 +500,32 @@ def test_1km_resolutions(self): num_results=num_results, best=False) assert num_results == len(res) - res = reader.load(['1', '2', '3', '4', '5', '20', '24', '25']) + res = reader.load(["1", "2", "3", "4", "5", "20", "24", "25"]) assert len(res) == 8 - assert res['1'].shape == (2 * 10, 2048) - assert res['1'].attrs['calibration'] == 'reflectance' - assert res['1'].attrs['units'] == '%' - assert res['2'].shape == (2 * 10, 2048) - assert res['2'].attrs['calibration'] == 'reflectance' - assert res['2'].attrs['units'] == '%' - assert res['3'].shape == (2 * 10, 2048) - assert res['3'].attrs['calibration'] == 'reflectance' - assert res['3'].attrs['units'] == '%' - assert res['4'].shape == (2 * 10, 2048) - assert res['4'].attrs['calibration'] == 'reflectance' - assert res['4'].attrs['units'] == '%' - 
assert res['5'].shape == (2 * 10, 2048) - assert res['5'].attrs['calibration'] == 'reflectance' - assert res['5'].attrs['units'] == '%' - assert res['20'].shape == (2 * 10, 2048) - assert res['20'].attrs['calibration'] == 'brightness_temperature' - assert res['20'].attrs['units'] == 'K' - assert res['24'].shape == (2 * 10, 2048) - assert res['24'].attrs['calibration'] == 'brightness_temperature' - assert res['24'].attrs['units'] == 'K' - assert res['25'].shape == (2 * 10, 2048) - assert res['25'].attrs['calibration'] == 'brightness_temperature' - assert res['25'].attrs['units'] == 'K' + assert res["1"].shape == (2 * 10, 2048) + assert res["1"].attrs["calibration"] == "reflectance" + assert res["1"].attrs["units"] == "%" + assert res["2"].shape == (2 * 10, 2048) + assert res["2"].attrs["calibration"] == "reflectance" + assert res["2"].attrs["units"] == "%" + assert res["3"].shape == (2 * 10, 2048) + assert res["3"].attrs["calibration"] == "reflectance" + assert res["3"].attrs["units"] == "%" + assert res["4"].shape == (2 * 10, 2048) + assert res["4"].attrs["calibration"] == "reflectance" + assert res["4"].attrs["units"] == "%" + assert res["5"].shape == (2 * 10, 2048) + assert res["5"].attrs["calibration"] == "reflectance" + assert res["5"].attrs["units"] == "%" + assert res["20"].shape == (2 * 10, 2048) + assert res["20"].attrs["calibration"] == "brightness_temperature" + assert res["20"].attrs["units"] == "K" + assert res["24"].shape == (2 * 10, 2048) + assert res["24"].attrs["calibration"] == "brightness_temperature" + assert res["24"].attrs["units"] == "K" + assert res["25"].shape == (2 * 10, 2048) + assert res["25"].attrs["calibration"] == "brightness_temperature" + assert res["25"].attrs["units"] == "K" def test_250_resolutions(self): """Test loading data when only 250m resolutions are available.""" @@ -544,8 +544,8 @@ def test_250_resolutions(self): # - Bands 1-4 (visible) # - Bands 24-25 (IR) available_datasets = reader.available_dataset_ids - for band_name in ('1', '2', '3', '4', '24', '25'): - if band_name in ('24', '25'): + for band_name in ("1", "2", "3", "4", "24", "25"): + if band_name in ("24", "25"): # don't know how to get radiance for IR bands num_results = 2 else: @@ -558,27 +558,27 @@ def test_250_resolutions(self): with pytest.raises(KeyError): get_key(ds_id, available_datasets, num_results=num_results, best=False) - res = reader.load(['1', '2', '3', '4', '5', '20', '24', '25']) + res = reader.load(["1", "2", "3", "4", "5", "20", "24", "25"]) assert len(res) == 6 with pytest.raises(KeyError): - res.__getitem__('5') + res.__getitem__("5") with pytest.raises(KeyError): - res.__getitem__('20') + res.__getitem__("20") _test_helper(res) - assert res['24'].shape == (2 * 40, 2048 * 2) - assert res['24'].attrs['calibration'] == 'brightness_temperature' - assert res['24'].attrs['units'] == 'K' - assert res['25'].shape == (2 * 40, 2048 * 2) - assert res['25'].attrs['calibration'] == 'brightness_temperature' - assert res['25'].attrs['units'] == 'K' + assert res["24"].shape == (2 * 40, 2048 * 2) + assert res["24"].attrs["calibration"] == "brightness_temperature" + assert res["24"].attrs["units"] == "K" + assert res["25"].shape == (2 * 40, 2048 * 2) + assert res["25"].attrs["calibration"] == "brightness_temperature" + assert res["25"].attrs["units"] == "K" class TestMERSILLL1B(MERSIL1BTester): """Test the FY3E MERSI-LL L1B reader.""" yaml_file = "mersi_ll_l1b.yaml" - filenames_1000m = ['FY3E_MERSI_GRAN_L1_20230410_1910_1000M_V0.HDF', 'FY3E_MERSI_GRAN_L1_20230410_1910_GEO1K_V0.HDF'] 
- filenames_250m = ['FY3E_MERSI_GRAN_L1_20230410_1910_0250M_V0.HDF', 'FY3E_MERSI_GRAN_L1_20230410_1910_GEOQK_V0.HDF'] + filenames_1000m = ["FY3E_MERSI_GRAN_L1_20230410_1910_1000M_V0.HDF", "FY3E_MERSI_GRAN_L1_20230410_1910_GEO1K_V0.HDF"] + filenames_250m = ["FY3E_MERSI_GRAN_L1_20230410_1910_0250M_V0.HDF", "FY3E_MERSI_GRAN_L1_20230410_1910_GEOQK_V0.HDF"] filenames_all = filenames_1000m + filenames_250m def test_all_resolutions(self): @@ -598,7 +598,7 @@ def test_all_resolutions(self): # - Bands 1-4 (visible) # - Bands 24-25 (IR) available_datasets = reader.available_dataset_ids - for band_name in ('6', '7'): + for band_name in ("6", "7"): num_results = 2 ds_id = make_dataid(name=band_name, resolution=250) res = get_key(ds_id, available_datasets, @@ -609,17 +609,17 @@ def test_all_resolutions(self): num_results=num_results, best=False) assert num_results == len(res) - res = reader.load(['1', '2', '4', '7']) + res = reader.load(["1", "2", "4", "7"]) assert len(res) == 4 - assert res['4'].shape == (2 * 10, 2048) - assert res['1'].attrs['calibration'] == 'radiance' - assert res['1'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['2'].shape == (2 * 10, 2048) - assert res['2'].attrs['calibration'] == 'brightness_temperature' - assert res['2'].attrs['units'] == 'K' - assert res['7'].shape == (2 * 40, 2048 * 2) - assert res['7'].attrs['calibration'] == 'brightness_temperature' - assert res['7'].attrs['units'] == 'K' + assert res["4"].shape == (2 * 10, 2048) + assert res["1"].attrs["calibration"] == "radiance" + assert res["1"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["2"].shape == (2 * 10, 2048) + assert res["2"].attrs["calibration"] == "brightness_temperature" + assert res["2"].attrs["units"] == "K" + assert res["7"].shape == (2 * 40, 2048 * 2) + assert res["7"].attrs["calibration"] == "brightness_temperature" + assert res["7"].attrs["units"] == "K" def test_rad_calib(self): """Test loading data at radiance calibration.""" @@ -634,25 +634,25 @@ def test_rad_calib(self): assert reader.file_handlers ds_ids = [] - for band_name in ['1', '3', '4', '6', '7']: - ds_ids.append(make_dataid(name=band_name, calibration='radiance')) + for band_name in ["1", "3", "4", "6", "7"]: + ds_ids.append(make_dataid(name=band_name, calibration="radiance")) res = reader.load(ds_ids) assert len(res) == 5 - assert res['1'].shape == (2 * 10, 2048) - assert res['1'].attrs['calibration'] == 'radiance' - assert res['1'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['3'].shape == (2 * 10, 2048) - assert res['3'].attrs['calibration'] == 'radiance' - assert res['3'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['4'].shape == (2 * 10, 2048) - assert res['4'].attrs['calibration'] == 'radiance' - assert res['4'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['6'].shape == (2 * 40, 2048 * 2) - assert res['6'].attrs['calibration'] == 'radiance' - assert res['6'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['7'].shape == (2 * 40, 2048 * 2) - assert res['7'].attrs['calibration'] == 'radiance' - assert res['7'].attrs['units'] == 'mW/ (m2 cm-1 sr)' + assert res["1"].shape == (2 * 10, 2048) + assert res["1"].attrs["calibration"] == "radiance" + assert res["1"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["3"].shape == (2 * 10, 2048) + assert res["3"].attrs["calibration"] == "radiance" + assert res["3"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["4"].shape == (2 * 10, 2048) + assert res["4"].attrs["calibration"] == "radiance" + assert res["4"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert 
res["6"].shape == (2 * 40, 2048 * 2) + assert res["6"].attrs["calibration"] == "radiance" + assert res["6"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["7"].shape == (2 * 40, 2048 * 2) + assert res["7"].attrs["calibration"] == "radiance" + assert res["7"].attrs["units"] == "mW/ (m2 cm-1 sr)" def test_1km_resolutions(self): """Test loading data when only 1km resolutions are available.""" @@ -671,8 +671,8 @@ def test_1km_resolutions(self): # - Band 6-7 (IR) # - Bands 24-25 (IR) available_datasets = reader.available_dataset_ids - for band_name in ('1', '2', '3', '4', '6', '7'): - if band_name == '1': + for band_name in ("1", "2", "3", "4", "6", "7"): + if band_name == "1": # don't know how to get anything apart from radiance for LL band num_results = 1 else: @@ -683,31 +683,31 @@ def test_1km_resolutions(self): ds_id = make_dataid(name=band_name, resolution=1000) res = get_key(ds_id, available_datasets, num_results=num_results, best=False) - if band_name == '1': + if band_name == "1": assert num_results == len([res]) else: assert num_results == len(res) - res = reader.load(['1', '2', '3', '5', '6', '7']) + res = reader.load(["1", "2", "3", "5", "6", "7"]) assert len(res) == 6 - assert res['1'].shape == (2 * 10, 2048) - assert 'radiance' == res['1'].attrs['calibration'] - assert res['1'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['2'].shape == (2 * 10, 2048) - assert 'brightness_temperature' == res['2'].attrs['calibration'] - assert res['2'].attrs['units'] == 'K' - assert res['3'].shape == (2 * 10, 2048) - assert 'brightness_temperature' == res['3'].attrs['calibration'] - assert res['3'].attrs['units'] == 'K' - assert res['5'].shape == (2 * 10, 2048) - assert 'brightness_temperature' == res['5'].attrs['calibration'] - assert res['5'].attrs['units'] == 'K' - assert res['6'].shape == (2 * 10, 2048) - assert 'brightness_temperature' == res['6'].attrs['calibration'] - assert res['6'].attrs['units'] == 'K' - assert res['7'].shape == (2 * 10, 2048) - assert 'brightness_temperature' == res['7'].attrs['calibration'] - assert res['7'].attrs['units'] == 'K' + assert res["1"].shape == (2 * 10, 2048) + assert "radiance" == res["1"].attrs["calibration"] + assert res["1"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["2"].shape == (2 * 10, 2048) + assert "brightness_temperature" == res["2"].attrs["calibration"] + assert res["2"].attrs["units"] == "K" + assert res["3"].shape == (2 * 10, 2048) + assert "brightness_temperature" == res["3"].attrs["calibration"] + assert res["3"].attrs["units"] == "K" + assert res["5"].shape == (2 * 10, 2048) + assert "brightness_temperature" == res["5"].attrs["calibration"] + assert res["5"].attrs["units"] == "K" + assert res["6"].shape == (2 * 10, 2048) + assert "brightness_temperature" == res["6"].attrs["calibration"] + assert res["6"].attrs["units"] == "K" + assert res["7"].shape == (2 * 10, 2048) + assert "brightness_temperature" == res["7"].attrs["calibration"] + assert res["7"].attrs["units"] == "K" def test_250_resolutions(self): """Test loading data when only 250m resolutions are available.""" @@ -725,7 +725,7 @@ def test_250_resolutions(self): # Verify that we have multiple resolutions for: # - Bands 6-7 available_datasets = reader.available_dataset_ids - for band_name in ('6', '7'): + for band_name in ("6", "7"): num_results = 2 ds_id = make_dataid(name=band_name, resolution=250) res = get_key(ds_id, available_datasets, @@ -735,13 +735,13 @@ def test_250_resolutions(self): with pytest.raises(KeyError): get_key(ds_id, available_datasets, 
num_results=num_results, best=False) - res = reader.load(['1', '6', '7']) + res = reader.load(["1", "6", "7"]) assert 2 == len(res) with pytest.raises(KeyError): - res.__getitem__('1') - assert (2 * 40, 2048 * 2) == res['6'].shape - assert 'brightness_temperature' == res['6'].attrs['calibration'] - assert 'K' == res['6'].attrs['units'] - assert (2 * 40, 2048 * 2) == res['7'].shape - assert 'brightness_temperature' == res['7'].attrs['calibration'] - assert 'K' == res['7'].attrs['units'] + res.__getitem__("1") + assert (2 * 40, 2048 * 2) == res["6"].shape + assert "brightness_temperature" == res["6"].attrs["calibration"] + assert "K" == res["6"].attrs["units"] + assert (2 * 40, 2048 * 2) == res["7"].shape + assert "brightness_temperature" == res["7"].attrs["calibration"] + assert "K" == res["7"].attrs["units"] diff --git a/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py b/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py index cafadf9e77..77344e6856 100644 --- a/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py +++ b/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py @@ -39,9 +39,9 @@ DEFAULT_FILE_DATE_DATA = np.clip(DEFAULT_FILE_FLOAT_DATA, 0, 1049) DEFAULT_FILE_UBYTE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=np.ubyte) -float_variables = ['tpwGrid', 'tpwGridPrior', 'tpwGridSubseq', 'footGridPrior', 'footGridSubseq'] -date_variables = ['timeAwayGridPrior', 'timeAwayGridSubseq'] -ubyte_variables = ['satGridPrior', 'satGridSubseq'] +float_variables = ["tpwGrid", "tpwGridPrior", "tpwGridSubseq", "footGridPrior", "footGridSubseq"] +date_variables = ["timeAwayGridPrior", "timeAwayGridSubseq"] +ubyte_variables = ["satGridPrior", "satGridSubseq"] file_content_attr = dict() @@ -50,57 +50,57 @@ class FakeNetCDF4FileHandlerMimicLow(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content for lower resolution files.""" - dt_s = filename_info.get('start_time', DEFAULT_DATE) - dt_e = filename_info.get('end_time', DEFAULT_DATE) + dt_s = filename_info.get("start_time", DEFAULT_DATE) + dt_e = filename_info.get("end_time", DEFAULT_DATE) - if filetype_info['file_type'] == 'mimicTPW2_comp': + if filetype_info["file_type"] == "mimicTPW2_comp": file_content = { - '/attr/start_time': dt_s.strftime('%Y%m%d.%H%M%S'), - '/attr/end_time': dt_e.strftime('%Y%m%d.%H%M%S'), - '/attr/platform_shortname': 'aggregated microwave', - '/attr/sensor': 'mimic', + "/attr/start_time": dt_s.strftime("%Y%m%d.%H%M%S"), + "/attr/end_time": dt_e.strftime("%Y%m%d.%H%M%S"), + "/attr/platform_shortname": "aggregated microwave", + "/attr/sensor": "mimic", } - file_content['latArr'] = DEFAULT_LAT - file_content['latArr/shape'] = (DEFAULT_FILE_SHAPE[0],) - file_content['latArr/attr/units'] = 'degress_north' + file_content["latArr"] = DEFAULT_LAT + file_content["latArr/shape"] = (DEFAULT_FILE_SHAPE[0],) + file_content["latArr/attr/units"] = "degress_north" - file_content['lonArr'] = DEFAULT_LON - file_content['lonArr/shape'] = (DEFAULT_FILE_SHAPE[1],) - file_content['lonArr/attr/units'] = 'degrees_east' + file_content["lonArr"] = DEFAULT_LON + file_content["lonArr/shape"] = (DEFAULT_FILE_SHAPE[1],) + file_content["lonArr/attr/units"] = "degrees_east" - file_content['/dimension/lat'] = DEFAULT_FILE_SHAPE[0] - file_content['/dimension/lon'] = DEFAULT_FILE_SHAPE[1] + file_content["/dimension/lat"] = DEFAULT_FILE_SHAPE[0] + file_content["/dimension/lon"] = DEFAULT_FILE_SHAPE[1] for float_var in float_variables: file_content[float_var] = 
DEFAULT_FILE_FLOAT_DATA.reshape(DEFAULT_FILE_SHAPE) - file_content['{}/shape'.format(float_var)] = DEFAULT_FILE_SHAPE + file_content["{}/shape".format(float_var)] = DEFAULT_FILE_SHAPE file_content_attr[float_var] = {"units": "mm"} for date_var in date_variables: file_content[date_var] = DEFAULT_FILE_DATE_DATA.reshape(DEFAULT_FILE_SHAPE) - file_content['{}/shape'.format(date_var)] = DEFAULT_FILE_SHAPE + file_content["{}/shape".format(date_var)] = DEFAULT_FILE_SHAPE file_content_attr[date_var] = {"units": "minutes"} for ubyte_var in ubyte_variables: file_content[ubyte_var] = DEFAULT_FILE_UBYTE_DATA.reshape(DEFAULT_FILE_SHAPE) - file_content['{}/shape'.format(ubyte_var)] = DEFAULT_FILE_SHAPE + file_content["{}/shape".format(ubyte_var)] = DEFAULT_FILE_SHAPE file_content_attr[ubyte_var] = {"source_key": "Key: 0: None, 1: NOAA-N, 2: NOAA-P, 3: Metop-A, \ 4: Metop-B, 5: SNPP, 6: SSMI-17, 7: SSMI-18"} # convert to xarrays for key, val in file_content.items(): - if key == 'lonArr' or key == 'latArr': + if key == "lonArr" or key == "latArr": file_content[key] = xr.DataArray(val) elif isinstance(val, np.ndarray): if val.ndim > 1: - file_content[key] = xr.DataArray(val, dims=('y', 'x'), attrs=file_content_attr[key]) + file_content[key] = xr.DataArray(val, dims=("y", "x"), attrs=file_content_attr[key]) else: file_content[key] = xr.DataArray(val) for key in itertools.chain(float_variables, ubyte_variables): - file_content[key].attrs['_FillValue'] = -999.0 - file_content[key].attrs['name'] = key - file_content[key].attrs['file_key'] = key - file_content[key].attrs['file_type'] = self.filetype_info['file_type'] + file_content[key].attrs["_FillValue"] = -999.0 + file_content[key].attrs["name"] = key + file_content[key].attrs["file_key"] = key + file_content[key].attrs["file_type"] = self.filetype_info["file_type"] else: - msg = 'Wrong Test Reader for file_type {}'.format(filetype_info['file_type']) + msg = "Wrong Test Reader for file_type {}".format(filetype_info["file_type"]) raise AssertionError(msg) return file_content @@ -115,9 +115,9 @@ def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.mimic_TPW2_nc import MimicTPW2FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(MimicTPW2FileHandler, '__bases__', (FakeNetCDF4FileHandlerMimicLow,)) + self.p = mock.patch.object(MimicTPW2FileHandler, "__bases__", (FakeNetCDF4FileHandlerMimicLow,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -130,7 +130,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'comp20190619.130000.nc', + "comp20190619.130000.nc", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -141,54 +141,54 @@ def test_load_mimic_float(self): """Load TPW mimic float data.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.mimic_TPW2_nc.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.mimic_TPW2_nc.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'comp20190619.130000.nc', + "comp20190619.130000.nc", ]) r.create_filehandlers(loadables) ds = r.load(float_variables) 
self.assertEqual(len(ds), len(float_variables)) for d in ds.values(): - self.assertEqual(d.attrs['platform_shortname'], 'aggregated microwave') - self.assertEqual(d.attrs['sensor'], 'mimic') - self.assertEqual(d.attrs['units'], 'mm') - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) + self.assertEqual(d.attrs["platform_shortname"], "aggregated microwave") + self.assertEqual(d.attrs["sensor"], "mimic") + self.assertEqual(d.attrs["units"], "mm") + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) def test_load_mimic_timedelta(self): """Load TPW mimic timedelta data (data latency variables).""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.mimic_TPW2_nc.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.mimic_TPW2_nc.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'comp20190619.130000.nc', + "comp20190619.130000.nc", ]) r.create_filehandlers(loadables) ds = r.load(date_variables) self.assertEqual(len(ds), len(date_variables)) for d in ds.values(): - self.assertEqual(d.attrs['platform_shortname'], 'aggregated microwave') - self.assertEqual(d.attrs['sensor'], 'mimic') - self.assertEqual(d.attrs['units'], 'minutes') - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) + self.assertEqual(d.attrs["platform_shortname"], "aggregated microwave") + self.assertEqual(d.attrs["sensor"], "mimic") + self.assertEqual(d.attrs["units"], "minutes") + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) self.assertEqual(d.dtype, DEFAULT_FILE_DTYPE) def test_load_mimic_ubyte(self): """Load TPW mimic sensor grids.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.mimic_TPW2_nc.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.mimic_TPW2_nc.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'comp20190619.130000.nc', + "comp20190619.130000.nc", ]) r.create_filehandlers(loadables) ds = r.load(ubyte_variables) self.assertEqual(len(ds), len(ubyte_variables)) for d in ds.values(): - self.assertEqual(d.attrs['platform_shortname'], 'aggregated microwave') - self.assertEqual(d.attrs['sensor'], 'mimic') - self.assertIn('source_key', d.attrs) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) + self.assertEqual(d.attrs["platform_shortname"], "aggregated microwave") + self.assertEqual(d.attrs["sensor"], "mimic") + self.assertIn("source_key", d.attrs) + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) self.assertEqual(d.dtype, np.uint8) diff --git a/satpy/tests/reader_tests/test_mimic_TPW2_nc.py b/satpy/tests/reader_tests/test_mimic_TPW2_nc.py index 544c805e70..9c6c24b5a7 100644 --- a/satpy/tests/reader_tests/test_mimic_TPW2_nc.py +++ b/satpy/tests/reader_tests/test_mimic_TPW2_nc.py @@ -43,42 +43,42 @@ class FakeNetCDF4FileHandlerMimic(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" from xarray import DataArray - dt_s = filename_info.get('start_time', datetime(2019, 6, 19, 13, 0)) - dt_e = filename_info.get('end_time', datetime(2019, 6, 19, 13, 0)) + dt_s = filename_info.get("start_time", datetime(2019, 6, 19, 13, 0)) + dt_e = filename_info.get("end_time", datetime(2019, 6, 19, 13, 0)) - if filetype_info['file_type'] == 'mimicTPW2_comp': + if filetype_info["file_type"] == "mimicTPW2_comp": file_content = { 
- '/attr/start_time': dt_s.strftime('%Y%m%d.%H%M%S'), - '/attr/end_time': dt_e.strftime('%Y%m%d.%H%M%S'), - '/attr/platform_shortname': 'aggregated microwave', - '/attr/sensor': 'mimic', + "/attr/start_time": dt_s.strftime("%Y%m%d.%H%M%S"), + "/attr/end_time": dt_e.strftime("%Y%m%d.%H%M%S"), + "/attr/platform_shortname": "aggregated microwave", + "/attr/sensor": "mimic", } - file_content['latArr'] = DEFAULT_LAT - file_content['latArr/shape'] = (DEFAULT_FILE_SHAPE[0],) - file_content['latArr/attr/units'] = 'degress_north' + file_content["latArr"] = DEFAULT_LAT + file_content["latArr/shape"] = (DEFAULT_FILE_SHAPE[0],) + file_content["latArr/attr/units"] = "degress_north" - file_content['lonArr'] = DEFAULT_LON - file_content['lonArr/shape'] = (DEFAULT_FILE_SHAPE[1],) - file_content['lonArr/attr/units'] = 'degrees_east' + file_content["lonArr"] = DEFAULT_LON + file_content["lonArr/shape"] = (DEFAULT_FILE_SHAPE[1],) + file_content["lonArr/attr/units"] = "degrees_east" - file_content['tpwGrid'] = DEFAULT_FILE_DATA - file_content['tpwGrid/shape'] = DEFAULT_FILE_SHAPE - file_content_units['tpwGrid'] = 'mm' + file_content["tpwGrid"] = DEFAULT_FILE_DATA + file_content["tpwGrid/shape"] = DEFAULT_FILE_SHAPE + file_content_units["tpwGrid"] = "mm" - file_content['/dimension/lat'] = DEFAULT_FILE_SHAPE[0] - file_content['/dimension/lon'] = DEFAULT_FILE_SHAPE[1] + file_content["/dimension/lat"] = DEFAULT_FILE_SHAPE[0] + file_content["/dimension/lon"] = DEFAULT_FILE_SHAPE[1] # convert to xarrays for key, val in file_content.items(): - if key == 'lonArr' or key == 'latArr': + if key == "lonArr" or key == "latArr": file_content[key] = DataArray(val) elif isinstance(val, np.ndarray): if val.ndim > 1: - file_content[key] = DataArray(val, dims=('y', 'x'), attrs={"units": file_content_units[key]}) + file_content[key] = DataArray(val, dims=("y", "x"), attrs={"units": file_content_units[key]}) else: file_content[key] = DataArray(val) else: - msg = 'Wrong Test Reader for file_type {}'.format(filetype_info['file_type']) + msg = "Wrong Test Reader for file_type {}".format(filetype_info["file_type"]) raise AssertionError(msg) return file_content @@ -93,9 +93,9 @@ def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.mimic_TPW2_nc import MimicTPW2FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(MimicTPW2FileHandler, '__bases__', (FakeNetCDF4FileHandlerMimic,)) + self.p = mock.patch.object(MimicTPW2FileHandler, "__bases__", (FakeNetCDF4FileHandlerMimic,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -108,7 +108,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'comp20190619.130000.nc', + "comp20190619.130000.nc", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -119,16 +119,16 @@ def test_load_mimic(self): """Load Mimic data.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.mimic_TPW2_nc.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.mimic_TPW2_nc.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'comp20190619.130000.nc', + 
"comp20190619.130000.nc", ]) r.create_filehandlers(loadables) - ds = r.load(['tpwGrid']) + ds = r.load(["tpwGrid"]) self.assertEqual(len(ds), 1) for d in ds.values(): - self.assertEqual(d.attrs['platform_shortname'], 'aggregated microwave') - self.assertEqual(d.attrs['sensor'], 'mimic') - self.assertIn('area', d.attrs) - self.assertIn('units', d.attrs) - self.assertIsNotNone(d.attrs['area']) + self.assertEqual(d.attrs["platform_shortname"], "aggregated microwave") + self.assertEqual(d.attrs["sensor"], "mimic") + self.assertIn("area", d.attrs) + self.assertIn("units", d.attrs) + self.assertIsNotNone(d.attrs["area"]) diff --git a/satpy/tests/reader_tests/test_mirs.py b/satpy/tests/reader_tests/test_mirs.py index b726a519e5..69f5543411 100644 --- a/satpy/tests/reader_tests/test_mirs.py +++ b/satpy/tests/reader_tests/test_mirs.py @@ -46,17 +46,17 @@ FREQ = xr.DataArray([23.8, 31.4, 50.3, 51.76, 52.8, 53.596, 54.4, 54.94, 55.5, 57.29, 57.29, 57.29, 57.29, 57.29, 57.29, 88.2, 165.5, 183.31, 183.31, 183.31, 183.31, 183.31][:N_CHANNEL], - dims='Channel', - attrs={'description': "Central Frequencies (GHz)"}) + dims="Channel", + attrs={"description": "Central Frequencies (GHz)"}) POLO = xr.DataArray([2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3, 3, - 3, 3, 3][:N_CHANNEL], dims='Channel', - attrs={'description': "Polarizations"}) - -DS_IDS = ['RR', 'longitude', 'latitude'] -TEST_VARS = ['btemp_88v', 'btemp_165h', - 'btemp_23v', 'RR', 'Sfc_type'] -DEFAULT_UNITS = {'btemp_88v': 'K', 'btemp_165h': 'K', - 'btemp_23v': 'K', 'RR': 'mm/hr', 'Sfc_type': "1"} + 3, 3, 3][:N_CHANNEL], dims="Channel", + attrs={"description": "Polarizations"}) + +DS_IDS = ["RR", "longitude", "latitude"] +TEST_VARS = ["btemp_88v", "btemp_165h", + "btemp_23v", "RR", "Sfc_type"] +DEFAULT_UNITS = {"btemp_88v": "K", "btemp_165h": "K", + "btemp_23v": "K", "RR": "mm/hr", "Sfc_type": "1"} PLATFORM = {"M2": "metop-a", "NPP": "npp", "GPM": "gpm"} SENSOR = {"m2": "amsu-mhs", "npp": "atms", "gpm": "GPI"} @@ -96,20 +96,20 @@ def fake_coeff_from_fn(fn): coeff_str = [] for idx in range(1, N_CHANNEL + 1): nx = idx - 1 - coeff_str.append('\n') - next_line = ' {} {} {}\n'.format(idx, all_nchx[nx], ameans[nx]) + coeff_str.append("\n") + next_line = " {} {} {}\n".format(idx, all_nchx[nx], ameans[nx]) coeff_str.append(next_line) - next_line = ' {}\n'.format(" ".join([str(x) for x in locations[idx - 1]])) + next_line = " {}\n".format(" ".join([str(x) for x in locations[idx - 1]])) coeff_str.append(next_line) for fov in range(1, N_FOV+1): random_coeff = np.random.rand(all_nchx[nx]) random_coeff = np.ones(all_nchx[nx]) - str_coeff = ' '.join([str(x) for x in random_coeff]) + str_coeff = " ".join([str(x) for x in random_coeff]) random_means = np.random.uniform(261, 267, all_nchx[nx]) random_means = np.zeros(all_nchx[nx]) - str_means = ' '.join([str(x) for x in random_means]) + str_means = " ".join([str(x) for x in random_means]) error_val = np.random.uniform(0, 4) - coeffs_line = ' {:>2} {:>2} {} {} {}\n'.format(idx, fov, + coeffs_line = " {:>2} {:>2} {} {} {}\n".format(idx, fov, str_coeff, str_means, error_val) @@ -122,50 +122,50 @@ def _get_datasets_with_attributes(**kwargs): """Represent files with two resolution of variables in them (ex. OCEAN).""" bt = xr.DataArray(np.linspace(1830, 3930, N_SCANLINE * N_FOV * N_CHANNEL). 
reshape(N_SCANLINE, N_FOV, N_CHANNEL), - attrs={'long_name': "Channel Temperature (K)", - 'units': "Kelvin", - 'coordinates': "Longitude Latitude Freq", - 'scale_factor': 0.01, - '_FillValue': -999, - 'valid_range': [0, 50000]}, - dims=('Scanline', 'Field_of_view', 'Channel')) + attrs={"long_name": "Channel Temperature (K)", + "units": "Kelvin", + "coordinates": "Longitude Latitude Freq", + "scale_factor": 0.01, + "_FillValue": -999, + "valid_range": [0, 50000]}, + dims=("Scanline", "Field_of_view", "Channel")) rr = xr.DataArray(np.random.randint(100, 500, size=(N_SCANLINE, N_FOV)), - attrs={'long_name': "Rain Rate (mm/hr)", - 'units': "mm/hr", - 'coordinates': "Longitude Latitude", - 'scale_factor': 0.1, - '_FillValue': -999, - 'valid_range': [0, 1000]}, - dims=('Scanline', 'Field_of_view')) + attrs={"long_name": "Rain Rate (mm/hr)", + "units": "mm/hr", + "coordinates": "Longitude Latitude", + "scale_factor": 0.1, + "_FillValue": -999, + "valid_range": [0, 1000]}, + dims=("Scanline", "Field_of_view")) sfc_type = xr.DataArray(np.random.randint(0, 4, size=(N_SCANLINE, N_FOV)), - attrs={'description': "type of surface:0-ocean," + + attrs={"description": "type of surface:0-ocean," + "1-sea ice,2-land,3-snow", - 'units': "1", - 'coordinates': "Longitude Latitude", - '_FillValue': -999, - 'valid_range': [0, 3] + "units": "1", + "coordinates": "Longitude Latitude", + "_FillValue": -999, + "valid_range": [0, 3] }, - dims=('Scanline', 'Field_of_view')) + dims=("Scanline", "Field_of_view")) latitude = xr.DataArray(DEFAULT_LAT.reshape(DEFAULT_2D_SHAPE), - attrs={'long_name': + attrs={"long_name": "Latitude of the view (-90,90)"}, - dims=('Scanline', 'Field_of_view')) + dims=("Scanline", "Field_of_view")) longitude = xr.DataArray(DEFAULT_LON.reshape(DEFAULT_2D_SHAPE), - attrs={'long_name': + attrs={"long_name": "Longitude of the view (-180,180)"}, - dims=('Scanline', 'Field_of_view')) + dims=("Scanline", "Field_of_view")) ds_vars = { - 'Freq': FREQ, - 'Polo': POLO, - 'BT': bt, - 'RR': rr, - 'Sfc_type': sfc_type, - 'Latitude': latitude, - 'Longitude': longitude + "Freq": FREQ, + "Polo": POLO, + "BT": bt, + "RR": rr, + "Sfc_type": sfc_type, + "Latitude": latitude, + "Longitude": longitude } - attrs = {'missing_value': -999.} + attrs = {"missing_value": -999.} ds = xr.Dataset(ds_vars, attrs=attrs) ds = ds.assign_coords({"Freq": FREQ, "Latitude": latitude, "Longitude": longitude}) return ds @@ -175,38 +175,38 @@ def _get_datasets_with_less_attributes(): """Represent files with two resolution of variables in them (ex. OCEAN).""" bt = xr.DataArray(np.linspace(1830, 3930, N_SCANLINE * N_FOV * N_CHANNEL). 
reshape(N_SCANLINE, N_FOV, N_CHANNEL), - attrs={'long_name': "Channel Temperature (K)", - 'scale_factor': 0.01}, - dims=('Scanline', 'Field_of_view', 'Channel')) + attrs={"long_name": "Channel Temperature (K)", + "scale_factor": 0.01}, + dims=("Scanline", "Field_of_view", "Channel")) rr = xr.DataArray(np.random.randint(100, 500, size=(N_SCANLINE, N_FOV)), - attrs={'long_name': "Rain Rate (mm/hr)", - 'scale_factor': 0.1}, - dims=('Scanline', 'Field_of_view')) + attrs={"long_name": "Rain Rate (mm/hr)", + "scale_factor": 0.1}, + dims=("Scanline", "Field_of_view")) sfc_type = xr.DataArray(np.random.randint(0, 4, size=(N_SCANLINE, N_FOV)), - attrs={'description': "type of surface:0-ocean," + + attrs={"description": "type of surface:0-ocean," + "1-sea ice,2-land,3-snow"}, - dims=('Scanline', 'Field_of_view')) + dims=("Scanline", "Field_of_view")) latitude = xr.DataArray(DEFAULT_LAT.reshape(DEFAULT_2D_SHAPE), - attrs={'long_name': + attrs={"long_name": "Latitude of the view (-90,90)"}, - dims=('Scanline', 'Field_of_view')) + dims=("Scanline", "Field_of_view")) longitude = xr.DataArray(DEFAULT_LON.reshape(DEFAULT_2D_SHAPE), attrs={"long_name": "Longitude of the view (-180,180)"}, - dims=('Scanline', 'Field_of_view')) + dims=("Scanline", "Field_of_view")) ds_vars = { - 'Freq': FREQ, - 'Polo': POLO, - 'BT': bt, - 'RR': rr, - 'Sfc_type': sfc_type, - 'Longitude': longitude, - 'Latitude': latitude + "Freq": FREQ, + "Polo": POLO, + "BT": bt, + "RR": rr, + "Sfc_type": sfc_type, + "Longitude": longitude, + "Latitude": latitude } - attrs = {'missing_value': -999.} + attrs = {"missing_value": -999.} ds = xr.Dataset(ds_vars, attrs=attrs) ds = ds.assign_coords({"Freq": FREQ, "Latitude": latitude, "Longitude": longitude}) return ds @@ -227,7 +227,7 @@ class TestMirsL2_NcReader: def setup_method(self): """Read fake data.""" from satpy._config import config_search_paths - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) @pytest.mark.parametrize( ("filenames", "expected_loadables"), @@ -240,7 +240,7 @@ def setup_method(self): def test_reader_creation(self, filenames, expected_loadables): """Test basic initialization.""" from satpy.readers import load_reader - with mock.patch('satpy.readers.mirs.xr.open_dataset') as od: + with mock.patch("satpy.readers.mirs.xr.open_dataset") as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) @@ -260,7 +260,7 @@ def test_reader_creation(self, filenames, expected_loadables): def test_available_datasets(self, filenames, expected_datasets): """Test that variables are dynamically discovered.""" from satpy.readers import load_reader - with mock.patch('satpy.readers.mirs.xr.open_dataset') as od: + with mock.patch("satpy.readers.mirs.xr.open_dataset") as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) @@ -272,12 +272,12 @@ def test_available_datasets(self, filenames, expected_datasets): @staticmethod def _check_area(data_arr): from pyresample.geometry import SwathDefinition - area = data_arr.attrs['area'] + area = data_arr.attrs["area"] assert isinstance(area, SwathDefinition) @staticmethod def _check_fill(data_arr): - assert '_FillValue' not in data_arr.attrs + assert "_FillValue" not in data_arr.attrs if np.issubdtype(data_arr.dtype, np.floating): # we started with float32, it should stay that way assert 
data_arr.dtype.type == np.float64 @@ -285,23 +285,23 @@ def _check_fill(data_arr): @staticmethod def _check_valid_range(data_arr, test_valid_range): # valid_range is popped out of data_arr.attrs when it is applied - assert 'valid_range' not in data_arr.attrs + assert "valid_range" not in data_arr.attrs assert data_arr.data.min() >= test_valid_range[0] assert data_arr.data.max() <= test_valid_range[1] @staticmethod def _check_fill_value(data_arr, test_fill_value): - assert '_FillValue' not in data_arr.attrs + assert "_FillValue" not in data_arr.attrs assert not (data_arr.data == test_fill_value).any() @staticmethod def _check_attrs(data_arr, platform_name): attrs = data_arr.attrs - assert 'scale_factor' not in attrs - assert 'platform_name' in attrs - assert attrs['platform_name'] == platform_name - assert attrs['start_time'] == START_TIME - assert attrs['end_time'] == END_TIME + assert "scale_factor" not in attrs + assert "platform_name" in attrs + assert attrs["platform_name"] == platform_name + assert attrs["start_time"] == START_TIME + assert attrs["end_time"] == END_TIME @pytest.mark.parametrize( ("filenames", "loadable_ids", "platform_name"), @@ -312,18 +312,18 @@ def _check_attrs(data_arr, platform_name): ([OTHER_MIRS_L2_SWATH], TEST_VARS, "gpm"), ] ) - @pytest.mark.parametrize('reader_kw', [{}, {'limb_correction': False}]) + @pytest.mark.parametrize("reader_kw", [{}, {"limb_correction": False}]) def test_basic_load(self, filenames, loadable_ids, platform_name, reader_kw): """Test that variables are loaded properly.""" from satpy.readers import load_reader - with mock.patch('satpy.readers.mirs.xr.open_dataset') as od: + with mock.patch("satpy.readers.mirs.xr.open_dataset") as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) r.create_filehandlers(loadables, fh_kwargs=reader_kw) - with mock.patch('satpy.readers.mirs.read_atms_coeff_to_string') as \ - fd, mock.patch('satpy.readers.mirs.retrieve'): + with mock.patch("satpy.readers.mirs.read_atms_coeff_to_string") as \ + fd, mock.patch("satpy.readers.mirs.retrieve"): fd.side_effect = fake_coeff_from_fn loaded_data_arrs = r.load(loadable_ids) assert len(loaded_data_arrs) == len(loadable_ids) @@ -332,12 +332,12 @@ def test_basic_load(self, filenames, loadable_ids, for _data_id, data_arr in loaded_data_arrs.items(): data_arr = data_arr.compute() var_name = data_arr.attrs["name"] - if var_name not in ['latitude', 'longitude']: + if var_name not in ["latitude", "longitude"]: self._check_area(data_arr) self._check_fill(data_arr) self._check_attrs(data_arr, platform_name) - input_fake_data = test_data['BT'] if "btemp" in var_name \ + input_fake_data = test_data["BT"] if "btemp" in var_name \ else test_data[var_name] if "valid_range" in input_fake_data.attrs: valid_range = input_fake_data.attrs["valid_range"] @@ -346,9 +346,9 @@ def test_basic_load(self, filenames, loadable_ids, fill_value = input_fake_data.attrs["_FillValue"] self._check_fill_value(data_arr, fill_value) - sensor = data_arr.attrs['sensor'] - if reader_kw.get('limb_correction', True) and sensor == 'atms': + sensor = data_arr.attrs["sensor"] + if reader_kw.get("limb_correction", True) and sensor == "atms": fd.assert_called() else: fd.assert_not_called() - assert data_arr.attrs['units'] == DEFAULT_UNITS[var_name] + assert data_arr.attrs["units"] == DEFAULT_UNITS[var_name] diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 57ae3dfc31..5f8490151f 100644 
--- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -955,7 +955,7 @@ def test_xml_calibration_to_radiance(self): def test_xml_navigation(self): """Test the navigation.""" from pyproj import CRS - crs = CRS('EPSG:32616') + crs = CRS("EPSG:32616") dsid = make_dataid(name="B01", resolution=60) result = self.xml_tile_fh.get_area_def(dsid) diff --git a/satpy/tests/reader_tests/test_msu_gsa_l1b.py b/satpy/tests/reader_tests/test_msu_gsa_l1b.py index a5efc52be6..f55c9638c8 100644 --- a/satpy/tests/reader_tests/test_msu_gsa_l1b.py +++ b/satpy/tests/reader_tests/test_msu_gsa_l1b.py @@ -27,7 +27,7 @@ from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler from satpy.tests.utils import make_dataid -SOLCONST = '273.59' +SOLCONST = "273.59" class FakeHDF5FileHandler2(FakeHDF5FileHandler): @@ -35,70 +35,70 @@ class FakeHDF5FileHandler2(FakeHDF5FileHandler): def _get_data(self, num_scans, num_cols): data = { - 'Data/resolution_1km/Solar_Zenith_Angle': + "Data/resolution_1km/Solar_Zenith_Angle": xr.DataArray( da.ones((num_scans*4, num_cols*4), chunks=1024, dtype=np.uint16), attrs={ - 'scale': 0.01, 'offset': 0., 'fill_value': -999. + "scale": 0.01, "offset": 0., "fill_value": -999. }, - dims=('x', 'y')), - 'Geolocation/resolution_1km/Latitude': + dims=("x", "y")), + "Geolocation/resolution_1km/Latitude": xr.DataArray( da.ones((num_scans*4, num_cols*4), chunks=1024, dtype=np.uint16), attrs={ - 'scale': 0.01, 'offset': 0., 'fill_value': -999. + "scale": 0.01, "offset": 0., "fill_value": -999. }, - dims=('x', 'y')), - 'Geolocation/resolution_1km/Longitude': + dims=("x", "y")), + "Geolocation/resolution_1km/Longitude": xr.DataArray( da.ones((num_scans*4, num_cols*4), chunks=1024, dtype=np.uint16), attrs={ - 'scale': 0.01, 'offset': 0., 'fill_value': -999. + "scale": 0.01, "offset": 0., "fill_value": -999. }, - dims=('x', 'y')), - 'Data/resolution_1km/Radiance_01': + dims=("x", "y")), + "Data/resolution_1km/Radiance_01": xr.DataArray( da.ones((num_scans*4, num_cols*4), chunks=1024, dtype=np.uint16), attrs={ - 'scale': 0.01, 'offset': 0., 'fill_value': -999., 'F_solar_constant': SOLCONST + "scale": 0.01, "offset": 0., "fill_value": -999., "F_solar_constant": SOLCONST }, - dims=('x', 'y')), - 'Data/resolution_4km/Solar_Zenith_Angle': + dims=("x", "y")), + "Data/resolution_4km/Solar_Zenith_Angle": xr.DataArray( da.ones((num_scans, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'scale': 0.01, 'offset': 0., 'fill_value': -999. + "scale": 0.01, "offset": 0., "fill_value": -999. }, - dims=('x', 'y')), - 'Geolocation/resolution_4km/Latitude': + dims=("x", "y")), + "Geolocation/resolution_4km/Latitude": xr.DataArray( da.ones((num_scans, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'scale': 0.01, 'offset': 0., 'fill_value': -999. + "scale": 0.01, "offset": 0., "fill_value": -999. }, - dims=('x', 'y')), - 'Geolocation/resolution_4km/Longitude': + dims=("x", "y")), + "Geolocation/resolution_4km/Longitude": xr.DataArray( da.ones((num_scans, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'scale': 0.01, 'offset': 0., 'fill_value': -999. + "scale": 0.01, "offset": 0., "fill_value": -999. }, - dims=('x', 'y')), - 'Data/resolution_4km/Brightness_Temperature_09': + dims=("x", "y")), + "Data/resolution_4km/Brightness_Temperature_09": xr.DataArray( da.ones((num_scans, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'scale': 0.01, 'offset': 0., 'fill_value': -999. + "scale": 0.01, "offset": 0., "fill_value": -999. 
}, - dims=('x', 'y')), + dims=("x", "y")), } return data @@ -107,10 +107,10 @@ def get_test_content(self, filename, filename_info, filetype_info): num_scans = 20 num_cols = 2048 global_attrs = { - '/attr/timestamp_without_timezone': '2022-01-13T12:45:00', - '/attr/satellite_observation_point_height': '38500.0', - '/attr/satellite_observation_point_latitude': '71.25', - '/attr/satellite_observation_point_longitude': '21.44', + "/attr/timestamp_without_timezone": "2022-01-13T12:45:00", + "/attr/satellite_observation_point_height": "38500.0", + "/attr/satellite_observation_point_latitude": "71.25", + "/attr/satellite_observation_point_longitude": "21.44", } data = self._get_data(num_scans, num_cols) @@ -131,13 +131,13 @@ def setup_method(self): from satpy._config import config_search_paths from satpy.readers import load_reader from satpy.readers.msu_gsa_l1b import MSUGSAFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(MSUGSAFileHandler, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(MSUGSAFileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True - filenames = ['ArcticaM1_202201131245.h5'] + filenames = ["ArcticaM1_202201131245.h5"] self.reader = load_reader(self.reader_configs) files = self.reader.select_files_from_pathnames(filenames) self.reader.create_filehandlers(files) @@ -148,34 +148,34 @@ def teardown_method(self): def test_irbt(self): """Test retrieval in brightness temperature.""" - ds_ids = [make_dataid(name='C09', calibration='brightness_temperature')] + ds_ids = [make_dataid(name="C09", calibration="brightness_temperature")] res = self.reader.load(ds_ids) - assert 'C09' in res - assert res['C09'].attrs['calibration'] == 'brightness_temperature' - assert res['C09'].attrs['platform_name'] == 'Arctica-M-N1' - assert res['C09'].attrs['sat_latitude'] == 71.25 - assert res['C09'].attrs['sat_longitude'] == 21.44 - assert res['C09'].attrs['sat_altitude'] == 38500. - assert res['C09'].attrs['resolution'] == 4000 + assert "C09" in res + assert res["C09"].attrs["calibration"] == "brightness_temperature" + assert res["C09"].attrs["platform_name"] == "Arctica-M-N1" + assert res["C09"].attrs["sat_latitude"] == 71.25 + assert res["C09"].attrs["sat_longitude"] == 21.44 + assert res["C09"].attrs["sat_altitude"] == 38500. 
+ assert res["C09"].attrs["resolution"] == 4000 def test_nocounts(self): """Test we can't get IR or VIS data as counts.""" - ds_ids = [make_dataid(name='C01', calibration='counts')] + ds_ids = [make_dataid(name="C01", calibration="counts")] with pytest.raises(KeyError): self.reader.load(ds_ids) - ds_ids = [make_dataid(name='C09', calibration='counts')] + ds_ids = [make_dataid(name="C09", calibration="counts")] with pytest.raises(KeyError): self.reader.load(ds_ids) def test_vis_cal(self): """Test that we can retrieve VIS data as both radiance and reflectance.""" - ds_ids = [make_dataid(name='C01', calibration='radiance')] + ds_ids = [make_dataid(name="C01", calibration="radiance")] res = self.reader.load(ds_ids) - rad = res['C01'].data - ds_ids = [make_dataid(name='C01', calibration='reflectance')] + rad = res["C01"].data + ds_ids = [make_dataid(name="C01", calibration="reflectance")] res = self.reader.load(ds_ids) - refl = res['C01'].data + refl = res["C01"].data # Check the RAD->REFL conversion np.testing.assert_allclose(100 * np.pi * rad / float(SOLCONST), refl) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index 749386bb98..8a57507141 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -44,27 +44,27 @@ # - request attrs_exp: dict = { - 'platform': 'MET7', - 'raw_metadata': {'foo': 'bar'}, - 'sensor': 'MVIRI', - 'orbital_parameters': { - 'projection_longitude': 57.0, - 'projection_latitude': 0.0, - 'projection_altitude': 35785860.0, - 'satellite_actual_longitude': 57.1, - 'satellite_actual_latitude': 0.1, + "platform": "MET7", + "raw_metadata": {"foo": "bar"}, + "sensor": "MVIRI", + "orbital_parameters": { + "projection_longitude": 57.0, + "projection_latitude": 0.0, + "projection_altitude": 35785860.0, + "satellite_actual_longitude": 57.1, + "satellite_actual_latitude": 0.1, } } attrs_refl_exp = attrs_exp.copy() attrs_refl_exp.update( - {'sun_earth_distance_correction_applied': True, - 'sun_earth_distance_correction_factor': 1.} + {"sun_earth_distance_correction_applied": True, + "sun_earth_distance_correction_factor": 1.} ) -acq_time_vis_exp = [np.datetime64('1970-01-01 00:30'), - np.datetime64('1970-01-01 00:30'), - np.datetime64('1970-01-01 02:30'), - np.datetime64('1970-01-01 02:30')] +acq_time_vis_exp = [np.datetime64("1970-01-01 00:30"), + np.datetime64("1970-01-01 00:30"), + np.datetime64("1970-01-01 02:30"), + np.datetime64("1970-01-01 02:30")] vis_counts_exp = xr.DataArray( np.array( [[0., 17., 34., 51.], @@ -73,9 +73,9 @@ [204., 221., 238., 255]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_vis_exp), + "acq_time": ("y", acq_time_vis_exp), }, attrs=attrs_exp ) @@ -87,9 +87,9 @@ [235.48, 255.2, 274.92, 294.64]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_vis_exp), + "acq_time": ("y", acq_time_vis_exp), }, attrs=attrs_exp ) @@ -104,9 +104,9 @@ # (0, 0) and (2, 2) are NaN because radiance is NaN # (0, 2) is NaN because SZA >= 90 degrees # Last row/col is NaN due to SZA interpolation - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_vis_exp), + "acq_time": ("y", acq_time_vis_exp), }, attrs=attrs_refl_exp ) @@ -118,23 +118,23 @@ [1.3, 1.4, 1.5, 1.6]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_vis_exp), + "acq_time": ("y", acq_time_vis_exp), }, 
attrs=attrs_exp ) -acq_time_ir_wv_exp = [np.datetime64('1970-01-01 00:30'), - np.datetime64('1970-01-01 02:30')] +acq_time_ir_wv_exp = [np.datetime64("1970-01-01 00:30"), + np.datetime64("1970-01-01 02:30")] wv_counts_exp = xr.DataArray( np.array( [[0, 85], [170, 255]], dtype=np.uint8 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_ir_wv_exp), + "acq_time": ("y", acq_time_ir_wv_exp), }, attrs=attrs_exp ) @@ -144,9 +144,9 @@ [8, 12.25]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_ir_wv_exp), + "acq_time": ("y", acq_time_ir_wv_exp), }, attrs=attrs_exp ) @@ -156,9 +156,9 @@ [252.507448, 266.863289]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_ir_wv_exp), + "acq_time": ("y", acq_time_ir_wv_exp), }, attrs=attrs_exp ) @@ -168,9 +168,9 @@ [170, 255]], dtype=np.uint8 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_ir_wv_exp), + "acq_time": ("y", acq_time_ir_wv_exp), }, attrs=attrs_exp ) @@ -180,9 +180,9 @@ [165, 250]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_ir_wv_exp), + "acq_time": ("y", acq_time_ir_wv_exp), }, attrs=attrs_exp ) @@ -192,9 +192,9 @@ [204.32955838, 223.28709913]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_ir_wv_exp), + "acq_time": ("y", acq_time_ir_wv_exp), }, attrs=attrs_exp ) @@ -206,9 +206,9 @@ [0, 0, 0, 0]], dtype=np.uint8 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_vis_exp), + "acq_time": ("y", acq_time_vis_exp), }, attrs=attrs_exp ) @@ -220,7 +220,7 @@ [np.nan, np.nan, np.nan, np.nan]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), attrs=attrs_exp ) sza_ir_wv_exp = xr.DataArray( @@ -229,33 +229,33 @@ [0, 45]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), attrs=attrs_exp ) area_vis_exp = AreaDefinition( - area_id='geos_mviri_4x4', - proj_id='geos_mviri_4x4', - description='MVIRI Geostationary Projection', + area_id="geos_mviri_4x4", + proj_id="geos_mviri_4x4", + description="MVIRI Geostationary Projection", projection={ - 'proj': 'geos', - 'lon_0': 57.0, - 'h': ALTITUDE, - 'a': EQUATOR_RADIUS, - 'b': POLE_RADIUS + "proj": "geos", + "lon_0": 57.0, + "h": ALTITUDE, + "a": EQUATOR_RADIUS, + "b": POLE_RADIUS }, width=4, height=4, area_extent=[5621229.74392, 5621229.74392, -5621229.74392, -5621229.74392] ) area_ir_wv_exp = area_vis_exp.copy( - area_id='geos_mviri_2x2', - proj_id='geos_mviri_2x2', + area_id="geos_mviri_2x2", + proj_id="geos_mviri_2x2", width=2, height=2 ) -@pytest.fixture(name='fake_dataset') +@pytest.fixture(name="fake_dataset") def fixture_fake_dataset(): """Create fake dataset.""" count_ir = da.linspace(0, 255, 4, dtype=np.uint8).reshape(2, 2) @@ -277,55 +277,55 @@ def fixture_fake_dataset(): dtype=np.uint8 ) ) - time = np.arange(4).astype('datetime64[h]').reshape(2, 2) + time = np.arange(4).astype("datetime64[h]").reshape(2, 2) ds = xr.Dataset( data_vars={ - 'count_vis': (('y', 'x'), count_vis), - 'count_wv': (('y_ir_wv', 'x_ir_wv'), count_wv), - 'count_ir': (('y_ir_wv', 'x_ir_wv'), count_ir), - 'toa_bidirectional_reflectance_vis': vis_refl_exp / 100, - 'u_independent_toa_bidirectional_reflectance': u_vis_refl_exp / 100, - 'quality_pixel_bitmask': (('y', 'x'), mask), - 'solar_zenith_angle': (('y_tie', 'x_tie'), sza), - 'time_ir_wv': (('y_ir_wv', 'x_ir_wv'), time), - 'a_ir': -5.0, - 'b_ir': 1.0, - 'bt_a_ir': 10.0, - 'bt_b_ir': 
-1000.0, - 'a_wv': -0.5, - 'b_wv': 0.05, - 'bt_a_wv': 10.0, - 'bt_b_wv': -2000.0, - 'years_since_launch': 20.0, - 'a0_vis': 1.0, - 'a1_vis': 0.01, - 'a2_vis': -0.0001, - 'mean_count_space_vis': 1.0, - 'distance_sun_earth': 1.0, - 'solar_irradiance_vis': 650.0, - 'sub_satellite_longitude_start': 57.1, - 'sub_satellite_longitude_end': np.nan, - 'sub_satellite_latitude_start': np.nan, - 'sub_satellite_latitude_end': 0.1, + "count_vis": (("y", "x"), count_vis), + "count_wv": (("y_ir_wv", "x_ir_wv"), count_wv), + "count_ir": (("y_ir_wv", "x_ir_wv"), count_ir), + "toa_bidirectional_reflectance_vis": vis_refl_exp / 100, + "u_independent_toa_bidirectional_reflectance": u_vis_refl_exp / 100, + "quality_pixel_bitmask": (("y", "x"), mask), + "solar_zenith_angle": (("y_tie", "x_tie"), sza), + "time_ir_wv": (("y_ir_wv", "x_ir_wv"), time), + "a_ir": -5.0, + "b_ir": 1.0, + "bt_a_ir": 10.0, + "bt_b_ir": -1000.0, + "a_wv": -0.5, + "b_wv": 0.05, + "bt_a_wv": 10.0, + "bt_b_wv": -2000.0, + "years_since_launch": 20.0, + "a0_vis": 1.0, + "a1_vis": 0.01, + "a2_vis": -0.0001, + "mean_count_space_vis": 1.0, + "distance_sun_earth": 1.0, + "solar_irradiance_vis": 650.0, + "sub_satellite_longitude_start": 57.1, + "sub_satellite_longitude_end": np.nan, + "sub_satellite_latitude_start": np.nan, + "sub_satellite_latitude_end": 0.1, }, coords={ - 'y': [1, 2, 3, 4], - 'x': [1, 2, 3, 4], - 'y_ir_wv': [1, 2], - 'x_ir_wv': [1, 2], - 'y_tie': [1, 2], - 'x_tie': [1, 2] + "y": [1, 2, 3, 4], + "x": [1, 2, 3, 4], + "y_ir_wv": [1, 2], + "x_ir_wv": [1, 2], + "y_tie": [1, 2], + "x_tie": [1, 2] }, - attrs={'foo': 'bar'} + attrs={"foo": "bar"} ) - ds['count_ir'].attrs['ancillary_variables'] = 'a_ir b_ir' - ds['count_wv'].attrs['ancillary_variables'] = 'a_wv b_wv' + ds["count_ir"].attrs["ancillary_variables"] = "a_ir b_ir" + ds["count_wv"].attrs["ancillary_variables"] = "a_wv b_wv" return ds @pytest.fixture( - name='file_handler', + name="file_handler", params=[FiduceoMviriEasyFcdrFileHandler, FiduceoMviriFullFcdrFileHandler] ) @@ -334,21 +334,21 @@ def fixture_file_handler(fake_dataset, request): marker = request.node.get_closest_marker("file_handler_data") mask_bad_quality = True if marker: - mask_bad_quality = marker.kwargs['mask_bad_quality'] + mask_bad_quality = marker.kwargs["mask_bad_quality"] fh_class = request.param - with mock.patch('satpy.readers.mviri_l1b_fiduceo_nc.xr.open_dataset') as open_dataset: + with mock.patch("satpy.readers.mviri_l1b_fiduceo_nc.xr.open_dataset") as open_dataset: open_dataset.return_value = fake_dataset return fh_class( - filename='filename', - filename_info={'platform': 'MET7', - 'sensor': 'MVIRI', - 'projection_longitude': '57.0'}, - filetype_info={'foo': 'bar'}, + filename="filename", + filename_info={"platform": "MET7", + "sensor": "MVIRI", + "projection_longitude": "57.0"}, + filetype_info={"foo": "bar"}, mask_bad_quality=mask_bad_quality ) -@pytest.fixture(name='reader') +@pytest.fixture(name="reader") def fixture_reader(): """Return MVIRI FIDUCEO FCDR reader.""" from satpy._config import config_search_paths @@ -369,35 +369,35 @@ def test_init(self, file_handler): assert file_handler.mask_bad_quality is True @pytest.mark.parametrize( - ('name', 'calibration', 'resolution', 'expected'), + ("name", "calibration", "resolution", "expected"), [ - ('VIS', 'counts', 2250, vis_counts_exp), - ('VIS', 'radiance', 2250, vis_rad_exp), - ('VIS', 'reflectance', 2250, vis_refl_exp), - ('WV', 'counts', 4500, wv_counts_exp), - ('WV', 'radiance', 4500, wv_rad_exp), - ('WV', 'brightness_temperature', 4500, 
wv_bt_exp), - ('IR', 'counts', 4500, ir_counts_exp), - ('IR', 'radiance', 4500, ir_rad_exp), - ('IR', 'brightness_temperature', 4500, ir_bt_exp), - ('quality_pixel_bitmask', None, 2250, quality_pixel_bitmask_exp), - ('solar_zenith_angle', None, 2250, sza_vis_exp), - ('solar_zenith_angle', None, 4500, sza_ir_wv_exp), - ('u_independent_toa_bidirectional_reflectance', None, 4500, u_vis_refl_exp) + ("VIS", "counts", 2250, vis_counts_exp), + ("VIS", "radiance", 2250, vis_rad_exp), + ("VIS", "reflectance", 2250, vis_refl_exp), + ("WV", "counts", 4500, wv_counts_exp), + ("WV", "radiance", 4500, wv_rad_exp), + ("WV", "brightness_temperature", 4500, wv_bt_exp), + ("IR", "counts", 4500, ir_counts_exp), + ("IR", "radiance", 4500, ir_rad_exp), + ("IR", "brightness_temperature", 4500, ir_bt_exp), + ("quality_pixel_bitmask", None, 2250, quality_pixel_bitmask_exp), + ("solar_zenith_angle", None, 2250, sza_vis_exp), + ("solar_zenith_angle", None, 4500, sza_ir_wv_exp), + ("u_independent_toa_bidirectional_reflectance", None, 4500, u_vis_refl_exp) ] ) def test_get_dataset(self, file_handler, name, calibration, resolution, expected): """Test getting datasets.""" - id_keys = {'name': name, 'resolution': resolution} + id_keys = {"name": name, "resolution": resolution} if calibration: - id_keys['calibration'] = calibration + id_keys["calibration"] = calibration dataset_id = make_dataid(**id_keys) - dataset_info = {'platform': 'MET7'} + dataset_info = {"platform": "MET7"} is_easy = isinstance(file_handler, FiduceoMviriEasyFcdrFileHandler) - is_vis = name == 'VIS' - is_refl = calibration == 'reflectance' + is_vis = name == "VIS" + is_refl = calibration == "reflectance" if is_easy and is_vis and not is_refl: # VIS counts/radiance not available in easy FCDR with pytest.raises(ValueError): @@ -412,34 +412,34 @@ def test_get_dataset_corrupt(self, file_handler): """Test getting datasets with known corruptions.""" # Time may have different names and satellite position might be missing file_handler.nc.nc = file_handler.nc.nc.rename( - {'time_ir_wv': 'time'} + {"time_ir_wv": "time"} ) file_handler.nc.nc = file_handler.nc.nc.drop_vars( - ['sub_satellite_longitude_start'] + ["sub_satellite_longitude_start"] ) dataset_id = make_dataid( - name='VIS', - calibration='reflectance', + name="VIS", + calibration="reflectance", resolution=2250 ) - ds = file_handler.get_dataset(dataset_id, {'platform': 'MET7'}) - assert 'actual_satellite_longitude' not in ds.attrs['orbital_parameters'] - assert 'actual_satellite_latitude' not in ds.attrs['orbital_parameters'] + ds = file_handler.get_dataset(dataset_id, {"platform": "MET7"}) + assert "actual_satellite_longitude" not in ds.attrs["orbital_parameters"] + assert "actual_satellite_latitude" not in ds.attrs["orbital_parameters"] xr.testing.assert_allclose(ds, vis_refl_exp) @mock.patch( - 'satpy.readers.mviri_l1b_fiduceo_nc.Interpolator.interp_acq_time' + "satpy.readers.mviri_l1b_fiduceo_nc.Interpolator.interp_acq_time" ) def test_time_cache(self, interp_acq_time, file_handler): """Test caching of acquisition times.""" dataset_id = make_dataid( - name='VIS', + name="VIS", resolution=2250, - calibration='reflectance' + calibration="reflectance" ) info = {} - interp_acq_time.return_value = xr.DataArray([1, 2, 3, 4], dims='y') + interp_acq_time.return_value = xr.DataArray([1, 2, 3, 4], dims="y") # Cache init file_handler.get_dataset(dataset_id, info) @@ -451,22 +451,22 @@ def test_time_cache(self, interp_acq_time, file_handler): interp_acq_time.assert_not_called() # Cache miss - 
interp_acq_time.return_value = xr.DataArray([1, 2], dims='y') + interp_acq_time.return_value = xr.DataArray([1, 2], dims="y") another_id = make_dataid( - name='IR', + name="IR", resolution=4500, - calibration='brightness_temperature' + calibration="brightness_temperature" ) interp_acq_time.reset_mock() file_handler.get_dataset(another_id, info) interp_acq_time.assert_called() @mock.patch( - 'satpy.readers.mviri_l1b_fiduceo_nc.Interpolator.interp_tiepoints' + "satpy.readers.mviri_l1b_fiduceo_nc.Interpolator.interp_tiepoints" ) def test_angle_cache(self, interp_tiepoints, file_handler): """Test caching of angle datasets.""" - dataset_id = make_dataid(name='solar_zenith_angle', + dataset_id = make_dataid(name="solar_zenith_angle", resolution=2250) info = {} @@ -480,21 +480,21 @@ def test_angle_cache(self, interp_tiepoints, file_handler): interp_tiepoints.assert_not_called() # Cache miss - another_id = make_dataid(name='solar_zenith_angle', + another_id = make_dataid(name="solar_zenith_angle", resolution=4500) interp_tiepoints.reset_mock() file_handler.get_dataset(another_id, info) interp_tiepoints.assert_called() @pytest.mark.parametrize( - ('name', 'resolution', 'area_exp'), + ("name", "resolution", "area_exp"), [ - ('VIS', 2250, area_vis_exp), - ('WV', 4500, area_ir_wv_exp), - ('IR', 4500, area_ir_wv_exp), - ('quality_pixel_bitmask', 2250, area_vis_exp), - ('solar_zenith_angle', 2250, area_vis_exp), - ('solar_zenith_angle', 4500, area_ir_wv_exp) + ("VIS", 2250, area_vis_exp), + ("WV", 4500, area_ir_wv_exp), + ("IR", 4500, area_ir_wv_exp), + ("quality_pixel_bitmask", 2250, area_vis_exp), + ("solar_zenith_angle", 2250, area_vis_exp), + ("solar_zenith_angle", 4500, area_ir_wv_exp) ] ) def test_get_area_definition(self, file_handler, name, resolution, @@ -508,7 +508,7 @@ def test_get_area_definition(self, file_handler, name, resolution, assert b == b_exp assert area.width == area_exp.width assert area.height == area_exp.height - for key in ['h', 'lon_0', 'proj', 'units']: + for key in ["h", "lon_0", "proj", "units"]: assert area.proj_dict[key] == area_exp.proj_dict[key] np.testing.assert_allclose(area.area_extent, area_exp.area_extent) @@ -516,38 +516,38 @@ def test_calib_exceptions(self, file_handler): """Test calibration exceptions.""" with pytest.raises(KeyError): file_handler.get_dataset( - make_dataid(name='solar_zenith_angle', calibration='counts'), + make_dataid(name="solar_zenith_angle", calibration="counts"), {} ) with pytest.raises(KeyError): file_handler.get_dataset( make_dataid( - name='VIS', + name="VIS", resolution=2250, - calibration='brightness_temperature'), + calibration="brightness_temperature"), {} ) with pytest.raises(KeyError): file_handler.get_dataset( make_dataid( - name='IR', + name="IR", resolution=4500, - calibration='reflectance'), + calibration="reflectance"), {} ) if isinstance(file_handler, FiduceoMviriEasyFcdrFileHandler): with pytest.raises(KeyError): file_handler.get_dataset( - {'name': 'VIS', 'calibration': 'counts'}, + {"name": "VIS", "calibration": "counts"}, {} ) # not available in easy FCDR @pytest.mark.file_handler_data(mask_bad_quality=False) def test_bad_quality_warning(self, file_handler): """Test warning about bad VIS quality.""" - file_handler.nc.nc['quality_pixel_bitmask'] = 2 - vis = make_dataid(name='VIS', resolution=2250, - calibration='reflectance') + file_handler.nc.nc["quality_pixel_bitmask"] = 2 + vis = make_dataid(name="VIS", resolution=2250, + calibration="reflectance") with pytest.warns(UserWarning): file_handler.get_dataset(vis, {}) @@ 
-579,25 +579,25 @@ def test_reassign_coords(self): """ nc = mock.MagicMock( coords={ - 'y': [.1, .2], - 'x': [.3, .4] + "y": [.1, .2], + "x": [.3, .4] }, - dims=('y', 'x') + dims=("y", "x") ) nc.__getitem__.return_value = xr.DataArray( [[1, 2], [3, 4]], - dims=('y', 'x') + dims=("y", "x") ) foo_exp = xr.DataArray( [[1, 2], [3, 4]], - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'y': [.1, .2], - 'x': [.3, .4] + "y": [.1, .2], + "x": [.3, .4] } ) ds = DatasetWrapper(nc) - foo = ds['foo'] + foo = ds["foo"] xr.testing.assert_equal(foo, foo_exp) diff --git a/satpy/tests/reader_tests/test_mws_l1b_nc.py b/satpy/tests/reader_tests/test_mws_l1b_nc.py index a304b18782..89a6eb4700 100644 --- a/satpy/tests/reader_tests/test_mws_l1b_nc.py +++ b/satpy/tests/reader_tests/test_mws_l1b_nc.py @@ -49,23 +49,23 @@ def reader(fake_file): return MWSL1BFile( filename=fake_file, filename_info={ - 'start_time': ( - datetime.fromisoformat('2000-01-01T01:00:00') + "start_time": ( + datetime.fromisoformat("2000-01-01T01:00:00") ), - 'end_time': ( - datetime.fromisoformat('2000-01-01T02:00:00') + "end_time": ( + datetime.fromisoformat("2000-01-01T02:00:00") ), - 'creation_time': ( - datetime.fromisoformat('2000-01-01T03:00:00') + "creation_time": ( + datetime.fromisoformat("2000-01-01T03:00:00") ), }, filetype_info={ - 'longitude': 'data/navigation_data/mws_lon', - 'latitude': 'data/navigation_data/mws_lat', - 'solar_azimuth': 'data/navigation/mws_solar_azimuth_angle', - 'solar_zenith': 'data/navigation/mws_solar_zenith_angle', - 'satellite_azimuth': 'data/navigation/mws_satellite_azimuth_angle', - 'satellite_zenith': 'data/navigation/mws_satellite_zenith_angle', + "longitude": "data/navigation_data/mws_lon", + "latitude": "data/navigation_data/mws_lat", + "solar_azimuth": "data/navigation/mws_solar_azimuth_angle", + "solar_zenith": "data/navigation/mws_solar_zenith_angle", + "satellite_azimuth": "data/navigation/mws_satellite_azimuth_angle", + "satellite_zenith": "data/navigation/mws_satellite_zenith_angle", } ) @@ -73,7 +73,7 @@ def reader(fake_file): @pytest.fixture def fake_file(tmp_path): """Return file path to level-1b file.""" - file_path = tmp_path / 'test_file_mws_l1b.nc' + file_path = tmp_path / "test_file_mws_l1b.nc" writer = MWSL1BFakeFileWriter(file_path) writer.write() yield file_path @@ -88,11 +88,11 @@ def __init__(self, file_path): def write(self): """Write fake data to file.""" - with Dataset(self.file_path, 'w') as dataset: + with Dataset(self.file_path, "w") as dataset: self._write_attributes(dataset) self._write_status_group(dataset) self._write_quality_group(dataset) - data_group = dataset.createGroup('data') + data_group = dataset.createGroup("data") self._create_scan_dimensions(data_group) self._write_navigation_data_group(data_group) self._write_calibration_data_group(data_group) @@ -109,45 +109,45 @@ def _write_attributes(dataset): @staticmethod def _write_status_group(dataset): """Write the status group.""" - group = dataset.createGroup('/status/satellite') + group = dataset.createGroup("/status/satellite") subsat_latitude_start = group.createVariable( - 'subsat_latitude_start', "f4" + "subsat_latitude_start", "f4" ) subsat_latitude_start[:] = 52.19 subsat_longitude_start = group.createVariable( - 'subsat_longitude_start', "f4" + "subsat_longitude_start", "f4" ) subsat_longitude_start[:] = 23.26 subsat_latitude_end = group.createVariable( - 'subsat_latitude_end', "f4" + "subsat_latitude_end", "f4" ) subsat_latitude_end[:] = 60.00 subsat_longitude_end = group.createVariable( - 
'subsat_longitude_end', "f4" + "subsat_longitude_end", "f4" ) subsat_longitude_end[:] = 2.47 @staticmethod def _write_quality_group(dataset): """Write the quality group.""" - group = dataset.createGroup('quality') + group = dataset.createGroup("quality") group.overall_quality_flag = 0 duration_of_product = group.createVariable( - 'duration_of_product', "f4" + "duration_of_product", "f4" ) duration_of_product[:] = 5944. @staticmethod def _write_navigation_data_group(dataset): """Write the navigation data group.""" - group = dataset.createGroup('navigation') - dimensions = ('n_scans', 'n_fovs') + group = dataset.createGroup("navigation") + dimensions = ("n_scans", "n_fovs") shape = (N_SCANS, N_FOVS) longitude = group.createVariable( - 'mws_lon', + "mws_lon", np.int32, dimensions=dimensions, ) @@ -157,14 +157,14 @@ def _write_navigation_data_group(dataset): longitude[:] = 35.7535 * np.ones(shape) latitude = group.createVariable( - 'mws_lat', + "mws_lat", np.float32, dimensions=dimensions, ) latitude[:] = 2. * np.ones(shape) azimuth = group.createVariable( - 'mws_solar_azimuth_angle', + "mws_solar_azimuth_angle", np.float32, dimensions=dimensions, ) @@ -173,19 +173,19 @@ def _write_navigation_data_group(dataset): @staticmethod def _create_scan_dimensions(dataset): """Create the scan/fovs dimensions.""" - dataset.createDimension('n_channels', N_CHANNELS) - dataset.createDimension('n_channels_os', N_CHANNELS_OS) - dataset.createDimension('n_scans', N_SCANS) - dataset.createDimension('n_fovs', N_FOVS) - dataset.createDimension('n_prts', N_PRTS) - dataset.createDimension('n_fovs_cal', N_FOVS_CAL) + dataset.createDimension("n_channels", N_CHANNELS) + dataset.createDimension("n_channels_os", N_CHANNELS_OS) + dataset.createDimension("n_scans", N_SCANS) + dataset.createDimension("n_fovs", N_FOVS) + dataset.createDimension("n_prts", N_PRTS) + dataset.createDimension("n_fovs_cal", N_FOVS_CAL) @staticmethod def _write_calibration_data_group(dataset): """Write the calibration data group.""" - group = dataset.createGroup('calibration') + group = dataset.createGroup("calibration") toa_bt = group.createVariable( - 'mws_toa_brightness_temperature', np.float32, dimensions=('n_scans', 'n_fovs', 'n_channels',) + "mws_toa_brightness_temperature", np.float32, dimensions=("n_scans", "n_fovs", "n_channels",) ) toa_bt.scale_factor = 1.0 # 1.0E-8 toa_bt.add_offset = 0.0 @@ -195,9 +195,9 @@ def _write_calibration_data_group(dataset): @staticmethod def _write_measurement_data_group(dataset): """Write the measurement data group.""" - group = dataset.createGroup('measurement') + group = dataset.createGroup("measurement") counts = group.createVariable( - 'mws_earth_view_counts', np.int32, dimensions=('n_scans', 'n_fovs', 'n_channels',) + "mws_earth_view_counts", np.int32, dimensions=("n_scans", "n_fovs", "n_channels",) ) counts[:] = 24100 * np.ones((N_SCANS, N_FOVS, N_CHANNELS), dtype=np.int32) @@ -239,9 +239,9 @@ def test_sub_satellite_latitude_end(self, reader): def test_get_dataset_get_channeldata_counts(self, reader): """Test getting channel data.""" - dataset_id = {'name': '1', 'units': None, - 'calibration': 'counts'} - dataset_info = {'file_key': 'data/measurement/mws_earth_view_counts'} + dataset_id = {"name": "1", "units": None, + "calibration": "counts"} + dataset_info = {"file_key": "data/measurement/mws_earth_view_counts"} dataset = reader.get_dataset(dataset_id, dataset_info) expected_bt = np.array([[24100, 24100], @@ -251,9 +251,9 @@ def test_get_dataset_get_channeldata_counts(self, reader): def 
test_get_dataset_get_channeldata_bts(self, reader): """Test getting channel data.""" - dataset_id = {'name': '1', 'units': 'K', - 'calibration': 'brightness_temperature'} - dataset_info = {'file_key': 'data/calibration/mws_toa_brightness_temperature'} + dataset_id = {"name": "1", "units": "K", + "calibration": "brightness_temperature"} + dataset_info = {"file_key": "data/calibration/mws_toa_brightness_temperature"} dataset = reader.get_dataset(dataset_id, dataset_info) @@ -268,15 +268,15 @@ def test_get_dataset_get_channeldata_bts(self, reader): def test_get_dataset_return_none_if_data_not_exist(self, reader): """Test that get_dataset() returns None if the data does not exist.""" - dataset_id = {'name': 'unknown'} - dataset_info = {'file_key': 'non/existing/data'} + dataset_id = {"name": "unknown"} + dataset_info = {"file_key": "non/existing/data"} dataset = reader.get_dataset(dataset_id, dataset_info) assert dataset is None def test_get_navigation_longitudes(self, caplog, fake_file, reader): """Test getting the longitudes.""" - dataset_id = {'name': 'mws_lon'} - dataset_info = {'file_key': 'data/navigation_data/mws_lon'} + dataset_id = {"name": "mws_lon"} + dataset_info = {"file_key": "data/navigation_data/mws_lon"} dataset = reader.get_dataset(dataset_id, dataset_info) @@ -291,8 +291,8 @@ def test_get_navigation_longitudes(self, caplog, fake_file, reader): def test_get_dataset_logs_debug_message(self, caplog, fake_file, reader): """Test that get_dataset() logs a debug message.""" - dataset_id = {'name': 'mws_lon'} - dataset_info = {'file_key': 'data/navigation_data/mws_lon'} + dataset_id = {"name": "mws_lon"} + dataset_info = {"file_key": "data/navigation_data/mws_lon"} with caplog.at_level(logging.DEBUG): _ = reader.get_dataset(dataset_id, dataset_info) @@ -302,8 +302,8 @@ def test_get_dataset_logs_debug_message(self, caplog, fake_file, reader): def test_get_dataset_aux_data_not_supported(self, reader): """Test getting an auxiliary dataset that is not supported.""" - dataset_id = {'name': 'scantime_utc'} - dataset_info = {'file_key': 'non/existing'} + dataset_id = {"name": "scantime_utc"} + dataset_info = {"file_key": "non/existing"} with pytest.raises(NotImplementedError) as exec_info: _ = reader.get_dataset(dataset_id, dataset_info) @@ -312,8 +312,8 @@ def test_get_dataset_aux_data_not_supported(self, reader): def test_get_dataset_aux_data_expected_data_missing(self, caplog, reader): """Test getting an auxiliary dataset which is supposed to be in the file but is not present.""" - dataset_id = {'name': 'surface_type'} - dataset_info = {'file_key': 'non/existing'} + dataset_id = {"name": "surface_type"} + dataset_info = {"file_key": "non/existing"} with caplog.at_level(logging.ERROR): with pytest.raises(KeyError) as exec_info: @@ -325,9 +325,9 @@ def test_get_dataset_aux_data_expected_data_missing(self, caplog, reader): " no valid Dataset created") assert log_output in caplog.text - @pytest.mark.parametrize('dims', ( - ('n_scans', 'n_fovs'), - ('x', 'y'), + @pytest.mark.parametrize("dims", ( + ("n_scans", "n_fovs"), + ("x", "y"), )) def test_standardize_dims(self, reader, dims): """Test standardize dims.""" @@ -336,7 +336,7 @@ def test_standardize_dims(self, reader, dims): dims=dims, ) standardized = reader._standardize_dims(variable) - assert standardized.dims == ('y', 'x') + assert standardized.dims == ("y", "x") @staticmethod def test_drop_coords(reader): @@ -344,7 +344,7 @@ def test_drop_coords(reader): coords = "dummy" data = xr.DataArray( np.ones(10), - dims=('y'), + dims=("y"), coords={coords: 0}, ) assert coords in
data.coords @@ -355,22 +355,22 @@ def test_get_global_attributes(self, reader): """Test get global attributes.""" attributes = reader._get_global_attributes() assert attributes == { - 'filename': reader.filename, - 'start_time': datetime(2000, 1, 2, 3, 4, 5), - 'end_time': datetime(2000, 1, 2, 4, 5, 6), - 'spacecraft_name': 'Metop-SG-A1', - 'sensor': 'MWS', - 'filename_start_time': datetime(2000, 1, 1, 1, 0), - 'filename_end_time': datetime(2000, 1, 1, 2, 0), - 'platform_name': 'Metop-SG-A1', - 'quality_group': { - 'duration_of_product': np.array(5944., dtype=np.float32), - 'overall_quality_flag': 0, + "filename": reader.filename, + "start_time": datetime(2000, 1, 2, 3, 4, 5), + "end_time": datetime(2000, 1, 2, 4, 5, 6), + "spacecraft_name": "Metop-SG-A1", + "sensor": "MWS", + "filename_start_time": datetime(2000, 1, 1, 1, 0), + "filename_end_time": datetime(2000, 1, 1, 2, 0), + "platform_name": "Metop-SG-A1", + "quality_group": { + "duration_of_product": np.array(5944., dtype=np.float32), + "overall_quality_flag": 0, } } @patch( - 'satpy.readers.mws_l1b.MWSL1BFile._get_global_attributes', + "satpy.readers.mws_l1b.MWSL1BFile._get_global_attributes", return_value={"mocked_global_attributes": True}, ) def test_manage_attributes(self, mock, reader): @@ -379,17 +379,17 @@ def test_manage_attributes(self, mock, reader): np.ones(N_SCANS), attrs={"season": "summer"}, ) - dataset_info = {'name': '1', 'units': 'K'} + dataset_info = {"name": "1", "units": "K"} variable = reader._manage_attributes(variable, dataset_info) assert variable.attrs == { - 'season': 'summer', - 'units': 'K', - 'name': '1', - 'mocked_global_attributes': True, + "season": "summer", + "units": "K", + "name": "1", + "mocked_global_attributes": True, } -@pytest.mark.parametrize("name, index", [('1', 0), ('2', 1), ('24', 23)]) +@pytest.mark.parametrize("name, index", [("1", 0), ("2", 1), ("24", 23)]) def test_get_channel_index_from_name(name, index): """Test getting the MWS channel index from the channel name.""" ch_idx = get_channel_index_from_name(name) @@ -399,7 +399,7 @@ def test_get_channel_index_from_name(name, index): def test_get_channel_index_from_name_throw_exception(): """Test that an exception is thrown when getting the MWS channel index from an unsupported name.""" with pytest.raises(Exception) as excinfo: - _ = get_channel_index_from_name('channel 1') + _ = get_channel_index_from_name("channel 1") assert str(excinfo.value) == "Channel name 'channel 1' not supported" assert excinfo.type == AttributeError diff --git a/satpy/tests/reader_tests/test_netcdf_utils.py b/satpy/tests/reader_tests/test_netcdf_utils.py index 8570d8bd34..16dfc57a83 100644 --- a/satpy/tests/reader_tests/test_netcdf_utils.py +++ b/satpy/tests/reader_tests/test_netcdf_utils.py @@ -76,26 +76,26 @@ class TestNetCDF4FileHandler(unittest.TestCase): def setUp(self): """Create a test NetCDF4 file.""" from netCDF4 import Dataset - with Dataset('test.nc', 'w') as nc: + with Dataset("test.nc", "w") as nc: # Create dimensions - nc.createDimension('rows', 10) - nc.createDimension('cols', 100) + nc.createDimension("rows", 10) + nc.createDimension("cols", 100) # Create Group - g1 = nc.createGroup('test_group') + g1 = nc.createGroup("test_group") # Add datasets - ds1_f = g1.createVariable('ds1_f', np.float32, - dimensions=('rows', 'cols')) + ds1_f = g1.createVariable("ds1_f", np.float32, + dimensions=("rows", "cols")) ds1_f[:] = np.arange(10.
* 100).reshape((10, 100)) - ds1_i = g1.createVariable('ds1_i', np.int32, - dimensions=('rows', 'cols')) + ds1_i = g1.createVariable("ds1_i", np.int32, + dimensions=("rows", "cols")) ds1_i[:] = np.arange(10 * 100).reshape((10, 100)) - ds2_f = nc.createVariable('ds2_f', np.float32, - dimensions=('rows', 'cols')) + ds2_f = nc.createVariable("ds2_f", np.float32, + dimensions=("rows", "cols")) ds2_f[:] = np.arange(10. * 100).reshape((10, 100)) - ds2_i = nc.createVariable('ds2_i', np.int32, - dimensions=('rows', 'cols')) + ds2_i = nc.createVariable("ds2_i", np.int32, + dimensions=("rows", "cols")) ds2_i[:] = np.arange(10 * 100).reshape((10, 100)) ds2_s = nc.createVariable("ds2_s", np.int8, dimensions=("rows",)) @@ -104,63 +104,63 @@ def setUp(self): ds2_sc[:] = 42 # Add attributes - nc.test_attr_str = 'test_string' + nc.test_attr_str = "test_string" nc.test_attr_int = 0 nc.test_attr_float = 1.2 nc.test_attr_str_arr = np.array(b"test_string2") - g1.test_attr_str = 'test_string' + g1.test_attr_str = "test_string" g1.test_attr_int = 0 g1.test_attr_float = 1.2 for d in [ds1_f, ds1_i, ds2_f, ds2_i]: - d.test_attr_str = 'test_string' + d.test_attr_str = "test_string" d.test_attr_int = 0 d.test_attr_float = 1.2 def tearDown(self): """Remove the previously created test file.""" - os.remove('test.nc') + os.remove("test.nc") def test_all_basic(self): """Test everything about the NetCDF4 class.""" import xarray as xr from satpy.readers.netcdf_utils import NetCDF4FileHandler - file_handler = NetCDF4FileHandler('test.nc', {}, {}) + file_handler = NetCDF4FileHandler("test.nc", {}, {}) - self.assertEqual(file_handler['/dimension/rows'], 10) - self.assertEqual(file_handler['/dimension/cols'], 100) + self.assertEqual(file_handler["/dimension/rows"], 10) + self.assertEqual(file_handler["/dimension/cols"], 100) - for ds in ('test_group/ds1_f', 'test_group/ds1_i', 'ds2_f', 'ds2_i'): - self.assertEqual(file_handler[ds].dtype, np.float32 if ds.endswith('f') else np.int32) - self.assertTupleEqual(file_handler[ds + '/shape'], (10, 100)) - self.assertEqual(file_handler[ds + '/dimensions'], ("rows", "cols")) - self.assertEqual(file_handler[ds + '/attr/test_attr_str'], 'test_string') - self.assertEqual(file_handler[ds + '/attr/test_attr_int'], 0) - self.assertEqual(file_handler[ds + '/attr/test_attr_float'], 1.2) + for ds in ("test_group/ds1_f", "test_group/ds1_i", "ds2_f", "ds2_i"): + self.assertEqual(file_handler[ds].dtype, np.float32 if ds.endswith("f") else np.int32) + self.assertTupleEqual(file_handler[ds + "/shape"], (10, 100)) + self.assertEqual(file_handler[ds + "/dimensions"], ("rows", "cols")) + self.assertEqual(file_handler[ds + "/attr/test_attr_str"], "test_string") + self.assertEqual(file_handler[ds + "/attr/test_attr_int"], 0) + self.assertEqual(file_handler[ds + "/attr/test_attr_float"], 1.2) - test_group = file_handler['test_group'] - self.assertTupleEqual(test_group['ds1_i'].shape, (10, 100)) - self.assertTupleEqual(test_group['ds1_i'].dims, ('rows', 'cols')) + test_group = file_handler["test_group"] + self.assertTupleEqual(test_group["ds1_i"].shape, (10, 100)) + self.assertTupleEqual(test_group["ds1_i"].dims, ("rows", "cols")) - self.assertEqual(file_handler['/attr/test_attr_str'], 'test_string') - self.assertEqual(file_handler['/attr/test_attr_str_arr'], 'test_string2') - self.assertEqual(file_handler['/attr/test_attr_int'], 0) - self.assertEqual(file_handler['/attr/test_attr_float'], 1.2) + self.assertEqual(file_handler["/attr/test_attr_str"], "test_string") + 
self.assertEqual(file_handler["/attr/test_attr_str_arr"], "test_string2") + self.assertEqual(file_handler["/attr/test_attr_int"], 0) + self.assertEqual(file_handler["/attr/test_attr_float"], 1.2) global_attrs = { - 'test_attr_str': 'test_string', - 'test_attr_str_arr': 'test_string2', - 'test_attr_int': 0, - 'test_attr_float': 1.2 + "test_attr_str": "test_string", + "test_attr_str_arr": "test_string2", + "test_attr_int": 0, + "test_attr_float": 1.2 } - self.assertEqual(file_handler['/attrs'], global_attrs) + self.assertEqual(file_handler["/attrs"], global_attrs) - self.assertIsInstance(file_handler.get('ds2_f')[:], xr.DataArray) - self.assertIsNone(file_handler.get('fake_ds')) - self.assertEqual(file_handler.get('fake_ds', 'test'), 'test') + self.assertIsInstance(file_handler.get("ds2_f")[:], xr.DataArray) + self.assertIsNone(file_handler.get("fake_ds")) + self.assertEqual(file_handler.get("fake_ds", "test"), "test") - self.assertTrue('ds2_f' in file_handler) - self.assertFalse('fake_ds' in file_handler) + self.assertTrue("ds2_f" in file_handler) + self.assertFalse("fake_ds" in file_handler) self.assertIsNone(file_handler.file_handle) self.assertEqual(file_handler["ds2_sc"], 42) @@ -169,43 +169,43 @@ def test_listed_variables(self): from satpy.readers.netcdf_utils import NetCDF4FileHandler filetype_info = { - 'required_netcdf_variables': [ - 'test_group/attr/test_attr_str', - 'attr/test_attr_str', + "required_netcdf_variables": [ + "test_group/attr/test_attr_str", + "attr/test_attr_str", ] } - file_handler = NetCDF4FileHandler('test.nc', {}, filetype_info) + file_handler = NetCDF4FileHandler("test.nc", {}, filetype_info) assert len(file_handler.file_content) == 2 - assert 'test_group/attr/test_attr_str' in file_handler.file_content - assert 'attr/test_attr_str' in file_handler.file_content + assert "test_group/attr/test_attr_str" in file_handler.file_content + assert "attr/test_attr_str" in file_handler.file_content def test_listed_variables_with_composing(self): """Test that composing for listed variables is performed.""" from satpy.readers.netcdf_utils import NetCDF4FileHandler filetype_info = { - 'required_netcdf_variables': [ - 'test_group/{some_parameter}/attr/test_attr_str', - 'test_group/attr/test_attr_str', + "required_netcdf_variables": [ + "test_group/{some_parameter}/attr/test_attr_str", + "test_group/attr/test_attr_str", ], - 'variable_name_replacements': { - 'some_parameter': [ - 'ds1_f', - 'ds1_i', + "variable_name_replacements": { + "some_parameter": [ + "ds1_f", + "ds1_i", ], - 'another_parameter': [ - 'not_used' + "another_parameter": [ + "not_used" ], } } - file_handler = NetCDF4FileHandler('test.nc', {}, filetype_info) + file_handler = NetCDF4FileHandler("test.nc", {}, filetype_info) assert len(file_handler.file_content) == 3 - assert 'test_group/ds1_f/attr/test_attr_str' in file_handler.file_content - assert 'test_group/ds1_i/attr/test_attr_str' in file_handler.file_content - assert not any('not_used' in var for var in file_handler.file_content) - assert not any('some_parameter' in var for var in file_handler.file_content) - assert not any('another_parameter' in var for var in file_handler.file_content) - assert 'test_group/attr/test_attr_str' in file_handler.file_content + assert "test_group/ds1_f/attr/test_attr_str" in file_handler.file_content + assert "test_group/ds1_i/attr/test_attr_str" in file_handler.file_content + assert not any("not_used" in var for var in file_handler.file_content) + assert not any("some_parameter" in var for var in file_handler.file_content) 
+ assert not any("another_parameter" in var for var in file_handler.file_content) + assert "test_group/attr/test_attr_str" in file_handler.file_content def test_caching(self): """Test that caching works as intended.""" @@ -241,21 +241,21 @@ def test_get_and_cache_npxr_is_xr(self): import xarray as xr from satpy.readers.netcdf_utils import NetCDF4FileHandler - file_handler = NetCDF4FileHandler('test.nc', {}, {}, cache_handle=True) + file_handler = NetCDF4FileHandler("test.nc", {}, {}, cache_handle=True) - data = file_handler.get_and_cache_npxr('test_group/ds1_f') + data = file_handler.get_and_cache_npxr("test_group/ds1_f") assert isinstance(data, xr.DataArray) def test_get_and_cache_npxr_data_is_cached(self): """Test that the data are cached when get_and_cache_npxr() is called.""" from satpy.readers.netcdf_utils import NetCDF4FileHandler - file_handler = NetCDF4FileHandler('test.nc', {}, {}, cache_handle=True) - data = file_handler.get_and_cache_npxr('test_group/ds1_f') + file_handler = NetCDF4FileHandler("test.nc", {}, {}, cache_handle=True) + data = file_handler.get_and_cache_npxr("test_group/ds1_f") # Delete the dataset from the file content dict, it should be available from the cache del file_handler.file_content["test_group/ds1_f"] - data2 = file_handler.get_and_cache_npxr('test_group/ds1_f') + data2 = file_handler.get_and_cache_npxr("test_group/ds1_f") assert np.all(data == data2) diff --git a/satpy/tests/reader_tests/test_nucaps.py b/satpy/tests/reader_tests/test_nucaps.py index 2f7b0c97a5..5b1c061798 100644 --- a/satpy/tests/reader_tests/test_nucaps.py +++ b/satpy/tests/reader_tests/test_nucaps.py @@ -61,93 +61,93 @@ class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { - '/attr/time_coverage_start': "2020-10-20T12:00:00.5Z", - '/attr/time_coverage_end': "2020-10-20T12:00:36Z", - '/attr/start_orbit_number': 1, - '/attr/end_orbit_number': 2, - '/attr/platform_name': 'NPP', - '/attr/instrument_name': 'CrIS, ATMS, VIIRS', + "/attr/time_coverage_start": "2020-10-20T12:00:00.5Z", + "/attr/time_coverage_end": "2020-10-20T12:00:36Z", + "/attr/start_orbit_number": 1, + "/attr/end_orbit_number": 2, + "/attr/platform_name": "NPP", + "/attr/instrument_name": "CrIS, ATMS, VIIRS", } for k, units, standard_name in [ - ('Solar_Zenith', 'degrees', 'solar_zenith_angle'), - ('Topography', 'meters', ''), - ('Land_Fraction', '1', ''), - ('Surface_Pressure', 'mb', ''), - ('Skin_Temperature', 'Kelvin', 'surface_temperature'), + ("Solar_Zenith", "degrees", "solar_zenith_angle"), + ("Topography", "meters", ""), + ("Land_Fraction", "1", ""), + ("Surface_Pressure", "mb", ""), + ("Skin_Temperature", "Kelvin", "surface_temperature"), ]: file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/units'] = units - file_content[k + '/attr/valid_range'] = (0., 120.) - file_content[k + '/attr/_FillValue'] = -9999. + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/units"] = units + file_content[k + "/attr/valid_range"] = (0., 120.) + file_content[k + "/attr/_FillValue"] = -9999. 
if standard_name: - file_content[k + '/attr/standard_name'] = standard_name + file_content[k + "/attr/standard_name"] = standard_name for k, units, standard_name in [ - ('Temperature', 'Kelvin', 'air_temperature'), - ('Effective_Pressure', 'mb', ''), - ('H2O', '1', ''), - ('H2O_MR', 'g/g', ''), - ('O3', '1', ''), - ('O3_MR', '1', ''), - ('Liquid_H2O', '1', ''), - ('Liquid_H2O_MR', 'g/g', 'cloud_liquid_water_mixing_ratio'), - ('CO', '1', ''), - ('CO_MR', '1', ''), - ('CH4', '1', ''), - ('CH4_MR', '1', ''), - ('CO2', '1', ''), - ('HNO3', '1', ''), - ('HNO3_MR', '1', ''), - ('N2O', '1', ''), - ('N2O_MR', '1', ''), - ('SO2', '1', ''), - ('SO2_MR', '1', ''), + ("Temperature", "Kelvin", "air_temperature"), + ("Effective_Pressure", "mb", ""), + ("H2O", "1", ""), + ("H2O_MR", "g/g", ""), + ("O3", "1", ""), + ("O3_MR", "1", ""), + ("Liquid_H2O", "1", ""), + ("Liquid_H2O_MR", "g/g", "cloud_liquid_water_mixing_ratio"), + ("CO", "1", ""), + ("CO_MR", "1", ""), + ("CH4", "1", ""), + ("CH4_MR", "1", ""), + ("CO2", "1", ""), + ("HNO3", "1", ""), + ("HNO3_MR", "1", ""), + ("N2O", "1", ""), + ("N2O_MR", "1", ""), + ("SO2", "1", ""), + ("SO2_MR", "1", ""), ]: file_content[k] = DEFAULT_PRES_FILE_DATA - file_content[k + '/shape'] = DEFAULT_PRES_FILE_SHAPE - file_content[k + '/attr/units'] = units - file_content[k + '/attr/valid_range'] = (0., 120.) - file_content[k + '/attr/_FillValue'] = -9999. + file_content[k + "/shape"] = DEFAULT_PRES_FILE_SHAPE + file_content[k + "/attr/units"] = units + file_content[k + "/attr/valid_range"] = (0., 120.) + file_content[k + "/attr/_FillValue"] = -9999. if standard_name: - file_content[k + '/attr/standard_name'] = standard_name - k = 'Pressure' + file_content[k + "/attr/standard_name"] = standard_name + k = "Pressure" file_content[k] = ALL_PRESSURE_LEVELS - file_content[k + '/shape'] = DEFAULT_PRES_FILE_SHAPE - file_content[k + '/attr/units'] = 'mb' - file_content[k + '/attr/valid_range'] = (0., 2000.) - file_content[k + '/attr/_FillValue'] = -9999. + file_content[k + "/shape"] = DEFAULT_PRES_FILE_SHAPE + file_content[k + "/attr/units"] = "mb" + file_content[k + "/attr/valid_range"] = (0., 2000.) + file_content[k + "/attr/_FillValue"] = -9999. - k = 'Quality_Flag' + k = "Quality_Flag" file_content[k] = DEFAULT_FILE_DATA.astype(np.int32) - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/valid_range'] = (0, 31) - file_content[k + '/attr/_FillValue'] = -9999. + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/valid_range"] = (0, 31) + file_content[k + "/attr/_FillValue"] = -9999. - k = 'Longitude' + k = "Longitude" file_content[k] = DEFAULT_LON_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/units'] = 'degrees_east' - file_content[k + '/attr/valid_range'] = (-180., 180.) - file_content[k + '/attr/standard_name'] = 'longitude' - file_content[k + '/attr/_FillValue'] = -9999. + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/units"] = "degrees_east" + file_content[k + "/attr/valid_range"] = (-180., 180.) + file_content[k + "/attr/standard_name"] = "longitude" + file_content[k + "/attr/_FillValue"] = -9999. - k = 'Latitude' + k = "Latitude" file_content[k] = DEFAULT_LAT_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/units'] = 'degrees_north' - file_content[k + '/attr/valid_range'] = (-90., 90.) - file_content[k + '/attr/standard_name'] = 'latitude' - file_content[k + '/attr/_FillValue'] = -9999. 
- - attrs = ('_FillValue', 'flag_meanings', 'flag_values', 'units') - cris_fors_dim_name = 'Number_of_CrIS_FORs' - pressure_levels_dim_name = 'Number_of_P_Levels' - if ('_v1' in filename): - cris_fors_dim_name = 'number_of_FORs' - pressure_levels_dim_name = 'number_of_p_levels' + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/units"] = "degrees_north" + file_content[k + "/attr/valid_range"] = (-90., 90.) + file_content[k + "/attr/standard_name"] = "latitude" + file_content[k + "/attr/_FillValue"] = -9999. + + attrs = ("_FillValue", "flag_meanings", "flag_values", "units") + cris_fors_dim_name = "Number_of_CrIS_FORs" + pressure_levels_dim_name = "Number_of_P_Levels" + if ("_v1" in filename): + cris_fors_dim_name = "number_of_FORs" + pressure_levels_dim_name = "number_of_p_levels" convert_file_content_to_data_array( file_content, attrs=attrs, - dims=('z', cris_fors_dim_name, pressure_levels_dim_name)) + dims=("z", cris_fors_dim_name, pressure_levels_dim_name)) return file_content @@ -160,9 +160,9 @@ def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.nucaps import NUCAPSFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(NUCAPSFileHandler, '__bases__', (FakeNetCDF4FileHandler2,)) + self.p = mock.patch.object(NUCAPSFileHandler, "__bases__", (FakeNetCDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -175,7 +175,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -187,10 +187,10 @@ def test_init_with_kwargs(self): from satpy.readers import load_reader r = load_reader(self.reader_configs, mask_surface=False) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) self.assertEqual(len(loadables), 1) - r.create_filehandlers(loadables, fh_kwargs={'mask_surface': False}) + r.create_filehandlers(loadables, fh_kwargs={"mask_surface": False}) # make sure we have some files self.assertTrue(r.file_handlers) @@ -199,70 +199,70 @@ def test_load_nonpressure_based(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Solar_Zenith', - 'Topography', - 'Land_Fraction', - 'Surface_Pressure', - 'Skin_Temperature', - 'Quality_Flag', + datasets = r.load(["Solar_Zenith", + "Topography", + "Land_Fraction", + "Surface_Pressure", + "Skin_Temperature", + "Quality_Flag", ]) self.assertEqual(len(datasets), 6) for v in datasets.values(): # self.assertNotEqual(v.info['resolution'], 0) # self.assertEqual(v.info['units'], 'degrees') self.assertEqual(v.ndim, 1) - 
self.assertEqual(v.attrs['sensor'], set(['cris', 'atms', 'viirs'])) - self.assertEqual(type(v.attrs['start_time']), datetime.datetime) - self.assertEqual(type(v.attrs['end_time']), datetime.datetime) + self.assertEqual(v.attrs["sensor"], set(["cris", "atms", "viirs"])) + self.assertEqual(type(v.attrs["start_time"]), datetime.datetime) + self.assertEqual(type(v.attrs["end_time"]), datetime.datetime) def test_load_pressure_based(self): """Test loading all channels based on pressure.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature', - 'Effective_Pressure', - 'H2O', - 'H2O_MR', - 'O3', - 'O3_MR', - 'Liquid_H2O', - 'Liquid_H2O_MR', - 'CO', - 'CO_MR', - 'CH4', - 'CH4_MR', - 'CO2', - 'HNO3', - 'HNO3_MR', - 'N2O', - 'N2O_MR', - 'SO2', - 'SO2_MR', + datasets = r.load(["Temperature", + "Effective_Pressure", + "H2O", + "H2O_MR", + "O3", + "O3_MR", + "Liquid_H2O", + "Liquid_H2O_MR", + "CO", + "CO_MR", + "CH4", + "CH4_MR", + "CO2", + "HNO3", + "HNO3_MR", + "N2O", + "N2O_MR", + "SO2", + "SO2_MR", ]) self.assertEqual(len(datasets), 19) for v in datasets.values(): # self.assertNotEqual(v.info['resolution'], 0) self.assertEqual(v.ndim, 2) if np.issubdtype(v.dtype, np.floating): - assert '_FillValue' not in v.attrs + assert "_FillValue" not in v.attrs def test_load_multiple_files_pressure(self): """Test loading Temperature from multiple input files.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', - 'NUCAPS-EDR_v1r0_npp_s201603011159009_e201603011159307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", + "NUCAPS-EDR_v1r0_npp_s201603011159009_e201603011159307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=True) + datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=True) self.assertEqual(len(datasets), 100) for v in datasets.values(): self.assertEqual(v.ndim, 1) @@ -272,10 +272,10 @@ def test_load_individual_pressure_levels_true(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=True) + datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=True) self.assertEqual(len(datasets), 100) for v in datasets.values(): self.assertEqual(v.ndim, 1) @@ -285,10 +285,10 @@ def test_load_individual_pressure_levels_min_max(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=(100., 150.)) + datasets = 
r.load(r.pressure_dataset_names["Temperature"], pressure_levels=(100., 150.)) self.assertEqual(len(datasets), 6) for v in datasets.values(): self.assertEqual(v.ndim, 1) @@ -298,10 +298,10 @@ def test_load_individual_pressure_levels_single(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=(103.017,)) + datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=(103.017,)) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 1) @@ -311,10 +311,10 @@ def test_load_pressure_levels_true(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature'], pressure_levels=True) + datasets = r.load(["Temperature"], pressure_levels=True) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 2) @@ -325,10 +325,10 @@ def test_load_pressure_levels_min_max(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature'], pressure_levels=(100., 150.)) + datasets = r.load(["Temperature"], pressure_levels=(100., 150.)) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 2) @@ -340,10 +340,10 @@ def test_load_pressure_levels_single(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature'], pressure_levels=(103.017,)) + datasets = r.load(["Temperature"], pressure_levels=(103.017,)) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 2) @@ -355,16 +355,16 @@ def test_load_pressure_levels_single_and_pressure_levels(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature', 'Pressure_Levels'], pressure_levels=(103.017,)) + datasets = r.load(["Temperature", "Pressure_Levels"], pressure_levels=(103.017,)) self.assertEqual(len(datasets), 2) - t_ds = datasets['Temperature'] + t_ds = datasets["Temperature"] self.assertEqual(t_ds.ndim, 2) self.assertTupleEqual(t_ds.shape, (DEFAULT_PRES_FILE_SHAPE[0], 1)) - pl_ds = datasets['Pressure_Levels'] + pl_ds = datasets["Pressure_Levels"] self.assertTupleEqual(pl_ds.shape, (1,)) @@ -377,9 +377,9 @@ def setUp(self): """Wrap NetCDF4 
file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.nucaps import NUCAPSFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(NUCAPSFileHandler, '__bases__', (FakeNetCDF4FileHandler2,)) + self.p = mock.patch.object(NUCAPSFileHandler, "__bases__", (FakeNetCDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -392,7 +392,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -404,46 +404,46 @@ def test_load_nonpressure_based(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Topography', - 'Land_Fraction', - 'Surface_Pressure', - 'Skin_Temperature', - 'Quality_Flag', + datasets = r.load(["Topography", + "Land_Fraction", + "Surface_Pressure", + "Skin_Temperature", + "Quality_Flag", ]) self.assertEqual(len(datasets), 5) for v in datasets.values(): self.assertEqual(v.ndim, 1) - self.assertEqual(v.attrs['sensor'], set(['cris', 'atms', 'viirs'])) - self.assertEqual(type(v.attrs['start_time']), datetime.datetime) - self.assertEqual(type(v.attrs['end_time']), datetime.datetime) + self.assertEqual(v.attrs["sensor"], set(["cris", "atms", "viirs"])) + self.assertEqual(type(v.attrs["start_time"]), datetime.datetime) + self.assertEqual(type(v.attrs["end_time"]), datetime.datetime) def test_load_pressure_based(self): """Test loading all channels based on pressure.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature', - 'H2O', - 'H2O_MR', - 'O3', - 'O3_MR', - 'CO', - 'CO_MR', - 'CH4', - 'CH4_MR', - 'CO2', - 'HNO3', - 'HNO3_MR', - 'N2O', - 'N2O_MR', - 'SO2', - 'SO2_MR', + datasets = r.load(["Temperature", + "H2O", + "H2O_MR", + "O3", + "O3_MR", + "CO", + "CO_MR", + "CH4", + "CH4_MR", + "CO2", + "HNO3", + "HNO3_MR", + "N2O", + "N2O_MR", + "SO2", + "SO2_MR", ]) self.assertEqual(len(datasets), 16) for v in datasets.values(): @@ -455,10 +455,10 @@ def test_load_individual_pressure_levels_true(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=True) + datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=True) self.assertEqual(len(datasets), 100) for v in datasets.values(): self.assertEqual(v.ndim, 1) @@ 
-468,10 +468,10 @@ def test_load_individual_pressure_levels_min_max(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=(100., 150.)) + datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=(100., 150.)) self.assertEqual(len(datasets), 6) for v in datasets.values(): self.assertEqual(v.ndim, 1) @@ -481,10 +481,10 @@ def test_load_individual_pressure_levels_single(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=(103.017,)) + datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=(103.017,)) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 1) @@ -494,10 +494,10 @@ def test_load_pressure_levels_true(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature'], pressure_levels=True) + datasets = r.load(["Temperature"], pressure_levels=True) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 2) @@ -508,10 +508,10 @@ def test_load_pressure_levels_min_max(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature'], pressure_levels=(100., 150.)) + datasets = r.load(["Temperature"], pressure_levels=(100., 150.)) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 2) @@ -523,10 +523,10 @@ def test_load_pressure_levels_single(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature'], pressure_levels=(103.017,)) + datasets = r.load(["Temperature"], pressure_levels=(103.017,)) self.assertEqual(len(datasets), 1) for v in datasets.values(): self.assertEqual(v.ndim, 2) @@ -538,14 +538,14 @@ def test_load_pressure_levels_single_and_pressure_levels(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature', 'Pressure_Levels'], pressure_levels=(103.017,)) + datasets = r.load(["Temperature", "Pressure_Levels"], 
pressure_levels=(103.017,)) self.assertEqual(len(datasets), 2) - t_ds = datasets['Temperature'] + t_ds = datasets["Temperature"] self.assertEqual(t_ds.ndim, 2) self.assertTupleEqual(t_ds.shape, (DEFAULT_PRES_FILE_SHAPE[0], 1)) - pl_ds = datasets['Pressure_Levels'] + pl_ds = datasets["Pressure_Levels"] self.assertTupleEqual(pl_ds.shape, (1,)) diff --git a/satpy/tests/reader_tests/test_nwcsaf_msg.py b/satpy/tests/reader_tests/test_nwcsaf_msg.py index 5e3053058e..e323baeb20 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_msg.py +++ b/satpy/tests/reader_tests/test_nwcsaf_msg.py @@ -428,9 +428,9 @@ } AREA_DEF_DICT = { - "proj_dict": {'proj': 'geos', 'lon_0': 0, 'h': 35785831, 'x_0': 0, 'y_0': 0, - 'a': 6378169, 'b': 6356583.8, 'units': 'm', 'no_defs': None, 'type': 'crs'}, - "area_id": 'MSG-N', + "proj_dict": {"proj": "geos", "lon_0": 0, "h": 35785831, "x_0": 0, "y_0": 0, + "a": 6378169, "b": 6356583.8, "units": "m", "no_defs": None, "type": "crs"}, + "area_id": "MSG-N", "x_size": 3712, "y_size": 1856, "area_extent": (-5570248.2825, 1501.0099, 5567247.8793, 5570247.8784) @@ -481,20 +481,20 @@ def test_get_area_def(self): area_def = test.get_area_def(dsid) - aext_res = AREA_DEF_DICT['area_extent'] + aext_res = AREA_DEF_DICT["area_extent"] for i in range(4): self.assertAlmostEqual(area_def.area_extent[i], aext_res[i], 4) - proj_dict = AREA_DEF_DICT['proj_dict'] - self.assertEqual(proj_dict['proj'], area_def.proj_dict['proj']) + proj_dict = AREA_DEF_DICT["proj_dict"] + self.assertEqual(proj_dict["proj"], area_def.proj_dict["proj"]) # Not all elements passed on Appveyor, so skip testing every single element of the proj-dict: # for key in proj_dict: # self.assertEqual(proj_dict[key], area_def.proj_dict[key]) - self.assertEqual(AREA_DEF_DICT['x_size'], area_def.width) - self.assertEqual(AREA_DEF_DICT['y_size'], area_def.height) + self.assertEqual(AREA_DEF_DICT["x_size"], area_def.width) + self.assertEqual(AREA_DEF_DICT["y_size"], area_def.height) - self.assertEqual(AREA_DEF_DICT['area_id'], area_def.area_id) + self.assertEqual(AREA_DEF_DICT["area_id"], area_def.area_id) def test_get_dataset(self): """Retrieve datasets from a NWCSAF msgv2013 hdf5 file.""" diff --git a/satpy/tests/reader_tests/test_nwcsaf_nc.py b/satpy/tests/reader_tests/test_nwcsaf_nc.py index 095533d959..fb7187af1f 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_nc.py +++ b/satpy/tests/reader_tests/test_nwcsaf_nc.py @@ -23,19 +23,19 @@ from satpy.readers.nwcsaf_nc import NcNWCSAF, read_nwcsaf_time -PROJ_KM = {'gdal_projection': '+proj=geos +a=6378.137000 +b=6356.752300 +lon_0=0.000000 +h=35785.863000', - 'gdal_xgeo_up_left': -5569500.0, - 'gdal_ygeo_up_left': 5437500.0, - 'gdal_xgeo_low_right': 5566500.0, - 'gdal_ygeo_low_right': 2653500.0} +PROJ_KM = {"gdal_projection": "+proj=geos +a=6378.137000 +b=6356.752300 +lon_0=0.000000 +h=35785.863000", + "gdal_xgeo_up_left": -5569500.0, + "gdal_ygeo_up_left": 5437500.0, + "gdal_xgeo_low_right": 5566500.0, + "gdal_ygeo_low_right": 2653500.0} NOMINAL_ALTITUDE = 35785863.0 -PROJ = {'gdal_projection': f'+proj=geos +a=6378137.000 +b=6356752.300 +lon_0=0.000000 +h={NOMINAL_ALTITUDE:.3f}', - 'gdal_xgeo_up_left': -5569500.0, - 'gdal_ygeo_up_left': 5437500.0, - 'gdal_xgeo_low_right': 5566500.0, - 'gdal_ygeo_low_right': 2653500.0} +PROJ = {"gdal_projection": f"+proj=geos +a=6378137.000 +b=6356752.300 +lon_0=0.000000 +h={NOMINAL_ALTITUDE:.3f}", + "gdal_xgeo_up_left": -5569500.0, + "gdal_ygeo_up_left": 5437500.0, + "gdal_xgeo_low_right": 5566500.0, + "gdal_ygeo_low_right": 2653500.0} dimensions = 
{"nx": 1530, @@ -263,13 +263,13 @@ def test_sensor_name_sat_id(self, nwcsaf_geo_ct_filehandler, platform, instrumen def test_get_area_def(self, nwcsaf_geo_ct_filehandler): """Test that get_area_def() returns proper area.""" - dsid = {'name': 'ct'} + dsid = {"name": "ct"} _check_area_def(nwcsaf_geo_ct_filehandler.get_area_def(dsid)) def test_get_area_def_km(self, nwcsaf_old_geo_ct_filehandler): """Test that get_area_def() returns proper area when the projection is in km.""" - dsid = {'name': 'ct'} + dsid = {"name": "ct"} _check_area_def(nwcsaf_old_geo_ct_filehandler.get_area_def(dsid)) def test_scale_dataset_attr_removal(self, nwcsaf_geo_ct_filehandler): @@ -277,58 +277,58 @@ def test_scale_dataset_attr_removal(self, nwcsaf_geo_ct_filehandler): import numpy as np import xarray as xr - attrs = {'scale_factor': np.array(10), - 'add_offset': np.array(20)} + attrs = {"scale_factor": np.array(10), + "add_offset": np.array(20)} var = xr.DataArray([1, 2, 3], attrs=attrs) - var = nwcsaf_geo_ct_filehandler.scale_dataset(var, 'dummy') + var = nwcsaf_geo_ct_filehandler.scale_dataset(var, "dummy") np.testing.assert_allclose(var, [30, 40, 50]) - assert 'scale_factor' not in var.attrs - assert 'add_offset' not in var.attrs + assert "scale_factor" not in var.attrs + assert "add_offset" not in var.attrs - @pytest.mark.parametrize("attrs, expected", [({'scale_factor': np.array(1.5), - 'add_offset': np.array(2.5), - '_FillValue': 1}, + @pytest.mark.parametrize("attrs, expected", [({"scale_factor": np.array(1.5), + "add_offset": np.array(2.5), + "_FillValue": 1}, [np.nan, 5.5, 7]), - ({'scale_factor': np.array(1.5), - 'add_offset': np.array(2.5), - 'valid_min': 1.1}, + ({"scale_factor": np.array(1.5), + "add_offset": np.array(2.5), + "valid_min": 1.1}, [np.nan, 5.5, 7]), - ({'scale_factor': np.array(1.5), - 'add_offset': np.array(2.5), - 'valid_max': 2.1}, + ({"scale_factor": np.array(1.5), + "add_offset": np.array(2.5), + "valid_max": 2.1}, [4, 5.5, np.nan]), - ({'scale_factor': np.array(1.5), - 'add_offset': np.array(2.5), - 'valid_range': (1.1, 2.1)}, + ({"scale_factor": np.array(1.5), + "add_offset": np.array(2.5), + "valid_range": (1.1, 2.1)}, [np.nan, 5.5, np.nan])]) def test_scale_dataset_floating(self, nwcsaf_geo_ct_filehandler, attrs, expected): """Test the scaling of the dataset with floating point values.""" var = xr.DataArray([1, 2, 3], attrs=attrs) - var = nwcsaf_geo_ct_filehandler.scale_dataset(var, 'dummy') + var = nwcsaf_geo_ct_filehandler.scale_dataset(var, "dummy") np.testing.assert_allclose(var, expected) - assert 'scale_factor' not in var.attrs - assert 'add_offset' not in var.attrs + assert "scale_factor" not in var.attrs + assert "add_offset" not in var.attrs def test_scale_dataset_floating_nwcsaf_geo_ctth(self, nwcsaf_geo_ct_filehandler): """Test the scaling of the dataset with floating point values for CTTH NWCSAF/Geo v2016/v2018.""" - attrs = {'scale_factor': np.array(1.), - 'add_offset': np.array(-2000.), - 'valid_range': (0., 27000.)} + attrs = {"scale_factor": np.array(1.), + "add_offset": np.array(-2000.), + "valid_range": (0., 27000.)} var = xr.DataArray([1, 2, 3], attrs=attrs) - var = nwcsaf_geo_ct_filehandler.scale_dataset(var, 'dummy') + var = nwcsaf_geo_ct_filehandler.scale_dataset(var, "dummy") np.testing.assert_allclose(var, [-1999., -1998., -1997.]) - assert 'scale_factor' not in var.attrs - assert 'add_offset' not in var.attrs - np.testing.assert_equal(var.attrs['valid_range'], (-2000., 25000.)) + assert "scale_factor" not in var.attrs + assert "add_offset" not in 
var.attrs + np.testing.assert_equal(var.attrs["valid_range"], (-2000., 25000.)) def test_orbital_parameters_are_correct(self, nwcsaf_geo_ct_filehandler): """Test that orbital parameters are present in the dataset attributes.""" - dsid = {'name': 'ct'} + dsid = {"name": "ct"} var = nwcsaf_geo_ct_filehandler.get_dataset(dsid, {}) assert "orbital_parameters" in var.attrs - for param in var.attrs['orbital_parameters']: - assert isinstance(var.attrs['orbital_parameters'][param], (float, int)) + for param in var.attrs["orbital_parameters"]: + assert isinstance(var.attrs["orbital_parameters"][param], (float, int)) assert var.attrs["orbital_parameters"]["satellite_nominal_altitude"] == NOMINAL_ALTITUDE assert var.attrs["orbital_parameters"]["satellite_nominal_longitude"] == NOMINAL_LONGITUDE @@ -336,7 +336,7 @@ def test_orbital_parameters_are_correct(self, nwcsaf_geo_ct_filehandler): def test_times_are_in_dataset_attributes(self, nwcsaf_geo_ct_filehandler): """Check that start/end times are in the attributes of datasets.""" - dsid = {'name': 'ct'} + dsid = {"name": "ct"} var = nwcsaf_geo_ct_filehandler.get_dataset(dsid, {}) assert "start_time" in var.attrs assert "end_time" in var.attrs @@ -363,29 +363,29 @@ def test_end_time(self, nwcsaf_pps_cmic_filehandler): def test_drop_xycoords(self, nwcsaf_pps_cmic_filehandler): """Test the drop of x and y coords.""" - y_line = xr.DataArray(list(range(5)), dims=('y'), attrs={"long_name": "scan line number"}) - x_pixel = xr.DataArray(list(range(10)), dims=('x'), attrs={"long_name": "pixel number"}) + y_line = xr.DataArray(list(range(5)), dims=("y"), attrs={"long_name": "scan line number"}) + x_pixel = xr.DataArray(list(range(10)), dims=("x"), attrs={"long_name": "pixel number"}) lat = xr.DataArray(np.ones((5, 10)), - dims=('y', 'x'), - coords={'y': y_line, 'x': x_pixel}, - attrs={'name': 'lat', - 'standard_name': 'latitude'}) + dims=("y", "x"), + coords={"y": y_line, "x": x_pixel}, + attrs={"name": "lat", + "standard_name": "latitude"}) lon = xr.DataArray(np.ones((5, 10)), - dims=('y', 'x'), - coords={'y': y_line, 'x': x_pixel}, - attrs={'name': 'lon', - 'standard_name': 'longitude'}) + dims=("y", "x"), + coords={"y": y_line, "x": x_pixel}, + attrs={"name": "lon", + "standard_name": "longitude"}) data_array_in = xr.DataArray(np.ones((5, 10)), attrs={"scale_factor": np.array(0, dtype=float), "add_offset": np.array(1, dtype=float)}, - dims=('y', 'x'), - coords={'lon': lon, 'lat': lat, 'y': y_line, 'x': x_pixel}) + dims=("y", "x"), + coords={"lon": lon, "lat": lat, "y": y_line, "x": x_pixel}) data_array_out = nwcsaf_pps_cmic_filehandler.drop_xycoords(data_array_in) - assert 'y' not in data_array_out.coords + assert "y" not in data_array_out.coords def test_get_dataset_scales_and_offsets(self, nwcsaf_pps_cpp_filehandler): """Test that get_dataset() returns scaled and offset data.""" - dsid = {'name': 'cpp_cot'} + dsid = {"name": "cpp_cot"} info = dict(name="cpp_cot", file_type="nc_nwcsaf_cpp") @@ -395,7 +395,7 @@ def test_get_dataset_scales_and_offsets(self, nwcsaf_pps_cpp_filehandler): def test_get_dataset_scales_and_offsets_palette_meanings_using_other_dataset(self, nwcsaf_pps_cpp_filehandler): """Test that get_dataset() returns scaled palette_meanings with another dataset as scaling source.""" - dsid = {'name': 'cpp_cot_pal'} + dsid = {"name": "cpp_cot_pal"} info = dict(name="cpp_cot_pal", file_type="nc_nwcsaf_cpp", @@ -407,7 +407,7 @@ def test_get_dataset_scales_and_offsets_palette_meanings_using_other_dataset(sel def 
test_get_palette_fill_value_color_added(self, nwcsaf_pps_ctth_filehandler): """Test that get_dataset() returns scaled palette_meanings with fill_value_color added.""" - dsid = {'name': 'ctth_alti_pal'} + dsid = {"name": "ctth_alti_pal"} info = dict(name="ctth_alti_pal", file_type="nc_nwcsaf_ctth", @@ -420,7 +420,7 @@ def test_get_palette_fill_value_color_added(self, nwcsaf_pps_ctth_filehandler): def test_get_dataset_raises_when_dataset_missing(self, nwcsaf_pps_cpp_filehandler): """Test that get_dataset() raises an error when the requested dataset is missing.""" - dsid = {'name': 'cpp_phase'} + dsid = {"name": "cpp_phase"} info = dict(name="cpp_phase", file_type="nc_nwcsaf_cpp") with pytest.raises(KeyError): @@ -428,8 +428,8 @@ def test_get_dataset_raises_when_dataset_missing(self, nwcsaf_pps_cpp_filehandle def test_get_dataset_uses_file_key_if_present(self, nwcsaf_pps_cmic_filehandler, nwcsaf_pps_cpp_filehandler): """Test that get_dataset() uses a file_key if present.""" - dsid_cpp = {'name': 'cpp_cot'} - dsid_cmic = {'name': 'cmic_cot'} + dsid_cpp = {"name": "cpp_cot"} + dsid_cmic = {"name": "cmic_cot"} file_key = "cmic_cot" @@ -449,17 +449,17 @@ def test_get_dataset_uses_file_key_if_present(self, nwcsaf_pps_cmic_filehandler, def test_get_dataset_can_handle_file_key_list(self, nwcsaf_pps_cmic_filehandler, nwcsaf_pps_cpp_filehandler): """Test that get_dataset() can handle a list of file_keys.""" - dsid_cpp = {'name': 'cpp_reff'} - dsid_cmic = {'name': 'cmic_cre'} + dsid_cpp = {"name": "cpp_reff"} + dsid_cmic = {"name": "cmic_cre"} info_cpp = dict(name="cmic_reff", - file_key=['reff', 'cre'], + file_key=["reff", "cre"], file_type="nc_nwcsaf_cpp") res_cpp = nwcsaf_pps_cpp_filehandler.get_dataset(dsid_cpp, info_cpp) info_cmic = dict(name="cmic_reff", - file_key=['reff', 'cre'], + file_key=["reff", "cre"], file_type="nc_nwcsaf_cpp") res_cmic = nwcsaf_pps_cmic_filehandler.get_dataset(dsid_cmic, info_cmic) @@ -471,8 +471,8 @@ class TestNcNWCSAFFileKeyPrefix: def test_get_dataset_uses_file_key_prefix(self, nwcsaf_pps_cmic_filehandler): """Test that get_dataset() uses a file_key_prefix.""" - dsid_cpp = {'name': 'cpp_cot'} - dsid_cmic = {'name': 'cmic_cot'} + dsid_cpp = {"name": "cpp_cot"} + dsid_cmic = {"name": "cmic_cot"} file_key = "cot" @@ -490,7 +490,7 @@ def test_get_dataset_uses_file_key_prefix(self, nwcsaf_pps_cmic_filehandler): def test_get_dataset_scales_and_offsets_palette_meanings_using_other_dataset(self, nwcsaf_pps_cmic_filehandler): """Test that get_dataset() returns scaled palette_meanings using another dataset as scaling source.""" - dsid = {'name': 'cpp_cot_pal'} + dsid = {"name": "cpp_cot_pal"} info = dict(name="cpp_cot_pal", file_key="cot_pal", @@ -503,11 +503,11 @@ def test_get_dataset_scales_and_offsets_palette_meanings_using_other_dataset(sel def _check_area_def(area_definition): - correct_h = float(PROJ['gdal_projection'].split('+h=')[-1]) - correct_a = float(PROJ['gdal_projection'].split('+a=')[-1].split()[0]) - assert area_definition.proj_dict['h'] == correct_h - assert area_definition.proj_dict['a'] == correct_a - assert area_definition.proj_dict['units'] == 'm' + correct_h = float(PROJ["gdal_projection"].split("+h=")[-1]) + correct_a = float(PROJ["gdal_projection"].split("+a=")[-1].split()[0]) + assert area_definition.proj_dict["h"] == correct_h + assert area_definition.proj_dict["a"] == correct_a + assert area_definition.proj_dict["units"] == "m" correct_extent = (PROJ["gdal_xgeo_up_left"], PROJ["gdal_ygeo_low_right"], PROJ["gdal_xgeo_low_right"], diff --git 
a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py index 8575c337cb..f0ed47f4f8 100644 --- a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py +++ b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py @@ -49,12 +49,12 @@ def fake_dataset(): nobs = xr.DataArray( [[5, 118, 5, 100], [0, 15, 0, 1]], dims=("lat", "lon"), - attrs={'_FillValue': 0} + attrs={"_FillValue": 0} ) nobs_filt = xr.DataArray( [[5, 118, 5, 100], [np.nan, 15, np.nan, 1]], dims=("lat", "lon"), - attrs={'_FillValue': 0} + attrs={"_FillValue": 0} ) watcls = xr.DataArray( [[12.2, 0.01, 6.754, 5.33], [12.5, 101.5, 103.5, 204.]], @@ -83,15 +83,15 @@ def fake_dataset(): ) -ds_dict = {'adg_490': 'adg_490', - 'water_class10': 'water_class10', - 'seawifs_nobs_sum': 'test_nobs', - 'kd_490': 'kd_490', - 'atot_665': 'atot_665'} +ds_dict = {"adg_490": "adg_490", + "water_class10": "water_class10", + "seawifs_nobs_sum": "test_nobs", + "kd_490": "kd_490", + "atot_665": "atot_665"} -ds_list_all = ['adg_490', 'water_class10', 'seawifs_nobs_sum', 'kd_490', 'atot_665'] -ds_list_iop = ['adg_490', 'water_class10', 'seawifs_nobs_sum', 'atot_665'] -ds_list_kd = ['kd_490', 'water_class10', 'seawifs_nobs_sum'] +ds_list_all = ["adg_490", "water_class10", "seawifs_nobs_sum", "kd_490", "atot_665"] +ds_list_iop = ["adg_490", "water_class10", "seawifs_nobs_sum", "atot_665"] +ds_list_kd = ["kd_490", "water_class10", "seawifs_nobs_sum"] @pytest.fixture @@ -100,31 +100,31 @@ def fake_file_dict(fake_dataset, tmp_path): fdict = {} filename = tmp_path / "ESACCI-OC-L3S-OC_PRODUCTS-MERGED-10M_MONTHLY_4km_GEO_PML_OCx_QAA-202112-fv5.0.nc" fake_dataset.to_netcdf(filename) - fdict['bad_month'] = filename + fdict["bad_month"] = filename filename = tmp_path / "ESACCI-OC-L3S-OC_PRODUCTS-MERGED-2D_DAILY_4km_GEO_PML_OCx_QAA-202112-fv5.0.nc" fake_dataset.to_netcdf(filename) - fdict['bad_day'] = filename + fdict["bad_day"] = filename filename = tmp_path / "ESACCI-OC-L3S-OC_PRODUCTS-MERGED-1M_MONTHLY_4km_GEO_PML_OCx_QAA-202112-fv5.0.nc" fake_dataset.to_netcdf(filename) - fdict['ocprod_1m'] = filename + fdict["ocprod_1m"] = filename filename = tmp_path / "ESACCI-OC-L3S-OC_PRODUCTS-MERGED-5D_DAILY_4km_GEO_PML_OCx_QAA-202112-fv5.0.nc" fake_dataset.to_netcdf(filename) - fdict['ocprod_5d'] = filename + fdict["ocprod_5d"] = filename filename = tmp_path / "ESACCI-OC-L3S-IOP-MERGED-8D_DAILY_4km_GEO_PML_RRS-20211117-fv5.0.nc" fake_dataset.to_netcdf(filename) - fdict['iop_8d'] = filename + fdict["iop_8d"] = filename filename = tmp_path / "ESACCI-OC-L3S-IOP-MERGED-1D_DAILY_4km_GEO_PML_OCx-202112-fv5.0.nc" fake_dataset.to_netcdf(filename) - fdict['iop_1d'] = filename + fdict["iop_1d"] = filename filename = tmp_path / "ESACCI-OC-L3S-K_490-MERGED-1D_DAILY_4km_GEO_PML_RRS-20210113-fv5.0.nc" fake_dataset.to_netcdf(filename) - fdict['k490_1d'] = filename + fdict["k490_1d"] = filename yield fdict @@ -137,7 +137,7 @@ def setup_method(self): from satpy._config import config_search_paths self.yaml_file = "oceancolorcci_l3_nc.yaml" - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) def _create_reader_for_resolutions(self, filename): from satpy.readers import load_reader @@ -152,7 +152,7 @@ def _create_reader_for_resolutions(self, filename): @pytest.fixture def area_exp(self): """Get expected area definition.""" - proj_dict = {'datum': 'WGS84', 'no_defs': 'None', 'proj': 'longlat', 'type': 'crs'} + proj_dict = 
{"datum": "WGS84", "no_defs": "None", "proj": "longlat", "type": "crs"} return AreaDefinition( area_id="gridded_occci", @@ -166,9 +166,9 @@ def area_exp(self): def test_get_area_def(self, area_exp, fake_file_dict): """Test area definition.""" - reader = self._create_reader_for_resolutions([fake_file_dict['ocprod_1m']]) + reader = self._create_reader_for_resolutions([fake_file_dict["ocprod_1m"]]) res = reader.load([ds_list_all[0]]) - area = res[ds_list_all[0]].attrs['area'] + area = res[ds_list_all[0]].attrs["area"] assert area.area_id == area_exp.area_id assert area.area_extent == area_exp.area_extent @@ -178,75 +178,75 @@ def test_get_area_def(self, area_exp, fake_file_dict): def test_bad_fname(self, fake_dataset, fake_file_dict): """Test case where an incorrect composite period is given.""" - reader = self._create_reader_for_resolutions([fake_file_dict['bad_month']]) + reader = self._create_reader_for_resolutions([fake_file_dict["bad_month"]]) res = reader.load([ds_list_all[0]]) assert len(res) == 0 - reader = self._create_reader_for_resolutions([fake_file_dict['bad_day']]) + reader = self._create_reader_for_resolutions([fake_file_dict["bad_day"]]) res = reader.load([ds_list_all[0]]) assert len(res) == 0 def test_get_dataset_monthly_allprods(self, fake_dataset, fake_file_dict): """Test dataset loading.""" - reader = self._create_reader_for_resolutions([fake_file_dict['ocprod_1m']]) + reader = self._create_reader_for_resolutions([fake_file_dict["ocprod_1m"]]) # Check how many datasets are available. This file contains all of them. assert len(list(reader.available_dataset_names)) == 94 res = reader.load(ds_list_all) assert len(res) == len(ds_list_all) for curds in ds_list_all: np.testing.assert_allclose(res[curds].values, fake_dataset[ds_dict[curds]].values) - assert res[curds].attrs['sensor'] == 'merged' - assert res[curds].attrs['composite_period'] == 'monthly' + assert res[curds].attrs["sensor"] == "merged" + assert res[curds].attrs["composite_period"] == "monthly" def test_get_dataset_8d_iopprods(self, fake_dataset, fake_file_dict): """Test dataset loading.""" - reader = self._create_reader_for_resolutions([fake_file_dict['iop_8d']]) + reader = self._create_reader_for_resolutions([fake_file_dict["iop_8d"]]) # Check how many datasets are available. This file contains all of them. assert len(list(reader.available_dataset_names)) == 70 res = reader.load(ds_list_iop) assert len(res) == len(ds_list_iop) for curds in ds_list_iop: np.testing.assert_allclose(res[curds].values, fake_dataset[ds_dict[curds]].values) - assert res[curds].attrs['sensor'] == 'merged' - assert res[curds].attrs['composite_period'] == '8-day' + assert res[curds].attrs["sensor"] == "merged" + assert res[curds].attrs["composite_period"] == "8-day" def test_get_dataset_1d_kprods(self, fake_dataset, fake_file_dict): """Test dataset loading.""" - reader = self._create_reader_for_resolutions([fake_file_dict['k490_1d']]) + reader = self._create_reader_for_resolutions([fake_file_dict["k490_1d"]]) # Check how many datasets are available. This file contains all of them. 
assert len(list(reader.available_dataset_names)) == 25 res = reader.load(ds_list_kd) assert len(res) == len(ds_list_kd) for curds in ds_list_kd: np.testing.assert_allclose(res[curds].values, fake_dataset[ds_dict[curds]].values) - assert res[curds].attrs['sensor'] == 'merged' - assert res[curds].attrs['composite_period'] == 'daily' + assert res[curds].attrs["sensor"] == "merged" + assert res[curds].attrs["composite_period"] == "daily" def test_get_dataset_5d_allprods(self, fake_dataset, fake_file_dict): """Test dataset loading.""" - reader = self._create_reader_for_resolutions([fake_file_dict['ocprod_5d']]) + reader = self._create_reader_for_resolutions([fake_file_dict["ocprod_5d"]]) # Check how many datasets are available. This file contains all of them. assert len(list(reader.available_dataset_names)) == 94 res = reader.load(ds_list_all) assert len(res) == len(ds_list_all) for curds in ds_list_all: np.testing.assert_allclose(res[curds].values, fake_dataset[ds_dict[curds]].values) - assert res[curds].attrs['sensor'] == 'merged' - assert res[curds].attrs['composite_period'] == '5-day' + assert res[curds].attrs["sensor"] == "merged" + assert res[curds].attrs["composite_period"] == "5-day" def test_start_time(self, fake_file_dict): """Test start time property.""" - reader = self._create_reader_for_resolutions([fake_file_dict['k490_1d']]) + reader = self._create_reader_for_resolutions([fake_file_dict["k490_1d"]]) assert reader.start_time == datetime(2021, 8, 1, 0, 0, 0) def test_end_time(self, fake_file_dict): """Test end time property.""" - reader = self._create_reader_for_resolutions([fake_file_dict['iop_8d']]) + reader = self._create_reader_for_resolutions([fake_file_dict["iop_8d"]]) assert reader.end_time == datetime(2021, 8, 31, 23, 59, 0) def test_correct_dimnames(self, fake_file_dict): """Check that the loaded dimension names are correct.""" - reader = self._create_reader_for_resolutions([fake_file_dict['ocprod_5d']]) + reader = self._create_reader_for_resolutions([fake_file_dict["ocprod_5d"]]) res = reader.load(ds_list_all) for dsname in ds_list_all: - assert res[dsname].dims[0] == 'y' - assert res[dsname].dims[1] == 'x' + assert res[dsname].dims[0] == "y" + assert res[dsname].dims[1] == "x" diff --git a/satpy/tests/reader_tests/test_olci_nc.py b/satpy/tests/reader_tests/test_olci_nc.py index 6761511cf5..b6f5863a25 100644 --- a/satpy/tests/reader_tests/test_olci_nc.py +++ b/satpy/tests/reader_tests/test_olci_nc.py @@ -24,7 +24,7 @@ class TestOLCIReader(unittest.TestCase): """Test various olci_nc filehandlers.""" - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_instantiate(self, mocked_dataset): """Test initialization of file handlers.""" import xarray as xr @@ -34,66 +34,66 @@ def test_instantiate(self, mocked_dataset): cal_data = xr.Dataset( { - 'solar_flux': (('bands'), [0, 1, 2]), - 'detector_index': (('bands'), [0, 1, 2]), + "solar_flux": (("bands"), [0, 1, 2]), + "detector_index": (("bands"), [0, 1, 2]), }, - {'bands': [0, 1, 2], }, + {"bands": [0, 1, 2], }, ) - ds_id = make_dataid(name='Oa01', calibration='reflectance') - ds_id2 = make_dataid(name='wsqf', calibration='reflectance') - filename_info = {'mission_id': 'S3A', 'dataset_name': 'Oa01', 'start_time': 0, 'end_time': 0} + ds_id = make_dataid(name="Oa01", calibration="reflectance") + ds_id2 = make_dataid(name="wsqf", calibration="reflectance") + filename_info = {"mission_id": "S3A", "dataset_name": "Oa01", "start_time": 0, "end_time": 0} - test = NCOLCIBase('somedir/somefile.nc', 
filename_info, 'c') + test = NCOLCIBase("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() - test = NCOLCICal('somedir/somefile.nc', filename_info, 'c') + test = NCOLCICal("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() - test = NCOLCIGeo('somedir/somefile.nc', filename_info, 'c') + test = NCOLCIGeo("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() - test = NCOLCIChannelBase('somedir/somefile.nc', filename_info, 'c') + test = NCOLCIChannelBase("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() cal = mock.Mock() cal.nc = cal_data - test = NCOLCI1B('somedir/somefile.nc', filename_info, 'c', cal) + test = NCOLCI1B("somedir/somefile.nc", filename_info, "c", cal) test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() - test = NCOLCI2('somedir/somefile.nc', filename_info, 'c') - test.get_dataset(ds_id, {'nc_key': 'the_key'}) - test.get_dataset(ds_id2, {'nc_key': 'the_key'}) + test = NCOLCI2("somedir/somefile.nc", filename_info, "c") + test.get_dataset(ds_id, {"nc_key": "the_key"}) + test.get_dataset(ds_id2, {"nc_key": "the_key"}) mocked_dataset.assert_called() mocked_dataset.reset_mock() - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_open_file_objects(self, mocked_open_dataset): """Test initialization of file handlers.""" from satpy.readers.olci_nc import NCOLCIBase - filename_info = {'mission_id': 'S3A', 'dataset_name': 'Oa01', 'start_time': 0, 'end_time': 0} + filename_info = {"mission_id": "S3A", "dataset_name": "Oa01", "start_time": 0, "end_time": 0} open_file = mock.MagicMock() - file_handler = NCOLCIBase(open_file, filename_info, 'c') + file_handler = NCOLCIBase(open_file, filename_info, "c") # deepcode ignore W0104: This is a property that is actually a function call. 
file_handler.nc # pylint: disable=W0104 mocked_open_dataset.assert_called() open_file.open.assert_called() assert (open_file.open.return_value in mocked_open_dataset.call_args[0] or - open_file.open.return_value == mocked_open_dataset.call_args[1].get('filename_or_obj')) + open_file.open.return_value == mocked_open_dataset.call_args[1].get("filename_or_obj")) - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_get_mask(self, mocked_dataset): """Test reading datasets.""" import numpy as np @@ -101,15 +101,15 @@ def test_get_mask(self, mocked_dataset): from satpy.readers.olci_nc import NCOLCI2 from satpy.tests.utils import make_dataid - mocked_dataset.return_value = xr.Dataset({'mask': (['rows', 'columns'], + mocked_dataset.return_value = xr.Dataset({"mask": (["rows", "columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))}, - coords={'rows': np.arange(5), - 'columns': np.arange(6)}) - ds_id = make_dataid(name='mask') - filename_info = {'mission_id': 'S3A', 'dataset_name': 'mask', 'start_time': 0, 'end_time': 0} - test = NCOLCI2('somedir/somefile.nc', filename_info, 'c') - res = test.get_dataset(ds_id, {'nc_key': 'mask'}) - self.assertEqual(res.dtype, np.dtype('bool')) + coords={"rows": np.arange(5), + "columns": np.arange(6)}) + ds_id = make_dataid(name="mask") + filename_info = {"mission_id": "S3A", "dataset_name": "mask", "start_time": 0, "end_time": 0} + test = NCOLCI2("somedir/somefile.nc", filename_info, "c") + res = test.get_dataset(ds_id, {"nc_key": "mask"}) + self.assertEqual(res.dtype, np.dtype("bool")) expected = np.array([[True, False, True, True, True, True], [False, False, True, True, False, False], [False, False, False, False, False, True], @@ -117,7 +117,7 @@ def test_get_mask(self, mocked_dataset): [True, False, False, True, False, False]]) np.testing.assert_array_equal(res.values, expected) - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_get_mask_with_alternative_items(self, mocked_dataset): """Test reading datasets.""" import numpy as np @@ -125,19 +125,19 @@ def test_get_mask_with_alternative_items(self, mocked_dataset): from satpy.readers.olci_nc import NCOLCI2 from satpy.tests.utils import make_dataid - mocked_dataset.return_value = xr.Dataset({'mask': (['rows', 'columns'], + mocked_dataset.return_value = xr.Dataset({"mask": (["rows", "columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))}, - coords={'rows': np.arange(5), - 'columns': np.arange(6)}) - ds_id = make_dataid(name='mask') - filename_info = {'mission_id': 'S3A', 'dataset_name': 'mask', 'start_time': 0, 'end_time': 0} - test = NCOLCI2('somedir/somefile.nc', filename_info, 'c', mask_items=["INVALID"]) - res = test.get_dataset(ds_id, {'nc_key': 'mask'}) - self.assertEqual(res.dtype, np.dtype('bool')) + coords={"rows": np.arange(5), + "columns": np.arange(6)}) + ds_id = make_dataid(name="mask") + filename_info = {"mission_id": "S3A", "dataset_name": "mask", "start_time": 0, "end_time": 0} + test = NCOLCI2("somedir/somefile.nc", filename_info, "c", mask_items=["INVALID"]) + res = test.get_dataset(ds_id, {"nc_key": "mask"}) + self.assertEqual(res.dtype, np.dtype("bool")) expected = np.array([True] + [False] * 29).reshape(5, 6) np.testing.assert_array_equal(res.values, expected) - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_olci_angles(self, mocked_dataset): """Test reading datasets.""" import numpy as np @@ -146,31 +146,31 @@ def test_olci_angles(self, mocked_dataset): from satpy.readers.olci_nc 
import NCOLCIAngles from satpy.tests.utils import make_dataid attr_dict = { - 'ac_subsampling_factor': 1, - 'al_subsampling_factor': 2, + "ac_subsampling_factor": 1, + "al_subsampling_factor": 2, } - mocked_dataset.return_value = xr.Dataset({'SAA': (['tie_rows', 'tie_columns'], + mocked_dataset.return_value = xr.Dataset({"SAA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'SZA': (['tie_rows', 'tie_columns'], + "SZA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'OAA': (['tie_rows', 'tie_columns'], + "OAA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'OZA': (['tie_rows', 'tie_columns'], + "OZA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))}, - coords={'rows': np.arange(5), - 'columns': np.arange(6)}, + coords={"rows": np.arange(5), + "columns": np.arange(6)}, attrs=attr_dict) - filename_info = {'mission_id': 'S3A', 'dataset_name': 'Oa01', 'start_time': 0, 'end_time': 0} + filename_info = {"mission_id": "S3A", "dataset_name": "Oa01", "start_time": 0, "end_time": 0} - ds_id = make_dataid(name='solar_azimuth_angle') - ds_id2 = make_dataid(name='satellite_zenith_angle') - test = NCOLCIAngles('somedir/somefile.nc', filename_info, 'c') + ds_id = make_dataid(name="solar_azimuth_angle") + ds_id2 = make_dataid(name="satellite_zenith_angle") + test = NCOLCIAngles("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) test.get_dataset(ds_id2, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_olci_meteo(self, mocked_dataset): """Test reading datasets.""" import numpy as np @@ -179,26 +179,26 @@ def test_olci_meteo(self, mocked_dataset): from satpy.readers.olci_nc import NCOLCIMeteo from satpy.tests.utils import make_dataid attr_dict = { - 'ac_subsampling_factor': 1, - 'al_subsampling_factor': 2, + "ac_subsampling_factor": 1, + "al_subsampling_factor": 2, } - data = {'humidity': (['tie_rows', 'tie_columns'], + data = {"humidity": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'total_ozone': (['tie_rows', 'tie_columns'], + "total_ozone": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'sea_level_pressure': (['tie_rows', 'tie_columns'], + "sea_level_pressure": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'total_columnar_water_vapour': (['tie_rows', 'tie_columns'], + "total_columnar_water_vapour": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))} mocked_dataset.return_value = xr.Dataset(data, - coords={'rows': np.arange(5), - 'columns': np.arange(6)}, + coords={"rows": np.arange(5), + "columns": np.arange(6)}, attrs=attr_dict) - filename_info = {'mission_id': 'S3A', 'dataset_name': 'humidity', 'start_time': 0, 'end_time': 0} + filename_info = {"mission_id": "S3A", "dataset_name": "humidity", "start_time": 0, "end_time": 0} - ds_id = make_dataid(name='humidity') - ds_id2 = make_dataid(name='total_ozone') - test = NCOLCIMeteo('somedir/somefile.nc', filename_info, 'c') + ds_id = make_dataid(name="humidity") + ds_id2 = make_dataid(name="total_ozone") + test = NCOLCIMeteo("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) test.get_dataset(ds_id2, filename_info) mocked_dataset.assert_called() @@ -213,28 +213,28 @@ def test_chl_nn(self, 
mocked_dataset): from satpy.readers.olci_nc import NCOLCI2 from satpy.tests.utils import make_dataid attr_dict = { - 'ac_subsampling_factor': 64, - 'al_subsampling_factor': 1, + "ac_subsampling_factor": 64, + "al_subsampling_factor": 1, } - data = {'CHL_NN': (['rows', 'columns'], + data = {"CHL_NN": (["rows", "columns"], np.arange(30).reshape(5, 6).astype(float), {"units": "lg(re mg.m-3)"})} mocked_dataset.return_value = xr.Dataset(data, - coords={'rows': np.arange(5), - 'columns': np.arange(6)}, + coords={"rows": np.arange(5), + "columns": np.arange(6)}, attrs=attr_dict) - ds_info = {'name': 'chl_nn', 'sensor': 'olci', 'resolution': 300, - 'standard_name': 'algal_pigment_concentration', 'units': 'lg(re mg.m-3)', - 'coordinates': ('longitude', 'latitude'), 'file_type': 'esa_l2_chl_nn', 'nc_key': 'CHL_NN', - 'modifiers': ()} - filename_info = {'mission_id': 'S3A', 'datatype_id': 'WFR', - 'start_time': datetime.datetime(2019, 9, 24, 9, 29, 39), - 'end_time': datetime.datetime(2019, 9, 24, 9, 32, 39), - 'creation_time': datetime.datetime(2019, 9, 24, 11, 40, 26), 'duration': 179, 'cycle': 49, - 'relative_orbit': 307, 'frame': 1800, 'centre': 'MAR', 'mode': 'O', 'timeliness': 'NR', - 'collection': '002'} - ds_id = make_dataid(name='chl_nn') - file_handler = NCOLCI2('somedir/somefile.nc', filename_info, None, unlog=True) + ds_info = {"name": "chl_nn", "sensor": "olci", "resolution": 300, + "standard_name": "algal_pigment_concentration", "units": "lg(re mg.m-3)", + "coordinates": ("longitude", "latitude"), "file_type": "esa_l2_chl_nn", "nc_key": "CHL_NN", + "modifiers": ()} + filename_info = {"mission_id": "S3A", "datatype_id": "WFR", + "start_time": datetime.datetime(2019, 9, 24, 9, 29, 39), + "end_time": datetime.datetime(2019, 9, 24, 9, 32, 39), + "creation_time": datetime.datetime(2019, 9, 24, 11, 40, 26), "duration": 179, "cycle": 49, + "relative_orbit": 307, "frame": 1800, "centre": "MAR", "mode": "O", "timeliness": "NR", + "collection": "002"} + ds_id = make_dataid(name="chl_nn") + file_handler = NCOLCI2("somedir/somefile.nc", filename_info, None, unlog=True) res = file_handler.get_dataset(ds_id, ds_info) assert res.attrs["units"] == "mg.m-3" @@ -251,13 +251,13 @@ def test_bitflags(self): import numpy as np from satpy.readers.olci_nc import BitFlags - flag_list = ['INVALID', 'WATER', 'LAND', 'CLOUD', 'SNOW_ICE', - 'INLAND_WATER', 'TIDAL', 'COSMETIC', 'SUSPECT', 'HISOLZEN', - 'SATURATED', 'MEGLINT', 'HIGHGLINT', 'WHITECAPS', - 'ADJAC', 'WV_FAIL', 'PAR_FAIL', 'AC_FAIL', 'OC4ME_FAIL', - 'OCNN_FAIL', 'Extra_1', 'KDM_FAIL', 'Extra_2', - 'CLOUD_AMBIGUOUS', 'CLOUD_MARGIN', 'BPAC_ON', - 'WHITE_SCATT', 'LOWRW', 'HIGHRW'] + flag_list = ["INVALID", "WATER", "LAND", "CLOUD", "SNOW_ICE", + "INLAND_WATER", "TIDAL", "COSMETIC", "SUSPECT", "HISOLZEN", + "SATURATED", "MEGLINT", "HIGHGLINT", "WHITECAPS", + "ADJAC", "WV_FAIL", "PAR_FAIL", "AC_FAIL", "OC4ME_FAIL", + "OCNN_FAIL", "Extra_1", "KDM_FAIL", "Extra_2", + "CLOUD_AMBIGUOUS", "CLOUD_MARGIN", "BPAC_ON", + "WHITE_SCATT", "LOWRW", "HIGHRW"] bits = np.array([1 << x for x in range(len(flag_list))]) diff --git a/satpy/tests/reader_tests/test_omps_edr.py b/satpy/tests/reader_tests/test_omps_edr.py index 2c211013cc..f89e41f5d0 100644 --- a/satpy/tests/reader_tests/test_omps_edr.py +++ b/satpy/tests/reader_tests/test_omps_edr.py @@ -44,120 +44,120 @@ def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = {} attrs = [] - if 'SO2NRT' in filename: - k = 'HDFEOS/SWATHS/OMPS Column Amount SO2/Data 
Fields/ColumnAmountSO2_TRM' + if "SO2NRT" in filename: + k = "HDFEOS/SWATHS/OMPS Column Amount SO2/Data Fields/ColumnAmountSO2_TRM" file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/ScaleFactor'] = 1.1 - file_content[k + '/attr/Offset'] = 0.1 - file_content[k + '/attr/MissingValue'] = -1 - file_content[k + '/attr/Title'] = 'Vertical Column Amount SO2 (TRM)' - file_content[k + '/attr/Units'] = 'D.U.' - file_content[k + '/attr/ValidRange'] = (-10, 2000) - k = 'HDFEOS/SWATHS/OMPS Column Amount SO2/Geolocation Fields/Longitude' + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/ScaleFactor"] = 1.1 + file_content[k + "/attr/Offset"] = 0.1 + file_content[k + "/attr/MissingValue"] = -1 + file_content[k + "/attr/Title"] = "Vertical Column Amount SO2 (TRM)" + file_content[k + "/attr/Units"] = "D.U." + file_content[k + "/attr/ValidRange"] = (-10, 2000) + k = "HDFEOS/SWATHS/OMPS Column Amount SO2/Geolocation Fields/Longitude" file_content[k] = DEFAULT_LON_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/ScaleFactor'] = 1.1 - file_content[k + '/attr/Offset'] = 0.1 - file_content[k + '/attr/Units'] = 'deg' - file_content[k + '/attr/MissingValue'] = -1 - file_content[k + '/attr/Title'] = 'Geodetic Longitude' - file_content[k + '/attr/ValidRange'] = (-180, 180) - k = 'HDFEOS/SWATHS/OMPS Column Amount SO2/Geolocation Fields/Latitude' + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/ScaleFactor"] = 1.1 + file_content[k + "/attr/Offset"] = 0.1 + file_content[k + "/attr/Units"] = "deg" + file_content[k + "/attr/MissingValue"] = -1 + file_content[k + "/attr/Title"] = "Geodetic Longitude" + file_content[k + "/attr/ValidRange"] = (-180, 180) + k = "HDFEOS/SWATHS/OMPS Column Amount SO2/Geolocation Fields/Latitude" file_content[k] = DEFAULT_LAT_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/ScaleFactor'] = 1.1 - file_content[k + '/attr/Offset'] = 0.1 - file_content[k + '/attr/Units'] = 'deg' - file_content[k + '/attr/MissingValue'] = -1 - file_content[k + '/attr/Title'] = 'Geodetic Latitude' - file_content[k + '/attr/ValidRange'] = (-90, 90) - elif 'NMSO2' in filename: - file_content['GEOLOCATION_DATA/Longitude'] = DEFAULT_LON_DATA - file_content['GEOLOCATION_DATA/Longitude/shape'] = DEFAULT_FILE_SHAPE - file_content['GEOLOCATION_DATA/Longitude/attr/valid_max'] = 180 - file_content['GEOLOCATION_DATA/Longitude/attr/valid_min'] = -180 - file_content['GEOLOCATION_DATA/Longitude/attr/_FillValue'] = -1.26765e+30 - file_content['GEOLOCATION_DATA/Longitude/attr/long_name'] = 'Longitude' - file_content['GEOLOCATION_DATA/Longitude/attr/standard_name'] = 'longitude' - file_content['GEOLOCATION_DATA/Longitude/attr/units'] = 'degrees_east' - file_content['GEOLOCATION_DATA/Latitude'] = DEFAULT_LAT_DATA - file_content['GEOLOCATION_DATA/Latitude/shape'] = DEFAULT_FILE_SHAPE - file_content['GEOLOCATION_DATA/Latitude/attr/valid_max'] = 90 - file_content['GEOLOCATION_DATA/Latitude/attr/valid_min'] = -90 - file_content['GEOLOCATION_DATA/Latitude/attr/_FillValue'] = -1.26765e+30 - file_content['GEOLOCATION_DATA/Latitude/attr/long_name'] = 'Latitude' - file_content['GEOLOCATION_DATA/Latitude/attr/standard_name'] = 'latitude' - file_content['GEOLOCATION_DATA/Latitude/attr/units'] = 'degress_north' + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/ScaleFactor"] = 1.1 + file_content[k + "/attr/Offset"] = 0.1 + file_content[k + 
"/attr/Units"] = "deg" + file_content[k + "/attr/MissingValue"] = -1 + file_content[k + "/attr/Title"] = "Geodetic Latitude" + file_content[k + "/attr/ValidRange"] = (-90, 90) + elif "NMSO2" in filename: + file_content["GEOLOCATION_DATA/Longitude"] = DEFAULT_LON_DATA + file_content["GEOLOCATION_DATA/Longitude/shape"] = DEFAULT_FILE_SHAPE + file_content["GEOLOCATION_DATA/Longitude/attr/valid_max"] = 180 + file_content["GEOLOCATION_DATA/Longitude/attr/valid_min"] = -180 + file_content["GEOLOCATION_DATA/Longitude/attr/_FillValue"] = -1.26765e+30 + file_content["GEOLOCATION_DATA/Longitude/attr/long_name"] = "Longitude" + file_content["GEOLOCATION_DATA/Longitude/attr/standard_name"] = "longitude" + file_content["GEOLOCATION_DATA/Longitude/attr/units"] = "degrees_east" + file_content["GEOLOCATION_DATA/Latitude"] = DEFAULT_LAT_DATA + file_content["GEOLOCATION_DATA/Latitude/shape"] = DEFAULT_FILE_SHAPE + file_content["GEOLOCATION_DATA/Latitude/attr/valid_max"] = 90 + file_content["GEOLOCATION_DATA/Latitude/attr/valid_min"] = -90 + file_content["GEOLOCATION_DATA/Latitude/attr/_FillValue"] = -1.26765e+30 + file_content["GEOLOCATION_DATA/Latitude/attr/long_name"] = "Latitude" + file_content["GEOLOCATION_DATA/Latitude/attr/standard_name"] = "latitude" + file_content["GEOLOCATION_DATA/Latitude/attr/units"] = "degress_north" - k = 'SCIENCE_DATA/ColumnAmountSO2_TRM' + k = "SCIENCE_DATA/ColumnAmountSO2_TRM" file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/_FillValue'] = -1.26765e+30 - file_content[k + '/attr/long_name'] = 'Column Amount SO2 (TRM)' - file_content[k + '/attr/units'] = 'DU' - file_content[k + '/attr/valid_max'] = 2000 - file_content[k + '/attr/valid_min'] = -10 + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/_FillValue"] = -1.26765e+30 + file_content[k + "/attr/long_name"] = "Column Amount SO2 (TRM)" + file_content[k + "/attr/units"] = "DU" + file_content[k + "/attr/valid_max"] = 2000 + file_content[k + "/attr/valid_min"] = -10 - k = 'SCIENCE_DATA/ColumnAmountSO2_STL' + k = "SCIENCE_DATA/ColumnAmountSO2_STL" file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/_FillValue'] = -1.26765e+30 - file_content[k + '/attr/long_name'] = 'Column Amount SO2 (STL)' - file_content[k + '/attr/units'] = 'DU' + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/_FillValue"] = -1.26765e+30 + file_content[k + "/attr/long_name"] = "Column Amount SO2 (STL)" + file_content[k + "/attr/units"] = "DU" - k = 'SCIENCE_DATA/ColumnAmountSO2_TRL' + k = "SCIENCE_DATA/ColumnAmountSO2_TRL" file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/_FillValue'] = -1.26765e+30 - file_content[k + '/attr/long_name'] = 'Column Amount SO2 (TRL)' - file_content[k + '/attr/units'] = 'DU' - file_content[k + '/attr/valid_max'] = 2000 - file_content[k + '/attr/valid_min'] = -10 - file_content[k + '/attr/DIMENSION_LIST'] = [10, 10] - attrs = ['_FillValue', 'long_name', 'units', 'valid_max', 'valid_min', 'DIMENSION_LIST'] + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/_FillValue"] = -1.26765e+30 + file_content[k + "/attr/long_name"] = "Column Amount SO2 (TRL)" + file_content[k + "/attr/units"] = "DU" + file_content[k + "/attr/valid_max"] = 2000 + file_content[k + "/attr/valid_min"] = -10 + file_content[k + "/attr/DIMENSION_LIST"] = [10, 10] + attrs = ["_FillValue", "long_name", "units", 
"valid_max", "valid_min", "DIMENSION_LIST"] - k = 'SCIENCE_DATA/ColumnAmountSO2_TRU' + k = "SCIENCE_DATA/ColumnAmountSO2_TRU" file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/long_name'] = 'Column Amount SO2 (TRU)' - file_content[k + '/attr/units'] = 'DU' - file_content[k + '/attr/valid_max'] = 2000 - file_content[k + '/attr/valid_min'] = -10 + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/long_name"] = "Column Amount SO2 (TRU)" + file_content[k + "/attr/units"] = "DU" + file_content[k + "/attr/valid_max"] = 2000 + file_content[k + "/attr/valid_min"] = -10 # Dataset with out unit - k = 'SCIENCE_DATA/ColumnAmountSO2_PBL' + k = "SCIENCE_DATA/ColumnAmountSO2_PBL" file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/_FillValue'] = -1.26765e+30 - file_content[k + '/attr/long_name'] = 'Column Amount SO2 (PBL)' - file_content[k + '/attr/valid_max'] = 2000 - file_content[k + '/attr/valid_min'] = -10 + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/_FillValue"] = -1.26765e+30 + file_content[k + "/attr/long_name"] = "Column Amount SO2 (PBL)" + file_content[k + "/attr/valid_max"] = 2000 + file_content[k + "/attr/valid_min"] = -10 else: - for k in ['Reflectivity331', 'UVAerosolIndex']: - k = 'SCIENCE_DATA/' + k + for k in ["Reflectivity331", "UVAerosolIndex"]: + k = "SCIENCE_DATA/" + k file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/Units'] = 'Unitless' - if k == 'UVAerosolIndex': - file_content[k + '/attr/ValidRange'] = (-30, 30) - file_content[k + '/attr/Title'] = 'UV Aerosol Index' + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/Units"] = "Unitless" + if k == "UVAerosolIndex": + file_content[k + "/attr/ValidRange"] = (-30, 30) + file_content[k + "/attr/Title"] = "UV Aerosol Index" else: - file_content[k + '/attr/ValidRange'] = (-0.15, 1.15) - file_content[k + '/attr/Title'] = 'Effective Surface Reflectivity at 331 nm' - file_content[k + '/attr/_FillValue'] = -1. - file_content['GEOLOCATION_DATA/Longitude'] = DEFAULT_LON_DATA - file_content['GEOLOCATION_DATA/Longitude/shape'] = DEFAULT_FILE_SHAPE - file_content['GEOLOCATION_DATA/Longitude/attr/ValidRange'] = (-180, 180) - file_content['GEOLOCATION_DATA/Longitude/attr/_FillValue'] = -999. - file_content['GEOLOCATION_DATA/Longitude/attr/Title'] = 'Geodetic Longitude' - file_content['GEOLOCATION_DATA/Longitude/attr/Units'] = 'deg' - file_content['GEOLOCATION_DATA/Latitude'] = DEFAULT_LAT_DATA - file_content['GEOLOCATION_DATA/Latitude/shape'] = DEFAULT_FILE_SHAPE - file_content['GEOLOCATION_DATA/Latitude/attr/ValidRange'] = (-90, 90) - file_content['GEOLOCATION_DATA/Latitude/attr/_FillValue'] = -999. - file_content['GEOLOCATION_DATA/Latitude/attr/Title'] = 'Geodetic Latitude' - file_content['GEOLOCATION_DATA/Latitude/attr/Units'] = 'deg' + file_content[k + "/attr/ValidRange"] = (-0.15, 1.15) + file_content[k + "/attr/Title"] = "Effective Surface Reflectivity at 331 nm" + file_content[k + "/attr/_FillValue"] = -1. + file_content["GEOLOCATION_DATA/Longitude"] = DEFAULT_LON_DATA + file_content["GEOLOCATION_DATA/Longitude/shape"] = DEFAULT_FILE_SHAPE + file_content["GEOLOCATION_DATA/Longitude/attr/ValidRange"] = (-180, 180) + file_content["GEOLOCATION_DATA/Longitude/attr/_FillValue"] = -999. 
+ file_content["GEOLOCATION_DATA/Longitude/attr/Title"] = "Geodetic Longitude" + file_content["GEOLOCATION_DATA/Longitude/attr/Units"] = "deg" + file_content["GEOLOCATION_DATA/Latitude"] = DEFAULT_LAT_DATA + file_content["GEOLOCATION_DATA/Latitude/shape"] = DEFAULT_FILE_SHAPE + file_content["GEOLOCATION_DATA/Latitude/attr/ValidRange"] = (-90, 90) + file_content["GEOLOCATION_DATA/Latitude/attr/_FillValue"] = -999. + file_content["GEOLOCATION_DATA/Latitude/attr/Title"] = "Geodetic Latitude" + file_content["GEOLOCATION_DATA/Latitude/attr/Units"] = "deg" convert_file_content_to_data_array(file_content, attrs) return file_content @@ -172,12 +172,12 @@ def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.omps_edr import EDREOSFileHandler, EDRFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(EDRFileHandler, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(EDRFileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True - self.p2 = mock.patch.object(EDREOSFileHandler, '__bases__', (EDRFileHandler,)) + self.p2 = mock.patch.object(EDREOSFileHandler, "__bases__", (EDRFileHandler,)) self.fake_handler2 = self.p2.start() self.p2.is_local = True @@ -191,9 +191,9 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5', - 'OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5', - 'OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5', + "OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5", + "OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5", + "OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5", ]) self.assertEqual(len(loadables), 3) r.create_filehandlers(loadables) @@ -205,35 +205,35 @@ def test_basic_load_so2(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5', - 'OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5', - 'OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5', + "OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5", + "OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5", + "OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5", ]) self.assertEqual(len(loadables), 3) r.create_filehandlers(loadables) - ds = r.load(['so2_trm']) + ds = r.load(["so2_trm"]) self.assertEqual(len(ds), 1) for d in ds.values(): - self.assertEqual(d.attrs['resolution'], 50000) + self.assertEqual(d.attrs["resolution"], 50000) self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) - ds = r.load(['tcso2_trm_sampo']) + ds = r.load(["tcso2_trm_sampo"]) self.assertEqual(len(ds), 1) for d in ds.values(): - self.assertEqual(d.attrs['resolution'], 50000) + self.assertEqual(d.attrs["resolution"], 50000) 
self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) - ds = r.load(['tcso2_stl_sampo']) + ds = r.load(["tcso2_stl_sampo"]) self.assertEqual(len(ds), 0) # Dataset without _FillValue - ds = r.load(['tcso2_tru_sampo']) + ds = r.load(["tcso2_tru_sampo"]) self.assertEqual(len(ds), 1) # Dataset without unit - ds = r.load(['tcso2_pbl_sampo']) + ds = r.load(["tcso2_pbl_sampo"]) self.assertEqual(len(ds), 0) def test_basic_load_to3(self): @@ -241,22 +241,22 @@ def test_basic_load_to3(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5', - 'OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5', - 'OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5', + "OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5", + "OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5", + "OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5", ]) self.assertEqual(len(loadables), 3) r.create_filehandlers(loadables) - ds = r.load(['reflectivity_331', 'uvaerosol_index']) + ds = r.load(["reflectivity_331", "uvaerosol_index"]) self.assertEqual(len(ds), 2) for d in ds.values(): - self.assertEqual(d.attrs['resolution'], 50000) + self.assertEqual(d.attrs["resolution"], 50000) self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) - @mock.patch('satpy.readers.hdf5_utils.HDF5FileHandler._get_reference') - @mock.patch('h5py.File') + @mock.patch("satpy.readers.hdf5_utils.HDF5FileHandler._get_reference") + @mock.patch("h5py.File") def test_load_so2_DIMENSION_LIST(self, mock_h5py_file, mock_hdf5_utils_get_reference): """Test load of so2 datasets with DIMENSION_LIST.""" from satpy.readers import load_reader @@ -264,9 +264,9 @@ def test_load_so2_DIMENSION_LIST(self, mock_h5py_file, mock_hdf5_utils_get_refer mock_hdf5_utils_get_reference.return_value = [[[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]]] r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5', + "OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5", ]) r.create_filehandlers(loadables) - ds = r.load(['tcso2_trl_sampo']) + ds = r.load(["tcso2_trl_sampo"]) self.assertEqual(len(ds), 1) diff --git a/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py b/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py index 937470724f..a529ae9f50 100644 --- a/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py +++ b/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py @@ -28,40 +28,40 @@ class TestSAFENC(unittest.TestCase): """Test various SAFE SAR L2 OCN file handlers.""" - @mock.patch('satpy.readers.safe_sar_l2_ocn.xr') + @mock.patch("satpy.readers.safe_sar_l2_ocn.xr") def setUp(self, xr_): """Set up the tests.""" from satpy.readers.safe_sar_l2_ocn import SAFENC - self.channels = ['owiWindSpeed', 'owiLon', 'owiLat', 'owiHs', 'owiNrcs', 'foo', - 'owiPolarisationName', 'owiCalConstObsi'] + self.channels = ["owiWindSpeed", "owiLon", "owiLat", "owiHs", "owiNrcs", "foo", + "owiPolarisationName", "owiCalConstObsi"] # Mock file access to return a fake dataset. 
self.dummy3d = np.zeros((2, 2, 1)) self.dummy2d = np.zeros((2, 2)) self.dummy1d = np.zeros((2)) self.band = 1 self.nc = xr.Dataset( - {'owiWindSpeed': xr.DataArray(self.dummy2d, dims=('owiAzSize', 'owiRaSize'), attrs={'_FillValue': np.nan}), - 'owiLon': xr.DataArray(data=self.dummy2d, dims=('owiAzSize', 'owiRaSize')), - 'owiLat': xr.DataArray(data=self.dummy2d, dims=('owiAzSize', 'owiRaSize')), - 'owiHs': xr.DataArray(data=self.dummy3d, dims=('owiAzSize', 'owiRaSize', 'oswPartition')), - 'owiNrcs': xr.DataArray(data=self.dummy3d, dims=('owiAzSize', 'owiRaSize', 'oswPolarization')), - 'foo': xr.DataArray(self.dummy2d, dims=('owiAzSize', 'owiRaSize')), - 'owiPolarisationName': xr.DataArray(self.dummy1d, dims=('owiPolarisation')), - 'owiCalConstObsi': xr.DataArray(self.dummy1d, dims=('owiIncSize')) + {"owiWindSpeed": xr.DataArray(self.dummy2d, dims=("owiAzSize", "owiRaSize"), attrs={"_FillValue": np.nan}), + "owiLon": xr.DataArray(data=self.dummy2d, dims=("owiAzSize", "owiRaSize")), + "owiLat": xr.DataArray(data=self.dummy2d, dims=("owiAzSize", "owiRaSize")), + "owiHs": xr.DataArray(data=self.dummy3d, dims=("owiAzSize", "owiRaSize", "oswPartition")), + "owiNrcs": xr.DataArray(data=self.dummy3d, dims=("owiAzSize", "owiRaSize", "oswPolarization")), + "foo": xr.DataArray(self.dummy2d, dims=("owiAzSize", "owiRaSize")), + "owiPolarisationName": xr.DataArray(self.dummy1d, dims=("owiPolarisation")), + "owiCalConstObsi": xr.DataArray(self.dummy1d, dims=("owiIncSize")) }, - attrs={'_FillValue': np.nan, - 'missionName': 'S1A'}) + attrs={"_FillValue": np.nan, + "missionName": "S1A"}) xr_.open_dataset.return_value = self.nc # Instantiate reader using the mocked open_dataset() method. Also, make # the reader believe all abstract methods have been implemented. - self.reader = SAFENC(filename='dummy', - filename_info={'start_time': 0, - 'end_time': 0, - 'fstart_time': 0, - 'fend_time': 0, - 'polarization': 'vv'}, + self.reader = SAFENC(filename="dummy", + filename_info={"start_time": 0, + "end_time": 0, + "fstart_time": 0, + "fend_time": 0, + "polarization": "vv"}, filetype_info={}) def test_init(self): @@ -78,5 +78,5 @@ def test_get_dataset(self): key=make_dataid(name=ch), info={}) # ... 
this only compares the valid (unmasked) elements self.assertTrue(np.all(self.nc[ch] == dt.to_masked_array()), - msg='get_dataset() returns invalid data for ' - 'dataset {}'.format(ch)) + msg="get_dataset() returns invalid data for " + "dataset {}".format(ch)) diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index e796c11b77..4ac4d97cfe 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -33,54 +33,54 @@ class TestSAFEGRD(unittest.TestCase): """Test the SAFE GRD file handler.""" - @mock.patch('rasterio.open') + @mock.patch("rasterio.open") def setUp(self, mocked_rio_open): """Set up the test case.""" from satpy.readers.sar_c_safe import SAFEGRD - filename_info = {'mission_id': 'S1A', 'dataset_name': 'foo', 'start_time': 0, 'end_time': 0, - 'polarization': 'vv'} - filetype_info = 'bla' + filename_info = {"mission_id": "S1A", "dataset_name": "foo", "start_time": 0, "end_time": 0, + "polarization": "vv"} + filetype_info = "bla" self.noisefh = mock.MagicMock() - self.noisefh.get_noise_correction.return_value = xr.DataArray(np.zeros((2, 2)), dims=['y', 'x']) + self.noisefh.get_noise_correction.return_value = xr.DataArray(np.zeros((2, 2)), dims=["y", "x"]) self.calfh = mock.MagicMock() self.calfh.get_calibration_constant.return_value = 1 - self.calfh.get_calibration.return_value = xr.DataArray(np.ones((2, 2)), dims=['y', 'x']) + self.calfh.get_calibration.return_value = xr.DataArray(np.ones((2, 2)), dims=["y", "x"]) self.annotationfh = mock.MagicMock() - self.test_fh = SAFEGRD('S1A_IW_GRDH_1SDV_20190201T024655_20190201T024720_025730_02DC2A_AE07.SAFE/measurement/' - 's1a-iw-grd-vv-20190201t024655-20190201t024720-025730-02dc2a-001.tiff', + self.test_fh = SAFEGRD("S1A_IW_GRDH_1SDV_20190201T024655_20190201T024720_025730_02DC2A_AE07.SAFE/measurement/" + "s1a-iw-grd-vv-20190201t024655-20190201t024720-025730-02dc2a-001.tiff", filename_info, filetype_info, self.calfh, self.noisefh, self.annotationfh) self.mocked_rio_open = mocked_rio_open def test_instantiate(self): """Test initialization of file handlers.""" - assert self.test_fh._polarization == 'vv' + assert self.test_fh._polarization == "vv" assert self.test_fh.calibration == self.calfh assert self.test_fh.noise == self.noisefh self.mocked_rio_open.assert_called() - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_read_calibrated_natural(self, mocked_xarray_open): """Test the calibration routines.""" calibration = mock.MagicMock() calibration.name = "sigma_nought" mocked_xarray_open.return_value.__getitem__.return_value = xr.DataArray(da.from_array(np.array([[0, 1], [2, 3]])), - dims=['y', 'x']) + dims=["y", "x"]) xarr = self.test_fh.get_dataset(DataQuery(name="measurement", polarization="vv", - calibration=calibration, quantity='natural'), info=dict()) + calibration=calibration, quantity="natural"), info=dict()) np.testing.assert_allclose(xarr, [[np.nan, 2], [5, 10]]) - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_read_calibrated_dB(self, mocked_xarray_open): """Test the calibration routines.""" calibration = mock.MagicMock() calibration.name = "sigma_nought" mocked_xarray_open.return_value.__getitem__.return_value = xr.DataArray(da.from_array(np.array([[0, 1], [2, 3]])), - dims=['y', 'x']) + dims=["y", "x"]) xarr = self.test_fh.get_dataset(DataQuery(name="measurement", polarization="vv", - calibration=calibration, quantity='dB'), info=dict()) + calibration=calibration, 
quantity="dB"), info=dict()) np.testing.assert_allclose(xarr, [[np.nan, 3.0103], [6.9897, 10]]) def test_read_lon_lats(self): @@ -109,7 +109,7 @@ def __init__(self, *args): FakeGCP(15, 0, 0, 3, 0), ] - crs = dict(init='epsg:4326') + crs = dict(init="epsg:4326") self.mocked_rio_open.return_value.gcps = [gcps, crs] self.mocked_rio_open.return_value.shape = [16, 16] diff --git a/satpy/tests/reader_tests/test_satpy_cf_nc.py b/satpy/tests/reader_tests/test_satpy_cf_nc.py index 605f595e1f..b335fd09c8 100644 --- a/satpy/tests/reader_tests/test_satpy_cf_nc.py +++ b/satpy/tests/reader_tests/test_satpy_cf_nc.py @@ -42,46 +42,46 @@ def _create_test_netcdf(filename, resolution=742): lon = -13.0 * data_visir lat = xr.DataArray(lat, - dims=('y', 'x'), - attrs={'name': 'lat', - 'standard_name': 'latitude', - 'modifiers': np.array([])}) + dims=("y", "x"), + attrs={"name": "lat", + "standard_name": "latitude", + "modifiers": np.array([])}) lon = xr.DataArray(lon, - dims=('y', 'x'), - attrs={'name': 'lon', - 'standard_name': 'longitude', - 'modifiers': np.array([])}) + dims=("y", "x"), + attrs={"name": "lon", + "standard_name": "longitude", + "modifiers": np.array([])}) solar_zenith_angle_i = xr.DataArray(data_visir, - dims=('y', 'x'), - attrs={'name': 'solar_zenith_angle', - 'coordinates': 'lat lon', - 'resolution': resolution}) + dims=("y", "x"), + attrs={"name": "solar_zenith_angle", + "coordinates": "lat lon", + "resolution": resolution}) scene = Scene() - scene.attrs['sensor'] = ['viirs'] + scene.attrs["sensor"] = ["viirs"] scene_dict = { - 'lat': lat, - 'lon': lon, - 'solar_zenith_angle': solar_zenith_angle_i + "lat": lat, + "lon": lon, + "solar_zenith_angle": solar_zenith_angle_i } tstart = datetime(2019, 4, 1, 12, 0) tend = datetime(2019, 4, 1, 12, 15) common_attrs = { - 'start_time': tstart, - 'end_time': tend, - 'platform_name': 'NOAA 20', - 'orbit_number': 99999 + "start_time": tstart, + "end_time": tend, + "platform_name": "NOAA 20", + "orbit_number": 99999 } for key in scene_dict: scene[key] = scene_dict[key] - if key != 'swath_data': + if key != "swath_data": scene[key].attrs.update(common_attrs) - scene.save_datasets(writer='cf', + scene.save_datasets(writer="cf", filename=filename, - engine='h5netcdf', + engine="h5netcdf", flatten_attrs=True, pretty=True) return filename @@ -100,15 +100,15 @@ def _cf_scene(): lon = -13.0 * np.array([[1, 2], [3, 4]]) proj_dict = { - 'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0, - 'lon_0': 0.0, 'proj': 'geos', 'units': 'm' + "a": 6378169.0, "b": 6356583.8, "h": 35785831.0, + "lon_0": 0.0, "proj": "geos", "units": "m" } x_size, y_size = data_visir.shape area_extent = (339045.5577, 4365586.6063, 1068143.527, 4803645.4685) area = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_dict, x_size, y_size, @@ -120,94 +120,94 @@ def _cf_scene(): x_visir = x[0, :] common_attrs = { - 'start_time': tstart, - 'end_time': tend, - 'platform_name': 'tirosn', - 'orbit_number': 99999, - 'area': area + "start_time": tstart, + "end_time": tend, + "platform_name": "tirosn", + "orbit_number": 99999, + "area": area } vis006 = xr.DataArray(data_visir, - dims=('y', 'x'), - coords={'y': y_visir, 'x': x_visir, 'acq_time': ('y', time_vis006)}, + dims=("y", "x"), + coords={"y": y_visir, "x": x_visir, "acq_time": ("y", time_vis006)}, attrs={ - 'name': 'image0', 'id_tag': 'ch_r06', - 'coordinates': 'lat lon', 'resolution': 1000, 'calibration': 'reflectance', - 'wavelength': WavelengthRange(min=0.58, central=0.63, max=0.68, unit='µm'), - 'orbital_parameters': 
{ - 'projection_longitude': 1, - 'projection_latitude': 1, - 'projection_altitude': 1, - 'satellite_nominal_longitude': 1, - 'satellite_nominal_latitude': 1, - 'satellite_actual_longitude': 1, - 'satellite_actual_latitude': 1, - 'satellite_actual_altitude': 1, - 'nadir_longitude': 1, - 'nadir_latitude': 1, - 'only_in_1': False + "name": "image0", "id_tag": "ch_r06", + "coordinates": "lat lon", "resolution": 1000, "calibration": "reflectance", + "wavelength": WavelengthRange(min=0.58, central=0.63, max=0.68, unit="µm"), + "orbital_parameters": { + "projection_longitude": 1, + "projection_latitude": 1, + "projection_altitude": 1, + "satellite_nominal_longitude": 1, + "satellite_nominal_latitude": 1, + "satellite_actual_longitude": 1, + "satellite_actual_latitude": 1, + "satellite_actual_altitude": 1, + "nadir_longitude": 1, + "nadir_latitude": 1, + "only_in_1": False } }) ir_108 = xr.DataArray(data_visir, - dims=('y', 'x'), - coords={'y': y_visir, 'x': x_visir, 'acq_time': ('y', time_vis006)}, - attrs={'name': 'image1', 'id_tag': 'ch_tb11', 'coordinates': 'lat lon'}) + dims=("y", "x"), + coords={"y": y_visir, "x": x_visir, "acq_time": ("y", time_vis006)}, + attrs={"name": "image1", "id_tag": "ch_tb11", "coordinates": "lat lon"}) qual_f = xr.DataArray(qual_data, - dims=('y', 'z'), - coords={'y': y_visir, 'z': z_visir, 'acq_time': ('y', time_vis006)}, + dims=("y", "z"), + coords={"y": y_visir, "z": z_visir, "acq_time": ("y", time_vis006)}, attrs={ - 'name': 'qual_flags', - 'id_tag': 'qual_flags' + "name": "qual_flags", + "id_tag": "qual_flags" }) lat = xr.DataArray(lat, - dims=('y', 'x'), - coords={'y': y_visir, 'x': x_visir}, + dims=("y", "x"), + coords={"y": y_visir, "x": x_visir}, attrs={ - 'name': 'lat', - 'standard_name': 'latitude', - 'modifiers': np.array([]) + "name": "lat", + "standard_name": "latitude", + "modifiers": np.array([]) }) lon = xr.DataArray(lon, - dims=('y', 'x'), - coords={'y': y_visir, 'x': x_visir}, + dims=("y", "x"), + coords={"y": y_visir, "x": x_visir}, attrs={ - 'name': 'lon', - 'standard_name': 'longitude', - 'modifiers': np.array([]) + "name": "lon", + "standard_name": "longitude", + "modifiers": np.array([]) }) # for prefix testing prefix_data = xr.DataArray(data_visir, - dims=('y', 'x'), - coords={'y': y_visir, 'x': x_visir}, + dims=("y", "x"), + coords={"y": y_visir, "x": x_visir}, attrs={ - 'name': '1', 'id_tag': 'ch_r06', - 'coordinates': 'lat lon', 'resolution': 1000, 'calibration': 'reflectance', - 'wavelength': WavelengthRange(min=0.58, central=0.63, max=0.68, unit='µm'), - 'area': area + "name": "1", "id_tag": "ch_r06", + "coordinates": "lat lon", "resolution": 1000, "calibration": "reflectance", + "wavelength": WavelengthRange(min=0.58, central=0.63, max=0.68, unit="µm"), + "area": area }) # for swath testing area = SwathDefinition(lons=lon, lats=lat) swath_data = prefix_data.copy() - swath_data.attrs.update({'name': 'swath_data', 'area': area}) + swath_data.attrs.update({"name": "swath_data", "area": area}) scene = Scene() - scene.attrs['sensor'] = ['avhrr-1', 'avhrr-2', 'avhrr-3'] + scene.attrs["sensor"] = ["avhrr-1", "avhrr-2", "avhrr-3"] scene_dict = { - 'image0': vis006, - 'image1': ir_108, - 'swath_data': swath_data, - '1': prefix_data, - 'lat': lat, - 'lon': lon, - 'qual_flags': qual_f + "image0": vis006, + "image1": ir_108, + "swath_data": swath_data, + "1": prefix_data, + "lat": lat, + "lon": lon, + "qual_flags": qual_f } for key in scene_dict: scene[key] = scene_dict[key] - if key != 'swath_data': + if key != "swath_data": 
scene[key].attrs.update(common_attrs) return scene @@ -215,14 +215,14 @@ def _cf_scene(): @pytest.fixture def _nc_filename(tmp_path): now = datetime.utcnow() - filename = f'testingcfwriter{now:%Y%j%H%M%S}-viirs-mband-20201007075915-20201007080744.nc' + filename = f"testingcfwriter{now:%Y%j%H%M%S}-viirs-mband-20201007075915-20201007080744.nc" return str(tmp_path / filename) @pytest.fixture def _nc_filename_i(tmp_path): now = datetime.utcnow() - filename = f'testingcfwriter{now:%Y%j%H%M%S}-viirs-iband-20201007075915-20201007080744.nc' + filename = f"testingcfwriter{now:%Y%j%H%M%S}-viirs-iband-20201007075915-20201007080744.nc" return str(tmp_path / filename) @@ -231,20 +231,20 @@ class TestCFReader: def test_write_and_read_with_area_definition(self, _cf_scene, _nc_filename): """Save a dataset with an area definition to file with cf_writer and read the data again.""" - _cf_scene.save_datasets(writer='cf', + _cf_scene.save_datasets(writer="cf", filename=_nc_filename, - engine='h5netcdf', + engine="h5netcdf", flatten_attrs=True, pretty=True) - scn_ = Scene(reader='satpy_cf_nc', + scn_ = Scene(reader="satpy_cf_nc", filenames=[_nc_filename]) - scn_.load(['image0', 'image1', 'lat']) - np.testing.assert_array_equal(scn_['image0'].data, _cf_scene['image0'].data) - np.testing.assert_array_equal(scn_['lat'].data, _cf_scene['lat'].data) # lat loaded as dataset - np.testing.assert_array_equal(scn_['image0'].coords['lon'], _cf_scene['lon'].data) # lon loded as coord - assert isinstance(scn_['image0'].attrs['wavelength'], WavelengthRange) - expected_area = _cf_scene['image0'].attrs['area'] - actual_area = scn_['image0'].attrs['area'] + scn_.load(["image0", "image1", "lat"]) + np.testing.assert_array_equal(scn_["image0"].data, _cf_scene["image0"].data) + np.testing.assert_array_equal(scn_["lat"].data, _cf_scene["lat"].data) # lat loaded as dataset + np.testing.assert_array_equal(scn_["image0"].coords["lon"], _cf_scene["lon"].data) # lon loded as coord + assert isinstance(scn_["image0"].attrs["wavelength"], WavelengthRange) + expected_area = _cf_scene["image0"].attrs["area"] + actual_area = scn_["image0"].attrs["area"] assert pytest.approx(expected_area.area_extent, 0.000001) == actual_area.area_extent assert expected_area.proj_dict == actual_area.proj_dict assert expected_area.shape == actual_area.shape @@ -254,146 +254,146 @@ def test_write_and_read_with_area_definition(self, _cf_scene, _nc_filename): def test_write_and_read_with_swath_definition(self, _cf_scene, _nc_filename): """Save a dataset with a swath definition to file with cf_writer and read the data again.""" - _cf_scene.save_datasets(writer='cf', + _cf_scene.save_datasets(writer="cf", filename=_nc_filename, - engine='h5netcdf', + engine="h5netcdf", flatten_attrs=True, pretty=True, datasets=["swath_data"]) - scn_ = Scene(reader='satpy_cf_nc', + scn_ = Scene(reader="satpy_cf_nc", filenames=[_nc_filename]) - scn_.load(['swath_data']) - expected_area = _cf_scene['swath_data'].attrs['area'] - actual_area = scn_['swath_data'].attrs['area'] + scn_.load(["swath_data"]) + expected_area = _cf_scene["swath_data"].attrs["area"] + actual_area = scn_["swath_data"].attrs["area"] assert expected_area.shape == actual_area.shape np.testing.assert_array_equal(expected_area.lons.data, actual_area.lons.data) np.testing.assert_array_equal(expected_area.lats.data, actual_area.lats.data) def test_fix_modifier_attr(self): """Check that fix modifier can handle empty list as modifier attribute.""" - reader = SatpyCFFileHandler('filename', + reader = 
SatpyCFFileHandler("filename", {}, - {'filetype': 'info'}) - ds_info = {'modifiers': []} + {"filetype": "info"}) + ds_info = {"modifiers": []} reader.fix_modifier_attr(ds_info) - assert ds_info['modifiers'] == () + assert ds_info["modifiers"] == () def test_read_prefixed_channels(self, _cf_scene, _nc_filename): """Check channels starting with digit is prefixed and read back correctly.""" - _cf_scene.save_datasets(writer='cf', + _cf_scene.save_datasets(writer="cf", filename=_nc_filename, - engine='netcdf4', + engine="netcdf4", flatten_attrs=True, pretty=True) - scn_ = Scene(reader='satpy_cf_nc', + scn_ = Scene(reader="satpy_cf_nc", filenames=[_nc_filename]) - scn_.load(['1']) - np.testing.assert_array_equal(scn_['1'].data, _cf_scene['1'].data) - np.testing.assert_array_equal(scn_['1'].coords['lon'], _cf_scene['lon'].data) # lon loaded as coord + scn_.load(["1"]) + np.testing.assert_array_equal(scn_["1"].data, _cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], _cf_scene["lon"].data) # lon loaded as coord - scn_ = Scene(reader='satpy_cf_nc', + scn_ = Scene(reader="satpy_cf_nc", filenames=[_nc_filename], reader_kwargs={}) - scn_.load(['1']) - np.testing.assert_array_equal(scn_['1'].data, _cf_scene['1'].data) - np.testing.assert_array_equal(scn_['1'].coords['lon'], _cf_scene['lon'].data) # lon loaded as coord + scn_.load(["1"]) + np.testing.assert_array_equal(scn_["1"].data, _cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], _cf_scene["lon"].data) # lon loaded as coord # Check that variables starting with a digit is written to filename variable prefixed with xr.open_dataset(_nc_filename) as ds_disk: - np.testing.assert_array_equal(ds_disk['CHANNEL_1'].data, _cf_scene['1'].data) + np.testing.assert_array_equal(ds_disk["CHANNEL_1"].data, _cf_scene["1"].data) def test_read_prefixed_channels_include_orig_name(self, _cf_scene, _nc_filename): """Check channels starting with digit and includeed orig name is prefixed and read back correctly.""" - _cf_scene.save_datasets(writer='cf', + _cf_scene.save_datasets(writer="cf", filename=_nc_filename, - engine='netcdf4', + engine="netcdf4", flatten_attrs=True, pretty=True, include_orig_name=True) - scn_ = Scene(reader='satpy_cf_nc', + scn_ = Scene(reader="satpy_cf_nc", filenames=[_nc_filename]) - scn_.load(['1']) - np.testing.assert_array_equal(scn_['1'].data, _cf_scene['1'].data) - np.testing.assert_array_equal(scn_['1'].coords['lon'], _cf_scene['lon'].data) # lon loaded as coord + scn_.load(["1"]) + np.testing.assert_array_equal(scn_["1"].data, _cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], _cf_scene["lon"].data) # lon loaded as coord - assert scn_['1'].attrs['original_name'] == '1' + assert scn_["1"].attrs["original_name"] == "1" # Check that variables starting with a digit is written to filename variable prefixed with xr.open_dataset(_nc_filename) as ds_disk: - np.testing.assert_array_equal(ds_disk['CHANNEL_1'].data, _cf_scene['1'].data) + np.testing.assert_array_equal(ds_disk["CHANNEL_1"].data, _cf_scene["1"].data) def test_read_prefixed_channels_by_user(self, _cf_scene, _nc_filename): """Check channels starting with digit is prefixed by user and read back correctly.""" - _cf_scene.save_datasets(writer='cf', + _cf_scene.save_datasets(writer="cf", filename=_nc_filename, - engine='netcdf4', + engine="netcdf4", flatten_attrs=True, pretty=True, - numeric_name_prefix='USER') - scn_ = Scene(reader='satpy_cf_nc', - filenames=[_nc_filename], reader_kwargs={'numeric_name_prefix': 
'USER'}) - scn_.load(['1']) - np.testing.assert_array_equal(scn_['1'].data, _cf_scene['1'].data) - np.testing.assert_array_equal(scn_['1'].coords['lon'], _cf_scene['lon'].data) # lon loded as coord + numeric_name_prefix="USER") + scn_ = Scene(reader="satpy_cf_nc", + filenames=[_nc_filename], reader_kwargs={"numeric_name_prefix": "USER"}) + scn_.load(["1"]) + np.testing.assert_array_equal(scn_["1"].data, _cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], _cf_scene["lon"].data) # lon loded as coord # Check that variables starting with a digit is written to filename variable prefixed with xr.open_dataset(_nc_filename) as ds_disk: - np.testing.assert_array_equal(ds_disk['USER1'].data, _cf_scene['1'].data) + np.testing.assert_array_equal(ds_disk["USER1"].data, _cf_scene["1"].data) def test_read_prefixed_channels_by_user2(self, _cf_scene, _nc_filename): """Check channels starting with digit is prefixed by user when saving and read back correctly without prefix.""" - _cf_scene.save_datasets(writer='cf', + _cf_scene.save_datasets(writer="cf", filename=_nc_filename, - engine='netcdf4', + engine="netcdf4", flatten_attrs=True, pretty=True, include_orig_name=False, - numeric_name_prefix='USER') - scn_ = Scene(reader='satpy_cf_nc', + numeric_name_prefix="USER") + scn_ = Scene(reader="satpy_cf_nc", filenames=[_nc_filename]) - scn_.load(['USER1']) - np.testing.assert_array_equal(scn_['USER1'].data, _cf_scene['1'].data) - np.testing.assert_array_equal(scn_['USER1'].coords['lon'], _cf_scene['lon'].data) # lon loded as coord + scn_.load(["USER1"]) + np.testing.assert_array_equal(scn_["USER1"].data, _cf_scene["1"].data) + np.testing.assert_array_equal(scn_["USER1"].coords["lon"], _cf_scene["lon"].data) # lon loded as coord def test_read_prefixed_channels_by_user_include_prefix(self, _cf_scene, _nc_filename): """Check channels starting with digit is prefixed by user and include original name when saving.""" - _cf_scene.save_datasets(writer='cf', + _cf_scene.save_datasets(writer="cf", filename=_nc_filename, - engine='netcdf4', + engine="netcdf4", flatten_attrs=True, pretty=True, include_orig_name=True, - numeric_name_prefix='USER') - scn_ = Scene(reader='satpy_cf_nc', + numeric_name_prefix="USER") + scn_ = Scene(reader="satpy_cf_nc", filenames=[_nc_filename]) - scn_.load(['1']) - np.testing.assert_array_equal(scn_['1'].data, _cf_scene['1'].data) - np.testing.assert_array_equal(scn_['1'].coords['lon'], _cf_scene['lon'].data) # lon loded as coord + scn_.load(["1"]) + np.testing.assert_array_equal(scn_["1"].data, _cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], _cf_scene["lon"].data) # lon loded as coord def test_read_prefixed_channels_by_user_no_prefix(self, _cf_scene, _nc_filename): """Check channels starting with digit is not prefixed by user.""" with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=UserWarning, message=".*starts with a digit.*") - _cf_scene.save_datasets(writer='cf', + _cf_scene.save_datasets(writer="cf", filename=_nc_filename, - engine='netcdf4', + engine="netcdf4", flatten_attrs=True, pretty=True, - numeric_name_prefix='') - scn_ = Scene(reader='satpy_cf_nc', + numeric_name_prefix="") + scn_ = Scene(reader="satpy_cf_nc", filenames=[_nc_filename]) - scn_.load(['1']) - np.testing.assert_array_equal(scn_['1'].data, _cf_scene['1'].data) - np.testing.assert_array_equal(scn_['1'].coords['lon'], _cf_scene['lon'].data) # lon loded as coord + scn_.load(["1"]) + np.testing.assert_array_equal(scn_["1"].data, 
_cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], _cf_scene["lon"].data) # lon loded as coord def test_orbital_parameters(self, _cf_scene, _nc_filename): """Test that the orbital parameters in attributes are handled correctly.""" - _cf_scene.save_datasets(writer='cf', + _cf_scene.save_datasets(writer="cf", filename=_nc_filename) - scn_ = Scene(reader='satpy_cf_nc', + scn_ = Scene(reader="satpy_cf_nc", filenames=[_nc_filename]) - scn_.load(['image0']) - orig_attrs = _cf_scene['image0'].attrs['orbital_parameters'] - new_attrs = scn_['image0'].attrs['orbital_parameters'] + scn_.load(["image0"]) + orig_attrs = _cf_scene["image0"].attrs["orbital_parameters"] + new_attrs = scn_["image0"].attrs["orbital_parameters"] assert isinstance(new_attrs, dict) for key in orig_attrs: assert orig_attrs[key] == new_attrs[key] @@ -402,30 +402,30 @@ def test_write_and_read_from_two_files(self, _nc_filename, _nc_filename_i): """Save two datasets with different resolution and read the solar_zenith_angle again.""" _create_test_netcdf(_nc_filename, resolution=742) _create_test_netcdf(_nc_filename_i, resolution=371) - scn_ = Scene(reader='satpy_cf_nc', + scn_ = Scene(reader="satpy_cf_nc", filenames=[_nc_filename, _nc_filename_i]) - scn_.load(['solar_zenith_angle'], resolution=742) - assert scn_['solar_zenith_angle'].attrs['resolution'] == 742 + scn_.load(["solar_zenith_angle"], resolution=742) + assert scn_["solar_zenith_angle"].attrs["resolution"] == 742 scn_.unload() - scn_.load(['solar_zenith_angle'], resolution=371) - assert scn_['solar_zenith_angle'].attrs['resolution'] == 371 + scn_.load(["solar_zenith_angle"], resolution=371) + assert scn_["solar_zenith_angle"].attrs["resolution"] == 371 def test_dataid_attrs_equal_matching_dataset(self, _cf_scene, _nc_filename): """Check that get_dataset returns valid dataset when keys matches.""" from satpy.dataset.dataid import DataID, default_id_keys_config _create_test_netcdf(_nc_filename, resolution=742) - reader = SatpyCFFileHandler(_nc_filename, {}, {'filetype': 'info'}) - ds_id = DataID(default_id_keys_config, name='solar_zenith_angle', resolution=742, modifiers=()) + reader = SatpyCFFileHandler(_nc_filename, {}, {"filetype": "info"}) + ds_id = DataID(default_id_keys_config, name="solar_zenith_angle", resolution=742, modifiers=()) res = reader.get_dataset(ds_id, {}) - assert res.attrs['resolution'] == 742 + assert res.attrs["resolution"] == 742 def test_dataid_attrs_equal_not_matching_dataset(self, _cf_scene, _nc_filename): """Check that get_dataset returns None when key(s) are not matching.""" from satpy.dataset.dataid import DataID, default_id_keys_config _create_test_netcdf(_nc_filename, resolution=742) - reader = SatpyCFFileHandler(_nc_filename, {}, {'filetype': 'info'}) + reader = SatpyCFFileHandler(_nc_filename, {}, {"filetype": "info"}) not_existing_resolution = 9999999 - ds_id = DataID(default_id_keys_config, name='solar_zenith_angle', resolution=not_existing_resolution, + ds_id = DataID(default_id_keys_config, name="solar_zenith_angle", resolution=not_existing_resolution, modifiers=()) assert reader.get_dataset(ds_id, {}) is None @@ -433,8 +433,8 @@ def test_dataid_attrs_equal_contains_not_matching_key(self, _cf_scene, _nc_filen """Check that get_dataset returns valid dataset when dataid have key(s) not existing in data.""" from satpy.dataset.dataid import DataID, default_id_keys_config _create_test_netcdf(_nc_filename, resolution=742) - reader = SatpyCFFileHandler(_nc_filename, {}, {'filetype': 'info'}) - ds_id = 
DataID(default_id_keys_config, name='solar_zenith_angle', resolution=742, - modifiers=(), calibration='counts') + reader = SatpyCFFileHandler(_nc_filename, {}, {"filetype": "info"}) + ds_id = DataID(default_id_keys_config, name="solar_zenith_angle", resolution=742, + modifiers=(), calibration="counts") res = reader.get_dataset(ds_id, {}) - assert res.attrs['resolution'] == 742 + assert res.attrs["resolution"] == 742 diff --git a/satpy/tests/reader_tests/test_scmi.py b/satpy/tests/reader_tests/test_scmi.py index 6d42720c8d..45fcc9caee 100644 --- a/satpy/tests/reader_tests/test_scmi.py +++ b/satpy/tests/reader_tests/test_scmi.py @@ -56,7 +56,7 @@ def close(self): class TestSCMIFileHandler(unittest.TestCase): """Test the SCMIFileHandler reader.""" - @mock.patch('satpy.readers.scmi.xr') + @mock.patch("satpy.readers.scmi.xr") def setUp(self, xr_): """Set up for test.""" from satpy.readers.scmi import SCMIFileHandler @@ -68,37 +68,37 @@ def setUp(self, xr_): time = xr.DataArray(0.) rad = xr.DataArray( rad_data, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'scale_factor': 0.5, - 'add_offset': -1., - '_FillValue': 20, - 'standard_name': 'toa_bidirectional_reflectance', + "scale_factor": 0.5, + "add_offset": -1., + "_FillValue": 20, + "standard_name": "toa_bidirectional_reflectance", }, coords={ - 'time': time, + "time": time, } ) xr_.open_dataset.return_value = FakeDataset( { - 'Sectorized_CMI': rad, + "Sectorized_CMI": rad, "nominal_satellite_subpoint_lat": np.array(0.0), "nominal_satellite_subpoint_lon": np.array(-89.5), "nominal_satellite_height": np.array(35786.02), }, { - 'start_date_time': "2017210120000", - 'satellite_id': 'GOES-16', - 'satellite_longitude': -90., - 'satellite_latitude': 0., - 'satellite_altitude': 35785831., + "start_date_time": "2017210120000", + "satellite_id": "GOES-16", + "satellite_longitude": -90., + "satellite_latitude": 0., + "satellite_altitude": 35785831., }, - {'y': 2, 'x': 5}, + {"y": 2, "x": 5}, ) - self.reader = SCMIFileHandler('filename', - {'platform_shortname': 'G16'}, - {'filetype': 'info'}) + self.reader = SCMIFileHandler("filename", + {"platform_shortname": "G16"}, + {"filetype": "info"}) def test_basic_attributes(self): """Test getting basic file attributes.""" @@ -109,74 +109,74 @@ def test_basic_attributes(self): datetime(2017, 7, 29, 12, 0, 0, 0)) self.assertEqual(self.reader.end_time, datetime(2017, 7, 29, 12, 0, 0, 0)) - self.assertEqual(self.reader.get_shape(make_dataid(name='C05'), {}), + self.assertEqual(self.reader.get_shape(make_dataid(name="C05"), {}), (2, 5)) def test_data_load(self): """Test data loading.""" from satpy.tests.utils import make_dataid res = self.reader.get_dataset( - make_dataid(name='C05', calibration='reflectance'), {}) + make_dataid(name="C05", calibration="reflectance"), {}) np.testing.assert_allclose(res.data, self.expected_rad, equal_nan=True) - self.assertNotIn('scale_factor', res.attrs) - self.assertNotIn('_FillValue', res.attrs) - self.assertEqual(res.attrs['standard_name'], - 'toa_bidirectional_reflectance') - assert 'orbital_parameters' in res.attrs - orb_params = res.attrs['orbital_parameters'] - assert orb_params['projection_longitude'] == -90.0 - assert orb_params['projection_latitude'] == 0.0 - assert orb_params['projection_altitude'] == 35785831.0 + self.assertNotIn("scale_factor", res.attrs) + self.assertNotIn("_FillValue", res.attrs) + self.assertEqual(res.attrs["standard_name"], + "toa_bidirectional_reflectance") + assert "orbital_parameters" in res.attrs + orb_params = res.attrs["orbital_parameters"] + 
assert orb_params["projection_longitude"] == -90.0 + assert orb_params["projection_latitude"] == 0.0 + assert orb_params["projection_altitude"] == 35785831.0 class TestSCMIFileHandlerArea(unittest.TestCase): """Test the SCMIFileHandler's area creation.""" - @mock.patch('satpy.readers.scmi.xr') + @mock.patch("satpy.readers.scmi.xr") def create_reader(self, proj_name, proj_attrs, xr_): """Create a fake reader.""" from satpy.readers.scmi import SCMIFileHandler proj = xr.DataArray([], attrs=proj_attrs) x__ = xr.DataArray( [0, 1], - attrs={'scale_factor': 2., 'add_offset': -1., 'units': 'meters'}, + attrs={"scale_factor": 2., "add_offset": -1., "units": "meters"}, ) y__ = xr.DataArray( [0, 1], - attrs={'scale_factor': -2., 'add_offset': 1., 'units': 'meters'}, + attrs={"scale_factor": -2., "add_offset": 1., "units": "meters"}, ) xr_.open_dataset.return_value = FakeDataset({ - 'goes_imager_projection': proj, - 'x': x__, - 'y': y__, - 'Sectorized_CMI': np.ones((2, 2))}, + "goes_imager_projection": proj, + "x": x__, + "y": y__, + "Sectorized_CMI": np.ones((2, 2))}, { - 'satellite_id': 'GOES-16', - 'grid_mapping': proj_name, + "satellite_id": "GOES-16", + "grid_mapping": proj_name, }, { - 'y': y__.size, - 'x': x__.size, + "y": y__.size, + "x": x__.size, } ) - return SCMIFileHandler('filename', - {'platform_shortname': 'G16'}, - {'filetype': 'info'}) + return SCMIFileHandler("filename", + {"platform_shortname": "G16"}, + {"filetype": "info"}) - @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') + @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def_geos(self, adef): """Test the area generation for geos projection.""" reader = self.create_reader( - 'goes_imager_projection', + "goes_imager_projection", { - 'semi_major_axis': 1., - 'semi_minor_axis': 1., - 'perspective_point_height': 1., - 'longitude_of_projection_origin': -90., - 'sweep_angle_axis': u'x', - 'grid_mapping_name': 'geostationary', + "semi_major_axis": 1., + "semi_minor_axis": 1., + "perspective_point_height": 1., + "longitude_of_projection_origin": -90., + "sweep_angle_axis": u"x", + "grid_mapping_name": "geostationary", } ) reader.get_area_def(None) @@ -184,24 +184,24 @@ def test_get_area_def_geos(self, adef): self.assertEqual(adef.call_count, 1) call_args = tuple(adef.call_args)[0] self.assertDictEqual(call_args[3], { - 'a': 1.0, 'b': 1.0, 'h': 1.0, 'lon_0': -90.0, 'lat_0': 0.0, - 'proj': 'geos', 'sweep': 'x', 'units': 'm'}) + "a": 1.0, "b": 1.0, "h": 1.0, "lon_0": -90.0, "lat_0": 0.0, + "proj": "geos", "sweep": "x", "units": "m"}) self.assertEqual(call_args[4], reader.ncols) self.assertEqual(call_args[5], reader.nlines) np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) - @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') + @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def_lcc(self, adef): """Test the area generation for lcc projection.""" reader = self.create_reader( - 'goes_imager_projection', + "goes_imager_projection", { - 'semi_major_axis': 1., - 'semi_minor_axis': 1., - 'longitude_of_central_meridian': -90., - 'standard_parallel': 25., - 'latitude_of_projection_origin': 25., - 'grid_mapping_name': 'lambert_conformal_conic', + "semi_major_axis": 1., + "semi_minor_axis": 1., + "longitude_of_central_meridian": -90., + "standard_parallel": 25., + "latitude_of_projection_origin": 25., + "grid_mapping_name": "lambert_conformal_conic", } ) reader.get_area_def(None) @@ -209,24 +209,24 @@ def test_get_area_def_lcc(self, adef): 
self.assertEqual(adef.call_count, 1) call_args = tuple(adef.call_args)[0] self.assertDictEqual(call_args[3], { - 'a': 1.0, 'b': 1.0, 'lon_0': -90.0, 'lat_0': 25.0, 'lat_1': 25.0, - 'proj': 'lcc', 'units': 'm'}) + "a": 1.0, "b": 1.0, "lon_0": -90.0, "lat_0": 25.0, "lat_1": 25.0, + "proj": "lcc", "units": "m"}) self.assertEqual(call_args[4], reader.ncols) self.assertEqual(call_args[5], reader.nlines) np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) - @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') + @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def_stere(self, adef): """Test the area generation for stere projection.""" reader = self.create_reader( - 'goes_imager_projection', + "goes_imager_projection", { - 'semi_major_axis': 1., - 'semi_minor_axis': 1., - 'straight_vertical_longitude_from_pole': -90., - 'standard_parallel': 60., - 'latitude_of_projection_origin': 90., - 'grid_mapping_name': 'polar_stereographic', + "semi_major_axis": 1., + "semi_minor_axis": 1., + "straight_vertical_longitude_from_pole": -90., + "standard_parallel": 60., + "latitude_of_projection_origin": 90., + "grid_mapping_name": "polar_stereographic", } ) reader.get_area_def(None) @@ -234,23 +234,23 @@ def test_get_area_def_stere(self, adef): self.assertEqual(adef.call_count, 1) call_args = tuple(adef.call_args)[0] self.assertDictEqual(call_args[3], { - 'a': 1.0, 'b': 1.0, 'lon_0': -90.0, 'lat_0': 90.0, 'lat_ts': 60.0, - 'proj': 'stere', 'units': 'm'}) + "a": 1.0, "b": 1.0, "lon_0": -90.0, "lat_0": 90.0, "lat_ts": 60.0, + "proj": "stere", "units": "m"}) self.assertEqual(call_args[4], reader.ncols) self.assertEqual(call_args[5], reader.nlines) np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) - @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') + @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def_merc(self, adef): """Test the area generation for merc projection.""" reader = self.create_reader( - 'goes_imager_projection', + "goes_imager_projection", { - 'semi_major_axis': 1., - 'semi_minor_axis': 1., - 'longitude_of_projection_origin': -90., - 'standard_parallel': 0., - 'grid_mapping_name': 'mercator', + "semi_major_axis": 1., + "semi_minor_axis": 1., + "longitude_of_projection_origin": -90., + "standard_parallel": 0., + "grid_mapping_name": "mercator", } ) reader.get_area_def(None) @@ -258,23 +258,23 @@ def test_get_area_def_merc(self, adef): self.assertEqual(adef.call_count, 1) call_args = tuple(adef.call_args)[0] self.assertDictEqual(call_args[3], { - 'a': 1.0, 'b': 1.0, 'lon_0': -90.0, 'lat_0': 0.0, 'lat_ts': 0.0, - 'proj': 'merc', 'units': 'm'}) + "a": 1.0, "b": 1.0, "lon_0": -90.0, "lat_0": 0.0, "lat_ts": 0.0, + "proj": "merc", "units": "m"}) self.assertEqual(call_args[4], reader.ncols) self.assertEqual(call_args[5], reader.nlines) np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) - @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') + @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def_bad(self, adef): """Test the area generation for bad projection.""" reader = self.create_reader( - 'goes_imager_projection', + "goes_imager_projection", { - 'semi_major_axis': 1., - 'semi_minor_axis': 1., - 'longitude_of_projection_origin': -90., - 'standard_parallel': 0., - 'grid_mapping_name': 'fake', + "semi_major_axis": 1., + "semi_minor_axis": 1., + "longitude_of_projection_origin": -90., + "standard_parallel": 0., + "grid_mapping_name": "fake", } ) 
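The four area tests above pin down how CF grid_mapping attributes translate into PROJ parameters. The following condensed re-implementation of just that mapping is illustrative only (the reader's actual logic lives in satpy.readers.abi_base); it reproduces the dictionaries the tests assert, and raises ValueError for an unknown mapping as test_get_area_def_bad expects:

def cf_grid_mapping_to_proj_dict(attrs):
    """Translate CF grid_mapping attributes into a PROJ parameter dict."""
    proj = {"a": attrs["semi_major_axis"], "b": attrs["semi_minor_axis"], "units": "m"}
    name = attrs["grid_mapping_name"]
    if name == "geostationary":
        proj.update(proj="geos", h=attrs["perspective_point_height"], lat_0=0.0,
                    lon_0=attrs["longitude_of_projection_origin"],
                    sweep=attrs["sweep_angle_axis"])
    elif name == "lambert_conformal_conic":
        proj.update(proj="lcc", lon_0=attrs["longitude_of_central_meridian"],
                    lat_0=attrs["latitude_of_projection_origin"],
                    lat_1=attrs["standard_parallel"])
    elif name == "polar_stereographic":
        proj.update(proj="stere", lon_0=attrs["straight_vertical_longitude_from_pole"],
                    lat_0=attrs["latitude_of_projection_origin"],
                    lat_ts=attrs["standard_parallel"])
    elif name == "mercator":
        proj.update(proj="merc", lat_0=0.0,
                    lon_0=attrs["longitude_of_projection_origin"],
                    lat_ts=attrs["standard_parallel"])
    else:
        # Unknown projections are rejected rather than silently guessed.
        raise ValueError("Unsupported grid_mapping_name: {}".format(name))
    return proj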
self.assertRaises(ValueError, reader.get_area_def, None) diff --git a/satpy/tests/reader_tests/test_seadas_l2.py b/satpy/tests/reader_tests/test_seadas_l2.py index c56fe42b33..01de26e96b 100644 --- a/satpy/tests/reader_tests/test_seadas_l2.py +++ b/satpy/tests/reader_tests/test_seadas_l2.py @@ -112,13 +112,13 @@ def _create_seadas_chlor_a_hdf4_file(full_path, mission, sensor): def _add_variable_to_hdf4_file(h, var_name, var_info): - v = h.create(var_name, var_info['type'], var_info['data'].shape) - v[:] = var_info['data'] - for dim_count, dimension_name in enumerate(var_info['dim_labels']): + v = h.create(var_name, var_info["type"], var_info["data"].shape) + v[:] = var_info["data"] + for dim_count, dimension_name in enumerate(var_info["dim_labels"]): v.dim(dim_count).setname(dimension_name) - if var_info.get('fill_value'): - v.setfillvalue(var_info['fill_value']) - for attr_key, attr_val in var_info['attrs'].items(): + if var_info.get("fill_value"): + v.setfillvalue(var_info["fill_value"]) + for attr_key, attr_val in var_info["attrs"].items(): setattr(v, attr_key, attr_val) @@ -196,8 +196,8 @@ def _create_seadas_chlor_a_netcdf_file(full_path, mission, sensor): def _add_variable_to_netcdf_file(nc, var_name, var_info): v = nc.createVariable(var_name, var_info["data"].dtype.str[1:], dimensions=var_info["dim_labels"], fill_value=var_info.get("fill_value")) - v[:] = var_info['data'] - for attr_key, attr_val in var_info['attrs'].items(): + v[:] = var_info["data"] + for attr_key, attr_val in var_info["attrs"].items(): setattr(v, attr_key, attr_val) @@ -206,7 +206,7 @@ class TestSEADAS: def test_available_reader(self): """Test that SEADAS L2 reader is available.""" - assert 'seadas_l2' in available_readers() + assert "seadas_l2" in available_readers() @pytest.mark.parametrize( "input_files", @@ -217,10 +217,10 @@ def test_available_reader(self): ]) def test_scene_available_datasets(self, input_files): """Test that datasets are available.""" - scene = Scene(reader='seadas_l2', filenames=input_files) + scene = Scene(reader="seadas_l2", filenames=input_files) available_datasets = scene.all_dataset_names() assert len(available_datasets) > 0 - assert 'chlor_a' in available_datasets + assert "chlor_a" in available_datasets @pytest.mark.parametrize( ("input_files", "exp_plat", "exp_sensor", "exp_rps"), @@ -234,13 +234,13 @@ def test_scene_available_datasets(self, input_files): def test_load_chlor_a(self, input_files, exp_plat, exp_sensor, exp_rps, apply_quality_flags): """Test that we can load 'chlor_a'.""" reader_kwargs = {"apply_quality_flags": apply_quality_flags} - scene = Scene(reader='seadas_l2', filenames=input_files, reader_kwargs=reader_kwargs) - scene.load(['chlor_a']) - data_arr = scene['chlor_a'] + scene = Scene(reader="seadas_l2", filenames=input_files, reader_kwargs=reader_kwargs) + scene.load(["chlor_a"]) + data_arr = scene["chlor_a"] assert data_arr.dims == ("y", "x") - assert data_arr.attrs['platform_name'] == exp_plat - assert data_arr.attrs['sensor'] == exp_sensor - assert data_arr.attrs['units'] == 'mg m^-3' + assert data_arr.attrs["platform_name"] == exp_plat + assert data_arr.attrs["sensor"] == exp_sensor + assert data_arr.attrs["units"] == "mg m^-3" assert data_arr.dtype.type == np.float32 assert isinstance(data_arr.attrs["area"], SwathDefinition) assert data_arr.attrs["rows_per_scan"] == exp_rps diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index 157ed88bbf..b6540d8623 100644 --- 
a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -75,24 +75,24 @@ def test_get_cds_time(self): """Test the get_cds_time function.""" # Scalar self.assertEqual(get_cds_time(days=21246, msecs=12*3600*1000), - np.datetime64('2016-03-03 12:00')) + np.datetime64("2016-03-03 12:00")) # Array days = np.array([21246, 21247, 21248]) msecs = np.array([12*3600*1000, 13*3600*1000 + 1, 14*3600*1000 + 2]) - expected = np.array([np.datetime64('2016-03-03 12:00:00.000'), - np.datetime64('2016-03-04 13:00:00.001'), - np.datetime64('2016-03-05 14:00:00.002')]) + expected = np.array([np.datetime64("2016-03-03 12:00:00.000"), + np.datetime64("2016-03-04 13:00:00.001"), + np.datetime64("2016-03-05 14:00:00.002")]) np.testing.assert_equal(get_cds_time(days=days, msecs=msecs), expected) days = 21246 msecs = 12*3600*1000 - expected = np.datetime64('2016-03-03 12:00:00.000') + expected = np.datetime64("2016-03-03 12:00:00.000") np.testing.assert_equal(get_cds_time(days=days, msecs=msecs), expected) def test_pad_data_horizontally_bad_shape(self): """Test the error handling for the horizontal hrv padding.""" - data = xr.DataArray(data=np.zeros((1, 10)), dims=('y', 'x')) + data = xr.DataArray(data=np.zeros((1, 10)), dims=("y", "x")) east_bound = 5 west_bound = 10 final_size = (1, 20) @@ -101,7 +101,7 @@ def test_pad_data_horizontally_bad_shape(self): def test_pad_data_vertically_bad_shape(self): """Test the error handling for the vertical hrv padding.""" - data = xr.DataArray(data=np.zeros((10, 1)), dims=('y', 'x')) + data = xr.DataArray(data=np.zeros((10, 1)), dims=("y", "x")) south_bound = 5 north_bound = 10 final_size = (20, 1) @@ -134,7 +134,7 @@ def test_round_nom_time(self): @staticmethod def test_pad_data_horizontally(): """Test the horizontal hrv padding.""" - data = xr.DataArray(data=np.zeros((1, 10)), dims=('y', 'x')) + data = xr.DataArray(data=np.zeros((1, 10)), dims=("y", "x")) east_bound = 4 west_bound = 13 final_size = (1, 20) @@ -146,7 +146,7 @@ def test_pad_data_horizontally(): @staticmethod def test_pad_data_vertically(): """Test the vertical hrv padding.""" - data = xr.DataArray(data=np.zeros((10, 1)), dims=('y', 'x')) + data = xr.DataArray(data=np.zeros((10, 1)), dims=("y", "x")) south_bound = 4 north_bound = 13 final_size = (20, 1) @@ -176,30 +176,30 @@ def test_get_padding_area_int(): ORBIT_POLYNOMIALS = { - 'StartTime': np.array([ + "StartTime": np.array([ [ datetime(2006, 1, 1, 6), datetime(2006, 1, 1, 12), datetime(2006, 1, 1, 18), datetime(1958, 1, 1, 0)] ]), - 'EndTime': np.array([ + "EndTime": np.array([ [ datetime(2006, 1, 1, 12), datetime(2006, 1, 1, 18), datetime(2006, 1, 2, 0), datetime(1958, 1, 1, 0) ] ]), - 'X': [np.zeros(8), + "X": [np.zeros(8), [8.41607082e+04, 2.94319260e+00, 9.86748617e-01, -2.70135453e-01, -3.84364650e-02, 8.48718433e-03, 7.70548174e-04, -1.44262718e-04], np.zeros(8)], - 'Y': [np.zeros(8), + "Y": [np.zeros(8), [-5.21170255e+03, 5.12998948e+00, -1.33370453e+00, -3.09634144e-01, 6.18232793e-02, 7.50505681e-03, -1.35131011e-03, -1.12054405e-04], np.zeros(8)], - 'Z': [np.zeros(8), + "Z": [np.zeros(8), [-6.51293855e+02, 1.45830459e+02, 5.61379400e+01, -3.90970565e+00, -7.38137565e-01, 3.06131644e-02, 3.82892428e-03, @@ -211,7 +211,7 @@ def test_get_padding_area_int(): # 01-01: Small gap (12:00 - 13:00) # 01-02: Large gap (04:00 - 18:00) # 01-03: Overlap (10:00 - 13:00) - 'StartTime': np.array([ + "StartTime": np.array([ [ datetime(2005, 12, 31, 10), datetime(2005, 12, 31, 12), datetime(2006, 1, 1, 10), datetime(2006, 
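The get_cds_time expectations below follow directly from the CDS time definition: days counted from the 1958-01-01 epoch plus milliseconds of day. A sketch of the conversion, ignoring any special-value handling the real satpy.readers.seviri_base.get_cds_time may do, that reproduces the scalar test case:

import numpy as np

def cds_to_datetime64(days, msecs):
    # Days are counted from the CDS epoch 1958-01-01, msecs within the day.
    return (np.datetime64("1958-01-01", "ms")
            + np.asarray(days).astype("timedelta64[D]")
            + np.asarray(msecs).astype("timedelta64[ms]"))

assert cds_to_datetime64(21246, 12 * 3600 * 1000) == np.datetime64("2016-03-03 12:00")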
1, 1, 13), @@ -219,7 +219,7 @@ def test_get_padding_area_int(): datetime(2006, 1, 3, 6), datetime(2006, 1, 3, 10), ] ]), - 'EndTime': np.array([ + "EndTime": np.array([ [ datetime(2005, 12, 31, 12), datetime(2005, 12, 31, 18), datetime(2006, 1, 1, 12), datetime(2006, 1, 1, 18), @@ -227,24 +227,24 @@ def test_get_padding_area_int(): datetime(2006, 1, 3, 13), datetime(2006, 1, 3, 18), ] ]), - 'X': [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0], - 'Y': [1.1, 2.1, 3.1, 4.1, 5.1, 6.1, 7.1, 8.1], - 'Z': [1.2, 2.2, 3.2, 4.2, 5.2, 6.2, 7.2, 8.2], + "X": [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0], + "Y": [1.1, 2.1, 3.1, 4.1, 5.1, 6.1, 7.1, 8.1], + "Z": [1.2, 2.2, 3.2, 4.2, 5.2, 6.2, 7.2, 8.2], } ORBIT_POLYNOMIALS_INVALID = { - 'StartTime': np.array([ + "StartTime": np.array([ [ datetime(1958, 1, 1), datetime(1958, 1, 1) ] ]), - 'EndTime': np.array([ + "EndTime": np.array([ [ datetime(1958, 1, 1), datetime(1958, 1, 1) ] ]), - 'X': [1, 2], - 'Y': [3, 4], - 'Z': [5, 6], + "X": [1, 2], + "Y": [3, 4], + "Z": [5, 6], } @@ -301,7 +301,7 @@ class TestOrbitPolynomialFinder: """Unit tests for orbit polynomial finder.""" @pytest.mark.parametrize( - ('orbit_polynomials', 'time', 'orbit_polynomial_exp'), + ("orbit_polynomials", "time", "orbit_polynomial_exp"), [ # Contiguous validity intervals (that's the norm) ( @@ -309,8 +309,8 @@ class TestOrbitPolynomialFinder: datetime(2005, 12, 31, 12, 15), OrbitPolynomial( coefs=(2.0, 2.1, 2.2), - start_time=np.datetime64('2005-12-31 12:00'), - end_time=np.datetime64('2005-12-31 18:00') + start_time=np.datetime64("2005-12-31 12:00"), + end_time=np.datetime64("2005-12-31 18:00") ) ), # No interval enclosing the given timestamp, but closest interval @@ -320,8 +320,8 @@ class TestOrbitPolynomialFinder: datetime(2006, 1, 1, 12, 15), OrbitPolynomial( coefs=(3.0, 3.1, 3.2), - start_time=np.datetime64('2006-01-01 10:00'), - end_time=np.datetime64('2006-01-01 12:00') + start_time=np.datetime64("2006-01-01 10:00"), + end_time=np.datetime64("2006-01-01 12:00") ) ), # Overlapping intervals @@ -330,8 +330,8 @@ class TestOrbitPolynomialFinder: datetime(2006, 1, 3, 12, 15), OrbitPolynomial( coefs=(8.0, 8.1, 8.2), - start_time=np.datetime64('2006-01-03 10:00'), - end_time=np.datetime64('2006-01-03 18:00') + start_time=np.datetime64("2006-01-03 10:00"), + end_time=np.datetime64("2006-01-03 18:00") ) ), ] @@ -344,7 +344,7 @@ def test_get_orbit_polynomial(self, orbit_polynomials, time, assert orbit_polynomial == orbit_polynomial_exp @pytest.mark.parametrize( - ('orbit_polynomials', 'time'), + ("orbit_polynomials", "time"), [ # No interval enclosing the given timestamp and closest interval # too far away diff --git a/satpy/tests/reader_tests/test_seviri_l1b_calibration.py b/satpy/tests/reader_tests/test_seviri_l1b_calibration.py index fdcedea3f2..cc1107cc6c 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_calibration.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_calibration.py @@ -53,7 +53,7 @@ CAL_TYPE1 = 1 CAL_TYPE2 = 2 CAL_TYPEBAD = -1 -CHANNEL_NAME = 'IR_108' +CHANNEL_NAME = "IR_108" PLATFORM_ID = 323 # Met-10 TBS_OUTPUT1 = xr.DataArray( @@ -152,28 +152,28 @@ def test_init(self): platform_id=None, channel_name=None, coefs=None, - calib_mode='invalid', + calib_mode="invalid", scan_time=None ) - def _get_calibration_handler(self, calib_mode='NOMINAL', ext_coefs=None): + def _get_calibration_handler(self, calib_mode="NOMINAL", ext_coefs=None): """Provide a calibration handler.""" return SEVIRICalibrationHandler( platform_id=324, - channel_name='IR_108', + channel_name="IR_108", coefs={ - 
'coefs': { - 'NOMINAL': { - 'gain': 10, - 'offset': -1 + "coefs": { + "NOMINAL": { + "gain": 10, + "offset": -1 }, - 'GSICS': { - 'gain': 20, - 'offset': -2 + "GSICS": { + "gain": 20, + "offset": -2 }, - 'EXTERNAL': ext_coefs or {} + "EXTERNAL": ext_coefs or {} }, - 'radiance_type': 1 + "radiance_type": 1 }, calib_mode=calib_mode, scan_time=None @@ -183,15 +183,15 @@ def test_calibrate_exceptions(self): """Test exceptions raised by the calibration handler.""" calib = self._get_calibration_handler() with pytest.raises(ValueError): - calib.calibrate(None, 'invalid') + calib.calibrate(None, "invalid") @pytest.mark.parametrize( - ('calib_mode', 'ext_coefs', 'expected'), + ("calib_mode", "ext_coefs", "expected"), [ - ('NOMINAL', {}, (10, -1)), - ('GSICS', {}, (20, -40)), - ('GSICS', {'gain': 30, 'offset': -3}, (30, -3)), - ('NOMINAL', {'gain': 30, 'offset': -3}, (30, -3)) + ("NOMINAL", {}, (10, -1)), + ("GSICS", {}, (20, -40)), + ("GSICS", {"gain": 30, "offset": -3}, (30, -3)), + ("NOMINAL", {"gain": 30, "offset": -3}, (30, -3)) ] ) def test_get_gain_offset(self, calib_mode, ext_coefs, expected): @@ -214,145 +214,145 @@ class TestFileHandlerCalibrationBase: radiance_types = 2 * np.ones(12) scan_time = datetime(2020, 1, 1) external_coefs = { - 'VIS006': {'gain': 10, 'offset': -10}, - 'IR_108': {'gain': 20, 'offset': -20}, - 'HRV': {'gain': 5, 'offset': -5} + "VIS006": {"gain": 10, "offset": -10}, + "IR_108": {"gain": 20, "offset": -20}, + "HRV": {"gain": 5, "offset": -5} } - spectral_channel_ids = {'VIS006': 1, 'IR_108': 9, 'HRV': 12} + spectral_channel_ids = {"VIS006": 1, "IR_108": 9, "HRV": 12} expected = { - 'VIS006': { - 'counts': { - 'NOMINAL': xr.DataArray( + "VIS006": { + "counts": { + "NOMINAL": xr.DataArray( [[0, 10], [100, 255]], - dims=('y', 'x') + dims=("y", "x") ) }, - 'radiance': { - 'NOMINAL': xr.DataArray( + "radiance": { + "NOMINAL": xr.DataArray( [[np.nan, 9], [99, 254]], - dims=('y', 'x') + dims=("y", "x") ), - 'GSICS': xr.DataArray( + "GSICS": xr.DataArray( [[np.nan, 9], [99, 254]], - dims=('y', 'x') + dims=("y", "x") ), - 'EXTERNAL': xr.DataArray( + "EXTERNAL": xr.DataArray( [[np.nan, 90], [990, 2540]], - dims=('y', 'x') + dims=("y", "x") ) }, - 'reflectance': { - 'NOMINAL': xr.DataArray( + "reflectance": { + "NOMINAL": xr.DataArray( [[np.nan, 41.88985], [460.7884, 1182.2247]], - dims=('y', 'x') + dims=("y", "x") ), - 'EXTERNAL': xr.DataArray( + "EXTERNAL": xr.DataArray( [[np.nan, 418.89853], [4607.8843, 11822.249]], - dims=('y', 'x') + dims=("y", "x") ) } }, - 'IR_108': { - 'counts': { - 'NOMINAL': xr.DataArray( + "IR_108": { + "counts": { + "NOMINAL": xr.DataArray( [[0, 10], [100, 255]], - dims=('y', 'x') + dims=("y", "x") ) }, - 'radiance': { - 'NOMINAL': xr.DataArray( + "radiance": { + "NOMINAL": xr.DataArray( [[np.nan, 81], [891, 2286]], - dims=('y', 'x') + dims=("y", "x") ), - 'GSICS': xr.DataArray( + "GSICS": xr.DataArray( [[np.nan, 8.19], [89.19, 228.69]], - dims=('y', 'x') + dims=("y", "x") ), - 'EXTERNAL': xr.DataArray( + "EXTERNAL": xr.DataArray( [[np.nan, 180], [1980, 5080]], - dims=('y', 'x') + dims=("y", "x") ) }, - 'brightness_temperature': { - 'NOMINAL': xr.DataArray( + "brightness_temperature": { + "NOMINAL": xr.DataArray( [[np.nan, 279.82318], [543.2585, 812.77167]], - dims=('y', 'x') + dims=("y", "x") ), - 'GSICS': xr.DataArray( + "GSICS": xr.DataArray( [[np.nan, 189.20985], [285.53293, 356.06668]], - dims=('y', 'x') + dims=("y", "x") ), - 'EXTERNAL': xr.DataArray( + "EXTERNAL": xr.DataArray( [[np.nan, 335.14236], [758.6249, 1262.7567]], - dims=('y', 'x') 
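The test_get_gain_offset cases above fully determine the coefficient selection: external coefficients always win, GSICS offsets are stored as counts and must be scaled by the GSICS gain, and otherwise the nominal values are used. An illustrative re-implementation over the inner coefs mapping from the fixture (a sketch, not the handler's actual code):

def select_gain_offset(coefs, calib_mode):
    """Pick (gain, offset) the way the parametrized expectations require."""
    if coefs["EXTERNAL"]:
        return coefs["EXTERNAL"]["gain"], coefs["EXTERNAL"]["offset"]
    if calib_mode == "GSICS":
        gain = coefs["GSICS"]["gain"]
        # The GSICS offset is an offset *count*; scale it into radiance units.
        return gain, coefs["GSICS"]["offset"] * gain
    return coefs["NOMINAL"]["gain"], coefs["NOMINAL"]["offset"]

coefs = {"NOMINAL": {"gain": 10, "offset": -1},
         "GSICS": {"gain": 20, "offset": -2},
         "EXTERNAL": {}}
assert select_gain_offset(coefs, "NOMINAL") == (10, -1)
assert select_gain_offset(coefs, "GSICS") == (20, -40)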
+ dims=("y", "x") ), } }, - 'HRV': { - 'counts': { - 'NOMINAL': xr.DataArray( + "HRV": { + "counts": { + "NOMINAL": xr.DataArray( [[0, 10], [100, 255]], - dims=('y', 'x') + dims=("y", "x") ) }, - 'radiance': { - 'NOMINAL': xr.DataArray( + "radiance": { + "NOMINAL": xr.DataArray( [[np.nan, 108], [1188, 3048]], - dims=('y', 'x') + dims=("y", "x") ), - 'GSICS': xr.DataArray( + "GSICS": xr.DataArray( [[np.nan, 108], [1188, 3048]], - dims=('y', 'x') + dims=("y", "x") ), - 'EXTERNAL': xr.DataArray( + "EXTERNAL": xr.DataArray( [[np.nan, 45], [495, 1270]], - dims=('y', 'x') + dims=("y", "x") ) }, - 'reflectance': { - 'NOMINAL': xr.DataArray( + "reflectance": { + "NOMINAL": xr.DataArray( [[np.nan, 415.26767], [4567.944, 11719.775]], - dims=('y', 'x') + dims=("y", "x") ), - 'EXTERNAL': xr.DataArray( + "EXTERNAL": xr.DataArray( [[np.nan, 173.02817], [1903.31, 4883.2397]], - dims=('y', 'x') + dims=("y", "x") ) } } } - @pytest.fixture(name='counts') + @pytest.fixture(name="counts") def counts(self): """Provide fake image counts.""" return xr.DataArray( [[0, 10], [100, 255]], - dims=('y', 'x') + dims=("y", "x") ) def _get_expected( self, channel, calibration, calib_mode, use_ext_coefs ): if use_ext_coefs: - return self.expected[channel][calibration]['EXTERNAL'] + return self.expected[channel][calibration]["EXTERNAL"] return self.expected[channel][calibration][calib_mode] diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py index 90785ffdbf..ae042999e3 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py @@ -54,89 +54,89 @@ def setUp(self): ncols=5568, ) self.reader.mda.update({ - 'segment_sequence_number': 18, - 'planned_start_segment_number': 1 + "segment_sequence_number": 18, + "planned_start_segment_number": 1 }) self.reader.fill_hrv = True - @mock.patch('satpy.readers.hrit_base.np.memmap') + @mock.patch("satpy.readers.hrit_base.np.memmap") def test_read_hrv_band(self, memmap): """Test reading the hrv band.""" - nbits = self.reader.mda['number_of_bits_per_pixel'] + nbits = self.reader.mda["number_of_bits_per_pixel"] memmap.return_value = np.random.randint(0, 256, size=int((464 * 5568 * nbits) / 8), dtype=np.uint8) - res = self.reader.read_band('HRV', None) + res = self.reader.read_band("HRV", None) self.assertEqual(res.shape, (464, 5568)) - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate') + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") def test_get_dataset(self, calibrate, parent_get_dataset): """Test getting the hrv dataset.""" - key = make_dataid(name='HRV', calibration='reflectance') + key = make_dataid(name="HRV", calibration="reflectance") info = setup.get_fake_dataset_info() parent_get_dataset.return_value = mock.MagicMock() - calibrate.return_value = xr.DataArray(data=np.zeros((464, 5568)), dims=('y', 'x')) + calibrate.return_value = xr.DataArray(data=np.zeros((464, 5568)), dims=("y", "x")) res = self.reader.get_dataset(key, info) self.assertEqual(res.shape, (464, 11136)) # Test method calls parent_get_dataset.assert_called_with(key, info) - calibrate.assert_called_with(parent_get_dataset(), key['calibration']) + calibrate.assert_called_with(parent_get_dataset(), key["calibration"]) self.assert_attrs_equal(res.attrs, setup.get_attrs_exp()) np.testing.assert_equal( - 
res['acq_time'], + res["acq_time"], setup.get_acq_time_exp(self.observation_start_time, self.nlines) ) - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate') + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") def test_get_dataset_non_fill(self, calibrate, parent_get_dataset): """Test getting a non-filled hrv dataset.""" - key = make_dataid(name='HRV', calibration='reflectance') - key.name = 'HRV' + key = make_dataid(name="HRV", calibration="reflectance") + key.name = "HRV" info = setup.get_fake_dataset_info() self.reader.fill_hrv = False parent_get_dataset.return_value = mock.MagicMock() - calibrate.return_value = xr.DataArray(data=np.zeros((464, 5568)), dims=('y', 'x')) + calibrate.return_value = xr.DataArray(data=np.zeros((464, 5568)), dims=("y", "x")) res = self.reader.get_dataset(key, info) self.assertEqual(res.shape, (464, 5568)) # Test method calls parent_get_dataset.assert_called_with(key, info) - calibrate.assert_called_with(parent_get_dataset(), key['calibration']) + calibrate.assert_called_with(parent_get_dataset(), key["calibration"]) self.assert_attrs_equal(res.attrs, setup.get_attrs_exp()) np.testing.assert_equal( - res['acq_time'], + res["acq_time"], setup.get_acq_time_exp(self.observation_start_time, self.nlines) ) def test_get_area_def(self): """Test getting the area def.""" from pyresample.utils import proj4_radius_parameters - area = self.reader.get_area_def(make_dataid(name='HRV', resolution=1000)) + area = self.reader.get_area_def(make_dataid(name="HRV", resolution=1000)) self.assertEqual(area.area_extent, (-45561979844414.07, -3720765401003.719, 45602912357076.38, 77771774058.38356)) proj_dict = area.proj_dict a, b = proj4_radius_parameters(proj_dict) self.assertEqual(a, 6378169.0) self.assertAlmostEqual(b, 6356583.8) - self.assertEqual(proj_dict['h'], 35785831.0) - self.assertEqual(proj_dict['lon_0'], 0.0) - self.assertEqual(proj_dict['proj'], 'geos') - self.assertEqual(proj_dict['units'], 'm') + self.assertEqual(proj_dict["h"], 35785831.0) + self.assertEqual(proj_dict["lon_0"], 0.0) + self.assertEqual(proj_dict["proj"], "geos") + self.assertEqual(proj_dict["units"], "m") self.reader.fill_hrv = False - area = self.reader.get_area_def(make_dataid(name='HRV', resolution=1000)) + area = self.reader.get_area_def(make_dataid(name="HRV", resolution=1000)) npt.assert_allclose(area.defs[0].area_extent, (-22017598561055.01, -2926674655354.9604, 23564847539690.22, 77771774058.38356)) npt.assert_allclose(area.defs[1].area_extent, (-30793529275853.656, -3720765401003.719, 14788916824891.568, -2926674655354.9604)) - self.assertEqual(area.defs[0].area_id, 'msg_seviri_fes_1km') - self.assertEqual(area.defs[1].area_id, 'msg_seviri_fes_1km') + self.assertEqual(area.defs[0].area_id, "msg_seviri_fes_1km") + self.assertEqual(area.defs[1].area_id, "msg_seviri_fes_1km") class TestHRITMSGFileHandler(TestHRITMSGBase): @@ -155,70 +155,70 @@ def setUp(self): projection_longitude=self.projection_longitude ) self.reader.mda.update({ - 'segment_sequence_number': 18, - 'planned_start_segment_number': 1 + "segment_sequence_number": 18, + "planned_start_segment_number": 1 }) def _get_fake_data(self): return xr.DataArray( data=np.zeros((self.nlines, self.ncols)), - dims=('y', 'x') + dims=("y", "x") ) def test_get_area_def(self): """Test getting the area def.""" from pyresample.utils import 
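The HRV tests above read a (464, 5568) segment and, with fill_hrv enabled, get a (464, 11136) full-disk array back. The underlying idea, sketched for a single scan line with a hypothetical helper name (the real reader additionally splits the disk into the upper and lower windows described by the epilogue):

import numpy as np

def fill_hrv_line(line, east_bound, west_bound, full_width=11136):
    """Place measured HRV columns into the full-disk grid, NaN elsewhere."""
    out = np.full(full_width, np.nan, dtype=np.float32)
    # Bounds are 1-based and inclusive: west_bound - east_bound + 1 columns.
    out[east_bound - 1:west_bound] = line
    return out

# With the fake epilogue's lower window (east 2877, west 8444), the
# 8444 - 2877 + 1 == 5568 measured columns land mid-disk.
filled = fill_hrv_line(np.zeros(5568, dtype=np.float32), 2877, 8444)
assert filled.shape == (11136,)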
proj4_radius_parameters - area = self.reader.get_area_def(make_dataid(name='VIS006', resolution=3000)) + area = self.reader.get_area_def(make_dataid(name="VIS006", resolution=3000)) proj_dict = area.proj_dict a, b = proj4_radius_parameters(proj_dict) self.assertEqual(a, 6378169.0) self.assertAlmostEqual(b, 6356583.8) - self.assertEqual(proj_dict['h'], 35785831.0) - self.assertEqual(proj_dict['lon_0'], self.projection_longitude) - self.assertEqual(proj_dict['proj'], 'geos') - self.assertEqual(proj_dict['units'], 'm') + self.assertEqual(proj_dict["h"], 35785831.0) + self.assertEqual(proj_dict["lon_0"], self.projection_longitude) + self.assertEqual(proj_dict["proj"], "geos") + self.assertEqual(proj_dict["units"], "m") self.assertEqual(area.area_extent, (-77771774058.38356, -3720765401003.719, 30310525626438.438, 77771774058.38356)) # Data shifted by 1.5km to N-W - self.reader.mda['offset_corrected'] = False - area = self.reader.get_area_def(make_dataid(name='VIS006', resolution=3000)) + self.reader.mda["offset_corrected"] = False + area = self.reader.get_area_def(make_dataid(name="VIS006", resolution=3000)) self.assertEqual(area.area_extent, (-77771772558.38356, -3720765402503.719, 30310525627938.438, 77771772558.38356)) - self.assertEqual(area.area_id, 'msg_seviri_rss_3km') + self.assertEqual(area.area_id, "msg_seviri_rss_3km") - @mock.patch('satpy.readers.hrit_base.np.memmap') + @mock.patch("satpy.readers.hrit_base.np.memmap") def test_read_band(self, memmap): """Test reading a band.""" - nbits = self.reader.mda['number_of_bits_per_pixel'] + nbits = self.reader.mda["number_of_bits_per_pixel"] memmap.return_value = np.random.randint(0, 256, size=int((464 * 3712 * nbits) / 8), dtype=np.uint8) - res = self.reader.read_band('VIS006', None) + res = self.reader.read_band("VIS006", None) self.assertEqual(res.shape, (464, 3712)) - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate') + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") def test_get_dataset(self, calibrate, parent_get_dataset): """Test getting the dataset.""" data = self._get_fake_data() parent_get_dataset.return_value = mock.MagicMock() calibrate.return_value = data - key = make_dataid(name='VIS006', calibration='reflectance') + key = make_dataid(name="VIS006", calibration="reflectance") info = setup.get_fake_dataset_info() res = self.reader.get_dataset(key, info) # Test method calls - new_data = np.zeros_like(data.data).astype('float32') + new_data = np.zeros_like(data.data).astype("float32") new_data[:, :] = np.nan expected = data.copy(data=new_data) - expected['acq_time'] = ( - 'y', + expected["acq_time"] = ( + "y", setup.get_acq_time_exp(self.observation_start_time, self.nlines) ) xr.testing.assert_equal(res, expected) @@ -237,26 +237,26 @@ def test_get_dataset(self, calibrate, parent_get_dataset): # test repeat cycle duration self.assertEqual(15, self.reader._repeat_cycle_duration) # Change the reducescan scenario to test the repeat cycle duration handling - self.reader.epilogue['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] = 1 + self.reader.epilogue["ImageProductionStats"]["ActualScanningSummary"]["ReducedScan"] = 1 self.assertEqual(5, self.reader._repeat_cycle_duration) - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate') 
+ @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") def test_get_dataset_without_masking_bad_scan_lines(self, calibrate, parent_get_dataset): """Test getting the dataset.""" data = self._get_fake_data() parent_get_dataset.return_value = mock.MagicMock() calibrate.return_value = data - key = make_dataid(name='VIS006', calibration='reflectance') + key = make_dataid(name="VIS006", calibration="reflectance") info = setup.get_fake_dataset_info() self.reader.mask_bad_quality_scan_lines = False res = self.reader.get_dataset(key, info) # Test method calls expected = data.copy() - expected['acq_time'] = ( - 'y', + expected["acq_time"] = ( + "y", setup.get_acq_time_exp(self.observation_start_time, self.nlines) ) xr.testing.assert_equal(res, expected) @@ -265,27 +265,27 @@ def test_get_dataset_without_masking_bad_scan_lines(self, calibrate, parent_get_ setup.get_attrs_exp(self.projection_longitude) ) - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate') + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") def test_get_dataset_with_raw_metadata(self, calibrate, parent_get_dataset): """Test getting the dataset.""" calibrate.return_value = self._get_fake_data() - key = make_dataid(name='VIS006', calibration='reflectance') + key = make_dataid(name="VIS006", calibration="reflectance") info = setup.get_fake_dataset_info() self.reader.include_raw_metadata = True res = self.reader.get_dataset(key, info) - assert 'raw_metadata' in res.attrs + assert "raw_metadata" in res.attrs def test_get_raw_mda(self): """Test provision of raw metadata.""" - self.reader.mda = {'segment': 1, 'loff': 123} - self.reader.prologue_.reduce = lambda max_size: {'prologue': 1} - self.reader.epilogue_.reduce = lambda max_size: {'epilogue': 1} - expected = {'prologue': 1, 'epilogue': 1, 'segment': 1} + self.reader.mda = {"segment": 1, "loff": 123} + self.reader.prologue_.reduce = lambda max_size: {"prologue": 1} + self.reader.epilogue_.reduce = lambda max_size: {"epilogue": 1} + expected = {"prologue": 1, "epilogue": 1, "segment": 1} self.assertDictEqual(self.reader._get_raw_mda(), expected) # Make sure _get_raw_mda() doesn't modify the original dictionary - self.assertIn('loff', self.reader.mda) + self.assertIn("loff", self.reader.mda) def test_satpos_no_valid_orbit_polynomial(self): """Test satellite position if there is no valid orbit polynomial.""" @@ -297,8 +297,8 @@ def test_satpos_no_valid_orbit_polynomial(self): orbit_polynomials=ORBIT_POLYNOMIALS_INVALID ) self.assertNotIn( - 'satellite_actual_longitude', - reader.mda['orbital_parameters'] + "satellite_actual_longitude", + reader.mda["orbital_parameters"] ) @@ -314,8 +314,8 @@ def setUp(self, *mocks): ) self.reader = fh.prologue_ - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGPrologueFileHandler.read_prologue') - @mock.patch('satpy.readers.hrit_base.HRITFileHandler.__init__', autospec=True) + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGPrologueFileHandler.read_prologue") + @mock.patch("satpy.readers.hrit_base.HRITFileHandler.__init__", autospec=True) def test_extra_kwargs(self, init, *mocks): """Test whether the prologue file handler accepts extra keyword arguments.""" @@ -324,31 +324,31 @@ def init_patched(self, *args, **kwargs): init.side_effect = init_patched - 
HRITMSGPrologueFileHandler(filename='dummy_prologue_filename', - filename_info={'service': ''}, + HRITMSGPrologueFileHandler(filename="dummy_prologue_filename", + filename_info={"service": ""}, filetype_info=None, ext_calib_coefs={}, mda_max_array_size=123, - calib_mode='nominal') + calib_mode="nominal") - @mock.patch('satpy.readers.seviri_l1b_hrit.utils.reduce_mda') + @mock.patch("satpy.readers.seviri_l1b_hrit.utils.reduce_mda") def test_reduce(self, reduce_mda): """Test metadata reduction.""" - reduce_mda.return_value = 'reduced' + reduce_mda.return_value = "reduced" # Set buffer - self.assertEqual(self.reader.reduce(123), 'reduced') + self.assertEqual(self.reader.reduce(123), "reduced") # Read buffer - self.assertEqual(self.reader.reduce(123), 'reduced') + self.assertEqual(self.reader.reduce(123), "reduced") reduce_mda.assert_called_once() class TestHRITMSGEpilogueFileHandler(unittest.TestCase): """Test the HRIT epilogue file handler.""" - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue') - @mock.patch('satpy.readers.hrit_base.HRITFileHandler.__init__', autospec=True) + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue") + @mock.patch("satpy.readers.hrit_base.HRITFileHandler.__init__", autospec=True) def setUp(self, init, *mocks): """Set up the test case.""" @@ -357,13 +357,13 @@ def init_patched(self, *args, **kwargs): init.side_effect = init_patched - self.reader = HRITMSGEpilogueFileHandler(filename='dummy_epilogue_filename', - filename_info={'service': ''}, + self.reader = HRITMSGEpilogueFileHandler(filename="dummy_epilogue_filename", + filename_info={"service": ""}, filetype_info=None, - calib_mode='nominal') + calib_mode="nominal") - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue') - @mock.patch('satpy.readers.hrit_base.HRITFileHandler.__init__', autospec=True) + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue") + @mock.patch("satpy.readers.hrit_base.HRITFileHandler.__init__", autospec=True) def test_extra_kwargs(self, init, *mocks): """Test whether the epilogue file handler accepts extra keyword arguments.""" @@ -372,74 +372,74 @@ def init_patched(self, *args, **kwargs): init.side_effect = init_patched - HRITMSGEpilogueFileHandler(filename='dummy_epilogue_filename', - filename_info={'service': ''}, + HRITMSGEpilogueFileHandler(filename="dummy_epilogue_filename", + filename_info={"service": ""}, filetype_info=None, ext_calib_coefs={}, mda_max_array_size=123, - calib_mode='nominal') + calib_mode="nominal") - @mock.patch('satpy.readers.seviri_l1b_hrit.utils.reduce_mda') + @mock.patch("satpy.readers.seviri_l1b_hrit.utils.reduce_mda") def test_reduce(self, reduce_mda): """Test metadata reduction.""" - reduce_mda.return_value = 'reduced' + reduce_mda.return_value = "reduced" # Set buffer - self.assertEqual(self.reader.reduce(123), 'reduced') + self.assertEqual(self.reader.reduce(123), "reduced") reduce_mda.assert_called() # Read buffer reduce_mda.reset_mock() - self.reader._reduced = 'red' - self.assertEqual(self.reader.reduce(123), 'red') + self.reader._reduced = "red" + self.assertEqual(self.reader.reduce(123), "red") reduce_mda.assert_not_called() class TestHRITMSGCalibration(TestFileHandlerCalibrationBase): """Unit tests for calibration.""" - @pytest.fixture(name='file_handler') + @pytest.fixture(name="file_handler") def file_handler(self): """Create a mocked file handler.""" prolog = { - 'RadiometricProcessing': { - 
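Both test_reduce cases above pin down a simple caching contract: the first reduce() call runs utils.reduce_mda and stores the result, and later calls return the cached value without recomputing. A hypothetical sketch of that pattern, assuming reduce_mda is the satpy.readers.utils function being mocked above:

from satpy.readers.utils import reduce_mda

class CachedMetadata:
    """Compute a reduced copy of the metadata once, then serve the cache."""

    def __init__(self, mda):
        self.mda = mda
        self._reduced = None

    def reduce(self, mda_max_array_size):
        # Only the first call pays for the reduction; see reduce_mda.assert_called_once().
        if self._reduced is None:
            self._reduced = reduce_mda(self.mda, mda_max_array_size)
        return self._reduced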
'Level15ImageCalibration': { - 'CalSlope': self.gains_nominal, - 'CalOffset': self.offsets_nominal, + "RadiometricProcessing": { + "Level15ImageCalibration": { + "CalSlope": self.gains_nominal, + "CalOffset": self.offsets_nominal, }, - 'MPEFCalFeedback': { - 'GSICSCalCoeff': self.gains_gsics, - 'GSICSOffsetCount': self.offsets_gsics, + "MPEFCalFeedback": { + "GSICSCalCoeff": self.gains_gsics, + "GSICSOffsetCount": self.offsets_gsics, } }, - 'ImageDescription': { - 'Level15ImageProduction': { - 'PlannedChanProcessing': self.radiance_types + "ImageDescription": { + "Level15ImageProduction": { + "PlannedChanProcessing": self.radiance_types } }, - 'ImageAcquisition': { - 'PlannedAcquisitionTime': { - 'TrueRepeatCycleStart': self.scan_time, + "ImageAcquisition": { + "PlannedAcquisitionTime": { + "TrueRepeatCycleStart": self.scan_time, } } } epilog = { - 'ImageProductionStats': { - 'ActualScanningSummary': { - 'ForwardScanStart': self.scan_time + "ImageProductionStats": { + "ActualScanningSummary": { + "ForwardScanStart": self.scan_time } } } mda = { - 'image_segment_line_quality': { - 'line_validity': np.array([3, 3]), - 'line_radiometric_quality': np.array([4, 4]), - 'line_geometric_quality': np.array([4, 4]) + "image_segment_line_quality": { + "line_validity": np.array([3, 3]), + "line_radiometric_quality": np.array([4, 4]), + "line_geometric_quality": np.array([4, 4]) }, } with mock.patch( - 'satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.__init__', + "satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.__init__", return_value=None ): fh = HRITMSGFileHandler() @@ -450,33 +450,33 @@ def file_handler(self): return fh @pytest.mark.parametrize( - ('channel', 'calibration', 'calib_mode', 'use_ext_coefs'), + ("channel", "calibration", "calib_mode", "use_ext_coefs"), [ # VIS channel, internal coefficients - ('VIS006', 'counts', 'NOMINAL', False), - ('VIS006', 'radiance', 'NOMINAL', False), - ('VIS006', 'radiance', 'GSICS', False), - ('VIS006', 'reflectance', 'NOMINAL', False), + ("VIS006", "counts", "NOMINAL", False), + ("VIS006", "radiance", "NOMINAL", False), + ("VIS006", "radiance", "GSICS", False), + ("VIS006", "reflectance", "NOMINAL", False), # VIS channel, external coefficients (mode should have no effect) - ('VIS006', 'radiance', 'GSICS', True), - ('VIS006', 'reflectance', 'NOMINAL', True), + ("VIS006", "radiance", "GSICS", True), + ("VIS006", "reflectance", "NOMINAL", True), # IR channel, internal coefficients - ('IR_108', 'counts', 'NOMINAL', False), - ('IR_108', 'radiance', 'NOMINAL', False), - ('IR_108', 'radiance', 'GSICS', False), - ('IR_108', 'brightness_temperature', 'NOMINAL', False), - ('IR_108', 'brightness_temperature', 'GSICS', False), + ("IR_108", "counts", "NOMINAL", False), + ("IR_108", "radiance", "NOMINAL", False), + ("IR_108", "radiance", "GSICS", False), + ("IR_108", "brightness_temperature", "NOMINAL", False), + ("IR_108", "brightness_temperature", "GSICS", False), # IR channel, external coefficients (mode should have no effect) - ('IR_108', 'radiance', 'NOMINAL', True), - ('IR_108', 'brightness_temperature', 'GSICS', True), + ("IR_108", "radiance", "NOMINAL", True), + ("IR_108", "brightness_temperature", "GSICS", True), # HRV channel, internal coefficients - ('HRV', 'counts', 'NOMINAL', False), - ('HRV', 'radiance', 'NOMINAL', False), - ('HRV', 'radiance', 'GSICS', False), - ('HRV', 'reflectance', 'NOMINAL', 
False), # HRV channel, external coefficients (mode should have no effect) - ('HRV', 'radiance', 'GSICS', True), - ('HRV', 'reflectance', 'NOMINAL', True), + ("HRV", "radiance", "GSICS", True), + ("HRV", "reflectance", "NOMINAL", True), ] ) def test_calibrate( @@ -493,7 +493,7 @@ def test_calibrate( ) fh = file_handler - fh.mda['spectral_channel_id'] = self.spectral_channel_ids[channel] + fh.mda["spectral_channel_id"] = self.spectral_channel_ids[channel] fh.channel_name = channel fh.calib_mode = calib_mode fh.ext_calib_coefs = external_coefs @@ -502,18 +502,18 @@ def test_calibrate( def test_mask_bad_quality(self, file_handler): """Test the masking of bad quality scan lines.""" - channel = 'VIS006' + channel = "VIS006" expected = self._get_expected( channel=channel, - calibration='radiance', - calib_mode='NOMINAL', + calibration="radiance", + calib_mode="NOMINAL", use_ext_coefs=False ) fh = file_handler res = fh._mask_bad_quality(expected) - new_data = np.zeros_like(expected.data).astype('float32') + new_data = np.zeros_like(expected.data).astype("float32") new_data[:, :] = np.nan expected = expected.copy(data=new_data) xr.testing.assert_equal(res, expected) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py index 841d45b943..b9ff1f95ea 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py @@ -28,15 +28,15 @@ def new_get_hd(instance, hdr_info): """Generate some metadata.""" - instance.mda = {'spectral_channel_id': 1} - instance.mda.setdefault('number_of_bits_per_pixel', 10) + instance.mda = {"spectral_channel_id": 1} + instance.mda.setdefault("number_of_bits_per_pixel", 10) - instance.mda['projection_parameters'] = {'a': 6378169.00, - 'b': 6356583.80, - 'h': 35785831.00, - 'SSP_longitude': 0.0} - instance.mda['orbital_parameters'] = {} - instance.mda['total_header_length'] = 12 + instance.mda["projection_parameters"] = {"a": 6378169.00, + "b": 6356583.80, + "h": 35785831.00, + "SSP_longitude": 0.0} + instance.mda["orbital_parameters"] = {} + instance.mda["total_header_length"] = 12 def get_new_read_prologue(prologue): @@ -55,18 +55,18 @@ def get_fake_file_handler(observation_start_time, nlines, ncols, projection_long epilogue = get_fake_epilogue() m = mock.mock_open() - with mock.patch('satpy.readers.seviri_l1b_hrit.np.fromfile') as fromfile, \ - mock.patch('satpy.readers.hrit_base.open', m, create=True) as newopen, \ - mock.patch('satpy.readers.utils.open', m, create=True) as utilopen, \ - mock.patch('satpy.readers.seviri_l1b_hrit.CHANNEL_NAMES'), \ - mock.patch.object(HRITMSGFileHandler, '_get_hd', new=new_get_hd), \ - mock.patch.object(HRITMSGPrologueFileHandler, 'read_prologue', + with mock.patch("satpy.readers.seviri_l1b_hrit.np.fromfile") as fromfile, \ + mock.patch("satpy.readers.hrit_base.open", m, create=True) as newopen, \ + mock.patch("satpy.readers.utils.open", m, create=True) as utilopen, \ + mock.patch("satpy.readers.seviri_l1b_hrit.CHANNEL_NAMES"), \ + mock.patch.object(HRITMSGFileHandler, "_get_hd", new=new_get_hd), \ + mock.patch.object(HRITMSGPrologueFileHandler, "read_prologue", new=get_new_read_prologue(prologue)): fromfile.return_value = np.array( [(1, 2)], - dtype=[('total_header_length', int), - ('hdr_id', int)] + dtype=[("total_header_length", int), + ("hdr_id", int)] ) newopen.return_value.__enter__.return_value.tell.return_value = 1 # The size of the return value hereafter was chosen arbitrarily with the expectation @@ 
-74,16 +74,16 @@ def get_fake_file_handler(observation_start_time, nlines, ncols, projection_long # files. utilopen.return_value.__enter__.return_value.read.return_value = bytes([0]*8192) prologue = HRITMSGPrologueFileHandler( - filename='dummy_prologue_filename', + filename="dummy_prologue_filename", filename_info=filename_info, filetype_info={} ) epilogue = mock.MagicMock(epilogue=epilogue) reader = HRITMSGFileHandler( - 'filename', + "filename", filename_info, - {'filetype': 'info'}, + {"filetype": "info"}, prologue, epilogue ) @@ -99,30 +99,30 @@ def get_fake_prologue(projection_longitude, orbit_polynomials): "SatelliteId": 324, "NominalLongitude": -3.5 }, - 'Orbit': { - 'OrbitPolynomial': orbit_polynomials, + "Orbit": { + "OrbitPolynomial": orbit_polynomials, } }, - 'GeometricProcessing': { - 'EarthModel': { - 'TypeOfEarthModel': 2, - 'EquatorialRadius': 6378.169, - 'NorthPolarRadius': 6356.5838, - 'SouthPolarRadius': 6356.5838 + "GeometricProcessing": { + "EarthModel": { + "TypeOfEarthModel": 2, + "EquatorialRadius": 6378.169, + "NorthPolarRadius": 6356.5838, + "SouthPolarRadius": 6356.5838 } }, - 'ImageDescription': { - 'ProjectionDescription': { - 'LongitudeOfSSP': projection_longitude + "ImageDescription": { + "ProjectionDescription": { + "LongitudeOfSSP": projection_longitude }, - 'Level15ImageProduction': { - 'ImageProcDirection': 1 + "Level15ImageProduction": { + "ImageProcDirection": 1 } }, - 'ImageAcquisition': { - 'PlannedAcquisitionTime': { - 'TrueRepeatCycleStart': datetime(2006, 1, 1, 12, 15, 9, 304888), - 'PlannedRepeatCycleEnd': datetime(2006, 1, 1, 12, 30, 0, 0) + "ImageAcquisition": { + "PlannedAcquisitionTime": { + "TrueRepeatCycleStart": datetime(2006, 1, 1, 12, 15, 9, 304888), + "PlannedRepeatCycleEnd": datetime(2006, 1, 1, 12, 30, 0, 0) } } } @@ -131,21 +131,21 @@ def get_fake_prologue(projection_longitude, orbit_polynomials): def get_fake_epilogue(): """Create a fake HRIT epilogue.""" return { - 'ImageProductionStats': { - 'ActualL15CoverageHRV': { - 'LowerSouthLineActual': 1, - 'LowerNorthLineActual': 8256, - 'LowerEastColumnActual': 2877, - 'LowerWestColumnActual': 8444, - 'UpperSouthLineActual': 8257, - 'UpperNorthLineActual': 11136, - 'UpperEastColumnActual': 1805, - 'UpperWestColumnActual': 7372 + "ImageProductionStats": { + "ActualL15CoverageHRV": { + "LowerSouthLineActual": 1, + "LowerNorthLineActual": 8256, + "LowerEastColumnActual": 2877, + "LowerWestColumnActual": 8444, + "UpperSouthLineActual": 8257, + "UpperNorthLineActual": 11136, + "UpperEastColumnActual": 1805, + "UpperWestColumnActual": 7372 }, - 'ActualScanningSummary': { - 'ReducedScan': 0, - 'ForwardScanStart': datetime(2006, 1, 1, 12, 15, 9, 304888), - 'ForwardScanEnd': datetime(2006, 1, 1, 12, 27, 39, 0) + "ActualScanningSummary": { + "ReducedScan": 0, + "ForwardScanStart": datetime(2006, 1, 1, 12, 15, 9, 304888), + "ForwardScanEnd": datetime(2006, 1, 1, 12, 27, 39, 0) } } } @@ -156,19 +156,19 @@ def get_fake_mda(nlines, ncols, start_time): nbits = 10 tline = get_acq_time_cds(start_time, nlines) return { - 'number_of_bits_per_pixel': nbits, - 'number_of_lines': nlines, - 'number_of_columns': ncols, - 'data_field_length': nlines * ncols * nbits, - 'cfac': 5, - 'lfac': 5, - 'coff': 10, - 'loff': 10, - 'image_segment_line_quality': { - 'line_mean_acquisition': tline, - 'line_validity': np.full(nlines, 3), - 'line_radiometric_quality': np.full(nlines, 4), - 'line_geometric_quality': np.full(nlines, 4) + "number_of_bits_per_pixel": nbits, + "number_of_lines": nlines, + "number_of_columns": ncols, 
+ "data_field_length": nlines * ncols * nbits, + "cfac": 5, + "lfac": 5, + "coff": 10, + "loff": 10, + "image_segment_line_quality": { + "line_mean_acquisition": tline, + "line_validity": np.full(nlines, 3), + "line_radiometric_quality": np.full(nlines, 4), + "line_geometric_quality": np.full(nlines, 4) } } @@ -176,18 +176,18 @@ def get_fake_mda(nlines, ncols, start_time): def get_fake_filename_info(start_time): """Create fake filename information.""" return { - 'platform_shortname': 'MSG3', - 'start_time': start_time, - 'service': 'MSG' + "platform_shortname": "MSG3", + "start_time": start_time, + "service": "MSG" } def get_fake_dataset_info(): """Create fake dataset info.""" return { - 'units': 'units', - 'wavelength': 'wavelength', - 'standard_name': 'standard_name' + "units": "units", + "wavelength": "wavelength", + "standard_name": "standard_name" } @@ -196,47 +196,47 @@ def get_acq_time_cds(start_time, nlines): days_since_1958 = (start_time - datetime(1958, 1, 1)).days tline = np.zeros( nlines, - dtype=[('days', '>u2'), ('milliseconds', '>u4')] + dtype=[("days", ">u2"), ("milliseconds", ">u4")] ) - tline['days'][1:-1] = days_since_1958 * np.ones(nlines - 2) + tline["days"][1:-1] = days_since_1958 * np.ones(nlines - 2) offset_second = (start_time - start_time.replace(hour=0, minute=0, second=0, microsecond=0)).total_seconds()*1000 - tline['milliseconds'][1:-1] = np.arange(nlines - 2)+offset_second + tline["milliseconds"][1:-1] = np.arange(nlines - 2)+offset_second return tline def get_acq_time_exp(start_time, nlines): """Get expected scanline acquisition times.""" - tline_exp = np.zeros(464, dtype='datetime64[ms]') - tline_exp[0] = np.datetime64('NaT') - tline_exp[-1] = np.datetime64('NaT') + tline_exp = np.zeros(464, dtype="datetime64[ms]") + tline_exp[0] = np.datetime64("NaT") + tline_exp[-1] = np.datetime64("NaT") tline_exp[1:-1] = np.datetime64(start_time) - tline_exp[1:-1] += np.arange(nlines - 2).astype('timedelta64[ms]') + tline_exp[1:-1] += np.arange(nlines - 2).astype("timedelta64[ms]") return tline_exp def get_attrs_exp(projection_longitude=0.0): """Get expected dataset attributes.""" return { - 'units': 'units', - 'wavelength': 'wavelength', - 'standard_name': 'standard_name', - 'platform_name': 'Meteosat-11', - 'sensor': 'seviri', - 'orbital_parameters': {'projection_longitude': projection_longitude, - 'projection_latitude': 0., - 'projection_altitude': 35785831.0, - 'satellite_nominal_longitude': -3.5, - 'satellite_nominal_latitude': 0.0, - 'satellite_actual_longitude': -3.55117540817073, - 'satellite_actual_latitude': -0.5711243456528018, - 'satellite_actual_altitude': 35783296.150123544}, - 'georef_offset_corrected': True, - 'nominal_start_time': (datetime(2006, 1, 1, 12, 15),), - 'nominal_end_time': (datetime(2006, 1, 1, 12, 30),), - 'time_parameters': { - 'nominal_start_time': datetime(2006, 1, 1, 12, 15), - 'nominal_end_time': datetime(2006, 1, 1, 12, 30), - 'observation_start_time': datetime(2006, 1, 1, 12, 15, 9, 304888), - 'observation_end_time': datetime(2006, 1, 1, 12, 27, 39, 0) + "units": "units", + "wavelength": "wavelength", + "standard_name": "standard_name", + "platform_name": "Meteosat-11", + "sensor": "seviri", + "orbital_parameters": {"projection_longitude": projection_longitude, + "projection_latitude": 0., + "projection_altitude": 35785831.0, + "satellite_nominal_longitude": -3.5, + "satellite_nominal_latitude": 0.0, + "satellite_actual_longitude": -3.55117540817073, + "satellite_actual_latitude": -0.5711243456528018, + "satellite_actual_altitude": 
35783296.150123544}, + "georef_offset_corrected": True, + "nominal_start_time": (datetime(2006, 1, 1, 12, 15),), + "nominal_end_time": (datetime(2006, 1, 1, 12, 30),), + "time_parameters": { + "nominal_start_time": datetime(2006, 1, 1, 12, 15), + "nominal_end_time": datetime(2006, 1, 1, 12, 30), + "observation_start_time": datetime(2006, 1, 1, 12, 15, 9, 304888), + "observation_end_time": datetime(2006, 1, 1, 12, 27, 39, 0) } } diff --git a/satpy/tests/reader_tests/test_seviri_l1b_icare.py b/satpy/tests/reader_tests/test_seviri_l1b_icare.py index 5ca8ac1a2e..81d385bc89 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_icare.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_icare.py @@ -38,56 +38,56 @@ class FakeHDF4FileHandler2(FakeHDF4FileHandler): def get_test_content(self, filename, filename_info, filename_type): """Mimic reader input file content.""" file_content = {} - file_content['/attr/Nadir_Pixel_Size'] = 3000. - file_content['/attr/Beginning_Acquisition_Date'] = "2004-12-29T12:15:00Z" - file_content['/attr/End_Acquisition_Date'] = "2004-12-29T12:27:44Z" - file_content['/attr/Geolocation'] = ('1.3642337E7', '1856.0', '1.3642337E7', '1856.0') - file_content['/attr/Altitude'] = '42164.0' - file_content['/attr/Geographic_Projection'] = 'geos' - file_content['/attr/Projection_Longitude'] = '0.0' - file_content['/attr/Sub_Satellite_Longitude'] = '3.4' - file_content['/attr/Sensors'] = 'MSG1/SEVIRI' - file_content['/attr/Zone'] = 'G' - file_content['/attr/_FillValue'] = 1 - file_content['/attr/scale_factor'] = 1. - file_content['/attr/add_offset'] = 0. + file_content["/attr/Nadir_Pixel_Size"] = 3000. + file_content["/attr/Beginning_Acquisition_Date"] = "2004-12-29T12:15:00Z" + file_content["/attr/End_Acquisition_Date"] = "2004-12-29T12:27:44Z" + file_content["/attr/Geolocation"] = ("1.3642337E7", "1856.0", "1.3642337E7", "1856.0") + file_content["/attr/Altitude"] = "42164.0" + file_content["/attr/Geographic_Projection"] = "geos" + file_content["/attr/Projection_Longitude"] = "0.0" + file_content["/attr/Sub_Satellite_Longitude"] = "3.4" + file_content["/attr/Sensors"] = "MSG1/SEVIRI" + file_content["/attr/Zone"] = "G" + file_content["/attr/_FillValue"] = 1 + file_content["/attr/scale_factor"] = 1. + file_content["/attr/add_offset"] = 0. # test one IR and one VIS channel - file_content['Normalized_Radiance'] = DEFAULT_FILE_DATA - file_content['Normalized_Radiance/attr/_FillValue'] = 1 - file_content['Normalized_Radiance/attr/scale_factor'] = 1. - file_content['Normalized_Radiance/attr/add_offset'] = 0. - file_content['Normalized_Radiance/shape'] = DEFAULT_FILE_SHAPE - - file_content['Brightness_Temperature'] = DEFAULT_FILE_DATA - file_content['Brightness_Temperature/attr/_FillValue'] = 1 - file_content['Brightness_Temperature/attr/scale_factor'] = 1. - file_content['Brightness_Temperature/attr/add_offset'] = 0. - file_content['Brightness_Temperature/shape'] = DEFAULT_FILE_SHAPE + file_content["Normalized_Radiance"] = DEFAULT_FILE_DATA + file_content["Normalized_Radiance/attr/_FillValue"] = 1 + file_content["Normalized_Radiance/attr/scale_factor"] = 1. + file_content["Normalized_Radiance/attr/add_offset"] = 0. + file_content["Normalized_Radiance/shape"] = DEFAULT_FILE_SHAPE + + file_content["Brightness_Temperature"] = DEFAULT_FILE_DATA + file_content["Brightness_Temperature/attr/_FillValue"] = 1 + file_content["Brightness_Temperature/attr/scale_factor"] = 1. + file_content["Brightness_Temperature/attr/add_offset"] = 0. 
+ file_content["Brightness_Temperature/shape"] = DEFAULT_FILE_SHAPE # convert to xarrays from xarray import DataArray for key, val in file_content.items(): if isinstance(val, np.ndarray): attrs = {} - for a in ['_FillValue', 'scale_factor', 'add_offset']: - if key + '/attr/' + a in file_content: - attrs[a] = file_content[key + '/attr/' + a] - file_content[key] = DataArray(da.from_array(val), dims=('x', 'y'), attrs=attrs) + for a in ["_FillValue", "scale_factor", "add_offset"]: + if key + "/attr/" + a in file_content: + attrs[a] = file_content[key + "/attr/" + a] + file_content[key] = DataArray(da.from_array(val), dims=("x", "y"), attrs=attrs) return file_content class TestSEVIRIICAREReader(unittest.TestCase): """Test SEVIRI L1b HDF4 from ICARE Reader.""" - yaml_file = 'seviri_l1b_icare.yaml' + yaml_file = "seviri_l1b_icare.yaml" def setUp(self): """Wrap HDF4 file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.seviri_l1b_icare import SEVIRI_ICARE - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) - self.p = mock.patch.object(SEVIRI_ICARE, '__bases__', (FakeHDF4FileHandler2,)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) + self.p = mock.patch.object(SEVIRI_ICARE, "__bases__", (FakeHDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -97,25 +97,25 @@ def tearDown(self): def compare_areas(self, v): """Compare produced AreaDefinition with expected.""" - test_area = {'area_id': 'geosmsg', - 'width': 10, - 'height': 300, - 'area_extent': (-5567248.2834071, + test_area = {"area_id": "geosmsg", + "width": 10, + "height": 300, + "area_extent": (-5567248.2834071, -5570248.6866857, -5537244.2506213, -4670127.7031114)} - self.assertEqual(v.attrs['area'].area_id, test_area['area_id']) - self.assertEqual(v.attrs['area'].width, test_area['width']) - self.assertEqual(v.attrs['area'].height, test_area['height']) - np.testing.assert_almost_equal(v.attrs['area'].area_extent, - test_area['area_extent']) + self.assertEqual(v.attrs["area"].area_id, test_area["area_id"]) + self.assertEqual(v.attrs["area"].width, test_area["width"]) + self.assertEqual(v.attrs["area"].height, test_area["height"]) + np.testing.assert_almost_equal(v.attrs["area"].area_extent, + test_area["area_extent"]) def test_init(self): """Test basic init with no extra parameters.""" r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf', - 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_IR108_V1-04.hdf' + "GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf", + "GEO_L1B-MSG1_2004-12-29T12-15-00_G_IR108_V1-04.hdf" ]) self.assertEqual(len(loadables), 2) r.create_filehandlers(loadables) @@ -126,49 +126,49 @@ def test_load_dataset_vis(self): from datetime import datetime r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf' + "GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf" ]) r.create_filehandlers(loadables) - datasets = r.load(['VIS008']) + datasets = r.load(["VIS008"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): dt = datetime(2004, 12, 29, 12, 27, 44) - self.assertEqual(v.attrs['end_time'], dt) - self.assertEqual(v.attrs['calibration'], 'reflectance') + self.assertEqual(v.attrs["end_time"], dt) + self.assertEqual(v.attrs["calibration"], "reflectance") def test_load_dataset_ir(self): """Test loading all datasets from a 
full swath file.""" r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_IR108_V1-04.hdf' + "GEO_L1B-MSG1_2004-12-29T12-15-00_G_IR108_V1-04.hdf" ]) r.create_filehandlers(loadables) - datasets = r.load(['IR_108']) + datasets = r.load(["IR_108"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['calibration'], 'brightness_temperature') + self.assertEqual(v.attrs["calibration"], "brightness_temperature") def test_area_def_lores(self): """Test loading all datasets from an area of interest file.""" r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf' + "GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf" ]) r.create_filehandlers(loadables) - ds = r.load(['VIS008']) - self.compare_areas(ds['VIS008']) - self.assertEqual(ds['VIS008'].attrs['area'].proj_id, 'msg_lowres') + ds = r.load(["VIS008"]) + self.compare_areas(ds["VIS008"]) + self.assertEqual(ds["VIS008"].attrs["area"].proj_id, "msg_lowres") def test_area_def_hires(self): """Test loading all datasets from an area of interest file.""" r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_HRV_V1-04.hdf', + "GEO_L1B-MSG1_2004-12-29T12-15-00_G_HRV_V1-04.hdf", ]) r.create_filehandlers(loadables) - ds = r.load(['HRV']) - self.compare_areas(ds['HRV']) - self.assertEqual(ds['HRV'].attrs['area'].proj_id, 'msg_hires') + ds = r.load(["HRV"]) + self.compare_areas(ds["HRV"]) + self.assertEqual(ds["HRV"].attrs["area"].proj_id, "msg_hires") def test_sensor_names(self): """Check satellite name conversion is correct, including error case.""" @@ -176,12 +176,12 @@ def test_sensor_names(self): mock.MagicMock(), mock.MagicMock(), mock.MagicMock()) - sensor_list = {'Meteosat-08': 'MSG1/SEVIRI', - 'Meteosat-09': 'MSG2/SEVIRI', - 'Meteosat-10': 'MSG3/SEVIRI', - 'Meteosat-11': 'MSG4/SEVIRI'} - with mock.patch('satpy.tests.reader_tests.test_seviri_l1b_icare.' - 'FakeHDF4FileHandler2.get_test_content') as patched_func: + sensor_list = {"Meteosat-08": "MSG1/SEVIRI", + "Meteosat-09": "MSG2/SEVIRI", + "Meteosat-10": "MSG3/SEVIRI", + "Meteosat-11": "MSG4/SEVIRI"} + with mock.patch("satpy.tests.reader_tests.test_seviri_l1b_icare." 
+ "FakeHDF4FileHandler2.get_test_content") as patched_func: def _run_target(): patched_func.return_value = file_data return self.p.target(mock.MagicMock(), @@ -189,12 +189,12 @@ def _run_target(): mock.MagicMock()).sensor_name for sat in sensor_list: - file_data['/attr/Sensors'] = sensor_list[sat] + file_data["/attr/Sensors"] = sensor_list[sat] plat, sens = _run_target() self.assertEqual(plat, sat) with self.assertRaises(NameError): - file_data['/attr/Sensors'] = 'BADSAT/NOSENSE' + file_data["/attr/Sensors"] = "BADSAT/NOSENSE" plat, sens = _run_target() def test_bad_bandname(self): @@ -202,7 +202,7 @@ def test_bad_bandname(self): with self.assertRaises(NameError): self.p.target(mock.MagicMock(), mock.MagicMock(), - mock.MagicMock())._get_dsname({'name': 'badband'}) + mock.MagicMock())._get_dsname({"name": "badband"}) def test_nocompute(self): """Test that dask does not compute anything in the reader itself.""" @@ -212,7 +212,7 @@ def test_nocompute(self): with dask.config.set(scheduler=CustomScheduler(max_computes=0)): r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf' + "GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf" ]) r.create_filehandlers(loadables) - r.load(['VIS008']) + r.load(["VIS008"]) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py index e344d09ff9..0130740246 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_native.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py @@ -43,476 +43,476 @@ from satpy.tests.reader_tests.test_seviri_l1b_calibration import TestFileHandlerCalibrationBase from satpy.tests.utils import assert_attrs_equal, make_dataid -CHANNEL_INDEX_LIST = ['VIS006', 'VIS008', 'IR_016', 'IR_039', - 'WV_062', 'WV_073', 'IR_087', 'IR_097', - 'IR_108', 'IR_120', 'IR_134', 'HRV'] +CHANNEL_INDEX_LIST = ["VIS006", "VIS008", "IR_016", "IR_039", + "WV_062", "WV_073", "IR_087", "IR_097", + "IR_108", "IR_120", "IR_134", "HRV"] AVAILABLE_CHANNELS = {} for item in CHANNEL_INDEX_LIST: AVAILABLE_CHANNELS[item] = True -SEC15HDR = '15_SECONDARY_PRODUCT_HEADER' -IDS = 'SelectedBandIDs' +SEC15HDR = "15_SECONDARY_PRODUCT_HEADER" +IDS = "SelectedBandIDs" TEST1_HEADER_CHNLIST: dict[str, dict[str, dict]] = {SEC15HDR: {IDS: {}}} -TEST1_HEADER_CHNLIST[SEC15HDR][IDS]['Value'] = 'XX--XX--XX--' +TEST1_HEADER_CHNLIST[SEC15HDR][IDS]["Value"] = "XX--XX--XX--" TEST2_HEADER_CHNLIST: dict[str, dict[str, dict]] = {SEC15HDR: {IDS: {}}} -TEST2_HEADER_CHNLIST[SEC15HDR][IDS]['Value'] = 'XX-XXXX----X' +TEST2_HEADER_CHNLIST[SEC15HDR][IDS]["Value"] = "XX-XXXX----X" TEST3_HEADER_CHNLIST: dict[str, dict[str, dict]] = {SEC15HDR: {IDS: {}}} -TEST3_HEADER_CHNLIST[SEC15HDR][IDS]['Value'] = 'XXXXXXXXXXXX' +TEST3_HEADER_CHNLIST[SEC15HDR][IDS]["Value"] = "XXXXXXXXXXXX" TEST_AREA_EXTENT_EARTHMODEL1_VISIR_FULLDISK = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': True, - 'is_rapid_scan': 0, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 3712, - 'Number of rows': 3712, - 'Area extent': (5568748.2758, 5568748.2758, -5568748.2758, -5568748.2758) + "earth_model": 1, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": True, + "is_rapid_scan": 0, + 
"fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_fes_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 3712, + "Number of rows": 3712, + "Area extent": (5568748.2758, 5568748.2758, -5568748.2758, -5568748.2758) } } TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_rss_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '9.5', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 3712, - 'Number of rows': 1392, - 'Area extent': (5568748.275756836, 5568748.275756836, -5568748.275756836, 1392187.068939209) + "earth_model": 1, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": False, + "is_rapid_scan": 1, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_rss_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "9.5", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 3712, + "Number of rows": 1392, + "Area extent": (5568748.275756836, 5568748.275756836, -5568748.275756836, 1392187.068939209) } } TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN_FILL = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_rss_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '9.5', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 3712, - 'Number of rows': 3712, - 'Area extent': (5568748.2758, 5568748.2758, -5568748.2758, -5568748.2758) + "earth_model": 1, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": False, + "is_rapid_scan": 1, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_rss_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "9.5", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 3712, + "Number of rows": 3712, + "Area extent": (5568748.2758, 5568748.2758, -5568748.2758, -5568748.2758) } } TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 2516, - 'Number of rows': 1829, - 'Area extent': (5337717.232, 5154692.6389, -2211297.1332, -333044.7514) + "earth_model": 1, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": False, + "is_rapid_scan": 0, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_fes_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", 
"x_0": "0", "y_0": "0"}, + "Number of columns": 2516, + "Number of rows": 1829, + "Area extent": (5337717.232, 5154692.6389, -2211297.1332, -333044.7514) } } TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI_FILL = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 3712, - 'Number of rows': 3712, - 'Area extent': (5568748.2758, 5568748.2758, -5568748.2758, -5568748.2758) + "earth_model": 1, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": False, + "is_rapid_scan": 0, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_fes_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 3712, + "Number of rows": 3712, + "Area extent": (5568748.2758, 5568748.2758, -5568748.2758, -5568748.2758) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': True, - 'is_rapid_scan': 0, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 5568, - 'Number of rows': 11136, - 'Area extent 0': (5567747.920155525, 2625352.665781975, -1000.1343488693237, -5567747.920155525), - 'Area extent 1': (3602483.924627304, 5569748.188853264, -1966264.1298770905, 2625352.665781975) + "earth_model": 1, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": True, + "is_rapid_scan": 0, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_fes_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 5568, + "Number of rows": 11136, + "Area extent 0": (5567747.920155525, 2625352.665781975, -1000.1343488693237, -5567747.920155525), + "Area extent 1": (3602483.924627304, 5569748.188853264, -1966264.1298770905, 2625352.665781975) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK_FILL = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': True, - 'is_rapid_scan': 0, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 11136, - 'Number of rows': 11136, - 'Area extent': (5567747.920155525, 5569748.188853264, -5569748.188853264, -5567747.920155525) + "earth_model": 1, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": True, + "is_rapid_scan": 0, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_fes_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + 
"Number of columns": 11136, + "Number of rows": 11136, + "Area extent": (5567747.920155525, 5569748.188853264, -5569748.188853264, -5567747.920155525) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_RAPIDSCAN = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_rss_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '9.5', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 5568, - 'Number of rows': 8192, - 'Area extent': (5567747.920155525, 2625352.665781975, -1000.1343488693237, -5567747.920155525) + "earth_model": 1, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": False, + "is_rapid_scan": 1, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_rss_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "9.5", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 5568, + "Number of rows": 8192, + "Area extent": (5567747.920155525, 2625352.665781975, -1000.1343488693237, -5567747.920155525) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_RAPIDSCAN_FILL = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_rss_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '9.5', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 11136, - 'Number of rows': 11136, - 'Area extent': (5567747.920155525, 5569748.188853264, -5569748.188853264, -5567747.920155525) + "earth_model": 1, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": False, + "is_rapid_scan": 1, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_rss_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "9.5", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 11136, + "Number of rows": 11136, + "Area extent": (5567747.920155525, 5569748.188853264, -5569748.188853264, -5567747.920155525) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 7548, - 'Number of rows': 5487, - 'Area extent': (5336716.885566711, 5155692.568421364, -2212297.179698944, -332044.6038246155) + "earth_model": 1, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": False, + "is_rapid_scan": 0, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_fes_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 7548, + "Number of rows": 5487, + "Area extent": (5336716.885566711, 5155692.568421364, -2212297.179698944, -332044.6038246155) } } 
TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI_FILL = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 11136, - 'Number of rows': 11136, - 'Area extent': (5567747.920155525, 5569748.188853264, -5569748.188853264, -5567747.920155525) + "earth_model": 1, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": False, + "is_rapid_scan": 0, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_fes_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 11136, + "Number of rows": 11136, + "Area extent": (5567747.920155525, 5569748.188853264, -5569748.188853264, -5567747.920155525) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_FULLDISK = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': True, - 'is_rapid_scan': 0, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 3712, - 'Number of rows': 3712, - 'Area extent': (5567248.0742, 5570248.4773, -5570248.4773, -5567248.0742) + "earth_model": 2, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": True, + "is_rapid_scan": 0, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_fes_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 3712, + "Number of rows": 3712, + "Area extent": (5567248.0742, 5570248.4773, -5570248.4773, -5567248.0742) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': True, - 'is_rapid_scan': 0, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 5568, - 'Number of rows': 11136, - 'Area extent 0': (5566247.718632221, 2626852.867305279, -2500.3358721733093, -5566247.718632221), - 'Area extent 1': (3600983.723104, 5571248.390376568, -1967764.3314003944, 2626852.867305279) + "earth_model": 2, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": True, + "is_rapid_scan": 0, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_fes_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 5568, + "Number of rows": 11136, + "Area extent 0": (5566247.718632221, 2626852.867305279, -2500.3358721733093, -5566247.718632221), + "Area extent 1": (3600983.723104, 5571248.390376568, -1967764.3314003944, 2626852.867305279) } } 
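The earth-model-2 extents above differ from their model-1 counterparts by exactly half a grid step on each boundary, consistent with a half-pixel shift of the grid origin between the two earth models. A quick numerical check (again only a sketch, not part of the patch):

```python
# Earth model 2 shifts the grid origin by half a pixel relative to model 1,
# so each extent boundary moves by grid_step / 2. Illustrative check only.
grid_step_m = 3.0004031658172607 * 1000
half_step = grid_step_m / 2       # 1500.2015829086304

em1_boundary = 5568748.275756836  # model-1 full-disk VISIR boundary
print(em1_boundary - half_step)   # 5567248.074173927 -> 5567248.0742 above
print(em1_boundary + half_step)   # 5570248.477339745 -> 5570248.4773 above
```
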
TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK_FILL = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': True, - 'is_rapid_scan': 0, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 11136, - 'Number of rows': 11136, - 'Area extent': (5566247.718632221, 5571248.390376568, -5571248.390376568, -5566247.718632221) + "earth_model": 2, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": True, + "is_rapid_scan": 0, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_fes_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 11136, + "Number of rows": 11136, + "Area extent": (5566247.718632221, 5571248.390376568, -5571248.390376568, -5566247.718632221) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_RAPIDSCAN = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_rss_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '9.5', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 3712, - 'Number of rows': 1392, - 'Area extent': (5567248.074173927, 5570248.477339745, -5570248.477339745, 1393687.2705221176) + "earth_model": 2, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": False, + "is_rapid_scan": 1, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_rss_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "9.5", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 3712, + "Number of rows": 1392, + "Area extent": (5567248.074173927, 5570248.477339745, -5570248.477339745, 1393687.2705221176) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_RAPIDSCAN_FILL = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_rss_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '9.5', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 3712, - 'Number of rows': 3712, - 'Area extent': (5567248.0742, 5570248.4773, -5570248.4773, -5567248.0742) + "earth_model": 2, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": False, + "is_rapid_scan": 1, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_rss_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "9.5", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 3712, + "Number of rows": 3712, + "Area extent": (5567248.0742, 5570248.4773, -5570248.4773, -5567248.0742) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_RAPIDSCAN = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'fill_disk': 
False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_rss_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 5568, - 'Number of rows': 8192, - 'Area extent': (5566247.718632221, 2626852.867305279, -2500.3358721733093, -5566247.718632221) + "earth_model": 2, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": False, + "is_rapid_scan": 1, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_rss_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 5568, + "Number of rows": 8192, + "Area extent": (5566247.718632221, 2626852.867305279, -2500.3358721733093, -5566247.718632221) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_RAPIDSCAN_FILL = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_rss_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 11136, - 'Number of rows': 11136, - 'Area extent': (5566247.718632221, 5571248.390376568, -5571248.390376568, -5566247.718632221) + "earth_model": 2, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": False, + "is_rapid_scan": 1, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_rss_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 11136, + "Number of rows": 11136, + "Area extent": (5566247.718632221, 5571248.390376568, -5571248.390376568, -5566247.718632221) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 2516, - 'Number of rows': 1829, - 'Area extent': (5336217.0304, 5156192.8405, -2212797.3348, -331544.5498) + "earth_model": 2, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": False, + "is_rapid_scan": 0, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_fes_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 2516, + "Number of rows": 1829, + "Area extent": (5336217.0304, 5156192.8405, -2212797.3348, -331544.5498) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI_FILL = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 
'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 3712, - 'Number of rows': 3712, - 'Area extent': (5567248.0742, 5570248.4773, -5570248.4773, -5567248.0742) + "earth_model": 2, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": False, + "is_rapid_scan": 0, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_fes_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 3712, + "Number of rows": 3712, + "Area extent": (5567248.0742, 5570248.4773, -5570248.4773, -5567248.0742) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 7548, - 'Number of rows': 5487, - 'Area extent': (5335216.684043407, 5157192.769944668, -2213797.381222248, -330544.4023013115) + "earth_model": 2, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": False, + "is_rapid_scan": 0, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_fes_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 7548, + "Number of rows": 5487, + "Area extent": (5335216.684043407, 5157192.769944668, -2213797.381222248, -330544.4023013115) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI_FILL = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 11136, - 'Number of rows': 11136, - 'Area extent': (5566247.718632221, 5571248.390376568, -5571248.390376568, -5566247.718632221) + "earth_model": 2, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": False, + "is_rapid_scan": 0, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_fes_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 11136, + "Number of rows": 11136, + "Area extent": (5566247.718632221, 5571248.390376568, -5571248.390376568, -5566247.718632221) } } TEST_IS_ROI_FULLDISK = { - 'is_full_disk': True, - 'is_rapid_scan': 0, - 'is_roi': False + "is_full_disk": True, + "is_rapid_scan": 0, + "is_roi": False } TEST_IS_ROI_RAPIDSCAN = { - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'is_roi': False + "is_full_disk": False, + "is_rapid_scan": 1, + "is_roi": False } TEST_IS_ROI_ROI = { - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'is_roi': True + "is_full_disk": False, + "is_rapid_scan": 0, + "is_roi": True } TEST_CALIBRATION_MODE = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='IR_108', calibration='radiance'), - 'is_full_disk': True, - 
'is_rapid_scan': 0, - 'calibration': 'radiance', - 'CalSlope': [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95, 0.96, 0.97], - 'CalOffset': [-1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0], - 'GSICSCalCoeff': [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95, 0.96, 0.97], - 'GSICSOffsetCount': [-51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0] + "earth_model": 1, + "dataset_id": make_dataid(name="IR_108", calibration="radiance"), + "is_full_disk": True, + "is_rapid_scan": 0, + "calibration": "radiance", + "CalSlope": [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95, 0.96, 0.97], + "CalOffset": [-1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0], + "GSICSCalCoeff": [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95, 0.96, 0.97], + "GSICSOffsetCount": [-51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0] } TEST_PADDER_RSS_ROI = { - 'img_bounds': {'south': [2], 'north': [4], 'east': [2], 'west': [3]}, - 'is_full_disk': False, - 'dataset_id': make_dataid(name='VIS006'), - 'dataset': xr.DataArray(np.ones((3, 2)), dims=['y', 'x']).astype(np.float32), - 'final_shape': (5, 5), - 'expected_padded_data': xr.DataArray(np.array([[np.nan, np.nan, np.nan, np.nan, np.nan], + "img_bounds": {"south": [2], "north": [4], "east": [2], "west": [3]}, + "is_full_disk": False, + "dataset_id": make_dataid(name="VIS006"), + "dataset": xr.DataArray(np.ones((3, 2)), dims=["y", "x"]).astype(np.float32), + "final_shape": (5, 5), + "expected_padded_data": xr.DataArray(np.array([[np.nan, np.nan, np.nan, np.nan, np.nan], [np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, np.nan, np.nan, np.nan, np.nan]]), - dims=['y', 'x']).astype(np.float32) + dims=["y", "x"]).astype(np.float32) } TEST_PADDER_FES_HRV = { - 'img_bounds': {'south': [1, 4], 'north': [3, 5], 'east': [2, 3], 'west': [3, 4]}, - 'is_full_disk': True, - 'dataset_id': make_dataid(name='HRV'), - 'dataset': xr.DataArray(np.ones((5, 2)), dims=['y', 'x']).astype(np.float32), - 'final_shape': (5, 5), - 'expected_padded_data': xr.DataArray(np.array([[np.nan, 1.0, 1.0, np.nan, np.nan], + "img_bounds": {"south": [1, 4], "north": [3, 5], "east": [2, 3], "west": [3, 4]}, + "is_full_disk": True, + "dataset_id": make_dataid(name="HRV"), + "dataset": xr.DataArray(np.ones((5, 2)), dims=["y", "x"]).astype(np.float32), + "final_shape": (5, 5), + "expected_padded_data": xr.DataArray(np.array([[np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, np.nan, 1.0, 1.0, np.nan], [np.nan, np.nan, 1.0, 1.0, np.nan]]), - dims=['y', 'x']).astype(np.float32) + dims=["y", "x"]).astype(np.float32) } -def create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan, good_qual='OK'): +def create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan, good_qual="OK"): """Create test header for SEVIRI L1.5 product. 
Header includes mandatory attributes for NativeMSGFileHandler.get_area_extent """ - if dataset_id['name'] == 'HRV': - reference_grid = 'ReferenceGridHRV' + if dataset_id["name"] == "HRV": + reference_grid = "ReferenceGridHRV" column_dir_grid_step = 1.0001343488693237 line_dir_grid_step = 1.0001343488693237 else: - reference_grid = 'ReferenceGridVIS_IR' + reference_grid = "ReferenceGridVIS_IR" column_dir_grid_step = 3.0004031658172607 line_dir_grid_step = 3.0004031658172607 @@ -547,45 +547,45 @@ def create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan, goo n_hrv_lines = n_visir_lines * 3 ssp_lon = 0 header = { - '15_MAIN_PRODUCT_HEADER': { - 'QQOV': {'Name': 'QQOV', - 'Value': good_qual} + "15_MAIN_PRODUCT_HEADER": { + "QQOV": {"Name": "QQOV", + "Value": good_qual} }, - '15_DATA_HEADER': { - 'ImageDescription': { + "15_DATA_HEADER": { + "ImageDescription": { reference_grid: { - 'ColumnDirGridStep': column_dir_grid_step, - 'LineDirGridStep': line_dir_grid_step, - 'GridOrigin': 2, # south-east corner + "ColumnDirGridStep": column_dir_grid_step, + "LineDirGridStep": line_dir_grid_step, + "GridOrigin": 2, # south-east corner }, - 'ProjectionDescription': { - 'LongitudeOfSSP': ssp_lon + "ProjectionDescription": { + "LongitudeOfSSP": ssp_lon } }, - 'GeometricProcessing': { - 'EarthModel': { - 'TypeOfEarthModel': earth_model, - 'EquatorialRadius': 6378169.0, - 'NorthPolarRadius': 6356583.800000001, - 'SouthPolarRadius': 6356583.800000001, + "GeometricProcessing": { + "EarthModel": { + "TypeOfEarthModel": earth_model, + "EquatorialRadius": 6378169.0, + "NorthPolarRadius": 6356583.800000001, + "SouthPolarRadius": 6356583.800000001, } }, - 'SatelliteStatus': { - 'SatelliteDefinition': { - 'SatelliteId': 324 + "SatelliteStatus": { + "SatelliteDefinition": { + "SatelliteId": 324 } } }, - '15_SECONDARY_PRODUCT_HEADER': { - 'NorthLineSelectedRectangle': {'Value': north}, - 'EastColumnSelectedRectangle': {'Value': east}, - 'WestColumnSelectedRectangle': {'Value': west}, - 'SouthLineSelectedRectangle': {'Value': south}, - 'SelectedBandIDs': {'Value': 'xxxxxxxxxxxx'}, - 'NumberColumnsVISIR': {'Value': n_visir_cols}, - 'NumberLinesVISIR': {'Value': n_visir_lines}, - 'NumberColumnsHRV': {'Value': n_hrv_cols}, - 'NumberLinesHRV': {'Value': n_hrv_lines}, + "15_SECONDARY_PRODUCT_HEADER": { + "NorthLineSelectedRectangle": {"Value": north}, + "EastColumnSelectedRectangle": {"Value": east}, + "WestColumnSelectedRectangle": {"Value": west}, + "SouthLineSelectedRectangle": {"Value": south}, + "SelectedBandIDs": {"Value": "xxxxxxxxxxxx"}, + "NumberColumnsVISIR": {"Value": n_visir_cols}, + "NumberLinesVISIR": {"Value": n_visir_lines}, + "NumberColumnsHRV": {"Value": n_hrv_cols}, + "NumberLinesHRV": {"Value": n_hrv_lines}, } } @@ -599,20 +599,20 @@ def create_test_trailer(is_rapid_scan): Trailer includes mandatory attributes for NativeMSGFileHandler.get_area_extent """ trailer = { - '15TRAILER': { - 'ImageProductionStats': { - 'ActualL15CoverageHRV': { - 'UpperNorthLineActual': 11136, - 'UpperWestColumnActual': 7533, - 'UpperSouthLineActual': 8193, - 'UpperEastColumnActual': 1966, - 'LowerNorthLineActual': 8192, - 'LowerWestColumnActual': 5568, - 'LowerSouthLineActual': 1, - 'LowerEastColumnActual': 1 + "15TRAILER": { + "ImageProductionStats": { + "ActualL15CoverageHRV": { + "UpperNorthLineActual": 11136, + "UpperWestColumnActual": 7533, + "UpperSouthLineActual": 8193, + "UpperEastColumnActual": 1966, + "LowerNorthLineActual": 8192, + "LowerWestColumnActual": 5568, + "LowerSouthLineActual": 1, + 
"LowerEastColumnActual": 1 }, - 'ActualScanningSummary': { - 'ReducedScan': is_rapid_scan + "ActualScanningSummary": { + "ReducedScan": is_rapid_scan } } } @@ -623,21 +623,21 @@ def create_test_trailer(is_rapid_scan): def prepare_area_definitions(test_dict): """Prepare calculated and expected area definitions for equal checking.""" - earth_model = test_dict['earth_model'] - dataset_id = test_dict['dataset_id'] - is_full_disk = test_dict['is_full_disk'] - is_rapid_scan = test_dict['is_rapid_scan'] - fill_disk = test_dict['fill_disk'] + earth_model = test_dict["earth_model"] + dataset_id = test_dict["dataset_id"] + is_full_disk = test_dict["is_full_disk"] + is_rapid_scan = test_dict["is_rapid_scan"] + fill_disk = test_dict["fill_disk"] header = create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan) trailer = create_test_trailer(is_rapid_scan) - expected_area_def = test_dict['expected_area_def'] + expected_area_def = test_dict["expected_area_def"] - with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile, \ - mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'), \ + with mock.patch("satpy.readers.seviri_l1b_native.np.fromfile") as fromfile, \ + mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap") as _get_memmap, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \ mock.patch( - 'satpy.readers.seviri_l1b_native.has_archive_header' + "satpy.readers.seviri_l1b_native.has_archive_header" ) as has_archive_header: has_archive_header.return_value = True fromfile.return_value = header @@ -681,10 +681,10 @@ def prepare_area_definitions(test_dict): def test_area_definitions(actual, expected): """Test area definitions with only one area.""" np.testing.assert_allclose(np.array(actual.area_extent), - np.array(expected['Area extent'])) - assert actual.width == expected['Number of columns'] - assert actual.height == expected['Number of rows'] - assert actual.area_id == expected['Area ID'] + np.array(expected["Area extent"])) + assert actual.width == expected["Number of columns"] + assert actual.height == expected["Number of rows"] + assert actual.area_id == expected["Area ID"] @pytest.mark.parametrize( @@ -697,31 +697,31 @@ def test_area_definitions(actual, expected): def test_stacked_area_definitions(actual, expected): """Test area definitions with stacked areas.""" np.testing.assert_allclose(np.array(actual.defs[0].area_extent), - np.array(expected['Area extent 0'])) + np.array(expected["Area extent 0"])) np.testing.assert_allclose(np.array(actual.defs[1].area_extent), - np.array(expected['Area extent 1'])) - assert actual.width == expected['Number of columns'] - assert actual.height == expected['Number of rows'] - assert actual.defs[0].area_id, expected['Area ID'] - assert actual.defs[1].area_id, expected['Area ID'] + np.array(expected["Area extent 1"])) + assert actual.width == expected["Number of columns"] + assert actual.height == expected["Number of rows"] + assert actual.defs[0].area_id, expected["Area ID"] + assert actual.defs[1].area_id, expected["Area ID"] def prepare_is_roi(test_dict): """Prepare calculated and expected check for region of interest data for equal checking.""" earth_model = 2 - dataset_id = 
make_dataid(name='VIS006') - is_full_disk = test_dict['is_full_disk'] - is_rapid_scan = test_dict['is_rapid_scan'] + dataset_id = make_dataid(name="VIS006") + is_full_disk = test_dict["is_full_disk"] + is_rapid_scan = test_dict["is_rapid_scan"] header = create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan) trailer = create_test_trailer(is_rapid_scan) - expected = test_dict['is_roi'] + expected = test_dict["is_roi"] - with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile, \ - mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'), \ + with mock.patch("satpy.readers.seviri_l1b_native.np.fromfile") as fromfile, \ + mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap") as _get_memmap, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \ mock.patch( - 'satpy.readers.seviri_l1b_native.has_archive_header' + "satpy.readers.seviri_l1b_native.has_archive_header" ) as has_archive_header: has_archive_header.return_value = True fromfile.return_value = header @@ -754,7 +754,7 @@ class TestNativeMSGFileHandler(unittest.TestCase): def test_get_available_channels(self): """Test the derivation of the available channel list.""" available_chs = get_available_channels(TEST1_HEADER_CHNLIST) - trues = ('WV_062', 'WV_073', 'IR_108', 'VIS006', 'VIS008', 'IR_120') + trues = ("WV_062", "WV_073", "IR_108", "VIS006", "VIS008", "IR_120") for bandname in AVAILABLE_CHANNELS: if bandname in trues: self.assertTrue(available_chs[bandname]) @@ -762,7 +762,7 @@ def test_get_available_channels(self): self.assertFalse(available_chs[bandname]) available_chs = get_available_channels(TEST2_HEADER_CHNLIST) - trues = ('VIS006', 'VIS008', 'IR_039', 'WV_062', 'WV_073', 'IR_087', 'HRV') + trues = ("VIS006", "VIS008", "IR_039", "WV_062", "WV_073", "IR_087", "HRV") for bandname in AVAILABLE_CHANNELS: if bandname in trues: self.assertTrue(available_chs[bandname]) @@ -775,20 +775,20 @@ def test_get_available_channels(self): TEST_HEADER_CALIB = { - 'RadiometricProcessing': { - 'Level15ImageCalibration': { - 'CalSlope': TestFileHandlerCalibrationBase.gains_nominal, - 'CalOffset': TestFileHandlerCalibrationBase.offsets_nominal, + "RadiometricProcessing": { + "Level15ImageCalibration": { + "CalSlope": TestFileHandlerCalibrationBase.gains_nominal, + "CalOffset": TestFileHandlerCalibrationBase.offsets_nominal, }, - 'MPEFCalFeedback': { - 'GSICSCalCoeff': TestFileHandlerCalibrationBase.gains_gsics, - 'GSICSOffsetCount': TestFileHandlerCalibrationBase.offsets_gsics + "MPEFCalFeedback": { + "GSICSCalCoeff": TestFileHandlerCalibrationBase.gains_gsics, + "GSICSOffsetCount": TestFileHandlerCalibrationBase.offsets_gsics } }, - 'ImageDescription': { - 'Level15ImageProduction': { - 'PlannedChanProcessing': TestFileHandlerCalibrationBase.radiance_types + "ImageDescription": { + "Level15ImageProduction": { + "PlannedChanProcessing": TestFileHandlerCalibrationBase.radiance_types } }, } @@ -797,29 +797,29 @@ def test_get_available_channels(self): class TestNativeMSGCalibration(TestFileHandlerCalibrationBase): """Unit tests for calibration.""" - @pytest.fixture(name='file_handler') + @pytest.fixture(name="file_handler") def file_handler(self): """Create a mocked file 
handler.""" header = { - '15_DATA_HEADER': { - 'ImageAcquisition': { - 'PlannedAcquisitionTime': { - 'TrueRepeatCycleStart': self.scan_time + "15_DATA_HEADER": { + "ImageAcquisition": { + "PlannedAcquisitionTime": { + "TrueRepeatCycleStart": self.scan_time } } } } trailer = { - '15TRAILER': { - 'ImageProductionStats': { - 'ActualScanningSummary': { - 'ForwardScanStart': self.scan_time + "15TRAILER": { + "ImageProductionStats": { + "ActualScanningSummary": { + "ForwardScanStart": self.scan_time } } } } - header['15_DATA_HEADER'].update(TEST_HEADER_CALIB) - with mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler.__init__', + header["15_DATA_HEADER"].update(TEST_HEADER_CALIB) + with mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler.__init__", return_value=None): fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) fh.header = header @@ -828,33 +828,33 @@ def file_handler(self): return fh @pytest.mark.parametrize( - ('channel', 'calibration', 'calib_mode', 'use_ext_coefs'), + ("channel", "calibration", "calib_mode", "use_ext_coefs"), ( # VIS channel, internal coefficients - ('VIS006', 'counts', 'NOMINAL', False), - ('VIS006', 'radiance', 'NOMINAL', False), - ('VIS006', 'radiance', 'GSICS', False), - ('VIS006', 'reflectance', 'NOMINAL', False), + ("VIS006", "counts", "NOMINAL", False), + ("VIS006", "radiance", "NOMINAL", False), + ("VIS006", "radiance", "GSICS", False), + ("VIS006", "reflectance", "NOMINAL", False), # VIS channel, external coefficients (mode should have no effect) - ('VIS006', 'radiance', 'GSICS', True), - ('VIS006', 'reflectance', 'NOMINAL', True), + ("VIS006", "radiance", "GSICS", True), + ("VIS006", "reflectance", "NOMINAL", True), # IR channel, internal coefficients - ('IR_108', 'counts', 'NOMINAL', False), - ('IR_108', 'radiance', 'NOMINAL', False), - ('IR_108', 'radiance', 'GSICS', False), - ('IR_108', 'brightness_temperature', 'NOMINAL', False), - ('IR_108', 'brightness_temperature', 'GSICS', False), + ("IR_108", "counts", "NOMINAL", False), + ("IR_108", "radiance", "NOMINAL", False), + ("IR_108", "radiance", "GSICS", False), + ("IR_108", "brightness_temperature", "NOMINAL", False), + ("IR_108", "brightness_temperature", "GSICS", False), # IR channel, external coefficients (mode should have no effect) - ('IR_108', 'radiance', 'NOMINAL', True), - ('IR_108', 'brightness_temperature', 'GSICS', True), + ("IR_108", "radiance", "NOMINAL", True), + ("IR_108", "brightness_temperature", "GSICS", True), # HRV channel, internal coefficiens - ('HRV', 'counts', 'NOMINAL', False), - ('HRV', 'radiance', 'NOMINAL', False), - ('HRV', 'radiance', 'GSICS', False), - ('HRV', 'reflectance', 'NOMINAL', False), + ("HRV", "counts", "NOMINAL", False), + ("HRV", "radiance", "NOMINAL", False), + ("HRV", "radiance", "GSICS", False), + ("HRV", "reflectance", "NOMINAL", False), # HRV channel, external coefficients (mode should have no effect) - ('HRV', 'radiance', 'GSICS', True), - ('HRV', 'reflectance', 'NOMINAL', True), + ("HRV", "radiance", "GSICS", True), + ("HRV", "reflectance", "NOMINAL", True), ) ) def test_calibrate( @@ -886,33 +886,33 @@ class TestNativeMSGDataset: def file_handler(self): """Create a file handler for testing.""" trailer = { - '15TRAILER': { - 'ImageProductionStats': { - 'ActualScanningSummary': { - 'ForwardScanStart': datetime(2006, 1, 1, 12, 15, 9, 304888), - 'ForwardScanEnd': datetime(2006, 1, 1, 12, 27, 9, 304888), - 'ReducedScan': 0 + "15TRAILER": { + "ImageProductionStats": { + "ActualScanningSummary": { + 
"ForwardScanStart": datetime(2006, 1, 1, 12, 15, 9, 304888), + "ForwardScanEnd": datetime(2006, 1, 1, 12, 27, 9, 304888), + "ReducedScan": 0 } } } } mda = { - 'channel_list': ['VIS006', 'IR_108'], - 'number_of_lines': 4, - 'number_of_columns': 4, - 'is_full_disk': True, - 'platform_name': 'MSG-3', - 'offset_corrected': True, - 'projection_parameters': { - 'ssp_longitude': 0.0, - 'h': 35785831.0, - 'a': 6378169.0, - 'b': 6356583.8 + "channel_list": ["VIS006", "IR_108"], + "number_of_lines": 4, + "number_of_columns": 4, + "is_full_disk": True, + "platform_name": "MSG-3", + "offset_corrected": True, + "projection_parameters": { + "ssp_longitude": 0.0, + "h": 35785831.0, + "a": 6378169.0, + "b": 6356583.8 } } header = self._fake_header() data = self._fake_data() - with mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler.__init__', + with mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler.__init__", return_value=None): fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) fh.header = header @@ -921,7 +921,7 @@ def file_handler(self): fh.dask_array = da.from_array(data) fh.platform_id = 324 fh.fill_disk = False - fh.calib_mode = 'NOMINAL' + fh.calib_mode = "NOMINAL" fh.ext_calib_coefs = {} fh.include_raw_metadata = False fh.mda_max_array_size = 100 @@ -930,32 +930,32 @@ def file_handler(self): @staticmethod def _fake_header(): header = { - '15_DATA_HEADER': { - 'SatelliteStatus': { - 'SatelliteDefinition': { - 'NominalLongitude': 0.0 + "15_DATA_HEADER": { + "SatelliteStatus": { + "SatelliteDefinition": { + "NominalLongitude": 0.0 }, - 'Orbit': { - 'OrbitPolynomial': ORBIT_POLYNOMIALS + "Orbit": { + "OrbitPolynomial": ORBIT_POLYNOMIALS } }, - 'ImageAcquisition': { - 'PlannedAcquisitionTime': { - 'TrueRepeatCycleStart': datetime(2006, 1, 1, 12, 15, 0, 0), - 'PlannedRepeatCycleEnd': datetime(2006, 1, 1, 12, 30, 0, 0), + "ImageAcquisition": { + "PlannedAcquisitionTime": { + "TrueRepeatCycleStart": datetime(2006, 1, 1, 12, 15, 0, 0), + "PlannedRepeatCycleEnd": datetime(2006, 1, 1, 12, 30, 0, 0), } } }, } - header['15_DATA_HEADER'].update(TEST_HEADER_CALIB) + header["15_DATA_HEADER"].update(TEST_HEADER_CALIB) return header @staticmethod def _fake_data(): num_visir_cols = 5 # will be divided by 1.25 -> 4 columns visir_rec = [ - ('line_data', np.uint8, (num_visir_cols,)), - ('acq_time', time_cds_short) + ("line_data", np.uint8, (num_visir_cols,)), + ("acq_time", time_cds_short) ] vis006_line1 = ( [1, 2, 3, 4, 5], # line_data @@ -973,26 +973,26 @@ def _fake_data(): [(vis006_line2,), (ir108_line2,)], [(vis006_line3,), (ir108_line3,)], [(vis006_line4,), (ir108_line4,)]], - dtype=[('visir', visir_rec)] + dtype=[("visir", visir_rec)] ) return data def test_get_dataset(self, file_handler): """Test getting the dataset.""" dataset_id = make_dataid( - name='VIS006', + name="VIS006", resolution=3000, - calibration='counts' + calibration="counts" ) dataset_info = { - 'units': '1', - 'wavelength': (1, 2, 3), - 'standard_name': 'counts' + "units": "1", + "wavelength": (1, 2, 3), + "standard_name": "counts" } xarr = file_handler.get_dataset(dataset_id, dataset_info) expected = self._exp_data_array() xr.testing.assert_equal(xarr, expected) - assert 'raw_metadata' not in xarr.attrs + assert "raw_metadata" not in xarr.attrs assert file_handler.start_time == datetime(2006, 1, 1, 12, 15, 0) assert file_handler.end_time == datetime(2006, 1, 1, 12, 30, 0) assert_attrs_equal(xarr.attrs, expected.attrs, tolerance=1e-4) @@ -1011,7 +1011,7 @@ def 
test_repeat_cycle_duration(self, file_handler): """Test repeat cycle handling for FD or ReducedScan.""" assert 15 == file_handler._repeat_cycle_duration # Change the reducescan scenario to test the repeat cycle duration handling - file_handler.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] = 1 + file_handler.trailer["15TRAILER"]["ImageProductionStats"]["ActualScanningSummary"]["ReducedScan"] = 1 assert 5 == file_handler._repeat_cycle_duration @staticmethod @@ -1022,72 +1022,72 @@ def _exp_data_array(): [44., 192., 835., 527.], [64., 273., 132., 788.]], dtype=np.float32), - dims=['y', 'x'], + dims=["y", "x"], attrs={ - 'orbital_parameters': { - 'satellite_actual_longitude': -3.55117540817073, - 'satellite_actual_latitude': -0.5711243456528018, - 'satellite_actual_altitude': 35783296.150123544, - 'satellite_nominal_longitude': 0.0, - 'satellite_nominal_latitude': 0.0, - 'projection_longitude': 0.0, - 'projection_latitude': 0.0, - 'projection_altitude': 35785831.0 + "orbital_parameters": { + "satellite_actual_longitude": -3.55117540817073, + "satellite_actual_latitude": -0.5711243456528018, + "satellite_actual_altitude": 35783296.150123544, + "satellite_nominal_longitude": 0.0, + "satellite_nominal_latitude": 0.0, + "projection_longitude": 0.0, + "projection_latitude": 0.0, + "projection_altitude": 35785831.0 }, - 'time_parameters': { - 'nominal_start_time': datetime(2006, 1, 1, 12, 15, 0), - 'nominal_end_time': datetime(2006, 1, 1, 12, 30, 0), - 'observation_start_time': datetime(2006, 1, 1, 12, 15, 9, 304888), - 'observation_end_time': datetime(2006, 1, 1, 12, 27, 9, 304888), + "time_parameters": { + "nominal_start_time": datetime(2006, 1, 1, 12, 15, 0), + "nominal_end_time": datetime(2006, 1, 1, 12, 30, 0), + "observation_start_time": datetime(2006, 1, 1, 12, 15, 9, 304888), + "observation_end_time": datetime(2006, 1, 1, 12, 27, 9, 304888), }, - 'georef_offset_corrected': True, - 'platform_name': 'MSG-3', - 'sensor': 'seviri', - 'units': '1', - 'wavelength': (1, 2, 3), - 'standard_name': 'counts', + "georef_offset_corrected": True, + "platform_name": "MSG-3", + "sensor": "seviri", + "units": "1", + "wavelength": (1, 2, 3), + "standard_name": "counts", } ) - expected['acq_time'] = ('y', [np.datetime64('1958-01-02 00:00:01'), - np.datetime64('1958-01-02 00:00:02'), - np.datetime64('1958-01-02 00:00:03'), - np.datetime64('1958-01-02 00:00:04')]) + expected["acq_time"] = ("y", [np.datetime64("1958-01-02 00:00:01"), + np.datetime64("1958-01-02 00:00:02"), + np.datetime64("1958-01-02 00:00:03"), + np.datetime64("1958-01-02 00:00:04")]) return expected def test_get_dataset_with_raw_metadata(self, file_handler): """Test provision of raw metadata.""" file_handler.include_raw_metadata = True dataset_id = make_dataid( - name='VIS006', + name="VIS006", resolution=3000, - calibration='counts' + calibration="counts" ) dataset_info = { - 'units': '1', - 'wavelength': (1, 2, 3), - 'standard_name': 'counts' + "units": "1", + "wavelength": (1, 2, 3), + "standard_name": "counts" } xarr = file_handler.get_dataset(dataset_id, dataset_info) - assert 'raw_metadata' in xarr.attrs + assert "raw_metadata" in xarr.attrs def test_satpos_no_valid_orbit_polynomial(self, file_handler): """Test satellite position if there is no valid orbit polynomial.""" - file_handler.header['15_DATA_HEADER']['SatelliteStatus'][ - 'Orbit']['OrbitPolynomial'] = ORBIT_POLYNOMIALS_INVALID + file_handler.header["15_DATA_HEADER"]["SatelliteStatus"][ + "Orbit"]["OrbitPolynomial"] = 
ORBIT_POLYNOMIALS_INVALID dataset_id = make_dataid( - name='VIS006', + name="VIS006", resolution=3000, - calibration='counts' + calibration="counts" ) dataset_info = { - 'units': '1', - 'wavelength': (1, 2, 3), - 'standard_name': 'counts' + "units": "1", + "wavelength": (1, 2, 3), + "standard_name": "counts" } with pytest.warns(UserWarning, match="No orbit polynomial"): xarr = file_handler.get_dataset(dataset_id, dataset_info) - assert 'satellite_actual_longitude' not in xarr.attrs[ - 'orbital_parameters'] + assert "satellite_actual_longitude" not in xarr.attrs[ + "orbital_parameters"] class TestNativeMSGPadder(unittest.TestCase): @@ -1096,12 +1096,12 @@ class TestNativeMSGPadder(unittest.TestCase): @staticmethod def prepare_padder(test_dict): """Initialize Padder and pad test data.""" - dataset_id = test_dict['dataset_id'] - img_bounds = test_dict['img_bounds'] - is_full_disk = test_dict['is_full_disk'] - dataset = test_dict['dataset'] - final_shape = test_dict['final_shape'] - expected_padded_data = test_dict['expected_padded_data'] + dataset_id = test_dict["dataset_id"] + img_bounds = test_dict["img_bounds"] + is_full_disk = test_dict["is_full_disk"] + dataset = test_dict["dataset"] + final_shape = test_dict["final_shape"] + expected_padded_data = test_dict["expected_padded_data"] padder = Padder(dataset_id, img_bounds, is_full_disk) padder._final_shape = final_shape @@ -1150,56 +1150,56 @@ def test_file_pattern(self, reader): @pytest.mark.parametrize( - 'file_content,exp_header_size', + "file_content,exp_header_size", ( (ASCII_STARTSWITH, 450400), # with ascii header - (b'foobar', 445286), # without ascii header + (b"foobar", 445286), # without ascii header ) ) def test_header_type(file_content, exp_header_size): """Test identification of the file header type.""" header = create_test_header( - dataset_id=make_dataid(name='VIS006', resolution=3000), + dataset_id=make_dataid(name="VIS006", resolution=3000), earth_model=1, is_full_disk=True, is_rapid_scan=0 ) - if file_content == b'foobar': - header.pop('15_SECONDARY_PRODUCT_HEADER') - with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile, \ - mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'), \ + if file_content == b"foobar": + header.pop("15_SECONDARY_PRODUCT_HEADER") + with mock.patch("satpy.readers.seviri_l1b_native.np.fromfile") as fromfile, \ + mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap") as _get_memmap, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \ mock.patch("builtins.open", mock.mock_open(read_data=file_content)): fromfile.return_value = header recarray2dict.side_effect = (lambda x: x) _get_memmap.return_value = np.arange(3) fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) assert fh.header_type.itemsize == exp_header_size - assert '15_SECONDARY_PRODUCT_HEADER' in fh.header + assert "15_SECONDARY_PRODUCT_HEADER" in fh.header def test_header_warning(): """Test warning is raised for NOK quality flag.""" header_good = create_test_header( - dataset_id=make_dataid(name='VIS006', resolution=3000), + dataset_id=make_dataid(name="VIS006", resolution=3000), earth_model=1, is_full_disk=True, is_rapid_scan=0, - good_qual='OK' + 
good_qual="OK" ) header_bad = create_test_header( - dataset_id=make_dataid(name='VIS006', resolution=3000), + dataset_id=make_dataid(name="VIS006", resolution=3000), earth_model=1, is_full_disk=True, is_rapid_scan=0, - good_qual='NOK' + good_qual="NOK" ) - with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile, \ - mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'), \ + with mock.patch("satpy.readers.seviri_l1b_native.np.fromfile") as fromfile, \ + mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap") as _get_memmap, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \ mock.patch("builtins.open", mock.mock_open(read_data=ASCII_STARTSWITH)): recarray2dict.side_effect = (lambda x: x) _get_memmap.return_value = np.arange(3) @@ -1217,30 +1217,30 @@ def test_header_warning(): # check that without Main Header the code doesn't crash header_missing = header_good.copy() - header_missing.pop('15_MAIN_PRODUCT_HEADER') + header_missing.pop("15_MAIN_PRODUCT_HEADER") fromfile.return_value = header_missing with warnings.catch_warnings(): warnings.simplefilter("error") - NativeMSGFileHandler('myfile', {}, None) + NativeMSGFileHandler("myfile", {}, None) @pytest.mark.parametrize( "starts_with, expected", [ (ASCII_STARTSWITH, True), - (b'this_shall_fail', False) + (b"this_shall_fail", False) ] ) def test_has_archive_header(starts_with, expected): """Test if the file includes an ASCII archive header.""" with mock.patch("builtins.open", mock.mock_open(read_data=starts_with)): - actual = has_archive_header('filename') + actual = has_archive_header("filename") assert actual == expected def test_read_header(): """Test that reading header returns the header correctly converted to a dictionary.""" - keys = ('SatelliteId', 'NominalLongitude', 'SatelliteStatus') + keys = ("SatelliteId", "NominalLongitude", "SatelliteStatus") values = (324, 0.0, 1) expected = dict(zip(keys, values)) @@ -1248,7 +1248,7 @@ def test_read_header(): dtypes = np.dtype([(k, t) for k, t in zip(keys, types)]) hdr_data = np.array([values], dtype=dtypes) - with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile: + with mock.patch("satpy.readers.seviri_l1b_native.np.fromfile") as fromfile: fromfile.return_value = hdr_data actual = recarray2dict(hdr_data) assert actual == expected diff --git a/satpy/tests/reader_tests/test_seviri_l1b_nc.py b/satpy/tests/reader_tests/test_seviri_l1b_nc.py index f85e9f5aae..3f7b1a6296 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_nc.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_nc.py @@ -29,17 +29,17 @@ from satpy.tests.reader_tests.test_seviri_l1b_calibration import TestFileHandlerCalibrationBase from satpy.tests.utils import assert_attrs_equal, make_dataid -channel_keys_dict = {'VIS006': 'ch1', 'IR_108': 'ch9'} +channel_keys_dict = {"VIS006": "ch1", "IR_108": "ch9"} def to_cds_time(time): """Convert datetime to (days, msecs) since 1958-01-01.""" if isinstance(time, datetime): time = np.datetime64(time) - t0 = np.datetime64('1958-01-01 00:00') + t0 = np.datetime64("1958-01-01 00:00") delta = time - t0 - days = (delta / np.timedelta64(1, 'D')).astype(int) - msecs = delta / np.timedelta64(1, 'ms') - days * 24 * 3600 * 
1E3 + days = (delta / np.timedelta64(1, "D")).astype(int) + msecs = delta / np.timedelta64(1, "ms") - days * 24 * 3600 * 1E3 return days, msecs @@ -64,130 +64,130 @@ def _get_fake_dataset(self, counts, h5netcdf): orbit_poly_start_day, orbit_poly_start_msec = to_cds_time( np.array([datetime(2019, 12, 31, 18), datetime(2019, 12, 31, 22)], - dtype='datetime64') + dtype="datetime64") ) orbit_poly_end_day, orbit_poly_end_msec = to_cds_time( np.array([datetime(2019, 12, 31, 22), datetime(2020, 1, 1, 2)], - dtype='datetime64') + dtype="datetime64") ) counts = counts.rename({ - 'y': 'num_rows_vis_ir', - 'x': 'num_columns_vis_ir' + "y": "num_rows_vis_ir", + "x": "num_columns_vis_ir" }) scan_time_days, scan_time_msecs = to_cds_time(self.scan_time) ds = xr.Dataset( { - 'ch1': counts.copy(), - 'ch9': counts.copy(), - 'HRV': (('num_rows_hrv', 'num_columns_hrv'), [[1, 2, 3], + "ch1": counts.copy(), + "ch9": counts.copy(), + "HRV": (("num_rows_hrv", "num_columns_hrv"), [[1, 2, 3], [4, 5, 6], [7, 8, 9]]), - 'planned_chan_processing': self.radiance_types, - 'channel_data_visir_data_l10_line_mean_acquisition_time_day': ( - ('num_rows_vis_ir', 'channels_vis_ir_dim'), + "planned_chan_processing": self.radiance_types, + "channel_data_visir_data_l10_line_mean_acquisition_time_day": ( + ("num_rows_vis_ir", "channels_vis_ir_dim"), acq_time_day ), - 'channel_data_visir_data_l10_line_mean_acquisition_msec': ( - ('num_rows_vis_ir', 'channels_vis_ir_dim'), + "channel_data_visir_data_l10_line_mean_acquisition_msec": ( + ("num_rows_vis_ir", "channels_vis_ir_dim"), acq_time_msec ), - 'channel_data_visir_data_line_validity': ( - ('num_rows_vis_ir', 'channels_vis_ir_dim'), + "channel_data_visir_data_line_validity": ( + ("num_rows_vis_ir", "channels_vis_ir_dim"), line_validity ), - 'channel_data_visir_data_line_geometric_quality': ( - ('num_rows_vis_ir', 'channels_vis_ir_dim'), + "channel_data_visir_data_line_geometric_quality": ( + ("num_rows_vis_ir", "channels_vis_ir_dim"), line_geom_radio_quality ), - 'channel_data_visir_data_line_radiometric_quality': ( - ('num_rows_vis_ir', 'channels_vis_ir_dim'), + "channel_data_visir_data_line_radiometric_quality": ( + ("num_rows_vis_ir", "channels_vis_ir_dim"), line_geom_radio_quality ), - 'orbit_polynomial_x': ( - ('orbit_polynomial_dim_row', - 'orbit_polynomial_dim_col'), - ORBIT_POLYNOMIALS['X'][0:2] + "orbit_polynomial_x": ( + ("orbit_polynomial_dim_row", + "orbit_polynomial_dim_col"), + ORBIT_POLYNOMIALS["X"][0:2] ), - 'orbit_polynomial_y': ( - ('orbit_polynomial_dim_row', - 'orbit_polynomial_dim_col'), - ORBIT_POLYNOMIALS['Y'][0:2] + "orbit_polynomial_y": ( + ("orbit_polynomial_dim_row", + "orbit_polynomial_dim_col"), + ORBIT_POLYNOMIALS["Y"][0:2] ), - 'orbit_polynomial_z': ( - ('orbit_polynomial_dim_row', - 'orbit_polynomial_dim_col'), - ORBIT_POLYNOMIALS['Z'][0:2] + "orbit_polynomial_z": ( + ("orbit_polynomial_dim_row", + "orbit_polynomial_dim_col"), + ORBIT_POLYNOMIALS["Z"][0:2] ), - 'orbit_polynomial_start_time_day': ( - 'orbit_polynomial_dim_row', + "orbit_polynomial_start_time_day": ( + "orbit_polynomial_dim_row", orbit_poly_start_day ), - 'orbit_polynomial_start_time_msec': ( - 'orbit_polynomial_dim_row', + "orbit_polynomial_start_time_msec": ( + "orbit_polynomial_dim_row", orbit_poly_start_msec ), - 'orbit_polynomial_end_time_day': ( - 'orbit_polynomial_dim_row', + "orbit_polynomial_end_time_day": ( + "orbit_polynomial_dim_row", orbit_poly_end_day ), - 'orbit_polynomial_end_time_msec': ( - 'orbit_polynomial_dim_row', + "orbit_polynomial_end_time_msec": ( + 
"orbit_polynomial_dim_row", orbit_poly_end_msec ), }, attrs={ - 'equatorial_radius': 6378.169, - 'north_polar_radius': 6356.5838, - 'south_polar_radius': 6356.5838, - 'longitude_of_SSP': 0.0, - 'nominal_longitude': -3.5, - 'satellite_id': self.platform_id, - 'true_repeat_cycle_start_day': scan_time_days, - 'true_repeat_cycle_start_mi_sec': scan_time_msecs, - 'planned_repeat_cycle_end_day': scan_time_days, - 'planned_repeat_cycle_end_mi_sec': scan_time_msecs, - 'north_most_line': 3712, - 'east_most_pixel': 1, - 'west_most_pixel': 3712, - 'south_most_line': 1, - 'vis_ir_grid_origin': 0, - 'vis_ir_column_dir_grid_step': 3.0004032, - 'vis_ir_line_dir_grid_step': 3.0004032, - 'type_of_earth_model': '0x02', - 'nominal_image_scanning': 'T', + "equatorial_radius": 6378.169, + "north_polar_radius": 6356.5838, + "south_polar_radius": 6356.5838, + "longitude_of_SSP": 0.0, + "nominal_longitude": -3.5, + "satellite_id": self.platform_id, + "true_repeat_cycle_start_day": scan_time_days, + "true_repeat_cycle_start_mi_sec": scan_time_msecs, + "planned_repeat_cycle_end_day": scan_time_days, + "planned_repeat_cycle_end_mi_sec": scan_time_msecs, + "north_most_line": 3712, + "east_most_pixel": 1, + "west_most_pixel": 3712, + "south_most_line": 1, + "vis_ir_grid_origin": 0, + "vis_ir_column_dir_grid_step": 3.0004032, + "vis_ir_line_dir_grid_step": 3.0004032, + "type_of_earth_model": "0x02", + "nominal_image_scanning": "T", } ) if h5netcdf: - nattrs = {'equatorial_radius': np.array([6378.169]), - 'north_polar_radius': np.array([6356.5838]), - 'south_polar_radius': np.array([6356.5838]), - 'longitude_of_SSP': np.array([0.0]), - 'vis_ir_column_dir_grid_step': np.array([3.0004032]), - 'vis_ir_line_dir_grid_step': np.array([3.0004032]) + nattrs = {"equatorial_radius": np.array([6378.169]), + "north_polar_radius": np.array([6356.5838]), + "south_polar_radius": np.array([6356.5838]), + "longitude_of_SSP": np.array([0.0]), + "vis_ir_column_dir_grid_step": np.array([3.0004032]), + "vis_ir_line_dir_grid_step": np.array([3.0004032]) } ds.attrs.update(nattrs) - ds['ch1'].attrs.update({ - 'scale_factor': self.gains_nominal[0], - 'add_offset': self.offsets_nominal[0] + ds["ch1"].attrs.update({ + "scale_factor": self.gains_nominal[0], + "add_offset": self.offsets_nominal[0] }) # IR_108 is dataset with key ch9 - ds['ch9'].attrs.update({ - 'scale_factor': self.gains_nominal[8], - 'add_offset': self.offsets_nominal[8], + ds["ch9"].attrs.update({ + "scale_factor": self.gains_nominal[8], + "add_offset": self.offsets_nominal[8], }) # Add some attributes so that the reader can strip them strip_attrs = { - 'comment': None, - 'long_name': None, - 'valid_min': None, - 'valid_max': None + "comment": None, + "long_name": None, + "valid_min": None, + "valid_max": None } - for name in ['ch1', 'ch9']: + for name in ["ch1", "ch9"]: ds[name].attrs.update(strip_attrs) return ds @@ -197,38 +197,38 @@ def h5netcdf(self): """Fixture for xr backend choice.""" return False - @pytest.fixture(name='file_handler') + @pytest.fixture(name="file_handler") def file_handler(self, counts, h5netcdf): """Create a mocked file handler.""" with mock.patch( - 'satpy.readers.seviri_l1b_nc.open_dataset', + "satpy.readers.seviri_l1b_nc.open_dataset", return_value=self._get_fake_dataset(counts=counts, h5netcdf=h5netcdf) ): return NCSEVIRIFileHandler( - 'filename', - {'platform_shortname': 'MSG3', - 'start_time': self.scan_time, - 'service': 'MSG'}, - {'filetype': 'info'} + "filename", + {"platform_shortname": "MSG3", + "start_time": self.scan_time, + "service": 
"MSG"}, + {"filetype": "info"} ) @pytest.mark.parametrize( - ('channel', 'calibration', 'use_ext_coefs'), + ("channel", "calibration", "use_ext_coefs"), [ # VIS channel, internal coefficients - ('VIS006', 'counts', False), - ('VIS006', 'radiance', False), - ('VIS006', 'reflectance', False), + ("VIS006", "counts", False), + ("VIS006", "radiance", False), + ("VIS006", "reflectance", False), # VIS channel, external coefficients - ('VIS006', 'radiance', True), - ('VIS006', 'reflectance', True), + ("VIS006", "radiance", True), + ("VIS006", "reflectance", True), # IR channel, internal coefficients - ('IR_108', 'counts', False), - ('IR_108', 'radiance', False), - ('IR_108', 'brightness_temperature', False), + ("IR_108", "counts", False), + ("IR_108", "radiance", False), + ("IR_108", "brightness_temperature", False), # IR channel, external coefficients - ('IR_108', 'radiance', True), - ('IR_108', 'brightness_temperature', True), + ("IR_108", "radiance", True), + ("IR_108", "brightness_temperature", True), # FUTURE: Enable once HRV reading has been fixed. # # HRV channel, internal coefficiens # ('HRV', 'counts', False), @@ -247,7 +247,7 @@ def test_calibrate( expected = self._get_expected( channel=channel, calibration=calibration, - calib_mode='NOMINAL', + calib_mode="NOMINAL", use_ext_coefs=use_ext_coefs ) fh = file_handler @@ -261,35 +261,35 @@ def test_calibrate( def test_mask_bad_quality(self, file_handler): """Test masking of bad quality scan lines.""" - channel = 'VIS006' + channel = "VIS006" key = channel_keys_dict[channel] dataset_info = { - 'nc_key': key, - 'units': 'units', - 'wavelength': 'wavelength', - 'standard_name': 'standard_name' + "nc_key": key, + "units": "units", + "wavelength": "wavelength", + "standard_name": "standard_name" } expected = self._get_expected( channel=channel, - calibration='radiance', - calib_mode='NOMINAL', + calibration="radiance", + calib_mode="NOMINAL", use_ext_coefs=False ) fh = file_handler res = fh._mask_bad_quality(fh.nc[key], dataset_info) - new_data = np.zeros_like(expected.data).astype('float32') + new_data = np.zeros_like(expected.data).astype("float32") new_data[:, :] = np.nan expected = expected.copy(data=new_data) xr.testing.assert_allclose(res, expected) @pytest.mark.parametrize( - ('channel', 'calibration', 'mask_bad_quality_scan_lines'), + ("channel", "calibration", "mask_bad_quality_scan_lines"), [ - ('VIS006', 'reflectance', True), - ('VIS006', 'reflectance', False), - ('IR_108', 'brightness_temperature', True) + ("VIS006", "reflectance", True), + ("VIS006", "reflectance", False), + ("IR_108", "brightness_temperature", True) ] ) def test_get_dataset(self, file_handler, channel, calibration, mask_bad_quality_scan_lines): @@ -297,10 +297,10 @@ def test_get_dataset(self, file_handler, channel, calibration, mask_bad_quality_ dataset_id = make_dataid(name=channel, calibration=calibration) key = channel_keys_dict[channel] dataset_info = { - 'nc_key': key, - 'units': 'units', - 'wavelength': 'wavelength', - 'standard_name': 'standard_name' + "nc_key": key, + "units": "units", + "wavelength": "wavelength", + "standard_name": "standard_name" } file_handler.mask_bad_quality_scan_lines = mask_bad_quality_scan_lines @@ -310,43 +310,43 @@ def test_get_dataset(self, file_handler, channel, calibration, mask_bad_quality_ expected = self._get_expected( channel=channel, calibration=calibration, - calib_mode='NOMINAL', + calib_mode="NOMINAL", use_ext_coefs=False ) expected.attrs = { - 'orbital_parameters': { - 'satellite_actual_longitude': -3.541742131915741, 
- 'satellite_actual_latitude': -0.5203765167594427, - 'satellite_actual_altitude': 35783419.16135868, - 'satellite_nominal_longitude': -3.5, - 'satellite_nominal_latitude': 0.0, - 'projection_longitude': 0.0, - 'projection_latitude': 0.0, - 'projection_altitude': 35785831.0 + "orbital_parameters": { + "satellite_actual_longitude": -3.541742131915741, + "satellite_actual_latitude": -0.5203765167594427, + "satellite_actual_altitude": 35783419.16135868, + "satellite_nominal_longitude": -3.5, + "satellite_nominal_latitude": 0.0, + "projection_longitude": 0.0, + "projection_latitude": 0.0, + "projection_altitude": 35785831.0 }, - 'time_parameters': { - 'nominal_start_time': datetime(2020, 1, 1, 0, 0), - 'nominal_end_time': datetime(2020, 1, 1, 0, 0), - 'observation_start_time': datetime(2020, 1, 1, 0, 0), - 'observation_end_time': datetime(2020, 1, 1, 0, 0), + "time_parameters": { + "nominal_start_time": datetime(2020, 1, 1, 0, 0), + "nominal_end_time": datetime(2020, 1, 1, 0, 0), + "observation_start_time": datetime(2020, 1, 1, 0, 0), + "observation_end_time": datetime(2020, 1, 1, 0, 0), }, - 'georef_offset_corrected': True, - 'platform_name': 'Meteosat-11', - 'sensor': 'seviri', - 'units': 'units', - 'wavelength': 'wavelength', - 'standard_name': 'standard_name' + "georef_offset_corrected": True, + "platform_name": "Meteosat-11", + "sensor": "seviri", + "units": "units", + "wavelength": "wavelength", + "standard_name": "standard_name" } - expected['acq_time'] = ('y', [np.datetime64('1958-01-02 00:00:01'), - np.datetime64('1958-01-02 00:00:02')]) + expected["acq_time"] = ("y", [np.datetime64("1958-01-02 00:00:01"), + np.datetime64("1958-01-02 00:00:02")]) expected = expected[::-1] # reader flips data upside down if mask_bad_quality_scan_lines: expected = file_handler._mask_bad_quality(expected, dataset_info) xr.testing.assert_allclose(res, expected) - for key in ['sun_earth_distance_correction_applied', - 'sun_earth_distance_correction_factor']: + for key in ["sun_earth_distance_correction_applied", + "sun_earth_distance_correction_factor"]: res.attrs.pop(key, None) assert_attrs_equal(res.attrs, expected.attrs, tolerance=1e-4) @@ -364,29 +364,29 @@ def test_repeat_cycle_duration(self, file_handler): """Test repeat cycle handling for FD or ReducedScan.""" assert 15 == file_handler._repeat_cycle_duration # Change the reducescan scenario to test the repeat cycle duration handling - file_handler.nc.attrs['nominal_image_scanning'] = '' - file_handler.nc.attrs['reduced_scanning'] = 'T' + file_handler.nc.attrs["nominal_image_scanning"] = "" + file_handler.nc.attrs["reduced_scanning"] = "T" # file_handler.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] = 1 assert 5 == file_handler._repeat_cycle_duration def test_satpos_no_valid_orbit_polynomial(self, file_handler): """Test satellite position if there is no valid orbit polynomial.""" - dataset_id = make_dataid(name='VIS006', calibration='counts') + dataset_id = make_dataid(name="VIS006", calibration="counts") dataset_info = { - 'name': 'VIS006', - 'nc_key': 'ch1', - 'units': 'units', - 'wavelength': 'wavelength', - 'standard_name': 'standard_name' + "name": "VIS006", + "nc_key": "ch1", + "units": "units", + "wavelength": "wavelength", + "standard_name": "standard_name" } - file_handler.nc['orbit_polynomial_start_time_day'] = 0 - file_handler.nc['orbit_polynomial_end_time_day'] = 0 + file_handler.nc["orbit_polynomial_start_time_day"] = 0 + file_handler.nc["orbit_polynomial_end_time_day"] = 0 res = 
file_handler.get_dataset(dataset_id, dataset_info) - assert 'satellite_actual_longitude' not in res.attrs[ - 'orbital_parameters'] + assert "satellite_actual_longitude" not in res.attrs[ + "orbital_parameters"] - @pytest.mark.parametrize('h5netcdf', [True]) + @pytest.mark.parametrize("h5netcdf", [True]) def test_h5netcdf_pecularity(self, file_handler, h5netcdf): """Test conversion of attributes when xarray is used with h5netcdf backend.""" fh = file_handler - assert isinstance(fh.mda['projection_parameters']['a'], float) + assert isinstance(fh.mda["projection_parameters"]["a"], float) diff --git a/satpy/tests/reader_tests/test_seviri_l2_bufr.py b/satpy/tests/reader_tests/test_seviri_l2_bufr.py index 3578645e5b..2979084974 100644 --- a/satpy/tests/reader_tests/test_seviri_l2_bufr.py +++ b/satpy/tests/reader_tests/test_seviri_l2_bufr.py @@ -29,83 +29,83 @@ from satpy.tests.utils import make_dataid -FILETYPE_INFO = {'file_type': 'seviri_l2_bufr_asr'} +FILETYPE_INFO = {"file_type": "seviri_l2_bufr_asr"} -FILENAME_INFO = {'start_time': '20191112000000', - 'spacecraft': 'MSG2'} -FILENAME_INFO2 = {'start_time': '20191112000000', - 'spacecraft': 'MSG2', - 'server': 'TESTSERVER'} +FILENAME_INFO = {"start_time": "20191112000000", + "spacecraft": "MSG2"} +FILENAME_INFO2 = {"start_time": "20191112000000", + "spacecraft": "MSG2", + "server": "TESTSERVER"} MPEF_PRODUCT_HEADER = { - 'NominalTime': datetime(2019, 11, 6, 18, 0), - 'SpacecraftName': '09', - 'RectificationLongitude': 'E0455' + "NominalTime": datetime(2019, 11, 6, 18, 0), + "SpacecraftName": "09", + "RectificationLongitude": "E0455" } DATASET_INFO = { - 'name': 'testdata', - 'key': '#1#brightnessTemperature', - 'coordinates': ('longitude', 'latitude'), - 'fill_value': 0 + "name": "testdata", + "key": "#1#brightnessTemperature", + "coordinates": ("longitude", "latitude"), + "fill_value": 0 } DATASET_INFO_LAT = { - 'name': 'latitude', - 'key': '#1#latitude', - 'fill_value': -1.e+100 + "name": "latitude", + "key": "#1#latitude", + "fill_value": -1.e+100 } DATASET_INFO_LON = { - 'name': 'longitude', - 'key': '#1#longitude', - 'fill_value': -1.e+100 + "name": "longitude", + "key": "#1#longitude", + "fill_value": -1.e+100 } DATASET_ATTRS = { - 'platform_name': 'MET09', - 'ssp_lon': 45.5, - 'seg_size': 16 + "platform_name": "MET09", + "ssp_lon": 45.5, + "seg_size": 16 } AREA_DEF = geometry.AreaDefinition( - 'msg_seviri_iodc_48km', - 'MSG SEVIRI Indian Ocean Data Coverage service area definition with 48 km resolution', + "msg_seviri_iodc_48km", + "MSG SEVIRI Indian Ocean Data Coverage service area definition with 48 km resolution", "", - {'a': 6378169., 'b': 6356583.8, 'lon_0': DATASET_ATTRS['ssp_lon'], - 'h': 35785831., 'proj': 'geos', 'units': 'm'}, + {"a": 6378169., "b": 6356583.8, "lon_0": DATASET_ATTRS["ssp_lon"], + "h": 35785831., "proj": "geos", "units": "m"}, 232, 232, (-5570248.6867, -5567248.2834, 5567248.2834, 5570248.6867) ) AREA_DEF_FES = geometry.AreaDefinition( - 'msg_seviri_res_48km', - 'MSG SEVIRI Full Earth Scanning service area definition with 48 km resolution', + "msg_seviri_res_48km", + "MSG SEVIRI Full Earth Scanning service area definition with 48 km resolution", "", - {'a': 6378169., 'b': 6356583.8, 'lon_0': 0.0, - 'h': 35785831., 'proj': 'geos', 'units': 'm'}, + {"a": 6378169., "b": 6356583.8, "lon_0": 0.0, + "h": 35785831., "proj": "geos", "units": "m"}, 232, 232, (-5570248.6867, -5567248.2834, 5567248.2834, 5570248.6867) ) AREA_DEF_EXT = geometry.AreaDefinition( - 'msg_seviri_iodc_9km_ext', - 'MSG SEVIRI Indian Ocean Data 
Coverage service area definition with 9 km resolution ' - '(extended outside original 3km grid)', + "msg_seviri_iodc_9km_ext", + "MSG SEVIRI Indian Ocean Data Coverage service area definition with 9 km resolution " + "(extended outside original 3km grid)", "", - {'a': 6378169., 'b': 6356583.8, 'lon_0': DATASET_ATTRS['ssp_lon'], - 'h': 35785831., 'proj': 'geos', 'units': 'm'}, + {"a": 6378169., "b": 6356583.8, "lon_0": DATASET_ATTRS["ssp_lon"], + "h": 35785831., "proj": "geos", "units": "m"}, 1238, 1238, (-5571748.8883, -5571748.8882, 5571748.8882, 5571748.8883) ) TEST_FILES = [ - 'ASRBUFRProd_20191106130000Z_00_OMPEFS02_MET09_FES_E0000', - 'MSG2-SEVI-MSGASRE-0101-0101-20191106130000.000000000Z-20191106131702-1362128.bfr', - 'MSG2-SEVI-MSGASRE-0101-0101-20191106101500.000000000Z-20191106103218-1362148' + "ASRBUFRProd_20191106130000Z_00_OMPEFS02_MET09_FES_E0000", + "MSG2-SEVI-MSGASRE-0101-0101-20191106130000.000000000Z-20191106131702-1362128.bfr", + "MSG2-SEVI-MSGASRE-0101-0101-20191106101500.000000000Z-20191106103218-1362148" ] # Test data @@ -117,30 +117,30 @@ class SeviriL2BufrData: """Mock SEVIRI L2 BUFR data.""" - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") - def __init__(self, filename, with_adef=False, rect_lon='default'): + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") + def __init__(self, filename, with_adef=False, rect_lon="default"): """Initialize by mocking test data for testing the SEVIRI L2 BUFR reader.""" import eccodes as ec from satpy.readers.seviri_l2_bufr import SeviriL2BufrFileHandler - self.buf1 = ec.codes_bufr_new_from_samples('BUFR4_local_satellite') - ec.codes_set(self.buf1, 'unpack', 1) + self.buf1 = ec.codes_bufr_new_from_samples("BUFR4_local_satellite") + ec.codes_set(self.buf1, "unpack", 1) # write the bufr test data twice as we want to read in and then concatenate the data in the reader # 55 id corresponds to METEOSAT 8` - ec.codes_set(self.buf1, 'satelliteIdentifier', 56) - ec.codes_set_array(self.buf1, '#1#latitude', LAT) - ec.codes_set_array(self.buf1, '#1#latitude', LAT) - ec.codes_set_array(self.buf1, '#1#longitude', LON) - ec.codes_set_array(self.buf1, '#1#longitude', LON) - ec.codes_set_array(self.buf1, '#1#brightnessTemperature', DATA) - ec.codes_set_array(self.buf1, '#1#brightnessTemperature', DATA) + ec.codes_set(self.buf1, "satelliteIdentifier", 56) + ec.codes_set_array(self.buf1, "#1#latitude", LAT) + ec.codes_set_array(self.buf1, "#1#latitude", LAT) + ec.codes_set_array(self.buf1, "#1#longitude", LON) + ec.codes_set_array(self.buf1, "#1#longitude", LON) + ec.codes_set_array(self.buf1, "#1#brightnessTemperature", DATA) + ec.codes_set_array(self.buf1, "#1#brightnessTemperature", DATA) self.m = mock.mock_open() # only our offline product contain MPEF product headers so we get the metadata from there - if ('BUFRProd' in filename): - with mock.patch('satpy.readers.seviri_l2_bufr.np.fromfile') as fromfile: + if ("BUFRProd" in filename): + with mock.patch("satpy.readers.seviri_l2_bufr.np.fromfile") as fromfile: fromfile.return_value = MPEF_PRODUCT_HEADER - with mock.patch('satpy.readers.seviri_l2_bufr.recarray2dict') as recarray2dict: + with mock.patch("satpy.readers.seviri_l2_bufr.recarray2dict") as recarray2dict: recarray2dict.side_effect = (lambda x: x) self.fh = SeviriL2BufrFileHandler(filename, FILENAME_INFO2, FILETYPE_INFO, with_area_definition=with_adef, rectification_longitude=rect_lon) @@ -148,13 +148,13 @@ def __init__(self, filename, with_adef=False, 
rect_lon='default'): else: # No Mpef Header so we get the metadata from the BUFR messages - with mock.patch('satpy.readers.seviri_l2_bufr.open', self.m, create=True): - with mock.patch('eccodes.codes_bufr_new_from_file', + with mock.patch("satpy.readers.seviri_l2_bufr.open", self.m, create=True): + with mock.patch("eccodes.codes_bufr_new_from_file", side_effect=[self.buf1, None, self.buf1, None, self.buf1, None]) as ec1: ec1.return_value = ec1.side_effect - with mock.patch('eccodes.codes_set') as ec2: + with mock.patch("eccodes.codes_set") as ec2: ec2.return_value = 1 - with mock.patch('eccodes.codes_release') as ec5: + with mock.patch("eccodes.codes_release") as ec5: ec5.return_value = 1 self.fh = SeviriL2BufrFileHandler(filename, FILENAME_INFO, FILETYPE_INFO, with_area_definition=with_adef, @@ -162,15 +162,15 @@ def __init__(self, filename, with_adef=False, rect_lon='default'): def get_data(self, dataset_info): """Read data from mock file.""" - with mock.patch('satpy.readers.seviri_l2_bufr.open', self.m, create=True): - with mock.patch('eccodes.codes_bufr_new_from_file', + with mock.patch("satpy.readers.seviri_l2_bufr.open", self.m, create=True): + with mock.patch("eccodes.codes_bufr_new_from_file", side_effect=[self.buf1, self.buf1, None]) as ec1: ec1.return_value = ec1.side_effect - with mock.patch('eccodes.codes_set') as ec2: + with mock.patch("eccodes.codes_set") as ec2: ec2.return_value = 1 - with mock.patch('eccodes.codes_release') as ec5: + with mock.patch("eccodes.codes_release") as ec5: ec5.return_value = 1 - z = self.fh.get_dataset(make_dataid(name=dataset_info['name'], resolution=48000), dataset_info) + z = self.fh.get_dataset(make_dataid(name=dataset_info["name"], resolution=48000), dataset_info) return z @@ -193,9 +193,9 @@ def test_attributes_with_swath_definition(input_file): """Test correctness of dataset attributes with data loaded with a SwathDefinition (default behaviour).""" bufr_obj = SeviriL2BufrData(input_file) z = bufr_obj.get_data(DATASET_INFO) - assert z.attrs['platform_name'] == DATASET_ATTRS['platform_name'] - assert z.attrs['ssp_lon'] == DATASET_ATTRS['ssp_lon'] - assert z.attrs['seg_size'] == DATASET_ATTRS['seg_size'] + assert z.attrs["platform_name"] == DATASET_ATTRS["platform_name"] + assert z.attrs["ssp_lon"] == DATASET_ATTRS["ssp_lon"] + assert z.attrs["seg_size"] == DATASET_ATTRS["seg_size"] @staticmethod def test_attributes_with_area_definition(input_file): @@ -204,9 +204,9 @@ def test_attributes_with_area_definition(input_file): _ = bufr_obj.get_data(DATASET_INFO_LAT) # We need to load the lat/lon data in order to _ = bufr_obj.get_data(DATASET_INFO_LON) # populate the file handler with these data z = bufr_obj.get_data(DATASET_INFO) - assert z.attrs['platform_name'] == DATASET_ATTRS['platform_name'] - assert z.attrs['ssp_lon'] == DATASET_ATTRS['ssp_lon'] - assert z.attrs['seg_size'] == DATASET_ATTRS['seg_size'] + assert z.attrs["platform_name"] == DATASET_ATTRS["platform_name"] + assert z.attrs["ssp_lon"] == DATASET_ATTRS["ssp_lon"] + assert z.attrs["seg_size"] == DATASET_ATTRS["seg_size"] @staticmethod def test_data_with_swath_definition(input_file): @@ -242,7 +242,7 @@ def test_data_with_area_definition(self, input_file): # Test that the correct AreaDefinition is identified for products with 3 pixel segments bufr_obj.fh.seg_size = 3 - ad_ext = bufr_obj.fh._construct_area_def(make_dataid(name='dummmy', resolution=9000)) + ad_ext = bufr_obj.fh._construct_area_def(make_dataid(name="dummmy", resolution=9000)) assert ad_ext == AREA_DEF_EXT def 
test_data_with_rect_lon(self, input_file): @@ -260,14 +260,14 @@ def test_data_with_rect_lon(self, input_file): class SeviriL2AMVBufrData: """Mock SEVIRI L2 AMV BUFR data.""" - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def __init__(self, filename): """Initialize by mocking test data for testing the SEVIRI L2 BUFR reader.""" from satpy.readers.seviri_l2_bufr import SeviriL2BufrFileHandler - with mock.patch('satpy.readers.seviri_l2_bufr.np.fromfile'): + with mock.patch("satpy.readers.seviri_l2_bufr.np.fromfile"): self.fh = SeviriL2BufrFileHandler(filename, FILENAME_INFO2, - filetype_info={'file_type': 'seviri_l2_bufr_amv'}, + filetype_info={"file_type": "seviri_l2_bufr_amv"}, with_area_definition=True) @@ -277,5 +277,5 @@ class TestSeviriL2AMVBufrReader: @staticmethod def test_amv_with_area_def(): """Test that AMV data can not be loaded with an area definition.""" - bufr_obj = SeviriL2AMVBufrData('AMVBUFRProd_20201110124500Z_00_OMPEFS04_MET11_FES_E0000') + bufr_obj = SeviriL2AMVBufrData("AMVBUFRProd_20201110124500Z_00_OMPEFS04_MET11_FES_E0000") assert bufr_obj.fh.with_adef is False diff --git a/satpy/tests/reader_tests/test_seviri_l2_grib.py b/satpy/tests/reader_tests/test_seviri_l2_grib.py index faee3f9bdb..a8b5310a78 100644 --- a/satpy/tests/reader_tests/test_seviri_l2_grib.py +++ b/satpy/tests/reader_tests/test_seviri_l2_grib.py @@ -29,17 +29,17 @@ # Dictionary to be used as fake GRIB message FAKE_MESSAGE = { - 'longitudeOfSubSatellitePointInDegrees': 9.5, - 'dataDate': 20191020, - 'dataTime': 1745, - 'Nx': 1000, - 'Ny': 1200, - 'earthMajorAxis': 6400., - 'earthMinorAxis': 6300., - 'NrInRadiusOfEarth': 6., - 'XpInGridLengths': 500, - 'parameterNumber': 30, - 'missingValue': 9999, + "longitudeOfSubSatellitePointInDegrees": 9.5, + "dataDate": 20191020, + "dataTime": 1745, + "Nx": 1000, + "Ny": 1200, + "earthMajorAxis": 6400., + "earthMinorAxis": 6300., + "NrInRadiusOfEarth": 6., + "XpInGridLengths": 500, + "parameterNumber": 30, + "missingValue": 9999, } # List to be used as fake GID source @@ -49,7 +49,7 @@ class Test_SeviriL2GribFileHandler(unittest.TestCase): """Test the SeviriL2GribFileHandler reader.""" - @mock.patch('satpy.readers.seviri_l2_grib.ec') + @mock.patch("satpy.readers.seviri_l2_grib.ec") def setUp(self, ec_): """Set up the test by creating a mocked eccodes library.""" fake_gid_generator = (i for i in FAKE_GID) @@ -58,9 +58,9 @@ def setUp(self, ec_): ec_.codes_get_values.return_value = np.ones(1000*1200) self.ec_ = ec_ - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") - @mock.patch('satpy.readers.seviri_l2_grib.xr') - @mock.patch('satpy.readers.seviri_l2_grib.da') + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") + @mock.patch("satpy.readers.seviri_l2_grib.xr") + @mock.patch("satpy.readers.seviri_l2_grib.da") def test_data_reading(self, da_, xr_): """Test the reading of data from the product.""" from satpy.readers.seviri_l2_grib import REPEAT_CYCLE_DURATION, SeviriL2GribFileHandler @@ -68,18 +68,18 @@ def test_data_reading(self, da_, xr_): CHUNK_SIZE = get_legacy_chunk_size() with mock.patch("builtins.open", mock.mock_open()) as mock_file: - with mock.patch('satpy.readers.seviri_l2_grib.ec', self.ec_): + with mock.patch("satpy.readers.seviri_l2_grib.ec", self.ec_): self.reader = SeviriL2GribFileHandler( - filename='test.grib', + filename="test.grib", filename_info={ 
- 'spacecraft': 'MET11', - 'start_time': datetime.datetime(year=2020, month=10, day=20, + "spacecraft": "MET11", + "start_time": datetime.datetime(year=2020, month=10, day=20, hour=19, minute=45, second=0) }, filetype_info={} ) - dataset_id = make_dataid(name='dummmy', resolution=3000) + dataset_id = make_dataid(name="dummmy", resolution=3000) # Checks that the codes_grib_multi_support_on function has been called self.ec_.codes_grib_multi_support_on.assert_called() @@ -91,11 +91,11 @@ def test_data_reading(self, da_, xr_): self.ec_.codes_release.reset_mock() # Checks the correct execution of the get_dataset function with a valid parameter_number - valid_dataset = self.reader.get_dataset(dataset_id, {'parameter_number': 30}) + valid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 30}) # Checks the correct file open call - mock_file.assert_called_with('test.grib', 'rb') + mock_file.assert_called_with("test.grib", "rb") # Checks that the dataset has been created as a DataArray object - self.assertEqual(valid_dataset._extract_mock_name(), 'xr.DataArray()') + self.assertEqual(valid_dataset._extract_mock_name(), "xr.DataArray()") # Checks that codes_release has been called after each codes_grib_new_from_file call # (except after the last one which has returned a None) self.assertEqual(self.ec_.codes_grib_new_from_file.call_count, @@ -108,7 +108,7 @@ def test_data_reading(self, da_, xr_): self.ec_.codes_release.reset_mock() # Checks the correct execution of the get_dataset function with an invalid parameter_number - invalid_dataset = self.reader.get_dataset(dataset_id, {'parameter_number': 50}) + invalid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 50}) # Checks that the function returns None self.assertEqual(invalid_dataset, None) # Checks that codes_release has been called after each codes_grib_new_from_file call @@ -122,11 +122,11 @@ def test_data_reading(self, da_, xr_): # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions attributes = self.reader._get_attributes() expected_attributes = { - 'orbital_parameters': { - 'projection_longitude': 9.5 + "orbital_parameters": { + "projection_longitude": 9.5 }, - 'sensor': 'seviri', - 'platform_name': 'Meteosat-11' + "sensor": "seviri", + "platform_name": "Meteosat-11" } self.assertEqual(attributes, expected_attributes) @@ -140,45 +140,45 @@ def test_data_reading(self, da_, xr_): # Checks that xarray.DataArray has been called with the correct arguments name, args, kwargs = xr_.mock_calls[0] - self.assertEqual(kwargs['dims'], ('y', 'x')) + self.assertEqual(kwargs["dims"], ("y", "x")) # Checks the correct execution of the _get_proj_area function pdict, area_dict = self.reader._get_proj_area(0) expected_pdict = { - 'a': 6400000., - 'b': 6300000., - 'h': 32000000., - 'ssp_lon': 9.5, - 'nlines': 1000, - 'ncols': 1200, - 'a_name': 'msg_seviri_rss_3km', - 'a_desc': 'MSG SEVIRI Rapid Scanning Service area definition with 3 km resolution', - 'p_id': '', + "a": 6400000., + "b": 6300000., + "h": 32000000., + "ssp_lon": 9.5, + "nlines": 1000, + "ncols": 1200, + "a_name": "msg_seviri_rss_3km", + "a_desc": "MSG SEVIRI Rapid Scanning Service area definition with 3 km resolution", + "p_id": "", } self.assertEqual(pdict, expected_pdict) expected_area_dict = { - 'center_point': 500, - 'north': 1200, - 'east': 1, - 'west': 1000, - 'south': 1, + "center_point": 500, + "north": 1200, + "east": 1, + "west": 1000, + "south": 1, } self.assertEqual(area_dict, expected_area_dict) # Checks the 
correct execution of the get_area_def function - with mock.patch('satpy.readers.seviri_l2_grib.calculate_area_extent', - mock.Mock(name='calculate_area_extent')) as cae: - with mock.patch('satpy.readers.seviri_l2_grib.get_area_definition', mock.Mock()) as gad: - dataset_id = make_dataid(name='dummmy', resolution=400.) + with mock.patch("satpy.readers.seviri_l2_grib.calculate_area_extent", + mock.Mock(name="calculate_area_extent")) as cae: + with mock.patch("satpy.readers.seviri_l2_grib.get_area_definition", mock.Mock()) as gad: + dataset_id = make_dataid(name="dummmy", resolution=400.) self.reader.get_area_def(dataset_id) # Asserts that calculate_area_extent has been called with the correct arguments - expected_args = ({'center_point': 500, 'east': 1, 'west': 1000, 'south': 1, 'north': 1200, - 'column_step': 400., 'line_step': 400.},) + expected_args = ({"center_point": 500, "east": 1, "west": 1000, "south": 1, "north": 1200, + "column_step": 400., "line_step": 400.},) name, args, kwargs = cae.mock_calls[0] self.assertEqual(args, expected_args) # Asserts that get_area_definition has been called with the correct arguments name, args, kwargs = gad.mock_calls[0] self.assertEqual(args[0], expected_pdict) # The second argument must be the return result of calculate_area_extent - self.assertEqual(args[1]._extract_mock_name(), 'calculate_area_extent()') + self.assertEqual(args[1]._extract_mock_name(), "calculate_area_extent()") diff --git a/satpy/tests/reader_tests/test_slstr_l1b.py b/satpy/tests/reader_tests/test_slstr_l1b.py index 9f516b4cde..cc0764685f 100644 --- a/satpy/tests/reader_tests/test_slstr_l1b.py +++ b/satpy/tests/reader_tests/test_slstr_l1b.py @@ -27,40 +27,40 @@ from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange from satpy.readers.slstr_l1b import NCSLSTR1B, NCSLSTRAngles, NCSLSTRFlag, NCSLSTRGeo -local_id_keys_config = {'name': { - 'required': True, +local_id_keys_config = {"name": { + "required": True, }, - 'wavelength': { - 'type': WavelengthRange, + "wavelength": { + "type": WavelengthRange, }, - 'resolution': None, - 'calibration': { - 'enum': [ - 'reflectance', - 'brightness_temperature', - 'radiance', - 'counts' + "resolution": None, + "calibration": { + "enum": [ + "reflectance", + "brightness_temperature", + "radiance", + "counts" ] }, - 'stripe': { - 'enum': [ - 'a', - 'b', - 'c', - 'i', - 'f', + "stripe": { + "enum": [ + "a", + "b", + "c", + "i", + "f", ] }, - 'view': { - 'enum': [ - 'nadir', - 'oblique', + "view": { + "enum": [ + "nadir", + "oblique", ] }, - 'modifiers': { - 'required': True, - 'default': ModifierTuple(), - 'type': ModifierTuple, + "modifiers": { + "required": True, + "default": ModifierTuple(), + "type": ModifierTuple, }, } @@ -68,7 +68,7 @@ class TestSLSTRL1B(unittest.TestCase): """Common setup for SLSTR_L1B tests.""" - @mock.patch('satpy.readers.slstr_l1b.xr') + @mock.patch("satpy.readers.slstr_l1b.xr") def setUp(self, xr_): """Create a fake dataset using the given radiance data.""" self.base_data = np.array(([1., 2., 3.], [4., 5., 6.])) @@ -77,34 +77,34 @@ def setUp(self, xr_): self.end_time = "2020-05-10T12:06:18.012Z" self.rad = xr.DataArray( self.base_data, - dims=('columns', 'rows'), - attrs={'scale_factor': 1.0, 'add_offset': 0.0, - '_FillValue': -32768, 'units': 'mW.m-2.sr-1.nm-1', + dims=("columns", "rows"), + attrs={"scale_factor": 1.0, "add_offset": 0.0, + "_FillValue": -32768, "units": "mW.m-2.sr-1.nm-1", } ) det = xr.DataArray( self.base_data, - dims=('columns', 'rows'), - attrs={'scale_factor': 1.0, 'add_offset': 
0.0, - '_FillValue': 255, + dims=("columns", "rows"), + attrs={"scale_factor": 1.0, "add_offset": 0.0, + "_FillValue": 255, } ) self.fake_dataset = xr.Dataset( data_vars={ - 'S5_radiance_an': self.rad, - 'S9_BT_ao': self.rad, - 'foo_radiance_an': self.rad, - 'S5_solar_irradiances': self.rad, - 'geometry_tn': self.rad, - 'latitude_an': self.rad, - 'x_tx': self.rad, - 'y_tx': self.rad, - 'x_in': self.rad, - 'y_in': self.rad, - 'x_an': self.rad, - 'y_an': self.rad, - 'flags_an': self.rad, - 'detector_an': det, + "S5_radiance_an": self.rad, + "S9_BT_ao": self.rad, + "foo_radiance_an": self.rad, + "S5_solar_irradiances": self.rad, + "geometry_tn": self.rad, + "latitude_an": self.rad, + "x_tx": self.rad, + "y_tx": self.rad, + "x_in": self.rad, + "y_in": self.rad, + "x_an": self.rad, + "y_an": self.rad, + "flags_an": self.rad, + "detector_an": det, }, attrs={ "start_time": self.start_time, @@ -129,122 +129,122 @@ def ev(foo_x, foo_y): """Fake function to return interpolated data.""" return np.zeros((3, 2)) - @mock.patch('satpy.readers.slstr_l1b.xr') - @mock.patch('scipy.interpolate.RectBivariateSpline') + @mock.patch("satpy.readers.slstr_l1b.xr") + @mock.patch("scipy.interpolate.RectBivariateSpline") def test_instantiate(self, bvs_, xr_): """Test initialization of file handlers.""" bvs_.return_value = self.FakeSpl xr_.open_dataset.return_value = self.fake_dataset good_start = datetime.strptime(self.start_time, - '%Y-%m-%dT%H:%M:%S.%fZ') + "%Y-%m-%dT%H:%M:%S.%fZ") good_end = datetime.strptime(self.end_time, - '%Y-%m-%dT%H:%M:%S.%fZ') + "%Y-%m-%dT%H:%M:%S.%fZ") - ds_id = make_dataid(name='foo', calibration='radiance', - stripe='a', view='nadir') - ds_id_500 = make_dataid(name='foo', calibration='radiance', - stripe='a', view='nadir', resolution=500) - filename_info = {'mission_id': 'S3A', 'dataset_name': 'foo', - 'start_time': 0, 'end_time': 0, - 'stripe': 'a', 'view': 'n'} - test = NCSLSTR1B('somedir/S1_radiance_an.nc', filename_info, 'c') - assert test.view == 'nadir' - assert test.stripe == 'a' - test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'foo'})) + ds_id = make_dataid(name="foo", calibration="radiance", + stripe="a", view="nadir") + ds_id_500 = make_dataid(name="foo", calibration="radiance", + stripe="a", view="nadir", resolution=500) + filename_info = {"mission_id": "S3A", "dataset_name": "foo", + "start_time": 0, "end_time": 0, + "stripe": "a", "view": "n"} + test = NCSLSTR1B("somedir/S1_radiance_an.nc", filename_info, "c") + assert test.view == "nadir" + assert test.stripe == "a" + test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) self.assertEqual(test.start_time, good_start) self.assertEqual(test.end_time, good_end) xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() - filename_info = {'mission_id': 'S3A', 'dataset_name': 'foo', - 'start_time': 0, 'end_time': 0, - 'stripe': 'c', 'view': 'o'} - test = NCSLSTR1B('somedir/S1_radiance_co.nc', filename_info, 'c') - assert test.view == 'oblique' - assert test.stripe == 'c' - test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'foo'})) + filename_info = {"mission_id": "S3A", "dataset_name": "foo", + "start_time": 0, "end_time": 0, + "stripe": "c", "view": "o"} + test = NCSLSTR1B("somedir/S1_radiance_co.nc", filename_info, "c") + assert test.view == "oblique" + assert test.stripe == "c" + test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) self.assertEqual(test.start_time, good_start) self.assertEqual(test.end_time, good_end) xr_.open_dataset.assert_called() 
xr_.open_dataset.reset_mock() - filename_info = {'mission_id': 'S3A', 'dataset_name': 'foo', - 'start_time': 0, 'end_time': 0, - 'stripe': 'a', 'view': 'n'} - test = NCSLSTRGeo('somedir/geometry_an.nc', filename_info, 'c') - test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'latitude_{stripe:1s}{view:1s}'})) + filename_info = {"mission_id": "S3A", "dataset_name": "foo", + "start_time": 0, "end_time": 0, + "stripe": "a", "view": "n"} + test = NCSLSTRGeo("somedir/geometry_an.nc", filename_info, "c") + test.get_dataset(ds_id, dict(filename_info, **{"file_key": "latitude_{stripe:1s}{view:1s}"})) self.assertEqual(test.start_time, good_start) self.assertEqual(test.end_time, good_end) xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() - test = NCSLSTRFlag('somedir/S1_radiance_an.nc', filename_info, 'c') - test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'flags_{stripe:1s}{view:1s}'})) - assert test.view == 'nadir' - assert test.stripe == 'a' + test = NCSLSTRFlag("somedir/S1_radiance_an.nc", filename_info, "c") + test.get_dataset(ds_id, dict(filename_info, **{"file_key": "flags_{stripe:1s}{view:1s}"})) + assert test.view == "nadir" + assert test.stripe == "a" self.assertEqual(test.start_time, good_start) self.assertEqual(test.end_time, good_end) xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() - test = NCSLSTRAngles('somedir/S1_radiance_an.nc', filename_info, 'c') - test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'geometry_t{view:1s}'})) + test = NCSLSTRAngles("somedir/S1_radiance_an.nc", filename_info, "c") + test.get_dataset(ds_id, dict(filename_info, **{"file_key": "geometry_t{view:1s}"})) self.assertEqual(test.start_time, good_start) self.assertEqual(test.end_time, good_end) xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() - test.get_dataset(ds_id_500, dict(filename_info, **{'file_key': 'geometry_t{view:1s}'})) + test.get_dataset(ds_id_500, dict(filename_info, **{"file_key": "geometry_t{view:1s}"})) class TestSLSTRCalibration(TestSLSTRL1B): """Test the implementation of the calibration factors.""" - @mock.patch('satpy.readers.slstr_l1b.xr') + @mock.patch("satpy.readers.slstr_l1b.xr") def test_radiance_calibration(self, xr_): """Test radiance calibration steps.""" from satpy.readers.slstr_l1b import CHANCALIB_FACTORS xr_.open_dataset.return_value = self.fake_dataset - ds_id = make_dataid(name='foo', calibration='radiance', - stripe='a', view='nadir') - filename_info = {'mission_id': 'S3A', 'dataset_name': 'foo', - 'start_time': 0, 'end_time': 0, - 'stripe': 'a', 'view': 'n'} + ds_id = make_dataid(name="foo", calibration="radiance", + stripe="a", view="nadir") + filename_info = {"mission_id": "S3A", "dataset_name": "foo", + "start_time": 0, "end_time": 0, + "stripe": "a", "view": "n"} - test = NCSLSTR1B('somedir/S1_radiance_co.nc', filename_info, 'c') + test = NCSLSTR1B("somedir/S1_radiance_co.nc", filename_info, "c") # Check warning is raised if we don't have calibration with warnings.catch_warnings(record=True) as w: - test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'foo'})) + test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) assert issubclass(w[-1].category, UserWarning) # Check user calibration is used correctly - test = NCSLSTR1B('somedir/S1_radiance_co.nc', filename_info, 'c', - user_calibration={'foo_nadir': 0.4}) - data = test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'foo'})) + test = NCSLSTR1B("somedir/S1_radiance_co.nc", filename_info, "c", + user_calibration={"foo_nadir": 
0.4}) + data = test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) np.testing.assert_allclose(data.values, self.base_data * 0.4) # Check internal calibration is used correctly - ds_id = make_dataid(name='S5', calibration='radiance', stripe='a', view='nadir') - filename_info['dataset_name'] = 'S5' - test = NCSLSTR1B('somedir/S1_radiance_an.nc', filename_info, 'c') - data = test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'S5'})) + ds_id = make_dataid(name="S5", calibration="radiance", stripe="a", view="nadir") + filename_info["dataset_name"] = "S5" + test = NCSLSTR1B("somedir/S1_radiance_an.nc", filename_info, "c") + data = test.get_dataset(ds_id, dict(filename_info, **{"file_key": "S5"})) np.testing.assert_allclose(data.values, - self.base_data * CHANCALIB_FACTORS['S5_nadir']) + self.base_data * CHANCALIB_FACTORS["S5_nadir"]) - @mock.patch('satpy.readers.slstr_l1b.xr') - @mock.patch('satpy.readers.slstr_l1b.da') + @mock.patch("satpy.readers.slstr_l1b.xr") + @mock.patch("satpy.readers.slstr_l1b.da") def test_reflectance_calibration(self, da_, xr_): """Test reflectance calibration.""" xr_.open_dataset.return_value = self.fake_dataset da_.map_blocks.return_value = self.rad / 100. - filename_info = {'mission_id': 'S3A', 'dataset_name': 'S5', - 'start_time': 0, 'end_time': 0, - 'stripe': 'a', 'view': 'n'} - ds_id = make_dataid(name='S5', calibration='reflectance', stripe='a', view='nadir') - test = NCSLSTR1B('somedir/S1_radiance_an.nc', filename_info, 'c') - data = test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'S5'})) - self.assertEqual(data.units, '%') + filename_info = {"mission_id": "S3A", "dataset_name": "S5", + "start_time": 0, "end_time": 0, + "stripe": "a", "view": "n"} + ds_id = make_dataid(name="S5", calibration="reflectance", stripe="a", view="nadir") + test = NCSLSTR1B("somedir/S1_radiance_an.nc", filename_info, "c") + data = test.get_dataset(ds_id, dict(filename_info, **{"file_key": "S5"})) + self.assertEqual(data.units, "%") np.testing.assert_allclose(data.values, self.rad * np.pi) def test_cal_rad(self): diff --git a/satpy/tests/reader_tests/test_smos_l2_wind.py b/satpy/tests/reader_tests/test_smos_l2_wind.py index 731cd64181..3303abff17 100644 --- a/satpy/tests/reader_tests/test_smos_l2_wind.py +++ b/satpy/tests/reader_tests/test_smos_l2_wind.py @@ -35,39 +35,39 @@ class FakeNetCDF4FileHandlerSMOSL2WIND(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" from xarray import DataArray - dt_s = filename_info.get('start_time', datetime(2020, 4, 22, 12, 0, 0)) - dt_e = filename_info.get('end_time', datetime(2020, 4, 22, 12, 0, 0)) + dt_s = filename_info.get("start_time", datetime(2020, 4, 22, 12, 0, 0)) + dt_e = filename_info.get("end_time", datetime(2020, 4, 22, 12, 0, 0)) - if filetype_info['file_type'] == 'smos_l2_wind': + if filetype_info["file_type"] == "smos_l2_wind": file_content = { - '/attr/time_coverage_start': dt_s.strftime('%Y-%m-%dT%H:%M:%S Z'), - '/attr/time_coverage_end': dt_e.strftime('%Y-%m-%dT%H:%M:%S Z'), - '/attr/platform_shortname': 'SM', - '/attr/platform': 'SMOS', - '/attr/instrument': 'MIRAS', - '/attr/processing_level': 'L2', - '/attr/geospatial_bounds_vertical_crs': 'EPSG:4623', + "/attr/time_coverage_start": dt_s.strftime("%Y-%m-%dT%H:%M:%S Z"), + "/attr/time_coverage_end": dt_e.strftime("%Y-%m-%dT%H:%M:%S Z"), + "/attr/platform_shortname": "SM", + "/attr/platform": "SMOS", + "/attr/instrument": "MIRAS", + "/attr/processing_level": "L2", + 
"/attr/geospatial_bounds_vertical_crs": "EPSG:4623", } - file_content['lat'] = np.arange(-90., 90.25, 0.25) - file_content['lat/shape'] = (len(file_content['lat']),) - file_content['lat'] = DataArray(file_content['lat'], dims=('lat')) - file_content['lat'].attrs['_FillValue'] = -999.0 - - file_content['lon'] = np.arange(0., 360., 0.25) - file_content['lon/shape'] = (len(file_content['lon']),) - file_content['lon'] = DataArray(file_content['lon'], dims=('lon')) - file_content['lon'].attrs['_FillValue'] = -999.0 - - file_content['wind_speed'] = np.ndarray(shape=(1, # Time dimension - len(file_content['lat']), - len(file_content['lon']))) - file_content['wind_speed/shape'] = (1, - len(file_content['lat']), - len(file_content['lon'])) - file_content['wind_speed'] = DataArray(file_content['wind_speed'], dims=('time', 'lat', 'lon'), - coords=[[1], file_content['lat'], file_content['lon']]) - file_content['wind_speed'].attrs['_FillValue'] = -999.0 + file_content["lat"] = np.arange(-90., 90.25, 0.25) + file_content["lat/shape"] = (len(file_content["lat"]),) + file_content["lat"] = DataArray(file_content["lat"], dims=("lat")) + file_content["lat"].attrs["_FillValue"] = -999.0 + + file_content["lon"] = np.arange(0., 360., 0.25) + file_content["lon/shape"] = (len(file_content["lon"]),) + file_content["lon"] = DataArray(file_content["lon"], dims=("lon")) + file_content["lon"].attrs["_FillValue"] = -999.0 + + file_content["wind_speed"] = np.ndarray(shape=(1, # Time dimension + len(file_content["lat"]), + len(file_content["lon"]))) + file_content["wind_speed/shape"] = (1, + len(file_content["lat"]), + len(file_content["lon"])) + file_content["wind_speed"] = DataArray(file_content["wind_speed"], dims=("time", "lat", "lon"), + coords=[[1], file_content["lat"], file_content["lon"]]) + file_content["wind_speed"].attrs["_FillValue"] = -999.0 else: raise AssertionError() @@ -84,9 +84,9 @@ def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.smos_l2_wind import SMOSL2WINDFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(SMOSL2WINDFileHandler, '__bases__', (FakeNetCDF4FileHandlerSMOSL2WIND,)) + self.p = mock.patch.object(SMOSL2WINDFileHandler, "__bases__", (FakeNetCDF4FileHandlerSMOSL2WIND,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -99,7 +99,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc', + "SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -110,20 +110,20 @@ def test_load_wind_speed(self): """Load wind_speed dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.smos_l2_wind.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.smos_l2_wind.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc', + "SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", ]) r.create_filehandlers(loadables) - ds = r.load(['wind_speed']) + ds 
= r.load(["wind_speed"]) self.assertEqual(len(ds), 1) for d in ds.values(): - self.assertEqual(d.attrs['platform_shortname'], 'SM') - self.assertEqual(d.attrs['sensor'], 'MIRAS') - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) - self.assertIn('y', d.dims) - self.assertIn('x', d.dims) + self.assertEqual(d.attrs["platform_shortname"], "SM") + self.assertEqual(d.attrs["sensor"], "MIRAS") + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) + self.assertIn("y", d.dims) + self.assertIn("x", d.dims) self.assertEqual(d.shape, (719, 1440)) self.assertEqual(d.y[0].data, -89.75) self.assertEqual(d.y[d.shape[0] - 1].data, 89.75) @@ -132,15 +132,15 @@ def test_load_lat(self): """Load lat dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.smos_l2_wind.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.smos_l2_wind.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc', + "SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", ]) r.create_filehandlers(loadables) - ds = r.load(['lat']) + ds = r.load(["lat"]) self.assertEqual(len(ds), 1) for d in ds.values(): - self.assertIn('y', d.dims) + self.assertIn("y", d.dims) self.assertEqual(d.shape, (719,)) self.assertEqual(d.data[0], -89.75) self.assertEqual(d.data[d.shape[0] - 1], 89.75) @@ -149,15 +149,15 @@ def test_load_lon(self): """Load lon dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.smos_l2_wind.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.smos_l2_wind.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc', + "SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", ]) r.create_filehandlers(loadables) - ds = r.load(['lon']) + ds = r.load(["lon"]) self.assertEqual(len(ds), 1) for d in ds.values(): - self.assertIn('x', d.dims) + self.assertIn("x", d.dims) self.assertEqual(d.shape, (1440,)) self.assertEqual(d.data[0], -180.0) self.assertEqual(d.data[d.shape[0] - 1], 179.75) @@ -167,13 +167,13 @@ def test_adjust_lon(self): from xarray import DataArray from satpy.readers.smos_l2_wind import SMOSL2WINDFileHandler - smos_l2_wind_fh = SMOSL2WINDFileHandler('SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc', - {}, filetype_info={'file_type': 'smos_l2_wind'}) - data = DataArray(np.arange(0., 360., 0.25), dims=('lon')) + smos_l2_wind_fh = SMOSL2WINDFileHandler("SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", + {}, filetype_info={"file_type": "smos_l2_wind"}) + data = DataArray(np.arange(0., 360., 0.25), dims=("lon")) adjusted = smos_l2_wind_fh._adjust_lon_coord(data) expected = DataArray(np.concatenate((np.arange(0, 180., 0.25), np.arange(-180.0, 0, 0.25))), - dims=('lon')) + dims=("lon")) self.assertEqual(adjusted.data.tolist(), expected.data.tolist()) def test_roll_dataset(self): @@ -181,9 +181,9 @@ def test_roll_dataset(self): from xarray import DataArray from satpy.readers.smos_l2_wind import SMOSL2WINDFileHandler - smos_l2_wind_fh = SMOSL2WINDFileHandler('SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc', - {}, filetype_info={'file_type': 'smos_l2_wind'}) - data = DataArray(np.arange(0., 360., 0.25), dims=('lon')) + smos_l2_wind_fh = 
SMOSL2WINDFileHandler("SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", + {}, filetype_info={"file_type": "smos_l2_wind"}) + data = DataArray(np.arange(0., 360., 0.25), dims=("lon")) data = smos_l2_wind_fh._adjust_lon_coord(data) adjusted = smos_l2_wind_fh._roll_dataset_lon_coord(data) expected = np.arange(-180., 180., 0.25) diff --git a/satpy/tests/reader_tests/test_tropomi_l2.py b/satpy/tests/reader_tests/test_tropomi_l2.py index 4b6e3a8652..f2b3660089 100644 --- a/satpy/tests/reader_tests/test_tropomi_l2.py +++ b/satpy/tests/reader_tests/test_tropomi_l2.py @@ -41,41 +41,41 @@ class FakeNetCDF4FileHandlerTL2(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" - dt_s = filename_info.get('start_time', datetime(2016, 1, 1, 12, 0, 0)) - dt_e = filename_info.get('end_time', datetime(2016, 1, 1, 12, 0, 0)) + dt_s = filename_info.get("start_time", datetime(2016, 1, 1, 12, 0, 0)) + dt_e = filename_info.get("end_time", datetime(2016, 1, 1, 12, 0, 0)) - if filetype_info['file_type'] == 'tropomi_l2': + if filetype_info["file_type"] == "tropomi_l2": file_content = { - '/attr/time_coverage_start': (dt_s+timedelta(minutes=22)).strftime('%Y-%m-%dT%H:%M:%SZ'), - '/attr/time_coverage_end': (dt_e-timedelta(minutes=22)).strftime('%Y-%m-%dT%H:%M:%SZ'), - '/attr/platform_shortname': 'S5P', - '/attr/sensor': 'TROPOMI', + "/attr/time_coverage_start": (dt_s+timedelta(minutes=22)).strftime("%Y-%m-%dT%H:%M:%SZ"), + "/attr/time_coverage_end": (dt_e-timedelta(minutes=22)).strftime("%Y-%m-%dT%H:%M:%SZ"), + "/attr/platform_shortname": "S5P", + "/attr/sensor": "TROPOMI", } - file_content['PRODUCT/latitude'] = DEFAULT_FILE_DATA - file_content['PRODUCT/longitude'] = DEFAULT_FILE_DATA - file_content['PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds'] = DEFAULT_BOUND_DATA - file_content['PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds'] = DEFAULT_BOUND_DATA + file_content["PRODUCT/latitude"] = DEFAULT_FILE_DATA + file_content["PRODUCT/longitude"] = DEFAULT_FILE_DATA + file_content["PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds"] = DEFAULT_BOUND_DATA + file_content["PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds"] = DEFAULT_BOUND_DATA - if 'NO2' in filename: - file_content['PRODUCT/nitrogen_dioxide_total_column'] = DEFAULT_FILE_DATA - if 'SO2' in filename: - file_content['PRODUCT/sulfurdioxide_total_vertical_column'] = DEFAULT_FILE_DATA + if "NO2" in filename: + file_content["PRODUCT/nitrogen_dioxide_total_column"] = DEFAULT_FILE_DATA + if "SO2" in filename: + file_content["PRODUCT/sulfurdioxide_total_vertical_column"] = DEFAULT_FILE_DATA for k in list(file_content.keys()): - if not k.startswith('PRODUCT'): + if not k.startswith("PRODUCT"): continue - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE self._convert_data_content_to_dataarrays(file_content) - file_content['PRODUCT/latitude'].attrs['_FillValue'] = -999.0 - file_content['PRODUCT/longitude'].attrs['_FillValue'] = -999.0 - file_content['PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds'].attrs['_FillValue'] = -999.0 - file_content['PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds'].attrs['_FillValue'] = -999.0 - if 'NO2' in filename: - file_content['PRODUCT/nitrogen_dioxide_total_column'].attrs['_FillValue'] = -999.0 - if 'SO2' in filename: - file_content['PRODUCT/sulfurdioxide_total_vertical_column'].attrs['_FillValue'] = -999.0 + file_content["PRODUCT/latitude"].attrs["_FillValue"] = -999.0 + 
file_content["PRODUCT/longitude"].attrs["_FillValue"] = -999.0 + file_content["PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds"].attrs["_FillValue"] = -999.0 + file_content["PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds"].attrs["_FillValue"] = -999.0 + if "NO2" in filename: + file_content["PRODUCT/nitrogen_dioxide_total_column"].attrs["_FillValue"] = -999.0 + if "SO2" in filename: + file_content["PRODUCT/sulfurdioxide_total_vertical_column"].attrs["_FillValue"] = -999.0 else: raise NotImplementedError("Test data for file types other than " @@ -89,9 +89,9 @@ def _convert_data_content_to_dataarrays(self, file_content): for key, val in file_content.items(): if isinstance(val, np.ndarray): if 1 < val.ndim <= 2: - file_content[key] = DataArray(val, dims=('scanline', 'ground_pixel')) + file_content[key] = DataArray(val, dims=("scanline", "ground_pixel")) elif val.ndim > 2: - file_content[key] = DataArray(val, dims=('scanline', 'ground_pixel', 'corner')) + file_content[key] = DataArray(val, dims=("scanline", "ground_pixel", "corner")) else: file_content[key] = DataArray(val) @@ -105,9 +105,9 @@ def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.tropomi_l2 import TROPOMIL2FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(TROPOMIL2FileHandler, '__bases__', (FakeNetCDF4FileHandlerTL2,)) + self.p = mock.patch.object(TROPOMIL2FileHandler, "__bases__", (FakeNetCDF4FileHandlerTL2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -120,7 +120,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc', + "S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -131,69 +131,69 @@ def test_load_no2(self): """Load NO2 dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.tropomi_l2.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.tropomi_l2.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc', + "S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc", ]) r.create_filehandlers(loadables) - ds = r.load(['nitrogen_dioxide_total_column']) + ds = r.load(["nitrogen_dioxide_total_column"]) self.assertEqual(len(ds), 1) for d in ds.values(): - self.assertEqual(d.attrs['platform_shortname'], 'S5P') - self.assertEqual(d.attrs['sensor'], 'tropomi') - self.assertEqual(d.attrs['time_coverage_start'], datetime(2018, 7, 9, 17, 25, 34)) - self.assertEqual(d.attrs['time_coverage_end'], datetime(2018, 7, 9, 18, 23, 4)) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) - self.assertIn('y', d.dims) - self.assertIn('x', d.dims) + self.assertEqual(d.attrs["platform_shortname"], "S5P") + self.assertEqual(d.attrs["sensor"], "tropomi") + self.assertEqual(d.attrs["time_coverage_start"], datetime(2018, 7, 9, 17, 25, 34)) + 
self.assertEqual(d.attrs["time_coverage_end"], datetime(2018, 7, 9, 18, 23, 4)) + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) + self.assertIn("y", d.dims) + self.assertIn("x", d.dims) def test_load_so2(self): """Load SO2 dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.tropomi_l2.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.tropomi_l2.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'S5P_OFFL_L2__SO2____20181224T055107_20181224T073237_06198_01_010105_20181230T150634.nc', + "S5P_OFFL_L2__SO2____20181224T055107_20181224T073237_06198_01_010105_20181230T150634.nc", ]) r.create_filehandlers(loadables) - ds = r.load(['sulfurdioxide_total_vertical_column']) + ds = r.load(["sulfurdioxide_total_vertical_column"]) self.assertEqual(len(ds), 1) for d in ds.values(): - self.assertEqual(d.attrs['platform_shortname'], 'S5P') - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) - self.assertIn('y', d.dims) - self.assertIn('x', d.dims) + self.assertEqual(d.attrs["platform_shortname"], "S5P") + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) + self.assertIn("y", d.dims) + self.assertIn("x", d.dims) def test_load_bounds(self): """Load bounds dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.tropomi_l2.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.tropomi_l2.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc', + "S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc", ]) r.create_filehandlers(loadables) - keys = ['latitude_bounds', 'longitude_bounds'] + keys = ["latitude_bounds", "longitude_bounds"] ds = r.load(keys) self.assertEqual(len(ds), 2) for key in keys: - self.assertEqual(ds[key].attrs['platform_shortname'], 'S5P') - self.assertIn('y', ds[key].dims) - self.assertIn('x', ds[key].dims) - self.assertIn('corner', ds[key].dims) + self.assertEqual(ds[key].attrs["platform_shortname"], "S5P") + self.assertIn("y", ds[key].dims) + self.assertIn("x", ds[key].dims) + self.assertIn("corner", ds[key].dims) # check assembled bounds left = np.vstack([ds[key][:, :, 0], ds[key][-1:, :, 3]]) right = np.vstack([ds[key][:, -1:, 1], ds[key][-1:, -1:, 2]]) dest = np.hstack([left, right]) dest = xr.DataArray(dest, - dims=('y', 'x') + dims=("y", "x") ) dest.attrs = ds[key].attrs - self.assertEqual(dest.attrs['platform_shortname'], 'S5P') - self.assertIn('y', dest.dims) - self.assertIn('x', dest.dims) + self.assertEqual(dest.attrs["platform_shortname"], "S5P") + self.assertIn("y", dest.dims) + self.assertIn("x", dest.dims) self.assertEqual(DEFAULT_FILE_SHAPE[0] + 1, dest.shape[0]) self.assertEqual(DEFAULT_FILE_SHAPE[1] + 1, dest.shape[1]) self.assertIsNone(np.testing.assert_array_equal(dest[:-1, :-1], ds[key][:, :, 0])) diff --git a/satpy/tests/reader_tests/test_utils.py b/satpy/tests/reader_tests/test_utils.py index 54b156e4c5..12af80ca2a 100644 --- a/satpy/tests/reader_tests/test_utils.py +++ b/satpy/tests/reader_tests/test_utils.py @@ -45,11 +45,11 @@ def test_lonlat_from_geos(self): lon_0 = 0 h = 35785831.00 geos_area.crs = CRS({ - 'a': 6378169.00, - 'b': 6356583.80, - 'h': h, - 'lon_0': lon_0, - 'proj': 'geos'}) + "a": 6378169.00, + "b": 6356583.80, + "h": h, + "lon_0": lon_0, + "proj": 
"geos"}) proj = pyproj.Proj(geos_area.crs) expected = proj(0, 0, inverse=True) @@ -77,12 +77,12 @@ def test_get_geostationary_bbox(self): geos_area = mock.MagicMock() lon_0 = 0 geos_area.crs = CRS({ - 'proj': 'geos', - 'lon_0': lon_0, - 'a': 6378169.00, - 'b': 6356583.80, - 'h': 35785831.00, - 'units': 'm'}) + "proj": "geos", + "lon_0": lon_0, + "a": 6378169.00, + "b": 6356583.80, + "h": 35785831.00, + "units": "m"}) geos_area.area_extent = [-5500000., -5500000., 5500000., 5500000.] lon, lat = hf.get_geostationary_bounding_box(geos_area, 20) @@ -107,21 +107,21 @@ def test_get_geostationary_angle_extent(self): """Get max geostationary angles.""" geos_area = mock.MagicMock() proj_dict = { - 'proj': 'geos', - 'sweep': 'x', - 'lon_0': -89.5, - 'a': 6378169.00, - 'b': 6356583.80, - 'h': 35785831.00, - 'units': 'm'} + "proj": "geos", + "sweep": "x", + "lon_0": -89.5, + "a": 6378169.00, + "b": 6356583.80, + "h": 35785831.00, + "units": "m"} geos_area.crs = CRS(proj_dict) expected = (0.15185342867090912, 0.15133555510297725) np.testing.assert_allclose(expected, hf.get_geostationary_angle_extent(geos_area)) - proj_dict['a'] = 1000.0 - proj_dict['b'] = 1000.0 - proj_dict['h'] = np.sqrt(2) * 1000.0 - 1000.0 + proj_dict["a"] = 1000.0 + proj_dict["b"] = 1000.0 + proj_dict["h"] = np.sqrt(2) * 1000.0 - 1000.0 geos_area.reset_mock() geos_area.crs = CRS(proj_dict) expected = (np.deg2rad(45), np.deg2rad(45)) @@ -129,12 +129,12 @@ def test_get_geostationary_angle_extent(self): hf.get_geostationary_angle_extent(geos_area)) proj_dict = { - 'proj': 'geos', - 'sweep': 'x', - 'lon_0': -89.5, - 'ellps': 'GRS80', - 'h': 35785831.00, - 'units': 'm'} + "proj": "geos", + "sweep": "x", + "lon_0": -89.5, + "ellps": "GRS80", + "h": 35785831.00, + "units": "m"} geos_area.crs = CRS(proj_dict) expected = (0.15185277703584374, 0.15133971368991794) np.testing.assert_allclose(expected, @@ -144,15 +144,15 @@ def test_geostationary_mask(self): """Test geostationary mask.""" # Compute mask of a very elliptical earth area = pyresample.geometry.AreaDefinition( - 'FLDK', - 'Full Disk', - 'geos', - {'a': '6378169.0', - 'b': '3000000.0', - 'h': '35785831.0', - 'lon_0': '145.0', - 'proj': 'geos', - 'units': 'm'}, + "FLDK", + "Full Disk", + "geos", + {"a": "6378169.0", + "b": "3000000.0", + "h": "35785831.0", + "lon_0": "145.0", + "proj": "geos", + "units": "m"}, 101, 101, (-6498000.088960204, -6498000.088960204, @@ -181,37 +181,37 @@ def test_geostationary_mask(self): self.assertTrue(np.all(mask[range(68-1, 33-1, -1), range(33, 68)] == 1)) self.assertTrue(np.all(mask[range(33-1, -1, -1), range(68, 101)] == 0)) - @mock.patch('satpy.readers.utils.AreaDefinition') + @mock.patch("satpy.readers.utils.AreaDefinition") def test_sub_area(self, adef): """Sub area slicing.""" area = mock.MagicMock() area.pixel_size_x = 1.5 area.pixel_size_y = 1.5 area.pixel_upper_left = (0, 0) - area.area_id = 'fakeid' - area.name = 'fake name' - area.proj_id = 'fakeproj' - area.crs = 'some_crs' + area.area_id = "fakeid" + area.name = "fake name" + area.proj_id = "fakeproj" + area.crs = "some_crs" hf.get_sub_area(area, slice(1, 4), slice(0, 3)) - adef.assert_called_once_with('fakeid', 'fake name', 'fakeproj', - 'some_crs', + adef.assert_called_once_with("fakeid", "fake name", "fakeproj", + "some_crs", 3, 3, (0.75, -3.75, 5.25, 0.75)) def test_np2str(self): """Test the np2str function.""" # byte object - npstring = np.string_('hej') - self.assertEqual(hf.np2str(npstring), 'hej') + npstring = np.string_("hej") + self.assertEqual(hf.np2str(npstring), "hej") # 
single element numpy array np_arr = np.array([npstring]) - self.assertEqual(hf.np2str(np_arr), 'hej') + self.assertEqual(hf.np2str(np_arr), "hej") # scalar numpy array np_arr = np.array(npstring) - self.assertEqual(hf.np2str(np_arr), 'hej') + self.assertEqual(hf.np2str(np_arr), "hej") # multi-element array npstring = np.array([npstring, npstring]) @@ -243,44 +243,44 @@ def re(lat): def test_reduce_mda(self): """Test metadata size reduction.""" - mda = {'a': 1, - 'b': np.array([1, 2, 3]), - 'c': np.array([1, 2, 3, 4]), - 'd': {'a': 1, - 'b': np.array([1, 2, 3]), - 'c': np.array([1, 2, 3, 4]), - 'd': {'a': 1, - 'b': np.array([1, 2, 3]), - 'c': np.array([1, 2, 3, 4])}}} - exp = {'a': 1, - 'b': np.array([1, 2, 3]), - 'd': {'a': 1, - 'b': np.array([1, 2, 3]), - 'd': {'a': 1, - 'b': np.array([1, 2, 3])}}} + mda = {"a": 1, + "b": np.array([1, 2, 3]), + "c": np.array([1, 2, 3, 4]), + "d": {"a": 1, + "b": np.array([1, 2, 3]), + "c": np.array([1, 2, 3, 4]), + "d": {"a": 1, + "b": np.array([1, 2, 3]), + "c": np.array([1, 2, 3, 4])}}} + exp = {"a": 1, + "b": np.array([1, 2, 3]), + "d": {"a": 1, + "b": np.array([1, 2, 3]), + "d": {"a": 1, + "b": np.array([1, 2, 3])}}} numpy.testing.assert_equal(hf.reduce_mda(mda, max_size=3), exp) # Make sure, reduce_mda() doesn't modify the original dictionary - self.assertIn('c', mda) - self.assertIn('c', mda['d']) - self.assertIn('c', mda['d']['d']) + self.assertIn("c", mda) + self.assertIn("c", mda["d"]) + self.assertIn("c", mda["d"]["d"]) - @mock.patch('satpy.readers.utils.bz2.BZ2File') - @mock.patch('satpy.readers.utils.Popen') + @mock.patch("satpy.readers.utils.bz2.BZ2File") + @mock.patch("satpy.readers.utils.Popen") def test_unzip_file(self, mock_popen, mock_bz2): """Test the bz2 file unzipping techniques.""" process_mock = mock.Mock() - attrs = {'communicate.return_value': (b'output', b'error'), - 'returncode': 0} + attrs = {"communicate.return_value": (b"output", b"error"), + "returncode": 0} process_mock.configure_mock(**attrs) mock_popen.return_value = process_mock bz2_mock = mock.MagicMock() - bz2_mock.__enter__.return_value.read.return_value = b'TEST' + bz2_mock.__enter__.return_value.read.return_value = b"TEST" mock_bz2.return_value = bz2_mock - filename = 'tester.DAT.bz2' - whichstr = 'satpy.readers.utils.which' + filename = "tester.DAT.bz2" + whichstr = "satpy.readers.utils.which" segment = 3 segmentstr = str(segment).zfill(2) # no pbzip2 installed with prefix @@ -294,7 +294,7 @@ def test_unzip_file(self, mock_popen, mock_bz2): os.remove(new_fname) # pbzip2 installed without prefix with mock.patch(whichstr) as whichmock: - whichmock.return_value = '/usr/bin/pbzip2' + whichmock.return_value = "/usr/bin/pbzip2" new_fname = hf.unzip_file(filename) assert mock_popen.called assert os.path.exists(new_fname) @@ -302,21 +302,21 @@ def test_unzip_file(self, mock_popen, mock_bz2): if os.path.exists(new_fname): os.remove(new_fname) - filename = 'tester.DAT' + filename = "tester.DAT" new_fname = hf.unzip_file(filename) assert new_fname is None - @mock.patch('bz2.BZ2File') + @mock.patch("bz2.BZ2File") def test_generic_open_BZ2File(self, bz2_mock): """Test the generic_open method with bz2 filename input.""" mock_bz2_open = mock.MagicMock() - mock_bz2_open.read.return_value = b'TEST' + mock_bz2_open.read.return_value = b"TEST" bz2_mock.return_value = mock_bz2_open - filename = 'tester.DAT.bz2' + filename = "tester.DAT.bz2" with hf.generic_open(filename) as file_object: data = file_object.read() - assert data == b'TEST' + assert data == b"TEST" assert 
mock_bz2_open.read.called @@ -328,27 +328,27 @@ def test_generic_open_FSFile_MemoryFileSystem(self): fsf = FSFile(mem_file) with hf.generic_open(fsf) as file_object: data = file_object.read() - assert data == b'TEST' + assert data == b"TEST" - @mock.patch('satpy.readers.utils.open') + @mock.patch("satpy.readers.utils.open") def test_generic_open_filename(self, open_mock): """Test the generic_open method with filename (str).""" mock_fn_open = mock.MagicMock() - mock_fn_open.read.return_value = b'TEST' + mock_fn_open.read.return_value = b"TEST" open_mock.return_value = mock_fn_open filename = "test.DAT" with hf.generic_open(filename) as file_object: data = file_object.read() - assert data == b'TEST' + assert data == b"TEST" assert mock_fn_open.read.called - @mock.patch('bz2.decompress', return_value=b'TEST_DECOMPRESSED') + @mock.patch("bz2.decompress", return_value=b"TEST_DECOMPRESSED") def test_unzip_FSFile(self, bz2_mock): """Test the FSFile bz2 file unzipping techniques.""" mock_bz2_decompress = mock.MagicMock() - mock_bz2_decompress.return_value = b'TEST_DECOMPRESSED' + mock_bz2_decompress.return_value = b"TEST_DECOMPRESSED" segment = 3 segmentstr = str(segment).zfill(2) @@ -382,10 +382,10 @@ def test_unzip_FSFile(self, bz2_mock): os.remove(new_fname) @mock.patch("os.remove") - @mock.patch("satpy.readers.utils.unzip_file", return_value='dummy.txt') + @mock.patch("satpy.readers.utils.unzip_file", return_value="dummy.txt") def test_pro_reading_gets_unzipped_file(self, fake_unzip_file, fake_remove): """Test the bz2 file unzipping context manager.""" - filename = 'dummy.txt.bz2' + filename = "dummy.txt.bz2" expected_filename = filename[:-4] with hf.unzip_context(filename) as new_filename: @@ -403,24 +403,24 @@ def test_apply_rad_correction(self): def test_get_user_calibration_factors(self): """Test the retrieval of user-supplied calibration factors.""" - radcor_dict = {'WV063': {'slope': 1.015, - 'offset': -0.0556}, - 'IR108': {'slo': 1.015, - 'off': -0.0556}} + radcor_dict = {"WV063": {"slope": 1.015, + "offset": -0.0556}, + "IR108": {"slo": 1.015, + "off": -0.0556}} # Test that correct values are returned from the dict - slope, offset = hf.get_user_calibration_factors('WV063', radcor_dict) + slope, offset = hf.get_user_calibration_factors("WV063", radcor_dict) self.assertEqual(slope, 1.015) self.assertEqual(offset, -0.0556) # Test that channels not present in dict return 1.0, 0.0 with self.assertWarns(UserWarning): - slope, offset = hf.get_user_calibration_factors('IR097', radcor_dict) + slope, offset = hf.get_user_calibration_factors("IR097", radcor_dict) self.assertEqual(slope, 1.) self.assertEqual(offset, 0.) 
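        # Editor's note: a minimal sketch (hypothetical, not the actual
        # satpy.readers.utils implementation) of the lookup behaviour pinned down
        # by the assertions in this test: known channels return their
        # (slope, offset) pair, unknown channels fall back to (1.0, 0.0) with a
        # UserWarning, and malformed entries are left to raise KeyError
        # (checked just below).
        import warnings

        def sketch_get_user_calibration_factors(channel, radcor_dict):
            """Return (slope, offset) for channel, defaulting to neutral factors."""
            if channel not in radcor_dict:
                warnings.warn("No calibration factors for " + channel, UserWarning)
                return 1.0, 0.0
            return radcor_dict[channel]["slope"], radcor_dict[channel]["offset"]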
# Check that incorrect dict keys throw an error with self.assertRaises(KeyError): - hf.get_user_calibration_factors('IR108', radcor_dict) + hf.get_user_calibration_factors("IR108", radcor_dict) class TestSunEarthDistanceCorrection: @@ -431,15 +431,15 @@ def setup_method(self): self.test_date = datetime(2020, 8, 15, 13, 0, 40) raw_refl = xr.DataArray(da.from_array([10., 20., 40., 1., 98., 50.]), - attrs={'start_time': self.test_date, - 'scheduled_time': self.test_date}) + attrs={"start_time": self.test_date, + "scheduled_time": self.test_date}) corr_refl = xr.DataArray(da.from_array([ 10.25484833, 20.50969667, 41.01939333, 1.02548483, 100.49751367, 51.27424167]), - attrs={'start_time': self.test_date, - 'scheduled_time': self.test_date}, + attrs={"start_time": self.test_date, + "scheduled_time": self.test_date}, ) self.raw_refl = raw_refl self.corr_refl = corr_refl @@ -448,13 +448,13 @@ def test_get_utc_time(self): """Test the retrieval of scene time from a dataset.""" # First check correct time is returned with 'start_time' tmp_array = self.raw_refl.copy() - del tmp_array.attrs['scheduled_time'] + del tmp_array.attrs["scheduled_time"] utc_time = hf.get_array_date(tmp_array, None) assert utc_time == self.test_date # Now check correct time is returned with 'scheduled_time' tmp_array = self.raw_refl.copy() - del tmp_array.attrs['start_time'] + del tmp_array.attrs["start_time"] utc_time = hf.get_array_date(tmp_array, None) assert utc_time == self.test_date @@ -466,8 +466,8 @@ def test_get_utc_time(self): # Finally, ensure error is raised if no datetime is available tmp_array = self.raw_refl.copy() - del tmp_array.attrs['scheduled_time'] - del tmp_array.attrs['start_time'] + del tmp_array.attrs["scheduled_time"] + del tmp_array.attrs["start_time"] with pytest.raises(KeyError): hf.get_array_date(tmp_array, None) @@ -475,14 +475,14 @@ def test_apply_sunearth_corr(self): """Test the correction of reflectances with sun-earth distance.""" out_refl = hf.apply_earthsun_distance_correction(self.raw_refl) np.testing.assert_allclose(out_refl, self.corr_refl) - assert out_refl.attrs['sun_earth_distance_correction_applied'] + assert out_refl.attrs["sun_earth_distance_correction_applied"] assert isinstance(out_refl.data, da.Array) def test_remove_sunearth_corr(self): """Test the removal of the sun-earth distance correction.""" out_refl = hf.remove_earthsun_distance_correction(self.corr_refl) np.testing.assert_allclose(out_refl, self.raw_refl) - assert not out_refl.attrs['sun_earth_distance_correction_applied'] + assert not out_refl.attrs["sun_earth_distance_correction_applied"] assert isinstance(out_refl.data, da.Array) @@ -493,19 +493,19 @@ def test_generic_open_binary(tmp_path, data, filename, mode): """Test the bz2 file unzipping context manager using dummy binary data.""" dummy_data = data dummy_filename = os.fspath(tmp_path / filename) - with open(dummy_filename, 'w' + mode) as f: + with open(dummy_filename, "w" + mode) as f: f.write(dummy_data) - with hf.generic_open(dummy_filename, 'r' + mode) as f: + with hf.generic_open(dummy_filename, "r" + mode) as f: read_binary_data = f.read() assert read_binary_data == dummy_data - dummy_filename = os.fspath(tmp_path / (filename + '.bz2')) - with hf.bz2.open(dummy_filename, 'w' + mode) as f: + dummy_filename = os.fspath(tmp_path / (filename + ".bz2")) + with hf.bz2.open(dummy_filename, "w" + mode) as f: f.write(dummy_data) - with hf.generic_open(dummy_filename, 'r' + mode) as f: + with hf.generic_open(dummy_filename, "r" + mode) as f: read_binary_data = 
f.read() assert read_binary_data == dummy_data diff --git a/satpy/tests/reader_tests/test_vaisala_gld360.py b/satpy/tests/reader_tests/test_vaisala_gld360.py index 9add08b1d2..c08450613a 100644 --- a/satpy/tests/reader_tests/test_vaisala_gld360.py +++ b/satpy/tests/reader_tests/test_vaisala_gld360.py @@ -34,13 +34,13 @@ def test_vaisala_gld360(self): expected_power = np.array([12.3, 13.2, -31.]) expected_lat = np.array([30.5342, -0.5727, 12.1529]) expected_lon = np.array([-90.1152, 104.0688, -10.8756]) - expected_time = np.array(['2017-06-20T00:00:00.007178000', '2017-06-20T00:00:00.020162000', - '2017-06-20T00:00:00.023183000'], dtype='datetime64[ns]') + expected_time = np.array(["2017-06-20T00:00:00.007178000", "2017-06-20T00:00:00.020162000", + "2017-06-20T00:00:00.023183000"], dtype="datetime64[ns]") filename = StringIO( - u'2017-06-20 00:00:00.007178 30.5342 -90.1152 12.3 kA\n' - '2017-06-20 00:00:00.020162 -0.5727 104.0688 13.2 kA\n' - '2017-06-20 00:00:00.023183 12.1529 -10.8756 -31.0 kA' + u"2017-06-20 00:00:00.007178 30.5342 -90.1152 12.3 kA\n" + "2017-06-20 00:00:00.020162 -0.5727 104.0688 13.2 kA\n" + "2017-06-20 00:00:00.023183 12.1529 -10.8756 -31.0 kA" ) filename_info = {} filetype_info = {} @@ -52,25 +52,25 @@ def test_vaisala_gld360(self): filename.close() # test power - dataset_id = make_dataid(name='power') - dataset_info = {'units': 'kA'} + dataset_id = make_dataid(name="power") + dataset_info = {"units": "kA"} result = self.handler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(result, expected_power, rtol=1e-05) # test lat - dataset_id = make_dataid(name='latitude') + dataset_id = make_dataid(name="latitude") dataset_info = {} result = self.handler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(result, expected_lat, rtol=1e-05) # test lon - dataset_id = make_dataid(name='longitude') + dataset_id = make_dataid(name="longitude") dataset_info = {} result = self.handler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(result, expected_lon, rtol=1e-05) # test time - dataset_id = make_dataid(name='time') + dataset_id = make_dataid(name="time") dataset_info = {} result = self.handler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(result, expected_time) diff --git a/satpy/tests/reader_tests/test_vii_base_nc.py b/satpy/tests/reader_tests/test_vii_base_nc.py index c2afc04356..4f5dbcd141 100644 --- a/satpy/tests/reader_tests/test_vii_base_nc.py +++ b/satpy/tests/reader_tests/test_vii_base_nc.py @@ -30,20 +30,20 @@ from satpy.readers.vii_base_nc import SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR, ViiNCBaseFileHandler -TEST_FILE = 'test_file_vii_base_nc.nc' +TEST_FILE = "test_file_vii_base_nc.nc" class TestViiNCBaseFileHandler(unittest.TestCase): """Test the ViiNCBaseFileHandler reader.""" - @mock.patch('satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_geo_interpolation') + @mock.patch("satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_geo_interpolation") def setUp(self, pgi_): """Set up the test.""" # Easiest way to test the reader is to create a test netCDF file on the fly # uses a UUID to avoid permission conflicts during execution of tests in parallel self.test_file_name = TEST_FILE + str(uuid.uuid1()) + ".nc" - with Dataset(self.test_file_name, 'w') as nc: + with Dataset(self.test_file_name, "w") as nc: # Add global attributes nc.sensing_start_time_utc = "20170920173040.888" nc.sensing_end_time_utc = "20170920174117.555" @@ -51,50 +51,50 @@ def setUp(self, pgi_): 
nc.instrument = "test_instrument" # Create data group - g1 = nc.createGroup('data') + g1 = nc.createGroup("data") # Add dimensions to data group - g1.createDimension('num_pixels', 10) - g1.createDimension('num_lines', 100) + g1.createDimension("num_pixels", 10) + g1.createDimension("num_lines", 100) # Create data/measurement_data group - g1_1 = g1.createGroup('measurement_data') + g1_1 = g1.createGroup("measurement_data") # Add dimensions to data/measurement_data group - g1_1.createDimension('num_tie_points_act', 10) - g1_1.createDimension('num_tie_points_alt', 100) + g1_1.createDimension("num_tie_points_act", 10) + g1_1.createDimension("num_tie_points_alt", 100) # Add variables to data/measurement_data group - tpw = g1_1.createVariable('tpw', np.float32, dimensions=('num_pixels', 'num_lines')) + tpw = g1_1.createVariable("tpw", np.float32, dimensions=("num_pixels", "num_lines")) tpw[:] = 1. - tpw.test_attr = 'attr' - lon = g1_1.createVariable('longitude', + tpw.test_attr = "attr" + lon = g1_1.createVariable("longitude", np.float32, - dimensions=('num_tie_points_act', 'num_tie_points_alt')) + dimensions=("num_tie_points_act", "num_tie_points_alt")) lon[:] = 100. - lat = g1_1.createVariable('latitude', + lat = g1_1.createVariable("latitude", np.float32, - dimensions=('num_tie_points_act', 'num_tie_points_alt')) + dimensions=("num_tie_points_act", "num_tie_points_alt")) lat[:] = 10. # Create quality group - g2 = nc.createGroup('quality') + g2 = nc.createGroup("quality") # Add dimensions to quality group - g2.createDimension('gap_items', 2) + g2.createDimension("gap_items", 2) # Add variables to quality group - var = g2.createVariable('duration_of_product', np.double, dimensions=()) + var = g2.createVariable("duration_of_product", np.double, dimensions=()) var[:] = 1.0 - var = g2.createVariable('duration_of_data_present', np.double, dimensions=()) + var = g2.createVariable("duration_of_data_present", np.double, dimensions=()) var[:] = 2.0 - var = g2.createVariable('duration_of_data_missing', np.double, dimensions=()) + var = g2.createVariable("duration_of_data_missing", np.double, dimensions=()) var[:] = 3.0 - var = g2.createVariable('duration_of_data_degraded', np.double, dimensions=()) + var = g2.createVariable("duration_of_data_degraded", np.double, dimensions=()) var[:] = 4.0 - var = g2.createVariable('gap_start_time_utc', np.double, dimensions=('gap_items',)) + var = g2.createVariable("gap_start_time_utc", np.double, dimensions=("gap_items",)) var[:] = [5.0, 6.0] - var = g2.createVariable('gap_end_time_utc', np.double, dimensions=('gap_items',)) + var = g2.createVariable("gap_end_time_utc", np.double, dimensions=("gap_items",)) var[:] = [7.0, 8.0] # Create longitude and latitude "interpolated" arrays @@ -104,11 +104,11 @@ def setUp(self, pgi_): # Filename info valid for all readers filename_info = { - 'creation_time': datetime.datetime(year=2017, month=9, day=22, + "creation_time": datetime.datetime(year=2017, month=9, day=22, hour=22, minute=40, second=10), - 'sensing_start_time': datetime.datetime(year=2017, month=9, day=20, + "sensing_start_time": datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), - 'sensing_end_time': datetime.datetime(year=2017, month=9, day=20, + "sensing_end_time": datetime.datetime(year=2017, month=9, day=20, hour=18, minute=30, second=50) } @@ -117,8 +117,8 @@ def setUp(self, pgi_): filename=self.test_file_name, filename_info=filename_info, filetype_info={ - 'cached_longitude': 'data/measurement_data/longitude', - 'cached_latitude': 
'data/measurement_data/latitude' + "cached_longitude": "data/measurement_data/longitude", + "cached_latitude": "data/measurement_data/latitude" } ) @@ -128,10 +128,10 @@ def setUp(self, pgi_): filename=self.test_file_name, filename_info=filename_info, filetype_info={ - 'cached_longitude': 'data/measurement_data/longitude', - 'cached_latitude': 'data/measurement_data/latitude', - 'interpolate': False, - 'orthorect': False + "cached_longitude": "data/measurement_data/longitude", + "cached_latitude": "data/measurement_data/latitude", + "interpolate": False, + "orthorect": False }, orthorect=True ) @@ -170,24 +170,24 @@ def test_file_reading(self): # Checks that the global attributes are correctly read expected_global_attributes = { - 'filename': self.test_file_name, - 'start_time': expected_start_time, - 'end_time': expected_end_time, - 'spacecraft_name': "test_spacecraft", - 'ssp_lon': None, - 'sensor': "test_instrument", - 'filename_start_time': datetime.datetime(year=2017, month=9, day=20, + "filename": self.test_file_name, + "start_time": expected_start_time, + "end_time": expected_end_time, + "spacecraft_name": "test_spacecraft", + "ssp_lon": None, + "sensor": "test_instrument", + "filename_start_time": datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), - 'filename_end_time': datetime.datetime(year=2017, month=9, day=20, + "filename_end_time": datetime.datetime(year=2017, month=9, day=20, hour=18, minute=30, second=50), - 'platform_name': "test_spacecraft", - 'quality_group': { - 'duration_of_product': 1., - 'duration_of_data_present': 2., - 'duration_of_data_missing': 3., - 'duration_of_data_degraded': 4., - 'gap_start_time_utc': (5., 6.), - 'gap_end_time_utc': (7., 8.) + "platform_name": "test_spacecraft", + "quality_group": { + "duration_of_product": 1., + "duration_of_data_present": 2., + "duration_of_data_missing": 3., + "duration_of_data_degraded": 4., + "gap_start_time_utc": (5., 6.), + "gap_end_time_utc": (7., 8.) 
} } @@ -197,7 +197,7 @@ def test_file_reading(self): # Must iterate on all keys to confirm that the dictionaries are equal self.assertEqual(global_attributes.keys(), expected_global_attributes.keys()) for key in expected_global_attributes: - if key not in ['quality_group']: + if key not in ["quality_group"]: # Quality check must be valid for both iterable and not iterable elements try: equal = all(global_attributes[key] == expected_global_attributes[key]) @@ -214,8 +214,8 @@ def test_file_reading(self): equal = global_attributes[key][inner_key] == expected_global_attributes[key][inner_key] self.assertTrue(equal) - @mock.patch('satpy.readers.vii_base_nc.tie_points_interpolation') - @mock.patch('satpy.readers.vii_base_nc.tie_points_geo_interpolation') + @mock.patch("satpy.readers.vii_base_nc.tie_points_interpolation") + @mock.patch("satpy.readers.vii_base_nc.tie_points_geo_interpolation") def test_functions(self, tpgi_, tpi_): """Test the functions.""" with self.assertRaises(NotImplementedError): @@ -226,16 +226,16 @@ def test_functions(self, tpgi_, tpi_): # Checks that the _perform_interpolation function is correctly executed variable = xr.DataArray( - dims=('y', 'x'), - name='test_name', + dims=("y", "x"), + name="test_name", attrs={ - 'key_1': 'value_1', - 'key_2': 'value_2' + "key_1": "value_1", + "key_2": "value_2" }, data=np.zeros((10, 100)), ) tpi_.return_value = [xr.DataArray( - dims=('num_tie_points_act', 'num_tie_points_alt'), + dims=("num_tie_points_act", "num_tie_points_alt"), data=np.ones((10, 100)) )] @@ -243,37 +243,37 @@ def test_functions(self, tpgi_, tpi_): tpi_.assert_called_with([variable], SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR) self.assertTrue(np.allclose(return_value, np.ones((10, 100)))) - self.assertEqual(return_value.attrs, {'key_1': 'value_1', 'key_2': 'value_2'}) - self.assertEqual(return_value.name, 'test_name') - self.assertEqual(return_value.dims, ('num_pixels', 'num_lines')) + self.assertEqual(return_value.attrs, {"key_1": "value_1", "key_2": "value_2"}) + self.assertEqual(return_value.name, "test_name") + self.assertEqual(return_value.dims, ("num_pixels", "num_lines")) # Checks that the _perform_geo_interpolation function is correctly executed variable_lon = xr.DataArray( - dims=('y', 'x'), - name='test_lon', + dims=("y", "x"), + name="test_lon", attrs={ - 'key_1': 'value_lon_1', - 'key_2': 'value_lon_2' + "key_1": "value_lon_1", + "key_2": "value_lon_2" }, data=np.zeros((10, 100)) ) variable_lat = xr.DataArray( - dims=('y', 'x'), - name='test_lat', + dims=("y", "x"), + name="test_lat", attrs={ - 'key_1': 'value_lat_1', - 'key_2': 'value_lat_2' + "key_1": "value_lat_1", + "key_2": "value_lat_2" }, data=np.ones((10, 100)) * 2. 
) tpgi_.return_value = ( xr.DataArray( - dims=('num_tie_points_act', 'num_tie_points_alt'), + dims=("num_tie_points_act", "num_tie_points_alt"), data=np.ones((10, 100)) ), xr.DataArray( - dims=('num_tie_points_act', 'num_tie_points_alt'), + dims=("num_tie_points_act", "num_tie_points_alt"), data=6 * np.ones((10, 100)) ) ) @@ -283,54 +283,54 @@ def test_functions(self, tpgi_, tpi_): tpgi_.assert_called_with(variable_lon, variable_lat, SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR) self.assertTrue(np.allclose(return_lon, np.ones((10, 100)))) - self.assertEqual(return_lon.attrs, {'key_1': 'value_lon_1', 'key_2': 'value_lon_2'}) - self.assertEqual(return_lon.name, 'test_lon') - self.assertEqual(return_lon.dims, ('num_pixels', 'num_lines')) + self.assertEqual(return_lon.attrs, {"key_1": "value_lon_1", "key_2": "value_lon_2"}) + self.assertEqual(return_lon.name, "test_lon") + self.assertEqual(return_lon.dims, ("num_pixels", "num_lines")) self.assertTrue(np.allclose(return_lat, 6 * np.ones((10, 100)))) - self.assertEqual(return_lat.attrs, {'key_1': 'value_lat_1', 'key_2': 'value_lat_2'}) - self.assertEqual(return_lat.name, 'test_lat') - self.assertEqual(return_lat.dims, ('num_pixels', 'num_lines')) + self.assertEqual(return_lat.attrs, {"key_1": "value_lat_1", "key_2": "value_lat_2"}) + self.assertEqual(return_lat.name, "test_lat") + self.assertEqual(return_lat.dims, ("num_pixels", "num_lines")) def test_standardize_dims(self): """Test the standardize dims function.""" test_variable = xr.DataArray( - dims=('num_pixels', 'num_lines'), - name='test_data', + dims=("num_pixels", "num_lines"), + name="test_data", attrs={ - 'key_1': 'value_lat_1', - 'key_2': 'value_lat_2' + "key_1": "value_lat_1", + "key_2": "value_lat_2" }, data=np.ones((10, 100)) * 1. ) out_variable = self.reader._standardize_dims(test_variable) self.assertTrue(np.allclose(out_variable.values, np.ones((100, 10)))) - self.assertEqual(out_variable.dims, ('y', 'x')) - self.assertEqual(out_variable.attrs['key_1'], 'value_lat_1') + self.assertEqual(out_variable.dims, ("y", "x")) + self.assertEqual(out_variable.attrs["key_1"], "value_lat_1") - @mock.patch('satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_calibration') - @mock.patch('satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_interpolation') - @mock.patch('satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_orthorectification') + @mock.patch("satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_calibration") + @mock.patch("satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_interpolation") + @mock.patch("satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_orthorectification") def test_dataset(self, po_, pi_, pc_): """Test the execution of the get_dataset function.""" # Checks the correct execution of the get_dataset function with a valid file_key - variable = self.reader.get_dataset(None, {'file_key': 'data/measurement_data/tpw', - 'calibration': None}) + variable = self.reader.get_dataset(None, {"file_key": "data/measurement_data/tpw", + "calibration": None}) pc_.assert_not_called() pi_.assert_not_called() po_.assert_not_called() self.assertTrue(np.allclose(variable.values, np.ones((100, 10)))) - self.assertEqual(variable.dims, ('y', 'x')) - self.assertEqual(variable.attrs['test_attr'], 'attr') - self.assertEqual(variable.attrs['units'], None) + self.assertEqual(variable.dims, ("y", "x")) + self.assertEqual(variable.attrs["test_attr"], "attr") + self.assertEqual(variable.attrs["units"], None) # Checks the correct execution of the get_dataset function with a valid 
file_key # and required calibration and interpolation - self.reader.get_dataset(None, {'file_key': 'data/measurement_data/tpw', - 'calibration': 'reflectance', - 'interpolate': True, - 'standard_name': 'longitude'}) + self.reader.get_dataset(None, {"file_key": "data/measurement_data/tpw", + "calibration": "reflectance", + "interpolate": True, + "standard_name": "longitude"}) pc_.assert_called() pi_.assert_called() po_.assert_not_called() @@ -338,13 +338,13 @@ def test_dataset(self, po_, pi_, pc_): # Checks the correct execution of the get_dataset function with a valid file_key # and required orthorectification self.reader.orthorect = True - self.reader.get_dataset(None, {'file_key': 'data/measurement_data/tpw', - 'calibration': None, - 'orthorect_data': 'test_orthorect_data'}) + self.reader.get_dataset(None, {"file_key": "data/measurement_data/tpw", + "calibration": None, + "orthorect_data": "test_orthorect_data"}) po_.assert_called() # Checks the correct execution of the get_dataset function with an invalid file_key - invalid_dataset = self.reader.get_dataset(None, {'file_key': 'test_invalid', 'calibration': None}) + invalid_dataset = self.reader.get_dataset(None, {"file_key": "test_invalid", "calibration": None}) # Checks that the function returns None self.assertEqual(invalid_dataset, None) @@ -353,16 +353,16 @@ def test_dataset(self, po_, pi_, pc_): po_.reset_mock() # Checks the correct execution of the get_dataset function with a 'cached_longitude' file_key - longitude = self.reader.get_dataset(None, {'file_key': 'cached_longitude', - 'calibration': 'reflectance', - 'interpolate': True}) + longitude = self.reader.get_dataset(None, {"file_key": "cached_longitude", + "calibration": "reflectance", + "interpolate": True}) pc_.assert_not_called() pi_.assert_not_called() self.assertEqual(longitude[0, 0], 1.) # Checks the correct execution of the get_dataset function with a 'cached_latitude' file_key - latitude = self.reader.get_dataset(None, {'file_key': 'cached_latitude', - 'calibration': None}) + latitude = self.reader.get_dataset(None, {"file_key": "cached_latitude", + "calibration": None}) self.assertEqual(latitude[0, 0], 2.) 
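        # Editor's note: a minimal sketch (hypothetical, inferred from the mock
        # assertions above rather than from the actual ViiNCBaseFileHandler code)
        # of the short-circuit exercised here: the "cached_longitude" and
        # "cached_latitude" file keys return the pre-interpolated arrays directly,
        # so neither the calibration nor the interpolation hook is ever invoked.
        def sketch_get_dataset(handler, ds_info):
            """Return cached geolocation if requested, else fall through."""
            if ds_info["file_key"] == "cached_longitude":
                return handler.longitude  # assumed attribute with interpolated lons
            if ds_info["file_key"] == "cached_latitude":
                return handler.latitude  # assumed attribute with interpolated lats
            return None  # the real handler would read, calibrate and interpolate here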
# Repeats some check with the reader where orthorectification and interpolation are inhibited @@ -374,30 +374,30 @@ def test_dataset(self, po_, pi_, pc_): # Checks the correct execution of the get_dataset function with a valid file_key # and required calibration and interpolation - self.reader_2.get_dataset(None, {'file_key': 'data/measurement_data/tpw', - 'calibration': 'reflectance', - 'interpolate': True, - 'standard_name': 'longitude'}) + self.reader_2.get_dataset(None, {"file_key": "data/measurement_data/tpw", + "calibration": "reflectance", + "interpolate": True, + "standard_name": "longitude"}) pc_.assert_called() pi_.assert_not_called() po_.assert_not_called() # Checks the correct execution of the get_dataset function with a valid file_key # and required orthorectification - self.reader_2.get_dataset(None, {'file_key': 'data/measurement_data/tpw', - 'calibration': None, - 'orthorect_data': 'test_orthorect_data'}) + self.reader_2.get_dataset(None, {"file_key": "data/measurement_data/tpw", + "calibration": None, + "orthorect_data": "test_orthorect_data"}) po_.assert_not_called() # Checks the correct execution of the get_dataset function with a 'cached_longitude' file_key - longitude = self.reader_2.get_dataset(None, {'file_key': 'cached_longitude', - 'calibration': None}) + longitude = self.reader_2.get_dataset(None, {"file_key": "cached_longitude", + "calibration": None}) self.assertEqual(longitude[0, 0], 100.) # Checks the correct execution of the get_dataset function with a 'cached_longitude' file_key # in a reader without defined longitude - longitude = self.reader_3.get_dataset(None, {'file_key': 'cached_longitude', - 'calibration': 'reflectance', - 'interpolate': True}) + longitude = self.reader_3.get_dataset(None, {"file_key": "cached_longitude", + "calibration": "reflectance", + "interpolate": True}) # Checks that the function returns None self.assertEqual(longitude, None) diff --git a/satpy/tests/reader_tests/test_vii_l1b_nc.py b/satpy/tests/reader_tests/test_vii_l1b_nc.py index cf33e7872e..d62673d9f7 100644 --- a/satpy/tests/reader_tests/test_vii_l1b_nc.py +++ b/satpy/tests/reader_tests/test_vii_l1b_nc.py @@ -35,7 +35,7 @@ from satpy.readers.vii_l1b_nc import ViiL1bNCFileHandler from satpy.readers.vii_utils import MEAN_EARTH_RADIUS -TEST_FILE = 'test_file_vii_l1b_nc.nc' +TEST_FILE = "test_file_vii_l1b_nc.nc" class TestViiL1bNCFileHandler(unittest.TestCase): @@ -47,51 +47,51 @@ def setUp(self): # uses a UUID to avoid permission conflicts during execution of tests in parallel self.test_file_name = TEST_FILE + str(uuid.uuid1()) + ".nc" - with Dataset(self.test_file_name, 'w') as nc: + with Dataset(self.test_file_name, "w") as nc: # Create data group - g1 = nc.createGroup('data') + g1 = nc.createGroup("data") # Add dimensions to data group - g1.createDimension('num_chan_solar', 11) - g1.createDimension('num_chan_thermal', 9) - g1.createDimension('num_pixels', 72) - g1.createDimension('num_lines', 600) + g1.createDimension("num_chan_solar", 11) + g1.createDimension("num_chan_thermal", 9) + g1.createDimension("num_pixels", 72) + g1.createDimension("num_lines", 600) # Create calibration_data group - g1_1 = g1.createGroup('calibration_data') + g1_1 = g1.createGroup("calibration_data") # Add variables to data/calibration_data group - bt_a = g1_1.createVariable('bt_conversion_a', np.float32, dimensions=('num_chan_thermal',)) + bt_a = g1_1.createVariable("bt_conversion_a", np.float32, dimensions=("num_chan_thermal",)) bt_a[:] = np.arange(9) - bt_b = 
g1_1.createVariable('bt_conversion_b', np.float32, dimensions=('num_chan_thermal',)) + bt_b = g1_1.createVariable("bt_conversion_b", np.float32, dimensions=("num_chan_thermal",)) bt_b[:] = np.arange(9) - cw = g1_1.createVariable('channel_cw_thermal', np.float32, dimensions=('num_chan_thermal',)) + cw = g1_1.createVariable("channel_cw_thermal", np.float32, dimensions=("num_chan_thermal",)) cw[:] = np.arange(9) - isi = g1_1.createVariable('Band_averaged_solar_irradiance', np.float32, dimensions=('num_chan_solar',)) + isi = g1_1.createVariable("Band_averaged_solar_irradiance", np.float32, dimensions=("num_chan_solar",)) isi[:] = np.arange(11) # Create measurement_data group - g1_2 = g1.createGroup('measurement_data') + g1_2 = g1.createGroup("measurement_data") # Add dimensions to data/measurement_data group - g1_2.createDimension('num_tie_points_act', 10) - g1_2.createDimension('num_tie_points_alt', 100) + g1_2.createDimension("num_tie_points_act", 10) + g1_2.createDimension("num_tie_points_alt", 100) # Add variables to data/measurement_data group - sza = g1_2.createVariable('solar_zenith', np.float32, - dimensions=('num_tie_points_alt', 'num_tie_points_act')) + sza = g1_2.createVariable("solar_zenith", np.float32, + dimensions=("num_tie_points_alt", "num_tie_points_act")) sza[:] = 25.0 - delta_lat = g1_2.createVariable('delta_lat', np.float32, dimensions=('num_lines', 'num_pixels')) + delta_lat = g1_2.createVariable("delta_lat", np.float32, dimensions=("num_lines", "num_pixels")) delta_lat[:] = 1.0 self.reader = ViiL1bNCFileHandler( filename=self.test_file_name, filename_info={ - 'creation_time': datetime.datetime(year=2017, month=9, day=22, + "creation_time": datetime.datetime(year=2017, month=9, day=22, hour=22, minute=40, second=10), - 'sensing_start_time': datetime.datetime(year=2017, month=9, day=20, + "sensing_start_time": datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), - 'sensing_end_time': datetime.datetime(year=2017, month=9, day=20, + "sensing_end_time": datetime.datetime(year=2017, month=9, day=20, hour=18, minute=30, second=50) }, filetype_info={} @@ -128,40 +128,40 @@ def test_functions(self): """Test the functions.""" # Checks that the _perform_orthorectification function is correctly executed variable = xr.DataArray( - dims=('num_lines', 'num_pixels'), - name='test_name', + dims=("num_lines", "num_pixels"), + name="test_name", attrs={ - 'key_1': 'value_1', - 'key_2': 'value_2' + "key_1": "value_1", + "key_2": "value_2" }, data=da.from_array(np.ones((600, 72))) ) - orthorect_variable = self.reader._perform_orthorectification(variable, 'data/measurement_data/delta_lat') + orthorect_variable = self.reader._perform_orthorectification(variable, "data/measurement_data/delta_lat") expected_values = np.degrees(np.ones((600, 72)) / MEAN_EARTH_RADIUS) + np.ones((600, 72)) self.assertTrue(np.allclose(orthorect_variable.values, expected_values)) # Checks that the _perform_calibration function is correctly executed in all cases # radiance calibration: return value is simply a copy of the variable - return_variable = self.reader._perform_calibration(variable, {'calibration': 'radiance'}) + return_variable = self.reader._perform_calibration(variable, {"calibration": "radiance"}) self.assertTrue(np.all(return_variable == variable)) # invalid calibration: raises a ValueError with self.assertRaises(ValueError): self.reader._perform_calibration(variable, - {'calibration': 'invalid', 'name': 'test'}) + {"calibration": "invalid", "name": "test"}) # 
brightness_temperature calibration: checks that the return value is correct calibrated_variable = self.reader._perform_calibration(variable, - {'calibration': 'brightness_temperature', - 'chan_thermal_index': 3}) + {"calibration": "brightness_temperature", + "chan_thermal_index": 3}) expected_values = np.full((600, 72), 1101.10413712) self.assertTrue(np.allclose(calibrated_variable.values, expected_values)) # reflectance calibration: checks that the return value is correct calibrated_variable = self.reader._perform_calibration(variable, - {'calibration': 'reflectance', - 'wavelength': [0.658, 0.668, 0.678], - 'chan_solar_index': 2}) + {"calibration": "reflectance", + "wavelength": [0.658, 0.668, 0.678], + "chan_solar_index": 2}) expected_values = np.full((600, 72), 173.3181982) self.assertTrue(np.allclose(calibrated_variable.values, expected_values)) diff --git a/satpy/tests/reader_tests/test_vii_l2_nc.py b/satpy/tests/reader_tests/test_vii_l2_nc.py index e431d16e73..772f783684 100644 --- a/satpy/tests/reader_tests/test_vii_l2_nc.py +++ b/satpy/tests/reader_tests/test_vii_l2_nc.py @@ -30,7 +30,7 @@ from satpy.readers.vii_l2_nc import ViiL2NCFileHandler -TEST_FILE = 'test_file_vii_l2_nc.nc' +TEST_FILE = "test_file_vii_l2_nc.nc" class TestViiL2NCFileHandler(unittest.TestCase): @@ -42,29 +42,29 @@ def setUp(self): # uses a UUID to avoid permission conflicts during execution of tests in parallel self.test_file_name = TEST_FILE + str(uuid.uuid1()) + ".nc" - with Dataset(self.test_file_name, 'w') as nc: + with Dataset(self.test_file_name, "w") as nc: # Create data group - g1 = nc.createGroup('data') + g1 = nc.createGroup("data") # Add dimensions to data group - g1.createDimension('num_pixels', 100) - g1.createDimension('num_lines', 10) + g1.createDimension("num_pixels", 100) + g1.createDimension("num_lines", 10) # Create measurement_data group - g1_2 = g1.createGroup('measurement_data') + g1_2 = g1.createGroup("measurement_data") # Add variables to data/measurement_data group - delta_lat = g1_2.createVariable('delta_lat', np.float32, dimensions=('num_lines', 'num_pixels')) + delta_lat = g1_2.createVariable("delta_lat", np.float32, dimensions=("num_lines", "num_pixels")) delta_lat[:] = 0.1 self.reader = ViiL2NCFileHandler( filename=self.test_file_name, filename_info={ - 'creation_time': datetime.datetime(year=2017, month=9, day=22, + "creation_time": datetime.datetime(year=2017, month=9, day=22, hour=22, minute=40, second=10), - 'sensing_start_time': datetime.datetime(year=2017, month=9, day=20, + "sensing_start_time": datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), - 'sensing_end_time': datetime.datetime(year=2017, month=9, day=20, + "sensing_end_time": datetime.datetime(year=2017, month=9, day=20, hour=18, minute=30, second=50) }, filetype_info={} @@ -82,16 +82,16 @@ def test_functions(self): """Test the functions.""" # Checks that the _perform_orthorectification function is correctly executed variable = xr.DataArray( - dims=('num_lines', 'num_pixels'), - name='test_name', + dims=("num_lines", "num_pixels"), + name="test_name", attrs={ - 'key_1': 'value_1', - 'key_2': 'value_2' + "key_1": "value_1", + "key_2": "value_2" }, data=da.from_array(np.ones((10, 100))) ) - orthorect_variable = self.reader._perform_orthorectification(variable, 'data/measurement_data/delta_lat') + orthorect_variable = self.reader._perform_orthorectification(variable, "data/measurement_data/delta_lat") expected_values = 1.1 * np.ones((10, 100)) 
self.assertTrue(np.allclose(orthorect_variable.values, expected_values)) - self.assertEqual(orthorect_variable.attrs['key_1'], 'value_1') + self.assertEqual(orthorect_variable.attrs["key_1"], "value_1") diff --git a/satpy/tests/reader_tests/test_vii_wv_nc.py b/satpy/tests/reader_tests/test_vii_wv_nc.py index 991bbecec4..9d43f1ded1 100644 --- a/satpy/tests/reader_tests/test_vii_wv_nc.py +++ b/satpy/tests/reader_tests/test_vii_wv_nc.py @@ -30,7 +30,7 @@ from satpy.readers.vii_l2_nc import ViiL2NCFileHandler -TEST_FILE = 'test_file_vii_wv_nc.nc' +TEST_FILE = "test_file_vii_wv_nc.nc" class TestViiL2NCFileHandler(unittest.TestCase): @@ -42,29 +42,29 @@ def setUp(self): # uses a UUID to avoid permission conflicts during execution of tests in parallel self.test_file_name = TEST_FILE + str(uuid.uuid1()) + ".nc" - with Dataset(self.test_file_name, 'w') as nc: + with Dataset(self.test_file_name, "w") as nc: # Create data group - g1 = nc.createGroup('data') + g1 = nc.createGroup("data") # Add dimensions to data group - g1.createDimension('num_points_act', 100) - g1.createDimension('num_points_alt', 10) + g1.createDimension("num_points_act", 100) + g1.createDimension("num_points_alt", 10) # Create measurement_data group - g1_2 = g1.createGroup('measurement_data') + g1_2 = g1.createGroup("measurement_data") # Add variables to data/measurement_data group - delta_lat = g1_2.createVariable('delta_lat', np.float32, dimensions=('num_points_alt', 'num_points_act')) + delta_lat = g1_2.createVariable("delta_lat", np.float32, dimensions=("num_points_alt", "num_points_act")) delta_lat[:] = 0.1 self.reader = ViiL2NCFileHandler( filename=self.test_file_name, filename_info={ - 'creation_time': datetime.datetime(year=2017, month=9, day=22, + "creation_time": datetime.datetime(year=2017, month=9, day=22, hour=22, minute=40, second=10), - 'sensing_start_time': datetime.datetime(year=2017, month=9, day=20, + "sensing_start_time": datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), - 'sensing_end_time': datetime.datetime(year=2017, month=9, day=20, + "sensing_end_time": datetime.datetime(year=2017, month=9, day=20, hour=18, minute=30, second=50) }, filetype_info={} @@ -82,16 +82,16 @@ def test_functions(self): """Test the functions.""" # Checks that the _perform_orthorectification function is correctly executed variable = xr.DataArray( - dims=('num_points_alt', 'num_points_act'), - name='test_name', + dims=("num_points_alt", "num_points_act"), + name="test_name", attrs={ - 'key_1': 'value_1', - 'key_2': 'value_2' + "key_1": "value_1", + "key_2": "value_2" }, data=da.from_array(np.ones((10, 100))) ) - orthorect_variable = self.reader._perform_orthorectification(variable, 'data/measurement_data/delta_lat') + orthorect_variable = self.reader._perform_orthorectification(variable, "data/measurement_data/delta_lat") expected_values = 1.1 * np.ones((10, 100)) self.assertTrue(np.allclose(orthorect_variable.values, expected_values)) - self.assertEqual(orthorect_variable.attrs['key_1'], 'value_1') + self.assertEqual(orthorect_variable.attrs["key_1"], "value_1") diff --git a/satpy/tests/reader_tests/test_viirs_atms_utils.py b/satpy/tests/reader_tests/test_viirs_atms_utils.py index 931cf6469e..cdcc0b9361 100644 --- a/satpy/tests/reader_tests/test_viirs_atms_utils.py +++ b/satpy/tests/reader_tests/test_viirs_atms_utils.py @@ -34,8 +34,8 @@ def test_get_file_units(caplog): """Test get the file-units from the dataset info.""" - did = make_dataid(name='some_variable', modifiers=()) - ds_info = {'file_units': 
None} + did = make_dataid(name="some_variable", modifiers=()) + ds_info = {"file_units": None} with caplog.at_level(logging.DEBUG): file_units = _get_file_units(did, ds_info) @@ -47,8 +47,8 @@ def test_get_file_units(caplog): def test_get_scale_factors_for_units_unsupported_units(): """Test get scale factors for units, when units are not supported.""" factors = xr.DataArray(da.from_array(DEFAULT_FILE_FACTORS, chunks=1)) - file_units = 'unknown unit' - output_units = '%' + file_units = "unknown unit" + output_units = "%" with pytest.raises(ValueError) as exec_info: _ = _get_scale_factors_for_units(factors, file_units, output_units) @@ -59,8 +59,8 @@ def test_get_scale_factors_for_units_unsupported_units(): def test_get_scale_factors_for_units_reflectances(caplog): """Test get scale factors for units, when variable is supposed to be a reflectance.""" factors = xr.DataArray(da.from_array(DEFAULT_FILE_FACTORS, chunks=1)) - file_units = '1' - output_units = '%' + file_units = "1" + output_units = "%" with caplog.at_level(logging.DEBUG): retv = _get_scale_factors_for_units(factors, file_units, output_units) @@ -72,8 +72,8 @@ def test_get_scale_factors_for_units_reflectances(caplog): def test_get_scale_factors_for_units_tbs(caplog): """Test get scale factors for units, when variable is supposed to be a brightness temperature.""" factors = xr.DataArray(da.from_array(DEFAULT_FILE_FACTORS, chunks=1)) - file_units = 'W cm-2 sr-1' - output_units = 'W m-2 sr-1' + file_units = "W cm-2 sr-1" + output_units = "W m-2 sr-1" with caplog.at_level(logging.DEBUG): retv = _get_scale_factors_for_units(factors, file_units, output_units) diff --git a/satpy/tests/reader_tests/test_viirs_compact.py b/satpy/tests/reader_tests/test_viirs_compact.py index 03ef09124c..006cdfe968 100644 --- a/satpy/tests/reader_tests/test_viirs_compact.py +++ b/satpy/tests/reader_tests/test_viirs_compact.py @@ -2445,17 +2445,17 @@ def _dataset_iterator(self): from satpy.tests.utils import make_dataid filename_info = {} - filetype_info = {'file_type': 'compact_dnb'} + filetype_info = {"file_type": "compact_dnb"} test = VIIRSCompactFileHandler(self.filename, filename_info, filetype_info) - dsid = make_dataid(name='DNB', calibration='radiance') + dsid = make_dataid(name="DNB", calibration="radiance") ds1 = test.get_dataset(dsid, {}) - dsid = make_dataid(name='longitude_dnb') - ds2 = test.get_dataset(dsid, {'standard_name': 'longitude'}) - dsid = make_dataid(name='latitude_dnb') - ds3 = test.get_dataset(dsid, {'standard_name': 'latitude'}) - dsid = make_dataid(name='solar_zenith_angle') - ds4 = test.get_dataset(dsid, {'standard_name': 'solar_zenith_angle'}) + dsid = make_dataid(name="longitude_dnb") + ds2 = test.get_dataset(dsid, {"standard_name": "longitude"}) + dsid = make_dataid(name="latitude_dnb") + ds3 = test.get_dataset(dsid, {"standard_name": "latitude"}) + dsid = make_dataid(name="solar_zenith_angle") + ds4 = test.get_dataset(dsid, {"standard_name": "solar_zenith_angle"}) for ds in [ds1, ds2, ds3, ds4]: yield ds @@ -2466,7 +2466,7 @@ def test_get_dataset(self): assert ds.shape == (752, 4064) assert ds.dtype == np.float32 assert ds.compute().shape == (752, 4064) - assert ds.attrs['rows_per_scan'] == 16 + assert ds.attrs["rows_per_scan"] == 16 def test_distributed(self): """Check that distributed computations work.""" diff --git a/satpy/tests/reader_tests/test_viirs_edr_active_fires.py b/satpy/tests/reader_tests/test_viirs_edr_active_fires.py index df94283fba..de55a9c20c 100644 --- 
a/satpy/tests/reader_tests/test_viirs_edr_active_fires.py +++ b/satpy/tests/reader_tests/test_viirs_edr_active_fires.py @@ -60,23 +60,23 @@ class FakeModFiresNetCDF4FileHandler(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filename_type): """Mimic reader input file content.""" file_content = {} - file_content['/attr/data_id'] = "AFMOD" - file_content['/attr/satellite_name'] = "NPP" - file_content['/attr/instrument_name'] = 'VIIRS' - - file_content['Fire Pixels/FP_latitude'] = DEFAULT_LATLON_FILE_DATA - file_content['Fire Pixels/FP_longitude'] = DEFAULT_LATLON_FILE_DATA - file_content['Fire Pixels/FP_power'] = DEFAULT_POWER_FILE_DATA - file_content['Fire Pixels/FP_T13'] = DEFAULT_M13_FILE_DATA - file_content['Fire Pixels/FP_T13/attr/units'] = 'kelvins' - file_content['Fire Pixels/FP_confidence'] = DEFAULT_DETECTION_FILE_DATA - file_content['Fire Pixels/attr/units'] = 'none' - file_content['Fire Pixels/shape'] = DEFAULT_FILE_SHAPE - - attrs = ('FP_latitude', 'FP_longitude', 'FP_T13', 'FP_confidence') + file_content["/attr/data_id"] = "AFMOD" + file_content["/attr/satellite_name"] = "NPP" + file_content["/attr/instrument_name"] = "VIIRS" + + file_content["Fire Pixels/FP_latitude"] = DEFAULT_LATLON_FILE_DATA + file_content["Fire Pixels/FP_longitude"] = DEFAULT_LATLON_FILE_DATA + file_content["Fire Pixels/FP_power"] = DEFAULT_POWER_FILE_DATA + file_content["Fire Pixels/FP_T13"] = DEFAULT_M13_FILE_DATA + file_content["Fire Pixels/FP_T13/attr/units"] = "kelvins" + file_content["Fire Pixels/FP_confidence"] = DEFAULT_DETECTION_FILE_DATA + file_content["Fire Pixels/attr/units"] = "none" + file_content["Fire Pixels/shape"] = DEFAULT_FILE_SHAPE + + attrs = ("FP_latitude", "FP_longitude", "FP_T13", "FP_confidence") convert_file_content_to_data_array( file_content, attrs=attrs, - dims=('z', 'fakeDim0', 'fakeDim1')) + dims=("z", "fakeDim0", "fakeDim1")) return file_content @@ -86,21 +86,21 @@ class FakeImgFiresNetCDF4FileHandler(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filename_type): """Mimic reader input file content.""" file_content = {} - file_content['/attr/data_id'] = "AFIMG" - file_content['/attr/satellite_name'] = "NPP" - file_content['/attr/instrument_name'] = 'VIIRS' - - file_content['Fire Pixels/FP_latitude'] = DEFAULT_LATLON_FILE_DATA - file_content['Fire Pixels/FP_longitude'] = DEFAULT_LATLON_FILE_DATA - file_content['Fire Pixels/FP_power'] = DEFAULT_POWER_FILE_DATA - file_content['Fire Pixels/FP_T4'] = DEFAULT_M13_FILE_DATA - file_content['Fire Pixels/FP_T4/attr/units'] = 'kelvins' - file_content['Fire Pixels/FP_confidence'] = DEFAULT_DETECTION_FILE_DATA - - attrs = ('FP_latitude', 'FP_longitude', 'FP_T13', 'FP_confidence') + file_content["/attr/data_id"] = "AFIMG" + file_content["/attr/satellite_name"] = "NPP" + file_content["/attr/instrument_name"] = "VIIRS" + + file_content["Fire Pixels/FP_latitude"] = DEFAULT_LATLON_FILE_DATA + file_content["Fire Pixels/FP_longitude"] = DEFAULT_LATLON_FILE_DATA + file_content["Fire Pixels/FP_power"] = DEFAULT_POWER_FILE_DATA + file_content["Fire Pixels/FP_T4"] = DEFAULT_M13_FILE_DATA + file_content["Fire Pixels/FP_T4/attr/units"] = "kelvins" + file_content["Fire Pixels/FP_confidence"] = DEFAULT_DETECTION_FILE_DATA + + attrs = ("FP_latitude", "FP_longitude", "FP_T13", "FP_confidence") convert_file_content_to_data_array( file_content, attrs=attrs, - dims=('z', 'fakeDim0', 'fakeDim1')) + dims=("z", "fakeDim0", "fakeDim1")) return file_content @@ -114,13 +114,13 @@ def __init__(self, 
filename, filename_info, filetype_info, **kwargs): platform_key = {"NPP": "Suomi-NPP", "J01": "NOAA-20", "J02": "NOAA-21"} - self.platform_name = platform_key.get(self.filename_info['satellite_name'].upper(), "unknown") + self.platform_name = platform_key.get(self.filename_info["satellite_name"].upper(), "unknown") def get_test_content(self): """Create fake test file content.""" - fake_file = io.StringIO(u'''\n\n\n\n\n\n\n\n\n\n\n\n\n\n + fake_file = io.StringIO(u"""\n\n\n\n\n\n\n\n\n\n\n\n\n\n 24.64015007, -107.57017517, 317.38290405, 0.75, 0.75, 40, 4.28618050 - 25.90660477, -100.06127167, 331.17962646, 0.75, 0.75, 81, 20.61096764''') + 25.90660477, -100.06127167, 331.17962646, 0.75, 0.75, 81, 20.61096764""") return dd.from_pandas(pd.read_csv(fake_file, skiprows=15, header=None, names=["latitude", "longitude", @@ -139,13 +139,13 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): def get_test_content(self): """Create fake test file content.""" - fake_file = io.StringIO(u'''\n\n\n\n\n\n\n\n\n\n\n\n\n\n + fake_file = io.StringIO(u"""\n\n\n\n\n\n\n\n\n\n\n\n\n\n 24.64015007, -107.57017517, 317.38290405, 0.75, 0.75, 40, 4.28618050 - 25.90660477, -100.06127167, 331.17962646, 0.75, 0.75, 81, 20.61096764''') + 25.90660477, -100.06127167, 331.17962646, 0.75, 0.75, 81, 20.61096764""") platform_key = {"NPP": "Suomi-NPP", "J01": "NOAA-20", "J02": "NOAA-21"} - self.platform_name = platform_key.get(self.filename_info['satellite_name'].upper(), "unknown") + self.platform_name = platform_key.get(self.filename_info["satellite_name"].upper(), "unknown") return dd.from_pandas(pd.read_csv(fake_file, skiprows=15, header=None, names=["latitude", "longitude", @@ -157,14 +157,14 @@ def get_test_content(self): class TestModVIIRSActiveFiresNetCDF4(unittest.TestCase): """Test VIIRS Fires Reader.""" - yaml_file = 'viirs_edr_active_fires.yaml' + yaml_file = "viirs_edr_active_fires.yaml" def setUp(self): """Wrap CDF4 file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_edr_active_fires import VIIRSActiveFiresFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) - self.p = mock.patch.object(VIIRSActiveFiresFileHandler, '__bases__', (FakeModFiresNetCDF4FileHandler,)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) + self.p = mock.patch.object(VIIRSActiveFiresFileHandler, "__bases__", (FakeModFiresNetCDF4FileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -177,7 +177,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'AFMOD_j02_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc' + "AFMOD_j02_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc" ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -188,40 +188,40 @@ def test_load_dataset(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'AFMOD_j02_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc' + "AFMOD_j02_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc" ]) r.create_filehandlers(loadables) - datasets = r.load(['confidence_pct']) + datasets = r.load(["confidence_pct"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], '%') - self.assertEqual(v.attrs['_FillValue'], 255) 
+ self.assertEqual(v.attrs["units"], "%") + self.assertEqual(v.attrs["_FillValue"], 255) self.assertTrue(np.issubdtype(v.dtype, DEFAULT_DETECTION_FILE_DTYPE)) - datasets = r.load(['T13']) + datasets = r.load(["T13"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'K') + self.assertEqual(v.attrs["units"], "K") - datasets = r.load(['power']) + datasets = r.load(["power"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'MW') - self.assertEqual(v.attrs['platform_name'], 'NOAA-21') - self.assertEqual(v.attrs['sensor'], 'viirs') + self.assertEqual(v.attrs["units"], "MW") + self.assertEqual(v.attrs["platform_name"], "NOAA-21") + self.assertEqual(v.attrs["sensor"], "viirs") class TestImgVIIRSActiveFiresNetCDF4(unittest.TestCase): """Test VIIRS Fires Reader.""" - yaml_file = 'viirs_edr_active_fires.yaml' + yaml_file = "viirs_edr_active_fires.yaml" def setUp(self): """Wrap CDF4 file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_edr_active_fires import VIIRSActiveFiresFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) - self.p = mock.patch.object(VIIRSActiveFiresFileHandler, '__bases__', (FakeImgFiresNetCDF4FileHandler,)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) + self.p = mock.patch.object(VIIRSActiveFiresFileHandler, "__bases__", (FakeImgFiresNetCDF4FileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -234,7 +234,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc' + "AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc" ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -245,41 +245,41 @@ def test_load_dataset(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc' + "AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc" ]) r.create_filehandlers(loadables) - datasets = r.load(['confidence_cat']) + datasets = r.load(["confidence_cat"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], '1') - self.assertEqual(v.attrs['flag_meanings'], ['low', 'medium', 'high']) - self.assertEqual(v.attrs['flag_values'], [7, 8, 9]) + self.assertEqual(v.attrs["units"], "1") + self.assertEqual(v.attrs["flag_meanings"], ["low", "medium", "high"]) + self.assertEqual(v.attrs["flag_values"], [7, 8, 9]) - datasets = r.load(['T4']) + datasets = r.load(["T4"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'K') + self.assertEqual(v.attrs["units"], "K") - datasets = r.load(['power']) + datasets = r.load(["power"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'MW') - self.assertEqual(v.attrs['platform_name'], 'Suomi-NPP') - self.assertEqual(v.attrs['sensor'], 'viirs') + self.assertEqual(v.attrs["units"], "MW") + self.assertEqual(v.attrs["platform_name"], "Suomi-NPP") + self.assertEqual(v.attrs["sensor"], "viirs") -@mock.patch('satpy.readers.viirs_edr_active_fires.dd.read_csv') 
+@mock.patch("satpy.readers.viirs_edr_active_fires.dd.read_csv") class TestModVIIRSActiveFiresText(unittest.TestCase): """Test VIIRS Fires Reader.""" - yaml_file = 'viirs_edr_active_fires.yaml' + yaml_file = "viirs_edr_active_fires.yaml" def setUp(self): """Wrap file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_edr_active_fires import VIIRSActiveFiresTextFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) - self.p = mock.patch.object(VIIRSActiveFiresTextFileHandler, '__bases__', (FakeModFiresTextFileHandler,)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) + self.p = mock.patch.object(VIIRSActiveFiresTextFileHandler, "__bases__", (FakeModFiresTextFileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -292,7 +292,7 @@ def test_init(self, mock_obj): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'AFEDR_j01_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt' + "AFEDR_j01_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt" ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -303,39 +303,39 @@ def test_load_dataset(self, csv_mock): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'AFEDR_j01_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt' + "AFEDR_j01_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt" ]) r.create_filehandlers(loadables) - datasets = r.load(['confidence_pct']) + datasets = r.load(["confidence_pct"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], '%') + self.assertEqual(v.attrs["units"], "%") - datasets = r.load(['T13']) + datasets = r.load(["T13"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'K') + self.assertEqual(v.attrs["units"], "K") - datasets = r.load(['power']) + datasets = r.load(["power"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'MW') - self.assertEqual(v.attrs['platform_name'], 'NOAA-20') - self.assertEqual(v.attrs['sensor'], 'VIIRS') + self.assertEqual(v.attrs["units"], "MW") + self.assertEqual(v.attrs["platform_name"], "NOAA-20") + self.assertEqual(v.attrs["sensor"], "VIIRS") -@mock.patch('satpy.readers.viirs_edr_active_fires.dd.read_csv') +@mock.patch("satpy.readers.viirs_edr_active_fires.dd.read_csv") class TestImgVIIRSActiveFiresText(unittest.TestCase): """Test VIIRS Fires Reader.""" - yaml_file = 'viirs_edr_active_fires.yaml' + yaml_file = "viirs_edr_active_fires.yaml" def setUp(self): """Wrap file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_edr_active_fires import VIIRSActiveFiresTextFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) - self.p = mock.patch.object(VIIRSActiveFiresTextFileHandler, '__bases__', (FakeImgFiresTextFileHandler,)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) + self.p = mock.patch.object(VIIRSActiveFiresTextFileHandler, "__bases__", (FakeImgFiresTextFileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -348,7 +348,7 @@ def test_init(self, mock_obj): from satpy.readers import load_reader r = 
load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt' + "AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt" ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -359,24 +359,24 @@ def test_load_dataset(self, mock_obj): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt' + "AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt" ]) r.create_filehandlers(loadables) - datasets = r.load(['confidence_cat']) + datasets = r.load(["confidence_cat"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], '1') - self.assertEqual(v.attrs['flag_meanings'], ['low', 'medium', 'high']) - self.assertEqual(v.attrs['flag_values'], [7, 8, 9]) + self.assertEqual(v.attrs["units"], "1") + self.assertEqual(v.attrs["flag_meanings"], ["low", "medium", "high"]) + self.assertEqual(v.attrs["flag_values"], [7, 8, 9]) - datasets = r.load(['T4']) + datasets = r.load(["T4"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'K') + self.assertEqual(v.attrs["units"], "K") - datasets = r.load(['power']) + datasets = r.load(["power"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'MW') - self.assertEqual(v.attrs['platform_name'], 'Suomi-NPP') - self.assertEqual(v.attrs['sensor'], 'VIIRS') + self.assertEqual(v.attrs["units"], "MW") + self.assertEqual(v.attrs["platform_name"], "Suomi-NPP") + self.assertEqual(v.attrs["sensor"], "VIIRS") diff --git a/satpy/tests/reader_tests/test_viirs_edr_flood.py b/satpy/tests/reader_tests/test_viirs_edr_flood.py index 9b544dc9f1..0141259784 100644 --- a/satpy/tests/reader_tests/test_viirs_edr_flood.py +++ b/satpy/tests/reader_tests/test_viirs_edr_flood.py @@ -36,51 +36,51 @@ class FakeHDF4FileHandler2(FakeHDF4FileHandler): def get_test_content(self, filename, filename_info, filename_type): """Mimic reader input file content.""" file_content = {} - file_content['/attr/Satellitename'] = filename_info['platform_shortname'] - file_content['/attr/SensorIdentifyCode'] = 'VIIRS' + file_content["/attr/Satellitename"] = filename_info["platform_shortname"] + file_content["/attr/SensorIdentifyCode"] = "VIIRS" # only one dataset for the flood reader - file_content['WaterDetection'] = DEFAULT_FILE_DATA - file_content['WaterDetection/attr/_Fillvalue'] = 1 - file_content['WaterDetection/attr/scale_factor'] = 1. - file_content['WaterDetection/attr/add_offset'] = 0. - file_content['WaterDetection/attr/units'] = 'none' - file_content['WaterDetection/shape'] = DEFAULT_FILE_SHAPE - file_content['WaterDetection/attr/ProjectionMinLatitude'] = 15. - file_content['WaterDetection/attr/ProjectionMaxLatitude'] = 68. - file_content['WaterDetection/attr/ProjectionMinLongitude'] = -124. - file_content['WaterDetection/attr/ProjectionMaxLongitude'] = -61. + file_content["WaterDetection"] = DEFAULT_FILE_DATA + file_content["WaterDetection/attr/_Fillvalue"] = 1 + file_content["WaterDetection/attr/scale_factor"] = 1. + file_content["WaterDetection/attr/add_offset"] = 0. + file_content["WaterDetection/attr/units"] = "none" + file_content["WaterDetection/shape"] = DEFAULT_FILE_SHAPE + file_content["WaterDetection/attr/ProjectionMinLatitude"] = 15. 
+ file_content["WaterDetection/attr/ProjectionMaxLatitude"] = 68. + file_content["WaterDetection/attr/ProjectionMinLongitude"] = -124. + file_content["WaterDetection/attr/ProjectionMaxLongitude"] = -61. # convert tp xarrays from xarray import DataArray for key, val in file_content.items(): if isinstance(val, np.ndarray): attrs = {} - for a in ['_Fillvalue', 'units', 'ProjectionMinLatitude', 'ProjectionMaxLongitude', - 'ProjectionMinLongitude', 'ProjectionMaxLatitude']: - if key + '/attr/' + a in file_content: - attrs[a] = file_content[key + '/attr/' + a] + for a in ["_Fillvalue", "units", "ProjectionMinLatitude", "ProjectionMaxLongitude", + "ProjectionMinLongitude", "ProjectionMaxLatitude"]: + if key + "/attr/" + a in file_content: + attrs[a] = file_content[key + "/attr/" + a] if val.ndim > 1: - file_content[key] = DataArray(val, dims=('fakeDim0', 'fakeDim1'), attrs=attrs) + file_content[key] = DataArray(val, dims=("fakeDim0", "fakeDim1"), attrs=attrs) else: file_content[key] = DataArray(val, attrs=attrs) - if 'y' not in file_content['WaterDetection'].dims: - file_content['WaterDetection'] = file_content['WaterDetection'].rename({'fakeDim0': 'x', 'fakeDim1': 'y'}) + if "y" not in file_content["WaterDetection"].dims: + file_content["WaterDetection"] = file_content["WaterDetection"].rename({"fakeDim0": "x", "fakeDim1": "y"}) return file_content class TestVIIRSEDRFloodReader(unittest.TestCase): """Test VIIRS EDR Flood Reader.""" - yaml_file = 'viirs_edr_flood.yaml' + yaml_file = "viirs_edr_flood.yaml" def setUp(self): """Wrap HDF4 file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_edr_flood import VIIRSEDRFlood - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) - self.p = mock.patch.object(VIIRSEDRFlood, '__bases__', (FakeHDF4FileHandler2,)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) + self.p = mock.patch.object(VIIRSEDRFlood, "__bases__", (FakeHDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -93,7 +93,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_10_300_01.hdf' + "WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_10_300_01.hdf" ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -104,23 +104,23 @@ def test_load_dataset(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_10_300_01.hdf' + "WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_10_300_01.hdf" ]) r.create_filehandlers(loadables) - datasets = r.load(['WaterDetection']) + datasets = r.load(["WaterDetection"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'none') + self.assertEqual(v.attrs["units"], "none") def test_load_dataset_aoi(self): """Test loading all datasets from an area of interest file.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_001_10_300_01.hdf' + "WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_001_10_300_01.hdf" ]) r.create_filehandlers(loadables) - datasets = 
r.load(['WaterDetection']) + datasets = r.load(["WaterDetection"]) self.assertEqual(len(datasets), 1) for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'none') + self.assertEqual(v.attrs["units"], "none") diff --git a/satpy/tests/reader_tests/test_viirs_l1b.py b/satpy/tests/reader_tests/test_viirs_l1b.py index 0d3b2ad1b9..e60f83cfd0 100644 --- a/satpy/tests/reader_tests/test_viirs_l1b.py +++ b/satpy/tests/reader_tests/test_viirs_l1b.py @@ -49,22 +49,22 @@ class FakeNetCDF4FileHandlerDay(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" - dt = filename_info.get('start_time', datetime(2016, 1, 1, 12, 0, 0)) + dt = filename_info.get("start_time", datetime(2016, 1, 1, 12, 0, 0)) file_type = filename[:5].lower() num_lines = DEFAULT_FILE_SHAPE[0] num_pixels = DEFAULT_FILE_SHAPE[1] num_scans = 5 num_luts = DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1] file_content = { - '/dimension/number_of_scans': num_scans, - '/dimension/number_of_lines': num_lines, - '/dimension/number_of_pixels': num_pixels, - '/dimension/number_of_LUT_values': num_luts, - '/attr/time_coverage_start': dt.strftime('%Y-%m-%dT%H:%M:%S.000Z'), - '/attr/time_coverage_end': (dt + timedelta(minutes=6)).strftime('%Y-%m-%dT%H:%M:%S.000Z'), - '/attr/orbit_number': 26384, - '/attr/instrument': 'VIIRS', - '/attr/platform': 'Suomi-NPP', + "/dimension/number_of_scans": num_scans, + "/dimension/number_of_lines": num_lines, + "/dimension/number_of_pixels": num_pixels, + "/dimension/number_of_LUT_values": num_luts, + "/attr/time_coverage_start": dt.strftime("%Y-%m-%dT%H:%M:%S.000Z"), + "/attr/time_coverage_end": (dt + timedelta(minutes=6)).strftime("%Y-%m-%dT%H:%M:%S.000Z"), + "/attr/orbit_number": 26384, + "/attr/instrument": "VIIRS", + "/attr/platform": "Suomi-NPP", } self._fill_contents_with_default_data(file_content, file_type) self._set_dataset_specific_metadata(file_content) @@ -73,57 +73,57 @@ def get_test_content(self, filename, filename_info, filetype_info): def _fill_contents_with_default_data(self, file_content, file_type): """Fill file contents with default data.""" - if file_type.startswith('vgeo'): - file_content['/attr/OrbitNumber'] = file_content.pop('/attr/orbit_number') - file_content['geolocation_data/latitude'] = DEFAULT_LAT_DATA - file_content['geolocation_data/longitude'] = DEFAULT_LON_DATA - file_content['geolocation_data/solar_zenith'] = DEFAULT_LON_DATA - file_content['geolocation_data/solar_azimuth'] = DEFAULT_LON_DATA - file_content['geolocation_data/sensor_zenith'] = DEFAULT_LON_DATA - file_content['geolocation_data/sensor_azimuth'] = DEFAULT_LON_DATA - if file_type.endswith('d'): - file_content['geolocation_data/lunar_zenith'] = DEFAULT_LON_DATA - file_content['geolocation_data/lunar_azimuth'] = DEFAULT_LON_DATA - elif file_type == 'vl1bm': + if file_type.startswith("vgeo"): + file_content["/attr/OrbitNumber"] = file_content.pop("/attr/orbit_number") + file_content["geolocation_data/latitude"] = DEFAULT_LAT_DATA + file_content["geolocation_data/longitude"] = DEFAULT_LON_DATA + file_content["geolocation_data/solar_zenith"] = DEFAULT_LON_DATA + file_content["geolocation_data/solar_azimuth"] = DEFAULT_LON_DATA + file_content["geolocation_data/sensor_zenith"] = DEFAULT_LON_DATA + file_content["geolocation_data/sensor_azimuth"] = DEFAULT_LON_DATA + if file_type.endswith("d"): + file_content["geolocation_data/lunar_zenith"] = DEFAULT_LON_DATA + file_content["geolocation_data/lunar_azimuth"] = DEFAULT_LON_DATA + elif 
file_type == "vl1bm": for m_band in self.M_BANDS: - file_content[f'observation_data/{m_band}'] = DEFAULT_FILE_DATA - elif file_type == 'vl1bi': + file_content[f"observation_data/{m_band}"] = DEFAULT_FILE_DATA + elif file_type == "vl1bi": for i_band in self.I_BANDS: - file_content[f'observation_data/{i_band}'] = DEFAULT_FILE_DATA - elif file_type == 'vl1bd': - file_content['observation_data/DNB_observations'] = DEFAULT_FILE_DATA - file_content['observation_data/DNB_observations/attr/units'] = 'Watts/cm^2/steradian' + file_content[f"observation_data/{i_band}"] = DEFAULT_FILE_DATA + elif file_type == "vl1bd": + file_content["observation_data/DNB_observations"] = DEFAULT_FILE_DATA + file_content["observation_data/DNB_observations/attr/units"] = "Watts/cm^2/steradian" @staticmethod def _set_dataset_specific_metadata(file_content): """Set dataset-specific metadata.""" for k in list(file_content.keys()): - if not k.startswith('observation_data') and not k.startswith('geolocation_data'): + if not k.startswith("observation_data") and not k.startswith("geolocation_data"): continue - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - if k[-3:] in ['M12', 'M13', 'M14', 'M15', 'M16', 'I04', 'I05']: - file_content[k + '_brightness_temperature_lut'] = DEFAULT_FILE_DATA.ravel() - file_content[k + '_brightness_temperature_lut/attr/units'] = 'Kelvin' - file_content[k + '_brightness_temperature_lut/attr/valid_min'] = 0 - file_content[k + '_brightness_temperature_lut/attr/valid_max'] = 65534 - file_content[k + '_brightness_temperature_lut/attr/_FillValue'] = 65535 - file_content[k + '/attr/units'] = 'Watts/meter^2/steradian/micrometer' - elif k[-3:] in ['M01', 'M02', 'M03', 'M04', 'M05', 'M06', 'M07', 'M08', - 'M09', 'M10', 'M11', 'I01', 'I02', 'I03']: - file_content[k + '/attr/radiance_units'] = 'Watts/meter^2/steradian/micrometer' - file_content[k + '/attr/radiance_scale_factor'] = 1.1 - file_content[k + '/attr/radiance_add_offset'] = 0.1 - elif k.endswith('longitude'): - file_content[k + '/attr/units'] = 'degrees_east' - elif k.endswith('latitude'): - file_content[k + '/attr/units'] = 'degrees_north' - elif k.endswith('zenith') or k.endswith('azimuth'): - file_content[k + '/attr/units'] = 'degrees' - file_content[k + '/attr/valid_min'] = 0 - file_content[k + '/attr/valid_max'] = 65534 - file_content[k + '/attr/_FillValue'] = 65535 - file_content[k + '/attr/scale_factor'] = 1.1 - file_content[k + '/attr/add_offset'] = 0.1 + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + if k[-3:] in ["M12", "M13", "M14", "M15", "M16", "I04", "I05"]: + file_content[k + "_brightness_temperature_lut"] = DEFAULT_FILE_DATA.ravel() + file_content[k + "_brightness_temperature_lut/attr/units"] = "Kelvin" + file_content[k + "_brightness_temperature_lut/attr/valid_min"] = 0 + file_content[k + "_brightness_temperature_lut/attr/valid_max"] = 65534 + file_content[k + "_brightness_temperature_lut/attr/_FillValue"] = 65535 + file_content[k + "/attr/units"] = "Watts/meter^2/steradian/micrometer" + elif k[-3:] in ["M01", "M02", "M03", "M04", "M05", "M06", "M07", "M08", + "M09", "M10", "M11", "I01", "I02", "I03"]: + file_content[k + "/attr/radiance_units"] = "Watts/meter^2/steradian/micrometer" + file_content[k + "/attr/radiance_scale_factor"] = 1.1 + file_content[k + "/attr/radiance_add_offset"] = 0.1 + elif k.endswith("longitude"): + file_content[k + "/attr/units"] = "degrees_east" + elif k.endswith("latitude"): + file_content[k + "/attr/units"] = "degrees_north" + elif k.endswith("zenith") or k.endswith("azimuth"): + file_content[k + 
"/attr/units"] = "degrees" + file_content[k + "/attr/valid_min"] = 0 + file_content[k + "/attr/valid_max"] = 65534 + file_content[k + "/attr/_FillValue"] = 65535 + file_content[k + "/attr/scale_factor"] = 1.1 + file_content[k + "/attr/add_offset"] = 0.1 class FakeNetCDF4FileHandlerNight(FakeNetCDF4FileHandlerDay): @@ -149,9 +149,9 @@ def setup_method(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_l1b import VIIRSL1BFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(VIIRSL1BFileHandler, '__bases__', (self.fake_cls,)) + self.p = mock.patch.object(VIIRSL1BFileHandler, "__bases__", (self.fake_cls,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -164,7 +164,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', + "VL1BM_snpp_d20161130_t012400_c20161130054822.nc", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) @@ -176,8 +176,8 @@ def test_available_datasets_m_bands(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', - 'VGEOM_snpp_d20161130_t012400_c20161130054822.nc', + "VL1BM_snpp_d20161130_t012400_c20161130054822.nc", + "VGEOM_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) avail_names = r.available_dataset_names @@ -190,52 +190,52 @@ def test_load_every_m_band_bt(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', - 'VGEOM_snpp_d20161130_t012400_c20161130054822.nc', + "VL1BM_snpp_d20161130_t012400_c20161130054822.nc", + "VGEOM_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['M12', - 'M13', - 'M14', - 'M15', - 'M16']) + datasets = r.load(["M12", + "M13", + "M14", + "M15", + "M16"]) assert len(datasets) == 5 for v in datasets.values(): - assert v.attrs['calibration'] == 'brightness_temperature' - assert v.attrs['units'] == 'K' - assert v.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lats.attrs['rows_per_scan'] == 2 - assert v.attrs['sensor'] == "viirs" + assert v.attrs["calibration"] == "brightness_temperature" + assert v.attrs["units"] == "K" + assert v.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lons.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lats.attrs["rows_per_scan"] == 2 + assert v.attrs["sensor"] == "viirs" def test_load_every_m_band_refl(self): """Test loading all M band reflectances.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', - 'VGEOM_snpp_d20161130_t012400_c20161130054822.nc', + "VL1BM_snpp_d20161130_t012400_c20161130054822.nc", + "VGEOM_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['M01', - 'M02', - 'M03', - 'M04', - 'M05', - 'M06', - 'M07', - 'M08', - 'M09', - 'M10', - 
'M11']) + datasets = r.load(["M01", + "M02", + "M03", + "M04", + "M05", + "M06", + "M07", + "M08", + "M09", + "M10", + "M11"]) assert len(datasets) == (11 if self.has_reflectance_bands else 0) for v in datasets.values(): - assert v.attrs['calibration'] == 'reflectance' - assert v.attrs['units'] == '%' - assert v.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lats.attrs['rows_per_scan'] == 2 - assert v.attrs['sensor'] == "viirs" + assert v.attrs["calibration"] == "reflectance" + assert v.attrs["units"] == "%" + assert v.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lons.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lats.attrs["rows_per_scan"] == 2 + assert v.attrs["sensor"] == "viirs" def test_load_every_m_band_rad(self): """Test loading all M bands as radiances.""" @@ -243,34 +243,34 @@ def test_load_every_m_band_rad(self): from satpy.tests.utils import make_dataid r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', - 'VGEOM_snpp_d20161130_t012400_c20161130054822.nc', + "VL1BM_snpp_d20161130_t012400_c20161130054822.nc", + "VGEOM_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) - datasets = r.load([make_dataid(name='M01', calibration='radiance'), - make_dataid(name='M02', calibration='radiance'), - make_dataid(name='M03', calibration='radiance'), - make_dataid(name='M04', calibration='radiance'), - make_dataid(name='M05', calibration='radiance'), - make_dataid(name='M06', calibration='radiance'), - make_dataid(name='M07', calibration='radiance'), - make_dataid(name='M08', calibration='radiance'), - make_dataid(name='M09', calibration='radiance'), - make_dataid(name='M10', calibration='radiance'), - make_dataid(name='M11', calibration='radiance'), - make_dataid(name='M12', calibration='radiance'), - make_dataid(name='M13', calibration='radiance'), - make_dataid(name='M14', calibration='radiance'), - make_dataid(name='M15', calibration='radiance'), - make_dataid(name='M16', calibration='radiance')]) + datasets = r.load([make_dataid(name="M01", calibration="radiance"), + make_dataid(name="M02", calibration="radiance"), + make_dataid(name="M03", calibration="radiance"), + make_dataid(name="M04", calibration="radiance"), + make_dataid(name="M05", calibration="radiance"), + make_dataid(name="M06", calibration="radiance"), + make_dataid(name="M07", calibration="radiance"), + make_dataid(name="M08", calibration="radiance"), + make_dataid(name="M09", calibration="radiance"), + make_dataid(name="M10", calibration="radiance"), + make_dataid(name="M11", calibration="radiance"), + make_dataid(name="M12", calibration="radiance"), + make_dataid(name="M13", calibration="radiance"), + make_dataid(name="M14", calibration="radiance"), + make_dataid(name="M15", calibration="radiance"), + make_dataid(name="M16", calibration="radiance")]) assert len(datasets) == (16 if self.has_reflectance_bands else 5) for v in datasets.values(): - assert v.attrs['calibration'] == 'radiance' - assert v.attrs['units'] == 'W m-2 um-1 sr-1' - assert v.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lats.attrs['rows_per_scan'] == 2 - assert v.attrs['sensor'] == "viirs" + assert v.attrs["calibration"] == "radiance" + assert v.attrs["units"] == "W m-2 um-1 sr-1" + assert v.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lons.attrs["rows_per_scan"] == 2 + assert 
v.attrs["area"].lats.attrs["rows_per_scan"] == 2 + assert v.attrs["sensor"] == "viirs" def test_load_i_band_angles(self): """Test loading all M bands as radiances.""" @@ -278,65 +278,65 @@ def test_load_i_band_angles(self): from satpy.tests.utils import make_dataid r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'VL1BI_snpp_d20161130_t012400_c20161130054822.nc', - 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', - 'VGEOI_snpp_d20161130_t012400_c20161130054822.nc', - 'VGEOM_snpp_d20161130_t012400_c20161130054822.nc', + "VL1BI_snpp_d20161130_t012400_c20161130054822.nc", + "VL1BM_snpp_d20161130_t012400_c20161130054822.nc", + "VGEOI_snpp_d20161130_t012400_c20161130054822.nc", + "VGEOM_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) datasets = r.load([ - make_dataid(name='satellite_zenith_angle'), - make_dataid(name='satellite_azimuth_angle'), - make_dataid(name='solar_azimuth_angle'), - make_dataid(name='solar_zenith_angle'), + make_dataid(name="satellite_zenith_angle"), + make_dataid(name="satellite_azimuth_angle"), + make_dataid(name="solar_azimuth_angle"), + make_dataid(name="solar_zenith_angle"), ]) assert len(datasets) == 4 for v in datasets.values(): - assert v.attrs['resolution'] == 371 - assert v.attrs['sensor'] == "viirs" + assert v.attrs["resolution"] == 371 + assert v.attrs["sensor"] == "viirs" def test_load_dnb_radiance(self): """Test loading the main DNB dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'VL1BD_snpp_d20161130_t012400_c20161130054822.nc', - 'VGEOD_snpp_d20161130_t012400_c20161130054822.nc', + "VL1BD_snpp_d20161130_t012400_c20161130054822.nc", + "VGEOD_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['DNB']) + datasets = r.load(["DNB"]) assert len(datasets) == 1 for v in datasets.values(): - assert v.attrs['calibration'] == 'radiance' - assert v.attrs['units'] == 'W m-2 sr-1' - assert v.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lats.attrs['rows_per_scan'] == 2 - assert v.attrs['sensor'] == "viirs" + assert v.attrs["calibration"] == "radiance" + assert v.attrs["units"] == "W m-2 sr-1" + assert v.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lons.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lats.attrs["rows_per_scan"] == 2 + assert v.attrs["sensor"] == "viirs" def test_load_dnb_angles(self): """Test loading all DNB angle datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'VL1BD_snpp_d20161130_t012400_c20161130054822.nc', - 'VGEOD_snpp_d20161130_t012400_c20161130054822.nc', + "VL1BD_snpp_d20161130_t012400_c20161130054822.nc", + "VGEOD_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['dnb_solar_zenith_angle', - 'dnb_solar_azimuth_angle', - 'dnb_satellite_zenith_angle', - 'dnb_satellite_azimuth_angle', - 'dnb_lunar_zenith_angle', - 'dnb_lunar_azimuth_angle', + datasets = r.load(["dnb_solar_zenith_angle", + "dnb_solar_azimuth_angle", + "dnb_satellite_zenith_angle", + "dnb_satellite_azimuth_angle", + "dnb_lunar_zenith_angle", + "dnb_lunar_azimuth_angle", ]) assert len(datasets) == 6 for v in datasets.values(): - assert v.attrs['units'] == 'degrees' - assert v.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2 - assert 
v.attrs['area'].lats.attrs['rows_per_scan'] == 2 - assert v.attrs['sensor'] == "viirs" + assert v.attrs["units"] == "degrees" + assert v.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lons.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lats.attrs["rows_per_scan"] == 2 + assert v.attrs["sensor"] == "viirs" class TestVIIRSL1BReaderDayNight(TestVIIRSL1BReaderDay): diff --git a/satpy/tests/reader_tests/test_viirs_sdr.py b/satpy/tests/reader_tests/test_viirs_sdr.py index ed50214c15..fecd9a0b0f 100644 --- a/satpy/tests/reader_tests/test_viirs_sdr.py +++ b/satpy/tests/reader_tests/test_viirs_sdr.py @@ -49,18 +49,18 @@ def __init__(self, filename, filename_info, filetype_info, include_factors=True) @staticmethod def _add_basic_metadata_to_file_content(file_content, filename_info, num_grans): - start_time = filename_info['start_time'] - end_time = filename_info['end_time'].replace(year=start_time.year, + start_time = filename_info["start_time"] + end_time = filename_info["end_time"].replace(year=start_time.year, month=start_time.month, day=start_time.day) - begin_date = start_time.strftime('%Y%m%d') + begin_date = start_time.strftime("%Y%m%d") begin_date = np.array(begin_date) - begin_time = start_time.strftime('%H%M%S.%fZ') + begin_time = start_time.strftime("%H%M%S.%fZ") begin_time = np.array(begin_time) - ending_date = end_time.strftime('%Y%m%d') - ending_time = end_time.strftime('%H%M%S.%fZ') + ending_date = end_time.strftime("%Y%m%d") + ending_time = end_time.strftime("%H%M%S.%fZ") new_file_content = { "{prefix2}/attr/AggregateNumberGranules": num_grans, "{prefix2}/attr/AggregateBeginningDate": begin_date, @@ -69,8 +69,8 @@ def _add_basic_metadata_to_file_content(file_content, filename_info, num_grans): "{prefix2}/attr/AggregateEndingTime": ending_time, "{prefix2}/attr/G-Ring_Longitude": np.array([0.0, 0.1, 0.2, 0.3]), "{prefix2}/attr/G-Ring_Latitude": np.array([0.0, 0.1, 0.2, 0.3]), - "{prefix2}/attr/AggregateBeginningOrbitNumber": "{0:d}".format(filename_info['orbit']), - "{prefix2}/attr/AggregateEndingOrbitNumber": "{0:d}".format(filename_info['orbit']), + "{prefix2}/attr/AggregateBeginningOrbitNumber": "{0:d}".format(filename_info["orbit"]), + "{prefix2}/attr/AggregateEndingOrbitNumber": "{0:d}".format(filename_info["orbit"]), "{prefix1}/attr/Instrument_Short_Name": "VIIRS", "/attr/Platform_Short_Name": "NPP", } @@ -84,13 +84,13 @@ def _add_granule_specific_info_to_file_content( lats_lists = self._get_per_granule_lats() file_content["{prefix3}/NumberOfScans"] = np.array([48] * num_granules) for granule_idx in range(num_granules): - prefix_gran = '{prefix}/{dataset_group}_Gran_{idx}'.format(prefix=gran_group_prefix, + prefix_gran = "{prefix}/{dataset_group}_Gran_{idx}".format(prefix=gran_group_prefix, dataset_group=dataset_group, idx=granule_idx) num_scans = num_scans_per_granule[granule_idx] - file_content[prefix_gran + '/attr/N_Number_Of_Scans'] = num_scans - file_content[prefix_gran + '/attr/G-Ring_Longitude'] = lons_lists[granule_idx] - file_content[prefix_gran + '/attr/G-Ring_Latitude'] = lats_lists[granule_idx] + file_content[prefix_gran + "/attr/N_Number_Of_Scans"] = num_scans + file_content[prefix_gran + "/attr/G-Ring_Longitude"] = lons_lists[granule_idx] + file_content[prefix_gran + "/attr/G-Ring_Latitude"] = lats_lists[granule_idx] @staticmethod def _get_per_granule_lons(): @@ -154,13 +154,13 @@ def _add_data_info_to_file_content(self, file_content, filename, data_var_prefix # SDR files always produce data with 48 scans per granule even if there are less total_rows = 
DEFAULT_FILE_SHAPE[0] * 48 * num_grans new_shape = (total_rows, DEFAULT_FILE_SHAPE[1]) - if filename[2:5] in ['M{:02d}'.format(x) for x in range(12)] + ['I01', 'I02', 'I03']: - keys = ['Radiance', 'Reflectance'] - elif filename[2:5] in ['M{:02d}'.format(x) for x in range(12, 17)] + ['I04', 'I05']: - keys = ['Radiance', 'BrightnessTemperature'] + if filename[2:5] in ["M{:02d}".format(x) for x in range(12)] + ["I01", "I02", "I03"]: + keys = ["Radiance", "Reflectance"] + elif filename[2:5] in ["M{:02d}".format(x) for x in range(12, 17)] + ["I04", "I05"]: + keys = ["Radiance", "BrightnessTemperature"] else: # DNB - keys = ['Radiance'] + keys = ["Radiance"] for k in keys: k = data_var_prefix + "/" + k @@ -175,7 +175,7 @@ def _add_geolocation_info_to_file_content(file_content, filename, data_var_prefi # SDR files always produce data with 48 scans per granule even if there are less total_rows = DEFAULT_FILE_SHAPE[0] * 48 * num_grans new_shape = (total_rows, DEFAULT_FILE_SHAPE[1]) - is_dnb = filename[:5] not in ['GMODO', 'GIMGO'] + is_dnb = filename[:5] not in ["GMODO", "GIMGO"] if not is_dnb: lon_data = np.linspace(15, 55, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) lat_data = np.linspace(55, 75, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) @@ -194,12 +194,12 @@ def _add_geolocation_info_to_file_content(file_content, filename, data_var_prefi file_content[k] = np.repeat([file_content[k]], total_rows, axis=0) file_content[k + "/shape"] = new_shape - angles = ['SolarZenithAngle', - 'SolarAzimuthAngle', - 'SatelliteZenithAngle', - 'SatelliteAzimuthAngle'] + angles = ["SolarZenithAngle", + "SolarAzimuthAngle", + "SatelliteZenithAngle", + "SatelliteAzimuthAngle"] if is_dnb: - angles += ['LunarZenithAngle', 'LunarAzimuthAngle'] + angles += ["LunarZenithAngle", "LunarAzimuthAngle"] for k in angles: k = data_var_prefix + "/" + k file_content[k] = lon_data # close enough to SZA @@ -208,14 +208,14 @@ def _add_geolocation_info_to_file_content(file_content, filename, data_var_prefi @staticmethod def _add_geo_ref(file_content, filename): - if filename[:3] == 'SVI': - geo_prefix = 'GIMGO' - elif filename[:3] == 'SVM': - geo_prefix = 'GMODO' + if filename[:3] == "SVI": + geo_prefix = "GIMGO" + elif filename[:3] == "SVM": + geo_prefix = "GMODO" else: geo_prefix = None if geo_prefix: - file_content['/attr/N_GEO_Ref'] = geo_prefix + filename[5:] + file_content["/attr/N_GEO_Ref"] = geo_prefix + filename[5:] @staticmethod def _convert_numpy_content_to_dataarray(final_content): @@ -225,7 +225,7 @@ def _convert_numpy_content_to_dataarray(final_content): if isinstance(val, np.ndarray): val = da.from_array(val, chunks=val.shape) if val.ndim > 1: - final_content[key] = DataArray(val, dims=('y', 'x')) + final_content[key] = DataArray(val, dims=("y", "x")) else: final_content[key] = DataArray(val) @@ -234,9 +234,9 @@ def get_test_content(self, filename, filename_info, filetype_info): final_content = {} for dataset in self.datasets: dataset_group = DATASET_KEYS[dataset] - prefix1 = 'Data_Products/{dataset_group}'.format(dataset_group=dataset_group) - prefix2 = '{prefix}/{dataset_group}_Aggr'.format(prefix=prefix1, dataset_group=dataset_group) - prefix3 = 'All_Data/{dataset_group}_All'.format(dataset_group=dataset_group) + prefix1 = "Data_Products/{dataset_group}".format(dataset_group=dataset_group) + prefix2 = "{prefix}/{dataset_group}_Aggr".format(prefix=prefix1, dataset_group=dataset_group) + prefix3 = "All_Data/{dataset_group}_All".format(dataset_group=dataset_group) file_content = {} 
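# The metadata keys built by the helpers below embed {prefix1}/{prefix2}/{prefix3} placeholders; get_test_content expands them once per dataset group via k.format(prefix1=..., prefix2=..., prefix3=...) before the band data or geolocation entries are added.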
self._add_basic_metadata_to_file_content(file_content, filename_info, self._num_test_granules) @@ -248,10 +248,10 @@ def get_test_content(self, filename, filename_info, filetype_info): for k, v in list(file_content.items()): file_content[k.format(prefix1=prefix1, prefix2=prefix2, prefix3=prefix3)] = v - if filename[:3] in ['SVM', 'SVI', 'SVD']: + if filename[:3] in ["SVM", "SVI", "SVD"]: self._add_data_info_to_file_content(file_content, filename, prefix3, self._num_test_granules) - elif filename[0] == 'G': + elif filename[0] == "G": self._add_geolocation_info_to_file_content(file_content, filename, prefix3, self._num_test_granules) final_content.update(file_content) @@ -271,8 +271,8 @@ def touch_geo_files(*prefixes): def _touch_geo_file(prefix): - geo_fn = prefix + '_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' - open(geo_fn, 'w') + geo_fn = prefix + "_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5" + open(geo_fn, "w") return geo_fn @@ -283,47 +283,47 @@ class TestVIIRSSDRReader(unittest.TestCase): def _assert_reflectance_properties(self, data_arr, num_scans=16, with_area=True): self.assertTrue(np.issubdtype(data_arr.dtype, np.float32)) - self.assertEqual(data_arr.attrs['calibration'], 'reflectance') - self.assertEqual(data_arr.attrs['units'], '%') - self.assertEqual(data_arr.attrs['rows_per_scan'], num_scans) + self.assertEqual(data_arr.attrs["calibration"], "reflectance") + self.assertEqual(data_arr.attrs["units"], "%") + self.assertEqual(data_arr.attrs["rows_per_scan"], num_scans) if with_area: - self.assertIn('area', data_arr.attrs) - self.assertIsNotNone(data_arr.attrs['area']) - self.assertEqual(data_arr.attrs['area'].shape, data_arr.shape) + self.assertIn("area", data_arr.attrs) + self.assertIsNotNone(data_arr.attrs["area"]) + self.assertEqual(data_arr.attrs["area"].shape, data_arr.shape) else: - self.assertNotIn('area', data_arr.attrs) + self.assertNotIn("area", data_arr.attrs) def _assert_bt_properties(self, data_arr, num_scans=16, with_area=True): self.assertTrue(np.issubdtype(data_arr.dtype, np.float32)) - self.assertEqual(data_arr.attrs['calibration'], 'brightness_temperature') - self.assertEqual(data_arr.attrs['units'], 'K') - self.assertEqual(data_arr.attrs['rows_per_scan'], num_scans) + self.assertEqual(data_arr.attrs["calibration"], "brightness_temperature") + self.assertEqual(data_arr.attrs["units"], "K") + self.assertEqual(data_arr.attrs["rows_per_scan"], num_scans) if with_area: - self.assertIn('area', data_arr.attrs) - self.assertIsNotNone(data_arr.attrs['area']) - self.assertEqual(data_arr.attrs['area'].shape, data_arr.shape) + self.assertIn("area", data_arr.attrs) + self.assertIsNotNone(data_arr.attrs["area"]) + self.assertEqual(data_arr.attrs["area"].shape, data_arr.shape) else: - self.assertNotIn('area', data_arr.attrs) + self.assertNotIn("area", data_arr.attrs) def _assert_dnb_radiance_properties(self, data_arr, with_area=True): self.assertTrue(np.issubdtype(data_arr.dtype, np.float32)) - self.assertEqual(data_arr.attrs['calibration'], 'radiance') - self.assertEqual(data_arr.attrs['units'], 'W m-2 sr-1') - self.assertEqual(data_arr.attrs['rows_per_scan'], 16) + self.assertEqual(data_arr.attrs["calibration"], "radiance") + self.assertEqual(data_arr.attrs["units"], "W m-2 sr-1") + self.assertEqual(data_arr.attrs["rows_per_scan"], 16) if with_area: - self.assertIn('area', data_arr.attrs) - self.assertIsNotNone(data_arr.attrs['area']) - self.assertEqual(data_arr.attrs['area'].shape, data_arr.shape) + 
self.assertIn("area", data_arr.attrs) + self.assertIsNotNone(data_arr.attrs["area"]) + self.assertEqual(data_arr.attrs["area"].shape, data_arr.shape) else: - self.assertNotIn('area', data_arr.attrs) + self.assertNotIn("area", data_arr.attrs) def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_atms_sdr_base import JPSS_SDR_FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(JPSS_SDR_FileHandler, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(JPSS_SDR_FileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -336,7 +336,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -349,9 +349,9 @@ def test_init_start_time_is_nodate(self): r = load_reader(self.reader_configs) with pytest.raises(ValueError) as exec_info: _ = r.create_filehandlers([ - 'SVI01_npp_d19580101_t0000000_e0001261_b01708_c20120226002130255476_noaa_ops.h5', + "SVI01_npp_d19580101_t0000000_e0001261_b01708_c20120226002130255476_noaa_ops.h5", ]) - expected = 'Datetime invalid 1958-01-01 00:00:00' + expected = "Datetime invalid 1958-01-01 00:00:00" assert str(exec_info.value) == expected def test_init_start_time_beyond(self): @@ -361,10 +361,10 @@ def test_init_start_time_beyond(self): from satpy.readers import load_reader r = load_reader(self.reader_configs, filter_parameters={ - 'start_time': datetime(2012, 2, 26) + "start_time": datetime(2012, 2, 26) }) fhs = r.create_filehandlers([ - 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) self.assertEqual(len(fhs), 0) @@ -375,10 +375,10 @@ def test_init_end_time_beyond(self): from satpy.readers import load_reader r = load_reader(self.reader_configs, filter_parameters={ - 'end_time': datetime(2012, 2, 24) + "end_time": datetime(2012, 2, 24) }) fhs = r.create_filehandlers([ - 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) self.assertEqual(len(fhs), 0) @@ -390,11 +390,11 @@ def test_init_start_end_time(self): r = load_reader(self.reader_configs, filter_parameters={ - 'start_time': datetime(2012, 2, 24), - 'end_time': datetime(2012, 2, 26) + "start_time": datetime(2012, 2, 24), + "end_time": datetime(2012, 2, 26) }) loadables = r.select_files_from_pathnames([ - 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) self.assertEqual(len(loadables), 1) r.create_filehandlers(loadables) @@ -406,30 +406,30 @@ def test_load_all_m_reflectances_no_geo(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 
'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) - ds = r.load(['M01', - 'M02', - 'M03', - 'M04', - 'M05', - 'M06', - 'M07', - 'M08', - 'M09', - 'M10', - 'M11', + ds = r.load(["M01", + "M02", + "M03", + "M04", + "M05", + "M06", + "M07", + "M08", + "M09", + "M10", + "M11", ]) self.assertEqual(len(ds), 11) for d in ds.values(): @@ -440,31 +440,31 @@ def test_load_all_m_reflectances_find_geo(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + 
"SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) with touch_geo_files("GMTCO", "GMODO") as (geo_fn1, geo_fn2): r.create_filehandlers(loadables) - ds = r.load(['M01', - 'M02', - 'M03', - 'M04', - 'M05', - 'M06', - 'M07', - 'M08', - 'M09', - 'M10', - 'M11', + ds = r.load(["M01", + "M02", + "M03", + "M04", + "M05", + "M06", + "M07", + "M08", + "M09", + "M10", + "M11", ]) self.assertEqual(len(ds), 11) @@ -476,140 +476,140 @@ def test_load_all_m_reflectances_provided_geo(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) with touch_geo_files("GMTCO", "GMODO") as (geo_fn1, geo_fn2): r.create_filehandlers(loadables) - ds = r.load(['M01', - 'M02', - 'M03', - 'M04', - 'M05', - 'M06', - 'M07', - 'M08', - 'M09', - 'M10', - 'M11', + ds = r.load(["M01", + "M02", + "M03", + "M04", + "M05", + "M06", + "M07", + "M08", + "M09", + "M10", + 
"M11", ]) self.assertEqual(len(ds), 11) for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) - self.assertEqual(d.attrs['area'].lons.min(), 5) - self.assertEqual(d.attrs['area'].lats.min(), 45) - self.assertEqual(d.attrs['area'].lons.attrs['rows_per_scan'], 16) - self.assertEqual(d.attrs['area'].lats.attrs['rows_per_scan'], 16) + self.assertEqual(d.attrs["area"].lons.min(), 5) + self.assertEqual(d.attrs["area"].lats.min(), 45) + self.assertEqual(d.attrs["area"].lons.attrs["rows_per_scan"], 16) + self.assertEqual(d.attrs["area"].lats.attrs["rows_per_scan"], 16) def test_load_all_m_reflectances_use_nontc(self): """Load all M band reflectances but use non-TC geolocation.""" from satpy.readers import load_reader r = load_reader(self.reader_configs, use_tc=False) loadables = r.select_files_from_pathnames([ - 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GMODO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GMODO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) with touch_geo_files("GMTCO", "GMODO") as (geo_fn1, geo_fn2): - r.create_filehandlers(loadables, {'use_tc': False}) - ds = r.load(['M01', - 'M02', - 'M03', - 'M04', - 'M05', - 'M06', - 'M07', - 'M08', - 'M09', - 'M10', - 'M11', + r.create_filehandlers(loadables, {"use_tc": False}) + ds = r.load(["M01", + "M02", + "M03", + "M04", + "M05", + "M06", + "M07", + "M08", + "M09", + "M10", + "M11", ]) self.assertEqual(len(ds), 11) for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) - 
self.assertEqual(d.attrs['area'].lons.min(), 15) - self.assertEqual(d.attrs['area'].lats.min(), 55) - self.assertEqual(d.attrs['area'].lons.attrs['rows_per_scan'], 16) - self.assertEqual(d.attrs['area'].lats.attrs['rows_per_scan'], 16) + self.assertEqual(d.attrs["area"].lons.min(), 15) + self.assertEqual(d.attrs["area"].lats.min(), 55) + self.assertEqual(d.attrs["area"].lons.attrs["rows_per_scan"], 16) + self.assertEqual(d.attrs["area"].lats.attrs["rows_per_scan"], 16) def test_load_all_m_reflectances_use_nontc2(self): """Load all M band reflectances but use non-TC geolocation because TC isn't available.""" from satpy.readers import load_reader r = load_reader(self.reader_configs, use_tc=None) loadables = r.select_files_from_pathnames([ - 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GMODO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GMODO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) with touch_geo_files("GMODO") as (geo_fn2,): - r.create_filehandlers(loadables, {'use_tc': None}) - ds = r.load(['M01', - 'M02', - 'M03', - 'M04', - 'M05', - 'M06', - 'M07', - 'M08', - 'M09', - 'M10', - 'M11', + r.create_filehandlers(loadables, {"use_tc": None}) + ds = r.load(["M01", + "M02", + "M03", + "M04", + "M05", + "M06", + "M07", + "M08", + "M09", + "M10", + "M11", ]) self.assertEqual(len(ds), 11) for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) - self.assertEqual(d.attrs['area'].lons.min(), 15) - self.assertEqual(d.attrs['area'].lats.min(), 55) - self.assertEqual(d.attrs['area'].lons.attrs['rows_per_scan'], 16) - self.assertEqual(d.attrs['area'].lats.attrs['rows_per_scan'], 16) + self.assertEqual(d.attrs["area"].lons.min(), 15) + 
self.assertEqual(d.attrs["area"].lats.min(), 55) + self.assertEqual(d.attrs["area"].lons.attrs["rows_per_scan"], 16) + self.assertEqual(d.attrs["area"].lats.attrs["rows_per_scan"], 16) def test_load_all_m_bts(self): """Load all M band brightness temperatures.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVM12_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM13_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM14_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM15_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM16_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVM12_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM13_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM14_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM15_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM16_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) - ds = r.load(['M12', - 'M13', - 'M14', - 'M15', - 'M16', + ds = r.load(["M12", + "M13", + "M14", + "M15", + "M16", ]) self.assertEqual(len(ds), 5) for d in ds.values(): @@ -625,22 +625,22 @@ def test_load_dnb_sza_no_factors(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) - r.create_filehandlers(loadables, {'include_factors': False}) - ds = r.load(['dnb_solar_zenith_angle', - 'dnb_solar_azimuth_angle', - 'dnb_satellite_zenith_angle', - 'dnb_satellite_azimuth_angle', - 'dnb_lunar_zenith_angle', - 'dnb_lunar_azimuth_angle']) + r.create_filehandlers(loadables, {"include_factors": False}) + ds = r.load(["dnb_solar_zenith_angle", + "dnb_solar_azimuth_angle", + "dnb_satellite_zenith_angle", + "dnb_satellite_azimuth_angle", + "dnb_lunar_zenith_angle", + "dnb_lunar_azimuth_angle"]) self.assertEqual(len(ds), 6) for d in ds.values(): self.assertTrue(np.issubdtype(d.dtype, np.float32)) - self.assertEqual(d.attrs['units'], 'degrees') - self.assertEqual(d.attrs['rows_per_scan'], 16) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) + self.assertEqual(d.attrs["units"], "degrees") + self.assertEqual(d.attrs["rows_per_scan"], 16) + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) def test_load_all_m_radiances(self): """Load all M band radiances.""" @@ -648,62 +648,62 @@ def test_load_all_m_radiances(self): from satpy.tests.utils import make_dsq r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 
'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM12_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM13_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM14_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM15_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM16_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM12_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM13_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM14_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM15_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM16_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) ds = r.load([ - make_dsq(name='M01', calibration='radiance'), - make_dsq(name='M02', calibration='radiance'), - make_dsq(name='M03', calibration='radiance'), - make_dsq(name='M04', calibration='radiance'), - make_dsq(name='M05', calibration='radiance'), - make_dsq(name='M06', calibration='radiance'), - make_dsq(name='M07', calibration='radiance'), - make_dsq(name='M08', calibration='radiance'), - make_dsq(name='M09', calibration='radiance'), - make_dsq(name='M10', calibration='radiance'), - make_dsq(name='M11', calibration='radiance'), - make_dsq(name='M12', calibration='radiance'), - make_dsq(name='M13', calibration='radiance'), - make_dsq(name='M14', calibration='radiance'), - make_dsq(name='M15', calibration='radiance'), - make_dsq(name='M16', calibration='radiance'), + make_dsq(name="M01", calibration="radiance"), + make_dsq(name="M02", calibration="radiance"), + make_dsq(name="M03", calibration="radiance"), + make_dsq(name="M04", calibration="radiance"), + make_dsq(name="M05", 
calibration="radiance"), + make_dsq(name="M06", calibration="radiance"), + make_dsq(name="M07", calibration="radiance"), + make_dsq(name="M08", calibration="radiance"), + make_dsq(name="M09", calibration="radiance"), + make_dsq(name="M10", calibration="radiance"), + make_dsq(name="M11", calibration="radiance"), + make_dsq(name="M12", calibration="radiance"), + make_dsq(name="M13", calibration="radiance"), + make_dsq(name="M14", calibration="radiance"), + make_dsq(name="M15", calibration="radiance"), + make_dsq(name="M16", calibration="radiance"), ]) self.assertEqual(len(ds), 16) for d in ds.values(): self.assertTrue(np.issubdtype(d.dtype, np.float32)) - self.assertEqual(d.attrs['calibration'], 'radiance') - self.assertEqual(d.attrs['units'], 'W m-2 um-1 sr-1') - self.assertEqual(d.attrs['rows_per_scan'], 16) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) + self.assertEqual(d.attrs["calibration"], "radiance") + self.assertEqual(d.attrs["units"], "W m-2 um-1 sr-1") + self.assertEqual(d.attrs["rows_per_scan"], 16) + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) def test_load_dnb(self): """Load DNB dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) - ds = r.load(['DNB']) + ds = r.load(["DNB"]) self.assertEqual(len(ds), 1) for d in ds.values(): data = d.values @@ -722,11 +722,11 @@ def test_load_dnb_no_factors(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) - r.create_filehandlers(loadables, {'include_factors': False}) - ds = r.load(['DNB']) + r.create_filehandlers(loadables, {"include_factors": False}) + ds = r.load(["DNB"]) self.assertEqual(len(ds), 1) for d in ds.values(): data = d.values @@ -745,12 +745,12 @@ def test_load_i_no_files(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) - self.assertNotIn('I01', [x['name'] for x in r.available_dataset_ids]) - ds = r.load(['I01']) + self.assertNotIn("I01", [x["name"] for x in r.available_dataset_ids]) + ds = r.load(["I01"]) self.assertEqual(len(ds), 0) def test_load_all_i_reflectances_provided_geo(self): @@ -758,36 +758,36 @@ def test_load_all_i_reflectances_provided_geo(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 
'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVI02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVI03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GITCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVI02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVI03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GITCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) - ds = r.load(['I01', - 'I02', - 'I03', + ds = r.load(["I01", + "I02", + "I03", ]) self.assertEqual(len(ds), 3) for d in ds.values(): self._assert_reflectance_properties(d, num_scans=32) - self.assertEqual(d.attrs['area'].lons.min(), 5) - self.assertEqual(d.attrs['area'].lats.min(), 45) - self.assertEqual(d.attrs['area'].lons.attrs['rows_per_scan'], 32) - self.assertEqual(d.attrs['area'].lats.attrs['rows_per_scan'], 32) + self.assertEqual(d.attrs["area"].lons.min(), 5) + self.assertEqual(d.attrs["area"].lats.min(), 45) + self.assertEqual(d.attrs["area"].lons.attrs["rows_per_scan"], 32) + self.assertEqual(d.attrs["area"].lats.attrs["rows_per_scan"], 32) def test_load_all_i_bts(self): """Load all I band brightness temperatures.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVI04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVI05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GITCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVI04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVI05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GITCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) - ds = r.load(['I04', - 'I05', + ds = r.load(["I04", + "I05", ]) self.assertEqual(len(ds), 2) for d in ds.values(): @@ -799,29 +799,29 @@ def test_load_all_i_radiances(self): from satpy.tests.utils import make_dsq r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVI02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVI03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVI04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVI05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GITCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVI02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVI03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVI04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVI05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GITCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) ds = r.load([ - make_dsq(name='I01', calibration='radiance'), - make_dsq(name='I02', calibration='radiance'), - 
make_dsq(name='I03', calibration='radiance'), - make_dsq(name='I04', calibration='radiance'), - make_dsq(name='I05', calibration='radiance'), + make_dsq(name="I01", calibration="radiance"), + make_dsq(name="I02", calibration="radiance"), + make_dsq(name="I03", calibration="radiance"), + make_dsq(name="I04", calibration="radiance"), + make_dsq(name="I05", calibration="radiance"), ]) self.assertEqual(len(ds), 5) for d in ds.values(): self.assertTrue(np.issubdtype(d.dtype, np.float32)) - self.assertEqual(d.attrs['calibration'], 'radiance') - self.assertEqual(d.attrs['units'], 'W m-2 um-1 sr-1') - self.assertEqual(d.attrs['rows_per_scan'], 32) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) + self.assertEqual(d.attrs["calibration"], "radiance") + self.assertEqual(d.attrs["units"], "W m-2 um-1 sr-1") + self.assertEqual(d.attrs["rows_per_scan"], 32) + self.assertIn("area", d.attrs) + self.assertIsNotNone(d.attrs["area"]) class FakeHDF5FileHandlerAggr(FakeHDF5FileHandler2): @@ -840,9 +840,9 @@ def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_sdr import VIIRSSDRFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(VIIRSSDRFileHandler, '__bases__', (FakeHDF5FileHandlerAggr,)) + self.p = mock.patch.object(VIIRSSDRFileHandler, "__bases__", (FakeHDF5FileHandlerAggr,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -855,7 +855,7 @@ def test_bounding_box(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) # make sure we have some files @@ -869,7 +869,7 @@ def test_bounding_box(self): 65.545685, 63.103046, 61.853905, 55.169273, 57.062447, 58.77254, 60.465942, 62.036346, 63.465122, 64.72178, 65.78417, 66.66166, 79.00025 ] - lons, lats = r.file_handlers['generic_file'][0].get_bounding_box() + lons, lats = r.file_handlers["generic_file"][0].get_bounding_box() np.testing.assert_allclose(lons, expected_lons) np.testing.assert_allclose(lats, expected_lats) @@ -890,9 +890,9 @@ def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_atms_sdr_base import JPSS_SDR_FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(JPSS_SDR_FileHandler, '__bases__', (FakeShortHDF5FileHandlerAggr,)) + self.p = mock.patch.object(JPSS_SDR_FileHandler, "__bases__", (FakeShortHDF5FileHandlerAggr,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -905,7 +905,7 @@ def test_load_truncated_band(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + 
"SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) ds = r.load(["I01"]) diff --git a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py index 2f926a0e47..b14ff771d6 100644 --- a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py +++ b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py @@ -32,33 +32,33 @@ @pytest.fixture def _nc_filename(tmp_path): now = datetime.datetime.utcnow() - filename = f'VGAC_VJ10XMOD_A{now:%Y%j_%H%M}_n004946_K005.nc' + filename = f"VGAC_VJ10XMOD_A{now:%Y%j_%H%M}_n004946_K005.nc" filename_str = str(tmp_path / filename) # Create test data - with Dataset(filename_str, 'w') as nc: + with Dataset(filename_str, "w") as nc: nscn = 7 npix = 800 n_lut = 12000 - nc.createDimension('npix', npix) - nc.createDimension('nscn', nscn) - nc.createDimension('n_lut', n_lut) + nc.createDimension("npix", npix) + nc.createDimension("nscn", nscn) + nc.createDimension("n_lut", n_lut) nc.StartTime = "2023-03-28T09:08:07" nc.EndTime = "2023-03-28T10:11:12" for ind in range(1, 11, 1): ch_name = "M{:02d}".format(ind) - r_a = nc.createVariable(ch_name, np.int16, dimensions=('nscn', 'npix')) + r_a = nc.createVariable(ch_name, np.int16, dimensions=("nscn", "npix")) r_a[:] = np.ones((nscn, npix)) * 10 - attrs = {'scale_factor': 0.1, 'units': 'percent'} + attrs = {"scale_factor": 0.1, "units": "percent"} for attr in attrs: setattr(r_a, attr, attrs[attr]) for ind in range(12, 17, 1): ch_name = "M{:02d}".format(ind) - tb_b = nc.createVariable(ch_name, np.int16, dimensions=('nscn', 'npix')) + tb_b = nc.createVariable(ch_name, np.int16, dimensions=("nscn", "npix")) tb_b[:] = np.ones((nscn, npix)) * 800 - attrs = {'units': 'radiances', 'scale_factor': 0.002} + attrs = {"units": "radiances", "scale_factor": 0.002} for attr in attrs: setattr(tb_b, attr, attrs[attr]) - tb_lut = nc.createVariable(ch_name + "_LUT", np.float32, dimensions=('n_lut')) + tb_lut = nc.createVariable(ch_name + "_LUT", np.float32, dimensions=("n_lut")) tb_lut[:] = np.array(range(0, n_lut)) * 0.5 return filename_str @@ -72,7 +72,7 @@ def test_read_vgac(self, _nc_filename): # Read data scn_ = Scene( - reader='viirs_vgac_l1c_nc', + reader="viirs_vgac_l1c_nc", filenames=[_nc_filename]) scn_.load(["M05", "M15"]) assert (scn_["M05"][0, 0] == 100) diff --git a/satpy/tests/reader_tests/test_virr_l1b.py b/satpy/tests/reader_tests/test_virr_l1b.py index a7a76cafb3..ff0f780190 100644 --- a/satpy/tests/reader_tests/test_virr_l1b.py +++ b/satpy/tests/reader_tests/test_virr_l1b.py @@ -39,48 +39,48 @@ def _make_file(self, platform_id, geolocation_prefix, l1b_prefix, ECWN, Emissive dim_1 = 20 test_file = { # Satellite data. - '/attr/Day Or Night Flag': 'D', '/attr/Observing Beginning Date': '2018-12-25', - '/attr/Observing Beginning Time': '21:41:47.090', '/attr/Observing Ending Date': '2018-12-25', - '/attr/Observing Ending Time': '21:47:28.254', '/attr/Satellite Name': platform_id, - '/attr/Sensor Identification Code': 'VIRR', + "/attr/Day Or Night Flag": "D", "/attr/Observing Beginning Date": "2018-12-25", + "/attr/Observing Beginning Time": "21:41:47.090", "/attr/Observing Ending Date": "2018-12-25", + "/attr/Observing Ending Time": "21:47:28.254", "/attr/Satellite Name": platform_id, + "/attr/Sensor Identification Code": "VIRR", # Emissive data. 
- l1b_prefix + 'EV_Emissive': self.make_test_data([3, dim_0, dim_1]), - l1b_prefix + 'EV_Emissive/attr/valid_range': [0, 50000], - l1b_prefix + 'Emissive_Radiance_Scales': self.make_test_data([dim_0, dim_1]), - l1b_prefix + 'EV_Emissive/attr/units': Emissive_units, - l1b_prefix + 'Emissive_Radiance_Offsets': self.make_test_data([dim_0, dim_1]), - '/attr/' + ECWN: [2610.31, 917.6268, 836.2546], + l1b_prefix + "EV_Emissive": self.make_test_data([3, dim_0, dim_1]), + l1b_prefix + "EV_Emissive/attr/valid_range": [0, 50000], + l1b_prefix + "Emissive_Radiance_Scales": self.make_test_data([dim_0, dim_1]), + l1b_prefix + "EV_Emissive/attr/units": Emissive_units, + l1b_prefix + "Emissive_Radiance_Offsets": self.make_test_data([dim_0, dim_1]), + "/attr/" + ECWN: [2610.31, 917.6268, 836.2546], # Reflectance data. - l1b_prefix + 'EV_RefSB': self.make_test_data([7, dim_0, dim_1]), - l1b_prefix + 'EV_RefSB/attr/valid_range': [0, 32767], l1b_prefix + 'EV_RefSB/attr/units': 'none', - '/attr/RefSB_Cal_Coefficients': np.ones(14, dtype=np.float32) * 2 + l1b_prefix + "EV_RefSB": self.make_test_data([7, dim_0, dim_1]), + l1b_prefix + "EV_RefSB/attr/valid_range": [0, 32767], l1b_prefix + "EV_RefSB/attr/units": "none", + "/attr/RefSB_Cal_Coefficients": np.ones(14, dtype=np.float32) * 2 } - for attribute in ['Latitude', 'Longitude', geolocation_prefix + 'SolarZenith', - geolocation_prefix + 'SensorZenith', geolocation_prefix + 'SolarAzimuth', - geolocation_prefix + 'SensorAzimuth']: + for attribute in ["Latitude", "Longitude", geolocation_prefix + "SolarZenith", + geolocation_prefix + "SensorZenith", geolocation_prefix + "SolarAzimuth", + geolocation_prefix + "SensorAzimuth"]: test_file[attribute] = self.make_test_data([dim_0, dim_1]) - test_file[attribute + '/attr/Intercept'] = 0. - test_file[attribute + '/attr/units'] = 'degrees' - if 'Solar' in attribute or 'Sensor' in attribute: - test_file[attribute + '/attr/Slope'] = .01 - if 'Azimuth' in attribute: - test_file[attribute + '/attr/valid_range'] = [0, 18000] + test_file[attribute + "/attr/Intercept"] = 0. + test_file[attribute + "/attr/units"] = "degrees" + if "Solar" in attribute or "Sensor" in attribute: + test_file[attribute + "/attr/Slope"] = .01 + if "Azimuth" in attribute: + test_file[attribute + "/attr/valid_range"] = [0, 18000] else: - test_file[attribute + '/attr/valid_range'] = [-18000, 18000] + test_file[attribute + "/attr/valid_range"] = [-18000, 18000] else: - test_file[attribute + '/attr/Slope'] = 1. - if 'Longitude' == attribute: - test_file[attribute + '/attr/valid_range'] = [-180., 180.] + test_file[attribute + "/attr/Slope"] = 1. + if "Longitude" == attribute: + test_file[attribute + "/attr/valid_range"] = [-180., 180.] else: - test_file[attribute + '/attr/valid_range'] = [-90., 90.] + test_file[attribute + "/attr/valid_range"] = [-90., 90.] 
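# (Illustrative note on the angle scaling mimicked above: VIRR-style files store angles as scaled integers, so with Slope = 0.01 an azimuth valid_range of [0, 18000] decodes to [0.0, 180.0] degrees and a zenith valid_range of [-18000, 18000] to [-180.0, 180.0], since 18000 * 0.01 == 180.0; Latitude and Longitude use Slope = 1.0 and are already in degrees.)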
return test_file def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" - if filename_info['platform_id'] == 'FY3B': - return self._make_file('FY3B', '', '', 'Emmisive_Centroid_Wave_Number', 'milliWstts/m^2/cm^(-1)/steradian') - return self._make_file(filename_info['platform_id'], 'Geolocation/', 'Data/', - 'Emissive_Centroid_Wave_Number', 'none') + if filename_info["platform_id"] == "FY3B": + return self._make_file("FY3B", "", "", "Emmisive_Centroid_Wave_Number", "milliWstts/m^2/cm^(-1)/steradian") + return self._make_file(filename_info["platform_id"], "Geolocation/", "Data/", + "Emissive_Centroid_Wave_Number", "none") class TestVIRRL1BReader(unittest.TestCase): @@ -92,9 +92,9 @@ def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.virr_l1b import VIRR_L1B - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(VIRR_L1B, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(VIRR_L1B, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -104,62 +104,62 @@ def tearDown(self): def _band_helper(self, attributes, units, calibration, standard_name, file_type, band_index_size, resolution): - self.assertEqual(units, attributes['units']) - self.assertEqual(calibration, attributes['calibration']) - self.assertEqual(standard_name, attributes['standard_name']) - self.assertEqual(file_type, attributes['file_type']) - self.assertTrue(attributes['band_index'] in range(band_index_size)) - self.assertEqual(resolution, attributes['resolution']) - self.assertEqual(('longitude', 'latitude'), attributes['coordinates']) + self.assertEqual(units, attributes["units"]) + self.assertEqual(calibration, attributes["calibration"]) + self.assertEqual(standard_name, attributes["standard_name"]) + self.assertEqual(file_type, attributes["file_type"]) + self.assertTrue(attributes["band_index"] in range(band_index_size)) + self.assertEqual(resolution, attributes["resolution"]) + self.assertEqual(("longitude", "latitude"), attributes["coordinates"]) def _fy3_helper(self, platform_name, reader, Emissive_units): """Load channels and test accurate metadata.""" import datetime - band_values = {'1': 22.0, '2': 22.0, '6': 22.0, '7': 22.0, '8': 22.0, '9': 22.0, '10': 22.0, - '3': 496.542155, '4': 297.444511, '5': 288.956557, 'solar_zenith_angle': .1, - 'satellite_zenith_angle': .1, 'solar_azimuth_angle': .1, 'satellite_azimuth_angle': .1, - 'longitude': 10} - if platform_name == 'FY3B': + band_values = {"1": 22.0, "2": 22.0, "6": 22.0, "7": 22.0, "8": 22.0, "9": 22.0, "10": 22.0, + "3": 496.542155, "4": 297.444511, "5": 288.956557, "solar_zenith_angle": .1, + "satellite_zenith_angle": .1, "solar_azimuth_angle": .1, "satellite_azimuth_angle": .1, + "longitude": 10} + if platform_name == "FY3B": # updated 2015 coefficients - band_values['1'] = -0.168 - band_values['2'] = -0.2706 - band_values['6'] = -1.5631 - band_values['7'] = -0.2114 - band_values['8'] = -0.171 - band_values['9'] = -0.1606 - band_values['10'] = -0.1328 + band_values["1"] = -0.168 + band_values["2"] = -0.2706 + band_values["6"] = -1.5631 + band_values["7"] = -0.2114 + band_values["8"] = -0.171 + band_values["9"] = -0.1606 + 
band_values["10"] = -0.1328 datasets = reader.load([band for band in band_values]) for dataset in datasets: # Object returned by get_dataset. - ds = datasets[dataset['name']] + ds = datasets[dataset["name"]] attributes = ds.attrs self.assertTrue(isinstance(ds.data, da.Array)) - self.assertEqual('virr', attributes['sensor']) - self.assertEqual(platform_name, attributes['platform_name']) - self.assertEqual(datetime.datetime(2018, 12, 25, 21, 41, 47, 90000), attributes['start_time']) - self.assertEqual(datetime.datetime(2018, 12, 25, 21, 47, 28, 254000), attributes['end_time']) - self.assertEqual((19, 20), datasets[dataset['name']].shape) - self.assertEqual(('y', 'x'), datasets[dataset['name']].dims) - if dataset['name'] in ['1', '2', '6', '7', '8', '9', '10']: - self._band_helper(attributes, '%', 'reflectance', - 'toa_bidirectional_reflectance', 'virr_l1b', + self.assertEqual("virr", attributes["sensor"]) + self.assertEqual(platform_name, attributes["platform_name"]) + self.assertEqual(datetime.datetime(2018, 12, 25, 21, 41, 47, 90000), attributes["start_time"]) + self.assertEqual(datetime.datetime(2018, 12, 25, 21, 47, 28, 254000), attributes["end_time"]) + self.assertEqual((19, 20), datasets[dataset["name"]].shape) + self.assertEqual(("y", "x"), datasets[dataset["name"]].dims) + if dataset["name"] in ["1", "2", "6", "7", "8", "9", "10"]: + self._band_helper(attributes, "%", "reflectance", + "toa_bidirectional_reflectance", "virr_l1b", 7, 1000) - elif dataset['name'] in ['3', '4', '5']: - self._band_helper(attributes, Emissive_units, 'brightness_temperature', - 'toa_brightness_temperature', 'virr_l1b', 3, 1000) - elif dataset['name'] in ['longitude', 'latitude']: - self.assertEqual('degrees', attributes['units']) - self.assertTrue(attributes['standard_name'] in ['longitude', 'latitude']) - self.assertEqual(['virr_l1b', 'virr_geoxx'], attributes['file_type']) - self.assertEqual(1000, attributes['resolution']) + elif dataset["name"] in ["3", "4", "5"]: + self._band_helper(attributes, Emissive_units, "brightness_temperature", + "toa_brightness_temperature", "virr_l1b", 3, 1000) + elif dataset["name"] in ["longitude", "latitude"]: + self.assertEqual("degrees", attributes["units"]) + self.assertTrue(attributes["standard_name"] in ["longitude", "latitude"]) + self.assertEqual(["virr_l1b", "virr_geoxx"], attributes["file_type"]) + self.assertEqual(1000, attributes["resolution"]) else: - self.assertEqual('degrees', attributes['units']) + self.assertEqual("degrees", attributes["units"]) self.assertTrue( - attributes['standard_name'] in ['solar_zenith_angle', 'sensor_zenith_angle', 'solar_azimuth_angle', - 'sensor_azimuth_angle']) - self.assertEqual(['virr_geoxx', 'virr_l1b'], attributes['file_type']) - self.assertEqual(('longitude', 'latitude'), attributes['coordinates']) - self.assertEqual(band_values[dataset['name']], + attributes["standard_name"] in ["solar_zenith_angle", "sensor_zenith_angle", "solar_azimuth_angle", + "sensor_azimuth_angle"]) + self.assertEqual(["virr_geoxx", "virr_l1b"], attributes["file_type"]) + self.assertEqual(("longitude", "latitude"), attributes["coordinates"]) + self.assertEqual(band_values[dataset["name"]], round(float(np.array(ds[ds.shape[0] // 2][ds.shape[1] // 2])), 6)) assert "valid_range" not in ds.attrs @@ -167,21 +167,21 @@ def test_fy3b_file(self): """Test that FY3B files are recognized.""" from satpy.readers import load_reader FY3B_reader = load_reader(self.reader_configs) - FY3B_file = 
FY3B_reader.select_files_from_pathnames(['tf2018359214943.FY3B-L_VIRRX_L1B.HDF']) + FY3B_file = FY3B_reader.select_files_from_pathnames(["tf2018359214943.FY3B-L_VIRRX_L1B.HDF"]) self.assertEqual(1, len(FY3B_file)) FY3B_reader.create_filehandlers(FY3B_file) # Make sure we have some files self.assertTrue(FY3B_reader.file_handlers) - self._fy3_helper('FY3B', FY3B_reader, 'milliWstts/m^2/cm^(-1)/steradian') + self._fy3_helper("FY3B", FY3B_reader, "milliWstts/m^2/cm^(-1)/steradian") def test_fy3c_file(self): """Test that FY3C files are recognized.""" from satpy.readers import load_reader FY3C_reader = load_reader(self.reader_configs) - FY3C_files = FY3C_reader.select_files_from_pathnames(['tf2018359143912.FY3C-L_VIRRX_GEOXX.HDF', - 'tf2018359143912.FY3C-L_VIRRX_L1B.HDF']) + FY3C_files = FY3C_reader.select_files_from_pathnames(["tf2018359143912.FY3C-L_VIRRX_GEOXX.HDF", + "tf2018359143912.FY3C-L_VIRRX_L1B.HDF"]) self.assertEqual(2, len(FY3C_files)) FY3C_reader.create_filehandlers(FY3C_files) # Make sure we have some files self.assertTrue(FY3C_reader.file_handlers) - self._fy3_helper('FY3C', FY3C_reader, '1') + self._fy3_helper("FY3C", FY3C_reader, "1") diff --git a/satpy/tests/scene_tests/test_conversions.py b/satpy/tests/scene_tests/test_conversions.py index c62ffcea1d..3760249d95 100644 --- a/satpy/tests/scene_tests/test_conversions.py +++ b/satpy/tests/scene_tests/test_conversions.py @@ -35,8 +35,8 @@ def test_serialization_with_readers_and_data_arr(self): """Test that dask can serialize a Scene with readers.""" from distributed.protocol import deserialize, serialize - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds1']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds1"]) cloned_scene = deserialize(*serialize(scene)) assert scene._readers.keys() == cloned_scene._readers.keys() assert scene.all_dataset_ids == cloned_scene.all_dataset_ids @@ -57,12 +57,12 @@ def test_geoviews_basic_with_area(self): """Test converting a Scene to geoviews with an AreaDefinition.""" from pyresample.geometry import AreaDefinition scn = Scene() - area = AreaDefinition('test', 'test', 'test', - {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0}, + area = AreaDefinition("test", "test", "test", + {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, 2, 2, [-200, -200, 200, 200]) - scn['ds1'] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1), - 'area': area}) + scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1), + "area": area}) gv_obj = scn.to_geoviews() # we assume that if we got something back, geoviews can use it assert gv_obj is not None @@ -74,9 +74,9 @@ def test_geoviews_basic_with_swath(self): lons = xr.DataArray(da.zeros((2, 2))) lats = xr.DataArray(da.zeros((2, 2))) area = SwathDefinition(lons, lats) - scn['ds1'] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1), - 'area': area}) + scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1), + "area": area}) gv_obj = scn.to_geoviews() # we assume that if we got something back, geoviews can use it assert gv_obj is not None @@ -98,14 +98,14 @@ def single_area_scn(self): """Define Scene with single area.""" from pyresample.geometry import AreaDefinition - area = AreaDefinition('test', 'test', 'test', - {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0}, + area = AreaDefinition("test", "test",
"test", + {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, 2, 2, [-200, -200, 200, 200]) data_array = xr.DataArray(da.zeros((2, 2), chunks=-1), - dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1), 'area': area}) + dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1), "area": area}) scn = Scene() - scn['var1'] = data_array + scn["var1"] = data_array return scn @pytest.fixture @@ -113,22 +113,22 @@ def multi_area_scn(self): """Define Scene with multiple area.""" from pyresample.geometry import AreaDefinition - area1 = AreaDefinition('test', 'test', 'test', - {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0}, + area1 = AreaDefinition("test", "test", "test", + {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, 2, 2, [-200, -200, 200, 200]) - area2 = AreaDefinition('test', 'test', 'test', - {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0}, + area2 = AreaDefinition("test", "test", "test", + {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, 4, 4, [-200, -200, 200, 200]) data_array1 = xr.DataArray(da.zeros((2, 2), chunks=-1), - dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1), 'area': area1}) + dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1), "area": area1}) data_array2 = xr.DataArray(da.zeros((4, 4), chunks=-1), - dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1), 'area': area2}) + dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1), "area": area2}) scn = Scene() - scn['var1'] = data_array1 - scn['var2'] = data_array2 + scn["var1"] = data_array1 + scn["var2"] = data_array2 return scn def test_with_single_area_scene_type(self, single_area_scn): diff --git a/satpy/tests/scene_tests/test_data_access.py b/satpy/tests/scene_tests/test_data_access.py index f345679e03..e446af9c46 100644 --- a/satpy/tests/scene_tests/test_data_access.py +++ b/satpy/tests/scene_tests/test_data_access.py @@ -83,21 +83,21 @@ def test_iter_by_area_swath(self): from pyresample.geometry import SwathDefinition scene = Scene() sd = SwathDefinition(lons=np.arange(5), lats=np.arange(5)) - scene["1"] = xr.DataArray(np.arange(5), attrs={'area': sd}) - scene["2"] = xr.DataArray(np.arange(5), attrs={'area': sd}) + scene["1"] = xr.DataArray(np.arange(5), attrs={"area": sd}) + scene["2"] = xr.DataArray(np.arange(5), attrs={"area": sd}) scene["3"] = xr.DataArray(np.arange(5)) for area_obj, ds_list in scene.iter_by_area(): - ds_list_names = set(ds['name'] for ds in ds_list) + ds_list_names = set(ds["name"] for ds in ds_list) if area_obj is sd: - assert ds_list_names == {'1', '2'} + assert ds_list_names == {"1", "2"} else: assert area_obj is None - assert ds_list_names == {'3'} + assert ds_list_names == {"3"} def test_bad_setitem(self): """Test setting an item wrongly.""" scene = Scene() - pytest.raises(ValueError, scene.__setitem__, '1', np.arange(5)) + pytest.raises(ValueError, scene.__setitem__, "1", np.arange(5)) def test_setitem(self): """Test setting an item.""" @@ -108,16 +108,16 @@ def test_setitem(self): assert set(scene._datasets.keys()) == {expected_id} assert set(scene._wishlist) == {expected_id} - did = make_dataid(name='oranges') + did = make_dataid(name="oranges") scene[did] = ds1 - assert 'oranges' in scene + assert "oranges" in scene nparray = np.arange(5*5).reshape(5, 5) with pytest.raises(ValueError): - scene['apples'] = nparray - assert 'apples' not in scene - did = make_dataid(name='apples') + scene["apples"] = nparray + assert "apples" not in scene + did = make_dataid(name="apples") scene[did] = nparray - assert 'apples' in scene + assert "apples" in 
scene def test_getitem(self): """Test __getitem__ with names only.""" @@ -125,41 +125,41 @@ def test_getitem(self): scene["1"] = ds1 = xr.DataArray(np.arange(5)) scene["2"] = ds2 = xr.DataArray(np.arange(5)) scene["3"] = ds3 = xr.DataArray(np.arange(5)) - assert scene['1'] is ds1 - assert scene['2'] is ds2 - assert scene['3'] is ds3 - pytest.raises(KeyError, scene.__getitem__, '4') - assert scene.get('3') is ds3 - assert scene.get('4') is None + assert scene["1"] is ds1 + assert scene["2"] is ds2 + assert scene["3"] is ds3 + pytest.raises(KeyError, scene.__getitem__, "4") + assert scene.get("3") is ds3 + assert scene.get("4") is None def test_getitem_modifiers(self): """Test __getitem__ with names and modifiers.""" # Return least modified item scene = Scene() - scene['1'] = ds1_m0 = xr.DataArray(np.arange(5)) - scene[make_dataid(name='1', modifiers=('mod1',)) + scene["1"] = ds1_m0 = xr.DataArray(np.arange(5)) + scene[make_dataid(name="1", modifiers=("mod1",)) ] = xr.DataArray(np.arange(5)) - assert scene['1'] is ds1_m0 + assert scene["1"] is ds1_m0 assert len(list(scene.keys())) == 2 scene = Scene() - scene['1'] = ds1_m0 = xr.DataArray(np.arange(5)) - scene[make_dataid(name='1', modifiers=('mod1',)) + scene["1"] = ds1_m0 = xr.DataArray(np.arange(5)) + scene[make_dataid(name="1", modifiers=("mod1",)) ] = xr.DataArray(np.arange(5)) - scene[make_dataid(name='1', modifiers=('mod1', 'mod2')) + scene[make_dataid(name="1", modifiers=("mod1", "mod2")) ] = xr.DataArray(np.arange(5)) - assert scene['1'] is ds1_m0 + assert scene["1"] is ds1_m0 assert len(list(scene.keys())) == 3 scene = Scene() - scene[make_dataid(name='1', modifiers=('mod1', 'mod2')) + scene[make_dataid(name="1", modifiers=("mod1", "mod2")) ] = ds1_m2 = xr.DataArray(np.arange(5)) - scene[make_dataid(name='1', modifiers=('mod1',)) + scene[make_dataid(name="1", modifiers=("mod1",)) ] = ds1_m1 = xr.DataArray(np.arange(5)) - assert scene['1'] is ds1_m1 - assert scene[make_dataid(name='1', modifiers=('mod1', 'mod2'))] is ds1_m2 + assert scene["1"] is ds1_m1 + assert scene[make_dataid(name="1", modifiers=("mod1", "mod2"))] is ds1_m2 pytest.raises(KeyError, scene.__getitem__, - make_dataid(name='1', modifiers=tuple())) + make_dataid(name="1", modifiers=tuple())) assert len(list(scene.keys())) == 2 def test_getitem_slices(self): @@ -168,13 +168,13 @@ def test_getitem_slices(self): from pyresample.utils import proj4_str_to_dict scene1 = Scene() scene2 = Scene() - proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 ' - '+lon_0=-95. +lat_0=25 +lat_1=25 ' - '+units=m +no_defs') + proj_dict = proj4_str_to_dict("+proj=lcc +datum=WGS84 +ellps=WGS84 " + "+lon_0=-95. 
+lat_0=25 +lat_1=25 " + "+units=m +no_defs") area_def = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_dict, 200, 400, @@ -184,80 +184,80 @@ def test_getitem_slices(self): lats=np.zeros((5, 10))) scene1["1"] = scene2["1"] = xr.DataArray(np.zeros((5, 10))) scene1["2"] = scene2["2"] = xr.DataArray(np.zeros((5, 10)), - dims=('y', 'x')) - scene1["3"] = xr.DataArray(np.zeros((5, 10)), dims=('y', 'x'), - attrs={'area': area_def}) + dims=("y", "x")) + scene1["3"] = xr.DataArray(np.zeros((5, 10)), dims=("y", "x"), + attrs={"area": area_def}) anc_vars = [xr.DataArray(np.ones((5, 10)), - attrs={'name': 'anc_var', 'area': area_def})] - attrs = {'ancillary_variables': anc_vars, 'area': area_def} + attrs={"name": "anc_var", "area": area_def})] + attrs = {"ancillary_variables": anc_vars, "area": area_def} scene1["3a"] = xr.DataArray(np.zeros((5, 10)), - dims=('y', 'x'), + dims=("y", "x"), attrs=attrs) - scene2["4"] = xr.DataArray(np.zeros((5, 10)), dims=('y', 'x'), - attrs={'area': swath_def}) + scene2["4"] = xr.DataArray(np.zeros((5, 10)), dims=("y", "x"), + attrs={"area": swath_def}) anc_vars = [xr.DataArray(np.ones((5, 10)), - attrs={'name': 'anc_var', 'area': swath_def})] - attrs = {'ancillary_variables': anc_vars, 'area': swath_def} + attrs={"name": "anc_var", "area": swath_def})] + attrs = {"ancillary_variables": anc_vars, "area": swath_def} scene2["4a"] = xr.DataArray(np.zeros((5, 10)), - dims=('y', 'x'), + dims=("y", "x"), attrs=attrs) new_scn1 = scene1[2:5, 2:8] new_scn2 = scene2[2:5, 2:8] for new_scn in [new_scn1, new_scn2]: # datasets without an area don't get sliced - assert new_scn['1'].shape == (5, 10) - assert new_scn['2'].shape == (5, 10) - - assert new_scn1['3'].shape == (3, 6) - assert 'area' in new_scn1['3'].attrs - assert new_scn1['3'].attrs['area'].shape == (3, 6) - assert new_scn1['3a'].shape == (3, 6) - a_var = new_scn1['3a'].attrs['ancillary_variables'][0] + assert new_scn["1"].shape == (5, 10) + assert new_scn["2"].shape == (5, 10) + + assert new_scn1["3"].shape == (3, 6) + assert "area" in new_scn1["3"].attrs + assert new_scn1["3"].attrs["area"].shape == (3, 6) + assert new_scn1["3a"].shape == (3, 6) + a_var = new_scn1["3a"].attrs["ancillary_variables"][0] assert a_var.shape == (3, 6) - assert new_scn2['4'].shape == (3, 6) - assert 'area' in new_scn2['4'].attrs - assert new_scn2['4'].attrs['area'].shape == (3, 6) - assert new_scn2['4a'].shape == (3, 6) - a_var = new_scn2['4a'].attrs['ancillary_variables'][0] + assert new_scn2["4"].shape == (3, 6) + assert "area" in new_scn2["4"].attrs + assert new_scn2["4"].attrs["area"].shape == (3, 6) + assert new_scn2["4a"].shape == (3, 6) + a_var = new_scn2["4a"].attrs["ancillary_variables"][0] assert a_var.shape == (3, 6) def test_contains(self): """Test contains.""" scene = Scene() scene["1"] = xr.DataArray(np.arange(5), - attrs={'wavelength': (0.1, 0.2, 0.3), - '_satpy_id_keys': default_id_keys_config}) - assert '1' in scene + attrs={"wavelength": (0.1, 0.2, 0.3), + "_satpy_id_keys": default_id_keys_config}) + assert "1" in scene assert 0.15 in scene - assert '2' not in scene + assert "2" not in scene assert 0.31 not in scene scene = Scene() - scene['blueberry'] = xr.DataArray(np.arange(5)) - scene['blackberry'] = xr.DataArray(np.arange(5)) - scene['strawberry'] = xr.DataArray(np.arange(5)) - scene['raspberry'] = xr.DataArray(np.arange(5)) + scene["blueberry"] = xr.DataArray(np.arange(5)) + scene["blackberry"] = xr.DataArray(np.arange(5)) + scene["strawberry"] = xr.DataArray(np.arange(5)) + 
scene["raspberry"] = xr.DataArray(np.arange(5)) # deepcode ignore replace~keys~list~compare: This is on purpose - assert make_cid(name='blueberry') in scene.keys() - assert make_cid(name='blueberry') in scene - assert 'blueberry' in scene - assert 'blueberry' not in scene.keys() + assert make_cid(name="blueberry") in scene.keys() + assert make_cid(name="blueberry") in scene + assert "blueberry" in scene + assert "blueberry" not in scene.keys() def test_delitem(self): """Test deleting an item.""" scene = Scene() scene["1"] = xr.DataArray(np.arange(5), - attrs={'wavelength': (0.1, 0.2, 0.3), - '_satpy_id_keys': default_id_keys_config}) + attrs={"wavelength": (0.1, 0.2, 0.3), + "_satpy_id_keys": default_id_keys_config}) scene["2"] = xr.DataArray(np.arange(5), - attrs={'wavelength': (0.4, 0.5, 0.6), - '_satpy_id_keys': default_id_keys_config}) + attrs={"wavelength": (0.4, 0.5, 0.6), + "_satpy_id_keys": default_id_keys_config}) scene["3"] = xr.DataArray(np.arange(5), - attrs={'wavelength': (0.7, 0.8, 0.9), - '_satpy_id_keys': default_id_keys_config}) - del scene['1'] - del scene['3'] + attrs={"wavelength": (0.7, 0.8, 0.9), + "_satpy_id_keys": default_id_keys_config}) + del scene["1"] + del scene["3"] del scene[0.45] assert not scene._wishlist assert not list(scene._datasets.keys()) @@ -268,7 +268,7 @@ def _create_coarest_finest_data_array(shape, area_def, attrs=None): data_arr = xr.DataArray( da.arange(math.prod(shape)).reshape(shape), attrs={ - 'area': area_def, + "area": area_def, }) if attrs: data_arr.attrs.update(attrs) @@ -277,11 +277,11 @@ def _create_coarest_finest_data_array(shape, area_def, attrs=None): def _create_coarsest_finest_area_def(shape, extents): from pyresample import AreaDefinition - proj_str = '+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs' + proj_str = "+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. 
+lat_0=25 +lat_1=25 +units=m +no_defs" area_def = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_str, shape[1], shape[0], @@ -331,7 +331,7 @@ def test_coarsest_finest_area_different_shape(self, coarse_area, fine_area): assert scn.coarsest_area() is coarse_area assert scn.finest_area() is fine_area - assert scn.coarsest_area(['2', '3']) is fine_area + assert scn.coarsest_area(["2", "3"]) is fine_area @pytest.mark.parametrize( ("area_def", "shifted_area"), @@ -375,24 +375,24 @@ class TestComputePersist: def test_compute_pass_through(self): """Test pass through of xarray compute.""" import numpy as np - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds1']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds1"]) scene = scene.compute() - assert isinstance(scene['ds1'].data, np.ndarray) + assert isinstance(scene["ds1"].data, np.ndarray) def test_persist_pass_through(self): """Test pass through of xarray persist.""" from dask.array.utils import assert_eq - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds1']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds1"]) scenep = scene.persist() - assert_eq(scene['ds1'].data, scenep['ds1'].data) - assert set(scenep['ds1'].data.dask).issubset(scene['ds1'].data.dask) - assert len(scenep["ds1"].data.dask) == scenep['ds1'].data.npartitions + assert_eq(scene["ds1"].data, scenep["ds1"].data) + assert set(scenep["ds1"].data.dask).issubset(scene["ds1"].data.dask) + assert len(scenep["ds1"].data.dask) == scenep["ds1"].data.npartitions def test_chunk_pass_through(self): """Test pass through of xarray chunk.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds1']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds1"]) scene = scene.chunk(chunks=2) - assert scene['ds1'].data.chunksize == (2, 2) + assert scene["ds1"].data.chunksize == (2, 2) diff --git a/satpy/tests/scene_tests/test_init.py b/satpy/tests/scene_tests/test_init.py index 4caf804366..a9b4622769 100644 --- a/satpy/tests/scene_tests/test_init.py +++ b/satpy/tests/scene_tests/test_init.py @@ -38,49 +38,49 @@ class TestScene: def test_init(self): """Test scene initialization.""" - with mock.patch('satpy.scene.Scene._create_reader_instances') as cri: + with mock.patch("satpy.scene.Scene._create_reader_instances") as cri: cri.return_value = {} - Scene(filenames=['bla'], reader='blo') - cri.assert_called_once_with(filenames=['bla'], reader='blo', + Scene(filenames=["bla"], reader="blo") + cri.assert_called_once_with(filenames=["bla"], reader="blo", reader_kwargs=None) def test_init_str_filename(self): """Test initializing with a single string as filenames.""" - pytest.raises(ValueError, Scene, reader='blo', filenames='test.nc') + pytest.raises(ValueError, Scene, reader="blo", filenames="test.nc") def test_start_end_times(self): """Test start and end times for a scene.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") assert scene.start_time == FAKE_FILEHANDLER_START assert scene.end_time == FAKE_FILEHANDLER_END def test_init_preserve_reader_kwargs(self): """Test that the initialization preserves the kwargs.""" cri = spy_decorator(Scene._create_reader_instances) - with mock.patch('satpy.scene.Scene._create_reader_instances', cri): - reader_kwargs = {'calibration_type': 'gsics'} - scene = Scene(filenames=['fake1_1.txt'], - reader='fake1', - 
filter_parameters={'area': 'euron1'},
+ with mock.patch("satpy.scene.Scene._create_reader_instances", cri):
+ reader_kwargs = {"calibration_type": "gsics"}
+ scene = Scene(filenames=["fake1_1.txt"],
+ reader="fake1",
+ filter_parameters={"area": "euron1"},
reader_kwargs=reader_kwargs)
- assert reader_kwargs is not cri.mock.call_args[1]['reader_kwargs']
+ assert reader_kwargs is not cri.mock.call_args[1]["reader_kwargs"]
assert scene.start_time == FAKE_FILEHANDLER_START
assert scene.end_time == FAKE_FILEHANDLER_END
def test_init_alone(self):
"""Test simple initialization."""
scn = Scene()
- assert not scn._readers, 'Empty scene should not load any readers'
+ assert not scn._readers, "Empty scene should not load any readers"
def test_init_no_files(self):
"""Test that providing an empty list of filenames fails."""
- pytest.raises(ValueError, Scene, reader='viirs_sdr', filenames=[])
+ pytest.raises(ValueError, Scene, reader="viirs_sdr", filenames=[])
def test_create_reader_instances_with_filenames(self):
"""Test creating a reader providing filenames."""
filenames = ["bla", "foo", "bar"]
reader_name = None
- with mock.patch('satpy.scene.load_readers') as findermock:
+ with mock.patch("satpy.scene.load_readers") as findermock:
Scene(filenames=filenames)
findermock.assert_called_once_with(
filenames=filenames,
@@ -112,7 +112,7 @@ def test_create_reader_instances_with_reader(self):
"""Test creating a reader instance providing the reader name."""
reader = "foo"
filenames = ["1", "2", "3"]
- with mock.patch('satpy.scene.load_readers') as findermock:
+ with mock.patch("satpy.scene.load_readers") as findermock:
findermock.return_value = {}
Scene(reader=reader, filenames=filenames)
findermock.assert_called_once_with(reader=reader,
@@ -123,29 +123,29 @@ def test_create_reader_instances_with_reader_kwargs(self):
"""Test creating a reader instance with reader kwargs."""
from satpy.readers.yaml_reader import FileYAMLReader
- reader_kwargs = {'calibration_type': 'gsics'}
- filter_parameters = {'area': 'euron1'}
- reader_kwargs2 = {'calibration_type': 'gsics', 'filter_parameters': filter_parameters}
+ reader_kwargs = {"calibration_type": "gsics"}
+ filter_parameters = {"area": "euron1"}
+ reader_kwargs2 = {"calibration_type": "gsics", "filter_parameters": filter_parameters}
rinit = spy_decorator(FileYAMLReader.create_filehandlers)
- with mock.patch('satpy.readers.yaml_reader.FileYAMLReader.create_filehandlers', rinit):
- scene = Scene(filenames=['fake1_1.txt'],
- reader='fake1',
- filter_parameters={'area': 'euron1'},
+ with mock.patch("satpy.readers.yaml_reader.FileYAMLReader.create_filehandlers", rinit):
+ scene = Scene(filenames=["fake1_1.txt"],
+ reader="fake1",
+ filter_parameters={"area": "euron1"},
reader_kwargs=reader_kwargs)
del scene
- assert reader_kwargs == rinit.mock.call_args[1]['fh_kwargs']
+ assert reader_kwargs == rinit.mock.call_args[1]["fh_kwargs"]
rinit.mock.reset_mock()
- scene = Scene(filenames=['fake1_1.txt'],
- reader='fake1',
+ scene = Scene(filenames=["fake1_1.txt"],
+ reader="fake1",
reader_kwargs=reader_kwargs2)
- assert reader_kwargs == rinit.mock.call_args[1]['fh_kwargs']
+ assert reader_kwargs == rinit.mock.call_args[1]["fh_kwargs"]
del scene
def test_create_multiple_reader_different_kwargs(self, include_test_etc):
"""Test passing different kwargs to different readers."""
from satpy.readers import load_reader
- with mock.patch.object(satpy.readers, 'load_reader', wraps=load_reader) as lr:
+ with 
mock.patch.object(satpy.readers, "load_reader", wraps=load_reader) as lr: Scene(filenames={"fake1_1ds": ["fake1_1ds_1.txt"], "fake2_1ds": ["fake2_1ds_1.txt"]}, reader_kwargs={ @@ -153,8 +153,8 @@ def test_create_multiple_reader_different_kwargs(self, include_test_etc): "fake2_1ds": {"mouth": "varallo"} }) lr.assert_has_calls([ - mock.call([os.path.join(include_test_etc, 'readers', 'fake1_1ds.yaml')], mouth="omegna"), - mock.call([os.path.join(include_test_etc, 'readers', 'fake2_1ds.yaml')], mouth="varallo")]) + mock.call([os.path.join(include_test_etc, "readers", "fake1_1ds.yaml")], mouth="omegna"), + mock.call([os.path.join(include_test_etc, "readers", "fake2_1ds.yaml")], mouth="varallo")]) def test_storage_options_from_reader_kwargs_no_options(self): """Test getting storage options from reader kwargs. @@ -162,8 +162,8 @@ def test_storage_options_from_reader_kwargs_no_options(self): Case where there are no options given. """ filenames = ["s3://data-bucket/file1", "s3://data-bucket/file2", "s3://data-bucket/file3"] - with mock.patch('satpy.scene.load_readers'): - with mock.patch('fsspec.open_files') as open_files: + with mock.patch("satpy.scene.load_readers"): + with mock.patch("fsspec.open_files") as open_files: Scene(filenames=filenames) open_files.assert_called_once_with(filenames) @@ -173,13 +173,13 @@ def test_storage_options_from_reader_kwargs_single_dict_no_options(self): Case where a single dict is given for all readers without storage options. """ filenames = ["s3://data-bucket/file1", "s3://data-bucket/file2", "s3://data-bucket/file3"] - reader_kwargs = {'reader_opt': 'foo'} - with mock.patch('satpy.scene.load_readers'): - with mock.patch('fsspec.open_files') as open_files: + reader_kwargs = {"reader_opt": "foo"} + with mock.patch("satpy.scene.load_readers"): + with mock.patch("fsspec.open_files") as open_files: Scene(filenames=filenames, reader_kwargs=reader_kwargs) open_files.assert_called_once_with(filenames) - @pytest.mark.parametrize("reader_kwargs", [{}, {'reader_opt': 'foo'}]) + @pytest.mark.parametrize("reader_kwargs", [{}, {"reader_opt": "foo"}]) def test_storage_options_from_reader_kwargs_single_dict(self, reader_kwargs): """Test getting storage options from reader kwargs. 
@@ -187,14 +187,14 @@ def test_storage_options_from_reader_kwargs_single_dict(self, reader_kwargs): """ filenames = ["s3://data-bucket/file1", "s3://data-bucket/file2", "s3://data-bucket/file3"] expected_reader_kwargs = reader_kwargs.copy() - storage_options = {'option1': '1'} - reader_kwargs['storage_options'] = storage_options + storage_options = {"option1": "1"} + reader_kwargs["storage_options"] = storage_options orig_reader_kwargs = deepcopy(reader_kwargs) - with mock.patch('satpy.scene.load_readers') as load_readers: - with mock.patch('fsspec.open_files') as open_files: + with mock.patch("satpy.scene.load_readers") as load_readers: + with mock.patch("fsspec.open_files") as open_files: Scene(filenames=filenames, reader_kwargs=reader_kwargs) call_ = load_readers.mock_calls[0] - assert call_.kwargs['reader_kwargs'] == expected_reader_kwargs + assert call_.kwargs["reader_kwargs"] == expected_reader_kwargs open_files.assert_called_once_with(filenames, **storage_options) assert reader_kwargs == orig_reader_kwargs @@ -208,25 +208,25 @@ def test_storage_options_from_reader_kwargs_per_reader(self): "reader2": ["s3://data-bucket/file2"], "reader3": ["s3://data-bucket/file3"], } - storage_options_1 = {'option1': '1'} - storage_options_2 = {'option2': '2'} - storage_options_3 = {'option3': '3'} + storage_options_1 = {"option1": "1"} + storage_options_2 = {"option2": "2"} + storage_options_3 = {"option3": "3"} reader_kwargs = { - "reader1": {'reader_opt_1': 'foo'}, - "reader2": {'reader_opt_2': 'bar'}, - "reader3": {'reader_opt_3': 'baz'}, + "reader1": {"reader_opt_1": "foo"}, + "reader2": {"reader_opt_2": "bar"}, + "reader3": {"reader_opt_3": "baz"}, } expected_reader_kwargs = deepcopy(reader_kwargs) - reader_kwargs['reader1']['storage_options'] = storage_options_1 - reader_kwargs['reader2']['storage_options'] = storage_options_2 - reader_kwargs['reader3']['storage_options'] = storage_options_3 + reader_kwargs["reader1"]["storage_options"] = storage_options_1 + reader_kwargs["reader2"]["storage_options"] = storage_options_2 + reader_kwargs["reader3"]["storage_options"] = storage_options_3 orig_reader_kwargs = deepcopy(reader_kwargs) - with mock.patch('satpy.scene.load_readers') as load_readers: - with mock.patch('fsspec.open_files') as open_files: + with mock.patch("satpy.scene.load_readers") as load_readers: + with mock.patch("fsspec.open_files") as open_files: Scene(filenames=filenames, reader_kwargs=reader_kwargs) call_ = load_readers.mock_calls[0] - assert call_.kwargs['reader_kwargs'] == expected_reader_kwargs + assert call_.kwargs["reader_kwargs"] == expected_reader_kwargs assert mock.call(filenames["reader1"], **storage_options_1) in open_files.mock_calls assert mock.call(filenames["reader2"], **storage_options_2) in open_files.mock_calls assert mock.call(filenames["reader3"], **storage_options_3) in open_files.mock_calls @@ -244,15 +244,15 @@ def test_storage_options_from_reader_kwargs_per_reader_and_global(self): "reader3": ["s3://data-bucket/file3"], } reader_kwargs = { - "reader1": {'reader_opt_1': 'foo', 'storage_options': {'option1': '1'}}, - "reader2": {'reader_opt_2': 'bar', 'storage_options': {'option2': '2'}}, + "reader1": {"reader_opt_1": "foo", "storage_options": {"option1": "1"}}, + "reader2": {"reader_opt_2": "bar", "storage_options": {"option2": "2"}}, "storage_options": {"endpoint_url": "url"}, } orig_reader_kwargs = deepcopy(reader_kwargs) - with mock.patch('satpy.scene.load_readers'): - with mock.patch('fsspec.open_files') as open_files: + with 
mock.patch("satpy.scene.load_readers"): + with mock.patch("fsspec.open_files") as open_files: Scene(filenames=filenames, reader_kwargs=reader_kwargs) - assert mock.call(filenames["reader1"], option1='1', endpoint_url='url') in open_files.mock_calls - assert mock.call(filenames["reader2"], option2='2', endpoint_url='url') in open_files.mock_calls + assert mock.call(filenames["reader1"], option1="1", endpoint_url="url") in open_files.mock_calls + assert mock.call(filenames["reader2"], option2="2", endpoint_url="url") in open_files.mock_calls assert reader_kwargs == orig_reader_kwargs diff --git a/satpy/tests/scene_tests/test_load.py b/satpy/tests/scene_tests/test_load.py index 6eefbc0080..889d9e2cbe 100644 --- a/satpy/tests/scene_tests/test_load.py +++ b/satpy/tests/scene_tests/test_load.py @@ -36,7 +36,7 @@ class TestSceneAllAvailableDatasets: def test_all_datasets_no_readers(self): """Test all datasets with no reader.""" scene = Scene() - pytest.raises(KeyError, scene.all_dataset_ids, reader_name='fake') + pytest.raises(KeyError, scene.all_dataset_ids, reader_name="fake") id_list = scene.all_dataset_ids() assert id_list == [] # no sensors are loaded so we shouldn't get any comps either @@ -46,7 +46,7 @@ def test_all_datasets_no_readers(self): def test_all_dataset_names_no_readers(self): """Test all dataset names with no reader.""" scene = Scene() - pytest.raises(KeyError, scene.all_dataset_names, reader_name='fake') + pytest.raises(KeyError, scene.all_dataset_names, reader_name="fake") name_list = scene.all_dataset_names() assert name_list == [] # no sensors are loaded so we shouldn't get any comps either @@ -57,7 +57,7 @@ def test_available_dataset_no_readers(self): """Test the available datasets without a reader.""" scene = Scene() pytest.raises( - KeyError, scene.available_dataset_ids, reader_name='fake') + KeyError, scene.available_dataset_ids, reader_name="fake") name_list = scene.available_dataset_ids() assert name_list == [] # no sensors are loaded so we shouldn't get any comps either @@ -68,7 +68,7 @@ def test_available_dataset_names_no_readers(self): """Test the available dataset names without a reader.""" scene = Scene() pytest.raises( - KeyError, scene.available_dataset_names, reader_name='fake') + KeyError, scene.available_dataset_names, reader_name="fake") name_list = scene.available_dataset_names() assert name_list == [] # no sensors are loaded so we shouldn't get any comps either @@ -77,8 +77,8 @@ def test_available_dataset_names_no_readers(self): def test_all_datasets_one_reader(self): """Test all datasets for one reader.""" - scene = Scene(filenames=['fake1_1.txt'], - reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], + reader="fake1") id_list = scene.all_dataset_ids() # 20 data products + 6 lon/lat products num_reader_ds = 21 + 6 @@ -88,8 +88,8 @@ def test_all_datasets_one_reader(self): def test_all_datasets_multiple_reader(self): """Test all datasets for multiple readers.""" - scene = Scene(filenames={'fake1_1ds': ['fake1_1ds_1.txt'], - 'fake2_1ds': ['fake2_1ds_1.txt']}) + scene = Scene(filenames={"fake1_1ds": ["fake1_1ds_1.txt"], + "fake2_1ds": ["fake2_1ds_1.txt"]}) id_list = scene.all_dataset_ids() assert len(id_list) == 2 id_list = scene.all_dataset_ids(composites=True) @@ -99,8 +99,8 @@ def test_all_datasets_multiple_reader(self): def test_available_datasets_one_reader(self): """Test the available datasets for one reader.""" - scene = Scene(filenames=['fake1_1ds_1.txt'], - reader='fake1_1ds') + scene = Scene(filenames=["fake1_1ds_1.txt"], + 
reader="fake1_1ds") id_list = scene.available_dataset_ids() assert len(id_list) == 1 id_list = scene.available_dataset_ids(composites=True) @@ -109,13 +109,13 @@ def test_available_datasets_one_reader(self): def test_available_composite_ids_missing_available(self): """Test available_composite_ids when a composites dep is missing.""" - scene = Scene(filenames=['fake1_1ds_1.txt'], - reader='fake1_1ds') - assert 'comp2' not in scene.available_composite_names() + scene = Scene(filenames=["fake1_1ds_1.txt"], + reader="fake1_1ds") + assert "comp2" not in scene.available_composite_names() def test_available_composites_known_versus_all(self): """Test available_composite_ids when some datasets aren't available.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1', + scene = Scene(filenames=["fake1_1.txt"], reader="fake1", reader_kwargs={"not_available": ["ds2", "ds3"]}) all_comps = scene.all_composite_names() avail_comps = scene.available_composite_names() @@ -127,11 +127,11 @@ def test_available_composites_known_versus_all(self): def test_available_comps_no_deps(self): """Test Scene available composites when composites don't have a dependency.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") all_comp_ids = scene.available_composite_ids() - assert make_cid(name='static_image') in all_comp_ids + assert make_cid(name="static_image") in all_comp_ids available_comp_ids = scene.available_composite_ids() - assert make_cid(name='static_image') in available_comp_ids + assert make_cid(name="static_image") in available_comp_ids def test_available_when_sensor_none_in_preloaded_dataarrays(self): """Test Scene available composites when existing loaded arrays have sensor set to None. @@ -143,7 +143,7 @@ def test_available_when_sensor_none_in_preloaded_dataarrays(self): """ scene = _scene_with_data_array_none_sensor() available_comp_ids = scene.available_composite_ids() - assert make_cid(name='static_image') in available_comp_ids + assert make_cid(name="static_image") in available_comp_ids @pytest.mark.usefixtures("include_test_etc") @@ -152,13 +152,13 @@ class TestBadLoading: def test_load_str(self): """Test passing a string to Scene.load.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - pytest.raises(TypeError, scene.load, 'ds1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + pytest.raises(TypeError, scene.load, "ds1") def test_load_no_exist(self): """Test loading a dataset that doesn't exist.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - pytest.raises(KeyError, scene.load, ['im_a_dataset_that_doesnt_exist']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + pytest.raises(KeyError, scene.load, ["im_a_dataset_that_doesnt_exist"]) @pytest.mark.usefixtures("include_test_etc") @@ -169,50 +169,50 @@ def test_load_no_exist2(self): """Test loading a dataset that doesn't exist then another load.""" from satpy.readers.yaml_reader import FileYAMLReader load_mock = spy_decorator(FileYAMLReader.load) - with mock.patch.object(FileYAMLReader, 'load', load_mock): + with mock.patch.object(FileYAMLReader, "load", load_mock): lmock = load_mock.mock - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds9_fail_load']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds9_fail_load"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 0 lmock.assert_called_once_with( - {make_dataid(name='ds9_fail_load', wavelength=(1.0, 1.1, 1.2))}) + 
{make_dataid(name="ds9_fail_load", wavelength=(1.0, 1.1, 1.2))}) - scene.load(['ds1']) + scene.load(["ds1"]) loaded_ids = list(scene._datasets.keys()) assert lmock.call_count == 2 # most recent call should have only been ds1 lmock.assert_called_with({ - make_dataid(name='ds1', resolution=250, calibration='reflectance', modifiers=tuple()), + make_dataid(name="ds1", resolution=250, calibration="reflectance", modifiers=tuple()), }) assert len(loaded_ids) == 1 def test_load_ds1_no_comps(self): """Test loading one dataset with no loaded compositors.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds1']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds1"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - assert loaded_ids[0] == make_dataid(name='ds1', resolution=250, calibration='reflectance', modifiers=tuple()) + assert loaded_ids[0] == make_dataid(name="ds1", resolution=250, calibration="reflectance", modifiers=tuple()) def test_load_ds1_load_twice(self): """Test loading one dataset with no loaded compositors.""" from satpy.readers.yaml_reader import FileYAMLReader - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds1']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds1"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - assert loaded_ids[0] == make_dataid(name='ds1', resolution=250, calibration='reflectance', modifiers=tuple()) + assert loaded_ids[0] == make_dataid(name="ds1", resolution=250, calibration="reflectance", modifiers=tuple()) load_mock = spy_decorator(FileYAMLReader.load) - with mock.patch.object(FileYAMLReader, 'load', load_mock): + with mock.patch.object(FileYAMLReader, "load", load_mock): lmock = load_mock.mock - scene.load(['ds1']) + scene.load(["ds1"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - assert loaded_ids[0] == make_dataid(name='ds1', + assert loaded_ids[0] == make_dataid(name="ds1", resolution=250, - calibration='reflectance', + calibration="reflectance", modifiers=tuple()) assert not lmock.called, ("Reader.load was called again when " "loading something that's already " @@ -220,17 +220,17 @@ def test_load_ds1_load_twice(self): def test_load_ds1_unknown_modifier(self): """Test loading one dataset with no loaded compositors.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") pytest.raises(KeyError, scene.load, - [make_dataid(name='ds1', modifiers=('_fake_bad_mod_',))]) + [make_dataid(name="ds1", modifiers=("_fake_bad_mod_",))]) def test_load_ds4_cal(self): """Test loading a dataset that has two calibration variations.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds4']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds4"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - assert loaded_ids[0]['calibration'] == 'reflectance' + assert loaded_ids[0]["calibration"] == "reflectance" @pytest.mark.parametrize( ("input_filenames", "load_kwargs", "exp_resolution"), @@ -243,37 +243,37 @@ def test_load_ds4_cal(self): ) def test_load_ds5_variations(self, input_filenames, load_kwargs, exp_resolution): """Test loading a dataset has multiple resolutions available.""" - scene = Scene(filenames=input_filenames, reader='fake1') - scene.load(['ds5'], **load_kwargs) + scene = Scene(filenames=input_filenames, reader="fake1") + scene.load(["ds5"], **load_kwargs) 
loaded_ids = list(scene._datasets.keys())
assert len(loaded_ids) == 1
- assert loaded_ids[0]['name'] == 'ds5'
- assert loaded_ids[0]['resolution'] == exp_resolution
+ assert loaded_ids[0]["name"] == "ds5"
+ assert loaded_ids[0]["resolution"] == exp_resolution
def test_load_ds5_multiple_resolution_loads(self):
"""Test loading a dataset with multiple resolutions available as separate loads."""
- scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
- scene.load(['ds5'], resolution=1000)
- scene.load(['ds5'], resolution=500)
+ scene = Scene(filenames=["fake1_1.txt"], reader="fake1")
+ scene.load(["ds5"], resolution=1000)
+ scene.load(["ds5"], resolution=500)
loaded_ids = list(scene._datasets.keys())
assert len(loaded_ids) == 2
- assert loaded_ids[0]['name'] == 'ds5'
- assert loaded_ids[0]['resolution'] == 500
- assert loaded_ids[1]['name'] == 'ds5'
- assert loaded_ids[1]['resolution'] == 1000
+ assert loaded_ids[0]["name"] == "ds5"
+ assert loaded_ids[0]["resolution"] == 500
+ assert loaded_ids[1]["name"] == "ds5"
+ assert loaded_ids[1]["resolution"] == 1000
def test_load_ds6_wl(self):
"""Test loading a dataset by wavelength."""
- scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
+ scene = Scene(filenames=["fake1_1.txt"], reader="fake1")
scene.load([0.22])
loaded_ids = list(scene._datasets.keys())
assert len(loaded_ids) == 1
- assert loaded_ids[0]['name'] == 'ds6'
+ assert loaded_ids[0]["name"] == "ds6"
def test_load_ds9_fail_load(self):
"""Test loading a dataset that will fail during load."""
- scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
- scene.load(['ds9_fail_load'])
+ scene = Scene(filenames=["fake1_1.txt"], reader="fake1")
+ scene.load(["ds9_fail_load"])
loaded_ids = list(scene._datasets.keys())
assert len(loaded_ids) == 0
@@ -308,7 +308,7 @@ class TestLoadingComposites:
)
def test_single_composite_loading(self, comp_name, exp_id_or_name):
"""Test that certain composites can be loaded individually."""
- scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
+ scene = Scene(filenames=["fake1_1.txt"], reader="fake1")
scene.load([comp_name])
loaded_ids = list(scene._datasets.keys())
assert len(loaded_ids) == 1
@@ -319,33 +319,33 @@ def test_single_composite_loading(self, comp_name, exp_id_or_name):
def test_load_multiple_resolutions(self):
"""Test loading a dataset that has multiple resolutions available with different resolutions."""
- scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
- comp25 = make_cid(name='comp25', resolution=1000)
- scene[comp25] = xr.DataArray([], attrs={'name': 'comp25', 'resolution': 1000})
- scene.load(['comp25'], resolution=500)
+ scene = Scene(filenames=["fake1_1.txt"], reader="fake1")
+ comp25 = make_cid(name="comp25", resolution=1000)
+ scene[comp25] = xr.DataArray([], attrs={"name": "comp25", "resolution": 1000})
+ scene.load(["comp25"], resolution=500)
loaded_ids = list(scene._datasets.keys())
assert len(loaded_ids) == 2
- assert loaded_ids[0]['name'] == 'comp25'
- assert loaded_ids[0]['resolution'] == 500
- assert loaded_ids[1]['name'] == 'comp25'
- assert loaded_ids[1]['resolution'] == 1000
+ assert loaded_ids[0]["name"] == "comp25"
+ assert loaded_ids[0]["resolution"] == 500
+ assert loaded_ids[1]["name"] == "comp25"
+ assert loaded_ids[1]["resolution"] == 1000
def test_load_same_subcomposite(self):
"""Test loading a composite and one of its subcomposites at the same time."""
- scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
- scene.load(['comp24', 'comp25'], resolution=500)
+ scene = Scene(filenames=["fake1_1.txt"], 
reader="fake1") + scene.load(["comp24", "comp25"], resolution=500) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 - assert loaded_ids[0]['name'] == 'comp24' - assert loaded_ids[0]['resolution'] == 500 - assert loaded_ids[1]['name'] == 'comp25' - assert loaded_ids[1]['resolution'] == 500 + assert loaded_ids[0]["name"] == "comp24" + assert loaded_ids[0]["resolution"] == 500 + assert loaded_ids[1]["name"] == "comp25" + assert loaded_ids[1]["resolution"] == 500 def test_load_comp8(self): """Test loading a composite that has a non-existent prereq.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - pytest.raises(KeyError, scene.load, ['comp8']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + pytest.raises(KeyError, scene.load, ["comp8"]) def test_load_comp15(self): """Test loading a composite whose prerequisites can't be loaded. @@ -354,23 +354,23 @@ def test_load_comp15(self): """ # it is fine that an optional prereq doesn't exist - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['comp15']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["comp15"]) loaded_ids = list(scene._datasets.keys()) assert not loaded_ids def test_load_comp17(self): """Test loading a composite that depends on a composite that won't load.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['comp17']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["comp17"]) loaded_ids = list(scene._datasets.keys()) assert not loaded_ids def test_load_comp18(self): """Test loading a composite that depends on an incompatible area modified dataset.""" # it is fine that an optional prereq doesn't exist - scene = Scene(filenames=['fake1_1.txt', 'fake1_highres_1.txt'], reader='fake1') - scene.load(['comp18']) + scene = Scene(filenames=["fake1_1.txt", "fake1_highres_1.txt"], reader="fake1") + scene.load(["comp18"]) loaded_ids = list(scene._datasets.keys()) # depends on: # ds3 @@ -379,11 +379,11 @@ def test_load_comp18(self): # We should end up with ds3, ds4 (mod1, mod3), ds5 (mod1), and ds1 # for the incomp_areas modifier assert len(loaded_ids) == 4 # the 1 dependencies - assert 'ds3' in scene._datasets - assert make_dataid(name='ds4', calibration='reflectance', - modifiers=('mod1', 'mod3')) in scene._datasets - assert make_dataid(name='ds5', resolution=250, - modifiers=('mod1',)) in scene._datasets + assert "ds3" in scene._datasets + assert make_dataid(name="ds4", calibration="reflectance", + modifiers=("mod1", "mod3")) in scene._datasets + assert make_dataid(name="ds5", resolution=250, + modifiers=("mod1",)) in scene._datasets def test_load_comp18_2(self): """Test loading a composite that depends on an incompatible area modified dataset. 
@@ -393,8 +393,8 @@ def test_load_comp18_2(self):
"""
# it is fine that an optional prereq doesn't exist
- scene = Scene(filenames=['fake1_1.txt', 'fake1_highres_1.txt'], reader='fake1')
- scene.load(['comp18_2'])
+ scene = Scene(filenames=["fake1_1.txt", "fake1_highres_1.txt"], reader="fake1")
+ scene.load(["comp18_2"])
loaded_ids = list(scene._datasets.keys())
# depends on:
# ds3
@@ -403,12 +403,12 @@ def test_load_comp18_2(self):
# We should end up with ds3, ds4 (mod1, mod3), ds5 (mod1), and ds1
# and ds2 for the incomp_areas_opt modifier
assert len(loaded_ids) == 5  # the 5 dependencies
- assert 'ds3' in scene._datasets
- assert 'ds2' in scene._datasets
- assert make_dataid(name='ds4', calibration='reflectance',
- modifiers=('mod1', 'mod3')) in scene._datasets
- assert make_dataid(name='ds5', resolution=250,
- modifiers=('mod1',)) in scene._datasets
+ assert "ds3" in scene._datasets
+ assert "ds2" in scene._datasets
+ assert make_dataid(name="ds4", calibration="reflectance",
+ modifiers=("mod1", "mod3")) in scene._datasets
+ assert make_dataid(name="ds5", resolution=250,
+ modifiers=("mod1",)) in scene._datasets
def test_load_comp19(self):
"""Test loading a composite that shares a dep with a dependency.
@@ -421,79 +421,79 @@
"""
# Check dependency tree nodes
# initialize the dep tree without loading the data
- scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
- scene._update_dependency_tree({'comp19'}, None)
+ scene = Scene(filenames=["fake1_1.txt"], reader="fake1")
+ scene._update_dependency_tree({"comp19"}, None)
- this_node = scene._dependency_tree['comp19']
- shared_dep_id = make_dataid(name='ds5', modifiers=('res_change',))
+ this_node = scene._dependency_tree["comp19"]
+ shared_dep_id = make_dataid(name="ds5", modifiers=("res_change",))
shared_dep_expected_node = scene._dependency_tree[shared_dep_id]
# get the node for the first dep in the prereqs list of the
# comp13 node
- shared_dep_node = scene._dependency_tree['comp13'].data[1][0]
+ shared_dep_node = scene._dependency_tree["comp13"].data[1][0]
shared_dep_node2 = this_node.data[1][0]
assert shared_dep_expected_node is shared_dep_node
assert shared_dep_expected_node is shared_dep_node2
- scene.load(['comp19'])
+ scene.load(["comp19"])
loaded_ids = list(scene._datasets.keys())
assert len(loaded_ids) == 1
- assert loaded_ids[0] == make_cid(name='comp19')
+ assert loaded_ids[0] == make_cid(name="comp19")
def test_load_multiple_comps(self):
"""Test loading multiple composites."""
- scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
- scene.load(['comp1', 'comp2', 'comp3', 'comp4', 'comp5', 'comp6',
- 'comp7', 'comp9', 'comp10'])
+ scene = Scene(filenames=["fake1_1.txt"], reader="fake1")
+ scene.load(["comp1", "comp2", "comp3", "comp4", "comp5", "comp6",
+ "comp7", "comp9", "comp10"])
loaded_ids = list(scene._datasets.keys())
assert len(loaded_ids) == 9
def test_load_multiple_comps_separate(self):
"""Test loading multiple composites, one at a time."""
- scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
- scene.load(['comp10'])
- scene.load(['comp9'])
- scene.load(['comp7'])
- scene.load(['comp6'])
- scene.load(['comp5'])
- scene.load(['comp4'])
- scene.load(['comp3'])
- scene.load(['comp2'])
- scene.load(['comp1'])
+ scene = Scene(filenames=["fake1_1.txt"], reader="fake1")
+ scene.load(["comp10"])
+ scene.load(["comp9"])
+ scene.load(["comp7"])
+ scene.load(["comp6"])
+ scene.load(["comp5"])
+ scene.load(["comp4"])
+ scene.load(["comp3"])
+ scene.load(["comp2"])
+ scene.load(["comp1"])
loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 9 def test_load_modified(self): """Test loading a modified dataset.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load([make_dsq(name='ds1', modifiers=('mod1', 'mod2'))]) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load([make_dsq(name="ds1", modifiers=("mod1", "mod2"))]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - assert loaded_ids[0]['modifiers'] == ('mod1', 'mod2') + assert loaded_ids[0]["modifiers"] == ("mod1", "mod2") def test_load_modified_with_load_kwarg(self): """Test loading a modified dataset using the ``Scene.load`` keyword argument.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds1'], modifiers=('mod1', 'mod2')) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds1"], modifiers=("mod1", "mod2")) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - assert loaded_ids[0]['modifiers'] == ('mod1', 'mod2') + assert loaded_ids[0]["modifiers"] == ("mod1", "mod2") def test_load_multiple_modified(self): """Test loading multiple modified datasets.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load([ - make_dataid(name='ds1', modifiers=('mod1', 'mod2')), - make_dataid(name='ds2', modifiers=('mod2', 'mod1')), + make_dataid(name="ds1", modifiers=("mod1", "mod2")), + make_dataid(name="ds2", modifiers=("mod2", "mod1")), ]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 for i in loaded_ids: - if i['name'] == 'ds1': - assert i['modifiers'] == ('mod1', 'mod2') + if i["name"] == "ds1": + assert i["modifiers"] == ("mod1", "mod2") else: - assert i['name'] == 'ds2' - assert i['modifiers'] == ('mod2', 'mod1') + assert i["name"] == "ds2" + assert i["modifiers"] == ("mod2", "mod1") def test_load_dataset_after_composite(self): """Test load composite followed by other datasets.""" @@ -501,15 +501,15 @@ def test_load_dataset_after_composite(self): from satpy.tests.utils import FakeCompositor load_mock = spy_decorator(FileYAMLReader.load) comp_mock = spy_decorator(FakeCompositor.__call__) - with mock.patch.object(FileYAMLReader, 'load', load_mock), \ - mock.patch.object(FakeCompositor, '__call__', comp_mock): + with mock.patch.object(FileYAMLReader, "load", load_mock), \ + mock.patch.object(FakeCompositor, "__call__", comp_mock): lmock = load_mock.mock - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['comp3']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["comp3"]) assert lmock.call_count == 1 - scene.load(['ds1']) + scene.load(["ds1"]) assert lmock.call_count == 2 - scene.load(['ds1']) + scene.load(["ds1"]) # we should only load from the file twice assert lmock.call_count == 2 # we should only generate the composite once @@ -524,36 +524,36 @@ def test_load_dataset_after_composite2(self): load_mock = spy_decorator(FileYAMLReader.load) comp_mock = spy_decorator(FakeCompositor.__call__) mod_mock = spy_decorator(FakeModifier.__call__) - with mock.patch.object(FileYAMLReader, 'load', load_mock), \ - mock.patch.object(FakeCompositor, '__call__', comp_mock), \ - mock.patch.object(FakeModifier, '__call__', mod_mock): + with mock.patch.object(FileYAMLReader, "load", load_mock), \ + mock.patch.object(FakeCompositor, "__call__", comp_mock), \ + mock.patch.object(FakeModifier, "__call__", mod_mock): lmock = load_mock.mock - scene = 
Scene(filenames=['fake1_1.txt'], reader='fake1')
- scene.load(['comp10'])
+ scene = Scene(filenames=["fake1_1.txt"], reader="fake1")
+ scene.load(["comp10"])
assert lmock.call_count == 1
loaded_ids = list(scene._datasets.keys())
assert len(loaded_ids) == 1
- with mock.patch.object(scene, '_generate_composites_nodes_from_loaded_datasets',
+ with mock.patch.object(scene, "_generate_composites_nodes_from_loaded_datasets",
wraps=scene._generate_composites_nodes_from_loaded_datasets) as m:
- scene.load(['ds1'])
+ scene.load(["ds1"])
assert lmock.call_count == 2
loaded_ids = list(scene._datasets.keys())
assert len(loaded_ids) == 2
# this is the unmodified ds1
assert make_dataid(
- name='ds1', resolution=250, calibration='reflectance', modifiers=tuple()
+ name="ds1", resolution=250, calibration="reflectance", modifiers=tuple()
) in loaded_ids
# m.assert_called_once_with(set([scene._dependency_tree['ds1']]))
m.assert_called_once_with(set())
- with mock.patch.object(scene, '_generate_composites_nodes_from_loaded_datasets',
+ with mock.patch.object(scene, "_generate_composites_nodes_from_loaded_datasets",
wraps=scene._generate_composites_nodes_from_loaded_datasets) as m:
- scene.load(['ds1'])
+ scene.load(["ds1"])
assert lmock.call_count == 2
loaded_ids = list(scene._datasets.keys())
assert len(loaded_ids) == 2
# this is the unmodified ds1
assert make_dataid(
- name='ds1', resolution=250, calibration='reflectance', modifiers=tuple()
+ name="ds1", resolution=250, calibration="reflectance", modifiers=tuple()
) in loaded_ids
m.assert_called_once_with(set())
# we should only generate the comp10 composite once but comp2 was also generated
@@ -567,17 +567,17 @@ def test_no_generate_comp10(self):
"""Test generating a composite after loading."""
# it is fine that an optional prereq doesn't exist
- scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
- scene.load(['comp10'], generate=False)
- assert any(ds_id['name'] == 'comp10' for ds_id in scene._wishlist)
- assert 'comp10' not in scene._datasets
+ scene = Scene(filenames=["fake1_1.txt"], reader="fake1")
+ scene.load(["comp10"], generate=False)
+ assert any(ds_id["name"] == "comp10" for ds_id in scene._wishlist)
+ assert "comp10" not in scene._datasets
# two dependencies should have been loaded
assert len(scene._datasets) == 2
assert len(scene.missing_datasets) == 1
scene._generate_composites_from_loaded_datasets()
- assert any(ds_id['name'] == 'comp10' for ds_id in scene._wishlist)
- assert 'comp10' in scene._datasets
+ assert any(ds_id["name"] == "comp10" for ds_id in scene._wishlist)
+ assert "comp10" in scene._datasets
assert not scene.missing_datasets
def test_modified_with_wl_dep(self):
@@ -592,10 +592,10 @@ def test_modified_with_wl_dep(self):
# Check dependency tree nodes
# initialize the dep tree without loading the data
- ds1_mod_id = make_dsq(name='ds1', modifiers=('mod_wl',))
- ds3_mod_id = make_dsq(name='ds3', modifiers=('mod_wl',))
+ ds1_mod_id = make_dsq(name="ds1", modifiers=("mod_wl",))
+ ds3_mod_id = make_dsq(name="ds3", modifiers=("mod_wl",))
- scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
+ scene = Scene(filenames=["fake1_1.txt"], reader="fake1")
scene._update_dependency_tree({ds1_mod_id, ds3_mod_id}, None)
ds1_mod_node = scene._dependency_tree[ds1_mod_id]
@@ -603,10 +603,10 @@ def test_modified_with_wl_dep(self):
ds1_mod_dep_node = ds1_mod_node.data[1][1]
ds3_mod_dep_node = ds3_mod_node.data[1][1]
# mod_wl depends on this node:
- ds6_modded_node = 
scene._dependency_tree[make_dataid(name='ds6', modifiers=('mod1',))]
+ ds6_modded_node = scene._dependency_tree[make_dataid(name="ds6", modifiers=("mod1",))]
# this dep should be fully qualified with name and wavelength
- assert ds6_modded_node.name['name'] is not None
- assert isinstance(ds6_modded_node.name['wavelength'], WavelengthRange)
+ assert ds6_modded_node.name["name"] is not None
+ assert isinstance(ds6_modded_node.name["wavelength"], WavelengthRange)
# the node should be shared between everything that uses it
assert ds1_mod_dep_node is ds3_mod_dep_node
assert ds1_mod_dep_node is ds6_modded_node
@@ -621,25 +621,25 @@ def test_modified_with_wl_dep(self):
def test_load_comp11_and_23(self):
"""Test loading two composites that depend on similar wavelengths."""
- scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
+ scene = Scene(filenames=["fake1_1.txt"], reader="fake1")
# mock the available comps/mods in the compositor loader
avail_comps = scene.available_composite_ids()
- assert make_cid(name='comp11') in avail_comps
- assert make_cid(name='comp23') in avail_comps
+ assert make_cid(name="comp11") in avail_comps
+ assert make_cid(name="comp23") in avail_comps
# it is fine that an optional prereq doesn't exist
- scene.load(['comp11', 'comp23'])
- comp11_node = scene._dependency_tree['comp11']
- comp23_node = scene._dependency_tree['comp23']
- assert comp11_node.data[1][-1].name['name'] == 'ds10'
- assert comp23_node.data[1][0].name['name'] == 'ds8'
+ scene.load(["comp11", "comp23"])
+ comp11_node = scene._dependency_tree["comp11"]
+ comp23_node = scene._dependency_tree["comp23"]
+ assert comp11_node.data[1][-1].name["name"] == "ds10"
+ assert comp23_node.data[1][0].name["name"] == "ds8"
loaded_ids = list(scene._datasets.keys())
assert len(loaded_ids) == 2
- assert 'comp11' in scene
- assert 'comp23' in scene
+ assert "comp11" in scene
+ assert "comp23" in scene
def test_load_too_many(self):
"""Test dependency tree if too many reader keys match."""
- scene = Scene(filenames=['fake3_1.txt'], reader='fake3')
+ scene = Scene(filenames=["fake3_1.txt"], reader="fake3")
avail_comps = scene.available_composite_ids()
# static image => 1
assert len(avail_comps) == 1
@@ -660,8 +660,8 @@ def test_load_when_sensor_none_in_preloaded_dataarrays(self):
def _scene_with_data_array_none_sensor():
- scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
- scene['my_data'] = _data_array_none_sensor("my_data")
+ scene = Scene(filenames=["fake1_1.txt"], reader="fake1")
+ scene["my_data"] = _data_array_none_sensor("my_data")
return scene
diff --git a/satpy/tests/scene_tests/test_resampling.py b/satpy/tests/scene_tests/test_resampling.py
index 39f9a50092..286735c093 100644
--- a/satpy/tests/scene_tests/test_resampling.py
+++ b/satpy/tests/scene_tests/test_resampling.py
@@ -39,40 +39,40 @@ def test_crop(self):
scene1 = Scene()
area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927,
5570248.477339745)
- proj_dict = {'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0,
- 'lon_0': 0.0, 'proj': 'geos', 'units': 'm'}
+ proj_dict = {"a": 6378169.0, "b": 6356583.8, "h": 35785831.0,
+ "lon_0": 0.0, "proj": "geos", "units": "m"}
x_size = 3712
y_size = 3712
area_def = AreaDefinition(
- 'test',
- 'test',
- 'test',
+ "test",
+ "test",
+ "test",
proj_dict,
x_size,
y_size,
area_extent,
)
area_def2 = AreaDefinition(
- 'test2',
- 'test2',
- 'test2',
+ "test2",
+ "test2",
+ "test2",
proj_dict,
x_size // 2,
y_size // 2,
area_extent,
)
scene1["1"] = xr.DataArray(np.zeros((y_size, x_size)))
scene1["2"] = 
xr.DataArray(np.zeros((y_size, x_size)), dims=('y', 'x')) - scene1["3"] = xr.DataArray(np.zeros((y_size, x_size)), dims=('y', 'x'), - attrs={'area': area_def}) - scene1["4"] = xr.DataArray(np.zeros((y_size // 2, x_size // 2)), dims=('y', 'x'), - attrs={'area': area_def2}) + scene1["2"] = xr.DataArray(np.zeros((y_size, x_size)), dims=("y", "x")) + scene1["3"] = xr.DataArray(np.zeros((y_size, x_size)), dims=("y", "x"), + attrs={"area": area_def}) + scene1["4"] = xr.DataArray(np.zeros((y_size // 2, x_size // 2)), dims=("y", "x"), + attrs={"area": area_def2}) # by area crop_area = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_dict, x_size, y_size, @@ -80,33 +80,33 @@ def test_crop(self): area_extent[2] - 10000., area_extent[3] - 500000.) ) new_scn1 = scene1.crop(crop_area) - assert '1' in new_scn1 - assert '2' in new_scn1 - assert '3' in new_scn1 - assert new_scn1['1'].shape == (y_size, x_size) - assert new_scn1['2'].shape == (y_size, x_size) - assert new_scn1['3'].shape == (3380, 3708) - assert new_scn1['4'].shape == (1690, 1854) + assert "1" in new_scn1 + assert "2" in new_scn1 + assert "3" in new_scn1 + assert new_scn1["1"].shape == (y_size, x_size) + assert new_scn1["2"].shape == (y_size, x_size) + assert new_scn1["3"].shape == (3380, 3708) + assert new_scn1["4"].shape == (1690, 1854) # by lon/lat bbox new_scn1 = scene1.crop(ll_bbox=(-20., -5., 0, 0)) - assert '1' in new_scn1 - assert '2' in new_scn1 - assert '3' in new_scn1 - assert new_scn1['1'].shape == (y_size, x_size) - assert new_scn1['2'].shape == (y_size, x_size) - assert new_scn1['3'].shape == (184, 714) - assert new_scn1['4'].shape == (92, 357) + assert "1" in new_scn1 + assert "2" in new_scn1 + assert "3" in new_scn1 + assert new_scn1["1"].shape == (y_size, x_size) + assert new_scn1["2"].shape == (y_size, x_size) + assert new_scn1["3"].shape == (184, 714) + assert new_scn1["4"].shape == (92, 357) # by x/y bbox new_scn1 = scene1.crop(xy_bbox=(-200000., -100000., 0, 0)) - assert '1' in new_scn1 - assert '2' in new_scn1 - assert '3' in new_scn1 - assert new_scn1['1'].shape == (y_size, x_size) - assert new_scn1['2'].shape == (y_size, x_size) - assert new_scn1['3'].shape == (36, 70) - assert new_scn1['4'].shape == (18, 35) + assert "1" in new_scn1 + assert "2" in new_scn1 + assert "3" in new_scn1 + assert new_scn1["1"].shape == (y_size, x_size) + assert new_scn1["2"].shape == (y_size, x_size) + assert new_scn1["3"].shape == (36, 70) + assert new_scn1["4"].shape == (18, 35) def test_crop_epsg_crs(self): """Test the crop method when source area uses an EPSG code.""" @@ -117,18 +117,18 @@ def test_crop_epsg_crs(self): x_size = 3712 y_size = 3712 area_def = AreaDefinition( - 'test', 'test', 'test', + "test", "test", "test", "EPSG:32630", x_size, y_size, area_extent, ) - scene1["1"] = xr.DataArray(np.zeros((y_size, x_size)), dims=('y', 'x'), - attrs={'area': area_def}) + scene1["1"] = xr.DataArray(np.zeros((y_size, x_size)), dims=("y", "x"), + attrs={"area": area_def}) # by x/y bbox new_scn1 = scene1.crop(xy_bbox=(719695.7781587119, 5427887.407618969, 725068.1609052602, 5433708.364368956)) - assert '1' in new_scn1 - assert new_scn1['1'].shape == (198, 182) + assert "1" in new_scn1 + assert new_scn1["1"].shape == (198, 182) def test_crop_rgb(self): """Test the crop method on multi-dimensional data.""" @@ -136,43 +136,43 @@ def test_crop_rgb(self): scene1 = Scene() area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927, 5570248.477339745) - proj_dict = {'a': 6378169.0, 'b': 6356583.8, 
'h': 35785831.0, - 'lon_0': 0.0, 'proj': 'geos', 'units': 'm'} + proj_dict = {"a": 6378169.0, "b": 6356583.8, "h": 35785831.0, + "lon_0": 0.0, "proj": "geos", "units": "m"} x_size = 3712 y_size = 3712 area_def = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_dict, x_size, y_size, area_extent, ) area_def2 = AreaDefinition( - 'test2', - 'test2', - 'test2', + "test2", + "test2", + "test2", proj_dict, x_size // 2, y_size // 2, area_extent, ) scene1["1"] = xr.DataArray(np.zeros((3, y_size, x_size)), - dims=('bands', 'y', 'x'), - attrs={'area': area_def}) + dims=("bands", "y", "x"), + attrs={"area": area_def}) scene1["2"] = xr.DataArray(np.zeros((y_size // 2, 3, x_size // 2)), - dims=('y', 'bands', 'x'), - attrs={'area': area_def2}) + dims=("y", "bands", "x"), + attrs={"area": area_def2}) # by lon/lat bbox new_scn1 = scene1.crop(ll_bbox=(-20., -5., 0, 0)) - assert '1' in new_scn1 - assert '2' in new_scn1 - assert 'bands' in new_scn1['1'].dims - assert 'bands' in new_scn1['2'].dims - assert new_scn1['1'].shape == (3, 184, 714) - assert new_scn1['2'].shape == (92, 3, 357) + assert "1" in new_scn1 + assert "2" in new_scn1 + assert "bands" in new_scn1["1"].dims + assert "bands" in new_scn1["2"].dims + assert new_scn1["1"].shape == (3, 184, 714) + assert new_scn1["2"].shape == (92, 3, 357) @pytest.mark.usefixtures("include_test_etc") @@ -187,17 +187,17 @@ def _fake_resample_dataset_force_20x20(self, dataset, dest_area, **kwargs): """Return copy of dataset pretending it was resampled to (20, 20) shape.""" data = np.zeros((20, 20)) attrs = dataset.attrs.copy() - attrs['area'] = dest_area + attrs["area"] = dest_area return xr.DataArray( data, - dims=('y', 'x'), + dims=("y", "x"), attrs=attrs, ) - @mock.patch('satpy.scene.resample_dataset') - @pytest.mark.parametrize('datasets', [ + @mock.patch("satpy.scene.resample_dataset") + @pytest.mark.parametrize("datasets", [ None, - ('comp13', 'ds5', 'ds2'), + ("comp13", "ds5", "ds2"), ]) def test_resample_scene_copy(self, rs, datasets): """Test that the Scene is properly copied during resampling. @@ -209,26 +209,26 @@ def test_resample_scene_copy(self, rs, datasets): from pyresample.geometry import AreaDefinition rs.side_effect = self._fake_resample_dataset_force_20x20 - proj_str = ('+proj=lcc +datum=WGS84 +ellps=WGS84 ' - '+lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs') - area_def = AreaDefinition('test', 'test', 'test', proj_str, 5, 5, (-1000., -1500., 1000., 1500.)) + proj_str = ("+proj=lcc +datum=WGS84 +ellps=WGS84 " + "+lon_0=-95. 
+lat_0=25 +lat_1=25 +units=m +no_defs") + area_def = AreaDefinition("test", "test", "test", proj_str, 5, 5, (-1000., -1500., 1000., 1500.)) area_def.get_area_slices = mock.MagicMock() - scene = Scene(filenames=['fake1_1.txt', 'fake1_highres_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt", "fake1_highres_1.txt"], reader="fake1") - scene.load(['comp19']) + scene.load(["comp19"]) new_scene = scene.resample(area_def, datasets=datasets) - new_scene['new_ds'] = new_scene['comp19'].copy() + new_scene["new_ds"] = new_scene["comp19"].copy() - scene.load(['ds1']) + scene.load(["ds1"]) - comp19_node = scene._dependency_tree['comp19'] - ds5_mod_id = make_dataid(name='ds5', modifiers=('res_change',)) + comp19_node = scene._dependency_tree["comp19"] + ds5_mod_id = make_dataid(name="ds5", modifiers=("res_change",)) ds5_node = scene._dependency_tree[ds5_mod_id] - comp13_node = scene._dependency_tree['comp13'] + comp13_node = scene._dependency_tree["comp13"] assert comp13_node.data[1][0] is comp19_node.data[1][0] assert comp13_node.data[1][0] is ds5_node - pytest.raises(KeyError, scene._dependency_tree.__getitem__, 'new_ds') + pytest.raises(KeyError, scene._dependency_tree.__getitem__, "new_ds") # comp19 required resampling to produce so we should have its 3 deps # 1. comp13 @@ -238,15 +238,15 @@ def test_resample_scene_copy(self, rs, datasets): # 4. ds1 loaded_ids = list(scene.keys()) assert len(loaded_ids) == 4 - for name in ('comp13', 'ds5', 'ds2', 'ds1'): - assert any(x['name'] == name for x in loaded_ids) + for name in ("comp13", "ds5", "ds2", "ds1"): + assert any(x["name"] == name for x in loaded_ids) loaded_ids = list(new_scene.keys()) assert len(loaded_ids) == 2 - assert loaded_ids[0] == make_cid(name='comp19') - assert loaded_ids[1] == make_cid(name='new_ds') + assert loaded_ids[0] == make_cid(name="comp19") + assert loaded_ids[1] == make_cid(name="new_ds") - @mock.patch('satpy.scene.resample_dataset') + @mock.patch("satpy.scene.resample_dataset") def test_resample_scene_preserves_requested_dependencies(self, rs): """Test that the Scene is properly copied during resampling. @@ -258,61 +258,61 @@ def test_resample_scene_preserves_requested_dependencies(self, rs): from pyresample.utils import proj4_str_to_dict rs.side_effect = self._fake_resample_dataset - proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 ' - '+lon_0=-95. +lat_0=25 +lat_1=25 ' - '+units=m +no_defs') - area_def = AreaDefinition('test', 'test', 'test', proj_dict, 5, 5, (-1000., -1500., 1000., 1500.)) + proj_dict = proj4_str_to_dict("+proj=lcc +datum=WGS84 +ellps=WGS84 " + "+lon_0=-95. 
+lat_0=25 +lat_1=25 " + "+units=m +no_defs") + area_def = AreaDefinition("test", "test", "test", proj_dict, 5, 5, (-1000., -1500., 1000., 1500.)) area_def.get_area_slices = mock.MagicMock() - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") # Set PYTHONHASHSEED to 0 in the interpreter to test as intended (comp26 comes before comp14) - scene.load(['comp26', 'comp14'], generate=False) + scene.load(["comp26", "comp14"], generate=False) scene.resample(area_def, unload=True) new_scene_2 = scene.resample(area_def, unload=True) - assert 'comp14' not in scene - assert 'comp26' not in scene - assert 'comp14' in new_scene_2 - assert 'comp26' in new_scene_2 - assert 'ds1' not in new_scene_2 # unloaded + assert "comp14" not in scene + assert "comp26" not in scene + assert "comp14" in new_scene_2 + assert "comp26" in new_scene_2 + assert "ds1" not in new_scene_2 # unloaded - @mock.patch('satpy.scene.resample_dataset') + @mock.patch("satpy.scene.resample_dataset") def test_resample_reduce_data_toggle(self, rs): """Test that the Scene can be reduced or not reduced during resampling.""" from pyresample.geometry import AreaDefinition rs.side_effect = self._fake_resample_dataset_force_20x20 - proj_str = ('+proj=lcc +datum=WGS84 +ellps=WGS84 ' - '+lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs') - target_area = AreaDefinition('test', 'test', 'test', proj_str, 4, 4, (-1000., -1500., 1000., 1500.)) - area_def = AreaDefinition('test', 'test', 'test', proj_str, 5, 5, (-1000., -1500., 1000., 1500.)) + proj_str = ("+proj=lcc +datum=WGS84 +ellps=WGS84 " + "+lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs") + target_area = AreaDefinition("test", "test", "test", proj_str, 4, 4, (-1000., -1500., 1000., 1500.)) + area_def = AreaDefinition("test", "test", "test", proj_str, 5, 5, (-1000., -1500., 1000., 1500.)) area_def.get_area_slices = mock.MagicMock() get_area_slices = area_def.get_area_slices get_area_slices.return_value = (slice(0, 3, None), slice(0, 3, None)) - area_def_big = AreaDefinition('test', 'test', 'test', proj_str, 10, 10, (-1000., -1500., 1000., 1500.)) + area_def_big = AreaDefinition("test", "test", "test", proj_str, 10, 10, (-1000., -1500., 1000., 1500.)) area_def_big.get_area_slices = mock.MagicMock() get_area_slices_big = area_def_big.get_area_slices get_area_slices_big.return_value = (slice(0, 6, None), slice(0, 6, None)) # Test that data reduction can be disabled - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['comp19']) - scene['comp19'].attrs['area'] = area_def - scene['comp19_big'] = xr.DataArray( - da.zeros((10, 10)), dims=('y', 'x'), - attrs=scene['comp19'].attrs.copy()) - scene['comp19_big'].attrs['area'] = area_def_big - scene['comp19_copy'] = scene['comp19'].copy() + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["comp19"]) + scene["comp19"].attrs["area"] = area_def + scene["comp19_big"] = xr.DataArray( + da.zeros((10, 10)), dims=("y", "x"), + attrs=scene["comp19"].attrs.copy()) + scene["comp19_big"].attrs["area"] = area_def_big + scene["comp19_copy"] = scene["comp19"].copy() orig_slice_data = scene._slice_data # we force the below order of processing to test that success isn't # based on data of the same resolution being processed together test_order = [ - make_cid(**scene['comp19'].attrs), - make_cid(**scene['comp19_big'].attrs), - make_cid(**scene['comp19_copy'].attrs), + make_cid(**scene["comp19"].attrs), + make_cid(**scene["comp19_big"].attrs), + 
make_cid(**scene["comp19_copy"].attrs), ] - with mock.patch('satpy.scene.Scene._slice_data') as slice_data, \ - mock.patch('satpy.dataset.dataset_walker') as ds_walker: + with mock.patch("satpy.scene.Scene._slice_data") as slice_data, \ + mock.patch("satpy.dataset.dataset_walker") as ds_walker: ds_walker.return_value = test_order slice_data.side_effect = orig_slice_data scene.resample(target_area, reduce_data=False) @@ -332,24 +332,24 @@ def test_resample_ancillary(self): """Test that the Scene reducing data does not affect final output.""" from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict - proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 ' - '+lon_0=-95. +lat_0=25 +lat_1=25 ' - '+units=m +no_defs') - area_def = AreaDefinition('test', 'test', 'test', proj_dict, 5, 5, (-1000., -1500., 1000., 1500.)) - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - - scene.load(['comp19', 'comp20']) - scene['comp19'].attrs['area'] = area_def - scene['comp19'].attrs['ancillary_variables'] = [scene['comp20']] - scene['comp20'].attrs['area'] = area_def - - dst_area = AreaDefinition('dst', 'dst', 'dst', + proj_dict = proj4_str_to_dict("+proj=lcc +datum=WGS84 +ellps=WGS84 " + "+lon_0=-95. +lat_0=25 +lat_1=25 " + "+units=m +no_defs") + area_def = AreaDefinition("test", "test", "test", proj_dict, 5, 5, (-1000., -1500., 1000., 1500.)) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + + scene.load(["comp19", "comp20"]) + scene["comp19"].attrs["area"] = area_def + scene["comp19"].attrs["ancillary_variables"] = [scene["comp20"]] + scene["comp20"].attrs["area"] = area_def + + dst_area = AreaDefinition("dst", "dst", "dst", proj_dict, 2, 2, (-1000., -1500., 0., 0.), ) new_scene = scene.resample(dst_area) - assert new_scene['comp20'] is new_scene['comp19'].attrs['ancillary_variables'][0] + assert new_scene["comp20"] is new_scene["comp19"].attrs["ancillary_variables"][0] def test_resample_multi_ancillary(self): """Test that multiple ancillary variables are retained after resampling. @@ -380,14 +380,14 @@ def test_resample_multi_ancillary(self): def test_resample_reduce_data(self): """Test that the Scene reducing data does not affect final output.""" from pyresample.geometry import AreaDefinition - proj_str = ('+proj=lcc +datum=WGS84 +ellps=WGS84 ' - '+lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs') - area_def = AreaDefinition('test', 'test', 'test', proj_str, 20, 20, (-1000., -1500., 1000., 1500.)) - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - - scene.load(['comp19']) - scene['comp19'].attrs['area'] = area_def - dst_area = AreaDefinition('dst', 'dst', 'dst', + proj_str = ("+proj=lcc +datum=WGS84 +ellps=WGS84 " + "+lon_0=-95. 
+lat_0=25 +lat_1=25 +units=m +no_defs") + area_def = AreaDefinition("test", "test", "test", proj_str, 20, 20, (-1000., -1500., 1000., 1500.)) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + + scene.load(["comp19"]) + scene["comp19"].attrs["area"] = area_def + dst_area = AreaDefinition("dst", "dst", "dst", proj_str, 20, 20, (-1000., -1500., 0., 0.), @@ -395,24 +395,24 @@ def test_resample_reduce_data(self): new_scene1 = scene.resample(dst_area, reduce_data=False) new_scene2 = scene.resample(dst_area) new_scene3 = scene.resample(dst_area, reduce_data=True) - assert new_scene1['comp19'].shape == (20, 20, 3) - assert new_scene2['comp19'].shape == (20, 20, 3) - assert new_scene3['comp19'].shape == (20, 20, 3) + assert new_scene1["comp19"].shape == (20, 20, 3) + assert new_scene2["comp19"].shape == (20, 20, 3) + assert new_scene3["comp19"].shape == (20, 20, 3) - @mock.patch('satpy.scene.resample_dataset') + @mock.patch("satpy.scene.resample_dataset") def test_no_generate_comp10(self, rs): """Test generating a composite after loading.""" from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict rs.side_effect = self._fake_resample_dataset - proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 ' - '+lon_0=-95. +lat_0=25 +lat_1=25 ' - '+units=m +no_defs') + proj_dict = proj4_str_to_dict("+proj=lcc +datum=WGS84 +ellps=WGS84 " + "+lon_0=-95. +lat_0=25 +lat_1=25 " + "+units=m +no_defs") area_def = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_dict, 200, 400, @@ -420,36 +420,36 @@ def test_no_generate_comp10(self, rs): ) # it is fine that an optional prereq doesn't exist - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['comp10'], generate=False) - assert any(ds_id['name'] == 'comp10' for ds_id in scene._wishlist) - assert 'comp10' not in scene + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["comp10"], generate=False) + assert any(ds_id["name"] == "comp10" for ds_id in scene._wishlist) + assert "comp10" not in scene # two dependencies should have been loaded assert len(scene._datasets) == 2 assert len(scene.missing_datasets) == 1 new_scn = scene.resample(area_def, generate=False) - assert 'comp10' not in scene + assert "comp10" not in scene # two dependencies should have been loaded assert len(scene._datasets) == 2 assert len(scene.missing_datasets) == 1 new_scn._generate_composites_from_loaded_datasets() - assert any(ds_id['name'] == 'comp10' for ds_id in new_scn._wishlist) - assert 'comp10' in new_scn + assert any(ds_id["name"] == "comp10" for ds_id in new_scn._wishlist) + assert "comp10" in new_scn assert not new_scn.missing_datasets # try generating them right away new_scn = scene.resample(area_def) - assert any(ds_id['name'] == 'comp10' for ds_id in new_scn._wishlist) - assert 'comp10' in new_scn + assert any(ds_id["name"] == "comp10" for ds_id in new_scn._wishlist) + assert "comp10" in new_scn assert not new_scn.missing_datasets def test_comp_loading_after_resampling_existing_sensor(self): """Test requesting a composite after resampling.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load(["ds1", "ds2"]) - new_scn = scene.resample(resampler='native') + new_scn = scene.resample(resampler="native") # Can't load from readers after resampling with pytest.raises(KeyError): @@ -463,9 +463,9 @@ def test_comp_loading_after_resampling_existing_sensor(self): def 
test_comp_loading_after_resampling_new_sensor(self): """Test requesting a composite after resampling when the sensor composites weren't loaded before.""" # this is our base Scene with sensor "fake_sensor2" - scene1 = Scene(filenames=['fake2_3ds_1.txt'], reader='fake2_3ds') + scene1 = Scene(filenames=["fake2_3ds_1.txt"], reader="fake2_3ds") scene1.load(["ds2"]) - new_scn = scene1.resample(resampler='native') + new_scn = scene1.resample(resampler="native") # Can't load from readers after resampling with pytest.raises(KeyError): @@ -534,16 +534,16 @@ def test_comps_need_resampling_optional_mod_deps(self): dependencies that aren't needed which fail. """ - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") # should require resampling - scene.load(['comp27', 'ds13']) - assert 'comp27' not in scene - assert 'ds13' in scene + scene.load(["comp27", "ds13"]) + assert "comp27" not in scene + assert "ds13" in scene - new_scene = scene.resample(resampler='native') + new_scene = scene.resample(resampler="native") assert len(list(new_scene.keys())) == 2 - assert 'comp27' in new_scene - assert 'ds13' in new_scene + assert "comp27" in new_scene + assert "ds13" in new_scene class TestSceneAggregation: @@ -556,7 +556,7 @@ def test_aggregate(self): scene1 = self._create_test_data(x_size, y_size) - scene2 = scene1.aggregate(func='sum', x=2, y=2) + scene2 = scene1.aggregate(func="sum", x=2, y=2) expected_aggregated_shape = (y_size / 2, x_size / 2) self._check_aggregation_results(expected_aggregated_shape, scene1, scene2, x_size, y_size) @@ -577,40 +577,40 @@ def _create_test_data(x_size, y_size): scene1 = Scene() area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927, 5570248.477339745) - proj_dict = {'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0, - 'lon_0': 0.0, 'proj': 'geos', 'units': 'm'} + proj_dict = {"a": 6378169.0, "b": 6356583.8, "h": 35785831.0, + "lon_0": 0.0, "proj": "geos", "units": "m"} area_def = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_dict, x_size, y_size, area_extent, ) scene1["1"] = xr.DataArray(np.ones((y_size, x_size)), - attrs={'_satpy_id_keys': default_id_keys_config}) + attrs={"_satpy_id_keys": default_id_keys_config}) scene1["2"] = xr.DataArray(np.ones((y_size, x_size)), - dims=('y', 'x'), - attrs={'_satpy_id_keys': default_id_keys_config}) + dims=("y", "x"), + attrs={"_satpy_id_keys": default_id_keys_config}) scene1["3"] = xr.DataArray(np.ones((y_size, x_size)), - dims=('y', 'x'), - attrs={'area': area_def, '_satpy_id_keys': default_id_keys_config}) + dims=("y", "x"), + attrs={"area": area_def, "_satpy_id_keys": default_id_keys_config}) scene1["4"] = xr.DataArray(np.ones((y_size, x_size)), - dims=('y', 'x'), - attrs={'area': area_def, 'standard_name': 'backscatter', - '_satpy_id_keys': default_id_keys_config}) + dims=("y", "x"), + attrs={"area": area_def, "standard_name": "backscatter", + "_satpy_id_keys": default_id_keys_config}) return scene1 def _check_aggregation_results(self, expected_aggregated_shape, scene1, scene2, x_size, y_size): - assert scene1['1'] is scene2['1'] - assert scene1['2'] is scene2['2'] - np.testing.assert_allclose(scene2['3'].data, 4) - assert scene2['1'].shape == (y_size, x_size) - assert scene2['2'].shape == (y_size, x_size) - assert scene2['3'].shape == expected_aggregated_shape - assert 'standard_name' in scene2['4'].attrs - assert scene2['4'].attrs['standard_name'] == 'backscatter' + assert scene1["1"] is scene2["1"] + assert 
scene1["2"] is scene2["2"] + np.testing.assert_allclose(scene2["3"].data, 4) + assert scene2["1"].shape == (y_size, x_size) + assert scene2["2"].shape == (y_size, x_size) + assert scene2["3"].shape == expected_aggregated_shape + assert "standard_name" in scene2["4"].attrs + assert scene2["4"].attrs["standard_name"] == "backscatter" def test_aggregate_with_boundary(self): """Test aggregation with boundary argument.""" @@ -620,8 +620,8 @@ def test_aggregate_with_boundary(self): scene1 = self._create_test_data(x_size, y_size) with pytest.raises(ValueError): - scene1.aggregate(func='sum', x=2, y=2, boundary='exact') + scene1.aggregate(func="sum", x=2, y=2, boundary="exact") - scene2 = scene1.aggregate(func='sum', x=2, y=2, boundary='trim') + scene2 = scene1.aggregate(func="sum", x=2, y=2, boundary="trim") expected_aggregated_shape = (y_size // 2, x_size // 2) self._check_aggregation_results(expected_aggregated_shape, scene1, scene2, x_size, y_size) diff --git a/satpy/tests/scene_tests/test_saving.py b/satpy/tests/scene_tests/test_saving.py index 188d9c1e75..0781ae8796 100644 --- a/satpy/tests/scene_tests/test_saving.py +++ b/satpy/tests/scene_tests/test_saving.py @@ -37,69 +37,69 @@ def test_save_datasets_default(self, tmp_path): """Save a dataset using 'save_datasets'.""" ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime(2018, 1, 1, 0, 0, 0)} + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime(2018, 1, 1, 0, 0, 0)} ) scn = Scene() - scn['test'] = ds1 + scn["test"] = ds1 scn.save_datasets(base_dir=tmp_path) - assert os.path.isfile(os.path.join(tmp_path, 'test_20180101_000000.tif')) + assert os.path.isfile(os.path.join(tmp_path, "test_20180101_000000.tif")) def test_save_datasets_by_ext(self, tmp_path): """Save a dataset using 'save_datasets' with 'filename'.""" ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime(2018, 1, 1, 0, 0, 0)} + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime(2018, 1, 1, 0, 0, 0)} ) scn = Scene() - scn['test'] = ds1 + scn["test"] = ds1 from satpy.writers.simple_image import PillowWriter save_image_mock = spy_decorator(PillowWriter.save_image) - with mock.patch.object(PillowWriter, 'save_image', save_image_mock): - scn.save_datasets(base_dir=tmp_path, filename='{name}.png') + with mock.patch.object(PillowWriter, "save_image", save_image_mock): + scn.save_datasets(base_dir=tmp_path, filename="{name}.png") save_image_mock.mock.assert_called_once() - assert os.path.isfile(os.path.join(tmp_path, 'test.png')) + assert os.path.isfile(os.path.join(tmp_path, "test.png")) def test_save_datasets_bad_writer(self, tmp_path): """Save a dataset using 'save_datasets' and a bad writer.""" ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow()} + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow()} ) scn = Scene() - scn['test'] = ds1 + scn["test"] = ds1 pytest.raises(ValueError, scn.save_datasets, - writer='_bad_writer_', + writer="_bad_writer_", base_dir=tmp_path) def test_save_datasets_missing_wishlist(self, tmp_path): """Calling 'save_datasets' with no valid datasets.""" scn = Scene() - scn._wishlist.add(make_cid(name='true_color')) + scn._wishlist.add(make_cid(name="true_color")) pytest.raises(RuntimeError, scn.save_datasets, - writer='geotiff', + writer="geotiff", base_dir=tmp_path) pytest.raises(KeyError, 
scn.save_datasets, - datasets=['no_exist']) + datasets=["no_exist"]) def test_save_dataset_default(self, tmp_path): """Save a dataset using 'save_dataset'.""" ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime(2018, 1, 1, 0, 0, 0)} + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime(2018, 1, 1, 0, 0, 0)} ) scn = Scene() - scn['test'] = ds1 - scn.save_dataset('test', base_dir=tmp_path) - assert os.path.isfile(os.path.join(tmp_path, 'test_20180101_000000.tif')) + scn["test"] = ds1 + scn.save_dataset("test", base_dir=tmp_path) + assert os.path.isfile(os.path.join(tmp_path, "test_20180101_000000.tif")) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index f056d2fa93..5c3ededd40 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -39,24 +39,24 @@ class TestMatchDataArrays(unittest.TestCase): """Test the utility method 'match_data_arrays'.""" - def _get_test_ds(self, shape=(50, 100), dims=('y', 'x')): + def _get_test_ds(self, shape=(50, 100), dims=("y", "x")): """Get a fake DataArray.""" from pyresample.geometry import AreaDefinition data = da.random.random(shape, chunks=25) area = AreaDefinition( - 'test', 'test', 'test', - {'proj': 'eqc', 'lon_0': 0.0, - 'lat_0': 0.0}, - shape[dims.index('x')], shape[dims.index('y')], + "test", "test", "test", + {"proj": "eqc", "lon_0": 0.0, + "lat_0": 0.0}, + shape[dims.index("x")], shape[dims.index("y")], (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) - attrs = {'area': area} + attrs = {"area": area} return xr.DataArray(data, dims=dims, attrs=attrs) def test_single_ds(self): """Test a single dataset is returned unharmed.""" from satpy.composites import CompositeBase ds1 = self._get_test_ds() - comp = CompositeBase('test_comp') + comp = CompositeBase("test_comp") ret_datasets = comp.match_data_arrays((ds1,)) assert ret_datasets[0].identical(ds1) @@ -65,7 +65,7 @@ def test_mult_ds_area(self): from satpy.composites import CompositeBase ds1 = self._get_test_ds() ds2 = self._get_test_ds() - comp = CompositeBase('test_comp') + comp = CompositeBase("test_comp") ret_datasets = comp.match_data_arrays((ds1, ds2)) assert ret_datasets[0].identical(ds1) assert ret_datasets[1].identical(ds2) @@ -75,8 +75,8 @@ def test_mult_ds_no_area(self): from satpy.composites import CompositeBase ds1 = self._get_test_ds() ds2 = self._get_test_ds() - del ds2.attrs['area'] - comp = CompositeBase('test_comp') + del ds2.attrs["area"] + comp = CompositeBase("test_comp") self.assertRaises(ValueError, comp.match_data_arrays, (ds1, ds2)) def test_mult_ds_diff_area(self): @@ -86,13 +86,13 @@ def test_mult_ds_diff_area(self): from satpy.composites import CompositeBase, IncompatibleAreas ds1 = self._get_test_ds() ds2 = self._get_test_ds() - ds2.attrs['area'] = AreaDefinition( - 'test', 'test', 'test', - {'proj': 'eqc', 'lon_0': 0.0, - 'lat_0': 0.0}, + ds2.attrs["area"] = AreaDefinition( + "test", "test", "test", + {"proj": "eqc", "lon_0": 0.0, + "lat_0": 0.0}, 100, 50, (-30037508.34, -20018754.17, 10037508.34, 18754.17)) - comp = CompositeBase('test_comp') + comp = CompositeBase("test_comp") self.assertRaises(IncompatibleAreas, comp.match_data_arrays, (ds1, ds2)) def test_mult_ds_diff_dims(self): @@ -101,9 +101,9 @@ def test_mult_ds_diff_dims(self): # x is still 50, y is still 100, even though they are in # different order - ds1 = self._get_test_ds(shape=(50, 100), dims=('y', 'x')) - ds2 = self._get_test_ds(shape=(3, 100, 50), dims=('bands', 
'x', 'y')) - comp = CompositeBase('test_comp') + ds1 = self._get_test_ds(shape=(50, 100), dims=("y", "x")) + ds2 = self._get_test_ds(shape=(3, 100, 50), dims=("bands", "x", "y")) + comp = CompositeBase("test_comp") ret_datasets = comp.match_data_arrays((ds1, ds2)) assert ret_datasets[0].identical(ds1) assert ret_datasets[1].identical(ds2) @@ -114,19 +114,19 @@ def test_mult_ds_diff_size(self): # x is 50 in this one, 100 in ds2 # y is 100 in this one, 50 in ds2 - ds1 = self._get_test_ds(shape=(50, 100), dims=('x', 'y')) - ds2 = self._get_test_ds(shape=(3, 50, 100), dims=('bands', 'y', 'x')) - comp = CompositeBase('test_comp') + ds1 = self._get_test_ds(shape=(50, 100), dims=("x", "y")) + ds2 = self._get_test_ds(shape=(3, 50, 100), dims=("bands", "y", "x")) + comp = CompositeBase("test_comp") self.assertRaises(IncompatibleAreas, comp.match_data_arrays, (ds1, ds2)) def test_nondimensional_coords(self): """Test the removal of non-dimensional coordinates when compositing.""" from satpy.composites import CompositeBase ds = self._get_test_ds(shape=(2, 2)) - ds['acq_time'] = ('y', [0, 1]) - comp = CompositeBase('test_comp') + ds["acq_time"] = ("y", [0, 1]) + comp = CompositeBase("test_comp") ret_datasets = comp.match_data_arrays([ds, ds]) - self.assertNotIn('acq_time', ret_datasets[0].coords) + self.assertNotIn("acq_time", ret_datasets[0].coords) class TestRatioSharpenedCompositors: @@ -135,88 +135,88 @@ class TestRatioSharpenedCompositors: def setup_method(self): """Create test data.""" from pyresample.geometry import AreaDefinition - area = AreaDefinition('test', 'test', 'test', - {'proj': 'merc'}, 2, 2, + area = AreaDefinition("test", "test", "test", + {"proj": "merc"}, 2, 2, (-2000, -2000, 2000, 2000)) - attrs = {'area': area, - 'start_time': datetime(2018, 1, 1, 18), - 'modifiers': tuple(), - 'resolution': 1000, - 'calibration': 'reflectance', - 'units': '%', - 'name': 'test_vis'} + attrs = {"area": area, + "start_time": datetime(2018, 1, 1, 18), + "modifiers": tuple(), + "resolution": 1000, + "calibration": "reflectance", + "units": "%", + "name": "test_vis"} low_res_data = np.ones((2, 2), dtype=np.float64) + 4 low_res_data[1, 1] = 0.0 # produces infinite ratio ds1 = xr.DataArray(da.from_array(low_res_data, chunks=2), - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}) + attrs=attrs, dims=("y", "x"), + coords={"y": [0, 1], "x": [0, 1]}) self.ds1 = ds1 ds2 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 2, - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}) - ds2.attrs['name'] += '2' + attrs=attrs, dims=("y", "x"), + coords={"y": [0, 1], "x": [0, 1]}) + ds2.attrs["name"] += "2" self.ds2 = ds2 ds3 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 3, - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}) - ds3.attrs['name'] += '3' + attrs=attrs, dims=("y", "x"), + coords={"y": [0, 1], "x": [0, 1]}) + ds3.attrs["name"] += "3" self.ds3 = ds3 # high resolution version high_res_data = np.ones((2, 2), dtype=np.float64) high_res_data[1, 0] = np.nan # invalid value in one band ds4 = xr.DataArray(da.from_array(high_res_data, chunks=2), - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}) - ds4.attrs['name'] += '4' - ds4.attrs['resolution'] = 500 + attrs=attrs, dims=("y", "x"), + coords={"y": [0, 1], "x": [0, 1]}) + ds4.attrs["name"] += "4" + ds4.attrs["resolution"] = 500 self.ds4 = ds4 # high resolution version - but too big ds4_big = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64), - attrs=attrs.copy(), 
dims=('y', 'x'), - coords={'y': [0, 1, 2, 3], 'x': [0, 1, 2, 3]}) - ds4_big.attrs['name'] += '4' - ds4_big.attrs['resolution'] = 500 - ds4_big.attrs['rows_per_scan'] = 1 - ds4_big.attrs['area'] = AreaDefinition('test', 'test', 'test', - {'proj': 'merc'}, 4, 4, + attrs=attrs.copy(), dims=("y", "x"), + coords={"y": [0, 1, 2, 3], "x": [0, 1, 2, 3]}) + ds4_big.attrs["name"] += "4" + ds4_big.attrs["resolution"] = 500 + ds4_big.attrs["rows_per_scan"] = 1 + ds4_big.attrs["area"] = AreaDefinition("test", "test", "test", + {"proj": "merc"}, 4, 4, (-2000, -2000, 2000, 2000)) self.ds4_big = ds4_big @pytest.mark.parametrize( "init_kwargs", [ - {'high_resolution_band': "bad", 'neutral_resolution_band': "red"}, - {'high_resolution_band': "red", 'neutral_resolution_band': "bad"} + {"high_resolution_band": "bad", "neutral_resolution_band": "red"}, + {"high_resolution_band": "red", "neutral_resolution_band": "bad"} ] ) def test_bad_colors(self, init_kwargs): """Test that only valid band colors can be provided.""" from satpy.composites import RatioSharpenedRGB with pytest.raises(ValueError): - RatioSharpenedRGB(name='true_color', **init_kwargs) + RatioSharpenedRGB(name="true_color", **init_kwargs) def test_match_data_arrays(self): """Test that all areas have to be the same resolution.""" from satpy.composites import IncompatibleAreas, RatioSharpenedRGB - comp = RatioSharpenedRGB(name='true_color') + comp = RatioSharpenedRGB(name="true_color") with pytest.raises(IncompatibleAreas): comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4_big,)) def test_more_than_three_datasets(self): """Test that only 3 datasets can be passed.""" from satpy.composites import RatioSharpenedRGB - comp = RatioSharpenedRGB(name='true_color') + comp = RatioSharpenedRGB(name="true_color") with pytest.raises(ValueError): comp((self.ds1, self.ds2, self.ds3, self.ds1), optional_datasets=(self.ds4_big,)) def test_self_sharpened_no_high_res(self): """Test for exception when no high_res band is specified.""" from satpy.composites import SelfSharpenedRGB - comp = SelfSharpenedRGB(name='true_color', high_resolution_band=None) + comp = SelfSharpenedRGB(name="true_color", high_resolution_band=None) with pytest.raises(ValueError): comp((self.ds1, self.ds2, self.ds3)) @@ -266,7 +266,7 @@ def test_basic_no_sharpen(self): def test_ratio_sharpening(self, high_resolution_band, neutral_resolution_band, exp_r, exp_g, exp_b): """Test RatioSharpenedRGB by different groups of high_resolution_band and neutral_resolution_band.""" from satpy.composites import RatioSharpenedRGB - comp = RatioSharpenedRGB(name='true_color', high_resolution_band=high_resolution_band, + comp = RatioSharpenedRGB(name="true_color", high_resolution_band=high_resolution_band, neutral_resolution_band=neutral_resolution_band) res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) @@ -291,7 +291,7 @@ def test_ratio_sharpening(self, high_resolution_band, neutral_resolution_band, e def test_self_sharpened_basic(self, exp_shape, exp_r, exp_g, exp_b): """Test that three datasets can be passed without optional high res.""" from satpy.composites import SelfSharpenedRGB - comp = SelfSharpenedRGB(name='true_color') + comp = SelfSharpenedRGB(name="true_color") res = comp((self.ds1, self.ds2, self.ds3)) data = res.values @@ -307,48 +307,48 @@ class TestDifferenceCompositor(unittest.TestCase): def setUp(self): """Create test data.""" from pyresample.geometry import AreaDefinition - area = AreaDefinition('test', 'test', 'test', - {'proj': 'merc'}, 2, 2, + area = 
AreaDefinition("test", "test", "test", + {"proj": "merc"}, 2, 2, (-2000, -2000, 2000, 2000)) - attrs = {'area': area, - 'start_time': datetime(2018, 1, 1, 18), - 'modifiers': tuple(), - 'resolution': 1000, - 'name': 'test_vis'} + attrs = {"area": area, + "start_time": datetime(2018, 1, 1, 18), + "modifiers": tuple(), + "resolution": 1000, + "name": "test_vis"} ds1 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64), - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}) + attrs=attrs, dims=("y", "x"), + coords={"y": [0, 1], "x": [0, 1]}) self.ds1 = ds1 ds2 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 2, - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}) - ds2.attrs['name'] += '2' + attrs=attrs, dims=("y", "x"), + coords={"y": [0, 1], "x": [0, 1]}) + ds2.attrs["name"] += "2" self.ds2 = ds2 # high res version ds2 = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64) + 4, - attrs=attrs.copy(), dims=('y', 'x'), - coords={'y': [0, 1, 2, 3], 'x': [0, 1, 2, 3]}) - ds2.attrs['name'] += '2' - ds2.attrs['resolution'] = 500 - ds2.attrs['rows_per_scan'] = 1 - ds2.attrs['area'] = AreaDefinition('test', 'test', 'test', - {'proj': 'merc'}, 4, 4, + attrs=attrs.copy(), dims=("y", "x"), + coords={"y": [0, 1, 2, 3], "x": [0, 1, 2, 3]}) + ds2.attrs["name"] += "2" + ds2.attrs["resolution"] = 500 + ds2.attrs["rows_per_scan"] = 1 + ds2.attrs["area"] = AreaDefinition("test", "test", "test", + {"proj": "merc"}, 4, 4, (-2000, -2000, 2000, 2000)) self.ds2_big = ds2 def test_basic_diff(self): """Test that a basic difference composite works.""" from satpy.composites import DifferenceCompositor - comp = DifferenceCompositor(name='diff', standard_name='temperature_difference') + comp = DifferenceCompositor(name="diff", standard_name="temperature_difference") res = comp((self.ds1, self.ds2)) np.testing.assert_allclose(res.values, -2) - assert res.attrs.get('standard_name') == 'temperature_difference' + assert res.attrs.get("standard_name") == "temperature_difference" def test_bad_areas_diff(self): """Test that a difference where resolutions are different fails.""" from satpy.composites import DifferenceCompositor, IncompatibleAreas - comp = DifferenceCompositor(name='diff') + comp = DifferenceCompositor(name="diff") # too many arguments self.assertRaises(ValueError, comp, (self.ds1, self.ds2, self.ds2_big)) # different resolution @@ -391,7 +391,7 @@ class TestDayNightCompositor(unittest.TestCase): def setUp(self): """Create test data.""" - bands = ['R', 'G', 'B'] + bands = ["R", "G", "B"] start_time = datetime(2018, 1, 1, 18, 0, 0) # RGB @@ -401,20 +401,20 @@ def setUp(self): a[:, 1, 0] = 0.3 a[:, 1, 1] = 0.4 a = da.from_array(a, a.shape) - self.data_a = xr.DataArray(a, attrs={'test': 'a', 'start_time': start_time}, - coords={'bands': bands}, dims=('bands', 'y', 'x')) + self.data_a = xr.DataArray(a, attrs={"test": "a", "start_time": start_time}, + coords={"bands": bands}, dims=("bands", "y", "x")) b = np.zeros((3, 2, 2), dtype=np.float64) b[:, 0, 0] = np.nan b[:, 0, 1] = 0.25 b[:, 1, 0] = 0.50 b[:, 1, 1] = 0.75 b = da.from_array(b, b.shape) - self.data_b = xr.DataArray(b, attrs={'test': 'b', 'start_time': start_time}, - coords={'bands': bands}, dims=('bands', 'y', 'x')) + self.data_b = xr.DataArray(b, attrs={"test": "b", "start_time": start_time}, + coords={"bands": bands}, dims=("bands", "y", "x")) sza = np.array([[80., 86.], [94., 100.]]) sza = da.from_array(sza, sza.shape) - self.sza = xr.DataArray(sza, dims=('y', 'x')) + self.sza = xr.DataArray(sza, 
dims=("y", "x")) # fake area my_area = AreaDefinition( @@ -423,15 +423,15 @@ def setUp(self): 2, 2, (-95.0, 40.0, -92.0, 43.0), ) - self.data_a.attrs['area'] = my_area - self.data_b.attrs['area'] = my_area + self.data_a.attrs["area"] = my_area + self.data_b.attrs["area"] = my_area # not used except to check that it matches the data arrays - self.sza.attrs['area'] = my_area + self.sza.attrs["area"] = my_area def test_daynight_sza(self): """Test compositor with both day and night portions when SZA data is included.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_night") + comp = DayNightCompositor(name="dn_test", day_night="day_night") res = comp((self.data_a, self.data_b, self.sza)) res = res.compute() expected = np.array([[0., 0.22122352], [0.5, 1.]]) @@ -440,7 +440,7 @@ def test_daynight_sza(self): def test_daynight_area(self): """Test compositor both day and night portions when SZA data is not provided.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_night") + comp = DayNightCompositor(name="dn_test", day_night="day_night") res = comp((self.data_a, self.data_b)) res = res.compute() expected_channel = np.array([[0., 0.33164983], [0.66835017, 1.]]) @@ -450,7 +450,7 @@ def test_daynight_area(self): def test_night_only_sza_with_alpha(self): """Test compositor with night portion with alpha band when SZA data is included.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=True) + comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=True) res = comp((self.data_b, self.sza)) res = res.compute() expected_red_channel = np.array([[np.nan, 0.], [0.5, 1.]]) @@ -461,17 +461,17 @@ def test_night_only_sza_with_alpha(self): def test_night_only_sza_without_alpha(self): """Test compositor with night portion without alpha band when SZA data is included.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=False) + comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=False) res = comp((self.data_a, self.sza)) res = res.compute() expected = np.array([[0., 0.11042631], [0.66835017, 1.]]) np.testing.assert_allclose(res.values[0], expected) - assert 'A' not in res.bands + assert "A" not in res.bands def test_night_only_area_with_alpha(self): """Test compositor with night portion with alpha band when SZA data is not provided.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=True) + comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=True) res = comp((self.data_b,)) res = res.compute() expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]]) @@ -482,17 +482,17 @@ def test_night_only_area_with_alpha(self): def test_night_only_area_without_alpha(self): """Test compositor with night portion without alpha band when SZA data is not provided.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=False) + comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=False) res = comp((self.data_b,)) res = res.compute() expected = np.array([[np.nan, 0.], [0., 0.]]) np.testing.assert_allclose(res.values[0], expected) - assert 'A' not in res.bands + assert "A" not in res.bands def 
test_day_only_sza_with_alpha(self): """Test compositor with day portion with alpha band when SZA data is included.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=True) + comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=True) res = comp((self.data_a, self.sza)) res = res.compute() expected_red_channel = np.array([[0., 0.33164983], [0.66835017, 1.]]) @@ -503,18 +503,18 @@ def test_day_only_sza_with_alpha(self): def test_day_only_sza_without_alpha(self): """Test compositor with day portion without alpha band when SZA data is included.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=False) + comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=False) res = comp((self.data_a, self.sza)) res = res.compute() expected_channel_data = np.array([[0., 0.22122352], [0., 0.]]) for i in range(3): np.testing.assert_allclose(res.values[i], expected_channel_data) - assert 'A' not in res.bands + assert "A" not in res.bands def test_day_only_area_with_alpha(self): """Test compositor with day portion with alpha_band when SZA data is not provided.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=True) + comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=True) res = comp((self.data_a,)) res = res.compute() expected_l_channel = np.array([[0., 0.33164983], [0.66835017, 1.]]) @@ -525,7 +525,7 @@ def test_day_only_area_with_alpha(self): def test_day_only_area_with_alpha_and_missing_data(self): """Test compositor with day portion with alpha_band when SZA data is not provided and there is missing data.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=True) + comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=True) res = comp((self.data_b,)) res = res.compute() expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]]) @@ -536,12 +536,12 @@ def test_day_only_area_with_alpha_and_missing_data(self): def test_day_only_area_without_alpha(self): """Test compositor with day portion without alpha_band when SZA data is not provided.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=False) + comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=False) res = comp((self.data_a,)) res = res.compute() expected = np.array([[0., 0.33164983], [0.66835017, 1.]]) np.testing.assert_allclose(res.values[0], expected) - assert 'A' not in res.bands + assert "A" not in res.bands class TestFillingCompositor(unittest.TestCase): @@ -550,15 +550,15 @@ class TestFillingCompositor(unittest.TestCase): def test_fill(self): """Test filling.""" from satpy.composites import FillingCompositor - comp = FillingCompositor(name='fill_test') + comp = FillingCompositor(name="fill_test") filler = xr.DataArray(np.array([1, 2, 3, 4, 3, 2, 1])) red = xr.DataArray(np.array([1, 2, 3, np.nan, 3, 2, 1])) green = xr.DataArray(np.array([np.nan, 2, 3, 4, 3, 2, np.nan])) blue = xr.DataArray(np.array([4, 3, 2, 1, 2, 3, 4])) res = comp([filler, red, green, blue]) - np.testing.assert_allclose(res.sel(bands='R').data, filler.data) - np.testing.assert_allclose(res.sel(bands='G').data, filler.data) - 
np.testing.assert_allclose(res.sel(bands='B').data, blue.data) + np.testing.assert_allclose(res.sel(bands="R").data, filler.data) + np.testing.assert_allclose(res.sel(bands="G").data, filler.data) + np.testing.assert_allclose(res.sel(bands="B").data, blue.data) class TestMultiFiller(unittest.TestCase): @@ -567,7 +567,7 @@ class TestMultiFiller(unittest.TestCase): def test_fill(self): """Test filling.""" from satpy.composites import MultiFiller - comp = MultiFiller(name='fill_test') + comp = MultiFiller(name="fill_test") attrs = {"units": "K"} a = xr.DataArray(np.array([1, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan]), attrs=attrs.copy()) b = xr.DataArray(np.array([np.nan, 2, 3, np.nan, np.nan, np.nan, np.nan]), attrs=attrs.copy()) @@ -587,29 +587,29 @@ class TestLuminanceSharpeningCompositor(unittest.TestCase): def test_compositor(self): """Test luminance sharpening compositor.""" from satpy.composites import LuminanceSharpeningCompositor - comp = LuminanceSharpeningCompositor(name='test') + comp = LuminanceSharpeningCompositor(name="test") # Three shades of grey rgb_arr = np.array([1, 50, 100, 200, 1, 50, 100, 200, 1, 50, 100, 200]) rgb = xr.DataArray(rgb_arr.reshape((3, 2, 2)), - dims=['bands', 'y', 'x'], coords={'bands': ['R', 'G', 'B']}) + dims=["bands", "y", "x"], coords={"bands": ["R", "G", "B"]}) # 100 % luminance -> all result values ~1.0 lum = xr.DataArray(np.array([[100., 100.], [100., 100.]]), - dims=['y', 'x']) + dims=["y", "x"]) res = comp([lum, rgb]) np.testing.assert_allclose(res.data, 1., atol=1e-9) # 50 % luminance, all result values ~0.5 lum = xr.DataArray(np.array([[50., 50.], [50., 50.]]), - dims=['y', 'x']) + dims=["y", "x"]) res = comp([lum, rgb]) np.testing.assert_allclose(res.data, 0.5, atol=1e-9) # 30 % luminance, all result values ~0.3 lum = xr.DataArray(np.array([[30., 30.], [30., 30.]]), - dims=['y', 'x']) + dims=["y", "x"]) res = comp([lum, rgb]) np.testing.assert_allclose(res.data, 0.3, atol=1e-9) # 0 % luminance, all values ~0.0 lum = xr.DataArray(np.array([[0., 0.], [0., 0.]]), - dims=['y', 'x']) + dims=["y", "x"]) res = comp([lum, rgb]) np.testing.assert_allclose(res.data, 0.0, atol=1e-9) @@ -621,29 +621,29 @@ class TestSandwichCompositor: @pytest.mark.parametrize( "input_shape,bands", [ - ((3, 2, 2), ['R', 'G', 'B']), - ((4, 2, 2), ['R', 'G', 'B', 'A']) + ((3, 2, 2), ["R", "G", "B"]), + ((4, 2, 2), ["R", "G", "B", "A"]) ] ) - @mock.patch('satpy.composites.enhance2dataset') + @mock.patch("satpy.composites.enhance2dataset") def test_compositor(self, e2d, input_shape, bands): """Test luminance sharpening compositor.""" from satpy.composites import SandwichCompositor rgb_arr = da.from_array(np.random.random(input_shape), chunks=2) - rgb = xr.DataArray(rgb_arr, dims=['bands', 'y', 'x'], - coords={'bands': bands}) + rgb = xr.DataArray(rgb_arr, dims=["bands", "y", "x"], + coords={"bands": bands}) lum_arr = da.from_array(100 * np.random.random((2, 2)), chunks=2) - lum = xr.DataArray(lum_arr, dims=['y', 'x']) + lum = xr.DataArray(lum_arr, dims=["y", "x"]) # Make enhance2dataset return unmodified dataset e2d.return_value = rgb - comp = SandwichCompositor(name='test') + comp = SandwichCompositor(name="test") res = comp([lum, rgb]) for band in rgb: - if band.bands != 'A': + if band.bands != "A": # Check compositor has modified this band np.testing.assert_allclose(res.loc[band.bands].to_numpy(), band.to_numpy() * lum_arr / 100.) 
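# A minimal standalone sketch of the behaviour the SandwichCompositor test
# above exercises: every non-alpha band of an RGB image is modulated by a
# luminance field scaled from percent to [0, 1]. Plain xarray/dask with
# made-up inputs; an illustration only, not satpy's SandwichCompositor
# implementation.
import dask.array as da
import numpy as np
import xarray as xr

rgb = xr.DataArray(da.from_array(np.random.random((3, 2, 2)), chunks=2),
                   dims=("bands", "y", "x"),
                   coords={"bands": ["R", "G", "B"]})
lum = xr.DataArray(da.from_array(100 * np.random.random((2, 2)), chunks=2),
                   dims=("y", "x"))

# Same relation the test asserts per band: result == band * lum / 100.
# An alpha band, if present, would be left untouched.
sandwiched = rgb * (lum / 100.0)
assert sandwiched.compute().shape == (3, 2, 2)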
@@ -661,32 +661,32 @@ class TestInlineComposites(unittest.TestCase):
     def test_inline_composites(self):
         """Test that inline composites are working."""
         from satpy.composites.config_loader import load_compositor_configs_for_sensors
-        comps = load_compositor_configs_for_sensors(['visir'])[0]
+        comps = load_compositor_configs_for_sensors(["visir"])[0]
         # Check that "fog" product has all its prerequisites defined
-        keys = comps['visir'].keys()
-        fog = [comps['visir'][dsid] for dsid in keys if "fog" == dsid['name']][0]
-        self.assertEqual(fog.attrs['prerequisites'][0]['name'], '_fog_dep_0')
-        self.assertEqual(fog.attrs['prerequisites'][1]['name'], '_fog_dep_1')
-        self.assertEqual(fog.attrs['prerequisites'][2], 10.8)
+        keys = comps["visir"].keys()
+        fog = [comps["visir"][dsid] for dsid in keys if "fog" == dsid["name"]][0]
+        self.assertEqual(fog.attrs["prerequisites"][0]["name"], "_fog_dep_0")
+        self.assertEqual(fog.attrs["prerequisites"][1]["name"], "_fog_dep_1")
+        self.assertEqual(fog.attrs["prerequisites"][2], 10.8)

         # Check that the sub-composite dependencies use wavelengths
         # (numeric values)
-        keys = comps['visir'].keys()
-        fog_dep_ids = [dsid for dsid in keys if "fog_dep" in dsid['name']]
-        self.assertEqual(comps['visir'][fog_dep_ids[0]].attrs['prerequisites'],
+        keys = comps["visir"].keys()
+        fog_dep_ids = [dsid for dsid in keys if "fog_dep" in dsid["name"]]
+        self.assertEqual(comps["visir"][fog_dep_ids[0]].attrs["prerequisites"],
                          [12.0, 10.8])
-        self.assertEqual(comps['visir'][fog_dep_ids[1]].attrs['prerequisites'],
+        self.assertEqual(comps["visir"][fog_dep_ids[1]].attrs["prerequisites"],
                          [10.8, 8.7])

         # Check the same for SEVIRI and verify channel names are used
         # in the sub-composite dependencies instead of wavelengths
-        comps = load_compositor_configs_for_sensors(['seviri'])[0]
-        keys = comps['seviri'].keys()
-        fog_dep_ids = [dsid for dsid in keys if "fog_dep" in dsid['name']]
-        self.assertEqual(comps['seviri'][fog_dep_ids[0]].attrs['prerequisites'],
-                         ['IR_120', 'IR_108'])
-        self.assertEqual(comps['seviri'][fog_dep_ids[1]].attrs['prerequisites'],
-                         ['IR_108', 'IR_087'])
+        comps = load_compositor_configs_for_sensors(["seviri"])[0]
+        keys = comps["seviri"].keys()
+        fog_dep_ids = [dsid for dsid in keys if "fog_dep" in dsid["name"]]
+        self.assertEqual(comps["seviri"][fog_dep_ids[0]].attrs["prerequisites"],
+                         ["IR_120", "IR_108"])
+        self.assertEqual(comps["seviri"][fog_dep_ids[1]].attrs["prerequisites"],
+                         ["IR_108", "IR_087"])


 class TestColormapCompositor(unittest.TestCase):
@@ -695,7 +695,7 @@ def setUp(self):
         """Set up the test case."""
         from satpy.composites import ColormapCompositor
-        self.colormap_compositor = ColormapCompositor('test_cmap_compositor')
+        self.colormap_compositor = ColormapCompositor("test_cmap_compositor")

     def test_build_colormap_with_int_data_and_without_meanings(self):
         """Test colormap building."""
@@ -707,8 +707,8 @@ def test_build_colormap_with_int_data_and_with_meanings(self):
         """Test colormap building."""
         palette = xr.DataArray(np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]),
-                               dims=['value', 'band'])
-        palette.attrs['palette_meanings'] = [2, 3, 4]
+                               dims=["value", "band"])
+        palette.attrs["palette_meanings"] = [2, 3, 4]
         colormap, squeezed_palette = self.colormap_compositor.build_colormap(palette, np.uint8, {})
         self.assertTrue(np.allclose(colormap.values, [2, 3, 4]))
         self.assertTrue(np.allclose(squeezed_palette, palette / 255.0))
@@ -720,12 +720,12 @@ class TestPaletteCompositor(unittest.TestCase):
     def test_call(self):
         """Test palette compositing."""
         from satpy.composites import PaletteCompositor
-        cmap_comp = PaletteCompositor('test_cmap_compositor')
+        cmap_comp = PaletteCompositor("test_cmap_compositor")
         palette = xr.DataArray(np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]),
-                               dims=['value', 'band'])
-        palette.attrs['palette_meanings'] = [2, 3, 4]
+                               dims=["value", "band"])
+        palette.attrs["palette_meanings"] = [2, 3, 4]

-        data = xr.DataArray(da.from_array(np.array([[4, 3, 2], [2, 3, 4]], dtype=np.uint8)), dims=['y', 'x'])
+        data = xr.DataArray(da.from_array(np.array([[4, 3, 2], [2, 3, 4]], dtype=np.uint8)), dims=["y", "x"])
         res = cmap_comp([data, palette])
         exp = np.array([[[1., 0.498039, 0.],
                          [0., 0.498039, 1.]],
@@ -742,15 +742,15 @@ class TestColorizeCompositor(unittest.TestCase):
     def test_colorize_no_fill(self):
         """Test colorizing."""
         from satpy.composites import ColorizeCompositor
-        colormap_composite = ColorizeCompositor('test_color_compositor')
+        colormap_composite = ColorizeCompositor("test_color_compositor")
         palette = xr.DataArray(np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]),
-                               dims=['value', 'band'])
-        palette.attrs['palette_meanings'] = [2, 3, 4]
+                               dims=["value", "band"])
+        palette.attrs["palette_meanings"] = [2, 3, 4]

         data = xr.DataArray(np.array([[4, 3, 2], [2, 3, 4]], dtype=np.uint8),
-                            dims=['y', 'x'])
+                            dims=["y", "x"])
         res = colormap_composite([data, palette])
         exp = np.array([[[1., 0.498039, 0.],
                          [0., 0.498039, 1.]],
@@ -763,15 +763,15 @@ def test_colorize_no_fill(self):
     def test_colorize_with_interpolation(self):
         """Test colorizing with interpolation."""
         from satpy.composites import ColorizeCompositor
-        colormap_composite = ColorizeCompositor('test_color_compositor')
+        colormap_composite = ColorizeCompositor("test_color_compositor")
         palette = xr.DataArray(np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]),
-                               dims=['value', 'band'])
-        palette.attrs['palette_meanings'] = [2, 3, 4]
+                               dims=["value", "band"])
+        palette.attrs["palette_meanings"] = [2, 3, 4]

         data = xr.DataArray(da.from_array(np.array([[4, 3, 2.5], [2, 3.2, 4]])),
-                            dims=['y', 'x'],
-                            attrs={'valid_range': np.array([2, 4])})
+                            dims=["y", "x"],
+                            attrs={"valid_range": np.array([2, 4])})
         res = colormap_composite([data, palette])
         exp = np.array([[[1.0, 0.498039, 0.246575],
                          [0., 0.59309977, 1.0]],
@@ -788,7 +788,7 @@ class TestCloudCompositorWithoutCloudfree:
     def setup_method(self):
         """Set up the test case."""
         from satpy.composites.cloud_products import CloudCompositorWithoutCloudfree
-        self.colormap_composite = CloudCompositorWithoutCloudfree('test_cmap_compositor')
+        self.colormap_composite = CloudCompositorWithoutCloudfree("test_cmap_compositor")
         self.exp = np.array([[4, 3, 2],
                              [2, 3, np.nan],
                              [8, 7, 655350]])
         self.exp_bad_oc = np.array([[4, 3, 2],
@@ -797,45 +797,45 @@ def test_call_numpy_with_invalid_value_in_status(self):
         """Test the CloudCompositorWithoutCloudfree composite generation."""
-        status = xr.DataArray(np.array([[0, 0, 0], [0, 0, 65535], [0, 0, 1]]), dims=['y', 'x'],
-                              attrs={'_FillValue': 65535})
+        status = xr.DataArray(np.array([[0, 0, 0], [0, 0, 65535], [0, 0, 1]]), dims=["y", "x"],
+                              attrs={"_FillValue": 65535})
         data = xr.DataArray(np.array([[4, 3, 2], [2, 3, np.nan], [8, 7, np.nan]], dtype=np.float32),
-                            dims=['y', 'x'],
-                            attrs={'_FillValue': 65535,
-                                   'scaled_FillValue': 655350})
+                            dims=["y", "x"],
+                            attrs={"_FillValue": 65535,
+                                   "scaled_FillValue": 655350})
         res = self.colormap_composite([data, status])
         np.testing.assert_allclose(res, self.exp, atol=1e-4)

     def test_call_dask_with_invalid_value_in_status(self):
         """Test the CloudCompositorWithoutCloudfree composite generation."""
-        status = xr.DataArray(da.from_array(np.array([[0, 0, 0], [0, 0, 65535], [0, 0, 1]])), dims=['y', 'x'],
-                              attrs={'_FillValue': 65535})
+        status = xr.DataArray(da.from_array(np.array([[0, 0, 0], [0, 0, 65535], [0, 0, 1]])), dims=["y", "x"],
+                              attrs={"_FillValue": 65535})
         data = xr.DataArray(da.from_array(np.array([[4, 3, 2], [2, 3, np.nan], [8, 7, np.nan]], dtype=np.float32)),
-                            dims=['y', 'x'],
-                            attrs={'_FillValue': 99,
-                                   'scaled_FillValue': 655350})
+                            dims=["y", "x"],
+                            attrs={"_FillValue": 99,
+                                   "scaled_FillValue": 655350})
         res = self.colormap_composite([data, status])
         np.testing.assert_allclose(res, self.exp, atol=1e-4)

     def test_call_bad_optical_conditions(self):
         """Test the CloudCompositorWithoutCloudfree composite generation."""
-        status = xr.DataArray(da.from_array(np.array([[0, 0, 0], [3, 3, 3], [0, 0, 1]])), dims=['y', 'x'],
-                              attrs={'_FillValue': 65535,
-                                     "flag_meanings": 'bad_optical_conditions'})
+        status = xr.DataArray(da.from_array(np.array([[0, 0, 0], [3, 3, 3], [0, 0, 1]])), dims=["y", "x"],
+                              attrs={"_FillValue": 65535,
+                                     "flag_meanings": "bad_optical_conditions"})
         data = xr.DataArray(np.array([[4, 3, 2], [2, 255, 4], [255, 7, 255]], dtype=np.uint8),
-                            dims=['y', 'x'],
-                            name='cmic_cre',
-                            attrs={'_FillValue': 255,
-                                   'scaled_FillValue': 255})
+                            dims=["y", "x"],
+                            name="cmic_cre",
+                            attrs={"_FillValue": 255,
+                                   "scaled_FillValue": 255})
         res = self.colormap_composite([data, status])
         np.testing.assert_allclose(res, self.exp_bad_oc, atol=1e-4)

     def test_bad_indata(self):
         """Test the CloudCompositorWithoutCloudfree composite generation without status."""
         data = xr.DataArray(np.array([[4, 3, 2], [2, 3, 4], [255, 7, 255]], dtype=np.uint8),
-                            dims=['y', 'x'],
-                            attrs={'_FillValue': 255,
-                                   'scaled_FillValue': 255})
+                            dims=["y", "x"],
+                            attrs={"_FillValue": 255,
+                                   "scaled_FillValue": 255})
         np.testing.assert_raises(ValueError, self.colormap_composite, [data])

@@ -852,36 +852,36 @@ def setup_method(self):
         self.exp_b = np.array([[4, 3, 2],
                                [2, 3, 255],
                                [np.nan, np.nan, np.nan]])
-        self.colormap_composite = CloudCompositorCommonMask('test_cmap_compositor')
+        self.colormap_composite = CloudCompositorCommonMask("test_cmap_compositor")

     def test_call_numpy(self):
         """Test the CloudCompositorCommonMask with numpy."""
-        mask = xr.DataArray(np.array([[0, 0, 0], [1, 1, 1], [255, 255, 255]]), dims=['y', 'x'],
-                            attrs={'_FillValue': 255})
+        mask = xr.DataArray(np.array([[0, 0, 0], [1, 1, 1], [255, 255, 255]]), dims=["y", "x"],
+                            attrs={"_FillValue": 255})
         data = xr.DataArray(np.array([[4, 3, 2], [2, 3, np.nan], [np.nan, np.nan, np.nan]], dtype=np.float32),
-                            dims=['y', 'x'],
-                            attrs={'_FillValue': 65535,
-                                   'scaled_FillValue': 655350})
+                            dims=["y", "x"],
+                            attrs={"_FillValue": 65535,
+                                   "scaled_FillValue": 655350})
         res = self.colormap_composite([data, mask])
         np.testing.assert_allclose(res, self.exp_a, atol=1e-4)

     def test_call_dask(self):
         """Test the CloudCompositorCommonMask with dask."""
-        mask = xr.DataArray(da.from_array(np.array([[0, 0, 0], [1, 1, 1], [255, 255, 255]])), dims=['y', 'x'],
-                            attrs={'_FillValue': 255})
+        mask = xr.DataArray(da.from_array(np.array([[0, 0, 0], [1, 1, 1], [255, 255, 255]])), dims=["y", "x"],
+                            attrs={"_FillValue": 255})
         data = xr.DataArray(da.from_array(np.array([[4, 3, 2], [2, 3, 255], [255, 255, 255]], dtype=np.int16)),
-                            dims=['y', 'x'],
-                            attrs={'_FillValue': 255,
-                                   'scaled_FillValue': 255})
+                            dims=["y", "x"],
+                            attrs={"_FillValue": 255,
+                                   "scaled_FillValue": 255})
         res = self.colormap_composite([data, mask])
         np.testing.assert_allclose(res, self.exp_b, atol=1e-4)

     def test_bad_call(self):
         """Test the CloudCompositorCommonMask without mask."""
         data = xr.DataArray(np.array([[4, 3, 2], [2, 3, 255], [255, 255, 255]], dtype=np.int16),
-                            dims=['y', 'x'],
-                            attrs={'_FillValue': 255,
-                                   'scaled_FillValue': 255})
+                            dims=["y", "x"],
+                            attrs={"_FillValue": 255,
+                                   "scaled_FillValue": 255})
         np.testing.assert_raises(ValueError, self.colormap_composite, [data])

@@ -891,16 +891,16 @@ class TestPrecipCloudsCompositor(unittest.TestCase):
     def test_call(self):
         """Test the precip composite generation."""
         from satpy.composites.cloud_products import PrecipCloudsRGB
-        colormap_compositor = PrecipCloudsRGB('test_precip_compositor')
+        colormap_compositor = PrecipCloudsRGB("test_precip_compositor")

         data_light = xr.DataArray(np.array([[80, 70, 60, 0], [20, 30, 40, 255]], dtype=np.uint8),
-                                  dims=['y', 'x'], attrs={'_FillValue': 255})
+                                  dims=["y", "x"], attrs={"_FillValue": 255})
         data_moderate = xr.DataArray(np.array([[60, 50, 40, 0], [20, 30, 40, 255]], dtype=np.uint8),
-                                     dims=['y', 'x'], attrs={'_FillValue': 255})
+                                     dims=["y", "x"], attrs={"_FillValue": 255})
         data_intense = xr.DataArray(np.array([[40, 30, 20, 0], [20, 30, 40, 255]], dtype=np.uint8),
-                                    dims=['y', 'x'], attrs={'_FillValue': 255})
+                                    dims=["y", "x"], attrs={"_FillValue": 255})
         data_flags = xr.DataArray(np.array([[0, 0, 4, 0], [0, 0, 0, 0]], dtype=np.uint8),
-                                  dims=['y', 'x'])
+                                  dims=["y", "x"])
         res = colormap_compositor([data_light, data_moderate, data_intense, data_flags])

         exp = np.array([[[0.24313725, 0.18235294, 0.12156863, np.nan],
@@ -919,35 +919,35 @@ class TestSingleBandCompositor(unittest.TestCase):
     def setUp(self):
         """Create test data."""
         from satpy.composites import SingleBandCompositor
-        self.comp = SingleBandCompositor(name='test')
+        self.comp = SingleBandCompositor(name="test")

         all_valid = np.ones((2, 2))
-        self.all_valid = xr.DataArray(all_valid, dims=['y', 'x'])
+        self.all_valid = xr.DataArray(all_valid, dims=["y", "x"])

     def test_call(self):
         """Test calling the compositor."""
         # Dataset with extra attributes
         all_valid = self.all_valid
-        all_valid.attrs['sensor'] = 'foo'
+        all_valid.attrs["sensor"] = "foo"
         attrs = {
-            'foo': 'bar',
-            'resolution': 333,
-            'units': 'K',
-            'sensor': {'fake_sensor1', 'fake_sensor2'},
-            'calibration': 'BT',
-            'wavelength': 10.8
+            "foo": "bar",
+            "resolution": 333,
+            "units": "K",
+            "sensor": {"fake_sensor1", "fake_sensor2"},
+            "calibration": "BT",
+            "wavelength": 10.8
         }
-        self.comp.attrs['resolution'] = None
+        self.comp.attrs["resolution"] = None
         res = self.comp([all_valid], **attrs)

         # Verify attributes
-        self.assertEqual(res.attrs.get('sensor'), 'foo')
-        self.assertTrue('foo' in res.attrs)
-        self.assertEqual(res.attrs.get('foo'), 'bar')
-        self.assertTrue('units' in res.attrs)
-        self.assertTrue('calibration' in res.attrs)
-        self.assertFalse('modifiers' in res.attrs)
-        self.assertEqual(res.attrs['wavelength'], 10.8)
-        self.assertEqual(res.attrs['resolution'], 333)
+        self.assertEqual(res.attrs.get("sensor"), "foo")
+        self.assertTrue("foo" in res.attrs)
+        self.assertEqual(res.attrs.get("foo"), "bar")
+        self.assertTrue("units" in res.attrs)
+        self.assertTrue("calibration" in res.attrs)
+        self.assertFalse("modifiers" in res.attrs)
+        self.assertEqual(res.attrs["wavelength"], 10.8)
+        self.assertEqual(res.attrs["resolution"], 333)


 class TestCategoricalDataCompositor(unittest.TestCase):
@@ -955,9 +955,9 @@ def setUp(self):
         """Create test data."""
-        attrs = {'name': 'foo'}
+        attrs = {"name": "foo"}
         data = xr.DataArray(da.from_array([[2., 1.], [3., 0.]]), attrs=attrs,
-                            dims=('y', 'x'), coords={'y': [0, 1], 'x': [0, 1]})
+                            dims=("y", "x"), coords={"y": [0, 1], "x": [0, 1]})

         self.data = data
@@ -965,20 +965,20 @@ def test_basic_recategorization(self):
         """Test general functionality of compositor incl. attributes."""
         from satpy.composites import CategoricalDataCompositor
         lut = [np.nan, 0, 1, 1]
-        name = 'bar'
+        name = "bar"
         comp = CategoricalDataCompositor(name=name, lut=lut)
         res = comp([self.data])
         res = res.compute()
         expected = np.array([[1., 0.], [1., np.nan]])
         np.testing.assert_equal(res.values, expected)
-        np.testing.assert_equal(res.attrs['name'], name)
-        np.testing.assert_equal(res.attrs['composite_lut'], lut)
+        np.testing.assert_equal(res.attrs["name"], name)
+        np.testing.assert_equal(res.attrs["composite_lut"], lut)

     def test_too_many_datasets(self):
         """Test that ValueError is raised if more than one dataset is provided."""
         from satpy.composites import CategoricalDataCompositor
         lut = [np.nan, 0, 1, 1]
-        comp = CategoricalDataCompositor(name='foo', lut=lut)
+        comp = CategoricalDataCompositor(name="foo", lut=lut)
         np.testing.assert_raises(ValueError, comp, [self.data, self.data])

@@ -988,19 +988,19 @@ class TestGenericCompositor(unittest.TestCase):
     def setUp(self):
         """Create test data."""
         from satpy.composites import GenericCompositor
-        self.comp = GenericCompositor(name='test')
-        self.comp2 = GenericCompositor(name='test2', common_channel_mask=False)
+        self.comp = GenericCompositor(name="test")
+        self.comp2 = GenericCompositor(name="test2", common_channel_mask=False)

         all_valid = np.ones((1, 2, 2))
-        self.all_valid = xr.DataArray(all_valid, dims=['bands', 'y', 'x'])
+        self.all_valid = xr.DataArray(all_valid, dims=["bands", "y", "x"])
         first_invalid = np.reshape(np.array([np.nan, 1., 1., 1.]), (1, 2, 2))
         self.first_invalid = xr.DataArray(first_invalid,
-                                          dims=['bands', 'y', 'x'])
+                                          dims=["bands", "y", "x"])
         second_invalid = np.reshape(np.array([1., np.nan, 1., 1.]), (1, 2, 2))
         self.second_invalid = xr.DataArray(second_invalid,
-                                           dims=['bands', 'y', 'x'])
+                                           dims=["bands", "y", "x"])
         wrong_shape = np.reshape(np.array([1., 1., 1.]), (1, 3, 1))
-        self.wrong_shape = xr.DataArray(wrong_shape, dims=['bands', 'y', 'x'])
+        self.wrong_shape = xr.DataArray(wrong_shape, dims=["bands", "y", "x"])

     def test_masking(self):
         """Test masking in generic compositor."""
@@ -1021,49 +1021,49 @@ def test_masking(self):
     def test_concat_datasets(self):
         """Test concatenation of datasets."""
         from satpy.composites import IncompatibleAreas
-        res = self.comp._concat_datasets([self.all_valid], 'L')
+        res = self.comp._concat_datasets([self.all_valid], "L")
         num_bands = len(res.bands)
         self.assertEqual(num_bands, 1)
         self.assertEqual(res.shape[0], num_bands)
-        self.assertEqual(res.bands[0], 'L')
-        res = self.comp._concat_datasets([self.all_valid, self.all_valid], 'LA')
+        self.assertEqual(res.bands[0], "L")
+        res = self.comp._concat_datasets([self.all_valid, self.all_valid], "LA")
         num_bands = len(res.bands)
         self.assertEqual(num_bands, 2)
         self.assertEqual(res.shape[0], num_bands)
-        self.assertEqual(res.bands[0], 'L')
-        self.assertEqual(res.bands[1], 'A')
+        self.assertEqual(res.bands[0], "L")
+        self.assertEqual(res.bands[1], "A")
         self.assertRaises(IncompatibleAreas,
                           self.comp._concat_datasets,
-                          [self.all_valid, self.wrong_shape], 'LA')
+                          [self.all_valid, self.wrong_shape], "LA")

     def test_get_sensors(self):
         """Test getting sensors from the dataset attributes."""
         res = self.comp._get_sensors([self.all_valid])
         self.assertIsNone(res)
         dset1 = self.all_valid
-        dset1.attrs['sensor'] = 'foo'
+        dset1.attrs["sensor"] = "foo"
         res = self.comp._get_sensors([dset1])
-        self.assertEqual(res, 'foo')
+        self.assertEqual(res, "foo")
         dset2 = self.first_invalid
-        dset2.attrs['sensor'] = 'bar'
+        dset2.attrs["sensor"] = "bar"
         res = self.comp._get_sensors([dset1, dset2])
-        self.assertIn('foo', res)
-        self.assertIn('bar', res)
+        self.assertIn("foo", res)
+        self.assertIn("bar", res)
         self.assertEqual(len(res), 2)
         self.assertIsInstance(res, set)

-    @mock.patch('satpy.composites.GenericCompositor._get_sensors')
-    @mock.patch('satpy.composites.combine_metadata')
-    @mock.patch('satpy.composites.check_times')
-    @mock.patch('satpy.composites.GenericCompositor.match_data_arrays')
+    @mock.patch("satpy.composites.GenericCompositor._get_sensors")
+    @mock.patch("satpy.composites.combine_metadata")
+    @mock.patch("satpy.composites.check_times")
+    @mock.patch("satpy.composites.GenericCompositor.match_data_arrays")
     def test_call_with_mock(self, match_data_arrays, check_times, combine_metadata, get_sensors):
         """Test calling generic compositor."""
         from satpy.composites import IncompatibleAreas
         combine_metadata.return_value = dict()
-        get_sensors.return_value = 'foo'
+        get_sensors.return_value = "foo"
         # One dataset, no mode given
         res = self.comp([self.all_valid])
         self.assertEqual(res.shape[0], 1)
-        self.assertEqual(res.attrs['mode'], 'L')
+        self.assertEqual(res.attrs["mode"], "L")
         match_data_arrays.assert_not_called()
         # This compositor has been initialized without common masking, so the
         # masking shouldn't have been called
@@ -1088,25 +1088,25 @@ def test_call(self):
         """Test calling generic compositor."""
         # Multiple datasets with extra attributes
         all_valid = self.all_valid
-        all_valid.attrs['sensor'] = 'foo'
-        attrs = {'foo': 'bar', 'resolution': 333}
-        self.comp.attrs['resolution'] = None
+        all_valid.attrs["sensor"] = "foo"
+        attrs = {"foo": "bar", "resolution": 333}
+        self.comp.attrs["resolution"] = None
        res = self.comp([self.all_valid, self.first_invalid], **attrs)

         # Verify attributes
-        self.assertEqual(res.attrs.get('sensor'), 'foo')
-        self.assertIn('foo', res.attrs)
-        self.assertEqual(res.attrs.get('foo'), 'bar')
-        self.assertNotIn('units', res.attrs)
-        self.assertNotIn('calibration', res.attrs)
-        self.assertNotIn('modifiers', res.attrs)
-        self.assertIsNone(res.attrs['wavelength'])
-        self.assertEqual(res.attrs['mode'], 'LA')
-        self.assertEqual(res.attrs['resolution'], 333)
+        self.assertEqual(res.attrs.get("sensor"), "foo")
+        self.assertIn("foo", res.attrs)
+        self.assertEqual(res.attrs.get("foo"), "bar")
+        self.assertNotIn("units", res.attrs)
+        self.assertNotIn("calibration", res.attrs)
+        self.assertNotIn("modifiers", res.attrs)
+        self.assertIsNone(res.attrs["wavelength"])
+        self.assertEqual(res.attrs["mode"], "LA")
+        self.assertEqual(res.attrs["resolution"], 333)

     def test_deprecation_warning(self):
         """Test deprecation warning for deprecated composite recipes."""
-        warning_message = 'foo is a deprecated composite. Use composite bar instead.'
+        warning_message = "foo is a deprecated composite. Use composite bar instead."
+ self.comp.attrs["deprecation_warning"] = warning_message with pytest.warns(UserWarning, match=warning_message): self.comp([self.all_valid]) @@ -1119,72 +1119,72 @@ def test_add_bands_l_rgb(self): from satpy.composites import add_bands # L + RGB -> RGB - data = xr.DataArray(da.ones((1, 3, 3)), dims=('bands', 'y', 'x'), - coords={'bands': ['L']}) - new_bands = xr.DataArray(da.array(['R', 'G', 'B']), dims=('bands'), - coords={'bands': ['R', 'G', 'B']}) + data = xr.DataArray(da.ones((1, 3, 3)), dims=("bands", "y", "x"), + coords={"bands": ["L"]}) + new_bands = xr.DataArray(da.array(["R", "G", "B"]), dims=("bands"), + coords={"bands": ["R", "G", "B"]}) res = add_bands(data, new_bands) - res_bands = ['R', 'G', 'B'] - self.assertEqual(res.attrs['mode'], ''.join(res_bands)) + res_bands = ["R", "G", "B"] + self.assertEqual(res.attrs["mode"], "".join(res_bands)) np.testing.assert_array_equal(res.bands, res_bands) - np.testing.assert_array_equal(res.coords['bands'], res_bands) + np.testing.assert_array_equal(res.coords["bands"], res_bands) def test_add_bands_l_rgba(self): """Test adding bands.""" from satpy.composites import add_bands # L + RGBA -> RGBA - data = xr.DataArray(da.ones((1, 3, 3)), dims=('bands', 'y', 'x'), - coords={'bands': ['L']}, attrs={'mode': 'L'}) - new_bands = xr.DataArray(da.array(['R', 'G', 'B', 'A']), dims=('bands'), - coords={'bands': ['R', 'G', 'B', 'A']}) + data = xr.DataArray(da.ones((1, 3, 3)), dims=("bands", "y", "x"), + coords={"bands": ["L"]}, attrs={"mode": "L"}) + new_bands = xr.DataArray(da.array(["R", "G", "B", "A"]), dims=("bands"), + coords={"bands": ["R", "G", "B", "A"]}) res = add_bands(data, new_bands) - res_bands = ['R', 'G', 'B', 'A'] - self.assertEqual(res.attrs['mode'], ''.join(res_bands)) + res_bands = ["R", "G", "B", "A"] + self.assertEqual(res.attrs["mode"], "".join(res_bands)) np.testing.assert_array_equal(res.bands, res_bands) - np.testing.assert_array_equal(res.coords['bands'], res_bands) + np.testing.assert_array_equal(res.coords["bands"], res_bands) def test_add_bands_la_rgb(self): """Test adding bands.""" from satpy.composites import add_bands # LA + RGB -> RGBA - data = xr.DataArray(da.ones((2, 3, 3)), dims=('bands', 'y', 'x'), - coords={'bands': ['L', 'A']}, attrs={'mode': 'LA'}) - new_bands = xr.DataArray(da.array(['R', 'G', 'B']), dims=('bands'), - coords={'bands': ['R', 'G', 'B']}) + data = xr.DataArray(da.ones((2, 3, 3)), dims=("bands", "y", "x"), + coords={"bands": ["L", "A"]}, attrs={"mode": "LA"}) + new_bands = xr.DataArray(da.array(["R", "G", "B"]), dims=("bands"), + coords={"bands": ["R", "G", "B"]}) res = add_bands(data, new_bands) - res_bands = ['R', 'G', 'B', 'A'] - self.assertEqual(res.attrs['mode'], ''.join(res_bands)) + res_bands = ["R", "G", "B", "A"] + self.assertEqual(res.attrs["mode"], "".join(res_bands)) np.testing.assert_array_equal(res.bands, res_bands) - np.testing.assert_array_equal(res.coords['bands'], res_bands) + np.testing.assert_array_equal(res.coords["bands"], res_bands) def test_add_bands_rgb_rbga(self): """Test adding bands.""" from satpy.composites import add_bands # RGB + RGBA -> RGBA - data = xr.DataArray(da.ones((3, 3, 3)), dims=('bands', 'y', 'x'), - coords={'bands': ['R', 'G', 'B']}, - attrs={'mode': 'RGB'}) - new_bands = xr.DataArray(da.array(['R', 'G', 'B', 'A']), dims=('bands'), - coords={'bands': ['R', 'G', 'B', 'A']}) + data = xr.DataArray(da.ones((3, 3, 3)), dims=("bands", "y", "x"), + coords={"bands": ["R", "G", "B"]}, + attrs={"mode": "RGB"}) + new_bands = xr.DataArray(da.array(["R", "G", "B", 
"A"]), dims=("bands"), + coords={"bands": ["R", "G", "B", "A"]}) res = add_bands(data, new_bands) - res_bands = ['R', 'G', 'B', 'A'] - self.assertEqual(res.attrs['mode'], ''.join(res_bands)) + res_bands = ["R", "G", "B", "A"] + self.assertEqual(res.attrs["mode"], "".join(res_bands)) np.testing.assert_array_equal(res.bands, res_bands) - np.testing.assert_array_equal(res.coords['bands'], res_bands) + np.testing.assert_array_equal(res.coords["bands"], res_bands) def test_add_bands_p_l(self): """Test adding bands.""" from satpy.composites import add_bands # P(RGBA) + L -> RGBA - data = xr.DataArray(da.ones((1, 3, 3)), dims=('bands', 'y', 'x'), - coords={'bands': ['P']}, - attrs={'mode': 'P'}) - new_bands = xr.DataArray(da.array(['L']), dims=('bands'), - coords={'bands': ['L']}) + data = xr.DataArray(da.ones((1, 3, 3)), dims=("bands", "y", "x"), + coords={"bands": ["P"]}, + attrs={"mode": "P"}) + new_bands = xr.DataArray(da.array(["L"]), dims=("bands"), + coords={"bands": ["L"]}) with pytest.raises(NotImplementedError): add_bands(data, new_bands) @@ -1192,7 +1192,7 @@ def test_add_bands_p_l(self): class TestStaticImageCompositor(unittest.TestCase): """Test case for the static compositor.""" - @mock.patch('satpy.resample.get_area_def') + @mock.patch("satpy.resample.get_area_def") def test_init(self, get_area_def): """Test the initializiation of static compositor.""" from satpy.composites import StaticImageCompositor @@ -1213,14 +1213,14 @@ def test_init(self, get_area_def): self.assertEqual(comp.area, "bar") get_area_def.assert_called_once_with("euro4") - @mock.patch('satpy.aux_download.retrieve') - @mock.patch('satpy.aux_download.register_file') - @mock.patch('satpy.Scene') + @mock.patch("satpy.aux_download.retrieve") + @mock.patch("satpy.aux_download.register_file") + @mock.patch("satpy.Scene") def test_call(self, Scene, register, retrieve): # noqa """Test the static compositing.""" from satpy.composites import StaticImageCompositor - satpy.config.set(data_dir=os.path.join(os.path.sep, 'path', 'to', 'image')) + satpy.config.set(data_dir=os.path.join(os.path.sep, "path", "to", "image")) remote_tif = "http://example.com/foo.tif" class MockScene(dict): @@ -1230,20 +1230,20 @@ def load(self, arg): img = mock.MagicMock() img.attrs = {} scn = MockScene() - scn['image'] = img + scn["image"] = img Scene.return_value = scn # absolute path to local file comp = StaticImageCompositor("name", filename="/foo.tif", area="euro4") res = comp() - Scene.assert_called_once_with(reader='generic_image', - filenames=['/foo.tif']) + Scene.assert_called_once_with(reader="generic_image", + filenames=["/foo.tif"]) register.assert_not_called() retrieve.assert_not_called() self.assertIn("start_time", res.attrs) self.assertIn("end_time", res.attrs) - self.assertIsNone(res.attrs['sensor']) - self.assertNotIn('modifiers', res.attrs) - self.assertNotIn('calibration', res.attrs) + self.assertIsNone(res.attrs["sensor"]) + self.assertNotIn("modifiers", res.attrs) + self.assertNotIn("calibration", res.attrs) # remote file with local cached version Scene.reset_mock() @@ -1251,28 +1251,28 @@ def load(self, arg): retrieve.return_value = "data_dir/foo.tif" comp = StaticImageCompositor("name", url=remote_tif, area="euro4") res = comp() - Scene.assert_called_once_with(reader='generic_image', - filenames=['data_dir/foo.tif']) + Scene.assert_called_once_with(reader="generic_image", + filenames=["data_dir/foo.tif"]) self.assertIn("start_time", res.attrs) self.assertIn("end_time", res.attrs) - self.assertIsNone(res.attrs['sensor']) - 
self.assertNotIn('modifiers', res.attrs) - self.assertNotIn('calibration', res.attrs) + self.assertIsNone(res.attrs["sensor"]) + self.assertNotIn("modifiers", res.attrs) + self.assertNotIn("calibration", res.attrs) # Non-georeferenced image, no area given - img.attrs.pop('area') + img.attrs.pop("area") comp = StaticImageCompositor("name", filename="/foo.tif") with self.assertRaises(AttributeError): comp() # Non-georeferenced image, area given - comp = StaticImageCompositor("name", filename="/foo.tif", area='euro4') + comp = StaticImageCompositor("name", filename="/foo.tif", area="euro4") res = comp() - self.assertEqual(res.attrs['area'].area_id, 'euro4') + self.assertEqual(res.attrs["area"].area_id, "euro4") # Filename contains environment variable os.environ["TEST_IMAGE_PATH"] = "/path/to/image" - comp = StaticImageCompositor("name", filename="${TEST_IMAGE_PATH}/foo.tif", area='euro4') + comp = StaticImageCompositor("name", filename="${TEST_IMAGE_PATH}/foo.tif", area="euro4") self.assertEqual(comp._cache_filename, "/path/to/image/foo.tif") # URL and filename without absolute path @@ -1281,12 +1281,12 @@ def load(self, arg): self.assertEqual(comp._cache_filename, "bar.tif") # No URL, filename without absolute path, use default data_dir from config - with mock.patch('os.path.exists') as exists: + with mock.patch("os.path.exists") as exists: exists.return_value = True comp = StaticImageCompositor("name", filename="foo.tif") self.assertEqual(comp._url, None) self.assertEqual(comp._cache_filename, - os.path.join(os.path.sep, 'path', 'to', 'image', 'foo.tif')) + os.path.join(os.path.sep, "path", "to", "image", "foo.tif")) def _enhance2dataset(dataset, convert_p=False): @@ -1315,21 +1315,21 @@ def setup_class(cls): } cls.foreground_data = foreground_data - @mock.patch('satpy.composites.enhance2dataset', _enhance2dataset) + @mock.patch("satpy.composites.enhance2dataset", _enhance2dataset) @pytest.mark.parametrize( - ('foreground_bands', 'background_bands', 'exp_bands', 'exp_result'), + ("foreground_bands", "background_bands", "exp_bands", "exp_result"), [ - ('L', 'L', 'L', np.array([[1.0, 0.5], [0.0, 1.0]])), - ('LA', 'LA', 'L', np.array([[1.0, 0.75], [0.5, 1.0]])), - ('RGB', 'RGB', 'RGB', np.array([ + ("L", "L", "L", np.array([[1.0, 0.5], [0.0, 1.0]])), + ("LA", "LA", "L", np.array([[1.0, 0.75], [0.5, 1.0]])), + ("RGB", "RGB", "RGB", np.array([ [[1., 0.5], [0., 1.]], [[1., 0.5], [0., 1.]], [[1., 0.5], [0., 1.]]])), - ('RGBA', 'RGBA', 'RGB', np.array([ + ("RGBA", "RGBA", "RGB", np.array([ [[1., 0.75], [0.5, 1.]], [[1., 0.75], [0.5, 1.]], [[1., 0.75], [0.5, 1.]]])), - ('RGBA', 'RGB', 'RGB', np.array([ + ("RGBA", "RGB", "RGB", np.array([ [[1., 0.75], [0.5, 1.]], [[1., 0.75], [0.5, 1.]], [[1., 0.75], [0.5, 1.]]])), @@ -1342,43 +1342,43 @@ def test_call(self, foreground_bands, background_bands, exp_bands, exp_result): # L mode images foreground_data = self.foreground_data[foreground_bands] - attrs = {'mode': foreground_bands, 'area': 'foo'} + attrs = {"mode": foreground_bands, "area": "foo"} foreground = xr.DataArray(da.from_array(foreground_data), - dims=('bands', 'y', 'x'), - coords={'bands': [c for c in attrs['mode']]}, + dims=("bands", "y", "x"), + coords={"bands": [c for c in attrs["mode"]]}, attrs=attrs) - attrs = {'mode': background_bands, 'area': 'foo'} - background = xr.DataArray(da.ones((len(background_bands), 2, 2)), dims=('bands', 'y', 'x'), - coords={'bands': [c for c in attrs['mode']]}, + attrs = {"mode": background_bands, "area": "foo"} + background = 
xr.DataArray(da.ones((len(background_bands), 2, 2)), dims=("bands", "y", "x"), + coords={"bands": [c for c in attrs["mode"]]}, attrs=attrs) res = comp([foreground, background]) - assert res.attrs['area'] == 'foo' + assert res.attrs["area"] == "foo" np.testing.assert_allclose(res, exp_result) - assert res.attrs['mode'] == exp_bands + assert res.attrs["mode"] == exp_bands - @mock.patch('satpy.composites.enhance2dataset', _enhance2dataset) + @mock.patch("satpy.composites.enhance2dataset", _enhance2dataset) def test_multiple_sensors(self): """Test the background compositing from multiple sensor data.""" from satpy.composites import BackgroundCompositor comp = BackgroundCompositor("name") # L mode images - attrs = {'mode': 'L', 'area': 'foo'} + attrs = {"mode": "L", "area": "foo"} foreground_data = self.foreground_data["L"] foreground = xr.DataArray(da.from_array(foreground_data), - dims=('bands', 'y', 'x'), - coords={'bands': [c for c in attrs['mode']]}, + dims=("bands", "y", "x"), + coords={"bands": [c for c in attrs["mode"]]}, attrs=attrs.copy()) - foreground.attrs['sensor'] = 'abi' - background = xr.DataArray(da.ones((1, 2, 2)), dims=('bands', 'y', 'x'), - coords={'bands': [c for c in attrs['mode']]}, + foreground.attrs["sensor"] = "abi" + background = xr.DataArray(da.ones((1, 2, 2)), dims=("bands", "y", "x"), + coords={"bands": [c for c in attrs["mode"]]}, attrs=attrs.copy()) - background.attrs['sensor'] = 'glm' + background.attrs["sensor"] = "glm" res = comp([foreground, background]) - assert res.attrs['area'] == 'foo' + assert res.attrs["area"] == "foo" np.testing.assert_allclose(res, np.array([[1., 0.5], [0., 1.]])) - assert res.attrs['mode'] == 'L' - assert res.attrs['sensor'] == {'abi', 'glm'} + assert res.attrs["mode"] == "L" + assert res.attrs["sensor"] == {"abi", "glm"} class TestMaskingCompositor: @@ -1387,39 +1387,39 @@ class TestMaskingCompositor: @pytest.fixture def conditions_v1(self): """Masking conditions with string values.""" - return [{'method': 'equal', - 'value': 'Cloud-free_land', - 'transparency': 100}, - {'method': 'equal', - 'value': 'Cloud-free_sea', - 'transparency': 50}] + return [{"method": "equal", + "value": "Cloud-free_land", + "transparency": 100}, + {"method": "equal", + "value": "Cloud-free_sea", + "transparency": 50}] @pytest.fixture def conditions_v2(self): """Masking conditions with numerical values.""" - return [{'method': 'equal', - 'value': 1, - 'transparency': 100}, - {'method': 'equal', - 'value': 2, - 'transparency': 50}] + return [{"method": "equal", + "value": 1, + "transparency": 100}, + {"method": "equal", + "value": 2, + "transparency": 50}] @pytest.fixture def test_data(self): """Test data to use with masking compositors.""" - return xr.DataArray(da.random.random((3, 3)), dims=['y', 'x']) + return xr.DataArray(da.random.random((3, 3)), dims=["y", "x"]) @pytest.fixture def test_ct_data(self): """Test 2D CT data array.""" - flag_meanings = ['Cloud-free_land', 'Cloud-free_sea'] + flag_meanings = ["Cloud-free_land", "Cloud-free_sea"] flag_values = da.array([1, 2]) ct_data = da.array([[1, 2, 2], [2, 1, 2], [2, 2, 1]]) - ct_data = xr.DataArray(ct_data, dims=['y', 'x']) - ct_data.attrs['flag_meanings'] = flag_meanings - ct_data.attrs['flag_values'] = flag_values + ct_data = xr.DataArray(ct_data, dims=["y", "x"]) + ct_data.attrs["flag_meanings"] = flag_meanings + ct_data.attrs["flag_values"] = flag_values return ct_data @pytest.fixture @@ -1439,7 +1439,7 @@ def reference_alpha(self): ref_alpha = da.array([[0, 0.5, 0.5], [0.5, 0, 0.5], [0.5, 0.5, 
0]])
-        return xr.DataArray(ref_alpha, dims=['y', 'x'])
+        return xr.DataArray(ref_alpha, dims=["y", "x"])

     def test_init(self):
         """Test the initialization of the compositor."""
@@ -1451,10 +1451,10 @@ def test_init(self):
         # transparency defined
         transparency = {0: 100, 1: 50}
-        conditions = [{'method': 'equal', 'value': 0, 'transparency': 100},
-                      {'method': 'equal', 'value': 1, 'transparency': 50}]
+        conditions = [{"method": "equal", "value": 0, "transparency": 100},
+                      {"method": "equal", "value": 1, "transparency": 50}]
         comp = MaskingCompositor("name", transparency=transparency.copy())
-        assert not hasattr(comp, 'transparency')
+        assert not hasattr(comp, "transparency")
         # Transparency should be converted to conditions
         assert comp.conditions == conditions
@@ -1470,18 +1470,18 @@ def test_get_flag_value(self):
         mask = da.array([[1, 2, 2],
                          [2, 1, 2],
                          [2, 2, 1]])
-        mask = xr.DataArray(mask, dims=['y', 'x'])
-        flag_meanings = ['Cloud-free_land', 'Cloud-free_sea']
-        mask.attrs['flag_meanings'] = flag_meanings
-        mask.attrs['flag_values'] = flag_values
+        mask = xr.DataArray(mask, dims=["y", "x"])
+        flag_meanings = ["Cloud-free_land", "Cloud-free_sea"]
+        mask.attrs["flag_meanings"] = flag_meanings
+        mask.attrs["flag_values"] = flag_values

-        assert _get_flag_value(mask, 'Cloud-free_land') == 1
-        assert _get_flag_value(mask, 'Cloud-free_sea') == 2
+        assert _get_flag_value(mask, "Cloud-free_land") == 1
+        assert _get_flag_value(mask, "Cloud-free_sea") == 2

-        flag_meanings_str = 'Cloud-free_land Cloud-free_sea'
-        mask.attrs['flag_meanings'] = flag_meanings_str
-        assert _get_flag_value(mask, 'Cloud-free_land') == 1
-        assert _get_flag_value(mask, 'Cloud-free_sea') == 2
+        flag_meanings_str = "Cloud-free_land Cloud-free_sea"
+        mask.attrs["flag_meanings"] = flag_meanings_str
+        assert _get_flag_value(mask, "Cloud-free_land") == 1
+        assert _get_flag_value(mask, "Cloud-free_sea") == 2

     @pytest.mark.parametrize("mode", ["LA", "RGBA"])
     def test_call_numerical_transparency_data(
@@ -1502,7 +1502,7 @@ def test_call_numerical_transparency_data(
         assert res.mode == mode
         for m in mode.rstrip("A"):
             np.testing.assert_allclose(res.sel(bands=m), reference_data)
-        np.testing.assert_allclose(res.sel(bands='A'), reference_alpha)
+        np.testing.assert_allclose(res.sel(bands="A"), reference_alpha)

     def test_call_named_fields(self, conditions_v2, test_data, test_ct_data,
                                reference_data, reference_alpha):
@@ -1514,8 +1514,8 @@ def test_call_named_fields(self, conditions_v2, test_data, test_ct_data,
         comp = MaskingCompositor("name", conditions=conditions_v2)
         res = comp([test_data, test_ct_data])
         assert res.mode == "LA"
-        np.testing.assert_allclose(res.sel(bands='L'), reference_data)
-        np.testing.assert_allclose(res.sel(bands='A'), reference_alpha)
+        np.testing.assert_allclose(res.sel(bands="L"), reference_data)
+        np.testing.assert_allclose(res.sel(bands="A"), reference_alpha)

     def test_call_named_fields_string(
             self, conditions_v2, test_data, test_ct_data, reference_data,
@@ -1524,14 +1524,14 @@ def test_call_named_fields_string(
         from satpy.composites import MaskingCompositor
         from satpy.tests.utils import CustomScheduler

-        flag_meanings_str = 'Cloud-free_land Cloud-free_sea'
-        test_ct_data.attrs['flag_meanings'] = flag_meanings_str
+        flag_meanings_str = "Cloud-free_land Cloud-free_sea"
+        test_ct_data.attrs["flag_meanings"] = flag_meanings_str
         with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
             comp = MaskingCompositor("name", conditions=conditions_v2)
             res = comp([test_data, test_ct_data])
         assert res.mode == "LA"
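
The conditions fixtures above amount to a small recipe for building the alpha band: transparency 100 maps to alpha 0.0, transparency 50 to alpha 0.5, and pixels matched by no condition stay opaque. A minimal sketch of that mapping in plain NumPy (not satpy's implementation) reproduces the reference_alpha fixture exactly:

    import numpy as np

    conditions = [{"method": "equal", "value": 1, "transparency": 100},
                  {"method": "equal", "value": 2, "transparency": 50}]
    ct_data = np.array([[1, 2, 2], [2, 1, 2], [2, 2, 1]])

    alpha = np.ones(ct_data.shape, dtype=float)   # start fully opaque
    for cond in conditions:
        matched = ct_data == cond["value"]        # the "equal" method
        alpha[matched] = 1.0 - cond["transparency"] / 100.0

    print(alpha)  # [[0. 0.5 0.5] [0.5 0. 0.5] [0.5 0.5 0.]]
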
np.testing.assert_allclose(res.sel(bands='L'), reference_data) - np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) + np.testing.assert_allclose(res.sel(bands="L"), reference_data) + np.testing.assert_allclose(res.sel(bands="A"), reference_alpha) def test_method_isnan(self, test_data, test_ct_data, test_ct_data_v3): @@ -1539,27 +1539,27 @@ def test_method_isnan(self, test_data, from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler - conditions_v3 = [{'method': 'isnan', 'transparency': 100}] + conditions_v3 = [{"method": "isnan", "transparency": 100}] # The data are set to NaN where ct is NaN reference_data_v3 = test_data.where(test_ct_data == 1) reference_alpha_v3 = da.array([[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]]) - reference_alpha_v3 = xr.DataArray(reference_alpha_v3, dims=['y', 'x']) + reference_alpha_v3 = xr.DataArray(reference_alpha_v3, dims=["y", "x"]) with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v3) res = comp([test_data, test_ct_data_v3]) assert res.mode == "LA" - np.testing.assert_allclose(res.sel(bands='L'), reference_data_v3) - np.testing.assert_allclose(res.sel(bands='A'), reference_alpha_v3) + np.testing.assert_allclose(res.sel(bands="L"), reference_data_v3) + np.testing.assert_allclose(res.sel(bands="A"), reference_alpha_v3) def test_method_absolute_import(self, test_data, test_ct_data_v3): """Test "absolute_import" as method.""" from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler - conditions_v4 = [{'method': 'absolute_import', 'transparency': 'satpy.resample'}] + conditions_v4 = [{"method": "absolute_import", "transparency": "satpy.resample"}] # This should raise AttributeError with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v4) @@ -1573,50 +1573,50 @@ def test_rgb_dataset(self, conditions_v1, test_ct_data, reference_alpha): # 3D data array data = xr.DataArray(da.random.random((3, 3, 3)), - dims=['bands', 'y', 'x'], - coords={'bands': ['R', 'G', 'B'], - 'y': np.arange(3), - 'x': np.arange(3)}) + dims=["bands", "y", "x"], + coords={"bands": ["R", "G", "B"], + "y": np.arange(3), + "x": np.arange(3)}) with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v1) res = comp([data, test_ct_data]) assert res.mode == "RGBA" - np.testing.assert_allclose(res.sel(bands='R'), - data.sel(bands='R').where(test_ct_data > 1)) - np.testing.assert_allclose(res.sel(bands='G'), - data.sel(bands='G').where(test_ct_data > 1)) - np.testing.assert_allclose(res.sel(bands='B'), - data.sel(bands='B').where(test_ct_data > 1)) - np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) + np.testing.assert_allclose(res.sel(bands="R"), + data.sel(bands="R").where(test_ct_data > 1)) + np.testing.assert_allclose(res.sel(bands="G"), + data.sel(bands="G").where(test_ct_data > 1)) + np.testing.assert_allclose(res.sel(bands="B"), + data.sel(bands="B").where(test_ct_data > 1)) + np.testing.assert_allclose(res.sel(bands="A"), reference_alpha) def test_rgba_dataset(self, conditions_v2, test_ct_data, reference_alpha): """Test RGBA dataset.""" from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler data = xr.DataArray(da.random.random((4, 3, 3)), - dims=['bands', 'y', 'x'], - coords={'bands': ['R', 'G', 'B', 'A'], - 'y': np.arange(3), - 'x': np.arange(3)}) + dims=["bands", 
"y", "x"], + coords={"bands": ["R", "G", "B", "A"], + "y": np.arange(3), + "x": np.arange(3)}) with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v2) res = comp([data, test_ct_data]) assert res.mode == "RGBA" - np.testing.assert_allclose(res.sel(bands='R'), - data.sel(bands='R').where(test_ct_data > 1)) - np.testing.assert_allclose(res.sel(bands='G'), - data.sel(bands='G').where(test_ct_data > 1)) - np.testing.assert_allclose(res.sel(bands='B'), - data.sel(bands='B').where(test_ct_data > 1)) + np.testing.assert_allclose(res.sel(bands="R"), + data.sel(bands="R").where(test_ct_data > 1)) + np.testing.assert_allclose(res.sel(bands="G"), + data.sel(bands="G").where(test_ct_data > 1)) + np.testing.assert_allclose(res.sel(bands="B"), + data.sel(bands="B").where(test_ct_data > 1)) # The compositor should drop the original alpha band - np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) + np.testing.assert_allclose(res.sel(bands="A"), reference_alpha) def test_incorrect_method(self, test_data, test_ct_data): """Test incorrect method.""" from satpy.composites import MaskingCompositor - conditions = [{'method': 'foo', 'value': 0, 'transparency': 100}] + conditions = [{"method": "foo", "value": 0, "transparency": 100}] comp = MaskingCompositor("name", conditions=conditions) with pytest.raises(AttributeError): comp([test_data, test_ct_data]) @@ -1646,12 +1646,12 @@ def setUp(self): self.ch08_w = 3.0 self.ch06_w = 4.0 - @mock.patch('satpy.composites.NaturalEnh.__repr__') - @mock.patch('satpy.composites.NaturalEnh.match_data_arrays') + @mock.patch("satpy.composites.NaturalEnh.__repr__") + @mock.patch("satpy.composites.NaturalEnh.match_data_arrays") def test_natural_enh(self, match_data_arrays, repr_): """Test NaturalEnh compositor.""" from satpy.composites import NaturalEnh - repr_.return_value = '' + repr_.return_value = "" projectables = [self.ch1, self.ch2, self.ch3] def temp_func(*args): @@ -1676,57 +1676,57 @@ def temp_func(*args): class TestEnhance2Dataset(unittest.TestCase): """Test the enhance2dataset utility.""" - @mock.patch('satpy.composites.get_enhanced_image') + @mock.patch("satpy.composites.get_enhanced_image") def test_enhance_p_to_rgb(self, get_enhanced_image): """Test enhancing a paletted dataset in RGB mode.""" from trollimage.xrimage import XRImage - img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=('bands', 'y', 'x'), coords={'bands': ['P']})) + img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=("bands", "y", "x"), coords={"bands": ["P"]})) img.palette = ((0, 0, 0), (4, 4, 4), (8, 8, 8)) get_enhanced_image.return_value = img from satpy.composites import enhance2dataset dataset = xr.DataArray(np.ones((1, 20, 20))) res = enhance2dataset(dataset, convert_p=True) - assert res.attrs['mode'] == 'RGB' + assert res.attrs["mode"] == "RGB" - @mock.patch('satpy.composites.get_enhanced_image') + @mock.patch("satpy.composites.get_enhanced_image") def test_enhance_p_to_rgba(self, get_enhanced_image): """Test enhancing a paletted dataset in RGBA mode.""" from trollimage.xrimage import XRImage - img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=('bands', 'y', 'x'), coords={'bands': ['P']})) + img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=("bands", "y", "x"), coords={"bands": ["P"]})) img.palette = ((0, 0, 0, 255), (4, 4, 4, 255), (8, 8, 8, 255)) get_enhanced_image.return_value = img from satpy.composites import enhance2dataset dataset = xr.DataArray(np.ones((1, 20, 20))) res = 
enhance2dataset(dataset, convert_p=True)
-        assert res.attrs['mode'] == 'RGBA'
+        assert res.attrs["mode"] == "RGBA"

-    @mock.patch('satpy.composites.get_enhanced_image')
+    @mock.patch("satpy.composites.get_enhanced_image")
     def test_enhance_p(self, get_enhanced_image):
         """Test enhancing a paletted dataset in P mode."""
         from trollimage.xrimage import XRImage
-        img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=('bands', 'y', 'x'), coords={'bands': ['P']}))
+        img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=("bands", "y", "x"), coords={"bands": ["P"]}))
         img.palette = ((0, 0, 0, 255), (4, 4, 4, 255), (8, 8, 8, 255))
         get_enhanced_image.return_value = img
         from satpy.composites import enhance2dataset
         dataset = xr.DataArray(np.ones((1, 20, 20)))
         res = enhance2dataset(dataset)
-        assert res.attrs['mode'] == 'P'
+        assert res.attrs["mode"] == "P"
         assert res.max().values == 2

-    @mock.patch('satpy.composites.get_enhanced_image')
+    @mock.patch("satpy.composites.get_enhanced_image")
     def test_enhance_l(self, get_enhanced_image):
         """Test enhancing a dataset in L mode."""
         from trollimage.xrimage import XRImage
-        img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=('bands', 'y', 'x'), coords={'bands': ['L']}))
+        img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=("bands", "y", "x"), coords={"bands": ["L"]}))
         get_enhanced_image.return_value = img
         from satpy.composites import enhance2dataset
         dataset = xr.DataArray(np.ones((1, 20, 20)))
         res = enhance2dataset(dataset)
-        assert res.attrs['mode'] == 'L'
+        assert res.attrs["mode"] == "L"
         assert res.max().values == 1

@@ -1736,29 +1736,29 @@ class TestInferMode(unittest.TestCase):
     def test_bands_coords_is_used(self):
         """Test that the `bands` coord is used."""
         from satpy.composites import GenericCompositor
-        arr = xr.DataArray(np.ones((1, 5, 5)), dims=('bands', 'x', 'y'), coords={'bands': ['P']})
-        assert GenericCompositor.infer_mode(arr) == 'P'
+        arr = xr.DataArray(np.ones((1, 5, 5)), dims=("bands", "x", "y"), coords={"bands": ["P"]})
+        assert GenericCompositor.infer_mode(arr) == "P"

-        arr = xr.DataArray(np.ones((3, 5, 5)), dims=('bands', 'x', 'y'), coords={'bands': ['Y', 'Cb', 'Cr']})
-        assert GenericCompositor.infer_mode(arr) == 'YCbCr'
+        arr = xr.DataArray(np.ones((3, 5, 5)), dims=("bands", "x", "y"), coords={"bands": ["Y", "Cb", "Cr"]})
+        assert GenericCompositor.infer_mode(arr) == "YCbCr"

     def test_mode_is_used(self):
         """Test that the `mode` attribute is used."""
         from satpy.composites import GenericCompositor
-        arr = xr.DataArray(np.ones((1, 5, 5)), dims=('bands', 'x', 'y'), attrs={'mode': 'P'})
-        assert GenericCompositor.infer_mode(arr) == 'P'
+        arr = xr.DataArray(np.ones((1, 5, 5)), dims=("bands", "x", "y"), attrs={"mode": "P"})
+        assert GenericCompositor.infer_mode(arr) == "P"

     def test_band_size_is_used(self):
         """Test that the band size is used."""
         from satpy.composites import GenericCompositor
-        arr = xr.DataArray(np.ones((2, 5, 5)), dims=('bands', 'x', 'y'))
-        assert GenericCompositor.infer_mode(arr) == 'LA'
+        arr = xr.DataArray(np.ones((2, 5, 5)), dims=("bands", "x", "y"))
+        assert GenericCompositor.infer_mode(arr) == "LA"

     def test_no_bands_is_l(self):
         """Test that default (no band) is L."""
         from satpy.composites import GenericCompositor
-        arr = xr.DataArray(np.ones((5, 5)), dims=('x', 'y'))
-        assert GenericCompositor.infer_mode(arr) == 'L'
+        arr = xr.DataArray(np.ones((5, 5)), dims=("x", "y"))
+        assert GenericCompositor.infer_mode(arr) == "L"

 class TestLongitudeMaskingCompositor(unittest.TestCase):
@@ -1772,26 +1772,26 @@
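
The TestInferMode cases above pin down a precedence order for GenericCompositor.infer_mode: an explicit bands coordinate wins, then a mode attribute, then the size of the bands dimension, and a 2D array with no bands dimension falls back to L. A quick sketch, assuming satpy is installed:

    import numpy as np
    import xarray as xr
    from satpy.composites import GenericCompositor

    arr = xr.DataArray(np.ones((2, 5, 5)), dims=("bands", "x", "y"))
    print(GenericCompositor.infer_mode(arr))  # "LA", inferred from the band count

    arr = xr.DataArray(np.ones((5, 5)), dims=("x", "y"))
    print(GenericCompositor.infer_mode(arr))  # "L", no bands dimension at all
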
def test_masking(self): lons = np.array([-180., -100., -50., 0., 50., 100., 180.]) area.get_lonlats = mock.MagicMock(return_value=[lons, []]) a = xr.DataArray(np.array([1, 2, 3, 4, 5, 6, 7]), - attrs={'area': area, 'units': 'K'}) + attrs={"area": area, "units": "K"}) - comp = LongitudeMaskingCompositor(name='test', lon_min=-40., lon_max=120.) + comp = LongitudeMaskingCompositor(name="test", lon_min=-40., lon_max=120.) expected = xr.DataArray(np.array([np.nan, np.nan, np.nan, 4, 5, 6, np.nan])) res = comp([a]) np.testing.assert_allclose(res.data, expected.data) assert "units" in res.attrs assert res.attrs["units"] == "K" - comp = LongitudeMaskingCompositor(name='test', lon_min=-40.) + comp = LongitudeMaskingCompositor(name="test", lon_min=-40.) expected = xr.DataArray(np.array([np.nan, np.nan, np.nan, 4, 5, 6, 7])) res = comp([a]) np.testing.assert_allclose(res.data, expected.data) - comp = LongitudeMaskingCompositor(name='test', lon_max=120.) + comp = LongitudeMaskingCompositor(name="test", lon_max=120.) expected = xr.DataArray(np.array([1, 2, 3, 4, 5, 6, np.nan])) res = comp([a]) np.testing.assert_allclose(res.data, expected.data) - comp = LongitudeMaskingCompositor(name='test', lon_min=120., lon_max=-40.) + comp = LongitudeMaskingCompositor(name="test", lon_min=120., lon_max=-40.) expected = xr.DataArray(np.array([1, 2, 3, np.nan, np.nan, np.nan, 7])) res = comp([a]) np.testing.assert_allclose(res.data, expected.data) diff --git a/satpy/tests/test_config.py b/satpy/tests/test_config.py index 10d3205223..5cb1c047d2 100644 --- a/satpy/tests/test_config.py +++ b/satpy/tests/test_config.py @@ -59,7 +59,7 @@ def test_areas_pyproj(self): swath_def = SwathDefinition(lons, lats) all_areas = parse_area_file(get_area_file()) for area_obj in all_areas: - if hasattr(area_obj, 'freeze'): + if hasattr(area_obj, "freeze"): try: area_obj = area_obj.freeze(lonslats=swath_def) except RuntimeError: @@ -74,7 +74,7 @@ def test_areas_rasterio(self): from rasterio.crs import CRS except ImportError: return unittest.skip("Missing rasterio dependency") - if not hasattr(CRS, 'from_dict'): + if not hasattr(CRS, "from_dict"): return unittest.skip("RasterIO 1.0+ required") import numpy as np @@ -91,7 +91,7 @@ def test_areas_rasterio(self): swath_def = SwathDefinition(lons, lats) all_areas = parse_area_file(get_area_file()) for area_obj in all_areas: - if hasattr(area_obj, 'freeze'): + if hasattr(area_obj, "freeze"): try: area_obj = area_obj.freeze(lonslats=swath_def) except RuntimeError: @@ -115,8 +115,8 @@ def fake_plugin_etc_path( etc_path, entry_points, module_paths = _get_entry_points_and_etc_paths(tmp_path, entry_point_names) fake_iter_entry_points = _create_fake_iter_entry_points(entry_points) fake_importlib_files = _create_fake_importlib_files(module_paths) - with mock.patch('satpy._config.entry_points', fake_iter_entry_points), \ - mock.patch('satpy._config.impr_files', fake_importlib_files): + with mock.patch("satpy._config.entry_points", fake_iter_entry_points), \ + mock.patch("satpy._config.impr_files", fake_importlib_files): yield etc_path @@ -298,7 +298,7 @@ def test_get_plugin_configs(self, fake_composite_plugin_etc_path): from satpy._config import get_entry_points_config_dirs with satpy.config.set(config_path=[]): - dirs = get_entry_points_config_dirs('satpy.composites') + dirs = get_entry_points_config_dirs("satpy.composites") assert dirs == [str(fake_composite_plugin_etc_path)] def test_load_entry_point_composite(self, fake_composite_plugin_etc_path): @@ -393,16 +393,16 @@ def 
test_custom_config_file(self): import satpy my_config_dict = { - 'cache_dir': "/path/to/cache", + "cache_dir": "/path/to/cache", } try: - with tempfile.NamedTemporaryFile(mode='w+t', suffix='.yaml', delete=False) as tfile: + with tempfile.NamedTemporaryFile(mode="w+t", suffix=".yaml", delete=False) as tfile: yaml.dump(my_config_dict, tfile) tfile.close() - with mock.patch.dict('os.environ', {'SATPY_CONFIG': tfile.name}): + with mock.patch.dict("os.environ", {"SATPY_CONFIG": tfile.name}): reload(satpy._config) reload(satpy) - assert satpy.config.get('cache_dir') == '/path/to/cache' + assert satpy.config.get("cache_dir") == "/path/to/cache" finally: os.remove(tfile.name) @@ -412,15 +412,15 @@ def test_deprecated_env_vars(self): import satpy old_vars = { - 'PPP_CONFIG_DIR': '/my/ppp/config/dir', - 'SATPY_ANCPATH': '/my/ancpath', + "PPP_CONFIG_DIR": "/my/ppp/config/dir", + "SATPY_ANCPATH": "/my/ancpath", } - with mock.patch.dict('os.environ', old_vars): + with mock.patch.dict("os.environ", old_vars): reload(satpy._config) reload(satpy) - assert satpy.config.get('data_dir') == '/my/ancpath' - assert satpy.config.get('config_path') == ['/my/ppp/config/dir'] + assert satpy.config.get("data_dir") == "/my/ancpath" + assert satpy.config.get("config_path") == ["/my/ppp/config/dir"] def test_config_path_multiple(self): """Test that multiple config paths are accepted.""" @@ -429,13 +429,13 @@ def test_config_path_multiple(self): import satpy exp_paths, env_paths = _os_specific_multipaths() old_vars = { - 'SATPY_CONFIG_PATH': env_paths, + "SATPY_CONFIG_PATH": env_paths, } - with mock.patch.dict('os.environ', old_vars): + with mock.patch.dict("os.environ", old_vars): reload(satpy._config) reload(satpy) - assert satpy.config.get('config_path') == exp_paths + assert satpy.config.get("config_path") == exp_paths def test_config_path_multiple_load(self): """Test that config paths from subprocesses load properly. @@ -449,10 +449,10 @@ def test_config_path_multiple_load(self): import satpy exp_paths, env_paths = _os_specific_multipaths() old_vars = { - 'SATPY_CONFIG_PATH': env_paths, + "SATPY_CONFIG_PATH": env_paths, } - with mock.patch.dict('os.environ', old_vars): + with mock.patch.dict("os.environ", old_vars): # these reloads will update env variable "SATPY_CONFIG_PATH" reload(satpy._config) reload(satpy) @@ -460,7 +460,7 @@ def test_config_path_multiple_load(self): # load the updated env variable and parse it again. 
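
Outside the test harness, the round trip exercised here looks roughly like this. This is a sketch assuming satpy is installed; the paths are made up, and os.pathsep keeps the joined value portable across platforms:

    import os
    from importlib import reload

    import satpy

    os.environ["SATPY_CONFIG_PATH"] = os.pathsep.join(["/my/configs1", "/my/configs2"])
    # satpy parses the variable at import time, hence the reload dance used in the tests:
    reload(satpy._config)
    reload(satpy)
    print(satpy.config.get("config_path"))  # ["/my/configs1", "/my/configs2"]
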
reload(satpy._config) reload(satpy) - assert satpy.config.get('config_path') == exp_paths + assert satpy.config.get("config_path") == exp_paths def test_bad_str_config_path(self): """Test that a str config path isn't allowed.""" @@ -468,17 +468,17 @@ def test_bad_str_config_path(self): import satpy old_vars = { - 'SATPY_CONFIG_PATH': '/my/configs1', + "SATPY_CONFIG_PATH": "/my/configs1", } # single path from env var still works - with mock.patch.dict('os.environ', old_vars): + with mock.patch.dict("os.environ", old_vars): reload(satpy._config) reload(satpy) - assert satpy.config.get('config_path') == ['/my/configs1'] + assert satpy.config.get("config_path") == ["/my/configs1"] # strings are not allowed, lists are - with satpy.config.set(config_path='/single/string/paths/are/bad'): + with satpy.config.set(config_path="/single/string/paths/are/bad"): pytest.raises(ValueError, satpy._config.get_config_path_safe) def test_tmp_dir_is_writable(self): @@ -503,7 +503,7 @@ def _is_writable(directory): def _os_specific_multipaths(): - exp_paths = ['/my/configs1', '/my/configs2', '/my/configs3'] + exp_paths = ["/my/configs1", "/my/configs2", "/my/configs3"] if sys.platform.startswith("win"): exp_paths = ["C:" + p for p in exp_paths] path_str = os.pathsep.join(exp_paths) diff --git a/satpy/tests/test_data_download.py b/satpy/tests/test_data_download.py index 8f2984bd9b..85cd420951 100644 --- a/satpy/tests/test_data_download.py +++ b/satpy/tests/test_data_download.py @@ -42,16 +42,16 @@ def __init__(self, name, prerequisites=None, optional_prerequisites=None, **kwar if not prerequisites or len(prerequisites) != 1: raise ValueError("Unexpected number of prereqs") super().__init__(name, prerequisites, optional_prerequisites, **kwargs) - self.register_data_files({'url': kwargs['url'], - 'filename': kwargs['filename'], - 'known_hash': kwargs['known_hash']}) + self.register_data_files({"url": kwargs["url"], + "filename": kwargs["filename"], + "known_hash": kwargs["known_hash"]}) def _setup_custom_composite_config(base_dir): from satpy.composites import StaticImageCompositor from satpy.modifiers.atmosphere import ReflectanceCorrector composite_config = base_dir.mkdir("composites").join("visir.yaml") - with open(composite_config, 'w') as comp_file: + with open(composite_config, "w") as comp_file: yaml.dump({ "sensor_name": "visir", "modifiers": { @@ -79,7 +79,7 @@ def _setup_custom_composite_config(base_dir): def _setup_custom_reader_config(base_dir): reader_config = base_dir.mkdir("readers").join("fake.yaml") - with open(reader_config, 'wt') as comp_file: + with open(reader_config, "wt") as comp_file: # abstract base classes can't be converted so we do raw string comp_file.write(""" reader: @@ -97,7 +97,7 @@ def _setup_custom_reader_config(base_dir): def _setup_custom_writer_config(base_dir): writer_config = base_dir.mkdir("writers").join("fake.yaml") - with open(writer_config, 'wt') as comp_file: + with open(writer_config, "wt") as comp_file: # abstract base classes can't be converted so we do raw string comp_file.write(""" writer: @@ -113,8 +113,8 @@ def _setup_custom_writer_config(base_dir): def _assert_reader_files_downloaded(readers, found_files): - r_cond1 = 'readers/README.rst' in found_files - r_cond2 = 'readers/README2.rst' in found_files + r_cond1 = "readers/README.rst" in found_files + r_cond2 = "readers/README2.rst" in found_files if readers is not None and not readers: r_cond1 = not r_cond1 r_cond2 = not r_cond2 @@ -123,8 +123,8 @@ def _assert_reader_files_downloaded(readers, found_files): def 
_assert_writer_files_downloaded(writers, found_files): - w_cond1 = 'writers/README.rst' in found_files - w_cond2 = 'writers/README2.rst' in found_files + w_cond1 = "writers/README.rst" in found_files + w_cond2 = "writers/README2.rst" in found_files if writers is not None and not writers: w_cond1 = not w_cond1 w_cond2 = not w_cond2 @@ -133,15 +133,15 @@ def _assert_writer_files_downloaded(writers, found_files): def _assert_comp_files_downloaded(comp_sensors, found_files): - comp_cond = 'composites/README.rst' in found_files + comp_cond = "composites/README.rst" in found_files if comp_sensors is not None and not comp_sensors: comp_cond = not comp_cond assert comp_cond def _assert_mod_files_downloaded(comp_sensors, found_files): - mod_cond = 'modifiers/README.rst' in found_files - unfriendly_cond = 'modifiers/unfriendly.rst' in found_files + mod_cond = "modifiers/README.rst" in found_files + unfriendly_cond = "modifiers/unfriendly.rst" in found_files if comp_sensors is not None and not comp_sensors: mod_cond = not mod_cond assert mod_cond @@ -158,15 +158,15 @@ def _setup_custom_configs(self, tmpdir): _setup_custom_writer_config(tmpdir) self.tmpdir = tmpdir - @pytest.mark.parametrize('comp_sensors', [[], None, ['visir']]) - @pytest.mark.parametrize('writers', [[], None, ['fake']]) - @pytest.mark.parametrize('readers', [[], None, ['fake']]) + @pytest.mark.parametrize("comp_sensors", [[], None, ["visir"]]) + @pytest.mark.parametrize("writers", [[], None, ["fake"]]) + @pytest.mark.parametrize("readers", [[], None, ["fake"]]) def test_find_registerable(self, readers, writers, comp_sensors): """Test that find_registerable finds some things.""" import satpy from satpy.aux_download import find_registerable_files with satpy.config.set(config_path=[self.tmpdir]), \ - mock.patch('satpy.aux_download._FILE_REGISTRY', {}): + mock.patch("satpy.aux_download._FILE_REGISTRY", {}): found_files = find_registerable_files( readers=readers, writers=writers, composite_sensors=comp_sensors, @@ -183,7 +183,7 @@ def test_limited_find_registerable(self): from satpy.aux_download import find_registerable_files file_registry = {} with satpy.config.set(config_path=[self.tmpdir]), \ - mock.patch('satpy.aux_download._FILE_REGISTRY', file_registry): + mock.patch("satpy.aux_download._FILE_REGISTRY", file_registry): found_files = find_registerable_files( readers=[], writers=[], composite_sensors=[], ) @@ -195,8 +195,8 @@ def test_retrieve(self): from satpy.aux_download import find_registerable_files, retrieve file_registry = {} with satpy.config.set(config_path=[self.tmpdir], data_dir=str(self.tmpdir)), \ - mock.patch('satpy.aux_download._FILE_REGISTRY', file_registry): - comp_file = 'composites/README.rst' + mock.patch("satpy.aux_download._FILE_REGISTRY", file_registry): + comp_file = "composites/README.rst" found_files = find_registerable_files() assert comp_file in found_files assert not self.tmpdir.join(comp_file).exists() @@ -209,8 +209,8 @@ def test_offline_retrieve(self): from satpy.aux_download import find_registerable_files, retrieve file_registry = {} with satpy.config.set(config_path=[self.tmpdir], data_dir=str(self.tmpdir), download_aux=True), \ - mock.patch('satpy.aux_download._FILE_REGISTRY', file_registry): - comp_file = 'composites/README.rst' + mock.patch("satpy.aux_download._FILE_REGISTRY", file_registry): + comp_file = "composites/README.rst" found_files = find_registerable_files() assert comp_file in found_files @@ -242,10 +242,10 @@ def test_retrieve_all(self): file_registry = {} file_urls = {} with 
satpy.config.set(config_path=[self.tmpdir], data_dir=str(self.tmpdir)), \ - mock.patch('satpy.aux_download._FILE_REGISTRY', file_registry), \ - mock.patch('satpy.aux_download._FILE_URLS', file_urls), \ - mock.patch('satpy.aux_download.find_registerable_files'): - comp_file = 'composites/README.rst' + mock.patch("satpy.aux_download._FILE_REGISTRY", file_registry), \ + mock.patch("satpy.aux_download._FILE_URLS", file_urls), \ + mock.patch("satpy.aux_download.find_registerable_files"): + comp_file = "composites/README.rst" file_registry[comp_file] = None file_urls[comp_file] = README_URL assert not self.tmpdir.join(comp_file).exists() @@ -260,13 +260,13 @@ def test_no_downloads_in_tests(self): file_registry = {} with satpy.config.set(config_path=[self.tmpdir], data_dir=str(self.tmpdir), download_aux=True), \ - mock.patch('satpy.aux_download._FILE_REGISTRY', file_registry): - cache_key = 'myfile.rst' + mock.patch("satpy.aux_download._FILE_REGISTRY", file_registry): + cache_key = "myfile.rst" register_file(README_URL, cache_key) assert not self.tmpdir.join(cache_key).exists() pytest.raises(RuntimeError, retrieve, cache_key) # touch the file so it gets created - open(self.tmpdir.join(cache_key), 'w').close() + open(self.tmpdir.join(cache_key), "w").close() # offline downloading should still be allowed with satpy.config.set(download_aux=False): retrieve(cache_key) @@ -278,10 +278,10 @@ def test_download_script(self): file_registry = {} file_urls = {} with satpy.config.set(config_path=[self.tmpdir]), \ - mock.patch('satpy.aux_download._FILE_REGISTRY', file_registry), \ - mock.patch('satpy.aux_download._FILE_URLS', file_urls), \ - mock.patch('satpy.aux_download.find_registerable_files'): - comp_file = 'composites/README.rst' + mock.patch("satpy.aux_download._FILE_REGISTRY", file_registry), \ + mock.patch("satpy.aux_download._FILE_URLS", file_urls), \ + mock.patch("satpy.aux_download.find_registerable_files"): + comp_file = "composites/README.rst" file_registry[comp_file] = None file_urls[comp_file] = README_URL assert not self.tmpdir.join(comp_file).exists() diff --git a/satpy/tests/test_dataset.py b/satpy/tests/test_dataset.py index 0bc1de2982..b8df391d30 100644 --- a/satpy/tests/test_dataset.py +++ b/satpy/tests/test_dataset.py @@ -36,19 +36,19 @@ def test_basic_init(self): from satpy.dataset.dataid import minimal_default_keys_config as mdkc did = DataID(dikc, name="a") - assert did['name'] == 'a' - assert did['modifiers'] == tuple() + assert did["name"] == "a" + assert did["modifiers"] == tuple() DataID(dikc, name="a", wavelength=0.86) DataID(dikc, name="a", resolution=1000) - DataID(dikc, name="a", calibration='radiance') + DataID(dikc, name="a", calibration="radiance") DataID(dikc, name="a", wavelength=0.86, resolution=250, - calibration='radiance') + calibration="radiance") DataID(dikc, name="a", wavelength=0.86, resolution=250, - calibration='radiance', modifiers=('sunz_corrected',)) + calibration="radiance", modifiers=("sunz_corrected",)) with pytest.raises(ValueError): DataID(dikc, wavelength=0.86) - did = DataID(mdkc, name='comp24', resolution=500) - assert did['resolution'] == 500 + did = DataID(mdkc, name="comp24", resolution=500) + assert did["resolution"] == 500 def test_init_bad_modifiers(self): """Test that modifiers are a tuple.""" @@ -72,13 +72,13 @@ def test_bad_calibration(self): from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc with pytest.raises(ValueError): - DataID(dikc, name='C05', calibration='_bad_') + DataID(dikc, 
name="C05", calibration="_bad_") def test_is_modified(self): """Test that modifications are detected properly.""" from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc - d1 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=('hej',)) + d1 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=("hej",)) d2 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=tuple()) assert d1.is_modified() @@ -88,11 +88,11 @@ def test_create_less_modified_query(self): """Test that modifications are popped correctly.""" from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc - d1 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=('hej',)) + d1 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=("hej",)) d2 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=tuple()) - assert not d1.create_less_modified_query()['modifiers'] - assert not d2.create_less_modified_query()['modifiers'] + assert not d1.create_less_modified_query()["modifiers"] + assert not d2.create_less_modified_query()["modifiers"] class TestCombineMetadata(unittest.TestCase): @@ -101,11 +101,11 @@ class TestCombineMetadata(unittest.TestCase): def setUp(self): """Set up the test case.""" self.datetime_dts = ( - {'start_time': datetime(2018, 2, 1, 11, 58, 0)}, - {'start_time': datetime(2018, 2, 1, 11, 59, 0)}, - {'start_time': datetime(2018, 2, 1, 12, 0, 0)}, - {'start_time': datetime(2018, 2, 1, 12, 1, 0)}, - {'start_time': datetime(2018, 2, 1, 12, 2, 0)}, + {"start_time": datetime(2018, 2, 1, 11, 58, 0)}, + {"start_time": datetime(2018, 2, 1, 11, 59, 0)}, + {"start_time": datetime(2018, 2, 1, 12, 0, 0)}, + {"start_time": datetime(2018, 2, 1, 12, 1, 0)}, + {"start_time": datetime(2018, 2, 1, 12, 2, 0)}, ) def test_average_datetimes(self): @@ -125,14 +125,14 @@ def test_combine_times_with_averaging(self): """Test the combine_metadata with times with averaging.""" from satpy.dataset.metadata import combine_metadata ret = combine_metadata(*self.datetime_dts) - self.assertEqual(self.datetime_dts[2]['start_time'], ret['start_time']) + self.assertEqual(self.datetime_dts[2]["start_time"], ret["start_time"]) def test_combine_times_without_averaging(self): """Test the combine_metadata with times without averaging.""" from satpy.dataset.metadata import combine_metadata ret = combine_metadata(*self.datetime_dts, average_times=False) # times are not equal so don't include it in the final result - self.assertNotIn('start_time', ret) + self.assertNotIn("start_time", ret) def test_combine_arrays(self): """Test the combine_metadata with arrays.""" @@ -175,44 +175,44 @@ def test_combine_lists_identical(self): """Test combine metadata with identical lists.""" from satpy.dataset.metadata import combine_metadata metadatas = [ - {'prerequisites': [1, 2, 3, 4]}, - {'prerequisites': [1, 2, 3, 4]}, + {"prerequisites": [1, 2, 3, 4]}, + {"prerequisites": [1, 2, 3, 4]}, ] res = combine_metadata(*metadatas) - assert res['prerequisites'] == [1, 2, 3, 4] + assert res["prerequisites"] == [1, 2, 3, 4] def test_combine_lists_same_size_diff_values(self): """Test combine metadata with lists with different values.""" from satpy.dataset.metadata import combine_metadata metadatas = [ - {'prerequisites': [1, 2, 3, 4]}, - {'prerequisites': [1, 2, 3, 5]}, + {"prerequisites": [1, 2, 3, 4]}, + {"prerequisites": [1, 2, 3, 5]}, ] res = combine_metadata(*metadatas) - assert 'prerequisites' not in res + assert "prerequisites" not 
in res def test_combine_lists_different_size(self): """Test combine metadata with different size lists.""" from satpy.dataset.metadata import combine_metadata metadatas = [ - {'prerequisites': [1, 2, 3, 4]}, - {'prerequisites': []}, + {"prerequisites": [1, 2, 3, 4]}, + {"prerequisites": []}, ] res = combine_metadata(*metadatas) - assert 'prerequisites' not in res + assert "prerequisites" not in res metadatas = [ - {'prerequisites': [1, 2, 3, 4]}, - {'prerequisites': [1, 2, 3]}, + {"prerequisites": [1, 2, 3, 4]}, + {"prerequisites": [1, 2, 3]}, ] res = combine_metadata(*metadatas) - assert 'prerequisites' not in res + assert "prerequisites" not in res def test_combine_identical_numpy_scalars(self): """Test combining identical fill values.""" from satpy.dataset.metadata import combine_metadata - test_metadata = [{'_FillValue': np.uint16(42)}, {'_FillValue': np.uint16(42)}] - assert combine_metadata(*test_metadata) == {'_FillValue': 42} + test_metadata = [{"_FillValue": np.uint16(42)}, {"_FillValue": np.uint16(42)}] + assert combine_metadata(*test_metadata) == {"_FillValue": 42} def test_combine_empty_metadata(self): """Test combining empty metadata.""" @@ -223,96 +223,96 @@ def test_combine_empty_metadata(self): def test_combine_nans(self): """Test combining nan fill values.""" from satpy.dataset.metadata import combine_metadata - test_metadata = [{'_FillValue': np.nan}, {'_FillValue': np.nan}] - assert combine_metadata(*test_metadata) == {'_FillValue': np.nan} + test_metadata = [{"_FillValue": np.nan}, {"_FillValue": np.nan}] + assert combine_metadata(*test_metadata) == {"_FillValue": np.nan} def test_combine_numpy_arrays(self): """Test combining values that are numpy arrays.""" from satpy.dataset.metadata import combine_metadata - test_metadata = [{'valid_range': np.array([0., 0.00032], dtype=np.float32)}, - {'valid_range': np.array([0., 0.00032], dtype=np.float32)}, - {'valid_range': np.array([0., 0.00032], dtype=np.float32)}] + test_metadata = [{"valid_range": np.array([0., 0.00032], dtype=np.float32)}, + {"valid_range": np.array([0., 0.00032], dtype=np.float32)}, + {"valid_range": np.array([0., 0.00032], dtype=np.float32)}] result = combine_metadata(*test_metadata) - assert np.allclose(result['valid_range'], np.array([0., 0.00032], dtype=np.float32)) + assert np.allclose(result["valid_range"], np.array([0., 0.00032], dtype=np.float32)) def test_combine_dask_arrays(self): """Test combining values that are dask arrays.""" import dask.array as da from satpy.dataset.metadata import combine_metadata - test_metadata = [{'valid_range': da.from_array(np.array([0., 0.00032], dtype=np.float32))}, - {'valid_range': da.from_array(np.array([0., 0.00032], dtype=np.float32))}] + test_metadata = [{"valid_range": da.from_array(np.array([0., 0.00032], dtype=np.float32))}, + {"valid_range": da.from_array(np.array([0., 0.00032], dtype=np.float32))}] result = combine_metadata(*test_metadata) - assert 'valid_range' not in result + assert "valid_range" not in result def test_combine_real_world_mda(self): """Test with real data.""" - mda_objects = ({'_FillValue': np.nan, - 'valid_range': np.array([0., 0.00032], dtype=np.float32), - 'ancillary_variables': ['cpp_status_flag', - 'cpp_conditions', - 'cpp_quality', - 'cpp_reff_pal', - '-'], - 'platform_name': 'NOAA-20', - 'sensor': {'viirs'}, - 'raw_metadata': {'foo': {'bar': np.array([1, 2, 3])}}}, - {'_FillValue': np.nan, - 'valid_range': np.array([0., 0.00032], dtype=np.float32), - 'ancillary_variables': ['cpp_status_flag', - 'cpp_conditions', - 'cpp_quality', - 
'cpp_reff_pal', - '-'], - 'platform_name': 'NOAA-20', - 'sensor': {'viirs'}, - 'raw_metadata': {'foo': {'bar': np.array([1, 2, 3])}}}) - - expected = {'_FillValue': np.nan, - 'valid_range': np.array([0., 0.00032], dtype=np.float32), - 'ancillary_variables': ['cpp_status_flag', - 'cpp_conditions', - 'cpp_quality', - 'cpp_reff_pal', - '-'], - 'platform_name': 'NOAA-20', - 'sensor': {'viirs'}, - 'raw_metadata': {'foo': {'bar': np.array([1, 2, 3])}}} + mda_objects = ({"_FillValue": np.nan, + "valid_range": np.array([0., 0.00032], dtype=np.float32), + "ancillary_variables": ["cpp_status_flag", + "cpp_conditions", + "cpp_quality", + "cpp_reff_pal", + "-"], + "platform_name": "NOAA-20", + "sensor": {"viirs"}, + "raw_metadata": {"foo": {"bar": np.array([1, 2, 3])}}}, + {"_FillValue": np.nan, + "valid_range": np.array([0., 0.00032], dtype=np.float32), + "ancillary_variables": ["cpp_status_flag", + "cpp_conditions", + "cpp_quality", + "cpp_reff_pal", + "-"], + "platform_name": "NOAA-20", + "sensor": {"viirs"}, + "raw_metadata": {"foo": {"bar": np.array([1, 2, 3])}}}) + + expected = {"_FillValue": np.nan, + "valid_range": np.array([0., 0.00032], dtype=np.float32), + "ancillary_variables": ["cpp_status_flag", + "cpp_conditions", + "cpp_quality", + "cpp_reff_pal", + "-"], + "platform_name": "NOAA-20", + "sensor": {"viirs"}, + "raw_metadata": {"foo": {"bar": np.array([1, 2, 3])}}} from satpy.dataset.metadata import combine_metadata result = combine_metadata(*mda_objects) - assert np.allclose(result.pop('_FillValue'), expected.pop('_FillValue'), equal_nan=True) - assert np.allclose(result.pop('valid_range'), expected.pop('valid_range')) - np.testing.assert_equal(result.pop('raw_metadata'), - expected.pop('raw_metadata')) + assert np.allclose(result.pop("_FillValue"), expected.pop("_FillValue"), equal_nan=True) + assert np.allclose(result.pop("valid_range"), expected.pop("valid_range")) + np.testing.assert_equal(result.pop("raw_metadata"), + expected.pop("raw_metadata")) assert result == expected def test_combine_one_metadata_object(self): """Test combining one metadata object.""" - mda_objects = ({'_FillValue': np.nan, - 'valid_range': np.array([0., 0.00032], dtype=np.float32), - 'ancillary_variables': ['cpp_status_flag', - 'cpp_conditions', - 'cpp_quality', - 'cpp_reff_pal', - '-'], - 'platform_name': 'NOAA-20', - 'sensor': {'viirs'}},) - - expected = {'_FillValue': np.nan, - 'valid_range': np.array([0., 0.00032], dtype=np.float32), - 'ancillary_variables': ['cpp_status_flag', - 'cpp_conditions', - 'cpp_quality', - 'cpp_reff_pal', - '-'], - 'platform_name': 'NOAA-20', - 'sensor': {'viirs'}} + mda_objects = ({"_FillValue": np.nan, + "valid_range": np.array([0., 0.00032], dtype=np.float32), + "ancillary_variables": ["cpp_status_flag", + "cpp_conditions", + "cpp_quality", + "cpp_reff_pal", + "-"], + "platform_name": "NOAA-20", + "sensor": {"viirs"}},) + + expected = {"_FillValue": np.nan, + "valid_range": np.array([0., 0.00032], dtype=np.float32), + "ancillary_variables": ["cpp_status_flag", + "cpp_conditions", + "cpp_quality", + "cpp_reff_pal", + "-"], + "platform_name": "NOAA-20", + "sensor": {"viirs"}} from satpy.dataset.metadata import combine_metadata result = combine_metadata(*mda_objects) - assert np.allclose(result.pop('_FillValue'), expected.pop('_FillValue'), equal_nan=True) - assert np.allclose(result.pop('valid_range'), expected.pop('valid_range')) + assert np.allclose(result.pop("_FillValue"), expected.pop("_FillValue"), equal_nan=True) + assert np.allclose(result.pop("valid_range"), 
expected.pop("valid_range")) assert result == expected @@ -320,29 +320,29 @@ def test_combine_dicts_close(): """Test combination of dictionaries whose values are close.""" from satpy.dataset.metadata import combine_metadata attrs = { - 'raw_metadata': { - 'a': 1, - 'b': 'foo', - 'c': [1, 2, 3], - 'd': { - 'e': np.str_('bar'), - 'f': datetime(2020, 1, 1, 12, 15, 30), - 'g': np.array([1, 2, 3]), + "raw_metadata": { + "a": 1, + "b": "foo", + "c": [1, 2, 3], + "d": { + "e": np.str_("bar"), + "f": datetime(2020, 1, 1, 12, 15, 30), + "g": np.array([1, 2, 3]), }, - 'h': np.array([datetime(2020, 1, 1), datetime(2020, 1, 1)]) + "h": np.array([datetime(2020, 1, 1), datetime(2020, 1, 1)]) } } attrs_close = { - 'raw_metadata': { - 'a': 1 + 1E-12, - 'b': 'foo', - 'c': np.array([1, 2, 3]) + 1E-12, - 'd': { - 'e': np.str_('bar'), - 'f': datetime(2020, 1, 1, 12, 15, 30), - 'g': np.array([1, 2, 3]) + 1E-12 + "raw_metadata": { + "a": 1 + 1E-12, + "b": "foo", + "c": np.array([1, 2, 3]) + 1E-12, + "d": { + "e": np.str_("bar"), + "f": datetime(2020, 1, 1, 12, 15, 30), + "g": np.array([1, 2, 3]) + 1E-12 }, - 'h': np.array([datetime(2020, 1, 1), datetime(2020, 1, 1)]) + "h": np.array([datetime(2020, 1, 1), datetime(2020, 1, 1)]) } } test_metadata = [attrs, attrs_close] @@ -354,22 +354,22 @@ def test_combine_dicts_close(): "test_mda", [ # a/b/c/d different - {'a': np.array([1, 2, 3]), 'd': 123}, - {'a': {'b': np.array([4, 5, 6]), 'c': 1.0}, 'd': 'foo'}, - {'a': {'b': np.array([1, 2, 3]), 'c': 2.0}, 'd': 'foo'}, - {'a': {'b': np.array([1, 2, 3]), 'c': 1.0}, 'd': 'bar'}, + {"a": np.array([1, 2, 3]), "d": 123}, + {"a": {"b": np.array([4, 5, 6]), "c": 1.0}, "d": "foo"}, + {"a": {"b": np.array([1, 2, 3]), "c": 2.0}, "d": "foo"}, + {"a": {"b": np.array([1, 2, 3]), "c": 1.0}, "d": "bar"}, # a/b/c/d type different np.array([1, 2, 3]), - {'a': {'b': 'baz', 'c': 1.0}, 'd': 'foo'}, - {'a': {'b': np.array([1, 2, 3]), 'c': 'baz'}, 'd': 'foo'}, - {'a': {'b': np.array([1, 2, 3]), 'c': 1.0}, 'd': 1.0} + {"a": {"b": "baz", "c": 1.0}, "d": "foo"}, + {"a": {"b": np.array([1, 2, 3]), "c": "baz"}, "d": "foo"}, + {"a": {"b": np.array([1, 2, 3]), "c": 1.0}, "d": 1.0} ] ) def test_combine_dicts_different(test_mda): """Test combination of dictionaries differing in various ways.""" from satpy.dataset.metadata import combine_metadata - mda = {'a': {'b': np.array([1, 2, 3]), 'c': 1.0}, 'd': 'foo'} - test_metadata = [{'raw_metadata': mda}, {'raw_metadata': test_mda}] + mda = {"a": {"b": np.array([1, 2, 3]), "c": 1.0}, "d": "foo"} + test_metadata = [{"raw_metadata": mda}, {"raw_metadata": test_mda}] result = combine_metadata(*test_metadata) assert not result @@ -380,57 +380,57 @@ def test_dataid(): # Check that enum is translated to type. did = make_dataid() - assert issubclass(did._id_keys['calibration']['type'], ValueList) - assert 'enum' not in did._id_keys['calibration'] + assert issubclass(did._id_keys["calibration"]["type"], ValueList) + assert "enum" not in did._id_keys["calibration"] # Check that None is never a valid value - did = make_dataid(name='cheese_shops', resolution=None) - assert 'resolution' not in did - assert 'None' not in did.__repr__() + did = make_dataid(name="cheese_shops", resolution=None) + assert "resolution" not in did + assert "None" not in did.__repr__() with pytest.raises(ValueError): make_dataid(name=None, resolution=1000) # Check that defaults are applied correctly - assert did['modifiers'] == ModifierTuple() + assert did["modifiers"] == ModifierTuple() # Check that from_dict creates a distinct instance... 
- did2 = did.from_dict(dict(name='cheese_shops', resolution=None)) + did2 = did.from_dict(dict(name="cheese_shops", resolution=None)) assert did is not did2 # ...But is equal assert did2 == did # Check that the instance is immutable with pytest.raises(TypeError): - did['resolution'] = 1000 + did["resolution"] = 1000 # Check that a missing required field crashes with pytest.raises(ValueError): make_dataid(resolution=1000) # Check to_dict - assert did.to_dict() == dict(name='cheese_shops', modifiers=tuple()) + assert did.to_dict() == dict(name="cheese_shops", modifiers=tuple()) # Check repr - did = make_dataid(name='VIS008', resolution=111) + did = make_dataid(name="VIS008", resolution=111) assert repr(did) == "DataID(name='VIS008', resolution=111, modifiers=())" # Check inequality - default_id_keys_config = {'name': None, - 'wavelength': { - 'type': WavelengthRange, + default_id_keys_config = {"name": None, + "wavelength": { + "type": WavelengthRange, }, - 'resolution': None, - 'calibration': { - 'enum': [ - 'reflectance', - 'brightness_temperature', - 'radiance', - 'counts' + "resolution": None, + "calibration": { + "enum": [ + "reflectance", + "brightness_temperature", + "radiance", + "counts" ] }, - 'modifiers': { - 'default': ModifierTuple(), - 'type': ModifierTuple, + "modifiers": { + "default": ModifierTuple(), + "type": ModifierTuple, }, } assert DataID(default_id_keys_config, wavelength=10) != DataID(default_id_keys_config, name="VIS006") @@ -439,44 +439,44 @@ def test_dataid(): def test_dataid_equal_if_enums_different(): """Check that dataids with different enums but same items are equal.""" from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange - id_keys_config1 = {'name': None, - 'wavelength': { - 'type': WavelengthRange, + id_keys_config1 = {"name": None, + "wavelength": { + "type": WavelengthRange, }, - 'resolution': None, - 'calibration': { - 'enum': [ - 'c1', - 'c2', - 'c3', + "resolution": None, + "calibration": { + "enum": [ + "c1", + "c2", + "c3", ] }, - 'modifiers': { - 'default': ModifierTuple(), - 'type': ModifierTuple, + "modifiers": { + "default": ModifierTuple(), + "type": ModifierTuple, }, } - id_keys_config2 = {'name': None, - 'wavelength': { - 'type': WavelengthRange, + id_keys_config2 = {"name": None, + "wavelength": { + "type": WavelengthRange, }, - 'resolution': None, - 'calibration': { - 'enum': [ - 'c1', - 'c1.5', - 'c2', - 'c2.5', - 'c3' + "resolution": None, + "calibration": { + "enum": [ + "c1", + "c1.5", + "c2", + "c2.5", + "c3" ] }, - 'modifiers': { - 'default': ModifierTuple(), - 'type': ModifierTuple, + "modifiers": { + "default": ModifierTuple(), + "type": ModifierTuple, }, } - assert DataID(id_keys_config1, name='ni', calibration='c2') == DataID(id_keys_config2, name="ni", calibration='c2') + assert DataID(id_keys_config1, name="ni", calibration="c2") == DataID(id_keys_config2, name="ni", calibration="c2") def test_dataid_copy(): @@ -497,7 +497,7 @@ def test_dataid_pickle(): import pickle from satpy.tests.utils import make_dataid - did = make_dataid(name='hi', wavelength=(10, 11, 12), resolution=1000, calibration='radiance') + did = make_dataid(name="hi", wavelength=(10, 11, 12), resolution=1000, calibration="radiance") assert did == pickle.loads(pickle.dumps(did)) @@ -512,7 +512,7 @@ def test_dataid_elements_picklable(): import pickle from satpy.tests.utils import make_dataid - did = make_dataid(name='hi', wavelength=(10, 11, 12), resolution=1000, calibration='radiance') + did = make_dataid(name="hi", wavelength=(10, 11, 12), 
resolution=1000, calibration="radiance") for value in did.values(): pickled_value = pickle.loads(pickle.dumps(value)) assert value == pickled_value @@ -525,10 +525,10 @@ def test_dataquery(self): """Test DataQuery objects.""" from satpy.dataset import DataQuery - DataQuery(name='cheese_shops') + DataQuery(name="cheese_shops") # Check repr - did = DataQuery(name='VIS008', resolution=111) + did = DataQuery(name="VIS008", resolution=111) assert repr(did) == "DataQuery(name='VIS008', resolution=111)" # Check inequality @@ -537,7 +537,7 @@ def test_dataquery(self): def test_is_modified(self): """Test that modifications are detected properly.""" from satpy.dataset import DataQuery - d1 = DataQuery(name="a", wavelength=0.2, modifiers=('hej',)) + d1 = DataQuery(name="a", wavelength=0.2, modifiers=("hej",)) d2 = DataQuery(name="a", wavelength=0.2, modifiers=tuple()) assert d1.is_modified() @@ -546,11 +546,11 @@ def test_is_modified(self): def test_create_less_modified_query(self): """Test that modifications are popped correctly.""" from satpy.dataset import DataQuery - d1 = DataQuery(name="a", wavelength=0.2, modifiers=('hej',)) + d1 = DataQuery(name="a", wavelength=0.2, modifiers=("hej",)) d2 = DataQuery(name="a", wavelength=0.2, modifiers=tuple()) - assert not d1.create_less_modified_query()['modifiers'] - assert not d2.create_less_modified_query()['modifiers'] + assert not d1.create_less_modified_query()["modifiers"] + assert not d2.create_less_modified_query()["modifiers"] class TestIDQueryInteractions(unittest.TestCase): @@ -559,56 +559,56 @@ class TestIDQueryInteractions(unittest.TestCase): def setUp(self) -> None: """Set up the test case.""" self.default_id_keys_config = { - 'name': { - 'required': True, + "name": { + "required": True, }, - 'wavelength': { - 'type': WavelengthRange, + "wavelength": { + "type": WavelengthRange, }, - 'resolution': None, - 'calibration': { - 'enum': [ - 'reflectance', - 'brightness_temperature', - 'radiance', - 'counts' + "resolution": None, + "calibration": { + "enum": [ + "reflectance", + "brightness_temperature", + "radiance", + "counts" ] }, - 'modifiers': { - 'default': ModifierTuple(), - 'type': ModifierTuple, + "modifiers": { + "default": ModifierTuple(), + "type": ModifierTuple, }, } def test_hash_equality(self): """Test hash equality.""" - dq = DataQuery(modifiers=tuple(), name='cheese_shops') - did = DataID(self.default_id_keys_config, name='cheese_shops') + dq = DataQuery(modifiers=tuple(), name="cheese_shops") + did = DataID(self.default_id_keys_config, name="cheese_shops") assert hash(dq) == hash(did) def test_id_filtering(self): """Check did filtering.""" - dq = DataQuery(modifiers=tuple(), name='cheese_shops') - did = DataID(self.default_id_keys_config, name='cheese_shops') - did2 = DataID(self.default_id_keys_config, name='ni') + dq = DataQuery(modifiers=tuple(), name="cheese_shops") + did = DataID(self.default_id_keys_config, name="cheese_shops") + did2 = DataID(self.default_id_keys_config, name="ni") res = dq.filter_dataids([did2, did]) assert len(res) == 1 assert res[0] == did dataid_container = [DataID(self.default_id_keys_config, - name='ds1', + name="ds1", resolution=250, - calibration='reflectance', + calibration="reflectance", modifiers=tuple())] dq = DataQuery(wavelength=0.22, modifiers=tuple()) assert len(dq.filter_dataids(dataid_container)) == 0 dataid_container = [DataID(minimal_default_keys_config, - name='natural_color')] - dq = DataQuery(name='natural_color', resolution=250) + name="natural_color")] + dq = 
DataQuery(name="natural_color", resolution=250) assert len(dq.filter_dataids(dataid_container)) == 1 - dq = make_dsq(wavelength=0.22, modifiers=('mod1',)) - did = make_cid(name='static_image') + dq = make_dsq(wavelength=0.22, modifiers=("mod1",)) + did = make_cid(name="static_image") assert len(dq.filter_dataids([did])) == 0 def test_inequality(self): @@ -617,70 +617,70 @@ def test_inequality(self): def test_sort_dataids(self): """Check dataid sorting.""" - dq = DataQuery(name='cheese_shops', wavelength=2, modifiers='*') - did = DataID(self.default_id_keys_config, name='cheese_shops', wavelength=(1, 2, 3)) - did2 = DataID(self.default_id_keys_config, name='cheese_shops', wavelength=(1.1, 2.1, 3.1)) + dq = DataQuery(name="cheese_shops", wavelength=2, modifiers="*") + did = DataID(self.default_id_keys_config, name="cheese_shops", wavelength=(1, 2, 3)) + did2 = DataID(self.default_id_keys_config, name="cheese_shops", wavelength=(1.1, 2.1, 3.1)) dsids, distances = dq.sort_dataids([did2, did]) assert list(dsids) == [did, did2] assert np.allclose(distances, [0, 0.1]) - dq = DataQuery(name='cheese_shops') - did = DataID(self.default_id_keys_config, name='cheese_shops', resolution=200) - did2 = DataID(self.default_id_keys_config, name='cheese_shops', resolution=400) + dq = DataQuery(name="cheese_shops") + did = DataID(self.default_id_keys_config, name="cheese_shops", resolution=200) + did2 = DataID(self.default_id_keys_config, name="cheese_shops", resolution=400) dsids, distances = dq.sort_dataids([did2, did]) assert list(dsids) == [did, did2] assert distances[0] < distances[1] - did = DataID(self.default_id_keys_config, name='cheese_shops', calibration='counts') - did2 = DataID(self.default_id_keys_config, name='cheese_shops', calibration='reflectance') + did = DataID(self.default_id_keys_config, name="cheese_shops", calibration="counts") + did2 = DataID(self.default_id_keys_config, name="cheese_shops", calibration="reflectance") dsids, distances = dq.sort_dataids([did2, did]) assert list(dsids) == [did2, did] assert distances[0] < distances[1] - did = DataID(self.default_id_keys_config, name='cheese_shops', modifiers=tuple()) - did2 = DataID(self.default_id_keys_config, name='cheese_shops', modifiers=tuple(['out_of_stock'])) + did = DataID(self.default_id_keys_config, name="cheese_shops", modifiers=tuple()) + did2 = DataID(self.default_id_keys_config, name="cheese_shops", modifiers=tuple(["out_of_stock"])) dsids, distances = dq.sort_dataids([did2, did]) assert list(dsids) == [did, did2] assert distances[0] < distances[1] def test_sort_dataids_with_different_set_of_keys(self): """Check sorting data ids when the query has a different set of keys.""" - dq = DataQuery(name='solar_zenith_angle', calibration='reflectance') - dids = [DataID(self.default_id_keys_config, name='solar_zenith_angle', resolution=1000, modifiers=()), - DataID(self.default_id_keys_config, name='solar_zenith_angle', resolution=500, modifiers=()), - DataID(self.default_id_keys_config, name='solar_zenith_angle', resolution=250, modifiers=())] + dq = DataQuery(name="solar_zenith_angle", calibration="reflectance") + dids = [DataID(self.default_id_keys_config, name="solar_zenith_angle", resolution=1000, modifiers=()), + DataID(self.default_id_keys_config, name="solar_zenith_angle", resolution=500, modifiers=()), + DataID(self.default_id_keys_config, name="solar_zenith_angle", resolution=250, modifiers=())] dsids, distances = dq.sort_dataids(dids) assert distances[0] < distances[1] assert distances[1] < distances[2] def 
test_seviri_hrv_has_priority_over_vis008(self): """Check that the HRV channel has priority over VIS008 when querying 0.8µm.""" - dids = [DataID(self.default_id_keys_config, name='HRV', - wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit='µm'), resolution=1000.134348869, + dids = [DataID(self.default_id_keys_config, name="HRV", + wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit="µm"), resolution=1000.134348869, calibration="reflectance", modifiers=()), - DataID(self.default_id_keys_config, name='HRV', - wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit='µm'), resolution=1000.134348869, + DataID(self.default_id_keys_config, name="HRV", + wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit="µm"), resolution=1000.134348869, calibration="radiance", modifiers=()), - DataID(self.default_id_keys_config, name='HRV', - wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit='µm'), resolution=1000.134348869, + DataID(self.default_id_keys_config, name="HRV", + wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit="µm"), resolution=1000.134348869, calibration="counts", modifiers=()), - DataID(self.default_id_keys_config, name='VIS006', - wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit='µm'), + DataID(self.default_id_keys_config, name="VIS006", + wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit="µm"), resolution=3000.403165817, calibration="reflectance", modifiers=()), - DataID(self.default_id_keys_config, name='VIS006', - wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit='µm'), + DataID(self.default_id_keys_config, name="VIS006", + wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit="µm"), resolution=3000.403165817, calibration="radiance", modifiers=()), - DataID(self.default_id_keys_config, name='VIS006', - wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit='µm'), + DataID(self.default_id_keys_config, name="VIS006", + wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit="µm"), resolution=3000.403165817, calibration="counts", modifiers=()), - DataID(self.default_id_keys_config, name='VIS008', - wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit='µm'), + DataID(self.default_id_keys_config, name="VIS008", + wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit="µm"), resolution=3000.403165817, calibration="reflectance", modifiers=()), - DataID(self.default_id_keys_config, name='VIS008', - wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit='µm'), + DataID(self.default_id_keys_config, name="VIS008", + wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit="µm"), resolution=3000.403165817, calibration="radiance", modifiers=()), - DataID(self.default_id_keys_config, name='VIS008', - wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit='µm'), + DataID(self.default_id_keys_config, name="VIS008", + wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit="µm"), resolution=3000.403165817, calibration="counts", modifiers=())] dq = DataQuery(wavelength=0.8) res, distances = dq.sort_dataids(dids) @@ -694,14 +694,14 @@ def test_frequency_quadruple_side_band_class_method_convert(): res = frq_qdsb.convert(57.37) assert res == 57.37 - res = frq_qdsb.convert({'central': 57.0, 'side': 0.322, 'sideside': 0.05, 'bandwidth': 0.036}) + res = frq_qdsb.convert({"central": 57.0, "side": 0.322, "sideside": 0.05, "bandwidth": 0.036}) assert res == FrequencyQuadrupleSideBand(57, 0.322, 0.05, 
0.036) def test_frequency_quadruple_side_band_channel_str(): """Test the frequency quadruple side band object: test the band description.""" frq_qdsb1 = FrequencyQuadrupleSideBand(57.0, 0.322, 0.05, 0.036) - frq_qdsb2 = FrequencyQuadrupleSideBand(57000, 322, 50, 36, 'MHz') + frq_qdsb2 = FrequencyQuadrupleSideBand(57000, 322, 50, 36, "MHz") assert str(frq_qdsb1) == "central=57.0 GHz ±0.322 ±0.05 width=0.036 GHz" assert str(frq_qdsb2) == "central=57000 MHz ±322 ±50 width=36 MHz" @@ -735,8 +735,8 @@ def test_frequency_quadruple_side_band_channel_distances(): frq_qdsb = FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.036) mydist = frq_qdsb.distance([57, 0.322, 0.05, 0.036]) - frq_dict = {'central': 57, 'side': 0.322, 'sideside': 0.05, - 'bandwidth': 0.036, 'unit': 'GHz'} + frq_dict = {"central": 57, "side": 0.322, "sideside": 0.05, + "bandwidth": 0.036, "unit": "GHz"} mydist = frq_qdsb.distance(frq_dict) assert mydist == np.inf @@ -769,7 +769,7 @@ def test_frequency_quadruple_side_band_channel_containment(): frq_qdsb = None assert (frq_qdsb in FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.05)) is False - assert '57' not in FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.05) + assert "57" not in FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.05) def test_frequency_double_side_band_class_method_convert(): @@ -779,14 +779,14 @@ def test_frequency_double_side_band_class_method_convert(): res = frq_dsb.convert(185) assert res == 185 - res = frq_dsb.convert({'central': 185, 'side': 7, 'bandwidth': 2}) + res = frq_dsb.convert({"central": 185, "side": 7, "bandwidth": 2}) assert res == FrequencyDoubleSideBand(185, 7, 2) def test_frequency_double_side_band_channel_str(): """Test the frequency double side band object: test the band description.""" frq_dsb1 = FrequencyDoubleSideBand(183, 7, 2) - frq_dsb2 = FrequencyDoubleSideBand(183000, 7000, 2000, 'MHz') + frq_dsb2 = FrequencyDoubleSideBand(183000, 7000, 2000, "MHz") assert str(frq_dsb1) == "central=183 GHz ±7 width=2 GHz" assert str(frq_dsb2) == "central=183000 MHz ±7000 width=2000 MHz" @@ -846,12 +846,12 @@ def test_frequency_double_side_band_channel_containment(): assert frq_range not in FrequencyDoubleSideBand(183, 4, 2) with pytest.raises(NotImplementedError): - assert frq_range in FrequencyDoubleSideBand(183, 6.5, 3, 'MHz') + assert frq_range in FrequencyDoubleSideBand(183, 6.5, 3, "MHz") frq_range = None assert (frq_range in FrequencyDoubleSideBand(183, 3, 2)) is False - assert '183' not in FrequencyDoubleSideBand(183, 3, 2) + assert "183" not in FrequencyDoubleSideBand(183, 3, 2) def test_frequency_range_class_method_convert(): @@ -861,14 +861,14 @@ def test_frequency_range_class_method_convert(): res = frq_range.convert(89) assert res == 89 - res = frq_range.convert({'central': 89, 'bandwidth': 2}) + res = frq_range.convert({"central": 89, "bandwidth": 2}) assert res == FrequencyRange(89, 2) def test_frequency_range_class_method_str(): """Test the frequency range object: test the band description.""" frq_range1 = FrequencyRange(89, 2) - frq_range2 = FrequencyRange(89000, 2000, 'MHz') + frq_range2 = FrequencyRange(89000, 2000, "MHz") assert str(frq_range1) == "central=89 GHz width=2 GHz" assert str(frq_range2) == "central=89000 MHz width=2000 MHz" @@ -882,7 +882,7 @@ def test_frequency_range_channel_equality(): assert 1.2 != frqr assert frqr == (2, 1) - assert frqr == (2, 1, 'GHz') + assert frqr == (2, 1, "GHz") def test_frequency_range_channel_containment(): @@ -892,12 +892,12 @@ def test_frequency_range_channel_containment(): assert 2.8 not in frqr 
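# ---------------------------------------------------------------------------
# Illustrative aside, not part of the patch: a minimal sketch of the
# frequency-container semantics the surrounding tests exercise. The import
# path is an assumption based on satpy's current layout and may differ
# between versions.
from satpy.readers.pmw_channels_definitions import FrequencyRange

frqr = FrequencyRange(89, 2)  # central frequency 89, bandwidth 2, unit defaults to GHz
# A plain dict with matching keys converts to an equal object:
assert frqr.convert({"central": 89, "bandwidth": 2}) == FrequencyRange(89, 2)
# The string form spells out the central frequency, width and unit:
assert str(frqr) == "central=89 GHz width=2 GHz"
# By analogy with the tuple-equality test above, a unit-carrying tuple
# compares equal as well:
assert frqr == (89, 2, "GHz")
# ---------------------------------------------------------------------------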
with pytest.raises(NotImplementedError): - assert frqr in FrequencyRange(89, 2, 'MHz') + assert frqr in FrequencyRange(89, 2, "MHz") frqr = None assert (frqr in FrequencyRange(89, 2)) is False - assert '89' not in FrequencyRange(89, 2) + assert "89" not in FrequencyRange(89, 2) def test_frequency_range_channel_distances(): @@ -920,7 +920,7 @@ def test_wavelength_range(): assert 1.2 == wr assert .9 != wr assert wr == (1, 2, 3) - assert wr == (1, 2, 3, 'µm') + assert wr == (1, 2, 3, "µm") # Check containement assert 1.2 in wr @@ -929,11 +929,11 @@ def test_wavelength_range(): assert WavelengthRange(1.1, 2.2, 3.3) not in wr assert WavelengthRange(1.2, 2, 2.8) in wr assert WavelengthRange(10, 20, 30) not in wr - assert 'bla' not in wr + assert "bla" not in wr assert None not in wr - wr2 = WavelengthRange(1, 2, 3, 'µm') + wr2 = WavelengthRange(1, 2, 3, "µm") assert wr2 in wr - wr2 = WavelengthRange(1, 2, 3, 'nm') + wr2 = WavelengthRange(1, 2, 3, "nm") with pytest.raises(NotImplementedError): wr2 in wr # noqa diff --git a/satpy/tests/test_demo.py b/satpy/tests/test_demo.py index 976b6bbd6e..7ed3a3ac43 100644 --- a/satpy/tests/test_demo.py +++ b/satpy/tests/test_demo.py @@ -58,7 +58,7 @@ def __call__(self, pattern): except IndexError: num_results = self.num_results[-1] self.current_call += 1 - return [pattern + '.{:03d}'.format(idx) for idx in range(num_results)] + return [pattern + ".{:03d}".format(idx) for idx in range(num_results)] class TestDemo(unittest.TestCase): @@ -80,26 +80,26 @@ def tearDown(self): except OSError: pass - @mock.patch('satpy.demo._google_cloud_platform.gcsfs') + @mock.patch("satpy.demo._google_cloud_platform.gcsfs") def test_get_us_midlatitude_cyclone_abi(self, gcsfs_mod): """Test data download function.""" from satpy.demo import get_us_midlatitude_cyclone_abi gcsfs_mod.GCSFileSystem = mock.MagicMock() gcsfs_inst = mock.MagicMock() gcsfs_mod.GCSFileSystem.return_value = gcsfs_inst - gcsfs_inst.glob.return_value = ['a.nc', 'b.nc'] + gcsfs_inst.glob.return_value = ["a.nc", "b.nc"] # expected 16 files, got 2 self.assertRaises(AssertionError, get_us_midlatitude_cyclone_abi) # unknown access method - self.assertRaises(NotImplementedError, get_us_midlatitude_cyclone_abi, method='unknown') + self.assertRaises(NotImplementedError, get_us_midlatitude_cyclone_abi, method="unknown") - gcsfs_inst.glob.return_value = ['a.nc'] * 16 + gcsfs_inst.glob.return_value = ["a.nc"] * 16 filenames = get_us_midlatitude_cyclone_abi() - expected = os.path.join('.', 'abi_l1b', '20190314_us_midlatitude_cyclone', 'a.nc') + expected = os.path.join(".", "abi_l1b", "20190314_us_midlatitude_cyclone", "a.nc") for fn in filenames: self.assertEqual(expected, fn) - @mock.patch('satpy.demo._google_cloud_platform.gcsfs') + @mock.patch("satpy.demo._google_cloud_platform.gcsfs") def test_get_hurricane_florence_abi(self, gcsfs_mod): """Test data download function.""" from satpy.demo import get_hurricane_florence_abi @@ -110,7 +110,7 @@ def test_get_hurricane_florence_abi(self, gcsfs_mod): gcsfs_inst.glob.side_effect = _GlobHelper([5, 0]) # expected 16 files * 10 frames, got 16 * 5 self.assertRaises(AssertionError, get_hurricane_florence_abi) - self.assertRaises(NotImplementedError, get_hurricane_florence_abi, method='unknown') + self.assertRaises(NotImplementedError, get_hurricane_florence_abi, method="unknown") gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi() @@ -132,63 +132,63 @@ def test_get_hurricane_florence_abi(self, gcsfs_mod): class 
TestGCPUtils(unittest.TestCase): """Test Google Cloud Platform utilities.""" - @mock.patch('satpy.demo._google_cloud_platform.urlopen') + @mock.patch("satpy.demo._google_cloud_platform.urlopen") def test_is_gcp_instance(self, uo): """Test is_google_cloud_instance.""" from satpy.demo._google_cloud_platform import URLError, is_google_cloud_instance uo.side_effect = URLError("Test Environment") self.assertFalse(is_google_cloud_instance()) - @mock.patch('satpy.demo._google_cloud_platform.gcsfs') + @mock.patch("satpy.demo._google_cloud_platform.gcsfs") def test_get_bucket_files(self, gcsfs_mod): """Test get_bucket_files basic cases.""" from satpy.demo._google_cloud_platform import get_bucket_files gcsfs_mod.GCSFileSystem = mock.MagicMock() gcsfs_inst = mock.MagicMock() gcsfs_mod.GCSFileSystem.return_value = gcsfs_inst - gcsfs_inst.glob.return_value = ['a.nc', 'b.nc'] - filenames = get_bucket_files('*.nc', '.') - expected = [os.path.join('.', 'a.nc'), os.path.join('.', 'b.nc')] + gcsfs_inst.glob.return_value = ["a.nc", "b.nc"] + filenames = get_bucket_files("*.nc", ".") + expected = [os.path.join(".", "a.nc"), os.path.join(".", "b.nc")] self.assertEqual(expected, filenames) gcsfs_inst.glob.side_effect = _GlobHelper(10) - filenames = get_bucket_files(['*.nc', '*.txt'], '.', pattern_slice=slice(2, 5)) + filenames = get_bucket_files(["*.nc", "*.txt"], ".", pattern_slice=slice(2, 5)) self.assertEqual(len(filenames), 3 * 2) gcsfs_inst.glob.side_effect = None # reset mock side effect - gcsfs_inst.glob.return_value = ['a.nc', 'b.nc'] - self.assertRaises(OSError, get_bucket_files, '*.nc', 'does_not_exist') + gcsfs_inst.glob.return_value = ["a.nc", "b.nc"] + self.assertRaises(OSError, get_bucket_files, "*.nc", "does_not_exist") - open('a.nc', 'w').close() # touch the file + open("a.nc", "w").close() # touch the file gcsfs_inst.get.reset_mock() - gcsfs_inst.glob.return_value = ['a.nc'] - filenames = get_bucket_files('*.nc', '.') - self.assertEqual([os.path.join('.', 'a.nc')], filenames) + gcsfs_inst.glob.return_value = ["a.nc"] + filenames = get_bucket_files("*.nc", ".") + self.assertEqual([os.path.join(".", "a.nc")], filenames) gcsfs_inst.get.assert_not_called() # force redownload gcsfs_inst.get.reset_mock() - gcsfs_inst.glob.return_value = ['a.nc'] - filenames = get_bucket_files('*.nc', '.', force=True) - self.assertEqual([os.path.join('.', 'a.nc')], filenames) + gcsfs_inst.glob.return_value = ["a.nc"] + filenames = get_bucket_files("*.nc", ".", force=True) + self.assertEqual([os.path.join(".", "a.nc")], filenames) gcsfs_inst.get.assert_called_once() # if we don't get any results then we expect an exception gcsfs_inst.get.reset_mock() gcsfs_inst.glob.return_value = [] - self.assertRaises(OSError, get_bucket_files, '*.nc', '.') + self.assertRaises(OSError, get_bucket_files, "*.nc", ".") - @mock.patch('satpy.demo._google_cloud_platform.gcsfs', None) + @mock.patch("satpy.demo._google_cloud_platform.gcsfs", None) def test_no_gcsfs(self): """Test that 'gcsfs' is required.""" from satpy.demo._google_cloud_platform import get_bucket_files - self.assertRaises(RuntimeError, get_bucket_files, '*.nc', '.') + self.assertRaises(RuntimeError, get_bucket_files, "*.nc", ".") class TestAHIDemoDownload: """Test the AHI demo data download.""" - @mock.patch.dict(sys.modules, {'s3fs': mock.MagicMock()}) + @mock.patch.dict(sys.modules, {"s3fs": mock.MagicMock()}) def test_ahi_full_download(self): """Test that the himawari download works as expected.""" from tempfile import gettempdir @@ -197,7 +197,7 @@ def 
test_ahi_full_download(self): files = download_typhoon_surigae_ahi(base_dir=gettempdir()) assert len(files) == 160 - @mock.patch.dict(sys.modules, {'s3fs': mock.MagicMock()}) + @mock.patch.dict(sys.modules, {"s3fs": mock.MagicMock()}) def test_ahi_partial_download(self): """Test that the himawari download works as expected.""" from tempfile import gettempdir @@ -274,7 +274,7 @@ def iter_content(self, chunk_size): x = bytes_io.read(chunk_size) -@mock.patch('satpy.demo.utils.requests') +@mock.patch("satpy.demo.utils.requests") class TestVIIRSSDRDemoDownload: """Test VIIRS SDR downloading.""" @@ -415,7 +415,7 @@ def setUp(self): self.subdir = os.path.join(".", "seviri_hrit", "20180228_1500") self.files = generate_subset_of_filenames(base_dir=self.subdir) - self.patcher = mock.patch('satpy.demo.utils.requests.get', autospec=True) + self.patcher = mock.patch("satpy.demo.utils.requests.get", autospec=True) self.get_mock = self.patcher.start() _FakeRequest.requests_log = [] @@ -450,12 +450,12 @@ def test_download_a_subset_of_files(self): with mock_filesystem(): files = download_seviri_hrit_20180228_1500(subset={"HRV": [1, 2, 3], "IR_108": [1, 2], "EPI": None}) assert set(files) == set(os.path.join(self.subdir, filename) for filename in [ - 'H-000-MSG4__-MSG4________-_________-EPI______-201802281500-__', - 'H-000-MSG4__-MSG4________-HRV______-000001___-201802281500-__', - 'H-000-MSG4__-MSG4________-HRV______-000002___-201802281500-__', - 'H-000-MSG4__-MSG4________-HRV______-000003___-201802281500-__', - 'H-000-MSG4__-MSG4________-IR_108___-000001___-201802281500-__', - 'H-000-MSG4__-MSG4________-IR_108___-000002___-201802281500-__', + "H-000-MSG4__-MSG4________-_________-EPI______-201802281500-__", + "H-000-MSG4__-MSG4________-HRV______-000001___-201802281500-__", + "H-000-MSG4__-MSG4________-HRV______-000002___-201802281500-__", + "H-000-MSG4__-MSG4________-HRV______-000003___-201802281500-__", + "H-000-MSG4__-MSG4________-IR_108___-000001___-201802281500-__", + "H-000-MSG4__-MSG4________-IR_108___-000002___-201802281500-__", ]) def test_do_not_download_same_file_twice(self): diff --git a/satpy/tests/test_dependency_tree.py b/satpy/tests/test_dependency_tree.py index 415a927cc5..57b718963f 100644 --- a/satpy/tests/test_dependency_tree.py +++ b/satpy/tests/test_dependency_tree.py @@ -59,7 +59,7 @@ def setUp(self): self.dependency_tree.add_leaf(dependency_2_1, node_dependency_2) # We don't need to add the unmodified dependency a second time. 
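# ---------------------------------------------------------------------------
# Illustrative aside, not part of the patch: a minimal sketch of the test-ID
# helpers used throughout this setUp. make_dataid and make_cid come from
# satpy.tests.utils as imported in these test modules; the keys shown mirror
# the calls in the surrounding hunk.
from satpy.tests.utils import make_cid, make_dataid

did = make_dataid(name="ds2", resolution=250, calibration="reflectance", modifiers=tuple())
cid = make_cid(name="comp13")  # composite IDs carry a reduced key set
assert did["resolution"] == 250  # DataID behaves like a read-only mapping
# ---------------------------------------------------------------------------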
- dependency_3 = make_dataid(name='ds2', resolution=250, calibration="reflectance", modifiers=tuple()) + dependency_3 = make_dataid(name="ds2", resolution=250, calibration="reflectance", modifiers=tuple()) self.dependency_tree.add_leaf(dependency_3, node_composite_1) @staticmethod @@ -78,7 +78,7 @@ def test_copy_preserves_all_nodes(self): new_dependency_tree.trunk()) # make sure that we can get access to sub-nodes - c13_id = make_cid(name='comp13') + c13_id = make_cid(name="comp13") assert self._nodes_equal(self.dependency_tree.trunk(limit_nodes_to=[c13_id]), new_dependency_tree.trunk(limit_nodes_to=[c13_id])) @@ -104,14 +104,14 @@ class TestMissingDependencies(unittest.TestCase): def test_new_missing_dependencies(self): """Test new MissingDependencies.""" from satpy.node import MissingDependencies - error = MissingDependencies('bla') - assert error.missing_dependencies == 'bla' + error = MissingDependencies("bla") + assert error.missing_dependencies == "bla" def test_new_missing_dependencies_with_message(self): """Test new MissingDependencies with a message.""" from satpy.node import MissingDependencies - error = MissingDependencies('bla', "This is a message") - assert 'This is a message' in str(error) + error = MissingDependencies("bla", "This is a message") + assert "This is a message" in str(error) class TestMultipleResolutionSameChannelDependency(unittest.TestCase): @@ -126,27 +126,27 @@ def test_modis_overview_1000m(self): from satpy.modifiers.geometry import SunZenithCorrector from satpy.readers.yaml_reader import FileYAMLReader - config_file = os.path.join(PACKAGE_CONFIG_PATH, 'readers', 'modis_l1b.yaml') + config_file = os.path.join(PACKAGE_CONFIG_PATH, "readers", "modis_l1b.yaml") self.reader_instance = FileYAMLReader.from_config_files(config_file) - overview = {'_satpy_id': make_dataid(name='overview'), - 'name': 'overview', - 'optional_prerequisites': [], - 'prerequisites': [DataQuery(name='1', modifiers=('sunz_corrected',)), - DataQuery(name='2', modifiers=('sunz_corrected',)), - DataQuery(name='31')], - 'standard_name': 'overview'} - compositors = {'modis': DatasetDict()} - compositors['modis']['overview'] = GenericCompositor(**overview) - - modifiers = {'modis': {'sunz_corrected': (SunZenithCorrector, - {'optional_prerequisites': ['solar_zenith_angle'], - 'name': 'sunz_corrected', - 'prerequisites': []})}} - dep_tree = DependencyTree({'modis_l1b': self.reader_instance}, compositors, modifiers) - dep_tree.populate_with_keys({'overview'}, DataQuery(resolution=1000)) + overview = {"_satpy_id": make_dataid(name="overview"), + "name": "overview", + "optional_prerequisites": [], + "prerequisites": [DataQuery(name="1", modifiers=("sunz_corrected",)), + DataQuery(name="2", modifiers=("sunz_corrected",)), + DataQuery(name="31")], + "standard_name": "overview"} + compositors = {"modis": DatasetDict()} + compositors["modis"]["overview"] = GenericCompositor(**overview) + + modifiers = {"modis": {"sunz_corrected": (SunZenithCorrector, + {"optional_prerequisites": ["solar_zenith_angle"], + "name": "sunz_corrected", + "prerequisites": []})}} + dep_tree = DependencyTree({"modis_l1b": self.reader_instance}, compositors, modifiers) + dep_tree.populate_with_keys({"overview"}, DataQuery(resolution=1000)) for key in dep_tree._all_nodes.keys(): - assert key.get('resolution', 1000) == 1000 + assert key.get("resolution", 1000) == 1000 class TestMultipleSensors(unittest.TestCase): @@ -194,18 +194,18 @@ def __call__(self, *args, **kwargs): # create the dictionary one element at a time to force 
"incorrect" order # (sensor2 comes before sensor1, but results should be alphabetical order) compositors = {} - compositors['sensor2'] = s2_comps = DatasetDict() - compositors['sensor1'] = s1_comps = DatasetDict() - c1_s2_id = make_cid(name='comp1', resolution=1000) - c1_s1_id = make_cid(name='comp1', resolution=500) + compositors["sensor2"] = s2_comps = DatasetDict() + compositors["sensor1"] = s1_comps = DatasetDict() + c1_s2_id = make_cid(name="comp1", resolution=1000) + c1_s1_id = make_cid(name="comp1", resolution=500) s2_comps[c1_s2_id] = comp1_sensor2 s1_comps[c1_s1_id] = comp1_sensor1 modifiers = {} - modifiers['sensor2'] = s2_mods = {} - modifiers['sensor1'] = s1_mods = {} - s2_mods['mod1'] = (_FakeModifier, {'ret_val': 2}) - s1_mods['mod1'] = (_FakeModifier, {'ret_val': 1}) + modifiers["sensor2"] = s2_mods = {} + modifiers["sensor1"] = s1_mods = {} + s2_mods["mod1"] = (_FakeModifier, {"ret_val": 2}) + s1_mods["mod1"] = (_FakeModifier, {"ret_val": 1}) self.dependency_tree = DependencyTree({}, compositors, modifiers) # manually add a leaf so we don't have to mock a reader @@ -214,7 +214,7 @@ def __call__(self, *args, **kwargs): def test_compositor_loaded_sensor_order(self): """Test that a compositor is loaded from the first alphabetical sensor.""" - self.dependency_tree.populate_with_keys({'comp1'}) + self.dependency_tree.populate_with_keys({"comp1"}) comp_nodes = self.dependency_tree.trunk() self.assertEqual(len(comp_nodes), 1) self.assertEqual(comp_nodes[0].name["resolution"], 500) @@ -222,7 +222,7 @@ def test_compositor_loaded_sensor_order(self): def test_modifier_loaded_sensor_order(self): """Test that a modifier is loaded from the first alphabetical sensor.""" from satpy import DataQuery - dq = DataQuery(name='ds5', modifiers=('mod1',)) + dq = DataQuery(name="ds5", modifiers=("mod1",)) self.dependency_tree.populate_with_keys({dq}) comp_nodes = self.dependency_tree.trunk() self.assertEqual(len(comp_nodes), 1) diff --git a/satpy/tests/test_file_handlers.py b/satpy/tests/test_file_handlers.py index 46f4a16784..4282bc86b1 100644 --- a/satpy/tests/test_file_handlers.py +++ b/satpy/tests/test_file_handlers.py @@ -32,7 +32,7 @@ def test_open_dataset(): """Test xr.open_dataset wrapper.""" fn = mock.MagicMock() str_file_path = "path/to/file.nc" - with mock.patch('xarray.open_dataset') as xr_open: + with mock.patch("xarray.open_dataset") as xr_open: _ = open_dataset(fn, decode_cf=True, chunks=500) fn.open.assert_called_once_with() xr_open.assert_called_once_with(fn.open(), decode_cf=True, chunks=500) @@ -48,105 +48,105 @@ class TestBaseFileHandler(unittest.TestCase): def setUp(self): """Set up the test.""" self.fh = BaseFileHandler( - 'filename', {'filename_info': 'bla'}, 'filetype_info') + "filename", {"filename_info": "bla"}, "filetype_info") def test_combine_times(self): """Combine times.""" - info1 = {'start_time': 1} - info2 = {'start_time': 2} + info1 = {"start_time": 1} + info2 = {"start_time": 2} res = self.fh.combine_info([info1, info2]) - exp = {'start_time': 1} + exp = {"start_time": 1} self.assertDictEqual(res, exp) res = self.fh.combine_info([info2, info1]) - exp = {'start_time': 1} + exp = {"start_time": 1} self.assertDictEqual(res, exp) - info1 = {'end_time': 1} - info2 = {'end_time': 2} + info1 = {"end_time": 1} + info2 = {"end_time": 2} res = self.fh.combine_info([info1, info2]) - exp = {'end_time': 2} + exp = {"end_time": 2} self.assertDictEqual(res, exp) res = self.fh.combine_info([info2, info1]) - exp = {'end_time': 2} + exp = {"end_time": 2} self.assertDictEqual(res, 
exp) def test_combine_orbits(self): """Combine orbits.""" - info1 = {'start_orbit': 1} - info2 = {'start_orbit': 2} + info1 = {"start_orbit": 1} + info2 = {"start_orbit": 2} res = self.fh.combine_info([info1, info2]) - exp = {'start_orbit': 1} + exp = {"start_orbit": 1} self.assertDictEqual(res, exp) res = self.fh.combine_info([info2, info1]) - exp = {'start_orbit': 1} + exp = {"start_orbit": 1} self.assertDictEqual(res, exp) - info1 = {'end_orbit': 1} - info2 = {'end_orbit': 2} + info1 = {"end_orbit": 1} + info2 = {"end_orbit": 2} res = self.fh.combine_info([info1, info2]) - exp = {'end_orbit': 2} + exp = {"end_orbit": 2} self.assertDictEqual(res, exp) res = self.fh.combine_info([info2, info1]) - exp = {'end_orbit': 2} + exp = {"end_orbit": 2} self.assertDictEqual(res, exp) - @mock.patch('satpy.readers.file_handlers.SwathDefinition') + @mock.patch("satpy.readers.file_handlers.SwathDefinition") def test_combine_area(self, sdef): """Combine area.""" area1 = mock.MagicMock() area1.lons = np.arange(5) area1.lats = np.arange(5) - area1.name = 'area1' + area1.name = "area1" area2 = mock.MagicMock() area2.lons = np.arange(5) area2.lats = np.arange(5) - area2.name = 'area2' + area2.name = "area2" - info1 = {'area': area1} - info2 = {'area': area2} + info1 = {"area": area1} + info2 = {"area": area2} self.fh.combine_info([info1, info2]) - self.assertTupleEqual(sdef.call_args[1]['lons'].shape, (2, 5)) - self.assertTupleEqual(sdef.call_args[1]['lats'].shape, (2, 5)) - self.assertEqual(sdef.return_value.name, 'area1_area2') + self.assertTupleEqual(sdef.call_args[1]["lons"].shape, (2, 5)) + self.assertTupleEqual(sdef.call_args[1]["lats"].shape, (2, 5)) + self.assertEqual(sdef.return_value.name, "area1_area2") def test_combine_orbital_parameters(self): """Combine orbital parameters.""" - info1 = {'orbital_parameters': {'projection_longitude': 1, - 'projection_latitude': 1, - 'projection_altitude': 1, - 'satellite_nominal_longitude': 1, - 'satellite_nominal_latitude': 1, - 'satellite_actual_longitude': 1, - 'satellite_actual_latitude': 1, - 'satellite_actual_altitude': 1, - 'nadir_longitude': 1, - 'nadir_latitude': 1, - 'only_in_1': False}} - info2 = {'orbital_parameters': {'projection_longitude': 2, - 'projection_latitude': 2, - 'projection_altitude': 2, - 'satellite_nominal_longitude': 2, - 'satellite_nominal_latitude': 2, - 'satellite_actual_longitude': 2, - 'satellite_actual_latitude': 2, - 'satellite_actual_altitude': 2, - 'nadir_longitude': 2, - 'nadir_latitude': 2, - 'only_in_2': True}} - exp = {'orbital_parameters': {'projection_longitude': 1.5, - 'projection_latitude': 1.5, - 'projection_altitude': 1.5, - 'satellite_nominal_longitude': 1.5, - 'satellite_nominal_latitude': 1.5, - 'satellite_actual_longitude': 1.5, - 'satellite_actual_latitude': 1.5, - 'satellite_actual_altitude': 1.5, - 'nadir_longitude': 1.5, - 'nadir_latitude': 1.5, - 'only_in_1': False, - 'only_in_2': True}} + info1 = {"orbital_parameters": {"projection_longitude": 1, + "projection_latitude": 1, + "projection_altitude": 1, + "satellite_nominal_longitude": 1, + "satellite_nominal_latitude": 1, + "satellite_actual_longitude": 1, + "satellite_actual_latitude": 1, + "satellite_actual_altitude": 1, + "nadir_longitude": 1, + "nadir_latitude": 1, + "only_in_1": False}} + info2 = {"orbital_parameters": {"projection_longitude": 2, + "projection_latitude": 2, + "projection_altitude": 2, + "satellite_nominal_longitude": 2, + "satellite_nominal_latitude": 2, + "satellite_actual_longitude": 2, + "satellite_actual_latitude": 2, + 
"satellite_actual_altitude": 2, + "nadir_longitude": 2, + "nadir_latitude": 2, + "only_in_2": True}} + exp = {"orbital_parameters": {"projection_longitude": 1.5, + "projection_latitude": 1.5, + "projection_altitude": 1.5, + "satellite_nominal_longitude": 1.5, + "satellite_nominal_latitude": 1.5, + "satellite_actual_longitude": 1.5, + "satellite_actual_latitude": 1.5, + "satellite_actual_altitude": 1.5, + "nadir_longitude": 1.5, + "nadir_latitude": 1.5, + "only_in_1": False, + "only_in_2": True}} res = self.fh.combine_info([info1, info2]) self.assertDictEqual(res, exp) @@ -159,34 +159,34 @@ def test_combine_orbital_parameters(self): def test_combine_time_parameters(self): """Combine times in 'time_parameters.""" time_params1 = { - 'nominal_start_time': datetime(2020, 1, 1, 12, 0, 0), - 'nominal_end_time': datetime(2020, 1, 1, 12, 2, 30), - 'observation_start_time': datetime(2020, 1, 1, 12, 0, 2, 23821), - 'observation_end_time': datetime(2020, 1, 1, 12, 2, 23, 12348), + "nominal_start_time": datetime(2020, 1, 1, 12, 0, 0), + "nominal_end_time": datetime(2020, 1, 1, 12, 2, 30), + "observation_start_time": datetime(2020, 1, 1, 12, 0, 2, 23821), + "observation_end_time": datetime(2020, 1, 1, 12, 2, 23, 12348), } time_params2 = {} time_shift = timedelta(seconds=1.5) for key, value in time_params1.items(): time_params2[key] = value + time_shift res = self.fh.combine_info([ - {'time_parameters': time_params1}, - {'time_parameters': time_params2} + {"time_parameters": time_params1}, + {"time_parameters": time_params2} ]) - res_time_params = res['time_parameters'] - assert res_time_params['nominal_start_time'] == datetime(2020, 1, 1, 12, 0, 0) - assert res_time_params['nominal_end_time'] == datetime(2020, 1, 1, 12, 2, 31, 500000) - assert res_time_params['observation_start_time'] == datetime(2020, 1, 1, 12, 0, 2, 23821) - assert res_time_params['observation_end_time'] == datetime(2020, 1, 1, 12, 2, 24, 512348) + res_time_params = res["time_parameters"] + assert res_time_params["nominal_start_time"] == datetime(2020, 1, 1, 12, 0, 0) + assert res_time_params["nominal_end_time"] == datetime(2020, 1, 1, 12, 2, 31, 500000) + assert res_time_params["observation_start_time"] == datetime(2020, 1, 1, 12, 0, 2, 23821) + assert res_time_params["observation_end_time"] == datetime(2020, 1, 1, 12, 2, 24, 512348) def test_file_is_kept_intact(self): """Test that the file object passed (string, path, or other) is kept intact.""" open_file = mock.MagicMock() - bfh = BaseFileHandler(open_file, {'filename_info': 'bla'}, 'filetype_info') + bfh = BaseFileHandler(open_file, {"filename_info": "bla"}, "filetype_info") assert bfh.filename == open_file from pathlib import Path - filename = Path('/bla/bla.nc') - bfh = BaseFileHandler(filename, {'filename_info': 'bla'}, 'filetype_info') + filename = Path("/bla/bla.nc") + bfh = BaseFileHandler(filename, {"filename_info": "bla"}, "filetype_info") assert isinstance(bfh.filename, Path) diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index c21a514808..04d32b7ecc 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -30,43 +30,43 @@ def _sunz_area_def(): """Get fake area for testing sunz generation.""" - area = AreaDefinition('test', 'test', 'test', - {'proj': 'merc'}, 2, 2, + area = AreaDefinition("test", "test", "test", + {"proj": "merc"}, 2, 2, (-2000, -2000, 2000, 2000)) return area def _sunz_bigger_area_def(): """Get area that is twice the size of 'sunz_area_def'.""" - bigger_area = AreaDefinition('test', 'test', 'test', - 
{'proj': 'merc'}, 4, 4, + bigger_area = AreaDefinition("test", "test", "test", + {"proj": "merc"}, 4, 4, (-2000, -2000, 2000, 2000)) return bigger_area def _sunz_stacked_area_def(): """Get fake stacked area for testing sunz generation.""" - area1 = AreaDefinition('test', 'test', 'test', - {'proj': 'merc'}, 2, 1, + area1 = AreaDefinition("test", "test", "test", + {"proj": "merc"}, 2, 1, (-2000, 0, 2000, 2000)) - area2 = AreaDefinition('test', 'test', 'test', - {'proj': 'merc'}, 2, 1, + area2 = AreaDefinition("test", "test", "test", + {"proj": "merc"}, 2, 1, (-2000, -2000, 2000, 0)) return StackedAreaDefinition(area1, area2) def _shared_sunz_attrs(area_def): - attrs = {'area': area_def, - 'start_time': datetime(2018, 1, 1, 18), - 'modifiers': tuple(), - 'name': 'test_vis'} + attrs = {"area": area_def, + "start_time": datetime(2018, 1, 1, 18), + "modifiers": tuple(), + "name": "test_vis"} return attrs def _get_ds1(attrs): ds1 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64), - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}) + attrs=attrs, dims=("y", "x"), + coords={"y": [0, 1], "x": [0, 1]}) return ds1 @@ -89,8 +89,8 @@ def sunz_ds2(): """Generate larger fake dataset for sunz tests.""" attrs = _shared_sunz_attrs(_sunz_bigger_area_def()) ds2 = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64), - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 0.5, 1, 1.5], 'x': [0, 0.5, 1, 1.5]}) + attrs=attrs, dims=("y", "x"), + coords={"y": [0, 0.5, 1, 1.5], "x": [0, 0.5, 1, 1.5]}) return ds2 @@ -100,9 +100,9 @@ def sunz_sza(): sza = xr.DataArray( np.rad2deg(np.arccos(da.from_array([[0.0149581333, 0.0146694376], [0.0150812684, 0.0147925727]], chunks=2))), - attrs={'area': _sunz_area_def()}, - dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}, + attrs={"area": _sunz_area_def()}, + dims=("y", "x"), + coords={"y": [0, 1], "x": [0, 1]}, ) return sza @@ -113,47 +113,47 @@ class TestSunZenithCorrector: def test_basic_default_not_provided(self, sunz_ds1): """Test default limits when SZA isn't provided.""" from satpy.modifiers.geometry import SunZenithCorrector - comp = SunZenithCorrector(name='sza_test', modifiers=tuple()) - res = comp((sunz_ds1,), test_attr='test') + comp = SunZenithCorrector(name="sza_test", modifiers=tuple()) + res = comp((sunz_ds1,), test_attr="test") np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) - assert 'y' in res.coords - assert 'x' in res.coords - ds1 = sunz_ds1.copy().drop_vars(('y', 'x')) - res = comp((ds1,), test_attr='test') + assert "y" in res.coords + assert "x" in res.coords + ds1 = sunz_ds1.copy().drop_vars(("y", "x")) + res = comp((ds1,), test_attr="test") np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) - assert 'y' not in res.coords - assert 'x' not in res.coords + assert "y" not in res.coords + assert "x" not in res.coords def test_basic_lims_not_provided(self, sunz_ds1): """Test custom limits when SZA isn't provided.""" from satpy.modifiers.geometry import SunZenithCorrector - comp = SunZenithCorrector(name='sza_test', modifiers=tuple(), correction_limit=90) - res = comp((sunz_ds1,), test_attr='test') + comp = SunZenithCorrector(name="sza_test", modifiers=tuple(), correction_limit=90) + res = comp((sunz_ds1,), test_attr="test") np.testing.assert_allclose(res.values, np.array([[66.853262, 68.168939], [66.30742, 67.601493]])) @pytest.mark.parametrize("data_arr", [lazy_fixture("sunz_ds1"), lazy_fixture("sunz_ds1_stacked")]) def 
test_basic_default_provided(self, data_arr, sunz_sza): """Test default limits when SZA is provided.""" from satpy.modifiers.geometry import SunZenithCorrector - comp = SunZenithCorrector(name='sza_test', modifiers=tuple()) - res = comp((data_arr, sunz_sza), test_attr='test') + comp = SunZenithCorrector(name="sza_test", modifiers=tuple()) + res = comp((data_arr, sunz_sza), test_attr="test") np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) @pytest.mark.parametrize("data_arr", [lazy_fixture("sunz_ds1"), lazy_fixture("sunz_ds1_stacked")]) def test_basic_lims_provided(self, data_arr, sunz_sza): """Test custom limits when SZA is provided.""" from satpy.modifiers.geometry import SunZenithCorrector - comp = SunZenithCorrector(name='sza_test', modifiers=tuple(), correction_limit=90) - res = comp((data_arr, sunz_sza), test_attr='test') + comp = SunZenithCorrector(name="sza_test", modifiers=tuple(), correction_limit=90) + res = comp((data_arr, sunz_sza), test_attr="test") np.testing.assert_allclose(res.values, np.array([[66.853262, 68.168939], [66.30742, 67.601493]])) def test_imcompatible_areas(self, sunz_ds2, sunz_sza): """Test sunz correction on incompatible areas.""" from satpy.composites import IncompatibleAreas from satpy.modifiers.geometry import SunZenithCorrector - comp = SunZenithCorrector(name='sza_test', modifiers=tuple(), correction_limit=90) + comp = SunZenithCorrector(name="sza_test", modifiers=tuple(), correction_limit=90) with pytest.raises(IncompatibleAreas): - comp((sunz_ds2, sunz_sza), test_attr='test') + comp((sunz_ds2, sunz_sza), test_attr="test") class TestNIRReflectance(unittest.TestCase): @@ -167,24 +167,24 @@ def setUp(self): area = mock.MagicMock(get_lonlats=self.get_lonlats) self.start_time = 1 - self.metadata = {'platform_name': 'Meteosat-11', - 'sensor': 'seviri', - 'name': 'IR_039', - 'area': area, - 'start_time': self.start_time} + self.metadata = {"platform_name": "Meteosat-11", + "sensor": "seviri", + "name": "IR_039", + "area": area, + "start_time": self.start_time} nir_arr = np.random.random((2, 2)) - self.nir = xr.DataArray(da.from_array(nir_arr), dims=['y', 'x']) + self.nir = xr.DataArray(da.from_array(nir_arr), dims=["y", "x"]) self.nir.attrs.update(self.metadata) ir_arr = 100 * np.random.random((2, 2)) - self.ir_ = xr.DataArray(da.from_array(ir_arr), dims=['y', 'x']) - self.ir_.attrs['area'] = area + self.ir_ = xr.DataArray(da.from_array(ir_arr), dims=["y", "x"]) + self.ir_.attrs["area"] = area self.sunz_arr = 100 * np.random.random((2, 2)) - self.sunz = xr.DataArray(da.from_array(self.sunz_arr), dims=['y', 'x']) - self.sunz.attrs['standard_name'] = 'solar_zenith_angle' - self.sunz.attrs['area'] = area + self.sunz = xr.DataArray(da.from_array(self.sunz_arr), dims=["y", "x"]) + self.sunz.attrs["standard_name"] = "solar_zenith_angle" + self.sunz.attrs["area"] = area self.da_sunz = da.from_array(self.sunz_arr) refl_arr = np.random.random((2, 2)) @@ -200,9 +200,9 @@ def fake_refl_from_tbs(self, sun_zenith, da_nir, da_tb11, tb_ir_co2=None): return self.refl_with_co2 return self.refl - @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') - @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') - @mock.patch('satpy.modifiers.spectral.Calculator') + @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") + @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") + @mock.patch("satpy.modifiers.spectral.Calculator") def test_provide_sunz_no_co2(self, calculator, apply_modifier_info, sza): """Test 
NIR reflectance compositor provided only sunz.""" calculator.return_value = mock.MagicMock( @@ -210,18 +210,18 @@ def test_provide_sunz_no_co2(self, calculator, apply_modifier_info, sza): sza.return_value = self.da_sunz from satpy.modifiers.spectral import NIRReflectance - comp = NIRReflectance(name='test') - info = {'modifiers': None} + comp = NIRReflectance(name="test") + info = {"modifiers": None} res = comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) assert self.metadata.items() <= res.attrs.items() - assert res.attrs['units'] == '%' - assert res.attrs['sun_zenith_threshold'] is not None + assert res.attrs["units"] == "%" + assert res.attrs["sun_zenith_threshold"] is not None assert np.allclose(res.data, self.refl * 100).compute() - @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') - @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') - @mock.patch('satpy.modifiers.spectral.Calculator') + @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") + @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") + @mock.patch("satpy.modifiers.spectral.Calculator") def test_no_sunz_no_co2(self, calculator, apply_modifier_info, sza): """Test NIR reflectance compositor with minimal parameters.""" calculator.return_value = mock.MagicMock( @@ -229,8 +229,8 @@ def test_no_sunz_no_co2(self, calculator, apply_modifier_info, sza): sza.return_value = self.da_sunz from satpy.modifiers.spectral import NIRReflectance - comp = NIRReflectance(name='test') - info = {'modifiers': None} + comp = NIRReflectance(name="test") + info = {"modifiers": None} res = comp([self.nir, self.ir_], optional_datasets=[], **info) # due to copying of DataArrays, self.get_lonlats is not the same as the one that was called @@ -240,9 +240,9 @@ def test_no_sunz_no_co2(self, calculator, apply_modifier_info, sza): self.refl_from_tbs.assert_called_with(self.da_sunz, self.nir.data, self.ir_.data, tb_ir_co2=None) assert np.allclose(res.data, self.refl * 100).compute() - @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') - @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') - @mock.patch('satpy.modifiers.spectral.Calculator') + @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") + @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") + @mock.patch("satpy.modifiers.spectral.Calculator") def test_no_sunz_with_co2(self, calculator, apply_modifier_info, sza): """Test NIR reflectance compositor provided extra co2 info.""" calculator.return_value = mock.MagicMock( @@ -250,20 +250,20 @@ def test_no_sunz_with_co2(self, calculator, apply_modifier_info, sza): from satpy.modifiers.spectral import NIRReflectance sza.return_value = self.da_sunz - comp = NIRReflectance(name='test') - info = {'modifiers': None} + comp = NIRReflectance(name="test") + info = {"modifiers": None} co2_arr = np.random.random((2, 2)) - co2 = xr.DataArray(da.from_array(co2_arr), dims=['y', 'x']) - co2.attrs['wavelength'] = [12.0, 13.0, 14.0] - co2.attrs['units'] = 'K' + co2 = xr.DataArray(da.from_array(co2_arr), dims=["y", "x"]) + co2.attrs["wavelength"] = [12.0, 13.0, 14.0] + co2.attrs["units"] = "K" res = comp([self.nir, self.ir_], optional_datasets=[co2], **info) self.refl_from_tbs.assert_called_with(self.da_sunz, self.nir.data, self.ir_.data, tb_ir_co2=co2.data) assert np.allclose(res.data, self.refl_with_co2 * 100).compute() - @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') - @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') - 
@mock.patch('satpy.modifiers.spectral.Calculator') + @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") + @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") + @mock.patch("satpy.modifiers.spectral.Calculator") def test_provide_sunz_and_threshold(self, calculator, apply_modifier_info, sza): """Test NIR reflectance compositor provided sunz and a sunz threshold.""" calculator.return_value = mock.MagicMock( @@ -271,32 +271,32 @@ def test_provide_sunz_and_threshold(self, calculator, apply_modifier_info, sza): from satpy.modifiers.spectral import NIRReflectance sza.return_value = self.da_sunz - comp = NIRReflectance(name='test', sunz_threshold=84.0) - info = {'modifiers': None} + comp = NIRReflectance(name="test", sunz_threshold=84.0) + info = {"modifiers": None} res = comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) - self.assertEqual(res.attrs['sun_zenith_threshold'], 84.0) - calculator.assert_called_with('Meteosat-11', 'seviri', 'IR_039', + self.assertEqual(res.attrs["sun_zenith_threshold"], 84.0) + calculator.assert_called_with("Meteosat-11", "seviri", "IR_039", sunz_threshold=84.0, masking_limit=NIRReflectance.MASKING_LIMIT) - @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') - @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') - @mock.patch('satpy.modifiers.spectral.Calculator') + @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") + @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") + @mock.patch("satpy.modifiers.spectral.Calculator") def test_sunz_threshold_default_value_is_not_none(self, calculator, apply_modifier_info, sza): """Check that sun_zenith_threshold is not None.""" from satpy.modifiers.spectral import NIRReflectance - comp = NIRReflectance(name='test') - info = {'modifiers': None} + comp = NIRReflectance(name="test") + info = {"modifiers": None} calculator.return_value = mock.MagicMock( reflectance_from_tbs=self.refl_from_tbs) comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) assert comp.sun_zenith_threshold is not None - @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') - @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') - @mock.patch('satpy.modifiers.spectral.Calculator') + @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") + @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") + @mock.patch("satpy.modifiers.spectral.Calculator") def test_provide_masking_limit(self, calculator, apply_modifier_info, sza): """Test NIR reflectance compositor provided sunz and a sunz threshold.""" calculator.return_value = mock.MagicMock( @@ -304,23 +304,23 @@ def test_provide_masking_limit(self, calculator, apply_modifier_info, sza): from satpy.modifiers.spectral import NIRReflectance sza.return_value = self.da_sunz - comp = NIRReflectance(name='test', masking_limit=None) - info = {'modifiers': None} + comp = NIRReflectance(name="test", masking_limit=None) + info = {"modifiers": None} res = comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) - self.assertIsNone(res.attrs['sun_zenith_masking_limit']) - calculator.assert_called_with('Meteosat-11', 'seviri', 'IR_039', + self.assertIsNone(res.attrs["sun_zenith_masking_limit"]) + calculator.assert_called_with("Meteosat-11", "seviri", "IR_039", sunz_threshold=NIRReflectance.TERMINATOR_LIMIT, masking_limit=None) - @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') - @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') - @mock.patch('satpy.modifiers.spectral.Calculator') + 
@mock.patch("satpy.modifiers.spectral.sun_zenith_angle") + @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") + @mock.patch("satpy.modifiers.spectral.Calculator") def test_masking_limit_default_value_is_not_none(self, calculator, apply_modifier_info, sza): """Check that sun_zenith_threshold is not None.""" from satpy.modifiers.spectral import NIRReflectance - comp = NIRReflectance(name='test') - info = {'modifiers': None} + comp = NIRReflectance(name="test") + info = {"modifiers": None} calculator.return_value = mock.MagicMock( reflectance_from_tbs=self.refl_from_tbs) comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) @@ -331,9 +331,9 @@ def test_masking_limit_default_value_is_not_none(self, calculator, apply_modifie class TestNIREmissivePartFromReflectance(unittest.TestCase): """Test the NIR Emissive part from reflectance compositor.""" - @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') - @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') - @mock.patch('satpy.modifiers.spectral.Calculator') + @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") + @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") + @mock.patch("satpy.modifiers.spectral.Calculator") def test_compositor(self, calculator, apply_modifier_info, sza): """Test the NIR emissive part from reflectance compositor.""" from satpy.modifiers.spectral import NIRReflectance @@ -353,12 +353,12 @@ def test_compositor(self, calculator, apply_modifier_info, sza): from satpy.modifiers.spectral import NIREmissivePartFromReflectance - comp = NIREmissivePartFromReflectance(name='test', sunz_threshold=86.0) - info = {'modifiers': None} + comp = NIREmissivePartFromReflectance(name="test", sunz_threshold=86.0) + info = {"modifiers": None} - platform = 'NOAA-20' - sensor = 'viirs' - chan_name = 'M12' + platform = "NOAA-20" + sensor = "viirs" + chan_name = "M12" get_lonlats = mock.MagicMock() lons, lats = 1, 2 @@ -366,29 +366,29 @@ def test_compositor(self, calculator, apply_modifier_info, sza): area = mock.MagicMock(get_lonlats=get_lonlats) nir_arr = np.random.random((2, 2)) - nir = xr.DataArray(da.from_array(nir_arr), dims=['y', 'x']) - nir.attrs['platform_name'] = platform - nir.attrs['sensor'] = sensor - nir.attrs['name'] = chan_name - nir.attrs['area'] = area + nir = xr.DataArray(da.from_array(nir_arr), dims=["y", "x"]) + nir.attrs["platform_name"] = platform + nir.attrs["sensor"] = sensor + nir.attrs["name"] = chan_name + nir.attrs["area"] = area ir_arr = np.random.random((2, 2)) - ir_ = xr.DataArray(da.from_array(ir_arr), dims=['y', 'x']) - ir_.attrs['area'] = area + ir_ = xr.DataArray(da.from_array(ir_arr), dims=["y", "x"]) + ir_.attrs["area"] = area sunz_arr = 100 * np.random.random((2, 2)) - sunz = xr.DataArray(da.from_array(sunz_arr), dims=['y', 'x']) - sunz.attrs['standard_name'] = 'solar_zenith_angle' - sunz.attrs['area'] = area + sunz = xr.DataArray(da.from_array(sunz_arr), dims=["y", "x"]) + sunz.attrs["standard_name"] = "solar_zenith_angle" + sunz.attrs["area"] = area sunz2 = da.from_array(sunz_arr) sza.return_value = sunz2 res = comp([nir, ir_], optional_datasets=[sunz], **info) - self.assertEqual(res.attrs['sun_zenith_threshold'], 86.0) - self.assertEqual(res.attrs['units'], 'K') - self.assertEqual(res.attrs['platform_name'], platform) - self.assertEqual(res.attrs['sensor'], sensor) - self.assertEqual(res.attrs['name'], chan_name) - calculator.assert_called_with('NOAA-20', 'viirs', 'M12', sunz_threshold=86.0, + self.assertEqual(res.attrs["sun_zenith_threshold"], 
86.0) + self.assertEqual(res.attrs["units"], "K") + self.assertEqual(res.attrs["platform_name"], platform) + self.assertEqual(res.attrs["sensor"], sensor) + self.assertEqual(res.attrs["name"], chan_name) + calculator.assert_called_with("NOAA-20", "viirs", "M12", sunz_threshold=86.0, masking_limit=NIRReflectance.MASKING_LIMIT) @@ -400,9 +400,9 @@ def _make_data_area(self): rows = 3 cols = 5 area = AreaDefinition( - 'some_area_name', 'On-the-fly area', 'geosabii', - {'a': '6378137.0', 'b': '6356752.31414', 'h': '35786023.0', 'lon_0': '-89.5', 'proj': 'geos', 'sweep': 'x', - 'units': 'm'}, + "some_area_name", "On-the-fly area", "geosabii", + {"a": "6378137.0", "b": "6356752.31414", "h": "35786023.0", "lon_0": "-89.5", "proj": "geos", "sweep": "x", + "units": "m"}, cols, rows, (-5434894.954752679, -5434894.964451744, 5434894.964451744, 5434894.954752679)) @@ -415,46 +415,46 @@ def _make_data_area(self): def _create_test_data(self, name, wavelength, resolution): area, dnb = self._make_data_area() input_band = xr.DataArray(dnb, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'platform_name': 'Himawari-8', - 'calibration': 'reflectance', 'units': '%', 'wavelength': wavelength, - 'name': name, 'resolution': resolution, 'sensor': 'ahi', - 'start_time': '2017-09-20 17:30:40.800000', - 'end_time': '2017-09-20 17:41:17.500000', - 'area': area, 'ancillary_variables': [], - 'orbital_parameters': { - 'satellite_nominal_longitude': -89.5, - 'satellite_nominal_latitude': 0.0, - 'satellite_nominal_altitude': 35786023.4375, + "platform_name": "Himawari-8", + "calibration": "reflectance", "units": "%", "wavelength": wavelength, + "name": name, "resolution": resolution, "sensor": "ahi", + "start_time": "2017-09-20 17:30:40.800000", + "end_time": "2017-09-20 17:41:17.500000", + "area": area, "ancillary_variables": [], + "orbital_parameters": { + "satellite_nominal_longitude": -89.5, + "satellite_nominal_latitude": 0.0, + "satellite_nominal_altitude": 35786023.4375, }, }) red_band = xr.DataArray(dnb, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'platform_name': 'Himawari-8', - 'calibration': 'reflectance', 'units': '%', 'wavelength': (0.62, 0.64, 0.66), - 'name': 'B03', 'resolution': 500, 'sensor': 'ahi', - 'start_time': '2017-09-20 17:30:40.800000', - 'end_time': '2017-09-20 17:41:17.500000', - 'area': area, 'ancillary_variables': [], - 'orbital_parameters': { - 'satellite_nominal_longitude': -89.5, - 'satellite_nominal_latitude': 0.0, - 'satellite_nominal_altitude': 35786023.4375, + "platform_name": "Himawari-8", + "calibration": "reflectance", "units": "%", "wavelength": (0.62, 0.64, 0.66), + "name": "B03", "resolution": 500, "sensor": "ahi", + "start_time": "2017-09-20 17:30:40.800000", + "end_time": "2017-09-20 17:41:17.500000", + "area": area, "ancillary_variables": [], + "orbital_parameters": { + "satellite_nominal_longitude": -89.5, + "satellite_nominal_latitude": 0.0, + "satellite_nominal_altitude": 35786023.4375, }, }) fake_angle_data = da.ones_like(dnb, dtype=np.float32) * 90.0 angle1 = xr.DataArray(fake_angle_data, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'platform_name': 'Himawari-8', - 'calibration': 'reflectance', 'units': '%', 'wavelength': wavelength, - 'name': "satellite_azimuth_angle", 'resolution': resolution, 'sensor': 'ahi', - 'start_time': '2017-09-20 17:30:40.800000', - 'end_time': '2017-09-20 17:41:17.500000', - 'area': area, 'ancillary_variables': [], + "platform_name": "Himawari-8", + "calibration": "reflectance", "units": "%", "wavelength": wavelength, + "name": 
"satellite_azimuth_angle", "resolution": resolution, "sensor": "ahi", + "start_time": "2017-09-20 17:30:40.800000", + "end_time": "2017-09-20 17:41:17.500000", + "area": area, "ancillary_variables": [], }) return input_band, red_band, angle1, angle1, angle1, angle1 @@ -480,15 +480,15 @@ def test_rayleigh_corrector(self, name, wavelength, resolution, aerosol_type, re reduce_strength, exp_mean, exp_unique): """Test PSPRayleighReflectance with fake data.""" from satpy.modifiers.atmosphere import PSPRayleighReflectance - ray_cor = PSPRayleighReflectance(name=name, atmosphere='us-standard', aerosol_types=aerosol_type, + ray_cor = PSPRayleighReflectance(name=name, atmosphere="us-standard", aerosol_types=aerosol_type, reduce_lim_low=reduce_lim_low, reduce_lim_high=reduce_lim_high, reduce_strength=reduce_strength) - assert ray_cor.attrs['name'] == name - assert ray_cor.attrs['atmosphere'] == 'us-standard' - assert ray_cor.attrs['aerosol_types'] == aerosol_type - assert ray_cor.attrs['reduce_lim_low'] == reduce_lim_low - assert ray_cor.attrs['reduce_lim_high'] == reduce_lim_high - assert ray_cor.attrs['reduce_strength'] == reduce_strength + assert ray_cor.attrs["name"] == name + assert ray_cor.attrs["atmosphere"] == "us-standard" + assert ray_cor.attrs["aerosol_types"] == aerosol_type + assert ray_cor.attrs["reduce_lim_low"] == reduce_lim_low + assert ray_cor.attrs["reduce_lim_high"] == reduce_lim_high + assert ray_cor.attrs["reduce_strength"] == reduce_strength input_band, red_band, *_ = self._create_test_data(name, wavelength, resolution) res = ray_cor([input_band, red_band]) @@ -507,7 +507,7 @@ def test_rayleigh_with_angles(self, as_optionals): """Test PSPRayleighReflectance with angles provided.""" from satpy.modifiers.atmosphere import PSPRayleighReflectance aerosol_type = "rayleigh_only" - ray_cor = PSPRayleighReflectance(name="B01", atmosphere='us-standard', aerosol_types=aerosol_type) + ray_cor = PSPRayleighReflectance(name="B01", atmosphere="us-standard", aerosol_types=aerosol_type) prereqs, opt_prereqs = self._get_angles_prereqs_and_opts(as_optionals) with mock.patch("satpy.modifiers.atmosphere.get_angles") as get_angles: res = ray_cor(prereqs, opt_prereqs) @@ -558,15 +558,15 @@ def test_call(self): "nadir_latitude": 0.0, } band = xr.DataArray(da.zeros((5, 5)), - attrs={'area': area, - 'start_time': stime, - 'name': 'name', - 'platform_name': 'platform', - 'sensor': 'sensor', - 'orbital_parameters': orb_params}, - dims=('y', 'x')) + attrs={"area": area, + "start_time": stime, + "name": "name", + "platform_name": "platform", + "sensor": "sensor", + "orbital_parameters": orb_params}, + dims=("y", "x")) # Perform atmospherical correction - psp = PSPAtmosphericalCorrection(name='dummy') + psp = PSPAtmosphericalCorrection(name="dummy") res = psp(projectables=[band]) res.compute() diff --git a/satpy/tests/test_node.py b/satpy/tests/test_node.py index 8a41082266..7475b04d24 100644 --- a/satpy/tests/test_node.py +++ b/satpy/tests/test_node.py @@ -62,7 +62,7 @@ class TestCompositorNode(unittest.TestCase): def setUp(self): """Set up the test case.""" - self.name = 'hej' + self.name = "hej" self.fake = FakeCompositor(self.name) self.c_node = CompositorNode(self.fake) diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index db50900cad..3b2888565b 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -40,27 +40,27 @@ os.environ.pop("PPP_CONFIG_DIR", None) os.environ.pop("SATPY_CONFIG_PATH", None) -local_id_keys_config = {'name': { - 'required': True, 
+local_id_keys_config = {"name": { + "required": True, }, - 'wavelength': { - 'type': WavelengthRange, + "wavelength": { + "type": WavelengthRange, }, - 'resolution': None, - 'calibration': { - 'enum': [ - 'reflectance', - 'brightness_temperature', - 'radiance', - 'counts' + "resolution": None, + "calibration": { + "enum": [ + "reflectance", + "brightness_temperature", + "radiance", + "counts" ] }, - 'polarization': None, - 'level': None, - 'modifiers': { - 'required': True, - 'default': ModifierTuple(), - 'type': ModifierTuple, + "polarization": None, + "level": None, + "modifiers": { + "required": True, + "default": ModifierTuple(), + "type": ModifierTuple, }, } @@ -70,22 +70,22 @@ @pytest.fixture def viirs_file(tmp_path, monkeypatch): """Create a dummy viirs file.""" - filename = 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' + filename = "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5" monkeypatch.chdir(tmp_path) # touch the file so it exists on disk - open(filename, 'w').close() + open(filename, "w").close() return filename @pytest.fixture def atms_file(tmp_path, monkeypatch): """Create a dummy atms file.""" - filename = 'SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5' + filename = "SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5" monkeypatch.chdir(tmp_path) # touch the file so it exists on disk - open(filename, 'w').close() + open(filename, "w").close() return filename @@ -120,11 +120,11 @@ def setUp(self): calibration="reflectance", polarization="H"): "4refl", make_dataid(name="test5", - modifiers=('mod1', 'mod2')): "5_2mod", + modifiers=("mod1", "mod2")): "5_2mod", make_dataid(name="test5", - modifiers=('mod2',)): "5_1mod", - make_dataid(name='test6', level=100): '6_100', - make_dataid(name='test6', level=200): '6_200', + modifiers=("mod2",)): "5_1mod", + make_dataid(name="test6", level=100): "6_100", + make_dataid(name="test6", level=200): "6_200", } self.test_dict = DatasetDict(regular_dict) @@ -154,31 +154,31 @@ def test_getitem(self): # access by near wavelength of another dataset self.assertEqual(d[1.65], "3") # access by name with multiple levels - self.assertEqual(d['test6'], '6_100') + self.assertEqual(d["test6"], "6_100") self.assertEqual(d[make_dsq(wavelength=1.5)], "2") self.assertEqual(d[make_dsq(wavelength=0.5, resolution=1000)], "1") self.assertEqual(d[make_dsq(wavelength=0.5, resolution=500)], "1h") - self.assertEqual(d[make_dsq(name='test6', level=100)], '6_100') - self.assertEqual(d[make_dsq(name='test6', level=200)], '6_200') + self.assertEqual(d[make_dsq(name="test6", level=100)], "6_100") + self.assertEqual(d[make_dsq(name="test6", level=200)], "6_200") # higher resolution is returned self.assertEqual(d[0.5], "1h") - self.assertEqual(d['test4'], '4refl') - self.assertEqual(d[make_dataid(name='test4', calibration='radiance')], '4rad') - self.assertRaises(KeyError, d.getitem, '1h') + self.assertEqual(d["test4"], "4refl") + self.assertEqual(d[make_dataid(name="test4", calibration="radiance")], "4rad") + self.assertRaises(KeyError, d.getitem, "1h") # test with full tuple - self.assertEqual(d[make_dsq(name='test', wavelength=(0, 0.5, 1), resolution=1000)], "1") + self.assertEqual(d[make_dsq(name="test", wavelength=(0, 0.5, 1), resolution=1000)], "1") def test_get_key(self): """Test 'get_key' special functions.""" from satpy.dataset import DataQuery d = self.test_dict - res1 = get_key(make_dataid(name='test4'), d, calibration='radiance') - 
res2 = get_key(make_dataid(name='test4'), d, calibration='radiance', + res1 = get_key(make_dataid(name="test4"), d, calibration="radiance") + res2 = get_key(make_dataid(name="test4"), d, calibration="radiance", num_results=0) - res3 = get_key(make_dataid(name='test4'), d, calibration='radiance', + res3 = get_key(make_dataid(name="test4"), d, calibration="radiance", num_results=3) self.assertEqual(len(res2), 1) self.assertEqual(len(res3), 1) @@ -186,43 +186,43 @@ def test_get_key(self): res3 = res3[0] self.assertEqual(res1, res2) self.assertEqual(res1, res3) - res1 = get_key('test4', d, query=DataQuery(polarization='V')) - self.assertEqual(res1, make_dataid(name='test4', calibration='radiance', - polarization='V')) + res1 = get_key("test4", d, query=DataQuery(polarization="V")) + self.assertEqual(res1, make_dataid(name="test4", calibration="radiance", + polarization="V")) res1 = get_key(0.5, d, query=DataQuery(resolution=500)) - self.assertEqual(res1, make_dataid(name='testh', + self.assertEqual(res1, make_dataid(name="testh", wavelength=(0, 0.5, 1), resolution=500)) - res1 = get_key('test6', d, query=DataQuery(level=100)) - self.assertEqual(res1, make_dataid(name='test6', + res1 = get_key("test6", d, query=DataQuery(level=100)) + self.assertEqual(res1, make_dataid(name="test6", level=100)) - res1 = get_key('test5', d) - res2 = get_key('test5', d, query=DataQuery(modifiers=('mod2',))) - res3 = get_key('test5', d, query=DataQuery(modifiers=('mod1', 'mod2',))) - self.assertEqual(res1, make_dataid(name='test5', - modifiers=('mod2',))) + res1 = get_key("test5", d) + res2 = get_key("test5", d, query=DataQuery(modifiers=("mod2",))) + res3 = get_key("test5", d, query=DataQuery(modifiers=("mod1", "mod2",))) + self.assertEqual(res1, make_dataid(name="test5", + modifiers=("mod2",))) self.assertEqual(res1, res2) self.assertNotEqual(res1, res3) # more than 1 result when default is to ask for 1 result - self.assertRaises(KeyError, get_key, 'test4', d, best=False) + self.assertRaises(KeyError, get_key, "test4", d, best=False) def test_contains(self): """Test DatasetDict contains method.""" d = self.test_dict - self.assertIn('test', d) - self.assertFalse(d.contains('test')) - self.assertNotIn('test_bad', d) + self.assertIn("test", d) + self.assertFalse(d.contains("test")) + self.assertNotIn("test_bad", d) self.assertIn(0.5, d) self.assertFalse(d.contains(0.5)) self.assertIn(1.5, d) self.assertIn(1.55, d) self.assertIn(1.65, d) - self.assertIn(make_dataid(name='test4', calibration='radiance'), d) - self.assertIn('test4', d) + self.assertIn(make_dataid(name="test4", calibration="radiance"), d) + self.assertIn("test4", d) def test_keys(self): """Test keys method of DatasetDict.""" @@ -232,21 +232,21 @@ def test_keys(self): self.assertTrue(all(isinstance(x, DataID) for x in d.keys())) name_keys = d.keys(names=True) self.assertListEqual(sorted(set(name_keys))[:4], [ - 'test', 'test2', 'test3', 'test4']) + "test", "test2", "test3", "test4"]) wl_keys = tuple(d.keys(wavelengths=True)) self.assertIn((0, 0.5, 1), wl_keys) - self.assertIn((1, 1.5, 2, 'µm'), wl_keys) - self.assertIn((1.2, 1.7, 2.2, 'µm'), wl_keys) + self.assertIn((1, 1.5, 2, "µm"), wl_keys) + self.assertIn((1.2, 1.7, 2.2, "µm"), wl_keys) self.assertIn(None, wl_keys) def test_setitem(self): """Test setitem method of DatasetDict.""" d = self.test_dict - d['new_ds'] = {'metadata': 'new_ds'} - self.assertEqual(d['new_ds']['metadata'], 'new_ds') - d[0.5] = {'calibration': 'radiance'} - self.assertEqual(d[0.5]['resolution'], 500) - 
self.assertEqual(d[0.5]['name'], 'testh') + d["new_ds"] = {"metadata": "new_ds"} + self.assertEqual(d["new_ds"]["metadata"], "new_ds") + d[0.5] = {"calibration": "radiance"} + self.assertEqual(d[0.5]["resolution"], 500) + self.assertEqual(d[0.5]["name"], "testh") class TestReaderLoader(unittest.TestCase): @@ -261,7 +261,7 @@ def setUp(self): from satpy.tests.reader_tests.test_viirs_sdr import FakeHDF5FileHandler2 # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(VIIRSSDRFileHandler, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(VIIRSSDRFileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -281,21 +281,21 @@ def test_no_args(self): def test_filenames_only(self): """Test with filenames specified.""" from satpy.readers import load_readers - ri = load_readers(filenames=['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5']) - self.assertListEqual(list(ri.keys()), ['viirs_sdr']) + ri = load_readers(filenames=["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"]) + self.assertListEqual(list(ri.keys()), ["viirs_sdr"]) def test_filenames_and_reader(self): """Test with filenames and reader specified.""" from satpy.readers import load_readers - ri = load_readers(reader='viirs_sdr', - filenames=['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5']) - self.assertListEqual(list(ri.keys()), ['viirs_sdr']) + ri = load_readers(reader="viirs_sdr", + filenames=["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"]) + self.assertListEqual(list(ri.keys()), ["viirs_sdr"]) def test_bad_reader_name_with_filenames(self): """Test bad reader name with filenames provided.""" from satpy.readers import load_readers - self.assertRaises(ValueError, load_readers, reader='i_dont_exist', filenames=[ - 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + self.assertRaises(ValueError, load_readers, reader="i_dont_exist", filenames=[ + "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) def test_filenames_as_path(self): @@ -304,28 +304,28 @@ def test_filenames_as_path(self): from satpy.readers import load_readers ri = load_readers(filenames=[ - Path('SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'), + Path("SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"), ]) - self.assertListEqual(list(ri.keys()), ['viirs_sdr']) + self.assertListEqual(list(ri.keys()), ["viirs_sdr"]) def test_filenames_as_dict(self): """Test loading readers where filenames are organized by reader.""" from satpy.readers import load_readers filenames = { - 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], + "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], } ri = load_readers(filenames=filenames) - self.assertListEqual(list(ri.keys()), ['viirs_sdr']) + self.assertListEqual(list(ri.keys()), ["viirs_sdr"]) def test_filenames_as_dict_bad_reader(self): """Test loading with filenames dict but one of the readers is bad.""" from satpy.readers import load_readers filenames = { - 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], - '__fake__': ['fake.txt'], + "viirs_sdr": 
["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], + "__fake__": ["fake.txt"], } self.assertRaisesRegex(ValueError, - r'(?=.*__fake__)(?!.*viirs)(^No reader.+)', + r"(?=.*__fake__)(?!.*viirs)(^No reader.+)", load_readers, filenames=filenames) def test_filenames_as_dict_with_reader(self): @@ -337,63 +337,63 @@ def test_filenames_as_dict_with_reader(self): """ from satpy.readers import load_readers filenames = { - 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], + "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], } - ri = load_readers(reader='viirs_sdr', filenames=filenames) - self.assertListEqual(list(ri.keys()), ['viirs_sdr']) + ri = load_readers(reader="viirs_sdr", filenames=filenames) + self.assertListEqual(list(ri.keys()), ["viirs_sdr"]) def test_empty_filenames_as_dict(self): """Test passing filenames as a dictionary with an empty list of filenames.""" # only one reader from satpy.readers import load_readers filenames = { - 'viirs_sdr': [], + "viirs_sdr": [], } self.assertRaises(ValueError, load_readers, filenames=filenames) # two readers, one is empty filenames = { - 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], - 'viirs_l1b': [], + "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], + "viirs_l1b": [], } ri = load_readers(filenames) - self.assertListEqual(list(ri.keys()), ['viirs_sdr']) - - @mock.patch('satpy.readers.hrit_base.HRITFileHandler._get_hd') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler._get_header') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.start_time') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.end_time') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGPrologueFileHandler.read_prologue') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue') + self.assertListEqual(list(ri.keys()), ["viirs_sdr"]) + + @mock.patch("satpy.readers.hrit_base.HRITFileHandler._get_hd") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler._get_header") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.start_time") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.end_time") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGPrologueFileHandler.read_prologue") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue") def test_missing_requirements(self, *mocks): """Test warnings and exceptions in case of missing requirements.""" from satpy.readers import load_readers # Filenames from a single scan - epi_pro_miss = ['H-000-MSG4__-MSG4________-IR_108___-000006___-201809050900-__'] - epi_miss = epi_pro_miss + ['H-000-MSG4__-MSG4________-_________-PRO______-201809050900-__'] - pro_miss = epi_pro_miss + ['H-000-MSG4__-MSG4________-_________-EPI______-201809050900-__'] + epi_pro_miss = ["H-000-MSG4__-MSG4________-IR_108___-000006___-201809050900-__"] + epi_miss = epi_pro_miss + ["H-000-MSG4__-MSG4________-_________-PRO______-201809050900-__"] + pro_miss = epi_pro_miss + ["H-000-MSG4__-MSG4________-_________-EPI______-201809050900-__"] with warnings.catch_warnings(): warnings.filterwarnings("ignore", message=r"No handler for reading requirement.*", category=UserWarning) for filenames in [epi_miss, pro_miss, epi_pro_miss]: - self.assertRaises(ValueError, load_readers, reader='seviri_l1b_hrit', filenames=filenames) + 
self.assertRaises(ValueError, load_readers, reader="seviri_l1b_hrit", filenames=filenames) # Filenames from multiple scans at_least_one_complete = [ # 09:00 scan is ok - 'H-000-MSG4__-MSG4________-IR_108___-000006___-201809050900-__', - 'H-000-MSG4__-MSG4________-_________-PRO______-201809050900-__', - 'H-000-MSG4__-MSG4________-_________-EPI______-201809050900-__', + "H-000-MSG4__-MSG4________-IR_108___-000006___-201809050900-__", + "H-000-MSG4__-MSG4________-_________-PRO______-201809050900-__", + "H-000-MSG4__-MSG4________-_________-EPI______-201809050900-__", # 10:00 scan is incomplete - 'H-000-MSG4__-MSG4________-IR_108___-000006___-201809051000-__', + "H-000-MSG4__-MSG4________-IR_108___-000006___-201809051000-__", ] with warnings.catch_warnings(): warnings.filterwarnings("ignore", message=r"No matching requirement file.*", category=UserWarning) try: - load_readers(filenames=at_least_one_complete, reader='seviri_l1b_hrit') + load_readers(filenames=at_least_one_complete, reader="seviri_l1b_hrit") except ValueError: - self.fail('If at least one set of filenames is complete, no ' - 'exception should be raised') + self.fail("If at least one set of filenames is complete, no " + "exception should be raised") def test_all_filtered(self): """Test behaviour if no file matches the filter parameters.""" @@ -401,13 +401,13 @@ def test_all_filtered(self): from satpy.readers import load_readers filenames = { - 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], + "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], } - filter_params = {'start_time': datetime.datetime(1970, 1, 1), - 'end_time': datetime.datetime(1970, 1, 2), - 'area': None} + filter_params = {"start_time": datetime.datetime(1970, 1, 1), + "end_time": datetime.datetime(1970, 1, 2), + "area": None} self.assertRaises(ValueError, load_readers, - filenames=filenames, reader_kwargs={'filter_parameters': filter_params}) + filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) def test_all_filtered_multiple(self): """Test behaviour if no file matches the filter parameters.""" @@ -415,13 +415,13 @@ def test_all_filtered_multiple(self): from satpy.readers import load_readers filenames = { - 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], - 'abi_l1b': ['OR_ABI-L1b-RadF-M3C01_G16_s20120561730408_e20120561741175_c20172631741218.nc'], + "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], + "abi_l1b": ["OR_ABI-L1b-RadF-M3C01_G16_s20120561730408_e20120561741175_c20172631741218.nc"], } - filter_params = {'start_time': datetime.datetime(1970, 1, 1), - 'end_time': datetime.datetime(1970, 1, 2)} + filter_params = {"start_time": datetime.datetime(1970, 1, 1), + "end_time": datetime.datetime(1970, 1, 2)} self.assertRaises(ValueError, load_readers, - filenames=filenames, reader_kwargs={'filter_parameters': filter_params}) + filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) def test_almost_all_filtered(self): """Test behaviour if only one reader has datasets.""" @@ -429,17 +429,17 @@ def test_almost_all_filtered(self): from satpy.readers import load_readers filenames = { - 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], - 'abi_l1b': ['OR_ABI-L1b-RadF-M3C01_G16_s20172631730408_e20172631741175_c20172631741218.nc'], + "viirs_sdr": 
["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], + "abi_l1b": ["OR_ABI-L1b-RadF-M3C01_G16_s20172631730408_e20172631741175_c20172631741218.nc"], } - filter_params = {'start_time': datetime.datetime(2012, 2, 25), - 'end_time': datetime.datetime(2012, 2, 26)} + filter_params = {"start_time": datetime.datetime(2012, 2, 25), + "end_time": datetime.datetime(2012, 2, 26)} # viirs has data that matches the request, abi doesn't - readers = load_readers(filenames=filenames, reader_kwargs={'filter_parameters': filter_params}) - self.assertIn('viirs_sdr', readers) + readers = load_readers(filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) + self.assertIn("viirs_sdr", readers) # abi_l1b reader was created, but no datasets available - self.assertIn('abi_l1b', readers) - self.assertEqual(len(list(readers['abi_l1b'].available_dataset_ids)), 0) + self.assertIn("abi_l1b", readers) + self.assertEqual(len(list(readers["abi_l1b"].available_dataset_ids)), 0) class TestFindFilesAndReaders: @@ -451,7 +451,7 @@ def setup_method(self): from satpy.tests.reader_tests.test_viirs_sdr import FakeHDF5FileHandler2 # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(VIIRSSDRFileHandler, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(VIIRSSDRFileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -461,31 +461,31 @@ def teardown_method(self): def test_reader_name(self, viirs_file): """Test with default base_dir and reader specified.""" - ri = find_files_and_readers(reader='viirs_sdr') - assert list(ri.keys()) == ['viirs_sdr'] - assert ri['viirs_sdr'] == [viirs_file] + ri = find_files_and_readers(reader="viirs_sdr") + assert list(ri.keys()) == ["viirs_sdr"] + assert ri["viirs_sdr"] == [viirs_file] def test_reader_other_name(self, monkeypatch, tmp_path): """Test with default base_dir and reader specified.""" - filename = 'S_NWC_CPP_npp_32505_20180204T1114116Z_20180204T1128227Z.nc' + filename = "S_NWC_CPP_npp_32505_20180204T1114116Z_20180204T1128227Z.nc" monkeypatch.chdir(tmp_path) # touch the file so it exists on disk - open(filename, 'w').close() + open(filename, "w").close() - ri = find_files_and_readers(reader='nwcsaf-pps_nc') - assert list(ri.keys()) == ['nwcsaf-pps_nc'] - assert ri['nwcsaf-pps_nc'] == [filename] + ri = find_files_and_readers(reader="nwcsaf-pps_nc") + assert list(ri.keys()) == ["nwcsaf-pps_nc"] + assert ri["nwcsaf-pps_nc"] == [filename] def test_reader_name_matched_start_end_time(self, viirs_file): """Test with start and end time matching the filename.""" from datetime import datetime - ri = find_files_and_readers(reader='viirs_sdr', + ri = find_files_and_readers(reader="viirs_sdr", start_time=datetime(2012, 2, 25, 18, 0, 0), end_time=datetime(2012, 2, 25, 19, 0, 0), ) - assert list(ri.keys()) == ['viirs_sdr'] - assert ri['viirs_sdr'] == [viirs_file] + assert list(ri.keys()) == ["viirs_sdr"] + assert ri["viirs_sdr"] == [viirs_file] def test_reader_name_matched_start_time(self, viirs_file): """Test with start matching the filename. 
@@ -494,9 +494,9 @@ def test_reader_name_matched_start_time(self, viirs_file):
         """
         from datetime import datetime

-        ri = find_files_and_readers(reader='viirs_sdr', start_time=datetime(2012, 2, 25, 18, 1, 30))
-        assert list(ri.keys()) == ['viirs_sdr']
-        assert ri['viirs_sdr'] == [viirs_file]
+        ri = find_files_and_readers(reader="viirs_sdr", start_time=datetime(2012, 2, 25, 18, 1, 30))
+        assert list(ri.keys()) == ["viirs_sdr"]
+        assert ri["viirs_sdr"] == [viirs_file]

     def test_reader_name_matched_end_time(self, viirs_file):
         """Test with end matching the filename.
@@ -506,16 +506,16 @@ def test_reader_name_matched_end_time(self, viirs_file):
         """
         from datetime import datetime

-        ri = find_files_and_readers(reader='viirs_sdr', end_time=datetime(2012, 2, 25, 18, 1, 30))
-        assert list(ri.keys()) == ['viirs_sdr']
-        assert ri['viirs_sdr'] == [viirs_file]
+        ri = find_files_and_readers(reader="viirs_sdr", end_time=datetime(2012, 2, 25, 18, 1, 30))
+        assert list(ri.keys()) == ["viirs_sdr"]
+        assert ri["viirs_sdr"] == [viirs_file]

     def test_reader_name_unmatched_start_end_time(self, viirs_file):
         """Test with start and end time not matching the filename."""
         from datetime import datetime

         with pytest.raises(ValueError):
-            find_files_and_readers(reader='viirs_sdr',
+            find_files_and_readers(reader="viirs_sdr",
                                     start_time=datetime(2012, 2, 26, 18, 0, 0),
                                     end_time=datetime(2012, 2, 26, 19, 0, 0))

@@ -524,8 +524,8 @@ def test_no_parameters(self, viirs_file):
         from satpy.readers import find_files_and_readers

         ri = find_files_and_readers()
-        assert list(ri.keys()) == ['viirs_sdr']
-        assert ri['viirs_sdr'] == [viirs_file]
+        assert list(ri.keys()) == ["viirs_sdr"]
+        assert ri["viirs_sdr"] == [viirs_file]

     def test_no_parameters_both_atms_and_viirs(self, viirs_file, atms_file):
         """Test with no limiting parameters when there are both atms and viirs files in the same directory."""
@@ -533,31 +533,31 @@ def test_no_parameters_both_atms_and_viirs(self, viirs_file, atms_file):

         ri = find_files_and_readers()

-        assert 'atms_sdr_hdf5' in list(ri.keys())
-        assert 'viirs_sdr' in list(ri.keys())
-        assert ri['atms_sdr_hdf5'] == [atms_file]
-        assert ri['viirs_sdr'] == [viirs_file]
+        assert "atms_sdr_hdf5" in list(ri.keys())
+        assert "viirs_sdr" in list(ri.keys())
+        assert ri["atms_sdr_hdf5"] == [atms_file]
+        assert ri["viirs_sdr"] == [viirs_file]

     def test_bad_sensor(self):
         """Test bad sensor doesn't find any files."""
         with pytest.raises(ValueError):
-            find_files_and_readers(sensor='i_dont_exist')
+            find_files_and_readers(sensor="i_dont_exist")

     def test_sensor(self, viirs_file):
         """Test that readers for the current sensor are loaded."""
         # we can't easily know how many readers satpy has that support
         # 'viirs' so we just pass it and hope that this works
-        ri = find_files_and_readers(sensor='viirs')
-        assert list(ri.keys()) == ['viirs_sdr']
-        assert ri['viirs_sdr'] == [viirs_file]
+        ri = find_files_and_readers(sensor="viirs")
+        assert list(ri.keys()) == ["viirs_sdr"]
+        assert ri["viirs_sdr"] == [viirs_file]

     def test_sensor_no_files(self):
         """Test that an error is raised when no files exist for the sensor, unless missing_ok is set."""
         # we can't easily know how many readers satpy has that support
         # 'viirs' so we just pass it and hope that this works
         with pytest.raises(ValueError):
-            find_files_and_readers(sensor='viirs')
-        assert find_files_and_readers(sensor='viirs', missing_ok=True) == {}
+            find_files_and_readers(sensor="viirs")
+        assert find_files_and_readers(sensor="viirs", missing_ok=True) == {}

     def test_reader_load_failed(self):
         """Test that an exception is raised when a reader can't be loaded."""
@@ -566,10 +566,10 @@ def test_reader_load_failed(self):
         import yaml

         from satpy.readers import find_files_and_readers

         # make reading the reader's YAML configuration fail
-        with mock.patch('yaml.load') as load:
+        with mock.patch("yaml.load") as load:
             load.side_effect = yaml.YAMLError("Import problems")
             with pytest.raises(yaml.YAMLError):
-                find_files_and_readers(reader='viirs_sdr')
+                find_files_and_readers(reader="viirs_sdr")

     def test_pending_old_reader_name_mapping(self):
         """Test that requesting pending old reader names raises a warning."""
@@ -602,17 +602,17 @@ def test_filename_matches_reader_name(self):

         class IgnoreLoader(yaml.SafeLoader):

             def _ignore_all_tags(self, tag_suffix, node):
-                return tag_suffix + ' ' + node.value
-        IgnoreLoader.add_multi_constructor('', IgnoreLoader._ignore_all_tags)
+                return tag_suffix + " " + node.value
+        IgnoreLoader.add_multi_constructor("", IgnoreLoader._ignore_all_tags)

         from satpy._config import glob_config
         from satpy.readers import read_reader_config
-        for reader_config in glob_config('readers/*.yaml'):
+        for reader_config in glob_config("readers/*.yaml"):
             reader_fn = os.path.basename(reader_config)
             reader_fn_name = os.path.splitext(reader_fn)[0]
             reader_info = read_reader_config([reader_config], loader=IgnoreLoader)
-            assert reader_fn_name == reader_info['name'], \
+            assert reader_fn_name == reader_info["name"], \
                 "Reader YAML filename doesn't match reader name in the YAML file."

     def test_available_readers(self):
@@ -622,16 +622,16 @@ def test_available_readers(self):
         reader_names = available_readers()
         assert len(reader_names) > 0
         assert isinstance(reader_names[0], str)
-        assert 'viirs_sdr' in reader_names  # needs h5py
-        assert 'abi_l1b' in reader_names  # needs netcdf4
+        assert "viirs_sdr" in reader_names  # needs h5py
+        assert "abi_l1b" in reader_names  # needs netcdf4
         assert reader_names == sorted(reader_names)

         reader_infos = available_readers(as_dict=True)
         assert len(reader_names) == len(reader_infos)
         assert isinstance(reader_infos[0], dict)
         for reader_info in reader_infos:
-            assert 'name' in reader_info
-        assert reader_infos == sorted(reader_infos, key=lambda reader_info: reader_info['name'])
+            assert "name" in reader_info
+        assert reader_infos == sorted(reader_infos, key=lambda reader_info: reader_info["name"])

     def test_available_readers_base_loader(self, monkeypatch):
         """Test the 'available_readers' function for yaml loader type BaseLoader."""
@@ -641,20 +641,20 @@ def test_available_readers_base_loader(self, monkeypatch):
         from satpy._config import glob_config

         def patched_import_error(name, globals=None, locals=None, fromlist=(), level=0):
-            if name in ('netcdf4', ):
+            if name in ("netcdf4", ):
                 raise ImportError(f"Mocked import error {name}")
             return real_import(name, globals=globals, locals=locals, fromlist=fromlist, level=level)

-        monkeypatch.delitem(sys.modules, 'netcdf4', raising=False)
-        monkeypatch.setattr(builtins, '__import__', patched_import_error)
+        monkeypatch.delitem(sys.modules, "netcdf4", raising=False)
+        monkeypatch.setattr(builtins, "__import__", patched_import_error)

         with pytest.raises(ImportError):
             import netcdf4  # noqa: F401

         reader_names = available_readers(yaml_loader=yaml.BaseLoader)
-        assert 'abi_l1b' in reader_names  # needs netcdf4
-        assert 'viirs_l1b' in reader_names
-        assert len(reader_names) == len(list(glob_config('readers/*.yaml')))
+        assert "abi_l1b" in reader_names  # needs netcdf4
+        assert "viirs_l1b" in reader_names
+        assert len(reader_names) == len(list(glob_config("readers/*.yaml")))


 class TestGroupFiles(unittest.TestCase):
@@ -677,7 +677,7 @@ def setUp(self):
             "OR_ABI-L1b-RadC-M3C02_G16_s20171171527203_e20171171529576_c20171171530008.nc",
         ]
         self.g16_files = input_files
-        self.g17_files = [x.replace('G16', 'G17') for x in input_files]
+        self.g17_files = [x.replace("G16", "G17") for x in input_files]
         self.noaa20_files = [
             "GITCO_j01_d20180511_t2027292_e2028538_b02476_c20190530192858056873_noac_ops.h5",
             "GITCO_j01_d20180511_t2028550_e2030195_b02476_c20190530192932937427_noac_ops.h5",
@@ -729,41 +729,41 @@ def test_bad_reader(self):
         from satpy.readers import group_files

         # make reading the reader's YAML configuration fail
-        with mock.patch('yaml.load') as load:
+        with mock.patch("yaml.load") as load:
             load.side_effect = yaml.YAMLError("Import problems")
-            self.assertRaises(yaml.YAMLError, group_files, [], reader='abi_l1b')
+            self.assertRaises(yaml.YAMLError, group_files, [], reader="abi_l1b")

     def test_default_behavior(self):
         """Test the default behavior with the 'abi_l1b' reader."""
         from satpy.readers import group_files
-        groups = group_files(self.g16_files, reader='abi_l1b')
+        groups = group_files(self.g16_files, reader="abi_l1b")
         self.assertEqual(6, len(groups))
-        self.assertEqual(2, len(groups[0]['abi_l1b']))
+        self.assertEqual(2, len(groups[0]["abi_l1b"]))

     def test_default_behavior_set(self):
         """Test the default behavior with the 'abi_l1b' reader when the input is a set."""
         from satpy.readers import group_files
         files = set(self.g16_files)
         num_files = len(files)
-        groups = group_files(files, reader='abi_l1b')
+        groups = group_files(files, reader="abi_l1b")
         # we didn't modify it
         self.assertEqual(len(files), num_files)
         self.assertEqual(6, len(groups))
-        self.assertEqual(2, len(groups[0]['abi_l1b']))
+        self.assertEqual(2, len(groups[0]["abi_l1b"]))

     def test_non_datetime_group_key(self):
         """Test what happens when the start_time isn't used for grouping."""
         from satpy.readers import group_files
-        groups = group_files(self.g16_files, reader='abi_l1b', group_keys=('platform_shortname',))
+        groups = group_files(self.g16_files, reader="abi_l1b", group_keys=("platform_shortname",))
         self.assertEqual(1, len(groups))
-        self.assertEqual(12, len(groups[0]['abi_l1b']))
+        self.assertEqual(12, len(groups[0]["abi_l1b"]))

     def test_large_time_threshold(self):
         """Test what happens when the time threshold holds multiple files."""
         from satpy.readers import group_files
-        groups = group_files(self.g16_files, reader='abi_l1b', time_threshold=60*8)
+        groups = group_files(self.g16_files, reader="abi_l1b", time_threshold=60*8)
         self.assertEqual(3, len(groups))
-        self.assertEqual(4, len(groups[0]['abi_l1b']))
+        self.assertEqual(4, len(groups[0]["abi_l1b"]))

     def test_two_instruments_files(self):
         """Test the behavior when files from two instruments are provided.
@@ -776,9 +776,9 @@ def test_two_instruments_files(self):
         """
         from satpy.readers import group_files
-        groups = group_files(self.g16_files + self.g17_files, reader='abi_l1b', group_keys=('start_time',))
+        groups = group_files(self.g16_files + self.g17_files, reader="abi_l1b", group_keys=("start_time",))
         self.assertEqual(6, len(groups))
-        self.assertEqual(4, len(groups[0]['abi_l1b']))
+        self.assertEqual(4, len(groups[0]["abi_l1b"]))

     def test_two_instruments_files_split(self):
         """Test the default behavior when files from two instruments are provided and split.
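
The TestGroupFiles cases around this hunk exercise satpy.readers.group_files, which buckets input files into per-scene groups using reader-defined keys (start_time and platform_shortname for abi_l1b) and a time threshold in seconds; each resulting group is a dict mapping reader names to file lists that can be handed to a Scene. A minimal usage sketch (the abi_filenames variable and the threshold value are illustrative):

    from satpy.readers import group_files

    # one group per (start_time, platform_shortname) bucket,
    # e.g. [{"abi_l1b": [...]}, {"abi_l1b": [...]}, ...]
    groups = group_files(abi_filenames, reader="abi_l1b",
                         group_keys=("start_time", "platform_shortname"),
                         time_threshold=30)
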
@@ -788,51 +788,51 @@ def test_two_instruments_files_split(self): """ from satpy.readers import group_files - groups = group_files(self.g16_files + self.g17_files, reader='abi_l1b', - group_keys=('start_time', 'platform_shortname')) + groups = group_files(self.g16_files + self.g17_files, reader="abi_l1b", + group_keys=("start_time", "platform_shortname")) self.assertEqual(12, len(groups)) - self.assertEqual(2, len(groups[0]['abi_l1b'])) + self.assertEqual(2, len(groups[0]["abi_l1b"])) # default for abi_l1b should also behave like this - groups = group_files(self.g16_files + self.g17_files, reader='abi_l1b') + groups = group_files(self.g16_files + self.g17_files, reader="abi_l1b") self.assertEqual(12, len(groups)) - self.assertEqual(2, len(groups[0]['abi_l1b'])) + self.assertEqual(2, len(groups[0]["abi_l1b"])) def test_viirs_orbits(self): """Test a reader that doesn't use 'start_time' for default grouping.""" from satpy.readers import group_files - groups = group_files(self.noaa20_files + self.npp_files, reader='viirs_sdr') + groups = group_files(self.noaa20_files + self.npp_files, reader="viirs_sdr") self.assertEqual(2, len(groups)) # the noaa-20 files will be first because the orbit number is smaller # 5 granules * 3 file types - self.assertEqual(5 * 3, len(groups[0]['viirs_sdr'])) + self.assertEqual(5 * 3, len(groups[0]["viirs_sdr"])) # 3 granules * 2 file types - self.assertEqual(6, len(groups[1]['viirs_sdr'])) + self.assertEqual(6, len(groups[1]["viirs_sdr"])) def test_viirs_override_keys(self): """Test overriding a group keys to add 'start_time'.""" from satpy.readers import group_files - groups = group_files(self.noaa20_files + self.npp_files, reader='viirs_sdr', - group_keys=('start_time', 'orbit', 'platform_shortname')) + groups = group_files(self.noaa20_files + self.npp_files, reader="viirs_sdr", + group_keys=("start_time", "orbit", "platform_shortname")) self.assertEqual(8, len(groups)) - self.assertEqual(2, len(groups[0]['viirs_sdr'])) # NPP - self.assertEqual(2, len(groups[1]['viirs_sdr'])) # NPP - self.assertEqual(2, len(groups[2]['viirs_sdr'])) # NPP - self.assertEqual(3, len(groups[3]['viirs_sdr'])) # N20 - self.assertEqual(3, len(groups[4]['viirs_sdr'])) # N20 - self.assertEqual(3, len(groups[5]['viirs_sdr'])) # N20 - self.assertEqual(3, len(groups[6]['viirs_sdr'])) # N20 - self.assertEqual(3, len(groups[7]['viirs_sdr'])) # N20 + self.assertEqual(2, len(groups[0]["viirs_sdr"])) # NPP + self.assertEqual(2, len(groups[1]["viirs_sdr"])) # NPP + self.assertEqual(2, len(groups[2]["viirs_sdr"])) # NPP + self.assertEqual(3, len(groups[3]["viirs_sdr"])) # N20 + self.assertEqual(3, len(groups[4]["viirs_sdr"])) # N20 + self.assertEqual(3, len(groups[5]["viirs_sdr"])) # N20 + self.assertEqual(3, len(groups[6]["viirs_sdr"])) # N20 + self.assertEqual(3, len(groups[7]["viirs_sdr"])) # N20 # Ask for a larger time span with our groups - groups = group_files(self.noaa20_files + self.npp_files, reader='viirs_sdr', + groups = group_files(self.noaa20_files + self.npp_files, reader="viirs_sdr", time_threshold=60 * 60 * 2, - group_keys=('start_time', 'orbit', 'platform_shortname')) + group_keys=("start_time", "orbit", "platform_shortname")) self.assertEqual(2, len(groups)) # NPP is first because it has an earlier time # 3 granules * 2 file types - self.assertEqual(6, len(groups[0]['viirs_sdr'])) + self.assertEqual(6, len(groups[0]["viirs_sdr"])) # 5 granules * 3 file types - self.assertEqual(5 * 3, len(groups[1]['viirs_sdr'])) + self.assertEqual(5 * 3, len(groups[1]["viirs_sdr"])) def 
test_multi_readers(self): """Test passing multiple readers.""" @@ -946,14 +946,14 @@ def _generate_random_string(): def _assert_is_open_file_and_close(opened): try: - assert hasattr(opened, 'tell') + assert hasattr(opened, "tell") finally: opened.close() def _posixify_path(filename): drive, driveless_name = os.path.splitdrive(filename) - return driveless_name.replace('\\', '/') + return driveless_name.replace("\\", "/") class TestFSFile(unittest.TestCase): @@ -975,7 +975,7 @@ def setUp(self): self.local_filename2 = os.path.join(tempfile.gettempdir(), self.random_string2) Path(self.local_filename2).touch() self.zip_name = os.path.join(tempfile.gettempdir(), self.random_string2 + ".zip") - zip_file = zipfile.ZipFile(self.zip_name, 'w', zipfile.ZIP_DEFLATED) + zip_file = zipfile.ZipFile(self.zip_name, "w", zipfile.ZIP_DEFLATED) zip_file.write(self.local_filename2) zip_file.close() os.remove(self.local_filename2) @@ -1057,7 +1057,7 @@ def test_sorting_fsfiles(self): file2 = FSFile(self.local_filename) - extra_file = os.path.normpath('/somedir/bla') + extra_file = os.path.normpath("/somedir/bla") sorted_filenames = [os.fspath(file) for file in sorted([file1, file2, extra_file])] expected_filenames = sorted([extra_file, os.fspath(file1), os.fspath(file2)]) assert sorted_filenames == expected_filenames diff --git a/satpy/tests/test_regressions.py b/satpy/tests/test_regressions.py index f85d9c37be..1f0a4924f8 100644 --- a/satpy/tests/test_regressions.py +++ b/satpy/tests/test_regressions.py @@ -26,22 +26,22 @@ from satpy.tests.utils import make_dataid -abi_file_list = ['/data/OR_ABI-L1b-RadF-M3C01_G16_s20180722030423_e20180722041189_c20180722041235-118900_0.nc', - '/data/OR_ABI-L1b-RadF-M3C02_G16_s20180722030423_e20180722041190_c20180722041228-120000_0.nc', - '/data/OR_ABI-L1b-RadF-M3C03_G16_s20180722030423_e20180722041190_c20180722041237-119000_0.nc', - '/data/OR_ABI-L1b-RadF-M3C04_G16_s20180722030423_e20180722041189_c20180722041221.nc', - '/data/OR_ABI-L1b-RadF-M3C05_G16_s20180722030423_e20180722041190_c20180722041237-119101_0.nc', - '/data/OR_ABI-L1b-RadF-M3C06_G16_s20180722030423_e20180722041195_c20180722041227.nc', - '/data/OR_ABI-L1b-RadF-M3C07_G16_s20180722030423_e20180722041201_c20180722041238.nc', - '/data/OR_ABI-L1b-RadF-M3C08_G16_s20180722030423_e20180722041190_c20180722041238.nc', - '/data/OR_ABI-L1b-RadF-M3C09_G16_s20180722030423_e20180722041195_c20180722041256.nc', - '/data/OR_ABI-L1b-RadF-M3C10_G16_s20180722030423_e20180722041201_c20180722041250.nc', - '/data/OR_ABI-L1b-RadF-M3C11_G16_s20180722030423_e20180722041189_c20180722041254.nc', - '/data/OR_ABI-L1b-RadF-M3C12_G16_s20180722030423_e20180722041195_c20180722041256.nc', - '/data/OR_ABI-L1b-RadF-M3C13_G16_s20180722030423_e20180722041201_c20180722041259.nc', - '/data/OR_ABI-L1b-RadF-M3C14_G16_s20180722030423_e20180722041190_c20180722041258.nc', - '/data/OR_ABI-L1b-RadF-M3C15_G16_s20180722030423_e20180722041195_c20180722041259.nc', - '/data/OR_ABI-L1b-RadF-M3C16_G16_s20180722030423_e20180722041202_c20180722041259.nc'] +abi_file_list = ["/data/OR_ABI-L1b-RadF-M3C01_G16_s20180722030423_e20180722041189_c20180722041235-118900_0.nc", + "/data/OR_ABI-L1b-RadF-M3C02_G16_s20180722030423_e20180722041190_c20180722041228-120000_0.nc", + "/data/OR_ABI-L1b-RadF-M3C03_G16_s20180722030423_e20180722041190_c20180722041237-119000_0.nc", + "/data/OR_ABI-L1b-RadF-M3C04_G16_s20180722030423_e20180722041189_c20180722041221.nc", + "/data/OR_ABI-L1b-RadF-M3C05_G16_s20180722030423_e20180722041190_c20180722041237-119101_0.nc", + 
"/data/OR_ABI-L1b-RadF-M3C06_G16_s20180722030423_e20180722041195_c20180722041227.nc", + "/data/OR_ABI-L1b-RadF-M3C07_G16_s20180722030423_e20180722041201_c20180722041238.nc", + "/data/OR_ABI-L1b-RadF-M3C08_G16_s20180722030423_e20180722041190_c20180722041238.nc", + "/data/OR_ABI-L1b-RadF-M3C09_G16_s20180722030423_e20180722041195_c20180722041256.nc", + "/data/OR_ABI-L1b-RadF-M3C10_G16_s20180722030423_e20180722041201_c20180722041250.nc", + "/data/OR_ABI-L1b-RadF-M3C11_G16_s20180722030423_e20180722041189_c20180722041254.nc", + "/data/OR_ABI-L1b-RadF-M3C12_G16_s20180722030423_e20180722041195_c20180722041256.nc", + "/data/OR_ABI-L1b-RadF-M3C13_G16_s20180722030423_e20180722041201_c20180722041259.nc", + "/data/OR_ABI-L1b-RadF-M3C14_G16_s20180722030423_e20180722041190_c20180722041258.nc", + "/data/OR_ABI-L1b-RadF-M3C15_G16_s20180722030423_e20180722041195_c20180722041259.nc", + "/data/OR_ABI-L1b-RadF-M3C16_G16_s20180722030423_e20180722041202_c20180722041259.nc"] def generate_fake_abi_xr_dataset(filename, chunks=None, **kwargs): @@ -50,31 +50,31 @@ def generate_fake_abi_xr_dataset(filename, chunks=None, **kwargs): This is an incomplete copy of existing file structures. """ dataset = Dataset(attrs={ - 'time_coverage_start': '2018-03-13T20:30:42.3Z', - 'time_coverage_end': '2018-03-13T20:41:18.9Z', + "time_coverage_start": "2018-03-13T20:30:42.3Z", + "time_coverage_end": "2018-03-13T20:41:18.9Z", }) projection = DataArray( [-214748364], attrs={ - 'long_name': 'GOES-R ABI fixed grid projection', - 'grid_mapping_name': 'geostationary', - 'perspective_point_height': 35786023.0, - 'semi_major_axis': 6378137.0, - 'semi_minor_axis': 6356752.31414, - 'inverse_flattening': 298.2572221, - 'latitude_of_projection_origin': 0.0, - 'longitude_of_projection_origin': -75.0, - 'sweep_angle_axis': 'x' + "long_name": "GOES-R ABI fixed grid projection", + "grid_mapping_name": "geostationary", + "perspective_point_height": 35786023.0, + "semi_major_axis": 6378137.0, + "semi_minor_axis": 6356752.31414, + "inverse_flattening": 298.2572221, + "latitude_of_projection_origin": 0.0, + "longitude_of_projection_origin": -75.0, + "sweep_angle_axis": "x" }) - dataset['goes_imager_projection'] = projection + dataset["goes_imager_projection"] = projection - if 'C01' in filename or 'C03' in filename or 'C05' in filename: + if "C01" in filename or "C03" in filename or "C05" in filename: stop = 10847 step = 2 scale = 2.8e-05 offset = 0.151858 - elif 'C02' in filename: + elif "C02" in filename: stop = 21693 step = 4 scale = 1.4e-05 @@ -88,117 +88,117 @@ def generate_fake_abi_xr_dataset(filename, chunks=None, **kwargs): y = DataArray( da.arange(0, stop, step), attrs={ - 'scale_factor': -scale, - 'add_offset': offset, - 'units': 'rad', - 'axis': 'Y', - 'long_name': 'GOES fixed grid projection y-coordinate', - 'standard_name': 'projection_y_coordinate' + "scale_factor": -scale, + "add_offset": offset, + "units": "rad", + "axis": "Y", + "long_name": "GOES fixed grid projection y-coordinate", + "standard_name": "projection_y_coordinate" }, - dims=['y']) + dims=["y"]) - dataset['y'] = y + dataset["y"] = y x = DataArray( da.arange(0, stop, step), attrs={ - 'scale_factor': scale, - 'add_offset': -offset, - 'units': 'rad', - 'axis': 'X', - 'long_name': 'GOES fixed grid projection x-coordinate', - 'standard_name': 'projection_x_coordinate' + "scale_factor": scale, + "add_offset": -offset, + "units": "rad", + "axis": "X", + "long_name": "GOES fixed grid projection x-coordinate", + "standard_name": "projection_x_coordinate" }, - dims=['x']) + 
dims=["x"]) - dataset['x'] = x + dataset["x"] = x rad = DataArray( da.random.randint(0, 1025, size=[len(y), len(x)], dtype=np.int16, chunks=chunks), attrs={ - '_FillValue': np.array(1023), - 'long_name': 'ABI L1b Radiances', - 'standard_name': 'toa_outgoing_radiance_per_unit_wavelength', - '_Unsigned': 'true', - 'sensor_band_bit_depth': 10, - 'valid_range': np.array([0, 1022], dtype=np.int16), - 'scale_factor': 0.8121064, - 'add_offset': -25.936647, - 'units': 'W m-2 sr-1 um-1', - 'resolution': 'y: 0.000028 rad x: 0.000028 rad', - 'grid_mapping': 'goes_imager_projection', - 'cell_methods': 't: point area: point' + "_FillValue": np.array(1023), + "long_name": "ABI L1b Radiances", + "standard_name": "toa_outgoing_radiance_per_unit_wavelength", + "_Unsigned": "true", + "sensor_band_bit_depth": 10, + "valid_range": np.array([0, 1022], dtype=np.int16), + "scale_factor": 0.8121064, + "add_offset": -25.936647, + "units": "W m-2 sr-1 um-1", + "resolution": "y: 0.000028 rad x: 0.000028 rad", + "grid_mapping": "goes_imager_projection", + "cell_methods": "t: point area: point" }, - dims=['y', 'x'] + dims=["y", "x"] ) - dataset['Rad'] = rad + dataset["Rad"] = rad sublat = DataArray(0.0, attrs={ - 'long_name': 'nominal satellite subpoint latitude (platform latitude)', - 'standard_name': 'latitude', - '_FillValue': -999.0, - 'units': 'degrees_north'}) - dataset['nominal_satellite_subpoint_lat'] = sublat + "long_name": "nominal satellite subpoint latitude (platform latitude)", + "standard_name": "latitude", + "_FillValue": -999.0, + "units": "degrees_north"}) + dataset["nominal_satellite_subpoint_lat"] = sublat sublon = DataArray(-75.0, attrs={ - 'long_name': 'nominal satellite subpoint longitude (platform longitude)', - 'standard_name': 'longitude', - '_FillValue': -999.0, - 'units': 'degrees_east'}) + "long_name": "nominal satellite subpoint longitude (platform longitude)", + "standard_name": "longitude", + "_FillValue": -999.0, + "units": "degrees_east"}) - dataset['nominal_satellite_subpoint_lon'] = sublon + dataset["nominal_satellite_subpoint_lon"] = sublon satheight = DataArray(35786.023, attrs={ - 'long_name': 'nominal satellite height above GRS 80 ellipsoid (platform altitude)', - 'standard_name': 'height_above_reference_ellipsoid', - '_FillValue': -999.0, - 'units': 'km'}) + "long_name": "nominal satellite height above GRS 80 ellipsoid (platform altitude)", + "standard_name": "height_above_reference_ellipsoid", + "_FillValue": -999.0, + "units": "km"}) - dataset['nominal_satellite_height'] = satheight + dataset["nominal_satellite_height"] = satheight yaw_flip_flag = DataArray(0, attrs={ - 'long_name': 'Flag indicating the spacecraft is operating in yaw flip configuration', - '_Unsigned': 'true', - '_FillValue': np.array(-1), - 'valid_range': np.array([0, 1], dtype=np.int8), - 'units': '1', - 'flag_values': '0 1', - 'flag_meanings': 'false true'}) + "long_name": "Flag indicating the spacecraft is operating in yaw flip configuration", + "_Unsigned": "true", + "_FillValue": np.array(-1), + "valid_range": np.array([0, 1], dtype=np.int8), + "units": "1", + "flag_values": "0 1", + "flag_meanings": "false true"}) - dataset['yaw_flip_flag'] = yaw_flip_flag + dataset["yaw_flip_flag"] = yaw_flip_flag return dataset -@patch('xarray.open_dataset') +@patch("xarray.open_dataset") def test_1258(fake_open_dataset): """Save true_color from abi with radiance doesn't need two resamplings.""" from satpy import Scene fake_open_dataset.side_effect = generate_fake_abi_xr_dataset - scene = Scene(abi_file_list, 
reader='abi_l1b')
-    scene.load(['true_color_nocorr', 'C04'], calibration='radiance')
-    resampled_scene = scene.resample(scene.coarsest_area(), resampler='native')
+    scene = Scene(abi_file_list, reader="abi_l1b")
+    scene.load(["true_color_nocorr", "C04"], calibration="radiance")
+    resampled_scene = scene.resample(scene.coarsest_area(), resampler="native")
     assert len(resampled_scene.keys()) == 2


-@patch('xarray.open_dataset')
+@patch("xarray.open_dataset")
 def test_1088(fake_open_dataset):
     """Check that copied arrays get resampled."""
     from satpy import Scene
     fake_open_dataset.side_effect = generate_fake_abi_xr_dataset

-    scene = Scene(abi_file_list, reader='abi_l1b')
-    scene.load(['C04'], calibration='radiance')
+    scene = Scene(abi_file_list, reader="abi_l1b")
+    scene.load(["C04"], calibration="radiance")

-    my_id = make_dataid(name='my_name', wavelength=(10, 11, 12))
-    scene[my_id] = scene['C04'].copy()
-    resampled = scene.resample('eurol')
+    my_id = make_dataid(name="my_name", wavelength=(10, 11, 12))
+    scene[my_id] = scene["C04"].copy()
+    resampled = scene.resample("eurol")
     assert resampled[my_id].shape == (2048, 2560)


-@patch('xarray.open_dataset')
+@patch("xarray.open_dataset")
 def test_no_enums(fake_open_dataset):
     """Check that no enums are inserted in the resulting attrs."""
     from enum import Enum
@@ -206,7 +206,7 @@ def test_no_enums(fake_open_dataset):
     from satpy import Scene
     fake_open_dataset.side_effect = generate_fake_abi_xr_dataset

-    scene = Scene(abi_file_list, reader='abi_l1b')
-    scene.load(['C04'], calibration='radiance')
-    for value in scene['C04'].attrs.values():
+    scene = Scene(abi_file_list, reader="abi_l1b")
+    scene.load(["C04"], calibration="radiance")
+    for value in scene["C04"].attrs.values():
         assert not isinstance(value, Enum)
diff --git a/satpy/tests/test_resample.py b/satpy/tests/test_resample.py
index aa9063b95c..a9a3b24a01 100644
--- a/satpy/tests/test_resample.py
+++ b/satpy/tests/test_resample.py
@@ -37,7 +37,7 @@ def get_test_data(input_shape=(100, 50), output_shape=(200, 100), output_proj=None,
-                  input_dims=('y', 'x')):
+                  input_dims=("y", "x")):
     """Get common data objects used in testing.
Returns: @@ -57,49 +57,49 @@ def get_test_data(input_shape=(100, 50), output_shape=(200, 100), output_proj=No from xarray import DataArray ds1 = DataArray(da.zeros(input_shape, chunks=85), dims=input_dims, - attrs={'name': 'test_data_name', 'test': 'test'}) - if input_dims and 'y' in input_dims: + attrs={"name": "test_data_name", "test": "test"}) + if input_dims and "y" in input_dims: ds1 = ds1.assign_coords(y=da.arange(input_shape[-2], chunks=85)) - if input_dims and 'x' in input_dims: + if input_dims and "x" in input_dims: ds1 = ds1.assign_coords(x=da.arange(input_shape[-1], chunks=85)) - if input_dims and 'bands' in input_dims: - ds1 = ds1.assign_coords(bands=list('RGBA'[:ds1.sizes['bands']])) + if input_dims and "bands" in input_dims: + ds1 = ds1.assign_coords(bands=list("RGBA"[:ds1.sizes["bands"]])) - input_proj_str = ('+proj=geos +lon_0=-95.0 +h=35786023.0 +a=6378137.0 ' - '+b=6356752.31414 +sweep=x +units=m +no_defs') + input_proj_str = ("+proj=geos +lon_0=-95.0 +h=35786023.0 +a=6378137.0 " + "+b=6356752.31414 +sweep=x +units=m +no_defs") source = AreaDefinition( - 'test_target', - 'test_target', - 'test_target', + "test_target", + "test_target", + "test_target", proj4_str_to_dict(input_proj_str), input_shape[1], # width input_shape[0], # height (-1000., -1500., 1000., 1500.)) - ds1.attrs['area'] = source + ds1.attrs["area"] = source crs = CRS.from_string(input_proj_str) ds1 = ds1.assign_coords(crs=crs) ds2 = ds1.copy() input_area_shape = tuple(ds1.sizes[dim] for dim in ds1.dims - if dim in ['y', 'x']) - geo_dims = ('y', 'x') if input_dims else None + if dim in ["y", "x"]) + geo_dims = ("y", "x") if input_dims else None lons = da.random.random(input_area_shape, chunks=50) lats = da.random.random(input_area_shape, chunks=50) swath_def = SwathDefinition( DataArray(lons, dims=geo_dims), DataArray(lats, dims=geo_dims)) - ds2.attrs['area'] = swath_def - crs = CRS.from_string('+proj=latlong +datum=WGS84 +ellps=WGS84') + ds2.attrs["area"] = swath_def + crs = CRS.from_string("+proj=latlong +datum=WGS84 +ellps=WGS84") ds2 = ds2.assign_coords(crs=crs) # set up target definition - output_proj_str = ('+proj=lcc +datum=WGS84 +ellps=WGS84 ' - '+lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs') + output_proj_str = ("+proj=lcc +datum=WGS84 +ellps=WGS84 " + "+lon_0=-95. 
+lat_0=25 +lat_1=25 +units=m +no_defs") output_proj_str = output_proj or output_proj_str target = AreaDefinition( - 'test_target', - 'test_target', - 'test_target', + "test_target", + "test_target", + "test_target", proj4_str_to_dict(output_proj_str), output_shape[1], # width output_shape[0], # height @@ -116,14 +116,14 @@ def test_type_preserve(self): from pyresample.geometry import SwathDefinition from satpy.resample import resample_dataset - source_area = SwathDefinition(xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)), dims=['y', 'x']), - xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)), dims=['y', 'x'])) - dest_area = SwathDefinition(xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)) + .0001, dims=['y', 'x']), - xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)) + .0001, dims=['y', 'x'])) + source_area = SwathDefinition(xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)), dims=["y", "x"]), + xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)), dims=["y", "x"])) + dest_area = SwathDefinition(xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)) + .0001, dims=["y", "x"]), + xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)) + .0001, dims=["y", "x"])) expected_gap = np.array([[1, 2], [3, 255]]) - data = xr.DataArray(da.from_array(expected_gap, chunks=5), dims=['y', 'x']) - data.attrs['_FillValue'] = 255 - data.attrs['area'] = source_area + data = xr.DataArray(da.from_array(expected_gap, chunks=5), dims=["y", "x"]) + data.attrs["_FillValue"] = 255 + data.attrs["area"] = source_area res = resample_dataset(data, dest_area) self.assertEqual(res.dtype, data.dtype) self.assertTrue(np.all(res.values == expected_gap)) @@ -137,11 +137,11 @@ def test_type_preserve(self): class TestKDTreeResampler(unittest.TestCase): """Test the kd-tree resampler.""" - @mock.patch('satpy.resample.KDTreeResampler._check_numpy_cache') - @mock.patch('satpy.resample.xr.Dataset') - @mock.patch('satpy.resample.zarr.open') - @mock.patch('satpy.resample.KDTreeResampler._create_cache_filename') - @mock.patch('pyresample.kd_tree.XArrayResamplerNN') + @mock.patch("satpy.resample.KDTreeResampler._check_numpy_cache") + @mock.patch("satpy.resample.xr.Dataset") + @mock.patch("satpy.resample.zarr.open") + @mock.patch("satpy.resample.KDTreeResampler._create_cache_filename") + @mock.patch("pyresample.kd_tree.XArrayResamplerNN") def test_kd_resampling(self, xr_resampler, create_filename, zarr_open, xr_dset, cnc): """Test the kd resampler.""" @@ -151,7 +151,7 @@ def test_kd_resampling(self, xr_resampler, create_filename, zarr_open, xr_dset.return_value = mock_dset resampler = KDTreeResampler(source_swath, target_area) resampler.precompute( - mask=da.arange(5, chunks=5).astype(bool), cache_dir='.') + mask=da.arange(5, chunks=5).astype(bool), cache_dir=".") xr_resampler.assert_called_once() resampler.resampler.get_neighbour_info.assert_called() # swath definitions should not be cached @@ -166,7 +166,7 @@ def test_kd_resampling(self, xr_resampler, create_filename, zarr_open, try: the_dir = tempfile.mkdtemp() resampler = KDTreeResampler(source_area, target_area) - create_filename.return_value = os.path.join(the_dir, 'test_cache.zarr') + create_filename.return_value = os.path.join(the_dir, "test_cache.zarr") zarr_open.side_effect = ValueError() resampler.precompute(cache_dir=the_dir) # assert data was saved to the on-disk cache @@ -216,8 +216,8 @@ def astype(self, dtype): resampler.compute(data, fill_value=fill_value) resampler.resampler.get_sample_from_neighbour_info.assert_called_with(data, fill_value) - 
@mock.patch('satpy.resample.np.load') - @mock.patch('satpy.resample.xr.Dataset') + @mock.patch("satpy.resample.np.load") + @mock.patch("satpy.resample.xr.Dataset") def test_check_numpy_cache(self, xr_Dataset, np_load): """Test that cache stored in .npz is converted to zarr.""" from satpy.resample import KDTreeResampler @@ -232,22 +232,22 @@ def test_check_numpy_cache(self, xr_Dataset, np_load): the_dir = tempfile.mkdtemp() kwargs = {} np_path = resampler._create_cache_filename(the_dir, - prefix='resample_lut-', - fmt='.npz', + prefix="resample_lut-", + fmt=".npz", mask=None, **kwargs) zarr_path = resampler._create_cache_filename(the_dir, - prefix='nn_lut-', - fmt='.zarr', + prefix="nn_lut-", + fmt=".zarr", mask=None, **kwargs) resampler._check_numpy_cache(the_dir) np_load.assert_not_called() zarr_out.to_zarr.assert_not_called() - with open(np_path, 'w') as fid: + with open(np_path, "w") as fid: fid.write("42") resampler._check_numpy_cache(the_dir) - np_load.assert_called_once_with(np_path, 'r') + np_load.assert_called_once_with(np_path, "r") zarr_out.to_zarr.assert_called_once_with(zarr_path) finally: shutil.rmtree(the_dir) @@ -259,9 +259,9 @@ def test_check_numpy_cache(self, xr_Dataset, np_load): class TestEWAResampler(unittest.TestCase): """Test EWA resampler class.""" - @mock.patch('satpy.resample.fornav') - @mock.patch('satpy.resample.ll2cr') - @mock.patch('satpy.resample.SwathDefinition.get_lonlats') + @mock.patch("satpy.resample.fornav") + @mock.patch("satpy.resample.ll2cr") + @mock.patch("satpy.resample.SwathDefinition.get_lonlats") def test_2d_ewa(self, get_lonlats, ll2cr, fornav): """Test EWA with a 2D dataset.""" import numpy as np @@ -278,11 +278,11 @@ def test_2d_ewa(self, get_lonlats, ll2cr, fornav): swath_data.data = swath_data.data.astype(np.float32) num_chunks = len(source_swath.lons.chunks[0]) * len(source_swath.lons.chunks[1]) - new_data = resample_dataset(swath_data, target_area, resampler='ewa') + new_data = resample_dataset(swath_data, target_area, resampler="ewa") self.assertTupleEqual(new_data.shape, (200, 100)) self.assertEqual(new_data.dtype, np.float32) - self.assertEqual(new_data.attrs['test'], 'test') - self.assertIs(new_data.attrs['area'], target_area) + self.assertEqual(new_data.attrs["test"], "test") + self.assertIs(new_data.attrs["area"], target_area) # make sure we can actually compute everything new_data.compute() lonlat_calls = get_lonlats.call_count @@ -291,26 +291,26 @@ def test_2d_ewa(self, get_lonlats, ll2cr, fornav): # resample a different dataset and make sure cache is used data = xr.DataArray( swath_data.data, - dims=('y', 'x'), attrs={'area': source_swath, 'test': 'test2', - 'name': 'test2'}) - new_data = resample_dataset(data, target_area, resampler='ewa') + dims=("y", "x"), attrs={"area": source_swath, "test": "test2", + "name": "test2"}) + new_data = resample_dataset(data, target_area, resampler="ewa") new_data.compute() # ll2cr will be called once more because of the computation self.assertEqual(ll2cr.call_count, ll2cr_calls + num_chunks) # but we should already have taken the lonlats from the SwathDefinition self.assertEqual(get_lonlats.call_count, lonlat_calls) - self.assertIn('y', new_data.coords) - self.assertIn('x', new_data.coords) - self.assertIn('crs', new_data.coords) - self.assertIsInstance(new_data.coords['crs'].item(), CRS) - self.assertIn('lambert', new_data.coords['crs'].item().coordinate_operation.method_name.lower()) - self.assertEqual(new_data.coords['y'].attrs['units'], 'meter') - 
self.assertEqual(new_data.coords['x'].attrs['units'], 'meter') - self.assertEqual(target_area.crs, new_data.coords['crs'].item()) - - @mock.patch('satpy.resample.fornav') - @mock.patch('satpy.resample.ll2cr') - @mock.patch('satpy.resample.SwathDefinition.get_lonlats') + self.assertIn("y", new_data.coords) + self.assertIn("x", new_data.coords) + self.assertIn("crs", new_data.coords) + self.assertIsInstance(new_data.coords["crs"].item(), CRS) + self.assertIn("lambert", new_data.coords["crs"].item().coordinate_operation.method_name.lower()) + self.assertEqual(new_data.coords["y"].attrs["units"], "meter") + self.assertEqual(new_data.coords["x"].attrs["units"], "meter") + self.assertEqual(target_area.crs, new_data.coords["crs"].item()) + + @mock.patch("satpy.resample.fornav") + @mock.patch("satpy.resample.ll2cr") + @mock.patch("satpy.resample.SwathDefinition.get_lonlats") def test_3d_ewa(self, get_lonlats, ll2cr, fornav): """Test EWA with a 3D dataset.""" import numpy as np @@ -318,7 +318,7 @@ def test_3d_ewa(self, get_lonlats, ll2cr, fornav): from satpy.resample import resample_dataset _, _, swath_data, source_swath, target_area = get_test_data( - input_shape=(3, 200, 100), input_dims=('bands', 'y', 'x')) + input_shape=(3, 200, 100), input_dims=("bands", "y", "x")) swath_data.data = swath_data.data.astype(np.float32) ll2cr.return_value = (100, np.zeros((10, 10), dtype=np.float32), @@ -328,11 +328,11 @@ def test_3d_ewa(self, get_lonlats, ll2cr, fornav): get_lonlats.return_value = (source_swath.lons, source_swath.lats) num_chunks = len(source_swath.lons.chunks[0]) * len(source_swath.lons.chunks[1]) - new_data = resample_dataset(swath_data, target_area, resampler='ewa') + new_data = resample_dataset(swath_data, target_area, resampler="ewa") self.assertTupleEqual(new_data.shape, (3, 200, 100)) self.assertEqual(new_data.dtype, np.float32) - self.assertEqual(new_data.attrs['test'], 'test') - self.assertIs(new_data.attrs['area'], target_area) + self.assertEqual(new_data.attrs["test"], "test") + self.assertIs(new_data.attrs["area"], target_area) # make sure we can actually compute everything new_data.compute() lonlat_calls = get_lonlats.call_count @@ -341,25 +341,25 @@ def test_3d_ewa(self, get_lonlats, ll2cr, fornav): # resample a different dataset and make sure cache is used swath_data = xr.DataArray( swath_data.data, - dims=('bands', 'y', 'x'), coords={'bands': ['R', 'G', 'B']}, - attrs={'area': source_swath, 'test': 'test'}) - new_data = resample_dataset(swath_data, target_area, resampler='ewa') + dims=("bands", "y", "x"), coords={"bands": ["R", "G", "B"]}, + attrs={"area": source_swath, "test": "test"}) + new_data = resample_dataset(swath_data, target_area, resampler="ewa") new_data.compute() # ll2cr will be called once more because of the computation self.assertEqual(ll2cr.call_count, ll2cr_calls + num_chunks) # but we should already have taken the lonlats from the SwathDefinition self.assertEqual(get_lonlats.call_count, lonlat_calls) - self.assertIn('y', new_data.coords) - self.assertIn('x', new_data.coords) - self.assertIn('bands', new_data.coords) - self.assertIn('crs', new_data.coords) - self.assertIsInstance(new_data.coords['crs'].item(), CRS) - self.assertIn('lambert', new_data.coords['crs'].item().coordinate_operation.method_name.lower()) - self.assertEqual(new_data.coords['y'].attrs['units'], 'meter') - self.assertEqual(new_data.coords['x'].attrs['units'], 'meter') - np.testing.assert_equal(new_data.coords['bands'].values, - ['R', 'G', 'B']) - self.assertEqual(target_area.crs, 
new_data.coords['crs'].item()) + self.assertIn("y", new_data.coords) + self.assertIn("x", new_data.coords) + self.assertIn("bands", new_data.coords) + self.assertIn("crs", new_data.coords) + self.assertIsInstance(new_data.coords["crs"].item(), CRS) + self.assertIn("lambert", new_data.coords["crs"].item().coordinate_operation.method_name.lower()) + self.assertEqual(new_data.coords["y"].attrs["units"], "meter") + self.assertEqual(new_data.coords["x"].attrs["units"], "meter") + np.testing.assert_equal(new_data.coords["bands"].values, + ["R", "G", "B"]) + self.assertEqual(target_area.crs, new_data.coords["crs"].item()) class TestNativeResampler: @@ -419,35 +419,35 @@ def test_expand_dims(self): assert new_data.shape == (200, 100) new_data2 = resampler.resample(ds1.compute()) np.testing.assert_equal(new_data.compute().data, new_data2.compute().data) - assert 'y' in new_data.coords - assert 'x' in new_data.coords - assert 'crs' in new_data.coords - assert isinstance(new_data.coords['crs'].item(), CRS) - assert 'lambert' in new_data.coords['crs'].item().coordinate_operation.method_name.lower() - assert new_data.coords['y'].attrs['units'] == 'meter' - assert new_data.coords['x'].attrs['units'] == 'meter' - assert target_area.crs == new_data.coords['crs'].item() + assert "y" in new_data.coords + assert "x" in new_data.coords + assert "crs" in new_data.coords + assert isinstance(new_data.coords["crs"].item(), CRS) + assert "lambert" in new_data.coords["crs"].item().coordinate_operation.method_name.lower() + assert new_data.coords["y"].attrs["units"] == "meter" + assert new_data.coords["x"].attrs["units"] == "meter" + assert target_area.crs == new_data.coords["crs"].item() def test_expand_dims_3d(self): """Test expanding native resampling with 3D data.""" ds1, source_area, _, _, target_area = get_test_data( - input_shape=(3, 100, 50), input_dims=('bands', 'y', 'x')) + input_shape=(3, 100, 50), input_dims=("bands", "y", "x")) # source geo def doesn't actually matter resampler = NativeResampler(source_area, target_area) new_data = resampler.resample(ds1) assert new_data.shape == (3, 200, 100) new_data2 = resampler.resample(ds1.compute()) np.testing.assert_equal(new_data.compute().data, new_data2.compute().data) - assert 'y' in new_data.coords - assert 'x' in new_data.coords - assert 'bands' in new_data.coords - np.testing.assert_equal(new_data.coords['bands'].values, ['R', 'G', 'B']) - assert 'crs' in new_data.coords - assert isinstance(new_data.coords['crs'].item(), CRS) - assert 'lambert' in new_data.coords['crs'].item().coordinate_operation.method_name.lower() - assert new_data.coords['y'].attrs['units'] == 'meter' - assert new_data.coords['x'].attrs['units'] == 'meter' - assert target_area.crs == new_data.coords['crs'].item() + assert "y" in new_data.coords + assert "x" in new_data.coords + assert "bands" in new_data.coords + np.testing.assert_equal(new_data.coords["bands"].values, ["R", "G", "B"]) + assert "crs" in new_data.coords + assert isinstance(new_data.coords["crs"].item(), CRS) + assert "lambert" in new_data.coords["crs"].item().coordinate_operation.method_name.lower() + assert new_data.coords["y"].attrs["units"] == "meter" + assert new_data.coords["x"].attrs["units"] == "meter" + assert target_area.crs == new_data.coords["crs"].item() def test_expand_without_dims(self): """Test expanding native resampling with no dimensions specified.""" @@ -458,10 +458,10 @@ def test_expand_without_dims(self): assert new_data.shape == (200, 100) new_data2 = resampler.resample(ds1.compute()) 
np.testing.assert_equal(new_data.compute().data, new_data2.compute().data) - assert 'crs' in new_data.coords - assert isinstance(new_data.coords['crs'].item(), CRS) - assert 'lambert' in new_data.coords['crs'].item().coordinate_operation.method_name.lower() - assert target_area.crs == new_data.coords['crs'].item() + assert "crs" in new_data.coords + assert isinstance(new_data.coords["crs"].item(), CRS) + assert "lambert" in new_data.coords["crs"].item().coordinate_operation.method_name.lower() + assert target_area.crs == new_data.coords["crs"].item() def test_expand_without_dims_4D(self): """Test expanding native resampling with 4D data with no dimensions specified.""" @@ -476,9 +476,9 @@ class TestBilinearResampler(unittest.TestCase): """Test the bilinear resampler.""" - @mock.patch('satpy.resample._move_existing_caches') - @mock.patch('satpy.resample.BilinearResampler._create_cache_filename') - @mock.patch('pyresample.bilinear.XArrayBilinearResampler') + @mock.patch("satpy.resample._move_existing_caches") + @mock.patch("satpy.resample.BilinearResampler._create_cache_filename") + @mock.patch("pyresample.bilinear.XArrayBilinearResampler") def test_bil_resampling(self, xr_resampler, create_filename, move_existing_caches): """Test the bilinear resampler.""" @@ -496,29 +496,29 @@ def test_bil_resampling(self, xr_resampler, create_filename, # Test that get_sample_from_bil_info is called properly fill_value = 8 resampler.resampler.get_sample_from_bil_info.return_value = \ - xr.DataArray(da.zeros(target_area.shape), dims=('y', 'x')) + xr.DataArray(da.zeros(target_area.shape), dims=("y", "x")) new_data = resampler.compute(data, fill_value=fill_value) resampler.resampler.get_sample_from_bil_info.assert_called_with( data, fill_value=fill_value, output_shape=target_area.shape) - self.assertIn('y', new_data.coords) - self.assertIn('x', new_data.coords) - self.assertIn('crs', new_data.coords) - self.assertIsInstance(new_data.coords['crs'].item(), CRS) - self.assertIn('lambert', new_data.coords['crs'].item().coordinate_operation.method_name.lower()) - self.assertEqual(new_data.coords['y'].attrs['units'], 'meter') - self.assertEqual(new_data.coords['x'].attrs['units'], 'meter') - self.assertEqual(target_area.crs, new_data.coords['crs'].item()) + self.assertIn("y", new_data.coords) + self.assertIn("x", new_data.coords) + self.assertIn("crs", new_data.coords) + self.assertIsInstance(new_data.coords["crs"].item(), CRS) + self.assertIn("lambert", new_data.coords["crs"].item().coordinate_operation.method_name.lower()) + self.assertEqual(new_data.coords["y"].attrs["units"], "meter") + self.assertEqual(new_data.coords["x"].attrs["units"], "meter") + self.assertEqual(target_area.crs, new_data.coords["crs"].item()) # Test that the resampler tries to read the resampling info from disk resampler = BilinearResampler(source_swath, target_area) - resampler.precompute(cache_dir='.') + resampler.precompute(cache_dir=".") resampler.resampler.load_resampling_info.assert_called() # Test caching the resampling info try: the_dir = tempfile.mkdtemp() resampler = BilinearResampler(source_area, target_area) - create_filename.return_value = os.path.join(the_dir, 'test_cache.zarr') + create_filename.return_value = os.path.join(the_dir, "test_cache.zarr") xr_resampler.return_value.load_resampling_info.side_effect = IOError resampler.precompute(cache_dir=the_dir) @@ -544,9 +544,9 @@ def test_bil_resampling(self, xr_resampler, create_filename, resampler = BilinearResampler(source_area, target_area)
resampler.precompute(cache_dir=the_dir) resampler.save_bil_info(cache_dir=the_dir) - zarr_file = os.path.join(the_dir, 'test_cache.zarr') + zarr_file = os.path.join(the_dir, "test_cache.zarr") # Save again faking the cache file already exists - with mock.patch('os.path.exists') as exists: + with mock.patch("os.path.exists") as exists: exists.return_value = True resampler.save_bil_info(cache_dir=the_dir) move_existing_caches.assert_called_once_with(the_dir, zarr_file) @@ -559,18 +559,18 @@ def test_move_existing_caches(self): try: the_dir = tempfile.mkdtemp() # Test that existing cache file is moved away - zarr_file = os.path.join(the_dir, 'test.zarr') - with open(zarr_file, 'w') as fid: - fid.write('42') + zarr_file = os.path.join(the_dir, "test.zarr") + with open(zarr_file, "w") as fid: + fid.write("42") from satpy.resample import _move_existing_caches _move_existing_caches(the_dir, zarr_file) self.assertFalse(os.path.exists(zarr_file)) self.assertTrue(os.path.exists( - os.path.join(the_dir, 'moved_by_satpy', - 'test.zarr'))) + os.path.join(the_dir, "moved_by_satpy", + "test.zarr"))) # Run again to see that the existing dir doesn't matter - with open(zarr_file, 'w') as fid: - fid.write('42') + with open(zarr_file, "w") as fid: + fid.write("42") _move_existing_caches(the_dir, zarr_file) finally: shutil.rmtree(the_dir) @@ -585,69 +585,69 @@ def test_area_def_coordinates(self): from satpy.resample import add_crs_xy_coords area_def = AreaDefinition( - 'test', 'test', 'test', {'proj': 'lcc', 'lat_1': 25, 'lat_0': 25}, + "test", "test", "test", {"proj": "lcc", "lat_1": 25, "lat_0": 25}, 100, 200, [-100, -100, 100, 100] ) data_arr = xr.DataArray( da.zeros((200, 100), chunks=50), - attrs={'area': area_def}, - dims=('y', 'x'), + attrs={"area": area_def}, + dims=("y", "x"), ) new_data_arr = add_crs_xy_coords(data_arr, area_def) - self.assertIn('y', new_data_arr.coords) - self.assertIn('x', new_data_arr.coords) + self.assertIn("y", new_data_arr.coords) + self.assertIn("x", new_data_arr.coords) - self.assertIn('units', new_data_arr.coords['y'].attrs) + self.assertIn("units", new_data_arr.coords["y"].attrs) self.assertEqual( - new_data_arr.coords['y'].attrs['units'], 'meter') - self.assertIn('units', new_data_arr.coords['x'].attrs) + new_data_arr.coords["y"].attrs["units"], "meter") + self.assertIn("units", new_data_arr.coords["x"].attrs) self.assertEqual( - new_data_arr.coords['x'].attrs['units'], 'meter') - self.assertIn('crs', new_data_arr.coords) - self.assertIsInstance(new_data_arr.coords['crs'].item(), CRS) - self.assertEqual(area_def.crs, new_data_arr.coords['crs'].item()) + new_data_arr.coords["x"].attrs["units"], "meter") + self.assertIn("crs", new_data_arr.coords) + self.assertIsInstance(new_data_arr.coords["crs"].item(), CRS) + self.assertEqual(area_def.crs, new_data_arr.coords["crs"].item()) # already has coords data_arr = xr.DataArray( da.zeros((200, 100), chunks=50), - attrs={'area': area_def}, - dims=('y', 'x'), - coords={'y': np.arange(2, 202), 'x': np.arange(100)} + attrs={"area": area_def}, + dims=("y", "x"), + coords={"y": np.arange(2, 202), "x": np.arange(100)} ) new_data_arr = add_crs_xy_coords(data_arr, area_def) - self.assertIn('y', new_data_arr.coords) - self.assertNotIn('units', new_data_arr.coords['y'].attrs) - self.assertIn('x', new_data_arr.coords) - self.assertNotIn('units', new_data_arr.coords['x'].attrs) - np.testing.assert_equal(new_data_arr.coords['y'], np.arange(2, 202)) + self.assertIn("y", new_data_arr.coords) + self.assertNotIn("units", 
new_data_arr.coords["y"].attrs) + self.assertIn("x", new_data_arr.coords) + self.assertNotIn("units", new_data_arr.coords["x"].attrs) + np.testing.assert_equal(new_data_arr.coords["y"], np.arange(2, 202)) - self.assertIn('crs', new_data_arr.coords) - self.assertIsInstance(new_data_arr.coords['crs'].item(), CRS) - self.assertEqual(area_def.crs, new_data_arr.coords['crs'].item()) + self.assertIn("crs", new_data_arr.coords) + self.assertIsInstance(new_data_arr.coords["crs"].item(), CRS) + self.assertEqual(area_def.crs, new_data_arr.coords["crs"].item()) # lat/lon area area_def = AreaDefinition( - 'test', 'test', 'test', {'proj': 'latlong'}, + "test", "test", "test", {"proj": "latlong"}, 100, 200, [-100, -100, 100, 100] ) data_arr = xr.DataArray( da.zeros((200, 100), chunks=50), - attrs={'area': area_def}, - dims=('y', 'x'), + attrs={"area": area_def}, + dims=("y", "x"), ) new_data_arr = add_crs_xy_coords(data_arr, area_def) - self.assertIn('y', new_data_arr.coords) - self.assertIn('x', new_data_arr.coords) + self.assertIn("y", new_data_arr.coords) + self.assertIn("x", new_data_arr.coords) - self.assertIn('units', new_data_arr.coords['y'].attrs) + self.assertIn("units", new_data_arr.coords["y"].attrs) self.assertEqual( - new_data_arr.coords['y'].attrs['units'], 'degrees_north') - self.assertIn('units', new_data_arr.coords['x'].attrs) + new_data_arr.coords["y"].attrs["units"], "degrees_north") + self.assertIn("units", new_data_arr.coords["x"].attrs) self.assertEqual( - new_data_arr.coords['x'].attrs['units'], 'degrees_east') - self.assertIn('crs', new_data_arr.coords) - self.assertIsInstance(new_data_arr.coords['crs'].item(), CRS) - self.assertEqual(area_def.crs, new_data_arr.coords['crs'].item()) + new_data_arr.coords["x"].attrs["units"], "degrees_east") + self.assertIn("crs", new_data_arr.coords) + self.assertIsInstance(new_data_arr.coords["crs"].item(), CRS) + self.assertEqual(area_def.crs, new_data_arr.coords["crs"].item()) def test_swath_def_coordinates(self): """Test coordinates being added with an SwathDefinition.""" @@ -656,15 +656,15 @@ def test_swath_def_coordinates(self): from satpy.resample import add_crs_xy_coords lons_data = da.random.random((200, 100), chunks=50) lats_data = da.random.random((200, 100), chunks=50) - lons = xr.DataArray(lons_data, attrs={'units': 'degrees_east'}, - dims=('y', 'x')) - lats = xr.DataArray(lats_data, attrs={'units': 'degrees_north'}, - dims=('y', 'x')) + lons = xr.DataArray(lons_data, attrs={"units": "degrees_east"}, + dims=("y", "x")) + lats = xr.DataArray(lats_data, attrs={"units": "degrees_north"}, + dims=("y", "x")) area_def = SwathDefinition(lons, lats) data_arr = xr.DataArray( da.zeros((200, 100), chunks=50), - attrs={'area': area_def}, - dims=('y', 'x'), + attrs={"area": area_def}, + dims=("y", "x"), ) new_data_arr = add_crs_xy_coords(data_arr, area_def) # See https://github.com/pydata/xarray/issues/3068 @@ -679,11 +679,11 @@ def test_swath_def_coordinates(self): # new_data_arr.coords['latitude'].attrs['units'], 'degrees_north') # self.assertIsInstance(new_data_arr.coords['latitude'].data, da.Array) - self.assertIn('crs', new_data_arr.coords) - crs = new_data_arr.coords['crs'].item() + self.assertIn("crs", new_data_arr.coords) + crs = new_data_arr.coords["crs"].item() self.assertIsInstance(crs, CRS) assert crs.is_geographic - self.assertIsInstance(new_data_arr.coords['crs'].item(), CRS) + self.assertIsInstance(new_data_arr.coords["crs"].item(), CRS) class TestBucketAvg(unittest.TestCase): @@ -706,7 +706,7 @@ def test_init(self): 
self.assertTrue(self.bucket.source_geo_def == self.source_geo_def) self.assertTrue(self.bucket.target_geo_def == self.target_geo_def) - @mock.patch('pyresample.bucket.BucketResampler') + @mock.patch("pyresample.bucket.BucketResampler") def test_precompute(self, bucket): """Test bucket resampler precomputation.""" bucket.return_value = True @@ -740,7 +740,7 @@ def test_compute(self): res = self._compute_mocked_bucket_avg(data, return_data=data[0, :, :], fill_value=2) self.assertEqual(res.shape, (3, 5, 5)) - @mock.patch('satpy.resample.PR_USE_SKIPNA', True) + @mock.patch("satpy.resample.PR_USE_SKIPNA", True) def test_compute_and_use_skipna_handling(self): """Test bucket resampler computation with skipna handling.""" data = da.ones((5,)) @@ -763,7 +763,7 @@ def test_compute_and_use_skipna_handling(self): fill_value=2, skipna=True) - @mock.patch('satpy.resample.PR_USE_SKIPNA', False) + @mock.patch("satpy.resample.PR_USE_SKIPNA", False) def test_compute_and_not_use_skipna_handling(self): """Test bucket resampler computation without skipna handling.""" data = da.ones((5,)) @@ -792,7 +792,7 @@ def test_compute_and_not_use_skipna_handling(self): fill_value=2, mask_all_nan=False) - @mock.patch('pyresample.bucket.BucketResampler') + @mock.patch("pyresample.bucket.BucketResampler") def test_resample(self, pyresample_bucket): """Test the bucket resampler's resample method.""" self.bucket.resampler = mock.MagicMock() @@ -800,38 +800,38 @@ def test_resample(self, pyresample_bucket): self.bucket.compute = mock.MagicMock() # 1D input data - data = xr.DataArray(da.ones((5,)), dims=('foo'), attrs={'bar': 'baz'}) + data = xr.DataArray(da.ones((5,)), dims=("foo"), attrs={"bar": "baz"}) self.bucket.compute.return_value = da.ones((5, 5)) res = self.bucket.resample(data) self.bucket.precompute.assert_called_once() self.bucket.compute.assert_called_once() self.assertEqual(res.shape, (5, 5)) - self.assertEqual(res.dims, ('y', 'x')) - self.assertTrue('bar' in res.attrs) - self.assertEqual(res.attrs['bar'], 'baz') + self.assertEqual(res.dims, ("y", "x")) + self.assertTrue("bar" in res.attrs) + self.assertEqual(res.attrs["bar"], "baz") # 2D input data - data = xr.DataArray(da.ones((5, 5)), dims=('foo', 'bar')) + data = xr.DataArray(da.ones((5, 5)), dims=("foo", "bar")) self.bucket.compute.return_value = da.ones((5, 5)) res = self.bucket.resample(data) self.assertEqual(res.shape, (5, 5)) - self.assertEqual(res.dims, ('y', 'x')) + self.assertEqual(res.dims, ("y", "x")) # 3D input data with 'bands' dim - data = xr.DataArray(da.ones((1, 5, 5)), dims=('bands', 'foo', 'bar'), - coords={'bands': ['L']}) + data = xr.DataArray(da.ones((1, 5, 5)), dims=("bands", "foo", "bar"), + coords={"bands": ["L"]}) self.bucket.compute.return_value = da.ones((1, 5, 5)) res = self.bucket.resample(data) self.assertEqual(res.shape, (1, 5, 5)) - self.assertEqual(res.dims, ('bands', 'y', 'x')) - self.assertEqual(res.coords['bands'], ['L']) + self.assertEqual(res.dims, ("bands", "y", "x")) + self.assertEqual(res.coords["bands"], ["L"]) # 3D input data with misc dim names - data = xr.DataArray(da.ones((3, 5, 5)), dims=('foo', 'bar', 'baz')) + data = xr.DataArray(da.ones((3, 5, 5)), dims=("foo", "bar", "baz")) self.bucket.compute.return_value = da.ones((3, 5, 5)) res = self.bucket.resample(data) self.assertEqual(res.shape, (3, 5, 5)) - self.assertEqual(res.dims, ('foo', 'bar', 'baz')) + self.assertEqual(res.dims, ("foo", "bar", "baz")) class TestBucketSum(unittest.TestCase): @@ -871,7 +871,7 @@ def test_compute(self): res =
self._compute_mocked_bucket_sum(data, return_data=data[0, :, :]) self.assertEqual(res.shape, (3, 5, 5)) - @mock.patch('satpy.resample.PR_USE_SKIPNA', True) + @mock.patch("satpy.resample.PR_USE_SKIPNA", True) def test_compute_and_use_skipna_handling(self): """Test bucket resampler computation with skipna handling.""" data = da.ones((5,)) @@ -891,7 +891,7 @@ def test_compute_and_use_skipna_handling(self): data, skipna=True) - @mock.patch('satpy.resample.PR_USE_SKIPNA', False) + @mock.patch("satpy.resample.PR_USE_SKIPNA", False) def test_compute_and_not_use_skipna_handling(self): """Test bucket resampler computation without skipna handling.""" data = da.ones((5,)) @@ -995,7 +995,7 @@ def test_compute(self): with self.assertRaises(ValueError): _ = self.bucket.compute(data) - @mock.patch('pyresample.bucket.BucketResampler') + @mock.patch("pyresample.bucket.BucketResampler") def test_resample(self, pyresample_bucket): """Test the fraction bucket resampler's resample method.""" self.bucket.resampler = mock.MagicMock() @@ -1003,10 +1003,10 @@ def test_resample(self, pyresample_bucket): self.bucket.compute = mock.MagicMock() # Fractions return a dict - data = xr.DataArray(da.ones((1, 5, 5)), dims=('bands', 'y', 'x')) + data = xr.DataArray(da.ones((1, 5, 5)), dims=("bands", "y", "x")) arr = da.ones((5, 5)) self.bucket.compute.return_value = {0: arr, 1: arr, 2: arr} res = self.bucket.resample(data) - self.assertTrue('categories' in res.coords) - self.assertTrue('categories' in res.dims) - self.assertTrue(np.all(res.coords['categories'] == np.array([0, 1, 2]))) + self.assertTrue("categories" in res.coords) + self.assertTrue("categories" in res.dims) + self.assertTrue(np.all(res.coords["categories"] == np.array([0, 1, 2]))) diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index 56dbe25324..29d940fbdc 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -205,21 +205,21 @@ def test_xyz2angle(self): def test_proj_units_to_meters(self): """Test proj units to meters conversion.""" - prj = '+asd=123123123123' + prj = "+asd=123123123123" res = proj_units_to_meters(prj) self.assertEqual(res, prj) - prj = '+a=6378.137' + prj = "+a=6378.137" res = proj_units_to_meters(prj) - self.assertEqual(res, '+a=6378137.000') - prj = '+a=6378.137 +units=km' + self.assertEqual(res, "+a=6378137.000") + prj = "+a=6378.137 +units=km" res = proj_units_to_meters(prj) - self.assertEqual(res, '+a=6378137.000') - prj = '+a=6378.137 +b=6378.137' + self.assertEqual(res, "+a=6378137.000") + prj = "+a=6378.137 +b=6378.137" res = proj_units_to_meters(prj) - self.assertEqual(res, '+a=6378137.000 +b=6378137.000') - prj = '+a=6378.137 +b=6378.137 +h=35785.863' + self.assertEqual(res, "+a=6378137.000 +b=6378137.000") + prj = "+a=6378.137 +b=6378.137 +h=35785.863" res = proj_units_to_meters(prj) - self.assertEqual(res, '+a=6378137.000 +b=6378137.000 +h=35785863.000') + self.assertEqual(res, "+a=6378137.000 +b=6378137.000 +h=35785863.000") class TestGetSatPos: @@ -243,21 +243,21 @@ class TestGetSatPos: def test_get_satpos(self, included_prefixes, preference, expected_result): """Test getting the satellite position.""" all_orb_params = { - 'nadir_longitude': 1, - 'satellite_actual_longitude': 1.1, - 'satellite_nominal_longitude': 1.2, - 'projection_longitude': 1.3, - 'nadir_latitude': 2, - 'satellite_actual_latitude': 2.1, - 'satellite_nominal_latitude': 2.2, - 'projection_latitude': 2.3, - 'satellite_actual_altitude': 3, - 'satellite_nominal_altitude': 3.1, - 'projection_altitude': 3.2 +
"nadir_longitude": 1, + "satellite_actual_longitude": 1.1, + "satellite_nominal_longitude": 1.2, + "projection_longitude": 1.3, + "nadir_latitude": 2, + "satellite_actual_latitude": 2.1, + "satellite_nominal_latitude": 2.2, + "projection_latitude": 2.3, + "satellite_actual_altitude": 3, + "satellite_nominal_altitude": 3.1, + "projection_altitude": 3.2 } orb_params = {key: value for key, value in all_orb_params.items() if any(in_prefix in key for in_prefix in included_prefixes)} - data_arr = xr.DataArray((), attrs={'orbital_parameters': orb_params}) + data_arr = xr.DataArray((), attrs={"orbital_parameters": orb_params}) with warnings.catch_warnings(record=True) as caught_warnings: lon, lat, alt = get_satpos(data_arr, preference=preference) @@ -273,8 +273,8 @@ def test_get_satpos(self, included_prefixes, preference, expected_result): "attrs", ( {}, - {'orbital_parameters': {'projection_longitude': 1}}, - {'satellite_altitude': 1} + {"orbital_parameters": {"projection_longitude": 1}}, + {"satellite_altitude": 1} ) ) def test_get_satpos_fails_with_informative_error(self, attrs): @@ -321,7 +321,7 @@ def test_make_fake_scene(): sc = make_fake_scene({ "six": np.arange(25).reshape(5, 5)}) assert len(sc.keys()) == 1 - assert sc.keys().pop()['name'] == "six" + assert sc.keys().pop()["name"] == "six" assert sc["six"].attrs["area"].shape == (5, 5) sc = make_fake_scene({ "seven": np.arange(3*7).reshape(3, 7), @@ -353,12 +353,12 @@ def test_basic_check_satpy(self): def test_specific_check_satpy(self): """Test 'check_satpy' with specific features provided.""" from satpy.utils import check_satpy - with mock.patch('satpy.utils.print') as print_mock: - check_satpy(readers=['viirs_sdr'], extras=('cartopy', '__fake')) + with mock.patch("satpy.utils.print") as print_mock: + check_satpy(readers=["viirs_sdr"], extras=("cartopy", "__fake")) checked_fake = False for call in print_mock.mock_calls: - if len(call[1]) > 0 and '__fake' in call[1][0]: - self.assertNotIn('ok', call[1][1]) + if len(call[1]) > 0 and "__fake" in call[1][0]: + self.assertNotIn("ok", call[1][1]) checked_fake = True self.assertTrue(checked_fake, "Did not find __fake module " "mentioned in checks") @@ -586,7 +586,7 @@ def test_convert_remote_files_to_fsspec_windows_paths(): assert res == filenames -@mock.patch('fsspec.open_files') +@mock.patch("fsspec.open_files") def test_convert_remote_files_to_fsspec_storage_options(open_files): """Test convertion of remote files to fsspec objects. 
@@ -595,7 +595,7 @@ def test_convert_remote_files_to_fsspec_storage_options(open_files): from satpy.utils import convert_remote_files_to_fsspec filenames = ["s3://tmp/file1.nc"] - storage_options = {'anon': True} + storage_options = {"anon": True} _ = convert_remote_files_to_fsspec(filenames, storage_options=storage_options) diff --git a/satpy/tests/test_writers.py b/satpy/tests/test_writers.py index 986687b0d6..6e1ce7f2e2 100644 --- a/satpy/tests/test_writers.py +++ b/satpy/tests/test_writers.py @@ -39,10 +39,10 @@ def test_to_image_1d(self): """Conversion to image.""" # 1D from satpy.writers import to_image - p = xr.DataArray(np.arange(25), dims=['y']) + p = xr.DataArray(np.arange(25), dims=["y"]) self.assertRaises(ValueError, to_image, p) - @mock.patch('satpy.writers.XRImage') + @mock.patch("satpy.writers.XRImage") def test_to_image_2d(self, mock_geoimage): """Conversion to image.""" from satpy.writers import to_image @@ -51,33 +51,33 @@ def test_to_image_2d(self, mock_geoimage): data = np.arange(25).reshape((5, 5)) p = xr.DataArray(data, attrs=dict(mode="L", fill_value=0, palette=[0, 1, 2, 3, 4, 5]), - dims=['y', 'x']) + dims=["y", "x"]) to_image(p) np.testing.assert_array_equal( data, mock_geoimage.call_args[0][0].values) mock_geoimage.reset_mock() - @mock.patch('satpy.writers.XRImage') + @mock.patch("satpy.writers.XRImage") def test_to_image_3d(self, mock_geoimage): """Conversion to image.""" # 3D from satpy.writers import to_image data = np.arange(75).reshape((3, 5, 5)) - p = xr.DataArray(data, dims=['bands', 'y', 'x']) - p['bands'] = ['R', 'G', 'B'] + p = xr.DataArray(data, dims=["bands", "y", "x"]) + p["bands"] = ["R", "G", "B"] to_image(p) np.testing.assert_array_equal(data[0], mock_geoimage.call_args[0][0][0]) np.testing.assert_array_equal(data[1], mock_geoimage.call_args[0][0][1]) np.testing.assert_array_equal(data[2], mock_geoimage.call_args[0][0][2]) - @mock.patch('satpy.writers.get_enhanced_image') + @mock.patch("satpy.writers.get_enhanced_image") def test_show(self, mock_get_image): """Check showing.""" from satpy.writers import show data = np.arange(25).reshape((5, 5)) - p = xr.DataArray(data, dims=['y', 'x']) + p = xr.DataArray(data, dims=["y", "x"]) show(p) self.assertTrue(mock_get_image.return_value.show.called) @@ -128,7 +128,7 @@ def setup_class(cls): base_dir = os.path.dirname(fn) if base_dir: os.makedirs(base_dir, exist_ok=True) - with open(fn, 'w') as f: + with open(fn, "w") as f: f.write(content) # create fake test image writer @@ -136,7 +136,7 @@ def setup_class(cls): class CustomImageWriter(ImageWriter): def __init__(self, **kwargs): - super(CustomImageWriter, self).__init__(name='test', config_files=[], **kwargs) + super(CustomImageWriter, self).__init__(name="test", config_files=[], **kwargs) self.img = None def save_image(self, img, **kwargs): @@ -148,7 +148,7 @@ def teardown_class(cls): """Remove fake user configurations.""" for fn, _content in cls.TEST_CONFIGS.items(): base_dir = os.path.dirname(fn) - if base_dir not in ['.', ''] and os.path.isdir(base_dir): + if base_dir not in [".", ""] and os.path.isdir(base_dir): shutil.rmtree(base_dir) elif os.path.isfile(fn): os.remove(fn) @@ -157,8 +157,8 @@ def teardown_class(cls): class TestComplexSensorEnhancerConfigs(_BaseCustomEnhancementConfigTests): """Test enhancement configs that use or expect multiple sensors.""" - ENH_FN = 'test_sensor1.yaml' - ENH_FN2 = 'test_sensor2.yaml' + ENH_FN = "test_sensor1.yaml" + ENH_FN2 = "test_sensor2.yaml" TEST_CONFIGS = { ENH_FN: """ @@ -203,11 +203,11 @@ def 
test_multisensor_choice(self): from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs={ - 'name': 'test1', - 'sensor': {'test_sensor2', 'test_sensor1'}, - 'mode': 'L' + "name": "test1", + "sensor": {"test_sensor2", "test_sensor1"}, + "mode": "L" }, - dims=['y', 'x']) + dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(ds, enhance=e) @@ -226,11 +226,11 @@ def test_multisensor_exact(self): from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs={ - 'name': 'my_comp', - 'sensor': {'test_sensor2', 'test_sensor1'}, - 'mode': 'L' + "name": "my_comp", + "sensor": {"test_sensor2", "test_sensor1"}, + "mode": "L" }, - dims=['y', 'x']) + dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(ds, enhance=e) @@ -248,8 +248,8 @@ def test_enhance_bad_query_value(self): from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), - attrs=dict(name=["I", "am", "invalid"], sensor='test_sensor2', mode='L'), - dims=['y', 'x']) + attrs=dict(name=["I", "am", "invalid"], sensor="test_sensor2", mode="L"), + dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None with pytest.raises(KeyError, match="No .* found for None"): @@ -259,11 +259,11 @@ def test_enhance_bad_query_value(self): class TestEnhancerUserConfigs(_BaseCustomEnhancementConfigTests): """Test `Enhancer` functionality when user's custom configurations are present.""" - ENH_FN = 'test_sensor.yaml' - ENH_ENH_FN = os.path.join('enhancements', ENH_FN) - ENH_FN2 = 'test_sensor2.yaml' - ENH_ENH_FN2 = os.path.join('enhancements', ENH_FN2) - ENH_FN3 = 'test_empty.yaml' + ENH_FN = "test_sensor.yaml" + ENH_ENH_FN = os.path.join("enhancements", ENH_FN) + ENH_FN2 = "test_sensor2.yaml" + ENH_ENH_FN2 = os.path.join("enhancements", ENH_FN2) + ENH_FN3 = "test_empty.yaml" TEST_CONFIGS = { ENH_FN: """ @@ -303,8 +303,8 @@ def test_enhance_empty_config(self): from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), - attrs=dict(sensor='test_empty', mode='L'), - dims=['y', 'x']) + attrs=dict(sensor="test_empty", mode="L"), + dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None get_enhanced_image(ds, enhance=e) @@ -317,8 +317,8 @@ def test_enhance_with_sensor_no_entry(self): from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), - attrs=dict(sensor='test_sensor2', mode='L'), - dims=['y', 'x']) + attrs=dict(sensor="test_sensor2", mode="L"), + dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None get_enhanced_image(ds, enhance=e) @@ -332,8 +332,8 @@ def test_no_enhance(self): from satpy.writers import get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), - attrs=dict(name='test1', sensor='test_sensor', mode='L'), - dims=['y', 'x']) + attrs=dict(name="test1", sensor="test_sensor", mode="L"), + dims=["y", "x"]) img = get_enhanced_image(ds, enhance=False) np.testing.assert_allclose(img.data.data.compute().squeeze(), ds.data) @@ -341,8 +341,8 @@ def test_writer_no_enhance(self): """Test turning off enhancements with writer.""" from xarray import DataArray ds = DataArray(np.arange(1, 11.).reshape((2, 5)), - attrs=dict(name='test1', sensor='test_sensor', mode='L'), - dims=['y', 'x']) + attrs=dict(name="test1", sensor="test_sensor", mode="L"), + dims=["y", "x"]) writer = 
self.CustomImageWriter(enhance=False) writer.save_datasets((ds,), compute=False) img = writer.img @@ -354,8 +354,8 @@ def test_writer_custom_enhance(self): from satpy.writers import Enhancer ds = DataArray(np.arange(1, 11.).reshape((2, 5)), - attrs=dict(name='test1', sensor='test_sensor', mode='L'), - dims=['y', 'x']) + attrs=dict(name="test1", sensor="test_sensor", mode="L"), + dims=["y", "x"]) enhance = Enhancer() writer = self.CustomImageWriter(enhance=enhance) writer.save_datasets((ds,), compute=False) @@ -368,8 +368,8 @@ def test_enhance_with_sensor_entry(self): from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), - attrs=dict(name='test1', sensor='test_sensor', mode='L'), - dims=['y', 'x']) + attrs=dict(name="test1", sensor="test_sensor", mode="L"), + dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(ds, enhance=e) @@ -380,8 +380,8 @@ def test_enhance_with_sensor_entry(self): 1.) ds = DataArray(da.arange(1, 11., chunks=5).reshape((2, 5)), - attrs=dict(name='test1', sensor='test_sensor', mode='L'), - dims=['y', 'x']) + attrs=dict(name="test1", sensor="test_sensor", mode="L"), + dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(ds, enhance=e) @@ -396,9 +396,9 @@ def test_enhance_with_sensor_entry2(self): from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), - attrs=dict(name='test1', units='kelvin', - sensor='test_sensor', mode='L'), - dims=['y', 'x']) + attrs=dict(name="test1", units="kelvin", + sensor="test_sensor", mode="L"), + dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(ds, enhance=e) @@ -411,7 +411,7 @@ def test_enhance_with_sensor_entry2(self): class TestReaderEnhancerConfigs(_BaseCustomEnhancementConfigTests): """Test enhancement configs that use reader name.""" - ENH_FN = 'test_sensor1.yaml' + ENH_FN = "test_sensor1.yaml" # NOTE: The sections are ordered in a special way so that if 'reader' key # isn't provided that we'll get the section we didn't want and all tests @@ -452,11 +452,11 @@ def _get_test_data_array(self): from xarray import DataArray ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs={ - 'name': 'test1', - 'sensor': 'test_sensor1', - 'mode': 'L', + "name": "test1", + "sensor": "test_sensor1", + "mode": "L", }, - dims=['y', 'x']) + dims=["y", "x"]) return ds def _get_enhanced_image(self, data_arr): @@ -512,17 +512,17 @@ def test_filename_matches_writer_name(self): class IgnoreLoader(yaml.SafeLoader): def _ignore_all_tags(self, tag_suffix, node): - return tag_suffix + ' ' + node.value - IgnoreLoader.add_multi_constructor('', IgnoreLoader._ignore_all_tags) + return tag_suffix + " " + node.value + IgnoreLoader.add_multi_constructor("", IgnoreLoader._ignore_all_tags) from satpy._config import glob_config from satpy.writers import read_writer_config - for writer_config in glob_config('writers/*.yaml'): + for writer_config in glob_config("writers/*.yaml"): writer_fn = os.path.basename(writer_config) writer_fn_name = os.path.splitext(writer_fn)[0] writer_info = read_writer_config([writer_config], loader=IgnoreLoader) - self.assertEqual(writer_fn_name, writer_info['name'], + self.assertEqual(writer_fn_name, writer_info["name"], "Writer YAML filename doesn't match writer " "name in the YAML file.") @@ -532,13 +532,13 @@ def test_available_writers(self): writer_names = available_writers() self.assertGreater(len(writer_names), 0) 
self.assertIsInstance(writer_names[0], str) - self.assertIn('geotiff', writer_names) + self.assertIn("geotiff", writer_names) writer_infos = available_writers(as_dict=True) self.assertEqual(len(writer_names), len(writer_infos)) self.assertIsInstance(writer_infos[0], dict) for writer_info in writer_infos: - self.assertIn('name', writer_info) + self.assertIn("name", writer_info) class TestComputeWriterResults(unittest.TestCase): @@ -553,12 +553,12 @@ def setUp(self): ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime(2018, 1, 1, 0, 0, 0)} + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime(2018, 1, 1, 0, 0, 0)} ) self.scn = Scene() - self.scn['test'] = ds1 + self.scn["test"] = ds1 # Temp dir self.base_dir = tempfile.mkdtemp() @@ -578,10 +578,10 @@ def test_empty(self): def test_simple_image(self): """Test writing to PNG file.""" from satpy.writers import compute_writer_results - fname = os.path.join(self.base_dir, 'simple_image.png') + fname = os.path.join(self.base_dir, "simple_image.png") res = self.scn.save_datasets(filename=fname, - datasets=['test'], - writer='simple_image', + datasets=["test"], + writer="simple_image", compute=False) compute_writer_results([res]) self.assertTrue(os.path.isfile(fname)) def test_geotiff(self): """Test writing to GeoTIFF file.""" from satpy.writers import compute_writer_results - fname = os.path.join(self.base_dir, 'geotiff.tif') + fname = os.path.join(self.base_dir, "geotiff.tif") res = self.scn.save_datasets(filename=fname, - datasets=['test'], - writer='geotiff', compute=False) + datasets=["test"], + writer="geotiff", compute=False) compute_writer_results([res]) self.assertTrue(os.path.isfile(fname)) @@ -619,14 +619,14 @@ def test_geotiff(self): def test_multiple_geotiff(self): """Test writing to multiple GeoTIFF files.""" from satpy.writers import compute_writer_results - fname1 = os.path.join(self.base_dir, 'geotiff1.tif') + fname1 = os.path.join(self.base_dir, "geotiff1.tif") res1 = self.scn.save_datasets(filename=fname1, - datasets=['test'], - writer='geotiff', compute=False) - fname2 = os.path.join(self.base_dir, 'geotiff2.tif') + datasets=["test"], + writer="geotiff", compute=False) + fname2 = os.path.join(self.base_dir, "geotiff2.tif") res2 = self.scn.save_datasets(filename=fname2, - datasets=['test'], - writer='geotiff', compute=False) + datasets=["test"], + writer="geotiff", compute=False) compute_writer_results([res1, res2]) self.assertTrue(os.path.isfile(fname1)) self.assertTrue(os.path.isfile(fname2)) @@ -634,14 +634,14 @@ def test_multiple_simple(self): """Test writing to multiple PNG files.""" from satpy.writers import compute_writer_results - fname1 = os.path.join(self.base_dir, 'simple_image1.png') + fname1 = os.path.join(self.base_dir, "simple_image1.png") res1 = self.scn.save_datasets(filename=fname1, - datasets=['test'], - writer='simple_image', compute=False) - fname2 = os.path.join(self.base_dir, 'simple_image2.png') + datasets=["test"], + writer="simple_image", compute=False) + fname2 = os.path.join(self.base_dir, "simple_image2.png") res2 = self.scn.save_datasets(filename=fname2, - datasets=['test'], - writer='simple_image', compute=False) + datasets=["test"], + writer="simple_image", compute=False) compute_writer_results([res1, res2]) self.assertTrue(os.path.isfile(fname1)) self.assertTrue(os.path.isfile(fname2)) @@ -649,14 +649,14 @@ def
test_mixed(self): """Test writing to multiple mixed-type files.""" from satpy.writers import compute_writer_results - fname1 = os.path.join(self.base_dir, 'simple_image3.png') + fname1 = os.path.join(self.base_dir, "simple_image3.png") res1 = self.scn.save_datasets(filename=fname1, - datasets=['test'], - writer='simple_image', compute=False) - fname2 = os.path.join(self.base_dir, 'geotiff3.tif') + datasets=["test"], + writer="simple_image", compute=False) + fname2 = os.path.join(self.base_dir, "geotiff3.tif") res2 = self.scn.save_datasets(filename=fname2, - datasets=['test'], - writer='geotiff', compute=False) + datasets=["test"], + writer="geotiff", compute=False) res3 = [] compute_writer_results([res1, res2, res3]) self.assertTrue(os.path.isfile(fname1)) @@ -675,18 +675,18 @@ def setup_method(self): ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'name': 'test', - 'start_time': datetime(2018, 1, 1, 0, 0, 0), - 'sensor': 'fake_sensor', + "name": "test", + "start_time": datetime(2018, 1, 1, 0, 0, 0), + "sensor": "fake_sensor", } ) ds2 = ds1.copy() - ds2.attrs['sensor'] = {'fake_sensor1', 'fake_sensor2'} + ds2.attrs["sensor"] = {"fake_sensor1", "fake_sensor2"} self.scn = Scene() - self.scn['test'] = ds1 - self.scn['test2'] = ds2 + self.scn["test"] = ds1 + self.scn["test2"] = ds2 # Temp dir self.base_dir = tempfile.mkdtemp() @@ -700,16 +700,16 @@ def teardown_method(self): def test_save_dataset_static_filename(self): """Test saving a dataset with a static filename specified.""" - self.scn.save_datasets(base_dir=self.base_dir, filename='geotiff.tif') - assert os.path.isfile(os.path.join(self.base_dir, 'geotiff.tif')) + self.scn.save_datasets(base_dir=self.base_dir, filename="geotiff.tif") + assert os.path.isfile(os.path.join(self.base_dir, "geotiff.tif")) @pytest.mark.parametrize( - ('fmt_fn', 'exp_fns'), + ("fmt_fn", "exp_fns"), [ - ('geotiff_{name}_{start_time:%Y%m%d_%H%M%S}.tif', - ['geotiff_test_20180101_000000.tif', 'geotiff_test2_20180101_000000.tif']), - ('geotiff_{name}_{sensor}.tif', - ['geotiff_test_fake_sensor.tif', 'geotiff_test2_fake_sensor1-fake_sensor2.tif']), + ("geotiff_{name}_{start_time:%Y%m%d_%H%M%S}.tif", + ["geotiff_test_20180101_000000.tif", "geotiff_test2_20180101_000000.tif"]), + ("geotiff_{name}_{sensor}.tif", + ["geotiff_test_fake_sensor.tif", "geotiff_test2_fake_sensor1-fake_sensor2.tif"]), ] ) def test_save_dataset_dynamic_filename(self, fmt_fn, exp_fns): @@ -721,14 +721,14 @@ def test_save_dataset_dynamic_filename(self, fmt_fn, exp_fns): def test_save_dataset_dynamic_filename_with_dir(self): """Test saving a dataset with a format filename that includes a directory.""" - fmt_fn = os.path.join('{start_time:%Y%m%d}', 'geotiff_{name}_{start_time:%Y%m%d_%H%M%S}.tif') - exp_fn = os.path.join('20180101', 'geotiff_test_20180101_000000.tif') + fmt_fn = os.path.join("{start_time:%Y%m%d}", "geotiff_{name}_{start_time:%Y%m%d_%H%M%S}.tif") + exp_fn = os.path.join("20180101", "geotiff_test_20180101_000000.tif") self.scn.save_datasets(base_dir=self.base_dir, filename=fmt_fn) assert os.path.isfile(os.path.join(self.base_dir, exp_fn)) # change the filename pattern but keep the same directory - fmt_fn2 = os.path.join('{start_time:%Y%m%d}', 'geotiff_{name}_{start_time:%Y%m%d_%H}.tif') - exp_fn2 = os.path.join('20180101', 'geotiff_test_20180101_00.tif') + fmt_fn2 = os.path.join("{start_time:%Y%m%d}", "geotiff_{name}_{start_time:%Y%m%d_%H}.tif") + exp_fn2 = os.path.join("20180101", "geotiff_test_20180101_00.tif") 
self.scn.save_datasets(base_dir=self.base_dir, filename=fmt_fn2) assert os.path.isfile(os.path.join(self.base_dir, exp_fn2)) # the original file should still exist @@ -743,53 +743,53 @@ def setUp(self): from pyresample.geometry import AreaDefinition from trollimage.xrimage import XRImage - proj_dict = {'proj': 'lcc', 'datum': 'WGS84', 'ellps': 'WGS84', - 'lon_0': -95., 'lat_0': 25, 'lat_1': 25, - 'units': 'm', 'no_defs': True} + proj_dict = {"proj": "lcc", "datum": "WGS84", "ellps": "WGS84", + "lon_0": -95., "lat_0": 25, "lat_1": 25, + "units": "m", "no_defs": True} self.area_def = AreaDefinition( - 'test', 'test', 'test', proj_dict, + "test", "test", "test", proj_dict, 200, 400, (-1000., -1500., 1000., 1500.), ) self.orig_rgb_img = XRImage( xr.DataArray(da.arange(75., chunks=10).reshape(3, 5, 5) / 75., - dims=('bands', 'y', 'x'), - coords={'bands': ['R', 'G', 'B']}, - attrs={'name': 'test_ds', 'area': self.area_def}) + dims=("bands", "y", "x"), + coords={"bands": ["R", "G", "B"]}, + attrs={"name": "test_ds", "area": self.area_def}) ) self.orig_l_img = XRImage( xr.DataArray(da.arange(25., chunks=10).reshape(5, 5) / 75., - dims=('y', 'x'), - attrs={'name': 'test_ds', 'area': self.area_def}) + dims=("y", "x"), + attrs={"name": "test_ds", "area": self.area_def}) ) self.decorate = { - 'decorate': [ - {'logo': {'logo_path': '', 'height': 143, 'bg': 'white', 'bg_opacity': 255}}, - {'text': { - 'txt': 'TEST', - 'align': {'top_bottom': 'bottom', 'left_right': 'right'}, - 'font': '', - 'font_size': 22, - 'height': 30, - 'bg': 'black', - 'bg_opacity': 255, - 'line': 'white'}}, - {'scale': { - 'colormap': greys, - 'extend': False, - 'width': 1670, 'height': 110, - 'tick_marks': 5, 'minor_tick_marks': 1, - 'cursor': [0, 0], 'bg': 'white', - 'title': 'TEST TITLE OF SCALE', - 'fontsize': 110, 'align': 'cc' + "decorate": [ + {"logo": {"logo_path": "", "height": 143, "bg": "white", "bg_opacity": 255}}, + {"text": { + "txt": "TEST", + "align": {"top_bottom": "bottom", "left_right": "right"}, + "font": "", + "font_size": 22, + "height": 30, + "bg": "black", + "bg_opacity": 255, + "line": "white"}}, + {"scale": { + "colormap": greys, + "extend": False, + "width": 1670, "height": 110, + "tick_marks": 5, "minor_tick_marks": 1, + "cursor": [0, 0], "bg": "white", + "title": "TEST TITLE OF SCALE", + "fontsize": 110, "align": "cc" }} ] } import_mock = mock.MagicMock() - modules = {'pycoast': import_mock.pycoast, - 'pydecorate': import_mock.pydecorate} - self.module_patcher = mock.patch.dict('sys.modules', modules) + modules = {"pycoast": import_mock.pycoast, + "pydecorate": import_mock.pydecorate} + self.module_patcher = mock.patch.dict("sys.modules", modules) self.module_patcher.start() def tearDown(self): @@ -801,21 +801,21 @@ def test_add_overlay_basic_rgb(self): from pycoast import ContourWriterAGG from satpy.writers import _burn_overlay, add_overlay - coast_dir = '/path/to/coast/data' + coast_dir = "/path/to/coast/data" with mock.patch.object(self.orig_rgb_img, "apply_pil") as apply_pil: apply_pil.return_value = self.orig_rgb_img new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir, fill_value=0) self.assertEqual(self.orig_rgb_img.mode, new_img.mode) new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir) - self.assertEqual(self.orig_rgb_img.mode + 'A', new_img.mode) + self.assertEqual(self.orig_rgb_img.mode + "A", new_img.mode) with mock.patch.object(self.orig_rgb_img, "convert") as convert: convert.return_value = self.orig_rgb_img - overlays = {'coasts': {'outline': 'red'}} + 
overlays = {"coasts": {"outline": "red"}} new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir, overlays=overlays, fill_value=0) pil_args = None - pil_kwargs = {'fill_value': 0} + pil_kwargs = {"fill_value": 0} fun_args = (self.orig_rgb_img.data.area, ContourWriterAGG.return_value, overlays) fun_kwargs = None apply_pil.assert_called_with(_burn_overlay, self.orig_rgb_img.mode, @@ -824,11 +824,11 @@ def test_add_overlay_basic_rgb(self): # test legacy call - grid = {'minor_is_tick': True} - color = 'red' - expected_overlays = {'coasts': {'outline': color, 'width': 0.5, 'level': 1}, - 'borders': {'outline': color, 'width': 0.5, 'level': 1}, - 'grid': grid} + grid = {"minor_is_tick": True} + color = "red" + expected_overlays = {"coasts": {"outline": color, "width": 0.5, "level": 1}, + "borders": {"outline": color, "width": 0.5, "level": 1}, + "grid": grid} with warnings.catch_warnings(record=True) as wns: warnings.simplefilter("always") new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir, @@ -838,7 +838,7 @@ def test_add_overlay_basic_rgb(self): assert "deprecated" in str(wns[0].message) pil_args = None - pil_kwargs = {'fill_value': 0} + pil_kwargs = {"fill_value": 0} fun_args = (self.orig_rgb_img.data.area, ContourWriterAGG.return_value, expected_overlays) fun_kwargs = None apply_pil.assert_called_with(_burn_overlay, self.orig_rgb_img.mode, @@ -848,22 +848,22 @@ def test_add_overlay_basic_rgb(self): def test_add_overlay_basic_l(self): """Test basic add_overlay usage with L data.""" from satpy.writers import add_overlay - new_img = add_overlay(self.orig_l_img, self.area_def, '', fill_value=0) - self.assertEqual('RGB', new_img.mode) - new_img = add_overlay(self.orig_l_img, self.area_def, '') - self.assertEqual('RGBA', new_img.mode) + new_img = add_overlay(self.orig_l_img, self.area_def, "", fill_value=0) + self.assertEqual("RGB", new_img.mode) + new_img = add_overlay(self.orig_l_img, self.area_def, "") + self.assertEqual("RGBA", new_img.mode) def test_add_decorate_basic_rgb(self): """Test basic add_decorate usage with RGB data.""" from satpy.writers import add_decorate new_img = add_decorate(self.orig_rgb_img, **self.decorate) - self.assertEqual('RGBA', new_img.mode) + self.assertEqual("RGBA", new_img.mode) def test_add_decorate_basic_l(self): """Test basic add_decorate usage with L data.""" from satpy.writers import add_decorate new_img = add_decorate(self.orig_l_img, **self.decorate) - self.assertEqual('RGBA', new_img.mode) + self.assertEqual("RGBA", new_img.mode) def test_group_results_by_output_file(tmp_path): diff --git a/satpy/tests/test_yaml_reader.py b/satpy/tests/test_yaml_reader.py index 0d2e057f32..b829f46d23 100644 --- a/satpy/tests/test_yaml_reader.py +++ b/satpy/tests/test_yaml_reader.py @@ -37,47 +37,47 @@ from satpy.tests.utils import make_dataid MHS_YAML_READER_DICT = { - 'reader': {'name': 'mhs_l1c_aapp', - 'description': 'AAPP l1c Reader for AMSU-B/MHS data', - 'sensors': ['mhs'], - 'default_channels': [1, 2, 3, 4, 5], - 'data_identification_keys': {'name': {'required': True}, - 'frequency_double_sideband': - {'type': FrequencyDoubleSideBand}, - 'frequency_range': {'type': FrequencyRange}, - 'resolution': None, - 'polarization': {'enum': ['H', 'V']}, - 'calibration': {'enum': ['brightness_temperature'], 'transitive': True}, - 'modifiers': {'required': True, - 'default': [], - 'type': ModifierTuple}}, - 'config_files': ('satpy/etc/readers/mhs_l1c_aapp.yaml',)}, - 'datasets': {'1': {'name': '1', - 'frequency_range': {'central': 89.0, 'bandwidth': 2.8, 
'unit': 'GHz'}, - 'polarization': 'V', - 'resolution': 16000, - 'calibration': {'brightness_temperature': {'standard_name': 'toa_brightness_temperature'}}, - 'coordinates': ['longitude', 'latitude'], - 'file_type': 'mhs_aapp_l1c'}, - '2': {'name': '2', - 'frequency_range': {'central': 157.0, 'bandwidth': 2.8, 'unit': 'GHz'}, - 'polarization': 'V', - 'resolution': 16000, - 'calibration': {'brightness_temperature': {'standard_name': 'toa_brightness_temperature'}}, - 'coordinates': ['longitude', 'latitude'], - 'file_type': 'mhs_aapp_l1c'}, - '3': {'name': '3', - 'frequency_double_sideband': {'unit': 'GHz', - 'central': 183.31, - 'side': 1.0, - 'bandwidth': 1.0}, - 'polarization': 'V', - 'resolution': 16000, - 'calibration': {'brightness_temperature': {'standard_name': 'toa_brightness_temperature'}}, - 'coordinates': ['longitude', 'latitude'], - 'file_type': 'mhs_aapp_l1c'}}, - 'file_types': {'mhs_aapp_l1c': {'file_reader': BaseFileHandler, - 'file_patterns': [ + "reader": {"name": "mhs_l1c_aapp", + "description": "AAPP l1c Reader for AMSU-B/MHS data", + "sensors": ["mhs"], + "default_channels": [1, 2, 3, 4, 5], + "data_identification_keys": {"name": {"required": True}, + "frequency_double_sideband": + {"type": FrequencyDoubleSideBand}, + "frequency_range": {"type": FrequencyRange}, + "resolution": None, + "polarization": {"enum": ["H", "V"]}, + "calibration": {"enum": ["brightness_temperature"], "transitive": True}, + "modifiers": {"required": True, + "default": [], + "type": ModifierTuple}}, + "config_files": ("satpy/etc/readers/mhs_l1c_aapp.yaml",)}, + "datasets": {"1": {"name": "1", + "frequency_range": {"central": 89.0, "bandwidth": 2.8, "unit": "GHz"}, + "polarization": "V", + "resolution": 16000, + "calibration": {"brightness_temperature": {"standard_name": "toa_brightness_temperature"}}, + "coordinates": ["longitude", "latitude"], + "file_type": "mhs_aapp_l1c"}, + "2": {"name": "2", + "frequency_range": {"central": 157.0, "bandwidth": 2.8, "unit": "GHz"}, + "polarization": "V", + "resolution": 16000, + "calibration": {"brightness_temperature": {"standard_name": "toa_brightness_temperature"}}, + "coordinates": ["longitude", "latitude"], + "file_type": "mhs_aapp_l1c"}, + "3": {"name": "3", + "frequency_double_sideband": {"unit": "GHz", + "central": 183.31, + "side": 1.0, + "bandwidth": 1.0}, + "polarization": "V", + "resolution": 16000, + "calibration": {"brightness_temperature": {"standard_name": "toa_brightness_temperature"}}, + "coordinates": ["longitude", "latitude"], + "file_type": "mhs_aapp_l1c"}}, + "file_types": {"mhs_aapp_l1c": {"file_reader": BaseFileHandler, + "file_patterns": [ 'mhsl1c_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1c']}}} # noqa @@ -91,7 +91,7 @@ def __init__(self, start_time, end_time): self._end_time = end_time self.get_bounding_box = MagicMock() fake_ds = MagicMock() - fake_ds.return_value.dims = ['x', 'y'] + fake_ds.return_value.dims = ["x", "y"] self.get_dataset = fake_ds self.combine_info = MagicMock() @@ -111,38 +111,38 @@ class TestUtils(unittest.TestCase): def test_get_filebase(self): """Check the get_filebase function.""" - base_dir = os.path.join(os.path.expanduser('~'), 'data', - 'satellite', 'Sentinel-3') - base_data = ('S3A_OL_1_EFR____20161020T081224_20161020T081524_' - '20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3') + base_dir = os.path.join(os.path.expanduser("~"), "data", + "satellite", "Sentinel-3") + base_data = ("S3A_OL_1_EFR____20161020T081224_20161020T081524_" + 
"20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3") base_dir = os.path.join(base_dir, base_data) - pattern = ('{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s' - '}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre' - 'ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati' - 've_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:' - '2s}_{collection:3s}.SEN3/geo_coordinates.nc') - pattern = os.path.join(*pattern.split('/')) - filename = os.path.join(base_dir, 'Oa05_radiance.nc') - expected = os.path.join(base_data, 'Oa05_radiance.nc') + pattern = ("{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s" + "}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre" + "ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati" + "ve_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:" + "2s}_{collection:3s}.SEN3/geo_coordinates.nc") + pattern = os.path.join(*pattern.split("/")) + filename = os.path.join(base_dir, "Oa05_radiance.nc") + expected = os.path.join(base_data, "Oa05_radiance.nc") self.assertEqual(yr._get_filebase(filename, pattern), expected) def test_match_filenames(self): """Check that matching filenames works.""" # just a fake path for testing that doesn't have to exist - base_dir = os.path.join(os.path.expanduser('~'), 'data', - 'satellite', 'Sentinel-3') - base_data = ('S3A_OL_1_EFR____20161020T081224_20161020T081524_' - '20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3') + base_dir = os.path.join(os.path.expanduser("~"), "data", + "satellite", "Sentinel-3") + base_data = ("S3A_OL_1_EFR____20161020T081224_20161020T081524_" + "20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3") base_dir = os.path.join(base_dir, base_data) - pattern = ('{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s' - '}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre' - 'ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati' - 've_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:' - '2s}_{collection:3s}.SEN3/geo_coordinates.nc') - pattern = os.path.join(*pattern.split('/')) - filenames = [os.path.join(base_dir, 'Oa05_radiance.nc'), - os.path.join(base_dir, 'geo_coordinates.nc')] - expected = os.path.join(base_dir, 'geo_coordinates.nc') + pattern = ("{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s" + "}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre" + "ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati" + "ve_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:" + "2s}_{collection:3s}.SEN3/geo_coordinates.nc") + pattern = os.path.join(*pattern.split("/")) + filenames = [os.path.join(base_dir, "Oa05_radiance.nc"), + os.path.join(base_dir, "geo_coordinates.nc")] + expected = os.path.join(base_dir, "geo_coordinates.nc") self.assertEqual(yr._match_filenames(filenames, pattern), {expected}) def test_match_filenames_windows_forward_slash(self): @@ -152,28 +152,28 @@ def test_match_filenames_windows_forward_slash(self): """ # just a fake path for testing that doesn't have to exist - base_dir = os.path.join(os.path.expanduser('~'), 'data', - 'satellite', 'Sentinel-3') - base_data = ('S3A_OL_1_EFR____20161020T081224_20161020T081524_' - '20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3') + base_dir = os.path.join(os.path.expanduser("~"), "data", + "satellite", "Sentinel-3") + base_data = ("S3A_OL_1_EFR____20161020T081224_20161020T081524_" + "20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3") base_dir = os.path.join(base_dir, base_data) - pattern = ('{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s' 
- '}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre' - 'ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati' - 've_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:' - '2s}_{collection:3s}.SEN3/geo_coordinates.nc') - pattern = os.path.join(*pattern.split('/')) - filenames = [os.path.join(base_dir, 'Oa05_radiance.nc').replace(os.sep, '/'), - os.path.join(base_dir, 'geo_coordinates.nc').replace(os.sep, '/')] - expected = os.path.join(base_dir, 'geo_coordinates.nc').replace(os.sep, '/') + pattern = ("{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s" + "}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre" + "ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati" + "ve_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:" + "2s}_{collection:3s}.SEN3/geo_coordinates.nc") + pattern = os.path.join(*pattern.split("/")) + filenames = [os.path.join(base_dir, "Oa05_radiance.nc").replace(os.sep, "/"), + os.path.join(base_dir, "geo_coordinates.nc").replace(os.sep, "/")] + expected = os.path.join(base_dir, "geo_coordinates.nc").replace(os.sep, "/") self.assertEqual(yr._match_filenames(filenames, pattern), {expected}) def test_listify_string(self): """Check listify_string.""" self.assertEqual(yr.listify_string(None), []) - self.assertEqual(yr.listify_string('some string'), ['some string']) - self.assertEqual(yr.listify_string(['some', 'string']), - ['some', 'string']) + self.assertEqual(yr.listify_string("some string"), ["some string"]) + self.assertEqual(yr.listify_string(["some", "string"]), + ["some", "string"]) class DummyReader(BaseFileHandler): @@ -203,47 +203,47 @@ class TestFileFileYAMLReaderMultiplePatterns(unittest.TestCase): def setUp(self): """Prepare a reader instance with a fake config.""" - patterns = ['a{something:3s}.bla', - 'a0{something:2s}.bla'] - res_dict = {'reader': {'name': 'fake', - 'sensors': ['canon']}, - 'file_types': {'ftype1': {'name': 'ft1', - 'file_patterns': patterns, - 'file_reader': DummyReader}}, - 'datasets': {'ch1': {'name': 'ch01', - 'wavelength': [0.5, 0.6, 0.7], - 'calibration': 'reflectance', - 'file_type': 'ftype1', - 'coordinates': ['lons', 'lats']}, - 'ch2': {'name': 'ch02', - 'wavelength': [0.7, 0.75, 0.8], - 'calibration': 'counts', - 'file_type': 'ftype1', - 'coordinates': ['lons', 'lats']}, - 'lons': {'name': 'lons', - 'file_type': 'ftype2'}, - 'lats': {'name': 'lats', - 'file_type': 'ftype2'}}} + patterns = ["a{something:3s}.bla", + "a0{something:2s}.bla"] + res_dict = {"reader": {"name": "fake", + "sensors": ["canon"]}, + "file_types": {"ftype1": {"name": "ft1", + "file_patterns": patterns, + "file_reader": DummyReader}}, + "datasets": {"ch1": {"name": "ch01", + "wavelength": [0.5, 0.6, 0.7], + "calibration": "reflectance", + "file_type": "ftype1", + "coordinates": ["lons", "lats"]}, + "ch2": {"name": "ch02", + "wavelength": [0.7, 0.75, 0.8], + "calibration": "counts", + "file_type": "ftype1", + "coordinates": ["lons", "lats"]}, + "lons": {"name": "lons", + "file_type": "ftype2"}, + "lats": {"name": "lats", + "file_type": "ftype2"}}} self.config = res_dict self.reader = yr.FileYAMLReader(self.config, filter_parameters={ - 'start_time': datetime(2000, 1, 1), - 'end_time': datetime(2000, 1, 2)}) + "start_time": datetime(2000, 1, 1), + "end_time": datetime(2000, 1, 2)}) def test_select_from_pathnames(self): """Check select_files_from_pathnames.""" - filelist = ['a001.bla', 'a002.bla', 'abcd.bla', 'k001.bla', 'a003.bli'] + filelist = ["a001.bla", "a002.bla", "abcd.bla", "k001.bla", "a003.bli"] res = 
self.reader.select_files_from_pathnames(filelist) - for expected in ['a001.bla', 'a002.bla', 'abcd.bla']: + for expected in ["a001.bla", "a002.bla", "abcd.bla"]: self.assertIn(expected, res) self.assertEqual(len(res), 3) def test_fn_items_for_ft(self): """Check filename_items_for_filetype.""" - filelist = ['a001.bla', 'a002.bla', 'abcd.bla', 'k001.bla', 'a003.bli'] - ft_info = self.config['file_types']['ftype1'] + filelist = ["a001.bla", "a002.bla", "abcd.bla", "k001.bla", "a003.bli"] + ft_info = self.config["file_types"]["ftype1"] fiter = self.reader.filename_items_for_filetype(filelist, ft_info) filenames = dict(fname for fname in fiter) @@ -251,11 +251,11 @@ def test_fn_items_for_ft(self): def test_create_filehandlers(self): """Check create_filehandlers.""" - filelist = ['a001.bla', 'a002.bla', 'a001.bla', 'a002.bla', - 'abcd.bla', 'k001.bla', 'a003.bli'] + filelist = ["a001.bla", "a002.bla", "a001.bla", "a002.bla", + "abcd.bla", "k001.bla", "a003.bli"] self.reader.create_filehandlers(filelist) - self.assertEqual(len(self.reader.file_handlers['ftype1']), 3) + self.assertEqual(len(self.reader.file_handlers["ftype1"]), 3) def test_serializable(self): """Check that a reader is serializable by dask. @@ -264,8 +264,8 @@ def test_serializable(self): readers. """ from distributed.protocol import deserialize, serialize - filelist = ['a001.bla', 'a002.bla', 'a001.bla', 'a002.bla', - 'abcd.bla', 'k001.bla', 'a003.bli'] + filelist = ["a001.bla", "a002.bla", "a001.bla", "a002.bla", + "abcd.bla", "k001.bla", "a003.bli"] self.reader.create_filehandlers(filelist) cloned_reader = deserialize(*serialize(self.reader)) @@ -281,8 +281,8 @@ def setUp(self): self.config = MHS_YAML_READER_DICT self.reader = yr.FileYAMLReader(MHS_YAML_READER_DICT, filter_parameters={ - 'start_time': datetime(2000, 1, 1), - 'end_time': datetime(2000, 1, 2), + "start_time": datetime(2000, 1, 1), + "end_time": datetime(2000, 1, 2), }) def test_custom_type_with_dict_contents_gets_parsed_correctly(self): @@ -298,70 +298,70 @@ class TestFileFileYAMLReader(unittest.TestCase): def setUp(self): """Prepare a reader instance with a fake config.""" - patterns = ['a{something:3s}.bla'] - res_dict = {'reader': {'name': 'fake', - 'sensors': ['canon']}, - 'file_types': {'ftype1': {'name': 'ft1', - 'file_reader': BaseFileHandler, - 'file_patterns': patterns}}, - 'datasets': {'ch1': {'name': 'ch01', - 'wavelength': [0.5, 0.6, 0.7], - 'calibration': 'reflectance', - 'file_type': 'ftype1', - 'coordinates': ['lons', 'lats']}, - 'ch2': {'name': 'ch02', - 'wavelength': [0.7, 0.75, 0.8], - 'calibration': 'counts', - 'file_type': 'ftype1', - 'coordinates': ['lons', 'lats']}, - 'lons': {'name': 'lons', - 'file_type': 'ftype2'}, - 'lats': {'name': 'lats', - 'file_type': 'ftype2'}}} + patterns = ["a{something:3s}.bla"] + res_dict = {"reader": {"name": "fake", + "sensors": ["canon"]}, + "file_types": {"ftype1": {"name": "ft1", + "file_reader": BaseFileHandler, + "file_patterns": patterns}}, + "datasets": {"ch1": {"name": "ch01", + "wavelength": [0.5, 0.6, 0.7], + "calibration": "reflectance", + "file_type": "ftype1", + "coordinates": ["lons", "lats"]}, + "ch2": {"name": "ch02", + "wavelength": [0.7, 0.75, 0.8], + "calibration": "counts", + "file_type": "ftype1", + "coordinates": ["lons", "lats"]}, + "lons": {"name": "lons", + "file_type": "ftype2"}, + "lats": {"name": "lats", + "file_type": "ftype2"}}} self.config = res_dict self.reader = yr.FileYAMLReader(res_dict, filter_parameters={ - 'start_time': datetime(2000, 1, 1), - 'end_time': datetime(2000, 1, 
2),
+                                            "start_time": datetime(2000, 1, 1),
+                                            "end_time": datetime(2000, 1, 2),
                                         })

     def test_deprecated_passing_config_files(self):
         """Test that we get an exception when config files are passed to init."""
-        self.assertRaises(ValueError, yr.FileYAMLReader, '/path/to/some/file.yaml')
+        self.assertRaises(ValueError, yr.FileYAMLReader, "/path/to/some/file.yaml")

     def test_all_data_ids(self):
         """Check that all dataset ids are returned."""
         for dataid in self.reader.all_dataset_ids:
-            name = dataid['name'].replace('0', '')
-            assert self.config['datasets'][name]['name'] == dataid['name']
-            if 'wavelength' in self.config['datasets'][name]:
-                assert self.config['datasets'][name]['wavelength'] == list(dataid['wavelength'])[:3]
-            if 'calibration' in self.config['datasets'][name]:
-                assert self.config['datasets'][name]['calibration'] == dataid['calibration']
+            name = dataid["name"].replace("0", "")
+            assert self.config["datasets"][name]["name"] == dataid["name"]
+            if "wavelength" in self.config["datasets"][name]:
+                assert self.config["datasets"][name]["wavelength"] == list(dataid["wavelength"])[:3]
+            if "calibration" in self.config["datasets"][name]:
+                assert self.config["datasets"][name]["calibration"] == dataid["calibration"]

     def test_all_dataset_names(self):
         """Get all dataset names."""
         self.assertSetEqual(self.reader.all_dataset_names,
-                            set(['ch01', 'ch02', 'lons', 'lats']))
+                            set(["ch01", "ch02", "lons", "lats"]))

     def test_available_dataset_ids(self):
         """Get ids of the available datasets."""
-        loadables = self.reader.select_files_from_pathnames(['a001.bla'])
+        loadables = self.reader.select_files_from_pathnames(["a001.bla"])
         self.reader.create_filehandlers(loadables)
         self.assertSetEqual(set(self.reader.available_dataset_ids),
-                            {make_dataid(name='ch02',
+                            {make_dataid(name="ch02",
                                          wavelength=(0.7, 0.75, 0.8),
-                                         calibration='counts',
+                                         calibration="counts",
                                          modifiers=()),
-                             make_dataid(name='ch01',
+                             make_dataid(name="ch01",
                                          wavelength=(0.5, 0.6, 0.7),
-                                         calibration='reflectance',
+                                         calibration="reflectance",
                                          modifiers=())})

     def test_available_dataset_names(self):
         """Get names of the available datasets."""
-        loadables = self.reader.select_files_from_pathnames(['a001.bla'])
+        loadables = self.reader.select_files_from_pathnames(["a001.bla"])
         self.reader.create_filehandlers(loadables)
         self.assertSetEqual(set(self.reader.available_dataset_names),
                             set(["ch01", "ch02"]))
@@ -389,15 +389,15 @@ def test_filter_fh_by_time(self):
             res = self.reader.time_matches(fh.start_time, None)
             self.assertEqual(res, idx not in [0, 1, 4, 5])

-    @patch('satpy.readers.yaml_reader.get_area_def')
-    @patch('satpy.readers.yaml_reader.AreaDefBoundary')
-    @patch('satpy.readers.yaml_reader.Boundary')
+    @patch("satpy.readers.yaml_reader.get_area_def")
+    @patch("satpy.readers.yaml_reader.AreaDefBoundary")
+    @patch("satpy.readers.yaml_reader.Boundary")
     def test_file_covers_area(self, bnd, adb, gad):
         """Test that area coverage is checked properly."""
         file_handler = FakeFH(datetime(1999, 12, 31, 10, 0),
                               datetime(2000, 1, 3, 12, 30))

-        self.reader.filter_parameters['area'] = True
+        self.reader.filter_parameters["area"] = True
         bnd.return_value.contour_poly.intersection.return_value = True
         adb.return_value.contour_poly.intersection.return_value = True
         res = self.reader.check_file_covers_area(file_handler, True)
@@ -409,7 +409,7 @@ def test_file_covers_area(self, bnd, adb, gad):
         self.assertFalse(res)

         file_handler.get_bounding_box.side_effect = NotImplementedError()
-        self.reader.filter_parameters['area'] = True
+        
self.reader.filter_parameters["area"] = True res = self.reader.check_file_covers_area(file_handler, True) self.assertTrue(res) @@ -441,9 +441,9 @@ def get_end_time(): datetime(2000, 1, 3, 12, 30)) self.reader.file_handlers = { - '0': [fh1, fh2, fh3, fh4, fh5], - '1': [fh0, fh1, fh2, fh3, fh4, fh5], - '2': [fh2, fh3], + "0": [fh1, fh2, fh3, fh4, fh5], + "1": [fh0, fh1, fh2, fh3, fh4, fh5], + "2": [fh2, fh3], } self.assertEqual(self.reader.start_time, datetime(1999, 12, 30, 0, 0)) @@ -451,24 +451,24 @@ def get_end_time(): def test_select_from_pathnames(self): """Check select_files_from_pathnames.""" - filelist = ['a001.bla', 'a002.bla', 'abcd.bla', 'k001.bla', 'a003.bli'] + filelist = ["a001.bla", "a002.bla", "abcd.bla", "k001.bla", "a003.bli"] res = self.reader.select_files_from_pathnames(filelist) - for expected in ['a001.bla', 'a002.bla', 'abcd.bla']: + for expected in ["a001.bla", "a002.bla", "abcd.bla"]: self.assertIn(expected, res) self.assertEqual(0, len(self.reader.select_files_from_pathnames([]))) def test_select_from_directory(self): """Check select_files_from_directory.""" - filelist = ['a001.bla', 'a002.bla', 'abcd.bla', 'k001.bla', 'a003.bli'] + filelist = ["a001.bla", "a002.bla", "abcd.bla", "k001.bla", "a003.bli"] dpath = mkdtemp() for fname in filelist: - with open(os.path.join(dpath, fname), 'w'): + with open(os.path.join(dpath, fname), "w"): pass res = self.reader.select_files_from_directory(dpath) - for expected in ['a001.bla', 'a002.bla', 'abcd.bla']: + for expected in ["a001.bla", "a002.bla", "abcd.bla"]: self.assertIn(os.path.join(dpath, expected), res) for fname in filelist: @@ -490,10 +490,10 @@ def glob(self, pattern): def test_supports_sensor(self): """Check supports_sensor.""" - self.assertTrue(self.reader.supports_sensor('canon')) - self.assertFalse(self.reader.supports_sensor('nikon')) + self.assertTrue(self.reader.supports_sensor("canon")) + self.assertFalse(self.reader.supports_sensor("nikon")) - @patch('satpy.readers.yaml_reader.StackedAreaDefinition') + @patch("satpy.readers.yaml_reader.StackedAreaDefinition") def test_load_area_def(self, sad): """Test loading the area def for the reader.""" dataid = MagicMock() @@ -509,35 +509,35 @@ def test_load_area_def(self, sad): def test_preferred_filetype(self): """Test finding the preferred filetype.""" - self.reader.file_handlers = {'a': 'a', 'b': 'b', 'c': 'c'} - self.assertEqual(self.reader._preferred_filetype(['c', 'a']), 'c') - self.assertEqual(self.reader._preferred_filetype(['a', 'c']), 'a') - self.assertEqual(self.reader._preferred_filetype(['d', 'e']), None) + self.reader.file_handlers = {"a": "a", "b": "b", "c": "c"} + self.assertEqual(self.reader._preferred_filetype(["c", "a"]), "c") + self.assertEqual(self.reader._preferred_filetype(["a", "c"]), "a") + self.assertEqual(self.reader._preferred_filetype(["d", "e"]), None) def test_get_coordinates_for_dataset_key(self): """Test getting coordinates for a key.""" - ds_q = DataQuery(name='ch01', wavelength=(0.5, 0.6, 0.7, 'µm'), - calibration='reflectance', modifiers=()) + ds_q = DataQuery(name="ch01", wavelength=(0.5, 0.6, 0.7, "µm"), + calibration="reflectance", modifiers=()) res = self.reader._get_coordinates_for_dataset_key(ds_q) self.assertListEqual(res, - [make_dataid(name='lons'), - make_dataid(name='lats')]) + [make_dataid(name="lons"), + make_dataid(name="lats")]) def test_get_coordinates_for_dataset_key_without(self): """Test getting coordinates for a key without coordinates.""" - ds_id = make_dataid(name='lons', + ds_id = make_dataid(name="lons", 
modifiers=()) res = self.reader._get_coordinates_for_dataset_key(ds_id) self.assertListEqual(res, []) def test_get_coordinates_for_dataset_keys(self): """Test getting coordinates for keys.""" - ds_id1 = make_dataid(name='ch01', wavelength=(0.5, 0.6, 0.7), - calibration='reflectance', modifiers=()) - ds_id2 = make_dataid(name='ch02', wavelength=(0.7, 0.75, 0.8), - calibration='counts', modifiers=()) - lons = make_dataid(name='lons', modifiers=()) - lats = make_dataid(name='lats', modifiers=()) + ds_id1 = make_dataid(name="ch01", wavelength=(0.5, 0.6, 0.7), + calibration="reflectance", modifiers=()) + ds_id2 = make_dataid(name="ch02", wavelength=(0.7, 0.75, 0.8), + calibration="counts", modifiers=()) + lons = make_dataid(name="lons", modifiers=()) + lats = make_dataid(name="lats", modifiers=()) res = self.reader._get_coordinates_for_dataset_keys([ds_id1, ds_id2, lons]) @@ -547,16 +547,16 @@ def test_get_coordinates_for_dataset_keys(self): def test_get_file_handlers(self): """Test getting filehandler to load a dataset.""" - ds_id1 = make_dataid(name='ch01', wavelength=(0.5, 0.6, 0.7), - calibration='reflectance', modifiers=()) - self.reader.file_handlers = {'ftype1': 'bla'} + ds_id1 = make_dataid(name="ch01", wavelength=(0.5, 0.6, 0.7), + calibration="reflectance", modifiers=()) + self.reader.file_handlers = {"ftype1": "bla"} - self.assertEqual(self.reader._get_file_handlers(ds_id1), 'bla') + self.assertEqual(self.reader._get_file_handlers(ds_id1), "bla") - lons = make_dataid(name='lons', modifiers=()) + lons = make_dataid(name="lons", modifiers=()) self.assertEqual(self.reader._get_file_handlers(lons), None) - @patch('satpy.readers.yaml_reader.xr') + @patch("satpy.readers.yaml_reader.xr") def test_load_entire_dataset(self, xarray): """Check loading an entire dataset.""" file_handlers = [FakeFH(None, None), FakeFH(None, None), @@ -572,69 +572,69 @@ class TestFileYAMLReaderLoading(unittest.TestCase): def setUp(self): """Prepare a reader instance with a fake config.""" - patterns = ['a{something:3s}.bla'] - res_dict = {'reader': {'name': 'fake', - 'sensors': ['canon']}, - 'file_types': {'ftype1': {'name': 'ft1', - 'file_reader': BaseFileHandler, - 'file_patterns': patterns}}, - 'datasets': {'ch1': {'name': 'ch01', - 'wavelength': [0.5, 0.6, 0.7], - 'calibration': 'reflectance', - 'file_type': 'ftype1'}, + patterns = ["a{something:3s}.bla"] + res_dict = {"reader": {"name": "fake", + "sensors": ["canon"]}, + "file_types": {"ftype1": {"name": "ft1", + "file_reader": BaseFileHandler, + "file_patterns": patterns}}, + "datasets": {"ch1": {"name": "ch01", + "wavelength": [0.5, 0.6, 0.7], + "calibration": "reflectance", + "file_type": "ftype1"}, }} self.config = res_dict self.reader = yr.FileYAMLReader(res_dict, filter_parameters={ - 'start_time': datetime(2000, 1, 1), - 'end_time': datetime(2000, 1, 2), + "start_time": datetime(2000, 1, 1), + "end_time": datetime(2000, 1, 2), }) fake_fh = FakeFH(None, None) self.lons = xr.DataArray(np.ones((2, 2)) * 2, - dims=['y', 'x'], - attrs={'standard_name': 'longitude', - 'name': 'longitude'}) + dims=["y", "x"], + attrs={"standard_name": "longitude", + "name": "longitude"}) self.lats = xr.DataArray(np.ones((2, 2)) * 2, - dims=['y', 'x'], - attrs={'standard_name': 'latitude', - 'name': 'latitude'}) + dims=["y", "x"], + attrs={"standard_name": "latitude", + "name": "latitude"}) self.data = None def _assign_array(dsid, *_args, **_kwargs): - if dsid['name'] == 'longitude': + if dsid["name"] == "longitude": return self.lons - if dsid['name'] == 'latitude': + if 
dsid["name"] == "latitude": return self.lats return self.data fake_fh.get_dataset.side_effect = _assign_array - self.reader.file_handlers = {'ftype1': [fake_fh]} + self.reader.file_handlers = {"ftype1": [fake_fh]} def test_load_dataset_with_builtin_coords(self): """Test loading a dataset with builtin coordinates.""" self.data = xr.DataArray(np.ones((2, 2)), - coords={'longitude': self.lons, - 'latitude': self.lats}, - dims=['y', 'x']) + coords={"longitude": self.lons, + "latitude": self.lats}, + dims=["y", "x"]) self._check_area_for_ch01() def test_load_dataset_with_builtin_coords_in_wrong_order(self): """Test loading a dataset with builtin coordinates in the wrong order.""" self.data = xr.DataArray(np.ones((2, 2)), - coords={'latitude': self.lats, - 'longitude': self.lons}, - dims=['y', 'x']) + coords={"latitude": self.lats, + "longitude": self.lons}, + dims=["y", "x"]) self._check_area_for_ch01() def _check_area_for_ch01(self): - res = self.reader.load(['ch01']) - assert 'area' in res['ch01'].attrs - np.testing.assert_array_equal(res['ch01'].attrs['area'].lons, self.lons) - np.testing.assert_array_equal(res['ch01'].attrs['area'].lats, self.lats) - assert res['ch01'].attrs.get("reader") == "fake" + res = self.reader.load(["ch01"]) + assert "area" in res["ch01"].attrs + np.testing.assert_array_equal(res["ch01"].attrs["area"].lons, self.lons) + np.testing.assert_array_equal(res["ch01"].attrs["area"].lats, self.lats) + assert res["ch01"].attrs.get("reader") == "fake" class TestFileFileYAMLReaderMultipleFileTypes(unittest.TestCase): @@ -648,36 +648,36 @@ def setUp(self): # # For test completeness add one channel (ch3) which is only available # in ftype1. - patterns1 = ['a.nc'] - patterns2 = ['b.nc'] - patterns3 = ['geo.nc'] - res_dict = {'reader': {'name': 'fake', - 'sensors': ['canon']}, - 'file_types': {'ftype1': {'name': 'ft1', - 'file_patterns': patterns1}, - 'ftype2': {'name': 'ft2', - 'file_patterns': patterns2}, - 'ftype3': {'name': 'ft3', - 'file_patterns': patterns3}}, - 'datasets': {'ch1': {'name': 'ch01', - 'wavelength': [0.5, 0.6, 0.7], - 'calibration': 'reflectance', - 'file_type': ['ftype1', 'ftype2'], - 'coordinates': ['lons', 'lats']}, - 'ch2': {'name': 'ch02', - 'wavelength': [0.7, 0.75, 0.8], - 'calibration': 'counts', - 'file_type': ['ftype1', 'ftype2'], - 'coordinates': ['lons', 'lats']}, - 'ch3': {'name': 'ch03', - 'wavelength': [0.8, 0.85, 0.9], - 'calibration': 'counts', - 'file_type': 'ftype1', - 'coordinates': ['lons', 'lats']}, - 'lons': {'name': 'lons', - 'file_type': ['ftype1', 'ftype3']}, - 'lats': {'name': 'lats', - 'file_type': ['ftype1', 'ftype3']}}} + patterns1 = ["a.nc"] + patterns2 = ["b.nc"] + patterns3 = ["geo.nc"] + res_dict = {"reader": {"name": "fake", + "sensors": ["canon"]}, + "file_types": {"ftype1": {"name": "ft1", + "file_patterns": patterns1}, + "ftype2": {"name": "ft2", + "file_patterns": patterns2}, + "ftype3": {"name": "ft3", + "file_patterns": patterns3}}, + "datasets": {"ch1": {"name": "ch01", + "wavelength": [0.5, 0.6, 0.7], + "calibration": "reflectance", + "file_type": ["ftype1", "ftype2"], + "coordinates": ["lons", "lats"]}, + "ch2": {"name": "ch02", + "wavelength": [0.7, 0.75, 0.8], + "calibration": "counts", + "file_type": ["ftype1", "ftype2"], + "coordinates": ["lons", "lats"]}, + "ch3": {"name": "ch03", + "wavelength": [0.8, 0.85, 0.9], + "calibration": "counts", + "file_type": "ftype1", + "coordinates": ["lons", "lats"]}, + "lons": {"name": "lons", + "file_type": ["ftype1", "ftype3"]}, + "lats": {"name": "lats", + "file_type": 
["ftype1", "ftype3"]}}} self.config = res_dict self.reader = yr.FileYAMLReader(self.config) @@ -687,13 +687,13 @@ def test_update_ds_ids_from_file_handlers(self): from functools import partial orig_ids = self.reader.all_ids - for ftype, resol in zip(('ftype1', 'ftype2'), (1, 2)): + for ftype, resol in zip(("ftype1", "ftype2"), (1, 2)): # need to copy this because the dataset infos will be modified _orig_ids = {key: val.copy() for key, val in orig_ids.items()} with patch.dict(self.reader.all_ids, _orig_ids, clear=True), \ patch.dict(self.reader.available_ids, {}, clear=True): # Add a file handler with resolution property - fh = MagicMock(filetype_info={'file_type': ftype}, + fh = MagicMock(filetype_info={"file_type": ftype}, resolution=resol) fh.available_datasets = partial(available_datasets, fh) fh.file_type_matches = partial(file_type_matches, fh) @@ -707,11 +707,11 @@ def test_update_ds_ids_from_file_handlers(self): # Make sure the resolution property has been transferred # correctly from the file handler to the dataset ID for ds_id, ds_info in self.reader.all_ids.items(): - file_types = ds_info['file_type'] + file_types = ds_info["file_type"] if not isinstance(file_types, list): file_types = [file_types] if ftype in file_types: - self.assertEqual(resol, ds_id['resolution']) + self.assertEqual(resol, ds_id["resolution"]) # Test methods @@ -725,10 +725,10 @@ def available_datasets(self, configured_datasets=None): if is_avail is not None: yield is_avail, ds_info - matches = self.file_type_matches(ds_info['file_type']) - if matches and ds_info.get('resolution') != res: + matches = self.file_type_matches(ds_info["file_type"]) + if matches and ds_info.get("resolution") != res: new_info = ds_info.copy() - new_info['resolution'] = res + new_info["resolution"] = res yield True, new_info elif is_avail is None: yield is_avail, ds_info @@ -736,9 +736,9 @@ def available_datasets(self, configured_datasets=None): def file_type_matches(self, ds_ftype): """Fake file_type_matches for testing multiple file types.""" - if isinstance(ds_ftype, str) and ds_ftype == self.filetype_info['file_type']: + if isinstance(ds_ftype, str) and ds_ftype == self.filetype_info["file_type"]: return True - if self.filetype_info['file_type'] in ds_ftype: + if self.filetype_info["file_type"] in ds_ftype: return True return None @@ -764,94 +764,94 @@ def test_load_dataset_with_area_for_single_areas(self, ldwa): original_array = np.arange(6).reshape((2, 3)) area_def = AreaDefinition( - 'test', - 'test', - 'test', - {'proj': 'geos', - 'h': 35785831, - 'type': 'crs'}, + "test", + "test", + "test", + {"proj": "geos", + "h": 35785831, + "type": "crs"}, 3, 2, original_area_extent, ) dummy_ds_xr = xr.DataArray(original_array, - coords={'y': np.arange(2), - 'x': np.arange(3), - 'time': ("y", np.arange(2))}, - attrs={'area': area_def}, - dims=('y', 'x')) + coords={"y": np.arange(2), + "x": np.arange(3), + "time": ("y", np.arange(2))}, + attrs={"area": area_def}, + dims=("y", "x")) # assign the dummy xr as return for the super _load_dataset_with_area method ldwa.return_value = dummy_ds_xr # check no input, nothing should change res = reader._load_dataset_with_area(dsid, coords) np.testing.assert_equal(res.values, original_array) - np.testing.assert_equal(res.attrs['area'].area_extent, original_area_extent) - np.testing.assert_equal(res.coords['y'], np.arange(2)) - np.testing.assert_equal(res.coords['x'], np.arange(3)) - np.testing.assert_equal(res.coords['time'], np.arange(2)) + np.testing.assert_equal(res.attrs["area"].area_extent, 
original_area_extent) + np.testing.assert_equal(res.coords["y"], np.arange(2)) + np.testing.assert_equal(res.coords["x"], np.arange(3)) + np.testing.assert_equal(res.coords["time"], np.arange(2)) # check wrong input with self.assertRaises(ValueError): - _ = reader._load_dataset_with_area(dsid, coords, 'wronginput') + _ = reader._load_dataset_with_area(dsid, coords, "wronginput") # check native orientation, nothing should change - res = reader._load_dataset_with_area(dsid, coords, 'native') + res = reader._load_dataset_with_area(dsid, coords, "native") np.testing.assert_equal(res.values, original_array) - np.testing.assert_equal(res.attrs['area'].area_extent, original_area_extent) - np.testing.assert_equal(res.coords['y'], np.arange(2)) - np.testing.assert_equal(res.coords['x'], np.arange(3)) - np.testing.assert_equal(res.coords['time'], np.arange(2)) + np.testing.assert_equal(res.attrs["area"].area_extent, original_area_extent) + np.testing.assert_equal(res.coords["y"], np.arange(2)) + np.testing.assert_equal(res.coords["x"], np.arange(3)) + np.testing.assert_equal(res.coords["time"], np.arange(2)) # check upright orientation, nothing should change since area is already upright - res = reader._load_dataset_with_area(dsid, coords, 'NE') + res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, original_array) - np.testing.assert_equal(res.attrs['area'].area_extent, original_area_extent) - np.testing.assert_equal(res.coords['y'], np.arange(2)) - np.testing.assert_equal(res.coords['x'], np.arange(3)) - np.testing.assert_equal(res.coords['time'], np.arange(2)) + np.testing.assert_equal(res.attrs["area"].area_extent, original_area_extent) + np.testing.assert_equal(res.coords["y"], np.arange(2)) + np.testing.assert_equal(res.coords["x"], np.arange(3)) + np.testing.assert_equal(res.coords["time"], np.arange(2)) # check that left-right image is flipped correctly - dummy_ds_xr.attrs['area'] = area_def.copy(area_extent=(1500, -1000, -1500, 1000)) + dummy_ds_xr.attrs["area"] = area_def.copy(area_extent=(1500, -1000, -1500, 1000)) ldwa.return_value = dummy_ds_xr.copy() - res = reader._load_dataset_with_area(dsid, coords, 'NE') + res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, np.fliplr(original_array)) - np.testing.assert_equal(res.attrs['area'].area_extent, original_area_extent) - np.testing.assert_equal(res.coords['y'], np.arange(2)) - np.testing.assert_equal(res.coords['x'], np.flip(np.arange(3))) - np.testing.assert_equal(res.coords['time'], np.arange(2)) + np.testing.assert_equal(res.attrs["area"].area_extent, original_area_extent) + np.testing.assert_equal(res.coords["y"], np.arange(2)) + np.testing.assert_equal(res.coords["x"], np.flip(np.arange(3))) + np.testing.assert_equal(res.coords["time"], np.arange(2)) # check that upside down image is flipped correctly - dummy_ds_xr.attrs['area'] = area_def.copy(area_extent=(-1500, 1000, 1500, -1000)) + dummy_ds_xr.attrs["area"] = area_def.copy(area_extent=(-1500, 1000, 1500, -1000)) ldwa.return_value = dummy_ds_xr.copy() - res = reader._load_dataset_with_area(dsid, coords, 'NE') + res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, np.flipud(original_array)) - np.testing.assert_equal(res.attrs['area'].area_extent, original_area_extent) - np.testing.assert_equal(res.coords['y'], np.flip(np.arange(2))) - np.testing.assert_equal(res.coords['x'], np.arange(3)) - np.testing.assert_equal(res.coords['time'], np.flip(np.arange(2))) + 
np.testing.assert_equal(res.attrs["area"].area_extent, original_area_extent) + np.testing.assert_equal(res.coords["y"], np.flip(np.arange(2))) + np.testing.assert_equal(res.coords["x"], np.arange(3)) + np.testing.assert_equal(res.coords["time"], np.flip(np.arange(2))) # check different projection than geos, nothing should be changed area_def = AreaDefinition( - 'test', - 'test', - 'test', - {'proj': 'lcc', - 'lat_1': 25.0, - 'type': 'crs'}, + "test", + "test", + "test", + {"proj": "lcc", + "lat_1": 25.0, + "type": "crs"}, 3, 2, original_area_extent, ) dummy_ds_xr = xr.DataArray(original_array, - dims=('y', 'x'), - attrs={'area': area_def}) + dims=("y", "x"), + attrs={"area": area_def}) ldwa.return_value = dummy_ds_xr - res = reader._load_dataset_with_area(dsid, coords, 'NE') + res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, original_array) - np.testing.assert_equal(res.attrs['area'].area_extent, original_area_extent) + np.testing.assert_equal(res.attrs["area"].area_extent, original_area_extent) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch.object(yr.FileYAMLReader, "_load_dataset_with_area") @@ -871,12 +871,12 @@ def test_load_dataset_with_area_for_stacked_areas(self, ldwa): original_array = np.arange(12).reshape((4, 3)) area_def0 = AreaDefinition( - 'test', - 'test', - 'test', - {'proj': 'geos', - 'h': 35785831, - 'type': 'crs'}, + "test", + "test", + "test", + {"proj": "geos", + "h": 35785831, + "type": "crs"}, 3, 2, original_area_extents[0], @@ -884,36 +884,36 @@ def test_load_dataset_with_area_for_stacked_areas(self, ldwa): area_def1 = area_def0.copy(area_extent=original_area_extents[1]) dummy_ds_xr = xr.DataArray(original_array, - dims=('y', 'x'), - coords={'y': np.arange(4), - 'x': np.arange(3), - 'time': ("y", np.arange(4))}, - attrs={'area': StackedAreaDefinition(area_def0, area_def1)}) + dims=("y", "x"), + coords={"y": np.arange(4), + "x": np.arange(3), + "time": ("y", np.arange(4))}, + attrs={"area": StackedAreaDefinition(area_def0, area_def1)}) # check that left-right image is flipped correctly - dummy_ds_xr.attrs['area'].defs[0] = area_def0.copy(area_extent=(1500, -1000, -1500, 1000)) - dummy_ds_xr.attrs['area'].defs[1] = area_def1.copy(area_extent=(7000, 5000, 3000, 8000)) + dummy_ds_xr.attrs["area"].defs[0] = area_def0.copy(area_extent=(1500, -1000, -1500, 1000)) + dummy_ds_xr.attrs["area"].defs[1] = area_def1.copy(area_extent=(7000, 5000, 3000, 8000)) ldwa.return_value = dummy_ds_xr.copy() - res = reader._load_dataset_with_area(dsid, coords, 'NE') + res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, np.fliplr(original_array)) - np.testing.assert_equal(res.attrs['area'].defs[0].area_extent, original_area_extents[0]) - np.testing.assert_equal(res.attrs['area'].defs[1].area_extent, original_area_extents[1]) - np.testing.assert_equal(res.coords['y'], np.arange(4)) - np.testing.assert_equal(res.coords['x'], np.flip(np.arange(3))) - np.testing.assert_equal(res.coords['time'], np.arange(4)) + np.testing.assert_equal(res.attrs["area"].defs[0].area_extent, original_area_extents[0]) + np.testing.assert_equal(res.attrs["area"].defs[1].area_extent, original_area_extents[1]) + np.testing.assert_equal(res.coords["y"], np.arange(4)) + np.testing.assert_equal(res.coords["x"], np.flip(np.arange(3))) + np.testing.assert_equal(res.coords["time"], np.arange(4)) # check that upside down image is flipped correctly - dummy_ds_xr.attrs['area'].defs[0] = area_def0.copy(area_extent=(-1500, 
1000, 1500, -1000)) - dummy_ds_xr.attrs['area'].defs[1] = area_def1.copy(area_extent=(3000, 8000, 7000, 5000)) + dummy_ds_xr.attrs["area"].defs[0] = area_def0.copy(area_extent=(-1500, 1000, 1500, -1000)) + dummy_ds_xr.attrs["area"].defs[1] = area_def1.copy(area_extent=(3000, 8000, 7000, 5000)) ldwa.return_value = dummy_ds_xr.copy() - res = reader._load_dataset_with_area(dsid, coords, 'NE') + res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, np.flipud(original_array)) # note that the order of the stacked areadefs is flipped here, as expected - np.testing.assert_equal(res.attrs['area'].defs[1].area_extent, original_area_extents[0]) - np.testing.assert_equal(res.attrs['area'].defs[0].area_extent, original_area_extents[1]) - np.testing.assert_equal(res.coords['y'], np.flip(np.arange(4))) - np.testing.assert_equal(res.coords['x'], np.arange(3)) - np.testing.assert_equal(res.coords['time'], np.flip(np.arange(4))) + np.testing.assert_equal(res.attrs["area"].defs[1].area_extent, original_area_extents[0]) + np.testing.assert_equal(res.attrs["area"].defs[0].area_extent, original_area_extents[1]) + np.testing.assert_equal(res.coords["y"], np.flip(np.arange(4))) + np.testing.assert_equal(res.coords["x"], np.arange(3)) + np.testing.assert_equal(res.coords["time"], np.flip(np.arange(4))) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch.object(yr.FileYAMLReader, "_load_dataset_with_area") @@ -936,15 +936,15 @@ def test_load_dataset_with_area_for_swath_def_data(self, ldwa): swath_def = SwathDefinition(lons, lats) dummy_ds_xr = xr.DataArray(original_array, - coords={'y': dim}, - attrs={'area': swath_def}, - dims=('y',)) + coords={"y": dim}, + attrs={"area": swath_def}, + dims=("y",)) # assign the dummy xr as return for the super _load_dataset_with_area method ldwa.return_value = dummy_ds_xr # returned dataset should be unchanged since datasets with a swath definition are not flippable - res = reader._load_dataset_with_area(dsid, coords, 'NE') + res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, original_array) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @@ -963,21 +963,21 @@ def test_load_dataset_with_area_for_data_without_area(self, ldwa): dim = np.arange(3) dummy_ds_xr = xr.DataArray(original_array, - coords={'y': dim}, + coords={"y": dim}, attrs={}, - dims=('y',)) + dims=("y",)) # assign the dummy xr as return for the super _load_dataset_with_area method ldwa.return_value = dummy_ds_xr # returned dataset should be unchanged since datasets without area information are not flippable - res = reader._load_dataset_with_area(dsid, coords, 'NE') + res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, original_array) def _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info): seg_area = MagicMock() - seg_area.crs = 'some_crs' + seg_area.crs = "some_crs" seg_area.area_extent = aex seg_area.shape = ashape get_area_def = MagicMock() @@ -987,9 +987,9 @@ def _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_p get_segment_position_info.return_value = chk_pos_info fh = MagicMock() - filetype_info = {'expected_segments': expected_segments, - 'file_type': 'filetype1'} - filename_info = {'segment': segment} + filetype_info = {"expected_segments": expected_segments, + "file_type": "filetype1"} + filename_info = {"segment": segment} fh.filetype_info = filetype_info fh.filename_info = filename_info 
fh.get_area_def = get_area_def @@ -1011,44 +1011,44 @@ def test_get_expected_segments(self, cfh): fake_fh = MagicMock() fake_fh.filename_info = {} fake_fh.filetype_info = {} - cfh.return_value = {'ft1': [fake_fh]} + cfh.return_value = {"ft1": [fake_fh]} # default (1) - created_fhs = reader.create_filehandlers(['fake.nc']) - es = created_fhs['ft1'][0].filetype_info['expected_segments'] + created_fhs = reader.create_filehandlers(["fake.nc"]) + es = created_fhs["ft1"][0].filetype_info["expected_segments"] self.assertEqual(es, 1) # YAML defined for each file type - fake_fh.filetype_info['expected_segments'] = 2 - created_fhs = reader.create_filehandlers(['fake.nc']) - es = created_fhs['ft1'][0].filetype_info['expected_segments'] + fake_fh.filetype_info["expected_segments"] = 2 + created_fhs = reader.create_filehandlers(["fake.nc"]) + es = created_fhs["ft1"][0].filetype_info["expected_segments"] self.assertEqual(es, 2) # defined both in the filename and the YAML metadata # YAML has priority - fake_fh.filename_info = {'total_segments': 3} - fake_fh.filetype_info = {'expected_segments': 2} - created_fhs = reader.create_filehandlers(['fake.nc']) - es = created_fhs['ft1'][0].filetype_info['expected_segments'] + fake_fh.filename_info = {"total_segments": 3} + fake_fh.filetype_info = {"expected_segments": 2} + created_fhs = reader.create_filehandlers(["fake.nc"]) + es = created_fhs["ft1"][0].filetype_info["expected_segments"] self.assertEqual(es, 2) # defined in the filename - fake_fh.filename_info = {'total_segments': 3} + fake_fh.filename_info = {"total_segments": 3} fake_fh.filetype_info = {} - created_fhs = reader.create_filehandlers(['fake.nc']) - es = created_fhs['ft1'][0].filetype_info['expected_segments'] + created_fhs = reader.create_filehandlers(["fake.nc"]) + es = created_fhs["ft1"][0].filetype_info["expected_segments"] self.assertEqual(es, 3) # check correct FCI segment (aka chunk in the FCI world) number reading into segment - fake_fh.filename_info = {'count_in_repeat_cycle': 5} - created_fhs = reader.create_filehandlers(['fake.nc']) - es = created_fhs['ft1'][0].filename_info['segment'] + fake_fh.filename_info = {"count_in_repeat_cycle": 5} + created_fhs = reader.create_filehandlers(["fake.nc"]) + es = created_fhs["ft1"][0].filename_info["segment"] self.assertEqual(es, 5) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) - @patch('satpy.readers.yaml_reader.FileYAMLReader._load_dataset') - @patch('satpy.readers.yaml_reader.xr') - @patch('satpy.readers.yaml_reader._find_missing_segments') + @patch("satpy.readers.yaml_reader.FileYAMLReader._load_dataset") + @patch("satpy.readers.yaml_reader.xr") + @patch("satpy.readers.yaml_reader._find_missing_segments") def test_load_dataset(self, mss, xr, parent_load_dataset): """Test _load_dataset().""" from satpy.readers.yaml_reader import GEOSegmentYAMLReader @@ -1066,7 +1066,7 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): # Setup input, and output of mocked functions counter = 9 expected_segments = 8 - seg = MagicMock(dims=['y', 'x']) + seg = MagicMock(dims=["y", "x"]) slice_list = expected_segments * [seg, ] failure = False projectable = MagicMock() @@ -1139,10 +1139,10 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): file_handlers) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) - @patch('satpy.readers.yaml_reader._load_area_def') - @patch('satpy.readers.yaml_reader._stack_area_defs') - @patch('satpy.readers.yaml_reader.GEOSegmentYAMLReader._pad_earlier_segments_area') - 
@patch('satpy.readers.yaml_reader.GEOSegmentYAMLReader._pad_later_segments_area') + @patch("satpy.readers.yaml_reader._load_area_def") + @patch("satpy.readers.yaml_reader._stack_area_defs") + @patch("satpy.readers.yaml_reader.GEOSegmentYAMLReader._pad_earlier_segments_area") + @patch("satpy.readers.yaml_reader.GEOSegmentYAMLReader._pad_later_segments_area") def test_load_area_def(self, pesa, plsa, sad, parent_load_area_def): """Test _load_area_def().""" from satpy.readers.yaml_reader import GEOSegmentYAMLReader @@ -1160,7 +1160,7 @@ def test_load_area_def(self, pesa, plsa, sad, parent_load_area_def): parent_load_area_def.assert_called_once_with(dataid, file_handlers) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) - @patch('satpy.readers.yaml_reader.AreaDefinition') + @patch("satpy.readers.yaml_reader.AreaDefinition") def test_pad_later_segments_area(self, AreaDefinition): """Test _pad_later_segments_area().""" from satpy.readers.yaml_reader import GEOSegmentYAMLReader @@ -1172,16 +1172,16 @@ def test_pad_later_segments_area(self, AreaDefinition): ashape = [200, 500] fh_1, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, None) file_handlers = [fh_1] - dataid = 'dataid' + dataid = "dataid" res = reader._pad_later_segments_area(file_handlers, dataid) self.assertEqual(len(res), 2) seg2_extent = (0, 1500, 200, 1000) - expected_call = ('fill', 'fill', 'fill', 'some_crs', 500, 200, + expected_call = ("fill", "fill", "fill", "some_crs", 500, 200, seg2_extent) AreaDefinition.assert_called_once_with(*expected_call) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) - @patch('satpy.readers.yaml_reader.AreaDefinition') + @patch("satpy.readers.yaml_reader.AreaDefinition") def test_pad_earlier_segments_area(self, AreaDefinition): """Test _pad_earlier_segments_area().""" from satpy.readers.yaml_reader import GEOSegmentYAMLReader @@ -1194,12 +1194,12 @@ def test_pad_earlier_segments_area(self, AreaDefinition): fh_2, seg2_area = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, None) file_handlers = [fh_2] - dataid = 'dataid' + dataid = "dataid" area_defs = {2: seg2_area} res = reader._pad_earlier_segments_area(file_handlers, dataid, area_defs) self.assertEqual(len(res), 2) seg1_extent = (0, 500, 200, 0) - expected_call = ('fill', 'fill', 'fill', 'some_crs', 500, 200, + expected_call = ("fill", "fill", "fill", "some_crs", 500, 200, seg1_extent) AreaDefinition.assert_called_once_with(*expected_call) @@ -1208,15 +1208,15 @@ def test_find_missing_segments(self): from satpy.readers.yaml_reader import _find_missing_segments as fms # Dataset with only one segment - filename_info = {'segment': 1} + filename_info = {"segment": 1} fh_seg1 = MagicMock(filename_info=filename_info) - projectable = 'projectable' + projectable = "projectable" get_dataset = MagicMock() get_dataset.return_value = projectable fh_seg1.get_dataset = get_dataset file_handlers = [fh_seg1] - ds_info = {'file_type': []} - dataid = 'dataid' + ds_info = {"file_type": []} + dataid = "dataid" res = fms(file_handlers, ds_info, dataid) counter, expected_segments, slice_list, failure, proj = res self.assertEqual(counter, 2) @@ -1226,18 +1226,18 @@ def test_find_missing_segments(self): self.assertTrue(proj is projectable) # Three expected segments, first and last missing - filename_info = {'segment': 2} - filetype_info = {'expected_segments': 3, - 'file_type': 'foo'} + filename_info = {"segment": 2} + filetype_info = {"expected_segments": 3, + "file_type": "foo"} fh_seg2 = 
MagicMock(filename_info=filename_info, filetype_info=filetype_info) - projectable = 'projectable' + projectable = "projectable" get_dataset = MagicMock() get_dataset.return_value = projectable fh_seg2.get_dataset = get_dataset file_handlers = [fh_seg2] - ds_info = {'file_type': ['foo']} - dataid = 'dataid' + ds_info = {"file_type": ["foo"]} + dataid = "dataid" res = fms(file_handlers, ds_info, dataid) counter, expected_segments, slice_list, failure, proj = res self.assertEqual(counter, 3) @@ -1261,28 +1261,28 @@ def GVSYReader(): @pytest.fixture def fake_geswh(): """Get a fixture of the patched _get_empty_segment_with_height.""" - with patch('satpy.readers.yaml_reader._get_empty_segment_with_height') as geswh: + with patch("satpy.readers.yaml_reader._get_empty_segment_with_height") as geswh: yield geswh @pytest.fixture def fake_xr(): """Get a fixture of the patched xarray.""" - with patch('satpy.readers.yaml_reader.xr') as xr: + with patch("satpy.readers.yaml_reader.xr") as xr: yield xr @pytest.fixture def fake_mss(): """Get a fixture of the patched _find_missing_segments.""" - with patch('satpy.readers.yaml_reader._find_missing_segments') as mss: + with patch("satpy.readers.yaml_reader._find_missing_segments") as mss: yield mss @pytest.fixture def fake_adef(): """Get a fixture of the patched AreaDefinition.""" - with patch('satpy.readers.yaml_reader.AreaDefinition') as adef: + with patch("satpy.readers.yaml_reader.AreaDefinition") as adef: yield adef @@ -1293,14 +1293,14 @@ def test_get_empty_segment(self, GVSYReader, fake_mss, fake_xr, fake_geswh): """Test execution of (overridden) get_empty_segment inside _load_dataset.""" # Setup input, and output of mocked functions for first segment missing chk_pos_info = { - '1km': {'start_position_row': 0, - 'end_position_row': 0, - 'segment_height': 0, - 'grid_width': 11136}, - '2km': {'start_position_row': 140, - 'end_position_row': None, - 'segment_height': 278, - 'grid_width': 5568} + "1km": {"start_position_row": 0, + "end_position_row": 0, + "segment_height": 0, + "grid_width": 11136}, + "2km": {"start_position_row": 140, + "end_position_row": None, + "segment_height": 278, + "grid_width": 5568} } expected_segments = 2 segment = 2 @@ -1308,10 +1308,10 @@ def test_get_empty_segment(self, GVSYReader, fake_mss, fake_xr, fake_geswh): ashape = [278, 5568] fh_2, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) - GVSYReader.file_handlers = {'filetype1': [fh_2]} + GVSYReader.file_handlers = {"filetype1": [fh_2]} counter = 2 - seg = MagicMock(dims=['y', 'x']) + seg = MagicMock(dims=["y", "x"]) slice_list = [None, seg] failure = False projectable = MagicMock() @@ -1325,20 +1325,20 @@ def test_get_empty_segment(self, GVSYReader, fake_mss, fake_xr, fake_geswh): failure, projectable) GVSYReader._load_dataset(dataid, ds_info, [fh_2]) # the return of get_empty_segment - fake_geswh.assert_called_once_with(empty_segment, 139, dim='y') + fake_geswh.assert_called_once_with(empty_segment, 139, dim="y") def test_pad_earlier_segments_area(self, GVSYReader, fake_adef): """Test _pad_earlier_segments_area() for the variable segment case.""" # setting to 0 or None values that shouldn't be relevant chk_pos_info = { - '1km': {'start_position_row': 0, - 'end_position_row': 0, - 'segment_height': 0, - 'grid_width': 11136}, - '2km': {'start_position_row': 140, - 'end_position_row': None, - 'segment_height': 278, - 'grid_width': 5568} + "1km": {"start_position_row": 0, + "end_position_row": 0, + "segment_height": 0, + "grid_width": 
11136}, + "2km": {"start_position_row": 140, + "end_position_row": None, + "segment_height": 278, + "grid_width": 5568} } expected_segments = 2 segment = 2 @@ -1346,8 +1346,8 @@ def test_pad_earlier_segments_area(self, GVSYReader, fake_adef): ashape = [278, 5568] fh_2, seg2_area = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) - GVSYReader.file_handlers = {'filetype1': [fh_2]} - dataid = 'dataid' + GVSYReader.file_handlers = {"filetype1": [fh_2]} + dataid = "dataid" area_defs = {2: seg2_area} res = GVSYReader._pad_earlier_segments_area([fh_2], dataid, area_defs) assert len(res) == 2 @@ -1358,29 +1358,29 @@ def test_pad_earlier_segments_area(self, GVSYReader, fake_adef): # half of the previous size (1000-500)/2=250. # The new area extent lower-left row is therefore 500-250=250 seg1_extent = (0, 500, 200, 250) - expected_call = ('fill', 'fill', 'fill', 'some_crs', 5568, 139, + expected_call = ("fill", "fill", "fill", "some_crs", 5568, 139, seg1_extent) fake_adef.assert_called_once_with(*expected_call) def test_pad_later_segments_area(self, GVSYReader, fake_adef): """Test _pad_later_segments_area() in the variable padding case.""" chk_pos_info = { - '1km': {'start_position_row': None, - 'end_position_row': 11136 - 278, - 'segment_height': 556, - 'grid_width': 11136}, - '2km': {'start_position_row': 0, - 'end_position_row': 0, - 'segment_height': 0, - 'grid_width': 5568}} + "1km": {"start_position_row": None, + "end_position_row": 11136 - 278, + "segment_height": 556, + "grid_width": 11136}, + "2km": {"start_position_row": 0, + "end_position_row": 0, + "segment_height": 0, + "grid_width": 5568}} expected_segments = 2 segment = 1 aex = [0, 1000, 200, 500] ashape = [556, 11136] fh_1, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) - GVSYReader.file_handlers = {'filetype1': [fh_1]} - dataid = 'dataid' + GVSYReader.file_handlers = {"filetype1": [fh_1]} + dataid = "dataid" res = GVSYReader._pad_later_segments_area([fh_1], dataid) assert len(res) == 2 @@ -1389,7 +1389,7 @@ def test_pad_later_segments_area(self, GVSYReader, fake_adef): # therefore, the new vertical area extent should be half of the previous size (1000-500)/2=250. 
# The new area extent lower-left row is therefore 1000+250=1250 seg2_extent = (0, 1250, 200, 1000) - expected_call = ('fill', 'fill', 'fill', 'some_crs', 11136, 278, + expected_call = ("fill", "fill", "fill", "some_crs", 11136, 278, seg2_extent) fake_adef.assert_called_once_with(*expected_call) @@ -1406,45 +1406,45 @@ def side_effect_areadef(a, b, c, crs, width, height, aex): fake_adef.side_effect = side_effect_areadef chk_pos_info = { - '1km': {'start_position_row': 11136 - 600 - 100 + 1, - 'end_position_row': 11136 - 600, - 'segment_height': 100, - 'grid_width': 11136}, - '2km': {'start_position_row': 0, - 'end_position_row': 0, - 'segment_height': 0, - 'grid_width': 5568}} + "1km": {"start_position_row": 11136 - 600 - 100 + 1, + "end_position_row": 11136 - 600, + "segment_height": 100, + "grid_width": 11136}, + "2km": {"start_position_row": 0, + "end_position_row": 0, + "segment_height": 0, + "grid_width": 5568}} expected_segments = 8 segment = 1 aex = [0, 1000, 200, 500] ashape = [100, 11136] fh_1, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) chk_pos_info = { - '1km': {'start_position_row': 11136 - 300 - 100 + 1, - 'end_position_row': 11136 - 300, - 'segment_height': 100, - 'grid_width': 11136}, - '2km': {'start_position_row': 0, - 'end_position_row': 0, - 'segment_height': 0, - 'grid_width': 5568}} + "1km": {"start_position_row": 11136 - 300 - 100 + 1, + "end_position_row": 11136 - 300, + "segment_height": 100, + "grid_width": 11136}, + "2km": {"start_position_row": 0, + "end_position_row": 0, + "segment_height": 0, + "grid_width": 5568}} segment = 4 fh_4, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) chk_pos_info = { - '1km': {'start_position_row': 11136 - 100 + 1, - 'end_position_row': None, - 'segment_height': 100, - 'grid_width': 11136}, - '2km': {'start_position_row': 0, - 'end_position_row': 0, - 'segment_height': 0, - 'grid_width': 5568}} + "1km": {"start_position_row": 11136 - 100 + 1, + "end_position_row": None, + "segment_height": 100, + "grid_width": 11136}, + "2km": {"start_position_row": 0, + "end_position_row": 0, + "segment_height": 0, + "grid_width": 5568}} segment = 8 fh_8, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) - GVSYReader.file_handlers = {'filetype1': [fh_1, fh_4, fh_8]} - dataid = 'dataid' + GVSYReader.file_handlers = {"filetype1": [fh_1, fh_4, fh_8]} + dataid = "dataid" res = GVSYReader._pad_later_segments_area([fh_1, fh_4, fh_8], dataid) assert len(res) == 8 @@ -1473,15 +1473,15 @@ def side_effect_areadef(a, b, c, crs, width, height, aex): # The second padded segment has 67px height -> 500*67/100=335 area extent height ->1330+335=1665 # The first padded segment has 67px height -> 500*67/100=335 area extent height ->1665+335=2000 assert fake_adef.call_count == 5 - expected_call1 = ('fill', 'fill', 'fill', 'some_crs', 11136, 100, + expected_call1 = ("fill", "fill", "fill", "some_crs", 11136, 100, (0, 1500.0, 200, 1000)) - expected_call2 = ('fill', 'fill', 'fill', 'some_crs', 11136, 100, + expected_call2 = ("fill", "fill", "fill", "some_crs", 11136, 100, (0, 2000.0, 200, 1500)) - expected_call3 = ('fill', 'fill', 'fill', 'some_crs', 11136, 66, + expected_call3 = ("fill", "fill", "fill", "some_crs", 11136, 66, (0, 1330.0, 200, 1000)) - expected_call4 = ('fill', 'fill', 'fill', 'some_crs', 11136, 67, + expected_call4 = ("fill", "fill", "fill", "some_crs", 11136, 67, (0, 1665.0, 200, 1330.0)) - expected_call5 = ('fill', 'fill', 
'fill', 'some_crs', 11136, 67, + expected_call5 = ("fill", "fill", "fill", "some_crs", 11136, 67, (0, 2000.0, 200, 1665.0)) fake_adef.side_effect = None @@ -1496,22 +1496,22 @@ def test_get_empty_segment_with_height(self): """Test _get_empty_segment_with_height().""" from satpy.readers.yaml_reader import _get_empty_segment_with_height as geswh - dim = 'y' + dim = "y" # check expansion of empty segment - empty_segment = xr.DataArray(np.ones((139, 5568)), dims=['y', 'x']) + empty_segment = xr.DataArray(np.ones((139, 5568)), dims=["y", "x"]) new_height = 140 new_empty_segment = geswh(empty_segment, new_height, dim) assert new_empty_segment.shape == (140, 5568) # check reduction of empty segment - empty_segment = xr.DataArray(np.ones((140, 5568)), dims=['y', 'x']) + empty_segment = xr.DataArray(np.ones((140, 5568)), dims=["y", "x"]) new_height = 139 new_empty_segment = geswh(empty_segment, new_height, dim) assert new_empty_segment.shape == (139, 5568) # check that empty segment is not modified if it has the right height already - empty_segment = xr.DataArray(np.ones((140, 5568)), dims=['y', 'x']) + empty_segment = xr.DataArray(np.ones((140, 5568)), dims=["y", "x"]) new_height = 140 new_empty_segment = geswh(empty_segment, new_height, dim) assert new_empty_segment is empty_segment diff --git a/satpy/tests/utils.py b/satpy/tests/utils.py index c87cd1055c..70f1ec80e5 100644 --- a/satpy/tests/utils.py +++ b/satpy/tests/utils.py @@ -69,7 +69,7 @@ def wrapper(self, *args, **kwargs): def convert_file_content_to_data_array(file_content, attrs=tuple(), - dims=('z', 'y', 'x')): + dims=("z", "y", "x")): """Help old reader tests that still use numpy arrays. A lot of old reader tests still use numpy arrays and depend on the @@ -98,8 +98,8 @@ def convert_file_content_to_data_array(file_content, attrs=tuple(), for key, val in file_content.items(): da_attrs = {} for a in attrs: - if key + '/attr/' + a in file_content: - da_attrs[a] = file_content[key + '/attr/' + a] + if key + "/attr/" + a in file_content: + da_attrs[a] = file_content[key + "/attr/" + a] if isinstance(val, np.ndarray): val = da.from_array(val, chunks=4096) @@ -120,14 +120,14 @@ def _filter_datasets(all_ds, names_or_ids): str_filter = [ds_name for ds_name in names_or_ids if isinstance(ds_name, str)] id_filter = [ds_id for ds_id in names_or_ids if not isinstance(ds_id, str)] for ds_id in all_ds: - if ds_id in id_filter or ds_id['name'] in str_filter: + if ds_id in id_filter or ds_id["name"] in str_filter: yield ds_id def _swath_def_of_data_arrays(rows, cols): return SwathDefinition( - DataArray(da.zeros((rows, cols)), dims=('y', 'x')), - DataArray(da.zeros((rows, cols)), dims=('y', 'x')), + DataArray(da.zeros((rows, cols)), dims=("y", "x")), + DataArray(da.zeros((rows, cols)), dims=("y", "x")), ) @@ -136,14 +136,14 @@ class FakeModifier(ModifierBase): def _handle_res_change(self, datasets, info): # assume this is used on the 500m version of ds5 - info['resolution'] = 250 + info["resolution"] = 250 rep_data_arr = datasets[0] - y_size = rep_data_arr.sizes['y'] - x_size = rep_data_arr.sizes['x'] + y_size = rep_data_arr.sizes["y"] + x_size = rep_data_arr.sizes["x"] data = da.zeros((y_size * 2, x_size * 2)) - if isinstance(rep_data_arr.attrs['area'], SwathDefinition): + if isinstance(rep_data_arr.attrs["area"], SwathDefinition): area = _swath_def_of_data_arrays(y_size * 2, x_size * 2) - info['area'] = area + info["area"] = area else: raise NotImplementedError("'res_change' modifier can't handle " "AreaDefinition changes yet.") @@ -151,20 +151,20 @@ 
def _handle_res_change(self, datasets, info): def __call__(self, datasets, optional_datasets=None, **kwargs): """Modify provided data depending on the modifier name and input data.""" - if self.attrs['optional_prerequisites']: - for opt_dep in self.attrs['optional_prerequisites']: - opt_dep_name = opt_dep if isinstance(opt_dep, str) else opt_dep.get('name', '') - if 'NOPE' in opt_dep_name or 'fail' in opt_dep_name: + if self.attrs["optional_prerequisites"]: + for opt_dep in self.attrs["optional_prerequisites"]: + opt_dep_name = opt_dep if isinstance(opt_dep, str) else opt_dep.get("name", "") + if "NOPE" in opt_dep_name or "fail" in opt_dep_name: continue assert (optional_datasets is not None and len(optional_datasets)) - resolution = datasets[0].attrs.get('resolution') - mod_name = self.attrs['modifiers'][-1] + resolution = datasets[0].attrs.get("resolution") + mod_name = self.attrs["modifiers"][-1] data = datasets[0].data i = datasets[0].attrs.copy() - if mod_name == 'res_change' and resolution is not None: + if mod_name == "res_change" and resolution is not None: data = self._handle_res_change(datasets, i) - elif 'incomp_areas' in mod_name: + elif "incomp_areas" in mod_name: raise IncompatibleAreas( "Test modifier 'incomp_areas' always raises IncompatibleAreas") self.apply_modifier_info(datasets[0].attrs, i) @@ -184,27 +184,27 @@ def __call__(self, projectables, nonprojectables=None, **kwargs): if nonprojectables: self.match_data_arrays(nonprojectables) info = self.attrs.copy() - if self.attrs['name'] in ('comp14', 'comp26'): + if self.attrs["name"] in ("comp14", "comp26"): # used as a test when composites update the dataset id with # information from prereqs - info['resolution'] = 555 - if self.attrs['name'] in ('comp24', 'comp25'): + info["resolution"] = 555 + if self.attrs["name"] in ("comp24", "comp25"): # other composites that copy the resolution from inputs - info['resolution'] = projectables[0].attrs.get('resolution') - if len(projectables) != len(self.attrs['prerequisites']): + info["resolution"] = projectables[0].attrs.get("resolution") + if len(projectables) != len(self.attrs["prerequisites"]): raise ValueError("Not enough prerequisite datasets passed") info.update(kwargs) if projectables: - info['area'] = projectables[0].attrs['area'] + info["area"] = projectables[0].attrs["area"] dim_sizes = projectables[0].sizes else: # static_image - dim_sizes = {'y': 4, 'x': 5} - return DataArray(data=da.zeros((dim_sizes['y'], dim_sizes['x'], 3)), + dim_sizes = {"y": 4, "x": 5} + return DataArray(data=da.zeros((dim_sizes["y"], dim_sizes["x"], 3)), attrs=info, - dims=['y', 'x', 'bands'], - coords={'bands': ['R', 'G', 'B']}) + dims=["y", "x", "bands"], + coords={"bands": ["R", "G", "B"]}) class FakeFileHandler(BaseFileHandler): @@ -228,21 +228,21 @@ def end_time(self): @property def sensor_names(self): """Get sensor name from filetype configuration.""" - sensor = self.filetype_info.get('sensor', 'fake_sensor') + sensor = self.filetype_info.get("sensor", "fake_sensor") return {sensor} def get_dataset(self, data_id: DataID, ds_info: dict): """Get fake DataArray for testing.""" - if data_id['name'] == 'ds9_fail_load': + if data_id["name"] == "ds9_fail_load": raise KeyError("Can't load '{}' because it is supposed to " - "fail.".format(data_id['name'])) + "fail.".format(data_id["name"])) attrs = data_id.to_dict() attrs.update(ds_info) - attrs['sensor'] = self.filetype_info.get('sensor', 'fake_sensor') - attrs['platform_name'] = 'fake_platform' - attrs['start_time'] = self.start_time - 
attrs['end_time'] = self.end_time
-        res = attrs.get('resolution', 250)
+        attrs["sensor"] = self.filetype_info.get("sensor", "fake_sensor")
+        attrs["platform_name"] = "fake_platform"
+        attrs["start_time"] = self.start_time
+        attrs["end_time"] = self.end_time
+        res = attrs.get("resolution", 250)
         rows = cols = {
             250: 20,
             500: 10,
@@ -250,7 +250,7 @@ def get_dataset(self, data_id: DataID, ds_info: dict):
         }.get(res, 5)
         return DataArray(data=da.zeros((rows, cols)),
                          attrs=attrs,
-                         dims=['y', 'x'])
+                         dims=["y", "x"])

     def available_datasets(self, configured_datasets=None):
         """Report YAML datasets available unless 'not_available' is specified during creation."""
@@ -262,7 +262,7 @@ def available_datasets(self, configured_datasets=None):
                 # file handler so let's yield early
                 yield is_avail, ds_info
                 continue
-            ft_matches = self.file_type_matches(ds_info['file_type'])
+            ft_matches = self.file_type_matches(ds_info["file_type"])
             if not ft_matches:
                 yield None, ds_info
                 continue
diff --git a/satpy/tests/writer_tests/test_awips_tiled.py b/satpy/tests/writer_tests/test_awips_tiled.py
index a47552a708..eab72e8f5b 100644
--- a/satpy/tests/writer_tests/test_awips_tiled.py
+++ b/satpy/tests/writer_tests/test_awips_tiled.py
@@ -42,10 +42,10 @@


 def _check_production_location(ds):
-    if 'production_site' in ds.attrs:
-        prod_loc_name = 'production_site'
-    elif 'production_location' in ds.attrs:
-        prod_loc_name = 'producton_location'
+    if "production_site" in ds.attrs:
+        prod_loc_name = "production_site"
+    elif "production_location" in ds.attrs:
+        prod_loc_name = "production_location"
     else:
         return
@@ -62,67 +62,67 @@ def check_required_properties(unmasked_ds, masked_ds):

 def _check_required_common_attributes(ds):
     """Check common properties of the created AWIPS tiles for validity."""
-    for attr_name in ('tile_row_offset', 'tile_column_offset',
-                      'product_tile_height', 'product_tile_width',
-                      'number_product_tiles',
-                      'product_rows', 'product_columns'):
+    for attr_name in ("tile_row_offset", "tile_column_offset",
+                      "product_tile_height", "product_tile_width",
+                      "number_product_tiles",
+                      "product_rows", "product_columns"):
         assert attr_name in ds.attrs
     _check_production_location(ds)

     for data_arr in ds.data_vars.values():
         if data_arr.ndim == 0:
             # grid mapping variable
-            assert 'grid_mapping_name' in data_arr.attrs
+            assert "grid_mapping_name" in data_arr.attrs
             continue
-        assert data_arr.encoding.get('zlib', False)
-        assert 'grid_mapping' in data_arr.attrs
-        assert data_arr.attrs['grid_mapping'] in ds
-        assert 'units' in data_arr.attrs
+        assert data_arr.encoding.get("zlib", False)
+        assert "grid_mapping" in data_arr.attrs
+        assert data_arr.attrs["grid_mapping"] in ds
+        assert "units" in data_arr.attrs
         if data_arr.name != "DQF":
             assert data_arr.dtype == np.int16
             assert data_arr.attrs["_Unsigned"] == "true"


 def _check_scaled_x_coordinate_variable(ds, masked_ds):
-    assert 'x' in ds.coords
-    x_coord = ds.coords['x']
+    assert "x" in ds.coords
+    x_coord = ds.coords["x"]
     np.testing.assert_equal(np.diff(x_coord), 1)
     x_attrs = x_coord.attrs
-    assert x_attrs.get('standard_name') == 'projection_x_coordinate'
-    assert x_attrs.get('units') == 'meters'
-    assert 'scale_factor' in x_attrs
-    assert x_attrs['scale_factor'] > 0
-    assert 'add_offset' in x_attrs
+    assert x_attrs.get("standard_name") == "projection_x_coordinate"
+    assert x_attrs.get("units") == "meters"
+    assert "scale_factor" in x_attrs
+    assert x_attrs["scale_factor"] > 0
+    assert "add_offset" in x_attrs

-    unscaled_x = 
masked_ds.coords["x"].values assert (np.diff(unscaled_x) > 0).all() def _check_scaled_y_coordinate_variable(ds, masked_ds): - assert 'y' in ds.coords - y_coord = ds.coords['y'] + assert "y" in ds.coords + y_coord = ds.coords["y"] np.testing.assert_equal(np.diff(y_coord), 1) y_attrs = y_coord.attrs - assert y_attrs.get('standard_name') == 'projection_y_coordinate' - assert y_attrs.get('units') == 'meters' - assert 'scale_factor' in y_attrs - assert y_attrs['scale_factor'] < 0 - assert 'add_offset' in y_attrs + assert y_attrs.get("standard_name") == "projection_y_coordinate" + assert y_attrs.get("units") == "meters" + assert "scale_factor" in y_attrs + assert y_attrs["scale_factor"] < 0 + assert "add_offset" in y_attrs - unscaled_y = masked_ds.coords['y'].values + unscaled_y = masked_ds.coords["y"].values assert (np.diff(unscaled_y) < 0).all() def _get_test_area(shape=(200, 100), crs=None, extents=None): from pyresample.geometry import AreaDefinition if crs is None: - crs = CRS('+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs') + crs = CRS("+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs") if extents is None: extents = (-1000., -1500., 1000., 1500.) area_def = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", crs, shape[1], shape[0], @@ -138,11 +138,11 @@ def _get_test_data(shape=(200, 100), chunks=50): def _get_test_lcc_data(dask_arr, area_def, extra_attrs=None): attrs = dict( - name='test_ds', - platform_name='PLAT', - sensor='SENSOR', - units='1', - standard_name='toa_bidirectional_reflectance', + name="test_ds", + platform_name="PLAT", + sensor="SENSOR", + units="1", + standard_name="toa_bidirectional_reflectance", area=area_def, start_time=START_TIME, end_time=END_TIME @@ -151,7 +151,7 @@ def _get_test_lcc_data(dask_arr, area_def, extra_attrs=None): attrs.update(extra_attrs) ds = xr.DataArray( dask_arr, - dims=('y', 'x') if dask_arr.ndim == 2 else ('bands', 'y', 'x'), + dims=("y", "x") if dask_arr.ndim == 2 else ("bands", "y", "x"), attrs=attrs, ) return update_resampled_coords(ds, ds, area_def) @@ -165,13 +165,13 @@ def test_init(self, tmp_path): from satpy.writers.awips_tiled import AWIPSTiledWriter AWIPSTiledWriter(base_dir=str(tmp_path)) - @pytest.mark.parametrize('use_save_dataset', + @pytest.mark.parametrize("use_save_dataset", [(False,), (True,)]) @pytest.mark.parametrize( - ('extra_attrs', 'expected_filename'), + ("extra_attrs", "expected_filename"), [ - ({}, 'TESTS_AII_PLAT_SENSOR_test_ds_TEST_T001_20180101_1200.nc'), - ({'sensor': 'viirs', 'name': 'I01'}, 'TESTS_AII_PLAT_viirs_I01_TEST_T001_20180101_1200.nc'), + ({}, "TESTS_AII_PLAT_SENSOR_test_ds_TEST_T001_20180101_1200.nc"), + ({"sensor": "viirs", "name": "I01"}, "TESTS_AII_PLAT_viirs_I01_TEST_T001_20180101_1200.nc"), ] ) def test_basic_numbered_1_tile(self, extra_attrs, expected_filename, use_save_dataset, caplog, tmp_path): @@ -183,21 +183,21 @@ def test_basic_numbered_1_tile(self, extra_attrs, expected_filename, use_save_da with caplog.at_level(logging.DEBUG): w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True) if use_save_dataset: - w.save_dataset(input_data_arr, sector_id='TEST', source_name='TESTS') + w.save_dataset(input_data_arr, sector_id="TEST", source_name="TESTS") else: - w.save_datasets([input_data_arr], sector_id='TEST', source_name='TESTS') + w.save_datasets([input_data_arr], sector_id="TEST", source_name="TESTS") assert "no routine matching" not in caplog.text assert "Can't format string" not in 
caplog.text - all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc')) + all_files = glob(os.path.join(str(tmp_path), "TESTS_AII*.nc")) assert len(all_files) == 1 assert os.path.basename(all_files[0]) == expected_filename for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) output_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, output_ds) - scale_factor = output_ds['data'].encoding['scale_factor'] - np.testing.assert_allclose(input_data_arr.values, output_ds['data'].data, + scale_factor = output_ds["data"].encoding["scale_factor"] + np.testing.assert_allclose(input_data_arr.values, output_ds["data"].data, atol=scale_factor / 2) def test_units_length_warning(self, tmp_path): @@ -208,8 +208,8 @@ def test_units_length_warning(self, tmp_path): input_data_arr = _get_test_lcc_data(data, area_def) input_data_arr.attrs["units"] = "this is a really long units string" w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True) - with pytest.warns(UserWarning, match=r'.*this is a really long units string.*too long.*'): - w.save_dataset(input_data_arr, sector_id='TEST', source_name='TESTS') + with pytest.warns(UserWarning, match=r".*this is a really long units string.*too long.*"): + w.save_dataset(input_data_arr, sector_id="TEST", source_name="TESTS") @pytest.mark.parametrize( ("tile_count", "tile_size"), @@ -228,33 +228,33 @@ def test_basic_numbered_tiles(self, tile_count, tile_size, tmp_path): input_data_arr = _get_test_lcc_data(data, area_def) w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True) save_kwargs = dict( - sector_id='TEST', + sector_id="TEST", source_name="TESTS", tile_count=tile_count, tile_size=tile_size, - extra_global_attrs={'my_global': 'TEST'} + extra_global_attrs={"my_global": "TEST"} ) should_error = tile_count is None and tile_size is None if should_error: with dask.config.set(scheduler=CustomScheduler(0)), \ - pytest.raises(ValueError, match=r'Either.*tile_count.*'): + pytest.raises(ValueError, match=r"Either.*tile_count.*"): w.save_datasets([input_data_arr], **save_kwargs) else: with dask.config.set(scheduler=CustomScheduler(1 * 2)): # precompute=*2 w.save_datasets([input_data_arr], **save_kwargs) - all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc')) + all_files = glob(os.path.join(str(tmp_path), "TESTS_AII*.nc")) expected_num_files = 0 if should_error else 9 assert len(all_files) == expected_num_files for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) - assert unmasked_ds.attrs['my_global'] == 'TEST' - assert unmasked_ds.attrs['sector_id'] == 'TEST' - assert 'physical_element' in unmasked_ds.attrs - stime = input_data_arr.attrs['start_time'] - assert unmasked_ds.attrs['start_date_time'] == stime.strftime('%Y-%m-%dT%H:%M:%S') + assert unmasked_ds.attrs["my_global"] == "TEST" + assert unmasked_ds.attrs["sector_id"] == "TEST" + assert "physical_element" in unmasked_ds.attrs + stime = input_data_arr.attrs["start_time"] + assert unmasked_ds.attrs["start_date_time"] == stime.strftime("%Y-%m-%dT%H:%M:%S") def test_basic_lettered_tiles(self, tmp_path): """Test creating a lettered grid.""" @@ -265,14 +265,14 @@ def test_basic_lettered_tiles(self, tmp_path): extents=(-1000000., -1500000., 1000000., 1500000.)) ds = _get_test_lcc_data(data, area_def) # tile_count should be ignored since we specified lettered_grid - w.save_datasets([ds], sector_id='LCC', source_name="TESTS", 
tile_count=(3, 3), lettered_grid=True) - all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc')) + w.save_datasets([ds], sector_id="LCC", source_name="TESTS", tile_count=(3, 3), lettered_grid=True) + all_files = glob(os.path.join(str(tmp_path), "TESTS_AII*.nc")) assert len(all_files) == 16 for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) - assert masked_ds.attrs['start_date_time'] == START_TIME.strftime('%Y-%m-%dT%H:%M:%S') + assert masked_ds.attrs["start_date_time"] == START_TIME.strftime("%Y-%m-%dT%H:%M:%S") def test_basic_lettered_tiles_diff_projection(self, tmp_path): """Test creating a lettered grid from data with differing projection.""" @@ -284,20 +284,20 @@ def test_basic_lettered_tiles_diff_projection(self, tmp_path): extents=(-1000000., -1500000., 1000000., 1500000.)) ds = _get_test_lcc_data(data, area_def) # tile_count should be ignored since we specified lettered_grid - w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) - all_files = sorted(glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc'))) + w.save_datasets([ds], sector_id="LCC", source_name="TESTS", tile_count=(3, 3), lettered_grid=True) + all_files = sorted(glob(os.path.join(str(tmp_path), "TESTS_AII*.nc"))) assert len(all_files) == 24 assert "TC02" in all_files[0] # the first tile should be TC02 for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) - assert masked_ds.attrs['start_date_time'] == START_TIME.strftime('%Y-%m-%dT%H:%M:%S') + assert masked_ds.attrs["start_date_time"] == START_TIME.strftime("%Y-%m-%dT%H:%M:%S") def test_lettered_tiles_update_existing(self, tmp_path): """Test updating lettered tiles with additional data.""" from satpy.writers.awips_tiled import AWIPSTiledWriter - first_base_dir = os.path.join(str(tmp_path), 'first') + first_base_dir = os.path.join(str(tmp_path), "first") w = AWIPSTiledWriter(base_dir=first_base_dir, compress=True) shape = (2000, 1000) data = np.linspace(0., 1., shape[0] * shape[1], dtype=np.float32).reshape(shape) @@ -308,11 +308,11 @@ def test_lettered_tiles_update_existing(self, tmp_path): extents=(-1000000., -1500000., 1000000., 1500000.)) ds = _get_test_lcc_data(data, area_def) # tile_count should be ignored since we specified lettered_grid - w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) - all_files = sorted(glob(os.path.join(first_base_dir, 'TESTS_AII*.nc'))) + w.save_datasets([ds], sector_id="LCC", source_name="TESTS", tile_count=(3, 3), lettered_grid=True) + all_files = sorted(glob(os.path.join(first_base_dir, "TESTS_AII*.nc"))) assert len(all_files) == 16 first_files = [] - second_base_dir = os.path.join(str(tmp_path), 'second') + second_base_dir = os.path.join(str(tmp_path), "second") os.makedirs(second_base_dir) for fn in all_files: new_fn = fn.replace(first_base_dir, second_base_dir) @@ -335,23 +335,23 @@ def test_lettered_tiles_update_existing(self, tmp_path): # file multiple times...sometimes. If we limit dask to one worker # it seems to work fine.
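 # (dask.config.set acts as a context manager below, so the one-worker limit only applies to this save; forcing dask's synchronous scheduler instead, e.g. dask.config.set(scheduler="synchronous"), should serialize the delayed writes in the same way.)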
with dask.config.set(num_workers=1): - w.save_datasets([ds2], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) - all_files = glob(os.path.join(second_base_dir, 'TESTS_AII*.nc')) + w.save_datasets([ds2], sector_id="LCC", source_name="TESTS", tile_count=(3, 3), lettered_grid=True) + all_files = glob(os.path.join(second_base_dir, "TESTS_AII*.nc")) # 16 original tiles + 4 new tiles assert len(all_files) == 20 # these tiles should be the right-most edge of the first image - first_right_edge_files = [x for x in first_files if 'P02' in x or 'P04' in x or 'V02' in x or 'V04' in x] + first_right_edge_files = [x for x in first_files if "P02" in x or "P04" in x or "V02" in x or "V04" in x] for new_file in first_right_edge_files: orig_file = new_file.replace(second_base_dir, first_base_dir) orig_nc = xr.open_dataset(orig_file) - orig_data = orig_nc['data'].values + orig_data = orig_nc["data"].values if not np.isnan(orig_data).any(): # we only care about the tiles that had NaNs originally continue new_nc = xr.open_dataset(new_file) - new_data = new_nc['data'].values + new_data = new_nc["data"].values # there should be at least some areas of the file # that old data was present and hasn't been replaced np.testing.assert_allclose(orig_data[:, :20], new_data[:, :20]) @@ -369,17 +369,17 @@ def test_lettered_tiles_sector_ref(self, tmp_path): area_def = _get_test_area(shape=(2000, 1000), extents=(-1000000., -1500000., 1000000., 1500000.)) ds = _get_test_lcc_data(data, area_def) - w.save_datasets([ds], sector_id='LCC', source_name="TESTS", + w.save_datasets([ds], sector_id="LCC", source_name="TESTS", lettered_grid=True, use_sector_reference=True, use_end_time=True) - all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc')) + all_files = glob(os.path.join(str(tmp_path), "TESTS_AII*.nc")) assert len(all_files) == 16 for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) - expected_start = (START_TIME + timedelta(minutes=20)).strftime('%Y-%m-%dT%H:%M:%S') - assert masked_ds.attrs['start_date_time'] == expected_start + expected_start = (START_TIME + timedelta(minutes=20)).strftime("%Y-%m-%dT%H:%M:%S") + assert masked_ds.attrs["start_date_time"] == expected_start def test_lettered_tiles_no_fit(self, tmp_path): """Test creating a lettered grid with no data overlapping the grid.""" @@ -389,9 +389,9 @@ def test_lettered_tiles_no_fit(self, tmp_path): area_def = _get_test_area(shape=(2000, 1000), extents=(4000000., 5000000., 5000000., 6000000.)) ds = _get_test_lcc_data(data, area_def) - w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) + w.save_datasets([ds], sector_id="LCC", source_name="TESTS", tile_count=(3, 3), lettered_grid=True) # No files created - all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc')) + all_files = glob(os.path.join(str(tmp_path), "TESTS_AII*.nc")) assert not all_files def test_lettered_tiles_no_valid_data(self, tmp_path): @@ -402,9 +402,9 @@ def test_lettered_tiles_no_valid_data(self, tmp_path): area_def = _get_test_area(shape=(2000, 1000), extents=(-1000000., -1500000., 1000000., 1500000.)) ds = _get_test_lcc_data(data, area_def) - w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) + w.save_datasets([ds], sector_id="LCC", source_name="TESTS", tile_count=(3, 3), lettered_grid=True) # No files created - all NaNs should result in no 
tiles being created - all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc')) + all_files = glob(os.path.join(str(tmp_path), "TESTS_AII*.nc")) assert not all_files def test_lettered_tiles_bad_filename(self, tmp_path): @@ -417,8 +417,8 @@ def test_lettered_tiles_bad_filename(self, tmp_path): ds = _get_test_lcc_data(data, area_def) with pytest.raises(KeyError): w.save_datasets([ds], - sector_id='LCC', - source_name='TESTS', + sector_id="LCC", + source_name="TESTS", tile_count=(3, 3), lettered_grid=True) @@ -429,17 +429,17 @@ def test_basic_numbered_tiles_rgb(self, tmp_path): data = da.from_array(np.linspace(0., 1., 60000, dtype=np.float32).reshape((3, 200, 100)), chunks=50) area_def = _get_test_area() ds = _get_test_lcc_data(data, area_def) - ds = ds.rename(dict((old, new) for old, new in zip(ds.dims, ['bands', 'y', 'x']))) - ds.coords['bands'] = ['R', 'G', 'B'] + ds = ds.rename(dict((old, new) for old, new in zip(ds.dims, ["bands", "y", "x"]))) + ds.coords["bands"] = ["R", "G", "B"] - w.save_datasets([ds], sector_id='TEST', source_name="TESTS", tile_count=(3, 3)) - chan_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*test_ds_R*.nc')) + w.save_datasets([ds], sector_id="TEST", source_name="TESTS", tile_count=(3, 3)) + chan_files = glob(os.path.join(str(tmp_path), "TESTS_AII*test_ds_R*.nc")) all_files = chan_files[:] assert len(chan_files) == 9 - chan_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*test_ds_G*.nc')) + chan_files = glob(os.path.join(str(tmp_path), "TESTS_AII*test_ds_G*.nc")) all_files.extend(chan_files) assert len(chan_files) == 9 - chan_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*test_ds_B*.nc')) + chan_files = glob(os.path.join(str(tmp_path), "TESTS_AII*test_ds_B*.nc")) assert len(chan_files) == 9 all_files.extend(chan_files) for fn in all_files: @@ -449,54 +449,54 @@ def test_basic_numbered_tiles_rgb(self, tmp_path): @pytest.mark.parametrize( "sector", - ['C', - 'F'] + ["C", + "F"] ) @pytest.mark.parametrize( "extra_kwargs", [ {}, - {'environment_prefix': 'AA'}, - {'environment_prefix': 'BB', 'filename': '{environment_prefix}_{name}_GLM_T{tile_number:04d}.nc'}, + {"environment_prefix": "AA"}, + {"environment_prefix": "BB", "filename": "{environment_prefix}_{name}_GLM_T{tile_number:04d}.nc"}, ] ) def test_multivar_numbered_tiles_glm(self, sector, extra_kwargs, tmp_path): """Test creating a tiles with multiple variables.""" from satpy.writers.awips_tiled import AWIPSTiledWriter - os.environ['ORGANIZATION'] = '1' * 50 + os.environ["ORGANIZATION"] = "1" * 50 w = AWIPSTiledWriter(base_dir=tmp_path, compress=True) data = _get_test_data() area_def = _get_test_area() ds1 = _get_test_lcc_data(data, area_def) ds1.attrs.update( dict( - name='total_energy', - platform_name='GOES-17', - sensor='SENSOR', - units='1', - scan_mode='M3', + name="total_energy", + platform_name="GOES-17", + sensor="SENSOR", + units="1", + scan_mode="M3", scene_abbr=sector, platform_shortname="G17" ) ) ds2 = ds1.copy() ds2.attrs.update({ - 'name': 'flash_extent_density', + "name": "flash_extent_density", }) ds3 = ds1.copy() ds3.attrs.update({ - 'name': 'average_flash_area', + "name": "average_flash_area", }) dqf = ds1.copy() dqf = (dqf * 255).astype(np.uint8) dqf.attrs = ds1.attrs.copy() dqf.attrs.update({ - 'name': 'DQF', - '_FillValue': 1, + "name": "DQF", + "_FillValue": 1, }) - w.save_datasets([ds1, ds2, ds3, dqf], sector_id='TEST', source_name="TESTS", - tile_count=(3, 3), template='glm_l2_rad{}'.format(sector.lower()), + w.save_datasets([ds1, ds2, ds3, dqf], sector_id="TEST", 
source_name="TESTS", + tile_count=(3, 3), template="glm_l2_rad{}".format(sector.lower()), **extra_kwargs) fn_glob = self._get_glm_glob_filename(extra_kwargs) all_files = glob(os.path.join(str(tmp_path), fn_glob)) @@ -505,15 +505,15 @@ def test_multivar_numbered_tiles_glm(self, sector, extra_kwargs, tmp_path): unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) - if sector == 'C': - assert masked_ds.attrs['time_coverage_end'] == END_TIME.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + if sector == "C": + assert masked_ds.attrs["time_coverage_end"] == END_TIME.strftime("%Y-%m-%dT%H:%M:%S.%fZ") else: # 'F' - assert masked_ds.attrs['time_coverage_end'] == END_TIME.strftime('%Y-%m-%dT%H:%M:%SZ') + assert masked_ds.attrs["time_coverage_end"] == END_TIME.strftime("%Y-%m-%dT%H:%M:%SZ") @staticmethod def _get_glm_glob_filename(extra_kwargs): - if 'filename' in extra_kwargs: - return 'BB*_GLM*.nc' - elif 'environment_prefix' in extra_kwargs: - return 'AA*_GLM*.nc' - return 'DR*_GLM*.nc' + if "filename" in extra_kwargs: + return "BB*_GLM*.nc" + elif "environment_prefix" in extra_kwargs: + return "AA*_GLM*.nc" + return "DR*_GLM*.nc" diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index a325cb9cc8..6a51a71b36 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -139,11 +139,11 @@ def test_preprocess_dataarray_name(): from satpy.writers.cf_writer import _preprocess_dataarray_name scn = Scene() - scn['1'] = xr.DataArray([1, 2, 3]) - dataarray = scn['1'] + scn["1"] = xr.DataArray([1, 2, 3]) + dataarray = scn["1"] # If numeric_name_prefix is a string, test add the original_name attributes out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix="TEST", include_orig_name=True) - assert out_da.attrs['original_name'] == '1' + assert out_da.attrs["original_name"] == "1" # If numeric_name_prefix is empty string, False or None, test do not add original_name attributes out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix="", include_orig_name=True) @@ -163,16 +163,16 @@ def test_add_time_cf_attrs(): scn = Scene() test_array = np.array([[1, 2], [3, 4], [5, 6], [7, 8]]) - times = np.array(['2018-05-30T10:05:00', '2018-05-30T10:05:01', - '2018-05-30T10:05:02', '2018-05-30T10:05:03'], dtype=np.datetime64) - scn['test-array'] = xr.DataArray(test_array, - dims=['y', 'x'], - coords={'time': ('y', times)}, + times = np.array(["2018-05-30T10:05:00", "2018-05-30T10:05:01", + "2018-05-30T10:05:02", "2018-05-30T10:05:03"], dtype=np.datetime64) + scn["test-array"] = xr.DataArray(test_array, + dims=["y", "x"], + coords={"time": ("y", times)}, attrs=dict(start_time=times[0], end_time=times[-1])) - ds = scn['test-array'].to_dataset(name='test-array') + ds = scn["test-array"].to_dataset(name="test-array") ds = add_time_bounds_dimension(ds) assert "bnds_1d" in ds.dims - assert ds.dims['bnds_1d'] == 2 + assert ds.dims["bnds_1d"] == 2 assert "time_bnds" in list(ds.data_vars) assert "bounds" in ds["time"].attrs assert "standard_name" in ds["time"].attrs @@ -194,23 +194,23 @@ def test_init(self): from satpy.writers import configs_for_writer from satpy.writers.cf_writer import CFWriter - CFWriter(config_files=list(configs_for_writer('cf'))[0]) + CFWriter(config_files=list(configs_for_writer("cf"))[0]) def test_save_array(self): """Test saving an array to netcdf/cf.""" scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = 
datetime(2018, 5, 30, 10, 15) - scn['test-array'] = xr.DataArray([1, 2, 3], + scn["test-array"] = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time, - prerequisites=[make_dsq(name='hej')])) + prerequisites=[make_dsq(name="hej")])) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename) as f: - np.testing.assert_array_equal(f['test-array'][:], [1, 2, 3]) + np.testing.assert_array_equal(f["test-array"][:], [1, 2, 3]) expected_prereq = ("DataQuery(name='hej')") - assert f['test-array'].attrs['prerequisites'] == expected_prereq + assert f["test-array"].attrs["prerequisites"] == expected_prereq def test_save_array_coords(self): """Test saving array with coordinates.""" @@ -218,69 +218,69 @@ def test_save_array_coords(self): start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) coords = { - 'x': np.arange(3), - 'y': np.arange(1), + "x": np.arange(3), + "y": np.arange(1), } if CRS is not None: - proj_str = ('+proj=geos +lon_0=-95.0 +h=35786023.0 ' - '+a=6378137.0 +b=6356752.31414 +sweep=x ' - '+units=m +no_defs') - coords['crs'] = CRS.from_string(proj_str) - scn['test-array'] = xr.DataArray([[1, 2, 3]], - dims=('y', 'x'), + proj_str = ("+proj=geos +lon_0=-95.0 +h=35786023.0 " + "+a=6378137.0 +b=6356752.31414 +sweep=x " + "+units=m +no_defs") + coords["crs"] = CRS.from_string(proj_str) + scn["test-array"] = xr.DataArray([[1, 2, 3]], + dims=("y", "x"), coords=coords, attrs=dict(start_time=start_time, end_time=end_time, - prerequisites=[make_dsq(name='hej')])) + prerequisites=[make_dsq(name="hej")])) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename) as f: - np.testing.assert_array_equal(f['test-array'][:], [[1, 2, 3]]) - np.testing.assert_array_equal(f['x'][:], [0, 1, 2]) - np.testing.assert_array_equal(f['y'][:], [0]) - assert 'crs' not in f - assert '_FillValue' not in f['x'].attrs - assert '_FillValue' not in f['y'].attrs + np.testing.assert_array_equal(f["test-array"][:], [[1, 2, 3]]) + np.testing.assert_array_equal(f["x"][:], [0, 1, 2]) + np.testing.assert_array_equal(f["y"][:], [0]) + assert "crs" not in f + assert "_FillValue" not in f["x"].attrs + assert "_FillValue" not in f["y"].attrs expected_prereq = ("DataQuery(name='hej')") - assert f['test-array'].attrs['prerequisites'] == expected_prereq + assert f["test-array"].attrs["prerequisites"] == expected_prereq def test_save_dataset_a_digit(self): """Test saving an array to netcdf/cf where dataset name starting with a digit.""" scn = Scene() - scn['1'] = xr.DataArray([1, 2, 3]) + scn["1"] = xr.DataArray([1, 2, 3]) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename) as f: - np.testing.assert_array_equal(f['CHANNEL_1'][:], [1, 2, 3]) + np.testing.assert_array_equal(f["CHANNEL_1"][:], [1, 2, 3]) def test_save_dataset_a_digit_prefix(self): """Test saving an array to netcdf/cf where dataset name starting with a digit with prefix.""" scn = Scene() - scn['1'] = xr.DataArray([1, 2, 3]) + scn["1"] = xr.DataArray([1, 2, 3]) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf', numeric_name_prefix='TEST') + scn.save_datasets(filename=filename, writer="cf", numeric_name_prefix="TEST") with xr.open_dataset(filename) as f: - 
np.testing.assert_array_equal(f['TEST1'][:], [1, 2, 3]) + np.testing.assert_array_equal(f["TEST1"][:], [1, 2, 3]) def test_save_dataset_a_digit_prefix_include_attr(self): """Test saving an array to netcdf/cf where dataset name starting with a digit with prefix include orig name.""" scn = Scene() - scn['1'] = xr.DataArray([1, 2, 3]) + scn["1"] = xr.DataArray([1, 2, 3]) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf', include_orig_name=True, numeric_name_prefix='TEST') + scn.save_datasets(filename=filename, writer="cf", include_orig_name=True, numeric_name_prefix="TEST") with xr.open_dataset(filename) as f: - np.testing.assert_array_equal(f['TEST1'][:], [1, 2, 3]) - assert f['TEST1'].attrs['original_name'] == '1' + np.testing.assert_array_equal(f["TEST1"][:], [1, 2, 3]) + assert f["TEST1"].attrs["original_name"] == "1" def test_save_dataset_a_digit_no_prefix_include_attr(self): """Test saving an array to netcdf/cf dataset name starting with a digit with no prefix include orig name.""" scn = Scene() - scn['1'] = xr.DataArray([1, 2, 3]) + scn["1"] = xr.DataArray([1, 2, 3]) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf', include_orig_name=True, numeric_name_prefix='') + scn.save_datasets(filename=filename, writer="cf", include_orig_name=True, numeric_name_prefix="") with xr.open_dataset(filename) as f: - np.testing.assert_array_equal(f['1'][:], [1, 2, 3]) - assert 'original_name' not in f['1'].attrs + np.testing.assert_array_equal(f["1"][:], [1, 2, 3]) + assert "original_name" not in f["1"].attrs def test_ancillary_variables(self): """Test ancillary_variables cited each other.""" @@ -291,16 +291,16 @@ def test_ancillary_variables(self): da = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time, - prerequisites=[make_dataid(name='hej')])) - scn['test-array-1'] = da - scn['test-array-2'] = da.copy() - scn['test-array-1'].attrs['ancillary_variables'] = [scn['test-array-2']] - scn['test-array-2'].attrs['ancillary_variables'] = [scn['test-array-1']] + prerequisites=[make_dataid(name="hej")])) + scn["test-array-1"] = da + scn["test-array-2"] = da.copy() + scn["test-array-1"].attrs["ancillary_variables"] = [scn["test-array-2"]] + scn["test-array-2"].attrs["ancillary_variables"] = [scn["test-array-1"]] with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename) as f: - assert f['test-array-1'].attrs['ancillary_variables'] == 'test-array-2' - assert f['test-array-2'].attrs['ancillary_variables'] == 'test-array-1' + assert f["test-array-1"].attrs["ancillary_variables"] == "test-array-2" + assert f["test-array-2"].attrs["ancillary_variables"] == "test-array-1" def test_groups(self): """Test creating a file with groups.""" @@ -319,34 +319,34 @@ def test_groups(self): time_hrv = [1, 2, 3] scn = Scene() - scn['VIS006'] = xr.DataArray(data_visir, - dims=('y', 'x'), - coords={'y': y_visir, 'x': x_visir, 'acq_time': ('y', time_vis006)}, - attrs={'name': 'VIS006', 'start_time': tstart, 'end_time': tend}) - scn['IR_108'] = xr.DataArray(data_visir, - dims=('y', 'x'), - coords={'y': y_visir, 'x': x_visir, 'acq_time': ('y', time_ir_108)}, - attrs={'name': 'IR_108', 'start_time': tstart, 'end_time': tend}) - scn['HRV'] = xr.DataArray(data_hrv, - dims=('y', 'x'), - coords={'y': y_hrv, 'x': x_hrv, 'acq_time': ('y', time_hrv)}, - attrs={'name': 'HRV', 'start_time': tstart, 'end_time': tend}) + scn["VIS006"] = 
xr.DataArray(data_visir, + dims=("y", "x"), + coords={"y": y_visir, "x": x_visir, "acq_time": ("y", time_vis006)}, + attrs={"name": "VIS006", "start_time": tstart, "end_time": tend}) + scn["IR_108"] = xr.DataArray(data_visir, + dims=("y", "x"), + coords={"y": y_visir, "x": x_visir, "acq_time": ("y", time_ir_108)}, + attrs={"name": "IR_108", "start_time": tstart, "end_time": tend}) + scn["HRV"] = xr.DataArray(data_hrv, + dims=("y", "x"), + coords={"y": y_hrv, "x": x_hrv, "acq_time": ("y", time_hrv)}, + attrs={"name": "HRV", "start_time": tstart, "end_time": tend}) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf', groups={'visir': ['IR_108', 'VIS006'], 'hrv': ['HRV']}, + scn.save_datasets(filename=filename, writer="cf", groups={"visir": ["IR_108", "VIS006"], "hrv": ["HRV"]}, pretty=True) nc_root = xr.open_dataset(filename) - assert 'history' in nc_root.attrs + assert "history" in nc_root.attrs assert set(nc_root.variables.keys()) == set() - nc_visir = xr.open_dataset(filename, group='visir') - nc_hrv = xr.open_dataset(filename, group='hrv') - assert set(nc_visir.variables.keys()) == {'VIS006', 'IR_108', - 'y', 'x', 'VIS006_acq_time', 'IR_108_acq_time'} - assert set(nc_hrv.variables.keys()) == {'HRV', 'y', 'x', 'acq_time'} - for tst, ref in zip([nc_visir['VIS006'], nc_visir['IR_108'], nc_hrv['HRV']], - [scn['VIS006'], scn['IR_108'], scn['HRV']]): + nc_visir = xr.open_dataset(filename, group="visir") + nc_hrv = xr.open_dataset(filename, group="hrv") + assert set(nc_visir.variables.keys()) == {"VIS006", "IR_108", + "y", "x", "VIS006_acq_time", "IR_108_acq_time"} + assert set(nc_hrv.variables.keys()) == {"HRV", "y", "x", "acq_time"} + for tst, ref in zip([nc_visir["VIS006"], nc_visir["IR_108"], nc_hrv["HRV"]], + [scn["VIS006"], scn["IR_108"], scn["HRV"]]): np.testing.assert_array_equal(tst.data, ref.data) nc_root.close() nc_visir.close() @@ -355,7 +355,7 @@ def test_groups(self): # Different projection coordinates in one group are not supported with TempFile() as filename: with pytest.raises(ValueError): - scn.save_datasets(datasets=['VIS006', 'HRV'], filename=filename, writer='cf') + scn.save_datasets(datasets=["VIS006", "HRV"], filename=filename, writer="cf") def test_single_time_value(self): """Test setting a single time value.""" @@ -363,32 +363,32 @@ def test_single_time_value(self): start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) test_array = np.array([[1, 2], [3, 4]]) - scn['test-array'] = xr.DataArray(test_array, - dims=['x', 'y'], - coords={'time': np.datetime64('2018-05-30T10:05:00')}, + scn["test-array"] = xr.DataArray(test_array, + dims=["x", "y"], + coords={"time": np.datetime64("2018-05-30T10:05:00")}, attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename, decode_cf=True) as f: - np.testing.assert_array_equal(f['time'], scn['test-array']['time']) - bounds_exp = np.array([[start_time, end_time]], dtype='datetime64[m]') - np.testing.assert_array_equal(f['time_bnds'], bounds_exp) + np.testing.assert_array_equal(f["time"], scn["test-array"]["time"]) + bounds_exp = np.array([[start_time, end_time]], dtype="datetime64[m]") + np.testing.assert_array_equal(f["time_bnds"], bounds_exp) def test_time_coordinate_on_a_swath(self): """Test that time dimension is not added on swath data with time already as a coordinate.""" scn = Scene() test_array = np.array([[1, 2], [3, 
4], [5, 6], [7, 8]]) - times = np.array(['2018-05-30T10:05:00', '2018-05-30T10:05:01', - '2018-05-30T10:05:02', '2018-05-30T10:05:03'], dtype=np.datetime64) - scn['test-array'] = xr.DataArray(test_array, - dims=['y', 'x'], - coords={'time': ('y', times)}, + times = np.array(["2018-05-30T10:05:00", "2018-05-30T10:05:01", + "2018-05-30T10:05:02", "2018-05-30T10:05:03"], dtype=np.datetime64) + scn["test-array"] = xr.DataArray(test_array, + dims=["y", "x"], + coords={"time": ("y", times)}, attrs=dict(start_time=times[0], end_time=times[-1])) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf', pretty=True) + scn.save_datasets(filename=filename, writer="cf", pretty=True) with xr.open_dataset(filename, decode_cf=True) as f: - np.testing.assert_array_equal(f['time'], scn['test-array']['time']) + np.testing.assert_array_equal(f["time"], scn["test-array"]["time"]) def test_bounds(self): """Test setting time bounds.""" @@ -396,30 +396,30 @@ def test_bounds(self): start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) test_array = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) - scn['test-array'] = xr.DataArray(test_array, - dims=['x', 'y', 'time'], - coords={'time': [np.datetime64('2018-05-30T10:05:00')]}, + scn["test-array"] = xr.DataArray(test_array, + dims=["x", "y", "time"], + coords={"time": [np.datetime64("2018-05-30T10:05:00")]}, attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") # Check decoded time coordinates & bounds with xr.open_dataset(filename, decode_cf=True) as f: - bounds_exp = np.array([[start_time, end_time]], dtype='datetime64[m]') - np.testing.assert_array_equal(f['time_bnds'], bounds_exp) - assert f['time'].attrs['bounds'] == 'time_bnds' + bounds_exp = np.array([[start_time, end_time]], dtype="datetime64[m]") + np.testing.assert_array_equal(f["time_bnds"], bounds_exp) + assert f["time"].attrs["bounds"] == "time_bnds" # Check raw time coordinates & bounds with xr.open_dataset(filename, decode_cf=False) as f: - np.testing.assert_almost_equal(f['time_bnds'], [[-0.0034722, 0.0069444]]) + np.testing.assert_almost_equal(f["time_bnds"], [[-0.0034722, 0.0069444]]) # User-specified time encoding should have preference with TempFile() as filename: - time_units = 'seconds since 2018-01-01' - scn.save_datasets(filename=filename, encoding={'time': {'units': time_units}}, - writer='cf') + time_units = "seconds since 2018-01-01" + scn.save_datasets(filename=filename, encoding={"time": {"units": time_units}}, + writer="cf") with xr.open_dataset(filename, decode_cf=False) as f: - np.testing.assert_array_equal(f['time_bnds'], [[12909600, 12910500]]) + np.testing.assert_array_equal(f["time_bnds"], [[12909600, 12910500]]) def test_bounds_minimum(self): """Test minimum bounds.""" @@ -430,21 +430,21 @@ def test_bounds_minimum(self): end_timeB = datetime(2018, 5, 30, 10, 15) # expected to be used test_arrayA = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) test_arrayB = np.array([[1, 2], [3, 5]]).reshape(2, 2, 1) - scn['test-arrayA'] = xr.DataArray(test_arrayA, - dims=['x', 'y', 'time'], - coords={'time': [np.datetime64('2018-05-30T10:05:00')]}, + scn["test-arrayA"] = xr.DataArray(test_arrayA, + dims=["x", "y", "time"], + coords={"time": [np.datetime64("2018-05-30T10:05:00")]}, attrs=dict(start_time=start_timeA, end_time=end_timeA)) - scn['test-arrayB'] = xr.DataArray(test_arrayB, - dims=['x', 'y', 'time'], - 
coords={'time': [np.datetime64('2018-05-30T10:05:00')]}, + scn["test-arrayB"] = xr.DataArray(test_arrayB, + dims=["x", "y", "time"], + coords={"time": [np.datetime64("2018-05-30T10:05:00")]}, attrs=dict(start_time=start_timeB, end_time=end_timeB)) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename, decode_cf=True) as f: - bounds_exp = np.array([[start_timeA, end_timeB]], dtype='datetime64[m]') - np.testing.assert_array_equal(f['time_bnds'], bounds_exp) + bounds_exp = np.array([[start_timeA, end_timeB]], dtype="datetime64[m]") + np.testing.assert_array_equal(f["time_bnds"], bounds_exp) def test_bounds_missing_time_info(self): """Test time bounds generation in case of missing time.""" @@ -453,19 +453,19 @@ def test_bounds_missing_time_info(self): end_timeA = datetime(2018, 5, 30, 10, 15) test_arrayA = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) test_arrayB = np.array([[1, 2], [3, 5]]).reshape(2, 2, 1) - scn['test-arrayA'] = xr.DataArray(test_arrayA, - dims=['x', 'y', 'time'], - coords={'time': [np.datetime64('2018-05-30T10:05:00')]}, + scn["test-arrayA"] = xr.DataArray(test_arrayA, + dims=["x", "y", "time"], + coords={"time": [np.datetime64("2018-05-30T10:05:00")]}, attrs=dict(start_time=start_timeA, end_time=end_timeA)) - scn['test-arrayB'] = xr.DataArray(test_arrayB, - dims=['x', 'y', 'time'], - coords={'time': [np.datetime64('2018-05-30T10:05:00')]}) + scn["test-arrayB"] = xr.DataArray(test_arrayB, + dims=["x", "y", "time"], + coords={"time": [np.datetime64("2018-05-30T10:05:00")]}) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename, decode_cf=True) as f: - bounds_exp = np.array([[start_timeA, end_timeA]], dtype='datetime64[m]') - np.testing.assert_array_equal(f['time_bnds'], bounds_exp) + bounds_exp = np.array([[start_timeA, end_timeA]], dtype="datetime64[m]") + np.testing.assert_array_equal(f["time_bnds"], bounds_exp) def test_unlimited_dims_kwarg(self): """Test specification of unlimited dimensions.""" @@ -473,51 +473,51 @@ def test_unlimited_dims_kwarg(self): start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) test_array = np.array([[1, 2], [3, 4]]) - scn['test-array'] = xr.DataArray(test_array, - dims=['x', 'y'], - coords={'time': np.datetime64('2018-05-30T10:05:00')}, + scn["test-array"] = xr.DataArray(test_array, + dims=["x", "y"], + coords={"time": np.datetime64("2018-05-30T10:05:00")}, attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf', unlimited_dims=['time']) + scn.save_datasets(filename=filename, writer="cf", unlimited_dims=["time"]) with xr.open_dataset(filename) as f: - assert set(f.encoding['unlimited_dims']) == {'time'} + assert set(f.encoding["unlimited_dims"]) == {"time"} def test_header_attrs(self): """Check global attributes are set.""" scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) - scn['test-array'] = xr.DataArray([1, 2, 3], + scn["test-array"] = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: - header_attrs = {'sensor': 'SEVIRI', - 'orbit': 99999, - 'none': None, - 'list': [1, 2, 3], - 'set': {1, 2, 3}, - 'dict': {'a': 1, 'b': 2}, - 'nested': {'outer': {'inner1': 1, 'inner2': 2}}, - 'bool': True, - 'bool_': np.bool_(True)} + 
header_attrs = {"sensor": "SEVIRI", + "orbit": 99999, + "none": None, + "list": [1, 2, 3], + "set": {1, 2, 3}, + "dict": {"a": 1, "b": 2}, + "nested": {"outer": {"inner1": 1, "inner2": 2}}, + "bool": True, + "bool_": np.bool_(True)} scn.save_datasets(filename=filename, header_attrs=header_attrs, flatten_attrs=True, - writer='cf') + writer="cf") with xr.open_dataset(filename) as f: - assert 'history' in f.attrs - assert f.attrs['sensor'] == 'SEVIRI' - assert f.attrs['orbit'] == 99999 - np.testing.assert_array_equal(f.attrs['list'], [1, 2, 3]) - assert f.attrs['set'] == '{1, 2, 3}' - assert f.attrs['dict_a'] == 1 - assert f.attrs['dict_b'] == 2 - assert f.attrs['nested_outer_inner1'] == 1 - assert f.attrs['nested_outer_inner2'] == 2 - assert f.attrs['bool'] == 'true' - assert f.attrs['bool_'] == 'true' - assert 'none' not in f.attrs.keys() + assert "history" in f.attrs + assert f.attrs["sensor"] == "SEVIRI" + assert f.attrs["orbit"] == 99999 + np.testing.assert_array_equal(f.attrs["list"], [1, 2, 3]) + assert f.attrs["set"] == "{1, 2, 3}" + assert f.attrs["dict_a"] == 1 + assert f.attrs["dict_b"] == 2 + assert f.attrs["nested_outer_inner1"] == 1 + assert f.attrs["nested_outer_inner2"] == 2 + assert f.attrs["bool"] == "true" + assert f.attrs["bool_"] == "true" + assert "none" not in f.attrs.keys() def get_test_attrs(self): """Create some dataset attributes for testing purpose. @@ -526,79 +526,79 @@ def get_test_attrs(self): Attributes, encoded attributes, encoded and flattened attributes """ - attrs = {'name': 'IR_108', - 'start_time': datetime(2018, 1, 1, 0), - 'end_time': datetime(2018, 1, 1, 0, 15), - 'int': 1, - 'float': 1.0, - 'none': None, # should be dropped - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': True, - 'numpy_void': np.void(0), - 'numpy_bytes': np.bytes_('test'), - 'numpy_string': np.string_('test'), - 'list': [1, 2, np.float64(3)], - 'nested_list': ["1", ["2", [3]]], - 'bool': True, - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': np.array([True, False, True]), - 'array_2d': np.array([[1, 2], [3, 4]]), - 'array_3d': np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), - 'dict': {'a': 1, 'b': 2}, - 'nested_dict': {'l1': {'l2': {'l3': np.array([1, 2, 3], dtype='uint8')}}}, - 'raw_metadata': OrderedDict([ - ('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])), - ('flag', np.bool_(True)), - ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))])) + attrs = {"name": "IR_108", + "start_time": datetime(2018, 1, 1, 0), + "end_time": datetime(2018, 1, 1, 0, 15), + "int": 1, + "float": 1.0, + "none": None, # should be dropped + "numpy_int": np.uint8(1), + "numpy_float": np.float32(1), + "numpy_bool": True, + "numpy_void": np.void(0), + "numpy_bytes": np.bytes_("test"), + "numpy_string": np.string_("test"), + "list": [1, 2, np.float64(3)], + "nested_list": ["1", ["2", [3]]], + "bool": True, + "array": np.array([1, 2, 3], dtype="uint8"), + "array_bool": np.array([True, False, True]), + "array_2d": np.array([[1, 2], [3, 4]]), + "array_3d": np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), + "dict": {"a": 1, "b": 2}, + "nested_dict": {"l1": {"l2": {"l3": np.array([1, 2, 3], dtype="uint8")}}}, + "raw_metadata": OrderedDict([ + ("recarray", np.zeros(3, dtype=[("x", "i4"), ("y", "u1")])), + ("flag", np.bool_(True)), + ("dict", OrderedDict([("a", 1), ("b", np.array([1, 2, 3], dtype="uint8"))])) ])} - encoded = {'name': 'IR_108', - 'start_time': '2018-01-01 00:00:00', - 'end_time': '2018-01-01 00:15:00', - 'int': 1, - 'float': 1.0, 
- 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': 'true', - 'numpy_void': '[]', - 'numpy_bytes': 'test', - 'numpy_string': 'test', - 'list': [1, 2, np.float64(3)], - 'nested_list': '["1", ["2", [3]]]', - 'bool': 'true', - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': ['true', 'false', 'true'], - 'array_2d': '[[1, 2], [3, 4]]', - 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', - 'dict': '{"a": 1, "b": 2}', - 'nested_dict': '{"l1": {"l2": {"l3": [1, 2, 3]}}}', - 'raw_metadata': '{"recarray": [[0, 0], [0, 0], [0, 0]], ' + encoded = {"name": "IR_108", + "start_time": "2018-01-01 00:00:00", + "end_time": "2018-01-01 00:15:00", + "int": 1, + "float": 1.0, + "numpy_int": np.uint8(1), + "numpy_float": np.float32(1), + "numpy_bool": "true", + "numpy_void": "[]", + "numpy_bytes": "test", + "numpy_string": "test", + "list": [1, 2, np.float64(3)], + "nested_list": '["1", ["2", [3]]]', + "bool": "true", + "array": np.array([1, 2, 3], dtype="uint8"), + "array_bool": ["true", "false", "true"], + "array_2d": "[[1, 2], [3, 4]]", + "array_3d": "[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]", + "dict": '{"a": 1, "b": 2}', + "nested_dict": '{"l1": {"l2": {"l3": [1, 2, 3]}}}', + "raw_metadata": '{"recarray": [[0, 0], [0, 0], [0, 0]], ' '"flag": "true", "dict": {"a": 1, "b": [1, 2, 3]}}'} - encoded_flat = {'name': 'IR_108', - 'start_time': '2018-01-01 00:00:00', - 'end_time': '2018-01-01 00:15:00', - 'int': 1, - 'float': 1.0, - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': 'true', - 'numpy_void': '[]', - 'numpy_bytes': 'test', - 'numpy_string': 'test', - 'list': [1, 2, np.float64(3)], - 'nested_list': '["1", ["2", [3]]]', - 'bool': 'true', - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': ['true', 'false', 'true'], - 'array_2d': '[[1, 2], [3, 4]]', - 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', - 'dict_a': 1, - 'dict_b': 2, - 'nested_dict_l1_l2_l3': np.array([1, 2, 3], dtype='uint8'), - 'raw_metadata_recarray': '[[0, 0], [0, 0], [0, 0]]', - 'raw_metadata_flag': 'true', - 'raw_metadata_dict_a': 1, - 'raw_metadata_dict_b': np.array([1, 2, 3], dtype='uint8')} + encoded_flat = {"name": "IR_108", + "start_time": "2018-01-01 00:00:00", + "end_time": "2018-01-01 00:15:00", + "int": 1, + "float": 1.0, + "numpy_int": np.uint8(1), + "numpy_float": np.float32(1), + "numpy_bool": "true", + "numpy_void": "[]", + "numpy_bytes": "test", + "numpy_string": "test", + "list": [1, 2, np.float64(3)], + "nested_list": '["1", ["2", [3]]]', + "bool": "true", + "array": np.array([1, 2, 3], dtype="uint8"), + "array_bool": ["true", "false", "true"], + "array_2d": "[[1, 2], [3, 4]]", + "array_3d": "[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]", + "dict_a": 1, + "dict_b": 2, + "nested_dict_l1_l2_l3": np.array([1, 2, 3], dtype="uint8"), + "raw_metadata_recarray": "[[0, 0], [0, 0], [0, 0]]", + "raw_metadata_flag": "true", + "raw_metadata_dict_a": 1, + "raw_metadata_dict_b": np.array([1, 2, 3], dtype="uint8")} return attrs, encoded, encoded_flat def assertDictWithArraysEqual(self, d1, d2): @@ -626,13 +626,13 @@ def test_encode_attrs_nc(self): self.assertDictWithArraysEqual(expected, encoded) # Test decoding of json-encoded attributes - raw_md_roundtrip = {'recarray': [[0, 0], [0, 0], [0, 0]], - 'flag': 'true', - 'dict': {'a': 1, 'b': [1, 2, 3]}} - assert json.loads(encoded['raw_metadata']) == raw_md_roundtrip - assert json.loads(encoded['array_3d']) == [[[1, 2], [3, 4]], [[1, 2], [3, 4]]] - assert json.loads(encoded['nested_dict']) == {"l1": {"l2": {"l3": [1, 2, 3]}}} 
- assert json.loads(encoded['nested_list']) == ["1", ["2", [3]]] + raw_md_roundtrip = {"recarray": [[0, 0], [0, 0], [0, 0]], + "flag": "true", + "dict": {"a": 1, "b": [1, 2, 3]}} + assert json.loads(encoded["raw_metadata"]) == raw_md_roundtrip + assert json.loads(encoded["array_3d"]) == [[[1, 2], [3, 4]], [[1, 2], [3, 4]]] + assert json.loads(encoded["nested_dict"]) == {"l1": {"l2": {"l3": [1, 2, 3]}}} + assert json.loads(encoded["nested_list"]) == ["1", ["2", [3]]] def test_da2cf(self): """Test the conversion of a DataArray to a CF-compatible DataArray.""" @@ -640,44 +640,44 @@ def test_da2cf(self): # Create set of test attributes attrs, attrs_expected, attrs_expected_flat = self.get_test_attrs() - attrs['area'] = 'some_area' - attrs['prerequisites'] = [make_dsq(name='hej')] - attrs['_satpy_id_name'] = 'myname' + attrs["area"] = "some_area" + attrs["prerequisites"] = [make_dsq(name="hej")] + attrs["_satpy_id_name"] = "myname" # Adjust expected attributes expected_prereq = ("DataQuery(name='hej')") - update = {'prerequisites': [expected_prereq], 'long_name': attrs['name']} + update = {"prerequisites": [expected_prereq], "long_name": attrs["name"]} attrs_expected.update(update) attrs_expected_flat.update(update) - attrs_expected.pop('name') - attrs_expected_flat.pop('name') + attrs_expected.pop("name") + attrs_expected_flat.pop("name") # Create test data array - arr = xr.DataArray(np.array([[1, 2], [3, 4]]), attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [1, 2], 'acq_time': ('y', [3, 4])}) + arr = xr.DataArray(np.array([[1, 2], [3, 4]]), attrs=attrs, dims=("y", "x"), + coords={"y": [0, 1], "x": [1, 2], "acq_time": ("y", [3, 4])}) # Test conversion to something cf-compliant res = CFWriter.da2cf(arr) - np.testing.assert_array_equal(res['x'], arr['x']) - np.testing.assert_array_equal(res['y'], arr['y']) - np.testing.assert_array_equal(res['acq_time'], arr['acq_time']) - assert res['x'].attrs == {'units': 'm', 'standard_name': 'projection_x_coordinate'} - assert res['y'].attrs == {'units': 'm', 'standard_name': 'projection_y_coordinate'} + np.testing.assert_array_equal(res["x"], arr["x"]) + np.testing.assert_array_equal(res["y"], arr["y"]) + np.testing.assert_array_equal(res["acq_time"], arr["acq_time"]) + assert res["x"].attrs == {"units": "m", "standard_name": "projection_x_coordinate"} + assert res["y"].attrs == {"units": "m", "standard_name": "projection_y_coordinate"} self.assertDictWithArraysEqual(res.attrs, attrs_expected) # Test attribute kwargs - res_flat = CFWriter.da2cf(arr, flatten_attrs=True, exclude_attrs=['int']) - attrs_expected_flat.pop('int') + res_flat = CFWriter.da2cf(arr, flatten_attrs=True, exclude_attrs=["int"]) + attrs_expected_flat.pop("int") self.assertDictWithArraysEqual(res_flat.attrs, attrs_expected_flat) def test_da2cf_one_dimensional_array(self): """Test the conversion of an 1d DataArray to a CF-compatible DataArray.""" from satpy.writers.cf_writer import CFWriter - arr = xr.DataArray(np.array([1, 2, 3, 4]), attrs={}, dims=('y',), - coords={'y': [0, 1, 2, 3], 'acq_time': ('y', [0, 1, 2, 3])}) + arr = xr.DataArray(np.array([1, 2, 3, 4]), attrs={}, dims=("y",), + coords={"y": [0, 1, 2, 3], "acq_time": ("y", [0, 1, 2, 3])}) _ = CFWriter.da2cf(arr) def test_collect_cf_dataarrays(self): @@ -685,10 +685,10 @@ def test_collect_cf_dataarrays(self): from satpy.writers.cf_writer import _collect_cf_dataset geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': 35785831., 'a': 
6378169., 'b': 6356583.8}, + area_id="geos", + description="geos", + proj_id="geos", + projection={"proj": "geos", "h": 35785831., "a": 6378169., "b": 6356583.8}, width=2, height=2, area_extent=[-1, -1, 1, 1]) @@ -699,38 +699,38 @@ def test_collect_cf_dataarrays(self): time = [1, 2] tstart = datetime(2019, 4, 1, 12, 0) tend = datetime(2019, 4, 1, 12, 15) - list_dataarrays = [xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, - attrs={'name': 'var1', 'start_time': tstart, 'end_time': tend, 'area': geos}), - xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, - attrs={'name': 'var2', 'long_name': 'variable 2'})] + list_dataarrays = [xr.DataArray(data=data, dims=("y", "x"), coords={"y": y, "x": x, "acq_time": ("y", time)}, + attrs={"name": "var1", "start_time": tstart, "end_time": tend, "area": geos}), + xr.DataArray(data=data, dims=("y", "x"), coords={"y": y, "x": x, "acq_time": ("y", time)}, + attrs={"name": "var2", "long_name": "variable 2"})] # Collect datasets ds = _collect_cf_dataset(list_dataarrays, include_lonlats=True) # Test results assert len(ds.keys()) == 3 - assert set(ds.keys()) == {'var1', 'var2', 'geos'} + assert set(ds.keys()) == {"var1", "var2", "geos"} - da_var1 = ds['var1'] - da_var2 = ds['var2'] - assert da_var1.name == 'var1' - assert da_var1.attrs['grid_mapping'] == 'geos' - assert da_var1.attrs['long_name'] == 'var1' + da_var1 = ds["var1"] + da_var2 = ds["var2"] + assert da_var1.name == "var1" + assert da_var1.attrs["grid_mapping"] == "geos" + assert da_var1.attrs["long_name"] == "var1" # variable 2 - assert 'grid_mapping' not in da_var2.attrs - assert da_var2.attrs['long_name'] == 'variable 2' + assert "grid_mapping" not in da_var2.attrs + assert da_var2.attrs["long_name"] == "variable 2" def test_assert_xy_unique(self): """Test that the x and y coordinates are unique.""" from satpy.writers.cf_writer import assert_xy_unique dummy = [[1, 2], [3, 4]] - datas = {'a': xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}), - 'b': xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}), - 'n': xr.DataArray(data=dummy, dims=('v', 'w'), coords={'v': [1, 2], 'w': [3, 4]})} + datas = {"a": xr.DataArray(data=dummy, dims=("y", "x"), coords={"y": [1, 2], "x": [3, 4]}), + "b": xr.DataArray(data=dummy, dims=("y", "x"), coords={"y": [1, 2], "x": [3, 4]}), + "n": xr.DataArray(data=dummy, dims=("v", "w"), coords={"v": [1, 2], "w": [3, 4]})} assert_xy_unique(datas) - datas['c'] = xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 3], 'x': [3, 4]}) + datas["c"] = xr.DataArray(data=dummy, dims=("y", "x"), coords={"y": [1, 3], "x": [3, 4]}) with pytest.raises(ValueError): assert_xy_unique(datas) @@ -743,31 +743,31 @@ def test_link_coords(self): lon2 = np.zeros((1, 2, 2)) lat = np.ones((2, 2)) datasets = { - 'var1': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'lon lat'}), - 'var2': xr.DataArray(data=data, dims=('y', 'x')), - 'var3': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'lon2 lat'}), - 'var4': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'not_exist lon lat'}), - 'lon': xr.DataArray(data=lon, dims=('y', 'x')), - 'lon2': xr.DataArray(data=lon2, dims=('time', 'y', 'x')), - 'lat': xr.DataArray(data=lat, dims=('y', 'x')) + "var1": xr.DataArray(data=data, dims=("y", "x"), attrs={"coordinates": "lon lat"}), + "var2": xr.DataArray(data=data, dims=("y", "x")), + "var3": xr.DataArray(data=data, dims=("y", "x"), 
attrs={"coordinates": "lon2 lat"}), + "var4": xr.DataArray(data=data, dims=("y", "x"), attrs={"coordinates": "not_exist lon lat"}), + "lon": xr.DataArray(data=lon, dims=("y", "x")), + "lon2": xr.DataArray(data=lon2, dims=("time", "y", "x")), + "lat": xr.DataArray(data=lat, dims=("y", "x")) } link_coords(datasets) # Check that link has been established correctly and 'coordinate' atrribute has been dropped - assert 'lon' in datasets['var1'].coords - assert 'lat' in datasets['var1'].coords - np.testing.assert_array_equal(datasets['var1']['lon'].data, lon) - np.testing.assert_array_equal(datasets['var1']['lat'].data, lat) - assert 'coordinates' not in datasets['var1'].attrs + assert "lon" in datasets["var1"].coords + assert "lat" in datasets["var1"].coords + np.testing.assert_array_equal(datasets["var1"]["lon"].data, lon) + np.testing.assert_array_equal(datasets["var1"]["lat"].data, lat) + assert "coordinates" not in datasets["var1"].attrs # There should be no link if there was no 'coordinate' attribute - assert 'lon' not in datasets['var2'].coords - assert 'lat' not in datasets['var2'].coords + assert "lon" not in datasets["var2"].coords + assert "lat" not in datasets["var2"].coords # The non-existent dimension or coordinate should be dropped - assert 'time' not in datasets['var3'].coords - assert 'not_exist' not in datasets['var4'].coords + assert "time" not in datasets["var3"].coords + assert "not_exist" not in datasets["var4"].coords def test_make_alt_coords_unique(self): """Test that created coordinate variables are unique.""" @@ -778,87 +778,87 @@ def test_make_alt_coords_unique(self): x = [1, 2] time1 = [1, 2] time2 = [3, 4] - datasets = {'var1': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x, 'acq_time': ('y', time1)}), - 'var2': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x, 'acq_time': ('y', time2)})} + datasets = {"var1": xr.DataArray(data=data, + dims=("y", "x"), + coords={"y": y, "x": x, "acq_time": ("y", time1)}), + "var2": xr.DataArray(data=data, + dims=("y", "x"), + coords={"y": y, "x": x, "acq_time": ("y", time2)})} # Test that dataset names are prepended to alternative coordinates res = make_alt_coords_unique(datasets) - np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) - np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) - assert 'acq_time' not in res['var1'].coords - assert 'acq_time' not in res['var2'].coords + np.testing.assert_array_equal(res["var1"]["var1_acq_time"], time1) + np.testing.assert_array_equal(res["var2"]["var2_acq_time"], time2) + assert "acq_time" not in res["var1"].coords + assert "acq_time" not in res["var2"].coords # Make sure nothing else is modified - np.testing.assert_array_equal(res['var1']['x'], x) - np.testing.assert_array_equal(res['var1']['y'], y) - np.testing.assert_array_equal(res['var2']['x'], x) - np.testing.assert_array_equal(res['var2']['y'], y) + np.testing.assert_array_equal(res["var1"]["x"], x) + np.testing.assert_array_equal(res["var1"]["y"], y) + np.testing.assert_array_equal(res["var2"]["x"], x) + np.testing.assert_array_equal(res["var2"]["y"], y) # Coords not unique -> Dataset names must be prepended, even if pretty=True with pytest.warns(UserWarning, match='Cannot pretty-format "acq_time"'): res = make_alt_coords_unique(datasets, pretty=True) - np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) - np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) - assert 'acq_time' not in res['var1'].coords - assert 'acq_time' not in 
 # Coords unique and pretty=True -> Don't modify coordinate names - datasets['var2']['acq_time'] = ('y', time1) + datasets["var2"]["acq_time"] = ("y", time1) res = make_alt_coords_unique(datasets, pretty=True) - np.testing.assert_array_equal(res['var1']['acq_time'], time1) - np.testing.assert_array_equal(res['var2']['acq_time'], time1) - assert 'var1_acq_time' not in res['var1'].coords - assert 'var2_acq_time' not in res['var2'].coords + np.testing.assert_array_equal(res["var1"]["acq_time"], time1) + np.testing.assert_array_equal(res["var2"]["acq_time"], time1) + assert "var1_acq_time" not in res["var1"].coords + assert "var2_acq_time" not in res["var2"].coords def test_area2cf(self): """Test the conversion of an area to CF standards.""" from satpy.writers.cf_writer import area2cf - ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}, - attrs={'name': 'var1'}) + ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=("y", "x"), coords={"y": [1, 2], "x": [3, 4]}, + attrs={"name": "var1"}) # a) Area Definition and include_lonlats=False geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, + area_id="geos", + description="geos", + proj_id="geos", + projection={"proj": "geos", "h": 35785831., "a": 6378169., "b": 6356583.8}, width=2, height=2, area_extent=[-1, -1, 1, 1]) ds = ds_base.copy(deep=True) - ds.attrs['area'] = geos + ds.attrs["area"] = geos res = area2cf(ds, include_lonlats=False) assert len(res) == 2 assert res[0].size == 1 # grid mapping variable - assert res[0].name == res[1].attrs['grid_mapping'] + assert res[0].name == res[1].attrs["grid_mapping"] # b) Area Definition and include_lonlats=True ds = ds_base.copy(deep=True) - ds.attrs['area'] = geos + ds.attrs["area"] = geos res = area2cf(ds, include_lonlats=True) # same as above assert len(res) == 2 assert res[0].size == 1 # grid mapping variable - assert res[0].name == res[1].attrs['grid_mapping'] + assert res[0].name == res[1].attrs["grid_mapping"] # but now also have the lon/lats - assert 'longitude' in res[1].coords - assert 'latitude' in res[1].coords + assert "longitude" in res[1].coords + assert "latitude" in res[1].coords # c) Swath Definition swath = pyresample.geometry.SwathDefinition(lons=[[1, 1], [2, 2]], lats=[[1, 2], [1, 2]]) ds = ds_base.copy(deep=True) - ds.attrs['area'] = swath + ds.attrs["area"] = swath res = area2cf(ds, include_lonlats=False) assert len(res) == 1 - assert 'longitude' in res[0].coords - assert 'latitude' in res[0].coords - assert 'grid_mapping' not in res[0].attrs + assert "longitude" in res[0].coords + assert "latitude" in res[0].coords + assert "grid_mapping" not in res[0].attrs def test__add_grid_mapping(self): """Test the conversion from pyresample area object to CF grid mapping.""" @@ -873,175 +873,175 @@ def _gm_matches(gmapping, expected): else: np.testing.assert_almost_equal(test_val, attr_val, decimal=3) - ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}, - attrs={'name': 'var1'}) + ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=("y", "x"), coords={"y": [1, 2], "x": [3, 4]}, + attrs={"name": "var1"}) # a) Projection has a corresponding CF 
representation (e.g. geos) a = 6378169. b = 6356583.8 h = 35785831. geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': h, 'a': a, 'b': b, - 'lat_0': 0, 'lon_0': 0}, + area_id="geos", + description="geos", + proj_id="geos", + projection={"proj": "geos", "h": h, "a": a, "b": b, + "lat_0": 0, "lon_0": 0}, width=2, height=2, area_extent=[-1, -1, 1, 1]) geos_expected = xr.DataArray(data=0, - attrs={'perspective_point_height': h, - 'latitude_of_projection_origin': 0, - 'longitude_of_projection_origin': 0, - 'grid_mapping_name': 'geostationary', - 'semi_major_axis': a, - 'semi_minor_axis': b, + attrs={"perspective_point_height": h, + "latitude_of_projection_origin": 0, + "longitude_of_projection_origin": 0, + "grid_mapping_name": "geostationary", + "semi_major_axis": a, + "semi_minor_axis": b, # 'sweep_angle_axis': None, }) ds = ds_base.copy() - ds.attrs['area'] = geos + ds.attrs["area"] = geos new_ds, grid_mapping = _add_grid_mapping(ds) - if 'sweep_angle_axis' in grid_mapping.attrs: + if "sweep_angle_axis" in grid_mapping.attrs: # older versions of pyproj might not include this - assert grid_mapping.attrs['sweep_angle_axis'] == 'y' + assert grid_mapping.attrs["sweep_angle_axis"] == "y" - assert new_ds.attrs['grid_mapping'] == 'geos' + assert new_ds.attrs["grid_mapping"] == "geos" _gm_matches(grid_mapping, geos_expected) # should not have been modified - assert 'grid_mapping' not in ds.attrs + assert "grid_mapping" not in ds.attrs # b) Projection does not have a corresponding CF representation (COSMO) cosmo7 = pyresample.geometry.AreaDefinition( - area_id='cosmo7', - description='cosmo7', - proj_id='cosmo7', - projection={'proj': 'ob_tran', 'ellps': 'WGS84', 'lat_0': 46, 'lon_0': 4.535, - 'o_proj': 'stere', 'o_lat_p': 90, 'o_lon_p': -5.465}, + area_id="cosmo7", + description="cosmo7", + proj_id="cosmo7", + projection={"proj": "ob_tran", "ellps": "WGS84", "lat_0": 46, "lon_0": 4.535, + "o_proj": "stere", "o_lat_p": 90, "o_lon_p": -5.465}, width=597, height=510, area_extent=[-1812933, -1003565, 814056, 1243448] ) ds = ds_base.copy() - ds.attrs['area'] = cosmo7 + ds.attrs["area"] = cosmo7 new_ds, grid_mapping = _add_grid_mapping(ds) - assert 'crs_wkt' in grid_mapping.attrs - wkt = grid_mapping.attrs['crs_wkt'] + assert "crs_wkt" in grid_mapping.attrs + wkt = grid_mapping.attrs["crs_wkt"] assert 'ELLIPSOID["WGS 84"' in wkt assert 'PARAMETER["lat_0",46' in wkt assert 'PARAMETER["lon_0",4.535' in wkt assert 'PARAMETER["o_lat_p",90' in wkt assert 'PARAMETER["o_lon_p",-5.465' in wkt - assert new_ds.attrs['grid_mapping'] == 'cosmo7' + assert new_ds.attrs["grid_mapping"] == "cosmo7" # c) Projection Transverse Mercator lat_0 = 36.5 lon_0 = 15.0 tmerc = pyresample.geometry.AreaDefinition( - area_id='tmerc', - description='tmerc', - proj_id='tmerc', - projection={'proj': 'tmerc', 'ellps': 'WGS84', 'lat_0': 36.5, 'lon_0': 15.0}, + area_id="tmerc", + description="tmerc", + proj_id="tmerc", + projection={"proj": "tmerc", "ellps": "WGS84", "lat_0": 36.5, "lon_0": 15.0}, width=2, height=2, area_extent=[-1, -1, 1, 1]) tmerc_expected = xr.DataArray(data=0, - attrs={'latitude_of_projection_origin': lat_0, - 'longitude_of_central_meridian': lon_0, - 'grid_mapping_name': 'transverse_mercator', - 'reference_ellipsoid_name': 'WGS 84', - 'false_easting': 0., - 'false_northing': 0., + attrs={"latitude_of_projection_origin": lat_0, + "longitude_of_central_meridian": lon_0, + "grid_mapping_name": "transverse_mercator", + 
"reference_ellipsoid_name": "WGS 84", + "false_easting": 0., + "false_northing": 0., }) ds = ds_base.copy() - ds.attrs['area'] = tmerc + ds.attrs["area"] = tmerc new_ds, grid_mapping = _add_grid_mapping(ds) - assert new_ds.attrs['grid_mapping'] == 'tmerc' + assert new_ds.attrs["grid_mapping"] == "tmerc" _gm_matches(grid_mapping, tmerc_expected) # d) Projection that has a representation but no explicit a/b h = 35785831. geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': h, 'datum': 'WGS84', 'ellps': 'GRS80', - 'lat_0': 0, 'lon_0': 0}, + area_id="geos", + description="geos", + proj_id="geos", + projection={"proj": "geos", "h": h, "datum": "WGS84", "ellps": "GRS80", + "lat_0": 0, "lon_0": 0}, width=2, height=2, area_extent=[-1, -1, 1, 1]) geos_expected = xr.DataArray(data=0, - attrs={'perspective_point_height': h, - 'latitude_of_projection_origin': 0, - 'longitude_of_projection_origin': 0, - 'grid_mapping_name': 'geostationary', + attrs={"perspective_point_height": h, + "latitude_of_projection_origin": 0, + "longitude_of_projection_origin": 0, + "grid_mapping_name": "geostationary", # 'semi_major_axis': 6378137.0, # 'semi_minor_axis': 6356752.314, # 'sweep_angle_axis': None, }) ds = ds_base.copy() - ds.attrs['area'] = geos + ds.attrs["area"] = geos new_ds, grid_mapping = _add_grid_mapping(ds) - assert new_ds.attrs['grid_mapping'] == 'geos' + assert new_ds.attrs["grid_mapping"] == "geos" _gm_matches(grid_mapping, geos_expected) # e) oblique Mercator area = pyresample.geometry.AreaDefinition( - area_id='omerc_otf', - description='On-the-fly omerc area', - proj_id='omerc', - projection={'alpha': '9.02638777018478', 'ellps': 'WGS84', 'gamma': '0', 'k': '1', - 'lat_0': '-0.256794486098476', 'lonc': '13.7888658224205', - 'proj': 'omerc', 'units': 'm'}, + area_id="omerc_otf", + description="On-the-fly omerc area", + proj_id="omerc", + projection={"alpha": "9.02638777018478", "ellps": "WGS84", "gamma": "0", "k": "1", + "lat_0": "-0.256794486098476", "lonc": "13.7888658224205", + "proj": "omerc", "units": "m"}, width=2837, height=5940, area_extent=[-1460463.0893, 3455291.3877, 1538407.1158, 9615788.8787] ) - omerc_dict = {'azimuth_of_central_line': 9.02638777018478, - 'false_easting': 0., - 'false_northing': 0., + omerc_dict = {"azimuth_of_central_line": 9.02638777018478, + "false_easting": 0., + "false_northing": 0., # 'gamma': 0, # this is not CF compliant - 'grid_mapping_name': "oblique_mercator", - 'latitude_of_projection_origin': -0.256794486098476, - 'longitude_of_projection_origin': 13.7888658224205, + "grid_mapping_name": "oblique_mercator", + "latitude_of_projection_origin": -0.256794486098476, + "longitude_of_projection_origin": 13.7888658224205, # 'prime_meridian_name': "Greenwich", - 'reference_ellipsoid_name': "WGS 84"} + "reference_ellipsoid_name": "WGS 84"} omerc_expected = xr.DataArray(data=0, attrs=omerc_dict) ds = ds_base.copy() - ds.attrs['area'] = area + ds.attrs["area"] = area new_ds, grid_mapping = _add_grid_mapping(ds) - assert new_ds.attrs['grid_mapping'] == 'omerc_otf' + assert new_ds.attrs["grid_mapping"] == "omerc_otf" _gm_matches(grid_mapping, omerc_expected) # f) Projection that has a representation but no explicit a/b h = 35785831. 
geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': h, 'datum': 'WGS84', 'ellps': 'GRS80', - 'lat_0': 0, 'lon_0': 0}, + area_id="geos", + description="geos", + proj_id="geos", + projection={"proj": "geos", "h": h, "datum": "WGS84", "ellps": "GRS80", + "lat_0": 0, "lon_0": 0}, width=2, height=2, area_extent=[-1, -1, 1, 1]) geos_expected = xr.DataArray(data=0, - attrs={'perspective_point_height': h, - 'latitude_of_projection_origin': 0, - 'longitude_of_projection_origin': 0, - 'grid_mapping_name': 'geostationary', - 'reference_ellipsoid_name': 'WGS 84', + attrs={"perspective_point_height": h, + "latitude_of_projection_origin": 0, + "longitude_of_projection_origin": 0, + "grid_mapping_name": "geostationary", + "reference_ellipsoid_name": "WGS 84", }) ds = ds_base.copy() - ds.attrs['area'] = geos + ds.attrs["area"] = geos new_ds, grid_mapping = _add_grid_mapping(ds) - assert new_ds.attrs['grid_mapping'] == 'geos' + assert new_ds.attrs["grid_mapping"] == "geos" _gm_matches(grid_mapping, geos_expected) def test_add_lonlat_coords(self): @@ -1049,50 +1049,50 @@ def test_add_lonlat_coords(self): from satpy.writers.cf_writer import add_lonlat_coords area = pyresample.geometry.AreaDefinition( - 'seviri', - 'Native SEVIRI grid', - 'geos', + "seviri", + "Native SEVIRI grid", + "geos", "+a=6378169.0 +h=35785831.0 +b=6356583.8 +lon_0=0 +proj=geos", 2, 2, [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662] ) lons_ref, lats_ref = area.get_lonlats() - dataarray = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), attrs={'area': area}) + dataarray = xr.DataArray(data=[[1, 2], [3, 4]], dims=("y", "x"), attrs={"area": area}) res = add_lonlat_coords(dataarray) # original should be unmodified - assert 'longitude' not in dataarray.coords - assert set(res.coords) == {'longitude', 'latitude'} - lat = res['latitude'] - lon = res['longitude'] + assert "longitude" not in dataarray.coords + assert set(res.coords) == {"longitude", "latitude"} + lat = res["latitude"] + lon = res["longitude"] np.testing.assert_array_equal(lat.data, lats_ref) np.testing.assert_array_equal(lon.data, lons_ref) - assert {'name': 'latitude', 'standard_name': 'latitude', 'units': 'degrees_north'}.items() <= lat.attrs.items() - assert {'name': 'longitude', 'standard_name': 'longitude', 'units': 'degrees_east'}.items() <= lon.attrs.items() + assert {"name": "latitude", "standard_name": "latitude", "units": "degrees_north"}.items() <= lat.attrs.items() + assert {"name": "longitude", "standard_name": "longitude", "units": "degrees_east"}.items() <= lon.attrs.items() area = pyresample.geometry.AreaDefinition( - 'seviri', - 'Native SEVIRI grid', - 'geos', + "seviri", + "Native SEVIRI grid", + "geos", "+a=6378169.0 +h=35785831.0 +b=6356583.8 +lon_0=0 +proj=geos", 10, 10, [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662] ) lons_ref, lats_ref = area.get_lonlats() dataarray = xr.DataArray(data=da.from_array(np.arange(3 * 10 * 10).reshape(3, 10, 10), chunks=(1, 5, 5)), - dims=('bands', 'y', 'x'), attrs={'area': area}) + dims=("bands", "y", "x"), attrs={"area": area}) res = add_lonlat_coords(dataarray) # original should be unmodified - assert 'longitude' not in dataarray.coords - assert set(res.coords) == {'longitude', 'latitude'} - lat = res['latitude'] - lon = res['longitude'] + assert "longitude" not in dataarray.coords + assert set(res.coords) == {"longitude", "latitude"} + lat = res["latitude"] + lon = 
res["longitude"] np.testing.assert_array_equal(lat.data, lats_ref) np.testing.assert_array_equal(lon.data, lons_ref) - assert {'name': 'latitude', 'standard_name': 'latitude', 'units': 'degrees_north'}.items() <= lat.attrs.items() - assert {'name': 'longitude', 'standard_name': 'longitude', 'units': 'degrees_east'}.items() <= lon.attrs.items() + assert {"name": "latitude", "standard_name": "latitude", "units": "degrees_north"}.items() <= lat.attrs.items() + assert {"name": "longitude", "standard_name": "longitude", "units": "degrees_east"}.items() <= lon.attrs.items() def test_load_module_with_old_pyproj(self): """Test that cf_writer can still be loaded with pyproj 1.9.6.""" @@ -1100,50 +1100,50 @@ def test_load_module_with_old_pyproj(self): import sys import pyproj # noqa 401 - old_version = sys.modules['pyproj'].__version__ - sys.modules['pyproj'].__version__ = "1.9.6" + old_version = sys.modules["pyproj"].__version__ + sys.modules["pyproj"].__version__ = "1.9.6" try: - importlib.reload(sys.modules['satpy.writers.cf_writer']) + importlib.reload(sys.modules["satpy.writers.cf_writer"]) finally: # Tear down - sys.modules['pyproj'].__version__ = old_version - importlib.reload(sys.modules['satpy.writers.cf_writer']) + sys.modules["pyproj"].__version__ = old_version + importlib.reload(sys.modules["satpy.writers.cf_writer"]) def test_global_attr_default_history_and_Conventions(self): """Test saving global attributes history and Conventions.""" scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) - scn['test-array'] = xr.DataArray([[1, 2, 3]], - dims=('y', 'x'), + scn["test-array"] = xr.DataArray([[1, 2, 3]], + dims=("y", "x"), attrs=dict(start_time=start_time, end_time=end_time, - prerequisites=[make_dsq(name='hej')])) + prerequisites=[make_dsq(name="hej")])) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename) as f: - assert f.attrs['Conventions'] == 'CF-1.7' - assert 'Created by pytroll/satpy on' in f.attrs['history'] + assert f.attrs["Conventions"] == "CF-1.7" + assert "Created by pytroll/satpy on" in f.attrs["history"] def test_global_attr_history_and_Conventions(self): """Test saving global attributes history and Conventions.""" scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) - scn['test-array'] = xr.DataArray([[1, 2, 3]], - dims=('y', 'x'), + scn["test-array"] = xr.DataArray([[1, 2, 3]], + dims=("y", "x"), attrs=dict(start_time=start_time, end_time=end_time, - prerequisites=[make_dsq(name='hej')])) + prerequisites=[make_dsq(name="hej")])) header_attrs = {} - header_attrs['history'] = ('TEST add history',) - header_attrs['Conventions'] = 'CF-1.7, ACDD-1.3' + header_attrs["history"] = ("TEST add history",) + header_attrs["Conventions"] = "CF-1.7, ACDD-1.3" with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf', header_attrs=header_attrs) + scn.save_datasets(filename=filename, writer="cf", header_attrs=header_attrs) with xr.open_dataset(filename) as f: - assert f.attrs['Conventions'] == 'CF-1.7, ACDD-1.3' - assert 'TEST add history\n' in f.attrs['history'] - assert 'Created by pytroll/satpy on' in f.attrs['history'] + assert f.attrs["Conventions"] == "CF-1.7, ACDD-1.3" + assert "TEST add history\n" in f.attrs["history"] + assert "Created by pytroll/satpy on" in f.attrs["history"] class TestCFWriterData: @@ -1156,49 +1156,49 @@ def datasets(self): y = [1, 2] x = [1, 
2] geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, + area_id="geos", + description="geos", + proj_id="geos", + projection={"proj": "geos", "h": 35785831., "a": 6378169., "b": 6356583.8}, width=2, height=2, area_extent=[-1, -1, 1, 1]) datasets = { - 'var1': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x}), - 'var2': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x}), - 'lat': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x}), - 'lon': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x})} - datasets['lat'].attrs['standard_name'] = 'latitude' - datasets['var1'].attrs['standard_name'] = 'dummy' - datasets['var2'].attrs['standard_name'] = 'dummy' - datasets['var2'].attrs['area'] = geos - datasets['var1'].attrs['area'] = geos - datasets['lat'].attrs['name'] = 'lat' - datasets['var1'].attrs['name'] = 'var1' - datasets['var2'].attrs['name'] = 'var2' - datasets['lon'].attrs['name'] = 'lon' + "var1": xr.DataArray(data=data, + dims=("y", "x"), + coords={"y": y, "x": x}), + "var2": xr.DataArray(data=data, + dims=("y", "x"), + coords={"y": y, "x": x}), + "lat": xr.DataArray(data=data, + dims=("y", "x"), + coords={"y": y, "x": x}), + "lon": xr.DataArray(data=data, + dims=("y", "x"), + coords={"y": y, "x": x})} + datasets["lat"].attrs["standard_name"] = "latitude" + datasets["var1"].attrs["standard_name"] = "dummy" + datasets["var2"].attrs["standard_name"] = "dummy" + datasets["var2"].attrs["area"] = geos + datasets["var1"].attrs["area"] = geos + datasets["lat"].attrs["name"] = "lat" + datasets["var1"].attrs["name"] = "var1" + datasets["var2"].attrs["name"] = "var2" + datasets["lon"].attrs["name"] = "lon" return datasets def test_is_lon_or_lat_dataarray(self, datasets): """Test the is_lon_or_lat_dataarray function.""" from satpy.writers.cf_writer import is_lon_or_lat_dataarray - assert is_lon_or_lat_dataarray(datasets['lat']) - assert not is_lon_or_lat_dataarray(datasets['var1']) + assert is_lon_or_lat_dataarray(datasets["lat"]) + assert not is_lon_or_lat_dataarray(datasets["var1"]) def test_has_projection_coords(self, datasets): """Test the has_projection_coords function.""" from satpy.writers.cf_writer import has_projection_coords assert has_projection_coords(datasets) - datasets['lat'].attrs['standard_name'] = 'dummy' + datasets["lat"].attrs["standard_name"] = "dummy" assert not has_projection_coords(datasets) def test_collect_cf_dataarrays_with_latitude_named_lat(self, datasets): @@ -1206,7 +1206,7 @@ def test_collect_cf_dataarrays_with_latitude_named_lat(self, datasets): from satpy.writers.cf_writer import _collect_cf_dataset datasets_list = [datasets[key] for key in datasets.keys()] - datasets_list_no_latlon = [datasets[key] for key in ['var1', 'var2']] + datasets_list_no_latlon = [datasets[key] for key in ["var1", "var2"]] # Collect datasets ds = _collect_cf_dataset(datasets_list, include_lonlats=True) @@ -1214,13 +1214,13 @@ def test_collect_cf_dataarrays_with_latitude_named_lat(self, datasets): # Test results assert len(ds.keys()) == 5 - assert set(ds.keys()) == {'var1', 'var2', 'lon', 'lat', 'geos'} + assert set(ds.keys()) == {"var1", "var2", "lon", "lat", "geos"} with pytest.raises(KeyError): - ds['var1'].attrs["latitude"] + ds["var1"].attrs["latitude"] with pytest.raises(KeyError): - ds['var1'].attrs["longitude"] - assert ds2['var1']['latitude'].attrs['name'] == 'latitude' - assert 
ds2['var1']['longitude'].attrs['name'] == 'longitude' + ds["var1"].attrs["longitude"] + assert ds2["var1"]["latitude"].attrs["name"] == "latitude" + assert ds2["var1"]["longitude"].attrs["name"] == "longitude" class EncodingUpdateTest: @@ -1229,21 +1229,21 @@ class EncodingUpdateTest: @pytest.fixture def fake_ds(self): """Create fake data for testing.""" - ds = xr.Dataset({'foo': (('y', 'x'), [[1, 2], [3, 4]]), - 'bar': (('y', 'x'), [[3, 4], [5, 6]])}, - coords={'y': [1, 2], - 'x': [3, 4], - 'lon': (('y', 'x'), [[7, 8], [9, 10]])}) + ds = xr.Dataset({"foo": (("y", "x"), [[1, 2], [3, 4]]), + "bar": (("y", "x"), [[3, 4], [5, 6]])}, + coords={"y": [1, 2], + "x": [3, 4], + "lon": (("y", "x"), [[7, 8], [9, 10]])}) return ds @pytest.fixture def fake_ds_digit(self): """Create fake data for testing.""" - ds_digit = xr.Dataset({'CHANNEL_1': (('y', 'x'), [[1, 2], [3, 4]]), - 'CHANNEL_2': (('y', 'x'), [[3, 4], [5, 6]])}, - coords={'y': [1, 2], - 'x': [3, 4], - 'lon': (('y', 'x'), [[7, 8], [9, 10]])}) + ds_digit = xr.Dataset({"CHANNEL_1": (("y", "x"), [[1, 2], [3, 4]]), + "CHANNEL_2": (("y", "x"), [[3, 4], [5, 6]])}, + coords={"y": [1, 2], + "x": [3, 4], + "lon": (("y", "x"), [[7, 8], [9, 10]])}) return ds_digit def test_dataset_name_digit(self, fake_ds_digit): @@ -1252,18 +1252,18 @@ def test_dataset_name_digit(self, fake_ds_digit): # Dataset with name staring with digit ds_digit = fake_ds_digit - kwargs = {'encoding': {'1': {'dtype': 'float32'}, - '2': {'dtype': 'float32'}}, - 'other': 'kwargs'} - enc, other_kwargs = update_encoding(ds_digit, kwargs, numeric_name_prefix='CHANNEL_') + kwargs = {"encoding": {"1": {"dtype": "float32"}, + "2": {"dtype": "float32"}}, + "other": "kwargs"} + enc, other_kwargs = update_encoding(ds_digit, kwargs, numeric_name_prefix="CHANNEL_") expected_dict = { - 'y': {'_FillValue': None}, - 'x': {'_FillValue': None}, - 'CHANNEL_1': {'dtype': 'float32'}, - 'CHANNEL_2': {'dtype': 'float32'} + "y": {"_FillValue": None}, + "x": {"_FillValue": None}, + "CHANNEL_1": {"dtype": "float32"}, + "CHANNEL_2": {"dtype": "float32"} } assert enc == expected_dict - assert other_kwargs == {'other': 'kwargs'} + assert other_kwargs == {"other": "kwargs"} def test_without_time(self, fake_ds): """Test data with no time dimension.""" @@ -1271,29 +1271,29 @@ def test_without_time(self, fake_ds): # Without time dimension ds = fake_ds.chunk(2) - kwargs = {'encoding': {'bar': {'chunksizes': (1, 1)}}, - 'other': 'kwargs'} + kwargs = {"encoding": {"bar": {"chunksizes": (1, 1)}}, + "other": "kwargs"} enc, other_kwargs = update_encoding(ds, kwargs) expected_dict = { - 'y': {'_FillValue': None}, - 'x': {'_FillValue': None}, - 'lon': {'chunksizes': (2, 2)}, - 'foo': {'chunksizes': (2, 2)}, - 'bar': {'chunksizes': (1, 1)} + "y": {"_FillValue": None}, + "x": {"_FillValue": None}, + "lon": {"chunksizes": (2, 2)}, + "foo": {"chunksizes": (2, 2)}, + "bar": {"chunksizes": (1, 1)} } assert enc == expected_dict - assert other_kwargs == {'other': 'kwargs'} + assert other_kwargs == {"other": "kwargs"} # Chunksize may not exceed shape ds = fake_ds.chunk(8) - kwargs = {'encoding': {}, 'other': 'kwargs'} + kwargs = {"encoding": {}, "other": "kwargs"} enc, other_kwargs = update_encoding(ds, kwargs) expected_dict = { - 'y': {'_FillValue': None}, - 'x': {'_FillValue': None}, - 'lon': {'chunksizes': (2, 2)}, - 'foo': {'chunksizes': (2, 2)}, - 'bar': {'chunksizes': (2, 2)} + "y": {"_FillValue": None}, + "x": {"_FillValue": None}, + "lon": {"chunksizes": (2, 2)}, + "foo": {"chunksizes": (2, 2)}, + "bar": {"chunksizes": (2, 
2)} } assert enc == expected_dict @@ -1302,26 +1302,26 @@ def test_with_time(self, fake_ds): from satpy.writers.cf_writer import update_encoding # With time dimension - ds = fake_ds.chunk(8).expand_dims({'time': [datetime(2009, 7, 1, 12, 15)]}) - kwargs = {'encoding': {'bar': {'chunksizes': (1, 1, 1)}}, - 'other': 'kwargs'} + ds = fake_ds.chunk(8).expand_dims({"time": [datetime(2009, 7, 1, 12, 15)]}) + kwargs = {"encoding": {"bar": {"chunksizes": (1, 1, 1)}}, + "other": "kwargs"} enc, other_kwargs = update_encoding(ds, kwargs) expected_dict = { - 'y': {'_FillValue': None}, - 'x': {'_FillValue': None}, - 'lon': {'chunksizes': (2, 2)}, - 'foo': {'chunksizes': (1, 2, 2)}, - 'bar': {'chunksizes': (1, 1, 1)}, - 'time': {'_FillValue': None, - 'calendar': 'proleptic_gregorian', - 'units': 'days since 2009-07-01 12:15:00'}, - 'time_bnds': {'_FillValue': None, - 'calendar': 'proleptic_gregorian', - 'units': 'days since 2009-07-01 12:15:00'} + "y": {"_FillValue": None}, + "x": {"_FillValue": None}, + "lon": {"chunksizes": (2, 2)}, + "foo": {"chunksizes": (1, 2, 2)}, + "bar": {"chunksizes": (1, 1, 1)}, + "time": {"_FillValue": None, + "calendar": "proleptic_gregorian", + "units": "days since 2009-07-01 12:15:00"}, + "time_bnds": {"_FillValue": None, + "calendar": "proleptic_gregorian", + "units": "days since 2009-07-01 12:15:00"} } assert enc == expected_dict # User-defined encoding may not be altered - assert kwargs['encoding'] == {'bar': {'chunksizes': (1, 1, 1)}} + assert kwargs["encoding"] == {"bar": {"chunksizes": (1, 1, 1)}} class TestEncodingKwarg: @@ -1335,7 +1335,7 @@ def scene(self): "start_time": datetime(2018, 5, 30, 10, 0), "end_time": datetime(2018, 5, 30, 10, 15) } - scn['test-array'] = xr.DataArray([1., 2, 3], attrs=attrs) + scn["test-array"] = xr.DataArray([1., 2, 3], attrs=attrs) return scn @pytest.fixture(params=[True, False]) @@ -1347,11 +1347,11 @@ def compression_on(self, request): def encoding(self, compression_on): """Get encoding.""" enc = { - 'test-array': { - 'dtype': 'int8', - 'scale_factor': 0.1, - 'add_offset': 0.0, - '_FillValue': 3, + "test-array": { + "dtype": "int8", + "scale_factor": 0.1, + "add_offset": 0.0, + "_FillValue": 3, } } if compression_on: @@ -1384,15 +1384,15 @@ def expected(self, complevel_exp): def test_encoding_kwarg(self, scene, encoding, filename, expected): """Test 'encoding' keyword argument.""" - scene.save_datasets(filename=filename, encoding=encoding, writer='cf') + scene.save_datasets(filename=filename, encoding=encoding, writer="cf") self._assert_encoding_as_expected(filename, expected) def _assert_encoding_as_expected(self, filename, expected): with xr.open_dataset(filename, mask_and_scale=False) as f: - np.testing.assert_array_equal(f['test-array'][:], expected["data"]) - assert f['test-array'].attrs['scale_factor'] == expected["scale_factor"] - assert f['test-array'].attrs['_FillValue'] == expected["fill_value"] - assert f['test-array'].dtype == expected["dtype"] + np.testing.assert_array_equal(f["test-array"][:], expected["data"]) + assert f["test-array"].attrs["scale_factor"] == expected["scale_factor"] + assert f["test-array"].attrs["_FillValue"] == expected["fill_value"] + assert f["test-array"].dtype == expected["dtype"] assert f["test-array"].encoding["complevel"] == expected["complevel"] def test_warning_if_backends_dont_match(self, scene, filename, monkeypatch): @@ -1427,7 +1427,7 @@ def scene_with_encoding(self, scene, encoding): def test_encoding_attribute(self, scene_with_encoding, filename, expected): """Test 'encoding' dataset 
attribute.""" - scene_with_encoding.save_datasets(filename=filename, writer='cf') + scene_with_encoding.save_datasets(filename=filename, writer="cf") self._assert_encoding_as_expected(filename, expected) diff --git a/satpy/tests/writer_tests/test_geotiff.py b/satpy/tests/writer_tests/test_geotiff.py index bb6afc0c21..74fcd43609 100644 --- a/satpy/tests/writer_tests/test_geotiff.py +++ b/satpy/tests/writer_tests/test_geotiff.py @@ -34,9 +34,9 @@ def _get_test_datasets_2d(): """Create a single 2D test dataset.""" ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow(), + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow(), "units": "K"} ) return [ds1] @@ -56,10 +56,10 @@ def _get_test_datasets_3d(): """Create a single 3D test dataset.""" ds1 = xr.DataArray( da.zeros((3, 100, 200), chunks=50), - dims=('bands', 'y', 'x'), - coords={'bands': ['R', 'G', 'B']}, - attrs={'name': 'test', - 'start_time': datetime.utcnow()} + dims=("bands", "y", "x"), + coords={"bands": ["R", "G", "B"]}, + attrs={"name": "test", + "start_time": datetime.utcnow()} ) return [ds1] @@ -103,7 +103,7 @@ def test_simple_delayed_write(self, tmp_path): assert isinstance(res[0][0], da.Array) da.store(res[0], res[1]) for target in res[1]: - if hasattr(target, 'close'): + if hasattr(target, "close"): target.close() def test_colormap_write(self, tmp_path): @@ -138,43 +138,43 @@ def test_dtype_for_enhance_false(self, tmp_path): from satpy.writers.geotiff import GeoTIFFWriter datasets = _get_test_datasets_2d() w = GeoTIFFWriter(base_dir=tmp_path, enhance=False) - with mock.patch('satpy.writers.XRImage.save') as save_method: + with mock.patch("satpy.writers.XRImage.save") as save_method: save_method.return_value = None w.save_datasets(datasets, compute=False) - assert save_method.call_args[1]['dtype'] == np.float64 + assert save_method.call_args[1]["dtype"] == np.float64 def test_dtype_for_enhance_false_and_given_dtype(self, tmp_path): """Test that dtype of dataset is used if enhance=False and dtype=uint8.""" from satpy.writers.geotiff import GeoTIFFWriter datasets = _get_test_datasets_2d() w = GeoTIFFWriter(base_dir=tmp_path, enhance=False, dtype=np.uint8) - with mock.patch('satpy.writers.XRImage.save') as save_method: + with mock.patch("satpy.writers.XRImage.save") as save_method: save_method.return_value = None w.save_datasets(datasets, compute=False) - assert save_method.call_args[1]['dtype'] == np.uint8 + assert save_method.call_args[1]["dtype"] == np.uint8 def test_fill_value_from_config(self, tmp_path): """Test fill_value coming from the writer config.""" from satpy.writers.geotiff import GeoTIFFWriter datasets = _get_test_datasets_2d() w = GeoTIFFWriter(base_dir=tmp_path) - w.info['fill_value'] = 128 - with mock.patch('satpy.writers.XRImage.save') as save_method: + w.info["fill_value"] = 128 + with mock.patch("satpy.writers.XRImage.save") as save_method: save_method.return_value = None w.save_datasets(datasets, compute=False) - assert save_method.call_args[1]['fill_value'] == 128 + assert save_method.call_args[1]["fill_value"] == 128 def test_tags(self, tmp_path): """Test tags being added.""" from satpy.writers.geotiff import GeoTIFFWriter datasets = _get_test_datasets_2d() - w = GeoTIFFWriter(tags={'test1': 1}, base_dir=tmp_path) - w.info['fill_value'] = 128 - with mock.patch('satpy.writers.XRImage.save') as save_method: + w = GeoTIFFWriter(tags={"test1": 1}, base_dir=tmp_path) + w.info["fill_value"] = 128 + with 
mock.patch("satpy.writers.XRImage.save") as save_method: save_method.return_value = None - w.save_datasets(datasets, tags={'test2': 2}, compute=False) - called_tags = save_method.call_args[1]['tags'] - assert called_tags == {'test1': 1, 'test2': 2} + w.save_datasets(datasets, tags={"test2": 2}, compute=False) + called_tags = save_method.call_args[1]["tags"] + assert called_tags == {"test1": 1, "test2": 2} @pytest.mark.parametrize( "input_func", @@ -195,11 +195,11 @@ def test_scale_offset(self, input_func, save_kwargs, tmp_path): """Test tags being added.""" from satpy.writers.geotiff import GeoTIFFWriter datasets = input_func() - w = GeoTIFFWriter(tags={'test1': 1}, base_dir=tmp_path) - w.info['fill_value'] = 128 - with mock.patch('satpy.writers.XRImage.save') as save_method: + w = GeoTIFFWriter(tags={"test1": 1}, base_dir=tmp_path) + w.info["fill_value"] = 128 + with mock.patch("satpy.writers.XRImage.save") as save_method: save_method.return_value = None - w.save_datasets(datasets, tags={'test2': 2}, compute=False, **save_kwargs) + w.save_datasets(datasets, tags={"test2": 2}, compute=False, **save_kwargs) kwarg_name = "include_scale_offset_tags" if "include_scale_offset" in save_kwargs else "scale_offset_tags" kwarg_value = save_method.call_args[1].get(kwarg_name) assert kwarg_value is not None @@ -209,10 +209,10 @@ def test_tiled_value_from_config(self, tmp_path): from satpy.writers.geotiff import GeoTIFFWriter datasets = _get_test_datasets_2d() w = GeoTIFFWriter(base_dir=tmp_path) - with mock.patch('satpy.writers.XRImage.save') as save_method: + with mock.patch("satpy.writers.XRImage.save") as save_method: save_method.return_value = None w.save_datasets(datasets, compute=False) - assert save_method.call_args[1]['tiled'] + assert save_method.call_args[1]["tiled"] def test_float_write_with_unit_conversion(self, tmp_path): """Test that geotiffs can be written as floats and convert units.""" diff --git a/satpy/tests/writer_tests/test_mitiff.py b/satpy/tests/writer_tests/test_mitiff.py index 6369cddc51..4e5c8b7c9c 100644 --- a/satpy/tests/writer_tests/test_mitiff.py +++ b/satpy/tests/writer_tests/test_mitiff.py @@ -55,11 +55,11 @@ def _get_test_datasets(self): from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -67,48 +67,48 @@ def _get_test_datasets(self): ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': '1', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'TEST_SENSOR_NAME', - 'area': area_def, - 'prerequisites': ['1'], - 'calibration': 'reflectance', - 'metadata_requirements': { - 'order': ['1'], - 'config': { - '1': {'alias': '1-VIS0.63', - 'calibration': 'reflectance', - 'min-val': '0', - 'max-val': '100'}, + dims=("y", "x"), + attrs={"name": "1", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "TEST_SENSOR_NAME", + "area": area_def, + "prerequisites": ["1"], + "calibration": "reflectance", + "metadata_requirements": { + "order": ["1"], + "config": { + "1": {"alias": "1-VIS0.63", + "calibration": "reflectance", + "min-val": "0", + "max-val": "100"}, }, - 'translate': {'1': '1', + "translate": {"1": "1", }, - 'file_pattern': '1_{start_time:%Y%m%d_%H%M%S}.mitiff' + "file_pattern": "1_{start_time:%Y%m%d_%H%M%S}.mitiff" }} ) ds2 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': '4', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'TEST_SENSOR_NAME', - 'area': area_def, - 'prerequisites': ['4'], - 'calibration': 'brightness_temperature', - 'metadata_requirements': { - 'order': ['4'], - 'config': { - '4': {'alias': '4-IR10.8', - 'calibration': 'brightness_temperature', - 'min-val': '-150', - 'max-val': '50'}, + dims=("y", "x"), + attrs={"name": "4", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "TEST_SENSOR_NAME", + "area": area_def, + "prerequisites": ["4"], + "calibration": "brightness_temperature", + "metadata_requirements": { + "order": ["4"], + "config": { + "4": {"alias": "4-IR10.8", + "calibration": "brightness_temperature", + "min-val": "-150", + "max-val": "50"}, }, - 'translate': {'4': '4', + "translate": {"4": "4", }, - 'file_pattern': '4_{start_time:%Y%m%d_%H%M%S}.mitiff'} + "file_pattern": "4_{start_time:%Y%m%d_%H%M%S}.mitiff"} } ) return [ds1, ds2] @@ -122,11 +122,11 @@ def _get_test_datasets_sensor_set(self): from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -134,48 +134,48 @@ def _get_test_datasets_sensor_set(self): ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': '1', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': {'TEST_SENSOR_NAME'}, - 'area': area_def, - 'prerequisites': ['1'], - 'calibration': 'reflectance', - 'metadata_requirements': { - 'order': ['1'], - 'config': { - '1': {'alias': '1-VIS0.63', - 'calibration': 'reflectance', - 'min-val': '0', - 'max-val': '100'}, + dims=("y", "x"), + attrs={"name": "1", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": {"TEST_SENSOR_NAME"}, + "area": area_def, + "prerequisites": ["1"], + "calibration": "reflectance", + "metadata_requirements": { + "order": ["1"], + "config": { + "1": {"alias": "1-VIS0.63", + "calibration": "reflectance", + "min-val": "0", + "max-val": "100"}, }, - 'translate': {'1': '1', + "translate": {"1": "1", }, - 'file_pattern': '1_{start_time:%Y%m%d_%H%M%S}.mitiff' + "file_pattern": "1_{start_time:%Y%m%d_%H%M%S}.mitiff" }} ) ds2 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': '4', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': {'TEST_SENSOR_NAME'}, - 'area': area_def, - 'prerequisites': ['4'], - 'calibration': 'brightness_temperature', - 'metadata_requirements': { - 'order': ['4'], - 'config': { - '4': {'alias': '4-IR10.8', - 'calibration': 'brightness_temperature', - 'min-val': '-150', - 'max-val': '50'}, + dims=("y", "x"), + attrs={"name": "4", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": {"TEST_SENSOR_NAME"}, + "area": area_def, + "prerequisites": ["4"], + "calibration": "brightness_temperature", + "metadata_requirements": { + "order": ["4"], + "config": { + "4": {"alias": "4-IR10.8", + "calibration": "brightness_temperature", + "min-val": "-150", + "max-val": "50"}, }, - 'translate': {'4': '4', + "translate": {"4": "4", }, - 'file_pattern': '4_{start_time:%Y%m%d_%H%M%S}.mitiff'} + "file_pattern": "4_{start_time:%Y%m%d_%H%M%S}.mitiff"} } ) return [ds1, ds2] @@ -189,11 +189,11 @@ def _get_test_dataset(self, bands=3): from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -201,13 +201,13 @@ def _get_test_dataset(self, bands=3): ds1 = xr.DataArray( da.zeros((bands, 100, 200), chunks=50), - dims=('bands', 'y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'TEST_SENSOR_NAME', - 'area': area_def, - 'prerequisites': ['1', '2', '3']} + dims=("bands", "y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "TEST_SENSOR_NAME", + "area": area_def, + "prerequisites": ["1", "2", "3"]} ) return ds1 @@ -220,11 +220,11 @@ def _get_test_one_dataset(self): from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=geos +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. h=36000. +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=geos +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. h=36000. +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -232,13 +232,13 @@ def _get_test_one_dataset(self): ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'avhrr', - 'area': area_def, - 'prerequisites': [10.8]} + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "avhrr", + "area": area_def, + "prerequisites": [10.8]} ) return ds1 @@ -251,11 +251,11 @@ def _get_test_one_dataset_sensor_set(self): from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=geos +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. h=36000. +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=geos +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. h=36000. +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -263,13 +263,13 @@ def _get_test_one_dataset_sensor_set(self): ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': {'avhrr'}, - 'area': area_def, - 'prerequisites': [10.8]} + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": {"avhrr"}, + "area": area_def, + "prerequisites": [10.8]} ) return ds1 @@ -281,11 +281,11 @@ def _get_test_dataset_with_bad_values(self, bands=3): from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -298,13 +298,13 @@ def _get_test_dataset_with_bad_values(self, bands=3): rgb_data = np.stack([data, data, data]) ds1 = xr.DataArray(rgb_data, - dims=('bands', 'y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'TEST_SENSOR_NAME', - 'area': area_def, - 'prerequisites': ['1', '2', '3']}) + dims=("bands", "y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "TEST_SENSOR_NAME", + "area": area_def, + "prerequisites": ["1", "2", "3"]}) return ds1 def _get_test_dataset_calibration(self, bands=6): @@ -319,93 +319,93 @@ def _get_test_dataset_calibration(self, bands=6): from satpy.scene import Scene from satpy.tests.utils import make_dsq area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), ) prereqs = [ - make_dsq(name='1', calibration='reflectance'), - make_dsq(name='2', calibration='reflectance'), - make_dsq(name='3', calibration='brightness_temperature'), - make_dsq(name='4', calibration='brightness_temperature'), - make_dsq(name='5', calibration='brightness_temperature'), - make_dsq(name='6', calibration='reflectance') + make_dsq(name="1", calibration="reflectance"), + make_dsq(name="2", calibration="reflectance"), + make_dsq(name="3", calibration="brightness_temperature"), + make_dsq(name="4", calibration="brightness_temperature"), + make_dsq(name="5", calibration="brightness_temperature"), + make_dsq(name="6", calibration="reflectance") ] scene = Scene() scene["1"] = xr.DataArray(da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'calibration': 'reflectance'}) + dims=("y", "x"), + attrs={"calibration": "reflectance"}) scene["2"] = xr.DataArray(da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'calibration': 'reflectance'}) + dims=("y", "x"), + attrs={"calibration": "reflectance"}) scene["3"] = xr.DataArray(da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'calibration': 'brightness_temperature'}) + dims=("y", "x"), + attrs={"calibration": "brightness_temperature"}) scene["4"] = xr.DataArray(da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'calibration': 'brightness_temperature'}) + dims=("y", "x"), + attrs={"calibration": "brightness_temperature"}) scene["5"] = xr.DataArray(da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'calibration': 'brightness_temperature'}) + dims=("y", "x"), + attrs={"calibration": "brightness_temperature"}) scene["6"] = xr.DataArray(da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'calibration': 'reflectance'}) + dims=("y", "x"), + attrs={"calibration": "reflectance"}) - data = xr.concat(scene, 'bands', coords='minimal') + data = xr.concat(scene, "bands", coords="minimal") bands = [] calibration = [] for p in scene: - calibration.append(p.attrs['calibration']) - bands.append(p.attrs['name']) - data['bands'] = list(bands) - new_attrs = {'name': 'datasets', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'test-sensor', - 'area': area_def, - 'prerequisites': prereqs, - 'metadata_requirements': { - 'order': ['1', '2', '3', '4', '5', '6'], - 'config': { - '1': 
{'alias': '1-VIS0.63', - 'calibration': 'reflectance', - 'min-val': '0', - 'max-val': '100'}, - '2': {'alias': '2-VIS0.86', - 'calibration': 'reflectance', - 'min-val': '0', - 'max-val': '100'}, - '3': {'alias': '3(3B)-IR3.7', - 'calibration': 'brightness_temperature', - 'min-val': '-150', - 'max-val': '50'}, - '4': {'alias': '4-IR10.8', - 'calibration': 'brightness_temperature', - 'min-val': '-150', - 'max-val': '50'}, - '5': {'alias': '5-IR11.5', - 'calibration': 'brightness_temperature', - 'min-val': '-150', - 'max-val': '50'}, - '6': {'alias': '6(3A)-VIS1.6', - 'calibration': 'reflectance', - 'min-val': '0', - 'max-val': '100'} + calibration.append(p.attrs["calibration"]) + bands.append(p.attrs["name"]) + data["bands"] = list(bands) + new_attrs = {"name": "datasets", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "test-sensor", + "area": area_def, + "prerequisites": prereqs, + "metadata_requirements": { + "order": ["1", "2", "3", "4", "5", "6"], + "config": { + "1": {"alias": "1-VIS0.63", + "calibration": "reflectance", + "min-val": "0", + "max-val": "100"}, + "2": {"alias": "2-VIS0.86", + "calibration": "reflectance", + "min-val": "0", + "max-val": "100"}, + "3": {"alias": "3(3B)-IR3.7", + "calibration": "brightness_temperature", + "min-val": "-150", + "max-val": "50"}, + "4": {"alias": "4-IR10.8", + "calibration": "brightness_temperature", + "min-val": "-150", + "max-val": "50"}, + "5": {"alias": "5-IR11.5", + "calibration": "brightness_temperature", + "min-val": "-150", + "max-val": "50"}, + "6": {"alias": "6(3A)-VIS1.6", + "calibration": "reflectance", + "min-val": "0", + "max-val": "100"} }, - 'translate': {'1': '1', - '2': '2', - '3': '3', - '4': '4', - '5': '5', - '6': '6' + "translate": {"1": "1", + "2": "2", + "3": "3", + "4": "4", + "5": "5", + "6": "6" }, - 'file_pattern': 'test-dataset-{start_time:%Y%m%d%H%M%S}.mitiff' + "file_pattern": "test-dataset-{start_time:%Y%m%d%H%M%S}.mitiff" } } ds1 = xr.DataArray(data=data.data, attrs=new_attrs, @@ -424,43 +424,43 @@ def _get_test_dataset_calibration_one_dataset(self, bands=1): from satpy.scene import Scene from satpy.tests.utils import make_dsq area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), ) - prereqs = [make_dsq(name='4', calibration='brightness_temperature')] + prereqs = [make_dsq(name="4", calibration="brightness_temperature")] scene = Scene() scene["4"] = xr.DataArray(da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'calibration': 'brightness_temperature'}) + dims=("y", "x"), + attrs={"calibration": "brightness_temperature"}) - data = scene['4'] + data = scene["4"] calibration = [] for p in scene: - calibration.append(p.attrs['calibration']) - new_attrs = {'name': 'datasets', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'test-sensor', - 'area': area_def, - 'prerequisites': prereqs, - 'metadata_requirements': { - 'order': ['4'], - 'config': { - '4': {'alias': 'BT', - 'calibration': 'brightness_temperature', - 'min-val': '-150', - 'max-val': '50'}, + calibration.append(p.attrs["calibration"]) + new_attrs = {"name": "datasets", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "test-sensor", + "area": area_def, + "prerequisites": prereqs, + "metadata_requirements": { + "order": ["4"], + "config": { + "4": {"alias": "BT", + "calibration": "brightness_temperature", + "min-val": "-150", + "max-val": "50"}, }, - 'translate': {'4': '4', + "translate": {"4": "4", }, - 'file_pattern': 'test-dataset-{start_time:%Y%m%d%H%M%S}.mitiff' + "file_pattern": "test-dataset-{start_time:%Y%m%d%H%M%S}.mitiff" } } ds1 = xr.DataArray(data=data.data, attrs=new_attrs, @@ -478,11 +478,11 @@ def _get_test_dataset_three_bands_two_prereq(self, bands=3): from satpy.tests.utils import make_dsq area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -490,15 +490,15 @@ def _get_test_dataset_three_bands_two_prereq(self, bands=3): ds1 = xr.DataArray( da.zeros((bands, 100, 200), chunks=50), - coords=[['R', 'G', 'B'], list(range(100)), list(range(200))], - dims=('bands', 'y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'TEST_SENSOR_NAME', - 'area': area_def, - 'prerequisites': [make_dsq(name='1', calibration='reflectance'), - make_dsq(name='2', calibration='reflectance')]} + coords=[["R", "G", "B"], list(range(100)), list(range(200))], + dims=("bands", "y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "TEST_SENSOR_NAME", + "area": area_def, + "prerequisites": [make_dsq(name="1", calibration="reflectance"), + make_dsq(name="2", calibration="reflectance")]} ) return ds1 @@ -513,11 +513,11 @@ def _get_test_dataset_three_bands_prereq(self, bands=3): from satpy.tests.utils import make_dsq area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -525,15 +525,15 @@ def _get_test_dataset_three_bands_prereq(self, bands=3): ds1 = xr.DataArray( da.zeros((bands, 100, 200), chunks=50), - coords=[['R', 'G', 'B'], list(range(100)), list(range(200))], - dims=('bands', 'y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'TEST_SENSOR_NAME', - 'area': area_def, - 'prerequisites': [make_dsq(wavelength=0.6, modifiers=('sunz_corrected',)), - make_dsq(wavelength=0.8, modifiers=('sunz_corrected',)), + coords=[["R", "G", "B"], list(range(100)), list(range(200))], + dims=("bands", "y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "TEST_SENSOR_NAME", + "area": area_def, + "prerequisites": [make_dsq(wavelength=0.6, modifiers=("sunz_corrected",)), + make_dsq(wavelength=0.8, modifiers=("sunz_corrected",)), 10.8]}) return ds1 @@ -547,7 +547,7 @@ def _read_back_mitiff_and_check(self, filename, expected, test_shape=(100, 200)) def _imagedescription_from_mitiff(self, filename): pillow_tif = Image.open(filename) IMAGEDESCRIPTION = 270 - imgdesc = (pillow_tif.tag_v2.get(IMAGEDESCRIPTION)).split('\n') + imgdesc = (pillow_tif.tag_v2.get(IMAGEDESCRIPTION)).split("\n") return imgdesc def test_init(self): @@ -569,8 +569,8 @@ def test_save_datasets(self): dataset = self._get_test_datasets() w = MITIFFWriter(base_dir=self.base_dir) w.save_datasets(dataset) - filename = (dataset[0].attrs['metadata_requirements']['file_pattern']).format( - start_time=dataset[0].attrs['start_time']) + filename = (dataset[0].attrs["metadata_requirements"]["file_pattern"]).format( + start_time=dataset[0].attrs["start_time"]) self._read_back_mitiff_and_check(os.path.join(self.base_dir, filename), expected) def test_save_datasets_sensor_set(self): @@ -580,8 +580,8 @@ def test_save_datasets_sensor_set(self): dataset = self._get_test_datasets_sensor_set() w = MITIFFWriter(base_dir=self.base_dir) w.save_datasets(dataset) - filename = (dataset[0].attrs['metadata_requirements']['file_pattern']).format( - start_time=dataset[0].attrs['start_time']) + filename = (dataset[0].attrs["metadata_requirements"]["file_pattern"]).format( + start_time=dataset[0].attrs["start_time"]) self._read_back_mitiff_and_check(os.path.join(self.base_dir, filename), expected) def test_save_one_dataset(self): @@ -592,8 +592,8 @@ def test_save_one_dataset(self): w.save_dataset(dataset) imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, os.listdir(self.base_dir)[0])) for key in imgdesc: - if 'In this file' in key: - self.assertEqual(key, ' Channels: 1 In this file: 1') + if "In this file" in key: + self.assertEqual(key, " Channels: 1 In this file: 1") def test_save_one_dataset_sensor_set(self): """Test basic writer operation with one dataset ie. 
no bands.""" @@ -603,8 +603,8 @@ def test_save_one_dataset_sensor_set(self): w.save_dataset(dataset) imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, os.listdir(self.base_dir)[0])) for key in imgdesc: - if 'In this file' in key: - self.assertEqual(key, ' Channels: 1 In this file: 1') + if "In this file" in key: + self.assertEqual(key, " Channels: 1 In this file: 1") def test_save_dataset_with_calibration(self): """Test writer operation with calibration.""" @@ -613,159 +613,159 @@ def test_save_dataset_with_calibration(self): expected_ir = np.full((100, 200), 255) expected_vis = np.full((100, 200), 0) expected = np.stack([expected_vis, expected_vis, expected_ir, expected_ir, expected_ir, expected_vis]) - expected_key_channel = ['Table_calibration: 1-VIS0.63, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 1.57 ' - '1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 8.24 ' - '8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 14.12 ' - '14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 19.61 ' - '20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 25.10 ' - '25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 30.59 ' - '30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 35.29 35.69 36.08 ' - '36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 41.57 ' - '41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 45.49 45.88 46.27 46.67 47.06 ' - '47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 52.55 ' - '52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 57.25 57.65 58.04 ' - '58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 63.53 ' - '63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 67.06 67.45 67.84 68.24 68.63 69.02 ' - '69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 74.12 74.51 ' - '74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 80.00 ' - '80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 85.49 ' - '85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 90.98 ' - '91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 96.47 ' - '96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]', - 'Table_calibration: 2-VIS0.86, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 1.57 ' - '1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 8.24 ' - '8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 14.12 ' - '14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 19.61 ' - '20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 25.10 ' - '25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 30.59 ' - '30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 35.29 35.69 36.08 ' - '36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 41.57 ' - '41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 45.49 45.88 46.27 46.67 47.06 ' - '47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 52.55 ' - '52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 57.25 57.65 58.04 ' - '58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 63.53 ' - '63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 
67.06 67.45 67.84 68.24 68.63 69.02 ' - '69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 74.12 74.51 ' - '74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 80.00 ' - '80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 85.49 ' - '85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 90.98 ' - '91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 96.47 ' - '96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]', - u'Table_calibration: 3(3B)-IR3.7, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 ' - '45.29 44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 ' - '34.31 33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 ' - '23.33 22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 ' - '12.35 11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 ' - '-0.20 -0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 ' - '-11.18 -11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 ' - '-20.59 -21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 ' - '-30.00 -30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 ' - '-39.41 -40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 ' - '-48.82 -49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 ' - '-58.24 -59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 ' - '-67.65 -68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 ' - '-77.06 -77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 ' - '-86.47 -87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 ' - '-95.88 -96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 ' - '-104.51 -105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 ' - '-112.35 -113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 ' - '-120.20 -120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 ' - '-128.04 -128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 ' - '-135.88 -136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 ' - '-143.73 -144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]', - u'Table_calibration: 4-IR10.8, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 ' - '45.29 ' - '44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 ' - '33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 ' - '22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 ' - '11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 ' - '-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 ' - '-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 ' - '-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 ' - '-30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 ' - '-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 ' - '-49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 ' - '-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 ' - 
'-68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 ' - '-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 ' - '-87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 ' - '-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 ' - '-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 ' - '-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 ' - '-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 ' - '-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 ' - '-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 ' - '-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]', - u'Table_calibration: 5-IR11.5, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 ' - '45.29 ' - '44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 ' - '33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 ' - '22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 ' - '11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 ' - '-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 ' - '-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 ' - '-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 ' - '-30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 ' - '-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 ' - '-49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 ' - '-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 ' - '-68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 ' - '-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 ' - '-87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 ' - '-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 ' - '-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 ' - '-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 ' - '-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 ' - '-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 ' - '-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 ' - '-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]', - 'Table_calibration: 6(3A)-VIS1.6, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 ' - '1.57 1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 ' - '8.24 8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 ' - '14.12 14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 ' - '19.61 20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 ' - '25.10 25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 ' - '30.59 30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 35.29 35.69 ' - '36.08 36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 ' - '41.57 41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 
45.49 45.88 46.27 46.67 ' - '47.06 47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 ' - '52.55 52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 57.25 57.65 ' - '58.04 58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 ' - '63.53 63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 67.06 67.45 67.84 68.24 68.63 ' - '69.02 69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 74.12 ' - '74.51 74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 ' - '80.00 80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 ' - '85.49 85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 ' - '90.98 91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 ' - '96.47 96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]'] + expected_key_channel = ["Table_calibration: 1-VIS0.63, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 1.57 " + "1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 8.24 " + "8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 14.12 " + "14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 19.61 " + "20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 25.10 " + "25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 30.59 " + "30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 35.29 35.69 36.08 " + "36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 41.57 " + "41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 45.49 45.88 46.27 46.67 47.06 " + "47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 52.55 " + "52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 57.25 57.65 58.04 " + "58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 63.53 " + "63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 67.06 67.45 67.84 68.24 68.63 69.02 " + "69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 74.12 74.51 " + "74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 80.00 " + "80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 85.49 " + "85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 90.98 " + "91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 96.47 " + "96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]", + "Table_calibration: 2-VIS0.86, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 1.57 " + "1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 8.24 " + "8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 14.12 " + "14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 19.61 " + "20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 25.10 " + "25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 30.59 " + "30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 35.29 35.69 36.08 " + "36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 41.57 " + "41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 45.49 45.88 46.27 46.67 47.06 " + "47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 52.55 " + "52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 
57.25 57.65 58.04 " + "58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 63.53 " + "63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 67.06 67.45 67.84 68.24 68.63 69.02 " + "69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 74.12 74.51 " + "74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 80.00 " + "80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 85.49 " + "85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 90.98 " + "91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 96.47 " + "96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]", + u"Table_calibration: 3(3B)-IR3.7, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 " + "45.29 44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 " + "34.31 33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 " + "23.33 22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 " + "12.35 11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 " + "-0.20 -0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 " + "-11.18 -11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 " + "-20.59 -21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 " + "-30.00 -30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 " + "-39.41 -40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 " + "-48.82 -49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 " + "-58.24 -59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 " + "-67.65 -68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 " + "-77.06 -77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 " + "-86.47 -87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 " + "-95.88 -96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 " + "-104.51 -105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 " + "-112.35 -113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 " + "-120.20 -120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 " + "-128.04 -128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 " + "-135.88 -136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 " + "-143.73 -144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]", + u"Table_calibration: 4-IR10.8, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 " + "45.29 " + "44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 " + "33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 " + "22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 " + "11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 " + "-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 " + "-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 " + "-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 " + "-30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 " + "-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 " + "-49.61 -50.39 
-51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 " + "-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 " + "-68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 " + "-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 " + "-87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 " + "-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 " + "-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 " + "-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 " + "-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 " + "-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 " + "-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 " + "-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]", + u"Table_calibration: 5-IR11.5, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 " + "45.29 " + "44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 " + "33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 " + "22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 " + "11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 " + "-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 " + "-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 " + "-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 " + "-30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 " + "-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 " + "-49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 " + "-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 " + "-68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 " + "-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 " + "-87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 " + "-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 " + "-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 " + "-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 " + "-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 " + "-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 " + "-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 " + "-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]", + "Table_calibration: 6(3A)-VIS1.6, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 " + "1.57 1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 " + "8.24 8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 " + "14.12 14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 " + "19.61 20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 " + "25.10 25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 " + "30.59 30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 
35.29 35.69 " + "36.08 36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 " + "41.57 41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 45.49 45.88 46.27 46.67 " + "47.06 47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 " + "52.55 52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 57.25 57.65 " + "58.04 58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 " + "63.53 63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 67.06 67.45 67.84 68.24 68.63 " + "69.02 69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 74.12 " + "74.51 74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 " + "80.00 80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 " + "85.49 85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 " + "90.98 91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 " + "96.47 96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]"] dataset = self._get_test_dataset_calibration() - w = MITIFFWriter(filename=dataset.attrs['metadata_requirements']['file_pattern'], base_dir=self.base_dir) + w = MITIFFWriter(filename=dataset.attrs["metadata_requirements"]["file_pattern"], base_dir=self.base_dir) w.save_dataset(dataset) - filename = (dataset.attrs['metadata_requirements']['file_pattern']).format( - start_time=dataset.attrs['start_time']) + filename = (dataset.attrs["metadata_requirements"]["file_pattern"]).format( + start_time=dataset.attrs["start_time"]) imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, filename)) found_table_calibration = False number_of_calibrations = 0 for key in imgdesc: - if 'Table_calibration' in key: + if "Table_calibration" in key: found_table_calibration = True - if '1-VIS0.63' in key: + if "1-VIS0.63" in key: self.assertEqual(key, expected_key_channel[0]) number_of_calibrations += 1 - elif '2-VIS0.86' in key: + elif "2-VIS0.86" in key: self.assertEqual(key, expected_key_channel[1]) number_of_calibrations += 1 - elif '3(3B)-IR3.7' in key: + elif "3(3B)-IR3.7" in key: self.assertEqual(key, expected_key_channel[2]) number_of_calibrations += 1 - elif '4-IR10.8' in key: + elif "4-IR10.8" in key: self.assertEqual(key, expected_key_channel[3]) number_of_calibrations += 1 - elif '5-IR11.5' in key: + elif "5-IR11.5" in key: self.assertEqual(key, expected_key_channel[4]) number_of_calibrations += 1 - elif '6(3A)-VIS1.6' in key: + elif "6(3A)-VIS1.6" in key: self.assertEqual(key, expected_key_channel[5]) number_of_calibrations += 1 else: @@ -781,42 +781,42 @@ def test_save_dataset_with_calibration_one_dataset(self): from satpy.writers.mitiff import MITIFFWriter expected = [np.full((100, 200), 255)] - expected_key_channel = [u'Table_calibration: BT, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 45.29 ' - '44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 ' - '33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 ' - '22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 ' - '11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 ' - '-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 ' - '-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 ' - '-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 ' - '-30.78 -31.57 -32.35 
-33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 ' - '-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 ' - '-49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 ' - '-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 ' - '-68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 ' - '-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 ' - '-87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 ' - '-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 ' - '-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 ' - '-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 ' - '-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 ' - '-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 ' - '-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 ' - '-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]', ] + expected_key_channel = [u"Table_calibration: BT, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 45.29 " + "44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 " + "33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 " + "22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 " + "11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 " + "-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 " + "-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 " + "-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 " + "-30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 " + "-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 " + "-49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 " + "-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 " + "-68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 " + "-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 " + "-87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 " + "-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 " + "-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 " + "-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 " + "-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 " + "-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 " + "-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 " + "-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]", ] dataset = self._get_test_dataset_calibration_one_dataset() - w = MITIFFWriter(filename=dataset.attrs['metadata_requirements']['file_pattern'], base_dir=self.base_dir) + w = MITIFFWriter(filename=dataset.attrs["metadata_requirements"]["file_pattern"], base_dir=self.base_dir) w.save_dataset(dataset) - filename = (dataset.attrs['metadata_requirements']['file_pattern']).format( - start_time=dataset.attrs['start_time']) + 
filename = (dataset.attrs["metadata_requirements"]["file_pattern"]).format( + start_time=dataset.attrs["start_time"]) imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, filename)) found_table_calibration = False number_of_calibrations = 0 for key in imgdesc: - if 'Table_calibration' in key: + if "Table_calibration" in key: found_table_calibration = True - if 'BT' in key: + if "BT" in key: self.assertEqual(key, expected_key_channel[0]) number_of_calibrations += 1 self.assertTrue(found_table_calibration, "Expected table_calibration is not found in the imagedescription.") @@ -833,8 +833,8 @@ def test_save_dataset_with_bad_value(self): dataset = self._get_test_dataset_with_bad_values() w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset) - filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs['name'], - dataset.attrs['start_time']) + filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs["name"], + dataset.attrs["start_time"]) self._read_back_mitiff_and_check(os.path.join(self.base_dir, filename), expected, test_shape=(2, 5)) def test_convert_proj4_string(self): @@ -844,32 +844,32 @@ def test_convert_proj4_string(self): from pyresample.geometry import AreaDefinition from satpy.writers.mitiff import MITIFFWriter - checks = [{'epsg': '+init=EPSG:32631', - 'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=3 +k=0.9996 ' - '+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 ' - '+y_0=1515.000000\n')}, - {'epsg': '+init=EPSG:32632', - 'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=9 +k=0.9996 ' - '+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 ' - '+y_0=1515.000000\n')}, - {'epsg': '+init=EPSG:32633', - 'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=15 +k=0.9996 ' - '+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 ' - '+y_0=1515.000000\n')}, - {'epsg': '+init=EPSG:32634', - 'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=21 +k=0.9996 ' - '+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 ' - '+y_0=1515.000000\n')}, - {'epsg': '+init=EPSG:32635', - 'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=27 +k=0.9996 ' - '+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 ' - '+y_0=1515.000000\n')}] + checks = [{"epsg": "+init=EPSG:32631", + "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=3 +k=0.9996 " + "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " + "+y_0=1515.000000\n")}, + {"epsg": "+init=EPSG:32632", + "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=9 +k=0.9996 " + "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " + "+y_0=1515.000000\n")}, + {"epsg": "+init=EPSG:32633", + "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=15 +k=0.9996 " + "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " + "+y_0=1515.000000\n")}, + {"epsg": "+init=EPSG:32634", + "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=21 +k=0.9996 " + "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " + "+y_0=1515.000000\n")}, + {"epsg": "+init=EPSG:32635", + "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=27 +k=0.9996 " + "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " + "+y_0=1515.000000\n")}] for check in checks: area_def = AreaDefinition( - 'test', - 'test', - 'test', - check['epsg'], + "test", + "test", + "test", + check["epsg"], 100, 200, (-1000., -1500., 1000., 1500.), @@ -877,13 +877,13 @@ def test_convert_proj4_string(self): ds1 = xr.DataArray( da.zeros((10, 20), chunks=20), - dims=('y', 'x'), - attrs={'area': area_def} + dims=("y", "x"), + attrs={"area": 
area_def} ) - w = MITIFFWriter(filename='dummy.tif', base_dir=self.base_dir) + w = MITIFFWriter(filename="dummy.tif", base_dir=self.base_dir) proj4_string = w._add_proj4_string(ds1, ds1) - self.assertEqual(proj4_string, check['proj4']) + self.assertEqual(proj4_string, check["proj4"]) def test_save_dataset_palette(self): """Test writer operation as palette.""" @@ -918,20 +918,20 @@ def test_save_dataset_palette(self): 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] color_map = (0, 1, 2, 3, 4, 5) - pal_desc = ['test', 'test2'] + pal_desc = ["test", "test2"] unit = "Test" dataset = self._get_test_one_dataset() - palette = {'palette': True, - 'palette_color_map': color_map, - 'palette_description': pal_desc, - 'palette_unit': unit, - 'palette_channel_name': dataset.attrs['name']} + palette = {"palette": True, + "palette_color_map": color_map, + "palette_description": pal_desc, + "palette_unit": unit, + "palette_channel_name": dataset.attrs["name"]} w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset, **palette) - filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs['name'], - dataset.attrs['start_time']) + filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs["name"], + dataset.attrs["start_time"]) pillow_tif = Image.open(os.path.join(self.base_dir, filename)) # Need to check that PHOTOMETRIC is 3, i.e. palette self.assertEqual(pillow_tif.tag_v2.get(262), 3) @@ -958,14 +958,14 @@ def test_save_dataset_palette(self): unit_name = key unit_name_found = True found_color_info = False - elif 'COLOR INFO:' in key: + elif "COLOR INFO:" in key: found_color_info = True # Check the name of the palette description self.assertEqual(name_length, 2) # Check the name and unit name of the palette - self.assertEqual(unit_name, ' Test') + self.assertEqual(unit_name, " Test") # Check the palette description of the palette - self.assertEqual(names, [' test', ' test2']) + self.assertEqual(names, [" test", " test2"]) self._read_back_mitiff_and_check(os.path.join(self.base_dir, filename), expected) def test_simple_write_two_bands(self): @@ -982,12 +982,12 @@ def test_get_test_dataset_three_bands_prereq(self): dataset = self._get_test_dataset_three_bands_prereq() w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset) - filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs['name'], - dataset.attrs['start_time']) + filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs["name"], + dataset.attrs["start_time"]) imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, filename)) for element in imgdesc: - if ' Channels:' in element: - self.assertEqual(element, ' Channels: 3 In this file: 1 2 3') + if " Channels:" in element: + self.assertEqual(element, " Channels: 3 In this file: 1 2 3") def test_save_dataset_with_calibration_error_one_dataset(self): """Test saving a mitiff dataset with only one channel and invalid calibration.""" @@ -998,9 +998,9 @@ def test_save_dataset_with_calibration_error_one_dataset(self): logger.level = logging.DEBUG dataset = self._get_test_dataset_calibration_one_dataset() - prereqs = [make_dsq(name='4', calibration='not_valid_calibration_name')] - dataset.attrs['prerequisites'] = prereqs - w = MITIFFWriter(filename=dataset.attrs['metadata_requirements']['file_pattern'], base_dir=self.base_dir) + prereqs = [make_dsq(name="4", calibration="not_valid_calibration_name")] + dataset.attrs["prerequisites"] = prereqs + w = MITIFFWriter(filename=dataset.attrs["metadata_requirements"]["file_pattern"], base_dir=self.base_dir)
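For reference, the long Table_calibration strings asserted in these mitiff tests are plain linear 8-bit lookup tables: reflectance runs from 0 to 100 % and brightness temperature from +50 °C down to -150 °C, each sampled at 256 levels (steps of 100/255 ≈ 0.39 and 200/255 ≈ 0.78). A minimal sketch that regenerates the numeric part of such a table; make_table is a hypothetical helper for illustration, not part of the mitiff writer:

import numpy as np

def make_table(start, stop, levels=256):
    # Evenly spaced LUT, formatted with two decimals like the image description.
    return " ".join("%.2f" % v for v in np.linspace(start, stop, levels))

albedo_table = make_table(0.0, 100.0)    # "0.00 0.39 0.78 ... 100.00"
bt_table = make_table(50.0, -150.0)      # "50.00 49.22 48.43 ... -150.00"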
_reverse_offset = 0. _reverse_scale = 1. _decimals = 2 @@ -1024,17 +1024,17 @@ def test_save_dataset_with_missing_palette(self): logger.setLevel(logging.DEBUG) dataset = self._get_test_one_dataset() - pal_desc = ['test', 'test2'] + pal_desc = ["test", "test2"] unit = "Test" - palette = {'palette': True, - 'palette_description': pal_desc, - 'palette_unit': unit, - 'palette_channel_name': dataset.attrs['name']} + palette = {"palette": True, + "palette_description": pal_desc, + "palette_unit": unit, + "palette_channel_name": dataset.attrs["name"]} w = MITIFFWriter(base_dir=self.base_dir) tiffinfo = {} - tiffinfo[270] = "Just dummy image desc".encode('utf-8') - filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs['name'], - dataset.attrs['start_time']) + tiffinfo[270] = "Just dummy image desc".encode("utf-8") + filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs["name"], + dataset.attrs["start_time"]) try: with self.assertLogs(logger, logging.ERROR) as lc: w._save_as_palette(dataset.compute(), os.path.join(self.base_dir, filename), tiffinfo, **palette) diff --git a/satpy/tests/writer_tests/test_ninjogeotiff.py b/satpy/tests/writer_tests/test_ninjogeotiff.py index ac75b68cbf..bba3e9b44e 100644 --- a/satpy/tests/writer_tests/test_ninjogeotiff.py +++ b/satpy/tests/writer_tests/test_ninjogeotiff.py @@ -477,7 +477,7 @@ def now(cls, tz=datetime.timezone.utc): return datetime.datetime(2033, 5, 18, 3, 33, 20, tzinfo=tz) - monkeypatch.setattr(datetime, 'datetime', mydatetime) + monkeypatch.setattr(datetime, "datetime", mydatetime) def test_write_and_read_file(test_image_small_mid_atlantic_L, tmp_path): diff --git a/satpy/tests/writer_tests/test_ninjotiff.py b/satpy/tests/writer_tests/test_ninjotiff.py index 58f991e73d..f36f1028b7 100644 --- a/satpy/tests/writer_tests/test_ninjotiff.py +++ b/satpy/tests/writer_tests/test_ninjotiff.py @@ -43,58 +43,58 @@ def get_scaling_from_history(self): pyninjotiff_mock.ninjotiff = mock.Mock() -@mock.patch.dict(sys.modules, {'pyninjotiff': pyninjotiff_mock, 'pyninjotiff.ninjotiff': pyninjotiff_mock.ninjotiff}) +@mock.patch.dict(sys.modules, {"pyninjotiff": pyninjotiff_mock, "pyninjotiff.ninjotiff": pyninjotiff_mock.ninjotiff}) class TestNinjoTIFFWriter(unittest.TestCase): """The ninjo tiff writer tests.""" - @mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff) + @mock.patch("satpy.writers.ninjotiff.nt", pyninjotiff_mock.ninjotiff) def test_init(self): """Test the init.""" from satpy.writers.ninjotiff import NinjoTIFFWriter - ninjo_tags = {40000: 'NINJO'} + ninjo_tags = {40000: "NINJO"} ntw = NinjoTIFFWriter(tags=ninjo_tags) self.assertDictEqual(ntw.tags, ninjo_tags) - @mock.patch('satpy.writers.ninjotiff.ImageWriter.save_dataset') - @mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff) + @mock.patch("satpy.writers.ninjotiff.ImageWriter.save_dataset") + @mock.patch("satpy.writers.ninjotiff.nt", pyninjotiff_mock.ninjotiff) def test_dataset(self, iwsd): """Test saving a dataset.""" from satpy.writers.ninjotiff import NinjoTIFFWriter ntw = NinjoTIFFWriter() - dataset = xr.DataArray([1, 2, 3], attrs={'units': 'K'}) - with mock.patch('satpy.writers.ninjotiff.convert_units') as uconv: - ntw.save_dataset(dataset, physic_unit='CELSIUS') - uconv.assert_called_once_with(dataset, 'K', 'CELSIUS') + dataset = xr.DataArray([1, 2, 3], attrs={"units": "K"}) + with mock.patch("satpy.writers.ninjotiff.convert_units") as uconv: + ntw.save_dataset(dataset, physic_unit="CELSIUS") + uconv.assert_called_once_with(dataset, "K", "CELSIUS") 
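A note on the pyninjotiff stubbing used throughout this test module: pyninjotiff is an optional dependency, so the tests plant Mock objects in sys.modules and wrap each test in mock.patch.dict, letting satpy.writers.ninjotiff import cleanly without the real package installed. A self-contained sketch of the same pattern, with the final import purely illustrative:

import sys
from unittest import mock

# Stand-ins for the optional package and its submodule.
pyninjotiff_stub = mock.Mock()
pyninjotiff_stub.ninjotiff = mock.Mock()

with mock.patch.dict(sys.modules, {"pyninjotiff": pyninjotiff_stub,
                                   "pyninjotiff.ninjotiff": pyninjotiff_stub.ninjotiff}):
    # Inside this block any import of pyninjotiff resolves to the stub.
    import pyninjotiff.ninjotiff as nt
    assert nt is pyninjotiff_stub.ninjotiff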
self.assertEqual(iwsd.call_count, 1) - @mock.patch('satpy.writers.ninjotiff.ImageWriter.save_dataset') - @mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff) + @mock.patch("satpy.writers.ninjotiff.ImageWriter.save_dataset") + @mock.patch("satpy.writers.ninjotiff.nt", pyninjotiff_mock.ninjotiff) def test_dataset_skip_unit_conversion(self, iwsd): """Test saving a dataset without unit conversion.""" from satpy.writers.ninjotiff import NinjoTIFFWriter ntw = NinjoTIFFWriter() - dataset = xr.DataArray([1, 2, 3], attrs={'units': 'K'}) - with mock.patch('satpy.writers.ninjotiff.convert_units') as uconv: - ntw.save_dataset(dataset, physic_unit='CELSIUS', + dataset = xr.DataArray([1, 2, 3], attrs={"units": "K"}) + with mock.patch("satpy.writers.ninjotiff.convert_units") as uconv: + ntw.save_dataset(dataset, physic_unit="CELSIUS", convert_temperature_units=False) uconv.assert_not_called() self.assertEqual(iwsd.call_count, 1) - @mock.patch('satpy.writers.ninjotiff.NinjoTIFFWriter.save_dataset') - @mock.patch('satpy.writers.ninjotiff.ImageWriter.save_image') - @mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff) + @mock.patch("satpy.writers.ninjotiff.NinjoTIFFWriter.save_dataset") + @mock.patch("satpy.writers.ninjotiff.ImageWriter.save_image") + @mock.patch("satpy.writers.ninjotiff.nt", pyninjotiff_mock.ninjotiff) def test_image(self, iwsi, save_dataset): """Test saving an image.""" nt = pyninjotiff_mock.ninjotiff nt.reset_mock() from satpy.writers.ninjotiff import NinjoTIFFWriter ntw = NinjoTIFFWriter() - dataset = xr.DataArray([1, 2, 3], attrs={'units': 'K'}) - img = FakeImage(dataset, 'L') - ret = ntw.save_image(img, filename='bla.tif', compute=False) + dataset = xr.DataArray([1, 2, 3], attrs={"units": "K"}) + img = FakeImage(dataset, "L") + ret = ntw.save_image(img, filename="bla.tif", compute=False) nt.save.assert_called() - assert nt.save.mock_calls[0][2]['compute'] is False - assert nt.save.mock_calls[0][2]['ch_min_measurement_unit'] < nt.save.mock_calls[0][2]['ch_max_measurement_unit'] + assert nt.save.mock_calls[0][2]["compute"] is False + assert nt.save.mock_calls[0][2]["ch_min_measurement_unit"] < nt.save.mock_calls[0][2]["ch_max_measurement_unit"] assert ret == nt.save.return_value def test_convert_units_self(self): @@ -145,9 +145,9 @@ def test_convert_units_other(self): with pytest.raises(NotImplementedError): convert_units(ds_in, "millimeter/hour", "m/s") - @mock.patch('satpy.writers.ninjotiff.NinjoTIFFWriter.save_dataset') - @mock.patch('satpy.writers.ninjotiff.ImageWriter.save_image') - @mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff) + @mock.patch("satpy.writers.ninjotiff.NinjoTIFFWriter.save_dataset") + @mock.patch("satpy.writers.ninjotiff.ImageWriter.save_image") + @mock.patch("satpy.writers.ninjotiff.nt", pyninjotiff_mock.ninjotiff) def test_P_image_is_uint8(self, iwsi, save_dataset): """Test that a P-mode image is converted to uint8s.""" nt = pyninjotiff_mock.ninjotiff @@ -155,6 +155,6 @@ def test_P_image_is_uint8(self, iwsi, save_dataset): from satpy.writers.ninjotiff import NinjoTIFFWriter ntw = NinjoTIFFWriter() dataset = xr.DataArray([1, 2, 3]).astype(int) - img = FakeImage(dataset, 'P') - ntw.save_image(img, filename='bla.tif', compute=False) + img = FakeImage(dataset, "P") + ntw.save_image(img, filename="bla.tif", compute=False) assert nt.save.mock_calls[0][1][0].data.dtype == np.uint8 diff --git a/satpy/tests/writer_tests/test_simple_image.py b/satpy/tests/writer_tests/test_simple_image.py index 5ebf0dfb47..b3e92c9510 
100644 --- a/satpy/tests/writer_tests/test_simple_image.py +++ b/satpy/tests/writer_tests/test_simple_image.py @@ -44,9 +44,9 @@ def _get_test_datasets(): import xarray as xr ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow()} + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow()} ) return [ds1] diff --git a/satpy/tests/writer_tests/test_utils.py b/satpy/tests/writer_tests/test_utils.py index 10a199d8b0..a0cf88e54f 100644 --- a/satpy/tests/writer_tests/test_utils.py +++ b/satpy/tests/writer_tests/test_utils.py @@ -27,9 +27,9 @@ class WriterUtilsTest(unittest.TestCase): def test_flatten_dict(self): """Test dictionary flattening.""" - d = {'a': 1, 'b': {'c': 1, 'd': {'e': 1, 'f': {'g': [1, 2]}}}} - expected = {'a': 1, - 'b_c': 1, - 'b_d_e': 1, - 'b_d_f_g': [1, 2]} + d = {"a": 1, "b": {"c": 1, "d": {"e": 1, "f": {"g": [1, 2]}}}} + expected = {"a": 1, + "b_c": 1, + "b_d_e": 1, + "b_d_f_g": [1, 2]} self.assertDictEqual(wutils.flatten_dict(d), expected) diff --git a/satpy/utils.py b/satpy/utils.py index a9785a544a..1ef9a3524c 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -130,12 +130,12 @@ def logging_on(level=logging.WARNING): console = logging.StreamHandler() console.setFormatter(logging.Formatter("[%(levelname)s: %(asctime)s :" " %(name)s] %(message)s", - '%Y-%m-%d %H:%M:%S')) + "%Y-%m-%d %H:%M:%S")) console.setLevel(level) - logging.getLogger('').addHandler(console) + logging.getLogger("").addHandler(console) _is_logging_on = True - log = logging.getLogger('') + log = logging.getLogger("") log.setLevel(level) for h in log.handlers: h.setLevel(level) @@ -143,13 +143,13 @@ def logging_on(level=logging.WARNING): def logging_off(): """Turn logging off.""" - logging.getLogger('').handlers = [logging.NullHandler()] + logging.getLogger("").handlers = [logging.NullHandler()] def get_logger(name): """Return logger with null handler added if needed.""" - if not hasattr(logging.Logger, 'trace'): - logging.addLevelName(TRACE_LEVEL, 'TRACE') + if not hasattr(logging.Logger, "trace"): + logging.addLevelName(TRACE_LEVEL, "TRACE") def trace(self, message, *args, **kwargs): if self.isEnabledFor(TRACE_LEVEL): @@ -165,7 +165,7 @@ def trace(self, message, *args, **kwargs): def in_ipynb(): """Check if we are in a jupyter notebook.""" try: - return 'ZMQ' in get_ipython().__class__.__name__ + return "ZMQ" in get_ipython().__class__.__name__ except NameError: return False @@ -243,20 +243,20 @@ def proj_units_to_meters(proj_str): proj_parts = proj_str.split() new_parts = [] for itm in proj_parts: - key, val = itm.split('=') - key = key.strip('+') - if key in ['a', 'b', 'h']: + key, val = itm.split("=") + key = key.strip("+") + if key in ["a", "b", "h"]: val = float(val) if val < 6e6: val *= 1000. 
- val = '%.3f' % val + val = "%.3f" % val - if key == 'units' and val == 'km': + if key == "units" and val == "km": continue - new_parts.append('+%s=%s' % (key, val)) + new_parts.append("+%s=%s" % (key, val)) - return ' '.join(new_parts) + return " ".join(new_parts) def _get_sunz_corr_li_and_shibata(cos_zen): @@ -371,9 +371,9 @@ def _get_sat_altitude(data_arr, key_prefixes): try: alt = _get_first_available_item(orb_params, alt_keys) except KeyError: - alt = orb_params['projection_altitude'] + alt = orb_params["projection_altitude"] warnings.warn( - 'Actual satellite altitude not available, using projection altitude instead.', + "Actual satellite altitude not available, using projection altitude instead.", stacklevel=3 ) return alt @@ -387,10 +387,10 @@ def _get_sat_lonlat(data_arr, key_prefixes): lon = _get_first_available_item(orb_params, lon_keys) lat = _get_first_available_item(orb_params, lat_keys) except KeyError: - lon = orb_params['projection_longitude'] - lat = orb_params['projection_latitude'] + lon = orb_params["projection_longitude"] + lat = orb_params["projection_latitude"] warnings.warn( - 'Actual satellite lon/lat not available, using projection center instead.', + "Actual satellite lon/lat not available, using projection center instead.", stacklevel=3 ) return lon, lat @@ -454,21 +454,21 @@ def _check_yaml_configs(configs, key): diagnostic = {} for i in configs: for fname in i: - msg = 'ok' + msg = "ok" res = None - with open(fname, 'r', encoding='utf-8') as stream: + with open(fname, "r", encoding="utf-8") as stream: try: res = yaml.load(stream, Loader=UnsafeLoader) except yaml.YAMLError as err: stream.seek(0) res = yaml.load(stream, Loader=BaseLoader) - if err.context == 'while constructing a Python object': + if err.context == "while constructing a Python object": msg = err.problem else: - msg = 'error' + msg = "error" finally: try: - diagnostic[res[key]['name']] = msg + diagnostic[res[key]["name"]] = msg except (KeyError, TypeError): # this object doesn't have a 'name' pass @@ -481,7 +481,7 @@ def _check_import(module_names): for module_name in module_names: try: __import__(module_name) - res = 'ok' + res = "ok" except ImportError as err: res = str(err) diagnostics[module_name] = res @@ -503,23 +503,23 @@ def check_satpy(readers=None, writers=None, extras=None): from satpy.readers import configs_for_reader from satpy.writers import configs_for_writer - print('Readers') - print('=======') - for reader, res in sorted(_check_yaml_configs(configs_for_reader(reader=readers), 'reader').items()): - print(reader + ': ', res) + print("Readers") + print("=======") + for reader, res in sorted(_check_yaml_configs(configs_for_reader(reader=readers), "reader").items()): + print(reader + ": ", res) print() - print('Writers') - print('=======') - for writer, res in sorted(_check_yaml_configs(configs_for_writer(writer=writers), 'writer').items()): - print(writer + ': ', res) + print("Writers") + print("=======") + for writer, res in sorted(_check_yaml_configs(configs_for_writer(writer=writers), "writer").items()): + print(writer + ": ", res) print() - print('Extras') - print('======') - module_names = extras if extras is not None else ('cartopy', 'geoviews') + print("Extras") + print("======") + module_names = extras if extras is not None else ("cartopy", "geoviews") for module_name, res in sorted(_check_import(module_names).items()): - print(module_name + ': ', res) + print(module_name + ": ", res) print() @@ -619,7 +619,7 @@ def get_legacy_chunk_size(): def _get_pytroll_chunk_size(): 
try: - chunk_size = int(os.environ['PYTROLL_CHUNK_SIZE']) + chunk_size = int(os.environ["PYTROLL_CHUNK_SIZE"]) warnings.warn( "The PYTROLL_CHUNK_SIZE environment variable is pending deprecation. " "You can use the dask config setting `array.chunk-size` (or the DASK_ARRAY__CHUNK_SIZE environment" @@ -668,7 +668,7 @@ def _sort_files_to_local_remote_and_fsfiles(filenames): fs_files.append(f) elif isinstance(f, pathlib.Path): local_files.append(f) - elif urlparse(f).scheme in ('', 'file') or "\\" in f: + elif urlparse(f).scheme in ("", "file") or "\\" in f: local_files.append(f) else: remote_files.append(f) @@ -709,7 +709,7 @@ def _get_storage_dictionary_options(reader_kwargs): # set base storage options if there are any storage_opt_dict[reader_name] = shared_storage_options.copy() if isinstance(rkwargs, dict) and "storage_options" in rkwargs: - storage_opt_dict.setdefault(reader_name, {}).update(rkwargs.pop('storage_options')) + storage_opt_dict.setdefault(reader_name, {}).update(rkwargs.pop("storage_options")) return storage_opt_dict diff --git a/satpy/writers/__init__.py b/satpy/writers/__init__.py index 0af433f28d..dcf482188d 100644 --- a/satpy/writers/__init__.py +++ b/satpy/writers/__init__.py @@ -44,18 +44,18 @@ def read_writer_config(config_files, loader=UnsafeLoader): """Read the writer `config_files` and return the info extracted.""" conf = {} - LOG.debug('Reading %s', str(config_files)) + LOG.debug("Reading %s", str(config_files)) for config_file in config_files: with open(config_file) as fd: conf.update(yaml.load(fd.read(), Loader=loader)) try: - writer_info = conf['writer'] + writer_info = conf["writer"] except KeyError: raise KeyError( "Malformed config file {}: missing writer 'writer'".format( config_files)) - writer_info['config_files'] = config_files + writer_info["config_files"] = config_files return writer_info @@ -63,7 +63,7 @@ def load_writer_configs(writer_configs, **writer_kwargs): """Load the writer from the provided `writer_configs`.""" try: writer_info = read_writer_config(writer_configs) - writer_class = writer_info['writer'] + writer_class = writer_info["writer"] except (ValueError, KeyError, yaml.YAMLError): raise ValueError("Invalid writer configs: " "'{}'".format(writer_configs)) @@ -78,11 +78,11 @@ def load_writer(writer, **writer_kwargs): config_fn = writer + ".yaml" if "." 
not in writer else writer config_files = config_search_paths(os.path.join("writers", config_fn)) writer_kwargs.setdefault("config_files", config_files) - if not writer_kwargs['config_files']: + if not writer_kwargs["config_files"]: raise ValueError("Unknown writer '{}'".format(writer)) try: - return load_writer_configs(writer_kwargs['config_files'], + return load_writer_configs(writer_kwargs["config_files"], **writer_kwargs) except ValueError: raise ValueError("Writer '{}' does not exist or could not be " @@ -102,15 +102,15 @@ def configs_for_writer(writer=None): if not isinstance(writer, (list, tuple)): writer = [writer] # given a config filename or writer name - config_files = [w if w.endswith('.yaml') else w + '.yaml' for w in writer] + config_files = [w if w.endswith(".yaml") else w + ".yaml" for w in writer] else: - paths = get_entry_points_config_dirs('satpy.writers') - writer_configs = glob_config(os.path.join('writers', '*.yaml'), search_dirs=paths) + paths = get_entry_points_config_dirs("satpy.writers") + writer_configs = glob_config(os.path.join("writers", "*.yaml"), search_dirs=paths) config_files = set(writer_configs) for config_file in config_files: config_basename = os.path.basename(config_file) - paths = get_entry_points_config_dirs('satpy.writers') + paths = get_entry_points_config_dirs("satpy.writers") writer_configs = config_search_paths( os.path.join("writers", config_basename), search_dirs=paths, @@ -143,7 +143,7 @@ def available_writers(as_dict=False): LOG.warning("Could not import writer config from: %s", writer_configs) LOG.debug("Error loading YAML", exc_info=True) continue - writers.append(writer_info if as_dict else writer_info['name']) + writers.append(writer_info if as_dict else writer_info["name"]) return writers @@ -231,11 +231,11 @@ def add_overlay(orig_img, area, coast_dir, color=None, width=None, resolution=No DeprecationWarning, stacklevel=2 ) - if hasattr(orig_img, 'convert'): + if hasattr(orig_img, "convert"): # image must be in RGB space to work with pycoast/pydecorate - res_mode = ('RGBA' if orig_img.final_mode(fill_value).endswith('A') else 'RGB') + res_mode = ("RGBA" if orig_img.final_mode(fill_value).endswith("A") else "RGB") orig_img = orig_img.convert(res_mode) - elif not orig_img.mode.startswith('RGB'): + elif not orig_img.mode.startswith("RGB"): raise RuntimeError("'trollimage' 1.6+ required to support adding " "overlays/decorations to non-RGB data.") @@ -244,7 +244,7 @@ def add_overlay(orig_img, area, coast_dir, color=None, width=None, resolution=No cw_ = ContourWriterAGG(coast_dir) new_image = orig_img.apply_pil(_burn_overlay, res_mode, - None, {'fill_value': fill_value}, + None, {"fill_value": fill_value}, (area, cw_, overlays), None) return new_image @@ -253,25 +253,25 @@ def _create_overlays_dict(color, width, grid, level_coast, level_borders): """Fill in the overlays dict.""" overlays = dict() # fill with sensible defaults - general_params = {'outline': color or (0, 0, 0), - 'width': width or 0.5} + general_params = {"outline": color or (0, 0, 0), + "width": width or 0.5} for key, val in general_params.items(): if val is not None: - overlays.setdefault('coasts', {}).setdefault(key, val) - overlays.setdefault('borders', {}).setdefault(key, val) + overlays.setdefault("coasts", {}).setdefault(key, val) + overlays.setdefault("borders", {}).setdefault(key, val) if level_coast is None: level_coast = 1 - overlays.setdefault('coasts', {}).setdefault('level', level_coast) + overlays.setdefault("coasts", {}).setdefault("level", level_coast) if 
level_borders is None: level_borders = 1 - overlays.setdefault('borders', {}).setdefault('level', level_borders) + overlays.setdefault("borders", {}).setdefault("level", level_borders) if grid is not None: - if 'major_lonlat' in grid and grid['major_lonlat']: - major_lonlat = grid.pop('major_lonlat') - minor_lonlat = grid.pop('minor_lonlat', major_lonlat) - grid.update({'Dlonlat': major_lonlat, 'dlonlat': minor_lonlat}) + if "major_lonlat" in grid and grid["major_lonlat"]: + major_lonlat = grid.pop("major_lonlat") + minor_lonlat = grid.pop("minor_lonlat", major_lonlat) + grid.update({"Dlonlat": major_lonlat, "dlonlat": minor_lonlat}) for key, val in grid.items(): - overlays.setdefault('grid', {}).setdefault(key, val) + overlays.setdefault("grid", {}).setdefault(key, val) return overlays @@ -288,10 +288,10 @@ def add_text(orig, dc, img, text): arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE) - new_data = xr.DataArray(arr, dims=['y', 'x', 'bands'], - coords={'y': orig.data.coords['y'], - 'x': orig.data.coords['x'], - 'bands': list(img.mode)}, + new_data = xr.DataArray(arr, dims=["y", "x", "bands"], + coords={"y": orig.data.coords["y"], + "x": orig.data.coords["x"], + "bands": list(img.mode)}, attrs=orig.data.attrs) return XRImage(new_data) @@ -309,10 +309,10 @@ def add_logo(orig, dc, img, logo): arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE) - new_data = xr.DataArray(arr, dims=['y', 'x', 'bands'], - coords={'y': orig.data.coords['y'], - 'x': orig.data.coords['x'], - 'bands': list(img.mode)}, + new_data = xr.DataArray(arr, dims=["y", "x", "bands"], + coords={"y": orig.data.coords["y"], + "x": orig.data.coords["x"], + "bands": list(img.mode)}, attrs=orig.data.attrs) return XRImage(new_data) @@ -330,10 +330,10 @@ def add_scale(orig, dc, img, scale): arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE) - new_data = xr.DataArray(arr, dims=['y', 'x', 'bands'], - coords={'y': orig.data.coords['y'], - 'x': orig.data.coords['x'], - 'bands': list(img.mode)}, + new_data = xr.DataArray(arr, dims=["y", "x", "bands"], + coords={"y": orig.data.coords["y"], + "x": orig.data.coords["x"], + "bands": list(img.mode)}, attrs=orig.data.attrs) return XRImage(new_data) @@ -373,10 +373,10 @@ def add_decorate(orig, fill_value=None, **decorate): # Need to create this here to possibly keep the alignment # when adding text and/or logo with pydecorate - if hasattr(orig, 'convert'): + if hasattr(orig, "convert"): # image must be in RGB space to work with pycoast/pydecorate - orig = orig.convert('RGBA' if orig.mode.endswith('A') else 'RGB') - elif not orig.mode.startswith('RGB'): + orig = orig.convert("RGBA" if orig.mode.endswith("A") else "RGB") + elif not orig.mode.startswith("RGB"): raise RuntimeError("'trollimage' 1.6+ required to support adding " "overlays/decorations to non-RGB data.") img_orig = orig.pil_image(fill_value=fill_value) @@ -386,14 +386,14 @@ def add_decorate(orig, fill_value=None, **decorate): # decorate needs to be a list to maintain the alignment # as ordered in the list img = orig - if 'decorate' in decorate: - for dec in decorate['decorate']: - if 'logo' in dec: - img = add_logo(img, dc, img_orig, logo=dec['logo']) - elif 'text' in dec: - img = add_text(img, dc, img_orig, text=dec['text']) - elif 'scale' in dec: - img = add_scale(img, dc, img_orig, scale=dec['scale']) + if "decorate" in decorate: + for dec in decorate["decorate"]: + if "logo" in dec: + img = add_logo(img, dc, img_orig, logo=dec["logo"]) + elif "text" in dec: + img = add_text(img, dc, img_orig,
text=dec["text"]) + elif "scale" in dec: + img = add_scale(img, dc, img_orig, scale=dec["scale"]) return img @@ -445,7 +445,7 @@ def get_enhanced_image(dataset, enhance=None, overlay=None, decorate=None, enhancer.apply(img, **dataset.attrs) if overlay is not None: - img = add_overlay(img, dataset.attrs['area'], fill_value=fill_value, **overlay) + img = add_overlay(img, dataset.attrs["area"], fill_value=fill_value, **overlay) if decorate is not None: img = add_decorate(img, fill_value=fill_value, **decorate) @@ -595,7 +595,7 @@ def compute_writer_results(results): if targets: for target in targets: - if hasattr(target, 'close'): + if hasattr(target, "close"): target.close() @@ -632,23 +632,23 @@ def __init__(self, name=None, filename=None, base_dir=None, **kwargs): """ # Load the config Plugin.__init__(self, **kwargs) - self.info = self.config.get('writer', {}) + self.info = self.config.get("writer", {}) - if 'file_pattern' in self.info: + if "file_pattern" in self.info: warnings.warn( "Writer YAML config is using 'file_pattern' which " "has been deprecated, use 'filename' instead.", stacklevel=2 ) - self.info['filename'] = self.info.pop('file_pattern') + self.info["filename"] = self.info.pop("file_pattern") - if 'file_pattern' in kwargs: + if "file_pattern" in kwargs: warnings.warn( "'file_pattern' has been deprecated, use 'filename' instead.", DeprecationWarning, stacklevel=2 ) - filename = kwargs.pop('file_pattern') + filename = kwargs.pop("file_pattern") # Use options from the config file if they weren't passed as arguments self.name = self.info.get("name", None) if name is None else name @@ -679,7 +679,7 @@ def separate_init_kwargs(cls, kwargs): # FUTURE: Don't pass Scene.save_datasets kwargs to init and here init_kwargs = {} kwargs = kwargs.copy() - for kw in ['base_dir', 'filename', 'file_pattern']: + for kw in ["base_dir", "filename", "file_pattern"]: if kw in kwargs: init_kwargs[kw] = kwargs.pop(kw) return init_kwargs, kwargs @@ -696,8 +696,8 @@ def create_filename_parser(self, base_dir): @staticmethod def _prepare_metadata_for_filename_formatting(attrs): - if isinstance(attrs.get('sensor'), set): - attrs['sensor'] = '-'.join(sorted(attrs['sensor'])) + if isinstance(attrs.get("sensor"), set): + attrs["sensor"] = "-".join(sorted(attrs["sensor"])) def get_filename(self, **kwargs): """Create a filename where output data will be saved. 
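The _prepare_metadata_for_filename_formatting helper shown above normalizes a set-valued "sensor" attribute into a sorted, dash-joined string so the filename pattern always formats deterministically. A tiny standalone illustration of that behaviour; the attribute values here are made up:

attrs = {"sensor": {"viirs", "avhrr-3"}}
if isinstance(attrs.get("sensor"), set):
    # Sets are unordered, so sort before joining for a reproducible name.
    attrs["sensor"] = "-".join(sorted(attrs["sensor"]))
assert attrs["sensor"] == "avhrr-3-viirs"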
@@ -863,7 +863,7 @@ def separate_init_kwargs(cls, kwargs):
         """Separate the init kwargs."""
         # FUTURE: Don't pass Scene.save_datasets kwargs to init and here
         init_kwargs, kwargs = super(ImageWriter, cls).separate_init_kwargs(kwargs)
-        for kw in ['enhancement_config', 'enhance']:
+        for kw in ["enhancement_config", "enhance"]:
             if kw in kwargs:
                 init_kwargs[kw] = kwargs.pop(kw)
         return init_kwargs, kwargs
@@ -1179,7 +1179,7 @@ def __init__(self, enhancement_config_file=None):
             # it wasn't specified in the config or in the kwargs, we should
             # provide a default
             config_fn = os.path.join("enhancements", "generic.yaml")
-            paths = get_entry_points_config_dirs('satpy.enhancements')
+            paths = get_entry_points_config_dirs("satpy.enhancements")
             self.enhancement_config_file = config_search_paths(config_fn, search_dirs=paths)

         if not self.enhancement_config_file:
@@ -1199,7 +1199,7 @@ def get_sensor_enhancement_config(self, sensor):
             # one single sensor
             sensor = [sensor]

-        paths = get_entry_points_config_dirs('satpy.enhancements')
+        paths = get_entry_points_config_dirs("satpy.enhancements")
        for sensor_name in sensor:
             config_fn = os.path.join("enhancements", sensor_name + ".yaml")
             config_files = config_search_paths(config_fn, search_dirs=paths)
@@ -1227,8 +1227,8 @@ def apply(self, img, **info):
         backup_id = f"<name={info.get('name')}>"
         data_id = info.get("_satpy_id", backup_id)
         LOG.debug(f"Data for {data_id} will be enhanced with options:\n\t{enh_kwargs['operations']}")
-        for operation in enh_kwargs['operations']:
-            fun = operation['method']
-            args = operation.get('args', [])
-            kwargs = operation.get('kwargs', {})
+        for operation in enh_kwargs["operations"]:
+            fun = operation["method"]
+            args = operation.get("args", [])
+            kwargs = operation.get("kwargs", {})
             fun(img, *args, **kwargs)
diff --git a/satpy/writers/awips_tiled.py b/satpy/writers/awips_tiled.py
index 4b7f32d1df..5f10418e8a 100644
--- a/satpy/writers/awips_tiled.py
+++ b/satpy/writers/awips_tiled.py
@@ -233,25 +233,25 @@
 from satpy.writers import DecisionTree, Enhancer, Writer, get_enhanced_image

 LOG = logging.getLogger(__name__)
-DEFAULT_OUTPUT_PATTERN = '{source_name}_AII_{platform_name}_{sensor}_' \
-                         '{name}_{sector_id}_{tile_id}_' \
-                         '{start_time:%Y%m%d_%H%M}.nc'
+DEFAULT_OUTPUT_PATTERN = "{source_name}_AII_{platform_name}_{sensor}_" \
+                         "{name}_{sector_id}_{tile_id}_" \
+                         "{start_time:%Y%m%d_%H%M}.nc"
 UNIT_CONV = {
-    'micron': 'microm',
-    'mm h-1': 'mm/h',
-    '1': '*1',
-    'none': '*1',
-    'percent': '%',
-    'Kelvin': 'kelvin',
-    'K': 'kelvin',
+    "micron": "microm",
+    "mm h-1": "mm/h",
+    "1": "*1",
+    "none": "*1",
+    "percent": "%",
+    "Kelvin": "kelvin",
+    "K": "kelvin",
 }

-TileInfo = namedtuple('TileInfo', ['tile_count', 'image_shape', 'tile_shape',
-                                   'tile_row_offset', 'tile_column_offset', 'tile_id',
-                                   'tile_number',
-                                   'x', 'y', 'xy_factors', 'tile_slices', 'data_slices'])
-XYFactors = namedtuple('XYFactors', ['mx', 'bx', 'my', 'by'])
+TileInfo = namedtuple("TileInfo", ["tile_count", "image_shape", "tile_shape",
+                                   "tile_row_offset", "tile_column_offset", "tile_id",
+                                   "tile_number",
+                                   "x", "y", "xy_factors", "tile_slices", "data_slices"])
+XYFactors = namedtuple("XYFactors", ["mx", "bx", "my", "by"])


 def fix_awips_file(fn):
@@ -265,9 +265,9 @@ def fix_awips_file(fn):
     # of NetCDF
     LOG.info("Modifying output NetCDF file to work with AWIPS")
     import h5py
-    h = h5py.File(fn, 'a')
-    if '_NCProperties' in h.attrs:
-        del h.attrs['_NCProperties']
+    h = h5py.File(fn, "a")
+    if "_NCProperties" in h.attrs:
+        del h.attrs["_NCProperties"]
     h.close()
@@ -604,12 +604,12 @@ def _generate_tile_info(self):


 def _get_factor_offset_fill(input_data_arr, vmin, vmax, encoding):
-    dtype_str = encoding['dtype']
+    dtype_str = encoding["dtype"]
     dtype = np.dtype(getattr(np, dtype_str))
     file_bit_depth = dtype.itemsize * 8
-    unsigned_in_signed = encoding.get('_Unsigned') == "true"
-    is_unsigned = dtype.kind == 'u'
-    bit_depth = input_data_arr.attrs.get('bit_depth', file_bit_depth)
+    unsigned_in_signed = encoding.get("_Unsigned") == "true"
+    is_unsigned = dtype.kind == "u"
+    bit_depth = input_data_arr.attrs.get("bit_depth", file_bit_depth)
     num_fills = 1  # future: possibly support more than one fill value
     if bit_depth is None:
         bit_depth = file_bit_depth
@@ -666,7 +666,7 @@ def _add_valid_ranges(data_arrs):
         # we don't want to effect the original attrs
         data_arr = data_arr.copy(deep=False)
         # these are dask arrays, they need to get computed later
-        data_arr.attrs['valid_range'] = (vmin, vmax)
+        data_arr.attrs["valid_range"] = (vmin, vmax)
         yield data_arr
@@ -676,7 +676,7 @@ class AWIPSTiledVariableDecisionTree(DecisionTree):
     def __init__(self, decision_dicts, **kwargs):
         """Initialize decision tree with specific keys to look for."""
         # Fields used to match a product object to it's correct configuration
-        attrs = kwargs.pop('attrs',
+        attrs = kwargs.pop("attrs",
                            ["name",
                             "standard_name",
                             "satellite",
@@ -693,30 +693,30 @@ class NetCDFTemplate:
     def __init__(self, template_dict):
         """Parse template dictionary and prepare for rendering."""
-        self.is_single_variable = template_dict.get('single_variable', False)
-        self.global_attributes = template_dict.get('global_attributes', {})
+        self.is_single_variable = template_dict.get("single_variable", False)
+        self.global_attributes = template_dict.get("global_attributes", {})

         default_var_config = {
             "default": {
                 "encoding": {"dtype": "uint16"},
             }
         }
-        self.variables = template_dict.get('variables', default_var_config)
+        self.variables = template_dict.get("variables", default_var_config)

         default_coord_config = {
             "default": {
                 "encoding": {"dtype": "uint16"},
             }
         }
-        self.coordinates = template_dict.get('coordinates', default_coord_config)
+        self.coordinates = template_dict.get("coordinates", default_coord_config)

         self._var_tree = AWIPSTiledVariableDecisionTree([self.variables])
         self._coord_tree = AWIPSTiledVariableDecisionTree([self.coordinates])
-        self._filename_format_str = template_dict.get('filename')
+        self._filename_format_str = template_dict.get("filename")
         self._str_formatter = StringFormatter()
         self._template_dict = template_dict

-    def get_filename(self, base_dir='', **kwargs):
+    def get_filename(self, base_dir="", **kwargs):
         """Generate output NetCDF file from metadata."""
         # format the filename
         if self._filename_format_str is None:
@@ -794,7 +794,7 @@ def get_attr_value(self, attr_name, input_metadata, value=None, raw_key=None, ra
         if func is not None:
             value = func(input_metadata)
         if value is None:
-            LOG.debug('no routine matching %s', meth_name)
+            LOG.debug("no routine matching %s", meth_name)
         return value

     def _render_attrs(self, attr_configs, input_metadata, prefix="_"):
@@ -814,28 +814,28 @@ def _render_global_attributes(self, input_metadata):
                                   prefix="_global_")

     def _render_variable_attributes(self, var_config, input_metadata):
-        attr_configs = var_config['attributes']
+        attr_configs = var_config["attributes"]
         var_attrs = self._render_attrs(attr_configs, input_metadata, prefix="_data_")
         return var_attrs

     def _render_coordinate_attributes(self, coord_config, input_metadata):
-        attr_configs = coord_config['attributes']
+        attr_configs = coord_config["attributes"]
         coord_attrs = self._render_attrs(attr_configs, input_metadata, prefix="_coord_")
         return coord_attrs

     def _render_variable_encoding(self, var_config, input_data_arr):
         new_encoding = input_data_arr.encoding.copy()
         # determine fill value and
-        if 'encoding' in var_config:
-            new_encoding.update(var_config['encoding'])
+        if "encoding" in var_config:
+            new_encoding.update(var_config["encoding"])
         if "dtype" not in new_encoding:
-            new_encoding['dtype'] = 'int16'
-            new_encoding['_Unsigned'] = 'true'
+            new_encoding["dtype"] = "int16"
+            new_encoding["_Unsigned"] = "true"
         return new_encoding

     def _render_variable(self, data_arr):
         var_config = self._var_tree.find_match(**data_arr.attrs)
-        new_var_name = var_config.get('var_name', data_arr.attrs['name'])
+        new_var_name = var_config.get("var_name", data_arr.attrs["name"])
         new_data_arr = data_arr.copy()
         # remove coords which may cause issues later on
         new_data_arr = new_data_arr.reset_coords(drop=True)
@@ -848,8 +848,8 @@ def _render_variable(self, data_arr):

     def _get_matchable_coordinate_metadata(self, coord_name, coord_attrs):
         match_kwargs = {}
-        if 'name' not in coord_attrs:
-            match_kwargs['name'] = coord_name
+        if "name" not in coord_attrs:
+            match_kwargs["name"] = coord_name
         match_kwargs.update(coord_attrs)
         return match_kwargs
@@ -897,29 +897,29 @@ def __init__(self, template_dict, swap_end_time=False):

     def _swap_attributes_end_time(self, template_dict):
         """Swap every use of 'start_time' to use 'end_time' instead."""
-        variable_attributes = [var_section['attributes'] for var_section in template_dict.get('variables', {}).values()]
-        global_attributes = template_dict.get('global_attributes', {})
+        variable_attributes = [var_section["attributes"] for var_section in template_dict.get("variables", {}).values()]
+        global_attributes = template_dict.get("global_attributes", {})
         for attr_section in variable_attributes + [global_attributes]:
             for attr_name in attr_section:
                 attr_config = attr_section[attr_name]
-                if '{start_time' in attr_config.get('value', ''):
-                    attr_config['value'] = attr_config['value'].replace('{start_time', '{end_time')
-                if attr_config.get('raw_key', '') == 'start_time':
-                    attr_config['raw_key'] = 'end_time'
+                if "{start_time" in attr_config.get("value", ""):
+                    attr_config["value"] = attr_config["value"].replace("{start_time", "{end_time")
+                if attr_config.get("raw_key", "") == "start_time":
+                    attr_config["raw_key"] = "end_time"

     def _data_units(self, input_metadata):
-        units = input_metadata.get('units', '1')
+        units = input_metadata.get("units", "1")
         # we *know* AWIPS can't handle some units
         return UNIT_CONV.get(units, units)

     def _global_start_date_time(self, input_metadata):
-        start_time = input_metadata['start_time']
+        start_time = input_metadata["start_time"]
         if self._swap_end_time:
-            start_time = input_metadata['end_time']
+            start_time = input_metadata["end_time"]
         return start_time.strftime("%Y-%m-%dT%H:%M:%S")

     def _global_awips_id(self, input_metadata):
-        return "AWIPS_" + input_metadata['name']
+        return "AWIPS_" + input_metadata["name"]

     def _global_physical_element(self, input_metadata):
         var_config = self._var_tree.find_match(**input_metadata)
@@ -930,11 +930,11 @@ def _global_production_location(self, input_metadata):
         """Get default global production_location attribute."""
         del input_metadata
-        org = os.environ.get('ORGANIZATION', None)
+        org = os.environ.get("ORGANIZATION", None)
         if org is not None:
             prod_location = org
         else:
-            LOG.warning('environment ORGANIZATION not set for .production_location attribute, using hostname')
+            LOG.warning("environment ORGANIZATION not set for .production_location attribute, using hostname")
             import socket
             prod_location = socket.gethostname()  # FUTURE: something more correct but this will do for now
@@ -954,25 +954,25 @@

     @staticmethod
     def _get_vmin_vmax(var_config, input_data_arr):
-        if 'valid_range' in var_config:
-            return var_config['valid_range']
+        if "valid_range" in var_config:
+            return var_config["valid_range"]
         data_vmin, data_vmax = _get_data_vmin_vmax(input_data_arr)
         return data_vmin, data_vmax

@@ -980,57 +980,57 @@
     def _render_variable_encoding(self, var_config, input_data_arr):
         new_encoding = super()._render_variable_encoding(var_config, input_data_arr)
         vmin, vmax = self._get_vmin_vmax(var_config, input_data_arr)
-        has_flag_meanings = 'flag_meanings' in input_data_arr.attrs
+        has_flag_meanings = "flag_meanings" in input_data_arr.attrs
         is_int = np.issubdtype(input_data_arr.dtype, np.integer)
         is_cat = has_flag_meanings or is_int
-        has_sf = new_encoding.get('scale_factor') is not None
+        has_sf = new_encoding.get("scale_factor") is not None
         if not has_sf and is_cat:
             # AWIPS doesn't like Identity conversion so we can't have
             # a factor of 1 and an offset of 0
             # new_encoding['scale_factor'] = None
             # new_encoding['add_offset'] = None
-            if '_FillValue' in input_data_arr.attrs:
-                new_encoding['_FillValue'] = input_data_arr.attrs['_FillValue']
+            if "_FillValue" in input_data_arr.attrs:
+                new_encoding["_FillValue"] = input_data_arr.attrs["_FillValue"]
         elif not has_sf and vmin is not None and vmax is not None:
             # calculate scale_factor and add_offset
             sf, ao, fill = _get_factor_offset_fill(
                 input_data_arr, vmin, vmax, new_encoding
             )
             # NOTE: These could be dask arrays that will be computed later
             # when we go to write the files.
-            new_encoding['scale_factor'] = sf
-            new_encoding['add_offset'] = ao
-            new_encoding['_FillValue'] = fill
-        new_encoding['coordinates'] = ' '.join([ele for ele in input_data_arr.dims])
+            new_encoding["scale_factor"] = sf
+            new_encoding["add_offset"] = ao
+            new_encoding["_FillValue"] = fill
+        new_encoding["coordinates"] = " ".join([ele for ele in input_data_arr.dims])
         return new_encoding

     def _get_projection_attrs(self, area_def):
         """Assign projection attributes per CF standard."""
         proj_attrs = area_def.crs.to_cf()
         proj_encoding = {"dtype": "i4"}
-        proj_attrs['short_name'] = area_def.area_id
-        gmap_name = proj_attrs['grid_mapping_name']
+        proj_attrs["short_name"] = area_def.area_id
+        gmap_name = proj_attrs["grid_mapping_name"]

         preferred_names = {
-            'geostationary': 'fixedgrid_projection',
-            'lambert_conformal_conic': 'lambert_projection',
-            'polar_stereographic': 'polar_projection',
-            'mercator': 'mercator_projection',
+            "geostationary": "fixedgrid_projection",
+            "lambert_conformal_conic": "lambert_projection",
+            "polar_stereographic": "polar_projection",
+            "mercator": "mercator_projection",
         }
         if gmap_name not in preferred_names:
             LOG.warning("Data is in projection %s which may not be supported "
                         "by AWIPS", gmap_name)
-        area_id_as_var_name = area_def.area_id.replace('-', '_').lower()
+        area_id_as_var_name = area_def.area_id.replace("-", "_").lower()
         proj_name = preferred_names.get(gmap_name, area_id_as_var_name)
         return proj_name, proj_attrs, proj_encoding

     def _set_xy_coords_attrs(self, new_ds, crs):
-        y_attrs = new_ds.coords['y'].attrs
+        y_attrs = new_ds.coords["y"].attrs
         if crs.is_geographic:
-            self._fill_units_and_standard_name(y_attrs, 'degrees_north', 'latitude')
+            self._fill_units_and_standard_name(y_attrs, "degrees_north", "latitude")
         else:
-            self._fill_units_and_standard_name(y_attrs, 'meters', 'projection_y_coordinate')
-        y_attrs['axis'] = 'Y'
+            self._fill_units_and_standard_name(y_attrs, "meters", "projection_y_coordinate")
+        y_attrs["axis"] = "Y"

-        x_attrs = new_ds.coords['x'].attrs
+        x_attrs = new_ds.coords["x"].attrs
         if crs.is_geographic:
-            self._fill_units_and_standard_name(x_attrs, 'degrees_east', 'longitude')
+            self._fill_units_and_standard_name(x_attrs, "degrees_east", "longitude")
         else:
-            self._fill_units_and_standard_name(x_attrs, 'meters', 'projection_x_coordinate')
-        x_attrs['axis'] = 'X'
+            self._fill_units_and_standard_name(x_attrs, "meters", "projection_x_coordinate")
+        x_attrs["axis"] = "X"

     @staticmethod
     def _fill_units_and_standard_name(attrs, units, standard_name):
         """Fill in units and standard_name if not set in `attrs`."""
-        if attrs.get('units') is None:
-            attrs['units'] = units
-        if attrs['units'] in ('meter', 'metre'):
+        if attrs.get("units") is None:
+            attrs["units"] = units
+        if attrs["units"] in ("meter", "metre"):
             # AWIPS doesn't like 'meter'
-            attrs['units'] = 'meters'
-        if attrs.get('standard_name') is None:
-            attrs['standard_name'] = standard_name
+            attrs["units"] = "meters"
+        if attrs.get("standard_name") is None:
+            attrs["standard_name"] = standard_name

     def apply_area_def(self, new_ds, area_def):
         """Apply information we can gather from the AreaDefinition."""
@@ -1040,25 +1040,25 @@ def apply_area_def(self, new_ds, area_def):
         new_ds[gmap_name] = gmap_data_arr
         self._set_xy_coords_attrs(new_ds, area_def.crs)
         for data_arr in new_ds.data_vars.values():
-            if 'y' in data_arr.dims and 'x' in data_arr.dims:
-                data_arr.attrs['grid_mapping'] = gmap_name
+            if "y" in data_arr.dims and "x" in data_arr.dims:
+                data_arr.attrs["grid_mapping"] = gmap_name

-        new_ds.attrs['pixel_x_size'] = area_def.pixel_size_x / 1000.0
-        new_ds.attrs['pixel_y_size'] = area_def.pixel_size_y / 1000.0
+        new_ds.attrs["pixel_x_size"] = area_def.pixel_size_x / 1000.0
+        new_ds.attrs["pixel_y_size"] = area_def.pixel_size_y / 1000.0
         return new_ds

     def apply_tile_coord_encoding(self, new_ds, xy_factors):
         """Add encoding information specific to the coordinate variables."""
-        if 'x' in new_ds.coords:
-            new_ds.coords['x'].encoding['dtype'] = 'int16'
-            new_ds.coords['x'].encoding['scale_factor'] = np.float64(xy_factors.mx)
-            new_ds.coords['x'].encoding['add_offset'] = np.float64(xy_factors.bx)
-            new_ds.coords['x'].encoding['_FillValue'] = -1
-        if 'y' in new_ds.coords:
-            new_ds.coords['y'].encoding['dtype'] = 'int16'
-            new_ds.coords['y'].encoding['scale_factor'] = np.float64(xy_factors.my)
-            new_ds.coords['y'].encoding['add_offset'] = np.float64(xy_factors.by)
-            new_ds.coords['y'].encoding['_FillValue'] = -1
+        if "x" in new_ds.coords:
+            new_ds.coords["x"].encoding["dtype"] = "int16"
+            new_ds.coords["x"].encoding["scale_factor"] = np.float64(xy_factors.mx)
+            new_ds.coords["x"].encoding["add_offset"] = np.float64(xy_factors.bx)
+            new_ds.coords["x"].encoding["_FillValue"] = -1
+        if "y" in new_ds.coords:
+            new_ds.coords["y"].encoding["dtype"] = "int16"
+            new_ds.coords["y"].encoding["scale_factor"] = np.float64(xy_factors.my)
+            new_ds.coords["y"].encoding["add_offset"] = np.float64(xy_factors.by)
+            new_ds.coords["y"].encoding["_FillValue"] = -1
         return new_ds

     def apply_tile_info(self, new_ds, tile_info):
@@ -1067,25 +1067,25 @@ def apply_tile_info(self, new_ds, tile_info):
         total_pixels = tile_info.image_shape
         tile_row = tile_info.tile_row_offset
         tile_column = tile_info.tile_column_offset
-        tile_height = new_ds.sizes['y']
-        tile_width = new_ds.sizes['x']
-        new_ds.attrs['tile_row_offset'] = tile_row
-        new_ds.attrs['tile_column_offset'] = tile_column
-        new_ds.attrs['product_tile_height'] = tile_height
-        new_ds.attrs['product_tile_width'] = tile_width
-        new_ds.attrs['number_product_tiles'] = total_tiles[0] * total_tiles[1]
-        new_ds.attrs['product_rows'] = total_pixels[0]
-        new_ds.attrs['product_columns'] = total_pixels[1]
+        tile_height = new_ds.sizes["y"]
+        tile_width = new_ds.sizes["x"]
+        new_ds.attrs["tile_row_offset"] = tile_row
+        new_ds.attrs["tile_column_offset"] = tile_column
+        new_ds.attrs["product_tile_height"] = tile_height
+        new_ds.attrs["product_tile_width"] = tile_width
+        new_ds.attrs["number_product_tiles"] = total_tiles[0] * total_tiles[1]
+        new_ds.attrs["product_rows"] = total_pixels[0]
+        new_ds.attrs["product_columns"] = total_pixels[1]
         return new_ds

     def _add_sector_id_global(self, new_ds, sector_id):
-        if not self._template_dict.get('add_sector_id_global'):
+        if not self._template_dict.get("add_sector_id_global"):
             return

         if sector_id is None:
             raise ValueError("Keyword 'sector_id' is required for this "
                              "template.")
-        new_ds.attrs['sector_id'] = sector_id
+        new_ds.attrs["sector_id"] = sector_id

     def apply_misc_metadata(self, new_ds, sector_id=None, creator=None, creation_time=None):
         """Add attributes that don't fit into any other category."""
@@ -1095,9 +1095,9 @@ def apply_misc_metadata(self, new_ds, sector_id=None, creator=None, creation_tim
             creation_time = datetime.utcnow()

         self._add_sector_id_global(new_ds, sector_id)
-        new_ds.attrs['Conventions'] = "CF-1.7"
-        new_ds.attrs['creator'] = creator
-        new_ds.attrs['creation_time'] = creation_time.strftime('%Y-%m-%dT%H:%M:%S')
+        new_ds.attrs["Conventions"] = "CF-1.7"
+        new_ds.attrs["creator"] = creator
+        new_ds.attrs["creation_time"] = creation_time.strftime("%Y-%m-%dT%H:%M:%S")
         return new_ds

     def _render_variable_attributes(self, var_config, input_metadata):
@@ -1128,7 +1128,7 @@ def render(self, dataset_or_data_arrays, area_def,

 def _notnull(data_arr, check_categories=True):
     is_int = np.issubdtype(data_arr.dtype, np.integer)
-    fill_value = data_arr.encoding.get('_FillValue', data_arr.attrs.get('_FillValue'))
+    fill_value = data_arr.encoding.get("_FillValue", data_arr.attrs.get("_FillValue"))
     if is_int and fill_value is not None:
         # some DQF datasets are always valid
         if check_categories:
@@ -1178,7 +1178,7 @@ def _copy_to_existing(dataset_to_save, output_filename):
             new_data[valid_current] = var_data_arr.data[valid_current]
             var_data_arr.data[:] = new_data
             var_data_arr.encoding.update(existing_data_arr.encoding)
-            var_data_arr.encoding.pop('source', None)
+            var_data_arr.encoding.pop("source", None)
     return dataset_to_save
@@ -1187,10 +1187,10 @@ def _extract_factors(dataset_to_save):
     factors = {}
     for data_var in dataset_to_save.data_vars.values():
         enc = data_var.encoding
-        data_var.attrs.pop('valid_range', None)
-        factor_set = (enc.pop('scale_factor', None),
-                      enc.pop('add_offset', None),
-                      enc.pop('_FillValue', None))
+        data_var.attrs.pop("valid_range", None)
+        factor_set = (enc.pop("scale_factor", None),
+                      enc.pop("add_offset", None),
+                      enc.pop("_FillValue", None))
         factors[data_var.name] = factor_set
     return factors
@@ -1199,11 +1199,11 @@ def _reapply_factors(dataset_to_save, factors):
     for var_name, factor_set in factors.items():
         data_arr = dataset_to_save[var_name]
         if factor_set[0] is not None:
-            data_arr.encoding['scale_factor'] = factor_set[0]
+            data_arr.encoding["scale_factor"] = factor_set[0]
         if factor_set[1] is not None:
-            data_arr.encoding['add_offset'] = factor_set[1]
+            data_arr.encoding["add_offset"] = factor_set[1]
         if factor_set[2] is not None:
-            data_arr.encoding['_FillValue'] = factor_set[2]
+            data_arr.encoding["_FillValue"] = factor_set[2]
     return dataset_to_save
@@ -1228,9 +1228,9 @@ def to_nonempty_netcdf(dataset_to_save: xr.Dataset,
     # TODO: Allow for new variables to be created
     if update_existing and os.path.isfile(output_filename):
         dataset_to_save = _copy_to_existing(dataset_to_save, output_filename)
-        mode = 'a'
+        mode = "a"
     else:
-        mode = 'w'
+        mode = "w"
     return dataset_to_save, output_filename, mode
     # return dataset_to_save.to_netcdf(output_filename, mode=mode)
     # if fix_awips:
@@ -1258,9 +1258,9 @@ class AWIPSTiledWriter(Writer):
     def __init__(self, compress=False, fix_awips=False, **kwargs):
         """Initialize writer and decision trees."""
         super(AWIPSTiledWriter, self).__init__(default_config_filename="writers/awips_tiled.yaml", **kwargs)
-        self.base_dir = kwargs.get('base_dir', '')
-        self.awips_sectors = self.config['sectors']
-        self.templates = self.config['templates']
+        self.base_dir = kwargs.get("base_dir", "")
+        self.awips_sectors = self.config["sectors"]
+        self.templates = self.config["templates"]
         self.compress = compress
         self.fix_awips = fix_awips
         self._fill_sector_info()
@@ -1289,7 +1289,7 @@ def separate_init_kwargs(cls, kwargs):
         # FUTURE: Don't pass Scene.save_datasets kwargs to init and here
         init_kwargs, kwargs = super(AWIPSTiledWriter, cls).separate_init_kwargs(
             kwargs)
-        for kw in ['compress', 'fix_awips']:
+        for kw in ["compress", "fix_awips"]:
             if kw in kwargs:
                 init_kwargs[kw] = kwargs.pop(kw)
@@ -1298,16 +1298,16 @@ def separate_init_kwargs(cls, kwargs):
     def _fill_sector_info(self):
         """Convert sector extents if needed."""
         for sector_info in self.awips_sectors.values():
-            sector_info['projection'] = CRS.from_user_input(sector_info['projection'])
-            p = Proj(sector_info['projection'])
-            if 'lower_left_xy' in sector_info:
-                sector_info['lower_left_lonlat'] = p(*sector_info['lower_left_xy'], inverse=True)
+            sector_info["projection"] = CRS.from_user_input(sector_info["projection"])
+            p = Proj(sector_info["projection"])
+            if "lower_left_xy" in sector_info:
+                sector_info["lower_left_lonlat"] = p(*sector_info["lower_left_xy"], inverse=True)
             else:
-                sector_info['lower_left_xy'] = p(*sector_info['lower_left_lonlat'])
-            if 'upper_right_xy' in sector_info:
-                sector_info['upper_right_lonlat'] = p(*sector_info['upper_right_xy'], inverse=True)
+                sector_info["lower_left_xy"] = p(*sector_info["lower_left_lonlat"])
+            if "upper_right_xy" in sector_info:
+                sector_info["upper_right_lonlat"] = p(*sector_info["upper_right_xy"], inverse=True)
             else:
-                sector_info['upper_right_xy'] = p(*sector_info['upper_right_lonlat'])
+                sector_info["upper_right_xy"] = p(*sector_info["upper_right_lonlat"])

     def _get_lettered_sector_info(self, sector_id):
         """Get metadata for the current sector if configured.
@@ -1334,9 +1334,9 @@ def _get_tile_generator(self, area_def, lettered_grid, sector_id,
             sector_info = self._get_lettered_sector_info(sector_id)
             tile_gen = LetteredTileGenerator(
                 area_def,
-                sector_info['lower_left_xy'] + sector_info['upper_right_xy'],
-                sector_crs=sector_info['projection'],
-                cell_size=sector_info['resolution'],
+                sector_info["lower_left_xy"] + sector_info["upper_right_xy"],
+                sector_crs=sector_info["projection"],
+                cell_size=sector_info["resolution"],
                 num_subtiles=num_subtiles,
                 use_sector_reference=use_sector_reference,
             )
@@ -1356,18 +1356,18 @@ def _area_id(area_def):
         # get all of the datasets stored by area
         area_datasets = {}
         for x in datasets:
-            area_id = _area_id(x.attrs['area'])
-            area, ds_list = area_datasets.setdefault(area_id, (x.attrs['area'], []))
+            area_id = _area_id(x.attrs["area"])
+            area, ds_list = area_datasets.setdefault(area_id, (x.attrs["area"], []))
             ds_list.append(x)
         return area_datasets

     def _split_rgbs(self, ds):
         """Split a single RGB dataset in to multiple."""
-        for component in 'RGB':
+        for component in "RGB":
             band_data = ds.sel(bands=component)
-            band_data.attrs['name'] += '_{}'.format(component)
-            band_data.attrs['valid_min'] = 0.0
-            band_data.attrs['valid_max'] = 1.0
+            band_data.attrs["name"] += "_{}".format(component)
+            band_data.attrs["valid_min"] = 0.0
+            band_data.attrs["valid_max"] = 1.0
             yield band_data

     def _enhance_and_split_rgbs(self, datasets):
@@ -1377,7 +1377,7 @@ def _enhance_and_split_rgbs(self, datasets):
             if ds.ndim == 2:
                 new_datasets.append(ds)
                 continue
-            elif ds.ndim > 3 or ds.ndim < 1 or (ds.ndim == 3 and 'bands' not in ds.coords):
+            elif ds.ndim > 3 or ds.ndim < 1 or (ds.ndim == 3 and "bands" not in ds.coords):
                 LOG.error("Can't save datasets with more or less than 2 dimensions "
                           "that aren't RGBs to AWIPS Tiled format: %s", ds.name)
             else:
@@ -1389,31 +1389,31 @@ def _enhance_and_split_rgbs(self, datasets):
         return new_datasets

     def _tile_filler(self, tile_info, data_arr):
-        fill = np.nan if np.issubdtype(data_arr.dtype, np.floating) else data_arr.attrs.get('_FillValue', 0)
+        fill = np.nan if np.issubdtype(data_arr.dtype, np.floating) else data_arr.attrs.get("_FillValue", 0)
         data_arr_data = data_arr.data[tile_info.data_slices]
         data_arr_data = data_arr_data.rechunk(data_arr_data.shape)
         new_data = da.map_blocks(tile_filler, data_arr_data,
                                  tile_info.tile_shape, tile_info.tile_slices,
                                  fill, dtype=data_arr.dtype,
                                  chunks=tile_info.tile_shape)
-        return xr.DataArray(new_data, dims=('y', 'x'),
+        return xr.DataArray(new_data, dims=("y", "x"),
                             attrs=data_arr.attrs.copy())

     def _slice_and_update_coords(self, tile_info, data_arrays):
-        new_x = xr.DataArray(tile_info.x, dims=('x',))
-        if 'x' in data_arrays[0].coords:
-            old_x = data_arrays[0].coords['x']
+        new_x = xr.DataArray(tile_info.x, dims=("x",))
+        if "x" in data_arrays[0].coords:
+            old_x = data_arrays[0].coords["x"]
             new_x.attrs.update(old_x.attrs)
             new_x.encoding = old_x.encoding
-        new_y = xr.DataArray(tile_info.y, dims=('y',))
-        if 'y' in data_arrays[0].coords:
-            old_y = data_arrays[0].coords['y']
+        new_y = xr.DataArray(tile_info.y, dims=("y",))
+        if "y" in data_arrays[0].coords:
+            old_y = data_arrays[0].coords["y"]
             new_y.attrs.update(old_y.attrs)
             new_y.encoding = old_y.encoding

         for data_arr in data_arrays:
             new_data_arr = self._tile_filler(tile_info, data_arr)
-            new_data_arr.coords['x'] = new_x
-            new_data_arr.coords['y'] = new_y
+            new_data_arr.coords["x"] = new_x
+            new_data_arr.coords["y"] = new_y
             yield new_data_arr

     def _iter_tile_info_and_datasets(self, tile_gen, data_arrays, single_variable=True):
@@ -1491,9 +1491,9 @@ def _get_tile_data_info(self, data_arrs, creation_time, source_name):
         # use the first data array as a "representative" for the group
         ds_info = data_arrs[0].attrs.copy()
         # we want to use our own creation_time
-        ds_info['creation_time'] = creation_time
+        ds_info["creation_time"] = creation_time
         if source_name is not None:
-            ds_info['source_name'] = source_name
+            ds_info["source_name"] = source_name
         self._adjust_metadata_times(ds_info)
         return ds_info
@@ -1503,8 +1503,8 @@ def save_datasets(self, datasets, sector_id=None,
                       tile_count=(1, 1), tile_size=None,
                       lettered_grid=False, num_subtiles=None,
                       use_end_time=False, use_sector_reference=False,
-                      template='polar', check_categories=True,
-                      extra_global_attrs=None, environment_prefix='DR',
+                      template="polar", check_categories=True,
+                      extra_global_attrs=None, environment_prefix="DR",
                       compute=True, **kwargs):
         """Write a series of DataArray objects to multiple NetCDF4 Tile files.
@@ -1583,7 +1583,7 @@ def save_datasets(self, datasets, sector_id=None,
         """
         if not isinstance(template, dict):
-            template = self.config['templates'][template]
+            template = self.config["templates"][template]
         template = AWIPSNetCDFTemplate(template, swap_end_time=use_end_time)
         area_data_arrs = self._group_by_area(datasets)
         datasets_to_save = []
@@ -1609,9 +1609,9 @@ def save_datasets(self, datasets, sector_id=None,
                                      shared_attrs=ds_info,
                                      extra_global_attrs=extra_global_attrs)
             if self.compress:
-                new_ds.encoding['zlib'] = True
+                new_ds.encoding["zlib"] = True
                 for var in new_ds.variables.values():
-                    var.encoding['zlib'] = True
+                    var.encoding["zlib"] = True
             datasets_to_save.append(new_ds)
             output_filenames.append(output_filename)
@@ -1669,24 +1669,24 @@ def dataset_iter(_delayed_gen):
     return dataset_iter


-def _create_debug_array(sector_info, num_subtiles, font_path='Verdana.ttf'):
+def _create_debug_array(sector_info, num_subtiles, font_path="Verdana.ttf"):
     from PIL import Image, ImageDraw, ImageFont
     from pkg_resources import resource_filename as get_resource_filename
     size = (1000, 1000)
     img = Image.new("L", size, 0)
     draw = ImageDraw.Draw(img)

-    if ':' in font_path:
+    if ":" in font_path:
         # load from a python package
-        font_path = get_resource_filename(*font_path.split(':'))
+        font_path = get_resource_filename(*font_path.split(":"))
     font = ImageFont.truetype(font_path, 25)

-    ll_extent = sector_info['lower_left_xy']
-    ur_extent = sector_info['upper_right_xy']
+    ll_extent = sector_info["lower_left_xy"]
+    ur_extent = sector_info["upper_right_xy"]
     total_meters_x = ur_extent[0] - ll_extent[0]
     total_meters_y = ur_extent[1] - ll_extent[1]
-    fcs_x = np.ceil(float(sector_info['resolution'][1]) / num_subtiles[1])
-    fcs_y = np.ceil(float(sector_info['resolution'][0]) / num_subtiles[0])
+    fcs_x = np.ceil(float(sector_info["resolution"][1]) / num_subtiles[1])
+    fcs_y = np.ceil(float(sector_info["resolution"][0]) / num_subtiles[0])
     total_cells_x = np.ceil(total_meters_x / fcs_x)
     total_cells_y = np.ceil(total_meters_y / fcs_y)
     total_cells_x = np.ceil(total_cells_x / num_subtiles[1]) * num_subtiles[1]
@@ -1735,10 +1735,10 @@ def _create_debug_array(sector_info, num_subtiles, font_path='Verdana.ttf'):
         ur_extent[1],
     )
     grid_def = AreaDefinition(
-        'debug_grid',
-        'debug_grid',
-        'debug_grid',
-        sector_info['projection'],
+        "debug_grid",
+        "debug_grid",
+        "debug_grid",
+        sector_info["projection"],
         1000,
         1000,
         new_extents
@@ -1756,26 +1756,26 @@ def draw_rectangle(draw, coordinates, outline=None, fill=None, width=1):

 def create_debug_lettered_tiles(**writer_kwargs):
     """Create tile files with tile identifiers "burned" in to the image data for debugging."""
-    writer_kwargs['lettered_grid'] = True
-    writer_kwargs['num_subtiles'] = (2, 2)  # default, don't use command line argument
+    writer_kwargs["lettered_grid"] = True
+    writer_kwargs["num_subtiles"] = (2, 2)  # default, don't use command line argument

     init_kwargs, save_kwargs = AWIPSTiledWriter.separate_init_kwargs(**writer_kwargs)

     writer = AWIPSTiledWriter(**init_kwargs)
-    sector_id = save_kwargs['sector_id']
+    sector_id = save_kwargs["sector_id"]
     sector_info = writer.awips_sectors[sector_id]
-    area_def, arr = _create_debug_array(sector_info, save_kwargs['num_subtiles'])
+    area_def, arr = _create_debug_array(sector_info, save_kwargs["num_subtiles"])

     now = datetime.utcnow()
-    product = xr.DataArray(da.from_array(arr, chunks='auto'), attrs=dict(
-        name='debug_{}'.format(sector_id),
-        platform_name='DEBUG',
-        sensor='TILES',
+    product = xr.DataArray(da.from_array(arr, chunks="auto"), attrs=dict(
+        name="debug_{}".format(sector_id),
+        platform_name="DEBUG",
+        sensor="TILES",
         start_time=now,
         end_time=now,
         area=area_def,
         standard_name="toa_bidirectional_reflectance",
-        units='1',
+        units="1",
         valid_min=0,
         valid_max=255,
     ))
@@ -1790,12 +1790,12 @@ def main():
     """Command line interface mimicing CSPP Polar2Grid."""
     import argparse
     parser = argparse.ArgumentParser(description="Create AWIPS compatible NetCDF tile files")
-    parser.add_argument("--create-debug", action='store_true',
-                        help='Create debug NetCDF files to show tile locations in AWIPS')
-    parser.add_argument('-v', '--verbose', dest='verbosity', action="count", default=0,
-                        help='each occurrence increases verbosity 1 level through '
-                             'ERROR-WARNING-INFO-DEBUG (default INFO)')
-    parser.add_argument('-l', '--log', dest="log_fn", default=None,
+    parser.add_argument("--create-debug", action="store_true",
+                        help="Create debug NetCDF files to show tile locations in AWIPS")
+    parser.add_argument("-v", "--verbose", dest="verbosity", action="count", default=0,
+                        help="each occurrence increases verbosity 1 level through "
+                             "ERROR-WARNING-INFO-DEBUG (default INFO)")
+    parser.add_argument("-l", "--log", dest="log_fn", default=None,
                         help="specify the log filename")

     group_1 = parser.add_argument_group(title="Writer Initialization")
@@ -1812,17 +1812,17 @@ def main():
                          help="Specify how many pixels are in each tile (overrides '--tiles')")
     # group.add_argument('--tile-offset', nargs=2, default=(0, 0),
     #                    help="Start counting tiles from this offset ('row_offset col_offset')")
-    group_2.add_argument("--letters", dest="lettered_grid", action='store_true',
+    group_2.add_argument("--letters", dest="lettered_grid", action="store_true",
                          help="Create tiles from a static letter-based grid based on the product projection")
     group_2.add_argument("--letter-subtiles", nargs=2, type=int, default=(2, 2),
                          help="Specify number of subtiles in each lettered tile: \'row col\'")
     group_2.add_argument("--output-pattern", default=DEFAULT_OUTPUT_PATTERN,
                          help="output filenaming pattern")
-    group_2.add_argument("--source-name", default='SSEC',
+    group_2.add_argument("--source-name", default="SSEC",
                          help="specify processing source name used in attributes and filename (default 'SSEC')")
     group_2.add_argument("--sector-id", required=True,
                          help="specify name for sector/region used in attributes and filename (example 'LCC')")
-    group_2.add_argument("--template", default='polar',
+    group_2.add_argument("--template", default="polar",
                          help="specify the template name to use (default: polar)")
     args = parser.parse_args()
@@ -1838,5 +1838,5 @@ def main():
     raise NotImplementedError("Command line interface not implemented yet for AWIPS tiled writer")


-if __name__ == '__main__':
+if __name__ == "__main__":
     sys.exit(main())
diff --git a/satpy/writers/cf/coords_attrs.py b/satpy/writers/cf/coords_attrs.py
index c7e559adc2..6ae80da468 100644
--- a/satpy/writers/cf/coords_attrs.py
+++ b/satpy/writers/cf/coords_attrs.py
@@ -19,28 +19,28 @@ def add_xy_coords_attrs(dataarray):
         dataarray = _add_xy_projected_coords_attrs(dataarray)
     else:
         dataarray = _add_xy_geographic_coords_attrs(dataarray)
-    if 'crs' in dataarray.coords:
-        dataarray = dataarray.drop_vars('crs')
+    if "crs" in dataarray.coords:
+        dataarray = dataarray.drop_vars("crs")
     return dataarray


-def _add_xy_projected_coords_attrs(dataarray, x='x', y='y'):
+def _add_xy_projected_coords_attrs(dataarray, x="x", y="y"):
     """Add relevant attributes to x, y coordinates of a projected CRS."""
     if x in dataarray.coords:
-        dataarray[x].attrs['standard_name'] = 'projection_x_coordinate'
-        dataarray[x].attrs['units'] = 'm'
+        dataarray[x].attrs["standard_name"] = "projection_x_coordinate"
+        dataarray[x].attrs["units"] = "m"
     if y in dataarray.coords:
-        dataarray[y].attrs['standard_name'] = 'projection_y_coordinate'
-        dataarray[y].attrs['units'] = 'm'
+        dataarray[y].attrs["standard_name"] = "projection_y_coordinate"
+        dataarray[y].attrs["units"] = "m"
     return dataarray


-def _add_xy_geographic_coords_attrs(dataarray, x='x', y='y'):
+def _add_xy_geographic_coords_attrs(dataarray, x="x", y="y"):
     """Add relevant attributes to x, y coordinates of a geographic CRS."""
     if x in dataarray.coords:
-        dataarray[x].attrs['standard_name'] = 'longitude'
-        dataarray[x].attrs['units'] = 'degrees_east'
+        dataarray[x].attrs["standard_name"] = "longitude"
+        dataarray[x].attrs["units"] = "degrees_east"
     if y in dataarray.coords:
-        dataarray[y].attrs['standard_name'] = 'latitude'
-        dataarray[y].attrs['units'] = 'degrees_north'
+        dataarray[y].attrs["standard_name"] = "latitude"
+        dataarray[y].attrs["units"] = "degrees_north"
     return dataarray
diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py
index b9a24b9292..6446e53bc3 100644
--- a/satpy/writers/cf_writer.py
+++ b/satpy/writers/cf_writer.py
@@ -190,41 +190,41 @@
 # Ensure that either netCDF4 or h5netcdf is available to avoid silent failure
 if netCDF4 is None and h5netcdf is None:
-    raise ImportError('Ensure that the netCDF4 or h5netcdf package is installed.')
+    raise ImportError("Ensure that the netCDF4 or h5netcdf package is installed.")

 # Numpy datatypes compatible with all netCDF4 backends. ``np.unicode_`` is
 # excluded because h5py (and thus h5netcdf) has problems with unicode, see
 # https://github.com/h5py/h5py/issues/624."""
-NC4_DTYPES = [np.dtype('int8'), np.dtype('uint8'),
-              np.dtype('int16'), np.dtype('uint16'),
-              np.dtype('int32'), np.dtype('uint32'),
-              np.dtype('int64'), np.dtype('uint64'),
-              np.dtype('float32'), np.dtype('float64'),
+NC4_DTYPES = [np.dtype("int8"), np.dtype("uint8"),
+              np.dtype("int16"), np.dtype("uint16"),
+              np.dtype("int32"), np.dtype("uint32"),
+              np.dtype("int64"), np.dtype("uint64"),
+              np.dtype("float32"), np.dtype("float64"),
               np.string_]

 # Unsigned and int64 isn't CF 1.7 compatible
 # Note: Unsigned and int64 are CF 1.9 compatible
-CF_DTYPES = [np.dtype('int8'),
-             np.dtype('int16'),
-             np.dtype('int32'),
-             np.dtype('float32'),
-             np.dtype('float64'),
+CF_DTYPES = [np.dtype("int8"),
+             np.dtype("int16"),
+             np.dtype("int32"),
+             np.dtype("float32"),
+             np.dtype("float64"),
              np.string_]

-CF_VERSION = 'CF-1.7'
+CF_VERSION = "CF-1.7"


 def get_extra_ds(dataarray, keys=None):
     """Get the ancillary_variables DataArrays associated to a dataset."""
     ds_collection = {}
     # Retrieve ancillary variable datarrays
-    for ancillary_dataarray in dataarray.attrs.get('ancillary_variables', []):
+    for ancillary_dataarray in dataarray.attrs.get("ancillary_variables", []):
         ancillary_variable = ancillary_dataarray.name
         if keys and ancillary_variable not in keys:
             keys.append(ancillary_variable)
             ds_collection.update(get_extra_ds(ancillary_dataarray, keys=keys))
     # Add input dataarray
-    ds_collection[dataarray.attrs['name']] = dataarray
+    ds_collection[dataarray.attrs["name"]] = dataarray
     return ds_collection
@@ -235,20 +235,20 @@ def get_extra_ds(dataarray, keys=None):
 def add_lonlat_coords(dataarray):
     """Add 'longitude' and 'latitude' coordinates to DataArray."""
     dataarray = dataarray.copy()
-    area = dataarray.attrs['area']
-    ignore_dims = {dim: 0 for dim in dataarray.dims if dim not in ['x', 'y']}
-    chunks = getattr(dataarray.isel(**ignore_dims), 'chunks', None)
+    area = dataarray.attrs["area"]
+    ignore_dims = {dim: 0 for dim in dataarray.dims if dim not in ["x", "y"]}
+    chunks = getattr(dataarray.isel(**ignore_dims), "chunks", None)
     lons, lats = area.get_lonlats(chunks=chunks)
-    dataarray['longitude'] = xr.DataArray(lons, dims=['y', 'x'],
-                                          attrs={'name': "longitude",
-                                                 'standard_name': "longitude",
-                                                 'units': 'degrees_east'},
-                                          name='longitude')
-    dataarray['latitude'] = xr.DataArray(lats, dims=['y', 'x'],
-                                         attrs={'name': "latitude",
-                                                'standard_name': "latitude",
-                                                'units': 'degrees_north'},
-                                         name='latitude')
+    dataarray["longitude"] = xr.DataArray(lons, dims=["y", "x"],
+                                          attrs={"name": "longitude",
+                                                 "standard_name": "longitude",
+                                                 "units": "degrees_east"},
+                                          name="longitude")
+    dataarray["latitude"] = xr.DataArray(lats, dims=["y", "x"],
+                                         attrs={"name": "latitude",
+                                                "standard_name": "latitude",
+                                                "units": "degrees_north"},
+                                         name="latitude")
     return dataarray
@@ -256,7 +256,7 @@ def _create_grid_mapping(area):
     """Create the grid mapping instance for `area`."""
     import pyproj

-    if Version(pyproj.__version__) < Version('2.4.1'):
+    if Version(pyproj.__version__) < Version("2.4.1"):
         # technically 2.2, but important bug fixes in 2.4.1
         raise ImportError("'cf' writer requires pyproj 2.4.1 or greater")
     # let pyproj do the heavily lifting (pyproj 2.0+ required)
@@ -267,18 +267,18 @@ def _create_grid_mapping(area):
 def _add_grid_mapping(dataarray):
     """Convert an area to at CF grid mapping."""
     dataarray = dataarray.copy()
-    area = dataarray.attrs['area']
+    area = dataarray.attrs["area"]
     gmapping_var_name, attrs = _create_grid_mapping(area)
-    dataarray.attrs['grid_mapping'] = gmapping_var_name
+    dataarray.attrs["grid_mapping"] = gmapping_var_name
     return dataarray, xr.DataArray(0, attrs=attrs, name=gmapping_var_name)


 def area2cf(dataarray, include_lonlats=False, got_lonlats=False):
     """Convert an area to at CF grid mapping or lon and lats."""
     res = []
-    if not got_lonlats and (isinstance(dataarray.attrs['area'], SwathDefinition) or include_lonlats):
+    if not got_lonlats and (isinstance(dataarray.attrs["area"], SwathDefinition) or include_lonlats):
         dataarray = add_lonlat_coords(dataarray)
-    if isinstance(dataarray.attrs['area'], AreaDefinition):
+    if isinstance(dataarray.attrs["area"], AreaDefinition):
         dataarray, gmapping = _add_grid_mapping(dataarray)
         res.append(gmapping)
     res.append(dataarray)
@@ -287,7 +287,7 @@ def area2cf(dataarray, include_lonlats=False, got_lonlats=False):

 def is_lon_or_lat_dataarray(dataarray):
     """Check if the DataArray represents the latitude or longitude coordinate."""
-    if 'standard_name' in dataarray.attrs and dataarray.attrs['standard_name'] in ['longitude', 'latitude']:
+    if "standard_name" in dataarray.attrs and dataarray.attrs["standard_name"] in ["longitude", "latitude"]:
         return True
     return False
@@ -339,12 +339,12 @@ def make_alt_coords_unique(datas, pretty=False):
             if pretty:
                 warnings.warn(
                     'Cannot pretty-format "{}" coordinates because they are '
-                    'not identical among the given datasets'.format(coord_name),
+                    "not identical among the given datasets".format(coord_name),
                     stacklevel=2
                 )
             for ds_name, dataset in datas.items():
                 if coord_name in dataset.coords:
-                    rename = {coord_name: '{}_{}'.format(ds_name, coord_name)}
+                    rename = {coord_name: "{}_{}".format(ds_name, coord_name)}
                     new_datas[ds_name] = new_datas[ds_name].rename(rename)

     return new_datas
@@ -355,15 +355,15 @@ def assert_xy_unique(datas):
     unique_x = set()
     unique_y = set()
     for dataset in datas.values():
-        if 'y' in dataset.dims:
-            token_y = tokenize(dataset['y'].data)
+        if "y" in dataset.dims:
+            token_y = tokenize(dataset["y"].data)
             unique_y.add(token_y)
-        if 'x' in dataset.dims:
-            token_x = tokenize(dataset['x'].data)
+        if "x" in dataset.dims:
+            token_x = tokenize(dataset["x"].data)
             unique_x.add(token_x)
     if len(unique_x) > 1 or len(unique_y) > 1:
-        raise ValueError('Datasets to be saved in one file (or one group) must have identical projection coordinates. '
-                         'Please group them by area or save them in separate files.')
+        raise ValueError("Datasets to be saved in one file (or one group) must have identical projection coordinates. "
+                         "Please group them by area or save them in separate files.")


 def link_coords(datas):
@@ -376,9 +376,9 @@ def link_coords(datas):
     """
     for da_name, data in datas.items():
-        declared_coordinates = data.attrs.get('coordinates', [])
+        declared_coordinates = data.attrs.get("coordinates", [])
         if isinstance(declared_coordinates, str):
-            declared_coordinates = declared_coordinates.split(' ')
+            declared_coordinates = declared_coordinates.split(" ")
         for coord in declared_coordinates:
             if coord not in data.coords:
                 try:
@@ -387,13 +387,13 @@ def link_coords(datas):
                 except KeyError:
                     warnings.warn(
                         'Coordinate "{}" referenced by dataarray {} does not '
-                        'exist, dropping reference.'.format(coord, da_name),
+                        "exist, dropping reference.".format(coord, da_name),
                         stacklevel=2
                     )
                     continue

         # Drop 'coordinates' attribute in any case to avoid conflicts in xr.Dataset.to_netcdf()
-        data.attrs.pop('coordinates', None)
+        data.attrs.pop("coordinates", None)


 # ###--------------------------------------------------------------------------.
@@ -410,11 +410,11 @@ def add_time_bounds_dimension(ds, time="time"):
                       if start_time is not None)
     end_time = min(end_time for end_time in end_times
                    if end_time is not None)
-    ds['time_bnds'] = xr.DataArray([[np.datetime64(start_time),
-                                     np.datetime64(end_time)]],
-                                   dims=['time', 'bnds_1d'])
-    ds[time].attrs['bounds'] = "time_bnds"
-    ds[time].attrs['standard_name'] = "time"
+    ds["time_bnds"] = xr.DataArray([[np.datetime64(start_time),
+                                     np.datetime64(end_time)]],
+                                   dims=["time", "bnds_1d"])
+    ds[time].attrs["bounds"] = "time_bnds"
+    ds[time].attrs["standard_name"] = "time"
     return ds
@@ -429,13 +429,13 @@ def _process_time_coord(dataarray, epoch):
     - the time coordinate has size 1

     """
-    if 'time' in dataarray.coords:
-        dataarray['time'].encoding['units'] = epoch
-        dataarray['time'].attrs['standard_name'] = 'time'
-        dataarray['time'].attrs.pop('bounds', None)
+    if "time" in dataarray.coords:
+        dataarray["time"].encoding["units"] = epoch
+        dataarray["time"].attrs["standard_name"] = "time"
+        dataarray["time"].attrs.pop("bounds", None)

-        if 'time' not in dataarray.dims and dataarray["time"].size not in dataarray.shape:
-            dataarray = dataarray.expand_dims('time')
+        if "time" not in dataarray.dims and dataarray["time"].size not in dataarray.shape:
+            dataarray = dataarray.expand_dims("time")

     return dataarray
@@ -503,7 +503,7 @@ def _encode_nc(obj):
             return [s.lower() for s in obj.astype(str)]
         return obj.tolist()

-    raise ValueError('Unable to encode')
+    raise ValueError("Unable to encode")


 def encode_nc(obj):
@@ -552,10 +552,10 @@ def encode_attrs_nc(attrs):

 def _add_ancillary_variables_attrs(dataarray):
     """Replace ancillary_variables DataArray with a list of their name."""
-    list_ancillary_variable_names = [da_ancillary.attrs['name']
-                                     for da_ancillary in dataarray.attrs.get('ancillary_variables', [])]
+    list_ancillary_variable_names = [da_ancillary.attrs["name"]
+                                     for da_ancillary in dataarray.attrs.get("ancillary_variables", [])]
     if list_ancillary_variable_names:
-        dataarray.attrs['ancillary_variables'] = ' '.join(list_ancillary_variable_names)
+        dataarray.attrs["ancillary_variables"] = " ".join(list_ancillary_variable_names)
     else:
         dataarray.attrs.pop("ancillary_variables", None)
     return dataarray
@@ -572,17 +572,17 @@ def _drop_exclude_attrs(dataarray, exclude_attrs):

 def _remove_satpy_attrs(new_data):
     """Remove _satpy attribute."""
-    satpy_attrs = [key for key in new_data.attrs if key.startswith('_satpy')]
+    satpy_attrs = [key for key in new_data.attrs if key.startswith("_satpy")]
     for satpy_attr in satpy_attrs:
         new_data.attrs.pop(satpy_attr)
-    new_data.attrs.pop('_last_resampler', None)
+    new_data.attrs.pop("_last_resampler", None)
     return new_data


 def _format_prerequisites_attrs(dataarray):
     """Reformat prerequisites attribute value to string."""
-    if 'prerequisites' in dataarray.attrs:
-        dataarray.attrs['prerequisites'] = [np.string_(str(prereq)) for prereq in dataarray.attrs['prerequisites']]
+    if "prerequisites" in dataarray.attrs:
+        dataarray.attrs["prerequisites"] = [np.string_(str(prereq)) for prereq in dataarray.attrs["prerequisites"]]
     return dataarray
@@ -603,8 +603,8 @@ def preprocess_datarray_attrs(dataarray, flatten_attrs, exclude_attrs):
     dataarray = _remove_none_attrs(dataarray)
     _ = dataarray.attrs.pop("area", None)

-    if 'long_name' not in dataarray.attrs and 'standard_name' not in dataarray.attrs:
-        dataarray.attrs['long_name'] = dataarray.name
+    if "long_name" not in dataarray.attrs and "standard_name" not in dataarray.attrs:
+        dataarray.attrs["long_name"] = dataarray.name

     if flatten_attrs:
         dataarray.attrs = flatten_dict(dataarray.attrs)
@@ -642,7 +642,7 @@ def _set_default_chunks(encoding, dataset):
                 variable.shape]).min(axis=0)
         )  # Chunksize may not exceed shape
         encoding.setdefault(var_name, {})
-        encoding[var_name].setdefault('chunksizes', chunks)
+        encoding[var_name].setdefault("chunksizes", chunks)
     return encoding
@@ -657,7 +657,7 @@ def _set_default_fill_value(encoding, dataset):
         coord_vars.extend(set(data_array.dims).intersection(data_array.coords))
     for coord_var in coord_vars:
         encoding.setdefault(coord_var, {})
-        encoding[coord_var].update({'_FillValue': None})
+        encoding[coord_var].update({"_FillValue": None})
     return encoding
@@ -668,20 +668,20 @@ def _set_default_time_encoding(encoding, dataset):

     Default is xarray's CF datetime encoding, which can be overridden
     by user-defined encoding.
""" - if 'time' in dataset: + if "time" in dataset: try: - dtnp64 = dataset['time'].data[0] + dtnp64 = dataset["time"].data[0] except IndexError: - dtnp64 = dataset['time'].data + dtnp64 = dataset["time"].data default = CFDatetimeCoder().encode(xr.DataArray(dtnp64)) - time_enc = {'units': default.attrs['units'], 'calendar': default.attrs['calendar']} - time_enc.update(encoding.get('time', {})) - bounds_enc = {'units': time_enc['units'], - 'calendar': time_enc['calendar'], - '_FillValue': None} - encoding['time'] = time_enc - encoding['time_bnds'] = bounds_enc # FUTURE: Not required anymore with xarray-0.14+ + time_enc = {"units": default.attrs["units"], "calendar": default.attrs["calendar"]} + time_enc.update(encoding.get("time", {})) + bounds_enc = {"units": time_enc["units"], + "calendar": time_enc["calendar"], + "_FillValue": None} + encoding["time"] = time_enc + encoding["time_bnds"] = bounds_enc # FUTURE: Not required anymore with xarray-0.14+ return encoding @@ -697,20 +697,20 @@ def _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix): for var_name in list(dataset.variables): if not numeric_name_prefix or not var_name.startswith(numeric_name_prefix): continue - orig_var_name = var_name.replace(numeric_name_prefix, '') + orig_var_name = var_name.replace(numeric_name_prefix, "") if orig_var_name in encoding: encoding[var_name] = encoding.pop(orig_var_name) return encoding -def update_encoding(dataset, to_netcdf_kwargs, numeric_name_prefix='CHANNEL_'): +def update_encoding(dataset, to_netcdf_kwargs, numeric_name_prefix="CHANNEL_"): """Update encoding. Preserve dask chunks, avoid fill values in coordinate variables and make sure that time & time bounds have the same units. """ other_to_netcdf_kwargs = to_netcdf_kwargs.copy() - encoding = other_to_netcdf_kwargs.pop('encoding', {}).copy() + encoding = other_to_netcdf_kwargs.pop("encoding", {}).copy() encoding = _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix) encoding = _set_default_chunks(encoding, dataset) encoding = _set_default_fill_value(encoding, dataset) @@ -728,7 +728,7 @@ def _handle_dataarray_name(original_name, numeric_name_prefix): new_name = numeric_name_prefix + original_name else: warnings.warn( - f'Invalid NetCDF dataset name: {original_name} starts with a digit.', + f"Invalid NetCDF dataset name: {original_name} starts with a digit.", stacklevel=5 ) new_name = original_name # occurs when numeric_name_prefix = '', None or False @@ -741,26 +741,26 @@ def _preprocess_dataarray_name(dataarray, numeric_name_prefix, include_orig_name """Change the DataArray name by prepending numeric_name_prefix if the name is a digit.""" original_name = None dataarray = dataarray.copy() - if 'name' in dataarray.attrs: - original_name = dataarray.attrs.pop('name') + if "name" in dataarray.attrs: + original_name = dataarray.attrs.pop("name") original_name, new_name = _handle_dataarray_name(original_name, numeric_name_prefix) dataarray = dataarray.rename(new_name) if include_orig_name and numeric_name_prefix and original_name and original_name != new_name: - dataarray.attrs['original_name'] = original_name + dataarray.attrs["original_name"] = original_name return dataarray def _add_history(attrs): """Add 'history' attribute to dictionary.""" - _history_create = 'Created by pytroll/satpy on {}'.format(datetime.utcnow()) - if 'history' in attrs: - if isinstance(attrs['history'], list): - attrs['history'] = ''.join(attrs['history']) - attrs['history'] += '\n' + _history_create + _history_create = "Created by 
pytroll/satpy on {}".format(datetime.utcnow()) + if "history" in attrs: + if isinstance(attrs["history"], list): + attrs["history"] = "".join(attrs["history"]) + attrs["history"] += "\n" + _history_create else: - attrs['history'] = _history_create + attrs["history"] = _history_create return attrs @@ -776,7 +776,7 @@ def _get_groups(groups, list_datarrays): grouped_dataarrays = defaultdict(list) for datarray in list_datarrays: for group_name, group_members in groups.items(): - if datarray.attrs['name'] in group_members: + if datarray.attrs["name"] in group_members: grouped_dataarrays[group_name].append(datarray) break return grouped_dataarrays @@ -787,7 +787,7 @@ def make_cf_dataarray(dataarray, flatten_attrs=False, exclude_attrs=None, include_orig_name=True, - numeric_name_prefix='CHANNEL_'): + numeric_name_prefix="CHANNEL_"): """Make the xr.DataArray CF-compliant. Parameters @@ -833,7 +833,7 @@ def _collect_cf_dataset(list_dataarrays, include_lonlats=True, pretty=False, include_orig_name=True, - numeric_name_prefix='CHANNEL_'): + numeric_name_prefix="CHANNEL_"): """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Dataset. Parameters @@ -881,7 +881,7 @@ def _collect_cf_dataset(list_dataarrays, dataarray_type = dataarray.dtype if dataarray_type not in CF_DTYPES: warnings.warn( - f'dtype {dataarray_type} not compatible with {CF_VERSION}.', + f"dtype {dataarray_type} not compatible with {CF_VERSION}.", stacklevel=3 ) # Deep copy the datarray since adding/modifying attributes and coordinates @@ -938,7 +938,7 @@ def collect_cf_datasets(list_dataarrays, include_lonlats=True, epoch=EPOCH, include_orig_name=True, - numeric_name_prefix='CHANNEL_', + numeric_name_prefix="CHANNEL_", groups=None): """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Datasets. @@ -1003,7 +1003,7 @@ def collect_cf_datasets(list_dataarrays, # If not grouped, add CF conventions. # - If 'Conventions' key already present, do not overwrite ! 
if "Conventions" not in header_attrs and not is_grouped: - header_attrs['Conventions'] = CF_VERSION + header_attrs["Conventions"] = CF_VERSION # Create dictionary of group xr.Datasets # --> If no groups (groups=None) --> group_name=None @@ -1022,7 +1022,7 @@ def collect_cf_datasets(list_dataarrays, if not is_grouped: ds.attrs = header_attrs - if 'time' in ds: + if "time" in ds: ds = add_time_bounds_dimension(ds, time="time") grouped_datasets[group_name] = ds @@ -1032,7 +1032,7 @@ def collect_cf_datasets(list_dataarrays, def _sanitize_writer_kwargs(writer_kwargs): """Remove satpy-specific kwargs.""" writer_kwargs = copy.deepcopy(writer_kwargs) - satpy_kwargs = ['overlay', 'decorate', 'config_files'] + satpy_kwargs = ["overlay", "decorate", "config_files"] for kwarg in satpy_kwargs: writer_kwargs.pop(kwarg, None) return writer_kwargs @@ -1042,9 +1042,9 @@ def _initialize_root_netcdf(filename, engine, header_attrs, to_netcdf_kwargs): """Initialize root empty netCDF.""" root = xr.Dataset({}, attrs=header_attrs) init_nc_kwargs = to_netcdf_kwargs.copy() - init_nc_kwargs.pop('encoding', None) # No variables to be encoded at this point - init_nc_kwargs.pop('unlimited_dims', None) - written = [root.to_netcdf(filename, engine=engine, mode='w', **init_nc_kwargs)] + init_nc_kwargs.pop("encoding", None) # No variables to be encoded at this point + init_nc_kwargs.pop("unlimited_dims", None) + written = [root.to_netcdf(filename, engine=engine, mode="w", **init_nc_kwargs)] return written @@ -1053,7 +1053,7 @@ class CFWriter(Writer): @staticmethod def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, - include_orig_name=True, numeric_name_prefix='CHANNEL_'): + include_orig_name=True, numeric_name_prefix="CHANNEL_"): """Convert the dataarray to something cf-compatible. Args: @@ -1070,8 +1070,8 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, numeric_name_prefix (str): Prepend dataset name with this if starting with a digit """ - warnings.warn('CFWriter.da2cf is deprecated.' - 'Use satpy.writers.cf_writer.make_cf_dataarray instead.', + warnings.warn("CFWriter.da2cf is deprecated." + "Use satpy.writers.cf_writer.make_cf_dataarray instead.", DeprecationWarning, stacklevel=3) return make_cf_dataarray(dataarray=dataarray, epoch=epoch, @@ -1083,8 +1083,8 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, @staticmethod def update_encoding(dataset, to_netcdf_kwargs): """Update encoding info (deprecated).""" - warnings.warn('CFWriter.update_encoding is deprecated. ' - 'Use satpy.writers.cf_writer.update_encoding instead.', + warnings.warn("CFWriter.update_encoding is deprecated. " + "Use satpy.writers.cf_writer.update_encoding instead.", DeprecationWarning, stacklevel=3) return update_encoding(dataset, to_netcdf_kwargs) @@ -1094,7 +1094,7 @@ def save_dataset(self, dataset, filename=None, fill_value=None, **kwargs): def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, engine=None, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, include_lonlats=True, pretty=False, - include_orig_name=True, numeric_name_prefix='CHANNEL_', **to_netcdf_kwargs): + include_orig_name=True, numeric_name_prefix="CHANNEL_", **to_netcdf_kwargs): """Save the given datasets in one netCDF file. Note that all datasets (if grouping: in one group) must have the same projection coordinates. 
@@ -1130,7 +1130,7 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None,
             Prefix to add the each variable with name starting with a digit.
             Use '' or None to leave this out.
         """
-        logger.info('Saving datasets to NetCDF4/CF.')
+        logger.info("Saving datasets to NetCDF4/CF.")
         _check_backend_versions()

         # Define netCDF filename if not provided
diff --git a/satpy/writers/geotiff.py b/satpy/writers/geotiff.py
index ba3cad7d6a..1a522ecd68 100644
--- a/satpy/writers/geotiff.py
+++ b/satpy/writers/geotiff.py
@@ -131,7 +131,7 @@ def separate_init_kwargs(cls, kwargs):
         # FUTURE: Don't pass Scene.save_datasets kwargs to init and here
         init_kwargs, kwargs = super(GeoTIFFWriter, cls).separate_init_kwargs(
             kwargs)
-        for kw in ['dtype', 'tags']:
+        for kw in ["dtype", "tags"]:
             if kw in kwargs:
                 init_kwargs[kw] = kwargs.pop(kw)
@@ -246,7 +246,7 @@ def save_image(
         gdal_options = self._get_gdal_options(kwargs)
         if fill_value is None:
             # fall back to fill_value from configuration file
-            fill_value = self.info.get('fill_value')
+            fill_value = self.info.get("fill_value")

         dtype = dtype if dtype is not None else self.dtype
         if dtype is None and self.enhancer is not False:
@@ -268,14 +268,14 @@ def save_image(
             fill_value = np.nan
         if keep_palette and cmap is None and img.palette is not None:
             from satpy.enhancements import create_colormap
-            cmap = create_colormap({'colors': img.palette})
+            cmap = create_colormap({"colors": img.palette})
             cmap.set_range(0, len(img.palette) - 1)

         if tags is None:
             tags = {}
         tags.update(self.tags)

-        return img.save(filename, fformat='tif', driver=driver,
+        return img.save(filename, fformat="tif", driver=driver,
                         fill_value=fill_value, dtype=dtype, compute=compute,
                         keep_palette=keep_palette, cmap=cmap,
diff --git a/satpy/writers/mitiff.py b/satpy/writers/mitiff.py
index 11f847c114..950fce8b21 100644
--- a/satpy/writers/mitiff.py
+++ b/satpy/writers/mitiff.py
@@ -35,19 +35,19 @@

 def _adjust_kwargs(dataset, kwargs):
-    if 'platform_name' not in kwargs:
-        kwargs['platform_name'] = dataset.attrs['platform_name']
-    if 'name' not in kwargs:
-        kwargs['name'] = dataset.attrs['name']
-    if 'start_time' not in kwargs:
-        kwargs['start_time'] = dataset.attrs['start_time']
-    if 'sensor' not in kwargs:
-        kwargs['sensor'] = dataset.attrs['sensor']
+    if "platform_name" not in kwargs:
+        kwargs["platform_name"] = dataset.attrs["platform_name"]
+    if "name" not in kwargs:
+        kwargs["name"] = dataset.attrs["name"]
+    if "start_time" not in kwargs:
+        kwargs["start_time"] = dataset.attrs["start_time"]
+    if "sensor" not in kwargs:
+        kwargs["sensor"] = dataset.attrs["sensor"]
     # Sensor attrs could be set. MITIFFs needing to handle sensor can only have one sensor
     # Assume the first value of set as the sensor.
-    if isinstance(kwargs['sensor'], set):
-        LOG.warning('Sensor is set, will use the first value: %s', kwargs['sensor'])
-        kwargs['sensor'] = (list(kwargs['sensor']))[0]
+    if isinstance(kwargs["sensor"], set):
+        LOG.warning("Sensor is set, will use the first value: %s", kwargs["sensor"])
+        kwargs["sensor"] = (list(kwargs["sensor"]))[0]
 
 
 class MITIFFWriter(ImageWriter):
@@ -80,22 +80,22 @@ def save_dataset(self, dataset, filename=None, fill_value=None,
 
         def _delayed_create(dataset):
             try:
-                if 'palette' in kwargs:
-                    self.palette = kwargs['palette']
+                if "palette" in kwargs:
+                    self.palette = kwargs["palette"]
 
                 _adjust_kwargs(dataset, kwargs)
 
                 try:
-                    self.mitiff_config[kwargs['sensor']] = dataset.attrs['metadata_requirements']['config']
-                    self.channel_order[kwargs['sensor']] = dataset.attrs['metadata_requirements']['order']
-                    self.file_pattern = dataset.attrs['metadata_requirements']['file_pattern']
+                    self.mitiff_config[kwargs["sensor"]] = dataset.attrs["metadata_requirements"]["config"]
+                    self.channel_order[kwargs["sensor"]] = dataset.attrs["metadata_requirements"]["order"]
+                    self.file_pattern = dataset.attrs["metadata_requirements"]["file_pattern"]
                 except KeyError:
                     # For some mitiff products this info is needed, for others not.
                     # If needed you should know how to fix this
                     pass
 
                 try:
-                    self.translate_channel_name[kwargs['sensor']] = \
-                        dataset.attrs['metadata_requirements']['translate']
+                    self.translate_channel_name[kwargs["sensor"]] = \
+                        dataset.attrs["metadata_requirements"]["translate"]
                 except KeyError:
                     # For some mitiff products this info is needed, for others not.
                     # If needed you should know how to fix this
@@ -127,11 +127,11 @@ def _delayed_create(datasets):
 
                 _adjust_kwargs(dataset, kwargs)
 
                 try:
-                    self.mitiff_config[kwargs['sensor']] = dataset.attrs['metadata_requirements']['config']
-                    translate = dataset.attrs['metadata_requirements']['translate']
-                    self.translate_channel_name[kwargs['sensor']] = translate
-                    self.channel_order[kwargs['sensor']] = dataset.attrs['metadata_requirements']['order']
-                    self.file_pattern = dataset.attrs['metadata_requirements']['file_pattern']
+                    self.mitiff_config[kwargs["sensor"]] = dataset.attrs["metadata_requirements"]["config"]
+                    translate = dataset.attrs["metadata_requirements"]["translate"]
+                    self.translate_channel_name[kwargs["sensor"]] = translate
+                    self.channel_order[kwargs["sensor"]] = dataset.attrs["metadata_requirements"]["order"]
+                    self.file_pattern = dataset.attrs["metadata_requirements"]["file_pattern"]
                 except KeyError:
                     # For some mitiff products this info is needed, for others not.
                    # If needed you should know how to fix this
@@ -140,9 +140,9 @@ def _delayed_create(datasets):
 
                 image_description = self._make_image_description(datasets, **kwargs)
                 LOG.debug("File pattern %s", self.file_pattern)
                 if isinstance(datasets, list):
-                    kwargs['start_time'] = dataset.attrs['start_time']
+                    kwargs["start_time"] = dataset.attrs["start_time"]
                 else:
-                    kwargs['start_time'] = datasets.attrs['start_time']
+                    kwargs["start_time"] = datasets.attrs["start_time"]
                 gen_filename = filename or self.get_filename(**kwargs)
                 LOG.info("Saving mitiff to: %s ...", gen_filename)
                 self._save_datasets_as_mitiff(datasets, image_description, gen_filename, **kwargs)
@@ -161,8 +161,8 @@ def _make_channel_list(self, datasets, **kwargs):
         if self.channel_order:
             channels = self._reorder_channels(datasets, **kwargs)
         elif self.palette:
-            if 'palette_channel_name' in kwargs:
-                channels.append(kwargs['palette_channel_name'].upper())
+            if "palette_channel_name" in kwargs:
+                channels.append(kwargs["palette_channel_name"].upper())
             else:
                 LOG.error("Is palette but can not find palette_channel_name to name the dataset")
         else:
@@ -175,17 +175,17 @@ def _reorder_channels(self, datasets, **kwargs):
         channels = []
-        for cn in self.channel_order[kwargs['sensor']]:
+        for cn in self.channel_order[kwargs["sensor"]]:
             for ch, ds in enumerate(datasets):
-                if isinstance(ds.attrs['prerequisites'][ch], (DataQuery, DataID)):
-                    if ds.attrs['prerequisites'][ch]['name'] == cn:
+                if isinstance(ds.attrs["prerequisites"][ch], (DataQuery, DataID)):
+                    if ds.attrs["prerequisites"][ch]["name"] == cn:
                         channels.append(
-                            ds.attrs['prerequisites'][ch]['name'])
+                            ds.attrs["prerequisites"][ch]["name"])
                         break
                 else:
-                    if ds.attrs['prerequisites'][ch] == cn:
+                    if ds.attrs["prerequisites"][ch] == cn:
                         channels.append(
-                            ds.attrs['prerequisites'][ch])
+                            ds.attrs["prerequisites"][ch])
                         break
         return channels
@@ -194,29 +194,29 @@ def _channel_names(self, channels, cns, **kwargs):
         for ch in channels:
             try:
                 _image_description += str(
-                    self.mitiff_config[kwargs['sensor']][cns.get(ch, ch)]['alias'])
+                    self.mitiff_config[kwargs["sensor"]][cns.get(ch, ch)]["alias"])
             except KeyError:
                 _image_description += str(ch)
-            _image_description += ' '
+            _image_description += " "
 
         # Replace last char(space) with \n
         _image_description = _image_description[:-1]
-        _image_description += '\n'
+        _image_description += "\n"
 
         return _image_description
 
     def _add_sizes(self, datasets, first_dataset):
-        _image_description = ' Xsize: '
+        _image_description = " Xsize: "
         if isinstance(datasets, list):
-            _image_description += str(first_dataset.sizes['x']) + '\n'
+            _image_description += str(first_dataset.sizes["x"]) + "\n"
         else:
-            _image_description += str(datasets.sizes['x']) + '\n'
+            _image_description += str(datasets.sizes["x"]) + "\n"
 
-        _image_description += ' Ysize: '
+        _image_description += " Ysize: "
         if isinstance(datasets, list):
-            _image_description += str(first_dataset.sizes['y']) + '\n'
+            _image_description += str(first_dataset.sizes["y"]) + "\n"
         else:
-            _image_description += str(datasets.sizes['y']) + '\n'
+            _image_description += str(datasets.sizes["y"]) + "\n"
 
         return _image_description
@@ -224,12 +224,12 @@ def _add_proj4_string(self, datasets, first_dataset):
         proj4_string = " Proj string: "
 
         if isinstance(datasets, list):
-            area = first_dataset.attrs['area']
+            area = first_dataset.attrs["area"]
         else:
-            area = datasets.attrs['area']
+            area = datasets.attrs["area"]
         # Use pyproj's CRS object to get a valid EPSG code if possible
         # only in newer pyresample versions with pyproj 2.0+ installed
-        if hasattr(area, 'crs') and area.crs.to_epsg() is not None:
+        if hasattr(area, "crs") and area.crs.to_epsg() is not None:
             proj4_string += "+init=EPSG:{}".format(area.crs.to_epsg())
         else:
             proj4_string += area.proj_str
@@ -239,23 +239,23 @@
         # FUTURE: Use pyproj 2.0+ to convert EPSG to PROJ4 if possible
         proj4_string, x_0 = self._convert_epsg_to_proj(proj4_string, x_0)
 
-        if 'geos' in proj4_string:
+        if "geos" in proj4_string:
             proj4_string = proj4_string.replace("+sweep=x ", "")
-            if '+a=6378137.0 +b=6356752.31414' in proj4_string:
+            if "+a=6378137.0 +b=6356752.31414" in proj4_string:
                 proj4_string = proj4_string.replace("+a=6378137.0 +b=6356752.31414",
                                                     "+ellps=WGS84")
-            if '+units=m' in proj4_string:
+            if "+units=m" in proj4_string:
                 proj4_string = proj4_string.replace("+units=m", "+units=km")
 
-        if not any(datum in proj4_string for datum in ['datum', 'towgs84']):
-            proj4_string += ' +towgs84=0,0,0'
+        if not any(datum in proj4_string for datum in ["datum", "towgs84"]):
+            proj4_string += " +towgs84=0,0,0"
 
-        if 'units' not in proj4_string:
-            proj4_string += ' +units=km'
+        if "units" not in proj4_string:
+            proj4_string += " +units=km"
 
         proj4_string = self._append_projection_center(proj4_string, datasets, first_dataset, x_0, y_0)
         LOG.debug("proj4_string: %s", proj4_string)
-        proj4_string += '\n'
+        proj4_string += "\n"
 
         return proj4_string
@@ -264,59 +264,59 @@ def _append_projection_center(self, proj4_string, datasets, first_dataset, x_0,
             dataset = first_dataset
         else:
             dataset = datasets
-        if 'x_0' not in proj4_string:
-            proj4_string += ' +x_0=%.6f' % (
-                (-dataset.attrs['area'].area_extent[0] +
-                 dataset.attrs['area'].pixel_size_x) + x_0)
-            proj4_string += ' +y_0=%.6f' % (
-                (-dataset.attrs['area'].area_extent[1] +
-                 dataset.attrs['area'].pixel_size_y) + y_0)
-        elif '+x_0=0' in proj4_string and '+y_0=0' in proj4_string:
-            proj4_string = proj4_string.replace("+x_0=0", '+x_0=%.6f' % (
-                (-dataset.attrs['area'].area_extent[0] +
-                 dataset.attrs['area'].pixel_size_x) + x_0))
-            proj4_string = proj4_string.replace("+y_0=0", '+y_0=%.6f' % (
-                (-dataset.attrs['area'].area_extent[1] +
-                 dataset.attrs['area'].pixel_size_y) + y_0))
+        if "x_0" not in proj4_string:
+            proj4_string += " +x_0=%.6f" % (
+                (-dataset.attrs["area"].area_extent[0] +
+                 dataset.attrs["area"].pixel_size_x) + x_0)
+            proj4_string += " +y_0=%.6f" % (
+                (-dataset.attrs["area"].area_extent[1] +
+                 dataset.attrs["area"].pixel_size_y) + y_0)
+        elif "+x_0=0" in proj4_string and "+y_0=0" in proj4_string:
+            proj4_string = proj4_string.replace("+x_0=0", "+x_0=%.6f" % (
+                (-dataset.attrs["area"].area_extent[0] +
+                 dataset.attrs["area"].pixel_size_x) + x_0))
+            proj4_string = proj4_string.replace("+y_0=0", "+y_0=%.6f" % (
+                (-dataset.attrs["area"].area_extent[1] +
+                 dataset.attrs["area"].pixel_size_y) + y_0))
 
         return proj4_string
 
     def _convert_epsg_to_proj(self, proj4_string, x_0):
-        if 'EPSG:32631' in proj4_string:
+        if "EPSG:32631" in proj4_string:
            proj4_string = proj4_string.replace("+init=EPSG:32631",
                                                "+proj=etmerc +lat_0=0 +lon_0=3 +k=0.9996 +ellps=WGS84 +datum=WGS84")
            x_0 = 500000
-        elif 'EPSG:32632' in proj4_string:
+        elif "EPSG:32632" in proj4_string:
            proj4_string = proj4_string.replace("+init=EPSG:32632",
                                                "+proj=etmerc +lat_0=0 +lon_0=9 +k=0.9996 +ellps=WGS84 +datum=WGS84")
            x_0 = 500000
-        elif 'EPSG:32633' in proj4_string:
+        elif "EPSG:32633" in proj4_string:
            proj4_string = proj4_string.replace("+init=EPSG:32633",
                                                "+proj=etmerc +lat_0=0 +lon_0=15 +k=0.9996 +ellps=WGS84 +datum=WGS84")
            x_0 = 500000
-        elif 'EPSG:32634' in proj4_string:
+        elif "EPSG:32634" in proj4_string:
            proj4_string = proj4_string.replace("+init=EPSG:32634",
                                                "+proj=etmerc +lat_0=0 +lon_0=21 +k=0.9996 +ellps=WGS84 +datum=WGS84")
            x_0 = 500000
-        elif 'EPSG:32635' in proj4_string:
+        elif "EPSG:32635" in proj4_string:
            proj4_string = proj4_string.replace("+init=EPSG:32635",
                                                "+proj=etmerc +lat_0=0 +lon_0=27 +k=0.9996 +ellps=WGS84 +datum=WGS84")
            x_0 = 500000
-        elif 'EPSG' in proj4_string:
+        elif "EPSG" in proj4_string:
             LOG.warning("EPSG used in proj string but not converted. Please add this in code")
         return proj4_string, x_0
 
     def _add_pixel_sizes(self, datasets, first_dataset):
         _image_description = ""
         if isinstance(datasets, list):
-            _image_description += ' Ax: %.6f' % (
-                first_dataset.attrs['area'].pixel_size_x / 1000.)
-            _image_description += ' Ay: %.6f' % (
-                first_dataset.attrs['area'].pixel_size_y / 1000.)
+            _image_description += " Ax: %.6f" % (
+                first_dataset.attrs["area"].pixel_size_x / 1000.)
+            _image_description += " Ay: %.6f" % (
+                first_dataset.attrs["area"].pixel_size_y / 1000.)
         else:
-            _image_description += ' Ax: %.6f' % (
-                datasets.attrs['area'].pixel_size_x / 1000.)
-            _image_description += ' Ay: %.6f' % (
-                datasets.attrs['area'].pixel_size_y / 1000.)
+            _image_description += " Ax: %.6f" % (
+                datasets.attrs["area"].pixel_size_x / 1000.)
+            _image_description += " Ay: %.6f" % (
+                datasets.attrs["area"].pixel_size_y / 1000.)
 
         return _image_description
@@ -326,21 +326,21 @@ def _add_corners(self, datasets, first_dataset):
         # Therefor use the center of the upper left pixel.
         _image_description = ""
         if isinstance(datasets, list):
-            _image_description += ' Bx: %.6f' % (
-                first_dataset.attrs['area'].area_extent[0] / 1000. +
-                first_dataset.attrs['area'].pixel_size_x / 1000. / 2.)  # LL_x
-            _image_description += ' By: %.6f' % (
-                first_dataset.attrs['area'].area_extent[3] / 1000. -
-                first_dataset.attrs['area'].pixel_size_y / 1000. / 2.)  # UR_y
+            _image_description += " Bx: %.6f" % (
+                first_dataset.attrs["area"].area_extent[0] / 1000. +
+                first_dataset.attrs["area"].pixel_size_x / 1000. / 2.)  # LL_x
+            _image_description += " By: %.6f" % (
+                first_dataset.attrs["area"].area_extent[3] / 1000. -
+                first_dataset.attrs["area"].pixel_size_y / 1000. / 2.)  # UR_y
         else:
-            _image_description += ' Bx: %.6f' % (
-                datasets.attrs['area'].area_extent[0] / 1000. +
-                datasets.attrs['area'].pixel_size_x / 1000. / 2.)  # LL_x
-            _image_description += ' By: %.6f' % (
-                datasets.attrs['area'].area_extent[3] / 1000. -
-                datasets.attrs['area'].pixel_size_y / 1000. / 2.)  # UR_y
-
-        _image_description += '\n'
+            _image_description += " Bx: %.6f" % (
+                datasets.attrs["area"].area_extent[0] / 1000. +
+                datasets.attrs["area"].pixel_size_x / 1000. / 2.)  # LL_x
+            _image_description += " By: %.6f" % (
+                datasets.attrs["area"].area_extent[3] / 1000. -
+                datasets.attrs["area"].pixel_size_y / 1000. / 2.)  # UR_y
+
+        _image_description += "\n"
 
         return _image_description
 
    def _add_calibration_datasets(self, ch, datasets, reverse_offset, reverse_scale, decimals):
@@ -351,34 +351,34 @@ def _add_calibration_datasets(self, ch, datasets, reverse_offset, reverse_scale,
         found_calibration = False
         skip_calibration = False
         ds_list = datasets
-        if not isinstance(datasets, list) and 'bands' not in datasets.sizes:
+        if not isinstance(datasets, list) and "bands" not in datasets.sizes:
             ds_list = [datasets]
 
         for i, ds in enumerate(ds_list):
-            if ('prerequisites' in ds.attrs and
-                    isinstance(ds.attrs['prerequisites'], list) and
-                    len(ds.attrs['prerequisites']) >= i + 1 and
-                    isinstance(ds.attrs['prerequisites'][i], (DataQuery, DataID))):
-                if ds.attrs['prerequisites'][i].get('name') == str(ch):
-                    if ds.attrs['prerequisites'][i].get('calibration') == 'RADIANCE':
+            if ("prerequisites" in ds.attrs and
+                    isinstance(ds.attrs["prerequisites"], list) and
+                    len(ds.attrs["prerequisites"]) >= i + 1 and
+                    isinstance(ds.attrs["prerequisites"][i], (DataQuery, DataID))):
+                if ds.attrs["prerequisites"][i].get("name") == str(ch):
+                    if ds.attrs["prerequisites"][i].get("calibration") == "RADIANCE":
                         raise NotImplementedError(
                             "Mitiff radiance calibration not implemented.")
                         # _table_calibration += ', Radiance, '
                        # _table_calibration += '[W/m²/µm/sr]'
                        # _decimals = 8
-                    elif ds.attrs['prerequisites'][i].get('calibration') == 'brightness_temperature':
+                    elif ds.attrs["prerequisites"][i].get("calibration") == "brightness_temperature":
                         found_calibration = True
-                        _table_calibration += ', BT, '
+                        _table_calibration += ", BT, "
                         _table_calibration += "\N{DEGREE SIGN}"
-                        _table_calibration += u'[C]'
+                        _table_calibration += u"[C]"
 
                         _reverse_offset = 255.
                         _reverse_scale = -1.
                         _decimals = 2
-                    elif ds.attrs['prerequisites'][i].get('calibration') == 'reflectance':
+                    elif ds.attrs["prerequisites"][i].get("calibration") == "reflectance":
                         found_calibration = True
-                        _table_calibration += ', Reflectance(Albedo), '
-                        _table_calibration += '[%]'
+                        _table_calibration += ", Reflectance(Albedo), "
+                        _table_calibration += "[%]"
                         _decimals = 2
                     else:
                         LOG.warning("Unknown calib type. Must be Radiance, Reflectance or BT.")
@@ -399,13 +399,13 @@ def _add_calibration_datasets(self, ch, datasets, reverse_offset, reverse_scale,
 
     def _add_palette_info(self, datasets, palette_unit, palette_description, **kwargs):
         # mitiff key word for palette interpretion
-        _palette = '\n COLOR INFO:\n'
+        _palette = "\n COLOR INFO:\n"
         # mitiff info for the unit of the interpretion
-        _palette += ' {}\n'.format(palette_unit)
+        _palette += " {}\n".format(palette_unit)
         # The length of the palette description as needed by mitiff in DIANA
-        _palette += ' {}\n'.format(len(palette_description))
+        _palette += " {}\n".format(len(palette_description))
         for desc in palette_description:
-            _palette += ' {}\n'.format(desc)
+            _palette += " {}\n".format(desc)
         return _palette
 
     def _add_calibration(self, channels, cns, datasets, **kwargs):
@@ -419,10 +419,10 @@ def _add_calibration(self, channels, cns, datasets, **kwargs):
             if palette:
                 raise NotImplementedError("Mitiff palette saving is not implemented.")
             else:
-                _table_calibration += 'Table_calibration: '
+                _table_calibration += "Table_calibration: "
                 try:
                     _table_calibration += str(
-                        self.mitiff_config[kwargs['sensor']][cns.get(ch, ch)]['alias'])
+                        self.mitiff_config[kwargs["sensor"]][cns.get(ch, ch)]["alias"])
                 except KeyError:
                     _table_calibration += str(ch)
 
@@ -435,18 +435,18 @@ def _add_calibration(self, channels, cns, datasets, **kwargs):
                 _table_calibration += __table_calibration
 
                 if not skip_calibration:
-                    _table_calibration += ', 8, [ '
+                    _table_calibration += ", 8, [ "
                     for val in range(0, 256):
                         # Comma separated list of values
-                        _table_calibration += '{0:.{1}f} '.format((float(self.mitiff_config[
-                            kwargs['sensor']][cns.get(ch, ch)]['min-val']) +
+                        _table_calibration += "{0:.{1}f} ".format((float(self.mitiff_config[
+                            kwargs["sensor"]][cns.get(ch, ch)]["min-val"]) +
                             ((_reverse_offset + _reverse_scale * val) *
-                            (float(self.mitiff_config[kwargs['sensor']][cns.get(ch, ch)]['max-val']) -
-                             float(self.mitiff_config[kwargs['sensor']][cns.get(ch, ch)]['min-val']))) / 255.),
+                            (float(self.mitiff_config[kwargs["sensor"]][cns.get(ch, ch)]["max-val"]) -
+                             float(self.mitiff_config[kwargs["sensor"]][cns.get(ch, ch)]["min-val"]))) / 255.),
                             _decimals)
                         # _table_calibration += '0.00000000 '
-                    _table_calibration += ']\n\n'
+                    _table_calibration += "]\n\n"
                 else:
                     _table_calibration = ""
@@ -498,14 +498,14 @@ def _make_image_description(self, datasets, **kwargs):
         []\n\n
 
         """
-        translate_platform_name = {'metop01': 'Metop-B',
-                                   'metop02': 'Metop-A',
-                                   'metop03': 'Metop-C',
-                                   'noaa15': 'NOAA-15',
-                                   'noaa16': 'NOAA-16',
-                                   'noaa17': 'NOAA-17',
-                                   'noaa18': 'NOAA-18',
-                                   'noaa19': 'NOAA-19'}
+        translate_platform_name = {"metop01": "Metop-B",
+                                   "metop02": "Metop-A",
+                                   "metop03": "Metop-C",
+                                   "noaa15": "NOAA-15",
+                                   "noaa16": "NOAA-16",
+                                   "noaa17": "NOAA-17",
+                                   "noaa18": "NOAA-18",
+                                   "noaa19": "NOAA-19"}
 
         first_dataset = datasets
         if isinstance(datasets, list):
@@ -514,40 +514,40 @@ def _make_image_description(self, datasets, **kwargs):
 
         _platform_name = self._get_platform_name(first_dataset, translate_platform_name, kwargs)
 
-        _image_description = ''
-        _image_description.encode('utf-8')
+        _image_description = ""
+        _image_description.encode("utf-8")
 
-        _image_description += ' Satellite: '
+        _image_description += " Satellite: "
        if _platform_name is not None:
            _image_description += _platform_name
 
-        _image_description += '\n'
+        _image_description += "\n"
 
-        _image_description += ' Date and Time: '
+        _image_description += " Date and Time: "
        # Select earliest start_time
        first = True
        earliest = 0
        for dataset in datasets:
            if first:
-                earliest = dataset.attrs['start_time']
+                earliest = dataset.attrs["start_time"]
            else:
-                if dataset.attrs['start_time'] < earliest:
-                    earliest = dataset.attrs['start_time']
+                if dataset.attrs["start_time"] < earliest:
+                    earliest = dataset.attrs["start_time"]
            first = False
        LOG.debug("earliest start_time: %s", earliest)
        _image_description += earliest.strftime("%H:%M %d/%m-%Y\n")
 
-        _image_description += ' SatDir: 0\n'
+        _image_description += " SatDir: 0\n"
 
-        _image_description += ' Channels: '
+        _image_description += " Channels: "
 
        _image_description += self._get_dataset_len(datasets)
 
-        _image_description += ' In this file: '
+        _image_description += " In this file: "
 
        channels = self._make_channel_list(datasets, **kwargs)
 
        try:
-            cns = self.translate_channel_name.get(kwargs['sensor'], {})
+            cns = self.translate_channel_name.get(kwargs["sensor"], {})
        except KeyError:
            pass
@@ -555,25 +555,25 @@ def _make_image_description(self, datasets, **kwargs):
 
         _image_description += self._add_sizes(datasets, first_dataset)
 
-        _image_description += ' Map projection: Stereographic\n'
+        _image_description += " Map projection: Stereographic\n"
 
         _image_description += self._add_proj4_string(datasets, first_dataset)
 
-        _image_description += ' TrueLat: 60N\n'
-        _image_description += ' GridRot: 0\n'
+        _image_description += " TrueLat: 60N\n"
+        _image_description += " GridRot: 0\n"
 
-        _image_description += ' Xunit:1000 m Yunit: 1000 m\n'
+        _image_description += " Xunit:1000 m Yunit: 1000 m\n"
 
-        _image_description += ' NPX: %.6f' % (0)
-        _image_description += ' NPY: %.6f' % (0) + '\n'
+        _image_description += " NPX: %.6f" % (0)
+        _image_description += " NPY: %.6f" % (0) + "\n"
 
         _image_description += self._add_pixel_sizes(datasets, first_dataset)
         _image_description += self._add_corners(datasets, first_dataset)
 
         if isinstance(datasets, list):
-            LOG.debug("Area extent: %s", first_dataset.attrs['area'].area_extent)
+            LOG.debug("Area extent: %s", first_dataset.attrs["area"].area_extent)
         else:
-            LOG.debug("Area extent: %s", datasets.attrs['area'].area_extent)
+            LOG.debug("Area extent: %s", datasets.attrs["area"].area_extent)
 
         if self.palette:
             LOG.debug("Doing palette image")
@@ -587,24 +587,24 @@ def _get_dataset_len(self, datasets):
         if isinstance(datasets, list):
             LOG.debug("len datasets: %s", len(datasets))
             dataset_len = str(len(datasets))
-        elif 'bands' in datasets.sizes:
-            LOG.debug("len datasets: %s", datasets.sizes['bands'])
-            dataset_len = str(datasets.sizes['bands'])
+        elif "bands" in datasets.sizes:
+            LOG.debug("len datasets: %s", datasets.sizes["bands"])
+            dataset_len = str(datasets.sizes["bands"])
         elif len(datasets.sizes) == 2:
             LOG.debug("len datasets: 1")
-            dataset_len = '1'
+            dataset_len = "1"
         else:
             dataset_len = ""
 
         return dataset_len
 
     def _get_platform_name(self, first_dataset, translate_platform_name, kwargs):
-        if 'platform_name' in first_dataset.attrs:
+        if "platform_name" in first_dataset.attrs:
             _platform_name = translate_platform_name.get(
-                first_dataset.attrs['platform_name'],
-                first_dataset.attrs['platform_name'])
-        elif 'platform_name' in kwargs:
+                first_dataset.attrs["platform_name"],
+                first_dataset.attrs["platform_name"])
+        elif "platform_name" in kwargs:
             _platform_name = translate_platform_name.get(
-                kwargs['platform_name'], kwargs['platform_name'])
+                kwargs["platform_name"], kwargs["platform_name"])
         else:
             _platform_name = None
 
         return _platform_name
@@ -612,7 +612,7 @@ def _get_platform_name(self, first_dataset, translate_platform_name, kwargs):
 
     def _calibrate_data(self, dataset, calibration, min_val, max_val):
         reverse_offset = 0.
         reverse_scale = 1.
-        if calibration == 'brightness_temperature':
+        if calibration == "brightness_temperature":
             # If data is brightness temperature, the data must be inverted.
             reverse_offset = 255.
             reverse_scale = -1.
@@ -631,44 +631,44 @@ def _save_as_palette(self, datasets, tmp_gen_filename, tiffinfo, **kwargs):
         # The value of the component is used as an index into the red, green and blue curves
         # in the ColorMap field to retrieve an RGB triplet that defines the color. When
         # PhotometricInterpretation=3 is used, ColorMap must be present and SamplesPerPixel must be 1.
-        tiffinfo[270] = tiffinfo[270].decode('utf-8')
+        tiffinfo[270] = tiffinfo[270].decode("utf-8")
 
-        img = Image.fromarray(datasets.data.astype(np.uint8), mode='P')
-        if 'palette_color_map' in kwargs:
-            img.putpalette(ImagePalette.ImagePalette('RGB', kwargs['palette_color_map']))
+        img = Image.fromarray(datasets.data.astype(np.uint8), mode="P")
+        if "palette_color_map" in kwargs:
+            img.putpalette(ImagePalette.ImagePalette("RGB", kwargs["palette_color_map"]))
         else:
             LOG.error("In a mitiff palette image a color map must be provided: palette_color_map is missing.")
             return
 
-        img.save(tmp_gen_filename, compression='raw', compress_level=9, tiffinfo=tiffinfo)
+        img.save(tmp_gen_filename, compression="raw", compress_level=9, tiffinfo=tiffinfo)
 
     def _save_as_enhanced(self, datasets, tmp_gen_filename, **kwargs):
         """Save datasets as an enhanced RGB image."""
         img = get_enhanced_image(datasets.squeeze(), enhance=self.enhancer)
         tiffinfo = {}
-        if 'bands' in img.data.sizes and 'bands' not in datasets.sizes:
+        if "bands" in img.data.sizes and "bands" not in datasets.sizes:
             LOG.debug("Datasets without 'bands' become image with 'bands' due to enhancement.")
             LOG.debug("Needs to regenerate mitiff image description")
             image_description = self._make_image_description(img.data, **kwargs)
-            tiffinfo[IMAGEDESCRIPTION] = (image_description).encode('utf-8')
+            tiffinfo[IMAGEDESCRIPTION] = (image_description).encode("utf-8")
 
         mitiff_frames = []
-        for band in img.data['bands']:
+        for band in img.data["bands"]:
             chn = img.data.sel(bands=band)
             data = chn.values.clip(0, 1) * 254. + 1
             data = data.clip(0, 255)
-            mitiff_frames.append(Image.fromarray(data.astype(np.uint8), mode='L'))
+            mitiff_frames.append(Image.fromarray(data.astype(np.uint8), mode="L"))
 
         mitiff_frames[0].save(tmp_gen_filename, save_all=True, append_images=mitiff_frames[1:],
-                              compression='raw', compress_level=9, tiffinfo=tiffinfo)
+                              compression="raw", compress_level=9, tiffinfo=tiffinfo)
 
     def _generate_intermediate_filename(self, gen_filename):
         """Replace mitiff ext because pillow doesn't recognise the file type."""
         bs, ex = os.path.splitext(gen_filename)
         tmp_gen_filename = gen_filename
-        if ex.endswith('mitiff'):
+        if ex.endswith("mitiff"):
             bd = os.path.dirname(bs)
             bn = os.path.basename(bs)
-            tmp_gen_filename = os.path.join(bd, '.' + bn + '.tif')
+            tmp_gen_filename = os.path.join(bd, "." + bn + ".tif")
         return tmp_gen_filename
 
     def _save_datasets_as_mitiff(self, datasets, image_description,
@@ -680,25 +680,25 @@ def _save_datasets_as_mitiff(self, datasets, image_description,
         """
         tmp_gen_filename = self._generate_intermediate_filename(gen_filename)
 
         tiffinfo = {}
-        tiffinfo[IMAGEDESCRIPTION] = (image_description).encode('latin-1')
+        tiffinfo[IMAGEDESCRIPTION] = (image_description).encode("latin-1")
 
-        cns = self.translate_channel_name.get(kwargs['sensor'], {})
+        cns = self.translate_channel_name.get(kwargs["sensor"], {})
         if isinstance(datasets, list):
             LOG.debug("Saving datasets as list")
 
             mitiff_frames = []
-            for _cn in self.channel_order[kwargs['sensor']]:
+            for _cn in self.channel_order[kwargs["sensor"]]:
                 for dataset in datasets:
-                    if dataset.attrs['name'] == _cn:
+                    if dataset.attrs["name"] == _cn:
                         # Need to possible translate channels names from satpy to mitiff
-                        cn = cns.get(dataset.attrs['name'], dataset.attrs['name'])
-                        data = self._calibrate_data(dataset, dataset.attrs['calibration'],
-                                                    self.mitiff_config[kwargs['sensor']][cn]['min-val'],
-                                                    self.mitiff_config[kwargs['sensor']][cn]['max-val'])
-                        mitiff_frames.append(Image.fromarray(data.astype(np.uint8), mode='L'))
+                        cn = cns.get(dataset.attrs["name"], dataset.attrs["name"])
+                        data = self._calibrate_data(dataset, dataset.attrs["calibration"],
+                                                    self.mitiff_config[kwargs["sensor"]][cn]["min-val"],
+                                                    self.mitiff_config[kwargs["sensor"]][cn]["max-val"])
+                        mitiff_frames.append(Image.fromarray(data.astype(np.uint8), mode="L"))
                         break
             mitiff_frames[0].save(tmp_gen_filename, save_all=True, append_images=mitiff_frames[1:],
-                                  compression='raw', compress_level=9, tiffinfo=tiffinfo)
-        elif 'dataset' in datasets.attrs['name']:
+                                  compression="raw", compress_level=9, tiffinfo=tiffinfo)
+        elif "dataset" in datasets.attrs["name"]:
             LOG.debug("Saving dataset as single dataset.")
             self._save_single_dataset(datasets, cns, tmp_gen_filename, tiffinfo, kwargs)
         elif self.palette:
@@ -710,35 +710,35 @@ def _save_datasets_as_mitiff(self, datasets, image_description,
         os.rename(tmp_gen_filename, gen_filename)
 
     def _save_single_dataset(self, datasets, cns, tmp_gen_filename, tiffinfo, kwargs):
-        LOG.debug("Saving %s as a dataset.", datasets.attrs['name'])
-        if len(datasets.dims) == 2 and (all('bands' not in i for i in datasets.dims)):
+        LOG.debug("Saving %s as a dataset.", datasets.attrs["name"])
+        if len(datasets.dims) == 2 and (all("bands" not in i for i in datasets.dims)):
             # Special case with only one channel ie. no bands
             # Need to possible translate channels names from satpy to mitiff
             # Note the last index is a tuple index.
-            cn = cns.get(datasets.attrs['prerequisites'][0]['name'],
-                         datasets.attrs['prerequisites'][0]['name'])
-            data = self._calibrate_data(datasets, datasets.attrs['prerequisites'][0].get('calibration'),
-                                        self.mitiff_config[kwargs['sensor']][cn]['min-val'],
-                                        self.mitiff_config[kwargs['sensor']][cn]['max-val'])
+            cn = cns.get(datasets.attrs["prerequisites"][0]["name"],
+                         datasets.attrs["prerequisites"][0]["name"])
+            data = self._calibrate_data(datasets, datasets.attrs["prerequisites"][0].get("calibration"),
+                                        self.mitiff_config[kwargs["sensor"]][cn]["min-val"],
+                                        self.mitiff_config[kwargs["sensor"]][cn]["max-val"])
 
             Image.fromarray(data.astype(np.uint8)).save(tmp_gen_filename, compression="raw",
                                                         compress_level=9, tiffinfo=tiffinfo)
         else:
             mitiff_frames = []
-            for _cn_i, _cn in enumerate(self.channel_order[kwargs['sensor']]):
-                for band in datasets['bands']:
+            for _cn_i, _cn in enumerate(self.channel_order[kwargs["sensor"]]):
+                for band in datasets["bands"]:
                     if band == _cn:
                         chn = datasets.sel(bands=band)
                         # Need to possible translate channels names from satpy to mitiff
                         # Note the last index is a tuple index.
-                        cn = cns.get(chn.attrs['prerequisites'][_cn_i]['name'],
-                                     chn.attrs['prerequisites'][_cn_i]['name'])
-                        data = self._calibrate_data(chn, chn.attrs['prerequisites'][_cn_i].get('calibration'),
-                                                    self.mitiff_config[kwargs['sensor']][cn]['min-val'],
-                                                    self.mitiff_config[kwargs['sensor']][cn]['max-val'])
+                        cn = cns.get(chn.attrs["prerequisites"][_cn_i]["name"],
+                                     chn.attrs["prerequisites"][_cn_i]["name"])
+                        data = self._calibrate_data(chn, chn.attrs["prerequisites"][_cn_i].get("calibration"),
+                                                    self.mitiff_config[kwargs["sensor"]][cn]["min-val"],
+                                                    self.mitiff_config[kwargs["sensor"]][cn]["max-val"])
 
-                        mitiff_frames.append(Image.fromarray(data.astype(np.uint8), mode='L'))
+                        mitiff_frames.append(Image.fromarray(data.astype(np.uint8), mode="L"))
                         break
             mitiff_frames[0].save(tmp_gen_filename, save_all=True, append_images=mitiff_frames[1:],
-                                  compression='raw', compress_level=9, tiffinfo=tiffinfo)
+                                  compression="raw", compress_level=9, tiffinfo=tiffinfo)
diff --git a/satpy/writers/utils.py b/satpy/writers/utils.py
index 3308115ff9..fe9ff00625 100644
--- a/satpy/writers/utils.py
+++ b/satpy/writers/utils.py
@@ -18,7 +18,7 @@
 """Writer utilities."""
 
 
-def flatten_dict(d, parent_key='', sep='_'):
+def flatten_dict(d, parent_key="", sep="_"):
     """Flatten a nested dictionary.
     Based on https://stackoverflow.com/a/6027615/5703449
diff --git a/setup.py b/setup.py
index 612db4fa05..76c2919200 100644
--- a/setup.py
+++ b/setup.py
@@ -22,68 +22,68 @@
 
 from setuptools import find_packages, setup
 
-requires = ['numpy >=1.13', 'pillow', 'pyresample >=1.24.0', 'trollsift',
-            'trollimage >=1.20', 'pykdtree', 'pyyaml >=5.1', 'xarray >=0.10.1, !=0.13.0',
-            'dask[array] >=0.17.1', 'pyproj>=2.2', 'zarr', 'donfig', 'appdirs',
-            'packaging', 'pooch', 'pyorbital']
+requires = ["numpy >=1.13", "pillow", "pyresample >=1.24.0", "trollsift",
+            "trollimage >=1.20", "pykdtree", "pyyaml >=5.1", "xarray >=0.10.1, !=0.13.0",
+            "dask[array] >=0.17.1", "pyproj>=2.2", "zarr", "donfig", "appdirs",
+            "packaging", "pooch", "pyorbital"]
 
-test_requires = ['behave', 'h5py', 'netCDF4', 'pyhdf', 'imageio',
-                 'rasterio', 'geoviews', 'trollimage', 'fsspec', 'bottleneck',
-                 'rioxarray', 'pytest', 'pytest-lazy-fixture', 'defusedxml',
-                 's3fs', 'eccodes', 'h5netcdf', 'xarray-datatree',
-                 'skyfield', 'ephem', 'pint-xarray', 'astropy', 'dask-image']
+test_requires = ["behave", "h5py", "netCDF4", "pyhdf", "imageio",
+                 "rasterio", "geoviews", "trollimage", "fsspec", "bottleneck",
+                 "rioxarray", "pytest", "pytest-lazy-fixture", "defusedxml",
+                 "s3fs", "eccodes", "h5netcdf", "xarray-datatree",
+                 "skyfield", "ephem", "pint-xarray", "astropy", "dask-image"]
 
 extras_require = {
     # Readers:
-    'avhrr_l1b_gaclac': ['pygac >= 1.3.0'],
-    'modis_l1b': ['pyhdf', 'python-geotiepoints >= 1.1.7'],
-    'geocat': ['pyhdf'],
-    'acspo': ['netCDF4 >= 1.1.8'],
-    'clavrx': ['netCDF4 >= 1.1.8'],
-    'viirs_l1b': ['netCDF4 >= 1.1.8'],
-    'viirs_sdr': ['h5py >= 2.7.0'],
-    'viirs_compact': ['h5py >= 2.7.0'],
-    'omps_edr': ['h5py >= 2.7.0'],
-    'amsr2_l1b': ['h5py >= 2.7.0'],
-    'hrpt': ['pyorbital >= 1.3.1', 'pygac', 'python-geotiepoints >= 1.1.7'],
-    'hrit_msg': ['pytroll-schedule'],
-    'msi_safe': ['rioxarray', "bottleneck", "python-geotiepoints"],
-    'nc_nwcsaf_msg': ['netCDF4 >= 1.1.8'],
-    'sar_c': ['python-geotiepoints >= 1.1.7', 'rasterio', 'rioxarray', 'defusedxml'],
-    'abi_l1b': ['h5netcdf'],
-    'seviri_l1b_hrit': ['pyorbital >= 1.3.1'],
-    'seviri_l1b_native': ['pyorbital >= 1.3.1'],
-    'seviri_l1b_nc': ['pyorbital >= 1.3.1', 'netCDF4 >= 1.1.8'],
-    'seviri_l2_bufr': ['eccodes'],
-    'seviri_l2_grib': ['eccodes'],
-    'hsaf_grib': ['pygrib'],
-    'remote_reading': ['fsspec'],
-    'insat_3d': ['xarray-datatree'],
-    'gms5-vissr_l1b': ["numba"],
+    "avhrr_l1b_gaclac": ["pygac >= 1.3.0"],
+    "modis_l1b": ["pyhdf", "python-geotiepoints >= 1.1.7"],
+    "geocat": ["pyhdf"],
+    "acspo": ["netCDF4 >= 1.1.8"],
+    "clavrx": ["netCDF4 >= 1.1.8"],
+    "viirs_l1b": ["netCDF4 >= 1.1.8"],
+    "viirs_sdr": ["h5py >= 2.7.0"],
+    "viirs_compact": ["h5py >= 2.7.0"],
+    "omps_edr": ["h5py >= 2.7.0"],
+    "amsr2_l1b": ["h5py >= 2.7.0"],
+    "hrpt": ["pyorbital >= 1.3.1", "pygac", "python-geotiepoints >= 1.1.7"],
+    "hrit_msg": ["pytroll-schedule"],
+    "msi_safe": ["rioxarray", "bottleneck", "python-geotiepoints"],
+    "nc_nwcsaf_msg": ["netCDF4 >= 1.1.8"],
+    "sar_c": ["python-geotiepoints >= 1.1.7", "rasterio", "rioxarray", "defusedxml"],
+    "abi_l1b": ["h5netcdf"],
+    "seviri_l1b_hrit": ["pyorbital >= 1.3.1"],
+    "seviri_l1b_native": ["pyorbital >= 1.3.1"],
+    "seviri_l1b_nc": ["pyorbital >= 1.3.1", "netCDF4 >= 1.1.8"],
+    "seviri_l2_bufr": ["eccodes"],
+    "seviri_l2_grib": ["eccodes"],
+    "hsaf_grib": ["pygrib"],
+    "remote_reading": ["fsspec"],
+    "insat_3d": ["xarray-datatree"],
+    "gms5-vissr_l1b": ["numba"],
     # Writers:
-    'cf': ['h5netcdf >= 0.7.3'],
-    'awips_tiled': ['netCDF4 >= 1.1.8'],
-    'geotiff': ['rasterio', 'trollimage[geotiff]'],
-    'ninjo': ['pyninjotiff', 'pint'],
+    "cf": ["h5netcdf >= 0.7.3"],
+    "awips_tiled": ["netCDF4 >= 1.1.8"],
+    "geotiff": ["rasterio", "trollimage[geotiff]"],
+    "ninjo": ["pyninjotiff", "pint"],
     "units": ["pint-xarray"],
     # Composites/Modifiers:
-    'rayleigh': ['pyspectral >= 0.10.1'],
-    'angles': ['pyorbital >= 1.3.1'],
-    'filters': ['dask-image'],
+    "rayleigh": ["pyspectral >= 0.10.1"],
+    "angles": ["pyorbital >= 1.3.1"],
+    "filters": ["dask-image"],
     # MultiScene:
-    'animations': ['imageio'],
+    "animations": ["imageio"],
     # Documentation:
-    'doc': ['sphinx', 'sphinx_rtd_theme', 'sphinxcontrib-apidoc'],
+    "doc": ["sphinx", "sphinx_rtd_theme", "sphinxcontrib-apidoc"],
     # Other
-    'geoviews': ['geoviews'],
-    'overlays': ['pycoast', 'pydecorate'],
-    'satpos_from_tle': ['skyfield', 'astropy'],
-    'tests': test_requires,
+    "geoviews": ["geoviews"],
+    "overlays": ["pycoast", "pydecorate"],
+    "satpos_from_tle": ["skyfield", "astropy"],
+    "tests": test_requires,
 }
 all_extras = []
 for extra_deps in extras_require.values():
     all_extras.extend(extra_deps)
-extras_require['all'] = list(set(all_extras))
+extras_require["all"] = list(set(all_extras))
 
 
 def _config_data_files(base_dirs, extensions=(".cfg", )):
@@ -110,21 +110,21 @@ def _config_data_files(base_dirs, extensions=(".cfg", )):
 
 
 entry_points = {
-    'console_scripts': [
-        'satpy_retrieve_all_aux_data=satpy.aux_download:retrieve_all_cmd',
+    "console_scripts": [
+        "satpy_retrieve_all_aux_data=satpy.aux_download:retrieve_all_cmd",
     ],
 }
 
 
-NAME = 'satpy'
-with open('README.rst', 'r') as readme:
+NAME = "satpy"
+with open("README.rst", "r") as readme:
     README = readme.read()
 
 setup(name=NAME,
-      description='Python package for earth-observing satellite data processing',
+      description="Python package for earth-observing satellite data processing",
       long_description=README,
-      author='The Pytroll Team',
-      author_email='pytroll@googlegroups.com',
+      author="The Pytroll Team",
+      author_email="pytroll@googlegroups.com",
       classifiers=["Development Status :: 5 - Production/Stable",
                    "Intended Audience :: Science/Research",
                    "License :: OSI Approved :: GNU General Public License v3 " +
@@ -147,23 +147,23 @@ def _config_data_files(base_dirs, extensions=(".cfg", )):
       packages=find_packages(),
       # Always use forward '/', even on Windows
       # See https://setuptools.readthedocs.io/en/latest/userguide/datafiles.html#data-files-support
-      package_data={'satpy': ['etc/geo_image.cfg',
-                              'etc/areas.yaml',
-                              'etc/satpy.cfg',
-                              'etc/himawari-8.cfg',
-                              'etc/eps_avhrrl1b_6.5.xml',
-                              'etc/readers/*.yaml',
-                              'etc/writers/*.yaml',
-                              'etc/composites/*.yaml',
-                              'etc/enhancements/*.cfg',
-                              'etc/enhancements/*.yaml',
-                              'tests/etc/readers/*.yaml',
-                              'tests/etc/composites/*.yaml',
-                              'tests/etc/writers/*.yaml',
+      package_data={"satpy": ["etc/geo_image.cfg",
+                              "etc/areas.yaml",
+                              "etc/satpy.cfg",
+                              "etc/himawari-8.cfg",
+                              "etc/eps_avhrrl1b_6.5.xml",
+                              "etc/readers/*.yaml",
+                              "etc/writers/*.yaml",
+                              "etc/composites/*.yaml",
+                              "etc/enhancements/*.cfg",
+                              "etc/enhancements/*.yaml",
+                              "tests/etc/readers/*.yaml",
+                              "tests/etc/composites/*.yaml",
+                              "tests/etc/writers/*.yaml",
                               ]},
       zip_safe=False,
       install_requires=requires,
-      python_requires='>=3.9',
+      python_requires=">=3.9",
       extras_require=extras_require,
       entry_points=entry_points,
       )
diff --git a/utils/convert_to_ninjotiff.py b/utils/convert_to_ninjotiff.py
index e457ee35e3..2189a11dec 100644
--- a/utils/convert_to_ninjotiff.py
+++ b/utils/convert_to_ninjotiff.py
@@ -38,20 +38,20 @@
 
 debug_on()
 
-parser = argparse.ArgumentParser(description='Turn an image into a NinjoTiff.')
-parser.add_argument('--cfg', dest='cfg', action="store",
+parser = argparse.ArgumentParser(description="Turn an image into a NinjoTiff.")
+parser.add_argument("--cfg", dest="cfg", action="store",
                     help="YAML configuration as an alternative to the command line input for NinJo metadata.")
-parser.add_argument('--input_dir', dest='input_dir', action="store",
+parser.add_argument("--input_dir", dest="input_dir", action="store",
                     help="Directory with input data, that must contain a timestamp in the filename.")
-parser.add_argument('--chan_id', dest='chan_id', action="store", help="Channel ID", default="9999")
-parser.add_argument('--sat_id', dest='sat_id', action="store", help="Satellite ID", default="8888")
-parser.add_argument('--data_cat', dest='data_cat', action="store",
+parser.add_argument("--chan_id", dest="chan_id", action="store", help="Channel ID", default="9999")
+parser.add_argument("--sat_id", dest="sat_id", action="store", help="Satellite ID", default="8888")
+parser.add_argument("--data_cat", dest="data_cat", action="store",
                     help="Category of data (one of GORN, GPRN, PORN)", default="GORN")
-parser.add_argument('--area', dest='areadef', action="store",
+parser.add_argument("--area", dest="areadef", action="store",
                     help="Area name, the definition must exist in your areas configuration file",
                     default="nrEURO1km_NPOL_COALeqc")
-parser.add_argument('--ph_unit', dest='ph_unit', action="store", help="Physical unit", default="CELSIUS")
-parser.add_argument('--data_src', dest='data_src', action="store", help="Data source", default="EUMETCAST")
+parser.add_argument("--ph_unit", dest="ph_unit", action="store", help="Physical unit", default="CELSIUS")
+parser.add_argument("--data_src", dest="data_src", action="store", help="Data source", default="EUMETCAST")
 
 args = parser.parse_args()
 
 if (args.input_dir is not None):
@@ -59,21 +59,21 @@
 cfg = vars(args)
 if (args.cfg is not None):
-    with open(args.cfg, 'r') as ymlfile:
+    with open(args.cfg, "r") as ymlfile:
         cfg = yaml.load(ymlfile, Loader=UnsafeLoader)
 
 narea = get_area_def(args.areadef)
 global_data = Scene(reader="generic_image")
-global_data.load(['image'])
+global_data.load(["image"])
 
-global_data['image'].info['area'] = narea
-fname = global_data['image'].info['filename']
+global_data["image"].info["area"] = narea
+fname = global_data["image"].info["filename"]
 ofname = fname[:-3] + "tif"
 
 # global_data.save_dataset('image', filename="out.png", writer="simple_image")
-global_data.save_dataset('image', filename=ofname, writer="ninjotiff",
-                         sat_id=cfg['sat_id'],
-                         chan_id=cfg['chan_id'],
-                         data_cat=cfg['data_cat'],
-                         data_source=cfg['data_src'],
-                         physic_unit=cfg['ph_unit'])
+global_data.save_dataset("image", filename=ofname, writer="ninjotiff",
+                         sat_id=cfg["sat_id"],
+                         chan_id=cfg["chan_id"],
+                         data_cat=cfg["data_cat"],
+                         data_source=cfg["data_src"],
+                         physic_unit=cfg["ph_unit"])
diff --git a/utils/coord2area_def.py b/utils/coord2area_def.py
index e3727b9aba..8b6aa0478b 100644
--- a/utils/coord2area_def.py
+++ b/utils/coord2area_def.py
@@ -66,7 +66,7 @@
 
 from pyproj import Proj
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     parser = argparse.ArgumentParser()
 
     parser.add_argument("name",
@@ -126,7 +126,7 @@
         " +".join(("proj=" + proj + ",lat_0=" + str(lat_0) +
                    ",lon_0=" + str(lon_0) +
                    ",ellps=WGS84").split(","))
-    print('### ' + proj4_string)
+    print("### " + proj4_string)
 
     print()
     print(name + ":")
    print("  description: " + name)
@@ -146,14 +146,14 @@
         sys.exit(0)
 
     from PIL import Image
     from pycoast import ContourWriterAGG
-    img = Image.new('RGB', (xsize, ysize))
+    img = Image.new("RGB", (xsize, ysize))
     area_def = (proj4_string, area_extent)
     cw = ContourWriterAGG(args.shapes)
     cw.add_coastlines(img, (proj4_string, area_extent),
-                      resolution='l', width=0.5)
+                      resolution="l", width=0.5)
 
-    cw.add_grid(img, area_def, (10.0, 10.0), (2.0, 2.0), write_text=False, outline='white', outline_opacity=175,
-                width=1.0, minor_outline='white', minor_outline_opacity=175, minor_width=0.2, minor_is_tick=False)
+    cw.add_grid(img, area_def, (10.0, 10.0), (2.0, 2.0), write_text=False, outline="white", outline_opacity=175,
+                width=1.0, minor_outline="white", minor_outline_opacity=175, minor_width=0.2, minor_is_tick=False)
     img.show()

From f29762f05303ff5717a503c436cf73cf9b96464a Mon Sep 17 00:00:00 2001
From: Martin Raspaud
Date: Tue, 24 Oct 2023 13:49:14 +0200
Subject: [PATCH 0593/1416] Fix too long line

---
 pyproject.toml              | 2 +-
 satpy/readers/fci_l1c_nc.py | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 01b0272e89..c6f9770ef1 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -16,7 +16,7 @@ line_length = 120
 [tool.ruff]
 # See https://docs.astral.sh/ruff/rules/
 # select = ["E", "W", "F", "I", "D", "S", "B", "A", "PT", "Q", "TID", "C90", "T10", "N"]
-select = ["Q"]
+select = ["E", "Q"]
 ignore = ["B905"]  # only available from python 3.10
 line-length = 120
 
diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py
index a405c86201..e42975b3a4 100644
--- a/satpy/readers/fci_l1c_nc.py
+++ b/satpy/readers/fci_l1c_nc.py
@@ -68,8 +68,8 @@
 
 The reading routine supports channel data in counts, radiances, and (depending
-on channel) brightness temperatures or reflectances. The brightness temperature and reflectance calculation is based on the formulas indicated in
-`PUG`_.
+on channel) brightness temperatures or reflectances. The brightness temperature and reflectance calculation is based on
+the formulas indicated in `PUG`_.
 Radiance datasets are returned in units of radiance per unit wavenumber (mW m-2 sr-1 (cm-1)-1). Radiances can be
 converted to units of radiance per unit wavelength (W m-2 um-1 sr-1) by multiplying with the
 `radiance_unit_conversion_coefficient` dataset attribute.

From 84d38493615397e247c4078b1269d764e6907f8d Mon Sep 17 00:00:00 2001
From: Martin Raspaud
Date: Tue, 24 Oct 2023 13:52:36 +0200
Subject: [PATCH 0594/1416] Add exception for conftest importing unused modules

---
 pyproject.toml                                   | 2 +-
 satpy/tests/reader_tests/modis_tests/conftest.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index c6f9770ef1..cc7d219274 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -16,7 +16,7 @@ line_length = 120
 [tool.ruff]
 # See https://docs.astral.sh/ruff/rules/
 # select = ["E", "W", "F", "I", "D", "S", "B", "A", "PT", "Q", "TID", "C90", "T10", "N"]
-select = ["E", "Q"]
+select = ["E", "W", "F", "Q"]
 ignore = ["B905"]  # only available from python 3.10
 line-length = 120
 
diff --git a/satpy/tests/reader_tests/modis_tests/conftest.py b/satpy/tests/reader_tests/modis_tests/conftest.py
index 09f98049db..5d7e8adfef 100644
--- a/satpy/tests/reader_tests/modis_tests/conftest.py
+++ b/satpy/tests/reader_tests/modis_tests/conftest.py
@@ -17,7 +17,7 @@
 # satpy. If not, see <http://www.gnu.org/licenses/>.
"""Setup and configuration for all reader tests.""" -from ._modis_fixtures import ( +from ._modis_fixtures import ( # noqa: F401 modis_l1b_imapp_1000m_file, modis_l1b_imapp_geo_file, modis_l1b_nasa_1km_mod03_files, From 0e1efa3f549aaebff6570d377968aa6651e4032e Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 24 Oct 2023 13:56:02 +0200 Subject: [PATCH 0595/1416] Fix imports --- pyproject.toml | 2 +- satpy/demo/__init__.py | 2 +- satpy/enhancements/__init__.py | 3 +- satpy/modifiers/__init__.py | 2 +- .../enhancement_tests/test_enhancements.py | 3 +- satpy/tests/modifier_tests/test_crefl.py | 2 +- satpy/tests/modifier_tests/test_parallax.py | 59 +++++++++---------- .../reader_tests/modis_tests/conftest.py | 2 +- satpy/tests/reader_tests/test_cmsaf_claas.py | 2 +- satpy/tests/writer_tests/test_cf.py | 2 +- satpy/tests/writer_tests/test_ninjotiff.py | 9 +-- 11 files changed, 41 insertions(+), 47 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index cc7d219274..547cfd146c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ line_length = 120 [tool.ruff] # See https://docs.astral.sh/ruff/rules/ # select = ["E", "W", "F", "I", "D", "S", "B", "A", "PT", "Q", "TID", "C90", "T10", "N"] -select = ["E", "W", "F", "Q"] +select = ["E", "W", "F", "I", "TID", "C90", "Q"] ignore = ["B905"] # only available from python 3.10 line-length = 120 diff --git a/satpy/demo/__init__.py b/satpy/demo/__init__.py index b12c9e285b..e6ad87eb88 100644 --- a/satpy/demo/__init__.py +++ b/satpy/demo/__init__.py @@ -50,7 +50,7 @@ """ -from .abi_l1b import get_hurricane_florence_abi # noqa: F401 +from .abi_l1b import get_hurricane_florence_abi # noqa: F401, I001 from .abi_l1b import get_us_midlatitude_cyclone_abi # noqa: F401 from .ahi_hsd import download_typhoon_surigae_ahi # noqa: F401 from .fci import download_fci_test_data # noqa: F401 diff --git a/satpy/enhancements/__init__.py b/satpy/enhancements/__init__.py index b74cc2c8bd..e2dda9cf63 100644 --- a/satpy/enhancements/__init__.py +++ b/satpy/enhancements/__init__.py @@ -31,8 +31,7 @@ from satpy._compat import ArrayLike from satpy._config import get_config_path - -from ..utils import find_in_ancillary +from satpy.utils import find_in_ancillary LOG = logging.getLogger(__name__) diff --git a/satpy/modifiers/__init__.py b/satpy/modifiers/__init__.py index a0888167b3..d77a5ff58e 100644 --- a/satpy/modifiers/__init__.py +++ b/satpy/modifiers/__init__.py @@ -20,7 +20,7 @@ # file deepcode ignore W0611: Ignore unused imports in init module from .base import ModifierBase # noqa: F401, isort: skip -from .atmosphere import CO2Corrector # noqa: F401 +from .atmosphere import CO2Corrector # noqa: F401, I001 from .atmosphere import PSPAtmosphericalCorrection # noqa: F401 from .atmosphere import PSPRayleighReflectance # noqa: F401 from .geometry import EffectiveSolarPathLengthCorrector # noqa: F401 diff --git a/satpy/tests/enhancement_tests/test_enhancements.py b/satpy/tests/enhancement_tests/test_enhancements.py index b518cc3f39..964e634ba4 100644 --- a/satpy/tests/enhancement_tests/test_enhancements.py +++ b/satpy/tests/enhancement_tests/test_enhancements.py @@ -571,9 +571,8 @@ def fake_area(): ) def test_nwcsaf_comps(fake_area, tmp_path, data): """Test loading NWCSAF composites.""" + from satpy import Scene from satpy.writers import get_enhanced_image - - from ... 
import Scene (flavour, dvname, altname, palettename, statusname, comp, filelabel, dtp) = _nwcsaf_geo_props[data] rng = (0, 100) if dtp == "uint8" else (-100, 1000) if flavour == "geo": diff --git a/satpy/tests/modifier_tests/test_crefl.py b/satpy/tests/modifier_tests/test_crefl.py index e43d7bc3fa..ab42f85155 100644 --- a/satpy/tests/modifier_tests/test_crefl.py +++ b/satpy/tests/modifier_tests/test_crefl.py @@ -23,7 +23,7 @@ from dask import array as da from pyresample.geometry import AreaDefinition -from ..utils import assert_maximum_dask_computes +from satpy.tests.utils import assert_maximum_dask_computes # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: diff --git a/satpy/tests/modifier_tests/test_parallax.py b/satpy/tests/modifier_tests/test_parallax.py index 04af43981f..8fa358ec35 100644 --- a/satpy/tests/modifier_tests/test_parallax.py +++ b/satpy/tests/modifier_tests/test_parallax.py @@ -30,8 +30,7 @@ from pyresample import create_area_def import satpy.resample - -from ...writers import get_enhanced_image +from satpy.writers import get_enhanced_image # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: @@ -88,7 +87,7 @@ class TestForwardParallax: def test_get_parallax_corrected_lonlats_ssp(self): """Test that at SSP, parallax correction does nothing.""" - from ...modifiers.parallax import get_parallax_corrected_lonlats + from satpy.modifiers.parallax import get_parallax_corrected_lonlats sat_lat = sat_lon = lon = lat = 0. height = 5000. # m sat_alt = 30_000_000. # m @@ -98,7 +97,7 @@ def test_get_parallax_corrected_lonlats_ssp(self): def test_get_parallax_corrected_lonlats_clearsky(self): """Test parallax correction for clearsky case (returns NaN).""" - from ...modifiers.parallax import get_parallax_corrected_lonlats + from satpy.modifiers.parallax import get_parallax_corrected_lonlats sat_lat = sat_lon = 0 lat = np.linspace(-20, 20, 25).reshape(5, 5) lon = np.linspace(-20, 20, 25).reshape(5, 5).T @@ -114,7 +113,7 @@ def test_get_parallax_corrected_lonlats_clearsky(self): @pytest.mark.parametrize("resolution", [0.01, 0.5, 10]) def test_get_parallax_corrected_lonlats_cloudy_ssp(self, lat, lon, resolution): """Test parallax correction for fully cloudy scene at SSP.""" - from ...modifiers.parallax import get_parallax_corrected_lonlats + from satpy.modifiers.parallax import get_parallax_corrected_lonlats N = 5 lats = np.linspace(lat-N*resolution, lat+N*resolution, 25).reshape(N, N) @@ -145,7 +144,7 @@ def test_get_parallax_corrected_lonlats_cloudy_ssp(self, lat, lon, resolution): def test_get_parallax_corrected_lonlats_cloudy_slant(self): """Test parallax correction for fully cloudy scene (not SSP).""" - from ...modifiers.parallax import get_parallax_corrected_lonlats + from satpy.modifiers.parallax import get_parallax_corrected_lonlats sat_lat = sat_lon = 0 lat = np.linspace(-20, 20, 25).reshape(5, 5) lon = np.linspace(-20, 20, 25).reshape(5, 5).T @@ -161,7 +160,7 @@ def test_get_parallax_corrected_lonlats_cloudy_slant(self): def test_get_parallax_corrected_lonlats_mixed(self): """Test parallax correction for mixed cloudy case.""" - from ...modifiers.parallax import get_parallax_corrected_lonlats + from satpy.modifiers.parallax import get_parallax_corrected_lonlats sat_lon = sat_lat = 0 sat_alt = 35_785_831.0 # m @@ -189,7 +188,7 @@ def test_get_parallax_corrected_lonlats_horizon(self): Test the rather unlikely case of a satellite elevation of exactly 0 """ - from ...modifiers.parallax 
import get_parallax_corrected_lonlats + from satpy.modifiers.parallax import get_parallax_corrected_lonlats sat_lat = sat_lon = lon = lat = 0. height = 5000. sat_alt = 30_000_000. @@ -200,7 +199,7 @@ def test_get_parallax_corrected_lonlats_horizon(self): def test_get_surface_parallax_displacement(self): """Test surface parallax displacement.""" - from ...modifiers.parallax import get_surface_parallax_displacement + from satpy.modifiers.parallax import get_surface_parallax_displacement val = get_surface_parallax_displacement( 0, 0, 36_000_000, 0, 10, 10_000) @@ -215,7 +214,7 @@ class TestParallaxCorrectionClass: @pytest.mark.parametrize("resolution", [0.05, 1, 10]) def test_init_parallaxcorrection(self, center, sizes, resolution): """Test that ParallaxCorrection class can be instantiated.""" - from ...modifiers.parallax import ParallaxCorrection + from satpy.modifiers.parallax import ParallaxCorrection fake_area = _get_fake_areas(center, sizes, resolution)[0] pc = ParallaxCorrection(fake_area) assert pc.base_area == fake_area @@ -225,8 +224,8 @@ def test_init_parallaxcorrection(self, center, sizes, resolution): @pytest.mark.parametrize("resolution", [0.01, 0.5, 10]) def test_correct_area_clearsky(self, sat_pos, ar_pos, resolution, caplog): """Test that ParallaxCorrection doesn't change clearsky geolocation.""" - from ...modifiers.parallax import ParallaxCorrection - from ..utils import make_fake_scene + from satpy.modifiers.parallax import ParallaxCorrection + from satpy.tests.utils import make_fake_scene (sat_lat, sat_lon) = sat_pos (ar_lat, ar_lon) = ar_pos small = 5 @@ -254,8 +253,8 @@ def test_correct_area_clearsky(self, sat_pos, ar_pos, resolution, caplog): @pytest.mark.parametrize("resolution", [0.01, 0.5, 10]) def test_correct_area_ssp(self, lat, lon, resolution): """Test that ParallaxCorrection doesn't touch SSP.""" - from ...modifiers.parallax import ParallaxCorrection - from ..utils import make_fake_scene + from satpy.modifiers.parallax import ParallaxCorrection + from satpy.tests.utils import make_fake_scene codes = { (0, 0): 4326, (0, 40): 4326, @@ -298,8 +297,8 @@ def test_correct_area_ssp(self, lat, lon, resolution): @pytest.mark.parametrize("daskify", [False, True]) def test_correct_area_partlycloudy(self, daskify): """Test ParallaxCorrection for partly cloudy situation.""" - from ...modifiers.parallax import ParallaxCorrection - from ..utils import make_fake_scene + from satpy.modifiers.parallax import ParallaxCorrection + from satpy.tests.utils import make_fake_scene small = 5 large = 9 (fake_area_small, fake_area_large) = _get_fake_areas( @@ -349,8 +348,8 @@ def test_correct_area_partlycloudy(self, daskify): @pytest.mark.parametrize("res1,res2", [(0.08, 0.3), (0.3, 0.08)]) def test_correct_area_clearsky_different_resolutions(self, res1, res2): """Test clearsky correction when areas have different resolutions.""" - from ...modifiers.parallax import ParallaxCorrection - from ..utils import make_fake_scene + from satpy.modifiers.parallax import ParallaxCorrection + from satpy.tests.utils import make_fake_scene # areas with different resolutions, but same coverage @@ -385,8 +384,8 @@ def test_correct_area_clearsky_different_resolutions(self, res1, res2): @pytest.mark.xfail(reason="awaiting pyresample fixes") def test_correct_area_cloudy_no_overlap(self, ): """Test cloudy correction when areas have no overlap.""" - from ...modifiers.parallax import MissingHeightError, ParallaxCorrection - from ..utils import make_fake_scene + from satpy.modifiers.parallax import 
MissingHeightError, ParallaxCorrection + from satpy.tests.utils import make_fake_scene areas_00 = _get_fake_areas((0, 40), [5, 9], 0.1) areas_shift = _get_fake_areas((90, 20), [5, 9], 0.1) fake_area_small = areas_00[0] @@ -405,8 +404,8 @@ def test_correct_area_cloudy_no_overlap(self, ): @pytest.mark.xfail(reason="awaiting pyresample fixes") def test_correct_area_cloudy_partly_shifted(self, ): """Test cloudy correction when areas overlap only partly.""" - from ...modifiers.parallax import IncompleteHeightWarning, ParallaxCorrection - from ..utils import make_fake_scene + from satpy.modifiers.parallax import IncompleteHeightWarning, ParallaxCorrection + from satpy.tests.utils import make_fake_scene areas_00 = _get_fake_areas((0, 40), [5, 9], 0.1) areas_shift = _get_fake_areas((0.5, 40), [5, 9], 0.1) fake_area_small = areas_00[0] @@ -426,8 +425,8 @@ def test_correct_area_cloudy_partly_shifted(self, ): def test_correct_area_cloudy_same_area(self, ): """Test cloudy correction when areas are the same.""" - from ...modifiers.parallax import ParallaxCorrection - from ..utils import make_fake_scene + from satpy.modifiers.parallax import ParallaxCorrection + from satpy.tests.utils import make_fake_scene area = _get_fake_areas((0, 0), [9], 0.1)[0] sc = make_fake_scene( @@ -446,8 +445,8 @@ def test_correct_area_no_orbital_parameters(self, caplog, fake_tle): on satellite location directly. Rather, they include platform name, sensor, start time, and end time, that we have to use instead. """ - from ...modifiers.parallax import ParallaxCorrection - from ..utils import make_fake_scene + from satpy.modifiers.parallax import ParallaxCorrection + from satpy.tests.utils import make_fake_scene small = 5 large = 9 (fake_area_small, fake_area_large) = _get_fake_areas( @@ -478,7 +477,7 @@ class TestParallaxCorrectionModifier: def test_parallax_modifier_interface(self): """Test the modifier interface.""" - from ...modifiers.parallax import ParallaxCorrectionModifier + from satpy.modifiers.parallax import ParallaxCorrectionModifier (area_small, area_large) = _get_fake_areas((0, 0), [5, 9], 0.1) fake_bt = xr.DataArray( np.linspace(220, 230, 25).reshape(5, 5), @@ -512,7 +511,7 @@ def test_parallax_modifier_interface_with_cloud(self): BT corresponding to full disk SEVIRI, and test that no strange speckles occur. """ - from ...modifiers.parallax import ParallaxCorrectionModifier + from satpy.modifiers.parallax import ParallaxCorrectionModifier w_cth = 25 h_cth = 15 @@ -623,7 +622,7 @@ def _get_fake_cloud_datasets(self, test_area, cth, use_dask): @pytest.mark.parametrize("test_area", ["foroyar", "ouagadougou"], indirect=["test_area"]) def test_modifier_interface_fog_no_shift(self, test_area): """Test that fog isn't masked or shifted.""" - from ...modifiers.parallax import ParallaxCorrectionModifier + from satpy.modifiers.parallax import ParallaxCorrectionModifier (fake_bt, fake_cth, _) = self._get_fake_cloud_datasets(test_area, 50, use_dask=False) @@ -647,7 +646,7 @@ def test_modifier_interface_cloud_moves_to_observer(self, cth, use_dask, test_ar With the modifier interface, use a high resolution area and test that pixels are moved in the direction of the observer and not away from it. 
""" - from ...modifiers.parallax import ParallaxCorrectionModifier + from satpy.modifiers.parallax import ParallaxCorrectionModifier (fake_bt, fake_cth, cma) = self._get_fake_cloud_datasets(test_area, cth, use_dask=use_dask) diff --git a/satpy/tests/reader_tests/modis_tests/conftest.py b/satpy/tests/reader_tests/modis_tests/conftest.py index 5d7e8adfef..e6a8432653 100644 --- a/satpy/tests/reader_tests/modis_tests/conftest.py +++ b/satpy/tests/reader_tests/modis_tests/conftest.py @@ -17,7 +17,7 @@ # satpy. If not, see . """Setup and configuration for all reader tests.""" -from ._modis_fixtures import ( # noqa: F401 +from ._modis_fixtures import ( # noqa: F401, I001 modis_l1b_imapp_1000m_file, modis_l1b_imapp_geo_file, modis_l1b_nasa_1km_mod03_files, diff --git a/satpy/tests/reader_tests/test_cmsaf_claas.py b/satpy/tests/reader_tests/test_cmsaf_claas.py index 7f5b728ba8..db2117d264 100644 --- a/satpy/tests/reader_tests/test_cmsaf_claas.py +++ b/satpy/tests/reader_tests/test_cmsaf_claas.py @@ -17,7 +17,7 @@ # satpy. If not, see . """Tests for the 'cmsaf-claas2_l2_nc' reader.""" -import datetime +import datetime # noqa: I001 import os import numpy as np diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 6a51a71b36..0c9ca9f234 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -70,7 +70,7 @@ def __exit__(self, *args): def test_lonlat_storage(tmp_path): """Test correct storage for area with lon/lat units.""" - from ..utils import make_fake_scene + from satpy.tests.utils import make_fake_scene scn = make_fake_scene( {"ketolysis": np.arange(25).reshape(5, 5)}, daskify=True, diff --git a/satpy/tests/writer_tests/test_ninjotiff.py b/satpy/tests/writer_tests/test_ninjotiff.py index f36f1028b7..b8c311f9ed 100644 --- a/satpy/tests/writer_tests/test_ninjotiff.py +++ b/satpy/tests/writer_tests/test_ninjotiff.py @@ -99,10 +99,9 @@ def test_image(self, iwsi, save_dataset): def test_convert_units_self(self): """Test that unit conversion to themselves do nothing.""" + from satpy.tests.utils import make_fake_scene from satpy.writers.ninjotiff import convert_units - from ..utils import make_fake_scene - # ensure that converting from % to itself does not change the data sc = make_fake_scene( {"VIS006": np.arange(25, dtype="f4").reshape(5, 5)}, @@ -115,9 +114,8 @@ def test_convert_units_self(self): def test_convert_units_temp(self): """Test that temperature unit conversions works as expected.""" # test converting between °C and K + from satpy.tests.utils import make_fake_scene from satpy.writers.ninjotiff import convert_units - - from ..utils import make_fake_scene sc = make_fake_scene( {"IR108": np.arange(25, dtype="f4").reshape(5, 5)}, common_attrs={"units": "K"}) @@ -134,9 +132,8 @@ def test_convert_units_temp(self): def test_convert_units_other(self): """Test that other unit conversions are not implemented.""" # test arbitrary different conversion + from satpy.tests.utils import make_fake_scene from satpy.writers.ninjotiff import convert_units - - from ..utils import make_fake_scene sc = make_fake_scene( {"rain_rate": np.arange(25, dtype="f8").reshape(5, 5)}, common_attrs={"units": "millimeter/hour"}) From f189402adf5dfc4b2ad93ce7b7a5dfa45415b1ed Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 25 Oct 2023 08:43:32 +0200 Subject: [PATCH 0596/1416] Rename zarr_format to zarr_file_pattern --- satpy/modifiers/angles.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git 
a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index f4146b60d5..87a6b433e0 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -151,18 +151,18 @@ def __call__(self, *args, cache_dir: Optional[str] = None) -> Any: def _cache_and_read(self, args, cache_dir): sanitized_args = self._sanitize_args_func(*args) if self._sanitize_args_func is not None else args - zarr_format = self._get_zarr_format(sanitized_args, cache_dir) - zarr_paths = glob(zarr_format.format("*")) + zarr_file_pattern = self._get_zarr_file_pattern(sanitized_args, cache_dir) + zarr_paths = glob(zarr_file_pattern.format("*")) if not zarr_paths: # use sanitized arguments self._warn_if_irregular_input_chunks(args, sanitized_args) res_to_cache = self._func(*(sanitized_args)) - self._cache_results(res_to_cache, zarr_format) + self._cache_results(res_to_cache, zarr_file_pattern) # if we did any caching, let's load from the zarr files, so that future calls have the same name # re-calculate the cached paths - zarr_paths = sorted(glob(zarr_format.format("*"))) + zarr_paths = sorted(glob(zarr_file_pattern.format("*"))) if not zarr_paths: raise RuntimeError("Data was cached to disk but no files were found") @@ -170,12 +170,11 @@ def _cache_and_read(self, args, cache_dir): res = tuple(da.from_zarr(zarr_path, chunks=new_chunks) for zarr_path in zarr_paths) return res - def _get_zarr_format(self, sanitized_args, cache_dir): + def _get_zarr_file_pattern(self, sanitized_args, cache_dir): arg_hash = _hash_args(*sanitized_args, unhashable_types=self._uncacheable_arg_types) zarr_filename = self._zarr_pattern(arg_hash) cache_dir = self._get_cache_dir_from_config(cache_dir) - zarr_format = os.path.join(cache_dir, zarr_filename) - return zarr_format + return os.path.join(cache_dir, zarr_filename) @staticmethod def _get_cache_dir_from_config(cache_dir: Optional[str]) -> str: @@ -198,14 +197,14 @@ def _warn_if_irregular_input_chunks(args, modified_args): stacklevel=3 ) - def _cache_results(self, res, zarr_format): - os.makedirs(os.path.dirname(zarr_format), exist_ok=True) + def _cache_results(self, res, zarr_file_pattern): + os.makedirs(os.path.dirname(zarr_file_pattern), exist_ok=True) new_res = [] for idx, sub_res in enumerate(res): if not isinstance(sub_res, da.Array): raise ValueError("Zarr caching currently only supports dask " f"arrays. 
Got {type(sub_res)}") - zarr_path = zarr_format.format(idx) + zarr_path = zarr_file_pattern.format(idx) # See https://github.com/dask/dask/issues/8380 with dask.config.set({"optimization.fuse.active": False}): new_sub_res = sub_res.to_zarr(zarr_path, compute=False) From 95c880cc818e02ef8c1ac9026428facfa84e94d3 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 25 Oct 2023 15:01:18 +0300 Subject: [PATCH 0597/1416] Replace da.where() usage with arr.where() in DayNightCompositor --- satpy/composites/__init__.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index da4d1a9e5c..7ae2e99aeb 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -754,8 +754,8 @@ def _get_data_for_single_side_product(self, foreground_data, weights): def _mask_weights(self, weights): if "day" in self.day_night: - return da.where(weights != 0, weights, np.nan) - return da.where(weights != 1, weights, np.nan) + return weights.where(weights == 0, np.nan) + return weights.where(weights == 1, np.nan) def _get_day_night_data_for_single_side_product(self, foreground_data): if "day" in self.day_night: @@ -786,12 +786,12 @@ def _mask_weights_with_data(self, weights, day_data, night_data): else: mask = _get_weight_mask_for_daynight_product(weights, data_a, data_b) - return da.where(mask, weights, np.nan) + return weights.where(mask, np.nan) def _weight_data(self, day_data, night_data, weights, attrs): if not self.include_alpha: fill = 1 if self.day_night == "night_only" else 0 - weights = da.where(np.isnan(weights), fill, weights) + weights = weights.where(~np.isnan(weights), fill) data = [] for b in _get_band_names(day_data, night_data): From 7070cfbee5ad5a8dc1fc11ca8aecdf6f7ebb1c79 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 25 Oct 2023 16:01:30 +0300 Subject: [PATCH 0598/1416] Fix reversed arr.where() logic --- satpy/composites/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 7ae2e99aeb..8ed200df4c 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -754,8 +754,8 @@ def _get_data_for_single_side_product(self, foreground_data, weights): def _mask_weights(self, weights): if "day" in self.day_night: - return weights.where(weights == 0, np.nan) - return weights.where(weights == 1, np.nan) + return weights.where(weights != 0, np.nan) + return weights.where(weights != 1, np.nan) def _get_day_night_data_for_single_side_product(self, foreground_data): if "day" in self.day_night: From c5805510d1dc24b1afdd98813e7eede0654f4409 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 25 Oct 2023 16:07:45 +0300 Subject: [PATCH 0599/1416] Make sure there are no additional compute()s in DNC tests --- satpy/tests/test_composites.py | 89 +++++++++++++++++++++------------- 1 file changed, 56 insertions(+), 33 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index f056d2fa93..e390fba7c6 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -30,6 +30,7 @@ from pyresample import AreaDefinition import satpy +from satpy.tests.utils import CustomScheduler # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: @@ -431,18 +432,22 @@ def setUp(self): def test_daynight_sza(self): """Test compositor with both day and night portions when SZA data is included.""" from satpy.composites 
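# The string renamed in the commit above really is a file pattern rather than
# a "format" in the printf sense: it keeps a "{}" placeholder that gets filled
# two different ways. A minimal sketch, with a made-up cache dir and hash
# (zarr_file_pattern and the paths here are illustrative, not satpy output):
import os
from glob import glob

cache_dir = "/tmp/satpy_zarr_cache"  # hypothetical cache directory
zarr_file_pattern = os.path.join(cache_dir, "sun_angles_1a2b3c_{}.zarr")

# read side: fill the placeholder with a wildcard and glob for existing caches
zarr_paths = sorted(glob(zarr_file_pattern.format("*")))
if not zarr_paths:
    # write side: fill the same placeholder with an index, one path per array
    first_zarr_path = zarr_file_pattern.format(0)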
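# The pair of commits above is a compact lesson in .where() semantics, so a
# small self-contained example may help: functional where(cond, a, b) selects
# a where cond is true, while the DataArray method keeps the array where cond
# is true and fills the second argument elsewhere. The condition therefore
# carries over unchanged when switching spellings -- negating it, as the
# first commit did, inverts the mask (values below are made up):
import numpy as np
import xarray as xr

weights = xr.DataArray(np.array([0.0, 0.5, 1.0]))
functional = xr.where(weights != 0, weights, np.nan)  # [nan, 0.5, 1.0]
method = weights.where(weights != 0, np.nan)          # [nan, 0.5, 1.0]
inverted = weights.where(weights == 0, np.nan)        # [0.0, nan, nan] (the bug)
assert functional.equals(method)
assert not inverted.equals(method)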
import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_night") - res = comp((self.data_a, self.data_b, self.sza)) - res = res.compute() + + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = DayNightCompositor(name='dn_test', day_night="day_night") + res = comp((self.data_a, self.data_b, self.sza)) + res = res.compute() expected = np.array([[0., 0.22122352], [0.5, 1.]]) np.testing.assert_allclose(res.values[0], expected) def test_daynight_area(self): """Test compositor both day and night portions when SZA data is not provided.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_night") - res = comp((self.data_a, self.data_b)) - res = res.compute() + + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = DayNightCompositor(name='dn_test', day_night="day_night") + res = comp((self.data_a, self.data_b)) + res = res.compute() expected_channel = np.array([[0., 0.33164983], [0.66835017, 1.]]) for i in range(3): np.testing.assert_allclose(res.values[i], expected_channel) @@ -450,9 +455,11 @@ def test_daynight_area(self): def test_night_only_sza_with_alpha(self): """Test compositor with night portion with alpha band when SZA data is included.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=True) - res = comp((self.data_b, self.sza)) - res = res.compute() + + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=True) + res = comp((self.data_b, self.sza)) + res = res.compute() expected_red_channel = np.array([[np.nan, 0.], [0.5, 1.]]) expected_alpha = np.array([[0., 0.33296056], [1., 1.]]) np.testing.assert_allclose(res.values[0], expected_red_channel) @@ -461,9 +468,11 @@ def test_night_only_sza_with_alpha(self): def test_night_only_sza_without_alpha(self): """Test compositor with night portion without alpha band when SZA data is included.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=False) - res = comp((self.data_a, self.sza)) - res = res.compute() + + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=False) + res = comp((self.data_a, self.sza)) + res = res.compute() expected = np.array([[0., 0.11042631], [0.66835017, 1.]]) np.testing.assert_allclose(res.values[0], expected) assert 'A' not in res.bands @@ -471,9 +480,11 @@ def test_night_only_sza_without_alpha(self): def test_night_only_area_with_alpha(self): """Test compositor with night portion with alpha band when SZA data is not provided.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=True) - res = comp((self.data_b,)) - res = res.compute() + + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=True) + res = comp((self.data_b,)) + res = res.compute() expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]]) expected_alpha = np.array([[np.nan, 0.], [0., 0.]]) np.testing.assert_allclose(res.values[0], expected_l_channel) @@ -482,9 +493,11 @@ def test_night_only_area_with_alpha(self): def test_night_only_area_without_alpha(self): """Test compositor with night portion without alpha 
band when SZA data is not provided.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=False) - res = comp((self.data_b,)) - res = res.compute() + + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=False) + res = comp((self.data_b,)) + res = res.compute() expected = np.array([[np.nan, 0.], [0., 0.]]) np.testing.assert_allclose(res.values[0], expected) assert 'A' not in res.bands @@ -492,9 +505,11 @@ def test_night_only_area_without_alpha(self): def test_day_only_sza_with_alpha(self): """Test compositor with day portion with alpha band when SZA data is included.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=True) - res = comp((self.data_a, self.sza)) - res = res.compute() + + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=True) + res = comp((self.data_a, self.sza)) + res = res.compute() expected_red_channel = np.array([[0., 0.33164983], [0.66835017, 1.]]) expected_alpha = np.array([[1., 0.66703944], [0., 0.]]) np.testing.assert_allclose(res.values[0], expected_red_channel) @@ -503,9 +518,11 @@ def test_day_only_sza_with_alpha(self): def test_day_only_sza_without_alpha(self): """Test compositor with day portion without alpha band when SZA data is included.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=False) - res = comp((self.data_a, self.sza)) - res = res.compute() + + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=False) + res = comp((self.data_a, self.sza)) + res = res.compute() expected_channel_data = np.array([[0., 0.22122352], [0., 0.]]) for i in range(3): np.testing.assert_allclose(res.values[i], expected_channel_data) @@ -514,9 +531,11 @@ def test_day_only_sza_without_alpha(self): def test_day_only_area_with_alpha(self): """Test compositor with day portion with alpha_band when SZA data is not provided.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=True) - res = comp((self.data_a,)) - res = res.compute() + + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=True) + res = comp((self.data_a,)) + res = res.compute() expected_l_channel = np.array([[0., 0.33164983], [0.66835017, 1.]]) expected_alpha = np.array([[1., 1.], [1., 1.]]) np.testing.assert_allclose(res.values[0], expected_l_channel) @@ -525,9 +544,11 @@ def test_day_only_area_with_alpha(self): def test_day_only_area_with_alpha_and_missing_data(self): """Test compositor with day portion with alpha_band when SZA data is not provided and there is missing data.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=True) - res = comp((self.data_b,)) - res = res.compute() + + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=True) + res = comp((self.data_b,)) + res = res.compute() expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]]) expected_alpha = 
np.array([[np.nan, 1.], [1., 1.]]) np.testing.assert_allclose(res.values[0], expected_l_channel) @@ -536,9 +557,11 @@ def test_day_only_area_with_alpha_and_missing_data(self): def test_day_only_area_without_alpha(self): """Test compositor with day portion without alpha_band when SZA data is not provided.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=False) - res = comp((self.data_a,)) - res = res.compute() + + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=False) + res = comp((self.data_a,)) + res = res.compute() expected = np.array([[0., 0.33164983], [0.66835017, 1.]]) np.testing.assert_allclose(res.values[0], expected) assert 'A' not in res.bands From cdb1920393dff5c7e25669bd66f9eeb6131eb280 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 26 Oct 2023 10:56:20 +0300 Subject: [PATCH 0600/1416] Do not use DataArray as weight --- satpy/composites/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 8ed200df4c..62ac07aad8 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -792,7 +792,8 @@ def _weight_data(self, day_data, night_data, weights, attrs): if not self.include_alpha: fill = 1 if self.day_night == "night_only" else 0 weights = weights.where(~np.isnan(weights), fill) - + if isinstance(weights, xr.DataArray): + weights = weights.data data = [] for b in _get_band_names(day_data, night_data): # if self.day_night == "night_only" and self.include_alpha is False: From 67ae9e401a5c0e6b9952596043f0f619786eaf4e Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 26 Oct 2023 11:06:16 +0300 Subject: [PATCH 0601/1416] Remove old commented debug code --- satpy/composites/__init__.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 62ac07aad8..c6883f9ab9 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -796,8 +796,6 @@ def _weight_data(self, day_data, night_data, weights, attrs): weights = weights.data data = [] for b in _get_band_names(day_data, night_data): - # if self.day_night == "night_only" and self.include_alpha is False: - # import ipdb; ipdb.set_trace() day_band = _get_single_band_data(day_data, b) night_band = _get_single_band_data(night_data, b) # For day-only and night-only products only the alpha channel is weighted From c88c887023d4fca719faad5020810cabf8a32098 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 26 Oct 2023 12:24:56 +0200 Subject: [PATCH 0602/1416] Fix pytest style --- pyproject.toml | 2 +- satpy/tests/compositor_tests/test_abi.py | 9 +- satpy/tests/compositor_tests/test_agri.py | 9 +- satpy/tests/compositor_tests/test_sar.py | 18 +- satpy/tests/compositor_tests/test_spectral.py | 2 +- satpy/tests/compositor_tests/test_viirs.py | 10 +- satpy/tests/conftest.py | 6 +- .../enhancement_tests/test_enhancements.py | 8 +- satpy/tests/modifier_tests/test_angles.py | 2 +- satpy/tests/modifier_tests/test_crefl.py | 10 +- satpy/tests/modifier_tests/test_parallax.py | 18 +- satpy/tests/multiscene_tests/test_blend.py | 18 +- satpy/tests/multiscene_tests/test_misc.py | 24 +- .../multiscene_tests/test_save_animation.py | 42 ++-- .../reader_tests/gms/test_gms5_vissr_l1b.py | 70 +++--- .../gms/test_gms5_vissr_navigation.py | 36 +-- .../modis_tests/_modis_fixtures.py | 2 +- 
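# The DayNightCompositor tests above wrap every compositor call in
# dask.config.set(scheduler=...) with CustomScheduler from satpy.tests.utils,
# which fails the test when the data are computed more often than allowed.
# The idea, roughly, is a counting callable that dask invokes once per
# compute() -- sketched here under an assumed name, not satpy's exact
# implementation:
import dask


class CountingScheduler:
    """Fail when a dask graph is executed more than max_computes times."""

    def __init__(self, max_computes=1):
        self.max_computes = max_computes
        self.total_computes = 0

    def __call__(self, dsk, keys, **kwargs):
        # dask hands the graph to the configured scheduler on every compute()
        self.total_computes += 1
        if self.total_computes > self.max_computes:
            raise RuntimeError("Too many dask computations: %d" % self.total_computes)
        return dask.get(dsk, keys, **kwargs)

# usage mirroring the tests above:
#     with dask.config.set(scheduler=CountingScheduler(max_computes=1)):
#         res = comp((data_a, data_b, sza)).compute()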
.../modis_tests/test_modis_l1b.py | 36 +-- .../reader_tests/modis_tests/test_modis_l2.py | 22 +- satpy/tests/reader_tests/test_aapp_l1b.py | 8 +- satpy/tests/reader_tests/test_abi_l1b.py | 73 +++--- satpy/tests/reader_tests/test_acspo.py | 6 +- satpy/tests/reader_tests/test_ahi_hrit.py | 46 ++-- satpy/tests/reader_tests/test_ahi_hsd.py | 44 ++-- .../reader_tests/test_ahi_l1b_gridded_bin.py | 24 +- satpy/tests/reader_tests/test_ami_l1b.py | 37 ++- satpy/tests/reader_tests/test_amsr2_l1b.py | 40 ++- satpy/tests/reader_tests/test_amsr2_l2.py | 20 +- .../test_ascat_l2_soilmoisture_bufr.py | 12 +- satpy/tests/reader_tests/test_atms_l1b_nc.py | 24 +- .../tests/reader_tests/test_atms_sdr_hdf5.py | 2 +- .../reader_tests/test_avhrr_l1b_gaclac.py | 42 ++-- satpy/tests/reader_tests/test_clavrx.py | 102 ++++---- satpy/tests/reader_tests/test_cmsaf_claas.py | 28 +-- .../tests/reader_tests/test_electrol_hrit.py | 16 +- satpy/tests/reader_tests/test_eps_l1b.py | 4 +- satpy/tests/reader_tests/test_eum_base.py | 48 ++-- satpy/tests/reader_tests/test_fci_l1c_nc.py | 38 +-- satpy/tests/reader_tests/test_fci_l2_nc.py | 58 ++--- satpy/tests/reader_tests/test_fy4_base.py | 4 +- .../tests/reader_tests/test_generic_image.py | 102 ++++---- satpy/tests/reader_tests/test_geocat.py | 22 +- satpy/tests/reader_tests/test_geos_area.py | 27 +- satpy/tests/reader_tests/test_glm_l2.py | 20 +- .../reader_tests/test_goes_imager_hrit.py | 20 +- .../reader_tests/test_goes_imager_nc_eum.py | 13 +- .../reader_tests/test_goes_imager_nc_noaa.py | 85 +++---- satpy/tests/reader_tests/test_gpm_imerg.py | 24 +- satpy/tests/reader_tests/test_hdf4_utils.py | 34 +-- satpy/tests/reader_tests/test_hdf5_utils.py | 50 ++-- satpy/tests/reader_tests/test_hdfeos_base.py | 6 +- satpy/tests/reader_tests/test_hrit_base.py | 14 +- satpy/tests/reader_tests/test_hsaf_grib.py | 15 +- .../reader_tests/test_hy2_scat_l2b_h5.py | 44 ++-- satpy/tests/reader_tests/test_iasi_l2.py | 60 ++--- .../reader_tests/test_iasi_l2_so2_bufr.py | 7 +- satpy/tests/reader_tests/test_ici_l1b_nc.py | 19 +- .../reader_tests/test_insat3d_img_l1b_h5.py | 6 +- satpy/tests/reader_tests/test_li_l2_nc.py | 2 +- satpy/tests/reader_tests/test_meris_nc.py | 4 +- .../reader_tests/test_mimic_TPW2_lowres.py | 44 ++-- .../tests/reader_tests/test_mimic_TPW2_nc.py | 16 +- satpy/tests/reader_tests/test_msi_safe.py | 2 +- .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 2 +- satpy/tests/reader_tests/test_mws_l1b_nc.py | 17 +- satpy/tests/reader_tests/test_netcdf_utils.py | 53 ++-- satpy/tests/reader_tests/test_nucaps.py | 126 +++++----- satpy/tests/reader_tests/test_nwcsaf_msg.py | 27 +- satpy/tests/reader_tests/test_nwcsaf_nc.py | 56 ++--- .../reader_tests/test_oceancolorcci_l3_nc.py | 6 +- satpy/tests/reader_tests/test_olci_nc.py | 6 +- satpy/tests/reader_tests/test_omps_edr.py | 42 ++-- .../reader_tests/test_safe_sar_l2_ocn.py | 12 +- satpy/tests/reader_tests/test_satpy_cf_nc.py | 226 ++++++++--------- satpy/tests/reader_tests/test_scmi.py | 64 +++-- satpy/tests/reader_tests/test_seviri_base.py | 23 +- .../test_seviri_l1b_calibration.py | 12 +- .../reader_tests/test_seviri_l1b_hrit.py | 82 +++--- .../reader_tests/test_seviri_l1b_icare.py | 26 +- .../reader_tests/test_seviri_l1b_native.py | 44 ++-- .../tests/reader_tests/test_seviri_l1b_nc.py | 2 +- .../tests/reader_tests/test_seviri_l2_grib.py | 30 ++- satpy/tests/reader_tests/test_slstr_l1b.py | 22 +- satpy/tests/reader_tests/test_smos_l2_wind.py | 48 ++-- satpy/tests/reader_tests/test_tropomi_l2.py | 62 +++-- 
satpy/tests/reader_tests/test_utils.py | 54 ++-- satpy/tests/reader_tests/test_vii_base_nc.py | 66 ++--- satpy/tests/reader_tests/test_vii_l1b_nc.py | 12 +- satpy/tests/reader_tests/test_vii_l2_nc.py | 4 +- satpy/tests/reader_tests/test_vii_utils.py | 10 +- satpy/tests/reader_tests/test_vii_wv_nc.py | 4 +- .../reader_tests/test_viirs_atms_utils.py | 5 +- .../tests/reader_tests/test_viirs_compact.py | 6 +- satpy/tests/reader_tests/test_viirs_edr.py | 2 +- .../test_viirs_edr_active_fires.py | 92 +++---- .../reader_tests/test_viirs_edr_flood.py | 12 +- satpy/tests/reader_tests/test_viirs_sdr.py | 172 +++++++------ .../reader_tests/test_viirs_vgac_l1c_nc.py | 8 +- satpy/tests/reader_tests/test_virr_l1b.py | 58 +++-- satpy/tests/scene_tests/test_conversions.py | 6 +- satpy/tests/scene_tests/test_data_access.py | 5 +- satpy/tests/scene_tests/test_init.py | 6 +- satpy/tests/scene_tests/test_resampling.py | 2 +- satpy/tests/scene_tests/test_saving.py | 3 +- satpy/tests/test_composites.py | 191 +++++++------- satpy/tests/test_config.py | 12 +- satpy/tests/test_crefl_utils.py | 8 +- satpy/tests/test_data_download.py | 2 +- satpy/tests/test_dataset.py | 18 +- satpy/tests/test_demo.py | 40 +-- satpy/tests/test_dependency_tree.py | 14 +- satpy/tests/test_file_handlers.py | 26 +- satpy/tests/test_modifiers.py | 14 +- satpy/tests/test_readers.py | 196 +++++++-------- satpy/tests/test_resample.py | 238 +++++++++--------- satpy/tests/test_utils.py | 190 +++++++------- satpy/tests/test_writers.py | 52 ++-- satpy/tests/test_yaml_reader.py | 159 ++++++------ satpy/tests/utils.py | 4 +- satpy/tests/writer_tests/test_cf.py | 22 +- satpy/tests/writer_tests/test_mitiff.py | 47 ++-- satpy/tests/writer_tests/test_ninjogeotiff.py | 23 +- satpy/tests/writer_tests/test_ninjotiff.py | 6 +- satpy/tests/writer_tests/test_simple_image.py | 2 +- satpy/tests/writer_tests/test_utils.py | 2 +- 125 files changed, 2154 insertions(+), 2283 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 547cfd146c..64c036b07f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ line_length = 120 [tool.ruff] # See https://docs.astral.sh/ruff/rules/ # select = ["E", "W", "F", "I", "D", "S", "B", "A", "PT", "Q", "TID", "C90", "T10", "N"] -select = ["E", "W", "F", "I", "TID", "C90", "Q"] +select = ["E", "W", "F", "I", "PT", "TID", "C90", "Q"] ignore = ["B905"] # only available from python 3.10 line-length = 120 diff --git a/satpy/tests/compositor_tests/test_abi.py b/satpy/tests/compositor_tests/test_abi.py index 79c5ae99ed..7c29a12c63 100644 --- a/satpy/tests/compositor_tests/test_abi.py +++ b/satpy/tests/compositor_tests/test_abi.py @@ -57,10 +57,9 @@ def test_simulated_green(self): dims=("y", "x"), attrs={"name": "C03", "area": area}) res = comp((c01, c02, c03)) - self.assertIsInstance(res, xr.DataArray) - self.assertIsInstance(res.data, da.Array) - self.assertEqual(res.attrs["name"], "green") - self.assertEqual(res.attrs["standard_name"], - "toa_bidirectional_reflectance") + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) + assert res.attrs["name"] == "green" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.compute() np.testing.assert_allclose(data, 0.28025) diff --git a/satpy/tests/compositor_tests/test_agri.py b/satpy/tests/compositor_tests/test_agri.py index 27a566a82c..b477dc53cb 100644 --- a/satpy/tests/compositor_tests/test_agri.py +++ b/satpy/tests/compositor_tests/test_agri.py @@ -54,10 +54,9 @@ def test_simulated_red(self): dims=("y", "x"), 
attrs={"name": "C02", "area": area}) res = comp((c01, c02)) - self.assertIsInstance(res, xr.DataArray) - self.assertIsInstance(res.data, da.Array) - self.assertEqual(res.attrs["name"], "red") - self.assertEqual(res.attrs["standard_name"], - "toa_bidirectional_reflectance") + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) + assert res.attrs["name"] == "red" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.compute() np.testing.assert_allclose(data, 0.24252874) diff --git a/satpy/tests/compositor_tests/test_sar.py b/satpy/tests/compositor_tests/test_sar.py index d7cd2a9c80..30c342ce44 100644 --- a/satpy/tests/compositor_tests/test_sar.py +++ b/satpy/tests/compositor_tests/test_sar.py @@ -43,11 +43,10 @@ def test_sar_ice(self): attrs={"name": "hv"}) res = comp((hh, hv)) - self.assertIsInstance(res, xr.DataArray) - self.assertIsInstance(res.data, da.Array) - self.assertEqual(res.attrs["name"], "sar_ice") - self.assertEqual(res.attrs["standard_name"], - "sar-ice") + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) + assert res.attrs["name"] == "sar_ice" + assert res.attrs["standard_name"] == "sar-ice" data = res.compute() np.testing.assert_allclose(data.sel(bands="R"), 31.58280822) np.testing.assert_allclose(data.sel(bands="G"), 159869.56789876) @@ -73,11 +72,10 @@ def test_sar_ice_log(self): attrs={"name": "hv"}) res = comp((hh, hv)) - self.assertIsInstance(res, xr.DataArray) - self.assertIsInstance(res.data, da.Array) - self.assertEqual(res.attrs["name"], "sar_ice_log") - self.assertEqual(res.attrs["standard_name"], - "sar-ice-log") + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) + assert res.attrs["name"] == "sar_ice_log" + assert res.attrs["standard_name"] == "sar-ice-log" data = res.compute() np.testing.assert_allclose(data.sel(bands="R"), -20) np.testing.assert_allclose(data.sel(bands="G"), -4.6) diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index 4800f12a7b..80f4dd8d93 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -39,7 +39,7 @@ def test_bad_lengths(self): """Test that error is raised if the amount of channels to blend does not match the number of weights.""" comp = SpectralBlender("blended_channel", fractions=(0.3, 0.7), prerequisites=(0.51, 0.85), standard_name="toa_bidirectional_reflectance") - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="fractions and projectables must have the same length."): comp((self.c01, self.c02, self.c03)) def test_spectral_blender(self): diff --git a/satpy/tests/compositor_tests/test_viirs.py b/satpy/tests/compositor_tests/test_viirs.py index 969f4579ef..1641e4248b 100644 --- a/satpy/tests/compositor_tests/test_viirs.py +++ b/satpy/tests/compositor_tests/test_viirs.py @@ -29,7 +29,7 @@ class TestVIIRSComposites: """Test various VIIRS-specific composites.""" - @pytest.fixture + @pytest.fixture() def area(self): """Return fake area for use with DNB tests.""" rows = 5 @@ -42,7 +42,7 @@ def area(self): (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) return area - @pytest.fixture + @pytest.fixture() def dnb(self, area): """Return fake channel 1 data for DNB tests.""" dnb = np.zeros(area.shape) + 0.25 @@ -55,7 +55,7 @@ def dnb(self, area): "start_time": datetime(2020, 1, 1, 12, 0, 0)}) return c01 - @pytest.fixture + @pytest.fixture() def sza(self, area): """Return fake sza dataset for 
DNB tests.""" # data changes by row, sza changes by col for testing @@ -69,7 +69,7 @@ def sza(self, area): "start_time": datetime(2020, 1, 1, 12, 0, 0)}) return c02 - @pytest.fixture + @pytest.fixture() def lza(self, area): """Return fake lunal zenith angle dataset for DNB tests.""" lza = np.zeros(area.shape) + 70.0 @@ -141,7 +141,7 @@ def test_hncc_dnb(self, area, dnb, sza, lza): 9.50784532e-03, 1.42617433e-02, 1.50001560e+03, 3.00001560e+03, 4.50001560e+03]) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Expected .*, got 2"): comp((dnb, sza)) def test_hncc_dnb_nomoonpha(self, area, dnb, sza, lza): diff --git a/satpy/tests/conftest.py b/satpy/tests/conftest.py index 842dade04e..754b11ffcd 100644 --- a/satpy/tests/conftest.py +++ b/satpy/tests/conftest.py @@ -30,7 +30,7 @@ @pytest.fixture(autouse=True) -def reset_satpy_config(tmpdir): +def _reset_satpy_config(tmpdir): """Set satpy config to logical defaults for tests.""" test_config = { "cache_dir": str(tmpdir / "cache"), @@ -44,13 +44,13 @@ def reset_satpy_config(tmpdir): @pytest.fixture(autouse=True) -def clear_function_caches(): +def _clear_function_caches(): """Clear out global function-level caches that may cause conflicts between tests.""" from satpy.composites.config_loader import load_compositor_configs_for_sensor load_compositor_configs_for_sensor.cache_clear() -@pytest.fixture +@pytest.fixture() def include_test_etc(): """Tell Satpy to use the config 'etc' directory from the tests directory.""" with satpy.config.set(config_path=[TEST_ETC_DIR]): diff --git a/satpy/tests/enhancement_tests/test_enhancements.py b/satpy/tests/enhancement_tests/test_enhancements.py index 964e634ba4..ca0d56f11f 100644 --- a/satpy/tests/enhancement_tests/test_enhancements.py +++ b/satpy/tests/enhancement_tests/test_enhancements.py @@ -365,7 +365,7 @@ def test_cmap_bad_mode(self, real_mode, forced_mode, filename_suffix): cmap_data = _generate_cmap_test_data(None, real_mode) _write_cmap_to_file(cmap_filename, cmap_data) # Force colormap_mode VRGBA to RGBA and we should see an exception - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Unexpected colormap shape for mode .*"): create_colormap({"filename": cmap_filename, "colormap_mode": forced_mode}) def test_cmap_from_file_bad_shape(self): @@ -381,7 +381,7 @@ def test_cmap_from_file_bad_shape(self): [255], ])) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Unexpected colormap shape for mode 'None'"): create_colormap({"filename": cmap_filename}) def test_cmap_from_config_path(self, tmp_path): @@ -415,7 +415,7 @@ def test_cmap_from_trollimage(self): def test_cmap_no_colormap(self): """Test that being unable to create a colormap raises an error.""" from satpy.enhancements import create_colormap - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Unknown colormap format: .*"): create_colormap({}) def test_cmap_list(self): @@ -484,7 +484,7 @@ def func(dask_array): assert res.shape == arr.shape -@pytest.fixture +@pytest.fixture() def fake_area(): """Return a fake 2×2 area.""" from pyresample.geometry import create_area_def diff --git a/satpy/tests/modifier_tests/test_angles.py b/satpy/tests/modifier_tests/test_angles.py index 7bea78b7d1..2ebebacbc7 100644 --- a/satpy/tests/modifier_tests/test_angles.py +++ b/satpy/tests/modifier_tests/test_angles.py @@ -318,7 +318,7 @@ def test_cached_result_numpy_fails(self, tmp_path): def _fake_func(shape, chunks): return np.zeros(shape) - with pytest.raises(ValueError), \ + with 
pytest.raises(ValueError, match="Zarr caching currently only supports dask arrays. Got .*"), \ satpy.config.set(cache_lonlats=True, cache_dir=str(tmp_path)): _fake_func((5, 5), ((5,), (5,))) diff --git a/satpy/tests/modifier_tests/test_crefl.py b/satpy/tests/modifier_tests/test_crefl.py index ab42f85155..dc9f4a232a 100644 --- a/satpy/tests/modifier_tests/test_crefl.py +++ b/satpy/tests/modifier_tests/test_crefl.py @@ -194,7 +194,7 @@ def test_reflectance_corrector_abi(self, name, wavelength, resolution, exp_mean, np.testing.assert_allclose(unique, exp_unique, rtol=1e-5) @pytest.mark.parametrize( - "url,dem_mock_cm,dem_sds", + ("url", "dem_mock_cm", "dem_sds"), [ (None, mock_cmgdem, "average elevation"), ("CMGDEM.hdf", mock_cmgdem, "averaged elevation"), @@ -341,12 +341,12 @@ def test_reflectance_corrector_bad_prereqs(self): """Test ReflectanceCorrector modifier with wrong number of inputs.""" from satpy.modifiers._crefl import ReflectanceCorrector ref_cor = ReflectanceCorrector("test") - pytest.raises(ValueError, ref_cor, [1], [2, 3, 4]) - pytest.raises(ValueError, ref_cor, [1, 2, 3, 4], []) - pytest.raises(ValueError, ref_cor, [], [1, 2, 3, 4]) + pytest.raises(ValueError, ref_cor, [1], [2, 3, 4], match="Not sure how to handle provided dependencies..*") + pytest.raises(ValueError, ref_cor, [1, 2, 3, 4], [], match="Not sure how to handle provided dependencies..*") + pytest.raises(ValueError, ref_cor, [], [1, 2, 3, 4], match="Not sure how to handle provided dependencies..*") @pytest.mark.parametrize( - "url,dem_mock_cm,dem_sds", + ("url", "dem_mock_cm", "dem_sds"), [ (None, mock_cmgdem, "average elevation"), ("CMGDEM.hdf", mock_cmgdem, "averaged elevation"), diff --git a/satpy/tests/modifier_tests/test_parallax.py b/satpy/tests/modifier_tests/test_parallax.py index 8fa358ec35..f1385e9b18 100644 --- a/satpy/tests/modifier_tests/test_parallax.py +++ b/satpy/tests/modifier_tests/test_parallax.py @@ -39,7 +39,7 @@ # - request -@pytest.fixture +@pytest.fixture() def fake_tle(): """Produce fake Two Line Element (TLE) object from pyorbital.""" return pyorbital.tlefile.Tle( @@ -109,7 +109,7 @@ def test_get_parallax_corrected_lonlats_clearsky(self): assert np.isnan(corr_lon).all() assert np.isnan(corr_lat).all() - @pytest.mark.parametrize("lat,lon", [(0, 0), (0, 40), (0, 179.9)]) + @pytest.mark.parametrize(("lat", "lon"), [(0, 0), (0, 40), (0, 179.9)]) @pytest.mark.parametrize("resolution", [0.01, 0.5, 10]) def test_get_parallax_corrected_lonlats_cloudy_ssp(self, lat, lon, resolution): """Test parallax correction for fully cloudy scene at SSP.""" @@ -219,7 +219,7 @@ def test_init_parallaxcorrection(self, center, sizes, resolution): pc = ParallaxCorrection(fake_area) assert pc.base_area == fake_area - @pytest.mark.parametrize("sat_pos,ar_pos", + @pytest.mark.parametrize(("sat_pos", "ar_pos"), [((0, 0), (0, 0)), ((0, 0), (40, 0))]) @pytest.mark.parametrize("resolution", [0.01, 0.5, 10]) def test_correct_area_clearsky(self, sat_pos, ar_pos, resolution, caplog): @@ -247,7 +247,7 @@ def test_correct_area_clearsky(self, sat_pos, ar_pos, resolution, caplog): new_area.get_lonlats(), fake_area_small.get_lonlats()) - @pytest.mark.parametrize("lat,lon", + @pytest.mark.parametrize(("lat", "lon"), [(0, 0), (0, 40), (0, 180), (90, 0)]) # relevant for Арктика satellites @pytest.mark.parametrize("resolution", [0.01, 0.5, 10]) @@ -345,7 +345,7 @@ def test_correct_area_partlycloudy(self, daskify): [49.86860622, 49.9097198, 49.90971976, 49.9097198, 49.88231496]]), rtol=1e-6) - @pytest.mark.parametrize("res1,res2", 
[(0.08, 0.3), (0.3, 0.08)]) + @pytest.mark.parametrize(("res1", "res2"), [(0.08, 0.3), (0.3, 0.08)]) def test_correct_area_clearsky_different_resolutions(self, res1, res2): """Test clearsky correction when areas have different resolutions.""" from satpy.modifiers.parallax import ParallaxCorrection @@ -564,7 +564,7 @@ def test_parallax_modifier_interface_with_cloud(self): # do so after parallax correction assert not (res.diff("x") < 0).any() - @pytest.fixture + @pytest.fixture() def test_area(self, request): """Produce test area for parallax correction unit tests. @@ -711,12 +711,12 @@ def test_modifier_interface_cloud_moves_to_observer(self, cth, use_dask, test_ar class TestParallaxCorrectionSceneLoad: """Test that scene load interface works as expected.""" - @pytest.fixture + @pytest.fixture() def yaml_code(self): """Return YAML code for parallax_corrected_VIS006.""" return _test_yaml_code - @pytest.fixture + @pytest.fixture() def conf_file(self, yaml_code, tmp_path): """Produce a fake configuration file.""" conf_file = tmp_path / "test.yaml" @@ -724,7 +724,7 @@ def conf_file(self, yaml_code, tmp_path): fp.write(yaml_code) return conf_file - @pytest.fixture + @pytest.fixture() def fake_scene(self, yaml_code): """Produce fake scene and prepare fake composite config.""" from satpy import Scene diff --git a/satpy/tests/multiscene_tests/test_blend.py b/satpy/tests/multiscene_tests/test_blend.py index c6e65d4615..6bd32ebc98 100644 --- a/satpy/tests/multiscene_tests/test_blend.py +++ b/satpy/tests/multiscene_tests/test_blend.py @@ -59,7 +59,7 @@ def _get_expected_stack_blend(scene1: Scene, scene2: Scene) -> xr.DataArray: return expected -@pytest.fixture +@pytest.fixture() def test_area(): """Get area definition used by test DataArrays.""" return _create_test_area() @@ -77,7 +77,7 @@ def image_mode(request): return request.param -@pytest.fixture +@pytest.fixture() def cloud_type_data_array1(test_area, data_type, image_mode): """Get DataArray for cloud type in the first test Scene.""" dsid1 = make_dataid( @@ -107,7 +107,7 @@ def cloud_type_data_array1(test_area, data_type, image_mode): return data_arr -@pytest.fixture +@pytest.fixture() def cloud_type_data_array2(test_area, data_type, image_mode): """Get DataArray for cloud type in the second test Scene.""" dsid1 = make_dataid( @@ -133,7 +133,7 @@ def cloud_type_data_array2(test_area, data_type, image_mode): return data_arr -@pytest.fixture +@pytest.fixture() def scene1_with_weights(cloud_type_data_array1, test_area): """Create first test scene with a dataset of weights.""" from satpy import Scene @@ -160,7 +160,7 @@ def scene1_with_weights(cloud_type_data_array1, test_area): return scene, [wgt1, wgt2] -@pytest.fixture +@pytest.fixture() def scene2_with_weights(cloud_type_data_array2, test_area): """Create second test scene.""" from satpy import Scene @@ -183,7 +183,7 @@ def scene2_with_weights(cloud_type_data_array2, test_area): return scene, [wgt1, wgt2] -@pytest.fixture +@pytest.fixture() def multi_scene_and_weights(scene1_with_weights, scene2_with_weights): """Create small multi-scene for testing.""" from satpy import MultiScene @@ -193,7 +193,7 @@ def multi_scene_and_weights(scene1_with_weights, scene2_with_weights): return MultiScene([scene1, scene2]), [weights1, weights2] -@pytest.fixture +@pytest.fixture() def groups(): """Get group definitions for the MultiScene.""" return { @@ -237,7 +237,7 @@ def test_blend_two_scenes_bad_blend_type(self, multi_scene_and_weights, groups): weights = [weights[0][0], weights[1][0]] stack_func = 
partial(stack, weights=weights, blend_type="i_dont_exist") - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Unknown weighted blending type: .*.Expected one of: .*"): multi_scene.blend(blend_function=stack_func) @pytest.mark.parametrize( @@ -283,7 +283,7 @@ def test_blend_two_scenes_using_stack_weighted(self, multi_scene_and_weights, gr assert result.attrs["start_time"] == datetime(2023, 1, 16, 11, 11, 7, 250000) assert result.attrs["end_time"] == datetime(2023, 1, 16, 11, 20, 11, 950000) - @pytest.fixture + @pytest.fixture() def datasets_and_weights(self): """X-Array datasets with area definition plus weights for input to tests.""" shape = (8, 12) diff --git a/satpy/tests/multiscene_tests/test_misc.py b/satpy/tests/multiscene_tests/test_misc.py index 190045dad0..9f6e400e31 100644 --- a/satpy/tests/multiscene_tests/test_misc.py +++ b/satpy/tests/multiscene_tests/test_misc.py @@ -58,19 +58,17 @@ def test_properties(self): scenes[1]["ds3"] = _create_test_dataset("ds3") mscn = MultiScene(scenes) - self.assertSetEqual(mscn.loaded_dataset_ids, - {ds1_id, ds2_id, ds3_id}) - self.assertSetEqual(mscn.shared_dataset_ids, {ds1_id, ds2_id}) - self.assertTrue(mscn.all_same_area) + assert mscn.loaded_dataset_ids == {ds1_id, ds2_id, ds3_id} + assert mscn.shared_dataset_ids == {ds1_id, ds2_id} + assert mscn.all_same_area bigger_area = _create_test_area(shape=(20, 40)) scenes[0]["ds4"] = _create_test_dataset("ds4", shape=(20, 40), area=bigger_area) - self.assertSetEqual(mscn.loaded_dataset_ids, - {ds1_id, ds2_id, ds3_id, ds4_id}) - self.assertSetEqual(mscn.shared_dataset_ids, {ds1_id, ds2_id}) - self.assertFalse(mscn.all_same_area) + assert mscn.loaded_dataset_ids == {ds1_id, ds2_id, ds3_id, ds4_id} + assert mscn.shared_dataset_ids == {ds1_id, ds2_id} + assert not mscn.all_same_area def test_from_files(self): """Test creating a multiscene from multiple files.""" @@ -133,7 +131,7 @@ def test_from_files(self): class TestMultiSceneGrouping: """Test dataset grouping in MultiScene.""" - @pytest.fixture + @pytest.fixture() def scene1(self): """Create first test scene.""" from satpy import Scene @@ -154,7 +152,7 @@ def scene1(self): scene[dsid2] = _create_test_dataset(name="ds2") return scene - @pytest.fixture + @pytest.fixture() def scene2(self): """Create second test scene.""" from satpy import Scene @@ -175,13 +173,13 @@ def scene2(self): scene[dsid2] = _create_test_dataset(name="ds4") return scene - @pytest.fixture + @pytest.fixture() def multi_scene(self, scene1, scene2): """Create small multi scene for testing.""" from satpy import MultiScene return MultiScene([scene1, scene2]) - @pytest.fixture + @pytest.fixture() def groups(self): """Get group definitions for the MultiScene.""" return { @@ -201,5 +199,5 @@ def test_fails_to_add_multiple_datasets_from_the_same_scene_to_a_group(self, mul """Test that multiple datasets from the same scene in one group fails.""" groups = {DataQuery(name="mygroup"): ["ds1", "ds2"]} multi_scene.group(groups) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Cannot add multiple datasets from a scene to the same group"): next(multi_scene.scenes) diff --git a/satpy/tests/multiscene_tests/test_save_animation.py b/satpy/tests/multiscene_tests/test_save_animation.py index 2ea41f18f4..6807446bbb 100644 --- a/satpy/tests/multiscene_tests/test_save_animation.py +++ b/satpy/tests/multiscene_tests/test_save_animation.py @@ -83,11 +83,11 @@ def test_save_mp4_distributed(self): # 2 saves for the first scene + 1 black frame # 3 for the second 
scene - self.assertEqual(writer_mock.append_data.call_count, 3 + 3) + assert writer_mock.append_data.call_count == 3 + 3 filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] - self.assertEqual(filenames[0], "test_save_mp4_ds1_20180101_00_20180102_12.mp4") - self.assertEqual(filenames[1], "test_save_mp4_ds2_20180101_00_20180102_12.mp4") - self.assertEqual(filenames[2], "test_save_mp4_ds3_20180102_00_20180102_12.mp4") + assert filenames[0] == "test_save_mp4_ds1_20180101_00_20180102_12.mp4" + assert filenames[1] == "test_save_mp4_ds2_20180101_00_20180102_12.mp4" + assert filenames[2] == "test_save_mp4_ds3_20180102_00_20180102_12.mp4" # Test no distributed client found mscn = MultiScene(scenes) @@ -106,11 +106,11 @@ def test_save_mp4_distributed(self): # 2 saves for the first scene + 1 black frame # 3 for the second scene - self.assertEqual(writer_mock.append_data.call_count, 3 + 3) + assert writer_mock.append_data.call_count == 3 + 3 filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] - self.assertEqual(filenames[0], "test_save_mp4_ds1_20180101_00_20180102_12.mp4") - self.assertEqual(filenames[1], "test_save_mp4_ds2_20180101_00_20180102_12.mp4") - self.assertEqual(filenames[2], "test_save_mp4_ds3_20180102_00_20180102_12.mp4") + assert filenames[0] == "test_save_mp4_ds1_20180101_00_20180102_12.mp4" + assert filenames[1] == "test_save_mp4_ds2_20180101_00_20180102_12.mp4" + assert filenames[2] == "test_save_mp4_ds3_20180102_00_20180102_12.mp4" @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image) def test_save_mp4_no_distributed(self): @@ -146,11 +146,11 @@ def test_save_mp4_no_distributed(self): # 2 saves for the first scene + 1 black frame # 3 for the second scene - self.assertEqual(writer_mock.append_data.call_count, 3 + 3) + assert writer_mock.append_data.call_count == 3 + 3 filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] - self.assertEqual(filenames[0], "test_save_mp4_ds1_20180101_00_20180102_12.mp4") - self.assertEqual(filenames[1], "test_save_mp4_ds2_20180101_00_20180102_12.mp4") - self.assertEqual(filenames[2], "test_save_mp4_ds3_20180102_00_20180102_12.mp4") + assert filenames[0] == "test_save_mp4_ds1_20180101_00_20180102_12.mp4" + assert filenames[1] == "test_save_mp4_ds2_20180101_00_20180102_12.mp4" + assert filenames[2] == "test_save_mp4_ds3_20180102_00_20180102_12.mp4" @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image) def test_save_datasets_simple(self): @@ -181,7 +181,7 @@ def test_save_datasets_simple(self): writer="simple_image") # 2 for each scene - self.assertEqual(save_datasets.call_count, 2) + assert save_datasets.call_count == 2 @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image) def test_save_datasets_distributed_delayed(self): @@ -216,7 +216,7 @@ def test_save_datasets_distributed_delayed(self): writer="simple_image") # 2 for each scene - self.assertEqual(save_datasets.call_count, 2) + assert save_datasets.call_count == 2 @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image) def test_save_datasets_distributed_source_target(self): @@ -290,13 +290,13 @@ def test_crop(self): # by lon/lat bbox new_mscn = mscn.crop(ll_bbox=(-20., -5., 0, 0)) new_scn1 = list(new_mscn.scenes)[0] - self.assertIn("1", new_scn1) - self.assertIn("2", new_scn1) - self.assertIn("3", new_scn1) - self.assertTupleEqual(new_scn1["1"].shape, (y_size, x_size)) - 
self.assertTupleEqual(new_scn1["2"].shape, (y_size, x_size)) - self.assertTupleEqual(new_scn1["3"].shape, (184, 714)) - self.assertTupleEqual(new_scn1["4"].shape, (92, 357)) + assert "1" in new_scn1 + assert "2" in new_scn1 + assert "3" in new_scn1 + assert new_scn1["1"].shape == (y_size, x_size) + assert new_scn1["2"].shape == (y_size, x_size) + assert new_scn1["3"].shape == (184, 714) + assert new_scn1["4"].shape == (92, 357) @mock.patch("satpy.multiscene._multiscene.get_enhanced_image") diff --git a/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py index 486eba370b..4638bcfca3 100644 --- a/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py @@ -25,7 +25,7 @@ @pytest.fixture(params=[False, True], autouse=True) -def disable_jit(request, monkeypatch): +def _disable_jit(request, monkeypatch): """Run tests with jit enabled and disabled. Reason: Coverage report is only accurate with jit disabled. @@ -58,7 +58,7 @@ class TestFileHandler: """Test VISSR file handler.""" @pytest.fixture(autouse=True) - def patch_number_of_pixels_per_scanline(self, monkeypatch): + def _patch_number_of_pixels_per_scanline(self, monkeypatch): """Patch data types so that each scanline has two pixels.""" num_pixels = 2 IMAGE_DATA_BLOCK_IR = np.dtype( @@ -116,12 +116,12 @@ def with_compression(self, request): """Enable compression.""" return request.param - @pytest.fixture + @pytest.fixture() def open_function(self, with_compression): """Get open function for writing test files.""" return gzip.open if with_compression else open - @pytest.fixture + @pytest.fixture() def vissr_file(self, dataset_id, file_contents, open_function, tmp_path): """Get test VISSR file.""" filename = tmp_path / "vissr_file" @@ -130,7 +130,7 @@ def vissr_file(self, dataset_id, file_contents, open_function, tmp_path): writer.write(filename, file_contents) return filename - @pytest.fixture + @pytest.fixture() def file_contents(self, control_block, image_parameters, image_data): """Get VISSR file contents.""" return { @@ -139,7 +139,7 @@ def file_contents(self, control_block, image_parameters, image_data): "image_data": image_data, } - @pytest.fixture + @pytest.fixture() def control_block(self, dataset_id): """Get VISSR control block.""" block_size = {"IR1": 16, "VIS": 4} @@ -148,7 +148,7 @@ def control_block(self, dataset_id): ctrl_block["available_block_size_of_image_data"] = 2 return ctrl_block - @pytest.fixture + @pytest.fixture() def image_parameters(self, mode_block, cal_params, nav_params): """Get VISSR image parameters.""" image_params = {"mode": mode_block} @@ -156,7 +156,7 @@ def image_parameters(self, mode_block, cal_params, nav_params): image_params.update(nav_params) return image_params - @pytest.fixture + @pytest.fixture() def nav_params( self, coordinate_conversion, @@ -170,7 +170,7 @@ def nav_params( nav_params.update(coordinate_conversion) return nav_params - @pytest.fixture + @pytest.fixture() def cal_params( self, vis_calibration, @@ -186,7 +186,7 @@ def cal_params( "wv_calibration": wv_calibration, } - @pytest.fixture + @pytest.fixture() def mode_block(self): """Get VISSR mode block.""" mode = np.zeros(1, dtype=fmt.MODE_BLOCK) @@ -201,7 +201,7 @@ def mode_block(self): mode["vis_frame_parameters"]["number_of_pixels"] = 2 return mode - @pytest.fixture + @pytest.fixture() def coordinate_conversion(self, coord_conv, simple_coord_conv_table): """Get all coordinate conversion parameters.""" return { @@ -209,7 +209,7 @@ def 
coordinate_conversion(self, coord_conv, simple_coord_conv_table): "simple_coordinate_conversion_table": simple_coord_conv_table } - @pytest.fixture + @pytest.fixture() def coord_conv(self): """Get parameters for coordinate conversions. @@ -255,14 +255,14 @@ def coord_conv(self): conv["orbital_parameters"]["latitude_of_ssp"] = 1.0 return conv - @pytest.fixture + @pytest.fixture() def attitude_prediction(self): """Get attitude prediction.""" att_pred = np.zeros(1, dtype=fmt.ATTITUDE_PREDICTION) att_pred["data"] = real_world.ATTITUDE_PREDICTION return {"attitude_prediction": att_pred} - @pytest.fixture + @pytest.fixture() def orbit_prediction(self, orbit_prediction_1, orbit_prediction_2): """Get predictions of orbital parameters.""" return { @@ -270,21 +270,21 @@ def orbit_prediction(self, orbit_prediction_1, orbit_prediction_2): "orbit_prediction_2": orbit_prediction_2 } - @pytest.fixture + @pytest.fixture() def orbit_prediction_1(self): """Get first block of orbit prediction data.""" orb_pred = np.zeros(1, dtype=fmt.ORBIT_PREDICTION) orb_pred["data"] = real_world.ORBIT_PREDICTION_1 return orb_pred - @pytest.fixture + @pytest.fixture() def orbit_prediction_2(self): """Get second block of orbit prediction data.""" orb_pred = np.zeros(1, dtype=fmt.ORBIT_PREDICTION) orb_pred["data"] = real_world.ORBIT_PREDICTION_2 return orb_pred - @pytest.fixture + @pytest.fixture() def vis_calibration(self): """Get VIS calibration block.""" vis_cal = np.zeros(1, dtype=fmt.VIS_CALIBRATION) @@ -292,7 +292,7 @@ def vis_calibration(self): table[0, 0:4] = np.array([0, 0.25, 0.5, 1]) return vis_cal - @pytest.fixture + @pytest.fixture() def ir1_calibration(self): """Get IR1 calibration block.""" cal = np.zeros(1, dtype=fmt.IR_CALIBRATION) @@ -300,32 +300,32 @@ def ir1_calibration(self): table[0, 0:4] = np.array([0, 100, 200, 300]) return cal - @pytest.fixture + @pytest.fixture() def ir2_calibration(self): """Get IR2 calibration block.""" cal = np.zeros(1, dtype=fmt.IR_CALIBRATION) return cal - @pytest.fixture + @pytest.fixture() def wv_calibration(self): """Get WV calibration block.""" cal = np.zeros(1, dtype=fmt.IR_CALIBRATION) return cal - @pytest.fixture + @pytest.fixture() def simple_coord_conv_table(self): """Get simple coordinate conversion table.""" table = np.zeros(1, dtype=fmt.SIMPLE_COORDINATE_CONVERSION_TABLE) table["satellite_height"] = 123457.0 return table - @pytest.fixture + @pytest.fixture() def image_data(self, dataset_id, image_data_ir1, image_data_vis): """Get VISSR image data.""" data = {"IR1": image_data_ir1, "VIS": image_data_vis} return data[dataset_id["name"]] - @pytest.fixture + @pytest.fixture() def image_data_ir1(self): """Get IR1 image data.""" image_data = np.zeros(2, fmt.IMAGE_DATA_BLOCK_IR) @@ -336,7 +336,7 @@ def image_data_ir1(self): image_data["image_data"] = [[0, 1], [2, 3]] return image_data - @pytest.fixture + @pytest.fixture() def image_data_vis(self): """Get VIS image data.""" image_data = np.zeros(2, fmt.IMAGE_DATA_BLOCK_VIS) @@ -347,7 +347,7 @@ def image_data_vis(self): image_data["image_data"] = [[0, 1], [2, 3]] return image_data - @pytest.fixture + @pytest.fixture() def vissr_file_like(self, vissr_file, with_compression): """Get file-like object for VISSR test file.""" if with_compression: @@ -355,14 +355,14 @@ def vissr_file_like(self, vissr_file, with_compression): return FSFile(open_file) return vissr_file - @pytest.fixture + @pytest.fixture() def file_handler(self, vissr_file_like, mask_space): """Get file handler to be tested.""" return vissr.GMS5VISSRFileHandler( 
vissr_file_like, {}, {}, mask_space=mask_space ) - @pytest.fixture + @pytest.fixture() def vis_refl_exp(self, mask_space, lons_lats_exp): """Get expected VIS reflectance.""" lons, lats = lons_lats_exp @@ -384,7 +384,7 @@ def vis_refl_exp(self, mask_space, lons_lats_exp): }, ) - @pytest.fixture + @pytest.fixture() def ir1_counts_exp(self, lons_lats_exp): """Get expected IR1 counts.""" lons, lats = lons_lats_exp @@ -402,7 +402,7 @@ def ir1_counts_exp(self, lons_lats_exp): }, ) - @pytest.fixture + @pytest.fixture() def ir1_bt_exp(self, lons_lats_exp): """Get expected IR1 brightness temperature.""" lons, lats = lons_lats_exp @@ -420,7 +420,7 @@ def ir1_bt_exp(self, lons_lats_exp): }, ) - @pytest.fixture + @pytest.fixture() def lons_lats_exp(self, dataset_id): """Get expected lon/lat coordinates. @@ -456,7 +456,7 @@ def lons_lats_exp(self, dataset_id): lats = xr.DataArray(exp["lats"], dims=("y", "x")) return lons, lats - @pytest.fixture + @pytest.fixture() def dataset_exp(self, dataset_id, ir1_counts_exp, ir1_bt_exp, vis_refl_exp): """Get expected dataset.""" ir1_counts_id = make_dataid(name="IR1", calibration="counts", resolution=5000) @@ -473,7 +473,7 @@ def dataset_exp(self, dataset_id, ir1_counts_exp, ir1_bt_exp, vis_refl_exp): } return expectations[dataset_id] - @pytest.fixture + @pytest.fixture() def area_def_exp(self, dataset_id): """Get expected area definition.""" if dataset_id["name"] == "IR1": @@ -507,7 +507,7 @@ def area_def_exp(self, dataset_id): height=size, ) - @pytest.fixture + @pytest.fixture() def attrs_exp(self, area_def_exp): """Get expected dataset attributes.""" return { @@ -546,7 +546,7 @@ def test_time_attributes(self, file_handler, attrs_exp): class TestCorruptFile: """Test reading corrupt files.""" - @pytest.fixture + @pytest.fixture() def file_contents(self): """Get corrupt file contents (all zero).""" control_block = np.zeros(1, dtype=fmt.CONTROL_BLOCK) @@ -557,7 +557,7 @@ def file_contents(self): "image_data": image_data, } - @pytest.fixture + @pytest.fixture() def corrupt_file(self, file_contents, tmp_path): """Write corrupt VISSR file to disk.""" filename = tmp_path / "my_vissr_file" diff --git a/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py index 5b3c6117d4..2a1a1cade9 100644 --- a/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py +++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py @@ -219,7 +219,7 @@ @pytest.fixture(params=[False, True], autouse=True) -def disable_jit(request, monkeypatch): +def _disable_jit(request, monkeypatch): """Run tests with jit enabled and disabled. Reason: Coverage report is only accurate with jit disabled. 
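# The parametrized autouse fixture above runs every navigation test twice,
# once with numba jit active and once without, because coverage can only
# trace the interpreted path. One possible shape for such a fixture, assuming
# lazily-compiled functions and numba's DISABLE_JIT flag (a sketch with an
# assumed fixture name, not satpy's exact code):
import numba.config
import pytest


@pytest.fixture(params=[False, True], autouse=True)
def _maybe_disable_jit(request, monkeypatch):
    if request.param:
        # make numba-decorated functions run as plain, traceable Python
        monkeypatch.setattr(numba.config, "DISABLE_JIT", True)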
@@ -234,7 +234,7 @@ class TestSinglePixelNavigation: """Test navigation of a single pixel.""" @pytest.mark.parametrize( - "point,nav_params,expected", + ("point", "nav_params", "expected"), [ (ref["pixel"], ref["nav_params"], (ref["lon"], ref["lat"])) for ref in NAVIGATION_REFERENCE @@ -297,7 +297,7 @@ def test_intersect_view_vector_with_earth(self): np.testing.assert_allclose(point, exp) @pytest.mark.parametrize( - "point_earth_fixed,point_geodetic_exp", + ("point_earth_fixed", "point_geodetic_exp"), [ ([0, 0, 1], [0, 90]), ([0, 0, -1], [0, -90]), @@ -328,7 +328,7 @@ def test_normalize_vector(self): class TestImageNavigation: """Test navigation of an entire image.""" - @pytest.fixture + @pytest.fixture() def expected(self): """Get expected coordinates.""" exp = { @@ -356,7 +356,7 @@ class TestPredictionInterpolation: """Test interpolation of orbit and attitude predictions.""" @pytest.mark.parametrize( - "obs_time,expected", [(-1, np.nan), (1.5, 2.5), (5, np.nan)] + ("obs_time", "expected"), [(-1, np.nan), (1.5, 2.5), (5, np.nan)] ) def test_interpolate_continuous(self, obs_time, expected): """Test interpolation of continuous variables.""" @@ -366,7 +366,7 @@ def test_interpolate_continuous(self, obs_time, expected): np.testing.assert_allclose(res, expected) @pytest.mark.parametrize( - "obs_time,expected", + ("obs_time", "expected"), [ (-1, np.nan), (1.5, 0.75 * np.pi), @@ -385,7 +385,7 @@ def test_interpolate_angles(self, obs_time, expected): np.testing.assert_allclose(res, expected) @pytest.mark.parametrize( - "obs_time,expected", + ("obs_time", "expected"), [ (-1, np.nan * np.ones((2, 2))), (1.5, [[1, 0], [0, 2]]), @@ -417,12 +417,12 @@ def test_interpolate_attitude_prediction( attitude = nav.interpolate_attitude_prediction(attitude_prediction, obs_time) _assert_namedtuple_close(attitude, attitude_expected) - @pytest.fixture + @pytest.fixture() def obs_time(self): """Get observation time.""" return 2.5 - @pytest.fixture + @pytest.fixture() def orbit_expected(self): """Get expected orbit.""" return nav.Orbit( @@ -439,7 +439,7 @@ def orbit_expected(self): nutation_precession=1.6 * np.identity(3), ) - @pytest.fixture + @pytest.fixture() def attitude_expected(self): """Get expected attitude.""" return nav.Attitude( @@ -449,13 +449,13 @@ def attitude_expected(self): ) -@pytest.fixture +@pytest.fixture() def sampling_angle(): """Get sampling angle.""" return 0.000095719995443 -@pytest.fixture +@pytest.fixture() def scan_params(sampling_angle): """Get scanning parameters.""" return nav.ScanningParameters( @@ -466,7 +466,7 @@ def scan_params(sampling_angle): ) -@pytest.fixture +@pytest.fixture() def attitude_prediction(): """Get attitude prediction.""" return nav.AttitudePrediction( @@ -479,7 +479,7 @@ def attitude_prediction(): ) -@pytest.fixture +@pytest.fixture() def orbit_prediction(): """Get orbit prediction.""" return nav.OrbitPrediction( @@ -505,7 +505,7 @@ def orbit_prediction(): ) -@pytest.fixture +@pytest.fixture() def proj_params(sampling_angle): """Get projection parameters.""" return nav.ProjectionParameters( @@ -525,19 +525,19 @@ def proj_params(sampling_angle): ) -@pytest.fixture +@pytest.fixture() def static_nav_params(proj_params, scan_params): """Get static navigation parameters.""" return nav.StaticNavigationParameters(proj_params, scan_params) -@pytest.fixture +@pytest.fixture() def predicted_nav_params(attitude_prediction, orbit_prediction): """Get predicted navigation parameters.""" return nav.PredictedNavigationParameters(attitude_prediction, orbit_prediction) 
-@pytest.fixture +@pytest.fixture() def navigation_params(static_nav_params, predicted_nav_params): """Get image navigation parameters.""" return nav.ImageNavigationParameters(static_nav_params, predicted_nav_params) diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index efecd1aa53..3af38328f8 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -370,7 +370,7 @@ def modis_l1b_nasa_mod02hkm_file(tmpdir_factory) -> list[str]: return [full_path] -@pytest.fixture +@pytest.fixture() def modis_l1b_nasa_mod02qkm_file(tmpdir_factory) -> list[str]: """Create a single MOD02QKM file following standard NASA file scheme.""" filename = generate_nasa_l1b_filename("MOD02Qkm") diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py index 85048de0af..a00eae8a3f 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py @@ -84,16 +84,16 @@ def test_available_reader(self): @pytest.mark.parametrize( ("input_files", "expected_names", "expected_data_res", "expected_geo_res"), [ - [lazy_fixture("modis_l1b_nasa_mod021km_file"), + (lazy_fixture("modis_l1b_nasa_mod021km_file"), AVAILABLE_1KM_PRODUCT_NAMES + AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, - [1000], [5000, 1000]], - [lazy_fixture("modis_l1b_imapp_1000m_file"), + [1000], [5000, 1000]), + (lazy_fixture("modis_l1b_imapp_1000m_file"), AVAILABLE_1KM_PRODUCT_NAMES + AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, - [1000], [5000, 1000]], - [lazy_fixture("modis_l1b_nasa_mod02hkm_file"), - AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, [500], [1000, 500, 250]], - [lazy_fixture("modis_l1b_nasa_mod02qkm_file"), - AVAILABLE_QKM_PRODUCT_NAMES, [250], [1000, 500, 250]], + [1000], [5000, 1000]), + (lazy_fixture("modis_l1b_nasa_mod02hkm_file"), + AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, [500], [1000, 500, 250]), + (lazy_fixture("modis_l1b_nasa_mod02qkm_file"), + AVAILABLE_QKM_PRODUCT_NAMES, [250], [1000, 500, 250]), ] ) def test_scene_available_datasets(self, input_files, expected_names, expected_data_res, expected_geo_res): @@ -128,16 +128,16 @@ def test_scene_available_datasets(self, input_files, expected_names, expected_da @pytest.mark.parametrize( ("input_files", "has_5km", "has_500", "has_250", "default_res"), [ - [lazy_fixture("modis_l1b_nasa_mod021km_file"), - True, False, False, 1000], - [lazy_fixture("modis_l1b_imapp_1000m_file"), - True, False, False, 1000], - [lazy_fixture("modis_l1b_nasa_mod02hkm_file"), - False, True, True, 250], - [lazy_fixture("modis_l1b_nasa_mod02qkm_file"), - False, True, True, 250], - [lazy_fixture("modis_l1b_nasa_1km_mod03_files"), - True, True, True, 250], + (lazy_fixture("modis_l1b_nasa_mod021km_file"), + True, False, False, 1000), + (lazy_fixture("modis_l1b_imapp_1000m_file"), + True, False, False, 1000), + (lazy_fixture("modis_l1b_nasa_mod02hkm_file"), + False, True, True, 250), + (lazy_fixture("modis_l1b_nasa_mod02qkm_file"), + False, True, True, 250), + (lazy_fixture("modis_l1b_nasa_1km_mod03_files"), + True, True, True, 250), ] ) def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, default_res): diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l2.py b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py index 99c0890d30..8876decb59 100644 --- 
a/satpy/tests/reader_tests/modis_tests/test_modis_l2.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py @@ -72,8 +72,8 @@ def test_scene_available_datasets(self, modis_l2_nasa_mod35_file): @pytest.mark.parametrize( ("input_files", "has_5km", "has_500", "has_250", "default_res"), [ - [lazy_fixture("modis_l2_nasa_mod35_file"), - True, False, False, 1000], + (lazy_fixture("modis_l2_nasa_mod35_file"), + True, False, False, 1000), ] ) def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, default_res): @@ -108,12 +108,12 @@ def test_load_quality_assurance(self, modis_l2_nasa_mod35_file): @pytest.mark.parametrize( ("input_files", "loadables", "request_resolution", "exp_resolution", "exp_area"), [ - [lazy_fixture("modis_l2_nasa_mod35_mod03_files"), + (lazy_fixture("modis_l2_nasa_mod35_mod03_files"), ["cloud_mask"], - 1000, 1000, True], - [lazy_fixture("modis_l2_imapp_mask_byte1_geo_files"), + 1000, 1000, True), + (lazy_fixture("modis_l2_imapp_mask_byte1_geo_files"), ["cloud_mask", "land_sea_mask", "snow_ice_mask"], - None, 1000, True], + None, 1000, True), ] ) def test_load_category_dataset(self, input_files, loadables, request_resolution, exp_resolution, exp_area): @@ -138,8 +138,8 @@ def test_load_category_dataset(self, input_files, loadables, request_resolution, @pytest.mark.parametrize( ("input_files", "exp_area"), [ - [lazy_fixture("modis_l2_nasa_mod35_file"), False], - [lazy_fixture("modis_l2_nasa_mod35_mod03_files"), True], + (lazy_fixture("modis_l2_nasa_mod35_file"), False), + (lazy_fixture("modis_l2_nasa_mod35_mod03_files"), True), ] ) def test_load_250m_cloud_mask_dataset(self, input_files, exp_area): @@ -162,10 +162,10 @@ def test_load_250m_cloud_mask_dataset(self, input_files, exp_area): @pytest.mark.parametrize( ("input_files", "loadables", "exp_resolution", "exp_area", "exp_value"), [ - [lazy_fixture("modis_l2_nasa_mod06_file"), ["surface_pressure"], 5000, True, 4.0], + (lazy_fixture("modis_l2_nasa_mod06_file"), ["surface_pressure"], 5000, True, 4.0), # snow mask is considered a category product, factor/offset ignored - [lazy_fixture("modis_l2_imapp_snowmask_file"), ["snow_mask"], 1000, False, 1.0], - [lazy_fixture("modis_l2_imapp_snowmask_geo_files"), ["snow_mask"], 1000, True, 1.0], + (lazy_fixture("modis_l2_imapp_snowmask_file"), ["snow_mask"], 1000, False, 1.0), + (lazy_fixture("modis_l2_imapp_snowmask_geo_files"), ["snow_mask"], 1000, True, 1.0), ] ) def test_load_l2_dataset(self, input_files, loadables, exp_resolution, exp_area, exp_value): diff --git a/satpy/tests/reader_tests/test_aapp_l1b.py b/satpy/tests/reader_tests/test_aapp_l1b.py index e9414ee521..a9997f7a7e 100644 --- a/satpy/tests/reader_tests/test_aapp_l1b.py +++ b/satpy/tests/reader_tests/test_aapp_l1b.py @@ -218,9 +218,9 @@ def test_interpolation(self): fh._get_coordinates_in_degrees.return_value = (lons40km, lats40km) (lons, lats) = fh._get_all_interpolated_coordinates() lon_data = lons.compute() - self.assertTrue(np.max(lon_data) <= 180) + assert (np.max(lon_data) <= 180) # Not longitdes between -110, 110 in indata - self.assertTrue(np.all(np.abs(lon_data) > 110)) + assert np.all(np.abs(lon_data) > 110) def test_interpolation_angles(self): """Test reading the lon and lats.""" @@ -276,8 +276,8 @@ def test_interpolation_angles(self): fh._get_tiepoint_angles_in_degrees = mock.MagicMock() fh._get_tiepoint_angles_in_degrees.return_value = (sunz40km, satz40km, azidiff40km) (sunz, satz, azidiff) = fh._get_all_interpolated_angles() - self.assertTrue(np.max(sunz) <= 123) - 
self.assertTrue(np.max(satz) <= 70) + assert (np.max(sunz) <= 123) + assert (np.max(satz) <= 70) class TestAAPPL1BChannel3AMissing(unittest.TestCase): diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index 1b54b58249..ab2b1eec54 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -131,7 +131,7 @@ def setUp(self, xr_, rad=None, clip_negative_radiances=False): class TestABIYAML: """Tests for the ABI L1b reader's YAML configuration.""" - @pytest.mark.parametrize(["channel", "suffix"], + @pytest.mark.parametrize(("channel", "suffix"), [("C{:02d}".format(num), suffix) for num in range(1, 17) for suffix in ("", "_test_suffix")]) @@ -157,10 +157,8 @@ class Test_NC_ABI_L1B(Test_NC_ABI_L1B_Base): def test_basic_attributes(self): """Test getting basic file attributes.""" from datetime import datetime - self.assertEqual(self.reader.start_time, - datetime(2017, 9, 20, 17, 30, 40, 800000)) - self.assertEqual(self.reader.end_time, - datetime(2017, 9, 20, 17, 41, 17, 500000)) + assert self.reader.start_time == datetime(2017, 9, 20, 17, 30, 40, 800000) + assert self.reader.end_time == datetime(2017, 9, 20, 17, 41, 17, 500000) def test_get_dataset(self): """Test the get_dataset method.""" @@ -190,24 +188,24 @@ def test_get_dataset(self): "suffix": "custom", "units": "W m-2 um-1 sr-1"} - self.assertDictEqual(res.attrs, exp) + assert res.attrs == exp # we remove any time dimension information - self.assertNotIn("t", res.coords) - self.assertNotIn("t", res.dims) - self.assertNotIn("time", res.coords) - self.assertNotIn("time", res.dims) + assert "t" not in res.coords + assert "t" not in res.dims + assert "time" not in res.coords + assert "time" not in res.dims @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def(self, adef): """Test the area generation.""" self.reader.get_area_def(None) - self.assertEqual(adef.call_count, 1) + assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] - self.assertDictEqual(call_args[3], {"a": 1.0, "b": 1.0, "h": 1.0, "lon_0": -90.0, "proj": "geos", - "sweep": "x", "units": "m"}) - self.assertEqual(call_args[4], self.reader.ncols) - self.assertEqual(call_args[5], self.reader.nlines) + assert call_args[3] == {"a": 1.0, "b": 1.0, "h": 1.0, + "lon_0": -90.0, "proj": "geos", "sweep": "x", "units": "m"} + assert call_args[4] == self.reader.ncols + assert call_args[5] == self.reader.nlines np.testing.assert_allclose(call_args[6], (-2, -2, 8, 2)) @@ -236,11 +234,10 @@ def test_ir_calibration_attrs(self): make_dataid(name="C05", calibration="brightness_temperature"), {}) # make sure the attributes from the file are in the data array - self.assertNotIn("scale_factor", res.attrs) - self.assertNotIn("_FillValue", res.attrs) - self.assertEqual(res.attrs["standard_name"], - "toa_brightness_temperature") - self.assertEqual(res.attrs["long_name"], "Brightness Temperature") + assert "scale_factor" not in res.attrs + assert "_FillValue" not in res.attrs + assert res.attrs["standard_name"] == "toa_brightness_temperature" + assert res.attrs["long_name"] == "Brightness Temperature" def test_clip_negative_radiances_attribute(self): """Assert that clip_negative_radiances is set to False.""" @@ -331,13 +328,11 @@ def test_vis_calibrate(self): expected = np.array([[0.15265617, 0.30531234, 0.45796851, 0.61062468, 0.76328085], [0.91593702, 1.06859319, 1.22124936, np.nan, 1.52656171]]) - self.assertTrue(np.allclose(res.data, expected, equal_nan=True)) - 
self.assertNotIn("scale_factor", res.attrs) - self.assertNotIn("_FillValue", res.attrs) - self.assertEqual(res.attrs["standard_name"], - "toa_bidirectional_reflectance") - self.assertEqual(res.attrs["long_name"], - "Bidirectional Reflectance") + assert np.allclose(res.data, expected, equal_nan=True) + assert "scale_factor" not in res.attrs + assert "_FillValue" not in res.attrs + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" + assert res.attrs["long_name"] == "Bidirectional Reflectance" class Test_NC_ABI_L1B_raw_cal(Test_NC_ABI_L1B_Base): @@ -366,22 +361,20 @@ def test_raw_calibrate(self): # We expect the raw data to be unchanged expected = res.data - self.assertTrue(np.allclose(res.data, expected, equal_nan=True)) + assert np.allclose(res.data, expected, equal_nan=True) # check for the presence of typical attributes - self.assertIn("scale_factor", res.attrs) - self.assertIn("add_offset", res.attrs) - self.assertIn("_FillValue", res.attrs) - self.assertIn("orbital_parameters", res.attrs) - self.assertIn("platform_shortname", res.attrs) - self.assertIn("scene_id", res.attrs) + assert "scale_factor" in res.attrs + assert "add_offset" in res.attrs + assert "_FillValue" in res.attrs + assert "orbital_parameters" in res.attrs + assert "platform_shortname" in res.attrs + assert "scene_id" in res.attrs # determine if things match their expected values/types. - self.assertEqual(res.data.dtype, np.int16, "int16 data type expected") - self.assertEqual(res.attrs["standard_name"], - "counts") - self.assertEqual(res.attrs["long_name"], - "Raw Counts") + assert res.data.dtype == np.int16 + assert res.attrs["standard_name"] == "counts" + assert res.attrs["long_name"] == "Raw Counts" class Test_NC_ABI_L1B_invalid_cal(Test_NC_ABI_L1B_Base): @@ -405,7 +398,7 @@ class Test_NC_ABI_File(unittest.TestCase): """Test file opening.""" @mock.patch("satpy.readers.abi_base.xr") - def test_open_dataset(self, _): + def test_open_dataset(self, _): # noqa: PT019 """Test openning a dataset.""" from satpy.readers.abi_l1b import NC_ABI_L1B diff --git a/satpy/tests/reader_tests/test_acspo.py b/satpy/tests/reader_tests/test_acspo.py index 166c77227d..723d1dbecd 100644 --- a/satpy/tests/reader_tests/test_acspo.py +++ b/satpy/tests/reader_tests/test_acspo.py @@ -119,10 +119,10 @@ def teardown_method(self): self.p.stop() @pytest.mark.parametrize( - ("filename",), + "filename", [ - ["20170401174600-STAR-L2P_GHRSST-SSTskin-VIIRS_NPP-ACSPO_V2.40-v02.0-fv01.0.nc"], - ["20210916161708-STAR-L2P_GHRSST-SSTsubskin-VIIRS_N20-ACSPO_V2.80-v02.0-fv01.0.nc"], + ("20170401174600-STAR-L2P_GHRSST-SSTskin-VIIRS_NPP-ACSPO_V2.40-v02.0-fv01.0.nc"), + ("20210916161708-STAR-L2P_GHRSST-SSTsubskin-VIIRS_N20-ACSPO_V2.80-v02.0-fv01.0.nc"), ] ) def test_init(self, filename): diff --git a/satpy/tests/reader_tests/test_ahi_hrit.py b/satpy/tests/reader_tests/test_ahi_hrit.py index 5746661cac..1dbf36c66b 100644 --- a/satpy/tests/reader_tests/test_ahi_hrit.py +++ b/satpy/tests/reader_tests/test_ahi_hrit.py @@ -106,29 +106,29 @@ def test_init(self): "unit": "ALBEDO(%)"}) mda_expected["projection_parameters"]["SSP_longitude"] = 140.7 reader = self._get_reader(mda=mda) - self.assertEqual(reader.mda, mda_expected) + assert reader.mda == mda_expected # Check projection name - self.assertEqual(reader.projection_name, "GEOS(140.70)") + assert reader.projection_name == "GEOS(140.70)" # Check calibration table cal_expected = np.array([[0, -0.1], [1023, 100], [65535, 100]]) - self.assertTrue(np.all(reader.calibration_table == cal_expected)) + 
assert np.all(reader.calibration_table == cal_expected) # Check if scanline timestamps are there (dedicated test below) - self.assertIsInstance(reader.acq_time, np.ndarray) + assert isinstance(reader.acq_time, np.ndarray) # Check platform - self.assertEqual(reader.platform, HIMAWARI8) + assert reader.platform == HIMAWARI8 # Check is_segmented attribute expected = {0: False, 1: True, 8: True} for segno, is_segmented in expected.items(): mda = self._get_mda(segno=segno) reader = self._get_reader(mda=mda) - self.assertEqual(reader.is_segmented, is_segmented) + assert reader.is_segmented == is_segmented # Check area IDs expected = [ @@ -139,7 +139,7 @@ def test_init(self): mda = self._get_mda() for filename_info, area_id in expected: reader = self._get_reader(mda=mda, filename_info=filename_info) - self.assertEqual(reader.area_id, area_id) + assert reader.area_id == area_id @mock.patch("satpy.readers.hrit_jma.HRITJMAFileHandler.__init__") def test_get_platform(self, mocked_init): @@ -151,11 +151,11 @@ def test_get_platform(self, mocked_init): for proj_name, platform in PLATFORMS.items(): reader.projection_name = proj_name - self.assertEqual(reader._get_platform(), platform) + assert reader._get_platform() == platform with mock.patch("logging.Logger.error") as mocked_log: reader.projection_name = "invalid" - self.assertEqual(reader._get_platform(), UNKNOWN_PLATFORM) + assert reader._get_platform() == UNKNOWN_PLATFORM mocked_log.assert_called() def test_get_area_def(self): @@ -206,8 +206,8 @@ def test_get_area_def(self): reader = self._get_reader(mda=mda, filename_info={"area": case["area"]}) area = reader.get_area_def("some_id") - self.assertTupleEqual(area.area_extent, case["extent"]) - self.assertEqual(area.description, AREA_NAMES[case["area"]]["long"]) + assert area.area_extent == case["extent"] + assert area.description == AREA_NAMES[case["area"]]["long"] def test_calibrate(self): """Test calibration.""" @@ -238,7 +238,7 @@ def test_calibrate(self): # 1. Counts res = reader.calibrate(data=counts, calibration="counts") - self.assertTrue(np.all(counts.values == res.values)) + assert np.all(counts.values == res.values) # 2. 
Reflectance res = reader.calibrate(data=counts, calibration="reflectance") @@ -263,7 +263,7 @@ def test_mask_space(self): # First line of the segment should be space, in the middle of the # last line there should be some valid pixels np.testing.assert_allclose(masked.values[0, :], np.nan) - self.assertTrue(np.all(masked.values[-1, 588:788] == 1)) + assert np.all(masked.values[-1, 588:788] == 1) @mock.patch("satpy.readers.hrit_jma.HRITFileHandler.get_dataset") def test_get_dataset(self, base_get_dataset): @@ -281,15 +281,15 @@ def test_get_dataset(self, base_get_dataset): # Check attributes res = reader.get_dataset(key, {"units": "%", "sensor": "ahi"}) - self.assertEqual(res.attrs["units"], "%") - self.assertEqual(res.attrs["sensor"], "ahi") - self.assertEqual(res.attrs["platform_name"], HIMAWARI8) - self.assertDictEqual(res.attrs["orbital_parameters"], {"projection_longitude": 140.7, - "projection_latitude": 0., - "projection_altitude": 35785831.0}) + assert res.attrs["units"] == "%" + assert res.attrs["sensor"] == "ahi" + assert res.attrs["platform_name"] == HIMAWARI8 + assert res.attrs["orbital_parameters"] == {"projection_longitude": 140.7, + "projection_latitude": 0.0, + "projection_altitude": 35785831.0} # Check if acquisition time is a coordinate - self.assertIn("acq_time", res.coords) + assert "acq_time" in res.coords # Check called methods with mock.patch.object(reader, "_mask_space") as mask_space: @@ -305,10 +305,8 @@ def test_get_dataset(self, base_get_dataset): def test_mjd2datetime64(self): """Test conversion from modified julian day to datetime64.""" from satpy.readers.hrit_jma import mjd2datetime64 - self.assertEqual(mjd2datetime64(np.array([0])), - np.datetime64("1858-11-17", "us")) - self.assertEqual(mjd2datetime64(np.array([40587.5])), - np.datetime64("1970-01-01 12:00", "us")) + assert mjd2datetime64(np.array([0])) == np.datetime64("1858-11-17", "us") + assert mjd2datetime64(np.array([40587.5])) == np.datetime64("1970-01-01 12:00", "us") def test_get_acq_time(self): """Test computation of scanline acquisition times.""" diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 3d2375f5ec..6b4efca8c1 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -142,12 +142,12 @@ def test_region(self, fromfile, np2str): area_def = fh.get_area_def(None) proj_dict = area_def.proj_dict a, b = proj4_radius_parameters(proj_dict) - self.assertEqual(a, 6378137.0) - self.assertEqual(b, 6356752.3) - self.assertEqual(proj_dict["h"], 35785863.0) - self.assertEqual(proj_dict["lon_0"], 140.7) - self.assertEqual(proj_dict["proj"], "geos") - self.assertEqual(proj_dict["units"], "m") + assert a == 6378137.0 + assert b == 6356752.3 + assert proj_dict["h"] == 35785863.0 + assert proj_dict["lon_0"] == 140.7 + assert proj_dict["proj"] == "geos" + assert proj_dict["units"] == "m" np.testing.assert_allclose(area_def.area_extent, (592000.0038256242, 4132000.0267018233, 1592000.0102878273, 5132000.033164027)) @@ -190,17 +190,17 @@ def test_segment(self, fromfile, np2str): area_def = fh.get_area_def(None) proj_dict = area_def.proj_dict a, b = proj4_radius_parameters(proj_dict) - self.assertEqual(a, 6378137.0) - self.assertEqual(b, 6356752.3) - self.assertEqual(proj_dict["h"], 35785863.0) - self.assertEqual(proj_dict["lon_0"], 140.7) - self.assertEqual(proj_dict["proj"], "geos") - self.assertEqual(proj_dict["units"], "m") + assert a == 6378137.0 + assert b == 6356752.3 + assert proj_dict["h"] == 35785863.0 + 
assert proj_dict["lon_0"] == 140.7 + assert proj_dict["proj"] == "geos" + assert proj_dict["units"] == "m" np.testing.assert_allclose(area_def.area_extent, (-5500000.035542117, -3300000.021325271, 5500000.035542117, -2200000.0142168473)) -@pytest.fixture +@pytest.fixture() def hsd_file_jp01(tmp_path): """Create a jp01 hsd file.""" from satpy.readers.ahi_hsd import ( # _IRCAL_INFO_TYPE, @@ -279,7 +279,7 @@ class TestAHIHSDFileHandler: def test_bad_calibration(self): """Test that a bad calibration mode causes an exception.""" - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Invalid calibration mode: BAD_MODE. Choose one of (.*)"): with _fake_hsd_handler(fh_kwargs={"calib_mode": "BAD_MODE"}): pass @@ -503,16 +503,14 @@ def test_default_calibrate(self, *mocks): """Test default in-file calibration modes.""" self.setUp() # Counts - self.assertEqual(self.fh.calibrate(data=123, - calibration="counts"), - 123) + assert self.fh.calibrate(data=123, calibration="counts") == 123 # Radiance rad_exp = np.array([[15.2, 11.5], [7.8, -3.3]]) rad = self.fh.calibrate(data=self.counts, calibration="radiance") - self.assertTrue(np.allclose(rad, rad_exp)) + assert np.allclose(rad, rad_exp) # Brightness Temperature bt_exp = np.array([[330.978979, 310.524688], @@ -526,7 +524,7 @@ def test_default_calibrate(self, *mocks): [1.50189, 0.]]) refl = self.fh.calibrate(data=self.counts, calibration="reflectance") - self.assertTrue(np.allclose(refl, refl_exp)) + assert np.allclose(refl, refl_exp) def test_updated_calibrate(self): """Test updated in-file calibration modes.""" @@ -535,7 +533,7 @@ def test_updated_calibrate(self): rad_exp = np.array([[30.4, 23.0], [15.6, -6.6]]) rad = self.fh.calibrate(data=self.counts, calibration="radiance") - self.assertTrue(np.allclose(rad, rad_exp)) + assert np.allclose(rad, rad_exp) # Case for no updated calibration available (older data) self.fh._header = { @@ -556,7 +554,7 @@ def test_updated_calibrate(self): rad = self.fh.calibrate(data=self.counts, calibration="radiance") rad_exp = np.array([[15.2, 11.5], [7.8, -3.3]]) - self.assertTrue(np.allclose(rad, rad_exp)) + assert np.allclose(rad, rad_exp) def test_user_calibration(self): """Test user-defined calibration modes.""" @@ -567,7 +565,7 @@ def test_user_calibration(self): rad = self.fh.calibrate(data=self.counts, calibration="radiance").compute() rad_exp = np.array([[16.10526316, 12.21052632], [8.31578947, -3.36842105]]) - self.assertTrue(np.allclose(rad, rad_exp)) + assert np.allclose(rad, rad_exp) # This is for DN calibration self.fh.user_calibration = {"B13": {"slope": -0.0032, @@ -577,7 +575,7 @@ def test_user_calibration(self): rad = self.fh.calibrate(data=self.counts, calibration="radiance").compute() rad_exp = np.array([[15.2, 12.], [8.8, -0.8]]) - self.assertTrue(np.allclose(rad, rad_exp)) + assert np.allclose(rad, rad_exp) @contextlib.contextmanager diff --git a/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py b/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py index 9d1302ef41..73206e7ffd 100644 --- a/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py +++ b/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py @@ -57,20 +57,20 @@ def make_fh(filetype, area="fld"): def test_low_res(self): """Check size of the low resolution (2km) grid.""" tmp_fh = self.make_fh("tir.01") - self.assertEqual(self.FULLDISK_SIZES[0.02]["x_size"], tmp_fh.ncols) - self.assertEqual(self.FULLDISK_SIZES[0.02]["y_size"], tmp_fh.nlines) + assert self.FULLDISK_SIZES[0.02]["x_size"] == tmp_fh.ncols + assert 
self.FULLDISK_SIZES[0.02]["y_size"] == tmp_fh.nlines def test_med_res(self): """Check size of the low resolution (1km) grid.""" tmp_fh = self.make_fh("vis.02") - self.assertEqual(self.FULLDISK_SIZES[0.01]["x_size"], tmp_fh.ncols) - self.assertEqual(self.FULLDISK_SIZES[0.01]["y_size"], tmp_fh.nlines) + assert self.FULLDISK_SIZES[0.01]["x_size"] == tmp_fh.ncols + assert self.FULLDISK_SIZES[0.01]["y_size"] == tmp_fh.nlines def test_hi_res(self): """Check size of the low resolution (0.5km) grid.""" tmp_fh = self.make_fh("ext.01") - self.assertEqual(self.FULLDISK_SIZES[0.005]["x_size"], tmp_fh.ncols) - self.assertEqual(self.FULLDISK_SIZES[0.005]["y_size"], tmp_fh.nlines) + assert self.FULLDISK_SIZES[0.005]["x_size"] == tmp_fh.ncols + assert self.FULLDISK_SIZES[0.005]["y_size"] == tmp_fh.nlines def test_area_def(self): """Check that a valid full disk area is produced.""" @@ -84,7 +84,7 @@ def test_area_def(self): tmp_fh = self.make_fh("vis.01") tmp_fh.get_area_def(None) - self.assertEqual(tmp_fh.area, good_area) + assert tmp_fh.area == good_area def test_bad_area(self): """Ensure an error is raised for an usupported area.""" @@ -170,7 +170,7 @@ def setUp(self): filetype_info={"file_type": "tir.01"}) # Check that the filename is altered for bz2 format files - self.assertNotEqual(in_fname, fh.filename) + assert in_fname != fh.filename self.fh = fh key = {"calibration": "counts", @@ -206,8 +206,8 @@ def test_get_dataset(self, mocked_read): # Check output data is correct np.testing.assert_allclose(res.values, out_data) # Also check a couple of attributes - self.assertEqual(res.attrs["name"], self.key["name"]) - self.assertEqual(res.attrs["wavelength"], self.info["wavelength"]) + assert res.attrs["name"] == self.key["name"] + assert res.attrs["wavelength"] == self.info["wavelength"] @mock.patch("os.path.exists", return_value=True) @mock.patch("os.remove") @@ -269,9 +269,9 @@ def test_get_luts(self): tempdir = tempfile.gettempdir() print(self.fh.lut_dir) self.fh._get_luts() - self.assertFalse(os.path.exists(os.path.join(tempdir, "count2tbb_v102/"))) + assert not os.path.exists(os.path.join(tempdir, "count2tbb_v102/")) for lut_name in AHI_LUT_NAMES: - self.assertTrue(os.path.isfile(os.path.join(self.fh.lut_dir, lut_name))) + assert os.path.isfile(os.path.join(self.fh.lut_dir, lut_name)) @mock.patch("urllib.request.urlopen") @mock.patch("shutil.copyfileobj") diff --git a/satpy/tests/reader_tests/test_ami_l1b.py b/satpy/tests/reader_tests/test_ami_l1b.py index 58a3612b49..cdbc4468c9 100644 --- a/satpy/tests/reader_tests/test_ami_l1b.py +++ b/satpy/tests/reader_tests/test_ami_l1b.py @@ -23,6 +23,7 @@ import dask.array as da import numpy as np import xarray as xr +from pytest import approx # noqa: PT013 class FakeDataset(object): @@ -144,7 +145,7 @@ def _check_orbital_parameters(self, orb_params): "satellite_actual_longitude": 128.2707, } for key, val in exp_params.items(): - self.assertAlmostEqual(val, orb_params[key], places=3) + assert val == approx(orb_params[key], abs=1e-3) def test_filename_grouping(self): """Test that filenames are grouped properly.""" @@ -167,16 +168,14 @@ def test_filename_grouping(self): "gk2a_ami_le1b_wv069_fd020ge_201909300300.nc", "gk2a_ami_le1b_wv073_fd020ge_201909300300.nc"] groups = group_files(filenames, reader="ami_l1b") - self.assertEqual(len(groups), 1) - self.assertEqual(len(groups[0]["ami_l1b"]), 16) + assert len(groups) == 1 + assert len(groups[0]["ami_l1b"]) == 16 def test_basic_attributes(self): """Test getting basic file attributes.""" from datetime import 
datetime - self.assertEqual(self.reader.start_time, - datetime(2019, 9, 30, 3, 0, 31, 957882)) - self.assertEqual(self.reader.end_time, - datetime(2019, 9, 30, 3, 9, 35, 606133)) + assert self.reader.start_time == datetime(2019, 9, 30, 3, 0, 31, 957882) + assert self.reader.end_time == datetime(2019, 9, 30, 3, 9, 35, 606133) def test_get_dataset(self): """Test gettting radiance data.""" @@ -193,7 +192,7 @@ def test_get_dataset(self): "sensor": "ami", "units": "W m-2 um-1 sr-1"} for key, val in exp.items(): - self.assertEqual(val, res.attrs[key]) + assert val == res.attrs[key] self._check_orbital_parameters(res.attrs["orbital_parameters"]) def test_bad_calibration(self): @@ -212,15 +211,15 @@ def test_get_area_def(self, adef): """Test the area generation.""" self.reader.get_area_def(None) - self.assertEqual(adef.call_count, 1) + assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] exp = {"a": 6378137.0, "b": 6356752.3, "h": 35785863.0, "lon_0": 128.2, "proj": "geos", "units": "m"} for key, val in exp.items(): - self.assertIn(key, call_args[3]) - self.assertAlmostEqual(val, call_args[3][key]) - self.assertEqual(call_args[4], self.reader.nc.attrs["number_of_columns"]) - self.assertEqual(call_args[5], self.reader.nc.attrs["number_of_lines"]) + assert key in call_args[3] + assert val == approx(call_args[3][key]) + assert call_args[4] == self.reader.nc.attrs["number_of_columns"] + assert call_args[5] == self.reader.nc.attrs["number_of_lines"] np.testing.assert_allclose(call_args[6], [-5511022.902, -5511022.902, 5511022.902, 5511022.902]) @@ -239,7 +238,7 @@ def test_get_dataset_vis(self): "sensor": "ami", "units": "%"} for key, val in exp.items(): - self.assertEqual(val, res.attrs[key]) + assert val == res.attrs[key] self._check_orbital_parameters(res.attrs["orbital_parameters"]) def test_get_dataset_counts(self): @@ -257,7 +256,7 @@ def test_get_dataset_counts(self): "sensor": "ami", "units": "1"} for key, val in exp.items(): - self.assertEqual(val, res.attrs[key]) + assert val == res.attrs[key] self._check_orbital_parameters(res.attrs["orbital_parameters"]) @@ -310,7 +309,7 @@ def test_default_calibrate(self): [238.1965875, 238.16707956, 238.13755317, 238.10800829, 238.07844489]]) np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True) # make sure the attributes from the file are in the data array - self.assertEqual(res.attrs["standard_name"], "toa_brightness_temperature") + assert res.attrs["standard_name"] == "toa_brightness_temperature" def test_infile_calibrate(self): """Test IR calibration using in-file coefficients.""" @@ -324,7 +323,7 @@ def test_infile_calibrate(self): # file coefficients are pretty close, give some wiggle room np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True, atol=0.04) # make sure the attributes from the file are in the data array - self.assertEqual(res.attrs["standard_name"], "toa_brightness_temperature") + assert res.attrs["standard_name"] == "toa_brightness_temperature" def test_gsics_radiance_corr(self): """Test IR radiance adjustment using in-file GSICS coefs.""" @@ -338,7 +337,7 @@ def test_gsics_radiance_corr(self): # file coefficients are pretty close, give some wiggle room np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True, atol=0.01) # make sure the attributes from the file are in the data array - self.assertEqual(res.attrs["standard_name"], "toa_brightness_temperature") + assert res.attrs["standard_name"] == "toa_brightness_temperature" def test_user_radiance_corr(self): """Test IR 
radiance adjustment using user-supplied coefs.""" @@ -354,4 +353,4 @@ def test_user_radiance_corr(self): # file coefficients are pretty close, give some wiggle room np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True, atol=0.01) # make sure the attributes from the file are in the data array - self.assertEqual(res.attrs["standard_name"], "toa_brightness_temperature") + assert res.attrs["standard_name"] == "toa_brightness_temperature" diff --git a/satpy/tests/reader_tests/test_amsr2_l1b.py b/satpy/tests/reader_tests/test_amsr2_l1b.py index b8e51b845b..b627a53a0b 100644 --- a/satpy/tests/reader_tests/test_amsr2_l1b.py +++ b/satpy/tests/reader_tests/test_amsr2_l1b.py @@ -124,10 +124,10 @@ def test_init(self): loadables = r.select_files_from_pathnames([ "GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_basic(self): """Test loading of basic channels.""" @@ -136,7 +136,7 @@ def test_load_basic(self): loadables = r.select_files_from_pathnames([ "GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) ds = r.load([ "btemp_10.7v", @@ -152,16 +152,14 @@ def test_load_basic(self): "btemp_36.5v", "btemp_36.5h", ]) - self.assertEqual(len(ds), 12) + assert len(ds) == 12 for d in ds.values(): - self.assertEqual(d.attrs["calibration"], "brightness_temperature") - self.assertTupleEqual(d.shape, (DEFAULT_FILE_SHAPE[0], int(DEFAULT_FILE_SHAPE[1] // 2))) - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) - self.assertTupleEqual(d.attrs["area"].lons.shape, - (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2)) - self.assertTupleEqual(d.attrs["area"].lats.shape, - (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2)) + assert d.attrs["calibration"] == "brightness_temperature" + assert d.shape == (DEFAULT_FILE_SHAPE[0], int(DEFAULT_FILE_SHAPE[1] // 2)) + assert "area" in d.attrs + assert d.attrs["area"] is not None + assert d.attrs["area"].lons.shape == (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2) + assert d.attrs["area"].lats.shape == (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2) assert d.attrs["sensor"] == "amsr2" assert d.attrs["platform_name"] == "GCOM-W1" @@ -172,7 +170,7 @@ def test_load_89ghz(self): loadables = r.select_files_from_pathnames([ "GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) ds = r.load([ "btemp_89.0av", @@ -180,13 +178,11 @@ def test_load_89ghz(self): "btemp_89.0bv", "btemp_89.0bh", ]) - self.assertEqual(len(ds), 4) + assert len(ds) == 4 for d in ds.values(): - self.assertEqual(d.attrs["calibration"], "brightness_temperature") - self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) - self.assertTupleEqual(d.attrs["area"].lons.shape, - DEFAULT_FILE_SHAPE) - self.assertTupleEqual(d.attrs["area"].lats.shape, - DEFAULT_FILE_SHAPE) + assert d.attrs["calibration"] == "brightness_temperature" + assert d.shape == DEFAULT_FILE_SHAPE + assert "area" in d.attrs + assert d.attrs["area"] is not None + assert d.attrs["area"].lons.shape == DEFAULT_FILE_SHAPE + assert d.attrs["area"].lats.shape == DEFAULT_FILE_SHAPE diff --git a/satpy/tests/reader_tests/test_amsr2_l2.py 
b/satpy/tests/reader_tests/test_amsr2_l2.py index 106f558919..7199a619bc 100644 --- a/satpy/tests/reader_tests/test_amsr2_l2.py +++ b/satpy/tests/reader_tests/test_amsr2_l2.py @@ -96,10 +96,10 @@ def test_init(self): loadables = r.select_files_from_pathnames([ "GW1AM2_202004160129_195B_L2SNSSWLB3300300.h5", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_basic(self): """Test loading of basic channels.""" @@ -108,15 +108,13 @@ def test_load_basic(self): loadables = r.select_files_from_pathnames([ "GW1AM2_202004160129_195B_L2SNSSWLB3300300.h5", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) ds = r.load(["ssw"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 for d in ds.values(): - self.assertTupleEqual(d.shape, (DEFAULT_FILE_SHAPE[0], int(DEFAULT_FILE_SHAPE[1]))) - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) - self.assertTupleEqual(d.attrs["area"].lons.shape, - (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1])) - self.assertTupleEqual(d.attrs["area"].lats.shape, - (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1])) + assert d.shape == (DEFAULT_FILE_SHAPE[0], int(DEFAULT_FILE_SHAPE[1])) + assert "area" in d.attrs + assert d.attrs["area"] is not None + assert d.attrs["area"].lons.shape == (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) + assert d.attrs["area"].lats.shape == (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) diff --git a/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py b/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py index 17ac9f62de..07ed218e72 100644 --- a/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py +++ b/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py @@ -151,9 +151,9 @@ def test_scene(self): from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) scn = Scene(reader="ascat_l2_soilmoisture_bufr", filenames=[fname]) - self.assertTrue("scatterometer" in scn.sensor_names) - self.assertTrue(datetime(2020, 12, 21, 9, 33, 0) == scn.start_time) - self.assertTrue(datetime(2020, 12, 21, 9, 33, 59) == scn.end_time) + assert "scatterometer" in scn.sensor_names + assert datetime(2020, 12, 21, 9, 33, 0) == scn.start_time + assert datetime(2020, 12, 21, 9, 33, 59) == scn.end_time @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene_load_available_datasets(self): @@ -161,10 +161,10 @@ def test_scene_load_available_datasets(self): from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) scn = Scene(reader="ascat_l2_soilmoisture_bufr", filenames=[fname]) - self.assertTrue("surface_soil_moisture" in scn.available_dataset_names()) + assert "surface_soil_moisture" in scn.available_dataset_names() scn.load(scn.available_dataset_names()) loaded = [dataset.name for dataset in scn] - self.assertTrue(sorted(loaded) == sorted(scn.available_dataset_names())) + assert sorted(loaded) == sorted(scn.available_dataset_names()) @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene_dataset_values(self): @@ -184,4 +184,4 @@ def test_scene_dataset_values(self): # this makes each assertion below a separate test from unittest's point of view # (note: if all subtests pass, they will count as one test) with self.subTest(msg="Test failed for dataset: "+name): - self.assertTrue(np.allclose(original_values, 
loaded_values_nan_filled)) + assert np.allclose(original_values, loaded_values_nan_filled) diff --git a/satpy/tests/reader_tests/test_atms_l1b_nc.py b/satpy/tests/reader_tests/test_atms_l1b_nc.py index eca5454307..6b27081ed9 100644 --- a/satpy/tests/reader_tests/test_atms_l1b_nc.py +++ b/satpy/tests/reader_tests/test_atms_l1b_nc.py @@ -27,7 +27,7 @@ # - tmp_path -@pytest.fixture +@pytest.fixture() def reader(l1b_file): """Return reader of ATMS level1b data.""" return AtmsL1bNCFileHandler( @@ -37,15 +37,15 @@ def reader(l1b_file): ) -@pytest.fixture +@pytest.fixture() def l1b_file(tmp_path, atms_fake_dataset): """Return file path to level1b file.""" l1b_file_path = tmp_path / "test_file_atms_l1b.nc" atms_fake_dataset.to_netcdf(l1b_file_path) - yield l1b_file_path + return l1b_file_path -@pytest.fixture +@pytest.fixture() def atms_fake_dataset(): """Return fake ATMS dataset.""" atrack = 2 @@ -99,20 +99,20 @@ def test_antenna_temperature(self, reader, atms_fake_dataset): atms_fake_dataset.antenna_temp.values, ) - @pytest.mark.parametrize("param,expect", ( + @pytest.mark.parametrize(("param", "expect"), [ ("start_time", datetime(2000, 1, 2, 3, 4, 5)), ("end_time", datetime(2000, 1, 2, 4, 5, 6)), ("platform_name", "JPSS-1"), ("sensor", "ATMS"), - )) + ]) def test_attrs(self, reader, param, expect): """Test attributes.""" assert reader.attrs[param] == expect - @pytest.mark.parametrize("dims", ( + @pytest.mark.parametrize("dims", [ ("xtrack", "atrack"), ("x", "y"), - )) + ]) def test_standardize_dims(self, reader, dims): """Test standardize dims.""" data = xr.DataArray( @@ -134,7 +134,7 @@ def test_drop_coords(self, reader): data = reader._drop_coords(data) assert coords not in data.coords - @pytest.mark.parametrize("param,expect", ( + @pytest.mark.parametrize(("param", "expect"), [ ("start_time", datetime(2000, 1, 2, 3, 4, 5)), ("end_time", datetime(2000, 1, 2, 4, 5, 6)), ("platform_name", "JPSS-1"), @@ -142,7 +142,7 @@ def test_drop_coords(self, reader): ("creation_time", datetime(2020, 1, 2, 3, 4, 5)), ("type", "test_data"), ("name", "test"), - )) + ]) def test_merge_attributes(self, reader, param, expect): """Test merge attributes.""" data = xr.DataArray( @@ -154,10 +154,10 @@ def test_merge_attributes(self, reader, param, expect): data = reader._merge_attributes(data, dataset_info) assert data.attrs[param] == expect - @pytest.mark.parametrize("param,expect", ( + @pytest.mark.parametrize(("param", "expect"), [ ("1", 100.), ("sat_azi", 3.), - )) + ]) def test_select_dataset(self, reader, param, expect): """Test select dataset.""" np.testing.assert_array_equal( diff --git a/satpy/tests/reader_tests/test_atms_sdr_hdf5.py b/satpy/tests/reader_tests/test_atms_sdr_hdf5.py index aa11e66d09..8971c2d933 100644 --- a/satpy/tests/reader_tests/test_atms_sdr_hdf5.py +++ b/satpy/tests/reader_tests/test_atms_sdr_hdf5.py @@ -299,7 +299,7 @@ def test_init_start_end_time(self): # make sure we have some files assert r.file_handlers - @pytest.mark.parametrize("files, expected", + @pytest.mark.parametrize(("files", "expected"), [(["SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5", "GATMO_j01_d20221220_t0910240_e0921356_b26361_c20221220100456680030_cspp_dev.h5"], True), diff --git a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py index 076f89b0f2..2272a950bf 100644 --- a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py +++ b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py @@ -158,10 +158,8 @@ def test_init(self): [GACPODReader, 
GACKLMReader, LACPODReader, LACKLMReader]): for filename in filenames: fh = self._get_fh(filename, **kwargs) - self.assertLess(fh.start_time, fh.end_time, - "Start time must precede end time.") - self.assertIs(fh.reader_class, reader_cls, - "Wrong reader class assigned to {}".format(filename)) + assert fh.start_time < fh.end_time + assert fh.reader_class is reader_cls def test_read_raw_data(self): """Test raw data reading.""" @@ -261,7 +259,7 @@ def test_get_dataset_latlon(self, *mocks): key = make_dataid(name=name) info = {"name": name, "standard_name": "my_standard_name"} res = fh.get_dataset(key=key, info=info) - self.assertTupleEqual(res.dims, ("y", "x_every_eighth")) + assert res.dims == ("y", "x_every_eighth") @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs") @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_angle") @@ -298,7 +296,7 @@ def test_get_dataset_angles(self, get_angle, *mocks): key = make_dataid(name=angle) info = {"name": angle, "standard_name": "my_standard_name"} res = fh.get_dataset(key=key, info=info) - self.assertTupleEqual(res.dims, ("y", "x_every_eighth")) + assert res.dims == ("y", "x_every_eighth") @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs") def test_get_dataset_qual_flags(self, *mocks): @@ -348,21 +346,19 @@ def test_get_channel(self): key = make_dataid(name="1", calibration="counts") # Counts res = fh._get_channel(key=key) - np.testing.assert_array_equal(res, [[1, 2, 3], - [4, 5, 6]]) + np.testing.assert_array_equal(res, [[1, 2, 3], [4, 5, 6]]) np.testing.assert_array_equal(fh.counts, counts) # Reflectance and Brightness Temperature for calib in ["reflectance", "brightness_temperature"]: key = make_dataid(name="1", calibration=calib) res = fh._get_channel(key=key) - np.testing.assert_array_equal(res, [[2, 4, 6], - [8, 10, 12]]) + np.testing.assert_array_equal(res, [[2, 4, 6], [8, 10, 12]]) np.testing.assert_array_equal(fh.calib_channels, calib_channels) # Invalid - with pytest.raises(ValueError): - key = make_dataid(name="7", calibration="coffee") + with pytest.raises(ValueError, match="coffee invalid value for "): + _ = make_dataid(name="7", calibration="coffee") # Buffering reader.get_counts.reset_mock() @@ -387,12 +383,10 @@ def test_get_angle(self): # Test angle readout key = make_dataid(name="sensor_zenith_angle") res = fh._get_angle(key) - self.assertEqual(res, 2) - self.assertDictEqual(fh.angles, {"sensor_zenith_angle": 2, - "sensor_azimuth_angle": 1, - "solar_zenith_angle": 4, - "solar_azimuth_angle": 3, - "sun_sensor_azimuth_difference_angle": 5}) + assert res == 2 + assert fh.angles == {"sensor_zenith_angle": 2, "sensor_azimuth_angle": 1, + "solar_zenith_angle": 4, "solar_azimuth_angle": 3, + "sun_sensor_azimuth_difference_angle": 5} # Test buffering key = make_dataid(name="sensor_azimuth_angle") @@ -410,14 +404,14 @@ def test_strip_invalid_lat(self): # Test stripping pygac.utils.strip_invalid_lat.return_value = 1, 2 start, end = fh._strip_invalid_lat() - self.assertTupleEqual((start, end), (1, 2)) + assert (start, end) == (1, 2) # Test buffering fh._strip_invalid_lat() pygac.utils.strip_invalid_lat.assert_called_once() @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._slice") - def test_slice(self, _slice): + def test_slice(self, _slice): # noqa: PT019 """Test slicing.""" def _slice_patched(data): @@ -431,8 +425,8 @@ def _slice_patched(data): data_slc, times_slc = fh.slice(data, times) np.testing.assert_array_equal(data_slc, data[1:3]) np.testing.assert_array_equal(times_slc, times[1:3]) 
- self.assertEqual(fh.start_time, datetime(1970, 1, 1, 0, 0, 0, 2)) - self.assertEqual(fh.end_time, datetime(1970, 1, 1, 0, 0, 0, 3)) + assert fh.start_time == datetime(1970, 1, 1, 0, 0, 0, 2) + assert fh.end_time == datetime(1970, 1, 1, 0, 0, 0, 3) @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_qual_flags") @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._strip_invalid_lat") @@ -449,7 +443,7 @@ def test__slice(self, strip_invalid_lat, get_qual_flags): # a) Only start/end line given fh = _get_fh_mocked(start_line=5, end_line=6, strip_invalid_coords=False) data_slc = fh._slice(data) - self.assertEqual(data_slc, "sliced") + assert data_slc == "sliced" pygac.utils.check_user_scanlines.assert_called_with( start_line=5, end_line=6, first_valid_lat=None, last_valid_lat=None, along_track=2) @@ -474,7 +468,7 @@ def test__slice(self, strip_invalid_lat, get_qual_flags): # Test slicing with older pygac versions pygac.utils.slice_channel.return_value = ("sliced", "foo", "bar") data_slc = fh._slice(data) - self.assertEqual(data_slc, "sliced") + assert data_slc == "sliced" class TestGetDataset(GACLACFilePatcher): diff --git a/satpy/tests/reader_tests/test_clavrx.py b/satpy/tests/reader_tests/test_clavrx.py index 66758d44dc..bc5e968b08 100644 --- a/satpy/tests/reader_tests/test_clavrx.py +++ b/satpy/tests/reader_tests/test_clavrx.py @@ -133,10 +133,10 @@ def test_init(self): loadables = r.select_files_from_pathnames([ "clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_available_datasets(self): """Test available_datasets with fake variables from YAML.""" @@ -145,10 +145,10 @@ def test_available_datasets(self): loadables = r.select_files_from_pathnames([ "clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers # mimic the YAML file being configured for more datasets fake_dataset_info = [ @@ -162,47 +162,47 @@ def test_available_datasets(self): ] new_ds_infos = list(r.file_handlers["clavrx_hdf4"][0].available_datasets( fake_dataset_info)) - self.assertEqual(len(new_ds_infos), 9) + assert len(new_ds_infos) == 9 # we have this and can provide the resolution - self.assertTrue(new_ds_infos[0][0]) - self.assertEqual(new_ds_infos[0][1]["resolution"], 742) # hardcoded + assert new_ds_infos[0][0] + assert new_ds_infos[0][1]["resolution"] == 742 # hardcoded # we have this, but previous file handler said it knew about it # and it is producing the same resolution as what we have - self.assertTrue(new_ds_infos[1][0]) - self.assertEqual(new_ds_infos[1][1]["resolution"], 742) + assert new_ds_infos[1][0] + assert new_ds_infos[1][1]["resolution"] == 742 # we have this, but don't want to change the resolution # because a previous handler said it has it - self.assertTrue(new_ds_infos[2][0]) - self.assertEqual(new_ds_infos[2][1]["resolution"], 1) + assert new_ds_infos[2][0] + assert new_ds_infos[2][1]["resolution"] == 1 # even though the previous one was known we can still # produce it at our new resolution - self.assertTrue(new_ds_infos[3][0]) - self.assertEqual(new_ds_infos[3][1]["resolution"], 742) + assert new_ds_infos[3][0] + assert new_ds_infos[3][1]["resolution"] == 742 # we have this and can 
update the resolution since # no one else has claimed it - self.assertTrue(new_ds_infos[4][0]) - self.assertEqual(new_ds_infos[4][1]["resolution"], 742) + assert new_ds_infos[4][0] + assert new_ds_infos[4][1]["resolution"] == 742 # we don't have this variable, don't change it - self.assertFalse(new_ds_infos[5][0]) - self.assertIsNone(new_ds_infos[5][1].get("resolution")) + assert not new_ds_infos[5][0] + assert new_ds_infos[5][1].get("resolution") is None # we have this, but it isn't supposed to come from our file type - self.assertIsNone(new_ds_infos[6][0]) - self.assertIsNone(new_ds_infos[6][1].get("resolution")) + assert new_ds_infos[6][0] is None + assert new_ds_infos[6][1].get("resolution") is None # we could have loaded this but some other file handler said it has this - self.assertTrue(new_ds_infos[7][0]) - self.assertIsNone(new_ds_infos[7][1].get("resolution")) + assert new_ds_infos[7][0] + assert new_ds_infos[7][1].get("resolution") is None # we can add resolution to the previous dataset, so we do - self.assertTrue(new_ds_infos[8][0]) - self.assertEqual(new_ds_infos[8][1]["resolution"], 742) + assert new_ds_infos[8][0] + assert new_ds_infos[8][1]["resolution"] == 742 def test_load_all(self): """Test loading all test datasets.""" @@ -218,15 +218,15 @@ def test_load_all(self): var_list = ["variable1", "variable2", "variable3"] datasets = r.load(var_list) - self.assertEqual(len(datasets), len(var_list)) + assert len(datasets) == len(var_list) for v in datasets.values(): - self.assertEqual(v.attrs["units"], "1") - self.assertEqual(v.attrs["platform_name"], "npp") - self.assertEqual(v.attrs["sensor"], "viirs") - self.assertIsInstance(v.attrs["area"], SwathDefinition) - self.assertEqual(v.attrs["area"].lons.attrs["rows_per_scan"], 16) - self.assertEqual(v.attrs["area"].lats.attrs["rows_per_scan"], 16) - self.assertIsInstance(datasets["variable3"].attrs.get("flag_meanings"), list) + assert v.attrs["units"] == "1" + assert v.attrs["platform_name"] == "npp" + assert v.attrs["sensor"] == "viirs" + assert isinstance(v.attrs["area"], SwathDefinition) + assert v.attrs["area"].lons.attrs["rows_per_scan"] == 16 + assert v.attrs["area"].lats.attrs["rows_per_scan"] == 16 + assert isinstance(datasets["variable3"].attrs.get("flag_meanings"), list) class FakeHDF4FileHandlerGeo(FakeHDF4FileHandler): @@ -331,10 +331,10 @@ def test_init(self): loadables = r.select_files_from_pathnames([ "clavrx_H08_20180806_1800.level2.hdf", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_no_nav_donor(self): """Test exception raised when no donor file is available.""" @@ -376,22 +376,22 @@ def test_load_all_old_donor(self): ) fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] datasets = r.load(["variable1", "variable2", "variable3"]) - self.assertEqual(len(datasets), 3) + assert len(datasets) == 3 for v in datasets.values(): - self.assertNotIn("calibration", v.attrs) - self.assertEqual(v.attrs["units"], "1") - self.assertIsInstance(v.attrs["area"], AreaDefinition) + assert "calibration" not in v.attrs + assert v.attrs["units"] == "1" + assert isinstance(v.attrs["area"], AreaDefinition) if v.attrs.get("flag_values"): - self.assertIn("_FillValue", v.attrs) + assert "_FillValue" in v.attrs else: - self.assertNotIn("_FillValue", v.attrs) + assert "_FillValue" not in v.attrs if v.attrs["name"] == "variable1": - 
self.assertIsInstance(v.attrs["valid_range"], list) + assert isinstance(v.attrs["valid_range"], list) else: - self.assertNotIn("valid_range", v.attrs) + assert "valid_range" not in v.attrs if "flag_values" in v.attrs: - self.assertTrue(np.issubdtype(v.dtype, np.integer)) - self.assertIsNotNone(v.attrs.get("flag_meanings")) + assert np.issubdtype(v.dtype, np.integer) + assert v.attrs.get("flag_meanings") is not None def test_load_all_new_donor(self): """Test loading all test datasets with new donor.""" @@ -420,12 +420,12 @@ def test_load_all_new_donor(self): ) fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] datasets = r.load(["variable1", "variable2", "variable3"]) - self.assertEqual(len(datasets), 3) + assert len(datasets) == 3 for v in datasets.values(): - self.assertNotIn("calibration", v.attrs) - self.assertEqual(v.attrs["units"], "1") - self.assertIsInstance(v.attrs["area"], AreaDefinition) - self.assertTrue(v.attrs["area"].is_geostationary) - self.assertEqual(v.attrs["platform_name"], "himawari8") - self.assertEqual(v.attrs["sensor"], "ahi") - self.assertIsNotNone(datasets["variable3"].attrs.get("flag_meanings")) + assert "calibration" not in v.attrs + assert v.attrs["units"] == "1" + assert isinstance(v.attrs["area"], AreaDefinition) + assert v.attrs["area"].is_geostationary + assert v.attrs["platform_name"] == "himawari8" + assert v.attrs["sensor"] == "ahi" + assert datasets["variable3"].attrs.get("flag_meanings") is not None diff --git a/satpy/tests/reader_tests/test_cmsaf_claas.py b/satpy/tests/reader_tests/test_cmsaf_claas.py index db2117d264..4615662b32 100644 --- a/satpy/tests/reader_tests/test_cmsaf_claas.py +++ b/satpy/tests/reader_tests/test_cmsaf_claas.py @@ -41,7 +41,7 @@ def start_time(request): return request.param -@pytest.fixture +@pytest.fixture() def start_time_str(start_time): """Get string representation of the start time.""" return start_time.strftime("%Y-%m-%dT%H:%M:%SZ") @@ -81,7 +81,7 @@ def fake_dataset(start_time_str): ) -@pytest.fixture +@pytest.fixture() def encoding(): """Dataset encoding.""" return { @@ -89,15 +89,15 @@ def encoding(): } -@pytest.fixture +@pytest.fixture() def fake_file(fake_dataset, encoding, tmp_path): """Write a fake dataset to file.""" filename = tmp_path / "CPPin20140101001500305SVMSG01MD.nc" fake_dataset.to_netcdf(filename, encoding=encoding) - yield filename + return filename -@pytest.fixture +@pytest.fixture() def fake_files(fake_dataset, encoding, tmp_path): """Write the same fake dataset into two different files.""" filenames = [ @@ -106,10 +106,10 @@ def fake_files(fake_dataset, encoding, tmp_path): ] for filename in filenames: fake_dataset.to_netcdf(filename, encoding=encoding) - yield filenames + return filenames -@pytest.fixture +@pytest.fixture() def reader(): """Return reader for CMSAF CLAAS-2.""" from satpy._config import config_search_paths @@ -137,14 +137,14 @@ def test_file_pattern(reader): class TestCLAAS2MultiFile: """Test reading multiple CLAAS-2 files.""" - @pytest.fixture + @pytest.fixture() def multi_file_reader(self, reader, fake_files): """Create a multi-file reader.""" loadables = reader.select_files_from_pathnames(fake_files) reader.create_filehandlers(loadables) return reader - @pytest.fixture + @pytest.fixture() def multi_file_dataset(self, multi_file_reader): """Load datasets from multiple files.""" ds_ids = [make_dataid(name=name) for name in ["cph", "ctt"]] @@ -157,7 +157,7 @@ def test_combine_timestamps(self, multi_file_reader, start_time): assert multi_file_reader.end_time == 
datetime.datetime(2085, 8, 13, 13, 15) @pytest.mark.parametrize( - "ds_name,expected", + ("ds_name", "expected"), [ ("cph", [[0, 1], [2, 0], [0, 1], [2, 0]]), ("ctt", [[280, 290], [300, 310], [280, 290], [300, 310]]), @@ -177,20 +177,20 @@ def test_number_of_datasets(self, multi_file_dataset): class TestCLAAS2SingleFile: """Test reading a single CLAAS2 file.""" - @pytest.fixture + @pytest.fixture() def file_handler(self, fake_file): """Return a CLAAS-2 file handler.""" from satpy.readers.cmsaf_claas2 import CLAAS2 return CLAAS2(fake_file, {}, {}) - @pytest.fixture + @pytest.fixture() def area_extent_exp(self, start_time): """Get expected area extent.""" if start_time < datetime.datetime(2017, 12, 6): return (-5454733.160460291, -5454733.160460292, 5454733.160460292, 5454733.160460291) return (-5456233.362099582, -5453232.958821001, 5453232.958821001, 5456233.362099582) - @pytest.fixture + @pytest.fixture() def area_exp(self, area_extent_exp): """Get expected area definition.""" proj_dict = { @@ -217,7 +217,7 @@ def test_get_area_def(self, file_handler, area_exp): assert area == area_exp @pytest.mark.parametrize( - "ds_name,expected", + ("ds_name", "expected"), [ ("ctt", xr.DataArray([[280, 290], [300, 310]], dims=("y", "x"))), ("cph", xr.DataArray([[0, 1], [2, 0]], dims=("y", "x"))), diff --git a/satpy/tests/reader_tests/test_electrol_hrit.py b/satpy/tests/reader_tests/test_electrol_hrit.py index c555f377b1..b3e14c24d1 100644 --- a/satpy/tests/reader_tests/test_electrol_hrit.py +++ b/satpy/tests/reader_tests/test_electrol_hrit.py @@ -57,7 +57,7 @@ def test_fun(self): expected = {"test_sec": {"test_str": np.array([b"Testing"], dtype="= global_attrs.keys() -@pytest.mark.parametrize("calibration,expected_values", +@pytest.mark.parametrize(("calibration", "expected_values"), [("counts", values_1km), ("radiance", mask_array(values_1km * 2)), ("reflectance", mask_array(values_1km * 3))]) diff --git a/satpy/tests/reader_tests/test_li_l2_nc.py b/satpy/tests/reader_tests/test_li_l2_nc.py index 62eff6d18f..5e9d0ff563 100644 --- a/satpy/tests/reader_tests/test_li_l2_nc.py +++ b/satpy/tests/reader_tests/test_li_l2_nc.py @@ -45,7 +45,7 @@ def std_filetype_infos(): # get the li_l2 filetype: ftypes = cfg["file_types"] - yield ftypes + return ftypes # Note: the helper class below has some missing abstract class implementation, diff --git a/satpy/tests/reader_tests/test_meris_nc.py b/satpy/tests/reader_tests/test_meris_nc.py index 0ab28b1fef..b4a2cda809 100644 --- a/satpy/tests/reader_tests/test_meris_nc.py +++ b/satpy/tests/reader_tests/test_meris_nc.py @@ -79,7 +79,7 @@ def test_get_dataset(self, mocked_dataset): filename_info = {"mission_id": "ENV", "dataset_name": "mask", "start_time": 0, "end_time": 0} test = NCMERIS2("somedir/somefile.nc", filename_info, "c") res = test.get_dataset(ds_id, {"nc_key": "mask"}) - self.assertEqual(res.dtype, np.dtype("bool")) + assert res.dtype == np.dtype("bool") @mock.patch("xarray.open_dataset") def test_meris_angles(self, mocked_dataset): @@ -180,4 +180,4 @@ def test_bitflags(self): expected = np.array([True, True, True, False, False, True, True, True, False, True, True, True, True, True, True, True, True, True]) - self.assertTrue(all(mask == expected)) + assert all(mask == expected) diff --git a/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py b/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py index 77344e6856..4083f7de00 100644 --- a/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py +++ b/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py @@ -132,10 +132,10 @@ 
def test_init(self): loadables = r.select_files_from_pathnames([ "comp20190619.130000.nc", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_mimic_float(self): """Load TPW mimic float data.""" @@ -147,13 +147,13 @@ def test_load_mimic_float(self): ]) r.create_filehandlers(loadables) ds = r.load(float_variables) - self.assertEqual(len(ds), len(float_variables)) + assert len(ds) == len(float_variables) for d in ds.values(): - self.assertEqual(d.attrs["platform_shortname"], "aggregated microwave") - self.assertEqual(d.attrs["sensor"], "mimic") - self.assertEqual(d.attrs["units"], "mm") - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) + assert d.attrs["platform_shortname"] == "aggregated microwave" + assert d.attrs["sensor"] == "mimic" + assert d.attrs["units"] == "mm" + assert "area" in d.attrs + assert d.attrs["area"] is not None def test_load_mimic_timedelta(self): """Load TPW mimic timedelta data (data latency variables).""" @@ -165,14 +165,14 @@ def test_load_mimic_timedelta(self): ]) r.create_filehandlers(loadables) ds = r.load(date_variables) - self.assertEqual(len(ds), len(date_variables)) + assert len(ds) == len(date_variables) for d in ds.values(): - self.assertEqual(d.attrs["platform_shortname"], "aggregated microwave") - self.assertEqual(d.attrs["sensor"], "mimic") - self.assertEqual(d.attrs["units"], "minutes") - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) - self.assertEqual(d.dtype, DEFAULT_FILE_DTYPE) + assert d.attrs["platform_shortname"] == "aggregated microwave" + assert d.attrs["sensor"] == "mimic" + assert d.attrs["units"] == "minutes" + assert "area" in d.attrs + assert d.attrs["area"] is not None + assert d.dtype == DEFAULT_FILE_DTYPE def test_load_mimic_ubyte(self): """Load TPW mimic sensor grids.""" @@ -184,11 +184,11 @@ def test_load_mimic_ubyte(self): ]) r.create_filehandlers(loadables) ds = r.load(ubyte_variables) - self.assertEqual(len(ds), len(ubyte_variables)) + assert len(ds) == len(ubyte_variables) for d in ds.values(): - self.assertEqual(d.attrs["platform_shortname"], "aggregated microwave") - self.assertEqual(d.attrs["sensor"], "mimic") - self.assertIn("source_key", d.attrs) - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) - self.assertEqual(d.dtype, np.uint8) + assert d.attrs["platform_shortname"] == "aggregated microwave" + assert d.attrs["sensor"] == "mimic" + assert "source_key" in d.attrs + assert "area" in d.attrs + assert d.attrs["area"] is not None + assert d.dtype == np.uint8 diff --git a/satpy/tests/reader_tests/test_mimic_TPW2_nc.py b/satpy/tests/reader_tests/test_mimic_TPW2_nc.py index 9c6c24b5a7..63214b0477 100644 --- a/satpy/tests/reader_tests/test_mimic_TPW2_nc.py +++ b/satpy/tests/reader_tests/test_mimic_TPW2_nc.py @@ -110,10 +110,10 @@ def test_init(self): loadables = r.select_files_from_pathnames([ "comp20190619.130000.nc", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_mimic(self): """Load Mimic data.""" @@ -125,10 +125,10 @@ def test_load_mimic(self): ]) r.create_filehandlers(loadables) ds = r.load(["tpwGrid"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 for d in ds.values(): - self.assertEqual(d.attrs["platform_shortname"], "aggregated 
microwave") - self.assertEqual(d.attrs["sensor"], "mimic") - self.assertIn("area", d.attrs) - self.assertIn("units", d.attrs) - self.assertIsNotNone(d.attrs["area"]) + assert d.attrs["platform_shortname"] == "aggregated microwave" + assert d.attrs["sensor"] == "mimic" + assert "area" in d.attrs + assert "units" in d.attrs + assert d.attrs["area"] is not None diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 5f8490151f..bcee32ddbb 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -976,7 +976,7 @@ def setup_method(self): self.tile_mda = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(mtd_tile_xml), self.filename_info, mock.MagicMock()) - @pytest.mark.parametrize("mask_saturated,calibration,expected", + @pytest.mark.parametrize(("mask_saturated", "calibration", "expected"), [(True, "reflectance", [[np.nan, 0.01 - 10], [645.34, np.inf]]), (False, "reflectance", [[np.nan, 0.01 - 10], [645.34, 645.35]]), (True, "radiance", [[np.nan, -251.58426503], [16251.99095011, np.inf]])]) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index 8a57507141..b03336c230 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -400,7 +400,7 @@ def test_get_dataset(self, file_handler, name, calibration, resolution, is_refl = calibration == "reflectance" if is_easy and is_vis and not is_refl: # VIS counts/radiance not available in easy FCDR - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Cannot calibrate to .*. Easy FCDR provides reflectance only."): file_handler.get_dataset(dataset_id, dataset_info) else: ds = file_handler.get_dataset(dataset_id, dataset_info) diff --git a/satpy/tests/reader_tests/test_mws_l1b_nc.py b/satpy/tests/reader_tests/test_mws_l1b_nc.py index 89a6eb4700..2d227822a4 100644 --- a/satpy/tests/reader_tests/test_mws_l1b_nc.py +++ b/satpy/tests/reader_tests/test_mws_l1b_nc.py @@ -43,7 +43,7 @@ N_PRTS = 6 -@pytest.fixture +@pytest.fixture() def reader(fake_file): """Return reader of mws level-1b data.""" return MWSL1BFile( @@ -70,13 +70,13 @@ def reader(fake_file): ) -@pytest.fixture +@pytest.fixture() def fake_file(tmp_path): """Return file path to level-1b file.""" file_path = tmp_path / "test_file_mws_l1b.nc" writer = MWSL1BFakeFileWriter(file_path) writer.write() - yield file_path + return file_path class MWSL1BFakeFileWriter: @@ -325,10 +325,10 @@ def test_get_dataset_aux_data_expected_data_missing(self, caplog, reader): " no valid Dataset created") assert log_output in caplog.text - @pytest.mark.parametrize("dims", ( + @pytest.mark.parametrize("dims", [ ("n_scans", "n_fovs"), ("x", "y"), - )) + ]) def test_standardize_dims(self, reader, dims): """Test standardize dims.""" variable = xr.DataArray( @@ -389,7 +389,7 @@ def test_manage_attributes(self, mock, reader): } -@pytest.mark.parametrize("name, index", [("1", 0), ("2", 1), ("24", 23)]) +@pytest.mark.parametrize(("name", "index"), [("1", 0), ("2", 1), ("24", 23)]) def test_get_channel_index_from_name(name, index): """Test getting the MWS channel index from the channel name.""" ch_idx = get_channel_index_from_name(name) @@ -398,8 +398,5 @@ def test_get_channel_index_from_name(name, index): def test_get_channel_index_from_name_throw_exception(): """Test that an excpetion is thrown when getting the MWS channel index from an unsupported name.""" - with 
pytest.raises(Exception) as excinfo: + with pytest.raises(AttributeError, match="Channel name 'channel 1' not supported"): _ = get_channel_index_from_name("channel 1") - - assert str(excinfo.value) == "Channel name 'channel 1' not supported" - assert excinfo.type == AttributeError diff --git a/satpy/tests/reader_tests/test_netcdf_utils.py b/satpy/tests/reader_tests/test_netcdf_utils.py index 16dfc57a83..ea104ed086 100644 --- a/satpy/tests/reader_tests/test_netcdf_utils.py +++ b/satpy/tests/reader_tests/test_netcdf_utils.py @@ -127,25 +127,25 @@ def test_all_basic(self): from satpy.readers.netcdf_utils import NetCDF4FileHandler file_handler = NetCDF4FileHandler("test.nc", {}, {}) - self.assertEqual(file_handler["/dimension/rows"], 10) - self.assertEqual(file_handler["/dimension/cols"], 100) + assert file_handler["/dimension/rows"] == 10 + assert file_handler["/dimension/cols"] == 100 for ds in ("test_group/ds1_f", "test_group/ds1_i", "ds2_f", "ds2_i"): - self.assertEqual(file_handler[ds].dtype, np.float32 if ds.endswith("f") else np.int32) - self.assertTupleEqual(file_handler[ds + "/shape"], (10, 100)) - self.assertEqual(file_handler[ds + "/dimensions"], ("rows", "cols")) - self.assertEqual(file_handler[ds + "/attr/test_attr_str"], "test_string") - self.assertEqual(file_handler[ds + "/attr/test_attr_int"], 0) - self.assertEqual(file_handler[ds + "/attr/test_attr_float"], 1.2) + assert file_handler[ds].dtype == (np.float32 if ds.endswith("f") else np.int32) + assert file_handler[ds + "/shape"] == (10, 100) + assert file_handler[ds + "/dimensions"] == ("rows", "cols") + assert file_handler[ds + "/attr/test_attr_str"] == "test_string" + assert file_handler[ds + "/attr/test_attr_int"] == 0 + assert file_handler[ds + "/attr/test_attr_float"] == 1.2 test_group = file_handler["test_group"] - self.assertTupleEqual(test_group["ds1_i"].shape, (10, 100)) - self.assertTupleEqual(test_group["ds1_i"].dims, ("rows", "cols")) + assert test_group["ds1_i"].shape == (10, 100) + assert test_group["ds1_i"].dims == ("rows", "cols") - self.assertEqual(file_handler["/attr/test_attr_str"], "test_string") - self.assertEqual(file_handler["/attr/test_attr_str_arr"], "test_string2") - self.assertEqual(file_handler["/attr/test_attr_int"], 0) - self.assertEqual(file_handler["/attr/test_attr_float"], 1.2) + assert file_handler["/attr/test_attr_str"] == "test_string" + assert file_handler["/attr/test_attr_str_arr"] == "test_string2" + assert file_handler["/attr/test_attr_int"] == 0 + assert file_handler["/attr/test_attr_float"] == 1.2 global_attrs = { "test_attr_str": "test_string", @@ -153,16 +153,16 @@ def test_all_basic(self): "test_attr_int": 0, "test_attr_float": 1.2 } - self.assertEqual(file_handler["/attrs"], global_attrs) + assert file_handler["/attrs"] == global_attrs - self.assertIsInstance(file_handler.get("ds2_f")[:], xr.DataArray) - self.assertIsNone(file_handler.get("fake_ds")) - self.assertEqual(file_handler.get("fake_ds", "test"), "test") + assert isinstance(file_handler.get("ds2_f")[:], xr.DataArray) + assert file_handler.get("fake_ds") is None + assert file_handler.get("fake_ds", "test") == "test" - self.assertTrue("ds2_f" in file_handler) - self.assertFalse("fake_ds" in file_handler) - self.assertIsNone(file_handler.file_handle) - self.assertEqual(file_handler["ds2_sc"], 42) + assert ("ds2_f" in file_handler) is True + assert ("fake_ds" in file_handler) is False + assert file_handler.file_handle is None + assert file_handler["ds2_sc"] == 42 def test_listed_variables(self): """Test that only listed 
variables/attributes are collected.""" @@ -212,11 +212,10 @@ def test_caching(self): from satpy.readers.netcdf_utils import NetCDF4FileHandler h = NetCDF4FileHandler("test.nc", {}, {}, cache_var_size=1000, cache_handle=True) - self.assertIsNotNone(h.file_handle) - self.assertTrue(h.file_handle.isopen()) + assert h.file_handle is not None + assert h.file_handle.isopen() - self.assertEqual(sorted(h.cached_file_content.keys()), - ["ds2_s", "ds2_sc"]) + assert sorted(h.cached_file_content.keys()) == ["ds2_s", "ds2_sc"] # with caching, these tests access different lines than without np.testing.assert_array_equal(h["ds2_s"], np.arange(10)) np.testing.assert_array_equal(h["test_group/ds1_i"], @@ -227,7 +226,7 @@ def test_caching(self): h["ds2_f"], np.arange(10. * 100).reshape((10, 100))) h.__del__() - self.assertFalse(h.file_handle.isopen()) + assert not h.file_handle.isopen() def test_filenotfound(self): """Test that error is raised when file not found.""" diff --git a/satpy/tests/reader_tests/test_nucaps.py b/satpy/tests/reader_tests/test_nucaps.py index 5b1c061798..a1f5736bdb 100644 --- a/satpy/tests/reader_tests/test_nucaps.py +++ b/satpy/tests/reader_tests/test_nucaps.py @@ -177,10 +177,10 @@ def test_init(self): loadables = r.select_files_from_pathnames([ "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_init_with_kwargs(self): """Test basic init with extra parameters.""" @@ -189,10 +189,10 @@ def test_init_with_kwargs(self): loadables = r.select_files_from_pathnames([ "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables, fh_kwargs={"mask_surface": False}) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_nonpressure_based(self): """Test loading all channels that aren't based on pressure.""" @@ -209,14 +209,14 @@ def test_load_nonpressure_based(self): "Skin_Temperature", "Quality_Flag", ]) - self.assertEqual(len(datasets), 6) + assert len(datasets) == 6 for v in datasets.values(): # self.assertNotEqual(v.info['resolution'], 0) # self.assertEqual(v.info['units'], 'degrees') - self.assertEqual(v.ndim, 1) - self.assertEqual(v.attrs["sensor"], set(["cris", "atms", "viirs"])) - self.assertEqual(type(v.attrs["start_time"]), datetime.datetime) - self.assertEqual(type(v.attrs["end_time"]), datetime.datetime) + assert v.ndim == 1 + assert v.attrs["sensor"] == set(["cris", "atms", "viirs"]) + assert type(v.attrs["start_time"]) == datetime.datetime + assert type(v.attrs["end_time"]) == datetime.datetime def test_load_pressure_based(self): """Test loading all channels based on pressure.""" @@ -246,10 +246,10 @@ def test_load_pressure_based(self): "SO2", "SO2_MR", ]) - self.assertEqual(len(datasets), 19) + assert len(datasets) == 19 for v in datasets.values(): # self.assertNotEqual(v.info['resolution'], 0) - self.assertEqual(v.ndim, 2) + assert v.ndim == 2 if np.issubdtype(v.dtype, np.floating): assert "_FillValue" not in v.attrs @@ -263,9 +263,9 @@ def test_load_multiple_files_pressure(self): ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=True) - self.assertEqual(len(datasets), 100) + assert len(datasets) == 100 for v in
datasets.values(): - self.assertEqual(v.ndim, 1) + assert v.ndim == 1 def test_load_individual_pressure_levels_true(self): """Test loading Temperature with individual pressure datasets.""" @@ -276,9 +276,9 @@ def test_load_individual_pressure_levels_true(self): ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=True) - self.assertEqual(len(datasets), 100) + assert len(datasets) == 100 for v in datasets.values(): - self.assertEqual(v.ndim, 1) + assert v.ndim == 1 def test_load_individual_pressure_levels_min_max(self): """Test loading individual Temperature with min/max level specified.""" @@ -289,9 +289,9 @@ def test_load_individual_pressure_levels_min_max(self): ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=(100., 150.)) - self.assertEqual(len(datasets), 6) + assert len(datasets) == 6 for v in datasets.values(): - self.assertEqual(v.ndim, 1) + assert v.ndim == 1 def test_load_individual_pressure_levels_single(self): """Test loading individual Temperature with specific levels.""" @@ -302,9 +302,9 @@ def test_load_individual_pressure_levels_single(self): ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=(103.017,)) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.ndim, 1) + assert v.ndim == 1 def test_load_pressure_levels_true(self): """Test loading Temperature with all pressure levels.""" @@ -315,10 +315,10 @@ def test_load_pressure_levels_true(self): ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature"], pressure_levels=True) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.ndim, 2) - self.assertTupleEqual(v.shape, DEFAULT_PRES_FILE_SHAPE) + assert v.ndim == 2 + assert v.shape == DEFAULT_PRES_FILE_SHAPE def test_load_pressure_levels_min_max(self): """Test loading Temperature with min/max level specified.""" @@ -329,11 +329,10 @@ def test_load_pressure_levels_min_max(self): ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature"], pressure_levels=(100., 150.)) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.ndim, 2) - self.assertTupleEqual(v.shape, - (DEFAULT_PRES_FILE_SHAPE[0], 6)) + assert v.ndim == 2 + assert v.shape == (DEFAULT_PRES_FILE_SHAPE[0], 6) def test_load_pressure_levels_single(self): """Test loading a specific Temperature level.""" @@ -344,11 +343,10 @@ def test_load_pressure_levels_single(self): ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature"], pressure_levels=(103.017,)) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.ndim, 2) - self.assertTupleEqual(v.shape, - (DEFAULT_PRES_FILE_SHAPE[0], 1)) + assert v.ndim == 2 + assert v.shape == (DEFAULT_PRES_FILE_SHAPE[0], 1) def test_load_pressure_levels_single_and_pressure_levels(self): """Test loading a specific Temperature level and pressure levels.""" @@ -359,13 +357,12 @@ def test_load_pressure_levels_single_and_pressure_levels(self): ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature", "Pressure_Levels"], pressure_levels=(103.017,)) - self.assertEqual(len(datasets), 2) + assert len(datasets) == 2 t_ds = datasets["Temperature"] - self.assertEqual(t_ds.ndim, 2) - self.assertTupleEqual(t_ds.shape, - 
(DEFAULT_PRES_FILE_SHAPE[0], 1)) + assert t_ds.ndim == 2 + assert t_ds.shape == (DEFAULT_PRES_FILE_SHAPE[0], 1) pl_ds = datasets["Pressure_Levels"] - self.assertTupleEqual(pl_ds.shape, (1,)) + assert pl_ds.shape == (1,) class TestNUCAPSScienceEDRReader(unittest.TestCase): @@ -394,10 +391,10 @@ def test_init(self): loadables = r.select_files_from_pathnames([ "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_nonpressure_based(self): """Test loading all channels that aren't based on pressure.""" @@ -413,12 +410,12 @@ def test_load_nonpressure_based(self): "Skin_Temperature", "Quality_Flag", ]) - self.assertEqual(len(datasets), 5) + assert len(datasets) == 5 for v in datasets.values(): - self.assertEqual(v.ndim, 1) - self.assertEqual(v.attrs["sensor"], set(["cris", "atms", "viirs"])) - self.assertEqual(type(v.attrs["start_time"]), datetime.datetime) - self.assertEqual(type(v.attrs["end_time"]), datetime.datetime) + assert v.ndim == 1 + assert v.attrs["sensor"] == set(["cris", "atms", "viirs"]) + assert type(v.attrs["start_time"]) == datetime.datetime + assert type(v.attrs["end_time"]) == datetime.datetime def test_load_pressure_based(self): """Test loading all channels based on pressure.""" @@ -445,10 +442,10 @@ def test_load_pressure_based(self): "SO2", "SO2_MR", ]) - self.assertEqual(len(datasets), 16) + assert len(datasets) == 16 for v in datasets.values(): # self.assertNotEqual(v.info['resolution'], 0) - self.assertEqual(v.ndim, 2) + assert v.ndim == 2 def test_load_individual_pressure_levels_true(self): """Test loading Temperature with individual pressure datasets.""" @@ -459,9 +456,9 @@ def test_load_individual_pressure_levels_true(self): ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=True) - self.assertEqual(len(datasets), 100) + assert len(datasets) == 100 for v in datasets.values(): - self.assertEqual(v.ndim, 1) + assert v.ndim == 1 def test_load_individual_pressure_levels_min_max(self): """Test loading individual Temperature with min/max level specified.""" @@ -472,9 +469,9 @@ def test_load_individual_pressure_levels_min_max(self): ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=(100., 150.)) - self.assertEqual(len(datasets), 6) + assert len(datasets) == 6 for v in datasets.values(): - self.assertEqual(v.ndim, 1) + assert v.ndim == 1 def test_load_individual_pressure_levels_single(self): """Test loading individual Temperature with specific levels.""" @@ -485,9 +482,9 @@ def test_load_individual_pressure_levels_single(self): ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=(103.017,)) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.ndim, 1) + assert v.ndim == 1 def test_load_pressure_levels_true(self): """Test loading Temperature with all pressure levels.""" @@ -498,10 +495,10 @@ def test_load_pressure_levels_true(self): ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature"], pressure_levels=True) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.ndim, 2) - self.assertTupleEqual(v.shape, DEFAULT_PRES_FILE_SHAPE) + assert v.ndim == 2 + 
assert v.shape == DEFAULT_PRES_FILE_SHAPE def test_load_pressure_levels_min_max(self): """Test loading Temperature with min/max level specified.""" @@ -512,11 +509,10 @@ def test_load_pressure_levels_min_max(self): ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature"], pressure_levels=(100., 150.)) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.ndim, 2) - self.assertTupleEqual(v.shape, - (DEFAULT_PRES_FILE_SHAPE[0], 6)) + assert v.ndim == 2 + assert v.shape == (DEFAULT_PRES_FILE_SHAPE[0], 6) def test_load_pressure_levels_single(self): """Test loading a specific Temperature level.""" @@ -527,11 +523,10 @@ def test_load_pressure_levels_single(self): ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature"], pressure_levels=(103.017,)) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.ndim, 2) - self.assertTupleEqual(v.shape, - (DEFAULT_PRES_FILE_SHAPE[0], 1)) + assert v.ndim == 2 + assert v.shape == (DEFAULT_PRES_FILE_SHAPE[0], 1) def test_load_pressure_levels_single_and_pressure_levels(self): """Test loading a specific Temperature level and pressure levels.""" @@ -542,10 +537,9 @@ def test_load_pressure_levels_single_and_pressure_levels(self): ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature", "Pressure_Levels"], pressure_levels=(103.017,)) - self.assertEqual(len(datasets), 2) + assert len(datasets) == 2 t_ds = datasets["Temperature"] - self.assertEqual(t_ds.ndim, 2) - self.assertTupleEqual(t_ds.shape, - (DEFAULT_PRES_FILE_SHAPE[0], 1)) + assert t_ds.ndim == 2 + assert t_ds.shape == (DEFAULT_PRES_FILE_SHAPE[0], 1) pl_ds = datasets["Pressure_Levels"] - self.assertTupleEqual(pl_ds.shape, (1,)) + assert pl_ds.shape == (1,) diff --git a/satpy/tests/reader_tests/test_nwcsaf_msg.py b/satpy/tests/reader_tests/test_nwcsaf_msg.py index e323baeb20..6d4dbfe53f 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_msg.py +++ b/satpy/tests/reader_tests/test_nwcsaf_msg.py @@ -23,6 +23,7 @@ import h5py import numpy as np +import pytest from satpy.tests.reader_tests.utils import fill_h5 @@ -483,18 +484,18 @@ def test_get_area_def(self): aext_res = AREA_DEF_DICT["area_extent"] for i in range(4): - self.assertAlmostEqual(area_def.area_extent[i], aext_res[i], 4) + assert area_def.area_extent[i] == pytest.approx(aext_res[i], abs=1e-4) proj_dict = AREA_DEF_DICT["proj_dict"] - self.assertEqual(proj_dict["proj"], area_def.proj_dict["proj"]) + assert proj_dict["proj"] == area_def.proj_dict["proj"] # Not all elements passed on Appveyor, so skip testing every single element of the proj-dict: # for key in proj_dict: # self.assertEqual(proj_dict[key], area_def.proj_dict[key]) - self.assertEqual(AREA_DEF_DICT["x_size"], area_def.width) - self.assertEqual(AREA_DEF_DICT["y_size"], area_def.height) + assert AREA_DEF_DICT["x_size"] == area_def.width + assert AREA_DEF_DICT["y_size"] == area_def.height - self.assertEqual(AREA_DEF_DICT["area_id"], area_def.area_id) + assert AREA_DEF_DICT["area_id"] == area_def.area_id def test_get_dataset(self): """Retrieve datasets from a NWCSAF msgv2013 hdf5 file.""" @@ -506,8 +507,8 @@ def test_get_dataset(self): dsid = make_dataid(name="ct") test = Hdf5NWCSAF(self.filename_ct, filename_info, filetype_info) ds = test.get_dataset(dsid, {"file_key": "CT"}) - self.assertEqual(ds.shape, (1856, 3712)) - self.assertEqual(ds.dtype, np.uint8) + assert ds.shape == (1856, 3712) + assert ds.dtype == np.uint8 
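(For context on the pytest.approx conversion just above: unittest's assertAlmostEqual(a, b, 4) passes when round(a - b, 4) == 0, and pytest.approx(b, abs=1e-4) expresses nearly the same absolute tolerance without needing a TestCase. A minimal self-contained sketch, assuming only that pytest is installed; the numbers are illustrative and not taken from these tests:)

import pytest

def test_extent_tolerance():
    computed = 5567248.074331
    expected = 5567248.0743
    # unittest style: self.assertAlmostEqual(computed, expected, 4)
    # pytest style with a comparable absolute tolerance:
    assert computed == pytest.approx(expected, abs=1e-4)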
np.testing.assert_allclose(ds.data[1000:1010, 1000:1010].compute(), CTYPE_TEST_FRAME) filename_info = {} @@ -515,8 +516,8 @@ def test_get_dataset(self): dsid = make_dataid(name="ctth_alti") test = Hdf5NWCSAF(self.filename_ctth, filename_info, filetype_info) ds = test.get_dataset(dsid, {"file_key": "CTTH_HEIGHT"}) - self.assertEqual(ds.shape, (1856, 3712)) - self.assertEqual(ds.dtype, np.float32) + assert ds.shape == (1856, 3712) + assert ds.dtype == np.float32 np.testing.assert_allclose(ds.data[1000:1010, 1000:1010].compute(), CTTH_HEIGHT_TEST_FRAME_RES) filename_info = {} @@ -524,8 +525,8 @@ def test_get_dataset(self): dsid = make_dataid(name="ctth_pres") test = Hdf5NWCSAF(self.filename_ctth, filename_info, filetype_info) ds = test.get_dataset(dsid, {"file_key": "CTTH_PRESS"}) - self.assertEqual(ds.shape, (1856, 3712)) - self.assertEqual(ds.dtype, np.float32) + assert ds.shape == (1856, 3712) + assert ds.dtype == np.float32 np.testing.assert_allclose(ds.data[1000:1010, 1000:1010].compute(), CTTH_PRESSURE_TEST_FRAME_RES) filename_info = {} @@ -533,8 +534,8 @@ def test_get_dataset(self): dsid = make_dataid(name="ctth_tempe") test = Hdf5NWCSAF(self.filename_ctth, filename_info, filetype_info) ds = test.get_dataset(dsid, {"file_key": "CTTH_TEMPER"}) - self.assertEqual(ds.shape, (1856, 3712)) - self.assertEqual(ds.dtype, np.float32) + assert ds.shape == (1856, 3712) + assert ds.dtype == np.float32 np.testing.assert_allclose(ds.data[1000:1010, 1000:1010].compute(), CTTH_TEMPERATURE_TEST_FRAME_RES) def tearDown(self): diff --git a/satpy/tests/reader_tests/test_nwcsaf_nc.py b/satpy/tests/reader_tests/test_nwcsaf_nc.py index fb7187af1f..2070e5187c 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_nc.py +++ b/satpy/tests/reader_tests/test_nwcsaf_nc.py @@ -105,7 +105,7 @@ def create_nwcsaf_geo_ct_file(directory, attrs=global_attrs): return filename -@pytest.fixture +@pytest.fixture() def nwcsaf_geo_ct_filehandler(nwcsaf_geo_ct_filename): """Create a CT filehandler.""" return NcNWCSAF(nwcsaf_geo_ct_filename, {}, {}) @@ -156,13 +156,13 @@ def create_ctth_file(path, attrs=global_attrs): return filename -@pytest.fixture +@pytest.fixture() def nwcsaf_pps_cmic_filehandler(nwcsaf_pps_cmic_filename): """Create a CMIC filehandler.""" return NcNWCSAF(nwcsaf_pps_cmic_filename, {}, {"file_key_prefix": "cmic_"}) -@pytest.fixture +@pytest.fixture() def nwcsaf_pps_ctth_filehandler(nwcsaf_pps_ctth_filename): """Create a CTTH filehandler.""" return NcNWCSAF(nwcsaf_pps_ctth_filename, {}, {}) @@ -218,7 +218,7 @@ def create_ctth_alti_pal_variable_with_fill_value_color(nc_file, var_name): var.attrs["_FillValue"] = 65535 -@pytest.fixture +@pytest.fixture() def nwcsaf_pps_cpp_filehandler(nwcsaf_pps_cpp_filename): """Create a CPP filehandler.""" return NcNWCSAF(nwcsaf_pps_cpp_filename, {}, {"file_key_prefix": "cpp_"}) @@ -233,7 +233,7 @@ def nwcsaf_old_geo_ct_filename(tmp_path_factory): return create_nwcsaf_geo_ct_file(tmp_path_factory.mktemp("data-old"), attrs=attrs) -@pytest.fixture +@pytest.fixture() def nwcsaf_old_geo_ct_filehandler(nwcsaf_old_geo_ct_filename): """Create a CT filehandler.""" return NcNWCSAF(nwcsaf_old_geo_ct_filename, {}, {}) @@ -242,19 +242,19 @@ class TestNcNWCSAFGeo: """Test the NcNWCSAF reader for Geo products.""" - @pytest.mark.parametrize("platform, instrument", [("Metop-B", "avhrr-3"), - ("NOAA-20", "viirs"), - ("Himawari-8", "ahi"), - ("GOES-17", "abi"), - ("Meteosat-11", "seviri")]) + @pytest.mark.parametrize(("platform",
"instrument"), [("Metop-B", "avhrr-3"), + ("NOAA-20", "viirs"), + ("Himawari-8", "ahi"), + ("GOES-17", "abi"), + ("Meteosat-11", "seviri")]) def test_sensor_name_platform(self, nwcsaf_geo_ct_filehandler, platform, instrument): """Test that the correct sensor name is being set.""" nwcsaf_geo_ct_filehandler.set_platform_and_sensor(platform_name=platform) assert nwcsaf_geo_ct_filehandler.sensor == set([instrument]) assert nwcsaf_geo_ct_filehandler.sensor_names == set([instrument]) - @pytest.mark.parametrize("platform, instrument", [("GOES16", "abi"), - ("MSG4", "seviri")]) + @pytest.mark.parametrize(("platform", "instrument"), [("GOES16", "abi"), + ("MSG4", "seviri")]) def test_sensor_name_sat_id(self, nwcsaf_geo_ct_filehandler, platform, instrument): """Test that the correct sensor name is being set.""" nwcsaf_geo_ct_filehandler.set_platform_and_sensor(sat_id=platform) @@ -286,22 +286,22 @@ def test_scale_dataset_attr_removal(self, nwcsaf_geo_ct_filehandler): assert "scale_factor" not in var.attrs assert "add_offset" not in var.attrs - @pytest.mark.parametrize("attrs, expected", [({"scale_factor": np.array(1.5), - "add_offset": np.array(2.5), - "_FillValue": 1}, - [np.nan, 5.5, 7]), - ({"scale_factor": np.array(1.5), - "add_offset": np.array(2.5), - "valid_min": 1.1}, - [np.nan, 5.5, 7]), - ({"scale_factor": np.array(1.5), - "add_offset": np.array(2.5), - "valid_max": 2.1}, - [4, 5.5, np.nan]), - ({"scale_factor": np.array(1.5), - "add_offset": np.array(2.5), - "valid_range": (1.1, 2.1)}, - [np.nan, 5.5, np.nan])]) + @pytest.mark.parametrize(("attrs", "expected"), [({"scale_factor": np.array(1.5), + "add_offset": np.array(2.5), + "_FillValue": 1}, + [np.nan, 5.5, 7]), + ({"scale_factor": np.array(1.5), + "add_offset": np.array(2.5), + "valid_min": 1.1}, + [np.nan, 5.5, 7]), + ({"scale_factor": np.array(1.5), + "add_offset": np.array(2.5), + "valid_max": 2.1}, + [4, 5.5, np.nan]), + ({"scale_factor": np.array(1.5), + "add_offset": np.array(2.5), + "valid_range": (1.1, 2.1)}, + [np.nan, 5.5, np.nan])]) def test_scale_dataset_floating(self, nwcsaf_geo_ct_filehandler, attrs, expected): """Test the scaling of the dataset with floating point values.""" var = xr.DataArray([1, 2, 3], attrs=attrs) diff --git a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py index f0ed47f4f8..bdb0edfb03 100644 --- a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py +++ b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py @@ -94,7 +94,7 @@ def fake_dataset(): ds_list_kd = ["kd_490", "water_class10", "seawifs_nobs_sum"] -@pytest.fixture +@pytest.fixture() def fake_file_dict(fake_dataset, tmp_path): """Write a fake dataset to file.""" fdict = {} @@ -126,7 +126,7 @@ def fake_file_dict(fake_dataset, tmp_path): fake_dataset.to_netcdf(filename) fdict["k490_1d"] = filename - yield fdict + return fdict class TestOCCCIReader: @@ -149,7 +149,7 @@ def _create_reader_for_resolutions(self, filename): assert reader.file_handlers return reader - @pytest.fixture + @pytest.fixture() def area_exp(self): """Get expected area definition.""" proj_dict = {"datum": "WGS84", "no_defs": "None", "proj": "longlat", "type": "crs"} diff --git a/satpy/tests/reader_tests/test_olci_nc.py b/satpy/tests/reader_tests/test_olci_nc.py index b6f5863a25..2f37fb2098 100644 --- a/satpy/tests/reader_tests/test_olci_nc.py +++ b/satpy/tests/reader_tests/test_olci_nc.py @@ -109,7 +109,7 @@ def test_get_mask(self, mocked_dataset): filename_info = {"mission_id": "S3A", "dataset_name": "mask", 
"start_time": 0, "end_time": 0} test = NCOLCI2("somedir/somefile.nc", filename_info, "c") res = test.get_dataset(ds_id, {"nc_key": "mask"}) - self.assertEqual(res.dtype, np.dtype("bool")) + assert res.dtype == np.dtype("bool") expected = np.array([[True, False, True, True, True, True], [False, False, True, True, False, False], [False, False, False, False, False, True], @@ -133,7 +133,7 @@ def test_get_mask_with_alternative_items(self, mocked_dataset): filename_info = {"mission_id": "S3A", "dataset_name": "mask", "start_time": 0, "end_time": 0} test = NCOLCI2("somedir/somefile.nc", filename_info, "c", mask_items=["INVALID"]) res = test.get_dataset(ds_id, {"nc_key": "mask"}) - self.assertEqual(res.dtype, np.dtype("bool")) + assert res.dtype == np.dtype("bool") expected = np.array([True] + [False] * 29).reshape(5, 6) np.testing.assert_array_equal(res.values, expected) @@ -273,4 +273,4 @@ def test_bitflags(self): False, False, False, True, False, True, False, False, False, True, True, False, False, True, False]) - self.assertTrue(all(mask == expected)) + assert all(mask == expected) diff --git a/satpy/tests/reader_tests/test_omps_edr.py b/satpy/tests/reader_tests/test_omps_edr.py index f89e41f5d0..9aa227a200 100644 --- a/satpy/tests/reader_tests/test_omps_edr.py +++ b/satpy/tests/reader_tests/test_omps_edr.py @@ -195,10 +195,10 @@ def test_init(self): "OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5", "OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5", ]) - self.assertEqual(len(loadables), 3) + assert len(loadables) == 3 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_basic_load_so2(self): """Test basic load of so2 datasets.""" @@ -209,32 +209,32 @@ def test_basic_load_so2(self): "OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5", "OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5", ]) - self.assertEqual(len(loadables), 3) + assert len(loadables) == 3 r.create_filehandlers(loadables) ds = r.load(["so2_trm"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 for d in ds.values(): - self.assertEqual(d.attrs["resolution"], 50000) - self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) + assert d.attrs["resolution"] == 50000 + assert d.shape == DEFAULT_FILE_SHAPE + assert "area" in d.attrs + assert d.attrs["area"] is not None ds = r.load(["tcso2_trm_sampo"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 for d in ds.values(): - self.assertEqual(d.attrs["resolution"], 50000) - self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) + assert d.attrs["resolution"] == 50000 + assert d.shape == DEFAULT_FILE_SHAPE ds = r.load(["tcso2_stl_sampo"]) - self.assertEqual(len(ds), 0) + assert len(ds) == 0 # Dataset without _FillValue ds = r.load(["tcso2_tru_sampo"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 # Dataset without unit ds = r.load(["tcso2_pbl_sampo"]) - self.assertEqual(len(ds), 0) + assert len(ds) == 0 def test_basic_load_to3(self): """Test basic load of to3 datasets.""" @@ -245,15 +245,15 @@ def test_basic_load_to3(self): "OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5", "OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5", ]) - self.assertEqual(len(loadables), 3) + assert len(loadables) == 3 r.create_filehandlers(loadables) ds = r.load(["reflectivity_331", "uvaerosol_index"]) - self.assertEqual(len(ds), 2) 
+ assert len(ds) == 2 for d in ds.values(): - self.assertEqual(d.attrs["resolution"], 50000) - self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) + assert d.attrs["resolution"] == 50000 + assert d.shape == DEFAULT_FILE_SHAPE + assert "area" in d.attrs + assert d.attrs["area"] is not None @mock.patch("satpy.readers.hdf5_utils.HDF5FileHandler._get_reference") @mock.patch("h5py.File") @@ -269,4 +269,4 @@ def test_load_so2_DIMENSION_LIST(self, mock_h5py_file, mock_hdf5_utils_get_refer r.create_filehandlers(loadables) ds = r.load(["tcso2_trl_sampo"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 diff --git a/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py b/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py index a529ae9f50..661900e650 100644 --- a/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py +++ b/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py @@ -66,10 +66,10 @@ def setUp(self, xr_): def test_init(self): """Test reader initialization.""" - self.assertEqual(self.reader.start_time, 0) - self.assertEqual(self.reader.end_time, 0) - self.assertEqual(self.reader.fstart_time, 0) - self.assertEqual(self.reader.fend_time, 0) + assert self.reader.start_time == 0 + assert self.reader.end_time == 0 + assert self.reader.fstart_time == 0 + assert self.reader.fend_time == 0 def test_get_dataset(self): """Test getting a dataset.""" @@ -77,6 +77,4 @@ def test_get_dataset(self): dt = self.reader.get_dataset( key=make_dataid(name=ch), info={}) # ... this only compares the valid (unmasked) elements - self.assertTrue(np.all(self.nc[ch] == dt.to_masked_array()), - msg="get_dataset() returns invalid data for " - "dataset {}".format(ch)) + assert np.all(self.nc[ch] == dt.to_masked_array()), f"get_dataset() returns invalid data for dataset {ch}" diff --git a/satpy/tests/reader_tests/test_satpy_cf_nc.py b/satpy/tests/reader_tests/test_satpy_cf_nc.py index b335fd09c8..e71534fbd2 100644 --- a/satpy/tests/reader_tests/test_satpy_cf_nc.py +++ b/satpy/tests/reader_tests/test_satpy_cf_nc.py @@ -88,7 +88,7 @@ def _create_test_netcdf(filename, resolution=742): @pytest.fixture(scope="session") -def _cf_scene(): +def cf_scene(): tstart = datetime(2019, 4, 1, 12, 0) tend = datetime(2019, 4, 1, 12, 15) data_visir = np.array([[1, 2], [3, 4]]) @@ -212,15 +212,15 @@ def _cf_scene(): return scene -@pytest.fixture -def _nc_filename(tmp_path): +@pytest.fixture() +def nc_filename(tmp_path): now = datetime.utcnow() filename = f"testingcfwriter{now:%Y%j%H%M%S}-viirs-mband-20201007075915-20201007080744.nc" return str(tmp_path / filename) -@pytest.fixture -def _nc_filename_i(tmp_path): +@pytest.fixture() +def nc_filename_i(tmp_path): now = datetime.utcnow() filename = f"testingcfwriter{now:%Y%j%H%M%S}-viirs-iband-20201007075915-20201007080744.nc" return str(tmp_path / filename) @@ -229,21 +229,21 @@ def _nc_filename_i(tmp_path): class TestCFReader: """Test case for CF reader.""" - def test_write_and_read_with_area_definition(self, _cf_scene, _nc_filename): + def test_write_and_read_with_area_definition(self, cf_scene, nc_filename): """Save a dataset with an area definition to file with cf_writer and read the data again.""" - _cf_scene.save_datasets(writer="cf", - filename=_nc_filename, - engine="h5netcdf", - flatten_attrs=True, - pretty=True) + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="h5netcdf", + flatten_attrs=True, + pretty=True) scn_ = Scene(reader="satpy_cf_nc", - filenames=[_nc_filename]) + filenames=[nc_filename]) 
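(An aside on the fixture changes in this file: the leading underscore is dropped because these fixtures are requested directly by the tests, and a fixture without teardown can simply return its value, since yield is only needed when cleanup must run after the test finishes. A minimal sketch, assuming only pytest; the fixture names are illustrative and not part of this patch:)

import pytest

@pytest.fixture()
def sample_file(tmp_path):
    path = tmp_path / "sample.nc"
    path.write_bytes(b"")
    return path  # nothing to tear down, so return rather than yield

@pytest.fixture()
def open_handle(sample_file):
    handle = open(sample_file, "rb")
    yield handle  # code after the yield runs as teardown
    handle.close()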
scn_.load(["image0", "image1", "lat"]) - np.testing.assert_array_equal(scn_["image0"].data, _cf_scene["image0"].data) - np.testing.assert_array_equal(scn_["lat"].data, _cf_scene["lat"].data) # lat loaded as dataset - np.testing.assert_array_equal(scn_["image0"].coords["lon"], _cf_scene["lon"].data) # lon loded as coord + np.testing.assert_array_equal(scn_["image0"].data, cf_scene["image0"].data) + np.testing.assert_array_equal(scn_["lat"].data, cf_scene["lat"].data) # lat loaded as dataset + np.testing.assert_array_equal(scn_["image0"].coords["lon"], cf_scene["lon"].data) # lon loded as coord assert isinstance(scn_["image0"].attrs["wavelength"], WavelengthRange) - expected_area = _cf_scene["image0"].attrs["area"] + expected_area = cf_scene["image0"].attrs["area"] actual_area = scn_["image0"].attrs["area"] assert pytest.approx(expected_area.area_extent, 0.000001) == actual_area.area_extent assert expected_area.proj_dict == actual_area.proj_dict @@ -252,18 +252,18 @@ def test_write_and_read_with_area_definition(self, _cf_scene, _nc_filename): assert expected_area.description == actual_area.description assert expected_area.proj_dict == actual_area.proj_dict - def test_write_and_read_with_swath_definition(self, _cf_scene, _nc_filename): + def test_write_and_read_with_swath_definition(self, cf_scene, nc_filename): """Save a dataset with a swath definition to file with cf_writer and read the data again.""" - _cf_scene.save_datasets(writer="cf", - filename=_nc_filename, - engine="h5netcdf", - flatten_attrs=True, - pretty=True, - datasets=["swath_data"]) + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="h5netcdf", + flatten_attrs=True, + pretty=True, + datasets=["swath_data"]) scn_ = Scene(reader="satpy_cf_nc", - filenames=[_nc_filename]) + filenames=[nc_filename]) scn_.load(["swath_data"]) - expected_area = _cf_scene["swath_data"].attrs["area"] + expected_area = cf_scene["swath_data"].attrs["area"] actual_area = scn_["swath_data"].attrs["area"] assert expected_area.shape == actual_area.shape np.testing.assert_array_equal(expected_area.lons.data, actual_area.lons.data) @@ -278,162 +278,162 @@ def test_fix_modifier_attr(self): reader.fix_modifier_attr(ds_info) assert ds_info["modifiers"] == () - def test_read_prefixed_channels(self, _cf_scene, _nc_filename): + def test_read_prefixed_channels(self, cf_scene, nc_filename): """Check channels starting with digit is prefixed and read back correctly.""" - _cf_scene.save_datasets(writer="cf", - filename=_nc_filename, - engine="netcdf4", - flatten_attrs=True, - pretty=True) + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="netcdf4", + flatten_attrs=True, + pretty=True) scn_ = Scene(reader="satpy_cf_nc", - filenames=[_nc_filename]) + filenames=[nc_filename]) scn_.load(["1"]) - np.testing.assert_array_equal(scn_["1"].data, _cf_scene["1"].data) - np.testing.assert_array_equal(scn_["1"].coords["lon"], _cf_scene["lon"].data) # lon loaded as coord + np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loaded as coord scn_ = Scene(reader="satpy_cf_nc", - filenames=[_nc_filename], reader_kwargs={}) + filenames=[nc_filename], reader_kwargs={}) scn_.load(["1"]) - np.testing.assert_array_equal(scn_["1"].data, _cf_scene["1"].data) - np.testing.assert_array_equal(scn_["1"].coords["lon"], _cf_scene["lon"].data) # lon loaded as coord + np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) + 
np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loaded as coord # Check that variables starting with a digit is written to filename variable prefixed - with xr.open_dataset(_nc_filename) as ds_disk: - np.testing.assert_array_equal(ds_disk["CHANNEL_1"].data, _cf_scene["1"].data) + with xr.open_dataset(nc_filename) as ds_disk: + np.testing.assert_array_equal(ds_disk["CHANNEL_1"].data, cf_scene["1"].data) - def test_read_prefixed_channels_include_orig_name(self, _cf_scene, _nc_filename): + def test_read_prefixed_channels_include_orig_name(self, cf_scene, nc_filename): """Check channels starting with digit and includeed orig name is prefixed and read back correctly.""" - _cf_scene.save_datasets(writer="cf", - filename=_nc_filename, - engine="netcdf4", - flatten_attrs=True, - pretty=True, - include_orig_name=True) + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="netcdf4", + flatten_attrs=True, + pretty=True, + include_orig_name=True) scn_ = Scene(reader="satpy_cf_nc", - filenames=[_nc_filename]) + filenames=[nc_filename]) scn_.load(["1"]) - np.testing.assert_array_equal(scn_["1"].data, _cf_scene["1"].data) - np.testing.assert_array_equal(scn_["1"].coords["lon"], _cf_scene["lon"].data) # lon loaded as coord + np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loaded as coord assert scn_["1"].attrs["original_name"] == "1" # Check that variables starting with a digit is written to filename variable prefixed - with xr.open_dataset(_nc_filename) as ds_disk: - np.testing.assert_array_equal(ds_disk["CHANNEL_1"].data, _cf_scene["1"].data) + with xr.open_dataset(nc_filename) as ds_disk: + np.testing.assert_array_equal(ds_disk["CHANNEL_1"].data, cf_scene["1"].data) - def test_read_prefixed_channels_by_user(self, _cf_scene, _nc_filename): + def test_read_prefixed_channels_by_user(self, cf_scene, nc_filename): """Check channels starting with digit is prefixed by user and read back correctly.""" - _cf_scene.save_datasets(writer="cf", - filename=_nc_filename, - engine="netcdf4", - flatten_attrs=True, - pretty=True, - numeric_name_prefix="USER") + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="netcdf4", + flatten_attrs=True, + pretty=True, + numeric_name_prefix="USER") scn_ = Scene(reader="satpy_cf_nc", - filenames=[_nc_filename], reader_kwargs={"numeric_name_prefix": "USER"}) + filenames=[nc_filename], reader_kwargs={"numeric_name_prefix": "USER"}) scn_.load(["1"]) - np.testing.assert_array_equal(scn_["1"].data, _cf_scene["1"].data) - np.testing.assert_array_equal(scn_["1"].coords["lon"], _cf_scene["lon"].data) # lon loded as coord + np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loded as coord # Check that variables starting with a digit is written to filename variable prefixed - with xr.open_dataset(_nc_filename) as ds_disk: - np.testing.assert_array_equal(ds_disk["USER1"].data, _cf_scene["1"].data) + with xr.open_dataset(nc_filename) as ds_disk: + np.testing.assert_array_equal(ds_disk["USER1"].data, cf_scene["1"].data) - def test_read_prefixed_channels_by_user2(self, _cf_scene, _nc_filename): + def test_read_prefixed_channels_by_user2(self, cf_scene, nc_filename): """Check channels starting with digit is prefixed by user when saving and read back correctly without prefix.""" - _cf_scene.save_datasets(writer="cf", - 
filename=_nc_filename, - engine="netcdf4", - flatten_attrs=True, - pretty=True, - include_orig_name=False, - numeric_name_prefix="USER") + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="netcdf4", + flatten_attrs=True, + pretty=True, + include_orig_name=False, + numeric_name_prefix="USER") scn_ = Scene(reader="satpy_cf_nc", - filenames=[_nc_filename]) + filenames=[nc_filename]) scn_.load(["USER1"]) - np.testing.assert_array_equal(scn_["USER1"].data, _cf_scene["1"].data) - np.testing.assert_array_equal(scn_["USER1"].coords["lon"], _cf_scene["lon"].data) # lon loded as coord + np.testing.assert_array_equal(scn_["USER1"].data, cf_scene["1"].data) + np.testing.assert_array_equal(scn_["USER1"].coords["lon"], cf_scene["lon"].data) # lon loaded as coord - def test_read_prefixed_channels_by_user_include_prefix(self, _cf_scene, _nc_filename): + def test_read_prefixed_channels_by_user_include_prefix(self, cf_scene, nc_filename): """Check channels starting with a digit are prefixed by user and include original name when saving.""" - _cf_scene.save_datasets(writer="cf", - filename=_nc_filename, - engine="netcdf4", - flatten_attrs=True, - pretty=True, - include_orig_name=True, - numeric_name_prefix="USER") + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="netcdf4", + flatten_attrs=True, + pretty=True, + include_orig_name=True, + numeric_name_prefix="USER") scn_ = Scene(reader="satpy_cf_nc", - filenames=[_nc_filename]) + filenames=[nc_filename]) scn_.load(["1"]) - np.testing.assert_array_equal(scn_["1"].data, _cf_scene["1"].data) - np.testing.assert_array_equal(scn_["1"].coords["lon"], _cf_scene["lon"].data) # lon loded as coord + np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loaded as coord - def test_read_prefixed_channels_by_user_no_prefix(self, _cf_scene, _nc_filename): + def test_read_prefixed_channels_by_user_no_prefix(self, cf_scene, nc_filename): """Check channels starting with a digit are not prefixed by user.""" with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=UserWarning, message=".*starts with a digit.*") - _cf_scene.save_datasets(writer="cf", - filename=_nc_filename, - engine="netcdf4", - flatten_attrs=True, - pretty=True, - numeric_name_prefix="") + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="netcdf4", + flatten_attrs=True, + pretty=True, + numeric_name_prefix="") scn_ = Scene(reader="satpy_cf_nc", - filenames=[_nc_filename]) + filenames=[nc_filename]) scn_.load(["1"]) - np.testing.assert_array_equal(scn_["1"].data, _cf_scene["1"].data) - np.testing.assert_array_equal(scn_["1"].coords["lon"], _cf_scene["lon"].data) # lon loded as coord + np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loaded as coord - def test_orbital_parameters(self, _cf_scene, _nc_filename): + def test_orbital_parameters(self, cf_scene, nc_filename): """Test that the orbital parameters in attributes are handled correctly.""" - _cf_scene.save_datasets(writer="cf", - filename=_nc_filename) + cf_scene.save_datasets(writer="cf", + filename=nc_filename) scn_ = Scene(reader="satpy_cf_nc", - filenames=[_nc_filename]) + filenames=[nc_filename]) scn_.load(["image0"]) - orig_attrs = _cf_scene["image0"].attrs["orbital_parameters"] + orig_attrs = cf_scene["image0"].attrs["orbital_parameters"] new_attrs =
scn_["image0"].attrs["orbital_parameters"] assert isinstance(new_attrs, dict) for key in orig_attrs: assert orig_attrs[key] == new_attrs[key] - def test_write_and_read_from_two_files(self, _nc_filename, _nc_filename_i): + def test_write_and_read_from_two_files(self, nc_filename, nc_filename_i): """Save two datasets with different resolution and read the solar_zenith_angle again.""" - _create_test_netcdf(_nc_filename, resolution=742) - _create_test_netcdf(_nc_filename_i, resolution=371) + _create_test_netcdf(nc_filename, resolution=742) + _create_test_netcdf(nc_filename_i, resolution=371) scn_ = Scene(reader="satpy_cf_nc", - filenames=[_nc_filename, _nc_filename_i]) + filenames=[nc_filename, nc_filename_i]) scn_.load(["solar_zenith_angle"], resolution=742) assert scn_["solar_zenith_angle"].attrs["resolution"] == 742 scn_.unload() scn_.load(["solar_zenith_angle"], resolution=371) assert scn_["solar_zenith_angle"].attrs["resolution"] == 371 - def test_dataid_attrs_equal_matching_dataset(self, _cf_scene, _nc_filename): + def test_dataid_attrs_equal_matching_dataset(self, cf_scene, nc_filename): """Check that get_dataset returns valid dataset when keys matches.""" from satpy.dataset.dataid import DataID, default_id_keys_config - _create_test_netcdf(_nc_filename, resolution=742) - reader = SatpyCFFileHandler(_nc_filename, {}, {"filetype": "info"}) + _create_test_netcdf(nc_filename, resolution=742) + reader = SatpyCFFileHandler(nc_filename, {}, {"filetype": "info"}) ds_id = DataID(default_id_keys_config, name="solar_zenith_angle", resolution=742, modifiers=()) res = reader.get_dataset(ds_id, {}) assert res.attrs["resolution"] == 742 - def test_dataid_attrs_equal_not_matching_dataset(self, _cf_scene, _nc_filename): + def test_dataid_attrs_equal_not_matching_dataset(self, cf_scene, nc_filename): """Check that get_dataset returns None when key(s) are not matching.""" from satpy.dataset.dataid import DataID, default_id_keys_config - _create_test_netcdf(_nc_filename, resolution=742) - reader = SatpyCFFileHandler(_nc_filename, {}, {"filetype": "info"}) + _create_test_netcdf(nc_filename, resolution=742) + reader = SatpyCFFileHandler(nc_filename, {}, {"filetype": "info"}) not_existing_resolution = 9999999 ds_id = DataID(default_id_keys_config, name="solar_zenith_angle", resolution=not_existing_resolution, modifiers=()) assert reader.get_dataset(ds_id, {}) is None - def test_dataid_attrs_equal_contains_not_matching_key(self, _cf_scene, _nc_filename): + def test_dataid_attrs_equal_contains_not_matching_key(self, cf_scene, nc_filename): """Check that get_dataset returns valid dataset when dataid have key(s) not existing in data.""" from satpy.dataset.dataid import DataID, default_id_keys_config - _create_test_netcdf(_nc_filename, resolution=742) - reader = SatpyCFFileHandler(_nc_filename, {}, {"filetype": "info"}) + _create_test_netcdf(nc_filename, resolution=742) + reader = SatpyCFFileHandler(nc_filename, {}, {"filetype": "info"}) ds_id = DataID(default_id_keys_config, name="solar_zenith_angle", resolution=742, modifiers=(), calibration="counts") res = reader.get_dataset(ds_id, {}) diff --git a/satpy/tests/reader_tests/test_scmi.py b/satpy/tests/reader_tests/test_scmi.py index 45fcc9caee..89eda0479a 100644 --- a/satpy/tests/reader_tests/test_scmi.py +++ b/satpy/tests/reader_tests/test_scmi.py @@ -105,12 +105,9 @@ def test_basic_attributes(self): from datetime import datetime from satpy.tests.utils import make_dataid - self.assertEqual(self.reader.start_time, - datetime(2017, 7, 29, 12, 0, 0, 0)) - 
self.assertEqual(self.reader.end_time, - datetime(2017, 7, 29, 12, 0, 0, 0)) - self.assertEqual(self.reader.get_shape(make_dataid(name="C05"), {}), - (2, 5)) + assert self.reader.start_time == datetime(2017, 7, 29, 12, 0, 0, 0) + assert self.reader.end_time == datetime(2017, 7, 29, 12, 0, 0, 0) + assert self.reader.get_shape(make_dataid(name="C05"), {}) == (2, 5) def test_data_load(self): """Test data loading.""" @@ -119,10 +116,9 @@ def test_data_load(self): make_dataid(name="C05", calibration="reflectance"), {}) np.testing.assert_allclose(res.data, self.expected_rad, equal_nan=True) - self.assertNotIn("scale_factor", res.attrs) - self.assertNotIn("_FillValue", res.attrs) - self.assertEqual(res.attrs["standard_name"], - "toa_bidirectional_reflectance") + assert "scale_factor" not in res.attrs + assert "_FillValue" not in res.attrs + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" assert "orbital_parameters" in res.attrs orb_params = res.attrs["orbital_parameters"] assert orb_params["projection_longitude"] == -90.0 @@ -181,13 +177,13 @@ def test_get_area_def_geos(self, adef): ) reader.get_area_def(None) - self.assertEqual(adef.call_count, 1) + assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] - self.assertDictEqual(call_args[3], { - "a": 1.0, "b": 1.0, "h": 1.0, "lon_0": -90.0, "lat_0": 0.0, - "proj": "geos", "sweep": "x", "units": "m"}) - self.assertEqual(call_args[4], reader.ncols) - self.assertEqual(call_args[5], reader.nlines) + assert call_args[3] == {"a": 1.0, "b": 1.0, "h": 1.0, + "lon_0": -90.0, "lat_0": 0.0, + "proj": "geos", "sweep": "x", "units": "m"} + assert call_args[4] == reader.ncols + assert call_args[5] == reader.nlines np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") @@ -206,13 +202,13 @@ def test_get_area_def_lcc(self, adef): ) reader.get_area_def(None) - self.assertEqual(adef.call_count, 1) + assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] - self.assertDictEqual(call_args[3], { - "a": 1.0, "b": 1.0, "lon_0": -90.0, "lat_0": 25.0, "lat_1": 25.0, - "proj": "lcc", "units": "m"}) - self.assertEqual(call_args[4], reader.ncols) - self.assertEqual(call_args[5], reader.nlines) + assert call_args[3] == {"a": 1.0, "b": 1.0, + "lon_0": -90.0, "lat_0": 25.0, "lat_1": 25.0, + "proj": "lcc", "units": "m"} + assert call_args[4] == reader.ncols + assert call_args[5] == reader.nlines np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") @@ -231,13 +227,13 @@ def test_get_area_def_stere(self, adef): ) reader.get_area_def(None) - self.assertEqual(adef.call_count, 1) + assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] - self.assertDictEqual(call_args[3], { - "a": 1.0, "b": 1.0, "lon_0": -90.0, "lat_0": 90.0, "lat_ts": 60.0, - "proj": "stere", "units": "m"}) - self.assertEqual(call_args[4], reader.ncols) - self.assertEqual(call_args[5], reader.nlines) + assert call_args[3] == {"a": 1.0, "b": 1.0, + "lon_0": -90.0, "lat_0": 90.0, "lat_ts": 60.0, + "proj": "stere", "units": "m"} + assert call_args[4] == reader.ncols + assert call_args[5] == reader.nlines np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") @@ -255,13 +251,13 @@ def test_get_area_def_merc(self, adef): ) reader.get_area_def(None) - self.assertEqual(adef.call_count, 1) + assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] - 
self.assertDictEqual(call_args[3], { - "a": 1.0, "b": 1.0, "lon_0": -90.0, "lat_0": 0.0, "lat_ts": 0.0, - "proj": "merc", "units": "m"}) - self.assertEqual(call_args[4], reader.ncols) - self.assertEqual(call_args[5], reader.nlines) + assert call_args[3] == {"a": 1.0, "b": 1.0, + "lon_0": -90.0, "lat_0": 0.0, "lat_ts": 0.0, + "proj": "merc", "units": "m"} + assert call_args[4] == reader.ncols + assert call_args[5] == reader.nlines np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index b6540d8623..73a2eac8db 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -74,8 +74,7 @@ def test_chebyshev(self): def test_get_cds_time(self): """Test the get_cds_time function.""" # Scalar - self.assertEqual(get_cds_time(days=21246, msecs=12*3600*1000), - np.datetime64("2016-03-03 12:00")) + assert get_cds_time(days=21246, msecs=12 * 3600 * 1000) == np.datetime64("2016-03-03 12:00") # Array days = np.array([21246, 21247, 21248]) @@ -118,18 +117,10 @@ def observation_end_time(self): def test_round_nom_time(self): """Test the rouding of start/end_time.""" - self.assertEqual(round_nom_time( - dt=self.observation_start_time(), - time_delta=timedelta(minutes=15) - ), - datetime(2023, 3, 20, 15, 0) - ) - self.assertEqual(round_nom_time( - dt=self.observation_end_time(), - time_delta=timedelta(minutes=15) - ), - datetime(2023, 3, 20, 15, 15) - ) + assert round_nom_time(dt=self.observation_start_time(), + time_delta=timedelta(minutes=15)) == datetime(2023, 3, 20, 15, 0) + assert round_nom_time(dt=self.observation_end_time(), + time_delta=timedelta(minutes=15)) == datetime(2023, 3, 20, 15, 15) @staticmethod def test_pad_data_horizontally(): @@ -251,7 +242,7 @@ def test_get_padding_area_int(): class TestSatellitePosition: """Test locating the satellite.""" - @pytest.fixture + @pytest.fixture() def orbit_polynomial(self): """Get an orbit polynomial for testing.""" return OrbitPolynomial( @@ -270,7 +261,7 @@ def orbit_polynomial(self): ) ) - @pytest.fixture + @pytest.fixture() def time(self): """Get scan timestamp for testing.""" return datetime(2006, 1, 1, 12, 15, 9, 304888) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_calibration.py b/satpy/tests/reader_tests/test_seviri_l1b_calibration.py index cc1107cc6c..d46af5abd2 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_calibration.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_calibration.py @@ -117,14 +117,14 @@ def test_convert_to_radiance(self): """Test the conversion from counts to radiances.""" result = self.algo.convert_to_radiance(COUNTS_INPUT, GAIN, OFFSET) xr.testing.assert_allclose(result, RADIANCES_OUTPUT) - self.assertEqual(result.dtype, np.float32) + assert result.dtype == np.float32 def test_ir_calibrate(self): """Test conversion from radiance to brightness temperature.""" result = self.algo.ir_calibrate(RADIANCES_OUTPUT, CHANNEL_NAME, CAL_TYPE1) xr.testing.assert_allclose(result, TBS_OUTPUT1, rtol=1E-5) - self.assertEqual(result.dtype, np.float32) + assert result.dtype == np.float32 result = self.algo.ir_calibrate(RADIANCES_OUTPUT, CHANNEL_NAME, CAL_TYPE2) @@ -138,8 +138,8 @@ def test_vis_calibrate(self): result = self.algo.vis_calibrate(VIS008_RADIANCE, VIS008_SOLAR_IRRADIANCE) xr.testing.assert_allclose(result, VIS008_REFLECTANCE) - self.assertTrue(result.sun_earth_distance_correction_applied) - 
self.assertEqual(result.dtype, np.float32) + assert result.sun_earth_distance_correction_applied + assert result.dtype == np.float32 class TestSeviriCalibrationHandler: @@ -147,7 +147,7 @@ class TestSeviriCalibrationHandler: def test_init(self): """Test initialization of the calibration handler.""" - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Invalid calibration mode: INVALID. Choose one of (.*)"): SEVIRICalibrationHandler( platform_id=None, channel_name=None, @@ -182,7 +182,7 @@ def _get_calibration_handler(self, calib_mode="NOMINAL", ext_coefs=None): def test_calibrate_exceptions(self): """Test exceptions raised by the calibration handler.""" calib = self._get_calibration_handler() - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Invalid calibration invalid for channel IR_108"): calib.calibrate(None, "invalid") @pytest.mark.parametrize( diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py index ae042999e3..0ce40d8dfc 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py @@ -67,7 +67,7 @@ def test_read_hrv_band(self, memmap): size=int((464 * 5568 * nbits) / 8), dtype=np.uint8) res = self.reader.read_band("HRV", None) - self.assertEqual(res.shape, (464, 5568)) + assert res.shape == (464, 5568) @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") @@ -79,7 +79,7 @@ def test_get_dataset(self, calibrate, parent_get_dataset): parent_get_dataset.return_value = mock.MagicMock() calibrate.return_value = xr.DataArray(data=np.zeros((464, 5568)), dims=("y", "x")) res = self.reader.get_dataset(key, info) - self.assertEqual(res.shape, (464, 11136)) + assert res.shape == (464, 11136) # Test method calls parent_get_dataset.assert_called_with(key, info) @@ -102,7 +102,7 @@ def test_get_dataset_non_fill(self, calibrate, parent_get_dataset): parent_get_dataset.return_value = mock.MagicMock() calibrate.return_value = xr.DataArray(data=np.zeros((464, 5568)), dims=("y", "x")) res = self.reader.get_dataset(key, info) - self.assertEqual(res.shape, (464, 5568)) + assert res.shape == (464, 5568) # Test method calls parent_get_dataset.assert_called_with(key, info) @@ -118,16 +118,15 @@ def test_get_area_def(self): """Test getting the area def.""" from pyresample.utils import proj4_radius_parameters area = self.reader.get_area_def(make_dataid(name="HRV", resolution=1000)) - self.assertEqual(area.area_extent, - (-45561979844414.07, -3720765401003.719, 45602912357076.38, 77771774058.38356)) + assert area.area_extent == (-45561979844414.07, -3720765401003.719, 45602912357076.38, 77771774058.38356) proj_dict = area.proj_dict a, b = proj4_radius_parameters(proj_dict) - self.assertEqual(a, 6378169.0) - self.assertAlmostEqual(b, 6356583.8) - self.assertEqual(proj_dict["h"], 35785831.0) - self.assertEqual(proj_dict["lon_0"], 0.0) - self.assertEqual(proj_dict["proj"], "geos") - self.assertEqual(proj_dict["units"], "m") + assert a == 6378169.0 + assert b == pytest.approx(6356583.8) + assert proj_dict["h"] == 35785831.0 + assert proj_dict["lon_0"] == 0.0 + assert proj_dict["proj"] == "geos" + assert proj_dict["units"] == "m" self.reader.fill_hrv = False area = self.reader.get_area_def(make_dataid(name="HRV", resolution=1000)) npt.assert_allclose(area.defs[0].area_extent, @@ -135,8 +134,8 @@ def test_get_area_def(self): 
npt.assert_allclose(area.defs[1].area_extent, (-30793529275853.656, -3720765401003.719, 14788916824891.568, -2926674655354.9604)) - self.assertEqual(area.defs[0].area_id, "msg_seviri_fes_1km") - self.assertEqual(area.defs[1].area_id, "msg_seviri_fes_1km") + assert area.defs[0].area_id == "msg_seviri_fes_1km" + assert area.defs[1].area_id == "msg_seviri_fes_1km" class TestHRITMSGFileHandler(TestHRITMSGBase): @@ -171,24 +170,20 @@ def test_get_area_def(self): area = self.reader.get_area_def(make_dataid(name="VIS006", resolution=3000)) proj_dict = area.proj_dict a, b = proj4_radius_parameters(proj_dict) - self.assertEqual(a, 6378169.0) - self.assertAlmostEqual(b, 6356583.8) - self.assertEqual(proj_dict["h"], 35785831.0) - self.assertEqual(proj_dict["lon_0"], self.projection_longitude) - self.assertEqual(proj_dict["proj"], "geos") - self.assertEqual(proj_dict["units"], "m") - self.assertEqual(area.area_extent, - (-77771774058.38356, -3720765401003.719, - 30310525626438.438, 77771774058.38356)) + assert a == 6378169.0 + assert b == pytest.approx(6356583.8) + assert proj_dict["h"] == 35785831.0 + assert proj_dict["lon_0"] == self.projection_longitude + assert proj_dict["proj"] == "geos" + assert proj_dict["units"] == "m" + assert area.area_extent == (-77771774058.38356, -3720765401003.719, 30310525626438.438, 77771774058.38356) # Data shifted by 1.5km to N-W self.reader.mda["offset_corrected"] = False area = self.reader.get_area_def(make_dataid(name="VIS006", resolution=3000)) - self.assertEqual(area.area_extent, - (-77771772558.38356, -3720765402503.719, - 30310525627938.438, 77771772558.38356)) + assert area.area_extent == (-77771772558.38356, -3720765402503.719, 30310525627938.438, 77771772558.38356) - self.assertEqual(area.area_id, "msg_seviri_rss_3km") + assert area.area_id == "msg_seviri_rss_3km" @mock.patch("satpy.readers.hrit_base.np.memmap") def test_read_band(self, memmap): @@ -198,7 +193,7 @@ def test_read_band(self, memmap): size=int((464 * 3712 * nbits) / 8), dtype=np.uint8) res = self.reader.read_band("VIS006", None) - self.assertEqual(res.shape, (464, 3712)) + assert res.shape == (464, 3712) @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") @@ -227,18 +222,18 @@ def test_get_dataset(self, calibrate, parent_get_dataset): setup.get_attrs_exp(self.projection_longitude) ) # testing start/end time - self.assertEqual(datetime(2006, 1, 1, 12, 15, 9, 304888), self.reader.observation_start_time) - self.assertEqual(datetime(2006, 1, 1, 12, 15,), self.reader.start_time) - self.assertEqual(self.reader.start_time, self.reader.nominal_start_time) + assert datetime(2006, 1, 1, 12, 15, 9, 304888) == self.reader.observation_start_time + assert datetime(2006, 1, 1, 12, 15) == self.reader.start_time + assert self.reader.start_time == self.reader.nominal_start_time - self.assertEqual(datetime(2006, 1, 1, 12, 27, 39), self.reader.observation_end_time) - self.assertEqual(self.reader.end_time, self.reader.nominal_end_time) - self.assertEqual(datetime(2006, 1, 1, 12, 30,), self.reader.end_time) + assert datetime(2006, 1, 1, 12, 27, 39) == self.reader.observation_end_time + assert self.reader.end_time == self.reader.nominal_end_time + assert datetime(2006, 1, 1, 12, 30) == self.reader.end_time # test repeat cycle duration - self.assertEqual(15, self.reader._repeat_cycle_duration) + assert 15 == self.reader._repeat_cycle_duration # Change the reducescan scenario to test the repeat cycle duration handling 
self.reader.epilogue["ImageProductionStats"]["ActualScanningSummary"]["ReducedScan"] = 1 - self.assertEqual(5, self.reader._repeat_cycle_duration) + assert 5 == self.reader._repeat_cycle_duration @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") @@ -282,10 +277,10 @@ def test_get_raw_mda(self): self.reader.prologue_.reduce = lambda max_size: {"prologue": 1} self.reader.epilogue_.reduce = lambda max_size: {"epilogue": 1} expected = {"prologue": 1, "epilogue": 1, "segment": 1} - self.assertDictEqual(self.reader._get_raw_mda(), expected) + assert self.reader._get_raw_mda() == expected # Make sure _get_raw_mda() doesn't modify the original dictionary - self.assertIn("loff", self.reader.mda) + assert "loff" in self.reader.mda def test_satpos_no_valid_orbit_polynomial(self): """Test satellite position if there is no valid orbit polynomial.""" @@ -296,10 +291,7 @@ def test_satpos_no_valid_orbit_polynomial(self): projection_longitude=self.projection_longitude, orbit_polynomials=ORBIT_POLYNOMIALS_INVALID ) - self.assertNotIn( - "satellite_actual_longitude", - reader.mda["orbital_parameters"] - ) + assert "satellite_actual_longitude" not in reader.mda["orbital_parameters"] class TestHRITMSGPrologueFileHandler(unittest.TestCase): @@ -337,10 +329,10 @@ def test_reduce(self, reduce_mda): reduce_mda.return_value = "reduced" # Set buffer - self.assertEqual(self.reader.reduce(123), "reduced") + assert self.reader.reduce(123) == "reduced" # Read buffer - self.assertEqual(self.reader.reduce(123), "reduced") + assert self.reader.reduce(123) == "reduced" reduce_mda.assert_called_once() @@ -385,13 +377,13 @@ def test_reduce(self, reduce_mda): reduce_mda.return_value = "reduced" # Set buffer - self.assertEqual(self.reader.reduce(123), "reduced") + assert self.reader.reduce(123) == "reduced" reduce_mda.assert_called() # Read buffer reduce_mda.reset_mock() self.reader._reduced = "red" - self.assertEqual(self.reader.reduce(123), "red") + assert self.reader.reduce(123) == "red" reduce_mda.assert_not_called() diff --git a/satpy/tests/reader_tests/test_seviri_l1b_icare.py b/satpy/tests/reader_tests/test_seviri_l1b_icare.py index 81d385bc89..372611c87d 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_icare.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_icare.py @@ -104,9 +104,9 @@ def compare_areas(self, v): -5570248.6866857, -5537244.2506213, -4670127.7031114)} - self.assertEqual(v.attrs["area"].area_id, test_area["area_id"]) - self.assertEqual(v.attrs["area"].width, test_area["width"]) - self.assertEqual(v.attrs["area"].height, test_area["height"]) + assert v.attrs["area"].area_id == test_area["area_id"] + assert v.attrs["area"].width == test_area["width"] + assert v.attrs["area"].height == test_area["height"] np.testing.assert_almost_equal(v.attrs["area"].area_extent, test_area["area_extent"]) @@ -117,9 +117,9 @@ def test_init(self): "GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf", "GEO_L1B-MSG1_2004-12-29T12-15-00_G_IR108_V1-04.hdf" ]) - self.assertEqual(len(loadables), 2) + assert len(loadables) == 2 r.create_filehandlers(loadables) - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_dataset_vis(self): """Test loading all datasets from a full swath file.""" @@ -130,11 +130,11 @@ def test_load_dataset_vis(self): ]) r.create_filehandlers(loadables) datasets = r.load(["VIS008"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): dt = 
datetime(2004, 12, 29, 12, 27, 44) - self.assertEqual(v.attrs["end_time"], dt) - self.assertEqual(v.attrs["calibration"], "reflectance") + assert v.attrs["end_time"] == dt + assert v.attrs["calibration"] == "reflectance" def test_load_dataset_ir(self): """Test loading all datasets from a full swath file.""" @@ -144,9 +144,9 @@ def test_load_dataset_ir(self): ]) r.create_filehandlers(loadables) datasets = r.load(["IR_108"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["calibration"], "brightness_temperature") + assert v.attrs["calibration"] == "brightness_temperature" def test_area_def_lores(self): """Test loading all datasets from an area of interest file.""" @@ -157,7 +157,7 @@ def test_area_def_lores(self): r.create_filehandlers(loadables) ds = r.load(["VIS008"]) self.compare_areas(ds["VIS008"]) - self.assertEqual(ds["VIS008"].attrs["area"].proj_id, "msg_lowres") + assert ds["VIS008"].attrs["area"].proj_id == "msg_lowres" def test_area_def_hires(self): """Test loading all datasets from an area of interest file.""" @@ -168,7 +168,7 @@ def test_area_def_hires(self): r.create_filehandlers(loadables) ds = r.load(["HRV"]) self.compare_areas(ds["HRV"]) - self.assertEqual(ds["HRV"].attrs["area"].proj_id, "msg_hires") + assert ds["HRV"].attrs["area"].proj_id == "msg_hires" def test_sensor_names(self): """Check satellite name conversion is correct, including error case.""" @@ -191,7 +191,7 @@ def _run_target(): for sat in sensor_list: file_data["/attr/Sensors"] = sensor_list[sat] plat, sens = _run_target() - self.assertEqual(plat, sat) + assert plat == sat with self.assertRaises(NameError): file_data["/attr/Sensors"] = "BADSAT/NOSENSE" diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py index 0130740246..ba7cf63447 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_native.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py @@ -654,8 +654,8 @@ def prepare_area_definitions(test_dict): @pytest.mark.parametrize( - "actual, expected", - ( + ("actual", "expected"), + [ (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_VISIR_FULLDISK)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK_FILL)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN)), @@ -676,7 +676,7 @@ def prepare_area_definitions(test_dict): (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI_FILL)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI_FILL)), - ) + ] ) def test_area_definitions(actual, expected): """Test area definitions with only one area.""" @@ -688,11 +688,11 @@ def test_area_definitions(actual, expected): @pytest.mark.parametrize( - "actual, expected", - ( + ("actual", "expected"), + [ (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK)), - ) + ] ) def test_stacked_area_definitions(actual, expected): """Test area definitions with stacked areas.""" @@ -736,12 +736,12 @@ def prepare_is_roi(test_dict): @pytest.mark.parametrize( - "actual, expected", - ( + ("actual", "expected"), + [ (prepare_is_roi(TEST_IS_ROI_FULLDISK)), (prepare_is_roi(TEST_IS_ROI_RAPIDSCAN)), (prepare_is_roi(TEST_IS_ROI_ROI)), - ) + ] ) def test_is_roi(actual, expected): """Test if given area is of area-of-interest.""" @@ -757,21 +757,21 @@ def test_get_available_channels(self): trues = 
("WV_062", "WV_073", "IR_108", "VIS006", "VIS008", "IR_120") for bandname in AVAILABLE_CHANNELS: if bandname in trues: - self.assertTrue(available_chs[bandname]) + assert available_chs[bandname] else: - self.assertFalse(available_chs[bandname]) + assert not available_chs[bandname] available_chs = get_available_channels(TEST2_HEADER_CHNLIST) trues = ("VIS006", "VIS008", "IR_039", "WV_062", "WV_073", "IR_087", "HRV") for bandname in AVAILABLE_CHANNELS: if bandname in trues: - self.assertTrue(available_chs[bandname]) + assert available_chs[bandname] else: - self.assertFalse(available_chs[bandname]) + assert not available_chs[bandname] available_chs = get_available_channels(TEST3_HEADER_CHNLIST) for bandname in AVAILABLE_CHANNELS: - self.assertTrue(available_chs[bandname]) + assert available_chs[bandname] TEST_HEADER_CALIB = { @@ -829,7 +829,7 @@ def file_handler(self): @pytest.mark.parametrize( ("channel", "calibration", "calib_mode", "use_ext_coefs"), - ( + [ # VIS channel, internal coefficients ("VIS006", "counts", "NOMINAL", False), ("VIS006", "radiance", "NOMINAL", False), @@ -855,7 +855,7 @@ def file_handler(self): # HRV channel, external coefficients (mode should have no effect) ("HRV", "radiance", "GSICS", True), ("HRV", "reflectance", "NOMINAL", True), - ) + ] ) def test_calibrate( self, file_handler, counts, channel, calibration, calib_mode, @@ -882,7 +882,7 @@ def test_calibrate( class TestNativeMSGDataset: """Tests for getting the dataset.""" - @pytest.fixture + @pytest.fixture() def file_handler(self): """Create a file handler for testing.""" trailer = { @@ -1123,7 +1123,7 @@ def test_padder_fes_hrv(self): class TestNativeMSGFilenames: """Test identification of Native format filenames.""" - @pytest.fixture + @pytest.fixture() def reader(self): """Return reader for SEVIRI Native format.""" from satpy._config import config_search_paths @@ -1150,11 +1150,11 @@ def test_file_pattern(self, reader): @pytest.mark.parametrize( - "file_content,exp_header_size", - ( + ("file_content", "exp_header_size"), + [ (ASCII_STARTSWITH, 450400), # with ascii header (b"foobar", 445286), # without ascii header - ) + ] ) def test_header_type(file_content, exp_header_size): """Test identification of the file header type.""" @@ -1225,7 +1225,7 @@ def test_header_warning(): @pytest.mark.parametrize( - "starts_with, expected", + ("starts_with", "expected"), [ (ASCII_STARTSWITH, True), (b"this_shall_fail", False) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_nc.py b/satpy/tests/reader_tests/test_seviri_l1b_nc.py index 3f7b1a6296..f6a54aa60e 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_nc.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_nc.py @@ -192,7 +192,7 @@ def _get_fake_dataset(self, counts, h5netcdf): return ds - @pytest.fixture + @pytest.fixture() def h5netcdf(self): """Fixture for xr backend choice.""" return False diff --git a/satpy/tests/reader_tests/test_seviri_l2_grib.py b/satpy/tests/reader_tests/test_seviri_l2_grib.py index a8b5310a78..d57fda4e79 100644 --- a/satpy/tests/reader_tests/test_seviri_l2_grib.py +++ b/satpy/tests/reader_tests/test_seviri_l2_grib.py @@ -95,11 +95,10 @@ def test_data_reading(self, da_, xr_): # Checks the correct file open call mock_file.assert_called_with("test.grib", "rb") # Checks that the dataset has been created as a DataArray object - self.assertEqual(valid_dataset._extract_mock_name(), "xr.DataArray()") + assert valid_dataset._extract_mock_name() == "xr.DataArray()" # Checks that codes_release has been called after each codes_grib_new_from_file 
call # (except after the last one which has returned a None) - self.assertEqual(self.ec_.codes_grib_new_from_file.call_count, - self.ec_.codes_release.call_count + 1) + assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 # Restarts the id generator and clears the call history fake_gid_generator = (i for i in FAKE_GID) @@ -110,14 +109,13 @@ def test_data_reading(self, da_, xr_): # Checks the correct execution of the get_dataset function with an invalid parameter_number invalid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 50}) # Checks that the function returns None - self.assertEqual(invalid_dataset, None) + assert invalid_dataset is None # Checks that codes_release has been called after each codes_grib_new_from_file call # (except after the last one which has returned a None) - self.assertEqual(self.ec_.codes_grib_new_from_file.call_count, - self.ec_.codes_release.call_count + 1) + assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 # Checks the basic data reading - self.assertEqual(REPEAT_CYCLE_DURATION, 15) + assert REPEAT_CYCLE_DURATION == 15 # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions attributes = self.reader._get_attributes() @@ -128,19 +126,19 @@ "sensor": "seviri", "platform_name": "Meteosat-11" } - self.assertEqual(attributes, expected_attributes) + assert attributes == expected_attributes # Checks the reading of an array from the message self.reader._get_xarray_from_msg(0) # Checks that dask.array has been called with the correct arguments name, args, kwargs = da_.mock_calls[0] - self.assertTrue(np.all(args[0] == np.ones((1200, 1000)))) - self.assertEqual(args[1], CHUNK_SIZE) + assert np.all(args[0] == np.ones((1200, 1000))) + assert args[1] == CHUNK_SIZE # Checks that xarray.DataArray has been called with the correct arguments name, args, kwargs = xr_.mock_calls[0] - self.assertEqual(kwargs["dims"], ("y", "x")) + assert kwargs["dims"] == ("y", "x") # Checks the correct execution of the _get_proj_area function pdict, area_dict = self.reader._get_proj_area(0) @@ -156,7 +154,7 @@ "a_desc": "MSG SEVIRI Rapid Scanning Service area definition with 3 km resolution", "p_id": "", } - self.assertEqual(pdict, expected_pdict) + assert pdict == expected_pdict expected_area_dict = { "center_point": 500, "north": 1200, "east": 1, "west": 1000, "south": 1, } - self.assertEqual(area_dict, expected_area_dict) + assert area_dict == expected_area_dict # Checks the correct execution of the get_area_def function with mock.patch("satpy.readers.seviri_l2_grib.calculate_area_extent", expected_args = ({"center_point": 500, "east": 1, "west": 1000, "south": 1, "north": 1200, "column_step": 400., "line_step": 400.},) name, args, kwargs = cae.mock_calls[0] - self.assertEqual(args, expected_args) + assert args == expected_args # Asserts that get_area_definition has been called with the correct arguments name, args, kwargs = gad.mock_calls[0] - self.assertEqual(args[0], expected_pdict) + assert args[0] == expected_pdict # The second argument must be the return result of calculate_area_extent - self.assertEqual(args[1]._extract_mock_name(), "calculate_area_extent()") + assert args[1]._extract_mock_name() == "calculate_area_extent()"
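
A minimal standalone sketch (not part of the patch) of why "assertTrue(np.all(...))" converts to a bare truthiness assert rather than an identity comparison: np.all returns a numpy boolean scalar, which is never the built-in True singleton, so "assert np.all(...) is True" would fail even when every element matches.

import numpy as np

arr = np.ones((1200, 1000))

# np.all returns a numpy scalar (np.bool_), not the built-in True,
# so an identity test against True fails even for an all-ones array.
result = np.all(arr == 1.0)
print(result is True)   # False, although every element is equal
assert result           # plain truthiness assert: the safe pytest-style form

# For whole-array comparisons, numpy's helper raises with a readable diff:
np.testing.assert_array_equal(arr, np.ones((1200, 1000)))

diff --git 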
a/satpy/tests/reader_tests/test_slstr_l1b.py b/satpy/tests/reader_tests/test_slstr_l1b.py index cc0764685f..63a43c9c79 100644 --- a/satpy/tests/reader_tests/test_slstr_l1b.py +++ b/satpy/tests/reader_tests/test_slstr_l1b.py @@ -152,8 +152,8 @@ def test_instantiate(self, bvs_, xr_): assert test.view == "nadir" assert test.stripe == "a" test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) - self.assertEqual(test.start_time, good_start) - self.assertEqual(test.end_time, good_end) + assert test.start_time == good_start + assert test.end_time == good_end xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() @@ -164,8 +164,8 @@ def test_instantiate(self, bvs_, xr_): assert test.view == "oblique" assert test.stripe == "c" test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) - self.assertEqual(test.start_time, good_start) - self.assertEqual(test.end_time, good_end) + assert test.start_time == good_start + assert test.end_time == good_end xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() @@ -174,8 +174,8 @@ def test_instantiate(self, bvs_, xr_): "stripe": "a", "view": "n"} test = NCSLSTRGeo("somedir/geometry_an.nc", filename_info, "c") test.get_dataset(ds_id, dict(filename_info, **{"file_key": "latitude_{stripe:1s}{view:1s}"})) - self.assertEqual(test.start_time, good_start) - self.assertEqual(test.end_time, good_end) + assert test.start_time == good_start + assert test.end_time == good_end xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() @@ -183,15 +183,15 @@ def test_instantiate(self, bvs_, xr_): test.get_dataset(ds_id, dict(filename_info, **{"file_key": "flags_{stripe:1s}{view:1s}"})) assert test.view == "nadir" assert test.stripe == "a" - self.assertEqual(test.start_time, good_start) - self.assertEqual(test.end_time, good_end) + assert test.start_time == good_start + assert test.end_time == good_end xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() test = NCSLSTRAngles("somedir/S1_radiance_an.nc", filename_info, "c") test.get_dataset(ds_id, dict(filename_info, **{"file_key": "geometry_t{view:1s}"})) - self.assertEqual(test.start_time, good_start) - self.assertEqual(test.end_time, good_end) + assert test.start_time == good_start + assert test.end_time == good_end xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() test.get_dataset(ds_id_500, dict(filename_info, **{"file_key": "geometry_t{view:1s}"})) @@ -244,7 +244,7 @@ def test_reflectance_calibration(self, da_, xr_): ds_id = make_dataid(name="S5", calibration="reflectance", stripe="a", view="nadir") test = NCSLSTR1B("somedir/S1_radiance_an.nc", filename_info, "c") data = test.get_dataset(ds_id, dict(filename_info, **{"file_key": "S5"})) - self.assertEqual(data.units, "%") + assert data.units == "%" np.testing.assert_allclose(data.values, self.rad * np.pi) def test_cal_rad(self): diff --git a/satpy/tests/reader_tests/test_smos_l2_wind.py b/satpy/tests/reader_tests/test_smos_l2_wind.py index 3303abff17..519030447b 100644 --- a/satpy/tests/reader_tests/test_smos_l2_wind.py +++ b/satpy/tests/reader_tests/test_smos_l2_wind.py @@ -101,10 +101,10 @@ def test_init(self): loadables = r.select_files_from_pathnames([ "SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_wind_speed(self): """Load wind_speed dataset.""" @@ -116,17 +116,17 @@ def 
test_load_wind_speed(self): ]) r.create_filehandlers(loadables) ds = r.load(["wind_speed"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 for d in ds.values(): - self.assertEqual(d.attrs["platform_shortname"], "SM") - self.assertEqual(d.attrs["sensor"], "MIRAS") - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) - self.assertIn("y", d.dims) - self.assertIn("x", d.dims) - self.assertEqual(d.shape, (719, 1440)) - self.assertEqual(d.y[0].data, -89.75) - self.assertEqual(d.y[d.shape[0] - 1].data, 89.75) + assert d.attrs["platform_shortname"] == "SM" + assert d.attrs["sensor"] == "MIRAS" + assert "area" in d.attrs + assert d.attrs["area"] is not None + assert "y" in d.dims + assert "x" in d.dims + assert d.shape == (719, 1440) + assert d.y[0].data == -89.75 + assert d.y[d.shape[0] - 1].data == 89.75 def test_load_lat(self): """Load lat dataset.""" @@ -138,12 +138,12 @@ def test_load_lat(self): ]) r.create_filehandlers(loadables) ds = r.load(["lat"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 for d in ds.values(): - self.assertIn("y", d.dims) - self.assertEqual(d.shape, (719,)) - self.assertEqual(d.data[0], -89.75) - self.assertEqual(d.data[d.shape[0] - 1], 89.75) + assert "y" in d.dims + assert d.shape == (719,) + assert d.data[0] == -89.75 + assert d.data[d.shape[0] - 1] == 89.75 def test_load_lon(self): """Load lon dataset.""" @@ -155,12 +155,12 @@ def test_load_lon(self): ]) r.create_filehandlers(loadables) ds = r.load(["lon"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 for d in ds.values(): - self.assertIn("x", d.dims) - self.assertEqual(d.shape, (1440,)) - self.assertEqual(d.data[0], -180.0) - self.assertEqual(d.data[d.shape[0] - 1], 179.75) + assert "x" in d.dims + assert d.shape == (1440,) + assert d.data[0] == -180.0 + assert d.data[d.shape[0] - 1] == 179.75 def test_adjust_lon(self): """Load adjust longitude dataset.""" @@ -174,7 +174,7 @@ def test_adjust_lon(self): expected = DataArray(np.concatenate((np.arange(0, 180., 0.25), np.arange(-180.0, 0, 0.25))), dims=("lon")) - self.assertEqual(adjusted.data.tolist(), expected.data.tolist()) + assert adjusted.data.tolist() == expected.data.tolist() def test_roll_dataset(self): """Load roll of dataset along the lon coordinate.""" @@ -187,4 +187,4 @@ def test_roll_dataset(self): data = smos_l2_wind_fh._adjust_lon_coord(data) adjusted = smos_l2_wind_fh._roll_dataset_lon_coord(data) expected = np.arange(-180., 180., 0.25) - self.assertEqual(adjusted.data.tolist(), expected.tolist()) + assert adjusted.data.tolist() == expected.tolist() diff --git a/satpy/tests/reader_tests/test_tropomi_l2.py b/satpy/tests/reader_tests/test_tropomi_l2.py index f2b3660089..05d0717538 100644 --- a/satpy/tests/reader_tests/test_tropomi_l2.py +++ b/satpy/tests/reader_tests/test_tropomi_l2.py @@ -122,10 +122,10 @@ def test_init(self): loadables = r.select_files_from_pathnames([ "S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_no2(self): """Load NO2 dataset.""" @@ -137,16 +137,16 @@ def test_load_no2(self): ]) r.create_filehandlers(loadables) ds = r.load(["nitrogen_dioxide_total_column"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 for d in ds.values(): - self.assertEqual(d.attrs["platform_shortname"], "S5P") - self.assertEqual(d.attrs["sensor"], "tropomi") - 
self.assertEqual(d.attrs["time_coverage_start"], datetime(2018, 7, 9, 17, 25, 34)) - self.assertEqual(d.attrs["time_coverage_end"], datetime(2018, 7, 9, 18, 23, 4)) - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) - self.assertIn("y", d.dims) - self.assertIn("x", d.dims) + assert d.attrs["platform_shortname"] == "S5P" + assert d.attrs["sensor"] == "tropomi" + assert d.attrs["time_coverage_start"] == datetime(2018, 7, 9, 17, 25, 34) + assert d.attrs["time_coverage_end"] == datetime(2018, 7, 9, 18, 23, 4) + assert "area" in d.attrs + assert d.attrs["area"] is not None + assert "y" in d.dims + assert "x" in d.dims def test_load_so2(self): """Load SO2 dataset.""" @@ -158,13 +158,13 @@ ]) r.create_filehandlers(loadables) ds = r.load(["sulfurdioxide_total_vertical_column"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 for d in ds.values(): - self.assertEqual(d.attrs["platform_shortname"], "S5P") - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) - self.assertIn("y", d.dims) - self.assertIn("x", d.dims) + assert d.attrs["platform_shortname"] == "S5P" + assert "area" in d.attrs + assert d.attrs["area"] is not None + assert "y" in d.dims + assert "x" in d.dims def test_load_bounds(self): """Load bounds dataset.""" @@ -177,12 +177,12 @@ r.create_filehandlers(loadables) keys = ["latitude_bounds", "longitude_bounds"] ds = r.load(keys) - self.assertEqual(len(ds), 2) + assert len(ds) == 2 for key in keys: - self.assertEqual(ds[key].attrs["platform_shortname"], "S5P") - self.assertIn("y", ds[key].dims) - self.assertIn("x", ds[key].dims) - self.assertIn("corner", ds[key].dims) + assert ds[key].attrs["platform_shortname"] == "S5P" + assert "y" in ds[key].dims + assert "x" in ds[key].dims + assert "corner" in ds[key].dims # check assembled bounds left = np.vstack([ds[key][:, :, 0], ds[key][-1:, :, 3]]) right = np.vstack([ds[key][:, -1:, 1], ds[key][-1:, -1:, 2]]) @@ -191,13 +191,11 @@ dims=("y", "x") ) dest.attrs = ds[key].attrs - self.assertEqual(dest.attrs["platform_shortname"], "S5P") - self.assertIn("y", dest.dims) - self.assertIn("x", dest.dims) - self.assertEqual(DEFAULT_FILE_SHAPE[0] + 1, dest.shape[0]) - self.assertEqual(DEFAULT_FILE_SHAPE[1] + 1, dest.shape[1]) - self.assertIsNone(np.testing.assert_array_equal(dest[:-1, :-1], ds[key][:, :, 0])) - self.assertIsNone(np.testing.assert_array_equal(dest[-1, :-1], ds[key][-1, :, 3])) - self.assertIsNone(np.testing.assert_array_equal(dest[:, -1], - np.append(ds[key][:, -1, 1], ds[key][-1:, -1:, 2])) - ) + assert dest.attrs["platform_shortname"] == "S5P" + assert "y" in dest.dims + assert "x" in dest.dims + assert DEFAULT_FILE_SHAPE[0] + 1 == dest.shape[0] + assert DEFAULT_FILE_SHAPE[1] + 1 == dest.shape[1] + np.testing.assert_array_equal(dest[:-1, :-1], ds[key][:, :, 0]) + np.testing.assert_array_equal(dest[-1, :-1], ds[key][-1, :, 3]) + np.testing.assert_array_equal(dest[:, -1], np.append(ds[key][:, -1, 1], ds[key][-1:, -1:, 2]))
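
A minimal standalone sketch (not part of the patch) of why the numpy comparison helpers are called bare here: np.testing.assert_array_equal returns None on success and raises AssertionError on mismatch, so wrapping it in an assert statement would fail even for equal arrays, just as the old assertIsNone wrapper only ever re-checked that None return value.

import numpy as np

a = np.arange(6).reshape(2, 3)
b = a.copy()

# The helper returns None on success and raises on mismatch,
# so it is used as a bare statement, never wrapped in assert.
np.testing.assert_array_equal(a, b)
print(np.testing.assert_array_equal(a, b))  # None

# A mismatch raises AssertionError with an element-by-element report.
try:
    np.testing.assert_array_equal(a, b + 1)
except AssertionError:
    print("arrays differ")

diff --git a/satpy/tests/reader_tests/test_utils.py b/satpy/tests/reader_tests/test_utils.py index 12af80ca2a..9deaf1facc 100644 --- a/satpy/tests/reader_tests/test_utils.py +++ b/satpy/tests/reader_tests/test_utils.py @@ -162,24 +162,24 @@ def test_geostationary_mask(self): # Check results along a couple of lines # a) Horizontal - self.assertTrue(np.all(mask[50, :8] == 0)) - self.assertTrue(np.all(mask[50, 8:93] == 1)) - self.assertTrue(np.all(mask[50, 93:] == 0)) + assert 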
np.all(mask[50, :8] == 0) + assert np.all(mask[50, 8:93] == 1) + assert np.all(mask[50, 93:] == 0) # b) Vertical - self.assertTrue(np.all(mask[:31, 50] == 0)) - self.assertTrue(np.all(mask[31:70, 50] == 1)) - self.assertTrue(np.all(mask[70:, 50] == 0)) + assert np.all(mask[:31, 50] == 0) + assert np.all(mask[31:70, 50] == 1) + assert np.all(mask[70:, 50] == 0) # c) Top left to bottom right - self.assertTrue(np.all(mask[range(33), range(33)] == 0)) - self.assertTrue(np.all(mask[range(33, 68), range(33, 68)] == 1)) - self.assertTrue(np.all(mask[range(68, 101), range(68, 101)] == 0)) + assert np.all(mask[range(33), range(33)] == 0) + assert np.all(mask[range(33, 68), range(33, 68)] == 1) + assert np.all(mask[range(68, 101), range(68, 101)] == 0) # d) Bottom left to top right - self.assertTrue(np.all(mask[range(101-1, 68-1, -1), range(33)] == 0)) - self.assertTrue(np.all(mask[range(68-1, 33-1, -1), range(33, 68)] == 1)) - self.assertTrue(np.all(mask[range(33-1, -1, -1), range(68, 101)] == 0)) + assert np.all(mask[range(101 - 1, 68 - 1, -1), range(33)] == 0) + assert np.all(mask[range(68 - 1, 33 - 1, -1), range(33, 68)] == 1) + assert np.all(mask[range(33 - 1, -1, -1), range(68, 101)] == 0) @mock.patch("satpy.readers.utils.AreaDefinition") def test_sub_area(self, adef): @@ -203,15 +203,15 @@ def test_np2str(self): """Test the np2str function.""" # byte object npstring = np.string_("hej") - self.assertEqual(hf.np2str(npstring), "hej") + assert hf.np2str(npstring) == "hej" # single element numpy array np_arr = np.array([npstring]) - self.assertEqual(hf.np2str(np_arr), "hej") + assert hf.np2str(np_arr) == "hej" # scalar numpy array np_arr = np.array(npstring) - self.assertEqual(hf.np2str(np_arr), "hej") + assert hf.np2str(np_arr) == "hej" # multi-element array npstring = np.array([npstring, npstring]) @@ -236,10 +236,10 @@ def re(lat): return n * np.sqrt((1 - e2)**2 * np.sin(lat)**2 + np.cos(lat)**2) for lon in (0, 180, 270): - self.assertEqual(hf.get_earth_radius(lon=lon, lat=0., a=a, b=b), a) + assert hf.get_earth_radius(lon=lon, lat=0.0, a=a, b=b) == a for lat in (90, -90): - self.assertEqual(hf.get_earth_radius(lon=0., lat=lat, a=a, b=b), b) - self.assertTrue(np.isclose(hf.get_earth_radius(lon=123, lat=45., a=a, b=b), re(45.))) + assert hf.get_earth_radius(lon=0.0, lat=lat, a=a, b=b) == b + assert np.isclose(hf.get_earth_radius(lon=123, lat=45.0, a=a, b=b), re(45.0)) def test_reduce_mda(self): """Test metadata size reduction.""" @@ -261,9 +261,9 @@ def test_reduce_mda(self): numpy.testing.assert_equal(hf.reduce_mda(mda, max_size=3), exp) # Make sure, reduce_mda() doesn't modify the original dictionary - self.assertIn("c", mda) - self.assertIn("c", mda["d"]) - self.assertIn("c", mda["d"]["d"]) + assert "c" in mda + assert "c" in mda["d"] + assert "c" in mda["d"]["d"] @mock.patch("satpy.readers.utils.bz2.BZ2File") @mock.patch("satpy.readers.utils.Popen") @@ -389,7 +389,7 @@ def test_pro_reading_gets_unzipped_file(self, fake_unzip_file, fake_remove): expected_filename = filename[:-4] with hf.unzip_context(filename) as new_filename: - self.assertEqual(new_filename, expected_filename) + assert new_filename == expected_filename fake_unzip_file.assert_called_with(filename) fake_remove.assert_called_with(expected_filename) @@ -409,14 +409,14 @@ def test_get_user_calibration_factors(self): "off": -0.0556}} # Test that correct values are returned from the dict slope, offset = hf.get_user_calibration_factors("WV063", radcor_dict) - self.assertEqual(slope, 1.015) - self.assertEqual(offset, -0.0556) + 
assert slope == 1.015 + assert offset == -0.0556 # Test that channels not present in dict return 1.0, 0.0 with self.assertWarns(UserWarning): slope, offset = hf.get_user_calibration_factors("IR097", radcor_dict) - self.assertEqual(slope, 1.) - self.assertEqual(offset, 0.) + assert slope == 1.0 + assert offset == 0.0 # Check that incorrect dict keys throw an error with self.assertRaises(KeyError): @@ -486,7 +486,7 @@ def test_remove_sunearth_corr(self): assert isinstance(out_refl.data, da.Array) -@pytest.mark.parametrize("data, filename, mode", +@pytest.mark.parametrize(("data", "filename", "mode"), [(b"Hello", "dummy.dat", "b"), ("Hello", "dummy.txt", "t")]) def test_generic_open_binary(tmp_path, data, filename, mode): diff --git a/satpy/tests/reader_tests/test_vii_base_nc.py b/satpy/tests/reader_tests/test_vii_base_nc.py index 4f5dbcd141..82c0e6a4e1 100644 --- a/satpy/tests/reader_tests/test_vii_base_nc.py +++ b/satpy/tests/reader_tests/test_vii_base_nc.py @@ -158,15 +158,15 @@ def test_file_reading(self): # Checks that the basic functionalities are correctly executed expected_start_time = datetime.datetime(year=2017, month=9, day=20, hour=17, minute=30, second=40, microsecond=888000) - self.assertEqual(self.reader.start_time, expected_start_time) + assert self.reader.start_time == expected_start_time expected_end_time = datetime.datetime(year=2017, month=9, day=20, hour=17, minute=41, second=17, microsecond=555000) - self.assertEqual(self.reader.end_time, expected_end_time) + assert self.reader.end_time == expected_end_time - self.assertEqual(self.reader.spacecraft_name, "test_spacecraft") - self.assertEqual(self.reader.sensor, "test_instrument") - self.assertEqual(self.reader.ssp_lon, None) + assert self.reader.spacecraft_name == "test_spacecraft" + assert self.reader.sensor == "test_instrument" + assert self.reader.ssp_lon is None # Checks that the global attributes are correctly read expected_global_attributes = { @@ -195,7 +195,7 @@ def test_file_reading(self): # Since the global_attributes dictionary contains numpy arrays, # it is not possible to peform a simple equality test # Must iterate on all keys to confirm that the dictionaries are equal - self.assertEqual(global_attributes.keys(), expected_global_attributes.keys()) + assert global_attributes.keys() == expected_global_attributes.keys() for key in expected_global_attributes: if key not in ["quality_group"]: # Quality check must be valid for both iterable and not iterable elements @@ -203,16 +203,16 @@ def test_file_reading(self): equal = all(global_attributes[key] == expected_global_attributes[key]) except (TypeError, ValueError): equal = global_attributes[key] == expected_global_attributes[key] - self.assertTrue(equal) + assert equal else: - self.assertEqual(global_attributes[key].keys(), expected_global_attributes[key].keys()) + assert global_attributes[key].keys() == expected_global_attributes[key].keys() for inner_key in global_attributes[key]: # Equality check must be valid for both iterable and not iterable elements try: equal = all(global_attributes[key][inner_key] == expected_global_attributes[key][inner_key]) except (TypeError, ValueError): equal = global_attributes[key][inner_key] == expected_global_attributes[key][inner_key] - self.assertTrue(equal) + assert equal @mock.patch("satpy.readers.vii_base_nc.tie_points_interpolation") @mock.patch("satpy.readers.vii_base_nc.tie_points_geo_interpolation") @@ -242,10 +242,10 @@ def test_functions(self, tpgi_, tpi_): return_value = self.reader._perform_interpolation(variable) 
tpi_.assert_called_with([variable], SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR) - self.assertTrue(np.allclose(return_value, np.ones((10, 100)))) - self.assertEqual(return_value.attrs, {"key_1": "value_1", "key_2": "value_2"}) - self.assertEqual(return_value.name, "test_name") - self.assertEqual(return_value.dims, ("num_pixels", "num_lines")) + assert np.allclose(return_value, np.ones((10, 100))) + assert return_value.attrs == {"key_1": "value_1", "key_2": "value_2"} + assert return_value.name == "test_name" + assert return_value.dims == ("num_pixels", "num_lines") # Checks that the _perform_geo_interpolation function is correctly executed variable_lon = xr.DataArray( @@ -282,15 +282,15 @@ def test_functions(self, tpgi_, tpi_): tpgi_.assert_called_with(variable_lon, variable_lat, SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR) - self.assertTrue(np.allclose(return_lon, np.ones((10, 100)))) - self.assertEqual(return_lon.attrs, {"key_1": "value_lon_1", "key_2": "value_lon_2"}) - self.assertEqual(return_lon.name, "test_lon") - self.assertEqual(return_lon.dims, ("num_pixels", "num_lines")) + assert np.allclose(return_lon, np.ones((10, 100))) + assert return_lon.attrs == {"key_1": "value_lon_1", "key_2": "value_lon_2"} + assert return_lon.name == "test_lon" + assert return_lon.dims == ("num_pixels", "num_lines") - self.assertTrue(np.allclose(return_lat, 6 * np.ones((10, 100)))) - self.assertEqual(return_lat.attrs, {"key_1": "value_lat_1", "key_2": "value_lat_2"}) - self.assertEqual(return_lat.name, "test_lat") - self.assertEqual(return_lat.dims, ("num_pixels", "num_lines")) + assert np.allclose(return_lat, 6 * np.ones((10, 100))) + assert return_lat.attrs == {"key_1": "value_lat_1", "key_2": "value_lat_2"} + assert return_lat.name == "test_lat" + assert return_lat.dims == ("num_pixels", "num_lines") def test_standardize_dims(self): """Test the standardize dims function.""" @@ -304,9 +304,9 @@ def test_standardize_dims(self): data=np.ones((10, 100)) * 1. 
) out_variable = self.reader._standardize_dims(test_variable) - self.assertTrue(np.allclose(out_variable.values, np.ones((100, 10)))) - self.assertEqual(out_variable.dims, ("y", "x")) - self.assertEqual(out_variable.attrs["key_1"], "value_lat_1") + assert np.allclose(out_variable.values, np.ones((100, 10))) + assert out_variable.dims == ("y", "x") + assert out_variable.attrs["key_1"] == "value_lat_1" @mock.patch("satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_calibration") @mock.patch("satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_interpolation") @@ -320,10 +320,10 @@ def test_dataset(self, po_, pi_, pc_): pi_.assert_not_called() po_.assert_not_called() - self.assertTrue(np.allclose(variable.values, np.ones((100, 10)))) - self.assertEqual(variable.dims, ("y", "x")) - self.assertEqual(variable.attrs["test_attr"], "attr") - self.assertEqual(variable.attrs["units"], None) + assert np.allclose(variable.values, np.ones((100, 10))) + assert variable.dims == ("y", "x") + assert variable.attrs["test_attr"] == "attr" + assert variable.attrs["units"] is None # Checks the correct execution of the get_dataset function with a valid file_key # and required calibration and interpolation @@ -346,7 +346,7 @@ def test_dataset(self, po_, pi_, pc_): # Checks the correct execution of the get_dataset function with an invalid file_key invalid_dataset = self.reader.get_dataset(None, {"file_key": "test_invalid", "calibration": None}) # Checks that the function returns None - self.assertEqual(invalid_dataset, None) + assert invalid_dataset is None pc_.reset_mock() pi_.reset_mock() @@ -358,12 +358,12 @@ def test_dataset(self, po_, pi_, pc_): "interpolate": True}) pc_.assert_not_called() pi_.assert_not_called() - self.assertEqual(longitude[0, 0], 1.) + assert longitude[0, 0] == 1.0 # Checks the correct execution of the get_dataset function with a 'cached_latitude' file_key latitude = self.reader.get_dataset(None, {"file_key": "cached_latitude", "calibration": None}) - self.assertEqual(latitude[0, 0], 2.) + assert latitude[0, 0] == 2.0 # Repeats some check with the reader where orthorectification and interpolation are inhibited # by means of the filetype_info flags @@ -392,7 +392,7 @@ def test_dataset(self, po_, pi_, pc_): # Checks the correct execution of the get_dataset function with a 'cached_longitude' file_key longitude = self.reader_2.get_dataset(None, {"file_key": "cached_longitude", "calibration": None}) - self.assertEqual(longitude[0, 0], 100.) 
+ assert longitude[0, 0] == 100.0 # Checks the correct execution of the get_dataset function with a 'cached_longitude' file_key # in a reader without defined longitude @@ -400,4 +400,4 @@ def test_dataset(self, po_, pi_, pc_): "calibration": "reflectance", "interpolate": True}) # Checks that the function returns None - self.assertEqual(longitude, None) + assert longitude is None diff --git a/satpy/tests/reader_tests/test_vii_l1b_nc.py b/satpy/tests/reader_tests/test_vii_l1b_nc.py index d62673d9f7..d9ee714d09 100644 --- a/satpy/tests/reader_tests/test_vii_l1b_nc.py +++ b/satpy/tests/reader_tests/test_vii_l1b_nc.py @@ -115,14 +115,14 @@ def test_calibration_functions(self): bt = self.reader._calibrate_bt(radiance, cw, a, b) expected_bt = np.array([[675.04993213, 753.10301462, 894.93149648], [963.20401882, 1048.95086402, 1270.95546218]]) - self.assertTrue(np.allclose(bt, expected_bt)) + assert np.allclose(bt, expected_bt) angle_factor = 0.4 isi = 2.0 refl = self.reader._calibrate_refl(radiance, angle_factor, isi) expected_refl = np.array([[62.8318531, 125.6637061, 314.1592654], [439.8229715, 628.3185307, 1256.637061]]) - self.assertTrue(np.allclose(refl, expected_refl)) + assert np.allclose(refl, expected_refl) def test_functions(self): """Test the functions.""" @@ -139,12 +139,12 @@ def test_functions(self): orthorect_variable = self.reader._perform_orthorectification(variable, "data/measurement_data/delta_lat") expected_values = np.degrees(np.ones((600, 72)) / MEAN_EARTH_RADIUS) + np.ones((600, 72)) - self.assertTrue(np.allclose(orthorect_variable.values, expected_values)) + assert np.allclose(orthorect_variable.values, expected_values) # Checks that the _perform_calibration function is correctly executed in all cases # radiance calibration: return value is simply a copy of the variable return_variable = self.reader._perform_calibration(variable, {"calibration": "radiance"}) - self.assertTrue(np.all(return_variable == variable)) + assert np.all(return_variable == variable) # invalid calibration: raises a ValueError with self.assertRaises(ValueError): @@ -156,7 +156,7 @@ def test_functions(self): {"calibration": "brightness_temperature", "chan_thermal_index": 3}) expected_values = np.full((600, 72), 1101.10413712) - self.assertTrue(np.allclose(calibrated_variable.values, expected_values)) + assert np.allclose(calibrated_variable.values, expected_values) # reflectance calibration: checks that the return value is correct calibrated_variable = self.reader._perform_calibration(variable, @@ -164,4 +164,4 @@ def test_functions(self): "wavelength": [0.658, 0.668, 0.678], "chan_solar_index": 2}) expected_values = np.full((600, 72), 173.3181982) - self.assertTrue(np.allclose(calibrated_variable.values, expected_values)) + assert np.allclose(calibrated_variable.values, expected_values) diff --git a/satpy/tests/reader_tests/test_vii_l2_nc.py b/satpy/tests/reader_tests/test_vii_l2_nc.py index 772f783684..8348470d0f 100644 --- a/satpy/tests/reader_tests/test_vii_l2_nc.py +++ b/satpy/tests/reader_tests/test_vii_l2_nc.py @@ -93,5 +93,5 @@ def test_functions(self): orthorect_variable = self.reader._perform_orthorectification(variable, "data/measurement_data/delta_lat") expected_values = 1.1 * np.ones((10, 100)) - self.assertTrue(np.allclose(orthorect_variable.values, expected_values)) - self.assertEqual(orthorect_variable.attrs["key_1"], "value_1") + assert np.allclose(orthorect_variable.values, expected_values) + assert orthorect_variable.attrs["key_1"] == "value_1" diff --git 
a/satpy/tests/reader_tests/test_vii_utils.py b/satpy/tests/reader_tests/test_vii_utils.py index ab90833887..8d9402e926 100644 --- a/satpy/tests/reader_tests/test_vii_utils.py +++ b/satpy/tests/reader_tests/test_vii_utils.py @@ -36,8 +36,8 @@ class TestViiUtils(unittest.TestCase): def test_constants(self): """Test the constant values.""" # Test the value of the constants - self.assertEqual(satpy.readers.vii_utils.C1, C1) - self.assertEqual(satpy.readers.vii_utils.C2, C2) - self.assertEqual(satpy.readers.vii_utils.TIE_POINTS_FACTOR, TIE_POINTS_FACTOR) - self.assertEqual(satpy.readers.vii_utils.SCAN_ALT_TIE_POINTS, SCAN_ALT_TIE_POINTS) - self.assertEqual(satpy.readers.vii_utils.MEAN_EARTH_RADIUS, MEAN_EARTH_RADIUS) + assert satpy.readers.vii_utils.C1 == C1 + assert satpy.readers.vii_utils.C2 == C2 + assert satpy.readers.vii_utils.TIE_POINTS_FACTOR == TIE_POINTS_FACTOR + assert satpy.readers.vii_utils.SCAN_ALT_TIE_POINTS == SCAN_ALT_TIE_POINTS + assert satpy.readers.vii_utils.MEAN_EARTH_RADIUS == MEAN_EARTH_RADIUS diff --git a/satpy/tests/reader_tests/test_vii_wv_nc.py b/satpy/tests/reader_tests/test_vii_wv_nc.py index 9d43f1ded1..63c5604187 100644 --- a/satpy/tests/reader_tests/test_vii_wv_nc.py +++ b/satpy/tests/reader_tests/test_vii_wv_nc.py @@ -93,5 +93,5 @@ def test_functions(self): orthorect_variable = self.reader._perform_orthorectification(variable, "data/measurement_data/delta_lat") expected_values = 1.1 * np.ones((10, 100)) - self.assertTrue(np.allclose(orthorect_variable.values, expected_values)) - self.assertEqual(orthorect_variable.attrs["key_1"], "value_1") + assert np.allclose(orthorect_variable.values, expected_values) + assert orthorect_variable.attrs["key_1"] == "value_1" diff --git a/satpy/tests/reader_tests/test_viirs_atms_utils.py b/satpy/tests/reader_tests/test_viirs_atms_utils.py index cdcc0b9361..cb388a5cab 100644 --- a/satpy/tests/reader_tests/test_viirs_atms_utils.py +++ b/satpy/tests/reader_tests/test_viirs_atms_utils.py @@ -49,12 +49,9 @@ def test_get_scale_factors_for_units_unsupported_units(): factors = xr.DataArray(da.from_array(DEFAULT_FILE_FACTORS, chunks=1)) file_units = "unknown unit" output_units = "%" - with pytest.raises(ValueError) as exec_info: + with pytest.raises(ValueError, match="Don't know how to convert 'unknown unit' to '%'"): _ = _get_scale_factors_for_units(factors, file_units, output_units) - expected = "Don't know how to convert 'unknown unit' to '%'" - assert str(exec_info.value) == expected - def test_get_scale_factors_for_units_reflectances(caplog): """Test get scale factors for units, when variable is supposed to be a reflectance.""" diff --git a/satpy/tests/reader_tests/test_viirs_compact.py b/satpy/tests/reader_tests/test_viirs_compact.py index 006cdfe968..ba8fa6f312 100644 --- a/satpy/tests/reader_tests/test_viirs_compact.py +++ b/satpy/tests/reader_tests/test_viirs_compact.py @@ -30,7 +30,7 @@ # - tmp_path -@pytest.fixture +@pytest.fixture() def fake_dnb(): """Create fake DNB content.""" fake_dnb = { @@ -2418,7 +2418,7 @@ def fake_dnb(): return fake_dnb -@pytest.fixture +@pytest.fixture() def fake_dnb_file(fake_dnb, tmp_path): """Create an hdf5 file in viirs_compact format with DNB data in it.""" filename = tmp_path / "SVDNBC_j01_d20191025_t0611251_e0612478_b10015_c20191025062459000870_eum_ops.h5" @@ -2435,7 +2435,7 @@ class TestCompact: """Test class for reading compact viirs format.""" @pytest.fixture(autouse=True) - def setup_method(self, fake_dnb_file): + def _setup_method(self, fake_dnb_file): """Create a fake file from 
scratch.""" self.filename = fake_dnb_file self.client = None diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index da6dc9a55b..9b13f384e2 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -33,7 +33,7 @@ import pytest import xarray as xr from pyresample import SwathDefinition -from pytest import TempPathFactory +from pytest import TempPathFactory # noqa: PT013 from pytest_lazyfixture import lazy_fixture I_COLS = 6400 diff --git a/satpy/tests/reader_tests/test_viirs_edr_active_fires.py b/satpy/tests/reader_tests/test_viirs_edr_active_fires.py index de55a9c20c..7063814c34 100644 --- a/satpy/tests/reader_tests/test_viirs_edr_active_fires.py +++ b/satpy/tests/reader_tests/test_viirs_edr_active_fires.py @@ -179,9 +179,9 @@ def test_init(self): loadables = r.select_files_from_pathnames([ "AFMOD_j02_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc" ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_dataset(self): """Test loading all datasets.""" @@ -192,23 +192,23 @@ def test_load_dataset(self): ]) r.create_filehandlers(loadables) datasets = r.load(["confidence_pct"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "%") - self.assertEqual(v.attrs["_FillValue"], 255) - self.assertTrue(np.issubdtype(v.dtype, DEFAULT_DETECTION_FILE_DTYPE)) + assert v.attrs["units"] == "%" + assert v.attrs["_FillValue"] == 255 + assert np.issubdtype(v.dtype, DEFAULT_DETECTION_FILE_DTYPE) datasets = r.load(["T13"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "K") + assert v.attrs["units"] == "K" datasets = r.load(["power"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "MW") - self.assertEqual(v.attrs["platform_name"], "NOAA-21") - self.assertEqual(v.attrs["sensor"], "viirs") + assert v.attrs["units"] == "MW" + assert v.attrs["platform_name"] == "NOAA-21" + assert v.attrs["sensor"] == "viirs" class TestImgVIIRSActiveFiresNetCDF4(unittest.TestCase): @@ -236,9 +236,9 @@ def test_init(self): loadables = r.select_files_from_pathnames([ "AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc" ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_dataset(self): """Test loading all datasets.""" @@ -249,23 +249,23 @@ def test_load_dataset(self): ]) r.create_filehandlers(loadables) datasets = r.load(["confidence_cat"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "1") - self.assertEqual(v.attrs["flag_meanings"], ["low", "medium", "high"]) - self.assertEqual(v.attrs["flag_values"], [7, 8, 9]) + assert v.attrs["units"] == "1" + assert v.attrs["flag_meanings"] == ["low", "medium", "high"] + assert v.attrs["flag_values"] == [7, 8, 9] datasets = r.load(["T4"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "K") + assert v.attrs["units"] == "K" datasets = r.load(["power"]) - self.assertEqual(len(datasets), 1) + assert 
len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "MW") - self.assertEqual(v.attrs["platform_name"], "Suomi-NPP") - self.assertEqual(v.attrs["sensor"], "viirs") + assert v.attrs["units"] == "MW" + assert v.attrs["platform_name"] == "Suomi-NPP" + assert v.attrs["sensor"] == "viirs" @mock.patch("satpy.readers.viirs_edr_active_fires.dd.read_csv") @@ -294,9 +294,9 @@ def test_init(self, mock_obj): loadables = r.select_files_from_pathnames([ "AFEDR_j01_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt" ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_dataset(self, csv_mock): """Test loading all datasets.""" @@ -307,21 +307,21 @@ def test_load_dataset(self, csv_mock): ]) r.create_filehandlers(loadables) datasets = r.load(["confidence_pct"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "%") + assert v.attrs["units"] == "%" datasets = r.load(["T13"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "K") + assert v.attrs["units"] == "K" datasets = r.load(["power"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "MW") - self.assertEqual(v.attrs["platform_name"], "NOAA-20") - self.assertEqual(v.attrs["sensor"], "VIIRS") + assert v.attrs["units"] == "MW" + assert v.attrs["platform_name"] == "NOAA-20" + assert v.attrs["sensor"] == "VIIRS" @mock.patch("satpy.readers.viirs_edr_active_fires.dd.read_csv") @@ -350,9 +350,9 @@ def test_init(self, mock_obj): loadables = r.select_files_from_pathnames([ "AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt" ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_dataset(self, mock_obj): """Test loading all datasets.""" @@ -363,20 +363,20 @@ def test_load_dataset(self, mock_obj): ]) r.create_filehandlers(loadables) datasets = r.load(["confidence_cat"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "1") - self.assertEqual(v.attrs["flag_meanings"], ["low", "medium", "high"]) - self.assertEqual(v.attrs["flag_values"], [7, 8, 9]) + assert v.attrs["units"] == "1" + assert v.attrs["flag_meanings"] == ["low", "medium", "high"] + assert v.attrs["flag_values"] == [7, 8, 9] datasets = r.load(["T4"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "K") + assert v.attrs["units"] == "K" datasets = r.load(["power"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "MW") - self.assertEqual(v.attrs["platform_name"], "Suomi-NPP") - self.assertEqual(v.attrs["sensor"], "VIIRS") + assert v.attrs["units"] == "MW" + assert v.attrs["platform_name"] == "Suomi-NPP" + assert v.attrs["sensor"] == "VIIRS" diff --git a/satpy/tests/reader_tests/test_viirs_edr_flood.py b/satpy/tests/reader_tests/test_viirs_edr_flood.py index 0141259784..b7bc9f0319 100644 --- a/satpy/tests/reader_tests/test_viirs_edr_flood.py +++ b/satpy/tests/reader_tests/test_viirs_edr_flood.py @@ -95,9 +95,9 @@ 
def test_init(self): loadables = r.select_files_from_pathnames([ "WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_10_300_01.hdf" ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_dataset(self): """Test loading all datasets from a full swath file.""" @@ -108,9 +108,9 @@ def test_load_dataset(self): ]) r.create_filehandlers(loadables) datasets = r.load(["WaterDetection"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "none") + assert v.attrs["units"] == "none" def test_load_dataset_aoi(self): """Test loading all datasets from an area of interest file.""" @@ -121,6 +121,6 @@ def test_load_dataset_aoi(self): ]) r.create_filehandlers(loadables) datasets = r.load(["WaterDetection"]) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs["units"], "none") + assert v.attrs["units"] == "none" diff --git a/satpy/tests/reader_tests/test_viirs_sdr.py b/satpy/tests/reader_tests/test_viirs_sdr.py index fecd9a0b0f..952224daaf 100644 --- a/satpy/tests/reader_tests/test_viirs_sdr.py +++ b/satpy/tests/reader_tests/test_viirs_sdr.py @@ -282,40 +282,40 @@ class TestVIIRSSDRReader(unittest.TestCase): yaml_file = "viirs_sdr.yaml" def _assert_reflectance_properties(self, data_arr, num_scans=16, with_area=True): - self.assertTrue(np.issubdtype(data_arr.dtype, np.float32)) - self.assertEqual(data_arr.attrs["calibration"], "reflectance") - self.assertEqual(data_arr.attrs["units"], "%") - self.assertEqual(data_arr.attrs["rows_per_scan"], num_scans) + assert np.issubdtype(data_arr.dtype, np.float32) + assert data_arr.attrs["calibration"] == "reflectance" + assert data_arr.attrs["units"] == "%" + assert data_arr.attrs["rows_per_scan"] == num_scans if with_area: - self.assertIn("area", data_arr.attrs) - self.assertIsNotNone(data_arr.attrs["area"]) - self.assertEqual(data_arr.attrs["area"].shape, data_arr.shape) + assert "area" in data_arr.attrs + assert data_arr.attrs["area"] is not None + assert data_arr.attrs["area"].shape == data_arr.shape else: - self.assertNotIn("area", data_arr.attrs) + assert "area" not in data_arr.attrs def _assert_bt_properties(self, data_arr, num_scans=16, with_area=True): - self.assertTrue(np.issubdtype(data_arr.dtype, np.float32)) - self.assertEqual(data_arr.attrs["calibration"], "brightness_temperature") - self.assertEqual(data_arr.attrs["units"], "K") - self.assertEqual(data_arr.attrs["rows_per_scan"], num_scans) + assert np.issubdtype(data_arr.dtype, np.float32) + assert data_arr.attrs["calibration"] == "brightness_temperature" + assert data_arr.attrs["units"] == "K" + assert data_arr.attrs["rows_per_scan"] == num_scans if with_area: - self.assertIn("area", data_arr.attrs) - self.assertIsNotNone(data_arr.attrs["area"]) - self.assertEqual(data_arr.attrs["area"].shape, data_arr.shape) + assert "area" in data_arr.attrs + assert data_arr.attrs["area"] is not None + assert data_arr.attrs["area"].shape == data_arr.shape else: - self.assertNotIn("area", data_arr.attrs) + assert "area" not in data_arr.attrs def _assert_dnb_radiance_properties(self, data_arr, with_area=True): - self.assertTrue(np.issubdtype(data_arr.dtype, np.float32)) - self.assertEqual(data_arr.attrs["calibration"], "radiance") - self.assertEqual(data_arr.attrs["units"], "W m-2 sr-1") - self.assertEqual(data_arr.attrs["rows_per_scan"], 
16) + assert np.issubdtype(data_arr.dtype, np.float32) + assert data_arr.attrs["calibration"] == "radiance" + assert data_arr.attrs["units"] == "W m-2 sr-1" + assert data_arr.attrs["rows_per_scan"] == 16 if with_area: - self.assertIn("area", data_arr.attrs) - self.assertIsNotNone(data_arr.attrs["area"]) - self.assertEqual(data_arr.attrs["area"].shape, data_arr.shape) + assert "area" in data_arr.attrs + assert data_arr.attrs["area"] is not None + assert data_arr.attrs["area"].shape == data_arr.shape else: - self.assertNotIn("area", data_arr.attrs) + assert "area" not in data_arr.attrs def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" @@ -338,21 +338,19 @@ def test_init(self): loadables = r.select_files_from_pathnames([ "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_init_start_time_is_nodate(self): """Test basic init with start_time being set to the no-date 1/1-1958.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with pytest.raises(ValueError) as exec_info: + with pytest.raises(ValueError, match="Datetime invalid 1958-01-01 00:00:00"): _ = r.create_filehandlers([ "SVI01_npp_d19580101_t0000000_e0001261_b01708_c20120226002130255476_noaa_ops.h5", ]) - expected = "Datetime invalid 1958-01-01 00:00:00" - assert str(exec_info.value) == expected def test_init_start_time_beyond(self): """Test basic init with start_time after the provided files.""" @@ -366,7 +364,7 @@ def test_init_start_time_beyond(self): fhs = r.create_filehandlers([ "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) - self.assertEqual(len(fhs), 0) + assert len(fhs) == 0 def test_init_end_time_beyond(self): """Test basic init with end_time before the provided files.""" @@ -380,7 +378,7 @@ def test_init_end_time_beyond(self): fhs = r.create_filehandlers([ "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) - self.assertEqual(len(fhs), 0) + assert len(fhs) == 0 def test_init_start_end_time(self): """Test basic init with end_time before the provided files.""" @@ -396,10 +394,10 @@ def test_init_start_end_time(self): loadables = r.select_files_from_pathnames([ "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_all_m_reflectances_no_geo(self): """Load all M band reflectances with no geo files provided.""" @@ -431,7 +429,7 @@ def test_load_all_m_reflectances_no_geo(self): "M10", "M11", ]) - self.assertEqual(len(ds), 11) + assert len(ds) == 11 for d in ds.values(): self._assert_reflectance_properties(d, with_area=False) @@ -467,7 +465,7 @@ def test_load_all_m_reflectances_find_geo(self): "M11", ]) - self.assertEqual(len(ds), 11) + assert len(ds) == 11 for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) @@ -503,13 +501,13 @@ def test_load_all_m_reflectances_provided_geo(self): "M10", "M11", ]) - self.assertEqual(len(ds), 11) + assert len(ds) == 11 for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) - self.assertEqual(d.attrs["area"].lons.min(), 5) - self.assertEqual(d.attrs["area"].lats.min(), 45) - 
self.assertEqual(d.attrs["area"].lons.attrs["rows_per_scan"], 16) - self.assertEqual(d.attrs["area"].lats.attrs["rows_per_scan"], 16) + assert d.attrs["area"].lons.min() == 5 + assert d.attrs["area"].lats.min() == 45 + assert d.attrs["area"].lons.attrs["rows_per_scan"] == 16 + assert d.attrs["area"].lats.attrs["rows_per_scan"] == 16 def test_load_all_m_reflectances_use_nontc(self): """Load all M band reflectances but use non-TC geolocation.""" @@ -544,13 +542,13 @@ def test_load_all_m_reflectances_use_nontc(self): "M10", "M11", ]) - self.assertEqual(len(ds), 11) + assert len(ds) == 11 for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) - self.assertEqual(d.attrs["area"].lons.min(), 15) - self.assertEqual(d.attrs["area"].lats.min(), 55) - self.assertEqual(d.attrs["area"].lons.attrs["rows_per_scan"], 16) - self.assertEqual(d.attrs["area"].lats.attrs["rows_per_scan"], 16) + assert d.attrs["area"].lons.min() == 15 + assert d.attrs["area"].lats.min() == 55 + assert d.attrs["area"].lons.attrs["rows_per_scan"] == 16 + assert d.attrs["area"].lats.attrs["rows_per_scan"] == 16 def test_load_all_m_reflectances_use_nontc2(self): """Load all M band reflectances but use non-TC geolocation because TC isn't available.""" @@ -584,13 +582,13 @@ def test_load_all_m_reflectances_use_nontc2(self): "M10", "M11", ]) - self.assertEqual(len(ds), 11) + assert len(ds) == 11 for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) - self.assertEqual(d.attrs["area"].lons.min(), 15) - self.assertEqual(d.attrs["area"].lats.min(), 55) - self.assertEqual(d.attrs["area"].lons.attrs["rows_per_scan"], 16) - self.assertEqual(d.attrs["area"].lats.attrs["rows_per_scan"], 16) + assert d.attrs["area"].lons.min() == 15 + assert d.attrs["area"].lats.min() == 55 + assert d.attrs["area"].lons.attrs["rows_per_scan"] == 16 + assert d.attrs["area"].lats.attrs["rows_per_scan"] == 16 def test_load_all_m_bts(self): """Load all M band brightness temperatures.""" @@ -611,7 +609,7 @@ def test_load_all_m_bts(self): "M15", "M16", ]) - self.assertEqual(len(ds), 5) + assert len(ds) == 5 for d in ds.values(): self._assert_bt_properties(d, with_area=True) @@ -634,13 +632,13 @@ def test_load_dnb_sza_no_factors(self): "dnb_satellite_azimuth_angle", "dnb_lunar_zenith_angle", "dnb_lunar_azimuth_angle"]) - self.assertEqual(len(ds), 6) + assert len(ds) == 6 for d in ds.values(): - self.assertTrue(np.issubdtype(d.dtype, np.float32)) - self.assertEqual(d.attrs["units"], "degrees") - self.assertEqual(d.attrs["rows_per_scan"], 16) - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) + assert np.issubdtype(d.dtype, np.float32) + assert d.attrs["units"] == "degrees" + assert d.attrs["rows_per_scan"] == 16 + assert "area" in d.attrs + assert d.attrs["area"] is not None def test_load_all_m_radiances(self): """Load all M band radiances.""" @@ -685,14 +683,14 @@ def test_load_all_m_radiances(self): make_dsq(name="M15", calibration="radiance"), make_dsq(name="M16", calibration="radiance"), ]) - self.assertEqual(len(ds), 16) + assert len(ds) == 16 for d in ds.values(): - self.assertTrue(np.issubdtype(d.dtype, np.float32)) - self.assertEqual(d.attrs["calibration"], "radiance") - self.assertEqual(d.attrs["units"], "W m-2 um-1 sr-1") - self.assertEqual(d.attrs["rows_per_scan"], 16) - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) + assert np.issubdtype(d.dtype, np.float32) + assert d.attrs["calibration"] == "radiance" + assert d.attrs["units"] == "W m-2 um-1 sr-1" + assert 
d.attrs["rows_per_scan"] == 16 + assert "area" in d.attrs + assert d.attrs["area"] is not None def test_load_dnb(self): """Load DNB dataset.""" @@ -704,17 +702,17 @@ def test_load_dnb(self): ]) r.create_filehandlers(loadables) ds = r.load(["DNB"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 for d in ds.values(): data = d.values # default scale factors are 2 and offset 1 # multiply DNB by 10000 should mean the first value of 0 should be: # data * factor * 10000 + offset * 10000 # 0 * 2 * 10000 + 1 * 10000 => 10000 - self.assertEqual(data[0, 0], 10000) + assert data[0, 0] == 10000 # the second value of 1 should be: # 1 * 2 * 10000 + 1 * 10000 => 30000 - self.assertEqual(data[0, 1], 30000) + assert data[0, 1] == 30000 self._assert_dnb_radiance_properties(d, with_area=True) def test_load_dnb_no_factors(self): @@ -727,17 +725,17 @@ def test_load_dnb_no_factors(self): ]) r.create_filehandlers(loadables, {"include_factors": False}) ds = r.load(["DNB"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 for d in ds.values(): data = d.values # no scale factors, default factor 1 and offset 0 # multiply DNB by 10000 should mean the first value of 0 should be: # data * factor * 10000 + offset * 10000 # 0 * 1 * 10000 + 0 * 10000 => 0 - self.assertEqual(data[0, 0], 0) + assert data[0, 0] == 0 # the second value of 1 should be: # 1 * 1 * 10000 + 0 * 10000 => 10000 - self.assertEqual(data[0, 1], 10000) + assert data[0, 1] == 10000 self._assert_dnb_radiance_properties(d, with_area=True) def test_load_i_no_files(self): @@ -749,9 +747,9 @@ def test_load_i_no_files(self): "GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) - self.assertNotIn("I01", [x["name"] for x in r.available_dataset_ids]) + assert "I01" not in [x["name"] for x in r.available_dataset_ids] ds = r.load(["I01"]) - self.assertEqual(len(ds), 0) + assert len(ds) == 0 def test_load_all_i_reflectances_provided_geo(self): """Load all I band reflectances with geo files provided.""" @@ -768,13 +766,13 @@ def test_load_all_i_reflectances_provided_geo(self): "I02", "I03", ]) - self.assertEqual(len(ds), 3) + assert len(ds) == 3 for d in ds.values(): self._assert_reflectance_properties(d, num_scans=32) - self.assertEqual(d.attrs["area"].lons.min(), 5) - self.assertEqual(d.attrs["area"].lats.min(), 45) - self.assertEqual(d.attrs["area"].lons.attrs["rows_per_scan"], 32) - self.assertEqual(d.attrs["area"].lats.attrs["rows_per_scan"], 32) + assert d.attrs["area"].lons.min() == 5 + assert d.attrs["area"].lats.min() == 45 + assert d.attrs["area"].lons.attrs["rows_per_scan"] == 32 + assert d.attrs["area"].lats.attrs["rows_per_scan"] == 32 def test_load_all_i_bts(self): """Load all I band brightness temperatures.""" @@ -789,7 +787,7 @@ def test_load_all_i_bts(self): ds = r.load(["I04", "I05", ]) - self.assertEqual(len(ds), 2) + assert len(ds) == 2 for d in ds.values(): self._assert_bt_properties(d, num_scans=32) @@ -814,14 +812,14 @@ def test_load_all_i_radiances(self): make_dsq(name="I04", calibration="radiance"), make_dsq(name="I05", calibration="radiance"), ]) - self.assertEqual(len(ds), 5) + assert len(ds) == 5 for d in ds.values(): - self.assertTrue(np.issubdtype(d.dtype, np.float32)) - self.assertEqual(d.attrs["calibration"], "radiance") - self.assertEqual(d.attrs["units"], "W m-2 um-1 sr-1") - self.assertEqual(d.attrs["rows_per_scan"], 32) - self.assertIn("area", d.attrs) - self.assertIsNotNone(d.attrs["area"]) + assert np.issubdtype(d.dtype, np.float32) is True + assert 
d.attrs["calibration"] == "radiance" + assert d.attrs["units"] == "W m-2 um-1 sr-1" + assert d.attrs["rows_per_scan"] == 32 + assert "area" in d.attrs + assert d.attrs["area"] is not None class FakeHDF5FileHandlerAggr(FakeHDF5FileHandler2): @@ -909,7 +907,7 @@ def test_load_truncated_band(self): ]) r.create_filehandlers(loadables) ds = r.load(["I01"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 i01_data = ds["I01"].compute() expected_rows = sum(FakeShortHDF5FileHandlerAggr._num_scans_per_gran) * DEFAULT_FILE_SHAPE[0] - self.assertEqual(i01_data.shape, (expected_rows, 300)) + assert i01_data.shape == (expected_rows, 300) diff --git a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py index b14ff771d6..49206962e5 100644 --- a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py +++ b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py @@ -29,8 +29,8 @@ from netCDF4 import Dataset -@pytest.fixture -def _nc_filename(tmp_path): +@pytest.fixture() +def nc_filename(tmp_path): now = datetime.datetime.utcnow() filename = f"VGAC_VJ10XMOD_A{now:%Y%j_%H%M}_n004946_K005.nc" filename_str = str(tmp_path / filename) @@ -66,14 +66,14 @@ def _nc_filename(tmp_path): class TestVGACREader: """Test the VGACFileHandler reader.""" - def test_read_vgac(self, _nc_filename): + def test_read_vgac(self, nc_filename): """Test reading reflectances and BT.""" from satpy.scene import Scene # Read data scn_ = Scene( reader="viirs_vgac_l1c_nc", - filenames=[_nc_filename]) + filenames=[nc_filename]) scn_.load(["M05", "M15"]) assert (scn_["M05"][0, 0] == 100) assert (scn_["M15"][0, 0] == 400) diff --git a/satpy/tests/reader_tests/test_virr_l1b.py b/satpy/tests/reader_tests/test_virr_l1b.py index ff0f780190..e3fbd73272 100644 --- a/satpy/tests/reader_tests/test_virr_l1b.py +++ b/satpy/tests/reader_tests/test_virr_l1b.py @@ -104,13 +104,13 @@ def tearDown(self): def _band_helper(self, attributes, units, calibration, standard_name, file_type, band_index_size, resolution): - self.assertEqual(units, attributes["units"]) - self.assertEqual(calibration, attributes["calibration"]) - self.assertEqual(standard_name, attributes["standard_name"]) - self.assertEqual(file_type, attributes["file_type"]) - self.assertTrue(attributes["band_index"] in range(band_index_size)) - self.assertEqual(resolution, attributes["resolution"]) - self.assertEqual(("longitude", "latitude"), attributes["coordinates"]) + assert units == attributes["units"] + assert calibration == attributes["calibration"] + assert standard_name == attributes["standard_name"] + assert file_type == attributes["file_type"] + assert (attributes["band_index"] in range(band_index_size)) is True + assert resolution == attributes["resolution"] + assert ("longitude", "latitude") == attributes["coordinates"] def _fy3_helper(self, platform_name, reader, Emissive_units): """Load channels and test accurate metadata.""" @@ -133,13 +133,13 @@ def _fy3_helper(self, platform_name, reader, Emissive_units): # Object returned by get_dataset. 
ds = datasets[dataset["name"]] attributes = ds.attrs - self.assertTrue(isinstance(ds.data, da.Array)) - self.assertEqual("virr", attributes["sensor"]) - self.assertEqual(platform_name, attributes["platform_name"]) - self.assertEqual(datetime.datetime(2018, 12, 25, 21, 41, 47, 90000), attributes["start_time"]) - self.assertEqual(datetime.datetime(2018, 12, 25, 21, 47, 28, 254000), attributes["end_time"]) - self.assertEqual((19, 20), datasets[dataset["name"]].shape) - self.assertEqual(("y", "x"), datasets[dataset["name"]].dims) + assert isinstance(ds.data, da.Array) + assert "virr" == attributes["sensor"] + assert platform_name == attributes["platform_name"] + assert datetime.datetime(2018, 12, 25, 21, 41, 47, 90000) == attributes["start_time"] + assert datetime.datetime(2018, 12, 25, 21, 47, 28, 254000) == attributes["end_time"] + assert (19, 20) == datasets[dataset["name"]].shape + assert ("y", "x") == datasets[dataset["name"]].dims if dataset["name"] in ["1", "2", "6", "7", "8", "9", "10"]: self._band_helper(attributes, "%", "reflectance", "toa_bidirectional_reflectance", "virr_l1b", @@ -148,19 +148,17 @@ def _fy3_helper(self, platform_name, reader, Emissive_units): self._band_helper(attributes, Emissive_units, "brightness_temperature", "toa_brightness_temperature", "virr_l1b", 3, 1000) elif dataset["name"] in ["longitude", "latitude"]: - self.assertEqual("degrees", attributes["units"]) - self.assertTrue(attributes["standard_name"] in ["longitude", "latitude"]) - self.assertEqual(["virr_l1b", "virr_geoxx"], attributes["file_type"]) - self.assertEqual(1000, attributes["resolution"]) + assert "degrees" == attributes["units"] + assert (attributes["standard_name"] in ["longitude", "latitude"]) is True + assert ["virr_l1b", "virr_geoxx"] == attributes["file_type"] + assert 1000 == attributes["resolution"] else: - self.assertEqual("degrees", attributes["units"]) - self.assertTrue( - attributes["standard_name"] in ["solar_zenith_angle", "sensor_zenith_angle", "solar_azimuth_angle", - "sensor_azimuth_angle"]) - self.assertEqual(["virr_geoxx", "virr_l1b"], attributes["file_type"]) - self.assertEqual(("longitude", "latitude"), attributes["coordinates"]) - self.assertEqual(band_values[dataset["name"]], - round(float(np.array(ds[ds.shape[0] // 2][ds.shape[1] // 2])), 6)) + assert "degrees" == attributes["units"] + assert attributes["standard_name"] in ["solar_zenith_angle", "sensor_zenith_angle", + "solar_azimuth_angle", "sensor_azimuth_angle"] + assert ["virr_geoxx", "virr_l1b"] == attributes["file_type"] + assert ("longitude", "latitude") == attributes["coordinates"] + assert band_values[dataset["name"]] == round(float(np.array(ds[ds.shape[0] // 2][ds.shape[1] // 2])), 6) assert "valid_range" not in ds.attrs def test_fy3b_file(self): @@ -168,10 +166,10 @@ def test_fy3b_file(self): from satpy.readers import load_reader FY3B_reader = load_reader(self.reader_configs) FY3B_file = FY3B_reader.select_files_from_pathnames(["tf2018359214943.FY3B-L_VIRRX_L1B.HDF"]) - self.assertEqual(1, len(FY3B_file)) + assert 1 == len(FY3B_file) FY3B_reader.create_filehandlers(FY3B_file) # Make sure we have some files - self.assertTrue(FY3B_reader.file_handlers) + assert FY3B_reader.file_handlers self._fy3_helper("FY3B", FY3B_reader, "milliWstts/m^2/cm^(-1)/steradian") def test_fy3c_file(self): @@ -180,8 +178,8 @@ def test_fy3c_file(self): FY3C_reader = load_reader(self.reader_configs) FY3C_files = FY3C_reader.select_files_from_pathnames(["tf2018359143912.FY3C-L_VIRRX_GEOXX.HDF", "tf2018359143912.FY3C-L_VIRRX_L1B.HDF"]) - 
self.assertEqual(2, len(FY3C_files)) + assert 2 == len(FY3C_files) FY3C_reader.create_filehandlers(FY3C_files) # Make sure we have some files - self.assertTrue(FY3C_reader.file_handlers) + assert FY3C_reader.file_handlers self._fy3_helper("FY3C", FY3C_reader, "1") diff --git a/satpy/tests/scene_tests/test_conversions.py b/satpy/tests/scene_tests/test_conversions.py index 3760249d95..a886c3fa60 100644 --- a/satpy/tests/scene_tests/test_conversions.py +++ b/satpy/tests/scene_tests/test_conversions.py @@ -93,7 +93,7 @@ def test_with_empty_scene(self): assert len(ds.variables) == 0 assert len(ds.coords) == 0 - @pytest.fixture + @pytest.fixture() def single_area_scn(self): """Define Scene with single area.""" from pyresample.geometry import AreaDefinition @@ -108,7 +108,7 @@ def single_area_scn(self): scn["var1"] = data_array return scn - @pytest.fixture + @pytest.fixture() def multi_area_scn(self): """Define Scene with multiple area.""" from pyresample.geometry import AreaDefinition @@ -162,5 +162,5 @@ def test_wrong_dataset_key(self, single_area_scn): def test_to_xarray_with_multiple_area_scene(self, multi_area_scn): """Test converting muiltple area Scene to xarray.""" # TODO: in future adapt for DataTree implementation - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Datasets to be saved .* must have identical projection coordinates."): _ = multi_area_scn.to_xarray() diff --git a/satpy/tests/scene_tests/test_data_access.py b/satpy/tests/scene_tests/test_data_access.py index e446af9c46..66129ad8bb 100644 --- a/satpy/tests/scene_tests/test_data_access.py +++ b/satpy/tests/scene_tests/test_data_access.py @@ -97,7 +97,8 @@ def test_iter_by_area_swath(self): def test_bad_setitem(self): """Test setting an item wrongly.""" scene = Scene() - pytest.raises(ValueError, scene.__setitem__, "1", np.arange(5)) + with pytest.raises(ValueError, match="Key must be a DataID when value is not an xarray DataArray or dict"): + scene.__setitem__("1", np.arange(5)) def test_setitem(self): """Test setting an item.""" @@ -112,7 +113,7 @@ def test_setitem(self): scene[did] = ds1 assert "oranges" in scene nparray = np.arange(5*5).reshape(5, 5) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Key must be a DataID when value is not an xarray DataArray or dict"): scene["apples"] = nparray assert "apples" not in scene did = make_dataid(name="apples") diff --git a/satpy/tests/scene_tests/test_init.py b/satpy/tests/scene_tests/test_init.py index a9b4622769..b745fad9d0 100644 --- a/satpy/tests/scene_tests/test_init.py +++ b/satpy/tests/scene_tests/test_init.py @@ -46,7 +46,8 @@ def test_init(self): def test_init_str_filename(self): """Test initializing with a single string as filenames.""" - pytest.raises(ValueError, Scene, reader="blo", filenames="test.nc") + with pytest.raises(ValueError, match="'filenames' must be a list of files: .*"): + Scene(reader="blo", filenames="test.nc") def test_start_end_times(self): """Test start and end times for a scene.""" @@ -74,7 +75,8 @@ def test_init_alone(self): def test_init_no_files(self): """Test that providing an empty list of filenames fails.""" - pytest.raises(ValueError, Scene, reader="viirs_sdr", filenames=[]) + with pytest.raises(ValueError, match="'filenames' was provided but is empty."): + Scene(reader="viirs_sdr", filenames=[]) def test_create_reader_instances_with_filenames(self): """Test creating a reader providing filenames.""" diff --git a/satpy/tests/scene_tests/test_resampling.py 
b/satpy/tests/scene_tests/test_resampling.py
index 286735c093..6b5f74ee59 100644
--- a/satpy/tests/scene_tests/test_resampling.py
+++ b/satpy/tests/scene_tests/test_resampling.py
@@ -619,7 +619,7 @@ def test_aggregate_with_boundary(self):
         scene1 = self._create_test_data(x_size, y_size)

-        with pytest.raises(ValueError):
+        with pytest.raises(ValueError, match="Could not coarsen a dimension.*"):
             scene1.aggregate(func="sum", x=2, y=2, boundary="exact")

         scene2 = scene1.aggregate(func="sum", x=2, y=2, boundary="trim")
diff --git a/satpy/tests/scene_tests/test_saving.py b/satpy/tests/scene_tests/test_saving.py
index 0781ae8796..32c6ff61c2 100644
--- a/satpy/tests/scene_tests/test_saving.py
+++ b/satpy/tests/scene_tests/test_saving.py
@@ -77,7 +77,5 @@ def test_save_datasets_bad_writer(self, tmp_path):
-        pytest.raises(ValueError,
-                      scn.save_datasets,
-                      writer="_bad_writer_",
-                      base_dir=tmp_path)
+        with pytest.raises(ValueError, match="Unknown writer '_bad_writer_'"):
+            scn.save_datasets(writer="_bad_writer_", base_dir=tmp_path)

     def test_save_datasets_missing_wishlist(self, tmp_path):
         """Calling 'save_datasets' with no valid datasets."""
diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py
index 5c3ededd40..a872ce31c4 100644
--- a/satpy/tests/test_composites.py
+++ b/satpy/tests/test_composites.py
@@ -126,7 +126,7 @@ def test_nondimensional_coords(self):
         ds["acq_time"] = ("y", [0, 1])
         comp = CompositeBase("test_comp")
         ret_datasets = comp.match_data_arrays([ds, ds])
-        self.assertNotIn("acq_time", ret_datasets[0].coords)
+        assert "acq_time" not in ret_datasets[0].coords


 class TestRatioSharpenedCompositors:
@@ -196,7 +196,7 @@ def setup_method(self):
     def test_bad_colors(self, init_kwargs):
         """Test that only valid band colors can be provided."""
         from satpy.composites import RatioSharpenedRGB
-        with pytest.raises(ValueError):
+        with pytest.raises(ValueError, match="RatioSharpenedRGB..*_band must be one of .*"):
             RatioSharpenedRGB(name="true_color", **init_kwargs)

     def test_match_data_arrays(self):
@@ -210,14 +210,14 @@ def test_more_than_three_datasets(self):
         """Test that only 3 datasets can be passed."""
         from satpy.composites import RatioSharpenedRGB
         comp = RatioSharpenedRGB(name="true_color")
-        with pytest.raises(ValueError):
+        with pytest.raises(ValueError, match="Expected 3 datasets, got 4"):
             comp((self.ds1, self.ds2, self.ds3, self.ds1),
                  optional_datasets=(self.ds4_big,))

     def test_self_sharpened_no_high_res(self):
         """Test for exception when no high_res band is specified."""
         from satpy.composites import SelfSharpenedRGB
         comp = SelfSharpenedRGB(name="true_color", high_resolution_band=None)
-        with pytest.raises(ValueError):
+        with pytest.raises(ValueError, match="SelfSharpenedRGB requires at least one high resolution band, not 'None'"):
             comp((self.ds1, self.ds2, self.ds3))

     def test_basic_no_high_res(self):
@@ -355,14 +355,14 @@ def test_bad_areas_diff(self):
         self.assertRaises(IncompatibleAreas, comp, (self.ds1, self.ds2_big))


-@pytest.fixture
+@pytest.fixture()
 def fake_area():
     """Return a fake 2×2 area."""
     from pyresample.geometry import create_area_def
     return create_area_def("skierffe", 4087, area_extent=[-5_000, -5_000, 5_000, 5_000], shape=(2, 2))


-@pytest.fixture
+@pytest.fixture()
 def fake_dataset_pair(fake_area):
     """Return a fake pair of 2×2 datasets."""
     ds1 = xr.DataArray(da.full((2, 2), 8, chunks=2, dtype=np.float32), attrs={"area": fake_area})
@@ -619,7 +619,7 @@ class TestSandwichCompositor:

     # Test RGB and RGBA
     @pytest.mark.parametrize(
-        "input_shape,bands",
+        ("input_shape", "bands"),
         [
             ((3, 2, 2), ["R", "G", "B"]),
             ((4, 2, 2), ["R", "G", "B",
"A"]) @@ -665,28 +665,24 @@ def test_inline_composites(self): # Check that "fog" product has all its prerequisites defined keys = comps["visir"].keys() fog = [comps["visir"][dsid] for dsid in keys if "fog" == dsid["name"]][0] - self.assertEqual(fog.attrs["prerequisites"][0]["name"], "_fog_dep_0") - self.assertEqual(fog.attrs["prerequisites"][1]["name"], "_fog_dep_1") - self.assertEqual(fog.attrs["prerequisites"][2], 10.8) + assert fog.attrs["prerequisites"][0]["name"] == "_fog_dep_0" + assert fog.attrs["prerequisites"][1]["name"] == "_fog_dep_1" + assert fog.attrs["prerequisites"][2] == 10.8 # Check that the sub-composite dependencies use wavelengths # (numeric values) keys = comps["visir"].keys() fog_dep_ids = [dsid for dsid in keys if "fog_dep" in dsid["name"]] - self.assertEqual(comps["visir"][fog_dep_ids[0]].attrs["prerequisites"], - [12.0, 10.8]) - self.assertEqual(comps["visir"][fog_dep_ids[1]].attrs["prerequisites"], - [10.8, 8.7]) + assert comps["visir"][fog_dep_ids[0]].attrs["prerequisites"] == [12.0, 10.8] + assert comps["visir"][fog_dep_ids[1]].attrs["prerequisites"] == [10.8, 8.7] # Check the same for SEVIRI and verify channel names are used # in the sub-composite dependencies instead of wavelengths comps = load_compositor_configs_for_sensors(["seviri"])[0] keys = comps["seviri"].keys() fog_dep_ids = [dsid for dsid in keys if "fog_dep" in dsid["name"]] - self.assertEqual(comps["seviri"][fog_dep_ids[0]].attrs["prerequisites"], - ["IR_120", "IR_108"]) - self.assertEqual(comps["seviri"][fog_dep_ids[1]].attrs["prerequisites"], - ["IR_108", "IR_087"]) + assert comps["seviri"][fog_dep_ids[0]].attrs["prerequisites"] == ["IR_120", "IR_108"] + assert comps["seviri"][fog_dep_ids[1]].attrs["prerequisites"] == ["IR_108", "IR_087"] class TestColormapCompositor(unittest.TestCase): @@ -701,8 +697,8 @@ def test_build_colormap_with_int_data_and_without_meanings(self): """Test colormap building.""" palette = np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]) colormap, squeezed_palette = self.colormap_compositor.build_colormap(palette, np.uint8, {}) - self.assertTrue(np.allclose(colormap.values, [0, 1])) - self.assertTrue(np.allclose(squeezed_palette, palette / 255.0)) + assert np.allclose(colormap.values, [0, 1]) + assert np.allclose(squeezed_palette, palette / 255.0) def test_build_colormap_with_int_data_and_with_meanings(self): """Test colormap building.""" @@ -710,8 +706,8 @@ def test_build_colormap_with_int_data_and_with_meanings(self): dims=["value", "band"]) palette.attrs["palette_meanings"] = [2, 3, 4] colormap, squeezed_palette = self.colormap_compositor.build_colormap(palette, np.uint8, {}) - self.assertTrue(np.allclose(colormap.values, [2, 3, 4])) - self.assertTrue(np.allclose(squeezed_palette, palette / 255.0)) + assert np.allclose(colormap.values, [2, 3, 4]) + assert np.allclose(squeezed_palette, palette / 255.0) class TestPaletteCompositor(unittest.TestCase): @@ -733,7 +729,7 @@ def test_call(self): [0., 0.498039, 1.]], [[1., 0.498039, 0.], [0., 0.498039, 1.]]]) - self.assertTrue(np.allclose(res, exp)) + assert np.allclose(res, exp) class TestColorizeCompositor(unittest.TestCase): @@ -758,7 +754,7 @@ def test_colorize_no_fill(self): [0., 0.498039, 1.]], [[1., 0.498039, 0.], [0., 0.498039, 1.]]]) - self.assertTrue(np.allclose(res, exp, atol=1e-4)) + assert np.allclose(res, exp, atol=0.0001) def test_colorize_with_interpolation(self): """Test colorizing with interpolation.""" @@ -940,14 +936,14 @@ def test_call(self): self.comp.attrs["resolution"] = None res = 
self.comp([all_valid], **attrs) # Verify attributes - self.assertEqual(res.attrs.get("sensor"), "foo") - self.assertTrue("foo" in res.attrs) - self.assertEqual(res.attrs.get("foo"), "bar") - self.assertTrue("units" in res.attrs) - self.assertTrue("calibration" in res.attrs) - self.assertFalse("modifiers" in res.attrs) - self.assertEqual(res.attrs["wavelength"], 10.8) - self.assertEqual(res.attrs["resolution"], 333) + assert res.attrs.get("sensor") == "foo" + assert "foo" in res.attrs + assert res.attrs.get("foo") == "bar" + assert "units" in res.attrs + assert "calibration" in res.attrs + assert "modifiers" not in res.attrs + assert res.attrs["wavelength"] == 10.8 + assert res.attrs["resolution"] == 333 class TestCategoricalDataCompositor(unittest.TestCase): @@ -1023,33 +1019,33 @@ def test_concat_datasets(self): from satpy.composites import IncompatibleAreas res = self.comp._concat_datasets([self.all_valid], "L") num_bands = len(res.bands) - self.assertEqual(num_bands, 1) - self.assertEqual(res.shape[0], num_bands) - self.assertEqual(res.bands[0], "L") + assert num_bands == 1 + assert res.shape[0] == num_bands + assert res.bands[0] == "L" res = self.comp._concat_datasets([self.all_valid, self.all_valid], "LA") num_bands = len(res.bands) - self.assertEqual(num_bands, 2) - self.assertEqual(res.shape[0], num_bands) - self.assertEqual(res.bands[0], "L") - self.assertEqual(res.bands[1], "A") + assert num_bands == 2 + assert res.shape[0] == num_bands + assert res.bands[0] == "L" + assert res.bands[1] == "A" self.assertRaises(IncompatibleAreas, self.comp._concat_datasets, [self.all_valid, self.wrong_shape], "LA") def test_get_sensors(self): """Test getting sensors from the dataset attributes.""" res = self.comp._get_sensors([self.all_valid]) - self.assertIsNone(res) + assert res is None dset1 = self.all_valid dset1.attrs["sensor"] = "foo" res = self.comp._get_sensors([dset1]) - self.assertEqual(res, "foo") + assert res == "foo" dset2 = self.first_invalid dset2.attrs["sensor"] = "bar" res = self.comp._get_sensors([dset1, dset2]) - self.assertIn("foo", res) - self.assertIn("bar", res) - self.assertEqual(len(res), 2) - self.assertIsInstance(res, set) + assert "foo" in res + assert "bar" in res + assert len(res) == 2 + assert isinstance(res, set) @mock.patch("satpy.composites.GenericCompositor._get_sensors") @mock.patch("satpy.composites.combine_metadata") @@ -1062,8 +1058,8 @@ def test_call_with_mock(self, match_data_arrays, check_times, combine_metadata, get_sensors.return_value = "foo" # One dataset, no mode given res = self.comp([self.all_valid]) - self.assertEqual(res.shape[0], 1) - self.assertEqual(res.attrs["mode"], "L") + assert res.shape[0] == 1 + assert res.attrs["mode"] == "L" match_data_arrays.assert_not_called() # This compositor has been initialized without common masking, so the # masking shouldn't have been called @@ -1093,15 +1089,15 @@ def test_call(self): self.comp.attrs["resolution"] = None res = self.comp([self.all_valid, self.first_invalid], **attrs) # Verify attributes - self.assertEqual(res.attrs.get("sensor"), "foo") - self.assertIn("foo", res.attrs) - self.assertEqual(res.attrs.get("foo"), "bar") - self.assertNotIn("units", res.attrs) - self.assertNotIn("calibration", res.attrs) - self.assertNotIn("modifiers", res.attrs) - self.assertIsNone(res.attrs["wavelength"]) - self.assertEqual(res.attrs["mode"], "LA") - self.assertEqual(res.attrs["resolution"], 333) + assert res.attrs.get("sensor") == "foo" + assert "foo" in res.attrs + assert res.attrs.get("foo") == "bar" + assert 
"units" not in res.attrs + assert "calibration" not in res.attrs + assert "modifiers" not in res.attrs + assert res.attrs["wavelength"] is None + assert res.attrs["mode"] == "LA" + assert res.attrs["resolution"] == 333 def test_deprecation_warning(self): """Test deprecation warning for dcprecated composite recipes.""" @@ -1125,7 +1121,7 @@ def test_add_bands_l_rgb(self): coords={"bands": ["R", "G", "B"]}) res = add_bands(data, new_bands) res_bands = ["R", "G", "B"] - self.assertEqual(res.attrs["mode"], "".join(res_bands)) + assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords["bands"], res_bands) @@ -1140,7 +1136,7 @@ def test_add_bands_l_rgba(self): coords={"bands": ["R", "G", "B", "A"]}) res = add_bands(data, new_bands) res_bands = ["R", "G", "B", "A"] - self.assertEqual(res.attrs["mode"], "".join(res_bands)) + assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords["bands"], res_bands) @@ -1155,7 +1151,7 @@ def test_add_bands_la_rgb(self): coords={"bands": ["R", "G", "B"]}) res = add_bands(data, new_bands) res_bands = ["R", "G", "B", "A"] - self.assertEqual(res.attrs["mode"], "".join(res_bands)) + assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords["bands"], res_bands) @@ -1171,7 +1167,7 @@ def test_add_bands_rgb_rbga(self): coords={"bands": ["R", "G", "B", "A"]}) res = add_bands(data, new_bands) res_bands = ["R", "G", "B", "A"] - self.assertEqual(res.attrs["mode"], "".join(res_bands)) + assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords["bands"], res_bands) @@ -1203,14 +1199,14 @@ def test_init(self, get_area_def): # No area defined comp = StaticImageCompositor("name", filename="/foo.tif") - self.assertEqual(comp._cache_filename, "/foo.tif") - self.assertIsNone(comp.area) + assert comp._cache_filename == "/foo.tif" + assert comp.area is None # Area defined get_area_def.return_value = "bar" comp = StaticImageCompositor("name", filename="/foo.tif", area="euro4") - self.assertEqual(comp._cache_filename, "/foo.tif") - self.assertEqual(comp.area, "bar") + assert comp._cache_filename == "/foo.tif" + assert comp.area == "bar" get_area_def.assert_called_once_with("euro4") @mock.patch("satpy.aux_download.retrieve") @@ -1239,11 +1235,11 @@ def load(self, arg): filenames=["/foo.tif"]) register.assert_not_called() retrieve.assert_not_called() - self.assertIn("start_time", res.attrs) - self.assertIn("end_time", res.attrs) - self.assertIsNone(res.attrs["sensor"]) - self.assertNotIn("modifiers", res.attrs) - self.assertNotIn("calibration", res.attrs) + assert "start_time" in res.attrs + assert "end_time" in res.attrs + assert res.attrs["sensor"] is None + assert "modifiers" not in res.attrs + assert "calibration" not in res.attrs # remote file with local cached version Scene.reset_mock() @@ -1253,11 +1249,11 @@ def load(self, arg): res = comp() Scene.assert_called_once_with(reader="generic_image", filenames=["data_dir/foo.tif"]) - self.assertIn("start_time", res.attrs) - self.assertIn("end_time", res.attrs) - self.assertIsNone(res.attrs["sensor"]) - self.assertNotIn("modifiers", res.attrs) - self.assertNotIn("calibration", res.attrs) + assert "start_time" in res.attrs + assert "end_time" in res.attrs + assert res.attrs["sensor"] is None + assert "modifiers" 
not in res.attrs + assert "calibration" not in res.attrs # Non-georeferenced image, no area given img.attrs.pop("area") @@ -1268,25 +1264,24 @@ def load(self, arg): # Non-georeferenced image, area given comp = StaticImageCompositor("name", filename="/foo.tif", area="euro4") res = comp() - self.assertEqual(res.attrs["area"].area_id, "euro4") + assert res.attrs["area"].area_id == "euro4" # Filename contains environment variable os.environ["TEST_IMAGE_PATH"] = "/path/to/image" comp = StaticImageCompositor("name", filename="${TEST_IMAGE_PATH}/foo.tif", area="euro4") - self.assertEqual(comp._cache_filename, "/path/to/image/foo.tif") + assert comp._cache_filename == "/path/to/image/foo.tif" # URL and filename without absolute path comp = StaticImageCompositor("name", url=remote_tif, filename="bar.tif") - self.assertEqual(comp._url, remote_tif) - self.assertEqual(comp._cache_filename, "bar.tif") + assert comp._url == remote_tif + assert comp._cache_filename == "bar.tif" # No URL, filename without absolute path, use default data_dir from config with mock.patch("os.path.exists") as exists: exists.return_value = True comp = StaticImageCompositor("name", filename="foo.tif") - self.assertEqual(comp._url, None) - self.assertEqual(comp._cache_filename, - os.path.join(os.path.sep, "path", "to", "image", "foo.tif")) + assert comp._url is None + assert comp._cache_filename == os.path.join(os.path.sep, "path", "to", "image", "foo.tif") def _enhance2dataset(dataset, convert_p=False): @@ -1384,7 +1379,7 @@ def test_multiple_sensors(self): class TestMaskingCompositor: """Test case for the simple masking compositor.""" - @pytest.fixture + @pytest.fixture() def conditions_v1(self): """Masking conditions with string values.""" return [{"method": "equal", @@ -1394,7 +1389,7 @@ def conditions_v1(self): "value": "Cloud-free_sea", "transparency": 50}] - @pytest.fixture + @pytest.fixture() def conditions_v2(self): """Masking conditions with numerical values.""" return [{"method": "equal", @@ -1404,12 +1399,12 @@ def conditions_v2(self): "value": 2, "transparency": 50}] - @pytest.fixture + @pytest.fixture() def test_data(self): """Test data to use with masking compositors.""" return xr.DataArray(da.random.random((3, 3)), dims=["y", "x"]) - @pytest.fixture + @pytest.fixture() def test_ct_data(self): """Test 2D CT data array.""" flag_meanings = ["Cloud-free_land", "Cloud-free_sea"] @@ -1422,18 +1417,18 @@ def test_ct_data(self): ct_data.attrs["flag_values"] = flag_values return ct_data - @pytest.fixture + @pytest.fixture() def test_ct_data_v3(self, test_ct_data): """Set ct data to NaN where it originally is 1.""" return test_ct_data.where(test_ct_data == 1) - @pytest.fixture + @pytest.fixture() def reference_data(self, test_data, test_ct_data): """Get reference data to use in masking compositor tests.""" # The data are set to NaN where ct is `1` return test_data.where(test_ct_data > 1) - @pytest.fixture + @pytest.fixture() def reference_alpha(self): """Get reference alpha to use in masking compositor tests.""" ref_alpha = da.array([[0, 0.5, 0.5], @@ -1446,8 +1441,8 @@ def test_init(self): from satpy.composites import MaskingCompositor # No transparency or conditions given raises ValueError - with pytest.raises(ValueError): - comp = MaskingCompositor("name") + with pytest.raises(ValueError, match="Masking conditions not defined."): + _ = MaskingCompositor("name") # transparency defined transparency = {0: 100, 1: 50} @@ -1621,7 +1616,7 @@ def test_incorrect_method(self, test_data, test_ct_data): with 
pytest.raises(AttributeError): comp([test_data, test_ct_data]) # Test with too few projectables. - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Expected 2 datasets, got 1"): comp([test_data]) def test_incorrect_mode(self, conditions_v1): @@ -1629,7 +1624,7 @@ def test_incorrect_mode(self, conditions_v1): from satpy.composites import MaskingCompositor # Incorrect mode raises ValueError - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Invalid mode YCbCrA. Supported modes: .*"): MaskingCompositor("name", conditions=conditions_v1, mode="YCbCrA") @@ -1660,17 +1655,17 @@ def temp_func(*args): match_data_arrays.side_effect = temp_func comp = NaturalEnh("foo", ch16_w=self.ch16_w, ch08_w=self.ch08_w, ch06_w=self.ch06_w) - self.assertEqual(comp.ch16_w, self.ch16_w) - self.assertEqual(comp.ch08_w, self.ch08_w) - self.assertEqual(comp.ch06_w, self.ch06_w) + assert comp.ch16_w == self.ch16_w + assert comp.ch08_w == self.ch08_w + assert comp.ch06_w == self.ch06_w res = comp(projectables) assert mock.call(projectables) in match_data_arrays.mock_calls correct = (self.ch16_w * projectables[0] + self.ch08_w * projectables[1] + self.ch06_w * projectables[2]) - self.assertEqual(res[0], correct) - self.assertEqual(res[1], projectables[1]) - self.assertEqual(res[2], projectables[2]) + assert res[0] == correct + assert res[1] == projectables[1] + assert res[2] == projectables[2] class TestEnhance2Dataset(unittest.TestCase): diff --git a/satpy/tests/test_config.py b/satpy/tests/test_config.py index 5cb1c047d2..df33436b45 100644 --- a/satpy/tests/test_config.py +++ b/satpy/tests/test_config.py @@ -154,7 +154,7 @@ def _fake_importlib_files(module_name: str) -> Path: return _fake_importlib_files -@pytest.fixture +@pytest.fixture() def fake_composite_plugin_etc_path(tmp_path: Path) -> Iterator[Path]: """Create a fake plugin entry point with a fake compositor YAML configuration file.""" yield from _create_yamlbased_plugin( @@ -182,7 +182,7 @@ def _write_fake_composite_yaml(yaml_filename: str) -> None: """) -@pytest.fixture +@pytest.fixture() def fake_reader_plugin_etc_path(tmp_path: Path) -> Iterator[Path]: """Create a fake plugin entry point with a fake reader YAML configuration file.""" yield from _create_yamlbased_plugin( @@ -205,7 +205,7 @@ def _write_fake_reader_yaml(yaml_filename: str) -> None: """) -@pytest.fixture +@pytest.fixture() def fake_writer_plugin_etc_path(tmp_path: Path) -> Iterator[Path]: """Create a fake plugin entry point with a fake writer YAML configuration file.""" yield from _create_yamlbased_plugin( @@ -226,7 +226,7 @@ def _write_fake_writer_yaml(yaml_filename: str) -> None: """) -@pytest.fixture +@pytest.fixture() def fake_enh_plugin_etc_path(tmp_path: Path) -> Iterator[Path]: """Create a fake plugin entry point with a fake enhancement YAML configure files. 
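The mechanical fixture and `parametrize` changes in this and the surrounding files follow flake8-pytest-style, whose rule codes already appear in this series (`# noqa: PT013` in test_viirs_edr.py): PT001 writes `@pytest.fixture()` with parentheses, PT005 strips the leading underscore from fixtures that return a value (the `_nc_filename` → `nc_filename` rename above, `_requests` → `requests` below), PT004 adds the underscore to autouse setup fixtures that return nothing (`setup_method` → `_setup_method` in test_viirs_compact.py), and PT006 passes multiple `parametrize` names as a tuple. A short sketch under those assumptions; the fixture and test names are invented, and the rule numbers are the plugin's defaults:

    import pytest

    @pytest.fixture()                  # PT001: parentheses even with no arguments
    def sample_file(tmp_path):         # PT005: returns a value, so no leading "_"
        path = tmp_path / "sample.nc"
        path.touch()
        return path

    @pytest.fixture(autouse=True)
    def _patch_env(monkeypatch):       # PT004: returns nothing, so keep the "_"
        monkeypatch.setenv("FAKE_ENV_FLAG", "1")  # FAKE_ENV_FLAG is made up

    @pytest.mark.parametrize(("width", "height"), [(1, 2), (3, 4)])  # PT006
    def test_shape(sample_file, width, height):
        assert sample_file.exists()
        assert width < height

A few conversion artifacts remain untouched, e.g. the `assert (... in ...) is True` comparisons in the test_virr_l1b hunks above: the `is True` is redundant under pytest's assertion rewriting, and the bare `assert attributes["band_index"] in range(band_index_size)` form reports the operands on failure.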
@@ -479,7 +479,9 @@ def test_bad_str_config_path(self): # strings are not allowed, lists are with satpy.config.set(config_path="/single/string/paths/are/bad"): - pytest.raises(ValueError, satpy._config.get_config_path_safe) + with pytest.raises(ValueError, + match="Satpy config option 'config_path' must be a list, not ''"): + satpy._config.get_config_path_safe() def test_tmp_dir_is_writable(self): """Check that the default temporary directory is writable.""" diff --git a/satpy/tests/test_crefl_utils.py b/satpy/tests/test_crefl_utils.py index 1e5da8cd9a..57eb4f84a6 100644 --- a/satpy/tests/test_crefl_utils.py +++ b/satpy/tests/test_crefl_utils.py @@ -33,7 +33,7 @@ def test_get_atm_variables_abi(self): 0.0043149700000000004, 0.0037296, 0.014107995000000002, 0.052349, ) sphalb, rhoray, TtotraytH2O, tOG = atm_vars() - self.assertLess(abs(np.array(sphalb) - 0.045213532544630494), 1e-10) - self.assertLess(abs(rhoray - 2.2030281148621356), 1e-10) - self.assertLess(abs(TtotraytH2O - 0.30309880915889087), 1e-10) - self.assertLess(abs(tOG - 0.5969089524560548), 1e-10) + assert abs(np.array(sphalb) - 0.045213532544630494) < 1e-10 + assert abs(rhoray - 2.2030281148621356) < 1e-10 + assert abs(TtotraytH2O - 0.30309880915889087) < 1e-10 + assert abs(tOG - 0.5969089524560548) < 1e-10 diff --git a/satpy/tests/test_data_download.py b/satpy/tests/test_data_download.py index 85cd420951..78edf180af 100644 --- a/satpy/tests/test_data_download.py +++ b/satpy/tests/test_data_download.py @@ -158,7 +158,7 @@ def _setup_custom_configs(self, tmpdir): _setup_custom_writer_config(tmpdir) self.tmpdir = tmpdir - @pytest.mark.parametrize("comp_sensors", [[], None, ["visir"]]) + @pytest.mark.parametrize("comp_sensors", [tuple(), None, ("visir",)]) @pytest.mark.parametrize("writers", [[], None, ["fake"]]) @pytest.mark.parametrize("readers", [[], None, ["fake"]]) def test_find_registerable(self, readers, writers, comp_sensors): diff --git a/satpy/tests/test_dataset.py b/satpy/tests/test_dataset.py index b8df391d30..014a450e0c 100644 --- a/satpy/tests/test_dataset.py +++ b/satpy/tests/test_dataset.py @@ -45,7 +45,7 @@ def test_basic_init(self): calibration="radiance") DataID(dikc, name="a", wavelength=0.86, resolution=250, calibration="radiance", modifiers=("sunz_corrected",)) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Required field name missing."): DataID(dikc, wavelength=0.86) did = DataID(mdkc, name="comp24", resolution=500) assert did["resolution"] == 500 @@ -64,14 +64,14 @@ def test_compare_no_wl(self): d2 = DataID(dikc, name="a", wavelength=None) # this happens when sorting IDs during dependency checks - self.assertFalse(d1 < d2) - self.assertTrue(d2 < d1) + assert not (d1 < d2) + assert d2 < d1 def test_bad_calibration(self): """Test that asking for a bad calibration fails.""" from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="_bad_ invalid value for "): DataID(dikc, name="C05", calibration="_bad_") def test_is_modified(self): @@ -119,20 +119,20 @@ def test_average_datetimes(self): datetime(2018, 2, 1, 12, 2, 0), ) ret = average_datetimes(dts) - self.assertEqual(dts[2], ret) + assert dts[2] == ret def test_combine_times_with_averaging(self): """Test the combine_metadata with times with averaging.""" from satpy.dataset.metadata import combine_metadata ret = combine_metadata(*self.datetime_dts) - self.assertEqual(self.datetime_dts[2]["start_time"], 
ret["start_time"]) + assert self.datetime_dts[2]["start_time"] == ret["start_time"] def test_combine_times_without_averaging(self): """Test the combine_metadata with times without averaging.""" from satpy.dataset.metadata import combine_metadata ret = combine_metadata(*self.datetime_dts, average_times=False) # times are not equal so don't include it in the final result - self.assertNotIn("start_time", ret) + assert "start_time" not in ret def test_combine_arrays(self): """Test the combine_metadata with arrays.""" @@ -387,7 +387,7 @@ def test_dataid(): did = make_dataid(name="cheese_shops", resolution=None) assert "resolution" not in did assert "None" not in did.__repr__() - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Required field name missing."): make_dataid(name=None, resolution=1000) # Check that defaults are applied correctly @@ -404,7 +404,7 @@ def test_dataid(): did["resolution"] = 1000 # Check that a missing required field crashes - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Required field name missing."): make_dataid(resolution=1000) # Check to_dict diff --git a/satpy/tests/test_demo.py b/satpy/tests/test_demo.py index 7ed3a3ac43..d1dddd5e8d 100644 --- a/satpy/tests/test_demo.py +++ b/satpy/tests/test_demo.py @@ -97,7 +97,7 @@ def test_get_us_midlatitude_cyclone_abi(self, gcsfs_mod): filenames = get_us_midlatitude_cyclone_abi() expected = os.path.join(".", "abi_l1b", "20190314_us_midlatitude_cyclone", "a.nc") for fn in filenames: - self.assertEqual(expected, fn) + assert expected == fn @mock.patch("satpy.demo._google_cloud_platform.gcsfs") def test_get_hurricane_florence_abi(self, gcsfs_mod): @@ -114,19 +114,19 @@ def test_get_hurricane_florence_abi(self, gcsfs_mod): gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi() - self.assertEqual(10 * 16, len(filenames)) + assert 10 * 16 == len(filenames) gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi(channels=[2, 3, 4]) - self.assertEqual(10 * 3, len(filenames)) + assert 10 * 3 == len(filenames) gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi(channels=[2, 3, 4], num_frames=5) - self.assertEqual(5 * 3, len(filenames)) + assert 5 * 3 == len(filenames) gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi(num_frames=5) - self.assertEqual(5 * 16, len(filenames)) + assert 5 * 16 == len(filenames) class TestGCPUtils(unittest.TestCase): @@ -137,7 +137,7 @@ def test_is_gcp_instance(self, uo): """Test is_google_cloud_instance.""" from satpy.demo._google_cloud_platform import URLError, is_google_cloud_instance uo.side_effect = URLError("Test Environment") - self.assertFalse(is_google_cloud_instance()) + assert not is_google_cloud_instance() @mock.patch("satpy.demo._google_cloud_platform.gcsfs") def test_get_bucket_files(self, gcsfs_mod): @@ -149,11 +149,11 @@ def test_get_bucket_files(self, gcsfs_mod): gcsfs_inst.glob.return_value = ["a.nc", "b.nc"] filenames = get_bucket_files("*.nc", ".") expected = [os.path.join(".", "a.nc"), os.path.join(".", "b.nc")] - self.assertEqual(expected, filenames) + assert expected == filenames gcsfs_inst.glob.side_effect = _GlobHelper(10) filenames = get_bucket_files(["*.nc", "*.txt"], ".", pattern_slice=slice(2, 5)) - self.assertEqual(len(filenames), 3 * 2) + assert len(filenames) == 3 * 2 gcsfs_inst.glob.side_effect 
= None # reset mock side effect gcsfs_inst.glob.return_value = ["a.nc", "b.nc"] @@ -163,14 +163,14 @@ def test_get_bucket_files(self, gcsfs_mod): gcsfs_inst.get.reset_mock() gcsfs_inst.glob.return_value = ["a.nc"] filenames = get_bucket_files("*.nc", ".") - self.assertEqual([os.path.join(".", "a.nc")], filenames) + assert [os.path.join(".", "a.nc")] == filenames gcsfs_inst.get.assert_not_called() # force redownload gcsfs_inst.get.reset_mock() gcsfs_inst.glob.return_value = ["a.nc"] filenames = get_bucket_files("*.nc", ".", force=True) - self.assertEqual([os.path.join(".", "a.nc")], filenames) + assert [os.path.join(".", "a.nc")] == filenames gcsfs_inst.get.assert_called_once() # if we don't get any results then we expect an exception @@ -284,20 +284,20 @@ class TestVIIRSSDRDemoDownload: "SVDNB") ALL_GEO_PREFIXES = ("GITCO", "GMTCO", "GDNBO") - def test_download(self, _requests, tmpdir): + def test_download(self, requests, tmpdir): """Test downloading VIIRS SDR data.""" from satpy.demo import get_viirs_sdr_20170128_1229 - _requests.get.side_effect = _FakeRequest + requests.get.side_effect = _FakeRequest with mock_filesystem(): files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir)) assert len(files) == 10 * (16 + 5 + 1 + 3) # 10 granules * (5 I bands + 16 M bands + 1 DNB + 3 geolocation) self._assert_bands_in_filenames_and_contents(self.ALL_BAND_PREFIXES + self.ALL_GEO_PREFIXES, files, 10) - def test_do_not_download_the_files_twice(self, _requests, tmpdir): + def test_do_not_download_the_files_twice(self, requests, tmpdir): """Test re-downloading VIIRS SDR data.""" from satpy.demo import get_viirs_sdr_20170128_1229 get_mock = mock.MagicMock() - _requests.get.return_value.__enter__ = get_mock + requests.get.return_value.__enter__ = get_mock with mock_filesystem(): files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir)) new_files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir)) @@ -307,21 +307,21 @@ def test_do_not_download_the_files_twice(self, _requests, tmpdir): assert get_mock.call_count == total_num_files assert new_files == files - def test_download_channels_num_granules_im(self, _requests, tmpdir): + def test_download_channels_num_granules_im(self, requests, tmpdir): """Test downloading VIIRS SDR I/M data with select granules.""" from satpy.demo import get_viirs_sdr_20170128_1229 - _requests.get.side_effect = _FakeRequest + requests.get.side_effect = _FakeRequest with mock_filesystem(): files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir), channels=("I01", "M01")) assert len(files) == 10 * (1 + 1 + 2) # 10 granules * (1 I band + 1 M band + 2 geolocation) self._assert_bands_in_filenames_and_contents(("SVI01", "SVM01", "GITCO", "GMTCO"), files, 10) - def test_download_channels_num_granules_im_twice(self, _requests, tmpdir): + def test_download_channels_num_granules_im_twice(self, requests, tmpdir): """Test re-downloading VIIRS SDR I/M data with select granules.""" from satpy.demo import get_viirs_sdr_20170128_1229 get_mock = mock.MagicMock() - _requests.get.return_value.__enter__ = get_mock + requests.get.return_value.__enter__ = get_mock with mock_filesystem(): files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir), channels=("I01", "M01")) @@ -334,10 +334,10 @@ def test_download_channels_num_granules_im_twice(self, _requests, tmpdir): assert len(files) == 2 * (1 + 1 + 2) # 2 granules * (1 I band + 1 M band + 2 geolocation) assert get_mock.call_count == num_first_batch - def test_download_channels_num_granules_dnb(self, _requests, tmpdir): + def 
test_download_channels_num_granules_dnb(self, requests, tmpdir): """Test downloading and re-downloading VIIRS SDR DNB data with select granules.""" from satpy.demo import get_viirs_sdr_20170128_1229 - _requests.get.side_effect = _FakeRequest + requests.get.side_effect = _FakeRequest with mock_filesystem(): files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir), channels=("DNB",), diff --git a/satpy/tests/test_dependency_tree.py b/satpy/tests/test_dependency_tree.py index 57b718963f..40433c0032 100644 --- a/satpy/tests/test_dependency_tree.py +++ b/satpy/tests/test_dependency_tree.py @@ -87,10 +87,8 @@ def test_copy_preserves_unique_empty_node(self): new_dependency_tree = self.dependency_tree.copy() assert self.dependency_tree.empty_node is new_dependency_tree.empty_node - self.assertIs(self.dependency_tree._root.children[0].children[0].children[1], - self.dependency_tree.empty_node) - self.assertIs(new_dependency_tree._root.children[0].children[0].children[1], - self.dependency_tree.empty_node) + assert self.dependency_tree._root.children[0].children[0].children[1] is self.dependency_tree.empty_node + assert new_dependency_tree._root.children[0].children[0].children[1] is self.dependency_tree.empty_node def test_new_dependency_tree_preserves_unique_empty_node(self): """Test that dependency tree instantiation preserves the uniqueness of the empty node.""" @@ -216,8 +214,8 @@ def test_compositor_loaded_sensor_order(self): """Test that a compositor is loaded from the first alphabetical sensor.""" self.dependency_tree.populate_with_keys({"comp1"}) comp_nodes = self.dependency_tree.trunk() - self.assertEqual(len(comp_nodes), 1) - self.assertEqual(comp_nodes[0].name["resolution"], 500) + assert len(comp_nodes) == 1 + assert comp_nodes[0].name["resolution"] == 500 def test_modifier_loaded_sensor_order(self): """Test that a modifier is loaded from the first alphabetical sensor.""" @@ -225,5 +223,5 @@ def test_modifier_loaded_sensor_order(self): dq = DataQuery(name="ds5", modifiers=("mod1",)) self.dependency_tree.populate_with_keys({dq}) comp_nodes = self.dependency_tree.trunk() - self.assertEqual(len(comp_nodes), 1) - self.assertEqual(comp_nodes[0].data[0].ret_val, 1) + assert len(comp_nodes) == 1 + assert comp_nodes[0].data[0].ret_val == 1 diff --git a/satpy/tests/test_file_handlers.py b/satpy/tests/test_file_handlers.py index 4282bc86b1..403e686204 100644 --- a/satpy/tests/test_file_handlers.py +++ b/satpy/tests/test_file_handlers.py @@ -56,19 +56,19 @@ def test_combine_times(self): info2 = {"start_time": 2} res = self.fh.combine_info([info1, info2]) exp = {"start_time": 1} - self.assertDictEqual(res, exp) + assert res == exp res = self.fh.combine_info([info2, info1]) exp = {"start_time": 1} - self.assertDictEqual(res, exp) + assert res == exp info1 = {"end_time": 1} info2 = {"end_time": 2} res = self.fh.combine_info([info1, info2]) exp = {"end_time": 2} - self.assertDictEqual(res, exp) + assert res == exp res = self.fh.combine_info([info2, info1]) exp = {"end_time": 2} - self.assertDictEqual(res, exp) + assert res == exp def test_combine_orbits(self): """Combine orbits.""" @@ -76,19 +76,19 @@ def test_combine_orbits(self): info2 = {"start_orbit": 2} res = self.fh.combine_info([info1, info2]) exp = {"start_orbit": 1} - self.assertDictEqual(res, exp) + assert res == exp res = self.fh.combine_info([info2, info1]) exp = {"start_orbit": 1} - self.assertDictEqual(res, exp) + assert res == exp info1 = {"end_orbit": 1} info2 = {"end_orbit": 2} res = self.fh.combine_info([info1, info2]) exp = 
{"end_orbit": 2} - self.assertDictEqual(res, exp) + assert res == exp res = self.fh.combine_info([info2, info1]) exp = {"end_orbit": 2} - self.assertDictEqual(res, exp) + assert res == exp @mock.patch("satpy.readers.file_handlers.SwathDefinition") def test_combine_area(self, sdef): @@ -107,9 +107,9 @@ def test_combine_area(self, sdef): info2 = {"area": area2} self.fh.combine_info([info1, info2]) - self.assertTupleEqual(sdef.call_args[1]["lons"].shape, (2, 5)) - self.assertTupleEqual(sdef.call_args[1]["lats"].shape, (2, 5)) - self.assertEqual(sdef.return_value.name, "area1_area2") + assert sdef.call_args[1]["lons"].shape == (2, 5) + assert sdef.call_args[1]["lats"].shape == (2, 5) + assert sdef.return_value.name == "area1_area2" def test_combine_orbital_parameters(self): """Combine orbital parameters.""" @@ -148,10 +148,10 @@ def test_combine_orbital_parameters(self): "only_in_1": False, "only_in_2": True}} res = self.fh.combine_info([info1, info2]) - self.assertDictEqual(res, exp) + assert res == exp # Identity - self.assertEqual(self.fh.combine_info([info1]), info1) + assert self.fh.combine_info([info1]) == info1 # Empty self.fh.combine_info([{}]) diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index 04d32b7ecc..4e41b78c75 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -275,7 +275,7 @@ def test_provide_sunz_and_threshold(self, calculator, apply_modifier_info, sza): info = {"modifiers": None} res = comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) - self.assertEqual(res.attrs["sun_zenith_threshold"], 84.0) + assert res.attrs["sun_zenith_threshold"] == 84.0 calculator.assert_called_with("Meteosat-11", "seviri", "IR_039", sunz_threshold=84.0, masking_limit=NIRReflectance.MASKING_LIMIT) @@ -308,7 +308,7 @@ def test_provide_masking_limit(self, calculator, apply_modifier_info, sza): info = {"modifiers": None} res = comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) - self.assertIsNone(res.attrs["sun_zenith_masking_limit"]) + assert res.attrs["sun_zenith_masking_limit"] is None calculator.assert_called_with("Meteosat-11", "seviri", "IR_039", sunz_threshold=NIRReflectance.TERMINATOR_LIMIT, masking_limit=None) @@ -383,11 +383,11 @@ def test_compositor(self, calculator, apply_modifier_info, sza): sza.return_value = sunz2 res = comp([nir, ir_], optional_datasets=[sunz], **info) - self.assertEqual(res.attrs["sun_zenith_threshold"], 86.0) - self.assertEqual(res.attrs["units"], "K") - self.assertEqual(res.attrs["platform_name"], platform) - self.assertEqual(res.attrs["sensor"], sensor) - self.assertEqual(res.attrs["name"], chan_name) + assert res.attrs["sun_zenith_threshold"] == 86.0 + assert res.attrs["units"] == "K" + assert res.attrs["platform_name"] == platform + assert res.attrs["sensor"] == sensor + assert res.attrs["name"] == chan_name calculator.assert_called_with("NOAA-20", "viirs", "M12", sunz_threshold=86.0, masking_limit=NIRReflectance.MASKING_LIMIT) diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index 3b2888565b..8250f691a0 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -67,7 +67,7 @@ real_import = builtins.__import__ -@pytest.fixture +@pytest.fixture() def viirs_file(tmp_path, monkeypatch): """Create a dummy viirs file.""" filename = "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5" @@ -78,7 +78,7 @@ def viirs_file(tmp_path, monkeypatch): return filename -@pytest.fixture +@pytest.fixture() def atms_file(tmp_path, 
monkeypatch): """Create a dummy atms file.""" filename = "SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5" @@ -132,44 +132,44 @@ def test_init_noargs(self): """Test DatasetDict init with no arguments.""" from satpy import DatasetDict d = DatasetDict() - self.assertIsInstance(d, dict) + assert isinstance(d, dict) def test_init_dict(self): """Test DatasetDict init with a regular dict argument.""" from satpy import DatasetDict regular_dict = {make_dataid(name="test", wavelength=(0, 0.5, 1)): "1", } d = DatasetDict(regular_dict) - self.assertEqual(d, regular_dict) + assert d == regular_dict def test_getitem(self): """Test DatasetDict getitem with different arguments.""" from satpy.tests.utils import make_dsq d = self.test_dict # access by name - self.assertEqual(d["test"], "1") + assert d["test"] == "1" # access by exact wavelength - self.assertEqual(d[1.5], "2") + assert d[1.5] == "2" # access by near wavelength - self.assertEqual(d[1.55], "2") + assert d[1.55] == "2" # access by near wavelength of another dataset - self.assertEqual(d[1.65], "3") + assert d[1.65] == "3" # access by name with multiple levels - self.assertEqual(d["test6"], "6_100") + assert d["test6"] == "6_100" - self.assertEqual(d[make_dsq(wavelength=1.5)], "2") - self.assertEqual(d[make_dsq(wavelength=0.5, resolution=1000)], "1") - self.assertEqual(d[make_dsq(wavelength=0.5, resolution=500)], "1h") - self.assertEqual(d[make_dsq(name="test6", level=100)], "6_100") - self.assertEqual(d[make_dsq(name="test6", level=200)], "6_200") + assert d[make_dsq(wavelength=1.5)] == "2" + assert d[make_dsq(wavelength=0.5, resolution=1000)] == "1" + assert d[make_dsq(wavelength=0.5, resolution=500)] == "1h" + assert d[make_dsq(name="test6", level=100)] == "6_100" + assert d[make_dsq(name="test6", level=200)] == "6_200" # higher resolution is returned - self.assertEqual(d[0.5], "1h") - self.assertEqual(d["test4"], "4refl") - self.assertEqual(d[make_dataid(name="test4", calibration="radiance")], "4rad") + assert d[0.5] == "1h" + assert d["test4"] == "4refl" + assert d[make_dataid(name="test4", calibration="radiance")] == "4rad" self.assertRaises(KeyError, d.getitem, "1h") # test with full tuple - self.assertEqual(d[make_dsq(name="test", wavelength=(0, 0.5, 1), resolution=1000)], "1") + assert d[make_dsq(name="test", wavelength=(0, 0.5, 1), resolution=1000)] == "1" def test_get_key(self): """Test 'get_key' special functions.""" @@ -180,32 +180,27 @@ def test_get_key(self): num_results=0) res3 = get_key(make_dataid(name="test4"), d, calibration="radiance", num_results=3) - self.assertEqual(len(res2), 1) - self.assertEqual(len(res3), 1) + assert len(res2) == 1 + assert len(res3) == 1 res2 = res2[0] res3 = res3[0] - self.assertEqual(res1, res2) - self.assertEqual(res1, res3) + assert res1 == res2 + assert res1 == res3 res1 = get_key("test4", d, query=DataQuery(polarization="V")) - self.assertEqual(res1, make_dataid(name="test4", calibration="radiance", - polarization="V")) + assert res1 == make_dataid(name="test4", calibration="radiance", polarization="V") res1 = get_key(0.5, d, query=DataQuery(resolution=500)) - self.assertEqual(res1, make_dataid(name="testh", - wavelength=(0, 0.5, 1), - resolution=500)) + assert res1 == make_dataid(name="testh", wavelength=(0, 0.5, 1), resolution=500) res1 = get_key("test6", d, query=DataQuery(level=100)) - self.assertEqual(res1, make_dataid(name="test6", - level=100)) + assert res1 == make_dataid(name="test6", level=100) res1 = get_key("test5", d) res2 = get_key("test5", d, 
query=DataQuery(modifiers=("mod2",))) res3 = get_key("test5", d, query=DataQuery(modifiers=("mod1", "mod2",))) - self.assertEqual(res1, make_dataid(name="test5", - modifiers=("mod2",))) - self.assertEqual(res1, res2) - self.assertNotEqual(res1, res3) + assert res1 == make_dataid(name="test5", modifiers=("mod2",)) + assert res1 == res2 + assert res1 != res3 # more than 1 result when default is to ask for 1 result self.assertRaises(KeyError, get_key, "test4", d, best=False) @@ -213,40 +208,39 @@ def test_get_key(self): def test_contains(self): """Test DatasetDict contains method.""" d = self.test_dict - self.assertIn("test", d) - self.assertFalse(d.contains("test")) - self.assertNotIn("test_bad", d) - self.assertIn(0.5, d) - self.assertFalse(d.contains(0.5)) - self.assertIn(1.5, d) - self.assertIn(1.55, d) - self.assertIn(1.65, d) - self.assertIn(make_dataid(name="test4", calibration="radiance"), d) - self.assertIn("test4", d) + assert "test" in d + assert not d.contains("test") + assert "test_bad" not in d + assert 0.5 in d + assert not d.contains(0.5) + assert 1.5 in d + assert 1.55 in d + assert 1.65 in d + assert make_dataid(name="test4", calibration="radiance") in d + assert "test4" in d def test_keys(self): """Test keys method of DatasetDict.""" from satpy.tests.utils import DataID d = self.test_dict - self.assertEqual(len(d.keys()), len(self.regular_dict.keys())) - self.assertTrue(all(isinstance(x, DataID) for x in d.keys())) + assert len(d.keys()) == len(self.regular_dict.keys()) + assert all(isinstance(x, DataID) for x in d.keys()) name_keys = d.keys(names=True) - self.assertListEqual(sorted(set(name_keys))[:4], [ - "test", "test2", "test3", "test4"]) + assert sorted(set(name_keys))[:4] == ["test", "test2", "test3", "test4"] wl_keys = tuple(d.keys(wavelengths=True)) - self.assertIn((0, 0.5, 1), wl_keys) - self.assertIn((1, 1.5, 2, "µm"), wl_keys) - self.assertIn((1.2, 1.7, 2.2, "µm"), wl_keys) - self.assertIn(None, wl_keys) + assert (0, 0.5, 1) in wl_keys + assert (1, 1.5, 2, "µm") in wl_keys + assert (1.2, 1.7, 2.2, "µm") in wl_keys + assert None in wl_keys def test_setitem(self): """Test setitem method of DatasetDict.""" d = self.test_dict d["new_ds"] = {"metadata": "new_ds"} - self.assertEqual(d["new_ds"]["metadata"], "new_ds") + assert d["new_ds"]["metadata"] == "new_ds" d[0.5] = {"calibration": "radiance"} - self.assertEqual(d[0.5]["resolution"], 500) - self.assertEqual(d[0.5]["name"], "testh") + assert d[0.5]["resolution"] == 500 + assert d[0.5]["name"] == "testh" class TestReaderLoader(unittest.TestCase): @@ -276,20 +270,20 @@ def test_no_args(self): """ from satpy.readers import load_readers ri = load_readers() - self.assertDictEqual(ri, {}) + assert ri == {} def test_filenames_only(self): """Test with filenames specified.""" from satpy.readers import load_readers ri = load_readers(filenames=["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"]) - self.assertListEqual(list(ri.keys()), ["viirs_sdr"]) + assert list(ri.keys()) == ["viirs_sdr"] def test_filenames_and_reader(self): """Test with filenames and reader specified.""" from satpy.readers import load_readers ri = load_readers(reader="viirs_sdr", filenames=["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"]) - self.assertListEqual(list(ri.keys()), ["viirs_sdr"]) + assert list(ri.keys()) == ["viirs_sdr"] def test_bad_reader_name_with_filenames(self): """Test bad reader name with filenames provided.""" @@ -306,7 +300,7 @@ def test_filenames_as_path(self): ri = 
load_readers(filenames=[ Path("SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"), ]) - self.assertListEqual(list(ri.keys()), ["viirs_sdr"]) + assert list(ri.keys()) == ["viirs_sdr"] def test_filenames_as_dict(self): """Test loading readers where filenames are organized by reader.""" @@ -315,7 +309,7 @@ def test_filenames_as_dict(self): "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], } ri = load_readers(filenames=filenames) - self.assertListEqual(list(ri.keys()), ["viirs_sdr"]) + assert list(ri.keys()) == ["viirs_sdr"] def test_filenames_as_dict_bad_reader(self): """Test loading with filenames dict but one of the readers is bad.""" @@ -340,7 +334,7 @@ def test_filenames_as_dict_with_reader(self): "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], } ri = load_readers(reader="viirs_sdr", filenames=filenames) - self.assertListEqual(list(ri.keys()), ["viirs_sdr"]) + assert list(ri.keys()) == ["viirs_sdr"] def test_empty_filenames_as_dict(self): """Test passing filenames as a dictionary with an empty list of filenames.""" @@ -357,7 +351,7 @@ def test_empty_filenames_as_dict(self): "viirs_l1b": [], } ri = load_readers(filenames) - self.assertListEqual(list(ri.keys()), ["viirs_sdr"]) + assert list(ri.keys()) == ["viirs_sdr"] @mock.patch("satpy.readers.hrit_base.HRITFileHandler._get_hd") @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler._get_header") @@ -436,10 +430,10 @@ def test_almost_all_filtered(self): "end_time": datetime.datetime(2012, 2, 26)} # viirs has data that matches the request, abi doesn't readers = load_readers(filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) - self.assertIn("viirs_sdr", readers) + assert "viirs_sdr" in readers # abi_l1b reader was created, but no datasets available - self.assertIn("abi_l1b", readers) - self.assertEqual(len(list(readers["abi_l1b"].available_dataset_ids)), 0) + assert "abi_l1b" in readers + assert len(list(readers["abi_l1b"].available_dataset_ids)) == 0 class TestFindFilesAndReaders: @@ -514,7 +508,7 @@ def test_reader_name_unmatched_start_end_time(self, viirs_file): """Test with start and end time matching the filename.""" from datetime import datetime - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="No supported files found"): find_files_and_readers(reader="viirs_sdr", start_time=datetime(2012, 2, 26, 18, 0, 0), end_time=datetime(2012, 2, 26, 19, 0, 0)) @@ -540,7 +534,7 @@ def test_no_parameters_both_atms_and_viirs(self, viirs_file, atms_file): def test_bad_sensor(self): """Test bad sensor doesn't find any files.""" - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Sensor.* not supported by any readers"): find_files_and_readers(sensor="i_dont_exist") def test_sensor(self, viirs_file): @@ -555,7 +549,7 @@ def test_sensor_no_files(self): """Test that readers for the current sensor are loaded.""" # we can't easily know how many readers satpy has that support # 'viirs' so we just pass it and hope that this works - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="No supported files found"): find_files_and_readers(sensor="viirs") assert find_files_and_readers(sensor="viirs", missing_ok=True) == {} @@ -589,7 +583,7 @@ def test_old_reader_name_mapping(self): return pytest.skip("Skipping deprecated reader tests because " "no deprecated readers.") test_reader = sorted(OLD_READER_NAMES.keys())[0] - with pytest.raises(ValueError): 
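# A minimal illustrative sketch (not part of the patch above) of why the
# ``match=`` strings being added throughout these hunks behave the way they
# do: pytest.raises() treats ``match`` as a regular expression and applies
# ``re.search`` to ``str(excinfo.value)``, so a plain substring matches
# anywhere in the message and metacharacters such as ".*" are regex syntax,
# not literal text. The error message below is a hypothetical stand-in.
import re

import pytest


def _fail():
    raise ValueError("Reader name 'hrit_msg' has been deprecated, use 'seviri_l1b_hrit' instead.")


def test_match_is_a_regex_search():
    # Substring plus ".*" wildcards, exactly like the patterns in this patch.
    with pytest.raises(ValueError, match="Reader name .* has been deprecated"):
        _fail()
    # Equivalent to the check pytest performs internally.
    assert re.search("Reader name .* has been deprecated",
                     "Reader name 'hrit_msg' has been deprecated, use 'seviri_l1b_hrit' instead.")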
+ with pytest.raises(ValueError, match="Reader name .* has been deprecated, use .* instead."): get_valid_reader_names([test_reader]) @@ -714,12 +708,12 @@ def test_no_reader(self): # without files it's going to be an empty result assert group_files([]) == [] groups = group_files(self.g16_files) - self.assertEqual(6, len(groups)) + assert 6 == len(groups) def test_unknown_files(self): """Test that error is raised on unknown files.""" from satpy.readers import group_files - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="No matching readers found for these files: .*"): group_files(self.unknown_files, "abi_l1b") def test_bad_reader(self): @@ -737,8 +731,8 @@ def test_default_behavior(self): """Test the default behavior with the 'abi_l1b' reader.""" from satpy.readers import group_files groups = group_files(self.g16_files, reader="abi_l1b") - self.assertEqual(6, len(groups)) - self.assertEqual(2, len(groups[0]["abi_l1b"])) + assert 6 == len(groups) + assert 2 == len(groups[0]["abi_l1b"]) def test_default_behavior_set(self): """Test the default behavior with the 'abi_l1b' reader.""" @@ -747,23 +741,23 @@ def test_default_behavior_set(self): num_files = len(files) groups = group_files(files, reader="abi_l1b") # we didn't modify it - self.assertEqual(len(files), num_files) - self.assertEqual(6, len(groups)) - self.assertEqual(2, len(groups[0]["abi_l1b"])) + assert len(files) == num_files + assert 6 == len(groups) + assert 2 == len(groups[0]["abi_l1b"]) def test_non_datetime_group_key(self): """Test what happens when the start_time isn't used for grouping.""" from satpy.readers import group_files groups = group_files(self.g16_files, reader="abi_l1b", group_keys=("platform_shortname",)) - self.assertEqual(1, len(groups)) - self.assertEqual(12, len(groups[0]["abi_l1b"])) + assert 1 == len(groups) + assert 12 == len(groups[0]["abi_l1b"]) def test_large_time_threshold(self): """Test what happens when the time threshold holds multiple files.""" from satpy.readers import group_files groups = group_files(self.g16_files, reader="abi_l1b", time_threshold=60*8) - self.assertEqual(3, len(groups)) - self.assertEqual(4, len(groups[0]["abi_l1b"])) + assert 3 == len(groups) + assert 4 == len(groups[0]["abi_l1b"]) def test_two_instruments_files(self): """Test the behavior when two instruments files are provided. @@ -777,8 +771,8 @@ def test_two_instruments_files(self): """ from satpy.readers import group_files groups = group_files(self.g16_files + self.g17_files, reader="abi_l1b", group_keys=("start_time",)) - self.assertEqual(6, len(groups)) - self.assertEqual(4, len(groups[0]["abi_l1b"])) + assert 6 == len(groups) + assert 4 == len(groups[0]["abi_l1b"]) def test_two_instruments_files_split(self): """Test the default behavior when two instruments files are provided and split. 
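# A rough sketch of the grouping behaviour the group_files tests around this
# hunk pin down; this is an illustration, not satpy's implementation. Files
# are bucketed by the tuple of ``group_keys`` fields parsed from each
# filename, so adding "platform_shortname" to the keys splits simultaneous
# G16 and G17 granules into separate groups. The parsed dicts below are
# hypothetical stand-ins for the fields satpy extracts from real ABI names.
from itertools import groupby

parsed = [
    {"start_time": "2018-06-01T00:00", "platform_shortname": "G16"},
    {"start_time": "2018-06-01T00:00", "platform_shortname": "G17"},
    {"start_time": "2018-06-01T00:05", "platform_shortname": "G16"},
    {"start_time": "2018-06-01T00:05", "platform_shortname": "G17"},
]


def _group(files, group_keys):
    def key(info):
        return tuple(info[k] for k in group_keys)
    return [list(grp) for _, grp in groupby(sorted(files, key=key), key)]


assert len(_group(parsed, ("start_time",))) == 2                        # grouped by time only
assert len(_group(parsed, ("start_time", "platform_shortname"))) == 4  # split per platform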
@@ -790,49 +784,49 @@ def test_two_instruments_files_split(self): from satpy.readers import group_files groups = group_files(self.g16_files + self.g17_files, reader="abi_l1b", group_keys=("start_time", "platform_shortname")) - self.assertEqual(12, len(groups)) - self.assertEqual(2, len(groups[0]["abi_l1b"])) + assert 12 == len(groups) + assert 2 == len(groups[0]["abi_l1b"]) # default for abi_l1b should also behave like this groups = group_files(self.g16_files + self.g17_files, reader="abi_l1b") - self.assertEqual(12, len(groups)) - self.assertEqual(2, len(groups[0]["abi_l1b"])) + assert 12 == len(groups) + assert 2 == len(groups[0]["abi_l1b"]) def test_viirs_orbits(self): """Test a reader that doesn't use 'start_time' for default grouping.""" from satpy.readers import group_files groups = group_files(self.noaa20_files + self.npp_files, reader="viirs_sdr") - self.assertEqual(2, len(groups)) + assert 2 == len(groups) # the noaa-20 files will be first because the orbit number is smaller # 5 granules * 3 file types - self.assertEqual(5 * 3, len(groups[0]["viirs_sdr"])) + assert 5 * 3 == len(groups[0]["viirs_sdr"]) # 3 granules * 2 file types - self.assertEqual(6, len(groups[1]["viirs_sdr"])) + assert 6 == len(groups[1]["viirs_sdr"]) def test_viirs_override_keys(self): """Test overriding a group keys to add 'start_time'.""" from satpy.readers import group_files groups = group_files(self.noaa20_files + self.npp_files, reader="viirs_sdr", group_keys=("start_time", "orbit", "platform_shortname")) - self.assertEqual(8, len(groups)) - self.assertEqual(2, len(groups[0]["viirs_sdr"])) # NPP - self.assertEqual(2, len(groups[1]["viirs_sdr"])) # NPP - self.assertEqual(2, len(groups[2]["viirs_sdr"])) # NPP - self.assertEqual(3, len(groups[3]["viirs_sdr"])) # N20 - self.assertEqual(3, len(groups[4]["viirs_sdr"])) # N20 - self.assertEqual(3, len(groups[5]["viirs_sdr"])) # N20 - self.assertEqual(3, len(groups[6]["viirs_sdr"])) # N20 - self.assertEqual(3, len(groups[7]["viirs_sdr"])) # N20 + assert 8 == len(groups) + assert 2 == len(groups[0]["viirs_sdr"]) # NPP + assert 2 == len(groups[1]["viirs_sdr"]) # NPP + assert 2 == len(groups[2]["viirs_sdr"]) # NPP + assert 3 == len(groups[3]["viirs_sdr"]) # N20 + assert 3 == len(groups[4]["viirs_sdr"]) # N20 + assert 3 == len(groups[5]["viirs_sdr"]) # N20 + assert 3 == len(groups[6]["viirs_sdr"]) # N20 + assert 3 == len(groups[7]["viirs_sdr"]) # N20 # Ask for a larger time span with our groups groups = group_files(self.noaa20_files + self.npp_files, reader="viirs_sdr", time_threshold=60 * 60 * 2, group_keys=("start_time", "orbit", "platform_shortname")) - self.assertEqual(2, len(groups)) + assert 2 == len(groups) # NPP is first because it has an earlier time # 3 granules * 2 file types - self.assertEqual(6, len(groups[0]["viirs_sdr"])) + assert 6 == len(groups[0]["viirs_sdr"]) # 5 granules * 3 file types - self.assertEqual(5 * 3, len(groups[1]["viirs_sdr"])) + assert 5 * 3 == len(groups[1]["viirs_sdr"]) def test_multi_readers(self): """Test passing multiple readers.""" @@ -930,7 +924,7 @@ def test_multi_readers_empty_groups_passed(self): def test_multi_readers_invalid_parameter(self): """Verify that invalid missing parameter raises ValueError.""" from satpy.readers import group_files - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Invalid value for ``missing`` argument..*"): group_files( self._filenames_abi_glm, reader=["abi_l1b", "glm_l2"], diff --git a/satpy/tests/test_resample.py b/satpy/tests/test_resample.py index a9a3b24a01..be55954851 
100644 --- a/satpy/tests/test_resample.py +++ b/satpy/tests/test_resample.py @@ -125,13 +125,13 @@ def test_type_preserve(self): data.attrs["_FillValue"] = 255 data.attrs["area"] = source_area res = resample_dataset(data, dest_area) - self.assertEqual(res.dtype, data.dtype) - self.assertTrue(np.all(res.values == expected_gap)) + assert res.dtype == data.dtype + assert np.all(res.values == expected_gap) expected_filled = np.array([[1, 2], [3, 3]]) res = resample_dataset(data, dest_area, radius_of_influence=1000000) - self.assertEqual(res.dtype, data.dtype) - self.assertTrue(np.all(res.values == expected_filled)) + assert res.dtype == data.dtype + assert np.all(res.values == expected_filled) class TestKDTreeResampler(unittest.TestCase): @@ -155,7 +155,7 @@ def test_kd_resampling(self, xr_resampler, create_filename, zarr_open, xr_resampler.assert_called_once() resampler.resampler.get_neighbour_info.assert_called() # swath definitions should not be cached - self.assertFalse(len(mock_dset.to_zarr.mock_calls), 0) + assert len(mock_dset.to_zarr.mock_calls) == 0 resampler.resampler.reset_mock() cnc.assert_called_once() @@ -170,11 +170,11 @@ def test_kd_resampling(self, xr_resampler, create_filename, zarr_open, zarr_open.side_effect = ValueError() resampler.precompute(cache_dir=the_dir) # assert data was saved to the on-disk cache - self.assertEqual(len(mock_dset.to_zarr.mock_calls), 1) + assert len(mock_dset.to_zarr.mock_calls) == 1 # assert that zarr_open was called to try to zarr_open something from disk - self.assertEqual(len(zarr_open.mock_calls), 1) + assert len(zarr_open.mock_calls) == 1 # we should have cached things in-memory - self.assertEqual(len(resampler._index_caches), 1) + assert len(resampler._index_caches) == 1 nbcalls = len(resampler.resampler.get_neighbour_info.mock_calls) # test reusing the resampler zarr_open.side_effect = None @@ -195,20 +195,20 @@ def astype(self, dtype): distance_array=4) resampler.precompute(cache_dir=the_dir) # we already have things cached in-memory, no need to save again - self.assertEqual(len(mock_dset.to_zarr.mock_calls), 1) + assert len(mock_dset.to_zarr.mock_calls) == 1 # we already have things cached in-memory, don't need to load - self.assertEqual(len(zarr_open.mock_calls), 1) + assert len(zarr_open.mock_calls) == 1 # we should have cached things in-memory - self.assertEqual(len(resampler._index_caches), 1) - self.assertEqual(len(resampler.resampler.get_neighbour_info.mock_calls), nbcalls) + assert len(resampler._index_caches) == 1 + assert len(resampler.resampler.get_neighbour_info.mock_calls) == nbcalls # test loading saved resampler resampler = KDTreeResampler(source_area, target_area) resampler.precompute(cache_dir=the_dir) - self.assertEqual(len(zarr_open.mock_calls), 4) - self.assertEqual(len(resampler.resampler.get_neighbour_info.mock_calls), nbcalls) + assert len(zarr_open.mock_calls) == 4 + assert len(resampler.resampler.get_neighbour_info.mock_calls) == nbcalls # we should have cached things in-memory now - self.assertEqual(len(resampler._index_caches), 1) + assert len(resampler._index_caches) == 1 finally: shutil.rmtree(the_dir) @@ -279,10 +279,10 @@ def test_2d_ewa(self, get_lonlats, ll2cr, fornav): num_chunks = len(source_swath.lons.chunks[0]) * len(source_swath.lons.chunks[1]) new_data = resample_dataset(swath_data, target_area, resampler="ewa") - self.assertTupleEqual(new_data.shape, (200, 100)) - self.assertEqual(new_data.dtype, np.float32) - self.assertEqual(new_data.attrs["test"], "test") - self.assertIs(new_data.attrs["area"], 
target_area) + assert new_data.shape == (200, 100) + assert new_data.dtype == np.float32 + assert new_data.attrs["test"] == "test" + assert new_data.attrs["area"] is target_area # make sure we can actually compute everything new_data.compute() lonlat_calls = get_lonlats.call_count @@ -296,17 +296,17 @@ def test_2d_ewa(self, get_lonlats, ll2cr, fornav): new_data = resample_dataset(data, target_area, resampler="ewa") new_data.compute() # ll2cr will be called once more because of the computation - self.assertEqual(ll2cr.call_count, ll2cr_calls + num_chunks) + assert ll2cr.call_count == ll2cr_calls + num_chunks # but we should already have taken the lonlats from the SwathDefinition - self.assertEqual(get_lonlats.call_count, lonlat_calls) - self.assertIn("y", new_data.coords) - self.assertIn("x", new_data.coords) - self.assertIn("crs", new_data.coords) - self.assertIsInstance(new_data.coords["crs"].item(), CRS) - self.assertIn("lambert", new_data.coords["crs"].item().coordinate_operation.method_name.lower()) - self.assertEqual(new_data.coords["y"].attrs["units"], "meter") - self.assertEqual(new_data.coords["x"].attrs["units"], "meter") - self.assertEqual(target_area.crs, new_data.coords["crs"].item()) + assert get_lonlats.call_count == lonlat_calls + assert "y" in new_data.coords + assert "x" in new_data.coords + assert "crs" in new_data.coords + assert isinstance(new_data.coords["crs"].item(), CRS) + assert "lambert" in new_data.coords["crs"].item().coordinate_operation.method_name.lower() + assert new_data.coords["y"].attrs["units"] == "meter" + assert new_data.coords["x"].attrs["units"] == "meter" + assert target_area.crs == new_data.coords["crs"].item() @mock.patch("satpy.resample.fornav") @mock.patch("satpy.resample.ll2cr") @@ -329,10 +329,10 @@ def test_3d_ewa(self, get_lonlats, ll2cr, fornav): num_chunks = len(source_swath.lons.chunks[0]) * len(source_swath.lons.chunks[1]) new_data = resample_dataset(swath_data, target_area, resampler="ewa") - self.assertTupleEqual(new_data.shape, (3, 200, 100)) - self.assertEqual(new_data.dtype, np.float32) - self.assertEqual(new_data.attrs["test"], "test") - self.assertIs(new_data.attrs["area"], target_area) + assert new_data.shape == (3, 200, 100) + assert new_data.dtype == np.float32 + assert new_data.attrs["test"] == "test" + assert new_data.attrs["area"] is target_area # make sure we can actually compute everything new_data.compute() lonlat_calls = get_lonlats.call_count @@ -346,20 +346,20 @@ def test_3d_ewa(self, get_lonlats, ll2cr, fornav): new_data = resample_dataset(swath_data, target_area, resampler="ewa") new_data.compute() # ll2cr will be called once more because of the computation - self.assertEqual(ll2cr.call_count, ll2cr_calls + num_chunks) + assert ll2cr.call_count == ll2cr_calls + num_chunks # but we should already have taken the lonlats from the SwathDefinition - self.assertEqual(get_lonlats.call_count, lonlat_calls) - self.assertIn("y", new_data.coords) - self.assertIn("x", new_data.coords) - self.assertIn("bands", new_data.coords) - self.assertIn("crs", new_data.coords) - self.assertIsInstance(new_data.coords["crs"].item(), CRS) - self.assertIn("lambert", new_data.coords["crs"].item().coordinate_operation.method_name.lower()) - self.assertEqual(new_data.coords["y"].attrs["units"], "meter") - self.assertEqual(new_data.coords["x"].attrs["units"], "meter") + assert get_lonlats.call_count == lonlat_calls + assert "y" in new_data.coords + assert "x" in new_data.coords + assert "bands" in new_data.coords + assert "crs" in new_data.coords + 
assert isinstance(new_data.coords["crs"].item(), CRS)
+        assert "lambert" in new_data.coords["crs"].item().coordinate_operation.method_name.lower()
+        assert new_data.coords["y"].attrs["units"] == "meter"
+        assert new_data.coords["x"].attrs["units"] == "meter"
         np.testing.assert_equal(new_data.coords["bands"].values, ["R", "G", "B"])
-        self.assertEqual(target_area.crs, new_data.coords["crs"].item())
+        assert target_area.crs == new_data.coords["crs"].item()


 class TestNativeResampler:
@@ -388,7 +388,7 @@ def test_expand_reduce_aggregate_identity(self):
     @pytest.mark.parametrize("dim0_factor", [1. / 4, 0.333323423, 1.333323423])
     def test_expand_reduce_aggregate_invalid(self, dim0_factor):
         """Test classmethod 'expand_reduce' fails when factor does not divide evenly."""
-        with pytest.raises(ValueError):
+        with pytest.raises(ValueError, match="(Aggregation|Expand) .*"):
             NativeResampler._expand_reduce(self.d_arr, {0: dim0_factor, 1: 1.})

     def test_expand_reduce_agg_rechunk(self):
@@ -469,7 +469,7 @@ def test_expand_without_dims_4D(self):
                                                 input_shape=(2, 3, 100, 50),
                                                 input_dims=None)
         # source geo def doesn't actually matter
         resampler = NativeResampler(source_area, target_area)
-        with pytest.raises(ValueError):
+        with pytest.raises(ValueError, match="Can only handle 2D or 3D arrays without dimensions."):
             resampler.resample(ds1)


@@ -500,14 +500,14 @@ def test_bil_resampling(self, xr_resampler, create_filename,
         new_data = resampler.compute(data, fill_value=fill_value)
         resampler.resampler.get_sample_from_bil_info.assert_called_with(
             data, fill_value=fill_value, output_shape=target_area.shape)
-        self.assertIn("y", new_data.coords)
-        self.assertIn("x", new_data.coords)
-        self.assertIn("crs", new_data.coords)
-        self.assertIsInstance(new_data.coords["crs"].item(), CRS)
-        self.assertIn("lambert", new_data.coords["crs"].item().coordinate_operation.method_name.lower())
-        self.assertEqual(new_data.coords["y"].attrs["units"], "meter")
-        self.assertEqual(new_data.coords["x"].attrs["units"], "meter")
-        self.assertEqual(target_area.crs, new_data.coords["crs"].item())
+        assert "y" in new_data.coords
+        assert "x" in new_data.coords
+        assert "crs" in new_data.coords
+        assert isinstance(new_data.coords["crs"].item(), CRS)
+        assert "lambert" in new_data.coords["crs"].item().coordinate_operation.method_name.lower()
+        assert new_data.coords["y"].attrs["units"] == "meter"
+        assert new_data.coords["x"].attrs["units"] == "meter"
+        assert target_area.crs == new_data.coords["crs"].item()

         # Test that we try to read the resampling info from disk
         resampler = BilinearResampler(source_swath, target_area)
@@ -533,13 +533,13 @@ def test_bil_resampling(self, xr_resampler, create_filename,
         # we already have things cached in-memory, no need to save again
         resampler.resampler.save_resampling_info.assert_called_once()
         # we already have things cached in-memory, don't need to load
-        self.assertEqual(resampler.resampler.get_bil_info.call_count, nbcalls)
+        assert resampler.resampler.get_bil_info.call_count == nbcalls

         # test loading saved resampler
         resampler = BilinearResampler(source_area, target_area)
         resampler.precompute(cache_dir=the_dir)
-        self.assertEqual(resampler.resampler.load_resampling_info.call_count, 3)
-        self.assertEqual(resampler.resampler.get_bil_info.call_count, nbcalls)
+        assert resampler.resampler.load_resampling_info.call_count == 3
+        assert resampler.resampler.get_bil_info.call_count == nbcalls

         resampler = BilinearResampler(source_area, target_area)
         resampler.precompute(cache_dir=the_dir)
@@ -564,10 +564,8 @@ def 
test_move_existing_caches(self): fid.write("42") from satpy.resample import _move_existing_caches _move_existing_caches(the_dir, zarr_file) - self.assertFalse(os.path.exists(zarr_file)) - self.assertTrue(os.path.exists( - os.path.join(the_dir, "moved_by_satpy", - "test.zarr"))) + assert not os.path.exists(zarr_file) + assert os.path.exists(os.path.join(the_dir, "moved_by_satpy", "test.zarr")) # Run again to see that the existing dir doesn't matter with open(zarr_file, "w") as fid: fid.write("42") @@ -594,18 +592,16 @@ def test_area_def_coordinates(self): dims=("y", "x"), ) new_data_arr = add_crs_xy_coords(data_arr, area_def) - self.assertIn("y", new_data_arr.coords) - self.assertIn("x", new_data_arr.coords) - - self.assertIn("units", new_data_arr.coords["y"].attrs) - self.assertEqual( - new_data_arr.coords["y"].attrs["units"], "meter") - self.assertIn("units", new_data_arr.coords["x"].attrs) - self.assertEqual( - new_data_arr.coords["x"].attrs["units"], "meter") - self.assertIn("crs", new_data_arr.coords) - self.assertIsInstance(new_data_arr.coords["crs"].item(), CRS) - self.assertEqual(area_def.crs, new_data_arr.coords["crs"].item()) + assert "y" in new_data_arr.coords + assert "x" in new_data_arr.coords + + assert "units" in new_data_arr.coords["y"].attrs + assert new_data_arr.coords["y"].attrs["units"] == "meter" + assert "units" in new_data_arr.coords["x"].attrs + assert new_data_arr.coords["x"].attrs["units"] == "meter" + assert "crs" in new_data_arr.coords + assert isinstance(new_data_arr.coords["crs"].item(), CRS) + assert area_def.crs == new_data_arr.coords["crs"].item() # already has coords data_arr = xr.DataArray( @@ -615,15 +611,15 @@ def test_area_def_coordinates(self): coords={"y": np.arange(2, 202), "x": np.arange(100)} ) new_data_arr = add_crs_xy_coords(data_arr, area_def) - self.assertIn("y", new_data_arr.coords) - self.assertNotIn("units", new_data_arr.coords["y"].attrs) - self.assertIn("x", new_data_arr.coords) - self.assertNotIn("units", new_data_arr.coords["x"].attrs) + assert "y" in new_data_arr.coords + assert "units" not in new_data_arr.coords["y"].attrs + assert "x" in new_data_arr.coords + assert "units" not in new_data_arr.coords["x"].attrs np.testing.assert_equal(new_data_arr.coords["y"], np.arange(2, 202)) - self.assertIn("crs", new_data_arr.coords) - self.assertIsInstance(new_data_arr.coords["crs"].item(), CRS) - self.assertEqual(area_def.crs, new_data_arr.coords["crs"].item()) + assert "crs" in new_data_arr.coords + assert isinstance(new_data_arr.coords["crs"].item(), CRS) + assert area_def.crs == new_data_arr.coords["crs"].item() # lat/lon area area_def = AreaDefinition( @@ -636,18 +632,16 @@ def test_area_def_coordinates(self): dims=("y", "x"), ) new_data_arr = add_crs_xy_coords(data_arr, area_def) - self.assertIn("y", new_data_arr.coords) - self.assertIn("x", new_data_arr.coords) - - self.assertIn("units", new_data_arr.coords["y"].attrs) - self.assertEqual( - new_data_arr.coords["y"].attrs["units"], "degrees_north") - self.assertIn("units", new_data_arr.coords["x"].attrs) - self.assertEqual( - new_data_arr.coords["x"].attrs["units"], "degrees_east") - self.assertIn("crs", new_data_arr.coords) - self.assertIsInstance(new_data_arr.coords["crs"].item(), CRS) - self.assertEqual(area_def.crs, new_data_arr.coords["crs"].item()) + assert "y" in new_data_arr.coords + assert "x" in new_data_arr.coords + + assert "units" in new_data_arr.coords["y"].attrs + assert new_data_arr.coords["y"].attrs["units"] == "degrees_north" + assert "units" in 
new_data_arr.coords["x"].attrs + assert new_data_arr.coords["x"].attrs["units"] == "degrees_east" + assert "crs" in new_data_arr.coords + assert isinstance(new_data_arr.coords["crs"].item(), CRS) + assert area_def.crs == new_data_arr.coords["crs"].item() def test_swath_def_coordinates(self): """Test coordinates being added with an SwathDefinition.""" @@ -679,11 +673,11 @@ def test_swath_def_coordinates(self): # new_data_arr.coords['latitude'].attrs['units'], 'degrees_north') # self.assertIsInstance(new_data_arr.coords['latitude'].data, da.Array) - self.assertIn("crs", new_data_arr.coords) + assert "crs" in new_data_arr.coords crs = new_data_arr.coords["crs"].item() - self.assertIsInstance(crs, CRS) + assert isinstance(crs, CRS) assert crs.is_geographic - self.assertIsInstance(new_data_arr.coords["crs"].item(), CRS) + assert isinstance(new_data_arr.coords["crs"].item(), CRS) class TestBucketAvg(unittest.TestCase): @@ -702,16 +696,16 @@ def setUp(self): def test_init(self): """Test bucket resampler initialization.""" - self.assertIsNone(self.bucket.resampler) - self.assertTrue(self.bucket.source_geo_def == self.source_geo_def) - self.assertTrue(self.bucket.target_geo_def == self.target_geo_def) + assert self.bucket.resampler is None + assert self.bucket.source_geo_def == self.source_geo_def + assert self.bucket.target_geo_def == self.target_geo_def @mock.patch("pyresample.bucket.BucketResampler") def test_precompute(self, bucket): """Test bucket resampler precomputation.""" bucket.return_value = True self.bucket.precompute() - self.assertTrue(self.bucket.resampler) + assert self.bucket.resampler bucket.assert_called_once_with(self.target_geo_def, 1, 2) def _compute_mocked_bucket_avg(self, data, return_data=None, **kwargs): @@ -729,16 +723,16 @@ def test_compute(self): # 1D data data = da.ones((5,)) res = self._compute_mocked_bucket_avg(data, fill_value=2) - self.assertEqual(res.shape, (1, 5)) + assert res.shape == (1, 5) # 2D data data = da.ones((5, 5)) res = self._compute_mocked_bucket_avg(data, fill_value=2) - self.assertEqual(res.shape, (1, 5, 5)) + assert res.shape == (1, 5, 5) # 3D data data = da.ones((3, 5, 5)) self.bucket.resampler.get_average.return_value = data[0, :, :] res = self._compute_mocked_bucket_avg(data, return_data=data[0, :, :], fill_value=2) - self.assertEqual(res.shape, (3, 5, 5)) + assert res.shape == (3, 5, 5) @mock.patch("satpy.resample.PR_USE_SKIPNA", True) def test_compute_and_use_skipna_handling(self): @@ -805,33 +799,33 @@ def test_resample(self, pyresample_bucket): res = self.bucket.resample(data) self.bucket.precompute.assert_called_once() self.bucket.compute.assert_called_once() - self.assertEqual(res.shape, (5, 5)) - self.assertEqual(res.dims, ("y", "x")) - self.assertTrue("bar" in res.attrs) - self.assertEqual(res.attrs["bar"], "baz") + assert res.shape == (5, 5) + assert res.dims == ("y", "x") + assert "bar" in res.attrs + assert res.attrs["bar"] == "baz" # 2D input data data = xr.DataArray(da.ones((5, 5)), dims=("foo", "bar")) self.bucket.compute.return_value = da.ones((5, 5)) res = self.bucket.resample(data) - self.assertEqual(res.shape, (5, 5)) - self.assertEqual(res.dims, ("y", "x")) + assert res.shape == (5, 5) + assert res.dims == ("y", "x") # 3D input data with 'bands' dim data = xr.DataArray(da.ones((1, 5, 5)), dims=("bands", "foo", "bar"), coords={"bands": ["L"]}) self.bucket.compute.return_value = da.ones((1, 5, 5)) res = self.bucket.resample(data) - self.assertEqual(res.shape, (1, 5, 5)) - self.assertEqual(res.dims, ("bands", "y", "x")) - 
self.assertEqual(res.coords["bands"], ["L"]) + assert res.shape == (1, 5, 5) + assert res.dims == ("bands", "y", "x") + assert res.coords["bands"] == ["L"] # 3D input data with misc dim names data = xr.DataArray(da.ones((3, 5, 5)), dims=("foo", "bar", "baz")) self.bucket.compute.return_value = da.ones((3, 5, 5)) res = self.bucket.resample(data) - self.assertEqual(res.shape, (3, 5, 5)) - self.assertEqual(res.dims, ("foo", "bar", "baz")) + assert res.shape == (3, 5, 5) + assert res.dims == ("foo", "bar", "baz") class TestBucketSum(unittest.TestCase): @@ -861,15 +855,15 @@ def test_compute(self): # 1D data data = da.ones((5,)) res = self._compute_mocked_bucket_sum(data) - self.assertEqual(res.shape, (1, 5)) + assert res.shape == (1, 5) # 2D data data = da.ones((5, 5)) res = self._compute_mocked_bucket_sum(data) - self.assertEqual(res.shape, (1, 5, 5)) + assert res.shape == (1, 5, 5) # 3D data data = da.ones((3, 5, 5)) res = self._compute_mocked_bucket_sum(data, return_data=data[0, :, :]) - self.assertEqual(res.shape, (3, 5, 5)) + assert res.shape == (3, 5, 5) @mock.patch("satpy.resample.PR_USE_SKIPNA", True) def test_compute_and_use_skipna_handling(self): @@ -946,16 +940,16 @@ def test_compute(self): data = da.ones((5,)) res = self._compute_mocked_bucket_count(data) self.bucket.resampler.get_count.assert_called_once_with() - self.assertEqual(res.shape, (1, 5)) + assert res.shape == (1, 5) # 2D data data = da.ones((5, 5)) res = self._compute_mocked_bucket_count(data) self.bucket.resampler.get_count.assert_called_once_with() - self.assertEqual(res.shape, (1, 5, 5)) + assert res.shape == (1, 5, 5) # 3D data data = da.ones((3, 5, 5)) res = self._compute_mocked_bucket_count(data, return_data=data[0, :, :]) - self.assertEqual(res.shape, (3, 5, 5)) + assert res.shape == (3, 5, 5) class TestBucketFraction(unittest.TestCase): @@ -1007,6 +1001,6 @@ def test_resample(self, pyresample_bucket): arr = da.ones((5, 5)) self.bucket.compute.return_value = {0: arr, 1: arr, 2: arr} res = self.bucket.resample(data) - self.assertTrue("categories" in res.coords) - self.assertTrue("categories" in res.dims) - self.assertTrue(np.all(res.coords["categories"] == np.array([0, 1, 2]))) + assert "categories" in res.coords + assert "categories" in res.dims + assert np.all(res.coords["categories"] == np.array([0, 1, 2])) diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index 29d940fbdc..2e38e00b3c 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -27,6 +27,7 @@ import numpy as np import pytest import xarray as xr +from pytest import approx # noqa: PT013 from satpy.utils import ( angle2xyz, @@ -50,176 +51,176 @@ class TestUtils(unittest.TestCase): def test_lonlat2xyz(self): """Test the lonlat2xyz function.""" x__, y__, z__ = lonlat2xyz(0, 0) - self.assertAlmostEqual(x__, 1) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 0) + assert x__ == approx(1) + assert y__ == approx(0) + assert z__ == approx(0) x__, y__, z__ = lonlat2xyz(90, 0) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 1) - self.assertAlmostEqual(z__, 0) + assert x__ == approx(0) + assert y__ == approx(1) + assert z__ == approx(0) x__, y__, z__ = lonlat2xyz(0, 90) - self.assertAlmostEqual(x__, 0) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 1) + assert x__ == approx(0) + assert y__ == approx(0) + assert z__ == approx(1) x__, y__, z__ = lonlat2xyz(180, 0) - self.assertAlmostEqual(x__, -1) - self.assertAlmostEqual(y__, 0) - self.assertAlmostEqual(z__, 0) + assert x__ == 
approx(-1)
+        assert y__ == approx(0)
+        assert z__ == approx(0)

         x__, y__, z__ = lonlat2xyz(-90, 0)
-        self.assertAlmostEqual(x__, 0)
-        self.assertAlmostEqual(y__, -1)
-        self.assertAlmostEqual(z__, 0)
+        assert x__ == approx(0)
+        assert y__ == approx(-1)
+        assert z__ == approx(0)

         x__, y__, z__ = lonlat2xyz(0, -90)
-        self.assertAlmostEqual(x__, 0)
-        self.assertAlmostEqual(y__, 0)
-        self.assertAlmostEqual(z__, -1)
+        assert x__ == approx(0)
+        assert y__ == approx(0)
+        assert z__ == approx(-1)

         x__, y__, z__ = lonlat2xyz(0, 45)
-        self.assertAlmostEqual(x__, np.sqrt(2) / 2)
-        self.assertAlmostEqual(y__, 0)
-        self.assertAlmostEqual(z__, np.sqrt(2) / 2)
+        assert x__ == approx(np.sqrt(2) / 2)
+        assert y__ == approx(0)
+        assert z__ == approx(np.sqrt(2) / 2)

         x__, y__, z__ = lonlat2xyz(0, 60)
-        self.assertAlmostEqual(x__, np.sqrt(1) / 2)
-        self.assertAlmostEqual(y__, 0)
-        self.assertAlmostEqual(z__, np.sqrt(3) / 2)
+        assert x__ == approx(np.sqrt(1) / 2)
+        assert y__ == approx(0)
+        assert z__ == approx(np.sqrt(3) / 2)

     def test_angle2xyz(self):
         """Test the angle2xyz function."""
         x__, y__, z__ = angle2xyz(0, 0)
-        self.assertAlmostEqual(x__, 0)
-        self.assertAlmostEqual(y__, 0)
-        self.assertAlmostEqual(z__, 1)
+        assert x__ == approx(0)
+        assert y__ == approx(0)
+        assert z__ == approx(1)

         x__, y__, z__ = angle2xyz(90, 0)
-        self.assertAlmostEqual(x__, 0)
-        self.assertAlmostEqual(y__, 0)
-        self.assertAlmostEqual(z__, 1)
+        assert x__ == approx(0)
+        assert y__ == approx(0)
+        assert z__ == approx(1)

         x__, y__, z__ = angle2xyz(0, 90)
-        self.assertAlmostEqual(x__, 0)
-        self.assertAlmostEqual(y__, 1)
-        self.assertAlmostEqual(z__, 0)
+        assert x__ == approx(0)
+        assert y__ == approx(1)
+        assert z__ == approx(0)

         x__, y__, z__ = angle2xyz(180, 0)
-        self.assertAlmostEqual(x__, 0)
-        self.assertAlmostEqual(y__, 0)
-        self.assertAlmostEqual(z__, 1)
+        assert x__ == approx(0)
+        assert y__ == approx(0)
+        assert z__ == approx(1)

         x__, y__, z__ = angle2xyz(-90, 0)
-        self.assertAlmostEqual(x__, 0)
-        self.assertAlmostEqual(y__, 0)
-        self.assertAlmostEqual(z__, 1)
+        assert x__ == approx(0)
+        assert y__ == approx(0)
+        assert z__ == approx(1)

         x__, y__, z__ = angle2xyz(0, -90)
-        self.assertAlmostEqual(x__, 0)
-        self.assertAlmostEqual(y__, -1)
-        self.assertAlmostEqual(z__, 0)
+        assert x__ == approx(0)
+        assert y__ == approx(-1)
+        assert z__ == approx(0)

         x__, y__, z__ = angle2xyz(90, 90)
-        self.assertAlmostEqual(x__, 1)
-        self.assertAlmostEqual(y__, 0)
-        self.assertAlmostEqual(z__, 0)
+        assert x__ == approx(1)
+        assert y__ == approx(0)
+        assert z__ == approx(0)

         x__, y__, z__ = angle2xyz(-90, 90)
-        self.assertAlmostEqual(x__, -1)
-        self.assertAlmostEqual(y__, 0)
-        self.assertAlmostEqual(z__, 0)
+        assert x__ == approx(-1)
+        assert y__ == approx(0)
+        assert z__ == approx(0)

         x__, y__, z__ = angle2xyz(180, 90)
-        self.assertAlmostEqual(x__, 0)
-        self.assertAlmostEqual(y__, -1)
-        self.assertAlmostEqual(z__, 0)
+        assert x__ == approx(0)
+        assert y__ == approx(-1)
+        assert z__ == approx(0)

         x__, y__, z__ = angle2xyz(0, -90)
-        self.assertAlmostEqual(x__, 0)
-        self.assertAlmostEqual(y__, -1)
-        self.assertAlmostEqual(z__, 0)
+        assert x__ == approx(0)
+        assert y__ == approx(-1)
+        assert z__ == approx(0)

         x__, y__, z__ = angle2xyz(0, 45)
-        self.assertAlmostEqual(x__, 0)
-        self.assertAlmostEqual(y__, np.sqrt(2) / 2)
-        self.assertAlmostEqual(z__, np.sqrt(2) / 2)
+        assert x__ == approx(0)
+        assert y__ == approx(np.sqrt(2) / 2)
+        assert z__ == approx(np.sqrt(2) / 2)

         x__, y__, z__ = angle2xyz(0, 60)
-        self.assertAlmostEqual(x__, 0)
-        self.assertAlmostEqual(y__, 
np.sqrt(3) / 2) - self.assertAlmostEqual(z__, np.sqrt(1) / 2) + assert x__ == approx(0) + assert y__ == approx(np.sqrt(3) / 2) + assert z__ == approx(np.sqrt(1) / 2) def test_xyz2lonlat(self): """Test xyz2lonlat.""" lon, lat = xyz2lonlat(1, 0, 0) - self.assertAlmostEqual(lon, 0) - self.assertAlmostEqual(lat, 0) + assert lon == approx(0) + assert lat == approx(0) lon, lat = xyz2lonlat(0, 1, 0) - self.assertAlmostEqual(lon, 90) - self.assertAlmostEqual(lat, 0) + assert lon == approx(90) + assert lat == approx(0) lon, lat = xyz2lonlat(0, 0, 1, asin=True) - self.assertAlmostEqual(lon, 0) - self.assertAlmostEqual(lat, 90) + assert lon == approx(0) + assert lat == approx(90) lon, lat = xyz2lonlat(0, 0, 1) - self.assertAlmostEqual(lon, 0) - self.assertAlmostEqual(lat, 90) + assert lon == approx(0) + assert lat == approx(90) lon, lat = xyz2lonlat(np.sqrt(2) / 2, np.sqrt(2) / 2, 0) - self.assertAlmostEqual(lon, 45) - self.assertAlmostEqual(lat, 0) + assert lon == approx(45) + assert lat == approx(0) def test_xyz2angle(self): """Test xyz2angle.""" azi, zen = xyz2angle(1, 0, 0) - self.assertAlmostEqual(azi, 90) - self.assertAlmostEqual(zen, 90) + assert azi == approx(90) + assert zen == approx(90) azi, zen = xyz2angle(0, 1, 0) - self.assertAlmostEqual(azi, 0) - self.assertAlmostEqual(zen, 90) + assert azi == approx(0) + assert zen == approx(90) azi, zen = xyz2angle(0, 0, 1) - self.assertAlmostEqual(azi, 0) - self.assertAlmostEqual(zen, 0) + assert azi == approx(0) + assert zen == approx(0) azi, zen = xyz2angle(0, 0, 1, acos=True) - self.assertAlmostEqual(azi, 0) - self.assertAlmostEqual(zen, 0) + assert azi == approx(0) + assert zen == approx(0) azi, zen = xyz2angle(np.sqrt(2) / 2, np.sqrt(2) / 2, 0) - self.assertAlmostEqual(azi, 45) - self.assertAlmostEqual(zen, 90) + assert azi == approx(45) + assert zen == approx(90) azi, zen = xyz2angle(-1, 0, 0) - self.assertAlmostEqual(azi, -90) - self.assertAlmostEqual(zen, 90) + assert azi == approx(-90) + assert zen == approx(90) azi, zen = xyz2angle(0, -1, 0) - self.assertAlmostEqual(azi, 180) - self.assertAlmostEqual(zen, 90) + assert azi == approx(180) + assert zen == approx(90) def test_proj_units_to_meters(self): """Test proj units to meters conversion.""" prj = "+asd=123123123123" res = proj_units_to_meters(prj) - self.assertEqual(res, prj) + assert res == prj prj = "+a=6378.137" res = proj_units_to_meters(prj) - self.assertEqual(res, "+a=6378137.000") + assert res == "+a=6378137.000" prj = "+a=6378.137 +units=km" res = proj_units_to_meters(prj) - self.assertEqual(res, "+a=6378137.000") + assert res == "+a=6378137.000" prj = "+a=6378.137 +b=6378.137" res = proj_units_to_meters(prj) - self.assertEqual(res, "+a=6378137.000 +b=6378137.000") + assert res == "+a=6378137.000 +b=6378137.000" prj = "+a=6378.137 +b=6378.137 +h=35785.863" res = proj_units_to_meters(prj) - self.assertEqual(res, "+a=6378137.000 +b=6378137.000 +h=35785863.000") + assert res == "+a=6378137.000 +b=6378137.000 +h=35785863.000" class TestGetSatPos: @@ -271,11 +272,11 @@ def test_get_satpos(self, included_prefixes, preference, expected_result): @pytest.mark.parametrize( "attrs", - ( + [ {}, {"orbital_parameters": {"projection_longitude": 1}}, {"satellite_altitude": 1} - ) + ] ) def test_get_satpos_fails_with_informative_error(self, attrs): """Test that get_satpos raises an informative error message.""" @@ -358,10 +359,9 @@ def test_specific_check_satpy(self): checked_fake = False for call in print_mock.mock_calls: if len(call[1]) > 0 and "__fake" in call[1][0]: - self.assertNotIn("ok", 
call[1][1])
+                assert "ok" not in call[1][1]
                 checked_fake = True
-        self.assertTrue(checked_fake, "Did not find __fake module "
-                        "mentioned in checks")
+        assert checked_fake, "Did not find __fake module mentioned in checks"


 def test_debug_on(caplog):
@@ -605,7 +605,7 @@ def test_convert_remote_files_to_fsspec_storage_options(open_files):

 def test_import_error_helper():
     """Test the import error helper."""
     module = "some_crazy_name_for_unknow_dependency_module"
-    with pytest.raises(ImportError) as err:
+    with pytest.raises(ImportError) as err:  # noqa: PT012
         with import_error_helper(module):
             import unknow_dependency_module  # noqa
     assert module in str(err)
diff --git a/satpy/tests/test_writers.py b/satpy/tests/test_writers.py
index 6e1ce7f2e2..c2d049dae1 100644
--- a/satpy/tests/test_writers.py
+++ b/satpy/tests/test_writers.py
@@ -79,7 +79,7 @@ def test_show(self, mock_get_image):
         data = np.arange(25).reshape((5, 5))
         p = xr.DataArray(data, dims=["y", "x"])
         show(p)
-        self.assertTrue(mock_get_image.return_value.show.called)
+        assert mock_get_image.return_value.show.called


 class TestEnhancer(unittest.TestCase):
@@ -89,13 +89,13 @@ def test_basic_init_no_args(self):
         """Test Enhancer init with no arguments passed."""
         from satpy.writers import Enhancer
         e = Enhancer()
-        self.assertIsNotNone(e.enhancement_tree)
+        assert e.enhancement_tree is not None

     def test_basic_init_no_enh(self):
         """Test Enhancer init requesting no enhancements."""
         from satpy.writers import Enhancer
         e = Enhancer(enhancement_config_file=False)
-        self.assertIsNone(e.enhancement_tree)
+        assert e.enhancement_tree is None

     def test_basic_init_provided_enh(self):
         """Test Enhancer init with string enhancement configs."""
@@ -108,7 +108,7 @@ def test_basic_init_provided_enh(self):
               method: !!python/name:satpy.enhancements.stretch
               kwargs: {stretch: linear}
         """])
-        self.assertIsNotNone(e.enhancement_tree)
+        assert e.enhancement_tree is not None

     def test_init_nonexistent_enh_file(self):
         """Test Enhancer init with a nonexistent enhancement configuration file."""
@@ -522,23 +522,21 @@ def _ignore_all_tags(self, tag_suffix, node):
             writer_fn_name = os.path.splitext(writer_fn)[0]
             writer_info = read_writer_config([writer_config], loader=IgnoreLoader)
-            self.assertEqual(writer_fn_name, writer_info["name"],
-                             "Writer YAML filename doesn't match writer "
-                             "name in the YAML file.")
+            assert writer_fn_name == writer_info["name"]

     def test_available_writers(self):
         """Test the 'available_writers' function."""
         from satpy import available_writers
         writer_names = available_writers()
-        self.assertGreater(len(writer_names), 0)
-        self.assertIsInstance(writer_names[0], str)
-        self.assertIn("geotiff", writer_names)
+        assert len(writer_names) > 0
+        assert isinstance(writer_names[0], str)
+        assert "geotiff" in writer_names

         writer_infos = available_writers(as_dict=True)
-        self.assertEqual(len(writer_names), len(writer_infos))
-        self.assertIsInstance(writer_infos[0], dict)
+        assert len(writer_names) == len(writer_infos)
+        assert isinstance(writer_infos[0], dict)
         for writer_info in writer_infos:
-            self.assertIn("name", writer_info)
+            assert "name" in writer_info


 class TestComputeWriterResults(unittest.TestCase):
@@ -584,7 +582,7 @@ def test_simple_image(self):
                                 writer="simple_image",
                                 compute=False)
         compute_writer_results([res])
-        self.assertTrue(os.path.isfile(fname))
+        assert os.path.isfile(fname)

     def test_geotiff(self):
         """Test writing to a geotiff file."""
@@ -594,7 +592,7 @@ def test_geotiff(self):
                                 datasets=["test"],
                                 writer="geotiff",
                                 compute=False)
         compute_writer_results([res])
-        self.assertTrue(os.path.isfile(fname))
+        assert os.path.isfile(fname)
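# A hedged usage sketch of the pattern these writer tests exercise; the paths
# are hypothetical and the Scene is built from synthetic data, mirroring the
# fixtures in this file. With compute=False each save_datasets() call returns
# delayed results, and compute_writer_results() computes them all in a single
# dask pass, so shared inputs are only evaluated once.
import datetime as dt

import dask.array as da
import xarray as xr

from satpy import Scene
from satpy.writers import compute_writer_results

scn = Scene()
scn["test"] = xr.DataArray(da.zeros((100, 200), chunks=50), dims=("y", "x"),
                           attrs={"name": "test", "start_time": dt.datetime.utcnow()})
res1 = scn.save_datasets(filename="/tmp/test.png", datasets=["test"],
                         writer="simple_image", compute=False)
res2 = scn.save_datasets(filename="/tmp/test.tif", datasets=["test"],
                         writer="geotiff", compute=False)
compute_writer_results([res1, res2])  # one compute for both queued writes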
     # FIXME: This reader needs more information than exists at the moment
     # def test_mitiff(self):
@@ -628,8 +626,8 @@ def test_multiple_geotiff(self):
                                  datasets=["test"],
                                  writer="geotiff",
                                  compute=False)
         compute_writer_results([res1, res2])
-        self.assertTrue(os.path.isfile(fname1))
-        self.assertTrue(os.path.isfile(fname2))
+        assert os.path.isfile(fname1)
+        assert os.path.isfile(fname2)

     def test_multiple_simple(self):
         """Test writing to simple image files."""
@@ -643,8 +641,8 @@ def test_multiple_simple(self):
                                  datasets=["test"],
                                  writer="simple_image",
                                  compute=False)
         compute_writer_results([res1, res2])
-        self.assertTrue(os.path.isfile(fname1))
-        self.assertTrue(os.path.isfile(fname2))
+        assert os.path.isfile(fname1)
+        assert os.path.isfile(fname2)

     def test_mixed(self):
         """Test writing to multiple mixed-type files."""
@@ -659,8 +657,8 @@ def test_mixed(self):
                                  writer="geotiff",
                                  compute=False)
         res3 = []
         compute_writer_results([res1, res2, res3])
-        self.assertTrue(os.path.isfile(fname1))
-        self.assertTrue(os.path.isfile(fname2))
+        assert os.path.isfile(fname1)
+        assert os.path.isfile(fname2)


 class TestBaseWriter:
@@ -805,9 +803,9 @@ def test_add_overlay_basic_rgb(self):
         with mock.patch.object(self.orig_rgb_img, "apply_pil") as apply_pil:
             apply_pil.return_value = self.orig_rgb_img
             new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir, fill_value=0)
-            self.assertEqual(self.orig_rgb_img.mode, new_img.mode)
+            assert self.orig_rgb_img.mode == new_img.mode
             new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir)
-            self.assertEqual(self.orig_rgb_img.mode + "A", new_img.mode)
+            assert self.orig_rgb_img.mode + "A" == new_img.mode

             with mock.patch.object(self.orig_rgb_img, "convert") as convert:
                 convert.return_value = self.orig_rgb_img
@@ -849,21 +847,21 @@ def test_add_overlay_basic_l(self):
         """Test basic add_overlay usage with L data."""
         from satpy.writers import add_overlay
         new_img = add_overlay(self.orig_l_img, self.area_def, "", fill_value=0)
-        self.assertEqual("RGB", new_img.mode)
+        assert "RGB" == new_img.mode
         new_img = add_overlay(self.orig_l_img, self.area_def, "")
-        self.assertEqual("RGBA", new_img.mode)
+        assert "RGBA" == new_img.mode

     def test_add_decorate_basic_rgb(self):
         """Test basic add_decorate usage with RGB data."""
         from satpy.writers import add_decorate
         new_img = add_decorate(self.orig_rgb_img, **self.decorate)
-        self.assertEqual("RGBA", new_img.mode)
+        assert "RGBA" == new_img.mode

     def test_add_decorate_basic_l(self):
         """Test basic add_decorate usage with L data."""
         from satpy.writers import add_decorate
         new_img = add_decorate(self.orig_l_img, **self.decorate)
-        self.assertEqual("RGBA", new_img.mode)
+        assert "RGBA" == new_img.mode


 def test_group_results_by_output_file(tmp_path):
diff --git a/satpy/tests/test_yaml_reader.py b/satpy/tests/test_yaml_reader.py
index b829f46d23..35752cd237 100644
--- a/satpy/tests/test_yaml_reader.py
+++ b/satpy/tests/test_yaml_reader.py
@@ -124,7 +124,7 @@ def test_get_filebase(self):
         pattern = os.path.join(*pattern.split("/"))
         filename = os.path.join(base_dir, "Oa05_radiance.nc")
         expected = os.path.join(base_data, "Oa05_radiance.nc")
-        self.assertEqual(yr._get_filebase(filename, pattern), expected)
+        assert yr._get_filebase(filename, pattern) == expected

     def test_match_filenames(self):
         """Check that matching filenames works."""
@@ -143,7 +143,7 @@ def test_match_filenames(self):
         filenames = [os.path.join(base_dir, "Oa05_radiance.nc"), 
os.path.join(base_dir, "geo_coordinates.nc")] expected = os.path.join(base_dir, "geo_coordinates.nc") - self.assertEqual(yr._match_filenames(filenames, pattern), {expected}) + assert yr._match_filenames(filenames, pattern) == {expected} def test_match_filenames_windows_forward_slash(self): """Check that matching filenames works on Windows with forward slashes. @@ -166,14 +166,13 @@ def test_match_filenames_windows_forward_slash(self): filenames = [os.path.join(base_dir, "Oa05_radiance.nc").replace(os.sep, "/"), os.path.join(base_dir, "geo_coordinates.nc").replace(os.sep, "/")] expected = os.path.join(base_dir, "geo_coordinates.nc").replace(os.sep, "/") - self.assertEqual(yr._match_filenames(filenames, pattern), {expected}) + assert yr._match_filenames(filenames, pattern) == {expected} def test_listify_string(self): """Check listify_string.""" - self.assertEqual(yr.listify_string(None), []) - self.assertEqual(yr.listify_string("some string"), ["some string"]) - self.assertEqual(yr.listify_string(["some", "string"]), - ["some", "string"]) + assert yr.listify_string(None) == [] + assert yr.listify_string("some string") == ["some string"] + assert yr.listify_string(["some", "string"]) == ["some", "string"] class DummyReader(BaseFileHandler): @@ -237,8 +236,8 @@ def test_select_from_pathnames(self): res = self.reader.select_files_from_pathnames(filelist) for expected in ["a001.bla", "a002.bla", "abcd.bla"]: - self.assertIn(expected, res) - self.assertEqual(len(res), 3) + assert expected in res + assert len(res) == 3 def test_fn_items_for_ft(self): """Check filename_items_for_filetype.""" @@ -247,7 +246,7 @@ def test_fn_items_for_ft(self): fiter = self.reader.filename_items_for_filetype(filelist, ft_info) filenames = dict(fname for fname in fiter) - self.assertEqual(len(filenames.keys()), 3) + assert len(filenames.keys()) == 3 def test_create_filehandlers(self): """Check create_filehandlers.""" @@ -255,7 +254,7 @@ def test_create_filehandlers(self): "abcd.bla", "k001.bla", "a003.bli"] self.reader.create_filehandlers(filelist) - self.assertEqual(len(self.reader.file_handlers["ftype1"]), 3) + assert len(self.reader.file_handlers["ftype1"]) == 3 def test_serializable(self): """Check that a reader is serializable by dask. 
@@ -342,29 +341,22 @@ def test_all_data_ids(self): def test_all_dataset_names(self): """Get all dataset names.""" - self.assertSetEqual(self.reader.all_dataset_names, - set(["ch01", "ch02", "lons", "lats"])) + assert self.reader.all_dataset_names == set(["ch01", "ch02", "lons", "lats"]) def test_available_dataset_ids(self): """Get ids of the available datasets.""" loadables = self.reader.select_files_from_pathnames(["a001.bla"]) self.reader.create_filehandlers(loadables) - self.assertSetEqual(set(self.reader.available_dataset_ids), - {make_dataid(name="ch02", - wavelength=(0.7, 0.75, 0.8), - calibration="counts", - modifiers=()), - make_dataid(name="ch01", - wavelength=(0.5, 0.6, 0.7), - calibration="reflectance", - modifiers=())}) + assert set(self.reader.available_dataset_ids) == {make_dataid(name="ch02", wavelength=(0.7, 0.75, 0.8), + calibration="counts", modifiers=()), + make_dataid(name="ch01", wavelength=(0.5, 0.6, 0.7), + calibration="reflectance", modifiers=())} def test_available_dataset_names(self): """Get names of the available datasets.""" loadables = self.reader.select_files_from_pathnames(["a001.bla"]) self.reader.create_filehandlers(loadables) - self.assertSetEqual(set(self.reader.available_dataset_names), - set(["ch01", "ch02"])) + assert set(self.reader.available_dataset_names) == set(["ch01", "ch02"]) def test_filter_fh_by_time(self): """Check filtering filehandlers by time.""" @@ -383,11 +375,11 @@ def test_filter_fh_by_time(self): for idx, fh in enumerate([fh0, fh1, fh2, fh3, fh4, fh5]): res = self.reader.time_matches(fh.start_time, fh.end_time) # only the first one should be false - self.assertEqual(res, idx not in [0, 4]) + assert res == (idx not in [0, 4]) for idx, fh in enumerate([fh0, fh1, fh2, fh3, fh4, fh5]): res = self.reader.time_matches(fh.start_time, None) - self.assertEqual(res, idx not in [0, 1, 4, 5]) + assert res == (idx not in [0, 1, 4, 5]) @patch("satpy.readers.yaml_reader.get_area_def") @patch("satpy.readers.yaml_reader.AreaDefBoundary") @@ -401,17 +393,17 @@ def test_file_covers_area(self, bnd, adb, gad): bnd.return_value.contour_poly.intersection.return_value = True adb.return_value.contour_poly.intersection.return_value = True res = self.reader.check_file_covers_area(file_handler, True) - self.assertTrue(res) + assert res bnd.return_value.contour_poly.intersection.return_value = False adb.return_value.contour_poly.intersection.return_value = False res = self.reader.check_file_covers_area(file_handler, True) - self.assertFalse(res) + assert not res file_handler.get_bounding_box.side_effect = NotImplementedError() self.reader.filter_parameters["area"] = True res = self.reader.check_file_covers_area(file_handler, True) - self.assertTrue(res) + assert res def test_start_end_time(self): """Check start and end time behaviours.""" @@ -446,8 +438,8 @@ def get_end_time(): "2": [fh2, fh3], } - self.assertEqual(self.reader.start_time, datetime(1999, 12, 30, 0, 0)) - self.assertEqual(self.reader.end_time, datetime(2000, 1, 3, 12, 30)) + assert self.reader.start_time == datetime(1999, 12, 30, 0, 0) + assert self.reader.end_time == datetime(2000, 1, 3, 12, 30) def test_select_from_pathnames(self): """Check select_files_from_pathnames.""" @@ -455,9 +447,9 @@ def test_select_from_pathnames(self): res = self.reader.select_files_from_pathnames(filelist) for expected in ["a001.bla", "a002.bla", "abcd.bla"]: - self.assertIn(expected, res) + assert expected in res - self.assertEqual(0, len(self.reader.select_files_from_pathnames([]))) + assert 0 ==
len(self.reader.select_files_from_pathnames([])) def test_select_from_directory(self): """Check select_files_from_directory.""" @@ -469,12 +461,11 @@ def test_select_from_directory(self): res = self.reader.select_files_from_directory(dpath) for expected in ["a001.bla", "a002.bla", "abcd.bla"]: - self.assertIn(os.path.join(dpath, expected), res) + assert os.path.join(dpath, expected) in res for fname in filelist: os.remove(os.path.join(dpath, fname)) - self.assertEqual(0, - len(self.reader.select_files_from_directory(dpath))) + assert 0 == len(self.reader.select_files_from_directory(dpath)) os.rmdir(dpath) from fsspec.implementations.local import LocalFileSystem @@ -484,14 +475,12 @@ def glob(self, pattern): return ["/grocery/apricot.nc", "/grocery/aubergine.nc"] res = self.reader.select_files_from_directory(dpath, fs=Silly()) - self.assertEqual( - res, - {"/grocery/apricot.nc", "/grocery/aubergine.nc"}) + assert res == {"/grocery/apricot.nc", "/grocery/aubergine.nc"} def test_supports_sensor(self): """Check supports_sensor.""" - self.assertTrue(self.reader.supports_sensor("canon")) - self.assertFalse(self.reader.supports_sensor("nikon")) + assert self.reader.supports_sensor("canon") + assert not self.reader.supports_sensor("nikon") @patch("satpy.readers.yaml_reader.StackedAreaDefinition") def test_load_area_def(self, sad): @@ -502,33 +491,31 @@ def test_load_area_def(self, sad): for _i in range(items): file_handlers.append(MagicMock()) final_area = self.reader._load_area_def(dataid, file_handlers) - self.assertEqual(final_area, sad.return_value.squeeze.return_value) + assert final_area == sad.return_value.squeeze.return_value args, kwargs = sad.call_args - self.assertEqual(len(args), items) + assert len(args) == items def test_preferred_filetype(self): """Test finding the preferred filetype.""" self.reader.file_handlers = {"a": "a", "b": "b", "c": "c"} - self.assertEqual(self.reader._preferred_filetype(["c", "a"]), "c") - self.assertEqual(self.reader._preferred_filetype(["a", "c"]), "a") - self.assertEqual(self.reader._preferred_filetype(["d", "e"]), None) + assert self.reader._preferred_filetype(["c", "a"]) == "c" + assert self.reader._preferred_filetype(["a", "c"]) == "a" + assert self.reader._preferred_filetype(["d", "e"]) is None def test_get_coordinates_for_dataset_key(self): """Test getting coordinates for a key.""" ds_q = DataQuery(name="ch01", wavelength=(0.5, 0.6, 0.7, "µm"), calibration="reflectance", modifiers=()) res = self.reader._get_coordinates_for_dataset_key(ds_q) - self.assertListEqual(res, - [make_dataid(name="lons"), - make_dataid(name="lats")]) + assert res == [make_dataid(name="lons"), make_dataid(name="lats")] def test_get_coordinates_for_dataset_key_without(self): """Test getting coordinates for a key without coordinates.""" ds_id = make_dataid(name="lons", modifiers=()) res = self.reader._get_coordinates_for_dataset_key(ds_id) - self.assertListEqual(res, []) + assert res == [] def test_get_coordinates_for_dataset_keys(self): """Test getting coordinates for keys.""" @@ -543,7 +530,7 @@ def test_get_coordinates_for_dataset_keys(self): lons]) expected = {ds_id1: [lons, lats], ds_id2: [lons, lats], lons: []} - self.assertDictEqual(res, expected) + assert res == expected def test_get_file_handlers(self): """Test getting filehandler to load a dataset.""" @@ -551,10 +538,10 @@ def test_get_file_handlers(self): calibration="reflectance", modifiers=()) self.reader.file_handlers = {"ftype1": "bla"} - self.assertEqual(self.reader._get_file_handlers(ds_id1), "bla") + assert 
self.reader._get_file_handlers(ds_id1) == "bla" lons = make_dataid(name="lons", modifiers=()) - self.assertEqual(self.reader._get_file_handlers(lons), None) + assert self.reader._get_file_handlers(lons) is None @patch("satpy.readers.yaml_reader.xr") def test_load_entire_dataset(self, xarray): @@ -564,7 +551,7 @@ def test_load_entire_dataset(self, xarray): proj = self.reader._load_dataset(None, {}, file_handlers) - self.assertIs(proj, xarray.concat.return_value) + assert proj is xarray.concat.return_value class TestFileYAMLReaderLoading(unittest.TestCase): @@ -711,7 +698,7 @@ def test_update_ds_ids_from_file_handlers(self): if not isinstance(file_types, list): file_types = [file_types] if ftype in file_types: - self.assertEqual(resol, ds_id["resolution"]) + assert resol == ds_id["resolution"] # Test methods @@ -1016,13 +1003,13 @@ def test_get_expected_segments(self, cfh): # default (1) created_fhs = reader.create_filehandlers(["fake.nc"]) es = created_fhs["ft1"][0].filetype_info["expected_segments"] - self.assertEqual(es, 1) + assert es == 1 # YAML defined for each file type fake_fh.filetype_info["expected_segments"] = 2 created_fhs = reader.create_filehandlers(["fake.nc"]) es = created_fhs["ft1"][0].filetype_info["expected_segments"] - self.assertEqual(es, 2) + assert es == 2 # defined both in the filename and the YAML metadata # YAML has priority @@ -1030,20 +1017,20 @@ def test_get_expected_segments(self, cfh): fake_fh.filetype_info = {"expected_segments": 2} created_fhs = reader.create_filehandlers(["fake.nc"]) es = created_fhs["ft1"][0].filetype_info["expected_segments"] - self.assertEqual(es, 2) + assert es == 2 # defined in the filename fake_fh.filename_info = {"total_segments": 3} fake_fh.filetype_info = {} created_fhs = reader.create_filehandlers(["fake.nc"]) es = created_fhs["ft1"][0].filetype_info["expected_segments"] - self.assertEqual(es, 3) + assert es == 3 # check correct FCI segment (aka chunk in the FCI world) number reading into segment fake_fh.filename_info = {"count_in_repeat_cycle": 5} created_fhs = reader.create_filehandlers(["fake.nc"]) es = created_fhs["ft1"][0].filename_info["segment"] - self.assertEqual(es, 5) + assert es == 5 @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch("satpy.readers.yaml_reader.FileYAMLReader._load_dataset") @@ -1082,8 +1069,8 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): # No missing segments res = reader._load_dataset(dataid, ds_info, file_handlers) - self.assertTrue(res.attrs is file_handlers[0].combine_info.return_value) - self.assertTrue(empty_segment not in slice_list) + assert res.attrs is file_handlers[0].combine_info.return_value + assert empty_segment not in slice_list # One missing segment in the middle slice_list[4] = None @@ -1091,7 +1078,7 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, file_handlers) - self.assertTrue(slice_list[4] is empty_segment) + assert slice_list[4] is empty_segment # The last segment is missing slice_list = expected_segments * [seg, ] @@ -1100,7 +1087,7 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, file_handlers) - self.assertTrue(slice_list[-1] is empty_segment) + assert slice_list[-1] is empty_segment # The last two segments are missing slice_list = expected_segments * [seg, ] @@ -1109,8 
+1096,8 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, file_handlers) - self.assertTrue(slice_list[-1] is empty_segment) - self.assertTrue(slice_list[-2] is empty_segment) + assert slice_list[-1] is empty_segment + assert slice_list[-2] is empty_segment # The first segment is missing slice_list = expected_segments * [seg, ] @@ -1119,7 +1106,7 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, file_handlers) - self.assertTrue(slice_list[0] is empty_segment) + assert slice_list[0] is empty_segment # The first two segments are missing slice_list = expected_segments * [seg, ] @@ -1129,8 +1116,8 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, file_handlers) - self.assertTrue(slice_list[0] is empty_segment) - self.assertTrue(slice_list[1] is empty_segment) + assert slice_list[0] is empty_segment + assert slice_list[1] is empty_segment # Disable padding res = reader._load_dataset(dataid, ds_info, file_handlers, @@ -1174,7 +1161,7 @@ def test_pad_later_segments_area(self, AreaDefinition): file_handlers = [fh_1] dataid = "dataid" res = reader._pad_later_segments_area(file_handlers, dataid) - self.assertEqual(len(res), 2) + assert len(res) == 2 seg2_extent = (0, 1500, 200, 1000) expected_call = ("fill", "fill", "fill", "some_crs", 500, 200, seg2_extent) @@ -1197,7 +1184,7 @@ def test_pad_earlier_segments_area(self, AreaDefinition): dataid = "dataid" area_defs = {2: seg2_area} res = reader._pad_earlier_segments_area(file_handlers, dataid, area_defs) - self.assertEqual(len(res), 2) + assert len(res) == 2 seg1_extent = (0, 500, 200, 0) expected_call = ("fill", "fill", "fill", "some_crs", 500, 200, seg1_extent) @@ -1219,11 +1206,11 @@ def test_find_missing_segments(self): dataid = "dataid" res = fms(file_handlers, ds_info, dataid) counter, expected_segments, slice_list, failure, proj = res - self.assertEqual(counter, 2) - self.assertEqual(expected_segments, 1) - self.assertTrue(projectable in slice_list) - self.assertFalse(failure) - self.assertTrue(proj is projectable) + assert counter == 2 + assert expected_segments == 1 + assert projectable in slice_list + assert failure is False + assert proj is projectable # Three expected segments, first and last missing filename_info = {"segment": 2} @@ -1240,14 +1227,14 @@ def test_find_missing_segments(self): dataid = "dataid" res = fms(file_handlers, ds_info, dataid) counter, expected_segments, slice_list, failure, proj = res - self.assertEqual(counter, 3) - self.assertEqual(expected_segments, 3) - self.assertEqual(slice_list, [None, projectable, None]) - self.assertFalse(failure) - self.assertTrue(proj is projectable) + assert counter == 3 + assert expected_segments == 3 + assert slice_list == [None, projectable, None] + assert failure is False + assert proj is projectable -@pytest.fixture +@pytest.fixture() @patch.object(yr.GEOVariableSegmentYAMLReader, "__init__", lambda x: None) def GVSYReader(): """Get a fixture of the GEOVariableSegmentYAMLReader.""" @@ -1258,28 +1245,28 @@ def GVSYReader(): return reader -@pytest.fixture +@pytest.fixture() def fake_geswh(): """Get a fixture of the patched _get_empty_segment_with_height.""" with 
patch("satpy.readers.yaml_reader._get_empty_segment_with_height") as geswh: yield geswh -@pytest.fixture +@pytest.fixture() def fake_xr(): """Get a fixture of the patched xarray.""" with patch("satpy.readers.yaml_reader.xr") as xr: yield xr -@pytest.fixture +@pytest.fixture() def fake_mss(): """Get a fixture of the patched _find_missing_segments.""" with patch("satpy.readers.yaml_reader._find_missing_segments") as mss: yield mss -@pytest.fixture +@pytest.fixture() def fake_adef(): """Get a fixture of the patched AreaDefinition.""" with patch("satpy.readers.yaml_reader.AreaDefinition") as adef: diff --git a/satpy/tests/utils.py b/satpy/tests/utils.py index 70f1ec80e5..ca958fce37 100644 --- a/satpy/tests/utils.py +++ b/satpy/tests/utils.py @@ -156,8 +156,8 @@ def __call__(self, datasets, optional_datasets=None, **kwargs): opt_dep_name = opt_dep if isinstance(opt_dep, str) else opt_dep.get("name", "") if "NOPE" in opt_dep_name or "fail" in opt_dep_name: continue - assert (optional_datasets is not None and - len(optional_datasets)) + assert optional_datasets is not None + assert len(optional_datasets) resolution = datasets[0].attrs.get("resolution") mod_name = self.attrs["modifiers"][-1] data = datasets[0].data diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 0c9ca9f234..6c8977662a 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -354,7 +354,7 @@ def test_groups(self): # Different projection coordinates in one group are not supported with TempFile() as filename: - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Datasets .* must have identical projection coordinates..*"): scn.save_datasets(datasets=["VIS006", "HRV"], filename=filename, writer="cf") def test_single_time_value(self): @@ -731,7 +731,7 @@ def test_assert_xy_unique(self): assert_xy_unique(datas) datas["c"] = xr.DataArray(data=dummy, dims=("y", "x"), coords={"y": [1, 3], "x": [3, 4]}) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Datasets .* must have identical projection coordinates..*"): assert_xy_unique(datas) def test_link_coords(self): @@ -1149,7 +1149,7 @@ def test_global_attr_history_and_Conventions(self): class TestCFWriterData: """Test case for CF writer where data arrays are needed.""" - @pytest.fixture + @pytest.fixture() def datasets(self): """Create test dataset.""" data = [[75, 2], [3, 4]] @@ -1226,7 +1226,7 @@ def test_collect_cf_dataarrays_with_latitude_named_lat(self, datasets): class EncodingUpdateTest: """Test update of netCDF encoding.""" - @pytest.fixture + @pytest.fixture() def fake_ds(self): """Create fake data for testing.""" ds = xr.Dataset({"foo": (("y", "x"), [[1, 2], [3, 4]]), @@ -1236,7 +1236,7 @@ def fake_ds(self): "lon": (("y", "x"), [[7, 8], [9, 10]])}) return ds - @pytest.fixture + @pytest.fixture() def fake_ds_digit(self): """Create fake data for testing.""" ds_digit = xr.Dataset({"CHANNEL_1": (("y", "x"), [[1, 2], [3, 4]]), @@ -1327,7 +1327,7 @@ def test_with_time(self, fake_ds): class TestEncodingKwarg: """Test CF writer with 'encoding' keyword argument.""" - @pytest.fixture + @pytest.fixture() def scene(self): """Create a fake scene.""" scn = Scene() @@ -1343,7 +1343,7 @@ def compression_on(self, request): """Get compression options.""" return request.param - @pytest.fixture + @pytest.fixture() def encoding(self, compression_on): """Get encoding.""" enc = { @@ -1359,19 +1359,19 @@ def encoding(self, compression_on): enc["test-array"].update(comp_params) 
return enc - @pytest.fixture + @pytest.fixture() def filename(self, tmp_path): """Get output filename.""" return str(tmp_path / "test.nc") - @pytest.fixture + @pytest.fixture() def complevel_exp(self, compression_on): """Get expected compression level.""" if compression_on: return 7 return 0 - @pytest.fixture + @pytest.fixture() def expected(self, complevel_exp): """Get expected file contents.""" return { @@ -1419,7 +1419,7 @@ def test_no_warning_if_backends_match(self, scene, filename, monkeypatch): class TestEncodingAttribute(TestEncodingKwarg): """Test CF writer with 'encoding' dataset attribute.""" - @pytest.fixture + @pytest.fixture() def scene_with_encoding(self, scene, encoding): """Create scene with a dataset providing the 'encoding' attribute.""" scene["test-array"].encoding = encoding["test-array"] diff --git a/satpy/tests/writer_tests/test_mitiff.py b/satpy/tests/writer_tests/test_mitiff.py index 4e5c8b7c9c..b4ff371dab 100644 --- a/satpy/tests/writer_tests/test_mitiff.py +++ b/satpy/tests/writer_tests/test_mitiff.py @@ -593,7 +593,7 @@ def test_save_one_dataset(self): imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, os.listdir(self.base_dir)[0])) for key in imgdesc: if "In this file" in key: - self.assertEqual(key, " Channels: 1 In this file: 1") + assert key == " Channels: 1 In this file: 1" def test_save_one_dataset_sensor_set(self): """Test basic writer operation with one dataset, i.e. no bands.""" @@ -604,7 +604,7 @@ def test_save_one_dataset_sensor_set(self): imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, os.listdir(self.base_dir)[0])) for key in imgdesc: if "In this file" in key: - self.assertEqual(key, " Channels: 1 In this file: 1") + assert key == " Channels: 1 In this file: 1" def test_save_dataset_with_calibration(self): """Test writer operation with calibration.""" @@ -751,29 +751,29 @@ def test_save_dataset_with_calibration(self): if "Table_calibration" in key: found_table_calibration = True if "1-VIS0.63" in key: - self.assertEqual(key, expected_key_channel[0]) + assert key == expected_key_channel[0] number_of_calibrations += 1 elif "2-VIS0.86" in key: - self.assertEqual(key, expected_key_channel[1]) + assert key == expected_key_channel[1] number_of_calibrations += 1 elif "3(3B)-IR3.7" in key: - self.assertEqual(key, expected_key_channel[2]) + assert key == expected_key_channel[2] number_of_calibrations += 1 elif "4-IR10.8" in key: - self.assertEqual(key, expected_key_channel[3]) + assert key == expected_key_channel[3] number_of_calibrations += 1 elif "5-IR11.5" in key: - self.assertEqual(key, expected_key_channel[4]) + assert key == expected_key_channel[4] number_of_calibrations += 1 elif "6(3A)-VIS1.6" in key: - self.assertEqual(key, expected_key_channel[5]) + assert key == expected_key_channel[5] number_of_calibrations += 1 else: self.fail("Not a valid channel description in the given key.") - self.assertTrue(found_table_calibration, "Table_calibration is not found in the imagedescription.") - self.assertEqual(number_of_calibrations, 6) + assert found_table_calibration, "Table_calibration is not found in the imagedescription."
+ assert number_of_calibrations == 6 pillow_tif = Image.open(os.path.join(self.base_dir, filename)) - self.assertEqual(pillow_tif.n_frames, 6) + assert pillow_tif.n_frames == 6 self._read_back_mitiff_and_check(os.path.join(self.base_dir, filename), expected) def test_save_dataset_with_calibration_one_dataset(self): @@ -817,10 +817,10 @@ def test_save_dataset_with_calibration_one_dataset(self): if "Table_calibration" in key: found_table_calibration = True if "BT" in key: - self.assertEqual(key, expected_key_channel[0]) + assert key == expected_key_channel[0] number_of_calibrations += 1 - self.assertTrue(found_table_calibration, "Expected table_calibration is not found in the imagedescription.") - self.assertEqual(number_of_calibrations, 1) + assert found_table_calibration, "Expected table_calibration is not found in the imagedescription." + assert number_of_calibrations == 1 self._read_back_mitiff_and_check(os.path.join(self.base_dir, filename), expected) def test_save_dataset_with_bad_value(self): @@ -883,7 +883,7 @@ def test_convert_proj4_string(self): w = MITIFFWriter(filename="dummy.tif", base_dir=self.base_dir) proj4_string = w._add_proj4_string(ds1, ds1) - self.assertEqual(proj4_string, check["proj4"]) + assert proj4_string == check["proj4"] def test_save_dataset_palette(self): """Test writer operation as palette.""" @@ -934,11 +934,11 @@ def test_save_dataset_palette(self): dataset.attrs["start_time"]) pillow_tif = Image.open(os.path.join(self.base_dir, filename)) # Need to check PHOTOMETRIC is 3, i.e. palette - self.assertEqual(pillow_tif.tag_v2.get(262), 3) + assert pillow_tif.tag_v2.get(262) == 3 # Check the colormap of the palette image palette = pillow_tif.palette colormap = list((palette.getdata())[1]) - self.assertEqual(colormap, exp_c) + assert colormap == exp_c imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, filename)) found_color_info = False unit_name_found = False @@ -961,11 +961,11 @@ def test_save_dataset_palette(self): elif "COLOR INFO:" in key: found_color_info = True # Check the name of the palette description - self.assertEqual(name_length, 2) + assert name_length == 2 # Check the name and unit name of the palette - self.assertEqual(unit_name, " Test") + assert unit_name == " Test" # Check the palette description of the palette - self.assertEqual(names, [" test", " test2"]) + assert names == [" test", " test2"] self._read_back_mitiff_and_check(os.path.join(self.base_dir, filename), expected) def test_simple_write_two_bands(self): @@ -987,7 +987,7 @@ def test_get_test_dataset_three_bands_prereq(self): imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, filename)) for element in imgdesc: if " Channels:" in element: - self.assertEqual(element, " Channels: 3 In this file: 1 2 3") + assert element == " Channels: 3 In this file: 1 2 3" def test_save_dataset_with_calibration_error_one_dataset(self): """Test saving a mitiff dataset with only one channel and invalid calibration.""" @@ -1010,7 +1010,7 @@ def test_save_dataset_with_calibration_error_one_dataset(self): with self.assertLogs(logger) as lc: w._add_calibration_datasets(4, dataset, _reverse_offset, _reverse_scale, _decimals) for _op in lc.output: - self.assertIn("Unknown calib type. Must be Radiance, Reflectance or BT.", _op) + assert "Unknown calib type. Must be Radiance, Reflectance or BT."
in _op finally: logger.removeHandler(stream_handler) @@ -1039,7 +1039,6 @@ def test_save_dataset_with_missing_palette(self): with self.assertLogs(logger, logging.ERROR) as lc: w._save_as_palette(dataset.compute(), os.path.join(self.base_dir, filename), tiffinfo, **palette) for _op in lc.output: - self.assertIn(("In a mitiff palette image a color map must be provided: " - "palette_color_map is missing."), _op) + assert "In a mitiff palette image a color map must be provided: palette_color_map is missing." in _op finally: logger.removeHandler(stream_handler) diff --git a/satpy/tests/writer_tests/test_ninjogeotiff.py b/satpy/tests/writer_tests/test_ninjogeotiff.py index bba3e9b44e..a9c60bdf90 100644 --- a/satpy/tests/writer_tests/test_ninjogeotiff.py +++ b/satpy/tests/writer_tests/test_ninjogeotiff.py @@ -463,8 +463,8 @@ def ntg_latlon(test_image_latlon): SatelliteNameID=654321) -@pytest.fixture -def patch_datetime_now(monkeypatch): +@pytest.fixture() +def _patch_datetime_now(monkeypatch): """Get a fake datetime.datetime.now().""" # Source: https://stackoverflow.com/a/20503374/974555, CC-BY-SA 4.0 @@ -741,11 +741,13 @@ def test_calc_single_tag_by_name(ntg1, ntg2, ntg3): assert ntg2.get_tag("DataType") == "GORN" assert ntg3.get_tag("DataType") == "PPRN" assert ntg1.get_tag("DataSource") == "dowsing rod" - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Unknown tag: invalid"): ntg1.get_tag("invalid") - with pytest.raises(ValueError): + with pytest.raises(ValueError, + match="Optional tag OriginalHeader must be supplied by user if user wants to request the value," + " but wasn't."): ntg1.get_tag("OriginalHeader") - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Tag Gradient is added later by the GeoTIFF writer."): ntg1.get_tag("Gradient") @@ -773,11 +775,12 @@ def test_get_color_depth(ntg1, ntg2, ntg3, ntg_weird, ntg_rgba, ntg_cmyk): assert ntg3.get_color_depth() == 8 # mode P assert ntg_weird.get_color_depth() == 16 # mode LA assert ntg_rgba.get_color_depth() == 32 # mode RGBA - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Unsupported image mode: CMYK"): ntg_cmyk.get_color_depth() -def test_get_creation_date_id(ntg1, ntg2, ntg3, patch_datetime_now): +@pytest.mark.usefixtures("_patch_datetime_now") +def test_get_creation_date_id(ntg1, ntg2, ntg3): """Test getting the creation date ID. This is the time at which the file was created. 
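The pattern driving most of these test edits is ruff's PT011 rule: a bare pytest.raises(ValueError) gains a match= argument so the test can no longer pass on an unrelated ValueError. A minimal self-contained sketch of the idiom, with a hypothetical validate_mode helper standing in for the writer code:

.. code-block:: python

    import pytest


    def validate_mode(mode):
        """Raise for image modes the writer does not support (illustrative helper)."""
        if mode not in ("L", "RGB", "RGBA"):
            raise ValueError(f"Unsupported image mode: {mode}")


    def test_rejects_cmyk():
        # match= is a regular expression applied with re.search against
        # str(excinfo.value), so a different ValueError still fails the test.
        with pytest.raises(ValueError, match="Unsupported image mode: CMYK"):
            validate_mode("CMYK")
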
@@ -887,7 +890,7 @@ def test_get_projection(ntg1, ntg2, ntg3, ntg_weird, ntg_rgba, ntg_cmyk, assert ntg_cmyk.get_projection() == "SPOL" assert ntg_rgba.get_projection() == "MERC" assert ntg_latlon.get_projection() == "PLAT" - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Unknown mapping from area .*"): ntg_weird.get_projection() @@ -898,7 +901,7 @@ def test_get_ref_lat_1(ntg1, ntg2, ntg3, ntg_weird, ntg_latlon): np.testing.assert_allclose(rl1, 0.0) np.testing.assert_allclose(ntg2.get_ref_lat_1(), 2.5) np.testing.assert_allclose(ntg3.get_ref_lat_1(), 75) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Could not find reference latitude for area test-area-north-stereo"): ntg_weird.get_ref_lat_1() with pytest.raises(AttributeError): ntg_latlon.get_ref_lat_1() @@ -945,7 +948,7 @@ def test_get_ymax(ntg1, ntg2, ntg3): def test_create_unknown_tags(test_image_small_arctic_P): """Test that unknown tags raise ValueError.""" from satpy.writers.ninjogeotiff import NinJoTagGenerator - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="The following tags were not recognised: Locatie"): NinJoTagGenerator( test_image_small_arctic_P, 42, diff --git a/satpy/tests/writer_tests/test_ninjotiff.py b/satpy/tests/writer_tests/test_ninjotiff.py index b8c311f9ed..ea6cf07f95 100644 --- a/satpy/tests/writer_tests/test_ninjotiff.py +++ b/satpy/tests/writer_tests/test_ninjotiff.py @@ -53,7 +53,7 @@ def test_init(self): from satpy.writers.ninjotiff import NinjoTIFFWriter ninjo_tags = {40000: "NINJO"} ntw = NinjoTIFFWriter(tags=ninjo_tags) - self.assertDictEqual(ntw.tags, ninjo_tags) + assert ntw.tags == ninjo_tags @mock.patch("satpy.writers.ninjotiff.ImageWriter.save_dataset") @mock.patch("satpy.writers.ninjotiff.nt", pyninjotiff_mock.ninjotiff) @@ -65,7 +65,7 @@ def test_dataset(self, iwsd): with mock.patch("satpy.writers.ninjotiff.convert_units") as uconv: ntw.save_dataset(dataset, physic_unit="CELSIUS") uconv.assert_called_once_with(dataset, "K", "CELSIUS") - self.assertEqual(iwsd.call_count, 1) + assert iwsd.call_count == 1 @mock.patch("satpy.writers.ninjotiff.ImageWriter.save_dataset") @mock.patch("satpy.writers.ninjotiff.nt", pyninjotiff_mock.ninjotiff) @@ -78,7 +78,7 @@ def test_dataset_skip_unit_conversion(self, iwsd): ntw.save_dataset(dataset, physic_unit="CELSIUS", convert_temperature_units=False) uconv.assert_not_called() - self.assertEqual(iwsd.call_count, 1) + assert iwsd.call_count == 1 @mock.patch("satpy.writers.ninjotiff.NinjoTIFFWriter.save_dataset") @mock.patch("satpy.writers.ninjotiff.ImageWriter.save_image") diff --git a/satpy/tests/writer_tests/test_simple_image.py b/satpy/tests/writer_tests/test_simple_image.py index b3e92c9510..01d89a22ad 100644 --- a/satpy/tests/writer_tests/test_simple_image.py +++ b/satpy/tests/writer_tests/test_simple_image.py @@ -72,6 +72,6 @@ def test_simple_delayed_write(self): w = PillowWriter(base_dir=self.base_dir) res = w.save_datasets(datasets, compute=False) for r__ in res: - self.assertIsInstance(r__, Delayed) + assert isinstance(r__, Delayed) r__.compute() compute_writer_results(res) diff --git a/satpy/tests/writer_tests/test_utils.py b/satpy/tests/writer_tests/test_utils.py index a0cf88e54f..e71e3a7e1e 100644 --- a/satpy/tests/writer_tests/test_utils.py +++ b/satpy/tests/writer_tests/test_utils.py @@ -32,4 +32,4 @@ def test_flatten_dict(self): "b_c": 1, "b_d_e": 1, "b_d_f_g": [1, 2]} - self.assertDictEqual(wutils.flatten_dict(d), expected) + assert wutils.flatten_dict(d) == expected From 
2875ca1be33ae9e0396b95878e1b7faefe938307 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 26 Oct 2023 13:21:50 +0200 Subject: [PATCH 0603/1416] Add other ruff rules --- pyproject.toml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 64c036b07f..f1dabc9473 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,8 +15,7 @@ line_length = 120 [tool.ruff] # See https://docs.astral.sh/ruff/rules/ -# select = ["E", "W", "F", "I", "D", "S", "B", "A", "PT", "Q", "TID", "C90", "T10", "N"] -select = ["E", "W", "F", "I", "PT", "TID", "C90", "Q"] +select = ["E", "W", "F", "I", "A", "B", "S", "N", "D","PT", "TID", "C90", "Q", "T10", "T20"] ignore = ["B905"] # only available from python 3.10 line-length = 120 From c67e92ff5fab9d0e3c784e8f1e651b6d91abd787 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 26 Oct 2023 13:46:08 +0200 Subject: [PATCH 0604/1416] Fix after merge --- doc/source/reader_table.py | 2 + pyproject.toml | 3 +- satpy/_scene_converters.py | 2 +- satpy/composites/spectral.py | 2 +- satpy/modifiers/angles.py | 3 +- satpy/readers/_geos_area.py | 2 + satpy/readers/abi_l2_nc.py | 4 +- satpy/readers/ahi_l2_nc.py | 50 +++---- satpy/readers/gerb_l2_hr_h5.py | 16 +- satpy/readers/gms/gms5_vissr_l1b.py | 2 +- satpy/readers/gms/gms5_vissr_navigation.py | 3 + satpy/readers/goes_imager_nc.py | 4 +- satpy/readers/hrit_jma.py | 2 +- satpy/readers/iasi_l2_so2_bufr.py | 2 +- satpy/readers/modis_l2.py | 2 +- satpy/readers/mviri_l1b_fiduceo_nc.py | 4 +- satpy/readers/satpy_cf_nc.py | 6 +- satpy/readers/seviri_base.py | 31 ++-- satpy/readers/seviri_l1b_hrit.py | 2 +- satpy/readers/seviri_l1b_icare.py | 2 +- satpy/readers/seviri_l1b_native.py | 4 +- satpy/readers/seviri_l2_grib.py | 3 + satpy/scene.py | 4 +- satpy/tests/compositor_tests/test_spectral.py | 10 +- satpy/tests/multiscene_tests/test_blend.py | 4 +- .../modis_tests/test_modis_l1b.py | 14 +- satpy/tests/reader_tests/test_abi_l2_nc.py | 34 ++--- satpy/tests/reader_tests/test_ahi_hsd.py | 18 +-- .../reader_tests/test_ahi_l1b_gridded_bin.py | 1 - satpy/tests/reader_tests/test_ahi_l2_nc.py | 40 ++--- .../tests/reader_tests/test_gerb_l2_hr_h5.py | 138 +++++++++--------- satpy/tests/reader_tests/test_seviri_base.py | 28 ++-- satpy/tests/reader_tests/test_utils.py | 8 +- satpy/tests/test_modifiers.py | 28 ++-- satpy/tests/test_resample.py | 8 +- satpy/tests/test_utils.py | 5 +- satpy/tests/writer_tests/test_cf.py | 50 +++---- satpy/utils.py | 25 +--- satpy/writers/cf_writer.py | 30 ++-- setup.py | 8 +- utils/coord2area_def.py | 15 -- utils/fetch_avhrr_calcoeffs.py | 2 - 42 files changed, 303 insertions(+), 318 deletions(-) diff --git a/doc/source/reader_table.py b/doc/source/reader_table.py index 1c6760a390..3ddec3444b 100644 --- a/doc/source/reader_table.py +++ b/doc/source/reader_table.py @@ -28,6 +28,7 @@ def rst_table_row(columns=None): Args: columns (list[str]): Content of each column. + Returns: str """ @@ -48,6 +49,7 @@ def rst_table_header(name=None, header=None, header_rows=1, widths="auto"): width (optional[list[int]]): Width of each column as a list. 
If not specified defaults to auto and will therefore be determined by the backend (see ) + Returns: str """ diff --git a/pyproject.toml b/pyproject.toml index f1dabc9473..8d81b23818 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,7 +15,8 @@ line_length = 120 [tool.ruff] # See https://docs.astral.sh/ruff/rules/ -select = ["E", "W", "F", "I", "A", "B", "S", "N", "D","PT", "TID", "C90", "Q", "T10", "T20"] +# "A", "B", "S", "N", "D", +select = ["E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"] ignore = ["B905"] # only available from python 3.10 line-length = 120 diff --git a/satpy/_scene_converters.py b/satpy/_scene_converters.py index 0aa903d2f8..4eb9826850 100644 --- a/satpy/_scene_converters.py +++ b/satpy/_scene_converters.py @@ -84,7 +84,7 @@ def to_xarray(scn, Prefix to add to each variable with a name starting with a digit. Use '' or None to leave this out. - Returns + Returns: ------- ds, xr.Dataset A CF-compliant xr.Dataset diff --git a/satpy/composites/spectral.py b/satpy/composites/spectral.py index 7d05a000d6..59e8518a7e 100644 --- a/satpy/composites/spectral.py +++ b/satpy/composites/spectral.py @@ -149,7 +149,7 @@ class NDVIHybridGreen(SpectralBlender): def __init__(self, *args, ndvi_min=0.0, ndvi_max=1.0, limits=(0.15, 0.05), strength=1.0, **kwargs): """Initialize class and set the NDVI limits, blending fraction limits and strength.""" if strength <= 0.0: - raise ValueError(f"Expected stength greater than 0.0, got {strength}.") + raise ValueError(f"Expected strength greater than 0.0, got {strength}.") self.ndvi_min = ndvi_min self.ndvi_max = ndvi_max diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index a41e000254..1d059e1f5a 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -331,6 +331,7 @@ def compute_relative_azimuth(sat_azi: xr.DataArray, sun_azi: xr.DataArray) -> xr Args: sat_azi: DataArray for the satellite azimuth angles, typically in 0-360 degree range. sun_azi: DataArray for the solar azimuth angles, should be in same range as sat_azi. + Returns: A DataArray containing the relative azimuth angle in the 0-180 degree range. @@ -571,7 +572,7 @@ def _sunzen_reduction_ndarray(data: np.ndarray, reduction_factor = reduction_factor.clip(0., 1.) # invert the reduction factor such that minimum reduction is done at `limit` and gradually increases towards max_sza - with np.errstate(invalid='ignore'): # we expect space pixels to be invalid + with np.errstate(invalid="ignore"): # we expect space pixels to be invalid reduction_factor = 1. - np.log(reduction_factor + 1) / np.log(2) # apply non-linearity to the reduction factor for a non-linear reduction of the signal. This can be used for a diff --git a/satpy/readers/_geos_area.py b/satpy/readers/_geos_area.py index e777d78ca7..03dabfa9a0 100644 --- a/satpy/readers/_geos_area.py +++ b/satpy/readers/_geos_area.py @@ -72,6 +72,7 @@ def get_area_extent(pdict): coff: Column offset factor loff: Line offset factor scandir: 'N2S' for standard (N->S), 'S2N' for inverse (S->N) + Returns: aex: An area extent for the scene @@ -178,6 +179,7 @@ def get_geos_area_naming(input_dict): input_dict: dict Dictionary with keys `platform_name`, `instrument_name`, `service_name`, `service_desc`, `resolution`. The resolution is expected in meters. + Returns: area_naming_dict with `area_id`, `description` keys, values are strings.
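For orientation, this is roughly how a reader consumes the two helpers documented above; a minimal sketch, with the GEOS projection parameters copied from the AHI full disk handler further down rather than read from a file header:

.. code-block:: python

    from satpy.readers._geos_area import get_area_definition, get_area_extent

    # Projection parameters a real reader would take from file metadata.
    pdict = {"cfac": 20466275, "lfac": 20466275, "coff": 2750.5, "loff": 2750.5,
             "a": 6378137.0, "b": 6356752.3, "h": 35785863.0, "ssp_lon": 140.7,
             "nlines": 5500, "ncols": 5500, "scandir": "N2S"}
    aex = get_area_extent(pdict)

    # Naming keys required by get_area_definition; the values are illustrative.
    pdict.update({"a_name": "example_fd", "a_desc": "Example full disk area",
                  "p_id": "geos_example"})
    area_def = get_area_definition(pdict, aex)
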
diff --git a/satpy/readers/abi_l2_nc.py b/satpy/readers/abi_l2_nc.py index ef30629c5c..2324d3e1fd 100644 --- a/satpy/readers/abi_l2_nc.py +++ b/satpy/readers/abi_l2_nc.py @@ -43,9 +43,9 @@ def get_dataset(self, key, info): self._remove_problem_attrs(variable) # convert to satpy standard units - if variable.attrs['units'] == '1' and key['calibration'] == 'reflectance': + if variable.attrs["units"] == "1" and key["calibration"] == "reflectance": variable *= 100.0 - variable.attrs['units'] = '%' + variable.attrs["units"] = "%" return variable diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index 5159931819..17823fed1e 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -54,7 +54,7 @@ logger = logging.getLogger(__name__) -EXPECTED_DATA_AREA = 'Full Disk' +EXPECTED_DATA_AREA = "Full Disk" class HIML2NCFileHandler(BaseFileHandler): @@ -69,39 +69,39 @@ def __init__(self, filename, filename_info, filetype_info): chunks={"xc": "auto", "yc": "auto"}) # Check that file is a full disk scene, we don't know the area for anything else - if self.nc.attrs['cdm_data_type'] != EXPECTED_DATA_AREA: - raise ValueError('File is not a full disk scene') - - self.sensor = self.nc.attrs['instrument_name'].lower() - self.nlines = self.nc.dims['Columns'] - self.ncols = self.nc.dims['Rows'] - self.platform_name = self.nc.attrs['satellite_name'] - self.platform_shortname = filename_info['platform'] + if self.nc.attrs["cdm_data_type"] != EXPECTED_DATA_AREA: + raise ValueError("File is not a full disk scene") + + self.sensor = self.nc.attrs["instrument_name"].lower() + self.nlines = self.nc.dims["Columns"] + self.ncols = self.nc.dims["Rows"] + self.platform_name = self.nc.attrs["satellite_name"] + self.platform_shortname = filename_info["platform"] self._meta = None @property def start_time(self): """Start timestamp of the dataset.""" - dt = self.nc.attrs['time_coverage_start'] - return datetime.strptime(dt, '%Y-%m-%dT%H:%M:%SZ') + dt = self.nc.attrs["time_coverage_start"] + return datetime.strptime(dt, "%Y-%m-%dT%H:%M:%SZ") @property def end_time(self): """End timestamp of the dataset.""" - dt = self.nc.attrs['time_coverage_end'] - return datetime.strptime(dt, '%Y-%m-%dT%H:%M:%SZ') + dt = self.nc.attrs["time_coverage_end"] + return datetime.strptime(dt, "%Y-%m-%dT%H:%M:%SZ") def get_dataset(self, key, info): """Load a dataset.""" - var = info['file_key'] - logger.debug('Reading in get_dataset %s.', var) + var = info["file_key"] + logger.debug("Reading in get_dataset %s.", var) variable = self.nc[var] # Data has 'Latitude' and 'Longitude' coords, these must be replaced. - variable = variable.rename({'Rows': 'y', 'Columns': 'x'}) + variable = variable.rename({"Rows": "y", "Columns": "x"}) - variable = variable.drop('Latitude') - variable = variable.drop('Longitude') + variable = variable.drop("Latitude") + variable = variable.drop("Longitude") variable.attrs.update(key.to_dict()) return variable @@ -117,20 +117,20 @@ def get_area_def(self, dsid): return self.area def _get_area_def(self): - logger.info('The AHI L2 cloud products do not have the metadata required to produce an area definition.' - ' Assuming standard Himawari-8/9 full disk projection.') + logger.info("The AHI L2 cloud products do not have the metadata required to produce an area definition." 
+ " Assuming standard Himawari-8/9 full disk projection.") # Basic check to ensure we're processing a full disk (2km) scene.n if self.nlines != 5500 or self.ncols != 5500: raise ValueError("Input L2 file is not a full disk Himawari scene. Only full disk data is supported.") - pdict = {'cfac': 20466275, 'lfac': 20466275, 'coff': 2750.5, 'loff': 2750.5, 'a': 6378137.0, 'h': 35785863.0, - 'b': 6356752.3, 'ssp_lon': 140.7, 'nlines': self.nlines, 'ncols': self.ncols, 'scandir': 'N2S'} + pdict = {"cfac": 20466275, "lfac": 20466275, "coff": 2750.5, "loff": 2750.5, "a": 6378137.0, "h": 35785863.0, + "b": 6356752.3, "ssp_lon": 140.7, "nlines": self.nlines, "ncols": self.ncols, "scandir": "N2S"} aex = get_area_extent(pdict) - pdict['a_name'] = 'Himawari_Area' - pdict['a_desc'] = "AHI Full Disk area" - pdict['p_id'] = f'geos{self.platform_shortname}' + pdict["a_name"] = "Himawari_Area" + pdict["a_desc"] = "AHI Full Disk area" + pdict["p_id"] = f"geos{self.platform_shortname}" return get_area_definition(pdict, aex) diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index f663b3040f..0bf918d68f 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -40,10 +40,10 @@ def gerb_get_dataset(ds, ds_info): The routine takes into account the quantisation factor and fill values. """ ds_attrs = ds.attrs - ds_fill = ds_info['fill_value'] + ds_fill = ds_info["fill_value"] fill_mask = ds != ds_fill - if 'Quantisation Factor' in ds_attrs and 'Unit' in ds_attrs: - ds = ds*ds_attrs['Quantisation Factor'] + if "Quantisation Factor" in ds_attrs and "Unit" in ds_attrs: + ds = ds*ds_attrs["Quantisation Factor"] else: ds = ds*1. ds = ds.where(fill_mask) @@ -61,17 +61,17 @@ def end_time(self): @property def start_time(self): """Get start time.""" - return self.filename_info['sensing_time'] + return self.filename_info["sensing_time"] def get_dataset(self, ds_id, ds_info): """Read a HDF5 file into an xarray DataArray.""" - ds_name = ds_id['name'] - if ds_name not in ['Solar Flux', 'Thermal Flux', 'Solar Radiance', 'Thermal Radiance']: + ds_name = ds_id["name"] + if ds_name not in ["Solar Flux", "Thermal Flux", "Solar Radiance", "Thermal Radiance"]: raise KeyError(f"{ds_name} is an unknown dataset for this reader.") - ds = gerb_get_dataset(self[f'Radiometry/{ds_name}'], ds_info) + ds = gerb_get_dataset(self[f"Radiometry/{ds_name}"], ds_info) - ds.attrs.update({'start_time': self.start_time, 'data_time': self.start_time, 'end_time': self.end_time}) + ds.attrs.update({"start_time": self.start_time, "data_time": self.start_time, "end_time": self.end_time}) return ds diff --git a/satpy/readers/gms/gms5_vissr_l1b.py b/satpy/readers/gms/gms5_vissr_l1b.py index f3c6898f65..0e1a5df483 100644 --- a/satpy/readers/gms/gms5_vissr_l1b.py +++ b/satpy/readers/gms/gms5_vissr_l1b.py @@ -29,7 +29,7 @@ scene.load(["VIS", "IR1"]) -References +References: ~~~~~~~~~~ Details about platform, instrument and data format can be found in the diff --git a/satpy/readers/gms/gms5_vissr_navigation.py b/satpy/readers/gms/gms5_vissr_navigation.py index 8a811b2210..6335ce13b9 100644 --- a/satpy/readers/gms/gms5_vissr_navigation.py +++ b/satpy/readers/gms/gms5_vissr_navigation.py @@ -445,6 +445,7 @@ def get_lon_lat(pixel, nav_params): pixel (Pixel): Point in image coordinates. nav_params (PixelNavigationParameters): Navigation parameters for a single pixel. + Returns: Longitude and latitude in degrees. 
""" @@ -481,6 +482,7 @@ def transform_image_coords_to_scanning_angles(point, image_offset, scanning_angl point (Pixel): Point in image coordinates. image_offset (ImageOffset): Image offset. scanning_angles (ScanningAngles): Scanning angles. + Returns: Scanning angles (x, y) at the pixel center (rad). """ @@ -677,6 +679,7 @@ def intersect_with_earth(view_vector, sat_pos, ellipsoid): coordinates. sat_pos (Vector3D): Satellite position in earth-fixed coordinates. ellipsoid (EarthEllipsoid): Earth ellipsoid. + Returns: Intersection (Vector3D) with the earth's surface. """ diff --git a/satpy/readers/goes_imager_nc.py b/satpy/readers/goes_imager_nc.py index c343b7f7c5..4cb7cf8610 100644 --- a/satpy/readers/goes_imager_nc.py +++ b/satpy/readers/goes_imager_nc.py @@ -200,7 +200,7 @@ 6. Data is received via EumetCast -References +References: ========== - `[GVAR]`_ GVAR transmission format @@ -887,6 +887,7 @@ def _viscounts2radiance(counts, slope, offset): counts: Raw detector counts slope: Slope [W m-2 um-1 sr-1] offset: Offset [W m-2 um-1 sr-1] + Returns: Radiance [W m-2 um-1 sr-1] """ @@ -913,6 +914,7 @@ def _calibrate_vis(radiance, k): k: pi / H, where H is the solar spectral irradiance at annual-average sun-earth distance, averaged over the spectral response function of the detector). Units of k: [m2 um sr W-1] + Returns: Reflectance [%] """ diff --git a/satpy/readers/hrit_jma.py b/satpy/readers/hrit_jma.py index 2a85a95cd4..865cbc5dd7 100644 --- a/satpy/readers/hrit_jma.py +++ b/satpy/readers/hrit_jma.py @@ -34,7 +34,7 @@ - `AHI sample data`_ -Example +Example: ------- Here is an example how to read Himwari-8 HRIT data with Satpy: diff --git a/satpy/readers/iasi_l2_so2_bufr.py b/satpy/readers/iasi_l2_so2_bufr.py index 500c2b29df..a63d434a86 100644 --- a/satpy/readers/iasi_l2_so2_bufr.py +++ b/satpy/readers/iasi_l2_so2_bufr.py @@ -31,7 +31,7 @@ Scene(reader="iasi_l2_so2_bufr", filenames=fnames) -Example +Example: ------- Here is an example how to read the data in satpy: diff --git a/satpy/readers/modis_l2.py b/satpy/readers/modis_l2.py index 0ad1b95ba9..8fdf1c69bb 100644 --- a/satpy/readers/modis_l2.py +++ b/satpy/readers/modis_l2.py @@ -245,7 +245,7 @@ def _bits_strip(bit_start, bit_count, value): value : int Number from which to extract the bits - Returns + Returns: ------- int Value of the extracted bits diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index 979483513a..043c45d4cc 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -43,7 +43,7 @@ name in the reader as in the netCDF file. -Example +Example: ------- This is how to read FIDUCEO MVIRI FCDR data in satpy: @@ -143,7 +143,7 @@ sza_vis = scn[query_vis] -References +References: ---------- - `[Handbook]`_ MFG User Handbook - `[PUG]`_ FIDUCEO MVIRI FCDR Product User Guide diff --git a/satpy/readers/satpy_cf_nc.py b/satpy/readers/satpy_cf_nc.py index cf99b57e7d..120a14be36 100644 --- a/satpy/readers/satpy_cf_nc.py +++ b/satpy/readers/satpy_cf_nc.py @@ -36,9 +36,8 @@ '{platform_name}-{sensor}-{start_time:%Y%m%d%H%M%S}-{end_time:%Y%m%d%H%M%S}.nc' -Example +Example: ------- - Here is an example how to read the data in satpy: .. code-block:: python @@ -92,9 +91,8 @@ ''AVHRR-GAC_FDR_1C_{platform}_{start_time:%Y%m%dT%H%M%SZ}_{end_time:%Y%m%dT%H%M%SZ}_{processing_mode}_{disposition_mode}_{creation_time}_{version_int:04d}.nc' -Example +Example: ------- - Here is an example how to read the data in satpy: .. 
code-block:: python diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 1d7b7576e7..25e6ed1a8b 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -390,34 +390,34 @@ MEIRINK_EPOCH = datetime(2000, 1, 1) MEIRINK_COEFS: dict[str, dict[int, dict[str, tuple[float, float]]]] = {} -MEIRINK_COEFS['2023'] = {} +MEIRINK_COEFS["2023"] = {} # Meteosat-8 -MEIRINK_COEFS['2023'][321] = {'VIS006': (24.346, 0.3739), - 'VIS008': (30.989, 0.3111), - 'IR_016': (22.869, 0.0065) +MEIRINK_COEFS["2023"][321] = {"VIS006": (24.346, 0.3739), + "VIS008": (30.989, 0.3111), + "IR_016": (22.869, 0.0065) } # Meteosat-9 -MEIRINK_COEFS['2023'][322] = {'VIS006': (21.026, 0.2556), - 'VIS008': (26.875, 0.1835), - 'IR_016': (21.394, 0.0498) +MEIRINK_COEFS["2023"][322] = {"VIS006": (21.026, 0.2556), + "VIS008": (26.875, 0.1835), + "IR_016": (21.394, 0.0498) } # Meteosat-10 -MEIRINK_COEFS['2023'][323] = {'VIS006': (19.829, 0.5856), - 'VIS008': (25.284, 0.6787), - 'IR_016': (23.066, -0.0286) +MEIRINK_COEFS["2023"][323] = {"VIS006": (19.829, 0.5856), + "VIS008": (25.284, 0.6787), + "IR_016": (23.066, -0.0286) } # Meteosat-11 -MEIRINK_COEFS['2023'][324] = {'VIS006': (20.515, 0.3600), - 'VIS008': (25.803, 0.4844), - 'IR_016': (22.354, -0.0187) +MEIRINK_COEFS["2023"][324] = {"VIS006": (20.515, 0.3600), + "VIS008": (25.803, 0.4844), + "IR_016": (22.354, -0.0187) } @@ -440,7 +440,7 @@ def get_meirink_slope(meirink_coefs, acquisition_time): def should_apply_meirink(calib_mode, channel_name): """Decide whether to use the Meirink calibration coefficients.""" - return "MEIRINK" in calib_mode and channel_name in ['VIS006', 'VIS008', 'IR_016'] + return "MEIRINK" in calib_mode and channel_name in ["VIS006", "VIS008", "IR_016"] class MeirinkCalibrationHandler: @@ -448,7 +448,7 @@ class MeirinkCalibrationHandler: def __init__(self, calib_mode): """Initialize the calibration handler.""" - self.coefs = MEIRINK_COEFS[calib_mode.split('-')[1]] + self.coefs = MEIRINK_COEFS[calib_mode.split("-")[1]] def get_slope(self, platform, channel, time): """Return the slope using the provided calibration coefficients.""" @@ -963,6 +963,7 @@ def calculate_area_extent(area_dict): line_step: Pixel resolution in meters in south-north direction [column_offset: Column offset, defaults to 0 if not given] [line_offset: Line offset, defaults to 0 if not given] + Returns: tuple: An area extent for the scene defined by the lower left and upper right corners diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 83fc82f687..8e3fb148bc 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -84,7 +84,7 @@ } -Example +Example: ------- Here is an example how to read the data in satpy: diff --git a/satpy/readers/seviri_l1b_icare.py b/satpy/readers/seviri_l1b_icare.py index b2ceb30313..5c151d64a2 100644 --- a/satpy/readers/seviri_l1b_icare.py +++ b/satpy/readers/seviri_l1b_icare.py @@ -32,7 +32,7 @@ CHANN is the channel (i.e: HRV, IR016, WV073, etc) VX-XX is the processing version number -Example +Example: ------- Here is an example how to read the data in satpy: diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index 81f2d01300..4593d3fe3d 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -37,7 +37,7 @@ To see the full list of arguments that can be provided, look into the documentation of :class:`NativeMSGFileHandler`. 
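The docstring Example sections retitled in these reader hunks all follow the same Scene pattern; a minimal sketch for the native reader, with an illustrative filename:

.. code-block:: python

    from satpy import Scene

    # The .nat filename here is only an example; any MSG native file works.
    filenames = ["MSG4-SEVI-MSG15-0100-NA-20210302124244.306000000Z-NA.nat"]
    scn = Scene(filenames=filenames, reader="seviri_l1b_native")
    scn.load(["VIS006", "IR_108"])
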
-Example +Example: ------- Here is an example how to read the data in satpy. @@ -749,7 +749,7 @@ def __init__(self, header, trailer, mda): def get_img_bounds(self, dataset_id, is_roi): """Get image line and column boundaries. - returns: + Returns: Dictionary with the four keys 'south_bound', 'north_bound', 'east_bound' and 'west_bound', each containing a list of the respective line/column numbers of the image boundaries. diff --git a/satpy/readers/seviri_l2_grib.py b/satpy/readers/seviri_l2_grib.py index ca021a7bc0..b69c60e7ac 100644 --- a/satpy/readers/seviri_l2_grib.py +++ b/satpy/readers/seviri_l2_grib.py @@ -151,6 +151,7 @@ def _get_proj_area(self, gid): Args: gid: The ID of the GRIB message. + Returns: tuple: A tuple of two dictionaries for the projection and the area definition. pdict: @@ -232,6 +233,7 @@ def _get_xarray_from_msg(self, gid): Args: gid: The ID of the GRIB message. + Returns: DataArray: The array containing the retrieved values. """ @@ -268,6 +270,7 @@ def _get_from_msg(gid, key): Args: gid: The ID of the GRIB message. key: The key of the required attribute. + Returns: The retrieved attribute or None if the key is missing. """ diff --git a/satpy/scene.py b/satpy/scene.py index f0a6e2468a..d96c81a0e4 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -785,7 +785,7 @@ def aggregate(self, dataset_ids=None, boundary="trim", side="left", func="mean", Returns: A new aggregated scene - See also: + See Also: xarray.DataArray.coarsen Example: @@ -1146,7 +1146,7 @@ def to_xarray(self, Prefix to add to each variable with a name starting with a digit. Use '' or None to leave this out. - Returns + Returns: ------- ds, xr.Dataset A CF-compliant xr.Dataset diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index 7386bb066a..36a3dd9355 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -67,13 +67,13 @@ def test_hybrid_green(self): def test_green_corrector(self): """Test the deprecated class for green corrections.""" - comp = GreenCorrector('blended_channel', fractions=(0.85, 0.15), prerequisites=(0.51, 0.85), - standard_name='toa_bidirectional_reflectance') + comp = GreenCorrector("blended_channel", fractions=(0.85, 0.15), prerequisites=(0.51, 0.85), + standard_name="toa_bidirectional_reflectance") res = comp((self.c01, self.c03)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'blended_channel' - assert res.attrs['standard_name'] == 'toa_bidirectional_reflectance' + assert res.attrs["name"] == "blended_channel" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.compute() np.testing.assert_allclose(data, 0.23) @@ -114,6 +114,6 @@ def test_nonliniear_scaling(self): def test_invalid_strength(self): """Test using invalid `strength` term for non-linear scaling.""" - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Expected strength greater than 0.0, got 0.0."): _ = NDVIHybridGreen("ndvi_hybrid_green", strength=0.0, prerequisites=(0.51, 0.65, 0.85), standard_name="toa_bidirectional_reflectance") diff --git a/satpy/tests/multiscene_tests/test_blend.py b/satpy/tests/multiscene_tests/test_blend.py index 120af51930..f9d7e35462 100644 --- a/satpy/tests/multiscene_tests/test_blend.py +++ b/satpy/tests/multiscene_tests/test_blend.py @@ -394,7 +394,7 @@ def _check_stacked_metadata(data_arr: xr.DataArray, exp_name: str) -> None: class TestTemporalRGB: """Test the temporal
RGB blending method.""" - @pytest.fixture + @pytest.fixture() def nominal_data(self): """Return the input arrays for the nominal use case.""" da1 = xr.DataArray([1, 0, 0], attrs={"start_time": datetime(2023, 5, 22, 9, 0, 0)}) @@ -403,7 +403,7 @@ def nominal_data(self): return [da1, da2, da3] - @pytest.fixture + @pytest.fixture() def expected_result(self): """Return the expected result arrays.""" return [[1, 0, 0], [0, 1, 0], [0, 0, 1]] diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py index 8976d03fb8..11068b6577 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py @@ -50,7 +50,7 @@ def _check_shared_metadata(data_arr): assert data_arr.attrs["platform_name"] == "EOS-Terra" assert "rows_per_scan" in data_arr.attrs assert isinstance(data_arr.attrs["rows_per_scan"], int) - assert data_arr.attrs['reader'] == "modis_l1b" + assert data_arr.attrs["reader"] == "modis_l1b" assert "resolution" in data_arr.attrs res = data_arr.attrs["resolution"] if res == 5000: @@ -160,7 +160,7 @@ def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, d shape_250m = _shape_for_resolution(250) default_shape = _shape_for_resolution(default_res) scheduler = CustomScheduler(max_computes=1 + has_5km + has_500 + has_250) - with dask.config.set({'scheduler': scheduler, 'array.chunk-size': '1 MiB'}): + with dask.config.set({"scheduler": scheduler, "array.chunk-size": "1 MiB"}): _load_and_check_geolocation(scene, "*", default_res, default_shape, True) _load_and_check_geolocation(scene, 5000, 5000, shape_5km, has_5km) _load_and_check_geolocation(scene, 500, 500, shape_500m, has_500) @@ -179,9 +179,9 @@ def test_load_sat_zenith_angle(self, modis_l1b_nasa_mod021km_file): def test_load_vis(self, modis_l1b_nasa_mod021km_file): """Test loading visible band.""" - scene = Scene(reader='modis_l1b', filenames=modis_l1b_nasa_mod021km_file) - dataset_name = '1' - with dask.config.set({'array.chunk-size': '1 MiB'}): + scene = Scene(reader="modis_l1b", filenames=modis_l1b_nasa_mod021km_file) + dataset_name = "1" + with dask.config.set({"array.chunk-size": "1 MiB"}): scene.load([dataset_name]) dataset = scene[dataset_name] assert dataset[0, 0] == 300.0 @@ -194,8 +194,8 @@ def test_load_vis_saturation(self, mask_saturated, modis_l1b_nasa_mod021km_file) """Test loading visible band.""" scene = Scene(reader="modis_l1b", filenames=modis_l1b_nasa_mod021km_file, reader_kwargs={"mask_saturated": mask_saturated}) - dataset_name = '2' - with dask.config.set({'array.chunk-size': '1 MiB'}): + dataset_name = "2" + with dask.config.set({"array.chunk-size": "1 MiB"}): scene.load([dataset_name]) dataset = scene[dataset_name] assert dataset.shape == _shape_for_resolution(1000) diff --git a/satpy/tests/reader_tests/test_abi_l2_nc.py b/satpy/tests/reader_tests/test_abi_l2_nc.py index 68d40017a7..4b8d3a9578 100644 --- a/satpy/tests/reader_tests/test_abi_l2_nc.py +++ b/satpy/tests/reader_tests/test_abi_l2_nc.py @@ -169,23 +169,23 @@ def test_mcmip_get_dataset(self, xr_, product, exp_metadata): exp_data *= 100 exp_attrs = { - 'instrument_ID': None, - 'modifiers': (), - 'name': product, - 'observation_type': 'MCMIP', - 'orbital_slot': None, - 'reader': 'abi_l2_nc', - 'platform_name': 'GOES-16', - 'platform_shortname': 'G16', - 'production_site': None, - 'scan_mode': 'M6', - 'scene_abbr': 'F', - 'scene_id': None, - 'sensor': 'abi', - 'timeline_ID': None, - 'start_time': datetime(2017, 9, 
20, 17, 30, 40, 800000), - 'end_time': datetime(2017, 9, 20, 17, 41, 17, 500000), - 'ancillary_variables': [], + "instrument_ID": None, + "modifiers": (), + "name": product, + "observation_type": "MCMIP", + "orbital_slot": None, + "reader": "abi_l2_nc", + "platform_name": "GOES-16", + "platform_shortname": "G16", + "production_site": None, + "scan_mode": "M6", + "scene_abbr": "F", + "scene_id": None, + "sensor": "abi", + "timeline_ID": None, + "start_time": datetime(2017, 9, 20, 17, 30, 40, 800000), + "end_time": datetime(2017, 9, 20, 17, 41, 17, 500000), + "ancillary_variables": [], } exp_attrs.update(exp_metadata) diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 1ceb14e733..9338440246 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -569,10 +569,10 @@ def test_updated_calibrate(self): def test_user_calibration(self): """Test user-defined calibration modes.""" # This is for radiance correction - self.fh.user_calibration = {'B13': {'slope': 0.95, - 'offset': -0.1}} - self.fh.band_name = 'B13' - rad = self.fh.calibrate(data=self.counts, calibration='radiance') + self.fh.user_calibration = {"B13": {"slope": 0.95, + "offset": -0.1}} + self.fh.band_name = "B13" + rad = self.fh.calibrate(data=self.counts, calibration="radiance") rad_np = rad.compute() assert rad.dtype == rad_np.dtype assert rad.dtype == np.float32 @@ -581,11 +581,11 @@ def test_user_calibration(self): assert np.allclose(rad, rad_exp) # This is for DN calibration - self.fh.user_calibration = {'B13': {'slope': -0.0032, - 'offset': 15.20}, - 'type': 'DN'} - self.fh.band_name = 'B13' - rad = self.fh.calibrate(data=self.counts, calibration='radiance') + self.fh.user_calibration = {"B13": {"slope": -0.0032, + "offset": 15.20}, + "type": "DN"} + self.fh.band_name = "B13" + rad = self.fh.calibrate(data=self.counts, calibration="radiance") rad_np = rad.compute() assert rad.dtype == rad_np.dtype assert rad.dtype == np.float32 diff --git a/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py b/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py index 73206e7ffd..05abef600b 100644 --- a/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py +++ b/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py @@ -267,7 +267,6 @@ def tearDown(self): def test_get_luts(self): """Check that the function to download LUTs operates successfully.""" tempdir = tempfile.gettempdir() - print(self.fh.lut_dir) self.fh._get_luts() assert not os.path.exists(os.path.join(tempdir, "count2tbb_v102/")) for lut_name in AHI_LUT_NAMES: diff --git a/satpy/tests/reader_tests/test_ahi_l2_nc.py b/satpy/tests/reader_tests/test_ahi_l2_nc.py index 39de4e1053..7d4050ecf0 100644 --- a/satpy/tests/reader_tests/test_ahi_l2_nc.py +++ b/satpy/tests/reader_tests/test_ahi_l2_nc.py @@ -18,7 +18,7 @@ start_time = datetime(2023, 8, 24, 5, 40, 21) end_time = datetime(2023, 8, 24, 5, 49, 40) -dimensions = {'Columns': 5500, 'Rows': 5500} +dimensions = {"Columns": 5500, "Rows": 5500} exp_ext = (-5499999.9012, -5499999.9012, 5499999.9012, 5499999.9012) @@ -30,12 +30,12 @@ } badarea_attrs = global_attrs.copy() -badarea_attrs['cdm_data_type'] = 'bad_area' +badarea_attrs["cdm_data_type"] = "bad_area" -def ahil2_filehandler(fname, platform='h09'): +def ahil2_filehandler(fname, platform="h09"): """Instantiate a Filehandler.""" - fileinfo = {'platform': platform} + fileinfo = {"platform": platform} filetype = None fh = HIML2NCFileHandler(fname, fileinfo, filetype) return fh @@ -45,9 +45,9 @@ 
def ahil2_filehandler(fname, platform='h09'): def himl2_filename(tmp_path_factory): """Create a fake himawari l2 file.""" fname = f'{tmp_path_factory.mktemp("data")}/AHI-CMSK_v1r1_h09_s202308240540213_e202308240549407_c202308240557548.nc' - ds = xr.Dataset({'CloudMask': (['Rows', 'Columns'], clmk_data)}, - coords={'Latitude': (['Rows', 'Columns'], lat_data), - 'Longitude': (['Rows', 'Columns'], lon_data)}, + ds = xr.Dataset({"CloudMask": (["Rows", "Columns"], clmk_data)}, + coords={"Latitude": (["Rows", "Columns"], lat_data), + "Longitude": (["Rows", "Columns"], lon_data)}, attrs=global_attrs) ds.to_netcdf(fname) return fname @@ -57,9 +57,9 @@ def himl2_filename(tmp_path_factory): def himl2_filename_bad(tmp_path_factory): """Create a fake himawari l2 file.""" fname = f'{tmp_path_factory.mktemp("data")}/AHI-CMSK_v1r1_h09_s202308240540213_e202308240549407_c202308240557548.nc' - ds = xr.Dataset({'CloudMask': (['Rows', 'Columns'], clmk_data)}, - coords={'Latitude': (['Rows', 'Columns'], lat_data), - 'Longitude': (['Rows', 'Columns'], lon_data)}, + ds = xr.Dataset({"CloudMask": (["Rows", "Columns"], clmk_data)}, + coords={"Latitude": (["Rows", "Columns"], lat_data), + "Longitude": (["Rows", "Columns"], lon_data)}, attrs=badarea_attrs) ds.to_netcdf(fname) @@ -75,35 +75,35 @@ def test_startend(himl2_filename): def test_ahi_l2_area_def(himl2_filename, caplog): """Test reader handles area definition correctly.""" - ps = '+proj=geos +lon_0=140.7 +h=35785863 +x_0=0 +y_0=0 +a=6378137 +rf=298.257024882273 +units=m +no_defs +type=crs' + ps = "+proj=geos +lon_0=140.7 +h=35785863 +x_0=0 +y_0=0 +a=6378137 +rf=298.257024882273 +units=m +no_defs +type=crs" # Check case where input data is correct size. fh = ahil2_filehandler(himl2_filename) clmk_id = make_dataid(name="cloudmask") area_def = fh.get_area_def(clmk_id) - assert area_def.width == dimensions['Columns'] - assert area_def.height == dimensions['Rows'] + assert area_def.width == dimensions["Columns"] + assert area_def.height == dimensions["Rows"] assert np.allclose(area_def.area_extent, exp_ext) assert area_def.proj4_string == ps # Check case where input data is incorrect size. 
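The hunk just below narrows a `pytest.raises` block: the file-handler setup moves out of the context manager and a `match=` regex pins the expected message, which is the shape ruff's PT011 (raises-too-broad) check pushes tests toward. A minimal sketch of that pattern, with illustrative names that are not taken from this patch:

import pytest


def parse_positive(value: str) -> int:
    # Hypothetical helper, used only to illustrate the testing pattern.
    number = int(value)
    if number <= 0:
        raise ValueError(f"Expected a positive integer, got {number}.")
    return number


def test_parse_positive_rejects_zero():
    # Setup stays outside the block so only the call under test may raise.
    bad_input = "0"
    with pytest.raises(ValueError, match="Expected a positive integer"):
        parse_positive(bad_input)

Keeping setup outside the `with` block matters because a ValueError raised during setup would otherwise satisfy the assertion and silently mask a broken test.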
- with pytest.raises(ValueError): - fh = ahil2_filehandler(himl2_filename) - fh.nlines = 3000 + fh = ahil2_filehandler(himl2_filename) + fh.nlines = 3000 + with pytest.raises(ValueError, match="Input L2 file is not a full disk Himawari scene..*"): fh.get_area_def(clmk_id) def test_bad_area_name(himl2_filename_bad): """Check case where area name is not correct.""" - global_attrs['cdm_data_type'] = 'bad_area' - with pytest.raises(ValueError): + global_attrs["cdm_data_type"] = "bad_area" + with pytest.raises(ValueError, match="File is not a full disk scene"): ahil2_filehandler(himl2_filename_bad) - global_attrs['cdm_data_type'] = 'Full Disk' + global_attrs["cdm_data_type"] = "Full Disk" def test_load_data(himl2_filename): """Test that data is loaded successfully.""" fh = ahil2_filehandler(himl2_filename) clmk_id = make_dataid(name="cloudmask") - clmk = fh.get_dataset(clmk_id, {'file_key': 'CloudMask'}) + clmk = fh.get_dataset(clmk_id, {"file_key": "CloudMask"}) assert np.allclose(clmk.data, clmk_data) diff --git a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py index eb06362831..0333f3df2b 100644 --- a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py +++ b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py @@ -37,10 +37,10 @@ def make_h5_null_string(length): def write_h5_null_string_att(loc_id, name, s): """Write a NULL terminated string attribute at loc_id.""" dt = make_h5_null_string(length=7) - name = bytes(name.encode('ascii')) - s = bytes(s.encode('ascii')) + name = bytes(name.encode("ascii")) + s = bytes(s.encode("ascii")) at = h5py.h5a.create(loc_id, name, dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(s, dtype=f'|S{len(s)+1}')) + at.write(np.array(s, dtype=f"|S{len(s)+1}")) @pytest.fixture(scope="session") @@ -48,74 +48,74 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory): """Create a dummy HDF5 file for the GERB L2 HR product.""" filename = tmp_path_factory.mktemp("data") / FNAME - with h5py.File(filename, 'w') as fid: - fid.create_group('/Angles') - fid['/Angles/Relative Azimuth'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Angles/Relative Azimuth'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') - fid['/Angles/Solar Zenith'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Angles/Solar Zenith'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') - write_h5_null_string_att(fid['/Angles/Relative Azimuth'].id, 'Unit', 'Degree') - fid['/Angles/Viewing Azimuth'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Angles/Viewing Azimuth'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') - write_h5_null_string_att(fid['/Angles/Viewing Azimuth'].id, 'Unit', 'Degree') - fid['/Angles/Viewing Zenith'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Angles/Viewing Zenith'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') - write_h5_null_string_att(fid['/Angles/Viewing Zenith'].id, 'Unit', 'Degree') - fid.create_group('/GERB') + with h5py.File(filename, "w") as fid: + fid.create_group("/Angles") + fid["/Angles/Relative Azimuth"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Angles/Relative Azimuth"].attrs["Quantisation Factor"] = np.array(0.1, dtype="float64") + fid["/Angles/Solar Zenith"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Angles/Solar Zenith"].attrs["Quantisation Factor"] = np.array(0.1, dtype="float64") + write_h5_null_string_att(fid["/Angles/Relative Azimuth"].id, "Unit", "Degree") + 
fid["/Angles/Viewing Azimuth"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Angles/Viewing Azimuth"].attrs["Quantisation Factor"] = np.array(0.1, dtype="float64") + write_h5_null_string_att(fid["/Angles/Viewing Azimuth"].id, "Unit", "Degree") + fid["/Angles/Viewing Zenith"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Angles/Viewing Zenith"].attrs["Quantisation Factor"] = np.array(0.1, dtype="float64") + write_h5_null_string_att(fid["/Angles/Viewing Zenith"].id, "Unit", "Degree") + fid.create_group("/GERB") dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) dt.set_size(3) dt.set_strpad(h5py.h5t.STR_NULLTERM) - write_h5_null_string_att(fid['/GERB'].id, 'Instrument Identifier', 'G4') - fid.create_group('/GGSPS') - fid['/GGSPS'].attrs['L1.5 NANRG Product Version'] = np.array(-1, dtype='int32') - fid.create_group('/Geolocation') - write_h5_null_string_att(fid['/Geolocation'].id, 'Geolocation File Name', - 'G4_SEV4_L20_HR_GEO_20180111_181500_V010.hdf') - fid['/Geolocation'].attrs['Nominal Satellite Longitude (degrees)'] = np.array(0.0, dtype='float64') - fid.create_group('/Imager') - fid['/Imager'].attrs['Instrument Identifier'] = np.array(4, dtype='int32') - write_h5_null_string_att(fid['/Imager'].id, 'Type', 'SEVIRI') - fid.create_group('/RMIB') - fid.create_group('/Radiometry') - fid['/Radiometry'].attrs['SEVIRI Radiance Definition Flag'] = np.array(2, dtype='int32') - fid['/Radiometry/A Values (per GERB detector cell)'] = np.ones(shape=(256,), dtype=np.dtype('>f8')) - fid['/Radiometry/C Values (per GERB detector cell)'] = np.ones(shape=(256,), dtype=np.dtype('>f8')) - fid['/Radiometry/Longwave Correction'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Radiometry/Longwave Correction'].attrs['Offset'] = np.array(1.0, dtype='float64') - fid['/Radiometry/Longwave Correction'].attrs['Quantisation Factor'] = np.array(0.005, dtype='float64') - fid['/Radiometry/Shortwave Correction'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Radiometry/Shortwave Correction'].attrs['Offset'] = np.array(1.0, dtype='float64') - fid['/Radiometry/Shortwave Correction'].attrs['Quantisation Factor'] = np.array(0.005, dtype='float64') - fid['/Radiometry/Solar Flux'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Radiometry/Solar Flux'].attrs['Quantisation Factor'] = np.array(0.25, dtype='float64') - write_h5_null_string_att(fid['/Radiometry/Solar Flux'].id, 'Unit', 'Watt per square meter') - fid['/Radiometry/Solar Radiance'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Radiometry/Solar Radiance'].attrs['Quantisation Factor'] = np.array(0.05, dtype='float64') - write_h5_null_string_att(fid['/Radiometry/Solar Radiance'].id, 'Unit', 'Watt per square meter per steradian') - fid['/Radiometry/Thermal Flux'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Radiometry/Thermal Flux'].attrs['Quantisation Factor'] = np.array(0.25, dtype='float64') - write_h5_null_string_att(fid['/Radiometry/Thermal Flux'].id, 'Unit', 'Watt per square meter') - fid['/Radiometry/Thermal Radiance'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Radiometry/Thermal Radiance'].attrs['Quantisation Factor'] = np.array(0.05, dtype='float64') - write_h5_null_string_att(fid['/Radiometry/Thermal Radiance'].id, 'Unit', 'Watt per square meter per steradian') - fid.create_group('/Scene Identification') - write_h5_null_string_att(fid['/Scene Identification'].id, - 'Solar Angular Dependency Models Set Version', 'CERES_TRMM.1') - 
write_h5_null_string_att(fid['/Scene Identification'].id, - 'Thermal Angular Dependency Models Set Version', 'RMIB.3') - fid['/Scene Identification/Cloud Cover'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) - fid['/Scene Identification/Cloud Cover'].attrs['Quantisation Factor'] = np.array(0.01, dtype='float64') - write_h5_null_string_att(fid['/Scene Identification/Cloud Cover'].id, 'Unit', 'Percent') - fid['/Scene Identification/Cloud Optical Depth (logarithm)'] = \ - np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Scene Identification/Cloud Optical Depth (logarithm)'].attrs['Quantisation Factor'] = \ - np.array(0.00025, dtype='float64') - fid['/Scene Identification/Cloud Phase'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) - fid['/Scene Identification/Cloud Phase'].attrs['Quantisation Factor'] = np.array(0.01, dtype='float64') - write_h5_null_string_att(fid['/Scene Identification/Cloud Phase'].id, 'Unit', - 'Percent (Water=0%,Mixed,Ice=100%)') - fid.create_group('/Times') - fid['/Times/Time (per row)'] = np.ones(shape=(1237,), dtype=np.dtype('|S22')) + write_h5_null_string_att(fid["/GERB"].id, "Instrument Identifier", "G4") + fid.create_group("/GGSPS") + fid["/GGSPS"].attrs["L1.5 NANRG Product Version"] = np.array(-1, dtype="int32") + fid.create_group("/Geolocation") + write_h5_null_string_att(fid["/Geolocation"].id, "Geolocation File Name", + "G4_SEV4_L20_HR_GEO_20180111_181500_V010.hdf") + fid["/Geolocation"].attrs["Nominal Satellite Longitude (degrees)"] = np.array(0.0, dtype="float64") + fid.create_group("/Imager") + fid["/Imager"].attrs["Instrument Identifier"] = np.array(4, dtype="int32") + write_h5_null_string_att(fid["/Imager"].id, "Type", "SEVIRI") + fid.create_group("/RMIB") + fid.create_group("/Radiometry") + fid["/Radiometry"].attrs["SEVIRI Radiance Definition Flag"] = np.array(2, dtype="int32") + fid["/Radiometry/A Values (per GERB detector cell)"] = np.ones(shape=(256,), dtype=np.dtype(">f8")) + fid["/Radiometry/C Values (per GERB detector cell)"] = np.ones(shape=(256,), dtype=np.dtype(">f8")) + fid["/Radiometry/Longwave Correction"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Radiometry/Longwave Correction"].attrs["Offset"] = np.array(1.0, dtype="float64") + fid["/Radiometry/Longwave Correction"].attrs["Quantisation Factor"] = np.array(0.005, dtype="float64") + fid["/Radiometry/Shortwave Correction"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Radiometry/Shortwave Correction"].attrs["Offset"] = np.array(1.0, dtype="float64") + fid["/Radiometry/Shortwave Correction"].attrs["Quantisation Factor"] = np.array(0.005, dtype="float64") + fid["/Radiometry/Solar Flux"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Radiometry/Solar Flux"].attrs["Quantisation Factor"] = np.array(0.25, dtype="float64") + write_h5_null_string_att(fid["/Radiometry/Solar Flux"].id, "Unit", "Watt per square meter") + fid["/Radiometry/Solar Radiance"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Radiometry/Solar Radiance"].attrs["Quantisation Factor"] = np.array(0.05, dtype="float64") + write_h5_null_string_att(fid["/Radiometry/Solar Radiance"].id, "Unit", "Watt per square meter per steradian") + fid["/Radiometry/Thermal Flux"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Radiometry/Thermal Flux"].attrs["Quantisation Factor"] = np.array(0.25, dtype="float64") + write_h5_null_string_att(fid["/Radiometry/Thermal Flux"].id, "Unit", "Watt per square meter") + fid["/Radiometry/Thermal Radiance"] = 
np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Radiometry/Thermal Radiance"].attrs["Quantisation Factor"] = np.array(0.05, dtype="float64") + write_h5_null_string_att(fid["/Radiometry/Thermal Radiance"].id, "Unit", "Watt per square meter per steradian") + fid.create_group("/Scene Identification") + write_h5_null_string_att(fid["/Scene Identification"].id, + "Solar Angular Dependency Models Set Version", "CERES_TRMM.1") + write_h5_null_string_att(fid["/Scene Identification"].id, + "Thermal Angular Dependency Models Set Version", "RMIB.3") + fid["/Scene Identification/Cloud Cover"] = np.ones(shape=(1237, 1237), dtype=np.dtype("uint8")) + fid["/Scene Identification/Cloud Cover"].attrs["Quantisation Factor"] = np.array(0.01, dtype="float64") + write_h5_null_string_att(fid["/Scene Identification/Cloud Cover"].id, "Unit", "Percent") + fid["/Scene Identification/Cloud Optical Depth (logarithm)"] = \ + np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Scene Identification/Cloud Optical Depth (logarithm)"].attrs["Quantisation Factor"] = \ + np.array(0.00025, dtype="float64") + fid["/Scene Identification/Cloud Phase"] = np.ones(shape=(1237, 1237), dtype=np.dtype("uint8")) + fid["/Scene Identification/Cloud Phase"].attrs["Quantisation Factor"] = np.array(0.01, dtype="float64") + write_h5_null_string_att(fid["/Scene Identification/Cloud Phase"].id, "Unit", + "Percent (Water=0%,Mixed,Ice=100%)") + fid.create_group("/Times") + fid["/Times/Time (per row)"] = np.ones(shape=(1237,), dtype=np.dtype("|S22")) return filename @@ -123,7 +123,7 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory): @pytest.mark.parametrize("name", ["Solar Flux", "Thermal Flux", "Solar Radiance", "Thermal Radiance"]) def test_dataset_load(gerb_l2_hr_h5_dummy_file, name): """Test loading the solar flux component.""" - scene = Scene(reader='gerb_l2_hr_h5', filenames=[gerb_l2_hr_h5_dummy_file]) + scene = Scene(reader="gerb_l2_hr_h5", filenames=[gerb_l2_hr_h5_dummy_file]) scene.load([name]) assert scene[name].shape == (1237, 1237) assert np.nanmax((scene[name].to_numpy().flatten() - 0.25)) < 1e-6 diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index 6c7fcfb671..ced24a77ea 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -357,17 +357,17 @@ def test_get_orbit_polynomial_exceptions(self, orbit_polynomials, time): class TestMeirinkSlope: """Unit tests for the slope of Meirink calibration.""" - @pytest.mark.parametrize('platform_id', [321, 322, 323, 324]) - @pytest.mark.parametrize('channel_name', ['VIS006', 'VIS008', 'IR_016']) + @pytest.mark.parametrize("platform_id", [321, 322, 323, 324]) + @pytest.mark.parametrize("channel_name", ["VIS006", "VIS008", "IR_016"]) def test_get_meirink_slope_epoch(self, platform_id, channel_name): """Test the value of the slope of the Meirink calibration on 2000-01-01.""" - coefs = {'coefs': {}} - coefs['coefs']['NOMINAL'] = {'gain': -1, 'offset': -1} - coefs['coefs']['EXTERNAL'] = {} - calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK-2023', MEIRINK_EPOCH) - assert calibration_handler.get_gain_offset()[0] == MEIRINK_COEFS['2023'][platform_id][channel_name][0]/1000. 
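A few lines further down, the `parametrize` decorators are rewritten from a comma-separated name string with a tuple of cases to a tuple of name strings with a list of cases, matching the defaults of ruff's PT006/PT007 checks. A hedged sketch of the target style, with invented example values:

import pytest


@pytest.mark.parametrize(("value", "expected"), [
    (2, 4),
    (3, 9),
])
def test_square(value, expected):
    # Each (value, expected) pair runs as its own test case.
    assert value ** 2 == expected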
+ coefs = {"coefs": {}} + coefs["coefs"]["NOMINAL"] = {"gain": -1, "offset": -1} + coefs["coefs"]["EXTERNAL"] = {} + calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, "MEIRINK-2023", MEIRINK_EPOCH) + assert calibration_handler.get_gain_offset()[0] == MEIRINK_COEFS["2023"][platform_id][channel_name][0]/1000. - @pytest.mark.parametrize('platform_id,time,expected', ( + @pytest.mark.parametrize(("platform_id", "time", "expected"), [ (321, datetime(2005, 1, 18, 0, 0), [0.0250354716, 0.0315626684, 0.022880986]), (321, datetime(2010, 12, 31, 0, 0), [0.0258479563, 0.0322386887, 0.022895110500000003]), (322, datetime(2010, 1, 18, 0, 0), [0.021964051999999998, 0.027548445, 0.021576766]), @@ -376,12 +376,12 @@ def test_get_meirink_slope_epoch(self, platform_id, channel_name): (323, datetime(2010, 12, 31, 0, 0), [0.022181355200000002, 0.0280103379, 0.0229511138]), (324, datetime(2010, 1, 18, 0, 0), [0.0218362, 0.027580748, 0.022285370999999998]), (324, datetime(2015, 6, 1, 0, 0), [0.0225418, 0.028530172, 0.022248718999999997]), - )) + ]) def test_get_meirink_slope_2020(self, platform_id, time, expected): """Test the value of the slope of the Meirink calibration.""" - coefs = {'coefs': {}} - coefs['coefs']['NOMINAL'] = {'gain': -1, 'offset': -1} - coefs['coefs']['EXTERNAL'] = {} - for i, channel_name in enumerate(['VIS006', 'VIS008', 'IR_016']): - calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK-2023', time) + coefs = {"coefs": {}} + coefs["coefs"]["NOMINAL"] = {"gain": -1, "offset": -1} + coefs["coefs"]["EXTERNAL"] = {} + for i, channel_name in enumerate(["VIS006", "VIS008", "IR_016"]): + calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, "MEIRINK-2023", time) assert abs(calibration_handler.get_gain_offset()[0] - expected[i]) < 1e-6 diff --git a/satpy/tests/reader_tests/test_utils.py b/satpy/tests/reader_tests/test_utils.py index 89a79ded0d..6471159449 100644 --- a/satpy/tests/reader_tests/test_utils.py +++ b/satpy/tests/reader_tests/test_utils.py @@ -202,16 +202,16 @@ def test_sub_area(self, adef): def test_np2str(self): """Test the np2str function.""" # byte object - npbytes = np.bytes_('hej') - self.assertEqual(hf.np2str(npbytes), 'hej') + npbytes = np.bytes_("hej") + assert hf.np2str(npbytes) == "hej" # single element numpy array np_arr = np.array([npbytes]) - self.assertEqual(hf.np2str(np_arr), 'hej') + assert hf.np2str(np_arr) == "hej" # scalar numpy array np_arr = np.array(npbytes) - self.assertEqual(hf.np2str(np_arr), 'hej') + assert hf.np2str(np_arr) == "hej" # multi-element array npbytes = np.array([npbytes, npbytes]) diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index 61b94dc3d9..4aece73487 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -117,18 +117,18 @@ def test_basic_default_not_provided(self, sunz_ds1, as_32bit): if as_32bit: sunz_ds1 = sunz_ds1.astype(np.float32) - comp = SunZenithCorrector(name='sza_test', modifiers=tuple()) - res = comp((sunz_ds1,), test_attr='test') + comp = SunZenithCorrector(name="sza_test", modifiers=tuple()) + res = comp((sunz_ds1,), test_attr="test") np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) - assert 'y' in res.coords - assert 'x' in res.coords - ds1 = sunz_ds1.copy().drop_vars(('y', 'x')) - res = comp((ds1,), test_attr='test') + assert "y" in res.coords + assert "x" in res.coords + ds1 = sunz_ds1.copy().drop_vars(("y", "x")) + res = 
comp((ds1,), test_attr="test") res_np = res.compute() np.testing.assert_allclose(res_np.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) assert res.dtype == res_np.dtype - assert 'y' not in res.coords - assert 'x' not in res.coords + assert "y" not in res.coords + assert "x" not in res.coords def test_basic_lims_not_provided(self, sunz_ds1): """Test custom limits when SZA isn't provided.""" @@ -169,20 +169,20 @@ class TestSunZenithReducer: def setup_class(cls): """Initialze SunZenithReducer classes that shall be tested.""" from satpy.modifiers.geometry import SunZenithReducer - cls.default = SunZenithReducer(name='sza_reduction_test_default', modifiers=tuple()) - cls.custom = SunZenithReducer(name='sza_reduction_test_custom', modifiers=tuple(), + cls.default = SunZenithReducer(name="sza_reduction_test_default", modifiers=tuple()) + cls.custom = SunZenithReducer(name="sza_reduction_test_custom", modifiers=tuple(), correction_limit=70, max_sza=95, strength=3.0) def test_default_settings(self, sunz_ds1, sunz_sza): """Test default settings with sza data available.""" - res = self.default((sunz_ds1, sunz_sza), test_attr='test') + res = self.default((sunz_ds1, sunz_sza), test_attr="test") np.testing.assert_allclose(res.values, np.array([[0.00242814, 0.00235669], [0.00245885, 0.00238707]]), rtol=1e-5) def test_custom_settings(self, sunz_ds1, sunz_sza): """Test custom settings with sza data available.""" - res = self.custom((sunz_ds1, sunz_sza), test_attr='test') + res = self.custom((sunz_ds1, sunz_sza), test_attr="test") np.testing.assert_allclose(res.values, np.array([[0.01041319, 0.01030033], [0.01046164, 0.01034834]]), rtol=1e-5) @@ -190,8 +190,8 @@ def test_custom_settings(self, sunz_ds1, sunz_sza): def test_invalid_max_sza(self, sunz_ds1, sunz_sza): """Test invalid max_sza with sza data available.""" from satpy.modifiers.geometry import SunZenithReducer - with pytest.raises(ValueError): - SunZenithReducer(name='sza_reduction_test_invalid', modifiers=tuple(), max_sza=None) + with pytest.raises(ValueError, match="`max_sza` must be defined when using the SunZenithReducer."): + SunZenithReducer(name="sza_reduction_test_invalid", modifiers=tuple(), max_sza=None) class TestNIRReflectance(unittest.TestCase): diff --git a/satpy/tests/test_resample.py b/satpy/tests/test_resample.py index 49036d871f..66e93009d2 100644 --- a/satpy/tests/test_resample.py +++ b/satpy/tests/test_resample.py @@ -132,10 +132,10 @@ def test_type_preserve(self): class TestKDTreeResampler(unittest.TestCase): """Test the kd-tree resampler.""" - @mock.patch('satpy.resample.xr.Dataset') - @mock.patch('satpy.resample.zarr.open') - @mock.patch('satpy.resample.KDTreeResampler._create_cache_filename') - @mock.patch('pyresample.kd_tree.XArrayResamplerNN') + @mock.patch("satpy.resample.xr.Dataset") + @mock.patch("satpy.resample.zarr.open") + @mock.patch("satpy.resample.KDTreeResampler._create_cache_filename") + @mock.patch("pyresample.kd_tree.XArrayResamplerNN") def test_kd_resampling(self, xr_resampler, create_filename, zarr_open, xr_dset): """Test the kd resampler.""" diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index f03ca60fae..c9717921df 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -28,7 +28,6 @@ import numpy as np import pytest import xarray as xr -from pytest import approx # noqa: PT013 from satpy.utils import ( angle2xyz, @@ -193,8 +192,8 @@ def test_get_satpos(self, included_prefixes, preference, expected_result): "attrs", [ {}, - {'orbital_parameters': 
{'projection_longitude': 1}}, - {'satellite_altitude': 1} + {"orbital_parameters": {"projection_longitude": 1}}, + {"satellite_altitude": 1} ] ) def test_get_satpos_fails_with_informative_error(self, attrs): diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 3a26e6c981..18b5947eb6 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -526,31 +526,31 @@ def get_test_attrs(self): Attributes, encoded attributes, encoded and flattened attributes """ - attrs = {'name': 'IR_108', - 'start_time': datetime(2018, 1, 1, 0), - 'end_time': datetime(2018, 1, 1, 0, 15), - 'int': 1, - 'float': 1.0, - 'none': None, # should be dropped - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': True, - 'numpy_void': np.void(0), - 'numpy_bytes': np.bytes_('test'), - 'numpy_string': np.str_('test'), - 'list': [1, 2, np.float64(3)], - 'nested_list': ["1", ["2", [3]]], - 'bool': True, - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': np.array([True, False, True]), - 'array_2d': np.array([[1, 2], [3, 4]]), - 'array_3d': np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), - 'dict': {'a': 1, 'b': 2}, - 'nested_dict': {'l1': {'l2': {'l3': np.array([1, 2, 3], dtype='uint8')}}}, - 'raw_metadata': OrderedDict([ - ('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])), - ('flag', np.bool_(True)), - ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))])) + attrs = {"name": "IR_108", + "start_time": datetime(2018, 1, 1, 0), + "end_time": datetime(2018, 1, 1, 0, 15), + "int": 1, + "float": 1.0, + "none": None, # should be dropped + "numpy_int": np.uint8(1), + "numpy_float": np.float32(1), + "numpy_bool": True, + "numpy_void": np.void(0), + "numpy_bytes": np.bytes_("test"), + "numpy_string": np.str_("test"), + "list": [1, 2, np.float64(3)], + "nested_list": ["1", ["2", [3]]], + "bool": True, + "array": np.array([1, 2, 3], dtype="uint8"), + "array_bool": np.array([True, False, True]), + "array_2d": np.array([[1, 2], [3, 4]]), + "array_3d": np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), + "dict": {"a": 1, "b": 2}, + "nested_dict": {"l1": {"l2": {"l3": np.array([1, 2, 3], dtype="uint8")}}}, + "raw_metadata": OrderedDict([ + ("recarray", np.zeros(3, dtype=[("x", "i4"), ("y", "u1")])), + ("flag", np.bool_(True)), + ("dict", OrderedDict([("a", 1), ("b", np.array([1, 2, 3], dtype="uint8"))])) ])} encoded = {"name": "IR_108", "start_time": "2018-01-01 00:00:00", diff --git a/satpy/utils.py b/satpy/utils.py index 7ee3243d93..3996968043 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -505,24 +505,15 @@ def check_satpy(readers=None, writers=None, extras=None): from satpy.readers import configs_for_reader from satpy.writers import configs_for_writer - print("Readers") - print("=======") - for reader, res in sorted(_check_yaml_configs(configs_for_reader(reader=readers), "reader").items()): - print(reader + ": ", res) - print() - - print("Writers") - print("=======") - for writer, res in sorted(_check_yaml_configs(configs_for_writer(writer=writers), "writer").items()): - print(writer + ": ", res) - print() - - print("Extras") - print("======") + for _reader, _res in sorted(_check_yaml_configs(configs_for_reader(reader=readers), "reader").items()): + pass + + for _writer, _res in sorted(_check_yaml_configs(configs_for_writer(writer=writers), "writer").items()): + pass + module_names = extras if extras is not None else ("cartopy", "geoviews") - for module_name, res in 
sorted(_check_import(module_names).items()): - print(module_name + ": ", res) - print() + for _module_name, _res in sorted(_check_import(module_names).items()): + pass def unify_chunks(*data_arrays: xr.DataArray) -> tuple[xr.DataArray, ...]: diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 2d56d9f292..506a8bf561 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -195,20 +195,20 @@ # Numpy datatypes compatible with all netCDF4 backends. ``np.str_`` is # excluded because h5py (and thus h5netcdf) has problems with unicode, see # https://github.com/h5py/h5py/issues/624.""" -NC4_DTYPES = [np.dtype('int8'), np.dtype('uint8'), - np.dtype('int16'), np.dtype('uint16'), - np.dtype('int32'), np.dtype('uint32'), - np.dtype('int64'), np.dtype('uint64'), - np.dtype('float32'), np.dtype('float64'), +NC4_DTYPES = [np.dtype("int8"), np.dtype("uint8"), + np.dtype("int16"), np.dtype("uint16"), + np.dtype("int32"), np.dtype("uint32"), + np.dtype("int64"), np.dtype("uint64"), + np.dtype("float32"), np.dtype("float64"), np.bytes_] # Unsigned and int64 isn't CF 1.7 compatible # Note: Unsigned and int64 are CF 1.9 compatible -CF_DTYPES = [np.dtype('int8'), - np.dtype('int16'), - np.dtype('int32'), - np.dtype('float32'), - np.dtype('float64'), +CF_DTYPES = [np.dtype("int8"), + np.dtype("int16"), + np.dtype("int32"), + np.dtype("float32"), + np.dtype("float64"), np.bytes_] CF_VERSION = "CF-1.7" @@ -581,8 +581,8 @@ def _remove_satpy_attrs(new_data): def _format_prerequisites_attrs(dataarray): """Reformat prerequisites attribute value to string.""" - if 'prerequisites' in dataarray.attrs: - dataarray.attrs['prerequisites'] = [np.bytes_(str(prereq)) for prereq in dataarray.attrs['prerequisites']] + if "prerequisites" in dataarray.attrs: + dataarray.attrs["prerequisites"] = [np.bytes_(str(prereq)) for prereq in dataarray.attrs["prerequisites"]] return dataarray @@ -809,7 +809,7 @@ def make_cf_dataarray(dataarray, Prepend dataset name with this if starting with a digit. The default is ``"CHANNEL_"``. - Returns + Returns: ------- new_data : xr.DataArray CF-compliant xr.DataArray. @@ -859,7 +859,7 @@ def _collect_cf_dataset(list_dataarrays, Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. - Returns + Returns: ------- ds : xr.Dataset A partially CF-compliant xr.Dataset @@ -977,7 +977,7 @@ def collect_cf_datasets(list_dataarrays, It is used to create grouped netCDFs using the CF_Writer. If None (the default), no groups will be created. 
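The `Returns` to `Returns:` fixes in the surrounding docstrings line up with the pydocstyle D rules this series experiments with; a later hunk in the series shows `convention = "google"` under `[tool.ruff.pydocstyle]`, and Google-style section names must end with a colon (e.g. rule D416). As a small illustration of a cleanly sectioned Google-style docstring, assumed rather than taken from the patch:

def scale(value: float, factor: float = 2.0) -> float:
    """Scale a value by a constant factor.

    Args:
        value: The number to scale.
        factor: Multiplier applied to ``value``.

    Returns:
        The scaled value.
    """
    return value * factor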
- Returns + Returns: ------- grouped_datasets : dict A dictionary of CF-compliant xr.Dataset: {group_name: xr.Dataset} diff --git a/setup.py b/setup.py index 6bea7bf639..cd1c43422e 100644 --- a/setup.py +++ b/setup.py @@ -22,10 +22,10 @@ from setuptools import find_packages, setup -requires = ['numpy >=1.21', 'pillow', 'pyresample >=1.24.0', 'trollsift', - 'trollimage >=1.20', 'pykdtree', 'pyyaml >=5.1', 'xarray >=0.10.1, !=0.13.0', - 'dask[array] >=0.17.1', 'pyproj>=2.2', 'zarr', 'donfig', 'appdirs', - 'packaging', 'pooch', 'pyorbital'] +requires = ["numpy >=1.21", "pillow", "pyresample >=1.24.0", "trollsift", + "trollimage >=1.20", "pykdtree", "pyyaml >=5.1", "xarray >=0.10.1, !=0.13.0", + "dask[array] >=0.17.1", "pyproj>=2.2", "zarr", "donfig", "appdirs", + "packaging", "pooch", "pyorbital"] test_requires = ["behave", "h5py", "netCDF4", "pyhdf", "imageio", "rasterio", "geoviews", "trollimage", "fsspec", "bottleneck", diff --git a/utils/coord2area_def.py b/utils/coord2area_def.py index 8b6aa0478b..81fb93678b 100644 --- a/utils/coord2area_def.py +++ b/utils/coord2area_def.py @@ -126,21 +126,6 @@ " +".join(("proj=" + proj + ",lat_0=" + str(lat_0) + ",lon_0=" + str(lon_0) + ",ellps=WGS84").split(",")) - print("### " + proj4_string) - print() - print(name + ":") - print(" description: " + name) - print(" projection:") - print(" proj: " + proj) - print(" ellps: WGS84") - print(" lat_0: " + str(lat_0)) - print(" lon_0: " + str(lon_0)) - print(" shape:") - print(" height: " + str(ysize)) - print(" width: " + str(xsize)) - print(" area_extent:") - print(" lower_left_xy: [%f, %f]" % (area_extent[0], area_extent[1])) - print(" upper_right_xy: [%f, %f]" % (area_extent[2], area_extent[3])) if args.shapes is None: sys.exit(0) diff --git a/utils/fetch_avhrr_calcoeffs.py b/utils/fetch_avhrr_calcoeffs.py index f73975df95..8212c5531e 100644 --- a/utils/fetch_avhrr_calcoeffs.py +++ b/utils/fetch_avhrr_calcoeffs.py @@ -112,7 +112,6 @@ def get_all_coeffs(): coeffs[platform] = {} for chan in URLS[platform].keys(): url = URLS[platform][chan] - print(url) page = get_page(url) coeffs[platform][chan] = get_coeffs(page) @@ -134,7 +133,6 @@ def save_coeffs(coeffs, out_dir=""): fid[chan]["intercept2"] = coeffs[platform][chan]["intercept2"] fid.close() - print("Calibration coefficients saved for %s" % platform) def main(): From b75008beba074be25b279dc423f94d49eb11ab2b Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 26 Oct 2023 13:46:35 +0200 Subject: [PATCH 0605/1416] Add ruff rules --- pyproject.toml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 8d81b23818..9677cbf09d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,8 +15,7 @@ line_length = 120 [tool.ruff] # See https://docs.astral.sh/ruff/rules/ -# "A", "B", "S", "N", "D", -select = ["E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"] +select = ["E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20", "A", "B", "S", "N", "D"] ignore = ["B905"] # only available from python 3.10 line-length = 120 From fd9be77662d4cdcaf74723ceb1df1df69607b7fb Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 26 Oct 2023 13:49:17 +0200 Subject: [PATCH 0606/1416] Fix ruff rules --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 9677cbf09d..a550f275e8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,8 +15,8 @@ line_length = 120 [tool.ruff] # See https://docs.astral.sh/ruff/rules/ -select = ["E", "W", "F", "I", 
"PT", "TID", "C90", "Q", "T10", "T20", "A", "B", "S", "N", "D"] -ignore = ["B905"] # only available from python 3.10 +# In the future, add "A", "B", "S", "N", "D" +select = ["E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"] line-length = 120 [tool.ruff.per-file-ignores] From f2f91938d4281efd2a4057f4f5f0d8f009139ca0 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 26 Oct 2023 13:52:00 +0200 Subject: [PATCH 0607/1416] Remove lint ci job --- .github/workflows/ci.yaml | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index ee31213bf2..6e1fdfc781 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -11,28 +11,9 @@ env: CACHE_NUMBER: 0 jobs: - lint: - name: lint and style checks - runs-on: ubuntu-latest - steps: - - name: Checkout source - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: 3.9 - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install ruff pytest - - name: Install Satpy - run: | - pip install -e . - test: runs-on: ${{ matrix.os }} continue-on-error: ${{ matrix.experimental }} - needs: [lint] strategy: fail-fast: true matrix: From 8b7938f6c0d96a5995031652d7997e31afd82427 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 26 Oct 2023 09:16:38 -0500 Subject: [PATCH 0608/1416] Fix failing demo tests --- satpy/tests/test_demo.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/satpy/tests/test_demo.py b/satpy/tests/test_demo.py index d1dddd5e8d..32e8016f58 100644 --- a/satpy/tests/test_demo.py +++ b/satpy/tests/test_demo.py @@ -89,7 +89,7 @@ def test_get_us_midlatitude_cyclone_abi(self, gcsfs_mod): gcsfs_mod.GCSFileSystem.return_value = gcsfs_inst gcsfs_inst.glob.return_value = ["a.nc", "b.nc"] # expected 16 files, got 2 - self.assertRaises(AssertionError, get_us_midlatitude_cyclone_abi) + self.assertRaises(RuntimeError, get_us_midlatitude_cyclone_abi) # unknown access method self.assertRaises(NotImplementedError, get_us_midlatitude_cyclone_abi, method="unknown") @@ -109,7 +109,7 @@ def test_get_hurricane_florence_abi(self, gcsfs_mod): # only return 5 results total gcsfs_inst.glob.side_effect = _GlobHelper([5, 0]) # expected 16 files * 10 frames, got 16 * 5 - self.assertRaises(AssertionError, get_hurricane_florence_abi) + self.assertRaises(RuntimeError, get_hurricane_florence_abi) self.assertRaises(NotImplementedError, get_hurricane_florence_abi, method="unknown") gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) @@ -244,11 +244,12 @@ class _FakeRequest: requests_log: list[str] = [] - def __init__(self, url, stream=None): + def __init__(self, url, stream=None, timeout=None): self._filename = os.path.basename(url) self.headers = {} self.requests_log.append(url) del stream # just mimicking requests 'get' + del timeout # just mimicking requests 'get' def __enter__(self): return self From 6bf2ce9efcd9486251d95a2520e895423d2baac3 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 26 Oct 2023 09:26:35 -0500 Subject: [PATCH 0609/1416] Revert changes to check_satpy logic --- satpy/utils.py | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/satpy/utils.py b/satpy/utils.py index 3996968043..f9ea05ca79 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -505,15 +505,24 @@ def check_satpy(readers=None, writers=None, extras=None): from satpy.readers import configs_for_reader from satpy.writers import 
configs_for_writer - for _reader, _res in sorted(_check_yaml_configs(configs_for_reader(reader=readers), "reader").items()): - pass - - for _writer, _res in sorted(_check_yaml_configs(configs_for_writer(writer=writers), "writer").items()): - pass - + print("Readers") # noqa: T201 + print("=======") # noqa: T201 + for reader, res in sorted(_check_yaml_configs(configs_for_reader(reader=readers), "reader").items()): + print(reader + ": ", res) # noqa: T201 + print() # noqa: T201 + + print("Writers") # noqa: T201 + print("=======") # noqa: T201 + for writer, res in sorted(_check_yaml_configs(configs_for_writer(writer=writers), "writer").items()): + print(writer + ": ", res) # noqa: T201 + print() # noqa: T201 + + print("Extras") # noqa: T201 + print("======") # noqa: T201 module_names = extras if extras is not None else ("cartopy", "geoviews") - for _module_name, _res in sorted(_check_import(module_names).items()): - pass + for module_name, res in sorted(_check_import(module_names).items()): + print(module_name + ": ", res) # noqa: T201 + print() # noqa: T201 def unify_chunks(*data_arrays: xr.DataArray) -> tuple[xr.DataArray, ...]: From 529055e8e35ce1480ff43fa7c14b3d0513aa6cc2 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 26 Oct 2023 09:30:52 -0500 Subject: [PATCH 0610/1416] Fix various numpy related test failures --- satpy/tests/reader_tests/test_generic_image.py | 2 +- satpy/tests/reader_tests/test_seviri_l2_grib.py | 2 +- satpy/tests/reader_tests/test_tropomi_l2.py | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/satpy/tests/reader_tests/test_generic_image.py b/satpy/tests/reader_tests/test_generic_image.py index 393bbfa98d..0ea143269f 100644 --- a/satpy/tests/reader_tests/test_generic_image.py +++ b/satpy/tests/reader_tests/test_generic_image.py @@ -267,7 +267,7 @@ def test_GenericImageFileHandler_nodata(self): info = {"nodata_handling": "nan_mask"} dataset = reader.get_dataset(foo, info) assert isinstance(dataset, xr.DataArray) - assert np.all(np.isnan(dataset.data[0][:10, :10].compute())) is True + assert np.all(np.isnan(dataset.data[0][:10, :10].compute())) assert np.isnan(dataset.attrs["_FillValue"]) info = {"nodata_handling": "fill_value"} diff --git a/satpy/tests/reader_tests/test_seviri_l2_grib.py b/satpy/tests/reader_tests/test_seviri_l2_grib.py index d57fda4e79..d3b40d6caa 100644 --- a/satpy/tests/reader_tests/test_seviri_l2_grib.py +++ b/satpy/tests/reader_tests/test_seviri_l2_grib.py @@ -133,7 +133,7 @@ def test_data_reading(self, da_, xr_): # Checks that dask.array has been called with the correct arguments name, args, kwargs = da_.mock_calls[0] - assert np.all(args[0] == np.ones((1200, 1000))) is True + assert np.all(args[0] == np.ones((1200, 1000))) assert args[1] == CHUNK_SIZE # Checks that xarray.DataArray has been called with the correct arguments diff --git a/satpy/tests/reader_tests/test_tropomi_l2.py b/satpy/tests/reader_tests/test_tropomi_l2.py index 05d0717538..7305bf365c 100644 --- a/satpy/tests/reader_tests/test_tropomi_l2.py +++ b/satpy/tests/reader_tests/test_tropomi_l2.py @@ -196,6 +196,6 @@ def test_load_bounds(self): assert "x" in dest.dims assert DEFAULT_FILE_SHAPE[0] + 1 == dest.shape[0] assert DEFAULT_FILE_SHAPE[1] + 1 == dest.shape[1] - assert np.testing.assert_array_equal(dest[:-1, :-1], ds[key][:, :, 0]) - assert np.testing.assert_array_equal(dest[-1, :-1], ds[key][-1, :, 3]) - assert np.testing.assert_array_equal(dest[:, -1], np.append(ds[key][:, -1, 1], ds[key][-1:, -1:, 2])) + np.testing.assert_array_equal(dest[:-1, 
:-1], ds[key][:, :, 0]) + np.testing.assert_array_equal(dest[-1, :-1], ds[key][-1, :, 3]) + np.testing.assert_array_equal(dest[:, -1], np.append(ds[key][:, -1, 1], ds[key][-1:, -1:, 2])) From 9c9cc875286f37fe08b0248c877f927b4fe62ede Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 26 Oct 2023 09:42:06 -0500 Subject: [PATCH 0611/1416] Fix extra whitespace after commas --- satpy/readers/eum_base.py | 10 +++++----- satpy/readers/goes_imager_nc.py | 2 +- satpy/readers/seviri_l2_bufr.py | 2 +- satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py | 6 +++--- satpy/tests/reader_tests/test_seviri_l2_bufr.py | 2 +- .../tests/reader_tests/test_viirs_edr_active_fires.py | 4 ++-- satpy/tests/test_utils.py | 2 +- 7 files changed, 14 insertions(+), 14 deletions(-) diff --git a/satpy/readers/eum_base.py b/satpy/readers/eum_base.py index cc82ee008d..916ba9d444 100644 --- a/satpy/readers/eum_base.py +++ b/satpy/readers/eum_base.py @@ -88,14 +88,14 @@ def recarray2dict(arr): def get_service_mode(instrument_name, ssp_lon): """Get information about service mode for a given instrument and subsatellite longitude.""" - service_modes = {"seviri": {"0.0": {"service_name": "fes", "service_desc": "Full Earth Scanning service"}, - "9.5": {"service_name": "rss", "service_desc": "Rapid Scanning Service"}, + service_modes = {"seviri": {"0.0": {"service_name": "fes", "service_desc": "Full Earth Scanning service"}, + "9.5": {"service_name": "rss", "service_desc": "Rapid Scanning Service"}, "41.5": {"service_name": "iodc", "service_desc": "Indian Ocean Data Coverage service"}, "45.5": {"service_name": "iodc", "service_desc": "Indian Ocean Data Coverage service"} }, - "fci": {"0.0": {"service_name": "fdss", "service_desc": "Full Disk Scanning Service"}, - "9.5": {"service_name": "rss", "service_desc": "Rapid Scanning Service"}, - }, + "fci": {"0.0": {"service_name": "fdss", "service_desc": "Full Disk Scanning Service"}, + "9.5": {"service_name": "rss", "service_desc": "Rapid Scanning Service"}, + }, } unknown_modes = {"service_name": "unknown", "service_desc": "unknown"} diff --git a/satpy/readers/goes_imager_nc.py b/satpy/readers/goes_imager_nc.py index 4cb7cf8610..7bb8fac84c 100644 --- a/satpy/readers/goes_imager_nc.py +++ b/satpy/readers/goes_imager_nc.py @@ -317,7 +317,7 @@ "GOES-14": {"00_7": {"slope": [5.874693E-1, 5.865367E-1, 5.862807E-1, 5.864086E-1, 5.857146E-1, 5.852004E-1, 5.860814E-1, 5.841697E-1], - "offset": [-17.037, -17.010, -17.002, -17.006, + "offset": [-17.037, -17.010, -17.002, -17.006, -16.986, -16.971, -16.996, -16.941], "x0": 29, "k": 1.88772E-3}, diff --git a/satpy/readers/seviri_l2_bufr.py b/satpy/readers/seviri_l2_bufr.py index 3b7188945c..02aa0c2767 100644 --- a/satpy/readers/seviri_l2_bufr.py +++ b/satpy/readers/seviri_l2_bufr.py @@ -46,7 +46,7 @@ CHUNK_SIZE = get_legacy_chunk_size() logger = logging.getLogger("SeviriL2Bufr") -data_center_dict = {55: {"ssp": "E0415", "name": "08"}, 56: {"ssp": "E0455", "name": "09"}, +data_center_dict = {55: {"ssp": "E0415", "name": "08"}, 56: {"ssp": "E0455", "name": "09"}, 57: {"ssp": "E0095", "name": "10"}, 70: {"ssp": "E0000", "name": "11"}} seg_size_dict = {"seviri_l2_bufr_asr": 16, "seviri_l2_bufr_cla": 16, diff --git a/satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py b/satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py index 20a537a4a9..7055a4df6d 100644 --- a/satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py +++ b/satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py @@ -310,9 +310,9 @@ def setUp(self): self._header = np.zeros(1, 
dtype=_HEADERTYPE) self._header["satid"][0] = 3 self._header["instrument"][0] = 12 - self._header["tempradcnv"][0] = [[2968720, 0, 1000000, 5236956, 0], - [1000000, 6114597, 0, 1000000, 6114597], - [-3100, 1000270, 6348092, 0, 1000000]] + self._header["tempradcnv"][0] = [[2968720, 0, 1000000, 5236956, 0], + [1000000, 6114597, 0, 1000000, 6114597], + [-3100, 1000270, 6348092, 0, 1000000]] self._data = np.zeros(3, dtype=_SCANTYPE) self._data["scnlinyr"][:] = 2020 self._data["scnlindy"][:] = 261 diff --git a/satpy/tests/reader_tests/test_seviri_l2_bufr.py b/satpy/tests/reader_tests/test_seviri_l2_bufr.py index 2979084974..ec3fdf7b56 100644 --- a/satpy/tests/reader_tests/test_seviri_l2_bufr.py +++ b/satpy/tests/reader_tests/test_seviri_l2_bufr.py @@ -29,7 +29,7 @@ from satpy.tests.utils import make_dataid -FILETYPE_INFO = {"file_type": "seviri_l2_bufr_asr"} +FILETYPE_INFO = {"file_type": "seviri_l2_bufr_asr"} FILENAME_INFO = {"start_time": "20191112000000", "spacecraft": "MSG2"} diff --git a/satpy/tests/reader_tests/test_viirs_edr_active_fires.py b/satpy/tests/reader_tests/test_viirs_edr_active_fires.py index 7063814c34..7bede07292 100644 --- a/satpy/tests/reader_tests/test_viirs_edr_active_fires.py +++ b/satpy/tests/reader_tests/test_viirs_edr_active_fires.py @@ -73,7 +73,7 @@ def get_test_content(self, filename, filename_info, filename_type): file_content["Fire Pixels/attr/units"] = "none" file_content["Fire Pixels/shape"] = DEFAULT_FILE_SHAPE - attrs = ("FP_latitude", "FP_longitude", "FP_T13", "FP_confidence") + attrs = ("FP_latitude", "FP_longitude", "FP_T13", "FP_confidence") convert_file_content_to_data_array( file_content, attrs=attrs, dims=("z", "fakeDim0", "fakeDim1")) @@ -97,7 +97,7 @@ def get_test_content(self, filename, filename_info, filename_type): file_content["Fire Pixels/FP_T4/attr/units"] = "kelvins" file_content["Fire Pixels/FP_confidence"] = DEFAULT_DETECTION_FILE_DATA - attrs = ("FP_latitude", "FP_longitude", "FP_T13", "FP_confidence") + attrs = ("FP_latitude", "FP_longitude", "FP_T13", "FP_confidence") convert_file_content_to_data_array( file_content, attrs=attrs, dims=("z", "fakeDim0", "fakeDim1")) diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index c9717921df..6f5db02087 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -283,7 +283,7 @@ def test_specific_check_satpy(self): if len(call[1]) > 0 and "__fake" in call[1][0]: assert "ok" not in call[1][1] checked_fake = True - assert checked_fake, "Did not find __fake module mentioned in checks" + assert checked_fake, "Did not find __fake module mentioned in checks" def test_debug_on(caplog): From d3743fe2d6e223a3071895d054280b1cc4ead20d Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 26 Oct 2023 17:00:28 +0200 Subject: [PATCH 0612/1416] Restore removed prints --- pyproject.toml | 2 ++ utils/coord2area_def.py | 15 +++++++++++++++ utils/fetch_avhrr_calcoeffs.py | 2 ++ 3 files changed, 19 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index a550f275e8..1282120a59 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,6 +21,8 @@ line-length = 120 [tool.ruff.per-file-ignores] "satpy/tests/*" = ["S101"] # assert allowed in tests +"utils/coord2area_def.py" = ["T201"] # allow print +"fetch_avhrr_calcoeffs.py" = ["T201"] # allow print [tool.ruff.pydocstyle] convention = "google" diff --git a/utils/coord2area_def.py b/utils/coord2area_def.py index 81fb93678b..8b6aa0478b 100644 --- a/utils/coord2area_def.py +++ b/utils/coord2area_def.py @@ -126,6 +126,21 @@ " 
+".join(("proj=" + proj + ",lat_0=" + str(lat_0) + ",lon_0=" + str(lon_0) + ",ellps=WGS84").split(",")) + print("### " + proj4_string) + print() + print(name + ":") + print(" description: " + name) + print(" projection:") + print(" proj: " + proj) + print(" ellps: WGS84") + print(" lat_0: " + str(lat_0)) + print(" lon_0: " + str(lon_0)) + print(" shape:") + print(" height: " + str(ysize)) + print(" width: " + str(xsize)) + print(" area_extent:") + print(" lower_left_xy: [%f, %f]" % (area_extent[0], area_extent[1])) + print(" upper_right_xy: [%f, %f]" % (area_extent[2], area_extent[3])) if args.shapes is None: sys.exit(0) diff --git a/utils/fetch_avhrr_calcoeffs.py b/utils/fetch_avhrr_calcoeffs.py index 8212c5531e..f73975df95 100644 --- a/utils/fetch_avhrr_calcoeffs.py +++ b/utils/fetch_avhrr_calcoeffs.py @@ -112,6 +112,7 @@ def get_all_coeffs(): coeffs[platform] = {} for chan in URLS[platform].keys(): url = URLS[platform][chan] + print(url) page = get_page(url) coeffs[platform][chan] = get_coeffs(page) @@ -133,6 +134,7 @@ def save_coeffs(coeffs, out_dir=""): fid[chan]["intercept2"] = coeffs[platform][chan]["intercept2"] fid.close() + print("Calibration coefficients saved for %s" % platform) def main(): From 3f2bbc48d23263e7bc605d222174f989657c4930 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 26 Oct 2023 17:23:37 +0200 Subject: [PATCH 0613/1416] Fix docstrings --- satpy/readers/goes_imager_nc.py | 2 +- satpy/readers/hrit_jma.py | 2 +- satpy/readers/iasi_l2_so2_bufr.py | 2 +- satpy/readers/mviri_l1b_fiduceo_nc.py | 2 +- satpy/readers/satpy_cf_nc.py | 4 ++-- satpy/readers/seviri_l1b_hrit.py | 2 +- satpy/readers/seviri_l1b_icare.py | 2 +- satpy/readers/seviri_l1b_native.py | 2 +- 8 files changed, 9 insertions(+), 9 deletions(-) diff --git a/satpy/readers/goes_imager_nc.py b/satpy/readers/goes_imager_nc.py index 7bb8fac84c..214852fffd 100644 --- a/satpy/readers/goes_imager_nc.py +++ b/satpy/readers/goes_imager_nc.py @@ -201,7 +201,7 @@ References: -========== +=========== - `[GVAR]`_ GVAR transmission format - `[BOOK-N]`_ GOES-N databook diff --git a/satpy/readers/hrit_jma.py b/satpy/readers/hrit_jma.py index 865cbc5dd7..c273b9b578 100644 --- a/satpy/readers/hrit_jma.py +++ b/satpy/readers/hrit_jma.py @@ -35,7 +35,7 @@ Example: -------- +-------- Here is an example how to read Himwari-8 HRIT data with Satpy: .. code-block:: python diff --git a/satpy/readers/iasi_l2_so2_bufr.py b/satpy/readers/iasi_l2_so2_bufr.py index a63d434a86..b5088aa041 100644 --- a/satpy/readers/iasi_l2_so2_bufr.py +++ b/satpy/readers/iasi_l2_so2_bufr.py @@ -32,7 +32,7 @@ Scene(reader="iasi_l2_so2_bufr", filenames=fnames) Example: -------- +-------- Here is an example how to read the data in satpy: .. code-block:: python diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index 043c45d4cc..d50ecab97f 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -44,7 +44,7 @@ Example: -------- +-------- This is how to read FIDUCEO MVIRI FCDR data in satpy: .. code-block:: python diff --git a/satpy/readers/satpy_cf_nc.py b/satpy/readers/satpy_cf_nc.py index 120a14be36..7a26ead72b 100644 --- a/satpy/readers/satpy_cf_nc.py +++ b/satpy/readers/satpy_cf_nc.py @@ -37,7 +37,7 @@ Example: -------- +-------- Here is an example how to read the data in satpy: .. code-block:: python @@ -92,7 +92,7 @@ Example: -------- +-------- Here is an example how to read the data in satpy: .. 
code-block:: python diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 8e3fb148bc..3b3aa82277 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -85,7 +85,7 @@ Example: -------- +-------- Here is an example how to read the data in satpy: .. code-block:: python diff --git a/satpy/readers/seviri_l1b_icare.py b/satpy/readers/seviri_l1b_icare.py index 5c151d64a2..2024c46532 100644 --- a/satpy/readers/seviri_l1b_icare.py +++ b/satpy/readers/seviri_l1b_icare.py @@ -33,7 +33,7 @@ VX-XX is the processing version number Example: -------- +-------- Here is an example how to read the data in satpy: .. code-block:: python diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index 4593d3fe3d..361dd1bb50 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -38,7 +38,7 @@ of :class:`NativeMSGFileHandler`. Example: -------- +-------- Here is an example how to read the data in satpy. NOTE: When loading the data, the orientation From 887137e9ba4dd5945b132e07b99c10de50d782ed Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 26 Oct 2023 17:34:57 +0200 Subject: [PATCH 0614/1416] Fix underline in doc --- satpy/readers/mviri_l1b_fiduceo_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index d50ecab97f..9a309a0bb8 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -144,7 +144,7 @@ References: ----------- +----------- - `[Handbook]`_ MFG User Handbook - `[PUG]`_ FIDUCEO MVIRI FCDR Product User Guide From 41759a8a925dc223f68fb02cd359f6873426be24 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 26 Oct 2023 17:42:49 +0200 Subject: [PATCH 0615/1416] Fix underline --- satpy/readers/gms/gms5_vissr_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/gms/gms5_vissr_l1b.py b/satpy/readers/gms/gms5_vissr_l1b.py index 0e1a5df483..c8a88dfe25 100644 --- a/satpy/readers/gms/gms5_vissr_l1b.py +++ b/satpy/readers/gms/gms5_vissr_l1b.py @@ -30,7 +30,7 @@ References: -~~~~~~~~~~ +~~~~~~~~~~~ Details about platform, instrument and data format can be found in the following references: From 5d6a1062ea4f4a448f8a79361a88282bdba218cc Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 26 Oct 2023 11:35:03 -0500 Subject: [PATCH 0616/1416] Remove coordinates during DayNightCompositor masking And add a lot more type annotations --- satpy/composites/__init__.py | 91 +++++++++++++++++++++++++----------- 1 file changed, 65 insertions(+), 26 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index c6883f9ab9..100dc3e293 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -19,6 +19,7 @@ import logging import os import warnings +from typing import Optional, Sequence import dask.array as da import numpy as np @@ -119,7 +120,12 @@ def id(self): id_keys = self.attrs.get('_satpy_id_keys', minimal_default_keys_config) return DataID(id_keys, **self.attrs) - def __call__(self, datasets, optional_datasets=None, **info): + def __call__( + self, + datasets: Sequence[xr.DataArray], + optional_datasets: Optional[Sequence[xr.DataArray]] = None, + **info + ) -> xr.DataArray: """Generate a composite.""" raise NotImplementedError() @@ -422,7 +428,12 @@ def _get_sensors(self, projectables): sensor = list(sensor)[0] return sensor - def __call__(self, projectables, 
nonprojectables=None, **attrs): + def __call__( + self, + datasets: Sequence[xr.DataArray], + optional_datasets: Optional[Sequence[xr.DataArray]] = None, + **attrs + ) -> xr.DataArray: """Build the composite.""" if 'deprecation_warning' in self.attrs: warnings.warn( @@ -431,29 +442,29 @@ def __call__(self, projectables, nonprojectables=None, **attrs): stacklevel=2 ) self.attrs.pop('deprecation_warning', None) - num = len(projectables) + num = len(datasets) mode = attrs.get('mode') if mode is None: # num may not be in `self.modes` so only check if we need to mode = self.modes[num] - if len(projectables) > 1: - projectables = self.match_data_arrays(projectables) - data = self._concat_datasets(projectables, mode) + if len(datasets) > 1: + datasets = self.match_data_arrays(datasets) + data = self._concat_datasets(datasets, mode) # Skip masking if user wants it or a specific alpha channel is given. if self.common_channel_mask and mode[-1] != 'A': data = data.where(data.notnull().all(dim='bands')) else: - data = projectables[0] + data = datasets[0] # if inputs have a time coordinate that may differ slightly between # themselves then find the mid time and use that as the single # time coordinate value - if len(projectables) > 1: - time = check_times(projectables) + if len(datasets) > 1: + time = check_times(datasets) if time is not None and 'time' in data.dims: data['time'] = [time] - new_attrs = combine_metadata(*projectables) + new_attrs = combine_metadata(*datasets) # remove metadata that shouldn't make sense in a composite new_attrs["wavelength"] = None new_attrs.pop("units", None) @@ -467,7 +478,7 @@ def __call__(self, projectables, nonprojectables=None, **attrs): new_attrs.update(self.attrs) if resolution is not None: new_attrs['resolution'] = resolution - new_attrs["sensor"] = self._get_sensors(projectables) + new_attrs["sensor"] = self._get_sensors(datasets) new_attrs["mode"] = mode return xr.DataArray(data=data.data, attrs=new_attrs, @@ -692,22 +703,27 @@ def __init__(self, name, lim_low=85., lim_high=88., day_night="day_night", inclu self._has_sza = False super(DayNightCompositor, self).__init__(name, **kwargs) - def __call__(self, projectables, **kwargs): + def __call__( + self, + datasets: Sequence[xr.DataArray], + optional_datasets: Optional[Sequence[xr.DataArray]] = None, + **attrs + ) -> xr.DataArray: """Generate the composite.""" - projectables = self.match_data_arrays(projectables) + datasets = self.match_data_arrays(datasets) # At least one composite is requested. 
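A minimal sketch of the blend this method is building toward, assuming ``weights`` is the clipped cos(solar zenith) factor returned by ``_get_coszen_blending_weights`` below (illustrative values and names, not the satpy API):

.. code-block:: python

    import numpy as np

    # Assumed inputs: weights is cos(solar zenith angle) rescaled between
    # the lim_low/lim_high thresholds and clipped to [0, 1].
    weights = np.array([[1.0, 0.7], [0.3, 0.0]])
    day = np.array([[0.8, 0.6], [0.4, 0.2]])
    night = np.array([[0.1, 0.1], [0.1, 0.1]])
    # 1.0 -> pure day side, 0.0 -> pure night side, in between -> linear mix
    blended = weights * day + (1.0 - weights) * night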
- foreground_data = projectables[0] + foreground_data = datasets[0] - weights = self._get_coszen_blending_weights(projectables) + weights = self._get_coszen_blending_weights(datasets) # Apply enhancements to the foreground data foreground_data = enhance2dataset(foreground_data) if "only" in self.day_night: - attrs = foreground_data.attrs.copy() + fg_attrs = foreground_data.attrs.copy() day_data, night_data, weights = self._get_data_for_single_side_product(foreground_data, weights) else: - day_data, night_data, attrs = self._get_data_for_combined_product(foreground_data, projectables[1]) + day_data, night_data, fg_attrs = self._get_data_for_combined_product(foreground_data, datasets[1]) # The computed coszen is for the full area, so it needs to be masked for missing and off-swath data if self.include_alpha and not self._has_sza: @@ -718,11 +734,18 @@ def __call__(self, projectables, **kwargs): day_data = zero_missing_data(day_data, night_data) night_data = zero_missing_data(night_data, day_data) - data = self._weight_data(day_data, night_data, weights, attrs) + data = self._weight_data(day_data, night_data, weights, fg_attrs) - return super(DayNightCompositor, self).__call__(data, **kwargs) + return super(DayNightCompositor, self).__call__( + data, + optional_datasets=optional_datasets, + **attrs + ) - def _get_coszen_blending_weights(self, projectables): + def _get_coszen_blending_weights( + self, + projectables: Sequence[xr.DataArray], + ) -> xr.DataArray: lim_low = np.cos(np.deg2rad(self.lim_low)) lim_high = np.cos(np.deg2rad(self.lim_high)) try: @@ -739,7 +762,11 @@ def _get_coszen_blending_weights(self, projectables): return coszen.clip(0, 1) - def _get_data_for_single_side_product(self, foreground_data, weights): + def _get_data_for_single_side_product( + self, + foreground_data: xr.DataArray, + weights: xr.DataArray, + ) -> tuple[xr.DataArray, xr.DataArray, xr.DataArray]: # Only one portion (day or night) is selected. One composite is requested. # Add alpha band to single L/RGB composite to make the masked-out portion transparent when needed # L -> LA @@ -778,7 +805,12 @@ def _get_data_for_combined_product(self, day_data, night_data): return day_data, night_data, attrs - def _mask_weights_with_data(self, weights, day_data, night_data): + def _mask_weights_with_data( + self, + weights: xr.DataArray, + day_data: xr.DataArray, + night_data: xr.DataArray, + ) -> xr.DataArray: data_a = _get_single_channel(day_data) data_b = _get_single_channel(night_data) if "only" in self.day_night: @@ -788,12 +820,16 @@ def _mask_weights_with_data(self, weights, day_data, night_data): return weights.where(mask, np.nan) - def _weight_data(self, day_data, night_data, weights, attrs): + def _weight_data( + self, + day_data: xr.DataArray, + night_data: xr.DataArray, + weights: xr.DataArray, + attrs: dict, + ) -> list[xr.DataArray]: if not self.include_alpha: fill = 1 if self.day_night == "night_only" else 0 weights = weights.where(~np.isnan(weights), fill) - if isinstance(weights, xr.DataArray): - weights = weights.data data = [] for b in _get_band_names(day_data, night_data): day_band = _get_single_band_data(day_data, b) @@ -823,9 +859,12 @@ def _get_single_band_data(data, band): return data.sel(bands=band) -def _get_single_channel(data): +def _get_single_channel(data: xr.DataArray) -> xr.DataArray: try: data = data[0, :, :] + # remove coordinates that may be band-specific (ex. 
"bands") + # and we don't care about anymore + data = data.reset_coords(drop=True) except (IndexError, TypeError): pass return data From be120d9696c5e2dcc617796ed2cbb478586ae4eb Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 26 Oct 2023 13:46:39 -0500 Subject: [PATCH 0617/1416] Fix type annotation in crefl function --- satpy/modifiers/_crefl_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/modifiers/_crefl_utils.py b/satpy/modifiers/_crefl_utils.py index c8d6920056..5d1b06977b 100644 --- a/satpy/modifiers/_crefl_utils.py +++ b/satpy/modifiers/_crefl_utils.py @@ -318,7 +318,7 @@ def __call__(self, sensor_azimuth, sensor_zenith, solar_azimuth, solar_zenith, a def _run_crefl(self, mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs): raise NotImplementedError() - def _height_from_avg_elevation(self, avg_elevation: Optional[np.ndarray]) -> da.Array: + def _height_from_avg_elevation(self, avg_elevation: Optional[np.ndarray]) -> da.Array | float: """Get digital elevation map data for our granule with ocean fill value set to 0.""" if avg_elevation is None: LOG.debug("No average elevation information provided in CREFL") From f182d7424933d1e437a067a2b4419700a8ec17c6 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 22 Oct 2023 19:29:19 -0500 Subject: [PATCH 0618/1416] Add initial hacky chunking and float32 handling to ABI L1b reader --- satpy/readers/abi_base.py | 18 +++++++++++++----- satpy/readers/abi_l1b.py | 4 ++-- 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py index 0b80045767..69c059e569 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -62,17 +62,25 @@ def __init__(self, filename, filename_info, filetype_info): @cached_property def nc(self): """Get the xarray dataset for this file.""" + import math + + from satpy.utils import get_dask_chunk_size_in_bytes + chunk_size_for_high_res = math.sqrt(get_dask_chunk_size_in_bytes() / 4) # 32-bit floats + chunk_size_for_high_res = np.round(chunk_size_for_high_res / 226) * 226 + ft = self.filetype_info["file_type"] + low_res_factor = 1 if ft == "c02" else (2 if ft in ("c01", "c03", "c05") else 4) + chunk_size = int(chunk_size_for_high_res / low_res_factor) f_obj = open_file_or_filename(self.filename) try: nc = xr.open_dataset(f_obj, decode_cf=True, mask_and_scale=False, - chunks={"x": CHUNK_SIZE, "y": CHUNK_SIZE}, ) + chunks={'x': chunk_size, 'y': chunk_size}, ) except ValueError: nc = xr.open_dataset(f_obj, decode_cf=True, mask_and_scale=False, - chunks={"lon": CHUNK_SIZE, "lat": CHUNK_SIZE}, ) + chunks={'lon': chunk_size, 'lat': chunk_size}, ) nc = self._rename_dims(nc) return nc @@ -137,7 +145,7 @@ def is_int(val): new_fill = fill else: new_fill = np.nan - data = data.where(data != fill, new_fill) + data = data.where(data != fill, np.float32(new_fill)) if factor != 1 and item in ("x", "y"): # be more precise with x/y coordinates # see get_area_def for more information @@ -147,8 +155,8 @@ def is_int(val): # can't do this in place since data is most likely uint16 # and we are making it a 64-bit float if not is_int(factor): - factor = float(factor) - data = data * factor + offset + factor = np.float32(factor) + data = data * np.float32(factor) + np.float32(offset) return data def _adjust_coords(self, data, item): diff --git a/satpy/readers/abi_l1b.py b/satpy/readers/abi_l1b.py index 3a22397cde..4d0276bf79 100644 --- a/satpy/readers/abi_l1b.py +++ b/satpy/readers/abi_l1b.py @@ -136,11 +136,11 @@ def 
_raw_calibrate(self, data): def _vis_calibrate(self, data): """Calibrate visible channels to reflectance.""" solar_irradiance = self["esun"] - esd = self["earth_sun_distance_anomaly_in_AU"].astype(float) + esd = self["earth_sun_distance_anomaly_in_AU"].astype(np.float32) factor = np.pi * esd * esd / solar_irradiance - res = data * factor + res = data * np.float32(factor) res.attrs = data.attrs res.attrs["units"] = "1" res.attrs["long_name"] = "Bidirectional Reflectance" From 878e5c6c4dbc4d6200df208660102d060786907d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 27 Oct 2023 12:34:39 -0500 Subject: [PATCH 0619/1416] Use filetype info for ABI resolution-based chunking --- satpy/etc/readers/abi_l1b.yaml | 4 ++++ satpy/readers/abi_base.py | 19 +++++++------------ satpy/tests/reader_tests/test_abi_l1b.py | 2 +- 3 files changed, 12 insertions(+), 13 deletions(-) diff --git a/satpy/etc/readers/abi_l1b.yaml b/satpy/etc/readers/abi_l1b.yaml index d9de341ff1..f4986ba106 100644 --- a/satpy/etc/readers/abi_l1b.yaml +++ b/satpy/etc/readers/abi_l1b.yaml @@ -25,16 +25,19 @@ file_types: # "suffix" is an arbitrary suffix that may be added during third-party testing (see PR #1380) c01: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B + resolution: 1000 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C01_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C01_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-{chid:6d}_0.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C01_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c02: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B + resolution: 500 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C02_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C02_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-{chid:6d}_0.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C02_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c03: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B + resolution: 1000 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C03_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C03_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-{chid:6d}_0.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C03_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] @@ 
-44,6 +47,7 @@ file_types: '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C04_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c05: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B + resolution: 1000 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C05_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C05_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-{chid:6d}_0.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C05_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py index 69c059e569..956bec278e 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -66,21 +66,16 @@ def nc(self): from satpy.utils import get_dask_chunk_size_in_bytes chunk_size_for_high_res = math.sqrt(get_dask_chunk_size_in_bytes() / 4) # 32-bit floats - chunk_size_for_high_res = np.round(chunk_size_for_high_res / 226) * 226 - ft = self.filetype_info["file_type"] - low_res_factor = 1 if ft == "c02" else (2 if ft in ("c01", "c03", "c05") else 4) - chunk_size = int(chunk_size_for_high_res / low_res_factor) - f_obj = open_file_or_filename(self.filename) - try: + chunk_size_for_high_res = np.round(chunk_size_for_high_res / (4 * 226)) * (4 * 226) + low_res_factor = int(self.filetype_info.get("resolution", 2000) // 500) + res_chunk_bytes = int(chunk_size_for_high_res / low_res_factor) * 4 + import dask + with dask.config.set({"array.chunk-size": res_chunk_bytes}): + f_obj = open_file_or_filename(self.filename) nc = xr.open_dataset(f_obj, decode_cf=True, mask_and_scale=False, - chunks={'x': chunk_size, 'y': chunk_size}, ) - except ValueError: - nc = xr.open_dataset(f_obj, - decode_cf=True, - mask_and_scale=False, - chunks={'lon': chunk_size, 'lat': chunk_size}, ) + chunks="auto") nc = self._rename_dims(nc) return nc diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index ab2b1eec54..7563f6e13d 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -404,7 +404,7 @@ def test_open_dataset(self, _): # noqa: PT019 openable_thing = mock.MagicMock() - NC_ABI_L1B(openable_thing, {"platform_shortname": "g16"}, None) + NC_ABI_L1B(openable_thing, {"platform_shortname": "g16"}, {}) openable_thing.open.assert_called() From 85360970007207e6e0f5b611801491d56dfd70ba Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 27 Oct 2023 16:29:13 -0500 Subject: [PATCH 0620/1416] Start refactoring ABI L1b tests --- satpy/tests/reader_tests/test_abi_l1b.py | 77 +++++++++++++++--------- 1 file changed, 50 insertions(+), 27 deletions(-) diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index 7563f6e13d..f8bd7e4e9f 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -16,8 +16,10 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
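To make the resolution-based chunking above concrete, here is the same arithmetic as a standalone sketch, assuming dask's default 128 MiB ``array.chunk-size`` and 32-bit floats (the ``get_dask_chunk_size_in_bytes()`` call and the 226-row granularity come from the ``abi_base.py`` hunk; the literal byte count is an assumption):

.. code-block:: python

    import math

    import numpy as np

    chunk_bytes = 128 * 1024 ** 2                     # assumed dask "array.chunk-size"
    side = math.sqrt(chunk_bytes / 4)                 # ~5792 float32 pixels per side
    side = np.round(side / (4 * 226)) * (4 * 226)     # snap to 904-pixel multiples -> 5424.0
    low_res_factor = int(1000 // 500)                 # e.g. a 1 km (c01/c03/c05) file -> 2
    res_chunk_bytes = int(side / low_res_factor) * 4  # -> 10848

``res_chunk_bytes`` is what the ``with dask.config.set({"array.chunk-size": ...})`` block feeds to ``chunks="auto"``, so coarser-resolution files get proportionally smaller chunks.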
"""The abi_l1b reader tests package.""" +from __future__ import annotations import unittest +from typing import Any from unittest import mock import numpy as np @@ -26,13 +28,23 @@ from satpy.tests.utils import make_dataid +RAD_SHAPE = { + 500: (3000, 5000), # conus - 500m + 1000: (1500, 2500), # conus - 1km + 2000: (750, 1250), # conus - 2km +} -def _create_fake_rad_dataarray(rad=None): + +def _create_fake_rad_dataarray( + rad: xr.DataArray | None = None, + # resolution: int = 2000, +): x_image = xr.DataArray(0.) y_image = xr.DataArray(0.) time = xr.DataArray(0.) + shape = (2, 5) # RAD_SHAPE[resolution] if rad is None: - rad_data = (np.arange(10.).reshape((2, 5)) + 1.) * 50. + rad_data = (np.arange(shape[0] * shape[1]).reshape(shape) + 1.) * 50. rad_data = (rad_data + 1.) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( @@ -115,17 +127,28 @@ class Test_NC_ABI_L1B_Base(unittest.TestCase): """Common setup for NC_ABI_L1B tests.""" @mock.patch("satpy.readers.abi_base.xr") - def setUp(self, xr_, rad=None, clip_negative_radiances=False): + def setUp( + self, + xr_, + rad: xr.DataArray | None = None, + clip_negative_radiances: bool = False, + filetype_resolution: int = 0 + ) -> None: """Create a fake dataset using the given radiance data.""" from satpy.readers.abi_l1b import NC_ABI_L1B xr_.open_dataset.return_value = _create_fake_rad_dataset(rad=rad) - self.reader = NC_ABI_L1B("filename", - {"platform_shortname": "G16", "observation_type": "Rad", - "suffix": "custom", - "scene_abbr": "C", "scan_mode": "M3"}, - {"filetype": "info"}, - clip_negative_radiances=clip_negative_radiances) + ft_info: dict[str, Any] = {"filetype": "info"} + if filetype_resolution: + ft_info["resolution"] = filetype_resolution + self.file_handler = NC_ABI_L1B( + "filename", + {"platform_shortname": "G16", "observation_type": "Rad", + "suffix": "custom", + "scene_abbr": "C", "scan_mode": "M3"}, + ft_info, + clip_negative_radiances=clip_negative_radiances + ) class TestABIYAML: @@ -157,13 +180,13 @@ class Test_NC_ABI_L1B(Test_NC_ABI_L1B_Base): def test_basic_attributes(self): """Test getting basic file attributes.""" from datetime import datetime - assert self.reader.start_time == datetime(2017, 9, 20, 17, 30, 40, 800000) - assert self.reader.end_time == datetime(2017, 9, 20, 17, 41, 17, 500000) + assert self.file_handler.start_time == datetime(2017, 9, 20, 17, 30, 40, 800000) + assert self.file_handler.end_time == datetime(2017, 9, 20, 17, 41, 17, 500000) def test_get_dataset(self): """Test the get_dataset method.""" key = make_dataid(name="Rad", calibration="radiance") - res = self.reader.get_dataset(key, {"info": "info"}) + res = self.file_handler.get_dataset(key, {"info": "info"}) exp = {"calibration": "radiance", "instrument_ID": None, "modifiers": (), @@ -198,14 +221,14 @@ def test_get_dataset(self): @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def(self, adef): """Test the area generation.""" - self.reader.get_area_def(None) + self.file_handler.get_area_def(None) assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] assert call_args[3] == {"a": 1.0, "b": 1.0, "h": 1.0, "lon_0": -90.0, "proj": "geos", "sweep": "x", "units": "m"} - assert call_args[4] == self.reader.ncols - assert call_args[5] == self.reader.nlines + assert call_args[4] == self.file_handler.ncols + assert call_args[5] == self.file_handler.nlines np.testing.assert_allclose(call_args[6], (-2, -2, 8, 2)) @@ -226,11 +249,11 @@ def setUp(self): "_FillValue": 1002, # last rad_data value } ) - 
super(Test_NC_ABI_L1B_ir_cal, self).setUp(rad=rad) + super(Test_NC_ABI_L1B_ir_cal, self).setUp(rad=rad, filetype_resolution=2000) def test_ir_calibration_attrs(self): """Test IR calibrated DataArray attributes.""" - res = self.reader.get_dataset( + res = self.file_handler.get_dataset( make_dataid(name="C05", calibration="brightness_temperature"), {}) # make sure the attributes from the file are in the data array @@ -241,11 +264,11 @@ def test_ir_calibration_attrs(self): def test_clip_negative_radiances_attribute(self): """Assert that clip_negative_radiances is set to False.""" - assert not self.reader.clip_negative_radiances + assert not self.file_handler.clip_negative_radiances def test_ir_calibrate(self): """Test IR calibration.""" - res = self.reader.get_dataset( + res = self.file_handler.get_dataset( make_dataid(name="C05", calibration="brightness_temperature"), {}) expected = np.array([[267.55572248, 305.15576503, 332.37383249, 354.73895301, 374.19710115], @@ -273,15 +296,15 @@ def setUp(self): } ) - super().setUp(rad=rad, clip_negative_radiances=True) + super().setUp(rad=rad, clip_negative_radiances=True, filetype_resolution=2000) def test_clip_negative_radiances_attribute(self): """Assert that clip_negative_radiances has been set to True.""" - assert self.reader.clip_negative_radiances + assert self.file_handler.clip_negative_radiances def test_ir_calibrate(self): """Test IR calibration.""" - res = self.reader.get_dataset( + res = self.file_handler.get_dataset( make_dataid(name="C07", calibration="brightness_temperature"), {}) clipped_ir = 267.07775531 @@ -319,11 +342,11 @@ def setUp(self): "_FillValue": 20, } ) - super(Test_NC_ABI_L1B_vis_cal, self).setUp(rad=rad) + super(Test_NC_ABI_L1B_vis_cal, self).setUp(rad=rad, filetype_resolution=1000) def test_vis_calibrate(self): """Test VIS calibration.""" - res = self.reader.get_dataset( + res = self.file_handler.get_dataset( make_dataid(name="C05", calibration="reflectance"), {}) expected = np.array([[0.15265617, 0.30531234, 0.45796851, 0.61062468, 0.76328085], @@ -352,11 +375,11 @@ def setUp(self): "_FillValue": 20, } ) - super(Test_NC_ABI_L1B_raw_cal, self).setUp(rad=rad) + super(Test_NC_ABI_L1B_raw_cal, self).setUp(rad=rad, filetype_resolution=1000) def test_raw_calibrate(self): """Test RAW calibration.""" - res = self.reader.get_dataset( + res = self.file_handler.get_dataset( make_dataid(name="C05", calibration="counts"), {}) # We expect the raw data to be unchanged @@ -391,7 +414,7 @@ def to_dict(self): with self.assertRaises(ValueError, msg="Did not detect invalid cal"): did = FakeDataID(name="C05", calibration="invalid", modifiers=()) - self.reader.get_dataset(did, {}) + self.file_handler.get_dataset(did, {}) class Test_NC_ABI_File(unittest.TestCase): From 55e52484c05b254737863abb0648e1483d63e1b5 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sat, 28 Oct 2023 19:30:06 -0500 Subject: [PATCH 0621/1416] Remove unnecessary duplication in ABI L1b tests --- satpy/readers/abi_l1b.py | 8 +- satpy/tests/reader_tests/test_abi_l1b.py | 451 ++++++++++++----------- 2 files changed, 231 insertions(+), 228 deletions(-) diff --git a/satpy/readers/abi_l1b.py b/satpy/readers/abi_l1b.py index 4d0276bf79..c3da53c9c7 100644 --- a/satpy/readers/abi_l1b.py +++ b/satpy/readers/abi_l1b.py @@ -59,12 +59,8 @@ def get_dataset(self, key, info): "radiance": self._rad_calibrate, "counts": self._raw_calibrate, } - - try: - func = cal_dictionary[key["calibration"]] - res = func(radiances) - except KeyError: - raise ValueError("Unknown calibration 
'{}'".format(key["calibration"])) + func = cal_dictionary[key["calibration"]] + res = func(radiances) # convert to satpy standard units if res.attrs["units"] == "1" and key["calibration"] != "counts": diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index f8bd7e4e9f..bdaa03f9e5 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -18,14 +18,17 @@ """The abi_l1b reader tests package.""" from __future__ import annotations -import unittest -from typing import Any +import contextlib +from pathlib import Path +from typing import Any, Iterator from unittest import mock +import dask.array as da import numpy as np import pytest import xarray as xr +from satpy.readers.abi_l1b import NC_ABI_L1B from satpy.tests.utils import make_dataid RAD_SHAPE = { @@ -36,27 +39,27 @@ def _create_fake_rad_dataarray( - rad: xr.DataArray | None = None, - # resolution: int = 2000, -): - x_image = xr.DataArray(0.) - y_image = xr.DataArray(0.) - time = xr.DataArray(0.) + rad: xr.DataArray | None = None, + # resolution: int = 2000, +) -> xr.DataArray: + x_image = xr.DataArray(0.0) + y_image = xr.DataArray(0.0) + time = xr.DataArray(0.0) shape = (2, 5) # RAD_SHAPE[resolution] if rad is None: - rad_data = (np.arange(shape[0] * shape[1]).reshape(shape) + 1.) * 50. - rad_data = (rad_data + 1.) / 0.5 + rad_data = (np.arange(shape[0] * shape[1]).reshape(shape) + 1.0) * 50.0 + rad_data = (rad_data + 1.0) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( rad_data, dims=("y", "x"), attrs={ "scale_factor": 0.5, - "add_offset": -1., + "add_offset": -1.0, "_FillValue": 1002, "units": "W m-2 um-1 sr-1", "valid_range": (0, 4095), - } + }, ) rad.coords["t"] = time rad.coords["x_image"] = x_image @@ -68,25 +71,21 @@ def _create_fake_rad_dataset(rad=None): rad = _create_fake_rad_dataarray(rad=rad) x__ = xr.DataArray( - range(5), - attrs={"scale_factor": 2., "add_offset": -1.}, - dims=("x",) + range(5), attrs={"scale_factor": 2.0, "add_offset": -1.0}, dims=("x",) ) y__ = xr.DataArray( - range(2), - attrs={"scale_factor": -2., "add_offset": 1.}, - dims=("y",) + range(2), attrs={"scale_factor": -2.0, "add_offset": 1.0}, dims=("y",) ) proj = xr.DataArray( [], attrs={ - "semi_major_axis": 1., - "semi_minor_axis": 1., - "perspective_point_height": 1., - "longitude_of_projection_origin": -90., - "latitude_of_projection_origin": 0., - "sweep_angle_axis": u"x" - } + "semi_major_axis": 1.0, + "semi_minor_axis": 1.0, + "perspective_point_height": 1.0, + "longitude_of_projection_origin": -90.0, + "latitude_of_projection_origin": 0.0, + "sweep_angle_axis": "x", + }, ) fake_dataset = xr.Dataset( @@ -95,8 +94,8 @@ def _create_fake_rad_dataset(rad=None): "band_id": np.array(8), # 'x': x__, # 'y': y__, - "x_image": xr.DataArray(0.), - "y_image": xr.DataArray(0.), + "x_image": xr.DataArray(0.0), + "y_image": xr.DataArray(0.0), "goes_imager_projection": proj, "yaw_flip_flag": np.array([1]), "planck_fk1": np.array(13432.1), @@ -107,13 +106,12 @@ def _create_fake_rad_dataset(rad=None): "nominal_satellite_subpoint_lat": np.array(0.0), "nominal_satellite_subpoint_lon": np.array(-89.5), "nominal_satellite_height": np.array(35786.02), - "earth_sun_distance_anomaly_in_AU": np.array(0.99) + "earth_sun_distance_anomaly_in_AU": np.array(0.99), }, coords={ "t": rad.coords["t"], "x": x__, "y": y__, - }, attrs={ "time_coverage_start": "2017-09-20T17:30:40.8Z", @@ -123,93 +121,139 @@ def _create_fake_rad_dataset(rad=None): return fake_dataset -class 
Test_NC_ABI_L1B_Base(unittest.TestCase): - """Common setup for NC_ABI_L1B tests.""" +def generate_l1b_filename(chan_name: str) -> str: + return f"OR_ABI-L1b-RadC-M4{chan_name}_G16_s20161811540362_e20161811545170_c20161811545230.nc" + + +@pytest.fixture(scope="module") +def l1b_c01_file(tmp_path_factory) -> list[Path]: + filename = generate_l1b_filename("C01") + data_path = tmp_path_factory.mktemp("abi_l1b").join(filename) + dataset = _create_fake_rad_dataset() + dataset.to_netcdf(data_path) + return [data_path] + + +@pytest.fixture(scope="module") +def l1b_all_files( + l1b_c01_file, +) -> list[Path]: + return l1b_c01_file - @mock.patch("satpy.readers.abi_base.xr") - def setUp( - self, - xr_, - rad: xr.DataArray | None = None, - clip_negative_radiances: bool = False, - filetype_resolution: int = 0 - ) -> None: - """Create a fake dataset using the given radiance data.""" - from satpy.readers.abi_l1b import NC_ABI_L1B +@contextlib.contextmanager +def create_file_handler( + rad: xr.DataArray | None = None, + clip_negative_radiances: bool = False, + filetype_resolution: int = 0, +) -> Iterator[NC_ABI_L1B]: + """Create a fake dataset using the given radiance data.""" + + ft_info: dict[str, Any] = {"filetype": "info"} + if filetype_resolution: + ft_info["resolution"] = filetype_resolution + + with mock.patch("satpy.readers.abi_base.xr") as xr_: xr_.open_dataset.return_value = _create_fake_rad_dataset(rad=rad) - ft_info: dict[str, Any] = {"filetype": "info"} - if filetype_resolution: - ft_info["resolution"] = filetype_resolution - self.file_handler = NC_ABI_L1B( + file_handler = NC_ABI_L1B( "filename", - {"platform_shortname": "G16", "observation_type": "Rad", - "suffix": "custom", - "scene_abbr": "C", "scan_mode": "M3"}, + { + "platform_shortname": "G16", + "observation_type": "Rad", + "suffix": "custom", + "scene_abbr": "C", + "scan_mode": "M3", + }, ft_info, - clip_negative_radiances=clip_negative_radiances + clip_negative_radiances=clip_negative_radiances, ) + yield file_handler class TestABIYAML: """Tests for the ABI L1b reader's YAML configuration.""" - @pytest.mark.parametrize(("channel", "suffix"), - [("C{:02d}".format(num), suffix) - for num in range(1, 17) - for suffix in ("", "_test_suffix")]) + @pytest.mark.parametrize( + ("channel", "suffix"), + [ + ("C{:02d}".format(num), suffix) + for num in range(1, 17) + for suffix in ("", "_test_suffix") + ], + ) def test_file_patterns_match(self, channel, suffix): """Test that the configured file patterns work.""" from satpy.readers import configs_for_reader, load_reader + reader_configs = list(configs_for_reader("abi_l1b"))[0] reader = load_reader(reader_configs) - fn1 = ("OR_ABI-L1b-RadM1-M3{}_G16_s20182541300210_e20182541300267" - "_c20182541300308{}.nc").format(channel, suffix) + fn1 = ( + "OR_ABI-L1b-RadM1-M3{}_G16_s20182541300210_e20182541300267" + "_c20182541300308{}.nc" + ).format(channel, suffix) loadables = reader.select_files_from_pathnames([fn1]) assert len(loadables) == 1 if not suffix and channel in ["C01", "C02", "C03", "C05"]: - fn2 = ("OR_ABI-L1b-RadM1-M3{}_G16_s20182541300210_e20182541300267" - "_c20182541300308-000000_0.nc").format(channel) + fn2 = ( + "OR_ABI-L1b-RadM1-M3{}_G16_s20182541300210_e20182541300267" + "_c20182541300308-000000_0.nc" + ).format(channel) loadables = reader.select_files_from_pathnames([fn2]) assert len(loadables) == 1 -class Test_NC_ABI_L1B(Test_NC_ABI_L1B_Base): +class Test_NC_ABI_L1B: """Test the NC_ABI_L1B reader.""" + @property + def fake_rad(self): + """Create fake data for these tests. 
+ + Needs to be an instance method so the subclass can override it. + + """ + return None + def test_basic_attributes(self): """Test getting basic file attributes.""" from datetime import datetime - assert self.file_handler.start_time == datetime(2017, 9, 20, 17, 30, 40, 800000) - assert self.file_handler.end_time == datetime(2017, 9, 20, 17, 41, 17, 500000) + + with create_file_handler(rad=self.fake_rad) as file_handler: + assert file_handler.start_time == datetime(2017, 9, 20, 17, 30, 40, 800000) + assert file_handler.end_time == datetime(2017, 9, 20, 17, 41, 17, 500000) def test_get_dataset(self): """Test the get_dataset method.""" key = make_dataid(name="Rad", calibration="radiance") - res = self.file_handler.get_dataset(key, {"info": "info"}) - exp = {"calibration": "radiance", - "instrument_ID": None, - "modifiers": (), - "name": "Rad", - "observation_type": "Rad", - "orbital_parameters": {"projection_altitude": 1.0, - "projection_latitude": 0.0, - "projection_longitude": -90.0, - "satellite_nominal_altitude": 35786020., - "satellite_nominal_latitude": 0.0, - "satellite_nominal_longitude": -89.5, - "yaw_flip": True}, - "orbital_slot": None, - "platform_name": "GOES-16", - "platform_shortname": "G16", - "production_site": None, - "scan_mode": "M3", - "scene_abbr": "C", - "scene_id": None, - "sensor": "abi", - "timeline_ID": None, - "suffix": "custom", - "units": "W m-2 um-1 sr-1"} + with create_file_handler(rad=self.fake_rad) as file_handler: + res = file_handler.get_dataset(key, {"info": "info"}) + exp = { + "calibration": "radiance", + "instrument_ID": None, + "modifiers": (), + "name": "Rad", + "observation_type": "Rad", + "orbital_parameters": { + "projection_altitude": 1.0, + "projection_latitude": 0.0, + "projection_longitude": -90.0, + "satellite_nominal_altitude": 35786020.0, + "satellite_nominal_latitude": 0.0, + "satellite_nominal_longitude": -89.5, + "yaw_flip": True, + }, + "orbital_slot": None, + "platform_name": "GOES-16", + "platform_shortname": "G16", + "production_site": None, + "scan_mode": "M3", + "scene_abbr": "C", + "scene_id": None, + "sensor": "abi", + "timeline_ID": None, + "suffix": "custom", + "units": "W m-2 um-1 sr-1", + } assert res.attrs == exp # we remove any time dimension information @@ -221,40 +265,47 @@ def test_get_dataset(self): @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def(self, adef): """Test the area generation.""" - self.file_handler.get_area_def(None) - - assert adef.call_count == 1 - call_args = tuple(adef.call_args)[0] - assert call_args[3] == {"a": 1.0, "b": 1.0, "h": 1.0, - "lon_0": -90.0, "proj": "geos", "sweep": "x", "units": "m"} - assert call_args[4] == self.file_handler.ncols - assert call_args[5] == self.file_handler.nlines - np.testing.assert_allclose(call_args[6], (-2, -2, 8, 2)) + with create_file_handler(rad=self.fake_rad) as file_handler: + file_handler.get_area_def(None) + + assert adef.call_count == 1 + call_args = tuple(adef.call_args)[0] + assert call_args[3] == { + "a": 1.0, + "b": 1.0, + "h": 1.0, + "lon_0": -90.0, + "proj": "geos", + "sweep": "x", + "units": "m", + } + assert call_args[4] == file_handler.ncols + assert call_args[5] == file_handler.nlines + np.testing.assert_allclose(call_args[6], (-2, -2, 8, 2)) -class Test_NC_ABI_L1B_ir_cal(Test_NC_ABI_L1B_Base): +class Test_NC_ABI_L1B_ir_cal: """Test the NC_ABI_L1B reader's default IR calibration.""" - def setUp(self): - """Create fake data for the tests.""" - rad_data = (np.arange(10.).reshape((2, 5)) + 1.) * 50. 
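All of the fixtures being consolidated here build their fake counts the same way: choose physical radiances, then invert the CF decoding ``physical = scale_factor * counts + add_offset``. A self-contained sketch using the 0.5 / -1.0 attributes these tests share:

.. code-block:: python

    import numpy as np

    scale_factor, add_offset = 0.5, -1.0
    radiance = (np.arange(10.0).reshape((2, 5)) + 1.0) * 50.0
    # pack: invert the CF decode, then store as int16 counts
    counts = ((radiance - add_offset) / scale_factor).astype(np.int16)
    # round-trip check: decoding the counts recovers the radiances exactly
    np.testing.assert_allclose(scale_factor * counts + add_offset, radiance)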
- rad_data = (rad_data + 1.) / 0.5 - rad_data = rad_data.astype(np.int16) - rad = xr.DataArray( - rad_data, - dims=("y", "x"), - attrs={ - "scale_factor": 0.5, - "add_offset": -1., - "_FillValue": 1002, # last rad_data value - } + @pytest.mark.parametrize("clip_negative_radiances", [False, True]) + def test_ir_calibrate(self, clip_negative_radiances): + """Test IR calibration.""" + with _ir_file_handler( + clip_negative_radiances=clip_negative_radiances + ) as file_handler: + res = file_handler.get_dataset( + make_dataid(name="C07", calibration="brightness_temperature"), {} + ) + assert file_handler.clip_negative_radiances == clip_negative_radiances + + clipped_ir = 134.68753 if clip_negative_radiances else np.nan + expected = np.array( + [ + [clipped_ir, 304.97037, 332.22778, 354.6147, 374.08688], + [391.58655, 407.64786, 422.60635, 436.68802, np.nan], + ] ) - super(Test_NC_ABI_L1B_ir_cal, self).setUp(rad=rad, filetype_resolution=2000) - - def test_ir_calibration_attrs(self): - """Test IR calibrated DataArray attributes.""" - res = self.file_handler.get_dataset( - make_dataid(name="C05", calibration="brightness_temperature"), {}) + np.testing.assert_allclose(res.data, expected, equal_nan=True, atol=1e-04) # make sure the attributes from the file are in the data array assert "scale_factor" not in res.attrs @@ -262,95 +313,69 @@ def test_ir_calibration_attrs(self): assert res.attrs["standard_name"] == "toa_brightness_temperature" assert res.attrs["long_name"] == "Brightness Temperature" - def test_clip_negative_radiances_attribute(self): - """Assert that clip_negative_radiances is set to False.""" - assert not self.file_handler.clip_negative_radiances - - def test_ir_calibrate(self): - """Test IR calibration.""" - res = self.file_handler.get_dataset( - make_dataid(name="C05", calibration="brightness_temperature"), {}) - - expected = np.array([[267.55572248, 305.15576503, 332.37383249, 354.73895301, 374.19710115], - [391.68679226, 407.74064808, 422.69329105, 436.77021913, np.nan]]) - assert np.allclose(res.data, expected, equal_nan=True) - - -class Test_NC_ABI_L1B_clipped_ir_cal(Test_NC_ABI_L1B_Base): - """Test the NC_ABI_L1B reader's IR calibration (clipping negative radiance).""" - def setUp(self): - """Create fake data for the tests.""" - values = np.arange(10.) - values[0] = -0.0001 # introduce below minimum expected radiance - rad_data = (values.reshape((2, 5)) + 1.) * 50. - rad_data = (rad_data + 1.) 
/ 0.5 - rad_data = rad_data.astype(np.int16) - rad = xr.DataArray( - rad_data, - dims=("y", "x"), - attrs={ - "scale_factor": 0.5, - "add_offset": -1., - "_FillValue": 1002, - } - ) - - super().setUp(rad=rad, clip_negative_radiances=True, filetype_resolution=2000) - - def test_clip_negative_radiances_attribute(self): - """Assert that clip_negative_radiances has been set to True.""" - assert self.file_handler.clip_negative_radiances - - def test_ir_calibrate(self): - """Test IR calibration.""" - res = self.file_handler.get_dataset( - make_dataid(name="C07", calibration="brightness_temperature"), {}) +@contextlib.contextmanager +def _ir_file_handler( + data: da.Array | None = None, clip_negative_radiances: bool = False +): + """Create fake data for the tests.""" + if data is None: + data = _fake_ir_data() + rad = xr.DataArray( + data, + dims=("y", "x"), + attrs={ + "scale_factor": 0.5, + "add_offset": -1.3, + "_FillValue": np.int16( + np.floor(((9 + 1) * 50.0 + 1.3) / 0.5) + ), # last rad_data value + }, + ) + with create_file_handler( + rad=rad, + clip_negative_radiances=clip_negative_radiances, + filetype_resolution=2000, + ) as file_handler: + yield file_handler - clipped_ir = 267.07775531 - expected = np.array([[clipped_ir, 305.15576503, 332.37383249, 354.73895301, 374.19710115], - [391.68679226, 407.74064808, 422.69329105, 436.77021913, np.nan]]) - assert np.allclose(res.data, expected, equal_nan=True) - def test_get_minimum_radiance(self): - """Test get_minimum_radiance from Rad DataArray.""" - from satpy.readers.abi_l1b import NC_ABI_L1B - data = xr.DataArray( - attrs={ - "scale_factor": 0.5, - "add_offset": -1., - "_FillValue": 1002, - } - ) - np.testing.assert_allclose(NC_ABI_L1B._get_minimum_radiance(NC_ABI_L1B, data), 0.0) +def _fake_ir_data(): + values = np.arange(10.0) + rad_data = (values.reshape((2, 5)) + 1.0) * 50.0 + rad_data[0, 0] = -0.0001 # introduce below minimum expected radiance + rad_data = (rad_data + 1.3) / 0.5 + return rad_data.astype(np.int16) -class Test_NC_ABI_L1B_vis_cal(Test_NC_ABI_L1B_Base): +class Test_NC_ABI_L1B_vis_cal: """Test the NC_ABI_L1B reader.""" - def setUp(self): - """Create fake data for the tests.""" - rad_data = (np.arange(10.).reshape((2, 5)) + 1.) - rad_data = (rad_data + 1.) 
/ 0.5 + def test_vis_calibrate(self): + """Test VIS calibration.""" + rad_data = np.arange(10.0).reshape((2, 5)) + 1.0 + rad_data = (rad_data + 1.0) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( rad_data, dims=("y", "x"), attrs={ "scale_factor": 0.5, - "add_offset": -1., + "add_offset": -1.0, "_FillValue": 20, - } + }, + ) + with create_file_handler(rad=rad, filetype_resolution=1000) as file_handler: + res = file_handler.get_dataset( + make_dataid(name="C05", calibration="reflectance"), {} + ) + + expected = np.array( + [ + [0.15265617, 0.30531234, 0.45796851, 0.61062468, 0.76328085], + [0.91593702, 1.06859319, 1.22124936, np.nan, 1.52656171], + ] ) - super(Test_NC_ABI_L1B_vis_cal, self).setUp(rad=rad, filetype_resolution=1000) - - def test_vis_calibrate(self): - """Test VIS calibration.""" - res = self.file_handler.get_dataset( - make_dataid(name="C05", calibration="reflectance"), {}) - - expected = np.array([[0.15265617, 0.30531234, 0.45796851, 0.61062468, 0.76328085], - [0.91593702, 1.06859319, 1.22124936, np.nan, 1.52656171]]) assert np.allclose(res.data, expected, equal_nan=True) assert "scale_factor" not in res.attrs assert "_FillValue" not in res.attrs @@ -358,29 +383,27 @@ def test_vis_calibrate(self): assert res.attrs["long_name"] == "Bidirectional Reflectance" -class Test_NC_ABI_L1B_raw_cal(Test_NC_ABI_L1B_Base): +class Test_NC_ABI_L1B_raw_cal: """Test the NC_ABI_L1B reader raw calibration.""" - def setUp(self): - """Create fake data for the tests.""" - rad_data = (np.arange(10.).reshape((2, 5)) + 1.) - rad_data = (rad_data + 1.) / 0.5 + def test_raw_calibrate(self): + """Test RAW calibration.""" + rad_data = np.arange(10.0).reshape((2, 5)) + 1.0 + rad_data = (rad_data + 1.0) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( rad_data, dims=("y", "x"), attrs={ "scale_factor": 0.5, - "add_offset": -1., + "add_offset": -1.0, "_FillValue": 20, - } + }, ) - super(Test_NC_ABI_L1B_raw_cal, self).setUp(rad=rad, filetype_resolution=1000) - - def test_raw_calibrate(self): - """Test RAW calibration.""" - res = self.file_handler.get_dataset( - make_dataid(name="C05", calibration="counts"), {}) + with create_file_handler(rad=rad) as file_handler: + res = file_handler.get_dataset( + make_dataid(name="C05", calibration="counts"), {} + ) # We expect the raw data to be unchanged expected = res.data @@ -400,24 +423,7 @@ def test_raw_calibrate(self): assert res.attrs["long_name"] == "Raw Counts" -class Test_NC_ABI_L1B_invalid_cal(Test_NC_ABI_L1B_Base): - """Test the NC_ABI_L1B reader with invalid calibration.""" - - def test_invalid_calibration(self): - """Test detection of invalid calibration values.""" - # Need to use a custom DataID class because the real DataID class is - # smart enough to detect the invalid calibration before the ABI L1B - # get_dataset method gets a chance to run. 
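Why this test can be dropped outright: with the ``try``/``except`` removed from ``get_dataset`` in the ``abi_l1b.py`` hunk above, an unsupported calibration now fails at the dictionary lookup itself, and a real ``DataID`` rejects it even earlier. A plain-Python illustration of the lookup behaviour being relied on (not satpy code):

.. code-block:: python

    cal_dictionary = {"brightness_temperature": "_ir_calibrate",
                      "reflectance": "_vis_calibrate",
                      "radiance": "_rad_calibrate",
                      "counts": "_raw_calibrate"}
    try:
        cal_dictionary["invalid"]
    except KeyError as err:
        print(f"unknown calibration: {err}")  # the lookup raises on its own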
- class FakeDataID(dict): - def to_dict(self): - return self - - with self.assertRaises(ValueError, msg="Did not detect invalid cal"): - did = FakeDataID(name="C05", calibration="invalid", modifiers=()) - self.file_handler.get_dataset(did, {}) - - -class Test_NC_ABI_File(unittest.TestCase): +class Test_NC_ABI_File: """Test file opening.""" @mock.patch("satpy.readers.abi_base.xr") @@ -434,17 +440,18 @@ def test_open_dataset(self, _): # noqa: PT019 class Test_NC_ABI_L1B_H5netcdf(Test_NC_ABI_L1B): """Allow h5netcdf peculiarities.""" - def setUp(self): + @property + def fake_rad(self): """Create fake data for the tests.""" rad_data = np.int16(50) rad = xr.DataArray( rad_data, attrs={ "scale_factor": 0.5, - "add_offset": -1., + "add_offset": -1.0, "_FillValue": np.array([1002]), "units": "W m-2 um-1 sr-1", "valid_range": (0, 4095), - } + }, ) - super(Test_NC_ABI_L1B_H5netcdf, self).setUp(rad=rad) + return rad From deac453b30cf4936d06fb8eabc0a27941645f2bf Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sat, 28 Oct 2023 20:48:12 -0500 Subject: [PATCH 0622/1416] Use dask arrays in abi l1b tests --- satpy/tests/reader_tests/test_abi_l1b.py | 32 ++++++++++-------------- 1 file changed, 13 insertions(+), 19 deletions(-) diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index bdaa03f9e5..bc7c5351e8 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -51,7 +51,7 @@ def _create_fake_rad_dataarray( rad_data = (rad_data + 1.0) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( - rad_data, + da.from_array(rad_data), dims=("y", "x"), attrs={ "scale_factor": 0.5, @@ -212,7 +212,7 @@ def fake_rad(self): Needs to be an instance method so the subclass can override it. 
""" - return None + return None # use default from file handler creator def test_basic_attributes(self): """Test getting basic file attributes.""" @@ -315,14 +315,16 @@ def test_ir_calibrate(self, clip_negative_radiances): @contextlib.contextmanager -def _ir_file_handler( - data: da.Array | None = None, clip_negative_radiances: bool = False -): +def _ir_file_handler(clip_negative_radiances: bool = False): """Create fake data for the tests.""" - if data is None: - data = _fake_ir_data() + values = np.arange(10.0) + rad_data = (values.reshape((2, 5)) + 1.0) * 50.0 + rad_data[0, 0] = -0.0001 # introduce below minimum expected radiance + rad_data = (rad_data + 1.3) / 0.5 + data = rad_data.astype(np.int16) + rad = xr.DataArray( - data, + da.from_array(data), dims=("y", "x"), attrs={ "scale_factor": 0.5, @@ -340,14 +342,6 @@ def _ir_file_handler( yield file_handler -def _fake_ir_data(): - values = np.arange(10.0) - rad_data = (values.reshape((2, 5)) + 1.0) * 50.0 - rad_data[0, 0] = -0.0001 # introduce below minimum expected radiance - rad_data = (rad_data + 1.3) / 0.5 - return rad_data.astype(np.int16) - - class Test_NC_ABI_L1B_vis_cal: """Test the NC_ABI_L1B reader.""" @@ -357,7 +351,7 @@ def test_vis_calibrate(self): rad_data = (rad_data + 1.0) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( - rad_data, + da.from_array(rad_data), dims=("y", "x"), attrs={ "scale_factor": 0.5, @@ -392,7 +386,7 @@ def test_raw_calibrate(self): rad_data = (rad_data + 1.0) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( - rad_data, + da.from_array(rad_data), dims=("y", "x"), attrs={ "scale_factor": 0.5, @@ -445,7 +439,7 @@ def fake_rad(self): """Create fake data for the tests.""" rad_data = np.int16(50) rad = xr.DataArray( - rad_data, + da.from_array(rad_data), attrs={ "scale_factor": 0.5, "add_offset": -1.0, From e24832416dca7aa2d285a3a51a00521f6acf2843 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 29 Oct 2023 10:03:02 -0500 Subject: [PATCH 0623/1416] Switch some tests to on-disk files --- satpy/readers/abi_base.py | 2 +- satpy/tests/reader_tests/test_abi_l1b.py | 64 +++++++++++++++--------- 2 files changed, 40 insertions(+), 26 deletions(-) diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py index 956bec278e..28ff91ce38 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -66,7 +66,7 @@ def nc(self): from satpy.utils import get_dask_chunk_size_in_bytes chunk_size_for_high_res = math.sqrt(get_dask_chunk_size_in_bytes() / 4) # 32-bit floats - chunk_size_for_high_res = np.round(chunk_size_for_high_res / (4 * 226)) * (4 * 226) + chunk_size_for_high_res = np.round(max(chunk_size_for_high_res / (4 * 226), 1)) * (4 * 226) low_res_factor = int(self.filetype_info.get("resolution", 2000) // 500) res_chunk_bytes = int(chunk_size_for_high_res / low_res_factor) * 4 import dask diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index bc7c5351e8..6d8a001918 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -19,8 +19,9 @@ from __future__ import annotations import contextlib +from datetime import datetime from pathlib import Path -from typing import Any, Iterator +from typing import Any, Callable, Iterator from unittest import mock import dask.array as da @@ -28,6 +29,7 @@ import pytest import xarray as xr +from satpy import Scene from satpy.readers.abi_l1b import NC_ABI_L1B from satpy.tests.utils import make_dataid @@ -77,7 +79,7 @@ def 
_create_fake_rad_dataset(rad=None): range(2), attrs={"scale_factor": -2.0, "add_offset": 1.0}, dims=("y",) ) proj = xr.DataArray( - [], + np.int64(0), attrs={ "semi_major_axis": 1.0, "semi_minor_axis": 1.0, @@ -122,16 +124,27 @@ def _create_fake_rad_dataset(rad=None): def generate_l1b_filename(chan_name: str) -> str: - return f"OR_ABI-L1b-RadC-M4{chan_name}_G16_s20161811540362_e20161811545170_c20161811545230.nc" + return f"OR_ABI-L1b-RadC-M4{chan_name}_G16_s20161811540362_e20161811545170_c20161811545230_suffix.nc" @pytest.fixture(scope="module") -def l1b_c01_file(tmp_path_factory) -> list[Path]: - filename = generate_l1b_filename("C01") - data_path = tmp_path_factory.mktemp("abi_l1b").join(filename) - dataset = _create_fake_rad_dataset() - dataset.to_netcdf(data_path) - return [data_path] +def l1b_c01_file(tmp_path_factory) -> Callable: + def _create_file_handler( + rad: xr.DataArray | None = None, + clip_negative_radiances: bool = False, + ): + filename = generate_l1b_filename("C01") + data_path = tmp_path_factory.mktemp("abi_l1b") / filename + dataset = _create_fake_rad_dataset(rad=rad) + dataset.to_netcdf(data_path) + scn = Scene( + reader="abi_l1b", + filenames=[str(data_path)], + reader_kwargs={"clip_negative_radiances": clip_negative_radiances} + ) + return scn + + return _create_file_handler @pytest.fixture(scope="module") @@ -214,24 +227,17 @@ def fake_rad(self): """ return None # use default from file handler creator - def test_basic_attributes(self): - """Test getting basic file attributes.""" - from datetime import datetime - - with create_file_handler(rad=self.fake_rad) as file_handler: - assert file_handler.start_time == datetime(2017, 9, 20, 17, 30, 40, 800000) - assert file_handler.end_time == datetime(2017, 9, 20, 17, 41, 17, 500000) - - def test_get_dataset(self): + def test_get_dataset(self, l1b_c01_file): """Test the get_dataset method.""" - key = make_dataid(name="Rad", calibration="radiance") - with create_file_handler(rad=self.fake_rad) as file_handler: - res = file_handler.get_dataset(key, {"info": "info"}) + scn = l1b_c01_file(rad=self.fake_rad) + key = make_dataid(name="C01", calibration="radiance") + scn.load([key]) + exp = { "calibration": "radiance", "instrument_ID": None, "modifiers": (), - "name": "Rad", + "name": "C01", "observation_type": "Rad", "orbital_parameters": { "projection_altitude": 1.0, @@ -246,16 +252,24 @@ def test_get_dataset(self): "platform_name": "GOES-16", "platform_shortname": "G16", "production_site": None, - "scan_mode": "M3", + "reader": "abi_l1b", + "resolution": 1000, + "scan_mode": "M4", "scene_abbr": "C", "scene_id": None, "sensor": "abi", "timeline_ID": None, - "suffix": "custom", + "suffix": "suffix", "units": "W m-2 um-1 sr-1", + "start_time": datetime(2017, 9, 20, 17, 30, 40, 800000), + "end_time": datetime(2017, 9, 20, 17, 41, 17, 500000), } - assert res.attrs == exp + res = scn["C01"] + assert "area" in res.attrs + for exp_key, exp_val in exp.items(): + assert res.attrs[exp_key] == exp_val + # we remove any time dimension information assert "t" not in res.coords assert "t" not in res.dims From 07e841c3540982f2d1638d34ba4795bdcf1f4559 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 29 Oct 2023 14:57:55 -0500 Subject: [PATCH 0624/1416] Move more ABI L1b tests to on-disk files --- satpy/readers/abi_base.py | 6 ++-- satpy/tests/reader_tests/test_abi_l1b.py | 42 +++++++++++++++--------- satpy/utils.py | 19 +++++++++++ 3 files changed, 48 insertions(+), 19 deletions(-) diff --git a/satpy/readers/abi_base.py 
b/satpy/readers/abi_base.py index 28ff91ce38..3574349c71 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -23,7 +23,7 @@ import numpy as np import xarray as xr -from pyresample import geometry +from pyresample.geometry import AreaDefinition from satpy._compat import cached_property from satpy.readers import open_file_or_filename @@ -212,7 +212,7 @@ def _get_areadef_latlon(self, key): "fi": float(fi), "pm": float(pm)} - ll_area_def = geometry.AreaDefinition( + ll_area_def = AreaDefinition( self.nc.attrs.get("orbital_slot", "abi_geos"), self.nc.attrs.get("spatial_resolution", "ABI file area"), "abi_latlon", @@ -262,7 +262,7 @@ def _get_areadef_fixedgrid(self, key): "units": "m", "sweep": sweep_axis} - fg_area_def = geometry.AreaDefinition( + fg_area_def = AreaDefinition( self.nc.attrs.get("orbital_slot", "abi_geos"), self.nc.attrs.get("spatial_resolution", "ABI file area"), "abi_fixed_grid", diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index 6d8a001918..7ee9d36acd 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -32,6 +32,7 @@ from satpy import Scene from satpy.readers.abi_l1b import NC_ABI_L1B from satpy.tests.utils import make_dataid +from satpy.utils import ignore_pyproj_proj_warnings RAD_SHAPE = { 500: (3000, 5000), # conus - 500m @@ -276,26 +277,34 @@ def test_get_dataset(self, l1b_c01_file): assert "time" not in res.coords assert "time" not in res.dims - @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") - def test_get_area_def(self, adef): + def test_get_area_def(self, l1b_c01_file): """Test the area generation.""" - with create_file_handler(rad=self.fake_rad) as file_handler: - file_handler.get_area_def(None) - - assert adef.call_count == 1 - call_args = tuple(adef.call_args)[0] - assert call_args[3] == { - "a": 1.0, - "b": 1.0, + from pyresample.geometry import AreaDefinition + + scn = l1b_c01_file(rad=self.fake_rad) + scn.load(["C01"]) + area_def = scn["C01"].attrs["area"] + assert isinstance(area_def, AreaDefinition) + + with ignore_pyproj_proj_warnings(): + proj_dict = area_def.crs.to_dict() + exp_dict = { "h": 1.0, "lon_0": -90.0, "proj": "geos", "sweep": "x", "units": "m", } - assert call_args[4] == file_handler.ncols - assert call_args[5] == file_handler.nlines - np.testing.assert_allclose(call_args[6], (-2, -2, 8, 2)) + if "R" in proj_dict: + assert proj_dict["R"] == 1 + else: + assert proj_dict["a"] == 1 + assert proj_dict["b"] == 1 + for proj_key, proj_val in exp_dict.items(): + assert proj_dict[proj_key] == proj_val + + assert area_def.shape == scn["C01"].shape + assert area_def.area_extent == (-2, -2, 8, 2) class Test_NC_ABI_L1B_ir_cal: @@ -437,8 +446,6 @@ class Test_NC_ABI_File: @mock.patch("satpy.readers.abi_base.xr") def test_open_dataset(self, _): # noqa: PT019 """Test openning a dataset.""" - from satpy.readers.abi_l1b import NC_ABI_L1B - openable_thing = mock.MagicMock() NC_ABI_L1B(openable_thing, {"platform_shortname": "g16"}, {}) @@ -451,7 +458,10 @@ class Test_NC_ABI_L1B_H5netcdf(Test_NC_ABI_L1B): @property def fake_rad(self): """Create fake data for the tests.""" - rad_data = np.int16(50) + shape = (2, 5) + rad_data = (np.arange(shape[0] * shape[1]).reshape(shape) + 1.0) * 50.0 + rad_data = (rad_data + 1.0) / 0.5 + rad_data = rad_data.astype(np.int16) rad = xr.DataArray( da.from_array(rad_data), attrs={ diff --git a/satpy/utils.py b/satpy/utils.py index f9ea05ca79..dfedc30803 100644 --- a/satpy/utils.py +++ 
b/satpy/utils.py @@ -576,6 +576,25 @@ def ignore_invalid_float_warnings(): yield +@contextlib.contextmanager +def ignore_pyproj_proj_warnings(): + """Wrap operations that we know will produce a PROJ.4 precision warning. + + Only to be used internally to Pyresample when we have no other choice but + to use PROJ.4 strings/dicts. For example, serialization to YAML or other + human-readable formats or testing the methods that produce the PROJ.4 + versions of the CRS. + + """ + with warnings.catch_warnings(): + warnings.filterwarnings( + "ignore", + "You will likely lose important projection information", + UserWarning, + ) + yield + + def get_chunk_size_limit(dtype=float): """Compute the chunk size limit in bytes given *dtype* (float by default). From b6411c7dc34afbbde38efca5dd13d5a5fe18dd69 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 29 Oct 2023 20:47:53 -0500 Subject: [PATCH 0625/1416] Switch all ABI L1b tests to on-disk files --- satpy/tests/reader_tests/test_abi_l1b.py | 101 ++++++++--------------- 1 file changed, 33 insertions(+), 68 deletions(-) diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index 7ee9d36acd..e60453f9a3 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -18,10 +18,8 @@ """The abi_l1b reader tests package.""" from __future__ import annotations -import contextlib from datetime import datetime -from pathlib import Path -from typing import Any, Callable, Iterator +from typing import Callable from unittest import mock import dask.array as da @@ -29,7 +27,7 @@ import pytest import xarray as xr -from satpy import Scene +from satpy import DataQuery, Scene from satpy.readers.abi_l1b import NC_ABI_L1B from satpy.tests.utils import make_dataid from satpy.utils import ignore_pyproj_proj_warnings @@ -130,10 +128,7 @@ def generate_l1b_filename(chan_name: str) -> str: @pytest.fixture(scope="module") def l1b_c01_file(tmp_path_factory) -> Callable: - def _create_file_handler( - rad: xr.DataArray | None = None, - clip_negative_radiances: bool = False, - ): + def _create_file_handler(rad: xr.DataArray | None = None): filename = generate_l1b_filename("C01") data_path = tmp_path_factory.mktemp("abi_l1b") / filename dataset = _create_fake_rad_dataset(rad=rad) @@ -141,7 +136,6 @@ def _create_file_handler( scn = Scene( reader="abi_l1b", filenames=[str(data_path)], - reader_kwargs={"clip_negative_radiances": clip_negative_radiances} ) return scn @@ -149,39 +143,23 @@ def _create_file_handler( @pytest.fixture(scope="module") -def l1b_all_files( - l1b_c01_file, -) -> list[Path]: - return l1b_c01_file - - -@contextlib.contextmanager -def create_file_handler( - rad: xr.DataArray | None = None, - clip_negative_radiances: bool = False, - filetype_resolution: int = 0, -) -> Iterator[NC_ABI_L1B]: - """Create a fake dataset using the given radiance data.""" - - ft_info: dict[str, Any] = {"filetype": "info"} - if filetype_resolution: - ft_info["resolution"] = filetype_resolution - - with mock.patch("satpy.readers.abi_base.xr") as xr_: - xr_.open_dataset.return_value = _create_fake_rad_dataset(rad=rad) - file_handler = NC_ABI_L1B( - "filename", - { - "platform_shortname": "G16", - "observation_type": "Rad", - "suffix": "custom", - "scene_abbr": "C", - "scan_mode": "M3", - }, - ft_info, - clip_negative_radiances=clip_negative_radiances, +def l1b_c07_file(tmp_path_factory) -> Callable: + def _create_file_handler( + rad: xr.DataArray | None = None, + clip_negative_radiances: bool = False, + ): + 
filename = generate_l1b_filename("C07") + data_path = tmp_path_factory.mktemp("abi_l1b") / filename + dataset = _create_fake_rad_dataset(rad=rad) + dataset.to_netcdf(data_path) + scn = Scene( + reader="abi_l1b", + filenames=[str(data_path)], + reader_kwargs={"clip_negative_radiances": clip_negative_radiances} ) - yield file_handler + return scn + + return _create_file_handler class TestABIYAML: @@ -311,15 +289,11 @@ class Test_NC_ABI_L1B_ir_cal: """Test the NC_ABI_L1B reader's default IR calibration.""" @pytest.mark.parametrize("clip_negative_radiances", [False, True]) - def test_ir_calibrate(self, clip_negative_radiances): + def test_ir_calibrate(self, l1b_c07_file, clip_negative_radiances): """Test IR calibration.""" - with _ir_file_handler( - clip_negative_radiances=clip_negative_radiances - ) as file_handler: - res = file_handler.get_dataset( - make_dataid(name="C07", calibration="brightness_temperature"), {} - ) - assert file_handler.clip_negative_radiances == clip_negative_radiances + scn = l1b_c07_file(rad=_fake_ir_data(), clip_negative_radiances=clip_negative_radiances) + scn.load([DataQuery(name="C07", calibration="brightness_temperature")]) + res = scn["C07"] clipped_ir = 134.68753 if clip_negative_radiances else np.nan expected = np.array( @@ -337,9 +311,7 @@ def test_ir_calibrate(self, clip_negative_radiances): assert res.attrs["long_name"] == "Brightness Temperature" -@contextlib.contextmanager -def _ir_file_handler(clip_negative_radiances: bool = False): - """Create fake data for the tests.""" +def _fake_ir_data(): values = np.arange(10.0) rad_data = (values.reshape((2, 5)) + 1.0) * 50.0 rad_data[0, 0] = -0.0001 # introduce below minimum expected radiance @@ -357,18 +329,13 @@ def _ir_file_handler(clip_negative_radiances: bool = False): ), # last rad_data value }, ) - with create_file_handler( - rad=rad, - clip_negative_radiances=clip_negative_radiances, - filetype_resolution=2000, - ) as file_handler: - yield file_handler + return rad class Test_NC_ABI_L1B_vis_cal: """Test the NC_ABI_L1B reader.""" - def test_vis_calibrate(self): + def test_vis_calibrate(self, l1b_c01_file): """Test VIS calibration.""" rad_data = np.arange(10.0).reshape((2, 5)) + 1.0 rad_data = (rad_data + 1.0) / 0.5 @@ -382,10 +349,9 @@ def test_vis_calibrate(self): "_FillValue": 20, }, ) - with create_file_handler(rad=rad, filetype_resolution=1000) as file_handler: - res = file_handler.get_dataset( - make_dataid(name="C05", calibration="reflectance"), {} - ) + scn = l1b_c01_file(rad=rad) + scn.load(["C01"]) + res = scn["C01"] expected = np.array( [ @@ -403,7 +369,7 @@ def test_vis_calibrate(self): class Test_NC_ABI_L1B_raw_cal: """Test the NC_ABI_L1B reader raw calibration.""" - def test_raw_calibrate(self): + def test_raw_calibrate(self, l1b_c01_file): """Test RAW calibration.""" rad_data = np.arange(10.0).reshape((2, 5)) + 1.0 rad_data = (rad_data + 1.0) / 0.5 @@ -417,10 +383,9 @@ def test_raw_calibrate(self): "_FillValue": 20, }, ) - with create_file_handler(rad=rad) as file_handler: - res = file_handler.get_dataset( - make_dataid(name="C05", calibration="counts"), {} - ) + scn = l1b_c01_file(rad=rad) + scn.load([DataQuery(name="C01", calibration="counts")]) + res = scn["C01"] # We expect the raw data to be unchanged expected = res.data From f9efd963ba6a50cbead6979095db4786c456c31d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 29 Oct 2023 21:16:10 -0500 Subject: [PATCH 0626/1416] Use more realistic sizes in ABI tests --- satpy/tests/reader_tests/test_abi_l1b.py | 49 +++++++++++++----------- 1 
file changed, 27 insertions(+), 22 deletions(-) diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index e60453f9a3..fb08ef9361 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -41,12 +41,12 @@ def _create_fake_rad_dataarray( rad: xr.DataArray | None = None, - # resolution: int = 2000, + resolution: int = 2000, ) -> xr.DataArray: x_image = xr.DataArray(0.0) y_image = xr.DataArray(0.0) time = xr.DataArray(0.0) - shape = (2, 5) # RAD_SHAPE[resolution] + shape = RAD_SHAPE[resolution] if rad is None: rad_data = (np.arange(shape[0] * shape[1]).reshape(shape) + 1.0) * 50.0 rad_data = (rad_data + 1.0) / 0.5 @@ -68,14 +68,14 @@ def _create_fake_rad_dataarray( return rad -def _create_fake_rad_dataset(rad=None): - rad = _create_fake_rad_dataarray(rad=rad) +def _create_fake_rad_dataset(rad: xr.DataArray, resolution: int) -> xr.Dataset: + rad = _create_fake_rad_dataarray(rad=rad, resolution=resolution) x__ = xr.DataArray( - range(5), attrs={"scale_factor": 2.0, "add_offset": -1.0}, dims=("x",) + range(rad.shape[1]), attrs={"scale_factor": 2.0, "add_offset": -1.0}, dims=("x",) ) y__ = xr.DataArray( - range(2), attrs={"scale_factor": -2.0, "add_offset": 1.0}, dims=("y",) + range(rad.shape[0]), attrs={"scale_factor": -2.0, "add_offset": 1.0}, dims=("y",) ) proj = xr.DataArray( np.int64(0), @@ -131,7 +131,7 @@ def l1b_c01_file(tmp_path_factory) -> Callable: def _create_file_handler(rad: xr.DataArray | None = None): filename = generate_l1b_filename("C01") data_path = tmp_path_factory.mktemp("abi_l1b") / filename - dataset = _create_fake_rad_dataset(rad=rad) + dataset = _create_fake_rad_dataset(rad=rad, resolution=1000) dataset.to_netcdf(data_path) scn = Scene( reader="abi_l1b", @@ -145,12 +145,12 @@ def _create_file_handler(rad: xr.DataArray | None = None): @pytest.fixture(scope="module") def l1b_c07_file(tmp_path_factory) -> Callable: def _create_file_handler( - rad: xr.DataArray | None = None, + rad: xr.DataArray, clip_negative_radiances: bool = False, ): filename = generate_l1b_filename("C07") data_path = tmp_path_factory.mktemp("abi_l1b") / filename - dataset = _create_fake_rad_dataset(rad=rad) + dataset = _create_fake_rad_dataset(rad=rad, resolution=2000) dataset.to_netcdf(data_path) scn = Scene( reader="abi_l1b", @@ -204,7 +204,7 @@ def fake_rad(self): Needs to be an instance method so the subclass can override it. 
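         (The h5netcdf subclass overrides it to exercise h5netcdf-style attribute quirks.)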
""" - return None # use default from file handler creator + return None def test_get_dataset(self, l1b_c01_file): """Test the get_dataset method.""" @@ -282,7 +282,7 @@ def test_get_area_def(self, l1b_c01_file): assert proj_dict[proj_key] == proj_val assert area_def.shape == scn["C01"].shape - assert area_def.area_extent == (-2, -2, 8, 2) + assert area_def.area_extent == (-2.0, -2998.0, 4998.0, 2.0) class Test_NC_ABI_L1B_ir_cal: @@ -298,11 +298,12 @@ def test_ir_calibrate(self, l1b_c07_file, clip_negative_radiances): clipped_ir = 134.68753 if clip_negative_radiances else np.nan expected = np.array( [ - [clipped_ir, 304.97037, 332.22778, 354.6147, 374.08688], - [391.58655, 407.64786, 422.60635, 436.68802, np.nan], + clipped_ir, 304.97037, 332.22778, 354.6147, 374.08688, + 391.58655, 407.64786, 422.60635, 436.68802, np.nan, ] ) - np.testing.assert_allclose(res.data, expected, equal_nan=True, atol=1e-04) + data_np = res.data.compute() + np.testing.assert_allclose(data_np[0, :10], expected, equal_nan=True, atol=1e-04) # make sure the attributes from the file are in the data array assert "scale_factor" not in res.attrs @@ -312,8 +313,9 @@ def test_ir_calibrate(self, l1b_c07_file, clip_negative_radiances): def _fake_ir_data(): - values = np.arange(10.0) - rad_data = (values.reshape((2, 5)) + 1.0) * 50.0 + shape = RAD_SHAPE[2000] + values = np.arange(shape[0] * shape[1]) + rad_data = (values.reshape(shape) + 1.0) * 50.0 rad_data[0, 0] = -0.0001 # introduce below minimum expected radiance rad_data = (rad_data + 1.3) / 0.5 data = rad_data.astype(np.int16) @@ -337,7 +339,8 @@ class Test_NC_ABI_L1B_vis_cal: def test_vis_calibrate(self, l1b_c01_file): """Test VIS calibration.""" - rad_data = np.arange(10.0).reshape((2, 5)) + 1.0 + shape = RAD_SHAPE[1000] + rad_data = np.arange(shape[0] * shape[1]).reshape(shape) + 1.0 rad_data = (rad_data + 1.0) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( @@ -355,11 +358,12 @@ def test_vis_calibrate(self, l1b_c01_file): expected = np.array( [ - [0.15265617, 0.30531234, 0.45796851, 0.61062468, 0.76328085], - [0.91593702, 1.06859319, 1.22124936, np.nan, 1.52656171], + 0.15265617, 0.30531234, 0.45796851, 0.61062468, 0.76328085, + 0.91593702, 1.06859319, 1.22124936, np.nan, 1.52656171, ] ) - assert np.allclose(res.data, expected, equal_nan=True) + data_np = res.data.compute() + assert np.allclose(data_np[0, :10], expected, equal_nan=True) assert "scale_factor" not in res.attrs assert "_FillValue" not in res.attrs assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" @@ -371,7 +375,8 @@ class Test_NC_ABI_L1B_raw_cal: def test_raw_calibrate(self, l1b_c01_file): """Test RAW calibration.""" - rad_data = np.arange(10.0).reshape((2, 5)) + 1.0 + shape = RAD_SHAPE[1000] + rad_data = np.arange(shape[0] * shape[1]).reshape(shape) + 1.0 rad_data = (rad_data + 1.0) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( @@ -423,7 +428,7 @@ class Test_NC_ABI_L1B_H5netcdf(Test_NC_ABI_L1B): @property def fake_rad(self): """Create fake data for the tests.""" - shape = (2, 5) + shape = RAD_SHAPE[1000] rad_data = (np.arange(shape[0] * shape[1]).reshape(shape) + 1.0) * 50.0 rad_data = (rad_data + 1.0) / 0.5 rad_data = rad_data.astype(np.int16) From 1e0e21f083cbf2cde44dbabbe5c9cc4287df091e Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 29 Oct 2023 21:29:33 -0500 Subject: [PATCH 0627/1416] Revert AreaDefinition import for easier test mocking --- satpy/readers/abi_base.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git 
a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py index 3574349c71..28ff91ce38 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -23,7 +23,7 @@ import numpy as np import xarray as xr -from pyresample.geometry import AreaDefinition +from pyresample import geometry from satpy._compat import cached_property from satpy.readers import open_file_or_filename @@ -212,7 +212,7 @@ def _get_areadef_latlon(self, key): "fi": float(fi), "pm": float(pm)} - ll_area_def = AreaDefinition( + ll_area_def = geometry.AreaDefinition( self.nc.attrs.get("orbital_slot", "abi_geos"), self.nc.attrs.get("spatial_resolution", "ABI file area"), "abi_latlon", @@ -262,7 +262,7 @@ def _get_areadef_fixedgrid(self, key): "units": "m", "sweep": sweep_axis} - fg_area_def = AreaDefinition( + fg_area_def = geometry.AreaDefinition( self.nc.attrs.get("orbital_slot", "abi_geos"), self.nc.attrs.get("spatial_resolution", "ABI file area"), "abi_fixed_grid", From 514f5e1bef90d57282993ee04501c70cf7b4ea28 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 30 Oct 2023 11:52:33 -0500 Subject: [PATCH 0628/1416] More abi l1b test refactoring --- satpy/tests/reader_tests/test_abi_l1b.py | 446 +++++++++++------------ 1 file changed, 214 insertions(+), 232 deletions(-) diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index fb08ef9361..54c78e1089 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -19,17 +19,18 @@ from __future__ import annotations from datetime import datetime -from typing import Callable +from pathlib import Path +from typing import Any, Callable from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr +from pytest_lazyfixture import lazy_fixture from satpy import DataQuery, Scene from satpy.readers.abi_l1b import NC_ABI_L1B -from satpy.tests.utils import make_dataid from satpy.utils import ignore_pyproj_proj_warnings RAD_SHAPE = { @@ -72,10 +73,14 @@ def _create_fake_rad_dataset(rad: xr.DataArray, resolution: int) -> xr.Dataset: rad = _create_fake_rad_dataarray(rad=rad, resolution=resolution) x__ = xr.DataArray( - range(rad.shape[1]), attrs={"scale_factor": 2.0, "add_offset": -1.0}, dims=("x",) + range(rad.shape[1]), + attrs={"scale_factor": 2.0, "add_offset": -1.0}, + dims=("x",), ) y__ = xr.DataArray( - range(rad.shape[0]), attrs={"scale_factor": -2.0, "add_offset": 1.0}, dims=("y",) + range(rad.shape[0]), + attrs={"scale_factor": -2.0, "add_offset": 1.0}, + dims=("y",), ) proj = xr.DataArray( np.int64(0), @@ -126,92 +131,144 @@ def generate_l1b_filename(chan_name: str) -> str: return f"OR_ABI-L1b-RadC-M4{chan_name}_G16_s20161811540362_e20161811545170_c20161811545230_suffix.nc" -@pytest.fixture(scope="module") -def l1b_c01_file(tmp_path_factory) -> Callable: - def _create_file_handler(rad: xr.DataArray | None = None): - filename = generate_l1b_filename("C01") - data_path = tmp_path_factory.mktemp("abi_l1b") / filename - dataset = _create_fake_rad_dataset(rad=rad, resolution=1000) - dataset.to_netcdf(data_path) - scn = Scene( - reader="abi_l1b", - filenames=[str(data_path)], - ) - return scn +@pytest.fixture() +def c01_refl(tmp_path) -> xr.DataArray: + scn = _create_scene_for_data(tmp_path, "C01", None, 1000) + scn.load(["C01"]) + return scn["C01"] + + +@pytest.fixture() +def c01_rad(tmp_path) -> xr.DataArray: + scn = _create_scene_for_data(tmp_path, "C01", None, 1000) + scn.load([DataQuery(name="C01", calibration="radiance")]) + return scn["C01"] + 
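These fixtures all load through the public Scene API instead of constructing
NC_ABI_L1B file handlers by hand. As a minimal sketch of the shared pattern,
reusing the fake granule name produced by generate_l1b_filename() and a
calibration declared in the reader YAML:

    from satpy import DataQuery, Scene

    scn = Scene(
        reader="abi_l1b",
        filenames=["OR_ABI-L1b-RadC-M4C01_G16_s20161811540362_e20161811545170_c20161811545230_suffix.nc"],
    )
    # A bare name loads the default calibration; a DataQuery pins it explicitly.
    scn.load([DataQuery(name="C01", calibration="radiance")])
    data_arr = scn["C01"]  # dask-backed xarray.DataArray with reader metadata in .attrs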
- return _create_file_handler +@pytest.fixture() +def c01_rad_h5netcdf(tmp_path) -> xr.DataArray: + shape = RAD_SHAPE[1000] + rad_data = (np.arange(shape[0] * shape[1]).reshape(shape) + 1.0) * 50.0 + rad_data = (rad_data + 1.0) / 0.5 + rad_data = rad_data.astype(np.int16) + rad = xr.DataArray( + da.from_array(rad_data), + attrs={ + "scale_factor": 0.5, + "add_offset": -1.0, + "_FillValue": np.array([1002]), + "units": "W m-2 um-1 sr-1", + "valid_range": (0, 4095), + }, + ) + scn = _create_scene_for_data(tmp_path, "C01", rad, 1000) + scn.load([DataQuery(name="C01", calibration="radiance")]) + return scn["C01"] + + +@pytest.fixture() +def c01_counts(tmp_path) -> xr.DataArray: + scn = _create_scene_for_data(tmp_path, "C01", None, 1000) + scn.load([DataQuery(name="C01", calibration="counts")]) + return scn["C01"] + + +def _create_scene_for_data( + tmp_path: Path, + channel_name: str, + rad: xr.DataArray | None, + resolution: int, + reader_kwargs: dict[str, Any] | None = None, +) -> Scene: + filename = generate_l1b_filename(channel_name) + data_path = tmp_path / filename + dataset = _create_fake_rad_dataset(rad=rad, resolution=resolution) + dataset.to_netcdf(data_path) + scn = Scene( + reader="abi_l1b", + filenames=[str(data_path)], + reader_kwargs=reader_kwargs, + ) + return scn -@pytest.fixture(scope="module") -def l1b_c07_file(tmp_path_factory) -> Callable: - def _create_file_handler( - rad: xr.DataArray, - clip_negative_radiances: bool = False, +@pytest.fixture() +def c07_bt_creator(tmp_path) -> Callable: + def _load_data_array( + clip_negative_radiances: bool = False, ): - filename = generate_l1b_filename("C07") - data_path = tmp_path_factory.mktemp("abi_l1b") / filename - dataset = _create_fake_rad_dataset(rad=rad, resolution=2000) - dataset.to_netcdf(data_path) - scn = Scene( - reader="abi_l1b", - filenames=[str(data_path)], - reader_kwargs={"clip_negative_radiances": clip_negative_radiances} + rad = _fake_c07_data() + scn = _create_scene_for_data( + tmp_path, + "C07", + rad, + 2000, + {"clip_negative_radiances": clip_negative_radiances}, ) - return scn + scn.load(["C07"]) + return scn["C07"] - return _create_file_handler + return _load_data_array -class TestABIYAML: - """Tests for the ABI L1b reader's YAML configuration.""" - - @pytest.mark.parametrize( - ("channel", "suffix"), - [ - ("C{:02d}".format(num), suffix) - for num in range(1, 17) - for suffix in ("", "_test_suffix") - ], +def _fake_c07_data() -> xr.DataArray: + shape = RAD_SHAPE[2000] + values = np.arange(shape[0] * shape[1]) + rad_data = (values.reshape(shape) + 1.0) * 50.0 + rad_data[0, 0] = -0.0001 # introduce below minimum expected radiance + rad_data = (rad_data + 1.3) / 0.5 + data = rad_data.astype(np.int16) + rad = xr.DataArray( + da.from_array(data), + dims=("y", "x"), + attrs={ + "scale_factor": 0.5, + "add_offset": -1.3, + "_FillValue": np.int16( + np.floor(((9 + 1) * 50.0 + 1.3) / 0.5) + ), # last rad_data value + }, ) - def test_file_patterns_match(self, channel, suffix): - """Test that the configured file patterns work.""" - from satpy.readers import configs_for_reader, load_reader + return rad - reader_configs = list(configs_for_reader("abi_l1b"))[0] - reader = load_reader(reader_configs) - fn1 = ( + +@pytest.mark.parametrize( + ("channel", "suffix"), + [ + ("C{:02d}".format(num), suffix) + for num in range(1, 17) + for suffix in ("", "_test_suffix") + ], +) +def test_file_patterns_match(channel, suffix): + """Test that the configured file patterns work.""" + from satpy.readers import configs_for_reader, 
load_reader + + reader_configs = list(configs_for_reader("abi_l1b"))[0] + reader = load_reader(reader_configs) + fn1 = ( + "OR_ABI-L1b-RadM1-M3{}_G16_s20182541300210_e20182541300267" + "_c20182541300308{}.nc" + ).format(channel, suffix) + loadables = reader.select_files_from_pathnames([fn1]) + assert len(loadables) == 1 + if not suffix and channel in ["C01", "C02", "C03", "C05"]: + fn2 = ( "OR_ABI-L1b-RadM1-M3{}_G16_s20182541300210_e20182541300267" - "_c20182541300308{}.nc" - ).format(channel, suffix) - loadables = reader.select_files_from_pathnames([fn1]) + "_c20182541300308-000000_0.nc" + ).format(channel) + loadables = reader.select_files_from_pathnames([fn2]) assert len(loadables) == 1 - if not suffix and channel in ["C01", "C02", "C03", "C05"]: - fn2 = ( - "OR_ABI-L1b-RadM1-M3{}_G16_s20182541300210_e20182541300267" - "_c20182541300308-000000_0.nc" - ).format(channel) - loadables = reader.select_files_from_pathnames([fn2]) - assert len(loadables) == 1 +@pytest.mark.parametrize( + "c01_data_arr", [lazy_fixture("c01_rad"), lazy_fixture("c01_rad_h5netcdf")] +) class Test_NC_ABI_L1B: """Test the NC_ABI_L1B reader.""" - @property - def fake_rad(self): - """Create fake data for these tests. - - Needs to be an instance method so the subclass can override it. - - """ - return None - - def test_get_dataset(self, l1b_c01_file): + def test_get_dataset(self, c01_data_arr): """Test the get_dataset method.""" - scn = l1b_c01_file(rad=self.fake_rad) - key = make_dataid(name="C01", calibration="radiance") - scn.load([key]) - exp = { "calibration": "radiance", "instrument_ID": None, @@ -244,7 +301,7 @@ def test_get_dataset(self, l1b_c01_file): "end_time": datetime(2017, 9, 20, 17, 41, 17, 500000), } - res = scn["C01"] + res = c01_data_arr assert "area" in res.attrs for exp_key, exp_val in exp.items(): assert res.attrs[exp_key] == exp_val @@ -255,13 +312,11 @@ def test_get_dataset(self, l1b_c01_file): assert "time" not in res.coords assert "time" not in res.dims - def test_get_area_def(self, l1b_c01_file): + def test_get_area_def(self, c01_data_arr): """Test the area generation.""" from pyresample.geometry import AreaDefinition - scn = l1b_c01_file(rad=self.fake_rad) - scn.load(["C01"]) - area_def = scn["C01"].attrs["area"] + area_def = c01_data_arr.attrs["area"] assert isinstance(area_def, AreaDefinition) with ignore_pyproj_proj_warnings(): @@ -281,165 +336,92 @@ def test_get_area_def(self, l1b_c01_file): for proj_key, proj_val in exp_dict.items(): assert proj_dict[proj_key] == proj_val - assert area_def.shape == scn["C01"].shape + assert area_def.shape == c01_data_arr.shape assert area_def.area_extent == (-2.0, -2998.0, 4998.0, 2.0) -class Test_NC_ABI_L1B_ir_cal: - """Test the NC_ABI_L1B reader's default IR calibration.""" - - @pytest.mark.parametrize("clip_negative_radiances", [False, True]) - def test_ir_calibrate(self, l1b_c07_file, clip_negative_radiances): - """Test IR calibration.""" - scn = l1b_c07_file(rad=_fake_ir_data(), clip_negative_radiances=clip_negative_radiances) - scn.load([DataQuery(name="C07", calibration="brightness_temperature")]) - res = scn["C07"] - - clipped_ir = 134.68753 if clip_negative_radiances else np.nan - expected = np.array( - [ - clipped_ir, 304.97037, 332.22778, 354.6147, 374.08688, - 391.58655, 407.64786, 422.60635, 436.68802, np.nan, - ] - ) - data_np = res.data.compute() - np.testing.assert_allclose(data_np[0, :10], expected, equal_nan=True, atol=1e-04) - - # make sure the attributes from the file are in the data array - assert "scale_factor" not in res.attrs - 
assert "_FillValue" not in res.attrs - assert res.attrs["standard_name"] == "toa_brightness_temperature" - assert res.attrs["long_name"] == "Brightness Temperature" - - -def _fake_ir_data(): - shape = RAD_SHAPE[2000] - values = np.arange(shape[0] * shape[1]) - rad_data = (values.reshape(shape) + 1.0) * 50.0 - rad_data[0, 0] = -0.0001 # introduce below minimum expected radiance - rad_data = (rad_data + 1.3) / 0.5 - data = rad_data.astype(np.int16) - - rad = xr.DataArray( - da.from_array(data), - dims=("y", "x"), - attrs={ - "scale_factor": 0.5, - "add_offset": -1.3, - "_FillValue": np.int16( - np.floor(((9 + 1) * 50.0 + 1.3) / 0.5) - ), # last rad_data value - }, +@pytest.mark.parametrize("clip_negative_radiances", [False, True]) +def test_ir_calibrate(self, c07_bt_creator, clip_negative_radiances): + """Test IR calibration.""" + res = c07_bt_creator(clip_negative_radiances=clip_negative_radiances) + clipped_ir = 134.68753 if clip_negative_radiances else np.nan + expected = np.array( + [ + clipped_ir, + 304.97037, + 332.22778, + 354.6147, + 374.08688, + 391.58655, + 407.64786, + 422.60635, + 436.68802, + np.nan, + ] + ) + data_np = res.data.compute() + np.testing.assert_allclose( + data_np[0, :10], expected, equal_nan=True, atol=1e-04 ) - return rad - - -class Test_NC_ABI_L1B_vis_cal: - """Test the NC_ABI_L1B reader.""" - - def test_vis_calibrate(self, l1b_c01_file): - """Test VIS calibration.""" - shape = RAD_SHAPE[1000] - rad_data = np.arange(shape[0] * shape[1]).reshape(shape) + 1.0 - rad_data = (rad_data + 1.0) / 0.5 - rad_data = rad_data.astype(np.int16) - rad = xr.DataArray( - da.from_array(rad_data), - dims=("y", "x"), - attrs={ - "scale_factor": 0.5, - "add_offset": -1.0, - "_FillValue": 20, - }, - ) - scn = l1b_c01_file(rad=rad) - scn.load(["C01"]) - res = scn["C01"] - - expected = np.array( - [ - 0.15265617, 0.30531234, 0.45796851, 0.61062468, 0.76328085, - 0.91593702, 1.06859319, 1.22124936, np.nan, 1.52656171, - ] - ) - data_np = res.data.compute() - assert np.allclose(data_np[0, :10], expected, equal_nan=True) - assert "scale_factor" not in res.attrs - assert "_FillValue" not in res.attrs - assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" - assert res.attrs["long_name"] == "Bidirectional Reflectance" - - -class Test_NC_ABI_L1B_raw_cal: - """Test the NC_ABI_L1B reader raw calibration.""" - - def test_raw_calibrate(self, l1b_c01_file): - """Test RAW calibration.""" - shape = RAD_SHAPE[1000] - rad_data = np.arange(shape[0] * shape[1]).reshape(shape) + 1.0 - rad_data = (rad_data + 1.0) / 0.5 - rad_data = rad_data.astype(np.int16) - rad = xr.DataArray( - da.from_array(rad_data), - dims=("y", "x"), - attrs={ - "scale_factor": 0.5, - "add_offset": -1.0, - "_FillValue": 20, - }, - ) - scn = l1b_c01_file(rad=rad) - scn.load([DataQuery(name="C01", calibration="counts")]) - res = scn["C01"] - - # We expect the raw data to be unchanged - expected = res.data - assert np.allclose(res.data, expected, equal_nan=True) - - # check for the presence of typical attributes - assert "scale_factor" in res.attrs - assert "add_offset" in res.attrs - assert "_FillValue" in res.attrs - assert "orbital_parameters" in res.attrs - assert "platform_shortname" in res.attrs - assert "scene_id" in res.attrs - - # determine if things match their expected values/types. 
- assert res.data.dtype == np.int16 - assert res.attrs["standard_name"] == "counts" - assert res.attrs["long_name"] == "Raw Counts" - - -class Test_NC_ABI_File: - """Test file opening.""" - - @mock.patch("satpy.readers.abi_base.xr") - def test_open_dataset(self, _): # noqa: PT019 - """Test openning a dataset.""" - openable_thing = mock.MagicMock() - - NC_ABI_L1B(openable_thing, {"platform_shortname": "g16"}, {}) - openable_thing.open.assert_called() + # make sure the attributes from the file are in the data array + assert "scale_factor" not in res.attrs + assert "_FillValue" not in res.attrs + assert res.attrs["standard_name"] == "toa_brightness_temperature" + assert res.attrs["long_name"] == "Brightness Temperature" -class Test_NC_ABI_L1B_H5netcdf(Test_NC_ABI_L1B): - """Allow h5netcdf peculiarities.""" - @property - def fake_rad(self): - """Create fake data for the tests.""" - shape = RAD_SHAPE[1000] - rad_data = (np.arange(shape[0] * shape[1]).reshape(shape) + 1.0) * 50.0 - rad_data = (rad_data + 1.0) / 0.5 - rad_data = rad_data.astype(np.int16) - rad = xr.DataArray( - da.from_array(rad_data), - attrs={ - "scale_factor": 0.5, - "add_offset": -1.0, - "_FillValue": np.array([1002]), - "units": "W m-2 um-1 sr-1", - "valid_range": (0, 4095), - }, - ) - return rad +def test_vis_calibrate(c01_refl): + """Test VIS calibration.""" + res = c01_refl + expected = np.array( + [ + 7.632808, + 15.265616, + 22.898426, + 30.531233, + 38.164043, + 45.796852, + 53.429657, + 61.062466, + 68.695274, + np.nan, + ] + ) + data_np = res.data.compute() + np.testing.assert_allclose(data_np[0, :10], expected, equal_nan=True) + assert "scale_factor" not in res.attrs + assert "_FillValue" not in res.attrs + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" + assert res.attrs["long_name"] == "Bidirectional Reflectance" + + +def test_raw_calibrate(c01_counts): + """Test RAW calibration.""" + res = c01_counts + + # We expect the raw data to be unchanged + expected = res.data + assert np.allclose(res.data, expected, equal_nan=True) + + # check for the presence of typical attributes + assert "scale_factor" in res.attrs + assert "add_offset" in res.attrs + assert "_FillValue" in res.attrs + assert "orbital_parameters" in res.attrs + assert "platform_shortname" in res.attrs + assert "scene_id" in res.attrs + + # determine if things match their expected values/types. 
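+    # (counts must come back as the raw int16 file values, attrs intact)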
+ assert res.data.dtype == np.int16 + assert res.attrs["standard_name"] == "counts" + assert res.attrs["long_name"] == "Raw Counts" + + +@mock.patch("satpy.readers.abi_base.xr") +def test_open_dataset(_): # noqa: PT019 + """Test opening a dataset.""" + openable_thing = mock.MagicMock() + + NC_ABI_L1B(openable_thing, {"platform_shortname": "g16"}, {}) + openable_thing.open.assert_called() From 83609cca303a5ae3abe3e4b1959f16d77f26b6bb Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 30 Oct 2023 13:11:47 -0500 Subject: [PATCH 0629/1416] Undo forcing GRB fill to floating point Caused failure in GLM L2 DQF processing --- satpy/readers/abi_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py index 28ff91ce38..3fdd724e12 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -140,7 +140,7 @@ def is_int(val): new_fill = fill else: new_fill = np.nan - data = data.where(data != fill, np.float32(new_fill)) + data = data.where(data != fill, new_fill) if factor != 1 and item in ("x", "y"): # be more precise with x/y coordinates # see get_area_def for more information From 8b5c450509105cff28881a621d7044cda309f665 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 30 Oct 2023 14:38:07 -0500 Subject: [PATCH 0630/1416] Fix various inconsistencies in ABI L1b DataArrays --- satpy/readers/abi_base.py | 7 +- satpy/readers/abi_l1b.py | 1 + satpy/tests/reader_tests/test_abi_l1b.py | 141 +++++++++++++---------- 3 files changed, 84 insertions(+), 65 deletions(-) diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py index 3fdd724e12..1c0824ab27 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -139,18 +139,13 @@ def is_int(val): if is_int(data) and is_int(factor) and is_int(offset): new_fill = fill else: - new_fill = np.nan + new_fill = np.float32(np.nan) data = data.where(data != fill, new_fill) if factor != 1 and item in ("x", "y"): # be more precise with x/y coordinates # see get_area_def for more information data = data * np.round(float(factor), 6) + np.round(float(offset), 6) elif factor != 1: - # make sure the factor is a 64-bit float - # can't do this in place since data is most likely uint16 - # and we are making it a 64-bit float - if not is_int(factor): - factor = np.float32(factor) data = data * np.float32(factor) + np.float32(offset) return data diff --git a/satpy/readers/abi_l1b.py b/satpy/readers/abi_l1b.py index c3da53c9c7..4933b0982a 100644 --- a/satpy/readers/abi_l1b.py +++ b/satpy/readers/abi_l1b.py @@ -49,6 +49,7 @@ def get_dataset(self, key, info): # For raw cal, don't apply scale and offset, return raw file counts if key["calibration"] == "counts": radiances = self.nc["Rad"].copy() + radiances = self._adjust_coords(radiances, "Rad") else: radiances = self["Rad"] diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index 54c78e1089..61f1746bf9 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -25,6 +25,7 @@ import dask.array as da import numpy as np +import numpy.typing as npt import pytest import xarray as xr from pytest_lazyfixture import lazy_fixture @@ -153,6 +154,7 @@ def c01_rad_h5netcdf(tmp_path) -> xr.DataArray: rad_data = rad_data.astype(np.int16) rad = xr.DataArray( da.from_array(rad_data), + dims=("y", "x"), attrs={ "scale_factor": 0.5, "add_offset": -1.0, @@ -173,25 +175,6 @@ def c01_counts(tmp_path) -> xr.DataArray: return scn["C01"] -def 
_create_scene_for_data( - tmp_path: Path, - channel_name: str, - rad: xr.DataArray | None, - resolution: int, - reader_kwargs: dict[str, Any] | None = None, -) -> Scene: - filename = generate_l1b_filename(channel_name) - data_path = tmp_path / filename - dataset = _create_fake_rad_dataset(rad=rad, resolution=resolution) - dataset.to_netcdf(data_path) - scn = Scene( - reader="abi_l1b", - filenames=[str(data_path)], - reader_kwargs=reader_kwargs, - ) - return scn - - @pytest.fixture() def c07_bt_creator(tmp_path) -> Callable: def _load_data_array( @@ -232,6 +215,73 @@ def _fake_c07_data() -> xr.DataArray: return rad +def _create_scene_for_data( + tmp_path: Path, + channel_name: str, + rad: xr.DataArray | None, + resolution: int, + reader_kwargs: dict[str, Any] | None = None, +) -> Scene: + filename = generate_l1b_filename(channel_name) + data_path = tmp_path / filename + dataset = _create_fake_rad_dataset(rad=rad, resolution=resolution) + dataset.to_netcdf(data_path) + scn = Scene( + reader="abi_l1b", + filenames=[str(data_path)], + reader_kwargs=reader_kwargs, + ) + return scn + + +def _get_and_check_array(data_arr: xr.DataArray, exp_dtype: npt.DTypeLike) -> npt.NDArray: + data_np = data_arr.data.compute() + assert data_np.dtype == data_arr.dtype + assert data_np.dtype == exp_dtype + return data_np + +def _check_area(data_arr: xr.DataArray) -> None: + from pyresample.geometry import AreaDefinition + + area_def = data_arr.attrs["area"] + assert isinstance(area_def, AreaDefinition) + + with ignore_pyproj_proj_warnings(): + proj_dict = area_def.crs.to_dict() + exp_dict = { + "h": 1.0, + "lon_0": -90.0, + "proj": "geos", + "sweep": "x", + "units": "m", + } + if "R" in proj_dict: + assert proj_dict["R"] == 1 + else: + assert proj_dict["a"] == 1 + assert proj_dict["b"] == 1 + for proj_key, proj_val in exp_dict.items(): + assert proj_dict[proj_key] == proj_val + + assert area_def.shape == data_arr.shape + if area_def.shape[0] == RAD_SHAPE[1000][0]: + exp_extent = (-2.0, -2998.0, 4998.0, 2.0) + else: + exp_extent = (-2.0, -1498.0, 2498.0, 2.0) + assert area_def.area_extent == exp_extent + + +def _check_dims_and_coords(data_arr: xr.DataArray) -> None: + assert "y" in data_arr.dims + assert "x" in data_arr.dims + + # we remove any time dimension information + assert "t" not in data_arr.coords + assert "t" not in data_arr.dims + assert "time" not in data_arr.coords + assert "time" not in data_arr.dims + + @pytest.mark.parametrize( ("channel", "suffix"), [ @@ -302,46 +352,15 @@ def test_get_dataset(self, c01_data_arr): } res = c01_data_arr - assert "area" in res.attrs + _get_and_check_array(res, np.float32) + _check_area(res) + _check_dims_and_coords(res) for exp_key, exp_val in exp.items(): assert res.attrs[exp_key] == exp_val - # we remove any time dimension information - assert "t" not in res.coords - assert "t" not in res.dims - assert "time" not in res.coords - assert "time" not in res.dims - - def test_get_area_def(self, c01_data_arr): - """Test the area generation.""" - from pyresample.geometry import AreaDefinition - - area_def = c01_data_arr.attrs["area"] - assert isinstance(area_def, AreaDefinition) - - with ignore_pyproj_proj_warnings(): - proj_dict = area_def.crs.to_dict() - exp_dict = { - "h": 1.0, - "lon_0": -90.0, - "proj": "geos", - "sweep": "x", - "units": "m", - } - if "R" in proj_dict: - assert proj_dict["R"] == 1 - else: - assert proj_dict["a"] == 1 - assert proj_dict["b"] == 1 - for proj_key, proj_val in exp_dict.items(): - assert proj_dict[proj_key] == proj_val - - assert 
area_def.shape == c01_data_arr.shape - assert area_def.area_extent == (-2.0, -2998.0, 4998.0, 2.0) - @pytest.mark.parametrize("clip_negative_radiances", [False, True]) -def test_ir_calibrate(self, c07_bt_creator, clip_negative_radiances): +def test_ir_calibrate(c07_bt_creator, clip_negative_radiances): """Test IR calibration.""" res = c07_bt_creator(clip_negative_radiances=clip_negative_radiances) clipped_ir = 134.68753 if clip_negative_radiances else np.nan @@ -359,7 +378,9 @@ def test_ir_calibrate(self, c07_bt_creator, clip_negative_radiances): np.nan, ] ) - data_np = res.data.compute() + data_np = _get_and_check_array(res, np.float32) + _check_area(res) + _check_dims_and_coords(res) np.testing.assert_allclose( data_np[0, :10], expected, equal_nan=True, atol=1e-04 ) @@ -388,7 +409,9 @@ def test_vis_calibrate(c01_refl): np.nan, ] ) - data_np = res.data.compute() + data_np = _get_and_check_array(res, np.float32) + _check_area(res) + _check_dims_and_coords(res) np.testing.assert_allclose(data_np[0, :10], expected, equal_nan=True) assert "scale_factor" not in res.attrs assert "_FillValue" not in res.attrs @@ -401,8 +424,9 @@ def test_raw_calibrate(c01_counts): res = c01_counts # We expect the raw data to be unchanged - expected = res.data - assert np.allclose(res.data, expected, equal_nan=True) + _get_and_check_array(res, np.int16) + _check_area(res) + _check_dims_and_coords(res) # check for the presence of typical attributes assert "scale_factor" in res.attrs @@ -413,7 +437,6 @@ def test_raw_calibrate(c01_counts): assert "scene_id" in res.attrs # determine if things match their expected values/types. - assert res.data.dtype == np.int16 assert res.attrs["standard_name"] == "counts" assert res.attrs["long_name"] == "Raw Counts" From 14f59c49e4b327c349f510dd97b73e046eb14f72 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 31 Oct 2023 15:19:58 -0500 Subject: [PATCH 0631/1416] Add dask chunk size checks to ABI l1b tests --- satpy/readers/abi_base.py | 42 ++++++++++---- satpy/tests/reader_tests/test_abi_l1b.py | 70 ++++++++++++++++-------- 2 files changed, 79 insertions(+), 33 deletions(-) diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py index 1c0824ab27..07a29e3043 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -18,9 +18,11 @@ """Advance Baseline Imager reader base class for the Level 1b and l2+ reader.""" import logging +import math from contextlib import suppress from datetime import datetime +import dask import numpy as np import xarray as xr from pyresample import geometry @@ -28,11 +30,10 @@ from satpy._compat import cached_property from satpy.readers import open_file_or_filename from satpy.readers.file_handlers import BaseFileHandler -from satpy.utils import get_legacy_chunk_size +from satpy.utils import get_dask_chunk_size_in_bytes logger = logging.getLogger(__name__) -CHUNK_SIZE = get_legacy_chunk_size() PLATFORM_NAMES = { "g16": "GOES-16", "g17": "GOES-17", @@ -62,15 +63,8 @@ def __init__(self, filename, filename_info, filetype_info): @cached_property def nc(self): """Get the xarray dataset for this file.""" - import math - - from satpy.utils import get_dask_chunk_size_in_bytes - chunk_size_for_high_res = math.sqrt(get_dask_chunk_size_in_bytes() / 4) # 32-bit floats - chunk_size_for_high_res = np.round(max(chunk_size_for_high_res / (4 * 226), 1)) * (4 * 226) - low_res_factor = int(self.filetype_info.get("resolution", 2000) // 500) - res_chunk_bytes = int(chunk_size_for_high_res / low_res_factor) * 4 - import dask - with 
dask.config.set({"array.chunk-size": res_chunk_bytes}): + chunk_bytes = self._chunk_bytes_for_resolution() + with dask.config.set({"array.chunk-size": chunk_bytes}): f_obj = open_file_or_filename(self.filename) nc = xr.open_dataset(f_obj, decode_cf=True, @@ -79,6 +73,32 @@ def nc(self): nc = self._rename_dims(nc) return nc + def _chunk_bytes_for_resolution(self) -> int: + """Get a best-guess optimal chunk size for resolution-based chunking. + + First a chunk size is chosen for the provided Dask setting `array.chunk-size` + and then aligned with a hardcoded on-disk chunk size of 226. This is then + adjusted to match the current resolution. + + This should result in 500 meter data having 4 times as many pixels per + dask array chunk (2 in each dimension) as 1km data and 8 times as many + as 2km data. As data is combined or upsampled geographically the arrays + should not need to be rechunked. Care is taken to make sure that array + chunks are aligned with on-disk file chunks at all resolutions, but at + the cost of flexibility due to a hardcoded on-disk chunk size of 226 + elements per dimension. + + """ + num_high_res_elems_per_dim = math.sqrt(get_dask_chunk_size_in_bytes() / 4) # 32-bit floats + # assume on-disk chunk size of 226 + # this is true for all CSPP Geo GRB output (226 for all sectors) and full disk from other sources + # 250 has been seen for AWS/CLASS CONUS, Mesoscale 1, and Mesoscale 2 files + # we align this with 4 on-disk chunks at 500m, so it will be 2 on-disk chunks for 1km, and 1 for 2km + high_res_elems_disk_aligned = np.round(max(num_high_res_elems_per_dim / (4 * 226), 1)) * (4 * 226) + low_res_factor = int(self.filetype_info.get("resolution", 2000) // 500) + res_elems_per_dim = int(high_res_elems_disk_aligned / low_res_factor) + return (res_elems_per_dim ** 2) * 4 + @staticmethod def _rename_dims(nc): if "t" in nc.dims or "t" in nc.coords: diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index 61f1746bf9..ec3a0334cc 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -23,6 +23,7 @@ from typing import Any, Callable from unittest import mock +import dask import dask.array as da import numpy as np import numpy.typing as npt @@ -36,9 +37,12 @@ RAD_SHAPE = { 500: (3000, 5000), # conus - 500m - 1000: (1500, 2500), # conus - 1km - 2000: (750, 1250), # conus - 2km } +# RAD_SHAPE = { +# 500: (21696, 21696), # fldk - 500m +# } +RAD_SHAPE[1000] = (RAD_SHAPE[500][0] // 2, RAD_SHAPE[500][1] // 2) +RAD_SHAPE[2000] = (RAD_SHAPE[500][0] // 4, RAD_SHAPE[500][1] // 4) def _create_fake_rad_dataarray( @@ -54,7 +58,7 @@ def _create_fake_rad_dataarray( rad_data = (rad_data + 1.0) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( - da.from_array(rad_data), + da.from_array(rad_data, chunks=226), dims=("y", "x"), attrs={ "scale_factor": 0.5, @@ -134,15 +138,21 @@ def generate_l1b_filename(chan_name: str) -> str: @pytest.fixture() def c01_refl(tmp_path) -> xr.DataArray: - scn = _create_scene_for_data(tmp_path, "C01", None, 1000) - scn.load(["C01"]) + # 4 bytes for 32-bit floats + # 4 on-disk chunks for 500 meter data + # 226 on-disk chunk size + # Square (**2) for 2D size + with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): + scn = _create_scene_for_data(tmp_path, "C01", None, 1000) + scn.load(["C01"]) return scn["C01"] @pytest.fixture() def c01_rad(tmp_path) -> xr.DataArray: - scn = _create_scene_for_data(tmp_path, "C01", None, 1000) - scn.load([DataQuery(name="C01", 
calibration="radiance")]) + with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): + scn = _create_scene_for_data(tmp_path, "C01", None, 1000) + scn.load([DataQuery(name="C01", calibration="radiance")]) return scn["C01"] @@ -153,7 +163,7 @@ def c01_rad_h5netcdf(tmp_path) -> xr.DataArray: rad_data = (rad_data + 1.0) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( - da.from_array(rad_data), + da.from_array(rad_data, chunks=226), dims=("y", "x"), attrs={ "scale_factor": 0.5, @@ -163,15 +173,17 @@ def c01_rad_h5netcdf(tmp_path) -> xr.DataArray: "valid_range": (0, 4095), }, ) - scn = _create_scene_for_data(tmp_path, "C01", rad, 1000) - scn.load([DataQuery(name="C01", calibration="radiance")]) + with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): + scn = _create_scene_for_data(tmp_path, "C01", rad, 1000) + scn.load([DataQuery(name="C01", calibration="radiance")]) return scn["C01"] @pytest.fixture() def c01_counts(tmp_path) -> xr.DataArray: - scn = _create_scene_for_data(tmp_path, "C01", None, 1000) - scn.load([DataQuery(name="C01", calibration="counts")]) + with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): + scn = _create_scene_for_data(tmp_path, "C01", None, 1000) + scn.load([DataQuery(name="C01", calibration="counts")]) return scn["C01"] @@ -181,14 +193,15 @@ def _load_data_array( clip_negative_radiances: bool = False, ): rad = _fake_c07_data() - scn = _create_scene_for_data( - tmp_path, - "C07", - rad, - 2000, - {"clip_negative_radiances": clip_negative_radiances}, - ) - scn.load(["C07"]) + with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): + scn = _create_scene_for_data( + tmp_path, + "C07", + rad, + 2000, + {"clip_negative_radiances": clip_negative_radiances}, + ) + scn.load(["C07"]) return scn["C07"] return _load_data_array @@ -202,7 +215,7 @@ def _fake_c07_data() -> xr.DataArray: rad_data = (rad_data + 1.3) / 0.5 data = rad_data.astype(np.int16) rad = xr.DataArray( - da.from_array(data), + da.from_array(data, chunks=226), dims=("y", "x"), attrs={ "scale_factor": 0.5, @@ -225,7 +238,12 @@ def _create_scene_for_data( filename = generate_l1b_filename(channel_name) data_path = tmp_path / filename dataset = _create_fake_rad_dataset(rad=rad, resolution=resolution) - dataset.to_netcdf(data_path) + dataset.to_netcdf( + data_path, + encoding={ + "Rad": {"chunksizes": [226, 226]}, + }, + ) scn = Scene( reader="abi_l1b", filenames=[str(data_path)], @@ -236,10 +254,18 @@ def _create_scene_for_data( def _get_and_check_array(data_arr: xr.DataArray, exp_dtype: npt.DTypeLike) -> npt.NDArray: data_np = data_arr.data.compute() + assert isinstance(data_arr, xr.DataArray) + assert isinstance(data_arr.data, da.Array) + assert isinstance(data_np, np.ndarray) + res = 1000 if RAD_SHAPE[1000][0] == data_np.shape[0] else 2000 + assert data_arr.chunks[0][0] == 226 * (4 / (res / 500)) + assert data_arr.chunks[1][0] == 226 * (4 / (res / 500)) + assert data_np.dtype == data_arr.dtype assert data_np.dtype == exp_dtype return data_np + def _check_area(data_arr: xr.DataArray) -> None: from pyresample.geometry import AreaDefinition From 4d9fcbf84433f4adb05d0e1ba18480568de91b77 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 1 Nov 2023 09:41:31 +0200 Subject: [PATCH 0632/1416] Use a custom scheduler to check number of computes in NDVIHybridGreen compositor --- satpy/tests/compositor_tests/test_spectral.py | 33 +++++++++++-------- 1 file changed, 19 insertions(+), 14 deletions(-) diff --git a/satpy/tests/compositor_tests/test_spectral.py 
b/satpy/tests/compositor_tests/test_spectral.py index 36a3dd9355..55b24b1982 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -15,12 +15,14 @@ # satpy. If not, see . """Tests for spectral correction compositors.""" +import dask import dask.array as da import numpy as np import pytest import xarray as xr from satpy.composites.spectral import GreenCorrector, HybridGreen, NDVIHybridGreen, SpectralBlender +from satpy.tests.utils import CustomScheduler class TestSpectralComposites: @@ -92,25 +94,28 @@ def setup_method(self): def test_ndvi_hybrid_green(self): """Test General functionality with linear scaling from ndvi to blend fraction.""" - comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), - standard_name="toa_bidirectional_reflectance") - - # Test General functionality with linear strength (=1.0) - res = comp((self.c01, self.c02, self.c03)) - assert isinstance(res, xr.DataArray) - assert isinstance(res.data, da.Array) - assert res.attrs["name"] == "ndvi_hybrid_green" - assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" - data = res.values + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), + standard_name="toa_bidirectional_reflectance") + + # Test General functionality with linear strength (=1.0) + res = comp((self.c01, self.c02, self.c03)) + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) + assert res.attrs["name"] == "ndvi_hybrid_green" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" + data = res.values np.testing.assert_array_almost_equal(data, np.array([[0.2633, 0.3071], [0.2115, 0.3420]]), decimal=4) def test_nonliniear_scaling(self): """Test non-linear scaling using `strength` term.""" - comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), strength=2.0, prerequisites=(0.51, 0.65, 0.85), - standard_name="toa_bidirectional_reflectance") + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), strength=2.0, + prerequisites=(0.51, 0.65, 0.85), + standard_name="toa_bidirectional_reflectance") - res = comp((self.c01, self.c02, self.c03)) - np.testing.assert_array_almost_equal(res.values, np.array([[0.2646, 0.3075], [0.2120, 0.3471]]), decimal=4) + res = comp((self.c01, self.c02, self.c03)).compute() + np.testing.assert_array_almost_equal(res.data, np.array([[0.2646, 0.3075], [0.2120, 0.3471]]), decimal=4) def test_invalid_strength(self): """Test using invalid `strength` term for non-linear scaling.""" From 51f90e6c6a716fec0e64f55660289c49c07b395b Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 1 Nov 2023 09:42:51 +0200 Subject: [PATCH 0633/1416] Clip values instead of using da.where() twice --- satpy/composites/spectral.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/satpy/composites/spectral.py b/satpy/composites/spectral.py index 59e8518a7e..448d7cb26a 100644 --- a/satpy/composites/spectral.py +++ b/satpy/composites/spectral.py @@ -18,8 +18,6 @@ import logging import warnings -import dask.array as da - from satpy.composites import GenericCompositor from satpy.dataset import combine_metadata @@ -166,8 +164,7 @@ def __call__(self, projectables, optional_datasets=None, **attrs): ndvi = (ndvi_input[1] - ndvi_input[0]) / (ndvi_input[1] + ndvi_input[0]) - ndvi.data = da.where(ndvi > 
self.ndvi_min, ndvi, self.ndvi_min) - ndvi.data = da.where(ndvi < self.ndvi_max, ndvi, self.ndvi_max) + ndvi = ndvi.clip(self.ndvi_min, self.ndvi_max) # Introduce non-linearity to ndvi for non-linear scaling to NIR blend fraction if self.strength != 1.0: # self._apply_strength() has no effect if strength = 1.0 -> no non-linear behaviour From 5db79f55232dcada3b984c5d886f48420a515524 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 1 Nov 2023 14:09:13 +0100 Subject: [PATCH 0634/1416] Update sgli reader, add tests --- satpy/etc/readers/sgli_l1b.yaml | 40 +++-- satpy/readers/sgli_l1b.py | 189 +++++++++++++++------- satpy/tests/reader_tests/test_sgli_l1b.py | 104 ++++++++++++ 3 files changed, 264 insertions(+), 69 deletions(-) diff --git a/satpy/etc/readers/sgli_l1b.yaml b/satpy/etc/readers/sgli_l1b.yaml index 41cb3bed9e..cbf5c4989d 100644 --- a/satpy/etc/readers/sgli_l1b.yaml +++ b/satpy/etc/readers/sgli_l1b.yaml @@ -6,6 +6,36 @@ reader: default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + data_identification_keys: + name: + required: true + wavelength: + type: !!python/name:satpy.dataset.dataid.WavelengthRange + polarization: + transitive: true + resolution: + transitive: false + calibration: + enum: + - reflectance + - brightness_temperature + - radiance + - counts + transitive: true + modifiers: + default: [] + type: !!python/name:satpy.dataset.dataid.ModifierTuple + + coord_identification_keys: + name: + required: true + polarization: + transitive: true + resolution: + transitive: false + + + file_types: gcom-c_l1b_v: file_reader: !!python/name:satpy.readers.sgli_l1b.HDF5SGLI @@ -31,7 +61,6 @@ datasets: longitude_v: name: longitude_v resolution: [250, 1000] - file_type: gcom-c_l1b standard_name: longitude units: degree file_key: Geometry_data/Longitude @@ -40,7 +69,6 @@ datasets: latitude_v: name: latitude_v resolution: [250, 1000] - file_type: gcom-c_l1b standard_name: latitude units: degree file_key: Geometry_data/Latitude @@ -50,7 +78,6 @@ datasets: name: longitude_p resolution: 1000 polarization: [0, -60, 60] - file_type: gcom-c_l1b standard_name: longitude units: degree file_key: Geometry_data/Longitude @@ -60,7 +87,6 @@ datasets: name: latitude_p resolution: 1000 polarization: [0, -60, 60] - file_type: gcom-c_l1b standard_name: latitude units: degree file_key: Geometry_data/Latitude @@ -69,7 +95,6 @@ datasets: longitude_ir: name: longitude_ir resolution: [250, 500, 1000] - file_type: gcom-c_l1b standard_name: longitude units: degree file_key: Geometry_data/Longitude @@ -78,7 +103,6 @@ datasets: latitude_ir: name: latitude_ir resolution: [250, 500, 1000] - file_type: gcom-c_l1b standard_name: latitude units: degree file_key: Geometry_data/Latitude @@ -87,7 +111,6 @@ datasets: solar_zenith_angle: name: solar_zenith_angle sensor: sgli - wavelength: [0.3925,0.4,0.4075] resolution: [250, 1000] coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v @@ -96,7 +119,6 @@ datasets: solar_azimuth_angle: name: solar_azimuth_angle sensor: sgli - wavelength: [0.3925,0.4,0.4075] resolution: [250, 1000] coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v @@ -105,7 +127,6 @@ datasets: satellite_zenith_angle: name: satellite_zenith_angle sensor: sgli - wavelength: [0.3925,0.4,0.4075] resolution: [250, 1000] coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v @@ -114,7 +135,6 @@ datasets: satellite_azimuth_angle: name: satellite_azimuth_angle sensor: sgli - wavelength: [0.3925,0.4,0.4075] resolution: [250, 1000] coordinates: 
[longitude_v, latitude_v] file_type: gcom-c_l1b_v diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py index 97db07a756..25289b5de3 100644 --- a/satpy/readers/sgli_l1b.py +++ b/satpy/readers/sgli_l1b.py @@ -34,18 +34,19 @@ import h5py import numpy as np import xarray as xr +from dask.array.core import normalize_chunks from xarray import Dataset, Variable from xarray.backends import BackendArray, BackendEntrypoint from xarray.core import indexing -from satpy import CHUNK_SIZE +# from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) -resolutions = {'Q': 250, - 'K': 1000, - 'L': 1000} +resolutions = {"Q": 250, + "K": 1000, + "L": 1000} def interpolate(arr, sampling, full_shape): @@ -58,13 +59,14 @@ def interpolate(arr, sampling, full_shape): full_x = np.arange(0, full_shape[0]) full_y = np.arange(0, full_shape[1]) + from scipy.interpolate import RectBivariateSpline spl = RectBivariateSpline( tie_x, tie_y, arr) values = spl(full_x, full_y) - return da.from_array(values, chunks=(CHUNK_SIZE, CHUNK_SIZE)) + return da.from_array(values, chunks=(1000, 1000)) class HDF5SGLI(BaseFileHandler): @@ -73,78 +75,147 @@ class HDF5SGLI(BaseFileHandler): def __init__(self, filename, filename_info, filetype_info): """Initialize the filehandler.""" super().__init__(filename, filename_info, filetype_info) - self.resolution = resolutions[self.filename_info['resolution']] - self.fh = h5py.File(self.filename, 'r') + self.resolution = resolutions[self.filename_info["resolution"]] + self.h5file = h5py.File(self.filename, "r") @property def start_time(self): """Get the start time.""" - the_time = self.fh['Global_attributes'].attrs['Scene_start_time'].item() - return datetime.strptime(the_time.decode('ascii'), '%Y%m%d %H:%M:%S.%f') + the_time = self.h5file["Global_attributes"].attrs["Scene_start_time"].item() + return datetime.strptime(the_time.decode("ascii"), "%Y%m%d %H:%M:%S.%f") @property def end_time(self): """Get the end time.""" - the_time = self.fh['Global_attributes'].attrs['Scene_end_time'].item() - return datetime.strptime(the_time.decode('ascii'), '%Y%m%d %H:%M:%S.%f') + the_time = self.h5file["Global_attributes"].attrs["Scene_end_time"].item() + return datetime.strptime(the_time.decode("ascii"), "%Y%m%d %H:%M:%S.%f") def get_dataset(self, key, info): """Get the dataset.""" if key["resolution"] != self.resolution: return - if key["polarization"] is not None: - pols = {0: '0', -60: 'm60', 60: 'p60'} - file_key = info['file_key'].format(pol=pols[key["polarization"]]) - else: - file_key = info['file_key'] - - h5dataset = self.fh[file_key] - - resampling_interval = h5dataset.attrs.get('Resampling_interval', 1) - if resampling_interval != 1: - logger.debug('Interpolating %s.', key["name"]) - full_shape = (self.fh['Image_data'].attrs['Number_of_lines'], - self.fh['Image_data'].attrs['Number_of_pixels']) - dataset = interpolate(h5dataset, resampling_interval, full_shape) - else: - dataset = da.from_array(h5dataset[:].astype(' 116 + +def test_loading_lon_lat(sgli_file): + handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) + did = dict(name="longitude_v", resolution=1000, polarization=None) + res = handler.get_dataset(did, {"file_key": "Geometry_data/Longitude"}) + assert res.shape == (1955, 1250) + assert res.chunks is not None + assert res.dtype == np.float32 From bfb96c73ba042a1976b8a6e4b79d4f4618fdf511 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 1 Nov 2023 14:52:34 +0100 Subject: [PATCH 0635/1416] Start replacing 
assertRaises --- satpy/tests/test_readers.py | 34 ++++++++++++++++++--------------- satpy/tests/test_resample.py | 2 +- satpy/tests/test_utils.py | 1 - satpy/tests/test_writers.py | 7 ++++--- satpy/tests/test_yaml_reader.py | 22 ++++++++++----------- 5 files changed, 34 insertions(+), 32 deletions(-) diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index 8250f691a0..378f3fdb5a 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -166,7 +166,8 @@ def test_getitem(self): assert d[0.5] == "1h" assert d["test4"] == "4refl" assert d[make_dataid(name="test4", calibration="radiance")] == "4rad" - self.assertRaises(KeyError, d.getitem, "1h") + with pytest.raises(KeyError): + d.getitem("1h") # test with full tuple assert d[make_dsq(name="test", wavelength=(0, 0.5, 1), resolution=1000)] == "1" @@ -203,7 +204,8 @@ def test_get_key(self): assert res1 != res3 # more than 1 result when default is to ask for 1 result - self.assertRaises(KeyError, get_key, "test4", d, best=False) + with pytest.raises(KeyError): + get_key("test4", d, best=False) def test_contains(self): """Test DatasetDict contains method.""" @@ -288,9 +290,9 @@ def test_filenames_and_reader(self): def test_bad_reader_name_with_filenames(self): """Test bad reader name with filenames provided.""" from satpy.readers import load_readers - self.assertRaises(ValueError, load_readers, reader="i_dont_exist", filenames=[ - "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", - ]) + with pytest.raises(ValueError, match="No reader named: i_dont_exist"): + load_readers(reader="i_dont_exist", + filenames=["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"]) def test_filenames_as_path(self): """Test with filenames specified as pathlib.Path.""" @@ -318,9 +320,8 @@ def test_filenames_as_dict_bad_reader(self): "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], "__fake__": ["fake.txt"], } - self.assertRaisesRegex(ValueError, - r"(?=.*__fake__)(?!.*viirs)(^No reader.+)", - load_readers, filenames=filenames) + with pytest.raises(ValueError, match=r"(?=.*__fake__)(?!.*viirs)(^No reader.+)"): + load_readers(filenames=filenames) def test_filenames_as_dict_with_reader(self): """Test loading from a filenames dict with a single reader specified. 
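Every hunk in this commit applies the same mechanical translation: a
self.assertRaises(...) call becomes a pytest.raises context manager, gaining a
match regex wherever the error message is stable enough to assert on. A minimal
sketch of the pattern, reusing the call and message from the hunk above (with
`files` standing in for the filename list):

    import pytest

    # unittest style being removed:
    #   self.assertRaises(ValueError, load_readers, reader="i_dont_exist", filenames=files)
    # pytest style being added; `match` is applied with re.search against str(exc):
    with pytest.raises(ValueError, match="No reader named: i_dont_exist"):
        load_readers(reader="i_dont_exist", filenames=files)

Since match is a regular expression, patterns containing regex metacharacters
are anchored or written as raw strings, as in the lookahead pattern used for
the multi-reader error above.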
@@ -343,7 +344,8 @@ def test_empty_filenames_as_dict(self): filenames = { "viirs_sdr": [], } - self.assertRaises(ValueError, load_readers, filenames=filenames) + with pytest.raises(ValueError, match="No supported files found"): + load_readers(filenames=filenames) # two readers, one is empty filenames = { @@ -370,7 +372,8 @@ def test_missing_requirements(self, *mocks): with warnings.catch_warnings(): warnings.filterwarnings("ignore", message=r"No handler for reading requirement.*", category=UserWarning) for filenames in [epi_miss, pro_miss, epi_pro_miss]: - self.assertRaises(ValueError, load_readers, reader="seviri_l1b_hrit", filenames=filenames) + with pytest.raises(ValueError, match="No dataset could be loaded.*"): + load_readers(reader="seviri_l1b_hrit", filenames=filenames) # Filenames from multiple scans at_least_one_complete = [ @@ -400,8 +403,8 @@ def test_all_filtered(self): filter_params = {"start_time": datetime.datetime(1970, 1, 1), "end_time": datetime.datetime(1970, 1, 2), "area": None} - self.assertRaises(ValueError, load_readers, - filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) + with pytest.raises(ValueError, match="No dataset could be loaded.*"): + load_readers(filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) def test_all_filtered_multiple(self): """Test behaviour if no file matches the filter parameters.""" @@ -414,8 +417,8 @@ def test_all_filtered_multiple(self): } filter_params = {"start_time": datetime.datetime(1970, 1, 1), "end_time": datetime.datetime(1970, 1, 2)} - self.assertRaises(ValueError, load_readers, - filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) + with pytest.raises(ValueError, match="No dataset could be loaded."): + load_readers(filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) def test_almost_all_filtered(self): """Test behaviour if only one reader has datasets.""" @@ -725,7 +728,8 @@ def test_bad_reader(self): # touch the file so it exists on disk with mock.patch("yaml.load") as load: load.side_effect = yaml.YAMLError("Import problems") - self.assertRaises(yaml.YAMLError, group_files, [], reader="abi_l1b") + with pytest.raises(yaml.YAMLError): + group_files([], reader="abi_l1b") def test_default_behavior(self): """Test the default behavior with the 'abi_l1b' reader.""" diff --git a/satpy/tests/test_resample.py b/satpy/tests/test_resample.py index 66e93009d2..7135661578 100644 --- a/satpy/tests/test_resample.py +++ b/satpy/tests/test_resample.py @@ -834,7 +834,7 @@ def test_compute(self): # Too many dimensions data = da.ones((3, 5, 5)) - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match="BucketFraction not implemented for 3D datasets"): _ = self.bucket.compute(data) @mock.patch("pyresample.bucket.BucketResampler") diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index 6f5db02087..18ff839599 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -80,7 +80,6 @@ def test_lonlat2xyz(self, lonlat, xyz): ((90, 90), (1, 0, 0)), ((-90, 90), (-1, 0, 0)), ((180, 90), (0, -1, 0)), - ((0, -90), (0, -1, 0)), ((0, 45), (0, sqrt(2) / 2, sqrt(2) / 2)), ((0, 60), (0, sqrt(3) / 2, sqrt(1) / 2)), ], diff --git a/satpy/tests/test_writers.py b/satpy/tests/test_writers.py index c2d049dae1..dd26b06c82 100644 --- a/satpy/tests/test_writers.py +++ b/satpy/tests/test_writers.py @@ -40,7 +40,8 @@ def test_to_image_1d(self): # 1D from satpy.writers import to_image p = xr.DataArray(np.arange(25), dims=["y"]) - 
self.assertRaises(ValueError, to_image, p) + with pytest.raises(ValueError, match="Need at least a 2D array to make an image."): + to_image(p) @mock.patch("satpy.writers.XRImage") def test_to_image_2d(self, mock_geoimage): @@ -113,8 +114,8 @@ def test_basic_init_provided_enh(self): def test_init_nonexistent_enh_file(self): """Test Enhancer init with a nonexistent enhancement configuration file.""" from satpy.writers import Enhancer - self.assertRaises( - ValueError, Enhancer, enhancement_config_file="is_not_a_valid_filename_?.yaml") + with pytest.raises(ValueError, match="YAML file doesn't exist or string is not YAML dict:.*"): + Enhancer(enhancement_config_file="is_not_a_valid_filename_?.yaml") class _BaseCustomEnhancementConfigTests: diff --git a/satpy/tests/test_yaml_reader.py b/satpy/tests/test_yaml_reader.py index 35752cd237..41439a1ac6 100644 --- a/satpy/tests/test_yaml_reader.py +++ b/satpy/tests/test_yaml_reader.py @@ -327,7 +327,9 @@ def setUp(self): def test_deprecated_passing_config_files(self): """Test that we get an exception when config files are passed to inti.""" - self.assertRaises(ValueError, yr.FileYAMLReader, "/path/to/some/file.yaml") + with pytest.raises(ValueError, + match="Passing config files to create a Reader is deprecated.*"): + yr.FileYAMLReader("/path/to/some/file.yaml") def test_all_data_ids(self): """Check that all datasets ids are returned.""" @@ -409,15 +411,11 @@ def test_start_end_time(self): """Check start and end time behaviours.""" self.reader.file_handlers = {} - def get_start_time(): - return self.reader.start_time + with pytest.raises(RuntimeError): + self.reader.start_time - self.assertRaises(RuntimeError, get_start_time) - - def get_end_time(): - return self.reader.end_time - - self.assertRaises(RuntimeError, get_end_time) + with pytest.raises(RuntimeError): + self.reader.end_time fh0 = FakeFH(datetime(1999, 12, 30, 0, 0), datetime(1999, 12, 31, 0, 0)) @@ -780,7 +778,7 @@ def test_load_dataset_with_area_for_single_areas(self, ldwa): np.testing.assert_equal(res.coords["time"], np.arange(2)) # check wrong input - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match="Target orientation for Dataset unknown_name not recognized.*"): _ = reader._load_dataset_with_area(dsid, coords, "wronginput") # check native orientation, nothing should change @@ -1043,11 +1041,11 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): # Projectable is None mss.return_value = [0, 0, 0, False, None] - with self.assertRaises(KeyError): + with pytest.raises(KeyError): res = reader._load_dataset(None, None, None) # Failure is True mss.return_value = [0, 0, 0, True, 0] - with self.assertRaises(KeyError): + with pytest.raises(KeyError): res = reader._load_dataset(None, None, None) # Setup input, and output of mocked functions From 7db371af672a9ea15db69866631ae4a0597334e3 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 1 Nov 2023 17:11:49 +0200 Subject: [PATCH 0636/1416] Test that NDVIHybridGreenCompositor doesn't up-cast the data --- satpy/tests/compositor_tests/test_spectral.py | 25 ++++++++++++++----- 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index 55b24b1982..3f9d65a78d 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -85,12 +85,15 @@ class TestNdviHybridGreenCompositor: def setup_method(self): """Initialize channels.""" - self.c01 = 
xr.DataArray(da.from_array([[0.25, 0.30], [0.20, 0.30]], chunks=25), - dims=("y", "x"), attrs={"name": "C02"}) - self.c02 = xr.DataArray(da.from_array([[0.25, 0.30], [0.25, 0.35]], chunks=25), - dims=("y", "x"), attrs={"name": "C03"}) - self.c03 = xr.DataArray(da.from_array([[0.35, 0.35], [0.28, 0.65]], chunks=25), - dims=("y", "x"), attrs={"name": "C04"}) + self.c01 = xr.DataArray( + da.from_array(np.array([[0.25, 0.30], [0.20, 0.30]], dtype=np.float32), chunks=25), + dims=("y", "x"), attrs={"name": "C02"}) + self.c02 = xr.DataArray( + da.from_array(np.array([[0.25, 0.30], [0.25, 0.35]], dtype=np.float32), chunks=25), + dims=("y", "x"), attrs={"name": "C03"}) + self.c03 = xr.DataArray( + da.from_array(np.array([[0.35, 0.35], [0.28, 0.65]], dtype=np.float32), chunks=25), + dims=("y", "x"), attrs={"name": "C04"}) def test_ndvi_hybrid_green(self): """Test General functionality with linear scaling from ndvi to blend fraction.""" @@ -107,6 +110,16 @@ def test_ndvi_hybrid_green(self): data = res.values np.testing.assert_array_almost_equal(data, np.array([[0.2633, 0.3071], [0.2115, 0.3420]]), decimal=4) + def test_ndvi_hybrid_green_dtype(self): + """Test that the datatype is not altered by the compositor.""" + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), + standard_name="toa_bidirectional_reflectance") + + # Test General functionality with linear strength (=1.0) + res = comp((self.c01, self.c02, self.c03)).compute() + assert res.data.dtype == np.float32 + def test_nonliniear_scaling(self): """Test non-linear scaling using `strength` term.""" with dask.config.set(scheduler=CustomScheduler(max_computes=1)): From edd0632df09e0456b3738b8da3728723de2a33fe Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 1 Nov 2023 15:23:40 -0500 Subject: [PATCH 0637/1416] Remove unnecessary float cast in satpy/readers/abi_l1b.py --- satpy/readers/abi_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/abi_l1b.py b/satpy/readers/abi_l1b.py index 4933b0982a..29ed6f668c 100644 --- a/satpy/readers/abi_l1b.py +++ b/satpy/readers/abi_l1b.py @@ -133,7 +133,7 @@ def _raw_calibrate(self, data): def _vis_calibrate(self, data): """Calibrate visible channels to reflectance.""" solar_irradiance = self["esun"] - esd = self["earth_sun_distance_anomaly_in_AU"].astype(np.float32) + esd = self["earth_sun_distance_anomaly_in_AU"] factor = np.pi * esd * esd / solar_irradiance From 144778b3e53a8fa3256f6500b112daaaa8b9ed3e Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 2 Nov 2023 08:52:24 +0200 Subject: [PATCH 0638/1416] Update satpy/tests/compositor_tests/test_spectral.py Co-authored-by: David Hoese --- satpy/tests/compositor_tests/test_spectral.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index 3f9d65a78d..2773b5d4a5 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -127,7 +127,10 @@ def test_nonliniear_scaling(self): prerequisites=(0.51, 0.65, 0.85), standard_name="toa_bidirectional_reflectance") - res = comp((self.c01, self.c02, self.c03)).compute() + res = comp((self.c01, self.c02, self.c03)) + res_np = res.data.compute() + assert res.dtype == res_np.dtype + assert res.dtype == np.float32 np.testing.assert_array_almost_equal(res.data, np.array([[0.2646, 0.3075], [0.2120, 0.3471]]), 
decimal=4) def test_invalid_strength(self): From 5b2cbc3225463892386b634b0ce1586272a95cfc Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 2 Nov 2023 08:54:43 +0200 Subject: [PATCH 0639/1416] Fix typo in test method name --- satpy/tests/compositor_tests/test_spectral.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index 2773b5d4a5..2a3c92eca8 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -120,7 +120,7 @@ def test_ndvi_hybrid_green_dtype(self): res = comp((self.c01, self.c02, self.c03)).compute() assert res.data.dtype == np.float32 - def test_nonliniear_scaling(self): + def test_nonlinear_scaling(self): """Test non-linear scaling using `strength` term.""" with dask.config.set(scheduler=CustomScheduler(max_computes=1)): comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), strength=2.0, From dd2c879bc2ea0c6af825b93bf1139062c1df63b2 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 2 Nov 2023 08:57:03 +0200 Subject: [PATCH 0640/1416] Remove copy-paste comment --- satpy/tests/compositor_tests/test_spectral.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index 2a3c92eca8..e46cff4d0c 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -115,8 +115,6 @@ def test_ndvi_hybrid_green_dtype(self): with dask.config.set(scheduler=CustomScheduler(max_computes=1)): comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), standard_name="toa_bidirectional_reflectance") - - # Test General functionality with linear strength (=1.0) res = comp((self.c01, self.c02, self.c03)).compute() assert res.data.dtype == np.float32 From 006136ef5a8590e6c7d63b1c2db97f5a70693726 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 2 Nov 2023 14:00:42 -0500 Subject: [PATCH 0641/1416] Switch abi l1b reader tests to use reader-level interfaces --- satpy/tests/reader_tests/test_abi_l1b.py | 43 +++++++++--------------- 1 file changed, 16 insertions(+), 27 deletions(-) diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index ec3a0334cc..a6acd7f027 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -31,16 +31,14 @@ import xarray as xr from pytest_lazyfixture import lazy_fixture -from satpy import DataQuery, Scene +from satpy import DataQuery from satpy.readers.abi_l1b import NC_ABI_L1B +from satpy.readers.yaml_reader import FileYAMLReader from satpy.utils import ignore_pyproj_proj_warnings RAD_SHAPE = { 500: (3000, 5000), # conus - 500m } -# RAD_SHAPE = { -# 500: (21696, 21696), # fldk - 500m -# } RAD_SHAPE[1000] = (RAD_SHAPE[500][0] // 2, RAD_SHAPE[500][1] // 2) RAD_SHAPE[2000] = (RAD_SHAPE[500][0] // 4, RAD_SHAPE[500][1] // 4) @@ -143,17 +141,15 @@ def c01_refl(tmp_path) -> xr.DataArray: # 226 on-disk chunk size # Square (**2) for 2D size with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): - scn = _create_scene_for_data(tmp_path, "C01", None, 1000) - scn.load(["C01"]) - return scn["C01"] + reader = _create_reader_for_data(tmp_path, "C01", None, 1000) + return reader.load(["C01"])["C01"] @pytest.fixture() def c01_rad(tmp_path) -> xr.DataArray: with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): - scn = 
_create_scene_for_data(tmp_path, "C01", None, 1000) - scn.load([DataQuery(name="C01", calibration="radiance")]) - return scn["C01"] + reader = _create_reader_for_data(tmp_path, "C01", None, 1000) + return reader.load([DataQuery(name="C01", calibration="radiance")])["C01"] @pytest.fixture() @@ -174,17 +170,15 @@ def c01_rad_h5netcdf(tmp_path) -> xr.DataArray: }, ) with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): - scn = _create_scene_for_data(tmp_path, "C01", rad, 1000) - scn.load([DataQuery(name="C01", calibration="radiance")]) - return scn["C01"] + reader = _create_reader_for_data(tmp_path, "C01", rad, 1000) + return reader.load([DataQuery(name="C01", calibration="radiance")])["C01"] @pytest.fixture() def c01_counts(tmp_path) -> xr.DataArray: with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): - scn = _create_scene_for_data(tmp_path, "C01", None, 1000) - scn.load([DataQuery(name="C01", calibration="counts")]) - return scn["C01"] + reader = _create_reader_for_data(tmp_path, "C01", None, 1000) + return reader.load([DataQuery(name="C01", calibration="counts")])["C01"] @pytest.fixture() @@ -194,15 +188,14 @@ def _load_data_array( ): rad = _fake_c07_data() with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): - scn = _create_scene_for_data( + reader = _create_reader_for_data( tmp_path, "C07", rad, 2000, {"clip_negative_radiances": clip_negative_radiances}, ) - scn.load(["C07"]) - return scn["C07"] + return reader.load(["C07"])["C07"] return _load_data_array @@ -228,13 +221,13 @@ def _fake_c07_data() -> xr.DataArray: return rad -def _create_scene_for_data( +def _create_reader_for_data( tmp_path: Path, channel_name: str, rad: xr.DataArray | None, resolution: int, reader_kwargs: dict[str, Any] | None = None, -) -> Scene: +) -> FileYAMLReader: filename = generate_l1b_filename(channel_name) data_path = tmp_path / filename dataset = _create_fake_rad_dataset(rad=rad, resolution=resolution) @@ -244,12 +237,8 @@ def _create_scene_for_data( "Rad": {"chunksizes": [226, 226]}, }, ) - scn = Scene( - reader="abi_l1b", - filenames=[str(data_path)], - reader_kwargs=reader_kwargs, - ) - return scn + from satpy.readers import load_readers + return load_readers([str(data_path)], "abi_l1b", reader_kwargs=reader_kwargs)["abi_l1b"] def _get_and_check_array(data_arr: xr.DataArray, exp_dtype: npt.DTypeLike) -> npt.NDArray: From c35381897d5e3bd19597d96d0a6b1774edb8dc88 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 1 Nov 2023 10:04:23 -0500 Subject: [PATCH 0642/1416] Add dask chunk size checks to ABI l1b tests --- satpy/tests/writer_tests/test_awips_tiled.py | 7 ++++--- satpy/writers/awips_tiled.py | 5 ++++- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/satpy/tests/writer_tests/test_awips_tiled.py b/satpy/tests/writer_tests/test_awips_tiled.py index eab72e8f5b..63113a9f94 100644 --- a/satpy/tests/writer_tests/test_awips_tiled.py +++ b/satpy/tests/writer_tests/test_awips_tiled.py @@ -495,9 +495,10 @@ def test_multivar_numbered_tiles_glm(self, sector, extra_kwargs, tmp_path): "_FillValue": 1, }) - w.save_datasets([ds1, ds2, ds3, dqf], sector_id="TEST", source_name="TESTS", - tile_count=(3, 3), template="glm_l2_rad{}".format(sector.lower()), - **extra_kwargs) + with pytest.warns(UserWarning, match="Production location attribute "): + w.save_datasets([ds1, ds2, ds3, dqf], sector_id="TEST", source_name="TESTS", + tile_count=(3, 3), template="glm_l2_rad{}".format(sector.lower()), + **extra_kwargs) fn_glob = self._get_glm_glob_filename(extra_kwargs) all_files 
= glob(os.path.join(str(tmp_path), fn_glob)) assert len(all_files) == 9 diff --git a/satpy/writers/awips_tiled.py b/satpy/writers/awips_tiled.py index 5f10418e8a..9bab65fe35 100644 --- a/satpy/writers/awips_tiled.py +++ b/satpy/writers/awips_tiled.py @@ -620,9 +620,12 @@ def _get_factor_offset_fill(input_data_arr, vmin, vmax, encoding): # file data type to allow for extra fill values num_fills = 0 - if is_unsigned or unsigned_in_signed: + if is_unsigned: # max value fills = [2 ** file_bit_depth - 1] + elif unsigned_in_signed: + # max unsigned value is -1 as a signed int + fills = [-1] else: # max value fills = [2 ** (file_bit_depth - 1) - 1] From 2e53366d3666672ceeffb6f8e2b7d5218e7d393d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 1 Nov 2023 10:04:59 -0500 Subject: [PATCH 0643/1416] Remove use of pkg_resources (deprecated) in sphinx conf.py --- doc/source/conf.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 3bef218f89..f6606dc6c9 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -18,8 +18,6 @@ import sys from datetime import datetime -from pkg_resources import get_distribution - # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. @@ -28,12 +26,14 @@ from reader_table import generate_reader_table # noqa: E402 +import satpy # noqa: E402 + # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # get version using setuptools-scm -release = get_distribution("satpy").version +release = satpy.__version__ # The full version, including alpha/beta/rc tags. # for example take major/minor version = ".".join(release.split(".")[:2]) From 57369902601c3834a24ea14fc82d03d9f843093b Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 1 Nov 2023 10:40:35 -0500 Subject: [PATCH 0644/1416] Fix time string parsing with newer versions of xarray and numpy Something in a recent change makes xarray or numpy allow for `str + DataArray` returning a DataArray and this makes `datetime.strptime` mad. 
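For illustration, the failure mode in isolation (values are made up, and
the variables are plain numpy-backed here rather than dask-backed):

    from datetime import datetime
    import xarray as xr

    datestr = xr.DataArray("20120225")  # scalar string variable from the file
    timestr = "180124.500000Z"

    # str + DataArray can now yield a DataArray, which strptime rejects,
    # so both operands are normalized to plain strings first:
    if not isinstance(datestr, str):
        datestr = str(datestr.data.astype(str))
    datetime.strptime(datestr + timestr, "%Y%m%d%H%M%S.%fZ")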
--- satpy/readers/viirs_atms_sdr_base.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/satpy/readers/viirs_atms_sdr_base.py b/satpy/readers/viirs_atms_sdr_base.py index be0a7a0d65..159a84a070 100644 --- a/satpy/readers/viirs_atms_sdr_base.py +++ b/satpy/readers/viirs_atms_sdr_base.py @@ -100,11 +100,11 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): super().__init__(filename, filename_info, filetype_info, **kwargs) def _parse_datetime(self, datestr, timestr): - try: - datetime_str = datestr + timestr - except TypeError: - datetime_str = (str(datestr.data.compute().astype(str)) + - str(timestr.data.compute().astype(str))) + if not isinstance(datestr, str): + datestr = str(datestr.data.compute().astype(str)) + if not isinstance(timestr, str): + timestr = str(timestr.data.compute().astype(str)) + datetime_str = datestr + timestr time_val = datetime.strptime(datetime_str, "%Y%m%d%H%M%S.%fZ") if abs(time_val - NO_DATE) < EPSILON_TIME: From c25c0bf34cc3cd8a62d254d5016e95374125de51 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 1 Nov 2023 13:37:14 -0500 Subject: [PATCH 0645/1416] Fix incorrect data type in AMI tests Tests used a 16-bit signed integer, but real world data is unsigned. Using signed results in not being able to mask with expected quality flags --- satpy/tests/reader_tests/test_ami_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_ami_l1b.py b/satpy/tests/reader_tests/test_ami_l1b.py index cdbc4468c9..a9909fd48b 100644 --- a/satpy/tests/reader_tests/test_ami_l1b.py +++ b/satpy/tests/reader_tests/test_ami_l1b.py @@ -65,7 +65,7 @@ def setUp(self, xr_, counts=None): if counts is None: rad_data = (np.arange(10.).reshape((2, 5)) + 1.) * 50. rad_data = (rad_data + 1.) 
/ 0.5 - rad_data = rad_data.astype(np.int16) + rad_data = rad_data.astype(np.uint16) counts = xr.DataArray( da.from_array(rad_data, chunks="auto"), dims=("y", "x"), From ea791e92f28219572e7bbefc9432d44760af994a Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 1 Nov 2023 14:13:10 -0500 Subject: [PATCH 0646/1416] Fix LI L2 tests using signed values with an unsigned type --- satpy/tests/reader_tests/_li_test_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/tests/reader_tests/_li_test_utils.py b/satpy/tests/reader_tests/_li_test_utils.py index d6a32253f5..32107006fc 100644 --- a/satpy/tests/reader_tests/_li_test_utils.py +++ b/satpy/tests/reader_tests/_li_test_utils.py @@ -370,13 +370,13 @@ def l2_lfl_schema(settings=None): "default_data": lambda: np.random.uniform(stime, etime, nobs) }, "l1b_geolocation_warning": { - "format": "u8", + "format": "i1", "shape": ("flashes",), "long_name": "L1b geolocation warning", "default_data": lambda: -127 }, "l1b_radiometric_warning": { - "format": "u8", + "format": "i1", "shape": ("flashes",), "long_name": "L1b radiometric warning", "default_data": lambda: -127 From e82379eff586ff1d76c623bbe2b08940b2ffed35 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 2 Nov 2023 09:50:07 -0500 Subject: [PATCH 0647/1416] Add workaround for dtype not being preserved in xr.where call --- satpy/readers/mersi_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py index 905db0654f..7070131f51 100644 --- a/satpy/readers/mersi_l1b.py +++ b/satpy/readers/mersi_l1b.py @@ -141,7 +141,7 @@ def _mask_data(self, data, dataset_id, attrs): if dataset_id.get("calibration") == "counts": # preserve integer type of counts if possible attrs["_FillValue"] = fill_value - new_fill = fill_value + new_fill = data.dtype.type(fill_value) else: new_fill = np.nan if valid_range is not None: From bec297a05d32e8da0ad70d53011fe231204859b1 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 2 Nov 2023 10:03:46 -0500 Subject: [PATCH 0648/1416] Add workaround for xarray where with integer types --- satpy/readers/fci_l1c_nc.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index e42975b3a4..2d3e047c5e 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -334,11 +334,10 @@ def _get_dataset_measurand(self, key, info=None): vr = attrs.get("valid_range", [-np.inf, np.inf]) if key["calibration"] == "counts": attrs["_FillValue"] = fv - nfv = fv + nfv = data.dtype.type(fv) else: nfv = np.nan - data = data.where(data >= vr[0], nfv) - data = data.where(data <= vr[1], nfv) + data = data.where((data >= vr[0]) & (data <= vr[1]), nfv) res = self.calibrate(data, key) From 9f69ff5a439c0f8a8f45a1a123fbee0b58e041ea Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 2 Nov 2023 10:36:09 -0500 Subject: [PATCH 0649/1416] Add workaround for integer overflow in goes imager reader Numpy 2.0 complains about 1000 not fitting in a uint8, but I also cast other large numbers to be a little safe. 
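A standalone illustration of the promotion rule at play (the packed BCD
byte value is made up):

    import numpy as np

    century = np.array([0x20], dtype=np.uint8)
    # Under NumPy 2.0 promotion rules, a Python int that does not fit the
    # array dtype is an error, so ``(century >> 4) * 1000`` fails because
    # 1000 > 255. Widening the constant keeps the math in int64 instead:
    year = (century >> 4) * np.int64(1000)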
--- satpy/readers/goes_imager_hrit.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/readers/goes_imager_hrit.py b/satpy/readers/goes_imager_hrit.py index 457d5d809c..9ac6fe4484 100644 --- a/satpy/readers/goes_imager_hrit.py +++ b/satpy/readers/goes_imager_hrit.py @@ -117,11 +117,11 @@ class CalibrationError(Exception): def make_sgs_time(sgs_time_array): """Make sgs time.""" - year = ((sgs_time_array["century"] >> 4) * 1000 + - (sgs_time_array["century"] & 15) * 100 + + year = ((sgs_time_array["century"] >> 4) * np.int64(1000) + + (sgs_time_array["century"] & 15) * np.int64(100) + (sgs_time_array["year"] >> 4) * 10 + (sgs_time_array["year"] & 15)) - doy = ((sgs_time_array["doy1"] >> 4) * 100 + + doy = ((sgs_time_array["doy1"] >> 4) * np.int64(100) + (sgs_time_array["doy1"] & 15) * 10 + (sgs_time_array["doy_hours"] >> 4)) hours = ((sgs_time_array["doy_hours"] & 15) * 10 + @@ -130,7 +130,7 @@ def make_sgs_time(sgs_time_array): (sgs_time_array["mins_secs"] >> 4)) secs = ((sgs_time_array["mins_secs"] & 15) * 10 + (sgs_time_array["secs_msecs"] >> 4)) - msecs = ((sgs_time_array["secs_msecs"] & 15) * 100 + + msecs = ((sgs_time_array["secs_msecs"] & 15) * np.int64(100) + (sgs_time_array["msecs"] >> 4) * 10 + (sgs_time_array["msecs"] & 15)) return (datetime(int(year), 1, 1) + From b55da03f125398a99f34cffcbec2e695b92977ad Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 2 Nov 2023 11:44:10 -0500 Subject: [PATCH 0650/1416] Fix dtype handling in modis readers --- satpy/readers/hdfeos_base.py | 3 ++- satpy/tests/reader_tests/modis_tests/test_modis_l1b.py | 4 ++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index f60040a46f..c4bd5ebd7b 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -285,7 +285,8 @@ def _get_good_data_mask(self, data_arr, is_category=False): if is_category and np.issubdtype(data_arr.dtype, np.integer): # no need to mask, the fill value is already what it needs to be return None, None - new_fill = np.nan + fill_type = data_arr.dtype.type if np.issubdtype(data_arr.dtype, np.floating) else np.float32 + new_fill = fill_type(np.nan) data_arr.attrs.pop("_FillValue", None) good_mask = data_arr != fill_value return good_mask, new_fill diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py index 11068b6577..d4998a67f9 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py @@ -77,6 +77,10 @@ def _load_and_check_geolocation(scene, resolution, exp_res, exp_shape, has_res, assert lat_arr.shape == exp_shape # compute lon/lat at the same time to avoid wasted computation lon_vals, lat_vals = dask.compute(lon_arr, lat_arr) + assert lon_arr.dtype == lat_arr.dtype + assert lon_arr.dtype == np.float32 + assert lon_vals.dtype == lon_arr.dtype + assert lat_vals.dtype == lat_arr.dtype np.testing.assert_array_less(lon_vals, 0) np.testing.assert_array_less(0, lat_vals) check_callback(lon_arr) From 6c20a67aa56d79090f054588ed836394fb4b3c5b Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 2 Nov 2023 12:35:56 -0500 Subject: [PATCH 0651/1416] Be more intentional with dtype casting in goes imager hrit reader --- satpy/readers/goes_imager_hrit.py | 41 +++++++++++++++++++------------ 1 file changed, 25 insertions(+), 16 deletions(-) diff --git a/satpy/readers/goes_imager_hrit.py b/satpy/readers/goes_imager_hrit.py index 
9ac6fe4484..1724ba214d 100644 --- a/satpy/readers/goes_imager_hrit.py +++ b/satpy/readers/goes_imager_hrit.py @@ -117,22 +117,31 @@ class CalibrationError(Exception): def make_sgs_time(sgs_time_array): """Make sgs time.""" - year = ((sgs_time_array["century"] >> 4) * np.int64(1000) + - (sgs_time_array["century"] & 15) * np.int64(100) + - (sgs_time_array["year"] >> 4) * 10 + - (sgs_time_array["year"] & 15)) - doy = ((sgs_time_array["doy1"] >> 4) * np.int64(100) + - (sgs_time_array["doy1"] & 15) * 10 + - (sgs_time_array["doy_hours"] >> 4)) - hours = ((sgs_time_array["doy_hours"] & 15) * 10 + - (sgs_time_array["hours_mins"] >> 4)) - mins = ((sgs_time_array["hours_mins"] & 15) * 10 + - (sgs_time_array["mins_secs"] >> 4)) - secs = ((sgs_time_array["mins_secs"] & 15) * 10 + - (sgs_time_array["secs_msecs"] >> 4)) - msecs = ((sgs_time_array["secs_msecs"] & 15) * np.int64(100) + - (sgs_time_array["msecs"] >> 4) * 10 + - (sgs_time_array["msecs"] & 15)) + century = sgs_time_array["century"].astype(np.int64) + year = sgs_time_array["year"].astype(np.int64) + doy1 = sgs_time_array["doy1"].astype(np.int64) + doy_hours = sgs_time_array["doy_hours"].astype(np.int64) + hours_mins = sgs_time_array["hours_mins"].astype(np.int64) + mins_secs = sgs_time_array["mins_secs"].astype(np.int64) + secs_msecs = sgs_time_array["secs_msecs"].astype(np.int64) + msecs = sgs_time_array["msecs"].astype(np.int64) + + year = ((century >> 4) * 1000 + + (century & 15) * 100 + + (year >> 4) * 10 + + (year & 15)) + doy = ((doy1 >> 4) * 100 + + (doy1 & 15) * 10 + + (doy_hours >> 4)) + hours = ((doy_hours & 15) * 10 + + (hours_mins >> 4)) + mins = ((hours_mins & 15) * 10 + + (mins_secs >> 4)) + secs = ((mins_secs & 15) * 10 + + (secs_msecs >> 4)) + msecs = ((secs_msecs & 15) * 100 + + (msecs >> 4) * 10 + + (msecs & 15)) return (datetime(int(year), 1, 1) + timedelta(days=int(doy - 1), hours=int(hours), From 07bb8204d19bed79370cec6ad913c3892173cc11 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 2 Nov 2023 12:41:17 -0500 Subject: [PATCH 0652/1416] Fix accidental dtype upcasting in seviri l1b calibration --- satpy/readers/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/utils.py b/satpy/readers/utils.py index 18c7193a43..c1bf7c7497 100644 --- a/satpy/readers/utils.py +++ b/satpy/readers/utils.py @@ -459,7 +459,7 @@ def apply_earthsun_distance_correction(reflectance, utc_date=None): reflectance.attrs["sun_earth_distance_correction_applied"] = True reflectance.attrs["sun_earth_distance_correction_factor"] = sun_earth_dist with xr.set_options(keep_attrs=True): - reflectance = reflectance * sun_earth_dist * sun_earth_dist + reflectance = reflectance * reflectance.dtype.type(sun_earth_dist * sun_earth_dist) return reflectance @@ -472,5 +472,5 @@ def remove_earthsun_distance_correction(reflectance, utc_date=None): reflectance.attrs["sun_earth_distance_correction_applied"] = False reflectance.attrs["sun_earth_distance_correction_factor"] = sun_earth_dist with xr.set_options(keep_attrs=True): - reflectance = reflectance / (sun_earth_dist * sun_earth_dist) + reflectance = reflectance / reflectance.dtype.type(sun_earth_dist * sun_earth_dist) return reflectance From 4424c5d150b6e113bd4cecf753b867d796a945ca Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 3 Nov 2023 15:55:03 -0500 Subject: [PATCH 0653/1416] Add xfail for unstable skyfield and h5py-based tests --- satpy/tests/modifier_tests/test_parallax.py | 4 +++ .../tests/reader_tests/test_gerb_l2_hr_h5.py | 2 ++ satpy/tests/test_utils.py | 
2 ++ satpy/tests/utils.py | 28 +++++++++++++++++++ 4 files changed, 36 insertions(+) diff --git a/satpy/tests/modifier_tests/test_parallax.py b/satpy/tests/modifier_tests/test_parallax.py index f1385e9b18..e1b426dce2 100644 --- a/satpy/tests/modifier_tests/test_parallax.py +++ b/satpy/tests/modifier_tests/test_parallax.py @@ -30,6 +30,7 @@ from pyresample import create_area_def import satpy.resample +from satpy.tests.utils import xfail_skyfield_unstable_numpy2 from satpy.writers import get_enhanced_image # NOTE: @@ -438,6 +439,7 @@ def test_correct_area_cloudy_same_area(self, ): corrector = ParallaxCorrection(area) corrector(sc["CTH_constant"]) + @pytest.mark.xfail(xfail_skyfield_unstable_numpy2(), reason="Skyfield doesn't support numpy 2 yet") def test_correct_area_no_orbital_parameters(self, caplog, fake_tle): """Test ParallaxCorrection when CTH has no orbital parameters. @@ -761,6 +763,7 @@ def fake_scene(self, yaml_code): "area": area}) return sc + @pytest.mark.xfail(xfail_skyfield_unstable_numpy2(), reason="Skyfield doesn't support numpy 2 yet") def test_double_load(self, fake_scene, conf_file, fake_tle): """Test that loading corrected and uncorrected works correctly. @@ -790,6 +793,7 @@ def test_no_compute(self, fake_scene, conf_file): sccc.return_value = [os.fspath(conf_file)] fake_scene.load(["parallax_corrected_VIS006"]) + @pytest.mark.xfail(xfail_skyfield_unstable_numpy2(), reason="Skyfield doesn't support numpy 2 yet") def test_enhanced_image(self, fake_scene, conf_file, fake_tle): """Test that image enhancement is the same.""" with unittest.mock.patch( diff --git a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py index 0333f3df2b..d504b75d13 100644 --- a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py +++ b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py @@ -22,6 +22,7 @@ import pytest from satpy import Scene +from satpy.tests.utils import xfail_h5py_unstable_numpy2 FNAME = "G4_SEV4_L20_HR_SOL_TH_20190606_130000_V000.hdf" @@ -120,6 +121,7 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory): return filename +@pytest.mark.xfail(xfail_h5py_unstable_numpy2(), reason="h5py doesn't include numpy 2 fix") @pytest.mark.parametrize("name", ["Solar Flux", "Thermal Flux", "Solar Radiance", "Thermal Radiance"]) def test_dataset_load(gerb_l2_hr_h5_dummy_file, name): """Test loading the solar flux component.""" diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index 6f5db02087..f6633e159d 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -29,6 +29,7 @@ import pytest import xarray as xr +from satpy.tests.utils import xfail_skyfield_unstable_numpy2 from satpy.utils import ( angle2xyz, get_legacy_chunk_size, @@ -202,6 +203,7 @@ def test_get_satpos_fails_with_informative_error(self, attrs): with pytest.raises(KeyError, match="Unable to determine satellite position.*"): get_satpos(data_arr) + @pytest.mark.xfail(xfail_skyfield_unstable_numpy2(), reason="Skyfield does not support numpy 2 yet") def test_get_satpos_from_satname(self, caplog): """Test getting satellite position from satellite name only.""" import pyorbital.tlefile diff --git a/satpy/tests/utils.py b/satpy/tests/utils.py index ca958fce37..e2b70fe86c 100644 --- a/satpy/tests/utils.py +++ b/satpy/tests/utils.py @@ -407,3 +407,31 @@ def assert_attrs_equal(attrs, attrs_exp, tolerance=0): ) except TypeError: assert attrs[key] == attrs_exp[key], err_msg + + +def xfail_skyfield_unstable_numpy2(): + """Determine if skyfield-based tests should be xfail in the 
unstable numpy 2.x environment.""" + try: + import skyfield + except ImportError: + skyfield = None + + import os + is_unstable_ci = os.environ.get("UNSTABLE", "0") in ("1", "true") + is_np2 = np.__version__.startswith("2") + return skyfield is None and is_np2 and is_unstable_ci + + +def xfail_h5py_unstable_numpy2(): + """Determine if h5py-based tests should be xfail in the unstable numpy 2.x environment.""" + from packaging import version + try: + import h5py + is_broken_h5py = version.parse(h5py.__version__) < version.parse("3.10.0") + except ImportError: + is_broken_h5py = True + + import os + is_unstable_ci = os.environ.get("UNSTABLE", "0") in ("1", "true") + is_np2 = np.__version__.startswith("2") + return is_broken_h5py and is_np2 and is_unstable_ci From 65ec94fb88b09c167a0e886941c35778c09e9567 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 3 Nov 2023 19:55:01 -0500 Subject: [PATCH 0654/1416] Fix skyfield numpy compatibility check in xfail func --- satpy/tests/utils.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/satpy/tests/utils.py b/satpy/tests/utils.py index e2b70fe86c..b543afc7b4 100644 --- a/satpy/tests/utils.py +++ b/satpy/tests/utils.py @@ -413,12 +413,15 @@ def xfail_skyfield_unstable_numpy2(): """Determine if skyfield-based tests should be xfail in the unstable numpy 2.x environment.""" try: import skyfield + + # known numpy incompatibility: + from skyfield import timelib # noqa except ImportError: skyfield = None import os is_unstable_ci = os.environ.get("UNSTABLE", "0") in ("1", "true") - is_np2 = np.__version__.startswith("2") + is_np2 = np.__version__.startswith("2.") return skyfield is None and is_np2 and is_unstable_ci From 432615b583f7e6ab9b8e87702a060d1be20bb5fd Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 3 Nov 2023 20:25:15 -0500 Subject: [PATCH 0655/1416] Fix h5py version check in xfail test func --- satpy/tests/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/tests/utils.py b/satpy/tests/utils.py index b543afc7b4..7471bfc31c 100644 --- a/satpy/tests/utils.py +++ b/satpy/tests/utils.py @@ -430,11 +430,11 @@ def xfail_h5py_unstable_numpy2(): from packaging import version try: import h5py - is_broken_h5py = version.parse(h5py.__version__) < version.parse("3.10.0") + is_broken_h5py = version.parse(h5py.__version__) <= version.parse("3.10.0") except ImportError: is_broken_h5py = True import os is_unstable_ci = os.environ.get("UNSTABLE", "0") in ("1", "true") - is_np2 = np.__version__.startswith("2") + is_np2 = np.__version__.startswith("2.") return is_broken_h5py and is_np2 and is_unstable_ci From c8e7464607de02e917f82dcf1999de6237772498 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 3 Nov 2023 20:53:21 -0500 Subject: [PATCH 0656/1416] Fix dtype preservation in abi_l1b with numpy 2 --- satpy/readers/abi_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/abi_l1b.py b/satpy/readers/abi_l1b.py index 29ed6f668c..07626f330d 100644 --- a/satpy/readers/abi_l1b.py +++ b/satpy/readers/abi_l1b.py @@ -163,7 +163,7 @@ def _ir_calibrate(self, data): if self.clip_negative_radiances: min_rad = self._get_minimum_radiance(data) - data = data.clip(min=min_rad) + data = data.clip(min=data.dtype.type(min_rad)) res = (fk2 / np.log(fk1 / data + 1) - bc1) / bc2 res.attrs = data.attrs From f08e0fd1e2b5c8339a15a39be55272d306889bfa Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 3 Nov 2023 21:26:42 -0500 Subject: [PATCH 0657/1416] Fix h5py mock in docs build to handle 
xfail version checks --- doc/source/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index f6606dc6c9..df006727c0 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -44,7 +44,7 @@ class Mock(object): # noqa def __init__(self, *args, **kwargs): """Mask any arguments to mock object.""" - pass + self.__version__ = "0.0.0" def __call__(self, *args, **kwargs): """Mock a function and class object when accessed from mocked module.""" From 0096ad1b89174f9059becf523befc46086a7b042 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 6 Nov 2023 14:08:22 +0100 Subject: [PATCH 0658/1416] Replace usage of assertraises in test_demo.py --- satpy/tests/test_demo.py | 23 ++++++++++++++++------- 1 file changed, 16 insertions(+), 7 deletions(-) diff --git a/satpy/tests/test_demo.py b/satpy/tests/test_demo.py index 32e8016f58..8f5c59bf3c 100644 --- a/satpy/tests/test_demo.py +++ b/satpy/tests/test_demo.py @@ -27,6 +27,8 @@ from collections import defaultdict from unittest import mock +import pytest + # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path @@ -89,9 +91,11 @@ def test_get_us_midlatitude_cyclone_abi(self, gcsfs_mod): gcsfs_mod.GCSFileSystem.return_value = gcsfs_inst gcsfs_inst.glob.return_value = ["a.nc", "b.nc"] # expected 16 files, got 2 - self.assertRaises(RuntimeError, get_us_midlatitude_cyclone_abi) + with pytest.raises(RuntimeError): + get_us_midlatitude_cyclone_abi() # unknown access method - self.assertRaises(NotImplementedError, get_us_midlatitude_cyclone_abi, method="unknown") + with pytest.raises(NotImplementedError): + get_us_midlatitude_cyclone_abi(method="unknown") gcsfs_inst.glob.return_value = ["a.nc"] * 16 filenames = get_us_midlatitude_cyclone_abi() @@ -109,8 +113,10 @@ def test_get_hurricane_florence_abi(self, gcsfs_mod): # only return 5 results total gcsfs_inst.glob.side_effect = _GlobHelper([5, 0]) # expected 16 files * 10 frames, got 16 * 5 - self.assertRaises(RuntimeError, get_hurricane_florence_abi) - self.assertRaises(NotImplementedError, get_hurricane_florence_abi, method="unknown") + with pytest.raises(RuntimeError): + get_hurricane_florence_abi() + with pytest.raises(NotImplementedError): + get_hurricane_florence_abi(method="unknown") gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi() @@ -157,7 +163,8 @@ def test_get_bucket_files(self, gcsfs_mod): gcsfs_inst.glob.side_effect = None # reset mock side effect gcsfs_inst.glob.return_value = ["a.nc", "b.nc"] - self.assertRaises(OSError, get_bucket_files, "*.nc", "does_not_exist") + with pytest.raises(OSError, match="Directory does not exist: does_not_exist"): + get_bucket_files("*.nc", "does_not_exist") open("a.nc", "w").close() # touch the file gcsfs_inst.get.reset_mock() @@ -176,13 +183,15 @@ def test_get_bucket_files(self, gcsfs_mod): # if we don't get any results then we expect an exception gcsfs_inst.get.reset_mock() gcsfs_inst.glob.return_value = [] - self.assertRaises(OSError, get_bucket_files, "*.nc", ".") + with pytest.raises(OSError, match="No files could be found or downloaded."): + get_bucket_files("*.nc", ".") @mock.patch("satpy.demo._google_cloud_platform.gcsfs", None) def test_no_gcsfs(self): """Test that 'gcsfs' is required.""" from satpy.demo._google_cloud_platform import get_bucket_files - self.assertRaises(RuntimeError, get_bucket_files, "*.nc", ".") + with pytest.raises(RuntimeError): + 
get_bucket_files("*.nc", ".") class TestAHIDemoDownload: From 4e485e93158efae51ef0a1e5178e0227cf838c3a Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 6 Nov 2023 12:22:52 -0600 Subject: [PATCH 0659/1416] Fix ABI readers using wrong dtype for resolution-based chunks In-file data is 16-bit so our size has to be based on that --- satpy/readers/abi_base.py | 4 ++-- satpy/tests/reader_tests/test_abi_l1b.py | 26 ++++++++++++++---------- 2 files changed, 17 insertions(+), 13 deletions(-) diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py index 07a29e3043..107382d7ba 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -94,10 +94,10 @@ def _chunk_bytes_for_resolution(self) -> int: # this is true for all CSPP Geo GRB output (226 for all sectors) and full disk from other sources # 250 has been seen for AWS/CLASS CONUS, Mesoscale 1, and Mesoscale 2 files # we align this with 4 on-disk chunks at 500m, so it will be 2 on-disk chunks for 1km, and 1 for 2km - high_res_elems_disk_aligned = np.round(max(num_high_res_elems_per_dim / (4 * 226), 1)) * (4 * 226) + high_res_elems_disk_aligned = round(max(num_high_res_elems_per_dim / (4 * 226), 1)) * (4 * 226) low_res_factor = int(self.filetype_info.get("resolution", 2000) // 500) res_elems_per_dim = int(high_res_elems_disk_aligned / low_res_factor) - return (res_elems_per_dim ** 2) * 4 + return (res_elems_per_dim ** 2) * 2 # 16-bit integers on disk @staticmethod def _rename_dims(nc): diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index a6acd7f027..1c7d2c78ef 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -136,18 +136,14 @@ def generate_l1b_filename(chan_name: str) -> str: @pytest.fixture() def c01_refl(tmp_path) -> xr.DataArray: - # 4 bytes for 32-bit floats - # 4 on-disk chunks for 500 meter data - # 226 on-disk chunk size - # Square (**2) for 2D size - with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): + with _apply_dask_chunk_size(): reader = _create_reader_for_data(tmp_path, "C01", None, 1000) return reader.load(["C01"])["C01"] @pytest.fixture() def c01_rad(tmp_path) -> xr.DataArray: - with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): + with _apply_dask_chunk_size(): reader = _create_reader_for_data(tmp_path, "C01", None, 1000) return reader.load([DataQuery(name="C01", calibration="radiance")])["C01"] @@ -169,14 +165,14 @@ def c01_rad_h5netcdf(tmp_path) -> xr.DataArray: "valid_range": (0, 4095), }, ) - with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): + with _apply_dask_chunk_size(): reader = _create_reader_for_data(tmp_path, "C01", rad, 1000) return reader.load([DataQuery(name="C01", calibration="radiance")])["C01"] @pytest.fixture() def c01_counts(tmp_path) -> xr.DataArray: - with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): + with _apply_dask_chunk_size(): reader = _create_reader_for_data(tmp_path, "C01", None, 1000) return reader.load([DataQuery(name="C01", calibration="counts")])["C01"] @@ -187,7 +183,7 @@ def _load_data_array( clip_negative_radiances: bool = False, ): rad = _fake_c07_data() - with dask.config.set({"array.chunk-size": ((226 * 4) ** 2) * 4}): + with _apply_dask_chunk_size(): reader = _create_reader_for_data( tmp_path, "C07", @@ -241,14 +237,22 @@ def _create_reader_for_data( return load_readers([str(data_path)], "abi_l1b", reader_kwargs=reader_kwargs)["abi_l1b"] +def _apply_dask_chunk_size(): + # 226 on-disk chunk size + # 8 
on-disk chunks for 500 meter data + # Square (**2) for 2D size + # 4 bytes for 32-bit floats + return dask.config.set({"array.chunk-size": ((226 * 8) ** 2) * 4}) + + def _get_and_check_array(data_arr: xr.DataArray, exp_dtype: npt.DTypeLike) -> npt.NDArray: data_np = data_arr.data.compute() assert isinstance(data_arr, xr.DataArray) assert isinstance(data_arr.data, da.Array) assert isinstance(data_np, np.ndarray) res = 1000 if RAD_SHAPE[1000][0] == data_np.shape[0] else 2000 - assert data_arr.chunks[0][0] == 226 * (4 / (res / 500)) - assert data_arr.chunks[1][0] == 226 * (4 / (res / 500)) + assert data_arr.chunks[0][0] == 226 * (8 / (res / 500)) + assert data_arr.chunks[1][0] == 226 * (8 / (res / 500)) assert data_np.dtype == data_arr.dtype assert data_np.dtype == exp_dtype From 78c11911769be180470eb8da8f131d8fea4ad333 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Tue, 7 Nov 2023 08:10:07 +0100 Subject: [PATCH 0660/1416] Update thresholds for high-level and low-level cloud layers abased on feedback from CIRA, i.e. the developers of the GeoColor composite blend (personal communication, 27.09.2023). --- satpy/composites/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 5924d7794c..0deecec642 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1077,7 +1077,7 @@ class HighCloudCompositor(CloudCompositor): of where abs(latitude). """ - def __init__(self, name, transition_min=(200., 220.), transition_max=280, latitude_min=(30., 60.), + def __init__(self, name, transition_min=(210., 230.), transition_max=300, latitude_min=(30., 60.), transition_gamma=1.0, **kwargs): """Collect custom configuration values. @@ -1149,7 +1149,7 @@ class LowCloudCompositor(CloudCompositor): """ def __init__(self, name, values_land=(1,), values_sea=(0,), - range_land=(1.0, 4.5), + range_land=(0.0, 4.0), range_sea=(0.0, 4.0), transition_gamma=1.0, **kwargs): """Init info. From 68f93047257267520f73d273b7eb057c63e00ab1 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Tue, 7 Nov 2023 08:11:47 +0100 Subject: [PATCH 0661/1416] Add TODOs for code consolidation and optimization. --- satpy/composites/__init__.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 0deecec642..dea3830b22 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1109,6 +1109,8 @@ def __call__(self, projectables, **kwargs): `projectables` is expected to be a list or tuple with a single element: - index 0: Brightness temperature of a thermal infrared window channel (e.g. 10.5 microns). """ + # TODO Optimize and make sure that there are no early unnecessary dask computations. Is there a way to avoid + # computation of the latitude array? if len(projectables) != 1: raise ValueError(f"Expected 1 dataset, got {len(projectables)}") @@ -1191,6 +1193,7 @@ def __call__(self, projectables, **kwargs): - index 1. Brightness temperature of the window channel (used to filter out noise-induced false alarms). - index 2: Land-Sea-Mask. 
""" + # TODO Optimize and make sure that there are no early unnecessary dask computations if len(projectables) != 3: raise ValueError(f"Expected 3 datasets, got {len(projectables)}") @@ -1200,6 +1203,8 @@ def __call__(self, projectables, **kwargs): lsm = lsm.round() # Make sure to have whole numbers in case of smearing from resampling # Avoid spurious false alarms caused by noise in the 3.9um channel that can occur for very cold cloud tops + # TODO Consolidate this. Should it really be set to zero and thus within the threshold range? What if the + # lower threshold would be changed to -1 btd = btd.where(bt_win >= 230, 0.0) # Call CloudCompositor for land surface pixels From 07d10c9915928cf6d43794db4a89bfbf43fc2d7b Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Tue, 7 Nov 2023 09:04:49 +0100 Subject: [PATCH 0662/1416] Add url to land water mask to use for GeoColor low-level cloud detection. --- satpy/etc/composites/abi.yaml | 4 ++-- satpy/etc/composites/ahi.yaml | 4 ++-- satpy/etc/composites/fci.yaml | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/satpy/etc/composites/abi.yaml b/satpy/etc/composites/abi.yaml index a686b829ae..e950ba027f 100644 --- a/satpy/etc/composites/abi.yaml +++ b/satpy/etc/composites/abi.yaml @@ -783,8 +783,8 @@ composites: - name: C13 - compositor: !!python/name:satpy.composites.StaticImageCompositor standard_name: land_sea_mask - # TODO Change filename - filename: "/tcenas/proj/optcalimg/strandgren/GeoColor/static_data/gshhs_land_sea_mask_3km_i.tif" + url: "https://zenodo.org/records/10076199/files/gshhs_land_water_mask_3km_i.tif" + known_hash: "sha256:96df83c57416217e191f95dde3d3c1ce0373a8fc220e929228873db246ca3569" geo_color_background_with_low_clouds: compositor: !!python/name:satpy.composites.BackgroundCompositor diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index 4f6ae6932f..e088bcf1a6 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -533,8 +533,8 @@ composites: - name: B13 - compositor: !!python/name:satpy.composites.StaticImageCompositor standard_name: land_sea_mask - # TODO Change filename - filename: "/tcenas/proj/optcalimg/strandgren/GeoColor/static_data/gshhs_land_sea_mask_3km_i.tif" + url: "https://zenodo.org/records/10076199/files/gshhs_land_water_mask_3km_i.tif" + known_hash: "sha256:96df83c57416217e191f95dde3d3c1ce0373a8fc220e929228873db246ca3569" geo_color_background_with_low_clouds: compositor: !!python/name:satpy.composites.BackgroundCompositor diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml index 25ba032cac..c8a32910ca 100644 --- a/satpy/etc/composites/fci.yaml +++ b/satpy/etc/composites/fci.yaml @@ -130,8 +130,8 @@ composites: - name: ir_105 - compositor: !!python/name:satpy.composites.StaticImageCompositor standard_name: land_sea_mask - # TODO Change filename - filename: "/tcenas/proj/optcalimg/strandgren/GeoColor/static_data/gshhs_land_sea_mask_3km_i.tif" + url: "https://zenodo.org/records/10076199/files/gshhs_land_water_mask_3km_i.tif" + known_hash: "sha256:96df83c57416217e191f95dde3d3c1ce0373a8fc220e929228873db246ca3569" geo_color_background_with_low_clouds: compositor: !!python/name:satpy.composites.BackgroundCompositor From 39bf45e9dd701339fec9531b4b9fc50b7be5ade4 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Tue, 7 Nov 2023 09:07:46 +0100 Subject: [PATCH 0663/1416] Update doc strings. 
--- satpy/composites/__init__.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index dea3830b22..bc5a199aa0 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1063,7 +1063,8 @@ class HighCloudCompositor(CloudCompositor): temperature (cloud opacity). In contrast to the `CloudCompositor`, the brightness temperature threshold at the lower end, used to identify high opaque clouds, is made a function of the latitude in order to have tropopause level clouds appear opaque at both high and low latitudes. This follows the Geocolor - implementation of high clouds in Miller et al. (2020, :doi:`10.1175/JTECH-D-19-0134.1`). + implementation of high clouds in Miller et al. (2020, :doi:`10.1175/JTECH-D-19-0134.1`), but + with some adjustments to the thresholds based on recent developments and feedback from CIRA. The two brightness temperature thresholds in `transition_min` are used together with the corresponding latitude limits in `latitude_min` to compute a modified version of `transition_min` that is later used @@ -1144,7 +1145,8 @@ class LowCloudCompositor(CloudCompositor): function of the `BTD` value itself. Two sets of thresholds are used, one set for land surface types (`range_land`) and another one for sea/water surface types (`range_sea`), respectively. Hence, this compositor requires a land-sea-mask as a prerequisite input. This follows the GeoColor - implementation of night-time low-level clouds in Miller et al. (2020, :doi:`10.1175/JTECH-D-19-0134.1`). + implementation of night-time low-level clouds in Miller et al. (2020, :doi:`10.1175/JTECH-D-19-0134.1`), but + with some adjustments to the thresholds based on recent developments and feedback from CIRA. Please note that the spectral test and thus the output of the compositor (using the expected input data) is only applicable during night-time. @@ -1165,8 +1167,6 @@ def __init__(self, name, values_land=(1,), values_sea=(0,), difference over land surface types. range_sea (tuple): Threshold values used for masking low-level clouds from the brightness temperature difference over sea/water. - latitude_min (tuple): Latitude values defining the intervals for computing latitude-dependent - transition_min values. transition_gamma (float): Gamma correction to apply to the alpha channel within the brightness temperature difference range. """ From 9f9e34ddad87fb69ebfdf9b4b89cd1f5872938fb Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 10:20:18 +0000 Subject: [PATCH 0664/1416] Add a reader for MODIS Level 3 files in CMG format. 
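This also teaches the HDF-EOS metadata parser to tolerate "=" inside a
value, which otherwise breaks the strict two-value unpack in
_split_line. The pattern, with an illustrative metadata line:

    line = 'LOCALGRANULEID="MCD43C3.A2023001.061.hdf extra = inside"'
    try:
        key, val = line.split("=")
    except ValueError:  # more than one "=" on the line
        key, val = line.split("=", maxsplit=1)
    # key -> 'LOCALGRANULEID'; val keeps the full quoted remainder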
--- satpy/readers/hdfeos_base.py | 5 +- satpy/readers/modis_l3.py | 111 +++++++++++++++++++++++++++++++++++ 2 files changed, 115 insertions(+), 1 deletion(-) create mode 100644 satpy/readers/modis_l3.py diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index f60040a46f..9964eeb2e1 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -148,7 +148,10 @@ def _read_mda(cls, lines, element=None): @classmethod def _split_line(cls, line, lines): - key, val = line.split("=") + try: + key, val = line.split("=") + except ValueError: + key, val = line.split("=", maxsplit=1) key = key.strip() val = val.strip() try: diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py new file mode 100644 index 0000000000..3c60efba87 --- /dev/null +++ b/satpy/readers/modis_l3.py @@ -0,0 +1,111 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""Modis level 3 hdf-eos format reader. + +Introduction +------------ + +The ``modis_l3`` reader reads Modis L3 products in hdf-eos format. +Since there are a multitude of different level 3 datasets not all of theses are implemented (yet). + + +Currently the reader supports: + - mcd43c1: BRDF/Albedo Model Parameters dataset + - mcd43c3: BRDF/Albedo Albedo dataset + +To get a list of the available datasets for a given file refer to the "Load data" section in :doc:`../reading`. 
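A minimal usage sketch (the file name and the dataset name below are
illustrative, not taken from a real archive)::

    from satpy import Scene

    scn = Scene(reader="modis_l3",
                filenames=["MCD43C3.A2023001.061.2023010123456.hdf"])
    scn.load(["Albedo_BSA_Band1"])  # dataset names are read from the file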
+ +""" +import logging + +from pyresample import geometry + +from satpy.readers.hdfeos_base import HDFEOSGeoReader +from satpy.utils import get_legacy_chunk_size + +logger = logging.getLogger(__name__) +CHUNK_SIZE = get_legacy_chunk_size() + + +class ModisL3GriddedHDFFileHandler(HDFEOSGeoReader): + """File handler for MODIS HDF-EOS Level 3 CMG gridded files.""" + + def __init__(self, filename, filename_info, filetype_info, **kwargs): + """Init the file handler.""" + super().__init__(filename, filename_info, filetype_info, **kwargs) + + # Initialise number of rows and columns + self.nrows = self.metadata["GridStructure"]["GRID_1"]["YDim"] + self.ncols = self.metadata["GridStructure"]["GRID_1"]["XDim"] + + # Get the grid name and other projection info + gridname = self.metadata["GridStructure"]["GRID_1"]["GridName"] + if "CMG" not in gridname: + raise ValueError("Only CMG grids are supported") + + # Get the grid resolution + pos = gridname.rfind("_") + 1 + pos2 = gridname.rfind("Deg") + self.resolution = float(gridname[pos:pos2]) + + upperleft = self.metadata["GridStructure"]["GRID_1"]["UpperLeftPointMtrs"] + lowerright = self.metadata["GridStructure"]["GRID_1"]["LowerRightMtrs"] + + self.area_extent = (upperleft[0], lowerright[1], lowerright[0], upperleft[1]) + + + def available_datasets(self, configured_datasets=None): + """Automatically determine datasets provided by this file.""" + logger.debug("Available_datasets begin...") + + ds_dict = self.sd.datasets() + + yield from super().available_datasets(configured_datasets) + common = {"file_type": "mcd43_cmg_hdf", "resolution": self.resolution} + for key in ds_dict.keys(): + if "/" in key: # not a dataset + continue + yield True, {"name": key} | common + + def get_dataset(self, dataset_id, dataset_info): + """Get DataArray for specified dataset.""" + dataset_name = dataset_id["name"] + dataset = self.load_dataset(dataset_name, dataset_info.pop("category", False)) + self._add_satpy_metadata(dataset_id, dataset) + + return dataset + + + def get_area_def(self, dsid): + """Get the area definition. + + This is fixed, but not defined in the file. So we must + generate it ourselves with some assumptions. + """ + proj_param = "EPSG:4326" + + area = geometry.AreaDefinition("gridded_modis", + "A gridded L3 MODIS area", + "longlat", + proj_param, + self.ncols, + self.nrows, + self.area_extent) + self.area = area + + return self.area From 7c7d5fc5670e76574f815396c239d552688b9dfd Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 10:40:04 +0000 Subject: [PATCH 0665/1416] Update reader for MODIS Level 3 files in CMG format. 
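How the resolution is recovered from the HDF-EOS grid name, in isolation
(grid names as they appear in the struct metadata):

    gridname = "MCD_CMG_BRDF_0.05Deg"
    pos = gridname.rfind("_") + 1    # index just past the last underscore
    pos2 = gridname.rfind("Deg")     # start of the unit suffix
    resolution = float(gridname[pos:pos2])  # -> 0.05 degrees

    gridname = "MOD09CMG"            # no "<res>Deg" suffix at all
    pos, pos2 = gridname.rfind("_") + 1, gridname.rfind("Deg")
    if pos < 0 or pos2 < 0:          # pos2 is -1 here
        resolution = 360. / 7200     # fall back to 360 degrees / ncols -> 0.05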
--- satpy/etc/readers/modis_l3.yaml | 16 ++++++++++++++++ satpy/readers/modis_l3.py | 13 ++++++++++++- 2 files changed, 28 insertions(+), 1 deletion(-) create mode 100644 satpy/etc/readers/modis_l3.yaml diff --git a/satpy/etc/readers/modis_l3.yaml b/satpy/etc/readers/modis_l3.yaml new file mode 100644 index 0000000000..5ad2f32e04 --- /dev/null +++ b/satpy/etc/readers/modis_l3.yaml @@ -0,0 +1,16 @@ +reader: + name: modis_l3 + short_name: MODIS l3 + long_name: MODIS Level 3 (mcd43) data in HDF-EOS format + description: MODIS HDF-EOS L3 Reader + status: Beta + supports_fsspec: false + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + sensors: [modis] + +file_types: + mcd43_cmg_hdf: + file_patterns: + - 'MCD43C{prod_type}.A{start_time:%Y%j}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf' + - 'M{platform_indicator:1s}D09CMG.A{start_time:%Y%j}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf' + file_reader: !!python/name:satpy.readers.modis_l3.ModisL3GriddedHDFFileHandler diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index 3c60efba87..2b9387ed58 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -61,11 +61,22 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): # Get the grid resolution pos = gridname.rfind("_") + 1 pos2 = gridname.rfind("Deg") - self.resolution = float(gridname[pos:pos2]) + + # Some products don't have resolution listed. + if pos < 0 or pos2 < 0: + self.resolution = 360. / self.ncols + else: + self.resolution = float(gridname[pos:pos2]) upperleft = self.metadata["GridStructure"]["GRID_1"]["UpperLeftPointMtrs"] lowerright = self.metadata["GridStructure"]["GRID_1"]["LowerRightMtrs"] + # For some reason, a few of the CMG products multiply their + # decimal degree extents by one million. This fixes it. + if lowerright[0] > 1e6: + upperleft = tuple(val / 1e6 for val in upperleft) + lowerright = tuple(val / 1e6 for val in lowerright) + self.area_extent = (upperleft[0], lowerright[1], lowerright[0], upperleft[1]) From 0de217a4c7f7db8e343d6e1173ca0103cc37c57a Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 10:44:15 +0000 Subject: [PATCH 0666/1416] Update MODIS L3 docstring. --- satpy/readers/modis_l3.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index 2b9387ed58..86560624a8 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -21,12 +21,13 @@ ------------ The ``modis_l3`` reader reads Modis L3 products in hdf-eos format. -Since there are a multitude of different level 3 datasets not all of theses are implemented (yet). +There are multiple level 3 products, including some on sinusoidal grids and some on the climate modeling grid (CMG). +This reader supports the CMG products at present, and the sinusoidal products will be added if there is demand. -Currently the reader supports: - - mcd43c1: BRDF/Albedo Model Parameters dataset - - mcd43c3: BRDF/Albedo Albedo dataset +The reader has been tested with: + - MCD43c*: BRDF/Albedo data, such as parameters, albedo and nbar + - MOD09CMG: Surface Reflectance on climate monitoring grid. To get a list of the available datasets for a given file refer to the "Load data" section in :doc:`../reading`. From b4cf2c43447a6c87ceb9b1a767b0ffc5b036e3d0 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 10:47:12 +0000 Subject: [PATCH 0667/1416] Restructure the L3 MODIS reader. 
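The corner-point normalization that now lives in _sort_grid, standalone
(corner values as written by the two metadata variants):

    upperleft = (-180000000.0, 90000000.0)  # degrees scaled by 1e6
    lowerright = (180000000.0, -90000000.0)

    if lowerright[0] > 1e6:  # cannot be a plain longitude
        upperleft = tuple(val / 1e6 for val in upperleft)
        lowerright = tuple(val / 1e6 for val in lowerright)

    area_extent = (upperleft[0], lowerright[1], lowerright[0], upperleft[1])
    # -> (-180.0, -90.0, 180.0, 90.0): (lon_min, lat_min, lon_max, lat_max)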
--- satpy/readers/modis_l3.py | 31 +++++++++++++++++++------------ 1 file changed, 19 insertions(+), 12 deletions(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index 86560624a8..bff33b190c 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -46,15 +46,8 @@ class ModisL3GriddedHDFFileHandler(HDFEOSGeoReader): """File handler for MODIS HDF-EOS Level 3 CMG gridded files.""" - def __init__(self, filename, filename_info, filetype_info, **kwargs): - """Init the file handler.""" - super().__init__(filename, filename_info, filetype_info, **kwargs) - - # Initialise number of rows and columns - self.nrows = self.metadata["GridStructure"]["GRID_1"]["YDim"] - self.ncols = self.metadata["GridStructure"]["GRID_1"]["XDim"] - - # Get the grid name and other projection info + def _sort_grid(self): + """Get the grid properties.""" gridname = self.metadata["GridStructure"]["GRID_1"]["GridName"] if "CMG" not in gridname: raise ValueError("Only CMG grids are supported") @@ -65,9 +58,9 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): # Some products don't have resolution listed. if pos < 0 or pos2 < 0: - self.resolution = 360. / self.ncols + resolution = 360. / self.ncols else: - self.resolution = float(gridname[pos:pos2]) + resolution = float(gridname[pos:pos2]) upperleft = self.metadata["GridStructure"]["GRID_1"]["UpperLeftPointMtrs"] lowerright = self.metadata["GridStructure"]["GRID_1"]["LowerRightMtrs"] @@ -78,7 +71,21 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): upperleft = tuple(val / 1e6 for val in upperleft) lowerright = tuple(val / 1e6 for val in lowerright) - self.area_extent = (upperleft[0], lowerright[1], lowerright[0], upperleft[1]) + area_extent = (upperleft[0], lowerright[1], lowerright[0], upperleft[1]) + + return resolution, area_extent + + + def __init__(self, filename, filename_info, filetype_info, **kwargs): + """Init the file handler.""" + super().__init__(filename, filename_info, filetype_info, **kwargs) + + # Initialise number of rows and columns + self.nrows = self.metadata["GridStructure"]["GRID_1"]["YDim"] + self.ncols = self.metadata["GridStructure"]["GRID_1"]["XDim"] + + # Get the grid name and other projection info + self.resolution, self.area_extent = self._sort_grid() def available_datasets(self, configured_datasets=None): From 1457ac67e7457ab1c05d2472a73c0b632864278a Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 11:44:39 +0000 Subject: [PATCH 0668/1416] Add tests for MODIS L3 reader. 
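
The tests exercise the reader through the public Scene API, which is also how
end users would load one of these files (the filename below is a hypothetical
example matching the patterns in modis_l3.yaml):

    from satpy import Scene

    scn = Scene(reader="modis_l3",
                filenames=["MCD43C1.A2023312.061.2023312115740.hdf"])
    scn.load(["BRDF_Albedo_Parameter1_Band2"])
    print(scn["BRDF_Albedo_Parameter1_Band2"].attrs["area"])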
--- .../modis_tests/_modis_fixtures.py | 80 ++++++++++++++++++- .../reader_tests/modis_tests/conftest.py | 2 + .../reader_tests/modis_tests/test_modis_l3.py | 78 ++++++++++++++++++ 3 files changed, 159 insertions(+), 1 deletion(-) create mode 100644 satpy/tests/reader_tests/modis_tests/test_modis_l3.py diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index 66221c613e..aff84de7be 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -51,6 +51,9 @@ def _shape_for_resolution(resolution: int) -> tuple[int, int]: + # Case of a CMG 0.05 degree file, for L3 tests + if resolution == -999: + return 3600, 7200 assert resolution in RES_TO_REPEAT_FACTOR factor = RES_TO_REPEAT_FACTOR[resolution] if factor == 1: @@ -252,7 +255,10 @@ def create_hdfeos_test_file(filename: str, if geo_resolution is None or file_shortname is None: raise ValueError("'geo_resolution' and 'file_shortname' are required when including metadata.") setattr(h, 'CoreMetadata.0', _create_core_metadata(file_shortname)) # noqa - setattr(h, 'StructMetadata.0', _create_struct_metadata(geo_resolution)) # noqa + if geo_resolution == -999 or geo_resolution == -9999: + setattr(h, 'StructMetadata.0', _create_struct_metadata_cmg(geo_resolution)) # noqa + else: + setattr(h, 'StructMetadata.0', _create_struct_metadata(geo_resolution)) # noqa setattr(h, 'ArchiveMetadata.0', _create_header_metadata()) # noqa for var_name, var_info in variable_infos.items(): @@ -326,6 +332,31 @@ def _create_struct_metadata(geo_resolution: int) -> str: return struct_metadata_header +def _create_struct_metadata_cmg(res) -> str: + # Case of a MOD09 file + gridline = 'GridName="MOD09CMG"\n' + upleft = "UpperLeftPointMtrs=(-180000000.000000,90000000.000000)\n" + upright = "LowerRightMtrs=(180000000.000000,-90000000.000000)\n" + if res == -9999: + # Case of a MCD43 file + gridline = 'GridName="MCD_CMG_BRDF_0.05Deg"\n' + upleft = "UpperLeftPointMtrs=(-180.000000,90.000000)\n" + upright = "LowerRightMtrs=(180.000000,-90.000000)\n" + + struct_metadata_header = ("GROUP=SwathStructure\n" + "END_GROUP=SwathStructure\n" + "GROUP=GridStructure\n" + "GROUP=GRID_1\n" + f"{gridline}\n" + "XDim=7200\n" + "YDim=3600\n" + f"{upleft}\n" + f"{upright}\n" + "END_GROUP=GRID_1\n" + "END_GROUP=GridStructure\nEND") + return struct_metadata_header + + def _create_header_metadata() -> str: archive_metadata_header = "GROUP = ARCHIVEDMETADATA\nEND_GROUP = ARCHIVEDMETADATA\nEND" return archive_metadata_header @@ -471,6 +502,28 @@ def _get_cloud_mask_variable_info(var_name: str, resolution: int) -> dict: } +def _get_l3_refl_variable_info(var_name: str) -> dict: + shape = (3600, 7200) + data = np.zeros((shape[0], shape[1]), dtype=np.int16) + row_dim_name = "XDim" + col_dim_name = "YDim" + return { + var_name: { + "data": data, + "type": SDC.INT16, + "fill_value": -28672, + "attrs": { + # dim_labels are just unique dimension names, may not match exactly with real world files + "dim_labels": [row_dim_name, + col_dim_name], + "valid_range": (-100, 16000), + "scale_factor": 1e-4, + "add_offset": 0., + }, + }, + } + + def _get_mask_byte1_variable_info() -> dict: shape = _shape_for_resolution(1000) data = np.zeros((shape[0], shape[1]), dtype=np.uint16) @@ -537,6 +590,31 @@ def modis_l2_nasa_mod35_file(tmpdir_factory) -> list[str]: return [full_path] +def generate_nasa_l3_filename(prefix: str) -> str: + """Generate a file name that follows MODIS 09 
L3 convention in a temporary directory.""" + now = datetime.now() + return f"{prefix}.A{now:%Y%j}.061.{now:%Y%j%H%M%S}.hdf" + + +@pytest.fixture(scope="session") +def modis_l3_nasa_mod09_file(tmpdir_factory) -> list[str]: + """Create a single MOD09 L3 HDF4 file with headers.""" + filename = generate_nasa_l3_filename("MOD09CMG") + full_path = str(tmpdir_factory.mktemp("modis_l3").join(filename)) + variable_infos = _get_l3_refl_variable_info("Coarse_Resolution_Surface_Reflectance_Band_2") + create_hdfeos_test_file(full_path, variable_infos, geo_resolution=-999, file_shortname="MOD09") + return [full_path] + +@pytest.fixture(scope="session") +def modis_l3_nasa_mod43_file(tmpdir_factory) -> list[str]: + """Create a single MOD09 L3 HDF4 file with headers.""" + filename = generate_nasa_l3_filename("MCD43C1") + full_path = str(tmpdir_factory.mktemp("modis_l3").join(filename)) + variable_infos = _get_l3_refl_variable_info("BRDF_Albedo_Parameter1_Band2") + create_hdfeos_test_file(full_path, variable_infos, geo_resolution=-9999, file_shortname="MCD43C1") + return [full_path] + + @pytest.fixture(scope="session") def modis_l2_nasa_mod35_mod03_files(modis_l2_nasa_mod35_file, modis_l1b_nasa_mod03_file) -> list[str]: """Create a MOD35 L2 HDF4 file and MOD03 L1b geolocation file.""" diff --git a/satpy/tests/reader_tests/modis_tests/conftest.py b/satpy/tests/reader_tests/modis_tests/conftest.py index e6a8432653..309b16321f 100644 --- a/satpy/tests/reader_tests/modis_tests/conftest.py +++ b/satpy/tests/reader_tests/modis_tests/conftest.py @@ -32,4 +32,6 @@ modis_l2_nasa_mod06_file, modis_l2_nasa_mod35_file, modis_l2_nasa_mod35_mod03_files, + modis_l3_nasa_mod09_file, + modis_l3_nasa_mod43_file, ) diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l3.py b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py new file mode 100644 index 0000000000..1203ecf205 --- /dev/null +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py @@ -0,0 +1,78 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . 
+"""Unit tests for MODIS L3 HDF reader.""" + +from __future__ import annotations + +import dask.array as da +import pytest +from pyresample import geometry +from pytest_lazyfixture import lazy_fixture + +from satpy import Scene, available_readers + +from ._modis_fixtures import _shape_for_resolution + + +def _expected_area(): + proj_param = "EPSG:4326" + + return geometry.AreaDefinition("gridded_modis", + "A gridded L3 MODIS area", + "longlat", + proj_param, + 7200, + 3600, + (-180, -90, 180, 90)) + + +class TestModisL3: + """Test MODIS L3 reader.""" + + def test_available_reader(self): + """Test that MODIS L3 reader is available.""" + assert "modis_l3" in available_readers() + + @pytest.mark.parametrize( + ("loadable", "filename"), + [ + ("Coarse_Resolution_Surface_Reflectance_Band_2", lazy_fixture("modis_l3_nasa_mod09_file")), + ("BRDF_Albedo_Parameter1_Band2",lazy_fixture("modis_l3_nasa_mod43_file")), + ] + ) + def test_scene_available_datasets(self, loadable, filename): + """Test that datasets are available.""" + scene = Scene(reader="modis_l3", filenames=filename) + available_datasets = scene.all_dataset_names() + assert len(available_datasets) > 0 + assert loadable in available_datasets + + def test_load_l3_dataset(self, modis_l3_nasa_mod09_file): + """Load and check an L2 variable.""" + scene = Scene(reader="modis_l3", filenames=modis_l3_nasa_mod09_file) + + ds_name = "Coarse_Resolution_Surface_Reflectance_Band_2" + scene.load([ds_name]) + + data_arr = scene[ds_name] + assert isinstance(data_arr.data, da.Array) + data_arr = data_arr.compute() + + assert data_arr.shape == _shape_for_resolution(-999) + assert data_arr.attrs.get("resolution") == 0.05 + assert data_arr.attrs.get("area") == _expected_area() From 1f501168138ab74c143975fc160ab5fc1129119d Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 11:51:39 +0000 Subject: [PATCH 0669/1416] Simplify the MODIS L3 code + tests somewhat. --- satpy/readers/modis_l3.py | 25 ++++++++++++------- .../modis_tests/_modis_fixtures.py | 4 +-- 2 files changed, 18 insertions(+), 11 deletions(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index bff33b190c..dc4600790d 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -46,22 +46,31 @@ class ModisL3GriddedHDFFileHandler(HDFEOSGeoReader): """File handler for MODIS HDF-EOS Level 3 CMG gridded files.""" - def _sort_grid(self): - """Get the grid properties.""" + def _get_res(self): + """Compute the resolution from the file metadata.""" gridname = self.metadata["GridStructure"]["GRID_1"]["GridName"] if "CMG" not in gridname: raise ValueError("Only CMG grids are supported") - # Get the grid resolution + # Get the grid resolution from the grid name pos = gridname.rfind("_") + 1 pos2 = gridname.rfind("Deg") # Some products don't have resolution listed. if pos < 0 or pos2 < 0: - resolution = 360. / self.ncols + self.resolution = 360. 
/ self.ncols else: - resolution = float(gridname[pos:pos2]) + self.resolution = float(gridname[pos:pos2]) + + + def _sort_grid(self): + """Get the grid properties.""" + + # First, get the grid resolution + self._get_res() + + # Now compute the data extent upperleft = self.metadata["GridStructure"]["GRID_1"]["UpperLeftPointMtrs"] lowerright = self.metadata["GridStructure"]["GRID_1"]["LowerRightMtrs"] @@ -71,9 +80,7 @@ def _sort_grid(self): upperleft = tuple(val / 1e6 for val in upperleft) lowerright = tuple(val / 1e6 for val in lowerright) - area_extent = (upperleft[0], lowerright[1], lowerright[0], upperleft[1]) - - return resolution, area_extent + self.area_extent = (upperleft[0], lowerright[1], lowerright[0], upperleft[1]) def __init__(self, filename, filename_info, filetype_info, **kwargs): @@ -85,7 +92,7 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): self.ncols = self.metadata["GridStructure"]["GRID_1"]["XDim"] # Get the grid name and other projection info - self.resolution, self.area_extent = self._sort_grid() + self._sort_grid() def available_datasets(self, configured_datasets=None): diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index aff84de7be..f076e100ba 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -254,11 +254,11 @@ def create_hdfeos_test_file(filename: str, if include_metadata: if geo_resolution is None or file_shortname is None: raise ValueError("'geo_resolution' and 'file_shortname' are required when including metadata.") - setattr(h, 'CoreMetadata.0', _create_core_metadata(file_shortname)) # noqa - if geo_resolution == -999 or geo_resolution == -9999: + elif geo_resolution == -999 or geo_resolution == -9999: setattr(h, 'StructMetadata.0', _create_struct_metadata_cmg(geo_resolution)) # noqa else: setattr(h, 'StructMetadata.0', _create_struct_metadata(geo_resolution)) # noqa + setattr(h, 'CoreMetadata.0', _create_core_metadata(file_shortname)) # noqa setattr(h, 'ArchiveMetadata.0', _create_header_metadata()) # noqa for var_name, var_info in variable_infos.items(): From 7afa5773f4e03f478e4725a8bb091e5602fbdd8c Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 11:57:40 +0000 Subject: [PATCH 0670/1416] Further simplify the MODIS L3 tests. 
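
The shared filename helper factored out below produces names like this (worked
example with a fixed timestamp instead of datetime.now(); 8 November 2023 is
day-of-year 312):

    from datetime import datetime

    now = datetime(2023, 11, 8, 11, 57, 40)
    print(f"MCD43C1.A{now:%Y%j}.061.{now:%Y%j%H%M%S}.hdf")
    # MCD43C1.A2023312.061.2023312115740.hdf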
---
 .../modis_tests/_modis_fixtures.py            | 32 ++++++++++++-------
 1 file changed, 21 insertions(+), 11 deletions(-)

diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py
index f076e100ba..40e448e067 100644
--- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py
+++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py
@@ -596,23 +596,33 @@ def generate_nasa_l3_filename(prefix: str) -> str:
     return f"{prefix}.A{now:%Y%j}.061.{now:%Y%j%H%M%S}.hdf"
 
 
+def modis_l3_file(tmpdir_factory, f_prefix, var_name, geo_res, f_short):
+    """Create a MODIS L3 file of the desired type."""
+    filename = generate_nasa_l3_filename(f_prefix)
+    full_path = str(tmpdir_factory.mktemp("modis_l3").join(filename))
+    variable_infos = _get_l3_refl_variable_info(var_name)
+    create_hdfeos_test_file(full_path, variable_infos, geo_resolution=geo_res, file_shortname=f_short)
+    return [full_path]
+
+
 @pytest.fixture(scope="session")
 def modis_l3_nasa_mod09_file(tmpdir_factory) -> list[str]:
     """Create a single MOD09 L3 HDF4 file with headers."""
-    filename = generate_nasa_l3_filename("MOD09CMG")
-    full_path = str(tmpdir_factory.mktemp("modis_l3").join(filename))
-    variable_infos = _get_l3_refl_variable_info("Coarse_Resolution_Surface_Reflectance_Band_2")
-    create_hdfeos_test_file(full_path, variable_infos, geo_resolution=-999, file_shortname="MOD09")
-    return [full_path]
+    return modis_l3_file(tmpdir_factory,
+                         "MOD09CMG",
+                         "Coarse_Resolution_Surface_Reflectance_Band_2",
+                         -999,
+                         "MOD09")
+
 
 @pytest.fixture(scope="session")
 def modis_l3_nasa_mod43_file(tmpdir_factory) -> list[str]:
-    """Create a single MOD09 L3 HDF4 file with headers."""
-    filename = generate_nasa_l3_filename("MCD43C1")
-    full_path = str(tmpdir_factory.mktemp("modis_l3").join(filename))
-    variable_infos = _get_l3_refl_variable_info("BRDF_Albedo_Parameter1_Band2")
-    create_hdfeos_test_file(full_path, variable_infos, geo_resolution=-9999, file_shortname="MCD43C1")
-    return [full_path]
+    """Create a single MCD43 L3 HDF4 file with headers."""
+    return modis_l3_file(tmpdir_factory,
+                         "MCD43C1",
+                         "BRDF_Albedo_Parameter1_Band2",
+                         -9999,
+                         "MCD43C1")
 
 
 @pytest.fixture(scope="session")

From 3cbc7024ffd2f4d1734fd411c421e66360af1af1 Mon Sep 17 00:00:00 2001
From: Simon Proud
Date: Wed, 8 Nov 2023 14:03:49 +0000
Subject: [PATCH 0671/1416] Add reader for OSI SAF L3 products on EASE and polar stereographic projections.

---
 satpy/etc/readers/osisaf_nc.yaml | 107 +++++++++++++++++++++
 satpy/readers/osisaf_l3_nc.py    | 154 +++++++++++++++++++++++++++++++
 2 files changed, 261 insertions(+)
 create mode 100644 satpy/etc/readers/osisaf_nc.yaml
 create mode 100644 satpy/readers/osisaf_l3_nc.py

diff --git a/satpy/etc/readers/osisaf_nc.yaml b/satpy/etc/readers/osisaf_nc.yaml
new file mode 100644
index 0000000000..214345da3a
--- /dev/null
+++ b/satpy/etc/readers/osisaf_nc.yaml
@@ -0,0 +1,107 @@
+reader:
+  name: osisaf_nc
+  short_name: OSI-SAF netCDF
+  long_name: OSI-SAF data in netCDF4 format
+  description: >
+    A reader for OSI-SAF data in netCDF4 format.
+ References: + + - Dataset descriptions: https://osi-saf.eumetsat.int/documentation/products-documentation + + status: Beta + supports_fsspec: true + sensors: [osisaf] + default_channels: [] + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + +file_types: + osi_sea_ice_conc: + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['ice_conc_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + osi_sea_ice_edge: + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['ice_edge_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + osi_sea_ice_emis: + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['ice_emis_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + osi_sea_ice_type: + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['ice_type_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + +datasets: + # Shared between various file types + status_flag: + name: status_flag + file_type: [osi_sea_ice_conc, osi_sea_ice_edge, osi_sea_ice_type] + + orbit_num_amsr: + name: orbit_num_amsr + file_type: [osi_sea_ice_edge, osi_sea_ice_type] + orbit_num_ascat: + name: orbit_num_ascat + file_type: [osi_sea_ice_edge, osi_sea_ice_type] + orbit_num_ssmis: + name: orbit_num_ssmis + file_type: [osi_sea_ice_edge, osi_sea_ice_type] + param_used: + name: param_used + file_type: [osi_sea_ice_edge, osi_sea_ice_type] + uncertainty: + name: uncertainty + file_type: [osi_sea_ice_edge, osi_sea_ice_type] + + # Sea ice concentration datasets + algorithm_uncertainty: + name: algorithm_uncertainty + file_type: osi_sea_ice_conc + confidence_level: + name: confidence_level + file_type: osi_sea_ice_conc + ice_conc: + name: ice_conc + file_type: osi_sea_ice_conc + ice_conc_unfiltered: + name: ice_conc_unfiltered + file_type: osi_sea_ice_conc + masks: + name: masks + file_type: osi_sea_ice_conc + smearing_uncertainty: + name: smearing_uncertainty + file_type: osi_sea_ice_conc + total_uncertainty: + name: total_uncertainty + file_type: osi_sea_ice_conc + + # Ice edge product + ice_edge: + name: ice_edge + file_type: osi_sea_ice_edge + + # Ice type product + ice_type: + name: ice_type + file_type: osi_sea_ice_type + + # Ice emis product + e: + name: e + file_type: osi_sea_ice_emis + ev: + name: ev + file_type: osi_sea_ice_emis + flag: + name: flag + file_type: osi_sea_ice_emis + R: + name: R + file_type: osi_sea_ice_emis + S: + name: S + file_type: osi_sea_ice_emis + teff: + name: teff + file_type: osi_sea_ice_emis + u: + name: u + file_type: osi_sea_ice_emis diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py new file mode 100644 index 0000000000..794119a300 --- /dev/null +++ b/satpy/readers/osisaf_l3_nc.py @@ -0,0 +1,154 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +# type: ignore +"""A reader for OSI-SAF level 3 products in netCDF format.""" + +import logging +from datetime import datetime + +import numpy as np + +from satpy.readers.netcdf_utils import NetCDF4FileHandler + +logger = logging.getLogger(__name__) + + +class OSISAFL3NCFileHandler(NetCDF4FileHandler): + """Reader for the OSISAF l3 netCDF format.""" + + + @staticmethod + def _parse_datetime(datestr): + return datetime.strptime(datestr, "%Y-%m-%d %H:%M:%S") + + def _get_ease_grid(self): + """Set up the EASE grid.""" + from pyresample import create_area_def + + proj4str = self["Lambert_Azimuthal_Grid/attr/proj4_string"] + x_size = self["/dimension/xc"] + y_size = self["/dimension/yc"] + p_lowerleft_lat = self["lat"].values[y_size - 1, 0] + p_lowerleft_lon = self["lon"].values[y_size - 1, 0] + p_upperright_lat = self["lat"].values[0, x_size - 1] + p_upperright_lon = self["lon"].values[0, x_size - 1] + area_extent = [p_lowerleft_lon, p_lowerleft_lat, p_upperright_lon, p_upperright_lat] + area_def = create_area_def(area_id="osisaf_lambert_azimuthal_equal_area", + description="osisaf_lambert_azimuthal_equal_area", + proj_id="osisaf_lambert_azimuthal_equal_area", + projection=proj4str, width=x_size, height=y_size, area_extent=area_extent, + units="deg") + return area_def + + def _get_polar_stereographic_grid(self): + """Set up the polar stereographic grid.""" + from pyresample import create_area_def + + proj4str = self["Polar_Stereographic_Grid/attr/proj4_string"] + x_size = self["/dimension/xc"] + y_size = self["/dimension/yc"] + p_lowerleft_lat = self["lat"].values[y_size - 1, 0] + p_lowerleft_lon = self["lon"].values[y_size - 1, 0] + p_upperright_lat = self["lat"].values[0, x_size - 1] + p_upperright_lon = self["lon"].values[0, x_size - 1] + area_extent = [p_lowerleft_lon, p_lowerleft_lat, p_upperright_lon, p_upperright_lat] + area_def = create_area_def(area_id="osisaf_polar_stereographic", + description="osisaf_polar_stereographic", + proj_id="osisaf_polar_stereographic", + projection=proj4str, width=x_size, height=y_size, area_extent=area_extent, + units="deg") + return area_def + + + def get_area_def(self, area_id): + """Override abstract baseclass method""" + + if self.filename_info["grid"] == "ease": + return self._get_ease_grid() + elif self.filename_info["grid"] == "polstere" or self.filename_info["grid"] == "stere": + return self._get_polar_stereographic_grid() + else: + raise ValueError(f"Unknown grid type: {self.filename_info['grid']}") + + def _get_ds_attr(self, a_name): + """Get a dataset attribute and check it's valid.""" + try: + return self[a_name] + except KeyError: + return None + + def get_dataset(self, dataset_id, ds_info): + """Load a dataset.""" + logger.debug(f"Reading {dataset_id['name']} from {self.filename}") + var_path = ds_info.get("file_key", f"{dataset_id['name']}") + + shape = self[var_path + "/shape"] + if shape[0] == 1: + # Remove the time dimension from dataset + data = self[var_path][0] + else: + data = self[var_path] + + file_units = ds_info.get("file_units") + if file_units is None: + file_units = self._get_ds_attr(var_path + "/attr/units") + if file_units is None: + file_units = 1 + + # Try to get the valid limits for the data. + # Not all datasets have these, so fall back on assuming no limits. 
+ valid_min = self._get_ds_attr(var_path + "/attr/valid_min") + valid_max = self._get_ds_attr(var_path + "/attr/valid_max") + if valid_min is not None and valid_max is not None: + data = data.where(data >= valid_min, np.nan) + data = data.where(data <= valid_max, np.nan) + + + # Try to get the scale and offset for the data. + # As above, not all datasets have these, so fall back on assuming no limits. + scale_factor = self._get_ds_attr(var_path + "/attr/scale_factor") + scale_offset = self._get_ds_attr(var_path + "/attr/add_offset") + if scale_offset is not None and scale_factor is not None: + data = (data * scale_factor + scale_offset) + + # Try to get the fill value for the data. + # If there isn"t one, assume all remaining pixels are valid. + fill_value = self._get_ds_attr(var_path + "/attr/_FillValue") + if fill_value is not None: + data = data.where(data != fill_value, np.nan) + + # Set proper dimension names + data = data.rename({"xc": "x", "yc": "y"}) + + ds_info.update({ + "units": ds_info.get("units", file_units), + "platform_name": self["/attr/platform_name"], + "sensor": self["/attr/instrument_type"] + }) + ds_info.update(dataset_id.to_dict()) + data.attrs.update(ds_info) + return data + + @property + def start_time(self): + return self._parse_datetime(self["/attr/start_date"]) + # return self._parse_datetime(self["/attr/start_date"]) + + @property + def end_time(self): + return self._parse_datetime(self["/attr/stop_date"]) From 300a9a1cf09d62c6e2b64d2131486f089e95761e Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 14:06:50 +0000 Subject: [PATCH 0672/1416] Fix typos and tidy osi saf reader. --- satpy/readers/osisaf_l3_nc.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index 794119a300..e61e752299 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -118,7 +118,6 @@ def get_dataset(self, dataset_id, ds_info): data = data.where(data >= valid_min, np.nan) data = data.where(data <= valid_max, np.nan) - # Try to get the scale and offset for the data. # As above, not all datasets have these, so fall back on assuming no limits. scale_factor = self._get_ds_attr(var_path + "/attr/scale_factor") @@ -127,7 +126,7 @@ def get_dataset(self, dataset_id, ds_info): data = (data * scale_factor + scale_offset) # Try to get the fill value for the data. - # If there isn"t one, assume all remaining pixels are valid. + # If there isn't one, assume all remaining pixels are valid. fill_value = self._get_ds_attr(var_path + "/attr/_FillValue") if fill_value is not None: data = data.where(data != fill_value, np.nan) From 13fffa673909606376014941f644785b09cfd1f4 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 15:26:50 +0000 Subject: [PATCH 0673/1416] Add tests for the OSI SAF L3 reader. 
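
On the fixture values, the unpacking that these tests verify end to end boils
down to the following (a plain-numpy sketch of what the reader does with
xarray):

    import numpy as np

    raw = np.array([-999., 1215., 11056.])  # ice_conc values from the fixture
    raw[raw < 0] = np.nan        # below valid_min (this also catches _FillValue)
    raw[raw > 10000] = np.nan    # above valid_max
    print(raw * 0.01 + 0.)       # scale_factor/add_offset -> [nan 12.15 nan]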
--- satpy/readers/osisaf_l3_nc.py | 8 +- satpy/tests/reader_tests/test_osisaf_l3.py | 233 +++++++++++++++++++++ 2 files changed, 237 insertions(+), 4 deletions(-) create mode 100644 satpy/tests/reader_tests/test_osisaf_l3.py diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index e61e752299..00f1176b6f 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -31,7 +31,6 @@ class OSISAFL3NCFileHandler(NetCDF4FileHandler): """Reader for the OSISAF l3 netCDF format.""" - @staticmethod def _parse_datetime(datestr): return datetime.strptime(datestr, "%Y-%m-%d %H:%M:%S") @@ -77,11 +76,12 @@ def _get_polar_stereographic_grid(self): def get_area_def(self, area_id): """Override abstract baseclass method""" - if self.filename_info["grid"] == "ease": - return self._get_ease_grid() + self.area_def = self._get_ease_grid() + return self.area_def elif self.filename_info["grid"] == "polstere" or self.filename_info["grid"] == "stere": - return self._get_polar_stereographic_grid() + self.area_def = self._get_polar_stereographic_grid() + return self.area_def else: raise ValueError(f"Unknown grid type: {self.filename_info['grid']}") diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py new file mode 100644 index 0000000000..40cf4539e1 --- /dev/null +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -0,0 +1,233 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . 
+"""Module for testing the satpy.readers.osisaf_l3 module.""" + +import os +from datetime import datetime + +import numpy as np +import pytest +import xarray as xr + +from satpy import DataQuery +from satpy.readers.osisaf_l3_nc import OSISAFL3NCFileHandler + +stere_ds = xr.DataArray( + -999, + attrs={"grid_mapping_name": "polar_stereographic", + "false_easting": 0.0, + "false_northing": 0.0, + "semi_major_axis": 6378273.0, + "semi_minor_axis": 6356889.44891, + "straight_vertical_longitude_from_pole": 0.0, + "latitude_of_projection_origin": -90.0, + "standard_parallel": -70.0, + "proj4_string": "+proj=stere +a=6378273 +b=6356889.44891 +lat_0=-90 +lat_ts=-70 +lon_0=0", +}) + +ease_ds = xr.DataArray( + -999, + attrs={"grid_mapping_name": "lambert_azimuthal_equal_area", + "false_easting": 0.0, + "false_northing": 0.0, + "semi_major_axis": 6371228.0, + "longitude_of_projection_origin": 0.0, + "latitude_of_projection_origin": -90.0, + "proj4_string": "+proj=laea +a=6371228.0 +lat_0=-90 +lon_0=0", + }) + + +class TestOSISAFL3Reader: + """Test OSI-SAF level 3 netCDF reader.""" + + def setup_method(self, proj_type): + """Create a fake dataset.""" + self.base_data = np.array(([-999, 1215, 1125, 11056, 9500], [200, 1, -999, 4215, 5756])) + self.base_data = np.expand_dims(self.base_data, axis=0) + self.unc_data = np.array(([0, 1, 2, 3, 4], [4, 3, 2, 1, 0])) + self.yc_data = np.array(([-10, -5, 0, 5, 10], [-10, -5, 0, 5, 10])) + self.xc_data = np.array(([-5, -5, -5, -5, -5], [5, 5, 5, 5, 5])) + self.time_data = np.array([1.]) + + self.lat_data = np.array(([-68, -69, -70, -71, -72], [-68, -69, -70, -71, -72])) + self.lon_data = np.array(([-60, -60, -60, -60, -60], [-65, -65, -65, -65, -65])) + self.xc = xr.DataArray( + self.xc_data, + dims=("yc", "xc"), + attrs={"standard_name": "projection_x_coordinate", "units": "km"} + ) + self.yc = xr.DataArray( + self.yc_data, + dims=("yc", "xc"), + attrs={"standard_name": "projection_y_coordinate", "units": "km"} + ) + self.time = xr.DataArray( + self.time_data, + dims=("time"), + attrs={"standard_name": "projection_y_coordinate", "units": "km"} + ) + self.lat = xr.DataArray( + self.lat_data, + dims=("yc", "xc"), + attrs={"standard_name": "latitude", "units": "degrees_north"} + ) + self.lon = xr.DataArray( + self.lon_data, + dims=("yc", "xc"), + attrs={"standard_name": "longitude", "units": "degrees_east"} + ) + self.conc = xr.DataArray( + self.base_data, + dims=("time", "yc", "xc"), + attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, "units": "%", + "valid_min": 0, "valid_max": 10000, "standard_name": "sea_ice_area_fraction"} + ) + self.uncert = xr.DataArray( + self.unc_data, + dims=("yc", "xc"), + attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, + "valid_min": 0, "valid_max": 10000, "standard_name": "total_uncertainty"} + ) + + data_vars = { + "ice_conc": self.conc, + "total_uncertainty": self.uncert, + "xc": self.xc, + "yc": self.yc, + "time": self.time, + "lat": self.lat, + "lon": self.lon, + "Lambert_Azimuthal_Grid": ease_ds, + "Polar_Stereographic_Grid": stere_ds} + self.fake_dataset = xr.Dataset( + data_vars=data_vars, + attrs={ + "start_date": "2022-12-15 00:00:00", + "stop_date": "2022-12-16 00:00:00", + "platform_name": "Multi-sensor analysis", + "instrument_type": "Multi-sensor analysis"}, + ) + + def test_instantiate_single_netcdf_file(self, tmp_path): + """Test initialization of file handlers - given a single netCDF file.""" + filename_info = {} + filetype_info = {} + tmp_filepath = tmp_path / "fake_dataset.nc" + 
self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + + + def test_get_dataset(self, tmp_path): + """Test retrieval of datasets.""" + filename_info = {} + filetype_info = {} + tmp_filepath = tmp_path / "fake_dataset.nc" + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + + res = test.get_dataset(DataQuery(name="ice_conc"), {"standard_name": "sea_ice_area_fraction"}) + # Check we remove singleton dimension + assert res.shape[0] == 2 + assert res.shape[1] == 5 + + # Test values are correct + test_ds = self.fake_dataset["ice_conc"][0].values + test_ds = np.where(test_ds == -999, np.nan, test_ds) + test_ds = np.where(test_ds > 10000, np.nan, test_ds) + np.testing.assert_allclose(res.values, test_ds / 100) + + res = test.get_dataset(DataQuery(name="total_uncertainty"), {"standard_name": "sea_ice_area_fraction"}) + assert res.shape[0] == 2 + assert res.shape[1] == 5 + + with pytest.raises(KeyError): + test.get_dataset(DataQuery(name="erroneous dataset"), {"standard_name": "erroneous dataset"}) + + def test_get_start_and_end_times(self, tmp_path): + """Test retrieval of the sensor name from the netCDF file.""" + good_start_time = datetime(2022, 12, 15, 0, 0, 0) + good_stop_time = datetime(2022, 12, 16, 0, 0, 0) + + filename_info = {} + filetype_info = {} + + tmp_filepath = tmp_path / "fake_dataset.nc" + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + + assert test.start_time == good_start_time + assert test.end_time == good_stop_time + + + def test_get_area_def_ease(self, tmp_path): + """Test getting the area definition for the EASE grid.""" + filename_info = {"grid": "ease"} + filetype_info = {} + tmp_filepath = tmp_path / "fake_dataset.nc" + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + + area_def = test.get_area_def(None) + assert area_def.description == "osisaf_lambert_azimuthal_equal_area" + assert area_def.proj_dict["R"] == 6371228 + assert area_def.proj_dict["lat_0"] == -90 + assert area_def.proj_dict["lon_0"] == 0 + assert area_def.proj_dict["proj"] == "laea" + + assert area_def.width == 5 + assert area_def.height == 2 + np.testing.assert_allclose(area_def.area_extent, + (-2203574.302335, 1027543.572492, -1726299.781982, 996679.643829)) + + + def test_get_area_def_stere(self, tmp_path): + """Test getting the area definition for the polar stereographic grid.""" + filename_info = {"grid": "stere"} + filetype_info = {} + tmp_filepath = tmp_path / "fake_dataset.nc" + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + + area_def = test.get_area_def(None) + assert area_def.description == "osisaf_polar_stereographic" + assert area_def.proj_dict["a"] == 6378273.0 + assert area_def.proj_dict["lat_0"] == -90 + assert area_def.proj_dict["lat_ts"] == -70 + assert area_def.proj_dict["lon_0"] == 0 + assert area_def.proj_dict["proj"] == "stere" + + assert area_def.width == 5 + assert area_def.height == 2 + np.testing.assert_allclose(area_def.area_extent, + (-2185821.7955, 1019265.4426, -1702157.4538, 982741.0642)) + + def test_get_area_def_bad(self, tmp_path): + """Test getting the area definition for the polar stereographic grid.""" + filename_info = 
{"grid": "turnips"} + filetype_info = {} + tmp_filepath = tmp_path / "fake_dataset.nc" + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + with pytest.raises(ValueError, match="Unknown grid type: turnips"): + test.get_area_def(None) From 35434d3ae172d795c0716e1125077d285fcbdf3a Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 15:40:01 +0000 Subject: [PATCH 0674/1416] Update satpy/readers/modis_l3.py Co-authored-by: David Hoese --- satpy/readers/modis_l3.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index dc4600790d..b5313d89cc 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -61,9 +61,6 @@ def _get_res(self): self.resolution = 360. / self.ncols else: self.resolution = float(gridname[pos:pos2]) - - - def _sort_grid(self): """Get the grid properties.""" From fb59a77e279f2cf8d9447ab8429ca1075ebc6bcf Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 15:41:39 +0000 Subject: [PATCH 0675/1416] Update MODIS L3 reader for review comments. --- satpy/readers/modis_l3.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index b5313d89cc..f8f94372cc 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -37,10 +37,8 @@ from pyresample import geometry from satpy.readers.hdfeos_base import HDFEOSGeoReader -from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) -CHUNK_SIZE = get_legacy_chunk_size() class ModisL3GriddedHDFFileHandler(HDFEOSGeoReader): @@ -61,6 +59,7 @@ def _get_res(self): self.resolution = 360. / self.ncols else: self.resolution = float(gridname[pos:pos2]) + def _sort_grid(self): """Get the grid properties.""" @@ -77,7 +76,7 @@ def _sort_grid(self): upperleft = tuple(val / 1e6 for val in upperleft) lowerright = tuple(val / 1e6 for val in lowerright) - self.area_extent = (upperleft[0], lowerright[1], lowerright[0], upperleft[1]) + return (upperleft[0], lowerright[1], lowerright[0], upperleft[1]) def __init__(self, filename, filename_info, filetype_info, **kwargs): @@ -89,7 +88,7 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): self.ncols = self.metadata["GridStructure"]["GRID_1"]["XDim"] # Get the grid name and other projection info - self._sort_grid() + self.area_extent = self._sort_grid() def available_datasets(self, configured_datasets=None): From 261fca6e38cba8a3a0fe145f8a5078e90bb25723 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 15:46:13 +0000 Subject: [PATCH 0676/1416] Update MODIS test fixtures to simplify. 
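
One detail worth remembering when touching these fixtures: the HDF-EOS
attribute names contain a dot, so they cannot be written with plain attribute
assignment. A minimal pyhdf sketch (the file path and metadata string are just
examples):

    from pyhdf.SD import SD, SDC

    h = SD("/tmp/fake.hdf", SDC.WRITE | SDC.CREATE)
    # "StructMetadata.0" is not a valid Python identifier, hence setattr().
    setattr(h, "StructMetadata.0", "GROUP=GridStructure\nEND_GROUP=GridStructure\nEND")
    h.end()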
--- .../modis_tests/_modis_fixtures.py | 35 +++++++++++-------- 1 file changed, 21 insertions(+), 14 deletions(-) diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index 40e448e067..e4272373b3 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -229,6 +229,16 @@ def generate_imapp_filename(suffix): return f"t1.{now:%y%j.%H%M}.{suffix}.hdf" +def _add_geo_metadata(h, geo_res): + """Add the geoinfo metadata to the fake file.""" + if geo_res == -999 or geo_res == -9999: + setattr(h, 'StructMetadata.0', _create_struct_metadata_cmg(geo_res)) # noqa + else: + setattr(h, 'StructMetadata.0', _create_struct_metadata(geo_res)) # noqa + + return h + + def create_hdfeos_test_file(filename: str, variable_infos: dict, geo_resolution: Optional[int] = None, @@ -254,10 +264,7 @@ def create_hdfeos_test_file(filename: str, if include_metadata: if geo_resolution is None or file_shortname is None: raise ValueError("'geo_resolution' and 'file_shortname' are required when including metadata.") - elif geo_resolution == -999 or geo_resolution == -9999: - setattr(h, 'StructMetadata.0', _create_struct_metadata_cmg(geo_resolution)) # noqa - else: - setattr(h, 'StructMetadata.0', _create_struct_metadata(geo_resolution)) # noqa + h = _add_geo_metadata(h, geo_resolution) setattr(h, 'CoreMetadata.0', _create_core_metadata(file_shortname)) # noqa setattr(h, 'ArchiveMetadata.0', _create_header_metadata()) # noqa @@ -344,16 +351,16 @@ def _create_struct_metadata_cmg(res) -> str: upright = "LowerRightMtrs=(180.000000,-90.000000)\n" struct_metadata_header = ("GROUP=SwathStructure\n" - "END_GROUP=SwathStructure\n" - "GROUP=GridStructure\n" - "GROUP=GRID_1\n" - f"{gridline}\n" - "XDim=7200\n" - "YDim=3600\n" - f"{upleft}\n" - f"{upright}\n" - "END_GROUP=GRID_1\n" - "END_GROUP=GridStructure\nEND") + "END_GROUP=SwathStructure\n" + "GROUP=GridStructure\n" + "GROUP=GRID_1\n" + f"{gridline}\n" + "XDim=7200\n" + "YDim=3600\n" + f"{upleft}\n" + f"{upright}\n" + "END_GROUP=GRID_1\n" + "END_GROUP=GridStructure\nEND") return struct_metadata_header From bdcddab6ca033d26d2363513ddb24fc07ddb2679 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 15:46:40 +0000 Subject: [PATCH 0677/1416] Remove rogue blank line. --- satpy/readers/modis_l3.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index f8f94372cc..dfddc0732b 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -62,7 +62,6 @@ def _get_res(self): def _sort_grid(self): """Get the grid properties.""" - # First, get the grid resolution self._get_res() From ac08013b725f2d096e91250a71c2e1ae34eeb219 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 8 Nov 2023 10:35:20 -0600 Subject: [PATCH 0678/1416] Remove typing ignore from satpy/readers/osisaf_l3_nc.py --- satpy/readers/osisaf_l3_nc.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index 00f1176b6f..293584ffa8 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -15,7 +15,6 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
-# type: ignore """A reader for OSI-SAF level 3 products in netCDF format.""" import logging From 092b452782a61b206af8be110cc84900c0c89f2a Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 8 Nov 2023 10:41:32 -0600 Subject: [PATCH 0679/1416] Bump ruff hook version in .pre-commit-config.yaml --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index eb21aa6601..1094cf0355 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,9 +1,9 @@ exclude: '^$' fail_fast: false repos: - - repo: https://github.com/charliermarsh/ruff-pre-commit + - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.0.247' + rev: 'v0.1.4' hooks: - id: ruff - repo: https://github.com/pre-commit/pre-commit-hooks From 9fd639475113feaf7fdb884bcd8087f409b7c0e1 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 16:47:23 +0000 Subject: [PATCH 0680/1416] Update MODIS L3 tests to check data types. --- .../tests/reader_tests/modis_tests/test_modis_l3.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l3.py b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py index 1203ecf205..23c1af6fc1 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l3.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py @@ -20,6 +20,7 @@ from __future__ import annotations import dask.array as da +import numpy as np import pytest from pyresample import geometry from pytest_lazyfixture import lazy_fixture @@ -71,8 +72,12 @@ def test_load_l3_dataset(self, modis_l3_nasa_mod09_file): data_arr = scene[ds_name] assert isinstance(data_arr.data, da.Array) - data_arr = data_arr.compute() + data_arr_comp = data_arr.compute() - assert data_arr.shape == _shape_for_resolution(-999) - assert data_arr.attrs.get("resolution") == 0.05 - assert data_arr.attrs.get("area") == _expected_area() + # Check types + assert data_arr_comp.dtype == data_arr.dtype + assert data_arr_comp.dtype == np.float32 + + assert data_arr_comp.shape == _shape_for_resolution(-999) + assert data_arr_comp.attrs.get("resolution") == 0.05 + assert data_arr_comp.attrs.get("area") == _expected_area() From 23c136a51df49e9433d6b65f8eb5006060f4e65f Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 17:49:05 +0000 Subject: [PATCH 0681/1416] Update INSAT-3D reader to get satellite location from file rather than hardcoded value. --- satpy/readers/insat3d_img_l1b_h5.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/satpy/readers/insat3d_img_l1b_h5.py b/satpy/readers/insat3d_img_l1b_h5.py index a7dcf371cc..7e444e8d34 100644 --- a/satpy/readers/insat3d_img_l1b_h5.py +++ b/satpy/readers/insat3d_img_l1b_h5.py @@ -182,6 +182,8 @@ def get_area_def(self, ds_id): a = 6378137.0 b = 6356752.314245 + nom_cen_pos = self.datatree.attrs["Nominal_Central_Point_Coordinates(degrees)_Latitude_Longitude"][1] + pdict = { "cfac": cfac, "lfac": lfac, @@ -193,7 +195,7 @@ def get_area_def(self, ds_id): "a": a, "b": b, "h": h, - "ssp_lon": 82.0, + "ssp_lon": nom_cen_pos, "a_name": "insat3d82", "a_desc": "insat3d82", "p_id": "geosmsg" From 23db54b18abf2f33450eaf5823231a8c35152cca Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 21:05:26 +0000 Subject: [PATCH 0682/1416] Add support for flux and sst products. 
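
The timestamp parsing added below has to cope with three different stamp
formats across the OSI SAF product family. A flattened equivalent of the
nested try/except, with a hypothetical helper name:

    from datetime import datetime

    def parse_osisaf_datetime(datestr):
        for fmt in ("%Y-%m-%d %H:%M:%S", "%Y%m%dT%H%M%SZ", "%Y-%m-%dT%H:%M:%SZ"):
            try:
                return datetime.strptime(datestr, fmt)
            except ValueError:
                continue
        raise ValueError(f"Unrecognised date string: {datestr}")

    print(parse_osisaf_datetime("2022-12-15 00:00:00"))  # ice products
    print(parse_osisaf_datetime("20221228T183000Z"))     # geo radflux products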
--- satpy/etc/readers/osisaf_nc.yaml | 89 +++++++++++++++++++++++--- satpy/readers/osisaf_l3_nc.py | 105 ++++++++++++++++++++++++++----- 2 files changed, 170 insertions(+), 24 deletions(-) diff --git a/satpy/etc/readers/osisaf_nc.yaml b/satpy/etc/readers/osisaf_nc.yaml index 214345da3a..479b5a38db 100644 --- a/satpy/etc/readers/osisaf_nc.yaml +++ b/satpy/etc/readers/osisaf_nc.yaml @@ -16,17 +16,26 @@ reader: file_types: osi_sea_ice_conc: - file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler - file_patterns: ['ice_conc_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['ice_conc_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] osi_sea_ice_edge: - file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler - file_patterns: ['ice_edge_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['ice_edge_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] osi_sea_ice_emis: - file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler - file_patterns: ['ice_emis_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['ice_emis_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] osi_sea_ice_type: - file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler - file_patterns: ['ice_type_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['ice_type_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + osi_sst: + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['{start_time:%Y%m%d%H%M%S}-{processing_center}-L3C_GHRSST-SSTskin-{sensor}_{platform_name}-v{version}.nc'] + osi_radflux_stere: + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['osisaf_radiative_flux_24h_hl_{grid}-050_{sensor}_{start_time:%Y%m%d%H%M}.nc'] + osi_radflux_grid: + file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler + file_patterns: ['{start_time:%Y%m%d%H%M%S}-OSISAF-RADFLX-{time_period}-{platform_name}.nc'] datasets: # Shared between various file types @@ -105,3 +114,67 @@ datasets: u: name: u file_type: osi_sea_ice_emis + + # SST product + ist_dtime: + name: ist_dtime + file_type: osi_sst + ist_quality_level: + name: ist_quality_level + file_type: osi_sst + l2p_flags: + name: l2p_flags + file_type: osi_sst + landmask: + name: landmask + file_type: osi_sst + or_number_of_pixels: + name: or_number_of_pixels + file_type: osi_sst + or_number_of_pixels_ist: + name: or_number_of_pixels_ist + file_type: osi_sst + probability_of_ice: + name: probability_of_ice + file_type: osi_sst + probability_of_water: + name: probability_of_water + file_type: osi_sst + quality_level: + name: quality_level + file_type: osi_sst + sea_ice_fraction: + name: sea_ice_fraction + file_type: osi_sst + sea_surface_temperature: + name: sea_surface_temperature + file_type: osi_sst + sses_bias: + name: sses_bias + file_type: osi_sst + sses_standard_deviation: + name: sses_standard_deviation + file_type: osi_sst + 
sst_dtime:
+    name: sst_dtime
+    file_type: osi_sst
+  surface_temperature:
+    name: surface_temperature
+    file_type: osi_sst
+  tempflag:
+    name: tempflag
+    file_type: osi_sst
+
+  # Radiative flux product
+  dli:
+    name: dli
+    file_type: [osi_radflux_stere, osi_radflux_grid]
+  dli_confidence_level:
+    name: dli_confidence_level
+    file_type: [osi_radflux_stere, osi_radflux_grid]
+  ssi:
+    name: ssi
+    file_type: [osi_radflux_stere, osi_radflux_grid]
+  ssi_confidence_level:
+    name: ssi_confidence_level
+    file_type: [osi_radflux_stere, osi_radflux_grid]
diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py
index 293584ffa8..b2e6ec6812 100644
--- a/satpy/readers/osisaf_l3_nc.py
+++ b/satpy/readers/osisaf_l3_nc.py
@@ -32,7 +32,14 @@ class OSISAFL3NCFileHandler(NetCDF4FileHandler):
 
     @staticmethod
     def _parse_datetime(datestr):
-        return datetime.strptime(datestr, "%Y-%m-%d %H:%M:%S")
+        try:
+            return datetime.strptime(datestr, "%Y-%m-%d %H:%M:%S")
+        except ValueError:
+            try:
+                return datetime.strptime(datestr, "%Y%m%dT%H%M%SZ")
+            except ValueError:
+                return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ")
+
 
     def _get_ease_grid(self):
         """Set up the EASE grid."""
         from pyresample import create_area_def
@@ -53,11 +60,37 @@ def _get_ease_grid(self):
                                    units="deg")
         return area_def
 
+    def _get_geographic_grid(self):
+        """Set up the geographic (lat/lon) grid."""
+        from pyresample import create_area_def
+
+        x_size = self["/dimension/lon"]
+        y_size = self["/dimension/lat"]
+        lat_0 = self["lat"].min()
+        lon_0 = self["lon"].min()
+        lat_1 = self["lat"].max()
+        lon_1 = self["lon"].max()
+        area_extent = [lon_0, lat_1, lon_1, lat_0]
+        area_def = create_area_def(area_id="osisaf_geographic_area",
+                                   description="osisaf_geographic_area",
+                                   proj_id="osisaf_geographic_area",
+                                   projection="+proj=lonlat", width=x_size, height=y_size, area_extent=area_extent,
+                                   units="deg")
+        return area_def
+
     def _get_polar_stereographic_grid(self):
         """Set up the polar stereographic grid."""
         from pyresample import create_area_def
-
-        proj4str = self["Polar_Stereographic_Grid/attr/proj4_string"]
+        try:
+            proj4str = self["Polar_Stereographic_Grid/attr/proj4_string"]
+        except KeyError:
+            # Some products don't have the proj str, so we construct it ourselves
+            sma = self["Polar_Stereographic_Grid/attr/semi_major_axis"]
+            smb = self["Polar_Stereographic_Grid/attr/semi_minor_axis"]
+            lon_0 = self["Polar_Stereographic_Grid/attr/straight_vertical_longitude_from_pole"]
+            lat_0 = self["Polar_Stereographic_Grid/attr/latitude_of_projection_origin"]
+            lat_ts = self["Polar_Stereographic_Grid/attr/standard_parallel"]
+            proj4str = f"+a={sma} +b={smb} +lat_ts={lat_ts} +lon_0={lon_0} +proj=stere +lat_0={lat_0}"
         x_size = self["/dimension/xc"]
         y_size = self["/dimension/yc"]
         p_lowerleft_lat = self["lat"].values[y_size - 1, 0]
@@ -75,7 +108,13 @@ def get_area_def(self, area_id):
         """Override abstract baseclass method"""
-        if self.filename_info["grid"] == "ease":
+        if self.filetype_info["file_type"] == "osi_radflux_grid":
+            self.area_def = self._get_geographic_grid()
+            return self.area_def
+        elif self.filetype_info["file_type"] == "osi_sst":
+            self.area_def = self._get_polar_stereographic_grid()
+            return self.area_def
+        elif self.filename_info["grid"] == "ease":
             self.area_def = self._get_ease_grid()
             return self.area_def
         elif self.filename_info["grid"] == "polstere" or self.filename_info["grid"] == "stere":
@@ -117,6 +156,12 @@ def get_dataset(self, dataset_id, ds_info):
             data = data.where(data >= valid_min, np.nan)
             data = data.where(data <= valid_max, np.nan)
 
+        # Try to 
get the fill value for the data. + # If there isn't one, assume all remaining pixels are valid. + fill_value = self._get_ds_attr(var_path + "/attr/_FillValue") + if fill_value is not None: + data = data.where(data != fill_value, np.nan) + # Try to get the scale and offset for the data. # As above, not all datasets have these, so fall back on assuming no limits. scale_factor = self._get_ds_attr(var_path + "/attr/scale_factor") @@ -124,29 +169,57 @@ def get_dataset(self, dataset_id, ds_info): if scale_offset is not None and scale_factor is not None: data = (data * scale_factor + scale_offset) - # Try to get the fill value for the data. - # If there isn't one, assume all remaining pixels are valid. - fill_value = self._get_ds_attr(var_path + "/attr/_FillValue") - if fill_value is not None: - data = data.where(data != fill_value, np.nan) - # Set proper dimension names - data = data.rename({"xc": "x", "yc": "y"}) + if self.filetype_info["file_type"] == "osi_radflux_grid": + data = data.rename({"lon": "x", "lat": "y"}) + else: + data = data.rename({"xc": "x", "yc": "y"}) ds_info.update({ "units": ds_info.get("units", file_units), - "platform_name": self["/attr/platform_name"], - "sensor": self["/attr/instrument_type"] + "platform_name": self._get_platname(), + "sensor": self._get_instname() }) ds_info.update(dataset_id.to_dict()) data.attrs.update(ds_info) return data + def _get_instname(self): + """Get instrument name.""" + try: + return self["/attr/instrument_name"] + except KeyError: + try: + return self["/attr/sensor"] + except KeyError: + return "unknown_sensor" + + def _get_platname(self): + """Get platform name.""" + try: + return self["/attr/platform_name"] + except KeyError: + return self["/attr/platform"] + + @property def start_time(self): - return self._parse_datetime(self["/attr/start_date"]) - # return self._parse_datetime(self["/attr/start_date"]) + start_t = self._get_ds_attr("/attr/start_date") + if start_t is None: + start_t = self._get_ds_attr("/attr/start_time") + if start_t is None: + start_t = self._get_ds_attr("/attr/time_coverage_start") + if start_t is None: + raise ValueError("Unknown start time attribute.") + return self._parse_datetime(start_t) @property def end_time(self): - return self._parse_datetime(self["/attr/stop_date"]) + end_t = self._get_ds_attr("/attr/stop_date") + if end_t is None: + end_t = self._get_ds_attr("/attr/stop_time") + if end_t is None: + end_t = self._get_ds_attr("/attr/time_coverage_end") + if end_t is None: + raise ValueError("Unknown stop time attribute.") + return self._parse_datetime(end_t) From aef8d90e610f1c8f62bd6c321f666b9fec038968 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 8 Nov 2023 22:11:00 +0000 Subject: [PATCH 0683/1416] (wip) Update OSI SAF tests. 
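
The reworked base class below pulls its expectations from instance attributes
(self.filename_info, self.varname, self.fillv and so on), so each product
flavour is meant to become a small subclass. A hypothetical example of what
such a subclass could look like; the actual subclasses are not part of this
work-in-progress commit:

    class TestOSISAFL3ReaderIce(OSISAFL3ReaderTests):
        def setup_method(self, method=None):
            super().setup_method(tester="ice")
            self.filename_info = {"grid": "ease"}
            self.filetype_info = {"file_type": "osi_sea_ice_conc"}
            self.varname = "ice_conc"
            self.stdname = "sea_ice_area_fraction"
            self.fillv = -999
            self.maxv = 10000
            self.scl = 100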
--- satpy/tests/reader_tests/test_osisaf_l3.py | 193 ++++++++++++++------- 1 file changed, 131 insertions(+), 62 deletions(-) diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py index 40cf4539e1..65a9efc9a1 100644 --- a/satpy/tests/reader_tests/test_osisaf_l3.py +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -30,15 +30,27 @@ stere_ds = xr.DataArray( -999, attrs={"grid_mapping_name": "polar_stereographic", - "false_easting": 0.0, - "false_northing": 0.0, - "semi_major_axis": 6378273.0, - "semi_minor_axis": 6356889.44891, - "straight_vertical_longitude_from_pole": 0.0, - "latitude_of_projection_origin": -90.0, - "standard_parallel": -70.0, - "proj4_string": "+proj=stere +a=6378273 +b=6356889.44891 +lat_0=-90 +lat_ts=-70 +lon_0=0", -}) + "false_easting": 0.0, + "false_northing": 0.0, + "semi_major_axis": 6378273.0, + "semi_minor_axis": 6356889.44891, + "straight_vertical_longitude_from_pole": 0.0, + "latitude_of_projection_origin": -90.0, + "standard_parallel": -70.0, + "proj4_string": "+proj=stere +a=6378273 +b=6356889.44891 +lat_0=-90 +lat_ts=-70 +lon_0=0", + }) + +stere_ds_noproj = xr.DataArray( + -999, + attrs={"grid_mapping_name": "polar_stereographic", + "false_easting": 0.0, + "false_northing": 0.0, + "semi_major_axis": 6378273.0, + "semi_minor_axis": 6356889.44891, + "straight_vertical_longitude_from_pole": 0.0, + "latitude_of_projection_origin": -90.0, + "standard_parallel": -70.0, + }) ease_ds = xr.DataArray( -999, @@ -51,14 +63,34 @@ "proj4_string": "+proj=laea +a=6371228.0 +lat_0=-90 +lon_0=0", }) +attrs_ice = { + "start_date": "2022-12-15 00:00:00", + "stop_date": "2022-12-16 00:00:00", + "platform_name": "Multi-sensor analysis", + "instrument_type": "Multi-sensor analysis"} + +attrs_flux = { + "time_coverage_start": "2023-10-10T00:00:00Z", + "time_coverage_end": "2023-10-10T23:59:59Z", + "platform": "NOAA-19, NOAA-20, Metop-B, Metop-C, SNPP", + "sensor": "AVHRR, VIIRS, AVHRR, AVHRR, VIIRS"} + +attrs_geo = { + "start_time": "20221228T183000Z", + "stop_time": "20221228T193000Z", + "platform": "MSG4"} -class TestOSISAFL3Reader: - """Test OSI-SAF level 3 netCDF reader.""" - def setup_method(self, proj_type): +class OSISAFL3ReaderTests: + """Test OSI-SAF level 3 netCDF reader ice files.""" + + def setup_method(self, tester="ice"): """Create a fake dataset.""" self.base_data = np.array(([-999, 1215, 1125, 11056, 9500], [200, 1, -999, 4215, 5756])) + self.base_data_ssi = np.array(([-999.99, 121.5, 11.25, 110.56, 950.0], [200, 1, -999.99, 42.15, 5.756])) + self.base_data_ssi_geo = np.array(([-32768, 121.5, 11.25, 110.56, 950.0], [200, 1, -32768, 42.15, 5.756])) self.base_data = np.expand_dims(self.base_data, axis=0) + self.base_data_ssi = np.expand_dims(self.base_data_ssi, axis=0) self.unc_data = np.array(([0, 1, 2, 3, 4], [4, 3, 2, 1, 0])) self.yc_data = np.array(([-10, -5, 0, 5, 10], [-10, -5, 0, 5, 10])) self.xc_data = np.array(([-5, -5, -5, -5, -5], [5, 5, 5, 5, 5])) @@ -78,7 +110,7 @@ def setup_method(self, proj_type): ) self.time = xr.DataArray( self.time_data, - dims=("time"), + dims="time", attrs={"standard_name": "projection_y_coordinate", "units": "km"} ) self.lat = xr.DataArray( @@ -103,84 +135,93 @@ def setup_method(self, proj_type): attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, "valid_min": 0, "valid_max": 10000, "standard_name": "total_uncertainty"} ) + self.ssi_geo = xr.DataArray( + self.base_data_ssi_geo, + dims=("lat", "lon"), + attrs={"scale_factor": 0.1, "add_offset": 0., "_FillValue": 32768, 
"units": "W m-2", + "valid_min": 0., "valid_max": 1000., "standard_name": "surface_downwelling_shortwave_flux_in_air"} + ) + self.ssi = xr.DataArray( + self.base_data_ssi, + dims=("time", "yc", "xc"), + attrs={"_FillValue": -999.99, "units": "W m-2", + "valid_min": 0., "valid_max": 1000., "standard_name": "surface_downwelling_shortwave_flux_in_air"} + ) + self.uncert = xr.DataArray( + self.unc_data, + dims=("yc", "xc"), + attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, + "valid_min": 0, "valid_max": 10000, "standard_name": "total_uncertainty"} + ) data_vars = { - "ice_conc": self.conc, - "total_uncertainty": self.uncert, - "xc": self.xc, - "yc": self.yc, - "time": self.time, - "lat": self.lat, - "lon": self.lon, - "Lambert_Azimuthal_Grid": ease_ds, - "Polar_Stereographic_Grid": stere_ds} - self.fake_dataset = xr.Dataset( - data_vars=data_vars, - attrs={ - "start_date": "2022-12-15 00:00:00", - "stop_date": "2022-12-16 00:00:00", - "platform_name": "Multi-sensor analysis", - "instrument_type": "Multi-sensor analysis"}, - ) + "xc": self.xc, + "yc": self.yc, + "time": self.time, + "lat": self.lat, + "lon": self.lon, } + if tester == "ice": + data_vars["Lambert_Azimuthal_Grid"] = ease_ds + data_vars["Polar_Stereographic_Grid"] = stere_ds + data_vars["ice_conc"] = self.conc + data_vars["total_uncertainty"] = self.uncert + elif tester == "flux_stere": + data_vars["Polar_Stereographic_Grid"] = stere_ds_noproj + data_vars["ssi"] = self.ssi + elif tester == "flux_geo": + data_vars["ssi"] = self.ssi_geo + + if tester == "ice": + self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_ice) + elif tester == "flux_stere": + self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_flux) + elif tester == "flux_geo": + self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_geo) def test_instantiate_single_netcdf_file(self, tmp_path): """Test initialization of file handlers - given a single netCDF file.""" - filename_info = {} - filetype_info = {} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) - OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) - + OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) def test_get_dataset(self, tmp_path): """Test retrieval of datasets.""" - filename_info = {} - filetype_info = {} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) - test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) - res = test.get_dataset(DataQuery(name="ice_conc"), {"standard_name": "sea_ice_area_fraction"}) + res = test.get_dataset(DataQuery(name=self.varname), {"standard_name": self.stdname}) # Check we remove singleton dimension assert res.shape[0] == 2 assert res.shape[1] == 5 # Test values are correct - test_ds = self.fake_dataset["ice_conc"][0].values - test_ds = np.where(test_ds == -999, np.nan, test_ds) - test_ds = np.where(test_ds > 10000, np.nan, test_ds) - np.testing.assert_allclose(res.values, test_ds / 100) - - res = test.get_dataset(DataQuery(name="total_uncertainty"), {"standard_name": "sea_ice_area_fraction"}) - assert res.shape[0] == 2 - assert res.shape[1] == 5 + test_ds = self.fake_dataset[self.varname][0].values + test_ds = np.where(test_ds == self.fillv, np.nan, test_ds) + test_ds = np.where(test_ds > self.maxv, np.nan, test_ds) + test_ds = test_ds / self.scl + 
np.testing.assert_allclose(res.values, test_ds) with pytest.raises(KeyError): test.get_dataset(DataQuery(name="erroneous dataset"), {"standard_name": "erroneous dataset"}) def test_get_start_and_end_times(self, tmp_path): """Test retrieval of the sensor name from the netCDF file.""" - good_start_time = datetime(2022, 12, 15, 0, 0, 0) - good_stop_time = datetime(2022, 12, 16, 0, 0, 0) - - filename_info = {} - filetype_info = {} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) - test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) - - assert test.start_time == good_start_time - assert test.end_time == good_stop_time + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) + assert test.start_time == self.good_start_time + assert test.end_time == self.good_stop_time def test_get_area_def_ease(self, tmp_path): """Test getting the area definition for the EASE grid.""" filename_info = {"grid": "ease"} - filetype_info = {} + filetype_info = {"file_type": "osi_sea_ice_conc"} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) @@ -198,15 +239,12 @@ def test_get_area_def_ease(self, tmp_path): np.testing.assert_allclose(area_def.area_extent, (-2203574.302335, 1027543.572492, -1726299.781982, 996679.643829)) - def test_get_area_def_stere(self, tmp_path): """Test getting the area definition for the polar stereographic grid.""" - filename_info = {"grid": "stere"} - filetype_info = {} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) - test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) area_def = test.get_area_def(None) assert area_def.description == "osisaf_polar_stereographic" @@ -224,10 +262,41 @@ def test_get_area_def_stere(self, tmp_path): def test_get_area_def_bad(self, tmp_path): """Test getting the area definition for the polar stereographic grid.""" filename_info = {"grid": "turnips"} - filetype_info = {} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) - test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, self.filetype_info) with pytest.raises(ValueError, match="Unknown grid type: turnips"): test.get_area_def(None) + + +class TestOSISAFL3ReaderICE(OSISAFL3ReaderTests): + """Test OSI-SAF level 3 netCDF reader ice files.""" + + def setup_method(self): + super().setup_method(tester="ice") + self.filename_info = {"grid": "ease"} + self.filetype_info = {"file_type": "osi_sea_ice_conc"} + self.good_start_time = datetime(2023, 10, 10, 0, 0, 0) + self.good_stop_time = datetime(2023, 10, 10, 23, 59, 59) + self.varname = "ice_conc" + self.stdname = "sea_ice_area_fraction" + self.fillv = -999 + self.maxv = 10000 + self.scl = 100 + + +class TestOSISAFL3ReaderFlux(OSISAFL3ReaderTests): + """Test OSI-SAF level 3 netCDF reader flux files.""" + + def setup_method(self): + super().setup_method(tester="flux_stere") + self.filename_info = {} + self.filetype_info = {"file_type": "osi_radflux_stere"} + self.good_start_time = datetime(2023, 10, 10, 0, 0, 0) + self.good_stop_time = datetime(2023, 10, 10, 23, 59, 59) + self.varname = "ssi" + self.stdname = "surface_downwelling_shortwave_flux_in_air" + self.fillv = -999.99 + self.maxv = 
1000
+        self.scl = 1

From 3bbf8923e868acc9357df4c1cea0133ab59ed3ec Mon Sep 17 00:00:00 2001
From: Simon Proud
Date: Wed, 8 Nov 2023 22:13:05 +0000
Subject: [PATCH 0684/1416] Update Insat-3D area def to use hardcoded field of view.

---
 satpy/readers/insat3d_img_l1b_h5.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/satpy/readers/insat3d_img_l1b_h5.py b/satpy/readers/insat3d_img_l1b_h5.py
index 7e444e8d34..9f2224ef82 100644
--- a/satpy/readers/insat3d_img_l1b_h5.py
+++ b/satpy/readers/insat3d_img_l1b_h5.py
@@ -173,7 +173,12 @@ def get_area_def(self, ds_id):
         lines = shape[-2]
         cols = shape[-1]
 
-        fov = self.datatree.attrs["Field_of_View(degrees)"]
+        # From empirical analysis, hardcoding the field of view to 18 degrees
+        # produces better geolocation results.
+        # Uncommenting the line below will use the fov from the file instead;
+        # it is kept for reference.
+        # fov = self.datatree.attrs["Field_of_View(degrees)"]
+        fov = 18
 
         cfac = 2 ** 16 / (fov / cols)
         lfac = 2 ** 16 / (fov / lines)

From 89d6b64e2f0bd2050957b6c32440767004345ed5 Mon Sep 17 00:00:00 2001
From: Simon Proud
Date: Thu, 9 Nov 2023 09:56:05 +0000
Subject: [PATCH 0685/1416] Update OSI-SAF reader and finish the tests.

---
 satpy/readers/osisaf_l3_nc.py              |  28 ++--
 satpy/tests/reader_tests/test_osisaf_l3.py | 186 ++++++++++++++-----
 2 files changed, 157 insertions(+), 57 deletions(-)

diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py
index b2e6ec6812..8cdd35020c 100644
--- a/satpy/readers/osisaf_l3_nc.py
+++ b/satpy/readers/osisaf_l3_nc.py
@@ -108,20 +108,22 @@ def _get_polar_stereographic_grid(self):
 
     def get_area_def(self, area_id):
         """Override abstract baseclass method"""
-        if self.filetype_info["file_type"] == "osi_radflux_grid":
-            self.area_def = self._get_geographic_grid()
-            return self.area_def
-        elif self.filetype_info["file_type"] == "osi_sst":
-            self.area_def = self._get_polar_stereographic_grid()
-            return self.area_def
-        elif self.filename_info["grid"] == "ease":
-            self.area_def = self._get_ease_grid()
-            return self.area_def
-        elif self.filename_info["grid"] == "polstere" or self.filename_info["grid"] == "stere":
-            self.area_def = self._get_polar_stereographic_grid()
-            return self.area_def
+        if "grid" in self.filename_info:
+            if self.filename_info["grid"] == "ease":
+                self.area_def = self._get_ease_grid()
+                return self.area_def
+            elif self.filename_info["grid"] == "polstere" or self.filename_info["grid"] == "stere":
+                self.area_def = self._get_polar_stereographic_grid()
+                return self.area_def
+            else:
+                raise ValueError(f"Unknown grid type: {self.filename_info['grid']}")
         else:
-            raise ValueError(f"Unknown grid type: {self.filename_info['grid']}")
+            if self.filetype_info["file_type"] == "osi_radflux_grid":
+                self.area_def = self._get_geographic_grid()
+                return self.area_def
+            elif self.filetype_info["file_type"] == "osi_sst":
+                self.area_def = self._get_polar_stereographic_grid()
+                return self.area_def
 
     def _get_ds_attr(self, a_name):
         """Get a dataset attribute and check it's valid."""
diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py
index 65a9efc9a1..fd035ccbac 100644
--- a/satpy/tests/reader_tests/test_osisaf_l3.py
+++ b/satpy/tests/reader_tests/test_osisaf_l3.py
@@ -88,13 +88,17 @@ def setup_method(self, tester="ice"):
         """Create a fake dataset."""
         self.base_data = np.array(([-999, 1215, 1125, 11056, 9500], [200, 1, -999, 4215, 5756]))
         self.base_data_ssi = np.array(([-999.99, 121.5, 11.25, 110.56, 950.0], [200, 1, -999.99, 
42.15, 5.756])) + self.base_data_sst = np.array(([-32768, 273.2, 194.2, 220.78, 301.], [-32768, -32768, 273.22, 254.34, 204.21])) self.base_data_ssi_geo = np.array(([-32768, 121.5, 11.25, 110.56, 950.0], [200, 1, -32768, 42.15, 5.756])) self.base_data = np.expand_dims(self.base_data, axis=0) self.base_data_ssi = np.expand_dims(self.base_data_ssi, axis=0) + self.base_data_sst = np.expand_dims(self.base_data_sst, axis=0) self.unc_data = np.array(([0, 1, 2, 3, 4], [4, 3, 2, 1, 0])) self.yc_data = np.array(([-10, -5, 0, 5, 10], [-10, -5, 0, 5, 10])) self.xc_data = np.array(([-5, -5, -5, -5, -5], [5, 5, 5, 5, 5])) self.time_data = np.array([1.]) + self.scl = 1. + self.add = 0. self.lat_data = np.array(([-68, -69, -70, -71, -72], [-68, -69, -70, -71, -72])) self.lon_data = np.array(([-60, -60, -60, -60, -60], [-65, -65, -65, -65, -65])) @@ -138,7 +142,7 @@ def setup_method(self, tester="ice"): self.ssi_geo = xr.DataArray( self.base_data_ssi_geo, dims=("lat", "lon"), - attrs={"scale_factor": 0.1, "add_offset": 0., "_FillValue": 32768, "units": "W m-2", + attrs={"scale_factor": 0.1, "add_offset": 0., "_FillValue": 32768, "valid_min": 0., "valid_max": 1000., "standard_name": "surface_downwelling_shortwave_flux_in_air"} ) self.ssi = xr.DataArray( @@ -147,13 +151,12 @@ def setup_method(self, tester="ice"): attrs={"_FillValue": -999.99, "units": "W m-2", "valid_min": 0., "valid_max": 1000., "standard_name": "surface_downwelling_shortwave_flux_in_air"} ) - self.uncert = xr.DataArray( - self.unc_data, - dims=("yc", "xc"), - attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, - "valid_min": 0, "valid_max": 10000, "standard_name": "total_uncertainty"} + self.sst = xr.DataArray( + self.base_data_sst, + dims=("time", "yc", "xc"), + attrs={"scale_factor": 0.01, "add_offset": 273.15, "_FillValue": -32768, "units": "K", + "valid_min": -8000., "valid_max": 5000., "standard_name": "sea_ice_surface_temperature"} ) - data_vars = { "xc": self.xc, "yc": self.yc, @@ -165,13 +168,15 @@ def setup_method(self, tester="ice"): data_vars["Polar_Stereographic_Grid"] = stere_ds data_vars["ice_conc"] = self.conc data_vars["total_uncertainty"] = self.uncert + elif tester == "sst": + data_vars["Polar_Stereographic_Grid"] = stere_ds + data_vars["surface_temperature"] = self.sst elif tester == "flux_stere": data_vars["Polar_Stereographic_Grid"] = stere_ds_noproj data_vars["ssi"] = self.ssi elif tester == "flux_geo": data_vars["ssi"] = self.ssi_geo - - if tester == "ice": + if tester == "ice" or tester == "sst": self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_ice) elif tester == "flux_stere": self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_flux) @@ -198,10 +203,10 @@ def test_get_dataset(self, tmp_path): assert res.shape[1] == 5 # Test values are correct - test_ds = self.fake_dataset[self.varname][0].values + test_ds = self.fake_dataset[self.varname].values.squeeze() test_ds = np.where(test_ds == self.fillv, np.nan, test_ds) test_ds = np.where(test_ds > self.maxv, np.nan, test_ds) - test_ds = test_ds / self.scl + test_ds = test_ds / self.scl + self.add np.testing.assert_allclose(res.values, test_ds) with pytest.raises(KeyError): @@ -218,14 +223,60 @@ def test_get_start_and_end_times(self, tmp_path): assert test.start_time == self.good_start_time assert test.end_time == self.good_stop_time + def test_get_area_def_bad(self, tmp_path): + """Test getting the area definition for the polar stereographic grid.""" + filename_info = {"grid": "turnips"} + tmp_filepath = tmp_path / 
"fake_dataset.nc" + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, self.filetype_info) + with pytest.raises(ValueError, match="Unknown grid type: turnips"): + test.get_area_def(None) + + +class TestOSISAFL3ReaderICE(OSISAFL3ReaderTests): + """Test OSI-SAF level 3 netCDF reader ice files.""" + + def setup_method(self): + super().setup_method(tester="ice") + self.filename_info = {"grid": "ease"} + self.filetype_info = {"file_type": "osi_sea_ice_conc"} + self.good_start_time = datetime(2022, 12, 15, 0, 0, 0) + self.good_stop_time = datetime(2022, 12, 16, 0, 0, 0) + self.varname = "ice_conc" + self.stdname = "sea_ice_area_fraction" + self.fillv = -999 + self.maxv = 10000 + self.scl = 100 + + def test_get_area_def_stere(self, tmp_path): + """Test getting the area definition for the polar stereographic grid.""" + self.filename_info = {"grid": "stere"} + tmp_filepath = tmp_path / "fake_dataset.nc" + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) + + area_def = test.get_area_def(None) + assert area_def.description == "osisaf_polar_stereographic" + assert area_def.proj_dict["a"] == 6378273.0 + assert area_def.proj_dict["lat_0"] == -90 + assert area_def.proj_dict["lat_ts"] == -70 + assert area_def.proj_dict["lon_0"] == 0 + assert area_def.proj_dict["proj"] == "stere" + + assert area_def.width == 5 + assert area_def.height == 2 + np.testing.assert_allclose(area_def.area_extent, + (-2185821.7955, 1019265.4426, -1702157.4538, 982741.0642)) + + def test_get_area_def_ease(self, tmp_path): """Test getting the area definition for the EASE grid.""" - filename_info = {"grid": "ease"} - filetype_info = {"file_type": "osi_sea_ice_conc"} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) - test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, filetype_info) + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), {"grid": "ease"}, self.filetype_info) area_def = test.get_area_def(None) assert area_def.description == "osisaf_lambert_azimuthal_equal_area" @@ -239,6 +290,22 @@ def test_get_area_def_ease(self, tmp_path): np.testing.assert_allclose(area_def.area_extent, (-2203574.302335, 1027543.572492, -1726299.781982, 996679.643829)) + +class TestOSISAFL3ReaderFluxStere(OSISAFL3ReaderTests): + """Test OSI-SAF level 3 netCDF reader flux files on stereographic grid.""" + + def setup_method(self): + super().setup_method(tester="flux_stere") + self.filename_info = {"grid": "polstere"} + self.filetype_info = {"file_type": "osi_radflux_stere"} + self.good_start_time = datetime(2023, 10, 10, 0, 0, 0) + self.good_stop_time = datetime(2023, 10, 10, 23, 59, 59) + self.varname = "ssi" + self.stdname = "surface_downwelling_shortwave_flux_in_air" + self.fillv = -999.99 + self.maxv = 1000 + self.scl = 1 + def test_get_area_def_stere(self, tmp_path): """Test getting the area definition for the polar stereographic grid.""" tmp_filepath = tmp_path / "fake_dataset.nc" @@ -259,44 +326,75 @@ def test_get_area_def_stere(self, tmp_path): np.testing.assert_allclose(area_def.area_extent, (-2185821.7955, 1019265.4426, -1702157.4538, 982741.0642)) - def test_get_area_def_bad(self, tmp_path): - """Test getting the area definition for the polar stereographic grid.""" - filename_info = {"grid": "turnips"} + +class TestOSISAFL3ReaderFluxGeo(OSISAFL3ReaderTests): + """Test OSI-SAF level 3 netCDF reader flux 
files on lat/lon grid (GEO sensors).""" + + def setup_method(self): + super().setup_method(tester="flux_geo") + self.filename_info = {} + self.filetype_info = {"file_type": "osi_radflux_grid"} + self.good_start_time = datetime(2022, 12, 28, 18, 30, 0) + self.good_stop_time = datetime(2022, 12, 28, 19, 30, 0) + self.varname = "ssi" + self.stdname = "surface_downwelling_shortwave_flux_in_air" + self.fillv = -32768 + self.maxv = 1000 + self.scl = 10 + + + def test_get_area_def_grid(self, tmp_path): + """Test getting the area definition for the lat/lon grid.""" tmp_filepath = tmp_path / "fake_dataset.nc" + self.filename_info = {} + self.filetype_info = {"file_type": "osi_radflux_grid"} self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) - test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, self.filetype_info) - with pytest.raises(ValueError, match="Unknown grid type: turnips"): - test.get_area_def(None) - + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) -class TestOSISAFL3ReaderICE(OSISAFL3ReaderTests): - """Test OSI-SAF level 3 netCDF reader ice files.""" + area_def = test.get_area_def(None) + assert area_def.description == "osisaf_geographic_area" + assert area_def.proj_dict["datum"] == "WGS84" + assert area_def.proj_dict["proj"] == "longlat" - def setup_method(self): - super().setup_method(tester="ice") - self.filename_info = {"grid": "ease"} - self.filetype_info = {"file_type": "osi_sea_ice_conc"} - self.good_start_time = datetime(2023, 10, 10, 0, 0, 0) - self.good_stop_time = datetime(2023, 10, 10, 23, 59, 59) - self.varname = "ice_conc" - self.stdname = "sea_ice_area_fraction" - self.fillv = -999 - self.maxv = 10000 - self.scl = 100 + assert area_def.width == 5 + assert area_def.height == 2 + np.testing.assert_allclose(area_def.area_extent, + (-65, -68, -60, -72)) -class TestOSISAFL3ReaderFlux(OSISAFL3ReaderTests): - """Test OSI-SAF level 3 netCDF reader flux files.""" +class TestOSISAFL3ReaderSST(OSISAFL3ReaderTests): + """Test OSI-SAF level 3 netCDF reader surface temperature files.""" def setup_method(self): - super().setup_method(tester="flux_stere") + super().setup_method(tester="sst") self.filename_info = {} - self.filetype_info = {"file_type": "osi_radflux_stere"} - self.good_start_time = datetime(2023, 10, 10, 0, 0, 0) - self.good_stop_time = datetime(2023, 10, 10, 23, 59, 59) - self.varname = "ssi" - self.stdname = "surface_downwelling_shortwave_flux_in_air" - self.fillv = -999.99 + self.filetype_info = {"file_type": "osi_sst"} + self.good_start_time = datetime(2022, 12, 15, 0, 0, 0) + self.good_stop_time = datetime(2022, 12, 16, 0, 0, 0) + self.varname = "surface_temperature" + self.stdname = "sea_ice_surface_temperature" + self.fillv = -32768 self.maxv = 1000 - self.scl = 1 + self.scl = 100 + self.add = 273.15 + + def test_get_area_def_stere(self, tmp_path): + """Test getting the area definition for the polar stereographic grid.""" + tmp_filepath = tmp_path / "fake_dataset.nc" + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) + + area_def = test.get_area_def(None) + assert area_def.description == "osisaf_polar_stereographic" + assert area_def.proj_dict["a"] == 6378273.0 + assert area_def.proj_dict["lat_0"] == -90 + assert area_def.proj_dict["lat_ts"] == -70 + assert area_def.proj_dict["lon_0"] == 0 + assert area_def.proj_dict["proj"] == "stere" + + assert area_def.width == 5 + assert area_def.height == 2 + 
np.testing.assert_allclose(area_def.area_extent, + (-2185821.7955, 1019265.4426, -1702157.4538, 982741.0642)) From 35196b684f60b6f950b91113472cd0019db3f228 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Thu, 9 Nov 2023 10:47:14 +0000 Subject: [PATCH 0686/1416] Simplify OSI SAF code. --- satpy/readers/osisaf_l3_nc.py | 58 ++++++++++++++-------- satpy/tests/reader_tests/test_osisaf_l3.py | 8 ++- 2 files changed, 39 insertions(+), 27 deletions(-) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index 8cdd35020c..1574b6ba74 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -106,24 +106,33 @@ def _get_polar_stereographic_grid(self): return area_def + def _get_finfo_grid(self): + """Get grid in case of filename info being used.""" + if self.filename_info["grid"] == "ease": + self.area_def = self._get_ease_grid() + return self.area_def + elif self.filename_info["grid"] == "polstere" or self.filename_info["grid"] == "stere": + self.area_def = self._get_polar_stereographic_grid() + return self.area_def + else: + raise ValueError(f"Unknown grid type: {self.filename_info['grid']}") + + def _get_ftype_grid(self): + """Get grid in case of filetype info being used.""" + if self.filetype_info["file_type"] == "osi_radflux_grid": + self.area_def = self._get_geographic_grid() + return self.area_def + elif self.filetype_info["file_type"] == "osi_sst": + self.area_def = self._get_polar_stereographic_grid() + return self.area_def + def get_area_def(self, area_id): """Override abstract baseclass method""" if "grid" in self.filename_info: - if self.filename_info["grid"] == "ease": - self.area_def = self._get_ease_grid() - return self.area_def - elif self.filename_info["grid"] == "polstere" or self.filename_info["grid"] == "stere": - self.area_def = self._get_polar_stereographic_grid() - return self.area_def - else: - raise ValueError(f"Unknown grid type: {self.filename_info['grid']}") + return self._get_finfo_grid() else: - if self.filetype_info["file_type"] == "osi_radflux_grid": - self.area_def = self._get_geographic_grid() - return self.area_def - elif self.filetype_info["file_type"] == "osi_sst": - self.area_def = self._get_polar_stereographic_grid() - return self.area_def + return self._get_ftype_grid() + def _get_ds_attr(self, a_name): """Get a dataset attribute and check it's valid.""" @@ -132,23 +141,28 @@ def _get_ds_attr(self, a_name): except KeyError: return None + def _get_ds_units(self, ds_info, var_path): + """Find the units of the datasets.""" + + file_units = ds_info.get("file_units") + if file_units is None: + file_units = self._get_ds_attr(var_path + "/attr/units") + if file_units is None: + file_units = 1 + return file_units + def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" logger.debug(f"Reading {dataset_id['name']} from {self.filename}") var_path = ds_info.get("file_key", f"{dataset_id['name']}") shape = self[var_path + "/shape"] + data = self[var_path] if shape[0] == 1: # Remove the time dimension from dataset - data = self[var_path][0] - else: - data = self[var_path] + data = data[0] - file_units = ds_info.get("file_units") - if file_units is None: - file_units = self._get_ds_attr(var_path + "/attr/units") - if file_units is None: - file_units = 1 + file_units = self._get_ds_units(ds_info, var_path) # Try to get the valid limits for the data. # Not all datasets have these, so fall back on assuming no limits. 
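
For reference, the unit lookup factored out above into _get_ds_units
resolves in a fixed order: an explicit "file_units" entry from the
reader's YAML dataset definition wins, then the variable's own "units"
attribute, then a unitless 1.  A standalone sketch of that precedence
(resolve_units is a hypothetical name, not part of the reader):

    def resolve_units(ds_info, attr_units):
        """Illustrative stand-in for _get_ds_units."""
        file_units = ds_info.get("file_units")  # 1) reader YAML override
        if file_units is None:
            file_units = attr_units             # 2) the variable's own attribute
        if file_units is None:
            file_units = 1                      # 3) unitless fallback
        return file_units

    assert resolve_units({"file_units": "%"}, "K") == "%"
    assert resolve_units({}, "W m-2") == "W m-2"
    assert resolve_units({}, None) == 1
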
diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py index fd035ccbac..59550225b0 100644 --- a/satpy/tests/reader_tests/test_osisaf_l3.py +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -168,19 +168,17 @@ def setup_method(self, tester="ice"): data_vars["Polar_Stereographic_Grid"] = stere_ds data_vars["ice_conc"] = self.conc data_vars["total_uncertainty"] = self.uncert + self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_ice) elif tester == "sst": data_vars["Polar_Stereographic_Grid"] = stere_ds data_vars["surface_temperature"] = self.sst + self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_ice) elif tester == "flux_stere": data_vars["Polar_Stereographic_Grid"] = stere_ds_noproj data_vars["ssi"] = self.ssi - elif tester == "flux_geo": - data_vars["ssi"] = self.ssi_geo - if tester == "ice" or tester == "sst": - self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_ice) - elif tester == "flux_stere": self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_flux) elif tester == "flux_geo": + data_vars["ssi"] = self.ssi_geo self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_geo) def test_instantiate_single_netcdf_file(self, tmp_path): From fd0ce951ada0a33d0d8313161faa3501d58bb1e9 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Thu, 9 Nov 2023 11:01:01 +0000 Subject: [PATCH 0687/1416] Tidy up OSI SAF tests. --- satpy/tests/reader_tests/test_osisaf_l3.py | 128 ++++++++------------- 1 file changed, 47 insertions(+), 81 deletions(-) diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py index 59550225b0..e037884c04 100644 --- a/satpy/tests/reader_tests/test_osisaf_l3.py +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -86,99 +86,67 @@ class OSISAFL3ReaderTests: def setup_method(self, tester="ice"): """Create a fake dataset.""" - self.base_data = np.array(([-999, 1215, 1125, 11056, 9500], [200, 1, -999, 4215, 5756])) - self.base_data_ssi = np.array(([-999.99, 121.5, 11.25, 110.56, 950.0], [200, 1, -999.99, 42.15, 5.756])) - self.base_data_sst = np.array(([-32768, 273.2, 194.2, 220.78, 301.], [-32768, -32768, 273.22, 254.34, 204.21])) - self.base_data_ssi_geo = np.array(([-32768, 121.5, 11.25, 110.56, 950.0], [200, 1, -32768, 42.15, 5.756])) - self.base_data = np.expand_dims(self.base_data, axis=0) - self.base_data_ssi = np.expand_dims(self.base_data_ssi, axis=0) - self.base_data_sst = np.expand_dims(self.base_data_sst, axis=0) - self.unc_data = np.array(([0, 1, 2, 3, 4], [4, 3, 2, 1, 0])) - self.yc_data = np.array(([-10, -5, 0, 5, 10], [-10, -5, 0, 5, 10])) - self.xc_data = np.array(([-5, -5, -5, -5, -5], [5, 5, 5, 5, 5])) - self.time_data = np.array([1.]) + base_data = np.array(([-999, 1215, 1125, 11056, 9500], [200, 1, -999, 4215, 5756])) + base_data_ssi = np.array(([-999.99, 121.5, 11.25, 110.56, 950.0], [200, 1, -999.99, 42.15, 5.756])) + base_data_sst = np.array(([-32768, 273.2, 194.2, 220.78, 301.], [-32768, -32768, 273.22, 254.34, 204.21])) + base_data_ssi_geo = np.array(([-32768, 121.5, 11.25, 110.56, 950.0], [200, 1, -32768, 42.15, 5.756])) + base_data = np.expand_dims(base_data, axis=0) + base_data_ssi = np.expand_dims(base_data_ssi, axis=0) + base_data_sst = np.expand_dims(base_data_sst, axis=0) + unc_data = np.array(([0, 1, 2, 3, 4], [4, 3, 2, 1, 0])) + yc_data = np.array(([-10, -5, 0, 5, 10], [-10, -5, 0, 5, 10])) + xc_data = np.array(([-5, -5, -5, -5, -5], [5, 5, 5, 5, 5])) + time_data = np.array([1.]) self.scl = 1. self.add = 0. 
- self.lat_data = np.array(([-68, -69, -70, -71, -72], [-68, -69, -70, -71, -72])) - self.lon_data = np.array(([-60, -60, -60, -60, -60], [-65, -65, -65, -65, -65])) - self.xc = xr.DataArray( - self.xc_data, - dims=("yc", "xc"), - attrs={"standard_name": "projection_x_coordinate", "units": "km"} - ) - self.yc = xr.DataArray( - self.yc_data, - dims=("yc", "xc"), - attrs={"standard_name": "projection_y_coordinate", "units": "km"} - ) - self.time = xr.DataArray( - self.time_data, - dims="time", - attrs={"standard_name": "projection_y_coordinate", "units": "km"} - ) - self.lat = xr.DataArray( - self.lat_data, - dims=("yc", "xc"), - attrs={"standard_name": "latitude", "units": "degrees_north"} - ) - self.lon = xr.DataArray( - self.lon_data, - dims=("yc", "xc"), - attrs={"standard_name": "longitude", "units": "degrees_east"} - ) - self.conc = xr.DataArray( - self.base_data, - dims=("time", "yc", "xc"), - attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, "units": "%", - "valid_min": 0, "valid_max": 10000, "standard_name": "sea_ice_area_fraction"} - ) - self.uncert = xr.DataArray( - self.unc_data, - dims=("yc", "xc"), - attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, - "valid_min": 0, "valid_max": 10000, "standard_name": "total_uncertainty"} - ) - self.ssi_geo = xr.DataArray( - self.base_data_ssi_geo, - dims=("lat", "lon"), - attrs={"scale_factor": 0.1, "add_offset": 0., "_FillValue": 32768, - "valid_min": 0., "valid_max": 1000., "standard_name": "surface_downwelling_shortwave_flux_in_air"} - ) - self.ssi = xr.DataArray( - self.base_data_ssi, - dims=("time", "yc", "xc"), - attrs={"_FillValue": -999.99, "units": "W m-2", - "valid_min": 0., "valid_max": 1000., "standard_name": "surface_downwelling_shortwave_flux_in_air"} - ) - self.sst = xr.DataArray( - self.base_data_sst, - dims=("time", "yc", "xc"), - attrs={"scale_factor": 0.01, "add_offset": 273.15, "_FillValue": -32768, "units": "K", - "valid_min": -8000., "valid_max": 5000., "standard_name": "sea_ice_surface_temperature"} - ) - data_vars = { - "xc": self.xc, - "yc": self.yc, - "time": self.time, - "lat": self.lat, - "lon": self.lon, } + lat_data = np.array(([-68, -69, -70, -71, -72], [-68, -69, -70, -71, -72])) + lon_data = np.array(([-60, -60, -60, -60, -60], [-65, -65, -65, -65, -65])) + + xc = xr.DataArray(xc_data, dims=("yc", "xc"), + attrs={"standard_name": "projection_x_coordinate", "units": "km"}) + yc = xr.DataArray(yc_data, dims=("yc", "xc"), + attrs={"standard_name": "projection_y_coordinate", "units": "km"}) + time = xr.DataArray(time_data, dims="time", + attrs={"standard_name": "projection_y_coordinate", "units": "km"}) + lat = xr.DataArray(lat_data, dims=("yc", "xc"), + attrs={"standard_name": "latitude", "units": "degrees_north"}) + lon = xr.DataArray(lon_data, dims=("yc", "xc"), + attrs={"standard_name": "longitude", "units": "degrees_east"}) + conc = xr.DataArray(base_data, dims=("time", "yc", "xc"), + attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, "units": "%", + "valid_min": 0, "valid_max": 10000, "standard_name": "sea_ice_area_fraction"}) + uncert = xr.DataArray(unc_data, dims=("yc", "xc"), + attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, "valid_min": 0, + "valid_max": 10000, "standard_name": "total_uncertainty"}) + ssi_geo = xr.DataArray(base_data_ssi_geo, dims=("lat", "lon"), + attrs={"scale_factor": 0.1, "add_offset": 0., "_FillValue": 32768, "valid_min": 0., + "valid_max": 1000., "standard_name": "surface_downwelling_shortwave_flux_in_air"}) + 
ssi = xr.DataArray(base_data_ssi, dims=("time", "yc", "xc"), + attrs={"_FillValue": -999.99, "units": "W m-2", "valid_min": 0., "valid_max": 1000., + "standard_name": "surface_downwelling_shortwave_flux_in_air"}) + sst = xr.DataArray(base_data_sst, dims=("time", "yc", "xc"), + attrs={"scale_factor": 0.01, "add_offset": 273.15, "_FillValue": -32768, "units": "K", + "valid_min": -8000., "valid_max": 5000., + "standard_name": "sea_ice_surface_temperature"}) + data_vars = {"xc": xc, "yc": yc, "time": time, "lat": lat, "lon": lon} + if tester == "ice": data_vars["Lambert_Azimuthal_Grid"] = ease_ds data_vars["Polar_Stereographic_Grid"] = stere_ds - data_vars["ice_conc"] = self.conc - data_vars["total_uncertainty"] = self.uncert + data_vars["ice_conc"] = conc + data_vars["total_uncertainty"] = uncert self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_ice) elif tester == "sst": data_vars["Polar_Stereographic_Grid"] = stere_ds - data_vars["surface_temperature"] = self.sst + data_vars["surface_temperature"] = sst self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_ice) elif tester == "flux_stere": data_vars["Polar_Stereographic_Grid"] = stere_ds_noproj - data_vars["ssi"] = self.ssi + data_vars["ssi"] = ssi self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_flux) elif tester == "flux_geo": - data_vars["ssi"] = self.ssi_geo + data_vars["ssi"] = ssi_geo self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_geo) def test_instantiate_single_netcdf_file(self, tmp_path): @@ -268,7 +236,6 @@ def test_get_area_def_stere(self, tmp_path): np.testing.assert_allclose(area_def.area_extent, (-2185821.7955, 1019265.4426, -1702157.4538, 982741.0642)) - def test_get_area_def_ease(self, tmp_path): """Test getting the area definition for the EASE grid.""" tmp_filepath = tmp_path / "fake_dataset.nc" @@ -340,7 +307,6 @@ def setup_method(self): self.maxv = 1000 self.scl = 10 - def test_get_area_def_grid(self, tmp_path): """Test getting the area definition for the lat/lon grid.""" tmp_filepath = tmp_path / "fake_dataset.nc" From 0ef3e6b1ac15702db2d022af3b9678a0386ccceb Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 9 Nov 2023 13:01:18 -0600 Subject: [PATCH 0688/1416] Cleanup sgs time handling in geos_imager_hrit --- satpy/readers/goes_imager_hrit.py | 48 ++++++++++++++++--------------- 1 file changed, 25 insertions(+), 23 deletions(-) diff --git a/satpy/readers/goes_imager_hrit.py b/satpy/readers/goes_imager_hrit.py index 1724ba214d..d90ebb4a72 100644 --- a/satpy/readers/goes_imager_hrit.py +++ b/satpy/readers/goes_imager_hrit.py @@ -31,6 +31,7 @@ import numpy as np import xarray as xr +from satpy._compat import ArrayLike from satpy.readers._geos_area import get_area_definition, get_area_extent, get_geos_area_naming from satpy.readers.eum_base import recarray2dict, time_cds_short from satpy.readers.hrit_base import ( @@ -115,10 +116,21 @@ class CalibrationError(Exception): ("msecs", "u1")]) -def make_sgs_time(sgs_time_array): +def make_sgs_time(sgs_time_array: ArrayLike) -> datetime: """Make sgs time.""" + epoch_year = _epoch_year_from_sgs_time(sgs_time_array) + doy_offset = _epoch_doy_offset_from_sgs_time(sgs_time_array) + return epoch_year + doy_offset + + +def _epoch_year_from_sgs_time(sgs_time_array: ArrayLike) -> datetime: century = sgs_time_array["century"].astype(np.int64) year = sgs_time_array["year"].astype(np.int64) + year = ((century >> 4) * 1000 + (century & 15) * 100 + (year >> 4) * 10 + (year & 15)) + return datetime(int(year), 1, 1) + + +def 
_epoch_doy_offset_from_sgs_time(sgs_time_array: ArrayLike) -> timedelta: doy1 = sgs_time_array["doy1"].astype(np.int64) doy_hours = sgs_time_array["doy_hours"].astype(np.int64) hours_mins = sgs_time_array["hours_mins"].astype(np.int64) @@ -126,28 +138,18 @@ def make_sgs_time(sgs_time_array): secs_msecs = sgs_time_array["secs_msecs"].astype(np.int64) msecs = sgs_time_array["msecs"].astype(np.int64) - year = ((century >> 4) * 1000 + - (century & 15) * 100 + - (year >> 4) * 10 + - (year & 15)) - doy = ((doy1 >> 4) * 100 + - (doy1 & 15) * 10 + - (doy_hours >> 4)) - hours = ((doy_hours & 15) * 10 + - (hours_mins >> 4)) - mins = ((hours_mins & 15) * 10 + - (mins_secs >> 4)) - secs = ((mins_secs & 15) * 10 + - (secs_msecs >> 4)) - msecs = ((secs_msecs & 15) * 100 + - (msecs >> 4) * 10 + - (msecs & 15)) - return (datetime(int(year), 1, 1) + - timedelta(days=int(doy - 1), - hours=int(hours), - minutes=int(mins), - seconds=int(secs), - milliseconds=int(msecs))) + doy = ((doy1 >> 4) * 100 + (doy1 & 15) * 10 + (doy_hours >> 4)) + hours = ((doy_hours & 15) * 10 + (hours_mins >> 4)) + mins = ((hours_mins & 15) * 10 + (mins_secs >> 4)) + secs = ((mins_secs & 15) * 10 + (secs_msecs >> 4)) + msecs = ((secs_msecs & 15) * 100 + (msecs >> 4) * 10 + (msecs & 15)) + return timedelta( + days=int(doy - 1), + hours=int(hours), + minutes=int(mins), + seconds=int(secs), + milliseconds=int(msecs) + ) satellite_status = np.dtype([("TagType", " Date: Fri, 10 Nov 2023 09:59:32 +0200 Subject: [PATCH 0689/1416] Remove duplicate entries of required netcdf variables in FCI reader --- satpy/etc/readers/fci_l1c_nc.yaml | 8 -------- 1 file changed, 8 deletions(-) diff --git a/satpy/etc/readers/fci_l1c_nc.yaml b/satpy/etc/readers/fci_l1c_nc.yaml index f89699ae3a..4462087a39 100644 --- a/satpy/etc/readers/fci_l1c_nc.yaml +++ b/satpy/etc/readers/fci_l1c_nc.yaml @@ -37,16 +37,12 @@ file_types: - data/{channel_name}/measured/index_map - data/mtg_geos_projection/attr/inverse_flattening - data/mtg_geos_projection/attr/longitude_of_projection_origin - - data/mtg_geos_projection/attr/longitude_of_projection_origin - - data/mtg_geos_projection/attr/perspective_point_height - - data/mtg_geos_projection/attr/perspective_point_height - data/mtg_geos_projection/attr/perspective_point_height - data/mtg_geos_projection/attr/semi_major_axis - data/swath_direction - data/swath_number - index - state/celestial/earth_sun_distance - - state/celestial/earth_sun_distance - state/celestial/subsolar_latitude - state/celestial/subsolar_longitude - state/celestial/sun_satellite_distance @@ -95,16 +91,12 @@ file_types: - data/{channel_name}/measured/index_map - data/mtg_geos_projection/attr/inverse_flattening - data/mtg_geos_projection/attr/longitude_of_projection_origin - - data/mtg_geos_projection/attr/longitude_of_projection_origin - - data/mtg_geos_projection/attr/perspective_point_height - - data/mtg_geos_projection/attr/perspective_point_height - data/mtg_geos_projection/attr/perspective_point_height - data/mtg_geos_projection/attr/semi_major_axis - data/swath_direction - data/swath_number - index - state/celestial/earth_sun_distance - - state/celestial/earth_sun_distance - state/celestial/subsolar_latitude - state/celestial/subsolar_longitude - state/celestial/sun_satellite_distance From f3c4c7945512b2e01821ea358b25a2751da8325f Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 10 Nov 2023 10:25:54 +0200 Subject: [PATCH 0690/1416] Remove attribute names that are read via their parent --- satpy/etc/readers/fci_l1c_nc.yaml | 12 ++---------- 1 
file changed, 2 insertions(+), 10 deletions(-) diff --git a/satpy/etc/readers/fci_l1c_nc.yaml b/satpy/etc/readers/fci_l1c_nc.yaml index 4462087a39..d241b3fa9e 100644 --- a/satpy/etc/readers/fci_l1c_nc.yaml +++ b/satpy/etc/readers/fci_l1c_nc.yaml @@ -20,7 +20,6 @@ file_types: expected_segments: 40 required_netcdf_variables: - attr/platform - - data/mtg_geos_projection - data/{channel_name}/measured/start_position_row - data/{channel_name}/measured/end_position_row - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber @@ -35,10 +34,7 @@ file_types: - data/{channel_name}/measured/y - data/{channel_name}/measured/pixel_quality - data/{channel_name}/measured/index_map - - data/mtg_geos_projection/attr/inverse_flattening - - data/mtg_geos_projection/attr/longitude_of_projection_origin - - data/mtg_geos_projection/attr/perspective_point_height - - data/mtg_geos_projection/attr/semi_major_axis + - data/mtg_geos_projection - data/swath_direction - data/swath_number - index @@ -74,7 +70,6 @@ file_types: expected_segments: 40 required_netcdf_variables: - attr/platform - - data/mtg_geos_projection - data/{channel_name}/measured/start_position_row - data/{channel_name}/measured/end_position_row - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber @@ -89,10 +84,7 @@ file_types: - data/{channel_name}/measured/y - data/{channel_name}/measured/pixel_quality - data/{channel_name}/measured/index_map - - data/mtg_geos_projection/attr/inverse_flattening - - data/mtg_geos_projection/attr/longitude_of_projection_origin - - data/mtg_geos_projection/attr/perspective_point_height - - data/mtg_geos_projection/attr/semi_major_axis + - data/mtg_geos_projection - data/swath_direction - data/swath_number - index From e43f3fa59ffc51a43d910ee4784165a8065ae5cb Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 10 Nov 2023 13:59:44 +0100 Subject: [PATCH 0691/1416] Fix nwcsaf_geo start time to be nominal time --- satpy/readers/nwcsaf_nc.py | 5 ++++- satpy/tests/reader_tests/test_nwcsaf_nc.py | 10 +++++++--- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/satpy/readers/nwcsaf_nc.py b/satpy/readers/nwcsaf_nc.py index 1b3d65cb96..5d8320f954 100644 --- a/satpy/readers/nwcsaf_nc.py +++ b/satpy/readers/nwcsaf_nc.py @@ -372,7 +372,10 @@ def __del__(self): @property def start_time(self): """Return the start time of the object.""" - return read_nwcsaf_time(self.nc.attrs["time_coverage_start"]) + try: + return read_nwcsaf_time(self.nc.attrs["nominal_product_time"]) + except KeyError: + return read_nwcsaf_time(self.nc.attrs["time_coverage_start"]) @property def end_time(self): diff --git a/satpy/tests/reader_tests/test_nwcsaf_nc.py b/satpy/tests/reader_tests/test_nwcsaf_nc.py index 2070e5187c..07d6cee174 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_nc.py +++ b/satpy/tests/reader_tests/test_nwcsaf_nc.py @@ -44,6 +44,7 @@ "pal_rgb": 3} NOMINAL_LONGITUDE = 0.0 +NOMINAL_TIME = "2023-01-18T10:30:00Z" START_TIME = "2023-01-18T10:39:17Z" END_TIME = "2023-01-18T10:42:22Z" START_TIME_PPS = "20230118T103917000Z" @@ -57,6 +58,9 @@ global_attrs.update(PROJ) +global_attrs_geo = global_attrs.copy() +global_attrs_geo["nominal_product_time"] = NOMINAL_TIME + CTTH_PALETTE_MEANINGS = ("0 500 1000 1500") COT_PALETTE_MEANINGS = ("0 2 5 8 10 13 16 19 23 26 29 33 36 40 43 47 51 55 59 63 68 72 77 81 86 91 96" @@ -90,7 +94,7 @@ def nwcsaf_geo_ct_filename(tmp_path_factory): return create_nwcsaf_geo_ct_file(tmp_path_factory.mktemp("data")) -def create_nwcsaf_geo_ct_file(directory, 
attrs=global_attrs): +def create_nwcsaf_geo_ct_file(directory, attrs=global_attrs_geo): """Create a CT file.""" filename = directory / "S_NWC_CT_MSG4_MSG-N-VISIR_20230118T103000Z_PLAX.nc" with h5netcdf.File(filename, mode="w") as nc_file: @@ -227,7 +231,7 @@ def nwcsaf_pps_cpp_filehandler(nwcsaf_pps_cpp_filename): @pytest.fixture(scope="session") def nwcsaf_old_geo_ct_filename(tmp_path_factory): """Create a CT file and return the filename.""" - attrs = global_attrs.copy() + attrs = global_attrs_geo.copy() attrs.update(PROJ_KM) attrs["time_coverage_start"] = np.array(["2023-01-18T10:39:17Z"], dtype="S20") return create_nwcsaf_geo_ct_file(tmp_path_factory.mktemp("data-old"), attrs=attrs) @@ -343,7 +347,7 @@ def test_times_are_in_dataset_attributes(self, nwcsaf_geo_ct_filehandler): def test_start_time(self, nwcsaf_geo_ct_filehandler): """Test the start time property.""" - assert nwcsaf_geo_ct_filehandler.start_time == read_nwcsaf_time(START_TIME) + assert nwcsaf_geo_ct_filehandler.start_time == read_nwcsaf_time(NOMINAL_TIME) def test_end_time(self, nwcsaf_geo_ct_filehandler): """Test the end time property.""" From bff527e3bb666f2f30683c678bb0e46051c437fb Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Mon, 13 Nov 2023 17:49:42 +0000 Subject: [PATCH 0692/1416] Update the OSI SAF L3 reader with some suggestions from review. --- satpy/readers/osisaf_l3_nc.py | 31 +++++++++------------- satpy/tests/reader_tests/test_osisaf_l3.py | 2 -- 2 files changed, 12 insertions(+), 21 deletions(-) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index 1574b6ba74..fd0a5cfc20 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright (c) 2023 Satpy developers # # This file is part of satpy. @@ -20,8 +18,6 @@ import logging from datetime import datetime -import numpy as np - from satpy.readers.netcdf_utils import NetCDF4FileHandler logger = logging.getLogger(__name__) @@ -29,18 +25,6 @@ class OSISAFL3NCFileHandler(NetCDF4FileHandler): """Reader for the OSISAF l3 netCDF format.""" - - @staticmethod - def _parse_datetime(datestr): - try: - return datetime.strptime(datestr, "%Y-%m-%d %H:%M:%S") - except ValueError: - try: - return datetime.strptime(datestr, "%Y%m%dT%H%M%SZ") - except ValueError: - return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ") - - def _get_ease_grid(self): """Set up the EASE grid.""" from pyresample import create_area_def @@ -169,14 +153,14 @@ def get_dataset(self, dataset_id, ds_info): valid_min = self._get_ds_attr(var_path + "/attr/valid_min") valid_max = self._get_ds_attr(var_path + "/attr/valid_max") if valid_min is not None and valid_max is not None: - data = data.where(data >= valid_min, np.nan) - data = data.where(data <= valid_max, np.nan) + data = data.where(data >= valid_min) + data = data.where(data <= valid_max) # Try to get the fill value for the data. # If there isn't one, assume all remaining pixels are valid. fill_value = self._get_ds_attr(var_path + "/attr/_FillValue") if fill_value is not None: - data = data.where(data != fill_value, np.nan) + data = data.where(data != fill_value) # Try to get the scale and offset for the data. # As above, not all datasets have these, so fall back on assuming no limits. 
@@ -217,6 +201,15 @@ def _get_platname(self): except KeyError: return self["/attr/platform"] + @staticmethod + def _parse_datetime(datestr): + try: + return datetime.strptime(datestr, "%Y-%m-%d %H:%M:%S") + except ValueError: + try: + return datetime.strptime(datestr, "%Y%m%dT%H%M%SZ") + except ValueError: + return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ") @property def start_time(self): diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py index e037884c04..a9a595202b 100644 --- a/satpy/tests/reader_tests/test_osisaf_l3.py +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright (c) 2023 Satpy developers # # This file is part of satpy. From 7f817731cb6ff125f4e7b3e60afeaa2680f0abe6 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Mon, 13 Nov 2023 17:52:48 +0000 Subject: [PATCH 0693/1416] Remove unneeded function in OSI-SAF L3 --- satpy/readers/osisaf_l3_nc.py | 31 ++++++++++++------------------- 1 file changed, 12 insertions(+), 19 deletions(-) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index fd0a5cfc20..e5e185bb51 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -118,19 +118,12 @@ def get_area_def(self, area_id): return self._get_ftype_grid() - def _get_ds_attr(self, a_name): - """Get a dataset attribute and check it's valid.""" - try: - return self[a_name] - except KeyError: - return None - def _get_ds_units(self, ds_info, var_path): """Find the units of the datasets.""" file_units = ds_info.get("file_units") if file_units is None: - file_units = self._get_ds_attr(var_path + "/attr/units") + file_units = self.get(var_path + "/attr/units") if file_units is None: file_units = 1 return file_units @@ -150,22 +143,22 @@ def get_dataset(self, dataset_id, ds_info): # Try to get the valid limits for the data. # Not all datasets have these, so fall back on assuming no limits. - valid_min = self._get_ds_attr(var_path + "/attr/valid_min") - valid_max = self._get_ds_attr(var_path + "/attr/valid_max") + valid_min = self.get(var_path + "/attr/valid_min") + valid_max = self.get(var_path + "/attr/valid_max") if valid_min is not None and valid_max is not None: data = data.where(data >= valid_min) data = data.where(data <= valid_max) # Try to get the fill value for the data. # If there isn't one, assume all remaining pixels are valid. - fill_value = self._get_ds_attr(var_path + "/attr/_FillValue") + fill_value = self.get(var_path + "/attr/_FillValue") if fill_value is not None: data = data.where(data != fill_value) # Try to get the scale and offset for the data. # As above, not all datasets have these, so fall back on assuming no limits. 
- scale_factor = self._get_ds_attr(var_path + "/attr/scale_factor") - scale_offset = self._get_ds_attr(var_path + "/attr/add_offset") + scale_factor = self.get(var_path + "/attr/scale_factor") + scale_offset = self.get(var_path + "/attr/add_offset") if scale_offset is not None and scale_factor is not None: data = (data * scale_factor + scale_offset) @@ -213,22 +206,22 @@ def _parse_datetime(datestr): @property def start_time(self): - start_t = self._get_ds_attr("/attr/start_date") + start_t = self.get("/attr/start_date") if start_t is None: - start_t = self._get_ds_attr("/attr/start_time") + start_t = self.get("/attr/start_time") if start_t is None: - start_t = self._get_ds_attr("/attr/time_coverage_start") + start_t = self.get("/attr/time_coverage_start") if start_t is None: raise ValueError("Unknown start time attribute.") return self._parse_datetime(start_t) @property def end_time(self): - end_t = self._get_ds_attr("/attr/stop_date") + end_t = self.get("/attr/stop_date") if end_t is None: - end_t = self._get_ds_attr("/attr/stop_time") + end_t = self.get("/attr/stop_time") if end_t is None: - end_t = self._get_ds_attr("/attr/time_coverage_end") + end_t = self.get("/attr/time_coverage_end") if end_t is None: raise ValueError("Unknown stop time attribute.") return self._parse_datetime(end_t) From c450ad502f0a024984ed1d1e326e6df1d3087055 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Mon, 13 Nov 2023 17:55:11 +0000 Subject: [PATCH 0694/1416] Update OSI SAF area def docstring. --- satpy/readers/osisaf_l3_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index e5e185bb51..fa93424518 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -111,7 +111,7 @@ def _get_ftype_grid(self): return self.area_def def get_area_def(self, area_id): - """Override abstract baseclass method""" + """Get the area definition, which varies depending on file type and structure.""" if "grid" in self.filename_info: return self._get_finfo_grid() else: From cd864e300701e98b539de0b02471d2e32a0b25cc Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Mon, 13 Nov 2023 18:00:02 +0000 Subject: [PATCH 0695/1416] Add support for NRT ice concentration files to OSI SAF L3 reader. 
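
The new pattern targets the operational NRT filenames.  As a sanity
check, a trollsift round-trip along these lines should work (the sample
filename below is made up for illustration, not a real product name):

    from trollsift import Parser

    pattern = ("S-OSI_-{product_centre}_-{sensor}-GL_{hemisphere:2s}"
               "_CONCn__-{start_time:%Y%m%d%H%M}Z.nc")
    fname = "S-OSI_-FRA_-MULT-GL_NH_CONCn__-202311130000Z.nc"
    print(Parser(pattern).parse(fname))
    # {'product_centre': 'FRA', 'sensor': 'MULT', 'hemisphere': 'NH',
    #  'start_time': datetime.datetime(2023, 11, 13, 0, 0)}
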
---
 satpy/etc/readers/osisaf_nc.yaml | 4 ++--
 satpy/readers/osisaf_l3_nc.py    | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/satpy/etc/readers/osisaf_nc.yaml b/satpy/etc/readers/osisaf_nc.yaml
index 479b5a38db..d789ae414c 100644
--- a/satpy/etc/readers/osisaf_nc.yaml
+++ b/satpy/etc/readers/osisaf_nc.yaml
@@ -17,7 +17,8 @@ file_types:
   osi_sea_ice_conc:
     file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler
-    file_patterns: ['ice_conc_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc']
+    file_patterns: ['ice_conc_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc',
+                    'S-OSI_-{product_centre}_-{sensor}-GL_{hemisphere:2s}_CONCn__-{start_time:%Y%m%d%H%M}Z.nc']
   osi_sea_ice_edge:
     file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler
     file_patterns: ['ice_edge_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc']
@@ -42,7 +43,6 @@ datasets:
   status_flag:
     name: status_flag
     file_type: [osi_sea_ice_conc, osi_sea_ice_edge, osi_sea_ice_type]
-
   orbit_num_amsr:
     name: orbit_num_amsr
     file_type: [osi_sea_ice_edge, osi_sea_ice_type]
diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py
index fa93424518..0cc5e672b3 100644
--- a/satpy/readers/osisaf_l3_nc.py
+++ b/satpy/readers/osisaf_l3_nc.py
@@ -106,7 +106,7 @@ def _get_ftype_grid(self):
         if self.filetype_info["file_type"] == "osi_radflux_grid":
             self.area_def = self._get_geographic_grid()
             return self.area_def
-        elif self.filetype_info["file_type"] == "osi_sst":
+        elif self.filetype_info["file_type"] in ["osi_sst", "osi_sea_ice_conc"]:
             self.area_def = self._get_polar_stereographic_grid()
             return self.area_def

From fd704fe6db0de046a2462efba497dd562904a734 Mon Sep 17 00:00:00 2001
From: Simon Proud
Date: Mon, 13 Nov 2023 20:32:10 +0000
Subject: [PATCH 0696/1416] Simplify date parsing in OSI SAF reader.

---
 satpy/readers/osisaf_l3_nc.py | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py
index 0cc5e672b3..1affb3a883 100644
--- a/satpy/readers/osisaf_l3_nc.py
+++ b/satpy/readers/osisaf_l3_nc.py
@@ -196,13 +196,12 @@ def _get_platname(self):
 
     @staticmethod
     def _parse_datetime(datestr):
-        try:
-            return datetime.strptime(datestr, "%Y-%m-%d %H:%M:%S")
-        except ValueError:
+        for dt_format in ("%Y-%m-%d %H:%M:%S", "%Y%m%dT%H%M%SZ", "%Y-%m-%dT%H:%M:%SZ"):
             try:
-                return datetime.strptime(datestr, "%Y%m%dT%H%M%SZ")
+                return datetime.strptime(datestr, dt_format)
             except ValueError:
-                return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%SZ")
+                continue
+        raise ValueError(f"Unsupported date format: {datestr}")
 
     @property
     def start_time(self):

From 9ef2af9cbc3642c0dc90a55e8e4e097d475997f8 Mon Sep 17 00:00:00 2001
From: ghiggi
Date: Tue, 14 Nov 2023 11:54:48 +0100
Subject: [PATCH 0697/1416] Avoid modification to CHANGELOG

---
 CHANGELOG.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 12c8e50194..799ae0a867 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1143,7 +1143,7 @@ In this release 6 issues were closed. 
* [PR 1606](https://github.com/pytroll/satpy/pull/1606) - Make FSFile hashable again ([1605](https://github.com/pytroll/satpy/issues/1605), [1604](https://github.com/pytroll/satpy/issues/1604)) * [PR 1603](https://github.com/pytroll/satpy/pull/1603) - Update slstr_l2.yaml * [PR 1600](https://github.com/pytroll/satpy/pull/1600) - When setting `upper_right_corner` make sure that all dataset coordinates are flipped -* [PR 1588](https://github.com/pytroll/satpy/pull/1588) - Bugfix of add_coordinates_attrs_coords ([1493](https://github.com/pytroll/satpy/issues/1493)) +* [PR 1588](https://github.com/pytroll/satpy/pull/1588) - Bugfix of link_coords ([1493](https://github.com/pytroll/satpy/issues/1493)) #### Features added From e96f0de1f935865146a9967e35db65c8f0063b2f Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 14 Nov 2023 12:02:42 +0100 Subject: [PATCH 0698/1416] Rename _encode* functions --- satpy/cf/attrs.py | 30 +++++++++++++++--------------- satpy/tests/cf_tests/test_attrs.py | 6 +++--- 2 files changed, 18 insertions(+), 18 deletions(-) diff --git a/satpy/cf/attrs.py b/satpy/cf/attrs.py index d4a41f2bfc..f4d6ece0d0 100644 --- a/satpy/cf/attrs.py +++ b/satpy/cf/attrs.py @@ -65,7 +65,7 @@ def _encode(self, obj): return str(obj) -def __encode_nc(obj): +def _encode_object(obj): """Try to encode `obj` as a netCDF/Zarr compatible datatype which most closely resembles the object's nature. Raises: @@ -90,23 +90,15 @@ def __encode_nc(obj): raise ValueError('Unable to encode') -def _encode_nc(obj): - """Encode the given object as a netcdf compatible datatype.""" - try: - return obj.to_cf() - except AttributeError: - return _encode_python_objects(obj) - - def _encode_python_objects(obj): """Try to find the datatype which most closely resembles the object's nature. If on failure, encode as a string. Plain lists are encoded recursively. """ if isinstance(obj, (list, tuple)) and all([not isinstance(item, (list, tuple)) for item in obj]): - return [_encode_nc(item) for item in obj] + return [_encode_to_cf(item) for item in obj] try: - dump = __encode_nc(obj) + dump = _encode_object(obj) except ValueError: try: # Decode byte-strings @@ -117,7 +109,15 @@ def _encode_python_objects(obj): return dump -def _encode_attrs_nc(attrs): +def _encode_to_cf(obj): + """Encode the given object as a netcdf compatible datatype.""" + try: + return obj.to_cf() + except AttributeError: + return _encode_python_objects(obj) + + +def _encode_nc_attrs(attrs): """Encode dataset attributes in a netcdf compatible datatype. 
Args: @@ -130,7 +130,7 @@ def _encode_attrs_nc(attrs): encoded_attrs = [] for key, val in sorted(attrs.items()): if val is not None: - encoded_attrs.append((key, _encode_nc(val))) + encoded_attrs.append((key, _encode_to_cf(val))) return OrderedDict(encoded_attrs) @@ -193,7 +193,7 @@ def preprocess_datarray_attrs(dataarray, flatten_attrs, exclude_attrs): if flatten_attrs: dataarray.attrs = flatten_dict(dataarray.attrs) - dataarray.attrs = _encode_attrs_nc(dataarray.attrs) + dataarray.attrs = _encode_nc_attrs(dataarray.attrs) return dataarray @@ -215,7 +215,7 @@ def preprocess_header_attrs(header_attrs, flatten_attrs=False): if header_attrs is not None: if flatten_attrs: header_attrs = flatten_dict(header_attrs) - header_attrs = _encode_attrs_nc(header_attrs) # OrderedDict + header_attrs = _encode_nc_attrs(header_attrs) # OrderedDict else: header_attrs = {} header_attrs = _add_history(header_attrs) diff --git a/satpy/tests/cf_tests/test_attrs.py b/satpy/tests/cf_tests/test_attrs.py index 787d1dc82d..9306ae9749 100644 --- a/satpy/tests/cf_tests/test_attrs.py +++ b/satpy/tests/cf_tests/test_attrs.py @@ -22,16 +22,16 @@ class TestCFAttributeEncoding: """Test case for CF attribute encodings.""" - def test__encode_attrs_nc(self): + def test__encode_nc_attrs(self): """Test attributes encoding.""" - from satpy.cf.attrs import _encode_attrs_nc + from satpy.cf.attrs import _encode_nc_attrs from satpy.tests.cf_tests._test_data import get_test_attrs from satpy.tests.utils import assert_dict_array_equality attrs, expected, _ = get_test_attrs() # Test encoding - encoded = _encode_attrs_nc(attrs) + encoded = _encode_nc_attrs(attrs) assert_dict_array_equality(expected, encoded) # Test decoding of json-encoded attributes From 48df162107648870be89f0af9ddbd4fa6d1d9aba Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 14 Nov 2023 12:04:24 +0100 Subject: [PATCH 0699/1416] Update xarray version --- satpy/tests/writer_tests/test_cf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index f50a8e99d3..fc1fa67343 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -570,5 +570,5 @@ def _should_use_compression_keyword(): versions = _get_backend_versions() return ( versions["libnetcdf"] >= Version("4.9.0") and - versions["xarray"] >= Version("2023.10") + versions["xarray"] >= Version("2023.11") ) From 14b1f066416190b464b630750d12519f00ef5fe4 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 14 Nov 2023 13:36:54 +0100 Subject: [PATCH 0700/1416] Set default epoch argument to None --- satpy/_scene_converters.py | 4 --- satpy/cf/coords.py | 2 +- satpy/cf/dataarray.py | 6 ++++- satpy/cf/datasets.py | 33 ++++++++++++++----------- satpy/scene.py | 2 +- satpy/tests/cf_tests/test_coords.py | 2 -- satpy/tests/cf_tests/test_dataaarray.py | 2 -- satpy/tests/test_writers.py | 3 +-- satpy/writers/cf_writer.py | 20 ++++++++------- 9 files changed, 37 insertions(+), 37 deletions(-) diff --git a/satpy/_scene_converters.py b/satpy/_scene_converters.py index 54ccf85ac9..a890963a06 100644 --- a/satpy/_scene_converters.py +++ b/satpy/_scene_converters.py @@ -90,12 +90,8 @@ def to_xarray(scn, A CF-compliant xr.Dataset """ - from satpy.cf import EPOCH from satpy.cf.datasets import collect_cf_datasets - if epoch is None: - epoch = EPOCH - # Get list of DataArrays if datasets is None: datasets = list(scn.keys()) # list all loaded DataIDs diff --git a/satpy/cf/coords.py b/satpy/cf/coords.py index 
f486502a44..6e7a0892e9 100644 --- a/satpy/cf/coords.py +++ b/satpy/cf/coords.py @@ -188,7 +188,7 @@ def check_unique_projection_coords(dict_dataarrays): token_x = tokenize(dataarray["x"].data) unique_x.add(token_x) if len(unique_x) > 1 or len(unique_y) > 1: - raise ValueError("Datasets to be saved in one file (or one group) must have identical projection coordinates. " + raise ValueError("Datasets to be saved in one file (or one group) must have identical projection coordinates." "Please group them by area or save them in separate files.") diff --git a/satpy/cf/dataarray.py b/satpy/cf/dataarray.py index 5a7779f4c1..a8ac78d9a3 100644 --- a/satpy/cf/dataarray.py +++ b/satpy/cf/dataarray.py @@ -57,7 +57,7 @@ def _preprocess_dataarray_name(dataarray, numeric_name_prefix, include_orig_name def make_cf_dataarray(dataarray, - epoch=EPOCH, + epoch=None, flatten_attrs=False, exclude_attrs=None, include_orig_name=True, @@ -70,6 +70,7 @@ def make_cf_dataarray(dataarray, The data array to be made CF-compliant. epoch : str, optional Reference time for encoding of time coordinates. + If None, the default reference time is retrieved using `from satpy.cf import EPOCH` flatten_attrs : bool, optional If True, flatten dict-type attributes. The default is False. @@ -89,6 +90,9 @@ def make_cf_dataarray(dataarray, CF-compliant xr.DataArray. """ + if epoch is None: + epoch = EPOCH + dataarray = _preprocess_dataarray_name(dataarray=dataarray, numeric_name_prefix=numeric_name_prefix, include_orig_name=include_orig_name) diff --git a/satpy/cf/datasets.py b/satpy/cf/datasets.py index e801be2fdf..d85a943fe7 100644 --- a/satpy/cf/datasets.py +++ b/satpy/cf/datasets.py @@ -61,13 +61,14 @@ def _get_groups(groups, list_datarrays): def _collect_cf_dataset(list_dataarrays, - epoch=EPOCH, - flatten_attrs=False, - exclude_attrs=None, - include_lonlats=True, - pretty=False, - include_orig_name=True, - numeric_name_prefix="CHANNEL_"): + epoch, + flatten_attrs, + exclude_attrs, + include_lonlats, + pretty, + include_orig_name, + numeric_name_prefix, + ): """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Dataset. Parameters @@ -77,19 +78,18 @@ def _collect_cf_dataset(list_dataarrays, epoch : str Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using `from satpy.cf import EPOCH` - flatten_attrs : bool, optional + flatten_attrs : bool If True, flatten dict-type attributes. - exclude_attrs : list, optional + exclude_attrs : list List of xr.DataArray attribute names to be excluded. - include_lonlats : bool, optional + include_lonlats : bool If True, it includes 'latitude' and 'longitude' coordinates also for satpy scene defined on an AreaDefinition. If the 'area' attribute is a SwathDefinition, it always include latitude and longitude coordinates. - pretty : bool, optional + pretty : bool Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. - include_orig_name : bool, optional + include_orig_name : bool Include the original dataset name as a variable attribute in the xr.Dataset. - numeric_name_prefix : str, optional + numeric_name_prefix : str Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. 
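[Editor's note: the change above, repeated for `make_cf_dataarray` and
`_collect_cf_dataset`, is the usual "None sentinel" idiom: the signature
advertises `epoch=None` and the real default is resolved inside the function
body. A minimal sketch with hypothetical names, not the satpy API:

    DEFAULT_EPOCH = "seconds since 1970-01-01 00:00:00"

    def make_cf(dataarray, epoch=None):
        # Resolve the default at call time rather than at import time.
        if epoch is None:
            epoch = DEFAULT_EPOCH
        ...

Resolving the default lazily keeps the signature stable if the constant ever
changes and, as the next patch in this series shows, lets the
`from satpy.cf import EPOCH` import move into the function body to reduce the
risk of circular imports.]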
@@ -180,7 +180,7 @@ def collect_cf_datasets(list_dataarrays, flatten_attrs=False, pretty=True, include_lonlats=True, - epoch=EPOCH, + epoch=None, include_orig_name=True, numeric_name_prefix="CHANNEL_", groups=None): @@ -231,6 +231,9 @@ def collect_cf_datasets(list_dataarrays, from satpy.cf.attrs import preprocess_header_attrs from satpy.cf.coords import add_time_bounds_dimension + if epoch is None: + epoch = EPOCH + if not list_dataarrays: raise RuntimeError("None of the requested datasets have been " "generated or could not be loaded. Requested " diff --git a/satpy/scene.py b/satpy/scene.py index 587721a7be..bb8cf0ffab 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1128,7 +1128,7 @@ def to_xarray(self, epoch (str): Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using "from satpy.cf import EPOCH" + If None, the default reference time is defined using "from satpy.cf import EPOCH" flatten_attrs (bool): If True, flatten dict-type attributes. exclude_attrs (list): diff --git a/satpy/tests/cf_tests/test_coords.py b/satpy/tests/cf_tests/test_coords.py index 2462f59181..68ba319741 100644 --- a/satpy/tests/cf_tests/test_coords.py +++ b/satpy/tests/cf_tests/test_coords.py @@ -228,5 +228,3 @@ def test_has_projection_coords(self, datasets): assert has_projection_coords(datasets) datasets["lat"].attrs["standard_name"] = "dummy" assert not has_projection_coords(datasets) - - # add_xy_coords_attrs diff --git a/satpy/tests/cf_tests/test_dataaarray.py b/satpy/tests/cf_tests/test_dataaarray.py index 099013bbbc..d0154cd84f 100644 --- a/satpy/tests/cf_tests/test_dataaarray.py +++ b/satpy/tests/cf_tests/test_dataaarray.py @@ -114,5 +114,3 @@ def test_make_cf_dataarray_one_dimensional_array(self): arr = xr.DataArray(np.array([1, 2, 3, 4]), attrs={}, dims=("y",), coords={"y": [0, 1, 2, 3], "acq_time": ("y", [0, 1, 2, 3])}) _ = make_cf_dataarray(arr) - - # _handle_dataarray_name diff --git a/satpy/tests/test_writers.py b/satpy/tests/test_writers.py index c2d049dae1..c11066d3f6 100644 --- a/satpy/tests/test_writers.py +++ b/satpy/tests/test_writers.py @@ -874,9 +874,8 @@ def test_group_results_by_output_file(tmp_path): """ from pyresample import create_area_def + from satpy.tests.utils import make_fake_scene from satpy.writers import group_results_by_output_file - - from .utils import make_fake_scene x = 10 fake_area = create_area_def("sargasso", 4326, resolution=1, width=x, height=x, center=(0, 0)) fake_scene = make_fake_scene( diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index de3b445025..7076cc841d 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -162,7 +162,7 @@ import xarray as xr from packaging.version import Version -from satpy.cf import EPOCH +from satpy.cf import EPOCH # noqa: F401 (for backward compatibility) from satpy.writers import Writer logger = logging.getLogger(__name__) @@ -232,7 +232,7 @@ def save_dataset(self, dataset, filename=None, fill_value=None, **kwargs): """Save the *dataset* to a given *filename*.""" return self.save_datasets([dataset], filename, **kwargs) - def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, engine=None, epoch=EPOCH, + def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, engine=None, epoch=None, flatten_attrs=False, exclude_attrs=None, include_lonlats=True, pretty=False, include_orig_name=True, numeric_name_prefix="CHANNEL_", 
**to_netcdf_kwargs): """Save the given datasets in one netCDF file. @@ -256,6 +256,7 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, preference for 'netcdf4'. epoch (str): Reference time for encoding of time coordinates. + If None, the default reference time is defined using `from satpy.cf import EPOCH` flatten_attrs (bool): If True, flatten dict-type attributes. exclude_attrs (list): @@ -326,23 +327,24 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, return written @staticmethod - def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, + def da2cf(dataarray, epoch=None, flatten_attrs=False, exclude_attrs=None, include_orig_name=True, numeric_name_prefix="CHANNEL_"): """Convert the dataarray to something cf-compatible. Args: dataarray (xr.DataArray): - The data array to be converted + The data array to be converted. epoch (str): - Reference time for encoding of time coordinates + Reference time for encoding of time coordinates. + If None, the default reference time is defined using `from satpy.cf import EPOCH` flatten_attrs (bool): - If True, flatten dict-type attributes + If True, flatten dict-type attributes. exclude_attrs (list): - List of dataset attributes to be excluded + List of dataset attributes to be excluded. include_orig_name (bool): - Include the original dataset name in the netcdf variable attributes + Include the original dataset name in the netcdf variable attributes. numeric_name_prefix (str): - Prepend dataset name with this if starting with a digit + Prepend dataset name with this if starting with a digit. """ from satpy.cf.dataarray import make_cf_dataarray warnings.warn("CFWriter.da2cf is deprecated." From ec5f8fc4cafb1a89a39a42a7311077c896ef59a1 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 14 Nov 2023 13:41:05 +0100 Subject: [PATCH 0701/1416] Reduce future risk of circular imports --- satpy/_scene_converters.py | 2 +- satpy/cf/dataarray.py | 5 +++-- satpy/cf/datasets.py | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/satpy/_scene_converters.py b/satpy/_scene_converters.py index a890963a06..c400a159f1 100644 --- a/satpy/_scene_converters.py +++ b/satpy/_scene_converters.py @@ -66,7 +66,7 @@ def to_xarray(scn, epoch (str): Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using "from satpy.cf import EPOCH" + If None, the default reference time is defined using "from satpy.cf import EPOCH" flatten_attrs (bool): If True, flatten dict-type attributes. exclude_attrs (list): diff --git a/satpy/cf/dataarray.py b/satpy/cf/dataarray.py index a8ac78d9a3..39b5d82dc9 100644 --- a/satpy/cf/dataarray.py +++ b/satpy/cf/dataarray.py @@ -19,7 +19,6 @@ import logging import warnings -from satpy.cf import EPOCH from satpy.cf.attrs import preprocess_datarray_attrs from satpy.cf.coords import add_xy_coords_attrs, set_cf_time_info @@ -70,7 +69,7 @@ def make_cf_dataarray(dataarray, The data array to be made CF-compliant. epoch : str, optional Reference time for encoding of time coordinates. - If None, the default reference time is retrieved using `from satpy.cf import EPOCH` + If None, the default reference time is defined using `from satpy.cf import EPOCH` flatten_attrs : bool, optional If True, flatten dict-type attributes. The default is False. @@ -90,6 +89,8 @@ def make_cf_dataarray(dataarray, CF-compliant xr.DataArray. 
""" + from satpy.cf import EPOCH + if epoch is None: epoch = EPOCH diff --git a/satpy/cf/datasets.py b/satpy/cf/datasets.py index d85a943fe7..43b85003de 100644 --- a/satpy/cf/datasets.py +++ b/satpy/cf/datasets.py @@ -22,7 +22,6 @@ import xarray as xr -from satpy.cf import EPOCH from satpy.writers.cf_writer import CF_DTYPES, CF_VERSION logger = logging.getLogger(__name__) @@ -228,6 +227,7 @@ def collect_cf_datasets(list_dataarrays, header_attrs : dict Global attributes to be attached to the xr.Dataset / netCDF4. """ + from satpy.cf import EPOCH from satpy.cf.attrs import preprocess_header_attrs from satpy.cf.coords import add_time_bounds_dimension From 1930845ba2cef1b2849b445a8a906193bb340963 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 14 Nov 2023 13:03:03 +0200 Subject: [PATCH 0702/1416] Keep all FCI radiance, reflectance and brightness temperatures as 32-bit floats --- satpy/readers/fci_l1c_nc.py | 28 +++++++-------- satpy/tests/reader_tests/test_fci_l1c_nc.py | 39 +++++++++++---------- 2 files changed, 35 insertions(+), 32 deletions(-) diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index 2d3e047c5e..0c7b9fb8cc 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -330,13 +330,13 @@ def _get_dataset_measurand(self, key, info=None): fv = attrs.pop( "FillValue", - default_fillvals.get(data.dtype.str[1:], np.nan)) - vr = attrs.get("valid_range", [-np.inf, np.inf]) + default_fillvals.get(data.dtype.str[1:], np.float32(np.nan))) + vr = attrs.get("valid_range", [np.float32(-np.inf), np.float32(np.inf)]) if key["calibration"] == "counts": attrs["_FillValue"] = fv nfv = data.dtype.type(fv) else: - nfv = np.nan + nfv = np.float32(np.nan) data = data.where((data >= vr[0]) & (data <= vr[1]), nfv) res = self.calibrate(data, key) @@ -632,16 +632,15 @@ def calibrate_counts_to_rad(self, data, key): def calibrate_rad_to_bt(self, radiance, key): """IR channel calibration.""" # using the method from PUG section Converting from Effective Radiance to Brightness Temperature for IR Channels - measured = self.get_channel_measured_group_path(key["name"]) - vc = self.get_and_cache_npxr(measured + "/radiance_to_bt_conversion_coefficient_wavenumber") + vc = self.get_and_cache_npxr(measured + "/radiance_to_bt_conversion_coefficient_wavenumber").astype(np.float32) - a = self.get_and_cache_npxr(measured + "/radiance_to_bt_conversion_coefficient_a") - b = self.get_and_cache_npxr(measured + "/radiance_to_bt_conversion_coefficient_b") + a = self.get_and_cache_npxr(measured + "/radiance_to_bt_conversion_coefficient_a").astype(np.float32) + b = self.get_and_cache_npxr(measured + "/radiance_to_bt_conversion_coefficient_b").astype(np.float32) - c1 = self.get_and_cache_npxr(measured + "/radiance_to_bt_conversion_constant_c1") - c2 = self.get_and_cache_npxr(measured + "/radiance_to_bt_conversion_constant_c2") + c1 = self.get_and_cache_npxr(measured + "/radiance_to_bt_conversion_constant_c1").astype(np.float32) + c2 = self.get_and_cache_npxr(measured + "/radiance_to_bt_conversion_constant_c2").astype(np.float32) for v in (vc, a, b, c1, c2): if v == v.attrs.get("FillValue", @@ -652,26 +651,27 @@ def calibrate_rad_to_bt(self, radiance, key): v.attrs.get("long_name", "at least one necessary coefficient"), measured)) - return radiance * np.nan + return radiance * np.float32(np.nan) nom = c2 * vc - denom = a * np.log(1 + (c1 * vc ** 3) / radiance) + denom = a * np.log(1 + (c1 * vc ** np.float32(3.)) / radiance) res = nom / denom - b / a + return res def 
calibrate_rad_to_refl(self, radiance, key): """VIS channel calibration.""" measured = self.get_channel_measured_group_path(key["name"]) - cesi = self.get_and_cache_npxr(measured + "/channel_effective_solar_irradiance") + cesi = self.get_and_cache_npxr(measured + "/channel_effective_solar_irradiance").astype(np.float32) if cesi == cesi.attrs.get( "FillValue", default_fillvals.get(cesi.dtype.str[1:])): logger.error( "channel effective solar irradiance set to fill value, " "cannot produce reflectance for {:s}.".format(measured)) - return radiance * np.nan + return radiance * np.float32(np.nan) sun_earth_distance = np.mean( self.get_and_cache_npxr("state/celestial/earth_sun_distance")) / 149597870.7 # [AU] @@ -683,7 +683,7 @@ def calibrate_rad_to_refl(self, radiance, key): "".format(sun_earth_distance)) sun_earth_distance = 1 - res = 100 * radiance * np.pi * sun_earth_distance ** 2 / cesi + res = 100 * radiance * np.float32(np.pi) * np.float32(sun_earth_distance) ** np.float32(2) / cesi return res diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index 78226fc7ed..792de90462 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -103,18 +103,21 @@ def _get_test_calib_for_channel_ir(data, meas_path): from pyspectral.blackbody import C_SPEED as c from pyspectral.blackbody import H_PLANCK as h from pyspectral.blackbody import K_BOLTZMANN as k - data[meas_path + "/radiance_to_bt_conversion_coefficient_wavenumber"] = FakeH5Variable(da.array(955)) - data[meas_path + "/radiance_to_bt_conversion_coefficient_a"] = FakeH5Variable(da.array(1)) - data[meas_path + "/radiance_to_bt_conversion_coefficient_b"] = FakeH5Variable(da.array(0.4)) - data[meas_path + "/radiance_to_bt_conversion_constant_c1"] = FakeH5Variable(da.array(1e11 * 2 * h * c ** 2)) - data[meas_path + "/radiance_to_bt_conversion_constant_c2"] = FakeH5Variable(da.array(1e2 * h * c / k)) + data[meas_path + "/radiance_to_bt_conversion_coefficient_wavenumber"] = FakeH5Variable( + da.array(955.0, dtype=np.float32)) + data[meas_path + "/radiance_to_bt_conversion_coefficient_a"] = FakeH5Variable(da.array(1.0, dtype=np.float32)) + data[meas_path + "/radiance_to_bt_conversion_coefficient_b"] = FakeH5Variable(da.array(0.4, dtype=np.float32)) + data[meas_path + "/radiance_to_bt_conversion_constant_c1"] = FakeH5Variable( + da.array(1e11 * 2 * h * c ** 2, dtype=np.float32)) + data[meas_path + "/radiance_to_bt_conversion_constant_c2"] = FakeH5Variable( + da.array(1e2 * h * c / k, dtype=np.float32)) return data def _get_test_calib_for_channel_vis(data, meas): data["state/celestial/earth_sun_distance"] = FakeH5Variable( da.repeat(da.array([149597870.7]), 6000), dims=("x")) - data[meas + "/channel_effective_solar_irradiance"] = FakeH5Variable(da.array(50)) + data[meas + "/channel_effective_solar_irradiance"] = FakeH5Variable(da.array((50.0), dtype=np.float32)) return data @@ -124,7 +127,7 @@ def _get_test_calib_data_for_channel(data, ch_str): _get_test_calib_for_channel_ir(data, meas_path) elif ch_str.startswith("vis") or ch_str.startswith("nir"): _get_test_calib_for_channel_vis(data, meas_path) - data[meas_path + "/radiance_unit_conversion_coefficient"] = xr.DataArray(da.array(1234.56)) + data[meas_path + "/radiance_unit_conversion_coefficient"] = xr.DataArray(da.array(1234.56, dtype=np.float32)) def _get_test_image_data_for_channel(data, ch_str, n_rows_cols): @@ -145,8 +148,8 @@ def _get_test_image_data_for_channel(data, ch_str, n_rows_cols): 
dims=("y", "x"), attrs={ "valid_range": [0, 8191], - "warm_scale_factor": 2, - "warm_add_offset": -300, + "warm_scale_factor": np.float32(2.0), + "warm_add_offset": np.float32(-300.0), **common_attrs } ) @@ -156,8 +159,8 @@ def _get_test_image_data_for_channel(data, ch_str, n_rows_cols): dims=("y", "x"), attrs={ "valid_range": [0, 4095], - "warm_scale_factor": 1, - "warm_add_offset": 0, + "warm_scale_factor": np.float32(1.0), + "warm_add_offset": np.float32(0.0), **common_attrs } ) @@ -521,10 +524,10 @@ def test_load_radiance(self, reader_configs, fh_param, fh_param["channels"]["terran_grid_type"]): assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) - assert res[ch].dtype == np.float64 + assert res[ch].dtype == np.float32 assert res[ch].attrs["calibration"] == "radiance" assert res[ch].attrs["units"] == "mW m-2 sr-1 (cm-1)-1" - assert res[ch].attrs["radiance_unit_conversion_coefficient"] == 1234.56 + assert res[ch].attrs["radiance_unit_conversion_coefficient"].values == np.float32(1234.56) if ch == "ir_38": numpy.testing.assert_array_equal(res[ch][-1], 15) numpy.testing.assert_array_equal(res[ch][0], 9700) @@ -544,7 +547,7 @@ def test_load_reflectance(self, reader_configs, fh_param, for ch, grid_type in zip(fh_param["channels"]["solar"], fh_param["channels"]["solar_grid_type"]): assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) - assert res[ch].dtype == np.float64 + assert res[ch].dtype == np.float32 assert res[ch].attrs["calibration"] == "reflectance" assert res[ch].attrs["units"] == "%" numpy.testing.assert_array_almost_equal(res[ch], 100 * 15 * 1 * np.pi / 50) @@ -564,15 +567,15 @@ def test_load_bt(self, reader_configs, caplog, fh_param, for ch, grid_type in zip(fh_param["channels"]["terran"], fh_param["channels"]["terran_grid_type"]): assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) - assert res[ch].dtype == np.float64 + assert res[ch].dtype == np.float32 assert res[ch].attrs["calibration"] == "brightness_temperature" assert res[ch].attrs["units"] == "K" if ch == "ir_38": - numpy.testing.assert_array_almost_equal(res[ch][-1], 209.68274099) - numpy.testing.assert_array_almost_equal(res[ch][0], 1888.851296) + numpy.testing.assert_array_almost_equal(res[ch][-1], np.float32(209.68275)) + numpy.testing.assert_array_almost_equal(res[ch][0], np.float32(1888.8513)) else: - numpy.testing.assert_array_almost_equal(res[ch], 209.68274099) + numpy.testing.assert_array_almost_equal(res[ch], np.float32(209.68275)) @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))]) From 9a8810f4303630c561addd28eab90dfb1ed93d32 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Tue, 14 Nov 2023 14:04:55 +0100 Subject: [PATCH 0703/1416] Fix tests --- satpy/cf/coords.py | 5 +++++ satpy/cf/dataarray.py | 5 ----- satpy/cf/datasets.py | 26 ++++++++++++++------------ satpy/tests/cf_tests/test_datasets.py | 4 +++- 4 files changed, 22 insertions(+), 18 deletions(-) diff --git a/satpy/cf/coords.py b/satpy/cf/coords.py index 6e7a0892e9..05e8a792fd 100644 --- a/satpy/cf/coords.py +++ b/satpy/cf/coords.py @@ -100,6 +100,11 @@ def set_cf_time_info(dataarray, epoch): - the time coordinate has size 1 """ + from satpy.cf import EPOCH + + if epoch is None: + epoch = EPOCH + dataarray["time"].encoding["units"] = epoch 
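     # [Editor's note: writing the epoch into encoding["units"] is what makes
     # xarray store the time axis as numeric offsets from that reference when
     # the dataset is saved. A rough illustration of the assumed backend
     # behaviour, not satpy code:
     #     ds["time"].encoding["units"] = "seconds since 1970-01-01 00:00:00"
     #     ds.to_netcdf(path)  # datetimes written as seconds since that epoch]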
dataarray["time"].attrs["standard_name"] = "time" dataarray["time"].attrs.pop("bounds", None) diff --git a/satpy/cf/dataarray.py b/satpy/cf/dataarray.py index 39b5d82dc9..078c53c462 100644 --- a/satpy/cf/dataarray.py +++ b/satpy/cf/dataarray.py @@ -89,11 +89,6 @@ def make_cf_dataarray(dataarray, CF-compliant xr.DataArray. """ - from satpy.cf import EPOCH - - if epoch is None: - epoch = EPOCH - dataarray = _preprocess_dataarray_name(dataarray=dataarray, numeric_name_prefix=numeric_name_prefix, include_orig_name=include_orig_name) diff --git a/satpy/cf/datasets.py b/satpy/cf/datasets.py index 43b85003de..3cb72af442 100644 --- a/satpy/cf/datasets.py +++ b/satpy/cf/datasets.py @@ -60,14 +60,13 @@ def _get_groups(groups, list_datarrays): def _collect_cf_dataset(list_dataarrays, - epoch, - flatten_attrs, - exclude_attrs, - include_lonlats, - pretty, - include_orig_name, - numeric_name_prefix, - ): + epoch=None, + flatten_attrs=False, + exclude_attrs=None, + include_lonlats=True, + pretty=False, + include_orig_name=True, + numeric_name_prefix="CHANNEL_"): """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Dataset. Parameters @@ -77,17 +76,24 @@ def _collect_cf_dataset(list_dataarrays, epoch : str Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". + If None, the default reference time is defined using `from satpy.cf import EPOCH` + flatten_attrs : bool, optional flatten_attrs : bool If True, flatten dict-type attributes. + exclude_attrs : list, optional exclude_attrs : list List of xr.DataArray attribute names to be excluded. + include_lonlats : bool, optional include_lonlats : bool If True, it includes 'latitude' and 'longitude' coordinates also for satpy scene defined on an AreaDefinition. If the 'area' attribute is a SwathDefinition, it always include latitude and longitude coordinates. + pretty : bool, optional pretty : bool Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. + include_orig_name : bool, optional include_orig_name : bool Include the original dataset name as a variable attribute in the xr.Dataset. + numeric_name_prefix : str, optional numeric_name_prefix : str Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. @@ -227,13 +233,9 @@ def collect_cf_datasets(list_dataarrays, header_attrs : dict Global attributes to be attached to the xr.Dataset / netCDF4. """ - from satpy.cf import EPOCH from satpy.cf.attrs import preprocess_header_attrs from satpy.cf.coords import add_time_bounds_dimension - if epoch is None: - epoch = EPOCH - if not list_dataarrays: raise RuntimeError("None of the requested datasets have been " "generated or could not be loaded. 
Requested " diff --git a/satpy/tests/cf_tests/test_datasets.py b/satpy/tests/cf_tests/test_datasets.py index b234a7c945..e943026509 100644 --- a/satpy/tests/cf_tests/test_datasets.py +++ b/satpy/tests/cf_tests/test_datasets.py @@ -52,7 +52,9 @@ def test_collect_cf_dataset(self): attrs={"name": "var2", "long_name": "variable 2"})] # Collect datasets - ds = _collect_cf_dataset(list_dataarrays, include_lonlats=True) + ds = _collect_cf_dataset(list_dataarrays, + include_lonlats=True, + ) # Test results assert len(ds.keys()) == 3 From 9d1dc6e4f7bbebb4ef3deca2f0a148850c17e1cd Mon Sep 17 00:00:00 2001 From: Florian Fichtner <12199342+fwfichtner@users.noreply.github.com> Date: Tue, 11 Jan 2022 14:26:36 +0100 Subject: [PATCH 0704/1416] Update AVHRR EPS reader to read cloud flags information --- satpy/etc/readers/avhrr_l1b_eps.yaml | 8 ++++++++ satpy/readers/eps_l1b.py | 3 +++ 2 files changed, 11 insertions(+) diff --git a/satpy/etc/readers/avhrr_l1b_eps.yaml b/satpy/etc/readers/avhrr_l1b_eps.yaml index 7bfa0e7160..e759d28d66 100644 --- a/satpy/etc/readers/avhrr_l1b_eps.yaml +++ b/satpy/etc/readers/avhrr_l1b_eps.yaml @@ -89,6 +89,7 @@ datasets: - latitude file_type: avhrr_eps + latitude: name: latitude resolution: 1050 @@ -131,6 +132,13 @@ datasets: coordinates: [longitude, latitude] file_type: avhrr_eps + cloud_flags: + name: cloud_flags + sensor: avhrr-3 + resolution: 1050 + coordinates: [longitude, latitude] + file_type: avhrr_eps + file_types: avhrr_eps: file_reader: !!python/name:satpy.readers.eps_l1b.EPSAVHRRFile diff --git a/satpy/readers/eps_l1b.py b/satpy/readers/eps_l1b.py index 23e4ca712d..0c35a2eaad 100644 --- a/satpy/readers/eps_l1b.py +++ b/satpy/readers/eps_l1b.py @@ -299,6 +299,9 @@ def get_dataset(self, key, info): dataset = self._get_angle_dataarray(key) elif key["name"] in ["1", "2", "3a", "3A", "3b", "3B", "4", "5"]: dataset = self._get_calibrated_dataarray(key) + elif key['name'] == "cloud_flags": + array = self["CLOUD_INFORMATION"] + dataset = create_xarray(array) else: logger.info("Can't load channel in eps_l1b: " + str(key["name"])) return From c3132760affc26f4b13466fe3a01146e3732a4d0 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 15 Nov 2023 12:57:15 +0200 Subject: [PATCH 0705/1416] Keep original dtype in DayNightCompositor --- satpy/composites/__init__.py | 17 ++++++----- satpy/modifiers/angles.py | 2 ++ satpy/tests/test_composites.py | 52 +++++++++++++++++++++------------- 3 files changed, 42 insertions(+), 29 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index d3a1e510cb..e3c9dc190a 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -713,9 +713,7 @@ def __call__( datasets = self.match_data_arrays(datasets) # At least one composite is requested. 
foreground_data = datasets[0] - weights = self._get_coszen_blending_weights(datasets) - # Apply enhancements to the foreground data foreground_data = enhance2dataset(foreground_data) @@ -759,7 +757,6 @@ def _get_coszen_blending_weights( # Calculate blending weights coszen -= np.min((lim_high, lim_low)) coszen /= np.abs(lim_low - lim_high) - return coszen.clip(0, 1) def _get_data_for_single_side_product( @@ -786,8 +783,8 @@ def _mask_weights(self, weights): def _get_day_night_data_for_single_side_product(self, foreground_data): if "day" in self.day_night: - return foreground_data, 0 - return 0, foreground_data + return foreground_data, foreground_data.dtype.type(0) + return foreground_data.dtype.type(0), foreground_data def _get_data_for_combined_product(self, day_data, night_data): # Apply enhancements also to night-side data @@ -848,15 +845,16 @@ def _weight_data( def _get_band_names(day_data, night_data): try: bands = day_data["bands"] - except TypeError: + except (IndexError, TypeError): bands = night_data["bands"] return bands def _get_single_band_data(data, band): - if isinstance(data, int): + try: + return data.sel(bands=band) + except AttributeError: return data - return data.sel(bands=band) def _get_single_channel(data: xr.DataArray) -> xr.DataArray: @@ -894,7 +892,8 @@ def add_alpha_bands(data): alpha = new_data[0].copy() alpha.data = da.ones((data.sizes["y"], data.sizes["x"]), - chunks=new_data[0].chunks) + chunks=new_data[0].chunks, + dtype=data.dtype) # Rename band to indicate it's alpha alpha["bands"] = "A" new_data.append(alpha) diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index 1d059e1f5a..847df7505e 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -395,6 +395,8 @@ def get_cos_sza(data_arr: xr.DataArray) -> xr.DataArray: """ chunks = _geo_chunks_from_data_arr(data_arr) lons, lats = _get_valid_lonlats(data_arr.attrs["area"], chunks) + lons = lons.astype(data_arr.dtype) + lats = lats.astype(data_arr.dtype) cos_sza = _get_cos_sza(data_arr.attrs["start_time"], lons, lats) return _geo_dask_to_data_array(cos_sza) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index bf8a9dfb9e..5bcbca0a1e 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -396,7 +396,7 @@ def setUp(self): start_time = datetime(2018, 1, 1, 18, 0, 0) # RGB - a = np.zeros((3, 2, 2), dtype=np.float64) + a = np.zeros((3, 2, 2), dtype=np.float32) a[:, 0, 0] = 0.1 a[:, 0, 1] = 0.2 a[:, 1, 0] = 0.3 @@ -404,7 +404,7 @@ def setUp(self): a = da.from_array(a, a.shape) self.data_a = xr.DataArray(a, attrs={"test": "a", "start_time": start_time}, coords={"bands": bands}, dims=("bands", "y", "x")) - b = np.zeros((3, 2, 2), dtype=np.float64) + b = np.zeros((3, 2, 2), dtype=np.float32) b[:, 0, 0] = np.nan b[:, 0, 1] = 0.25 b[:, 1, 0] = 0.50 @@ -413,7 +413,7 @@ def setUp(self): self.data_b = xr.DataArray(b, attrs={"test": "b", "start_time": start_time}, coords={"bands": bands}, dims=("bands", "y", "x")) - sza = np.array([[80., 86.], [94., 100.]]) + sza = np.array([[80., 86.], [94., 100.]], dtype=np.float32) sza = da.from_array(sza, sza.shape) self.sza = xr.DataArray(sza, dims=("y", "x")) @@ -437,8 +437,9 @@ def test_daynight_sza(self): comp = DayNightCompositor(name="dn_test", day_night="day_night") res = comp((self.data_a, self.data_b, self.sza)) res = res.compute() - expected = np.array([[0., 0.22122352], [0.5, 1.]]) - np.testing.assert_allclose(res.values[0], expected) + expected = np.array([[0., 0.22122352], [0.5, 1.]], 
dtype=np.float32) + assert res.dtype == np.float32 + np.testing.assert_allclose(res.values[0], expected, rtol=1e-6) def test_daynight_area(self): """Test compositor both day and night portions when SZA data is not provided.""" @@ -448,7 +449,8 @@ def test_daynight_area(self): comp = DayNightCompositor(name="dn_test", day_night="day_night") res = comp((self.data_a, self.data_b)) res = res.compute() - expected_channel = np.array([[0., 0.33164983], [0.66835017, 1.]]) + expected_channel = np.array([[0., 0.33164983], [0.66835017, 1.]], dtype=np.float32) + assert res.dtype == np.float32 for i in range(3): np.testing.assert_allclose(res.values[i], expected_channel) @@ -460,8 +462,9 @@ def test_night_only_sza_with_alpha(self): comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=True) res = comp((self.data_b, self.sza)) res = res.compute() - expected_red_channel = np.array([[np.nan, 0.], [0.5, 1.]]) - expected_alpha = np.array([[0., 0.33296056], [1., 1.]]) + expected_red_channel = np.array([[np.nan, 0.], [0.49999994, 1.]], dtype=np.float32) + expected_alpha = np.array([[0., 0.3329599], [1., 1.]], dtype=np.float32) + assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected_red_channel) np.testing.assert_allclose(res.values[-1], expected_alpha) @@ -473,7 +476,8 @@ def test_night_only_sza_without_alpha(self): comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=False) res = comp((self.data_a, self.sza)) res = res.compute() - expected = np.array([[0., 0.11042631], [0.66835017, 1.]]) + expected = np.array([[0., 0.11042608], [0.6683501, 1.]], dtype=np.float32) + assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected) assert "A" not in res.bands @@ -485,8 +489,10 @@ def test_night_only_area_with_alpha(self): comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=True) res = comp((self.data_b,)) res = res.compute() - expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]]) - expected_alpha = np.array([[np.nan, 0.], [0., 0.]]) + expected_l_channel = np.array([[np.nan, 0.], [0.49999994, 1.]], dtype=np.float32) + # FIXME: with the current changes the np.nan becomes 0.0 instead, why?! 
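+        # [Editor's aside: the float32 expectations pinned throughout these
+        # tests rely on NumPy array-array promotion: a float32 array combined
+        # with a float64 *array* (e.g. a coefficient DataArray) always yields
+        # float64, in every NumPy version:
+        #     a32 = np.ones(3, dtype=np.float32)
+        #     c64 = np.array([2.0])                     # float64
+        #     (a32 * c64).dtype                         # -> float64
+        #     (a32 * c64.astype(np.float32)).dtype      # -> float32
+        # which is why the readers and compositors in this series cast their
+        # coefficients and constants to np.float32 up front.]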
+ expected_alpha = np.array([[np.nan, 0.], [0., 0.]], dtype=np.float32) + assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected_l_channel) np.testing.assert_allclose(res.values[-1], expected_alpha) @@ -498,7 +504,8 @@ def test_night_only_area_without_alpha(self): comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=False) res = comp((self.data_b,)) res = res.compute() - expected = np.array([[np.nan, 0.], [0., 0.]]) + expected = np.array([[np.nan, 0.], [0., 0.]], dtype=np.float32) + assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected) assert "A" not in res.bands @@ -510,8 +517,9 @@ def test_day_only_sza_with_alpha(self): comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=True) res = comp((self.data_a, self.sza)) res = res.compute() - expected_red_channel = np.array([[0., 0.33164983], [0.66835017, 1.]]) - expected_alpha = np.array([[1., 0.66703944], [0., 0.]]) + expected_red_channel = np.array([[0., 0.33164983], [0.66835017, 1.]], dtype=np.float32) + expected_alpha = np.array([[1., 0.6670401], [0., 0.]], dtype=np.float32) + assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected_red_channel) np.testing.assert_allclose(res.values[-1], expected_alpha) @@ -523,7 +531,8 @@ def test_day_only_sza_without_alpha(self): comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=False) res = comp((self.data_a, self.sza)) res = res.compute() - expected_channel_data = np.array([[0., 0.22122352], [0., 0.]]) + expected_channel_data = np.array([[0., 0.22122373], [0., 0.]], dtype=np.float32) + assert res.dtype == np.float32 for i in range(3): np.testing.assert_allclose(res.values[i], expected_channel_data) assert "A" not in res.bands @@ -536,8 +545,9 @@ def test_day_only_area_with_alpha(self): comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=True) res = comp((self.data_a,)) res = res.compute() - expected_l_channel = np.array([[0., 0.33164983], [0.66835017, 1.]]) - expected_alpha = np.array([[1., 1.], [1., 1.]]) + expected_l_channel = np.array([[0., 0.33164983], [0.66835017, 1.]], dtype=np.float32) + expected_alpha = np.array([[1., 1.], [1., 1.]], dtype=np.float32) + assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected_l_channel) np.testing.assert_allclose(res.values[-1], expected_alpha) @@ -549,8 +559,9 @@ def test_day_only_area_with_alpha_and_missing_data(self): comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=True) res = comp((self.data_b,)) res = res.compute() - expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]]) - expected_alpha = np.array([[np.nan, 1.], [1., 1.]]) + expected_l_channel = np.array([[np.nan, 0.], [0.49999994, 1.]], dtype=np.float32) + expected_alpha = np.array([[np.nan, 1.], [1., 1.]], dtype=np.float32) + assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected_l_channel) np.testing.assert_allclose(res.values[-1], expected_alpha) @@ -562,7 +573,8 @@ def test_day_only_area_without_alpha(self): comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=False) res = comp((self.data_a,)) res = res.compute() - expected = np.array([[0., 0.33164983], [0.66835017, 1.]]) + expected = np.array([[0., 0.33164983], [0.66835017, 1.]], dtype=np.float32) + assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected) assert "A" not in res.bands From d8c594b53255d4f82b756de807c4d5f434280997 Mon 
Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 14:45:15 +0000 Subject: [PATCH 0706/1416] Add line to HDF-EOS tests to ensure that line splitting in attrs is handled correctly. --- satpy/tests/reader_tests/modis_tests/_modis_fixtures.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index e4272373b3..0b79e00854 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -365,7 +365,9 @@ def _create_struct_metadata_cmg(res) -> str: def _create_header_metadata() -> str: - archive_metadata_header = "GROUP = ARCHIVEDMETADATA\nEND_GROUP = ARCHIVEDMETADATA\nEND" + archive_metadata_header = ("GROUP = ARCHIVEDMETADATA\n" + 'TEST_URL = "http://modis.gsfc.nasa.gov/?some_val=100"\n' + "END_GROUP = ARCHIVEDMETADATA\nEND") return archive_metadata_header From 19d1b9969c9464d65735f5ce867deadac7e1bfc6 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 14:47:16 +0000 Subject: [PATCH 0707/1416] Fix docstring in MODIS L3 reader. --- satpy/readers/modis_l3.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index dfddc0732b..68c7b435ed 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -20,7 +20,7 @@ Introduction ------------ -The ``modis_l3`` reader reads Modis L3 products in hdf-eos format. +The ``modis_l3`` reader reads MODIS L3 products in HDF-EOS format. There are multiple level 3 products, including some on sinusoidal grids and some on the climate modeling grid (CMG). This reader supports the CMG products at present, and the sinusoidal products will be added if there is demand. From 59e22405c03b408e67f067272b6369aa9a66fa24 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 14:49:46 +0000 Subject: [PATCH 0708/1416] Re-order functions in the MODIS L3 reader. --- satpy/readers/modis_l3.py | 45 ++++++++++++++++++++------------------- 1 file changed, 23 insertions(+), 22 deletions(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index 68c7b435ed..2055d041d9 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -44,21 +44,17 @@ class ModisL3GriddedHDFFileHandler(HDFEOSGeoReader): """File handler for MODIS HDF-EOS Level 3 CMG gridded files.""" - def _get_res(self): - """Compute the resolution from the file metadata.""" - gridname = self.metadata["GridStructure"]["GRID_1"]["GridName"] - if "CMG" not in gridname: - raise ValueError("Only CMG grids are supported") + def __init__(self, filename, filename_info, filetype_info, **kwargs): + """Init the file handler.""" + super().__init__(filename, filename_info, filetype_info, **kwargs) - # Get the grid resolution from the grid name - pos = gridname.rfind("_") + 1 - pos2 = gridname.rfind("Deg") + # Initialise number of rows and columns + self.nrows = self.metadata["GridStructure"]["GRID_1"]["YDim"] + self.ncols = self.metadata["GridStructure"]["GRID_1"]["XDim"] + + # Get the grid name and other projection info + self.area_extent = self._sort_grid() - # Some products don't have resolution listed. - if pos < 0 or pos2 < 0: - self.resolution = 360. 
/ self.ncols - else: - self.resolution = float(gridname[pos:pos2]) def _sort_grid(self): """Get the grid properties.""" @@ -75,19 +71,24 @@ def _sort_grid(self): upperleft = tuple(val / 1e6 for val in upperleft) lowerright = tuple(val / 1e6 for val in lowerright) - return (upperleft[0], lowerright[1], lowerright[0], upperleft[1]) + return upperleft[0], lowerright[1], lowerright[0], upperleft[1] - def __init__(self, filename, filename_info, filetype_info, **kwargs): - """Init the file handler.""" - super().__init__(filename, filename_info, filetype_info, **kwargs) + def _get_res(self): + """Compute the resolution from the file metadata.""" + gridname = self.metadata["GridStructure"]["GRID_1"]["GridName"] + if "CMG" not in gridname: + raise ValueError("Only CMG grids are supported") - # Initialise number of rows and columns - self.nrows = self.metadata["GridStructure"]["GRID_1"]["YDim"] - self.ncols = self.metadata["GridStructure"]["GRID_1"]["XDim"] + # Get the grid resolution from the grid name + pos = gridname.rfind("_") + 1 + pos2 = gridname.rfind("Deg") - # Get the grid name and other projection info - self.area_extent = self._sort_grid() + # Some products don't have resolution listed. + if pos < 0 or pos2 < 0: + self.resolution = 360. / self.ncols + else: + self.resolution = float(gridname[pos:pos2]) def available_datasets(self, configured_datasets=None): From 876d683d758986acc9f277b6266de774eaeca752 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 14:50:55 +0000 Subject: [PATCH 0709/1416] Re-order code in the `available_datasets` function for MODIS L3. --- satpy/readers/modis_l3.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index 2055d041d9..ef0a55975e 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -93,12 +93,11 @@ def _get_res(self): def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" - logger.debug("Available_datasets begin...") - - ds_dict = self.sd.datasets() yield from super().available_datasets(configured_datasets) common = {"file_type": "mcd43_cmg_hdf", "resolution": self.resolution} + ds_dict = self.sd.datasets() + for key in ds_dict.keys(): if "/" in key: # not a dataset continue From 60a4a297427d37252a0a7dab766e7437549ace53 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 14:53:01 +0000 Subject: [PATCH 0710/1416] Remove `self.area` from the MODIS L3 code. --- satpy/readers/modis_l3.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index ef0a55975e..316fa5b9ae 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -127,6 +127,5 @@ def get_area_def(self, dsid): self.ncols, self.nrows, self.area_extent) - self.area = area - return self.area + return area From 01f09aa10ba029d7c05b6703b07f0963fc738a40 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 14:53:41 +0000 Subject: [PATCH 0711/1416] Add missing space in MODIS L3 tests. 
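[Editor's note on the `_get_res` logic reordered in the patches above: the CMG
cell size is recovered by slicing the grid name between its last "_" and the
trailing "Deg". A rough illustration, using a grid name that appears in the
test fixtures later in this series:

    gridname = "MCD_CMG_BRDF_0.05Deg"
    pos = gridname.rfind("_") + 1            # start of the numeric part
    pos2 = gridname.rfind("Deg")             # end of the numeric part
    resolution = float(gridname[pos:pos2])   # -> 0.05 degrees per cell

Grid names without such a suffix fall back to 360 degrees divided by the
column count.]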
--- satpy/tests/reader_tests/modis_tests/test_modis_l3.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l3.py b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py index 23c1af6fc1..c7a0d3e9cf 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l3.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py @@ -53,7 +53,7 @@ def test_available_reader(self): ("loadable", "filename"), [ ("Coarse_Resolution_Surface_Reflectance_Band_2", lazy_fixture("modis_l3_nasa_mod09_file")), - ("BRDF_Albedo_Parameter1_Band2",lazy_fixture("modis_l3_nasa_mod43_file")), + ("BRDF_Albedo_Parameter1_Band2", lazy_fixture("modis_l3_nasa_mod43_file")), ] ) def test_scene_available_datasets(self, loadable, filename): From ef16a1a46105c23a2188586511d159dcd5472f24 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 15:07:54 +0000 Subject: [PATCH 0712/1416] Rename MODIS L3 file type to be more generic and refactor the dynamic dataset handler. --- satpy/etc/readers/modis_l3.yaml | 2 +- satpy/readers/modis_l3.py | 39 +++++++++++++++++++++++++++++---- 2 files changed, 36 insertions(+), 5 deletions(-) diff --git a/satpy/etc/readers/modis_l3.yaml b/satpy/etc/readers/modis_l3.yaml index 5ad2f32e04..608d15601a 100644 --- a/satpy/etc/readers/modis_l3.yaml +++ b/satpy/etc/readers/modis_l3.yaml @@ -9,7 +9,7 @@ reader: sensors: [modis] file_types: - mcd43_cmg_hdf: + modis_l3_cmg_hdf: file_patterns: - 'MCD43C{prod_type}.A{start_time:%Y%j}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D09CMG.A{start_time:%Y%j}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf' diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index 316fa5b9ae..2c2f331c29 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -33,6 +33,7 @@ """ import logging +from typing import Iterable from pyresample import geometry @@ -94,14 +95,44 @@ def _get_res(self): def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" + # Initialise set of variable names to carry through code + handled_var_names = set() + yield from super().available_datasets(configured_datasets) - common = {"file_type": "mcd43_cmg_hdf", "resolution": self.resolution} + ds_dict = self.sd.datasets() - for key in ds_dict.keys(): - if "/" in key: # not a dataset + for is_avail, ds_info in (configured_datasets or []): + file_key = ds_info.get("file_key", ds_info["name"]) + # we must add all variables here even if another file handler has + # claimed the variable. It could be another instance of this file + # type and we don't want to add that variable dynamically if the + # other file handler defined it by the YAML definition. 
+ handled_var_names.add(file_key) + if is_avail is not None: + # some other file handler said it has this dataset + # we don't know any more information than the previous + # file handler so let's yield early + yield is_avail, ds_info + continue + if self.file_type_matches(ds_info["file_type"]) is None: + # this is not the file type for this dataset + yield None, ds_info + yield file_key in ds_dict.keys(), ds_info + + yield from self._dynamic_variables_from_file(handled_var_names) + + def _dynamic_variables_from_file(self, handled_var_names: set) -> Iterable[tuple[bool, dict]]: + + for var_name in self.sd.datasets().keys(): + if var_name in handled_var_names: + # skip variables that YAML had configured, but allow lon/lats + # to be reprocessed due to our dynamic coordinate naming continue - yield True, {"name": key} | common + common = {"file_type": "modis_l3_cmg_hdf", + "resolution": self.resolution, + "name": var_name} + yield True, common def get_dataset(self, dataset_id, dataset_info): """Get DataArray for specified dataset.""" From 9794f3df5a49546323d3ac7644cff308d8ac8204 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 15:18:38 +0000 Subject: [PATCH 0713/1416] Add additional check on MODIS L3 bounds to ensure values are scaled correctly. --- satpy/readers/modis_l3.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index 2c2f331c29..4ff0bd1dc6 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -68,7 +68,7 @@ def _sort_grid(self): # For some reason, a few of the CMG products multiply their # decimal degree extents by one million. This fixes it. - if lowerright[0] > 1e6: + if lowerright[0] > 1e6 or upperleft[0] > 1e6: upperleft = tuple(val / 1e6 for val in upperleft) lowerright = tuple(val / 1e6 for val in lowerright) From 23f2e0f97bc477adfdfb5eae63bc94f1f1ac1b49 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 15:45:59 +0000 Subject: [PATCH 0714/1416] Remove rogue `yield from` in MODIS L3 reader. 
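[Editor's note: with both the superclass delegation and the explicit loop in
place, every configured dataset was reported twice. A minimal sketch of the
duplication, simplified from the reader:

    def available_datasets(self, configured_datasets=None):
        # the base class already yields one (is_avail, ds_info) pair
        # per configured dataset...
        yield from super().available_datasets(configured_datasets)
        # ...and this loop then yields the same pairs a second time
        for is_avail, ds_info in (configured_datasets or []):
            ...
            yield is_avail, ds_info

Removing the delegation leaves a single pass in which this handler decides
availability itself.]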
--- satpy/readers/modis_l3.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index 4ff0bd1dc6..1a205545ac 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -98,8 +98,6 @@ def available_datasets(self, configured_datasets=None): # Initialise set of variable names to carry through code handled_var_names = set() - yield from super().available_datasets(configured_datasets) - ds_dict = self.sd.datasets() for is_avail, ds_info in (configured_datasets or []): From a0e8386b6e332e770bba2edb43ad5a192dc90aca Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 15:56:22 +0000 Subject: [PATCH 0715/1416] Properly handle incorrect datasets for the VIIRS EDR and MODIS --- satpy/readers/modis_l3.py | 1 + satpy/readers/viirs_edr.py | 1 + 2 files changed, 2 insertions(+) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index 1a205545ac..2370609e7a 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -116,6 +116,7 @@ def available_datasets(self, configured_datasets=None): if self.file_type_matches(ds_info["file_type"]) is None: # this is not the file type for this dataset yield None, ds_info + continue yield file_key in ds_dict.keys(), ds_info yield from self._dynamic_variables_from_file(handled_var_names) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 646d7e0d17..b0eaf7b7ba 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -215,6 +215,7 @@ def available_datasets(self, configured_datasets=None): if self.file_type_matches(ds_info["file_type"]) is None: # this is not the file type for this dataset yield None, ds_info + continue yield file_key in self.nc, ds_info yield from self._dynamic_variables_from_file(handled_var_names) From b748479cf9d197b48d2bf830c6edc9cfbcfd030c Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 16:16:18 +0000 Subject: [PATCH 0716/1416] Simplify the MODIS L3 code and add tests for the dynamic dataset availability. --- satpy/readers/modis_l3.py | 62 +++++++++---------- .../reader_tests/modis_tests/test_modis_l3.py | 23 +++++++ 2 files changed, 52 insertions(+), 33 deletions(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index 2370609e7a..485fc1031f 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -49,30 +49,8 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): """Init the file handler.""" super().__init__(filename, filename_info, filetype_info, **kwargs) - # Initialise number of rows and columns - self.nrows = self.metadata["GridStructure"]["GRID_1"]["YDim"] - self.ncols = self.metadata["GridStructure"]["GRID_1"]["XDim"] - - # Get the grid name and other projection info - self.area_extent = self._sort_grid() - - - def _sort_grid(self): - """Get the grid properties.""" - # First, get the grid resolution - self._get_res() - - # Now compute the data extent - upperleft = self.metadata["GridStructure"]["GRID_1"]["UpperLeftPointMtrs"] - lowerright = self.metadata["GridStructure"]["GRID_1"]["LowerRightMtrs"] - - # For some reason, a few of the CMG products multiply their - # decimal degree extents by one million. This fixes it. 
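[Editor's note: the tests added below exercise Satpy's `available_datasets`
protocol, where each item is a `(is_avail, ds_info)` tuple and `is_avail` is
True/False once some handler has decided, or None while the dataset is still
unclaimed. A condensed paraphrase of the reader logic, not a drop-in
replacement:

    def available_datasets(self, configured_datasets=None):
        ds_dict = self.sd.datasets()
        for is_avail, ds_info in (configured_datasets or []):
            file_key = ds_info.get("file_key", ds_info["name"])
            if is_avail is not None:
                # another handler already decided; pass it through
                yield is_avail, ds_info
                continue
            if self.file_type_matches(ds_info["file_type"]) is None:
                # not this handler's file type; leave it undecided
                yield None, ds_info
                continue
            # this handler owns the file type: report real availability
            yield file_key in ds_dict, ds_info

The four `configured_datasets` cases in the new test map directly onto these
branches.]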
- if lowerright[0] > 1e6 or upperleft[0] > 1e6: - upperleft = tuple(val / 1e6 for val in upperleft) - lowerright = tuple(val / 1e6 for val in lowerright) - - return upperleft[0], lowerright[1], lowerright[0], upperleft[1] + # Get the grid resolution, name and other projection info + self.resolution = self._get_res() def _get_res(self): @@ -85,11 +63,12 @@ def _get_res(self): pos = gridname.rfind("_") + 1 pos2 = gridname.rfind("Deg") + # Initialise number of rows and columns # Some products don't have resolution listed. if pos < 0 or pos2 < 0: - self.resolution = 360. / self.ncols + return 360. / self.metadata["GridStructure"]["GRID_1"]["XDim"] else: - self.resolution = float(gridname[pos:pos2]) + return float(gridname[pos:pos2]) def available_datasets(self, configured_datasets=None): @@ -104,7 +83,7 @@ def available_datasets(self, configured_datasets=None): file_key = ds_info.get("file_key", ds_info["name"]) # we must add all variables here even if another file handler has # claimed the variable. It could be another instance of this file - # type and we don't want to add that variable dynamically if the + # type, and we don't want to add that variable dynamically if the # other file handler defined it by the YAML definition. handled_var_names.add(file_key) if is_avail is not None: @@ -122,11 +101,9 @@ def available_datasets(self, configured_datasets=None): yield from self._dynamic_variables_from_file(handled_var_names) def _dynamic_variables_from_file(self, handled_var_names: set) -> Iterable[tuple[bool, dict]]: - for var_name in self.sd.datasets().keys(): if var_name in handled_var_names: - # skip variables that YAML had configured, but allow lon/lats - # to be reprocessed due to our dynamic coordinate naming + # skip variables that YAML had configured continue common = {"file_type": "modis_l3_cmg_hdf", "resolution": self.resolution, @@ -141,6 +118,20 @@ def get_dataset(self, dataset_id, dataset_info): return dataset + def _get_area_extent(self): + """Get the grid properties.""" + + # Now compute the data extent + upperleft = self.metadata["GridStructure"]["GRID_1"]["UpperLeftPointMtrs"] + lowerright = self.metadata["GridStructure"]["GRID_1"]["LowerRightMtrs"] + + # For some reason, a few of the CMG products multiply their + # decimal degree extents by one million. This fixes it. + if lowerright[0] > 1e6 or upperleft[0] > 1e6: + upperleft = tuple(val / 1e6 for val in upperleft) + lowerright = tuple(val / 1e6 for val in lowerright) + + return upperleft[0], lowerright[1], lowerright[0], upperleft[1] def get_area_def(self, dsid): """Get the area definition. 
@@ -150,12 +141,17 @@ def get_area_def(self, dsid): """ proj_param = "EPSG:4326" + # Get the size of the dataset + nrows = self.metadata["GridStructure"]["GRID_1"]["YDim"] + ncols = self.metadata["GridStructure"]["GRID_1"]["XDim"] + + # Construct the area definition area = geometry.AreaDefinition("gridded_modis", "A gridded L3 MODIS area", "longlat", proj_param, - self.ncols, - self.nrows, - self.area_extent) + ncols, + nrows, + self._get_area_extent()) return area diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l3.py b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py index c7a0d3e9cf..3f6a9e8250 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l3.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py @@ -63,6 +63,29 @@ def test_scene_available_datasets(self, loadable, filename): assert len(available_datasets) > 0 assert loadable in available_datasets + from satpy.readers.modis_l3 import ModisL3GriddedHDFFileHandler + fh = ModisL3GriddedHDFFileHandler(filename[0], {}, {"file_type": "modis_l3_cmg_hdf"}) + configured_datasets = [[None, {"name": "none_ds", "file_type": "modis_l3_cmg_hdf"}], + [True, {"name": "true_ds", "file_type": "modis_l3_cmg_hdf"}], + [False, {"name": "false_ds", "file_type": "modis_l3_cmg_hdf"}], + [None, {"name": "other_ds", "file_type": "modis_l2_random"}]] + for status, mda in fh.available_datasets(configured_datasets): + if mda["name"] == "none_ds": + assert mda["file_type"] == "modis_l3_cmg_hdf" + assert status is False + elif mda["name"] == "true_ds": + assert mda["file_type"] == "modis_l3_cmg_hdf" + assert status + elif mda["name"] == "false_ds": + assert mda["file_type"] == "modis_l3_cmg_hdf" + assert status is False + elif mda["name"] == "other_ds": + assert mda["file_type"] == "modis_l2_random" + assert status is None + elif mda["name"] == loadable: + assert mda["file_type"] == "modis_l3_cmg_hdf" + assert status + def test_load_l3_dataset(self, modis_l3_nasa_mod09_file): """Load and check an L2 variable.""" scene = Scene(reader="modis_l3", filenames=modis_l3_nasa_mod09_file) From 3dab2c6587b92f98214ce4d204958074051f2a7b Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 17:20:46 +0000 Subject: [PATCH 0717/1416] Refactor MODIS tests. 
--- .../modis_tests/_modis_fixtures.py | 80 +++++++++---------- .../reader_tests/modis_tests/test_modis_l3.py | 4 +- 2 files changed, 38 insertions(+), 46 deletions(-) diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index 0b79e00854..c702752b28 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -51,9 +51,6 @@ def _shape_for_resolution(resolution: int) -> tuple[int, int]: - # Case of a CMG 0.05 degree file, for L3 tests - if resolution == -999: - return 3600, 7200 assert resolution in RES_TO_REPEAT_FACTOR factor = RES_TO_REPEAT_FACTOR[resolution] if factor == 1: @@ -229,21 +226,9 @@ def generate_imapp_filename(suffix): return f"t1.{now:%y%j.%H%M}.{suffix}.hdf" -def _add_geo_metadata(h, geo_res): - """Add the geoinfo metadata to the fake file.""" - if geo_res == -999 or geo_res == -9999: - setattr(h, 'StructMetadata.0', _create_struct_metadata_cmg(geo_res)) # noqa - else: - setattr(h, 'StructMetadata.0', _create_struct_metadata(geo_res)) # noqa - - return h - - def create_hdfeos_test_file(filename: str, variable_infos: dict, - geo_resolution: Optional[int] = None, - file_shortname: Optional[str] = None, - include_metadata: bool = True): + metadata_dict: Optional[dict] = {}): """Create a fake MODIS L1b HDF4 file with headers. Args: @@ -256,17 +241,27 @@ def create_hdfeos_test_file(filename: str, file_shortname: Short name of the file to be stored in global metadata attributes. Only used if ``include_metadata`` is ``True`` (default). - include_metadata: Include global metadata attributes (default: True). + metadata_dict: A dictionary of metadata to be added to the file. """ h = SD(filename, SDC.WRITE | SDC.CREATE) - if include_metadata: - if geo_resolution is None or file_shortname is None: - raise ValueError("'geo_resolution' and 'file_shortname' are required when including metadata.") - h = _add_geo_metadata(h, geo_resolution) - setattr(h, 'CoreMetadata.0', _create_core_metadata(file_shortname)) # noqa - setattr(h, 'ArchiveMetadata.0', _create_header_metadata()) # noqa + if metadata_dict is not None and metadata_dict != {}: + # Check if we're dealing with an L3 file + if "l3_type" not in metadata_dict.keys(): + if "file_shortname" not in metadata_dict["file_shortname"].keys(): + raise ValueError("'file_shortname' is required when including metadata.") + # For L1 and L2 files we need to know the resolution + if "geo_resolution" not in metadata_dict.keys(): + raise ValueError("'geo_resolution' is required when including L1/L2 metadata.") + setattr(h, "StructMetadata.0", _create_struct_metadata(metadata_dict["geo_resolution"])) + setattr(h, "CoreMetadata.0", _create_core_metadata(metadata_dict["file_shortname"])) # noqa + else: + # For an L3 file, we just call the relevant metadata creator + setattr(h, "StructMetadata.0", _create_struct_metadata_cmg(metadata_dict["l3_type"])) + setattr(h, "CoreMetadata.0", _create_core_metadata(metadata_dict["l3_type"])) # noqa + + setattr(h, "ArchiveMetadata.0", _create_header_metadata()) # noqa for var_name, var_info in variable_infos.items(): _add_variable_to_file(h, var_name, var_info) @@ -339,13 +334,14 @@ def _create_struct_metadata(geo_resolution: int) -> str: return struct_metadata_header -def _create_struct_metadata_cmg(res) -> str: +def _create_struct_metadata_cmg(ftype: str) -> str: # Case of a MOD09 file - gridline = 'GridName="MOD09CMG"\n' - upleft = 
"UpperLeftPointMtrs=(-180000000.000000,90000000.000000)\n" - upright = "LowerRightMtrs=(180000000.000000,-90000000.000000)\n" - if res == -9999: - # Case of a MCD43 file + if ftype == "MOD09": + gridline = 'GridName="MOD09CMG"\n' + upleft = "UpperLeftPointMtrs=(-180000000.000000,90000000.000000)\n" + upright = "LowerRightMtrs=(180000000.000000,-90000000.000000)\n" + # Case of a MCD43 file + elif ftype == "MCD43C1": gridline = 'GridName="MCD_CMG_BRDF_0.05Deg"\n' upleft = "UpperLeftPointMtrs=(-180.000000,90.000000)\n" upright = "LowerRightMtrs=(180.000000,-90.000000)\n" @@ -381,7 +377,7 @@ def modis_l1b_nasa_mod021km_file(tmpdir_factory) -> list[str]: variable_infos.update(_get_visible_variable_info("EV_500_Aggr1km_RefSB", 1000, AVAILABLE_HKM_PRODUCT_NAMES)) variable_infos.update(_get_visible_variable_info("EV_250_Aggr1km_RefSB", 1000, AVAILABLE_QKM_PRODUCT_NAMES)) variable_infos.update(_get_emissive_variable_info("EV_1KM_Emissive", 1000, AVAILABLE_1KM_IR_PRODUCT_NAMES)) - create_hdfeos_test_file(full_path, variable_infos, geo_resolution=5000, file_shortname="MOD021KM") + create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 5000, "file_shortname": "MOD021KM"}) return [full_path] @@ -395,7 +391,7 @@ def modis_l1b_imapp_1000m_file(tmpdir_factory) -> list[str]: variable_infos.update(_get_visible_variable_info("EV_500_Aggr1km_RefSB", 1000, AVAILABLE_HKM_PRODUCT_NAMES)) variable_infos.update(_get_visible_variable_info("EV_250_Aggr1km_RefSB", 1000, AVAILABLE_QKM_PRODUCT_NAMES)) variable_infos.update(_get_emissive_variable_info("EV_1KM_Emissive", 1000, AVAILABLE_1KM_IR_PRODUCT_NAMES)) - create_hdfeos_test_file(full_path, variable_infos, geo_resolution=5000, file_shortname="MOD021KM") + create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 5000, "file_shortname": "MOD021KM"}) return [full_path] @@ -406,7 +402,7 @@ def modis_l1b_nasa_mod02hkm_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=False) variable_infos.update(_get_visible_variable_info("EV_500_RefSB", 250, AVAILABLE_QKM_PRODUCT_NAMES)) - create_hdfeos_test_file(full_path, variable_infos, geo_resolution=1000, file_shortname="MOD02HKM") + create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 1000, "file_shortname": "MOD02HKM"}) return [full_path] @@ -417,7 +413,7 @@ def modis_l1b_nasa_mod02qkm_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=False) variable_infos.update(_get_visible_variable_info("EV_250_RefSB", 250, AVAILABLE_QKM_PRODUCT_NAMES)) - create_hdfeos_test_file(full_path, variable_infos, geo_resolution=1000, file_shortname="MOD02QKM") + create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 1000, "file_shortname": "MOD02QKM"}) return [full_path] @@ -427,7 +423,7 @@ def modis_l1b_nasa_mod03_file(tmpdir_factory) -> list[str]: filename = generate_nasa_l1b_filename("MOD03") full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=True) - create_hdfeos_test_file(full_path, variable_infos, geo_resolution=1000, file_shortname="MOD03") + create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 1000, "file_shortname": "MOD03"}) return [full_path] @@ -437,7 +433,7 @@ def modis_l1b_imapp_geo_file(tmpdir_factory) -> list[str]: 
filename = generate_imapp_filename("geo") full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=True) - create_hdfeos_test_file(full_path, variable_infos, geo_resolution=1000, file_shortname="MOD03") + create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 1000, "file_shortname": "MOD03"}) return [full_path] @@ -595,7 +591,7 @@ def modis_l2_nasa_mod35_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=True) variable_infos.update(_get_cloud_mask_variable_info("Cloud_Mask", 1000)) - create_hdfeos_test_file(full_path, variable_infos, geo_resolution=5000, file_shortname="MOD35") + create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 5000, "file_shortname": "MOD35"}) return [full_path] @@ -605,12 +601,12 @@ def generate_nasa_l3_filename(prefix: str) -> str: return f"{prefix}.A{now:%Y%j}.061.{now:%Y%j%H%M%S}.hdf" -def modis_l3_file(tmpdir_factory, f_prefix, var_name, geo_res, f_short): +def modis_l3_file(tmpdir_factory, f_prefix, var_name, f_short): """Create a MODIS L3 file of the desired type.""" filename = generate_nasa_l3_filename(f_prefix) full_path = str(tmpdir_factory.mktemp("modis_l3").join(filename)) variable_infos = _get_l3_refl_variable_info(var_name) - create_hdfeos_test_file(full_path, variable_infos, geo_resolution=geo_res, file_shortname=f_short) + create_hdfeos_test_file(full_path, variable_infos, {"l3_type": f_short}) return [full_path] @@ -620,7 +616,6 @@ def modis_l3_nasa_mod09_file(tmpdir_factory) -> list[str]: return modis_l3_file(tmpdir_factory, "MOD09CMG", "Coarse_Resolution_Surface_Reflectance_Band_2", - -999, "MOD09") @@ -630,7 +625,6 @@ def modis_l3_nasa_mod43_file(tmpdir_factory) -> list[str]: return modis_l3_file(tmpdir_factory, "MCD43C1", "BRDF_Albedo_Parameter1_Band2", - -9999, "MCD43C1") @@ -647,7 +641,7 @@ def modis_l2_nasa_mod06_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=True) variable_infos.update(_get_basic_variable_info("Surface_Pressure", 5000)) - create_hdfeos_test_file(full_path, variable_infos, geo_resolution=5000, file_shortname="MOD06") + create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 5000, "file_shortname": "MOD06"}) return [full_path] @@ -658,7 +652,7 @@ def modis_l2_imapp_snowmask_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=False) variable_infos.update(_get_basic_variable_info("Snow_Mask", 1000)) - create_hdfeos_test_file(full_path, variable_infos, include_metadata=False) + create_hdfeos_test_file(full_path, variable_infos, {}) return [full_path] @@ -675,7 +669,7 @@ def modis_l2_imapp_mask_byte1_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=False) variable_infos.update(_get_mask_byte1_variable_info()) - create_hdfeos_test_file(full_path, variable_infos, include_metadata=False) + create_hdfeos_test_file(full_path, variable_infos, {}) return [full_path] diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l3.py b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py index 3f6a9e8250..de8ff682a1 
100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l3.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py @@ -27,8 +27,6 @@ from satpy import Scene, available_readers -from ._modis_fixtures import _shape_for_resolution - def _expected_area(): proj_param = "EPSG:4326" @@ -101,6 +99,6 @@ def test_load_l3_dataset(self, modis_l3_nasa_mod09_file): assert data_arr_comp.dtype == data_arr.dtype assert data_arr_comp.dtype == np.float32 - assert data_arr_comp.shape == _shape_for_resolution(-999) + assert data_arr_comp.shape == (3600, 7200) assert data_arr_comp.attrs.get("resolution") == 0.05 assert data_arr_comp.attrs.get("area") == _expected_area() From 1b136b9c19f15f121c4468b97cfcdf9ff4380662 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 17:21:58 +0000 Subject: [PATCH 0718/1416] Fix bug in MODIS tests. --- satpy/tests/reader_tests/modis_tests/_modis_fixtures.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index c702752b28..7c20091d31 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -249,7 +249,7 @@ def create_hdfeos_test_file(filename: str, if metadata_dict is not None and metadata_dict != {}: # Check if we're dealing with an L3 file if "l3_type" not in metadata_dict.keys(): - if "file_shortname" not in metadata_dict["file_shortname"].keys(): + if "file_shortname" not in metadata_dict.keys(): raise ValueError("'file_shortname' is required when including metadata.") # For L1 and L2 files we need to know the resolution if "geo_resolution" not in metadata_dict.keys(): From d0774021726ab09b17c1fbdd4bde8e4650e9a034 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 17:25:08 +0000 Subject: [PATCH 0719/1416] Remove mutable argument from modis tests. Co-authored-by: David Hoese --- satpy/tests/reader_tests/modis_tests/_modis_fixtures.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index 7c20091d31..84ac7fc5ae 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -228,7 +228,7 @@ def generate_imapp_filename(suffix): def create_hdfeos_test_file(filename: str, variable_infos: dict, - metadata_dict: Optional[dict] = {}): + metadata_dict: Optional[dict] = None): """Create a fake MODIS L1b HDF4 file with headers. Args: From fe554dd1763de12226d53a5ec643623ac380c1ae Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 17:59:46 +0000 Subject: [PATCH 0720/1416] Additional refactoring of MODIS tests. 
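``create_hdfeos_test_file`` is reworked once more: instead of interpreting a
metadata dict internally, it now receives the three global metadata headers as
ready-made strings built by the callers. A short sketch of the resulting call
sites, mirroring the fixture hunks below:

    create_hdfeos_test_file(full_path,
                            variable_infos,
                            _create_struct_metadata(5000),
                            _create_core_metadata("MOD021KM"),
                            _create_header_metadata())
    # Leaving the header arguments at their defaults writes no metadata at all
    create_hdfeos_test_file(full_path, variable_infos)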
---
 .../modis_tests/_modis_fixtures.py            | 98 ++++++++++++-------
 1 file changed, 61 insertions(+), 37 deletions(-)

diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py
index 84ac7fc5ae..e792b70d89 100644
--- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py
+++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py
@@ -19,7 +19,6 @@
 from __future__ import annotations
 
 from datetime import datetime, timedelta
-from typing import Optional
 
 import numpy as np
 import pytest
@@ -228,40 +227,29 @@ def generate_imapp_filename(suffix):
 
 def create_hdfeos_test_file(filename: str,
                             variable_infos: dict,
-                            metadata_dict: Optional[dict] = None):
+                            struct_meta: str = "",
+                            core_meta: str = "",
+                            archive_meta: str = "",
+                            ) -> None:
     """Create a fake MODIS L1b HDF4 file with headers.
 
     Args:
         filename: Full path of filename to be created.
         variable_infos: Dictionary mapping HDF4 variable names to dictionary
            of variable information (see ``_add_variable_to_file``).
-        geo_resolution: Resolution of geolocation datasets to be stored in the
-            metadata strings stored in the global metadata attributes. Only
-            used if ``include_metadata`` is ``True`` (default).
-        file_shortname: Short name of the file to be stored in global metadata
-            attributes. Only used if ``include_metadata`` is ``True``
-            (default).
-        metadata_dict: A dictionary of metadata to be added to the file.
+        struct_meta: Contents of the 'StructMetadata.0' header.
+        core_meta: Contents of the 'CoreMetadata.0' header.
+        archive_meta: Contents of the 'ArchiveMetadata.0' header.
 
     """
     h = SD(filename, SDC.WRITE | SDC.CREATE)
 
-    if metadata_dict is not None and metadata_dict != {}:
-        # Check if we're dealing with an L3 file
-        if "l3_type" not in metadata_dict.keys():
-            if "file_shortname" not in metadata_dict.keys():
-                raise ValueError("'file_shortname' is required when including metadata.")
-            # For L1 and L2 files we need to know the resolution
-            if "geo_resolution" not in metadata_dict.keys():
-                raise ValueError("'geo_resolution' is required when including L1/L2 metadata.")
-            setattr(h, "StructMetadata.0", _create_struct_metadata(metadata_dict["geo_resolution"]))
-            setattr(h, "CoreMetadata.0", _create_core_metadata(metadata_dict["file_shortname"]))  # noqa
-        else:
-            # For an L3 file, we just call the relevant metadata creator
-            setattr(h, "StructMetadata.0", _create_struct_metadata_cmg(metadata_dict["l3_type"]))
-            setattr(h, "CoreMetadata.0", _create_core_metadata(metadata_dict["l3_type"]))  # noqa
-
-        setattr(h, "ArchiveMetadata.0", _create_header_metadata())  # noqa
+    if struct_meta != "":
+        setattr(h, "StructMetadata.0", struct_meta)
+    if core_meta != "":
+        setattr(h, "CoreMetadata.0", core_meta)
+    if archive_meta != "":
+        setattr(h, "ArchiveMetadata.0", archive_meta)
 
     for var_name, var_info in variable_infos.items():
         _add_variable_to_file(h, var_name, var_info)
@@ -341,7 +329,7 @@ def _create_struct_metadata_cmg(ftype: str) -> str:
         upleft = "UpperLeftPointMtrs=(-180000000.000000,90000000.000000)\n"
         upright = "LowerRightMtrs=(180000000.000000,-90000000.000000)\n"
     # Case of a MCD43 file
-    elif ftype == "MCD43C1":
+    else:
         gridline = 'GridName="MCD_CMG_BRDF_0.05Deg"\n'
         upleft = "UpperLeftPointMtrs=(-180.000000,90.000000)\n"
         upright = "LowerRightMtrs=(180.000000,-90.000000)\n"
@@ -377,7 +365,11 @@ def modis_l1b_nasa_mod021km_file(tmpdir_factory) -> list[str]:
     variable_infos.update(_get_visible_variable_info("EV_500_Aggr1km_RefSB", 1000, AVAILABLE_HKM_PRODUCT_NAMES))
variable_infos.update(_get_visible_variable_info("EV_250_Aggr1km_RefSB", 1000, AVAILABLE_QKM_PRODUCT_NAMES)) variable_infos.update(_get_emissive_variable_info("EV_1KM_Emissive", 1000, AVAILABLE_1KM_IR_PRODUCT_NAMES)) - create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 5000, "file_shortname": "MOD021KM"}) + create_hdfeos_test_file(full_path, + variable_infos, + _create_struct_metadata(5000), + _create_core_metadata("MOD021KM"), + _create_header_metadata()) return [full_path] @@ -391,7 +383,11 @@ def modis_l1b_imapp_1000m_file(tmpdir_factory) -> list[str]: variable_infos.update(_get_visible_variable_info("EV_500_Aggr1km_RefSB", 1000, AVAILABLE_HKM_PRODUCT_NAMES)) variable_infos.update(_get_visible_variable_info("EV_250_Aggr1km_RefSB", 1000, AVAILABLE_QKM_PRODUCT_NAMES)) variable_infos.update(_get_emissive_variable_info("EV_1KM_Emissive", 1000, AVAILABLE_1KM_IR_PRODUCT_NAMES)) - create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 5000, "file_shortname": "MOD021KM"}) + create_hdfeos_test_file(full_path, + variable_infos, + _create_struct_metadata(5000), + _create_core_metadata("MOD021KM"), + _create_header_metadata()) return [full_path] @@ -402,7 +398,11 @@ def modis_l1b_nasa_mod02hkm_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=False) variable_infos.update(_get_visible_variable_info("EV_500_RefSB", 250, AVAILABLE_QKM_PRODUCT_NAMES)) - create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 1000, "file_shortname": "MOD02HKM"}) + create_hdfeos_test_file(full_path, + variable_infos, + _create_struct_metadata(1000), + _create_core_metadata("MOD02HKM"), + _create_header_metadata()) return [full_path] @@ -413,7 +413,11 @@ def modis_l1b_nasa_mod02qkm_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=False) variable_infos.update(_get_visible_variable_info("EV_250_RefSB", 250, AVAILABLE_QKM_PRODUCT_NAMES)) - create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 1000, "file_shortname": "MOD02QKM"}) + create_hdfeos_test_file(full_path, + variable_infos, + _create_struct_metadata(1000), + _create_core_metadata("MOD02QKM"), + _create_header_metadata()) return [full_path] @@ -423,7 +427,11 @@ def modis_l1b_nasa_mod03_file(tmpdir_factory) -> list[str]: filename = generate_nasa_l1b_filename("MOD03") full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=True) - create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 1000, "file_shortname": "MOD03"}) + create_hdfeos_test_file(full_path, + variable_infos, + _create_struct_metadata(1000), + _create_core_metadata("MOD03"), + _create_header_metadata()) return [full_path] @@ -433,7 +441,11 @@ def modis_l1b_imapp_geo_file(tmpdir_factory) -> list[str]: filename = generate_imapp_filename("geo") full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=True) - create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 1000, "file_shortname": "MOD03"}) + create_hdfeos_test_file(full_path, + variable_infos, + _create_struct_metadata(1000), + _create_core_metadata("MOD03"), + _create_header_metadata()) return [full_path] @@ -591,7 +603,11 @@ def 
modis_l2_nasa_mod35_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=True) variable_infos.update(_get_cloud_mask_variable_info("Cloud_Mask", 1000)) - create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 5000, "file_shortname": "MOD35"}) + create_hdfeos_test_file(full_path, + variable_infos, + _create_struct_metadata(5000), + _create_core_metadata("MOD35"), + _create_header_metadata()) return [full_path] @@ -606,7 +622,11 @@ def modis_l3_file(tmpdir_factory, f_prefix, var_name, f_short): filename = generate_nasa_l3_filename(f_prefix) full_path = str(tmpdir_factory.mktemp("modis_l3").join(filename)) variable_infos = _get_l3_refl_variable_info(var_name) - create_hdfeos_test_file(full_path, variable_infos, {"l3_type": f_short}) + create_hdfeos_test_file(full_path, + variable_infos, + _create_struct_metadata_cmg(f_short), + _create_core_metadata(f_short), + _create_header_metadata()) return [full_path] @@ -641,7 +661,11 @@ def modis_l2_nasa_mod06_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=True) variable_infos.update(_get_basic_variable_info("Surface_Pressure", 5000)) - create_hdfeos_test_file(full_path, variable_infos, {"geo_resolution": 5000, "file_shortname": "MOD06"}) + create_hdfeos_test_file(full_path, + variable_infos, + _create_struct_metadata(5000), + _create_core_metadata("MOD06"), + _create_header_metadata()) return [full_path] @@ -652,7 +676,7 @@ def modis_l2_imapp_snowmask_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=False) variable_infos.update(_get_basic_variable_info("Snow_Mask", 1000)) - create_hdfeos_test_file(full_path, variable_infos, {}) + create_hdfeos_test_file(full_path, variable_infos) return [full_path] @@ -669,7 +693,7 @@ def modis_l2_imapp_mask_byte1_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=False) variable_infos.update(_get_mask_byte1_variable_info()) - create_hdfeos_test_file(full_path, variable_infos, {}) + create_hdfeos_test_file(full_path, variable_infos) return [full_path] From a211edd9ba77c169b8f5989521a1be42d879b02f Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 19:34:15 +0000 Subject: [PATCH 0721/1416] Refactor MODIS L3 reader and improve tests. 
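The grid resolution is no longer computed eagerly in ``__init__``; ``_get_res``
moves below ``available_datasets`` and is called lazily per file. For reference,
a standalone sketch of the grid-name parsing (the helper name here is
hypothetical; the logic mirrors ``_get_res`` in the hunk below):

    def _resolution_from_gridname(gridname: str, xdim: int) -> float:
        # "MCD_CMG_BRDF_0.05Deg" carries the resolution in its name;
        # "MOD09CMG" does not, so we fall back to 360 degrees / XDim.
        pos = gridname.rfind("_") + 1
        pos2 = gridname.rfind("Deg")
        if pos < 0 or pos2 < 0:
            return 360. / xdim
        return float(gridname[pos:pos2])

    assert _resolution_from_gridname("MCD_CMG_BRDF_0.05Deg", 7200) == 0.05
    assert _resolution_from_gridname("MOD09CMG", 7200) == 0.05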
--- satpy/readers/modis_l3.py | 48 ++++++++----------- .../modis_tests/_modis_fixtures.py | 13 ++--- 2 files changed, 27 insertions(+), 34 deletions(-) diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index 485fc1031f..29e0247fdc 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -44,33 +44,6 @@ class ModisL3GriddedHDFFileHandler(HDFEOSGeoReader): """File handler for MODIS HDF-EOS Level 3 CMG gridded files.""" - - def __init__(self, filename, filename_info, filetype_info, **kwargs): - """Init the file handler.""" - super().__init__(filename, filename_info, filetype_info, **kwargs) - - # Get the grid resolution, name and other projection info - self.resolution = self._get_res() - - - def _get_res(self): - """Compute the resolution from the file metadata.""" - gridname = self.metadata["GridStructure"]["GRID_1"]["GridName"] - if "CMG" not in gridname: - raise ValueError("Only CMG grids are supported") - - # Get the grid resolution from the grid name - pos = gridname.rfind("_") + 1 - pos2 = gridname.rfind("Deg") - - # Initialise number of rows and columns - # Some products don't have resolution listed. - if pos < 0 or pos2 < 0: - return 360. / self.metadata["GridStructure"]["GRID_1"]["XDim"] - else: - return float(gridname[pos:pos2]) - - def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" @@ -101,15 +74,34 @@ def available_datasets(self, configured_datasets=None): yield from self._dynamic_variables_from_file(handled_var_names) def _dynamic_variables_from_file(self, handled_var_names: set) -> Iterable[tuple[bool, dict]]: + res = self._get_res() for var_name in self.sd.datasets().keys(): if var_name in handled_var_names: # skip variables that YAML had configured continue common = {"file_type": "modis_l3_cmg_hdf", - "resolution": self.resolution, + "resolution": res, "name": var_name} yield True, common + + def _get_res(self): + """Compute the resolution from the file metadata.""" + gridname = self.metadata["GridStructure"]["GRID_1"]["GridName"] + if "CMG" not in gridname: + raise ValueError("Only CMG grids are supported") + + # Get the grid resolution from the grid name + pos = gridname.rfind("_") + 1 + pos2 = gridname.rfind("Deg") + + # Initialise number of rows and columns + # Some products don't have resolution listed. + if pos < 0 or pos2 < 0: + return 360. / self.metadata["GridStructure"]["GRID_1"]["XDim"] + else: + return float(gridname[pos:pos2]) + def get_dataset(self, dataset_id, dataset_info): """Get DataArray for specified dataset.""" dataset_name = dataset_id["name"] diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index e792b70d89..6dc4bf2d05 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -19,6 +19,7 @@ from __future__ import annotations from datetime import datetime, timedelta +from typing import Optional import numpy as np import pytest @@ -227,9 +228,9 @@ def generate_imapp_filename(suffix): def create_hdfeos_test_file(filename: str, variable_infos: dict, - struct_meta: str = "", - core_meta: str = "", - archive_meta: str = "", + struct_meta: Optional[str] = None, + core_meta: Optional[str] = None, + archive_meta: Optional[str] = None, ) -> None: """Create a fake MODIS L1b HDF4 file with headers. 
@@ -244,11 +245,11 @@ def create_hdfeos_test_file(filename: str, """ h = SD(filename, SDC.WRITE | SDC.CREATE) - if struct_meta != "": + if struct_meta: setattr(h, "StructMetadata.0", struct_meta) - if core_meta != "": + if core_meta: setattr(h, "CoreMetadata.0", core_meta) - if archive_meta != "": + if archive_meta: setattr(h, "ArchiveMetadata.0", archive_meta) for var_name, var_info in variable_infos.items(): From 328816ec1dac80f1d3773ced4a0a7a2ab3b801cf Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 15 Nov 2023 20:18:06 +0000 Subject: [PATCH 0722/1416] Update HDFEOS code to always use `maxsplit=1` when splitting attrs. --- satpy/readers/hdfeos_base.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index 6e25fd40a8..37fe714435 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -148,10 +148,7 @@ def _read_mda(cls, lines, element=None): @classmethod def _split_line(cls, line, lines): - try: - key, val = line.split("=") - except ValueError: - key, val = line.split("=", maxsplit=1) + key, val = line.split("=", maxsplit=1) key = key.strip() val = val.strip() try: From 3080dbc9efd0d70ad78e029f56a7df17bda56332 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 15 Nov 2023 23:42:42 +0100 Subject: [PATCH 0723/1416] Add support for angles in sgli reader --- satpy/readers/sgli_l1b.py | 93 +++++++++++++---------- satpy/tests/reader_tests/test_sgli_l1b.py | 49 +++++++++++- 2 files changed, 97 insertions(+), 45 deletions(-) diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py index 25289b5de3..2dace6d2b3 100644 --- a/satpy/readers/sgli_l1b.py +++ b/satpy/readers/sgli_l1b.py @@ -49,26 +49,6 @@ "L": 1000} -def interpolate(arr, sampling, full_shape): - """Interpolate the angles and navigation.""" - # TODO: daskify this! - # TODO: do it in cartesian coordinates ! 
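Geometry datasets (solar and satellite angles) are now served through
``get_dataset`` like the image channels. A usage sketch mirroring the test
added below (``sgli_file`` is the test fixture; the dataset-id keys follow the
reader's conventions):

    handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {})
    did = dict(name="solar_azimuth_angle", resolution=1000, polarization=None)
    angles = handler.get_dataset(did, {"file_key": "Geometry_data/Sensor_zenith"})
    # The test expects a dask-backed float32 array at the full (1955, 1250) shape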
pbs at date line and poles - # possible - tie_x = np.arange(0, arr.shape[0] * sampling, sampling) - tie_y = np.arange(0, arr.shape[1] * sampling, sampling) - full_x = np.arange(0, full_shape[0]) - full_y = np.arange(0, full_shape[1]) - - - from scipy.interpolate import RectBivariateSpline - spl = RectBivariateSpline( - tie_x, tie_y, arr) - - values = spl(full_x, full_y) - - return da.from_array(values, chunks=(1000, 1000)) - - class HDF5SGLI(BaseFileHandler): """File handler for the SGLI l1b data.""" @@ -103,20 +83,13 @@ def get_dataset(self, key, info): file_key = info["file_key"] h5dataset = self.h5file[file_key] - # resampling_interval = h5dataset.attrs.get('Resampling_interval', 1) - # if resampling_interval != 1: - # logger.debug('Interpolating %s.', key["name"]) - # full_shape = (self.h5file['Image_data'].attrs['Number_of_lines'], - # self.h5file['Image_data'].attrs['Number_of_pixels']) - # dataset = interpolate(h5dataset, resampling_interval, full_shape) - # else: - # dataset = da.from_array(h5dataset[:].astype('= 0 + +def test_loading_solar_angles(sgli_file): + handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) + did = dict(name="solar_azimuth_angle", resolution=1000, polarization=None) + res = handler.get_dataset(did, {"file_key": "Geometry_data/Sensor_zenith"}) + assert res.shape == (1955, 1250) + assert res.chunks is not None + assert res.dtype == np.float32 + assert res.max() <= 180 From 09325c2fe97a1e966e281c638c32f47fec34b811 Mon Sep 17 00:00:00 2001 From: ghiggi Date: Thu, 16 Nov 2023 12:25:55 +0100 Subject: [PATCH 0724/1416] Fix docstrings --- satpy/_scene_converters.py | 58 ++++++++----------- satpy/cf/__init__.py | 2 - satpy/cf/area.py | 2 - satpy/cf/attrs.py | 2 - satpy/cf/coords.py | 2 - satpy/cf/dataarray.py | 38 ++++-------- satpy/cf/datasets.py | 115 ++++++++++++++----------------------- satpy/cf/encoding.py | 2 - satpy/writers/cf_writer.py | 51 +++++++--------- 9 files changed, 98 insertions(+), 174 deletions(-) diff --git a/satpy/_scene_converters.py b/satpy/_scene_converters.py index c400a159f1..fbc0a7a627 100644 --- a/satpy/_scene_converters.py +++ b/satpy/_scene_converters.py @@ -52,42 +52,32 @@ def to_xarray(scn, If Scene DataArrays are on different areas, currently it fails, although in future we might return a DataTree object, grouped by area. - Parameters - ---------- - scn: satpy.Scene - Satpy Scene. - datasets (iterable): - List of Satpy Scene datasets to include in the output xr.Dataset. - Elements can be string name, a wavelength as a number, a DataID, - or DataQuery object. - If None (the default), it include all loaded Scene datasets. - header_attrs: - Global attributes of the output xr.Dataset. - epoch (str): - Reference time for encoding the time coordinates (if available). - Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is defined using "from satpy.cf import EPOCH" - flatten_attrs (bool): - If True, flatten dict-type attributes. - exclude_attrs (list): - List of xr.DataArray attribute names to be excluded. - include_lonlats (bool): - If True, it includes 'latitude' and 'longitude' coordinates. - If the 'area' attribute is a SwathDefinition, it always includes - latitude and longitude coordinates. - pretty (bool): - Don't modify coordinate names, if possible. Makes the file prettier, - but possibly less consistent. - include_orig_name (bool). - Include the original dataset name as a variable attribute in the xr.Dataset. 
-        Prefix to add the each variable with name starting with a digit.
-        Use '' or None to leave this out.
+    Args:
+        scn (satpy.Scene): Satpy Scene.
+        datasets (iterable, optional): List of Satpy Scene datasets to include in
+            the output xr.Dataset. Elements can be string name, a wavelength as a
+            number, a DataID, or DataQuery object. If None (the default), it
+            includes all loaded Scene datasets.
+        header_attrs: Global attributes of the output xr.Dataset.
+        epoch (str, optional): Reference time for encoding the time coordinates
+            (if available). Format example: "seconds since 1970-01-01 00:00:00".
+            If None, the default reference time is retrieved using
+            "from satpy.cf import EPOCH".
+        flatten_attrs (bool, optional): If True, flatten dict-type attributes.
+        exclude_attrs (list, optional): List of xr.DataArray attribute names to
+            be excluded.
+        include_lonlats (bool, optional): If True, includes 'latitude' and
+            'longitude' coordinates. If the 'area' attribute is a SwathDefinition,
+            it always includes latitude and longitude coordinates.
+        pretty (bool, optional): Don't modify coordinate names, if possible. Makes
+            the file prettier, but possibly less consistent.
+        include_orig_name (bool, optional): Include the original dataset name as a
+            variable attribute in the xr.Dataset.
+        numeric_name_prefix (str, optional): Prefix to add to each variable with
+            name starting with a digit. Use '' or None to leave this out.
 
     Returns:
-    -------
-    ds, xr.Dataset
-        A CF-compliant xr.Dataset
+        xr.Dataset: A CF-compliant xr.Dataset
 
     """
     from satpy.cf.datasets import collect_cf_datasets
diff --git a/satpy/cf/__init__.py b/satpy/cf/__init__.py
index c48acebcf9..63fac5261c 100644
--- a/satpy/cf/__init__.py
+++ b/satpy/cf/__init__.py
@@ -1,5 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
 """Code for generation of CF-compliant datasets."""
 
 EPOCH = u"seconds since 1970-01-01 00:00:00"
diff --git a/satpy/cf/area.py b/satpy/cf/area.py
index 5ce9f1e0c3..041338efd8 100644
--- a/satpy/cf/area.py
+++ b/satpy/cf/area.py
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
 # Copyright (c) 2017-2023 Satpy developers
 #
 # This file is part of satpy.
diff --git a/satpy/cf/attrs.py b/satpy/cf/attrs.py
index af1977b1f4..2cf9ffa528 100644
--- a/satpy/cf/attrs.py
+++ b/satpy/cf/attrs.py
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
 # Copyright (c) 2017-2023 Satpy developers
 #
 # This file is part of satpy.
diff --git a/satpy/cf/coords.py b/satpy/cf/coords.py
index 05e8a792fd..af11a62e43 100644
--- a/satpy/cf/coords.py
+++ b/satpy/cf/coords.py
@@ -1,5 +1,3 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
 """Set CF-compliant spatial and temporal coordinates."""
 
 import logging
diff --git a/satpy/cf/dataarray.py b/satpy/cf/dataarray.py
index 078c53c462..3c97a70336 100644
--- a/satpy/cf/dataarray.py
+++ b/satpy/cf/dataarray.py
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
 # Copyright (c) 2017-2023 Satpy developers
 #
 # This file is part of satpy.
@@ -63,31 +61,19 @@ def make_cf_dataarray(dataarray,
                       numeric_name_prefix="CHANNEL_"):
     """Make the xr.DataArray CF-compliant.
 
-    Parameters
-    ----------
-    dataarray : xr.DataArray
-        The data array to be made CF-compliant.
-    epoch : str, optional
-        Reference time for encoding of time coordinates.
-        If None, the default reference time is defined using `from satpy.cf import EPOCH`
-    flatten_attrs : bool, optional
-        If True, flatten dict-type attributes.
-        The default is False.
- exclude_attrs : list, optional - List of dataset attributes to be excluded. - The default is None. - include_orig_name : bool, optional - Include the original dataset name in the netcdf variable attributes. - The default is True. - numeric_name_prefix : TYPE, optional - Prepend dataset name with this if starting with a digit. - The default is ``"CHANNEL_"``. - - Returns - ------- - new_data : xr.DataArray - CF-compliant xr.DataArray. + Args: + dataarray (xr.DataArray): The data array to be made CF-compliant. + epoch (str, optional): Reference time for encoding of time coordinates. + If None, the default reference time is defined using `from satpy.cf import EPOCH`. + flatten_attrs (bool, optional): If True, flatten dict-type attributes. Defaults to False. + exclude_attrs (list, optional): List of dataset attributes to be excluded. Defaults to None. + include_orig_name (bool, optional): Include the original dataset name in the netcdf variable attributes. + Defaults to True. + numeric_name_prefix (str, optional): Prepend dataset name with this if starting with a digit. + Defaults to "CHANNEL_". + Returns: + xr.DataArray: A CF-compliant xr.DataArray. """ dataarray = _preprocess_dataarray_name(dataarray=dataarray, numeric_name_prefix=numeric_name_prefix, diff --git a/satpy/cf/datasets.py b/satpy/cf/datasets.py index 3cb72af442..70ac3fb014 100644 --- a/satpy/cf/datasets.py +++ b/satpy/cf/datasets.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright (c) 2017-2023 Satpy developers # # This file is part of satpy. @@ -69,39 +67,24 @@ def _collect_cf_dataset(list_dataarrays, numeric_name_prefix="CHANNEL_"): """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Dataset. - Parameters - ---------- - list_dataarrays : list - List of DataArrays to make CF compliant and merge into a xr.Dataset. - epoch : str - Reference time for encoding the time coordinates (if available). - Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is defined using `from satpy.cf import EPOCH` - flatten_attrs : bool, optional - flatten_attrs : bool - If True, flatten dict-type attributes. - exclude_attrs : list, optional - exclude_attrs : list - List of xr.DataArray attribute names to be excluded. - include_lonlats : bool, optional - include_lonlats : bool - If True, it includes 'latitude' and 'longitude' coordinates also for satpy scene defined on an AreaDefinition. - If the 'area' attribute is a SwathDefinition, it always include latitude and longitude coordinates. - pretty : bool, optional - pretty : bool - Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. - include_orig_name : bool, optional - include_orig_name : bool - Include the original dataset name as a variable attribute in the xr.Dataset. - numeric_name_prefix : str, optional - numeric_name_prefix : str - Prefix to add the each variable with name starting with a digit. - Use '' or None to leave this out. - - Returns - ------- - ds : xr.Dataset - A partially CF-compliant xr.Dataset + Args: + list_dataarrays (list): List of DataArrays to make CF compliant and merge into an xr.Dataset. + epoch (str, optional): Reference time for encoding the time coordinates. + Example format: "seconds since 1970-01-01 00:00:00". + If None, the default reference time is defined using `from satpy.cf import EPOCH`. + flatten_attrs (bool, optional): If True, flatten dict-type attributes. 
+ exclude_attrs (list, optional): List of xr.DataArray attribute names to be excluded. + include_lonlats (bool, optional): If True, includes 'latitude' and 'longitude' coordinates also for a + satpy.Scene defined on an AreaDefinition. + If the 'area' attribute is a SwathDefinition, it always includes latitude and longitude coordinates. + pretty (bool, optional): Don't modify coordinate names, if possible. + Makes the file prettier, but possibly less consistent. + include_orig_name (bool, optional): Include the original dataset name as a variable attribute in the xr.Dataset. + numeric_name_prefix (str, optional): Prefix to add to each variable with a name starting with a digit. + Use '' or None to leave this out. + + Returns: + xr.Dataset: A partially CF-compliant xr.Dataset. """ from satpy.cf.area import area2cf from satpy.cf.coords import ( @@ -194,44 +177,30 @@ def collect_cf_datasets(list_dataarrays, If the xr.DataArrays does not share the same dimensions, it creates a collection of xr.Datasets sharing the same dimensions. - Parameters - ---------- - list_dataarrays (list): - List of DataArrays to make CF compliant and merge into groups of xr.Datasets. - header_attrs: (dict): - Global attributes of the output xr.Dataset. - epoch (str): - Reference time for encoding the time coordinates (if available). - Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using `from satpy.cf import EPOCH` - flatten_attrs (bool): - If True, flatten dict-type attributes. - exclude_attrs (list): - List of xr.DataArray attribute names to be excluded. - include_lonlats (bool): - If True, it includes 'latitude' and 'longitude' coordinates also for satpy scene defined on an AreaDefinition. - If the 'area' attribute is a SwathDefinition, it always include latitude and longitude coordinates. - pretty (bool): - Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. - include_orig_name (bool). - Include the original dataset name as a variable attribute in the xr.Dataset. - numeric_name_prefix (str): - Prefix to add the each variable with name starting with a digit. - Use '' or None to leave this out. - groups (dict): - Group datasets according to the given assignment: - - `{'': ['dataset_name1', 'dataset_name2', ...]}` - - It is used to create grouped netCDFs using the CF_Writer. - If None (the default), no groups will be created. - - Returns - ------- - grouped_datasets : dict - A dictionary of CF-compliant xr.Dataset: {group_name: xr.Dataset} - header_attrs : dict - Global attributes to be attached to the xr.Dataset / netCDF4. + Args: + list_dataarrays (list): List of DataArrays to make CF compliant and merge into groups of xr.Datasets. + header_attrs (dict): Global attributes of the output xr.Dataset. + epoch (str, optional): Reference time for encoding the time coordinates. + Example format: "seconds since 1970-01-01 00:00:00". + If None, the default reference time is retrieved using `from satpy.cf import EPOCH`. + flatten_attrs (bool, optional): If True, flatten dict-type attributes. + exclude_attrs (list, optional): List of xr.DataArray attribute names to be excluded. + include_lonlats (bool, optional): If True, includes 'latitude' and 'longitude' coordinates also + for a satpy.Scene defined on an AreaDefinition. + If the 'area' attribute is a SwathDefinition, it always includes latitude and longitude coordinates. + pretty (bool, optional): Don't modify coordinate names, if possible. 
+ Makes the file prettier, but possibly less consistent. + include_orig_name (bool, optional): Include the original dataset name as a variable attribute in the xr.Dataset. + numeric_name_prefix (str, optional): Prefix to add to each variable with a name starting with a digit. + Use '' or None to leave this out. + groups (dict, optional): Group datasets according to the given assignment: + `{'': ['dataset_name1', 'dataset_name2', ...]}`. + Used to create grouped netCDFs using the CF_Writer. If None, no groups will be created. + + Returns: + tuple: A tuple containing: + - grouped_datasets (dict): A dictionary of CF-compliant xr.Dataset: {group_name: xr.Dataset}. + - header_attrs (dict): Global attributes to be attached to the xr.Dataset / netCDF4. """ from satpy.cf.attrs import preprocess_header_attrs from satpy.cf.coords import add_time_bounds_dimension diff --git a/satpy/cf/encoding.py b/satpy/cf/encoding.py index 3cdf1fdf1d..5c77b6d69f 100644 --- a/satpy/cf/encoding.py +++ b/satpy/cf/encoding.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright (c) 2017-2023 Satpy developers # # This file is part of satpy. diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 7076cc841d..1204754bd0 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. @@ -240,36 +238,27 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, Note that all datasets (if grouping: in one group) must have the same projection coordinates. Args: - datasets (list): - List of xr.DataArray to be saved. - filename (str): - Output file - groups (dict): - Group datasets according to the given assignment: `{'group_name': ['dataset1', 'dataset2', ...]}`. - Group name `None` corresponds to the root of the file, i.e. no group will be created. + datasets (list): List of xr.DataArray to be saved. + filename (str): Output file. + groups (dict): Group datasets according to the given assignment: + `{'group_name': ['dataset1', 'dataset2', ...]}`. + The group name `None` corresponds to the root of the file, i.e., no group will be created. Warning: The results will not be fully CF compliant! - header_attrs: - Global attributes to be included. - engine (str): - Module to be used for writing netCDF files. Follows xarray's - :meth:`~xarray.Dataset.to_netcdf` engine choices with a - preference for 'netcdf4'. - epoch (str): - Reference time for encoding of time coordinates. - If None, the default reference time is defined using `from satpy.cf import EPOCH` - flatten_attrs (bool): - If True, flatten dict-type attributes. - exclude_attrs (list): - List of dataset attributes to be excluded. - include_lonlats (bool): - Always include latitude and longitude coordinates, even for datasets with area definition. - pretty (bool): - Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. - include_orig_name (bool). - Include the original dataset name as a variable attribute in the final netCDF. - numeric_name_prefix (str): - Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. - + header_attrs: Global attributes to be included. + engine (str, optional): Module to be used for writing netCDF files. Follows xarray's + :meth:`~xarray.Dataset.to_netcdf` engine choices with a preference for 'netcdf4'. 
+ epoch (str, optional): Reference time for encoding of time coordinates. + If None, the default reference time is defined using `from satpy.cf import EPOCH`. + flatten_attrs (bool, optional): If True, flatten dict-type attributes. + exclude_attrs (list, optional): List of dataset attributes to be excluded. + include_lonlats (bool, optional): Always include latitude and longitude coordinates, + even for datasets with area definition. + pretty (bool, optional): Don't modify coordinate names, if possible. + Makes the file prettier, but possibly less consistent. + include_orig_name (bool, optional): Include the original dataset name as a variable + attribute in the final netCDF. + numeric_name_prefix (str, optional): Prefix to add to each variable with a name starting with a digit. + Use '' or None to leave this out. """ from satpy.cf.datasets import collect_cf_datasets from satpy.cf.encoding import update_encoding From c42d1edbde9b48667ef2d3be1092ed0da329073b Mon Sep 17 00:00:00 2001 From: ghiggi Date: Thu, 16 Nov 2023 12:30:43 +0100 Subject: [PATCH 0725/1416] Move EPOCH to satpy.cf.coords --- satpy/cf/__init__.py | 2 -- satpy/cf/coords.py | 5 +++-- satpy/cf/dataarray.py | 2 +- satpy/cf/datasets.py | 4 ++-- satpy/scene.py | 2 +- satpy/writers/cf_writer.py | 6 +++--- 6 files changed, 10 insertions(+), 11 deletions(-) diff --git a/satpy/cf/__init__.py b/satpy/cf/__init__.py index 63fac5261c..f8f662a93b 100644 --- a/satpy/cf/__init__.py +++ b/satpy/cf/__init__.py @@ -1,3 +1 @@ """Code for generation of CF-compliant datasets.""" - -EPOCH = u"seconds since 1970-01-01 00:00:00" diff --git a/satpy/cf/coords.py b/satpy/cf/coords.py index af11a62e43..80ce22de39 100644 --- a/satpy/cf/coords.py +++ b/satpy/cf/coords.py @@ -13,6 +13,9 @@ logger = logging.getLogger(__name__) +EPOCH = u"seconds since 1970-01-01 00:00:00" + + def add_xy_coords_attrs(dataarray): """Add relevant attributes to x, y coordinates.""" # If there are no coords, return dataarray @@ -98,8 +101,6 @@ def set_cf_time_info(dataarray, epoch): - the time coordinate has size 1 """ - from satpy.cf import EPOCH - if epoch is None: epoch = EPOCH diff --git a/satpy/cf/dataarray.py b/satpy/cf/dataarray.py index 3c97a70336..5df68da887 100644 --- a/satpy/cf/dataarray.py +++ b/satpy/cf/dataarray.py @@ -64,7 +64,7 @@ def make_cf_dataarray(dataarray, Args: dataarray (xr.DataArray): The data array to be made CF-compliant. epoch (str, optional): Reference time for encoding of time coordinates. - If None, the default reference time is defined using `from satpy.cf import EPOCH`. + If None, the default reference time is defined using `from satpy.cf.coords import EPOCH`. flatten_attrs (bool, optional): If True, flatten dict-type attributes. Defaults to False. exclude_attrs (list, optional): List of dataset attributes to be excluded. Defaults to None. include_orig_name (bool, optional): Include the original dataset name in the netcdf variable attributes. diff --git a/satpy/cf/datasets.py b/satpy/cf/datasets.py index 70ac3fb014..c6ea6fd351 100644 --- a/satpy/cf/datasets.py +++ b/satpy/cf/datasets.py @@ -71,7 +71,7 @@ def _collect_cf_dataset(list_dataarrays, list_dataarrays (list): List of DataArrays to make CF compliant and merge into an xr.Dataset. epoch (str, optional): Reference time for encoding the time coordinates. Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is defined using `from satpy.cf import EPOCH`. + If None, the default reference time is defined using `from satpy.cf.coords import EPOCH`. 
flatten_attrs (bool, optional): If True, flatten dict-type attributes. exclude_attrs (list, optional): List of xr.DataArray attribute names to be excluded. include_lonlats (bool, optional): If True, includes 'latitude' and 'longitude' coordinates also for a @@ -182,7 +182,7 @@ def collect_cf_datasets(list_dataarrays, header_attrs (dict): Global attributes of the output xr.Dataset. epoch (str, optional): Reference time for encoding the time coordinates. Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is retrieved using `from satpy.cf import EPOCH`. + If None, the default reference time is retrieved using `from satpy.cf.coords import EPOCH`. flatten_attrs (bool, optional): If True, flatten dict-type attributes. exclude_attrs (list, optional): List of xr.DataArray attribute names to be excluded. include_lonlats (bool, optional): If True, includes 'latitude' and 'longitude' coordinates also diff --git a/satpy/scene.py b/satpy/scene.py index bb8cf0ffab..4722a47533 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1128,7 +1128,7 @@ def to_xarray(self, epoch (str): Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". - If None, the default reference time is defined using "from satpy.cf import EPOCH" + If None, the default reference time is defined using "from satpy.cf.coords import EPOCH" flatten_attrs (bool): If True, flatten dict-type attributes. exclude_attrs (list): diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 1204754bd0..b64a288213 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -160,7 +160,7 @@ import xarray as xr from packaging.version import Version -from satpy.cf import EPOCH # noqa: F401 (for backward compatibility) +from satpy.cf.coords import EPOCH # noqa: F401 (for backward compatibility) from satpy.writers import Writer logger = logging.getLogger(__name__) @@ -248,7 +248,7 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, engine (str, optional): Module to be used for writing netCDF files. Follows xarray's :meth:`~xarray.Dataset.to_netcdf` engine choices with a preference for 'netcdf4'. epoch (str, optional): Reference time for encoding of time coordinates. - If None, the default reference time is defined using `from satpy.cf import EPOCH`. + If None, the default reference time is defined using `from satpy.cf.coords import EPOCH`. flatten_attrs (bool, optional): If True, flatten dict-type attributes. exclude_attrs (list, optional): List of dataset attributes to be excluded. include_lonlats (bool, optional): Always include latitude and longitude coordinates, @@ -325,7 +325,7 @@ def da2cf(dataarray, epoch=None, flatten_attrs=False, exclude_attrs=None, The data array to be converted. epoch (str): Reference time for encoding of time coordinates. - If None, the default reference time is defined using `from satpy.cf import EPOCH` + If None, the default reference time is defined using `from satpy.cf.coords import EPOCH` flatten_attrs (bool): If True, flatten dict-type attributes. 
exclude_attrs (list): From 73871f2a38ee229733bf286b3a13010e37a51beb Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 16 Nov 2023 13:33:11 +0200 Subject: [PATCH 0726/1416] Fix test value --- satpy/tests/test_composites.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 5bcbca0a1e..cf9f2fa6d2 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -489,7 +489,7 @@ def test_night_only_area_with_alpha(self): comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=True) res = comp((self.data_b,)) res = res.compute() - expected_l_channel = np.array([[np.nan, 0.], [0.49999994, 1.]], dtype=np.float32) + expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]], dtype=np.float32) # FIXME: with the current changes the np.nan becomes 0.0 instead, why?! expected_alpha = np.array([[np.nan, 0.], [0., 0.]], dtype=np.float32) assert res.dtype == np.float32 From 6127a917ca44c7aa60ebcd276147f9c64f0820b5 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 16 Nov 2023 14:46:14 +0200 Subject: [PATCH 0727/1416] Fix getting mask for single side product --- satpy/composites/__init__.py | 2 +- satpy/tests/test_composites.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index e3c9dc190a..d0afdda950 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -869,7 +869,7 @@ def _get_single_channel(data: xr.DataArray) -> xr.DataArray: def _get_weight_mask_for_single_side_product(data_a, data_b): - if isinstance(data_a, int): + if data_b.shape: return ~da.isnan(data_b) return ~da.isnan(data_a) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index cf9f2fa6d2..c77c17aa37 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -490,7 +490,6 @@ def test_night_only_area_with_alpha(self): res = comp((self.data_b,)) res = res.compute() expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]], dtype=np.float32) - # FIXME: with the current changes the np.nan becomes 0.0 instead, why?! 
expected_alpha = np.array([[np.nan, 0.], [0., 0.]], dtype=np.float32) assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected_l_channel) From 735c3f460b6c86291f4fb8b4002dd812e6a34297 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 16 Nov 2023 14:56:51 +0200 Subject: [PATCH 0728/1416] Fix/revert expected test values --- satpy/tests/test_composites.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index c77c17aa37..a564003e81 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -437,7 +437,7 @@ def test_daynight_sza(self): comp = DayNightCompositor(name="dn_test", day_night="day_night") res = comp((self.data_a, self.data_b, self.sza)) res = res.compute() - expected = np.array([[0., 0.22122352], [0.5, 1.]], dtype=np.float32) + expected = np.array([[0., 0.22122374], [0.5, 1.]], dtype=np.float32) assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected, rtol=1e-6) @@ -462,7 +462,7 @@ def test_night_only_sza_with_alpha(self): comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=True) res = comp((self.data_b, self.sza)) res = res.compute() - expected_red_channel = np.array([[np.nan, 0.], [0.49999994, 1.]], dtype=np.float32) + expected_red_channel = np.array([[np.nan, 0.], [0.5, 1.]], dtype=np.float32) expected_alpha = np.array([[0., 0.3329599], [1., 1.]], dtype=np.float32) assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected_red_channel) @@ -476,7 +476,7 @@ def test_night_only_sza_without_alpha(self): comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=False) res = comp((self.data_a, self.sza)) res = res.compute() - expected = np.array([[0., 0.11042608], [0.6683501, 1.]], dtype=np.float32) + expected = np.array([[0., 0.11042609], [0.6683502, 1.]], dtype=np.float32) assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected) assert "A" not in res.bands @@ -558,7 +558,7 @@ def test_day_only_area_with_alpha_and_missing_data(self): comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=True) res = comp((self.data_b,)) res = res.compute() - expected_l_channel = np.array([[np.nan, 0.], [0.49999994, 1.]], dtype=np.float32) + expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]], dtype=np.float32) expected_alpha = np.array([[np.nan, 1.], [1., 1.]], dtype=np.float32) assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected_l_channel) From 0a18d2da91893472fd802175161405157c3854e6 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 16 Nov 2023 09:29:53 -0600 Subject: [PATCH 0729/1416] Add test for VIIRS EDR available datasets fix --- satpy/tests/reader_tests/test_viirs_edr.py | 27 ++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 9b13f384e2..a6932520c0 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -285,6 +285,33 @@ def _create_fake_dataset(vars_dict: dict[str, xr.DataArray]) -> xr.Dataset: return ds +def test_available_datasets(aod_file): + """Test that available datasets doesn't claim non-filetype datasets. + + For example, if a YAML-configured dataset's file type is not loaded + then the available status is `None` and should remain `None`. This + means no file type knows what to do with this dataset. 
If it is + `False` then that means that a file type knows of the dataset, but + that the variable is not available in the file. In the below test + this isn't the case so the YAML-configured dataset should be + provided once and have a `None` availability. + + """ + from satpy.readers.viirs_edr import VIIRSJRRFileHandler + file_handler = VIIRSJRRFileHandler( + aod_file, + {"platform_shortname": "npp"}, + {"file_type": "jrr_aod"}, + ) + fake_yaml_datasets = [ + (None, {"file_key": "fake", "file_type": "fake_file", "name": "fake"}), + ] + available_datasets = list(file_handler.available_datasets(configured_datasets=fake_yaml_datasets)) + fake_availables = [avail_tuple for avail_tuple in available_datasets if avail_tuple[1]["name"] == "fake"] + assert len(fake_availables) == 1 + assert fake_availables[0][0] is None + + class TestVIIRSJRRReader: """Test the VIIRS JRR L2 reader.""" From ba09f1859368a4faec05e4079301026fd83c9e9c Mon Sep 17 00:00:00 2001 From: ghiggi Date: Thu, 16 Nov 2023 18:32:32 +0100 Subject: [PATCH 0730/1416] Simplify functions for CodeScene happiness --- satpy/cf/area.py | 6 ++- satpy/cf/attrs.py | 45 ++++++++++------ satpy/cf/coords.py | 122 ++++++++++++++++++++++++++++-------------- satpy/cf/dataarray.py | 5 +- satpy/cf/datasets.py | 19 ++++--- 5 files changed, 127 insertions(+), 70 deletions(-) diff --git a/satpy/cf/area.py b/satpy/cf/area.py index 041338efd8..93c8b28eed 100644 --- a/satpy/cf/area.py +++ b/satpy/cf/area.py @@ -67,9 +67,11 @@ def _add_grid_mapping(dataarray): def area2cf(dataarray, include_lonlats=False, got_lonlats=False): """Convert an area to at CF grid mapping or lon and lats.""" res = [] - if not got_lonlats and (isinstance(dataarray.attrs["area"], SwathDefinition) or include_lonlats): + include_lonlats = include_lonlats or isinstance(dataarray.attrs["area"], SwathDefinition) + is_area_def = isinstance(dataarray.attrs["area"], AreaDefinition) + if not got_lonlats and include_lonlats: dataarray = _add_lonlat_coords(dataarray) - if isinstance(dataarray.attrs["area"], AreaDefinition): + if is_area_def: dataarray, gmapping = _add_grid_mapping(dataarray) res.append(gmapping) res.append(dataarray) diff --git a/satpy/cf/attrs.py b/satpy/cf/attrs.py index 2cf9ffa528..eb562c1c93 100644 --- a/satpy/cf/attrs.py +++ b/satpy/cf/attrs.py @@ -59,35 +59,50 @@ def _encode(self, obj): return tuple(obj) elif isinstance(obj, np.ndarray): return obj.tolist() - return str(obj) +def _encode_numpy_array(obj): + """Encode numpy array as a netCDF4 serializable datatype.""" + from satpy.writers.cf_writer import NC4_DTYPES + + # Only plain 1-d arrays are supported. Skip record arrays and multi-dimensional arrays. + is_plain_1d = not obj.dtype.fields and len(obj.shape) <= 1 + if is_plain_1d: + if obj.dtype in NC4_DTYPES: + return obj + elif obj.dtype == np.bool_: + # Boolean arrays are not supported, convert to array of strings. + return [s.lower() for s in obj.astype(str)] + return obj.tolist() + else: + raise ValueError("Only a 1D numpy array can be encoded as netCDF attribute.") + + def _encode_object(obj): """Try to encode `obj` as a netCDF/Zarr compatible datatype which most closely resembles the object's nature. Raises: ValueError if no such datatype could be found """ - from satpy.writers.cf_writer import NC4_DTYPES - if isinstance(obj, int) and not isinstance(obj, (bool, np.bool_)): return obj elif isinstance(obj, (float, str, np.integer, np.floating)): return obj elif isinstance(obj, np.ndarray): - # Only plain 1-d arrays are supported. 
Skip record arrays and multi-dimensional arrays. - is_plain_1d = not obj.dtype.fields and len(obj.shape) <= 1 - if is_plain_1d: - if obj.dtype in NC4_DTYPES: - return obj - elif obj.dtype == np.bool_: - # Boolean arrays are not supported, convert to array of strings. - return [s.lower() for s in obj.astype(str)] - return obj.tolist() + return _encode_numpy_array(obj) raise ValueError("Unable to encode") +def _try_decode_object(obj): + """Try to decode byte string""" + try: + decoded = obj.decode() + except AttributeError: + decoded = obj + return decoded + + def _encode_python_objects(obj): """Try to find the datatype which most closely resembles the object's nature. @@ -98,11 +113,7 @@ def _encode_python_objects(obj): try: dump = _encode_object(obj) except ValueError: - try: - # Decode byte-strings - decoded = obj.decode() - except AttributeError: - decoded = obj + decoded = _try_decode_object(obj) dump = json.dumps(decoded, cls=AttributeEncoder).strip('"') return dump diff --git a/satpy/cf/coords.py b/satpy/cf/coords.py index 80ce22de39..ba1d195663 100644 --- a/satpy/cf/coords.py +++ b/satpy/cf/coords.py @@ -46,12 +46,26 @@ def _is_projected(dataarray): return True +def _is_area(dataarray): + if isinstance(dataarray.attrs["area"], AreaDefinition): + return True + else: + return False + + +def _is_swath(dataarray): + if isinstance(dataarray.attrs["area"], SwathDefinition): + return True + else: + return False + + def _try_to_get_crs(dataarray): """Try to get a CRS from attributes.""" if "area" in dataarray.attrs: - if isinstance(dataarray.attrs["area"], AreaDefinition): + if _is_area(dataarray): return dataarray.attrs["area"].crs - if not isinstance(dataarray.attrs["area"], SwathDefinition): + if not _is_swath(dataarray): logger.warning( f"Could not tell CRS from area of type {type(dataarray.attrs['area']).__name__:s}. 
" "Assuming projected CRS.") @@ -116,9 +130,7 @@ def set_cf_time_info(dataarray, epoch): def _is_lon_or_lat_dataarray(dataarray): """Check if the DataArray represents the latitude or longitude coordinate.""" - if "standard_name" in dataarray.attrs and dataarray.attrs["standard_name"] in ["longitude", "latitude"]: - return True - return False + return dataarray.attrs.get("standard_name", "") in ("longitude", "latitude") def has_projection_coords(dict_datarrays): @@ -129,6 +141,35 @@ def has_projection_coords(dict_datarrays): return False +def _get_is_nondimensional_coords_dict(dict_dataarrays): + tokens = defaultdict(set) + for dataarray in dict_dataarrays.values(): + for coord_name in dataarray.coords: + if not _is_lon_or_lat_dataarray(dataarray[coord_name]) and coord_name not in dataarray.dims: + tokens[coord_name].add(tokenize(dataarray[coord_name].data)) + coords_unique = dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()]) + return coords_unique + + +def _warn_if_pretty_but_not_unique(pretty, coord_name): + """Warn if coordinates cannot be pretty-formatted due to non-uniqueness.""" + if pretty: + warnings.warn( + f'Cannot pretty-format "{coord_name}" coordinates because they are ' + 'not identical among the given datasets', + stacklevel=2 + ) + + +def _rename_coords(dict_dataarrays, coord_name): + """Rename coordinates in the datasets.""" + for name, dataarray in dict_dataarrays.items(): + if coord_name in dataarray.coords: + rename = {coord_name: f"{name}_{coord_name}"} + dict_dataarrays[name] = dataarray.rename(rename) + return dict_dataarrays + + def ensure_unique_nondimensional_coords(dict_dataarrays, pretty=False): """Make non-dimensional coordinates unique among all datasets. @@ -155,28 +196,14 @@ def ensure_unique_nondimensional_coords(dict_dataarrays, pretty=False): """ # Determine which non-dimensional coordinates are unique # - coords_unique has structure: {coord_name: True/False} - tokens = defaultdict(set) - for dataarray in dict_dataarrays.values(): - for coord_name in dataarray.coords: - if not _is_lon_or_lat_dataarray(dataarray[coord_name]) and coord_name not in dataarray.dims: - tokens[coord_name].add(tokenize(dataarray[coord_name].data)) - coords_unique = dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()]) + is_coords_unique_dict = _get_is_nondimensional_coords_dict(dict_dataarrays) # Prepend dataset name, if not unique or no pretty-format desired new_dict_dataarrays = dict_dataarrays.copy() - for coord_name, unique in coords_unique.items(): + for coord_name, unique in is_coords_unique_dict.items(): if not pretty or not unique: - if pretty: - warnings.warn( - 'Cannot pretty-format "{}" coordinates because they are ' - 'not identical among the given datasets'.format(coord_name), - stacklevel=2 - ) - for name, dataarray in dict_dataarrays.items(): - if coord_name in dataarray.coords: - rename = {coord_name: "{}_{}".format(name, coord_name)} - new_dict_dataarrays[name] = new_dict_dataarrays[name].rename(rename) - + _warn_if_pretty_but_not_unique(pretty, coord_name) + new_dict_dataarrays = _rename_coords(new_dict_dataarrays, coord_name) return new_dict_dataarrays @@ -196,6 +223,7 @@ def check_unique_projection_coords(dict_dataarrays): "Please group them by area or save them in separate files.") + def add_coordinates_attrs_coords(dict_dataarrays): """Add to DataArrays the coordinates specified in the 'coordinates' attribute. 
@@ -208,23 +236,39 @@ def add_coordinates_attrs_coords(dict_dataarrays): In the final call to `xr.Dataset.to_netcdf()` all coordinate relations will be resolved and the `coordinates` attributes be set automatically. """ - for da_name, dataarray in dict_dataarrays.items(): - declared_coordinates = _get_coordinates_list(dataarray) - for coord in declared_coordinates: - if coord not in dataarray.coords: - try: - dimensions_not_in_data = list(set(dict_dataarrays[coord].dims) - set(dataarray.dims)) - dataarray[coord] = dict_dataarrays[coord].squeeze(dimensions_not_in_data, drop=True) - except KeyError: - warnings.warn( - 'Coordinate "{}" referenced by dataarray {} does not ' - 'exist, dropping reference.'.format(coord, da_name), - stacklevel=2 - ) - continue - + for dataarray_name in dict_dataarrays.keys(): + dict_dataarrays = _add_declared_coordinates(dict_dataarrays, + dataarray_name=dataarray_name) # Drop 'coordinates' attribute in any case to avoid conflicts in xr.Dataset.to_netcdf() - dataarray.attrs.pop("coordinates", None) + dict_dataarrays[dataarray_name].attrs.pop("coordinates", None) + return dict_dataarrays + + +def _add_declared_coordinates(dict_dataarrays, dataarray_name): + """Add declared coordinates to the dataarray if they exist.""" + dataarray = dict_dataarrays[dataarray_name] + declared_coordinates = _get_coordinates_list(dataarray) + for coord in declared_coordinates: + if coord not in dataarray.coords: + dict_dataarrays = _try_add_coordinate(dict_dataarrays, + dataarray_name=dataarray_name, + coord=coord) + return dict_dataarrays + + +def _try_add_coordinate(dict_dataarrays, dataarray_name, coord): + """Try to add a coordinate to the dataarray, warn if not possible.""" + try: + dataarray_dims = set(dict_dataarrays[dataarray_name].dims) + coordinate_dims = set(dict_dataarrays[coord].dims) + dimensions_to_squeeze = list(coordinate_dims - dataarray_dims) + dict_dataarrays[dataarray_name][coord] = dict_dataarrays[coord].squeeze(dimensions_to_squeeze, drop=True) + except KeyError: + warnings.warn( + f'Coordinate "{coord}" referenced by dataarray {dataarray_name} does not ' + 'exist, dropping reference.', + stacklevel=2 + ) return dict_dataarrays diff --git a/satpy/cf/dataarray.py b/satpy/cf/dataarray.py index 5df68da887..dc2ae7d6c1 100644 --- a/satpy/cf/dataarray.py +++ b/satpy/cf/dataarray.py @@ -41,15 +41,16 @@ def _handle_dataarray_name(original_name, numeric_name_prefix): def _preprocess_dataarray_name(dataarray, numeric_name_prefix, include_orig_name): """Change the DataArray name by prepending numeric_name_prefix if the name is a digit.""" original_name = None + named_has_changed = False dataarray = dataarray.copy() if "name" in dataarray.attrs: original_name = dataarray.attrs.pop("name") original_name, new_name = _handle_dataarray_name(original_name, numeric_name_prefix) dataarray = dataarray.rename(new_name) + named_has_changed = original_name != new_name - if include_orig_name and numeric_name_prefix and original_name and original_name != new_name: + if named_has_changed and include_orig_name: dataarray.attrs["original_name"] = original_name - return dataarray diff --git a/satpy/cf/datasets.py b/satpy/cf/datasets.py index c6ea6fd351..cab71de58c 100644 --- a/satpy/cf/datasets.py +++ b/satpy/cf/datasets.py @@ -16,7 +16,6 @@ """Utility to generate a CF-compliant Datasets.""" import logging import warnings -from collections import defaultdict import xarray as xr @@ -39,6 +38,11 @@ def _get_extra_ds(dataarray, keys=None): return dict_datarrays +def 
_get_group_dataarrays(group_members, list_dataarrays): + """Yield DataArrays that are part of a specific group.""" + return [da for da in list_dataarrays if da.attrs["name"] in group_members] + + def _get_groups(groups, list_datarrays): """Return a dictionary with the list of xr.DataArray associated to each group. @@ -46,15 +50,10 @@ def _get_groups(groups, list_datarrays): Else, collect the DataArrays associated to each group. """ if groups is None: - grouped_dataarrays = {None: list_datarrays} - else: - grouped_dataarrays = defaultdict(list) - for datarray in list_datarrays: - for group_name, group_members in groups.items(): - if datarray.attrs["name"] in group_members: - grouped_dataarrays[group_name].append(datarray) - break - return grouped_dataarrays + return {None: list_datarrays} + + return {group_name: _get_group_dataarrays(group_members, list_datarrays) + for group_name, group_members in groups.items()} def _collect_cf_dataset(list_dataarrays, From 83e815d3b67404fa0b6f9cec458f56f63b430f12 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 16 Nov 2023 12:09:38 -0600 Subject: [PATCH 0731/1416] Cleanup CF attrs functions --- satpy/cf/attrs.py | 25 ++++++++++++------------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/satpy/cf/attrs.py b/satpy/cf/attrs.py index eb562c1c93..987aeec6cb 100644 --- a/satpy/cf/attrs.py +++ b/satpy/cf/attrs.py @@ -68,15 +68,14 @@ def _encode_numpy_array(obj): # Only plain 1-d arrays are supported. Skip record arrays and multi-dimensional arrays. is_plain_1d = not obj.dtype.fields and len(obj.shape) <= 1 - if is_plain_1d: - if obj.dtype in NC4_DTYPES: - return obj - elif obj.dtype == np.bool_: - # Boolean arrays are not supported, convert to array of strings. - return [s.lower() for s in obj.astype(str)] - return obj.tolist() - else: + if not is_plain_1d: raise ValueError("Only a 1D numpy array can be encoded as netCDF attribute.") + if obj.dtype in NC4_DTYPES: + return obj + if obj.dtype == np.bool_: + # Boolean arrays are not supported, convert to array of strings. 
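# Illustrative sketch (editor's note, not part of the patch): what the boolean
# branch below produces for a hypothetical attribute value, since netCDF4
# attributes cannot store boolean arrays directly:
#
#     >>> import numpy as np
#     >>> [s.lower() for s in np.array([True, False]).astype(str)]
#     ['true', 'false']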
+ return [s.lower() for s in obj.astype(str)] + return obj.tolist() def _encode_object(obj): @@ -85,9 +84,9 @@ def _encode_object(obj): Raises: ValueError if no such datatype could be found """ - if isinstance(obj, int) and not isinstance(obj, (bool, np.bool_)): - return obj - elif isinstance(obj, (float, str, np.integer, np.floating)): + is_nonbool_int = isinstance(obj, int) and not isinstance(obj, (bool, np.bool_)) + is_encode_type = isinstance(obj, (float, str, np.integer, np.floating)) + if is_nonbool_int or is_encode_type: return obj elif isinstance(obj, np.ndarray): return _encode_numpy_array(obj) @@ -194,10 +193,10 @@ def preprocess_datarray_attrs(dataarray, flatten_attrs, exclude_attrs): dataarray = _drop_exclude_attrs(dataarray, exclude_attrs) dataarray = _format_prerequisites_attrs(dataarray) dataarray = _remove_none_attrs(dataarray) - _ = dataarray.attrs.pop("area", None) + dataarray.attrs.pop("area", None) if "long_name" not in dataarray.attrs and "standard_name" not in dataarray.attrs: - dataarray.attrs["long_name"] = dataarray.name + dataarray.attrs["long_name"] = dataarray.attrs["name"] if flatten_attrs: dataarray.attrs = flatten_dict(dataarray.attrs) From cadcfef1129c2a74d5c4e47cb6f4deadb4b5b358 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 16 Nov 2023 12:36:54 -0600 Subject: [PATCH 0732/1416] Remove commented out tests --- satpy/tests/test_writers.py | 20 -------------------- 1 file changed, 20 deletions(-) diff --git a/satpy/tests/test_writers.py b/satpy/tests/test_writers.py index c11066d3f6..c40b51fa01 100644 --- a/satpy/tests/test_writers.py +++ b/satpy/tests/test_writers.py @@ -594,26 +594,6 @@ def test_geotiff(self): compute_writer_results([res]) assert os.path.isfile(fname) -# FIXME: This reader needs more information than exist at the moment -# def test_mitiff(self): -# """Test writing to mitiff file""" -# fname = os.path.join(self.base_dir, 'mitiff.tif') -# res = self.scn.save_datasets(filename=fname, -# datasets=['test'], -# writer='mitiff') -# compute_writer_results([res]) -# self.assertTrue(os.path.isfile(fname)) - -# FIXME: This reader needs more information than exist at the moment -# def test_cf(self): -# """Test writing to NetCDF4 file""" -# fname = os.path.join(self.base_dir, 'cf.nc') -# res = self.scn.save_datasets(filename=fname, -# datasets=['test'], -# writer='cf') -# compute_writer_results([res]) -# self.assertTrue(os.path.isfile(fname)) - def test_multiple_geotiff(self): """Test writing to mitiff file.""" from satpy.writers import compute_writer_results From ceabff9532d2d7aaac0803fd00e2ab3251278a96 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 16 Nov 2023 15:29:03 -0600 Subject: [PATCH 0733/1416] Refactor attribute handling --- satpy/cf/attrs.py | 97 ++++++++++++++++++++++++----------------------- 1 file changed, 49 insertions(+), 48 deletions(-) diff --git a/satpy/cf/attrs.py b/satpy/cf/attrs.py index 987aeec6cb..3b355748b8 100644 --- a/satpy/cf/attrs.py +++ b/satpy/cf/attrs.py @@ -14,12 +14,15 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
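# Illustrative note (editor's sketch, not part of the patch): the
# `from __future__ import annotations` line added just below makes annotations
# such as `list[str] | None` safe on older Python versions, because they are
# no longer evaluated at function-definition time:
#
#     >>> from __future__ import annotations
#     >>> def f(x: list[str] | None) -> list[str]: ...
#     >>> f.__annotations__["x"]
#     'list[str] | None'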
"""CF processing of attributes.""" +from __future__ import annotations + import datetime import json import logging from collections import OrderedDict import numpy as np +import xarray as xr from satpy.writers.utils import flatten_dict @@ -142,68 +145,66 @@ def _encode_nc_attrs(attrs): return OrderedDict(encoded_attrs) -def _add_ancillary_variables_attrs(dataarray): - """Replace ancillary_variables DataArray with a list of their name.""" - list_ancillary_variable_names = [da_ancillary.attrs["name"] - for da_ancillary in dataarray.attrs.get("ancillary_variables", [])] - if list_ancillary_variable_names: - dataarray.attrs["ancillary_variables"] = " ".join(list_ancillary_variable_names) - else: - dataarray.attrs.pop("ancillary_variables", None) - return dataarray +def preprocess_datarray_attrs( + dataarray: xr.DataArray, + flatten_attrs: bool, + exclude_attrs: list[str] | None +) -> xr.DataArray: + """Preprocess DataArray attributes to be written into CF-compliant netCDF/Zarr.""" + _drop_attrs(dataarray, exclude_attrs) + _add_ancillary_variables_attrs(dataarray) + _format_prerequisites_attrs(dataarray) + if "long_name" not in dataarray.attrs and "standard_name" not in dataarray.attrs: + dataarray.attrs["long_name"] = dataarray.name -def _drop_exclude_attrs(dataarray, exclude_attrs): - """Remove user-specified list of attributes.""" - if exclude_attrs is None: - exclude_attrs = [] - for key in exclude_attrs: - dataarray.attrs.pop(key, None) - return dataarray + if flatten_attrs: + dataarray.attrs = flatten_dict(dataarray.attrs) + dataarray.attrs = _encode_nc_attrs(dataarray.attrs) -def _remove_satpy_attrs(new_data): - """Remove _satpy attribute.""" - satpy_attrs = [key for key in new_data.attrs if key.startswith("_satpy")] - for satpy_attr in satpy_attrs: - new_data.attrs.pop(satpy_attr) - new_data.attrs.pop("_last_resampler", None) - return new_data + return dataarray -def _format_prerequisites_attrs(dataarray): - """Reformat prerequisites attribute value to string.""" - if "prerequisites" in dataarray.attrs: - dataarray.attrs["prerequisites"] = [np.bytes_(str(prereq)) for prereq in dataarray.attrs["prerequisites"]] - return dataarray +def _drop_attrs( + dataarray: xr.DataArray, + user_excluded_attrs: list[str] | None +) -> None: + """Remove undesirable attributes.""" + attrs_to_drop = ( + (user_excluded_attrs or []) + + _get_satpy_attrs(dataarray) + + _get_none_attrs(dataarray) + + ["area"] + ) + for key in attrs_to_drop: + dataarray.attrs.pop(key, None) -def _remove_none_attrs(dataarray): - """Remove attribute keys with None value.""" - for key, val in dataarray.attrs.copy().items(): - if val is None: - dataarray.attrs.pop(key) - return dataarray +def _get_satpy_attrs(new_data): + """Remove _satpy attribute.""" + return [key for key in new_data.attrs if key.startswith("_satpy")] + ["_last_resampler"] -def preprocess_datarray_attrs(dataarray, flatten_attrs, exclude_attrs): - """Preprocess DataArray attributes to be written into CF-compliant netCDF/Zarr.""" - dataarray = _remove_satpy_attrs(dataarray) - dataarray = _add_ancillary_variables_attrs(dataarray) - dataarray = _drop_exclude_attrs(dataarray, exclude_attrs) - dataarray = _format_prerequisites_attrs(dataarray) - dataarray = _remove_none_attrs(dataarray) - dataarray.attrs.pop("area", None) +def _get_none_attrs(dataarray): + """Remove attribute keys with None value.""" + return [attr_name for attr_name, attr_val in dataarray.attrs.items() if attr_val is None] - if "long_name" not in dataarray.attrs and "standard_name" not in 
dataarray.attrs: - dataarray.attrs["long_name"] = dataarray.attrs["name"] - if flatten_attrs: - dataarray.attrs = flatten_dict(dataarray.attrs) +def _add_ancillary_variables_attrs(dataarray: xr.DataArray) -> None: + """Replace ancillary_variables DataArray with a list of their name.""" + list_ancillary_variable_names = [da_ancillary.attrs["name"] + for da_ancillary in dataarray.attrs.get("ancillary_variables", [])] + if list_ancillary_variable_names: + dataarray.attrs["ancillary_variables"] = " ".join(list_ancillary_variable_names) + else: + dataarray.attrs.pop("ancillary_variables", None) - dataarray.attrs = _encode_nc_attrs(dataarray.attrs) - return dataarray +def _format_prerequisites_attrs(dataarray: xr.DataArray) -> None: + """Reformat prerequisites attribute value to string.""" + if "prerequisites" in dataarray.attrs: + dataarray.attrs["prerequisites"] = [np.bytes_(str(prereq)) for prereq in dataarray.attrs["prerequisites"]] def _add_history(attrs): From bf681f2f88204aab02334dd9c12ddfcb6a2f9fc1 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 16 Nov 2023 15:54:07 -0600 Subject: [PATCH 0734/1416] Rename dataarray to data_arr and add type annotations --- satpy/cf/area.py | 46 ++--- satpy/cf/attrs.py | 54 +++--- satpy/cf/coords.py | 207 +++++++++++------------ satpy/cf/{dataarray.py => data_array.py} | 32 ++-- satpy/cf/datasets.py | 14 +- satpy/tests/cf_tests/test_dataaarray.py | 24 +-- satpy/writers/cf_writer.py | 16 +- 7 files changed, 196 insertions(+), 197 deletions(-) rename satpy/cf/{dataarray.py => data_array.py} (74%) diff --git a/satpy/cf/area.py b/satpy/cf/area.py index 93c8b28eed..88a12a3c52 100644 --- a/satpy/cf/area.py +++ b/satpy/cf/area.py @@ -23,24 +23,24 @@ logger = logging.getLogger(__name__) -def _add_lonlat_coords(dataarray): +def _add_lonlat_coords(data_arr: xr.DataArray) -> xr.DataArray: """Add 'longitude' and 'latitude' coordinates to DataArray.""" - dataarray = dataarray.copy() - area = dataarray.attrs["area"] - ignore_dims = {dim: 0 for dim in dataarray.dims if dim not in ["x", "y"]} - chunks = getattr(dataarray.isel(**ignore_dims), "chunks", None) + data_arr = data_arr.copy() + area = data_arr.attrs["area"] + ignore_dims = {dim: 0 for dim in data_arr.dims if dim not in ["x", "y"]} + chunks = getattr(data_arr.isel(**ignore_dims), "chunks", None) lons, lats = area.get_lonlats(chunks=chunks) - dataarray["longitude"] = xr.DataArray(lons, dims=["y", "x"], - attrs={"name": "longitude", + data_arr["longitude"] = xr.DataArray(lons, dims=["y", "x"], + attrs={"name": "longitude", "standard_name": "longitude", "units": "degrees_east"}, - name="longitude") - dataarray["latitude"] = xr.DataArray(lats, dims=["y", "x"], - attrs={"name": "latitude", + name="longitude") + data_arr["latitude"] = xr.DataArray(lats, dims=["y", "x"], + attrs={"name": "latitude", "standard_name": "latitude", "units": "degrees_north"}, - name="latitude") - return dataarray + name="latitude") + return data_arr def _create_grid_mapping(area): @@ -55,24 +55,24 @@ def _create_grid_mapping(area): return area.area_id, grid_mapping -def _add_grid_mapping(dataarray): +def _add_grid_mapping(data_arr: xr.DataArray) -> tuple[xr.DataArray, xr.DataArray]: """Convert an area to at CF grid mapping.""" - dataarray = dataarray.copy() - area = dataarray.attrs["area"] + data_arr = data_arr.copy() + area = data_arr.attrs["area"] gmapping_var_name, attrs = _create_grid_mapping(area) - dataarray.attrs["grid_mapping"] = gmapping_var_name - return dataarray, xr.DataArray(0, attrs=attrs, name=gmapping_var_name) + 
data_arr.attrs["grid_mapping"] = gmapping_var_name + return data_arr, xr.DataArray(0, attrs=attrs, name=gmapping_var_name) -def area2cf(dataarray, include_lonlats=False, got_lonlats=False): +def area2cf(data_arr: xr.DataArray, include_lonlats: bool = False, got_lonlats: bool = False) -> list[xr.DataArray]: """Convert an area to at CF grid mapping or lon and lats.""" res = [] - include_lonlats = include_lonlats or isinstance(dataarray.attrs["area"], SwathDefinition) - is_area_def = isinstance(dataarray.attrs["area"], AreaDefinition) + include_lonlats = include_lonlats or isinstance(data_arr.attrs["area"], SwathDefinition) + is_area_def = isinstance(data_arr.attrs["area"], AreaDefinition) if not got_lonlats and include_lonlats: - dataarray = _add_lonlat_coords(dataarray) + data_arr = _add_lonlat_coords(data_arr) if is_area_def: - dataarray, gmapping = _add_grid_mapping(dataarray) + data_arr, gmapping = _add_grid_mapping(data_arr) res.append(gmapping) - res.append(dataarray) + res.append(data_arr) return res diff --git a/satpy/cf/attrs.py b/satpy/cf/attrs.py index 3b355748b8..f9d49416c8 100644 --- a/satpy/cf/attrs.py +++ b/satpy/cf/attrs.py @@ -145,66 +145,66 @@ def _encode_nc_attrs(attrs): return OrderedDict(encoded_attrs) -def preprocess_datarray_attrs( - dataarray: xr.DataArray, +def preprocess_attrs( + data_arr: xr.DataArray, flatten_attrs: bool, exclude_attrs: list[str] | None ) -> xr.DataArray: """Preprocess DataArray attributes to be written into CF-compliant netCDF/Zarr.""" - _drop_attrs(dataarray, exclude_attrs) - _add_ancillary_variables_attrs(dataarray) - _format_prerequisites_attrs(dataarray) + _drop_attrs(data_arr, exclude_attrs) + _add_ancillary_variables_attrs(data_arr) + _format_prerequisites_attrs(data_arr) - if "long_name" not in dataarray.attrs and "standard_name" not in dataarray.attrs: - dataarray.attrs["long_name"] = dataarray.name + if "long_name" not in data_arr.attrs and "standard_name" not in data_arr.attrs: + data_arr.attrs["long_name"] = data_arr.name if flatten_attrs: - dataarray.attrs = flatten_dict(dataarray.attrs) + data_arr.attrs = flatten_dict(data_arr.attrs) - dataarray.attrs = _encode_nc_attrs(dataarray.attrs) + data_arr.attrs = _encode_nc_attrs(data_arr.attrs) - return dataarray + return data_arr def _drop_attrs( - dataarray: xr.DataArray, + data_arr: xr.DataArray, user_excluded_attrs: list[str] | None ) -> None: """Remove undesirable attributes.""" attrs_to_drop = ( - (user_excluded_attrs or []) + - _get_satpy_attrs(dataarray) + - _get_none_attrs(dataarray) + - ["area"] + (user_excluded_attrs or []) + + _get_satpy_attrs(data_arr) + + _get_none_attrs(data_arr) + + ["area"] ) for key in attrs_to_drop: - dataarray.attrs.pop(key, None) + data_arr.attrs.pop(key, None) -def _get_satpy_attrs(new_data): +def _get_satpy_attrs(data_arr: xr.DataArray) -> list[str]: """Remove _satpy attribute.""" - return [key for key in new_data.attrs if key.startswith("_satpy")] + ["_last_resampler"] + return [key for key in data_arr.attrs if key.startswith("_satpy")] + ["_last_resampler"] -def _get_none_attrs(dataarray): +def _get_none_attrs(data_arr: xr.DataArray) -> list[str]: """Remove attribute keys with None value.""" - return [attr_name for attr_name, attr_val in dataarray.attrs.items() if attr_val is None] + return [attr_name for attr_name, attr_val in data_arr.attrs.items() if attr_val is None] -def _add_ancillary_variables_attrs(dataarray: xr.DataArray) -> None: +def _add_ancillary_variables_attrs(data_arr: xr.DataArray) -> None: """Replace ancillary_variables DataArray 
with a list of their name.""" list_ancillary_variable_names = [da_ancillary.attrs["name"] - for da_ancillary in dataarray.attrs.get("ancillary_variables", [])] + for da_ancillary in data_arr.attrs.get("ancillary_variables", [])] if list_ancillary_variable_names: - dataarray.attrs["ancillary_variables"] = " ".join(list_ancillary_variable_names) + data_arr.attrs["ancillary_variables"] = " ".join(list_ancillary_variable_names) else: - dataarray.attrs.pop("ancillary_variables", None) + data_arr.attrs.pop("ancillary_variables", None) -def _format_prerequisites_attrs(dataarray: xr.DataArray) -> None: +def _format_prerequisites_attrs(data_arr: xr.DataArray) -> None: """Reformat prerequisites attribute value to string.""" - if "prerequisites" in dataarray.attrs: - dataarray.attrs["prerequisites"] = [np.bytes_(str(prereq)) for prereq in dataarray.attrs["prerequisites"]] + if "prerequisites" in data_arr.attrs: + data_arr.attrs["prerequisites"] = [np.bytes_(str(prereq)) for prereq in data_arr.attrs["prerequisites"]] def _add_history(attrs): diff --git a/satpy/cf/coords.py b/satpy/cf/coords.py index ba1d195663..48a0748509 100644 --- a/satpy/cf/coords.py +++ b/satpy/cf/coords.py @@ -1,4 +1,5 @@ """Set CF-compliant spatial and temporal coordinates.""" +from __future__ import annotations import logging import warnings @@ -8,6 +9,7 @@ import numpy as np import xarray as xr from dask.base import tokenize +from pyproj import CRS from pyresample.geometry import AreaDefinition, SwathDefinition logger = logging.getLogger(__name__) @@ -16,27 +18,27 @@ EPOCH = u"seconds since 1970-01-01 00:00:00" -def add_xy_coords_attrs(dataarray): +def add_xy_coords_attrs(data_arr: xr.DataArray) -> xr.DataArray: """Add relevant attributes to x, y coordinates.""" # If there are no coords, return dataarray - if not dataarray.coords.keys() & {"x", "y", "crs"}: - return dataarray + if not data_arr.coords.keys() & {"x", "y", "crs"}: + return data_arr # If projected area - if _is_projected(dataarray): - dataarray = _add_xy_projected_coords_attrs(dataarray) + if _is_projected(data_arr): + data_arr = _add_xy_projected_coords_attrs(data_arr) else: - dataarray = _add_xy_geographic_coords_attrs(dataarray) - if "crs" in dataarray.coords: - dataarray = dataarray.drop_vars("crs") - return dataarray + data_arr = _add_xy_geographic_coords_attrs(data_arr) + if "crs" in data_arr.coords: + data_arr = data_arr.drop_vars("crs") + return data_arr -def _is_projected(dataarray): +def _is_projected(data_arr: xr.DataArray) -> bool: """Guess whether data are projected or not.""" - crs = _try_to_get_crs(dataarray) + crs = _try_to_get_crs(data_arr) if crs: return crs.is_projected - units = _try_get_units_from_coords(dataarray) + units = _try_get_units_from_coords(data_arr) if units: if units.endswith("m"): return True @@ -46,65 +48,60 @@ def _is_projected(dataarray): return True -def _is_area(dataarray): - if isinstance(dataarray.attrs["area"], AreaDefinition): - return True - else: - return False +def _is_area(data_arr: xr.DataArray) -> bool: + return isinstance(data_arr.attrs["area"], AreaDefinition) -def _is_swath(dataarray): - if isinstance(dataarray.attrs["area"], SwathDefinition): - return True - else: - return False +def _is_swath(data_arr: xr.DataArray) -> bool: + return isinstance(data_arr.attrs["area"], SwathDefinition) -def _try_to_get_crs(dataarray): +def _try_to_get_crs(data_arr: xr.DataArray) -> CRS: """Try to get a CRS from attributes.""" - if "area" in dataarray.attrs: - if _is_area(dataarray): - return dataarray.attrs["area"].crs - if not 
_is_swath(dataarray): + if "area" in data_arr.attrs: + if _is_area(data_arr): + return data_arr.attrs["area"].crs + if not _is_swath(data_arr): logger.warning( - f"Could not tell CRS from area of type {type(dataarray.attrs['area']).__name__:s}. " + f"Could not tell CRS from area of type {type(data_arr.attrs['area']).__name__:s}. " "Assuming projected CRS.") - if "crs" in dataarray.coords: - return dataarray.coords["crs"].item() + if "crs" in data_arr.coords: + return data_arr.coords["crs"].item() -def _try_get_units_from_coords(dataarray): +def _try_get_units_from_coords(data_arr: xr.DataArray) -> str | None: """Try to retrieve coordinate x/y units.""" for c in ["x", "y"]: with suppress(KeyError): # If the data has only 1 dimension, it has only one of x or y coords - if "units" in dataarray.coords[c].attrs: - return dataarray.coords[c].attrs["units"] + if "units" in data_arr.coords[c].attrs: + return data_arr.coords[c].attrs["units"] + return None -def _add_xy_projected_coords_attrs(dataarray, x="x", y="y"): +def _add_xy_projected_coords_attrs(data_arr: xr.DataArray, x: str = "x", y: str = "y") -> xr.DataArray: """Add relevant attributes to x, y coordinates of a projected CRS.""" - if x in dataarray.coords: - dataarray[x].attrs["standard_name"] = "projection_x_coordinate" - dataarray[x].attrs["units"] = "m" - if y in dataarray.coords: - dataarray[y].attrs["standard_name"] = "projection_y_coordinate" - dataarray[y].attrs["units"] = "m" - return dataarray + if x in data_arr.coords: + data_arr[x].attrs["standard_name"] = "projection_x_coordinate" + data_arr[x].attrs["units"] = "m" + if y in data_arr.coords: + data_arr[y].attrs["standard_name"] = "projection_y_coordinate" + data_arr[y].attrs["units"] = "m" + return data_arr -def _add_xy_geographic_coords_attrs(dataarray, x="x", y="y"): +def _add_xy_geographic_coords_attrs(data_arr: xr.DataArray, x: str = "x", y: str = "y") -> xr.DataArray: """Add relevant attributes to x, y coordinates of a geographic CRS.""" - if x in dataarray.coords: - dataarray[x].attrs["standard_name"] = "longitude" - dataarray[x].attrs["units"] = "degrees_east" - if y in dataarray.coords: - dataarray[y].attrs["standard_name"] = "latitude" - dataarray[y].attrs["units"] = "degrees_north" - return dataarray + if x in data_arr.coords: + data_arr[x].attrs["standard_name"] = "longitude" + data_arr[x].attrs["units"] = "degrees_east" + if y in data_arr.coords: + data_arr[y].attrs["standard_name"] = "latitude" + data_arr[y].attrs["units"] = "degrees_north" + return data_arr -def set_cf_time_info(dataarray, epoch): +def set_cf_time_info(data_arr: xr.DataArray, epoch: str | None) -> xr.DataArray: """Set CF time attributes and encoding. It expand the DataArray with a time dimension if does not yet exists. 
@@ -118,37 +115,33 @@ def set_cf_time_info(dataarray, epoch): if epoch is None: epoch = EPOCH - dataarray["time"].encoding["units"] = epoch - dataarray["time"].attrs["standard_name"] = "time" - dataarray["time"].attrs.pop("bounds", None) + data_arr["time"].encoding["units"] = epoch + data_arr["time"].attrs["standard_name"] = "time" + data_arr["time"].attrs.pop("bounds", None) - if "time" not in dataarray.dims and dataarray["time"].size not in dataarray.shape: - dataarray = dataarray.expand_dims("time") + if "time" not in data_arr.dims and data_arr["time"].size not in data_arr.shape: + data_arr = data_arr.expand_dims("time") - return dataarray + return data_arr -def _is_lon_or_lat_dataarray(dataarray): - """Check if the DataArray represents the latitude or longitude coordinate.""" - return dataarray.attrs.get("standard_name", "") in ("longitude", "latitude") +def has_projection_coords(data_arrays: dict[str, xr.DataArray]) -> bool: + """Check if DataArray collection has a "longitude" or "latitude" DataArray.""" + return any(_is_lon_or_lat_dataarray(data_arr) for data_arr in data_arrays.values()) -def has_projection_coords(dict_datarrays): - """Check if DataArray collection has a "longitude" or "latitude" DataArray.""" - for dataarray in dict_datarrays.values(): - if _is_lon_or_lat_dataarray(dataarray): - return True - return False +def _is_lon_or_lat_dataarray(data_arr: xr.DataArray) -> bool: + """Check if the DataArray represents the latitude or longitude coordinate.""" + return data_arr.attrs.get("standard_name", "") in ("longitude", "latitude") -def _get_is_nondimensional_coords_dict(dict_dataarrays): +def _get_is_nondimensional_coords_dict(data_arrays: dict[str, xr.DataArray]) -> dict[str, bool]: tokens = defaultdict(set) - for dataarray in dict_dataarrays.values(): - for coord_name in dataarray.coords: - if not _is_lon_or_lat_dataarray(dataarray[coord_name]) and coord_name not in dataarray.dims: - tokens[coord_name].add(tokenize(dataarray[coord_name].data)) - coords_unique = dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()]) - return coords_unique + for data_arr in data_arrays.values(): + for coord_name in data_arr.coords: + if not _is_lon_or_lat_dataarray(data_arr[coord_name]) and coord_name not in data_arr.dims: + tokens[coord_name].add(tokenize(data_arr[coord_name].data)) + return dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()]) def _warn_if_pretty_but_not_unique(pretty, coord_name): @@ -161,16 +154,19 @@ def _warn_if_pretty_but_not_unique(pretty, coord_name): ) -def _rename_coords(dict_dataarrays, coord_name): +def _rename_coords(data_arrays: dict[str, xr.DataArray], coord_name: str) -> dict[str, xr.DataArray]: """Rename coordinates in the datasets.""" - for name, dataarray in dict_dataarrays.items(): + for name, dataarray in data_arrays.items(): if coord_name in dataarray.coords: rename = {coord_name: f"{name}_{coord_name}"} - dict_dataarrays[name] = dataarray.rename(rename) - return dict_dataarrays + data_arrays[name] = dataarray.rename(rename) + return data_arrays -def ensure_unique_nondimensional_coords(dict_dataarrays, pretty=False): +def ensure_unique_nondimensional_coords( + data_arrays: dict[str, xr.DataArray], + pretty: bool = False +) -> dict[str, xr.DataArray]: """Make non-dimensional coordinates unique among all datasets. 
Non-dimensional coordinates, such as scanline timestamps, @@ -185,9 +181,9 @@ def ensure_unique_nondimensional_coords(dict_dataarrays, pretty=False): this is not applied to latitude and longitude. Args: - datas (dict): + datas: Dictionary of (dataset name, dataset) - pretty (bool): + pretty: Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. Returns: @@ -196,10 +192,10 @@ def ensure_unique_nondimensional_coords(dict_dataarrays, pretty=False): """ # Determine which non-dimensional coordinates are unique # - coords_unique has structure: {coord_name: True/False} - is_coords_unique_dict = _get_is_nondimensional_coords_dict(dict_dataarrays) + is_coords_unique_dict = _get_is_nondimensional_coords_dict(data_arrays) # Prepend dataset name, if not unique or no pretty-format desired - new_dict_dataarrays = dict_dataarrays.copy() + new_dict_dataarrays = data_arrays.copy() for coord_name, unique in is_coords_unique_dict.items(): if not pretty or not unique: _warn_if_pretty_but_not_unique(pretty, coord_name) @@ -207,11 +203,11 @@ def ensure_unique_nondimensional_coords(dict_dataarrays, pretty=False): return new_dict_dataarrays -def check_unique_projection_coords(dict_dataarrays): +def check_unique_projection_coords(data_arrays: dict[str, xr.DataArray]) -> None: """Check that all datasets share the same projection coordinates x/y.""" unique_x = set() unique_y = set() - for dataarray in dict_dataarrays.values(): + for dataarray in data_arrays.values(): if "y" in dataarray.dims: token_y = tokenize(dataarray["y"].data) unique_y.add(token_y) @@ -223,8 +219,7 @@ def check_unique_projection_coords(dict_dataarrays): "Please group them by area or save them in separate files.") - -def add_coordinates_attrs_coords(dict_dataarrays): +def add_coordinates_attrs_coords(data_arrays: dict[str, xr.DataArray]) -> dict[str, xr.DataArray]: """Add to DataArrays the coordinates specified in the 'coordinates' attribute. It deal with the 'coordinates' attributes indicating lat/lon coords @@ -236,51 +231,55 @@ def add_coordinates_attrs_coords(dict_dataarrays): In the final call to `xr.Dataset.to_netcdf()` all coordinate relations will be resolved and the `coordinates` attributes be set automatically. 
""" - for dataarray_name in dict_dataarrays.keys(): - dict_dataarrays = _add_declared_coordinates(dict_dataarrays, - dataarray_name=dataarray_name) + for dataarray_name in data_arrays.keys(): + data_arrays = _add_declared_coordinates(data_arrays, + dataarray_name=dataarray_name) # Drop 'coordinates' attribute in any case to avoid conflicts in xr.Dataset.to_netcdf() - dict_dataarrays[dataarray_name].attrs.pop("coordinates", None) - return dict_dataarrays + data_arrays[dataarray_name].attrs.pop("coordinates", None) + return data_arrays -def _add_declared_coordinates(dict_dataarrays, dataarray_name): +def _add_declared_coordinates(data_arrays: dict[str, xr.DataArray], dataarray_name: str) -> dict[str, xr.DataArray]: """Add declared coordinates to the dataarray if they exist.""" - dataarray = dict_dataarrays[dataarray_name] + dataarray = data_arrays[dataarray_name] declared_coordinates = _get_coordinates_list(dataarray) for coord in declared_coordinates: if coord not in dataarray.coords: - dict_dataarrays = _try_add_coordinate(dict_dataarrays, - dataarray_name=dataarray_name, - coord=coord) - return dict_dataarrays + data_arrays = _try_add_coordinate(data_arrays, + dataarray_name=dataarray_name, + coord=coord) + return data_arrays -def _try_add_coordinate(dict_dataarrays, dataarray_name, coord): +def _try_add_coordinate( + data_arrays: dict[str, xr.DataArray], + dataarray_name: str, + coord: str +) -> dict[str, xr.DataArray]: """Try to add a coordinate to the dataarray, warn if not possible.""" try: - dataarray_dims = set(dict_dataarrays[dataarray_name].dims) - coordinate_dims = set(dict_dataarrays[coord].dims) + dataarray_dims = set(data_arrays[dataarray_name].dims) + coordinate_dims = set(data_arrays[coord].dims) dimensions_to_squeeze = list(coordinate_dims - dataarray_dims) - dict_dataarrays[dataarray_name][coord] = dict_dataarrays[coord].squeeze(dimensions_to_squeeze, drop=True) + data_arrays[dataarray_name][coord] = data_arrays[coord].squeeze(dimensions_to_squeeze, drop=True) except KeyError: warnings.warn( f'Coordinate "{coord}" referenced by dataarray {dataarray_name} does not ' 'exist, dropping reference.', stacklevel=2 ) - return dict_dataarrays + return data_arrays -def _get_coordinates_list(dataarray): +def _get_coordinates_list(data_arr: xr.DataArray) -> list[str]: """Return a list with the coordinates names specified in the 'coordinates' attribute.""" - declared_coordinates = dataarray.attrs.get("coordinates", []) + declared_coordinates = data_arr.attrs.get("coordinates", []) if isinstance(declared_coordinates, str): declared_coordinates = declared_coordinates.split(" ") return declared_coordinates -def add_time_bounds_dimension(ds, time="time"): +def add_time_bounds_dimension(ds: xr.Dataset, time: str = "time") -> xr.Dataset: """Add time bound dimension to xr.Dataset.""" start_times = [] end_times = [] diff --git a/satpy/cf/dataarray.py b/satpy/cf/data_array.py similarity index 74% rename from satpy/cf/dataarray.py rename to satpy/cf/data_array.py index dc2ae7d6c1..ef86953f84 100644 --- a/satpy/cf/dataarray.py +++ b/satpy/cf/data_array.py @@ -17,13 +17,13 @@ import logging import warnings -from satpy.cf.attrs import preprocess_datarray_attrs +from satpy.cf.attrs import preprocess_attrs from satpy.cf.coords import add_xy_coords_attrs, set_cf_time_info logger = logging.getLogger(__name__) -def _handle_dataarray_name(original_name, numeric_name_prefix): +def _handle_data_array_name(original_name, numeric_name_prefix): if original_name[0].isdigit(): if numeric_name_prefix: new_name = 
numeric_name_prefix + original_name @@ -38,14 +38,14 @@ def _handle_dataarray_name(original_name, numeric_name_prefix): return original_name, new_name -def _preprocess_dataarray_name(dataarray, numeric_name_prefix, include_orig_name): +def _preprocess_data_array_name(dataarray, numeric_name_prefix, include_orig_name): """Change the DataArray name by prepending numeric_name_prefix if the name is a digit.""" original_name = None named_has_changed = False dataarray = dataarray.copy() if "name" in dataarray.attrs: original_name = dataarray.attrs.pop("name") - original_name, new_name = _handle_dataarray_name(original_name, numeric_name_prefix) + original_name, new_name = _handle_data_array_name(original_name, numeric_name_prefix) dataarray = dataarray.rename(new_name) named_has_changed = original_name != new_name @@ -54,12 +54,12 @@ def _preprocess_dataarray_name(dataarray, numeric_name_prefix, include_orig_name return dataarray -def make_cf_dataarray(dataarray, - epoch=None, - flatten_attrs=False, - exclude_attrs=None, - include_orig_name=True, - numeric_name_prefix="CHANNEL_"): +def make_cf_data_array(dataarray, + epoch=None, + flatten_attrs=False, + exclude_attrs=None, + include_orig_name=True, + numeric_name_prefix="CHANNEL_"): """Make the xr.DataArray CF-compliant. Args: @@ -76,12 +76,12 @@ def make_cf_dataarray(dataarray, Returns: xr.DataArray: A CF-compliant xr.DataArray. """ - dataarray = _preprocess_dataarray_name(dataarray=dataarray, - numeric_name_prefix=numeric_name_prefix, - include_orig_name=include_orig_name) - dataarray = preprocess_datarray_attrs(dataarray=dataarray, - flatten_attrs=flatten_attrs, - exclude_attrs=exclude_attrs) + dataarray = _preprocess_data_array_name(dataarray=dataarray, + numeric_name_prefix=numeric_name_prefix, + include_orig_name=include_orig_name) + dataarray = preprocess_attrs(data_arr=dataarray, + flatten_attrs=flatten_attrs, + exclude_attrs=exclude_attrs) dataarray = add_xy_coords_attrs(dataarray) if "time" in dataarray.coords: dataarray = set_cf_time_info(dataarray, epoch=epoch) diff --git a/satpy/cf/datasets.py b/satpy/cf/datasets.py index cab71de58c..2c5080ee42 100644 --- a/satpy/cf/datasets.py +++ b/satpy/cf/datasets.py @@ -92,7 +92,7 @@ def _collect_cf_dataset(list_dataarrays, ensure_unique_nondimensional_coords, has_projection_coords, ) - from satpy.cf.dataarray import make_cf_dataarray + from satpy.cf.data_array import make_cf_data_array # Create dictionary of input datarrays # --> Since keys=None, it doesn't never retrieve ancillary variables !!! @@ -134,12 +134,12 @@ def _collect_cf_dataset(list_dataarrays, # --> NOTE: If the input list_dataarrays have different pyresample areas with the same name # area information can be lost here !!! 
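# Illustrative note (editor's reading, not part of the patch): the NOTE above
# appears to hold because the grid-mapping variable produced by area2cf() is
# named after area.area_id (see satpy/cf/area.py earlier in this series),
# while the collected DataArrays are keyed by variable name, so two different
# AreaDefinitions that share an area_id would overwrite one another here.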
for new_dataarray in list_new_dataarrays: - new_dataarray = make_cf_dataarray(new_dataarray, - epoch=epoch, - flatten_attrs=flatten_attrs, - exclude_attrs=exclude_attrs, - include_orig_name=include_orig_name, - numeric_name_prefix=numeric_name_prefix) + new_dataarray = make_cf_data_array(new_dataarray, + epoch=epoch, + flatten_attrs=flatten_attrs, + exclude_attrs=exclude_attrs, + include_orig_name=include_orig_name, + numeric_name_prefix=numeric_name_prefix) dict_cf_dataarrays[new_dataarray.name] = new_dataarray # Check all DataArrays have same projection coordinates diff --git a/satpy/tests/cf_tests/test_dataaarray.py b/satpy/tests/cf_tests/test_dataaarray.py index d0154cd84f..50e5b54424 100644 --- a/satpy/tests/cf_tests/test_dataaarray.py +++ b/satpy/tests/cf_tests/test_dataaarray.py @@ -25,23 +25,23 @@ def test_preprocess_dataarray_name(): """Test saving an array to netcdf/cf where dataset name starting with a digit with prefix include orig name.""" from satpy import Scene - from satpy.cf.dataarray import _preprocess_dataarray_name + from satpy.cf.data_array import _preprocess_data_array_name scn = Scene() scn["1"] = xr.DataArray([1, 2, 3]) dataarray = scn["1"] # If numeric_name_prefix is a string, test add the original_name attributes - out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix="TEST", include_orig_name=True) + out_da = _preprocess_data_array_name(dataarray, numeric_name_prefix="TEST", include_orig_name=True) assert out_da.attrs["original_name"] == "1" # If numeric_name_prefix is empty string, False or None, test do not add original_name attributes - out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix="", include_orig_name=True) + out_da = _preprocess_data_array_name(dataarray, numeric_name_prefix="", include_orig_name=True) assert "original_name" not in out_da.attrs - out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix=False, include_orig_name=True) + out_da = _preprocess_data_array_name(dataarray, numeric_name_prefix=False, include_orig_name=True) assert "original_name" not in out_da.attrs - out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix=None, include_orig_name=True) + out_da = _preprocess_data_array_name(dataarray, numeric_name_prefix=None, include_orig_name=True) assert "original_name" not in out_da.attrs @@ -49,7 +49,7 @@ def test_make_cf_dataarray_lonlat(): """Test correct CF encoding for area with lon/lat units.""" from pyresample import create_area_def - from satpy.cf.dataarray import make_cf_dataarray + from satpy.cf.data_array import make_cf_data_array from satpy.resample import add_crs_xy_coords area = create_area_def("mavas", 4326, shape=(5, 5), @@ -59,7 +59,7 @@ def test_make_cf_dataarray_lonlat(): dims=("y", "x"), attrs={"area": area}) da = add_crs_xy_coords(da, area) - new_da = make_cf_dataarray(da) + new_da = make_cf_data_array(da) assert new_da["x"].attrs["units"] == "degrees_east" assert new_da["y"].attrs["units"] == "degrees_north" @@ -69,7 +69,7 @@ class TestCfDataArray: def test_make_cf_dataarray(self): """Test the conversion of a DataArray to a CF-compatible DataArray.""" - from satpy.cf.dataarray import make_cf_dataarray + from satpy.cf.data_array import make_cf_data_array from satpy.tests.cf_tests._test_data import get_test_attrs from satpy.tests.utils import assert_dict_array_equality @@ -94,7 +94,7 @@ def test_make_cf_dataarray(self): coords={"y": [0, 1], "x": [1, 2], "acq_time": ("y", [3, 4])}) # Test conversion to something cf-compliant - res = make_cf_dataarray(arr) + res = 
make_cf_data_array(arr) np.testing.assert_array_equal(res["x"], arr["x"]) np.testing.assert_array_equal(res["y"], arr["y"]) np.testing.assert_array_equal(res["acq_time"], arr["acq_time"]) @@ -103,14 +103,14 @@ def test_make_cf_dataarray(self): assert_dict_array_equality(res.attrs, attrs_expected) # Test attribute kwargs - res_flat = make_cf_dataarray(arr, flatten_attrs=True, exclude_attrs=["int"]) + res_flat = make_cf_data_array(arr, flatten_attrs=True, exclude_attrs=["int"]) attrs_expected_flat.pop("int") assert_dict_array_equality(res_flat.attrs, attrs_expected_flat) def test_make_cf_dataarray_one_dimensional_array(self): """Test the conversion of an 1d DataArray to a CF-compatible DataArray.""" - from satpy.cf.dataarray import make_cf_dataarray + from satpy.cf.data_array import make_cf_data_array arr = xr.DataArray(np.array([1, 2, 3, 4]), attrs={}, dims=("y",), coords={"y": [0, 1, 2, 3], "acq_time": ("y", [0, 1, 2, 3])}) - _ = make_cf_dataarray(arr) + _ = make_cf_data_array(arr) diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index b64a288213..4f67215bd1 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -335,16 +335,16 @@ def da2cf(dataarray, epoch=None, flatten_attrs=False, exclude_attrs=None, numeric_name_prefix (str): Prepend dataset name with this if starting with a digit. """ - from satpy.cf.dataarray import make_cf_dataarray + from satpy.cf.data_array import make_cf_data_array warnings.warn("CFWriter.da2cf is deprecated." - "Use satpy.cf.dataarray.make_cf_dataarray instead.", + "Use satpy.cf.dataarray.make_cf_data_array instead.", DeprecationWarning, stacklevel=3) - return make_cf_dataarray(dataarray=dataarray, - epoch=epoch, - flatten_attrs=flatten_attrs, - exclude_attrs=exclude_attrs, - include_orig_name=include_orig_name, - numeric_name_prefix=numeric_name_prefix) + return make_cf_data_array(dataarray=dataarray, + epoch=epoch, + flatten_attrs=flatten_attrs, + exclude_attrs=exclude_attrs, + include_orig_name=include_orig_name, + numeric_name_prefix=numeric_name_prefix) @staticmethod def update_encoding(dataset, to_netcdf_kwargs): From 055cbef95d2d13b3957206f94d3c29b775ed4e8d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 16 Nov 2023 21:22:03 -0600 Subject: [PATCH 0735/1416] Reduce code complexity --- satpy/tests/cf_tests/test_area.py | 42 +++++++++++-------------------- satpy/tests/utils.py | 24 ++++++++++++------ 2 files changed, 30 insertions(+), 36 deletions(-) diff --git a/satpy/tests/cf_tests/test_area.py b/satpy/tests/cf_tests/test_area.py index cf8548d568..a00df3925e 100644 --- a/satpy/tests/cf_tests/test_area.py +++ b/satpy/tests/cf_tests/test_area.py @@ -18,6 +18,7 @@ """Tests for the CF Area.""" import dask.array as da import numpy as np +import pytest import xarray as xr from pyresample import AreaDefinition, SwathDefinition @@ -71,7 +72,7 @@ def test_area2cf(self): assert "latitude" in res[0].coords assert "grid_mapping" not in res[0].attrs - def test__add_grid_mapping(self): + def test_add_grid_mapping(self): """Test the conversion from pyresample area object to CF grid mapping.""" from satpy.cf.area import _add_grid_mapping @@ -255,7 +256,8 @@ def _gm_matches(gmapping, expected): assert new_ds.attrs["grid_mapping"] == "geos" _gm_matches(grid_mapping, geos_expected) - def test__add_lonlat_coords(self): + @pytest.mark.parametrize("dims", [("y", "x"), ("bands", "y", "x")]) + def test_add_lonlat_coords(self, dims): """Test the conversion from areas to lon/lat.""" from satpy.cf.area import _add_lonlat_coords @@ 
-268,35 +270,19 @@ def test__add_lonlat_coords(self): [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662] ) lons_ref, lats_ref = area.get_lonlats() - dataarray = xr.DataArray(data=[[1, 2], [3, 4]], dims=("y", "x"), attrs={"area": area}) + if len(dims) == 2: + data_arr = xr.DataArray(data=[[1, 2], [3, 4]], dims=dims, attrs={"area": area}) + else: + data_arr = xr.DataArray( + data=da.from_array(np.arange(3 * 10 * 10).reshape(3, 10, 10), chunks=(1, 5, 5)), + dims=("bands", "y", "x"), + attrs={"area": area}, + ) - res = _add_lonlat_coords(dataarray) + res = _add_lonlat_coords(data_arr) # original should be unmodified - assert "longitude" not in dataarray.coords - assert set(res.coords) == {"longitude", "latitude"} - lat = res["latitude"] - lon = res["longitude"] - np.testing.assert_array_equal(lat.data, lats_ref) - np.testing.assert_array_equal(lon.data, lons_ref) - assert {"name": "latitude", "standard_name": "latitude", "units": "degrees_north"}.items() <= lat.attrs.items() - assert {"name": "longitude", "standard_name": "longitude", "units": "degrees_east"}.items() <= lon.attrs.items() - - area = AreaDefinition( - "seviri", - "Native SEVIRI grid", - "geos", - "+a=6378169.0 +h=35785831.0 +b=6356583.8 +lon_0=0 +proj=geos", - 10, 10, - [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662] - ) - lons_ref, lats_ref = area.get_lonlats() - dataarray = xr.DataArray(data=da.from_array(np.arange(3 * 10 * 10).reshape(3, 10, 10), chunks=(1, 5, 5)), - dims=("bands", "y", "x"), attrs={"area": area}) - res = _add_lonlat_coords(dataarray) - - # original should be unmodified - assert "longitude" not in dataarray.coords + assert "longitude" not in data_arr.coords assert set(res.coords) == {"longitude", "latitude"} lat = res["latitude"] lon = res["longitude"] diff --git a/satpy/tests/utils.py b/satpy/tests/utils.py index 1fb736d427..a6ebf8753e 100644 --- a/satpy/tests/utils.py +++ b/satpy/tests/utils.py @@ -18,6 +18,7 @@ from contextlib import contextmanager from datetime import datetime +from typing import Any from unittest import mock import dask.array as da @@ -414,14 +415,21 @@ def assert_dict_array_equality(d1, d2): assert set(d1.keys()) == set(d2.keys()) for key, val1 in d1.items(): val2 = d2[key] - if isinstance(val1, np.ndarray): - np.testing.assert_array_equal(val1, val2) - assert val1.dtype == val2.dtype - else: - assert val1 == val2 - if isinstance(val1, (np.floating, np.integer, np.bool_)): - assert isinstance(val2, np.generic) - assert val1.dtype == val2.dtype + compare_func = _compare_numpy_array if isinstance(val1, np.ndarray) else _compare_nonarray + compare_func(val1, val2) + + +def _compare_numpy_array(val1: np.ndarray, val2: np.ndarray) -> None: + np.testing.assert_array_equal(val1, val2) + assert val1.dtype == val2.dtype + + +def _compare_nonarray(val1: Any, val2: Any) -> None: + assert val1 == val2 + if isinstance(val1, (np.floating, np.integer, np.bool_)): + assert isinstance(val2, np.generic) + assert val1.dtype == val2.dtype + def xfail_skyfield_unstable_numpy2(): """Determine if skyfield-based tests should be xfail in the unstable numpy 2.x environment.""" From 2a65eea467d6bb0526fce172a121aa4f95c146cb Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 16 Nov 2023 21:48:34 -0600 Subject: [PATCH 0736/1416] Refactor CF area tests --- satpy/tests/cf_tests/test_area.py | 70 +++++++++++++++---------------- 1 file changed, 34 insertions(+), 36 deletions(-) diff --git a/satpy/tests/cf_tests/test_area.py b/satpy/tests/cf_tests/test_area.py 
index a00df3925e..0539ebeb86 100644 --- a/satpy/tests/cf_tests/test_area.py +++ b/satpy/tests/cf_tests/test_area.py @@ -22,18 +22,25 @@ import xarray as xr from pyresample import AreaDefinition, SwathDefinition +from satpy.cf.area import area2cf + + +@pytest.fixture() +def input_data_arr() -> xr.DataArray: + return xr.DataArray( + data=[[1, 2], [3, 4]], + dims=("y", "x"), + coords={"y": [1, 2], "x": [3, 4]}, + attrs={"name": "var1"}, + ) + class TestCFArea: """Test case for CF Area.""" - def test_area2cf(self): + @pytest.mark.parametrize("include_lonlats", [False, True]) + def test_area2cf_geos_area_nolonlats(self, input_data_arr, include_lonlats): """Test the conversion of an area to CF standards.""" - from satpy.cf.area import area2cf - - ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=("y", "x"), coords={"y": [1, 2], "x": [3, 4]}, - attrs={"name": "var1"}) - - # a) Area Definition and strict=False geos = AreaDefinition( area_id="geos", description="geos", @@ -41,32 +48,21 @@ def test_area2cf(self): projection={"proj": "geos", "h": 35785831., "a": 6378169., "b": 6356583.8}, width=2, height=2, area_extent=[-1, -1, 1, 1]) - ds = ds_base.copy(deep=True) - ds.attrs["area"] = geos + input_data_arr.attrs["area"] = geos - res = area2cf(ds, include_lonlats=False) + res = area2cf(input_data_arr, include_lonlats=include_lonlats) assert len(res) == 2 assert res[0].size == 1 # grid mapping variable assert res[0].name == res[1].attrs["grid_mapping"] + if include_lonlats: + assert "longitude" in res[1].coords + assert "latitude" in res[1].coords - # b) Area Definition and include_lonlats=False - ds = ds_base.copy(deep=True) - ds.attrs["area"] = geos - res = area2cf(ds, include_lonlats=True) - # same as above - assert len(res) == 2 - assert res[0].size == 1 # grid mapping variable - assert res[0].name == res[1].attrs["grid_mapping"] - # but now also have the lon/lats - assert "longitude" in res[1].coords - assert "latitude" in res[1].coords - - # c) Swath Definition + def test_area2cf_swath(self, input_data_arr): swath = SwathDefinition(lons=[[1, 1], [2, 2]], lats=[[1, 2], [1, 2]]) - ds = ds_base.copy(deep=True) - ds.attrs["area"] = swath + input_data_arr.attrs["area"] = swath - res = area2cf(ds, include_lonlats=False) + res = area2cf(input_data_arr, include_lonlats=False) assert len(res) == 1 assert "longitude" in res[0].coords assert "latitude" in res[0].coords @@ -76,15 +72,6 @@ def test_add_grid_mapping(self): """Test the conversion from pyresample area object to CF grid mapping.""" from satpy.cf.area import _add_grid_mapping - def _gm_matches(gmapping, expected): - """Assert that all keys in ``expected`` match the values in ``gmapping``.""" - for attr_key, attr_val in expected.attrs.items(): - test_val = gmapping.attrs[attr_key] - if attr_val is None or isinstance(attr_val, str): - assert test_val == attr_val - else: - np.testing.assert_almost_equal(test_val, attr_val, decimal=3) - ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=("y", "x"), coords={"y": [1, 2], "x": [3, 4]}, attrs={"name": "var1"}) @@ -261,12 +248,13 @@ def test_add_lonlat_coords(self, dims): """Test the conversion from areas to lon/lat.""" from satpy.cf.area import _add_lonlat_coords + width, height = (2, 2) if len(dims) == 2 else (10, 10) area = AreaDefinition( "seviri", "Native SEVIRI grid", "geos", "+a=6378169.0 +h=35785831.0 +b=6356583.8 +lon_0=0 +proj=geos", - 2, 2, + width, height, [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662] ) lons_ref, lats_ref = area.get_lonlats() @@ -290,3 +278,13 
@@ def test_add_lonlat_coords(self, dims): np.testing.assert_array_equal(lon.data, lons_ref) assert {"name": "latitude", "standard_name": "latitude", "units": "degrees_north"}.items() <= lat.attrs.items() assert {"name": "longitude", "standard_name": "longitude", "units": "degrees_east"}.items() <= lon.attrs.items() + + +def _gm_matches(gmapping, expected): + """Assert that all keys in ``expected`` match the values in ``gmapping``.""" + for attr_key, attr_val in expected.attrs.items(): + test_val = gmapping.attrs[attr_key] + if attr_val is None or isinstance(attr_val, str): + assert test_val == attr_val + else: + np.testing.assert_almost_equal(test_val, attr_val, decimal=3) From 63e8407f6e23c68a0a5afdfbf9fe3e3e2f17edc6 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 16 Nov 2023 21:59:59 -0600 Subject: [PATCH 0737/1416] Refactor CF area tests a little more --- satpy/tests/cf_tests/test_area.py | 94 ++++++++++++++----------------- 1 file changed, 41 insertions(+), 53 deletions(-) diff --git a/satpy/tests/cf_tests/test_area.py b/satpy/tests/cf_tests/test_area.py index 0539ebeb86..31b51b6cd9 100644 --- a/satpy/tests/cf_tests/test_area.py +++ b/satpy/tests/cf_tests/test_area.py @@ -22,7 +22,7 @@ import xarray as xr from pyresample import AreaDefinition, SwathDefinition -from satpy.cf.area import area2cf +from satpy.cf.area import _add_grid_mapping, area2cf @pytest.fixture() @@ -68,14 +68,12 @@ def test_area2cf_swath(self, input_data_arr): assert "latitude" in res[0].coords assert "grid_mapping" not in res[0].attrs - def test_add_grid_mapping(self): - """Test the conversion from pyresample area object to CF grid mapping.""" - from satpy.cf.area import _add_grid_mapping + def test_add_grid_mapping_cf_repr(self, input_data_arr): + """Test the conversion from pyresample area object to CF grid mapping. - ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=("y", "x"), coords={"y": [1, 2], "x": [3, 4]}, - attrs={"name": "var1"}) + Projection has a corresponding CF representation (e.g. geos). - # a) Projection has a corresponding CF representation (e.g. geos) + """ a = 6378169. b = 6356583.8 h = 35785831. @@ -97,9 +95,8 @@ def test_add_grid_mapping(self): # 'sweep_angle_axis': None, }) - ds = ds_base.copy() - ds.attrs["area"] = geos - new_ds, grid_mapping = _add_grid_mapping(ds) + input_data_arr.attrs["area"] = geos + new_ds, grid_mapping = _add_grid_mapping(input_data_arr) if "sweep_angle_axis" in grid_mapping.attrs: # older versions of pyproj might not include this assert grid_mapping.attrs["sweep_angle_axis"] == "y" @@ -107,9 +104,14 @@ def test_add_grid_mapping(self): assert new_ds.attrs["grid_mapping"] == "geos" _gm_matches(grid_mapping, geos_expected) # should not have been modified - assert "grid_mapping" not in ds.attrs + assert "grid_mapping" not in input_data_arr.attrs + + def test_add_grid_mapping_no_cf_repr(self, input_data_arr): + """Test the conversion from pyresample area object to CF grid mapping. + + Projection does not have a corresponding CF representation (e.g. COSMO). 
- # b) Projection does not have a corresponding CF representation (COSMO) + """ cosmo7 = AreaDefinition( area_id="cosmo7", description="cosmo7", @@ -119,11 +121,9 @@ def test_add_grid_mapping(self): width=597, height=510, area_extent=[-1812933, -1003565, 814056, 1243448] ) + input_data_arr.attrs["area"] = cosmo7 - ds = ds_base.copy() - ds.attrs["area"] = cosmo7 - - new_ds, grid_mapping = _add_grid_mapping(ds) + new_ds, grid_mapping = _add_grid_mapping(input_data_arr) assert "crs_wkt" in grid_mapping.attrs wkt = grid_mapping.attrs["crs_wkt"] assert 'ELLIPSOID["WGS 84"' in wkt @@ -133,7 +133,12 @@ def test_add_grid_mapping(self): assert 'PARAMETER["o_lon_p",-5.465' in wkt assert new_ds.attrs["grid_mapping"] == "cosmo7" - # c) Projection Transverse Mercator + def test_add_grid_mapping_transverse_mercator(self, input_data_arr): + """Test the conversion from pyresample area object to CF grid mapping. + + Projection is transverse mercator. + + """ lat_0 = 36.5 lon_0 = 15.0 @@ -154,13 +159,17 @@ def test_add_grid_mapping(self): "false_northing": 0., }) - ds = ds_base.copy() - ds.attrs["area"] = tmerc - new_ds, grid_mapping = _add_grid_mapping(ds) + input_data_arr.attrs["area"] = tmerc + new_ds, grid_mapping = _add_grid_mapping(input_data_arr) assert new_ds.attrs["grid_mapping"] == "tmerc" _gm_matches(grid_mapping, tmerc_expected) - # d) Projection that has a representation but no explicit a/b + def test_add_grid_mapping_cf_repr_no_ab(self, input_data_arr): + """Test the conversion from pyresample area object to CF grid mapping. + + Projection has a corresponding CF representation but no explicit a/b. + + """ h = 35785831. geos = AreaDefinition( area_id="geos", @@ -175,19 +184,24 @@ def test_add_grid_mapping(self): "latitude_of_projection_origin": 0, "longitude_of_projection_origin": 0, "grid_mapping_name": "geostationary", + "reference_ellipsoid_name": "WGS 84", # 'semi_major_axis': 6378137.0, # 'semi_minor_axis': 6356752.314, # 'sweep_angle_axis': None, }) - ds = ds_base.copy() - ds.attrs["area"] = geos - new_ds, grid_mapping = _add_grid_mapping(ds) + input_data_arr.attrs["area"] = geos + new_ds, grid_mapping = _add_grid_mapping(input_data_arr) assert new_ds.attrs["grid_mapping"] == "geos" _gm_matches(grid_mapping, geos_expected) - # e) oblique Mercator + def test_add_grid_mapping_oblique_mercator(self, input_data_arr): + """Test the conversion from pyresample area object to CF grid mapping. + + Projection is oblique mercator. + + """ area = AreaDefinition( area_id="omerc_otf", description="On-the-fly omerc area", @@ -211,38 +225,12 @@ def test_add_grid_mapping(self): "reference_ellipsoid_name": "WGS 84"} omerc_expected = xr.DataArray(data=0, attrs=omerc_dict) - ds = ds_base.copy() - ds.attrs["area"] = area - new_ds, grid_mapping = _add_grid_mapping(ds) + input_data_arr.attrs["area"] = area + new_ds, grid_mapping = _add_grid_mapping(input_data_arr) assert new_ds.attrs["grid_mapping"] == "omerc_otf" _gm_matches(grid_mapping, omerc_expected) - # f) Projection that has a representation but no explicit a/b - h = 35785831. 
-        geos = AreaDefinition(
-            area_id="geos",
-            description="geos",
-            proj_id="geos",
-            projection={"proj": "geos", "h": h, "datum": "WGS84", "ellps": "GRS80",
-                        "lat_0": 0, "lon_0": 0},
-            width=2, height=2,
-            area_extent=[-1, -1, 1, 1])
-        geos_expected = xr.DataArray(data=0,
-                                     attrs={"perspective_point_height": h,
-                                            "latitude_of_projection_origin": 0,
-                                            "longitude_of_projection_origin": 0,
-                                            "grid_mapping_name": "geostationary",
-                                            "reference_ellipsoid_name": "WGS 84",
-                                            })
-
-        ds = ds_base.copy()
-        ds.attrs["area"] = geos
-        new_ds, grid_mapping = _add_grid_mapping(ds)
-
-        assert new_ds.attrs["grid_mapping"] == "geos"
-        _gm_matches(grid_mapping, geos_expected)
-
     @pytest.mark.parametrize("dims", [("y", "x"), ("bands", "y", "x")])
     def test_add_lonlat_coords(self, dims):
         """Test the conversion from areas to lon/lat."""

From 5706549920ae25c7b0053e1fa2e6a431f8213604 Mon Sep 17 00:00:00 2001
From: Florian Fichtner <12199342+fwfichtner@users.noreply.github.com>
Date: Fri, 17 Nov 2023 10:36:46 +0100
Subject: [PATCH 0738/1416] add unittest cloud_flags

---
 satpy/tests/reader_tests/test_eps_l1b.py | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/satpy/tests/reader_tests/test_eps_l1b.py b/satpy/tests/reader_tests/test_eps_l1b.py
index 22035b81e7..d9a484face 100644
--- a/satpy/tests/reader_tests/test_eps_l1b.py
+++ b/satpy/tests/reader_tests/test_eps_l1b.py
@@ -152,6 +152,15 @@ def test_angles(self):
         assert res.attrs["sensor"] == "avhrr-3"
         assert res.attrs["name"] == "solar_zenith_angle"

+    def test_cloud_flags(self):
+        """Test getting the cloud flags."""
+        did = make_dataid(name="cloud_flags")
+        res = self.fh.get_dataset(did, {})
+        assert isinstance(res, xr.DataArray)
+        assert res.attrs["platform_name"] == "Metop-C"
+        assert res.attrs["sensor"] == "avhrr-3"
+        assert res.attrs["name"] == "cloud_flags"
+
     @mock.patch("satpy.readers.eps_l1b.EPSAVHRRFile.__getitem__")
     def test_get_full_angles_twice(self, mock__getitem__):
         """Test get full angles twice."""

From 7691b9c4dcd20d6b9e10100d964b5d39a36d8588 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Fri, 17 Nov 2023 16:58:36 +0000
Subject: [PATCH 0739/1416] [pre-commit.ci] auto fixes from pre-commit.com
 hooks

for more information, see https://pre-commit.ci
---
 satpy/etc/readers/avhrr_l1b_eps.yaml | 6 +++---
 satpy/readers/eps_l1b.py             | 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/satpy/etc/readers/avhrr_l1b_eps.yaml b/satpy/etc/readers/avhrr_l1b_eps.yaml
index e759d28d66..fbb4b4ec18 100644
--- a/satpy/etc/readers/avhrr_l1b_eps.yaml
+++ b/satpy/etc/readers/avhrr_l1b_eps.yaml
@@ -89,7 +89,7 @@ datasets:
       - latitude
     file_type: avhrr_eps
-
+
   latitude:
     name: latitude
     resolution: 1050
@@ -137,8 +137,8 @@ datasets:
     sensor: avhrr-3
     resolution: 1050
     coordinates: [longitude, latitude]
-    file_type: avhrr_eps
-
+    file_type: avhrr_eps
+
 file_types:
   avhrr_eps:
     file_reader: !!python/name:satpy.readers.eps_l1b.EPSAVHRRFile
diff --git a/satpy/readers/eps_l1b.py b/satpy/readers/eps_l1b.py
index 0c35a2eaad..25a050d00a 100644
--- a/satpy/readers/eps_l1b.py
+++ b/satpy/readers/eps_l1b.py
@@ -301,7 +301,7 @@ def get_dataset(self, key, info):
             dataset = self._get_calibrated_dataarray(key)
         elif key['name'] == "cloud_flags":
             array = self["CLOUD_INFORMATION"]
-            dataset = create_xarray(array)
+            dataset = create_xarray(array)
         else:
             logger.info("Can't load channel in eps_l1b: " + str(key["name"]))
             return

From c1c2240af91723da74b49ced23fe7e6f49b9fb69 Mon Sep 17 00:00:00 2001
From: David Hoese
Date:
Fri, 17 Nov 2023 11:01:43 -0600 Subject: [PATCH 0740/1416] Refactor angle loading in eps_l1b.py --- satpy/readers/eps_l1b.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/satpy/readers/eps_l1b.py b/satpy/readers/eps_l1b.py index 25a050d00a..15a345de2b 100644 --- a/satpy/readers/eps_l1b.py +++ b/satpy/readers/eps_l1b.py @@ -316,16 +316,14 @@ def get_dataset(self, key, info): def _get_angle_dataarray(self, key): """Get an angle dataarray.""" - sun_azi, sun_zen, sat_azi, sat_zen = self.get_full_angles() - if key["name"] == "solar_zenith_angle": - dataset = create_xarray(sun_zen) - elif key["name"] == "solar_azimuth_angle": - dataset = create_xarray(sun_azi) - if key["name"] == "satellite_zenith_angle": - dataset = create_xarray(sat_zen) - elif key["name"] == "satellite_azimuth_angle": - dataset = create_xarray(sat_azi) - return dataset + arr_index = { + "solar_azimuth_angle": 0, + "solar_zenith_angle": 1, + "satellite_azimuth_angle": 2, + "satellite_zenith_angle": 3, + }[key["name"] + data = self.get_full_angles()[arr_index] + return create_xarray(data) @cached_property def three_a_mask(self): From 41815049b33f29db3bc70dcb21b8223dd64350c6 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 17 Nov 2023 11:06:48 -0600 Subject: [PATCH 0741/1416] More refactoring in eps_l1b.py --- satpy/readers/eps_l1b.py | 37 ++++++++++++++++++++----------------- 1 file changed, 20 insertions(+), 17 deletions(-) diff --git a/satpy/readers/eps_l1b.py b/satpy/readers/eps_l1b.py index 15a345de2b..bd03f40820 100644 --- a/satpy/readers/eps_l1b.py +++ b/satpy/readers/eps_l1b.py @@ -287,22 +287,9 @@ def get_dataset(self, key, info): if self.sections is None: self._read_all() - if key["name"] in ["longitude", "latitude"]: - lons, lats = self.get_full_lonlats() - if key["name"] == "longitude": - dataset = create_xarray(lons) - else: - dataset = create_xarray(lats) - - elif key["name"] in ["solar_zenith_angle", "solar_azimuth_angle", - "satellite_zenith_angle", "satellite_azimuth_angle"]: - dataset = self._get_angle_dataarray(key) - elif key["name"] in ["1", "2", "3a", "3A", "3b", "3B", "4", "5"]: - dataset = self._get_calibrated_dataarray(key) - elif key['name'] == "cloud_flags": - array = self["CLOUD_INFORMATION"] - dataset = create_xarray(array) - else: + try: + dataset = self._get_data_array(key) + except KeyError: logger.info("Can't load channel in eps_l1b: " + str(key["name"])) return @@ -314,6 +301,22 @@ def get_dataset(self, key, info): dataset.attrs.update(key.to_dict()) return dataset + def _get_data_array(self, key): + name = key["name"] + if name in ["longitude", "latitude"]: + data = self.get_full_lonlats()[int(name == "latitude")] + dataset = create_xarray(data) + elif name in ["solar_zenith_angle", "solar_azimuth_angle", "satellite_zenith_angle", "satellite_azimuth_angle"]: + dataset = self._get_angle_dataarray(key) + elif name in ["1", "2", "3a", "3A", "3b", "3B", "4", "5"]: + dataset = self._get_calibrated_dataarray(key) + elif name == "cloud_flags": + array = self["CLOUD_INFORMATION"] + dataset = create_xarray(array) + else: + raise KeyError(f"Unknown channel: {name}") + return dataset + def _get_angle_dataarray(self, key): """Get an angle dataarray.""" arr_index = { @@ -321,7 +324,7 @@ def _get_angle_dataarray(self, key): "solar_zenith_angle": 1, "satellite_azimuth_angle": 2, "satellite_zenith_angle": 3, - }[key["name"] + }[key["name"]] data = self.get_full_angles()[arr_index] return create_xarray(data) From 504220210195f671bf7a5e91d1ecb6ea2bbd6c0a Mon Sep 17 
00:00:00 2001 From: David Hoese Date: Fri, 17 Nov 2023 14:17:04 -0600 Subject: [PATCH 0742/1416] Fix sphinx docstring error in make_cf_data_array --- satpy/cf/data_array.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/cf/data_array.py b/satpy/cf/data_array.py index ef86953f84..e0b26867c7 100644 --- a/satpy/cf/data_array.py +++ b/satpy/cf/data_array.py @@ -71,7 +71,7 @@ def make_cf_data_array(dataarray, include_orig_name (bool, optional): Include the original dataset name in the netcdf variable attributes. Defaults to True. numeric_name_prefix (str, optional): Prepend dataset name with this if starting with a digit. - Defaults to "CHANNEL_". + Defaults to ``"CHANNEL_"``. Returns: xr.DataArray: A CF-compliant xr.DataArray. From cc366c0d11799374f46301b2c70c1560483dbd95 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 17 Nov 2023 14:17:26 -0600 Subject: [PATCH 0743/1416] Add py.typed file so users get type information in their IDE --- MANIFEST.in | 1 + satpy/py.typed | 0 2 files changed, 1 insertion(+) create mode 100644 satpy/py.typed diff --git a/MANIFEST.in b/MANIFEST.in index 3a7cdb0b43..05c921b367 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -13,4 +13,5 @@ include satpy/version.py include pyproject.toml include setup.py include setup.cfg +include satpy/py.typed global-exclude *.py[cod] diff --git a/satpy/py.typed b/satpy/py.typed new file mode 100644 index 0000000000..e69de29bb2 From e98eb3ee54831dd2449f7fc73a72c5bdff6b4324 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 20 Nov 2023 11:18:47 +0200 Subject: [PATCH 0744/1416] Cast lons/lats to data dtype only if dtypes don't match and data are floats --- satpy/modifiers/angles.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index 2a032b20b3..1471ba3669 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -403,8 +403,9 @@ def get_cos_sza(data_arr: xr.DataArray) -> xr.DataArray: """ chunks = _geo_chunks_from_data_arr(data_arr) lons, lats = _get_valid_lonlats(data_arr.attrs["area"], chunks) - lons = lons.astype(data_arr.dtype) - lats = lats.astype(data_arr.dtype) + if lons.dtype != data_arr.dtype and np.issubdtype(data_arr.dtype, np.floating): + lons = lons.astype(data_arr.dtype) + lats = lats.astype(data_arr.dtype) cos_sza = _get_cos_sza(data_arr.attrs["start_time"], lons, lats) return _geo_dask_to_data_array(cos_sza) From 5f385c67f103615cf75096b7db55a78d86093acf Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 20 Nov 2023 16:23:55 +0100 Subject: [PATCH 0745/1416] Add support for ir and pol --- satpy/etc/composites/sgli.yaml | 55 ---- satpy/etc/readers/sgli_l1b.yaml | 368 +--------------------- satpy/readers/sgli_l1b.py | 97 +++--- satpy/tests/reader_tests/test_sgli_l1b.py | 280 ++++++++++++---- 4 files changed, 275 insertions(+), 525 deletions(-) diff --git a/satpy/etc/composites/sgli.yaml b/satpy/etc/composites/sgli.yaml index 58f52a1124..451c60d8e6 100644 --- a/satpy/etc/composites/sgli.yaml +++ b/satpy/etc/composites/sgli.yaml @@ -89,50 +89,6 @@ composites: modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: true_color - true_color_land: - compositor: !!python/name:satpy.composites.GenericCompositor - prerequisites: - - name: 'Oa08' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land] - - name: 'Oa06' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land] - - name: 'Oa03' - modifiers: 
[effective_solar_pathlength_corrected, rayleigh_corrected_land] - standard_name: true_color - - true_color_desert: - compositor: !!python/name:satpy.composites.GenericCompositor - prerequisites: - - name: 'Oa08' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] - - name: 'Oa06' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] - - name: 'Oa03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] - standard_name: true_color - - true_color_marine_clean: - compositor: !!python/name:satpy.composites.GenericCompositor - prerequisites: - - name: 'Oa08' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] - - name: 'Oa06' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] - - name: 'Oa03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] - standard_name: true_color - - true_color_marine_tropical: - compositor: !!python/name:satpy.composites.GenericCompositor - prerequisites: - - name: 'Oa08' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] - - name: 'Oa06' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] - - name: 'Oa03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] - standard_name: true_color - true_color_raw: compositor: !!python/name:satpy.composites.FillingCompositor prerequisites: @@ -145,14 +101,3 @@ composites: - name: 'VN3' modifiers: [effective_solar_pathlength_corrected] standard_name: true_color - - ocean_color: - compositor: !!python/name:satpy.composites.GenericCompositor - prerequisites: - - name: 'Oa08' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - - name: 'Oa06' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - - name: 'Oa03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - standard_name: ocean_color diff --git a/satpy/etc/readers/sgli_l1b.yaml b/satpy/etc/readers/sgli_l1b.yaml index cbf5c4989d..9f8108510f 100644 --- a/satpy/etc/readers/sgli_l1b.yaml +++ b/satpy/etc/readers/sgli_l1b.yaml @@ -111,6 +111,8 @@ datasets: solar_zenith_angle: name: solar_zenith_angle sensor: sgli + units: degree + standard_name: solar_zenith_angle resolution: [250, 1000] coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v @@ -119,6 +121,8 @@ datasets: solar_azimuth_angle: name: solar_azimuth_angle sensor: sgli + units: degree + standard_name: solar_azimuth_angle resolution: [250, 1000] coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v @@ -127,6 +131,8 @@ datasets: satellite_zenith_angle: name: satellite_zenith_angle sensor: sgli + units: degree + standard_name: satellite_zenith_angle resolution: [250, 1000] coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v @@ -135,6 +141,8 @@ datasets: satellite_azimuth_angle: name: satellite_azimuth_angle sensor: sgli + units: degree + standard_name: satellite_azimuth_angle resolution: [250, 1000] coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v @@ -334,7 +342,7 @@ datasets: units: "%" coordinates: [longitude_p, latitude_p] file_type: gcom-c_l1b_p - file_key: Image_data/Lt_P1_{pol} + file_key: Image_data/Lt_P1_{polarization} P2: name: P2 @@ -351,7 +359,7 @@ datasets: units: "%" coordinates: [longitude_p, latitude_p] file_type: gcom-c_l1b_p - file_key: Image_data/Lt_P2_{pol} + file_key: Image_data/Lt_P2_{polarization} SW1: name: SW1 @@ -444,359 
+452,3 @@ datasets: coordinates: [longitude_ir, latitude_ir] file_type: gcom-c_l1b_ir file_key: Image_data/Lt_TI02 - - # Oa02: - # name: Oa02 - # sensor: olci - # wavelength: [0.4075, 0.4125, 0.4175] - # resolution: 300 - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # coordinates: [longitude, latitude] - # file_type: esa_l1b - - # Oa03: - # name: Oa03 - # sensor: olci - # wavelength: [0.4375,0.4425,0.4475] - # resolution: 300 - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # coordinates: [longitude, latitude] - # file_type: esa_l1b - - # Oa04: - # name: Oa04 - # sensor: olci - # wavelength: [0.485,0.49,0.495] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa05: - # name: Oa05 - # sensor: olci - # wavelength: [0.505,0.51,0.515] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa06: - # name: Oa06 - # sensor: olci - # wavelength: [0.555,0.56,0.565] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa07: - # name: Oa07 - # sensor: olci - # wavelength: [0.615,0.62,0.625] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa08: - # name: Oa08 - # sensor: olci - # wavelength: [0.66,0.665,0.67] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa09: - # name: Oa09 - # sensor: olci - # wavelength: [0.67,0.67375,0.6775] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa10: - # name: Oa10 - # sensor: olci - # wavelength: [0.6775,0.68125,0.685] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa11: - # name: Oa11 - # sensor: olci - # wavelength: [0.70375,0.70875,0.71375] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # 
radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa12: - # name: Oa12 - # sensor: olci - # wavelength: [0.75,0.75375,0.7575] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa13: - # name: Oa13 - # sensor: olci - # wavelength: [0.76,0.76125,0.7625] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa14: - # name: Oa14 - # sensor: olci - # wavelength: [0.760625, 0.764375, 0.768125] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa15: - # name: Oa15 - # sensor: olci - # wavelength: [0.76625, 0.7675, 0.76875] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa16: - # name: Oa16 - # sensor: olci - # wavelength: [0.77125, 0.77875, 0.78625] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa17: - # name: Oa17 - # sensor: olci - # wavelength: [0.855, 0.865, 0.875] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa18: - # name: Oa18 - # sensor: olci - # wavelength: [0.88, 0.885, 0.89] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa19: - # name: Oa19 - # sensor: olci - # wavelength: [0.895, 0.9, 0.905] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa20: - # name: Oa20 - # sensor: olci - # wavelength: [0.93, 0.94, 0.95] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # Oa21: - # name: Oa21 - # sensor: olci - # wavelength: 
[1.0, 1.02, 1.04] - # resolution: 300 - # coordinates: [longitude, latitude] - # calibration: - # radiance: - # standard_name: toa_outgoing_radiance_per_unit_wavelength - # units: W m-2 um-1 sr-1 - # reflectance: - # standard_name: toa_bidirectional_reflectance - # units: "%" - # file_type: esa_l1b - - # solar_zenith_angle: - # name: solar_zenith_angle - # sensor: olci - # resolution: 300 - # coordinates: [longitude, latitude] - # file_type: esa_angles - - # solar_azimuth_angle: - # name: solar_azimuth_angle - # sensor: olci - # resolution: 300 - # coordinates: [longitude, latitude] - # file_type: esa_angles - - # satellite_zenith_angle: - # name: satellite_zenith_angle - # sensor: olci - # resolution: 300 - # coordinates: [longitude, latitude] - # file_type: esa_angles - - # satellite_azimuth_angle: - # name: satellite_azimuth_angle - # sensor: olci - # resolution: 300 - # coordinates: [longitude, latitude] - # file_type: esa_angles - - # humidity: - # name: humidity - # sensor: olci - # resolution: 300 - # coordinates: [longitude, latitude] - # file_type: esa_meteo - - # sea_level_pressure: - # name: sea_level_pressure - # sensor: olci - # resolution: 300 - # coordinates: [longitude, latitude] - # file_type: esa_meteo - - # total_columnar_water_vapour: - # name: total_columnar_water_vapour - # sensor: olci - # resolution: 300 - # coordinates: [longitude, latitude] - # file_type: esa_meteo - - # total_ozone: - # name: total_ozone - # sensor: olci - # resolution: 300 - # coordinates: [longitude, latitude] - # file_type: esa_meteo diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py index 2dace6d2b3..1a7e076ff9 100644 --- a/satpy/readers/sgli_l1b.py +++ b/satpy/readers/sgli_l1b.py @@ -48,6 +48,10 @@ "K": 1000, "L": 1000} +polarization_keys = {0: "0", + -60: "m60", + 60: "60"} + class HDF5SGLI(BaseFileHandler): """File handler for the SGLI l1b data.""" @@ -75,12 +79,9 @@ def get_dataset(self, key, info): if key["resolution"] != self.resolution: return - # if key["polarization"] is not None: - # pols = {0: '0', -60: 'm60', 60: 'p60'} - # file_key = info['file_key'].format(pol=pols[key["polarization"]]) - # else: - # file_key = info['file_key'] file_key = info["file_key"] + if key["name"].startswith("P"): + file_key = file_key.format(polarization=polarization_keys[key["polarization"]]) h5dataset = self.h5file[file_key] chunks = normalize_chunks(("auto", "auto"), h5dataset.shape, previous_chunks=h5dataset.chunks, dtype=np.float32) @@ -89,19 +90,19 @@ def get_dataset(self, key, info): dataset = xr.DataArray(dataset, attrs=attrs, dims=["y", "x"]) with xr.set_options(keep_attrs=True): - # TODO add ir and polarized channels - if key["name"][:2] in ["VN", "SW", "P1", "P2"]: - dataset = self.get_visible_dataset(key, h5dataset, dataset) - elif key["name"][:-2] in ["longitude", "latitude"]: + if key["name"].startswith(("VN", "SW", "P")): + dataset = self.get_visible_dataset(key, dataset) + elif key["name"].startswith("TI"): + dataset = self.get_ir_dataset(key, dataset) + elif key["name"].startswith(("longitude", "latitude")): resampling_interval = attrs["Resampling_interval"] if resampling_interval != 1: new_lons, new_lats = self.interpolate_lons_lats(resampling_interval) - if key["name"][:-2] == "longitude": + if key["name"].startswith("longitude"): dataset = new_lons else: dataset = new_lats - dataset = xr.DataArray(dataset, attrs=attrs) - return dataset + dataset = xr.DataArray(dataset, attrs=attrs, dims=["y", "x"]) elif key["name"] in ["satellite_azimuth_angle", 
"satellite_zenith_angle"]: resampling_interval = attrs["Resampling_interval"] if resampling_interval != 1: @@ -110,8 +111,7 @@ def get_dataset(self, key, info): dataset = new_azi else: dataset = new_zen - dataset = xr.DataArray(dataset, attrs=attrs) - return dataset + dataset = xr.DataArray(dataset, attrs=attrs, dims=["y", "x"]) elif key["name"] in ["solar_azimuth_angle", "solar_zenith_angle"]: resampling_interval = attrs["Resampling_interval"] if resampling_interval != 1: @@ -120,13 +120,14 @@ def get_dataset(self, key, info): dataset = new_azi else: dataset = new_zen - dataset = xr.DataArray(dataset, attrs=attrs) - return dataset + dataset = xr.DataArray(dataset, attrs=attrs, dims=["y", "x"]) else: raise NotImplementedError() dataset.attrs["platform_name"] = "GCOM-C1" - + dataset.attrs["sensor"] = "sgli" + dataset.attrs["units"] = info["units"] + dataset.attrs["standard_name"] = info["standard_name"] return dataset def interpolate_lons_lats(self, resampling_interval): @@ -145,12 +146,18 @@ def interpolate_solar_angles(self, resampling_interval): return self.interpolate_angles(azi, zen, resampling_interval) def interpolate_angles(self, azi, zen, resampling_interval): - azi = azi * azi.attrs["Slope"] + azi.attrs["Offset"] - zen = zen * zen.attrs["Slope"] + zen.attrs["Offset"] + azi = self.scale_array(azi) + zen = self.scale_array(zen) zen = zen[:] - 90 new_azi, new_zen = self.interpolate_spherical(azi, zen, resampling_interval) return new_azi, new_zen + 90 + def scale_array(self, array): + try: + return array * array.attrs["Slope"] + array.attrs["Offset"] + except KeyError: + return array + def interpolate_spherical(self, azimuthal_angle, polar_angle, resampling_interval): from geotiepoints.geointerpolator import GeoGridInterpolator @@ -165,10 +172,9 @@ def interpolate_spherical(self, azimuthal_angle, polar_angle, resampling_interva return new_azi, new_pol - def get_visible_dataset(self, key, h5dataset, dataset): - + def get_visible_dataset(self, key, dataset): dataset = self.mask_to_14_bits(dataset) - dataset = self.calibrate(dataset, key["calibration"]) + dataset = self.calibrate_vis(dataset, key["calibration"]) #dataset.attrs.update(info) #dataset = self._mask_and_scale(dataset, h5dataset, key) @@ -180,7 +186,7 @@ def mask_to_14_bits(self, dataset): return dataset & dataset.attrs["Mask"].item() - def calibrate(self, dataset, calibration): + def calibrate_vis(self, dataset, calibration): attrs = dataset.attrs if calibration == "counts": return dataset @@ -198,34 +204,23 @@ def get_missing_and_saturated(self, attrs): saturation = int(mask_vals[1].split(b":")[0].strip()) return missing, saturation - # def _mask_and_scale(self, dataset, h5dataset, key): - # with xr.set_options(keep_attrs=True): - # if 'Mask' in h5dataset.attrs: - # mask_value = h5dataset.attrs['Mask'].item() - # dataset = dataset & mask_value - # if 'Bit00(LSB)-13' in h5dataset.attrs: - # mask_info = h5dataset.attrs['Bit00(LSB)-13'].item() - # mask_vals = mask_info.split(b'\n')[1:] - # missing = int(mask_vals[0].split(b':')[0].strip()) - # saturation = int(mask_vals[1].split(b':')[0].strip()) - # dataset = dataset.where(dataset < min(missing, saturation)) - # if 'Maximum_valid_DN' in h5dataset.attrs: - # # dataset = dataset.where(dataset <= h5dataset.attrs['Maximum_valid_DN'].item()) - # pass - # if key["name"][:2] in ['VN', 'SW', 'P1', 'P2']: - # if key["calibration"] == 'counts': - # pass - # if key["calibration"] == 'radiance': - # dataset = dataset * h5dataset.attrs['Slope'] + h5dataset.attrs['Offset'] - # if 
key["calibration"] == 'reflectance': - # # dataset = dataset * h5dataset.attrs['Slope'] + h5dataset.attrs['Offset'] - # # dataset *= np.pi / h5dataset.attrs['Band_weighted_TOA_solar_irradiance'] * 100 - # # equivalent to the two lines above - # dataset = (dataset * h5dataset.attrs['Slope_reflectance'] - # + h5dataset.attrs['Offset_reflectance']) * 100 - # else: - # dataset = dataset * h5dataset.attrs['Slope'] + h5dataset.attrs['Offset'] - # return dataset + def get_ir_dataset(self, key, dataset): + dataset = self.mask_to_14_bits(dataset) + dataset = self.calibrate_ir(dataset, key["calibration"]) + return dataset + + def calibrate_ir(self, dataset, calibration): + attrs = dataset.attrs + if calibration == "counts": + return dataset + elif calibration in ["radiance", "brightness_temperature"]: + calibrated = dataset * attrs["Slope"] + attrs["Offset"] + if calibration == "brightness_temperature": + raise NotImplementedError("Cannot calibrate to brightness temperatures.") + # from pyspectral.radiance_tb_conversion import radiance2tb + # calibrated = radiance2tb(calibrated, attrs["Center_wavelength"] * 1e-9) + missing, _ = self.get_missing_and_saturated(attrs) + return calibrated.where(dataset < missing) class H5Array(BackendArray): diff --git a/satpy/tests/reader_tests/test_sgli_l1b.py b/satpy/tests/reader_tests/test_sgli_l1b.py index e153867aeb..19f6480ae7 100644 --- a/satpy/tests/reader_tests/test_sgli_l1b.py +++ b/satpy/tests/reader_tests/test_sgli_l1b.py @@ -20,10 +20,10 @@ ZEN_ARRAY = np.random.randint(0, 180 * 100, size=(197, 126), dtype=np.int16) -def test_open_dataset(sgli_file): +def test_open_dataset(sgli_vn_file): """Test open_dataset function.""" from satpy.readers.sgli_l1b import SGLIBackend - res = open_dataset(sgli_file, engine=SGLIBackend, chunks={}) + res = open_dataset(sgli_vn_file, engine=SGLIBackend, chunks={}) assert isinstance(res, Dataset) data_array = res["Lt_VN01"] assert isinstance(data_array, DataArray) @@ -32,8 +32,8 @@ def test_open_dataset(sgli_file): @pytest.fixture(scope="session") -def sgli_file(tmp_path_factory): - filename = tmp_path_factory.mktemp("data") / "test_file.h5" +def sgli_vn_file(tmp_path_factory): + filename = tmp_path_factory.mktemp("data") / "test_vn_file.h5" with h5py.File(filename, "w") as h5f: global_attributes = h5f.create_group("Global_attributes") global_attributes.attrs["Scene_start_time"] = np.array([START_TIME.strftime("%Y%m%d %H:%M:%S.%f")[:-3]], @@ -44,6 +44,7 @@ def sgli_file(tmp_path_factory): image_data = h5f.create_group("Image_data") image_data.attrs["Number_of_lines"] = 1955 image_data.attrs["Number_of_pixels"] = 1250 + vn01 = image_data.create_dataset("Lt_VN01", data=FULL_KM_ARRAY, chunks=(116, 157)) vn01.attrs["Slope_reflectance"] = np.array([5e-05], dtype=np.float32) vn01.attrs["Offset_reflectance"] = np.array([-0.05], dtype=np.float32) @@ -53,109 +54,266 @@ def sgli_file(tmp_path_factory): vn01.attrs["Bit00(LSB)-13"] = np.array([b"Digital Number\n16383 : Missing value\n16382 : Saturation value"], dtype="|S61") - geometry_data = h5f.create_group("Geometry_data") - longitude = geometry_data.create_dataset("Longitude", data=LON_LAT_ARRAY, chunks=(47, 63)) - longitude.attrs["Resampling_interval"] = 10 - latitude = geometry_data.create_dataset("Latitude", data=LON_LAT_ARRAY, chunks=(47, 63)) - latitude.attrs["Resampling_interval"] = 10 - - angles_slope = np.array([0.01], dtype=np.float32) - angles_offset = np.array([0], dtype=np.float32) - - azimuth = geometry_data.create_dataset("Sensor_azimuth", data=AZI_ARRAY, chunks=(47, 63)) 
- azimuth.attrs["Resampling_interval"] = 10 - azimuth.attrs["Slope"] = angles_slope - azimuth.attrs["Offset"] = angles_offset - zenith = geometry_data.create_dataset("Sensor_zenith", data=ZEN_ARRAY, chunks=(47, 63)) - zenith.attrs["Resampling_interval"] = 10 - zenith.attrs["Slope"] = angles_slope - zenith.attrs["Offset"] = angles_offset - - sazimuth = geometry_data.create_dataset("Solar_azimuth", data=AZI_ARRAY, chunks=(47, 63)) - sazimuth.attrs["Resampling_interval"] = 10 - sazimuth.attrs["Slope"] = angles_slope - sazimuth.attrs["Offset"] = angles_offset - szenith = geometry_data.create_dataset("Solar_zenith", data=ZEN_ARRAY, chunks=(47, 63)) - szenith.attrs["Resampling_interval"] = 10 - szenith.attrs["Slope"] = angles_slope - szenith.attrs["Offset"] = angles_offset + add_downsampled_geometry_data(h5f) + + return filename + +@pytest.fixture(scope="session") +def sgli_ir_file(tmp_path_factory): + filename = tmp_path_factory.mktemp("data") / "test_ir_file.h5" + with h5py.File(filename, "w") as h5f: + global_attributes = h5f.create_group("Global_attributes") + global_attributes.attrs["Scene_start_time"] = np.array([START_TIME.strftime("%Y%m%d %H:%M:%S.%f")[:-3]], + dtype="|S21") + global_attributes.attrs["Scene_end_time"] = np.array([END_TIME.strftime("%Y%m%d %H:%M:%S.%f")[:-3]], + dtype="|S21") + + image_data = h5f.create_group("Image_data") + image_data.attrs["Number_of_lines"] = 1854 + image_data.attrs["Number_of_pixels"] = 1250 + + sw01 = image_data.create_dataset("Lt_SW01", data=FULL_KM_ARRAY, chunks=(116, 157)) + sw01.attrs["Slope_reflectance"] = np.array([5e-05], dtype=np.float32) + sw01.attrs["Offset_reflectance"] = np.array([0.0], dtype=np.float32) + sw01.attrs["Slope"] = np.array([0.02], dtype=np.float32) + sw01.attrs["Offset"] = np.array([-25], dtype=np.float32) + sw01.attrs["Mask"] = np.array([16383], dtype=np.uint16) + sw01.attrs["Bit00(LSB)-13"] = np.array([b"Digital Number\n16383 : Missing value\n16382 : Saturation value"], + dtype="|S61") + + + ti01 = image_data.create_dataset("Lt_TI01", data=FULL_KM_ARRAY, chunks=(116, 157)) + ti01.attrs["Slope"] = np.array([0.0012], dtype=np.float32) + ti01.attrs["Offset"] = np.array([-1.65], dtype=np.float32) + ti01.attrs["Mask"] = np.array([16383], dtype=np.uint16) + ti01.attrs["Bit00(LSB)-13"] = np.array([b"Digital Number\n16383 : Missing value\n16382 : Saturation value"], + dtype="|S61") + ti01.attrs["Center_wavelength"] = np.array([12000], dtype=np.float32) + + add_downsampled_geometry_data(h5f) return filename -def test_start_time(sgli_file): - handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) +@pytest.fixture(scope="session") +def sgli_pol_file(tmp_path_factory): + filename = tmp_path_factory.mktemp("data") / "test_pol_file.h5" + with h5py.File(filename, "w") as h5f: + global_attributes = h5f.create_group("Global_attributes") + global_attributes.attrs["Scene_start_time"] = np.array([START_TIME.strftime("%Y%m%d %H:%M:%S.%f")[:-3]], + dtype="|S21") + global_attributes.attrs["Scene_end_time"] = np.array([END_TIME.strftime("%Y%m%d %H:%M:%S.%f")[:-3]], + dtype="|S21") + + image_data = h5f.create_group("Image_data") + image_data.attrs["Number_of_lines"] = 1854 + image_data.attrs["Number_of_pixels"] = 1250 + + p1_0 = image_data.create_dataset("Lt_P1_0", data=FULL_KM_ARRAY, chunks=(116, 157)) + p1_0.attrs["Slope_reflectance"] = np.array([5e-05], dtype=np.float32) + p1_0.attrs["Offset_reflectance"] = np.array([0.0], dtype=np.float32) + p1_0.attrs["Slope"] = np.array([0.02], dtype=np.float32) + p1_0.attrs["Offset"] = np.array([-25], 
dtype=np.float32) + p1_0.attrs["Mask"] = np.array([16383], dtype=np.uint16) + p1_0.attrs["Bit00(LSB)-13"] = np.array([b"Digital Number\n16383 : Missing value\n16382 : Saturation value"], + dtype="|S61") + + + p1_m60 = image_data.create_dataset("Lt_P1_m60", data=FULL_KM_ARRAY, chunks=(116, 157)) + p1_m60.attrs["Slope_reflectance"] = np.array([5e-05], dtype=np.float32) + p1_m60.attrs["Offset_reflectance"] = np.array([-60.0], dtype=np.float32) + p1_m60.attrs["Slope"] = np.array([0.0012], dtype=np.float32) + p1_m60.attrs["Offset"] = np.array([-1.65], dtype=np.float32) + p1_m60.attrs["Mask"] = np.array([16383], dtype=np.uint16) + p1_m60.attrs["Bit00(LSB)-13"] = np.array([b"Digital Number\n16383 : Missing value\n16382 : Saturation value"], + dtype="|S61") + + p1_60 = image_data.create_dataset("Lt_P1_60", data=FULL_KM_ARRAY, chunks=(116, 157)) + p1_60.attrs["Slope_reflectance"] = np.array([5e-05], dtype=np.float32) + p1_60.attrs["Offset_reflectance"] = np.array([60.0], dtype=np.float32) + p1_60.attrs["Slope"] = np.array([0.0012], dtype=np.float32) + p1_60.attrs["Offset"] = np.array([-1.65], dtype=np.float32) + p1_60.attrs["Mask"] = np.array([16383], dtype=np.uint16) + p1_60.attrs["Bit00(LSB)-13"] = np.array([b"Digital Number\n16383 : Missing value\n16382 : Saturation value"], + dtype="|S61") + + geometry_data = h5f.create_group("Geometry_data") + longitude = geometry_data.create_dataset("Longitude", data=FULL_KM_ARRAY.astype(np.float32), chunks=(47, 63)) + longitude.attrs["Resampling_interval"] = 1 + latitude = geometry_data.create_dataset("Latitude", data=FULL_KM_ARRAY.astype(np.float32), chunks=(47, 63)) + latitude.attrs["Resampling_interval"] = 1 + + return filename + +def add_downsampled_geometry_data(h5f): + geometry_data = h5f.create_group("Geometry_data") + longitude = geometry_data.create_dataset("Longitude", data=LON_LAT_ARRAY, chunks=(47, 63)) + longitude.attrs["Resampling_interval"] = 10 + latitude = geometry_data.create_dataset("Latitude", data=LON_LAT_ARRAY, chunks=(47, 63)) + latitude.attrs["Resampling_interval"] = 10 + + angles_slope = np.array([0.01], dtype=np.float32) + angles_offset = np.array([0], dtype=np.float32) + + azimuth = geometry_data.create_dataset("Sensor_azimuth", data=AZI_ARRAY, chunks=(47, 63)) + azimuth.attrs["Resampling_interval"] = 10 + azimuth.attrs["Slope"] = angles_slope + azimuth.attrs["Offset"] = angles_offset + zenith = geometry_data.create_dataset("Sensor_zenith", data=ZEN_ARRAY, chunks=(47, 63)) + zenith.attrs["Resampling_interval"] = 10 + zenith.attrs["Slope"] = angles_slope + zenith.attrs["Offset"] = angles_offset + + sazimuth = geometry_data.create_dataset("Solar_azimuth", data=AZI_ARRAY, chunks=(47, 63)) + sazimuth.attrs["Resampling_interval"] = 10 + sazimuth.attrs["Slope"] = angles_slope + sazimuth.attrs["Offset"] = angles_offset + szenith = geometry_data.create_dataset("Solar_zenith", data=ZEN_ARRAY, chunks=(47, 63)) + szenith.attrs["Resampling_interval"] = 10 + szenith.attrs["Slope"] = angles_slope + szenith.attrs["Offset"] = angles_offset + + +def test_start_time(sgli_vn_file): + handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) microseconds = START_TIME.microsecond % 1000 assert handler.start_time == START_TIME - timedelta(microseconds=microseconds) -def test_end_time(sgli_file): - handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) +def test_end_time(sgli_vn_file): + handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) microseconds = END_TIME.microsecond % 1000 assert handler.end_time == END_TIME - 
timedelta(microseconds=microseconds) -def test_get_dataset_counts(sgli_file): - handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) +def test_get_dataset_counts(sgli_vn_file): + handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="counts") - res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01"}) + res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", + "standard_name": ""}) assert np.allclose(res, FULL_KM_ARRAY & MASK) assert res.dtype == np.uint16 assert res.attrs["platform_name"] == "GCOM-C1" + assert res.attrs["sensor"] == "sgli" -def test_get_dataset_reflectances(sgli_file): - handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) +def test_get_vn_dataset_reflectances(sgli_vn_file): + handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="reflectance") - res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01"}) + res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", + "standard_name": ""}) assert np.allclose(res[0, :] / 100, FULL_KM_ARRAY[0, :] * 5e-5 - 0.05) assert res.dtype == np.float32 + assert res.dims == ("y", "x") -def test_get_dataset_radiance(sgli_file): - handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) +def test_get_vn_dataset_radiance(sgli_vn_file): + handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="radiance") - res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01"}) + res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", + "standard_name": ""}) assert np.allclose(res[0, :], FULL_KM_ARRAY[0, :] * np.float32(0.02) - 25) assert res.dtype == np.float32 -def test_channel_is_masked(sgli_file): - handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) +def test_channel_is_masked(sgli_vn_file): + handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="counts") - res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01"}) + res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", + "standard_name": ""}) assert res.max() == MASK -def test_missing_values_are_masked(sgli_file): - handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) +def test_missing_values_are_masked(sgli_vn_file): + handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="radiance") - res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01"}) + res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", + "standard_name": ""}) assert np.isnan(res).sum() == 149 -def test_channel_is_chunked(sgli_file): +def test_channel_is_chunked(sgli_vn_file): with dask.config.set({"array.chunk-size": "1MiB"}): - handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) + handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="counts") - res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01"}) + res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", + "standard_name": ""}) assert res.chunks[0][0] > 116 -def test_loading_lon_lat(sgli_file): - handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) +def test_loading_lon_lat(sgli_vn_file): + handler = HDF5SGLI(sgli_vn_file, {"resolution": 
"L"}, {}) did = dict(name="longitude_v", resolution=1000, polarization=None) - res = handler.get_dataset(did, {"file_key": "Geometry_data/Longitude"}) + res = handler.get_dataset(did, {"file_key": "Geometry_data/Longitude", "units": "", + "standard_name": ""}) assert res.shape == (1955, 1250) assert res.chunks is not None assert res.dtype == np.float32 + assert res.dims == ("y", "x") -def test_loading_sensor_angles(sgli_file): - handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) +def test_loading_sensor_angles(sgli_vn_file): + handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="satellite_zenith_angle", resolution=1000, polarization=None) - res = handler.get_dataset(did, {"file_key": "Geometry_data/Sensor_zenith"}) + res = handler.get_dataset(did, {"file_key": "Geometry_data/Sensor_zenith", "units": "", + "standard_name": ""}) assert res.shape == (1955, 1250) assert res.chunks is not None assert res.dtype == np.float32 assert res.min() >= 0 -def test_loading_solar_angles(sgli_file): - handler = HDF5SGLI(sgli_file, {"resolution": "L"}, {}) +def test_loading_solar_angles(sgli_vn_file): + handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="solar_azimuth_angle", resolution=1000, polarization=None) - res = handler.get_dataset(did, {"file_key": "Geometry_data/Sensor_zenith"}) + res = handler.get_dataset(did, {"file_key": "Geometry_data/Sensor_zenith", "units": "", + "standard_name": ""}) assert res.shape == (1955, 1250) assert res.chunks is not None assert res.dtype == np.float32 assert res.max() <= 180 + +def test_get_sw_dataset_reflectances(sgli_ir_file): + handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {}) + did = dict(name="SW1", resolution=1000, polarization=None, calibration="reflectance") + res = handler.get_dataset(did, {"file_key": "Image_data/Lt_SW01", "units": "", + "standard_name": ""}) + assert np.allclose(res[0, :] / 100, FULL_KM_ARRAY[0, :] * 5e-5) + assert res.dtype == np.float32 + +def test_get_ti_dataset_radiance(sgli_ir_file): + handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {}) + did = dict(name="TI1", resolution=1000, polarization=None, calibration="radiance") + res = handler.get_dataset(did, {"file_key": "Image_data/Lt_TI01", "units": "", + "standard_name": ""}) + assert np.allclose(res[0, :], FULL_KM_ARRAY[0, :] * np.float32(0.0012) - 1.65) + assert res.dtype == np.float32 + +def test_get_ti_dataset_bt(sgli_ir_file): + handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {}) + did = dict(name="TI1", resolution=1000, polarization=None, calibration="brightness_temperature") + with pytest.raises(NotImplementedError): + _ = handler.get_dataset(did, {"file_key": "Image_data/Lt_TI01", "units": "K", + "standard_name": "toa_brightness_temperature"}) + +def test_get_ti_lon_lats(sgli_ir_file): + handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {}) + did = dict(name="longitude_ir", resolution=1000, polarization=None) + res = handler.get_dataset(did, {"file_key": "Geometry_data/Longitude", "units": "", + "standard_name": ""}) + assert res.shape == (1854, 1250) + assert res.chunks is not None + assert res.dtype == np.float32 + +@pytest.mark.parametrize("polarization", [0, -60, 60]) +def test_get_polarized_dataset_reflectance(sgli_pol_file, polarization): + """Test getting polarized reflectances.""" + handler = HDF5SGLI(sgli_pol_file, {"resolution": "L"}, {}) + did = dict(name="P1", resolution=1000, polarization=polarization, calibration="reflectance") + res = handler.get_dataset(did, {"file_key": 
"Image_data/Lt_P1_{polarization}", "units": "%", + "standard_name": "toa_bidirectional_reflectance"}) + assert res.dtype == np.float32 + expected = (FULL_KM_ARRAY[0, :] * np.float32(5e-5) + np.float32(polarization)) * 100 + np.testing.assert_allclose(res[0, :], expected) + assert res.attrs["units"] == "%" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" + +def test_get_polarized_longitudes(sgli_pol_file): + """Test getting polarized reflectances.""" + handler = HDF5SGLI(sgli_pol_file, {"resolution": "L"}, {}) + did = dict(name="longitude", resolution=1000, polarization=0) + res = handler.get_dataset(did, {"file_key": "Geometry_data/Longitude", "units": "", + "standard_name": ""}) + assert res.dtype == np.float32 + expected = FULL_KM_ARRAY.astype(np.float32) + np.testing.assert_allclose(res, expected) From c2ad3d0966ea4cf98021ba98f0f5724bdb544242 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 20 Nov 2023 20:47:55 +0100 Subject: [PATCH 0746/1416] Adress linting issues --- .pre-commit-config.yaml | 4 +- pyproject.toml | 2 +- satpy/readers/sgli_l1b.py | 159 ++++++++++++---------- satpy/tests/reader_tests/test_sgli_l1b.py | 22 ++- 4 files changed, 108 insertions(+), 79 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index eb21aa6601..f10beb1a7d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,9 +1,9 @@ exclude: '^$' fail_fast: false repos: - - repo: https://github.com/charliermarsh/ruff-pre-commit + - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.0.247' + rev: 'v0.1.6' hooks: - id: ruff - repo: https://github.com/pre-commit/pre-commit-hooks diff --git a/pyproject.toml b/pyproject.toml index 1282120a59..300e738e60 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ line_length = 120 [tool.ruff] # See https://docs.astral.sh/ruff/rules/ # In the future, add "A", "B", "S", "N", "D" -select = ["E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"] +select = ["D", "E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"] line-length = 120 [tool.ruff.per-file-ignores] diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py index 1a7e076ff9..61a4b61f9d 100644 --- a/satpy/readers/sgli_l1b.py +++ b/satpy/readers/sgli_l1b.py @@ -75,10 +75,9 @@ def end_time(self): return datetime.strptime(the_time.decode("ascii"), "%Y%m%d %H:%M:%S.%f") def get_dataset(self, key, info): - """Get the dataset.""" + """Get the dataset from the file.""" if key["resolution"] != self.resolution: return - file_key = info["file_key"] if key["name"].startswith("P"): file_key = file_key.format(polarization=polarization_keys[key["polarization"]]) @@ -95,32 +94,9 @@ def get_dataset(self, key, info): elif key["name"].startswith("TI"): dataset = self.get_ir_dataset(key, dataset) elif key["name"].startswith(("longitude", "latitude")): - resampling_interval = attrs["Resampling_interval"] - if resampling_interval != 1: - new_lons, new_lats = self.interpolate_lons_lats(resampling_interval) - if key["name"].startswith("longitude"): - dataset = new_lons - else: - dataset = new_lats - dataset = xr.DataArray(dataset, attrs=attrs, dims=["y", "x"]) - elif key["name"] in ["satellite_azimuth_angle", "satellite_zenith_angle"]: - resampling_interval = attrs["Resampling_interval"] - if resampling_interval != 1: - new_azi, new_zen = self.interpolate_sensor_angles(resampling_interval) - if "azimuth" in key["name"]: - dataset = new_azi - else: - dataset = new_zen - dataset = xr.DataArray(dataset, attrs=attrs, 
dims=["y", "x"]) - elif key["name"] in ["solar_azimuth_angle", "solar_zenith_angle"]: - resampling_interval = attrs["Resampling_interval"] - if resampling_interval != 1: - new_azi, new_zen = self.interpolate_solar_angles(resampling_interval) - if "azimuth" in key["name"]: - dataset = new_azi - else: - dataset = new_zen - dataset = xr.DataArray(dataset, attrs=attrs, dims=["y", "x"]) + dataset = self.get_lon_lats(key) + elif "angle" in key["name"]: + dataset = self.get_angles(key) else: raise NotImplementedError() @@ -130,63 +106,18 @@ def get_dataset(self, key, info): dataset.attrs["standard_name"] = info["standard_name"] return dataset - def interpolate_lons_lats(self, resampling_interval): - lons = self.h5file["Geometry_data/Longitude"] - lats = self.h5file["Geometry_data/Latitude"] - return self.interpolate_spherical(lons, lats, resampling_interval) - - def interpolate_sensor_angles(self, resampling_interval): - azi = self.h5file["Geometry_data/Sensor_azimuth"] - zen = self.h5file["Geometry_data/Sensor_zenith"] - return self.interpolate_angles(azi, zen, resampling_interval) - - def interpolate_solar_angles(self, resampling_interval): - azi = self.h5file["Geometry_data/Solar_azimuth"] - zen = self.h5file["Geometry_data/Solar_zenith"] - return self.interpolate_angles(azi, zen, resampling_interval) - - def interpolate_angles(self, azi, zen, resampling_interval): - azi = self.scale_array(azi) - zen = self.scale_array(zen) - zen = zen[:] - 90 - new_azi, new_zen = self.interpolate_spherical(azi, zen, resampling_interval) - return new_azi, new_zen + 90 - - def scale_array(self, array): - try: - return array * array.attrs["Slope"] + array.attrs["Offset"] - except KeyError: - return array - - def interpolate_spherical(self, azimuthal_angle, polar_angle, resampling_interval): - from geotiepoints.geointerpolator import GeoGridInterpolator - - full_shape = (self.h5file["Image_data"].attrs["Number_of_lines"], - self.h5file["Image_data"].attrs["Number_of_pixels"]) - - tie_lines = np.arange(0, polar_angle.shape[0] * resampling_interval, resampling_interval) - tie_cols = np.arange(0, polar_angle.shape[1] * resampling_interval, resampling_interval) - - interpolator = GeoGridInterpolator((tie_lines, tie_cols), azimuthal_angle, polar_angle, method="slinear") - new_azi, new_pol = interpolator.interpolate_to_shape(full_shape, chunks="auto") - return new_azi, new_pol - - def get_visible_dataset(self, key, dataset): + """Produce a DataArray with a visible channel data in it.""" dataset = self.mask_to_14_bits(dataset) dataset = self.calibrate_vis(dataset, key["calibration"]) - #dataset.attrs.update(info) - #dataset = self._mask_and_scale(dataset, h5dataset, key) - - # return dataset def mask_to_14_bits(self, dataset): """Mask data to 14 bits.""" return dataset & dataset.attrs["Mask"].item() - def calibrate_vis(self, dataset, calibration): + """Calibrate visible data.""" attrs = dataset.attrs if calibration == "counts": return dataset @@ -198,6 +129,7 @@ def calibrate_vis(self, dataset, calibration): return calibrated.where(dataset < missing) def get_missing_and_saturated(self, attrs): + """Get the missing and saturation values.""" missing_and_saturated = attrs["Bit00(LSB)-13"].item() mask_vals = missing_and_saturated.split(b"\n")[1:] missing = int(mask_vals[0].split(b":")[0].strip()) @@ -205,11 +137,13 @@ def get_missing_and_saturated(self, attrs): return missing, saturation def get_ir_dataset(self, key, dataset): + """Produce a DataArray with an IR channel data in it.""" dataset = self.mask_to_14_bits(dataset) 
dataset = self.calibrate_ir(dataset, key["calibration"]) return dataset def calibrate_ir(self, dataset, calibration): + """Calibrate IR channel.""" attrs = dataset.attrs if calibration == "counts": return dataset @@ -222,6 +156,81 @@ def calibrate_ir(self, dataset, calibration): missing, _ = self.get_missing_and_saturated(attrs) return calibrated.where(dataset < missing) + def get_lon_lats(self, key): + """Get lon/lats from the file.""" + lons = self.h5file["Geometry_data/Longitude"] + lats = self.h5file["Geometry_data/Latitude"] + attrs = lons.attrs + resampling_interval = attrs["Resampling_interval"] + if resampling_interval != 1: + lons, lats = self.interpolate_spherical(lons, lats, resampling_interval) + if key["name"].startswith("longitude"): + dataset = lons + else: + dataset = lats + return xr.DataArray(dataset, attrs=attrs, dims=["y", "x"]) + + def interpolate_spherical(self, azimuthal_angle, polar_angle, resampling_interval): + """Interpolate spherical coordinates.""" + from geotiepoints.geointerpolator import GeoGridInterpolator + + full_shape = (self.h5file["Image_data"].attrs["Number_of_lines"], + self.h5file["Image_data"].attrs["Number_of_pixels"]) + + tie_lines = np.arange(0, polar_angle.shape[0] * resampling_interval, resampling_interval) + tie_cols = np.arange(0, polar_angle.shape[1] * resampling_interval, resampling_interval) + + interpolator = GeoGridInterpolator((tie_lines, tie_cols), azimuthal_angle, polar_angle, method="slinear") + new_azi, new_pol = interpolator.interpolate_to_shape(full_shape, chunks="auto") + return new_azi, new_pol + + def get_angles(self, key): + """Get angles from the file.""" + if "solar" in key["name"]: + azi, zen, attrs = self.get_solar_angles() + elif "satellite" in key["name"]: + azi, zen, attrs = self.get_sensor_angles() + if "azimuth" in key["name"]: + dataset = azi + else: + dataset = zen + dataset = xr.DataArray(dataset, attrs=attrs, dims=["y", "x"]) + return dataset + + def get_solar_angles(self): + """Get the solar angles.""" + azi = self.h5file["Geometry_data/Solar_azimuth"] + zen = self.h5file["Geometry_data/Solar_zenith"] + attrs = zen.attrs + azi = self.scale_array(azi) + zen = self.scale_array(zen) + return *self.get_full_angles(azi, zen, attrs), attrs + + def get_sensor_angles(self): + """Get the solar angles.""" + azi = self.h5file["Geometry_data/Sensor_azimuth"] + zen = self.h5file["Geometry_data/Sensor_zenith"] + attrs = zen.attrs + azi = self.scale_array(azi) + zen = self.scale_array(zen) + return *self.get_full_angles(azi, zen, attrs), attrs + + def scale_array(self, array): + """Scale an array with its attributes `Slope` and `Offset` if available.""" + try: + return array * array.attrs["Slope"] + array.attrs["Offset"] + except KeyError: + return array + + def get_full_angles(self, azi, zen, attrs): + """Interpolate angle arrays.""" + resampling_interval = attrs["Resampling_interval"] + if resampling_interval != 1: + zen = zen[:] - 90 + new_azi, new_zen = self.interpolate_spherical(azi, zen, resampling_interval) + return new_azi, new_zen + 90 + return azi, zen + class H5Array(BackendArray): """An Hdf5-based array.""" diff --git a/satpy/tests/reader_tests/test_sgli_l1b.py b/satpy/tests/reader_tests/test_sgli_l1b.py index 19f6480ae7..9fa8caa8b1 100644 --- a/satpy/tests/reader_tests/test_sgli_l1b.py +++ b/satpy/tests/reader_tests/test_sgli_l1b.py @@ -33,6 +33,7 @@ def test_open_dataset(sgli_vn_file): @pytest.fixture(scope="session") def sgli_vn_file(tmp_path_factory): + """Create a stub VN file.""" filename = 
tmp_path_factory.mktemp("data") / "test_vn_file.h5" with h5py.File(filename, "w") as h5f: global_attributes = h5f.create_group("Global_attributes") @@ -60,6 +61,7 @@ def sgli_vn_file(tmp_path_factory): @pytest.fixture(scope="session") def sgli_ir_file(tmp_path_factory): + """Create a stub IR file.""" filename = tmp_path_factory.mktemp("data") / "test_ir_file.h5" with h5py.File(filename, "w") as h5f: global_attributes = h5f.create_group("Global_attributes") @@ -96,6 +98,7 @@ def sgli_ir_file(tmp_path_factory): @pytest.fixture(scope="session") def sgli_pol_file(tmp_path_factory): + """Create a POL stub file.""" filename = tmp_path_factory.mktemp("data") / "test_pol_file.h5" with h5py.File(filename, "w") as h5f: global_attributes = h5f.create_group("Global_attributes") @@ -145,6 +148,7 @@ def sgli_pol_file(tmp_path_factory): return filename def add_downsampled_geometry_data(h5f): + """Add downsampled geometry data to an h5py file instance.""" geometry_data = h5f.create_group("Geometry_data") longitude = geometry_data.create_dataset("Longitude", data=LON_LAT_ARRAY, chunks=(47, 63)) longitude.attrs["Resampling_interval"] = 10 @@ -174,17 +178,20 @@ def add_downsampled_geometry_data(h5f): def test_start_time(sgli_vn_file): + """Test that the start time is extracted.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) microseconds = START_TIME.microsecond % 1000 assert handler.start_time == START_TIME - timedelta(microseconds=microseconds) def test_end_time(sgli_vn_file): + """Test that the end time is extracted.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) microseconds = END_TIME.microsecond % 1000 assert handler.end_time == END_TIME - timedelta(microseconds=microseconds) def test_get_dataset_counts(sgli_vn_file): + """Test that counts can be extracted from a file.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="counts") res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", @@ -195,15 +202,18 @@ def test_get_dataset_counts(sgli_vn_file): assert res.attrs["sensor"] == "sgli" def test_get_vn_dataset_reflectances(sgli_vn_file): + """Test that the vn datasets can be calibrated to reflectances.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="reflectance") - res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", + res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "%", "standard_name": ""}) assert np.allclose(res[0, :] / 100, FULL_KM_ARRAY[0, :] * 5e-5 - 0.05) assert res.dtype == np.float32 assert res.dims == ("y", "x") + assert res.units == "%" def test_get_vn_dataset_radiance(sgli_vn_file): + """Test that datasets can be calibrated to radiance.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="radiance") res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", @@ -212,6 +222,7 @@ def test_get_vn_dataset_radiance(sgli_vn_file): assert res.dtype == np.float32 def test_channel_is_masked(sgli_vn_file): + """Test that channels are masked for no-data.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="counts") res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", @@ -219,6 +230,7 @@ def test_channel_is_masked(sgli_vn_file): assert 
res.max() == MASK def test_missing_values_are_masked(sgli_vn_file): + """Check that missing values are masked.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="radiance") res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", @@ -226,6 +238,7 @@ def test_missing_values_are_masked(sgli_vn_file): assert np.isnan(res).sum() == 149 def test_channel_is_chunked(sgli_vn_file): + """Test that the channel data is chunked.""" with dask.config.set({"array.chunk-size": "1MiB"}): handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="counts") @@ -234,6 +247,7 @@ def test_channel_is_chunked(sgli_vn_file): assert res.chunks[0][0] > 116 def test_loading_lon_lat(sgli_vn_file): + """Test that loading lons and lats works.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="longitude_v", resolution=1000, polarization=None) res = handler.get_dataset(did, {"file_key": "Geometry_data/Longitude", "units": "", @@ -244,6 +258,7 @@ def test_loading_lon_lat(sgli_vn_file): assert res.dims == ("y", "x") def test_loading_sensor_angles(sgli_vn_file): + """Test loading the satellite angles.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="satellite_zenith_angle", resolution=1000, polarization=None) res = handler.get_dataset(did, {"file_key": "Geometry_data/Sensor_zenith", "units": "", @@ -254,6 +269,7 @@ def test_loading_sensor_angles(sgli_vn_file): assert res.min() >= 0 def test_loading_solar_angles(sgli_vn_file): + """Test loading sun angles.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="solar_azimuth_angle", resolution=1000, polarization=None) res = handler.get_dataset(did, {"file_key": "Geometry_data/Sensor_zenith", "units": "", @@ -264,6 +280,7 @@ def test_loading_solar_angles(sgli_vn_file): assert res.max() <= 180 def test_get_sw_dataset_reflectances(sgli_ir_file): + """Test getting SW dataset reflectances.""" handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {}) did = dict(name="SW1", resolution=1000, polarization=None, calibration="reflectance") res = handler.get_dataset(did, {"file_key": "Image_data/Lt_SW01", "units": "", @@ -272,6 +289,7 @@ def test_get_sw_dataset_reflectances(sgli_ir_file): assert res.dtype == np.float32 def test_get_ti_dataset_radiance(sgli_ir_file): + """Test getting thermal IR radiances.""" handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {}) did = dict(name="TI1", resolution=1000, polarization=None, calibration="radiance") res = handler.get_dataset(did, {"file_key": "Image_data/Lt_TI01", "units": "", @@ -280,6 +298,7 @@ def test_get_ti_dataset_radiance(sgli_ir_file): assert res.dtype == np.float32 def test_get_ti_dataset_bt(sgli_ir_file): + """Test getting brightness temperatures for IR channels.""" handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {}) did = dict(name="TI1", resolution=1000, polarization=None, calibration="brightness_temperature") with pytest.raises(NotImplementedError): @@ -287,6 +306,7 @@ def test_get_ti_dataset_bt(sgli_ir_file): "standard_name": "toa_brightness_temperature"}) def test_get_ti_lon_lats(sgli_ir_file): + """Test getting the lons and lats for IR channels.""" handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {}) did = dict(name="longitude_ir", resolution=1000, polarization=None) res = handler.get_dataset(did, {"file_key": "Geometry_data/Longitude", "units": "", From 
2521d1a6f7c28fc181665884638e6f03f17960c7 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 20 Nov 2023 19:58:20 +0000 Subject: [PATCH 0747/1416] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - https://github.com/charliermarsh/ruff-pre-commit → https://github.com/astral-sh/ruff-pre-commit - [github.com/astral-sh/ruff-pre-commit: v0.0.247 → v0.1.6](https://github.com/astral-sh/ruff-pre-commit/compare/v0.0.247...v0.1.6) - [github.com/pre-commit/mirrors-mypy: v1.6.1 → v1.7.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.6.1...v1.7.0) --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index eb21aa6601..d84659c6f0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,9 +1,9 @@ exclude: '^$' fail_fast: false repos: - - repo: https://github.com/charliermarsh/ruff-pre-commit + - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.0.247' + rev: 'v0.1.6' hooks: - id: ruff - repo: https://github.com/pre-commit/pre-commit-hooks @@ -19,7 +19,7 @@ repos: - id: bandit args: [--ini, .bandit] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.6.1' # Use the sha / tag you want to point at + rev: 'v1.7.0' # Use the sha / tag you want to point at hooks: - id: mypy additional_dependencies: From c1394e67883f2d4278997b819da49c1341b6933c Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 20 Nov 2023 22:04:00 +0100 Subject: [PATCH 0748/1416] Continue fixing assertraises --- satpy/tests/test_composites.py | 27 ++++++++++++++++----------- satpy/tests/test_dataset.py | 3 ++- 2 files changed, 18 insertions(+), 12 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index bf8a9dfb9e..70bc2abf25 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -78,7 +78,8 @@ def test_mult_ds_no_area(self): ds2 = self._get_test_ds() del ds2.attrs["area"] comp = CompositeBase("test_comp") - self.assertRaises(ValueError, comp.match_data_arrays, (ds1, ds2)) + with pytest.raises(ValueError, match="Missing 'area' attribute"): + comp.match_data_arrays((ds1, ds2)) def test_mult_ds_diff_area(self): """Test that datasets with different areas fail.""" @@ -94,7 +95,8 @@ def test_mult_ds_diff_area(self): 100, 50, (-30037508.34, -20018754.17, 10037508.34, 18754.17)) comp = CompositeBase("test_comp") - self.assertRaises(IncompatibleAreas, comp.match_data_arrays, (ds1, ds2)) + with pytest.raises(IncompatibleAreas): + comp.match_data_arrays((ds1, ds2)) def test_mult_ds_diff_dims(self): """Test that datasets with different dimensions still pass.""" @@ -118,7 +120,8 @@ def test_mult_ds_diff_size(self): ds1 = self._get_test_ds(shape=(50, 100), dims=("x", "y")) ds2 = self._get_test_ds(shape=(3, 50, 100), dims=("bands", "y", "x")) comp = CompositeBase("test_comp") - self.assertRaises(IncompatibleAreas, comp.match_data_arrays, (ds1, ds2)) + with pytest.raises(IncompatibleAreas): + comp.match_data_arrays((ds1, ds2)) def test_nondimensional_coords(self): """Test the removal of non-dimensional coordinates when compositing.""" @@ -351,9 +354,11 @@ def test_bad_areas_diff(self): from satpy.composites import DifferenceCompositor, IncompatibleAreas comp = DifferenceCompositor(name="diff") # too many arguments - self.assertRaises(ValueError, comp, (self.ds1, self.ds2, 
self.ds2_big)) + with pytest.raises(ValueError, match="Expected 2 datasets, got 3"): + comp((self.ds1, self.ds2, self.ds2_big)) # different resolution - self.assertRaises(IncompatibleAreas, comp, (self.ds1, self.ds2_big)) + with pytest.raises(IncompatibleAreas): + comp((self.ds1, self.ds2_big)) @pytest.fixture() @@ -1051,8 +1056,8 @@ def test_concat_datasets(self): assert res.shape[0] == num_bands assert res.bands[0] == "L" assert res.bands[1] == "A" - self.assertRaises(IncompatibleAreas, self.comp._concat_datasets, - [self.all_valid, self.wrong_shape], "LA") + with pytest.raises(IncompatibleAreas): + self.comp._concat_datasets([self.all_valid, self.wrong_shape], "LA") def test_get_sensors(self): """Test getting sensors from the dataset attributes.""" @@ -1099,8 +1104,8 @@ def test_call_with_mock(self, match_data_arrays, check_times, combine_metadata, match_data_arrays.reset_mock() # When areas are incompatible, masking shouldn't happen match_data_arrays.side_effect = IncompatibleAreas() - self.assertRaises(IncompatibleAreas, - self.comp, [self.all_valid, self.wrong_shape]) + with pytest.raises(IncompatibleAreas): + self.comp([self.all_valid, self.wrong_shape]) match_data_arrays.assert_called_once() def test_call(self): @@ -1217,7 +1222,7 @@ def test_init(self, get_area_def): from satpy.composites import StaticImageCompositor # No filename given raises ValueError - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match="StaticImageCompositor needs a .*"): StaticImageCompositor("name") # No area defined @@ -1281,7 +1286,7 @@ def load(self, arg): # Non-georeferenced image, no area given img.attrs.pop("area") comp = StaticImageCompositor("name", filename="/foo.tif") - with self.assertRaises(AttributeError): + with pytest.raises(AttributeError): comp() # Non-georeferenced image, area given diff --git a/satpy/tests/test_dataset.py b/satpy/tests/test_dataset.py index 014a450e0c..1b827b8dcf 100644 --- a/satpy/tests/test_dataset.py +++ b/satpy/tests/test_dataset.py @@ -54,7 +54,8 @@ def test_init_bad_modifiers(self): """Test that modifiers are a tuple.""" from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc - self.assertRaises(TypeError, DataID, dikc, name="a", modifiers="str") + with pytest.raises(TypeError): + DataID(dikc, name="a", modifiers="str") def test_compare_no_wl(self): """Compare fully qualified wavelength ID to no wavelength ID.""" From 1a35f2ec2b19f7c59d6eb055bd3b5289e1cebbc3 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 20 Nov 2023 22:46:34 +0100 Subject: [PATCH 0749/1416] Get rid of the last assertraises --- satpy/tests/multiscene_tests/test_save_animation.py | 4 +++- satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py | 3 ++- satpy/tests/reader_tests/test_abi_l1b.py | 4 ++-- satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py | 9 +++++---- satpy/tests/reader_tests/test_ami_l1b.py | 11 +++-------- satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py | 2 +- satpy/tests/reader_tests/test_clavrx.py | 4 +++- satpy/tests/reader_tests/test_electrol_hrit.py | 5 +++-- satpy/tests/reader_tests/test_goes_imager_nc_noaa.py | 6 ++++-- satpy/tests/reader_tests/test_hrit_base.py | 6 ++++-- satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py | 5 +++-- satpy/tests/reader_tests/test_netcdf_utils.py | 3 ++- satpy/tests/reader_tests/test_scmi.py | 4 +++- satpy/tests/reader_tests/test_seviri_base.py | 4 ++-- .../tests/reader_tests/test_seviri_l1b_calibration.py | 2 +- satpy/tests/reader_tests/test_seviri_l1b_icare.py | 7 
++++--- satpy/tests/reader_tests/test_utils.py | 8 +++++--- satpy/tests/reader_tests/test_vii_base_nc.py | 5 +++-- satpy/tests/reader_tests/test_vii_l1b_nc.py | 3 ++- 19 files changed, 55 insertions(+), 40 deletions(-) diff --git a/satpy/tests/multiscene_tests/test_save_animation.py b/satpy/tests/multiscene_tests/test_save_animation.py index 6807446bbb..7ec1a53df8 100644 --- a/satpy/tests/multiscene_tests/test_save_animation.py +++ b/satpy/tests/multiscene_tests/test_save_animation.py @@ -28,6 +28,8 @@ from datetime import datetime from unittest import mock +import pytest + from satpy.tests.multiscene_tests.test_utils import ( _create_test_area, _create_test_dataset, @@ -248,7 +250,7 @@ def test_save_datasets_distributed_source_target(self): with mock.patch("satpy.multiscene._multiscene.Scene.save_datasets") as save_datasets: save_datasets.return_value = [(source_mock, target_mock)] # some arbitrary return value # force order of datasets by specifying them - with self.assertRaises(NotImplementedError): + with pytest.raises(NotImplementedError): mscn.save_datasets(base_dir=self.base_dir, client=client_mock, datasets=["ds1", "ds2", "ds3"], writer="geotiff") diff --git a/satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py b/satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py index 7055a4df6d..a7cfa17ddb 100644 --- a/satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py +++ b/satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py @@ -24,6 +24,7 @@ import unittest import numpy as np +import pytest from satpy.readers.aapp_mhs_amsub_l1c import _HEADERTYPE, _SCANTYPE, HEADER_LENGTH, MHS_AMSUB_AAPPL1CFile from satpy.tests.utils import make_dataid @@ -396,7 +397,7 @@ def test_sensor_name(self): tmpfile.seek(HEADER_LENGTH, 0) self._data.tofile(tmpfile) - with self.assertRaises(IOError): + with pytest.raises(IOError, match="Sensor neither MHS nor AMSU-B!"): fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info) def test_read(self): diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index ab2b1eec54..64720f7808 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -389,8 +389,8 @@ class FakeDataID(dict): def to_dict(self): return self - with self.assertRaises(ValueError, msg="Did not detect invalid cal"): - did = FakeDataID(name="C05", calibration="invalid", modifiers=()) + did = FakeDataID(name="C05", calibration="invalid", modifiers=()) + with pytest.raises(ValueError, match="Unknown calibration 'invalid'"): self.reader.get_dataset(did, {}) diff --git a/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py b/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py index 05abef600b..33f9984ede 100644 --- a/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py +++ b/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py @@ -25,6 +25,7 @@ import dask.array as da import numpy as np +import pytest from pyresample.geometry import AreaDefinition from satpy.readers.ahi_l1b_gridded_bin import AHI_LUT_NAMES, AHIGriddedFileHandler @@ -90,9 +91,9 @@ def test_bad_area(self): """Ensure an error is raised for an usupported area.""" tmp_fh = self.make_fh("ext.01") tmp_fh.areaname = "scanning" - with self.assertRaises(NotImplementedError): + with pytest.raises(NotImplementedError): tmp_fh.get_area_def(None) - with self.assertRaises(NotImplementedError): + with pytest.raises(NotImplementedError): self.make_fh("ext.01", area="scanning") @@ -141,12 +142,12 @@ def test_calibrate(self, np_loadtxt, os_exist, get_luts): 
np.testing.assert_allclose(refl_out, out_data) # Check that exception is raised if bad calibration is passed - with self.assertRaises(NotImplementedError): + with pytest.raises(NotImplementedError): self.fh.calibrate(in_data, "lasers") # Check that exception is raised if no file is present np_loadtxt.side_effect = FileNotFoundError - with self.assertRaises(FileNotFoundError): + with pytest.raises(FileNotFoundError): self.fh.calibrate(in_data, "reflectance") diff --git a/satpy/tests/reader_tests/test_ami_l1b.py b/satpy/tests/reader_tests/test_ami_l1b.py index cdbc4468c9..f385a6080b 100644 --- a/satpy/tests/reader_tests/test_ami_l1b.py +++ b/satpy/tests/reader_tests/test_ami_l1b.py @@ -23,7 +23,7 @@ import dask.array as da import numpy as np import xarray as xr -from pytest import approx # noqa: PT013 +from pytest import approx, raises # noqa: PT013 class FakeDataset(object): @@ -198,13 +198,8 @@ def test_get_dataset(self): def test_bad_calibration(self): """Test that asking for a bad calibration fails.""" from satpy.tests.utils import make_dataid - with self.assertRaises(ValueError): - ds_id = make_dataid(name="VI006", calibration="_bad_") - ds_info = {"file_key": "image_pixel_values", - "standard_name": "toa_outgoing_radiance_per_unit_wavelength", - "units": "W m-2 um-1 sr-1", - } - self.reader.get_dataset(ds_id, ds_info) + with raises(ValueError, match="_bad_ invalid value for .*"): + _ = make_dataid(name="VI006", calibration="_bad_") @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def(self, adef): diff --git a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py index 2272a950bf..dfcaff4514 100644 --- a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py +++ b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py @@ -180,7 +180,7 @@ def test_read_raw_data(self): # Test exception if all data is masked reader.mask = [1] fh.reader = None - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match="All data is masked out"): fh.read_raw_data() @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs") diff --git a/satpy/tests/reader_tests/test_clavrx.py b/satpy/tests/reader_tests/test_clavrx.py index bc5e968b08..b4b1aef1a5 100644 --- a/satpy/tests/reader_tests/test_clavrx.py +++ b/satpy/tests/reader_tests/test_clavrx.py @@ -23,6 +23,7 @@ import dask.array as da import numpy as np +import pytest import xarray as xr from pyresample.geometry import AreaDefinition, SwathDefinition @@ -347,7 +348,8 @@ def test_no_nav_donor(self): "clavrx_H08_20180806_1800.level2.hdf", ]) r.create_filehandlers(loadables) - self.assertRaises(IOError, r.load, ["variable1", "variable2", "variable3"]) + with pytest.raises(IOError, match="Could not find navigation donor for"): + r.load(["variable1", "variable2", "variable3"]) def test_load_all_old_donor(self): """Test loading all test datasets with old donor.""" diff --git a/satpy/tests/reader_tests/test_electrol_hrit.py b/satpy/tests/reader_tests/test_electrol_hrit.py index b3e14c24d1..6a328275fd 100644 --- a/satpy/tests/reader_tests/test_electrol_hrit.py +++ b/satpy/tests/reader_tests/test_electrol_hrit.py @@ -23,6 +23,7 @@ import dask.array as da import numpy as np +import pytest from xarray import DataArray from satpy.readers.electrol_hrit import ( @@ -211,8 +212,8 @@ def test_calibrate(self, *mocks): dtype=np.uint16).reshape(5, 5)) # Test that calibration fails if given a silly mode - self.assertRaises(NotImplementedError, fh.calibrate, counts, - "nonsense") + 
with pytest.raises(NotImplementedError): + fh.calibrate(counts, "nonsense") # Test that 'counts' calibration returns identical values to input out = fh.calibrate(counts, "counts") diff --git a/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py b/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py index 59236290b8..1fd5e65cac 100644 --- a/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py +++ b/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py @@ -444,13 +444,15 @@ def test_get_dataset_invalid(self): args = dict(key=make_dataid(name="00_7", calibration="brightness_temperature"), info={}) - self.assertRaises(ValueError, self.reader.get_dataset, **args) + with pytest.raises(ValueError, match="Cannot calibrate VIS channel to 2"): + self.reader.get_dataset(**args) # IR -> Reflectance args = dict(key=make_dataid(name="10_7", calibration="reflectance"), info={}) - self.assertRaises(ValueError, self.reader.get_dataset, **args) + with pytest.raises(ValueError, match="Cannot calibrate IR channel to 1"): + self.reader.get_dataset(**args) # Unsupported calibration with pytest.raises(ValueError, match="invalid invalid value for "): diff --git a/satpy/tests/reader_tests/test_hrit_base.py b/satpy/tests/reader_tests/test_hrit_base.py index 7edbd02329..cb2dc6c3f4 100644 --- a/satpy/tests/reader_tests/test_hrit_base.py +++ b/satpy/tests/reader_tests/test_hrit_base.py @@ -44,10 +44,12 @@ def test_xrit_cmd(self): old_env = os.environ.get("XRIT_DECOMPRESS_PATH", None) os.environ["XRIT_DECOMPRESS_PATH"] = "/path/to/my/bin" - self.assertRaises(IOError, get_xritdecompress_cmd) + with pytest.raises(IOError, match=".* does not exist!"): + get_xritdecompress_cmd() os.environ["XRIT_DECOMPRESS_PATH"] = gettempdir() - self.assertRaises(IOError, get_xritdecompress_cmd) + with pytest.raises(IOError, match=".* is a directory!.*"): + get_xritdecompress_cmd() with NamedTemporaryFile() as fd: os.environ["XRIT_DECOMPRESS_PATH"] = fd.name diff --git a/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py b/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py index 416d74d16e..9bf5f5f093 100644 --- a/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py +++ b/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py @@ -23,6 +23,7 @@ import dask.array as da import numpy as np +import pytest import xarray as xr from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler @@ -488,7 +489,7 @@ def test_reading_attrs(self): # Make sure we have some files res = reader.load(["wvc_lon"]) assert res["wvc_lon"].attrs["L2B_Number_WVC_cells"] == 10 - with self.assertRaises(KeyError): + with pytest.raises(KeyError): assert res["wvc_lon"].attrs["L2B_Expected_WVC_Cells"] == 10 def test_reading_attrs_nsoas(self): @@ -502,7 +503,7 @@ def test_reading_attrs_nsoas(self): reader.create_filehandlers(files) # Make sure we have some files res = reader.load(["wvc_lon"]) - with self.assertRaises(KeyError): + with pytest.raises(KeyError): assert res["wvc_lon"].attrs["L2B_Number_WVC_cells"] == 10 assert res["wvc_lon"].attrs["L2B_Expected_WVC_Cells"] == 10 diff --git a/satpy/tests/reader_tests/test_netcdf_utils.py b/satpy/tests/reader_tests/test_netcdf_utils.py index ea104ed086..2d29288784 100644 --- a/satpy/tests/reader_tests/test_netcdf_utils.py +++ b/satpy/tests/reader_tests/test_netcdf_utils.py @@ -21,6 +21,7 @@ import unittest import numpy as np +import pytest try: from satpy.readers.netcdf_utils import NetCDF4FileHandler @@ -232,7 +233,7 @@ def test_filenotfound(self): """Test that error is raised when file not found.""" from satpy.readers.netcdf_utils 
import NetCDF4FileHandler - with self.assertRaises(IOError): + with pytest.raises(IOError, match=".*No such file or directory.*"): NetCDF4FileHandler("/thisfiledoesnotexist.nc", {}, {}) def test_get_and_cache_npxr_is_xr(self): diff --git a/satpy/tests/reader_tests/test_scmi.py b/satpy/tests/reader_tests/test_scmi.py index 89eda0479a..13c74a7d5c 100644 --- a/satpy/tests/reader_tests/test_scmi.py +++ b/satpy/tests/reader_tests/test_scmi.py @@ -21,6 +21,7 @@ from unittest import mock import numpy as np +import pytest import xarray as xr @@ -273,4 +274,5 @@ def test_get_area_def_bad(self, adef): "grid_mapping_name": "fake", } ) - self.assertRaises(ValueError, reader.get_area_def, None) + with pytest.raises(ValueError, match="Can't handle projection 'fake'"): + reader.get_area_def(None) diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index ced24a77ea..c2d190e084 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -98,7 +98,7 @@ def test_pad_data_horizontally_bad_shape(self): east_bound = 5 west_bound = 10 final_size = (1, 20) - with self.assertRaises(IndexError): + with pytest.raises(IndexError): pad_data_horizontally(data, final_size, east_bound, west_bound) def test_pad_data_vertically_bad_shape(self): @@ -107,7 +107,7 @@ def test_pad_data_vertically_bad_shape(self): south_bound = 5 north_bound = 10 final_size = (20, 1) - with self.assertRaises(IndexError): + with pytest.raises(IndexError): pad_data_vertically(data, final_size, south_bound, north_bound) def observation_start_time(self): diff --git a/satpy/tests/reader_tests/test_seviri_l1b_calibration.py b/satpy/tests/reader_tests/test_seviri_l1b_calibration.py index d46af5abd2..e6c2cdcf16 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_calibration.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_calibration.py @@ -130,7 +130,7 @@ def test_ir_calibrate(self): CHANNEL_NAME, CAL_TYPE2) xr.testing.assert_allclose(result, TBS_OUTPUT2, rtol=1E-5) - with self.assertRaises(NotImplementedError): + with pytest.raises(NotImplementedError): self.algo.ir_calibrate(RADIANCES_OUTPUT, CHANNEL_NAME, CAL_TYPEBAD) def test_vis_calibrate(self): diff --git a/satpy/tests/reader_tests/test_seviri_l1b_icare.py b/satpy/tests/reader_tests/test_seviri_l1b_icare.py index 372611c87d..7c32001168 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_icare.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_icare.py @@ -22,6 +22,7 @@ import dask.array as da import numpy as np +import pytest from satpy.readers import load_reader from satpy.tests.reader_tests.test_hdf4_utils import FakeHDF4FileHandler @@ -193,13 +194,13 @@ def _run_target(): plat, sens = _run_target() assert plat == sat - with self.assertRaises(NameError): - file_data["/attr/Sensors"] = "BADSAT/NOSENSE" + file_data["/attr/Sensors"] = "BADSAT/NOSENSE" + with pytest.raises(NameError): plat, sens = _run_target() def test_bad_bandname(self): """Check reader raises an error if a band bandname is passed.""" - with self.assertRaises(NameError): + with pytest.raises(NameError): self.p.target(mock.MagicMock(), mock.MagicMock(), mock.MagicMock())._get_dsname({"name": "badband"}) diff --git a/satpy/tests/reader_tests/test_utils.py b/satpy/tests/reader_tests/test_utils.py index 6471159449..67bdb41374 100644 --- a/satpy/tests/reader_tests/test_utils.py +++ b/satpy/tests/reader_tests/test_utils.py @@ -215,10 +215,12 @@ def test_np2str(self): # multi-element array npbytes = np.array([npbytes, npbytes]) 
- self.assertRaises(ValueError, hf.np2str, npbytes) + with pytest.raises(ValueError, match="Array is not a string type or is larger than 1"): + hf.np2str(npbytes) # non-array - self.assertRaises(ValueError, hf.np2str, 5) + with pytest.raises(ValueError, match="Array is not a string type or is larger than 1"): + hf.np2str(5) def test_get_earth_radius(self): """Test earth radius computation.""" @@ -419,7 +421,7 @@ def test_get_user_calibration_factors(self): assert offset == 0.0 # Check that incorrect dict keys throw an error - with self.assertRaises(KeyError): + with pytest.raises(KeyError): hf.get_user_calibration_factors("IR108", radcor_dict) diff --git a/satpy/tests/reader_tests/test_vii_base_nc.py b/satpy/tests/reader_tests/test_vii_base_nc.py index 82c0e6a4e1..60fe7dcdcb 100644 --- a/satpy/tests/reader_tests/test_vii_base_nc.py +++ b/satpy/tests/reader_tests/test_vii_base_nc.py @@ -25,6 +25,7 @@ from unittest import mock import numpy as np +import pytest import xarray as xr from netCDF4 import Dataset @@ -218,10 +219,10 @@ def test_file_reading(self): @mock.patch("satpy.readers.vii_base_nc.tie_points_geo_interpolation") def test_functions(self, tpgi_, tpi_): """Test the functions.""" - with self.assertRaises(NotImplementedError): + with pytest.raises(NotImplementedError): self.reader._perform_orthorectification(mock.Mock(), mock.Mock()) - with self.assertRaises(NotImplementedError): + with pytest.raises(NotImplementedError): self.reader._perform_calibration(mock.Mock(), mock.Mock()) # Checks that the _perform_interpolation function is correctly executed diff --git a/satpy/tests/reader_tests/test_vii_l1b_nc.py b/satpy/tests/reader_tests/test_vii_l1b_nc.py index d9ee714d09..22ab14e0a3 100644 --- a/satpy/tests/reader_tests/test_vii_l1b_nc.py +++ b/satpy/tests/reader_tests/test_vii_l1b_nc.py @@ -29,6 +29,7 @@ import dask.array as da import numpy as np +import pytest import xarray as xr from netCDF4 import Dataset @@ -147,7 +148,7 @@ def test_functions(self): assert np.all(return_variable == variable) # invalid calibration: raises a ValueError - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match="Unknown calibration invalid for dataset test"): self.reader._perform_calibration(variable, {"calibration": "invalid", "name": "test"}) From e8388678a16d1e8270b836f8b89165323c9af03c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 20 Nov 2023 20:03:51 -0600 Subject: [PATCH 0750/1416] Bump expected xarray version in test_cf.py --- satpy/tests/writer_tests/test_cf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 18b5947eb6..5723e88ba5 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -1448,5 +1448,5 @@ def _should_use_compression_keyword(): versions = _get_backend_versions() return ( versions["libnetcdf"] >= Version("4.9.0") and - versions["xarray"] >= Version("2023.11") + versions["xarray"] >= Version("2023.12") ) From 7591239c5aeeb96b0e331cf1e08762484ffb03ea Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 21 Nov 2023 08:20:06 +0100 Subject: [PATCH 0751/1416] Add "D" checks to ruff config --- doc/source/doi_role.py | 4 ++++ pyproject.toml | 3 ++- satpy/readers/atms_sdr_hdf5.py | 3 +-- satpy/readers/generic_image.py | 3 +-- satpy/readers/gerb_l2_hr_h5.py | 3 +-- satpy/readers/ici_l1b_nc.py | 6 ++---- satpy/readers/modis_l3.py | 2 -- satpy/readers/scatsat1_l2b.py | 3 +++ satpy/tests/reader_tests/test_abi_l1b.py | 6 ++++++ 
 satpy/tests/reader_tests/test_satpy_cf_nc.py       |  3 +++
 satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py |  1 +
 11 files changed, 24 insertions(+), 13 deletions(-)

diff --git a/doc/source/doi_role.py b/doc/source/doi_role.py
index 115e7895c6..0bb48d9880 100644
--- a/doc/source/doi_role.py
+++ b/doc/source/doi_role.py
@@ -20,6 +20,7 @@

 def doi_role(typ, rawtext, text, lineno, inliner, options=None, content=None):
+    """Create a doi role."""
     if options is None:
         options = {}
     if content is None:
@@ -34,6 +35,7 @@ def doi_role(typ, rawtext, text, lineno, inliner, options=None, content=None):


 def arxiv_role(typ, rawtext, text, lineno, inliner, options=None, content=None):
+    """Create an arXiv role."""
     if options is None:
         options = {}
     if content is None:
@@ -48,6 +50,7 @@ def arxiv_role(typ, rawtext, text, lineno, inliner, options=None, content=None):


 def setup_link_role(app):
+    """Set up the role link."""
     app.add_role("doi", doi_role, override=True)
     app.add_role("DOI", doi_role, override=True)
     app.add_role("arXiv", arxiv_role, override=True)
@@ -55,5 +58,6 @@ def setup_link_role(app):

 def setup(app):
+    """Set up the app."""
     app.connect("builder-inited", setup_link_role)
     return {"version": "0.1", "parallel_read_safe": True}
diff --git a/pyproject.toml b/pyproject.toml
index 1282120a59..fe5bc8dc59 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -16,8 +16,9 @@ line_length = 120

 [tool.ruff]
 # See https://docs.astral.sh/ruff/rules/
 # In the future, add "A", "B", "S", "N", "D"
-select = ["E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"]
+select = ["D", "E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"]
 line-length = 120
+ignore = ["D417"]

 [tool.ruff.per-file-ignores]
 "satpy/tests/*" = ["S101"]  # assert allowed in tests
diff --git a/satpy/readers/atms_sdr_hdf5.py b/satpy/readers/atms_sdr_hdf5.py
index 7f2d43bd71..dc78399aca 100644
--- a/satpy/readers/atms_sdr_hdf5.py
+++ b/satpy/readers/atms_sdr_hdf5.py
@@ -15,8 +15,7 @@
 # You should have received a copy of the GNU General Public License
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.

-"""
-Reader for the ATMS SDR format.
+"""Reader for the ATMS SDR format.

 A reader for Advanced Technology Microwave Sounder (ATMS) SDR data as it
 e.g. comes out of the CSPP package for processing Direct Readout data.
diff --git a/satpy/readers/generic_image.py b/satpy/readers/generic_image.py
index 1ba160095f..f6c983e8d5 100644
--- a/satpy/readers/generic_image.py
+++ b/satpy/readers/generic_image.py
@@ -15,8 +15,7 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # satpy.  If not, see <http://www.gnu.org/licenses/>.
-"""
-Reader for generic image (e.g. gif, png, jpg, tif, geotiff, ...).
+"""Reader for generic image (e.g. gif, png, jpg, tif, geotiff, ...).

 Returns a dataset without calibration.  Includes coordinates if
 available in the file (eg. geotiff).
diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py
index 0bf918d68f..4f34c1fde8 100644
--- a/satpy/readers/gerb_l2_hr_h5.py
+++ b/satpy/readers/gerb_l2_hr_h5.py
@@ -34,8 +34,7 @@


 def gerb_get_dataset(ds, ds_info):
-    """
-    Load a GERB dataset in memory from a HDF5 file or HDF5FileHandler.
+    """Load a GERB dataset in memory from a HDF5 file or HDF5FileHandler.

     The routine takes into account the quantisation factor and fill values.
""" diff --git a/satpy/readers/ici_l1b_nc.py b/satpy/readers/ici_l1b_nc.py index d6ebea0c56..b063c51c4f 100644 --- a/satpy/readers/ici_l1b_nc.py +++ b/satpy/readers/ici_l1b_nc.py @@ -176,8 +176,7 @@ def _interpolate_geo( latitude, n_samples, ): - """ - Perform the interpolation of geographic coordinates from tie points to pixel points. + """Perform the interpolation of geographic coordinates from tie points to pixel points. Args: longitude: xarray DataArray containing the longitude dataset to @@ -229,8 +228,7 @@ def _interpolate_viewing_angle( zenith, n_samples, ): - """ - Perform the interpolation of angular coordinates from tie points to pixel points. + """Perform the interpolation of angular coordinates from tie points to pixel points. Args: azimuth: xarray DataArray containing the azimuth angle dataset to diff --git a/satpy/readers/modis_l3.py b/satpy/readers/modis_l3.py index 29e0247fdc..2862301168 100644 --- a/satpy/readers/modis_l3.py +++ b/satpy/readers/modis_l3.py @@ -46,7 +46,6 @@ class ModisL3GriddedHDFFileHandler(HDFEOSGeoReader): """File handler for MODIS HDF-EOS Level 3 CMG gridded files.""" def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" - # Initialise set of variable names to carry through code handled_var_names = set() @@ -112,7 +111,6 @@ def get_dataset(self, dataset_id, dataset_info): def _get_area_extent(self): """Get the grid properties.""" - # Now compute the data extent upperleft = self.metadata["GridStructure"]["GRID_1"]["UpperLeftPointMtrs"] lowerright = self.metadata["GridStructure"]["GRID_1"]["LowerRightMtrs"] diff --git a/satpy/readers/scatsat1_l2b.py b/satpy/readers/scatsat1_l2b.py index 9989bf3d86..886ce458b3 100644 --- a/satpy/readers/scatsat1_l2b.py +++ b/satpy/readers/scatsat1_l2b.py @@ -26,8 +26,10 @@ class SCATSAT1L2BFileHandler(BaseFileHandler): + """File handler for ScatSat level 2 files, as distributed by Eumetsat in HDF5 format.""" def __init__(self, filename, filename_info, filetype_info): + """Initialize the file handler.""" super(SCATSAT1L2BFileHandler, self).__init__(filename, filename_info, filetype_info) self.h5f = h5py.File(self.filename, "r") h5data = self.h5f["science_data"] @@ -44,6 +46,7 @@ def __init__(self, filename, filename_info, filetype_info): self.longitude_scale = float(h5data.attrs["Longitude Scale"]) def get_dataset(self, key, info): + """Get the dataset.""" h5data = self.h5f["science_data"] stdname = info.get("standard_name") diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index 1c7d2c78ef..969c497410 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -131,11 +131,13 @@ def _create_fake_rad_dataset(rad: xr.DataArray, resolution: int) -> xr.Dataset: def generate_l1b_filename(chan_name: str) -> str: + """Generate a l1b filename.""" return f"OR_ABI-L1b-RadC-M4{chan_name}_G16_s20161811540362_e20161811545170_c20161811545230_suffix.nc" @pytest.fixture() def c01_refl(tmp_path) -> xr.DataArray: + """Load c01 reflectances.""" with _apply_dask_chunk_size(): reader = _create_reader_for_data(tmp_path, "C01", None, 1000) return reader.load(["C01"])["C01"] @@ -143,6 +145,7 @@ def c01_refl(tmp_path) -> xr.DataArray: @pytest.fixture() def c01_rad(tmp_path) -> xr.DataArray: + """Load c01 radiances.""" with _apply_dask_chunk_size(): reader = _create_reader_for_data(tmp_path, "C01", None, 1000) return reader.load([DataQuery(name="C01", calibration="radiance")])["C01"] @@ -150,6 
+153,7 @@ def c01_rad(tmp_path) -> xr.DataArray: @pytest.fixture() def c01_rad_h5netcdf(tmp_path) -> xr.DataArray: + """Load c01 radiances through h5netcdf.""" shape = RAD_SHAPE[1000] rad_data = (np.arange(shape[0] * shape[1]).reshape(shape) + 1.0) * 50.0 rad_data = (rad_data + 1.0) / 0.5 @@ -172,6 +176,7 @@ def c01_rad_h5netcdf(tmp_path) -> xr.DataArray: @pytest.fixture() def c01_counts(tmp_path) -> xr.DataArray: + """Load c01 counts.""" with _apply_dask_chunk_size(): reader = _create_reader_for_data(tmp_path, "C01", None, 1000) return reader.load([DataQuery(name="C01", calibration="counts")])["C01"] @@ -179,6 +184,7 @@ def c01_counts(tmp_path) -> xr.DataArray: @pytest.fixture() def c07_bt_creator(tmp_path) -> Callable: + """Create a loader for c07 brightness temperatures.""" def _load_data_array( clip_negative_radiances: bool = False, ): diff --git a/satpy/tests/reader_tests/test_satpy_cf_nc.py b/satpy/tests/reader_tests/test_satpy_cf_nc.py index e71534fbd2..0c22f5b3f1 100644 --- a/satpy/tests/reader_tests/test_satpy_cf_nc.py +++ b/satpy/tests/reader_tests/test_satpy_cf_nc.py @@ -89,6 +89,7 @@ def _create_test_netcdf(filename, resolution=742): @pytest.fixture(scope="session") def cf_scene(): + """Create a cf scene.""" tstart = datetime(2019, 4, 1, 12, 0) tend = datetime(2019, 4, 1, 12, 15) data_visir = np.array([[1, 2], [3, 4]]) @@ -214,6 +215,7 @@ def cf_scene(): @pytest.fixture() def nc_filename(tmp_path): + """Create an nc filename for viirs m band.""" now = datetime.utcnow() filename = f"testingcfwriter{now:%Y%j%H%M%S}-viirs-mband-20201007075915-20201007080744.nc" return str(tmp_path / filename) @@ -221,6 +223,7 @@ def nc_filename(tmp_path): @pytest.fixture() def nc_filename_i(tmp_path): + """Create an nc filename for viirs i band.""" now = datetime.utcnow() filename = f"testingcfwriter{now:%Y%j%H%M%S}-viirs-iband-20201007075915-20201007080744.nc" return str(tmp_path / filename) diff --git a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py index 49206962e5..7ec34fd9bf 100644 --- a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py +++ b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py @@ -31,6 +31,7 @@ @pytest.fixture() def nc_filename(tmp_path): + """Create an nc test data file and return its filename.""" now = datetime.datetime.utcnow() filename = f"VGAC_VJ10XMOD_A{now:%Y%j_%H%M}_n004946_K005.nc" filename_str = str(tmp_path / filename) From 8662786043afba31329a9fda964d4cdf6a2c1051 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 21 Nov 2023 08:48:10 +0100 Subject: [PATCH 0752/1416] Fix "A" ruff checks --- doc/source/conf.py | 2 +- pyproject.toml | 6 +- satpy/etc/eps_gomel1b_10.0.xml | 1785 ++++++++++++++++++++++++++++++++ satpy/readers/xmlformat.py | 18 +- satpy/tests/test_node.py | 2 +- satpy/tests/test_readers.py | 2 +- 6 files changed, 1800 insertions(+), 15 deletions(-) create mode 100644 satpy/etc/eps_gomel1b_10.0.xml diff --git a/doc/source/conf.py b/doc/source/conf.py index df006727c0..3aa810420e 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -117,7 +117,7 @@ def __getattr__(cls, name): # General information about the project. project = u"Satpy" -copyright = u"2009-{}, The PyTroll Team".format(datetime.utcnow().strftime("%Y")) +copyright = u"2009-{}, The PyTroll Team".format(datetime.utcnow().strftime("%Y")) # noqa: A001 # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
diff --git a/pyproject.toml b/pyproject.toml index fe5bc8dc59..61c08ba57b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,10 +15,10 @@ line_length = 120 [tool.ruff] # See https://docs.astral.sh/ruff/rules/ -# In the future, add "A", "B", "S", "N", "D" -select = ["D", "E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"] +# In the future, add "B", "S", "N" +select = ["A", "D", "E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"] line-length = 120 -ignore = ["D417"] +ignore = ["D417", "A003"] [tool.ruff.per-file-ignores] "satpy/tests/*" = ["S101"] # assert allowed in tests diff --git a/satpy/etc/eps_gomel1b_10.0.xml b/satpy/etc/eps_gomel1b_10.0.xml new file mode 100644 index 0000000000..39942ee907 --- /dev/null +++ b/satpy/etc/eps_gomel1b_10.0.xml @@ -0,0 +1,1785 @@ + + + + + + + + + + 130 + + + 80 + + PFS April 2004 + april04 + spring04 + + + EPS GOME Level 1B Format + + + This GOME 1B description was generated using the GOME PFS Excel document Issue 8 Revision 0 (eps_gomel1_8.0_names_masks_v13.xls) and pfs2xml version 3.3 + + + + GOME_*1B_*Z* + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + eps-product + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
[The 1,785 added lines of satpy/etc/eps_gomel1b_10.0.xml are omitted: the XML markup was stripped when this document was captured, leaving only the leading "+" diff markers and stray element text (visible above: "130", "80", "PFS April 2004", "april04", "spring04", "EPS GOME Level 1B Format", the filename pattern "GOME_*1B_*Z*", and "eps-product"). The file is an XML definition of the EPS GOME Level 1B record layout, evidently of the kind parsed by satpy/readers/xmlformat.py, which the same patch touches next.]
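For context before the next hunk: the `ascii` -> `text` renames in xmlformat.py below are what the newly enabled flake8-builtins rules (A002) demand, since a parameter named `ascii` hides the `ascii()` builtin for the whole function body. A minimal standalone sketch of the failure mode, assuming ruff with the "A" rules enabled; the function and variable names here are illustrative, not from the patch:

    # A002 in practice: the parameter shadows the builtin of the same name.
    def escape_all(values, ascii=False):  # ruff A002: argument "ascii" shadows a builtin
        if ascii:
            # "ascii" is now the bool argument, so the builtin is unreachable:
            return [ascii(v) for v in values]  # TypeError: 'bool' object is not callable
        return values

    def escape_all_fixed(values, text=False):  # renamed flag; the builtin stays reachable
        return [ascii(v) for v in values] if text else values

    print(escape_all_fixed(["å"], text=True))  # ["'\\xe5'"]

The xmlformat.py change applies the same rename without touching behaviour, since the flag is only ever tested, never called.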
diff --git a/satpy/readers/xmlformat.py b/satpy/readers/xmlformat.py index 969c30113a..21200d4c8d 100644 --- a/satpy/readers/xmlformat.py +++ b/satpy/readers/xmlformat.py @@ -31,22 +31,22 @@ "uinteger4": ">u4", } -def process_delimiter(elt, ascii=False): +def process_delimiter(elt, text=False): """Process a 'delimiter' tag.""" - del elt, ascii + del elt, text -def process_field(elt, ascii=False): +def process_field(elt, text=False): """Process a 'field' tag.""" # NOTE: if there is a variable defined in this field and it is different # from the default, we could change the value and restart. scale = np.uint8(1) - if elt.get("type") == "bitfield" and not ascii: + if elt.get("type") == "bitfield" and not text: current_type = ">u" + str(int(elt.get("length")) // 8) scale = np.dtype(current_type).type(1) elif (elt.get("length") is not None): - if ascii: + if text: add = 33 else: add = 0 @@ -64,9 +64,9 @@ def process_field(elt, ascii=False): return ((elt.get("name"), current_type, scale)) -def process_array(elt, ascii=False): +def process_array(elt, text=False): """Process an 'array' tag.""" - del ascii + del text chld = list(elt) if len(chld) > 1: raise ValueError() @@ -147,10 +147,10 @@ def parse_format(xml_file): types_scales = {} for prod in tree.find("product"): - ascii = (prod.tag in ["mphr", "sphr"]) + text = (prod.tag in ["mphr", "sphr"]) res = [] for i in prod: - lres = CASES[i.tag](i, ascii) + lres = CASES[i.tag](i, text) if lres is not None: res.append(lres) types_scales[(prod.tag, int(prod.get("subclass")))] = res diff --git a/satpy/tests/test_node.py b/satpy/tests/test_node.py index 7475b04d24..35fd8a27bb 100644 --- a/satpy/tests/test_node.py +++ b/satpy/tests/test_node.py @@ -26,7 +26,7 @@ class FakeCompositor: """A fake compositor.""" - def __init__(self, id): + def __init__(self, id): # noqa: A002 """Set up the fake compositor.""" self.id = id diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index 378f3fdb5a..d91e2b6fed 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -637,7 +637,7 @@ def test_available_readers_base_loader(self, monkeypatch): from satpy import available_readers from satpy._config import glob_config - def patched_import_error(name, globals=None, locals=None, fromlist=(), level=0): + def patched_import_error(name, globals=None, locals=None, fromlist=(), level=0): # noqa: A002 if name in ("netcdf4", ): raise ImportError(f"Mocked import error {name}") return real_import(name, globals=globals, locals=locals, fromlist=fromlist, level=level) From 47f54674a4f646cd799399b8d007161c91e85a7c Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 21 Nov 2023 09:00:40 +0100 Subject: [PATCH 0753/1416] Add noqa on A003 errors --- pyproject.toml | 2 +- satpy/composites/__init__.py | 2 +- satpy/readers/__init__.py | 2 +- satpy/readers/avhrr_l1b_gaclac.py | 2 +- satpy/scene.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 61c08ba57b..0f3569280c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,7 +18,7 @@ line_length = 120 # In the future, add "B", "S", "N" select = ["A", "D", "E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"] line-length = 120 -ignore = ["D417", "A003"] +ignore = ["D417"] [tool.ruff.per-file-ignores] "satpy/tests/*" = ["S101"] # assert allowed in tests diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index d3a1e510cb..52b7c1555d 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -112,7 
+112,7 @@ def __init__(self, name, prerequisites=None, optional_prerequisites=None, **kwar self.attrs = kwargs @property - def id(self): + def id(self): # noqa: A003 """Return the DataID of the object.""" try: return self.attrs["_satpy_id"] diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index 81ebf2393b..385e1126e4 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -705,7 +705,7 @@ def __repr__(self): """Representation of the object.""" return '' - def open(self, *args, **kwargs): + def open(self, *args, **kwargs): # noqa: A003 """Open the file. This is read-only. diff --git a/satpy/readers/avhrr_l1b_gaclac.py b/satpy/readers/avhrr_l1b_gaclac.py index c566175b8c..22dfc857f5 100644 --- a/satpy/readers/avhrr_l1b_gaclac.py +++ b/satpy/readers/avhrr_l1b_gaclac.py @@ -196,7 +196,7 @@ def get_dataset(self, key, info): return res - def slice(self, data, times): + def slice(self, data, times): # noqa: A003 """Select user-defined scanlines and/or strip invalid coordinates. Furthermore, update scanline timestamps. diff --git a/satpy/scene.py b/satpy/scene.py index d96c81a0e4..27822e9ad5 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -660,7 +660,7 @@ def _slice_datasets(self, dataset_ids, slice_key, new_area, area_only=True): else: replace_anc(new_ds, pres) - def slice(self, key): + def slice(self, key): # noqa: A003 """Slice Scene by dataset index. .. note:: From 33f354f9d6d8c9e62ca8f79d4cb60809d20b6235 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 21 Nov 2023 09:04:22 +0100 Subject: [PATCH 0754/1416] Add noqa for D417 --- doc/source/reader_table.py | 2 +- pyproject.toml | 1 - satpy/composites/__init__.py | 12 +- satpy/composites/abi.py | 2 +- satpy/composites/agri.py | 2 +- satpy/composites/glm.py | 2 +- satpy/dataset/data_dict.py | 4 +- satpy/dependency_tree.py | 8 +- satpy/enhancements/__init__.py | 10 +- satpy/etc/eps_gomel1b_10.0.xml | 1785 ------------------- satpy/modifiers/_crefl.py | 2 +- satpy/modifiers/filters.py | 2 +- satpy/modifiers/geometry.py | 8 +- satpy/modifiers/parallax.py | 2 +- satpy/modifiers/spectral.py | 4 +- satpy/multiscene/_multiscene.py | 2 +- satpy/readers/__init__.py | 6 +- satpy/readers/avhrr_l1b_gaclac.py | 2 +- satpy/readers/file_handlers.py | 2 +- satpy/readers/mviri_l1b_fiduceo_nc.py | 2 +- satpy/readers/nucaps.py | 2 +- satpy/readers/viirs_edr_active_fires.py | 2 +- satpy/readers/viirs_sdr.py | 2 +- satpy/resample.py | 4 +- satpy/scene.py | 2 +- satpy/tests/modifier_tests/test_parallax.py | 2 +- satpy/writers/awips_tiled.py | 4 +- satpy/writers/cf_writer.py | 2 +- satpy/writers/geotiff.py | 2 +- satpy/writers/ninjogeotiff.py | 2 +- 30 files changed, 49 insertions(+), 1835 deletions(-) delete mode 100644 satpy/etc/eps_gomel1b_10.0.xml diff --git a/doc/source/reader_table.py b/doc/source/reader_table.py index 3ddec3444b..618cb2b96b 100644 --- a/doc/source/reader_table.py +++ b/doc/source/reader_table.py @@ -39,7 +39,7 @@ def rst_table_row(columns=None): return row -def rst_table_header(name=None, header=None, header_rows=1, widths="auto"): +def rst_table_header(name=None, header=None, header_rows=1, widths="auto"): # noqa: D417 """Create header for rst table. 
Args: diff --git a/pyproject.toml b/pyproject.toml index 0f3569280c..4de1e302f4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,7 +18,6 @@ line_length = 120 # In the future, add "B", "S", "N" select = ["A", "D", "E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"] line-length = 120 -ignore = ["D417"] [tool.ruff.per-file-ignores] "satpy/tests/*" = ["S101"] # assert allowed in tests diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 52b7c1555d..fa43d4e689 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -343,7 +343,7 @@ class CategoricalDataCompositor(CompositeBase): res = [[20, 40, 30], [50, 30, 10]] """ - def __init__(self, name, lut=None, **kwargs): + def __init__(self, name, lut=None, **kwargs): # noqa: D417 """Get look-up-table used to recategorize data. Args: @@ -381,7 +381,7 @@ class GenericCompositor(CompositeBase): modes = {1: "L", 2: "LA", 3: "RGB", 4: "RGBA"} - def __init__(self, name, common_channel_mask=True, **kwargs): + def __init__(self, name, common_channel_mask=True, **kwargs): # noqa: D417 """Collect custom configuration values. Args: @@ -679,7 +679,7 @@ class DayNightCompositor(GenericCompositor): of the image (night or day). See the documentation below for more details. """ - def __init__(self, name, lim_low=85., lim_high=88., day_night="day_night", include_alpha=True, **kwargs): + def __init__(self, name, lim_low=85., lim_high=88., day_night="day_night", include_alpha=True, **kwargs): # noqa: D417 """Collect custom configuration values. Args: @@ -1014,7 +1014,7 @@ def __call__(self, projectables, *args, **kwargs): class CloudCompositor(GenericCompositor): """Detect clouds based on thresholding and use it as a mask for compositing.""" - def __init__(self, name, transition_min=258.15, transition_max=298.15, + def __init__(self, name, transition_min=258.15, transition_max=298.15, # noqa: D417 transition_gamma=3.0, **kwargs): """Collect custom configuration values. @@ -1357,7 +1357,7 @@ class StaticImageCompositor(GenericCompositor, DataDownloadMixin): """ - def __init__(self, name, filename=None, url=None, known_hash=None, area=None, + def __init__(self, name, filename=None, url=None, known_hash=None, area=None, # noqa: D417 **kwargs): """Collect custom configuration values. @@ -1735,7 +1735,7 @@ def _get_flag_value(mask, val): class LongitudeMaskingCompositor(SingleBandCompositor): """Masks areas outside defined longitudes.""" - def __init__(self, name, lon_min=None, lon_max=None, **kwargs): + def __init__(self, name, lon_min=None, lon_max=None, **kwargs): # noqa: D417 """Collect custom configuration values. Args: diff --git a/satpy/composites/abi.py b/satpy/composites/abi.py index 3ae5237906..88c0db1d8e 100644 --- a/satpy/composites/abi.py +++ b/satpy/composites/abi.py @@ -42,7 +42,7 @@ class SimulatedGreen(GenericCompositor): """ - def __init__(self, name, fractions=(0.465, 0.465, 0.07), **kwargs): + def __init__(self, name, fractions=(0.465, 0.465, 0.07), **kwargs): # noqa: D417 """Initialize fractions for input channels. Args: diff --git a/satpy/composites/agri.py b/satpy/composites/agri.py index 839706457e..20024282d2 100644 --- a/satpy/composites/agri.py +++ b/satpy/composites/agri.py @@ -42,7 +42,7 @@ class SimulatedRed(GenericCompositor): """ - def __init__(self, name, fractions=(1.0, 0.13, 0.87), **kwargs): + def __init__(self, name, fractions=(1.0, 0.13, 0.87), **kwargs): # noqa: D417 """Initialize fractions for input channels. 
Args: diff --git a/satpy/composites/glm.py b/satpy/composites/glm.py index e9b6ef275e..e1c9b676c6 100644 --- a/satpy/composites/glm.py +++ b/satpy/composites/glm.py @@ -43,7 +43,7 @@ class HighlightCompositor(GenericCompositor): """ - def __init__(self, name, min_highlight=0.0, max_highlight=10.0, + def __init__(self, name, min_highlight=0.0, max_highlight=10.0, # noqa: D417 max_factor=(0.8, 0.8, -0.8, 0), **kwargs): """Initialize composite with highlight factor options. diff --git a/satpy/dataset/data_dict.py b/satpy/dataset/data_dict.py index 790d688b24..783ddc4487 100644 --- a/satpy/dataset/data_dict.py +++ b/satpy/dataset/data_dict.py @@ -51,7 +51,7 @@ def get_best_dataset_key(key, choices): return [choice for choice, distance in zip(sorted_choices, distances) if distance == distances[0]] -def get_key(key, key_container, num_results=1, best=True, query=None, +def get_key(key, key_container, num_results=1, best=True, query=None, # noqa: D417 **kwargs): """Get the fully-specified key best matching the provided key. @@ -139,7 +139,7 @@ def keys(self, names=False, wavelengths=False): else: return keys - def get_key(self, match_key, num_results=1, best=True, **dfilter): + def get_key(self, match_key, num_results=1, best=True, **dfilter): # noqa: D417 """Get multiple fully-specified keys that match the provided query. Args: diff --git a/satpy/dependency_tree.py b/satpy/dependency_tree.py index d99fb536eb..7c2b65a6c5 100644 --- a/satpy/dependency_tree.py +++ b/satpy/dependency_tree.py @@ -327,7 +327,7 @@ def _create_subtree_from_reader(self, dataset_key, query): LOG.trace("Found reader provided dataset:\n\tRequested: {}\n\tFound: {}".format(dataset_key, node.name)) return node - def _find_reader_node(self, dataset_key, query): + def _find_reader_node(self, dataset_key, query): # noqa: D417 """Attempt to find a `DataID` in the available readers. Args: @@ -517,7 +517,7 @@ def get_modifier(self, comp_id): raise KeyError("Could not find modifier '{}'".format(modifier)) - def _create_required_subtrees(self, parent, prereqs, query=None): + def _create_required_subtrees(self, parent, prereqs, query=None): # noqa: D417 """Determine required prerequisite Nodes for a composite. Args: @@ -531,7 +531,7 @@ def _create_required_subtrees(self, parent, prereqs, query=None): raise MissingDependencies(unknown_datasets) return prereq_nodes - def _create_optional_subtrees(self, parent, prereqs, query=None): + def _create_optional_subtrees(self, parent, prereqs, query=None): # noqa: D417 """Determine optional prerequisite Nodes for a composite. Args: @@ -549,7 +549,7 @@ def _create_optional_subtrees(self, parent, prereqs, query=None): return prereq_nodes - def _create_prerequisite_subtrees(self, parent, prereqs, query=None): + def _create_prerequisite_subtrees(self, parent, prereqs, query=None): # noqa: D417 """Determine prerequisite Nodes for a composite. 
Args:
diff --git a/satpy/enhancements/__init__.py b/satpy/enhancements/__init__.py
index e2dda9cf63..00a0f8dd4e 100644
--- a/satpy/enhancements/__init__.py
+++ b/satpy/enhancements/__init__.py
@@ -131,7 +131,7 @@ def wrapper(data, **kwargs):
     return on_dask_array(wrapper)


-def piecewise_linear_stretch(
+def piecewise_linear_stretch(  # noqa: D417
     img: XRImage,
     xp: ArrayLike,
     fp: ArrayLike,
@@ -229,7 +229,7 @@ def _cira_stretch(band_data):
     return band_data


-def reinhard_to_srgb(img, saturation=1.25, white=100, **kwargs):
+def reinhard_to_srgb(img, saturation=1.25, white=100, **kwargs):  # noqa: D417
     """Stretch method based on the Reinhard algorithm, using luminance.

     Args:
@@ -293,7 +293,7 @@ def _lookup_table(band_data, luts=None, index=-1):
     return lut[band_data]


-def colorize(img, **kwargs):
+def colorize(img, **kwargs):  # noqa: D417
     """Colorize the given image.

     Args:
@@ -365,7 +365,7 @@ def _merge_colormaps(kwargs, img=None):
     return full_cmap


-def create_colormap(palette, img=None):
+def create_colormap(palette, img=None):  # noqa: D417
     """Create colormap of the given numpy file, color vector, or colormap.

     Args:
@@ -525,7 +525,7 @@ def _three_d_effect_delayed(band_data, kernel, mode):
     return new_data.reshape((1, band_data.shape[0], band_data.shape[1]))


-def btemp_threshold(img, min_in, max_in, threshold, threshold_out=None, **kwargs):
+def btemp_threshold(img, min_in, max_in, threshold, threshold_out=None, **kwargs):  # noqa: D417
     """Scale data linearly in two separate regions.

     This enhancement scales the input data linearly by splitting the data
diff --git a/satpy/etc/eps_gomel1b_10.0.xml b/satpy/etc/eps_gomel1b_10.0.xml
deleted file mode 100644
index 39942ee907..0000000000
--- a/satpy/etc/eps_gomel1b_10.0.xml
+++ /dev/null
@@ -1,1785 +0,0 @@
[1785 deleted XML lines elided: the file's markup was stripped in extraction, leaving only bare "-" deletion markers. Recoverable fragments: "130", "80", "PFS April 2004", "april04", "spring04", "EPS GOME Level 1B Format", "This GOME 1B description was generated using the GOME PFS Excel document Issue 8 Revision 0 (eps_gomel1_8.0_names_masks_v13.xls) and pfs2xml version 3.3", "GOME_*1B_*Z*", "eps-product".]
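For readers skimming these noqa patches: ruff's D417 rule ("Missing argument descriptions in the docstring") fires when a Google-style Args: section documents only some of a function's parameters, and A003, suppressed in the previous patch, fires when a class attribute or method shadows a Python builtin such as id, open, or slice. A trailing noqa comment silences exactly one rule on exactly one line, which is why each affected signature gains the marker instead of the rule staying in the project-wide ignore list. A minimal illustration of the D417 case, using a hypothetical function that is not taken from the repository:

def rescale(data, factor=1.0, offset=0.0):  # noqa: D417
    """Rescale the data.

    Args:
        factor: Multiplicative factor applied to data.
    """
    # "offset" is missing from the Args section above; without the
    # trailing noqa comment, ruff would report D417 on this docstring.
    return data * factor + offset


print(rescale(3.0, factor=2.0, offset=1.0))  # -> 7.0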
diff --git a/satpy/modifiers/_crefl.py b/satpy/modifiers/_crefl.py index bc42228f26..a68d9a460e 100644 --- a/satpy/modifiers/_crefl.py +++ b/satpy/modifiers/_crefl.py @@ -35,7 +35,7 @@ class ReflectanceCorrector(ModifierBase, DataDownloadMixin): Uses a python rewrite of the C CREFL code written for VIIRS and MODIS. """ - def __init__(self, *args, dem_filename=None, dem_sds="averaged elevation", + def __init__(self, *args, dem_filename=None, dem_sds="averaged elevation", # noqa: D417 url=None, known_hash=None, **kwargs): """Initialize the compositor with values from the user or from the configuration file. diff --git a/satpy/modifiers/filters.py b/satpy/modifiers/filters.py index 151082e723..0bfc3592b7 100644 --- a/satpy/modifiers/filters.py +++ b/satpy/modifiers/filters.py @@ -11,7 +11,7 @@ class Median(ModifierBase): """Apply a median filter to the band.""" - def __init__(self, median_filter_params, **kwargs): + def __init__(self, median_filter_params, **kwargs): # noqa: D417 """Create the instance. Args: diff --git a/satpy/modifiers/geometry.py b/satpy/modifiers/geometry.py index 1194eb036a..cc903ad5e9 100644 --- a/satpy/modifiers/geometry.py +++ b/satpy/modifiers/geometry.py @@ -33,7 +33,7 @@ class SunZenithCorrectorBase(ModifierBase): """Base class for sun zenith correction modifiers.""" - def __init__(self, max_sza=95.0, **kwargs): + def __init__(self, max_sza=95.0, **kwargs): # noqa: D417 """Collect custom configuration values. Args: @@ -96,7 +96,7 @@ class SunZenithCorrector(SunZenithCorrectorBase): """ - def __init__(self, correction_limit=88., **kwargs): + def __init__(self, correction_limit=88., **kwargs): # noqa: D417 """Collect custom configuration values. Args: @@ -142,7 +142,7 @@ class EffectiveSolarPathLengthCorrector(SunZenithCorrectorBase): """ - def __init__(self, correction_limit=88., **kwargs): + def __init__(self, correction_limit=88., **kwargs): # noqa: D417 """Collect custom configuration values. Args: @@ -177,7 +177,7 @@ class SunZenithReducer(SunZenithCorrectorBase): """ - def __init__(self, correction_limit=55., max_sza=90, strength=1.5, **kwargs): + def __init__(self, correction_limit=55., max_sza=90, strength=1.5, **kwargs): # noqa: D417 """Collect custom configuration values. Args: diff --git a/satpy/modifiers/parallax.py b/satpy/modifiers/parallax.py index 8c5c138e5d..9d70aa12c3 100644 --- a/satpy/modifiers/parallax.py +++ b/satpy/modifiers/parallax.py @@ -265,7 +265,7 @@ def __init__(self, base_area, self.debug_mode = debug_mode self.diagnostics = {} - def __call__(self, cth_dataset, **kwargs): + def __call__(self, cth_dataset, **kwargs): # noqa: D417 """Apply parallax correction to dataset. Args: diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index e3ea3214b8..46466540c9 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ -43,7 +43,7 @@ class NIRReflectance(ModifierBase): TERMINATOR_LIMIT = 85.0 MASKING_LIMIT = 88.0 - def __init__(self, sunz_threshold=TERMINATOR_LIMIT, + def __init__(self, sunz_threshold=TERMINATOR_LIMIT, # noqa: D417 masking_limit=MASKING_LIMIT, **kwargs): """Collect custom configuration values. @@ -139,7 +139,7 @@ def _init_reflectance_calculator(self, metadata): class NIREmissivePartFromReflectance(NIRReflectance): """Get the emissive part of NIR bands.""" - def __init__(self, sunz_threshold=None, **kwargs): + def __init__(self, sunz_threshold=None, **kwargs): # noqa: D417 """Collect custom configuration values. 
Args: diff --git a/satpy/multiscene/_multiscene.py b/satpy/multiscene/_multiscene.py index 976fbbbd2a..4440641d8f 100644 --- a/satpy/multiscene/_multiscene.py +++ b/satpy/multiscene/_multiscene.py @@ -192,7 +192,7 @@ def first_scene(self): return self._scene_gen.first @classmethod - def from_files( + def from_files( # noqa: D417 cls, files_to_sort: Collection[str], reader: str | Collection[str] | None = None, diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index 385e1126e4..c8fc0a8b69 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -111,7 +111,7 @@ def group_files(files_to_sort, reader=None, time_threshold=10, return list(_filter_groups(groups, missing=missing)) -def _assign_files_to_readers(files_to_sort, reader_names, +def _assign_files_to_readers(files_to_sort, reader_names, # noqa: D417 reader_kwargs): """Assign files to readers. @@ -190,7 +190,7 @@ def _get_file_keys_for_reader_files(reader_files, group_keys=None): return file_keys -def _get_sorted_file_groups(all_file_keys, time_threshold): +def _get_sorted_file_groups(all_file_keys, time_threshold): # noqa: D417 """Get sorted file groups. Get a list of dictionaries, where each list item consists of a dictionary @@ -673,7 +673,7 @@ class FSFile(os.PathLike): """ - def __init__(self, file, fs=None): + def __init__(self, file, fs=None): # noqa: D417 """Initialise the FSFile instance. Args: diff --git a/satpy/readers/avhrr_l1b_gaclac.py b/satpy/readers/avhrr_l1b_gaclac.py index 22dfc857f5..cfc3e1283e 100644 --- a/satpy/readers/avhrr_l1b_gaclac.py +++ b/satpy/readers/avhrr_l1b_gaclac.py @@ -60,7 +60,7 @@ class GACLACFile(BaseFileHandler): """Reader for GAC and LAC data.""" - def __init__(self, filename, filename_info, filetype_info, + def __init__(self, filename, filename_info, filetype_info, # noqa: D417 start_line=None, end_line=None, strip_invalid_coords=True, interpolate_coords=True, **reader_kwargs): """Init the file handler. diff --git a/satpy/readers/file_handlers.py b/satpy/readers/file_handlers.py index 3fdeed1edc..66a028eb4c 100644 --- a/satpy/readers/file_handlers.py +++ b/satpy/readers/file_handlers.py @@ -25,7 +25,7 @@ from satpy.readers import open_file_or_filename -def open_dataset(filename, *args, **kwargs): +def open_dataset(filename, *args, **kwargs): # noqa: D417 """Open a file with xarray. Args: diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index 9a309a0bb8..fc5aea2c8e 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -540,7 +540,7 @@ class FiduceoMviriBase(BaseFileHandler): "IR": "count_ir" } - def __init__(self, filename, filename_info, filetype_info, + def __init__(self, filename, filename_info, filetype_info, # noqa: D417 mask_bad_quality=False): """Initialize the file handler. diff --git a/satpy/readers/nucaps.py b/satpy/readers/nucaps.py index 2c9e2ba39f..19db6f9976 100644 --- a/satpy/readers/nucaps.py +++ b/satpy/readers/nucaps.py @@ -237,7 +237,7 @@ def get_dataset(self, dataset_id, ds_info): class NUCAPSReader(FileYAMLReader): """Reader for NUCAPS NetCDF4 files.""" - def __init__(self, config_files, mask_surface=True, mask_quality=True, **kwargs): + def __init__(self, config_files, mask_surface=True, mask_quality=True, **kwargs): # noqa: D417 """Configure reader behavior. 
Args: diff --git a/satpy/readers/viirs_edr_active_fires.py b/satpy/readers/viirs_edr_active_fires.py index bd8f3f6d69..9fa5b5d59a 100644 --- a/satpy/readers/viirs_edr_active_fires.py +++ b/satpy/readers/viirs_edr_active_fires.py @@ -46,7 +46,7 @@ def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=auto_maskandscale, xarray_kwargs=xarray_kwargs) self.prefix = filetype_info.get("variable_prefix") - def get_dataset(self, dsid, dsinfo): + def get_dataset(self, dsid, dsinfo): # noqa: D417 """Get requested data as DataArray. Args: diff --git a/satpy/readers/viirs_sdr.py b/satpy/readers/viirs_sdr.py index db9ba9ba10..eef02f7777 100644 --- a/satpy/readers/viirs_sdr.py +++ b/satpy/readers/viirs_sdr.py @@ -185,7 +185,7 @@ def split_desired_other(fhs, prime_geo, second_geo): class VIIRSSDRReader(FileYAMLReader): """Custom file reader for finding VIIRS SDR geolocation at runtime.""" - def __init__(self, config_files, use_tc=None, **kwargs): + def __init__(self, config_files, use_tc=None, **kwargs): # noqa: D417 """Initialize file reader and adjust geolocation preferences. Args: diff --git a/satpy/resample.py b/satpy/resample.py index c8ed073ae5..ddab90be82 100644 --- a/satpy/resample.py +++ b/satpy/resample.py @@ -823,7 +823,7 @@ def compute(self, data, **kwargs): """Call the resampling.""" raise NotImplementedError("Use the sub-classes") - def resample(self, data, **kwargs): + def resample(self, data, **kwargs): # noqa: D417 """Resample `data` by calling `precompute` and `compute` methods. Args: @@ -899,7 +899,7 @@ class BucketAvg(BucketResamplerBase): """ - def compute(self, data, fill_value=np.nan, skipna=True, **kwargs): + def compute(self, data, fill_value=np.nan, skipna=True, **kwargs): # noqa: D417 """Call the resampling. Args: diff --git a/satpy/scene.py b/satpy/scene.py index 27822e9ad5..9d9057c907 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1384,7 +1384,7 @@ def unload(self, keepables=None): LOG.debug("Unloading dataset: %r", ds_id) del self._datasets[ds_id] - def load(self, wishlist, calibration="*", resolution="*", + def load(self, wishlist, calibration="*", resolution="*", # noqa: D417 polarization="*", level="*", modifiers="*", generate=True, unload=True, **kwargs): """Read and generate requested datasets. diff --git a/satpy/tests/modifier_tests/test_parallax.py b/satpy/tests/modifier_tests/test_parallax.py index e1b426dce2..b769e45608 100644 --- a/satpy/tests/modifier_tests/test_parallax.py +++ b/satpy/tests/modifier_tests/test_parallax.py @@ -49,7 +49,7 @@ def fake_tle(): line2="2 40732 0.2533 325.0106 0000976 118.8734 330.4058 1.00272123 23817") -def _get_fake_areas(center, sizes, resolution, code=4326): +def _get_fake_areas(center, sizes, resolution, code=4326): # noqa: D417 """Get multiple square areas with the same center. Returns multiple square areas centered at the same location diff --git a/satpy/writers/awips_tiled.py b/satpy/writers/awips_tiled.py index 9bab65fe35..15680e8091 100644 --- a/satpy/writers/awips_tiled.py +++ b/satpy/writers/awips_tiled.py @@ -422,7 +422,7 @@ def __call__(self): class LetteredTileGenerator(NumberedTileGenerator): """Helper class to generate per-tile metadata for lettered tiles.""" - def __init__(self, area_definition, extents, sector_crs, + def __init__(self, area_definition, extents, sector_crs, # noqa: D417 cell_size=(2000000, 2000000), num_subtiles=None, use_sector_reference=False): """Initialize tile information for later generation. 
@@ -1501,7 +1501,7 @@ def _get_tile_data_info(self, data_arrs, creation_time, source_name): return ds_info # TODO: Add additional untiled variable support - def save_datasets(self, datasets, sector_id=None, + def save_datasets(self, datasets, sector_id=None, # noqa: D417 source_name=None, tile_count=(1, 1), tile_size=None, lettered_grid=False, num_subtiles=None, diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 506a8bf561..301df399c8 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -1092,7 +1092,7 @@ def save_dataset(self, dataset, filename=None, fill_value=None, **kwargs): """Save the *dataset* to a given *filename*.""" return self.save_datasets([dataset], filename, **kwargs) - def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, engine=None, epoch=EPOCH, + def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, engine=None, epoch=EPOCH, # noqa: D417 flatten_attrs=False, exclude_attrs=None, include_lonlats=True, pretty=False, include_orig_name=True, numeric_name_prefix="CHANNEL_", **to_netcdf_kwargs): """Save the given datasets in one netCDF file. diff --git a/satpy/writers/geotiff.py b/satpy/writers/geotiff.py index 1a522ecd68..229cf777db 100644 --- a/satpy/writers/geotiff.py +++ b/satpy/writers/geotiff.py @@ -137,7 +137,7 @@ def separate_init_kwargs(cls, kwargs): return init_kwargs, kwargs - def save_image( + def save_image( # noqa: D417 self, img: XRImage, filename: Optional[str] = None, diff --git a/satpy/writers/ninjogeotiff.py b/satpy/writers/ninjogeotiff.py index a8f603861e..5f88cc52ed 100644 --- a/satpy/writers/ninjogeotiff.py +++ b/satpy/writers/ninjogeotiff.py @@ -103,7 +103,7 @@ class NinJoGeoTIFFWriter(GeoTIFFWriter): scale_offset_tag_names = ("ninjo_Gradient", "ninjo_AxisIntercept") - def save_image( + def save_image( # noqa: D417 self, image, filename=None, fill_value=None, compute=True, keep_palette=False, cmap=None, overviews=None, overviews_minsize=256, overviews_resampling=None, From 65d35510f9fa9d2da60b93ec209b746e44a57ea3 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 21 Nov 2023 10:29:53 +0200 Subject: [PATCH 0755/1416] Set dtype for get_lonlats() in NIR reflectance calculation --- satpy/modifiers/spectral.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index e3ea3214b8..979469876a 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ -107,7 +107,7 @@ def _get_sun_zenith_from_provided_data(projectables, optional_datasets): if sun_zenith_angle is None: raise ImportError("Module pyorbital.astronomy needed to compute sun zenith angles.") _nir = projectables[0] - lons, lats = _nir.attrs["area"].get_lonlats(chunks=_nir.data.chunks) + lons, lats = _nir.attrs["area"].get_lonlats(chunks=_nir.data.chunks, dtype=_nir.dtype) sun_zenith = sun_zenith_angle(_nir.attrs["start_time"], lons, lats) return sun_zenith From e40fb6cdbb28483459c9c420788ce9c1b8b58d0b Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 21 Nov 2023 10:50:35 +0200 Subject: [PATCH 0756/1416] Check that NIR dtype is passed to get_lonlats() call --- satpy/tests/test_modifiers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index 4aece73487..fccda40c13 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -273,7 +273,7 @@ def test_no_sunz_no_co2(self, calculator, apply_modifier_info, sza): 
# due to copying of DataArrays, self.get_lonlats is not the same as the one that was called # we must used the area from the final result DataArray - res.attrs["area"].get_lonlats.assert_called() + res.attrs["area"].get_lonlats.assert_called_with(chunks=((2,), (2,)), dtype=self.nir.dtype) sza.assert_called_with(self.start_time, self.lons, self.lats) self.refl_from_tbs.assert_called_with(self.da_sunz, self.nir.data, self.ir_.data, tb_ir_co2=None) assert np.allclose(res.data, self.refl * 100).compute() From 4c7b3301d42bab04ed202e34c7f8fa4bfefee005 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 21 Nov 2023 13:02:46 +0200 Subject: [PATCH 0757/1416] Move SZA dtype determination one level up --- satpy/modifiers/spectral.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index 979469876a..7e3ffe66df 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ -75,7 +75,7 @@ def _get_reflectance_as_dataarray(self, projectables, optional_datasets): da_nir = _nir.data da_tb11 = _tb11.data da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets) - da_sun_zenith = self._get_sun_zenith_from_provided_data(projectables, optional_datasets) + da_sun_zenith = self._get_sun_zenith_from_provided_data(_nir, optional_datasets, _nir.dtype) logger.info("Getting reflective part of %s", _nir.attrs["name"]) reflectance = self._get_reflectance_as_dask(da_nir, da_tb11, da_tb13_4, da_sun_zenith, _nir.attrs) @@ -95,7 +95,7 @@ def _get_tb13_4_from_optionals(optional_datasets): return tb13_4 @staticmethod - def _get_sun_zenith_from_provided_data(projectables, optional_datasets): + def _get_sun_zenith_from_provided_data(_nir, optional_datasets, dtype): """Get the sunz from available data or compute it if unavailable.""" sun_zenith = None @@ -106,8 +106,7 @@ def _get_sun_zenith_from_provided_data(projectables, optional_datasets): if sun_zenith is None: if sun_zenith_angle is None: raise ImportError("Module pyorbital.astronomy needed to compute sun zenith angles.") - _nir = projectables[0] - lons, lats = _nir.attrs["area"].get_lonlats(chunks=_nir.data.chunks, dtype=_nir.dtype) + lons, lats = _nir.attrs["area"].get_lonlats(chunks=_nir.data.chunks, dtype=dtype) sun_zenith = sun_zenith_angle(_nir.attrs["start_time"], lons, lats) return sun_zenith From b992edb6e774c2a9e1dfa35426a54df9c7266492 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 21 Nov 2023 13:43:02 +0200 Subject: [PATCH 0758/1416] Rename _nir variable to nir --- satpy/modifiers/spectral.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index 7e3ffe66df..431b118c1c 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ -71,16 +71,16 @@ def __call__(self, projectables, optional_datasets=None, **info): def _get_reflectance_as_dataarray(self, projectables, optional_datasets): """Get the reflectance as a dataarray.""" - _nir, _tb11 = projectables - da_nir = _nir.data + nir, _tb11 = projectables + da_nir = nir.data da_tb11 = _tb11.data da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets) - da_sun_zenith = self._get_sun_zenith_from_provided_data(_nir, optional_datasets, _nir.dtype) + da_sun_zenith = self._get_sun_zenith_from_provided_data(nir, optional_datasets, nir.dtype) - logger.info("Getting reflective part of %s", _nir.attrs["name"]) - reflectance = self._get_reflectance_as_dask(da_nir, da_tb11, da_tb13_4, da_sun_zenith, 
_nir.attrs) + logger.info("Getting reflective part of %s", nir.attrs["name"]) + reflectance = self._get_reflectance_as_dask(da_nir, da_tb11, da_tb13_4, da_sun_zenith, nir.attrs) - proj = self._create_modified_dataarray(reflectance, base_dataarray=_nir) + proj = self._create_modified_dataarray(reflectance, base_dataarray=nir) proj.attrs["units"] = "%" return proj @@ -95,7 +95,7 @@ def _get_tb13_4_from_optionals(optional_datasets): return tb13_4 @staticmethod - def _get_sun_zenith_from_provided_data(_nir, optional_datasets, dtype): + def _get_sun_zenith_from_provided_data(nir, optional_datasets, dtype): """Get the sunz from available data or compute it if unavailable.""" sun_zenith = None @@ -106,8 +106,8 @@ def _get_sun_zenith_from_provided_data(_nir, optional_datasets, dtype): if sun_zenith is None: if sun_zenith_angle is None: raise ImportError("Module pyorbital.astronomy needed to compute sun zenith angles.") - lons, lats = _nir.attrs["area"].get_lonlats(chunks=_nir.data.chunks, dtype=dtype) - sun_zenith = sun_zenith_angle(_nir.attrs["start_time"], lons, lats) + lons, lats = nir.attrs["area"].get_lonlats(chunks=nir.data.chunks, dtype=dtype) + sun_zenith = sun_zenith_angle(nir.attrs["start_time"], lons, lats) return sun_zenith def _create_modified_dataarray(self, reflectance, base_dataarray): From a0c2c23b0bea0fd0e8747e6e24b4921e94263edc Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 21 Nov 2023 14:23:45 +0200 Subject: [PATCH 0759/1416] Fix NIREmissive SZA calculation to use the same as NIRReflective --- satpy/modifiers/spectral.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index 431b118c1c..ea7cbc6bac 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ -162,16 +162,16 @@ def __call__(self, projectables, optional_datasets=None, **info): def _get_emissivity_as_dataarray(self, projectables, optional_datasets): """Get the emissivity as a dataarray.""" - _nir, _tb11 = projectables - da_nir = _nir.data + nir, _tb11 = projectables + da_nir = nir.data da_tb11 = _tb11.data da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets) - da_sun_zenith = self._get_sun_zenith_from_provided_data(projectables, optional_datasets) + da_sun_zenith = self._get_sun_zenith_from_provided_data(nir, optional_datasets, nir.dtype) - logger.info("Getting emissive part of %s", _nir.attrs["name"]) - emissivity = self._get_emissivity_as_dask(da_nir, da_tb11, da_tb13_4, da_sun_zenith, _nir.attrs) + logger.info("Getting emissive part of %s", nir.attrs["name"]) + emissivity = self._get_emissivity_as_dask(da_nir, da_tb11, da_tb13_4, da_sun_zenith, nir.attrs) - proj = self._create_modified_dataarray(emissivity, base_dataarray=_nir) + proj = self._create_modified_dataarray(emissivity, base_dataarray=nir) proj.attrs["units"] = "K" return proj From a7a023619cd19951e3c5005a767b9d8c84b46a16 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 21 Nov 2023 17:42:12 +0200 Subject: [PATCH 0760/1416] Add pykdtree as unstable dependency --- .github/workflows/ci.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 6e1fdfc781..de5409cfd3 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -84,6 +84,7 @@ jobs: git+https://github.com/rasterio/rasterio \ git+https://github.com/pydata/bottleneck \ git+https://github.com/pydata/xarray \ + git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/astropy/astropy; 
LD_PRELOAD=$(python -c "import sys; print(sys.prefix)")/lib/libstdc++.so echo "LD_PRELOAD=${LD_PRELOAD}" >> $GITHUB_ENV From 12561044220f621742744e2c75e186b4b32ffec2 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 10:26:58 -0600 Subject: [PATCH 0761/1416] Try building pykdtree with numpy 2 in ci.yaml --- .github/workflows/ci.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index de5409cfd3..bd39401623 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -64,6 +64,7 @@ jobs: - name: Install unstable dependencies if: matrix.experimental == true shell: bash -l {0} + # Install pykdtree with --no-build-isolation so it builds with numpy 2.0 # We must get LD_PRELOAD for stdlibc++ or else the manylinux wheels # may break the conda-forge libraries trying to use newer glibc versions run: | @@ -75,6 +76,7 @@ jobs: numpy \ pandas \ scipy; \ + python -m pip install --no-deps --upgrade --no-build-isolation git+https://github.com/storpipfugl/pykdtree; \ python -m pip install \ --no-deps --upgrade \ git+https://github.com/dask/dask \ @@ -84,7 +86,6 @@ jobs: git+https://github.com/rasterio/rasterio \ git+https://github.com/pydata/bottleneck \ git+https://github.com/pydata/xarray \ - git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/astropy/astropy; LD_PRELOAD=$(python -c "import sys; print(sys.prefix)")/lib/libstdc++.so echo "LD_PRELOAD=${LD_PRELOAD}" >> $GITHUB_ENV From efe647fb83df65b10e0f0acaa5584dc0ee96ec88 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 10:50:42 -0600 Subject: [PATCH 0762/1416] More debugging for unstable in ci.yaml --- .github/workflows/ci.yaml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index bd39401623..d976ca2251 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -75,8 +75,9 @@ jobs: matplotlib \ numpy \ pandas \ - scipy; \ - python -m pip install --no-deps --upgrade --no-build-isolation git+https://github.com/storpipfugl/pykdtree; \ + scipy + python -m pip install --no-deps --upgrade --no-build-isolation -vv git+https://github.com/storpipfugl/pykdtree + python -c "from pykdtree.kdtree import KDTree" python -m pip install \ --no-deps --upgrade \ git+https://github.com/dask/dask \ @@ -86,7 +87,7 @@ jobs: git+https://github.com/rasterio/rasterio \ git+https://github.com/pydata/bottleneck \ git+https://github.com/pydata/xarray \ - git+https://github.com/astropy/astropy; + git+https://github.com/astropy/astropy LD_PRELOAD=$(python -c "import sys; print(sys.prefix)")/lib/libstdc++.so echo "LD_PRELOAD=${LD_PRELOAD}" >> $GITHUB_ENV From 05111affd16b2d91ffc140b191c70024109f2a2c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 11:12:47 -0600 Subject: [PATCH 0763/1416] Try removing pykdtree first in unstable CI --- .github/workflows/ci.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index d976ca2251..0a03e003fa 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -76,7 +76,8 @@ jobs: numpy \ pandas \ scipy - python -m pip install --no-deps --upgrade --no-build-isolation -vv git+https://github.com/storpipfugl/pykdtree + mamba remove --force-remove -y pykdtree + python -m pip install --no-deps --upgrade --no-build-isolation -vvv git+https://github.com/storpipfugl/pykdtree python -c "from pykdtree.kdtree import KDTree" 
python -m pip install \ --no-deps --upgrade \ From 8a55d33e0d9ac2af710797536ec38cc5129e9a5a Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 11:26:15 -0600 Subject: [PATCH 0764/1416] Build pyresample with numpy 2 in unstable CI --- .github/workflows/ci.yaml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 0a03e003fa..b371893b8a 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -76,9 +76,10 @@ jobs: numpy \ pandas \ scipy - mamba remove --force-remove -y pykdtree - python -m pip install --no-deps --upgrade --no-build-isolation -vvv git+https://github.com/storpipfugl/pykdtree - python -c "from pykdtree.kdtree import KDTree" + mamba remove --force-remove -y pykdtree pyresample + python -m pip install --no-deps --upgrade --no-build-isolation -vvv \ + git+https://github.com/storpipfugl/pykdtree \ + git+https://github.com/pytroll/pyresample python -m pip install \ --no-deps --upgrade \ git+https://github.com/dask/dask \ From c6bb954ac6f58e4d4c8e520eef89c6ffdb79327a Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 11:33:09 -0600 Subject: [PATCH 0765/1416] Add missing versioneer unstable CI dependency --- .github/workflows/ci.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index b371893b8a..36aed73828 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -79,7 +79,8 @@ jobs: mamba remove --force-remove -y pykdtree pyresample python -m pip install --no-deps --upgrade --no-build-isolation -vvv \ git+https://github.com/storpipfugl/pykdtree \ - git+https://github.com/pytroll/pyresample + git+https://github.com/pytroll/pyresample \ + versioneer python -m pip install \ --no-deps --upgrade \ git+https://github.com/dask/dask \ From 522156425acb1e3013093c0faaafe5c817521aca Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 11:45:45 -0600 Subject: [PATCH 0766/1416] Install versioneer in a different way --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 36aed73828..7e6e446568 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -68,6 +68,7 @@ jobs: # We must get LD_PRELOAD for stdlibc++ or else the manylinux wheels # may break the conda-forge libraries trying to use newer glibc versions run: | + python -m pip install versioneer python -m pip install \ --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple/ \ --trusted-host pypi.anaconda.org \ @@ -79,8 +80,7 @@ jobs: mamba remove --force-remove -y pykdtree pyresample python -m pip install --no-deps --upgrade --no-build-isolation -vvv \ git+https://github.com/storpipfugl/pykdtree \ - git+https://github.com/pytroll/pyresample \ - versioneer + git+https://github.com/pytroll/pyresample python -m pip install \ --no-deps --upgrade \ git+https://github.com/dask/dask \ From 921ed0f5451272bd120fbc16ede325eb307d59dd Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 21 Nov 2023 19:54:25 +0200 Subject: [PATCH 0767/1416] Refactor _get_reflectance/emissivity_as_dataarray --- satpy/modifiers/spectral.py | 33 +++++++++++++++------------------ 1 file changed, 15 insertions(+), 18 deletions(-) diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index ea7cbc6bac..28a3804da7 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ 
-67,23 +67,24 @@ def __call__(self, projectables, optional_datasets=None, **info): Not supposed to be used for wavelength outside [3, 4] µm. """ projectables = self.match_data_arrays(projectables) - return self._get_reflectance_as_dataarray(projectables, optional_datasets) + inputs = self._get_nir_inputs(projectables, optional_datasets) + return self._get_reflectance_as_dataarray(*inputs) - def _get_reflectance_as_dataarray(self, projectables, optional_datasets): + def _get_reflectance_as_dataarray(self, nir, da_tb11, da_tb13_4, da_sun_zenith): """Get the reflectance as a dataarray.""" - nir, _tb11 = projectables - da_nir = nir.data - da_tb11 = _tb11.data - da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets) - da_sun_zenith = self._get_sun_zenith_from_provided_data(nir, optional_datasets, nir.dtype) - logger.info("Getting reflective part of %s", nir.attrs["name"]) - reflectance = self._get_reflectance_as_dask(da_nir, da_tb11, da_tb13_4, da_sun_zenith, nir.attrs) - + reflectance = self._get_reflectance_as_dask(nir.data, da_tb11, da_tb13_4, da_sun_zenith, nir.attrs) proj = self._create_modified_dataarray(reflectance, base_dataarray=nir) proj.attrs["units"] = "%" return proj + def _get_nir_inputs(self, projectables, optional_datasets): + nir, _tb11 = projectables + da_tb11 = _tb11.data + da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets) + da_sun_zenith = self._get_sun_zenith_from_provided_data(nir, optional_datasets, nir.dtype) + return (nir, da_tb11, da_tb13_4, da_sun_zenith) + @staticmethod def _get_tb13_4_from_optionals(optional_datasets): tb13_4 = None @@ -158,18 +159,14 @@ def __call__(self, projectables, optional_datasets=None, **info): """ projectables = self.match_data_arrays(projectables) - return self._get_emissivity_as_dataarray(projectables, optional_datasets) + inputs = self._get_nir_inputs(projectables, optional_datasets) + return self._get_emissivity_as_dataarray(*inputs) - def _get_emissivity_as_dataarray(self, projectables, optional_datasets): + def _get_emissivity_as_dataarray(self, nir, da_tb11, da_tb13_4, da_sun_zenith): """Get the emissivity as a dataarray.""" - nir, _tb11 = projectables - da_nir = nir.data - da_tb11 = _tb11.data - da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets) - da_sun_zenith = self._get_sun_zenith_from_provided_data(nir, optional_datasets, nir.dtype) logger.info("Getting emissive part of %s", nir.attrs["name"]) - emissivity = self._get_emissivity_as_dask(da_nir, da_tb11, da_tb13_4, da_sun_zenith, nir.attrs) + emissivity = self._get_emissivity_as_dask(nir.data, da_tb11, da_tb13_4, da_sun_zenith, nir.attrs) proj = self._create_modified_dataarray(emissivity, base_dataarray=nir) proj.attrs["units"] = "K" From 05c126835e19ecb24714160e8d3ad04f6c4bcdf1 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 12:17:09 -0600 Subject: [PATCH 0768/1416] Try --no-build-isolation for all unstable deps --- .github/workflows/ci.yaml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 7e6e446568..3849514506 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -78,11 +78,9 @@ jobs: pandas \ scipy mamba remove --force-remove -y pykdtree pyresample - python -m pip install --no-deps --upgrade --no-build-isolation -vvv \ + python -m pip install --no-deps --upgrade --no-build-isolation \ git+https://github.com/storpipfugl/pykdtree \ - git+https://github.com/pytroll/pyresample - python -m pip install \ - --no-deps --upgrade \ + 
git+https://github.com/pytroll/pyresample \ git+https://github.com/dask/dask \ git+https://github.com/dask/distributed \ git+https://github.com/zarr-developers/zarr \ From 21de5b04dfff6101a314ea21ab53d3d461931bab Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 13:24:46 -0600 Subject: [PATCH 0769/1416] Add extension-helpers to unstable build dependencies --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 3849514506..4ccddc8904 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -68,7 +68,7 @@ jobs: # We must get LD_PRELOAD for stdlibc++ or else the manylinux wheels # may break the conda-forge libraries trying to use newer glibc versions run: | - python -m pip install versioneer + python -m pip install versioneer extension-helpers python -m pip install \ --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple/ \ --trusted-host pypi.anaconda.org \ From 3a97a1a02ea474507e2e2df956068d53c31acca9 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 13:35:50 -0600 Subject: [PATCH 0770/1416] Allow dependencies to be installed in unstable CI --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 4ccddc8904..8368c325bc 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -78,7 +78,7 @@ jobs: pandas \ scipy mamba remove --force-remove -y pykdtree pyresample - python -m pip install --no-deps --upgrade --no-build-isolation \ + python -m pip install --upgrade --no-build-isolation \ git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ git+https://github.com/dask/dask \ From 0c53817da1c8dd4cf62068c38ab54a0680e00085 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 13:46:08 -0600 Subject: [PATCH 0771/1416] More unstable CI reworking --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 8368c325bc..52e6f3c387 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -68,7 +68,7 @@ jobs: # We must get LD_PRELOAD for stdlibc++ or else the manylinux wheels # may break the conda-forge libraries trying to use newer glibc versions run: | - python -m pip install versioneer extension-helpers + python -m pip install versioneer extension-helpers setuptools-scm configobj python -m pip install \ --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple/ \ --trusted-host pypi.anaconda.org \ @@ -78,7 +78,7 @@ jobs: pandas \ scipy mamba remove --force-remove -y pykdtree pyresample - python -m pip install --upgrade --no-build-isolation \ + python -m pip install --upgrade --no-deps --pre --no-build-isolation \ git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ git+https://github.com/dask/dask \ From 71e489ecf1043329a4a2752ed09998ddee449bcb Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 13:58:37 -0600 Subject: [PATCH 0772/1416] Add shapely to unstable CI build --- .github/workflows/ci.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 52e6f3c387..ef11beaac8 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -88,6 +88,7 @@ jobs: git+https://github.com/rasterio/rasterio \ 
git+https://github.com/pydata/bottleneck \ git+https://github.com/pydata/xarray \ + git+https://github.com/shapely/shapely \ git+https://github.com/astropy/astropy LD_PRELOAD=$(python -c "import sys; print(sys.prefix)")/lib/libstdc++.so echo "LD_PRELOAD=${LD_PRELOAD}" >> $GITHUB_ENV From 8b4e05d5508bd0db31c26b5ccd049844cfd04b6f Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 14:10:42 -0600 Subject: [PATCH 0773/1416] Add trollimage to unstable dependencies --- .github/workflows/ci.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index ef11beaac8..38b69cd477 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -77,10 +77,11 @@ jobs: numpy \ pandas \ scipy - mamba remove --force-remove -y pykdtree pyresample + mamba remove --force-remove -y pykdtree pyresample trollimage python -m pip install --upgrade --no-deps --pre --no-build-isolation \ git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ + git+https://github.com/pytroll/trollimage \ git+https://github.com/dask/dask \ git+https://github.com/dask/distributed \ git+https://github.com/zarr-developers/zarr \ From a58e9c9af7e77e8eb9cc875430d455cb6cc64658 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 14:21:50 -0600 Subject: [PATCH 0774/1416] Try adding pyhdf to unstable deps --- .github/workflows/ci.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 38b69cd477..eb08e0a0bb 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -82,6 +82,7 @@ jobs: git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ git+https://github.com/pytroll/trollimage \ + git+https://github.com/fhs/pyhdf \ git+https://github.com/dask/dask \ git+https://github.com/dask/distributed \ git+https://github.com/zarr-developers/zarr \ From 08159bab2a463b5d77ff3c99dd7420d606cd6a7e Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 14:56:27 -0600 Subject: [PATCH 0775/1416] Try removing pyhdf from conda in unstable CI --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index eb08e0a0bb..36746696e7 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -77,7 +77,7 @@ jobs: numpy \ pandas \ scipy - mamba remove --force-remove -y pykdtree pyresample trollimage + mamba remove --force-remove -y pykdtree pyresample trollimage pyhdf python -m pip install --upgrade --no-deps --pre --no-build-isolation \ git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ From 4b165c4c2cb1a0727c5cf56aee5000bc42b069c3 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 15:06:03 -0600 Subject: [PATCH 0776/1416] Add netcdf4-python to unstable CI --- .github/workflows/ci.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 36746696e7..e132c837bd 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -77,12 +77,13 @@ jobs: numpy \ pandas \ scipy - mamba remove --force-remove -y pykdtree pyresample trollimage pyhdf + mamba remove --force-remove -y pykdtree pyresample trollimage pyhdf netcdf4 python -m pip install --upgrade --no-deps --pre --no-build-isolation \ git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ 
git+https://github.com/pytroll/trollimage \ git+https://github.com/fhs/pyhdf \ + git+https://github.com/Unidata/netcdf4-python \ git+https://github.com/dask/dask \ git+https://github.com/dask/distributed \ git+https://github.com/zarr-developers/zarr \ From f2cbbe55a5adf62fb714e773ad2a1255d662796d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 15:14:38 -0600 Subject: [PATCH 0777/1416] Add h5py to unstable CI --- .github/workflows/ci.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index e132c837bd..dfc594e723 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -77,13 +77,14 @@ jobs: numpy \ pandas \ scipy - mamba remove --force-remove -y pykdtree pyresample trollimage pyhdf netcdf4 + mamba remove --force-remove -y pykdtree pyresample trollimage pyhdf netcdf4 h5py python -m pip install --upgrade --no-deps --pre --no-build-isolation \ git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ git+https://github.com/pytroll/trollimage \ git+https://github.com/fhs/pyhdf \ git+https://github.com/Unidata/netcdf4-python \ + git+https://github.com/h5py/h5py \ git+https://github.com/dask/dask \ git+https://github.com/dask/distributed \ git+https://github.com/zarr-developers/zarr \ From fe67279d869d219c63448ac2900fdc7760e633c7 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 15:23:23 -0600 Subject: [PATCH 0778/1416] Add missing unstable deps --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index dfc594e723..e2659bfd98 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -68,7 +68,7 @@ jobs: # We must get LD_PRELOAD for stdlibc++ or else the manylinux wheels # may break the conda-forge libraries trying to use newer glibc versions run: | - python -m pip install versioneer extension-helpers setuptools-scm configobj + python -m pip install versioneer extension-helpers setuptools-scm configobj pkgconfig python -m pip install \ --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple/ \ --trusted-host pypi.anaconda.org \ From c9a861f1ec71e060eb57a7066f40002d4a2b6fef Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 15:45:09 -0600 Subject: [PATCH 0779/1416] Install h5py in unstable CI without build isolation --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index e2659bfd98..64c980a4e2 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -78,13 +78,13 @@ jobs: pandas \ scipy mamba remove --force-remove -y pykdtree pyresample trollimage pyhdf netcdf4 h5py + python -m pip install --upgrade --no-deps --pre git+https://github.com/h5py/h5py python -m pip install --upgrade --no-deps --pre --no-build-isolation \ git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ git+https://github.com/pytroll/trollimage \ git+https://github.com/fhs/pyhdf \ git+https://github.com/Unidata/netcdf4-python \ - git+https://github.com/h5py/h5py \ git+https://github.com/dask/dask \ git+https://github.com/dask/distributed \ git+https://github.com/zarr-developers/zarr \ From 88ae0a6eccb6122b6b016441b7d47a0ad3258133 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 15:59:34 -0600 Subject: [PATCH 0780/1416] Use h5py cython branch for unstable CI --- 
.github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 64c980a4e2..026b9bc50e 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -78,7 +78,7 @@ jobs: pandas \ scipy mamba remove --force-remove -y pykdtree pyresample trollimage pyhdf netcdf4 h5py - python -m pip install --upgrade --no-deps --pre git+https://github.com/h5py/h5py + python -m pip install --upgrade --no-deps --pre git+https://github.com/takluyver/h5py@cython-3 python -m pip install --upgrade --no-deps --pre --no-build-isolation \ git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ From 9efa3cbac62ffc639e49cfcffd524f98038aca14 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 21 Nov 2023 16:04:13 -0600 Subject: [PATCH 0781/1416] Fix cython dev h5py install --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 026b9bc50e..14e5e3ffcd 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -78,12 +78,12 @@ jobs: pandas \ scipy mamba remove --force-remove -y pykdtree pyresample trollimage pyhdf netcdf4 h5py - python -m pip install --upgrade --no-deps --pre git+https://github.com/takluyver/h5py@cython-3 python -m pip install --upgrade --no-deps --pre --no-build-isolation \ git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ git+https://github.com/pytroll/trollimage \ git+https://github.com/fhs/pyhdf \ + git+https://github.com/takluyver/h5py@cython-3 \ git+https://github.com/Unidata/netcdf4-python \ git+https://github.com/dask/dask \ git+https://github.com/dask/distributed \ From d250a9dfba72613107bc995a202196969efee45f Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 23 Nov 2023 09:40:54 +0200 Subject: [PATCH 0782/1416] Rename _tb11 variable to tb11 --- satpy/modifiers/spectral.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index 28a3804da7..029ee88cb8 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ -79,8 +79,8 @@ def _get_reflectance_as_dataarray(self, nir, da_tb11, da_tb13_4, da_sun_zenith): return proj def _get_nir_inputs(self, projectables, optional_datasets): - nir, _tb11 = projectables - da_tb11 = _tb11.data + nir, tb11 = projectables + da_tb11 = tb11.data da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets) da_sun_zenith = self._get_sun_zenith_from_provided_data(nir, optional_datasets, nir.dtype) return (nir, da_tb11, da_tb13_4, da_sun_zenith) From 2ff0b2acad2becd2a86b070e16dfdd73e2432b78 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 23 Nov 2023 16:25:32 +0200 Subject: [PATCH 0783/1416] Add tolerances to tests affected by Trollimage dtype handling --- satpy/tests/test_modifiers.py | 6 ++++-- satpy/tests/writer_tests/test_ninjogeotiff.py | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index 4aece73487..23c22f529e 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -119,13 +119,15 @@ def test_basic_default_not_provided(self, sunz_ds1, as_32bit): sunz_ds1 = sunz_ds1.astype(np.float32) comp = SunZenithCorrector(name="sza_test", modifiers=tuple()) res = comp((sunz_ds1,), test_attr="test") - np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], 
[22.437503, 22.353533]])) + np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]]), + rtol=1e-6) assert "y" in res.coords assert "x" in res.coords ds1 = sunz_ds1.copy().drop_vars(("y", "x")) res = comp((ds1,), test_attr="test") res_np = res.compute() - np.testing.assert_allclose(res_np.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) + np.testing.assert_allclose(res_np.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]]), + rtol=1e-6) assert res.dtype == res_np.dtype assert "y" not in res.coords assert "x" not in res.coords diff --git a/satpy/tests/writer_tests/test_ninjogeotiff.py b/satpy/tests/writer_tests/test_ninjogeotiff.py index a9c60bdf90..e05150a571 100644 --- a/satpy/tests/writer_tests/test_ninjogeotiff.py +++ b/satpy/tests/writer_tests/test_ninjogeotiff.py @@ -630,7 +630,7 @@ def test_write_and_read_file_units( np.testing.assert_allclose(float(tgs["ninjo_Gradient"]), 0.467717, rtol=1e-5) np.testing.assert_allclose(float(tgs["ninjo_AxisIntercept"]), - -79.86771) + -79.86771, rtol=1e-5) fn2 = os.fspath(tmp_path / "test2.tif") with caplog.at_level(logging.WARNING): ngtw.save_dataset( From 8dce4219f43ec3870e520f2dcdaa84b15895da9f Mon Sep 17 00:00:00 2001 From: Aaron Rainbolt Date: Thu, 23 Nov 2023 21:05:28 -0600 Subject: [PATCH 0784/1416] Use assert_called_once rather than called_once in tests --- satpy/tests/scene_tests/test_resampling.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/tests/scene_tests/test_resampling.py b/satpy/tests/scene_tests/test_resampling.py index 6b5f74ee59..cc812839ac 100644 --- a/satpy/tests/scene_tests/test_resampling.py +++ b/satpy/tests/scene_tests/test_resampling.py @@ -319,14 +319,14 @@ def test_resample_reduce_data_toggle(self, rs): assert not slice_data.called assert not get_area_slices.called scene.resample(target_area) - assert slice_data.called_once - assert get_area_slices.called_once + slice_data.assert_called_once + get_area_slices.assert_called_once scene.resample(target_area, reduce_data=True) # 2 times for each dataset # once for default (reduce_data=True) # once for kwarg forced to `True` assert slice_data.call_count == 2 * 3 - assert get_area_slices.called_once + get_area_slices.assert_called_once def test_resample_ancillary(self): """Test that the Scene reducing data does not affect final output.""" From 99c91209fd2e0d60f3680e7332043d491d3c6b4c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 23 Nov 2023 21:18:56 -0600 Subject: [PATCH 0785/1416] Fix call to assert_called_once in test_resampling.py --- satpy/tests/scene_tests/test_resampling.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/tests/scene_tests/test_resampling.py b/satpy/tests/scene_tests/test_resampling.py index cc812839ac..07e1cc2814 100644 --- a/satpy/tests/scene_tests/test_resampling.py +++ b/satpy/tests/scene_tests/test_resampling.py @@ -319,14 +319,14 @@ def test_resample_reduce_data_toggle(self, rs): assert not slice_data.called assert not get_area_slices.called scene.resample(target_area) - slice_data.assert_called_once - get_area_slices.assert_called_once + slice_data.assert_called_once() + get_area_slices.assert_called_once() scene.resample(target_area, reduce_data=True) # 2 times for each dataset # once for default (reduce_data=True) # once for kwarg forced to `True` assert slice_data.call_count == 2 * 3 - get_area_slices.assert_called_once + get_area_slices.assert_called_once() def test_resample_ancillary(self): """Test that 
the Scene reducing data does not affect final output.""" From f15a2f4b394a784c516992425b361908d96a7bc6 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sat, 25 Nov 2023 19:59:23 -0600 Subject: [PATCH 0786/1416] Fix expected reduce_data method calls --- satpy/tests/scene_tests/test_resampling.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/satpy/tests/scene_tests/test_resampling.py b/satpy/tests/scene_tests/test_resampling.py index 07e1cc2814..bd4ff6d49b 100644 --- a/satpy/tests/scene_tests/test_resampling.py +++ b/satpy/tests/scene_tests/test_resampling.py @@ -316,17 +316,18 @@ def test_resample_reduce_data_toggle(self, rs): ds_walker.return_value = test_order slice_data.side_effect = orig_slice_data scene.resample(target_area, reduce_data=False) - assert not slice_data.called - assert not get_area_slices.called + slice_data.assert_not_called() + get_area_slices.assert_not_called() scene.resample(target_area) - slice_data.assert_called_once() - get_area_slices.assert_called_once() + assert slice_data.call_count == 3 + assert get_area_slices.call_count == 2 scene.resample(target_area, reduce_data=True) # 2 times for each dataset # once for default (reduce_data=True) # once for kwarg forced to `True` assert slice_data.call_count == 2 * 3 - get_area_slices.assert_called_once() + # reductions are cached, no additional reductions in second call + assert get_area_slices.call_count == 2 def test_resample_ancillary(self): """Test that the Scene reducing data does not affect final output.""" From 43ab244b204287231c0e4145ad6a02dec890361d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 26 Nov 2023 14:20:16 -0600 Subject: [PATCH 0787/1416] Fix expected number of area slice calls in resample test --- satpy/tests/scene_tests/test_resampling.py | 6 ++++-- satpy/tests/writer_tests/test_ninjogeotiff.py | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/satpy/tests/scene_tests/test_resampling.py b/satpy/tests/scene_tests/test_resampling.py index bd4ff6d49b..d59019e3f7 100644 --- a/satpy/tests/scene_tests/test_resampling.py +++ b/satpy/tests/scene_tests/test_resampling.py @@ -320,14 +320,16 @@ def test_resample_reduce_data_toggle(self, rs): get_area_slices.assert_not_called() scene.resample(target_area) assert slice_data.call_count == 3 - assert get_area_slices.call_count == 2 + assert get_area_slices.call_count == 1 + assert get_area_slices_big.call_count == 1 scene.resample(target_area, reduce_data=True) # 2 times for each dataset # once for default (reduce_data=True) # once for kwarg forced to `True` assert slice_data.call_count == 2 * 3 - # reductions are cached, no additional reductions in second call + # get area slices is called again, once per area assert get_area_slices.call_count == 2 + assert get_area_slices_big.call_count == 2 def test_resample_ancillary(self): """Test that the Scene reducing data does not affect final output.""" diff --git a/satpy/tests/writer_tests/test_ninjogeotiff.py b/satpy/tests/writer_tests/test_ninjogeotiff.py index a9c60bdf90..e05150a571 100644 --- a/satpy/tests/writer_tests/test_ninjogeotiff.py +++ b/satpy/tests/writer_tests/test_ninjogeotiff.py @@ -630,7 +630,7 @@ def test_write_and_read_file_units( np.testing.assert_allclose(float(tgs["ninjo_Gradient"]), 0.467717, rtol=1e-5) np.testing.assert_allclose(float(tgs["ninjo_AxisIntercept"]), - -79.86771) + -79.86771, rtol=1e-5) fn2 = os.fspath(tmp_path / "test2.tif") with caplog.at_level(logging.WARNING): ngtw.save_dataset( From 
2d1ea402c79f83ac3c0ac36a34a4176aa1844d2f Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 27 Nov 2023 11:55:15 +0100 Subject: [PATCH 0788/1416] Ensure attributes are correct --- satpy/tests/reader_tests/test_sgli_l1b.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/satpy/tests/reader_tests/test_sgli_l1b.py b/satpy/tests/reader_tests/test_sgli_l1b.py index 9fa8caa8b1..5ae305a068 100644 --- a/satpy/tests/reader_tests/test_sgli_l1b.py +++ b/satpy/tests/reader_tests/test_sgli_l1b.py @@ -210,16 +210,18 @@ def test_get_vn_dataset_reflectances(sgli_vn_file): assert np.allclose(res[0, :] / 100, FULL_KM_ARRAY[0, :] * 5e-5 - 0.05) assert res.dtype == np.float32 assert res.dims == ("y", "x") - assert res.units == "%" + assert res.attrs["units"] == "%" def test_get_vn_dataset_radiance(sgli_vn_file): """Test that datasets can be calibrated to radiance.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="radiance") - res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", - "standard_name": ""}) + res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "W m-2 um-1 sr-1", + "standard_name": "toa_outgoing_radiance_per_unit_wavelength"}) assert np.allclose(res[0, :], FULL_KM_ARRAY[0, :] * np.float32(0.02) - 25) assert res.dtype == np.float32 + assert res.attrs["units"] == "W m-2 um-1 sr-1" + assert res.attrs["standard_name"] == "toa_outgoing_radiance_per_unit_wavelength" def test_channel_is_masked(sgli_vn_file): """Test that channels are masked for no-data.""" From fc41c70e08c27c1c8c0a64d69b2c5a5df4329116 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 27 Nov 2023 19:55:46 +0000 Subject: [PATCH 0789/1416] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/mirrors-mypy: v1.7.0 → v1.7.1](https://github.com/pre-commit/mirrors-mypy/compare/v1.7.0...v1.7.1) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d84659c6f0..99e77cb56a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -19,7 +19,7 @@ repos: - id: bandit args: [--ini, .bandit] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.7.0' # Use the sha / tag you want to point at + rev: 'v1.7.1' # Use the sha / tag you want to point at hooks: - id: mypy additional_dependencies: From 0782f8112576ee428726451d5da4ab6385ede4a1 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 28 Nov 2023 08:49:00 +0200 Subject: [PATCH 0790/1416] Add file pattern for CRRPh of NWC SAF GEO v2021 --- satpy/etc/readers/nwcsaf-geo.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/etc/readers/nwcsaf-geo.yaml b/satpy/etc/readers/nwcsaf-geo.yaml index 29e3b5cc05..e22ae09fc1 100644 --- a/satpy/etc/readers/nwcsaf-geo.yaml +++ b/satpy/etc/readers/nwcsaf-geo.yaml @@ -41,7 +41,8 @@ file_types: nc_nwcsaf_crr-ph: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF - file_patterns: ['S_NWC_CRR-Ph_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] + file_patterns: ['S_NWC_CRR-Ph_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc', + 'S_NWC_CRRPh_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_ishai: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF 
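Note on the pattern added above: Satpy resolves `file_patterns` with
trollsift-style parsing, so listing both the `CRR-Ph` and the dash-less `CRRPh`
spellings lets either filename variant be picked up. A minimal sketch of how one
of these patterns is matched, assuming the trollsift package (which Satpy uses
for this) and a made-up filename:

    from trollsift import Parser

    pattern = "S_NWC_CRRPh_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc"
    parser = Parser(pattern)

    # The filename below is fabricated for illustration only.
    info = parser.parse("S_NWC_CRRPh_MSG4_Europe_20231128T120000Z.nc")
    print(info["platform_id"])  # -> 'MSG4'
    print(info["start_time"])   # -> datetime.datetime(2023, 11, 28, 12, 0)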
From 47557a25fb32278384095b5c5e0548fd5c3896d0 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 28 Nov 2023 12:53:40 +0200 Subject: [PATCH 0791/1416] Update VIRR test to allow floating point differences --- satpy/tests/reader_tests/test_virr_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_virr_l1b.py b/satpy/tests/reader_tests/test_virr_l1b.py index e3fbd73272..f899cd537c 100644 --- a/satpy/tests/reader_tests/test_virr_l1b.py +++ b/satpy/tests/reader_tests/test_virr_l1b.py @@ -158,7 +158,7 @@ def _fy3_helper(self, platform_name, reader, Emissive_units): "solar_azimuth_angle", "sensor_azimuth_angle"] assert ["virr_geoxx", "virr_l1b"] == attributes["file_type"] assert ("longitude", "latitude") == attributes["coordinates"] - assert band_values[dataset["name"]] == round(float(np.array(ds[ds.shape[0] // 2][ds.shape[1] // 2])), 6) + np.testing.assert_allclose(band_values[dataset["name"]], ds[ds.shape[0] // 2][ds.shape[1] // 2], rtol=1e-6) assert "valid_range" not in ds.attrs def test_fy3b_file(self): From 1d86df949213ec92f4bc7dd5599d327916954ad9 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 28 Nov 2023 13:43:44 +0000 Subject: [PATCH 0792/1416] Rename `subsatellite_longitude` in INSAT-3D --- satpy/readers/insat3d_img_l1b_h5.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/insat3d_img_l1b_h5.py b/satpy/readers/insat3d_img_l1b_h5.py index 9f2224ef82..205f4d17b2 100644 --- a/satpy/readers/insat3d_img_l1b_h5.py +++ b/satpy/readers/insat3d_img_l1b_h5.py @@ -187,7 +187,7 @@ def get_area_def(self, ds_id): a = 6378137.0 b = 6356752.314245 - nom_cen_pos = self.datatree.attrs["Nominal_Central_Point_Coordinates(degrees)_Latitude_Longitude"][1] + subsatellite_longitude = self.datatree.attrs["Nominal_Central_Point_Coordinates(degrees)_Latitude_Longitude"][1] pdict = { "cfac": cfac, @@ -200,7 +200,7 @@ def get_area_def(self, ds_id): "a": a, "b": b, "h": h, - "ssp_lon": nom_cen_pos, + "ssp_lon": subsatellite_longitude, "a_name": "insat3d82", "a_desc": "insat3d82", "p_id": "geosmsg" From 9e89902a6de27401d86dccbf357e9e9521b393ff Mon Sep 17 00:00:00 2001 From: andream Date: Tue, 28 Nov 2023 14:51:26 +0100 Subject: [PATCH 0793/1416] update default parameters in modifier init --- satpy/modifiers/geometry.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/modifiers/geometry.py b/satpy/modifiers/geometry.py index cc903ad5e9..1e8841c6bd 100644 --- a/satpy/modifiers/geometry.py +++ b/satpy/modifiers/geometry.py @@ -177,7 +177,7 @@ class SunZenithReducer(SunZenithCorrectorBase): """ - def __init__(self, correction_limit=55., max_sza=90, strength=1.5, **kwargs): # noqa: D417 + def __init__(self, correction_limit=80., max_sza=90, strength=1.3, **kwargs): # noqa: D417 """Collect custom configuration values. Args: From 00df29f826f2990e9071b14216a9afb7d647a63c Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 28 Nov 2023 13:51:43 +0000 Subject: [PATCH 0794/1416] Refactor date attribute getter for OSI SAF reader. 
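The diff below collapses two chains of "try one attribute, fall back if it is
None" lookups into a single loop over candidate attribute names. As a generic
sketch of that pattern (the helper name is hypothetical, not part of the
reader):

    def _first_existing(getter, candidates):
        """Return the first attribute value that exists, or None if none do."""
        for name in candidates:
            value = getter(name)
            if value is not None:
                return value
        return None

    # e.g. _first_existing(self.get, ["/attr/start_date", "/attr/start_time",
    #                                 "/attr/time_coverage_start"])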
--- satpy/readers/osisaf_l3_nc.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index 1affb3a883..2953cc6dfc 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -205,22 +205,22 @@ def _parse_datetime(datestr): @property def start_time(self): - start_t = self.get("/attr/start_date") - if start_t is None: - start_t = self.get("/attr/start_time") - if start_t is None: - start_t = self.get("/attr/time_coverage_start") + poss_names = ["/attr/start_date", "/attr/start_time", "/attr/time_coverage_start"] + for name in poss_names: + start_t = self.get(name) + if start_t is not None: + break if start_t is None: raise ValueError("Unknown start time attribute.") return self._parse_datetime(start_t) @property def end_time(self): - end_t = self.get("/attr/stop_date") - if end_t is None: - end_t = self.get("/attr/stop_time") - if end_t is None: - end_t = self.get("/attr/time_coverage_end") + poss_names = ["/attr/stop_date", "/attr/stop_time", "/attr/time_coverage_end"] + for name in poss_names: + end_t = self.get(name) + if end_t is not None: + break if end_t is None: raise ValueError("Unknown stop time attribute.") return self._parse_datetime(end_t) From 2ad06aa6a0b06fa41712c69266ca57808e482985 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 28 Nov 2023 15:22:20 +0100 Subject: [PATCH 0795/1416] Test Insat sublon is not hardcoded --- satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py index 1886560402..92aef2b906 100644 --- a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py +++ b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py @@ -74,6 +74,7 @@ start_time = datetime(2009, 6, 9, 9, 0) end_time = datetime(2009, 6, 9, 9, 30) +subsatellite_longitude = 82 time_pattern = "%d-%b-%YT%H:%M:%S" @@ -81,7 +82,7 @@ "Field_of_View(degrees)": 17.973925, "Acquisition_Start_Time": start_time.strftime(time_pattern), "Acquisition_End_Time": end_time.strftime(time_pattern), - "Nominal_Central_Point_Coordinates(degrees)_Latitude_Longitude": [0.0, 82.0], + "Nominal_Central_Point_Coordinates(degrees)_Latitude_Longitude": [0.0, subsatellite_longitude], "Nominal_Altitude(km)": 36000.0, } @@ -243,6 +244,7 @@ def test_insat3d_has_orbital_parameters(insat_filehandler): assert "orbital_parameters" in darr.attrs assert "satellite_nominal_longitude" in darr.attrs["orbital_parameters"] + assert darr.attrs["orbital_parameters"]["satellite_nominal_longitude"] == subsatellite_longitude assert "satellite_nominal_latitude" in darr.attrs["orbital_parameters"] assert "satellite_nominal_altitude" in darr.attrs["orbital_parameters"] assert "satellite_actual_altitude" in darr.attrs["orbital_parameters"] @@ -276,6 +278,7 @@ def test_filehandler_returns_area(insat_filehandler): ds_id = make_dataid(name="MIR", resolution=4000, calibration="brightness_temperature") area_def = fh.get_area_def(ds_id) lons, lats = area_def.get_lonlats(chunks=1000) + assert "+lon_0=" + str(subsatellite_longitude) in area_def.crs.to_proj4() def test_filehandler_has_start_and_end_time(insat_filehandler): From a607fbd489bd698882b69789391c05682db33989 Mon Sep 17 00:00:00 2001 From: andream Date: Tue, 28 Nov 2023 15:33:59 +0100 Subject: [PATCH 0796/1416] remove defaults from docstrings, and include parameters in log message --- 
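For context on what these parameters control: the reducer damps the visible
signal between `correction_limit` and `max_sza`. A rough standalone sketch of
such a ramp (an illustration of the idea only, not Satpy's actual
`sunzen_reduction` implementation):

    import numpy as np

    def sunzen_reduction_sketch(data, sunz_deg, limit=80.0, max_sza=90.0, strength=1.3):
        # 0 at the onset angle, 1 at the cut-off angle, clipped outside that range.
        ramp = np.clip((sunz_deg - limit) / (max_sza - limit), 0.0, 1.0)
        # strength > 1 keeps more signal just past the limit; < 1 damps earlier.
        return data * (1.0 - ramp ** strength)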
satpy/modifiers/geometry.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/satpy/modifiers/geometry.py b/satpy/modifiers/geometry.py index 1e8841c6bd..693529f9dd 100644 --- a/satpy/modifiers/geometry.py +++ b/satpy/modifiers/geometry.py @@ -181,10 +181,10 @@ def __init__(self, correction_limit=80., max_sza=90, strength=1.3, **kwargs): # """Collect custom configuration values. Args: - correction_limit (float): Solar zenith angle in degrees where to start the signal reduction. Default 60. + correction_limit (float): Solar zenith angle in degrees where to start the signal reduction. max_sza (float): Maximum solar zenith angle in degrees where to apply the signal reduction. Beyond - this solar zenith angle the signal will become zero. Default 90. - strength (float): The strength of the non-linear signal reduction. Default 1.5 + this solar zenith angle the signal will become zero. + strength (float): The strength of the non-linear signal reduction. """ self.correction_limit = correction_limit @@ -194,7 +194,8 @@ def __init__(self, correction_limit=80., max_sza=90, strength=1.3, **kwargs): # raise ValueError("`max_sza` must be defined when using the SunZenithReducer.") def _apply_correction(self, proj, coszen): - logger.debug("Apply sun-zenith signal reduction") + logger.debug(f"Applying sun-zenith signal reduction with correction_limit {self.correction_limit} deg," + f" strength {self.strength}, and max_sza {self.max_sza} deg.") res = proj.copy() sunz = np.rad2deg(np.arccos(coszen.data)) res.data = sunzen_reduction(proj.data, sunz, From 79585f152830089ae654d7fa640faa818c3608df Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 28 Nov 2023 15:40:15 +0100 Subject: [PATCH 0797/1416] Fix style --- satpy/modifiers/spectral.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index e37f6d3c9f..18d1df2379 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ -164,7 +164,6 @@ def __call__(self, projectables, optional_datasets=None, **info): def _get_emissivity_as_dataarray(self, nir, da_tb11, da_tb13_4, da_sun_zenith): """Get the emissivity as a dataarray.""" - logger.info("Getting emissive part of %s", nir.attrs["name"]) emissivity = self._get_emissivity_as_dask(nir.data, da_tb11, da_tb13_4, da_sun_zenith, nir.attrs) From 98061ec7e6207554c10366533226b66ecefc4fb5 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 28 Nov 2023 15:46:05 +0100 Subject: [PATCH 0798/1416] Fix style --- satpy/modifiers/spectral.py | 1 - satpy/readers/osisaf_l3_nc.py | 3 ++- satpy/tests/reader_tests/test_osisaf_l3.py | 5 ++++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index e37f6d3c9f..18d1df2379 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ -164,7 +164,6 @@ def __call__(self, projectables, optional_datasets=None, **info): def _get_emissivity_as_dataarray(self, nir, da_tb11, da_tb13_4, da_sun_zenith): """Get the emissivity as a dataarray.""" - logger.info("Getting emissive part of %s", nir.attrs["name"]) emissivity = self._get_emissivity_as_dask(nir.data, da_tb11, da_tb13_4, da_sun_zenith, nir.attrs) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index 2953cc6dfc..56d4773a43 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -120,7 +120,6 @@ def get_area_def(self, area_id): def _get_ds_units(self, ds_info, var_path): """Find the units of the 
datasets.""" - file_units = ds_info.get("file_units") if file_units is None: file_units = self.get(var_path + "/attr/units") @@ -205,6 +204,7 @@ def _parse_datetime(datestr): @property def start_time(self): + """Get the start time.""" poss_names = ["/attr/start_date", "/attr/start_time", "/attr/time_coverage_start"] for name in poss_names: start_t = self.get(name) @@ -216,6 +216,7 @@ def start_time(self): @property def end_time(self): + """Get the end time.""" poss_names = ["/attr/stop_date", "/attr/stop_time", "/attr/time_coverage_end"] for name in poss_names: end_t = self.get(name) diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py index a9a595202b..3fa9e5bb35 100644 --- a/satpy/tests/reader_tests/test_osisaf_l3.py +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -178,7 +178,6 @@ def test_get_dataset(self, tmp_path): def test_get_start_and_end_times(self, tmp_path): """Test retrieval of the sensor name from the netCDF file.""" - tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) @@ -202,6 +201,7 @@ class TestOSISAFL3ReaderICE(OSISAFL3ReaderTests): """Test OSI-SAF level 3 netCDF reader ice files.""" def setup_method(self): + """Set up the tests.""" super().setup_method(tester="ice") self.filename_info = {"grid": "ease"} self.filetype_info = {"file_type": "osi_sea_ice_conc"} @@ -258,6 +258,7 @@ class TestOSISAFL3ReaderFluxStere(OSISAFL3ReaderTests): """Test OSI-SAF level 3 netCDF reader flux files on stereographic grid.""" def setup_method(self): + """Set up the tests.""" super().setup_method(tester="flux_stere") self.filename_info = {"grid": "polstere"} self.filetype_info = {"file_type": "osi_radflux_stere"} @@ -294,6 +295,7 @@ class TestOSISAFL3ReaderFluxGeo(OSISAFL3ReaderTests): """Test OSI-SAF level 3 netCDF reader flux files on lat/lon grid (GEO sensors).""" def setup_method(self): + """Set up the tests.""" super().setup_method(tester="flux_geo") self.filename_info = {} self.filetype_info = {"file_type": "osi_radflux_grid"} @@ -329,6 +331,7 @@ class TestOSISAFL3ReaderSST(OSISAFL3ReaderTests): """Test OSI-SAF level 3 netCDF reader surface temperature files.""" def setup_method(self): + """Set up the tests.""" super().setup_method(tester="sst") self.filename_info = {} self.filetype_info = {"file_type": "osi_sst"} From 6d26c565171a41ff3a2fc755f42b29f3d5746768 Mon Sep 17 00:00:00 2001 From: andream Date: Tue, 28 Nov 2023 15:51:58 +0100 Subject: [PATCH 0799/1416] add true_color_fully_sunzencorrected --- satpy/etc/composites/fci.yaml | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml index 336ae415b3..366b8bbc20 100644 --- a/satpy/etc/composites/fci.yaml +++ b/satpy/etc/composites/fci.yaml @@ -33,6 +33,20 @@ composites: - name: vis_08 standard_name: toa_bidirectional_reflectance + ndvi_hybrid_green_fully_sunzencorrected: + description: Same as ndvi_hybrid_green, but without Sun-zenith reduction + compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen + limits: [ 0.15, 0.05 ] + strength: 3.0 + prerequisites: + - name: vis_05 + modifiers: [ sunz_corrected, rayleigh_corrected ] + - name: vis_06 + modifiers: [ sunz_corrected, rayleigh_corrected ] + - name: vis_08 + modifiers: [ sunz_corrected ] + standard_name: toa_bidirectional_reflectance + binary_cloud_mask: # This will set all clear pixels to '0', all pixles with cloudy features (meteorological/dust/ash clouds) to '1' and # missing/undefined 
pixels to 'nan'. This can be used for the the official EUMETSAT cloud mask product (CLM). @@ -56,6 +70,19 @@ composites: modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced] standard_name: true_color + true_color_fully_sunzencorrected: + compositor: !!python/name:satpy.composites.SelfSharpenedRGB + description: > + Same as true_color, but without Sun-zenith reduction. For users that want to maintain as much data as possible + close to the terminator, at cost of some artefacts (bright limb and reddish clouds) (see issue #2643). + prerequisites: + - name: vis_06 + modifiers: [sunz_corrected, rayleigh_corrected] + - name: ndvi_hybrid_green_fully_sunzencorrected + - name: vis_04 + modifiers: [sunz_corrected, rayleigh_corrected] + standard_name: true_color + true_color_raw_with_corrected_green: compositor: !!python/name:satpy.composites.SelfSharpenedRGB description: > From b4e8fa572d35b55f84151c61a73d071391f697fb Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 28 Nov 2023 15:54:31 +0100 Subject: [PATCH 0800/1416] Fix style --- satpy/cf/attrs.py | 2 +- satpy/cf/coords.py | 2 +- satpy/modifiers/spectral.py | 1 - satpy/tests/cf_tests/test_area.py | 2 ++ 4 files changed, 4 insertions(+), 3 deletions(-) diff --git a/satpy/cf/attrs.py b/satpy/cf/attrs.py index f9d49416c8..cdec8500d4 100644 --- a/satpy/cf/attrs.py +++ b/satpy/cf/attrs.py @@ -97,7 +97,7 @@ def _encode_object(obj): def _try_decode_object(obj): - """Try to decode byte string""" + """Try to decode byte string.""" try: decoded = obj.decode() except AttributeError: diff --git a/satpy/cf/coords.py b/satpy/cf/coords.py index 48a0748509..9220632fcb 100644 --- a/satpy/cf/coords.py +++ b/satpy/cf/coords.py @@ -181,7 +181,7 @@ def ensure_unique_nondimensional_coords( this is not applied to latitude and longitude. Args: - datas: + data_arrays: Dictionary of (dataset name, dataset) pretty: Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. 
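Usage note for the `true_color_fully_sunzencorrected` composite added two
patches above: once the YAML entry is in place it is requested like any other
composite through a `Scene`. A sketch, where the file list is a placeholder
rather than real FCI product names:

    from glob import glob

    from satpy import Scene

    # Placeholder path: point this at real FCI L1C NetCDF files.
    scn = Scene(reader="fci_l1c_nc", filenames=glob("/data/fci/*.nc"))
    scn.load(["true_color_fully_sunzencorrected"])
    scn.save_dataset("true_color_fully_sunzencorrected", filename="true_color.png")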
diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index e37f6d3c9f..18d1df2379 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ -164,7 +164,6 @@ def __call__(self, projectables, optional_datasets=None, **info): def _get_emissivity_as_dataarray(self, nir, da_tb11, da_tb13_4, da_sun_zenith): """Get the emissivity as a dataarray.""" - logger.info("Getting emissive part of %s", nir.attrs["name"]) emissivity = self._get_emissivity_as_dask(nir.data, da_tb11, da_tb13_4, da_sun_zenith, nir.attrs) diff --git a/satpy/tests/cf_tests/test_area.py b/satpy/tests/cf_tests/test_area.py index 31b51b6cd9..ee24d0e10d 100644 --- a/satpy/tests/cf_tests/test_area.py +++ b/satpy/tests/cf_tests/test_area.py @@ -27,6 +27,7 @@ @pytest.fixture() def input_data_arr() -> xr.DataArray: + """Create a data array.""" return xr.DataArray( data=[[1, 2], [3, 4]], dims=("y", "x"), @@ -59,6 +60,7 @@ def test_area2cf_geos_area_nolonlats(self, input_data_arr, include_lonlats): assert "latitude" in res[1].coords def test_area2cf_swath(self, input_data_arr): + """Test area2cf for swath definitions.""" swath = SwathDefinition(lons=[[1, 1], [2, 2]], lats=[[1, 2], [1, 2]]) input_data_arr.attrs["area"] = swath From ea6d6e035aa9222e3fe2def19d81ee3a1faf7afa Mon Sep 17 00:00:00 2001 From: andream Date: Tue, 28 Nov 2023 16:11:57 +0100 Subject: [PATCH 0801/1416] update defaults test --- satpy/tests/test_modifiers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index 81ce5f3ad8..0c8eb51b3f 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -179,7 +179,7 @@ def test_default_settings(self, sunz_ds1, sunz_sza): """Test default settings with sza data available.""" res = self.default((sunz_ds1, sunz_sza), test_attr="test") np.testing.assert_allclose(res.values, - np.array([[0.00242814, 0.00235669], [0.00245885, 0.00238707]]), + np.array([[0.02916261, 0.02839063], [0.02949383, 0.02871911]]), rtol=1e-5) def test_custom_settings(self, sunz_ds1, sunz_sza): From 4dd1920a1ed98c1438cd9a319a9fb632bbec0613 Mon Sep 17 00:00:00 2001 From: andream Date: Tue, 28 Nov 2023 16:55:58 +0100 Subject: [PATCH 0802/1416] make ruff happy --- satpy/modifiers/spectral.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index e37f6d3c9f..18d1df2379 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ -164,7 +164,6 @@ def __call__(self, projectables, optional_datasets=None, **info): def _get_emissivity_as_dataarray(self, nir, da_tb11, da_tb13_4, da_sun_zenith): """Get the emissivity as a dataarray.""" - logger.info("Getting emissive part of %s", nir.attrs["name"]) emissivity = self._get_emissivity_as_dask(nir.data, da_tb11, da_tb13_4, da_sun_zenith, nir.attrs) From e338294dd2b4924c11c7b08b22b28bc150ecd6f5 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 29 Nov 2023 08:08:58 +0100 Subject: [PATCH 0803/1416] Update changelog for v0.45.0 --- CHANGELOG.md | 60 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 60 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4b9ab4e1b7..aa85b83f56 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,63 @@ +## Version 0.45.0 (2023/11/29) + +### Issues Closed + +* [Issue 2646](https://github.com/pytroll/satpy/issues/2646) - satpy/tests/scene_tests/test_resampling.py is using called_once in assertions rather than assert_called_once, causing test failures on 
Python 3.12 ([PR 2648](https://github.com/pytroll/satpy/pull/2648) by [@ArrayBolt3](https://github.com/ArrayBolt3)) +* [Issue 2643](https://github.com/pytroll/satpy/issues/2643) - SunZenithReducer defaults make True Color FCI imagery too dark at high solar zenith angles ([PR 2653](https://github.com/pytroll/satpy/pull/2653) by [@ameraner](https://github.com/ameraner)) +* [Issue 2638](https://github.com/pytroll/satpy/issues/2638) - Update AVHRR EPS reader to read cloud flags information ([PR 2639](https://github.com/pytroll/satpy/pull/2639) by [@fwfichtner](https://github.com/fwfichtner)) +* [Issue 2619](https://github.com/pytroll/satpy/issues/2619) - NDVI hybrid green correction triggers early dask computations ([PR 2623](https://github.com/pytroll/satpy/pull/2623) by [@pnuu](https://github.com/pnuu)) +* [Issue 2614](https://github.com/pytroll/satpy/issues/2614) - DayNightCompositor triggers early dask computation ([PR 2617](https://github.com/pytroll/satpy/pull/2617) by [@pnuu](https://github.com/pnuu)) +* [Issue 2613](https://github.com/pytroll/satpy/issues/2613) - modifier NIREmissivePartFromReflectance triggers early dask computation +* [Issue 2604](https://github.com/pytroll/satpy/issues/2604) - grid_mapping attrs lead to failure of cf writer +* [Issue 2601](https://github.com/pytroll/satpy/issues/2601) - Is the 31(32)-band read by the modis_l1b reader converted to bright temperature by default? + +In this release 8 issues were closed. + +### Pull Requests Merged + +#### Bugs fixed + +* [PR 2648](https://github.com/pytroll/satpy/pull/2648) - Fix assert_called_once usage in resample tests ([2646](https://github.com/pytroll/satpy/issues/2646)) +* [PR 2635](https://github.com/pytroll/satpy/pull/2635) - Fix nwcsaf_geo start time to be nominal time +* [PR 2627](https://github.com/pytroll/satpy/pull/2627) - Fix ABI readers using wrong dtype for resolution-based chunks +* [PR 2625](https://github.com/pytroll/satpy/pull/2625) - Cleanup various warnings encountered during tests +* [PR 2623](https://github.com/pytroll/satpy/pull/2623) - Fix unnecessary Dask `compute()`s in `NDVIHybridGreen` compositor ([2619](https://github.com/pytroll/satpy/issues/2619)) +* [PR 2617](https://github.com/pytroll/satpy/pull/2617) - Reduce Dask computations in `DayNightCompositor` ([2614](https://github.com/pytroll/satpy/issues/2614)) +* [PR 2608](https://github.com/pytroll/satpy/pull/2608) - Fix ABI L2 to only convert reflectances to percentages +* [PR 2607](https://github.com/pytroll/satpy/pull/2607) - Fix ABI L2 reader to produce reflectances as percentages +* [PR 2606](https://github.com/pytroll/satpy/pull/2606) - Change platform name for EPIC (DSCOVR) to upper case. 
+* [PR 2585](https://github.com/pytroll/satpy/pull/2585) - Make caching warn if some of the args are unhashable + +#### Features added + +* [PR 2653](https://github.com/pytroll/satpy/pull/2653) - Update Sun-zenith reducer defaults ([2643](https://github.com/pytroll/satpy/issues/2643)) +* [PR 2652](https://github.com/pytroll/satpy/pull/2652) - Add file pattern for CRRPh of NWC SAF GEO v2021 +* [PR 2642](https://github.com/pytroll/satpy/pull/2642) - Set dtype for get_lonlats() in NIR reflectance calculation +* [PR 2640](https://github.com/pytroll/satpy/pull/2640) - Keep original dtype in DayNightCompositor +* [PR 2639](https://github.com/pytroll/satpy/pull/2639) - Update AVHRR EPS reader to read cloud flags information ([2638](https://github.com/pytroll/satpy/issues/2638)) +* [PR 2637](https://github.com/pytroll/satpy/pull/2637) - Keep FCI data as 32-bit floats +* [PR 2632](https://github.com/pytroll/satpy/pull/2632) - Add reader for OSI SAF L3 products +* [PR 2631](https://github.com/pytroll/satpy/pull/2631) - Add a reader for MODIS Level 3 files in CMG format. +* [PR 2623](https://github.com/pytroll/satpy/pull/2623) - Fix unnecessary Dask `compute()`s in `NDVIHybridGreen` compositor ([2619](https://github.com/pytroll/satpy/issues/2619)) +* [PR 2621](https://github.com/pytroll/satpy/pull/2621) - Add resolution-based chunking to ABI L1b reader +* [PR 2610](https://github.com/pytroll/satpy/pull/2610) - Remove legacy resampler code + +#### Clean ups + +* [PR 2648](https://github.com/pytroll/satpy/pull/2648) - Fix assert_called_once usage in resample tests ([2646](https://github.com/pytroll/satpy/issues/2646)) +* [PR 2641](https://github.com/pytroll/satpy/pull/2641) - Add "A" and "D" checks to ruff config +* [PR 2634](https://github.com/pytroll/satpy/pull/2634) - Remove duplicate entries of required netcdf variables in FCI reader +* [PR 2625](https://github.com/pytroll/satpy/pull/2625) - Cleanup various warnings encountered during tests +* [PR 2624](https://github.com/pytroll/satpy/pull/2624) - Replace assertRaises with pytest.raises +* [PR 2621](https://github.com/pytroll/satpy/pull/2621) - Add resolution-based chunking to ABI L1b reader +* [PR 2612](https://github.com/pytroll/satpy/pull/2612) - Remove tests for removed and deprecated functionality +* [PR 2610](https://github.com/pytroll/satpy/pull/2610) - Remove legacy resampler code +* [PR 2586](https://github.com/pytroll/satpy/pull/2586) - Replace flake8 with ruff in pre-commit and ci linting +* [PR 2524](https://github.com/pytroll/satpy/pull/2524) - Refactor CFWriter utility into CF directory + +In this release 31 pull requests were closed. 
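One of the fixes above (PR 2648) deserves a concrete illustration: accessing an
undefined attribute such as `called_once` on a `unittest.mock.Mock` silently
creates a new child mock, which is always truthy, so `assert m.called_once` can
never fail. The real check is the `assert_called_once()` method, and it must be
called with parentheses. A minimal demonstration:

    from unittest import mock

    m = mock.Mock()
    assert m.called_once    # passes even though m was never called!

    m()
    m.assert_called_once()  # the real check: raises AssertionError unless exactly one call was made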
+ + ## Version 0.44.0 (2023/10/17) ### Issues Closed From bd0d423d8306e29e3f91a41d3df32e82867897df Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 29 Nov 2023 09:48:31 +0100 Subject: [PATCH 0804/1416] Skip some sgli tests for python <=3.9 --- satpy/tests/reader_tests/test_sgli_l1b.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/satpy/tests/reader_tests/test_sgli_l1b.py b/satpy/tests/reader_tests/test_sgli_l1b.py index 5ae305a068..0ae65bb8d3 100644 --- a/satpy/tests/reader_tests/test_sgli_l1b.py +++ b/satpy/tests/reader_tests/test_sgli_l1b.py @@ -1,4 +1,5 @@ """Tests for the SGLI L1B backend.""" +import sys from datetime import datetime, timedelta import dask @@ -248,6 +249,7 @@ def test_channel_is_chunked(sgli_vn_file): "standard_name": ""}) assert res.chunks[0][0] > 116 +@pytest.mark.skipif(sys.version_info <= (3, 9)) def test_loading_lon_lat(sgli_vn_file): """Test that loading lons and lats works.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) @@ -259,6 +261,7 @@ def test_loading_lon_lat(sgli_vn_file): assert res.dtype == np.float32 assert res.dims == ("y", "x") +@pytest.mark.skipif(sys.version_info <= (3, 9)) def test_loading_sensor_angles(sgli_vn_file): """Test loading the satellite angles.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) @@ -270,6 +273,7 @@ def test_loading_sensor_angles(sgli_vn_file): assert res.dtype == np.float32 assert res.min() >= 0 +@pytest.mark.skipif(sys.version_info <= (3, 9)) def test_loading_solar_angles(sgli_vn_file): """Test loading sun angles.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) @@ -307,6 +311,7 @@ def test_get_ti_dataset_bt(sgli_ir_file): _ = handler.get_dataset(did, {"file_key": "Image_data/Lt_TI01", "units": "K", "standard_name": "toa_brightness_temperature"}) +@pytest.mark.skipif(sys.version_info <= (3, 9)) def test_get_ti_lon_lats(sgli_ir_file): """Test getting the lons and lats for IR channels.""" handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {}) From a7d6674637b695b7f48bd611e7d0453d039c741f Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 29 Nov 2023 16:53:34 +0100 Subject: [PATCH 0805/1416] Fix skipif with reason --- satpy/tests/reader_tests/test_sgli_l1b.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_sgli_l1b.py b/satpy/tests/reader_tests/test_sgli_l1b.py index 0ae65bb8d3..85feb1ffaa 100644 --- a/satpy/tests/reader_tests/test_sgli_l1b.py +++ b/satpy/tests/reader_tests/test_sgli_l1b.py @@ -249,7 +249,7 @@ def test_channel_is_chunked(sgli_vn_file): "standard_name": ""}) assert res.chunks[0][0] > 116 -@pytest.mark.skipif(sys.version_info <= (3, 9)) +@pytest.mark.skipif(sys.version_info <= (3, 9), reason="Python 3.10 or higher needed for geotiepoints") def test_loading_lon_lat(sgli_vn_file): """Test that loading lons and lats works.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) @@ -261,7 +261,7 @@ def test_loading_lon_lat(sgli_vn_file): assert res.dtype == np.float32 assert res.dims == ("y", "x") -@pytest.mark.skipif(sys.version_info <= (3, 9)) +@pytest.mark.skipif(sys.version_info <= (3, 9), reason="Python 3.10 or higher needed for geotiepoints") def test_loading_sensor_angles(sgli_vn_file): """Test loading the satellite angles.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) @@ -273,7 +273,7 @@ def test_loading_sensor_angles(sgli_vn_file): assert res.dtype == np.float32 assert res.min() >= 0 -@pytest.mark.skipif(sys.version_info <= (3, 9)) +@pytest.mark.skipif(sys.version_info 
<= (3, 9), reason="Python 3.10 or higher needed for geotiepoints") def test_loading_solar_angles(sgli_vn_file): """Test loading sun angles.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) @@ -311,7 +311,7 @@ def test_get_ti_dataset_bt(sgli_ir_file): _ = handler.get_dataset(did, {"file_key": "Image_data/Lt_TI01", "units": "K", "standard_name": "toa_brightness_temperature"}) -@pytest.mark.skipif(sys.version_info <= (3, 9)) +@pytest.mark.skipif(sys.version_info <= (3, 9), reason="Python 3.10 or higher needed for geotiepoints") def test_get_ti_lon_lats(sgli_ir_file): """Test getting the lons and lats for IR channels.""" handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {}) From 8f5258afeee55c3834cc40921a9f1d92b279916c Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 29 Nov 2023 17:06:59 +0100 Subject: [PATCH 0806/1416] Fix skipif again --- satpy/tests/reader_tests/test_sgli_l1b.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_sgli_l1b.py b/satpy/tests/reader_tests/test_sgli_l1b.py index 85feb1ffaa..c2b8bd5167 100644 --- a/satpy/tests/reader_tests/test_sgli_l1b.py +++ b/satpy/tests/reader_tests/test_sgli_l1b.py @@ -249,7 +249,7 @@ def test_channel_is_chunked(sgli_vn_file): "standard_name": ""}) assert res.chunks[0][0] > 116 -@pytest.mark.skipif(sys.version_info <= (3, 9), reason="Python 3.10 or higher needed for geotiepoints") +@pytest.mark.skipif(sys.version_info < (3, 10), reason="Python 3.10 or higher needed for geotiepoints") def test_loading_lon_lat(sgli_vn_file): """Test that loading lons and lats works.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) @@ -261,7 +261,7 @@ def test_loading_lon_lat(sgli_vn_file): assert res.dtype == np.float32 assert res.dims == ("y", "x") -@pytest.mark.skipif(sys.version_info <= (3, 9), reason="Python 3.10 or higher needed for geotiepoints") +@pytest.mark.skipif(sys.version_info < (3, 10), reason="Python 3.10 or higher needed for geotiepoints") def test_loading_sensor_angles(sgli_vn_file): """Test loading the satellite angles.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) @@ -273,7 +273,7 @@ def test_loading_sensor_angles(sgli_vn_file): assert res.dtype == np.float32 assert res.min() >= 0 -@pytest.mark.skipif(sys.version_info <= (3, 9), reason="Python 3.10 or higher needed for geotiepoints") +@pytest.mark.skipif(sys.version_info < (3, 10), reason="Python 3.10 or higher needed for geotiepoints") def test_loading_solar_angles(sgli_vn_file): """Test loading sun angles.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) @@ -311,7 +311,7 @@ def test_get_ti_dataset_bt(sgli_ir_file): _ = handler.get_dataset(did, {"file_key": "Image_data/Lt_TI01", "units": "K", "standard_name": "toa_brightness_temperature"}) -@pytest.mark.skipif(sys.version_info <= (3, 9), reason="Python 3.10 or higher needed for geotiepoints") +@pytest.mark.skipif(sys.version_info < (3, 10), reason="Python 3.10 or higher needed for geotiepoints") def test_get_ti_lon_lats(sgli_ir_file): """Test getting the lons and lats for IR channels.""" handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {}) From 9c9ae0897836c89f122793fc222c1ef6a67fc39f Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 30 Nov 2023 14:42:11 +0100 Subject: [PATCH 0807/1416] Fix missing radiance units in eps l1b --- satpy/readers/eps_l1b.py | 7 ++++--- satpy/tests/reader_tests/test_eps_l1b.py | 11 +++++++++++ 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/satpy/readers/eps_l1b.py 
b/satpy/readers/eps_l1b.py index bd03f40820..fbeb3ecba6 100644 --- a/satpy/readers/eps_l1b.py +++ b/satpy/readers/eps_l1b.py @@ -81,10 +81,10 @@ def read_records(filename): grh = np.fromfile(fdes, grh_dtype, 1) if grh.size == 0: break - rec_class = record_class[int(grh["record_class"])] + rec_class = record_class[int(grh["record_class"].squeeze())] sub_class = grh["RECORD_SUBCLASS"][0] - expected_size = int(grh["RECORD_SIZE"]) + expected_size = int(grh["RECORD_SIZE"].squeeze()) bare_size = expected_size - grh_dtype.itemsize try: the_type = form.dtype((rec_class, sub_class)) @@ -144,7 +144,8 @@ class EPSAVHRRFile(BaseFileHandler): sensors = {"AVHR": "avhrr-3"} units = {"reflectance": "%", - "brightness_temperature": "K"} + "brightness_temperature": "K", + "radiance": "W m^-2 sr^-1"} def __init__(self, filename, filename_info, filetype_info): """Initialize FileHandler.""" diff --git a/satpy/tests/reader_tests/test_eps_l1b.py b/satpy/tests/reader_tests/test_eps_l1b.py index d9a484face..16c7cfe27b 100644 --- a/satpy/tests/reader_tests/test_eps_l1b.py +++ b/satpy/tests/reader_tests/test_eps_l1b.py @@ -134,6 +134,17 @@ def test_dataset(self): assert res.attrs["calibration"] == "brightness_temperature" assert res.attrs["units"] == "K" + def test_get_dataset_radiance(self): + """Test loading a data array with radiance calibration.""" + did = make_dataid(name="1", calibration="radiance") + res = self.fh.get_dataset(did, {}) + assert isinstance(res, xr.DataArray) + assert res.attrs["platform_name"] == "Metop-C" + assert res.attrs["sensor"] == "avhrr-3" + assert res.attrs["name"] == "1" + assert res.attrs["calibration"] == "radiance" + assert res.attrs["units"] == "W m^-2 sr^-1" + def test_navigation(self): """Test the navigation.""" did = make_dataid(name="longitude") From aac3016f0d1287c619c5682d45a3ccf7d6428ffc Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 1 Dec 2023 11:17:35 -0600 Subject: [PATCH 0808/1416] Update unstable version of h5py in CI --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 14e5e3ffcd..2eff205013 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -83,7 +83,7 @@ jobs: git+https://github.com/pytroll/pyresample \ git+https://github.com/pytroll/trollimage \ git+https://github.com/fhs/pyhdf \ - git+https://github.com/takluyver/h5py@cython-3 \ + git+https://github.com/djhoese/h5py@cython-3-davidh \ git+https://github.com/Unidata/netcdf4-python \ git+https://github.com/dask/dask \ git+https://github.com/dask/distributed \ From e8938cbef0e5b4473a63b95fde3ec37b1b50ce13 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 1 Dec 2023 21:33:26 -0600 Subject: [PATCH 0809/1416] Add h5netcdf to unstable CI --- .github/workflows/ci.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 2eff205013..9931130d53 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -84,6 +84,7 @@ jobs: git+https://github.com/pytroll/trollimage \ git+https://github.com/fhs/pyhdf \ git+https://github.com/djhoese/h5py@cython-3-davidh \ + git+https://github.com/h5netcdf/h5netcdf \ git+https://github.com/Unidata/netcdf4-python \ git+https://github.com/dask/dask \ git+https://github.com/dask/distributed \ From 3c7e6deeaee713aa9be2d93d7cae5fced8cd5f74 Mon Sep 17 00:00:00 2001 From: Kexin828 <149068356+Kexin828@users.noreply.github.com> Date: Sun, 3 Dec 2023 18:16:53 +0800 Subject: [PATCH 0810/1416] Update 
tropomi_l2.yaml
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

The tropomi_l2 reader can read 'S5P_OFFL_L2__NO2____' files but cannot read
'S5P_RPRO_L2__NO2____' files: the reprocessed filenames have '_reduced'
appended, so the pattern defined here does not match them. Adding a second
pattern with '_reduced' at the end makes them match.
---
 satpy/etc/readers/tropomi_l2.yaml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/satpy/etc/readers/tropomi_l2.yaml b/satpy/etc/readers/tropomi_l2.yaml
index 3e961f7d56..181311d25d 100644
--- a/satpy/etc/readers/tropomi_l2.yaml
+++ b/satpy/etc/readers/tropomi_l2.yaml
@@ -14,6 +14,7 @@ file_types:
     file_reader: !!python/name:satpy.readers.tropomi_l2.TROPOMIL2FileHandler
     file_patterns:
       - '{platform_shortname:3s}_{data_type:4s}_{level:3s}_{product:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{orbit:5d}_{collection:2d}_{processor_version:6d}_{creation_time:%Y%m%dT%H%M%S}.nc'
+      - '{platform_shortname:3s}_RPRO_{level:3s}_{product:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{orbit:5d}_{collection:2d}_{processor_version:6d}_{creation_time:%Y%m%dT%H%M%S}_reduced.nc'

 datasets:
   latitude:

From b7167249133f39708d11c72feb90545afbdd64a7 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 4 Dec 2023 10:45:42 +0000
Subject: [PATCH 0811/1416] Bump conda-incubator/setup-miniconda from 2 to 3

Bumps [conda-incubator/setup-miniconda](https://github.com/conda-incubator/setup-miniconda) from 2 to 3.
- [Release notes](https://github.com/conda-incubator/setup-miniconda/releases)
- [Changelog](https://github.com/conda-incubator/setup-miniconda/blob/main/CHANGELOG.md)
- [Commits](https://github.com/conda-incubator/setup-miniconda/compare/v2...v3)

---
updated-dependencies:
- dependency-name: conda-incubator/setup-miniconda
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot]
---
 .github/workflows/ci.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 14e5e3ffcd..30acd9ec08 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -36,7 +36,7 @@ jobs:
         uses: actions/checkout@v4

       - name: Setup Conda Environment
-        uses: conda-incubator/setup-miniconda@v2
+        uses: conda-incubator/setup-miniconda@v3
        with:
          miniforge-variant: Mambaforge
          miniforge-version: latest

From 1f2041c069e7c9ff54ddf2691aa3389a189d0dd5 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 4 Dec 2023 10:45:51 +0000
Subject: [PATCH 0812/1416] Bump pypa/gh-action-pypi-publish from 1.8.10 to 1.8.11

Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.10 to 1.8.11.
- [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases)
- [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.10...v1.8.11)

---
updated-dependencies:
- dependency-name: pypa/gh-action-pypi-publish
  dependency-type: direct:production
  update-type: version-update:semver-patch
...
Signed-off-by: dependabot[bot] --- .github/workflows/deploy-sdist.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index 4ed63fefdd..9fd1d86b5a 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -23,7 +23,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.8.10 + uses: pypa/gh-action-pypi-publish@v1.8.11 with: user: __token__ password: ${{ secrets.pypi_password }} From 2aab8c4b2cbb4e3b77540dd794ba11439b517467 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 4 Dec 2023 11:56:37 +0100 Subject: [PATCH 0813/1416] Address a few review comments --- satpy/readers/sgli_l1b.py | 37 ----------------------- satpy/tests/reader_tests/test_sgli_l1b.py | 16 +--------- 2 files changed, 1 insertion(+), 52 deletions(-) diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py index 61a4b61f9d..67b8d46816 100644 --- a/satpy/readers/sgli_l1b.py +++ b/satpy/readers/sgli_l1b.py @@ -35,9 +35,6 @@ import numpy as np import xarray as xr from dask.array.core import normalize_chunks -from xarray import Dataset, Variable -from xarray.backends import BackendArray, BackendEntrypoint -from xarray.core import indexing # from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler @@ -230,37 +227,3 @@ def get_full_angles(self, azi, zen, attrs): new_azi, new_zen = self.interpolate_spherical(azi, zen, resampling_interval) return new_azi, new_zen + 90 return azi, zen - - -class H5Array(BackendArray): - """An Hdf5-based array.""" - - def __init__(self, array): - """Initialize the array.""" - self.shape = array.shape - self.dtype = array.dtype - self.array = array - - def __getitem__(self, key): - """Get a slice of the array.""" - return indexing.explicit_indexing_adapter( - key, self.shape, indexing.IndexingSupport.BASIC, self._getitem - ) - - def _getitem(self, key): - return self.array[key] - - -class SGLIBackend(BackendEntrypoint): - """The SGLI backend.""" - - def open_dataset(self, filename, *, drop_variables=None): - """Open the dataset.""" - ds = Dataset() - h5f = h5py.File(filename) - h5_arr = h5f["Image_data"]["Lt_VN01"] - chunks = dict(zip(("y", "x"), h5_arr.chunks)) - ds["Lt_VN01"] = Variable(["y", "x"], - indexing.LazilyIndexedArray(H5Array(h5_arr)), - encoding={"preferred_chunks": chunks}) - return ds diff --git a/satpy/tests/reader_tests/test_sgli_l1b.py b/satpy/tests/reader_tests/test_sgli_l1b.py index c2b8bd5167..1512e38762 100644 --- a/satpy/tests/reader_tests/test_sgli_l1b.py +++ b/satpy/tests/reader_tests/test_sgli_l1b.py @@ -3,12 +3,9 @@ from datetime import datetime, timedelta import dask -import dask.array as da import h5py import numpy as np import pytest -from dask.array.core import normalize_chunks -from xarray import DataArray, Dataset, open_dataset from satpy.readers.sgli_l1b import HDF5SGLI @@ -21,18 +18,7 @@ ZEN_ARRAY = np.random.randint(0, 180 * 100, size=(197, 126), dtype=np.int16) -def test_open_dataset(sgli_vn_file): - """Test open_dataset function.""" - from satpy.readers.sgli_l1b import SGLIBackend - res = open_dataset(sgli_vn_file, engine=SGLIBackend, chunks={}) - assert isinstance(res, Dataset) - data_array = res["Lt_VN01"] - assert isinstance(data_array, DataArray) - assert isinstance(data_array.data, da.Array) - assert data_array.chunks == normalize_chunks((116, 157), data_array.shape) - - -@pytest.fixture(scope="session") 
+@pytest.fixture(scope="module") def sgli_vn_file(tmp_path_factory): """Create a stub VN file.""" filename = tmp_path_factory.mktemp("data") / "test_vn_file.h5" From 728631eb32afad4eaef8f53bbed863b9ab05282c Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 4 Dec 2023 12:41:09 +0100 Subject: [PATCH 0814/1416] Refactor dataset preparation --- satpy/readers/sgli_l1b.py | 17 +++++++++++------ satpy/tests/reader_tests/test_sgli_l1b.py | 12 ++++++++++-- 2 files changed, 21 insertions(+), 8 deletions(-) diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py index 67b8d46816..1e2a64f783 100644 --- a/satpy/readers/sgli_l1b.py +++ b/satpy/readers/sgli_l1b.py @@ -85,6 +85,16 @@ def get_dataset(self, key, info): attrs = h5dataset.attrs dataset = xr.DataArray(dataset, attrs=attrs, dims=["y", "x"]) + dataset = self.prepare_dataset(key, dataset) + + dataset.attrs["platform_name"] = "GCOM-C1" + dataset.attrs["sensor"] = "sgli" + dataset.attrs["units"] = info["units"] + dataset.attrs["standard_name"] = info["standard_name"] + return dataset + + def prepare_dataset(self, key, dataset): + """Prepare the dataset according to key.""" with xr.set_options(keep_attrs=True): if key["name"].startswith(("VN", "SW", "P")): dataset = self.get_visible_dataset(key, dataset) @@ -95,12 +105,7 @@ def get_dataset(self, key, info): elif "angle" in key["name"]: dataset = self.get_angles(key) else: - raise NotImplementedError() - - dataset.attrs["platform_name"] = "GCOM-C1" - dataset.attrs["sensor"] = "sgli" - dataset.attrs["units"] = info["units"] - dataset.attrs["standard_name"] = info["standard_name"] + raise KeyError(f"Unrecognized dataset {key['name']}") return dataset def get_visible_dataset(self, key, dataset): diff --git a/satpy/tests/reader_tests/test_sgli_l1b.py b/satpy/tests/reader_tests/test_sgli_l1b.py index 1512e38762..7f5fffa70c 100644 --- a/satpy/tests/reader_tests/test_sgli_l1b.py +++ b/satpy/tests/reader_tests/test_sgli_l1b.py @@ -46,7 +46,7 @@ def sgli_vn_file(tmp_path_factory): return filename -@pytest.fixture(scope="session") +@pytest.fixture(scope="module") def sgli_ir_file(tmp_path_factory): """Create a stub IR file.""" filename = tmp_path_factory.mktemp("data") / "test_ir_file.h5" @@ -83,7 +83,7 @@ def sgli_ir_file(tmp_path_factory): return filename -@pytest.fixture(scope="session") +@pytest.fixture(scope="module") def sgli_pol_file(tmp_path_factory): """Create a POL stub file.""" filename = tmp_path_factory.mktemp("data") / "test_pol_file.h5" @@ -188,6 +188,14 @@ def test_get_dataset_counts(sgli_vn_file): assert res.attrs["platform_name"] == "GCOM-C1" assert res.attrs["sensor"] == "sgli" +def test_get_dataset_for_unknown_channel(sgli_vn_file): + """Test that counts can be extracted from a file.""" + handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) + did = dict(name="VIN", resolution=1000, polarization=None, calibration="counts") + with pytest.raises(KeyError): + handler.get_dataset(did, {"file_key": "Image_data/Lt_VIN01", "units": "", + "standard_name": ""}) + def test_get_vn_dataset_reflectances(sgli_vn_file): """Test that the vn datasets can be calibrated to reflectances.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) From 2c83c70007899e17345f184239f5fbd94090e52c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 4 Dec 2023 10:21:30 -0600 Subject: [PATCH 0815/1416] Update satpy/etc/readers/tropomi_l2.yaml --- satpy/etc/readers/tropomi_l2.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/readers/tropomi_l2.yaml 
b/satpy/etc/readers/tropomi_l2.yaml index 181311d25d..afd78d750f 100644 --- a/satpy/etc/readers/tropomi_l2.yaml +++ b/satpy/etc/readers/tropomi_l2.yaml @@ -14,7 +14,7 @@ file_types: file_reader: !!python/name:satpy.readers.tropomi_l2.TROPOMIL2FileHandler file_patterns: - '{platform_shortname:3s}_{data_type:4s}_{level:3s}_{product:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{orbit:5d}_{collection:2d}_{processor_version:6d}_{creation_time:%Y%m%dT%H%M%S}.nc' - - '{platform_shortname:3s}_RPRO_{level:3s}_{product:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{orbit:5d}_{collection:2d}_{processor_version:6d}_{creation_time:%Y%m%dT%H%M%S}_reduced.nc' + - '{platform_shortname:3s}_{data_type:4s}_{level:3s}_{product:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{orbit:5d}_{collection:2d}_{processor_version:6d}_{creation_time:%Y%m%dT%H%M%S}_reduced.nc' datasets: latitude: From c85f0a073560c14f914425683600aab98f4a736e Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 5 Dec 2023 11:15:23 -0600 Subject: [PATCH 0816/1416] Update .github/workflows/ci.yaml --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 9931130d53..0d73af13e1 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -83,7 +83,7 @@ jobs: git+https://github.com/pytroll/pyresample \ git+https://github.com/pytroll/trollimage \ git+https://github.com/fhs/pyhdf \ - git+https://github.com/djhoese/h5py@cython-3-davidh \ + git+https://github.com/h5py/h5py \ git+https://github.com/h5netcdf/h5netcdf \ git+https://github.com/Unidata/netcdf4-python \ git+https://github.com/dask/dask \ From 60a7c1b3a527b896838114a5ee0f2ce7b26fabec Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 8 Dec 2023 09:42:53 +0200 Subject: [PATCH 0817/1416] Fix RealisticColors compositor not to upcast data to float64 --- satpy/composites/__init__.py | 8 ++++---- satpy/tests/test_composites.py | 34 ++++++++++++++++++++++++++++++++++ 2 files changed, 38 insertions(+), 4 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 9295f94dc7..686b8c4c27 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -992,17 +992,17 @@ def __call__(self, projectables, *args, **kwargs): hrv = projectables[2] try: - ch3 = 3 * hrv - vis06 - vis08 + ch3 = 3.0 * hrv - vis06 - vis08 ch3.attrs = hrv.attrs except ValueError: raise IncompatibleAreas ndvi = (vis08 - vis06) / (vis08 + vis06) - ndvi = np.where(ndvi < 0, 0, ndvi) + ndvi = ndvi.where(ndvi >= 0.0, 0.0) - ch1 = ndvi * vis06 + (1 - ndvi) * vis08 + ch1 = ndvi * vis06 + (1.0 - ndvi) * vis08 ch1.attrs = vis06.attrs - ch2 = ndvi * vis08 + (1 - ndvi) * vis06 + ch2 = ndvi * vis08 + (1.0 - ndvi) * vis06 ch2.attrs = vis08.attrs res = super(RealisticColors, self).__call__((ch1, ch2, ch3), diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 7fbe177bfb..4a7b2a2ce9 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -1867,3 +1867,37 @@ def _create_fake_composite_config(yaml_filename: str): }, comp_file, ) + + +class TestRealisticColors: + """Test the SEVIRI Realistic Colors compositor.""" + + def test_realistic_colors(self): + """Test the compositor.""" + from satpy.composites import RealisticColors + + vis06 = xr.DataArray(da.arange(0, 15, dtype=np.float32).reshape(3, 5), dims=("y", "x"), + attrs={"foo": "foo"}) + vis08 = xr.DataArray(da.arange(15, 0, -1, 
dtype=np.float32).reshape(3, 5), dims=("y", "x"), + attrs={"bar": "bar"}) + hrv = xr.DataArray(6 * da.ones((3, 5), dtype=np.float32), dims=("y", "x"), + attrs={"baz": "baz"}) + + expected_red = np.array([[0.0, 2.733333, 4.9333334, 6.6, 7.733333], + [8.333333, 8.400001, 7.9333334, 7.0, 6.0], + [5.0, 4.0, 3.0, 2.0, 1.0]], dtype=np.float32) + expected_green = np.array([ + [15.0, 12.266666, 10.066668, 8.400001, 7.2666664], + [6.6666665, 6.6000004, 7.0666666, 8.0, 9.0], + [10.0, 11.0, 12.0, 13.0, 14.0]], dtype=np.float32) + + with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + comp = RealisticColors("Ni!") + res = comp((vis06, vis08, hrv)) + + arr = res.values + + assert res.dtype == np.float32 + np.testing.assert_allclose(arr[0, :, :], expected_red) + np.testing.assert_allclose(arr[1, :, :], expected_green) + np.testing.assert_allclose(arr[2, :, :], 3.0) From 91b59ca721cdda8d8b7cf4716c74050aa589d06b Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 8 Dec 2023 10:07:50 -0600 Subject: [PATCH 0818/1416] Workaround AWIPS bug not handling integers properly in "awips_tiled" writer --- satpy/tests/writer_tests/test_awips_tiled.py | 2 +- satpy/writers/awips_tiled.py | 8 +++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/satpy/tests/writer_tests/test_awips_tiled.py b/satpy/tests/writer_tests/test_awips_tiled.py index 63113a9f94..dbc1bc82d7 100644 --- a/satpy/tests/writer_tests/test_awips_tiled.py +++ b/satpy/tests/writer_tests/test_awips_tiled.py @@ -198,7 +198,7 @@ def test_basic_numbered_1_tile(self, extra_attrs, expected_filename, use_save_da check_required_properties(unmasked_ds, output_ds) scale_factor = output_ds["data"].encoding["scale_factor"] np.testing.assert_allclose(input_data_arr.values, output_ds["data"].data, - atol=scale_factor / 2) + atol=scale_factor * 0.75) def test_units_length_warning(self, tmp_path): """Test long 'units' warnings are raised.""" diff --git a/satpy/writers/awips_tiled.py b/satpy/writers/awips_tiled.py index 15680e8091..03ce3e9d68 100644 --- a/satpy/writers/awips_tiled.py +++ b/satpy/writers/awips_tiled.py @@ -630,7 +630,13 @@ def _get_factor_offset_fill(input_data_arr, vmin, vmax, encoding): # max value fills = [2 ** (file_bit_depth - 1) - 1] - mx = (vmax - vmin) / (2 ** bit_depth - 1 - num_fills) + # NOTE: AWIPS is buggy and does not properly handle both + # halves of an integer's data space. The below code limits + # unsigned integers to the positive half and this seems + # to work better with current AWIPS.
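+ # Worked example (editor's illustration, hypothetical values): with
+ # bit_depth=8 and num_fills=1 the original divisor is 2**8 - 1 - 1 = 254,
+ # while the workaround divisor is 2**7 - 1 - 1 = 126, so the scale factor
+ # roughly doubles and precision is halved. This is also why the test above
+ # relaxes its tolerance from scale_factor / 2 to scale_factor * 0.75.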
+ mx = (vmax - vmin) / (2 ** (bit_depth - 1) - 1 - num_fills) + # NOTE: This is what the line should look like if AWIPS wasn't buggy: + # mx = (vmax - vmin) / (2 ** bit_depth - 1 - num_fills) bx = vmin if not is_unsigned and not unsigned_in_signed: bx += 2 ** (bit_depth - 1) * mx From 333f0f2902d7f115c48a874d37bee210d531995e Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 11 Dec 2023 14:16:08 +0200 Subject: [PATCH 0819/1416] Add ASII-GW and ASII-TF datasets for v2021 --- satpy/etc/readers/nwcsaf-geo.yaml | 39 +++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/satpy/etc/readers/nwcsaf-geo.yaml b/satpy/etc/readers/nwcsaf-geo.yaml index e22ae09fc1..8ab3db9500 100644 --- a/satpy/etc/readers/nwcsaf-geo.yaml +++ b/satpy/etc/readers/nwcsaf-geo.yaml @@ -689,11 +689,13 @@ datasets: file_type: nc_nwcsaf_rdt # ----ASII products in multiple files ------------ + # until v2018 asii_turb_trop_prob: name: asii_turb_trop_prob resolution: 3000 file_type: [nc_nwcsaf_asii_tf, nc_nwcsaf_asii] + # until v2018 asii_turb_prob_pal: name: asii_turb_prob_pal resolution: 3000 @@ -701,6 +703,24 @@ datasets: # ----ASII-TF product ------------ + # v2021 onwards + asiitf_prob: + name: asiitf_prob + resolution: 3000 + file_type: nc_nwcsaf_asii_tf + + # v2021 onwards + asiitf_prob_pal: + name: asiitf_prob_pal + file_type: nc_nwcsaf_asii_tf + + # v2021 onwards + asiitf_status_flag: + name: asiitf_status_flag + resolution: 3000 + file_type: nc_nwcsaf_asii_tf + + # until v2018 asii_turb_prob_status_flag: name: asii_turb_trop_prob_status_flag resolution: 3000 @@ -718,11 +738,30 @@ datasets: # ----ASII-GW product ------------ + # v2021 onwards + asiigw_wv_prob: + name: asiigw_wv_prob + resolution: 3000 + file_type: nc_nwcsaf_asii_gw + + # v2021 onwards + asiigw_status_flag: + name: asiigw_status_flag + resolution: 3000 + file_type: nc_nwcsaf_asii_gw + + # v2021 onwards + asiigw_wv_prob_pal: + name: asiigw_wv_prob_pal + file_type: nc_nwcsaf_asii_gw + + # until v2018 asii_turb_wave_prob: name: asii_turb_wave_prob resolution: 3000 file_type: nc_nwcsaf_asii_gw + # until v2018 asii_turb_wave_prob_status_flag: name: asii_turb_wave_prob_status_flag resolution: 3000 From 241a2d7db19c72c061e75fb5390b8865fc22d3f0 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 11 Dec 2023 14:16:38 +0200 Subject: [PATCH 0820/1416] Add missing cma_quality and ct_status_flag datasets --- satpy/etc/readers/nwcsaf-geo.yaml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/satpy/etc/readers/nwcsaf-geo.yaml b/satpy/etc/readers/nwcsaf-geo.yaml index 8ab3db9500..3ab2bf8536 100644 --- a/satpy/etc/readers/nwcsaf-geo.yaml +++ b/satpy/etc/readers/nwcsaf-geo.yaml @@ -77,6 +77,11 @@ datasets: resolution: 3000 file_type: nc_nwcsaf_cma + cma_quality: + name: cma_quality + resolution: 3000 + file_type: nc_nwcsaf_cma + cma_pal: name: cma_pal resolution: 3000 @@ -129,6 +134,11 @@ datasets: resolution: 3000 file_type: nc_nwcsaf_ct + ct_status_flag: + name: ct_status_flag + resolution: 3000 + file_type: nc_nwcsaf_ct + ct_pal: name: ct_pal resolution: 3000 From 4829c4a02b602a1531b574d21634351de0ff6804 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 11 Dec 2023 14:22:21 +0200 Subject: [PATCH 0821/1416] Fix crrph palette and status flag names --- satpy/etc/readers/nwcsaf-geo.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/etc/readers/nwcsaf-geo.yaml b/satpy/etc/readers/nwcsaf-geo.yaml index 3ab2bf8536..c003d6f6e7 100644 --- a/satpy/etc/readers/nwcsaf-geo.yaml +++ 
b/satpy/etc/readers/nwcsaf-geo.yaml @@ -380,7 +380,7 @@ datasets: file_type: nc_nwcsaf_crr-ph crrph_pal: - name: crrph_intensity_pal + name: crrph_pal resolution: 3000 file_type: nc_nwcsaf_crr-ph @@ -400,7 +400,7 @@ datasets: file_type: nc_nwcsaf_crr-ph crrph_status_flag: - name: crrph_status + name: crrph_status_flag resolution: 3000 file_type: nc_nwcsaf_crr-ph From ac5301e092134a4b8884ffc8e1f6ae6d84f797a0 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 11 Dec 2023 13:09:42 +0000 Subject: [PATCH 0822/1416] Bump expected xarray version number --- satpy/tests/writer_tests/test_cf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 7fabb04f10..d33b195977 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -566,9 +566,9 @@ def _should_use_compression_keyword(): # xarray currently ignores the "compression" keyword, see # https://github.com/pydata/xarray/issues/7388. There's already an open # PR, so we assume that this will be fixed in the next minor release - # (current release is 2023.02). If not, tests will fail and remind us. + # (current release is 2023.12). If not, tests will fail and remind us. versions = _get_backend_versions() return ( versions["libnetcdf"] >= Version("4.9.0") and - versions["xarray"] >= Version("2023.12") + versions["xarray"] >= Version("2024.02") ) From a59fdd59c14be620a509feec8ef1e6aedbd57436 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 11 Dec 2023 13:11:10 +0000 Subject: [PATCH 0823/1416] Make attribute encoding public --- satpy/cf/attrs.py | 8 ++++---- satpy/tests/cf_tests/test_attrs.py | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/satpy/cf/attrs.py b/satpy/cf/attrs.py index cdec8500d4..adcc2ba60a 100644 --- a/satpy/cf/attrs.py +++ b/satpy/cf/attrs.py @@ -128,8 +128,8 @@ def _encode_to_cf(obj): return _encode_python_objects(obj) -def _encode_nc_attrs(attrs): - """Encode dataset attributes in a netcdf compatible datatype. +def encode_attrs_to_cf(attrs): + """Encode dataset attributes as a netcdf compatible datatype. 
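+
+    (Editor's note) Values that netCDF cannot store natively are serialized
+    first; for example, a dict attribute such as {"b": 1} is assumed to be
+    returned as the json string '{"b": 1}'.
+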
Args: attrs (dict): @@ -161,7 +161,7 @@ def preprocess_attrs( if flatten_attrs: data_arr.attrs = flatten_dict(data_arr.attrs) - data_arr.attrs = _encode_nc_attrs(data_arr.attrs) + data_arr.attrs = encode_attrs_to_cf(data_arr.attrs) return data_arr @@ -224,7 +224,7 @@ def preprocess_header_attrs(header_attrs, flatten_attrs=False): if header_attrs is not None: if flatten_attrs: header_attrs = flatten_dict(header_attrs) - header_attrs = _encode_nc_attrs(header_attrs) # OrderedDict + header_attrs = encode_attrs_to_cf(header_attrs) # OrderedDict else: header_attrs = {} header_attrs = _add_history(header_attrs) diff --git a/satpy/tests/cf_tests/test_attrs.py b/satpy/tests/cf_tests/test_attrs.py index 082dea602c..4772d5e428 100644 --- a/satpy/tests/cf_tests/test_attrs.py +++ b/satpy/tests/cf_tests/test_attrs.py @@ -24,14 +24,14 @@ class TestCFAttributeEncoding: def test__encode_nc_attrs(self): """Test attributes encoding.""" - from satpy.cf.attrs import _encode_nc_attrs + from satpy.cf.attrs import encode_attrs_to_cf from satpy.tests.cf_tests._test_data import get_test_attrs from satpy.tests.utils import assert_dict_array_equality attrs, expected, _ = get_test_attrs() # Test encoding - encoded = _encode_nc_attrs(attrs) + encoded = encode_attrs_to_cf(attrs) assert_dict_array_equality(expected, encoded) # Test decoding of json-encoded attributes From aa98fd6d69335a8c862570e1ec3479d92f0cd9eb Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 11 Dec 2023 15:14:50 +0200 Subject: [PATCH 0824/1416] Update xarray version in compression tests for compression kwarg --- satpy/tests/writer_tests/test_cf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 7fabb04f10..62c9995cde 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -570,5 +570,5 @@ def _should_use_compression_keyword(): versions = _get_backend_versions() return ( versions["libnetcdf"] >= Version("4.9.0") and - versions["xarray"] >= Version("2023.12") + versions["xarray"] >= Version("2024.1") ) From cc33e1c1fa680ec9e10121824cb2a245cf017fb1 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 11 Dec 2023 13:22:35 +0000 Subject: [PATCH 0825/1416] Revert "Bump expected xarray version number" This reverts commit ac5301e092134a4b8884ffc8e1f6ae6d84f797a0. --- satpy/tests/writer_tests/test_cf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index d33b195977..7fabb04f10 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -566,9 +566,9 @@ def _should_use_compression_keyword(): # xarray currently ignores the "compression" keyword, see # https://github.com/pydata/xarray/issues/7388. There's already an open # PR, so we assume that this will be fixed in the next minor release - # (current release is 2023.12). If not, tests will fail and remind us. + # (current release is 2023.02). If not, tests will fail and remind us. 
versions = _get_backend_versions() return ( versions["libnetcdf"] >= Version("4.9.0") and - versions["xarray"] >= Version("2024.02") + versions["xarray"] >= Version("2023.12") ) From 77830d67ca86bfb398fa38ec10d21f097b2faffe Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Mon, 11 Dec 2023 13:39:02 +0000 Subject: [PATCH 0826/1416] Add AMV/AMVI file pattern and variable --- satpy/etc/readers/fci_l2_nc.yaml | 267 +++++++++++++++++++++++++++++++ 1 file changed, 267 insertions(+) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index 20d9935682..7c0724b6ac 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -64,6 +64,16 @@ file_types: file_patterns: - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-ASR-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' + nc_fci_amvi: + file_reader: !!python/name:readers.fci_amv_l2_nc.FciAmvL2NCFileHandler + file_patterns: + - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-AMVI-{channel}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' + + nc_fci_amv: + file_reader: !!python/name:readers.fci_amv_l2_nc.FciAmvL2NCFileHandler + file_patterns: + - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-AMV-{channel}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' + datasets: # CLM @@ -2734,3 +2744,260 @@ datasets: file_type: nc_fci_asr file_key: product_timeliness long_name: product_timeliness_index + +# AMV Intermediate Product + intm_latitude: + name: intm_latitude + file_type: nc_fci_amvi + file_key: intm_latitude + standard_name: latitude + + intm_longitude: + name: intm_longitude + file_type: nc_fci_amvi + file_key: intm_longitude + standard_name: longitude + + intm_speed: + name: intm_speed + file_type: nc_fci_amvi + file_key: intm_speed + standard_name: wind_speed + coordinates: + - intm_longitude + - intm_latitude + + intm_u_component: + name: intm_u_component + file_type: nc_fci_amvi + file_key: intm_u_component + standard_name: wind_speed_horizontal_component + coordinates: + - intm_longitude + - intm_latitude + + intm_v_component: + name: intm_v_component + file_type: nc_fci_amvi + file_key: intm_v_component + standard_name: wind_speed_vertical_component + coordinates: + - intm_longitude + - intm_latitude + + intm_direction: + name: intm_direction + file_type: nc_fci_amvi + file_key: intm_direction + standard_name: wind_to_direction + coordinates: + - intm_longitude + - intm_latitude + + intm_pressure: + name: intm_pressure + file_type: nc_fci_amvi + file_key: intm_pressure + standard_name: wind_pressure + coordinates: + - intm_longitude + - intm_latitude + + 
intm_temperature: + name: intm_temperature + file_type: nc_fci_amvi + file_key: intm_temperature + standard_name: wind_temperature + coordinates: + - intm_longitude + - intm_latitude + + intm_target_type: + name: intm_target_type + file_type: nc_fci_amvi + file_key: target_type + standard_name: wind_target_type + coordinates: + - intm_longitude + - intm_latitude + + intm_wind_method: + name: intm_wind_method + file_type: nc_fci_amvi + file_key: wind_method + standard_name: wind_wind_method + coordinates: + - intm_longitude + - intm_latitude + +# AMV Final Product + channel_id: + name: channel_id + file_type: nc_fci_amv + file_key: channel_id + standard_name: channel_id + + latitude: + name: latitude + file_type: nc_fci_amv + file_key: latitude + standard_name: latitude + + longitude: + name: longitude + file_type: nc_fci_amv + file_key: longitude + standard_name: longitude + + speed: + name: speed + file_type: nc_fci_amv + file_key: speed + standard_name: wind_speed + coordinates: + - longitude + - latitude + + speed_u_component: + name: speed_u_component + file_type: nc_fci_amv + file_key: speed_u_component + standard_name: wind_speed_horizontal_component + coordinates: + - longitude + - latitude + + speed_v_component: + name: speed_v_component + file_type: nc_fci_amv + file_key: speed_v_component + standard_name: wind_speed_vertical_component + coordinates: + - longitude + - latitude + + direction: + name: direction + file_type: nc_fci_amv + file_key: direction + standard_name: wind_to_direction + coordinates: + - longitude + - latitude + + pressure: + name: pressure + file_type: nc_fci_amv + file_key: pressure + standard_name: wind_pressure + coordinates: + - longitude + - latitude + + temperature: + name: temperature + file_type: nc_fci_amv + file_key: temperature + standard_name: wind_temperature + coordinates: + - longitude + - latitude + + target_type: + name: target_type + file_type: nc_fci_amv + file_key: target_type + standard_name: wind_target_type + coordinates: + - longitude + - latitude + + wind_method: + name: wind_method + file_type: nc_fci_amv + file_key: wind_method + standard_name: wind_wind_method + coordinates: + - longitude + - latitude + + fcst_u: + name: fcst_u + file_type: nc_fci_amv + file_key: forecast_u_component + standard_name: wind_forecast_u_component + coordinates: + - longitude + - latitude + + fcst_v: + name: fcst_v + file_type: nc_fci_amv + file_key: forecast_v_component + standard_name: wind_forecast_v_component + coordinates: + - longitude + - latitude + + best_fit_pres: + name: best_fit_pres + file_type: nc_fci_amv + file_key: best_fit_pressure + standard_name: wind_best_fit_pressure + coordinates: + - longitude + - latitude + + best_fit_u: + name: best_fit_u + file_type: nc_fci_amv + file_key: best_fit_u_component + standard_name: wind_best_fit_u_component + coordinates: + - longitude + - latitude + + best_fit_v: + name: best_fit_v + file_type: nc_fci_amv + file_key: best_fit_v_component + standard_name: wind_best_fit_v_component + coordinates: + - longitude + - latitude + + qi: + name: qi + file_type: nc_fci_amv + file_key: overall_reliability + standard_name: wind_overall_reliability + coordinates: + - longitude + - latitude + + qi_excl_fcst: + name: qi_excl_fcst + file_type: nc_fci_amv + file_key: overall_reliability_exc_forecast + standard_name: wind_overall_reliability_exc_forecast + coordinates: + - longitude + - latitude + + product_quality: + name: product_quality + file_type: nc_fci_amv + file_key: product_quality + long_name: 
product_quality_index + + product_completeness: + name: product_completeness + file_type: nc_fci_amv + file_key: product_completeness + long_name: product_completeness_index + + product_timeliness: + name: product_timeliness + file_type: nc_fci_amv + file_key: product_timeliness + long_name: product_timeliness_index From fe50d42cc928c390b4582978045e25edc17c8137 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 11 Dec 2023 15:14:50 +0200 Subject: [PATCH 0827/1416] Update xarray version in compression tests for compression kwarg --- satpy/tests/writer_tests/test_cf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 7fabb04f10..62c9995cde 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -570,5 +570,5 @@ def _should_use_compression_keyword(): versions = _get_backend_versions() return ( versions["libnetcdf"] >= Version("4.9.0") and - versions["xarray"] >= Version("2023.12") + versions["xarray"] >= Version("2024.1") ) From a43187efd63b48f68fe43a08c779c011fc0d7776 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 11 Dec 2023 16:19:14 +0200 Subject: [PATCH 0828/1416] Convert times in SEVIRI readers to nanosecond precision to silence warnings --- satpy/readers/seviri_base.py | 4 +++- satpy/tests/reader_tests/test_seviri_base.py | 19 ++++++++++++------- 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 25e6ed1a8b..5b19e56833 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -475,8 +475,10 @@ def get_cds_time(days, msecs): days = np.array([days], dtype="int64") msecs = np.array([msecs], dtype="int64") + # use nanosecond precision to silence warning from XArray + nsecs = 1000000 * msecs.astype("timedelta64[ns]") time = np.datetime64("1958-01-01").astype("datetime64[ms]") + \ - days.astype("timedelta64[D]") + msecs.astype("timedelta64[ms]") + days.astype("timedelta64[D]") + nsecs time[time == np.datetime64("1958-01-01 00:00")] = np.datetime64("NaT") if len(time) == 1: diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index c2d190e084..42b79ea0c8 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -74,23 +74,28 @@ def test_chebyshev(self): exp = chebyshev4(coefs, time, domain) np.testing.assert_allclose(res, exp) - def test_get_cds_time(self): - """Test the get_cds_time function.""" - # Scalar + def test_get_cds_time_scalar(self): + """Test the get_cds_time function for scalar inputs.""" assert get_cds_time(days=21246, msecs=12 * 3600 * 1000) == np.datetime64("2016-03-03 12:00") - # Array + def test_get_cds_time_array(self): + """Test the get_cds_time function for array inputs.""" days = np.array([21246, 21247, 21248]) msecs = np.array([12*3600*1000, 13*3600*1000 + 1, 14*3600*1000 + 2]) expected = np.array([np.datetime64("2016-03-03 12:00:00.000"), np.datetime64("2016-03-04 13:00:00.001"), np.datetime64("2016-03-05 14:00:00.002")]) - np.testing.assert_equal(get_cds_time(days=days, msecs=msecs), expected) + res = get_cds_time(days=days, msecs=msecs) + np.testing.assert_equal(res, expected) + def test_get_cds_time_nanoseconds(self): + """Test the get_cds_time function for having nanosecond precision.""" days = 21246 - msecs = 12*3600*1000 + msecs = 12 * 3600 * 1000 expected = np.datetime64("2016-03-03 12:00:00.000") - 
np.testing.assert_equal(get_cds_time(days=days, msecs=msecs), expected) + res = get_cds_time(days=days, msecs=msecs) + np.testing.assert_equal(res, expected) + assert ".000000000" in res.__repr__() def test_pad_data_horizontally_bad_shape(self): """Test the error handling for the horizontal hrv padding.""" From df26a586eff71ed269e449dd8c59ef12a4b129a6 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 11 Dec 2023 18:34:06 +0200 Subject: [PATCH 0829/1416] Check for dtype instead of string representation --- satpy/tests/reader_tests/test_seviri_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index 42b79ea0c8..86f684bb5e 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -95,7 +95,7 @@ def test_get_cds_time_nanoseconds(self): expected = np.datetime64("2016-03-03 12:00:00.000") res = get_cds_time(days=days, msecs=msecs) np.testing.assert_equal(res, expected) - assert ".000000000" in res.__repr__() + assert res.dtype == np.dtype("datetime64[ns]") def test_pad_data_horizontally_bad_shape(self): """Test the error handling for the horizontal hrv padding.""" From 101c44ddf8b9da6101eaa13b0a902ae5574de3b4 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 11 Dec 2023 10:46:40 -0600 Subject: [PATCH 0830/1416] Add remaining JPSS satellite platform aliases to "mirs" reader --- satpy/readers/mirs.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/satpy/readers/mirs.py b/satpy/readers/mirs.py index 1ee0912b0f..34edd02739 100644 --- a/satpy/readers/mirs.py +++ b/satpy/readers/mirs.py @@ -50,6 +50,10 @@ PLATFORMS = {"n18": "NOAA-18", "n19": "NOAA-19", "np": "NOAA-19", + "n20": "NOAA-20", + "n21": "NOAA-21", + "n22": "NOAA-22", + "n23": "NOAA-23", "m2": "MetOp-A", "m1": "MetOp-B", "m3": "MetOp-C", @@ -60,11 +64,14 @@ "f17": "DMSP-F17", "f18": "DMSP-F18", "gpm": "GPM", - "n20": "NOAA-20", } SENSOR = {"n18": amsu, "n19": amsu, "n20": "atms", + "n21": "atms", + "n22": "atms", + "n23": "atms", + "n24": "atms", "np": amsu, "m1": amsu, "m2": amsu, From 943085f54b14dfadee4ecd591d9321a3aeaf61aa Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 11 Dec 2023 20:24:37 +0000 Subject: [PATCH 0831/1416] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.1.6 → v0.1.7](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.6...v0.1.7) - [github.com/PyCQA/bandit: 1.7.5 → 1.7.6](https://github.com/PyCQA/bandit/compare/1.7.5...1.7.6) - [github.com/pycqa/isort: 5.12.0 → 5.13.1](https://github.com/pycqa/isort/compare/5.12.0...5.13.1) --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 99e77cb56a..8036f793f5 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,7 +3,7 @@ fail_fast: false repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.1.6' + rev: 'v0.1.7' hooks: - id: ruff - repo: https://github.com/pre-commit/pre-commit-hooks @@ -14,7 +14,7 @@ repos: - id: check-yaml args: [--unsafe] - repo: https://github.com/PyCQA/bandit - rev: '1.7.5' # Update me! + rev: '1.7.6' # Update me! 
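+      # (editor's note) bandit still runs in this hook, but pre-commit.ci skips it;
+      # see the skip: [bandit] entry in the ci: block at the end of this file.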
hooks: - id: bandit args: [--ini, .bandit] @@ -29,7 +29,7 @@ repos: - types-requests args: ["--python-version", "3.9", "--ignore-missing-imports"] - repo: https://github.com/pycqa/isort - rev: 5.12.0 + rev: 5.13.1 hooks: - id: isort language_version: python3 From 8428da1e67a8befe4b79df0572426ddafb2d5585 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 11 Dec 2023 14:52:53 -0600 Subject: [PATCH 0832/1416] Change pre-commit update schedule to monthly --- .pre-commit-config.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8036f793f5..a398bd445f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -36,4 +36,5 @@ repos: ci: # To trigger manually, comment on a pull request with "pre-commit.ci autofix" autofix_prs: false + autoupdate_schedule: "monthly" skip: [bandit] From b8fea39fee9bfafc68039e1481784d2f9a440b3f Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 11 Dec 2023 14:53:24 -0600 Subject: [PATCH 0833/1416] Change dependabot to monthly updates --- .github/dependabot.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 90e05c40d0..95179b06c9 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -8,4 +8,4 @@ updates: - package-ecosystem: "github-actions" # See documentation for possible values directory: "/" # Location of package manifests schedule: - interval: "weekly" + interval: "monthly" From 8c7999539d095e2817ff4c734264b201c3e3e16c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 11 Dec 2023 15:29:56 -0600 Subject: [PATCH 0834/1416] Update MiRS reader coefficient files to newer version --- satpy/etc/readers/mirs.yaml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/satpy/etc/readers/mirs.yaml b/satpy/etc/readers/mirs.yaml index 4e70fbed2c..5ca15f66b0 100644 --- a/satpy/etc/readers/mirs.yaml +++ b/satpy/etc/readers/mirs.yaml @@ -8,13 +8,13 @@ reader: reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [amsu, amsu-mhs, atms, ssmis, gmi] data_files: - - url: "https://zenodo.org/record/4472664/files/limbcoef_atmsland_noaa20.txt" - known_hash: "08a3b7c1594a963610dd864b7ecd12f0ab486412d35185c2371d924dd92c5779" - - url: "https://zenodo.org/record/4472664/files/limbcoef_atmsland_snpp.txt" + - url: "https://zenodo.org/record/10357932/files/limbcoef_atmsland_noaa20.txt" + known_hash: "08deca15afe8638effac9e6ccb442c2c386f5444926129d30a250d5840264c1d" + - url: "https://zenodo.org/record/10357932/files/limbcoef_atmsland_snpp.txt" known_hash: "4b01543699792306711ef1699244e96186487e8a869e4ae42bf1f0e4d00fd063" - - url: "https://zenodo.org/record/4472664/files/limbcoef_atmssea_noaa20.txt" - known_hash: "6853d0536b11c31dc130ab12c61fa322a76d3823a4b8ff9a18a0ecedbf269a88" - - url: "https://zenodo.org/record/4472664/files/limbcoef_atmssea_snpp.txt" + - url: "https://zenodo.org/record/10357932/files/limbcoef_atmssea_noaa20.txt" + known_hash: "07cd7874ff3f069cc3d473bdd0d1d19880ef01ac8d75cb0212a3687c059557f4" + - url: "https://zenodo.org/record/10357932/files/limbcoef_atmssea_snpp.txt" known_hash: "d0f806051b80320e046bdae6a9b68616152bbf8c2dbf3667b9834459259c0d72" file_types: From 944b049aefe93d8f2fb2a0339ce34cb555cac853 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 12 Dec 2023 09:11:42 +0000 Subject: [PATCH 0835/1416] Add File handler for AMV --- satpy/readers/fci_l2_nc.py | 81 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 81 insertions(+) diff --git 
a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index c387326f89..3743a64480 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -401,3 +401,84 @@ def _modify_area_extent(stand_area_extent): area_extent = tuple([ll_x, ll_y, ur_x, ur_y]) return area_extent + +class FciL2NCAMVFileHandler(FciL2CommonFunctions, BaseFileHandler): + """Reader class for FCI L2 AMV products in NetCDF4 format.""" + def __init__(self, filename, filename_info, filetype_info): + """Open the NetCDF file with xarray and prepare for dataset reading.""" + super().__init__(filename, filename_info, filetype_info) + + # Use xarray's default netcdf4 engine to open the file + self.nc = xr.open_dataset( + self.filename, + decode_cf=True, + mask_and_scale=True, + chunks={ + "number_of_images": CHUNK_SIZE, + # 'number_of_height_estimates': CHUNK_SIZE, + "number_of_winds": CHUNK_SIZE + } + ) + @property + def spacecraft_name(self): + """Get spacecraft name.""" + try: + return self.nc.attrs["platform"] + except KeyError: + # TODO if the platform attribute is not valid, return a default value + logger.warning("Spacecraft name cannot be obtained from file content, use default value instead") + return "MTI1" + + @property + def sensor_name(self): + """Get instrument name.""" + try: + return self.nc.attrs["data_source"] + except KeyError: + # TODO if the data_source attribute is not valid, return a default value + logger.warning("Sensor cannot be obtained from file content, use default value instead") + return "FCI" + + def _get_global_attributes(self): + """Create a dictionary of global attributes to be added to all datasets. + + Returns: + dict: A dictionary of global attributes. + filename: name of the product file + spacecraft_name: name of the spacecraft + sensor: name of sensor + platform_name: name of the platform + + """ + attributes = { + "filename": self.filename, + "spacecraft_name": self._spacecraft_name, + "sensor": self._sensor_name, + "platform_name": self._spacecraft_name, + "channel":self.filename_info["channel"] + } + return attributes + + def get_dataset(self, dataset_id, dataset_info): + """Get dataset using the file_key in dataset_info.""" + var_key = dataset_info["file_key"] + logger.debug("Reading in file to get dataset with key %s.", var_key) + + try: + variable = self.nc[var_key] + except KeyError: + logger.warning("Could not find key %s in NetCDF file, no valid Dataset created", var_key) + return None + + # Manage the attributes of the dataset + variable.attrs.update(dataset_info) + variable.attrs.update(self._get_global_attributes()) + + return variable + + def __del__(self): + """Close the NetCDF file that may still be open.""" + try: + self.nc.close() + except AttributeError: + pass From 0fe0ae197bc6c5b6aeb4badb4fc4e935c895f2be Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 12 Dec 2023 09:34:18 +0000 Subject: [PATCH 0836/1416] Fix wrong naming for method --- satpy/readers/fci_l2_nc.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index 3743a64480..5d15d24528 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -452,9 +452,9 @@ def _get_global_attributes(self): """ attributes = { "filename": self.filename, - "spacecraft_name": self._spacecraft_name, - "sensor": self._sensor_name, - "platform_name": self._spacecraft_name, + "spacecraft_name": self.spacecraft_name, + "sensor": self.sensor_name, + "platform_name": self.spacecraft_name, 
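+            # (editor's note) "channel" is taken from the filename pattern rather
+            # than the file contents, so it is available before any data are read.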
"channel":self.filename_info["channel"] } return attributes From 9ca6d246d9e243a79354596004a8e3a44ad47add Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 12 Dec 2023 09:34:48 +0000 Subject: [PATCH 0837/1416] Fix reference to AMV file handler --- satpy/etc/readers/fci_l2_nc.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index 7c0724b6ac..f9c12849eb 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -65,12 +65,12 @@ file_types: - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-ASR-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' nc_fci_amvi: - file_reader: !!python/name:readers.fci_amv_l2_nc.FciAmvL2NCFileHandler + file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCAMVFileHandler file_patterns: - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-AMVI-{channel}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' nc_fci_amv: - file_reader: !!python/name:readers.fci_amv_l2_nc.FciAmvL2NCFileHandler + file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCAMVFileHandler file_patterns: - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-AMV-{channel}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' From f2b3238574bbf07906866d5c0be9c096731e0ccc Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 12 Dec 2023 09:43:42 +0000 Subject: [PATCH 0838/1416] Remove duplicate method del --- satpy/readers/fci_l2_nc.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index 5d15d24528..0948cd0e0a 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -475,10 +475,3 @@ def get_dataset(self, dataset_id, dataset_info): variable.attrs.update(self._get_global_attributes()) return variable - - def __del__(self): - """Close the NetCDF file that may still be open.""" - try: - self.nc.close() - except AttributeError: - pass From 0d2312a617fee7a9c6264c8f586700f35e569329 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Dec 2023 12:49:38 +0200 Subject: [PATCH 0839/1416] Use create_gradient_search_resampler() --- satpy/resample.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/resample.py b/satpy/resample.py index ddab90be82..f74b6c5ecd 100644 --- a/satpy/resample.py +++ b/satpy/resample.py @@ -154,7 +154,7 @@ from packaging import version from pyresample.ewa import DaskEWAResampler, LegacyDaskEWAResampler from pyresample.geometry import SwathDefinition -from pyresample.gradient import GradientSearchResampler +from 
pyresample.gradient import create_gradient_search_resampler from pyresample.resampler import BaseResampler as PRBaseResampler from satpy._config import config_search_paths, get_config_path @@ -1009,7 +1009,7 @@ def compute(self, data, fill_value=np.nan, categories=None, **kwargs): "nearest": KDTreeResampler, "bilinear": BilinearResampler, "native": NativeResampler, - "gradient_search": GradientSearchResampler, + "gradient_search": create_gradient_search_resampler, "bucket_avg": BucketAvg, "bucket_sum": BucketSum, "bucket_count": BucketCount, From 7c54a14e0d1d9f40a887de88f0204e2a80305441 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Dec 2023 12:50:21 +0200 Subject: [PATCH 0840/1416] Do not use proj dicts --- satpy/tests/test_resample.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/satpy/tests/test_resample.py b/satpy/tests/test_resample.py index 7135661578..9a0584e301 100644 --- a/satpy/tests/test_resample.py +++ b/satpy/tests/test_resample.py @@ -48,7 +48,6 @@ def get_test_data(input_shape=(100, 50), output_shape=(200, 100), output_proj=No """ import dask.array as da from pyresample.geometry import AreaDefinition, SwathDefinition - from pyresample.utils import proj4_str_to_dict from xarray import DataArray ds1 = DataArray(da.zeros(input_shape, chunks=85), dims=input_dims, @@ -62,16 +61,16 @@ def get_test_data(input_shape=(100, 50), output_shape=(200, 100), output_proj=No input_proj_str = ("+proj=geos +lon_0=-95.0 +h=35786023.0 +a=6378137.0 " "+b=6356752.31414 +sweep=x +units=m +no_defs") + crs = CRS(input_proj_str) source = AreaDefinition( "test_target", "test_target", "test_target", - proj4_str_to_dict(input_proj_str), + crs, input_shape[1], # width input_shape[0], # height (-1000., -1500., 1000., 1500.)) ds1.attrs["area"] = source - crs = CRS.from_string(input_proj_str) ds1 = ds1.assign_coords(crs=crs) ds2 = ds1.copy() @@ -95,7 +94,7 @@ def get_test_data(input_shape=(100, 50), output_shape=(200, 100), output_proj=No "test_target", "test_target", "test_target", - proj4_str_to_dict(output_proj_str), + CRS(output_proj_str), output_shape[1], # width output_shape[0], # height (-1000., -1500., 1000., 1500.), From c72cee28dacd0a1ab2d4e49843f8180885f87a50 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 12 Dec 2023 10:54:29 +0000 Subject: [PATCH 0841/1416] Add test for AMV reader --- satpy/tests/reader_tests/test_fci_l2_nc.py | 81 +++++++++++++++++++++- 1 file changed, 80 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_fci_l2_nc.py b/satpy/tests/reader_tests/test_fci_l2_nc.py index 22611a8469..fb5c725ffc 100644 --- a/satpy/tests/reader_tests/test_fci_l2_nc.py +++ b/satpy/tests/reader_tests/test_fci_l2_nc.py @@ -29,7 +29,7 @@ from netCDF4 import Dataset from pyresample import geometry -from satpy.readers.fci_l2_nc import FciL2NCFileHandler, FciL2NCSegmentFileHandler +from satpy.readers.fci_l2_nc import FciL2NCAMVFileHandler, FciL2NCFileHandler, FciL2NCSegmentFileHandler from satpy.tests.utils import make_dataid AREA_DEF = geometry.AreaDefinition( @@ -507,3 +507,82 @@ def test_byte_extraction(self): }) assert dataset.values == 0 + +class TestFciL2NCAMVFileHandler(unittest.TestCase): + """Test the FciL2NCFileHandler reader.""" + + def setUp(self): + """Set up the test by creating a test file and opening it with the reader.""" + # Easiest way to test the reader is to create a test netCDF file on the fly + # Create unique filenames to prevent race conditions when tests are run in parallel + self.test_file = str(uuid.uuid4()) + ".nc" 
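+        # (editor's note) A minimal stub is enough here: only the dimensions,
+        # variables and global attributes that the AMV handler actually touches
+        # need to be written into the temporary file.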
+ with Dataset(self.test_file, "w") as nc: + # Create dimensions + nc.createDimension("number_of_winds", 50000) + + # add global attributes + nc.data_source = "test_data_source" + nc.platform = "test_platform" + + # Add datasets + latitude = nc.createVariable("latitude", np.float32, dimensions=("number_of_winds",)) + latitude[:] = np.arange(50000) + + longitude = nc.createVariable("y", np.float32, dimensions=("number_of_winds",)) + longitude[:] = np.arange(50000) + + qi = nc.createVariable("product_quality", np.int8) + qi[:] = 99. + + test_dataset = nc.createVariable("test_one_layer", np.float32, + dimensions="number_of_winds") + test_dataset[:] = np.ones((50000)) + test_dataset.test_attr = "attr" + test_dataset.units = "test_units" + + mtg_geos_projection = nc.createVariable("mtg_geos_projection", int, dimensions=()) + mtg_geos_projection.longitude_of_projection_origin = 0.0 + mtg_geos_projection.semi_major_axis = 6378137. + mtg_geos_projection.inverse_flattening = 298.257223563 + mtg_geos_projection.perspective_point_height = 35786400. + + self.fh = FciL2NCAMVFileHandler(filename=self.test_file, + filename_info={"channel":"test_channel"}, + filetype_info={}) + + def tearDown(self): + """Remove the previously created test file.""" + # First delete the file handler, forcing the file to be closed if still open + del self.fh + # Then we can safely remove the file from the system + with suppress(OSError): + os.remove(self.test_file) + + def test_all_basic(self): + """Test all basic functionalities.""" + assert self.fh.spacecraft_name == "test_platform" + assert self.fh.sensor_name == "test_data_source" + assert self.fh.ssp_lon == 0.0 + + global_attributes = self.fh._get_global_attributes() + expected_global_attributes = { + "filename": self.test_file, + "spacecraft_name": "test_platform", + "sensor": "test_data_source", + "platform_name": "test_platform", + "channel": "test_channel" + } + assert global_attributes == expected_global_attributes + + def test_dataset(self): + """Test the correct execution of the get_dataset function with a valid file_key.""" + dataset = self.fh.get_dataset(make_dataid(name="test_dataset", resolution=2000), + {"name": "test_dataset", + "file_key": "test_dataset", + "fill_value": -999, + "file_type": "test_file_type"}) + + np.testing.assert_allclose(dataset.values, np.ones((50000))) + assert dataset.attrs["test_attr"] == "attr" + assert dataset.attrs["units"] == "test_units" + assert dataset.attrs["fill_value"] == -999 From a7f93eb9eecf5919bb42071251c3092864a7589b Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Dec 2023 13:12:34 +0200 Subject: [PATCH 0842/1416] Remove unnecessary (since Pyresample 1.18) mask_all_nan/skipna handling --- satpy/resample.py | 47 +++---------------------- satpy/tests/test_resample.py | 68 ------------------------------------ 2 files changed, 4 insertions(+), 111 deletions(-) diff --git a/satpy/resample.py b/satpy/resample.py index f74b6c5ecd..336e3fec11 100644 --- a/satpy/resample.py +++ b/satpy/resample.py @@ -148,10 +148,8 @@ import dask.array as da import numpy as np -import pyresample import xarray as xr import zarr -from packaging import version from pyresample.ewa import DaskEWAResampler, LegacyDaskEWAResampler from pyresample.geometry import SwathDefinition from pyresample.gradient import create_gradient_search_resampler @@ -177,8 +175,6 @@ resamplers_cache: "WeakValueDictionary[tuple, object]" = WeakValueDictionary() -PR_USE_SKIPNA = version.parse(pyresample.__version__) > version.parse("1.17.0") - def 
hash_dict(the_dict, the_hash=None): """Calculate a hash for a dictionary.""" @@ -773,33 +769,6 @@ def _get_replicated_chunk_sizes(d_arr, repeats): return tuple(repeated_chunks) -def _get_arg_to_pass_for_skipna_handling(**kwargs): - """Determine if skipna can be passed to the compute functions for the average and sum bucket resampler.""" - # FIXME this can be removed once Pyresample 1.18.0 is a Satpy requirement - - if PR_USE_SKIPNA: - if "mask_all_nan" in kwargs: - warnings.warn( - "Argument mask_all_nan is deprecated. Please use skipna for missing values handling. " - "Continuing with default skipna=True, if not provided differently.", - DeprecationWarning, - stacklevel=3 - ) - kwargs.pop("mask_all_nan") - else: - if "mask_all_nan" in kwargs: - warnings.warn( - "Argument mask_all_nan is deprecated." - "Please update Pyresample and use skipna for missing values handling.", - DeprecationWarning, - stacklevel=3 - ) - kwargs.setdefault("mask_all_nan", False) - kwargs.pop("skipna") - - return kwargs - - class BucketResamplerBase(PRBaseResampler): """Base class for bucket resampling which implements averaging.""" @@ -832,11 +801,6 @@ def resample(self, data, **kwargs): # noqa: D417 Returns (xarray.DataArray): Data resampled to the target area """ - if not PR_USE_SKIPNA and "skipna" in kwargs: - raise ValueError("You are trying to set the skipna argument but you are using an old version of" - " Pyresample that does not support it." - "Please update Pyresample to 1.18.0 or higher to be able to use this argument.") - self.precompute(**kwargs) attrs = data.attrs.copy() data_arr = data.data @@ -910,17 +874,16 @@ def compute(self, data, fill_value=np.nan, skipna=True, **kwargs): # noqa: D417 Returns: dask.Array """ - kwargs = _get_arg_to_pass_for_skipna_handling(skipna=skipna, **kwargs) - results = [] if data.ndim == 3: for i in range(data.shape[0]): res = self.resampler.get_average(data[i, :, :], fill_value=fill_value, + skipna=skipna, **kwargs) results.append(res) else: - res = self.resampler.get_average(data, fill_value=fill_value, + res = self.resampler.get_average(data, fill_value=fill_value, skipna=skipna, **kwargs) results.append(res) @@ -948,16 +911,14 @@ class BucketSum(BucketResamplerBase): def compute(self, data, skipna=True, **kwargs): """Call the resampling.""" - kwargs = _get_arg_to_pass_for_skipna_handling(skipna=skipna, **kwargs) - results = [] if data.ndim == 3: for i in range(data.shape[0]): - res = self.resampler.get_sum(data[i, :, :], + res = self.resampler.get_sum(data[i, :, :], skipna=skipna, **kwargs) results.append(res) else: - res = self.resampler.get_sum(data, **kwargs) + res = self.resampler.get_sum(data, skipna=skipna, **kwargs) results.append(res) return da.stack(results) diff --git a/satpy/tests/test_resample.py b/satpy/tests/test_resample.py index 9a0584e301..d0bbbe2a46 100644 --- a/satpy/tests/test_resample.py +++ b/satpy/tests/test_resample.py @@ -581,17 +581,10 @@ def test_compute(self): res = self._compute_mocked_bucket_avg(data, return_data=data[0, :, :], fill_value=2) assert res.shape == (3, 5, 5) - @mock.patch("satpy.resample.PR_USE_SKIPNA", True) def test_compute_and_use_skipna_handling(self): """Test bucket resampler computation and use skipna handling.""" data = da.ones((5,)) - self._compute_mocked_bucket_avg(data, fill_value=2, mask_all_nan=True) - self.bucket.resampler.get_average.assert_called_once_with( - data, - fill_value=2, - skipna=True) - self._compute_mocked_bucket_avg(data, fill_value=2, skipna=False) 
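        # (editor's note) the keyword must reach pyresample's get_average unchanged: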
self.bucket.resampler.get_average.assert_called_once_with( data, @@ -604,35 +597,6 @@ def test_compute_and_use_skipna_handling(self): fill_value=2, skipna=True) - @mock.patch("satpy.resample.PR_USE_SKIPNA", False) - def test_compute_and_not_use_skipna_handling(self): - """Test bucket resampler computation and not use skipna handling.""" - data = da.ones((5,)) - - self._compute_mocked_bucket_avg(data, fill_value=2, mask_all_nan=True) - self.bucket.resampler.get_average.assert_called_once_with( - data, - fill_value=2, - mask_all_nan=True) - - self._compute_mocked_bucket_avg(data, fill_value=2, mask_all_nan=False) - self.bucket.resampler.get_average.assert_called_once_with( - data, - fill_value=2, - mask_all_nan=False) - - self._compute_mocked_bucket_avg(data, fill_value=2) - self.bucket.resampler.get_average.assert_called_once_with( - data, - fill_value=2, - mask_all_nan=False) - - self._compute_mocked_bucket_avg(data, fill_value=2, skipna=True) - self.bucket.resampler.get_average.assert_called_once_with( - data, - fill_value=2, - mask_all_nan=False) - @mock.patch("pyresample.bucket.BucketResampler") def test_resample(self, pyresample_bucket): """Test bucket resamplers resample method.""" @@ -712,16 +676,10 @@ def test_compute(self): res = self._compute_mocked_bucket_sum(data, return_data=data[0, :, :]) assert res.shape == (3, 5, 5) - @mock.patch("satpy.resample.PR_USE_SKIPNA", True) def test_compute_and_use_skipna_handling(self): """Test bucket resampler computation and use skipna handling.""" data = da.ones((5,)) - self._compute_mocked_bucket_sum(data, mask_all_nan=True) - self.bucket.resampler.get_sum.assert_called_once_with( - data, - skipna=True) - self._compute_mocked_bucket_sum(data, skipna=False) self.bucket.resampler.get_sum.assert_called_once_with( data, @@ -732,32 +690,6 @@ def test_compute_and_use_skipna_handling(self): data, skipna=True) - @mock.patch("satpy.resample.PR_USE_SKIPNA", False) - def test_compute_and_not_use_skipna_handling(self): - """Test bucket resampler computation and not use skipna handling.""" - data = da.ones((5,)) - - self._compute_mocked_bucket_sum(data, mask_all_nan=True) - self.bucket.resampler.get_sum.assert_called_once_with( - data, - mask_all_nan=True) - - self._compute_mocked_bucket_sum(data, mask_all_nan=False) - self.bucket.resampler.get_sum.assert_called_once_with( - data, - mask_all_nan=False) - - self._compute_mocked_bucket_sum(data) - self.bucket.resampler.get_sum.assert_called_once_with( - data, - mask_all_nan=False) - - self._compute_mocked_bucket_sum(data, fill_value=2, skipna=True) - self.bucket.resampler.get_sum.assert_called_once_with( - data, - fill_value=2, - mask_all_nan=False) - class TestBucketCount(unittest.TestCase): """Test the count bucket resampler.""" From 5f00f09efe3412b06b32d481a6dca26c56e42e5d Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Dec 2023 13:17:32 +0200 Subject: [PATCH 0843/1416] Catch re-chunking warning --- satpy/tests/test_resample.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/satpy/tests/test_resample.py b/satpy/tests/test_resample.py index d0bbbe2a46..11a9644eb4 100644 --- a/satpy/tests/test_resample.py +++ b/satpy/tests/test_resample.py @@ -247,8 +247,12 @@ def test_expand_reduce_agg_rechunk(self): into that chunk size. """ + from satpy.utils import PerformanceWarning + d_arr = da.zeros((6, 20), chunks=3) - new_data = NativeResampler._expand_reduce(d_arr, {0: 0.5, 1: 0.5}) + text = "Array chunk size is not divisible by aggregation factor. 
Re-chunking to continue native resampling." + with pytest.warns(PerformanceWarning, match=text): + new_data = NativeResampler._expand_reduce(d_arr, {0: 0.5, 1: 0.5}) assert new_data.shape == (3, 10) def test_expand_reduce_numpy(self): From 084d5031d279c93216d740dad1509182767f44b7 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 12 Dec 2023 11:39:07 +0000 Subject: [PATCH 0844/1416] Update AHI HSD reader to correctly handle singleton arrays. --- satpy/readers/ahi_hsd.py | 30 ++++++++++++------------ satpy/tests/reader_tests/test_ahi_hsd.py | 12 +++++----- 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index 8e14d049b9..889b858ff5 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -419,12 +419,12 @@ def end_time(self): @property def observation_start_time(self): """Get the observation start time.""" - return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_start_time"])) + return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_start_time"][0])) @property def observation_end_time(self): """Get the observation end time.""" - return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_end_time"])) + return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_end_time"][0])) @property def nominal_start_time(self): @@ -498,8 +498,8 @@ def _get_area_def(self): pdict["h"] = float(self.proj_info["distance_from_earth_center"] * 1000 - pdict["a"]) pdict["b"] = float(self.proj_info["earth_polar_radius"] * 1000) pdict["ssp_lon"] = float(self.proj_info["sub_lon"]) - pdict["nlines"] = int(self.data_info["number_of_lines"]) - pdict["ncols"] = int(self.data_info["number_of_columns"]) + pdict["nlines"] = int(self.data_info["number_of_lines"][0]) + pdict["ncols"] = int(self.data_info["number_of_columns"][0]) pdict["scandir"] = "N2S" pdict["loff"] = pdict["loff"] + (self.segment_number * pdict["nlines"]) @@ -528,19 +528,19 @@ def _read_header(self, fp_): fpos = 0 header["block1"] = np.fromfile( fp_, dtype=_BASIC_INFO_TYPE, count=1) - fpos = fpos + int(header["block1"]["blocklength"]) + fpos = fpos + int(header["block1"]["blocklength"][0]) self._check_fpos(fp_, fpos, 0, "block1") fp_.seek(fpos, 0) header["block2"] = np.fromfile(fp_, dtype=_DATA_INFO_TYPE, count=1) - fpos = fpos + int(header["block2"]["blocklength"]) + fpos = fpos + int(header["block2"]["blocklength"][0]) self._check_fpos(fp_, fpos, 0, "block2") fp_.seek(fpos, 0) header["block3"] = np.fromfile(fp_, dtype=_PROJ_INFO_TYPE, count=1) - fpos = fpos + int(header["block3"]["blocklength"]) + fpos = fpos + int(header["block3"]["blocklength"][0]) self._check_fpos(fp_, fpos, 0, "block3") fp_.seek(fpos, 0) header["block4"] = np.fromfile(fp_, dtype=_NAV_INFO_TYPE, count=1) - fpos = fpos + int(header["block4"]["blocklength"]) + fpos = fpos + int(header["block4"]["blocklength"][0]) self._check_fpos(fp_, fpos, 0, "block4") fp_.seek(fpos, 0) header["block5"] = np.fromfile(fp_, dtype=_CAL_INFO_TYPE, count=1) @@ -553,7 +553,7 @@ def _read_header(self, fp_): cal = np.fromfile(fp_, dtype=_VISCAL_INFO_TYPE, count=1) else: cal = np.fromfile(fp_, dtype=_IRCAL_INFO_TYPE, count=1) - fpos = fpos + int(header["block5"]["blocklength"]) + fpos = fpos + int(header["block5"]["blocklength"][0]) self._check_fpos(fp_, fpos, 0, "block5") fp_.seek(fpos, 0) @@ -561,12 +561,12 @@ def _read_header(self, fp_): header["block6"] = np.fromfile( fp_, dtype=_INTER_CALIBRATION_INFO_TYPE, count=1) - fpos 
= fpos + int(header["block6"]["blocklength"]) + fpos = fpos + int(header["block6"]["blocklength"][0]) self._check_fpos(fp_, fpos, 0, "block6") fp_.seek(fpos, 0) header["block7"] = np.fromfile( fp_, dtype=_SEGMENT_INFO_TYPE, count=1) - fpos = fpos + int(header["block7"]["blocklength"]) + fpos = fpos + int(header["block7"]["blocklength"][0]) self._check_fpos(fp_, fpos, 0, "block7") fp_.seek(fpos, 0) header["block8"] = np.fromfile( @@ -576,7 +576,7 @@ def _read_header(self, fp_): corrections = [] for _i in range(ncorrs): corrections.append(np.fromfile(fp_, dtype=_NAVIGATION_CORRECTION_SUBINFO_TYPE, count=1)) - fpos = fpos + int(header["block8"]["blocklength"]) + fpos = fpos + int(header["block8"]["blocklength"][0]) self._check_fpos(fp_, fpos, 40, "block8") fp_.seek(fpos, 0) header["navigation_corrections"] = corrections @@ -591,7 +591,7 @@ def _read_header(self, fp_): dtype=_OBSERVATION_LINE_TIME_INFO_TYPE, count=1)) header["observation_time_information"] = lines_and_times - fpos = fpos + int(header["block9"]["blocklength"]) + fpos = fpos + int(header["block9"]["blocklength"][0]) self._check_fpos(fp_, fpos, 40, "block9") fp_.seek(fpos, 0) @@ -604,12 +604,12 @@ def _read_header(self, fp_): for _i in range(num_err_info_data): err_info_data.append(np.fromfile(fp_, dtype=_ERROR_LINE_INFO_TYPE, count=1)) header["error_information_data"] = err_info_data - fpos = fpos + int(header["block10"]["blocklength"]) + fpos = fpos + int(header["block10"]["blocklength"][0]) self._check_fpos(fp_, fpos, 40, "block10") fp_.seek(fpos, 0) header["block11"] = np.fromfile(fp_, dtype=_SPARE_TYPE, count=1) - fpos = fpos + int(header["block11"]["blocklength"]) + fpos = fpos + int(header["block11"]["blocklength"][0]) self._check_fpos(fp_, fpos, 0, "block11") fp_.seek(fpos, 0) diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 9338440246..2075b88947 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -48,8 +48,8 @@ "compression_flag_for_data": 0, "hblock_number": 2, "number_of_bits_per_pixel": 16, - "number_of_columns": 11000, - "number_of_lines": 1100, + "number_of_columns": [11000], + "number_of_lines": [1100], "spare": "", } FAKE_PROJ_INFO: InfoDict = { @@ -135,8 +135,8 @@ def test_region(self, fromfile, np2str): "compression_flag_for_data": 0, "hblock_number": 2, "number_of_bits_per_pixel": 16, - "number_of_columns": 1000, - "number_of_lines": 1000, + "number_of_columns": [1000], + "number_of_lines": [1000], "spare": ""} area_def = fh.get_area_def(None) @@ -183,8 +183,8 @@ def test_segment(self, fromfile, np2str): "compression_flag_for_data": 0, "hblock_number": 2, "number_of_bits_per_pixel": 16, - "number_of_columns": 11000, - "number_of_lines": 1100, + "number_of_columns": [11000], + "number_of_lines": [1100], "spare": ""} area_def = fh.get_area_def(None) From b00a6d93bb27283f440fb593154f612ec2fe4a9f Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 12 Dec 2023 12:06:46 +0000 Subject: [PATCH 0845/1416] Fix test for get_dataset --- satpy/tests/reader_tests/test_fci_l2_nc.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_fci_l2_nc.py b/satpy/tests/reader_tests/test_fci_l2_nc.py index fb5c725ffc..8eff51e344 100644 --- a/satpy/tests/reader_tests/test_fci_l2_nc.py +++ b/satpy/tests/reader_tests/test_fci_l2_nc.py @@ -534,9 +534,9 @@ def setUp(self): qi = nc.createVariable("product_quality", np.int8) qi[:] = 99. 
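        # (editor's note) the stub variable is renamed below so that its name
        # matches the "test_dataset" file_key passed to get_dataset in the tests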
- test_dataset = nc.createVariable("test_one_layer", np.float32, + test_dataset = nc.createVariable("test_dataset", np.float32, dimensions="number_of_winds") - test_dataset[:] = np.ones((50000)) + test_dataset[:] = np.ones(50000) test_dataset.test_attr = "attr" test_dataset.units = "test_units" @@ -581,8 +581,7 @@ def test_dataset(self): "file_key": "test_dataset", "fill_value": -999, "file_type": "test_file_type"}) - - np.testing.assert_allclose(dataset.values, np.ones((50000))) + np.testing.assert_allclose(dataset.values, np.ones(50000)) assert dataset.attrs["test_attr"] == "attr" assert dataset.attrs["units"] == "test_units" assert dataset.attrs["fill_value"] == -999 From 610d365c247393fe50b7dfce7a675eeab634bd22 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Dec 2023 14:07:50 +0200 Subject: [PATCH 0846/1416] Suppress division-by-zero warning in RatioSharpenedRGB --- satpy/composites/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 9295f94dc7..71b9bd0605 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1180,7 +1180,8 @@ def _combined_sharpened_info(self, info, new_attrs): def _get_sharpening_ratio(high_res, low_res): - ratio = high_res / low_res + with np.errstate(divide="ignore"): + ratio = high_res / low_res # make ratio a no-op (multiply by 1) where the ratio is NaN, infinity, # or it is negative. ratio[~np.isfinite(ratio) | (ratio < 0)] = 1.0 From 51ab6b7f00ca5e9e6073f55cc8529051db881922 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Dec 2023 14:33:28 +0200 Subject: [PATCH 0847/1416] Use ds.drop_vars(), adjust XArray version requirement to match --- satpy/composites/__init__.py | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 71b9bd0605..17d4c00075 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -204,7 +204,7 @@ def drop_coordinates(self, data_arrays): if coord not in ds.dims and any([neglible in coord for neglible in NEGLIGIBLE_COORDS])] if drop: - new_arrays.append(ds.drop(drop)) + new_arrays.append(ds.drop_vars(drop)) else: new_arrays.append(ds) diff --git a/setup.py b/setup.py index cd1c43422e..a9bf050786 100644 --- a/setup.py +++ b/setup.py @@ -23,7 +23,7 @@ from setuptools import find_packages, setup requires = ["numpy >=1.21", "pillow", "pyresample >=1.24.0", "trollsift", - "trollimage >=1.20", "pykdtree", "pyyaml >=5.1", "xarray >=0.10.1, !=0.13.0", + "trollimage >=1.20", "pykdtree", "pyyaml >=5.1", "xarray >=0.14.1", "dask[array] >=0.17.1", "pyproj>=2.2", "zarr", "donfig", "appdirs", "packaging", "pooch", "pyorbital"] From 5c11a5be684a517d4dada08e993708b4aadc89bf Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Dec 2023 14:46:26 +0200 Subject: [PATCH 0848/1416] Remove deprecated GreenCorrector --- satpy/composites/spectral.py | 21 ------------------- satpy/tests/compositor_tests/test_spectral.py | 14 +------------ 2 files changed, 1 insertion(+), 34 deletions(-) diff --git a/satpy/composites/spectral.py b/satpy/composites/spectral.py index 448d7cb26a..d656bab7ec 100644 --- a/satpy/composites/spectral.py +++ b/satpy/composites/spectral.py @@ -16,7 +16,6 @@ """Composite classes for spectral adjustments.""" import logging -import warnings from satpy.composites import GenericCompositor from satpy.dataset import combine_metadata @@ -199,23 +198,3 @@ def _compute_blend_fraction(self, 
ndvi): + self.limits[0] return fraction - - -class GreenCorrector(SpectralBlender): - """Previous class used to blend channels for green band corrections. - - This method has been refactored to make it more generic. The replacement class is 'SpectralBlender' which computes - a weighted average based on N number of channels and N number of corresponding weights/fractions. A new class - called 'HybridGreen' has been created, which performs a correction of green bands centered at 0.51 microns - following Miller et al. (2016, :doi:`10.1175/BAMS-D-15-00154.2`) in order to improve true color imagery. - """ - - def __init__(self, *args, fractions=(0.85, 0.15), **kwargs): - """Set default keyword argument values.""" - warnings.warn( - "'GreenCorrector' is deprecated, use 'SpectralBlender' instead, or 'HybridGreen' for hybrid green" - " correction following Miller et al. (2016).", - UserWarning, - stacklevel=2 - ) - super().__init__(fractions=fractions, *args, **kwargs) diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index e46cff4d0c..c7f07c0454 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -21,7 +21,7 @@ import pytest import xarray as xr -from satpy.composites.spectral import GreenCorrector, HybridGreen, NDVIHybridGreen, SpectralBlender +from satpy.composites.spectral import HybridGreen, NDVIHybridGreen, SpectralBlender from satpy.tests.utils import CustomScheduler @@ -67,18 +67,6 @@ def test_hybrid_green(self): data = res.compute() np.testing.assert_allclose(data, 0.23) - def test_green_corrector(self): - """Test the deprecated class for green corrections.""" - comp = GreenCorrector("blended_channel", fractions=(0.85, 0.15), prerequisites=(0.51, 0.85), - standard_name="toa_bidirectional_reflectance") - res = comp((self.c01, self.c03)) - assert isinstance(res, xr.DataArray) - assert isinstance(res.data, da.Array) - assert res.attrs["name"] == "blended_channel" - assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" - data = res.compute() - np.testing.assert_allclose(data, 0.23) - class TestNdviHybridGreenCompositor: """Test NDVI-weighted hybrid green correction of green band.""" From 934bdb30945189d5d9e65757767ef860c3b91bfb Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 12 Dec 2023 13:04:55 +0000 Subject: [PATCH 0849/1416] Remove duplicate methods --- satpy/readers/fci_l2_nc.py | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index 0948cd0e0a..03f8e94f55 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -419,25 +419,6 @@ def __init__(self, filename, filename_info, filetype_info): "number_of_winds": CHUNK_SIZE } ) - @property - def spacecraft_name(self): - """Get spacecraft name.""" - try: - return self.nc.attrs["platform"] - except KeyError: - # TODO if the platform attribute is not valid, return a default value - logger.warning("Spacecraft name cannot be obtained from file content, use default value instead") - return "MTI1" - - @property - def sensor_name(self): - """Get instrument name.""" - try: - return self.nc.attrs["data_source"] - except KeyError: - # TODO if the data_source attribute is not valid, return a default value - logger.warning("Sensor cannot be obtained from file content, use default value instead") - return "FCI" def _get_global_attributes(self): """Create a dictionary of global attributes to be added to all datasets. 
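The GreenCorrector class removed in PATCH 0848 above was a pure deprecation shim: a subclass that warns and then delegates to its replacement. A minimal sketch of that pattern, with a hypothetical OldBlender standing in for the deprecated name (the stand-in SpectralBlender here only carries the pieces the shim needs):

    import warnings

    class SpectralBlender:
        """Stand-in for the replacement class (assumed API)."""

        def __init__(self, *args, fractions=(), **kwargs):
            self.fractions = fractions

    class OldBlender(SpectralBlender):
        """Deprecated alias kept for backwards compatibility."""

        def __init__(self, *args, fractions=(0.85, 0.15), **kwargs):
            warnings.warn(
                "'OldBlender' is deprecated, use 'SpectralBlender' instead.",
                UserWarning,
                stacklevel=2,  # attribute the warning to the caller, not the shim
            )
            super().__init__(*args, fractions=fractions, **kwargs)

Once downstream code has had a release or two to migrate, the shim, its re-export imports, and any configuration entries that trigger it can be deleted outright, which is what the surrounding patches do.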
From 3d6d561986b433264f918aed3646387ce9fa676a Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Tue, 12 Dec 2023 13:07:40 +0000 Subject: [PATCH 0850/1416] Add test for invalid dataset --- satpy/tests/reader_tests/test_fci_l2_nc.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/satpy/tests/reader_tests/test_fci_l2_nc.py b/satpy/tests/reader_tests/test_fci_l2_nc.py index 8eff51e344..7853cba900 100644 --- a/satpy/tests/reader_tests/test_fci_l2_nc.py +++ b/satpy/tests/reader_tests/test_fci_l2_nc.py @@ -585,3 +585,12 @@ def test_dataset(self): assert dataset.attrs["test_attr"] == "attr" assert dataset.attrs["units"] == "test_units" assert dataset.attrs["fill_value"] == -999 + + def test_dataset_with_invalid_filekey(self): + """Test the correct execution of the get_dataset function with an invalid file_key.""" + invalid_dataset = self.fh.get_dataset(make_dataid(name="test_invalid", resolution=2000), + {"name": "test_invalid", + "file_key": "test_invalid", + "fill_value": -999, + "file_type": "test_file_type"}) + assert invalid_dataset is None From bb56a486200b6436d00b9922f670ec5d0f40c3d0 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Dec 2023 15:11:20 +0200 Subject: [PATCH 0851/1416] Remove GreenCorrector import --- satpy/composites/ahi.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/satpy/composites/ahi.py b/satpy/composites/ahi.py index bb96a94581..4826f84820 100644 --- a/satpy/composites/ahi.py +++ b/satpy/composites/ahi.py @@ -14,7 +14,3 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Composite classes for AHI.""" - -# The green corrector used to be defined here, but was moved to spectral.py -# in Satpy 0.38 because it also applies to FCI. -from .spectral import GreenCorrector # noqa: F401 From f60b188938d16c0d365eaec8d4c961f8ec0a1a62 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 12 Dec 2023 13:16:53 +0000 Subject: [PATCH 0852/1416] Use `item` to select singleton array elements in AHI HSD. --- satpy/readers/ahi_hsd.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index 889b858ff5..cf3fe018f5 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -419,12 +419,12 @@ def end_time(self): @property def observation_start_time(self): """Get the observation start time.""" - return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_start_time"][0])) + return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_start_time"].item())) @property def observation_end_time(self): """Get the observation end time.""" - return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_end_time"][0])) + return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_end_time"].item())) @property def nominal_start_time(self): From 2469168fba6d8f5557f01bafc5ee0c67a3a73bb5 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Dec 2023 15:19:30 +0200 Subject: [PATCH 0853/1416] Remove deprecated AHI composites --- satpy/etc/composites/ahi.yaml | 40 ----------------------------------- 1 file changed, 40 deletions(-) diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index cda79a5fac..9c585d53de 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -15,46 +15,6 @@ modifiers: - solar_zenith_angle composites: - green: - deprecation_warning: "'green' is a deprecated composite.
Use the equivalent 'hybrid_green' instead." - compositor: !!python/name:satpy.composites.spectral.HybridGreen - # FUTURE: Set a wavelength...see what happens. Dependency finding - # probably wouldn't work. - prerequisites: - # should we be using the most corrected or least corrected inputs? - # what happens if something requests more modifiers on top of this? - - wavelength: 0.51 - modifiers: [sunz_corrected, rayleigh_corrected] - - wavelength: 0.85 - modifiers: [sunz_corrected] - standard_name: toa_bidirectional_reflectance - - green_true_color_reproduction: - # JMA True Color Reproduction green band - # http://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html - deprecation_warning: "'green_true_color_reproduction' is a deprecated composite. Use the equivalent 'reproduced_green' instead." - compositor: !!python/name:satpy.composites.spectral.SpectralBlender - fractions: [0.6321, 0.2928, 0.0751] - prerequisites: - - name: B02 - modifiers: [sunz_corrected, rayleigh_corrected] - - name: B03 - modifiers: [sunz_corrected, rayleigh_corrected] - - name: B04 - modifiers: [sunz_corrected] - standard_name: none - - green_nocorr: - deprecation_warning: "'green_nocorr' is a deprecated composite. Use the equivalent 'hybrid_green_nocorr' instead." - compositor: !!python/name:satpy.composites.spectral.HybridGreen - # FUTURE: Set a wavelength...see what happens. Dependency finding - # probably wouldn't work. - prerequisites: - # should we be using the most corrected or least corrected inputs? - # what happens if something requests more modifiers on top of this? - - wavelength: 0.51 - - wavelength: 0.85 - standard_name: toa_reflectance hybrid_green: compositor: !!python/name:satpy.composites.spectral.HybridGreen From 8f3704bcfd8a5ec4212d9350eafb70d87ad09e50 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Dec 2023 16:13:22 +0200 Subject: [PATCH 0854/1416] Use importlib.resources to read packaged data --- satpy/readers/mirs.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/satpy/readers/mirs.py b/satpy/readers/mirs.py index 34edd02739..5f68af2d6c 100644 --- a/satpy/readers/mirs.py +++ b/satpy/readers/mirs.py @@ -18,6 +18,7 @@ """Interface to MiRS product.""" import datetime +import importlib import logging import os from collections import Counter @@ -34,13 +35,12 @@ LOG = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) -try: - # try getting setuptools/distribute's version of resource retrieval first - from pkg_resources import resource_string as get_resource_string -except ImportError: - from pkgutil import get_data as get_resource_string # type: ignore -# +def get_resource_string(mod_part, file_part): + """Read resource string.""" + ref = importlib.resources.files(mod_part).joinpath(file_part) + return ref.read_bytes() + # 'Polo' variable in MiRS files use these values for H/V polarization POLO_V = 2 From 2469168fba6d8f5557f01bafc5ee0c67a3a73bb5 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 12 Dec 2023 14:26:37 +0000 Subject: [PATCH 0855/1416] Use `item` to select singleton array elements in AHI HSD. 
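This extends PATCH 0852 above across the whole header-reading code: np.fromfile(..., count=1) yields one-record arrays, and newer NumPy releases deprecate coercing arrays with ndim > 0 straight to Python scalars, so the single element has to be selected explicitly. A small sketch of the difference (the header dtype here is hypothetical):

    import numpy as np

    # One-record structured array, as np.fromfile(fp, dtype=..., count=1) returns.
    block = np.array([(282,)], dtype=[("blocklength", "<u2")])

    # Indexing with [0] yields a NumPy scalar and silently takes the first
    # element; .item() yields a plain Python int and raises unless size == 1.
    assert int(block["blocklength"][0]) == 282
    assert block["blocklength"].item() == 282
    assert isinstance(block["blocklength"].item(), int)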
--- satpy/readers/ahi_hsd.py | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index cf3fe018f5..313e5ccab5 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -498,8 +498,8 @@ def _get_area_def(self): pdict["h"] = float(self.proj_info["distance_from_earth_center"] * 1000 - pdict["a"]) pdict["b"] = float(self.proj_info["earth_polar_radius"] * 1000) pdict["ssp_lon"] = float(self.proj_info["sub_lon"]) - pdict["nlines"] = int(self.data_info["number_of_lines"][0]) - pdict["ncols"] = int(self.data_info["number_of_columns"][0]) + pdict["nlines"] = int(self.data_info["number_of_lines"].item()) + pdict["ncols"] = int(self.data_info["number_of_columns"].item()) pdict["scandir"] = "N2S" pdict["loff"] = pdict["loff"] + (self.segment_number * pdict["nlines"]) @@ -528,19 +528,19 @@ def _read_header(self, fp_): fpos = 0 header["block1"] = np.fromfile( fp_, dtype=_BASIC_INFO_TYPE, count=1) - fpos = fpos + int(header["block1"]["blocklength"][0]) + fpos = fpos + int(header["block1"]["blocklength"].item()) self._check_fpos(fp_, fpos, 0, "block1") fp_.seek(fpos, 0) header["block2"] = np.fromfile(fp_, dtype=_DATA_INFO_TYPE, count=1) - fpos = fpos + int(header["block2"]["blocklength"][0]) + fpos = fpos + int(header["block2"]["blocklength"].item()) self._check_fpos(fp_, fpos, 0, "block2") fp_.seek(fpos, 0) header["block3"] = np.fromfile(fp_, dtype=_PROJ_INFO_TYPE, count=1) - fpos = fpos + int(header["block3"]["blocklength"][0]) + fpos = fpos + int(header["block3"]["blocklength"].item()) self._check_fpos(fp_, fpos, 0, "block3") fp_.seek(fpos, 0) header["block4"] = np.fromfile(fp_, dtype=_NAV_INFO_TYPE, count=1) - fpos = fpos + int(header["block4"]["blocklength"][0]) + fpos = fpos + int(header["block4"]["blocklength"].item()) self._check_fpos(fp_, fpos, 0, "block4") fp_.seek(fpos, 0) header["block5"] = np.fromfile(fp_, dtype=_CAL_INFO_TYPE, count=1) @@ -553,7 +553,7 @@ def _read_header(self, fp_): cal = np.fromfile(fp_, dtype=_VISCAL_INFO_TYPE, count=1) else: cal = np.fromfile(fp_, dtype=_IRCAL_INFO_TYPE, count=1) - fpos = fpos + int(header["block5"]["blocklength"][0]) + fpos = fpos + int(header["block5"]["blocklength"].item()) self._check_fpos(fp_, fpos, 0, "block5") fp_.seek(fpos, 0) @@ -561,12 +561,12 @@ def _read_header(self, fp_): header["block6"] = np.fromfile( fp_, dtype=_INTER_CALIBRATION_INFO_TYPE, count=1) - fpos = fpos + int(header["block6"]["blocklength"][0]) + fpos = fpos + int(header["block6"]["blocklength"].item()) self._check_fpos(fp_, fpos, 0, "block6") fp_.seek(fpos, 0) header["block7"] = np.fromfile( fp_, dtype=_SEGMENT_INFO_TYPE, count=1) - fpos = fpos + int(header["block7"]["blocklength"][0]) + fpos = fpos + int(header["block7"]["blocklength"].item()) self._check_fpos(fp_, fpos, 0, "block7") fp_.seek(fpos, 0) header["block8"] = np.fromfile( @@ -576,7 +576,7 @@ def _read_header(self, fp_): corrections = [] for _i in range(ncorrs): corrections.append(np.fromfile(fp_, dtype=_NAVIGATION_CORRECTION_SUBINFO_TYPE, count=1)) - fpos = fpos + int(header["block8"]["blocklength"][0]) + fpos = fpos + int(header["block8"]["blocklength"].item()) self._check_fpos(fp_, fpos, 40, "block8") fp_.seek(fpos, 0) header["navigation_corrections"] = corrections @@ -591,7 +591,7 @@ def _read_header(self, fp_): dtype=_OBSERVATION_LINE_TIME_INFO_TYPE, count=1)) header["observation_time_information"] = lines_and_times - fpos = fpos + int(header["block9"]["blocklength"][0]) + fpos = fpos + 
int(header["block9"]["blocklength"].item()) self._check_fpos(fp_, fpos, 40, "block9") fp_.seek(fpos, 0) @@ -604,12 +604,12 @@ def _read_header(self, fp_): for _i in range(num_err_info_data): err_info_data.append(np.fromfile(fp_, dtype=_ERROR_LINE_INFO_TYPE, count=1)) header["error_information_data"] = err_info_data - fpos = fpos + int(header["block10"]["blocklength"][0]) + fpos = fpos + int(header["block10"]["blocklength"].item()) self._check_fpos(fp_, fpos, 40, "block10") fp_.seek(fpos, 0) header["block11"] = np.fromfile(fp_, dtype=_SPARE_TYPE, count=1) - fpos = fpos + int(header["block11"]["blocklength"][0]) + fpos = fpos + int(header["block11"]["blocklength"].item()) self._check_fpos(fp_, fpos, 0, "block11") fp_.seek(fpos, 0) @@ -617,8 +617,8 @@ def _read_header(self, fp_): def _read_data(self, fp_, header, resolution): """Read data block.""" - nlines = int(header["block2"]["number_of_lines"][0]) - ncols = int(header["block2"]["number_of_columns"][0]) + nlines = int(header["block2"]["number_of_lines"].item()) + ncols = int(header["block2"]["number_of_columns"].item()) chunks = normalize_low_res_chunks( ("auto", "auto"), (nlines, ncols), From 66bc5b5290901d17aa303e101b5af29ff55a851d Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Dec 2023 16:29:48 +0200 Subject: [PATCH 0856/1416] Use nanosecond precision times in MVIRI FIDUCEO tests --- .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index b03336c230..301f9751a3 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -61,10 +61,10 @@ {"sun_earth_distance_correction_applied": True, "sun_earth_distance_correction_factor": 1.} ) -acq_time_vis_exp = [np.datetime64("1970-01-01 00:30"), - np.datetime64("1970-01-01 00:30"), - np.datetime64("1970-01-01 02:30"), - np.datetime64("1970-01-01 02:30")] +acq_time_vis_exp = [np.datetime64("1970-01-01 00:30").astype("datetime64[ns]"), + np.datetime64("1970-01-01 00:30").astype("datetime64[ns]"), + np.datetime64("1970-01-01 02:30").astype("datetime64[ns]"), + np.datetime64("1970-01-01 02:30").astype("datetime64[ns]")] vis_counts_exp = xr.DataArray( np.array( [[0., 17., 34., 51.], @@ -124,8 +124,8 @@ }, attrs=attrs_exp ) -acq_time_ir_wv_exp = [np.datetime64("1970-01-01 00:30"), - np.datetime64("1970-01-01 02:30")] +acq_time_ir_wv_exp = [np.datetime64("1970-01-01 00:30").astype("datetime64[ns]"), + np.datetime64("1970-01-01 02:30").astype("datetime64[ns]")] wv_counts_exp = xr.DataArray( np.array( [[0, 85], @@ -277,7 +277,8 @@ def fixture_fake_dataset(): dtype=np.uint8 ) ) - time = np.arange(4).astype("datetime64[h]").reshape(2, 2) + time = np.arange(4) * 60 * 60 * 1e9 + time = time.astype("datetime64[ns]").reshape(2, 2) ds = xr.Dataset( data_vars={ "count_vis": (("y", "x"), count_vis), From 3f552560d99d8209516cff4dfd033c15a11d9fab Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 12 Dec 2023 14:39:46 +0000 Subject: [PATCH 0857/1416] Repair AHI HSD tests. 
--- satpy/tests/reader_tests/test_ahi_hsd.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 2075b88947..7bf1562e1c 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -48,8 +48,8 @@ "compression_flag_for_data": 0, "hblock_number": 2, "number_of_bits_per_pixel": 16, - "number_of_columns": [11000], - "number_of_lines": [1100], + "number_of_columns": np.array([11000]), + "number_of_lines": np.array([1100]), "spare": "", } FAKE_PROJ_INFO: InfoDict = { @@ -135,8 +135,8 @@ def test_region(self, fromfile, np2str): "compression_flag_for_data": 0, "hblock_number": 2, "number_of_bits_per_pixel": 16, - "number_of_columns": [1000], - "number_of_lines": [1000], + "number_of_columns": np.array([1000]), + "number_of_lines": np.array([1000]), "spare": ""} area_def = fh.get_area_def(None) @@ -183,8 +183,8 @@ def test_segment(self, fromfile, np2str): "compression_flag_for_data": 0, "hblock_number": 2, "number_of_bits_per_pixel": 16, - "number_of_columns": [11000], - "number_of_lines": [1100], + "number_of_columns": np.array([11000]), + "number_of_lines": np.array([1100]), "spare": ""} area_def = fh.get_area_def(None) From 372e1c935dac8e1b171b298979ba62b506281f90 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Dec 2023 17:00:55 +0200 Subject: [PATCH 0858/1416] Filter out pyproj warnings --- .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 32 ++++++++++--------- 1 file changed, 17 insertions(+), 15 deletions(-) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index 301f9751a3..56bbd5212f 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -26,6 +26,7 @@ import numpy as np import pytest import xarray as xr +from pyproj import CRS from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_radius_parameters @@ -232,17 +233,12 @@ dims=("y", "x"), attrs=attrs_exp ) +projection = CRS(f"+proj=geos +lon_0=57.0 +h={ALTITUDE} +a={EQUATOR_RADIUS} +b={POLE_RADIUS}") area_vis_exp = AreaDefinition( area_id="geos_mviri_4x4", proj_id="geos_mviri_4x4", description="MVIRI Geostationary Projection", - projection={ - "proj": "geos", - "lon_0": 57.0, - "h": ALTITUDE, - "a": EQUATOR_RADIUS, - "b": POLE_RADIUS - }, + projection=projection, width=4, height=4, area_extent=[5621229.74392, 5621229.74392, -5621229.74392, -5621229.74392] @@ -501,16 +497,22 @@ def test_angle_cache(self, interp_tiepoints, file_handler): def test_get_area_definition(self, file_handler, name, resolution, area_exp): """Test getting area definitions.""" + import warnings + dataset_id = make_dataid(name=name, resolution=resolution) area = file_handler.get_area_def(dataset_id) - a, b = proj4_radius_parameters(area.proj_dict) - a_exp, b_exp = proj4_radius_parameters(area_exp.proj_dict) - assert a == a_exp - assert b == b_exp - assert area.width == area_exp.width - assert area.height == area_exp.height - for key in ["h", "lon_0", "proj", "units"]: - assert area.proj_dict[key] == area_exp.proj_dict[key] + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + a, b = proj4_radius_parameters(area.proj_dict) + a_exp, b_exp = proj4_radius_parameters(area_exp.proj_dict) + assert a == a_exp + assert b == b_exp + 
assert area.width == area_exp.width + assert area.height == area_exp.height + for key in ["h", "lon_0", "proj", "units"]: + assert area.proj_dict[key] == area_exp.proj_dict[key] np.testing.assert_allclose(area.area_extent, area_exp.area_extent) def test_calib_exceptions(self, file_handler): From 5b7bbe73b226bc9d83ebdc63b83e7f65e9d0debe Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 13 Dec 2023 09:45:12 +0200 Subject: [PATCH 0859/1416] Suppress PROJ4 UserWarning on lost accuracy --- satpy/tests/reader_tests/test_nwcsaf_nc.py | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/satpy/tests/reader_tests/test_nwcsaf_nc.py b/satpy/tests/reader_tests/test_nwcsaf_nc.py index 07d6cee174..a3235e99e7 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_nc.py +++ b/satpy/tests/reader_tests/test_nwcsaf_nc.py @@ -268,13 +268,12 @@ def test_sensor_name_sat_id(self, nwcsaf_geo_ct_filehandler, platform, instrumen def test_get_area_def(self, nwcsaf_geo_ct_filehandler): """Test that get_area_def() returns proper area.""" dsid = {"name": "ct"} - - _check_area_def(nwcsaf_geo_ct_filehandler.get_area_def(dsid)) + _check_filehandler_area_def(nwcsaf_geo_ct_filehandler, dsid) def test_get_area_def_km(self, nwcsaf_old_geo_ct_filehandler): """Test that get_area_def() returns proper area when the projection is in km.""" dsid = {"name": "ct"} - _check_area_def(nwcsaf_old_geo_ct_filehandler.get_area_def(dsid)) + _check_filehandler_area_def(nwcsaf_old_geo_ct_filehandler, dsid) def test_scale_dataset_attr_removal(self, nwcsaf_geo_ct_filehandler): """Test the scaling of the dataset and removal of obsolete attributes.""" @@ -506,12 +505,19 @@ def test_get_dataset_scales_and_offsets_palette_meanings_using_other_dataset(sel np.testing.assert_allclose(res.attrs["palette_meanings"], palette_meanings * COT_SCALE + COT_OFFSET) -def _check_area_def(area_definition): +def _check_filehandler_area_def(file_handler, dsid): + import warnings + correct_h = float(PROJ["gdal_projection"].split("+h=")[-1]) correct_a = float(PROJ["gdal_projection"].split("+a=")[-1].split()[0]) - assert area_definition.proj_dict["h"] == correct_h - assert area_definition.proj_dict["a"] == correct_a - assert area_definition.proj_dict["units"] == "m" + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + area_definition = file_handler.get_area_def(dsid) + assert area_definition.proj_dict["h"] == correct_h + assert area_definition.proj_dict["a"] == correct_a + assert area_definition.proj_dict["units"] == "m" correct_extent = (PROJ["gdal_xgeo_up_left"], PROJ["gdal_ygeo_low_right"], PROJ["gdal_xgeo_low_right"], From e3160bf0ca5874cb2b99ac9b6148ae2094e35866 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 13 Dec 2023 10:18:34 +0200 Subject: [PATCH 0860/1416] Use modern chunk size setting in nwcsaf_nc reader --- satpy/readers/nwcsaf_nc.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/nwcsaf_nc.py b/satpy/readers/nwcsaf_nc.py index 5d8320f954..e3b3dc2d3d 100644 --- a/satpy/readers/nwcsaf_nc.py +++ b/satpy/readers/nwcsaf_nc.py @@ -36,11 +36,11 @@ from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import unzip_file -from satpy.utils import get_legacy_chunk_size +from satpy.utils import get_chunk_size_limit logger = logging.getLogger(__name__) -CHUNK_SIZE = get_legacy_chunk_size() +CHUNK_SIZE = get_chunk_size_limit() SENSOR = {"NOAA-19": 
"avhrr-3", "NOAA-18": "avhrr-3", From fbb437331fd983a1cb3553f9b9bd298d6b4fccdb Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 13 Dec 2023 10:30:39 +0200 Subject: [PATCH 0861/1416] Use DataArray.drop_vars() instead of deprecated .drop() --- satpy/readers/ahi_l2_nc.py | 4 ++-- satpy/readers/goes_imager_nc.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index 17823fed1e..7785c3994d 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -100,8 +100,8 @@ def get_dataset(self, key, info): # Data has 'Latitude' and 'Longitude' coords, these must be replaced. variable = variable.rename({"Rows": "y", "Columns": "x"}) - variable = variable.drop("Latitude") - variable = variable.drop("Longitude") + variable = variable.drop_vars("Latitude") + variable = variable.drop_vars("Longitude") variable.attrs.update(key.to_dict()) return variable diff --git a/satpy/readers/goes_imager_nc.py b/satpy/readers/goes_imager_nc.py index 214852fffd..969151cb34 100644 --- a/satpy/readers/goes_imager_nc.py +++ b/satpy/readers/goes_imager_nc.py @@ -1087,7 +1087,7 @@ def get_dataset(self, key, info): # Set proper dimension names data = data.rename({"xc": "x", "yc": "y"}) - data = data.drop("time") + data = data.drop_vars("time") # Update metadata self._update_metadata(data, ds_info=info) From 12192b1ac153a124892fa3a45dae112bbf027bbd Mon Sep 17 00:00:00 2001 From: Youva <120452807+YouvaEUMex@users.noreply.github.com> Date: Wed, 13 Dec 2023 09:33:01 +0100 Subject: [PATCH 0862/1416] Update satpy/tests/reader_tests/test_fci_l2_nc.py Co-authored-by: Martin Raspaud --- satpy/tests/reader_tests/test_fci_l2_nc.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/tests/reader_tests/test_fci_l2_nc.py b/satpy/tests/reader_tests/test_fci_l2_nc.py index 7853cba900..e7a312d4b8 100644 --- a/satpy/tests/reader_tests/test_fci_l2_nc.py +++ b/satpy/tests/reader_tests/test_fci_l2_nc.py @@ -508,10 +508,10 @@ def test_byte_extraction(self): assert dataset.values == 0 -class TestFciL2NCAMVFileHandler(unittest.TestCase): +class TestFciL2NCAMVFileHandler: """Test the FciL2NCFileHandler reader.""" - def setUp(self): + def setup_method(self): """Set up the test by creating a test file and opening it with the reader.""" # Easiest way to test the reader is to create a test netCDF file on the fly # Create unique filenames to prevent race conditions when tests are run in parallel From 2948a6f5aac96a0496cc1cac93f8c0fe75e7001f Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 13 Dec 2023 09:41:24 +0000 Subject: [PATCH 0863/1416] Add test triggering the error --- satpy/tests/reader_tests/test_satpy_cf_nc.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/satpy/tests/reader_tests/test_satpy_cf_nc.py b/satpy/tests/reader_tests/test_satpy_cf_nc.py index 0c22f5b3f1..6528a80723 100644 --- a/satpy/tests/reader_tests/test_satpy_cf_nc.py +++ b/satpy/tests/reader_tests/test_satpy_cf_nc.py @@ -147,6 +147,10 @@ def cf_scene(): "nadir_longitude": 1, "nadir_latitude": 1, "only_in_1": False + }, + "time_parameters": { + "nominal_start_time": tstart, + "nominal_end_time": tend } }) @@ -388,18 +392,17 @@ def test_read_prefixed_channels_by_user_no_prefix(self, cf_scene, nc_filename): np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loded as coord - def test_orbital_parameters(self, cf_scene, 
nc_filename): - """Test that the orbital parameters in attributes are handled correctly.""" + def test_decoding_of_dict_type_attributes(self, cf_scene, nc_filename): + """Test decoding of dict type attributes.""" cf_scene.save_datasets(writer="cf", filename=nc_filename) scn_ = Scene(reader="satpy_cf_nc", filenames=[nc_filename]) scn_.load(["image0"]) - orig_attrs = cf_scene["image0"].attrs["orbital_parameters"] - new_attrs = scn_["image0"].attrs["orbital_parameters"] - assert isinstance(new_attrs, dict) - for key in orig_attrs: - assert orig_attrs[key] == new_attrs[key] + for attr_name in ["orbital_parameters", "time_parameters"]: + orig_attrs = cf_scene["image0"].attrs[attr_name] + new_attrs = scn_["image0"].attrs[attr_name] + assert new_attrs == orig_attrs def test_write_and_read_from_two_files(self, nc_filename, nc_filename_i): """Save two datasets with different resolution and read the solar_zenith_angle again.""" From 6ee324821f85d93f11ade113a987b743c0eef011 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 13 Dec 2023 10:04:10 +0000 Subject: [PATCH 0864/1416] Decode time parameters to datetime --- satpy/readers/satpy_cf_nc.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/satpy/readers/satpy_cf_nc.py b/satpy/readers/satpy_cf_nc.py index 7a26ead72b..bf8908e604 100644 --- a/satpy/readers/satpy_cf_nc.py +++ b/satpy/readers/satpy_cf_nc.py @@ -313,7 +313,12 @@ def get_dataset(self, ds_id, ds_info): data.attrs.update(nc.attrs) # For now add global attributes to all datasets if "orbital_parameters" in data.attrs: data.attrs["orbital_parameters"] = _str2dict(data.attrs["orbital_parameters"]) - + if "time_parameters" in data.attrs: + time_params = _str2dict(data.attrs["time_parameters"]) + from dateutil.parser import isoparse + for key, val in time_params.items(): + time_params[key] = isoparse(val) + data.attrs["time_parameters"] = time_params return data def get_area_def(self, dataset_id): From cd4743ccddf1b943b540de9d792af9437526df1f Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 13 Dec 2023 10:21:09 +0000 Subject: [PATCH 0865/1416] Refactor dict type decoding --- satpy/readers/satpy_cf_nc.py | 25 ++++++++++++++++--------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/satpy/readers/satpy_cf_nc.py b/satpy/readers/satpy_cf_nc.py index bf8908e604..2e85a166f8 100644 --- a/satpy/readers/satpy_cf_nc.py +++ b/satpy/readers/satpy_cf_nc.py @@ -311,16 +311,13 @@ def get_dataset(self, ds_id, ds_info): if name != ds_id["name"]: data = data.rename(ds_id["name"]) data.attrs.update(nc.attrs) # For now add global attributes to all datasets - if "orbital_parameters" in data.attrs: - data.attrs["orbital_parameters"] = _str2dict(data.attrs["orbital_parameters"]) - if "time_parameters" in data.attrs: - time_params = _str2dict(data.attrs["time_parameters"]) - from dateutil.parser import isoparse - for key, val in time_params.items(): - time_params[key] = isoparse(val) - data.attrs["time_parameters"] = time_params + self._decode_dict_type_attrs(data) return data + def _decode_dict_type_attrs(self, data): + for key in ["orbital_parameters", "time_parameters"]: + data.attrs[key] = _str2dict(data.attrs[key]) + def get_area_def(self, dataset_id): """Get area definition from CF complient netcdf.""" try: @@ -334,8 +331,18 @@ def get_area_def(self, dataset_id): raise NotImplementedError +def _datetime_parser(json_dict): + import dateutil.parser + for key, value in json_dict.items(): + try: + json_dict[key] = dateutil.parser.parse(value) + except 
(TypeError, ValueError): + pass + return json_dict + + def _str2dict(val): """Convert string to dictionary.""" if isinstance(val, str): - val = json.loads(val) + val = json.loads(val, object_hook=_datetime_parser) return val From 43b2c209909b4024f83b5c64018c62f37b6f6698 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 13 Dec 2023 12:36:43 +0200 Subject: [PATCH 0866/1416] Use datetime64[ns] for JMA HRIT readers --- satpy/readers/hrit_jma.py | 7 ++++--- satpy/tests/reader_tests/test_ahi_hrit.py | 10 +++++----- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/satpy/readers/hrit_jma.py b/satpy/readers/hrit_jma.py index c273b9b578..0c88faf46b 100644 --- a/satpy/readers/hrit_jma.py +++ b/satpy/readers/hrit_jma.py @@ -198,9 +198,10 @@ def mjd2datetime64(mjd): """Convert Modified Julian Day (MJD) to datetime64.""" epoch = np.datetime64("1858-11-17 00:00") - day2usec = 24 * 3600 * 1E6 - mjd_usec = (mjd * day2usec).astype(np.int64).astype("timedelta64[us]") - return epoch + mjd_usec + day2nsec = 24 * 3600 * 1E9 + mjd_nsec = (mjd * day2nsec).astype(np.int64).astype("timedelta64[ns]") + + return epoch + mjd_nsec class HRITJMAFileHandler(HRITFileHandler): diff --git a/satpy/tests/reader_tests/test_ahi_hrit.py b/satpy/tests/reader_tests/test_ahi_hrit.py index 1dbf36c66b..3db8e2d094 100644 --- a/satpy/tests/reader_tests/test_ahi_hrit.py +++ b/satpy/tests/reader_tests/test_ahi_hrit.py @@ -119,6 +119,7 @@ def test_init(self): # Check if scanline timestamps are there (dedicated test below) assert isinstance(reader.acq_time, np.ndarray) + assert reader.acq_time.dtype == np.dtype("datetime64[ns]") # Check platform assert reader.platform == HIMAWARI8 @@ -305,14 +306,13 @@ def test_get_dataset(self, base_get_dataset): def test_mjd2datetime64(self): """Test conversion from modified julian day to datetime64.""" from satpy.readers.hrit_jma import mjd2datetime64 - assert mjd2datetime64(np.array([0])) == np.datetime64("1858-11-17", "us") - assert mjd2datetime64(np.array([40587.5])) == np.datetime64("1970-01-01 12:00", "us") + assert mjd2datetime64(np.array([0])) == np.datetime64("1858-11-17", "ns") + assert mjd2datetime64(np.array([40587.5])) == np.datetime64("1970-01-01 12:00", "ns") def test_get_acq_time(self): """Test computation of scanline acquisition times.""" dt_line = np.arange(1, 11000+1).astype("timedelta64[s]") - acq_time_exp = np.datetime64("1970-01-01", "us") + dt_line - + acq_time_exp = np.datetime64("1970-01-01", "ns") + dt_line for platform in ["Himawari-8", "MTSAT-2"]: # Results are not exactly identical because timestamps are stored in # the header with only 6 decimals precision (max diff here: 45 msec). 
@@ -320,7 +320,7 @@ def test_get_acq_time(self): reader = self._get_reader(mda=mda) np.testing.assert_allclose(reader.acq_time.astype(np.int64), acq_time_exp.astype(np.int64), - atol=45000) + atol=45000000) def test_start_time_from_filename(self): """Test that by default the datetime in the filename is returned.""" From 566ab73dbc0359e081bf0a130821aa10d948de6c Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 13 Dec 2023 12:42:37 +0200 Subject: [PATCH 0867/1416] Use datetime64[ns] in GOES imager netCDF reader tests --- satpy/tests/reader_tests/test_goes_imager_nc_eum.py | 2 +- satpy/tests/reader_tests/test_goes_imager_nc_noaa.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_goes_imager_nc_eum.py b/satpy/tests/reader_tests/test_goes_imager_nc_eum.py index 68472aeb1a..e192bbe63f 100644 --- a/satpy/tests/reader_tests/test_goes_imager_nc_eum.py +++ b/satpy/tests/reader_tests/test_goes_imager_nc_eum.py @@ -52,7 +52,7 @@ def setUp(self, xr_): xr_.open_dataset.return_value = xr.Dataset( {"data": xr.DataArray(data=self.radiance, dims=("time", "yc", "xc")), - "time": xr.DataArray(data=np.array([0], dtype="datetime64[ms]"), + "time": xr.DataArray(data=np.array([0], dtype="datetime64[ns]"), dims=("time",)), "bands": xr.DataArray(data=np.array([1]))}, attrs={"Satellite Sensor": "G-15"}) diff --git a/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py b/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py index 1fd5e65cac..994f1336fd 100644 --- a/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py +++ b/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py @@ -58,7 +58,7 @@ def setUp(self, xr_): "lon": xr.DataArray(data=self.dummy2d, dims=("yc", "xc")), "lat": xr.DataArray(data=self.dummy2d, dims=("yc", "xc")), "time": xr.DataArray(data=np.array([self.time], - dtype="datetime64[ms]"), + dtype="datetime64[ns]"), dims=("time",)), "bands": xr.DataArray(data=np.array([self.band]))}, attrs={"Satellite Sensor": "G-15"}) @@ -238,7 +238,7 @@ def dataset(self, lons_lats, channel_id): dims=("time", "yc", "xc") ) time = xr.DataArray( - [np.datetime64("2018-01-01 12:00:00")], + [np.datetime64("2018-01-01 12:00:00").astype("datetime64[ns]")], dims="time" ) bands = xr.DataArray([channel_id], dims="bands") @@ -369,7 +369,7 @@ def setUp(self, xr_): {"data": xr.DataArray(data=self.counts, dims=("time", "yc", "xc")), "lon": xr.DataArray(data=self.lon, dims=("yc", "xc")), "lat": xr.DataArray(data=self.lat, dims=("yc", "xc")), - "time": xr.DataArray(data=np.array([0], dtype="datetime64[ms]"), + "time": xr.DataArray(data=np.array([0], dtype="datetime64[ns]"), dims=("time",)), "bands": xr.DataArray(data=np.array([1]))}, attrs={"Satellite Sensor": "G-15"}) From 8b29b8fb45848b8c96228385a8181dfee42ff4e6 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 13 Dec 2023 10:57:04 +0000 Subject: [PATCH 0868/1416] Only decode attributes if available --- satpy/readers/satpy_cf_nc.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/satpy/readers/satpy_cf_nc.py b/satpy/readers/satpy_cf_nc.py index 2e85a166f8..b9c932b852 100644 --- a/satpy/readers/satpy_cf_nc.py +++ b/satpy/readers/satpy_cf_nc.py @@ -316,7 +316,10 @@ def get_dataset(self, ds_id, ds_info): def _decode_dict_type_attrs(self, data): for key in ["orbital_parameters", "time_parameters"]: - data.attrs[key] = _str2dict(data.attrs[key]) + try: + data.attrs[key] = _str2dict(data.attrs[key]) + except KeyError: + continue def get_area_def(self, dataset_id): """Get area definition from CF 
complient netcdf.""" From 1b14e764212cf0af7076f3458593eeb9c7525aa1 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Wed, 13 Dec 2023 13:46:08 +0000 Subject: [PATCH 0869/1416] Replace the reader with a lazy reader using the cached_property decorator --- satpy/readers/fci_l2_nc.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index 03f8e94f55..78020cdcf9 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -22,6 +22,7 @@ import xarray as xr from pyresample import geometry +from satpy._compat import cached_property from satpy.readers._geos_area import get_geos_area_naming, make_ext from satpy.readers.eum_base import get_service_mode from satpy.readers.file_handlers import BaseFileHandler @@ -153,6 +154,7 @@ def __init__(self, filename, filename_info, filetype_info, with_area_definition= self._projection = self.nc["mtg_geos_projection"] self.multi_dims = {"maximum_number_of_layers": "layer", "number_of_vis_channels": "vis_channel_id"} + def get_area_def(self, key): """Return the area definition.""" try: @@ -408,14 +410,15 @@ def __init__(self, filename, filename_info, filetype_info): """Open the NetCDF file with xarray and prepare for dataset reading.""" super().__init__(filename, filename_info, filetype_info) - # Use xarray's default netcdf4 engine to open the file - self.nc = xr.open_dataset( + @cached_property + def nc(self): + """Read the file.""" + return xr.open_dataset( self.filename, decode_cf=True, mask_and_scale=True, chunks={ "number_of_images": CHUNK_SIZE, - # 'number_of_height_estimates': CHUNK_SIZE, "number_of_winds": CHUNK_SIZE } ) From a574c51015c2b6a35381dcd70a897d64b957eb58 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 13 Dec 2023 16:01:31 +0200 Subject: [PATCH 0870/1416] Use datetime64[ns] in SEVIRI HRIT tests --- satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py index b9ff1f95ea..c332f0d3f9 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py @@ -201,6 +201,7 @@ def get_acq_time_cds(start_time, nlines): tline["days"][1:-1] = days_since_1958 * np.ones(nlines - 2) offset_second = (start_time - start_time.replace(hour=0, minute=0, second=0, microsecond=0)).total_seconds()*1000 tline["milliseconds"][1:-1] = np.arange(nlines - 2)+offset_second + return tline @@ -211,7 +212,8 @@ def get_acq_time_exp(start_time, nlines): """Get expected scanline acquisition times.""" tline_exp[-1] = np.datetime64("NaT") tline_exp[1:-1] = np.datetime64(start_time) tline_exp[1:-1] += np.arange(nlines - 2).astype("timedelta64[ms]") - return tline_exp + + return tline_exp.astype("datetime64[ns]") def get_attrs_exp(projection_longitude=0.0): From 8b7c8416089b562ae5d277e3a663629031a4c8f8 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 13 Dec 2023 16:05:51 +0200 Subject: [PATCH 0871/1416] Use datetime64[ns] in SEVIRI native tests --- satpy/tests/reader_tests/test_seviri_l1b_native.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py index ba7cf63447..6382517b55 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_native.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py @@ -1048,10 +1048,10 @@ def _exp_data_array(): "standard_name": "counts", }
) - expected["acq_time"] = ("y", [np.datetime64("1958-01-02 00:00:01"), - np.datetime64("1958-01-02 00:00:02"), - np.datetime64("1958-01-02 00:00:03"), - np.datetime64("1958-01-02 00:00:04")]) + expected["acq_time"] = ("y", [np.datetime64("1958-01-02 00:00:01").astype("datetime64[ns]"), + np.datetime64("1958-01-02 00:00:02").astype("datetime64[ns]"), + np.datetime64("1958-01-02 00:00:03").astype("datetime64[ns]"), + np.datetime64("1958-01-02 00:00:04").astype("datetime64[ns]")]) return expected def test_get_dataset_with_raw_metadata(self, file_handler): From 822f04bddf293154124892fa3a45dae112bbf027bbd Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 13 Dec 2023 16:08:05 +0200 Subject: [PATCH 0872/1416] Use datetime64[ns] in SEVIRI netCDF4 tests --- satpy/tests/reader_tests/test_seviri_l1b_nc.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_nc.py b/satpy/tests/reader_tests/test_seviri_l1b_nc.py index f6a54aa60e..42e038a766 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_nc.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_nc.py @@ -337,8 +337,8 @@ def test_get_dataset(self, file_handler, channel, calibration, mask_bad_quality_ "wavelength": "wavelength", "standard_name": "standard_name" } - expected["acq_time"] = ("y", [np.datetime64("1958-01-02 00:00:01"), - np.datetime64("1958-01-02 00:00:02")]) + expected["acq_time"] = ("y", [np.datetime64("1958-01-02 00:00:01").astype("datetime64[ns]"), + np.datetime64("1958-01-02 00:00:02").astype("datetime64[ns]")]) expected = expected[::-1] # reader flips data upside down if mask_bad_quality_scan_lines: expected = file_handler._mask_bad_quality(expected, dataset_info) From 05a9a0b62bac25da4cdaf3f1aa437817c790160d Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 13 Dec 2023 16:25:40 +0200 Subject: [PATCH 0873/1416] Fix mjd2datetime64 usage after nanosecond update --- satpy/readers/gms/gms5_vissr_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/gms/gms5_vissr_l1b.py b/satpy/readers/gms/gms5_vissr_l1b.py index c8a88dfe25..51f33d657a 100644 --- a/satpy/readers/gms/gms5_vissr_l1b.py +++ b/satpy/readers/gms/gms5_vissr_l1b.py @@ -307,7 +307,7 @@ def _get_orbital_parameters(self): } def _get_time_parameters(self): - start_time = mjd2datetime64(self._mode_block["observation_time_mjd"]) + start_time = mjd2datetime64(self._mode_block["observation_time_mjd"]).astype("datetime64[us]") start_time = start_time.astype(dt.datetime).replace(second=0, microsecond=0) end_time = start_time + dt.timedelta( minutes=25 From 6e3bc601515943e20d312fc5b01e0f5dc8d1bd80 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Wed, 13 Dec 2023 14:29:06 +0000 Subject: [PATCH 0874/1416] Revert all attempts to introduce tmp_path pytest fixture --- satpy/tests/reader_tests/test_fci_l2_nc.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/satpy/tests/reader_tests/test_fci_l2_nc.py b/satpy/tests/reader_tests/test_fci_l2_nc.py index e7a312d4b8..44906c5040 100644 --- a/satpy/tests/reader_tests/test_fci_l2_nc.py +++ b/satpy/tests/reader_tests/test_fci_l2_nc.py @@ -509,7 +509,7 @@ def test_byte_extraction(self): assert dataset.values == 0 class TestFciL2NCAMVFileHandler: """Test the FciL2NCAMVFileHandler reader.""" def setup_method(self): """Set up the test by creating a test file and opening it with the reader.""" # Easiest way to test the reader is to create a test netCDF file on the fly # Create unique filenames to prevent race conditions when tests are run in parallel self.fh = FciL2NCAMVFileHandler(filename=self.test_file,
filename_info={"channel":"test_channel"}, - filetype_info={}) + filetype_info={} + ) def tearDown(self): """Remove the previously created test file.""" From 399086ea7565ba7fd2e6c717f5a8801f8b471d5d Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 13 Dec 2023 16:32:17 +0200 Subject: [PATCH 0875/1416] Use datetime64[ns] in GOES imager EUM test --- satpy/tests/reader_tests/test_goes_imager_nc_eum.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_goes_imager_nc_eum.py b/satpy/tests/reader_tests/test_goes_imager_nc_eum.py index e192bbe63f..189b76d5cd 100644 --- a/satpy/tests/reader_tests/test_goes_imager_nc_eum.py +++ b/satpy/tests/reader_tests/test_goes_imager_nc_eum.py @@ -139,7 +139,7 @@ def setUp(self, xr_): xr_.open_dataset.return_value = xr.Dataset( {"data": xr.DataArray(data=self.reflectance, dims=("time", "yc", "xc")), - "time": xr.DataArray(data=np.array([0], dtype="datetime64[ms]"), + "time": xr.DataArray(data=np.array([0], dtype="datetime64[ns]"), dims=("time",)), "bands": xr.DataArray(data=np.array([1]))}, attrs={"Satellite Sensor": "G-15"}) From 05a9a0b62bac25da4cdaf3f1aa437817c790160d Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 13 Dec 2023 16:53:31 +0200 Subject: [PATCH 0876/1416] Use .item() to get singleton array elements in GOES imager readers --- satpy/readers/goes_imager_nc.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/readers/goes_imager_nc.py b/satpy/readers/goes_imager_nc.py index 969151cb34..8ec3219eec 100644 --- a/satpy/readers/goes_imager_nc.py +++ b/satpy/readers/goes_imager_nc.py @@ -620,7 +620,7 @@ def __init__(self, filename, filename_info, filetype_info, geo_data=None): self.platform_name = self._get_platform_name( self.nc.attrs["Satellite Sensor"]) self.platform_shortname = self.platform_name.replace("-", "").lower() - self.gvar_channel = int(self.nc["bands"].values) + self.gvar_channel = int(self.nc["bands"].item()) self.sector = self._get_sector(channel=self.gvar_channel, nlines=self.nlines, ncols=self.ncols) @@ -731,9 +731,9 @@ def _get_area_def_uniform_sampling(self, lon0, channel): def start_time(self): """Start timestamp of the dataset.""" dt = self.nc["time"].dt - return datetime(year=int(dt.year), month=int(dt.month), day=int(dt.day), - hour=int(dt.hour), minute=int(dt.minute), - second=int(dt.second), microsecond=int(dt.microsecond)) + return datetime(year=int(dt.year.item()), month=int(dt.month.item()), day=int(dt.day.item()), + hour=int(dt.hour.item()), minute=int(dt.minute.item()), + second=int(dt.second.item()), microsecond=int(dt.microsecond.item())) @property def end_time(self): From 71d842f65e1f6ef06e538696cc4260d1d0bebf03 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 13 Dec 2023 17:27:57 +0200 Subject: [PATCH 0877/1416] Use .item() to get singleton array elements in EUM reader base --- satpy/readers/eum_base.py | 8 ++++---- satpy/tests/reader_tests/test_eum_base.py | 7 ++++--- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/satpy/readers/eum_base.py b/satpy/readers/eum_base.py index 916ba9d444..3cbbb46433 100644 --- a/satpy/readers/eum_base.py +++ b/satpy/readers/eum_base.py @@ -33,14 +33,14 @@ def timecds2datetime(tcds): Works both with a dictionary and a numpy record_array. 
""" - days = int(tcds["Days"]) - milliseconds = int(tcds["Milliseconds"]) + days = int(tcds["Days"].item()) + milliseconds = int(tcds["Milliseconds"].item()) try: - microseconds = int(tcds["Microseconds"]) + microseconds = int(tcds["Microseconds"].item()) except (KeyError, ValueError): microseconds = 0 try: - microseconds += int(tcds["Nanoseconds"]) / 1000. + microseconds += int(tcds["Nanoseconds"].item()) / 1000. except (KeyError, ValueError): pass diff --git a/satpy/tests/reader_tests/test_eum_base.py b/satpy/tests/reader_tests/test_eum_base.py index 54de68201d..55ac977b59 100644 --- a/satpy/tests/reader_tests/test_eum_base.py +++ b/satpy/tests/reader_tests/test_eum_base.py @@ -39,17 +39,18 @@ class TestMakeTimeCdsDictionary(unittest.TestCase): def test_fun(self): """Test function for TestMakeTimeCdsDictionary.""" # time_cds_short - tcds = {"Days": 1, "Milliseconds": 2} + tcds = {"Days": np.array(1), "Milliseconds": np.array(2)} expected = datetime(1958, 1, 2, 0, 0, 0, 2000) assert timecds2datetime(tcds) == expected # time_cds - tcds = {"Days": 1, "Milliseconds": 2, "Microseconds": 3} + tcds = {"Days": np.array(1), "Milliseconds": np.array(2), "Microseconds": np.array(3)} expected = datetime(1958, 1, 2, 0, 0, 0, 2003) assert timecds2datetime(tcds) == expected # time_cds_expanded - tcds = {"Days": 1, "Milliseconds": 2, "Microseconds": 3, "Nanoseconds": 4} + tcds = {"Days": np.array(1), "Milliseconds": np.array(2), "Microseconds": np.array(3), + "Nanoseconds": np.array(4)} expected = datetime(1958, 1, 2, 0, 0, 0, 2003) assert timecds2datetime(tcds) == expected From fe9c1db295d29585739fa1ba3f25a2b6933cb18e Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 07:34:16 +0000 Subject: [PATCH 0878/1416] Add tests for default functionality of HighCloudCompositor and LowCloudCompositor. 
From 6dd9c4201bbbe564da9d4180294b0c734fb4b220 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 09:39:15 +0200 Subject: [PATCH 0879/1416] Fix/suppress PROJ4 warnings --- satpy/readers/fci_l2_nc.py | 2 +- satpy/readers/geocat.py | 3 +- satpy/readers/satpy_cf_nc.py | 9 ++- satpy/tests/reader_tests/test_ahi_hsd.py | 12 +++- satpy/tests/reader_tests/test_ahi_l2_nc.py | 8 ++- satpy/tests/reader_tests/test_geos_area.py | 19 ++++-- .../reader_tests/test_goes_imager_hrit.py | 16 +++-- satpy/tests/reader_tests/test_gpm_imerg.py | 8 ++- satpy/tests/reader_tests/test_hrit_base.py | 9 ++- .../reader_tests/test_insat3d_img_l1b_h5.py | 10 ++- satpy/tests/reader_tests/test_nwcsaf_msg.py | 8 ++- .../reader_tests/test_oceancolorcci_l3_nc.py | 8 ++- satpy/tests/reader_tests/test_osisaf_l3.py | 63 ++++++++++++------- satpy/tests/reader_tests/test_satpy_cf_nc.py | 7 ++- .../reader_tests/test_seviri_l1b_hrit.py | 13 +++- 15 files changed, 146 insertions(+), 49 deletions(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index c387326f89..b03cbdfdaa 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -381,7 +381,7 @@ def _construct_area_def(self, dataset_id): stand_area_def.area_id, stand_area_def.description, "", - stand_area_def.proj_dict, + stand_area_def.crs, stand_area_def.x_size, stand_area_def.y_size, mod_area_extent) diff --git a/satpy/readers/geocat.py b/satpy/readers/geocat.py index 185e7d3c13..852119b02e 100644 --- a/satpy/readers/geocat.py +++ b/satpy/readers/geocat.py @@ -36,7 +36,6 @@ import numpy as np from pyproj import Proj from pyresample import geometry -from pyresample.utils import proj4_str_to_dict from satpy.readers.netcdf_utils import NetCDF4FileHandler, netCDF4 @@ -274,7 +273,7 @@ def get_area_def(self, dsid): area_name, area_name, area_name, - proj4_str_to_dict(proj), + proj, lon.shape[1], lon.shape[0], area_extent=extents, diff --git a/satpy/readers/satpy_cf_nc.py b/satpy/readers/satpy_cf_nc.py index 7a26ead72b..073c8d4cf5 100644 --- a/satpy/readers/satpy_cf_nc.py +++ b/satpy/readers/satpy_cf_nc.py @@ -318,8 +318,15 @@ def get_dataset(self, ds_id, ds_info): def get_area_def(self, dataset_id): """Get area definition from CF complient netcdf.""" + import warnings + try: - area = AreaDefinition.from_cf(self.filename) + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + # FIXME: This should be silenced in Pyresample + area = AreaDefinition.from_cf(self.filename) return area except ValueError: # No CF compliant projection information was found in the netcdf file or diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 7bf1562e1c..6ed3267723 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -140,7 +140,11 @@ def test_region(self, fromfile, np2str): "spare": ""} area_def = fh.get_area_def(None) - proj_dict = area_def.proj_dict + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", +
message=r"You will likely lose important projection information", + category=UserWarning) + proj_dict = area_def.proj_dict a, b = proj4_radius_parameters(proj_dict) assert a == 6378137.0 assert b == 6356752.3 diff --git a/satpy/tests/reader_tests/test_ahi_l2_nc.py b/satpy/tests/reader_tests/test_ahi_l2_nc.py index 7d4050ecf0..a90f24ea5d 100644 --- a/satpy/tests/reader_tests/test_ahi_l2_nc.py +++ b/satpy/tests/reader_tests/test_ahi_l2_nc.py @@ -75,6 +75,8 @@ def test_startend(himl2_filename): def test_ahi_l2_area_def(himl2_filename, caplog): """Test reader handles area definition correctly.""" + import warnings + ps = "+proj=geos +lon_0=140.7 +h=35785863 +x_0=0 +y_0=0 +a=6378137 +rf=298.257024882273 +units=m +no_defs +type=crs" # Check case where input data is correct size. @@ -84,7 +86,11 @@ def test_ahi_l2_area_def(himl2_filename, caplog): assert area_def.width == dimensions["Columns"] assert area_def.height == dimensions["Rows"] assert np.allclose(area_def.area_extent, exp_ext) - assert area_def.proj4_string == ps + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + assert area_def.proj4_string == ps # Check case where input data is incorrect size. fh = ahil2_filehandler(himl2_filename) diff --git a/satpy/tests/reader_tests/test_geos_area.py b/satpy/tests/reader_tests/test_geos_area.py index fb0bb6f19b..077c0b0c7e 100644 --- a/satpy/tests/reader_tests/test_geos_area.py +++ b/satpy/tests/reader_tests/test_geos_area.py @@ -138,19 +138,26 @@ def test_get_xy_from_linecol(self): def test_get_area_definition(self): """Test the retrieval of the area definition.""" + import warnings + from pyresample.utils import proj4_radius_parameters + pdict, extent = self.make_pdict_ext(1, "N2S") good_res = (-3000.4032785810186, -3000.4032785810186) a_def = get_area_definition(pdict, extent) assert a_def.area_id == pdict["a_name"] assert a_def.resolution == good_res - assert a_def.proj_dict["proj"] == "geos" - assert a_def.proj_dict["units"] == "m" - a, b = proj4_radius_parameters(a_def.proj_dict) - assert a == 6378169 - assert b == 6356583.8 - assert a_def.proj_dict["h"] == 35785831 + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + assert a_def.proj_dict["proj"] == "geos" + assert a_def.proj_dict["units"] == "m" + a, b = proj4_radius_parameters(a_def.proj_dict) + assert a == 6378169 + assert b == 6356583.8 + assert a_def.proj_dict["h"] == 35785831 def test_sampling_to_lfac_cfac(self): """Test conversion from angular sampling to line/column offset.""" diff --git a/satpy/tests/reader_tests/test_goes_imager_hrit.py b/satpy/tests/reader_tests/test_goes_imager_hrit.py index cafe7c1e2c..48078d0f0e 100644 --- a/satpy/tests/reader_tests/test_goes_imager_hrit.py +++ b/satpy/tests/reader_tests/test_goes_imager_hrit.py @@ -172,6 +172,8 @@ def test_get_dataset(self, base_get_dataset): def test_get_area_def(self): """Test getting the area definition.""" + import warnings + self.reader.mda.update({ "cfac": 10216334, "lfac": 10216334, @@ -184,13 +186,17 @@ def test_get_area_def(self): resolution=3000) area = self.reader.get_area_def(dsid) - a, b = proj4_radius_parameters(area.proj_dict) + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + a, b = proj4_radius_parameters(area.proj_dict) + 
assert area.proj_dict["h"] == ALTITUDE + assert area.proj_dict["lon_0"] == 100.1640625 + assert area.proj_dict["proj"] == "geos" + assert area.proj_dict["units"] == "m" assert a == EQUATOR_RADIUS assert b == POLE_RADIUS - assert area.proj_dict["h"] == ALTITUDE - assert area.proj_dict["lon_0"] == 100.1640625 - assert area.proj_dict["proj"] == "geos" - assert area.proj_dict["units"] == "m" assert area.width == 2816 assert area.height == 464 assert area.area_id == "goes-15_goes_imager_fd_3km" diff --git a/satpy/tests/reader_tests/test_gpm_imerg.py b/satpy/tests/reader_tests/test_gpm_imerg.py index 508be247d5..96dc65bbd4 100644 --- a/satpy/tests/reader_tests/test_gpm_imerg.py +++ b/satpy/tests/reader_tests/test_gpm_imerg.py @@ -105,6 +105,8 @@ def tearDown(self): def test_load_data(self): """Test loading data.""" + import warnings + from satpy.readers import load_reader # Filename to test, needed for start and end times @@ -130,6 +132,10 @@ def test_load_data(self): assert res["IRprecipitation"].resolution == 0.1 assert res["IRprecipitation"].area.width == 3600 assert res["IRprecipitation"].area.height == 1800 - assert res["IRprecipitation"].area.proj_dict == pdict + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + assert res["IRprecipitation"].area.proj_dict == pdict np.testing.assert_almost_equal(res["IRprecipitation"].area.area_extent, (-179.95, -89.95, 179.95, 89.95), 5) diff --git a/satpy/tests/reader_tests/test_hrit_base.py b/satpy/tests/reader_tests/test_hrit_base.py index cb2dc6c3f4..133b45280e 100644 --- a/satpy/tests/reader_tests/test_hrit_base.py +++ b/satpy/tests/reader_tests/test_hrit_base.py @@ -221,9 +221,16 @@ def test_get_area_extent(self): def test_get_area_def(self): """Test getting an area definition.""" + import warnings + from pyresample.utils import proj4_radius_parameters + area = self.reader.get_area_def("VIS06") - proj_dict = area.proj_dict + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + proj_dict = area.proj_dict a, b = proj4_radius_parameters(proj_dict) assert a == 6378169.0 assert b == 6356583.8 diff --git a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py index 92aef2b906..486177d2d5 100644 --- a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py +++ b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py @@ -273,12 +273,18 @@ def insat_filehandler(insat_filename): def test_filehandler_returns_area(insat_filehandler): """Test that filehandle returns an area.""" + import warnings + fh = insat_filehandler ds_id = make_dataid(name="MIR", resolution=4000, calibration="brightness_temperature") area_def = fh.get_area_def(ds_id) - lons, lats = area_def.get_lonlats(chunks=1000) - assert "+lon_0=" + str(subsatellite_longitude) in area_def.crs.to_proj4() + _ = area_def.get_lonlats(chunks=1000) + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + assert "+lon_0=" + str(subsatellite_longitude) in area_def.crs.to_proj4() def test_filehandler_has_start_and_end_time(insat_filehandler): diff --git a/satpy/tests/reader_tests/test_nwcsaf_msg.py b/satpy/tests/reader_tests/test_nwcsaf_msg.py index 6d4dbfe53f..761f84d380 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_msg.py +++ 
b/satpy/tests/reader_tests/test_nwcsaf_msg.py @@ -472,6 +472,8 @@ def cut_h5_object_ref(root, attr): def test_get_area_def(self): """Get the area definition.""" + import warnings + from satpy.readers.nwcsaf_msg2013_hdf5 import Hdf5NWCSAF from satpy.tests.utils import make_dataid @@ -487,7 +489,11 @@ def test_get_area_def(self): assert area_def.area_extent[i] == pytest.approx(aext_res[i], abs=1e-4) proj_dict = AREA_DEF_DICT["proj_dict"] - assert proj_dict["proj"] == area_def.proj_dict["proj"] + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + assert proj_dict["proj"] == area_def.proj_dict["proj"] # Not all elements passed on Appveyor, so skip testing every single element of the proj-dict: # for key in proj_dict: # self.assertEqual(proj_dict[key], area_def.proj_dict[key]) diff --git a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py index bdb0edfb03..da99fd2d27 100644 --- a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py +++ b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py @@ -166,6 +166,8 @@ def area_exp(self): def test_get_area_def(self, area_exp, fake_file_dict): """Test area definition.""" + import warnings + reader = self._create_reader_for_resolutions([fake_file_dict["ocprod_1m"]]) res = reader.load([ds_list_all[0]]) area = res[ds_list_all[0]].attrs["area"] @@ -174,7 +176,11 @@ def test_get_area_def(self, area_exp, fake_file_dict): assert area.area_extent == area_exp.area_extent assert area.width == area_exp.width assert area.height == area_exp.height - assert area.proj_dict == area_exp.proj_dict + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + assert area.proj_dict == area_exp.proj_dict def test_bad_fname(self, fake_dataset, fake_file_dict): """Test case where an incorrect composite period is given.""" diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py index 3fa9e5bb35..f42a1d4648 100644 --- a/satpy/tests/reader_tests/test_osisaf_l3.py +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -16,6 +16,7 @@ """Module for testing the satpy.readers.osisaf_l3 module.""" import os +import warnings from datetime import datetime import numpy as np @@ -223,11 +224,15 @@ def test_get_area_def_stere(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_polar_stereographic" - assert area_def.proj_dict["a"] == 6378273.0 - assert area_def.proj_dict["lat_0"] == -90 - assert area_def.proj_dict["lat_ts"] == -70 - assert area_def.proj_dict["lon_0"] == 0 - assert area_def.proj_dict["proj"] == "stere" + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + assert area_def.proj_dict["a"] == 6378273.0 + assert area_def.proj_dict["lat_0"] == -90 + assert area_def.proj_dict["lat_ts"] == -70 + assert area_def.proj_dict["lon_0"] == 0 + assert area_def.proj_dict["proj"] == "stere" assert area_def.width == 5 assert area_def.height == 2 @@ -243,10 +248,14 @@ def test_get_area_def_ease(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_lambert_azimuthal_equal_area" - assert area_def.proj_dict["R"] == 6371228 - assert area_def.proj_dict["lat_0"] == -90 - assert area_def.proj_dict["lon_0"] == 0 - 
assert area_def.proj_dict["proj"] == "laea" + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + assert area_def.proj_dict["R"] == 6371228 + assert area_def.proj_dict["lat_0"] == -90 + assert area_def.proj_dict["lon_0"] == 0 + assert area_def.proj_dict["proj"] == "laea" assert area_def.width == 5 assert area_def.height == 2 @@ -279,11 +288,15 @@ def test_get_area_def_stere(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_polar_stereographic" - assert area_def.proj_dict["a"] == 6378273.0 - assert area_def.proj_dict["lat_0"] == -90 - assert area_def.proj_dict["lat_ts"] == -70 - assert area_def.proj_dict["lon_0"] == 0 - assert area_def.proj_dict["proj"] == "stere" + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + assert area_def.proj_dict["a"] == 6378273.0 + assert area_def.proj_dict["lat_0"] == -90 + assert area_def.proj_dict["lat_ts"] == -70 + assert area_def.proj_dict["lon_0"] == 0 + assert area_def.proj_dict["proj"] == "stere" assert area_def.width == 5 assert area_def.height == 2 @@ -318,8 +331,12 @@ def test_get_area_def_grid(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_geographic_area" - assert area_def.proj_dict["datum"] == "WGS84" - assert area_def.proj_dict["proj"] == "longlat" + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + assert area_def.proj_dict["datum"] == "WGS84" + assert area_def.proj_dict["proj"] == "longlat" assert area_def.width == 5 assert area_def.height == 2 @@ -353,11 +370,15 @@ def test_get_area_def_stere(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_polar_stereographic" - assert area_def.proj_dict["a"] == 6378273.0 - assert area_def.proj_dict["lat_0"] == -90 - assert area_def.proj_dict["lat_ts"] == -70 - assert area_def.proj_dict["lon_0"] == 0 - assert area_def.proj_dict["proj"] == "stere" + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + assert area_def.proj_dict["a"] == 6378273.0 + assert area_def.proj_dict["lat_0"] == -90 + assert area_def.proj_dict["lat_ts"] == -70 + assert area_def.proj_dict["lon_0"] == 0 + assert area_def.proj_dict["proj"] == "stere" assert area_def.width == 5 assert area_def.height == 2 diff --git a/satpy/tests/reader_tests/test_satpy_cf_nc.py b/satpy/tests/reader_tests/test_satpy_cf_nc.py index 0c22f5b3f1..f279196ab4 100644 --- a/satpy/tests/reader_tests/test_satpy_cf_nc.py +++ b/satpy/tests/reader_tests/test_satpy_cf_nc.py @@ -249,11 +249,14 @@ def test_write_and_read_with_area_definition(self, cf_scene, nc_filename): expected_area = cf_scene["image0"].attrs["area"] actual_area = scn_["image0"].attrs["area"] assert pytest.approx(expected_area.area_extent, 0.000001) == actual_area.area_extent - assert expected_area.proj_dict == actual_area.proj_dict + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + assert expected_area.proj_dict == actual_area.proj_dict assert expected_area.shape == actual_area.shape assert expected_area.area_id == actual_area.area_id 
assert expected_area.description == actual_area.description - assert expected_area.proj_dict == actual_area.proj_dict def test_write_and_read_with_swath_definition(self, cf_scene, nc_filename): """Save a dataset with a swath definition to file with cf_writer and read the data again.""" diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py index 0ce40d8dfc..e928468228 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py @@ -18,6 +18,7 @@ """The HRIT msg reader tests package.""" import unittest +import warnings from datetime import datetime from unittest import mock @@ -119,7 +120,11 @@ def test_get_area_def(self): from pyresample.utils import proj4_radius_parameters area = self.reader.get_area_def(make_dataid(name="HRV", resolution=1000)) assert area.area_extent == (-45561979844414.07, -3720765401003.719, 45602912357076.38, 77771774058.38356) - proj_dict = area.proj_dict + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + proj_dict = area.proj_dict a, b = proj4_radius_parameters(proj_dict) assert a == 6378169.0 assert b == pytest.approx(6356583.8) @@ -168,7 +173,11 @@ def test_get_area_def(self): """Test getting the area def.""" from pyresample.utils import proj4_radius_parameters area = self.reader.get_area_def(make_dataid(name="VIS006", resolution=3000)) - proj_dict = area.proj_dict + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", + message=r"You will likely lose important projection information", + category=UserWarning) + proj_dict = area.proj_dict a, b = proj4_radius_parameters(proj_dict) assert a == 6378169.0 assert b == pytest.approx(6356583.8) From 30e89987c430d81a6236077e3a20acbbd9ad8704 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 07:44:35 +0000 Subject: [PATCH 0880/1416] Add tests for default functionality of HighCloudCompositor and LowCloudCompositor. 
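The expected alpha values follow from the CloudCompositor logic that
HighCloudCompositor builds on: between the two thresholds the alpha band is
linear in brightness temperature, alpha = (transition_max - BT) /
(transition_max - transition_min), and for high clouds the lower threshold
depends on latitude, running from about 210 K at low latitudes to about
230 K at high latitudes with a linear ramp in between. The threshold values
here are inferred from the expected arrays and the compositor defaults, so
treat them as illustrative. For the middle column of the test data
(BT = 250 K, transition_max = 300 K) that gives, row by row:

    (300 - 250) / (300 - 230)  # = 0.7142857 at abs(lat) = 80 deg
    (300 - 250) / (300 - 220)  # = 0.625     at abs(lat) = 45 deg
    (300 - 250) / (300 - 210)  # = 0.5555555 at abs(lat) = 10 deg

The 200 K column sits below every lower threshold (alpha 1.0, opaque) and
the 300 K column sits at the upper threshold (alpha 0.0, transparent).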
---
 satpy/tests/test_composites.py | 51 ++++++++++++++++++++++++++++++++
 1 file changed, 51 insertions(+)

diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py
index 70bc2abf25..2fa87c1e1b 100644
--- a/satpy/tests/test_composites.py
+++ b/satpy/tests/test_composites.py
@@ -937,6 +937,57 @@ def test_call(self):
         np.testing.assert_allclose(res, exp)
 
 
+class TestHighCloudCompositor:
+    """Test HighCloudCompositor."""
+
+    def setup_method(self):
+        """Create test data."""
+        from pyresample.geometry import create_area_def
+        area = create_area_def(area_id="test", projection={"proj": "latlong"},
+                               center=(0, 45), width=3, height=3, resolution=35)
+
+        self.data = xr.DataArray(da.from_array([[200, 250, 300],
+                                                [200, 250, 300],
+                                                [200, 250, 300]]),
+                                 dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]},
+                                 attrs={"area": area})
+
+    def test_default_behaviour(self):
+        """Test general default functionality of compositor."""
+        from satpy.composites import HighCloudCompositor
+        with dask.config.set(scheduler=CustomScheduler(max_computes=1)):
+            comp = HighCloudCompositor(name="test")
+            res = comp([self.data])
+            expexted_alpha = np.array([[1.0, 0.7142857, 0.0],
+                                       [1.0, 0.625, 0.0],
+                                       [1.0, 0.5555555, 0.0]])
+            expected = np.stack([self.data, expexted_alpha])
+            np.testing.assert_almost_equal(res.values, expected)
+
+
+class TestLowCloudCompositor:
+    """Test LowCloudCompositor."""
+
+    def setup_method(self):
+        """Create test data."""
+        self.btd = xr.DataArray(da.from_array([[0.0, 1.0, 10.0], [0.0, 1.0, 10.0], [0.0, 1.0, 10.0]]),
+                                dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]})
+        self.bt_win = xr.DataArray(da.from_array([[250, 250, 250], [250, 250, 250], [150, 150, 150]]),
+                                   dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]})
+        self.lsm = xr.DataArray(da.from_array([[0., 0., 0.], [1., 1., 1.], [0., 1., 0.]]),
+                                dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]})
+
+    def test_default_behaviour(self):
+        """Test general default functionality of compositor."""
+        from satpy.composites import LowCloudCompositor
+        with dask.config.set(scheduler=CustomScheduler(max_computes=1)):
+            comp = LowCloudCompositor(name="test")
+            res = comp([self.btd, self.bt_win, self.lsm])
+            expexted_alpha = np.array([[0.0, 0.25, 1.0], [0.0, 0.25, 1.0], [0.0, 0.0, 0.0]])
+            expected = np.stack([self.btd, expexted_alpha])
+            np.testing.assert_equal(res.values, expected)
+
+
 class TestSingleBandCompositor(unittest.TestCase):
     """Test the single-band compositor."""

From 169bfbd7f8a19528d33172b2e05a034e88c1505e Mon Sep 17 00:00:00 2001
From: Johan Strandgren
Date: Thu, 14 Dec 2023 07:50:44 +0000
Subject: [PATCH 0881/1416] Set alpha channel to transparent instead of
 brightness temperature difference to zero when hiding potential IR3.8
 channel noise for cold cloud tops.

---
 satpy/composites/__init__.py | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py
index bc5a199aa0..550040e5f5 100644
--- a/satpy/composites/__init__.py
+++ b/satpy/composites/__init__.py
@@ -1202,11 +1202,6 @@ def __call__(self, projectables, **kwargs):
         lsm = lsm.squeeze(drop=True)
         lsm = lsm.round()  # Make sure to have whole numbers in case of smearing from resampling
 
-        # Avoid spurious false alarms caused by noise in the 3.9um channel that can occur for very cold cloud tops
-        # TODO Consolidate this. Should it really be set to zero and thus within the threshold range?
What if the - # lower threshold would be changed to -1 - btd = btd.where(bt_win >= 230, 0.0) - # Call CloudCompositor for land surface pixels self.transition_min, self.transition_max = self.range_land res = super().__call__([btd.where(lsm.isin(self.values_land))], **kwargs) @@ -1218,6 +1213,10 @@ def __call__(self, projectables, **kwargs): # Compine resutls for land and sea/water surface pixels res = res.where(lsm.isin(self.values_land), res_sea) + # Make pixels with cold window channel brightness temperatures transparent to avoid spurious false + # alarms caused by noise in the 3.9um channel that can occur for very cold cloud tops + res.loc["A"] = res.sel(bands="A").where(bt_win >= 230, 0.0) + return res From 130e768f6f5d5216aa2a8945d2bd8098bf61c39d Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 07:52:35 +0000 Subject: [PATCH 0882/1416] Implement keyword for alpha channel inversion in CloudCompositor and use for LowCloudCompositor. Remove corresponding alpha channel inversion in enhancement recipe. --- satpy/composites/__init__.py | 9 +++++++-- satpy/etc/enhancements/generic.yaml | 4 ---- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 550040e5f5..9281dfae4f 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1015,7 +1015,7 @@ class CloudCompositor(GenericCompositor): """Detect clouds based on thresholding and use it as a mask for compositing.""" def __init__(self, name, transition_min=258.15, transition_max=298.15, - transition_gamma=3.0, **kwargs): + invert_alpha=False, transition_gamma=3.0, **kwargs): """Collect custom configuration values. Args: @@ -1028,6 +1028,7 @@ def __init__(self, name, transition_min=258.15, transition_max=298.15, """ self.transition_min = transition_min self.transition_max = transition_max + self.invert_alpha = invert_alpha self.transition_gamma = transition_gamma super(CloudCompositor, self).__init__(name, **kwargs) @@ -1050,6 +1051,9 @@ def __call__(self, projectables, **kwargs): alpha = alpha.where(data <= tr_max, 0.) alpha = alpha.where((data <= tr_min) | (data > tr_max), slope * data + offset) + if self.invert_alpha: + alpha = 1.0 - alpha + # gamma adjustment alpha **= gamma res = super(CloudCompositor, self).__call__((data, alpha), **kwargs) @@ -1155,6 +1159,7 @@ class LowCloudCompositor(CloudCompositor): def __init__(self, name, values_land=(1,), values_sea=(0,), range_land=(0.0, 4.0), range_sea=(0.0, 4.0), + invert_alpha=True, transition_gamma=1.0, **kwargs): """Init info. @@ -1182,7 +1187,7 @@ def __init__(self, name, values_land=(1,), values_sea=(0,), self.transition_gamma = transition_gamma self.transition_min = None # Placeholder for later use in CloudCompositor self.transition_max = None # Placeholder for later use in CloudCompositor - super().__init__(name, transition_gamma=transition_gamma, **kwargs) + super().__init__(name, invert_alpha=invert_alpha, transition_gamma=transition_gamma, **kwargs) def __call__(self, projectables, **kwargs): """Generate the composite. 
diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml
index efb9b2c6fa..dfd5b5f5c6 100644
--- a/satpy/etc/enhancements/generic.yaml
+++ b/satpy/etc/enhancements/generic.yaml
@@ -969,10 +969,6 @@ enhancements:
   geo_color_low_clouds:
     standard_name: geo_color_low_clouds
     operations:
-      - name: inverse
-        method: !!python/name:satpy.enhancements.invert
-        args:
-          - [False, True]
       - name: stretch
         method: !!python/name:satpy.enhancements.stretch
         kwargs:

From 2da489b9c5d48ebf1e78b9faa0db3fa353848948 Mon Sep 17 00:00:00 2001
From: Dario Stelitano
Date: Thu, 14 Dec 2023 08:55:56 +0100
Subject: [PATCH 0883/1416] Add hvplot tests

Tests for AreaDefinition data (single band and RGB) and for swath data
(single band only).

---
 satpy/tests/scene_tests/test_conversions.py | 47 +++++++++++++++++++++
 1 file changed, 47 insertions(+)

diff --git a/satpy/tests/scene_tests/test_conversions.py b/satpy/tests/scene_tests/test_conversions.py
index a886c3fa60..9b0dd9098e 100644
--- a/satpy/tests/scene_tests/test_conversions.py
+++ b/satpy/tests/scene_tests/test_conversions.py
@@ -81,6 +81,53 @@ def test_geoviews_basic_with_swath(self):
         # we assume that if we got something back, geoviews can use it
         assert gv_obj is not None
 
+    def test_hvplot_basic_with_area(self):
+        """Test converting a Scene to hvplot with an AreaDefinition."""
+        from pyresample.geometry import AreaDefinition
+        scn = Scene()
+        area = AreaDefinition("test", "test", "test",
+                              {"proj": "geos", "lon_0": -95.5, "h": 35786023.0},
+                              2, 2, [-200, -200, 200, 200])
+        scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"),
+                                  attrs={"start_time": datetime(2018, 1, 1),
+                                         "area": area, "units": "m"})
+        hv_obj = scn.to_hvplot()
+        # we assume that if we got something back, hvplot can use it
+        assert hv_obj is not None
+
+    def test_hvplot_rgb_with_area(self):
+        """Test converting a Scene to hvplot with an AreaDefinition."""
+        from pyresample.geometry import AreaDefinition
+        scn = Scene()
+        area = AreaDefinition("test", "test", "test",
+                              {"proj": "geos", "lon_0": -95.5, "h": 35786023.0},
+                              2, 2, [-200, -200, 200, 200])
+        scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"),
+                                  attrs={"start_time": datetime(2018, 1, 1),
+                                         "area": area, "units": "m"})
+        scn["ds2"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"),
+                                  attrs={"start_time": datetime(2018, 1, 1),
+                                         "area": area, "units": "m"})
+        scn["ds3"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"),
+                                  attrs={"start_time": datetime(2018, 1, 1),
+                                         "area": area, "units": "m"})
+        hv_obj = scn.to_hvplot()
+        # we assume that if we got something back, hvplot can use it
+        assert hv_obj is not None
+
+    def test_hvplot_basic_with_swath(self):
+        """Test converting a Scene to hvplot with a SwathDefinition."""
+        from pyresample.geometry import SwathDefinition
+        scn = Scene()
+        longitude = xr.DataArray(da.zeros((2, 2)))
+        latitude = xr.DataArray(da.zeros((2, 2)))
+        area = SwathDefinition(longitude, latitude)
+        scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"),
+                                  attrs={"start_time": datetime(2018, 1, 1),
+                                         "area": area, "units": "m"})
+        hv_obj = scn.to_hvplot()
+        # we assume that if we got something back, hvplot can use it
+        assert hv_obj is not None
 
 class TestToXarrayConversion:
     """Test Scene.to_xarray() conversion."""

From 56138aa7c9c5365b6ada8a367414d106d97f00ac Mon Sep 17 00:00:00 2001
From: Johan Strandgren
Date: Thu, 14 Dec 2023 08:44:13 +0000
Subject: [PATCH 0884/1416] Fix alpha inversion to keep dataset attributes.
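With xarray's default keep_attrs behaviour, binary arithmetic such as
"1.0 - alpha" returns a new DataArray and drops the attributes, while
assigning to ".data" replaces only the underlying array and leaves the
attrs untouched. A minimal standalone illustration of the difference (not
code from the repository):

    import numpy as np
    import xarray as xr

    alpha = xr.DataArray(np.array([0.2, 0.8]), attrs={"area": "dummy"})
    inverted = 1.0 - alpha           # inverted.attrs == {}
    alpha.data = 1.0 - alpha.data    # values inverted, alpha.attrs preserved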
--- satpy/composites/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 9281dfae4f..8089464be6 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1052,7 +1052,7 @@ def __call__(self, projectables, **kwargs): alpha = alpha.where((data <= tr_min) | (data > tr_max), slope * data + offset) if self.invert_alpha: - alpha = 1.0 - alpha + alpha.data = 1.0 - alpha.data # gamma adjustment alpha **= gamma From 43e6ce596e235dd5d2eea5e2e102e3c5c2813cf9 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 08:46:23 +0000 Subject: [PATCH 0885/1416] Move initialization of parent class variables to parent class init. --- satpy/composites/__init__.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 8089464be6..cb5a10663c 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1184,10 +1184,8 @@ def __init__(self, name, values_land=(1,), values_sea=(0,), self.values_sea = values_sea if type(values_sea) in [list, tuple] else [values_sea] self.range_land = range_land self.range_sea = range_sea - self.transition_gamma = transition_gamma - self.transition_min = None # Placeholder for later use in CloudCompositor - self.transition_max = None # Placeholder for later use in CloudCompositor - super().__init__(name, invert_alpha=invert_alpha, transition_gamma=transition_gamma, **kwargs) + super().__init__(name, transition_min=None, transition_max=None, + invert_alpha=invert_alpha, transition_gamma=transition_gamma, **kwargs) def __call__(self, projectables, **kwargs): """Generate the composite. From ffa15e1bb241ea7d2b4c599f22e82137ad0d7c41 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 08:48:22 +0000 Subject: [PATCH 0886/1416] Fix indentation. 
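The assertions added to TestHighCloudCompositor and TestLowCloudCompositor
were indented one level too deep, which left them inside the preceding
"with dask.config.set(scheduler=CustomScheduler(max_computes=1))" block;
this dedents them to the level of the rest of the test body.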
---
 satpy/tests/test_composites.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py
index 2fa87c1e1b..6179fc8053 100644
--- a/satpy/tests/test_composites.py
+++ b/satpy/tests/test_composites.py
@@ -962,7 +962,7 @@ def test_default_behaviour(self):
                                        [1.0, 0.625, 0.0],
                                        [1.0, 0.5555555, 0.0]])
             expected = np.stack([self.data, expexted_alpha])
-            np.testing.assert_almost_equal(res.values, expected)
+        np.testing.assert_almost_equal(res.values, expected)
@@ -985,7 +985,7 @@ def test_default_behaviour(self):
             res = comp([self.btd, self.bt_win, self.lsm])
             expexted_alpha = np.array([[0.0, 0.25, 1.0], [0.0, 0.25, 1.0], [0.0, 0.0, 0.0]])
             expected = np.stack([self.btd, expexted_alpha])
-            np.testing.assert_equal(res.values, expected)
+        np.testing.assert_equal(res.values, expected)

From d82bc6c7d8e102c673b83b34d834a16d27cf1140 Mon Sep 17 00:00:00 2001
From: Panu Lahtinen
Date: Thu, 14 Dec 2023 11:03:04 +0200
Subject: [PATCH 0887/1416] Handle UserWarnings from mismatched header blocks
 and observation times

---
 satpy/tests/reader_tests/test_ahi_hsd.py | 41 ++++++++++++++++--------
 1 file changed, 27 insertions(+), 14 deletions(-)

diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py
index 6ed3267723..2a2c608a46 100644
--- a/satpy/tests/reader_tests/test_ahi_hsd.py
+++ b/satpy/tests/reader_tests/test_ahi_hsd.py
@@ -331,7 +331,10 @@ def test_read_band(self, calibrate, *mocks):
         with _fake_hsd_handler() as fh:
             fh.data_info["number_of_columns"] = ncols
             fh.data_info["number_of_lines"] = nrows
-            im = fh.read_band(mock.MagicMock(), mock.MagicMock())
+            with warnings.catch_warnings():
+                # The header isn't valid
+                warnings.filterwarnings("ignore", category=UserWarning, message=r"Actual .* header size")
+                im = fh.read_band(mock.MagicMock(), mock.MagicMock())
             # Note: Within the earth's shape get_geostationary_mask() is True but the numpy.ma mask
             # is False
             mask = im.to_masked_array().mask
@@ -366,7 +369,10 @@ def test_read_band(self, calibrate, *mocks):
             # Test if masking space pixels disables with appropriate flag
             fh.mask_space = False
             with mock.patch("satpy.readers.ahi_hsd.AHIHSDFileHandler._mask_space") as mask_space:
-                fh.read_band(mock.MagicMock(), mock.MagicMock())
+                with warnings.catch_warnings():
+                    # The header isn't valid
+                    warnings.filterwarnings("ignore", category=UserWarning, message=r"Actual .* header size")
+                    fh.read_band(mock.MagicMock(), mock.MagicMock())
                 mask_space.assert_not_called()

     def test_read_band_from_actual_file(self, hsd_file_jp01):
         """Test read_bands on real data."""
         filename_info = {"segment": 1, "total_segments": 1}
         filetype_info = {"file_type": "blahB01"}
         fh = AHIHSDFileHandler(hsd_file_jp01, filename_info, filetype_info)
         key = {"name": "B01", "calibration": "counts", "resolution": 1000}
         import dask
         with dask.config.set({"array.chunk-size": "32MiB"}):
-            data = fh.read_band(
-                key,
-                {
-                    "units": "%",
-                    "standard_name": "toa_bidirectional_reflectance",
-                    "wavelength": 2,
-                    "resolution": 1000,
-                })
+            with warnings.catch_warnings():
+                # The header isn't valid
+                warnings.filterwarnings("ignore", category=UserWarning, message=r"Actual .* header size")
+                data = fh.read_band(
+                    key,
+                    {
+                        "units": "%",
+                        "standard_name": "toa_bidirectional_reflectance",
+                        "wavelength": 2,
+                        "resolution": 1000,
+                    })
         assert data.chunks == ((1100,) * 10, (1100,) * 10)
         assert data.dtype == data.compute().dtype
         assert data.dtype == np.float32
@@ -406,7 +415,10 @@ def test_scene_loading(self, calibrate, *mocks):
fh.data_info["number_of_columns"] = ncols fh.data_info["number_of_lines"] = nrows scn = Scene(reader="ahi_hsd", filenames=["HS_H08_20210225_0700_B07_FLDK_R20_S0110.DAT"]) - scn.load(["B07"]) + with warnings.catch_warnings(): + # The header isn't valid + warnings.filterwarnings("ignore", category=UserWarning, message=r"Actual .* header size") + scn.load(["B07"]) im = scn["B07"] # Make sure space masking worked @@ -461,9 +473,8 @@ def test_blocklen_error(self, *mocks): # Expected and actual blocklength do not match fp_.tell.return_value = 100 - with warnings.catch_warnings(record=True) as w: + with pytest.raises(UserWarning, match=r"Actual .* header size does not match expected"): fh._check_fpos(fp_, fpos, 0, "header 1") - assert len(w) > 0 def test_is_valid_time(self): """Test that valid times are correctly identified.""" @@ -480,7 +491,9 @@ def test_time_rounding(self): mocker.return_value = True assert fh._modify_observation_time_for_nominal(in_date) == datetime(2020, 1, 1, 3, 0, 0) mocker.return_value = False - assert fh._modify_observation_time_for_nominal(in_date) == datetime(2020, 1, 1, 12, 0, 0) + with pytest.raises(UserWarning, + match=r"Observation timeline is fill value, not rounding observation time"): + assert fh._modify_observation_time_for_nominal(in_date) == datetime(2020, 1, 1, 12, 0, 0) class TestAHICalibration(unittest.TestCase): From 5a84efde0e048ff8f13076bbc4e1bb38b835f657 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 11:06:09 +0200 Subject: [PATCH 0888/1416] Fix deprecated proj string property --- satpy/tests/reader_tests/test_ahi_l2_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_ahi_l2_nc.py b/satpy/tests/reader_tests/test_ahi_l2_nc.py index a90f24ea5d..2a697d0a4a 100644 --- a/satpy/tests/reader_tests/test_ahi_l2_nc.py +++ b/satpy/tests/reader_tests/test_ahi_l2_nc.py @@ -90,7 +90,7 @@ def test_ahi_l2_area_def(himl2_filename, caplog): warnings.filterwarnings("ignore", message=r"You will likely lose important projection information", category=UserWarning) - assert area_def.proj4_string == ps + assert area_def.proj_str == ps # Check case where input data is incorrect size. 
fh = ahil2_filehandler(himl2_filename) From 1225b077912251b0d760a6393e8a06a8fd203614 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 11:17:02 +0200 Subject: [PATCH 0889/1416] Filter orbit polynomial warning in tests --- .../test_seviri_l1b_hrit_setup.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py index c332f0d3f9..a885a5becc 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py @@ -49,6 +49,8 @@ def new_read_prologue(self): def get_fake_file_handler(observation_start_time, nlines, ncols, projection_longitude=0, orbit_polynomials=ORBIT_POLYNOMIALS): """Create a mocked SEVIRI HRIT file handler.""" + import warnings + prologue = get_fake_prologue(projection_longitude, orbit_polynomials) mda = get_fake_mda(nlines=nlines, ncols=ncols, start_time=observation_start_time) filename_info = get_fake_filename_info(observation_start_time) @@ -80,13 +82,16 @@ def get_fake_file_handler(observation_start_time, nlines, ncols, projection_long ) epilogue = mock.MagicMock(epilogue=epilogue) - reader = HRITMSGFileHandler( - "filename", - filename_info, - {"filetype": "info"}, - prologue, - epilogue - ) + with warnings.catch_warnings(): + # Orbit polynomial has no exact match, so filter the unnecessary warning + warnings.filterwarnings("ignore", category=UserWarning, message=r"No orbit polynomial valid for") + reader = HRITMSGFileHandler( + "filename", + filename_info, + {"filetype": "info"}, + prologue, + epilogue + ) reader.mda.update(mda) return reader From aae7c9ec28668074b89deab02e7b209a00964fcb Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 09:18:01 +0000 Subject: [PATCH 0890/1416] Move computation of expected data outside of the dask scheduler. 
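CustomScheduler from satpy.tests.utils counts how many times a dask graph
is computed and raises once more than max_computes computations happen, so
anything evaluated inside the "with dask.config.set(scheduler=...)" block
eats into that budget. Evaluating res.values after the block keeps the
budget for the compositor call itself. The pattern, sketched with
placeholder names (comp, data and expected stand in for the objects built
in the tests):

    import dask
    from satpy.tests.utils import CustomScheduler

    with dask.config.set(scheduler=CustomScheduler(max_computes=1)):
        res = comp([data])  # at most one dask compute allowed in here
    np.testing.assert_almost_equal(res.values, expected)  # computed outside, not counted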
--- satpy/tests/test_composites.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 6179fc8053..bcda95aba8 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -946,9 +946,7 @@ def setup_method(self): area = create_area_def(area_id="test", projection={"proj": "latlong"}, center=(0, 45), width=3, height=3, resolution=35) - self.data = xr.DataArray(da.from_array([[200, 250, 300], - [200, 250, 300], - [200, 250, 300]]), + self.data = xr.DataArray(da.from_array([[200, 250, 300], [200, 250, 300], [200, 250, 300]]), dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]}, attrs={"area": area}) @@ -958,11 +956,11 @@ def test_default_behaviour(self): with dask.config.set(scheduler=CustomScheduler(max_computes=1)): comp = HighCloudCompositor(name="test") res = comp([self.data]) - expexted_alpha = np.array([[1.0, 0.7142857, 0.0], - [1.0, 0.625, 0.0], - [1.0, 0.5555555, 0.0]]) - expected = np.stack([self.data, expexted_alpha]) - np.testing.assert_almost_equal(res.values, expected) + data = res.values + + expexted_alpha = np.array([[1.0, 0.7142857, 0.0], [1.0, 0.625, 0.0], [1.0, 0.5555555, 0.0]]) + expected = np.stack([self.data, expexted_alpha]) + np.testing.assert_almost_equal(data, expected) class TestLowCloudCompositor: @@ -983,9 +981,11 @@ def test_default_behaviour(self): with dask.config.set(scheduler=CustomScheduler(max_computes=1)): comp = LowCloudCompositor(name="test") res = comp([self.btd, self.bt_win, self.lsm]) - expexted_alpha = np.array([[0.0, 0.25, 1.0], [0.0, 0.25, 1.0], [0.0, 0.0, 0.0]]) - expected = np.stack([self.btd, expexted_alpha]) - np.testing.assert_equal(res.values, expected) + data = res.values + + expexted_alpha = np.array([[0.0, 0.25, 1.0], [0.0, 0.25, 1.0], [0.0, 0.0, 0.0]]) + expected = np.stack([self.btd, expexted_alpha]) + np.testing.assert_equal(data, expected) class TestSingleBandCompositor(unittest.TestCase): From 27358020edf6bd47579756b345c64676353e72c8 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 09:30:15 +0000 Subject: [PATCH 0891/1416] Add data type preservation tests for HighCloudCompositor and LowCloudCompositor. 
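The compositors are expected to carry float32 input through to a float32
result instead of silently upcasting to float64, and the dtype of a
dask-backed DataArray can be asserted from the graph metadata without
computing anything. A minimal standalone sketch of the mechanism (not code
from the repository):

    import dask.array as da
    import numpy as np
    import xarray as xr

    data = xr.DataArray(da.zeros((3, 3), dtype=np.float32), dims=("y", "x"))
    res = data + 1.0                      # stays lazy
    assert res.data.dtype == np.float32   # dtype read from metadata, no compute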
--- satpy/tests/test_composites.py | 48 ++++++++++++++++++++++++---------- 1 file changed, 34 insertions(+), 14 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index bcda95aba8..db29a8572e 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -946,47 +946,67 @@ def setup_method(self): area = create_area_def(area_id="test", projection={"proj": "latlong"}, center=(0, 45), width=3, height=3, resolution=35) - self.data = xr.DataArray(da.from_array([[200, 250, 300], [200, 250, 300], [200, 250, 300]]), - dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]}, - attrs={"area": area}) + self.data = xr.DataArray( + da.from_array(np.array([[200, 250, 300], [200, 250, 300], [200, 250, 300]], dtype=np.float32)), + dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]}, + attrs={"area": area} + ) - def test_default_behaviour(self): + def test_high_cloud_compositor(self): """Test general default functionality of compositor.""" from satpy.composites import HighCloudCompositor with dask.config.set(scheduler=CustomScheduler(max_computes=1)): comp = HighCloudCompositor(name="test") res = comp([self.data]) data = res.values - expexted_alpha = np.array([[1.0, 0.7142857, 0.0], [1.0, 0.625, 0.0], [1.0, 0.5555555, 0.0]]) expected = np.stack([self.data, expexted_alpha]) np.testing.assert_almost_equal(data, expected) + def test_high_cloud_compositor_dtype(self): + """Test that the datatype is not altered by the compositor.""" + from satpy.composites import HighCloudCompositor + comp = HighCloudCompositor(name="test") + res = comp([self.data]) + assert res.data.dtype == np.float32 + class TestLowCloudCompositor: """Test LowCloudCompositor.""" def setup_method(self): """Create test data.""" - self.btd = xr.DataArray(da.from_array([[0.0, 1.0, 10.0], [0.0, 1.0, 10.0], [0.0, 1.0, 10.0]]), - dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]}) - self.bt_win = xr.DataArray(da.from_array([[250, 250, 250], [250, 250, 250], [150, 150, 150]]), - dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]}) - self.lsm = xr.DataArray(da.from_array([[0., 0., 0.], [1., 1., 1.], [0., 1., 0.]]), - dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]}) - - def test_default_behaviour(self): + self.btd = xr.DataArray( + da.from_array(np.array([[0.0, 1.0, 10.0], [0.0, 1.0, 10.0], [0.0, 1.0, 10.0]], dtype=np.float32)), + dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]} + ) + self.bt_win = xr.DataArray( + da.from_array(np.array([[250, 250, 250], [250, 250, 250], [150, 150, 150]], dtype=np.float32)), + dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]} + ) + self.lsm = xr.DataArray( + da.from_array(np.array([[0., 0., 0.], [1., 1., 1.], [0., 1., 0.]], dtype=np.float32)), + dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]} + ) + + def test_low_cloud_compositor(self): """Test general default functionality of compositor.""" from satpy.composites import LowCloudCompositor with dask.config.set(scheduler=CustomScheduler(max_computes=1)): comp = LowCloudCompositor(name="test") res = comp([self.btd, self.bt_win, self.lsm]) data = res.values - expexted_alpha = np.array([[0.0, 0.25, 1.0], [0.0, 0.25, 1.0], [0.0, 0.0, 0.0]]) expected = np.stack([self.btd, expexted_alpha]) np.testing.assert_equal(data, expected) + def test_low_cloud_compositor_dtype(self): + """Test that the datatype is not altered by the compositor.""" + from satpy.composites import LowCloudCompositor + comp = LowCloudCompositor(name="test") + res = comp([self.btd, self.bt_win, 
self.lsm]) + assert res.data.dtype == np.float32 + class TestSingleBandCompositor(unittest.TestCase): """Test the single-band compositor.""" From 5e9763cabe573f968d5d6aafbe2c701b326be030 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 09:32:23 +0000 Subject: [PATCH 0892/1416] Move computation of data outside dask schedule and set max number of jobs to 0. --- satpy/tests/test_composites.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index db29a8572e..bf47c9ff9a 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -955,13 +955,12 @@ def setup_method(self): def test_high_cloud_compositor(self): """Test general default functionality of compositor.""" from satpy.composites import HighCloudCompositor - with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = HighCloudCompositor(name="test") res = comp([self.data]) - data = res.values expexted_alpha = np.array([[1.0, 0.7142857, 0.0], [1.0, 0.625, 0.0], [1.0, 0.5555555, 0.0]]) expected = np.stack([self.data, expexted_alpha]) - np.testing.assert_almost_equal(data, expected) + np.testing.assert_almost_equal(res.values, expected) def test_high_cloud_compositor_dtype(self): """Test that the datatype is not altered by the compositor.""" @@ -992,13 +991,12 @@ def setup_method(self): def test_low_cloud_compositor(self): """Test general default functionality of compositor.""" from satpy.composites import LowCloudCompositor - with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = LowCloudCompositor(name="test") res = comp([self.btd, self.bt_win, self.lsm]) - data = res.values expexted_alpha = np.array([[0.0, 0.25, 1.0], [0.0, 0.25, 1.0], [0.0, 0.0, 0.0]]) expected = np.stack([self.btd, expexted_alpha]) - np.testing.assert_equal(data, expected) + np.testing.assert_equal(res.values, expected) def test_low_cloud_compositor_dtype(self): """Test that the datatype is not altered by the compositor.""" From 88fe02cddfcceb95abb20cb93c2234134546854e Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 11:38:35 +0200 Subject: [PATCH 0893/1416] Fix DeprecationWarning of empty Numpy array falsy with list support --- satpy/readers/satpy_cf_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/satpy_cf_nc.py b/satpy/readers/satpy_cf_nc.py index 073c8d4cf5..2f9743e0f6 100644 --- a/satpy/readers/satpy_cf_nc.py +++ b/satpy/readers/satpy_cf_nc.py @@ -237,7 +237,7 @@ def _existing_datasets(self, configured_datasets=None): def fix_modifier_attr(self, ds_info): """Fix modifiers attribute.""" # Empty modifiers are read as [], which causes problems later - if "modifiers" in ds_info and not ds_info["modifiers"]: + if "modifiers" in ds_info and len(ds_info["modifiers"]) == 0: ds_info["modifiers"] = () try: try: From 6b3b40f0d11575266c1293aafcb8c1dde88c0e1e Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 09:53:07 +0000 Subject: [PATCH 0894/1416] Daskify computation of latitude array for HighCloudCompositor and preserve dtype. 
--- satpy/composites/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index cb5a10663c..9bb7043343 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1120,13 +1120,13 @@ def __call__(self, projectables, **kwargs): raise ValueError(f"Expected 1 dataset, got {len(projectables)}") data = projectables[0] - _, lats = data.attrs["area"].get_lonlats() + _, lats = data.attrs["area"].get_lonlats(chunks=data.chunks, dtype=data.dtype) lats = np.abs(lats) slope = (self.transition_min[1] - self.transition_min[0]) / (self.latitude_min[1] - self.latitude_min[0]) offset = self.transition_min[0] - slope * self.latitude_min[0] - tr_min_lat = xr.DataArray(name="tr_min_lat", coords=data.coords, dims=data.dims) + tr_min_lat = xr.DataArray(name="tr_min_lat", coords=data.coords, dims=data.dims).astype(data.dtype) tr_min_lat = tr_min_lat.where(lats >= self.latitude_min[0], self.transition_min[0]) tr_min_lat = tr_min_lat.where(lats <= self.latitude_min[1], self.transition_min[1]) tr_min_lat = tr_min_lat.where((lats < self.latitude_min[0]) | (lats > self.latitude_min[1]), From 48c1c63ef830421b631ed1a6e4aae5620ac0beba Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 09:56:53 +0000 Subject: [PATCH 0895/1416] use a common variable for testing dtype. --- satpy/tests/test_composites.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index bf47c9ff9a..f0515c0f93 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -945,9 +945,9 @@ def setup_method(self): from pyresample.geometry import create_area_def area = create_area_def(area_id="test", projection={"proj": "latlong"}, center=(0, 45), width=3, height=3, resolution=35) - + self.dtype = np.float32 self.data = xr.DataArray( - da.from_array(np.array([[200, 250, 300], [200, 250, 300], [200, 250, 300]], dtype=np.float32)), + da.from_array(np.array([[200, 250, 300], [200, 250, 300], [200, 250, 300]], dtype=self.dtype)), dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]}, attrs={"area": area} ) @@ -967,7 +967,7 @@ def test_high_cloud_compositor_dtype(self): from satpy.composites import HighCloudCompositor comp = HighCloudCompositor(name="test") res = comp([self.data]) - assert res.data.dtype == np.float32 + assert res.data.dtype == self.dtype class TestLowCloudCompositor: @@ -975,16 +975,17 @@ class TestLowCloudCompositor: def setup_method(self): """Create test data.""" + self.dtype = np.float32 self.btd = xr.DataArray( - da.from_array(np.array([[0.0, 1.0, 10.0], [0.0, 1.0, 10.0], [0.0, 1.0, 10.0]], dtype=np.float32)), + da.from_array(np.array([[0.0, 1.0, 10.0], [0.0, 1.0, 10.0], [0.0, 1.0, 10.0]], dtype=self.dtype)), dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]} ) self.bt_win = xr.DataArray( - da.from_array(np.array([[250, 250, 250], [250, 250, 250], [150, 150, 150]], dtype=np.float32)), + da.from_array(np.array([[250, 250, 250], [250, 250, 250], [150, 150, 150]], dtype=self.dtype)), dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]} ) self.lsm = xr.DataArray( - da.from_array(np.array([[0., 0., 0.], [1., 1., 1.], [0., 1., 0.]], dtype=np.float32)), + da.from_array(np.array([[0., 0., 0.], [1., 1., 1.], [0., 1., 0.]], dtype=self.dtype)), dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]} ) @@ -1003,7 +1004,7 @@ def test_low_cloud_compositor_dtype(self): from satpy.composites import 
LowCloudCompositor comp = LowCloudCompositor(name="test") res = comp([self.btd, self.bt_win, self.lsm]) - assert res.data.dtype == np.float32 + assert res.data.dtype == self.dtype class TestSingleBandCompositor(unittest.TestCase): From e9530eff917ac8d6e70c65c89f0810624d70cff4 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 09:57:45 +0000 Subject: [PATCH 0896/1416] Remove obsolete TODOs. --- satpy/composites/__init__.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 9bb7043343..034e8a7821 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1114,8 +1114,6 @@ def __call__(self, projectables, **kwargs): `projectables` is expected to be a list or tuple with a single element: - index 0: Brightness temperature of a thermal infrared window channel (e.g. 10.5 microns). """ - # TODO Optimize and make sure that there are no early unnecessary dask computations. Is there a way to avoid - # computation of the latitude array? if len(projectables) != 1: raise ValueError(f"Expected 1 dataset, got {len(projectables)}") @@ -1196,7 +1194,6 @@ def __call__(self, projectables, **kwargs): - index 1. Brightness temperature of the window channel (used to filter out noise-induced false alarms). - index 2: Land-Sea-Mask. """ - # TODO Optimize and make sure that there are no early unnecessary dask computations if len(projectables) != 3: raise ValueError(f"Expected 3 datasets, got {len(projectables)}") From 0c2d5dd5901141789b9411a7e9cd1c2b0ff2ec77 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 12:29:22 +0200 Subject: [PATCH 0897/1416] Catch missing radiance adjustment warnings --- satpy/tests/reader_tests/test_slstr_l1b.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_slstr_l1b.py b/satpy/tests/reader_tests/test_slstr_l1b.py index 63a43c9c79..2bc384c5e2 100644 --- a/satpy/tests/reader_tests/test_slstr_l1b.py +++ b/satpy/tests/reader_tests/test_slstr_l1b.py @@ -18,10 +18,10 @@ """Module for testing the satpy.readers.nc_slstr module.""" import unittest import unittest.mock as mock -import warnings from datetime import datetime import numpy as np +import pytest import xarray as xr from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange @@ -151,7 +151,8 @@ def test_instantiate(self, bvs_, xr_): test = NCSLSTR1B("somedir/S1_radiance_an.nc", filename_info, "c") assert test.view == "nadir" assert test.stripe == "a" - test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) + with pytest.raises(UserWarning, match=r"No radiance adjustment supplied for channel"): + test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) assert test.start_time == good_start assert test.end_time == good_end xr_.open_dataset.assert_called() @@ -214,9 +215,8 @@ def test_radiance_calibration(self, xr_): test = NCSLSTR1B("somedir/S1_radiance_co.nc", filename_info, "c") # Check warning is raised if we don't have calibration - with warnings.catch_warnings(record=True) as w: + with pytest.raises(UserWarning, match=r"No radiance adjustment supplied for channel"): test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) - assert issubclass(w[-1].category, UserWarning) # Check user calibration is used correctly test = NCSLSTR1B("somedir/S1_radiance_co.nc", filename_info, "c", From 1057aab5b92886dde6df6004a512c5a268d1f090 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 12:58:13 +0200 
Subject: [PATCH 0898/1416] Fix expected proj string --- satpy/tests/reader_tests/test_ahi_l2_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_ahi_l2_nc.py b/satpy/tests/reader_tests/test_ahi_l2_nc.py index 2a697d0a4a..910e0515a1 100644 --- a/satpy/tests/reader_tests/test_ahi_l2_nc.py +++ b/satpy/tests/reader_tests/test_ahi_l2_nc.py @@ -77,7 +77,7 @@ def test_ahi_l2_area_def(himl2_filename, caplog): """Test reader handles area definition correctly.""" import warnings - ps = "+proj=geos +lon_0=140.7 +h=35785863 +x_0=0 +y_0=0 +a=6378137 +rf=298.257024882273 +units=m +no_defs +type=crs" + ps = "+a=6378137 +h=35785863 +lon_0=140.7 +no_defs +proj=geos +rf=298.257024882273 +type=crs +units=m +x_0=0 +y_0=0" # Check case where input data is correct size. fh = ahil2_filehandler(himl2_filename) From e9e85189d2a9ace207dac849cef90c1ccae2cd02 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 12:58:45 +0200 Subject: [PATCH 0899/1416] Fix pytest.raises() to pytest.warns() --- satpy/tests/reader_tests/test_ahi_hsd.py | 4 ++-- satpy/tests/reader_tests/test_slstr_l1b.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 2a2c608a46..faa348b9af 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -473,7 +473,7 @@ def test_blocklen_error(self, *mocks): # Expected and actual blocklength do not match fp_.tell.return_value = 100 - with pytest.raises(UserWarning, match=r"Actual .* header size does not match expected"): + with pytest.warns(UserWarning, match=r"Actual .* header size does not match expected"): fh._check_fpos(fp_, fpos, 0, "header 1") def test_is_valid_time(self): @@ -491,7 +491,7 @@ def test_time_rounding(self): mocker.return_value = True assert fh._modify_observation_time_for_nominal(in_date) == datetime(2020, 1, 1, 3, 0, 0) mocker.return_value = False - with pytest.raises(UserWarning, + with pytest.warns(UserWarning, match=r"Observation timeline is fill value, not rounding observation time"): assert fh._modify_observation_time_for_nominal(in_date) == datetime(2020, 1, 1, 12, 0, 0) diff --git a/satpy/tests/reader_tests/test_slstr_l1b.py b/satpy/tests/reader_tests/test_slstr_l1b.py index 2bc384c5e2..b6784d4e2b 100644 --- a/satpy/tests/reader_tests/test_slstr_l1b.py +++ b/satpy/tests/reader_tests/test_slstr_l1b.py @@ -151,7 +151,7 @@ def test_instantiate(self, bvs_, xr_): test = NCSLSTR1B("somedir/S1_radiance_an.nc", filename_info, "c") assert test.view == "nadir" assert test.stripe == "a" - with pytest.raises(UserWarning, match=r"No radiance adjustment supplied for channel"): + with pytest.warns(UserWarning, match=r"No radiance adjustment supplied for channel"): test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) assert test.start_time == good_start assert test.end_time == good_end @@ -215,7 +215,7 @@ def test_radiance_calibration(self, xr_): test = NCSLSTR1B("somedir/S1_radiance_co.nc", filename_info, "c") # Check warning is raised if we don't have calibration - with pytest.raises(UserWarning, match=r"No radiance adjustment supplied for channel"): + with pytest.warns(UserWarning, match=r"No radiance adjustment supplied for channel"): test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) # Check user calibration is used correctly From 1b57de0c2e98914498e49a8be7e4a2e12367899a Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 
13:12:46 +0200 Subject: [PATCH 0900/1416] Handle more orbit polynomial warnings --- satpy/tests/reader_tests/test_seviri_base.py | 9 +++++++-- satpy/tests/reader_tests/test_seviri_l1b_nc.py | 3 ++- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index 86f684bb5e..a07bb799bc 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -338,8 +338,12 @@ class TestOrbitPolynomialFinder: def test_get_orbit_polynomial(self, orbit_polynomials, time, orbit_polynomial_exp): """Test getting the satellite locator.""" + import warnings finder = OrbitPolynomialFinder(orbit_polynomials) - orbit_polynomial = finder.get_orbit_polynomial(time=time) + with warnings.catch_warnings(): + # There's no exact polynomial time match, filter the warning + warnings.filterwarnings("ignore", category=UserWarning, message=r"No orbit polynomial valid") + orbit_polynomial = finder.get_orbit_polynomial(time=time) assert orbit_polynomial == orbit_polynomial_exp @pytest.mark.parametrize( @@ -356,7 +360,8 @@ def test_get_orbit_polynomial_exceptions(self, orbit_polynomials, time): """Test exceptions thrown while getting the satellite locator.""" finder = OrbitPolynomialFinder(orbit_polynomials) with pytest.raises(NoValidOrbitParams): - finder.get_orbit_polynomial(time=time) + with pytest.warns(UserWarning, match=r"No orbit polynomial valid"): + finder.get_orbit_polynomial(time=time) class TestMeirinkSlope: diff --git a/satpy/tests/reader_tests/test_seviri_l1b_nc.py b/satpy/tests/reader_tests/test_seviri_l1b_nc.py index 42e038a766..cd5e2c713f 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_nc.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_nc.py @@ -381,7 +381,8 @@ def test_satpos_no_valid_orbit_polynomial(self, file_handler): } file_handler.nc["orbit_polynomial_start_time_day"] = 0 file_handler.nc["orbit_polynomial_end_time_day"] = 0 - res = file_handler.get_dataset(dataset_id, dataset_info) + with pytest.warns(UserWarning, match=r"No orbit polynomial valid for"): + res = file_handler.get_dataset(dataset_id, dataset_info) assert "satellite_actual_longitude" not in res.attrs[ "orbital_parameters"] From bdbf3f95c41c7e7e6ef8bb8bdbec894aa36c07cf Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 13:16:15 +0200 Subject: [PATCH 0901/1416] Fix proj authority usage --- satpy/readers/smos_l2_wind.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/smos_l2_wind.py b/satpy/readers/smos_l2_wind.py index c982397c3c..4a909ee2e4 100644 --- a/satpy/readers/smos_l2_wind.py +++ b/satpy/readers/smos_l2_wind.py @@ -170,6 +170,6 @@ def get_area_def(self, dsid): description = "SMOS L2 Wind Equirectangular Projection" area_id = "smos_eqc" proj_id = "equirectangular" - proj_dict = {"init": self["/attr/geospatial_bounds_vertical_crs"]} - area_def = AreaDefinition(area_id, description, proj_id, proj_dict, width, height, area_extent, ) + proj_str = self["/attr/geospatial_bounds_vertical_crs"] + area_def = AreaDefinition(area_id, description, proj_id, proj_str, width, height, area_extent, ) return area_def From d0dd78e8889ece1b9241be5e1b5876352eefb919 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 13:24:45 +0200 Subject: [PATCH 0902/1416] Suppress warning about missing DataArray coordinate in test data saving --- satpy/tests/reader_tests/test_satpy_cf_nc.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 
deletions(-) diff --git a/satpy/tests/reader_tests/test_satpy_cf_nc.py b/satpy/tests/reader_tests/test_satpy_cf_nc.py index f279196ab4..ec7ac34be4 100644 --- a/satpy/tests/reader_tests/test_satpy_cf_nc.py +++ b/satpy/tests/reader_tests/test_satpy_cf_nc.py @@ -260,12 +260,15 @@ def test_write_and_read_with_area_definition(self, cf_scene, nc_filename): def test_write_and_read_with_swath_definition(self, cf_scene, nc_filename): """Save a dataset with a swath definition to file with cf_writer and read the data again.""" - cf_scene.save_datasets(writer="cf", - filename=nc_filename, - engine="h5netcdf", - flatten_attrs=True, - pretty=True, - datasets=["swath_data"]) + with warnings.catch_warnings(): + # Filter out warning about missing lon/lat DataArray coordinates + warnings.filterwarnings("ignore", category=UserWarning, message=r"Coordinate .* referenced") + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="h5netcdf", + flatten_attrs=True, + pretty=True, + datasets=["swath_data"]) scn_ = Scene(reader="satpy_cf_nc", filenames=[nc_filename]) scn_.load(["swath_data"]) From 8811914eb328173d846606a60ffeb6a58c0e9fb5 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 13:30:48 +0200 Subject: [PATCH 0903/1416] Silence warning about PNG not having geolocation information --- satpy/tests/reader_tests/test_generic_image.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/satpy/tests/reader_tests/test_generic_image.py b/satpy/tests/reader_tests/test_generic_image.py index 0ea143269f..cd347ce07e 100644 --- a/satpy/tests/reader_tests/test_generic_image.py +++ b/satpy/tests/reader_tests/test_generic_image.py @@ -21,6 +21,7 @@ import dask.array as da import numpy as np +import pytest import xarray as xr from satpy.tests.utils import make_dataid @@ -128,10 +129,13 @@ def tearDown(self): def test_png_scene(self): """Test reading PNG images via satpy.Scene().""" + from rasterio.errors import NotGeoreferencedWarning + from satpy import Scene fname = os.path.join(self.base_dir, "test_l.png") - scn = Scene(reader="generic_image", filenames=[fname]) + with pytest.warns(NotGeoreferencedWarning, match=r"Dataset has no geotransform"): + scn = Scene(reader="generic_image", filenames=[fname]) scn.load(["image"]) assert scn["image"].shape == (1, self.y_size, self.x_size) assert scn.sensor_names == {"images"} @@ -140,7 +144,8 @@ def test_png_scene(self): assert "area" not in scn["image"].attrs fname = os.path.join(self.base_dir, "20180101_0000_test_la.png") - scn = Scene(reader="generic_image", filenames=[fname]) + with pytest.warns(NotGeoreferencedWarning, match=r"Dataset has no geotransform"): + scn = Scene(reader="generic_image", filenames=[fname]) scn.load(["image"]) data = da.compute(scn["image"].data) assert scn["image"].shape == (1, self.y_size, self.x_size) From b2e9841af1e1b4408fadfe86e23fab59ed488746 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 13:32:46 +0200 Subject: [PATCH 0904/1416] Use width and height attributes instead of x/y_size --- satpy/readers/fci_l2_nc.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index b03cbdfdaa..cd9bf5788f 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -372,7 +372,7 @@ def _construct_area_def(self, dataset_id): # Construct area definition from standardized area definition. 
stand_area_def = get_area_def(area_naming["area_id"]) - if (stand_area_def.x_size != self.ncols) | (stand_area_def.y_size != self.nlines): + if (stand_area_def.width != self.ncols) | (stand_area_def.height != self.nlines): raise NotImplementedError("Unrecognised AreaDefinition.") mod_area_extent = self._modify_area_extent(stand_area_def.area_extent) @@ -382,8 +382,8 @@ def _construct_area_def(self, dataset_id): stand_area_def.description, "", stand_area_def.crs, - stand_area_def.x_size, - stand_area_def.y_size, + stand_area_def.width, + stand_area_def.height, mod_area_extent) return area_def From b4974152c4723bebd0b88533c61d367682c37ef7 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 13:48:01 +0200 Subject: [PATCH 0905/1416] Get number of lines/columns from .sizes attribute --- satpy/readers/ahi_l2_nc.py | 4 ++-- satpy/readers/goes_imager_nc.py | 8 ++++---- satpy/readers/seviri_l1b_nc.py | 4 ++-- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index 7785c3994d..d6e6caa887 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -73,8 +73,8 @@ def __init__(self, filename, filename_info, filetype_info): raise ValueError("File is not a full disk scene") self.sensor = self.nc.attrs["instrument_name"].lower() - self.nlines = self.nc.dims["Columns"] - self.ncols = self.nc.dims["Rows"] + self.nlines = self.nc.sizes["Columns"] + self.ncols = self.nc.sizes["Rows"] self.platform_name = self.nc.attrs["satellite_name"] self.platform_shortname = filename_info["platform"] self._meta = None diff --git a/satpy/readers/goes_imager_nc.py b/satpy/readers/goes_imager_nc.py index 8ec3219eec..1b88919886 100644 --- a/satpy/readers/goes_imager_nc.py +++ b/satpy/readers/goes_imager_nc.py @@ -615,8 +615,8 @@ def __init__(self, filename, filename_info, filetype_info, geo_data=None): mask_and_scale=False, chunks={"xc": CHUNK_SIZE, "yc": CHUNK_SIZE}) self.sensor = "goes_imager" - self.nlines = self.nc.dims["yc"] - self.ncols = self.nc.dims["xc"] + self.nlines = self.nc.sizes["yc"] + self.ncols = self.nc.sizes["xc"] self.platform_name = self._get_platform_name( self.nc.attrs["Satellite Sensor"]) self.platform_shortname = self.platform_name.replace("-", "").lower() @@ -1124,8 +1124,8 @@ def __init__(self, filename, filename_info, filetype_info): mask_and_scale=False, chunks={"xc": CHUNK_SIZE, "yc": CHUNK_SIZE}) self.sensor = "goes_imager" - self.nlines = self.nc.dims["yc"] - self.ncols = self.nc.dims["xc"] + self.nlines = self.nc.sizes["yc"] + self.ncols = self.nc.sizes["xc"] self.platform_name = GOESNCBaseFileHandler._get_platform_name( self.nc.attrs["Satellite Sensor"]) self.platform_shortname = self.platform_name.replace("-", "").lower() diff --git a/satpy/readers/seviri_l1b_nc.py b/satpy/readers/seviri_l1b_nc.py index 82e3b15297..22b55eceda 100644 --- a/satpy/readers/seviri_l1b_nc.py +++ b/satpy/readers/seviri_l1b_nc.py @@ -139,8 +139,8 @@ def get_metadata(self): "h": 35785831.00, "ssp_longitude": ssp_lon} - self.mda["number_of_lines"] = int(self.nc.dims["y"]) - self.mda["number_of_columns"] = int(self.nc.dims["x"]) + self.mda["number_of_lines"] = int(self.nc.sizes["y"]) + self.mda["number_of_columns"] = int(self.nc.sizes["x"]) # only needed for HRV channel which is not implemented yet # self.mda['hrv_number_of_lines'] = int(self.nc.dims['num_rows_hrv']) From 7b56afac24354e91d3347086035fe85e8c721c3a Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 13:59:22 +0200 Subject: [PATCH 0906/1416] 
Make HRPT navigation timezone ignorant --- satpy/readers/hrpt.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/hrpt.py b/satpy/readers/hrpt.py index 2a54eed664..c4862e8169 100644 --- a/satpy/readers/hrpt.py +++ b/satpy/readers/hrpt.py @@ -78,7 +78,7 @@ def time_seconds(tc_array, year): word = tc_array[:, 3] msecs += word & 1023 return (np.datetime64( - str(year) + "-01-01T00:00:00Z", "s") + + str(year) + "-01-01T00:00:00", "s") + msecs[:].astype("timedelta64[ms]") + (day - 1)[:].astype("timedelta64[D]")) @@ -224,7 +224,7 @@ def calibrate_solar_channel(self, data, key): """Calibrate a solar channel.""" from pygac.calibration import calibrate_solar julian_days = ((np.datetime64(self.start_time) - - np.datetime64(str(self.year) + "-01-01T00:00:00Z")) + - np.datetime64(str(self.year) + "-01-01T00:00:00")) / np.timedelta64(1, "D")) data = calibrate_solar(data, _get_channel_index(key), self.year, julian_days, self.calibrator) From e6dc6165a4115191d8637b21b09467ab5db04681 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 12:16:19 +0000 Subject: [PATCH 0907/1416] Modify tests for NDVIHybridGreen compositor to assert 0 dask computations in compositor code. --- satpy/tests/compositor_tests/test_spectral.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index e46cff4d0c..7472016c00 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -97,7 +97,7 @@ def setup_method(self): def test_ndvi_hybrid_green(self): """Test General functionality with linear scaling from ndvi to blend fraction.""" - with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), standard_name="toa_bidirectional_reflectance") @@ -107,12 +107,12 @@ def test_ndvi_hybrid_green(self): assert isinstance(res.data, da.Array) assert res.attrs["name"] == "ndvi_hybrid_green" assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" - data = res.values + data = res.values np.testing.assert_array_almost_equal(data, np.array([[0.2633, 0.3071], [0.2115, 0.3420]]), decimal=4) def test_ndvi_hybrid_green_dtype(self): """Test that the datatype is not altered by the compositor.""" - with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), standard_name="toa_bidirectional_reflectance") res = comp((self.c01, self.c02, self.c03)).compute() @@ -120,15 +120,15 @@ def test_ndvi_hybrid_green_dtype(self): def test_nonlinear_scaling(self): """Test non-linear scaling using `strength` term.""" - with dask.config.set(scheduler=CustomScheduler(max_computes=1)): + with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), strength=2.0, prerequisites=(0.51, 0.65, 0.85), standard_name="toa_bidirectional_reflectance") res = comp((self.c01, self.c02, self.c03)) - res_np = res.data.compute() - assert res.dtype == res_np.dtype - assert res.dtype == np.float32 + res_np = res.data.compute() + assert res.dtype == res_np.dtype + assert res.dtype == np.float32 np.testing.assert_array_almost_equal(res.data, 
np.array([[0.2646, 0.3075], [0.2120, 0.3471]]), decimal=4) def test_invalid_strength(self): From 3259552f53e90b19e132acc1e8ce8a5e1144ef99 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 12:24:16 +0000 Subject: [PATCH 0908/1416] Fix typos. --- satpy/composites/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index a7cc3d6e1c..09985d6ba1 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1079,7 +1079,7 @@ class HighCloudCompositor(CloudCompositor): - transition_min = transition_min[0] where abs(latitude) < latitude_min(0) - transition_min = transition_min[1] where abs(latitude) > latitude_min(0) - - transition_min = linear interpolation between transition_min[0] and transition_min[1] as a funtion + - transition_min = linear interpolation between transition_min[0] and transition_min[1] as a function of where abs(latitude). """ @@ -1139,7 +1139,7 @@ def __call__(self, projectables, **kwargs): class LowCloudCompositor(CloudCompositor): """Detect low-level clouds based on thresholding and use it as a mask for compositing during night-time. - This compsitor computes the brightness temperature difference between a window channel (e.g. 10.5 micron) + This compositor computes the brightness temperature difference between a window channel (e.g. 10.5 micron) and the near-infrared channel e.g. (3.8 micron) and uses this brightness temperature difference, `BTD`, to create a partially transparent mask for compositing. From d0993fdaceb7657d5b405123d3136f3087153c07 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 14:36:15 +0200 Subject: [PATCH 0909/1416] Convert floats and ints to match the nc.Variable datatype --- satpy/tests/reader_tests/test_seadas_l2.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/satpy/tests/reader_tests/test_seadas_l2.py b/satpy/tests/reader_tests/test_seadas_l2.py index 01de26e96b..d3037e6b55 100644 --- a/satpy/tests/reader_tests/test_seadas_l2.py +++ b/satpy/tests/reader_tests/test_seadas_l2.py @@ -198,9 +198,12 @@ def _add_variable_to_netcdf_file(nc, var_name, var_info): fill_value=var_info.get("fill_value")) v[:] = var_info["data"] for attr_key, attr_val in var_info["attrs"].items(): + if isinstance(attr_val, (int, float)): + attr_val = v.dtype.type(attr_val) setattr(v, attr_key, attr_val) + class TestSEADAS: """Test the SEADAS L2 file reader.""" From 2da18407a77d2b4a3833e4c39e0384095fa0a9f9 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 14:57:35 +0200 Subject: [PATCH 0910/1416] Move AreaDefinition.from_cf() PROJ warning suppression to Pyresample --- satpy/readers/satpy_cf_nc.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/satpy/readers/satpy_cf_nc.py b/satpy/readers/satpy_cf_nc.py index 2f9743e0f6..1dfd68a206 100644 --- a/satpy/readers/satpy_cf_nc.py +++ b/satpy/readers/satpy_cf_nc.py @@ -318,15 +318,8 @@ def get_dataset(self, ds_id, ds_info): def get_area_def(self, dataset_id): """Get area definition from CF complient netcdf.""" - import warnings - try: - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - # FIXME: This should be silenced in Pyresample - area = AreaDefinition.from_cf(self.filename) + area = AreaDefinition.from_cf(self.filename) return area except ValueError: # No CF compliant projection information was found in the netcdf file or From 
1bb565587ca5cd8ff845ff4d4de5045bb052ae8d Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 15:03:55 +0200 Subject: [PATCH 0911/1416] Replace GradientSearchResampler with the helper method in docstring --- satpy/resample.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/resample.py b/satpy/resample.py index 336e3fec11..8b8f67dabf 100644 --- a/satpy/resample.py +++ b/satpy/resample.py @@ -42,7 +42,7 @@ "bucket_sum", "Sum Bucket Resampling", :class:`~satpy.resample.BucketSum` "bucket_count", "Count Bucket Resampling", :class:`~satpy.resample.BucketCount` "bucket_fraction", "Fraction Bucket Resampling", :class:`~satpy.resample.BucketFraction` - "gradient_search", "Gradient Search Resampling", :class:`~pyresample.gradient.GradientSearchResampler` + "gradient_search", "Gradient Search Resampling", :meth:`~pyresample.gradient.create_gradient_search_resampler` The resampling algorithm used can be specified with the ``resampler`` keyword argument and defaults to ``nearest``: From 5c49858463b1a4d9fa4dd93312923278e3ba7aa9 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 14 Dec 2023 14:46:08 +0100 Subject: [PATCH 0912/1416] Use fixtures for test AMV file --- satpy/tests/reader_tests/test_fci_l2_nc.py | 113 ++++++++++----------- 1 file changed, 56 insertions(+), 57 deletions(-) diff --git a/satpy/tests/reader_tests/test_fci_l2_nc.py b/satpy/tests/reader_tests/test_fci_l2_nc.py index 44906c5040..84681b0f02 100644 --- a/satpy/tests/reader_tests/test_fci_l2_nc.py +++ b/satpy/tests/reader_tests/test_fci_l2_nc.py @@ -508,66 +508,65 @@ def test_byte_extraction(self):         assert dataset.values == 0  -class TestFciL2NCAMVFileHandler: -    """Test the FciL2NCAMVFileHandler reader.""" - -    def setup_method(self): -        """Set up the test by creating a test file and opening it with the reader.""" -        # Easiest way to test the reader is to create a test netCDF file on the fly -        # Create unique filenames to prevent race conditions when tests are run in parallel -        self.test_file = str(uuid.uuid4()) + ".nc" -        with Dataset(self.test_file, "w") as nc: -            # Create dimensions -            nc.createDimension("number_of_winds", 50000) - -            # add global attributes -            nc.data_source = "test_data_source" -            nc.platform = "test_platform" - -            # Add datasets -            latitude = nc.createVariable("latitude", np.float32, dimensions=("number_of_winds",)) -            latitude[:] = np.arange(50000) - -            longitude = nc.createVariable("y", np.float32, dimensions=("number_of_winds",)) -            longitude[:] = np.arange(50000) - -            qi = nc.createVariable("product_quality", np.int8) -            qi[:] = 99. -            test_dataset = nc.createVariable("test_dataset", np.float32, -                                             dimensions="number_of_winds") -            test_dataset[:] = np.ones(50000) -            test_dataset.test_attr = "attr" -            test_dataset.units = "test_units" - -            mtg_geos_projection = nc.createVariable("mtg_geos_projection", int, dimensions=()) -            mtg_geos_projection.longitude_of_projection_origin = 0.0 -            mtg_geos_projection.semi_major_axis = 6378137. -            mtg_geos_projection.inverse_flattening = 298.257223563 -            mtg_geos_projection.perspective_point_height = 35786400.
+@pytest.fixture(scope="module") +def amv_file(tmp_path_factory): + """Create an AMV file.""" + test_file = tmp_path_factory.mktemp("data") / "fci_l2_amv.nc" + + with Dataset(test_file, "w") as nc: + # Create dimensions + nc.createDimension("number_of_winds", 50000) + + # add global attributes + nc.data_source = "test_data_source" + nc.platform = "test_platform" + + # Add datasets + latitude = nc.createVariable("latitude", np.float32, dimensions=("number_of_winds",)) + latitude[:] = np.arange(50000) + + longitude = nc.createVariable("y", np.float32, dimensions=("number_of_winds",)) + longitude[:] = np.arange(50000) + + qi = nc.createVariable("product_quality", np.int8) + qi[:] = 99. + + test_dataset = nc.createVariable("test_dataset", np.float32, + dimensions="number_of_winds") + test_dataset[:] = np.ones(50000) + test_dataset.test_attr = "attr" + test_dataset.units = "test_units" + + mtg_geos_projection = nc.createVariable("mtg_geos_projection", int, dimensions=()) + mtg_geos_projection.longitude_of_projection_origin = 0.0 + mtg_geos_projection.semi_major_axis = 6378137. + mtg_geos_projection.inverse_flattening = 298.257223563 + mtg_geos_projection.perspective_point_height = 35786400. + return test_file + + +@pytest.fixture(scope="module") +def amv_filehandler(amv_file): + """Create an AMV filehandler.""" + return FciL2NCAMVFileHandler(filename=amv_file, + filename_info={"channel":"test_channel"}, + filetype_info={} + ) - self.fh = FciL2NCAMVFileHandler(filename=self.test_file, - filename_info={"channel":"test_channel"}, - filetype_info={} - ) - def tearDown(self): - """Remove the previously created test file.""" - # First delete the file handler, forcing the file to be closed if still open - del self.fh - # Then we can safely remove the file from the system - with suppress(OSError): - os.remove(self.test_file) +class TestFciL2NCAMVFileHandler: + """Test the FciL2NCAMVFileHandler reader.""" - def test_all_basic(self): + def test_all_basic(self, amv_filehandler, amv_file): """Test all basic functionalities.""" - assert self.fh.spacecraft_name == "test_platform" - assert self.fh.sensor_name == "test_data_source" - assert self.fh.ssp_lon == 0.0 + assert amv_filehandler.spacecraft_name == "test_platform" + assert amv_filehandler.sensor_name == "test_data_source" + assert amv_filehandler.ssp_lon == 0.0 - global_attributes = self.fh._get_global_attributes() + global_attributes = amv_filehandler._get_global_attributes() expected_global_attributes = { - "filename": self.test_file, + "filename": amv_file, "spacecraft_name": "test_platform", "sensor": "test_data_source", "platform_name": "test_platform", @@ -575,9 +574,9 @@ def test_all_basic(self): } assert global_attributes == expected_global_attributes - def test_dataset(self): + def test_dataset(self, amv_filehandler): """Test the correct execution of the get_dataset function with a valid file_key.""" - dataset = self.fh.get_dataset(make_dataid(name="test_dataset", resolution=2000), + dataset = amv_filehandler.get_dataset(make_dataid(name="test_dataset", resolution=2000), {"name": "test_dataset", "file_key": "test_dataset", "fill_value": -999, @@ -587,9 +586,9 @@ def test_dataset(self): assert dataset.attrs["units"] == "test_units" assert dataset.attrs["fill_value"] == -999 - def test_dataset_with_invalid_filekey(self): + def test_dataset_with_invalid_filekey(self, amv_filehandler): """Test the correct execution of the get_dataset function with an invalid file_key.""" - invalid_dataset = self.fh.get_dataset(make_dataid(name="test_invalid", 
resolution=2000), + invalid_dataset = amv_filehandler.get_dataset(make_dataid(name="test_invalid", resolution=2000), {"name": "test_invalid", "file_key": "test_invalid", "fill_value": -999, From 24422ad5ea9e59e41b1daac50d6842e7d8f0b9ab Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 16:17:11 +0200 Subject: [PATCH 0913/1416] Use CRS objects for testing area equality --- satpy/tests/reader_tests/test_ahi_hsd.py | 18 +++++------------- 1 file changed, 5 insertions(+), 13 deletions(-) diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index faa348b9af..000abb2b58 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -105,7 +105,8 @@ class TestAHIHSDNavigation(unittest.TestCase): @mock.patch("satpy.readers.ahi_hsd.np.fromfile") def test_region(self, fromfile, np2str): """Test region navigation.""" - from pyresample.utils import proj4_radius_parameters + from pyproj import CRS + np2str.side_effect = lambda x: x m = mock.mock_open() with mock.patch("satpy.readers.ahi_hsd.open", m, create=True): @@ -140,18 +141,9 @@ def test_region(self, fromfile, np2str): "spare": ""} area_def = fh.get_area_def(None) - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - proj_dict = area_def.proj_dict - a, b = proj4_radius_parameters(proj_dict) - assert a == 6378137.0 - assert b == 6356752.3 - assert proj_dict["h"] == 35785863.0 - assert proj_dict["lon_0"] == 140.7 - assert proj_dict["proj"] == "geos" - assert proj_dict["units"] == "m" + expected_crs = CRS.from_dict(dict(a=6378137.0, b=6356752.3, h= 35785863.0, + lon_0=140.7, proj="geos", units="m")) + assert area_def.crs == expected_crs np.testing.assert_allclose(area_def.area_extent, (592000.0038256242, 4132000.0267018233, 1592000.0102878273, 5132000.033164027)) From 55d3622c343a3a5923b622318c71a1073aa796b5 Mon Sep 17 00:00:00 2001 From: andream Date: Thu, 14 Dec 2023 16:07:56 +0100 Subject: [PATCH 0914/1416] add failing test for unsorted segments --- satpy/tests/test_yaml_reader.py | 35 +++++++++++++++++++++++++++++---- 1 file changed, 31 insertions(+), 4 deletions(-) diff --git a/satpy/tests/test_yaml_reader.py b/satpy/tests/test_yaml_reader.py index 41439a1ac6..3f1db6a977 100644 --- a/satpy/tests/test_yaml_reader.py +++ b/satpy/tests/test_yaml_reader.py @@ -971,10 +971,11 @@ def _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_p get_segment_position_info = MagicMock() get_segment_position_info.return_value = chk_pos_info - fh = MagicMock() filetype_info = {"expected_segments": expected_segments, "file_type": "filetype1"} filename_info = {"segment": segment} + + fh = _create_mocked_basic_fh() fh.filetype_info = filetype_info fh.filename_info = filename_info fh.get_area_def = get_area_def @@ -983,6 +984,12 @@ def _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_p return fh, seg_area +def _create_mocked_basic_fh(): + fake_fh = MagicMock() + fake_fh.filename_info = {} + fake_fh.filetype_info = {} + return fake_fh + class TestGEOSegmentYAMLReader(unittest.TestCase): """Test GEOSegmentYAMLReader.""" @@ -993,9 +1000,7 @@ def test_get_expected_segments(self, cfh): from satpy.readers.yaml_reader import GEOSegmentYAMLReader reader = GEOSegmentYAMLReader() - fake_fh = MagicMock() - fake_fh.filename_info = {} - fake_fh.filetype_info = {} + fake_fh = _create_mocked_basic_fh() cfh.return_value = 
{"ft1": [fake_fh]} # default (1) @@ -1030,6 +1035,28 @@ def test_get_expected_segments(self, cfh): es = created_fhs["ft1"][0].filename_info["segment"] assert es == 5 + @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) + @patch.object(yr.FileYAMLReader, "create_filehandlers") + def test_segments_sorting(self, cfh): + """Test that segment filehandlers are sorted by segment number.""" + from satpy.readers.yaml_reader import GEOSegmentYAMLReader + reader = GEOSegmentYAMLReader() + + # create filehandlers with different segment numbers + fake_fh_1 = _create_mocked_basic_fh() + fake_fh_1.filename_info["segment"] = 1 + fake_fh_2 = _create_mocked_basic_fh() + fake_fh_2.filename_info["segment"] = 2 + fake_fh_3 = _create_mocked_basic_fh() + fake_fh_3.filename_info["segment"] = 3 + + # put the filehandlers in an unsorted order + cfh.return_value = {"ft1": [fake_fh_1, fake_fh_3, fake_fh_2]} + + # check that the created filehandlers are sorted by segment number + created_fhs = reader.create_filehandlers(["fake.nc"]) + assert [fh.filename_info["segment"] for fh in created_fhs["ft1"]] == [1, 2, 3] + @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch("satpy.readers.yaml_reader.FileYAMLReader._load_dataset") @patch("satpy.readers.yaml_reader.xr") From be666eb1aa6c0f5dd763e42787e4b264bd7ac2e3 Mon Sep 17 00:00:00 2001 From: youva Aoun Date: Thu, 14 Dec 2023 15:12:24 +0000 Subject: [PATCH 0915/1416] Fix name for amv lat/lon to avoid dupliacte in the yaml --- satpy/etc/readers/fci_l2_nc.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index f9c12849eb..1ad5d576a0 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -2837,13 +2837,13 @@ datasets: file_key: channel_id standard_name: channel_id - latitude: + amv_latitude: name: latitude file_type: nc_fci_amv file_key: latitude standard_name: latitude - longitude: + amv_longitude: name: longitude file_type: nc_fci_amv file_key: longitude From 27041a451f3e6ca4933f75a7167ab272ae23dc07 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Thu, 14 Dec 2023 16:32:53 +0100 Subject: [PATCH 0916/1416] add control for swath data --- satpy/scene.py | 30 ++++++++++++++++++++++-------- 1 file changed, 22 insertions(+), 8 deletions(-) diff --git a/satpy/scene.py b/satpy/scene.py index e2d292a992..aea5b44cfe 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -39,10 +39,10 @@ from satpy.utils import convert_remote_files_to_fsspec, get_storage_options_from_reader_kwargs from satpy.writers import load_writer -try: - import hvplot.xarray as hvplot_xarray # noqa -except ImportError: - hvplot_xarray = None +#try: +# import hvplot.xarray as hvplot_xarray # noqa +#except ImportError: +# hvplot_xarray = None LOG = logging.getLogger(__name__) @@ -1092,6 +1092,7 @@ def to_hvplot(self, datasets=None, *args, **kwargs): plot.ash+plot.IR_108 """ + def _get_crs(xarray_ds): return xarray_ds.area.to_cartopy_crs() @@ -1112,17 +1113,27 @@ def _plot_quadmesh(xarray_ds, variable, **defaults): clabel=f"[{_get_units(xarray_ds,variable)}]", title=title, **defaults) - if hvplot_xarray is None: - raise ImportError("'hvplot' must be installed to use this feature") + #def _check_hvplot_library(): + # if hvplot_xarray is None: + # raise ImportError("'hvplot' must be installed to use this feature") +# +# _check_hvplot_library() plot = Overlay() xarray_ds = self.to_xarray_dataset(datasets) - ccrs = _get_crs(xarray_ds) + + if hasattr(xarray_ds, "area") and 
hasattr(xarray_ds.area, "to_cartopy_crs"): + ccrs = _get_crs(xarray_ds) + defaults={"x":"x","y":"y"} + else: + ccrs = None + defaults={"x":"longitude","y":"latitude"} + if datasets is None: datasets = list(xarray_ds.keys()) - defaults = dict(x="x", y="y", data_aspect=1, project=True, geo=True, + defaults.update(data_aspect=1, project=True, geo=True, crs=ccrs, projection=ccrs, rasterize=True, coastline="110m", cmap="Plasma", responsive=True, dynamic=False, framewise=True, colorbar=False, global_extent=False, xlabel="Longitude", @@ -1130,6 +1141,9 @@ def _plot_quadmesh(xarray_ds, variable, **defaults): defaults.update(kwargs) + #if "latitude" in xarray_ds.coords: + # defaults.update({"x":"longitude","y":"latitude"}) + for element in datasets: title = f"{element} @ {_get_timestamp(xarray_ds)}" if xarray_ds[element].shape[0] == 3: From 9dd1b28e7f9b9086a9c9872d75c17e03abee544e Mon Sep 17 00:00:00 2001 From: andream Date: Thu, 14 Dec 2023 16:43:58 +0100 Subject: [PATCH 0917/1416] move sorting from _load_dataset to create_filehandlers so that it acts also when pad_data is False --- satpy/readers/yaml_reader.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py index ff3599052a..73b5f7b6ee 100644 --- a/satpy/readers/yaml_reader.py +++ b/satpy/readers/yaml_reader.py @@ -1157,7 +1157,13 @@ class GEOSegmentYAMLReader(GEOFlippableFileYAMLReader): """ def create_filehandlers(self, filenames, fh_kwargs=None): - """Create file handler objects and determine expected segments for each.""" + """Create file handler objects and determine expected segments for each. + + Additionally, sort the filehandlers by segment number to avoid + issues with filenames where start_time or alphabetic sorting does not + produce the correct order. + + """ created_fhs = super(GEOSegmentYAMLReader, self).create_filehandlers( filenames, fh_kwargs=fh_kwargs) @@ -1171,6 +1177,11 @@ def create_filehandlers(self, filenames, fh_kwargs=None): # add segment key-values for FCI filehandlers if "segment" not in fh.filename_info: fh.filename_info["segment"] = fh.filename_info.get("count_in_repeat_cycle", 1) + + # sort by segment number + for file_type in created_fhs.keys(): + created_fhs[file_type] = sorted(created_fhs[file_type], key=lambda x: x.filename_info.get("segment", 1)) + return created_fhs def _load_dataset(self, dsid, ds_info, file_handlers, dim="y", pad_data=True): @@ -1322,11 +1333,9 @@ def _find_missing_segments(file_handlers, ds_info, dsid): failure = True counter = 1 expected_segments = 1 - # get list of file handlers in segment order - # (ex. 
first segment, second segment, etc) - handlers = sorted(file_handlers, key=lambda x: x.filename_info.get("segment", 1)) + projectable = None - for fh in handlers: + for fh in file_handlers: if fh.filetype_info["file_type"] in ds_info["file_type"]: expected_segments = fh.filetype_info["expected_segments"] From d8af76815c7e1a5db334e2bbf7807faaccdbed6e Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 17:47:12 +0200 Subject: [PATCH 0918/1416] Use CRS objects as much as possible instead of suppressing the warnings --- satpy/readers/nwcsaf_nc.py | 7 ++- satpy/tests/reader_tests/test_ahi_hsd.py | 18 ++---- satpy/tests/reader_tests/test_ahi_l2_nc.py | 10 ++-- satpy/tests/reader_tests/test_geos_area.py | 17 ++---- .../reader_tests/test_goes_imager_hrit.py | 20 ++----- satpy/tests/reader_tests/test_gpm_imerg.py | 8 +-- satpy/tests/reader_tests/test_hrit_base.py | 20 ++----- .../reader_tests/test_insat3d_img_l1b_h5.py | 8 +-- .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 17 +----- satpy/tests/reader_tests/test_nwcsaf_msg.py | 13 +---- satpy/tests/reader_tests/test_nwcsaf_nc.py | 19 +++--- .../reader_tests/test_oceancolorcci_l3_nc.py | 1 + satpy/tests/reader_tests/test_osisaf_l3.py | 58 +++++-------------- .../reader_tests/test_seviri_l1b_hrit.py | 37 ++++-------- 14 files changed, 71 insertions(+), 182 deletions(-) diff --git a/satpy/readers/nwcsaf_nc.py b/satpy/readers/nwcsaf_nc.py index e3b3dc2d3d..e9809bdce5 100644 --- a/satpy/readers/nwcsaf_nc.py +++ b/satpy/readers/nwcsaf_nc.py @@ -347,8 +347,13 @@ def get_area_def(self, dsid): @staticmethod def _ensure_crs_extents_in_meters(crs, area_extent): """Fix units in Earth shape, satellite altitude and 'units' attribute.""" + import warnings if "kilo" in crs.axis_info[0].unit_name: - proj_dict = crs.to_dict() + with warnings.catch_warnings(): + # The proj dict route is the only feasible way to modify the area, suppress the warning it causes + warnings.filterwarnings("ignore", category=UserWarning, + message="You will likely lose important projection information") + proj_dict = crs.to_dict() proj_dict["units"] = "m" if "a" in proj_dict: proj_dict["a"] *= 1000. 
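The test updates that follow all apply the same pattern: instead of reading back `area_def.proj_dict` (which round-trips through a PROJ dict and triggers the "lose important projection information" warning), the expected projection is built as a `pyproj.CRS` object and compared for semantic equality. A minimal sketch of the pattern, using illustrative geostationary parameters rather than values from any particular test:

    from pyproj import CRS
    from pyresample.geometry import AreaDefinition

    # Illustrative full-disk geostationary area; all parameters are placeholders.
    area_def = AreaDefinition("fd", "full disk", "geos",
                              {"proj": "geos", "h": 35785831.0, "lon_0": 0.0, "units": "m"},
                              100, 100,
                              (-5500000.0, -5500000.0, 5500000.0, 5500000.0))

    # CRS equality is semantic: it does not depend on parameter order or on
    # whether the CRS was built from a dict, a PROJ string or WKT, and it never
    # touches proj_dict, so no UserWarning is emitted.
    expected_crs = CRS(dict(proj="geos", h=35785831.0, lon_0=0.0, units="m"))
    assert area_def.crs == expected_crs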
diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 000abb2b58..7c88c9e5ac 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -151,7 +151,8 @@ def test_region(self, fromfile, np2str): @mock.patch("satpy.readers.ahi_hsd.np.fromfile") def test_segment(self, fromfile, np2str): """Test segment navigation.""" - from pyresample.utils import proj4_radius_parameters + from pyproj import CRS + np2str.side_effect = lambda x: x m = mock.mock_open() with mock.patch("satpy.readers.ahi_hsd.open", m, create=True): @@ -184,18 +185,9 @@ def test_segment(self, fromfile, np2str): "spare": ""} area_def = fh.get_area_def(None) - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - proj_dict = area_def.proj_dict - a, b = proj4_radius_parameters(proj_dict) - assert a == 6378137.0 - assert b == 6356752.3 - assert proj_dict["h"] == 35785863.0 - assert proj_dict["lon_0"] == 140.7 - assert proj_dict["proj"] == "geos" - assert proj_dict["units"] == "m" + expected_crs = CRS.from_dict(dict(a=6378137.0, b=6356752.3, h= 35785863.0, + lon_0=140.7, proj="geos", units="m")) + assert area_def.crs == expected_crs np.testing.assert_allclose(area_def.area_extent, (-5500000.035542117, -3300000.021325271, 5500000.035542117, -2200000.0142168473)) diff --git a/satpy/tests/reader_tests/test_ahi_l2_nc.py b/satpy/tests/reader_tests/test_ahi_l2_nc.py index 910e0515a1..817738bb82 100644 --- a/satpy/tests/reader_tests/test_ahi_l2_nc.py +++ b/satpy/tests/reader_tests/test_ahi_l2_nc.py @@ -75,7 +75,7 @@ def test_startend(himl2_filename): def test_ahi_l2_area_def(himl2_filename, caplog): """Test reader handles area definition correctly.""" - import warnings + from pyproj import CRS ps = "+a=6378137 +h=35785863 +lon_0=140.7 +no_defs +proj=geos +rf=298.257024882273 +type=crs +units=m +x_0=0 +y_0=0" @@ -86,11 +86,9 @@ def test_ahi_l2_area_def(himl2_filename, caplog): assert area_def.width == dimensions["Columns"] assert area_def.height == dimensions["Rows"] assert np.allclose(area_def.area_extent, exp_ext) - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - assert area_def.proj_str == ps + + expected_crs = CRS(ps) + assert area_def.crs == expected_crs # Check case where input data is incorrect size. 
fh = ahil2_filehandler(himl2_filename) diff --git a/satpy/tests/reader_tests/test_geos_area.py b/satpy/tests/reader_tests/test_geos_area.py index 077c0b0c7e..0b104c2672 100644 --- a/satpy/tests/reader_tests/test_geos_area.py +++ b/satpy/tests/reader_tests/test_geos_area.py @@ -138,9 +138,7 @@ def test_get_xy_from_linecol(self): def test_get_area_definition(self): """Test the retrieval of the area definition.""" - import warnings - - from pyresample.utils import proj4_radius_parameters + from pyproj import CRS pdict, extent = self.make_pdict_ext(1, "N2S") good_res = (-3000.4032785810186, -3000.4032785810186) @@ -148,16 +146,9 @@ def test_get_area_definition(self): a_def = get_area_definition(pdict, extent) assert a_def.area_id == pdict["a_name"] assert a_def.resolution == good_res - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - assert a_def.proj_dict["proj"] == "geos" - assert a_def.proj_dict["units"] == "m" - a, b = proj4_radius_parameters(a_def.proj_dict) - assert a == 6378169 - assert b == 6356583.8 - assert a_def.proj_dict["h"] == 35785831 + + expected_crs = CRS(dict(proj="geos", units="m", a=6378169, b=6356583.8, h=35785831)) + assert a_def.crs == expected_crs def test_sampling_to_lfac_cfac(self): """Test conversion from angular sampling to line/column offset.""" diff --git a/satpy/tests/reader_tests/test_goes_imager_hrit.py b/satpy/tests/reader_tests/test_goes_imager_hrit.py index 48078d0f0e..0cb0ff0959 100644 --- a/satpy/tests/reader_tests/test_goes_imager_hrit.py +++ b/satpy/tests/reader_tests/test_goes_imager_hrit.py @@ -22,13 +22,10 @@ from unittest import mock import numpy as np -from pyresample.utils import proj4_radius_parameters from xarray import DataArray from satpy.readers.goes_imager_hrit import ( ALTITUDE, - EQUATOR_RADIUS, - POLE_RADIUS, HRITGOESFileHandler, HRITGOESPrologueFileHandler, make_gvar_float, @@ -172,7 +169,7 @@ def test_get_dataset(self, base_get_dataset): def test_get_area_def(self): """Test getting the area definition.""" - import warnings + from pyproj import CRS self.reader.mda.update({ "cfac": 10216334, @@ -186,17 +183,10 @@ def test_get_area_def(self): resolution=3000) area = self.reader.get_area_def(dsid) - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - a, b = proj4_radius_parameters(area.proj_dict) - assert area.proj_dict["h"] == ALTITUDE - assert area.proj_dict["lon_0"] == 100.1640625 - assert area.proj_dict["proj"] == "geos" - assert area.proj_dict["units"] == "m" - assert a == EQUATOR_RADIUS - assert b == POLE_RADIUS + expected_crs = CRS(dict(h=ALTITUDE, lon_0=100.1640625, proj="geos", units="m", + rf=295.488065897001, a=6378169)) + assert area.crs == expected_crs + assert area.width == 2816 assert area.height == 464 assert area.area_id == "goes-15_goes_imager_fd_3km" diff --git a/satpy/tests/reader_tests/test_gpm_imerg.py b/satpy/tests/reader_tests/test_gpm_imerg.py index 96dc65bbd4..a75e59863f 100644 --- a/satpy/tests/reader_tests/test_gpm_imerg.py +++ b/satpy/tests/reader_tests/test_gpm_imerg.py @@ -105,7 +105,7 @@ def tearDown(self): def test_load_data(self): """Test loading data.""" - import warnings + from pyproj import CRS from satpy.readers import load_reader @@ -132,10 +132,6 @@ def test_load_data(self): assert res["IRprecipitation"].resolution == 0.1 assert res["IRprecipitation"].area.width == 3600 assert 
res["IRprecipitation"].area.height == 1800 - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - assert res["IRprecipitation"].area.proj_dict == pdict + assert res["IRprecipitation"].area.crs == CRS(pdict) np.testing.assert_almost_equal(res["IRprecipitation"].area.area_extent, (-179.95, -89.95, 179.95, 89.95), 5) diff --git a/satpy/tests/reader_tests/test_hrit_base.py b/satpy/tests/reader_tests/test_hrit_base.py index 133b45280e..12317f11f1 100644 --- a/satpy/tests/reader_tests/test_hrit_base.py +++ b/satpy/tests/reader_tests/test_hrit_base.py @@ -221,23 +221,13 @@ def test_get_area_extent(self): def test_get_area_def(self): """Test getting an area definition.""" - import warnings - - from pyresample.utils import proj4_radius_parameters + from pyproj import CRS area = self.reader.get_area_def("VIS06") - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - proj_dict = area.proj_dict - a, b = proj4_radius_parameters(proj_dict) - assert a == 6378169.0 - assert b == 6356583.8 - assert proj_dict["h"] == 35785831.0 - assert proj_dict["lon_0"] == 44.0 - assert proj_dict["proj"] == "geos" - assert proj_dict["units"] == "m" + + expected_crs = CRS(dict(proj="geos", a=6378169.0, b=6356583.8, h=35785831.0, lon_0=44.0, units="m")) + assert area.crs == expected_crs + assert area.area_extent == (-77771774058.38356, -77771774058.38356, 30310525626438.438, 3720765401003.719) diff --git a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py index 486177d2d5..9fa7af224d 100644 --- a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py +++ b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py @@ -273,18 +273,12 @@ def insat_filehandler(insat_filename): def test_filehandler_returns_area(insat_filehandler): """Test that filehandle returns an area.""" - import warnings - fh = insat_filehandler ds_id = make_dataid(name="MIR", resolution=4000, calibration="brightness_temperature") area_def = fh.get_area_def(ds_id) _ = area_def.get_lonlats(chunks=1000) - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - assert "+lon_0=" + str(subsatellite_longitude) in area_def.crs.to_proj4() + assert subsatellite_longitude == area_def.crs.to_cf()["longitude_of_projection_origin"] def test_filehandler_has_start_and_end_time(insat_filehandler): diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index 56bbd5212f..04694c145a 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -28,7 +28,6 @@ import xarray as xr from pyproj import CRS from pyresample.geometry import AreaDefinition -from pyresample.utils import proj4_radius_parameters from satpy.readers.mviri_l1b_fiduceo_nc import ( ALTITUDE, @@ -497,22 +496,10 @@ def test_angle_cache(self, interp_tiepoints, file_handler): def test_get_area_definition(self, file_handler, name, resolution, area_exp): """Test getting area definitions.""" - import warnings - dataset_id = make_dataid(name=name, resolution=resolution) area = file_handler.get_area_def(dataset_id) - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose 
important projection information", - category=UserWarning) - a, b = proj4_radius_parameters(area.proj_dict) - a_exp, b_exp = proj4_radius_parameters(area_exp.proj_dict) - assert a == a_exp - assert b == b_exp - assert area.width == area_exp.width - assert area.height == area_exp.height - for key in ["h", "lon_0", "proj", "units"]: - assert area.proj_dict[key] == area_exp.proj_dict[key] + + assert area.crs == area_exp.crs np.testing.assert_allclose(area.area_extent, area_exp.area_extent) def test_calib_exceptions(self, file_handler): diff --git a/satpy/tests/reader_tests/test_nwcsaf_msg.py b/satpy/tests/reader_tests/test_nwcsaf_msg.py index 761f84d380..1c8e0fb793 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_msg.py +++ b/satpy/tests/reader_tests/test_nwcsaf_msg.py @@ -472,7 +472,7 @@ def cut_h5_object_ref(root, attr): def test_get_area_def(self): """Get the area definition.""" - import warnings + from pyproj import CRS from satpy.readers.nwcsaf_msg2013_hdf5 import Hdf5NWCSAF from satpy.tests.utils import make_dataid @@ -488,15 +488,8 @@ def test_get_area_def(self): for i in range(4): assert area_def.area_extent[i] == pytest.approx(aext_res[i], abs=1e-4) - proj_dict = AREA_DEF_DICT["proj_dict"] - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - assert proj_dict["proj"] == area_def.proj_dict["proj"] - # Not all elements passed on Appveyor, so skip testing every single element of the proj-dict: - # for key in proj_dict: - # self.assertEqual(proj_dict[key], area_def.proj_dict[key]) + expected_crs = CRS(AREA_DEF_DICT["proj_dict"]) + assert expected_crs == area_def.crs assert AREA_DEF_DICT["x_size"] == area_def.width assert AREA_DEF_DICT["y_size"] == area_def.height diff --git a/satpy/tests/reader_tests/test_nwcsaf_nc.py b/satpy/tests/reader_tests/test_nwcsaf_nc.py index a3235e99e7..4f6755f390 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_nc.py +++ b/satpy/tests/reader_tests/test_nwcsaf_nc.py @@ -506,18 +506,13 @@ def test_get_dataset_scales_and_offsets_palette_meanings_using_other_dataset(sel def _check_filehandler_area_def(file_handler, dsid): - import warnings - - correct_h = float(PROJ["gdal_projection"].split("+h=")[-1]) - correct_a = float(PROJ["gdal_projection"].split("+a=")[-1].split()[0]) - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - area_definition = file_handler.get_area_def(dsid) - assert area_definition.proj_dict["h"] == correct_h - assert area_definition.proj_dict["a"] == correct_a - assert area_definition.proj_dict["units"] == "m" + from pyproj import CRS + + area_definition = file_handler.get_area_def(dsid) + + expected_crs = CRS(PROJ["gdal_projection"]) + assert area_definition.crs == expected_crs + correct_extent = (PROJ["gdal_xgeo_up_left"], PROJ["gdal_ygeo_low_right"], PROJ["gdal_xgeo_low_right"], diff --git a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py index da99fd2d27..90b9d4432f 100644 --- a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py +++ b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py @@ -180,6 +180,7 @@ def test_get_area_def(self, area_exp, fake_file_dict): warnings.filterwarnings("ignore", message=r"You will likely lose important projection information", category=UserWarning) + # The corresponding CRS objects do not match even if the proj dicts match, so use 
the dicts assert area.proj_dict == area_exp.proj_dict def test_bad_fname(self, fake_dataset, fake_file_dict): diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py index f42a1d4648..798aabf61e 100644 --- a/satpy/tests/reader_tests/test_osisaf_l3.py +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -16,12 +16,12 @@ """Module for testing the satpy.readers.osisaf_l3 module.""" import os -import warnings from datetime import datetime import numpy as np import pytest import xarray as xr +from pyproj import CRS from satpy import DataQuery from satpy.readers.osisaf_l3_nc import OSISAFL3NCFileHandler @@ -224,15 +224,9 @@ def test_get_area_def_stere(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_polar_stereographic" - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - assert area_def.proj_dict["a"] == 6378273.0 - assert area_def.proj_dict["lat_0"] == -90 - assert area_def.proj_dict["lat_ts"] == -70 - assert area_def.proj_dict["lon_0"] == 0 - assert area_def.proj_dict["proj"] == "stere" + + expected_crs = CRS(dict(a=6378273.0, lat_0=-90, lat_ts=-70, lon_0=0, proj="stere")) + assert area_def.crs == expected_crs assert area_def.width == 5 assert area_def.height == 2 @@ -248,14 +242,9 @@ def test_get_area_def_ease(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_lambert_azimuthal_equal_area" - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - assert area_def.proj_dict["R"] == 6371228 - assert area_def.proj_dict["lat_0"] == -90 - assert area_def.proj_dict["lon_0"] == 0 - assert area_def.proj_dict["proj"] == "laea" + + expected_crs = CRS(dict(R=6371228, lat_0=-90, lon_0=0, proj="laea")) + assert area_def.crs == expected_crs assert area_def.width == 5 assert area_def.height == 2 @@ -288,15 +277,9 @@ def test_get_area_def_stere(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_polar_stereographic" - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - assert area_def.proj_dict["a"] == 6378273.0 - assert area_def.proj_dict["lat_0"] == -90 - assert area_def.proj_dict["lat_ts"] == -70 - assert area_def.proj_dict["lon_0"] == 0 - assert area_def.proj_dict["proj"] == "stere" + + expected_crs = CRS(dict(a=6378273.0, lat_0=-90, lat_ts=-70, lon_0=0, proj="stere")) + assert area_def.crs == expected_crs assert area_def.width == 5 assert area_def.height == 2 @@ -331,12 +314,9 @@ def test_get_area_def_grid(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_geographic_area" - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - assert area_def.proj_dict["datum"] == "WGS84" - assert area_def.proj_dict["proj"] == "longlat" + + expected_crs = CRS(dict(datum="WGS84", proj="longlat")) + assert area_def.crs == expected_crs assert area_def.width == 5 assert area_def.height == 2 @@ -370,15 +350,9 @@ def test_get_area_def_stere(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_polar_stereographic" - with warnings.catch_warnings(): - 
warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - assert area_def.proj_dict["a"] == 6378273.0 - assert area_def.proj_dict["lat_0"] == -90 - assert area_def.proj_dict["lat_ts"] == -70 - assert area_def.proj_dict["lon_0"] == 0 - assert area_def.proj_dict["proj"] == "stere" + + expected_crs = CRS(dict(a=6378273.0, lat_0=-90, lat_ts=-70, lon_0=0, proj="stere")) + assert area_def.crs == expected_crs assert area_def.width == 5 assert area_def.height == 2 diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py index e928468228..3fe00edc80 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py @@ -18,7 +18,6 @@ """The HRIT msg reader tests package.""" import unittest -import warnings from datetime import datetime from unittest import mock @@ -26,6 +25,7 @@ import pytest import xarray as xr from numpy import testing as npt +from pyproj import CRS import satpy.tests.reader_tests.test_seviri_l1b_hrit_setup as setup from satpy.readers.seviri_l1b_hrit import HRITMSGEpilogueFileHandler, HRITMSGFileHandler, HRITMSGPrologueFileHandler @@ -117,21 +117,12 @@ def test_get_dataset_non_fill(self, calibrate, parent_get_dataset): def test_get_area_def(self): """Test getting the area def.""" - from pyresample.utils import proj4_radius_parameters area = self.reader.get_area_def(make_dataid(name="HRV", resolution=1000)) assert area.area_extent == (-45561979844414.07, -3720765401003.719, 45602912357076.38, 77771774058.38356) - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - proj_dict = area.proj_dict - a, b = proj4_radius_parameters(proj_dict) - assert a == 6378169.0 - assert b == pytest.approx(6356583.8) - assert proj_dict["h"] == 35785831.0 - assert proj_dict["lon_0"] == 0.0 - assert proj_dict["proj"] == "geos" - assert proj_dict["units"] == "m" + + expected_crs = CRS(dict(a=6378169.0, b=6356583.8, h=35785831.0, lon_0=0.0, proj="geos", units="m")) + assert expected_crs == area.crs + self.reader.fill_hrv = False area = self.reader.get_area_def(make_dataid(name="HRV", resolution=1000)) npt.assert_allclose(area.defs[0].area_extent, @@ -171,20 +162,12 @@ def _get_fake_data(self): def test_get_area_def(self): """Test getting the area def.""" - from pyresample.utils import proj4_radius_parameters area = self.reader.get_area_def(make_dataid(name="VIS006", resolution=3000)) - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", - message=r"You will likely lose important projection information", - category=UserWarning) - proj_dict = area.proj_dict - a, b = proj4_radius_parameters(proj_dict) - assert a == 6378169.0 - assert b == pytest.approx(6356583.8) - assert proj_dict["h"] == 35785831.0 - assert proj_dict["lon_0"] == self.projection_longitude - assert proj_dict["proj"] == "geos" - assert proj_dict["units"] == "m" + + expected_crs = CRS(dict(a=6378169.0, b=6356583.8, h=35785831.0, lon_0=self.projection_longitude, + proj="geos", units="m")) + assert area.crs == expected_crs + assert area.area_extent == (-77771774058.38356, -3720765401003.719, 30310525626438.438, 77771774058.38356) # Data shifted by 1.5km to N-W From 8b9c46e6e895cd98c0347b5ef9ae45334853bb65 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 16:08:22 +0000 Subject: [PATCH 0919/1416] Fix bug related to initial 
variables being overwritten and later used again with wrong dimensions. Add unit test to catch this. --- satpy/composites/__init__.py | 44 ++++++++++++++++++---------------- satpy/tests/test_composites.py | 8 +++++++ 2 files changed, 31 insertions(+), 21 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 09985d6ba1..4153a85963 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1083,30 +1083,31 @@ class HighCloudCompositor(CloudCompositor): of where abs(latitude). """  -    def __init__(self, name, transition_min=(210., 230.), transition_max=300, latitude_min=(30., 60.),  # noqa: D417 -                 transition_gamma=1.0, **kwargs): +    def __init__(self, name, transition_min_limits=(210., 230.), latitude_min_limits=(30., 60.),  # noqa: D417 +                 transition_max=300, transition_gamma=1.0, **kwargs): """Collect custom configuration values.  Args: -            transition_min (tuple): Brightness temperature values used to identify opaque white -                                    clouds at different latitudes +            transition_min_limits (tuple): Brightness temperature values used to identify opaque white +                                           clouds at different latitudes transition_max (float): Brightness temperatures above this value are not considered to be high clouds -> transparent -            latitude_min (tuple): Latitude values defining the intervals for computing latitude-dependent -                                  transition_min values. +            latitude_min_limits (tuple): Latitude values defining the intervals for computing latitude-dependent +                                         `transition_min` values from `transition_min_limits`. transition_gamma (float): Gamma correction to apply to the alpha channel within the brightness temperature range (`transition_min` to `transition_max`).  """ -        if len(transition_min) != 2: -            raise ValueError(f"Expected 2 `transition_min` values, got {len(transition_min)}") -        if len(latitude_min) != 2: -            raise ValueError(f"Expected 2 `latitude_min` values, got {len(latitude_min)}") +        if len(transition_min_limits) != 2: +            raise ValueError(f"Expected 2 `transition_min_limits` values, got {len(transition_min_limits)}") +        if len(latitude_min_limits) != 2: +            raise ValueError(f"Expected 2 `latitude_min_limits` values, got {len(latitude_min_limits)}") if type(transition_max) in [list, tuple]: raise ValueError(f"Expected `transition_max` to be of type float, is of type {type(transition_max)}")  -        self.latitude_min = latitude_min -        super().__init__(name, transition_min=transition_min, transition_max=transition_max, +        self.transition_min_limits = transition_min_limits +        self.latitude_min_limits = latitude_min_limits +        super().__init__(name, transition_min=None, transition_max=transition_max, transition_gamma=transition_gamma, **kwargs)  def __call__(self, projectables, **kwargs): @@ -1122,16 +1123,17 @@ def __call__(self, projectables, **kwargs): _, lats = data.attrs["area"].get_lonlats(chunks=data.chunks, dtype=data.dtype) lats = np.abs(lats)  -        slope = (self.transition_min[1] - self.transition_min[0]) / (self.latitude_min[1] - self.latitude_min[0]) -        offset = self.transition_min[0] - slope * self.latitude_min[0] - -        tr_min_lat = xr.DataArray(name="tr_min_lat", coords=data.coords, dims=data.dims).astype(data.dtype) -        tr_min_lat = tr_min_lat.where(lats >= self.latitude_min[0], self.transition_min[0]) -        tr_min_lat = tr_min_lat.where(lats <= self.latitude_min[1], self.transition_min[1]) -        tr_min_lat = tr_min_lat.where((lats < self.latitude_min[0]) | (lats > self.latitude_min[1]), -                                      slope * lats + offset) +        slope = (self.transition_min_limits[1] - self.transition_min_limits[0]) / \ +
(self.latitude_min_limits[1] - self.latitude_min_limits[0]) + offset = self.transition_min_limits[0] - slope * self.latitude_min_limits[0] - self.transition_min = tr_min_lat + # Compute pixel-level latitude dependent transition_min values and pass to parent CloudCompositor class + transition_min = xr.DataArray(name="transition_min", coords=data.coords, dims=data.dims).astype(data.dtype) + transition_min = transition_min.where(lats >= self.latitude_min_limits[0], self.transition_min_limits[0]) + transition_min = transition_min.where(lats <= self.latitude_min_limits[1], self.transition_min_limits[1]) + transition_min = transition_min.where((lats < self.latitude_min_limits[0]) | + (lats > self.latitude_min_limits[1]), slope * lats + offset) + self.transition_min = transition_min return super().__call__(projectables, **kwargs) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 6b79a06a99..aa4f56f9de 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -973,6 +973,14 @@ def test_high_cloud_compositor(self): expected = np.stack([self.data, expexted_alpha]) np.testing.assert_almost_equal(res.values, expected) + def test_high_cloud_compositor_multiple_calls(self): + """Test that the modified init variables are reset properly when calling the compositor multiple times.""" + from satpy.composites import HighCloudCompositor + comp = HighCloudCompositor(name="test") + res = comp([self.data]) + res2 = comp([self.data]) + np.testing.assert_equal(res.values, res2.values) + def test_high_cloud_compositor_dtype(self): """Test that the datatype is not altered by the compositor.""" from satpy.composites import HighCloudCompositor From ef3ebfc7fc5a65e39a60a8d5b8e3242188941335 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 16:56:57 +0000 Subject: [PATCH 0920/1416] Use crude stretch instead in order to increase image contrast of especially cold cloud tops. --- satpy/etc/enhancements/generic.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index dfd5b5f5c6..25680d6db9 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -964,7 +964,7 @@ enhancements: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: - stretch: linear + stretch: crude geo_color_low_clouds: standard_name: geo_color_low_clouds @@ -972,7 +972,7 @@ enhancements: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: - stretch: linear + stretch: crude - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: From eaa24be4804d292482f855ae64f0070b6ec138ad Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 14 Dec 2023 16:58:05 +0000 Subject: [PATCH 0921/1416] Refine blending range for DayNightCompositor. 
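For context, the `lim_low`/`lim_high` values changed below are solar-zenith-angle limits for the day/night blend. A minimal sketch of how the same range is used programmatically (the input composites are assumed for illustration and are not part of this patch):

    from satpy.composites import DayNightCompositor

    # lim_low/lim_high are solar zenith angles in degrees: below lim_low only
    # the day composite is shown, above lim_high only the night composite, and
    # the two are blended linearly in between; 78/88 is the refined range here.
    comp = DayNightCompositor("geo_color", lim_low=78., lim_high=88.,
                              standard_name="geo_color_day_night_blend")
    # geo_color = comp([true_color_composite, night_ir_composite])  # DataArrays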
--- satpy/etc/composites/abi.yaml | 4 ++-- satpy/etc/composites/ahi.yaml | 4 ++-- satpy/etc/composites/fci.yaml | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/satpy/etc/composites/abi.yaml b/satpy/etc/composites/abi.yaml index e950ba027f..b40f353e6a 100644 --- a/satpy/etc/composites/abi.yaml +++ b/satpy/etc/composites/abi.yaml @@ -756,8 +756,8 @@ composites: # GeoColor geo_color: compositor: !!python/name:satpy.composites.DayNightCompositor - lim_low: 73 - lim_high: 82 + lim_low: 78 + lim_high: 88 standard_name: geo_color_day_night_blend prerequisites: - true_color diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index e088bcf1a6..066b9cf0f2 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -506,8 +506,8 @@ composites: geo_color: compositor: !!python/name:satpy.composites.DayNightCompositor - lim_low: 73 - lim_high: 82 + lim_low: 78 + lim_high: 88 standard_name: geo_color_day_night_blend prerequisites: - true_color_ndvi_green diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml index d564cc1f36..f27011d301 100644 --- a/satpy/etc/composites/fci.yaml +++ b/satpy/etc/composites/fci.yaml @@ -128,8 +128,8 @@ composites: # GeoColor geo_color: compositor: !!python/name:satpy.composites.DayNightCompositor - lim_low: 73 - lim_high: 82 + lim_low: 78 + lim_high: 88 standard_name: geo_color_day_night_blend prerequisites: - true_color From 739ce460f16550be0b2428109eb8e49d049707f0 Mon Sep 17 00:00:00 2001 From: andream Date: Thu, 14 Dec 2023 18:01:14 +0100 Subject: [PATCH 0922/1416] sort the reader attribute file_handlers instead of the returned created filehandlers and change test accordingly --- satpy/readers/yaml_reader.py | 10 ++++++---- satpy/tests/test_yaml_reader.py | 6 +++--- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py index 73b5f7b6ee..3171449b03 100644 --- a/satpy/readers/yaml_reader.py +++ b/satpy/readers/yaml_reader.py @@ -1178,12 +1178,14 @@ def create_filehandlers(self, filenames, fh_kwargs=None): if "segment" not in fh.filename_info: fh.filename_info["segment"] = fh.filename_info.get("count_in_repeat_cycle", 1) - # sort by segment number - for file_type in created_fhs.keys(): - created_fhs[file_type] = sorted(created_fhs[file_type], key=lambda x: x.filename_info.get("segment", 1)) - + self._sort_segment_filehandler_by_segment_number() return created_fhs + def _sort_segment_filehandler_by_segment_number(self): + for file_type in self.file_handlers.keys(): + self.file_handlers[file_type] = sorted(self.file_handlers[file_type], + key=lambda x: x.filename_info.get("segment", 0)) + def _load_dataset(self, dsid, ds_info, file_handlers, dim="y", pad_data=True): """Load only a piece of the dataset.""" if not pad_data: diff --git a/satpy/tests/test_yaml_reader.py b/satpy/tests/test_yaml_reader.py index 3f1db6a977..0b0293e453 100644 --- a/satpy/tests/test_yaml_reader.py +++ b/satpy/tests/test_yaml_reader.py @@ -1051,11 +1051,11 @@ def test_segments_sorting(self, cfh): fake_fh_3.filename_info["segment"] = 3 # put the filehandlers in an unsorted order - cfh.return_value = {"ft1": [fake_fh_1, fake_fh_3, fake_fh_2]} + reader.file_handlers = {"ft1": [fake_fh_1, fake_fh_3, fake_fh_2]} # check that the created filehandlers are sorted by segment number - created_fhs = reader.create_filehandlers(["fake.nc"]) - assert [fh.filename_info["segment"] for fh in created_fhs["ft1"]] == [1, 2, 3] + reader.create_filehandlers(["fake.nc"]) 
+ assert [fh.filename_info["segment"] for fh in reader.file_handlers["ft1"]] == [1, 2, 3] @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch("satpy.readers.yaml_reader.FileYAMLReader._load_dataset") From 850b46e575d2a52e3ba7f63ff91ecb313a0a22bd Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 14 Dec 2023 19:24:11 +0200 Subject: [PATCH 0923/1416] Add missing projection parameter --- satpy/tests/reader_tests/test_osisaf_l3.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py index 798aabf61e..80fb581db7 100644 --- a/satpy/tests/reader_tests/test_osisaf_l3.py +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -225,7 +225,7 @@ def test_get_area_def_stere(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_polar_stereographic" - expected_crs = CRS(dict(a=6378273.0, lat_0=-90, lat_ts=-70, lon_0=0, proj="stere")) + expected_crs = CRS(dict(a=6378273.0, lat_0=-90, lat_ts=-70, lon_0=0, proj="stere", rf=298.27940986765)) assert area_def.crs == expected_crs assert area_def.width == 5 @@ -278,7 +278,7 @@ def test_get_area_def_stere(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_polar_stereographic" - expected_crs = CRS(dict(a=6378273.0, lat_0=-90, lat_ts=-70, lon_0=0, proj="stere")) + expected_crs = CRS(dict(a=6378273.0, lat_0=-90, lat_ts=-70, lon_0=0, proj="stere", rf=298.27940986765)) assert area_def.crs == expected_crs assert area_def.width == 5 @@ -351,7 +351,8 @@ def test_get_area_def_stere(self, tmp_path): area_def = test.get_area_def(None) assert area_def.description == "osisaf_polar_stereographic" - expected_crs = CRS(dict(a=6378273.0, lat_0=-90, lat_ts=-70, lon_0=0, proj="stere")) + expected_crs = CRS(dict(a=6378273.0, lat_0=-90, lat_ts=-70, lon_0=0, proj="stere", rf=298.27940986765)) + assert area_def.crs == expected_crs assert area_def.width == 5 From 203acca9e8261028af826a1050a89241f9113956 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 14 Dec 2023 14:07:46 -0600 Subject: [PATCH 0924/1416] Fix composites failing on non-aligned geolocation coordinates --- satpy/composites/__init__.py | 18 ++++++++++++++++++ satpy/tests/test_composites.py | 34 +++++++++++++++++++++++++++++++++- 2 files changed, 51 insertions(+), 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 5fa9ca575b..a0ce73caea 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -185,6 +185,7 @@ def match_data_arrays(self, data_arrays): """ self.check_geolocation(data_arrays) new_arrays = self.drop_coordinates(data_arrays) + new_arrays = self.align_geo_coordinates(new_arrays) new_arrays = list(unify_chunks(*new_arrays)) return new_arrays @@ -210,6 +211,23 @@ def drop_coordinates(self, data_arrays): return new_arrays + def align_geo_coordinates(self, data_arrays: Sequence[xr.DataArray]) -> list[xr.DataArray]: + """Align DataArrays along geolocation coordinates. + + See :func:`~xarray.align` for more information. This function uses + the "override" join method to essentially ignore differences between + coordinates. The :meth:`check_geolocation` should be called before + this to ensure that geolocation coordinates and "area" are compatible. + The :meth:`drop_coordinates` method should be called before this to + ensure that coordinates that are considered "negligible" when computing + composites do not affect alignment. 
+ + """ + non_geo_coords = tuple( + coord_name for data_arr in data_arrays + for coord_name in data_arr.coords if coord_name not in ("x", "y")) + return xr.align(*data_arrays, join="override", exclude=non_geo_coords) + def check_geolocation(self, data_arrays): """Check that the geolocations of the *data_arrays* are compatible. diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 4a7b2a2ce9..830a427c4a 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -37,7 +37,7 @@ # - tmp_path -class TestMatchDataArrays(unittest.TestCase): +class TestMatchDataArrays: """Test the utility method 'match_data_arrays'.""" def _get_test_ds(self, shape=(50, 100), dims=("y", "x")): @@ -132,6 +132,38 @@ def test_nondimensional_coords(self): ret_datasets = comp.match_data_arrays([ds, ds]) assert "acq_time" not in ret_datasets[0].coords + def test_almost_equal_geo_coordinates(self): + """Test that coordinates that are almost-equal still match. + + See https://github.com/pytroll/satpy/issues/2668 for discussion. + + Various operations like cropping and resampling can cause + geo-coordinates (y, x) to be very slightly unequal due to floating + point precision. This test makes sure that even in those cases we + can still generate composites from DataArrays with these coordinates. + + """ + from satpy.composites import CompositeBase + from satpy.resample import add_crs_xy_coords + + comp = CompositeBase("test_comp") + data_arr1 = self._get_test_ds(shape=(2, 2)) + data_arr1 = add_crs_xy_coords(data_arr1, data_arr1.attrs["area"]) + data_arr2 = self._get_test_ds(shape=(2, 2)) + data_arr2 = data_arr2.assign_coords( + x=data_arr1.coords["x"] + 0.000001, + y=data_arr1.coords["y"], + crs=data_arr1.coords["crs"], + ) + # data_arr2 = add_crs_xy_coords(data_arr2, data_arr2.attrs["area"]) + # data_arr2.assign_coords(x=data_arr2.coords["x"].copy() + 1.1) + # default xarray alignment would fail and collapse one of our dims + assert 0 in (data_arr2 - data_arr1).shape + new_data_arr1, new_data_arr2 = comp.match_data_arrays([data_arr1, data_arr2]) + assert 0 not in new_data_arr1.shape + assert 0 not in new_data_arr2.shape + assert 0 not in (new_data_arr2 - new_data_arr1).shape + class TestRatioSharpenedCompositors: """Test RatioSharpenedRGB and SelfSharpendRGB compositors.""" From cfbcaf76d8c726e0ffa1a7b7df9d2c81c3f4c4f2 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 14 Dec 2023 14:14:17 -0600 Subject: [PATCH 0925/1416] Cleanup match_data_arrays and add type annotations --- satpy/composites/__init__.py | 86 ++++++++++++++++++------------------ 1 file changed, 44 insertions(+), 42 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index a0ce73caea..ef4a559322 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -157,7 +157,7 @@ def apply_modifier_info(self, origin, destination): elif o.get(k) is not None: d[k] = o[k] - def match_data_arrays(self, data_arrays): + def match_data_arrays(self, data_arrays: Sequence[xr.DataArray]) -> list[xr.DataArray]: """Match data arrays so that they can be used together in a composite. For the purpose of this method, "can be used together" means: @@ -189,46 +189,7 @@ def match_data_arrays(self, data_arrays): new_arrays = list(unify_chunks(*new_arrays)) return new_arrays - def drop_coordinates(self, data_arrays): - """Drop negligible non-dimensional coordinates. - - Drops negligible coordinates if they do not correspond to any - dimension. 
Negligible coordinates are defined in the - :attr:`NEGLIGIBLE_COORDS` module attribute. - - Args: - data_arrays (List[arrays]): Arrays to be checked - """ - new_arrays = [] - for ds in data_arrays: - drop = [coord for coord in ds.coords - if coord not in ds.dims and - any([neglible in coord for neglible in NEGLIGIBLE_COORDS])] - if drop: - new_arrays.append(ds.drop_vars(drop)) - else: - new_arrays.append(ds) - - return new_arrays - - def align_geo_coordinates(self, data_arrays: Sequence[xr.DataArray]) -> list[xr.DataArray]: - """Align DataArrays along geolocation coordinates. - - See :func:`~xarray.align` for more information. This function uses - the "override" join method to essentially ignore differences between - coordinates. The :meth:`check_geolocation` should be called before - this to ensure that geolocation coordinates and "area" are compatible. - The :meth:`drop_coordinates` method should be called before this to - ensure that coordinates that are considered "negligible" when computing - composites do not affect alignment. - - """ - non_geo_coords = tuple( - coord_name for data_arr in data_arrays - for coord_name in data_arr.coords if coord_name not in ("x", "y")) - return xr.align(*data_arrays, join="override", exclude=non_geo_coords) - - def check_geolocation(self, data_arrays): + def check_geolocation(self, data_arrays: Sequence[xr.DataArray]) -> None: """Check that the geolocations of the *data_arrays* are compatible. For the purpose of this method, "compatible" means: @@ -238,7 +199,7 @@ def check_geolocation(self, data_arrays): - If all have an area, the areas should be all the same. Args: - data_arrays (List[arrays]): Arrays to be checked + data_arrays: Arrays to be checked Raises: :class:`IncompatibleAreas`: @@ -269,6 +230,47 @@ def check_geolocation(self, data_arrays): "'{}'".format(self.attrs["name"])) raise IncompatibleAreas("Areas are different") + @staticmethod + def drop_coordinates(data_arrays: Sequence[xr.DataArray]) -> list[xr.DataArray]: + """Drop negligible non-dimensional coordinates. + + Drops negligible coordinates if they do not correspond to any + dimension. Negligible coordinates are defined in the + :attr:`NEGLIGIBLE_COORDS` module attribute. + + Args: + data_arrays (List[arrays]): Arrays to be checked + """ + new_arrays = [] + for ds in data_arrays: + drop = [coord for coord in ds.coords + if coord not in ds.dims and + any([neglible in coord for neglible in NEGLIGIBLE_COORDS])] + if drop: + new_arrays.append(ds.drop_vars(drop)) + else: + new_arrays.append(ds) + + return new_arrays + + @staticmethod + def align_geo_coordinates(data_arrays: Sequence[xr.DataArray]) -> list[xr.DataArray]: + """Align DataArrays along geolocation coordinates. + + See :func:`~xarray.align` for more information. This function uses + the "override" join method to essentially ignore differences between + coordinates. The :meth:`check_geolocation` should be called before + this to ensure that geolocation coordinates and "area" are compatible. + The :meth:`drop_coordinates` method should be called before this to + ensure that coordinates that are considered "negligible" when computing + composites do not affect alignment. 
+ + """ + non_geo_coords = tuple( + coord_name for data_arr in data_arrays + for coord_name in data_arr.coords if coord_name not in ("x", "y")) + return list(xr.align(*data_arrays, join="override", exclude=non_geo_coords)) + class DifferenceCompositor(CompositeBase): """Make the difference of two data arrays.""" From fb8ff3ba2f14f5edf9a03b1edd1288943d3620f5 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Thu, 14 Dec 2023 21:39:21 +0100 Subject: [PATCH 0926/1416] import hvplot directly inside method As Martin suggested, I'm importing the hvplot library directly inside the method, removing an if condition and resolving the "too complex" pre-commit check --- satpy/scene.py | 26 ++++++-------------------- 1 file changed, 6 insertions(+), 20 deletions(-) diff --git a/satpy/scene.py b/satpy/scene.py index aea5b44cfe..fe6bbce1f9 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -39,12 +39,6 @@ from satpy.utils import convert_remote_files_to_fsspec, get_storage_options_from_reader_kwargs from satpy.writers import load_writer -#try: -# import hvplot.xarray as hvplot_xarray # noqa -#except ImportError: -# hvplot_xarray = None - - LOG = logging.getLogger(__name__) @@ -1074,12 +1068,12 @@ def to_geoviews(self, gvtype=None, datasets=None, kdims=None, vdims=None, dynami return gview def to_hvplot(self, datasets=None, *args, **kwargs): - """Convert satpy Scene to Hvplot. + """Convert satpy Scene to Hvplot. The method cannot be used with composites of swath data. Args: datasets (list): Limit included products to these datasets. - kwargs: hvplot options dictionary. args: Arguments coming from hvplot + kwargs: hvplot options dictionary. Returns: hvplot object that contains the plots of the listed datasets. By default it contains the plots of all Scene datasets and shows a plot title.
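[Aside, not part of the patch: the `align_geo_coordinates` method introduced a few commits above leans on xarray's join="override", which copies the coordinates of the first object onto the others instead of intersecting nearly-equal indexes. A minimal, self-contained sketch of that behaviour:]

    import numpy as np
    import xarray as xr

    a = xr.DataArray(np.zeros((2, 2)), dims=("y", "x"),
                     coords={"y": [0.0, 1.0], "x": [0.0, 1.0]})
    # The x coordinates differ only by floating-point noise, as after cropping or resampling.
    b = a.assign_coords(x=a.coords["x"] + 1e-6)

    # The default (inner) join intersects the indexes and collapses "x" to length 0.
    assert 0 in (b - a).shape

    # join="override" keeps the first object's coordinates, so the shapes survive.
    a2, b2 = xr.align(a, b, join="override")
    assert (b2 - a2).shape == (2, 2)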
@@ -1087,10 +1081,11 @@ def to_hvplot(self, datasets=None, *args, **kwargs): Example usage:: scene_list = ['ash','IR_108'] + scn = Scene() + scn.load(scene_list) + scn = scn.resample('eurol') plot = scn.to_hvplot(datasets=scene_list) - plot.ash+plot.IR_108 - """ def _get_crs(xarray_ds): @@ -1113,12 +1108,7 @@ def _plot_quadmesh(xarray_ds, variable, **defaults): clabel=f"[{_get_units(xarray_ds,variable)}]", title=title, **defaults) - #def _check_hvplot_library(): - # if hvplot_xarray is None: - # raise ImportError("'hvplot' must be installed to use this feature") -# -# _check_hvplot_library() - + import hvplot.xarray as hvplot_xarray # noqa plot = Overlay() xarray_ds = self.to_xarray_dataset(datasets) @@ -1129,7 +1119,6 @@ def _plot_quadmesh(xarray_ds, variable, **defaults): ccrs = None defaults={"x":"longitude","y":"latitude"} - if datasets is None: datasets = list(xarray_ds.keys()) @@ -1141,9 +1130,6 @@ def _plot_quadmesh(xarray_ds, variable, **defaults): defaults.update(kwargs) - #if "latitude" in xarray_ds.coords: - # defaults.update({"x":"longitude","y":"latitude"}) - for element in datasets: title = f"{element} @ {_get_timestamp(xarray_ds)}" if xarray_ds[element].shape[0] == 3: From 807357a4d4ea1a4f4cad740d5978d534b1e61b20 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Thu, 14 Dec 2023 22:45:58 +0100 Subject: [PATCH 0927/1416] Add holoviews required library --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 33d417bf2e..49b00ae4e0 100644 --- a/setup.py +++ b/setup.py @@ -76,7 +76,7 @@ "doc": ["sphinx", "sphinx_rtd_theme", "sphinxcontrib-apidoc"], # Other "geoviews": ["geoviews"], - "hvplot": ["hvplot", "geoviews", "cartopy"], + "hvplot": ["hvplot", "geoviews", "cartopy", "holoviews"], "overlays": ["pycoast", "pydecorate"], "satpos_from_tle": ["skyfield", "astropy"], "tests": test_requires, From 8b7bba7bede6bc1b0f3c8ebc4d70a5a2b8cea501 Mon Sep 17 00:00:00 2001 From: Johan Strandgren <42137969+strandgren@users.noreply.github.com> Date: Thu, 14 Dec 2023 23:41:10 +0100 Subject: [PATCH 0928/1416] Clean up tests for NDVIHybridGreen compositor. 
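[Aside, not part of the patch: the tests cleaned up below guard laziness with satpy's CustomScheduler, which raises as soon as more dask computes happen than allowed. Roughly, with `comp` and `c01`..`c03` standing in for the fixtures used in those tests:]

    import dask
    from satpy.tests.utils import CustomScheduler

    # Fail if anything triggers a dask compute while the composite graph is built.
    with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
        res = comp((c01, c02, c03))
    # Computing afterwards, outside the guard, is fine.
    res_np = res.data.compute()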
--- satpy/tests/compositor_tests/test_spectral.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index 7472016c00..c9a7ab62b6 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -103,10 +103,10 @@ def test_ndvi_hybrid_green(self): # Test General functionality with linear strength (=1.0) res = comp((self.c01, self.c02, self.c03)) - assert isinstance(res, xr.DataArray) - assert isinstance(res.data, da.Array) - assert res.attrs["name"] == "ndvi_hybrid_green" - assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) + assert res.attrs["name"] == "ndvi_hybrid_green" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.values np.testing.assert_array_almost_equal(data, np.array([[0.2633, 0.3071], [0.2115, 0.3420]]), decimal=4) @@ -115,7 +115,7 @@ def test_ndvi_hybrid_green_dtype(self): with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), standard_name="toa_bidirectional_reflectance") - res = comp((self.c01, self.c02, self.c03)).compute() + res = comp((self.c01, self.c02, self.c03)) assert res.data.dtype == np.float32 def test_nonlinear_scaling(self): @@ -124,7 +124,6 @@ def test_nonlinear_scaling(self): comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), strength=2.0, prerequisites=(0.51, 0.65, 0.85), standard_name="toa_bidirectional_reflectance") - res = comp((self.c01, self.c02, self.c03)) res_np = res.data.compute() assert res.dtype == res_np.dtype From ebceffbd015fcf6c31056a328316f9d28669674c Mon Sep 17 00:00:00 2001 From: Johan Strandgren <42137969+strandgren@users.noreply.github.com> Date: Thu, 14 Dec 2023 23:44:09 +0100 Subject: [PATCH 0929/1416] Add instance checks. --- satpy/tests/test_composites.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index aa4f56f9de..d17188846c 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -969,6 +969,8 @@ def test_high_cloud_compositor(self): with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = HighCloudCompositor(name="test") res = comp([self.data]) + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) expexted_alpha = np.array([[1.0, 0.7142857, 0.0], [1.0, 0.625, 0.0], [1.0, 0.5555555, 0.0]]) expected = np.stack([self.data, expexted_alpha]) np.testing.assert_almost_equal(res.values, expected) @@ -1014,6 +1016,8 @@ def test_low_cloud_compositor(self): with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = LowCloudCompositor(name="test") res = comp([self.btd, self.bt_win, self.lsm]) + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) expexted_alpha = np.array([[0.0, 0.25, 1.0], [0.0, 0.25, 1.0], [0.0, 0.0, 0.0]]) expected = np.stack([self.btd, expexted_alpha]) np.testing.assert_equal(res.values, expected) From 9c277391fc20a7883533a3441fea17de72f23a1d Mon Sep 17 00:00:00 2001 From: Johan Strandgren <42137969+strandgren@users.noreply.github.com> Date: Thu, 14 Dec 2023 23:49:30 +0100 Subject: [PATCH 0930/1416] Remove trailing whitespace. 
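[Aside, not part of the patch: the projection-related fixes in this series hinge on pyproj comparing CRS objects semantically, so adding an explicit inverse flattening changes equality. A small sketch:]

    from pyproj import CRS

    crs_sphere = CRS(dict(a=6378273.0, lat_0=-90, lat_ts=-70, lon_0=0, proj="stere"))
    crs_ellps = CRS(dict(a=6378273.0, lat_0=-90, lat_ts=-70, lon_0=0, proj="stere",
                         rf=298.27940986765))

    # Without `rf` the datum is a sphere, so the two CRS are not equal.
    assert crs_sphere != crs_ellps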
--- satpy/tests/test_composites.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index d17188846c..4f82467ab9 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -970,7 +970,7 @@ def test_high_cloud_compositor(self): comp = HighCloudCompositor(name="test") res = comp([self.data]) assert isinstance(res, xr.DataArray) - assert isinstance(res.data, da.Array) + assert isinstance(res.data, da.Array) expexted_alpha = np.array([[1.0, 0.7142857, 0.0], [1.0, 0.625, 0.0], [1.0, 0.5555555, 0.0]]) expected = np.stack([self.data, expexted_alpha]) np.testing.assert_almost_equal(res.values, expected) From aad6ea810f237ae481ed72c63357a0bc7f532bf8 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Thu, 14 Dec 2023 22:45:58 +0100 Subject: [PATCH 0931/1416] Add holoviews required library --- setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 33d417bf2e..d31c21364a 100644 --- a/setup.py +++ b/setup.py @@ -76,7 +76,8 @@ "doc": ["sphinx", "sphinx_rtd_theme", "sphinxcontrib-apidoc"], # Other "geoviews": ["geoviews"], - "hvplot": ["hvplot", "geoviews", "cartopy"], + "hvplot": ["hvplot", "geoviews", "cartopy", "holoviews"], + "holoviews": ["holoviews"], "overlays": ["pycoast", "pydecorate"], "satpos_from_tle": ["skyfield", "astropy"], "tests": test_requires, From fca35cd5772335093b1e2defdbf7a52fb7f804da Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Thu, 14 Dec 2023 22:45:58 +0100 Subject: [PATCH 0932/1416] Add holoviews required library --- setup.py | 1 - 1 file changed, 1 deletion(-) diff --git a/setup.py b/setup.py index d31c21364a..49b00ae4e0 100644 --- a/setup.py +++ b/setup.py @@ -77,7 +77,6 @@ # Other "geoviews": ["geoviews"], "hvplot": ["hvplot", "geoviews", "cartopy", "holoviews"], - "holoviews": ["holoviews"], "overlays": ["pycoast", "pydecorate"], "satpos_from_tle": ["skyfield", "astropy"], "tests": test_requires, From 7918f375de45272055b26d8d7ba1f3caf4aba759 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 15 Dec 2023 00:44:30 +0100 Subject: [PATCH 0933/1416] Revert "Add holoviews required library" This reverts commit 807357a4d4ea1a4f4cad740d5978d534b1e61b20. --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 49b00ae4e0..33d417bf2e 100644 --- a/setup.py +++ b/setup.py @@ -76,7 +76,7 @@ "doc": ["sphinx", "sphinx_rtd_theme", "sphinxcontrib-apidoc"], # Other "geoviews": ["geoviews"], - "hvplot": ["hvplot", "geoviews", "cartopy", "holoviews"], + "hvplot": ["hvplot", "geoviews", "cartopy"], "overlays": ["pycoast", "pydecorate"], "satpos_from_tle": ["skyfield", "astropy"], "tests": test_requires, From 1e9dbf29d90ba6c72bfa3110e1006b76baf66b8c Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Fri, 15 Dec 2023 06:25:20 +0000 Subject: [PATCH 0934/1416] Add data validity tests. 
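[Aside, not part of the patch: the new tests pair pytest.raises with a regex `match` so both the exception type and its message are asserted. A generic, self-contained sketch of the same pattern, with a hypothetical `make_range` helper:]

    import pytest

    def make_range(limits):
        # Mirrors the length validation added to the compositors below.
        if len(limits) != 2:
            raise ValueError(f"Expected 2 `limits` values, got {len(limits)}")
        return tuple(limits)

    def test_make_range_rejects_wrong_length():
        with pytest.raises(ValueError, match="Expected 2 `limits` values, got 1"):
            make_range((210.,))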
--- satpy/composites/__init__.py | 4 ++-- satpy/tests/test_composites.py | 32 ++++++++++++++++++++++++++++++++ 2 files changed, 34 insertions(+), 2 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 4153a85963..a286f078c4 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1100,8 +1100,8 @@ def __init__(self, name, transition_min_limits=(210., 230.), latitude_min_limits """ if len(transition_min_limits) != 2: raise ValueError(f"Expected 2 `transition_min_limits` values, got {len(transition_min_limits)}") - if len(transition_min_limits) != 2: - raise ValueError(f"Expected 2 `latitude_min_limits` values, got {len(transition_min_limits)}") + if len(latitude_min_limits) != 2: + raise ValueError(f"Expected 2 `latitude_min_limits` values, got {len(latitude_min_limits)}") if type(transition_max) in [list, tuple]: raise ValueError(f"Expected `transition_max` to be of type float, is of type {type(transition_max)}") diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 4f82467ab9..f6726bc7e9 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -990,6 +990,24 @@ def test_high_cloud_compositor_dtype(self): res = comp([self.data]) assert res.data.dtype == self.dtype + def test_high_cloud_compositor_validity_checks(self): + """Test that errors are raised for invalid input data and settings.""" + from satpy.composites import HighCloudCompositor + + with pytest.raises(ValueError, match="Expected 2 `transition_min_limits` values, got 1"): + _ = HighCloudCompositor("test", transition_min_limits=(210., )) + + with pytest.raises(ValueError, match="Expected 2 `latitude_min_limits` values, got 3"): + _ = HighCloudCompositor("test", latitude_min_limits=(20., 40., 60.)) + + with pytest.raises(ValueError, match="Expected `transition_max` to be of type float, " + "is of type <class 'tuple'>"): + _ = HighCloudCompositor("test", transition_max=(250., 300.)) + + comp = HighCloudCompositor("test") + with pytest.raises(ValueError, match="Expected 1 dataset, got 2"): + _ = comp([self.data, self.data]) + class TestLowCloudCompositor: """Test LowCloudCompositor.""" @@ -1029,6 +1047,20 @@ def test_low_cloud_compositor_dtype(self): res = comp([self.btd, self.bt_win, self.lsm]) assert res.data.dtype == self.dtype + def test_low_cloud_compositor_validity_checks(self): + """Test that errors are raised for invalid input data and settings.""" + from satpy.composites import LowCloudCompositor + + with pytest.raises(ValueError, match="Expected 2 `range_land` values, got 1"): + _ = LowCloudCompositor("test", range_land=(2.0, )) + + with pytest.raises(ValueError, match="Expected 2 `range_water` values, got 1"): + _ = LowCloudCompositor("test", range_water=(2.0,)) + + comp = LowCloudCompositor("test") + with pytest.raises(ValueError, match="Expected 3 datasets, got 2"): + _ = comp([self.btd, self.lsm]) + class TestSingleBandCompositor(unittest.TestCase): """Test the single-band compositor.""" From 30eaf9ed65d6ba89948a1bcbad67099e676032b5 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Fri, 15 Dec 2023 06:26:33 +0000 Subject: [PATCH 0935/1416] Rename 'sea' to 'water'.
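[Aside, not part of the patch: conceptually, the compositor renamed below scales a window-channel brightness temperature difference with one threshold range over land and another over water, routed by a land-water mask. A rough standalone sketch of that idea, not the actual implementation:]

    import numpy as np
    import xarray as xr

    def scale_btd(btd, lsm, range_land=(4.35, 6.75), range_water=(1.35, 5.0)):
        """Scale a BTD field to [0, 1] with separate limits per surface type."""
        def _scale(data, vmin, vmax):
            return ((data - vmin) / (vmax - vmin)).clip(0.0, 1.0)

        # lsm is 100 over land and 0 over water, as in the YAML recipes.
        return xr.where(lsm == 100, _scale(btd, *range_land),
                        _scale(btd, *range_water))

    btd = xr.DataArray(np.array([[0.5, 3.0], [5.0, 7.0]]))
    lsm = xr.DataArray(np.array([[100, 100], [0, 0]]))
    print(scale_btd(btd, lsm))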
--- satpy/composites/__init__.py | 34 +++++++++++++++++----------------- satpy/etc/composites/abi.yaml | 4 ++-- satpy/etc/composites/ahi.yaml | 4 ++-- satpy/etc/composites/fci.yaml | 6 +++--- 4 files changed, 24 insertions(+), 24 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index a286f078c4..88375bc933 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1148,8 +1148,8 @@ class LowCloudCompositor(CloudCompositor): Pixels with `BTD` values below a given threshold will be transparent, whereas pixels with `BTD` values above another threshold will be opaque. The transparency of all other `BTD` values will be a linear function of the `BTD` value itself. Two sets of thresholds are used, one set for land surface types - (`range_land`) and another one for sea/water surface types (`range_sea`), respectively. Hence, - this compositor requires a land-sea-mask as a prerequisite input. This follows the GeoColor + (`range_land`) and another one for water surface types (`range_water`), respectively. Hence, + this compositor requires a land-water-mask as a prerequisite input. This follows the GeoColor implementation of night-time low-level clouds in Miller et al. (2020, :doi:`10.1175/JTECH-D-19-0134.1`), but with some adjustments to the thresholds based on recent developments and feedback from CIRA. @@ -1157,9 +1157,9 @@ class LowCloudCompositor(CloudCompositor): only applicable during night-time. """ - def __init__(self, name, values_land=(1,), values_sea=(0,), # noqa: D417 + def __init__(self, name, values_land=(1,), values_water=(0,), # noqa: D417 range_land=(0.0, 4.0), - range_sea=(0.0, 4.0), + range_water=(0.0, 4.0), invert_alpha=True, transition_gamma=1.0, **kwargs): """Init info. @@ -1167,12 +1167,12 @@ def __init__(self, name, values_land=(1,), values_sea=(0,), # noqa: D417 Collect custom configuration values. Args: - values_land (list): List of values used to identify land surface pixels in the land-sea-mask. - values_sea (list): List of values used to identify sea/water surface pixels in the land-sea-mask. + values_land (list): List of values used to identify land surface pixels in the land-water-mask. + values_water (list): List of values used to identify water surface pixels in the land-water-mask. range_land (tuple): Threshold values used for masking low-level clouds from the brightness temperature difference over land surface types. - range_sea (tuple): Threshold values used for masking low-level clouds from the brightness temperature - difference over sea/water. + range_water (tuple): Threshold values used for masking low-level clouds from the brightness temperature + difference over water. invert_alpha (bool): Invert the alpha channel to make low data values transparent and high data values opaque. 
transition_gamma (float): Gamma correction to apply to the alpha channel within the brightness @@ -1180,13 +1180,13 @@ def __init__(self, name, values_land=(1,), values_water=(0,), # noqa: D417 """ if len(range_land) != 2: raise ValueError(f"Expected 2 `range_land` values, got {len(range_land)}") - if len(range_sea) != 2: - raise ValueError(f"Expected 2 `range_sea` values, got {len(range_sea)}") + if len(range_water) != 2: + raise ValueError(f"Expected 2 `range_water` values, got {len(range_water)}") self.values_land = values_land if type(values_land) in [list, tuple] else [values_land] - self.values_sea = values_sea if type(values_sea) in [list, tuple] else [values_sea] + self.values_water = values_water if type(values_water) in [list, tuple] else [values_water] self.range_land = range_land - self.range_sea = range_sea + self.range_water = range_water super().__init__(name, transition_min=None, transition_max=None, invert_alpha=invert_alpha, transition_gamma=transition_gamma, **kwargs) @@ -1211,12 +1211,12 @@ def __call__(self, projectables, **kwargs): self.transition_min, self.transition_max = self.range_land res = super().__call__([btd.where(lsm.isin(self.values_land))], **kwargs) - # Call CloudCompositor for sea/water surface pixels - self.transition_min, self.transition_max = self.range_sea - res_sea = super().__call__([btd.where(lsm.isin(self.values_sea))], **kwargs) + # Call CloudCompositor for water surface pixels + self.transition_min, self.transition_max = self.range_water + res_water = super().__call__([btd.where(lsm.isin(self.values_water))], **kwargs) - # Compine resutls for land and sea/water surface pixels - res = res.where(lsm.isin(self.values_land), res_sea) + # Combine results for land and water surface pixels + res = res.where(lsm.isin(self.values_land), res_water) # Make pixels with cold window channel brightness temperatures transparent to avoid spurious false # alarms caused by noise in the 3.9um channel that can occur for very cold cloud tops diff --git a/satpy/etc/composites/abi.yaml b/satpy/etc/composites/abi.yaml index b40f353e6a..cae7a7035f 100644 --- a/satpy/etc/composites/abi.yaml +++ b/satpy/etc/composites/abi.yaml @@ -773,7 +773,7 @@ composites: geo_color_low_clouds: standard_name: geo_color_low_clouds compositor: !!python/name:satpy.composites.LowCloudCompositor - values_sea: 0 + values_water: 0 values_land: 100 prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C07 - name: C13 - compositor: !!python/name:satpy.composites.StaticImageCompositor - standard_name: land_sea_mask + standard_name: land_water_mask url: "https://zenodo.org/records/10076199/files/gshhs_land_water_mask_3km_i.tif" known_hash: "sha256:96df83c57416217e191f95dde3d3c1ce0373a8fc220e929228873db246ca3569" diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index 066b9cf0f2..3db0d20f3c 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -523,7 +523,7 @@ composites: geo_color_low_clouds: standard_name: geo_color_low_clouds compositor: !!python/name:satpy.composites.LowCloudCompositor - values_sea: 0 + values_water: 0 values_land: 100 prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: B07 - name: B13 - compositor: !!python/name:satpy.composites.StaticImageCompositor - standard_name: land_sea_mask + standard_name: land_water_mask url: "https://zenodo.org/records/10076199/files/gshhs_land_water_mask_3km_i.tif"
known_hash: "sha256:96df83c57416217e191f95dde3d3c1ce0373a8fc220e929228873db246ca3569" diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml index f27011d301..775331c08b 100644 --- a/satpy/etc/composites/fci.yaml +++ b/satpy/etc/composites/fci.yaml @@ -145,10 +145,10 @@ composites: geo_color_low_clouds: standard_name: geo_color_low_clouds compositor: !!python/name:satpy.composites.LowCloudCompositor - values_sea: 0 + values_water: 0 values_land: 100 + range_water: [1.35, 5.0] range_land: [4.35, 6.75] - range_sea: [1.35, 5.0] prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: ir_38 - name: ir_105 - compositor: !!python/name:satpy.composites.StaticImageCompositor - standard_name: land_sea_mask + standard_name: land_water_mask url: "https://zenodo.org/records/10076199/files/gshhs_land_water_mask_3km_i.tif" known_hash: "sha256:96df83c57416217e191f95dde3d3c1ce0373a8fc220e929228873db246ca3569" From 60fa8a1c9d914f223a4917de63baff191ffa9f8a Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Fri, 15 Dec 2023 06:37:21 +0000 Subject: [PATCH 0936/1416] Add description and reference in geo_color composite recipes. --- satpy/etc/composites/abi.yaml | 7 +++++++ satpy/etc/composites/ahi.yaml | 8 +++++++- satpy/etc/composites/fci.yaml | 7 +++++++ 3 files changed, 21 insertions(+), 1 deletion(-) diff --git a/satpy/etc/composites/abi.yaml b/satpy/etc/composites/abi.yaml index cae7a7035f..4700aa470b 100644 --- a/satpy/etc/composites/abi.yaml +++ b/satpy/etc/composites/abi.yaml @@ -756,6 +756,13 @@ composites: # GeoColor geo_color: compositor: !!python/name:satpy.composites.DayNightCompositor + description: > + GeoColor is a multi-layer blended RGB composite where the day-time part of the image is represented by true + color imagery and the nighttime part of the image by a three layer vertically blended stack composed of a + high-level cloud layer (single IR window channel), a low-level cloud layer (IR split window) and a static + surface terrain layer with city lights (NASA Black Marble).
+ references: + Research Article: https://journals.ametsoc.org/view/journals/atot/37/3/JTECH-D-19-0134.1.xml lim_low: 78 lim_high: 88 standard_name: geo_color_day_night_blend diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml index 775331c08b..0f0e98f4e0 100644 --- a/satpy/etc/composites/fci.yaml +++ b/satpy/etc/composites/fci.yaml @@ -128,6 +128,13 @@ composites: # GeoColor geo_color: compositor: !!python/name:satpy.composites.DayNightCompositor + description: > + GeoColor is a multi-layer blended RGB composite where the day-time part of the image is represented by true + color imagery and the nighttime part of the image by a three layer vertically blended stack composed of a + high-level cloud layer (single IR window channel), a low-level cloud layer (IR split window) and a static + surface terrain layer with city lights (NASA Black Marble). + references: + Research Article: https://journals.ametsoc.org/view/journals/atot/37/3/JTECH-D-19-0134.1.xml lim_low: 78 lim_high: 88 standard_name: geo_color_day_night_blend From 4df285d66c1882075e30ea0f064808098be17af9 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 15 Dec 2023 09:05:01 +0200 Subject: [PATCH 0937/1416] Fix proj authority usage --- satpy/tests/writer_tests/test_mitiff.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/satpy/tests/writer_tests/test_mitiff.py b/satpy/tests/writer_tests/test_mitiff.py index b4ff371dab..f1519cf7a1 100644 --- a/satpy/tests/writer_tests/test_mitiff.py +++ b/satpy/tests/writer_tests/test_mitiff.py @@ -844,23 +844,23 @@ def test_convert_proj4_string(self): from pyresample.geometry import AreaDefinition from satpy.writers.mitiff import MITIFFWriter - checks = [{"epsg": "+init=EPSG:32631", + checks = [{"epsg": "EPSG:32631", "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=3 +k=0.9996 " "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " "+y_0=1515.000000\n")}, - {"epsg": "+init=EPSG:32632", + {"epsg": "EPSG:32632", "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=9 +k=0.9996 " "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " "+y_0=1515.000000\n")}, - {"epsg": "+init=EPSG:32633", + {"epsg": "EPSG:32633", "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=15 +k=0.9996 " "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " "+y_0=1515.000000\n")}, - {"epsg": "+init=EPSG:32634", + {"epsg": "EPSG:32634", "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=21 +k=0.9996 " "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " "+y_0=1515.000000\n")}, - {"epsg": "+init=EPSG:32635", + {"epsg": "EPSG:32635", "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=27 +k=0.9996 " "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " "+y_0=1515.000000\n")}] From 013b49fe2e014d39995bd05f9372e60f1812acf0 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 15 Dec 2023 09:19:31 +0200 Subject: [PATCH 0938/1416] Fix/suppress PROJ warnings about losing projection information --- satpy/tests/writer_tests/test_mitiff.py | 54 +++++++++++-------------- satpy/writers/mitiff.py | 8 +++- 2 files changed, 31 insertions(+), 31 deletions(-) diff --git a/satpy/tests/writer_tests/test_mitiff.py b/satpy/tests/writer_tests/test_mitiff.py index f1519cf7a1..4e8878687a 100644 --- a/satpy/tests/writer_tests/test_mitiff.py +++ b/satpy/tests/writer_tests/test_mitiff.py @@ -52,14 +52,13 @@ def _get_test_datasets(self): import dask.array as da import xarray as xr + from pyproj import CRS from pyresample.geometry import AreaDefinition - from 
pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( "test", "test", "test", - proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " - "+lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), + CRS("+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -119,14 +118,13 @@ def _get_test_datasets_sensor_set(self): import dask.array as da import xarray as xr + from pyproj import CRS from pyresample.geometry import AreaDefinition - from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( "test", "test", "test", - proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " - "+lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), + CRS("+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -186,14 +184,14 @@ def _get_test_dataset(self, bands=3): import dask.array as da import xarray as xr + from pyproj import CRS from pyresample.geometry import AreaDefinition - from pyresample.utils import proj4_str_to_dict + area_def = AreaDefinition( "test", "test", "test", - proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " - "+lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), + CRS("+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -217,14 +215,14 @@ def _get_test_one_dataset(self): import dask.array as da import xarray as xr + from pyproj import CRS from pyresample.geometry import AreaDefinition - from pyresample.utils import proj4_str_to_dict + area_def = AreaDefinition( "test", "test", "test", - proj4_str_to_dict("+proj=geos +datum=WGS84 +ellps=WGS84 " - "+lon_0=0. h=36000. +units=km"), + CRS("+proj=geos +datum=WGS84 +ellps=WGS84 +lon_0=0. h=36000. +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -248,14 +246,14 @@ def _get_test_one_dataset_sensor_set(self): import dask.array as da import xarray as xr + from pyproj import CRS from pyresample.geometry import AreaDefinition - from pyresample.utils import proj4_str_to_dict + area_def = AreaDefinition( "test", "test", "test", - proj4_str_to_dict("+proj=geos +datum=WGS84 +ellps=WGS84 " - "+lon_0=0. h=36000. +units=km"), + CRS("+proj=geos +datum=WGS84 +ellps=WGS84 +lon_0=0. h=36000. +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -278,14 +276,14 @@ def _get_test_dataset_with_bad_values(self, bands=3): from datetime import datetime import xarray as xr + from pyproj import CRS from pyresample.geometry import AreaDefinition - from pyresample.utils import proj4_str_to_dict + area_def = AreaDefinition( "test", "test", "test", - proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " - "+lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), + CRS("+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -313,8 +311,8 @@ def _get_test_dataset_calibration(self, bands=6): import dask.array as da import xarray as xr + from pyproj import CRS from pyresample.geometry import AreaDefinition - from pyresample.utils import proj4_str_to_dict from satpy.scene import Scene from satpy.tests.utils import make_dsq @@ -322,8 +320,7 @@ def _get_test_dataset_calibration(self, bands=6): "test", "test", "test", - proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " - "+lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), + CRS("+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -418,8 +415,8 @@ def _get_test_dataset_calibration_one_dataset(self, bands=1): import dask.array as da import xarray as xr + from pyproj import CRS from pyresample.geometry import AreaDefinition - from pyresample.utils import proj4_str_to_dict from satpy.scene import Scene from satpy.tests.utils import make_dsq @@ -427,8 +424,7 @@ def _get_test_dataset_calibration_one_dataset(self, bands=1): "test", "test", "test", - proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " - "+lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), + CRS("+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -473,16 +469,15 @@ def _get_test_dataset_three_bands_two_prereq(self, bands=3): import dask.array as da import xarray as xr + from pyproj import CRS from pyresample.geometry import AreaDefinition - from pyresample.utils import proj4_str_to_dict from satpy.tests.utils import make_dsq area_def = AreaDefinition( "test", "test", "test", - proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " - "+lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), + CRS("+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -508,16 +503,15 @@ def _get_test_dataset_three_bands_prereq(self, bands=3): import dask.array as da import xarray as xr + from pyproj import CRS from pyresample.geometry import AreaDefinition - from pyresample.utils import proj4_str_to_dict from satpy.tests.utils import make_dsq area_def = AreaDefinition( "test", "test", "test", - proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " - "+lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), + CRS("+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), diff --git a/satpy/writers/mitiff.py b/satpy/writers/mitiff.py index 950fce8b21..3658ac16b7 100644 --- a/satpy/writers/mitiff.py +++ b/satpy/writers/mitiff.py @@ -221,6 +221,8 @@ def _add_sizes(self, datasets, first_dataset): return _image_description def _add_proj4_string(self, datasets, first_dataset): + import warnings + proj4_string = " Proj string: " if isinstance(datasets, list): @@ -232,7 +234,11 @@ def _add_proj4_string(self, datasets, first_dataset): if hasattr(area, "crs") and area.crs.to_epsg() is not None: proj4_string += "+init=EPSG:{}".format(area.crs.to_epsg()) else: - proj4_string += area.proj_str + # Filter out the PROJ warning of losing projection information + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", category=UserWarning, + message=r"You will likely lose important projection information") + proj4_string += area.proj_str x_0 = 0 y_0 = 0 From 755ec6b886ac2516e5c0f8c8021f9637d75a39b7 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 15 Dec 2023 09:38:22 +0200 Subject: [PATCH 0939/1416] Use datetime64[ns] in CF writer --- satpy/cf/coords.py | 4 ++-- satpy/tests/writer_tests/test_cf.py | 16 ++++++++-------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/satpy/cf/coords.py b/satpy/cf/coords.py index 9220632fcb..2449ab79ee 100644 --- a/satpy/cf/coords.py +++ b/satpy/cf/coords.py @@ -291,8 +291,8 @@ def add_time_bounds_dimension(ds: xr.Dataset, time: str = "time") -> xr.Dataset: if start_time is not None) end_time = min(end_time for end_time in end_times if end_time is not None) - ds["time_bnds"] = xr.DataArray([[np.datetime64(start_time), - np.datetime64(end_time)]], + ds["time_bnds"] = xr.DataArray([[np.datetime64(start_time, "ns"), + np.datetime64(end_time, "ns")]], dims=["time", "bnds_1d"]) ds[time].attrs["bounds"] = "time_bnds" ds[time].attrs["standard_name"] = "time" diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 62c9995cde..d37b612bb2 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -240,7 +240,7 @@ def test_single_time_value(self): test_array = np.array([[1, 2], [3, 4]]) scn["test-array"] = xr.DataArray(test_array, dims=["x", "y"], - coords={"time": np.datetime64("2018-05-30T10:05:00")}, + coords={"time": np.datetime64("2018-05-30T10:05:00", "ns")}, attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: @@ -255,7 +255,7 @@ def test_time_coordinate_on_a_swath(self): scn = Scene() test_array = np.array([[1, 2], [3, 4], [5, 6], [7, 8]]) times = np.array(["2018-05-30T10:05:00", "2018-05-30T10:05:01", - "2018-05-30T10:05:02", "2018-05-30T10:05:03"], dtype=np.datetime64) + "2018-05-30T10:05:02", "2018-05-30T10:05:03"], dtype="datetime64[ns]") scn["test-array"] = xr.DataArray(test_array, dims=["y", "x"], coords={"time": ("y", times)}, @@ -273,7 +273,7 @@ def test_bounds(self): test_array = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) scn["test-array"] = xr.DataArray(test_array, dims=["x", "y", "time"], - coords={"time": [np.datetime64("2018-05-30T10:05:00")]}, + coords={"time": [np.datetime64("2018-05-30T10:05:00", "ns")]}, attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: @@ -307,12 +307,12 @@ def test_bounds_minimum(self): test_arrayB = np.array([[1, 2], [3, 5]]).reshape(2, 2, 1) scn["test-arrayA"] = xr.DataArray(test_arrayA, dims=["x", "y", "time"], - coords={"time": 
[np.datetime64("2018-05-30T10:05:00")]}, + coords={"time": [np.datetime64("2018-05-30T10:05:00", "ns")]}, attrs=dict(start_time=start_timeA, end_time=end_timeA)) scn["test-arrayB"] = xr.DataArray(test_arrayB, dims=["x", "y", "time"], - coords={"time": [np.datetime64("2018-05-30T10:05:00")]}, + coords={"time": [np.datetime64("2018-05-30T10:05:00", "ns")]}, attrs=dict(start_time=start_timeB, end_time=end_timeB)) with TempFile() as filename: @@ -330,12 +330,12 @@ def test_bounds_missing_time_info(self): test_arrayB = np.array([[1, 2], [3, 5]]).reshape(2, 2, 1) scn["test-arrayA"] = xr.DataArray(test_arrayA, dims=["x", "y", "time"], - coords={"time": [np.datetime64("2018-05-30T10:05:00")]}, + coords={"time": [np.datetime64("2018-05-30T10:05:00", "ns")]}, attrs=dict(start_time=start_timeA, end_time=end_timeA)) scn["test-arrayB"] = xr.DataArray(test_arrayB, dims=["x", "y", "time"], - coords={"time": [np.datetime64("2018-05-30T10:05:00")]}) + coords={"time": [np.datetime64("2018-05-30T10:05:00", "ns")]}) with TempFile() as filename: scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename, decode_cf=True) as f: @@ -350,7 +350,7 @@ def test_unlimited_dims_kwarg(self): test_array = np.array([[1, 2], [3, 4]]) scn["test-array"] = xr.DataArray(test_array, dims=["x", "y"], - coords={"time": np.datetime64("2018-05-30T10:05:00")}, + coords={"time": np.datetime64("2018-05-30T10:05:00", "ns")}, attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: From cd4cd7362bc3f62b0129f7cb466e031f04bc270e Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 15 Dec 2023 09:57:42 +0200 Subject: [PATCH 0940/1416] Catch warning about pretty time formatting --- satpy/tests/writer_tests/test_cf.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index d37b612bb2..bb87ff8c30 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -208,8 +208,10 @@ def test_groups(self): attrs={"name": "HRV", "start_time": tstart, "end_time": tend}) with TempFile() as filename: - scn.save_datasets(filename=filename, writer="cf", groups={"visir": ["IR_108", "VIS006"], "hrv": ["HRV"]}, - pretty=True) + with pytest.warns(UserWarning, match=r"Cannot pretty-format"): + scn.save_datasets(filename=filename, writer="cf", + groups={"visir": ["IR_108", "VIS006"], "hrv": ["HRV"]}, + pretty=True) nc_root = xr.open_dataset(filename) assert "history" in nc_root.attrs From 36b09d1f0036a6f47927d0a819ed9d38bb0c113e Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 15 Dec 2023 10:00:27 +0200 Subject: [PATCH 0941/1416] Catch warning of invalid NetCDF dataset name --- satpy/tests/writer_tests/test_cf.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index bb87ff8c30..020cb10ec3 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -152,7 +152,8 @@ def test_save_dataset_a_digit_no_prefix_include_attr(self): scn = Scene() scn["1"] = xr.DataArray([1, 2, 3]) with TempFile() as filename: - scn.save_datasets(filename=filename, writer="cf", include_orig_name=True, numeric_name_prefix="") + with pytest.warns(UserWarning, match=r"Invalid NetCDF dataset name"): + scn.save_datasets(filename=filename, writer="cf", include_orig_name=True, numeric_name_prefix="") with xr.open_dataset(filename) as f: np.testing.assert_array_equal(f["1"][:], [1, 2, 3]) 
assert "original_name" not in f["1"].attrs From 8ea9e300bfdd48eff704cdbe68072e2442e3e45f Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 15 Dec 2023 10:14:53 +0200 Subject: [PATCH 0942/1416] Add area definitions to remove unnecessary warnings --- satpy/tests/test_writers.py | 16 +++++++++++++++- satpy/tests/writer_tests/test_geotiff.py | 18 ++++++++++++++++-- 2 files changed, 31 insertions(+), 3 deletions(-) diff --git a/satpy/tests/test_writers.py b/satpy/tests/test_writers.py index e2bfd898ab..bc68d767c1 100644 --- a/satpy/tests/test_writers.py +++ b/satpy/tests/test_writers.py @@ -548,13 +548,20 @@ def setUp(self): import tempfile from datetime import datetime + from pyresample.geometry import AreaDefinition + from satpy.scene import Scene + adef = AreaDefinition( + "test", "test", "test", "EPSG:4326", + 100, 200, (-180., -90., 180., 90.), + ) ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", - "start_time": datetime(2018, 1, 1, 0, 0, 0)} + "start_time": datetime(2018, 1, 1, 0, 0, 0), + "area": adef} ) self.scn = Scene() self.scn["test"] = ds1 @@ -650,8 +657,14 @@ def setup_method(self): import tempfile from datetime import datetime + from pyresample.geometry import AreaDefinition + from satpy.scene import Scene + adef = AreaDefinition( + "test", "test", "test", "EPSG:4326", + 100, 200, (-180., -90., 180., 90.), + ) ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=("y", "x"), @@ -659,6 +672,7 @@ def setup_method(self): "name": "test", "start_time": datetime(2018, 1, 1, 0, 0, 0), "sensor": "fake_sensor", + "area": adef, } ) ds2 = ds1.copy() diff --git a/satpy/tests/writer_tests/test_geotiff.py b/satpy/tests/writer_tests/test_geotiff.py index 74fcd43609..8925857637 100644 --- a/satpy/tests/writer_tests/test_geotiff.py +++ b/satpy/tests/writer_tests/test_geotiff.py @@ -32,12 +32,19 @@ def _get_test_datasets_2d(): """Create a single 2D test dataset.""" + from pyresample.geometry import AreaDefinition + + adef = AreaDefinition( + "test", "test", "test", "EPSG:4326", + 100, 200, (-180., -90., 180., 90.), + ) ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", "start_time": datetime.utcnow(), - "units": "K"} + "units": "K", + "area": adef} ) return [ds1] @@ -54,12 +61,19 @@ def _get_test_datasets_2d_nonlinear_enhancement(): def _get_test_datasets_3d(): """Create a single 3D test dataset.""" + from pyresample.geometry import AreaDefinition + + adef = AreaDefinition( + "test", "test", "test", "EPSG:4326", + 100, 200, (-180., -90., 180., 90.), + ) ds1 = xr.DataArray( da.zeros((3, 100, 200), chunks=50), dims=("bands", "y", "x"), coords={"bands": ["R", "G", "B"]}, attrs={"name": "test", - "start_time": datetime.utcnow()} + "start_time": datetime.utcnow(), + "area": adef} ) return [ds1] From d16728b5d2a9c24aa14d263bc58214fa63ad47b2 Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Fri, 15 Dec 2023 09:14:55 +0100 Subject: [PATCH 0943/1416] Add holoviews in documentation --- doc/source/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 3aa810420e..37c197c6eb 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -70,7 +70,7 @@ def __getattr__(cls, name): for mod_name in MOCK_MODULES: sys.modules[mod_name] = Mock() # type: ignore -autodoc_mock_imports = ["cf", "glymur", "h5netcdf", "imageio", "mipp", "netCDF4", +autodoc_mock_imports = ["cf", "glymur", "h5netcdf", "holoviews", "imageio", "mipp", "netCDF4", "pygac", "pygrib", 
"pyhdf", "pyninjotiff", "pyorbital", "pyspectral", "rasterio", "trollimage", "zarr"] From 612e927726e445d6459267458dec1e2e6532dc7a Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 15 Dec 2023 10:39:45 +0200 Subject: [PATCH 0944/1416] Handle warnings from encoding time in CF --- satpy/tests/writer_tests/test_cf.py | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 020cb10ec3..6d1d15527b 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -247,7 +247,7 @@ def test_single_time_value(self): attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: - scn.save_datasets(filename=filename, writer="cf") + scn.save_datasets(filename=filename, writer="cf", encoding={"time": {"units": "seconds since 2018-01-01"}}) with xr.open_dataset(filename, decode_cf=True) as f: np.testing.assert_array_equal(f["time"], scn["test-array"]["time"]) bounds_exp = np.array([[start_time, end_time]], dtype="datetime64[m]") @@ -264,7 +264,8 @@ def test_time_coordinate_on_a_swath(self): coords={"time": ("y", times)}, attrs=dict(start_time=times[0], end_time=times[-1])) with TempFile() as filename: - scn.save_datasets(filename=filename, writer="cf", pretty=True) + scn.save_datasets(filename=filename, writer="cf", pretty=True, + encoding={"time": {"units": "seconds since 2018-01-01"}}) with xr.open_dataset(filename, decode_cf=True) as f: np.testing.assert_array_equal(f["time"], scn["test-array"]["time"]) @@ -280,7 +281,11 @@ def test_bounds(self): attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: - scn.save_datasets(filename=filename, writer="cf") + with warnings.catch_warnings(): + # The purpose is to use the default time encoding, silence the warning + warnings.filterwarnings("ignore", category=UserWarning, + message=r"Times can't be serialized faithfully to int64 with requested units") + scn.save_datasets(filename=filename, writer="cf") # Check decoded time coordinates & bounds with xr.open_dataset(filename, decode_cf=True) as f: bounds_exp = np.array([[start_time, end_time]], dtype="datetime64[m]") @@ -319,7 +324,8 @@ def test_bounds_minimum(self): attrs=dict(start_time=start_timeB, end_time=end_timeB)) with TempFile() as filename: - scn.save_datasets(filename=filename, writer="cf") + scn.save_datasets(filename=filename, writer="cf", + encoding={"time": {"units": "seconds since 2018-01-01"}}) with xr.open_dataset(filename, decode_cf=True) as f: bounds_exp = np.array([[start_timeA, end_timeB]], dtype="datetime64[m]") np.testing.assert_array_equal(f["time_bnds"], bounds_exp) @@ -340,7 +346,8 @@ def test_bounds_missing_time_info(self): dims=["x", "y", "time"], coords={"time": [np.datetime64("2018-05-30T10:05:00", "ns")]}) with TempFile() as filename: - scn.save_datasets(filename=filename, writer="cf") + scn.save_datasets(filename=filename, writer="cf", + encoding={"time": {"units": "seconds since 2018-01-01"}}) with xr.open_dataset(filename, decode_cf=True) as f: bounds_exp = np.array([[start_timeA, end_timeA]], dtype="datetime64[m]") np.testing.assert_array_equal(f["time_bnds"], bounds_exp) @@ -357,7 +364,8 @@ def test_unlimited_dims_kwarg(self): attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: - scn.save_datasets(filename=filename, writer="cf", unlimited_dims=["time"]) + scn.save_datasets(filename=filename, writer="cf", unlimited_dims=["time"], + encoding={"time": 
{"units": "seconds since 2018-01-01"}}) with xr.open_dataset(filename) as f: assert set(f.encoding["unlimited_dims"]) == {"time"} From 8f44b3112853366e9914eeafb267e243bf413fb6 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Fri, 15 Dec 2023 08:47:15 +0000 Subject: [PATCH 0945/1416] Put new invert_alpha keyword as last optional keyword to ensure backwards compatibility. --- satpy/composites/__init__.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 88375bc933..fe21623010 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1014,7 +1014,7 @@ class CloudCompositor(GenericCompositor): """Detect clouds based on thresholding and use it as a mask for compositing.""" def __init__(self, name, transition_min=258.15, transition_max=298.15, # noqa: D417 - invert_alpha=False, transition_gamma=3.0, **kwargs): + transition_gamma=3.0, invert_alpha=False, **kwargs): """Collect custom configuration values. Args: @@ -1022,15 +1022,15 @@ def __init__(self, name, transition_min=258.15, transition_max=298.15, # noqa: clouds -> opaque white transition_max (float): Values above this are cloud free -> transparent + transition_gamma (float): Gamma correction to apply at the end invert_alpha (bool): Invert the alpha channel to make low data values transparent and high data values opaque. - transition_gamma (float): Gamma correction to apply at the end """ self.transition_min = transition_min self.transition_max = transition_max - self.invert_alpha = invert_alpha self.transition_gamma = transition_gamma + self.invert_alpha = invert_alpha super(CloudCompositor, self).__init__(name, **kwargs) def __call__(self, projectables, **kwargs): @@ -1160,8 +1160,8 @@ class LowCloudCompositor(CloudCompositor): def __init__(self, name, values_land=(1,), values_water=(0,), # noqa: D417 range_land=(0.0, 4.0), range_water=(0.0, 4.0), - invert_alpha=True, - transition_gamma=1.0, **kwargs): + transition_gamma=1.0, + invert_alpha=True, **kwargs): """Init info. Collect custom configuration values. @@ -1173,10 +1173,10 @@ def __init__(self, name, values_land=(1,), values_water=(0,), # noqa: D417 difference over land surface types. range_water (tuple): Threshold values used for masking low-level clouds from the brightness temperature difference over water. - invert_alpha (bool): Invert the alpha channel to make low data values transparent - and high data values opaque. transition_gamma (float): Gamma correction to apply to the alpha channel within the brightness temperature difference range. + invert_alpha (bool): Invert the alpha channel to make low data values transparent + and high data values opaque. """ if len(range_land) != 2: raise ValueError(f"Expected 2 `range_land` values, got {len(range_land)}") @@ -1188,7 +1188,7 @@ def __init__(self, name, values_land=(1,), values_water=(0,), # noqa: D417 self.range_land = range_land self.range_water = range_water super().__init__(name, transition_min=None, transition_max=None, - invert_alpha=invert_alpha, transition_gamma=transition_gamma, **kwargs) + transition_gamma=transition_gamma, invert_alpha=invert_alpha, **kwargs) def __call__(self, projectables, **kwargs): """Generate the composite. 
From 9b0bcae1169bf81297383e77806403625389d5b9 Mon Sep 17 00:00:00 2001
From: Dario Stelitano
Date: Fri, 15 Dec 2023 10:44:15 +0100
Subject: [PATCH 0946/1416] Holoviews inside to_hvplot method

---
 satpy/scene.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/satpy/scene.py b/satpy/scene.py
index fe6bbce1f9..d1ba795ac8 100644
--- a/satpy/scene.py
+++ b/satpy/scene.py
@@ -25,7 +25,6 @@

 import numpy as np
 import xarray as xr
-from holoviews import Overlay
 from pyresample.geometry import AreaDefinition, BaseDefinition, SwathDefinition
 from xarray import DataArray

@@ -1109,6 +1108,8 @@ def _plot_quadmesh(xarray_ds, variable, **defaults):
                 **defaults)

         import hvplot.xarray as hvplot_xarray  # noqa
+        from holoviews import Overlay
+
         plot = Overlay()
         xarray_ds = self.to_xarray_dataset(datasets)

From 36aa47145eabb1f1bea4adb5483606edbd4946c8 Mon Sep 17 00:00:00 2001
From: andream
Date: Fri, 15 Dec 2023 12:38:11 +0100
Subject: [PATCH 0947/1416] add a check for the presence of file_handlers attribute

---
 satpy/readers/yaml_reader.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py
index 3171449b03..5444d7e16f 100644
--- a/satpy/readers/yaml_reader.py
+++ b/satpy/readers/yaml_reader.py
@@ -1182,9 +1182,10 @@ def create_filehandlers(self, filenames, fh_kwargs=None):
         return created_fhs

     def _sort_segment_filehandler_by_segment_number(self):
-        for file_type in self.file_handlers.keys():
-            self.file_handlers[file_type] = sorted(self.file_handlers[file_type],
-                                                   key=lambda x: x.filename_info.get("segment", 0))
+        if hasattr(self, "file_handlers"):
+            for file_type in self.file_handlers.keys():
+                self.file_handlers[file_type] = sorted(self.file_handlers[file_type],
+                                                       key=lambda x: x.filename_info.get("segment", 0))

     def _load_dataset(self, dsid, ds_info, file_handlers, dim="y", pad_data=True):
         """Load only a piece of the dataset."""

From 42a863d560aabb9af32f04044671d153d8f3ab7f Mon Sep 17 00:00:00 2001
From: andream
Date: Fri, 15 Dec 2023 15:37:38 +0100
Subject: [PATCH 0948/1416] add ir_sandwich and ir_sandwich_with_night_colorized_ir_clouds

---
 satpy/etc/composites/fci.yaml | 40 +++++++++++++++++++++++++++--------
 1 file changed, 31 insertions(+), 9 deletions(-)

diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml
index 0f0e98f4e0..2da7b9f593 100644
--- a/satpy/etc/composites/fci.yaml
+++ b/satpy/etc/composites/fci.yaml
@@ -47,15 +47,6 @@ composites:
         modifiers: [ sunz_corrected ]
       standard_name: toa_bidirectional_reflectance

-  binary_cloud_mask:
-    # This will set all clear pixels to '0', all pixles with cloudy features (meteorological/dust/ash clouds) to '1' and
-    # missing/undefined pixels to 'nan'. This can be used for the the official EUMETSAT cloud mask product (CLM).
-    compositor: !!python/name:satpy.composites.CategoricalDataCompositor
-    prerequisites:
-      - name: 'cloud_state'
-    lut: [.nan, 0, 1, 1, 1, 1, 1, 1, 0, .nan]
-    standard_name: binary_cloud_mask
-
   true_color:
     compositor: !!python/name:satpy.composites.SelfSharpenedRGB
     description: >
@@ -180,3 +171,34 @@ composites:
     prerequisites:
       - geo_color_high_clouds
      - geo_color_background_with_low_clouds
+
+  ir_sandwich:
+    compositor: !!python/name:satpy.composites.SandwichCompositor
+    standard_name: ir_sandwich
+    prerequisites:
+      - name: 'vis_06'
+        modifiers: [ sunz_corrected ]
+      - name: colorized_ir_clouds
+
+  colorized_ir_clouds:
+    compositor: !!python/name:satpy.composites.SingleBandCompositor
+    prerequisites:
+      - name: 'ir_105'
+    standard_name: colorized_ir_clouds
+
+  ir_sandwich_with_night_colorized_ir_clouds:
+    compositor: !!python/name:satpy.composites.DayNightCompositor
+    standard_name: fci_day_night_blend
+    lim_low: 73
+    lim_high: 82
+    prerequisites:
+      - ir_sandwich
+      - colorized_ir_clouds
+  binary_cloud_mask:
+    # This will set all clear pixels to '0', all pixles with cloudy features (meteorological/dust/ash clouds) to '1' and
+    # missing/undefined pixels to 'nan'. This can be used for the the official EUMETSAT cloud mask product (CLM).
+    compositor: !!python/name:satpy.composites.CategoricalDataCompositor
+    prerequisites:
+      - name: 'cloud_state'
+    lut: [ .nan, 0, 1, 1, 1, 1, 1, 1, 0, .nan ]
+    standard_name: binary_cloud_mask

From 4f3eb61678199b0f1273eac0fe2f1fef9d352813 Mon Sep 17 00:00:00 2001
From: andream
Date: Fri, 15 Dec 2023 15:38:27 +0100
Subject: [PATCH 0949/1416] fix comment typos

---
 satpy/etc/composites/fci.yaml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml
index 2da7b9f593..aae4de313d 100644
--- a/satpy/etc/composites/fci.yaml
+++ b/satpy/etc/composites/fci.yaml
@@ -195,8 +195,8 @@ composites:
       - ir_sandwich
       - colorized_ir_clouds
   binary_cloud_mask:
-    # This will set all clear pixels to '0', all pixles with cloudy features (meteorological/dust/ash clouds) to '1' and
-    # missing/undefined pixels to 'nan'. This can be used for the the official EUMETSAT cloud mask product (CLM).
+    # This will set all clear pixels to '0', all pixels with cloudy features (meteorological/dust/ash clouds) to '1' and
+    # missing/undefined pixels to 'nan'. This can be used for the official EUMETSAT cloud mask product (CLM).
     compositor: !!python/name:satpy.composites.CategoricalDataCompositor
     prerequisites:
       - name: 'cloud_state'

From 7a15b6d220f2a563818268f6fdc7ccb956a8a07a Mon Sep 17 00:00:00 2001
From: andream
Date: Fri, 15 Dec 2023 15:47:30 +0100
Subject: [PATCH 0950/1416] add cloud_type and cloud_phase

---
 satpy/etc/composites/fci.yaml | 33 +++++++++++++++++++++++++++++++++
 1 file changed, 33 insertions(+)

diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml
index aae4de313d..5fa8997731 100644
--- a/satpy/etc/composites/fci.yaml
+++ b/satpy/etc/composites/fci.yaml
@@ -194,6 +194,39 @@ composites:
     prerequisites:
       - ir_sandwich
       - colorized_ir_clouds
+
+  cloud_type:
+    description: >
+      Equal to cimss_cloud_type, but with additional sunz_reducer modifier to avoid saturation at the terminator.
+    references:
+      EUMETRAIN Quick Guide: https://resources.eumetrain.org/rgb_quick_guides/quick_guides/CloudTypeRGB.pdf
+      Recipe: https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf
+    compositor: !!python/name:satpy.composites.GenericCompositor
+    prerequisites:
+      - name: nir_13
+        modifiers: [ sunz_corrected, sunz_reduced ]
+      - name: vis_06
+        modifiers: [ sunz_corrected, sunz_reduced ]
+      - name: nir_16
+        modifiers: [ sunz_corrected, sunz_reduced ]
+    standard_name: cimss_cloud_type
+
+  cloud_phase:
+    description: >
+      Equal to cloud_phase, but with additional sunz_reducer modifier to avoid saturation at the terminator.
+    references:
+      EUMETRAIN Quick Guide: https://resources.eumetrain.org/rgb_quick_guides/quick_guides/CloudPhaseRGB.pdf
+      Recipe: https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf
+    compositor: !!python/name:satpy.composites.GenericCompositor
+    prerequisites:
+      - name: nir_16
+        modifiers: [sunz_corrected, sunz_reduced]
+      - name: nir_22
+        modifiers: [sunz_corrected, sunz_reduced]
+      - name: vis_06
+        modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced]
+    standard_name: cloud_phase
+
   binary_cloud_mask:
     # This will set all clear pixels to '0', all pixels with cloudy features (meteorological/dust/ash clouds) to '1' and
     # missing/undefined pixels to 'nan'. This can be used for the official EUMETSAT cloud mask product (CLM).
     compositor: !!python/name:satpy.composites.CategoricalDataCompositor
     prerequisites:
       - name: 'cloud_state'

From d326eef012c27b212a8f8734857f971156f97152 Mon Sep 17 00:00:00 2001
From: andream
Date: Fri, 15 Dec 2023 15:56:43 +0100
Subject: [PATCH 0951/1416] match all projectables instead of only subset in NDVIHybridGreen __call__

---
 satpy/composites/spectral.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/satpy/composites/spectral.py b/satpy/composites/spectral.py
index d656bab7ec..138f17bd85 100644
--- a/satpy/composites/spectral.py
+++ b/satpy/composites/spectral.py
@@ -159,9 +159,9 @@ def __call__(self, projectables, optional_datasets=None, **attrs):
         LOG.info(f"Applying NDVI-weighted hybrid-green correction with limits [{self.limits[0]}, "
                  f"{self.limits[1]}] and strength {self.strength}.")

-        ndvi_input = self.match_data_arrays([projectables[1], projectables[2]])
+        ndvi_input = self.match_data_arrays(projectables)

-        ndvi = (ndvi_input[1] - ndvi_input[0]) / (ndvi_input[1] + ndvi_input[0])
+        ndvi = (ndvi_input[2] - ndvi_input[1]) / (ndvi_input[2] + ndvi_input[1])

         ndvi = ndvi.clip(self.ndvi_min, self.ndvi_max)

From 5301dcab4141098cd78781137f784f068e96d1f9 Mon Sep 17 00:00:00 2001
From: andream
Date: Fri, 15 Dec 2023 17:57:37 +0100
Subject: [PATCH 0952/1416] use projectables in match_data_arrays return and add test for coordinates alignment

---
 satpy/composites/spectral.py                  |  4 ++--
 satpy/tests/compositor_tests/test_spectral.py | 21 ++++++++++++++++---
 2 files changed, 20 insertions(+), 5 deletions(-)

diff --git a/satpy/composites/spectral.py b/satpy/composites/spectral.py
index 138f17bd85..f7219ec94d 100644
--- a/satpy/composites/spectral.py
+++ b/satpy/composites/spectral.py
@@ -159,9 +159,9 @@ def __call__(self, projectables, optional_datasets=None, **attrs):
         LOG.info(f"Applying NDVI-weighted hybrid-green correction with limits [{self.limits[0]}, "
                  f"{self.limits[1]}] and strength {self.strength}.")

-        ndvi_input = self.match_data_arrays(projectables)
+        projectables = self.match_data_arrays(projectables)

-        ndvi = (ndvi_input[2] - ndvi_input[1]) / (ndvi_input[2] + ndvi_input[1])
+        ndvi = (projectables[2] - projectables[1]) / (projectables[2] + projectables[1])

         ndvi = ndvi.clip(self.ndvi_min, self.ndvi_max)

diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py
index eb3db8de5c..a68f9f2f0a 100644
--- a/satpy/tests/compositor_tests/test_spectral.py
+++ b/satpy/tests/compositor_tests/test_spectral.py
@@ -73,15 +73,16 @@ class TestNdviHybridGreenCompositor:

     def setup_method(self):
         """Initialize channels."""
+        coord_val = [1.0, 2.0]
         self.c01 = xr.DataArray(
             da.from_array(np.array([[0.25, 0.30], [0.20, 0.30]], dtype=np.float32), chunks=25),
-            dims=("y", "x"), attrs={"name": "C02"})
+            dims=("y", "x"), coords=[coord_val, coord_val], attrs={"name": "C02"})
         self.c02 = xr.DataArray(
             da.from_array(np.array([[0.25, 0.30], [0.25, 0.35]], dtype=np.float32), chunks=25),
-            dims=("y", "x"), attrs={"name": "C03"})
+            dims=("y", "x"),  coords=[coord_val, coord_val], attrs={"name": "C03"})
         self.c03 = xr.DataArray(
             da.from_array(np.array([[0.35, 0.35], [0.28, 0.65]], dtype=np.float32), chunks=25),
-            dims=("y", "x"), attrs={"name": "C04"})
+            dims=("y", "x"),  coords=[coord_val, coord_val], attrs={"name": "C04"})

     def test_ndvi_hybrid_green(self):
         """Test General functionality with linear scaling from ndvi to blend fraction."""
@@ -123,3 +124,17 @@ def test_invalid_strength(self):
         with pytest.raises(ValueError, match="Expected strength greater than 0.0, got 0.0."):
             _ = NDVIHybridGreen("ndvi_hybrid_green", strength=0.0, prerequisites=(0.51, 0.65, 0.85),
                                 standard_name="toa_bidirectional_reflectance")
+
+    def test_with_slightly_mismatching_coord_input(self):
+        """Test the case where an input (typically the red band) has a slightly different coordinate.
+
+        If match_data_arrays is called correctly, the coords will be aligned and the array will have the expected shape.
+
+        """
+        comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85),
+                               standard_name="toa_bidirectional_reflectance")
+
+        c02_bad_shape = self.c02.copy()
+        c02_bad_shape.coords["y"] = [1.1, 2.]
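+        # The first y value (1.1) deliberately differs from the 1.0 used in setup_method;
+        # the compositor can only return the expected 2x2 result if match_data_arrays
+        # aligns the coordinates first, since plain xarray arithmetic would intersect
+        # the mismatching y values instead.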
+        res = comp((self.c01, c02_bad_shape, self.c03))
+        assert res.shape == (2, 2)

From fb2ec9e17d600e3ec170d11d154ed485563d527f Mon Sep 17 00:00:00 2001
From: andream
Date: Fri, 15 Dec 2023 18:00:46 +0100
Subject: [PATCH 0953/1416] make codefactor happy

---
 satpy/tests/compositor_tests/test_spectral.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py
index a68f9f2f0a..2f7d9fd7cb 100644
--- a/satpy/tests/compositor_tests/test_spectral.py
+++ b/satpy/tests/compositor_tests/test_spectral.py
@@ -79,10 +79,10 @@ def setup_method(self):
             dims=("y", "x"), coords=[coord_val, coord_val], attrs={"name": "C02"})
         self.c02 = xr.DataArray(
             da.from_array(np.array([[0.25, 0.30], [0.25, 0.35]], dtype=np.float32), chunks=25),
-            dims=("y", "x"),  coords=[coord_val, coord_val], attrs={"name": "C03"})
+            dims=("y", "x"), coords=[coord_val, coord_val], attrs={"name": "C03"})
         self.c03 = xr.DataArray(
             da.from_array(np.array([[0.35, 0.35], [0.28, 0.65]], dtype=np.float32), chunks=25),
-            dims=("y", "x"),  coords=[coord_val, coord_val], attrs={"name": "C04"})
+            dims=("y", "x"), coords=[coord_val, coord_val], attrs={"name": "C04"})

     def test_ndvi_hybrid_green(self):
         """Test General functionality with linear scaling from ndvi to blend fraction."""

From 6f4ca385748516b3ee11c718051d773b3334e509 Mon Sep 17 00:00:00 2001
From: yukaribbba
Date: Sat, 16 Dec 2023 13:04:37 +0800
Subject: [PATCH 0954/1416] Update __init__.py

---
 satpy/composites/__init__.py | 49 +++++++++++++++++++++++++++---------
 1 file changed, 37 insertions(+), 12 deletions(-)

diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py
index a70bbea86f..46dbe1b6fc 100644
--- a/satpy/composites/__init__.py
+++ b/satpy/composites/__init__.py
@@ -1679,9 +1679,10 @@ def __call__(self, *args, **kwargs):
 class BackgroundCompositor(GenericCompositor):
     """A compositor that overlays one composite on top of another."""

-    def __call__(self, projectables, *args, **kwargs):
+    def __call__(self, projectables, bg_fill_in=True, *args, **kwargs):
         """Call the compositor."""
         projectables = self.match_data_arrays(projectables)
+        self.bg_fill_in = bg_fill_in
         # Get enhanced datasets
         foreground = enhance2dataset(projectables[0], convert_p=True)
         background = enhance2dataset(projectables[1], convert_p=True)
@@ -1717,17 +1718,41 @@ def _get_merged_image_data(foreground: xr.DataArray,
                                background: xr.DataArray
                                ) -> list[xr.DataArray]:
         if "A" in foreground.attrs["mode"]:
-            # Use alpha channel as weight and blend the two composites
-            alpha = foreground.sel(bands="A")
-            data = []
-            # NOTE: there's no alpha band in the output image, it will
-            # be added by the data writer
-            for band in foreground.mode[:-1]:
-                fg_band = foreground.sel(bands=band)
-                bg_band = background.sel(bands=band)
-                chan = (fg_band * alpha + bg_band * (1 - alpha))
-                chan = xr.where(chan.isnull(), bg_band, chan)
-                data.append(chan)
+            if "A" not in background.attrs["mode"]:
+                # Use alpha channel as weight and blend the two composites
+                alpha = foreground.sel(bands="A")
+                data = []
+                # NOTE: there's no alpha band in the output image, it will
+                # be added by the data writer
+                for band in foreground.mode[:-1]:
+                    fg_band = foreground.sel(bands=band)
+                    bg_band = background.sel(bands=band)
+                    chan = (fg_band * alpha + bg_band * (1 - alpha))
+                    # Fill the area where foreground is Nan with background
+                    if self.bg_fill_in:
+                        chan = xr.where(chan.isnull(), bg_band, chan)
+                    data.append(chan)
+
+            else:
+                # Both foreground and background have alpha channels
+                # Use them to build a new alpha channel and blend the two composites
+                alpha_fore = foreground.sel(bands="A")
+                alpha_back = background.sel(bands="A")
+                data = []
+                new_alpha = alpha_fore + alpha_back * (1 - alpha_fore)
+
+                for band in foreground.mode:
+                    fg_band = foreground.sel(bands=band)
+                    bg_band = background.sel(bands=band)
+                    if band != "A":
+                        chan = (fg_band * alpha_fore + bg_band * alpha_back * (1 - alpha_fore)) / new_alpha
+                    else:
+                        chan = new_alpha
+                    # Fill the area where foreground is Nan with background
+                    if self.bg_fill_in:
+                        chan = xr.where(chan.isnull(), bg_band * alpha_back, chan)
+                    data.append(chan)
+
         else:
             data_arr = xr.where(foreground.isnull(), background, foreground)
             # Split to separate bands so the mode is correct

From c21651a02d0366c0448bb2aeaa11a89be03a5b0d Mon Sep 17 00:00:00 2001
From: yukaribbba
Date: Sat, 16 Dec 2023 14:03:59 +0800
Subject: [PATCH 0955/1416] Update __init__.py

---
 satpy/composites/__init__.py | 20 +++++++++++++++-----
 1 file changed, 15 insertions(+), 5 deletions(-)

diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py
index 46dbe1b6fc..079f07fbd4 100644
--- a/satpy/composites/__init__.py
+++ b/satpy/composites/__init__.py
@@ -1677,7 +1677,13 @@ def __call__(self, *args, **kwargs):


 class BackgroundCompositor(GenericCompositor):
-    """A compositor that overlays one composite on top of another."""
+    """A compositor that overlays one composite on top of another.
+
+    Args:
+        bg_fill_in (bool): True means the compositor will fill the area where
+                           foreground is Nan with background.
+                           False means it will just leave the area blank.
+    """

     def __call__(self, projectables, bg_fill_in=True, *args, **kwargs):
         """Call the compositor."""
@@ -1694,7 +1700,10 @@ def __call__(self, projectables, bg_fill_in=True, *args, **kwargs):
         background = add_bands(background, foreground["bands"])

         attrs = self._combine_metadata_with_mode_and_sensor(foreground, background)
-        data = self._get_merged_image_data(foreground, background)
+        if self.bg_fill_in:
+            data = self._get_merged_image_data(foreground, background, bg_fill_in=True)
+        else:
+            data = self._get_merged_image_data(foreground, background, bg_fill_in=False)
         res = super(BackgroundCompositor, self).__call__(data, **kwargs)
         res.attrs.update(attrs)
         return res
@@ -1715,7 +1724,8 @@ def _combine_metadata_with_mode_and_sensor(self,

     @staticmethod
     def _get_merged_image_data(foreground: xr.DataArray,
-                               background: xr.DataArray
+                               background: xr.DataArray,
+                               bg_fill_in=True
                                ) -> list[xr.DataArray]:
         if "A" in foreground.attrs["mode"]:
             if "A" not in background.attrs["mode"]:
@@ -1729,7 +1739,7 @@ def _get_merged_image_data(foreground: xr.DataArray,
                     bg_band = background.sel(bands=band)
                     chan = (fg_band * alpha + bg_band * (1 - alpha))
                     # Fill the area where foreground is Nan with background
-                    if self.bg_fill_in:
+                    if bg_fill_in:
                         chan = xr.where(chan.isnull(), bg_band, chan)
                     data.append(chan)

@@ -1749,7 +1759,7 @@ def _get_merged_image_data(foreground: xr.DataArray,
                         chan = new_alpha
                     # Fill the area where foreground is Nan with background
-                    if self.bg_fill_in:
+                    if bg_fill_in:
                         chan = xr.where(chan.isnull(), bg_band * alpha_back, chan)
                     data.append(chan)

From 09c281eaeab61042fecf148aefc5521db61d3f14 Mon Sep 17 00:00:00 2001
From: yukaribbba
Date: Sat, 16 Dec 2023 15:33:48 +0800
Subject: [PATCH 0956/1416] Update test_composites.py

---
 satpy/tests/test_composites.py | 38 +++++++++++++++++++++++++---------
 1 file changed, 28 insertions(+), 10 deletions(-)

diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py
index b5d5a54b96..39e8485475 100644
--- a/satpy/tests/test_composites.py
+++ b/satpy/tests/test_composites.py
@@ -1497,28 +1497,46 @@ def setup_class(cls):

     @mock.patch("satpy.composites.enhance2dataset", _enhance2dataset)
     @pytest.mark.parametrize(
-        ("foreground_bands", "background_bands", "exp_bands", "exp_result"),
+        ("foreground_bands", "background_bands", "bg_fill_in", "exp_bands", "exp_result"),
         [
-            ("L", "L", "L", np.array([[1.0, 0.5], [0.0, 1.0]])),
-            ("LA", "LA", "L", np.array([[1.0, 0.75], [0.5, 1.0]])),
-            ("RGB", "RGB", "RGB", np.array([
+            ("L", "L", True, "L", np.array([[1.0, 0.5], [0.0, 1.0]])),
+            ("L", "L", False, "L", np.array([[1.0, 0.5], [0.0, 1.0]])),
+            ("LA", "LA", True, "LA", np.array([[[1.0, 0.75], [0.5, 1.0]], [[1.0, 1.0], [1.0, 1.0]]])),
+            ("LA", "LA", False, "LA", np.array([[[1.0, 0.75], [0.5, 1.0]], [[1.0, 1.0], [1.0, 1.0]]])),
+            ("RGB", "RGB", True, "RGB", np.array([
                 [[1., 0.5], [0., 1.]],
                 [[1., 0.5], [0., 1.]],
                 [[1., 0.5], [0., 1.]]])),
-            ("RGBA", "RGBA", "RGB", np.array([
+            ("RGB", "RGB", False, "RGB", np.array([
+                [[1., 0.5], [0., 1.]],
+                [[1., 0.5], [0., 1.]],
+                [[1., 0.5], [0., 1.]]])),
+            ("RGBA", "RGBA", True, "RGBA", np.array([
+                [[1., 0.75], [0.5, 1.]],
+                [[1., 0.75], [0.5, 1.]],
+                [[1., 0.75], [0.5, 1.]],
+                [[1.0, 1.0], [1.0, 1.0]]])),
+            ("RGBA", "RGBA", False, "RGBA", np.array([
                 [[1., 0.75], [0.5, 1.]],
                 [[1., 0.75], [0.5, 1.]],
-                [[1., 0.75], [0.5, 1.]]])),
-            ("RGBA", "RGB", "RGB", np.array([
                 [[1., 0.75], [0.5, 1.]],
+                [[1.0, 1.0], [1.0, 1.0]]])),
+            ("RGBA", "RGB", True, "RGBA", np.array([
                 [[1., 0.75], [0.5, 1.]],
-                [[1., 0.75], [0.5, 1.]]])),
+                [[1., 0.75], [0.5, 1.]],
+                [[1., 0.75], [0.5, 1.]],
+                [[1.0, 1.0], [1.0, 1.0]]])),
+            ("RGBA", "RGB", False, "RGBA", np.array([
+                [[1., 0.75], [0.5, 1.]],
+                [[1., 0.75], [0.5, 1.]],
+                [[1., 0.75], [0.5, 1.]],
+                [[1.0, 1.0], [1.0, 1.0]]])),
         ]
     )
-    def test_call(self, foreground_bands, background_bands, exp_bands, exp_result):
+    def test_call(self, foreground_bands, background_bands, bg_fill_in, exp_bands, exp_result):
         """Test the background compositing."""
         from satpy.composites import BackgroundCompositor
-        comp = BackgroundCompositor("name")
+        comp = BackgroundCompositor("name", bg_fill_in=bg_fill_in)

         # L mode images
         foreground_data = self.foreground_data[foreground_bands]

From d82057a8c544978788f7d64fc7f396f649294017 Mon Sep 17 00:00:00 2001
From: yukaribbba
Date: Sat, 16 Dec 2023 16:21:00 +0800
Subject: [PATCH 0957/1416] Update __init__.py

---
 satpy/composites/__init__.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py
index 079f07fbd4..e99201740d 100644
--- a/satpy/composites/__init__.py
+++ b/satpy/composites/__init__.py
@@ -1722,6 +1722,10 @@ def _combine_metadata_with_mode_and_sensor(self,
         attrs["sensor"] = self._get_sensors([foreground, background])
         return attrs

+    @staticmethod
+    def _fill_nan_area(channel, filler):
+        return xr.where(channel.isnull(), filler, chan)
+
     @staticmethod
     def _get_merged_image_data(foreground: xr.DataArray,
                                background: xr.DataArray,
@@ -1740,7 +1744,7 @@ def _get_merged_image_data(foreground: xr.DataArray,
                     chan = (fg_band * alpha + bg_band * (1 - alpha))
                     # Fill the area where foreground is Nan with background
                     if bg_fill_in:
-                        chan = xr.where(chan.isnull(), bg_band, chan)
+                        chan = BackgroundCompositor._fill_nan_area(chan, bg_band)
                     data.append(chan)

@@ -1760,7 +1764,7 @@ def _get_merged_image_data(foreground: xr.DataArray,
                         chan = new_alpha
                     # Fill the area where foreground is Nan with background
                     if bg_fill_in:
-                        chan = xr.where(chan.isnull(), bg_band * alpha_back, chan)
+                        chan = BackgroundCompositor._fill_nan_area(chan, bg_band * alpha_back)
                     data.append(chan)

From 557853a6ea2e794133bd48f5bfebf7ae4952da98 Mon Sep 17 00:00:00 2001
From: yukaribbba
Date: Sat, 16 Dec 2023 16:22:24 +0800
Subject: [PATCH 0958/1416] Update __init__.py

---
 satpy/composites/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py
index e99201740d..8b652e37cb 100644
--- a/satpy/composites/__init__.py
+++ b/satpy/composites/__init__.py
@@ -1724,7 +1724,7 @@ def _combine_metadata_with_mode_and_sensor(self,

     @staticmethod
     def _fill_nan_area(channel, filler):
-        return xr.where(channel.isnull(), filler, chan)
+        return xr.where(channel.isnull(), filler)

     @staticmethod
     def _get_merged_image_data(foreground: xr.DataArray,

From 61930e6d9fd25f8e77ff19df9ab678ffb0559e80 Mon Sep 17 00:00:00 2001
From: yukaribbba
Date: Sat, 16 Dec 2023 16:23:37 +0800
Subject: [PATCH 0959/1416] Update __init__.py

---
 satpy/composites/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py
index 8b652e37cb..431f8bd66e 100644
--- a/satpy/composites/__init__.py
+++ b/satpy/composites/__init__.py
@@ -1724,7 +1724,7 @@ def _combine_metadata_with_mode_and_sensor(self,

     @staticmethod
     def _fill_nan_area(channel, filler):
-        return xr.where(channel.isnull(), filler)
+        return xr.where(channel.isnull(), filler, channel)

     @staticmethod
     def _get_merged_image_data(foreground: xr.DataArray,

From 1d12212a0afe4c0e780e0fec45a66eb26a891fa9 Mon Sep 17 00:00:00 2001
From: yukaribbba
Date: Sat, 16 Dec 2023 16:46:06 +0800
Subject: [PATCH 0960/1416] Update __init__.py

---
 satpy/composites/__init__.py | 102 ++++++++++++++++++++++++++++++-----
 1 file changed, 88 insertions(+), 14 deletions(-)

diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py
index 431f8bd66e..43405d4b6b 100644
--- a/satpy/composites/__init__.py
+++ b/satpy/composites/__init__.py
@@ -1722,49 +1722,49 @@ def _combine_metadata_with_mode_and_sensor(self,
         attrs["sensor"] = self._get_sensors([foreground, background])
         return attrs

-    @staticmethod
-    def _fill_nan_area(channel, filler):
-        return xr.where(channel.isnull(), filler, channel)
-
     @staticmethod
     def _get_merged_image_data(foreground: xr.DataArray,
                                background: xr.DataArray,
                                bg_fill_in=True
                                ) -> list[xr.DataArray]:
         if "A" in foreground.attrs["mode"]:
-            if "A" not in background.attrs["mode"]:
-                # Use alpha channel as weight and blend the two composites
-                alpha = foreground.sel(bands="A")
-                data = []
-                # NOTE: there's no alpha band in the output image, it will
-                # be added by the data writer
-                for band in foreground.mode[:-1]:
-                    fg_band = foreground.sel(bands=band)
-                    bg_band = background.sel(bands=band)
-                    chan = (fg_band * alpha + bg_band * (1 - alpha))
+            # Use alpha channel as weight and blend the two composites
+            # If both foreground and background have alpha channels
+            # Use them to build a new alpha channel and blend the two composites
+            alpha_fore = foreground.sel(bands="A")
+            alpha_back = background.sel(bands="A") if "A" in background.attrs["mode"] else None
+            new_alpha = alpha_fore + alpha_back * (1 - alpha_fore)
+
+            data = []
+
+            if "A" in background.attrs["mode"]:
+                cutoff = 0
+            else:
+                cutoff = -1
+
+            for band in foreground.mode[: cutoff]:
+                fg_band = foreground.sel(bands=band)
+                bg_band = background.sel(bands=band)
+
+                if "A" in background.attrs["mode"]:
+                    chan = (fg_band * alpha_fore + bg_band * alpha_back * (1 - alpha_fore)) / new_alpha \
+                        if band != "A" else new_alpha
+
                     # Fill the area where foreground is Nan with background
                     if bg_fill_in:
-                        chan = BackgroundCompositor._fill_nan_area(chan, bg_band)
+                        chan = xr.where(chan.isnull(), bg_band * alpha_back, chan)
+
                     data.append(chan)

-            else:
-                # Both foreground and background have alpha channels
-                # Use them to build a new alpha channel and blend the two composites
-                alpha_fore = foreground.sel(bands="A")
-                alpha_back = background.sel(bands="A")
-                data = []
-                new_alpha = alpha_fore + alpha_back * (1 - alpha_fore)
-
-                for band in foreground.mode:
-                    fg_band = foreground.sel(bands=band)
-                    bg_band = background.sel(bands=band)
-                    if band != "A":
-                        chan = (fg_band * alpha_fore + bg_band * alpha_back * (1 - alpha_fore)) / new_alpha
-                    else:
-                        chan = new_alpha
-                    # Fill the area where foreground is Nan with background
-                    if bg_fill_in:
-                        chan = BackgroundCompositor._fill_nan_area(chan, bg_band * alpha_back)
-                    data.append(chan)
+                else:
+                    # NOTE: there's no alpha band in the output image, it will
+                    # be added by the data writer
+                    chan = (fg_band * alpha_fore + bg_band * (1 - alpha_fore))
+
+                    # Fill the area where foreground is Nan with background
+                    if bg_fill_in:
+                        chan = xr.where(chan.isnull(), bg_band, chan)
+
+                    data.append(chan)

         else:
             data_arr = xr.where(foreground.isnull(), background, foreground)
             # Split to separate bands so the mode is correct

From d1b9dd89a1b0dd89a1b0dd89a1b0dd89a1b0dd89 Mon Sep 17 00:00:00 2001
From: yukaribbba
Date: Sat, 16 Dec 2023 16:53:16 +0800
Subject: [PATCH 0961/1416] Update __init__.py

---
 satpy/composites/__init__.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py
index 43405d4b6b..54f756fed0 100644
--- a/satpy/composites/__init__.py
+++ b/satpy/composites/__init__.py
@@ -1738,11 +1738,11 @@ def _get_merged_image_data(foreground: xr.DataArray,
             data = []

             if "A" in background.attrs["mode"]:
-                cutoff = 0
+                band_list = foreground.mode
             else:
-                cutoff = -1
+                band_list = foreground.mode[: -1]

-            for band in foreground.mode[: cutoff]:
+            for band in band_list:
                 fg_band = foreground.sel(bands=band)
                 bg_band = background.sel(bands=band)

From 0ef787d9886c7891b3cded5d5c6b59dcdf783ca8 Mon Sep 17 00:00:00 2001
From: yukaribbba
Date: Sat, 16 Dec 2023 17:15:08 +0800
Subject: [PATCH 0962/1416] Update __init__.py

---
 satpy/composites/__init__.py | 28 ++++++++--------------------
 1 file changed, 8 insertions(+), 20 deletions(-)

diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py
index 54f756fed0..d29854aabf 100644
--- a/satpy/composites/__init__.py
+++ b/satpy/composites/__init__.py
@@ -1732,7 +1732,7 @@ def _get_merged_image_data(foreground: xr.DataArray,
             # If both foreground and background have alpha channels
             # Use them to build a new alpha channel and blend the two composites
             alpha_fore = foreground.sel(bands="A")
-            alpha_back = background.sel(bands="A") if "A" in background.attrs["mode"] else None
+            alpha_back = background.sel(bands="A") if "A" in background.attrs["mode"] else 1
             new_alpha = alpha_fore + alpha_back * (1 - alpha_fore)

             data = []
@@ -1740,32 +1740,20 @@ def _get_merged_image_data(foreground: xr.DataArray,
             for band in band_list:
                 fg_band = foreground.sel(bands=band)
                 bg_band = background.sel(bands=band)

-                if "A" in background.attrs["mode"]:
-                    chan = (fg_band * alpha_fore + bg_band * alpha_back * (1 - alpha_fore)) / new_alpha \
-                        if band != "A" else new_alpha
-
-                    # Fill the area where foreground is Nan with background
-                    if bg_fill_in:
-                        chan = xr.where(chan.isnull(), bg_band * alpha_back, chan)
-
-                    data.append(chan)
-
-                else:
-                    # NOTE: there's no alpha band in the output image, it will
-                    # be added by the data writer
-                    chan = (fg_band * alpha_fore + bg_band * (1 - alpha_fore))
-
-                    # Fill the area where foreground is Nan with background
-                    if bg_fill_in:
-                        chan = xr.where(chan.isnull(), bg_band, chan)
-
-                    data.append(chan)
+                chan = (fg_band * alpha_fore +
+                        bg_band * alpha_back * (1 - alpha_fore)) / new_alpha if band != "A" else new_alpha
+
+                # Fill the area where foreground is Nan with background
+                if bg_fill_in:
+                    chan = xr.where(chan.isnull(), bg_band * alpha_back, chan)
+
+                data.append(chan)

From afd9f8acec651cee94c5b0ad42af44e6dd89a088 Mon Sep 17 00:00:00 2001
From: yukaribbba
Date: Sat, 16 Dec 2023 17:20:57 +0800
Subject: [PATCH 0963/1416] Update __init__.py

---
 satpy/composites/__init__.py | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py
index d29854aabf..28250ed0b8 100644
--- a/satpy/composites/__init__.py
+++ b/satpy/composites/__init__.py
@@ -1729,8 +1729,6 @@ def _get_merged_image_data(foreground: xr.DataArray,
                                ) -> list[xr.DataArray]:
         if "A" in foreground.attrs["mode"]:
             # Use alpha channel as weight and blend the two composites
-            # If both foreground and background have alpha channels
-            # Use them to build a new alpha channel and blend the two composites
             alpha_fore = foreground.sel(bands="A")
             alpha_back = background.sel(bands="A") if "A" in background.attrs["mode"] else 1
             new_alpha = alpha_fore + alpha_back * (1 - alpha_fore)
@@ -1749,9 +1747,7 @@ def _get_merged_image_data(foreground: xr.DataArray,
                 chan = (fg_band * alpha_fore +
                         bg_band * alpha_back * (1 - alpha_fore)) / new_alpha if band != "A" else new_alpha

-                # Fill the area where foreground is Nan with background
-                if bg_fill_in:
-                    chan = xr.where(chan.isnull(), bg_band * alpha_back, chan)
+                chan = xr.where(chan.isnull(), bg_band * alpha_back, chan) if bg_fill_in else chan

                 data.append(chan)

From 19e41df8cfad4ceaa83473839463db6d6d0f7bb0 Mon Sep 17 00:00:00 2001
From: yukaribbba
Date: Sat, 16 Dec 2023 17:38:35 +0800
Subject: [PATCH 0964/1416] Update __init__.py

---
 satpy/composites/__init__.py | 11 ++---------
 1 file changed, 2 insertions(+), 9 deletions(-)

diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py
index 28250ed0b8..259e8c04bb 100644
--- a/satpy/composites/__init__.py
+++ b/satpy/composites/__init__.py
@@ -1700,10 +1700,7 @@ def __call__(self, projectables, bg_fill_in=True, *args, **kwargs):
         background = add_bands(background, foreground["bands"])

         attrs = self._combine_metadata_with_mode_and_sensor(foreground, background)
-        if self.bg_fill_in:
-            data = self._get_merged_image_data(foreground, background, bg_fill_in=True)
-        else:
-            data = self._get_merged_image_data(foreground, background, bg_fill_in=False)
+        data = self._get_merged_image_data(foreground, background, bg_fill_in=self.bg_fill_in)
         res = super(BackgroundCompositor, self).__call__(data, **kwargs)
         res.attrs.update(attrs)
         return res
@@ -1731,11 +1731,7 @@ def _get_merged_image_data(foreground: xr.DataArray,
             new_alpha = alpha_fore + alpha_back * (1 - alpha_fore)

             data = []
-
-            if "A" in background.attrs["mode"]:
-                band_list = foreground.mode
-            else:
-                band_list = foreground.mode[:-1]
+            band_list = foreground.mode if "A" in background.attrs["mode"] else foreground.mode[:-1]
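+            # "A" is kept in the loop only when the background supplies a real alpha
+            # channel; without one, the alpha band is left for the data writer to add.
+            # The per-band blend below is the premultiplied "over" operator:
+            #   out = (fg * a_f + bg * a_b * (1 - a_f)) / a_out, a_out = a_f + a_b * (1 - a_f)
+            # (a_b falls back to 1 above when the background has no alpha channel).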

             for band in band_list:
                 fg_band = foreground.sel(bands=band)
                 bg_band = background.sel(bands=band)

From 13896c371f44d6096bdc9b0686c57f0ca22ba8ce Mon Sep 17 00:00:00 2001
From: yukaribbba
Date: Sat, 16 Dec 2023 17:54:09 +0800
Subject: [PATCH 0965/1416] Update __init__.py

---
 satpy/composites/__init__.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py
index 259e8c04bb..abe9439812 100644
--- a/satpy/composites/__init__.py
+++ b/satpy/composites/__init__.py
@@ -1740,7 +1740,8 @@ def _get_merged_image_data(foreground: xr.DataArray,
                 chan = (fg_band * alpha_fore +
                         bg_band * alpha_back * (1 - alpha_fore)) / new_alpha if band != "A" else new_alpha

-                chan = xr.where(chan.isnull(), bg_band * alpha_back, chan) if bg_fill_in else chan
+                chan = xr.where(chan.isnull(), bg_band * alpha_back, chan) if (
+                    bg_fill_in and chan != new_alpha) else chan

                 data.append(chan)

From 23ddfba76d327f3a923c254a4b8e8ee0001e851d Mon Sep 17 00:00:00 2001
From: yukaribbba
Date: Sat, 16 Dec 2023 18:08:46 +0800
Subject: [PATCH 0966/1416] Update __init__.py

---
 satpy/composites/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py
index abe9439812..29708c402d 100644
--- a/satpy/composites/__init__.py
+++ b/satpy/composites/__init__.py
@@ -1741,7 +1741,7 @@ def _get_merged_image_data(foreground: xr.DataArray,
                         bg_band * alpha_back * (1 - alpha_fore)) / new_alpha if band != "A" else new_alpha

                 chan = xr.where(chan.isnull(), bg_band * alpha_back, chan) if (
-                    bg_fill_in and chan != new_alpha) else chan
+                    bg_fill_in and band != "A") else chan

                 data.append(chan)

From a87631eaff33b3cbd22d8f7c255500a54d362491 Mon Sep 17 00:00:00 2001
From: yukaribbba
Date: Sun, 17 Dec 2023 23:48:45 +0800
Subject: [PATCH 0967/1416] Update __init__.py

---
 satpy/composites/__init__.py | 102 ++++++++++++++++++++++++++++++-----
 1 file changed, 88 insertions(+), 14 deletions(-)

diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py
index 29708c402d..3cc4401b6c 100644
--- a/satpy/composites/__init__.py
+++ b/satpy/composites/__init__.py
@@ -1679,19 +1679,57 @@ def __call__(self, *args, **kwargs):
 class BackgroundCompositor(GenericCompositor):
     """A compositor that overlays one composite on top of another.

-    Args:
-        bg_fill_in (bool): True means the compositor will fill the area where
-                           foreground is Nan with background.
-                           False means it will just leave the area blank.
+    Besides foreground and background, a third optional dataset could be passed
+    to the compositor to use its Nan area for masking. This is useful when you
+    reproject a specific local-area image (e.g. a geostationary satellite view)
+    to global extent, and put it on a global background (e.g. NASA's Black Marble)
+    while making other areas of the world transparent, only keeping the local one.
+
+    To use this function in a YAML configuration file, add the third dataset
+    as ``optional_prerequisites``:
+
+    .. code-block:: yaml
+
+        night_cloud_alpha_2000_with_background:
+          compositor: !!python/name:satpy.composites.BackgroundCompositor
+          prerequisites:
+            - name: night_cloud_alpha_2000
+            - name: static_night
+          optional_prerequisites:
+            - name: IR105
+
     """

+    def __init__(self, name, mask_value=None, **kwargs):  # noqa: D417
+        """Collect custom configuration values.
+
+        Args:
+            mask_value (float / None): If it's a float, all the pixels where the third dataset
+                                       values that are equal to this will be masked out.
+                                       If it's a string, it only accepts strings related to
+                                       ``np.nan``, e.g. "np.nan" / "nan" / "Nan" / "Null".
+                                       Otherwise, it will be set to ``np.nan``.
+                                       If not set by the user, it will also be ``np.nan``.
+                                       This argument could be helpful when you try to use a
+                                       StaticImageCompositor for mask. Please note: If you
+                                       are using ``mask_value`` then your mask dataset
+                                       shouldn't include an alpha channel.
+
+        """
+        self.mask_value = mask_value if mask_value is not None else np.nan
+
+        super(BackgroundCompositor, self).__init__(name, **kwargs)
+
-    def __call__(self, projectables, bg_fill_in=True, *args, **kwargs):
+    def __call__(self, projectables, optional_datasets=None, *args, **kwargs):
         """Call the compositor."""
-        projectables = self.match_data_arrays(projectables)
-        self.bg_fill_in = bg_fill_in
+        optional_datasets = tuple() if optional_datasets is None else optional_datasets
+        projectables = self.match_data_arrays(projectables + optional_datasets)
         # Get enhanced datasets
         foreground = enhance2dataset(projectables[0], convert_p=True)
         background = enhance2dataset(projectables[1], convert_p=True)
+        mask_dataset = enhance2dataset(projectables[2], convert_p=True) if not optional_datasets == [] else None
+
+        original_bg_mode = background.attrs["mode"]
+
         # Adjust bands so that they match
         # L/RGB -> RGB/RGB
         # LA/RGB -> RGBA/RGBA
         foreground = add_bands(foreground, background["bands"])
         background = add_bands(background, foreground["bands"])

+        # True means the alpha channel of the background was initially generated, e.g. by CloudCompositor
+        # not newly added through 'add_bands'
+        # False means it was newly added, or it just doesn't exist
+        # This is important in the next steps
+        original_bg_alpha = True if ("A" in original_bg_mode and "A" in background.attrs["mode"]) else False
+
+        mask = self._get_mask(mask_dataset, self.mask_value)
+
         attrs = self._combine_metadata_with_mode_and_sensor(foreground, background)
-        data = self._get_merged_image_data(foreground, background, bg_fill_in=self.bg_fill_in)
+        data = self._get_merged_image_data(foreground, background, original_bg_alpha=original_bg_alpha, mask=mask)
         res = super(BackgroundCompositor, self).__call__(data, **kwargs)
         res.attrs.update(attrs)
         return res
@@ -1719,19 +1765,48 @@ def _combine_metadata_with_mode_and_sensor(self,
         attrs["sensor"] = self._get_sensors([foreground, background])
         return attrs

+    @staticmethod
+    def _get_mask(dataset: xr.DataArray, mask_value):
+        if dataset is None:
+            mask = None
+        else:
+            # If the mask_dataset already has an alpha channel, just use it as mask
+            # Otherwise build one
+            if "A" in dataset.attrs["mode"]:
+                mask = dataset.sel(bands="A")
+            else:
+                if np.isnan(mask_value):
+                    mask = xr.where(dataset.isnull(), 0, 1)
+                else:
+                    mask = xr.where(dataset == mask_value, 0, 1)
+
+        return mask
+
     @staticmethod
     def _get_merged_image_data(foreground: xr.DataArray,
                                background: xr.DataArray,
-                               bg_fill_in=True
+                               original_bg_alpha: bool,
+                               mask: xr.DataArray
                                ) -> list[xr.DataArray]:
         if "A" in foreground.attrs["mode"]:
-            # Use alpha channel as weight and blend the two composites
+            # Use alpha channels as weights and blend the two composites
             alpha_fore = foreground.sel(bands="A")
-            alpha_back = background.sel(bands="A") if "A" in background.attrs["mode"] else 1
+            # If the background alpha is authentic just use it
+            # If not, it is full of 1 meaning it's a forged one, or it doesn't exist(but we still need it)
+            alpha_back = background.sel(bands="A") if original_bg_alpha else xr.full_like(alpha_fore, 1)
+            # Any way we need a new alpha for the new image
             new_alpha = alpha_fore + alpha_back * (1 - alpha_fore)
+            # Do the area-masking job
+            if mask is not None:
+                alpha_fore.data = np.minimum(alpha_fore.data, mask.data[0])
+                alpha_back.data = np.minimum(alpha_back.data, mask.data[0])
+                new_alpha.data = np.minimum(new_alpha.data, mask.data[0])

             data = []
-            band_list = foreground.mode if "A" in background.attrs["mode"] else foreground.mode[:-1]
+            # If the background alpha is authentic or area-masking is effective
+            # The compositor will pass the new_alpha to the writer
+            # Otherwise it will leave the writer to decide
+            band_list = foreground.mode if (original_bg_alpha or mask is not None)else foreground.mode[:-1]

             for band in band_list:
                 fg_band = foreground.sel(bands=band)
                 bg_band = background.sel(bands=band)

                 chan = (fg_band * alpha_fore +
                         bg_band * alpha_back * (1 - alpha_fore)) / new_alpha if band != "A" else new_alpha

-                chan = xr.where(chan.isnull(), bg_band * alpha_back, chan) if (
-                    bg_fill_in and band != "A") else chan
+                chan = xr.where(chan.isnull(), bg_band * alpha_back, chan)

                 data.append(chan)
         else:
             data_arr = xr.where(foreground.isnull(), background, foreground)
             # Split to separate bands so the mode is correct
             data = [data_arr.sel(bands=b) for b in data_arr["bands"]]

From e20ea4182a0cf14791e40e9886c9724167f49920 Mon Sep 17 00:00:00 2001
From: Martin Raspaud
Date: Mon, 18 Dec 2023 09:06:34 +0100
Subject: [PATCH 0968/1416] Update changelog for v0.46.0

---
 CHANGELOG.md | 56 ++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 56 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index aa85b83f56..8730209f99 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,59 @@
+## Version 0.46.0 (2023/12/18)
+
+### Issues Closed
+
+* [Issue 2668](https://github.com/pytroll/satpy/issues/2668) - FCI HRFI true_color unavailable even after native resampling if upper_right_corner is used ([PR 2690](https://github.com/pytroll/satpy/pull/2690) by [@djhoese](https://github.com/djhoese))
+* [Issue 2664](https://github.com/pytroll/satpy/issues/2664) - Cannot generate day-night composites
+* [Issue 2654](https://github.com/pytroll/satpy/issues/2654) - Unable to read radiance with AVHRR EPS ([PR 2655](https://github.com/pytroll/satpy/pull/2655) by [@mraspaud](https://github.com/mraspaud))
+* [Issue 2647](https://github.com/pytroll/satpy/issues/2647) - Preservation of input data dtype in processing FCI data
+* [Issue 2618](https://github.com/pytroll/satpy/issues/2618) - GCOM-C Support (Continued) ([PR 1094](https://github.com/pytroll/satpy/pull/1094) by [@mraspaud](https://github.com/mraspaud))
+* [Issue 2588](https://github.com/pytroll/satpy/issues/2588) - FCI chunks/segments out of order if pad_data=False ([PR 2692](https://github.com/pytroll/satpy/pull/2692) by [@ameraner](https://github.com/ameraner))
+* [Issue 2263](https://github.com/pytroll/satpy/issues/2263) - VIIRS day composite 'snow_age' does not work with Satpy 0.37.1
+* [Issue 1496](https://github.com/pytroll/satpy/issues/1496) - Improve error reporting of satpy.utils.get_satpos
+* [Issue 1086](https://github.com/pytroll/satpy/issues/1086) - Add a reader for GCOM-C Level 1 data ([PR 1094](https://github.com/pytroll/satpy/pull/1094) by [@mraspaud](https://github.com/mraspaud))
+
+In this release 9 issues were closed.
+
+### Pull Requests Merged
+
+#### Bugs fixed
+
+* [PR 2694](https://github.com/pytroll/satpy/pull/2694) - Match all projectables in `NDVIHybridGreen.__call__` to avoid coordinate mismatch errors ([2668](https://github.com/pytroll/satpy/issues/2668), [2668](https://github.com/pytroll/satpy/issues/2668))
+* [PR 2692](https://github.com/pytroll/satpy/pull/2692) - Anticipate filehandler sorting in `GEOSegmentYAMLReader` to have sorted handlers also with `pad_data=False` ([2588](https://github.com/pytroll/satpy/issues/2588))
+* [PR 2690](https://github.com/pytroll/satpy/pull/2690) - Fix composites failing on non-aligned geolocation coordinates ([2668](https://github.com/pytroll/satpy/issues/2668))
+* [PR 2682](https://github.com/pytroll/satpy/pull/2682) - Update AHI HSD reader to correctly handle singleton arrays.
+* [PR 2674](https://github.com/pytroll/satpy/pull/2674) - Update xarray version in CF writer tests for compression kwarg
+* [PR 2671](https://github.com/pytroll/satpy/pull/2671) - Workaround AWIPS bug not handling integers properly in "awips_tiled" writer
+* [PR 2669](https://github.com/pytroll/satpy/pull/2669) - Fix RealisticColors compositor upcasting data to float64
+* [PR 2655](https://github.com/pytroll/satpy/pull/2655) - Fix missing radiance units in eps l1b ([2654](https://github.com/pytroll/satpy/issues/2654))
+
+#### Features added
+
+* [PR 2683](https://github.com/pytroll/satpy/pull/2683) - Fci/l2/amv/reader
+* [PR 2679](https://github.com/pytroll/satpy/pull/2679) - Update MiRS reader coefficient files to newer version
+* [PR 2677](https://github.com/pytroll/satpy/pull/2677) - Add remaining JPSS satellite platform aliases to "mirs" reader ([665](https://github.com/ssec/polar2grid/issues/665))
+* [PR 2669](https://github.com/pytroll/satpy/pull/2669) - Fix RealisticColors compositor upcasting data to float64
+* [PR 2660](https://github.com/pytroll/satpy/pull/2660) - Update tropomi_l2 reader with "_reduced" file patterns
+* [PR 2557](https://github.com/pytroll/satpy/pull/2557) - Add baseline for GeoColor composite including FCI, AHI and ABI recipes
+* [PR 2106](https://github.com/pytroll/satpy/pull/2106) - Add Scene function to use Hvplot backend visualization
+* [PR 1094](https://github.com/pytroll/satpy/pull/1094) - Add Gcom-C sgli reader ([2618](https://github.com/pytroll/satpy/issues/2618), [1086](https://github.com/pytroll/satpy/issues/1086))
+
+#### Backward incompatible changes
+
+* [PR 2684](https://github.com/pytroll/satpy/pull/2684) - Get rid of warnings in compositor tests
+
+#### Clean ups
+
+* [PR 2691](https://github.com/pytroll/satpy/pull/2691) - Reduce the number of warnings in writer tests
+* [PR 2690](https://github.com/pytroll/satpy/pull/2690) - Fix composites failing on non-aligned geolocation coordinates ([2668](https://github.com/pytroll/satpy/issues/2668))
+* [PR 2684](https://github.com/pytroll/satpy/pull/2684) - Get rid of warnings in compositor tests
+* [PR 2681](https://github.com/pytroll/satpy/pull/2681) - Get rid of warnings in resampler tests
+* [PR 2676](https://github.com/pytroll/satpy/pull/2676) - Convert times in SEVIRI readers to nanosecond precision to silence warnings
+* [PR 2658](https://github.com/pytroll/satpy/pull/2658) - Update unstable version of h5py in CI
+
+In this release 23 pull requests were closed.
+
+
 ## Version 0.45.0 (2023/11/29)

 ### Issues Closed

From 74a3d14f24e797d79c02274e9b8cc7eb104ca3a3 Mon Sep 17 00:00:00 2001
From: Dario Stelitano
Date: Mon, 18 Dec 2023 09:15:41 +0100
Subject: [PATCH 0969/1416] to_geoviews and to_hvplot in _scene_converters.py

As requested by David in #2106
---
 satpy/_scene_converters.py | 137 +++++++++++++++++++++++++++++++++++++
 satpy/scene.py             | 130 +----------------------------------
 2 files changed, 138 insertions(+), 129 deletions(-)

diff --git a/satpy/_scene_converters.py b/satpy/_scene_converters.py
index fbc0a7a627..ce0ee27c1e 100644
--- a/satpy/_scene_converters.py
+++ b/satpy/_scene_converters.py
@@ -17,6 +17,7 @@

 import xarray as xr

+from satpy.composites import enhance2dataset
 from satpy.dataset import DataID


@@ -36,6 +37,142 @@ def _get_dataarrays_from_identifiers(scn, identifiers):
     return dataarrays


+def to_geoviews(scn, gvtype=None, datasets=None,
+                kdims=None, vdims=None, dynamic=False):
+    """Convert satpy Scene to geoviews.
+
+    Args:
+        scn (satpy.Scene): Satpy Scene.
+        gvtype (gv plot type):
+            One of gv.Image, gv.LineContours, gv.FilledContours, gv.Points
+            Default to :class:`geoviews.Image`.
+            See Geoviews documentation for details.
+        datasets (list): Limit included products to these datasets
+        kdims (list of str):
+            Key dimensions. See geoviews documentation for more information.
+        vdims (list of str, optional):
+            Value dimensions. See geoviews documentation for more information.
+            If not given defaults to first data variable
+        dynamic (bool, optional): Load and compute data on-the-fly during
+            visualization. Default is ``False``. See
+            https://holoviews.org/user_guide/Gridded_Datasets.html#working-with-xarray-data-types
+            for more information. Has no effect when data to be visualized
+            only has 2 dimensions (y/x or longitude/latitude) and doesn't
+            require grouping via the Holoviews ``groupby`` function.
+
+    Returns: geoviews object
+
+    Todo:
+        * better handling of projection information in datasets which are
+          to be passed to geoviews
+
+    """
+    import geoviews as gv
+    from cartopy import crs  # noqa
+    if gvtype is None:
+        gvtype = gv.Image
+
+    ds = scn.to_xarray_dataset(datasets)
+
+    if vdims is None:
+        # by default select first data variable as display variable
+        vdims = ds.data_vars[list(ds.data_vars.keys())[0]].name
+
+    if hasattr(ds, "area") and hasattr(ds.area, "to_cartopy_crs"):
+        dscrs = ds.area.to_cartopy_crs()
+        gvds = gv.Dataset(ds, crs=dscrs)
+    else:
+        gvds = gv.Dataset(ds)
+
+    # holoviews produces a log warning if you pass groupby arguments when groupby isn't used
+    groupby_kwargs = {"dynamic": dynamic} if gvds.ndims != 2 else {}
+    if "latitude" in ds.coords:
+        gview = gvds.to(gv.QuadMesh, kdims=["longitude", "latitude"],
+                        vdims=vdims, **groupby_kwargs)
+    else:
+        gview = gvds.to(gvtype, kdims=["x", "y"], vdims=vdims,
+                        **groupby_kwargs)
+
+    return gview
+
+def to_hvplot(scn, datasets=None, *args, **kwargs):
+    """Convert satpy Scene to Hvplot. The method cannot be used with composites of swath data.
+
+    Args:
+        scn (satpy.Scene): Satpy Scene.
+        datasets (list): Limit included products to these datasets.
+        args: Arguments coming from hvplot
+        kwargs: hvplot options dictionary.
+
+    Returns:
+        hvplot object that contains within it the plots of datasets list.
+        As default it contains all Scene datasets plots and a plot title
+        is shown.
+
+    Example usage::
+
+        scene_list = ['ash','IR_108']
+        scn = Scene()
+        scn.load(scene_list)
+        scn = scn.resample('eurol')
+        plot = scn.to_hvplot(datasets=scene_list)
+        plot.ash+plot.IR_108
+    """
+
+    def _get_crs(xarray_ds):
+        return xarray_ds.area.to_cartopy_crs()
+
+    def _get_timestamp(xarray_ds):
+        time = xarray_ds.attrs["start_time"]
+        return time.strftime("%Y %m %d -- %H:%M UTC")
+
+    def _get_units(xarray_ds, variable):
+        return xarray_ds[variable].attrs["units"]
+
+    def _plot_rgb(xarray_ds, variable, **defaults):
+        img = enhance2dataset(xarray_ds[variable])
+        return img.hvplot.rgb(bands="bands", title=title,
+                              clabel="", **defaults)
+
+    def _plot_quadmesh(xarray_ds, variable, **defaults):
+        return xarray_ds[variable].hvplot.quadmesh(
+            clabel=f"[{_get_units(xarray_ds,variable)}]", title=title,
+            **defaults)
+
+    import hvplot.xarray as hvplot_xarray  # noqa
+    from holoviews import Overlay
+
+    plot = Overlay()
+    xarray_ds = scn.to_xarray_dataset(datasets)
+
+    if hasattr(xarray_ds, "area") and hasattr(xarray_ds.area, "to_cartopy_crs"):
+        ccrs = _get_crs(xarray_ds)
+        defaults={"x":"x","y":"y"}
+    else:
+        ccrs = None
+        defaults={"x":"longitude","y":"latitude"}
+
+    if datasets is None:
+        datasets = list(xarray_ds.keys())
+
+    defaults.update(data_aspect=1, project=True, geo=True,
+                    crs=ccrs, projection=ccrs, rasterize=True,
+                    coastline="110m", cmap="Plasma", responsive=True,
+                    dynamic=False, framewise=True,colorbar=False,
+                    global_extent=False, xlabel="Longitude",
+                    ylabel="Latitude")
+
+    defaults.update(kwargs)
+
+    for element in datasets:
+        title = f"{element} @ {_get_timestamp(xarray_ds)}"
+        if xarray_ds[element].shape[0] == 3:
+            plot[element] = _plot_rgb(xarray_ds, element, **defaults)
+        else:
+            plot[element] = _plot_quadmesh(xarray_ds, element, **defaults)
+
+    return plot
+
 def to_xarray(scn,
               datasets=None,  # DataID
               header_attrs=None,
diff --git a/satpy/scene.py b/satpy/scene.py
index d1ba795ac8..5ba8832729 100644
--- a/satpy/scene.py
+++ b/satpy/scene.py
@@ -28,7 +28,7 @@
 from pyresample.geometry import AreaDefinition, BaseDefinition, SwathDefinition
 from xarray import DataArray

-from satpy.composites import IncompatibleAreas, enhance2dataset
+from satpy.composites import IncompatibleAreas
 from satpy.composites.config_loader import load_compositor_configs_for_sensors
 from satpy.dataset import DataID, DataQuery, DatasetDict, combine_metadata, dataset_walker, replace_anc
 from satpy.dependency_tree import DependencyTree
@@ -1012,134 +1012,6 @@ def show(self, dataset_id, overlay=None):
         img.show()
         return img

-    def to_geoviews(self, gvtype=None, datasets=None, kdims=None, vdims=None, dynamic=False):
-        """Convert satpy Scene to geoviews.
-
-        Args:
-            gvtype (gv plot type):
-                One of gv.Image, gv.LineContours, gv.FilledContours, gv.Points
-                Default to :class:`geoviews.Image`.
-                See Geoviews documentation for details.
-            datasets (list): Limit included products to these datasets
-            kdims (list of str):
-                Key dimensions. See geoviews documentation for more information.
-            vdims (list of str, optional):
-                Value dimensions. See geoviews documentation for more information.
-                If not given defaults to first data variable
-            dynamic (bool, optional): Load and compute data on-the-fly during
-                visualization. Default is ``False``. See
-                https://holoviews.org/user_guide/Gridded_Datasets.html#working-with-xarray-data-types
-                for more information. Has no effect when data to be visualized
-                only has 2 dimensions (y/x or longitude/latitude) and doesn't
-                require grouping via the Holoviews ``groupby`` function.
-
-        Returns: geoviews object
-
-        Todo:
-            * better handling of projection information in datasets which are
-              to be passed to geoviews
-
-        """
-        import geoviews as gv
-        from cartopy import crs  # noqa
-        if gvtype is None:
-            gvtype = gv.Image
-
-        ds = self.to_xarray_dataset(datasets)
-
-        if vdims is None:
-            # by default select first data variable as display variable
-            vdims = ds.data_vars[list(ds.data_vars.keys())[0]].name
-
-        if hasattr(ds, "area") and hasattr(ds.area, "to_cartopy_crs"):
-            dscrs = ds.area.to_cartopy_crs()
-            gvds = gv.Dataset(ds, crs=dscrs)
-        else:
-            gvds = gv.Dataset(ds)
-
-        # holoviews produces a log warning if you pass groupby arguments when groupby isn't used
-        groupby_kwargs = {"dynamic": dynamic} if gvds.ndims != 2 else {}
-        if "latitude" in ds.coords:
-            gview = gvds.to(gv.QuadMesh, kdims=["longitude", "latitude"], vdims=vdims, **groupby_kwargs)
-        else:
-            gview = gvds.to(gvtype, kdims=["x", "y"], vdims=vdims, **groupby_kwargs)
-
-        return gview
-
-    def to_hvplot(self, datasets=None, *args, **kwargs):
-        """Convert satpy Scene to Hvplot. The method could not be used with composites of swath data.
-
-        Args:
-            datasets (list): Limit included products to these datasets.
-            args: Arguments coming from hvplot
-            kwargs: hvplot options dictionary.
-
-        Returns: hvplot object that contains within it the plots of datasets list.
-            As default it contains all Scene datasets plots and a plot title is shown.
-
-        Example usage::
-
-            scene_list = ['ash','IR_108']
-            scn = Scene()
-            scn.load(scene_list)
-            scn = scn.resample('eurol')
-            plot = scn.to_hvplot(datasets=scene_list)
-            plot.ash+plot.IR_108
-        """
-
-        def _get_crs(xarray_ds):
-            return xarray_ds.area.to_cartopy_crs()
-
-        def _get_timestamp(xarray_ds):
-            time = xarray_ds.attrs["start_time"]
-            return time.strftime("%Y %m %d -- %H:%M UTC")
-
-        def _get_units(xarray_ds, variable):
-            return xarray_ds[variable].attrs["units"]
-
-        def _plot_rgb(xarray_ds, variable, **defaults):
-            img = enhance2dataset(xarray_ds[variable])
-            return img.hvplot.rgb(bands="bands", title=title,
-                                  clabel="", **defaults)
-
-        def _plot_quadmesh(xarray_ds, variable, **defaults):
-            return xarray_ds[variable].hvplot.quadmesh(
-                clabel=f"[{_get_units(xarray_ds,variable)}]", title=title,
-                **defaults)
-
-        import hvplot.xarray as hvplot_xarray  # noqa
-        from holoviews import Overlay
-
-        plot = Overlay()
-        xarray_ds = self.to_xarray_dataset(datasets)
-
-        if hasattr(xarray_ds, "area") and hasattr(xarray_ds.area, "to_cartopy_crs"):
-            ccrs = _get_crs(xarray_ds)
-            defaults={"x":"x","y":"y"}
-        else:
-            ccrs = None
-            defaults={"x":"longitude","y":"latitude"}
-
-        if datasets is None:
-            datasets = list(xarray_ds.keys())
-
-        defaults.update(data_aspect=1, project=True, geo=True,
-                        crs=ccrs, projection=ccrs, rasterize=True, coastline="110m",
-                        cmap="Plasma", responsive=True, dynamic=False, framewise=True,
-                        colorbar=False, global_extent=False, xlabel="Longitude",
-                        ylabel="Latitude")
-
-        defaults.update(kwargs)
-
-        for element in datasets:
-            title = f"{element} @ {_get_timestamp(xarray_ds)}"
-            if xarray_ds[element].shape[0] == 3:
-                plot[element] = _plot_rgb(xarray_ds, element, **defaults)
-            else:
-                plot[element] = _plot_quadmesh(xarray_ds, element, **defaults)
-
-        return plot
-
     def to_xarray_dataset(self, datasets=None):
         """Merge all xr.DataArrays of a scene to a xr.DataSet.
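
With to_geoviews and to_hvplot moved into satpy/_scene_converters.py, the geoviews/hvplot/holoviews imports only happen inside the functions, when a plot is actually requested. A short usage sketch of the relocated helper (the input file name below is hypothetical; any files Satpy can read would do):

    from satpy import Scene
    from satpy._scene_converters import to_hvplot

    scn = Scene(filenames=["MSG4-SEVI-MSG15-0100-NA-20231218121242.nat"],
                reader="seviri_l1b_native")
    scn.load(["IR_108"])
    scn = scn.resample("eurol")
    plot = to_hvplot(scn, datasets=["IR_108"])  # a holoviews Overlay
    plot.IR_108  # display the single panel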
From 9d306e59df1faa01e4fba5bdbdd9e5ff31172404 Mon Sep 17 00:00:00 2001
From: yukaribbba
Date: Mon, 18 Dec 2023 16:32:23 +0800
Subject: [PATCH 0970/1416] composite and test

---
 satpy/composites/__init__.py   | 148 ++++++++++++++++++---------------
 satpy/tests/test_composites.py |  95 +++++++++++++--------
 2 files changed, 144 insertions(+), 99 deletions(-)

diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py
index 3cc4401b6c..176ccc5764 100644
--- a/satpy/composites/__init__.py
+++ b/satpy/composites/__init__.py
@@ -1680,7 +1680,8 @@ class BackgroundCompositor(GenericCompositor):
     """A compositor that overlays one composite on top of another.

     Besides foreground and background, a third optional dataset could be passed
-    to the compositor to use its Nan area for masking. This is useful when you
+    to the compositor to use it for masking. If this dataset contains
+    more than one band, only the first band will be used. This is useful when you
     reproject a specific local-area image (e.g. a geostationary satellite view)
     to global extent, and put it on a global background (e.g. NASA's Black Marble)
     while making other areas of the world transparent, only keeping the local one.
@@ -1703,16 +1704,12 @@ def __init__(self, name, mask_value=None, **kwargs):  # noqa: D417
         """Collect custom configuration values.

         Args:
-            mask_value (float / None): If it's a float, all the pixels where the third dataset
-                                       values that are equal to this will be masked out.
-                                       If it's a string, it only accepts strings related to
-                                       ``np.nan``, e.g. "np.nan" / "nan" / "Nan" / "Null".
-                                       Otherwise, it will be set to ``np.nan``.
-                                       If not set by the user, it will also be ``np.nan``.
+            mask_value (float / None): All the pixels on the stacked image where the values
+                                       of the third dataset that are equal to this will be
+                                       masked out.
+                                       If not set by the user, it will be ``np.nan``.
                                        This argument could be helpful when you try to use a
-                                       StaticImageCompositor for mask. Please note: If you
-                                       are using ``mask_value`` then your mask dataset
-                                       shouldn't include an alpha channel.
+                                       local image (StaticImageCompositor) for masking.

         """
         self.mask_value = mask_value if mask_value is not None else np.nan

     def __call__(self, projectables, optional_datasets=None, *args, **kwargs):
         """Call the compositor."""
-        optional_datasets = tuple() if optional_datasets is None else optional_datasets
+        optional_datasets = [] if optional_datasets is None else optional_datasets
         projectables = self.match_data_arrays(projectables + optional_datasets)
         # Get enhanced datasets
         foreground = enhance2dataset(projectables[0], convert_p=True)
         background = enhance2dataset(projectables[1], convert_p=True)
-        mask_dataset = enhance2dataset(projectables[2], convert_p=True) if not optional_datasets == [] else None
-
-        original_bg_mode = background.attrs["mode"]
-
+        mask_dataset = projectables[2] if not optional_datasets == [] else None

         # Adjust bands so that they match
         # L/RGB -> RGB/RGB
         # LA/RGB -> RGBA/RGBA
         foreground = add_bands(foreground, background["bands"])
         background = add_bands(background, foreground["bands"])

-        # True means the alpha channel of the background was initially generated, e.g. by CloudCompositor
-        # not newly added through 'add_bands'
-        # False means it was newly added, or it just doesn't exist
-        # This is important in the next steps
-        original_bg_alpha = True if ("A" in original_bg_mode and "A" in background.attrs["mode"]) else False
-
         mask = self._get_mask(mask_dataset, self.mask_value)
+        alpha_fore = self._get_alpha(foreground)
+        alpha_back = self._get_alpha(background)
+        output_mode = self._get_output_mode(foreground, background, mask)

         attrs = self._combine_metadata_with_mode_and_sensor(foreground, background)
-        data = self._get_merged_image_data(foreground, background, original_bg_alpha=original_bg_alpha, mask=mask)
+        data = self._get_merged_image_data(foreground, background, mask=mask,
+                                           alpha_fore=alpha_fore, alpha_back=alpha_back, output_mode=output_mode)
         res = super(BackgroundCompositor, self).__call__(data, **kwargs)
         res.attrs.update(attrs)
         return res

     @staticmethod
     def _get_mask(dataset: xr.DataArray, mask_value):
         if dataset is None:
             mask = None
         else:
-            # If the mask_dataset already has an alpha channel, just use it as mask
-            # Otherwise build one
-            if "A" in dataset.attrs["mode"]:
-                mask = dataset.sel(bands="A")
+            dataset = dataset.isel(bands=0)
+            if np.isnan(mask_value):
+                mask = xr.where(dataset.isnull(), 0, 1)
             else:
-                if np.isnan(mask_value):
-                    mask = xr.where(dataset.isnull(), 0, 1)
-                else:
-                    mask = xr.where(dataset == mask_value, 0, 1)
+                mask = xr.where(dataset == mask_value, 0, 1)

         return mask

+    @staticmethod
+    def _get_alpha(dataset: xr.DataArray):
+        # If the dataset contains an alpha channel, just use it
+        # If not, we still need one. So build it and fill it with 1
+        if "A" in dataset.attrs['mode']:
+            alpha = dataset.sel(bands="A")
+        else:
+            first_band = dataset.isel(bands=0)
+            alpha = xr.full_like(first_band, 1)
+            alpha['bands'] = "A"
+
+        # There could be Nans in the alpha, especially for original ones
+        # Replace them with 0, so they won't affect new_alpha
+        alpha = xr.where(alpha.isnull(), 0, alpha)
+
+        return alpha
+
+    @staticmethod
+    def _get_output_mode(foreground: xr.DataArray,
+                         background: xr.DataArray,
+                         mask: xr.DataArray):
+        # Get the output bands of the stacked image
+        # Actually, it's about deciding whether to pass the new alpha band of the stacked image to the writer
+        # Or just leave the write for decision
+
+        # If both images have alpha band or just background has one, the new alpha band will be passed to the writer
+        # If area-masking is needed, the same
+        # If neither of the images has alpha band but area-masking is still needed, the same
+        if "A" in foreground.attrs['mode']:
+            if "A" in background.attrs['mode']:
+                output_mode = background.mode
+            else:
+                output_mode = background.mode if mask is None else foreground.mode
+        else:
+            if "A" in background.attrs['mode']:
+                output_mode = background.mode
+            else:
+                output_mode = foreground.mode if mask is None else foreground.mode + "A"
+
+        return output_mode
+
     @staticmethod
     def _get_merged_image_data(foreground: xr.DataArray,
                                background: xr.DataArray,
-                               original_bg_alpha: bool,
-                               mask: xr.DataArray
+                               mask: xr.DataArray,
+                               alpha_fore: xr.DataArray,
+                               alpha_back: xr.DataArray,
+                               output_mode: str,
                                ) -> list[xr.DataArray]:
-        if "A" in foreground.attrs["mode"]:
-            # Use alpha channels as weights and blend the two composites
-            alpha_fore = foreground.sel(bands="A")
-            # If the background alpha is authentic just use it
-            # If not, it is full of 1 meaning it's a forged one, or it doesn't exist(but we still need it)
-            alpha_back = background.sel(bands="A") if original_bg_alpha else xr.full_like(alpha_fore, 1)
-            # Any way we need a new alpha for the new image
-            new_alpha = alpha_fore + alpha_back * (1 - alpha_fore)
-            # Do the area-masking job
-            if mask is not None:
-                alpha_fore.data = np.minimum(alpha_fore.data, mask.data[0])
-                alpha_back.data = np.minimum(alpha_back.data, mask.data[0])
-                new_alpha.data = np.minimum(new_alpha.data, mask.data[0])
-
-            data = []
-            # If the background alpha is authentic or area-masking is effective
-            # The compositor will pass the new_alpha to the writer
-            # Otherwise it will leave the writer to decide
-            band_list = foreground.mode if (original_bg_alpha or mask is not None)else foreground.mode[:-1]
-
-            for band in band_list:
-                fg_band = foreground.sel(bands=band)
-                bg_band = background.sel(bands=band)
-
-                chan = (fg_band * alpha_fore +
-                        bg_band * alpha_back * (1 - alpha_fore)) / new_alpha if band != "A" else new_alpha
-
-                chan = xr.where(chan.isnull(), bg_band * alpha_back, chan)
-
-                data.append(chan)
-        else:
-            data_arr = xr.where(foreground.isnull(), background, foreground)
-            # Split to separate bands so the mode is correct
-            data = [data_arr.sel(bands=b) for b in data_arr["bands"]]
+        new_alpha = alpha_fore + alpha_back * (1 - alpha_fore)
+
+        # Do the masking job
+        if mask is not None:
+            alpha_fore.data = np.minimum(alpha_fore.data, mask.data)
+            alpha_back.data = np.minimum(alpha_back.data, mask.data)
+            new_alpha.data = np.minimum(new_alpha.data, mask.data)
+
+        data = []
+
+        for band in output_mode:
+            fg_band = foreground.sel(bands=band) if band != "A" else new_alpha
+            bg_band = background.sel(bands=band) if band != "A" else new_alpha
+
+            chan = (fg_band * alpha_fore +
+                    bg_band * alpha_back * (1 - alpha_fore)) / new_alpha if band != "A" else new_alpha
+
+            chan = xr.where(chan.isnull(), bg_band * alpha_back, chan)
+
+            data.append(chan)

         return data
diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py
index 39e8485475..725eb23895 100644
--- a/satpy/tests/test_composites.py
+++ b/satpy/tests/test_composites.py
@@ -1493,53 +1493,74 @@ def setup_class(cls):
                 [[1.0, 0.5], [0.0, np.nan]],
                 [[0.5, 0.5], [0.5, 0.5]]]),
         }
+        mask_data = {
+            "L": np.array([[[1., 0.5], [0., np.nan]]]),
+            "RGB": np.array([
+                [[1., 0.5], [0., np.nan]],
+                [[1., 0.5], [0., np.nan]],
+                [[1., 0.5], [0., np.nan]]]),
+        }

         cls.foreground_data = foreground_data
+        cls.mask_data = mask_data

     @mock.patch("satpy.composites.enhance2dataset", _enhance2dataset)
     @pytest.mark.parametrize(
-        ("foreground_bands", "background_bands", "bg_fill_in", "exp_bands", "exp_result"),
+        ("foreground_bands", "background_bands", "mask", "mask_bands", "mask_value", "exp_bands", "exp_result"),
         [
-            ("L", "L", True, "L", np.array([[1.0, 0.5], [0.0,
0.0]],[[1.0, 1.0], [1.0, 0.0]]])), + ("L", "L", True, "RGB", None, "LA", np.array([[[1.0, 0.5], [0.0, 0.0]], [[1.0, 1.0], [1.0, 0.0]]])), + ("L", "LA", False, "L", None, "LA", np.array([[[1.0, 0.5], [0.0, 1.0]], [[1.0, 1.0], [1.0, 1.0]]])), + ("LA", "LA", False, "RGB", None, "LA", np.array([[[1.0, 0.75], [0.5, 1.0]], [[1.0, 1.0], [1.0, 1.0]]])), + ("LA", "RGB", True, "L", None, "RGBA", np.array([ + [[1.0, 0.75], [0.5, 0.0]], + [[1.0, 0.75], [0.5, 0.0]], + [[1.0, 0.75], [0.5, 0.0]], + [[1.0, 1.0], [1.0, 0.0]]])), + ("RGB", "RGB", True, "L", 1, "RGBA", np.array([ + [[0.0, 0.5], [0.0, 1.0]], + [[0.0, 0.5], [0.0, 1.0]], + [[0.0, 0.5], [0.0, 1.0]], + [[0.0, 1.0], [1.0, 1.0]]])), + ("RGB", "RGB", True, "RGB", 0.5, "RGBA", np.array([ + [[1.0, 0.0], [0.0, 1.0]], + [[1.0, 0.0], [0.0, 1.0]], + [[1.0, 0.0], [0.0, 1.0]], + [[1.0, 0.0], [1.0, 1.0]]])), + ("RGB", "RGBA", False, "L", 1, "RGBA", np.array([ + [[1.0, 0.5], [0.0, 1.0]], + [[1.0, 0.5], [0.0, 1.0]], + [[1.0, 0.5], [0.0, 1.0]], [[1.0, 1.0], [1.0, 1.0]]])), - ("RGBA", "RGB", True, "RGBA", np.array([ - [[1., 0.75], [0.5, 1.]], - [[1., 0.75], [0.5, 1.]], - [[1., 0.75], [0.5, 1.]], - [[1.0, 1.0], [1.0, 1.0]]])), - ("RGBA", "RGB", False, "RGBA", np.array([ - [[1., 0.75], [0.5, 1.]], - [[1., 0.75], [0.5, 1.]], - [[1., 0.75], [0.5, 1.]], + ("RGBA", "RGB", True, "L", None, "RGBA", np.array([ + [[1.0, 0.75], [0.5, 0.0]], + [[1.0, 0.75], [0.5, 0.0]], + [[1.0, 0.75], [0.5, 0.0]], + [[1.0, 1.0], [1.0, 0.0]]])), + ("RGBA", "RGB", True, "RGB", 0, "RGBA", np.array([ + [[1.0, 0.75], [0.0, 1.0]], + [[1.0, 0.75], [0.0, 1.0]], + [[1.0, 0.75], [0.0, 1.0]], + [[1.0, 1.0], [0.0, 1.0]]])), + ("RGBA", "RGBA", True, "L", 0.5, "RGBA", np.array([ + [[1.0, 0.0], [0.5, 1.0]], + [[1.0, 0.0], [0.5, 1.0]], + [[1.0, 0.0], [0.5, 1.0]], + [[1.0, 0.0], [1.0, 1.0]]])), + ("RGBA", "RGBA", False, "RGB", 0, "RGBA", np.array([ + [[1.0, 0.75], [0.5, 1.0]], + [[1.0, 0.75], [0.5, 1.0]], + [[1.0, 0.75], [0.5, 1.0]], [[1.0, 1.0], [1.0, 1.0]]])), ] ) - def test_call(self, foreground_bands, background_bands, bg_fill_in, exp_bands, exp_result): + def test_call(self, foreground_bands, background_bands, mask, mask_bands, mask_value, exp_bands, exp_result): """Test the background compositing.""" from satpy.composites import BackgroundCompositor - comp = BackgroundCompositor("name", bg_fill_in=bg_fill_in) + comp = BackgroundCompositor("name", mask_value=mask_value) # L mode images foreground_data = self.foreground_data[foreground_bands] + mask_data = self.mask_data[mask_bands] attrs = {"mode": foreground_bands, "area": "foo"} foreground = xr.DataArray(da.from_array(foreground_data), dims=("bands", "y", "x"), @@ -1549,7 +1570,13 @@ def test_call(self, foreground_bands, background_bands, bg_fill_in, exp_bands, e background = xr.DataArray(da.ones((len(background_bands), 2, 2)), dims=("bands", "y", "x"), coords={"bands": [c for c in attrs["mode"]]}, attrs=attrs) - res = comp([foreground, background]) + attrs = {"mode": mask_bands, "area": "foo"} + mask_dataset = xr.DataArray(da.from_array(mask_data), + dims=("bands", "y", "x"), + coords={"bands": [c for c in attrs["mode"]]}, + attrs=attrs) + res = comp([foreground, background], optional_datasets=[mask_dataset]) if mask else \ + comp([foreground, background]) assert res.attrs["area"] == "foo" np.testing.assert_allclose(res, exp_result) assert res.attrs["mode"] == exp_bands From 01af472a36ea037f5f891eaa6fbfe0ec9bc63133 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 18 Dec 2023 
08:34:41 +0000 Subject: [PATCH 0971/1416] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- satpy/composites/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 176ccc5764..a76c58d71c 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1795,7 +1795,7 @@ def _get_output_mode(foreground: xr.DataArray, # Get the output bands of the stacked image # Actually, it's about deciding whether to pass the new alpha band of the stacked image to the writer # Or just leave the write for decision - + # If both images have alpha band or just background has one, the new alpha band will be passed to the writer # If area-masking is needed, the same # If neither of the images has alpha band but area-masking is still needed, the same From d1ef7dc8f0489c0ed30ad9ea2f1576bed0fbc575 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Mon, 18 Dec 2023 16:36:23 +0800 Subject: [PATCH 0972/1416] Update __init__.py --- satpy/composites/__init__.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 176ccc5764..3c59ca9635 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1775,12 +1775,12 @@ def _get_mask(dataset: xr.DataArray, mask_value): def _get_alpha(dataset: xr.DataArray): # If the dataset contains an alpha channel, just use it # If not, we still need one. So build it and fill it with 1 - if "A" in dataset.attrs['mode']: + if "A" in dataset.attrs["mode"]: alpha = dataset.sel(bands="A") else: first_band = dataset.isel(bands=0) alpha = xr.full_like(first_band, 1) - alpha['bands'] = "A" + alpha["bands"] = "A" # There could be Nans in the alpha, especially for original ones # Replace them with 0, so they won't affect new_alpha @@ -1799,13 +1799,13 @@ def _get_output_mode(foreground: xr.DataArray, # If both images have alpha band or just background has one, the new alpha band will be passed to the writer # If area-masking is needed, the same # If neither of the images has alpha band but area-masking is still needed, the same - if "A" in foreground.attrs['mode']: - if "A" in background.attrs['mode']: + if "A" in foreground.attrs["mode"]: + if "A" in background.attrs["mode"]: output_mode = background.mode else: output_mode = background.mode if mask is None else foreground.mode else: - if "A" in background.attrs['mode']: + if "A" in background.attrs["mode"]: output_mode = background.mode else: output_mode = foreground.mode if mask is None else foreground.mode + "A" From 1d20d45d15ed5baee7ef41a7a700d78c3f3a2692 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Mon, 18 Dec 2023 16:47:46 +0800 Subject: [PATCH 0973/1416] Update __init__.py --- satpy/composites/__init__.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 12496f42b1..c6717aa141 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1763,7 +1763,12 @@ def _get_mask(dataset: xr.DataArray, mask_value): if dataset is None: mask = None else: - dataset = dataset.isel(bands=0) + # If mask dataset is a composite, then extract its first band + try: + dataset = dataset.isel(bands=0) + except ValueError: + pass + if np.isnan(mask_value): mask = xr.where(dataset.isnull(), 0, 1) else: From 89818ad1ba03dc8a0edbdac25780f193cffcf6a3 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: 
Mon, 18 Dec 2023 17:31:21 +0800 Subject: [PATCH 0974/1416] Update test_composites.py --- satpy/tests/test_composites.py | 59 ++++++++++++++++++++++++---------- 1 file changed, 42 insertions(+), 17 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 725eb23895..29f77c25ed 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -1500,67 +1500,82 @@ def setup_class(cls): [[1., 0.5], [0., np.nan]], [[1., 0.5], [0., np.nan]]]), } + mask_no_bands_data = np.array([[1., 0.5], [0., np.nan]]) cls.foreground_data = foreground_data cls.mask_data = mask_data + cls.mask_no_bands_data = mask_no_bands_data @mock.patch("satpy.composites.enhance2dataset", _enhance2dataset) @pytest.mark.parametrize( - ("foreground_bands", "background_bands", "mask", "mask_bands", "mask_value", "exp_bands", "exp_result"), + ("foreground_bands", "background_bands", "mask", "mask_no_bands", + "mask_bands", "mask_value", "exp_bands", "exp_result"), [ - ("L", "L", True, "L", None, "LA", np.array([[[1.0, 0.5], [0.0, 0.0]],[[1.0, 1.0], [1.0, 0.0]]])), - ("L", "L", True, "RGB", None, "LA", np.array([[[1.0, 0.5], [0.0, 0.0]], [[1.0, 1.0], [1.0, 0.0]]])), - ("L", "LA", False, "L", None, "LA", np.array([[[1.0, 0.5], [0.0, 1.0]], [[1.0, 1.0], [1.0, 1.0]]])), - ("LA", "LA", False, "RGB", None, "LA", np.array([[[1.0, 0.75], [0.5, 1.0]], [[1.0, 1.0], [1.0, 1.0]]])), - ("LA", "RGB", True, "L", None, "RGBA", np.array([ + ("L", "L", True, False, "L", None, "LA", np.array([[[1.0, 0.5], [0.0, 0.0]],[[1.0, 1.0], [1.0, 0.0]]])), + ("L", "L", True, False, "RGB", None, "LA", np.array([[[1.0, 0.5], [0.0, 0.0]], [[1.0, 1.0], [1.0, 0.0]]])), + ("L", "LA", False, False, "L", None, "LA", np.array([[[1.0, 0.5], [0.0, 1.0]], [[1.0, 1.0], [1.0, 1.0]]])), + ("LA", "LA", False, False, "RGB", None, "LA", np.array([[[1.0, 0.75], [0.5, 1.0]], [[1.0, 1.0], [1.0, 1.0]]])), + ("LA", "RGB", True, False, "L", None, "RGBA", np.array([ [[1.0, 0.75], [0.5, 0.0]], [[1.0, 0.75], [0.5, 0.0]], [[1.0, 0.75], [0.5, 0.0]], [[1.0, 1.0], [1.0, 0.0]]])), - ("RGB", "RGB", True, "L", 1, "RGBA", np.array([ + ("RGB", "RGB", True, False, "L", 1, "RGBA", np.array([ [[0.0, 0.5], [0.0, 1.0]], [[0.0, 0.5], [0.0, 1.0]], [[0.0, 0.5], [0.0, 1.0]], [[0.0, 1.0], [1.0, 1.0]]])), - ("RGB", "RGB", True, "RGB", 0.5, "RGBA", np.array([ + ("RGB", "RGB", True, False, "RGB", 0.5, "RGBA", np.array([ [[1.0, 0.0], [0.0, 1.0]], [[1.0, 0.0], [0.0, 1.0]], [[1.0, 0.0], [0.0, 1.0]], [[1.0, 0.0], [1.0, 1.0]]])), - ("RGB", "RGBA", False, "L", 1, "RGBA", np.array([ + ("RGB", "RGBA", False, False, "L", 1, "RGBA", np.array([ [[1.0, 0.5], [0.0, 1.0]], [[1.0, 0.5], [0.0, 1.0]], [[1.0, 0.5], [0.0, 1.0]], [[1.0, 1.0], [1.0, 1.0]]])), - ("RGBA", "RGB", True, "L", None, "RGBA", np.array([ + ("RGBA", "RGB", True, False, "L", None, "RGBA", np.array([ [[1.0, 0.75], [0.5, 0.0]], [[1.0, 0.75], [0.5, 0.0]], [[1.0, 0.75], [0.5, 0.0]], [[1.0, 1.0], [1.0, 0.0]]])), - ("RGBA", "RGB", True, "RGB", 0, "RGBA", np.array([ + ("RGBA", "RGB", True, False, "RGB", 0, "RGBA", np.array([ [[1.0, 0.75], [0.0, 1.0]], [[1.0, 0.75], [0.0, 1.0]], [[1.0, 0.75], [0.0, 1.0]], [[1.0, 1.0], [0.0, 1.0]]])), - ("RGBA", "RGBA", True, "L", 0.5, "RGBA", np.array([ + ("RGBA", "RGB", False, False, "RGB", 0, "RGBA", np.array([ + [[1.0, 0.75], [0.5, 1.0]], + [[1.0, 0.75], [0.5, 1.0]], + [[1.0, 0.75], [0.5, 1.0]], + [[1.0, 1.0], [1.0, 1.0]]])), + ("RGBA", "RGBA", True, False, "L", 0.5, "RGBA", np.array([ [[1.0, 0.0], [0.5, 1.0]], [[1.0, 0.0], [0.5, 1.0]], [[1.0, 0.0], [0.5, 1.0]], [[1.0, 
0.0], [1.0, 1.0]]])), - ("RGBA", "RGBA", False, "RGB", 0, "RGBA", np.array([ + ("RGBA", "RGBA", False, False, "RGB", 0, "RGBA", np.array([ [[1.0, 0.75], [0.5, 1.0]], [[1.0, 0.75], [0.5, 1.0]], [[1.0, 0.75], [0.5, 1.0]], [[1.0, 1.0], [1.0, 1.0]]])), + ("RGBA", "RGBA", True, True, "L", None, "RGBA", np.array([ + [[1.0, 0.75], [0.5, 0.0]], + [[1.0, 0.75], [0.5, 0.0]], + [[1.0, 0.75], [0.5, 0.0]], + [[1.0, 1.0], [1.0, 0.0]]])), ] ) - def test_call(self, foreground_bands, background_bands, mask, mask_bands, mask_value, exp_bands, exp_result): + def test_call(self, foreground_bands, background_bands, mask, mask_no_bands, mask_bands, mask_value, + exp_bands, exp_result): """Test the background compositing.""" from satpy.composites import BackgroundCompositor comp = BackgroundCompositor("name", mask_value=mask_value) # L mode images foreground_data = self.foreground_data[foreground_bands] - mask_data = self.mask_data[mask_bands] + mask_data = self.mask_data[mask_bands] if not mask_no_bands else self.mask_data["L"] + attrs = {"mode": foreground_bands, "area": "foo"} foreground = xr.DataArray(da.from_array(foreground_data), dims=("bands", "y", "x"), @@ -1575,8 +1590,18 @@ def test_call(self, foreground_bands, background_bands, mask, mask_bands, mask_v dims=("bands", "y", "x"), coords={"bands": [c for c in attrs["mode"]]}, attrs=attrs) - res = comp([foreground, background], optional_datasets=[mask_dataset]) if mask else \ - comp([foreground, background]) + attrs = {"area": "foo"} + mask_no_bands_dataset = xr.DataArray(da.from_array(self.mask_no_bands_data), + dims=("y", "x"), + attrs=attrs) + if mask and not mask_no_bands: + res = comp([foreground, background], optional_datasets=[mask_dataset]) + elif mask and mask_no_bands: + res = comp([foreground, background], optional_datasets=[mask_no_bands_dataset]) + else: + res = comp([foreground, background]) + print(res.data.compute()) + assert res.attrs["area"] == "foo" np.testing.assert_allclose(res, exp_result) assert res.attrs["mode"] == exp_bands From 27eb1582cf3275c83c348064f28288e786837c0a Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Mon, 18 Dec 2023 18:08:38 +0800 Subject: [PATCH 0975/1416] EDIT --- satpy/composites/__init__.py | 15 ++++++--------- satpy/tests/test_composites.py | 4 ++-- 2 files changed, 8 insertions(+), 11 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index c6717aa141..2bb8163129 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1804,16 +1804,13 @@ def _get_output_mode(foreground: xr.DataArray, # If both images have alpha band or just background has one, the new alpha band will be passed to the writer # If area-masking is needed, the same # If neither of the images has alpha band but area-masking is still needed, the same - if "A" in foreground.attrs["mode"]: - if "A" in background.attrs["mode"]: - output_mode = background.mode - else: - output_mode = background.mode if mask is None else foreground.mode + if "A" in background.attrs["mode"]: + output_mode = background.mode else: - if "A" in background.attrs["mode"]: - output_mode = background.mode - else: - output_mode = foreground.mode if mask is None else foreground.mode + "A" + output_mode = ( + background.mode if "A" in foreground.attrs["mode"] and mask is None else + foreground.mode if mask is None else foreground.mode + "A" + ) return output_mode diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 29f77c25ed..e50389a326 100644 --- a/satpy/tests/test_composites.py +++ 
b/satpy/tests/test_composites.py @@ -1519,7 +1519,7 @@ def setup_class(cls): [[1.0, 0.75], [0.5, 0.0]], [[1.0, 0.75], [0.5, 0.0]], [[1.0, 1.0], [1.0, 0.0]]])), - ("RGB", "RGB", True, False, "L", 1, "RGBA", np.array([ + ("RGB", "RGB", True, True, "L", 1, "RGBA", np.array([ [[0.0, 0.5], [0.0, 1.0]], [[0.0, 0.5], [0.0, 1.0]], [[0.0, 0.5], [0.0, 1.0]], @@ -1549,7 +1549,7 @@ def setup_class(cls): [[1.0, 0.75], [0.5, 1.0]], [[1.0, 0.75], [0.5, 1.0]], [[1.0, 1.0], [1.0, 1.0]]])), - ("RGBA", "RGBA", True, False, "L", 0.5, "RGBA", np.array([ + ("RGBA", "RGBA", True, True, "L", 0.5, "RGBA", np.array([ [[1.0, 0.0], [0.5, 1.0]], [[1.0, 0.0], [0.5, 1.0]], [[1.0, 0.0], [0.5, 1.0]], From d9ec74428ca2cdbc31ee87ae65be7f6110070b9c Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 18 Dec 2023 10:09:10 +0000 Subject: [PATCH 0976/1416] Convert any attribute starting with "{" to string --- satpy/readers/satpy_cf_nc.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/satpy/readers/satpy_cf_nc.py b/satpy/readers/satpy_cf_nc.py index b9c932b852..24c5ef438f 100644 --- a/satpy/readers/satpy_cf_nc.py +++ b/satpy/readers/satpy_cf_nc.py @@ -315,11 +315,8 @@ def get_dataset(self, ds_id, ds_info): return data def _decode_dict_type_attrs(self, data): - for key in ["orbital_parameters", "time_parameters"]: - try: - data.attrs[key] = _str2dict(data.attrs[key]) - except KeyError: - continue + for key, val in data.attrs.items(): + data.attrs[key] = _str2dict(val) def get_area_def(self, dataset_id): """Get area definition from CF complient netcdf.""" @@ -346,6 +343,6 @@ def _datetime_parser(json_dict): def _str2dict(val): """Convert string to dictionary.""" - if isinstance(val, str): + if isinstance(val, str) and val.startswith("{"): val = json.loads(val, object_hook=_datetime_parser) return val From ff4cb2ee737856ae49b08b8698dedc03bcaca412 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Mon, 18 Dec 2023 18:20:27 +0800 Subject: [PATCH 0977/1416] Update test_composites.py --- satpy/tests/test_composites.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index e50389a326..8a69ee69a3 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -1594,12 +1594,11 @@ def test_call(self, foreground_bands, background_bands, mask, mask_no_bands, mas mask_no_bands_dataset = xr.DataArray(da.from_array(self.mask_no_bands_data), dims=("y", "x"), attrs=attrs) - if mask and not mask_no_bands: - res = comp([foreground, background], optional_datasets=[mask_dataset]) - elif mask and mask_no_bands: - res = comp([foreground, background], optional_datasets=[mask_no_bands_dataset]) - else: - res = comp([foreground, background]) + optional_datasets = [mask_dataset] if mask and not mask_no_bands else [ + mask_no_bands_dataset] if mask and mask_no_bands else [] + + res = comp([foreground, background], optional_datasets=optional_datasets) + print(res.data.compute()) assert res.attrs["area"] == "foo" From f01cbe117dd948d9da07458fb92f649d06c90443 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 18 Dec 2023 10:34:47 +0000 Subject: [PATCH 0978/1416] Refactor test scene setup (vis006) --- satpy/tests/reader_tests/test_satpy_cf_nc.py | 122 +++++++++++-------- 1 file changed, 69 insertions(+), 53 deletions(-) diff --git a/satpy/tests/reader_tests/test_satpy_cf_nc.py b/satpy/tests/reader_tests/test_satpy_cf_nc.py index 6528a80723..20ab15ad0b 100644 --- 
a/satpy/tests/reader_tests/test_satpy_cf_nc.py +++ b/satpy/tests/reader_tests/test_satpy_cf_nc.py @@ -88,10 +88,76 @@ def _create_test_netcdf(filename, resolution=742): @pytest.fixture(scope="session") -def cf_scene(): +def area(): + """Get fake area definition.""" + area_extent = (339045.5577, 4365586.6063, 1068143.527, 4803645.4685) + proj_dict = {"a": 6378169.0, "b": 6356583.8, "h": 35785831.0, + "lon_0": 0.0, "proj": "geos", "units": "m"} + area = AreaDefinition("test", + "test", + "test", + proj_dict, + 2, + 2, + area_extent) + return area + + +@pytest.fixture(scope="session") +def common_attrs(area): + """Get common dataset attributes.""" + return { + "start_time": datetime(2019, 4, 1, 12, 0), + "end_time": datetime(2019, 4, 1, 12, 15), + "platform_name": "tirosn", + "orbit_number": 99999, + "area": area + } + + +@pytest.fixture(scope="session") +def vis006(area, common_attrs): + """Get fake VIS006 dataset.""" + x, y = area.get_proj_coords() + y_visir = y[:, 0] + x_visir = x[0, :] + attrs = { + "name": "image0", + "id_tag": "ch_r06", + "coordinates": "lat lon", + "resolution": 1000, + "calibration": "reflectance", + "wavelength": WavelengthRange(min=0.58, central=0.63, max=0.68, unit="µm"), + "orbital_parameters": { + "projection_longitude": 1, + "projection_latitude": 1, + "projection_altitude": 1, + "satellite_nominal_longitude": 1, + "satellite_nominal_latitude": 1, + "satellite_actual_longitude": 1, + "satellite_actual_latitude": 1, + "satellite_actual_altitude": 1, + "nadir_longitude": 1, + "nadir_latitude": 1, + "only_in_1": False + }, + "time_parameters": { + "nominal_start_time": common_attrs["start_time"], + "nominal_end_time": common_attrs["end_time"] + } + } + attrs.update(common_attrs) + coords = {"y": y_visir, "x": x_visir, "acq_time": ("y", [1, 2])} + vis006 = xr.DataArray(np.array([[1, 2], [3, 4]]), + dims=("y", "x"), + coords=coords, + attrs=attrs) + return vis006 + + +@pytest.fixture(scope="session") +def cf_scene(vis006, common_attrs, area): """Create a cf scene.""" - tstart = datetime(2019, 4, 1, 12, 0) - tend = datetime(2019, 4, 1, 12, 15) data_visir = np.array([[1, 2], [3, 4]]) z_visir = [1, 2, 3, 4, 5, 6, 7] qual_data = [[1, 2, 3, 4, 5, 6, 7], @@ -100,60 +166,10 @@ def cf_scene(): lat = 33.0 * np.array([[1, 2], [3, 4]]) lon = -13.0 * np.array([[1, 2], [3, 4]]) - proj_dict = { - "a": 6378169.0, "b": 6356583.8, "h": 35785831.0, - "lon_0": 0.0, "proj": "geos", "units": "m" - } - x_size, y_size = data_visir.shape - area_extent = (339045.5577, 4365586.6063, 1068143.527, 4803645.4685) - area = AreaDefinition( - "test", - "test", - "test", - proj_dict, - x_size, - y_size, - area_extent, - ) - x, y = area.get_proj_coords() y_visir = y[:, 0] x_visir = x[0, :] - common_attrs = { - "start_time": tstart, - "end_time": tend, - "platform_name": "tirosn", - "orbit_number": 99999, - "area": area - } - - vis006 = xr.DataArray(data_visir, - dims=("y", "x"), - coords={"y": y_visir, "x": x_visir, "acq_time": ("y", time_vis006)}, - attrs={ - "name": "image0", "id_tag": "ch_r06", - "coordinates": "lat lon", "resolution": 1000, "calibration": "reflectance", - "wavelength": WavelengthRange(min=0.58, central=0.63, max=0.68, unit="µm"), - "orbital_parameters": { - "projection_longitude": 1, - "projection_latitude": 1, - "projection_altitude": 1, - "satellite_nominal_longitude": 1, - "satellite_nominal_latitude": 1, - "satellite_actual_longitude": 1, - "satellite_actual_latitude": 1, - "satellite_actual_altitude": 1, - "nadir_longitude": 1, - "nadir_latitude": 1, - "only_in_1": False - }, 
- "time_parameters": { - "nominal_start_time": tstart, - "nominal_end_time": tend - } - }) - ir_108 = xr.DataArray(data_visir, dims=("y", "x"), coords={"y": y_visir, "x": x_visir, "acq_time": ("y", time_vis006)}, From 6e0fdf57b21f94c74639a936bffc4065e73f3802 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 18 Dec 2023 10:44:57 +0000 Subject: [PATCH 0979/1416] Refactor test scene setup (ir_108) --- satpy/tests/reader_tests/test_satpy_cf_nc.py | 67 +++++++++++++------- 1 file changed, 44 insertions(+), 23 deletions(-) diff --git a/satpy/tests/reader_tests/test_satpy_cf_nc.py b/satpy/tests/reader_tests/test_satpy_cf_nc.py index 20ab15ad0b..674f895b22 100644 --- a/satpy/tests/reader_tests/test_satpy_cf_nc.py +++ b/satpy/tests/reader_tests/test_satpy_cf_nc.py @@ -89,7 +89,7 @@ def _create_test_netcdf(filename, resolution=742): @pytest.fixture(scope="session") def area(): - """Get fake area definition.""" + """Get area definition.""" area_extent = (339045.5577, 4365586.6063, 1068143.527, 4803645.4685) proj_dict = {"a": 6378169.0, "b": 6356583.8, "h": 35785831.0, "lon_0": 0.0, "proj": "geos", "units": "m"} @@ -116,11 +116,18 @@ def common_attrs(area): @pytest.fixture(scope="session") -def vis006(area, common_attrs): - """Get fake VIS006 dataset.""" +def xy_coords(area): + """Get projection coordinates.""" x, y = area.get_proj_coords() - y_visir = y[:, 0] - x_visir = x[0, :] + y = y[:, 0] + x = x[0, :] + return x, y + + +@pytest.fixture(scope="session") +def vis006(xy_coords, common_attrs): + """Get VIS006 dataset.""" + x, y = xy_coords attrs = { "name": "image0", "id_tag": "ch_r06", @@ -147,7 +154,7 @@ def vis006(area, common_attrs): } } attrs.update(common_attrs) - coords = {"y": y_visir, "x": x_visir, "acq_time": ("y", [1, 2])} + coords = {"y": y, "x": x, "acq_time": ("y", [1, 2])} vis006 = xr.DataArray(np.array([[1, 2], [3, 4]]), dims=("y", "x"), coords=coords, @@ -156,13 +163,38 @@ def vis006(area, common_attrs): @pytest.fixture(scope="session") -def cf_scene(vis006, common_attrs, area): - """Create a cf scene.""" - data_visir = np.array([[1, 2], [3, 4]]) - z_visir = [1, 2, 3, 4, 5, 6, 7] +def ir_108(xy_coords): + """Get IR_108 dataset.""" + x, y = xy_coords + coords = {"y": y, "x": x, "acq_time": ("y", [1, 2])} + attrs = {"name": "image1", "id_tag": "ch_tb11", "coordinates": "lat lon"} + ir_108 = xr.DataArray(np.array([[1, 2], [3, 4]]), + dims=("y", "x"), + coords=coords, + attrs=attrs) + return ir_108 + + +@pytest.fixture(scope="session") +def qual_flags(xy_coords): + """Get quality flags.""" qual_data = [[1, 2, 3, 4, 5, 6, 7], [1, 2, 3, 4, 5, 6, 7]] - time_vis006 = [1, 2] + x, y = xy_coords + z = [1, 2, 3, 4, 5, 6, 7] + coords = {"y": y, "z": z, "acq_time": ("y", [1, 2])} + qual_f = xr.DataArray(qual_data, + dims=("y", "z"), + coords=coords, + attrs={"name": "qual_flags", + "id_tag": "qual_flags"}) + return qual_f + + +@pytest.fixture(scope="session") +def cf_scene(vis006, ir_108, qual_flags, common_attrs, area): + """Create a cf scene.""" + data_visir = np.array([[1, 2], [3, 4]]) lat = 33.0 * np.array([[1, 2], [3, 4]]) lon = -13.0 * np.array([[1, 2], [3, 4]]) @@ -170,17 +202,6 @@ def cf_scene(vis006, common_attrs, area): y_visir = y[:, 0] x_visir = x[0, :] - ir_108 = xr.DataArray(data_visir, - dims=("y", "x"), - coords={"y": y_visir, "x": x_visir, "acq_time": ("y", time_vis006)}, - attrs={"name": "image1", "id_tag": "ch_tb11", "coordinates": "lat lon"}) - qual_f = xr.DataArray(qual_data, - dims=("y", "z"), - coords={"y": y_visir, "z": z_visir, "acq_time": ("y", 
time_vis006)}, - attrs={ - "name": "qual_flags", - "id_tag": "qual_flags" - }) lat = xr.DataArray(lat, dims=("y", "x"), coords={"y": y_visir, "x": x_visir}, @@ -223,7 +244,7 @@ def cf_scene(vis006, common_attrs, area): "1": prefix_data, "lat": lat, "lon": lon, - "qual_flags": qual_f + "qual_flags": qual_flags } for key in scene_dict: From a9354759421460df803b1bcfeca5cf4eb955b79b Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Mon, 18 Dec 2023 18:45:29 +0800 Subject: [PATCH 0980/1416] Update test_composites.py --- satpy/tests/test_composites.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 8a69ee69a3..a3c8145762 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -1510,10 +1510,18 @@ def setup_class(cls): ("foreground_bands", "background_bands", "mask", "mask_no_bands", "mask_bands", "mask_value", "exp_bands", "exp_result"), [ - ("L", "L", True, False, "L", None, "LA", np.array([[[1.0, 0.5], [0.0, 0.0]],[[1.0, 1.0], [1.0, 0.0]]])), - ("L", "L", True, False, "RGB", None, "LA", np.array([[[1.0, 0.5], [0.0, 0.0]], [[1.0, 1.0], [1.0, 0.0]]])), - ("L", "LA", False, False, "L", None, "LA", np.array([[[1.0, 0.5], [0.0, 1.0]], [[1.0, 1.0], [1.0, 1.0]]])), - ("LA", "LA", False, False, "RGB", None, "LA", np.array([[[1.0, 0.75], [0.5, 1.0]], [[1.0, 1.0], [1.0, 1.0]]])), + ("L", "L", True, False, "L", None, "LA", np.array([ + [[1.0, 0.5], [0.0, 0.0]], + [[1.0, 1.0], [1.0, 0.0]]])), + ("L", "L", True, False, "RGB", None, "LA", np.array([ + [[1.0, 0.5], [0.0, 0.0]], + [[1.0, 1.0], [1.0, 0.0]]])), + ("L", "LA", False, False, "L", None, "LA", np.array([ + [[1.0, 0.5], [0.0, 1.0]], + [[1.0, 1.0], [1.0, 1.0]]])), + ("LA", "LA", False, False, "RGB", None, "LA", np.array([ + [[1.0, 0.75], [0.5, 1.0]], + [[1.0, 1.0], [1.0, 1.0]]])), ("LA", "RGB", True, False, "L", None, "RGBA", np.array([ [[1.0, 0.75], [0.5, 0.0]], [[1.0, 0.75], [0.5, 0.0]], From e06657f28dbab8b315cb71e3c37c6c3716a1f6ce Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Mon, 18 Dec 2023 18:45:41 +0800 Subject: [PATCH 0981/1416] Update test_composites.py --- satpy/tests/test_composites.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index a3c8145762..4fce43fe05 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -1607,8 +1607,6 @@ def test_call(self, foreground_bands, background_bands, mask, mask_no_bands, mas res = comp([foreground, background], optional_datasets=optional_datasets) - print(res.data.compute()) - assert res.attrs["area"] == "foo" np.testing.assert_allclose(res, exp_result) assert res.attrs["mode"] == exp_bands From 168ced2040cb088d95929598ee8e1d80b2db215d Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 18 Dec 2023 10:59:58 +0000 Subject: [PATCH 0982/1416] Refactor test scene setup (rest) --- satpy/tests/reader_tests/test_satpy_cf_nc.py | 99 ++++++++++---------- 1 file changed, 52 insertions(+), 47 deletions(-) diff --git a/satpy/tests/reader_tests/test_satpy_cf_nc.py b/satpy/tests/reader_tests/test_satpy_cf_nc.py index 674f895b22..0710aae57c 100644 --- a/satpy/tests/reader_tests/test_satpy_cf_nc.py +++ b/satpy/tests/reader_tests/test_satpy_cf_nc.py @@ -153,7 +153,6 @@ def vis006(xy_coords, common_attrs): "nominal_end_time": common_attrs["end_time"] } } - attrs.update(common_attrs) coords = {"y": y, "x": x, "acq_time": ("y", [1, 2])} vis006 = xr.DataArray(np.array([[1, 2], [3, 
4]]), dims=("y", "x"), @@ -192,63 +191,69 @@ def qual_flags(xy_coords): @pytest.fixture(scope="session") -def cf_scene(vis006, ir_108, qual_flags, common_attrs, area): - """Create a cf scene.""" - data_visir = np.array([[1, 2], [3, 4]]) +def lonlats(xy_coords): + """Get longitudes and latitudes.""" + x, y = xy_coords lat = 33.0 * np.array([[1, 2], [3, 4]]) lon = -13.0 * np.array([[1, 2], [3, 4]]) + attrs = {"name": "lat", + "standard_name": "latitude", + "modifiers": np.array([])} + dims = ("y", "x") + coords = {"y": y, "x": x} + lat = xr.DataArray(lat, dims=dims, coords=coords, attrs=attrs) + lon = xr.DataArray(lon, dims=dims, coords=coords, attrs=attrs) + return lon, lat - x, y = area.get_proj_coords() - y_visir = y[:, 0] - x_visir = x[0, :] - lat = xr.DataArray(lat, - dims=("y", "x"), - coords={"y": y_visir, "x": x_visir}, - attrs={ - "name": "lat", - "standard_name": "latitude", - "modifiers": np.array([]) - }) - lon = xr.DataArray(lon, - dims=("y", "x"), - coords={"y": y_visir, "x": x_visir}, - attrs={ - "name": "lon", - "standard_name": "longitude", - "modifiers": np.array([]) - }) - - # for prefix testing - prefix_data = xr.DataArray(data_visir, +@pytest.fixture(scope="session") +def prefix_data(xy_coords, area): + """Get dataset whose name should be prefixed.""" + x, y = xy_coords + attrs = {"name": "1", + "id_tag": "ch_r06", + "coordinates": "lat lon", + "resolution": 1000, + "calibration": "reflectance", + "wavelength": WavelengthRange(min=0.58, central=0.63, max=0.68, unit="µm"), + "area": area} + prefix_data = xr.DataArray(np.array([[1, 2], [3, 4]]), dims=("y", "x"), - coords={"y": y_visir, "x": x_visir}, - attrs={ - "name": "1", "id_tag": "ch_r06", - "coordinates": "lat lon", "resolution": 1000, "calibration": "reflectance", - "wavelength": WavelengthRange(min=0.58, central=0.63, max=0.68, unit="µm"), - "area": area - }) - - # for swath testing + coords={"y": y, "x": x}, + attrs=attrs) + return prefix_data + + +@pytest.fixture(scope="session") +def swath_data(prefix_data, lonlats): + """Get swath data.""" + lon, lat = lonlats area = SwathDefinition(lons=lon, lats=lat) swath_data = prefix_data.copy() swath_data.attrs.update({"name": "swath_data", "area": area}) + return swath_data + +@pytest.fixture(scope="session") +def datasets(vis006, ir_108, qual_flags, lonlats, prefix_data, swath_data): + """Get datasets belonging to the scene.""" + lon, lat = lonlats + return {"image0": vis006, + "image1": ir_108, + "swath_data": swath_data, + "1": prefix_data, + "lat": lat, + "lon": lon, + "qual_flags": qual_flags} + + +@pytest.fixture(scope="session") +def cf_scene(datasets, common_attrs): + """Create a cf scene.""" scene = Scene() scene.attrs["sensor"] = ["avhrr-1", "avhrr-2", "avhrr-3"] - scene_dict = { - "image0": vis006, - "image1": ir_108, - "swath_data": swath_data, - "1": prefix_data, - "lat": lat, - "lon": lon, - "qual_flags": qual_flags - } - - for key in scene_dict: - scene[key] = scene_dict[key] + for key in datasets: + scene[key] = datasets[key] if key != "swath_data": scene[key].attrs.update(common_attrs) return scene From f11db543b2d42e634ddc6a8c935d17094ea1bc4c Mon Sep 17 00:00:00 2001 From: Dario Stelitano Date: Mon, 18 Dec 2023 10:28:49 +0100 Subject: [PATCH 0983/1416] to_hvplot and to_geoviews called in scene.py --- satpy/scene.py | 64 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) diff --git a/satpy/scene.py b/satpy/scene.py index 5ba8832729..24f3264dcd 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1012,6 +1012,70 @@ def 
show(self, dataset_id, overlay=None): img.show() return img + def to_geoviews(self, gvtype=None, datasets=None, + kdims=None, vdims=None, dynamic=False): + """Convert satpy Scene to geoviews. + + Args: + scn (satpy.Scene): Satpy Scene. + gvtype (gv plot type): + One of gv.Image, gv.LineContours, gv.FilledContours, gv.Points + Defaults to :class:`geoviews.Image`. + See Geoviews documentation for details. + datasets (list): Limit included products to these datasets + kdims (list of str): + Key dimensions. See geoviews documentation for more information. + vdims (list of str, optional): + Value dimensions. See geoviews documentation for more information. + If not given, defaults to the first data variable. + dynamic (bool, optional): Load and compute data on-the-fly during + visualization. Default is ``False``. See + https://holoviews.org/user_guide/Gridded_Datasets.html#working-with-xarray-data-types + for more information. Has no effect when data to be visualized + only has 2 dimensions (y/x or longitude/latitude) and doesn't + require grouping via the Holoviews ``groupby`` function. + + Returns: geoviews object + + Todo: + * better handling of projection information in datasets which are + to be passed to geoviews + + """ + from satpy._scene_converters import to_geoviews + return to_geoviews(self, gvtype=gvtype, datasets=datasets, + kdims=kdims, vdims=vdims, dynamic=dynamic) + + + def to_hvplot(self, datasets=None, *args, **kwargs): + """Convert satpy Scene to Hvplot. This method cannot be used with composites of swath data. + + Args: + scn (satpy.Scene): Satpy Scene. + datasets (list): Limit included products to these datasets. + args: Positional arguments passed to hvplot. + kwargs: hvplot options dictionary. + + Returns: + hvplot object containing the plots of the requested datasets. + By default it contains plots of all Scene datasets, and a plot + title is shown. + + Example usage:: + + scene_list = ['ash','IR_108'] + scn = Scene() + scn.load(scene_list) + scn = scn.resample('eurol') + plot = scn.to_hvplot(datasets=scene_list) + plot.ash+plot.IR_108 + """ + from satpy._scene_converters import to_hvplot + + return to_hvplot(self, datasets, *args, **kwargs) + + + def to_xarray_dataset(self, datasets=None): """Merge all xr.DataArrays of a scene to a xr.DataSet. From e7c85447f8a4b0302e91a238d17bbc11ea39e52e Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 18 Dec 2023 09:16:11 -0600 Subject: [PATCH 0984/1416] Update intersphinx reference URLs --- doc/source/conf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 37c197c6eb..49e47b2cc2 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -278,7 +278,7 @@ def __getattr__(cls, name): # Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = { "dask": ("https://docs.dask.org/en/latest", None), - "geoviews": ("http://geoviews.org", None), + "geoviews": ("https://geoviews.org", None), "jobqueue": ("https://jobqueue.dask.org/en/latest", None), "numpy": ("https://numpy.org/doc/stable", None), "pydecorate": ("https://pydecorate.readthedocs.io/en/stable", None), @@ -287,7 +287,7 @@ def __getattr__(cls, name): "pyresample": ("https://pyresample.readthedocs.io/en/stable", None), "pytest": ("https://docs.pytest.org/en/stable/", None), "python": ("https://docs.python.org/3", None), - "scipy": ("http://scipy.github.io/devdocs", None), + "scipy": ("https://scipy.github.io/devdocs", None), "trollimage": ("https://trollimage.readthedocs.io/en/stable", None), "trollsift": ("https://trollsift.readthedocs.io/en/stable", None), "xarray": ("https://xarray.pydata.org/en/stable", None), From 3e473cd5a099d8204498642b82983c4682c1ea0b Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 18 Dec 2023 09:17:38 -0600 Subject: [PATCH 0985/1416] Fix eps_l1b reader Delayed usage causing docs failures Delayed objects should always be staticmethods or global functions so they can be easily serialized. --- satpy/readers/eps_l1b.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/satpy/readers/eps_l1b.py b/satpy/readers/eps_l1b.py index fbeb3ecba6..a74aac1559 100644 --- a/satpy/readers/eps_l1b.py +++ b/satpy/readers/eps_l1b.py @@ -224,12 +224,6 @@ def _interpolate(self, lons_like, lats_like): " and earth views = " + str(self.pixels)) - @delayed(nout=2, pure=True) - def _interpolate_20km_to_1km(self, lons, lats): - # Note: delayed will cast input dask-arrays to numpy arrays (needed by metop20kmto1km). - from geotiepoints import metop20kmto1km - return metop20kmto1km(lons, lats) - def _get_full_angles(self, solar_zenith, sat_zenith, solar_azimuth, sat_azimuth): nav_sample_rate = self["NAV_SAMPLE_RATE"] @@ -403,3 +397,10 @@ def end_time(self): """Get end time.""" # return datetime.strptime(self["SENSING_END"], "%Y%m%d%H%M%SZ") return self._end_time + + +@delayed(nout=2, pure=True) +def _interpolate_20km_to_1km(lons, lats): + # Note: delayed will cast input dask-arrays to numpy arrays (needed by metop20kmto1km). 
+ from geotiepoints import metop20kmto1km + return metop20kmto1km(lons, lats) From 9707e7033d70195f782c8d7fb8df3d8bc29f013e Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 18 Dec 2023 09:40:43 -0600 Subject: [PATCH 0986/1416] Fix reference to delayed function in eps_l1b reader --- satpy/readers/eps_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/eps_l1b.py b/satpy/readers/eps_l1b.py index a74aac1559..9ba5dece43 100644 --- a/satpy/readers/eps_l1b.py +++ b/satpy/readers/eps_l1b.py @@ -212,7 +212,7 @@ def _get_full_lonlats_uncached(self): def _interpolate(self, lons_like, lats_like): nav_sample_rate = self["NAV_SAMPLE_RATE"] if nav_sample_rate == 20 and self.pixels == 2048: - lons_like_1km, lats_like_1km = self._interpolate_20km_to_1km(lons_like, lats_like) + lons_like_1km, lats_like_1km = _interpolate_20km_to_1km(lons_like, lats_like) lons_like_1km = da.from_delayed(lons_like_1km, dtype=lons_like.dtype, shape=(self.scanlines, self.pixels)) lats_like_1km = da.from_delayed(lats_like_1km, dtype=lats_like.dtype, From bf989febc6dd2aa17e76aa0cd40376a6b6b7b6ed Mon Sep 17 00:00:00 2001 From: BengtRydberg Date: Mon, 18 Dec 2023 17:42:11 +0100 Subject: [PATCH 0987/1416] correcting pixel position from tiepoints reconsttruction --- satpy/readers/ici_l1b_nc.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/satpy/readers/ici_l1b_nc.py b/satpy/readers/ici_l1b_nc.py index b063c51c4f..7adef62e4b 100644 --- a/satpy/readers/ici_l1b_nc.py +++ b/satpy/readers/ici_l1b_nc.py @@ -197,7 +197,10 @@ def _interpolate_geo( n_subs = longitude.n_subs lons = da.zeros((n_scan.size, n_samples, horns.size)) lats = da.zeros((n_scan.size, n_samples, horns.size)) - n_subs = np.linspace(0, n_samples - 1, n_subs.size).astype(int) + n_subs = np.append( + np.arange(0, n_samples, np.ceil(n_samples / n_subs.size)), + n_samples - 1 + ).astype(int) for horn in horns.values: satint = GeoInterpolator( (longitude.values[:, :, horn], latitude.values[:, :, horn]), From 2aef549ef105952a8087c7ae0cb1d940629b1f00 Mon Sep 17 00:00:00 2001 From: BengtRydberg Date: Mon, 18 Dec 2023 18:37:05 +0100 Subject: [PATCH 0988/1416] adding test --- satpy/tests/reader_tests/test_ici_l1b_nc.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/satpy/tests/reader_tests/test_ici_l1b_nc.py b/satpy/tests/reader_tests/test_ici_l1b_nc.py index 498ca88705..a5909b249d 100644 --- a/satpy/tests/reader_tests/test_ici_l1b_nc.py +++ b/satpy/tests/reader_tests/test_ici_l1b_nc.py @@ -445,8 +445,14 @@ def test_interpolate_geo(self, reader): """Test interpolate geographic coordinates.""" shape = (N_SCAN, N_SUBS, N_HORNS) dims = ("n_scan", "n_subs", "n_horns") + sub_pos = np.append( + np.arange(0, N_SAMPLES, np.ceil(N_SAMPLES / N_SUBS)), + N_SAMPLES - 1 + ) longitude = xr.DataArray( - 2. * np.ones(shape), + np.tile( # longitudes between 0 and 10 + 10 * sub_pos / sub_pos[-1], (N_SCAN, N_HORNS, 1) + ).swapaxes(1, 2), dims=dims, coords={ "n_horns": np.arange(N_HORNS), @@ -462,7 +468,9 @@ def test_interpolate_geo(self, reader): expect_shape = (N_SCAN, N_SAMPLES, N_HORNS) assert lon.shape == expect_shape assert lat.shape == expect_shape - np.testing.assert_allclose(lon, 2.0) + np.testing.assert_allclose(lon[:, 0, :], 0.) + np.testing.assert_allclose(lon[:, -1, :], 10.) 
+ np.testing.assert_allclose(np.diff(lon[0, :, 0]), 10 / (N_SAMPLES - 1)) np.testing.assert_allclose(lat, 1.0) def test_interpolate_viewing_angle(self, reader): From 393451ff04b3907660ab3143e88124ed587165b3 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Tue, 19 Dec 2023 18:57:50 +0800 Subject: [PATCH 0989/1416] update structure --- satpy/composites/__init__.py | 90 ++++++++++----------- satpy/tests/test_composites.py | 141 ++++++++++++++++----------------- 2 files changed, 109 insertions(+), 122 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 2bb8163129..8ea76825d7 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1720,9 +1720,12 @@ def __call__(self, projectables, optional_datasets=None, *args, **kwargs): """Call the compositor.""" optional_datasets = [] if optional_datasets is None else optional_datasets projectables = self.match_data_arrays(projectables + optional_datasets) + # Get enhanced datasets foreground = enhance2dataset(projectables[0], convert_p=True) background = enhance2dataset(projectables[1], convert_p=True) + before_bg_mode = background.attrs["mode"] + mask_dataset = projectables[2] if not optional_datasets == [] else None # Adjust bands so that they match @@ -1731,15 +1734,17 @@ def __call__(self, projectables, optional_datasets=None, *args, **kwargs): # RGB/RGBA -> RGBA/RGBA foreground = add_bands(foreground, background["bands"]) background = add_bands(background, foreground["bands"]) + after_bg_mode = background.attrs["mode"] + + # It's important to judge whether the alpha band of background is initially generated, e.g. by CloudCompositor + # Or it's just added through 'add_bands' + # The result will be used to decide the output image mode + initial_bg_alpha = True if "A" in before_bg_mode and "A" in after_bg_mode else False mask = self._get_mask(mask_dataset, self.mask_value) - alpha_fore = self._get_alpha(foreground) - alpha_back = self._get_alpha(background) - output_mode = self._get_output_mode(foreground, background, mask) attrs = self._combine_metadata_with_mode_and_sensor(foreground, background) - data = self._get_merged_image_data(foreground, background, mask=mask, - alpha_fore=alpha_fore, alpha_back=alpha_back, output_mode=output_mode) + data = self._get_merged_image_data(foreground, background, mask=mask, initial_bg_alpha=initial_bg_alpha) res = super(BackgroundCompositor, self).__call__(data, **kwargs) res.attrs.update(attrs) return res @@ -1763,7 +1768,7 @@ def _get_mask(dataset: xr.DataArray, mask_value): if dataset is None: mask = None else: - # If mask dataset is a composite, then extract its first band + # If mask dataset is a composite, extract its first band try: dataset = dataset.isel(bands=0) except ValueError: @@ -1776,53 +1781,30 @@ def _get_mask(dataset: xr.DataArray, mask_value): return mask - @staticmethod - def _get_alpha(dataset: xr.DataArray): - # If the dataset contains an alpha channel, just use it - # If not, we still need one. 
So build it and fill it with 1 - if "A" in dataset.attrs["mode"]: - alpha = dataset.sel(bands="A") - else: - first_band = dataset.isel(bands=0) - alpha = xr.full_like(first_band, 1) - alpha["bands"] = "A" - - # There could be Nans in the alpha, especially for original ones - # Replace them with 0, so they won't affect new_alpha - alpha = xr.where(alpha.isnull(), 0, alpha) - - return alpha - - @staticmethod - def _get_output_mode(foreground: xr.DataArray, - background: xr.DataArray, - mask: xr.DataArray): - # Get the output bands of the stacked image - # Actually, it's about deciding whether to pass the new alpha band of the stacked image to the writer - # Or just leave the write for decision - - # If both images have alpha band or just background has one, the new alpha band will be passed to the writer - # If area-masking is needed, the same - # If neither of the images has alpha band but area-masking is still needed, the same - if "A" in background.attrs["mode"]: - output_mode = background.mode - else: - output_mode = ( - background.mode if "A" in foreground.attrs["mode"] and mask is None else - foreground.mode if mask is None else foreground.mode + "A" - ) - - return output_mode - @staticmethod def _get_merged_image_data(foreground: xr.DataArray, background: xr.DataArray, mask: xr.DataArray, - alpha_fore: xr.DataArray, - alpha_back: xr.DataArray, - output_mode: str, + initial_bg_alpha: bool, ) -> list[xr.DataArray]: + def _get_alpha(dataset: xr.DataArray): + # If the dataset contains an alpha channel, just use it + # If not, we still need one. So build it and fill it with 1 + if "A" in dataset.attrs["mode"]: + alpha = dataset.sel(bands="A") + else: + first_band = dataset.isel(bands=0) + alpha = xr.full_like(first_band, 1) + alpha["bands"] = "A" + # There could be Nans in the alpha, especially through 'add_bands' + # Replace them with 0 to prevent cases like 1 + nan = nan, so they won't affect new_alpha + alpha = xr.where(alpha.isnull(), 0, alpha) + + return alpha + + alpha_fore = _get_alpha(foreground) + alpha_back = _get_alpha(background) new_alpha = alpha_fore + alpha_back * (1 - alpha_fore) # Do the masking job @@ -1833,14 +1815,24 @@ def _get_merged_image_data(foreground: xr.DataArray, data = [] + # Unless background has an initial alpha band, there will be no alpha band in the output image + # Let the writer decide + output_mode = background.mode if initial_bg_alpha else background.mode.replace("A", "") + + # If we let the writer decide alpha band, we must fill the transparent areas in the image with np.nan first + # The best way is through the new alpha + new_alpha_nan = xr.where(alpha_fore + alpha_back == 0, np.nan, new_alpha) if "A" not in output_mode \ + else new_alpha + for band in output_mode: fg_band = foreground.sel(bands=band) if band != "A" else new_alpha bg_band = background.sel(bands=band) if band != "A" else new_alpha chan = (fg_band * alpha_fore + - bg_band * alpha_back * (1 - alpha_fore)) / new_alpha if band != "A" else new_alpha + bg_band * alpha_back * (1 - alpha_fore)) / new_alpha_nan if band != "A" else new_alpha - chan = xr.where(chan.isnull(), bg_band * alpha_back, chan) + if mask is None: + chan = xr.where(chan.isnull(), bg_band * alpha_back, chan) data.append(chan) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 4fce43fe05..6006957d3e 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -1488,10 +1488,10 @@ def setup_class(cls): [[1., 0.5], [0., np.nan]], [[1., 0.5], [0., np.nan]]]), "RGBA": 
np.array([ - [[1.0, 0.5], [0.0, np.nan]], - [[1.0, 0.5], [0.0, np.nan]], - [[1.0, 0.5], [0.0, np.nan]], - [[0.5, 0.5], [0.5, 0.5]]]), + [[1.0, 0.5], [0., np.nan]], + [[1.0, 0.5], [0., np.nan]], + [[1.0, 0.5], [0., np.nan]], + [[0.5, 0.5], [0., 0.5]]]), } mask_data = { "L": np.array([[[1., 0.5], [0., np.nan]]]), @@ -1507,82 +1507,53 @@ def setup_class(cls): @mock.patch("satpy.composites.enhance2dataset", _enhance2dataset) @pytest.mark.parametrize( - ("foreground_bands", "background_bands", "mask", "mask_no_bands", - "mask_bands", "mask_value", "exp_bands", "exp_result"), + ("foreground_bands", "background_bands", "mask", "mask_value", "exp_bands", "exp_result"), [ - ("L", "L", True, False, "L", None, "LA", np.array([ - [[1.0, 0.5], [0.0, 0.0]], - [[1.0, 1.0], [1.0, 0.0]]])), - ("L", "L", True, False, "RGB", None, "LA", np.array([ - [[1.0, 0.5], [0.0, 0.0]], - [[1.0, 1.0], [1.0, 0.0]]])), - ("L", "LA", False, False, "L", None, "LA", np.array([ - [[1.0, 0.5], [0.0, 1.0]], - [[1.0, 1.0], [1.0, 1.0]]])), - ("LA", "LA", False, False, "RGB", None, "LA", np.array([ - [[1.0, 0.75], [0.5, 1.0]], - [[1.0, 1.0], [1.0, 1.0]]])), - ("LA", "RGB", True, False, "L", None, "RGBA", np.array([ - [[1.0, 0.75], [0.5, 0.0]], - [[1.0, 0.75], [0.5, 0.0]], - [[1.0, 0.75], [0.5, 0.0]], - [[1.0, 1.0], [1.0, 0.0]]])), - ("RGB", "RGB", True, True, "L", 1, "RGBA", np.array([ - [[0.0, 0.5], [0.0, 1.0]], - [[0.0, 0.5], [0.0, 1.0]], - [[0.0, 0.5], [0.0, 1.0]], - [[0.0, 1.0], [1.0, 1.0]]])), - ("RGB", "RGB", True, False, "RGB", 0.5, "RGBA", np.array([ - [[1.0, 0.0], [0.0, 1.0]], - [[1.0, 0.0], [0.0, 1.0]], - [[1.0, 0.0], [0.0, 1.0]], + ("L", "L", "L", None, "L", np.array([ + [1.0, 0.5], [0., np.nan]])), + ("L", "LA", "RGB", 0.5, "LA", np.array([ + [[1.0, np.nan], [0.0, np.nan]], [[1.0, 0.0], [1.0, 1.0]]])), - ("RGB", "RGBA", False, False, "L", 1, "RGBA", np.array([ - [[1.0, 0.5], [0.0, 1.0]], + ("LA", "LA", "L", None, "LA", np.array([ + [[1.0, 0.75], [0.5, np.nan]], + [[1.0, 1.0], [1.0, 0.0]]])), + ("LA", "RGB", "L", 1, "RGB", np.array([ + [[np.nan, 0.75], [0.5, np.nan]], + [[np.nan, 0.75], [0.5, np.nan]], + [[np.nan, 0.75], [0.5, np.nan]]])), + ("RGB", "RGB", "None", None, "RGB", np.array([ [[1.0, 0.5], [0.0, 1.0]], [[1.0, 0.5], [0.0, 1.0]], - [[1.0, 1.0], [1.0, 1.0]]])), - ("RGBA", "RGB", True, False, "L", None, "RGBA", np.array([ - [[1.0, 0.75], [0.5, 0.0]], - [[1.0, 0.75], [0.5, 0.0]], - [[1.0, 0.75], [0.5, 0.0]], - [[1.0, 1.0], [1.0, 0.0]]])), - ("RGBA", "RGB", True, False, "RGB", 0, "RGBA", np.array([ - [[1.0, 0.75], [0.0, 1.0]], - [[1.0, 0.75], [0.0, 1.0]], - [[1.0, 0.75], [0.0, 1.0]], - [[1.0, 1.0], [0.0, 1.0]]])), - ("RGBA", "RGB", False, False, "RGB", 0, "RGBA", np.array([ - [[1.0, 0.75], [0.5, 1.0]], - [[1.0, 0.75], [0.5, 1.0]], - [[1.0, 0.75], [0.5, 1.0]], - [[1.0, 1.0], [1.0, 1.0]]])), - ("RGBA", "RGBA", True, True, "L", 0.5, "RGBA", np.array([ - [[1.0, 0.0], [0.5, 1.0]], - [[1.0, 0.0], [0.5, 1.0]], - [[1.0, 0.0], [0.5, 1.0]], + [[1.0, 0.5], [0.0, 1.0]]])), + ("RGB", "RGBA", "L", 1, "RGBA", np.array([ + [[np.nan, 0.5], [0.0, np.nan]], + [[np.nan, 0.5], [0.0, np.nan]], + [[np.nan, 0.5], [0.0, np.nan]], + [[0.0, 1.0], [1.0, 1.0]]])), + ("RGBA", "RGB", "L", None, "RGB", np.array([ + [[1.0, 0.75], [1.0, np.nan]], + [[1.0, 0.75], [1.0, np.nan]], + [[1.0, 0.75], [1.0, np.nan]]])), + ("RGBA", "RGB", "None", None, "RGB", np.array([ + [[1.0, 0.75], [1.0, 1.0]], + [[1.0, 0.75], [1.0, 1.0]], + [[1.0, 0.75], [1.0, 1.0]]])), + ("RGBA", "RGBA", "RGB", 0.5, "RGBA", np.array([ + [[1.0, np.nan], [1.0, np.nan]], + [[1.0, 
np.nan], [1.0, np.nan]], + [[1.0, np.nan], [1.0, np.nan]], [[1.0, 0.0], [1.0, 1.0]]])), - ("RGBA", "RGBA", False, False, "RGB", 0, "RGBA", np.array([ - [[1.0, 0.75], [0.5, 1.0]], - [[1.0, 0.75], [0.5, 1.0]], - [[1.0, 0.75], [0.5, 1.0]], - [[1.0, 1.0], [1.0, 1.0]]])), - ("RGBA", "RGBA", True, True, "L", None, "RGBA", np.array([ - [[1.0, 0.75], [0.5, 0.0]], - [[1.0, 0.75], [0.5, 0.0]], - [[1.0, 0.75], [0.5, 0.0]], - [[1.0, 1.0], [1.0, 0.0]]])), ] ) - def test_call(self, foreground_bands, background_bands, mask, mask_no_bands, mask_bands, mask_value, - exp_bands, exp_result): + def test_call(self, foreground_bands, background_bands, mask, mask_value, exp_bands, exp_result): """Test the background compositing.""" from satpy.composites import BackgroundCompositor comp = BackgroundCompositor("name", mask_value=mask_value) # L mode images foreground_data = self.foreground_data[foreground_bands] - mask_data = self.mask_data[mask_bands] if not mask_no_bands else self.mask_data["L"] + mask_bands = mask if mask != "None" else "L" + mask_data = self.mask_data[mask_bands] attrs = {"mode": foreground_bands, "area": "foo"} foreground = xr.DataArray(da.from_array(foreground_data), @@ -1598,12 +1569,8 @@ def test_call(self, foreground_bands, background_bands, mask, mask_no_bands, mas dims=("bands", "y", "x"), coords={"bands": [c for c in attrs["mode"]]}, attrs=attrs) - attrs = {"area": "foo"} - mask_no_bands_dataset = xr.DataArray(da.from_array(self.mask_no_bands_data), - dims=("y", "x"), - attrs=attrs) - optional_datasets = [mask_dataset] if mask and not mask_no_bands else [ - mask_no_bands_dataset] if mask and mask_no_bands else [] + + optional_datasets = [mask_dataset] if mask != "None" else [] res = comp([foreground, background], optional_datasets=optional_datasets) @@ -1635,6 +1602,34 @@ def test_multiple_sensors(self): assert res.attrs["mode"] == "L" assert res.attrs["sensor"] == {"abi", "glm"} + @mock.patch("satpy.composites.enhance2dataset", _enhance2dataset) + def test_mask_with_no_bands(self): + """Test the background compositing with mask that doesn't have 'bands' in its coords.""" + from satpy.composites import BackgroundCompositor + comp = BackgroundCompositor("name", mask_value=None) + + attrs = {"mode": "RGBA", "area": "foo"} + foreground = xr.DataArray(da.from_array(self.foreground_data["RGBA"]), + dims=("bands", "y", "x"), + coords={"bands": [c for c in attrs["mode"]]}, + attrs=attrs) + attrs = {"mode": "RGB", "area": "foo"} + background = xr.DataArray(da.ones((len("RGB"), 2, 2)), dims=("bands", "y", "x"), + coords={"bands": [c for c in attrs["mode"]]}, + attrs=attrs) + attrs = {"area": "foo"} + mask_no_bands_dataset = xr.DataArray(da.from_array(self.mask_no_bands_data), + dims=("y", "x"), + attrs=attrs) + + res = comp([foreground, background], optional_datasets=[mask_no_bands_dataset]) + + assert res.attrs["area"] == "foo" + assert res.attrs["mode"] == "RGB" + np.testing.assert_allclose(res, np.array([[[1.0, 0.75], [1.0, np.nan]], + [[1.0, 0.75], [1.0, np.nan]], + [[1.0, 0.75], [1.0, np.nan]]])) + class TestMaskingCompositor: """Test case for the simple masking compositor.""" From a9b876a2cb5f1bc21cb6b7eb10f7c4756e1de1ca Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Tue, 19 Dec 2023 11:41:30 +0000 Subject: [PATCH 0990/1416] Factorize attribute decoding --- satpy/readers/satpy_cf_nc.py | 32 ++++++++++++++++++++------------ 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/satpy/readers/satpy_cf_nc.py b/satpy/readers/satpy_cf_nc.py index 24c5ef438f..4a814dd586 
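The refactoring in this commit reorganizes the attribute decoding of the satpy_cf_nc reader into a dedicated class. The mechanism underneath is json.loads with an object_hook that opportunistically parses timestamps; a standalone sketch of that idea, with hypothetical attribute values:

import json

import dateutil.parser


def parse_datetimes(json_dict):
    # convert ISO-like strings to datetime, leave everything else untouched
    for key, value in json_dict.items():
        try:
            json_dict[key] = dateutil.parser.parse(value)
        except (TypeError, ValueError):
            pass
    return json_dict


encoded = '{"start_time": "2019-04-01 12:00:00", "orbit_number": 99999}'
decoded = json.loads(encoded, object_hook=parse_datetimes)
# decoded["start_time"] is now a datetime; decoded["orbit_number"] stays an int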
100644 --- a/satpy/readers/satpy_cf_nc.py +++ b/satpy/readers/satpy_cf_nc.py @@ -311,13 +311,10 @@ def get_dataset(self, ds_id, ds_info): if name != ds_id["name"]: data = data.rename(ds_id["name"]) data.attrs.update(nc.attrs) # For now add global attributes to all datasets - self._decode_dict_type_attrs(data) + decoder = DatasetAttributeDecoder() + decoder.decode_attrs(data) return data - def _decode_dict_type_attrs(self, data): - for key, val in data.attrs.items(): - data.attrs[key] = _str2dict(val) - def get_area_def(self, dataset_id): """Get area definition from CF complient netcdf.""" try: @@ -331,6 +328,24 @@ def get_area_def(self, dataset_id): raise NotImplementedError +class DatasetAttributeDecoder: + """Decode attributes from cf-compatible to Python object.""" + + def decode_attrs(self, dataset): + """Decode dataset attributes.""" + self._decode_dict_type_attrs(dataset) + + def _decode_dict_type_attrs(self, data): + for key, val in data.attrs.items(): + data.attrs[key] = self._str2dict(val) + + def _str2dict(self, val): + """Convert string to dictionary.""" + if isinstance(val, str) and val.startswith("{"): + val = json.loads(val, object_hook=_datetime_parser) + return val + + def _datetime_parser(json_dict): import dateutil.parser for key, value in json_dict.items(): @@ -339,10 +354,3 @@ def _datetime_parser(json_dict): except (TypeError, ValueError): pass return json_dict - - -def _str2dict(val): - """Convert string to dictionary.""" - if isinstance(val, str) and val.startswith("{"): - val = json.loads(val, object_hook=_datetime_parser) - return val From edf933a0a1811b443d1dc35cb4ae4ab2317b29fe Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Tue, 19 Dec 2023 12:01:07 +0000 Subject: [PATCH 0991/1416] Use datetime to parse timestamps --- satpy/readers/satpy_cf_nc.py | 4 ++-- satpy/tests/reader_tests/test_satpy_cf_nc.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/readers/satpy_cf_nc.py b/satpy/readers/satpy_cf_nc.py index 4a814dd586..c8188da77f 100644 --- a/satpy/readers/satpy_cf_nc.py +++ b/satpy/readers/satpy_cf_nc.py @@ -179,6 +179,7 @@ import itertools import json import logging +from datetime import datetime import xarray as xr from pyresample import AreaDefinition @@ -347,10 +348,9 @@ def _str2dict(self, val): def _datetime_parser(json_dict): - import dateutil.parser for key, value in json_dict.items(): try: - json_dict[key] = dateutil.parser.parse(value) + json_dict[key] = datetime.fromisoformat(value) except (TypeError, ValueError): pass return json_dict diff --git a/satpy/tests/reader_tests/test_satpy_cf_nc.py b/satpy/tests/reader_tests/test_satpy_cf_nc.py index 0710aae57c..e3540a4df7 100644 --- a/satpy/tests/reader_tests/test_satpy_cf_nc.py +++ b/satpy/tests/reader_tests/test_satpy_cf_nc.py @@ -107,7 +107,7 @@ def area(): def common_attrs(area): """Get common dataset attributes.""" return { - "start_time": datetime(2019, 4, 1, 12, 0), + "start_time": datetime(2019, 4, 1, 12, 0, 0, 123456), "end_time": datetime(2019, 4, 1, 12, 15), "platform_name": "tirosn", "orbit_number": 99999, From 414f2d6ff182aa2b9cd8a9006cb3dacd96ab9059 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Tue, 19 Dec 2023 12:37:40 +0000 Subject: [PATCH 0992/1416] Decode all timestamps to datetime --- satpy/readers/satpy_cf_nc.py | 33 ++++++++++++++------ satpy/tests/reader_tests/test_satpy_cf_nc.py | 11 ++++++- 2 files changed, 33 insertions(+), 11 deletions(-) diff --git a/satpy/readers/satpy_cf_nc.py b/satpy/readers/satpy_cf_nc.py index 
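The previous commit swapped dateutil.parser for the standard library's datetime.fromisoformat, and the commit starting here extends the decoding from JSON-embedded values to top-level attributes. A small sketch of the parsing behaviour these changes rely on (example values are hypothetical):

from datetime import datetime

# fromisoformat accepts formats like the ones written for these attributes,
# including microseconds
datetime.fromisoformat("2019-04-01 12:00:00.123456")


def str2datetime(value):
    # the helper pattern used in the diff below: None when not a timestamp
    try:
        return datetime.fromisoformat(value)
    except (TypeError, ValueError):
        return None


str2datetime("2000-01-01")  # -> datetime(2000, 1, 1, 0, 0)
str2datetime(99999)         # -> None, the TypeError is swallowed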
c8188da77f..73c26fccdd 100644 --- a/satpy/readers/satpy_cf_nc.py +++ b/satpy/readers/satpy_cf_nc.py @@ -334,23 +334,36 @@ class DatasetAttributeDecoder: def decode_attrs(self, dataset): """Decode dataset attributes.""" - self._decode_dict_type_attrs(dataset) + self._decode_dict_type_attrs(dataset.attrs) + self._decode_timestamps(dataset.attrs) - def _decode_dict_type_attrs(self, data): - for key, val in data.attrs.items(): - data.attrs[key] = self._str2dict(val) + def _decode_dict_type_attrs(self, attrs): + for key, val in attrs.items(): + attrs[key] = self._str2dict(val) def _str2dict(self, val): """Convert string to dictionary.""" if isinstance(val, str) and val.startswith("{"): - val = json.loads(val, object_hook=_datetime_parser) + val = json.loads(val, object_hook=_datetime_parser_json) return val + def _decode_timestamps(self, attrs): + for key, value in attrs.items(): + timestamp = _str2datetime(value) + if timestamp: + attrs[key] = timestamp -def _datetime_parser(json_dict): + +def _datetime_parser_json(json_dict): for key, value in json_dict.items(): - try: - json_dict[key] = datetime.fromisoformat(value) - except (TypeError, ValueError): - pass + timestamp = _str2datetime(value) + if timestamp: + json_dict[key] = timestamp return json_dict + + +def _str2datetime(string): + try: + return datetime.fromisoformat(string) + except (TypeError, ValueError): + return None diff --git a/satpy/tests/reader_tests/test_satpy_cf_nc.py b/satpy/tests/reader_tests/test_satpy_cf_nc.py index e3540a4df7..d2c50ee908 100644 --- a/satpy/tests/reader_tests/test_satpy_cf_nc.py +++ b/satpy/tests/reader_tests/test_satpy_cf_nc.py @@ -111,7 +111,8 @@ def common_attrs(area): "end_time": datetime(2019, 4, 1, 12, 15), "platform_name": "tirosn", "orbit_number": 99999, - "area": area + "area": area, + "my_timestamp": datetime(2000, 1, 1) } @@ -446,6 +447,14 @@ def test_decoding_of_dict_type_attributes(self, cf_scene, nc_filename): new_attrs = scn_["image0"].attrs[attr_name] assert new_attrs == orig_attrs + def test_decoding_of_timestamps(self, cf_scene, nc_filename): + """Test decoding of timestamps.""" + cf_scene.save_datasets(writer="cf", filename=nc_filename) + scn = Scene(reader="satpy_cf_nc", filenames=[nc_filename]) + scn.load(["image0"]) + expected = cf_scene["image0"].attrs["my_timestamp"] + assert scn["image0"].attrs["my_timestamp"] == expected + def test_write_and_read_from_two_files(self, nc_filename, nc_filename_i): """Save two datasets with different resolution and read the solar_zenith_angle again.""" _create_test_netcdf(nc_filename, resolution=742) From 453c485041c08e4ca2dc514ecc4a48ad683435e4 Mon Sep 17 00:00:00 2001 From: yukaribbba <72339781+yukaribbba@users.noreply.github.com> Date: Wed, 20 Dec 2023 09:15:31 +0800 Subject: [PATCH 0993/1416] Update satpy/composites/__init__.py Co-authored-by: David Hoese --- satpy/composites/__init__.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 8ea76825d7..c172f80b23 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1704,12 +1704,10 @@ def __init__(self, name, mask_value=None, **kwargs): # noqa: D417 """Collect custom configuration values. Args: - mask_value (float / None): All the pixels on the stacked image where the values - of the third dataset that are equal to this will be - masked out. - If not set by the user, it will be ``np.nan``. 
- This argument could be helpful when you try to use a - local image(StaticImageCompositor) for masking. + mask_value (float | None): Value of the third dataset used to generate + the mask for the stacked image. Defaults to ``np.nan``. This is + useful when wanting additional masking for a static image + (e.g. :class:`StaticImageCompositor`). """ self.mask_value = mask_value if mask_value is not None else np.nan From 1187e1f65159a6df7834f9be8ef85adb197fb336 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 20 Dec 2023 09:16:25 +0800 Subject: [PATCH 0994/1416] Update __init__.py --- satpy/composites/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index c172f80b23..6f2ddd2574 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1724,7 +1724,7 @@ def __call__(self, projectables, optional_datasets=None, *args, **kwargs): background = enhance2dataset(projectables[1], convert_p=True) before_bg_mode = background.attrs["mode"] - mask_dataset = projectables[2] if not optional_datasets == [] else None + mask_dataset = projectables[2] if len(projecables) >= 3 else None # Adjust bands so that they match # L/RGB -> RGB/RGB From 707334f531f63517d4fac34560a33a0bb3857151 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 20 Dec 2023 09:17:38 +0800 Subject: [PATCH 0995/1416] Update __init__.py --- satpy/composites/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 6f2ddd2574..e987bb4127 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1724,7 +1724,7 @@ def __call__(self, projectables, optional_datasets=None, *args, **kwargs): background = enhance2dataset(projectables[1], convert_p=True) before_bg_mode = background.attrs["mode"] - mask_dataset = projectables[2] if len(projecables) >= 3 else None + mask_dataset = projectables[2] if len(projectables) >= 3 else None # Adjust bands so that they match # L/RGB -> RGB/RGB From 621f10c76048bd1f1d0e611df9495e42e250cbc2 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 20 Dec 2023 10:11:28 +0800 Subject: [PATCH 0996/1416] Remove masking function --- satpy/composites/__init__.py | 76 ++------------------- satpy/tests/test_composites.py | 120 +++++++++------------------------ 2 files changed, 38 insertions(+), 158 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index e987bb4127..d9d8fec00b 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1677,55 +1677,17 @@ def __call__(self, *args, **kwargs): class BackgroundCompositor(GenericCompositor): - """A compositor that overlays one composite on top of another. + """A compositor that overlays one composite on top of another.""" - Beside foreground and background, a third optional dataset could be passed - to the compositor to use it for masking. If this dataset contains - more than one band, only the first band will be used. This is useful when you - reproject a specific local-area image (e.g. a geostationary satellite view) - to global extent, and put it on a global background (e.g. NASA's Black Marble) - while making other areas of the world transparent, only keeping the local one. - - To use this function in a YAML configuration file, add the third dataset - as ``optional_prerequisites``: - - .. 
code-block:: yaml - - night_cloud_alpha_2000_with_background: - compositor: !!python/name:satpy.composites.BackgroundCompositor - prerequisites: - - name: night_cloud_alpha_2000 - - name: static_night - optional_prerequisites: - - name: IR105 - - """ - def __init__(self, name, mask_value=None, **kwargs): # noqa: D417 - """Collect custom configuration values. - - Args: - mask_value (float | None): Value of the third dataset used to generate - the mask for the stacked image. Defaults to ``np.nan``. This is - useful when wanting additional masking for a static image - (e.g. :class:`StaticImageCompositor`). - - """ - self.mask_value = mask_value if mask_value is not None else np.nan - - super(BackgroundCompositor, self).__init__(name, **kwargs) - - def __call__(self, projectables, optional_datasets=None, *args, **kwargs): + def __call__(self, projectables, *args, **kwargs): """Call the compositor.""" - optional_datasets = [] if optional_datasets is None else optional_datasets - projectables = self.match_data_arrays(projectables + optional_datasets) + projectables = self.match_data_arrays(projectables) # Get enhanced datasets foreground = enhance2dataset(projectables[0], convert_p=True) background = enhance2dataset(projectables[1], convert_p=True) before_bg_mode = background.attrs["mode"] - mask_dataset = projectables[2] if len(projectables) >= 3 else None - # Adjust bands so that they match # L/RGB -> RGB/RGB # LA/RGB -> RGBA/RGBA @@ -1739,10 +1701,8 @@ def __call__(self, projectables, optional_datasets=None, *args, **kwargs): # The result will be used to decide the output image mode initial_bg_alpha = True if "A" in before_bg_mode and "A" in after_bg_mode else False - mask = self._get_mask(mask_dataset, self.mask_value) - attrs = self._combine_metadata_with_mode_and_sensor(foreground, background) - data = self._get_merged_image_data(foreground, background, mask=mask, initial_bg_alpha=initial_bg_alpha) + data = self._get_merged_image_data(foreground, background, initial_bg_alpha=initial_bg_alpha) res = super(BackgroundCompositor, self).__call__(data, **kwargs) res.attrs.update(attrs) return res @@ -1761,28 +1721,9 @@ def _combine_metadata_with_mode_and_sensor(self, attrs["sensor"] = self._get_sensors([foreground, background]) return attrs - @staticmethod - def _get_mask(dataset: xr.DataArray, mask_value): - if dataset is None: - mask = None - else: - # If mask dataset is a composite, extract its first band - try: - dataset = dataset.isel(bands=0) - except ValueError: - pass - - if np.isnan(mask_value): - mask = xr.where(dataset.isnull(), 0, 1) - else: - mask = xr.where(dataset == mask_value, 0, 1) - - return mask - @staticmethod def _get_merged_image_data(foreground: xr.DataArray, background: xr.DataArray, - mask: xr.DataArray, initial_bg_alpha: bool, ) -> list[xr.DataArray]: def _get_alpha(dataset: xr.DataArray): @@ -1805,12 +1746,6 @@ def _get_alpha(dataset: xr.DataArray): alpha_back = _get_alpha(background) new_alpha = alpha_fore + alpha_back * (1 - alpha_fore) - # Do the masking job - if mask is not None: - alpha_fore.data = np.minimum(alpha_fore.data, mask.data) - alpha_back.data = np.minimum(alpha_back.data, mask.data) - new_alpha.data = np.minimum(new_alpha.data, mask.data) - data = [] # Unless background has an initial alpha band, there will be no alpha band in the output image @@ -1829,8 +1764,7 @@ def _get_alpha(dataset: xr.DataArray): chan = (fg_band * alpha_fore + bg_band * alpha_back * (1 - alpha_fore)) / new_alpha_nan if band != "A" else new_alpha - if mask is None: - chan = 
xr.where(chan.isnull(), bg_band * alpha_back, chan) + chan = xr.where(chan.isnull(), bg_band * alpha_back, chan) data.append(chan) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 6006957d3e..f5c85dbfec 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -1488,72 +1488,52 @@ def setup_class(cls): [[1., 0.5], [0., np.nan]], [[1., 0.5], [0., np.nan]]]), "RGBA": np.array([ - [[1.0, 0.5], [0., np.nan]], - [[1.0, 0.5], [0., np.nan]], - [[1.0, 0.5], [0., np.nan]], - [[0.5, 0.5], [0., 0.5]]]), - } - mask_data = { - "L": np.array([[[1., 0.5], [0., np.nan]]]), - "RGB": np.array([ [[1., 0.5], [0., np.nan]], [[1., 0.5], [0., np.nan]], - [[1., 0.5], [0., np.nan]]]), + [[1., 0.5], [0., np.nan]], + [[0.5, 0.5], [0., 0.5]]]), } - mask_no_bands_data = np.array([[1., 0.5], [0., np.nan]]) cls.foreground_data = foreground_data - cls.mask_data = mask_data - cls.mask_no_bands_data = mask_no_bands_data @mock.patch("satpy.composites.enhance2dataset", _enhance2dataset) @pytest.mark.parametrize( - ("foreground_bands", "background_bands", "mask", "mask_value", "exp_bands", "exp_result"), + ("foreground_bands", "background_bands", "exp_bands", "exp_result"), [ - ("L", "L", "L", None, "L", np.array([ - [1.0, 0.5], [0., np.nan]])), - ("L", "LA", "RGB", 0.5, "LA", np.array([ - [[1.0, np.nan], [0.0, np.nan]], - [[1.0, 0.0], [1.0, 1.0]]])), - ("LA", "LA", "L", None, "LA", np.array([ - [[1.0, 0.75], [0.5, np.nan]], - [[1.0, 1.0], [1.0, 0.0]]])), - ("LA", "RGB", "L", 1, "RGB", np.array([ - [[np.nan, 0.75], [0.5, np.nan]], - [[np.nan, 0.75], [0.5, np.nan]], - [[np.nan, 0.75], [0.5, np.nan]]])), - ("RGB", "RGB", "None", None, "RGB", np.array([ - [[1.0, 0.5], [0.0, 1.0]], - [[1.0, 0.5], [0.0, 1.0]], - [[1.0, 0.5], [0.0, 1.0]]])), - ("RGB", "RGBA", "L", 1, "RGBA", np.array([ - [[np.nan, 0.5], [0.0, np.nan]], - [[np.nan, 0.5], [0.0, np.nan]], - [[np.nan, 0.5], [0.0, np.nan]], - [[0.0, 1.0], [1.0, 1.0]]])), - ("RGBA", "RGB", "L", None, "RGB", np.array([ - [[1.0, 0.75], [1.0, np.nan]], - [[1.0, 0.75], [1.0, np.nan]], - [[1.0, 0.75], [1.0, np.nan]]])), - ("RGBA", "RGB", "None", None, "RGB", np.array([ - [[1.0, 0.75], [1.0, 1.0]], - [[1.0, 0.75], [1.0, 1.0]], - [[1.0, 0.75], [1.0, 1.0]]])), - ("RGBA", "RGBA", "RGB", 0.5, "RGBA", np.array([ - [[1.0, np.nan], [1.0, np.nan]], - [[1.0, np.nan], [1.0, np.nan]], - [[1.0, np.nan], [1.0, np.nan]], - [[1.0, 0.0], [1.0, 1.0]]])), + ("L", "L", "L", np.array([[1., 0.5], [0., 1.]])), + ("LA", "LA", "LA", np.array([ + [[1., 0.75], [0.5, 1.]], + [[1., 1.], [1., 1.]]])), + ("LA", "RGB", "RGB", np.array([ + [[1., 0.75], [0.5, 1.]], + [[1., 0.75], [0.5, 1.]], + [[1., 0.75], [0.5, 1.]]])), + ("RGB", "RGB", "RGB", np.array([ + [[1., 0.5], [0., 1.]], + [[1., 0.5], [0., 1.]], + [[1., 0.5], [0., 1.]]])), + ("RGB", "RGBA", "RGBA", np.array([ + [[1., 0.5], [0., 1.]], + [[1., 0.5], [0., 1.]], + [[1., 0.5], [0., 1.]], + [[1., 1.], [1., 1.]]])), + ("RGBA", "RGBA", "RGBA", np.array([ + [[1., 0.75], [1., 1.]], + [[1., 0.75], [1., 1.]], + [[1., 0.75], [1., 1.]], + [[1., 1.], [1., 1.]]])), + ("RGBA", "RGB", "RGB", np.array([ + [[1., 0.75], [1., 1.]], + [[1., 0.75], [1., 1.]], + [[1., 0.75], [1., 1.]]])), ] ) - def test_call(self, foreground_bands, background_bands, mask, mask_value, exp_bands, exp_result): + def test_call(self, foreground_bands, background_bands, exp_bands, exp_result): """Test the background compositing.""" from satpy.composites import BackgroundCompositor - comp = BackgroundCompositor("name", mask_value=mask_value) + comp = 
BackgroundCompositor("name") # L mode images foreground_data = self.foreground_data[foreground_bands] - mask_bands = mask if mask != "None" else "L" - mask_data = self.mask_data[mask_bands] attrs = {"mode": foreground_bands, "area": "foo"} foreground = xr.DataArray(da.from_array(foreground_data), @@ -1564,15 +1544,9 @@ def test_call(self, foreground_bands, background_bands, mask, mask_value, exp_ba background = xr.DataArray(da.ones((len(background_bands), 2, 2)), dims=("bands", "y", "x"), coords={"bands": [c for c in attrs["mode"]]}, attrs=attrs) - attrs = {"mode": mask_bands, "area": "foo"} - mask_dataset = xr.DataArray(da.from_array(mask_data), - dims=("bands", "y", "x"), - coords={"bands": [c for c in attrs["mode"]]}, - attrs=attrs) - - optional_datasets = [mask_dataset] if mask != "None" else [] - res = comp([foreground, background], optional_datasets=optional_datasets) + res = comp([foreground, background]) + print(res.data.compute()) assert res.attrs["area"] == "foo" np.testing.assert_allclose(res, exp_result) @@ -1602,34 +1576,6 @@ def test_multiple_sensors(self): assert res.attrs["mode"] == "L" assert res.attrs["sensor"] == {"abi", "glm"} - @mock.patch("satpy.composites.enhance2dataset", _enhance2dataset) - def test_mask_with_no_bands(self): - """Test the background compositing with mask that doesn't have 'bands' in its coords.""" - from satpy.composites import BackgroundCompositor - comp = BackgroundCompositor("name", mask_value=None) - - attrs = {"mode": "RGBA", "area": "foo"} - foreground = xr.DataArray(da.from_array(self.foreground_data["RGBA"]), - dims=("bands", "y", "x"), - coords={"bands": [c for c in attrs["mode"]]}, - attrs=attrs) - attrs = {"mode": "RGB", "area": "foo"} - background = xr.DataArray(da.ones((len("RGB"), 2, 2)), dims=("bands", "y", "x"), - coords={"bands": [c for c in attrs["mode"]]}, - attrs=attrs) - attrs = {"area": "foo"} - mask_no_bands_dataset = xr.DataArray(da.from_array(self.mask_no_bands_data), - dims=("y", "x"), - attrs=attrs) - - res = comp([foreground, background], optional_datasets=[mask_no_bands_dataset]) - - assert res.attrs["area"] == "foo" - assert res.attrs["mode"] == "RGB" - np.testing.assert_allclose(res, np.array([[[1.0, 0.75], [1.0, np.nan]], - [[1.0, 0.75], [1.0, np.nan]], - [[1.0, 0.75], [1.0, np.nan]]])) - class TestMaskingCompositor: """Test case for the simple masking compositor.""" From 786f09bbee995d3f18c3ae10423c3f9b1bf9a5e5 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 20 Dec 2023 10:13:35 +0800 Subject: [PATCH 0997/1416] Update test_composites.py --- satpy/tests/test_composites.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index f5c85dbfec..9654441c92 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -1546,7 +1546,6 @@ def test_call(self, foreground_bands, background_bands, exp_bands, exp_result): attrs=attrs) res = comp([foreground, background]) - print(res.data.compute()) assert res.attrs["area"] == "foo" np.testing.assert_allclose(res, exp_result) From 37853ad8595feea8aded3cce9154ae38fd41b31e Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 20 Dec 2023 16:59:38 +0000 Subject: [PATCH 0998/1416] Move encoding to separate module --- satpy/cf/decoding.py | 74 +++++++++++++++++++++++++++ satpy/readers/satpy_cf_nc.py | 46 +---------------- satpy/tests/cf_tests/test_decoding.py | 64 +++++++++++++++++++++++ 3 files changed, 140 insertions(+), 44 deletions(-) create mode 100644 satpy/cf/decoding.py create mode 
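The commit starting here moves the decoding into a reusable satpy.cf.decoding module with a single functional entry point. A usage sketch, assuming this commit is applied and using hypothetical attribute values:

import satpy.cf.decoding

attrs = {
    "orbit_number": 99999,
    "start_time": "2019-04-01 12:00:00",
    "orbital_parameters": '{"satellite_nominal_longitude": 0.0}',
}
decoded = satpy.cf.decoding.decode_attrs(attrs)
# decoded["start_time"] is a datetime, the JSON string becomes a nested dict,
# and attrs itself is left unmodified because decode_attrs works on a deep copy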
100644 satpy/tests/cf_tests/test_decoding.py diff --git a/satpy/cf/decoding.py b/satpy/cf/decoding.py new file mode 100644 index 0000000000..0d7a9d22be --- /dev/null +++ b/satpy/cf/decoding.py @@ -0,0 +1,74 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""CF decoding.""" +import copy +import json +from datetime import datetime + + +def decode_attrs(attrs): + """Decode CF-encoded attributes to Python object. + + Converts timestamps to datetime and strings starting with "{" to + dictionary. + + Args: + attrs (dict): Attributes to be decoded + + Returns (dict): Decoded attributes + """ + attrs = copy.deepcopy(attrs) + _decode_dict_type_attrs(attrs) + _decode_timestamps(attrs) + return attrs + + +def _decode_dict_type_attrs(attrs): + for key, val in attrs.items(): + attrs[key] = _str2dict(val) + + +def _str2dict(val): + """Convert string to dictionary.""" + if isinstance(val, str) and val.startswith("{"): + val = json.loads(val, object_hook=_datetime_parser_json) + return val + + +def _decode_timestamps(attrs): + for key, value in attrs.items(): + timestamp = _str2datetime(value) + if timestamp: + attrs[key] = timestamp + + +def _datetime_parser_json(json_dict): + """Traverse JSON dictionary and parse timestamps.""" + for key, value in json_dict.items(): + timestamp = _str2datetime(value) + if timestamp: + json_dict[key] = timestamp + return json_dict + + +def _str2datetime(string): + """Convert string to datetime object.""" + try: + return datetime.fromisoformat(string) + except (TypeError, ValueError): + return None diff --git a/satpy/readers/satpy_cf_nc.py b/satpy/readers/satpy_cf_nc.py index 73c26fccdd..5fab6e6235 100644 --- a/satpy/readers/satpy_cf_nc.py +++ b/satpy/readers/satpy_cf_nc.py @@ -177,13 +177,12 @@ """ import itertools -import json import logging -from datetime import datetime import xarray as xr from pyresample import AreaDefinition +import satpy.cf.decoding from satpy.dataset.dataid import WavelengthRange from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_legacy_chunk_size @@ -312,8 +311,7 @@ def get_dataset(self, ds_id, ds_info): if name != ds_id["name"]: data = data.rename(ds_id["name"]) data.attrs.update(nc.attrs) # For now add global attributes to all datasets - decoder = DatasetAttributeDecoder() - decoder.decode_attrs(data) + data.attrs = satpy.cf.decoding.decode_attrs(data.attrs) return data def get_area_def(self, dataset_id): @@ -327,43 +325,3 @@ def get_area_def(self, dataset_id): # with the yaml_reader NotImplementedError is raised. logger.debug("No AreaDefinition to load from nc file. 
Falling back to SwathDefinition.") raise NotImplementedError - - -class DatasetAttributeDecoder: - """Decode attributes from cf-compatible to Python object.""" - - def decode_attrs(self, dataset): - """Decode dataset attributes.""" - self._decode_dict_type_attrs(dataset.attrs) - self._decode_timestamps(dataset.attrs) - - def _decode_dict_type_attrs(self, attrs): - for key, val in attrs.items(): - attrs[key] = self._str2dict(val) - - def _str2dict(self, val): - """Convert string to dictionary.""" - if isinstance(val, str) and val.startswith("{"): - val = json.loads(val, object_hook=_datetime_parser_json) - return val - - def _decode_timestamps(self, attrs): - for key, value in attrs.items(): - timestamp = _str2datetime(value) - if timestamp: - attrs[key] = timestamp - - -def _datetime_parser_json(json_dict): - for key, value in json_dict.items(): - timestamp = _str2datetime(value) - if timestamp: - json_dict[key] = timestamp - return json_dict - - -def _str2datetime(string): - try: - return datetime.fromisoformat(string) - except (TypeError, ValueError): - return None diff --git a/satpy/tests/cf_tests/test_decoding.py b/satpy/tests/cf_tests/test_decoding.py new file mode 100644 index 0000000000..c20cddf6da --- /dev/null +++ b/satpy/tests/cf_tests/test_decoding.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . 
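A design choice worth spelling out before the new tests: decode_attrs deep-copies the incoming mapping, so callers keep their original, still-encoded attributes. A tiny sketch of that contract, with a hypothetical stand-in for the real conversions:

import copy


def decode(attrs):
    attrs = copy.deepcopy(attrs)       # never mutate the caller's dict
    attrs["start_time"] = "<decoded>"  # stand-in for the real conversions
    return attrs


original = {"start_time": "2019-04-01 12:00:00"}
decode(original)
assert original["start_time"] == "2019-04-01 12:00:00"  # unchanged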
+"""Tests for CF decoding.""" +from datetime import datetime + +import pytest + +import satpy.cf.decoding + + +class TestDecodeAttrs: + """Test decoding of CF-encoded attributes.""" + + @pytest.fixture() + def attrs(self): + """Get CF-encoded attributes.""" + return { + "my_integer": 0, + "my_float": 0.0, + "my_list": [1, 2, 3], + "my_timestamp1": "2000-01-01", + "my_timestamp2": "2000-01-01 12:15:33", + "my_timestamp3": "2000-01-01 12:15:33.123456", + "my_dict": '{"a": {"b": [1, 2, 3]}, "c": {"d": "2000-01-01 12:15:33.123456"}}' + } + + @pytest.fixture() + def expected(self): + """Get expected decoded results.""" + return { + "my_integer": 0, + "my_float": 0.0, + "my_list": [1, 2, 3], + "my_timestamp1": datetime(2000, 1, 1), + "my_timestamp2": datetime(2000, 1, 1, 12, 15, 33), + "my_timestamp3": datetime(2000, 1, 1, 12, 15, 33, 123456), + "my_dict": {"a": {"b": [1, 2, 3]}, + "c": {"d": datetime(2000, 1, 1, 12, 15, 33, 123456)}} + } + + def test_decoding(self, attrs, expected): + """Test decoding of CF-encoded attributes.""" + res = satpy.cf.decoding.decode_attrs(attrs) + assert res == expected + + def test_decoding_doesnt_modify_original(self, attrs): + """Test that decoding doesn't modify the original attributes.""" + satpy.cf.decoding.decode_attrs(attrs) + assert isinstance(attrs["my_dict"], str) From 26eaaf13f99eec86fc7d4b554d60ea5e144592c5 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Thu, 21 Dec 2023 10:37:13 +0800 Subject: [PATCH 0999/1416] Update __init__.py --- satpy/composites/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index d9d8fec00b..5701842a2b 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1765,6 +1765,7 @@ def _get_alpha(dataset: xr.DataArray): bg_band * alpha_back * (1 - alpha_fore)) / new_alpha_nan if band != "A" else new_alpha chan = xr.where(chan.isnull(), bg_band * alpha_back, chan) + chan["bands"] = band data.append(chan) From ad0895091110f31fc6e92dfd6c9b15ba023bdbeb Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 21 Dec 2023 09:32:06 -0600 Subject: [PATCH 1000/1416] Fix 'viirs_edr' renaming two sets of dimensions to the same names --- satpy/readers/viirs_edr.py | 24 ++++++++++++++-------- satpy/tests/reader_tests/test_viirs_edr.py | 1 + 2 files changed, 16 insertions(+), 9 deletions(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index b0eaf7b7ba..eaf2f53d42 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -62,6 +62,7 @@ import logging from typing import Iterable +import dask.array as da import xarray as xr from satpy import DataID @@ -93,11 +94,6 @@ def __init__(self, filename, filename_info, filetype_info): "Along_Scan_750m": -1, "Along_Track_750m": row_chunks_m, }) - if "Columns" in self.nc.dims: - self.nc = self.nc.rename({"Columns": "x", "Rows": "y"}) - elif "Along_Track_375m" in self.nc.dims: - self.nc = self.nc.rename({"Along_Scan_375m": "x", "Along_Track_375m": "y"}) - self.nc = self.nc.rename({"Along_Scan_750m": "x", "Along_Track_750m": "y"}) # For some reason, no 'standard_name' is defined in some netCDF files, so # here we manually make the definitions. 
@@ -134,7 +130,8 @@ def get_dataset(self, dataset_id: DataID, info: dict) -> xr.DataArray: # delete the coordinates here so the base reader doesn't try to # make a SwathDefinition data_arr = data_arr.reset_coords(drop=True) - return data_arr + + return self._rename_dims(data_arr) def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: # xarray auto mask and scale handled any fills from the file @@ -152,6 +149,16 @@ def _decode_flag_meanings(data_arr: xr.DataArray): # only handle CF-standard flag meanings data_arr.attrs["flag_meanings"] = [flag for flag in data_arr.attrs["flag_meanings"].split(" ")] + @staticmethod + def _rename_dims(data_arr: xr.DataArray) -> xr.DataArray: + if "Columns" in data_arr.dims: + data_arr = data_arr.rename({"Columns": "x", "Rows": "y"}) + if "Along_Track_375m" in data_arr.dims: + data_arr = data_arr.rename({"Along_Scan_375m": "x", "Along_Track_375m": "y"}) + if "Along_Track_750m" in data_arr.dims: + data_arr = data_arr.rename({"Along_Scan_750m": "x", "Along_Track_750m": "y"}) + return data_arr + @property def start_time(self): """Get first date/time when observations were recorded.""" @@ -277,7 +284,7 @@ def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: new_data_arr = new_data_arr.where(good_mask) return new_data_arr - def _get_veg_index_good_mask(self) -> xr.DataArray: + def _get_veg_index_good_mask(self) -> da.Array: # each mask array should be TRUE when pixels are UNACCEPTABLE qf1 = self.nc["QF1 Surface Reflectance"] has_sun_glint = (qf1 & 0b11000000) > 0 @@ -306,8 +313,7 @@ def _get_veg_index_good_mask(self) -> xr.DataArray: ) # upscale from M-band resolution to I-band resolution bad_mask_iband_dask = bad_mask.data.repeat(2, axis=1).repeat(2, axis=0) - good_mask_iband = xr.DataArray(~bad_mask_iband_dask, dims=qf1.dims) - return good_mask_iband + return ~bad_mask_iband_dask class VIIRSLSTHandler(VIIRSJRRFileHandler): diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index a6932520c0..d042576d05 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -152,6 +152,7 @@ def _create_surf_refl_variables() -> dict[str, xr.DataArray]: "750m Surface Reflectance Band M1": xr.DataArray(m_data, dims=m_dims, attrs=sr_attrs), } for data_arr in data_arrs.values(): + data_arr.encoding["chunksizes"] = data_arr.shape if "scale_factor" not in data_arr.attrs: continue data_arr.encoding["dtype"] = np.int16 From a53068e9050dab6a2c530e73889bc4054c3862af Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Fri, 22 Dec 2023 00:25:53 +0800 Subject: [PATCH 1001/1416] something changed in metadata combination --- satpy/composites/__init__.py | 3 ++- satpy/tests/test_composites.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 5701842a2b..5a20530a8f 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1704,7 +1704,8 @@ def __call__(self, projectables, *args, **kwargs): attrs = self._combine_metadata_with_mode_and_sensor(foreground, background) data = self._get_merged_image_data(foreground, background, initial_bg_alpha=initial_bg_alpha) res = super(BackgroundCompositor, self).__call__(data, **kwargs) - res.attrs.update(attrs) + attrs.update(res.attrs) + res.attrs = attrs return res def _combine_metadata_with_mode_and_sensor(self, diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 
9654441c92..ef866ac341 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -1573,7 +1573,7 @@ def test_multiple_sensors(self): assert res.attrs["area"] == "foo" np.testing.assert_allclose(res, np.array([[1., 0.5], [0., 1.]])) assert res.attrs["mode"] == "L" - assert res.attrs["sensor"] == {"abi", "glm"} + assert res.attrs["sensor"] is None class TestMaskingCompositor: From f4569bf275f0e9861d3d47f4a02c50be6b43808e Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Fri, 22 Dec 2023 10:36:36 +0800 Subject: [PATCH 1002/1416] update metadata --- satpy/composites/__init__.py | 2 ++ satpy/tests/test_composites.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 5a20530a8f..fc476b3658 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1703,6 +1703,8 @@ def __call__(self, projectables, *args, **kwargs): attrs = self._combine_metadata_with_mode_and_sensor(foreground, background) data = self._get_merged_image_data(foreground, background, initial_bg_alpha=initial_bg_alpha) + for data_arr in data: + data_arr.attrs = attrs res = super(BackgroundCompositor, self).__call__(data, **kwargs) attrs.update(res.attrs) res.attrs = attrs diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index ef866ac341..9654441c92 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -1573,7 +1573,7 @@ def test_multiple_sensors(self): assert res.attrs["area"] == "foo" np.testing.assert_allclose(res, np.array([[1., 0.5], [0., 1.]])) assert res.attrs["mode"] == "L" - assert res.attrs["sensor"] is None + assert res.attrs["sensor"] == {"abi", "glm"} class TestMaskingCompositor: From 8d41b76e822f4451df8cc10432d1022133d191ac Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sat, 23 Dec 2023 09:10:41 +0800 Subject: [PATCH 1003/1416] Update __init__.py --- satpy/composites/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index fc476b3658..eefd5258ef 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1753,7 +1753,7 @@ def _get_alpha(dataset: xr.DataArray): # Unless background has an initial alpha band, there will be no alpha band in the output image # Let the writer decide - output_mode = background.mode if initial_bg_alpha else background.mode.replace("A", "") + output_mode = background.attrs["mode"] if initial_bg_alpha else background.attrs["mode"].replace("A", "") # If we let the writer decide alpha band, we must fill the transparent areas in the image with np.nan first # The best way is through the new alpha From e602e180353a1a67573d50ddb3832a5e89df9e53 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 1 Jan 2024 19:58:23 +0000 Subject: [PATCH 1004/1416] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.1.7 → v0.1.9](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.7...v0.1.9) - [github.com/pre-commit/mirrors-mypy: v1.7.1 → v1.8.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.7.1...v1.8.0) - [github.com/pycqa/isort: 5.13.1 → 5.13.2](https://github.com/pycqa/isort/compare/5.13.1...5.13.2) --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml 
b/.pre-commit-config.yaml index a398bd445f..37c458982a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,7 +3,7 @@ fail_fast: false repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.1.7' + rev: 'v0.1.9' hooks: - id: ruff - repo: https://github.com/pre-commit/pre-commit-hooks @@ -19,7 +19,7 @@ repos: - id: bandit args: [--ini, .bandit] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.7.1' # Use the sha / tag you want to point at + rev: 'v1.8.0' # Use the sha / tag you want to point at hooks: - id: mypy additional_dependencies: @@ -29,7 +29,7 @@ repos: - types-requests args: ["--python-version", "3.9", "--ignore-missing-imports"] - repo: https://github.com/pycqa/isort - rev: 5.13.1 + rev: 5.13.2 hooks: - id: isort language_version: python3 From c569e442e2c2d589ed159136c5c6f84af001c993 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 3 Jan 2024 09:00:59 -0600 Subject: [PATCH 1005/1416] Add support for NOAA-21 in MiRS limb correction --- satpy/readers/mirs.py | 7 ++++--- satpy/tests/reader_tests/test_mirs.py | 13 ++++++++++--- 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/satpy/readers/mirs.py b/satpy/readers/mirs.py index 5f68af2d6c..362ed1371c 100644 --- a/satpy/readers/mirs.py +++ b/satpy/readers/mirs.py @@ -304,9 +304,10 @@ def force_time(self, key): def _get_coeff_filenames(self): """Retrieve necessary files for coefficients if needed.""" coeff_fn = {"sea": None, "land": None} - if self.platform_name == "noaa-20": - coeff_fn["land"] = retrieve("readers/limbcoef_atmsland_noaa20.txt") - coeff_fn["sea"] = retrieve("readers/limbcoef_atmssea_noaa20.txt") + if self.platform_name.startswith("noaa"): + suffix = self.platform_name[-2:] + coeff_fn["land"] = retrieve(f"readers/limbcoef_atmsland_noaa{suffix}.txt") + coeff_fn["sea"] = retrieve(f"readers/limbcoef_atmssea_noaa{suffix}.txt") if self.platform_name == "npp": coeff_fn["land"] = retrieve("readers/limbcoef_atmsland_snpp.txt") coeff_fn["sea"] = retrieve("readers/limbcoef_atmssea_snpp.txt") diff --git a/satpy/tests/reader_tests/test_mirs.py b/satpy/tests/reader_tests/test_mirs.py index 69f5543411..d68f8143db 100644 --- a/satpy/tests/reader_tests/test_mirs.py +++ b/satpy/tests/reader_tests/test_mirs.py @@ -29,6 +29,7 @@ METOP_FILE = "IMG_SX.M2.D17037.S1601.E1607.B0000001.WE.HR.ORB.nc" NPP_MIRS_L2_SWATH = "NPR-MIRS-IMG_v11r6_npp_s201702061601000_e201702061607000_c202012201658410.nc" N20_MIRS_L2_SWATH = "NPR-MIRS-IMG_v11r4_n20_s201702061601000_e201702061607000_c202012201658410.nc" +N21_MIRS_L2_SWATH = "NPR-MIRS-IMG_v11r4_n21_s201702061601000_e201702061607000_c202012201658410.nc" OTHER_MIRS_L2_SWATH = "NPR-MIRS-IMG_v11r4_gpm_s201702061601000_e201702061607000_c202010080001310.nc" EXAMPLE_FILES = [METOP_FILE, NPP_MIRS_L2_SWATH, OTHER_MIRS_L2_SWATH] @@ -102,10 +103,8 @@ def fake_coeff_from_fn(fn): next_line = " {}\n".format(" ".join([str(x) for x in locations[idx - 1]])) coeff_str.append(next_line) for fov in range(1, N_FOV+1): - random_coeff = np.random.rand(all_nchx[nx]) random_coeff = np.ones(all_nchx[nx]) str_coeff = " ".join([str(x) for x in random_coeff]) - random_means = np.random.uniform(261, 267, all_nchx[nx]) random_means = np.zeros(all_nchx[nx]) str_means = " ".join([str(x) for x in random_means]) error_val = np.random.uniform(0, 4) @@ -309,6 +308,7 @@ def _check_attrs(data_arr, platform_name): ([METOP_FILE], TEST_VARS, "metop-a"), ([NPP_MIRS_L2_SWATH], TEST_VARS, "npp"), ([N20_MIRS_L2_SWATH], TEST_VARS, "noaa-20"), + ([N21_MIRS_L2_SWATH], 
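The NOAA-21 support added to the MiRS reader above reduces to a platform-name to coefficient-file mapping. A standalone sketch of that selection logic, with the file names taken from the diff and the control flow slightly simplified:

def limb_coeff_files(platform_name):
    # "noaa-20" -> "20", "noaa-21" -> "21"; S-NPP keeps its own files
    if platform_name.startswith("noaa"):
        suffix = platform_name[-2:]
        return (f"readers/limbcoef_atmsland_noaa{suffix}.txt",
                f"readers/limbcoef_atmssea_noaa{suffix}.txt")
    if platform_name == "npp":
        return ("readers/limbcoef_atmsland_snpp.txt",
                "readers/limbcoef_atmssea_snpp.txt")
    return (None, None)  # other platforms: no limb correction files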
TEST_VARS, "noaa-21"), ([OTHER_MIRS_L2_SWATH], TEST_VARS, "gpm"), ] ) @@ -323,9 +323,16 @@ def test_basic_load(self, filenames, loadable_ids, loadables = r.select_files_from_pathnames(filenames) r.create_filehandlers(loadables, fh_kwargs=reader_kw) with mock.patch("satpy.readers.mirs.read_atms_coeff_to_string") as \ - fd, mock.patch("satpy.readers.mirs.retrieve"): + fd, mock.patch("satpy.readers.mirs.retrieve") as rtv: fd.side_effect = fake_coeff_from_fn loaded_data_arrs = r.load(loadable_ids) + if reader_kw.get("limb_correction", True) and platform_name in ("npp", "noaa-20", "noaa-21"): + suffix = f"noaa{platform_name[-2:]}" if platform_name.startswith("noaa") else "snpp" + assert rtv.call_count == 2 * len([var_name for var_name in loadable_ids if "btemp" in var_name]) + for calls_args in rtv.call_args_list: + assert calls_args[0][0].endswith(f"_{suffix}.txt") + else: + rtv.assert_not_called() assert len(loaded_data_arrs) == len(loadable_ids) test_data = fake_open_dataset(filenames[0]) From e5c50f593e4457b683a9e2c3247fd0a5a2178d64 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 3 Jan 2024 09:05:21 -0600 Subject: [PATCH 1006/1416] Refactor mirs tests --- satpy/tests/reader_tests/test_mirs.py | 78 +++++++++++++-------------- 1 file changed, 39 insertions(+), 39 deletions(-) diff --git a/satpy/tests/reader_tests/test_mirs.py b/satpy/tests/reader_tests/test_mirs.py index d68f8143db..d12a553235 100644 --- a/satpy/tests/reader_tests/test_mirs.py +++ b/satpy/tests/reader_tests/test_mirs.py @@ -268,40 +268,6 @@ def test_available_datasets(self, filenames, expected_datasets): for var_name in expected_datasets: assert var_name in avails - @staticmethod - def _check_area(data_arr): - from pyresample.geometry import SwathDefinition - area = data_arr.attrs["area"] - assert isinstance(area, SwathDefinition) - - @staticmethod - def _check_fill(data_arr): - assert "_FillValue" not in data_arr.attrs - if np.issubdtype(data_arr.dtype, np.floating): - # we started with float32, it should stay that way - assert data_arr.dtype.type == np.float64 - - @staticmethod - def _check_valid_range(data_arr, test_valid_range): - # valid_range is popped out of data_arr.attrs when it is applied - assert "valid_range" not in data_arr.attrs - assert data_arr.data.min() >= test_valid_range[0] - assert data_arr.data.max() <= test_valid_range[1] - - @staticmethod - def _check_fill_value(data_arr, test_fill_value): - assert "_FillValue" not in data_arr.attrs - assert not (data_arr.data == test_fill_value).any() - - @staticmethod - def _check_attrs(data_arr, platform_name): - attrs = data_arr.attrs - assert "scale_factor" not in attrs - assert "platform_name" in attrs - assert attrs["platform_name"] == platform_name - assert attrs["start_time"] == START_TIME - assert attrs["end_time"] == END_TIME - @pytest.mark.parametrize( ("filenames", "loadable_ids", "platform_name"), [ @@ -340,18 +306,18 @@ def test_basic_load(self, filenames, loadable_ids, data_arr = data_arr.compute() var_name = data_arr.attrs["name"] if var_name not in ["latitude", "longitude"]: - self._check_area(data_arr) - self._check_fill(data_arr) - self._check_attrs(data_arr, platform_name) + _check_area(data_arr) + _check_fill(data_arr) + _check_attrs(data_arr, platform_name) input_fake_data = test_data["BT"] if "btemp" in var_name \ else test_data[var_name] if "valid_range" in input_fake_data.attrs: valid_range = input_fake_data.attrs["valid_range"] - self._check_valid_range(data_arr, valid_range) + _check_valid_range(data_arr, valid_range) if "_FillValue" 
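The tests in this area all rely on the same mocking strategy: patch the reader's file access and route every call to a fake via side_effect. A self-contained sketch of that pattern (the fake return value here is a hypothetical stand-in, not a real dataset):

from unittest import mock

import xarray as xr


def fake_open(filename, **kwargs):
    return {"filename": filename}  # hypothetical stand-in for a dataset


with mock.patch("xarray.open_dataset") as od:
    od.side_effect = fake_open     # every call inside the block hits the fake
    ds = xr.open_dataset("some_file.nc")
    assert ds == {"filename": "some_file.nc"}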
in input_fake_data.attrs: fill_value = input_fake_data.attrs["_FillValue"] - self._check_fill_value(data_arr, fill_value) + _check_fill_value(data_arr, fill_value) sensor = data_arr.attrs["sensor"] if reader_kw.get("limb_correction", True) and sensor == "atms": @@ -359,3 +325,37 @@ def test_basic_load(self, filenames, loadable_ids, else: fd.assert_not_called() assert data_arr.attrs["units"] == DEFAULT_UNITS[var_name] + + +def _check_area(data_arr): + from pyresample.geometry import SwathDefinition + area = data_arr.attrs["area"] + assert isinstance(area, SwathDefinition) + + +def _check_fill(data_arr): + assert "_FillValue" not in data_arr.attrs + if np.issubdtype(data_arr.dtype, np.floating): + # we started with float32, it should stay that way + assert data_arr.dtype.type == np.float64 + + +def _check_valid_range(data_arr, test_valid_range): + # valid_range is popped out of data_arr.attrs when it is applied + assert "valid_range" not in data_arr.attrs + assert data_arr.data.min() >= test_valid_range[0] + assert data_arr.data.max() <= test_valid_range[1] + + +def _check_fill_value(data_arr, test_fill_value): + assert "_FillValue" not in data_arr.attrs + assert not (data_arr.data == test_fill_value).any() + + +def _check_attrs(data_arr, platform_name): + attrs = data_arr.attrs + assert "scale_factor" not in attrs + assert "platform_name" in attrs + assert attrs["platform_name"] == platform_name + assert attrs["start_time"] == START_TIME + assert attrs["end_time"] == END_TIME From 31c65986ab180633ae24a7827f05d303199aab59 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 3 Jan 2024 09:13:55 -0600 Subject: [PATCH 1007/1416] Refactor mirs tests --- satpy/tests/reader_tests/test_mirs.py | 200 ++++++++++++-------------- 1 file changed, 92 insertions(+), 108 deletions(-) diff --git a/satpy/tests/reader_tests/test_mirs.py b/satpy/tests/reader_tests/test_mirs.py index d12a553235..2ca3325139 100644 --- a/satpy/tests/reader_tests/test_mirs.py +++ b/satpy/tests/reader_tests/test_mirs.py @@ -16,7 +16,8 @@ # # You should have received a copy of the GNU General Public License along with # Satpy. If not, see . 
-"""Module for testing the satpy.readers.tropomi_l2 module.""" +"""Module for testing the satpy.readers.mirs module.""" +from __future__ import annotations import os from datetime import datetime @@ -26,6 +27,10 @@ import pytest import xarray as xr +from satpy._config import config_search_paths +from satpy.readers import load_reader +from satpy.readers.yaml_reader import FileYAMLReader + METOP_FILE = "IMG_SX.M2.D17037.S1601.E1607.B0000001.WE.HR.ORB.nc" NPP_MIRS_L2_SWATH = "NPR-MIRS-IMG_v11r6_npp_s201702061601000_e201702061607000_c202012201658410.nc" N20_MIRS_L2_SWATH = "NPR-MIRS-IMG_v11r4_n20_s201702061601000_e201702061607000_c202012201658410.nc" @@ -218,113 +223,92 @@ def fake_open_dataset(filename, **kwargs): return _get_datasets_with_attributes() -class TestMirsL2_NcReader: - """Test mirs Reader.""" - - yaml_file = "mirs.yaml" - - def setup_method(self): - """Read fake data.""" - from satpy._config import config_search_paths - self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) - - @pytest.mark.parametrize( - ("filenames", "expected_loadables"), - [ - ([METOP_FILE], 1), - ([NPP_MIRS_L2_SWATH], 1), - ([OTHER_MIRS_L2_SWATH], 1), - ] - ) - def test_reader_creation(self, filenames, expected_loadables): - """Test basic initialization.""" - from satpy.readers import load_reader - with mock.patch("satpy.readers.mirs.xr.open_dataset") as od: - od.side_effect = fake_open_dataset - r = load_reader(self.reader_configs) - loadables = r.select_files_from_pathnames(filenames) - assert len(loadables) == expected_loadables - r.create_filehandlers(loadables) - # make sure we have some files - assert r.file_handlers - - @pytest.mark.parametrize( - ("filenames", "expected_datasets"), - [ - ([METOP_FILE], DS_IDS), - ([NPP_MIRS_L2_SWATH], DS_IDS), - ([OTHER_MIRS_L2_SWATH], DS_IDS), - ] - ) - def test_available_datasets(self, filenames, expected_datasets): - """Test that variables are dynamically discovered.""" - from satpy.readers import load_reader - with mock.patch("satpy.readers.mirs.xr.open_dataset") as od: - od.side_effect = fake_open_dataset - r = load_reader(self.reader_configs) - loadables = r.select_files_from_pathnames(filenames) - r.create_filehandlers(loadables) - avails = list(r.available_dataset_names) - for var_name in expected_datasets: - assert var_name in avails - - @pytest.mark.parametrize( - ("filenames", "loadable_ids", "platform_name"), - [ - ([METOP_FILE], TEST_VARS, "metop-a"), - ([NPP_MIRS_L2_SWATH], TEST_VARS, "npp"), - ([N20_MIRS_L2_SWATH], TEST_VARS, "noaa-20"), - ([N21_MIRS_L2_SWATH], TEST_VARS, "noaa-21"), - ([OTHER_MIRS_L2_SWATH], TEST_VARS, "gpm"), - ] - ) - @pytest.mark.parametrize("reader_kw", [{}, {"limb_correction": False}]) - def test_basic_load(self, filenames, loadable_ids, - platform_name, reader_kw): - """Test that variables are loaded properly.""" - from satpy.readers import load_reader - with mock.patch("satpy.readers.mirs.xr.open_dataset") as od: - od.side_effect = fake_open_dataset - r = load_reader(self.reader_configs) - loadables = r.select_files_from_pathnames(filenames) - r.create_filehandlers(loadables, fh_kwargs=reader_kw) - with mock.patch("satpy.readers.mirs.read_atms_coeff_to_string") as \ - fd, mock.patch("satpy.readers.mirs.retrieve") as rtv: - fd.side_effect = fake_coeff_from_fn - loaded_data_arrs = r.load(loadable_ids) - if reader_kw.get("limb_correction", True) and platform_name in ("npp", "noaa-20", "noaa-21"): - suffix = f"noaa{platform_name[-2:]}" if platform_name.startswith("noaa") else "snpp" - assert 
rtv.call_count == 2 * len([var_name for var_name in loadable_ids if "btemp" in var_name]) - for calls_args in rtv.call_args_list: - assert calls_args[0][0].endswith(f"_{suffix}.txt") - else: - rtv.assert_not_called() - assert len(loaded_data_arrs) == len(loadable_ids) - - test_data = fake_open_dataset(filenames[0]) - for _data_id, data_arr in loaded_data_arrs.items(): - data_arr = data_arr.compute() - var_name = data_arr.attrs["name"] - if var_name not in ["latitude", "longitude"]: - _check_area(data_arr) - _check_fill(data_arr) - _check_attrs(data_arr, platform_name) - - input_fake_data = test_data["BT"] if "btemp" in var_name \ - else test_data[var_name] - if "valid_range" in input_fake_data.attrs: - valid_range = input_fake_data.attrs["valid_range"] - _check_valid_range(data_arr, valid_range) - if "_FillValue" in input_fake_data.attrs: - fill_value = input_fake_data.attrs["_FillValue"] - _check_fill_value(data_arr, fill_value) - - sensor = data_arr.attrs["sensor"] - if reader_kw.get("limb_correction", True) and sensor == "atms": - fd.assert_called() - else: - fd.assert_not_called() - assert data_arr.attrs["units"] == DEFAULT_UNITS[var_name] +@pytest.mark.parametrize( + ("filenames", "expected_datasets"), + [ + ([METOP_FILE], DS_IDS), + ([NPP_MIRS_L2_SWATH], DS_IDS), + ([OTHER_MIRS_L2_SWATH], DS_IDS), + ] +) +def test_available_datasets(filenames, expected_datasets): + """Test that variables are dynamically discovered.""" + r = _create_fake_reader(filenames, {}) + avails = list(r.available_dataset_names) + for var_name in expected_datasets: + assert var_name in avails + + +@pytest.mark.parametrize( + ("filenames", "loadable_ids", "platform_name"), + [ + ([METOP_FILE], TEST_VARS, "metop-a"), + ([NPP_MIRS_L2_SWATH], TEST_VARS, "npp"), + ([N20_MIRS_L2_SWATH], TEST_VARS, "noaa-20"), + ([N21_MIRS_L2_SWATH], TEST_VARS, "noaa-21"), + ([OTHER_MIRS_L2_SWATH], TEST_VARS, "gpm"), + ] +) +@pytest.mark.parametrize("reader_kw", [{}, {"limb_correction": False}]) +def test_basic_load(filenames, loadable_ids, platform_name, reader_kw): + """Test that variables are loaded properly.""" + r = _create_fake_reader(filenames, reader_kw) + with mock.patch("satpy.readers.mirs.read_atms_coeff_to_string") as \ + fd, mock.patch("satpy.readers.mirs.retrieve") as rtv: + fd.side_effect = fake_coeff_from_fn + loaded_data_arrs = r.load(loadable_ids) + if reader_kw.get("limb_correction", True) and platform_name in ("npp", "noaa-20", "noaa-21"): + suffix = f"noaa{platform_name[-2:]}" if platform_name.startswith("noaa") else "snpp" + assert rtv.call_count == 2 * len([var_name for var_name in loadable_ids if "btemp" in var_name]) + for calls_args in rtv.call_args_list: + assert calls_args[0][0].endswith(f"_{suffix}.txt") + else: + rtv.assert_not_called() + assert len(loaded_data_arrs) == len(loadable_ids) + + test_data = fake_open_dataset(filenames[0]) + for _data_id, data_arr in loaded_data_arrs.items(): + data_arr = data_arr.compute() + var_name = data_arr.attrs["name"] + if var_name not in ["latitude", "longitude"]: + _check_area(data_arr) + _check_fill(data_arr) + _check_attrs(data_arr, platform_name) + + input_fake_data = test_data["BT"] if "btemp" in var_name \ + else test_data[var_name] + if "valid_range" in input_fake_data.attrs: + valid_range = input_fake_data.attrs["valid_range"] + _check_valid_range(data_arr, valid_range) + if "_FillValue" in input_fake_data.attrs: + fill_value = input_fake_data.attrs["_FillValue"] + _check_fill_value(data_arr, fill_value) + + sensor = data_arr.attrs["sensor"] + if 
reader_kw.get("limb_correction", True) and sensor == "atms": + fd.assert_called() + else: + fd.assert_not_called() + assert data_arr.attrs["units"] == DEFAULT_UNITS[var_name] + + +def _create_fake_reader( + filenames: list[str], + reader_kwargs: dict, + exp_loadable_files: int | None = None +) -> FileYAMLReader: + exp_loadable_files = exp_loadable_files if exp_loadable_files is not None else len(filenames) + reader_configs = config_search_paths(os.path.join("readers", "mirs.yaml")) + with mock.patch("satpy.readers.mirs.xr.open_dataset") as od: + od.side_effect = fake_open_dataset + r = load_reader(reader_configs) + loadables = r.select_files_from_pathnames(filenames) + r.create_filehandlers(loadables, fh_kwargs=reader_kwargs) + + assert isinstance(r, FileYAMLReader) + assert len(loadables) == exp_loadable_files + assert r.file_handlers + return r def _check_area(data_arr): From 34e6c7022dbc005d3bc236dbaf3d976e40fdfda6 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 3 Jan 2024 09:30:56 -0600 Subject: [PATCH 1008/1416] Refactor mirs tests --- satpy/tests/reader_tests/test_mirs.py | 84 ++++++++++++++++----------- 1 file changed, 49 insertions(+), 35 deletions(-) diff --git a/satpy/tests/reader_tests/test_mirs.py b/satpy/tests/reader_tests/test_mirs.py index 2ca3325139..f4c6bcbed1 100644 --- a/satpy/tests/reader_tests/test_mirs.py +++ b/satpy/tests/reader_tests/test_mirs.py @@ -28,6 +28,7 @@ import xarray as xr from satpy._config import config_search_paths +from satpy.dataset import DataID from satpy.readers import load_reader from satpy.readers.yaml_reader import FileYAMLReader @@ -253,43 +254,15 @@ def test_available_datasets(filenames, expected_datasets): def test_basic_load(filenames, loadable_ids, platform_name, reader_kw): """Test that variables are loaded properly.""" r = _create_fake_reader(filenames, reader_kw) - with mock.patch("satpy.readers.mirs.read_atms_coeff_to_string") as \ - fd, mock.patch("satpy.readers.mirs.retrieve") as rtv: - fd.side_effect = fake_coeff_from_fn - loaded_data_arrs = r.load(loadable_ids) - if reader_kw.get("limb_correction", True) and platform_name in ("npp", "noaa-20", "noaa-21"): - suffix = f"noaa{platform_name[-2:]}" if platform_name.startswith("noaa") else "snpp" - assert rtv.call_count == 2 * len([var_name for var_name in loadable_ids if "btemp" in var_name]) - for calls_args in rtv.call_args_list: - assert calls_args[0][0].endswith(f"_{suffix}.txt") - else: - rtv.assert_not_called() - assert len(loaded_data_arrs) == len(loadable_ids) test_data = fake_open_dataset(filenames[0]) - for _data_id, data_arr in loaded_data_arrs.items(): - data_arr = data_arr.compute() - var_name = data_arr.attrs["name"] - if var_name not in ["latitude", "longitude"]: - _check_area(data_arr) - _check_fill(data_arr) - _check_attrs(data_arr, platform_name) - - input_fake_data = test_data["BT"] if "btemp" in var_name \ - else test_data[var_name] - if "valid_range" in input_fake_data.attrs: - valid_range = input_fake_data.attrs["valid_range"] - _check_valid_range(data_arr, valid_range) - if "_FillValue" in input_fake_data.attrs: - fill_value = input_fake_data.attrs["_FillValue"] - _check_fill_value(data_arr, fill_value) - - sensor = data_arr.attrs["sensor"] - if reader_kw.get("limb_correction", True) and sensor == "atms": - fd.assert_called() - else: - fd.assert_not_called() - assert data_arr.attrs["units"] == DEFAULT_UNITS[var_name] + exp_limb_corr = reader_kw.get("limb_correction", True) and platform_name in ("npp", "noaa-20", "noaa-21") + loaded_data_arrs = 
_load_and_check_limb_correction_variables(r, loadable_ids, platform_name, exp_limb_corr) + for _data_id, data_arr_dask in loaded_data_arrs.items(): + data_arr = data_arr_dask.compute() + assert data_arr.dtype is data_arr_dask.dtype + # assert data_arr.dtype is np.float32 + _check_metadata(data_arr, test_data, platform_name) def _create_fake_reader( @@ -311,6 +284,47 @@ def _create_fake_reader( return r +def _load_and_check_limb_correction_variables( + reader: FileYAMLReader, + loadable_ids: list[str], + platform_name: str, + exp_limb_corr: bool +) -> dict[DataID, xr.DataArray]: + with mock.patch("satpy.readers.mirs.read_atms_coeff_to_string") as \ + fd, mock.patch("satpy.readers.mirs.retrieve") as rtv: + fd.side_effect = fake_coeff_from_fn + loaded_data_arrs = reader.load(loadable_ids) + if exp_limb_corr: + fd.assert_called() + suffix = f"noaa{platform_name[-2:]}" if platform_name.startswith("noaa") else "snpp" + assert rtv.call_count == 2 * len([var_name for var_name in loadable_ids if "btemp" in var_name]) + for calls_args in rtv.call_args_list: + assert calls_args[0][0].endswith(f"_{suffix}.txt") + else: + fd.assert_not_called() + rtv.assert_not_called() + assert len(loaded_data_arrs) == len(loadable_ids) + return loaded_data_arrs + + +def _check_metadata(data_arr: xr.DataArray, test_data: xr.Dataset, platform_name: str) -> None: + var_name = data_arr.attrs["name"] + if var_name not in ["latitude", "longitude"]: + _check_area(data_arr) + _check_fill(data_arr) + _check_attrs(data_arr, platform_name) + + input_fake_data = test_data["BT"] if "btemp" in var_name else test_data[var_name] + if "valid_range" in input_fake_data.attrs: + valid_range = input_fake_data.attrs["valid_range"] + _check_valid_range(data_arr, valid_range) + if "_FillValue" in input_fake_data.attrs: + fill_value = input_fake_data.attrs["_FillValue"] + _check_fill_value(data_arr, fill_value) + + assert data_arr.attrs["units"] == DEFAULT_UNITS[var_name] + + def _check_area(data_arr): from pyresample.geometry import SwathDefinition area = data_arr.attrs["area"] From fb21a71a2999c98ce68265d86b45c0b52270b1c9 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 3 Jan 2024 09:59:30 -0600 Subject: [PATCH 1009/1416] Fix floating point preservation in MiRS reader --- satpy/readers/mirs.py | 8 ++-- satpy/tests/reader_tests/test_mirs.py | 53 ++++++++++++++------------- 2 files changed, 32 insertions(+), 29 deletions(-) diff --git a/satpy/readers/mirs.py b/satpy/readers/mirs.py index 362ed1371c..3ddb223ec3 100644 --- a/satpy/readers/mirs.py +++ b/satpy/readers/mirs.py @@ -336,19 +336,21 @@ def _nan_for_dtype(data_arr_dtype): return np.timedelta64("NaT") if np.issubdtype(data_arr_dtype, np.datetime64): return np.datetime64("NaT") - return np.nan + return np.float32(np.nan) @staticmethod def _scale_data(data_arr, scale_factor, add_offset): """Scale data, if needed.""" scaling_needed = not (scale_factor == 1 and add_offset == 0) if scaling_needed: - data_arr = data_arr * scale_factor + add_offset + data_arr = data_arr * np.float32(scale_factor) + np.float32(add_offset) return data_arr def _fill_data(self, data_arr, fill_value, scale_factor, add_offset): """Fill missing data with NaN.""" if fill_value is not None: + # NOTE: Sfc_type and other category products are not detected or handled properly + # and will be converted from integers to 32-bit floats in this step fill_value = self._scale_data(fill_value, scale_factor, add_offset) fill_out = self._nan_for_dtype(data_arr.dtype) data_arr = data_arr.where(data_arr != fill_value, fill_out) 
@@ -373,7 +375,7 @@ def apply_attributes(self, data, ds_info): """ try: - global_attr_fill = self.nc.missing_value + global_attr_fill = self.nc.attrs["missing_value"] except AttributeError: global_attr_fill = 1.0 diff --git a/satpy/tests/reader_tests/test_mirs.py b/satpy/tests/reader_tests/test_mirs.py index f4c6bcbed1..b857147e47 100644 --- a/satpy/tests/reader_tests/test_mirs.py +++ b/satpy/tests/reader_tests/test_mirs.py @@ -43,21 +43,25 @@ N_CHANNEL = 22 N_FOV = 96 N_SCANLINE = 100 -DEFAULT_FILE_DTYPE = np.float64 +DEFAULT_FILE_DTYPE = np.float32 DEFAULT_2D_SHAPE = (N_SCANLINE, N_FOV) DEFAULT_DATE = datetime(2019, 6, 19, 13, 0) DEFAULT_LAT = np.linspace(23.09356, 36.42844, N_SCANLINE * N_FOV, dtype=DEFAULT_FILE_DTYPE) DEFAULT_LON = np.linspace(127.6879, 144.5284, N_SCANLINE * N_FOV, dtype=DEFAULT_FILE_DTYPE) -FREQ = xr.DataArray([23.8, 31.4, 50.3, 51.76, 52.8, 53.596, 54.4, 54.94, 55.5, - 57.29, 57.29, 57.29, 57.29, 57.29, 57.29, 88.2, 165.5, - 183.31, 183.31, 183.31, 183.31, 183.31][:N_CHANNEL], - dims="Channel", - attrs={"description": "Central Frequencies (GHz)"}) -POLO = xr.DataArray([2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3, 3, - 3, 3, 3][:N_CHANNEL], dims="Channel", - attrs={"description": "Polarizations"}) +FREQ = xr.DataArray( + np.array([23.8, 31.4, 50.3, 51.76, 52.8, 53.596, 54.4, 54.94, 55.5, + 57.29, 57.29, 57.29, 57.29, 57.29, 57.29, 88.2, 165.5, + 183.31, 183.31, 183.31, 183.31, 183.31][:N_CHANNEL], dtype=np.float32), + dims="Channel", + attrs={"description": "Central Frequencies (GHz)"}, +) +POLO = xr.DataArray( + np.array([2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3, 3, 3, 3, 3][:N_CHANNEL], dtype=np.int16), + dims="Channel", + attrs={"description": "Polarizations"}, +) DS_IDS = ["RR", "longitude", "latitude"] TEST_VARS = ["btemp_88v", "btemp_165h", @@ -125,7 +129,7 @@ def fake_coeff_from_fn(fn): def _get_datasets_with_attributes(**kwargs): """Represent files with two resolution of variables in them (ex. OCEAN).""" - bt = xr.DataArray(np.linspace(1830, 3930, N_SCANLINE * N_FOV * N_CHANNEL). + bt = xr.DataArray(np.linspace(1830, 3930, N_SCANLINE * N_FOV * N_CHANNEL, dtype=np.int16). 
reshape(N_SCANLINE, N_FOV, N_CHANNEL), attrs={"long_name": "Channel Temperature (K)", "units": "Kelvin", @@ -134,7 +138,7 @@ def _get_datasets_with_attributes(**kwargs): "_FillValue": -999, "valid_range": [0, 50000]}, dims=("Scanline", "Field_of_view", "Channel")) - rr = xr.DataArray(np.random.randint(100, 500, size=(N_SCANLINE, N_FOV)), + rr = xr.DataArray(np.random.randint(100, 500, size=(N_SCANLINE, N_FOV), dtype=np.int16), attrs={"long_name": "Rain Rate (mm/hr)", "units": "mm/hr", "coordinates": "Longitude Latitude", @@ -142,7 +146,7 @@ def _get_datasets_with_attributes(**kwargs): "_FillValue": -999, "valid_range": [0, 1000]}, dims=("Scanline", "Field_of_view")) - sfc_type = xr.DataArray(np.random.randint(0, 4, size=(N_SCANLINE, N_FOV)), + sfc_type = xr.DataArray(np.random.randint(0, 4, size=(N_SCANLINE, N_FOV), dtype=np.int16), attrs={"description": "type of surface:0-ocean," + "1-sea ice,2-land,3-snow", "units": "1", @@ -170,7 +174,7 @@ def _get_datasets_with_attributes(**kwargs): "Longitude": longitude } - attrs = {"missing_value": -999.} + attrs = {"missing_value": -999} ds = xr.Dataset(ds_vars, attrs=attrs) ds = ds.assign_coords({"Freq": FREQ, "Latitude": latitude, "Longitude": longitude}) return ds @@ -178,17 +182,17 @@ def _get_datasets_with_attributes(**kwargs): def _get_datasets_with_less_attributes(): """Represent files with two resolution of variables in them (ex. OCEAN).""" - bt = xr.DataArray(np.linspace(1830, 3930, N_SCANLINE * N_FOV * N_CHANNEL). + bt = xr.DataArray(np.linspace(1830, 3930, N_SCANLINE * N_FOV * N_CHANNEL, dtype=np.int16). reshape(N_SCANLINE, N_FOV, N_CHANNEL), attrs={"long_name": "Channel Temperature (K)", "scale_factor": 0.01}, dims=("Scanline", "Field_of_view", "Channel")) - rr = xr.DataArray(np.random.randint(100, 500, size=(N_SCANLINE, N_FOV)), + rr = xr.DataArray(np.random.randint(100, 500, size=(N_SCANLINE, N_FOV), dtype=np.int16), attrs={"long_name": "Rain Rate (mm/hr)", "scale_factor": 0.1}, dims=("Scanline", "Field_of_view")) - sfc_type = xr.DataArray(np.random.randint(0, 4, size=(N_SCANLINE, N_FOV)), + sfc_type = xr.DataArray(np.random.randint(0, 4, size=(N_SCANLINE, N_FOV), dtype=np.int16), attrs={"description": "type of surface:0-ocean," + "1-sea ice,2-land,3-snow"}, dims=("Scanline", "Field_of_view")) @@ -260,8 +264,12 @@ def test_basic_load(filenames, loadable_ids, platform_name, reader_kw): loaded_data_arrs = _load_and_check_limb_correction_variables(r, loadable_ids, platform_name, exp_limb_corr) for _data_id, data_arr_dask in loaded_data_arrs.items(): data_arr = data_arr_dask.compute() - assert data_arr.dtype is data_arr_dask.dtype - # assert data_arr.dtype is np.float32 + assert data_arr.dtype == data_arr_dask.dtype + if np.issubdtype(data_arr.dtype, np.floating): + # we started with float32, it should stay that way + # NOTE: Sfc_type does not have enough metadata to dynamically force integer type + # even though it is a mask/category product + assert data_arr.dtype.type == np.float32 _check_metadata(data_arr, test_data, platform_name) @@ -311,7 +319,7 @@ def _check_metadata(data_arr: xr.DataArray, test_data: xr.Dataset, platform_name var_name = data_arr.attrs["name"] if var_name not in ["latitude", "longitude"]: _check_area(data_arr) - _check_fill(data_arr) + assert "_FillValue" not in data_arr.attrs _check_attrs(data_arr, platform_name) input_fake_data = test_data["BT"] if "btemp" in var_name else test_data[var_name] @@ -331,13 +339,6 @@ def _check_area(data_arr): assert isinstance(area, SwathDefinition) -def _check_fill(data_arr): - 
assert "_FillValue" not in data_arr.attrs - if np.issubdtype(data_arr.dtype, np.floating): - # we started with float32, it should stay that way - assert data_arr.dtype.type == np.float64 - - def _check_valid_range(data_arr, test_valid_range): # valid_range is popped out of data_arr.attrs when it is applied assert "valid_range" not in data_arr.attrs From 9beba3c3b5fa7c531769b73a1e14d521e3c7b490 Mon Sep 17 00:00:00 2001 From: martin-rdz Date: Wed, 19 Jul 2023 17:28:49 +0000 Subject: [PATCH 1010/1416] adapt viirs_sdr reader for aws s3 --- satpy/readers/hdf5_utils.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/satpy/readers/hdf5_utils.py b/satpy/readers/hdf5_utils.py index 428d64e2f1..a4a92c2ad1 100644 --- a/satpy/readers/hdf5_utils.py +++ b/satpy/readers/hdf5_utils.py @@ -27,6 +27,7 @@ from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import np2str from satpy.utils import get_legacy_chunk_size +from satpy.readers import open_file_or_filename LOG = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() @@ -43,7 +44,8 @@ def __init__(self, filename, filename_info, filetype_info): self._attrs_cache = {} try: - file_handle = h5py.File(self.filename, "r") + f_obj = open_file_or_filename(self.filename) + file_handle = h5py.File(f_obj, "r") except IOError: LOG.exception( "Failed reading file %s. Possibly corrupted file", self.filename) @@ -73,7 +75,8 @@ def _collect_attrs(self, name, attrs): def get_reference(self, name, key): """Get reference.""" - with h5py.File(self.filename, "r") as hf: + f_obj = open_file_or_filename(self.filename) + with h5py.File(f_obj, "r") as hf: return self._get_reference(hf, hf[name].attrs[key]) def _get_reference(self, hf, ref): @@ -97,7 +100,8 @@ def __getitem__(self, key): val = self.file_content[key] if isinstance(val, h5py.Dataset): # these datasets are closed and inaccessible when the file is closed, need to reopen - dset = h5py.File(self.filename, "r")[key] + f_obj = open_file_or_filename(self.filename) + dset = h5py.File(f_obj, "r")[key] dset_data = da.from_array(dset, chunks=CHUNK_SIZE) attrs = self._attrs_cache.get(key, dset.attrs) if dset.ndim == 2: From 130361bc894c36fccb4131e17466abc4d52c80f5 Mon Sep 17 00:00:00 2001 From: martin-rdz Date: Tue, 12 Sep 2023 11:49:30 +0000 Subject: [PATCH 1011/1416] fix issue where open_file_or_filename tries to open pathlib path --- satpy/readers/__init__.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index c8fc0a8b69..5eeed7ca46 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -22,6 +22,7 @@ import os import pickle # nosec B403 import warnings +import pathlib from datetime import datetime, timedelta from functools import total_ordering @@ -779,8 +780,11 @@ def _get_compression(file): def open_file_or_filename(unknown_file_thing): """Try to open the *unknown_file_thing*, otherwise return the filename.""" - try: - f_obj = unknown_file_thing.open() - except AttributeError: + if isinstance(unknown_file_thing, (pathlib.WindowsPath, pathlib.PosixPath)): f_obj = unknown_file_thing + else: + try: + f_obj = unknown_file_thing.open() + except AttributeError: + f_obj = unknown_file_thing return f_obj From d3fe3fe5f71479ee5f4fae30dfe3728932a8cd2e Mon Sep 17 00:00:00 2001 From: martin-rdz Date: Tue, 3 Oct 2023 09:51:23 +0000 Subject: [PATCH 1012/1416] replaced PosixPath and WindowsPath with pathlib.Path following suggestion by @djhoese --- 
satpy/readers/__init__.py | 4 ++-- satpy/readers/hdf5_utils.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index 5eeed7ca46..7cac7edb5d 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -20,9 +20,9 @@ import logging import os +import pathlib import pickle # nosec B403 import warnings -import pathlib from datetime import datetime, timedelta from functools import total_ordering @@ -780,7 +780,7 @@ def _get_compression(file): def open_file_or_filename(unknown_file_thing): """Try to open the *unknown_file_thing*, otherwise return the filename.""" - if isinstance(unknown_file_thing, (pathlib.WindowsPath, pathlib.PosixPath)): + if isinstance(unknown_file_thing, pathlib.Path): f_obj = unknown_file_thing else: try: diff --git a/satpy/readers/hdf5_utils.py b/satpy/readers/hdf5_utils.py index a4a92c2ad1..cfce968cf3 100644 --- a/satpy/readers/hdf5_utils.py +++ b/satpy/readers/hdf5_utils.py @@ -24,10 +24,10 @@ import numpy as np import xarray as xr +from satpy.readers import open_file_or_filename from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import np2str from satpy.utils import get_legacy_chunk_size -from satpy.readers import open_file_or_filename LOG = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() From 1c65aaf8f42257cae0e58661a744b14d4ab37151 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 4 Jan 2024 14:01:37 -0600 Subject: [PATCH 1013/1416] Switch FSFile tests to pytest and fixtures --- satpy/tests/test_readers.py | 156 +++++++++++++++++++++--------------- 1 file changed, 91 insertions(+), 65 deletions(-) diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index d91e2b6fed..b7144391cf 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -18,11 +18,14 @@ """Test classes and functions in the readers/__init__.py module.""" import builtins +import contextlib import os import sys import unittest import warnings from contextlib import suppress +from pathlib import Path +from typing import Iterator from unittest import mock import pytest @@ -954,126 +957,149 @@ def _posixify_path(filename): return driveless_name.replace("\\", "/") -class TestFSFile(unittest.TestCase): - """Test the FSFile class.""" +@pytest.fixture(scope="module") +def random_string(): + """Random string to be used as fake file content.""" + return _generate_random_string() - def setUp(self): - """Set up the instance.""" - import tempfile - import zipfile - from pathlib import Path - import fsspec - self.random_string = _generate_random_string() - self.local_filename = os.path.join(tempfile.gettempdir(), self.random_string) - Path(self.local_filename).touch() - self.local_file = fsspec.open(self.local_filename) - - self.random_string2 = _generate_random_string() - self.local_filename2 = os.path.join(tempfile.gettempdir(), self.random_string2) - Path(self.local_filename2).touch() - self.zip_name = os.path.join(tempfile.gettempdir(), self.random_string2 + ".zip") - zip_file = zipfile.ZipFile(self.zip_name, "w", zipfile.ZIP_DEFLATED) - zip_file.write(self.local_filename2) - zip_file.close() - os.remove(self.local_filename2) +@pytest.fixture(scope="module") +def local_filename(tmp_path_factory, random_string): + """Create simple on-disk file.""" + with _local_file(tmp_path_factory, random_string) as local_path: + yield local_path - def tearDown(self): - """Destroy the instance.""" - os.remove(self.local_filename) - with 
suppress(PermissionError): - os.remove(self.zip_name) - def test_regular_filename_is_returned_with_str(self): +@contextlib.contextmanager +def _local_file(tmp_path_factory, filename: str) -> Iterator[Path]: + tmp_path = tmp_path_factory.mktemp("local_files") + local_filename = tmp_path / filename + local_filename.touch() + yield local_filename + local_filename.unlink() + + +@pytest.fixture(scope="module") +def local_file(local_filename): + """Open local file with fsspec.""" + import fsspec + + return fsspec.open(local_filename) + + +@pytest.fixture(scope="module") +def local_filename2(tmp_path_factory): + """Create a second local file.""" + random_string2 = _generate_random_string() + with _local_file(tmp_path_factory, random_string2) as local_path: + yield local_path + + +@pytest.fixture(scope="module") +def local_zip_file(local_filename2): + """Create local zip file containing one local file.""" + import zipfile + + zip_name = Path(str(local_filename2) + ".zip") + zip_file = zipfile.ZipFile(zip_name, "w", zipfile.ZIP_DEFLATED) + zip_file.write(local_filename2) + zip_file.close() + yield zip_name + with suppress(PermissionError): + zip_name.unlink() + + +class TestFSFile: + """Test the FSFile class.""" + + def test_regular_filename_is_returned_with_str(self, random_string): """Test that str give the filename.""" from satpy.readers import FSFile - assert str(FSFile(self.random_string)) == self.random_string + assert str(FSFile(random_string)) == random_string - def test_fsfile_with_regular_filename_abides_pathlike(self): + def test_fsfile_with_regular_filename_abides_pathlike(self, random_string): """Test that FSFile abides PathLike for regular filenames.""" from satpy.readers import FSFile - assert os.fspath(FSFile(self.random_string)) == self.random_string + assert os.fspath(FSFile(random_string)) == random_string - def test_fsfile_with_regular_filename_and_fs_spec_abides_pathlike(self): + def test_fsfile_with_regular_filename_and_fs_spec_abides_pathlike(self, random_string): """Test that FSFile abides PathLike for filename+fs instances.""" from satpy.readers import FSFile - assert os.fspath(FSFile(self.random_string, fs=None)) == self.random_string + assert os.fspath(FSFile(random_string, fs=None)) == random_string - def test_fsfile_with_pathlike(self): + def test_fsfile_with_pathlike(self, local_filename): """Test FSFile with path-like object.""" from pathlib import Path from satpy.readers import FSFile - f = FSFile(Path(self.local_filename)) - assert str(f) == os.fspath(f) == self.local_filename + f = FSFile(Path(local_filename)) + assert str(f) == os.fspath(f) == str(local_filename) - def test_fsfile_with_fs_open_file_abides_pathlike(self): + def test_fsfile_with_fs_open_file_abides_pathlike(self, local_file, random_string): """Test that FSFile abides PathLike for fsspec OpenFile instances.""" from satpy.readers import FSFile - assert os.fspath(FSFile(self.local_file)).endswith(self.random_string) + assert os.fspath(FSFile(local_file)).endswith(random_string) - def test_repr_includes_filename(self): + def test_repr_includes_filename(self, local_file, random_string): """Test that repr includes the filename.""" from satpy.readers import FSFile - assert self.random_string in repr(FSFile(self.local_file)) + assert random_string in repr(FSFile(local_file)) - def test_open_regular_file(self): + def test_open_regular_file(self, local_filename): """Test opening a regular file.""" from satpy.readers import FSFile - _assert_is_open_file_and_close(FSFile(self.local_filename).open()) + 
_assert_is_open_file_and_close(FSFile(local_filename).open()) - def test_open_local_fs_file(self): + def test_open_local_fs_file(self, local_file): """Test opening a localfs file.""" from satpy.readers import FSFile - _assert_is_open_file_and_close(FSFile(self.local_file).open()) + _assert_is_open_file_and_close(FSFile(local_file).open()) - def test_open_zip_fs_regular_filename(self): + def test_open_zip_fs_regular_filename(self, local_filename2, local_zip_file): """Test opening a zipfs with a regular filename provided.""" from fsspec.implementations.zip import ZipFileSystem from satpy.readers import FSFile - zip_fs = ZipFileSystem(self.zip_name) - file = FSFile(_posixify_path(self.local_filename2), zip_fs) + zip_fs = ZipFileSystem(local_zip_file) + file = FSFile(_posixify_path(local_filename2), zip_fs) _assert_is_open_file_and_close(file.open()) - def test_open_zip_fs_openfile(self): + def test_open_zip_fs_openfile(self, local_filename2, local_zip_file): """Test opening a zipfs openfile.""" import fsspec from satpy.readers import FSFile - open_file = fsspec.open("zip:/" + _posixify_path(self.local_filename2) + "::file://" + self.zip_name) + open_file = fsspec.open("zip:/" + _posixify_path(local_filename2) + "::file://" + str(local_zip_file)) file = FSFile(open_file) _assert_is_open_file_and_close(file.open()) - def test_sorting_fsfiles(self): + def test_sorting_fsfiles(self, local_filename, local_filename2, local_zip_file): """Test sorting FSFiles.""" from fsspec.implementations.zip import ZipFileSystem from satpy.readers import FSFile - zip_fs = ZipFileSystem(self.zip_name) - file1 = FSFile(self.local_filename2, zip_fs) + zip_fs = ZipFileSystem(local_zip_file) + file1 = FSFile(local_filename2, zip_fs) - file2 = FSFile(self.local_filename) + file2 = FSFile(local_filename) extra_file = os.path.normpath("/somedir/bla") sorted_filenames = [os.fspath(file) for file in sorted([file1, file2, extra_file])] expected_filenames = sorted([extra_file, os.fspath(file1), os.fspath(file2)]) assert sorted_filenames == expected_filenames - def test_equality(self): + def test_equality(self, local_filename, local_filename2, local_zip_file): """Test that FSFile compares equal when it should.""" from fsspec.implementations.zip import ZipFileSystem from satpy.readers import FSFile - zip_fs = ZipFileSystem(self.zip_name) - assert FSFile(self.local_filename) == FSFile(self.local_filename) - assert (FSFile(self.local_filename, zip_fs) == - FSFile(self.local_filename, zip_fs)) - assert (FSFile(self.local_filename, zip_fs) != - FSFile(self.local_filename)) - assert FSFile(self.local_filename) != FSFile(self.local_filename2) - - def test_hash(self): + zip_fs = ZipFileSystem(local_zip_file) + assert FSFile(local_filename) == FSFile(local_filename) + assert (FSFile(local_filename, zip_fs) == FSFile(local_filename, zip_fs)) + assert (FSFile(local_filename, zip_fs) != FSFile(local_filename)) + assert FSFile(local_filename) != FSFile(local_filename2) + + def test_hash(self, local_filename, local_filename2, local_zip_file): """Test that FSFile hashing behaves sanely.""" from fsspec.implementations.cached import CachingFileSystem from fsspec.implementations.local import LocalFileSystem @@ -1082,9 +1108,9 @@ def test_hash(self): from satpy.readers import FSFile lfs = LocalFileSystem() - zfs = ZipFileSystem(self.zip_name) + zfs = ZipFileSystem(local_zip_file) cfs = CachingFileSystem(fs=lfs) # make sure each name/fs-combi has its own hash assert len({hash(FSFile(fn, fs)) - for fn in {self.local_filename, self.local_filename2} 
- for fs in [None, lfs, zfs, cfs]}) == 2*4 + for fn in {local_filename, local_filename2} + for fs in [None, lfs, zfs, cfs]}) == 2 * 4 From 362c2e9d2a46f7a91e9ebc9e18fd6fc4b06274f3 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 4 Jan 2024 14:15:34 -0600 Subject: [PATCH 1014/1416] Add more details to open_file_or_filename docstring --- satpy/readers/__init__.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index 7cac7edb5d..21554ba465 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -779,7 +779,15 @@ def _get_compression(file): def open_file_or_filename(unknown_file_thing): - """Try to open the *unknown_file_thing*, otherwise return the filename.""" + """Try to open the provided file "thing" if needed, otherwise return the filename or Path. + + This wraps the logic of getting something like an fsspec OpenFile object + that is not directly supported by most reading libraries and making it + usable. If a :class:`pathlib.Path` object or something that is not + open-able is provided then that object is passed along. In the case of + fsspec OpenFiles their ``.open()`` method is called and the result returned. + + """ if isinstance(unknown_file_thing, pathlib.Path): f_obj = unknown_file_thing else: From c5d4f6b7b1f3b21afd506e5cbdd5eb4ddf7515b2 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 4 Jan 2024 14:32:43 -0600 Subject: [PATCH 1015/1416] Add initial open_file_or_filename tests --- satpy/tests/test_readers.py | 121 ++++++++++++++++++++++++++++++++++++ 1 file changed, 121 insertions(+) diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index b7144391cf..0dad685694 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -28,7 +28,10 @@ from typing import Iterator from unittest import mock +import numpy as np import pytest +import xarray as xr +from pytest_lazyfixture import lazy_fixture from satpy.dataset.data_dict import get_key from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange @@ -1114,3 +1117,121 @@ def test_hash(self, local_filename, local_filename2, local_zip_file): assert len({hash(FSFile(fn, fs)) for fn in {local_filename, local_filename2} for fs in [None, lfs, zfs, cfs]}) == 2 * 4 + + +@pytest.fixture(scope="module") +def local_netcdf_filename(tmp_path_factory): + """Create a simple local NetCDF file.""" + filename = tmp_path_factory.mktemp("fake_netcdfs") / "test.nc" + ds = xr.Dataset() + ds.attrs = { + "attr1": "a", + "attr2": 2, + } + ds["var1"] = xr.DataArray(np.zeros((10, 10), dtype=np.int16), dims=("y", "x")) + ds.to_netcdf(filename) + + yield str(filename) + filename.unlink() + + +@pytest.fixture(scope="module") +def local_netcdf_path(local_netcdf_filename): + """Get Path object pointing to local netcdf file.""" + return Path(local_netcdf_filename) + + +@pytest.fixture(scope="module") +def local_netcdf_fsspec(local_netcdf_filename): + """Get fsspec OpenFile object pointing to local netcdf file.""" + import fsspec + + return fsspec.open(local_netcdf_filename) + + +@pytest.fixture(scope="module") +def local_netcdf_fsfile(local_netcdf_fsspec): + """Get FSFile object wrapping an fsspec OpenFile pointing to local netcdf file.""" + from satpy.readers import FSFile + + return FSFile(local_netcdf_fsspec) + + +def _open_xarray_netcdf4(): + from functools import partial + + pytest.importorskip("netCDF4") + return partial(xr.open_dataset, engine="netcdf4") + + +def _open_xarray_h5netcdf(): + from functools 
import partial
+
+    pytest.importorskip("h5netcdf")
+    return partial(xr.open_dataset, engine="h5netcdf")
+
+
+def _open_xarray_default():
+    pytest.importorskip("netCDF4")
+    pytest.importorskip("h5netcdf")
+    return xr.open_dataset
+
+
+@pytest.fixture(scope="module")
+def local_hdf5_filename(tmp_path_factory):
+    """Create on-disk HDF5 file."""
+    import h5py
+
+    filename = tmp_path_factory.mktemp("fake_hdf5s") / "test.h5"
+    h = h5py.File(filename, "w")
+    h.create_dataset("var1", data=np.zeros((10, 10), dtype=np.int16))
+    h.close()
+
+    yield str(filename)
+    filename.unlink()
+
+
+@pytest.fixture(scope="module")
+def local_hdf5_path(local_hdf5_filename):
+    """Get Path object pointing to local HDF5 file."""
+    return Path(local_hdf5_filename)
+
+
+@pytest.fixture(scope="module")
+def local_hdf5_fsspec(local_hdf5_filename):
+    """Get fsspec OpenFile pointing to local HDF5 file."""
+    import fsspec
+
+    return fsspec.open(local_hdf5_filename)
+
+
+def _open_h5py():
+    h5py = pytest.importorskip("h5py")
+    return h5py.File
+
+
+@pytest.mark.parametrize(
+    ("file_thing", "create_read_func"),
+    [
+        (lazy_fixture("local_netcdf_filename"), _open_xarray_default),
+        (lazy_fixture("local_netcdf_filename"), _open_xarray_netcdf4),
+        (lazy_fixture("local_netcdf_filename"), _open_xarray_h5netcdf),
+        (lazy_fixture("local_netcdf_path"), _open_xarray_default),
+        (lazy_fixture("local_netcdf_path"), _open_xarray_netcdf4),
+        (lazy_fixture("local_netcdf_path"), _open_xarray_h5netcdf),
+        (lazy_fixture("local_netcdf_fsspec"), _open_xarray_default),
+        (lazy_fixture("local_netcdf_fsspec"), _open_xarray_h5netcdf),
+        (lazy_fixture("local_netcdf_fsfile"), _open_xarray_default),
+        (lazy_fixture("local_netcdf_fsfile"), _open_xarray_h5netcdf),
+        (lazy_fixture("local_hdf5_filename"), _open_h5py),
+        (lazy_fixture("local_hdf5_path"), _open_h5py),
+        (lazy_fixture("local_hdf5_fsspec"), _open_h5py),
+    ],
+)
+def test_open_file_or_filename(file_thing, create_read_func):
+    """Test various combinations of file-like things and opening them with various libraries."""
+    from satpy.readers import open_file_or_filename
+
+    read_func = create_read_func()
+    open_thing = open_file_or_filename(file_thing)
+    read_func(open_thing)

From 180a0b8bdb416862b71d6e0bc51b07841c397393 Mon Sep 17 00:00:00 2001
From: David Hoese
Date: Thu, 4 Jan 2024 15:01:08 -0600
Subject: [PATCH 1016/1416] Allow Windows temporary files to fail to be removed

---
 satpy/tests/test_readers.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py
index 0dad685694..beeb16b972 100644
--- a/satpy/tests/test_readers.py
+++ b/satpy/tests/test_readers.py
@@ -1132,7 +1132,8 @@ def local_netcdf_filename(tmp_path_factory):
     ds.to_netcdf(filename)
 
     yield str(filename)
-    filename.unlink()
+    with suppress(PermissionError):
+        filename.unlink()
 
 
 @pytest.fixture(scope="module")
@@ -1188,7 +1189,8 @@ def local_hdf5_filename(tmp_path_factory):
     h.close()
 
     yield str(filename)
-    filename.unlink()
+    with suppress(PermissionError):
+        filename.unlink()
 
 
 @pytest.fixture(scope="module")
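The tests above pin down the contract of ``open_file_or_filename``: plain strings and ``pathlib.Path`` objects pass through untouched, while fsspec ``OpenFile`` and ``FSFile`` objects are opened before being handed to the reading library. A minimal sketch of the behaviour these tests expect (the bucket and file names here are only illustrative)::

    import fsspec

    from satpy.readers import FSFile, open_file_or_filename

    # A plain path-like thing comes back unchanged...
    assert open_file_or_filename("local_granule.h5") == "local_granule.h5"

    # ...while an fsspec OpenFile is opened first, so h5py/xarray can use it
    remote = fsspec.open("s3://some-bucket/remote_granule.h5", anon=True)
    file_obj = open_file_or_filename(FSFile(remote))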
From 2185a19a797cd8e56f56e3cfceb756da833d1664 Mon Sep 17 00:00:00 2001
From: Trygve Aspenes
Date: Fri, 5 Jan 2024 16:07:34 +0100
Subject: [PATCH 1017/1416] add config option to turn off mitiff corner correction

---
 satpy/tests/writer_tests/test_mitiff.py | 51 +++++++++++++++++++++++++
 satpy/writers/mitiff.py                 | 24 ++++++++----
 2 files changed, 67 insertions(+), 8 deletions(-)

diff --git a/satpy/tests/writer_tests/test_mitiff.py b/satpy/tests/writer_tests/test_mitiff.py
index 4e8878687a..1a9e8850d6 100644
--- a/satpy/tests/writer_tests/test_mitiff.py
+++ b/satpy/tests/writer_tests/test_mitiff.py
@@ -879,6 +879,57 @@ def test_convert_proj4_string(self):
             proj4_string = w._add_proj4_string(ds1, ds1)
             assert proj4_string == check["proj4"]
 
+    def test_correction_proj4_string(self):
+        """Test correction of proj4 lower left coordinate."""
+        import dask.array as da
+        import xarray as xr
+        from pyresample.geometry import AreaDefinition
+
+        from satpy.writers.mitiff import MITIFFWriter
+        area_def = AreaDefinition(
+            "test",
+            "test",
+            "test",
+            "+proj=merc",
+            100,
+            200,
+            (-1000., -1500., 1000., 1500.),
+        )
+
+        ds1 = xr.DataArray(
+            da.zeros((10, 20), chunks=20),
+            dims=("y", "x"),
+            attrs={"area": area_def}
+        )
+        default_expected_proj4_string = ' Proj string: +init=EPSG:3395 +towgs84=0,0,0 +units=km +x_0=1020.000000 +y_0=1515.000000\n'
+        w = MITIFFWriter(filename="dummy.tif", base_dir=self.base_dir)
+        proj4_string = w._add_proj4_string(ds1, ds1)
+        assert proj4_string == default_expected_proj4_string
+
+        kwargs = {'mitiff_pixel_adjustment': False}
+        new_expected_proj4_string = ' Proj string: +init=EPSG:3395 +towgs84=0,0,0 +units=km +x_0=1000.000000 +y_0=1500.000000\n'
+        w = MITIFFWriter(filename="dummy.tif", base_dir=self.base_dir)
+        proj4_string = w._add_proj4_string(ds1, ds1, **kwargs)
+        assert proj4_string == new_expected_proj4_string
+
+        area_def2 = AreaDefinition(
+            "test",
+            "test",
+            "test",
+            "+proj=merc +x_0=0 +y_0=0",
+            100,
+            200,
+            (-1000., -1500., 1000., 1500.),
+        )
+        ds2 = xr.DataArray(
+            da.zeros((10, 20), chunks=20),
+            dims=("y", "x"),
+            attrs={"area": area_def2}
+        )
+        w = MITIFFWriter(filename="dummy.tif", base_dir=self.base_dir)
+        proj4_string = w._add_proj4_string(ds2, ds2, **kwargs)
+        assert proj4_string == new_expected_proj4_string
+
     def test_save_dataset_palette(self):
         """Test writer operation as palette."""
         from satpy.writers.mitiff import MITIFFWriter
diff --git a/satpy/writers/mitiff.py b/satpy/writers/mitiff.py
index 3658ac16b7..a97fe5de24 100644
--- a/satpy/writers/mitiff.py
+++ b/satpy/writers/mitiff.py
@@ -220,7 +220,7 @@ def _add_sizes(self, datasets, first_dataset):
 
         return _image_description
 
-    def _add_proj4_string(self, datasets, first_dataset):
+    def _add_proj4_string(self, datasets, first_dataset, **kwargs):
         import warnings
         proj4_string = " Proj string: "
 
@@ -259,31 +259,39 @@ def _add_proj4_string(self, datasets, first_dataset):
         if "units" not in proj4_string:
             proj4_string += " +units=km"
 
-        proj4_string = self._append_projection_center(proj4_string, datasets, first_dataset, x_0, y_0)
+        proj4_string = self._append_projection_center(proj4_string, datasets, first_dataset, x_0, y_0, **kwargs)
         LOG.debug("proj4_string: %s", proj4_string)
         proj4_string += "\n"
 
         return proj4_string
 
-    def _append_projection_center(self, proj4_string, datasets, first_dataset, x_0, y_0):
+    def _append_projection_center(self, proj4_string, datasets, first_dataset, x_0, y_0, **kwargs):
         if isinstance(datasets, list):
             dataset = first_dataset
         else:
             dataset = datasets
+        corner_correction_x = dataset.attrs["area"].pixel_size_x
+        corner_correction_y = dataset.attrs["area"].pixel_size_y
+        try:
+            if kwargs['mitiff_pixel_adjustment'] is False:
+                corner_correction_x = 0
+                corner_correction_y = 0
+        except KeyError:
+            pass
         if "x_0" not in proj4_string:
             proj4_string += " +x_0=%.6f" % (
                 (-dataset.attrs["area"].area_extent[0] +
-                 dataset.attrs["area"].pixel_size_x) + x_0)
+                 corner_correction_x) + x_0)
             proj4_string += " +y_0=%.6f" % (
(-dataset.attrs["area"].area_extent[1] + - dataset.attrs["area"].pixel_size_y) + y_0) + corner_correction_y) + y_0) elif "+x_0=0" in proj4_string and "+y_0=0" in proj4_string: proj4_string = proj4_string.replace("+x_0=0", "+x_0=%.6f" % ( (-dataset.attrs["area"].area_extent[0] + - dataset.attrs["area"].pixel_size_x) + x_0)) + corner_correction_x) + x_0)) proj4_string = proj4_string.replace("+y_0=0", "+y_0=%.6f" % ( (-dataset.attrs["area"].area_extent[1] + - dataset.attrs["area"].pixel_size_y) + y_0)) + corner_correction_y) + y_0)) return proj4_string def _convert_epsg_to_proj(self, proj4_string, x_0): @@ -563,7 +571,7 @@ def _make_image_description(self, datasets, **kwargs): _image_description += " Map projection: Stereographic\n" - _image_description += self._add_proj4_string(datasets, first_dataset) + _image_description += self._add_proj4_string(datasets, first_dataset, **kwargs) _image_description += " TrueLat: 60N\n" _image_description += " GridRot: 0\n" From c3eb0ecb7dbee225db54764bea064aac88d392e4 Mon Sep 17 00:00:00 2001 From: Trygve Aspenes Date: Fri, 5 Jan 2024 16:16:00 +0100 Subject: [PATCH 1018/1416] refactor a bit --- satpy/writers/mitiff.py | 32 ++++++++++++++++++-------------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/satpy/writers/mitiff.py b/satpy/writers/mitiff.py index a97fe5de24..bf27d19840 100644 --- a/satpy/writers/mitiff.py +++ b/satpy/writers/mitiff.py @@ -259,25 +259,18 @@ def _add_proj4_string(self, datasets, first_dataset, **kwargs): if "units" not in proj4_string: proj4_string += " +units=km" - proj4_string = self._append_projection_center(proj4_string, datasets, first_dataset, x_0, y_0, **kwargs) + if isinstance(datasets, list): + _dataset = first_dataset + else: + _dataset = datasets + proj4_string = self._append_projection_center(proj4_string, _dataset, x_0, y_0, **kwargs) LOG.debug("proj4_string: %s", proj4_string) proj4_string += "\n" return proj4_string - def _append_projection_center(self, proj4_string, datasets, first_dataset, x_0, y_0, **kwargs): - if isinstance(datasets, list): - dataset = first_dataset - else: - dataset = datasets - corner_correction_x = dataset.attrs["area"].pixel_size_x - corner_correction_y = dataset.attrs["area"].pixel_size_y - try: - if kwargs['mitiff_pixel_adjustment'] is False: - corner_correction_x = 0 - corner_correction_y = 0 - except KeyError: - pass + def _append_projection_center(self, proj4_string, dataset, x_0, y_0, **kwargs): + corner_correction_x, corner_correction_y = self._set_correction_size(dataset, kwargs) if "x_0" not in proj4_string: proj4_string += " +x_0=%.6f" % ( (-dataset.attrs["area"].area_extent[0] + @@ -294,6 +287,17 @@ def _append_projection_center(self, proj4_string, datasets, first_dataset, x_0, corner_correction_y) + y_0)) return proj4_string + def _set_correction_size(self, dataset, kwargs): + corner_correction_x = dataset.attrs["area"].pixel_size_x + corner_correction_y = dataset.attrs["area"].pixel_size_y + try: + if kwargs['mitiff_pixel_adjustment'] is False: + corner_correction_x = 0 + corner_correction_y = 0 + except KeyError: + pass + return corner_correction_x,corner_correction_y + def _convert_epsg_to_proj(self, proj4_string, x_0): if "EPSG:32631" in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32631", From 814c5fe4df9ee16189cf4df73f1012091ed6b6fe Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Fri, 5 Jan 2024 23:21:20 +0800 Subject: [PATCH 1019/1416] Update __init__.py --- satpy/composites/__init__.py | 44 +++++++++++++++++++++++++----------- 1 file 
changed, 31 insertions(+), 13 deletions(-)

diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py
index eefd5258ef..46e11447e3 100644
--- a/satpy/composites/__init__.py
+++ b/satpy/composites/__init__.py
@@ -1694,15 +1694,16 @@ def __call__(self, projectables, *args, **kwargs):
         # RGB/RGBA -> RGBA/RGBA
         foreground = add_bands(foreground, background["bands"])
         background = add_bands(background, foreground["bands"])
-        after_bg_mode = background.attrs["mode"]
 
         # It's important to judge whether the alpha band of background is initially generated, e.g. by CloudCompositor
-        # Or it's just added through 'add_bands'
         # The result will be used to decide the output image mode
-        initial_bg_alpha = True if "A" in before_bg_mode and "A" in after_bg_mode else False
+        initial_bg_alpha = "A" in before_bg_mode
 
         attrs = self._combine_metadata_with_mode_and_sensor(foreground, background)
-        data = self._get_merged_image_data(foreground, background, initial_bg_alpha=initial_bg_alpha)
+        if "A" not in foreground.attrs["mode"] and "A" not in background.attrs["mode"]:
+            data = self._simple_overlay(foreground, background)
+        else:
+            data = self._get_merged_image_data(foreground, background, initial_bg_alpha=initial_bg_alpha)
         for data_arr in data:
             data_arr.attrs = attrs
         res = super(BackgroundCompositor, self).__call__(data, **kwargs)
@@ -1739,7 +1740,7 @@ def _get_alpha(dataset: xr.DataArray):
             alpha = xr.full_like(first_band, 1)
             alpha["bands"] = "A"
 
-        # There could be Nans in the alpha, especially through 'add_bands'
+        # There could be Nans in the alpha
         # Replace them with 0 to prevent cases like 1 + nan = nan, so they won't affect new_alpha
         alpha = xr.where(alpha.isnull(), 0, alpha)
 
@@ -1751,27 +1752,44 @@
 
         data = []
 
-        # Unless background has an initial alpha band, there will be no alpha band in the output image
-        # Let the writer decide
-        output_mode = background.attrs["mode"] if initial_bg_alpha else background.attrs["mode"].replace("A", "")
+        # Pass the image data (alpha band will be dropped temporarily) to the writer
+        output_mode = background.attrs["mode"].replace("A", "")
 
-        # If we let the writer decide alpha band, we must fill the transparent areas in the image with np.nan first
-        # The best way is through the new alpha
+        # For more info about alpha compositing please review https://en.wikipedia.org/wiki/Alpha_compositing
+        # Whether there's no initial alpha band, or it has been dropped, we're actually asking the writer for decision
+        # So first, we must fill the transparent areas in the image with np.nan
+        # The best way is through a modified version of new alpha
         new_alpha_nan = xr.where(alpha_fore + alpha_back == 0, np.nan, new_alpha) if "A" not in output_mode \
             else new_alpha
 
         for band in output_mode:
-            fg_band = foreground.sel(bands=band) if band != "A" else new_alpha
-            bg_band = background.sel(bands=band) if band != "A" else new_alpha
+            fg_band = foreground.sel(bands=band)
+            bg_band = background.sel(bands=band)
             chan = (fg_band * alpha_fore +
-                    bg_band * alpha_back * (1 - alpha_fore)) / new_alpha_nan if band != "A" else new_alpha
+                    bg_band * alpha_back * (1 - alpha_fore)) / new_alpha_nan
             chan = xr.where(chan.isnull(), bg_band * alpha_back, chan)
             chan["bands"] = band
             data.append(chan)
 
+        # If background has an initial alpha band, it will also be passed to the writer
+        if initial_bg_alpha:
+            new_alpha["bands"] = "A"
+            data.append(new_alpha)
+
         return data
 
+    @staticmethod
+    def _simple_overlay(foreground: xr.DataArray,
+                        background: xr.DataArray,) -> list[xr.DataArray]:
+        # This is for the case when no alpha bands are involved
+        # Just simply lay the foreground upon background
+        data_arr = xr.where(foreground.isnull(), background, foreground)
+        # Split to separate bands so the mode is correct
+        data = [data_arr.sel(bands=b) for b in data_arr["bands"]]
+        return data
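The merge loop above is the standard "over" operator from alpha compositing: the combined alpha is ``alpha_fore + alpha_back * (1 - alpha_fore)`` and each channel is the alpha-weighted blend of the two inputs divided by that combined alpha. A worked scalar example (the values are chosen only for illustration)::

    import numpy as np

    alpha_fore, alpha_back = np.float32(0.6), np.float32(1.0)
    fg_band, bg_band = np.float32(0.8), np.float32(0.2)

    new_alpha = alpha_fore + alpha_back * (1 - alpha_fore)  # 1.0
    chan = (fg_band * alpha_fore + bg_band * alpha_back * (1 - alpha_fore)) / new_alpha
    print(chan)  # ~0.56: 60% of the foreground plus 40% of the background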
From 33aeb191747451393c195f71bc04a22b5c8b0413 Mon Sep 17 00:00:00 2001
From: Trygve Aspenes
Date: Fri, 5 Jan 2024 16:27:19 +0100
Subject: [PATCH 1020/1416] refactor a bit more

---
 satpy/writers/mitiff.py | 36 +++++++++++++++++++-----------------
 1 file changed, 19 insertions(+), 17 deletions(-)

diff --git a/satpy/writers/mitiff.py b/satpy/writers/mitiff.py
index bf27d19840..2e3cd6d600 100644
--- a/satpy/writers/mitiff.py
+++ b/satpy/writers/mitiff.py
@@ -245,6 +245,20 @@ def _add_proj4_string(self, datasets, first_dataset, **kwargs):
         # FUTURE: Use pyproj 2.0+ to convert EPSG to PROJ4 if possible
         proj4_string, x_0 = self._convert_epsg_to_proj(proj4_string, x_0)
 
+        proj4_string = self._special_correction_of_proj_string(proj4_string)
+
+        if isinstance(datasets, list):
+            _dataset = first_dataset
+        else:
+            _dataset = datasets
+        mitiff_pixel_adjustment = kwargs.get("mitiff_pixel_adjustment", False)
+        proj4_string = self._append_projection_center(proj4_string, _dataset, x_0, y_0, mitiff_pixel_adjustment)
+        LOG.debug("proj4_string: %s", proj4_string)
+        proj4_string += "\n"
+
+        return proj4_string
+
+    def _special_correction_of_proj_string(self, proj4_string):
         if "geos" in proj4_string:
             proj4_string = proj4_string.replace("+sweep=x ", "")
             if "+a=6378137.0 +b=6356752.31414" in proj4_string:
@@ -272,16 +286,7 @@ def _add_proj4_string(self, datasets, first_dataset, **kwargs):
 
         if "units" not in proj4_string:
             proj4_string += " +units=km"
-
-        if isinstance(datasets, list):
-            _dataset = first_dataset
-        else:
-            _dataset = datasets
-        proj4_string = self._append_projection_center(proj4_string, _dataset, x_0, y_0, **kwargs)
-        LOG.debug("proj4_string: %s", proj4_string)
-        proj4_string += "\n"
-        return proj4_string
+        return proj4_string
 
-    def _append_projection_center(self, proj4_string, dataset, x_0, y_0, **kwargs):
-        corner_correction_x, corner_correction_y = self._set_correction_size(dataset, kwargs)
+    def _append_projection_center(self, proj4_string, dataset, x_0, y_0, mitiff_pixel_adjustment):
+        corner_correction_x, corner_correction_y = self._set_correction_size(dataset, mitiff_pixel_adjustment)
@@ -295,12 +300,9 @@ def _append_projection_center(self, proj4_string, dataset, x_0, y_0, mitiff_pixe
     def _set_correction_size(self, dataset, kwargs):
         corner_correction_x = dataset.attrs["area"].pixel_size_x
         corner_correction_y = dataset.attrs["area"].pixel_size_y
-        try:
-            if kwargs['mitiff_pixel_adjustment'] is False:
-                corner_correction_x = 0
-                corner_correction_y = 0
-        except KeyError:
-            pass
+        if kwargs.get("mitiff_pixel_adjustment", False):
+            corner_correction_x = 0
+            corner_correction_y = 0
         return corner_correction_x,corner_correction_y

From c189f214342d516cb7e0b762e4311faea78f53ac Mon Sep 17 00:00:00 2001
From: Trygve Aspenes
Date: Fri, 5 Jan 2024 16:43:39 +0100
Subject: [PATCH 1021/1416] Update tests after refactor

---
 satpy/tests/writer_tests/test_mitiff.py | 33 ++++++-------------------
 satpy/writers/mitiff.py                 |  6 ++---
 2 files changed, 11 insertions(+), 28 deletions(-)

diff --git a/satpy/tests/writer_tests/test_mitiff.py 
b/satpy/tests/writer_tests/test_mitiff.py index 1a9e8850d6..2dafdd5896 100644 --- a/satpy/tests/writer_tests/test_mitiff.py +++ b/satpy/tests/writer_tests/test_mitiff.py @@ -901,34 +901,17 @@ def test_correction_proj4_string(self): dims=("y", "x"), attrs={"area": area_def} ) - default_expected_proj4_string = ' Proj string: +init=EPSG:3395 +towgs84=0,0,0 +units=km +x_0=1020.000000 +y_0=1515.000000\n' + default_expected_correction = (20.0, 15.0) w = MITIFFWriter(filename="dummy.tif", base_dir=self.base_dir) - proj4_string = w._add_proj4_string(ds1, ds1) - assert proj4_string == default_expected_proj4_string + mitiff_pixel_adjustment = True + correction = w._set_correction_size(ds1, mitiff_pixel_adjustment) + assert correction == default_expected_correction - kwargs = {'mitiff_pixel_adjustment': False} - new_expected_proj4_string = ' Proj string: +init=EPSG:3395 +towgs84=0,0,0 +units=km +x_0=1000.000000 +y_0=1500.000000\n' + mitiff_pixel_adjustment = False + new_expected_correction = (0, 0) w = MITIFFWriter(filename="dummy.tif", base_dir=self.base_dir) - proj4_string = w._add_proj4_string(ds1, ds1, **kwargs) - assert proj4_string == new_expected_proj4_string - - area_def2 = AreaDefinition( - "test", - "test", - "test", - "+proj=merc +x_0=0 +y_0=0", - 100, - 200, - (-1000., -1500., 1000., 1500.), - ) - ds2 = xr.DataArray( - da.zeros((10, 20), chunks=20), - dims=("y", "x"), - attrs={"area": area_def2} - ) - w = MITIFFWriter(filename="dummy.tif", base_dir=self.base_dir) - proj4_string = w._add_proj4_string(ds2, ds2, **kwargs) - assert proj4_string == new_expected_proj4_string + correction = w._set_correction_size(ds1, mitiff_pixel_adjustment) + assert correction == new_expected_correction def test_save_dataset_palette(self): """Test writer operation as palette.""" diff --git a/satpy/writers/mitiff.py b/satpy/writers/mitiff.py index 2e3cd6d600..b005d37ec5 100644 --- a/satpy/writers/mitiff.py +++ b/satpy/writers/mitiff.py @@ -251,7 +251,7 @@ def _add_proj4_string(self, datasets, first_dataset, **kwargs): _dataset = first_dataset else: _dataset = datasets - mitiff_pixel_adjustment = kwargs.get("mitiff_pixel_adjustment", False) + mitiff_pixel_adjustment = kwargs.get("mitiff_pixel_adjustment", True) proj4_string = self._append_projection_center(proj4_string, _dataset, x_0, y_0, mitiff_pixel_adjustment) LOG.debug("proj4_string: %s", proj4_string) proj4_string += "\n" @@ -292,10 +292,10 @@ def _append_projection_center(self, proj4_string, dataset, x_0, y_0, mitiff_pixe corner_correction_y) + y_0)) return proj4_string - def _set_correction_size(self, dataset, kwargs): + def _set_correction_size(self, dataset, mitiff_pixel_adjustment): corner_correction_x = dataset.attrs["area"].pixel_size_x corner_correction_y = dataset.attrs["area"].pixel_size_y - if kwargs.get("mitiff_pixel_adjustment", False): + if not mitiff_pixel_adjustment: corner_correction_x = 0 corner_correction_y = 0 return corner_correction_x,corner_correction_y From afe843b3077959b209a3b02dc24f738c34d9a76e Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 5 Jan 2024 14:02:32 -0600 Subject: [PATCH 1022/1416] Add QC-based filtering to the VIIRS EDR AOD550 product --- satpy/etc/readers/viirs_edr.yaml | 2 +- satpy/readers/viirs_edr.py | 31 ++++++++++++++++++++ satpy/tests/reader_tests/test_viirs_edr.py | 34 +++++++++++++++++++++- 3 files changed, 65 insertions(+), 2 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index c078e754aa..37f36934b8 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ 
b/satpy/etc/readers/viirs_edr.yaml
@@ -25,7 +25,7 @@ file_types:
     file_patterns:
       - 'JRR-CloudHeight_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc'
   jrr_aod:
-    file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler
+    file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSAODHandler
     file_patterns:
      - 'JRR-AOD_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc'
   jrr_lst:
diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py
index eaf2f53d42..de89a3dc0d 100644
--- a/satpy/readers/viirs_edr.py
+++ b/satpy/readers/viirs_edr.py
@@ -56,6 +56,21 @@
     scene = satpy.Scene(filenames, reader='viirs_edr', reader_kwargs={"filter_veg": False})
 
+AOD Filtering
+^^^^^^^^^^^^^
+
+The AOD (Aerosol Optical Depth) product can be optionally filtered based on
+Quality Control (QC) values in the file. By default no filtering is performed.
+Filtering is enabled by providing the ``aod_qc_filter`` keyword argument with
+the maximum value of the ``QCAll`` variable to include (not mask). For example::
+
+    scene = satpy.Scene(filenames, reader='viirs_edr', reader_kwargs={"aod_qc_filter": 1})
+
+will only preserve AOD550 values where the quality is 0 ("high") or
+1 ("medium"). At the time of writing the ``QCAll`` variable uses 0 ("high"),
+1 ("medium"), 2 ("low"), and 3 ("no retrieval").
+
+
 """

 from __future__ import annotations
@@ -343,3 +358,19 @@ def _scale_data(self):
             add_offset = self.nc[self._manual_scalings[var_name][1]]
             data_arr.data = data_arr.data * scale_factor.data + add_offset.data
             self.nc[var_name] = data_arr
+
+
+class VIIRSAODHandler(VIIRSJRRFileHandler):
+    """File handler for AOD data files."""
+
+    def __init__(self, *args, aod_qc_filter: int | None = None, **kwargs) -> None:
+        """Initialize file handler and keep track of QC filtering."""
+        super().__init__(*args, **kwargs)
+        self._aod_qc_filter = aod_qc_filter
+
+    def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray:
+        new_data_arr = super()._mask_invalid(data_arr, ds_info)
+        if self._aod_qc_filter is None or ds_info["name"] != "AOD550":
+            return new_data_arr
+        qc_all = self.nc["QCAll"]
+        return new_data_arr.where(qc_all <= self._aod_qc_filter)
diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py
index d042576d05..e61718e9db 100644
--- a/satpy/tests/reader_tests/test_viirs_edr.py
+++ b/satpy/tests/reader_tests/test_viirs_edr.py
@@ -220,6 +220,14 @@ def aod_file(tmp_path_factory: TempPathFactory) -> Path:
     data_vars = _create_continuous_variables(
         ("AOD550",)
     )
+    qc_data = np.zeros(data_vars["AOD550"].shape, dtype=np.int8)
+    qc_data[-1, -1] = 2
+    data_vars["QCAll"] = xr.DataArray(
+        qc_data,
+        dims=data_vars["AOD550"].dims,
+        attrs={"valid_range": [0, 3]},
+    )
+    data_vars["QCAll"].encoding["_FillValue"] = -128
     return _create_fake_file(tmp_path_factory, fn, data_vars)
 
 
@@ -371,7 +379,6 @@ def test_get_dataset_surf_refl_with_veg_idx(
         ("var_names", "data_file"),
         [
             (("CldTopTemp", "CldTopHght", "CldTopPres"), lazy_fixture("cloud_height_file")),
-            (("AOD550",), lazy_fixture("aod_file")),
             (("VLST",), lazy_fixture("lst_file")),
         ]
     )
@@ -385,6 +392,31 @@ def test_get_dataset_generic(self, var_names, data_file):
         for var_name in var_names:
             _check_continuous_data_arr(scn[var_name])
 
+    @pytest.mark.parametrize(
+        ("aod_qc_filter", "exp_masked_pixel"),
+        [
+            (None, False),
+            (0, True),
+            (2, False)
+        ],
+    )
+    def test_get_aod_filtered(self, aod_file, aod_qc_filter, exp_masked_pixel):
+        """Test that the AOD product can be loaded and filtered."""
+        from satpy import Scene
+        bytes_in_m_row = 4 * 3200
+        with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}):
+            scn = Scene(reader="viirs_edr", filenames=[aod_file], reader_kwargs={"aod_qc_filter": aod_qc_filter})
+            scn.load(["AOD550"])
+        _check_continuous_data_arr(scn["AOD550"])
+        data_np = scn["AOD550"].data.compute()
+        pixel_is_nan = np.isnan(data_np[-1, -1])
+        assert pixel_is_nan if exp_masked_pixel else not pixel_is_nan
+
+        # filtering should never affect geolocation
+        lons, lats = scn["AOD550"].attrs["area"].get_lonlats()
+        assert not np.isnan(lons[-1, -1].compute())
+        assert not np.isnan(lats[-1, -1].compute())
+
     @pytest.mark.parametrize(
         ("data_file", "exp_available"),
         [
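The new handler's filtering is a single ``where`` against the ``QCAll`` variable, so any pixel whose quality flag exceeds the requested maximum becomes NaN. A toy illustration of that masking (the array values are made up)::

    import numpy as np
    import xarray as xr

    aod = xr.DataArray(np.array([0.10, 0.25, 0.40, 0.55], dtype=np.float32))
    qc_all = xr.DataArray(np.array([0, 1, 2, 3], dtype=np.int8))

    # aod_qc_filter=1 keeps "high" (0) and "medium" (1) quality retrievals
    print(aod.where(qc_all <= 1).values)  # [0.1  0.25  nan  nan]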
From ea5aa2298905a56bb8eed6408c5d91d9dba0c4f0 Mon Sep 17 00:00:00 2001
From: David Hoese
Date: Tue, 9 Jan 2024 20:03:50 -0600
Subject: [PATCH 1023/1416] Fix wildcard kwargs breaking VIIRS EDR reader

---
 satpy/readers/viirs_edr.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py
index de89a3dc0d..36d9ed5ac0 100644
--- a/satpy/readers/viirs_edr.py
+++ b/satpy/readers/viirs_edr.py
@@ -91,7 +91,7 @@
 class VIIRSJRRFileHandler(BaseFileHandler):
     """NetCDF4 reader for VIIRS Active Fires."""
 
-    def __init__(self, filename, filename_info, filetype_info):
+    def __init__(self, filename, filename_info, filetype_info, **kwargs):
         """Initialize the geo filehandler."""
         super(VIIRSJRRFileHandler, self).__init__(filename, filename_info,
                                                   filetype_info)

From 64eedbc9259beb6357ab206a477896e4e0a952da Mon Sep 17 00:00:00 2001
From: David Hoese
Date: Tue, 9 Jan 2024 20:08:10 -0600
Subject: [PATCH 1024/1416] Remove unnecessary test file removal

---
 satpy/tests/test_readers.py | 16 ++++------------
 1 file changed, 4 insertions(+), 12 deletions(-)

diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py
index beeb16b972..3e54a93592 100644
--- a/satpy/tests/test_readers.py
+++ b/satpy/tests/test_readers.py
@@ -23,7 +23,6 @@
 import sys
 import unittest
 import warnings
-from contextlib import suppress
 from pathlib import Path
 from typing import Iterator
 from unittest import mock
@@ -978,8 +977,7 @@ def _local_file(tmp_path_factory, filename: str) -> Iterator[Path]:
     tmp_path = tmp_path_factory.mktemp("local_files")
     local_filename = tmp_path / filename
     local_filename.touch()
-    yield local_filename
-    local_filename.unlink()
+    return local_filename
 
 
 @pytest.fixture(scope="module")
@@ -1007,9 +1005,7 @@ def local_zip_file(local_filename2):
     zip_file = zipfile.ZipFile(zip_name, "w", zipfile.ZIP_DEFLATED)
     zip_file.write(local_filename2)
     zip_file.close()
-    yield zip_name
-    with suppress(PermissionError):
-        zip_name.unlink()
+    return zip_name
 
 
 class TestFSFile:
@@ -1131,9 +1127,7 @@ def local_netcdf_filename(tmp_path_factory):
     ds["var1"] = xr.DataArray(np.zeros((10, 10), dtype=np.int16), dims=("y", "x"))
     ds.to_netcdf(filename)
 
-    yield str(filename)
-    with suppress(PermissionError):
-        filename.unlink()
+    return str(filename)
 
 
 @pytest.fixture(scope="module")
@@ -1188,9 +1182,7 @@ def local_hdf5_filename(tmp_path_factory):
     h.create_dataset("var1", data=np.zeros((10, 10), dtype=np.int16))
     h.close()
 
-    yield str(filename)
-    with suppress(PermissionError):
-        filename.unlink()
+    return str(filename)
 
 
 @pytest.fixture(scope="module")
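The explicit unlink calls removed above are redundant because directories created through pytest's ``tmp_path_factory`` live under a base temp directory that pytest itself prunes, keeping only the few most recent test runs. A module-scoped file fixture can therefore simply return the path; a minimal sketch with a hypothetical fixture name::

    import pytest

    @pytest.fixture(scope="module")
    def fake_data_file(tmp_path_factory):
        # pytest removes old base temp directories automatically,
        # so no teardown/unlink is needed here
        filename = tmp_path_factory.mktemp("fake_files") / "test.dat"
        filename.touch()
        return filename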
From 9d8b2fd0f4445daa3de30b7deff738c0d14a9fa7 Mon Sep 17 00:00:00 2001
From: David Hoese
Date: Tue, 9 Jan 2024 21:09:55 -0600
Subject: [PATCH 1025/1416] Add debug log message to know when AOD QC filtering is being performed

---
 satpy/readers/viirs_edr.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py
index 36d9ed5ac0..b5ba999b13 100644
--- a/satpy/readers/viirs_edr.py
+++ b/satpy/readers/viirs_edr.py
@@ -372,5 +372,6 @@ def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray:
         new_data_arr = super()._mask_invalid(data_arr, ds_info)
         if self._aod_qc_filter is None or ds_info["name"] != "AOD550":
             return new_data_arr
+        LOG.debug(f"Filtering AOD data to include quality <= {self._aod_qc_filter}")
         qc_all = self.nc["QCAll"]
         return new_data_arr.where(qc_all <= self._aod_qc_filter)

From 529de8fe47bfd1f290a602cffcd89d8f02031849 Mon Sep 17 00:00:00 2001
From: David Hoese
Date: Tue, 9 Jan 2024 21:12:24 -0600
Subject: [PATCH 1026/1416] Fix contextmanager definition in test_readers.py

---
 satpy/tests/test_readers.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py
index 3e54a93592..db3d1ccb1d 100644
--- a/satpy/tests/test_readers.py
+++ b/satpy/tests/test_readers.py
@@ -977,7 +977,7 @@ def _local_file(tmp_path_factory, filename: str) -> Iterator[Path]:
     tmp_path = tmp_path_factory.mktemp("local_files")
     local_filename = tmp_path / filename
     local_filename.touch()
-    return local_filename
+    yield local_filename
 
 
 @pytest.fixture(scope="module")

From e5890b0d1b64d7035e639cf1a0bd67115208cab8 Mon Sep 17 00:00:00 2001
From: Olivier Samain
Date: Mon, 24 Jul 2023 15:34:53 +0200
Subject: [PATCH 1027/1416] convert enumeration type information into flag_meanings / flag_values attributes

---
 satpy/readers/fci_l2_nc.py | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py
index cb5924b448..83f560c35a 100644
--- a/satpy/readers/fci_l2_nc.py
+++ b/satpy/readers/fci_l2_nc.py
@@ -20,6 +20,7 @@
 
 import numpy as np
 import xarray as xr
+import netCDF4
 from pyresample import geometry
 
 from satpy._compat import cached_property
@@ -93,6 +94,20 @@ def _set_attributes(self, variable, dataset_info, segmented=False):
             variable.attrs.setdefault("units", None)
 
         variable.attrs.update(dataset_info)
         variable.attrs.update(self._get_global_attributes())
+
+        if dataset_info.get("import_enum_information", False):
+            netCDF4_dataset = netCDF4.Dataset(self.filename, "r")
+            # This currently assumes a flat netCDF file
+            enum = netCDF4_dataset.variables[dataset_info["file_key"]].datatype.enum_dict
+            flag_values = []
+            flag_meanings = []
+            for meaning, value in enum.items():
+                flag_values.append(value)
+                flag_meanings.append(meaning)
+
+            variable.attrs["flag_values"] = flag_values
+            variable.attrs["flag_meanings"] = flag_meanings
+            netCDF4_dataset.close()
 
         return variable
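For context, ``enum_dict`` on a netCDF4 enumeration type maps meaning names to their integer codes, which is exactly the pair of lists that the CF ``flag_values``/``flag_meanings`` attributes encode. A small sketch with a hypothetical cloud-state enum (the names and codes are illustrative, not the real FCI values)::

    enum = {"clear": 0, "cloudy": 1, "undefined": 2}  # e.g. some_var.datatype.enum_dict

    flag_values = list(enum.values())  # [0, 1, 2]
    flag_meanings = list(enum.keys())  # ["clear", "cloudy", "undefined"]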
quality_illumination_clm: name: quality_illumination_clm resolution: 2000 file_type: nc_fci_clm file_key: quality_illumination - long_name: illumination_classification + standard_name: illumination_classification + import_enum_information: True quality_nwp_parameters_clm: name: quality_nwp_parameters_clm resolution: 2000 file_type: nc_fci_clm file_key: quality_nwp_parameters - long_name: quality_index - + standard_name: forecast_availability_classification + import_enum_information: True + quality_MTG_parameters_clm: name: quality_MTG_parameters_clm resolution: 2000 file_type: nc_fci_clm file_key: quality_MTG_parameters - long_name: quality_index + standard_name: platform_data_availability_classification fill_value: -127 + import_enum_information: True quality_overall_processing_clm: name: quality_overall_processing_clm resolution: 2000 file_type: nc_fci_clm file_key: quality_overall_processing - long_name: quality_index + standard_name: quality_flag product_quality_clm: name: product_quality_clm file_type: nc_fci_clm file_key: product_quality - long_name: product_quality_index + standard_name: quality_flag product_completeness_clm: name: product_completeness_clm file_type: nc_fci_clm file_key: product_completeness - long_name: product_completeness_index + standard_name: quality_flag product_timeliness_clm: name: product_timeliness_clm file_type: nc_fci_clm file_key: product_timeliness - long_name: product_timeliness_index + standard_name: quality_flag # FCI CT L2 cloud_phase: From 88259dfd327224df183f717288f5ed43c8e0ead1 Mon Sep 17 00:00:00 2001 From: Olivier Samain Date: Fri, 1 Dec 2023 10:15:21 +0100 Subject: [PATCH 1029/1416] Merging FCI standard names and flag meanings and Fixing standard names for AMV quality flags --- satpy/etc/readers/fci_l2_nc.yaml | 2174 +++++++++++++++++------------- 1 file changed, 1205 insertions(+), 969 deletions(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index 7755259cea..ba8d3b1c46 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -83,6 +83,7 @@ datasets: file_type: nc_fci_clm file_key: cloud_state standard_name: cloud_mask_classification + fill_value: -127 import_enum_information: True quality_illumination_clm: name: quality_illumination_clm resolution: 2000 file_type: nc_fci_clm file_key: quality_illumination - standard_name: illumination_classification + standard_name: status_flag + fill_value: -127 import_enum_information: True quality_nwp_parameters_clm: name: quality_nwp_parameters_clm resolution: 2000 file_type: nc_fci_clm file_key: quality_nwp_parameters - standard_name: forecast_availability_classification + standard_name: status_flag + fill_value: -127 import_enum_information: True - + quality_MTG_parameters_clm: name: quality_MTG_parameters_clm resolution: 2000 file_type: nc_fci_clm file_key: quality_MTG_parameters - standard_name: platform_data_availability_classification + standard_name: status_flag fill_value: -127 import_enum_information: True quality_overall_processing_clm: name: quality_overall_processing_clm resolution: 2000 file_type: nc_fci_clm file_key: quality_overall_processing standard_name: quality_flag + fill_value: -127 + import_enum_information: True product_quality_clm: name: product_quality_clm file_type: nc_fci_clm file_key: product_quality - standard_name: quality_flag + standard_name: product_quality product_completeness_clm: name: product_completeness_clm file_type: nc_fci_clm file_key: product_completeness - standard_name: quality_flag + standard_name: product_completeness product_timeliness_clm: name:
product_timeliness_clm file_type: nc_fci_clm file_key: product_timeliness - standard_name: quality_flag + standard_name: product_timeliness # FCI CT L2 cloud_phase: name: cloud_phase resolution: 2000 file_type: nc_fci_ct file_key: cloud_phase - long_name: cloud_phase + standard_name: cloud_phase_classification + fill_value: -127 + import_enum_information: True cloud_type: name: cloud_type resolution: 2000 file_type: nc_fci_ct file_key: cloud_type - long_name: cloud_type + standard_name: cloud_type_classification + fill_value: -127 + import_enum_information: True quality_illumination_ct: name: quality_illumination_ct resolution: 2000 file_type: nc_fci_ct file_key: quality_illumination - long_name: illumination_classification + standard_name: status_flag + fill_value: -127 + import_enum_information: True quality_nwp_parameters_ct: name: quality_nwp_parameters_ct resolution: 2000 file_type: nc_fci_ct file_key: quality_nwp_parameters - long_name: quality_index + standard_name: status_flag + fill_value: -127 + import_enum_information: True quality_MTG_parameters_ct: name: quality_MTG_parameters_ct resolution: 2000 file_type: nc_fci_ct file_key: quality_MTG_parameters - long_name: quality_index + standard_name: status_flag + fill_value: -127 + import_enum_information: True quality_overall_processing_ct: name: quality_overall_processing_ct resolution: 2000 file_type: nc_fci_ct file_key: quality_overall_processing - long_name: quality_index + standard_name: quality_flag + fill_value: -127 + import_enum_information: True product_quality_ct: name: product_quality_ct file_type: nc_fci_ct file_key: product_quality - long_name: product_quality_index + standard_name: product_quality product_completeness_ct: name: product_completeness_ct file_type: nc_fci_ct file_key: product_completeness - long_name: product_completeness_index + standard_name: product_completeness product_timeliness_ct: name: product_timeliness_ct file_type: nc_fci_ct file_key: product_timeliness - long_name: product_timeliness_index + standard_name: product_timeliness # FCI CTTH Product cloud_top_aviation_height: name: cloud_top_aviation_height resolution: 2000 file_type: nc_fci_ctth file_key: cloud_top_aviation_height + standard_name: height_at_cloud_top_for_aviation cloud_top_height: name: cloud_top_height resolution: 2000 file_type: nc_fci_ctth file_key: cloud_top_height - fill_value: 32769 + standard_name: height_at_cloud_top cloud_top_pressure: name: cloud_top_pressure resolution: 2000 file_type: nc_fci_ctth file_key: cloud_top_pressure - fill_value: 3276.9001 + standard_name: air_pressure_at_cloud_top cloud_top_temperature: name: cloud_top_temperature resolution: 2000 file_type: nc_fci_ctth file_key: cloud_top_temperature - fill_value: 327.69 + standard_name: air_temperature_at_cloud_top effective_cloudiness: name: effective_cloudiness resolution: 2000 file_type: nc_fci_ctth file_key: effective_cloudiness + standard_name: effective_cloud_cover quality_status_ctth: name: quality_status_ctth resolution: 2000 file_type: nc_fci_ctth file_key: quality_status + standard_name: status_flag + fill_value: -127 + import_enum_information: True quality_rtm_ctth: name: quality_rtm_ctth resolution: 2000 file_type: nc_fci_ctth file_key: quality_rtm + standard_name: status_flag + fill_value: -127 + import_enum_information: True quality_method_ctth: name: quality_method_ctth resolution: 2000 file_type: nc_fci_ctth file_key: quality_method + standard_name: status_flag + fill_value: -127 + import_enum_information: True
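With the entries above declaring import_enum_information: True, a minimal usage sketch of how the imported flag attributes surface to a Satpy user (the filename is a placeholder; the reader name fci_l2_nc comes from this YAML file):

from satpy import Scene

# Placeholder filename; any FCI L2 CT product file matching the reader's patterns works.
scn = Scene(reader="fci_l2_nc", filenames=["FCI_L2_CT_example.nc"])
scn.load(["cloud_phase"])
print(scn["cloud_phase"].attrs["flag_values"])    # e.g. [0, 1, 2, ...]
print(scn["cloud_phase"].attrs["flag_meanings"])  # e.g. ['clear', 'water', 'ice', ...]

The flag attributes are only present for datasets that declare import_enum_information: True; the example values in the comments are illustrative, not taken from a real product.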
quality_nwp_parameters_ctth: name: quality_nwp_parameters_ctth resolution: 2000 file_type: nc_fci_ctth file_key: quality_nwp_parameters + standard_name: status_flag + fill_value: -127 + import_enum_information: True quality_MTG_parameters_ctth: name: quality_MTG_parameters_ctth resolution: 2000 file_type: nc_fci_ctth file_key: quality_MTG_parameters + standard_name: status_flag fill_value: -127 + import_enum_information: True quality_overall_processing_ctth: name: quality_overall_processing_ctth resolution: 2000 file_type: nc_fci_ctth file_key: quality_overall_processing + standard_name: quality_flag + fill_value: -127 + import_enum_information: True quality_overall_processing_aviation_ctth: name: quality_overall_processing_aviation_ctth resolution: 2000 file_type: nc_fci_ctth file_key: quality_overall_processing_aviation + standard_name: quality_flag + fill_value: -127 + import_enum_information: True product_quality_ctth: name: product_quality_ctth file_type: nc_fci_ctth file_key: product_quality - long_name: product_quality_index + standard_name: product_quality product_completeness_ctth: name: product_completeness_ctth file_type: nc_fci_ctth file_key: product_completeness - long_name: product_completeness_index + standard_name: product_completeness product_timeliness_ctth: name: product_timeliness_ctth file_type: nc_fci_ctth file_key: product_timeliness - long_name: product_timeliness_index + standard_name: product_timeliness # OCA retrieved_cloud_phase: @@ -297,14 +336,16 @@ datasets: resolution: 2000 file_type: nc_fci_oca file_key: retrieved_cloud_phase - standard_name: thermodynamic_phase_of_cloud_water_particles_at_cloud_top + standard_name: thermodynamic_phase_of_cloud_particles_classification + fill_value: -127 + import_enum_information: True retrieved_cloud_optical_thickness: name: retrieved_cloud_optical_thickness resolution: 2000 file_type: nc_fci_oca file_key: retrieved_cloud_optical_thickness - long_name: cloud_optical_depth + standard_name: atmosphere_optical_thickness_due_to_cloud retrieved_cloud_optical_thickness_upper_layer: name: retrieved_cloud_optical_thickness_upper_layer @@ -312,7 +353,15 @@ datasets: file_type: nc_fci_oca file_key: retrieved_cloud_optical_thickness layer: 0 - long_name: cloud_optical_depth + standard_name: atmosphere_optical_thickness_due_to_cloud + + retrieval_error_cloud_optical_thickness_upper_layer: + name: retrieval_error_cloud_optical_thickness_upper_layer + resolution: 2000 + file_type: nc_fci_oca + file_key: retrieval_error_cloud_optical_thickness + layer: 0 + standard_name: atmosphere_optical_thickness_due_to_cloud standard_error retrieved_cloud_optical_thickness_lower_layer: name: retrieved_cloud_optical_thickness_lower_layer @@ -320,21 +369,29 @@ datasets: file_type: nc_fci_oca file_key: retrieved_cloud_optical_thickness layer: 1 - long_name: cloud_optical_depth + standard_name: atmosphere_optical_thickness_due_to_cloud + + retrieval_error_cloud_optical_thickness_lower_layer: + name: retrieval_error_cloud_optical_thickness_lower_layer + resolution: 2000 + file_type: nc_fci_oca + file_key: retrieval_error_cloud_optical_thickness + layer: 1 + standard_name: atmosphere_optical_thickness_due_to_cloud standard_error retrieved_cloud_particle_effective_radius: - name: retrieved_cloud_particle_effective_radius + name: retrieved_cloud_particle_effective_radius_upper_layer resolution: 2000 file_type: nc_fci_oca file_key: retrieved_cloud_particle_effective_radius - standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top + 
standard_name: effective_radius_of_cloud_particles - retrieved_cloud_top_temperature: - name: retrieved_cloud_top_temperature + retrieval_error_cloud_particle_effective_radius: + name: retrieval_error_cloud_particle_effective_radius_upper_layer resolution: 2000 file_type: nc_fci_oca - file_key: retrieved_cloud_top_temperature - standard_name: air_temperature_at_cloud_top + file_key: retrieval_error_cloud_particle_effective_radius + standard_name: effective_radius_of_cloud_particles standard_error retrieved_cloud_top_pressure_upper_layer: name: retrieved_cloud_top_pressure_upper_layer resolution: 2000 file_type: nc_fci_oca file_key: retrieved_cloud_top_pressure layer: 0 standard_name: air_pressure_at_cloud_top + retrieval_error_cloud_top_pressure_upper_layer: + name: retrieval_error_cloud_top_pressure_upper_layer + resolution: 2000 + file_type: nc_fci_oca + file_key: retrieval_error_cloud_top_pressure + layer: 0 + standard_name: air_pressure_at_cloud_top standard_error + retrieved_cloud_top_pressure_lower_layer: name: retrieved_cloud_top_pressure_lower_layer resolution: 2000 file_type: nc_fci_oca file_key: retrieved_cloud_top_pressure layer: 1 standard_name: air_pressure_at_cloud_top - retrieved_cloud_top_height: - name: retrieved_cloud_top_height - resolution: 2000 - file_type: nc_fci_oca - file_key: retrieved_cloud_top_height - standard_name: height_at_cloud_top - - retrieval_error_cloud_optical_thickness_upper_layer: - name: retrieval_error_cloud_optical_thickness_upper_layer - resolution: 2000 - file_type: nc_fci_oca - file_key: retrieval_error_cloud_optical_thickness - layer: 0 - long_name: cloud_optical_depth - - retrieval_error_cloud_optical_thickness_lower_layer: - name: retrieval_error_cloud_optical_thickness_lower_layer + retrieval_error_cloud_top_pressure_lower_layer: + name: retrieval_error_cloud_top_pressure_lower_layer resolution: 2000 file_type: nc_fci_oca - file_key: retrieval_error_cloud_optical_thickness + file_key: retrieval_error_cloud_top_pressure layer: 1 - long_name: cloud_optical_depth - - retrieval_error_cloud_particle_effective_radius: - name: retrieval_error_cloud_particle_effective_radius - resolution: 2000 - file_type: nc_fci_oca - file_key: retrieval_error_cloud_particle_effective_radius - standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top_standard_error + standard_name: air_pressure_at_cloud_top standard_error - retrieval_error_cloud_top_pressure_upper_layer: - name: retrieval_error_cloud_top_pressure_upper_layer + retrieved_cloud_top_temperature: + name: retrieved_cloud_top_temperature_upper_layer resolution: 2000 file_type: nc_fci_oca - file_key: retrieval_error_cloud_top_pressure - layer: 0 - standard_name: air_pressure_at_cloud_top_standard_error + file_key: retrieved_cloud_top_temperature + standard_name: air_temperature_at_cloud_top - retrieval_error_cloud_top_pressure_lower_layer: - name: retrieval_error_cloud_top_pressure_lower_layer + retrieved_cloud_top_height: + name: retrieved_cloud_top_height_upper_layer resolution: 2000 file_type: nc_fci_oca - file_key: retrieval_error_cloud_top_pressure - layer: 1 - standard_name: air_pressure_at_cloud_top_standard_error + file_key: retrieved_cloud_top_height + standard_name: height_at_cloud_top quality_jmeas: name: quality_jmeas resolution: 2000 file_type: nc_fci_oca file_key: quality_jmeas - long_name: cost_function + standard_name: cost_function_part_due_to_measurements product_quality_oca: name: product_quality_oca file_type: nc_fci_oca file_key: product_quality - long_name: product_quality_index + standard_name: product_quality product_completeness_oca: 
name: product_completeness_oca file_type: nc_fci_oca file_key: product_completeness - long_name: product_completeness_index + standard_name: product_completeness product_timeliness_oca: name: product_timeliness_oca file_type: nc_fci_oca file_key: product_timeliness - long_name: product_timeliness_index + standard_name: product_timeliness # FIR fire_probability: @@ -429,30 +470,34 @@ datasets: resolution: 2000 file_type: nc_fci_fir file_key: fire_probability + standard_name: fire_probability fire_result: name: fire_result resolution: 2000 file_type: nc_fci_fir file_key: fire_result + standard_name: active_fire_classification + fill_value: -127 + import_enum_information: True product_quality_fir: name: product_quality_fir file_type: nc_fci_fir file_key: product_quality - long_name: product_quality_index + standard_name: product_quality product_completeness_fir: name: product_completeness_fir file_type: nc_fci_fir file_key: product_completeness - long_name: product_completeness_index + standard_name: product_completeness product_timeliness_fir: name: product_timeliness_fir file_type: nc_fci_fir file_key: product_timeliness - long_name: product_timeliness_index + standard_name: product_timeliness # OLR olr: @@ -460,39 +505,43 @@ datasets: resolution: 2000 file_type: nc_fci_olr file_key: olr_value - long_name: outgoing_longwave_radiation + standard_name: outgoing_longwave_radiation cloud_type_olr: name: cloud_type_olr resolution: 2000 file_type: nc_fci_olr file_key: cloud_type - long_name: cloud_type_olr + standard_name: cloud_type_classification + fill_value: -127 + import_enum_information: True quality_overall_processing_olr: name: quality_overall_processing_olr resolution: 2000 file_type: nc_fci_olr file_key: quality_overall_processing - long_name: quality_index + standard_name: quality_flag + fill_value: -127 + import_enum_information: True product_quality_olr: name: product_quality_olr file_type: nc_fci_olr file_key: product_quality - long_name: product_quality_index + standard_name: product_quality product_completeness_olr: name: product_completeness_olr file_type: nc_fci_olr file_key: product_completeness - long_name: product_completeness_index + standard_name: product_completeness product_timeliness_olr: name: product_timeliness_olr file_type: nc_fci_olr file_key: product_timeliness - long_name: product_timeliness_index + standard_name: product_timeliness # CRM crm: @@ -500,7 +549,7 @@ datasets: resolution: 1000 file_type: nc_fci_crm file_key: mean_clear_sky_reflectance - long_name: mean_clear_sky_reflectance + standard_name: toa_bidirectional_reflectance crm_vis04: name: crm_vis04 @@ -508,7 +557,7 @@ datasets: wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_crm file_key: mean_clear_sky_reflectance - long_name: mean_clear_sky_reflectance_vis04 + standard_name: toa_bidirectional_reflectance vis_channel_id: 0 crm_vis05: @@ -517,7 +566,7 @@ datasets: wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_crm file_key: mean_clear_sky_reflectance - long_name: mean_clear_sky_reflectance_vis05 + standard_name: toa_bidirectional_reflectance vis_channel_id: 1 crm_vis06: @@ -526,7 +575,7 @@ datasets: wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_crm file_key: mean_clear_sky_reflectance - long_name: mean_clear_sky_reflectance_vis06 + standard_name: toa_bidirectional_reflectance vis_channel_id: 2 crm_vis08: @@ -535,7 +584,7 @@ datasets: wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_crm file_key: mean_clear_sky_reflectance - long_name: mean_clear_sky_reflectance_vis08 + standard_name: 
toa_bidirectional_reflectance vis_channel_id: 3 crm_vis09: name: crm_vis09 resolution: 1000 wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_crm file_key: mean_clear_sky_reflectance - long_name: mean_clear_sky_reflectance_vis09 + standard_name: toa_bidirectional_reflectance vis_channel_id: 4 crm_nir13: name: crm_nir13 resolution: 1000 wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_crm file_key: mean_clear_sky_reflectance - long_name: mean_clear_sky_reflectance_nir13 + standard_name: toa_bidirectional_reflectance vis_channel_id: 5 crm_nir16: name: crm_nir16 resolution: 1000 wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_crm file_key: mean_clear_sky_reflectance - long_name: mean_clear_sky_reflectance_nir16 + standard_name: toa_bidirectional_reflectance vis_channel_id: 6 crm_nir22: name: crm_nir22 resolution: 1000 wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_crm file_key: mean_clear_sky_reflectance - long_name: mean_clear_sky_reflectance_nir22 + standard_name: toa_bidirectional_reflectance vis_channel_id: 7 mean_sza: name: mean_sza resolution: 1000 file_type: nc_fci_crm file_key: mean_solar_zenith - long_name: mean_solar_zenith_angle + standard_name: solar_zenith_angle mean_rel_azi: name: mean_rel_azi resolution: 1000 file_type: nc_fci_crm file_key: mean_rel_solar_sat_azimuth - long_name: mean_relative_solar_satellite_azimuth_angle + standard_name: relative_sun_sensor_azimuth_angle n_acc: name: n_acc resolution: 1000 file_type: nc_fci_crm file_key: number_of_accumulations - long_name: number_of_accumulations + standard_name: number_of_accumulations historical_data: name: historical_data resolution: 1000 file_type: nc_fci_crm file_key: historical_data - long_name: historical_data + standard_name: status_flag product_quality_crm: name: product_quality_crm file_type: nc_fci_crm file_key: product_quality - long_name: product_quality_index + standard_name: product_quality product_completeness_crm: name: product_completeness_crm file_type: nc_fci_crm file_key: product_completeness - long_name: product_completeness_index + standard_name: product_completeness product_timeliness_crm: name: product_timeliness_crm file_type: nc_fci_crm file_key: product_timeliness - long_name: product_timeliness_index + standard_name: product_timeliness # LAT/LON FOR SEGMENTED PRODUCTS @@ -645,160 +694,98 @@ datasets: resolution: 6000 file_type: nc_fci_gii file_key: k_index - long_name: k_index coordinates: - longitude - latitude + standard_name: atmosphere_stability_k_index lifted_index: name: lifted_index resolution: 6000 file_type: nc_fci_gii file_key: lifted_index - long_name: lifted_index coordinates: - longitude - latitude + standard_name: atmosphere_stability_lifted_index prec_water_high: name: prec_water_high resolution: 6000 file_type: nc_fci_gii file_key: prec_water_high - long_name: prec_water_high coordinates: - longitude - latitude + standard_name: atmosphere_mass_content_of_water_vapor prec_water_low: name: prec_water_low resolution: 6000 file_type: nc_fci_gii file_key: prec_water_low - long_name: prec_water_low coordinates: - longitude - latitude + standard_name: atmosphere_mass_content_of_water_vapor prec_water_mid: name: prec_water_mid resolution: 6000 file_type: nc_fci_gii file_key: prec_water_mid - long_name: prec_water_mid coordinates: - longitude - latitude + standard_name: atmosphere_mass_content_of_water_vapor prec_water_total: name: prec_water_total resolution: 6000 file_type: nc_fci_gii file_key: prec_water_total - long_name: prec_water_total coordinates: - longitude - latitude + 
standard_name: atmosphere_mass_content_of_water_vapor percent_cloud_free_gii: name: percent_cloud_free_gii resolution: 6000 file_type: nc_fci_gii file_key: percent_cloud_free - long_name: percent_cloud_free coordinates: - longitude - latitude + standard_name: cloud_free_area_fraction number_of_iterations_gii: name: number_of_iterations_gii resolution: 6000 file_type: nc_fci_gii file_key: number_of_iterations - long_name: number_of_iterations coordinates: - longitude - latitude + standard_name: number_of_iterations product_quality_gii: name: product_quality_gii file_type: nc_fci_gii file_key: product_quality - long_name: product_quality_index + standard_name: product_quality product_completeness_gii: name: product_completeness_gii file_type: nc_fci_gii file_key: product_completeness - long_name: product_completeness_index + standard_name: product_completeness product_timeliness_gii: name: product_timeliness_gii file_type: nc_fci_gii file_key: product_timeliness - long_name: product_timeliness_index - - -# TOZ - total_ozone: - name: total_ozone - resolution: 6000 - file_type: nc_fci_toz - file_key: total_ozone - long_name: total_ozone - coordinates: - - longitude - - latitude - - percent_pixels_toz: - name: percent_pixels_toz - resolution: 6000 - file_type: nc_fci_toz - file_key: percent_pixels - long_name: percent_pixels - coordinates: - - longitude - - latitude - - number_of_iterations_toz: - name: number_of_iterations_toz - resolution: 6000 - file_type: nc_fci_toz - file_key: number_of_iterations - long_name: number_of_iterations - coordinates: - - longitude - - latitude - - retrieval_type_toz: - name: retrieval_type_toz - resolution: 6000 - file_type: nc_fci_toz - file_key: retrieval_type - long_name: retrieval_type - coordinates: - - longitude - - latitude - - product_quality_toz: - name: product_quality_toz - file_type: nc_fci_toz - file_key: product_quality - long_name: product_quality_index - - product_completeness_toz: - name: product_completeness_toz - file_type: nc_fci_toz - file_key: product_completeness - long_name: product_completeness_index - - product_timeliness_toz: - name: product_timeliness_toz - file_type: nc_fci_toz - file_key: product_timeliness - long_name: product_timeliness_index - - + standard_name: product_timeliness # CLM Test cloud_test_sit1_flag: @@ -806,1927 +793,2176 @@ datasets: resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - long_name: cloud_mask_test_sit1_flag extract_byte: 0 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] + standard_name: status_flag cloud_test_cmt1_flag: name: cloud_test_cmt1_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt1_flag extract_byte: 1 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] + standard_name: status_flag cloud_test_cmt2_flag: name: cloud_test_cmt2_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt2_flag extract_byte: 2 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] + standard_name: status_flag cloud_test_cmt3_flag: name: cloud_test_cmt3_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt3_flag extract_byte: 3 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] + standard_name: status_flag cloud_test_cmt4_flag: name: cloud_test_cmt4_flag resolution: 2000 file_type: nc_fci_test_clm 
file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt4_flag extract_byte: 4 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] + standard_name: status_flag cloud_test_cmt5_flag: name: cloud_test_cmt5_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt5_flag extract_byte: 5 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] + standard_name: status_flag cloud_test_cmt6_flag: name: cloud_test_cmt6_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt6_flag extract_byte: 6 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] + standard_name: status_flag cloud_test_cmt7_flag: name: cloud_test_cmt7_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt7_flag extract_byte: 7 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] + standard_name: status_flag cloud_test_cmt8_flag: name: cloud_test_cmt8_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt8_flag extract_byte: 8 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] + standard_name: status_flag cloud_test_cmt9_flag: name: cloud_test_cmt9_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt9_flag extract_byte: 9 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] + standard_name: status_flag cloud_test_cmt10_flag: name: cloud_test_cmt10_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt0_flag extract_byte: 10 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] + standard_name: status_flag cloud_test_cmt11_flag: name: cloud_test_cmt11_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt11_flag extract_byte: 11 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] + standard_name: status_flag cloud_test_cmt12_flag: name: cloud_test_cmt12_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt12_flag extract_byte: 12 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] + standard_name: status_flag cloud_test_cmt13_flag: name: cloud_test_cmt13_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt13_flag extract_byte: 13 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] + standard_name: status_flag cloud_test_cmt14_flag: name: cloud_test_cmt14_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmt14_flag extract_byte: 14 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] + standard_name: status_flag cloud_test_opqt_flag: name: cloud_test_opqt_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - long_name: cloud_mask_test_opqt_flag extract_byte: 15 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] + standard_name: status_flag cloud_test_cmrt1_flag: name: cloud_test_cmrt1_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - long_name: 
cloud_mask_test_cmrt1_flag extract_byte: 16 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] + standard_name: status_flag cloud_test_cmrt2_flag: name: cloud_test_cmrt2_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmrt2_flag extract_byte: 17 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] + standard_name: status_flag cloud_test_cmrt3_flag: name: cloud_test_cmrt3_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmrt3_flag extract_byte: 18 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] + standard_name: status_flag cloud_test_cmrt4_flag: name: cloud_test_cmrt4_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmrt4_flag extract_byte: 19 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] + standard_name: status_flag cloud_test_cmrt5_flag: name: cloud_test_cmrt5_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmrt5_flag extract_byte: 20 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] + standard_name: status_flag cloud_test_cmrt6_flag: name: cloud_test_cmrt6_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - long_name: cloud_mask_test_cmrt6_flag extract_byte: 21 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] + standard_name: status_flag cloud_test_dust_flag: name: cloud_test_dust_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - long_name: cloud_mask_test_dust_flag extract_byte: 22 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] + standard_name: status_flag cloud_test_ash_flag: name: cloud_test_ash_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - long_name: cloud_mask_test_ash_flag extract_byte: 23 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] + standard_name: status_flag cloud_test_dust_ash_flag: name: cloud_test_dust_ash_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - long_name: cloud_mask_test_dust_ash_flag extract_byte: 24 + flag_values: [0,1] + flag_meanings: ['Test not carried out','Test carried out'] + standard_name: status_flag cloud_test_sit1: name: cloud_test_sit1 resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - long_name: cloud_mask_test_sit1 extract_byte: 0 + flag_values: [0,1] + flag_meanings: ['Snow/Ice undetected','Snow/Ice detected'] + standard_name: status_flag cloud_test_cmt1: name: cloud_test_cmt1 resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt1 extract_byte: 1 + flag_values: [0,1] + flag_meanings: ['Cloud undetected','Cloud detected'] + standard_name: status_flag cloud_test_cmt2: name: cloud_test_cmt2 resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt2 extract_byte: 2 + flag_values: [0,1] + flag_meanings: ['Cloud undetected','Cloud detected'] + standard_name: status_flag cloud_test_cmt3: name: cloud_test_cmt3 resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt3 extract_byte: 3 + flag_values: [0,1] + flag_meanings: ['Cloud 
undetected','Cloud detected'] + standard_name: status_flag cloud_test_cmt4: name: cloud_test_cmt4 resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt4 extract_byte: 4 + flag_values: [0,1] + flag_meanings: ['Cloud undetected','Cloud detected'] + standard_name: status_flag cloud_test_cmt5: name: cloud_test_cmt5 resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt5 extract_byte: 5 + flag_values: [0,1] + flag_meanings: ['Cloud undetected','Cloud detected'] + standard_name: status_flag cloud_test_cmt6: name: cloud_test_cmt6 resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt6 extract_byte: 6 + flag_values: [0,1] + flag_meanings: ['Cloud undetected','Cloud detected'] + standard_name: status_flag cloud_test_cmt7: name: cloud_test_cmt7 resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt7 extract_byte: 7 + flag_values: [0,1] + flag_meanings: ['Cloud undetected','Cloud detected'] + standard_name: status_flag + cloud_test_cmt8: name: cloud_test_cmt8 resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt8 extract_byte: 8 + flag_values: [0,1] + flag_meanings: ['Cloud undetected','Cloud detected'] + standard_name: status_flag cloud_test_cmt9: name: cloud_test_cmt9 resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt9 extract_byte: 9 + flag_values: [0,1] + flag_meanings: ['Cloud undetected','Cloud detected'] + standard_name: status_flag cloud_test_cmt10: name: cloud_test_cmt10 resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt10 extract_byte: 10 + flag_values: [0,1] + flag_meanings: ['Cloud undetected','Cloud detected'] + standard_name: status_flag cloud_test_cmt11: name: cloud_test_cmt11 resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt11 extract_byte: 11 + flag_values: [0,1] + flag_meanings: ['Cloud undetected','Cloud detected'] + standard_name: status_flag cloud_test_cmt12: name: cloud_test_cmt12 resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt12 extract_byte: 12 + flag_values: [0,1] + flag_meanings: ['Cloud undetected','Cloud detected'] + standard_name: status_flag cloud_test_cmt13: name: cloud_test_cmt13 resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt13 extract_byte: 13 + flag_values: [0,1] + flag_meanings: ['Cloud undetected','Cloud detected'] + standard_name: status_flag cloud_test_cmt14: name: cloud_test_cmt14 resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmt14 extract_byte: 14 + flag_values: [0,1] + flag_meanings: ['Cloud undetected','Cloud detected'] + standard_name: status_flag cloud_test_opqt: name: cloud_test_opqt resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - long_name: cloud_mask_test_opqt extract_byte: 15 + flag_values: [0,1] + flag_meanings: ['Cloud undetected','Cloud detected'] + standard_name: status_flag cloud_test_cmrt1: name: cloud_test_cmrt1 resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmrt1 extract_byte: 16 + flag_values: [0,1] + 
flag_meanings: ['Cloud undetected','Cloud detected'] + standard_name: status_flag cloud_test_cmrt2: name: cloud_test_cmrt2 resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmrt2 extract_byte: 17 + flag_values: [0,1] + flag_meanings: ['Cloud undetected','Cloud detected'] + standard_name: status_flag cloud_test_cmrt3: name: cloud_test_cmrt3 resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmrt3 extract_byte: 18 + flag_values: [0,1] + flag_meanings: ['Cloud undetected','Cloud detected'] + standard_name: status_flag cloud_test_cmrt4: name: cloud_test_cmrt4 resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmrt4 extract_byte: 19 + flag_values: [0,1] + flag_meanings: ['Cloud undetected','Cloud detected'] + standard_name: status_flag cloud_test_cmrt5: name: cloud_test_cmrt5 resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - long_name: cloud_mask_test_cmrt5 extract_byte: 20 + flag_values: [0,1] + flag_meanings: ['Cloud undetected','Cloud detected'] + standard_name: status_flag cloud_test_dust: name: cloud_test_dust resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - long_name: cloud_mask_test_dust extract_byte: 21 + flag_values: [0,1] + flag_meanings: ['Cloud undetected','Cloud detected'] + standard_name: status_flag cloud_test_ash: name: cloud_test_ash resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - long_name: cloud_mask_test_ash extract_byte: 22 + flag_values: [0,1] + flag_meanings: ['Cloud undetected','Cloud detected'] + standard_name: status_flag cloud_test_dust_ash: name: cloud_test_dust_ash resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - long_name: cloud_mask_test_dust_ash extract_byte: 23 + flag_values: [0,1] + flag_meanings: ['Cloud undetected','Cloud detected'] + standard_name: status_flag cloud_test_cmrt6: name: cloud_test_cmrt6 resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_cmrt6_test_result - long_name: cloud_mask_cmrt6_result + fill_value: -127 + standard_name: status_flag + import_enum_information: True product_quality_clmtest: name: product_quality_clmtest file_type: nc_fci_test_clm file_key: product_quality - long_name: product_quality_index + standard_name: product_quality product_completeness_clmtest: name: product_completeness_clmtest file_type: nc_fci_test_clm file_key: product_completeness - long_name: product_completeness_index + standard_name: product_completeness product_timeliness_clmtest: name: product_timeliness_clmtest file_type: nc_fci_test_clm file_key: product_timeliness - long_name: product_timeliness_index - + standard_name: product_timeliness # ASR - bt_max: - name: bt_max + radiance_min: + name: radiance_min resolution: 32000 + wavelength: [] file_type: nc_fci_asr - file_key: bt_max - long_name: maximum_brightness_temperature_in_segment + file_key: radiance_min + long_name: TOA min Radiance + standard_name: toa_radiance + cell_method: area:minimum coordinates: - longitude - latitude - bt_mean: - name: bt_mean + radiance_max: + name: radiance_max resolution: 32000 + wavelength: [] file_type: nc_fci_asr - file_key: bt_mean - long_name: mean_brightness_temperature_in_segment + file_key: radiance_max + long_name: TOA max Radiance + standard_name: toa_radiance + cell_method: area:maximum coordinates: - longitude - latitude - bt_min: - name: bt_min + radiance_mean: + 
name: radiance_mean resolution: 32000 + wavelength: [] file_type: nc_fci_asr - file_key: bt_min - long_name: minimum_brightness_temperature_in_segment + file_key: radiance_mean + long_name: TOA mean Radiance + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - bt_std: - name: bt_std + radiance_mean_all_vis04: + name: radiance_mean_all_vis04 resolution: 32000 + wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr - file_key: bt_std - long_name: brightness_temperature_standard_deviation_in_segment + file_key: radiance_mean + category_id: 0 + channel_id: 0 + long_name: TOA mean Radiance over all pixels for vis04 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - radiance_max: - name: radiance_max + radiance_mean_clear_vis04: + name: radiance_mean_clear_vis04 resolution: 32000 + wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr - file_key: radiance_max - long_name: maximum_radiance_in_segment + file_key: radiance_mean + category_id: 1 + channel_id: 0 + long_name: TOA mean Radiance over clear pixels for vis04 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - radiance_mean: - name: radiance_mean + radiance_mean_cloudy_vis04: + name: radiance_mean_cloudy_vis04 resolution: 32000 + wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr file_key: radiance_mean - long_name: mean_radiance_in_segment + category_id: 2 + channel_id: 0 + long_name: TOA mean Radiance over cloudy pixels for vis04 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - radiance_min: - name: radiance_min + radiance_mean_all_vis05: + name: radiance_mean_all_vis05 resolution: 32000 + wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr - file_key: radiance_min - long_name: minimum_radiance_in_segment + file_key: radiance_mean + category_id: 0 + channel_id: 1 + long_name: TOA mean Radiance over all pixels for vis05 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - radiance_std: - name: radiance_std + radiance_mean_clear_vis05: + name: radiance_mean_clear_vis05 resolution: 32000 + wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr - file_key: radiance_std - long_name: radiance_standard_deviation_in_segment + file_key: radiance_mean + category_id: 1 + channel_id: 1 + long_name: TOA mean Radiance over clear pixels for vis05 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - reflectance_max: - name: reflectance_max + radiance_mean_cloudy_vis05: + name: radiance_mean_cloudy_vis05 resolution: 32000 + wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr - file_key: reflectance_max - long_name: maximum_reflectance_in_segment + file_key: radiance_mean + category_id: 2 + channel_id: 1 + long_name: TOA mean Radiance over cloudy pixels for vis05 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - reflectance_mean: - name: reflectance_mean + radiance_mean_all_vis06: + name: radiance_mean_all_vis06 resolution: 32000 + wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: mean_reflectance_in_segment + file_key: radiance_mean + category_id: 0 + channel_id: 2 + long_name: TOA mean Radiance over all pixels for vis06 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - reflectance_min: - name: reflectance_min + 
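Note that all of the per-category ASR entries in this hunk read the same file_key (radiance_mean) and differ only in category_id and channel_id, which select one (category, channel) plane of the variable. A sketch of the idea, with hypothetical file and dimension names that are assumptions rather than taken from the product specification:

import xarray as xr

# Hypothetical file and dimension names, for illustration only; the actual
# slicing is done by the reader based on category_id/channel_id.
radiance_mean = xr.open_dataset("fci_l2_asr_example.nc")["radiance_mean"]
vis06_clear = radiance_mean.isel(category=1, channel=2)  # category_id 1 = clear, channel_id 2 = vis06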
radiance_mean_clear_vis06: + name: radiance_mean_clear_vis06 resolution: 32000 + wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr - file_key: reflectance_min - long_name: minimum_reflectance_in_segment + file_key: radiance_mean + category_id: 1 + channel_id: 2 + long_name: TOA mean Radiance over clear pixels for vis06 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - reflectance_std: - name: reflectance_std + radiance_mean_cloudy_vis06: + name: radiance_mean_cloudy_vis06 resolution: 32000 + wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr - file_key: reflectance_std - long_name: reflectance_standard_deviation_in_segment + file_key: radiance_mean + category_id: 2 + channel_id: 2 + long_name: TOA mean Radiance over cloudy pixels for vis06 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - quality_bt: - name: quality_bt + radiance_mean_all_vis08: + name: radiance_mean_all_vis08 resolution: 32000 + wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr - file_key: quality_bt - long_name: brightness_temperature_quality - fill_value: -1 + file_key: radiance_mean + category_id: 0 + channel_id: 3 + long_name: TOA mean Radiance over all pixels for vis08 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - quality_reflectance: - name: quality_reflectance + radiance_mean_clear_vis08: + name: radiance_mean_clear_vis08 resolution: 32000 + wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: reflectance_quality - fill_value: -1 + file_key: radiance_mean + category_id: 1 + channel_id: 3 + long_name: TOA mean Radiance over clear pixels for vis08 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - quality_radiance: - name: quality_radiance + radiance_mean_cloudy_vis08: + name: radiance_mean_cloudy_vis08 resolution: 32000 + wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr - file_key: quality_radiance - long_name: radiance_quality - fill_value: -1 + file_key: radiance_mean + category_id: 2 + channel_id: 3 + long_name: TOA mean Radiance over cloudy pixels for vis08 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - land_pixel_percent: - name: land_pixel_percent + radiance_mean_all_vis09: + name: radiance_mean_all_vis09 resolution: 32000 + wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr - file_key: land_pixel_percent - long_name: land_pixel_percentage_in_segment + file_key: radiance_mean + category_id: 0 + channel_id: 4 + long_name: TOA mean Radiance over all pixels for vis09 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - water_pixel_percent: - name: water_pixel_percent + radiance_mean_clear_vis09: + name: radiance_mean_clear_vis09 resolution: 32000 + wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr - file_key: water_pixel_percent - long_name: water_pixel_percentage_in_segment + file_key: radiance_mean + category_id: 1 + channel_id: 4 + long_name: TOA mean Radiance over clear pixels for vis09 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - pixel_percentage: - name: pixel_percentage + radiance_mean_cloudy_vis09: + name: radiance_mean_cloudy_vis09 resolution: 32000 + wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr - file_key: pixel_percentage - long_name: 
pixel_percentage_used_in_segment + file_key: radiance_mean + category_id: 2 + channel_id: 4 + long_name: TOA mean Radiance over cloudy pixels for vis09 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - reflectance_mean_all_vis04: - name: reflectance_mean_all_vis04 + radiance_mean_all_nir13: + name: radiance_mean_all_nir13 resolution: 32000 - wavelength: [0.384, 0.444, 0.504] + wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_all - vis_channel_id: 0 + file_key: radiance_mean category_id: 0 + channel_id: 5 + long_name: TOA mean Radiance over all pixels for nir13 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - reflectance_mean_clear_vis04: - name: reflectance_mean_clear_vis04 - resolution: 32000 - wavelength: [0.384, 0.444, 0.504] - file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_clear - vis_channel_id: 0 - category_id: 1 - coordinates: - - longitude - - latitude - - reflectance_mean_cloudy_vis04: - name: reflectance_mean_cloudy_vis04 - resolution: 32000 - wavelength: [0.384, 0.444, 0.504] - file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_cloudy - vis_channel_id: 0 - category_id: 2 - coordinates: - - longitude - - latitude - - reflectance_mean_all_vis05: - name: reflectance_mean_all_vis05 - resolution: 32000 - wavelength: [0.47, 0.51, 0.55] - file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_all - vis_channel_id: 1 - category_id: 0 - coordinates: - - longitude - - latitude - - reflectance_mean_clear_vis05: - name: reflectance_mean_clear_vis05 - resolution: 32000 - wavelength: [0.47, 0.51, 0.55] - file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_clear - vis_channel_id: 1 - category_id: 1 - coordinates: - - longitude - - latitude - - reflectance_mean_cloudy_vis05: - name: reflectance_mean_cloudy_vis05 - resolution: 32000 - wavelength: [0.47, 0.51, 0.55] - file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_cloudy - vis_channel_id: 1 - category_id: 2 - coordinates: - - longitude - - latitude - - reflectance_mean_all_vis06: - name: reflectance_mean_all_vis06 - resolution: 32000 - wavelength: [0.59, 0.64, 0.69] - file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_all - vis_channel_id: 2 - category_id: 0 - coordinates: - - longitude - - latitude - - reflectance_mean_clear_vis06: - name: reflectance_mean_clear_vis06 - resolution: 32000 - wavelength: [0.59, 0.64, 0.69] - file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_clear - vis_channel_id: 2 - category_id: 1 - coordinates: - - longitude - - latitude - - reflectance_mean_cloudy_vis06: - name: reflectance_mean_cloudy_vis06 - resolution: 32000 - wavelength: [0.59, 0.64, 0.69] - file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_cloudy - vis_channel_id: 2 - category_id: 2 - coordinates: - - longitude - - latitude - - reflectance_mean_all_vis08: - name: reflectance_mean_all_vis08 - resolution: 32000 - wavelength: [0.815, 0.865, 0.915] - file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_all - vis_channel_id: 3 - category_id: 0 - coordinates: - - longitude - - latitude - - reflectance_mean_clear_vis08: - name: reflectance_mean_clear_vis08 - resolution: 32000 - wavelength: [0.815, 0.865, 0.915] - file_type: 
nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_clear - vis_channel_id: 3 - category_id: 1 - coordinates: - - longitude - - latitude - - reflectance_mean_cloudy_vis08: - name: reflectance_mean_cloudy_vis08 - resolution: 32000 - wavelength: [0.815, 0.865, 0.915] - file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_cloudy - vis_channel_id: 3 - category_id: 2 - coordinates: - - longitude - - latitude - - reflectance_mean_all_vis09: - name: reflectance_mean_all_vis09 - resolution: 32000 - wavelength: [0.894, 0.914, 0.934] - file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_all - vis_channel_id: 4 - category_id: 0 - coordinates: - - longitude - - latitude - - reflectance_mean_clear_vis09: - name: reflectance_mean_clear_vis09 - resolution: 32000 - wavelength: [0.894, 0.914, 0.934] - file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_clear - vis_channel_id: 4 - category_id: 1 - coordinates: - - longitude - - latitude - - reflectance_mean_cloudy_vis09: - name: reflectance_mean_cloudy_vis09 - resolution: 32000 - wavelength: [0.894, 0.914, 0.934] - file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_cloudy - vis_channel_id: 4 - category_id: 2 - coordinates: - - longitude - - latitude - - reflectance_mean_all_nir13: - name: reflectance_mean_all_nir13 + radiance_mean_clear_nir13: + name: radiance_mean_clear_nir13 resolution: 32000 wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_all - vis_channel_id: 5 - category_id: 0 - coordinates: - - longitude - - latitude - - reflectance_mean_clear_nir13: - name: reflectance_mean_clear_nir13 - resolution: 32000 - wavelength: [1.35, 1.38, 1.41] - file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_clear - vis_channel_id: 5 + file_key: radiance_mean category_id: 1 + channel_id: 5 + long_name: TOA mean Radiance over clear pixels for nir13 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - reflectance_mean_cloudy_nir13: - name: reflectance_mean_cloudy_nir13 + radiance_mean_cloudy_nir13: + name: radiance_mean_cloudy_nir13 resolution: 32000 wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_cloudy - vis_channel_id: 5 + file_key: radiance_mean category_id: 2 + channel_id: 5 + long_name: TOA mean Radiance over cloudy pixels for nir13 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - reflectance_mean_all_nir16: - name: reflectance_mean_all_nir16 + radiance_mean_all_nir16: + name: radiance_mean_all_nir16 resolution: 32000 wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_all - vis_channel_id: 6 + file_key: radiance_mean category_id: 0 + channel_id: 6 + long_name: TOA mean Radiance over all pixels for nir16 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - reflectance_mean_clear_nir16: - name: reflectance_mean_clear_nir16 + radiance_mean_clear_nir16: + name: radiance_mean_clear_nir16 resolution: 32000 wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_clear - vis_channel_id: 6 + file_key: radiance_mean category_id: 1 + channel_id: 6 + long_name: TOA mean Radiance over clear pixels for nir16 channel + standard_name: 
toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - reflectance_mean_cloudy_nir16: - name: reflectance_mean_cloudy_nir16 + radiance_mean_cloudy_nir16: + name: radiance_mean_cloudy_nir16 resolution: 32000 wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_cloudy - vis_channel_id: 6 + file_key: radiance_mean category_id: 2 + channel_id: 6 + long_name: TOA mean Radiance over cloudy pixels for nir16 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - reflectance_mean_all_nir22: - name: reflectance_mean_all_nir22 + radiance_mean_all_nir22: + name: radiance_mean_all_nir22 resolution: 32000 wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_all - vis_channel_id: 7 + file_key: radiance_mean category_id: 0 + channel_id: 7 + long_name: TOA mean Radiance over all pixels for nir22 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - reflectance_mean_clear_nir22: - name: reflectance_mean_clear_nir22 + radiance_mean_clear_nir22: + name: radiance_mean_clear_nir22 resolution: 32000 wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_clear - vis_channel_id: 7 + file_key: radiance_mean category_id: 1 + channel_id: 7 + long_name: TOA mean Radiance over clear pixels for nir22 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - reflectance_mean_cloudy_nir22: - name: reflectance_mean_cloudy_nir22 + radiance_mean_cloudy_nir22: + name: radiance_mean_cloudy_nir22 resolution: 32000 wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: reflectance_mean_cloudy - vis_channel_id: 7 + file_key: radiance_mean category_id: 2 + channel_id: 7 + long_name: TOA mean Radiance over cloudy pixels for nir22 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - bt_mean_all_ir38: - name: bt_mean_all_ir38 + radiance_mean_all_ir38: + name: radiance_mean_all_ir38 resolution: 32000 wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_all - ir_channel_id: 0 + file_key: radiance_mean category_id: 0 + channel_id: 8 + long_name: TOA mean Radiance over all pixels for ir38 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - bt_mean_clear_ir38: - name: bt_mean_clear_ir38 + radiance_mean_clear_ir38: + name: radiance_mean_clear_ir38 resolution: 32000 wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_clear - ir_channel_id: 0 + file_key: radiance_mean category_id: 1 + channel_id: 8 + long_name: TOA mean Radiance over clear pixels for ir38 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - bt_mean_cloudy_ir38: - name: bt_mean_cloudy_ir38 + radiance_mean_cloudy_ir38: + name: radiance_mean_cloudy_ir38 resolution: 32000 wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_cloudy - ir_channel_id: 0 + file_key: radiance_mean category_id: 2 + channel_id: 8 + long_name: TOA mean Radiance over cloudy pixels for ir38 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - bt_mean_all_wv63: - name: bt_mean_all_wv63 + radiance_mean_all_wv63: + name: radiance_mean_all_wv63 
resolution: 32000 wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_all - ir_channel_id: 1 + file_key: radiance_mean category_id: 0 + channel_id: 9 + long_name: TOA mean Radiance over all pixels for wv63 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - bt_mean_clear_wv63: - name: bt_mean_clear_wv63 + radiance_mean_clear_wv63: + name: radiance_mean_clear_wv63 resolution: 32000 wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_clear - ir_channel_id: 1 + file_key: radiance_mean category_id: 1 + channel_id: 9 + long_name: TOA mean Radiance over clear pixels for wv63 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - bt_mean_cloudy_wv63: - name: bt_mean_cloudy_wv63 + radiance_mean_cloudy_wv63: + name: radiance_mean_cloudy_wv63 resolution: 32000 wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_cloudy - ir_channel_id: 1 + file_key: radiance_mean category_id: 2 + channel_id: 9 + long_name: TOA mean Radiance over cloudy pixels for wv63 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - bt_mean_all_wv73: - name: bt_mean_all_wv73 + radiance_mean_all_wv73: + name: radiance_mean_all_wv73 resolution: 32000 wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_all - ir_channel_id: 2 + file_key: radiance_mean category_id: 0 + channel_id: 10 + long_name: TOA mean Radiance over all pixels for wv73 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - bt_mean_clear_wv73: - name: bt_mean_clear_wv73 + radiance_mean_clear_wv73: + name: radiance_mean_clear_wv73 resolution: 32000 wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_clear - ir_channel_id: 2 + file_key: radiance_mean category_id: 1 + channel_id: 10 + long_name: TOA mean Radiance over clear pixels for wv73 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - bt_mean_cloudy_wv73: - name: bt_mean_cloudy_wv73 + radiance_mean_cloudy_wv73: + name: radiance_mean_cloudy_wv73 resolution: 32000 wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_cloudy - ir_channel_id: 2 + file_key: radiance_mean category_id: 2 + channel_id: 10 + long_name: TOA mean Radiance over cloudy pixels for wv73 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - bt_mean_all_ir87: - name: bt_mean_all_ir87 + radiance_mean_all_ir87: + name: radiance_mean_all_ir87 resolution: 32000 wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_all - ir_channel_id: 3 + file_key: radiance_mean category_id: 0 + channel_id: 11 + long_name: TOA mean Radiance over all pixels for ir87 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - bt_mean_clear_ir87: - name: bt_mean_clear_ir87 + radiance_mean_clear_ir87: + name: radiance_mean_clear_ir87 resolution: 32000 wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_clear - ir_channel_id: 3 + file_key: radiance_mean category_id: 1 + channel_id: 11 + long_name: TOA mean Radiance over clear pixels for ir87 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - 
bt_mean_cloudy_ir87: - name: bt_mean_cloudy_ir87 + radiance_mean_cloudy_ir87: + name: radiance_mean_cloudy_ir87 resolution: 32000 wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_cloudy - ir_channel_id: 3 + file_key: radiance_mean category_id: 2 + channel_id: 11 + long_name: TOA mean Radiance over cloudy pixels for ir87 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - bt_mean_all_ir97: - name: bt_mean_all_ir97 + radiance_mean_all_ir97: + name: radiance_mean_all_ir97 resolution: 32000 wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_all - ir_channel_id: 4 + file_key: radiance_mean category_id: 0 + channel_id: 12 + long_name: TOA mean Radiance over all pixels for ir97 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - bt_mean_clear_ir97: - name: bt_mean_clear_ir97 + radiance_mean_clear_ir97: + name: radiance_mean_clear_ir97 resolution: 32000 wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_clear - ir_channel_id: 4 + file_key: radiance_mean category_id: 1 + channel_id: 12 + long_name: TOA mean Radiance over clear pixels for ir97 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - bt_mean_cloudy_ir97: - name: bt_mean_cloudy_ir97 + radiance_mean_cloudy_ir97: + name: radiance_mean_cloudy_ir97 resolution: 32000 wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_cloudy - ir_channel_id: 4 + file_key: radiance_mean category_id: 2 + channel_id: 12 + long_name: TOA mean Radiance over cloudy pixels for ir97 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - bt_mean_all_ir105: - name: bt_mean_all_ir105 + radiance_mean_all_ir105: + name: radiance_mean_all_ir105 resolution: 32000 wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_all - ir_channel_id: 5 + file_key: radiance_mean category_id: 0 + channel_id: 13 + long_name: TOA mean Radiance over all pixels for ir105 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - bt_mean_clear_ir105: - name: bt_mean_clear_ir105 + radiance_mean_clear_ir105: + name: radiance_mean_clear_ir105 resolution: 32000 wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_clear - ir_channel_id: 5 + file_key: radiance_mean category_id: 1 + channel_id: 13 + long_name: TOA mean Radiance over clear pixels for ir105 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - bt_mean_cloudy_ir105: - name: bt_mean_cloudy_ir105 + radiance_mean_cloudy_ir105: + name: radiance_mean_cloudy_ir105 resolution: 32000 wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_cloudy - ir_channel_id: 5 + file_key: radiance_mean category_id: 2 + channel_id: 13 + long_name: TOA mean Radiance over cloudy pixels for ir105 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - bt_mean_all_ir123: - name: bt_mean_all_ir123 + radiance_mean_all_ir123: + name: radiance_mean_all_ir123 resolution: 32000 wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_all - ir_channel_id: 6 + file_key: radiance_mean category_id: 0 + channel_id: 14 + long_name: TOA 
mean Radiance over all pixels for ir123 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - bt_mean_clear_ir123: - name: bt_mean_clear_ir123 + radiance_mean_clear_ir123: + name: radiance_mean_clear_ir123 resolution: 32000 wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_clear - ir_channel_id: 6 + file_key: radiance_mean category_id: 1 + channel_id: 14 + long_name: TOA mean Radiance over clear pixels for ir123 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - bt_mean_cloudy_ir123: - name: bt_mean_cloudy_ir123 + radiance_mean_cloudy_ir123: + name: radiance_mean_cloudy_ir123 resolution: 32000 wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_cloudy - ir_channel_id: 6 + file_key: radiance_mean category_id: 2 + channel_id: 14 + long_name: TOA mean Radiance over cloudy pixels for ir123 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - bt_mean_all_ir133: - name: bt_mean_all_ir133 + radiance_mean_all_ir133: + name: radiance_mean_all_ir133 resolution: 32000 wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_all - ir_channel_id: 7 + file_key: radiance_mean category_id: 0 + channel_id: 15 + long_name: TOA mean Radiance over all pixels for ir133 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - bt_mean_clear_ir133: - name: bt_mean_clear_ir133 + radiance_mean_clear_ir133: + name: radiance_mean_clear_ir133 resolution: 32000 wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_clear - ir_channel_id: 7 + file_key: radiance_mean category_id: 1 + channel_id: 15 + long_name: TOA mean Radiance over clear pixels for ir133 channel + standard_name: toa_radiance + cell_method: area:mean coordinates: - longitude - latitude - bt_mean_cloudy_ir133: - name: bt_mean_cloudy_ir133 + radiance_mean_cloudy_ir133: + name: radiance_mean_cloudy_ir133 resolution: 32000 wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr - file_key: bt_mean - long_name: bt_mean_cloudy - ir_channel_id: 7 + file_key: radiance_mean category_id: 2 + channel_id: 15 + long_name: TOA mean Radiance over cloudy pixels for ir133 channel + standard_name: toa_radiance + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_std: + name: radiance_std + resolution: 32000 + wavelength: [] + file_type: nc_fci_asr + file_key: radiance_std + long_name: TOA Radiance standard deviation over None pixels for None channel + standard_name: toa_radiance + cell_method: area:standard_deviation + coordinates: + - longitude + - latitude + + radiance_quality: + name: radiance_quality + resolution: 32000 + wavelength: [] + file_type: nc_fci_asr + file_key: radiance_quality + long_name: TOA Radiance Quality + standard_name: radiance_quality + coordinates: + - longitude + - latitude + + reflectance_min: + name: reflectance_min + resolution: 32000 + wavelength: [] + file_type: nc_fci_asr + file_key: reflectance_min + long_name: TOA min Reflectance + standard_name: toa_reflectance + cell_method: area:minimum + coordinates: + - longitude + - latitude + + reflectance_max: + name: reflectance_max + resolution: 32000 + wavelength: [] + file_type: nc_fci_asr + file_key: reflectance_max + long_name: TOA max Reflectance + standard_name: toa_reflectance + cell_method: area:maximum + coordinates: 
+ - longitude + - latitude + + reflectance_mean: + name: reflectance_mean + resolution: 32000 + wavelength: [] + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: TOA mean Reflectance + standard_name: toa_reflectance + cell_method: area:mean coordinates: - longitude - latitude - quality_reflectance_all_vis04: - name: quality_reflectance_all_vis04 + reflectance_mean_all_vis04: + name: reflectance_mean_all_vis04 resolution: 32000 wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_all - vis_channel_id: 0 + file_key: reflectance_mean category_id: 0 - fill_value: -1 + vis_channel_id: 0 + long_name: TOA mean Reflectance over all pixels for vis04 channel + standard_name: toa_reflectance + cell_method: area:mean coordinates: - longitude - latitude - quality_reflectance_clear_vis04: - name: quality_reflectance_clear_vis04 + reflectance_mean_clear_vis04: + name: reflectance_mean_clear_vis04 resolution: 32000 wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_clear - vis_channel_id: 0 + file_key: reflectance_mean category_id: 1 - fill_value: -1 + vis_channel_id: 0 + long_name: TOA mean Reflectance over clear pixels for vis04 channel + standard_name: toa_reflectance + cell_method: area:mean coordinates: - longitude - latitude - quality_reflectance_cloudy_vis04: - name: quality_reflectance_cloudy_vis04 + reflectance_mean_cloudy_vis04: + name: reflectance_mean_cloudy_vis04 resolution: 32000 wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_cloudy - vis_channel_id: 0 + file_key: reflectance_mean category_id: 2 - fill_value: -1 + vis_channel_id: 0 + long_name: TOA mean Reflectance over cloudy pixels for vis04 channel + standard_name: toa_reflectance + cell_method: area:mean coordinates: - longitude - latitude - quality_reflectance_all_vis05: - name: quality_reflectance_all_vis05 + reflectance_mean_all_vis05: + name: reflectance_mean_all_vis05 resolution: 32000 wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_all - vis_channel_id: 1 + file_key: reflectance_mean category_id: 0 - fill_value: -1 + vis_channel_id: 1 + long_name: TOA mean Reflectance over all pixels for vis05 channel + standard_name: toa_reflectance + cell_method: area:mean coordinates: - longitude - latitude - quality_reflectance_clear_vis05: - name: quality_reflectance_clear_vis05 + reflectance_mean_clear_vis05: + name: reflectance_mean_clear_vis05 resolution: 32000 wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_clear - vis_channel_id: 1 + file_key: reflectance_mean category_id: 1 - fill_value: -1 + vis_channel_id: 1 + long_name: TOA mean Reflectance over clear pixels for vis05 channel + standard_name: toa_reflectance + cell_method: area:mean coordinates: - longitude - latitude - quality_reflectance_cloudy_vis05: - name: quality_reflectance_cloudy_vis05 + reflectance_mean_cloudy_vis05: + name: reflectance_mean_cloudy_vis05 resolution: 32000 wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_cloudy - vis_channel_id: 1 + file_key: reflectance_mean category_id: 2 - fill_value: -1 + vis_channel_id: 1 + long_name: TOA mean Reflectance over cloudy pixels for vis05 channel + standard_name: toa_reflectance + cell_method: 
area:mean coordinates: - longitude - latitude - quality_reflectance_all_vis06: - name: quality_reflectance_all_vis06 + reflectance_mean_all_vis06: + name: reflectance_mean_all_vis06 resolution: 32000 wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_all - vis_channel_id: 2 + file_key: reflectance_mean category_id: 0 - fill_value: -1 + vis_channel_id: 2 + long_name: TOA mean Reflectance over all pixels for vis06 channel + standard_name: toa_reflectance + cell_method: area:mean coordinates: - longitude - latitude - quality_reflectance_clear_vis06: - name: quality_reflectance_clear_vis06 + reflectance_mean_clear_vis06: + name: reflectance_mean_clear_vis06 resolution: 32000 wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_clear - vis_channel_id: 2 + file_key: reflectance_mean category_id: 1 - fill_value: -1 + vis_channel_id: 2 + long_name: TOA mean Reflectance over clear pixels for vis06 channel + standard_name: toa_reflectance + cell_method: area:mean coordinates: - longitude - latitude - quality_reflectance_cloudy_vis06: - name: quality_reflectance_cloudy_vis06 + reflectance_mean_cloudy_vis06: + name: reflectance_mean_cloudy_vis06 resolution: 32000 wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_cloudy - vis_channel_id: 2 + file_key: reflectance_mean category_id: 2 - fill_value: -1 + vis_channel_id: 2 + long_name: TOA mean Reflectance over cloudy pixels for vis06 channel + standard_name: toa_reflectance + cell_method: area:mean coordinates: - longitude - latitude - quality_reflectance_all_vis08: - name: quality_reflectance_all_vis08 + reflectance_mean_all_vis08: + name: reflectance_mean_all_vis08 resolution: 32000 wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_all - vis_channel_id: 3 + file_key: reflectance_mean category_id: 0 - fill_value: -1 + vis_channel_id: 3 + long_name: TOA mean Reflectance over all pixels for vis08 channel + standard_name: toa_reflectance + cell_method: area:mean coordinates: - longitude - latitude - quality_reflectance_clear_vis08: - name: quality_reflectance_clear_vis08 + reflectance_mean_clear_vis08: + name: reflectance_mean_clear_vis08 resolution: 32000 wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_clear - vis_channel_id: 3 + file_key: reflectance_mean category_id: 1 - fill_value: -1 + vis_channel_id: 3 + long_name: TOA mean Reflectance over clear pixels for vis08 channel + standard_name: toa_reflectance + cell_method: area:mean coordinates: - longitude - latitude - quality_reflectance_cloudy_vis08: - name: quality_reflectance_cloudy_vis08 + reflectance_mean_cloudy_vis08: + name: reflectance_mean_cloudy_vis08 resolution: 32000 wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_cloudy - vis_channel_id: 3 + file_key: reflectance_mean category_id: 2 - fill_value: -1 + vis_channel_id: 3 + long_name: TOA mean Reflectance over cloudy pixels for vis08 channel + standard_name: toa_reflectance + cell_method: area:mean coordinates: - longitude - latitude - quality_reflectance_all_vis09: - name: quality_reflectance_all_vis09 + reflectance_mean_all_vis09: + name: reflectance_mean_all_vis09 resolution: 32000 wavelength: [0.894, 0.914, 0.934] file_type: 
nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_all - vis_channel_id: 4 + file_key: reflectance_mean category_id: 0 - fill_value: -1 + vis_channel_id: 4 + long_name: TOA mean Reflectance over all pixels for vis09 channel + standard_name: toa_reflectance + cell_method: area:mean coordinates: - longitude - latitude - quality_reflectance_clear_vis09: - name: quality_reflectance_clear_vis09 + reflectance_mean_clear_vis09: + name: reflectance_mean_clear_vis09 resolution: 32000 wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_clear - vis_channel_id: 4 + file_key: reflectance_mean category_id: 1 - fill_value: -1 + vis_channel_id: 4 + long_name: TOA mean Reflectance over clear pixels for vis09 channel + standard_name: toa_reflectance + cell_method: area:mean coordinates: - longitude - latitude - quality_reflectance_cloudy_vis09: - name: quality_reflectance_cloudy_vis09 + reflectance_mean_cloudy_vis09: + name: reflectance_mean_cloudy_vis09 resolution: 32000 wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_cloudy - vis_channel_id: 4 + file_key: reflectance_mean category_id: 2 - fill_value: -1 + vis_channel_id: 4 + long_name: TOA mean Reflectance over cloudy pixels for vis09 channel + standard_name: toa_reflectance + cell_method: area:mean coordinates: - longitude - latitude - quality_reflectance_all_nir13: - name: quality_reflectance_all_nir13 + reflectance_mean_all_nir13: + name: reflectance_mean_all_nir13 resolution: 32000 wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_all - vis_channel_id: 5 + file_key: reflectance_mean category_id: 0 - fill_value: -1 + vis_channel_id: 5 + long_name: TOA mean Reflectance over all pixels for nir13 channel + standard_name: toa_reflectance + cell_method: area:mean coordinates: - longitude - latitude - quality_reflectance_clear_nir13: - name: quality_reflectance_clear_nir13 + reflectance_mean_clear_nir13: + name: reflectance_mean_clear_nir13 resolution: 32000 wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_clear - vis_channel_id: 5 + file_key: reflectance_mean category_id: 1 - fill_value: -1 + vis_channel_id: 5 + long_name: TOA mean Reflectance over clear pixels for nir13 channel + standard_name: toa_reflectance + cell_method: area:mean coordinates: - longitude - latitude - quality_reflectance_cloudy_nir13: - name: quality_reflectance_cloudy_nir13 + reflectance_mean_cloudy_nir13: + name: reflectance_mean_cloudy_nir13 resolution: 32000 wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_cloudy - vis_channel_id: 5 + file_key: reflectance_mean category_id: 2 - fill_value: -1 + vis_channel_id: 5 + long_name: TOA mean Reflectance over cloudy pixels for nir13 channel + standard_name: toa_reflectance + cell_method: area:mean coordinates: - longitude - latitude - quality_reflectance_all_nir16: - name: quality_reflectance_all_nir16 + reflectance_mean_all_nir16: + name: reflectance_mean_all_nir16 resolution: 32000 wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_all - vis_channel_id: 6 + file_key: reflectance_mean category_id: 0 - fill_value: -1 + vis_channel_id: 6 + long_name: TOA mean Reflectance over all pixels for nir16 channel + 
standard_name: toa_reflectance + cell_method: area:mean coordinates: - longitude - latitude - quality_reflectance_clear_nir16: - name: quality_reflectance_clear_nir16 + reflectance_mean_clear_nir16: + name: reflectance_mean_clear_nir16 resolution: 32000 wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_clear - vis_channel_id: 6 + file_key: reflectance_mean category_id: 1 - fill_value: -1 + vis_channel_id: 6 + long_name: TOA mean Reflectance over clear pixels for nir16 channel + standard_name: toa_reflectance + cell_method: area:mean coordinates: - longitude - latitude - quality_reflectance_cloudy_nir16: - name: quality_reflectance_cloudy_nir16 + reflectance_mean_cloudy_nir16: + name: reflectance_mean_cloudy_nir16 resolution: 32000 wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_cloudy - vis_channel_id: 6 + file_key: reflectance_mean category_id: 2 - fill_value: -1 + vis_channel_id: 6 + long_name: TOA mean Reflectance over cloudy pixels for nir16 channel + standard_name: toa_reflectance + cell_method: area:mean coordinates: - longitude - latitude - quality_reflectance_all_nir22: - name: quality_reflectance_all_nir22 + reflectance_mean_all_nir22: + name: reflectance_mean_all_nir22 resolution: 32000 wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_all - vis_channel_id: 7 + file_key: reflectance_mean category_id: 0 - fill_value: -1 + vis_channel_id: 7 + long_name: TOA mean Reflectance over all pixels for nir22 channel + standard_name: toa_reflectance + cell_method: area:mean coordinates: - longitude - latitude - quality_reflectance_clear_nir22: - name: quality_reflectance_clear_nir22 + reflectance_mean_clear_nir22: + name: reflectance_mean_clear_nir22 resolution: 32000 wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_clear - vis_channel_id: 7 + file_key: reflectance_mean category_id: 1 - fill_value: -1 + vis_channel_id: 7 + long_name: TOA mean Reflectance over clear pixels for nir22 channel + standard_name: toa_reflectance + cell_method: area:mean coordinates: - longitude - latitude - quality_reflectance_cloudy_nir22: - name: quality_reflectance_cloudy_nir22 + reflectance_mean_cloudy_nir22: + name: reflectance_mean_cloudy_nir22 resolution: 32000 wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr - file_key: quality_reflectance - long_name: quality_reflectance_cloudy - vis_channel_id: 7 + file_key: reflectance_mean category_id: 2 - fill_value: -1 + vis_channel_id: 7 + long_name: TOA mean Reflectance over cloudy pixels for nir22 channel + standard_name: toa_reflectance + cell_method: area:mean + coordinates: + - longitude + - latitude + + reflectance_std: + name: reflectance_std + resolution: 32000 + wavelength: [] + file_type: nc_fci_asr + file_key: reflectance_std + long_name: TOA Reflectance standard deviation + standard_name: toa_reflectance + cell_method: area:standard_deviation + coordinates: + - longitude + - latitude + + reflectance_quality: + name: reflectance_quality + resolution: 32000 + wavelength: [] + file_type: nc_fci_asr + file_key: reflectance_quality + long_name: TOA Reflectance Quality + standard_name: reflectance_quality + coordinates: + - longitude + - latitude + + bt_min: + name: bt_min + resolution: 32000 + wavelength: [] + file_type: nc_fci_asr + file_key: bt_min + long_name: TOA min Brightness Temperature 
+ standard_name: toa_brightness_temperature + cell_method: area:minimum + coordinates: + - longitude + - latitude + + bt_max: + name: bt_max + resolution: 32000 + wavelength: [] + file_type: nc_fci_asr + file_key: bt_max + long_name: TOA max Brightness Temperature + standard_name: toa_brightness_temperature + cell_method: area:maximum + coordinates: + - longitude + - latitude + + bt_mean: + name: bt_mean + resolution: 32000 + wavelength: [] + file_type: nc_fci_asr + file_key: bt_mean + long_name: TOA mean Brightness Temperature + standard_name: toa_brightness_temperature + cell_method: area:mean coordinates: - longitude - latitude - quality_bt_all_ir38: - name: quality_bt_all_ir38 + bt_mean_all_ir38: + name: bt_mean_all_ir38 resolution: 32000 wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_all - ir_channel_id: 0 + file_key: bt_mean category_id: 0 - fill_value: -1 + ir_channel_id: 0 + long_name: TOA mean Brightness Temperature over all pixels for ir38 channel + standard_name: toa_brightness_temperature + cell_method: area:mean coordinates: - longitude - latitude - quality_bt_clear_ir38: - name: quality_bt_clear_ir38 + bt_mean_clear_ir38: + name: bt_mean_clear_ir38 resolution: 32000 wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_clear - ir_channel_id: 0 + file_key: bt_mean category_id: 1 - fill_value: -1 + ir_channel_id: 0 + long_name: TOA mean Brightness Temperature over clear pixels for ir38 channel + standard_name: toa_brightness_temperature + cell_method: area:mean coordinates: - longitude - latitude - quality_bt_cloudy_ir38: - name: quality_bt_cloudy_ir38 + bt_mean_cloudy_ir38: + name: bt_mean_cloudy_ir38 resolution: 32000 wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_cloudy - ir_channel_id: 0 + file_key: bt_mean category_id: 2 - fill_value: -1 + ir_channel_id: 0 + long_name: TOA mean Brightness Temperature over cloudy pixels for ir38 channel + standard_name: toa_brightness_temperature + cell_method: area:mean coordinates: - longitude - latitude - quality_bt_all_wv63: - name: quality_bt_all_wv63 + bt_mean_all_wv63: + name: bt_mean_all_wv63 resolution: 32000 wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_all - ir_channel_id: 1 + file_key: bt_mean category_id: 0 - fill_value: -1 + ir_channel_id: 1 + long_name: TOA mean Brightness Temperature over all pixels for wv63 channel + standard_name: toa_brightness_temperature + cell_method: area:mean coordinates: - longitude - latitude - quality_bt_clear_wv63: - name: quality_bt_clear_wv63 + bt_mean_clear_wv63: + name: bt_mean_clear_wv63 resolution: 32000 wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_clear - ir_channel_id: 1 + file_key: bt_mean category_id: 1 - fill_value: -1 + ir_channel_id: 1 + long_name: TOA mean Brightness Temperature over clear pixels for wv63 channel + standard_name: toa_brightness_temperature + cell_method: area:mean coordinates: - longitude - latitude - quality_bt_cloudy_wv63: - name: quality_bt_cloudy_wv63 + bt_mean_cloudy_wv63: + name: bt_mean_cloudy_wv63 resolution: 32000 wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_cloudy - ir_channel_id: 1 + file_key: bt_mean category_id: 2 - fill_value: -1 + ir_channel_id: 1 + long_name: TOA mean Brightness Temperature over cloudy pixels for wv63 channel + standard_name: toa_brightness_temperature + 
cell_method: area:mean coordinates: - longitude - latitude - quality_bt_all_wv73: - name: quality_bt_all_wv73 + bt_mean_all_wv73: + name: bt_mean_all_wv73 resolution: 32000 wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_all - ir_channel_id: 2 + file_key: bt_mean category_id: 0 - fill_value: -1 + ir_channel_id: 2 + long_name: TOA mean Brightness Temperature over all pixels for wv73 channel + standard_name: toa_brightness_temperature + cell_method: area:mean coordinates: - longitude - latitude - quality_bt_clear_wv73: - name: quality_bt_clear_wv73 + bt_mean_clear_wv73: + name: bt_mean_clear_wv73 resolution: 32000 wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_clear - ir_channel_id: 2 + file_key: bt_mean category_id: 1 - fill_value: -1 + ir_channel_id: 2 + long_name: TOA mean Brightness Temperature over clear pixels for wv73 channel + standard_name: toa_brightness_temperature + cell_method: area:mean coordinates: - longitude - latitude - quality_bt_cloudy_wv73: - name: quality_bt_cloudy_wv73 + bt_mean_cloudy_wv73: + name: bt_mean_cloudy_wv73 resolution: 32000 wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_cloudy - ir_channel_id: 2 + file_key: bt_mean category_id: 2 - fill_value: -1 + ir_channel_id: 2 + long_name: TOA mean Brightness Temperature over cloudy pixels for wv73 channel + standard_name: toa_brightness_temperature + cell_method: area:mean coordinates: - longitude - latitude - quality_bt_all_ir87: - name: quality_bt_all_ir87 + bt_mean_all_ir87: + name: bt_mean_all_ir87 resolution: 32000 wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_all - ir_channel_id: 3 + file_key: bt_mean category_id: 0 - fill_value: -1 + ir_channel_id: 3 + long_name: TOA mean Brightness Temperature over all pixels for ir87 channel + standard_name: toa_brightness_temperature + cell_method: area:mean coordinates: - longitude - latitude - quality_bt_clear_ir87: - name: quality_bt_clear_ir87 + bt_mean_clear_ir87: + name: bt_mean_clear_ir87 resolution: 32000 wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_clear - ir_channel_id: 3 + file_key: bt_mean category_id: 1 - fill_value: -1 + ir_channel_id: 3 + long_name: TOA mean Brightness Temperature over clear pixels for ir87 channel + standard_name: toa_brightness_temperature + cell_method: area:mean coordinates: - longitude - latitude - quality_bt_cloudy_ir87: - name: quality_bt_cloudy_ir87 + bt_mean_cloudy_ir87: + name: bt_mean_cloudy_ir87 resolution: 32000 wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_cloudy - ir_channel_id: 3 + file_key: bt_mean category_id: 2 - fill_value: -1 + ir_channel_id: 3 + long_name: TOA mean Brightness Temperature over cloudy pixels for ir87 channel + standard_name: toa_brightness_temperature + cell_method: area:mean coordinates: - longitude - latitude - quality_bt_all_ir97: - name: quality_bt_all_ir97 + bt_mean_all_ir97: + name: bt_mean_all_ir97 resolution: 32000 wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_all - ir_channel_id: 4 + file_key: bt_mean category_id: 0 - fill_value: -1 + ir_channel_id: 4 + long_name: TOA mean Brightness Temperature over all pixels for ir97 channel + standard_name: toa_brightness_temperature + cell_method: area:mean coordinates: - longitude - latitude - 
quality_bt_clear_ir97: - name: quality_bt_clear_ir97 + bt_mean_clear_ir97: + name: bt_mean_clear_ir97 resolution: 32000 wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_clear - ir_channel_id: 4 + file_key: bt_mean category_id: 1 - fill_value: -1 + ir_channel_id: 4 + long_name: TOA mean Brightness Temperature over clear pixels for ir97 channel + standard_name: toa_brightness_temperature + cell_method: area:mean coordinates: - longitude - latitude - quality_bt_cloudy_ir97: - name: quality_bt_cloudy_ir97 + bt_mean_cloudy_ir97: + name: bt_mean_cloudy_ir97 resolution: 32000 wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_cloudy - ir_channel_id: 4 + file_key: bt_mean category_id: 2 - fill_value: -1 + ir_channel_id: 4 + long_name: TOA mean Brightness Temperature over cloudy pixels for ir97 channel + standard_name: toa_brightness_temperature + cell_method: area:mean coordinates: - longitude - latitude - quality_bt_all_ir105: - name: quality_bt_all_ir105 + bt_mean_all_ir105: + name: bt_mean_all_ir105 resolution: 32000 wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_all - ir_channel_id: 5 + file_key: bt_mean category_id: 0 - fill_value: -1 + ir_channel_id: 5 + long_name: TOA mean Brightness Temperature over all pixels for ir105 channel + standard_name: toa_brightness_temperature + cell_method: area:mean coordinates: - longitude - latitude - quality_bt_clear_ir105: - name: quality_bt_clear_ir105 + bt_mean_clear_ir105: + name: bt_mean_clear_ir105 resolution: 32000 wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_clear - ir_channel_id: 5 + file_key: bt_mean category_id: 1 - fill_value: -1 + ir_channel_id: 5 + long_name: TOA mean Brightness Temperature over clear pixels for ir105 channel + standard_name: toa_brightness_temperature + cell_method: area:mean coordinates: - longitude - latitude - quality_bt_cloudy_ir105: - name: quality_bt_cloudy_ir105 + bt_mean_cloudy_ir105: + name: bt_mean_cloudy_ir105 resolution: 32000 wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_cloudy - ir_channel_id: 5 + file_key: bt_mean category_id: 2 - fill_value: -1 + ir_channel_id: 5 + long_name: TOA mean Brightness Temperature over cloudy pixels for ir105 channel + standard_name: toa_brightness_temperature + cell_method: area:mean coordinates: - longitude - latitude - quality_bt_all_ir123: - name: quality_bt_all_ir123 + bt_mean_all_ir123: + name: bt_mean_all_ir123 resolution: 32000 wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_all - ir_channel_id: 6 + file_key: bt_mean category_id: 0 - fill_value: -1 + ir_channel_id: 6 + long_name: TOA mean Brightness Temperature over all pixels for ir123 channel + standard_name: toa_brightness_temperature + cell_method: area:mean coordinates: - longitude - latitude - quality_bt_clear_ir123: - name: quality_bt_clear_ir123 + bt_mean_clear_ir123: + name: bt_mean_clear_ir123 resolution: 32000 wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_clear - ir_channel_id: 6 + file_key: bt_mean category_id: 1 - fill_value: -1 + ir_channel_id: 6 + long_name: TOA mean Brightness Temperature over clear pixels for ir123 channel + standard_name: toa_brightness_temperature + cell_method: area:mean coordinates: - longitude - latitude - quality_bt_cloudy_ir123: - name: 
quality_bt_cloudy_ir123 + bt_mean_cloudy_ir123: + name: bt_mean_cloudy_ir123 resolution: 32000 wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_cloudy - ir_channel_id: 6 + file_key: bt_mean category_id: 2 - fill_value: -1 + ir_channel_id: 6 + long_name: TOA mean Brightness Temperature over cloudy pixels for ir123 channel + standard_name: toa_brightness_temperature + cell_method: area:mean coordinates: - longitude - latitude - quality_bt_all_ir133: - name: quality_bt_all_ir133 + bt_mean_all_ir133: + name: bt_mean_all_ir133 resolution: 32000 wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_all - ir_channel_id: 7 + file_key: bt_mean category_id: 0 - fill_value: -1 + ir_channel_id: 7 + long_name: TOA mean Brightness Temperature over all pixels for ir133 channel + standard_name: toa_brightness_temperature + cell_method: area:mean coordinates: - longitude - latitude - quality_bt_clear_ir133: - name: quality_bt_clear_ir133 + bt_mean_clear_ir133: + name: bt_mean_clear_ir133 resolution: 32000 wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_clear - ir_channel_id: 7 + file_key: bt_mean category_id: 1 - fill_value: -1 + ir_channel_id: 7 + long_name: TOA mean Brightness Temperature over clear pixels for ir133 channel + standard_name: toa_brightness_temperature + cell_method: area:mean coordinates: - longitude - latitude - quality_bt_cloudy_ir133: - name: quality_bt_cloudy_ir133 + bt_mean_cloudy_ir133: + name: bt_mean_cloudy_ir133 resolution: 32000 wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr - file_key: quality_bt - long_name: quality_bt_cloudy - ir_channel_id: 7 + file_key: bt_mean category_id: 2 - fill_value: -1 + ir_channel_id: 7 + long_name: TOA mean Brightness Temperature over cloudy pixels for ir133 channel + standard_name: toa_brightness_temperature + cell_method: area:mean coordinates: - longitude - latitude - pixel_percentage_all: - name: pixel_percentage_all + bt_std: + name: bt_std resolution: 32000 + wavelength: [] file_type: nc_fci_asr - file_key: pixel_percentage - long_name: pixel_percentage_all - category_id: 0 + file_key: bt_std + long_name: TOA Brightness Temperature standard deviation + standard_name: toa_brightness_temperature + cell_method: area:standard_deviation coordinates: - longitude - latitude - pixel_percentage_clear: - name: pixel_percentage_clear + bt_quality: + name: bt_quality resolution: 32000 + wavelength: [] file_type: nc_fci_asr - file_key: pixel_percentage - long_name: pixel_percentage_clear - category_id: 1 + file_key: bt_quality + long_name: TOA Brightness Temperature Quality + standard_name: brightness_temperature_quality coordinates: - longitude - latitude - pixel_percentage_cloudy: - name: pixel_percentage_cloudy + pixel_percentage: + name: pixel_percentage resolution: 32000 file_type: nc_fci_asr file_key: pixel_percentage - long_name: pixel_percentage_cloudy - category_id: 2 + standard_name: pixels_used_fraction + coordinates: + - longitude + - latitude + + land_pixel_percent: + name: land_pixel_percent + resolution: 32000 + file_type: nc_fci_asr + file_key: land_pixel_percent + standard_name: land_area_fraction + coordinates: + - longitude + - latitude + + water_pixel_percent: + name: water_pixel_percent + resolution: 32000 + file_type: nc_fci_asr + file_key: water_pixel_percent + standard_name: water_area_fraction coordinates: - longitude - latitude @@ -2735,19 +2971,19 @@ datasets: name: product_quality_asr 
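The block of renames above replaces the old quality_* entries with per-channel, per-category statistics datasets, where file_key names the underlying NetCDF variable and the channel_id/ir_channel_id/vis_channel_id and category_id values pick out one slice of it. As a quick sanity check of the new names, a minimal loading sketch with Satpy's Scene API follows; the filename is a placeholder, and the two dataset names are taken from the definitions above:

    from satpy import Scene

    # Placeholder path to an FCI L2 ASR file (the nc_fci_asr file type above).
    filenames = ["MTG_FCI_L2_ASR_sample.nc"]

    # The reader name matches the YAML file being patched: fci_l2_nc.
    scn = Scene(filenames=filenames, reader="fci_l2_nc")
    # Dataset names defined in the YAML above.
    scn.load(["radiance_mean_all_ir105", "bt_mean_all_ir87"])

    bt = scn["bt_mean_all_ir87"]
    # The YAML metadata (long_name, standard_name, cell_method) is carried in .attrs.
    print(bt.attrs["long_name"], bt.attrs.get("cell_method"))

Several entries share the same file_key (e.g. radiance_mean or bt_mean), so this YAML fan-out is what turns one multi-dimensional NetCDF variable into many named, individually loadable 2-D datasets.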
file_type: nc_fci_asr file_key: product_quality - long_name: product_quality_index + standard_name: product_quality product_completeness_asr: name: product_completeness_asr file_type: nc_fci_asr file_key: product_completeness - long_name: product_completeness_index + standard_name: product_completeness product_timeliness_asr: name: product_timeliness_asr file_type: nc_fci_asr file_key: product_timeliness - long_name: product_timeliness_index + standard_name: product_timeliness # AMV Intermediate Product intm_latitude: @@ -2992,16 +3228,16 @@ datasets: name: product_quality file_type: nc_fci_amv file_key: product_quality - long_name: product_quality_index + standard_name: product_quality product_completeness: name: product_completeness file_type: nc_fci_amv file_key: product_completeness - long_name: product_completeness_index + standard_name: product_completeness product_timeliness: name: product_timeliness file_type: nc_fci_amv file_key: product_timeliness - long_name: product_timeliness_index + standard_name: product_timeliness From b6a3523940fbc798de2eac7b22613aa129c550f1 Mon Sep 17 00:00:00 2001 From: Olivier Samain Date: Mon, 4 Dec 2023 11:41:47 +0100 Subject: [PATCH 1030/1416] remove unnecessary wavelength attributes --- satpy/etc/readers/fci_l2_nc.yaml | 27 ++++++++++++--------------- 1 file changed, 12 insertions(+), 15 deletions(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index ba8d3b1c46..e5f52a29ae 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -1308,7 +1308,6 @@ datasets: radiance_min: name: radiance_min resolution: 32000 - wavelength: [] file_type: nc_fci_asr file_key: radiance_min long_name: TOA min Radiance @@ -1321,7 +1320,6 @@ datasets: radiance_max: name: radiance_max resolution: 32000 - wavelength: [] file_type: nc_fci_asr file_key: radiance_max long_name: TOA max Radiance @@ -1334,7 +1332,6 @@ datasets: radiance_mean: name: radiance_mean resolution: 32000 - wavelength: [] file_type: nc_fci_asr file_key: radiance_mean long_name: TOA mean Radiance @@ -2067,7 +2064,7 @@ datasets: radiance_std: name: radiance_std resolution: 32000 - wavelength: [] + wavelength: [0,0,0] file_type: nc_fci_asr file_key: radiance_std long_name: TOA Radiance standard deviation over None pixels for None channel @@ -2080,7 +2077,7 @@ datasets: radiance_quality: name: radiance_quality resolution: 32000 - wavelength: [] + wavelength: [0,0,0] file_type: nc_fci_asr file_key: radiance_quality long_name: TOA Radiance Quality @@ -2092,7 +2089,7 @@ datasets: reflectance_min: name: reflectance_min resolution: 32000 - wavelength: [] + wavelength: [0,0,0] file_type: nc_fci_asr file_key: reflectance_min long_name: TOA min Reflectance @@ -2105,7 +2102,7 @@ datasets: reflectance_max: name: reflectance_max resolution: 32000 - wavelength: [] + wavelength: [0,0,0] file_type: nc_fci_asr file_key: reflectance_max long_name: TOA max Reflectance @@ -2118,7 +2115,7 @@ datasets: reflectance_mean: name: reflectance_mean resolution: 32000 - wavelength: [] + wavelength: [0,0,0] file_type: nc_fci_asr file_key: reflectance_mean long_name: TOA mean Reflectance @@ -2491,7 +2488,7 @@ datasets: reflectance_std: name: reflectance_std resolution: 32000 - wavelength: [] + wavelength: [0,0,0] file_type: nc_fci_asr file_key: reflectance_std long_name: TOA Reflectance standard deviation @@ -2504,7 +2501,7 @@ datasets: reflectance_quality: name: reflectance_quality resolution: 32000 - wavelength: [] + wavelength: [0,0,0] file_type: nc_fci_asr file_key: 
reflectance_quality long_name: TOA Reflectance Quality @@ -2516,7 +2513,7 @@ datasets: bt_min: name: bt_min resolution: 32000 - wavelength: [] + wavelength: [0,0,0] file_type: nc_fci_asr file_key: bt_min long_name: TOA min Brightness Temperature @@ -2529,7 +2526,7 @@ datasets: bt_max: name: bt_max resolution: 32000 - wavelength: [] + wavelength: [0,0,0] file_type: nc_fci_asr file_key: bt_max long_name: TOA max Brightness Temperature @@ -2542,7 +2539,7 @@ datasets: bt_mean: name: bt_mean resolution: 32000 - wavelength: [] + wavelength: [0,0,0] file_type: nc_fci_asr file_key: bt_mean long_name: TOA mean Brightness Temperature @@ -2915,7 +2912,7 @@ datasets: bt_std: name: bt_std resolution: 32000 - wavelength: [] + wavelength: [0,0,0] file_type: nc_fci_asr file_key: bt_std long_name: TOA Brightness Temperature standard deviation @@ -2928,7 +2925,7 @@ datasets: bt_quality: name: bt_quality resolution: 32000 - wavelength: [] + wavelength: [0,0,0] file_type: nc_fci_asr file_key: bt_quality long_name: TOA Brightness Temperature Quality From 6c310634f54f0d017f3d338d896315ea8596a7fe Mon Sep 17 00:00:00 2001 From: Olivier Samain Date: Tue, 12 Dec 2023 17:43:48 +0100 Subject: [PATCH 1031/1416] Various fixes --- satpy/etc/readers/fci_l2_nc.yaml | 34 ++++++++------------------------ satpy/readers/fci_l2_nc.py | 27 +++++++++++++++---------- 2 files changed, 25 insertions(+), 36 deletions(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index e5f52a29ae..df5111334a 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -1194,7 +1194,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 15 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + flag_meanings: ['Opaqueness undetected','Opaqueness detected'] standard_name: status_flag cloud_test_cmrt1: @@ -1214,7 +1214,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 17 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + flag_meanings: ['Clear unchanged','Cloud detected'] standard_name: status_flag cloud_test_cmrt3: @@ -1224,7 +1224,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 18 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + flag_meanings: ['Clear unchanged','Cloud detected'] standard_name: status_flag cloud_test_cmrt4: @@ -1234,7 +1234,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 19 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + flag_meanings: ['Clear unchanged','Cloud detected'] standard_name: status_flag cloud_test_cmrt5: @@ -1244,7 +1244,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 20 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + flag_meanings: ['Cloud undetected','Cloud unchanged'] standard_name: status_flag cloud_test_dust: @@ -1254,7 +1254,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 21 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + flag_meanings: ['Dust undetected','Dust detected'] standard_name: status_flag cloud_test_ash: @@ -1264,7 +1264,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 22 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + flag_meanings: ['Ash undetected','Ash detected'] standard_name: status_flag cloud_test_dust_ash: @@ -1274,7 +1274,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 23 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + 
flag_meanings: ['Dust detected','Ash detected'] standard_name: status_flag cloud_test_cmrt6: @@ -1286,24 +1286,6 @@ datasets: standard_name: status_flag import_enum_information: True - product_quality_clmtest: - name: product_quality_clmtest - file_type: nc_fci_test_clm - file_key: product_quality - standard_name: product_quality - - product_completeness_clmtest: - name: product_completeness_clmtest - file_type: nc_fci_test_clm - file_key: product_completeness - standard_name: product_completeness - - product_timeliness_clmtest: - name: product_timeliness_clmtest - file_type: nc_fci_test_clm - file_key: product_timeliness - standard_name: product_timeliness - # ASR radiance_min: name: radiance_min diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index 83f560c35a..d5b6ce95bb 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -18,9 +18,9 @@ import logging from contextlib import suppress +import netCDF4 import numpy as np import xarray as xr -import netCDF4 from pyresample import geometry from satpy._compat import cached_property @@ -92,22 +92,29 @@ def _set_attributes(self, variable, dataset_info, segmented=False): variable = variable.rename({ydim: "y", xdim: "x"}) variable.attrs.setdefault("units", None) + if "unit" in variable.attrs: + # Need to convert this attribute to the expected satpy entry + variable.attrs.update({"units": variable.attrs["unit"]}) + del variable.attrs["unit"] + variable.attrs.update(dataset_info) variable.attrs.update(self._get_global_attributes()) - - if ('import_enum_information' in dataset_info): - if (dataset_info['import_enum_information']): - netCDF4_dataset = netCDF4.Dataset(self.filename, 'r') - # This currently assumes a flat netCDF file - enum = netCDF4_dataset.variables[dataset_info['file_key']].datatype.enum_dict + + import_enum_information = dataset_info.get("import_enum_information", False) + if (import_enum_information): + netCDF4_dataset = netCDF4.Dataset(self.filename, "r") + # This currently assumes a flat netCDF file + dataType=netCDF4_dataset.variables[dataset_info["file_key"]].datatype + if (hasattr(dataType,"enum_dict")): + enum = dataType.enum_dict flag_values = [] flag_meanings = [] for item in enumerate(enum): flag_values.append(item[0]) flag_meanings.append(item[1]) - - variable.attrs['flag_values'] = flag_values - variable.attrs['flag_meanings'] = flag_meanings + + variable.attrs["flag_values"] = flag_values + variable.attrs["flag_meanings"] = flag_meanings netCDF4_dataset.close() return variable From 0c79715fdd67159d9efd454797ed763a90c39851 Mon Sep 17 00:00:00 2001 From: Olivier Samain Date: Wed, 13 Dec 2023 17:25:16 +0100 Subject: [PATCH 1032/1416] Fix ASR long names --- satpy/etc/readers/fci_l2_nc.yaml | 434 +++++++++++++++---------------- 1 file changed, 217 insertions(+), 217 deletions(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index df5111334a..a7d64d5f53 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -1292,7 +1292,7 @@ datasets: resolution: 32000 file_type: nc_fci_asr file_key: radiance_min - long_name: TOA min Radiance + long_name: TOA Radiance segment min standard_name: toa_radiance cell_method: area:minimum coordinates: @@ -1304,7 +1304,7 @@ datasets: resolution: 32000 file_type: nc_fci_asr file_key: radiance_max - long_name: TOA max Radiance + long_name: TOA Radiance segment max standard_name: toa_radiance cell_method: area:maximum coordinates: @@ -1316,7 +1316,7 @@ datasets: resolution: 32000 file_type: 
nc_fci_asr file_key: radiance_mean - long_name: TOA mean Radiance + long_name: TOA Radiance segment mean standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1329,9 +1329,9 @@ datasets: wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr file_key: radiance_mean - category_id: 0 channel_id: 0 - long_name: TOA mean Radiance over all pixels for vis04 channel + category_id: 0 + long_name: TOA Radiance segment mean at 0.4um (all pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1344,9 +1344,9 @@ datasets: wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr file_key: radiance_mean - category_id: 1 channel_id: 0 - long_name: TOA mean Radiance over clear pixels for vis04 channel + category_id: 1 + long_name: TOA Radiance segment mean at 0.4um (clear pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1359,9 +1359,9 @@ datasets: wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr file_key: radiance_mean - category_id: 2 channel_id: 0 - long_name: TOA mean Radiance over cloudy pixels for vis04 channel + category_id: 2 + long_name: TOA Radiance segment mean at 0.4um (cloudy pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1374,9 +1374,9 @@ datasets: wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr file_key: radiance_mean - category_id: 0 channel_id: 1 - long_name: TOA mean Radiance over all pixels for vis05 channel + category_id: 0 + long_name: TOA Radiance segment mean at 0.5um (all pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1389,9 +1389,9 @@ datasets: wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr file_key: radiance_mean - category_id: 1 channel_id: 1 - long_name: TOA mean Radiance over clear pixels for vis05 channel + category_id: 1 + long_name: TOA Radiance segment mean at 0.5um (clear pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1404,9 +1404,9 @@ datasets: wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr file_key: radiance_mean - category_id: 2 channel_id: 1 - long_name: TOA mean Radiance over cloudy pixels for vis05 channel + category_id: 2 + long_name: TOA Radiance segment mean at 0.5um (cloudy pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1419,9 +1419,9 @@ datasets: wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr file_key: radiance_mean - category_id: 0 channel_id: 2 - long_name: TOA mean Radiance over all pixels for vis06 channel + category_id: 0 + long_name: TOA Radiance segment mean at 0.6um (all pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1434,9 +1434,9 @@ datasets: wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr file_key: radiance_mean - category_id: 1 channel_id: 2 - long_name: TOA mean Radiance over clear pixels for vis06 channel + category_id: 1 + long_name: TOA Radiance segment mean at 0.6um (clear pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1449,9 +1449,9 @@ datasets: wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr file_key: radiance_mean - category_id: 2 channel_id: 2 - long_name: TOA mean Radiance over cloudy pixels for vis06 channel + category_id: 2 + long_name: TOA Radiance segment mean at 0.6um (cloudy pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1464,9 +1464,9 @@ datasets: wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr file_key: radiance_mean - category_id: 0 channel_id: 3 - long_name: TOA mean Radiance over all pixels for vis08 channel + category_id: 
0 + long_name: TOA Radiance segment mean at 0.9um (all pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1479,9 +1479,9 @@ datasets: wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr file_key: radiance_mean - category_id: 1 channel_id: 3 - long_name: TOA mean Radiance over clear pixels for vis08 channel + category_id: 1 + long_name: TOA Radiance segment mean at 0.9um (clear pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1494,9 +1494,9 @@ datasets: wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr file_key: radiance_mean - category_id: 2 channel_id: 3 - long_name: TOA mean Radiance over cloudy pixels for vis08 channel + category_id: 2 + long_name: TOA Radiance segment mean at 0.9um (cloudy pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1509,9 +1509,9 @@ datasets: wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr file_key: radiance_mean - category_id: 0 channel_id: 4 - long_name: TOA mean Radiance over all pixels for vis09 channel + category_id: 0 + long_name: TOA Radiance segment mean at 0.9um (all pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1524,9 +1524,9 @@ datasets: wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr file_key: radiance_mean - category_id: 1 channel_id: 4 - long_name: TOA mean Radiance over clear pixels for vis09 channel + category_id: 1 + long_name: TOA Radiance segment mean at 0.9um (clear pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1539,9 +1539,9 @@ datasets: wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr file_key: radiance_mean - category_id: 2 channel_id: 4 - long_name: TOA mean Radiance over cloudy pixels for vis09 channel + category_id: 2 + long_name: TOA Radiance segment mean at 0.9um (cloudy pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1554,9 +1554,9 @@ datasets: wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr file_key: radiance_mean - category_id: 0 channel_id: 5 - long_name: TOA mean Radiance over all pixels for nir13 channel + category_id: 0 + long_name: TOA Radiance segment mean at 1.4um (all pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1569,9 +1569,9 @@ datasets: wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr file_key: radiance_mean - category_id: 1 channel_id: 5 - long_name: TOA mean Radiance over clear pixels for nir13 channel + category_id: 1 + long_name: TOA Radiance segment mean at 1.4um (clear pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1584,9 +1584,9 @@ datasets: wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr file_key: radiance_mean - category_id: 2 channel_id: 5 - long_name: TOA mean Radiance over cloudy pixels for nir13 channel + category_id: 2 + long_name: TOA Radiance segment mean at 1.4um (cloudy pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1599,9 +1599,9 @@ datasets: wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr file_key: radiance_mean - category_id: 0 channel_id: 6 - long_name: TOA mean Radiance over all pixels for nir16 channel + category_id: 0 + long_name: TOA Radiance segment mean at 1.6um (all pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1614,9 +1614,9 @@ datasets: wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr file_key: radiance_mean - category_id: 1 channel_id: 6 - long_name: TOA mean Radiance over clear pixels for nir16 channel + category_id: 1 + long_name: TOA Radiance segment 
mean at 1.6um (clear pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1629,9 +1629,9 @@ datasets: wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr file_key: radiance_mean - category_id: 2 channel_id: 6 - long_name: TOA mean Radiance over cloudy pixels for nir16 channel + category_id: 2 + long_name: TOA Radiance segment mean at 1.6um (cloudy pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1644,9 +1644,9 @@ datasets: wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr file_key: radiance_mean - category_id: 0 channel_id: 7 - long_name: TOA mean Radiance over all pixels for nir22 channel + category_id: 0 + long_name: TOA Radiance segment mean at 2.2um (all pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1659,9 +1659,9 @@ datasets: wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr file_key: radiance_mean - category_id: 1 channel_id: 7 - long_name: TOA mean Radiance over clear pixels for nir22 channel + category_id: 1 + long_name: TOA Radiance segment mean at 2.2um (clear pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1674,9 +1674,9 @@ datasets: wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr file_key: radiance_mean - category_id: 2 channel_id: 7 - long_name: TOA mean Radiance over cloudy pixels for nir22 channel + category_id: 2 + long_name: TOA Radiance segment mean at 2.2um (cloudy pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1689,9 +1689,9 @@ datasets: wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr file_key: radiance_mean - category_id: 0 channel_id: 8 - long_name: TOA mean Radiance over all pixels for ir38 channel + category_id: 0 + long_name: TOA Radiance segment mean at 3.8um (all pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1704,9 +1704,9 @@ datasets: wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr file_key: radiance_mean - category_id: 1 channel_id: 8 - long_name: TOA mean Radiance over clear pixels for ir38 channel + category_id: 1 + long_name: TOA Radiance segment mean at 3.8um (clear pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1719,9 +1719,9 @@ datasets: wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr file_key: radiance_mean - category_id: 2 channel_id: 8 - long_name: TOA mean Radiance over cloudy pixels for ir38 channel + category_id: 2 + long_name: TOA Radiance segment mean at 3.8um (cloudy pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1734,9 +1734,9 @@ datasets: wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr file_key: radiance_mean - category_id: 0 channel_id: 9 - long_name: TOA mean Radiance over all pixels for wv63 channel + category_id: 0 + long_name: TOA Radiance segment mean at 6.3um (all pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1749,9 +1749,9 @@ datasets: wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr file_key: radiance_mean - category_id: 1 channel_id: 9 - long_name: TOA mean Radiance over clear pixels for wv63 channel + category_id: 1 + long_name: TOA Radiance segment mean at 6.3um (clear pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1764,9 +1764,9 @@ datasets: wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr file_key: radiance_mean - category_id: 2 channel_id: 9 - long_name: TOA mean Radiance over cloudy pixels for wv63 channel + category_id: 2 + long_name: TOA Radiance segment mean at 6.3um (cloudy pixels) standard_name: toa_radiance cell_method: 
area:mean coordinates: @@ -1779,9 +1779,9 @@ datasets: wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr file_key: radiance_mean - category_id: 0 channel_id: 10 - long_name: TOA mean Radiance over all pixels for wv73 channel + category_id: 0 + long_name: TOA Radiance segment mean at 7.3um (all pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1794,9 +1794,9 @@ datasets: wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr file_key: radiance_mean - category_id: 1 channel_id: 10 - long_name: TOA mean Radiance over clear pixels for wv73 channel + category_id: 1 + long_name: TOA Radiance segment mean at 7.3um (clear pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1809,9 +1809,9 @@ datasets: wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr file_key: radiance_mean - category_id: 2 channel_id: 10 - long_name: TOA mean Radiance over cloudy pixels for wv73 channel + category_id: 2 + long_name: TOA Radiance segment mean at 7.3um (cloudy pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1824,9 +1824,9 @@ datasets: wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr file_key: radiance_mean - category_id: 0 channel_id: 11 - long_name: TOA mean Radiance over all pixels for ir87 channel + category_id: 0 + long_name: TOA Radiance segment mean at 8.7um (all pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1839,9 +1839,9 @@ datasets: wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr file_key: radiance_mean - category_id: 1 channel_id: 11 - long_name: TOA mean Radiance over clear pixels for ir87 channel + category_id: 1 + long_name: TOA Radiance segment mean at 8.7um (clear pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1854,9 +1854,9 @@ datasets: wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr file_key: radiance_mean - category_id: 2 channel_id: 11 - long_name: TOA mean Radiance over cloudy pixels for ir87 channel + category_id: 2 + long_name: TOA Radiance segment mean at 8.7um (cloudy pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1869,9 +1869,9 @@ datasets: wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr file_key: radiance_mean - category_id: 0 channel_id: 12 - long_name: TOA mean Radiance over all pixels for ir97 channel + category_id: 0 + long_name: TOA Radiance segment mean at 9.7um (all pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1884,9 +1884,9 @@ datasets: wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr file_key: radiance_mean - category_id: 1 channel_id: 12 - long_name: TOA mean Radiance over clear pixels for ir97 channel + category_id: 1 + long_name: TOA Radiance segment mean at 9.7um (clear pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1899,9 +1899,9 @@ datasets: wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr file_key: radiance_mean - category_id: 2 channel_id: 12 - long_name: TOA mean Radiance over cloudy pixels for ir97 channel + category_id: 2 + long_name: TOA Radiance segment mean at 9.7um (cloudy pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1914,9 +1914,9 @@ datasets: wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr file_key: radiance_mean - category_id: 0 channel_id: 13 - long_name: TOA mean Radiance over all pixels for ir105 channel + category_id: 0 + long_name: TOA Radiance segment mean at 10.5um (all pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1929,9 +1929,9 @@ datasets: 
wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr file_key: radiance_mean - category_id: 1 channel_id: 13 - long_name: TOA mean Radiance over clear pixels for ir105 channel + category_id: 1 + long_name: TOA Radiance segment mean at 10.5um (clear pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1944,9 +1944,9 @@ datasets: wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr file_key: radiance_mean - category_id: 2 channel_id: 13 - long_name: TOA mean Radiance over cloudy pixels for ir105 channel + category_id: 2 + long_name: TOA Radiance segment mean at 10.5um (cloudy pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1959,9 +1959,9 @@ datasets: wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr file_key: radiance_mean - category_id: 0 channel_id: 14 - long_name: TOA mean Radiance over all pixels for ir123 channel + category_id: 0 + long_name: TOA Radiance segment mean at 12.3um (all pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1974,9 +1974,9 @@ datasets: wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr file_key: radiance_mean - category_id: 1 channel_id: 14 - long_name: TOA mean Radiance over clear pixels for ir123 channel + category_id: 1 + long_name: TOA Radiance segment mean at 12.3um (clear pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1989,9 +1989,9 @@ datasets: wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr file_key: radiance_mean - category_id: 2 channel_id: 14 - long_name: TOA mean Radiance over cloudy pixels for ir123 channel + category_id: 2 + long_name: TOA Radiance segment mean at 12.3um (cloudy pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -2004,9 +2004,9 @@ datasets: wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr file_key: radiance_mean - category_id: 0 channel_id: 15 - long_name: TOA mean Radiance over all pixels for ir133 channel + category_id: 0 + long_name: TOA Radiance segment mean at 13.3um (all pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -2019,9 +2019,9 @@ datasets: wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr file_key: radiance_mean - category_id: 1 channel_id: 15 - long_name: TOA mean Radiance over clear pixels for ir133 channel + category_id: 1 + long_name: TOA Radiance segment mean at 13.3um (clear pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -2034,9 +2034,9 @@ datasets: wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr file_key: radiance_mean - category_id: 2 channel_id: 15 - long_name: TOA mean Radiance over cloudy pixels for ir133 channel + category_id: 2 + long_name: TOA Radiance segment mean at 13.3um (cloudy pixels) standard_name: toa_radiance cell_method: area:mean coordinates: @@ -2046,10 +2046,10 @@ datasets: radiance_std: name: radiance_std resolution: 32000 - wavelength: [0,0,0] + wavelength: [] file_type: nc_fci_asr file_key: radiance_std - long_name: TOA Radiance standard deviation over None pixels for None channel + long_name: TOA Radiance standard deviation standard_name: toa_radiance cell_method: area:standard_deviation coordinates: @@ -2059,10 +2059,10 @@ datasets: radiance_quality: name: radiance_quality resolution: 32000 - wavelength: [0,0,0] + wavelength: [] file_type: nc_fci_asr file_key: radiance_quality - long_name: TOA Radiance Quality + long_name: TOA Radiance % confidence standard_name: radiance_quality coordinates: - longitude @@ -2071,10 +2071,10 @@ datasets: reflectance_min: name: reflectance_min resolution: 
32000 - wavelength: [0,0,0] + wavelength: [] file_type: nc_fci_asr file_key: reflectance_min - long_name: TOA min Reflectance + long_name: TOA Reflectance segment min standard_name: toa_reflectance cell_method: area:minimum coordinates: @@ -2084,10 +2084,10 @@ datasets: reflectance_max: name: reflectance_max resolution: 32000 - wavelength: [0,0,0] + wavelength: [] file_type: nc_fci_asr file_key: reflectance_max - long_name: TOA max Reflectance + long_name: TOA Reflectance segment max standard_name: toa_reflectance cell_method: area:maximum coordinates: @@ -2097,10 +2097,10 @@ datasets: reflectance_mean: name: reflectance_mean resolution: 32000 - wavelength: [0,0,0] + wavelength: [] file_type: nc_fci_asr file_key: reflectance_mean - long_name: TOA mean Reflectance + long_name: TOA Reflectance segment mean standard_name: toa_reflectance cell_method: area:mean coordinates: @@ -2113,9 +2113,9 @@ datasets: wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr file_key: reflectance_mean - category_id: 0 vis_channel_id: 0 - long_name: TOA mean Reflectance over all pixels for vis04 channel + category_id: 0 + long_name: TOA Reflectance segment mean at 0.4um (all pixels) standard_name: toa_reflectance cell_method: area:mean coordinates: @@ -2128,9 +2128,9 @@ datasets: wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr file_key: reflectance_mean - category_id: 1 vis_channel_id: 0 - long_name: TOA mean Reflectance over clear pixels for vis04 channel + category_id: 1 + long_name: TOA Reflectance segment mean at 0.4um (clear pixels) standard_name: toa_reflectance cell_method: area:mean coordinates: @@ -2143,9 +2143,9 @@ datasets: wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr file_key: reflectance_mean - category_id: 2 vis_channel_id: 0 - long_name: TOA mean Reflectance over cloudy pixels for vis04 channel + category_id: 2 + long_name: TOA Reflectance segment mean at 0.4um (cloudy pixels) standard_name: toa_reflectance cell_method: area:mean coordinates: @@ -2158,9 +2158,9 @@ datasets: wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr file_key: reflectance_mean - category_id: 0 vis_channel_id: 1 - long_name: TOA mean Reflectance over all pixels for vis05 channel + category_id: 0 + long_name: TOA Reflectance segment mean at 0.5um (all pixels) standard_name: toa_reflectance cell_method: area:mean coordinates: @@ -2173,9 +2173,9 @@ datasets: wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr file_key: reflectance_mean - category_id: 1 vis_channel_id: 1 - long_name: TOA mean Reflectance over clear pixels for vis05 channel + category_id: 1 + long_name: TOA Reflectance segment mean at 0.5um (clear pixels) standard_name: toa_reflectance cell_method: area:mean coordinates: @@ -2188,9 +2188,9 @@ datasets: wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr file_key: reflectance_mean - category_id: 2 vis_channel_id: 1 - long_name: TOA mean Reflectance over cloudy pixels for vis05 channel + category_id: 2 + long_name: TOA Reflectance segment mean at 0.5um (cloudy pixels) standard_name: toa_reflectance cell_method: area:mean coordinates: @@ -2203,9 +2203,9 @@ datasets: wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr file_key: reflectance_mean - category_id: 0 vis_channel_id: 2 - long_name: TOA mean Reflectance over all pixels for vis06 channel + category_id: 0 + long_name: TOA Reflectance segment mean at 0.6um (all pixels) standard_name: toa_reflectance cell_method: area:mean coordinates: @@ -2218,9 +2218,9 @@ datasets: wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr file_key: 
reflectance_mean - category_id: 1 vis_channel_id: 2 - long_name: TOA mean Reflectance over clear pixels for vis06 channel + category_id: 1 + long_name: TOA Reflectance segment mean at 0.6um (clear pixels) standard_name: toa_reflectance cell_method: area:mean coordinates: @@ -2233,9 +2233,9 @@ datasets: wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr file_key: reflectance_mean - category_id: 2 vis_channel_id: 2 - long_name: TOA mean Reflectance over cloudy pixels for vis06 channel + category_id: 2 + long_name: TOA Reflectance segment mean at 0.6um (cloudy pixels) standard_name: toa_reflectance cell_method: area:mean coordinates: @@ -2248,9 +2248,9 @@ datasets: wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr file_key: reflectance_mean - category_id: 0 vis_channel_id: 3 - long_name: TOA mean Reflectance over all pixels for vis08 channel + category_id: 0 + long_name: TOA Reflectance segment mean at 0.9um (all pixels) standard_name: toa_reflectance cell_method: area:mean coordinates: @@ -2263,9 +2263,9 @@ datasets: wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr file_key: reflectance_mean - category_id: 1 vis_channel_id: 3 - long_name: TOA mean Reflectance over clear pixels for vis08 channel + category_id: 1 + long_name: TOA Reflectance segment mean at 0.9um (clear pixels) standard_name: toa_reflectance cell_method: area:mean coordinates: @@ -2278,9 +2278,9 @@ datasets: wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr file_key: reflectance_mean - category_id: 2 vis_channel_id: 3 - long_name: TOA mean Reflectance over cloudy pixels for vis08 channel + category_id: 2 + long_name: TOA Reflectance segment mean at 0.9um (cloudy pixels) standard_name: toa_reflectance cell_method: area:mean coordinates: @@ -2293,9 +2293,9 @@ datasets: wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr file_key: reflectance_mean - category_id: 0 vis_channel_id: 4 - long_name: TOA mean Reflectance over all pixels for vis09 channel + category_id: 0 + long_name: TOA Reflectance segment mean at 0.9um (all pixels) standard_name: toa_reflectance cell_method: area:mean coordinates: @@ -2308,9 +2308,9 @@ datasets: wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr file_key: reflectance_mean - category_id: 1 vis_channel_id: 4 - long_name: TOA mean Reflectance over clear pixels for vis09 channel + category_id: 1 + long_name: TOA Reflectance segment mean at 0.9um (clear pixels) standard_name: toa_reflectance cell_method: area:mean coordinates: @@ -2323,9 +2323,9 @@ datasets: wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr file_key: reflectance_mean - category_id: 2 vis_channel_id: 4 - long_name: TOA mean Reflectance over cloudy pixels for vis09 channel + category_id: 2 + long_name: TOA Reflectance segment mean at 0.9um (cloudy pixels) standard_name: toa_reflectance cell_method: area:mean coordinates: @@ -2338,9 +2338,9 @@ datasets: wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr file_key: reflectance_mean - category_id: 0 vis_channel_id: 5 - long_name: TOA mean Reflectance over all pixels for nir13 channel + category_id: 0 + long_name: TOA Reflectance segment mean at 1.4um (all pixels) standard_name: toa_reflectance cell_method: area:mean coordinates: @@ -2353,9 +2353,9 @@ datasets: wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr file_key: reflectance_mean - category_id: 1 vis_channel_id: 5 - long_name: TOA mean Reflectance over clear pixels for nir13 channel + category_id: 1 + long_name: TOA Reflectance segment mean at 1.4um (clear pixels) standard_name: toa_reflectance 
cell_method: area:mean coordinates: @@ -2368,9 +2368,9 @@ datasets: wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr file_key: reflectance_mean - category_id: 2 vis_channel_id: 5 - long_name: TOA mean Reflectance over cloudy pixels for nir13 channel + category_id: 2 + long_name: TOA Reflectance segment mean at 1.4um (cloudy pixels) standard_name: toa_reflectance cell_method: area:mean coordinates: @@ -2383,9 +2383,9 @@ datasets: wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr file_key: reflectance_mean - category_id: 0 vis_channel_id: 6 - long_name: TOA mean Reflectance over all pixels for nir16 channel + category_id: 0 + long_name: TOA Reflectance segment mean at 1.6um (all pixels) standard_name: toa_reflectance cell_method: area:mean coordinates: @@ -2398,9 +2398,9 @@ datasets: wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr file_key: reflectance_mean - category_id: 1 vis_channel_id: 6 - long_name: TOA mean Reflectance over clear pixels for nir16 channel + category_id: 1 + long_name: TOA Reflectance segment mean at 1.6um (clear pixels) standard_name: toa_reflectance cell_method: area:mean coordinates: @@ -2413,9 +2413,9 @@ datasets: wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr file_key: reflectance_mean - category_id: 2 vis_channel_id: 6 - long_name: TOA mean Reflectance over cloudy pixels for nir16 channel + category_id: 2 + long_name: TOA Reflectance segment mean at 1.6um (cloudy pixels) standard_name: toa_reflectance cell_method: area:mean coordinates: @@ -2428,9 +2428,9 @@ datasets: wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr file_key: reflectance_mean - category_id: 0 vis_channel_id: 7 - long_name: TOA mean Reflectance over all pixels for nir22 channel + category_id: 0 + long_name: TOA Reflectance segment mean at 2.2um (all pixels) standard_name: toa_reflectance cell_method: area:mean coordinates: @@ -2443,9 +2443,9 @@ datasets: wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr file_key: reflectance_mean - category_id: 1 vis_channel_id: 7 - long_name: TOA mean Reflectance over clear pixels for nir22 channel + category_id: 1 + long_name: TOA Reflectance segment mean at 2.2um (clear pixels) standard_name: toa_reflectance cell_method: area:mean coordinates: @@ -2458,9 +2458,9 @@ datasets: wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr file_key: reflectance_mean - category_id: 2 vis_channel_id: 7 - long_name: TOA mean Reflectance over cloudy pixels for nir22 channel + category_id: 2 + long_name: TOA Reflectance segment mean at 2.2um (cloudy pixels) standard_name: toa_reflectance cell_method: area:mean coordinates: @@ -2470,7 +2470,7 @@ datasets: reflectance_std: name: reflectance_std resolution: 32000 - wavelength: [0,0,0] + wavelength: [] file_type: nc_fci_asr file_key: reflectance_std long_name: TOA Reflectance standard deviation @@ -2483,10 +2483,10 @@ datasets: reflectance_quality: name: reflectance_quality resolution: 32000 - wavelength: [0,0,0] + wavelength: [] file_type: nc_fci_asr file_key: reflectance_quality - long_name: TOA Reflectance Quality + long_name: TOA Reflectance % confidence standard_name: reflectance_quality coordinates: - longitude @@ -2495,10 +2495,10 @@ datasets: bt_min: name: bt_min resolution: 32000 - wavelength: [0,0,0] + wavelength: [] file_type: nc_fci_asr file_key: bt_min - long_name: TOA min Brightness Temperature + long_name: TOA Brightness Temperature segment min standard_name: toa_brightness_temperature cell_method: area:minimum coordinates: @@ -2508,10 +2508,10 @@ datasets: bt_max: name: bt_max resolution: 32000 -
wavelength: [0,0,0] + wavelength: [] file_type: nc_fci_asr file_key: bt_max - long_name: TOA max Brightness Temperature + long_name: TOA Brightness Temperature segment max standard_name: toa_brightness_temperature cell_method: area:maximum coordinates: @@ -2521,10 +2521,10 @@ datasets: bt_mean: name: bt_mean resolution: 32000 - wavelength: [0,0,0] + wavelength: [] file_type: nc_fci_asr file_key: bt_mean - long_name: TOA mean Brightness Temperature + long_name: TOA Brightness Temperature segment mean standard_name: toa_brightness_temperature cell_method: area:mean coordinates: @@ -2537,9 +2537,9 @@ datasets: wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr file_key: bt_mean - category_id: 0 ir_channel_id: 0 - long_name: TOA mean Brightness Temperature over all pixels for ir38 channel + category_id: 0 + long_name: TOA Brightness Temperature segment mean at 3.8um (all pixels) standard_name: toa_brightness_temperature cell_method: area:mean coordinates: @@ -2552,9 +2552,9 @@ datasets: wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr file_key: bt_mean - category_id: 1 ir_channel_id: 0 - long_name: TOA mean Brightness Temperature over clear pixels for ir38 channel + category_id: 1 + long_name: TOA Brightness Temperature segment mean at 3.8um (clear pixels) standard_name: toa_brightness_temperature cell_method: area:mean coordinates: @@ -2567,9 +2567,9 @@ datasets: wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr file_key: bt_mean - category_id: 2 ir_channel_id: 0 - long_name: TOA mean Brightness Temperature over cloudy pixels for ir38 channel + category_id: 2 + long_name: TOA Brightness Temperature segment mean at 3.8um (cloudy pixels) standard_name: toa_brightness_temperature cell_method: area:mean coordinates: @@ -2582,9 +2582,9 @@ datasets: wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr file_key: bt_mean - category_id: 0 ir_channel_id: 1 - long_name: TOA mean Brightness Temperature over all pixels for wv63 channel + category_id: 0 + long_name: TOA Brightness Temperature segment mean at 6.3um (all pixels) standard_name: toa_brightness_temperature cell_method: area:mean coordinates: @@ -2597,9 +2597,9 @@ datasets: wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr file_key: bt_mean - category_id: 1 ir_channel_id: 1 - long_name: TOA mean Brightness Temperature over clear pixels for wv63 channel + category_id: 1 + long_name: TOA Brightness Temperature segment mean at 6.3um (clear pixels) standard_name: toa_brightness_temperature cell_method: area:mean coordinates: @@ -2612,9 +2612,9 @@ datasets: wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr file_key: bt_mean - category_id: 2 ir_channel_id: 1 - long_name: TOA mean Brightness Temperature over cloudy pixels for wv63 channel + category_id: 2 + long_name: TOA Brightness Temperature segment mean at 6.3um (cloudy pixels) standard_name: toa_brightness_temperature cell_method: area:mean coordinates: @@ -2627,9 +2627,9 @@ datasets: wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr file_key: bt_mean - category_id: 0 ir_channel_id: 2 - long_name: TOA mean Brightness Temperature over all pixels for wv73 channel + category_id: 0 + long_name: TOA Brightness Temperature segment mean at 7.3um (all pixels) standard_name: toa_brightness_temperature cell_method: area:mean coordinates: @@ -2642,9 +2642,9 @@ datasets: wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr file_key: bt_mean - category_id: 1 ir_channel_id: 2 - long_name: TOA mean Brightness Temperature over clear pixels for wv73 channel + category_id: 1 + long_name: TOA Brightness Temperature segment mean at 7.3um (clear
pixels) standard_name: toa_brightness_temperature cell_method: area:mean coordinates: @@ -2657,9 +2657,9 @@ datasets: wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr file_key: bt_mean - category_id: 2 ir_channel_id: 2 - long_name: TOA mean Brightness Temperature over cloudy pixels for wv73 channel + category_id: 2 + long_name: TOA Brightness Temperature segment mean at 7.3um (cloudy pixels) standard_name: toa_brightness_temperature cell_method: area:mean coordinates: @@ -2672,9 +2672,9 @@ datasets: wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr file_key: bt_mean - category_id: 0 ir_channel_id: 3 - long_name: TOA mean Brightness Temperature over all pixels for ir87 channel + category_id: 0 + long_name: TOA Brightness Temperature segment mean at 8.7um (all pixels) standard_name: toa_brightness_temperature cell_method: area:mean coordinates: @@ -2687,9 +2687,9 @@ datasets: wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr file_key: bt_mean - category_id: 1 ir_channel_id: 3 - long_name: TOA mean Brightness Temperature over clear pixels for ir87 channel + category_id: 1 + long_name: TOA Brightness Temperature segment mean at 8.7um (clear pixels) standard_name: toa_brightness_temperature cell_method: area:mean coordinates: @@ -2702,9 +2702,9 @@ datasets: wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr file_key: bt_mean - category_id: 2 ir_channel_id: 3 - long_name: TOA mean Brightness Temperature over cloudy pixels for ir87 channel + category_id: 2 + long_name: TOA Brightness Temperature segment mean at 8.7um (cloudy pixels) standard_name: toa_brightness_temperature cell_method: area:mean coordinates: @@ -2717,9 +2717,9 @@ datasets: wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr file_key: bt_mean - category_id: 0 ir_channel_id: 4 - long_name: TOA mean Brightness Temperature over all pixels for ir97 channel + category_id: 0 + long_name: TOA Brightness Temperature segment mean at 9.7um (all pixels) standard_name: toa_brightness_temperature cell_method: area:mean coordinates: @@ -2732,9 +2732,9 @@ datasets: wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr file_key: bt_mean - category_id: 1 ir_channel_id: 4 - long_name: TOA mean Brightness Temperature over clear pixels for ir97 channel + category_id: 1 + long_name: TOA Brightness Temperature segment mean at 9.7um (clear pixels) standard_name: toa_brightness_temperature cell_method: area:mean coordinates: @@ -2747,9 +2747,9 @@ datasets: wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr file_key: bt_mean - category_id: 2 ir_channel_id: 4 - long_name: TOA mean Brightness Temperature over cloudy pixels for ir97 channel + category_id: 2 + long_name: TOA Brightness Temperature segment mean at 9.7um (cloudy pixels) standard_name: toa_brightness_temperature cell_method: area:mean coordinates: @@ -2762,9 +2762,9 @@ datasets: wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr file_key: bt_mean - category_id: 0 ir_channel_id: 5 - long_name: TOA mean Brightness Temperature over all pixels for ir105 channel + category_id: 0 + long_name: TOA Brightness Temperature segment mean at 10.5um (all pixels) standard_name: toa_brightness_temperature cell_method: area:mean coordinates: @@ -2777,9 +2777,9 @@ datasets: wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr file_key: bt_mean - category_id: 1 ir_channel_id: 5 - long_name: TOA mean Brightness Temperature over clear pixels for ir105 channel + category_id: 1 + long_name: TOA Brightness Temperature segment mean at 10.5um (clear pixels) standard_name: toa_brightness_temperature cell_method: area:mean coordinates: @@
-2792,9 +2792,9 @@ datasets: wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr file_key: bt_mean - category_id: 2 ir_channel_id: 5 - long_name: TOA mean Brightness Temperature over cloudy pixels for ir105 channel + category_id: 2 + long_name: TOA Brightness Temperature segment mean at 10.5um (cloudy pixels) standard_name: toa_brightness_temperature cell_method: area:mean coordinates: @@ -2807,9 +2807,9 @@ datasets: wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr file_key: bt_mean - category_id: 0 ir_channel_id: 6 - long_name: TOA mean Brightness Temperature over all pixels for ir123 channel + category_id: 0 + long_name: TOA Brightness Temperature segment mean at 12.3um (all pixels) standard_name: toa_brightness_temperature cell_method: area:mean coordinates: @@ -2822,9 +2822,9 @@ datasets: wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr file_key: bt_mean - category_id: 1 ir_channel_id: 6 - long_name: TOA mean Brightness Temperature over clear pixels for ir123 channel + category_id: 1 + long_name: TOA Brightness Temperature segment mean at 12.3um (clear pixels) standard_name: toa_brightness_temperature cell_method: area:mean coordinates: @@ -2837,9 +2837,9 @@ datasets: wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr file_key: bt_mean - category_id: 2 ir_channel_id: 6 - long_name: TOA mean Brightness Temperature over cloudy pixels for ir123 channel + category_id: 2 + long_name: TOA Brightness Temperature segment mean at 12.3um (cloudy pixels) standard_name: toa_brightness_temperature cell_method: area:mean coordinates: @@ -2852,9 +2852,9 @@ datasets: wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr file_key: bt_mean - category_id: 0 ir_channel_id: 7 - long_name: TOA mean Brightness Temperature over all pixels for ir133 channel + category_id: 0 + long_name: TOA Brightness Temperature segment mean at 13.3um (all pixels) standard_name: toa_brightness_temperature cell_method: area:mean coordinates: @@ -2867,9 +2867,9 @@ datasets: wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr file_key: bt_mean - category_id: 1 ir_channel_id: 7 - long_name: TOA mean Brightness Temperature over clear pixels for ir133 channel + category_id: 1 + long_name: TOA Brightness Temperature segment mean at 13.3um (clear pixels) standard_name: toa_brightness_temperature cell_method: area:mean coordinates: @@ -2882,9 +2882,9 @@ datasets: wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr file_key: bt_mean - category_id: 2 ir_channel_id: 7 - long_name: TOA mean Brightness Temperature over cloudy pixels for ir133 channel + category_id: 2 + long_name: TOA Brightness Temperature segment mean at 13.3um (cloudy pixels) standard_name: toa_brightness_temperature cell_method: area:mean coordinates: @@ -2894,7 +2894,7 @@ datasets: bt_std: name: bt_std resolution: 32000 - wavelength: [0,0,0] + wavelength: [] file_type: nc_fci_asr file_key: bt_std long_name: TOA Brightness Temperature standard deviation @@ -2907,10 +2907,10 @@ datasets: bt_quality: name: bt_quality resolution: 32000 - wavelength: [0,0,0] + wavelength: [] file_type: nc_fci_asr file_key: bt_quality - long_name: TOA Brightness Temperature Quality + long_name: TOA Brightness Temperature % confidence standard_name: brightness_temperature_quality coordinates: - longitude From 106bfaead2d1a89382e34c97f8215158d8b26232 Mon Sep 17 00:00:00 2001 From: Olivier Samain Date: Wed, 13 Dec 2023 17:40:18 +0100 Subject: [PATCH 1033/1416] Fix ASR standard names --- satpy/etc/readers/fci_l2_nc.yaml | 334 +++++++++++++++---------------- 1 file changed, 161 insertions(+), 173 deletions(-)
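These ASR entries are ordinary Satpy dataset definitions, so the YAML metadata being edited above and below (long_name, standard_name, cell_method) is exactly what ends up on the loaded arrays. As a minimal sketch of how they surface to users, assuming a local FCI L2 ASR granule (the file name below is hypothetical; the dataset names are taken from the name: keys in this YAML):

from satpy import Scene

# Hypothetical FCI L2 ASR file name; any file matched by the nc_fci_asr
# file type defined in this YAML would work the same way.
scn = Scene(filenames=["FCI_L2_ASR_example.nc"], reader="fci_l2_nc")
scn.load(["radiance_mean", "bt_mean"])

rad = scn["radiance_mean"]
# The YAML metadata travels with the loaded xarray.DataArray:
print(rad.attrs["standard_name"])  # toa_radiance before this patch, toa_outgoing_radiance after
print(rad.attrs["cell_method"])    # area:mean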
diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index a7d64d5f53..6191b5e3ae 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -1292,8 +1292,8 @@ datasets: resolution: 32000 file_type: nc_fci_asr file_key: radiance_min - long_name: TOA Radiance segment min - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment min + standard_name: toa_outgoing_radiance cell_method: area:minimum coordinates: - longitude @@ -1304,8 +1304,8 @@ datasets: resolution: 32000 file_type: nc_fci_asr file_key: radiance_max - long_name: TOA Radiance segment max - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment max + standard_name: toa_outgoing_radiance cell_method: area:maximum coordinates: - longitude @@ -1316,8 +1316,8 @@ datasets: resolution: 32000 file_type: nc_fci_asr file_key: radiance_mean - long_name: TOA Radiance segment mean - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1331,8 +1331,8 @@ datasets: file_key: radiance_mean channel_id: 0 category_id: 0 - long_name: TOA Radiance segment mean at 0.4um (all pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 0.4um (all pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1346,8 +1346,8 @@ datasets: file_key: radiance_mean channel_id: 0 category_id: 1 - long_name: TOA Radiance segment mean at 0.4um (clear pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 0.4um (clear pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1361,8 +1361,8 @@ datasets: file_key: radiance_mean channel_id: 0 category_id: 2 - long_name: TOA Radiance segment mean at 0.4um (cloudy pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 0.4um (cloudy pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1376,8 +1376,8 @@ datasets: file_key: radiance_mean channel_id: 1 category_id: 0 - long_name: TOA Radiance segment mean at 0.5um (all pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 0.5um (all pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1391,8 +1391,8 @@ datasets: file_key: radiance_mean channel_id: 1 category_id: 1 - long_name: TOA Radiance segment mean at 0.5um (clear pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 0.5um (clear pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1406,8 +1406,8 @@ datasets: file_key: radiance_mean channel_id: 1 category_id: 2 - long_name: TOA Radiance segment mean at 0.5um (cloudy pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 0.5um (cloudy pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1421,8 +1421,8 @@ datasets: file_key: radiance_mean channel_id: 2 category_id: 0 - long_name: TOA Radiance segment mean at 0.6um (all pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 0.6um (all pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1436,8 +1436,8 @@ datasets: file_key: radiance_mean channel_id: 2 category_id: 1 - long_name: TOA 
Radiance segment mean at 0.6um (clear pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 0.6um (clear pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1451,8 +1451,8 @@ datasets: file_key: radiance_mean channel_id: 2 category_id: 2 - long_name: TOA Radiance segment mean at 0.6um (cloudy pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 0.6um (cloudy pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1466,8 +1466,8 @@ datasets: file_key: radiance_mean channel_id: 3 category_id: 0 - long_name: TOA Radiance segment mean at 0.9um (all pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 0.9um (all pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1481,8 +1481,8 @@ datasets: file_key: radiance_mean channel_id: 3 category_id: 1 - long_name: TOA Radiance segment mean at 0.9um (clear pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 0.9um (clear pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1496,8 +1496,8 @@ datasets: file_key: radiance_mean channel_id: 3 category_id: 2 - long_name: TOA Radiance segment mean at 0.9um (cloudy pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 0.9um (cloudy pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1511,8 +1511,8 @@ datasets: file_key: radiance_mean channel_id: 4 category_id: 0 - long_name: TOA Radiance segment mean at 0.9um (all pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 0.9um (all pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1526,8 +1526,8 @@ datasets: file_key: radiance_mean channel_id: 4 category_id: 1 - long_name: TOA Radiance segment mean at 0.9um (clear pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 0.9um (clear pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1541,8 +1541,8 @@ datasets: file_key: radiance_mean channel_id: 4 category_id: 2 - long_name: TOA Radiance segment mean at 0.9um (cloudy pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 0.9um (cloudy pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1556,8 +1556,8 @@ datasets: file_key: radiance_mean channel_id: 5 category_id: 0 - long_name: TOA Radiance segment mean at 1.4um (all pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 1.4um (all pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1571,8 +1571,8 @@ datasets: file_key: radiance_mean channel_id: 5 category_id: 1 - long_name: TOA Radiance segment mean at 1.4um (clear pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 1.4um (clear pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1586,8 +1586,8 @@ datasets: file_key: radiance_mean channel_id: 5 category_id: 2 - long_name: TOA Radiance segment mean at 1.4um (cloudy pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 1.4um (cloudy pixels) + standard_name: toa_outgoing_radiance 
cell_method: area:mean coordinates: - longitude @@ -1601,8 +1601,8 @@ datasets: file_key: radiance_mean channel_id: 6 category_id: 0 - long_name: TOA Radiance segment mean at 1.6um (all pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 1.6um (all pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1616,8 +1616,8 @@ datasets: file_key: radiance_mean channel_id: 6 category_id: 1 - long_name: TOA Radiance segment mean at 1.6um (clear pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 1.6um (clear pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1631,8 +1631,8 @@ datasets: file_key: radiance_mean channel_id: 6 category_id: 2 - long_name: TOA Radiance segment mean at 1.6um (cloudy pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 1.6um (cloudy pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1646,8 +1646,8 @@ datasets: file_key: radiance_mean channel_id: 7 category_id: 0 - long_name: TOA Radiance segment mean at 2.2um (all pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 2.2um (all pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1661,8 +1661,8 @@ datasets: file_key: radiance_mean channel_id: 7 category_id: 1 - long_name: TOA Radiance segment mean at 2.2um (clear pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 2.2um (clear pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1676,8 +1676,8 @@ datasets: file_key: radiance_mean channel_id: 7 category_id: 2 - long_name: TOA Radiance segment mean at 2.2um (cloudy pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 2.2um (cloudy pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1691,8 +1691,8 @@ datasets: file_key: radiance_mean channel_id: 8 category_id: 0 - long_name: TOA Radiance segment mean at 3.8um (all pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 3.8um (all pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1706,8 +1706,8 @@ datasets: file_key: radiance_mean channel_id: 8 category_id: 1 - long_name: TOA Radiance segment mean at 3.8um (clear pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 3.8um (clear pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1721,8 +1721,8 @@ datasets: file_key: radiance_mean channel_id: 8 category_id: 2 - long_name: TOA Radiance segment mean at 3.8um (cloudy pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 3.8um (cloudy pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1736,8 +1736,8 @@ datasets: file_key: radiance_mean channel_id: 9 category_id: 0 - long_name: TOA Radiance segment mean at 6.3um (all pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 6.3um (all pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1751,8 +1751,8 @@ datasets: file_key: radiance_mean channel_id: 9 category_id: 1 - long_name: TOA Radiance segment mean at 6.3um (clear 
pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 6.3um (clear pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1766,8 +1766,8 @@ datasets: file_key: radiance_mean channel_id: 9 category_id: 2 - long_name: TOA Radiance segment mean at 6.3um (cloudy pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 6.3um (cloudy pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1781,8 +1781,8 @@ datasets: file_key: radiance_mean channel_id: 10 category_id: 0 - long_name: TOA Radiance segment mean at 7.3um (all pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 7.3um (all pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1796,8 +1796,8 @@ datasets: file_key: radiance_mean channel_id: 10 category_id: 1 - long_name: TOA Radiance segment mean at 7.3um (clear pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 7.3um (clear pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1811,8 +1811,8 @@ datasets: file_key: radiance_mean channel_id: 10 category_id: 2 - long_name: TOA Radiance segment mean at 7.3um (cloudy pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 7.3um (cloudy pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1826,8 +1826,8 @@ datasets: file_key: radiance_mean channel_id: 11 category_id: 0 - long_name: TOA Radiance segment mean at 8.7um (all pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 8.7um (all pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1841,8 +1841,8 @@ datasets: file_key: radiance_mean channel_id: 11 category_id: 1 - long_name: TOA Radiance segment mean at 8.7um (clear pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 8.7um (clear pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1856,8 +1856,8 @@ datasets: file_key: radiance_mean channel_id: 11 category_id: 2 - long_name: TOA Radiance segment mean at 8.7um (cloudy pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 8.7um (cloudy pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1871,8 +1871,8 @@ datasets: file_key: radiance_mean channel_id: 12 category_id: 0 - long_name: TOA Radiance segment mean at 9.7um (all pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 9.7um (all pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1886,8 +1886,8 @@ datasets: file_key: radiance_mean channel_id: 12 category_id: 1 - long_name: TOA Radiance segment mean at 9.7um (clear pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 9.7um (clear pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1901,8 +1901,8 @@ datasets: file_key: radiance_mean channel_id: 12 category_id: 2 - long_name: TOA Radiance segment mean at 9.7um (cloudy pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 9.7um (cloudy pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean 
coordinates: - longitude @@ -1916,8 +1916,8 @@ datasets: file_key: radiance_mean channel_id: 13 category_id: 0 - long_name: TOA Radiance segment mean at 10.5um (all pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 10.5um (all pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1931,8 +1931,8 @@ datasets: file_key: radiance_mean channel_id: 13 category_id: 1 - long_name: TOA Radiance segment mean at 10.5um (clear pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 10.5um (clear pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1946,8 +1946,8 @@ datasets: file_key: radiance_mean channel_id: 13 category_id: 2 - long_name: TOA Radiance segment mean at 10.5um (cloudy pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 10.5um (cloudy pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1961,8 +1961,8 @@ datasets: file_key: radiance_mean channel_id: 14 category_id: 0 - long_name: TOA Radiance segment mean at 12.3um (all pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 12.3um (all pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1976,8 +1976,8 @@ datasets: file_key: radiance_mean channel_id: 14 category_id: 1 - long_name: TOA Radiance segment mean at 12.3um (clear pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 12.3um (clear pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1991,8 +1991,8 @@ datasets: file_key: radiance_mean channel_id: 14 category_id: 2 - long_name: TOA Radiance segment mean at 12.3um (cloudy pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 12.3um (cloudy pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -2006,8 +2006,8 @@ datasets: file_key: radiance_mean channel_id: 15 category_id: 0 - long_name: TOA Radiance segment mean at 13.3um (all pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 13.3um (all pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -2021,8 +2021,8 @@ datasets: file_key: radiance_mean channel_id: 15 category_id: 1 - long_name: TOA Radiance segment mean at 13.3um (clear pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 13.3um (clear pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -2036,8 +2036,8 @@ datasets: file_key: radiance_mean channel_id: 15 category_id: 2 - long_name: TOA Radiance segment mean at 13.3um (cloudy pixels) - standard_name: toa_radiance + long_name: TOA Outgoing Radiance segment mean at 13.3um (cloudy pixels) + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -2046,11 +2046,10 @@ datasets: radiance_std: name: radiance_std resolution: 32000 - wavelength: [] file_type: nc_fci_asr file_key: radiance_std - long_name: TOA Radiance standard deviation - standard_name: toa_radiance + long_name: TOA Outgoing Radiance standard deviation + standard_name: toa_outgoing_radiance cell_method: area:standard_deviation coordinates: - longitude @@ -2059,7 +2058,6 @@ datasets: radiance_quality: name: radiance_quality resolution: 32000 - 
wavelength: [] file_type: nc_fci_asr file_key: radiance_quality long_name: TOA Radiance % confidence @@ -2071,11 +2069,10 @@ datasets: reflectance_min: name: reflectance_min resolution: 32000 - wavelength: [] file_type: nc_fci_asr file_key: reflectance_min - long_name: TOA Reflectance segment min - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment min + standard_name: toa_bidirectional_reflectance cell_method: area:minimum coordinates: - longitude @@ -2084,11 +2081,10 @@ datasets: reflectance_max: name: reflectance_max resolution: 32000 - wavelength: [] file_type: nc_fci_asr file_key: reflectance_max - long_name: TOA Reflectance segment max - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment max + standard_name: toa_bidirectional_reflectance cell_method: area:maximum coordinates: - longitude @@ -2097,11 +2093,10 @@ datasets: reflectance_mean: name: reflectance_mean resolution: 32000 - wavelength: [] file_type: nc_fci_asr file_key: reflectance_mean - long_name: TOA Reflectance segment mean - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment mean + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude @@ -2115,8 +2110,8 @@ datasets: file_key: reflectance_mean vis_channel_id: 0 category_id: 0 - long_name: TOA Reflectance segment mean at 0.4um (all pixels) - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment mean at 0.4um (all pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude @@ -2130,8 +2125,8 @@ datasets: file_key: reflectance_mean vis_channel_id: 0 category_id: 1 - long_name: TOA Reflectance segment mean at 0.4um (clear pixels) - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment mean at 0.4um (clear pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude @@ -2145,8 +2140,8 @@ datasets: file_key: reflectance_mean vis_channel_id: 0 category_id: 2 - long_name: TOA Reflectance segment mean at 0.4um (cloudy pixels) - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment mean at 0.4um (cloudy pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude @@ -2160,8 +2155,8 @@ datasets: file_key: reflectance_mean vis_channel_id: 1 category_id: 0 - long_name: TOA Reflectance segment mean at 0.5um (all pixels) - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment mean at 0.5um (all pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude @@ -2175,8 +2170,8 @@ datasets: file_key: reflectance_mean vis_channel_id: 1 category_id: 1 - long_name: TOA Reflectance segment mean at 0.5um (clear pixels) - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment mean at 0.5um (clear pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude @@ -2190,8 +2185,8 @@ datasets: file_key: reflectance_mean vis_channel_id: 1 category_id: 2 - long_name: TOA Reflectance segment mean at 0.5um (cloudy pixels) - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment mean at 0.5um (cloudy pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude @@ -2205,8 +2200,8 @@ datasets: file_key: reflectance_mean vis_channel_id: 2 category_id: 0 - long_name: TOA 
Reflectance segment mean at 0.6um (all pixels) - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment mean at 0.6um (all pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude @@ -2220,8 +2215,8 @@ datasets: file_key: reflectance_mean vis_channel_id: 2 category_id: 1 - long_name: TOA Reflectance segment mean at 0.6um (clear pixels) - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment mean at 0.6um (clear pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude @@ -2235,8 +2230,8 @@ datasets: file_key: reflectance_mean vis_channel_id: 2 category_id: 2 - long_name: TOA Reflectance segment mean at 0.6um (cloudy pixels) - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment mean at 0.6um (cloudy pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude @@ -2250,8 +2245,8 @@ datasets: file_key: reflectance_mean vis_channel_id: 3 category_id: 0 - long_name: TOA Reflectance segment mean at 0.9um (all pixels) - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment mean at 0.9um (all pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude @@ -2265,8 +2260,8 @@ datasets: file_key: reflectance_mean vis_channel_id: 3 category_id: 1 - long_name: TOA Reflectance segment mean at 0.9um (clear pixels) - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment mean at 0.9um (clear pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude @@ -2280,8 +2275,8 @@ datasets: file_key: reflectance_mean vis_channel_id: 3 category_id: 2 - long_name: TOA Reflectance segment mean at 0.9um (cloudy pixels) - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment mean at 0.9um (cloudy pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude @@ -2295,8 +2290,8 @@ datasets: file_key: reflectance_mean vis_channel_id: 4 category_id: 0 - long_name: TOA Reflectance segment mean at 0.9um (all pixels) - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment mean at 0.9um (all pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude @@ -2310,8 +2305,8 @@ datasets: file_key: reflectance_mean vis_channel_id: 4 category_id: 1 - long_name: TOA Reflectance segment mean at 0.9um (clear pixels) - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment mean at 0.9um (clear pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude @@ -2325,8 +2320,8 @@ datasets: file_key: reflectance_mean vis_channel_id: 4 category_id: 2 - long_name: TOA Reflectance segment mean at 0.9um (cloudy pixels) - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment mean at 0.9um (cloudy pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude @@ -2340,8 +2335,8 @@ datasets: file_key: reflectance_mean vis_channel_id: 5 category_id: 0 - long_name: TOA Reflectance segment mean at 1.4um (all pixels) - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment mean at 1.4um (all pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude @@ -2355,8 
+2350,8 @@ datasets: file_key: reflectance_mean vis_channel_id: 5 category_id: 1 - long_name: TOA Reflectance segment mean at 1.4um (clear pixels) - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment mean at 1.4um (clear pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude @@ -2370,8 +2365,8 @@ datasets: file_key: reflectance_mean vis_channel_id: 5 category_id: 2 - long_name: TOA Reflectance segment mean at 1.4um (cloudy pixels) - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment mean at 1.4um (cloudy pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude @@ -2385,8 +2380,8 @@ datasets: file_key: reflectance_mean vis_channel_id: 6 category_id: 0 - long_name: TOA Reflectance segment mean at 1.6um (all pixels) - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment mean at 1.6um (all pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude @@ -2400,8 +2395,8 @@ datasets: file_key: reflectance_mean vis_channel_id: 6 category_id: 1 - long_name: TOA Reflectance segment mean at 1.6um (clear pixels) - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment mean at 1.6um (clear pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude @@ -2415,8 +2410,8 @@ datasets: file_key: reflectance_mean vis_channel_id: 6 category_id: 2 - long_name: TOA Reflectance segment mean at 1.6um (cloudy pixels) - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment mean at 1.6um (cloudy pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude @@ -2430,8 +2425,8 @@ datasets: file_key: reflectance_mean vis_channel_id: 7 category_id: 0 - long_name: TOA Reflectance segment mean at 2.2um (all pixels) - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment mean at 2.2um (all pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude @@ -2445,8 +2440,8 @@ datasets: file_key: reflectance_mean vis_channel_id: 7 category_id: 1 - long_name: TOA Reflectance segment mean at 2.2um (clear pixels) - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment mean at 2.2um (clear pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude @@ -2460,8 +2455,8 @@ datasets: file_key: reflectance_mean vis_channel_id: 7 category_id: 2 - long_name: TOA Reflectance segment mean at 2.2um (cloudy pixels) - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance segment mean at 2.2um (cloudy pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude @@ -2470,11 +2465,10 @@ datasets: reflectance_std: name: reflectance_std resolution: 32000 - wavelength: [] file_type: nc_fci_asr file_key: reflectance_std - long_name: TOA Reflectance standard deviation - standard_name: toa_reflectance + long_name: TOA Bidirectional Reflectance standard deviation + standard_name: toa_bidirectional_reflectance cell_method: area:standard_deviation coordinates: - longitude @@ -2483,10 +2477,9 @@ datasets: reflectance_quality: name: reflectance_quality resolution: 32000 - wavelength: [] file_type: nc_fci_asr file_key: reflectance_quality - long_name: TOA Reflectance % confidence + long_name: TOA 
Bidirectional Reflectance % confidence standard_name: reflectance_quality coordinates: - longitude @@ -2495,7 +2488,6 @@ datasets: bt_min: name: bt_min resolution: 32000 - wavelength: [] file_type: nc_fci_asr file_key: bt_min long_name: TOA Brightness Temperature segment min @@ -2508,7 +2500,6 @@ datasets: bt_max: name: bt_max resolution: 32000 - wavelength: [] file_type: nc_fci_asr file_key: bt_max long_name: TOA Brightness Temperature segment max @@ -2521,7 +2512,6 @@ datasets: bt_mean: name: bt_mean resolution: 32000 - wavelength: [] file_type: nc_fci_asr file_key: bt_mean long_name: TOA Brightness Temperature segment mean @@ -2894,7 +2884,6 @@ datasets: bt_std: name: bt_std resolution: 32000 - wavelength: [] file_type: nc_fci_asr file_key: bt_std long_name: TOA Brightness Temperature standard deviation @@ -2907,7 +2896,6 @@ datasets: bt_quality: name: bt_quality resolution: 32000 - wavelength: [] file_type: nc_fci_asr file_key: bt_quality long_name: TOA Brightness Temperature % confidence From a8a9a00b35694345fd46ffdb7675fee1b973e510 Mon Sep 17 00:00:00 2001 From: Olivier Samain Date: Fri, 15 Dec 2023 11:51:58 +0100 Subject: [PATCH 1034/1416] Change CRM long names --- satpy/etc/readers/fci_l2_nc.yaml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index 6191b5e3ae..c610c8c0ec 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -550,6 +550,7 @@ datasets: file_type: nc_fci_crm file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance + long_name: TOA Bidirectional Reflectance crm_vis04: name: crm_vis04 @@ -559,6 +560,7 @@ datasets: file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance vis_channel_id: 0 + long_name: TOA Bidirectional Reflectance at 0.41um (temporal average) crm_vis05: name: crm_vis05 @@ -568,6 +570,7 @@ datasets: file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance vis_channel_id: 1 + long_name: TOA Bidirectional Reflectance at 0.51um (temporal average) crm_vis06: name: crm_vis06 @@ -577,6 +580,7 @@ datasets: file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance vis_channel_id: 2 + long_name: TOA Bidirectional Reflectance at 0.64um (temporal average) crm_vis08: name: crm_vis08 @@ -586,6 +590,7 @@ datasets: file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance vis_channel_id: 3 + long_name: TOA Bidirectional Reflectance at 0.865um (temporal average) crm_vis09: name: crm_vis09 @@ -595,6 +600,7 @@ datasets: file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance vis_channel_id: 4 + long_name: TOA Bidirectional Reflectance at 0.914um (temporal average) crm_nir13: name: crm_nir13 @@ -604,6 +610,7 @@ datasets: file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance vis_channel_id: 5 + long_name: TOA Bidirectional Reflectance at 1.38um (temporal average) crm_nir16: name: crm_nir16 @@ -613,6 +620,7 @@ datasets: file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance vis_channel_id: 6 + long_name: TOA Bidirectional Reflectance at 1.61um (temporal average) crm_nir22: name: crm_nir22 @@ -622,6 +630,7 @@ datasets: file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance vis_channel_id: 7 + long_name: TOA Bidirectional Reflectance at 2.25um (temporal average) mean_sza: name: mean_sza From bd1238eab17276fa9874b5c1a383684f4df8178b Mon Sep 17
00:00:00 2001 From: Olivier Samain Date: Fri, 15 Dec 2023 14:25:52 +0100 Subject: [PATCH 1035/1416] Added CRM long names --- satpy/etc/readers/fci_l2_nc.yaml | 630 ++++++++++++++++--------------- 1 file changed, 327 insertions(+), 303 deletions(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index c610c8c0ec..360fde4a5d 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -550,7 +550,6 @@ datasets: file_type: nc_fci_crm file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance - long_name: TOA Bidirectional Reflectance crm_vis04: name: crm_vis04 @@ -560,7 +559,6 @@ datasets: file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance vis_channel_id: 0 - long_name: TOA Bidirectional Reflectance at 0.41um (temporal average) crm_vis05: name: crm_vis05 @@ -570,7 +568,6 @@ datasets: file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance vis_channel_id: 1 - long_name: TOA Bidirectional Reflectance at 0.51um (temporal average) crm_vis06: name: crm_vis06 @@ -580,7 +577,6 @@ datasets: file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance vis_channel_id: 2 - long_name: TOA Bidirectional Reflectance at 0.64um (temporal average) crm_vis08: name: crm_vis08 @@ -590,7 +586,6 @@ datasets: file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance vis_channel_id: 3 - long_name: TOA Bidirectional Reflectance at 0.865um (temporal average) crm_vis09: name: crm_vis09 @@ -600,7 +595,6 @@ datasets: file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance vis_channel_id: 4 - long_name: TOA Bidirectional Reflectance at 0.914um (temporal average) crm_nir13: name: crm_nir13 @@ -610,7 +604,6 @@ datasets: file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance vis_channel_id: 5 - long_name: TOA Bidirectional Reflectance at 1.38um (temporal average) crm_nir16: name: crm_nir16 @@ -620,7 +613,6 @@ datasets: file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance vis_channel_id: 6 - long_name: TOA Bidirectional Reflectance at 1.61um (temporal average) crm_nir22: name: crm_nir22 @@ -630,7 +622,6 @@ datasets: file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance vis_channel_id: 7 - long_name: TOA Bidirectional Reflectance at 2.25um (temporal average) mean_sza: name: mean_sza @@ -1203,7 +1194,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 15 flag_values: [0,1] - flag_meanings: ['Opaqueness undetected','Opaqueness detected'] + flag_meanings: ['Cloud undetected','Cloud detected'] standard_name: status_flag cloud_test_cmrt1: @@ -1223,7 +1214,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 17 flag_values: [0,1] - flag_meanings: ['Clear unchanged','Cloud detected'] + flag_meanings: ['Clear unchanged', 'Cloud detected (restored from clear sky)'] standard_name: status_flag cloud_test_cmrt3: @@ -1233,7 +1224,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 18 flag_values: [0,1] - flag_meanings: ['Clear unchanged','Cloud detected'] + flag_meanings: ['Cloud undetected','Cloud detected'] standard_name: status_flag cloud_test_cmrt4: @@ -1243,7 +1234,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 19 flag_values: [0,1] - flag_meanings: ['Clear unchanged','Cloud detected'] + flag_meanings: ['Clear unchanged', 'Cloud detected (restored from clear sky)'] standard_name: status_flag 
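The cloud_test_* entries above (and the ones that follow) all read the same packed word, cloud_mask_test_result, and differ only in extract_byte, which despite its name selects a single bit whose meaning is spelled out by flag_values/flag_meanings. A standalone illustration of that decoding, assuming the usual shift-and-mask convention rather than quoting the reader's actual implementation:

import numpy as np

# Example packed cloud_mask_test_result word with bits 17 and 18 set.
packed = np.array([(1 << 17) | (1 << 18)], dtype=np.uint32)

def extract_bit(word, bit):
    # Shift the requested flag bit down to position 0 and mask it out.
    return (word >> bit) & 1

print(extract_bit(packed, 17))  # [1] -> second flag meaning for that test
print(extract_bit(packed, 20))  # [0] -> first flag meaning for that test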
cloud_test_cmrt5: @@ -1253,7 +1244,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 20 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud unchanged'] + flag_meanings: ['Clear sky restored', 'Cloud unchanged'] standard_name: status_flag cloud_test_dust: @@ -1263,7 +1254,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 21 flag_values: [0,1] - flag_meanings: ['Dust undetected','Dust detected'] + flag_meanings: ['Cloud undetected','Cloud detected'] standard_name: status_flag cloud_test_ash: @@ -1273,7 +1264,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 22 flag_values: [0,1] - flag_meanings: ['Ash undetected','Ash detected'] + flag_meanings: ['Cloud undetected','Cloud detected'] standard_name: status_flag cloud_test_dust_ash: @@ -1283,7 +1274,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 23 flag_values: [0,1] - flag_meanings: ['Dust detected','Ash detected'] + flag_meanings: ['Cloud undetected','Cloud detected'] standard_name: status_flag cloud_test_cmrt6: @@ -1295,14 +1286,33 @@ datasets: standard_name: status_flag import_enum_information: True + product_quality_clmtest: + name: product_quality_clmtest + file_type: nc_fci_test_clm + file_key: product_quality + standard_name: product_quality + + product_completeness_clmtest: + name: product_completeness_clmtest + file_type: nc_fci_test_clm + file_key: product_completeness + standard_name: product_completeness + + product_timeliness_clmtest: + name: product_timeliness_clmtest + file_type: nc_fci_test_clm + file_key: product_timeliness + standard_name: product_timeliness + # ASR radiance_min: name: radiance_min resolution: 32000 + wavelength: [] file_type: nc_fci_asr file_key: radiance_min - long_name: TOA Outgoing Radiance segment min - standard_name: toa_outgoing_radiance + long_name: TOA min Radiance + standard_name: toa_radiance cell_method: area:minimum coordinates: - longitude @@ -1311,10 +1321,11 @@ datasets: radiance_max: name: radiance_max resolution: 32000 + wavelength: [] file_type: nc_fci_asr file_key: radiance_max - long_name: TOA Outgoing Radiance segment max - standard_name: toa_outgoing_radiance + long_name: TOA max Radiance + standard_name: toa_radiance cell_method: area:maximum coordinates: - longitude @@ -1323,10 +1334,11 @@ datasets: radiance_mean: name: radiance_mean resolution: 32000 + wavelength: [] file_type: nc_fci_asr file_key: radiance_mean - long_name: TOA Outgoing Radiance segment mean - standard_name: toa_outgoing_radiance + long_name: TOA mean Radiance + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1338,10 +1350,10 @@ datasets: wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 0 category_id: 0 - long_name: TOA Outgoing Radiance segment mean at 0.4um (all pixels) - standard_name: toa_outgoing_radiance + channel_id: 0 + long_name: TOA mean Radiance over all pixels for vis04 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1353,10 +1365,10 @@ datasets: wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 0 category_id: 1 - long_name: TOA Outgoing Radiance segment mean at 0.4um (clear pixels) - standard_name: toa_outgoing_radiance + channel_id: 0 + long_name: TOA mean Radiance over clear pixels for vis04 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1368,10 +1380,10 @@ datasets: wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr file_key: 
radiance_mean - channel_id: 0 category_id: 2 - long_name: TOA Outgoing Radiance segment mean at 0.4um (cloudy pixels) - standard_name: toa_outgoing_radiance + channel_id: 0 + long_name: TOA mean Radiance over cloudy pixels for vis04 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1383,10 +1395,10 @@ datasets: wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 1 category_id: 0 - long_name: TOA Outgoing Radiance segment mean at 0.5um (all pixels) - standard_name: toa_outgoing_radiance + channel_id: 1 + long_name: TOA mean Radiance over all pixels for vis05 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1398,10 +1410,10 @@ datasets: wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 1 category_id: 1 - long_name: TOA Outgoing Radiance segment mean at 0.5um (clear pixels) - standard_name: toa_outgoing_radiance + channel_id: 1 + long_name: TOA mean Radiance over clear pixels for vis05 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1413,10 +1425,10 @@ datasets: wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 1 category_id: 2 - long_name: TOA Outgoing Radiance segment mean at 0.5um (cloudy pixels) - standard_name: toa_outgoing_radiance + channel_id: 1 + long_name: TOA mean Radiance over cloudy pixels for vis05 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1428,10 +1440,10 @@ datasets: wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 2 category_id: 0 - long_name: TOA Outgoing Radiance segment mean at 0.6um (all pixels) - standard_name: toa_outgoing_radiance + channel_id: 2 + long_name: TOA mean Radiance over all pixels for vis06 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1443,10 +1455,10 @@ datasets: wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 2 category_id: 1 - long_name: TOA Outgoing Radiance segment mean at 0.6um (clear pixels) - standard_name: toa_outgoing_radiance + channel_id: 2 + long_name: TOA mean Radiance over clear pixels for vis06 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1458,10 +1470,10 @@ datasets: wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 2 category_id: 2 - long_name: TOA Outgoing Radiance segment mean at 0.6um (cloudy pixels) - standard_name: toa_outgoing_radiance + channel_id: 2 + long_name: TOA mean Radiance over cloudy pixels for vis06 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1473,10 +1485,10 @@ datasets: wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 3 category_id: 0 - long_name: TOA Outgoing Radiance segment mean at 0.9um (all pixels) - standard_name: toa_outgoing_radiance + channel_id: 3 + long_name: TOA mean Radiance over all pixels for vis08 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1488,10 +1500,10 @@ datasets: wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 3 category_id: 1 - long_name: TOA Outgoing Radiance segment mean at 0.9um (clear pixels) - standard_name: toa_outgoing_radiance + channel_id: 3 + long_name: TOA mean Radiance over clear pixels for vis08 channel + 
standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1503,10 +1515,10 @@ datasets: wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 3 category_id: 2 - long_name: TOA Outgoing Radiance segment mean at 0.9um (cloudy pixels) - standard_name: toa_outgoing_radiance + channel_id: 3 + long_name: TOA mean Radiance over cloudy pixels for vis08 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1518,10 +1530,10 @@ datasets: wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 4 category_id: 0 - long_name: TOA Outgoing Radiance segment mean at 0.9um (all pixels) - standard_name: toa_outgoing_radiance + channel_id: 4 + long_name: TOA mean Radiance over all pixels for vis09 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1533,10 +1545,10 @@ datasets: wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 4 category_id: 1 - long_name: TOA Outgoing Radiance segment mean at 0.9um (clear pixels) - standard_name: toa_outgoing_radiance + channel_id: 4 + long_name: TOA mean Radiance over clear pixels for vis09 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1548,10 +1560,10 @@ datasets: wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 4 category_id: 2 - long_name: TOA Outgoing Radiance segment mean at 0.9um (cloudy pixels) - standard_name: toa_outgoing_radiance + channel_id: 4 + long_name: TOA mean Radiance over cloudy pixels for vis09 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1563,10 +1575,10 @@ datasets: wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 5 category_id: 0 - long_name: TOA Outgoing Radiance segment mean at 1.4um (all pixels) - standard_name: toa_outgoing_radiance + channel_id: 5 + long_name: TOA mean Radiance over all pixels for nir13 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1578,10 +1590,10 @@ datasets: wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 5 category_id: 1 - long_name: TOA Outgoing Radiance segment mean at 1.4um (clear pixels) - standard_name: toa_outgoing_radiance + channel_id: 5 + long_name: TOA mean Radiance over clear pixels for nir13 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1593,10 +1605,10 @@ datasets: wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 5 category_id: 2 - long_name: TOA Outgoing Radiance segment mean at 1.4um (cloudy pixels) - standard_name: toa_outgoing_radiance + channel_id: 5 + long_name: TOA mean Radiance over cloudy pixels for nir13 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1608,10 +1620,10 @@ datasets: wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 6 category_id: 0 - long_name: TOA Outgoing Radiance segment mean at 1.6um (all pixels) - standard_name: toa_outgoing_radiance + channel_id: 6 + long_name: TOA mean Radiance over all pixels for nir16 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1623,10 +1635,10 @@ datasets: wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 6 category_id: 1 - long_name: TOA 
Outgoing Radiance segment mean at 1.6um (clear pixels) - standard_name: toa_outgoing_radiance + channel_id: 6 + long_name: TOA mean Radiance over clear pixels for nir16 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1638,10 +1650,10 @@ datasets: wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 6 category_id: 2 - long_name: TOA Outgoing Radiance segment mean at 1.6um (cloudy pixels) - standard_name: toa_outgoing_radiance + channel_id: 6 + long_name: TOA mean Radiance over cloudy pixels for nir16 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1653,10 +1665,10 @@ datasets: wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 7 category_id: 0 - long_name: TOA Outgoing Radiance segment mean at 2.2um (all pixels) - standard_name: toa_outgoing_radiance + channel_id: 7 + long_name: TOA mean Radiance over all pixels for nir22 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1668,10 +1680,10 @@ datasets: wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 7 category_id: 1 - long_name: TOA Outgoing Radiance segment mean at 2.2um (clear pixels) - standard_name: toa_outgoing_radiance + channel_id: 7 + long_name: TOA mean Radiance over clear pixels for nir22 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1683,10 +1695,10 @@ datasets: wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 7 category_id: 2 - long_name: TOA Outgoing Radiance segment mean at 2.2um (cloudy pixels) - standard_name: toa_outgoing_radiance + channel_id: 7 + long_name: TOA mean Radiance over cloudy pixels for nir22 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1698,10 +1710,10 @@ datasets: wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 8 category_id: 0 - long_name: TOA Outgoing Radiance segment mean at 3.8um (all pixels) - standard_name: toa_outgoing_radiance + channel_id: 8 + long_name: TOA mean Radiance over all pixels for ir38 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1713,10 +1725,10 @@ datasets: wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 8 category_id: 1 - long_name: TOA Outgoing Radiance segment mean at 3.8um (clear pixels) - standard_name: toa_outgoing_radiance + channel_id: 8 + long_name: TOA mean Radiance over clear pixels for ir38 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1728,10 +1740,10 @@ datasets: wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 8 category_id: 2 - long_name: TOA Outgoing Radiance segment mean at 3.8um (cloudy pixels) - standard_name: toa_outgoing_radiance + channel_id: 8 + long_name: TOA mean Radiance over cloudy pixels for ir38 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1743,10 +1755,10 @@ datasets: wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 9 category_id: 0 - long_name: TOA Outgoing Radiance segment mean at 6.3um (all pixels) - standard_name: toa_outgoing_radiance + channel_id: 9 + long_name: TOA mean Radiance over all pixels for wv63 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1758,10 
+1770,10 @@ datasets: wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 9 category_id: 1 - long_name: TOA Outgoing Radiance segment mean at 6.3um (clear pixels) - standard_name: toa_outgoing_radiance + channel_id: 9 + long_name: TOA mean Radiance over clear pixels for wv63 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1773,10 +1785,10 @@ datasets: wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 9 category_id: 2 - long_name: TOA Outgoing Radiance segment mean at 6.3um (cloudy pixels) - standard_name: toa_outgoing_radiance + channel_id: 9 + long_name: TOA mean Radiance over cloudy pixels for wv63 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1788,10 +1800,10 @@ datasets: wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 10 category_id: 0 - long_name: TOA Outgoing Radiance segment mean at 7.3um (all pixels) - standard_name: toa_outgoing_radiance + channel_id: 10 + long_name: TOA mean Radiance over all pixels for wv73 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1803,10 +1815,10 @@ datasets: wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 10 category_id: 1 - long_name: TOA Outgoing Radiance segment mean at 7.3um (clear pixels) - standard_name: toa_outgoing_radiance + channel_id: 10 + long_name: TOA mean Radiance over clear pixels for wv73 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1818,10 +1830,10 @@ datasets: wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 10 category_id: 2 - long_name: TOA Outgoing Radiance segment mean at 7.3um (cloudy pixels) - standard_name: toa_outgoing_radiance + channel_id: 10 + long_name: TOA mean Radiance over cloudy pixels for wv73 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1833,10 +1845,10 @@ datasets: wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 11 category_id: 0 - long_name: TOA Outgoing Radiance segment mean at 8.7um (all pixels) - standard_name: toa_outgoing_radiance + channel_id: 11 + long_name: TOA mean Radiance over all pixels for ir87 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1848,10 +1860,10 @@ datasets: wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 11 category_id: 1 - long_name: TOA Outgoing Radiance segment mean at 8.7um (clear pixels) - standard_name: toa_outgoing_radiance + channel_id: 11 + long_name: TOA mean Radiance over clear pixels for ir87 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1863,10 +1875,10 @@ datasets: wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 11 category_id: 2 - long_name: TOA Outgoing Radiance segment mean at 8.7um (cloudy pixels) - standard_name: toa_outgoing_radiance + channel_id: 11 + long_name: TOA mean Radiance over cloudy pixels for ir87 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1878,10 +1890,10 @@ datasets: wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 12 category_id: 0 - long_name: TOA Outgoing Radiance segment mean at 9.7um (all pixels) - standard_name: toa_outgoing_radiance + channel_id: 
12 + long_name: TOA mean Radiance over all pixels for ir97 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1893,10 +1905,10 @@ datasets: wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 12 category_id: 1 - long_name: TOA Outgoing Radiance segment mean at 9.7um (clear pixels) - standard_name: toa_outgoing_radiance + channel_id: 12 + long_name: TOA mean Radiance over clear pixels for ir97 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1908,10 +1920,10 @@ datasets: wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 12 category_id: 2 - long_name: TOA Outgoing Radiance segment mean at 9.7um (cloudy pixels) - standard_name: toa_outgoing_radiance + channel_id: 12 + long_name: TOA mean Radiance over cloudy pixels for ir97 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1923,10 +1935,10 @@ datasets: wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 13 category_id: 0 - long_name: TOA Outgoing Radiance segment mean at 10.5um (all pixels) - standard_name: toa_outgoing_radiance + channel_id: 13 + long_name: TOA mean Radiance over all pixels for ir105 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1938,10 +1950,10 @@ datasets: wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 13 category_id: 1 - long_name: TOA Outgoing Radiance segment mean at 10.5um (clear pixels) - standard_name: toa_outgoing_radiance + channel_id: 13 + long_name: TOA mean Radiance over clear pixels for ir105 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1953,10 +1965,10 @@ datasets: wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 13 category_id: 2 - long_name: TOA Outgoing Radiance segment mean at 10.5um (cloudy pixels) - standard_name: toa_outgoing_radiance + channel_id: 13 + long_name: TOA mean Radiance over cloudy pixels for ir105 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1968,10 +1980,10 @@ datasets: wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 14 category_id: 0 - long_name: TOA Outgoing Radiance segment mean at 12.3um (all pixels) - standard_name: toa_outgoing_radiance + channel_id: 14 + long_name: TOA mean Radiance over all pixels for ir123 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1983,10 +1995,10 @@ datasets: wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 14 category_id: 1 - long_name: TOA Outgoing Radiance segment mean at 12.3um (clear pixels) - standard_name: toa_outgoing_radiance + channel_id: 14 + long_name: TOA mean Radiance over clear pixels for ir123 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -1998,10 +2010,10 @@ datasets: wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 14 category_id: 2 - long_name: TOA Outgoing Radiance segment mean at 12.3um (cloudy pixels) - standard_name: toa_outgoing_radiance + channel_id: 14 + long_name: TOA mean Radiance over cloudy pixels for ir123 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -2013,10 +2025,10 @@ datasets: wavelength: [12.7, 13.3, 13.9] file_type: 
nc_fci_asr file_key: radiance_mean - channel_id: 15 category_id: 0 - long_name: TOA Outgoing Radiance segment mean at 13.3um (all pixels) - standard_name: toa_outgoing_radiance + channel_id: 15 + long_name: TOA mean Radiance over all pixels for ir133 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -2028,10 +2040,10 @@ datasets: wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 15 category_id: 1 - long_name: TOA Outgoing Radiance segment mean at 13.3um (clear pixels) - standard_name: toa_outgoing_radiance + channel_id: 15 + long_name: TOA mean Radiance over clear pixels for ir133 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -2043,10 +2055,10 @@ datasets: wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr file_key: radiance_mean - channel_id: 15 category_id: 2 - long_name: TOA Outgoing Radiance segment mean at 13.3um (cloudy pixels) - standard_name: toa_outgoing_radiance + channel_id: 15 + long_name: TOA mean Radiance over cloudy pixels for ir133 channel + standard_name: toa_radiance cell_method: area:mean coordinates: - longitude @@ -2055,10 +2067,11 @@ datasets: radiance_std: name: radiance_std resolution: 32000 + wavelength: [] file_type: nc_fci_asr file_key: radiance_std - long_name: TOA Outgoing Radiance standard deviation - standard_name: toa_outgoing_radiance + long_name: TOA Radiance standard deviation over None pixels for None channel + standard_name: toa_radiance cell_method: area:standard_deviation coordinates: - longitude @@ -2067,9 +2080,10 @@ datasets: radiance_quality: name: radiance_quality resolution: 32000 + wavelength: [] file_type: nc_fci_asr file_key: radiance_quality - long_name: TOA Radiance % confidence + long_name: TOA Radiance Quality standard_name: radiance_quality coordinates: - longitude @@ -2078,10 +2092,11 @@ datasets: reflectance_min: name: reflectance_min resolution: 32000 + wavelength: [] file_type: nc_fci_asr file_key: reflectance_min - long_name: TOA Bidirectional Reflectance segment min - standard_name: toa_bidirectional_reflectance + long_name: TOA min Reflectance + standard_name: toa_reflectance cell_method: area:minimum coordinates: - longitude @@ -2090,10 +2105,11 @@ datasets: reflectance_max: name: reflectance_max resolution: 32000 + wavelength: [] file_type: nc_fci_asr file_key: reflectance_max - long_name: TOA Bidirectional Reflectance segment max - standard_name: toa_bidirectional_reflectance + long_name: TOA max Reflectance + standard_name: toa_reflectance cell_method: area:maximum coordinates: - longitude @@ -2102,10 +2118,11 @@ datasets: reflectance_mean: name: reflectance_mean resolution: 32000 + wavelength: [] file_type: nc_fci_asr file_key: reflectance_mean - long_name: TOA Bidirectional Reflectance segment mean - standard_name: toa_bidirectional_reflectance + long_name: TOA mean Reflectance + standard_name: toa_reflectance cell_method: area:mean coordinates: - longitude @@ -2117,10 +2134,10 @@ datasets: wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr file_key: reflectance_mean - vis_channel_id: 0 category_id: 0 - long_name: TOA Bidirectional Reflectance segment mean at 0.4um (all pixels) - standard_name: toa_bidirectional_reflectance + vis_channel_id: 0 + long_name: TOA mean Reflectance over all pixels for vis04 channel + standard_name: toa_reflectance cell_method: area:mean coordinates: - longitude @@ -2132,10 +2149,10 @@ datasets: wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr file_key: 
reflectance_mean - vis_channel_id: 0 category_id: 1 - long_name: TOA Bidirectional Reflectance segment mean at 0.4um (clear pixels) - standard_name: toa_bidirectional_reflectance + vis_channel_id: 0 + long_name: TOA mean Reflectance over clear pixels for vis04 channel + standard_name: toa_reflectance cell_method: area:mean coordinates: - longitude @@ -2147,10 +2164,10 @@ datasets: wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr file_key: reflectance_mean - vis_channel_id: 0 category_id: 2 - long_name: TOA Bidirectional Reflectance segment mean at 0.4um (cloudy pixels) - standard_name: toa_bidirectional_reflectance + vis_channel_id: 0 + long_name: TOA mean Reflectance over cloudy pixels for vis04 channel + standard_name: toa_reflectance cell_method: area:mean coordinates: - longitude @@ -2162,10 +2179,10 @@ datasets: wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr file_key: reflectance_mean - vis_channel_id: 1 category_id: 0 - long_name: TOA Bidirectional Reflectance segment mean at 0.5um (all pixels) - standard_name: toa_bidirectional_reflectance + vis_channel_id: 1 + long_name: TOA mean Reflectance over all pixels for vis05 channel + standard_name: toa_reflectance cell_method: area:mean coordinates: - longitude @@ -2177,10 +2194,10 @@ datasets: wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr file_key: reflectance_mean - vis_channel_id: 1 category_id: 1 - long_name: TOA Bidirectional Reflectance segment mean at 0.5um (clear pixels) - standard_name: toa_bidirectional_reflectance + vis_channel_id: 1 + long_name: TOA mean Reflectance over clear pixels for vis05 channel + standard_name: toa_reflectance cell_method: area:mean coordinates: - longitude @@ -2192,10 +2209,10 @@ datasets: wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr file_key: reflectance_mean - vis_channel_id: 1 category_id: 2 - long_name: TOA Bidirectional Reflectance segment mean at 0.5um (cloudy pixels) - standard_name: toa_bidirectional_reflectance + vis_channel_id: 1 + long_name: TOA mean Reflectance over cloudy pixels for vis05 channel + standard_name: toa_reflectance cell_method: area:mean coordinates: - longitude @@ -2207,10 +2224,10 @@ datasets: wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr file_key: reflectance_mean - vis_channel_id: 2 category_id: 0 - long_name: TOA Bidirectional Reflectance segment mean at 0.6um (all pixels) - standard_name: toa_bidirectional_reflectance + vis_channel_id: 2 + long_name: TOA mean Reflectance over all pixels for vis06 channel + standard_name: toa_reflectance cell_method: area:mean coordinates: - longitude @@ -2222,10 +2239,10 @@ datasets: wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr file_key: reflectance_mean - vis_channel_id: 2 category_id: 1 - long_name: TOA Bidirectional Reflectance segment mean at 0.6um (clear pixels) - standard_name: toa_bidirectional_reflectance + vis_channel_id: 2 + long_name: TOA mean Reflectance over clear pixels for vis06 channel + standard_name: toa_reflectance cell_method: area:mean coordinates: - longitude @@ -2237,10 +2254,10 @@ datasets: wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr file_key: reflectance_mean - vis_channel_id: 2 category_id: 2 - long_name: TOA Bidirectional Reflectance segment mean at 0.6um (cloudy pixels) - standard_name: toa_bidirectional_reflectance + vis_channel_id: 2 + long_name: TOA mean Reflectance over cloudy pixels for vis06 channel + standard_name: toa_reflectance cell_method: area:mean coordinates: - longitude @@ -2252,10 +2269,10 @@ datasets: wavelength: [0.815, 0.865, 0.915] 
file_type: nc_fci_asr file_key: reflectance_mean - vis_channel_id: 3 category_id: 0 - long_name: TOA Bidirectional Reflectance segment mean at 0.9um (all pixels) - standard_name: toa_bidirectional_reflectance + vis_channel_id: 3 + long_name: TOA mean Reflectance over all pixels for vis08 channel + standard_name: toa_reflectance cell_method: area:mean coordinates: - longitude @@ -2267,10 +2284,10 @@ datasets: wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr file_key: reflectance_mean - vis_channel_id: 3 category_id: 1 - long_name: TOA Bidirectional Reflectance segment mean at 0.9um (clear pixels) - standard_name: toa_bidirectional_reflectance + vis_channel_id: 3 + long_name: TOA mean Reflectance over clear pixels for vis08 channel + standard_name: toa_reflectance cell_method: area:mean coordinates: - longitude @@ -2282,10 +2299,10 @@ datasets: wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr file_key: reflectance_mean - vis_channel_id: 3 category_id: 2 - long_name: TOA Bidirectional Reflectance segment mean at 0.9um (cloudy pixels) - standard_name: toa_bidirectional_reflectance + vis_channel_id: 3 + long_name: TOA mean Reflectance over cloudy pixels for vis08 channel + standard_name: toa_reflectance cell_method: area:mean coordinates: - longitude @@ -2297,10 +2314,10 @@ datasets: wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr file_key: reflectance_mean - vis_channel_id: 4 category_id: 0 - long_name: TOA Bidirectional Reflectance segment mean at 0.9um (all pixels) - standard_name: toa_bidirectional_reflectance + vis_channel_id: 4 + long_name: TOA mean Reflectance over all pixels for vis09 channel + standard_name: toa_reflectance cell_method: area:mean coordinates: - longitude @@ -2312,10 +2329,10 @@ datasets: wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr file_key: reflectance_mean - vis_channel_id: 4 category_id: 1 - long_name: TOA Bidirectional Reflectance segment mean at 0.9um (clear pixels) - standard_name: toa_bidirectional_reflectance + vis_channel_id: 4 + long_name: TOA mean Reflectance over clear pixels for vis09 channel + standard_name: toa_reflectance cell_method: area:mean coordinates: - longitude @@ -2327,10 +2344,10 @@ datasets: wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr file_key: reflectance_mean - vis_channel_id: 4 category_id: 2 - long_name: TOA Bidirectional Reflectance segment mean at 0.9um (cloudy pixels) - standard_name: toa_bidirectional_reflectance + vis_channel_id: 4 + long_name: TOA mean Reflectance over cloudy pixels for vis09 channel + standard_name: toa_reflectance cell_method: area:mean coordinates: - longitude @@ -2342,10 +2359,10 @@ datasets: wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr file_key: reflectance_mean - vis_channel_id: 5 category_id: 0 - long_name: TOA Bidirectional Reflectance segment mean at 1.4um (all pixels) - standard_name: toa_bidirectional_reflectance + vis_channel_id: 5 + long_name: TOA mean Reflectance over all pixels for nir13 channel + standard_name: toa_reflectance cell_method: area:mean coordinates: - longitude @@ -2357,10 +2374,10 @@ datasets: wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr file_key: reflectance_mean - vis_channel_id: 5 category_id: 1 - long_name: TOA Bidirectional Reflectance segment mean at 1.4um (clear pixels) - standard_name: toa_bidirectional_reflectance + vis_channel_id: 5 + long_name: TOA mean Reflectance over clear pixels for nir13 channel + standard_name: toa_reflectance cell_method: area:mean coordinates: - longitude @@ -2372,10 +2389,10 @@ datasets: 
wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr file_key: reflectance_mean - vis_channel_id: 5 category_id: 2 - long_name: TOA Bidirectional Reflectance segment mean at 1.4um (cloudy pixels) - standard_name: toa_bidirectional_reflectance + vis_channel_id: 5 + long_name: TOA mean Reflectance over cloudy pixels for nir13 channel + standard_name: toa_reflectance cell_method: area:mean coordinates: - longitude @@ -2387,10 +2404,10 @@ datasets: wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr file_key: reflectance_mean - vis_channel_id: 6 category_id: 0 - long_name: TOA Bidirectional Reflectance segment mean at 1.6um (all pixels) - standard_name: toa_bidirectional_reflectance + vis_channel_id: 6 + long_name: TOA mean Reflectance over all pixels for nir16 channel + standard_name: toa_reflectance cell_method: area:mean coordinates: - longitude @@ -2402,10 +2419,10 @@ datasets: wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr file_key: reflectance_mean - vis_channel_id: 6 category_id: 1 - long_name: TOA Bidirectional Reflectance segment mean at 1.6um (clear pixels) - standard_name: toa_bidirectional_reflectance + vis_channel_id: 6 + long_name: TOA mean Reflectance over clear pixels for nir16 channel + standard_name: toa_reflectance cell_method: area:mean coordinates: - longitude @@ -2417,10 +2434,10 @@ datasets: wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr file_key: reflectance_mean - vis_channel_id: 6 category_id: 2 - long_name: TOA Bidirectional Reflectance segment mean at 1.6um (cloudy pixels) - standard_name: toa_bidirectional_reflectance + vis_channel_id: 6 + long_name: TOA mean Reflectance over cloudy pixels for nir16 channel + standard_name: toa_reflectance cell_method: area:mean coordinates: - longitude @@ -2432,10 +2449,10 @@ datasets: wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr file_key: reflectance_mean - vis_channel_id: 7 category_id: 0 - long_name: TOA Bidirectional Reflectance segment mean at 2.2um (all pixels) - standard_name: toa_bidirectional_reflectance + vis_channel_id: 7 + long_name: TOA mean Reflectance over all pixels for nir22 channel + standard_name: toa_reflectance cell_method: area:mean coordinates: - longitude @@ -2447,10 +2464,10 @@ datasets: wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr file_key: reflectance_mean - vis_channel_id: 7 category_id: 1 - long_name: TOA Bidirectional Reflectance segment mean at 2.2um (clear pixels) - standard_name: toa_bidirectional_reflectance + vis_channel_id: 7 + long_name: TOA mean Reflectance over clear pixels for nir22 channel + standard_name: toa_reflectance cell_method: area:mean coordinates: - longitude @@ -2462,10 +2479,10 @@ datasets: wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr file_key: reflectance_mean - vis_channel_id: 7 category_id: 2 - long_name: TOA Bidirectional Reflectance segment mean at 2.2um (cloudy pixels) - standard_name: toa_bidirectional_reflectance + vis_channel_id: 7 + long_name: TOA mean Reflectance over cloudy pixels for nir22 channel + standard_name: toa_reflectance cell_method: area:mean coordinates: - longitude @@ -2474,10 +2491,11 @@ datasets: reflectance_std: name: reflectance_std resolution: 32000 + wavelength: [] file_type: nc_fci_asr file_key: reflectance_std - long_name: TOA Bidirectional Reflectance standard deviation - standard_name: toa_bidirectional_reflectance + long_name: TOA Reflectance standard deviation + standard_name: toa_reflectance cell_method: area:standard_deviation coordinates: - longitude @@ -2486,9 +2504,10 @@ datasets: reflectance_quality: name: 
reflectance_quality resolution: 32000 + wavelength: [] file_type: nc_fci_asr file_key: reflectance_quality - long_name: TOA Bidirectional Reflectance % confidence + long_name: TOA Reflectance Quality standard_name: reflectance_quality coordinates: - longitude @@ -2497,9 +2516,10 @@ datasets: bt_min: name: bt_min resolution: 32000 + wavelength: [] file_type: nc_fci_asr file_key: bt_min - long_name: TOA Brightess Temperature segment min + long_name: TOA min Brightess Temperature standard_name: toa_brightess_temperature cell_method: area:minimum coordinates: @@ -2509,9 +2529,10 @@ datasets: bt_max: name: bt_max resolution: 32000 + wavelength: [] file_type: nc_fci_asr file_key: bt_max - long_name: TOA Brightess Temperature segment max + long_name: TOA max Brightess Temperature standard_name: toa_brightess_temperature cell_method: area:maximum coordinates: @@ -2521,9 +2542,10 @@ datasets: bt_mean: name: bt_mean resolution: 32000 + wavelength: [] file_type: nc_fci_asr file_key: bt_mean - long_name: TOA Brightess Temperature segment mean + long_name: TOA mean Brightess Temperature standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2536,9 +2558,9 @@ datasets: wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr file_key: bt_mean - ir_channel_id: 0 category_id: 0 - long_name: TOA Brightess Temperature segment mean at 3.8um (all pixels) + ir_channel_id: 0 + long_name: TOA mean Brightess Temperature over all pixels for ir38 channel standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2551,9 +2573,9 @@ datasets: wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr file_key: bt_mean - ir_channel_id: 0 category_id: 1 - long_name: TOA Brightess Temperature segment mean at 3.8um (clear pixels) + ir_channel_id: 0 + long_name: TOA mean Brightess Temperature over clear pixels for ir38 channel standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2566,9 +2588,9 @@ datasets: wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr file_key: bt_mean - ir_channel_id: 0 category_id: 2 - long_name: TOA Brightess Temperature segment mean at 3.8um (cloudy pixels) + ir_channel_id: 0 + long_name: TOA mean Brightess Temperature over cloudy pixels for ir38 channel standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2581,9 +2603,9 @@ datasets: wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr file_key: bt_mean - ir_channel_id: 1 category_id: 0 - long_name: TOA Brightess Temperature segment mean at 6.3um (all pixels) + ir_channel_id: 1 + long_name: TOA mean Brightess Temperature over all pixels for wv63 channel standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2596,9 +2618,9 @@ datasets: wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr file_key: bt_mean - ir_channel_id: 1 category_id: 1 - long_name: TOA Brightess Temperature segment mean at 6.3um (clear pixels) + ir_channel_id: 1 + long_name: TOA mean Brightess Temperature over clear pixels for wv63 channel standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2611,9 +2633,9 @@ datasets: wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr file_key: bt_mean - ir_channel_id: 1 category_id: 2 - long_name: TOA Brightess Temperature segment mean at 6.3um (cloudy pixels) + ir_channel_id: 1 + long_name: TOA mean Brightess Temperature over cloudy pixels for wv63 channel standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2626,9 +2648,9 @@ datasets: wavelength: [6.85, 7.35, 7.85] file_type: 
nc_fci_asr file_key: bt_mean - ir_channel_id: 2 category_id: 0 - long_name: TOA Brightess Temperature segment mean at 7.3um (all pixels) + ir_channel_id: 2 + long_name: TOA mean Brightess Temperature over all pixels for wv73 channel standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2641,9 +2663,9 @@ datasets: wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr file_key: bt_mean - ir_channel_id: 2 category_id: 1 - long_name: TOA Brightess Temperature segment mean at 7.3um (clear pixels) + ir_channel_id: 2 + long_name: TOA mean Brightess Temperature over clear pixels for wv73 channel standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2656,9 +2678,9 @@ datasets: wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr file_key: bt_mean - ir_channel_id: 2 category_id: 2 - long_name: TOA Brightess Temperature segment mean at 7.3um (cloudy pixels) + ir_channel_id: 2 + long_name: TOA mean Brightess Temperature over cloudy pixels for wv73 channel standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2671,9 +2693,9 @@ datasets: wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr file_key: bt_mean - ir_channel_id: 3 category_id: 0 - long_name: TOA Brightess Temperature segment mean at 8.7um (all pixels) + ir_channel_id: 3 + long_name: TOA mean Brightess Temperature over all pixels for ir87 channel standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2686,9 +2708,9 @@ datasets: wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr file_key: bt_mean - ir_channel_id: 3 category_id: 1 - long_name: TOA Brightess Temperature segment mean at 8.7um (clear pixels) + ir_channel_id: 3 + long_name: TOA mean Brightess Temperature over clear pixels for ir87 channel standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2701,9 +2723,9 @@ datasets: wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr file_key: bt_mean - ir_channel_id: 3 category_id: 2 - long_name: TOA Brightess Temperature segment mean at 8.7um (cloudy pixels) + ir_channel_id: 3 + long_name: TOA mean Brightess Temperature over cloudy pixels for ir87 channel standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2716,9 +2738,9 @@ datasets: wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr file_key: bt_mean - ir_channel_id: 4 category_id: 0 - long_name: TOA Brightess Temperature segment mean at 9.7um (all pixels) + ir_channel_id: 4 + long_name: TOA mean Brightess Temperature over all pixels for ir97 channel standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2731,9 +2753,9 @@ datasets: wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr file_key: bt_mean - ir_channel_id: 4 category_id: 1 - long_name: TOA Brightess Temperature segment mean at 9.7um (clear pixels) + ir_channel_id: 4 + long_name: TOA mean Brightess Temperature over clear pixels for ir97 channel standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2746,9 +2768,9 @@ datasets: wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr file_key: bt_mean - ir_channel_id: 4 category_id: 2 - long_name: TOA Brightess Temperature segment mean at 9.7um (cloudy pixels) + ir_channel_id: 4 + long_name: TOA mean Brightess Temperature over cloudy pixels for ir97 channel standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2761,9 +2783,9 @@ datasets: wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr file_key: bt_mean - ir_channel_id: 5 category_id: 0 - long_name: 
TOA Brightess Temperature segment mean at 10.5um (all pixels) + ir_channel_id: 5 + long_name: TOA mean Brightess Temperature over all pixels for ir105 channel standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2776,9 +2798,9 @@ datasets: wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr file_key: bt_mean - ir_channel_id: 5 category_id: 1 - long_name: TOA Brightess Temperature segment mean at 10.5um (clear pixels) + ir_channel_id: 5 + long_name: TOA mean Brightess Temperature over clear pixels for ir105 channel standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2791,9 +2813,9 @@ datasets: wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr file_key: bt_mean - ir_channel_id: 5 category_id: 2 - long_name: TOA Brightess Temperature segment mean at 10.5um (cloudy pixels) + ir_channel_id: 5 + long_name: TOA mean Brightess Temperature over cloudy pixels for ir105 channel standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2806,9 +2828,9 @@ datasets: wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr file_key: bt_mean - ir_channel_id: 6 category_id: 0 - long_name: TOA Brightess Temperature segment mean at 12.3um (all pixels) + ir_channel_id: 6 + long_name: TOA mean Brightess Temperature over all pixels for ir123 channel standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2821,9 +2843,9 @@ datasets: wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr file_key: bt_mean - ir_channel_id: 6 category_id: 1 - long_name: TOA Brightess Temperature segment mean at 12.3um (clear pixels) + ir_channel_id: 6 + long_name: TOA mean Brightess Temperature over clear pixels for ir123 channel standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2836,9 +2858,9 @@ datasets: wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr file_key: bt_mean - ir_channel_id: 6 category_id: 2 - long_name: TOA Brightess Temperature segment mean at 12.3um (cloudy pixels) + ir_channel_id: 6 + long_name: TOA mean Brightess Temperature over cloudy pixels for ir123 channel standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2851,9 +2873,9 @@ datasets: wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr file_key: bt_mean - ir_channel_id: 7 category_id: 0 - long_name: TOA Brightess Temperature segment mean at 13.3um (all pixels) + ir_channel_id: 7 + long_name: TOA mean Brightess Temperature over all pixels for ir133 channel standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2866,9 +2888,9 @@ datasets: wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr file_key: bt_mean - ir_channel_id: 7 category_id: 1 - long_name: TOA Brightess Temperature segment mean at 13.3um (clear pixels) + ir_channel_id: 7 + long_name: TOA mean Brightess Temperature over clear pixels for ir133 channel standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2881,9 +2903,9 @@ datasets: wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr file_key: bt_mean - ir_channel_id: 7 category_id: 2 - long_name: TOA Brightess Temperature segment mean at 13.3um (cloudy pixels) + ir_channel_id: 7 + long_name: TOA mean Brightess Temperature over cloudy pixels for ir133 channel standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2893,6 +2915,7 @@ datasets: bt_std: name: bt_std resolution: 32000 + wavelength: [] file_type: nc_fci_asr file_key: bt_std long_name: TOA Brightess Temperature standard deviation @@ -2905,9 +2928,10 
@@ datasets: bt_quality: name: bt_quality resolution: 32000 + wavelength: [] file_type: nc_fci_asr file_key: bt_quality - long_name: TOA Brightess Temperature % confidence + long_name: TOA Brightess Temperature Quality standard_name: brightness_temperature_quality coordinates: - longitude From 6aad1bdee201abf848063cc7688841987518249c Mon Sep 17 00:00:00 2001 From: Olivier Samain Date: Fri, 15 Dec 2023 17:01:03 +0100 Subject: [PATCH 1036/1416] Fix ASR issues --- satpy/etc/readers/fci_l2_nc.yaml | 1745 ++++++++++++++++-------------- 1 file changed, 917 insertions(+), 828 deletions(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index 360fde4a5d..b8c342dac1 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -1305,23 +1305,57 @@ datasets: standard_name: product_timeliness # ASR - radiance_min: - name: radiance_min + bt_max: + name: bt_max resolution: 32000 - wavelength: [] file_type: nc_fci_asr - file_key: radiance_min - long_name: TOA min Radiance - standard_name: toa_radiance + file_key: bt_max + long_name: TOA Brightess Temperature segment max + standard_name: toa_brightess_temperature + cell_method: area:maximum + coordinates: + - longitude + - latitude + + bt_mean: + name: bt_mean + resolution: 32000 + file_type: nc_fci_asr + file_key: bt_mean + long_name: TOA Brightess Temperature segment mean + standard_name: toa_brightess_temperature + cell_method: area:mean + coordinates: + - longitude + - latitude + + bt_min: + name: bt_min + resolution: 32000 + file_type: nc_fci_asr + file_key: bt_min + long_name: TOA Brightess Temperature segment min + standard_name: toa_brightess_temperature cell_method: area:minimum coordinates: - longitude - latitude + bt_std: + name: bt_std + resolution: 32000 + file_type: nc_fci_asr + file_key: bt_std + long_name: TOA Brightess Temperature standard deviation + standard_name: toa_brightess_temperature + cell_method: area:standard_deviation + coordinates: + - longitude + - latitude + radiance_max: name: radiance_max resolution: 32000 - wavelength: [] file_type: nc_fci_asr file_key: radiance_max long_name: TOA max Radiance @@ -1331,10 +1365,21 @@ datasets: - longitude - latitude + radiance_min: + name: radiance_min + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_min + long_name: TOA min Radiance + standard_name: toa_radiance + cell_method: area:minimum + coordinates: + - longitude + - latitude + radiance_mean: name: radiance_mean resolution: 32000 - wavelength: [] file_type: nc_fci_asr file_key: radiance_mean long_name: TOA mean Radiance @@ -1344,1625 +1389,1669 @@ datasets: - longitude - latitude - radiance_mean_all_vis04: - name: radiance_mean_all_vis04 + radiance_std: + name: radiance_std + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_std + long_name: TOA Outgoing Radiance standard deviation + standard_name: toa_outgoing_radiance + cell_method: area:standard_deviation + coordinates: + - longitude + - latitude + + reflectance_max: + name: reflectance_max + resolution: 32000 + file_type: nc_fci_asr + file_key: reflectance_max + long_name: TOA Bidirectional Reflectance segment max + standard_name: toa_bidirectional_reflectance + cell_method: area:maximum + coordinates: + - longitude + - latitude + + reflectance_mean: + name: reflectance_mean + resolution: 32000 + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: TOA Bidirectional Reflectance segment mean + standard_name: toa_bidirectional_reflectance + cell_method: area:mean + coordinates: + - 
longitude + - latitude + + reflectance_min: + name: reflectance_min + resolution: 32000 + file_type: nc_fci_asr + file_key: reflectance_min + long_name: TOA Bidirectional Reflectance segment min + standard_name: toa_bidirectional_reflectance + cell_method: area:minimum + coordinates: + - longitude + - latitude + + reflectance_std: + name: reflectance_std + resolution: 32000 + file_type: nc_fci_asr + file_key: reflectance_std + long_name: TOA Bidirectional Reflectance standard deviation + standard_name: toa_bidirectional_reflectance + cell_method: area:standard_deviation + coordinates: + - longitude + - latitude + + quality_bt: + name: quality_bt + resolution: 32000 + file_type: nc_fci_asr + file_key: quality_bt + fill_value: -1 + long_name: TOA Brightess Temperature % confidence + standard_name: brightness_temperature_quality + coordinates: + - longitude + - latitude + + quality_reflectance: + name: quality_reflectance + resolution: 32000 + file_type: nc_fci_asr + file_key: quality_reflectance + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence + standard_name: reflectance_quality + coordinates: + - longitude + - latitude + + quality_radiance: + name: quality_radiance + resolution: 32000 + wavelength: [] + file_type: nc_fci_asr + file_key: quality_radiance + fill_value: -1 + long_name: TOA Radiance % confidence + standard_name: radiance_quality + coordinates: + - longitude + - latitude + + land_pixel_percent: + name: land_pixel_percent + resolution: 32000 + file_type: nc_fci_asr + file_key: land_pixel_percent + standard_name: land_area_fraction + coordinates: + - longitude + - latitude + + water_pixel_percent: + name: water_pixel_percent + resolution: 32000 + file_type: nc_fci_asr + file_key: water_pixel_percent + standard_name: water_area_fraction + coordinates: + - longitude + - latitude + + pixel_percentage: + name: pixel_percentage + resolution: 32000 + file_type: nc_fci_asr + file_key: pixel_percentage + standard_name: pixels_used_fraction + coordinates: + - longitude + - latitude + + reflectance_mean_all_vis04: + name: reflectance_mean_all_vis04 resolution: 32000 wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr - file_key: radiance_mean + file_key: reflectance_mean + vis_channel_id: 0 category_id: 0 - channel_id: 0 - long_name: TOA mean Radiance over all pixels for vis04 channel - standard_name: toa_radiance + long_name: TOA Bidirectional Reflectance segment mean at 0.4um (all pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_clear_vis04: - name: radiance_mean_clear_vis04 + reflectance_mean_clear_vis04: + name: reflectance_mean_clear_vis04 resolution: 32000 wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr - file_key: radiance_mean + file_key: reflectance_mean + vis_channel_id: 0 category_id: 1 - channel_id: 0 - long_name: TOA mean Radiance over clear pixels for vis04 channel - standard_name: toa_radiance + long_name: TOA Bidirectional Reflectance segment mean at 0.4um (clear pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_cloudy_vis04: - name: radiance_mean_cloudy_vis04 + reflectance_mean_cloudy_vis04: + name: reflectance_mean_cloudy_vis04 resolution: 32000 wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr - file_key: radiance_mean + file_key: reflectance_mean + vis_channel_id: 0 category_id: 2 - channel_id: 0 - long_name: TOA mean Radiance over cloudy pixels for vis04 channel - 
standard_name: toa_radiance + long_name: TOA Bidirectional Reflectance segment mean at 0.4um (cloudy pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_all_vis05: - name: radiance_mean_all_vis05 + reflectance_mean_all_vis05: + name: reflectance_mean_all_vis05 resolution: 32000 wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr - file_key: radiance_mean + file_key: reflectance_mean + vis_channel_id: 1 category_id: 0 - channel_id: 1 - long_name: TOA mean Radiance over all pixels for vis05 channel - standard_name: toa_radiance + long_name: TOA Bidirectional Reflectance segment mean at 0.5um (all pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_clear_vis05: - name: radiance_mean_clear_vis05 + reflectance_mean_clear_vis05: + name: reflectance_mean_clear_vis05 resolution: 32000 wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr - file_key: radiance_mean + file_key: reflectance_mean + vis_channel_id: 1 category_id: 1 - channel_id: 1 - long_name: TOA mean Radiance over clear pixels for vis05 channel - standard_name: toa_radiance + long_name: TOA Bidirectional Reflectance segment mean at 0.5um (clear pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_cloudy_vis05: - name: radiance_mean_cloudy_vis05 + reflectance_mean_cloudy_vis05: + name: reflectance_mean_cloudy_vis05 resolution: 32000 wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr - file_key: radiance_mean + file_key: reflectance_mean + vis_channel_id: 1 category_id: 2 - channel_id: 1 - long_name: TOA mean Radiance over cloudy pixels for vis05 channel - standard_name: toa_radiance + long_name: TOA Bidirectional Reflectance segment mean at 0.5um (cloudy pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_all_vis06: - name: radiance_mean_all_vis06 + reflectance_mean_all_vis06: + name: reflectance_mean_all_vis06 resolution: 32000 wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr - file_key: radiance_mean + file_key: reflectance_mean + vis_channel_id: 2 category_id: 0 - channel_id: 2 - long_name: TOA mean Radiance over all pixels for vis06 channel - standard_name: toa_radiance + long_name: TOA Bidirectional Reflectance segment mean at 0.6um (all pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_clear_vis06: - name: radiance_mean_clear_vis06 + reflectance_mean_clear_vis06: + name: reflectance_mean_clear_vis06 resolution: 32000 wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr - file_key: radiance_mean + file_key: reflectance_mean + vis_channel_id: 2 category_id: 1 - channel_id: 2 - long_name: TOA mean Radiance over clear pixels for vis06 channel - standard_name: toa_radiance + long_name: TOA Bidirectional Reflectance segment mean at 0.6um (clear pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_cloudy_vis06: - name: radiance_mean_cloudy_vis06 + reflectance_mean_cloudy_vis06: + name: reflectance_mean_cloudy_vis06 resolution: 32000 wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr - file_key: radiance_mean + file_key: reflectance_mean + vis_channel_id: 2 category_id: 2 - channel_id: 2 - long_name: TOA mean Radiance over cloudy pixels for vis06 channel - standard_name: 
toa_radiance + long_name: TOA Bidirectional Reflectance segment mean at 0.6um (cloudy pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_all_vis08: - name: radiance_mean_all_vis08 + reflectance_mean_all_vis08: + name: reflectance_mean_all_vis08 resolution: 32000 wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr - file_key: radiance_mean + file_key: reflectance_mean + vis_channel_id: 3 category_id: 0 - channel_id: 3 - long_name: TOA mean Radiance over all pixels for vis08 channel - standard_name: toa_radiance + long_name: TOA Bidirectional Reflectance segment mean at 0.9um (all pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_clear_vis08: - name: radiance_mean_clear_vis08 + reflectance_mean_clear_vis08: + name: reflectance_mean_clear_vis08 resolution: 32000 wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr - file_key: radiance_mean + file_key: reflectance_mean + vis_channel_id: 3 category_id: 1 - channel_id: 3 - long_name: TOA mean Radiance over clear pixels for vis08 channel - standard_name: toa_radiance + long_name: TOA Bidirectional Reflectance segment mean at 0.9um (clear pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_cloudy_vis08: - name: radiance_mean_cloudy_vis08 + reflectance_mean_cloudy_vis08: + name: reflectance_mean_cloudy_vis08 resolution: 32000 wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr - file_key: radiance_mean + file_key: reflectance_mean + vis_channel_id: 3 category_id: 2 - channel_id: 3 - long_name: TOA mean Radiance over cloudy pixels for vis08 channel - standard_name: toa_radiance + long_name: TOA Bidirectional Reflectance segment mean at 0.9um (cloudy pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_all_vis09: - name: radiance_mean_all_vis09 + reflectance_mean_all_vis09: + name: reflectance_mean_all_vis09 resolution: 32000 wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr - file_key: radiance_mean + file_key: reflectance_mean + vis_channel_id: 4 category_id: 0 - channel_id: 4 - long_name: TOA mean Radiance over all pixels for vis09 channel - standard_name: toa_radiance + long_name: TOA Bidirectional Reflectance segment mean at 0.9um (all pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_clear_vis09: - name: radiance_mean_clear_vis09 + reflectance_mean_clear_vis09: + name: reflectance_mean_clear_vis09 resolution: 32000 wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr - file_key: radiance_mean + file_key: reflectance_mean + vis_channel_id: 4 category_id: 1 - channel_id: 4 - long_name: TOA mean Radiance over clear pixels for vis09 channel - standard_name: toa_radiance + long_name: TOA Bidirectional Reflectance segment mean at 0.9um (clear pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_cloudy_vis09: - name: radiance_mean_cloudy_vis09 + reflectance_mean_cloudy_vis09: + name: reflectance_mean_cloudy_vis09 resolution: 32000 wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr - file_key: radiance_mean + file_key: reflectance_mean + vis_channel_id: 4 category_id: 2 - channel_id: 4 - long_name: TOA mean Radiance over cloudy pixels for vis09 channel - 
standard_name: toa_radiance + long_name: TOA Bidirectional Reflectance segment mean at 0.9um (cloudy pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_all_nir13: - name: radiance_mean_all_nir13 + reflectance_mean_all_nir13: + name: reflectance_mean_all_nir13 resolution: 32000 wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr - file_key: radiance_mean + file_key: reflectance_mean + vis_channel_id: 5 category_id: 0 - channel_id: 5 - long_name: TOA mean Radiance over all pixels for nir13 channel - standard_name: toa_radiance + long_name: TOA Bidirectional Reflectance segment mean at 1.4um (all pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_clear_nir13: - name: radiance_mean_clear_nir13 + reflectance_mean_clear_nir13: + name: reflectance_mean_clear_nir13 resolution: 32000 wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr - file_key: radiance_mean + file_key: reflectance_mean + vis_channel_id: 5 category_id: 1 - channel_id: 5 - long_name: TOA mean Radiance over clear pixels for nir13 channel - standard_name: toa_radiance + long_name: TOA Bidirectional Reflectance segment mean at 1.4um (clear pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_cloudy_nir13: - name: radiance_mean_cloudy_nir13 + reflectance_mean_cloudy_nir13: + name: reflectance_mean_cloudy_nir13 resolution: 32000 wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr - file_key: radiance_mean + file_key: reflectance_mean + vis_channel_id: 5 category_id: 2 - channel_id: 5 - long_name: TOA mean Radiance over cloudy pixels for nir13 channel - standard_name: toa_radiance + long_name: TOA Bidirectional Reflectance segment mean at 1.4um (cloudy pixels) + standard_name: toa_bidirectional_reflectance + cell_method: area:mean + coordinates: + - longitude + - latitude + + reflectance_mean_all_nir16: + name: reflectance_mean_all_nir16 + resolution: 32000 + wavelength: [1.56, 1.61, 1.66] + file_type: nc_fci_asr + file_key: reflectance_mean + vis_channel_id: 6 + category_id: 0 + long_name: TOA Bidirectional Reflectance segment mean at 1.6um (all pixels) + standard_name: toa_bidirectional_reflectance + cell_method: area:mean + coordinates: + - longitude + - latitude + + reflectance_mean_clear_nir16: + name: reflectance_mean_clear_nir16 + resolution: 32000 + wavelength: [1.56, 1.61, 1.66] + file_type: nc_fci_asr + file_key: reflectance_mean + vis_channel_id: 6 + category_id: 1 + long_name: TOA Bidirectional Reflectance segment mean at 1.6um (clear pixels) + standard_name: toa_bidirectional_reflectance + cell_method: area:mean + coordinates: + - longitude + - latitude + + reflectance_mean_cloudy_nir16: + name: reflectance_mean_cloudy_nir16 + resolution: 32000 + wavelength: [1.56, 1.61, 1.66] + file_type: nc_fci_asr + file_key: reflectance_mean + vis_channel_id: 6 + category_id: 2 + long_name: TOA Bidirectional Reflectance segment mean at 1.6um (cloudy pixels) + standard_name: toa_bidirectional_reflectance + cell_method: area:mean + coordinates: + - longitude + - latitude + + reflectance_mean_all_nir22: + name: reflectance_mean_all_nir22 + resolution: 32000 + wavelength: [2.2, 2.25, 2.3] + file_type: nc_fci_asr + file_key: reflectance_mean + vis_channel_id: 7 + category_id: 0 + long_name: TOA Bidirectional Reflectance segment mean at 2.2um (all pixels) + standard_name: toa_bidirectional_reflectance + 
cell_method: area:mean + coordinates: + - longitude + - latitude + + reflectance_mean_clear_nir22: + name: reflectance_mean_clear_nir22 + resolution: 32000 + wavelength: [2.2, 2.25, 2.3] + file_type: nc_fci_asr + file_key: reflectance_mean + vis_channel_id: 7 + category_id: 1 + long_name: TOA Bidirectional Reflectance segment mean at 2.2um (clear pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_all_nir16: - name: radiance_mean_all_nir16 + reflectance_mean_cloudy_nir22: + name: reflectance_mean_cloudy_nir22 resolution: 32000 - wavelength: [1.56, 1.61, 1.66] + wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr - file_key: radiance_mean - category_id: 0 - channel_id: 6 - long_name: TOA mean Radiance over all pixels for nir16 channel - standard_name: toa_radiance + file_key: reflectance_mean + vis_channel_id: 7 + category_id: 2 + long_name: TOA Bidirectional Reflectance segment mean at 2.2um (cloudy pixels) + standard_name: toa_bidirectional_reflectance cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_clear_nir16: - name: radiance_mean_clear_nir16 + reflectance_min: + name: reflectance_min resolution: 32000 - wavelength: [1.56, 1.61, 1.66] + wavelength: [] file_type: nc_fci_asr - file_key: radiance_mean - category_id: 1 - channel_id: 6 - long_name: TOA mean Radiance over clear pixels for nir16 channel - standard_name: toa_radiance - cell_method: area:mean + file_key: reflectance_min + long_name: TOA Bidirectional Reflectance segment min + standard_name: toa_bidirectional_reflectance + cell_method: area:minimum coordinates: - longitude - latitude - radiance_mean_cloudy_nir16: - name: radiance_mean_cloudy_nir16 + reflectance_std: + name: reflectance_std resolution: 32000 - wavelength: [1.56, 1.61, 1.66] + wavelength: [] file_type: nc_fci_asr - file_key: radiance_mean - category_id: 2 - channel_id: 6 - long_name: TOA mean Radiance over cloudy pixels for nir16 channel - standard_name: toa_radiance - cell_method: area:mean + file_key: reflectance_std + long_name: TOA Bidirectional Reflectance standard deviation + standard_name: toa_bidirectional_reflectance + cell_method: area:standard_deviation coordinates: - longitude - latitude - radiance_mean_all_nir22: - name: radiance_mean_all_nir22 + quality_reflectance: + name: quality_reflectance resolution: 32000 - wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr - file_key: radiance_mean - category_id: 0 - channel_id: 7 - long_name: TOA mean Radiance over all pixels for nir22 channel - standard_name: toa_radiance - cell_method: area:mean + file_key: quality_reflectance + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence + standard_name: reflectance_quality coordinates: - longitude - latitude - radiance_mean_clear_nir22: - name: radiance_mean_clear_nir22 + bt_max: + name: bt_max resolution: 32000 - wavelength: [2.2, 2.25, 2.3] + wavelength: [] file_type: nc_fci_asr - file_key: radiance_mean - category_id: 1 - channel_id: 7 - long_name: TOA mean Radiance over clear pixels for nir22 channel - standard_name: toa_radiance - cell_method: area:mean + file_key: bt_max + long_name: TOA Brightess Temperature segment max + standard_name: toa_brightess_temperature + cell_method: area:maximum coordinates: - longitude - latitude - radiance_mean_cloudy_nir22: - name: radiance_mean_cloudy_nir22 + bt_mean: + name: bt_mean resolution: 32000 - wavelength: [2.2, 2.25, 2.3] + wavelength: [] file_type: nc_fci_asr - file_key: radiance_mean - 
category_id: 2 - channel_id: 7 - long_name: TOA mean Radiance over cloudy pixels for nir22 channel - standard_name: toa_radiance + file_key: bt_mean + long_name: TOA Brightness Temperature segment mean + standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_all_ir38: - name: radiance_mean_all_ir38 + bt_mean_all_ir38: + name: bt_mean_all_ir38 resolution: 32000 wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr - file_key: radiance_mean + file_key: bt_mean + ir_channel_id: 0 category_id: 0 - channel_id: 8 - long_name: TOA mean Radiance over all pixels for ir38 channel - standard_name: toa_radiance + long_name: TOA Brightness Temperature segment mean at 3.8um (all pixels) + standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_clear_ir38: - name: radiance_mean_clear_ir38 + bt_mean_clear_ir38: + name: bt_mean_clear_ir38 resolution: 32000 wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr - file_key: radiance_mean + file_key: bt_mean + ir_channel_id: 0 category_id: 1 - channel_id: 8 - long_name: TOA mean Radiance over clear pixels for ir38 channel - standard_name: toa_radiance + long_name: TOA Brightness Temperature segment mean at 3.8um (clear pixels) + standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_cloudy_ir38: - name: radiance_mean_cloudy_ir38 + bt_mean_cloudy_ir38: + name: bt_mean_cloudy_ir38 resolution: 32000 wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr - file_key: radiance_mean + file_key: bt_mean + ir_channel_id: 0 category_id: 2 - channel_id: 8 - long_name: TOA mean Radiance over cloudy pixels for ir38 channel - standard_name: toa_radiance + long_name: TOA Brightness Temperature segment mean at 3.8um (cloudy pixels) + standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_all_wv63: - name: radiance_mean_all_wv63 + bt_mean_all_wv63: + name: bt_mean_all_wv63 resolution: 32000 wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr - file_key: radiance_mean + file_key: bt_mean + ir_channel_id: 1 category_id: 0 - channel_id: 9 - long_name: TOA mean Radiance over all pixels for wv63 channel - standard_name: toa_radiance + long_name: TOA Brightness Temperature segment mean at 6.3um (all pixels) + standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_clear_wv63: - name: radiance_mean_clear_wv63 + bt_mean_clear_wv63: + name: bt_mean_clear_wv63 resolution: 32000 wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr - file_key: radiance_mean + file_key: bt_mean + ir_channel_id: 1 category_id: 1 - channel_id: 9 - long_name: TOA mean Radiance over clear pixels for wv63 channel - standard_name: toa_radiance + long_name: TOA Brightness Temperature segment mean at 6.3um (clear pixels) + standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_cloudy_wv63: - name: radiance_mean_cloudy_wv63 + bt_mean_cloudy_wv63: + name: bt_mean_cloudy_wv63 resolution: 32000 wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr - file_key: radiance_mean + file_key: bt_mean + ir_channel_id: 1 category_id: 2 - channel_id: 9 - long_name: TOA mean Radiance over cloudy pixels for wv63 channel - standard_name: toa_radiance + long_name: TOA Brightness Temperature segment mean at 6.3um (cloudy pixels) + standard_name: toa_brightess_temperature cell_method: area:mean
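(Aside: the renumbering in these hunks replaces the single channel_id running 0-15 with two per-family indices. The mapping below is inferred purely from the entries in this file; the constant names are illustrative only and are not part of the reader code.)

    CATEGORIES = {0: "all", 1: "clear", 2: "cloudy"}      # category_id
    VIS_CHANNELS = ["vis04", "vis05", "vis06", "vis08",
                    "vis09", "nir13", "nir16", "nir22"]   # vis_channel_id 0-7
    IR_CHANNELS = ["ir38", "wv63", "wv73", "ir87",
                   "ir97", "ir105", "ir123", "ir133"]     # ir_channel_id 0-7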
coordinates: - longitude - latitude - radiance_mean_all_wv73: - name: radiance_mean_all_wv73 + bt_mean_all_wv73: + name: bt_mean_all_wv73 resolution: 32000 wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr - file_key: radiance_mean + file_key: bt_mean + ir_channel_id: 2 category_id: 0 - channel_id: 10 - long_name: TOA mean Radiance over all pixels for wv73 channel - standard_name: toa_radiance + long_name: TOA Brightness Temperature segment mean at 7.3um (all pixels) + standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_clear_wv73: - name: radiance_mean_clear_wv73 + bt_mean_clear_wv73: + name: bt_mean_clear_wv73 resolution: 32000 wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr - file_key: radiance_mean + file_key: bt_mean + ir_channel_id: 2 category_id: 1 - channel_id: 10 - long_name: TOA mean Radiance over clear pixels for wv73 channel - standard_name: toa_radiance + long_name: TOA Brightness Temperature segment mean at 7.3um (clear pixels) + standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_cloudy_wv73: - name: radiance_mean_cloudy_wv73 + bt_mean_cloudy_wv73: + name: bt_mean_cloudy_wv73 resolution: 32000 wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr - file_key: radiance_mean + file_key: bt_mean + ir_channel_id: 2 category_id: 2 - channel_id: 10 - long_name: TOA mean Radiance over cloudy pixels for wv73 channel - standard_name: toa_radiance + long_name: TOA Brightness Temperature segment mean at 7.3um (cloudy pixels) + standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_all_ir87: - name: radiance_mean_all_ir87 + bt_mean_all_ir87: + name: bt_mean_all_ir87 resolution: 32000 wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr - file_key: radiance_mean + file_key: bt_mean + ir_channel_id: 3 category_id: 0 - channel_id: 11 - long_name: TOA mean Radiance over all pixels for ir87 channel - standard_name: toa_radiance + long_name: TOA Brightness Temperature segment mean at 8.7um (all pixels) + standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_clear_ir87: - name: radiance_mean_clear_ir87 + bt_mean_clear_ir87: + name: bt_mean_clear_ir87 resolution: 32000 wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr - file_key: radiance_mean + file_key: bt_mean + ir_channel_id: 3 category_id: 1 - channel_id: 11 - long_name: TOA mean Radiance over clear pixels for ir87 channel - standard_name: toa_radiance + long_name: TOA Brightness Temperature segment mean at 8.7um (clear pixels) + standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_cloudy_ir87: - name: radiance_mean_cloudy_ir87 + bt_mean_cloudy_ir87: + name: bt_mean_cloudy_ir87 resolution: 32000 wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr - file_key: radiance_mean + file_key: bt_mean + ir_channel_id: 3 category_id: 2 - channel_id: 11 - long_name: TOA mean Radiance over cloudy pixels for ir87 channel - standard_name: toa_radiance + long_name: TOA Brightness Temperature segment mean at 8.7um (cloudy pixels) + standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_all_ir97: - name: radiance_mean_all_ir97 + bt_mean_all_ir97: + name: bt_mean_all_ir97 resolution: 32000 wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr - file_key: radiance_mean + file_key: bt_mean +
ir_channel_id: 4 category_id: 0 - channel_id: 12 - long_name: TOA mean Radiance over all pixels for ir97 channel - standard_name: toa_radiance + long_name: TOA Brightess Temperature segment mean at 9.7um (all pixels) + standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_clear_ir97: - name: radiance_mean_clear_ir97 + bt_mean_clear_ir97: + name: bt_mean_clear_ir97 resolution: 32000 wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr - file_key: radiance_mean + file_key: bt_mean + ir_channel_id: 4 category_id: 1 - channel_id: 12 - long_name: TOA mean Radiance over clear pixels for ir97 channel - standard_name: toa_radiance + long_name: TOA Brightess Temperature segment mean at 9.7um (clear pixels) + standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_cloudy_ir97: - name: radiance_mean_cloudy_ir97 + bt_mean_cloudy_ir97: + name: bt_mean_cloudy_ir97 resolution: 32000 wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr - file_key: radiance_mean + file_key: bt_mean + ir_channel_id: 4 category_id: 2 - channel_id: 12 - long_name: TOA mean Radiance over cloudy pixels for ir97 channel - standard_name: toa_radiance + long_name: TOA Brightess Temperature segment mean at 9.7um (cloudy pixels) + standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_all_ir105: - name: radiance_mean_all_ir105 + bt_mean_all_ir105: + name: bt_mean_all_ir105 resolution: 32000 wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr - file_key: radiance_mean + file_key: bt_mean + ir_channel_id: 5 category_id: 0 - channel_id: 13 - long_name: TOA mean Radiance over all pixels for ir105 channel - standard_name: toa_radiance + long_name: TOA Brightess Temperature segment mean at 10.5um (all pixels) + standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_clear_ir105: - name: radiance_mean_clear_ir105 + bt_mean_clear_ir105: + name: bt_mean_clear_ir105 resolution: 32000 wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr - file_key: radiance_mean + file_key: bt_mean + ir_channel_id: 5 category_id: 1 - channel_id: 13 - long_name: TOA mean Radiance over clear pixels for ir105 channel - standard_name: toa_radiance + long_name: TOA Brightess Temperature segment mean at 10.5um (clear pixels) + standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_cloudy_ir105: - name: radiance_mean_cloudy_ir105 + bt_mean_cloudy_ir105: + name: bt_mean_cloudy_ir105 resolution: 32000 wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr - file_key: radiance_mean + file_key: bt_mean + ir_channel_id: 5 category_id: 2 - channel_id: 13 - long_name: TOA mean Radiance over cloudy pixels for ir105 channel - standard_name: toa_radiance + long_name: TOA Brightess Temperature segment mean at 10.5um (cloudy pixels) + standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_all_ir123: - name: radiance_mean_all_ir123 + bt_mean_all_ir123: + name: bt_mean_all_ir123 resolution: 32000 wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr - file_key: radiance_mean + file_key: bt_mean + ir_channel_id: 6 category_id: 0 - channel_id: 14 - long_name: TOA mean Radiance over all pixels for ir123 channel - standard_name: toa_radiance - cell_method: area:mean - coordinates: - - longitude - - latitude - - 
radiance_mean_clear_ir123: - name: radiance_mean_clear_ir123 - resolution: 32000 - wavelength: [11.8, 12.3, 12.8] - file_type: nc_fci_asr - file_key: radiance_mean - category_id: 1 - channel_id: 14 - long_name: TOA mean Radiance over clear pixels for ir123 channel - standard_name: toa_radiance + long_name: TOA Brightess Temperature segment mean at 12.3um (all pixels) + standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - radiance_mean_cloudy_ir123: - name: radiance_mean_cloudy_ir123 + bt_mean_clear_ir123: + name: bt_mean_clear_ir123 resolution: 32000 wavelength: [11.8, 12.3, 12.8] - file_type: nc_fci_asr - file_key: radiance_mean - category_id: 2 - channel_id: 14 - long_name: TOA mean Radiance over cloudy pixels for ir123 channel - standard_name: toa_radiance - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_all_ir133: - name: radiance_mean_all_ir133 - resolution: 32000 - wavelength: [12.7, 13.3, 13.9] - file_type: nc_fci_asr - file_key: radiance_mean - category_id: 0 - channel_id: 15 - long_name: TOA mean Radiance over all pixels for ir133 channel - standard_name: toa_radiance - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_clear_ir133: - name: radiance_mean_clear_ir133 - resolution: 32000 - wavelength: [12.7, 13.3, 13.9] - file_type: nc_fci_asr - file_key: radiance_mean - category_id: 1 - channel_id: 15 - long_name: TOA mean Radiance over clear pixels for ir133 channel - standard_name: toa_radiance - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_cloudy_ir133: - name: radiance_mean_cloudy_ir133 - resolution: 32000 - wavelength: [12.7, 13.3, 13.9] - file_type: nc_fci_asr - file_key: radiance_mean - category_id: 2 - channel_id: 15 - long_name: TOA mean Radiance over cloudy pixels for ir133 channel - standard_name: toa_radiance - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_std: - name: radiance_std - resolution: 32000 - wavelength: [] - file_type: nc_fci_asr - file_key: radiance_std - long_name: TOA Radiance standard deviation over None pixels for None channel - standard_name: toa_radiance - cell_method: area:standard_deviation + file_type: nc_fci_asr + file_key: bt_mean + ir_channel_id: 6 + category_id: 1 + long_name: TOA Brightess Temperature segment mean at 12.3um (clear pixels) + standard_name: toa_brightess_temperature + cell_method: area:mean coordinates: - longitude - latitude - radiance_quality: - name: radiance_quality + bt_mean_cloudy_ir123: + name: bt_mean_cloudy_ir123 resolution: 32000 - wavelength: [] + wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr - file_key: radiance_quality - long_name: TOA Radiance Quality - standard_name: radiance_quality + file_key: bt_mean + ir_channel_id: 6 + category_id: 2 + long_name: TOA Brightess Temperature segment mean at 12.3um (cloudy pixels) + standard_name: toa_brightess_temperature + cell_method: area:mean coordinates: - longitude - latitude - reflectance_min: - name: reflectance_min + bt_mean_all_ir133: + name: bt_mean_all_ir133 resolution: 32000 - wavelength: [] + wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr - file_key: reflectance_min - long_name: TOA min Reflectance - standard_name: toa_reflectance - cell_method: area:minimum + file_key: bt_mean + ir_channel_id: 7 + category_id: 0 + long_name: TOA Brightess Temperature segment mean at 13.3um (all pixels) + standard_name: toa_brightess_temperature + cell_method: area:mean coordinates: - 
longitude - latitude - reflectance_max: - name: reflectance_max + bt_mean_clear_ir133: + name: bt_mean_clear_ir133 resolution: 32000 - wavelength: [] + wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr - file_key: reflectance_max - long_name: TOA max Reflectance - standard_name: toa_reflectance - cell_method: area:maximum + file_key: bt_mean + ir_channel_id: 7 + category_id: 1 + long_name: TOA Brightess Temperature segment mean at 13.3um (clear pixels) + standard_name: toa_brightess_temperature + cell_method: area:mean coordinates: - longitude - latitude - reflectance_mean: - name: reflectance_mean + bt_mean_cloudy_ir133: + name: bt_mean_cloudy_ir133 resolution: 32000 - wavelength: [] + wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr - file_key: reflectance_mean - long_name: TOA mean Reflectance - standard_name: toa_reflectance + file_key: bt_mean + ir_channel_id: 7 + category_id: 2 + long_name: TOA Brightess Temperature segment mean at 13.3um (cloudy pixels) + standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - reflectance_mean_all_vis04: - name: reflectance_mean_all_vis04 + quality_reflectance_all_vis04: + name: quality_reflectance_all_vis04 resolution: 32000 wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr - file_key: reflectance_mean - category_id: 0 + file_key: quality_reflectance vis_channel_id: 0 - long_name: TOA mean Reflectance over all pixels for vis04 channel - standard_name: toa_reflectance - cell_method: area:mean + category_id: 0 + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence at 0.4um (all pixels) + standard_name: reflectance_quality coordinates: - longitude - latitude - reflectance_mean_clear_vis04: - name: reflectance_mean_clear_vis04 + quality_reflectance_clear_vis04: + name: quality_reflectance_clear_vis04 resolution: 32000 wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr - file_key: reflectance_mean - category_id: 1 + file_key: quality_reflectance vis_channel_id: 0 - long_name: TOA mean Reflectance over clear pixels for vis04 channel - standard_name: toa_reflectance - cell_method: area:mean + category_id: 1 + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence at 0.4um (clear pixels) + standard_name: reflectance_quality coordinates: - longitude - latitude - reflectance_mean_cloudy_vis04: - name: reflectance_mean_cloudy_vis04 + quality_reflectance_cloudy_vis04: + name: quality_reflectance_cloudy_vis04 resolution: 32000 wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr - file_key: reflectance_mean - category_id: 2 + file_key: quality_reflectance vis_channel_id: 0 - long_name: TOA mean Reflectance over cloudy pixels for vis04 channel - standard_name: toa_reflectance - cell_method: area:mean + category_id: 2 + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence at 0.4um (cloudy pixels) + standard_name: reflectance_quality coordinates: - longitude - latitude - reflectance_mean_all_vis05: - name: reflectance_mean_all_vis05 + quality_reflectance_all_vis05: + name: quality_reflectance_all_vis05 resolution: 32000 wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr - file_key: reflectance_mean - category_id: 0 + file_key: quality_reflectance vis_channel_id: 1 - long_name: TOA mean Reflectance over all pixels for vis05 channel - standard_name: toa_reflectance - cell_method: area:mean + category_id: 0 + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence at 0.5um (all pixels) + standard_name: reflectance_quality 
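(Aside: the quality_reflectance entries above declare fill_value: -1. A minimal sketch of masking such a fill value before using the percent-confidence field; the helper name is hypothetical and only shows the idea.)

    import numpy as np

    def mask_quality(values, fill_value=-1):
        # Replace the declared fill value with NaN so the percent-confidence
        # field can be averaged or plotted without the -1 sentinel leaking in.
        values = np.asarray(values, dtype="float32")
        return np.where(values == fill_value, np.nan, values)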
coordinates: - longitude - latitude - reflectance_mean_clear_vis05: - name: reflectance_mean_clear_vis05 + quality_reflectance_clear_vis05: + name: quality_reflectance_clear_vis05 resolution: 32000 wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr - file_key: reflectance_mean - category_id: 1 + file_key: quality_reflectance vis_channel_id: 1 - long_name: TOA mean Reflectance over clear pixels for vis05 channel - standard_name: toa_reflectance - cell_method: area:mean + category_id: 1 + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence at 0.5um (clear pixels) + standard_name: reflectance_quality coordinates: - longitude - latitude - reflectance_mean_cloudy_vis05: - name: reflectance_mean_cloudy_vis05 + quality_reflectance_cloudy_vis05: + name: quality_reflectance_cloudy_vis05 resolution: 32000 wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr - file_key: reflectance_mean - category_id: 2 + file_key: quality_reflectance vis_channel_id: 1 - long_name: TOA mean Reflectance over cloudy pixels for vis05 channel - standard_name: toa_reflectance - cell_method: area:mean + category_id: 2 + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence at 0.5um (cloudy pixels) + standard_name: reflectance_quality coordinates: - longitude - latitude - reflectance_mean_all_vis06: - name: reflectance_mean_all_vis06 + quality_reflectance_all_vis06: + name: quality_reflectance_all_vis06 resolution: 32000 wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr - file_key: reflectance_mean - category_id: 0 + file_key: quality_reflectance vis_channel_id: 2 - long_name: TOA mean Reflectance over all pixels for vis06 channel - standard_name: toa_reflectance - cell_method: area:mean + category_id: 0 + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence at 0.6um (all pixels) + standard_name: reflectance_quality coordinates: - longitude - latitude - reflectance_mean_clear_vis06: - name: reflectance_mean_clear_vis06 + quality_reflectance_clear_vis06: + name: quality_reflectance_clear_vis06 resolution: 32000 wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr - file_key: reflectance_mean - category_id: 1 + file_key: quality_reflectance vis_channel_id: 2 - long_name: TOA mean Reflectance over clear pixels for vis06 channel - standard_name: toa_reflectance - cell_method: area:mean + category_id: 1 + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence at 0.6um (clear pixels) + standard_name: reflectance_quality coordinates: - longitude - latitude - reflectance_mean_cloudy_vis06: - name: reflectance_mean_cloudy_vis06 + quality_reflectance_cloudy_vis06: + name: quality_reflectance_cloudy_vis06 resolution: 32000 wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr - file_key: reflectance_mean - category_id: 2 + file_key: quality_reflectance vis_channel_id: 2 - long_name: TOA mean Reflectance over cloudy pixels for vis06 channel - standard_name: toa_reflectance - cell_method: area:mean + category_id: 2 + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence at 0.6um (cloudy pixels) + standard_name: reflectance_quality coordinates: - longitude - latitude - reflectance_mean_all_vis08: - name: reflectance_mean_all_vis08 + quality_reflectance_all_vis08: + name: quality_reflectance_all_vis08 resolution: 32000 wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr - file_key: reflectance_mean - category_id: 0 + file_key: quality_reflectance vis_channel_id: 3 - long_name: TOA mean Reflectance over all pixels for vis08 channel - 
standard_name: toa_reflectance - cell_method: area:mean + category_id: 0 + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence at 0.9um (all pixels) + standard_name: reflectance_quality coordinates: - longitude - latitude - reflectance_mean_clear_vis08: - name: reflectance_mean_clear_vis08 + quality_reflectance_clear_vis08: + name: quality_reflectance_clear_vis08 resolution: 32000 wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr - file_key: reflectance_mean - category_id: 1 + file_key: quality_reflectance vis_channel_id: 3 - long_name: TOA mean Reflectance over clear pixels for vis08 channel - standard_name: toa_reflectance - cell_method: area:mean + category_id: 1 + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence at 0.9um (clear pixels) + standard_name: reflectance_quality coordinates: - longitude - latitude - reflectance_mean_cloudy_vis08: - name: reflectance_mean_cloudy_vis08 + quality_reflectance_cloudy_vis08: + name: quality_reflectance_cloudy_vis08 resolution: 32000 wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr - file_key: reflectance_mean - category_id: 2 + file_key: quality_reflectance vis_channel_id: 3 - long_name: TOA mean Reflectance over cloudy pixels for vis08 channel - standard_name: toa_reflectance - cell_method: area:mean + category_id: 2 + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence at 0.9um (cloudy pixels) + standard_name: reflectance_quality coordinates: - longitude - latitude - reflectance_mean_all_vis09: - name: reflectance_mean_all_vis09 + quality_reflectance_all_vis09: + name: quality_reflectance_all_vis09 resolution: 32000 wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr - file_key: reflectance_mean - category_id: 0 + file_key: quality_reflectance vis_channel_id: 4 - long_name: TOA mean Reflectance over all pixels for vis09 channel - standard_name: toa_reflectance - cell_method: area:mean + category_id: 0 + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence at 0.9um (all pixels) + standard_name: reflectance_quality coordinates: - longitude - latitude - reflectance_mean_clear_vis09: - name: reflectance_mean_clear_vis09 + quality_reflectance_clear_vis09: + name: quality_reflectance_clear_vis09 resolution: 32000 wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr - file_key: reflectance_mean - category_id: 1 + file_key: quality_reflectance vis_channel_id: 4 - long_name: TOA mean Reflectance over clear pixels for vis09 channel - standard_name: toa_reflectance - cell_method: area:mean + category_id: 1 + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence at 0.9um (clear pixels) + standard_name: reflectance_quality coordinates: - longitude - latitude - reflectance_mean_cloudy_vis09: - name: reflectance_mean_cloudy_vis09 + quality_reflectance_cloudy_vis09: + name: quality_reflectance_cloudy_vis09 resolution: 32000 wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr - file_key: reflectance_mean - category_id: 2 + file_key: quality_reflectance vis_channel_id: 4 - long_name: TOA mean Reflectance over cloudy pixels for vis09 channel - standard_name: toa_reflectance - cell_method: area:mean + category_id: 2 + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence at 0.9um (cloudy pixels) + standard_name: reflectance_quality coordinates: - longitude - latitude - reflectance_mean_all_nir13: - name: reflectance_mean_all_nir13 + quality_reflectance_all_nir13: + name: quality_reflectance_all_nir13 resolution: 32000 wavelength: 
[1.35, 1.38, 1.41] file_type: nc_fci_asr - file_key: reflectance_mean - category_id: 0 + file_key: quality_reflectance vis_channel_id: 5 - long_name: TOA mean Reflectance over all pixels for nir13 channel - standard_name: toa_reflectance - cell_method: area:mean + category_id: 0 + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence at 1.4um (all pixels) + standard_name: reflectance_quality coordinates: - longitude - latitude - reflectance_mean_clear_nir13: - name: reflectance_mean_clear_nir13 + quality_reflectance_clear_nir13: + name: quality_reflectance_clear_nir13 resolution: 32000 wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr - file_key: reflectance_mean - category_id: 1 + file_key: quality_reflectance vis_channel_id: 5 - long_name: TOA mean Reflectance over clear pixels for nir13 channel - standard_name: toa_reflectance - cell_method: area:mean + category_id: 1 + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence at 1.4um (clear pixels) + standard_name: reflectance_quality coordinates: - longitude - latitude - reflectance_mean_cloudy_nir13: - name: reflectance_mean_cloudy_nir13 + quality_reflectance_cloudy_nir13: + name: quality_reflectance_cloudy_nir13 resolution: 32000 wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr - file_key: reflectance_mean - category_id: 2 + file_key: quality_reflectance vis_channel_id: 5 - long_name: TOA mean Reflectance over cloudy pixels for nir13 channel - standard_name: toa_reflectance - cell_method: area:mean + category_id: 2 + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence at 1.4um (cloudy pixels) + standard_name: reflectance_quality coordinates: - longitude - latitude - reflectance_mean_all_nir16: - name: reflectance_mean_all_nir16 + quality_reflectance_all_nir16: + name: quality_reflectance_all_nir16 resolution: 32000 wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr - file_key: reflectance_mean - category_id: 0 + file_key: quality_reflectance vis_channel_id: 6 - long_name: TOA mean Reflectance over all pixels for nir16 channel - standard_name: toa_reflectance - cell_method: area:mean + category_id: 0 + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence at 1.6um (all pixels) + standard_name: reflectance_quality coordinates: - longitude - latitude - reflectance_mean_clear_nir16: - name: reflectance_mean_clear_nir16 + quality_reflectance_clear_nir16: + name: quality_reflectance_clear_nir16 resolution: 32000 wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr - file_key: reflectance_mean - category_id: 1 + file_key: quality_reflectance vis_channel_id: 6 - long_name: TOA mean Reflectance over clear pixels for nir16 channel - standard_name: toa_reflectance - cell_method: area:mean + category_id: 1 + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence at 1.6um (clear pixels) + standard_name: reflectance_quality coordinates: - longitude - latitude - reflectance_mean_cloudy_nir16: - name: reflectance_mean_cloudy_nir16 + quality_reflectance_cloudy_nir16: + name: quality_reflectance_cloudy_nir16 resolution: 32000 wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr - file_key: reflectance_mean - category_id: 2 + file_key: quality_reflectance vis_channel_id: 6 - long_name: TOA mean Reflectance over cloudy pixels for nir16 channel - standard_name: toa_reflectance - cell_method: area:mean - coordinates: - - longitude - - latitude - - reflectance_mean_all_nir22: - name: reflectance_mean_all_nir22 - resolution: 32000 - wavelength: [2.2, 2.25, 2.3] - 
file_type: nc_fci_asr - file_key: reflectance_mean - category_id: 0 - vis_channel_id: 7 - long_name: TOA mean Reflectance over all pixels for nir22 channel - standard_name: toa_reflectance - cell_method: area:mean - coordinates: - - longitude - - latitude - - reflectance_mean_clear_nir22: - name: reflectance_mean_clear_nir22 - resolution: 32000 - wavelength: [2.2, 2.25, 2.3] - file_type: nc_fci_asr - file_key: reflectance_mean - category_id: 1 - vis_channel_id: 7 - long_name: TOA mean Reflectance over clear pixels for nir22 channel - standard_name: toa_reflectance - cell_method: area:mean - coordinates: - - longitude - - latitude - - reflectance_mean_cloudy_nir22: - name: reflectance_mean_cloudy_nir22 - resolution: 32000 - wavelength: [2.2, 2.25, 2.3] - file_type: nc_fci_asr - file_key: reflectance_mean category_id: 2 - vis_channel_id: 7 - long_name: TOA mean Reflectance over cloudy pixels for nir22 channel - standard_name: toa_reflectance - cell_method: area:mean - coordinates: - - longitude - - latitude - - reflectance_std: - name: reflectance_std - resolution: 32000 - wavelength: [] - file_type: nc_fci_asr - file_key: reflectance_std - long_name: TOA Reflectance standard deviation - standard_name: toa_reflectance - cell_method: area:standard_deviation - coordinates: - - longitude - - latitude - - reflectance_quality: - name: reflectance_quality - resolution: 32000 - wavelength: [] - file_type: nc_fci_asr - file_key: reflectance_quality - long_name: TOA Reflectance Quality + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence at 1.6um (cloudy pixels) standard_name: reflectance_quality coordinates: - longitude - latitude - bt_min: - name: bt_min + quality_reflectance_all_nir22: + name: quality_reflectance_all_nir22 resolution: 32000 - wavelength: [] + wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr - file_key: bt_min - long_name: TOA min Brightess Temperature - standard_name: toa_brightess_temperature - cell_method: area:minimum + file_key: quality_reflectance + vis_channel_id: 7 + category_id: 0 + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence at 2.2um (all pixels) + standard_name: reflectance_quality coordinates: - longitude - latitude - bt_max: - name: bt_max + quality_reflectance_clear_nir22: + name: quality_reflectance_clear_nir22 resolution: 32000 - wavelength: [] + wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr - file_key: bt_max - long_name: TOA max Brightess Temperature - standard_name: toa_brightess_temperature - cell_method: area:maximum + file_key: quality_reflectance + vis_channel_id: 7 + category_id: 1 + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence at 2.2um (clear pixels) + standard_name: reflectance_quality coordinates: - longitude - latitude - bt_mean: - name: bt_mean + quality_reflectance_cloudy_nir22: + name: quality_reflectance_cloudy_nir22 resolution: 32000 - wavelength: [] + wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr - file_key: bt_mean - long_name: TOA mean Brightess Temperature - standard_name: toa_brightess_temperature - cell_method: area:mean + file_key: quality_reflectance + vis_channel_id: 7 + category_id: 2 + fill_value: -1 + long_name: TOA Bidirectional Reflectance % confidence at 2.2um (cloudy pixels) + standard_name: reflectance_quality coordinates: - longitude - latitude - bt_mean_all_ir38: - name: bt_mean_all_ir38 + quality_bt_all_ir38: + name: quality_bt_all_ir38 resolution: 32000 wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr - file_key: bt_mean - category_id: 0 + 
file_key: quality_bt ir_channel_id: 0 - long_name: TOA mean Brightess Temperature over all pixels for ir38 channel - standard_name: toa_brightess_temperature - cell_method: area:mean + category_id: 0 + fill_value: -1 + long_name: TOA Brightess Temperature % confidence at 3.8um (all pixels) + standard_name: brightness_temperature_quality coordinates: - longitude - latitude - bt_mean_clear_ir38: - name: bt_mean_clear_ir38 + quality_bt_clear_ir38: + name: quality_bt_clear_ir38 resolution: 32000 wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr - file_key: bt_mean - category_id: 1 + file_key: quality_bt ir_channel_id: 0 - long_name: TOA mean Brightess Temperature over clear pixels for ir38 channel - standard_name: toa_brightess_temperature - cell_method: area:mean + category_id: 1 + fill_value: -1 + long_name: TOA Brightess Temperature % confidence at 3.8um (clear pixels) + standard_name: brightness_temperature_quality coordinates: - longitude - latitude - bt_mean_cloudy_ir38: - name: bt_mean_cloudy_ir38 + quality_bt_cloudy_ir38: + name: quality_bt_cloudy_ir38 resolution: 32000 wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr - file_key: bt_mean - category_id: 2 + file_key: quality_bt ir_channel_id: 0 - long_name: TOA mean Brightess Temperature over cloudy pixels for ir38 channel - standard_name: toa_brightess_temperature - cell_method: area:mean + category_id: 2 + fill_value: -1 + long_name: TOA Brightess Temperature % confidence at 3.8um (cloudy pixels) + standard_name: brightness_temperature_quality coordinates: - longitude - latitude - bt_mean_all_wv63: - name: bt_mean_all_wv63 + quality_bt_all_wv63: + name: quality_bt_all_wv63 resolution: 32000 wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr - file_key: bt_mean - category_id: 0 + file_key: quality_bt ir_channel_id: 1 - long_name: TOA mean Brightess Temperature over all pixels for wv63 channel - standard_name: toa_brightess_temperature - cell_method: area:mean + category_id: 0 + fill_value: -1 + long_name: TOA Brightess Temperature % confidence at 6.3um (all pixels) + standard_name: brightness_temperature_quality coordinates: - longitude - latitude - bt_mean_clear_wv63: - name: bt_mean_clear_wv63 + quality_bt_clear_wv63: + name: quality_bt_clear_wv63 resolution: 32000 wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr - file_key: bt_mean - category_id: 1 + file_key: quality_bt ir_channel_id: 1 - long_name: TOA mean Brightess Temperature over clear pixels for wv63 channel - standard_name: toa_brightess_temperature - cell_method: area:mean + category_id: 1 + fill_value: -1 + long_name: TOA Brightess Temperature % confidence at 6.3um (clear pixels) + standard_name: brightness_temperature_quality coordinates: - longitude - latitude - bt_mean_cloudy_wv63: - name: bt_mean_cloudy_wv63 + quality_bt_cloudy_wv63: + name: quality_bt_cloudy_wv63 resolution: 32000 wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr - file_key: bt_mean - category_id: 2 + file_key: quality_bt ir_channel_id: 1 - long_name: TOA mean Brightess Temperature over cloudy pixels for wv63 channel - standard_name: toa_brightess_temperature - cell_method: area:mean + category_id: 2 + fill_value: -1 + long_name: TOA Brightess Temperature % confidence at 6.3um (cloudy pixels) + standard_name: brightness_temperature_quality coordinates: - longitude - latitude - bt_mean_all_wv73: - name: bt_mean_all_wv73 + quality_bt_all_wv73: + name: quality_bt_all_wv73 resolution: 32000 wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr - file_key: bt_mean - category_id: 0 + file_key: quality_bt 
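(Aside: once these definitions are in place, the datasets are loaded by name through the fci_l2_nc reader like any other Satpy dataset. A hypothetical session; the filename is made up.)

    from satpy import Scene

    # "asr_example.nc" stands in for a real FCI L2 ASR granule
    scn = Scene(filenames=["asr_example.nc"], reader="fci_l2_nc")
    scn.load(["bt_mean_all_ir38", "quality_bt_all_ir38"])
    print(scn["bt_mean_all_ir38"].attrs["long_name"])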
ir_channel_id: 2 - long_name: TOA mean Brightess Temperature over all pixels for wv73 channel - standard_name: toa_brightess_temperature - cell_method: area:mean + category_id: 0 + fill_value: -1 + long_name: TOA Brightess Temperature % confidence at 7.3um (all pixels) + standard_name: brightness_temperature_quality coordinates: - longitude - latitude - bt_mean_clear_wv73: - name: bt_mean_clear_wv73 + quality_bt_clear_wv73: + name: quality_bt_clear_wv73 resolution: 32000 wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr - file_key: bt_mean - category_id: 1 + file_key: quality_bt ir_channel_id: 2 - long_name: TOA mean Brightess Temperature over clear pixels for wv73 channel - standard_name: toa_brightess_temperature - cell_method: area:mean + category_id: 1 + fill_value: -1 + long_name: TOA Brightess Temperature % confidence at 7.3um (clear pixels) + standard_name: brightness_temperature_quality coordinates: - longitude - latitude - bt_mean_cloudy_wv73: - name: bt_mean_cloudy_wv73 + quality_bt_cloudy_wv73: + name: quality_bt_cloudy_wv73 resolution: 32000 wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr - file_key: bt_mean - category_id: 2 + file_key: quality_bt ir_channel_id: 2 - long_name: TOA mean Brightess Temperature over cloudy pixels for wv73 channel - standard_name: toa_brightess_temperature - cell_method: area:mean + category_id: 2 + fill_value: -1 + long_name: TOA Brightess Temperature % confidence at 7.3um (cloudy pixels) + standard_name: brightness_temperature_quality coordinates: - longitude - latitude - bt_mean_all_ir87: - name: bt_mean_all_ir87 + quality_bt_all_ir87: + name: quality_bt_all_ir87 resolution: 32000 wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr - file_key: bt_mean - category_id: 0 + file_key: quality_bt ir_channel_id: 3 - long_name: TOA mean Brightess Temperature over all pixels for ir87 channel - standard_name: toa_brightess_temperature - cell_method: area:mean + category_id: 0 + fill_value: -1 + long_name: TOA Brightess Temperature % confidence at 8.7um (all pixels) + standard_name: brightness_temperature_quality coordinates: - longitude - latitude - bt_mean_clear_ir87: - name: bt_mean_clear_ir87 + quality_bt_clear_ir87: + name: quality_bt_clear_ir87 resolution: 32000 wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr - file_key: bt_mean - category_id: 1 + file_key: quality_bt ir_channel_id: 3 - long_name: TOA mean Brightess Temperature over clear pixels for ir87 channel - standard_name: toa_brightess_temperature - cell_method: area:mean + category_id: 1 + fill_value: -1 + long_name: TOA Brightess Temperature % confidence at 8.7um (clear pixels) + standard_name: brightness_temperature_quality coordinates: - longitude - latitude - bt_mean_cloudy_ir87: - name: bt_mean_cloudy_ir87 + quality_bt_cloudy_ir87: + name: quality_bt_cloudy_ir87 resolution: 32000 wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr - file_key: bt_mean - category_id: 2 + file_key: quality_bt ir_channel_id: 3 - long_name: TOA mean Brightess Temperature over cloudy pixels for ir87 channel - standard_name: toa_brightess_temperature - cell_method: area:mean + category_id: 2 + fill_value: -1 + long_name: TOA Brightess Temperature % confidence at 8.7um (cloudy pixels) + standard_name: brightness_temperature_quality coordinates: - longitude - latitude - bt_mean_all_ir97: - name: bt_mean_all_ir97 + quality_bt_all_ir97: + name: quality_bt_all_ir97 resolution: 32000 wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr - file_key: bt_mean - category_id: 0 + file_key: quality_bt ir_channel_id: 4 
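(Aside: each quality_bt_* entry points at the same quality_bt variable and differs only in its ir_channel_id/category_id pair. A sketch of the selection the reader presumably performs; the dimension names are assumptions about the product layout, not taken from the format specification.)

    import xarray as xr

    def select_asr_field(ds: xr.Dataset, file_key: str, channel: int, category: int) -> xr.DataArray:
        # e.g. channel=2, category=1 picks out quality_bt_clear_wv73
        # (ir_channel_id: 2, category_id: 1 above)
        return ds[file_key].isel(number_of_channels=channel,
                                 number_of_categories=category)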
- long_name: TOA mean Brightess Temperature over all pixels for ir97 channel - standard_name: toa_brightess_temperature - cell_method: area:mean + category_id: 0 + fill_value: -1 + long_name: TOA Brightess Temperature % confidence at 9.7um (all pixels) + standard_name: brightness_temperature_quality coordinates: - longitude - latitude - bt_mean_clear_ir97: - name: bt_mean_clear_ir97 + quality_bt_clear_ir97: + name: quality_bt_clear_ir97 resolution: 32000 wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr - file_key: bt_mean - category_id: 1 + file_key: quality_bt ir_channel_id: 4 - long_name: TOA mean Brightess Temperature over clear pixels for ir97 channel - standard_name: toa_brightess_temperature - cell_method: area:mean + category_id: 1 + fill_value: -1 + long_name: TOA Brightess Temperature % confidence at 9.7um (clear pixels) + standard_name: brightness_temperature_quality coordinates: - longitude - latitude - bt_mean_cloudy_ir97: - name: bt_mean_cloudy_ir97 + quality_bt_cloudy_ir97: + name: quality_bt_cloudy_ir97 resolution: 32000 wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr - file_key: bt_mean - category_id: 2 + file_key: quality_bt ir_channel_id: 4 - long_name: TOA mean Brightess Temperature over cloudy pixels for ir97 channel - standard_name: toa_brightess_temperature - cell_method: area:mean + category_id: 2 + fill_value: -1 + long_name: TOA Brightess Temperature % confidence at 9.7um (cloudy pixels) + standard_name: brightness_temperature_quality coordinates: - longitude - latitude - bt_mean_all_ir105: - name: bt_mean_all_ir105 + quality_bt_all_ir105: + name: quality_bt_all_ir105 resolution: 32000 wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr - file_key: bt_mean - category_id: 0 + file_key: quality_bt ir_channel_id: 5 - long_name: TOA mean Brightess Temperature over all pixels for ir105 channel - standard_name: toa_brightess_temperature - cell_method: area:mean + category_id: 0 + fill_value: -1 + long_name: TOA Brightess Temperature % confidence at 10.5um (all pixels) + standard_name: brightness_temperature_quality coordinates: - longitude - latitude - bt_mean_clear_ir105: - name: bt_mean_clear_ir105 + quality_bt_clear_ir105: + name: quality_bt_clear_ir105 resolution: 32000 wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr - file_key: bt_mean - category_id: 1 + file_key: quality_bt ir_channel_id: 5 - long_name: TOA mean Brightess Temperature over clear pixels for ir105 channel - standard_name: toa_brightess_temperature - cell_method: area:mean + category_id: 1 + fill_value: -1 + long_name: TOA Brightess Temperature % confidence at 10.5um (clear pixels) + standard_name: brightness_temperature_quality coordinates: - longitude - latitude - bt_mean_cloudy_ir105: - name: bt_mean_cloudy_ir105 + quality_bt_cloudy_ir105: + name: quality_bt_cloudy_ir105 resolution: 32000 wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr - file_key: bt_mean - category_id: 2 + file_key: quality_bt ir_channel_id: 5 - long_name: TOA mean Brightess Temperature over cloudy pixels for ir105 channel - standard_name: toa_brightess_temperature - cell_method: area:mean + category_id: 2 + fill_value: -1 + long_name: TOA Brightess Temperature % confidence at 10.5um (cloudy pixels) + standard_name: brightness_temperature_quality coordinates: - longitude - latitude - bt_mean_all_ir123: - name: bt_mean_all_ir123 + quality_bt_all_ir123: + name: quality_bt_all_ir123 resolution: 32000 wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr - file_key: bt_mean - category_id: 0 + file_key: quality_bt 
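(Aside: the three-element wavelength lists follow Satpy's (min, central, max) convention, which is what makes queries by wavelength resolve to a channel. A small illustration; the import path is correct for recent Satpy versions to the best of my knowledge.)

    from satpy.dataset.dataid import WavelengthRange

    wr = WavelengthRange(9.8, 10.5, 11.2)  # the ir105 triplet used above
    print(10.4 in wr)                      # True: inside the channel's range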
ir_channel_id: 6 - long_name: TOA mean Brightess Temperature over all pixels for ir123 channel - standard_name: toa_brightess_temperature - cell_method: area:mean + category_id: 0 + fill_value: -1 + long_name: TOA Brightess Temperature % confidence at 12.3um (all pixels) + standard_name: brightness_temperature_quality coordinates: - longitude - latitude - bt_mean_clear_ir123: - name: bt_mean_clear_ir123 + quality_bt_clear_ir123: + name: quality_bt_clear_ir123 resolution: 32000 wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr - file_key: bt_mean - category_id: 1 + file_key: quality_bt ir_channel_id: 6 - long_name: TOA mean Brightess Temperature over clear pixels for ir123 channel - standard_name: toa_brightess_temperature - cell_method: area:mean + category_id: 1 + fill_value: -1 + long_name: TOA Brightess Temperature % confidence at 12.3um (clear pixels) + standard_name: brightness_temperature_quality coordinates: - longitude - latitude - bt_mean_cloudy_ir123: - name: bt_mean_cloudy_ir123 + quality_bt_cloudy_ir123: + name: quality_bt_cloudy_ir123 resolution: 32000 wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr - file_key: bt_mean - category_id: 2 + file_key: quality_bt ir_channel_id: 6 - long_name: TOA mean Brightess Temperature over cloudy pixels for ir123 channel - standard_name: toa_brightess_temperature - cell_method: area:mean + category_id: 2 + fill_value: -1 + long_name: TOA Brightess Temperature % confidence at 12.3um (cloudy pixels) + standard_name: brightness_temperature_quality coordinates: - longitude - latitude - bt_mean_all_ir133: - name: bt_mean_all_ir133 + quality_bt_all_ir133: + name: quality_bt_all_ir133 resolution: 32000 wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr - file_key: bt_mean - category_id: 0 + file_key: quality_bt ir_channel_id: 7 - long_name: TOA mean Brightess Temperature over all pixels for ir133 channel - standard_name: toa_brightess_temperature - cell_method: area:mean + category_id: 0 + fill_value: -1 + long_name: TOA Brightess Temperature % confidence at 13.3um (all pixels) + standard_name: brightness_temperature_quality coordinates: - longitude - latitude - bt_mean_clear_ir133: - name: bt_mean_clear_ir133 + quality_bt_clear_ir133: + name: quality_bt_clear_ir133 resolution: 32000 wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr - file_key: bt_mean - category_id: 1 + file_key: quality_bt ir_channel_id: 7 - long_name: TOA mean Brightess Temperature over clear pixels for ir133 channel - standard_name: toa_brightess_temperature - cell_method: area:mean + category_id: 1 + fill_value: -1 + long_name: TOA Brightess Temperature % confidence at 13.3um (clear pixels) + standard_name: brightness_temperature_quality coordinates: - longitude - latitude - bt_mean_cloudy_ir133: - name: bt_mean_cloudy_ir133 + quality_bt_cloudy_ir133: + name: quality_bt_cloudy_ir133 resolution: 32000 wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr - file_key: bt_mean - category_id: 2 + file_key: quality_bt ir_channel_id: 7 - long_name: TOA mean Brightess Temperature over cloudy pixels for ir133 channel - standard_name: toa_brightess_temperature - cell_method: area:mean - coordinates: - - longitude - - latitude - - bt_std: - name: bt_std - resolution: 32000 - wavelength: [] - file_type: nc_fci_asr - file_key: bt_std - long_name: TOA Brightess Temperature standard deviation - standard_name: toa_brightess_temperature - cell_method: area:standard_deviation - coordinates: - - longitude - - latitude - - bt_quality: - name: bt_quality - resolution: 32000 - wavelength: 
[] - file_type: nc_fci_asr - file_key: bt_quality - long_name: TOA Brightess Temperature Quality + category_id: 2 + fill_value: -1 + long_name: TOA Brightess Temperature % confidence at 13.3um (cloudy pixels) standard_name: brightness_temperature_quality coordinates: - longitude - latitude - pixel_percentage: - name: pixel_percentage + pixel_percentage_all: + name: pixel_percentage_all resolution: 32000 file_type: nc_fci_asr file_key: pixel_percentage + long_name: Percentage of FoR pixels used (all pixels) standard_name: pixels_used_fraction + category_id: 0 coordinates: - longitude - latitude - land_pixel_percent: - name: land_pixel_percent + pixel_percentage_clear: + name: pixel_percentage_clear resolution: 32000 file_type: nc_fci_asr - file_key: land_pixel_percent - standard_name: land_area_fraction + file_key: pixel_percentage + long_name: Percentage of FoR pixels used (clear pixels) + standard_name: pixels_used_fraction + category_id: 1 coordinates: - longitude - latitude - water_pixel_percent: - name: water_pixel_percent + pixel_percentage_cloudy: + name: pixel_percentage_cloudy resolution: 32000 file_type: nc_fci_asr - file_key: water_pixel_percent - standard_name: water_area_fraction + file_key: pixel_percentage + long_name: Percentage of FoR pixels used (cloudy pixels) + standard_name: pixels_used_fraction + category_id: 2 coordinates: - longitude - latitude From 86b96a67a5633848b00fb09e5e07a8d29676bd43 Mon Sep 17 00:00:00 2001 From: Olivier Samain Date: Fri, 15 Dec 2023 17:10:49 +0100 Subject: [PATCH 1037/1416] Fix ASR issues --- satpy/etc/readers/fci_l2_nc.yaml | 86 ++++---------------------------- 1 file changed, 11 insertions(+), 75 deletions(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index b8c342dac1..1334448baa 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -1365,26 +1365,26 @@ datasets: - longitude - latitude - radiance_min: - name: radiance_min + radiance_mean: + name: radiance_mean resolution: 32000 file_type: nc_fci_asr - file_key: radiance_min - long_name: TOA min Radiance + file_key: radiance_mean + long_name: TOA mean Radiance standard_name: toa_radiance - cell_method: area:minimum + cell_method: area:mean coordinates: - longitude - latitude - radiance_mean: - name: radiance_mean + radiance_min: + name: radiance_min resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean - long_name: TOA mean Radiance + file_key: radiance_min + long_name: TOA min Radiance standard_name: toa_radiance - cell_method: area:mean + cell_method: area:minimum coordinates: - longitude - latitude @@ -1876,70 +1876,6 @@ datasets: - longitude - latitude - reflectance_min: - name: reflectance_min - resolution: 32000 - wavelength: [] - file_type: nc_fci_asr - file_key: reflectance_min - long_name: TOA Bidirectional Reflectance segment min - standard_name: toa_bidirectional_reflectance - cell_method: area:minimum - coordinates: - - longitude - - latitude - - reflectance_std: - name: reflectance_std - resolution: 32000 - wavelength: [] - file_type: nc_fci_asr - file_key: reflectance_std - long_name: TOA Bidirectional Reflectance standard deviation - standard_name: toa_bidirectional_reflectance - cell_method: area:standard_deviation - coordinates: - - longitude - - latitude - - quality_reflectance: - name: quality_reflectance - resolution: 32000 - file_type: nc_fci_asr - file_key: quality_reflectance - fill_value: -1 - long_name: TOA Bidirectional Reflectance % confidence - standard_name: reflectance_quality - 
coordinates: - - longitude - - latitude - - bt_max: - name: bt_max - resolution: 32000 - wavelength: [] - file_type: nc_fci_asr - file_key: bt_max - long_name: TOA Brightess Temperature segment max - standard_name: toa_brightess_temperature - cell_method: area:maximum - coordinates: - - longitude - - latitude - - bt_mean: - name: bt_mean - resolution: 32000 - wavelength: [] - file_type: nc_fci_asr - file_key: bt_mean - long_name: TOA Brightess Temperature segment mean - standard_name: toa_brightess_temperature - cell_method: area:mean - coordinates: - - longitude - - latitude - bt_mean_all_ir38: name: bt_mean_all_ir38 resolution: 32000 @@ -2300,7 +2236,7 @@ datasets: - longitude - latitude - quality_reflectance_all_vis04: + quality_reflectance_all_vis04: name: quality_reflectance_all_vis04 resolution: 32000 wavelength: [0.384, 0.444, 0.504] From 22dc07b63672436958daf46c685eb1bec4434c1e Mon Sep 17 00:00:00 2001 From: Olivier Samain Date: Fri, 15 Dec 2023 17:20:55 +0100 Subject: [PATCH 1038/1416] Various fixes --- satpy/etc/readers/fci_l2_nc.yaml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index 1334448baa..f8e102e030 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -1045,13 +1045,14 @@ datasets: file_key: cloud_mask_test_result extract_byte: 0 flag_values: [0,1] - flag_meanings: [' Snow/Ice undetected',' Snow/Ice detected'] + flag_meanings: ['No snow/Ice detected',' Snow/Ice detected'] standard_name: status_flag cloud_test_cmt1: name: cloud_test_cmt1 resolution: 2000 file_type: nc_fci_test_clm + file_key: cloud_mask_test_result extract_byte: 1 flag_values: [0,1] flag_meanings: ['Cloud undetected','Cloud detected'] @@ -1092,7 +1093,7 @@ datasets: resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - sextract_byte: 5 + extract_byte: 5 flag_values: [0,1] flag_meanings: ['Cloud undetected','Cloud detected'] standard_name: status_flag From 47813c4d4ae3afa8a8099b0c8c3c5f069e3864a6 Mon Sep 17 00:00:00 2001 From: Olivier Samain Date: Fri, 15 Dec 2023 17:33:03 +0100 Subject: [PATCH 1039/1416] Fix GII typos --- satpy/etc/readers/fci_l2_nc.yaml | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index f8e102e030..0de8d47491 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -697,7 +697,7 @@ datasets: coordinates: - longitude - latitude - standard_name: atmosphere_stability_lifted_index + standard_name: atmosphere_stability_k_index lifted_index: name: lifted_index @@ -707,7 +707,7 @@ datasets: coordinates: - longitude - latitude - standard_name: atmosphere_stability_k_index + standard_name: atmosphere_stability_lifted_index prec_water_high: name: prec_water_high @@ -754,9 +754,11 @@ datasets: resolution: 6000 file_type: nc_fci_gii file_key: percent_cloud_free + units: '%' coordinates: - longitude - latitude + long_name: Percentage of Cloud Free Pixels Processed in FoR standard_name: cloud_free_area_fraction number_of_iterations_gii: @@ -1255,7 +1257,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 21 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + flag_meanings: ['No dust detected','Dust detected'] standard_name: status_flag cloud_test_ash: @@ -1265,7 +1267,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 22 flag_values: [0,1] - flag_meanings: ['Cloud 
undetected','Cloud detected'] + flag_meanings: ['No ash detected','Ash detected'] standard_name: status_flag cloud_test_dust_ash: @@ -1275,7 +1277,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 23 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + flag_meanings: ['Dust detected','Ash detected'] standard_name: status_flag cloud_test_cmrt6: From a62485bffd05fe48a0e6f829650f3d82e812302b Mon Sep 17 00:00:00 2001 From: Olivier Samain Date: Mon, 8 Jan 2024 11:47:40 +0100 Subject: [PATCH 1040/1416] Various fixes (long_names,units) --- satpy/etc/readers/fci_l2_nc.yaml | 60 +++++++++++++++++++++----------- 1 file changed, 39 insertions(+), 21 deletions(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index 0de8d47491..b1600aff50 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -353,6 +353,7 @@ datasets: file_type: nc_fci_oca file_key: retrieved_cloud_optical_thickness layer: 0 + long_name: Cloud Optical Thickness (referenced to 0.55 µm and in log10(COT)) for Upper Layer standard_name: atmosphere_optical_thickness_due_to_cloud retrieval_error_cloud_optical_thickness_upper_layer: @@ -361,6 +362,7 @@ datasets: file_type: nc_fci_oca file_key: retrieval_error_cloud_optical_thickness layer: 0 + long_name: Cloud Optical Thickness Error (error in log10(COT)) for Upper Layer standard_name: atmosphere_optical_thickness_due_to_cloud standard_error retrieved_cloud_optical_thickness_lower_layer: @@ -369,6 +371,7 @@ datasets: file_type: nc_fci_oca file_key: retrieved_cloud_optical_thickness layer: 1 + long_name: Cloud Optical Thickness (referenced to 0.55 µm and in log10(COT)) for Lower Layer standard_name: atmosphere_optical_thickness_due_to_cloud retrieval_error_cloud_optical_thickness_lower_layer: @@ -377,21 +380,22 @@ datasets: file_type: nc_fci_oca file_key: retrieval_error_cloud_optical_thickness layer: 1 + long_name: Cloud Optical Thickness Error (error in log10(COT)) for Lower Layer standard_name: atmosphere_optical_thickness_due_to_cloud standard_error retrieved_cloud_particle_effective_radius: - name: retrieved_cloud_particle_effective_radius_upper_layer + name: retrieved_cloud_particle_effective_radius resolution: 2000 file_type: nc_fci_oca file_key: retrieved_cloud_particle_effective_radius - standard_name: effective_radius_of_cloud_particles + standard_name: effective_radius_of_cloud_particles_at_cloud_top retrieval_error_cloud_particle_effective_radius: - name: retrieval_error_cloud_particle_effective_radius_upper_layer + name: retrieval_error_cloud_particle_effective_radius resolution: 2000 file_type: nc_fci_oca file_key: retrieval_error_cloud_particle_effective_radius - standard_name: effective_radius_of_cloud_particles standard_error + standard_name: effective_radius_of_cloud_particles_at_cloud_top standard_error retrieved_cloud_top_pressure_upper_layer: name: retrieved_cloud_top_pressure_upper_layer @@ -399,6 +403,7 @@ datasets: file_type: nc_fci_oca file_key: retrieved_cloud_top_pressure layer: 0 + long_name: Cloud Top Pressure for Upper Layer standard_name: air_pressure_at_cloud_top retrieval_error_cloud_top_pressure_upper_layer: @@ -407,6 +412,7 @@ datasets: file_type: nc_fci_oca file_key: retrieval_error_cloud_top_pressure layer: 0 + long_name: Cloud Top Pressure Error for Upper Layer standard_name: air_pressure_at_cloud_top standard_error retrieved_cloud_top_pressure_lower_layer: @@ -415,6 +421,7 @@ datasets: file_type: nc_fci_oca file_key: retrieved_cloud_top_pressure layer: 1 
+ long_name: Cloud Top Pressure for Lower Layer standard_name: air_pressure_at_cloud_top retrieval_error_cloud_top_pressure_lower_layer: @@ -423,17 +430,18 @@ datasets: file_type: nc_fci_oca file_key: retrieval_error_cloud_top_pressure layer: 1 - standard_name: air_pressure_at_cloud_top standard_erro + long_name: Cloud Top Pressure Error for Lower Layer + standard_name: air_pressure_at_cloud_top standard_error retrieved_cloud_top_temperature: - name: retrieved_cloud_top_temperature_upper_layer + name: retrieved_cloud_top_temperature resolution: 2000 file_type: nc_fci_oca file_key: retrieved_cloud_top_temperature standard_name: air_temperature_at_cloud_top retrieved_cloud_top_height: - name: retrieved_cloud_top_height_upper_layer + name: retrieved_cloud_top_height resolution: 2000 file_type: nc_fci_oca file_key: retrieved_cloud_top_height @@ -650,6 +658,7 @@ datasets: file_type: nc_fci_crm file_key: historical_data standard_name: status_flag + import_enum_information: True product_quality_crm: name: product_quality_crm @@ -1227,7 +1236,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 18 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + flag_meanings: ['Clear unchanged', 'Cloud detected (restored from clear sky)'] standard_name: status_flag cloud_test_cmrt4: @@ -1313,7 +1322,7 @@ datasets: resolution: 32000 file_type: nc_fci_asr file_key: bt_max - long_name: TOA Brightess Temperature segment max + long_name: TOA Brightess Temperature Segment Max standard_name: toa_brightess_temperature cell_method: area:maximum coordinates: @@ -1325,7 +1334,7 @@ datasets: resolution: 32000 file_type: nc_fci_asr file_key: bt_mean - long_name: TOA Brightess Temperature segment mean + long_name: TOA Brightess Temperature Segment Mean standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -1337,7 +1346,7 @@ datasets: resolution: 32000 file_type: nc_fci_asr file_key: bt_min - long_name: TOA Brightess Temperature segment min + long_name: TOA Brightess Temperature Segment Min standard_name: toa_brightess_temperature cell_method: area:minimum coordinates: @@ -1349,7 +1358,7 @@ datasets: resolution: 32000 file_type: nc_fci_asr file_key: bt_std - long_name: TOA Brightess Temperature standard deviation + long_name: TOA Brightess Temperature Segment Standard Deviation standard_name: toa_brightess_temperature cell_method: area:standard_deviation coordinates: @@ -1361,7 +1370,7 @@ datasets: resolution: 32000 file_type: nc_fci_asr file_key: radiance_max - long_name: TOA max Radiance + long_name: TOA Radiance Segment Max standard_name: toa_radiance cell_method: area:maximum coordinates: @@ -1373,7 +1382,7 @@ datasets: resolution: 32000 file_type: nc_fci_asr file_key: radiance_mean - long_name: TOA mean Radiance + long_name: TOA Radiance Segment Mean standard_name: toa_radiance cell_method: area:mean coordinates: @@ -1385,7 +1394,7 @@ datasets: resolution: 32000 file_type: nc_fci_asr file_key: radiance_min - long_name: TOA min Radiance + long_name: TOA Radiance Segment Min standard_name: toa_radiance cell_method: area:minimum coordinates: @@ -1397,7 +1406,7 @@ datasets: resolution: 32000 file_type: nc_fci_asr file_key: radiance_std - long_name: TOA Outgoing Radiance standard deviation + long_name: TOA Radiance Segment Standard Deviation standard_name: toa_outgoing_radiance cell_method: area:standard_deviation coordinates: @@ -1409,9 +1418,10 @@ datasets: resolution: 32000 file_type: nc_fci_asr file_key: reflectance_max - long_name: TOA Bidirectional Reflectance 
segment max + long_name: TOA Bidirectional Reflectance Segment Max standard_name: toa_bidirectional_reflectance cell_method: area:maximum + units: '%' coordinates: - longitude - latitude @@ -1421,9 +1431,10 @@ datasets: resolution: 32000 file_type: nc_fci_asr file_key: reflectance_mean - long_name: TOA Bidirectional Reflectance segment mean + long_name: TOA Bidirectional Reflectance Segment Mean standard_name: toa_bidirectional_reflectance cell_method: area:mean + units: '%' coordinates: - longitude - latitude @@ -1433,9 +1444,10 @@ datasets: resolution: 32000 file_type: nc_fci_asr file_key: reflectance_min - long_name: TOA Bidirectional Reflectance segment min + long_name: TOA Bidirectional Reflectance Segment Min standard_name: toa_bidirectional_reflectance cell_method: area:minimum + units: '%' coordinates: - longitude - latitude @@ -1445,9 +1457,10 @@ datasets: resolution: 32000 file_type: nc_fci_asr file_key: reflectance_std - long_name: TOA Bidirectional Reflectance standard deviation + long_name: TOA Bidirectional Reflectance Segment Standard Deviation standard_name: toa_bidirectional_reflectance cell_method: area:standard_deviation + units: '%' coordinates: - longitude - latitude @@ -1479,7 +1492,6 @@ datasets: quality_radiance: name: quality_radiance resolution: 32000 - wavelength: [] file_type: nc_fci_asr file_key: quality_radiance fill_value: -1 @@ -1495,6 +1507,7 @@ datasets: file_type: nc_fci_asr file_key: land_pixel_percent standard_name: land_area_fraction + units: '%' coordinates: - longitude - latitude @@ -1505,6 +1518,7 @@ datasets: file_type: nc_fci_asr file_key: water_pixel_percent standard_name: water_area_fraction + units: '%' coordinates: - longitude - latitude @@ -1515,6 +1529,7 @@ datasets: file_type: nc_fci_asr file_key: pixel_percentage standard_name: pixels_used_fraction + units: '%' coordinates: - longitude - latitude @@ -2967,6 +2982,7 @@ datasets: long_name: Percentage of FoR pixels used (all pixels) standard_name: pixels_used_fraction category_id: 0 + units: '%' coordinates: - longitude - latitude @@ -2979,6 +2995,7 @@ datasets: long_name: Percentage of FoR pixels used (clear pixels) standard_name: pixels_used_fraction category_id: 1 + units: '%' coordinates: - longitude - latitude @@ -2991,6 +3008,7 @@ datasets: long_name: Percentage of FoR pixels used (cloudy pixels) standard_name: pixels_used_fraction category_id: 2 + units: '%' coordinates: - longitude - latitude From 6f9b49d8fb3ebcb3526eb0deecaaf2511ae9a268 Mon Sep 17 00:00:00 2001 From: Olivier Samain Date: Mon, 8 Jan 2024 12:24:39 +0100 Subject: [PATCH 1041/1416] Change ASR long names (2 digits precision for channels % missing units) --- satpy/etc/readers/fci_l2_nc.yaml | 267 ++++++++++++++++++++----------- 1 file changed, 171 insertions(+), 96 deletions(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index b1600aff50..3653b9faf6 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -1473,6 +1473,7 @@ datasets: fill_value: -1 long_name: TOA Brightess Temperature % confidence standard_name: brightness_temperature_quality + units: '%' coordinates: - longitude - latitude @@ -1485,6 +1486,7 @@ datasets: fill_value: -1 long_name: TOA Bidirectional Reflectance % confidence standard_name: reflectance_quality + units: '%' coordinates: - longitude - latitude @@ -1497,6 +1499,7 @@ datasets: fill_value: -1 long_name: TOA Radiance % confidence standard_name: radiance_quality + units: '%' coordinates: - longitude - latitude @@ -1542,9 +1545,10 
@@ datasets: file_key: reflectance_mean vis_channel_id: 0 category_id: 0 - long_name: TOA Bidirectional Reflectance segment mean at 0.4um (all pixels) + long_name: TOA Bidirectional Reflectance Segment mean at 0.44um (all pixels) standard_name: toa_bidirectional_reflectance cell_method: area:mean + units: '%' coordinates: - longitude - latitude @@ -1557,9 +1561,10 @@ datasets: file_key: reflectance_mean vis_channel_id: 0 category_id: 1 - long_name: TOA Bidirectional Reflectance segment mean at 0.4um (clear pixels) + long_name: TOA Bidirectional Reflectance Segment mean at 0.44um (clear pixels) standard_name: toa_bidirectional_reflectance cell_method: area:mean + units: '%' coordinates: - longitude - latitude @@ -1572,9 +1577,10 @@ datasets: file_key: reflectance_mean vis_channel_id: 0 category_id: 2 - long_name: TOA Bidirectional Reflectance segment mean at 0.4um (cloudy pixels) + long_name: TOA Bidirectional Reflectance Segment mean at 0.44um (cloudy pixels) standard_name: toa_bidirectional_reflectance cell_method: area:mean + units: '%' coordinates: - longitude - latitude @@ -1587,9 +1593,10 @@ datasets: file_key: reflectance_mean vis_channel_id: 1 category_id: 0 - long_name: TOA Bidirectional Reflectance segment mean at 0.5um (all pixels) + long_name: TOA Bidirectional Reflectance Segment mean at 0.51um (all pixels) standard_name: toa_bidirectional_reflectance cell_method: area:mean + units: '%' coordinates: - longitude - latitude @@ -1602,9 +1609,10 @@ datasets: file_key: reflectance_mean vis_channel_id: 1 category_id: 1 - long_name: TOA Bidirectional Reflectance segment mean at 0.5um (clear pixels) + long_name: TOA Bidirectional Reflectance Segment mean at 0.51um (clear pixels) standard_name: toa_bidirectional_reflectance cell_method: area:mean + units: '%' coordinates: - longitude - latitude @@ -1617,9 +1625,10 @@ datasets: file_key: reflectance_mean vis_channel_id: 1 category_id: 2 - long_name: TOA Bidirectional Reflectance segment mean at 0.5um (cloudy pixels) + long_name: TOA Bidirectional Reflectance Segment mean at 0.51um (cloudy pixels) standard_name: toa_bidirectional_reflectance cell_method: area:mean + units: '%' coordinates: - longitude - latitude @@ -1632,9 +1641,10 @@ datasets: file_key: reflectance_mean vis_channel_id: 2 category_id: 0 - long_name: TOA Bidirectional Reflectance segment mean at 0.6um (all pixels) + long_name: TOA Bidirectional Reflectance Segment mean at 0.64um (all pixels) standard_name: toa_bidirectional_reflectance cell_method: area:mean + units: '%' coordinates: - longitude - latitude @@ -1647,9 +1657,10 @@ datasets: file_key: reflectance_mean vis_channel_id: 2 category_id: 1 - long_name: TOA Bidirectional Reflectance segment mean at 0.6um (clear pixels) + long_name: TOA Bidirectional Reflectance Segment mean at 0.64um (clear pixels) standard_name: toa_bidirectional_reflectance cell_method: area:mean + units: '%' coordinates: - longitude - latitude @@ -1662,9 +1673,10 @@ datasets: file_key: reflectance_mean vis_channel_id: 2 category_id: 2 - long_name: TOA Bidirectional Reflectance segment mean at 0.6um (cloudy pixels) + long_name: TOA Bidirectional Reflectance Segment mean at 0.64um (cloudy pixels) standard_name: toa_bidirectional_reflectance cell_method: area:mean + units: '%' coordinates: - longitude - latitude @@ -1677,9 +1689,10 @@ datasets: file_key: reflectance_mean vis_channel_id: 3 category_id: 0 - long_name: TOA Bidirectional Reflectance segment mean at 0.9um (all pixels) + long_name: TOA Bidirectional Reflectance Segment mean at 0.86um (all 
pixels) standard_name: toa_bidirectional_reflectance cell_method: area:mean + units: '%' coordinates: - longitude - latitude @@ -1692,9 +1705,10 @@ datasets: file_key: reflectance_mean vis_channel_id: 3 category_id: 1 - long_name: TOA Bidirectional Reflectance segment mean at 0.9um (clear pixels) + long_name: TOA Bidirectional Reflectance Segment mean at 0.86um (clear pixels) standard_name: toa_bidirectional_reflectance cell_method: area:mean + units: '%' coordinates: - longitude - latitude @@ -1707,9 +1721,10 @@ datasets: file_key: reflectance_mean vis_channel_id: 3 category_id: 2 - long_name: TOA Bidirectional Reflectance segment mean at 0.9um (cloudy pixels) + long_name: TOA Bidirectional Reflectance Segment mean at 0.86um (cloudy pixels) standard_name: toa_bidirectional_reflectance cell_method: area:mean + units: '%' coordinates: - longitude - latitude @@ -1722,9 +1737,10 @@ datasets: file_key: reflectance_mean vis_channel_id: 4 category_id: 0 - long_name: TOA Bidirectional Reflectance segment mean at 0.9um (all pixels) + long_name: TOA Bidirectional Reflectance Segment mean at 0.91um (all pixels) standard_name: toa_bidirectional_reflectance cell_method: area:mean + units: '%' coordinates: - longitude - latitude @@ -1737,9 +1753,10 @@ datasets: file_key: reflectance_mean vis_channel_id: 4 category_id: 1 - long_name: TOA Bidirectional Reflectance segment mean at 0.9um (clear pixels) + long_name: TOA Bidirectional Reflectance Segment mean at 0.91um (clear pixels) standard_name: toa_bidirectional_reflectance cell_method: area:mean + units: '%' coordinates: - longitude - latitude @@ -1752,9 +1769,10 @@ datasets: file_key: reflectance_mean vis_channel_id: 4 category_id: 2 - long_name: TOA Bidirectional Reflectance segment mean at 0.9um (cloudy pixels) + long_name: TOA Bidirectional Reflectance Segment mean at 0.91um (cloudy pixels) standard_name: toa_bidirectional_reflectance cell_method: area:mean + units: '%' coordinates: - longitude - latitude @@ -1767,9 +1785,10 @@ datasets: file_key: reflectance_mean vis_channel_id: 5 category_id: 0 - long_name: TOA Bidirectional Reflectance segment mean at 1.4um (all pixels) + long_name: TOA Bidirectional Reflectance Segment mean at 1.38um (all pixels) standard_name: toa_bidirectional_reflectance cell_method: area:mean + units: '%' coordinates: - longitude - latitude @@ -1782,9 +1801,10 @@ datasets: file_key: reflectance_mean vis_channel_id: 5 category_id: 1 - long_name: TOA Bidirectional Reflectance segment mean at 1.4um (clear pixels) + long_name: TOA Bidirectional Reflectance Segment mean at 1.38um (clear pixels) standard_name: toa_bidirectional_reflectance cell_method: area:mean + units: '%' coordinates: - longitude - latitude @@ -1797,9 +1817,10 @@ datasets: file_key: reflectance_mean vis_channel_id: 5 category_id: 2 - long_name: TOA Bidirectional Reflectance segment mean at 1.4um (cloudy pixels) + long_name: TOA Bidirectional Reflectance Segment mean at 1.38um (cloudy pixels) standard_name: toa_bidirectional_reflectance cell_method: area:mean + units: '%' coordinates: - longitude - latitude @@ -1812,9 +1833,10 @@ datasets: file_key: reflectance_mean vis_channel_id: 6 category_id: 0 - long_name: TOA Bidirectional Reflectance segment mean at 1.6um (all pixels) + long_name: TOA Bidirectional Reflectance Segment mean at 1.61um (all pixels) standard_name: toa_bidirectional_reflectance cell_method: area:mean + units: '%' coordinates: - longitude - latitude @@ -1827,9 +1849,10 @@ datasets: file_key: reflectance_mean vis_channel_id: 6 category_id: 1 - 
long_name: TOA Bidirectional Reflectance segment mean at 1.6um (clear pixels) + long_name: TOA Bidirectional Reflectance Segment mean at 1.61um (clear pixels) standard_name: toa_bidirectional_reflectance cell_method: area:mean + units: '%' coordinates: - longitude - latitude @@ -1842,9 +1865,10 @@ datasets: file_key: reflectance_mean vis_channel_id: 6 category_id: 2 - long_name: TOA Bidirectional Reflectance segment mean at 1.6um (cloudy pixels) + long_name: TOA Bidirectional Reflectance Segment mean at 1.61um (cloudy pixels) standard_name: toa_bidirectional_reflectance cell_method: area:mean + units: '%' coordinates: - longitude - latitude @@ -1857,9 +1881,10 @@ datasets: file_key: reflectance_mean vis_channel_id: 7 category_id: 0 - long_name: TOA Bidirectional Reflectance segment mean at 2.2um (all pixels) + long_name: TOA Bidirectional Reflectance Segment mean at 2.25um (all pixels) standard_name: toa_bidirectional_reflectance cell_method: area:mean + units: '%' coordinates: - longitude - latitude @@ -1872,9 +1897,10 @@ datasets: file_key: reflectance_mean vis_channel_id: 7 category_id: 1 - long_name: TOA Bidirectional Reflectance segment mean at 2.2um (clear pixels) + long_name: TOA Bidirectional Reflectance Segment mean at 2.25um (clear pixels) standard_name: toa_bidirectional_reflectance cell_method: area:mean + units: '%' coordinates: - longitude - latitude @@ -1887,9 +1913,10 @@ datasets: file_key: reflectance_mean vis_channel_id: 7 category_id: 2 - long_name: TOA Bidirectional Reflectance segment mean at 2.2um (cloudy pixels) + long_name: TOA Bidirectional Reflectance Segment mean at 2.25um (cloudy pixels) standard_name: toa_bidirectional_reflectance cell_method: area:mean + units: '%' coordinates: - longitude - latitude @@ -1902,7 +1929,7 @@ datasets: file_key: bt_mean ir_channel_id: 0 category_id: 0 - long_name: TOA Brightess Temperature segment mean at 3.8um (all pixels) + long_name: TOA Brightess Temperature Segment mean at 3.80um (all pixels) standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -1917,7 +1944,7 @@ datasets: file_key: bt_mean ir_channel_id: 0 category_id: 1 - long_name: TOA Brightess Temperature segment mean at 3.8um (clear pixels) + long_name: TOA Brightess Temperature Segment mean at 3.80um (clear pixels) standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -1932,7 +1959,7 @@ datasets: file_key: bt_mean ir_channel_id: 0 category_id: 2 - long_name: TOA Brightess Temperature segment mean at 3.8um (cloudy pixels) + long_name: TOA Brightess Temperature Segment mean at 3.80um (cloudy pixels) standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -1947,7 +1974,7 @@ datasets: file_key: bt_mean ir_channel_id: 1 category_id: 0 - long_name: TOA Brightess Temperature segment mean at 6.3um (all pixels) + long_name: TOA Brightess Temperature Segment mean at 6.30um (all pixels) standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -1962,7 +1989,7 @@ datasets: file_key: bt_mean ir_channel_id: 1 category_id: 1 - long_name: TOA Brightess Temperature segment mean at 6.3um (clear pixels) + long_name: TOA Brightess Temperature Segment mean at 6.30um (clear pixels) standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -1977,7 +2004,7 @@ datasets: file_key: bt_mean ir_channel_id: 1 category_id: 2 - long_name: TOA Brightess Temperature segment mean at 6.3um (cloudy pixels) + long_name: TOA Brightess Temperature Segment mean at 6.30um (cloudy pixels) 
standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -1992,7 +2019,7 @@ datasets: file_key: bt_mean ir_channel_id: 2 category_id: 0 - long_name: TOA Brightess Temperature segment mean at 7.3um (all pixels) + long_name: TOA Brightess Temperature Segment mean at 7.35um (all pixels) standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2007,7 +2034,7 @@ datasets: file_key: bt_mean ir_channel_id: 2 category_id: 1 - long_name: TOA Brightess Temperature segment mean at 7.3um (clear pixels) + long_name: TOA Brightess Temperature Segment mean at 7.35um (clear pixels) standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2022,7 +2049,7 @@ datasets: file_key: bt_mean ir_channel_id: 2 category_id: 2 - long_name: TOA Brightess Temperature segment mean at 7.3um (cloudy pixels) + long_name: TOA Brightess Temperature Segment mean at 7.35um (cloudy pixels) standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2037,7 +2064,7 @@ datasets: file_key: bt_mean ir_channel_id: 3 category_id: 0 - long_name: TOA Brightess Temperature segment mean at 8.7um (all pixels) + long_name: TOA Brightess Temperature Segment mean at 8.70um (all pixels) standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2052,7 +2079,7 @@ datasets: file_key: bt_mean ir_channel_id: 3 category_id: 1 - long_name: TOA Brightess Temperature segment mean at 8.7um (clear pixels) + long_name: TOA Brightess Temperature Segment mean at 8.70um (clear pixels) standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2067,7 +2094,7 @@ datasets: file_key: bt_mean ir_channel_id: 3 category_id: 2 - long_name: TOA Brightess Temperature segment mean at 8.7um (cloudy pixels) + long_name: TOA Brightess Temperature Segment mean at 8.70um (cloudy pixels) standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2082,7 +2109,7 @@ datasets: file_key: bt_mean ir_channel_id: 4 category_id: 0 - long_name: TOA Brightess Temperature segment mean at 9.7um (all pixels) + long_name: TOA Brightess Temperature Segment mean at 9.66um (all pixels) standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2097,7 +2124,7 @@ datasets: file_key: bt_mean ir_channel_id: 4 category_id: 1 - long_name: TOA Brightess Temperature segment mean at 9.7um (clear pixels) + long_name: TOA Brightess Temperature Segment mean at 9.66um (clear pixels) standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2112,7 +2139,7 @@ datasets: file_key: bt_mean ir_channel_id: 4 category_id: 2 - long_name: TOA Brightess Temperature segment mean at 9.7um (cloudy pixels) + long_name: TOA Brightess Temperature Segment mean at 9.66um (cloudy pixels) standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2127,7 +2154,7 @@ datasets: file_key: bt_mean ir_channel_id: 5 category_id: 0 - long_name: TOA Brightess Temperature segment mean at 10.5um (all pixels) + long_name: TOA Brightess Temperature Segment mean at 10.50um (all pixels) standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2142,7 +2169,7 @@ datasets: file_key: bt_mean ir_channel_id: 5 category_id: 1 - long_name: TOA Brightess Temperature segment mean at 10.5um (clear pixels) + long_name: TOA Brightess Temperature Segment mean at 10.50um (clear pixels) standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2157,7 +2184,7 @@ datasets: file_key: bt_mean 
ir_channel_id: 5 category_id: 2 - long_name: TOA Brightess Temperature segment mean at 10.5um (cloudy pixels) + long_name: TOA Brightess Temperature Segment mean at 10.50um (cloudy pixels) standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2172,7 +2199,7 @@ datasets: file_key: bt_mean ir_channel_id: 6 category_id: 0 - long_name: TOA Brightess Temperature segment mean at 12.3um (all pixels) + long_name: TOA Brightess Temperature Segment mean at 12.30um (all pixels) standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2187,7 +2214,7 @@ datasets: file_key: bt_mean ir_channel_id: 6 category_id: 1 - long_name: TOA Brightess Temperature segment mean at 12.3um (clear pixels) + long_name: TOA Brightess Temperature Segment mean at 12.30um (clear pixels) standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2202,7 +2229,7 @@ datasets: file_key: bt_mean ir_channel_id: 6 category_id: 2 - long_name: TOA Brightess Temperature segment mean at 12.3um (cloudy pixels) + long_name: TOA Brightess Temperature Segment mean at 12.30um (cloudy pixels) standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2217,7 +2244,7 @@ datasets: file_key: bt_mean ir_channel_id: 7 category_id: 0 - long_name: TOA Brightess Temperature segment mean at 13.3um (all pixels) + long_name: TOA Brightess Temperature Segment mean at 13.30um (all pixels) standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2232,7 +2259,7 @@ datasets: file_key: bt_mean ir_channel_id: 7 category_id: 1 - long_name: TOA Brightess Temperature segment mean at 13.3um (clear pixels) + long_name: TOA Brightess Temperature Segment mean at 13.30um (clear pixels) standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2247,7 +2274,7 @@ datasets: file_key: bt_mean ir_channel_id: 7 category_id: 2 - long_name: TOA Brightess Temperature segment mean at 13.3um (cloudy pixels) + long_name: TOA Brightess Temperature Segment mean at 13.30um (cloudy pixels) standard_name: toa_brightess_temperature cell_method: area:mean coordinates: @@ -2263,8 +2290,9 @@ datasets: vis_channel_id: 0 category_id: 0 fill_value: -1 - long_name: TOA Bidirectional Reflectance % confidence at 0.4um (all pixels) + long_name: TOA Bidirectional Reflectance % Confidence at 0.44um (all pixels) standard_name: reflectance_quality + units: '%' coordinates: - longitude - latitude @@ -2278,8 +2306,9 @@ datasets: vis_channel_id: 0 category_id: 1 fill_value: -1 - long_name: TOA Bidirectional Reflectance % confidence at 0.4um (clear pixels) + long_name: TOA Bidirectional Reflectance % Confidence at 0.44um (clear pixels) standard_name: reflectance_quality + units: '%' coordinates: - longitude - latitude @@ -2293,8 +2322,9 @@ datasets: vis_channel_id: 0 category_id: 2 fill_value: -1 - long_name: TOA Bidirectional Reflectance % confidence at 0.4um (cloudy pixels) + long_name: TOA Bidirectional Reflectance % Confidence at 0.44um (cloudy pixels) standard_name: reflectance_quality + units: '%' coordinates: - longitude - latitude @@ -2308,8 +2338,9 @@ datasets: vis_channel_id: 1 category_id: 0 fill_value: -1 - long_name: TOA Bidirectional Reflectance % confidence at 0.5um (all pixels) + long_name: TOA Bidirectional Reflectance % Confidence at 0.51um (all pixels) standard_name: reflectance_quality + units: '%' coordinates: - longitude - latitude @@ -2323,8 +2354,9 @@ datasets: vis_channel_id: 1 category_id: 1 fill_value: -1 - long_name: TOA Bidirectional 
Reflectance % confidence at 0.5um (clear pixels) + long_name: TOA Bidirectional Reflectance % Confidence at 0.51um (clear pixels) standard_name: reflectance_quality + units: '%' coordinates: - longitude - latitude @@ -2338,8 +2370,9 @@ datasets: vis_channel_id: 1 category_id: 2 fill_value: -1 - long_name: TOA Bidirectional Reflectance % confidence at 0.5um (cloudy pixels) + long_name: TOA Bidirectional Reflectance % Confidence at 0.51um (cloudy pixels) standard_name: reflectance_quality + units: '%' coordinates: - longitude - latitude @@ -2353,8 +2386,9 @@ datasets: vis_channel_id: 2 category_id: 0 fill_value: -1 - long_name: TOA Bidirectional Reflectance % confidence at 0.6um (all pixels) + long_name: TOA Bidirectional Reflectance % Confidence at 0.64um (all pixels) standard_name: reflectance_quality + units: '%' coordinates: - longitude - latitude @@ -2368,8 +2402,9 @@ datasets: vis_channel_id: 2 category_id: 1 fill_value: -1 - long_name: TOA Bidirectional Reflectance % confidence at 0.6um (clear pixels) + long_name: TOA Bidirectional Reflectance % Confidence at 0.64um (clear pixels) standard_name: reflectance_quality + units: '%' coordinates: - longitude - latitude @@ -2383,8 +2418,9 @@ datasets: vis_channel_id: 2 category_id: 2 fill_value: -1 - long_name: TOA Bidirectional Reflectance % confidence at 0.6um (cloudy pixels) + long_name: TOA Bidirectional Reflectance % Confidence at 0.64um (cloudy pixels) standard_name: reflectance_quality + units: '%' coordinates: - longitude - latitude @@ -2398,8 +2434,9 @@ datasets: vis_channel_id: 3 category_id: 0 fill_value: -1 - long_name: TOA Bidirectional Reflectance % confidence at 0.9um (all pixels) + long_name: TOA Bidirectional Reflectance % Confidence at 0.86um (all pixels) standard_name: reflectance_quality + units: '%' coordinates: - longitude - latitude @@ -2413,8 +2450,9 @@ datasets: vis_channel_id: 3 category_id: 1 fill_value: -1 - long_name: TOA Bidirectional Reflectance % confidence at 0.9um (clear pixels) + long_name: TOA Bidirectional Reflectance % Confidence at 0.86um (clear pixels) standard_name: reflectance_quality + units: '%' coordinates: - longitude - latitude @@ -2428,8 +2466,9 @@ datasets: vis_channel_id: 3 category_id: 2 fill_value: -1 - long_name: TOA Bidirectional Reflectance % confidence at 0.9um (cloudy pixels) + long_name: TOA Bidirectional Reflectance % Confidence at 0.86um (cloudy pixels) standard_name: reflectance_quality + units: '%' coordinates: - longitude - latitude @@ -2443,8 +2482,9 @@ datasets: vis_channel_id: 4 category_id: 0 fill_value: -1 - long_name: TOA Bidirectional Reflectance % confidence at 0.9um (all pixels) + long_name: TOA Bidirectional Reflectance % Confidence at 0.91um (all pixels) standard_name: reflectance_quality + units: '%' coordinates: - longitude - latitude @@ -2458,8 +2498,9 @@ datasets: vis_channel_id: 4 category_id: 1 fill_value: -1 - long_name: TOA Bidirectional Reflectance % confidence at 0.9um (clear pixels) + long_name: TOA Bidirectional Reflectance % Confidence at 0.91um (clear pixels) standard_name: reflectance_quality + units: '%' coordinates: - longitude - latitude @@ -2473,8 +2514,9 @@ datasets: vis_channel_id: 4 category_id: 2 fill_value: -1 - long_name: TOA Bidirectional Reflectance % confidence at 0.9um (cloudy pixels) + long_name: TOA Bidirectional Reflectance % Confidence at 0.91um (cloudy pixels) standard_name: reflectance_quality + units: '%' coordinates: - longitude - latitude @@ -2488,8 +2530,9 @@ datasets: vis_channel_id: 5 category_id: 0 fill_value: -1 - long_name: 
TOA Bidirectional Reflectance % confidence at 1.4um (all pixels) + long_name: TOA Bidirectional Reflectance % Confidence at 1.38um (all pixels) standard_name: reflectance_quality + units: '%' coordinates: - longitude - latitude @@ -2503,8 +2546,9 @@ datasets: vis_channel_id: 5 category_id: 1 fill_value: -1 - long_name: TOA Bidirectional Reflectance % confidence at 1.4um (clear pixels) + long_name: TOA Bidirectional Reflectance % Confidence at 1.38um (clear pixels) standard_name: reflectance_quality + units: '%' coordinates: - longitude - latitude @@ -2518,8 +2562,9 @@ datasets: vis_channel_id: 5 category_id: 2 fill_value: -1 - long_name: TOA Bidirectional Reflectance % confidence at 1.4um (cloudy pixels) + long_name: TOA Bidirectional Reflectance % Confidence at 1.38um (cloudy pixels) standard_name: reflectance_quality + units: '%' coordinates: - longitude - latitude @@ -2533,8 +2578,9 @@ datasets: vis_channel_id: 6 category_id: 0 fill_value: -1 - long_name: TOA Bidirectional Reflectance % confidence at 1.6um (all pixels) + long_name: TOA Bidirectional Reflectance % Confidence at 1.61um (all pixels) standard_name: reflectance_quality + units: '%' coordinates: - longitude - latitude @@ -2548,8 +2594,9 @@ datasets: vis_channel_id: 6 category_id: 1 fill_value: -1 - long_name: TOA Bidirectional Reflectance % confidence at 1.6um (clear pixels) + long_name: TOA Bidirectional Reflectance % Confidence at 1.61um (clear pixels) standard_name: reflectance_quality + units: '%' coordinates: - longitude - latitude @@ -2563,8 +2610,9 @@ datasets: vis_channel_id: 6 category_id: 2 fill_value: -1 - long_name: TOA Bidirectional Reflectance % confidence at 1.6um (cloudy pixels) + long_name: TOA Bidirectional Reflectance % Confidence at 1.61um (cloudy pixels) standard_name: reflectance_quality + units: '%' coordinates: - longitude - latitude @@ -2578,8 +2626,9 @@ datasets: vis_channel_id: 7 category_id: 0 fill_value: -1 - long_name: TOA Bidirectional Reflectance % confidence at 2.2um (all pixels) + long_name: TOA Bidirectional Reflectance % Confidence at 2.25um (all pixels) standard_name: reflectance_quality + units: '%' coordinates: - longitude - latitude @@ -2593,8 +2642,9 @@ datasets: vis_channel_id: 7 category_id: 1 fill_value: -1 - long_name: TOA Bidirectional Reflectance % confidence at 2.2um (clear pixels) + long_name: TOA Bidirectional Reflectance % Confidence at 2.25um (clear pixels) standard_name: reflectance_quality + units: '%' coordinates: - longitude - latitude @@ -2608,8 +2658,9 @@ datasets: vis_channel_id: 7 category_id: 2 fill_value: -1 - long_name: TOA Bidirectional Reflectance % confidence at 2.2um (cloudy pixels) + long_name: TOA Bidirectional Reflectance % Confidence at 2.25um (cloudy pixels) standard_name: reflectance_quality + units: '%' coordinates: - longitude - latitude @@ -2623,8 +2674,9 @@ datasets: ir_channel_id: 0 category_id: 0 fill_value: -1 - long_name: TOA Brightess Temperature % confidence at 3.8um (all pixels) + long_name: TOA Brightess Temperature % Confidence at 3.80um (all pixels) standard_name: brightness_temperature_quality + units: '%' coordinates: - longitude - latitude @@ -2638,8 +2690,9 @@ datasets: ir_channel_id: 0 category_id: 1 fill_value: -1 - long_name: TOA Brightess Temperature % confidence at 3.8um (clear pixels) + long_name: TOA Brightess Temperature % Confidence at 3.80um (clear pixels) standard_name: brightness_temperature_quality + units: '%' coordinates: - longitude - latitude @@ -2653,8 +2706,9 @@ datasets: ir_channel_id: 0 category_id: 2 fill_value: -1 
- long_name: TOA Brightess Temperature % confidence at 3.8um (cloudy pixels) + long_name: TOA Brightess Temperature % Confidence at 3.80um (cloudy pixels) standard_name: brightness_temperature_quality + units: '%' coordinates: - longitude - latitude @@ -2668,8 +2722,9 @@ datasets: ir_channel_id: 1 category_id: 0 fill_value: -1 - long_name: TOA Brightess Temperature % confidence at 6.3um (all pixels) + long_name: TOA Brightess Temperature % Confidence at 6.30um (all pixels) standard_name: brightness_temperature_quality + units: '%' coordinates: - longitude - latitude @@ -2683,8 +2738,9 @@ datasets: ir_channel_id: 1 category_id: 1 fill_value: -1 - long_name: TOA Brightess Temperature % confidence at 6.3um (clear pixels) + long_name: TOA Brightess Temperature % Confidence at 6.30um (clear pixels) standard_name: brightness_temperature_quality + units: '%' coordinates: - longitude - latitude @@ -2698,8 +2754,9 @@ datasets: ir_channel_id: 1 category_id: 2 fill_value: -1 - long_name: TOA Brightess Temperature % confidence at 6.3um (cloudy pixels) + long_name: TOA Brightess Temperature % Confidence at 6.30um (cloudy pixels) standard_name: brightness_temperature_quality + units: '%' coordinates: - longitude - latitude @@ -2713,8 +2770,9 @@ datasets: ir_channel_id: 2 category_id: 0 fill_value: -1 - long_name: TOA Brightess Temperature % confidence at 7.3um (all pixels) + long_name: TOA Brightess Temperature % Confidence at 7.35um (all pixels) standard_name: brightness_temperature_quality + units: '%' coordinates: - longitude - latitude @@ -2728,8 +2786,9 @@ datasets: ir_channel_id: 2 category_id: 1 fill_value: -1 - long_name: TOA Brightess Temperature % confidence at 7.3um (clear pixels) + long_name: TOA Brightess Temperature % Confidence at 7.35um (clear pixels) standard_name: brightness_temperature_quality + units: '%' coordinates: - longitude - latitude @@ -2743,8 +2802,9 @@ datasets: ir_channel_id: 2 category_id: 2 fill_value: -1 - long_name: TOA Brightess Temperature % confidence at 7.3um (cloudy pixels) + long_name: TOA Brightess Temperature % Confidence at 7.35um (cloudy pixels) standard_name: brightness_temperature_quality + units: '%' coordinates: - longitude - latitude @@ -2758,8 +2818,9 @@ datasets: ir_channel_id: 3 category_id: 0 fill_value: -1 - long_name: TOA Brightess Temperature % confidence at 8.7um (all pixels) + long_name: TOA Brightess Temperature % Confidence at 8.70um (all pixels) standard_name: brightness_temperature_quality + units: '%' coordinates: - longitude - latitude @@ -2773,8 +2834,9 @@ datasets: ir_channel_id: 3 category_id: 1 fill_value: -1 - long_name: TOA Brightess Temperature % confidence at 8.7um (clear pixels) + long_name: TOA Brightess Temperature % Confidence at 8.70um (clear pixels) standard_name: brightness_temperature_quality + units: '%' coordinates: - longitude - latitude @@ -2788,8 +2850,9 @@ datasets: ir_channel_id: 3 category_id: 2 fill_value: -1 - long_name: TOA Brightess Temperature % confidence at 8.7um (cloudy pixels) + long_name: TOA Brightess Temperature % Confidence at 8.70um (cloudy pixels) standard_name: brightness_temperature_quality + units: '%' coordinates: - longitude - latitude @@ -2803,8 +2866,9 @@ datasets: ir_channel_id: 4 category_id: 0 fill_value: -1 - long_name: TOA Brightess Temperature % confidence at 9.7um (all pixels) + long_name: TOA Brightess Temperature % Confidence at 9.66um (all pixels) standard_name: brightness_temperature_quality + units: '%' coordinates: - longitude - latitude @@ -2818,8 +2882,9 @@ datasets: 
ir_channel_id: 4 category_id: 1 fill_value: -1 - long_name: TOA Brightess Temperature % confidence at 9.7um (clear pixels) + long_name: TOA Brightess Temperature % Confidence at 9.66um (clear pixels) standard_name: brightness_temperature_quality + units: '%' coordinates: - longitude - latitude @@ -2833,8 +2898,9 @@ datasets: ir_channel_id: 4 category_id: 2 fill_value: -1 - long_name: TOA Brightess Temperature % confidence at 9.7um (cloudy pixels) + long_name: TOA Brightess Temperature % Confidence at 9.66um (cloudy pixels) standard_name: brightness_temperature_quality + units: '%' coordinates: - longitude - latitude @@ -2848,8 +2914,9 @@ datasets: ir_channel_id: 5 category_id: 0 fill_value: -1 - long_name: TOA Brightess Temperature % confidence at 10.5um (all pixels) + long_name: TOA Brightess Temperature % Confidence at 10.50um (all pixels) standard_name: brightness_temperature_quality + units: '%' coordinates: - longitude - latitude @@ -2863,8 +2930,9 @@ datasets: ir_channel_id: 5 category_id: 1 fill_value: -1 - long_name: TOA Brightess Temperature % confidence at 10.5um (clear pixels) + long_name: TOA Brightess Temperature % Confidence at 10.50um (clear pixels) standard_name: brightness_temperature_quality + units: '%' coordinates: - longitude - latitude @@ -2878,8 +2946,9 @@ datasets: ir_channel_id: 5 category_id: 2 fill_value: -1 - long_name: TOA Brightess Temperature % confidence at 10.5um (cloudy pixels) + long_name: TOA Brightess Temperature % Confidence at 10.50um (cloudy pixels) standard_name: brightness_temperature_quality + units: '%' coordinates: - longitude - latitude @@ -2893,8 +2962,9 @@ datasets: ir_channel_id: 6 category_id: 0 fill_value: -1 - long_name: TOA Brightess Temperature % confidence at 12.3um (all pixels) + long_name: TOA Brightess Temperature % Confidence at 12.30um (all pixels) standard_name: brightness_temperature_quality + units: '%' coordinates: - longitude - latitude @@ -2908,8 +2978,9 @@ datasets: ir_channel_id: 6 category_id: 1 fill_value: -1 - long_name: TOA Brightess Temperature % confidence at 12.3um (clear pixels) + long_name: TOA Brightess Temperature % Confidence at 12.30um (clear pixels) standard_name: brightness_temperature_quality + units: '%' coordinates: - longitude - latitude @@ -2923,8 +2994,9 @@ datasets: ir_channel_id: 6 category_id: 2 fill_value: -1 - long_name: TOA Brightess Temperature % confidence at 12.3um (cloudy pixels) + long_name: TOA Brightess Temperature % Confidence at 12.30um (cloudy pixels) standard_name: brightness_temperature_quality + units: '%' coordinates: - longitude - latitude @@ -2938,8 +3010,9 @@ datasets: ir_channel_id: 7 category_id: 0 fill_value: -1 - long_name: TOA Brightess Temperature % confidence at 13.3um (all pixels) + long_name: TOA Brightess Temperature % Confidence at 13.30um (all pixels) standard_name: brightness_temperature_quality + units: '%' coordinates: - longitude - latitude @@ -2953,8 +3026,9 @@ datasets: ir_channel_id: 7 category_id: 1 fill_value: -1 - long_name: TOA Brightess Temperature % confidence at 13.3um (clear pixels) + long_name: TOA Brightess Temperature % Confidence at 13.30um (clear pixels) standard_name: brightness_temperature_quality + units: '%' coordinates: - longitude - latitude @@ -2968,8 +3042,9 @@ datasets: ir_channel_id: 7 category_id: 2 fill_value: -1 - long_name: TOA Brightess Temperature % confidence at 13.3um (cloudy pixels) + long_name: TOA Brightess Temperature % Confidence at 13.30um (cloudy pixels) standard_name: brightness_temperature_quality + units: '%' 
coordinates: - longitude - latitude From 582070648ff21c5ea7f99ff57e43338226ad1e23 Mon Sep 17 00:00:00 2001 From: Olivier Samain Date: Tue, 9 Jan 2024 16:52:22 +0100 Subject: [PATCH 1042/1416] Add unit test for enumeration types --- satpy/tests/reader_tests/test_fci_l2_nc.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/satpy/tests/reader_tests/test_fci_l2_nc.py b/satpy/tests/reader_tests/test_fci_l2_nc.py index 84681b0f02..89242409ac 100644 --- a/satpy/tests/reader_tests/test_fci_l2_nc.py +++ b/satpy/tests/reader_tests/test_fci_l2_nc.py @@ -102,6 +102,12 @@ def setUp(self): mtg_geos_projection.inverse_flattening = 298.257223563 mtg_geos_projection.perspective_point_height = 35786400. + # Add enumerated type + enum_dict = {"False": 0, "True": 1} + bool_type = nc.createEnumType(np.uint8,"bool_t",enum_dict) + nc.createVariable("quality_flag", bool_type, + dimensions=("number_of_rows", "number_of_columns")) + self.fh = FciL2NCFileHandler(filename=self.test_file, filename_info={}, filetype_info={}) def tearDown(self): @@ -215,6 +221,19 @@ def test_dataset_with_scalar(self): with pytest.raises(NotImplementedError): self.fh.get_area_def(None) + def test_emumerations(self): + """Test the conversion of enumerated type information into flag_values and flag_meanings.""" + dataset = self.fh.get_dataset(make_dataid(name="test_enum", resolution=2000), + {"name": "quality_flag", + "file_key": "quality_flag", + "file_type": "test_file_type", + "import_enum_information": True}) + attributes = dataset.attrs + assert "flag_values" in attributes + assert attributes["flag_values"] == [0,1] + assert "flag_meanings" in attributes + assert attributes["flag_meanings"] == ["False","True"] + class TestFciL2NCSegmentFileHandler(unittest.TestCase): """Test the FciL2NCSegmentFileHandler reader.""" From 64091a387bac92937eb7b93b87e4481a0c8cc685 Mon Sep 17 00:00:00 2001 From: Johan Strandgren <42137969+strandgren@users.noreply.github.com> Date: Wed, 10 Jan 2024 09:25:11 +0100 Subject: [PATCH 1043/1416] Fix open points for CLMTest datasets. 
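
For context, the import_enum_information option exercised by the new test
maps a netCDF enumerated type onto CF-style flag attributes. A minimal
sketch of that conversion, assuming netCDF4's EnumType interface (the
helper name and the sorting step are illustrative, not the reader's
actual internals):

    import netCDF4

    def enum_to_flags(nc_path, var_name):
        """Derive flag_values/flag_meanings from a netCDF enum variable."""
        with netCDF4.Dataset(nc_path) as nc:
            # For enum variables, .datatype is an EnumType whose enum_dict
            # maps labels to codes, e.g. {"False": 0, "True": 1}.
            enum_dict = nc.variables[var_name].datatype.enum_dict
        # Sort by numeric code so values and meanings stay aligned.
        items = sorted(enum_dict.items(), key=lambda kv: kv[1])
        return [int(v) for _, v in items], [name for name, _ in items]

For the boolean enum created in the test setup this yields flag_values
[0, 1] and flag_meanings ["False", "True"], matching the assertions in
test_emumerations.
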
--- satpy/etc/readers/fci_l2_nc.yaml | 52 +++++++++++--------------------- 1 file changed, 17 insertions(+), 35 deletions(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index 3653b9faf6..76c18f1fe7 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -1056,7 +1056,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 0 flag_values: [0,1] - flag_meanings: ['No snow/Ice detected',' Snow/Ice detected'] + flag_meanings: ['No snow/ice detected',' Snow/ice detected'] standard_name: status_flag cloud_test_cmt1: @@ -1066,7 +1066,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 1 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + flag_meanings: ['No cloud detected','Cloud detected'] standard_name: status_flag cloud_test_cmt2: @@ -1076,7 +1076,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 2 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + flag_meanings: ['No cloud detected','Cloud detected'] standard_name: status_flag cloud_test_cmt3: @@ -1086,7 +1086,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 3 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + flag_meanings: ['No cloud detected','Cloud detected'] standard_name: status_flag cloud_test_cmt4: @@ -1096,7 +1096,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 4 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + flag_meanings: ['No cloud detected','Cloud detected'] standard_name: status_flag cloud_test_cmt5: @@ -1106,7 +1106,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 5 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + flag_meanings: ['No cloud detected','Cloud detected'] standard_name: status_flag cloud_test_cmt6: @@ -1116,7 +1116,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 6 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + flag_meanings: ['No cloud detected','Cloud detected'] standard_name: status_flag cloud_test_cmt7: @@ -1126,7 +1126,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 7 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + flag_meanings: ['No cloud detected','Cloud detected'] standard_name: status_flag cloud_test_cmt8: @@ -1136,7 +1136,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 8 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + flag_meanings: ['No cloud detected','Cloud detected'] standard_name: status_flag cloud_test_cmt9: @@ -1146,7 +1146,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 9 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + flag_meanings: ['No cloud detected','Cloud detected'] standard_name: status_flag cloud_test_cmt10: @@ -1156,7 +1156,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 10 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + flag_meanings: ['No cloud detected','Cloud detected'] standard_name: status_flag cloud_test_cmt11: @@ -1166,7 +1166,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 11 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + flag_meanings: ['No cloud detected','Cloud detected'] standard_name: status_flag cloud_test_cmt12: @@ -1176,7 +1176,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 12 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud 
detected'] + flag_meanings: ['No cloud detected','Cloud detected'] standard_name: status_flag cloud_test_cmt13: @@ -1186,7 +1186,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 13 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + flag_meanings: ['No cloud detected','Cloud detected'] standard_name: status_flag cloud_test_cmt14: @@ -1196,7 +1196,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 14 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + flag_meanings: ['No cloud detected','Cloud detected'] standard_name: status_flag cloud_test_opqt: @@ -1206,7 +1206,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 15 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + flag_meanings: ['No opaqueness detected', 'Opaqueness detected'] standard_name: status_flag cloud_test_cmrt1: @@ -1216,7 +1216,7 @@ datasets: file_key: cloud_mask_test_result extract_byte: 16 flag_values: [0,1] - flag_meanings: ['Cloud undetected','Cloud detected'] + flag_meanings: ['No cloud detected','Cloud detected'] standard_name: status_flag cloud_test_cmrt2: @@ -1298,24 +1298,6 @@ datasets: standard_name: status_flag import_enum_information: True - product_quality_clmtest: - name: product_quality_clmtest - file_type: nc_fci_test_clm - file_key: product_quality - standard_name: product_quality - - product_completeness_clmtest: - name: product_completeness_clmtest - file_type: nc_fci_test_clm - file_key: product_completeness - standard_name: product_completeness - - product_timeliness_clmtest: - name: product_timeliness_clmtest - file_type: nc_fci_test_clm - file_key: product_timeliness - standard_name: product_timeliness - # ASR bt_max: name: bt_max From d8290d1561c78293fb184862cc2e6f1dc2412b21 Mon Sep 17 00:00:00 2001 From: Johan Strandgren <42137969+strandgren@users.noreply.github.com> Date: Wed, 10 Jan 2024 09:35:56 +0100 Subject: [PATCH 1044/1416] Fix open points for CRM datasets --- satpy/etc/readers/fci_l2_nc.yaml | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index 76c18f1fe7..cd1f91dfae 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -558,6 +558,8 @@ datasets: file_type: nc_fci_crm file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance + long_name: TOA Bidirectional Reflectance (temporal average) + units: '%' crm_vis04: name: crm_vis04 @@ -566,6 +568,8 @@ datasets: file_type: nc_fci_crm file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance + long_name: TOA Bidirectional Reflectance at 0.44um (temporal average) + units: '%' vis_channel_id: 0 crm_vis05: @@ -575,6 +579,8 @@ datasets: file_type: nc_fci_crm file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance + long_name: TOA Bidirectional Reflectance at 0.51um (temporal average) + units: '%' vis_channel_id: 1 crm_vis06: @@ -584,6 +590,8 @@ datasets: file_type: nc_fci_crm file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance + long_name: TOA Bidirectional Reflectance at 0.64um (temporal average) + units: '%' vis_channel_id: 2 crm_vis08: @@ -593,6 +601,8 @@ datasets: file_type: nc_fci_crm file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance + long_name: TOA Bidirectional Reflectance at 0.86um (temporal average) + units: '%' vis_channel_id: 3 crm_vis09: @@ -602,6 +612,8 @@ datasets: file_type: 
nc_fci_crm file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance + long_name: TOA Bidirectional Reflectance at 0.91um (temporal average) + units: '%' vis_channel_id: 4 crm_nir13: @@ -611,6 +623,8 @@ datasets: file_type: nc_fci_crm file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance + long_name: TOA Bidirectional Reflectance at 1.38um (temporal average) + units: '%' vis_channel_id: 5 crm_nir16: @@ -620,6 +634,8 @@ datasets: file_type: nc_fci_crm file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance + long_name: TOA Bidirectional Reflectance at 1.61um (temporal average) + units: '%' vis_channel_id: 6 crm_nir22: @@ -629,6 +645,8 @@ datasets: file_type: nc_fci_crm file_key: mean_clear_sky_reflectance standard_name: toa_bidirectional_reflectance + long_name: TOA Bidirectional Reflectance at 2.25um (temporal average) + units: '%' vis_channel_id: 7 mean_sza: @@ -637,6 +655,7 @@ datasets: file_type: nc_fci_crm file_key: mean_solar_zenith standard_name: solar_zenith_angle + long_name: Solar Zenith Angle (temporal average) mean_rel_azi: name: mean_rel_azi @@ -644,6 +663,7 @@ datasets: file_type: nc_fci_crm file_key: mean_rel_solar_sat_azimuth standard_name: relative_sun_sensor_azimuth_angle + long_name: Relative Solar Satellite Azimuth Angle (temporal average) n_acc: name: n_acc From a1ad98921059fbcc9107cda80b6190adfdcdab55 Mon Sep 17 00:00:00 2001 From: Johan Strandgren <42137969+strandgren@users.noreply.github.com> Date: Wed, 10 Jan 2024 09:51:43 +0100 Subject: [PATCH 1045/1416] Set unit to None if 'none' in NetCDF file. --- satpy/readers/fci_l2_nc.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index d5b6ce95bb..0a750376fa 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -94,7 +94,10 @@ def _set_attributes(self, variable, dataset_info, segmented=False): variable.attrs.setdefault("units", None) if "unit" in variable.attrs: # Need to convert this attribute to the expected satpy entry - variable.attrs.update({"units": variable.attrs["unit"]}) + ncunit = variable.attrs["unit"] + if ncunit == 'none': + ncunit = None + variable.attrs.update({"units": ncunit}) del variable.attrs["unit"] variable.attrs.update(dataset_info) From 0f42766da54e49fc787ccb7f22bfa896bb8a88d8 Mon Sep 17 00:00:00 2001 From: Johan Strandgren <42137969+strandgren@users.noreply.github.com> Date: Wed, 10 Jan 2024 09:54:56 +0100 Subject: [PATCH 1046/1416] Harmonize toa_outgoing_radiance standard_name --- satpy/etc/readers/fci_l2_nc.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index cd1f91dfae..525ffa11ec 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -1373,7 +1373,7 @@ datasets: file_type: nc_fci_asr file_key: radiance_max long_name: TOA Radiance Segment Max - standard_name: toa_radiance + standard_name: toa_outgoing_radiance cell_method: area:maximum coordinates: - longitude @@ -1385,7 +1385,7 @@ datasets: file_type: nc_fci_asr file_key: radiance_mean long_name: TOA Radiance Segment Mean - standard_name: toa_radiance + standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1397,7 +1397,7 @@ datasets: file_type: nc_fci_asr file_key: radiance_min long_name: TOA Radiance Segment Min - standard_name: toa_radiance + standard_name: toa_outgoing_radiance cell_method: 
area:minimum coordinates: - longitude From e3947a8756657d8fd7a785dce0f032825f1ad135 Mon Sep 17 00:00:00 2001 From: Johan Strandgren <42137969+strandgren@users.noreply.github.com> Date: Wed, 10 Jan 2024 09:59:00 +0100 Subject: [PATCH 1047/1416] Change to double quotes. --- satpy/readers/fci_l2_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index 0a750376fa..34623e3e07 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -95,7 +95,7 @@ def _set_attributes(self, variable, dataset_info, segmented=False): if "unit" in variable.attrs: # Need to convert this attribute to the expected satpy entry ncunit = variable.attrs["unit"] - if ncunit == 'none': + if ncunit == "none": ncunit = None variable.attrs.update({"units": ncunit}) del variable.attrs["unit"] From 0fddd56b3744be02a4032e5b5da6c60e4bad122e Mon Sep 17 00:00:00 2001 From: Johan Strandgren <42137969+strandgren@users.noreply.github.com> Date: Wed, 10 Jan 2024 12:07:16 +0100 Subject: [PATCH 1048/1416] Move 'none' to None conversion to make it apply in all situations. --- satpy/readers/fci_l2_nc.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index 34623e3e07..6290f8161c 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -94,10 +94,7 @@ def _set_attributes(self, variable, dataset_info, segmented=False): variable.attrs.setdefault("units", None) if "unit" in variable.attrs: # Need to convert this attribute to the expected satpy entry - ncunit = variable.attrs["unit"] - if ncunit == "none": - ncunit = None - variable.attrs.update({"units": ncunit}) + variable.attrs.update({"units": variable.attrs["unit"]}) del variable.attrs["unit"] variable.attrs.update(dataset_info) @@ -120,6 +117,9 @@ def _set_attributes(self, variable, dataset_info, segmented=False): variable.attrs["flag_meanings"] = flag_meanings netCDF4_dataset.close() + if variable.attrs["units"] == "none": + variable.attrs.update({"units": None}) + return variable def _slice_dataset(self, variable, dataset_info, dimensions): From 176a66c9152b7f7bff46a0d882a4a3a4efcd0154 Mon Sep 17 00:00:00 2001 From: Johan Strandgren <42137969+strandgren@users.noreply.github.com> Date: Wed, 10 Jan 2024 12:10:44 +0100 Subject: [PATCH 1049/1416] Add tests for unit extraction and assignment --- satpy/tests/reader_tests/test_fci_l2_nc.py | 36 +++++++++++++++++++++- 1 file changed, 35 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_fci_l2_nc.py b/satpy/tests/reader_tests/test_fci_l2_nc.py index 89242409ac..d59c472d7b 100644 --- a/satpy/tests/reader_tests/test_fci_l2_nc.py +++ b/satpy/tests/reader_tests/test_fci_l2_nc.py @@ -95,6 +95,7 @@ def setUp(self): "number_of_columns")) two_layers_dataset[0, :, :] = np.ones((100, 10)) two_layers_dataset[1, :, :] = 2 * np.ones((100, 10)) + two_layers_dataset.unit = "test_unit" mtg_geos_projection = nc.createVariable("mtg_geos_projection", int, dimensions=()) mtg_geos_projection.longitude_of_projection_origin = 0.0 @@ -173,7 +174,6 @@ def test_dataset(self): np.testing.assert_allclose(dataset.values, np.ones((100, 10))) assert dataset.attrs["test_attr"] == "attr" - assert dataset.attrs["units"] == "test_units" assert dataset.attrs["fill_value"] == -999 def test_dataset_with_layer(self): @@ -234,6 +234,40 @@ def test_emumerations(self): assert "flag_meanings" in attributes assert attributes["flag_meanings"] == ["False","True"] + def 
test_units_from_file(self): + """Test units extraction from NetCDF file.""" + dataset = self.fh.get_dataset(make_dataid(name="test_units_from_file", resolution=2000), + {"name": "test_one_layer", + "file_key": "test_one_layer", + "file_type": "test_file_type"}) + assert dataset.attrs["units"] == "test_units" + + def test_unit_from_file(self): + """Test that a unit stored with attribute `unit` in the file is assigned to the `units` attribute.""" + dataset = self.fh.get_dataset(make_dataid(name="test_unit_from_file", resolution=2000), + {"name": "test_two_layers", + "file_key": "test_two_layers", "layer": 1, + "file_type": "test_file_type"}) + assert dataset.attrs["units"] == "test_unit" + + def test_units_from_yaml(self): + """Test units extraction from yaml file.""" + dataset = self.fh.get_dataset(make_dataid(name="test_units_from_yaml", resolution=2000), + {"name": "test_one_layer", + "units": "test_unit_from_yaml", + "file_key": "test_one_layer", + "file_type": "test_file_type"}) + assert dataset.attrs["units"] == "test_unit_from_yaml" + + def test_units_none_conversion(self): + """Test that units stored as 'none' are converted to None.""" + dataset = self.fh.get_dataset(make_dataid(name="test_units_none_conversion", resolution=2000), + {"name": "test_one_layer", + "units": "none", + "file_key": "test_one_layer", + "file_type": "test_file_type"}) + assert dataset.attrs["units"] is None + class TestFciL2NCSegmentFileHandler(unittest.TestCase): """Test the FciL2NCSegmentFileHandler reader.""" From d75b9e52e49b43e099bf7439caacb5e7798117fe Mon Sep 17 00:00:00 2001 From: Johan Strandgren <42137969+strandgren@users.noreply.github.com> Date: Wed, 10 Jan 2024 12:22:08 +0100 Subject: [PATCH 1050/1416] Fix failing test --- satpy/tests/reader_tests/test_fci_l2_nc.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_fci_l2_nc.py b/satpy/tests/reader_tests/test_fci_l2_nc.py index d59c472d7b..3e77c1d51e 100644 --- a/satpy/tests/reader_tests/test_fci_l2_nc.py +++ b/satpy/tests/reader_tests/test_fci_l2_nc.py @@ -184,7 +184,6 @@ def test_dataset_with_layer(self): "fill_value": -999, "file_type": "test_file_type"}) np.testing.assert_allclose(dataset.values, 2 * np.ones((100, 10))) - assert dataset.attrs["units"] is None assert dataset.attrs["spacecraft_name"] == "test_platform" def test_dataset_with_invalid_filekey(self): From 7afbf0e32b57a0b5563a68585b2762c6e6ad1c3b Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Fri, 12 Jan 2024 14:27:08 +0100 Subject: [PATCH 1051/1416] Change order of yaml-keys. --- satpy/etc/readers/fci_l2_nc.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index 525ffa11ec..eada732555 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -79,10 +79,10 @@ datasets: # CLM cloud_state: name: cloud_state + standard_name: cloud_mask_classification resolution: 2000 file_type: nc_fci_clm file_key: cloud_state - standard_name: cloud_mask_classification fill_value: -127 import_enum_information: True From fa7ff057889083e6c6650da4835f6a20e25eec90 Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Fri, 12 Jan 2024 16:20:37 +0100 Subject: [PATCH 1052/1416] Refactor order of yaml-keys in fci_l2_nc.yaml.
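This commit is a pure presentation change: every dataset entry keeps exactly the same key/value pairs, but name, standard_name and (where present) long_name now come first, and the entries are regrouped under labelled per-product sections (AMV, CLM, CT, CTTH, FIR, GII, OCA, OLR, CRM, CLM Test, ASR). As a minimal sketch of the convention, using the quality_illumination_clm entry from this diff (remaining keys such as fill_value and import_enum_information are left untouched):

    # before
    quality_illumination_clm:
      name: quality_illumination_clm
      resolution: 2000
      file_type: nc_fci_clm
      file_key: quality_illumination
      standard_name: status_flag

    # after
    quality_illumination_clm:
      name: quality_illumination_clm
      standard_name: status_flag
      resolution: 2000
      file_type: nc_fci_clm
      file_key: quality_illumination

Putting the identifying metadata at the top of each entry makes the very long datasets section easier to scan and gives new entries a consistent pattern to follow.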
--- satpy/etc/readers/fci_l2_nc.yaml | 1092 +++++++++++++++--------------- 1 file changed, 551 insertions(+), 541 deletions(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index eada732555..dfa8b14ad4 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -74,9 +74,269 @@ file_types: file_patterns: - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-AMV-{channel}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' + datasets: -# CLM +# AMV Intermediate - Atmospheric Motion Vectors Intermediate + intm_latitude: + name: intm_latitude + standard_name: latitude + file_type: nc_fci_amvi + file_key: intm_latitude + + intm_longitude: + name: intm_longitude + standard_name: longitude + file_type: nc_fci_amvi + file_key: intm_longitude + + intm_speed: + name: intm_speed + standard_name: wind_speed + file_type: nc_fci_amvi + file_key: intm_speed + coordinates: + - intm_longitude + - intm_latitude + + intm_u_component: + name: intm_u_component + standard_name: wind_speed_horizontal_component + file_type: nc_fci_amvi + file_key: intm_u_component + coordinates: + - intm_longitude + - intm_latitude + + intm_v_component: + name: intm_v_component + standard_name: wind_speed_vertical_component + file_type: nc_fci_amvi + file_key: intm_v_component + coordinates: + - intm_longitude + - intm_latitude + + intm_direction: + name: intm_direction + standard_name: wind_to_direction + file_type: nc_fci_amvi + file_key: intm_direction + coordinates: + - intm_longitude + - intm_latitude + + intm_pressure: + name: intm_pressure + standard_name: wind_pressure + file_type: nc_fci_amvi + file_key: intm_pressure + coordinates: + - intm_longitude + - intm_latitude + + intm_temperature: + name: intm_temperature + standard_name: wind_temperature + file_type: nc_fci_amvi + file_key: intm_temperature + coordinates: + - intm_longitude + - intm_latitude + + intm_target_type: + name: intm_target_type + standard_name: wind_target_type + file_type: nc_fci_amvi + file_key: target_type + coordinates: + - intm_longitude + - intm_latitude + + intm_wind_method: + name: intm_wind_method + standard_name: wind_wind_method + file_type: nc_fci_amvi + file_key: wind_method + coordinates: + - intm_longitude + - intm_latitude + + +# AMV Final - Atmospheric Motion Vectors Final + channel_id: + name: channel_id + standard_name: channel_id + file_type: nc_fci_amv + file_key: channel_id + + amv_latitude: + name: latitude + standard_name: latitude + file_type: nc_fci_amv + file_key: latitude + + amv_longitude: + name: longitude + standard_name: longitude + file_type: nc_fci_amv + file_key: longitude + + speed: + name: speed + standard_name: wind_speed + file_type: nc_fci_amv + file_key: speed + coordinates: + - longitude + - latitude + + speed_u_component: + name: speed_u_component + standard_name: wind_speed_horizontal_component + file_type: nc_fci_amv + file_key: speed_u_component + coordinates: + - longitude + - latitude + + speed_v_component: + name: speed_v_component + standard_name: wind_speed_vertical_component + file_type: nc_fci_amv + file_key: speed_v_component + coordinates: + - longitude + - latitude + + direction: + name: direction + standard_name: 
wind_to_direction + file_type: nc_fci_amv + file_key: direction + coordinates: + - longitude + - latitude + + pressure: + name: pressure + standard_name: wind_pressure + file_type: nc_fci_amv + file_key: pressure + coordinates: + - longitude + - latitude + + temperature: + name: temperature + standard_name: wind_temperature + file_type: nc_fci_amv + file_key: temperature + coordinates: + - longitude + - latitude + + target_type: + name: target_type + standard_name: wind_target_type + file_type: nc_fci_amv + file_key: target_type + coordinates: + - longitude + - latitude + + wind_method: + name: wind_method + standard_name: wind_wind_method + file_type: nc_fci_amv + file_key: wind_method + coordinates: + - longitude + - latitude + + fcst_u: + name: fcst_u + standard_name: wind_forecast_u_component + file_type: nc_fci_amv + file_key: forecast_u_component + coordinates: + - longitude + - latitude + + fcst_v: + name: fcst_v + standard_name: wind_forecast_v_component + file_type: nc_fci_amv + file_key: forecast_v_component + coordinates: + - longitude + - latitude + + best_fit_pres: + name: best_fit_pres + standard_name: wind_best_fit_pressure + file_type: nc_fci_amv + file_key: best_fit_pressure + coordinates: + - longitude + - latitude + + best_fit_u: + name: best_fit_u + standard_name: wind_best_fit_u_component + file_type: nc_fci_amv + file_key: best_fit_u_component + coordinates: + - longitude + - latitude + + best_fit_v: + name: best_fit_v + standard_name: wind_best_fit_v_component + file_type: nc_fci_amv + file_key: best_fit_v_component + coordinates: + - longitude + - latitude + + qi: + name: qi + standard_name: wind_overall_reliability + file_type: nc_fci_amv + file_key: overall_reliability + coordinates: + - longitude + - latitude + + qi_excl_fcst: + name: qi_excl_fcst + standard_name: wind_overall_reliability_exc_forecast + file_type: nc_fci_amv + file_key: overall_reliability_exc_forecast + coordinates: + - longitude + - latitude + + product_quality: + name: product_quality + standard_name: product_quality + file_type: nc_fci_amv + file_key: product_quality + + product_completeness: + name: product_completeness + standard_name: product_completeness + file_type: nc_fci_amv + file_key: product_completeness + + product_timeliness: + name: product_timeliness + standard_name: product_timeliness + file_type: nc_fci_amv + file_key: product_timeliness + + +# CLM - Cloud Mask cloud_state: name: cloud_state standard_name: cloud_mask_classification @@ -88,1237 +348,1244 @@ datasets: quality_illumination_clm: name: quality_illumination_clm + standard_name: status_flag resolution: 2000 file_type: nc_fci_clm file_key: quality_illumination - standard_name: status_flag fill_value: -127 import_enum_information: True quality_nwp_parameters_clm: name: quality_nwp_parameters_clm + standard_name: status_flag resolution: 2000 file_type: nc_fci_clm file_key: quality_nwp_parameters - standard_name: status_flag fill_value: -127 import_enum_information: True quality_MTG_parameters_clm: name: quality_MTG_parameters_clm + standard_name: status_flag resolution: 2000 file_type: nc_fci_clm file_key: quality_MTG_parameters - standard_name: status_flag fill_value: -127 import_enum_information: True quality_overall_processing_clm: name: quality_overall_processing_clm + standard_name: quality_flag resolution: 2000 file_type: nc_fci_clm file_key: quality_overall_processing - standard_name: quality_flag fill_value: -127 import_enum_information: True product_quality_clm: name: product_quality_clm + standard_name: 
product_quality file_type: nc_fci_clm file_key: product_quality - standard_name: product_quality product_completeness_clm: name: product_completeness_clm + standard_name: product_completeness file_type: nc_fci_clm file_key: product_completeness - standard_name: product_completeness product_timeliness_clm: name: product_timeliness_clm + standard_name: product_timeliness file_type: nc_fci_clm file_key: product_timeliness - standard_name: product_timeliness -# FCI CT L2 + +# CT - Cloud Type cloud_phase: name: cloud_phase + standard_name: cloud_phase_classification resolution: 2000 file_type: nc_fci_ct file_key: cloud_phase - standar_name: cloud_phase_classification fill_value: -127 import_enum_information: True cloud_type: name: cloud_type + standard_name: cloud_type_classification resolution: 2000 file_type: nc_fci_ct file_key: cloud_type - standard_name: cloud_type_classification fill_value: -127 import_enum_information: True quality_illumination_ct: name: quality_illumination_ct + standard_name: status_flag resolution: 2000 file_type: nc_fci_ct file_key: quality_illumination - standard_name: status_flag fill_value: -127 import_enum_information: True quality_nwp_parameters_ct: name: quality_nwp_parameters_ct + standard_name: status_flag resolution: 2000 file_type: nc_fci_ct file_key: quality_nwp_parameters - standard_name: status_flag fill_value: -127 import_enum_information: True quality_MTG_parameters_ct: name: quality_MTG_parameters_ct + standard_name: status_flag resolution: 2000 file_type: nc_fci_ct file_key: quality_MTG_parameters - standard_name: status_flag fill_value: -127 import_enum_information: True quality_overall_processing_ct: name: quality_overall_processing_ct + standard_name: quality_flag resolution: 2000 file_type: nc_fci_ct file_key: quality_overall_processing - standard_name: quality_flag fill_value: -127 import_enum_information: True product_quality_ct: name: product_quality_ct + standard_name: product_quality file_type: nc_fci_ct file_key: product_quality - standard_name: product_quality product_completeness_ct: name: product_completeness_ct + standard_name: product_completeness file_type: nc_fci_ct file_key: product_completeness - standard_name: product_completeness product_timeliness_ct: name: product_timeliness_ct + standard_name: product_timeliness file_type: nc_fci_ct file_key: product_timeliness - standard_name: product_timeliness - # FCI CTTH Product + + # CTTH - Cloud Top Temperature and Height cloud_top_aviation_height: name: cloud_top_aviation_height + standard_name: height_at_cloud_top_for_aviation resolution: 2000 file_type: nc_fci_ctth file_key: cloud_top_aviation_height - standard_name: height_at_cloud_top_for_aviation cloud_top_height: name: cloud_top_height + standard_name: height_at_cloud_top resolution: 2000 file_type: nc_fci_ctth file_key: cloud_top_height - standard_name: height_at_cloud_top cloud_top_pressure: name: cloud_top_pressure + standard_name: air_pressure_at_cloud_top resolution: 2000 file_type: nc_fci_ctth file_key: cloud_top_pressure - standard_name: air_pressure_at_cloud_top cloud_top_temperature: name: cloud_top_temperature + standard_name: air_temperature_at_cloud_top resolution: 2000 file_type: nc_fci_ctth file_key: cloud_top_temperature - standard_name: air_temperature_at_cloud_top effective_cloudiness: name: effective_cloudiness + standard_name: effective_cloud_cover resolution: 2000 file_type: nc_fci_ctth file_key: effective_cloudiness - standard_name: effective_cloud_cover quality_status_ctth: name: quality_status_ctth + 
standard_name: status_flag resolution: 2000 file_type: nc_fci_ctth file_key: quality_status - standard_name: status_flag fill_value: -127 import_enum_information: True quality_rtm_ctth: name: quality_rtm_ctth + standard_name: status_flag resolution: 2000 file_type: nc_fci_ctth file_key: quality_rtm - standard_name: status_flag fill_value: -127 import_enum_information: True quality_method_ctth: name: quality_method_ctth + standard_name: status_flag resolution: 2000 file_type: nc_fci_ctth file_key: quality_method - standard_name: status_flag fill_value: -127 import_enum_information: True quality_nwp_parameters_ctth: name: quality_nwp_parameters_ctth + standard_name: status_flag resolution: 2000 file_type: nc_fci_ctth file_key: quality_nwp_parameters - standard_name: status_flag fill_value: -127 import_enum_information: True quality_MTG_parameters_ctth: name: quality_MTG_parameters_ctth + standard_name: status_flag resolution: 2000 file_type: nc_fci_ctth file_key: quality_MTG_parameters - standard_name: status_flag fill_value: -127 import_enum_information: True quality_overall_processing_ctth: name: quality_overall_processing_ctth + standard_name: quality_flag resolution: 2000 file_type: nc_fci_ctth file_key: quality_overall_processing - standard_name: quality_flag fill_value: -127 import_enum_information: True quality_overall_processing_aviation_ctth: name: quality_overall_processing_aviation_ctth + standard_name: quality_flag resolution: 2000 file_type: nc_fci_ctth file_key: quality_overall_processing_aviation - standard_name: quality_flag fill_value: -127 import_enum_information: True product_quality_ctth: name: product_quality_ctth + standard_name: product_quality file_type: nc_fci_ctth file_key: product_quality - standard_name: product_quality product_completeness_ctth: name: product_completeness_ctth + standard_name: product_completeness file_type: nc_fci_ctth file_key: product_completeness - standard_name: product_completeness product_timeliness_ctth: name: product_timeliness_ctth + standard_name: product_timeliness file_type: nc_fci_ctth file_key: product_timeliness + + + # FIR - Active Fire Monitoring + fire_probability: + name: fire_probability + standard_name: fire_probability + resolution: 2000 + file_type: nc_fci_fir + file_key: fire_probability + + fire_result: + name: fire_result + standard_name: active_fire_classification + resolution: 2000 + file_type: nc_fci_fir + file_key: fire_result + fill_value: -127 + import_enum_information: True + + product_quality_fir: + name: product_quality_fir + standard_name: product_quality + file_type: nc_fci_fir + file_key: product_quality + + product_completeness_fir: + name: product_completeness_fir + standard_name: product_completeness + file_type: nc_fci_fir + file_key: product_completeness + + product_timeliness_fir: + name: product_timeliness_fir + standard_name: product_timeliness + file_type: nc_fci_fir + file_key: product_timeliness + + + # GII - Global Instability Index + k_index: + name: k_index + standard_name: atmosphere_stability_k_index + resolution: 6000 + file_type: nc_fci_gii + file_key: k_index + coordinates: + - longitude + - latitude + + lifted_index: + name: lifted_index + standard_name: atmosphere_stability_lifted_index + resolution: 6000 + file_type: nc_fci_gii + file_key: lifted_index + coordinates: + - longitude + - latitude + + prec_water_high: + name: prec_water_high + standard_name: atmosphere_mass_content_of_water_vapor + resolution: 6000 + file_type: nc_fci_gii + file_key: prec_water_high + coordinates: + - 
longitude + - latitude + + prec_water_low: + name: prec_water_low + standard_name: atmosphere_mass_content_of_water_vapor + resolution: 6000 + file_type: nc_fci_gii + file_key: prec_water_low + coordinates: + - longitude + - latitude + + prec_water_mid: + name: prec_water_mid + standard_name: atmosphere_mass_content_of_water_vapor + resolution: 6000 + file_type: nc_fci_gii + file_key: prec_water_mid + coordinates: + - longitude + - latitude + + prec_water_total: + name: prec_water_total + standard_name: atmosphere_mass_content_of_water_vapor + resolution: 6000 + file_type: nc_fci_gii + file_key: prec_water_total + coordinates: + - longitude + - latitude + + percent_cloud_free_gii: + name: percent_cloud_free_gii + long_name: Percentage of Cloud Free Pixels Processed in FoR + standard_name: cloud_free_area_fraction + resolution: 6000 + file_type: nc_fci_gii + file_key: percent_cloud_free + units: '%' + coordinates: + - longitude + - latitude + + number_of_iterations_gii: + name: number_of_iterations_gii + standard_name: number_of_iterations + resolution: 6000 + file_type: nc_fci_gii + file_key: number_of_iterations + coordinates: + - longitude + - latitude + + product_quality_gii: + name: product_quality_gii + standard_name: product_quality + file_type: nc_fci_gii + file_key: product_quality + + product_completeness_gii: + name: product_completeness_gii + standard_name: product_completeness + file_type: nc_fci_gii + file_key: product_completeness + + product_timeliness_gii: + name: product_timeliness_gii standard_name: product_timeliness + file_type: nc_fci_gii + file_key: product_timeliness - # OCA + + # OCA - Optimal Cloud Analysis retrieved_cloud_phase: name: retrieved_cloud_phase + standard_name: thermodynamic_phase_of_cloud_particles_classification resolution: 2000 file_type: nc_fci_oca file_key: retrieved_cloud_phase - standard_name: thermodynamic_phase_of_cloud_particles_classification fill_value: -127 import_enum_information: True retrieved_cloud_optical_thickness: name: retrieved_cloud_optical_thickness + standard_name: atmosphere_optical_thickness_due_to_cloud resolution: 2000 file_type: nc_fci_oca file_key: retrieved_cloud_optical_thickness - standard_name: atmosphere_optical_thickness_due_to_cloud retrieved_cloud_optical_thickness_upper_layer: name: retrieved_cloud_optical_thickness_upper_layer + long_name: Cloud Optical Thickness (referenced to 0.55 µm and in log10(COT)) for Upper Layer + standard_name: atmosphere_optical_thickness_due_to_cloud resolution: 2000 file_type: nc_fci_oca file_key: retrieved_cloud_optical_thickness layer: 0 - long_name: Cloud Optical Thickness (referenced to 0.55 µm and in log10(COT)) for Upper Layer - standard_name: atmosphere_optical_thickness_due_to_cloud retrieval_error_cloud_optical_thickness_upper_layer: name: retrieval_error_cloud_optical_thickness_upper_layer + long_name: Cloud Optical Thickness Error (error in log10(COT)) for Upper Layer + standard_name: atmosphere_optical_thickness_due_to_cloud standard_error resolution: 2000 file_type: nc_fci_oca file_key: retrieval_error_cloud_optical_thickness layer: 0 - long_name: Cloud Optical Thickness Error (error in log10(COT)) for Upper Layer - standard_name: atmosphere_optical_thickness_due_to_cloud standard_error retrieved_cloud_optical_thickness_lower_layer: name: retrieved_cloud_optical_thickness_lower_layer + long_name: Cloud Optical Thickness (referenced to 0.55 µm and in log10(COT)) for Lower Layer + standard_name: atmosphere_optical_thickness_due_to_cloud resolution: 2000 file_type: 
nc_fci_oca file_key: retrieved_cloud_optical_thickness layer: 1 - long_name: Cloud Optical Thickness (referenced to 0.55 µm and in log10(COT)) for Lower Layer - standard_name: atmosphere_optical_thickness_due_to_cloud retrieval_error_cloud_optical_thickness_lower_layer: name: retrieval_error_cloud_optical_thickness_lower_layer + long_name: Cloud Optical Thickness Error (error in log10(COT)) for Lower Layer + standard_name: atmosphere_optical_thickness_due_to_cloud standard_error resolution: 2000 file_type: nc_fci_oca file_key: retrieval_error_cloud_optical_thickness layer: 1 - long_name: Cloud Optical Thickness Error (error in log10(COT)) for Lower Layer - standard_name: atmosphere_optical_thickness_due_to_cloud standard_error retrieved_cloud_particle_effective_radius: name: retrieved_cloud_particle_effective_radius + standard_name: effective_radius_of_cloud_particles_at_cloud_top resolution: 2000 file_type: nc_fci_oca file_key: retrieved_cloud_particle_effective_radius - standard_name: effective_radius_of_cloud_particles_at_cloud_top retrieval_error_cloud_particle_effective_radius: name: retrieval_error_cloud_particle_effective_radius + standard_name: effective_radius_of_cloud_particles_at_cloud_top standard_error resolution: 2000 file_type: nc_fci_oca file_key: retrieval_error_cloud_particle_effective_radius - standard_name: effective_radius_of_cloud_particles_at_cloud_top standard_error retrieved_cloud_top_pressure_upper_layer: name: retrieved_cloud_top_pressure_upper_layer + long_name: Cloud Top Pressure for Upper Layer + standard_name: air_pressure_at_cloud_top resolution: 2000 file_type: nc_fci_oca file_key: retrieved_cloud_top_pressure layer: 0 - long_name: Cloud Top Pressure for Upper Layer - standard_name: air_pressure_at_cloud_top retrieval_error_cloud_top_pressure_upper_layer: name: retrieval_error_cloud_top_pressure_upper_layer + long_name: Cloud Top Pressure Error for Upper Layer + standard_name: air_pressure_at_cloud_top standard_error resolution: 2000 file_type: nc_fci_oca file_key: retrieval_error_cloud_top_pressure layer: 0 - long_name: Cloud Top Pressure Error for Upper Layer - standard_name: air_pressure_at_cloud_top standard_error retrieved_cloud_top_pressure_lower_layer: name: retrieved_cloud_top_pressure_lower_layer + long_name: Cloud Top Pressure for Lower Layer + standard_name: air_pressure_at_cloud_top resolution: 2000 file_type: nc_fci_oca file_key: retrieved_cloud_top_pressure layer: 1 - long_name: Cloud Top Pressure for Lower Layer - standard_name: air_pressure_at_cloud_top retrieval_error_cloud_top_pressure_lower_layer: name: retrieval_error_cloud_top_pressure_lower_layer + long_name: Cloud Top Pressure Error for Lower Layer + standard_name: air_pressure_at_cloud_top standard_error resolution: 2000 file_type: nc_fci_oca file_key: retrieval_error_cloud_top_pressure layer: 1 - long_name: Cloud Top Pressure Error for Lower Layer - standard_name: air_pressure_at_cloud_top standard_error retrieved_cloud_top_temperature: name: retrieved_cloud_top_temperature + standard_name: air_temperature_at_cloud_top resolution: 2000 file_type: nc_fci_oca file_key: retrieved_cloud_top_temperature - standard_name: air_temperature_at_cloud_top retrieved_cloud_top_height: name: retrieved_cloud_top_height + standard_name: height_at_cloud_top resolution: 2000 file_type: nc_fci_oca file_key: retrieved_cloud_top_height - standard_name: height_at_cloud_top quality_jmeas: name: quality_jmeas + standard_name: cost_function_part_due_to_measurements resolution: 2000 file_type: nc_fci_oca 
file_key: quality_jmeas - standard_name: cost_function_part_due_to_measurements product_quality_oca: name: product_quality_oca + standard_name: product_quality file_type: nc_fci_oca file_key: product_quality - standard_name: product_quality product_completeness_oca: name: product_completeness_oca + standard_name: product_completeness file_type: nc_fci_oca file_key: product_completeness - standard_name: product_completeness product_timeliness_oca: name: product_timeliness_oca + standard_name: product_timeliness file_type: nc_fci_oca file_key: product_timeliness - standard_name: product_timeliness - - # FIR - fire_probability: - name: fire_probability - resolution: 2000 - file_type: nc_fci_fir - file_key: fire_probability - standard_name: fire_probability - - fire_result: - name: fire_result - resolution: 2000 - file_type: nc_fci_fir - file_key: fire_result - standard_name: active_fire_classification - fill_value: -127 - import_enum_information: True - - product_quality_fir: - name: product_quality_fir - file_type: nc_fci_fir - file_key: product_quality - standard_name: product_quality - - product_completeness_fir: - name: product_completeness_fir - file_type: nc_fci_fir - file_key: product_completeness - standard_name: product_completeness - product_timeliness_fir: - name: product_timeliness_fir - file_type: nc_fci_fir - file_key: product_timeliness - standard_name: product_timeliness - # OLR + # OLR - Outgoing Longwave Radiation olr: name: olr + standard_name: outgoing_longwave_radiation resolution: 2000 file_type: nc_fci_olr file_key: olr_value - standard_name: outgoing_longwave_radiation cloud_type_olr: name: cloud_type_olr + standard_name: cloud_type_classification resolution: 2000 file_type: nc_fci_olr file_key: cloud_type - standard_name: cloud_type_classification fill_value: -127 import_enum_information: True quality_overall_processing_olr: name: quality_overall_processing_olr + standard_name: quality_flag resolution: 2000 file_type: nc_fci_olr file_key: quality_overall_processing - standard_name: quality_flag fill_value: -127 import_enum_information: True product_quality_olr: name: product_quality_olr + standard_name: product_quality file_type: nc_fci_olr file_key: product_quality - standard_name: product_quality product_completeness_olr: name: product_completeness_olr + standard_name: product_completeness file_type: nc_fci_olr file_key: product_completeness - standard_name: product_completeness product_timeliness_olr: name: product_timeliness_olr + standard_name: product_timeliness file_type: nc_fci_olr file_key: product_timeliness - standard_name: product_timeliness - # CRM + + # CRM - Clear-Sky Reflectance Maps crm: name: crm + long_name: TOA Bidirectional Reflectance (temporal average) + standard_name: toa_bidirectional_reflectance resolution: 1000 file_type: nc_fci_crm file_key: mean_clear_sky_reflectance - standard_name: toa_bidirectional_reflectance - long_name: TOA Bidirectional Reflectance (temporal average) units: '%' crm_vis04: name: crm_vis04 + long_name: TOA Bidirectional Reflectance at 0.44um (temporal average) + standard_name: toa_bidirectional_reflectance resolution: 1000 wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_crm file_key: mean_clear_sky_reflectance - standard_name: toa_bidirectional_reflectance - long_name: TOA Bidirectional Reflectance at 0.44um (temporal average) units: '%' vis_channel_id: 0 crm_vis05: name: crm_vis05 + long_name: TOA Bidirectional Reflectance at 0.51um (temporal average) + standard_name: toa_bidirectional_reflectance resolution: 
1000 wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_crm file_key: mean_clear_sky_reflectance - standard_name: toa_bidirectional_reflectance - long_name: TOA Bidirectional Reflectance at 0.51um (temporal average) units: '%' vis_channel_id: 1 crm_vis06: name: crm_vis06 + long_name: TOA Bidirectional Reflectance at 0.64um (temporal average) + standard_name: toa_bidirectional_reflectance resolution: 1000 wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_crm file_key: mean_clear_sky_reflectance - standard_name: toa_bidirectional_reflectance - long_name: TOA Bidirectional Reflectance at 0.64um (temporal average) units: '%' vis_channel_id: 2 crm_vis08: name: crm_vis08 + long_name: TOA Bidirectional Reflectance at 0.86um (temporal average) + standard_name: toa_bidirectional_reflectance resolution: 1000 wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_crm file_key: mean_clear_sky_reflectance - standard_name: toa_bidirectional_reflectance - long_name: TOA Bidirectional Reflectance at 0.86um (temporal average) units: '%' vis_channel_id: 3 crm_vis09: name: crm_vis09 + long_name: TOA Bidirectional Reflectance at 0.91um (temporal average) + standard_name: toa_bidirectional_reflectance resolution: 1000 wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_crm file_key: mean_clear_sky_reflectance - standard_name: toa_bidirectional_reflectance - long_name: TOA Bidirectional Reflectance at 0.91um (temporal average) units: '%' vis_channel_id: 4 crm_nir13: name: crm_nir13 + long_name: TOA Bidirectional Reflectance at 1.38um (temporal average) + standard_name: toa_bidirectional_reflectance resolution: 1000 wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_crm file_key: mean_clear_sky_reflectance - standard_name: toa_bidirectional_reflectance - long_name: TOA Bidirectional Reflectance at 1.38um (temporal average) units: '%' vis_channel_id: 5 crm_nir16: name: crm_nir16 + long_name: TOA Bidirectional Reflectance at 1.61um (temporal average) + standard_name: toa_bidirectional_reflectance resolution: 1000 wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_crm file_key: mean_clear_sky_reflectance - standard_name: toa_bidirectional_reflectance - long_name: TOA Bidirectional Reflectance at 1.61um (temporal average) units: '%' vis_channel_id: 6 crm_nir22: name: crm_nir22 + long_name: TOA Bidirectional Reflectance at 2.25um (temporal average) + standard_name: toa_bidirectional_reflectance resolution: 1000 wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_crm file_key: mean_clear_sky_reflectance - standard_name: toa_bidirectional_reflectance - long_name: TOA Bidirectional Reflectance at 2.25um (temporal average) units: '%' vis_channel_id: 7 mean_sza: name: mean_sza + long_name: Solar Zenith Angle (temporal average) + standard_name: solar_zenith_angle resolution: 1000 file_type: nc_fci_crm file_key: mean_solar_zenith - standard_name: solar_zenith_angle - long_name: Solar Zenith Angle (temporal average) mean_rel_azi: name: mean_rel_azi + long_name: Relative Solar Satellite Azimuth Angle (temporal average) + standard_name: relative_sun_sensor_azimuth_angle resolution: 1000 file_type: nc_fci_crm file_key: mean_rel_solar_sat_azimuth - standard_name: relative_sun_sensor_azimuth_angle - long_name: Relative Solar Satellite Azimuth Angle (temporal average) n_acc: name: n_acc + standard_name: number_of_accumulations resolution: 1000 file_type: nc_fci_crm file_key: number_of_accumulations - standard_name: number_of_accumulations historical_data: name: historical_data + standard_name: status_flag resolution: 1000 file_type: nc_fci_crm 
file_key: historical_data - standard_name: status_flag import_enum_information: True product_quality_crm: name: product_quality_crm + standard_name: product_quality file_type: nc_fci_crm file_key: product_quality - standard_name: product_quality product_completeness_crm: name: product_completeness_crm + standard_name: product_completeness file_type: nc_fci_crm file_key: product_completeness - standard_name: product_completeness product_timeliness_crm: name: product_timeliness_crm + standard_name: product_timeliness file_type: nc_fci_crm file_key: product_timeliness - standard_name: product_timeliness # LAT/LON FOR SEGMENTED PRODUCTS latitude: name: latitude + standard_name: latitude file_key: latitude resolution: [6000, 6000, 32000] file_type: [ nc_fci_gii, nc_fci_toz, nc_fci_asr ] - standard_name: latitude units: degree_north longitude: name: longitude + standard_name: longitude file_key: longitude resolution: [6000, 6000, 32000] file_type: [ nc_fci_gii, nc_fci_toz, nc_fci_asr ] - standard_name: longitude units: degree_east - # GII - k_index: - name: k_index - resolution: 6000 - file_type: nc_fci_gii - file_key: k_index - coordinates: - - longitude - - latitude - standard_name: atmosphere_stability_k_index - - lifted_index: - name: lifted_index - resolution: 6000 - file_type: nc_fci_gii - file_key: lifted_index - coordinates: - - longitude - - latitude - standard_name: atmosphere_stability_lifted_index - - prec_water_high: - name: prec_water_high - resolution: 6000 - file_type: nc_fci_gii - file_key: prec_water_high - coordinates: - - longitude - - latitude - standard_name: atmosphere_mass_content_of_water_vapor - - prec_water_low: - name: prec_water_low - resolution: 6000 - file_type: nc_fci_gii - file_key: prec_water_low - coordinates: - - longitude - - latitude - standard_name: atmosphere_mass_content_of_water_vapor - - prec_water_mid: - name: prec_water_mid - resolution: 6000 - file_type: nc_fci_gii - file_key: prec_water_mid - coordinates: - - longitude - - latitude - standard_name: atmosphere_mass_content_of_water_vapor - - prec_water_total: - name: prec_water_total - resolution: 6000 - file_type: nc_fci_gii - file_key: prec_water_total - coordinates: - - longitude - - latitude - standard_name: atmosphere_mass_content_of_water_vapor - - percent_cloud_free_gii: - name: percent_cloud_free_gii - resolution: 6000 - file_type: nc_fci_gii - file_key: percent_cloud_free - units: '%' - coordinates: - - longitude - - latitude - long_name: Percentage of Cloud Free Pixels Processed in FoR - standard_name: cloud_free_area_fraction - - number_of_iterations_gii: - name: number_of_iterations_gii - resolution: 6000 - file_type: nc_fci_gii - file_key: number_of_iterations - coordinates: - - longitude - - latitude - standard_name: number_of_iterations - - product_quality_gii: - name: product_quality_gii - file_type: nc_fci_gii - file_key: product_quality - standard_name: product_quality - - product_completeness_gii: - name: product_completeness_gii - file_type: nc_fci_gii - file_key: product_completeness - standard_name: product_completeness - - product_timeliness_gii: - name: product_timeliness_gii - file_type: nc_fci_gii - file_key: product_timeliness - standard_name: product_timeliness - - # CLM Test + # CLM Test - Cloud Mask Test cloud_test_sit1_flag: name: cloud_test_sit1_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag extract_byte: 0 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] - standard_name: 
status_flag cloud_test_cmt1_flag: name: cloud_test_cmt1_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag extract_byte: 1 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] - standard_name: status_flag cloud_test_cmt2_flag: name: cloud_test_cmt2_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag extract_byte: 2 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] - standard_name: status_flag cloud_test_cmt3_flag: name: cloud_test_cmt3_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag extract_byte: 3 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] - standard_name: status_flag cloud_test_cmt4_flag: name: cloud_test_cmt4_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag extract_byte: 4 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] - standard_name: status_flag cloud_test_cmt5_flag: name: cloud_test_cmt5_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag extract_byte: 5 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] - standard_name: status_flag cloud_test_cmt6_flag: name: cloud_test_cmt6_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag extract_byte: 6 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] - standard_name: status_flag cloud_test_cmt7_flag: name: cloud_test_cmt7_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag extract_byte: 7 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] - standard_name: status_flag cloud_test_cmt8_flag: name: cloud_test_cmt8_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag extract_byte: 8 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] - standard_name: status_flag cloud_test_cmt9_flag: name: cloud_test_cmt9_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag extract_byte: 9 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] - standard_name: status_flag cloud_test_cmt10_flag: name: cloud_test_cmt10_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag extract_byte: 10 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] - standard_name: status_flag cloud_test_cmt11_flag: name: cloud_test_cmt11_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag extract_byte: 11 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] - standard_name: status_flag cloud_test_cmt12_flag: name: cloud_test_cmt12_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag extract_byte: 12 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] - standard_name: status_flag cloud_test_cmt13_flag: name: cloud_test_cmt13_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag extract_byte: 13 flag_values: [0,1] flag_meanings: ['Test not 
carried out','Test carried out'] - standard_name: status_flag cloud_test_cmt14_flag: name: cloud_test_cmt14_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag extract_byte: 14 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] - standard_name: status_flag cloud_test_opqt_flag: name: cloud_test_opqt_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag extract_byte: 15 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] - standard_name: status_flag cloud_test_cmrt1_flag: name: cloud_test_cmrt1_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag extract_byte: 16 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] - standard_name: status_flag cloud_test_cmrt2_flag: name: cloud_test_cmrt2_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag extract_byte: 17 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] - standard_name: status_flag cloud_test_cmrt3_flag: name: cloud_test_cmrt3_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag extract_byte: 18 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] - standard_name: status_flag cloud_test_cmrt4_flag: name: cloud_test_cmrt4_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag extract_byte: 19 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] - standard_name: status_flag cloud_test_cmrt5_flag: name: cloud_test_cmrt5_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag extract_byte: 20 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] - standard_name: status_flag cloud_test_cmrt6_flag: name: cloud_test_cmrt6_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag extract_byte: 21 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] - standard_name: status_flag cloud_test_dust_flag: name: cloud_test_dust_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag extract_byte: 22 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] - standard_name: status_flag cloud_test_ash_flag: name: cloud_test_ash_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag extract_byte: 23 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] - standard_name: status_flag cloud_test_dust_ash_flag: name: cloud_test_dust_ash_flag + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag extract_byte: 24 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] - standard_name: status_flag cloud_test_sit1: name: cloud_test_sit1 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result extract_byte: 0 flag_values: [0,1] flag_meanings: ['No snow/ice detected',' Snow/ice detected'] - standard_name: status_flag cloud_test_cmt1: name: cloud_test_cmt1 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result 
extract_byte: 1 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] - standard_name: status_flag cloud_test_cmt2: name: cloud_test_cmt2 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result extract_byte: 2 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] - standard_name: status_flag cloud_test_cmt3: name: cloud_test_cmt3 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result extract_byte: 3 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] - standard_name: status_flag cloud_test_cmt4: name: cloud_test_cmt4 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result extract_byte: 4 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] - standard_name: status_flag cloud_test_cmt5: name: cloud_test_cmt5 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result extract_byte: 5 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] - standard_name: status_flag cloud_test_cmt6: name: cloud_test_cmt6 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result extract_byte: 6 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] - standard_name: status_flag cloud_test_cmt7: name: cloud_test_cmt7 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result extract_byte: 7 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] - standard_name: status_flag cloud_test_cmt8: name: cloud_test_cmt8 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result extract_byte: 8 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] - standard_name: status_flag cloud_test_cmt9: name: cloud_test_cmt9 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result extract_byte: 9 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] - standard_name: status_flag cloud_test_cmt10: name: cloud_test_cmt10 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result extract_byte: 10 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] - standard_name: status_flag cloud_test_cmt11: name: cloud_test_cmt11 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result extract_byte: 11 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] - standard_name: status_flag cloud_test_cmt12: name: cloud_test_cmt12 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result extract_byte: 12 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] - standard_name: status_flag cloud_test_cmt13: name: cloud_test_cmt13 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result extract_byte: 13 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] - standard_name: status_flag cloud_test_cmt14: name: cloud_test_cmt14 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result extract_byte: 14 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] - standard_name: status_flag 
cloud_test_opqt: name: cloud_test_opqt + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result extract_byte: 15 flag_values: [0,1] flag_meanings: ['No opaqueness detected', 'Opaqueness detected'] - standard_name: status_flag cloud_test_cmrt1: name: cloud_test_cmrt1 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result extract_byte: 16 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] - standard_name: status_flag cloud_test_cmrt2: name: cloud_test_cmrt2 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result extract_byte: 17 flag_values: [0,1] flag_meanings: ['Clear unchanged', 'Cloud detected (restored from clear sky)'] - standard_name: status_flag cloud_test_cmrt3: name: cloud_test_cmrt3 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result extract_byte: 18 flag_values: [0,1] flag_meanings: ['Clear unchanged', 'Cloud detected (restored from clear sky)'] - standard_name: status_flag cloud_test_cmrt4: name: cloud_test_cmrt4 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result extract_byte: 19 flag_values: [0,1] flag_meanings: ['Clear unchanged', 'Cloud detected (restored from clear sky)'] - standard_name: status_flag cloud_test_cmrt5: name: cloud_test_cmrt5 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result extract_byte: 20 flag_values: [0,1] flag_meanings: ['Clear sky restored', 'Cloud unchanged'] - standard_name: status_flag cloud_test_dust: name: cloud_test_dust + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result extract_byte: 21 flag_values: [0,1] flag_meanings: ['No dust detected','Dust detected'] - standard_name: status_flag cloud_test_ash: name: cloud_test_ash + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result extract_byte: 22 flag_values: [0,1] flag_meanings: ['No ash detected','Ash detected'] - standard_name: status_flag cloud_test_dust_ash: name: cloud_test_dust_ash + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result extract_byte: 23 flag_values: [0,1] flag_meanings: ['Dust detected','Ash detected'] - standard_name: status_flag cloud_test_cmrt6: name: cloud_test_cmrt6 + standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_cmrt6_test_result fill_value: -127 - standard_name: status_flag import_enum_information: True - # ASR + # ASR - All-Sky Radiances bt_max: name: bt_max resolution: 32000 @@ -3107,260 +3374,3 @@ datasets: file_type: nc_fci_asr file_key: product_timeliness standard_name: product_timeliness - -# AMV Intermediate Product - intm_latitude: - name: intm_latitude - file_type: nc_fci_amvi - file_key: intm_latitude - standard_name: latitude - - intm_longitude: - name: intm_longitude - file_type: nc_fci_amvi - file_key: intm_longitude - standard_name: longitude - - intm_speed: - name: intm_speed - file_type: nc_fci_amvi - file_key: intm_speed - standard_name: wind_speed - coordinates: - - intm_longitude - - intm_latitude - - intm_u_component: - name: intm_u_component - file_type: nc_fci_amvi - file_key: intm_u_component - standard_name: wind_speed_horizontal_component - coordinates: - - intm_longitude - - intm_latitude - - intm_v_component: - name: 
intm_v_component - file_type: nc_fci_amvi - file_key: intm_v_component - standard_name: wind_speed_vertical_component - coordinates: - - intm_longitude - - intm_latitude - - intm_direction: - name: intm_direction - file_type: nc_fci_amvi - file_key: intm_direction - standard_name: wind_to_direction - coordinates: - - intm_longitude - - intm_latitude - - intm_pressure: - name: intm_pressure - file_type: nc_fci_amvi - file_key: intm_pressure - standard_name: wind_pressure - coordinates: - - intm_longitude - - intm_latitude - - intm_temperature: - name: intm_temperature - file_type: nc_fci_amvi - file_key: intm_temperature - standard_name: wind_temperature - coordinates: - - intm_longitude - - intm_latitude - - intm_target_type: - name: intm_target_type - file_type: nc_fci_amvi - file_key: target_type - standard_name: wind_target_type - coordinates: - - intm_longitude - - intm_latitude - - intm_wind_method: - name: intm_wind_method - file_type: nc_fci_amvi - file_key: wind_method - standard_name: wind_wind_method - coordinates: - - intm_longitude - - intm_latitude - -# AMV Final Product - channel_id: - name: channel_id - file_type: nc_fci_amv - file_key: channel_id - standard_name: channel_id - - amv_latitude: - name: latitude - file_type: nc_fci_amv - file_key: latitude - standard_name: latitude - - amv_longitude: - name: longitude - file_type: nc_fci_amv - file_key: longitude - standard_name: longitude - - speed: - name: speed - file_type: nc_fci_amv - file_key: speed - standard_name: wind_speed - coordinates: - - longitude - - latitude - - speed_u_component: - name: speed_u_component - file_type: nc_fci_amv - file_key: speed_u_component - standard_name: wind_speed_horizontal_component - coordinates: - - longitude - - latitude - - speed_v_component: - name: speed_v_component - file_type: nc_fci_amv - file_key: speed_v_component - standard_name: wind_speed_vertical_component - coordinates: - - longitude - - latitude - - direction: - name: direction - file_type: nc_fci_amv - file_key: direction - standard_name: wind_to_direction - coordinates: - - longitude - - latitude - - pressure: - name: pressure - file_type: nc_fci_amv - file_key: pressure - standard_name: wind_pressure - coordinates: - - longitude - - latitude - - temperature: - name: temperature - file_type: nc_fci_amv - file_key: temperature - standard_name: wind_temperature - coordinates: - - longitude - - latitude - - target_type: - name: target_type - file_type: nc_fci_amv - file_key: target_type - standard_name: wind_target_type - coordinates: - - longitude - - latitude - - wind_method: - name: wind_method - file_type: nc_fci_amv - file_key: wind_method - standard_name: wind_wind_method - coordinates: - - longitude - - latitude - - fcst_u: - name: fcst_u - file_type: nc_fci_amv - file_key: forecast_u_component - standard_name: wind_forecast_u_component - coordinates: - - longitude - - latitude - - fcst_v: - name: fcst_v - file_type: nc_fci_amv - file_key: forecast_v_component - standard_name: wind_forecast_v_component - coordinates: - - longitude - - latitude - - best_fit_pres: - name: best_fit_pres - file_type: nc_fci_amv - file_key: best_fit_pressure - standard_name: wind_best_fit_pressure - coordinates: - - longitude - - latitude - - best_fit_u: - name: best_fit_u - file_type: nc_fci_amv - file_key: best_fit_u_component - standard_name: wind_best_fit_u_component - coordinates: - - longitude - - latitude - - best_fit_v: - name: best_fit_v - file_type: nc_fci_amv - file_key: best_fit_v_component - standard_name: 
wind_best_fit_v_component - coordinates: - - longitude - - latitude - - qi: - name: qi - file_type: nc_fci_amv - file_key: overall_reliability - standard_name: wind_overall_reliability - coordinates: - - longitude - - latitude - - qi_excl_fcst: - name: qi_excl_fcst - file_type: nc_fci_amv - file_key: overall_reliability_exc_forecast - standard_name: wind_overall_reliability_exc_forecast - coordinates: - - longitude - - latitude - - product_quality: - name: product_quality - file_type: nc_fci_amv - file_key: product_quality - standard_name: product_quality - - product_completeness: - name: product_completeness - file_type: nc_fci_amv - file_key: product_completeness - standard_name: product_completeness - - product_timeliness: - name: product_timeliness - file_type: nc_fci_amv - file_key: product_timeliness - standard_name: product_timeliness From 785abd607a18c3cf49a6f2ab704857d22b766843 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 12 Jan 2024 16:14:22 +0000 Subject: [PATCH 1053/1416] Add support for the MERSI-RM instrument on FY-3G --- satpy/etc/composites/mersi-rm.yaml | 89 +++++++++ satpy/etc/readers/mersi_rm_l1b.yaml | 275 ++++++++++++++++++++++++++++ satpy/readers/mersi_l1b.py | 18 +- 3 files changed, 379 insertions(+), 3 deletions(-) create mode 100644 satpy/etc/composites/mersi-rm.yaml create mode 100644 satpy/etc/readers/mersi_rm_l1b.yaml diff --git a/satpy/etc/composites/mersi-rm.yaml b/satpy/etc/composites/mersi-rm.yaml new file mode 100644 index 0000000000..ab0317b62f --- /dev/null +++ b/satpy/etc/composites/mersi-rm.yaml @@ -0,0 +1,89 @@ +sensor_name: visir/mersi-rm + +modifiers: + rayleigh_corrected: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: rayleigh_only + prerequisites: + - name: '1' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + sunz_corrected: + modifier: !!python/name:satpy.modifiers.SunZenithCorrector + prerequisites: + - solar_zenith_angle + + nir_reflectance: + modifier: !!python/name:satpy.modifiers.NIRReflectance + prerequisites: + - name: '7' + optional_prerequisites: + - solar_zenith_angle + + +composites: + natural_color: + compositor: !!python/name:satpy.composites.RatioSharpenedRGB + prerequisites: + - name: '5' + modifiers: [sunz_corrected] + - name: '3' + modifiers: [sunz_corrected] + - name: '1' + modifiers: [sunz_corrected] + standard_name: natural_color + + overview_raw: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: '1' + - name: '2' + - name: '7' + standard_name: overview + + overview: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: '1' + modifiers: [sunz_corrected] + - name: '2' + modifiers: [sunz_corrected] + - name: '7' + standard_name: overview + + cloudtop: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: '7' + - name: '8' + - name: '9' + standard_name: cloudtop + + day_microphysics: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: '2' + modifiers: [sunz_corrected] + - name: '7' + modifiers: [nir_reflectance] + - name: '8' + standard_name: day_microphysics + + night_fog: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: '8' + - name: '7' + - compositor: 
!!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: '7' + - name: '6' + - name: '7' + standard_name: night_fog diff --git a/satpy/etc/readers/mersi_rm_l1b.yaml b/satpy/etc/readers/mersi_rm_l1b.yaml new file mode 100644 index 0000000000..fa70ad57a5 --- /dev/null +++ b/satpy/etc/readers/mersi_rm_l1b.yaml @@ -0,0 +1,275 @@ +reader: + name: mersi_rm_l1b + short_name: MERSI-RM l1b + long_name: MERSI-RM L1B data in HDF5 format + description: FY-3G Medium Resolution Spectral Imager - Rainfall Measurement (MERSI-RM) L1B Reader + status: Beta + supports_fsspec: false + sensors: [mersi-rm] + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + +file_types: + mersi_rm_l1b_500: + file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B + rows_per_scan: 10 + file_patterns: + # From National Meteorological Satellite Center + - '{platform_shortname}_MERSI_GRAN_L1_{start_time:%Y%m%d_%H%M}_0500M_V1.{ext:3s}' + + mersi_rm_l1b_500_geo: + file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B + rows_per_scan: 10 + file_patterns: + # From National Meteorological Satellite Center + - '{platform_shortname}_MERSI_GRAN_L1_{start_time:%Y%m%d_%H%M}_GEOHK_V1.{ext:3s}' + +# NOTE: Min/max wavelengths are defined here as the wavelength associated with a 1% SRF. +datasets: + '1': + name: '1' + wavelength: [0.60, 0.648, 0.70] + resolution: 500 + file_type: mersi_rm_l1b_500 + file_key: Data/EV_Reflectance + band_index: 0 + calibration_key: Calibration/RSB_Cal_Coeff + calibration_index: 0 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: counts + '2': + name: '2' + wavelength: [0.82, 0.862, 0.91] + resolution: 500 + file_type: mersi_rm_l1b_500 + file_key: Data/EV_Reflectance + band_index: 1 + calibration_key: Calibration/RSB_Cal_Coeff + calibration_index: 0 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: counts + '3': + name: '3' + wavelength: [0.89, 0.935, 0.97] + resolution: 500 + file_type: mersi_rm_l1b_500 + file_key: Data/EV_Reflectance + band_index: 2 + calibration_key: Calibration/RSB_Cal_Coeff + calibration_index: 0 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: counts + '4': + name: '4' + wavelength: [1.33, 1.377, 1.42] + resolution: 500 + file_type: mersi_rm_l1b_500 + file_key: Data/EV_Reflectance + band_index: 3 + calibration_key: Calibration/RSB_Cal_Coeff + calibration_index: 0 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: counts + '5': + name: '5' + wavelength: [1.58, 1.638, 1.69] + resolution: 500 + file_type: mersi_rm_l1b_500 + file_key: Data/EV_Reflectance + band_index: 4 + calibration_key: Calibration/RSB_Cal_Coeff + calibration_index: 0 + coordinates: [longitude, latitude] + calibration: + reflectance: + 
units: "%" + standard_name: toa_bidirectional_reflectance + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: counts + '6': + name: '6' + wavelength: [3.64, 3.809, 3.99] + resolution: 500 + file_type: mersi_rm_l1b_500 + file_key: Data/EV_Emissive + band_index: 0 + calibration_key: Calibration/IR_Cal_Coeff + calibration_index: 2 + coordinates: [longitude, latitude] + calibration: + brightness_temperature: + units: "K" + standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength + '7': + name: '7' + wavelength: [10.08, 10.736, 11.62] + resolution: 500 + file_type: mersi_rm_l1b_500 + file_key: Data/EV_Emissive + band_index: 1 + calibration_key: Calibration/IR_Cal_Coeff + calibration_index: 2 + coordinates: [longitude, latitude] + calibration: + brightness_temperature: + units: "K" + standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength + '8': + name: '8' + wavelength: [11.31, 12.019, 12.81] + resolution: 500 + file_type: mersi_rm_l1b_500 + file_key: Data/EV_Emissive + band_index: 2 + calibration_key: Calibration/IR_Cal_Coeff + calibration_index: 2 + coordinates: [longitude, latitude] + calibration: + brightness_temperature: + units: "K" + standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength + + longitude: + name: longitude + units: degrees_east + standard_name: longitude + resolution: 500 + file_type: mersi_rm_l1b_500_geo + file_key: Geolocation/Longitude + + latitude: + name: latitude + units: degrees_north + standard_name: latitude + resolution: 500 + file_type: mersi_rm_l1b_500_geo + file_key: Geolocation/Latitude + + solar_zenith_angle: + name: solar_zenith_angle + units: degree + standard_name: solar_zenith_angle + resolution: 500 + coordinates: [longitude, latitude] + file_type: mersi_rm_l1b_500_geo + file_key: Geolocation/SolarZenith + + solar_azimuth_angle: + name: solar_azimuth_angle + units: degree + standard_name: solar_azimuth_angle + resolution: 500 + coordinates: [longitude, latitude] + file_type: mersi_rm_l1b_500_geo + file_key: Geolocation/SolarAzimuth + + satellite_zenith_angle: + name: satellite_zenith_angle + units: degree + standard_name: sensor_zenith_angle + resolution: 500 + coordinates: [longitude, latitude] + file_type: mersi_rm_l1b_500_geo + file_key: Geolocation/SensorZenith + + satellite_azimuth_angle: + name: satellite_azimuth_angle + units: degree + standard_name: sensor_azimuth_angle + resolution: 500 + coordinates: [longitude, latitude] + file_type: mersi_rm_l1b_500_geo + file_key: Geolocation/SensorAzimuth + + moon_zenith_angle: + name: moon_zenith_angle + units: degree + standard_name: moon_zenith_angle + resolution: 500 + coordinates: [longitude, latitude] + file_type: mersi_rm_l1b_500_geo + file_key: Geolocation/MoonZenith + + moon_azimuth_angle: + name: moon_azimuth_angle + units: degree + standard_name: moon_azimuth_angle + resolution: 500 + coordinates: [longitude, latitude] + file_type: mersi_rm_l1b_500_geo + file_key: Geolocation/MoonAzimuth + + altitude: + name: altitude + units: degree + standard_name: altitude + resolution: 500 + coordinates: [longitude, latitude] + file_type: mersi_rm_l1b_500_geo + file_key: Geolocation/Altitude + + landcover: + name: landcover + units: degree + standard_name: landcover + 
resolution: 500 + coordinates: [longitude, latitude] + file_type: mersi_rm_l1b_500_geo + file_key: Geolocation/LandCover + + landseamask: + name: landseamask + units: "1" + standard_name: landseamask + resolution: 500 + coordinates: [longitude, latitude] + file_type: mersi_rm_l1b_500_geo + file_key: Geolocation/LandSeaMask diff --git a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py index 7070131f51..7675bd1624 100644 --- a/satpy/readers/mersi_l1b.py +++ b/satpy/readers/mersi_l1b.py @@ -36,7 +36,7 @@ class MERSIL1B(HDF5FileHandler): - """MERSI-2/MERSI-LL L1B file reader.""" + """MERSI-2/MERSI-LL/MERSI-RM L1B file reader.""" def _strptime(self, date_attr, time_attr): """Parse date/time strings.""" @@ -63,9 +63,18 @@ def sensor_name(self): sensor = { "MERSI": "mersi-2", "MERSI LL": "mersi-ll", + "MERSI RM": "mersi-rm", }.get(file_sensor, file_sensor) return sensor + def get_refl_mult(self): + """Get reflectance multiplier.""" + if self.sensor_name == "mersi-rm": + # MERSI-RM reflectances are in the range 0-1, so convert to percent + return 100. + else: + return 1. + def _get_single_slope_intercept(self, slope, intercept, cal_index): try: # convert scalar arrays to scalar @@ -103,7 +112,7 @@ def get_dataset(self, dataset_id, ds_info): slope = attrs.pop("Slope", None) intercept = attrs.pop("Intercept", None) if slope is not None and dataset_id.get("calibration") != "counts": - if band_index is not None: + if band_index is not None and slope.size > 1: slope = slope[band_index] intercept = intercept[band_index] data = data * slope + intercept @@ -112,12 +121,12 @@ def get_dataset(self, dataset_id, ds_info): coeffs = self._get_coefficients(ds_info["calibration_key"], ds_info["calibration_index"]) data = coeffs[0] + coeffs[1] * data + coeffs[2] * data ** 2 + data = data * self.get_refl_mult() elif dataset_id.get("calibration") == "brightness_temperature": calibration_index = ds_info["calibration_index"] # Converts um^-1 (wavenumbers) and (mW/m^2)/(str/cm^-1) (radiance data) # to SI units m^-1, mW*m^-3*str^-1. wave_number = 1. 
/ (dataset_id["wavelength"][1] / 1e6) - data = self._get_bt_dataset(data, calibration_index, wave_number) data.attrs = attrs @@ -195,6 +204,9 @@ def _get_bt_dataset(self, data, calibration_index, wave_number): corr_coeff_b = coeffs[calibration_index + N_TOT_IR_CHANS_LL] except KeyError: return data + else: + # MERSI-RM has no correction coefficients + corr_coeff_a = 0 if corr_coeff_a != 0: data = (data - corr_coeff_b) / corr_coeff_a From 817d8be55734eb8a5a2fadfe133906c31c08cada Mon Sep 17 00:00:00 2001 From: Olivier Samain Date: Mon, 15 Jan 2024 16:27:25 +0100 Subject: [PATCH 1054/1416] Harmonize key order for ASR --- satpy/etc/readers/fci_l2_nc.yaml | 1482 ++++++++++++++++++++++-------- 1 file changed, 1099 insertions(+), 383 deletions(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index dfa8b14ad4..dbf5db6a8a 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -1588,11 +1588,11 @@ datasets: # ASR - All-Sky Radiances bt_max: name: bt_max + long_name: TOA Brightess Temperature Segment max + standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr file_key: bt_max - long_name: TOA Brightess Temperature Segment Max - standard_name: toa_brightess_temperature cell_method: area:maximum coordinates: - longitude @@ -1600,11 +1600,11 @@ datasets: bt_mean: name: bt_mean + long_name: TOA Brightess Temperature Segment mean + standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr file_key: bt_mean - long_name: TOA Brightess Temperature Segment Mean - standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude @@ -1612,11 +1612,11 @@ datasets: bt_min: name: bt_min + long_name: TOA Brightess Temperature Segment min + standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr file_key: bt_min - long_name: TOA Brightess Temperature Segment Min - standard_name: toa_brightess_temperature cell_method: area:minimum coordinates: - longitude @@ -1624,11 +1624,11 @@ datasets: bt_std: name: bt_std + long_name: TOA Brightess Temperature Segment Standard Deviation + standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr file_key: bt_std - long_name: TOA Brightess Temperature Segment Standard Deviation - standard_name: toa_brightess_temperature cell_method: area:standard_deviation coordinates: - longitude @@ -1636,11 +1636,11 @@ datasets: radiance_max: name: radiance_max + long_name: TOA Radiance Segment max + standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr file_key: radiance_max - long_name: TOA Radiance Segment Max - standard_name: toa_outgoing_radiance cell_method: area:maximum coordinates: - longitude @@ -1648,11 +1648,11 @@ datasets: radiance_mean: name: radiance_mean + long_name: TOA Radiance Segment mean + standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr file_key: radiance_mean - long_name: TOA Radiance Segment Mean - standard_name: toa_outgoing_radiance cell_method: area:mean coordinates: - longitude @@ -1660,11 +1660,11 @@ datasets: radiance_min: name: radiance_min + long_name: TOA Radiance Segment min + standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr file_key: radiance_min - long_name: TOA Radiance Segment Min - standard_name: toa_outgoing_radiance cell_method: area:minimum coordinates: - longitude @@ -1672,11 +1672,11 @@ datasets: radiance_std: name: radiance_std + long_name: TOA Radiance Segment Standard Deviation + 
standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr file_key: radiance_std - long_name: TOA Radiance Segment Standard Deviation - standard_name: toa_outgoing_radiance cell_method: area:standard_deviation coordinates: - longitude @@ -1684,11 +1684,11 @@ datasets: reflectance_max: name: reflectance_max + long_name: TOA Bidirectional Reflectance Segment max + standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr file_key: reflectance_max - long_name: TOA Bidirectional Reflectance Segment Max - standard_name: toa_bidirectional_reflectance cell_method: area:maximum units: '%' coordinates: @@ -1697,11 +1697,11 @@ datasets: reflectance_mean: name: reflectance_mean + long_name: TOA Bidirectional Reflectance Segment mean + standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr file_key: reflectance_mean - long_name: TOA Bidirectional Reflectance Segment Mean - standard_name: toa_bidirectional_reflectance cell_method: area:mean units: '%' coordinates: @@ -1710,11 +1710,11 @@ datasets: reflectance_min: name: reflectance_min + long_name: TOA Bidirectional Reflectance Segment min + standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr file_key: reflectance_min - long_name: TOA Bidirectional Reflectance Segment Min - standard_name: toa_bidirectional_reflectance cell_method: area:minimum units: '%' coordinates: @@ -1723,11 +1723,11 @@ datasets: reflectance_std: name: reflectance_std + long_name: TOA Bidirectional Reflectance Segment Standard Deviation + standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr file_key: reflectance_std - long_name: TOA Bidirectional Reflectance Segment Standard Deviation - standard_name: toa_bidirectional_reflectance cell_method: area:standard_deviation units: '%' coordinates: @@ -1736,12 +1736,12 @@ datasets: quality_bt: name: quality_bt + long_name: TOA Brightess Temperature % Confidence + standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr file_key: quality_bt fill_value: -1 - long_name: TOA Brightess Temperature % confidence - standard_name: brightness_temperature_quality units: '%' coordinates: - longitude @@ -1749,12 +1749,12 @@ datasets: quality_reflectance: name: quality_reflectance + long_name: TOA Bidirectional Reflectance % Confidence + standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr file_key: quality_reflectance fill_value: -1 - long_name: TOA Bidirectional Reflectance % confidence - standard_name: reflectance_quality units: '%' coordinates: - longitude @@ -1762,60 +1762,56 @@ datasets: quality_radiance: name: quality_radiance + long_name: TOA Radiance % Confidence + standard_name: radiance_quality resolution: 32000 file_type: nc_fci_asr file_key: quality_radiance fill_value: -1 - long_name: TOA Radiance % confidence - standard_name: radiance_quality - units: '%' coordinates: - longitude - latitude land_pixel_percent: name: land_pixel_percent + standard_name: land_area_fraction resolution: 32000 file_type: nc_fci_asr file_key: land_pixel_percent - standard_name: land_area_fraction - units: '%' coordinates: - longitude - latitude water_pixel_percent: name: water_pixel_percent + standard_name: water_area_fraction resolution: 32000 file_type: nc_fci_asr file_key: water_pixel_percent - standard_name: water_area_fraction - units: '%' coordinates: - longitude - latitude pixel_percentage: name: pixel_percentage + standard_name: pixels_used_fraction resolution: 32000 
file_type: nc_fci_asr file_key: pixel_percentage - standard_name: pixels_used_fraction - units: '%' coordinates: - longitude - latitude reflectance_mean_all_vis04: name: reflectance_mean_all_vis04 + long_name: TOA Bidirectional Reflectance Segment mean at 0.44um (all pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr file_key: reflectance_mean vis_channel_id: 0 + wavelength: [0.384, 0.444, 0.504] category_id: 0 - long_name: TOA Bidirectional Reflectance Segment mean at 0.44um (all pixels) - standard_name: toa_bidirectional_reflectance cell_method: area:mean units: '%' coordinates: @@ -1824,14 +1820,14 @@ datasets: reflectance_mean_clear_vis04: name: reflectance_mean_clear_vis04 + long_name: TOA Bidirectional Reflectance Segment mean at 0.44um (clear pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr file_key: reflectance_mean vis_channel_id: 0 + wavelength: [0.384, 0.444, 0.504] category_id: 1 - long_name: TOA Bidirectional Reflectance Segment mean at 0.44um (clear pixels) - standard_name: toa_bidirectional_reflectance cell_method: area:mean units: '%' coordinates: @@ -1840,14 +1836,14 @@ datasets: reflectance_mean_cloudy_vis04: name: reflectance_mean_cloudy_vis04 + long_name: TOA Bidirectional Reflectance Segment mean at 0.44um (cloudy pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr file_key: reflectance_mean vis_channel_id: 0 + wavelength: [0.384, 0.444, 0.504] category_id: 2 - long_name: TOA Bidirectional Reflectance Segment mean at 0.44um (cloudy pixels) - standard_name: toa_bidirectional_reflectance cell_method: area:mean units: '%' coordinates: @@ -1856,14 +1852,14 @@ datasets: reflectance_mean_all_vis05: name: reflectance_mean_all_vis05 + long_name: TOA Bidirectional Reflectance Segment mean at 0.51um (all pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr file_key: reflectance_mean vis_channel_id: 1 + wavelength: [0.47, 0.51, 0.55] category_id: 0 - long_name: TOA Bidirectional Reflectance Segment mean at 0.51um (all pixels) - standard_name: toa_bidirectional_reflectance cell_method: area:mean units: '%' coordinates: @@ -1872,14 +1868,14 @@ datasets: reflectance_mean_clear_vis05: name: reflectance_mean_clear_vis05 + long_name: TOA Bidirectional Reflectance Segment mean at 0.51um (clear pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr file_key: reflectance_mean vis_channel_id: 1 + wavelength: [0.47, 0.51, 0.55] category_id: 1 - long_name: TOA Bidirectional Reflectance Segment mean at 0.51um (clear pixels) - standard_name: toa_bidirectional_reflectance cell_method: area:mean units: '%' coordinates: @@ -1888,14 +1884,14 @@ datasets: reflectance_mean_cloudy_vis05: name: reflectance_mean_cloudy_vis05 + long_name: TOA Bidirectional Reflectance Segment mean at 0.51um (cloudy pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr file_key: reflectance_mean vis_channel_id: 1 + wavelength: [0.47, 0.51, 0.55] category_id: 2 - long_name: TOA Bidirectional Reflectance Segment mean at 0.51um (cloudy pixels) - standard_name: toa_bidirectional_reflectance cell_method: area:mean units: '%' coordinates: @@ -1904,14 +1900,14 @@ datasets: 
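For context on how the per-channel ASR entries that follow are consumed: each one becomes a separately loadable Satpy dataset, picked out of a shared file variable (reflectance_mean, bt_mean, radiance_mean, quality_reflectance) by its vis_channel_id/ir_channel_id and its category_id, with category_id 0, 1 and 2 corresponding to all, clear and cloudy pixels. A minimal usage sketch, assuming Satpy is installed and an FCI L2 ASR NetCDF product is available; the file name below is a hypothetical placeholder, not a real product name:

    from satpy import Scene

    # Hypothetical file name; any file matching the nc_fci_asr file
    # patterns of this reader would do.
    scn = Scene(filenames=["fci_l2_asr_sample.nc"], reader="fci_l2_nc")
    # Load the segment-mean 0.64 um reflectance over all pixels, as
    # defined by the reflectance_mean_all_vis06 entry below.
    scn.load(["reflectance_mean_all_vis06"])
    refl = scn["reflectance_mean_all_vis06"]
    # Metadata from the YAML entry (standard_name, units, ...) ends up
    # in the loaded DataArray's attributes.
    print(refl.attrs["standard_name"], refl.attrs["units"])

The dataset names follow one uniform scheme throughout the rest of this file: quantity, then pixel category (all/clear/cloudy), then channel, e.g. bt_mean_cloudy_ir105.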
reflectance_mean_all_vis06: name: reflectance_mean_all_vis06 + long_name: TOA Bidirectional Reflectance Segment mean at 0.64um (all pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr file_key: reflectance_mean vis_channel_id: 2 + wavelength: [0.59, 0.64, 0.69] category_id: 0 - long_name: TOA Bidirectional Reflectance Segment mean at 0.64um (all pixels) - standard_name: toa_bidirectional_reflectance cell_method: area:mean units: '%' coordinates: @@ -1920,14 +1916,14 @@ datasets: reflectance_mean_clear_vis06: name: reflectance_mean_clear_vis06 + long_name: TOA Bidirectional Reflectance Segment mean at 0.64um (clear pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr file_key: reflectance_mean vis_channel_id: 2 + wavelength: [0.59, 0.64, 0.69] category_id: 1 - long_name: TOA Bidirectional Reflectance Segment mean at 0.64um (clear pixels) - standard_name: toa_bidirectional_reflectance cell_method: area:mean units: '%' coordinates: @@ -1936,14 +1932,14 @@ datasets: reflectance_mean_cloudy_vis06: name: reflectance_mean_cloudy_vis06 + long_name: TOA Bidirectional Reflectance Segment mean at 0.64um (cloudy pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr file_key: reflectance_mean vis_channel_id: 2 + wavelength: [0.59, 0.64, 0.69] category_id: 2 - long_name: TOA Bidirectional Reflectance Segment mean at 0.64um (cloudy pixels) - standard_name: toa_bidirectional_reflectance cell_method: area:mean units: '%' coordinates: @@ -1952,14 +1948,14 @@ datasets: reflectance_mean_all_vis08: name: reflectance_mean_all_vis08 + long_name: TOA Bidirectional Reflectance Segment mean at 0.86um (all pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr file_key: reflectance_mean vis_channel_id: 3 + wavelength: [0.815, 0.865, 0.915] category_id: 0 - long_name: TOA Bidirectional Reflectance Segment mean at 0.86um (all pixels) - standard_name: toa_bidirectional_reflectance cell_method: area:mean units: '%' coordinates: @@ -1968,14 +1964,14 @@ datasets: reflectance_mean_clear_vis08: name: reflectance_mean_clear_vis08 + long_name: TOA Bidirectional Reflectance Segment mean at 0.86um (clear pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr file_key: reflectance_mean vis_channel_id: 3 + wavelength: [0.815, 0.865, 0.915] category_id: 1 - long_name: TOA Bidirectional Reflectance Segment mean at 0.86um (clear pixels) - standard_name: toa_bidirectional_reflectance cell_method: area:mean units: '%' coordinates: @@ -1984,14 +1980,14 @@ datasets: reflectance_mean_cloudy_vis08: name: reflectance_mean_cloudy_vis08 + long_name: TOA Bidirectional Reflectance Segment mean at 0.86um (cloudy pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr file_key: reflectance_mean vis_channel_id: 3 + wavelength: [0.815, 0.865, 0.915] category_id: 2 - long_name: TOA Bidirectional Reflectance Segment mean at 0.86um (cloudy pixels) - standard_name: toa_bidirectional_reflectance cell_method: area:mean units: '%' coordinates: @@ -2000,14 +1996,14 @@ datasets: reflectance_mean_all_vis09: name: reflectance_mean_all_vis09 + long_name: TOA Bidirectional Reflectance Segment mean at 0.91um (all 
pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr file_key: reflectance_mean vis_channel_id: 4 + wavelength: [0.894, 0.914, 0.934] category_id: 0 - long_name: TOA Bidirectional Reflectance Segment mean at 0.91um (all pixels) - standard_name: toa_bidirectional_reflectance cell_method: area:mean units: '%' coordinates: @@ -2016,14 +2012,14 @@ datasets: reflectance_mean_clear_vis09: name: reflectance_mean_clear_vis09 + long_name: TOA Bidirectional Reflectance Segment mean at 0.91um (clear pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr file_key: reflectance_mean vis_channel_id: 4 + wavelength: [0.894, 0.914, 0.934] category_id: 1 - long_name: TOA Bidirectional Reflectance Segment mean at 0.91um (clear pixels) - standard_name: toa_bidirectional_reflectance cell_method: area:mean units: '%' coordinates: @@ -2032,14 +2028,14 @@ datasets: reflectance_mean_cloudy_vis09: name: reflectance_mean_cloudy_vis09 + long_name: TOA Bidirectional Reflectance Segment mean at 0.91um (cloudy pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr file_key: reflectance_mean vis_channel_id: 4 + wavelength: [0.894, 0.914, 0.934] category_id: 2 - long_name: TOA Bidirectional Reflectance Segment mean at 0.91um (cloudy pixels) - standard_name: toa_bidirectional_reflectance cell_method: area:mean units: '%' coordinates: @@ -2048,14 +2044,14 @@ datasets: reflectance_mean_all_nir13: name: reflectance_mean_all_nir13 + long_name: TOA Bidirectional Reflectance Segment mean at 1.38um (all pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr file_key: reflectance_mean vis_channel_id: 5 + wavelength: [1.35, 1.38, 1.41] category_id: 0 - long_name: TOA Bidirectional Reflectance Segment mean at 1.38um (all pixels) - standard_name: toa_bidirectional_reflectance cell_method: area:mean units: '%' coordinates: @@ -2064,14 +2060,14 @@ datasets: reflectance_mean_clear_nir13: name: reflectance_mean_clear_nir13 + long_name: TOA Bidirectional Reflectance Segment mean at 1.38um (clear pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr file_key: reflectance_mean vis_channel_id: 5 + wavelength: [1.35, 1.38, 1.41] category_id: 1 - long_name: TOA Bidirectional Reflectance Segment mean at 1.38um (clear pixels) - standard_name: toa_bidirectional_reflectance cell_method: area:mean units: '%' coordinates: @@ -2080,14 +2076,14 @@ datasets: reflectance_mean_cloudy_nir13: name: reflectance_mean_cloudy_nir13 + long_name: TOA Bidirectional Reflectance Segment mean at 1.38um (cloudy pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr file_key: reflectance_mean vis_channel_id: 5 + wavelength: [1.35, 1.38, 1.41] category_id: 2 - long_name: TOA Bidirectional Reflectance Segment mean at 1.38um (cloudy pixels) - standard_name: toa_bidirectional_reflectance cell_method: area:mean units: '%' coordinates: @@ -2096,14 +2092,14 @@ datasets: reflectance_mean_all_nir16: name: reflectance_mean_all_nir16 + long_name: TOA Bidirectional Reflectance Segment mean at 1.61um (all pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr 
file_key: reflectance_mean vis_channel_id: 6 + wavelength: [1.56, 1.61, 1.66] category_id: 0 - long_name: TOA Bidirectional Reflectance Segment mean at 1.61um (all pixels) - standard_name: toa_bidirectional_reflectance cell_method: area:mean units: '%' coordinates: @@ -2112,14 +2108,14 @@ datasets: reflectance_mean_clear_nir16: name: reflectance_mean_clear_nir16 + long_name: TOA Bidirectional Reflectance Segment mean at 1.61um (clear pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr file_key: reflectance_mean vis_channel_id: 6 + wavelength: [1.56, 1.61, 1.66] category_id: 1 - long_name: TOA Bidirectional Reflectance Segment mean at 1.61um (clear pixels) - standard_name: toa_bidirectional_reflectance cell_method: area:mean units: '%' coordinates: @@ -2128,14 +2124,14 @@ datasets: reflectance_mean_cloudy_nir16: name: reflectance_mean_cloudy_nir16 + long_name: TOA Bidirectional Reflectance Segment mean at 1.61um (cloudy pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr file_key: reflectance_mean vis_channel_id: 6 + wavelength: [1.56, 1.61, 1.66] category_id: 2 - long_name: TOA Bidirectional Reflectance Segment mean at 1.61um (cloudy pixels) - standard_name: toa_bidirectional_reflectance cell_method: area:mean units: '%' coordinates: @@ -2144,14 +2140,14 @@ datasets: reflectance_mean_all_nir22: name: reflectance_mean_all_nir22 + long_name: TOA Bidirectional Reflectance Segment mean at 2.25um (all pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr file_key: reflectance_mean vis_channel_id: 7 + wavelength: [2.2, 2.25, 2.3] category_id: 0 - long_name: TOA Bidirectional Reflectance Segment mean at 2.25um (all pixels) - standard_name: toa_bidirectional_reflectance cell_method: area:mean units: '%' coordinates: @@ -2160,14 +2156,14 @@ datasets: reflectance_mean_clear_nir22: name: reflectance_mean_clear_nir22 + long_name: TOA Bidirectional Reflectance Segment mean at 2.25um (clear pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr file_key: reflectance_mean vis_channel_id: 7 + wavelength: [2.2, 2.25, 2.3] category_id: 1 - long_name: TOA Bidirectional Reflectance Segment mean at 2.25um (clear pixels) - standard_name: toa_bidirectional_reflectance cell_method: area:mean units: '%' coordinates: @@ -2176,14 +2172,14 @@ datasets: reflectance_mean_cloudy_nir22: name: reflectance_mean_cloudy_nir22 + long_name: TOA Bidirectional Reflectance Segment mean at 2.25um (cloudy pixels) + standard_name: toa_bidirectional_reflectance resolution: 32000 - wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr file_key: reflectance_mean vis_channel_id: 7 + wavelength: [2.2, 2.25, 2.3] category_id: 2 - long_name: TOA Bidirectional Reflectance Segment mean at 2.25um (cloudy pixels) - standard_name: toa_bidirectional_reflectance cell_method: area:mean units: '%' coordinates: @@ -2192,14 +2188,14 @@ datasets: bt_mean_all_ir38: name: bt_mean_all_ir38 + long_name: TOA Brightess Temperature Segment mean at 3.80um (all pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr file_key: bt_mean ir_channel_id: 0 + wavelength: [3.4, 3.8, 4.2] category_id: 0 - long_name: TOA Brightess Temperature Segment mean at 3.80um (all pixels) - standard_name: toa_brightess_temperature 
cell_method: area:mean coordinates: - longitude @@ -2207,14 +2203,14 @@ datasets: bt_mean_clear_ir38: name: bt_mean_clear_ir38 + long_name: TOA Brightess Temperature Segment mean at 3.80um (clear pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr file_key: bt_mean ir_channel_id: 0 + wavelength: [3.4, 3.8, 4.2] category_id: 1 - long_name: TOA Brightess Temperature Segment mean at 3.80um (clear pixels) - standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude @@ -2222,14 +2218,14 @@ datasets: bt_mean_cloudy_ir38: name: bt_mean_cloudy_ir38 + long_name: TOA Brightess Temperature Segment mean at 3.80um (cloudy pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr file_key: bt_mean ir_channel_id: 0 + wavelength: [3.4, 3.8, 4.2] category_id: 2 - long_name: TOA Brightess Temperature Segment mean at 3.80um (cloudy pixels) - standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude @@ -2237,14 +2233,14 @@ datasets: bt_mean_all_wv63: name: bt_mean_all_wv63 + long_name: TOA Brightess Temperature Segment mean at 6.30um (all pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr file_key: bt_mean ir_channel_id: 1 + wavelength: [5.3, 6.3, 7.3] category_id: 0 - long_name: TOA Brightess Temperature Segment mean at 6.30um (all pixels) - standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude @@ -2252,14 +2248,14 @@ datasets: bt_mean_clear_wv63: name: bt_mean_clear_wv63 + long_name: TOA Brightess Temperature Segment mean at 6.30um (clear pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr file_key: bt_mean ir_channel_id: 1 + wavelength: [5.3, 6.3, 7.3] category_id: 1 - long_name: TOA Brightess Temperature Segment mean at 6.30um (clear pixels) - standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude @@ -2267,14 +2263,14 @@ datasets: bt_mean_cloudy_wv63: name: bt_mean_cloudy_wv63 + long_name: TOA Brightess Temperature Segment mean at 6.30um (cloudy pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr file_key: bt_mean ir_channel_id: 1 + wavelength: [5.3, 6.3, 7.3] category_id: 2 - long_name: TOA Brightess Temperature Segment mean at 6.30um (cloudy pixels) - standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude @@ -2282,14 +2278,14 @@ datasets: bt_mean_all_wv73: name: bt_mean_all_wv73 + long_name: TOA Brightess Temperature Segment mean at 7.35um (all pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr file_key: bt_mean ir_channel_id: 2 + wavelength: [6.85, 7.35, 7.85] category_id: 0 - long_name: TOA Brightess Temperature Segment mean at 7.35um (all pixels) - standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude @@ -2297,14 +2293,14 @@ datasets: bt_mean_clear_wv73: name: bt_mean_clear_wv73 + long_name: TOA Brightess Temperature Segment mean at 7.35um (clear pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr file_key: bt_mean ir_channel_id: 2 + wavelength: [6.85, 7.35, 7.85] category_id: 1 - long_name: TOA Brightess Temperature Segment mean 
at 7.35um (clear pixels) - standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude @@ -2312,14 +2308,14 @@ datasets: bt_mean_cloudy_wv73: name: bt_mean_cloudy_wv73 + long_name: TOA Brightess Temperature Segment mean at 7.35um (cloudy pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr file_key: bt_mean ir_channel_id: 2 + wavelength: [6.85, 7.35, 7.85] category_id: 2 - long_name: TOA Brightess Temperature Segment mean at 7.35um (cloudy pixels) - standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude @@ -2327,14 +2323,14 @@ datasets: bt_mean_all_ir87: name: bt_mean_all_ir87 + long_name: TOA Brightess Temperature Segment mean at 8.70um (all pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr file_key: bt_mean ir_channel_id: 3 + wavelength: [8.3, 8.7, 9.1] category_id: 0 - long_name: TOA Brightess Temperature Segment mean at 8.70um (all pixels) - standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude @@ -2342,14 +2338,14 @@ datasets: bt_mean_clear_ir87: name: bt_mean_clear_ir87 + long_name: TOA Brightess Temperature Segment mean at 8.70um (clear pixels) + standard_name: toa_brightess_temperature resolution: 32000 - wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr file_key: bt_mean ir_channel_id: 3 + wavelength: [8.3, 8.7, 9.1] category_id: 1 - long_name: TOA Brightess Temperature Segment mean at 8.70um (clear pixels) - standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude @@ -2357,194 +2353,914 @@ datasets: bt_mean_cloudy_ir87: name: bt_mean_cloudy_ir87 + long_name: TOA Brightess Temperature Segment mean at 8.70um (cloudy pixels) + standard_name: toa_brightess_temperature + resolution: 32000 + file_type: nc_fci_asr + file_key: bt_mean + ir_channel_id: 3 + wavelength: [8.3, 8.7, 9.1] + category_id: 2 + cell_method: area:mean + coordinates: + - longitude + - latitude + + bt_mean_all_ir97: + name: bt_mean_all_ir97 + long_name: TOA Brightess Temperature Segment mean at 9.66um (all pixels) + standard_name: toa_brightess_temperature + resolution: 32000 + file_type: nc_fci_asr + file_key: bt_mean + ir_channel_id: 4 + wavelength: [9.36, 9.66, 9.96] + category_id: 0 + cell_method: area:mean + coordinates: + - longitude + - latitude + + bt_mean_clear_ir97: + name: bt_mean_clear_ir97 + long_name: TOA Brightess Temperature Segment mean at 9.66um (clear pixels) + standard_name: toa_brightess_temperature + resolution: 32000 + file_type: nc_fci_asr + file_key: bt_mean + ir_channel_id: 4 + wavelength: [9.36, 9.66, 9.96] + category_id: 1 + cell_method: area:mean + coordinates: + - longitude + - latitude + + bt_mean_cloudy_ir97: + name: bt_mean_cloudy_ir97 + long_name: TOA Brightess Temperature Segment mean at 9.66um (cloudy pixels) + standard_name: toa_brightess_temperature + resolution: 32000 + file_type: nc_fci_asr + file_key: bt_mean + ir_channel_id: 4 + wavelength: [9.36, 9.66, 9.96] + category_id: 2 + cell_method: area:mean + coordinates: + - longitude + - latitude + + bt_mean_all_ir105: + name: bt_mean_all_ir105 + long_name: TOA Brightess Temperature Segment mean at 10.50um (all pixels) + standard_name: toa_brightess_temperature + resolution: 32000 + file_type: nc_fci_asr + file_key: bt_mean + ir_channel_id: 5 + wavelength: [9.8, 10.5, 11.2] + category_id: 0 + cell_method: area:mean + coordinates: + - longitude + - latitude + + 
bt_mean_clear_ir105: + name: bt_mean_clear_ir105 + long_name: TOA Brightess Temperature Segment mean at 10.50um (clear pixels) + standard_name: toa_brightess_temperature + resolution: 32000 + file_type: nc_fci_asr + file_key: bt_mean + ir_channel_id: 5 + wavelength: [9.8, 10.5, 11.2] + category_id: 1 + cell_method: area:mean + coordinates: + - longitude + - latitude + + bt_mean_cloudy_ir105: + name: bt_mean_cloudy_ir105 + long_name: TOA Brightess Temperature Segment mean at 10.50um (cloudy pixels) + standard_name: toa_brightess_temperature + resolution: 32000 + file_type: nc_fci_asr + file_key: bt_mean + ir_channel_id: 5 + wavelength: [9.8, 10.5, 11.2] + category_id: 2 + cell_method: area:mean + coordinates: + - longitude + - latitude + + bt_mean_all_ir123: + name: bt_mean_all_ir123 + long_name: TOA Brightess Temperature Segment mean at 12.30um (all pixels) + standard_name: toa_brightess_temperature + resolution: 32000 + file_type: nc_fci_asr + file_key: bt_mean + ir_channel_id: 6 + wavelength: [11.8, 12.3, 12.8] + category_id: 0 + cell_method: area:mean + coordinates: + - longitude + - latitude + + bt_mean_clear_ir123: + name: bt_mean_clear_ir123 + long_name: TOA Brightess Temperature Segment mean at 12.30um (clear pixels) + standard_name: toa_brightess_temperature + resolution: 32000 + file_type: nc_fci_asr + file_key: bt_mean + ir_channel_id: 6 + wavelength: [11.8, 12.3, 12.8] + category_id: 1 + cell_method: area:mean + coordinates: + - longitude + - latitude + + bt_mean_cloudy_ir123: + name: bt_mean_cloudy_ir123 + long_name: TOA Brightess Temperature Segment mean at 12.30um (cloudy pixels) + standard_name: toa_brightess_temperature + resolution: 32000 + file_type: nc_fci_asr + file_key: bt_mean + ir_channel_id: 6 + wavelength: [11.8, 12.3, 12.8] + category_id: 2 + cell_method: area:mean + coordinates: + - longitude + - latitude + + bt_mean_all_ir133: + name: bt_mean_all_ir133 + long_name: TOA Brightess Temperature Segment mean at 13.30um (all pixels) + standard_name: toa_brightess_temperature + resolution: 32000 + file_type: nc_fci_asr + file_key: bt_mean + ir_channel_id: 7 + wavelength: [12.7, 13.3, 13.9] + category_id: 0 + cell_method: area:mean + coordinates: + - longitude + - latitude + + bt_mean_clear_ir133: + name: bt_mean_clear_ir133 + long_name: TOA Brightess Temperature Segment mean at 13.30um (clear pixels) + standard_name: toa_brightess_temperature + resolution: 32000 + file_type: nc_fci_asr + file_key: bt_mean + ir_channel_id: 7 + wavelength: [12.7, 13.3, 13.9] + category_id: 1 + cell_method: area:mean + coordinates: + - longitude + - latitude + + bt_mean_cloudy_ir133: + name: bt_mean_cloudy_ir133 + long_name: TOA Brightess Temperature Segment mean at 13.30um (cloudy pixels) + standard_name: toa_brightess_temperature + resolution: 32000 + file_type: nc_fci_asr + file_key: bt_mean + ir_channel_id: 7 + wavelength: [12.7, 13.3, 13.9] + category_id: 2 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_all_vis04: + name: radiance_mean_all_vis04 + long_name: TOA Radiance Segment mean at 0.44um (all pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 0 + wavelength: [0.384, 0.444, 0.504] + category_id: 0 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_clear_vis04: + name: radiance_mean_clear_vis04 + long_name: TOA Radiance Segment mean at 0.44um (clear pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: 
nc_fci_asr + file_key: radiance_mean + channel_id: 0 + wavelength: [0.384, 0.444, 0.504] + category_id: 1 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_cloudy_vis04: + name: radiance_mean_cloudy_vis04 + long_name: TOA Radiance Segment mean at 0.44um (cloudy pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 0 + wavelength: [0.384, 0.444, 0.504] + category_id: 2 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_all_vis05: + name: radiance_mean_all_vis05 + long_name: TOA Radiance Segment mean at 0.51um (all pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 1 + wavelength: [0.47, 0.51, 0.55] + category_id: 0 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_clear_vis05: + name: radiance_mean_clear_vis05 + long_name: TOA Radiance Segment mean at 0.51um (clear pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 1 + wavelength: [0.47, 0.51, 0.55] + category_id: 1 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_cloudy_vis05: + name: radiance_mean_cloudy_vis05 + long_name: TOA Radiance Segment mean at 0.51um (cloudy pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 1 + wavelength: [0.47, 0.51, 0.55] + category_id: 2 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_all_vis06: + name: radiance_mean_all_vis06 + long_name: TOA Radiance Segment mean at 0.64um (all pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 2 + wavelength: [0.59, 0.64, 0.69] + category_id: 0 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_clear_vis06: + name: radiance_mean_clear_vis06 + long_name: TOA Radiance Segment mean at 0.64um (clear pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 2 + wavelength: [0.59, 0.64, 0.69] + category_id: 1 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_cloudy_vis06: + name: radiance_mean_cloudy_vis06 + long_name: TOA Radiance Segment mean at 0.64um (cloudy pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 2 + wavelength: [0.59, 0.64, 0.69] + category_id: 2 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_all_vis08: + name: radiance_mean_all_vis08 + long_name: TOA Radiance Segment mean at 0.86um (all pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 3 + wavelength: [0.815, 0.865, 0.915] + category_id: 0 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_clear_vis08: + name: radiance_mean_clear_vis08 + long_name: TOA Radiance Segment mean at 0.86um (clear pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 3 + wavelength: [0.815, 0.865, 0.915] + category_id: 1 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_cloudy_vis08: + name: 
radiance_mean_cloudy_vis08 + long_name: TOA Radiance Segment mean at 0.86um (cloudy pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 3 + wavelength: [0.815, 0.865, 0.915] + category_id: 2 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_all_vis09: + name: radiance_mean_all_vis09 + long_name: TOA Radiance Segment mean at 0.91um (all pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 4 + wavelength: [0.894, 0.914, 0.934] + category_id: 0 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_clear_vis09: + name: radiance_mean_clear_vis09 + long_name: TOA Radiance Segment mean at 0.91um (clear pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 4 + wavelength: [0.894, 0.914, 0.934] + category_id: 1 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_cloudy_vis09: + name: radiance_mean_cloudy_vis09 + long_name: TOA Radiance Segment mean at 0.91um (cloudy pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 4 + wavelength: [0.894, 0.914, 0.934] + category_id: 2 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_all_nir13: + name: radiance_mean_all_nir13 + long_name: TOA Radiance Segment mean at 1.38um (all pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 5 + wavelength: [1.35, 1.38, 1.41] + category_id: 0 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_clear_nir13: + name: radiance_mean_clear_nir13 + long_name: TOA Radiance Segment mean at 1.38um (clear pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 5 + wavelength: [1.35, 1.38, 1.41] + category_id: 1 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_cloudy_nir13: + name: radiance_mean_cloudy_nir13 + long_name: TOA Radiance Segment mean at 1.38um (cloudy pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 5 + wavelength: [1.35, 1.38, 1.41] + category_id: 2 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_all_nir16: + name: radiance_mean_all_nir16 + long_name: TOA Radiance Segment mean at 1.61um (all pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 6 + wavelength: [1.56, 1.61, 1.66] + category_id: 0 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_clear_nir16: + name: radiance_mean_clear_nir16 + long_name: TOA Radiance Segment mean at 1.61um (clear pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 6 + wavelength: [1.56, 1.61, 1.66] + category_id: 1 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_cloudy_nir16: + name: radiance_mean_cloudy_nir16 + long_name: TOA Radiance Segment mean at 1.61um (cloudy pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + 
channel_id: 6 + wavelength: [1.56, 1.61, 1.66] + category_id: 2 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_all_nir22: + name: radiance_mean_all_nir22 + long_name: TOA Radiance Segment mean at 2.25um (all pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 7 + wavelength: [2.2, 2.25, 2.3] + category_id: 0 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_clear_nir22: + name: radiance_mean_clear_nir22 + long_name: TOA Radiance Segment mean at 2.25um (clear pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 7 + wavelength: [2.2, 2.25, 2.3] + category_id: 1 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_cloudy_nir22: + name: radiance_mean_cloudy_nir22 + long_name: TOA Radiance Segment mean at 2.25um (cloudy pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 7 + wavelength: [2.2, 2.25, 2.3] + category_id: 2 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_all_ir38: + name: radiance_mean_all_ir38 + long_name: TOA Radiance Segment mean at 3.80um (all pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 8 + wavelength: [3.4, 3.8, 4.2] + category_id: 0 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_clear_ir38: + name: radiance_mean_clear_ir38 + long_name: TOA Radiance Segment mean at 3.80um (clear pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 8 + wavelength: [3.4, 3.8, 4.2] + category_id: 1 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_cloudy_ir38: + name: radiance_mean_cloudy_ir38 + long_name: TOA Radiance Segment mean at 3.80um (cloudy pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 8 + wavelength: [3.4, 3.8, 4.2] + category_id: 2 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_all_wv63: + name: radiance_mean_all_wv63 + long_name: TOA Radiance Segment mean at 6.30um (all pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 9 + wavelength: [5.3, 6.3, 7.3] + category_id: 0 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_clear_wv63: + name: radiance_mean_clear_wv63 + long_name: TOA Radiance Segment mean at 6.30um (clear pixels) + standard_name: toa_outgoing_radiance resolution: 32000 - wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr - file_key: bt_mean - ir_channel_id: 3 + file_key: radiance_mean + channel_id: 9 + wavelength: [5.3, 6.3, 7.3] + category_id: 1 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_cloudy_wv63: + name: radiance_mean_cloudy_wv63 + long_name: TOA Radiance Segment mean at 6.30um (cloudy pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 9 + wavelength: [5.3, 6.3, 7.3] category_id: 2 - long_name: TOA Brightess Temperature Segment mean at 8.70um (cloudy pixels) - standard_name: toa_brightess_temperature cell_method: 
area:mean coordinates: - longitude - latitude - bt_mean_all_ir97: - name: bt_mean_all_ir97 + radiance_mean_all_wv73: + name: radiance_mean_all_wv73 + long_name: TOA Radiance Segment mean at 7.35um (all pixels) + standard_name: toa_outgoing_radiance resolution: 32000 - wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr - file_key: bt_mean - ir_channel_id: 4 + file_key: radiance_mean + channel_id: 10 + wavelength: [6.85, 7.35, 7.85] category_id: 0 - long_name: TOA Brightess Temperature Segment mean at 9.66um (all pixels) - standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - bt_mean_clear_ir97: - name: bt_mean_clear_ir97 + radiance_mean_clear_wv73: + name: radiance_mean_clear_wv73 + long_name: TOA Radiance Segment mean at 7.35um (clear pixels) + standard_name: toa_outgoing_radiance resolution: 32000 - wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr - file_key: bt_mean - ir_channel_id: 4 + file_key: radiance_mean + channel_id: 10 + wavelength: [6.85, 7.35, 7.85] category_id: 1 - long_name: TOA Brightess Temperature Segment mean at 9.66um (clear pixels) - standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - bt_mean_cloudy_ir97: - name: bt_mean_cloudy_ir97 + radiance_mean_cloudy_wv73: + name: radiance_mean_cloudy_wv73 + long_name: TOA Radiance Segment mean at 7.35um (cloudy pixels) + standard_name: toa_outgoing_radiance resolution: 32000 - wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr - file_key: bt_mean - ir_channel_id: 4 + file_key: radiance_mean + channel_id: 10 + wavelength: [6.85, 7.35, 7.85] category_id: 2 - long_name: TOA Brightess Temperature Segment mean at 9.66um (cloudy pixels) - standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - bt_mean_all_ir105: - name: bt_mean_all_ir105 + radiance_mean_all_ir87: + name: radiance_mean_all_ir87 + long_name: TOA Radiance Segment mean at 8.70um (all pixels) + standard_name: toa_outgoing_radiance resolution: 32000 - wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr - file_key: bt_mean - ir_channel_id: 5 + file_key: radiance_mean + channel_id: 11 + wavelength: [8.3, 8.7, 9.1] category_id: 0 - long_name: TOA Brightess Temperature Segment mean at 10.50um (all pixels) - standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - bt_mean_clear_ir105: - name: bt_mean_clear_ir105 + radiance_mean_clear_ir87: + name: radiance_mean_clear_ir87 + long_name: TOA Radiance Segment mean at 8.70um (clear pixels) + standard_name: toa_outgoing_radiance resolution: 32000 - wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr - file_key: bt_mean - ir_channel_id: 5 + file_key: radiance_mean + channel_id: 11 + wavelength: [8.3, 8.7, 9.1] category_id: 1 - long_name: TOA Brightess Temperature Segment mean at 10.50um (clear pixels) - standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - bt_mean_cloudy_ir105: - name: bt_mean_cloudy_ir105 + radiance_mean_cloudy_ir87: + name: radiance_mean_cloudy_ir87 + long_name: TOA Radiance Segment mean at 8.70um (cloudy pixels) + standard_name: toa_outgoing_radiance resolution: 32000 - wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr - file_key: bt_mean - ir_channel_id: 5 + file_key: radiance_mean + channel_id: 11 + wavelength: [8.3, 8.7, 9.1] category_id: 2 - long_name: TOA Brightess Temperature Segment mean at 10.50um (cloudy pixels) - standard_name: toa_brightess_temperature cell_method: 
area:mean coordinates: - longitude - latitude - bt_mean_all_ir123: - name: bt_mean_all_ir123 + radiance_mean_all_ir97: + name: radiance_mean_all_ir97 + long_name: TOA Radiance Segment mean at 9.66um (all pixels) + standard_name: toa_outgoing_radiance resolution: 32000 - wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr - file_key: bt_mean - ir_channel_id: 6 + file_key: radiance_mean + channel_id: 12 + wavelength: [9.36, 9.66, 9.96] category_id: 0 - long_name: TOA Brightess Temperature Segment mean at 12.30um (all pixels) - standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - bt_mean_clear_ir123: - name: bt_mean_clear_ir123 + radiance_mean_clear_ir97: + name: radiance_mean_clear_ir97 + long_name: TOA Radiance Segment mean at 9.66um (clear pixels) + standard_name: toa_outgoing_radiance resolution: 32000 - wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr - file_key: bt_mean - ir_channel_id: 6 + file_key: radiance_mean + channel_id: 12 + wavelength: [9.36, 9.66, 9.96] category_id: 1 - long_name: TOA Brightess Temperature Segment mean at 12.30um (clear pixels) - standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - bt_mean_cloudy_ir123: - name: bt_mean_cloudy_ir123 + radiance_mean_cloudy_ir97: + name: radiance_mean_cloudy_ir97 + long_name: TOA Radiance Segment mean at 9.66um (cloudy pixels) + standard_name: toa_outgoing_radiance resolution: 32000 - wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr - file_key: bt_mean - ir_channel_id: 6 + file_key: radiance_mean + channel_id: 12 + wavelength: [9.36, 9.66, 9.96] category_id: 2 - long_name: TOA Brightess Temperature Segment mean at 12.30um (cloudy pixels) - standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - bt_mean_all_ir133: - name: bt_mean_all_ir133 + radiance_mean_all_ir105: + name: radiance_mean_all_ir105 + long_name: TOA Radiance Segment mean at 10.50um (all pixels) + standard_name: toa_outgoing_radiance resolution: 32000 - wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr - file_key: bt_mean - ir_channel_id: 7 + file_key: radiance_mean + channel_id: 13 + wavelength: [9.8, 10.5, 11.2] category_id: 0 - long_name: TOA Brightess Temperature Segment mean at 13.30um (all pixels) - standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - bt_mean_clear_ir133: - name: bt_mean_clear_ir133 + radiance_mean_clear_ir105: + name: radiance_mean_clear_ir105 + long_name: TOA Radiance Segment mean at 10.50um (clear pixels) + standard_name: toa_outgoing_radiance resolution: 32000 - wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr - file_key: bt_mean - ir_channel_id: 7 + file_key: radiance_mean + channel_id: 13 + wavelength: [9.8, 10.5, 11.2] category_id: 1 - long_name: TOA Brightess Temperature Segment mean at 13.30um (clear pixels) - standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude - latitude - bt_mean_cloudy_ir133: - name: bt_mean_cloudy_ir133 + radiance_mean_cloudy_ir105: + name: radiance_mean_cloudy_ir105 + long_name: TOA Radiance Segment mean at 10.50um (cloudy pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 13 + wavelength: [9.8, 10.5, 11.2] + category_id: 2 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_all_ir123: + name: radiance_mean_all_ir123 + long_name: TOA Radiance Segment mean at 
12.30um (all pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 14 + wavelength: [11.8, 12.3, 12.8] + category_id: 0 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_clear_ir123: + name: radiance_mean_clear_ir123 + long_name: TOA Radiance Segment mean at 12.30um (clear pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 14 + wavelength: [11.8, 12.3, 12.8] + category_id: 1 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_cloudy_ir123: + name: radiance_mean_cloudy_ir123 + long_name: TOA Radiance Segment mean at 12.30um (cloudy pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 14 + wavelength: [11.8, 12.3, 12.8] + category_id: 2 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_all_ir133: + name: radiance_mean_all_ir133 + long_name: TOA Radiance Segment mean at 13.30um (all pixels) + standard_name: toa_outgoing_radiance resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 15 wavelength: [12.7, 13.3, 13.9] + category_id: 0 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_clear_ir133: + name: radiance_mean_clear_ir133 + long_name: TOA Radiance Segment mean at 13.30um (clear pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean - ir_channel_id: 7 + file_key: radiance_mean + channel_id: 15 + wavelength: [12.7, 13.3, 13.9] + category_id: 1 + cell_method: area:mean + coordinates: + - longitude + - latitude + + radiance_mean_cloudy_ir133: + name: radiance_mean_cloudy_ir133 + long_name: TOA Radiance Segment mean at 13.30um (cloudy pixels) + standard_name: toa_outgoing_radiance + resolution: 32000 + file_type: nc_fci_asr + file_key: radiance_mean + channel_id: 15 + wavelength: [12.7, 13.3, 13.9] category_id: 2 - long_name: TOA Brightess Temperature Segment mean at 13.30um (cloudy pixels) - standard_name: toa_brightess_temperature cell_method: area:mean coordinates: - longitude @@ -2552,15 +3268,15 @@ datasets: quality_reflectance_all_vis04: name: quality_reflectance_all_vis04 + long_name: TOA Bidirectional Reflectance % Confidence at 0.44um (all pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr file_key: quality_reflectance vis_channel_id: 0 + wavelength: [0.384, 0.444, 0.504] category_id: 0 fill_value: -1 - long_name: TOA Bidirectional Reflectance % Confidence at 0.44um (all pixels) - standard_name: reflectance_quality units: '%' coordinates: - longitude @@ -2568,15 +3284,15 @@ datasets: quality_reflectance_clear_vis04: name: quality_reflectance_clear_vis04 + long_name: TOA Bidirectional Reflectance % Confidence at 0.44um (clear pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr file_key: quality_reflectance vis_channel_id: 0 + wavelength: [0.384, 0.444, 0.504] category_id: 1 fill_value: -1 - long_name: TOA Bidirectional Reflectance % Confidence at 0.44um (clear pixels) - standard_name: reflectance_quality units: '%' coordinates: - longitude @@ -2584,15 +3300,15 @@ datasets: quality_reflectance_cloudy_vis04: name: quality_reflectance_cloudy_vis04 + long_name: TOA Bidirectional Reflectance % Confidence at 
0.44um (cloudy pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_asr file_key: quality_reflectance vis_channel_id: 0 + wavelength: [0.384, 0.444, 0.504] category_id: 2 fill_value: -1 - long_name: TOA Bidirectional Reflectance % Confidence at 0.44um (cloudy pixels) - standard_name: reflectance_quality units: '%' coordinates: - longitude @@ -2600,15 +3316,15 @@ datasets: quality_reflectance_all_vis05: name: quality_reflectance_all_vis05 + long_name: TOA Bidirectional Reflectance % Confidence at 0.51um (all pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr file_key: quality_reflectance vis_channel_id: 1 + wavelength: [0.47, 0.51, 0.55] category_id: 0 fill_value: -1 - long_name: TOA Bidirectional Reflectance % Confidence at 0.51um (all pixels) - standard_name: reflectance_quality units: '%' coordinates: - longitude @@ -2616,15 +3332,15 @@ datasets: quality_reflectance_clear_vis05: name: quality_reflectance_clear_vis05 + long_name: TOA Bidirectional Reflectance % Confidence at 0.51um (clear pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr file_key: quality_reflectance vis_channel_id: 1 + wavelength: [0.47, 0.51, 0.55] category_id: 1 fill_value: -1 - long_name: TOA Bidirectional Reflectance % Confidence at 0.51um (clear pixels) - standard_name: reflectance_quality units: '%' coordinates: - longitude @@ -2632,15 +3348,15 @@ datasets: quality_reflectance_cloudy_vis05: name: quality_reflectance_cloudy_vis05 + long_name: TOA Bidirectional Reflectance % Confidence at 0.51um (cloudy pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_asr file_key: quality_reflectance vis_channel_id: 1 + wavelength: [0.47, 0.51, 0.55] category_id: 2 fill_value: -1 - long_name: TOA Bidirectional Reflectance % Confidence at 0.51um (cloudy pixels) - standard_name: reflectance_quality units: '%' coordinates: - longitude @@ -2648,15 +3364,15 @@ datasets: quality_reflectance_all_vis06: name: quality_reflectance_all_vis06 + long_name: TOA Bidirectional Reflectance % Confidence at 0.64um (all pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr file_key: quality_reflectance vis_channel_id: 2 + wavelength: [0.59, 0.64, 0.69] category_id: 0 fill_value: -1 - long_name: TOA Bidirectional Reflectance % Confidence at 0.64um (all pixels) - standard_name: reflectance_quality units: '%' coordinates: - longitude @@ -2664,15 +3380,15 @@ datasets: quality_reflectance_clear_vis06: name: quality_reflectance_clear_vis06 + long_name: TOA Bidirectional Reflectance % Confidence at 0.64um (clear pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr file_key: quality_reflectance vis_channel_id: 2 + wavelength: [0.59, 0.64, 0.69] category_id: 1 fill_value: -1 - long_name: TOA Bidirectional Reflectance % Confidence at 0.64um (clear pixels) - standard_name: reflectance_quality units: '%' coordinates: - longitude @@ -2680,15 +3396,15 @@ datasets: quality_reflectance_cloudy_vis06: name: quality_reflectance_cloudy_vis06 + long_name: TOA Bidirectional Reflectance % Confidence at 0.64um (cloudy pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_asr file_key: quality_reflectance vis_channel_id: 2 + 
wavelength: [0.59, 0.64, 0.69] category_id: 2 fill_value: -1 - long_name: TOA Bidirectional Reflectance % Confidence at 0.64um (cloudy pixels) - standard_name: reflectance_quality units: '%' coordinates: - longitude @@ -2696,15 +3412,15 @@ datasets: quality_reflectance_all_vis08: name: quality_reflectance_all_vis08 + long_name: TOA Bidirectional Reflectance % Confidence at 0.86um (all pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr file_key: quality_reflectance vis_channel_id: 3 + wavelength: [0.815, 0.865, 0.915] category_id: 0 fill_value: -1 - long_name: TOA Bidirectional Reflectance % Confidence at 0.86um (all pixels) - standard_name: reflectance_quality units: '%' coordinates: - longitude @@ -2712,15 +3428,15 @@ datasets: quality_reflectance_clear_vis08: name: quality_reflectance_clear_vis08 + long_name: TOA Bidirectional Reflectance % Confidence at 0.86um (clear pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr file_key: quality_reflectance vis_channel_id: 3 + wavelength: [0.815, 0.865, 0.915] category_id: 1 fill_value: -1 - long_name: TOA Bidirectional Reflectance % Confidence at 0.86um (clear pixels) - standard_name: reflectance_quality units: '%' coordinates: - longitude @@ -2728,15 +3444,15 @@ datasets: quality_reflectance_cloudy_vis08: name: quality_reflectance_cloudy_vis08 + long_name: TOA Bidirectional Reflectance % Confidence at 0.86um (cloudy pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_asr file_key: quality_reflectance vis_channel_id: 3 + wavelength: [0.815, 0.865, 0.915] category_id: 2 fill_value: -1 - long_name: TOA Bidirectional Reflectance % Confidence at 0.86um (cloudy pixels) - standard_name: reflectance_quality units: '%' coordinates: - longitude @@ -2744,15 +3460,15 @@ datasets: quality_reflectance_all_vis09: name: quality_reflectance_all_vis09 + long_name: TOA Bidirectional Reflectance % Confidence at 0.91um (all pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr file_key: quality_reflectance vis_channel_id: 4 + wavelength: [0.894, 0.914, 0.934] category_id: 0 fill_value: -1 - long_name: TOA Bidirectional Reflectance % Confidence at 0.91um (all pixels) - standard_name: reflectance_quality units: '%' coordinates: - longitude @@ -2760,15 +3476,15 @@ datasets: quality_reflectance_clear_vis09: name: quality_reflectance_clear_vis09 + long_name: TOA Bidirectional Reflectance % Confidence at 0.91um (clear pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr file_key: quality_reflectance vis_channel_id: 4 + wavelength: [0.894, 0.914, 0.934] category_id: 1 fill_value: -1 - long_name: TOA Bidirectional Reflectance % Confidence at 0.91um (clear pixels) - standard_name: reflectance_quality units: '%' coordinates: - longitude @@ -2776,15 +3492,15 @@ datasets: quality_reflectance_cloudy_vis09: name: quality_reflectance_cloudy_vis09 + long_name: TOA Bidirectional Reflectance % Confidence at 0.91um (cloudy pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_asr file_key: quality_reflectance vis_channel_id: 4 + wavelength: [0.894, 0.914, 0.934] category_id: 2 fill_value: -1 - long_name: TOA Bidirectional Reflectance % Confidence at 0.91um (cloudy pixels) - standard_name: 
reflectance_quality units: '%' coordinates: - longitude @@ -2792,15 +3508,15 @@ datasets: quality_reflectance_all_nir13: name: quality_reflectance_all_nir13 + long_name: TOA Bidirectional Reflectance % Confidence at 1.38um (all pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr file_key: quality_reflectance vis_channel_id: 5 + wavelength: [1.35, 1.38, 1.41] category_id: 0 fill_value: -1 - long_name: TOA Bidirectional Reflectance % Confidence at 1.38um (all pixels) - standard_name: reflectance_quality units: '%' coordinates: - longitude @@ -2808,15 +3524,15 @@ datasets: quality_reflectance_clear_nir13: name: quality_reflectance_clear_nir13 + long_name: TOA Bidirectional Reflectance % Confidence at 1.38um (clear pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr file_key: quality_reflectance vis_channel_id: 5 + wavelength: [1.35, 1.38, 1.41] category_id: 1 fill_value: -1 - long_name: TOA Bidirectional Reflectance % Confidence at 1.38um (clear pixels) - standard_name: reflectance_quality units: '%' coordinates: - longitude @@ -2824,15 +3540,15 @@ datasets: quality_reflectance_cloudy_nir13: name: quality_reflectance_cloudy_nir13 + long_name: TOA Bidirectional Reflectance % Confidence at 1.38um (cloudy pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_asr file_key: quality_reflectance vis_channel_id: 5 + wavelength: [1.35, 1.38, 1.41] category_id: 2 fill_value: -1 - long_name: TOA Bidirectional Reflectance % Confidence at 1.38um (cloudy pixels) - standard_name: reflectance_quality units: '%' coordinates: - longitude @@ -2840,15 +3556,15 @@ datasets: quality_reflectance_all_nir16: name: quality_reflectance_all_nir16 + long_name: TOA Bidirectional Reflectance % Confidence at 1.61um (all pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr file_key: quality_reflectance vis_channel_id: 6 + wavelength: [1.56, 1.61, 1.66] category_id: 0 fill_value: -1 - long_name: TOA Bidirectional Reflectance % Confidence at 1.61um (all pixels) - standard_name: reflectance_quality units: '%' coordinates: - longitude @@ -2856,15 +3572,15 @@ datasets: quality_reflectance_clear_nir16: name: quality_reflectance_clear_nir16 + long_name: TOA Bidirectional Reflectance % Confidence at 1.61um (clear pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr file_key: quality_reflectance vis_channel_id: 6 + wavelength: [1.56, 1.61, 1.66] category_id: 1 fill_value: -1 - long_name: TOA Bidirectional Reflectance % Confidence at 1.61um (clear pixels) - standard_name: reflectance_quality units: '%' coordinates: - longitude @@ -2872,15 +3588,15 @@ datasets: quality_reflectance_cloudy_nir16: name: quality_reflectance_cloudy_nir16 + long_name: TOA Bidirectional Reflectance % Confidence at 1.61um (cloudy pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_asr file_key: quality_reflectance vis_channel_id: 6 + wavelength: [1.56, 1.61, 1.66] category_id: 2 fill_value: -1 - long_name: TOA Bidirectional Reflectance % Confidence at 1.61um (cloudy pixels) - standard_name: reflectance_quality units: '%' coordinates: - longitude @@ -2888,15 +3604,15 @@ datasets: quality_reflectance_all_nir22: name: quality_reflectance_all_nir22 + long_name: TOA Bidirectional 
Reflectance % Confidence at 2.25um (all pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr file_key: quality_reflectance vis_channel_id: 7 + wavelength: [2.2, 2.25, 2.3] category_id: 0 fill_value: -1 - long_name: TOA Bidirectional Reflectance % Confidence at 2.25um (all pixels) - standard_name: reflectance_quality units: '%' coordinates: - longitude @@ -2904,15 +3620,15 @@ datasets: quality_reflectance_clear_nir22: name: quality_reflectance_clear_nir22 + long_name: TOA Bidirectional Reflectance % Confidence at 2.25um (clear pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr file_key: quality_reflectance vis_channel_id: 7 + wavelength: [2.2, 2.25, 2.3] category_id: 1 fill_value: -1 - long_name: TOA Bidirectional Reflectance % Confidence at 2.25um (clear pixels) - standard_name: reflectance_quality units: '%' coordinates: - longitude @@ -2920,15 +3636,15 @@ datasets: quality_reflectance_cloudy_nir22: name: quality_reflectance_cloudy_nir22 + long_name: TOA Bidirectional Reflectance % Confidence at 2.25um (cloudy pixels) + standard_name: reflectance_quality resolution: 32000 - wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_asr file_key: quality_reflectance vis_channel_id: 7 + wavelength: [2.2, 2.25, 2.3] category_id: 2 fill_value: -1 - long_name: TOA Bidirectional Reflectance % Confidence at 2.25um (cloudy pixels) - standard_name: reflectance_quality units: '%' coordinates: - longitude @@ -2936,15 +3652,15 @@ datasets: quality_bt_all_ir38: name: quality_bt_all_ir38 + long_name: TOA Brightness Temperature % Confidence at 3.80um (all pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr file_key: quality_bt ir_channel_id: 0 + wavelength: [3.4, 3.8, 4.2] category_id: 0 fill_value: -1 - long_name: TOA Brightess Temperature % Confidence at 3.80um (all pixels) - standard_name: brightness_temperature_quality units: '%' coordinates: - longitude @@ -2952,15 +3668,15 @@ datasets: quality_bt_clear_ir38: name: quality_bt_clear_ir38 + long_name: TOA Brightness Temperature % Confidence at 3.80um (clear pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr file_key: quality_bt ir_channel_id: 0 + wavelength: [3.4, 3.8, 4.2] category_id: 1 fill_value: -1 - long_name: TOA Brightess Temperature % Confidence at 3.80um (clear pixels) - standard_name: brightness_temperature_quality units: '%' coordinates: - longitude @@ -2968,15 +3684,15 @@ datasets: quality_bt_cloudy_ir38: name: quality_bt_cloudy_ir38 + long_name: TOA Brightness Temperature % Confidence at 3.80um (cloudy pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [3.4, 3.8, 4.2] file_type: nc_fci_asr file_key: quality_bt ir_channel_id: 0 + wavelength: [3.4, 3.8, 4.2] category_id: 2 fill_value: -1 - long_name: TOA Brightess Temperature % Confidence at 3.80um (cloudy pixels) - standard_name: brightness_temperature_quality units: '%' coordinates: - longitude @@ -2984,15 +3700,15 @@ datasets: quality_bt_all_wv63: name: quality_bt_all_wv63 + long_name: TOA Brightness Temperature % Confidence at 6.30um (all pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr file_key: quality_bt ir_channel_id: 1 + wavelength: [5.3, 6.3, 7.3] category_id: 0 fill_value: -1 - long_name: TOA Brightess 
Temperature % Confidence at 6.30um (all pixels) - standard_name: brightness_temperature_quality units: '%' coordinates: - longitude @@ -3000,15 +3716,15 @@ datasets: quality_bt_clear_wv63: name: quality_bt_clear_wv63 + long_name: TOA Brightness Temperature % Confidence at 6.30um (clear pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr file_key: quality_bt ir_channel_id: 1 + wavelength: [5.3, 6.3, 7.3] category_id: 1 fill_value: -1 - long_name: TOA Brightess Temperature % Confidence at 6.30um (clear pixels) - standard_name: brightness_temperature_quality units: '%' coordinates: - longitude @@ -3016,15 +3732,15 @@ datasets: quality_bt_cloudy_wv63: name: quality_bt_cloudy_wv63 + long_name: TOA Brightness Temperature % Confidence at 6.30um (cloudy pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [5.3, 6.3, 7.3] file_type: nc_fci_asr file_key: quality_bt ir_channel_id: 1 + wavelength: [5.3, 6.3, 7.3] category_id: 2 fill_value: -1 - long_name: TOA Brightess Temperature % Confidence at 6.30um (cloudy pixels) - standard_name: brightness_temperature_quality units: '%' coordinates: - longitude @@ -3032,15 +3748,15 @@ datasets: quality_bt_all_wv73: name: quality_bt_all_wv73 + long_name: TOA Brightness Temperature % Confidence at 7.35um (all pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr file_key: quality_bt ir_channel_id: 2 + wavelength: [6.85, 7.35, 7.85] category_id: 0 fill_value: -1 - long_name: TOA Brightess Temperature % Confidence at 7.35um (all pixels) - standard_name: brightness_temperature_quality units: '%' coordinates: - longitude @@ -3048,15 +3764,15 @@ datasets: quality_bt_clear_wv73: name: quality_bt_clear_wv73 + long_name: TOA Brightness Temperature % Confidence at 7.35um (clear pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr file_key: quality_bt ir_channel_id: 2 + wavelength: [6.85, 7.35, 7.85] category_id: 1 fill_value: -1 - long_name: TOA Brightess Temperature % Confidence at 7.35um (clear pixels) - standard_name: brightness_temperature_quality units: '%' coordinates: - longitude @@ -3064,15 +3780,15 @@ datasets: quality_bt_cloudy_wv73: name: quality_bt_cloudy_wv73 + long_name: TOA Brightness Temperature % Confidence at 7.35um (cloudy pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [6.85, 7.35, 7.85] file_type: nc_fci_asr file_key: quality_bt ir_channel_id: 2 + wavelength: [6.85, 7.35, 7.85] category_id: 2 fill_value: -1 - long_name: TOA Brightess Temperature % Confidence at 7.35um (cloudy pixels) - standard_name: brightness_temperature_quality units: '%' coordinates: - longitude @@ -3080,15 +3796,15 @@ datasets: quality_bt_all_ir87: name: quality_bt_all_ir87 + long_name: TOA Brightness Temperature % Confidence at 8.70um (all pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr file_key: quality_bt ir_channel_id: 3 + wavelength: [8.3, 8.7, 9.1] category_id: 0 fill_value: -1 - long_name: TOA Brightess Temperature % Confidence at 8.70um (all pixels) - standard_name: brightness_temperature_quality units: '%' coordinates: - longitude @@ -3096,15 +3812,15 @@ datasets: quality_bt_clear_ir87: name: quality_bt_clear_ir87 + long_name: TOA Brightness Temperature % Confidence at 8.70um (clear pixels) + standard_name: 
brightness_temperature_quality resolution: 32000 - wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr file_key: quality_bt ir_channel_id: 3 + wavelength: [8.3, 8.7, 9.1] category_id: 1 fill_value: -1 - long_name: TOA Brightess Temperature % Confidence at 8.70um (clear pixels) - standard_name: brightness_temperature_quality units: '%' coordinates: - longitude @@ -3112,15 +3828,15 @@ datasets: quality_bt_cloudy_ir87: name: quality_bt_cloudy_ir87 + long_name: TOA Brightness Temperature % Confidence at 8.70um (cloudy pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [8.3, 8.7, 9.1] file_type: nc_fci_asr file_key: quality_bt ir_channel_id: 3 + wavelength: [8.3, 8.7, 9.1] category_id: 2 fill_value: -1 - long_name: TOA Brightess Temperature % Confidence at 8.70um (cloudy pixels) - standard_name: brightness_temperature_quality units: '%' coordinates: - longitude @@ -3128,15 +3844,15 @@ datasets: quality_bt_all_ir97: name: quality_bt_all_ir97 + long_name: TOA Brightness Temperature % Confidence at 9.66um (all pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr file_key: quality_bt ir_channel_id: 4 + wavelength: [9.36, 9.66, 9.96] category_id: 0 fill_value: -1 - long_name: TOA Brightess Temperature % Confidence at 9.66um (all pixels) - standard_name: brightness_temperature_quality units: '%' coordinates: - longitude @@ -3144,15 +3860,15 @@ datasets: quality_bt_clear_ir97: name: quality_bt_clear_ir97 + long_name: TOA Brightness Temperature % Confidence at 9.66um (clear pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr file_key: quality_bt ir_channel_id: 4 + wavelength: [9.36, 9.66, 9.96] category_id: 1 fill_value: -1 - long_name: TOA Brightess Temperature % Confidence at 9.66um (clear pixels) - standard_name: brightness_temperature_quality units: '%' coordinates: - longitude @@ -3160,15 +3876,15 @@ datasets: quality_bt_cloudy_ir97: name: quality_bt_cloudy_ir97 + long_name: TOA Brightness Temperature % Confidence at 9.66um (cloudy pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [9.36, 9.66, 9.96] file_type: nc_fci_asr file_key: quality_bt ir_channel_id: 4 + wavelength: [9.36, 9.66, 9.96] category_id: 2 fill_value: -1 - long_name: TOA Brightess Temperature % Confidence at 9.66um (cloudy pixels) - standard_name: brightness_temperature_quality units: '%' coordinates: - longitude @@ -3176,15 +3892,15 @@ datasets: quality_bt_all_ir105: name: quality_bt_all_ir105 + long_name: TOA Brightness Temperature % Confidence at 10.50um (all pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr file_key: quality_bt ir_channel_id: 5 + wavelength: [9.8, 10.5, 11.2] category_id: 0 fill_value: -1 - long_name: TOA Brightess Temperature % Confidence at 10.50um (all pixels) - standard_name: brightness_temperature_quality units: '%' coordinates: - longitude @@ -3192,15 +3908,15 @@ datasets: quality_bt_clear_ir105: name: quality_bt_clear_ir105 + long_name: TOA Brightness Temperature % Confidence at 10.50um (clear pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr file_key: quality_bt ir_channel_id: 5 + wavelength: [9.8, 10.5, 11.2] category_id: 1 fill_value: -1 - long_name: TOA Brightess Temperature % Confidence at 10.50um (clear pixels) - standard_name: 
brightness_temperature_quality units: '%' coordinates: - longitude @@ -3208,15 +3924,15 @@ datasets: quality_bt_cloudy_ir105: name: quality_bt_cloudy_ir105 + long_name: TOA Brightness Temperature % Confidence at 10.50um (cloudy pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [9.8, 10.5, 11.2] file_type: nc_fci_asr file_key: quality_bt ir_channel_id: 5 + wavelength: [9.8, 10.5, 11.2] category_id: 2 fill_value: -1 - long_name: TOA Brightess Temperature % Confidence at 10.50um (cloudy pixels) - standard_name: brightness_temperature_quality units: '%' coordinates: - longitude @@ -3224,15 +3940,15 @@ datasets: quality_bt_all_ir123: name: quality_bt_all_ir123 + long_name: TOA Brightness Temperature % Confidence at 12.30um (all pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr file_key: quality_bt ir_channel_id: 6 + wavelength: [11.8, 12.3, 12.8] category_id: 0 fill_value: -1 - long_name: TOA Brightess Temperature % Confidence at 12.30um (all pixels) - standard_name: brightness_temperature_quality units: '%' coordinates: - longitude @@ -3240,15 +3956,15 @@ datasets: quality_bt_clear_ir123: name: quality_bt_clear_ir123 + long_name: TOA Brightness Temperature % Confidence at 12.30um (clear pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr file_key: quality_bt ir_channel_id: 6 + wavelength: [11.8, 12.3, 12.8] category_id: 1 fill_value: -1 - long_name: TOA Brightess Temperature % Confidence at 12.30um (clear pixels) - standard_name: brightness_temperature_quality units: '%' coordinates: - longitude @@ -3256,15 +3972,15 @@ datasets: quality_bt_cloudy_ir123: name: quality_bt_cloudy_ir123 + long_name: TOA Brightness Temperature % Confidence at 12.30um (cloudy pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [11.8, 12.3, 12.8] file_type: nc_fci_asr file_key: quality_bt ir_channel_id: 6 + wavelength: [11.8, 12.3, 12.8] category_id: 2 fill_value: -1 - long_name: TOA Brightess Temperature % Confidence at 12.30um (cloudy pixels) - standard_name: brightness_temperature_quality units: '%' coordinates: - longitude @@ -3272,15 +3988,15 @@ datasets: quality_bt_all_ir133: name: quality_bt_all_ir133 + long_name: TOA Brightness Temperature % Confidence at 13.30um (all pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr file_key: quality_bt ir_channel_id: 7 + wavelength: [12.7, 13.3, 13.9] category_id: 0 fill_value: -1 - long_name: TOA Brightess Temperature % Confidence at 13.30um (all pixels) - standard_name: brightness_temperature_quality units: '%' coordinates: - longitude @@ -3288,15 +4004,15 @@ datasets: quality_bt_clear_ir133: name: quality_bt_clear_ir133 + long_name: TOA Brightness Temperature % Confidence at 13.30um (clear pixels) + standard_name: brightness_temperature_quality resolution: 32000 - wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr file_key: quality_bt ir_channel_id: 7 + wavelength: [12.7, 13.3, 13.9] category_id: 1 fill_value: -1 - long_name: TOA Brightess Temperature % Confidence at 13.30um (clear pixels) - standard_name: brightness_temperature_quality units: '%' coordinates: - longitude @@ -3304,15 +4020,15 @@ datasets: quality_bt_cloudy_ir133: name: quality_bt_cloudy_ir133 + long_name: TOA Brightness Temperature % Confidence at 13.30um (cloudy pixels) + standard_name: 
brightness_temperature_quality resolution: 32000 - wavelength: [12.7, 13.3, 13.9] file_type: nc_fci_asr file_key: quality_bt ir_channel_id: 7 + wavelength: [12.7, 13.3, 13.9] category_id: 2 fill_value: -1 - long_name: TOA Brightess Temperature % Confidence at 13.30um (cloudy pixels) - standard_name: brightness_temperature_quality units: '%' coordinates: - longitude @@ -3320,11 +4036,11 @@ datasets: pixel_percentage_all: name: pixel_percentage_all + long_name: Percentage of FoR pixels used (all pixels) + standard_name: pixels_used_fraction resolution: 32000 file_type: nc_fci_asr file_key: pixel_percentage - long_name: Percentage of FoR pixels used (all pixels) - standard_name: pixels_used_fraction category_id: 0 units: '%' coordinates: @@ -3333,11 +4049,11 @@ datasets: pixel_percentage_clear: name: pixel_percentage_clear + long_name: Percentage of FoR pixels used (clear pixels) + standard_name: pixels_used_fraction resolution: 32000 file_type: nc_fci_asr file_key: pixel_percentage - long_name: Percentage of FoR pixels used (clear pixels) - standard_name: pixels_used_fraction category_id: 1 units: '%' coordinates: @@ -3346,11 +4062,11 @@ datasets: pixel_percentage_cloudy: name: pixel_percentage_cloudy + long_name: Percentage of FoR pixels used (cloudy pixels) + standard_name: pixels_used_fraction resolution: 32000 file_type: nc_fci_asr file_key: pixel_percentage - long_name: Percentage of FoR pixels used (cloudy pixels) - standard_name: pixels_used_fraction category_id: 2 units: '%' coordinates: @@ -3359,18 +4075,18 @@ datasets: product_quality_asr: name: product_quality_asr + standard_name: product_quality file_type: nc_fci_asr file_key: product_quality - standard_name: product_quality product_completeness_asr: name: product_completeness_asr + standard_name: product_completeness file_type: nc_fci_asr file_key: product_completeness - standard_name: product_completeness product_timeliness_asr: name: product_timeliness_asr + standard_name: product_timeliness file_type: nc_fci_asr file_key: product_timeliness - standard_name: product_timeliness From 093921c8ca3bf070feb098a1f79ffd8e20bde30f Mon Sep 17 00:00:00 2001 From: Olivier Samain Date: Mon, 15 Jan 2024 16:54:32 +0100 Subject: [PATCH 1055/1416] changed file_key to nc_key --- satpy/etc/readers/fci_l2_nc.yaml | 678 ++++++++++----------- satpy/readers/fci_l2_nc.py | 18 +- satpy/tests/reader_tests/test_fci_l2_nc.py | 66 +- 3 files changed, 381 insertions(+), 381 deletions(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index dbf5db6a8a..daca83ada1 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -82,19 +82,19 @@ datasets: name: intm_latitude standard_name: latitude file_type: nc_fci_amvi - file_key: intm_latitude + nc_key: intm_latitude intm_longitude: name: intm_longitude standard_name: longitude file_type: nc_fci_amvi - file_key: intm_longitude + nc_key: intm_longitude intm_speed: name: intm_speed standard_name: wind_speed file_type: nc_fci_amvi - file_key: intm_speed + nc_key: intm_speed coordinates: - intm_longitude - intm_latitude @@ -103,7 +103,7 @@ datasets: name: intm_u_component standard_name: wind_speed_horizontal_component file_type: nc_fci_amvi - file_key: intm_u_component + nc_key: intm_u_component coordinates: - intm_longitude - intm_latitude @@ -112,7 +112,7 @@ datasets: name: intm_v_component standard_name: wind_speed_vertical_component file_type: nc_fci_amvi - file_key: intm_v_component + nc_key: intm_v_component coordinates: - intm_longitude - 
intm_latitude @@ -121,7 +121,7 @@ datasets: name: intm_direction standard_name: wind_to_direction file_type: nc_fci_amvi - file_key: intm_direction + nc_key: intm_direction coordinates: - intm_longitude - intm_latitude @@ -130,7 +130,7 @@ datasets: name: intm_pressure standard_name: wind_pressure file_type: nc_fci_amvi - file_key: intm_pressure + nc_key: intm_pressure coordinates: - intm_longitude - intm_latitude @@ -139,7 +139,7 @@ datasets: name: intm_temperature standard_name: wind_temperature file_type: nc_fci_amvi - file_key: intm_temperature + nc_key: intm_temperature coordinates: - intm_longitude - intm_latitude @@ -148,7 +148,7 @@ datasets: name: intm_target_type standard_name: wind_target_type file_type: nc_fci_amvi - file_key: target_type + nc_key: target_type coordinates: - intm_longitude - intm_latitude @@ -157,7 +157,7 @@ datasets: name: intm_wind_method standard_name: wind_wind_method file_type: nc_fci_amvi - file_key: wind_method + nc_key: wind_method coordinates: - intm_longitude - intm_latitude @@ -168,25 +168,25 @@ datasets: name: channel_id standard_name: channel_id file_type: nc_fci_amv - file_key: channel_id + nc_key: channel_id amv_latitude: name: latitude standard_name: latitude file_type: nc_fci_amv - file_key: latitude + nc_key: latitude amv_longitude: name: longitude standard_name: longitude file_type: nc_fci_amv - file_key: longitude + nc_key: longitude speed: name: speed standard_name: wind_speed file_type: nc_fci_amv - file_key: speed + nc_key: speed coordinates: - longitude - latitude @@ -195,7 +195,7 @@ datasets: name: speed_u_component standard_name: wind_speed_horizontal_component file_type: nc_fci_amv - file_key: speed_u_component + nc_key: speed_u_component coordinates: - longitude - latitude @@ -204,7 +204,7 @@ datasets: name: speed_v_component standard_name: wind_speed_vertical_component file_type: nc_fci_amv - file_key: speed_v_component + nc_key: speed_v_component coordinates: - longitude - latitude @@ -213,7 +213,7 @@ datasets: name: direction standard_name: wind_to_direction file_type: nc_fci_amv - file_key: direction + nc_key: direction coordinates: - longitude - latitude @@ -222,7 +222,7 @@ datasets: name: pressure standard_name: wind_pressure file_type: nc_fci_amv - file_key: pressure + nc_key: pressure coordinates: - longitude - latitude @@ -231,7 +231,7 @@ datasets: name: temperature standard_name: wind_temperature file_type: nc_fci_amv - file_key: temperature + nc_key: temperature coordinates: - longitude - latitude @@ -240,7 +240,7 @@ datasets: name: target_type standard_name: wind_target_type file_type: nc_fci_amv - file_key: target_type + nc_key: target_type coordinates: - longitude - latitude @@ -249,7 +249,7 @@ datasets: name: wind_method standard_name: wind_wind_method file_type: nc_fci_amv - file_key: wind_method + nc_key: wind_method coordinates: - longitude - latitude @@ -258,7 +258,7 @@ datasets: name: fcst_u standard_name: wind_forecast_u_component file_type: nc_fci_amv - file_key: forecast_u_component + nc_key: forecast_u_component coordinates: - longitude - latitude @@ -267,7 +267,7 @@ datasets: name: fcst_v standard_name: wind_forecast_v_component file_type: nc_fci_amv - file_key: forecast_v_component + nc_key: forecast_v_component coordinates: - longitude - latitude @@ -276,7 +276,7 @@ datasets: name: best_fit_pres standard_name: wind_best_fit_pressure file_type: nc_fci_amv - file_key: best_fit_pressure + nc_key: best_fit_pressure coordinates: - longitude - latitude @@ -285,7 +285,7 @@ datasets: name: best_fit_u standard_name: 
wind_best_fit_u_component file_type: nc_fci_amv - file_key: best_fit_u_component + nc_key: best_fit_u_component coordinates: - longitude - latitude @@ -294,7 +294,7 @@ datasets: name: best_fit_v standard_name: wind_best_fit_v_component file_type: nc_fci_amv - file_key: best_fit_v_component + nc_key: best_fit_v_component coordinates: - longitude - latitude @@ -303,7 +303,7 @@ datasets: name: qi standard_name: wind_overall_reliability file_type: nc_fci_amv - file_key: overall_reliability + nc_key: overall_reliability coordinates: - longitude - latitude @@ -312,7 +312,7 @@ datasets: name: qi_excl_fcst standard_name: wind_overall_reliability_exc_forecast file_type: nc_fci_amv - file_key: overall_reliability_exc_forecast + nc_key: overall_reliability_exc_forecast coordinates: - longitude - latitude @@ -321,19 +321,19 @@ datasets: name: product_quality standard_name: product_quality file_type: nc_fci_amv - file_key: product_quality + nc_key: product_quality product_completeness: name: product_completeness standard_name: product_completeness file_type: nc_fci_amv - file_key: product_completeness + nc_key: product_completeness product_timeliness: name: product_timeliness standard_name: product_timeliness file_type: nc_fci_amv - file_key: product_timeliness + nc_key: product_timeliness # CLM - Cloud Mask @@ -342,7 +342,7 @@ datasets: standard_name: cloud_mask_classification resolution: 2000 file_type: nc_fci_clm - file_key: cloud_state + nc_key: cloud_state fill_value: -127 import_enum_information: True @@ -351,7 +351,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_clm - file_key: quality_illumination + nc_key: quality_illumination fill_value: -127 import_enum_information: True @@ -360,7 +360,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_clm - file_key: quality_nwp_parameters + nc_key: quality_nwp_parameters fill_value: -127 import_enum_information: True @@ -369,7 +369,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_clm - file_key: quality_MTG_parameters + nc_key: quality_MTG_parameters fill_value: -127 import_enum_information: True @@ -378,7 +378,7 @@ datasets: standard_name: quality_flag resolution: 2000 file_type: nc_fci_clm - file_key: quality_overall_processing + nc_key: quality_overall_processing fill_value: -127 import_enum_information: True @@ -386,19 +386,19 @@ datasets: name: product_quality_clm standard_name: product_quality file_type: nc_fci_clm - file_key: product_quality + nc_key: product_quality product_completeness_clm: name: product_completeness_clm standard_name: product_completeness file_type: nc_fci_clm - file_key: product_completeness + nc_key: product_completeness product_timeliness_clm: name: product_timeliness_clm standard_name: product_timeliness file_type: nc_fci_clm - file_key: product_timeliness + nc_key: product_timeliness # CT - Cloud Type @@ -407,7 +407,7 @@ datasets: standard_name: cloud_phase_classification resolution: 2000 file_type: nc_fci_ct - file_key: cloud_phase + nc_key: cloud_phase fill_value: -127 import_enum_information: True @@ -416,7 +416,7 @@ datasets: standard_name: cloud_type_classification resolution: 2000 file_type: nc_fci_ct - file_key: cloud_type + nc_key: cloud_type fill_value: -127 import_enum_information: True @@ -425,7 +425,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_ct - file_key: quality_illumination + nc_key: quality_illumination fill_value: -127 import_enum_information: True @@ -434,7 +434,7 @@ datasets: standard_name: 
status_flag resolution: 2000 file_type: nc_fci_ct - file_key: quality_nwp_parameters + nc_key: quality_nwp_parameters fill_value: -127 import_enum_information: True @@ -443,7 +443,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_ct - file_key: quality_MTG_parameters + nc_key: quality_MTG_parameters fill_value: -127 import_enum_information: True @@ -452,7 +452,7 @@ datasets: standard_name: quality_flag resolution: 2000 file_type: nc_fci_ct - file_key: quality_overall_processing + nc_key: quality_overall_processing fill_value: -127 import_enum_information: True @@ -460,19 +460,19 @@ datasets: name: product_quality_ct standard_name: product_quality file_type: nc_fci_ct - file_key: product_quality + nc_key: product_quality product_completeness_ct: name: product_completeness_ct standard_name: product_completeness file_type: nc_fci_ct - file_key: product_completeness + nc_key: product_completeness product_timeliness_ct: name: product_timeliness_ct standard_name: product_timeliness file_type: nc_fci_ct - file_key: product_timeliness + nc_key: product_timeliness # CTTH - Cloud Top Temperature and Height @@ -481,42 +481,42 @@ datasets: standard_name: height_at_cloud_top_for_aviation resolution: 2000 file_type: nc_fci_ctth - file_key: cloud_top_aviation_height + nc_key: cloud_top_aviation_height cloud_top_height: name: cloud_top_height standard_name: height_at_cloud_top resolution: 2000 file_type: nc_fci_ctth - file_key: cloud_top_height + nc_key: cloud_top_height cloud_top_pressure: name: cloud_top_pressure standard_name: air_pressure_at_cloud_top resolution: 2000 file_type: nc_fci_ctth - file_key: cloud_top_pressure + nc_key: cloud_top_pressure cloud_top_temperature: name: cloud_top_temperature standard_name: air_temperature_at_cloud_top resolution: 2000 file_type: nc_fci_ctth - file_key: cloud_top_temperature + nc_key: cloud_top_temperature effective_cloudiness: name: effective_cloudiness standard_name: effective_cloud_cover resolution: 2000 file_type: nc_fci_ctth - file_key: effective_cloudiness + nc_key: effective_cloudiness quality_status_ctth: name: quality_status_ctth standard_name: status_flag resolution: 2000 file_type: nc_fci_ctth - file_key: quality_status + nc_key: quality_status fill_value: -127 import_enum_information: True @@ -525,7 +525,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_ctth - file_key: quality_rtm + nc_key: quality_rtm fill_value: -127 import_enum_information: True @@ -534,7 +534,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_ctth - file_key: quality_method + nc_key: quality_method fill_value: -127 import_enum_information: True @@ -543,7 +543,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_ctth - file_key: quality_nwp_parameters + nc_key: quality_nwp_parameters fill_value: -127 import_enum_information: True @@ -552,7 +552,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_ctth - file_key: quality_MTG_parameters + nc_key: quality_MTG_parameters fill_value: -127 import_enum_information: True @@ -561,7 +561,7 @@ datasets: standard_name: quality_flag resolution: 2000 file_type: nc_fci_ctth - file_key: quality_overall_processing + nc_key: quality_overall_processing fill_value: -127 import_enum_information: True @@ -570,7 +570,7 @@ datasets: standard_name: quality_flag resolution: 2000 file_type: nc_fci_ctth - file_key: quality_overall_processing_aviation + nc_key: quality_overall_processing_aviation fill_value: -127 
import_enum_information: True @@ -578,19 +578,19 @@ datasets: name: product_quality_ctth standard_name: product_quality file_type: nc_fci_ctth - file_key: product_quality + nc_key: product_quality product_completeness_ctth: name: product_completeness_ctth standard_name: product_completeness file_type: nc_fci_ctth - file_key: product_completeness + nc_key: product_completeness product_timeliness_ctth: name: product_timeliness_ctth standard_name: product_timeliness file_type: nc_fci_ctth - file_key: product_timeliness + nc_key: product_timeliness # FIR - Active Fire Monitoring @@ -599,14 +599,14 @@ datasets: standard_name: fire_probability resolution: 2000 file_type: nc_fci_fir - file_key: fire_probability + nc_key: fire_probability fire_result: name: fire_result standard_name: active_fire_classification resolution: 2000 file_type: nc_fci_fir - file_key: fire_result + nc_key: fire_result fill_value: -127 import_enum_information: True @@ -614,19 +614,19 @@ datasets: name: product_quality_fir standard_name: product_quality file_type: nc_fci_fir - file_key: product_quality + nc_key: product_quality product_completeness_fir: name: product_completeness_fir standard_name: product_completeness file_type: nc_fci_fir - file_key: product_completeness + nc_key: product_completeness product_timeliness_fir: name: product_timeliness_fir standard_name: product_timeliness file_type: nc_fci_fir - file_key: product_timeliness + nc_key: product_timeliness # GII - Global Instability Index @@ -635,7 +635,7 @@ datasets: standard_name: atmosphere_stability_k_index resolution: 6000 file_type: nc_fci_gii - file_key: k_index + nc_key: k_index coordinates: - longitude - latitude @@ -645,7 +645,7 @@ datasets: standard_name: atmosphere_stability_lifted_index resolution: 6000 file_type: nc_fci_gii - file_key: lifted_index + nc_key: lifted_index coordinates: - longitude - latitude @@ -655,7 +655,7 @@ datasets: standard_name: atmosphere_mass_content_of_water_vapor resolution: 6000 file_type: nc_fci_gii - file_key: prec_water_high + nc_key: prec_water_high coordinates: - longitude - latitude @@ -665,7 +665,7 @@ datasets: standard_name: atmosphere_mass_content_of_water_vapor resolution: 6000 file_type: nc_fci_gii - file_key: prec_water_low + nc_key: prec_water_low coordinates: - longitude - latitude @@ -675,7 +675,7 @@ datasets: standard_name: atmosphere_mass_content_of_water_vapor resolution: 6000 file_type: nc_fci_gii - file_key: prec_water_mid + nc_key: prec_water_mid coordinates: - longitude - latitude @@ -685,7 +685,7 @@ datasets: standard_name: atmosphere_mass_content_of_water_vapor resolution: 6000 file_type: nc_fci_gii - file_key: prec_water_total + nc_key: prec_water_total coordinates: - longitude - latitude @@ -696,7 +696,7 @@ datasets: standard_name: cloud_free_area_fraction resolution: 6000 file_type: nc_fci_gii - file_key: percent_cloud_free + nc_key: percent_cloud_free units: '%' coordinates: - longitude @@ -707,7 +707,7 @@ datasets: standard_name: number_of_iterations resolution: 6000 file_type: nc_fci_gii - file_key: number_of_iterations + nc_key: number_of_iterations coordinates: - longitude - latitude @@ -716,19 +716,19 @@ datasets: name: product_quality_gii standard_name: product_quality file_type: nc_fci_gii - file_key: product_quality + nc_key: product_quality product_completeness_gii: name: product_completeness_gii standard_name: product_completeness file_type: nc_fci_gii - file_key: product_completeness + nc_key: product_completeness product_timeliness_gii: name: product_timeliness_gii standard_name: 
product_timeliness file_type: nc_fci_gii - file_key: product_timeliness + nc_key: product_timeliness # OCA - Optimal Cloud Analysis @@ -737,7 +737,7 @@ datasets: standard_name: thermodynamic_phase_of_cloud_particles_classification resolution: 2000 file_type: nc_fci_oca - file_key: retrieved_cloud_phase + nc_key: retrieved_cloud_phase fill_value: -127 import_enum_information: True @@ -746,7 +746,7 @@ datasets: standard_name: atmosphere_optical_thickness_due_to_cloud resolution: 2000 file_type: nc_fci_oca - file_key: retrieved_cloud_optical_thickness + nc_key: retrieved_cloud_optical_thickness retrieved_cloud_optical_thickness_upper_layer: name: retrieved_cloud_optical_thickness_upper_layer @@ -754,7 +754,7 @@ datasets: standard_name: atmosphere_optical_thickness_due_to_cloud resolution: 2000 file_type: nc_fci_oca - file_key: retrieved_cloud_optical_thickness + nc_key: retrieved_cloud_optical_thickness layer: 0 retrieval_error_cloud_optical_thickness_upper_layer: @@ -763,7 +763,7 @@ datasets: standard_name: atmosphere_optical_thickness_due_to_cloud standard_error resolution: 2000 file_type: nc_fci_oca - file_key: retrieval_error_cloud_optical_thickness + nc_key: retrieval_error_cloud_optical_thickness layer: 0 retrieved_cloud_optical_thickness_lower_layer: @@ -772,7 +772,7 @@ datasets: standard_name: atmosphere_optical_thickness_due_to_cloud resolution: 2000 file_type: nc_fci_oca - file_key: retrieved_cloud_optical_thickness + nc_key: retrieved_cloud_optical_thickness layer: 1 retrieval_error_cloud_optical_thickness_lower_layer: @@ -781,7 +781,7 @@ datasets: standard_name: atmosphere_optical_thickness_due_to_cloud standard_error resolution: 2000 file_type: nc_fci_oca - file_key: retrieval_error_cloud_optical_thickness + nc_key: retrieval_error_cloud_optical_thickness layer: 1 retrieved_cloud_particle_effective_radius: @@ -789,14 +789,14 @@ datasets: standard_name: effective_radius_of_cloud_particles_at_cloud_top resolution: 2000 file_type: nc_fci_oca - file_key: retrieved_cloud_particle_effective_radius + nc_key: retrieved_cloud_particle_effective_radius retrieval_error_cloud_particle_effective_radius: name: retrieval_error_cloud_particle_effective_radius standard_name: effective_radius_of_cloud_particles_at_cloud_top standard_error resolution: 2000 file_type: nc_fci_oca - file_key: retrieval_error_cloud_particle_effective_radius + nc_key: retrieval_error_cloud_particle_effective_radius retrieved_cloud_top_pressure_upper_layer: name: retrieved_cloud_top_pressure_upper_layer @@ -804,7 +804,7 @@ datasets: standard_name: air_pressure_at_cloud_top resolution: 2000 file_type: nc_fci_oca - file_key: retrieved_cloud_top_pressure + nc_key: retrieved_cloud_top_pressure layer: 0 retrieval_error_cloud_top_pressure_upper_layer: @@ -813,7 +813,7 @@ datasets: standard_name: air_pressure_at_cloud_top standard_error resolution: 2000 file_type: nc_fci_oca - file_key: retrieval_error_cloud_top_pressure + nc_key: retrieval_error_cloud_top_pressure layer: 0 retrieved_cloud_top_pressure_lower_layer: @@ -822,7 +822,7 @@ datasets: standard_name: air_pressure_at_cloud_top resolution: 2000 file_type: nc_fci_oca - file_key: retrieved_cloud_top_pressure + nc_key: retrieved_cloud_top_pressure layer: 1 retrieval_error_cloud_top_pressure_lower_layer: @@ -831,7 +831,7 @@ datasets: standard_name: air_pressure_at_cloud_top standard_error resolution: 2000 file_type: nc_fci_oca - file_key: retrieval_error_cloud_top_pressure + nc_key: retrieval_error_cloud_top_pressure layer: 1 retrieved_cloud_top_temperature: @@ -839,39 
+839,39 @@ datasets: standard_name: air_temperature_at_cloud_top resolution: 2000 file_type: nc_fci_oca - file_key: retrieved_cloud_top_temperature + nc_key: retrieved_cloud_top_temperature retrieved_cloud_top_height: name: retrieved_cloud_top_height standard_name: height_at_cloud_top resolution: 2000 file_type: nc_fci_oca - file_key: retrieved_cloud_top_height + nc_key: retrieved_cloud_top_height quality_jmeas: name: quality_jmeas standard_name: cost_function_part_due_to_measurements resolution: 2000 file_type: nc_fci_oca - file_key: quality_jmeas + nc_key: quality_jmeas product_quality_oca: name: product_quality_oca standard_name: product_quality file_type: nc_fci_oca - file_key: product_quality + nc_key: product_quality product_completeness_oca: name: product_completeness_oca standard_name: product_completeness file_type: nc_fci_oca - file_key: product_completeness + nc_key: product_completeness product_timeliness_oca: name: product_timeliness_oca standard_name: product_timeliness file_type: nc_fci_oca - file_key: product_timeliness + nc_key: product_timeliness # OLR - Outgoing Longwave Radiation @@ -880,14 +880,14 @@ datasets: standard_name: outgoing_longwave_radiation resolution: 2000 file_type: nc_fci_olr - file_key: olr_value + nc_key: olr_value cloud_type_olr: name: cloud_type_olr standard_name: cloud_type_classification resolution: 2000 file_type: nc_fci_olr - file_key: cloud_type + nc_key: cloud_type fill_value: -127 import_enum_information: True @@ -896,7 +896,7 @@ datasets: standard_name: quality_flag resolution: 2000 file_type: nc_fci_olr - file_key: quality_overall_processing + nc_key: quality_overall_processing fill_value: -127 import_enum_information: True @@ -904,19 +904,19 @@ datasets: name: product_quality_olr standard_name: product_quality file_type: nc_fci_olr - file_key: product_quality + nc_key: product_quality product_completeness_olr: name: product_completeness_olr standard_name: product_completeness file_type: nc_fci_olr - file_key: product_completeness + nc_key: product_completeness product_timeliness_olr: name: product_timeliness_olr standard_name: product_timeliness file_type: nc_fci_olr - file_key: product_timeliness + nc_key: product_timeliness # CRM - Clear-Sky Reflectance Maps @@ -926,7 +926,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 1000 file_type: nc_fci_crm - file_key: mean_clear_sky_reflectance + nc_key: mean_clear_sky_reflectance units: '%' crm_vis04: @@ -936,7 +936,7 @@ datasets: resolution: 1000 wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_crm - file_key: mean_clear_sky_reflectance + nc_key: mean_clear_sky_reflectance units: '%' vis_channel_id: 0 @@ -947,7 +947,7 @@ datasets: resolution: 1000 wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_crm - file_key: mean_clear_sky_reflectance + nc_key: mean_clear_sky_reflectance units: '%' vis_channel_id: 1 @@ -958,7 +958,7 @@ datasets: resolution: 1000 wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_crm - file_key: mean_clear_sky_reflectance + nc_key: mean_clear_sky_reflectance units: '%' vis_channel_id: 2 @@ -969,7 +969,7 @@ datasets: resolution: 1000 wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_crm - file_key: mean_clear_sky_reflectance + nc_key: mean_clear_sky_reflectance units: '%' vis_channel_id: 3 @@ -980,7 +980,7 @@ datasets: resolution: 1000 wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_crm - file_key: mean_clear_sky_reflectance + nc_key: mean_clear_sky_reflectance units: '%' vis_channel_id: 4 @@ -991,7 +991,7 @@ datasets: resolution: 1000 wavelength: 
[1.35, 1.38, 1.41] file_type: nc_fci_crm - file_key: mean_clear_sky_reflectance + nc_key: mean_clear_sky_reflectance units: '%' vis_channel_id: 5 @@ -1002,7 +1002,7 @@ datasets: resolution: 1000 wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_crm - file_key: mean_clear_sky_reflectance + nc_key: mean_clear_sky_reflectance units: '%' vis_channel_id: 6 @@ -1013,7 +1013,7 @@ datasets: resolution: 1000 wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_crm - file_key: mean_clear_sky_reflectance + nc_key: mean_clear_sky_reflectance units: '%' vis_channel_id: 7 @@ -1023,7 +1023,7 @@ datasets: standard_name: solar_zenith_angle resolution: 1000 file_type: nc_fci_crm - file_key: mean_solar_zenith + nc_key: mean_solar_zenith mean_rel_azi: name: mean_rel_azi @@ -1031,47 +1031,47 @@ datasets: standard_name: relative_sun_sensor_azimuth_angle resolution: 1000 file_type: nc_fci_crm - file_key: mean_rel_solar_sat_azimuth + nc_key: mean_rel_solar_sat_azimuth n_acc: name: n_acc standard_name: number_of_accumulations resolution: 1000 file_type: nc_fci_crm - file_key: number_of_accumulations + nc_key: number_of_accumulations historical_data: name: historical_data standard_name: status_flag resolution: 1000 file_type: nc_fci_crm - file_key: historical_data + nc_key: historical_data import_enum_information: True product_quality_crm: name: product_quality_crm standard_name: product_quality file_type: nc_fci_crm - file_key: product_quality + nc_key: product_quality product_completeness_crm: name: product_completeness_crm standard_name: product_completeness file_type: nc_fci_crm - file_key: product_completeness + nc_key: product_completeness product_timeliness_crm: name: product_timeliness_crm standard_name: product_timeliness file_type: nc_fci_crm - file_key: product_timeliness + nc_key: product_timeliness # LAT/LON FOR SEGMENTED PRODUCTS latitude: name: latitude standard_name: latitude - file_key: latitude + nc_key: latitude resolution: [6000, 6000, 32000] file_type: [ nc_fci_gii, nc_fci_toz, nc_fci_asr ] units: degree_north @@ -1079,7 +1079,7 @@ datasets: longitude: name: longitude standard_name: longitude - file_key: longitude + nc_key: longitude resolution: [6000, 6000, 32000] file_type: [ nc_fci_gii, nc_fci_toz, nc_fci_asr ] units: degree_east @@ -1091,7 +1091,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag + nc_key: cloud_mask_test_flag extract_byte: 0 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] @@ -1101,7 +1101,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag + nc_key: cloud_mask_test_flag extract_byte: 1 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] @@ -1111,7 +1111,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag + nc_key: cloud_mask_test_flag extract_byte: 2 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] @@ -1121,7 +1121,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag + nc_key: cloud_mask_test_flag extract_byte: 3 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] @@ -1131,7 +1131,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag + nc_key: cloud_mask_test_flag extract_byte: 4 flag_values: [0,1] flag_meanings: ['Test not carried out','Test 
carried out'] @@ -1141,7 +1141,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag + nc_key: cloud_mask_test_flag extract_byte: 5 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] @@ -1151,7 +1151,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag + nc_key: cloud_mask_test_flag extract_byte: 6 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] @@ -1161,7 +1161,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag + nc_key: cloud_mask_test_flag extract_byte: 7 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] @@ -1171,7 +1171,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag + nc_key: cloud_mask_test_flag extract_byte: 8 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] @@ -1181,7 +1181,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag + nc_key: cloud_mask_test_flag extract_byte: 9 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] @@ -1191,7 +1191,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag + nc_key: cloud_mask_test_flag extract_byte: 10 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] @@ -1201,7 +1201,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag + nc_key: cloud_mask_test_flag extract_byte: 11 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] @@ -1211,7 +1211,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag + nc_key: cloud_mask_test_flag extract_byte: 12 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] @@ -1221,7 +1221,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag + nc_key: cloud_mask_test_flag extract_byte: 13 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] @@ -1231,7 +1231,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag + nc_key: cloud_mask_test_flag extract_byte: 14 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] @@ -1241,7 +1241,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag + nc_key: cloud_mask_test_flag extract_byte: 15 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] @@ -1251,7 +1251,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag + nc_key: cloud_mask_test_flag extract_byte: 16 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] @@ -1261,7 +1261,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag + nc_key: cloud_mask_test_flag extract_byte: 17 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] @@ -1271,7 +1271,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag + 
nc_key: cloud_mask_test_flag extract_byte: 18 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] @@ -1281,7 +1281,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag + nc_key: cloud_mask_test_flag extract_byte: 19 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] @@ -1291,7 +1291,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag + nc_key: cloud_mask_test_flag extract_byte: 20 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] @@ -1301,7 +1301,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag + nc_key: cloud_mask_test_flag extract_byte: 21 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] @@ -1311,7 +1311,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag + nc_key: cloud_mask_test_flag extract_byte: 22 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] @@ -1321,7 +1321,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag + nc_key: cloud_mask_test_flag extract_byte: 23 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] @@ -1331,7 +1331,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_flag + nc_key: cloud_mask_test_flag extract_byte: 24 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] @@ -1341,7 +1341,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result + nc_key: cloud_mask_test_result extract_byte: 0 flag_values: [0,1] flag_meanings: ['No snow/ice detected',' Snow/ice detected'] @@ -1351,7 +1351,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result + nc_key: cloud_mask_test_result extract_byte: 1 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] @@ -1361,7 +1361,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result + nc_key: cloud_mask_test_result extract_byte: 2 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] @@ -1371,7 +1371,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result + nc_key: cloud_mask_test_result extract_byte: 3 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] @@ -1381,7 +1381,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result + nc_key: cloud_mask_test_result extract_byte: 4 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] @@ -1391,7 +1391,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result + nc_key: cloud_mask_test_result extract_byte: 5 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] @@ -1401,7 +1401,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result + nc_key: cloud_mask_test_result extract_byte: 6 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] @@ -1411,7 +1411,7 @@ datasets: 
standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result + nc_key: cloud_mask_test_result extract_byte: 7 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] @@ -1421,7 +1421,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result + nc_key: cloud_mask_test_result extract_byte: 8 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] @@ -1431,7 +1431,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result + nc_key: cloud_mask_test_result extract_byte: 9 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] @@ -1441,7 +1441,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result + nc_key: cloud_mask_test_result extract_byte: 10 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] @@ -1451,7 +1451,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result + nc_key: cloud_mask_test_result extract_byte: 11 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] @@ -1461,7 +1461,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result + nc_key: cloud_mask_test_result extract_byte: 12 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] @@ -1471,7 +1471,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result + nc_key: cloud_mask_test_result extract_byte: 13 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] @@ -1481,7 +1481,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result + nc_key: cloud_mask_test_result extract_byte: 14 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] @@ -1491,7 +1491,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result + nc_key: cloud_mask_test_result extract_byte: 15 flag_values: [0,1] flag_meanings: ['No opaqueness detected', 'Opaqueness detected'] @@ -1501,7 +1501,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result + nc_key: cloud_mask_test_result extract_byte: 16 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] @@ -1511,7 +1511,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result + nc_key: cloud_mask_test_result extract_byte: 17 flag_values: [0,1] flag_meanings: ['Clear unchanged', 'Cloud detected (restored from clear sky)'] @@ -1521,7 +1521,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result + nc_key: cloud_mask_test_result extract_byte: 18 flag_values: [0,1] flag_meanings: ['Clear unchanged', 'Cloud detected (restored from clear sky)'] @@ -1531,7 +1531,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result + nc_key: cloud_mask_test_result extract_byte: 19 flag_values: [0,1] flag_meanings: ['Clear unchanged', 'Cloud detected (restored from clear sky)'] @@ -1541,7 +1541,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - 
file_key: cloud_mask_test_result + nc_key: cloud_mask_test_result extract_byte: 20 flag_values: [0,1] flag_meanings: ['Clear sky restored', 'Cloud unchanged'] @@ -1551,7 +1551,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result + nc_key: cloud_mask_test_result extract_byte: 21 flag_values: [0,1] flag_meanings: ['No dust detected','Dust detected'] @@ -1561,7 +1561,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result + nc_key: cloud_mask_test_result extract_byte: 22 flag_values: [0,1] flag_meanings: ['No ash detected','Ash detected'] @@ -1571,7 +1571,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_test_result + nc_key: cloud_mask_test_result extract_byte: 23 flag_values: [0,1] flag_meanings: ['Dust detected','Ash detected'] @@ -1581,7 +1581,7 @@ datasets: standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm - file_key: cloud_mask_cmrt6_test_result + nc_key: cloud_mask_cmrt6_test_result fill_value: -127 import_enum_information: True @@ -1592,7 +1592,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_max + nc_key: bt_max cell_method: area:maximum coordinates: - longitude @@ -1604,7 +1604,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean + nc_key: bt_mean cell_method: area:mean coordinates: - longitude @@ -1616,7 +1616,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_min + nc_key: bt_min cell_method: area:minimum coordinates: - longitude @@ -1628,7 +1628,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_std + nc_key: bt_std cell_method: area:standard_deviation coordinates: - longitude @@ -1640,7 +1640,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_max + nc_key: radiance_max cell_method: area:maximum coordinates: - longitude @@ -1652,7 +1652,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean cell_method: area:mean coordinates: - longitude @@ -1664,7 +1664,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_min + nc_key: radiance_min cell_method: area:minimum coordinates: - longitude @@ -1676,7 +1676,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_std + nc_key: radiance_std cell_method: area:standard_deviation coordinates: - longitude @@ -1688,7 +1688,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_max + nc_key: reflectance_max cell_method: area:maximum units: '%' coordinates: @@ -1701,7 +1701,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean + nc_key: reflectance_mean cell_method: area:mean units: '%' coordinates: @@ -1714,7 +1714,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_min + nc_key: reflectance_min cell_method: area:minimum units: '%' coordinates: @@ -1727,7 +1727,7 @@ datasets: standard_name: toa_bidirectional_reflectance 
resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_std + nc_key: reflectance_std cell_method: area:standard_deviation units: '%' coordinates: @@ -1740,7 +1740,7 @@ datasets: standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt + nc_key: quality_bt fill_value: -1 units: '%' coordinates: @@ -1753,7 +1753,7 @@ datasets: standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance + nc_key: quality_reflectance fill_value: -1 units: '%' coordinates: @@ -1766,7 +1766,7 @@ datasets: standard_name: radiance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_radiance + nc_key: quality_radiance fill_value: -1 coordinates: - longitude @@ -1777,7 +1777,7 @@ datasets: standard_name: land_area_fraction resolution: 32000 file_type: nc_fci_asr - file_key: land_pixel_percent + nc_key: land_pixel_percent coordinates: - longitude - latitude @@ -1787,7 +1787,7 @@ datasets: standard_name: water_area_fraction resolution: 32000 file_type: nc_fci_asr - file_key: water_pixel_percent + nc_key: water_pixel_percent coordinates: - longitude - latitude @@ -1797,7 +1797,7 @@ datasets: standard_name: water_area_fraction resolution: 32000 file_type: nc_fci_asr - file_key: pixel_percentage + nc_key: pixel_percentage coordinates: - longitude - latitude @@ -1808,7 +1808,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean + nc_key: reflectance_mean vis_channel_id: 0 wavelength: [0.384, 0.444, 0.504] category_id: 0 @@ -1824,7 +1824,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean + nc_key: reflectance_mean vis_channel_id: 0 wavelength: [0.384, 0.444, 0.504] category_id: 1 @@ -1840,7 +1840,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean + nc_key: reflectance_mean vis_channel_id: 0 wavelength: [0.384, 0.444, 0.504] category_id: 2 @@ -1856,7 +1856,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean + nc_key: reflectance_mean vis_channel_id: 1 wavelength: [0.47, 0.51, 0.55] category_id: 0 @@ -1872,7 +1872,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean + nc_key: reflectance_mean vis_channel_id: 1 wavelength: [0.47, 0.51, 0.55] category_id: 1 @@ -1888,7 +1888,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean + nc_key: reflectance_mean vis_channel_id: 1 wavelength: [0.47, 0.51, 0.55] category_id: 2 @@ -1904,7 +1904,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean + nc_key: reflectance_mean vis_channel_id: 2 wavelength: [0.59, 0.64, 0.69] category_id: 0 @@ -1920,7 +1920,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean + nc_key: reflectance_mean vis_channel_id: 2 wavelength: [0.59, 0.64, 0.69] category_id: 1 @@ -1936,7 +1936,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean + nc_key: reflectance_mean vis_channel_id: 2 wavelength: [0.59, 0.64, 0.69] category_id: 2 @@ -1952,7 +1952,7 @@ datasets: 
standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean + nc_key: reflectance_mean vis_channel_id: 3 wavelength: [0.815, 0.865, 0.915] category_id: 0 @@ -1968,7 +1968,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean + nc_key: reflectance_mean vis_channel_id: 3 wavelength: [0.815, 0.865, 0.915] category_id: 1 @@ -1984,7 +1984,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean + nc_key: reflectance_mean vis_channel_id: 3 wavelength: [0.815, 0.865, 0.915] category_id: 2 @@ -2000,7 +2000,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean + nc_key: reflectance_mean vis_channel_id: 4 wavelength: [0.894, 0.914, 0.934] category_id: 0 @@ -2016,7 +2016,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean + nc_key: reflectance_mean vis_channel_id: 4 wavelength: [0.894, 0.914, 0.934] category_id: 1 @@ -2032,7 +2032,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean + nc_key: reflectance_mean vis_channel_id: 4 wavelength: [0.894, 0.914, 0.934] category_id: 2 @@ -2048,7 +2048,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean + nc_key: reflectance_mean vis_channel_id: 5 wavelength: [1.35, 1.38, 1.41] category_id: 0 @@ -2064,7 +2064,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean + nc_key: reflectance_mean vis_channel_id: 5 wavelength: [1.35, 1.38, 1.41] category_id: 1 @@ -2080,7 +2080,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean + nc_key: reflectance_mean vis_channel_id: 5 wavelength: [1.35, 1.38, 1.41] category_id: 2 @@ -2096,7 +2096,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean + nc_key: reflectance_mean vis_channel_id: 6 wavelength: [1.56, 1.61, 1.66] category_id: 0 @@ -2112,7 +2112,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean + nc_key: reflectance_mean vis_channel_id: 6 wavelength: [1.56, 1.61, 1.66] category_id: 1 @@ -2128,7 +2128,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean + nc_key: reflectance_mean vis_channel_id: 6 wavelength: [1.56, 1.61, 1.66] category_id: 2 @@ -2144,7 +2144,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean + nc_key: reflectance_mean vis_channel_id: 7 wavelength: [2.2, 2.25, 2.3] category_id: 0 @@ -2160,7 +2160,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean + nc_key: reflectance_mean vis_channel_id: 7 wavelength: [2.2, 2.25, 2.3] category_id: 1 @@ -2176,7 +2176,7 @@ datasets: standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr - file_key: reflectance_mean + nc_key: reflectance_mean vis_channel_id: 7 wavelength: [2.2, 2.25, 2.3] category_id: 2 @@ -2192,7 +2192,7 
@@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean + nc_key: bt_mean ir_channel_id: 0 wavelength: [3.4, 3.8, 4.2] category_id: 0 @@ -2207,7 +2207,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean + nc_key: bt_mean ir_channel_id: 0 wavelength: [3.4, 3.8, 4.2] category_id: 1 @@ -2222,7 +2222,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean + nc_key: bt_mean ir_channel_id: 0 wavelength: [3.4, 3.8, 4.2] category_id: 2 @@ -2237,7 +2237,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean + nc_key: bt_mean ir_channel_id: 1 wavelength: [5.3, 6.3, 7.3] category_id: 0 @@ -2252,7 +2252,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean + nc_key: bt_mean ir_channel_id: 1 wavelength: [5.3, 6.3, 7.3] category_id: 1 @@ -2267,7 +2267,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean + nc_key: bt_mean ir_channel_id: 1 wavelength: [5.3, 6.3, 7.3] category_id: 2 @@ -2282,7 +2282,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean + nc_key: bt_mean ir_channel_id: 2 wavelength: [6.85, 7.35, 7.85] category_id: 0 @@ -2297,7 +2297,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean + nc_key: bt_mean ir_channel_id: 2 wavelength: [6.85, 7.35, 7.85] category_id: 1 @@ -2312,7 +2312,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean + nc_key: bt_mean ir_channel_id: 2 wavelength: [6.85, 7.35, 7.85] category_id: 2 @@ -2327,7 +2327,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean + nc_key: bt_mean ir_channel_id: 3 wavelength: [8.3, 8.7, 9.1] category_id: 0 @@ -2342,7 +2342,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean + nc_key: bt_mean ir_channel_id: 3 wavelength: [8.3, 8.7, 9.1] category_id: 1 @@ -2357,7 +2357,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean + nc_key: bt_mean ir_channel_id: 3 wavelength: [8.3, 8.7, 9.1] category_id: 2 @@ -2372,7 +2372,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean + nc_key: bt_mean ir_channel_id: 4 wavelength: [9.36, 9.66, 9.96] category_id: 0 @@ -2387,7 +2387,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean + nc_key: bt_mean ir_channel_id: 4 wavelength: [9.36, 9.66, 9.96] category_id: 1 @@ -2402,7 +2402,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean + nc_key: bt_mean ir_channel_id: 4 wavelength: [9.36, 9.66, 9.96] category_id: 2 @@ -2417,7 +2417,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean + nc_key: bt_mean ir_channel_id: 5 wavelength: [9.8, 10.5, 11.2] category_id: 0 @@ -2432,7 +2432,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean + nc_key: bt_mean ir_channel_id: 5 
wavelength: [9.8, 10.5, 11.2] category_id: 1 @@ -2447,7 +2447,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean + nc_key: bt_mean ir_channel_id: 5 wavelength: [9.8, 10.5, 11.2] category_id: 2 @@ -2462,7 +2462,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean + nc_key: bt_mean ir_channel_id: 6 wavelength: [11.8, 12.3, 12.8] category_id: 0 @@ -2477,7 +2477,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean + nc_key: bt_mean ir_channel_id: 6 wavelength: [11.8, 12.3, 12.8] category_id: 1 @@ -2492,7 +2492,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean + nc_key: bt_mean ir_channel_id: 6 wavelength: [11.8, 12.3, 12.8] category_id: 2 @@ -2507,7 +2507,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean + nc_key: bt_mean ir_channel_id: 7 wavelength: [12.7, 13.3, 13.9] category_id: 0 @@ -2522,7 +2522,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean + nc_key: bt_mean ir_channel_id: 7 wavelength: [12.7, 13.3, 13.9] category_id: 1 @@ -2537,7 +2537,7 @@ datasets: standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr - file_key: bt_mean + nc_key: bt_mean ir_channel_id: 7 wavelength: [12.7, 13.3, 13.9] category_id: 2 @@ -2552,7 +2552,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 0 wavelength: [0.384, 0.444, 0.504] category_id: 0 @@ -2567,7 +2567,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 0 wavelength: [0.384, 0.444, 0.504] category_id: 1 @@ -2582,7 +2582,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 0 wavelength: [0.384, 0.444, 0.504] category_id: 2 @@ -2597,7 +2597,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 1 wavelength: [0.47, 0.51, 0.55] category_id: 0 @@ -2612,7 +2612,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 1 wavelength: [0.47, 0.51, 0.55] category_id: 1 @@ -2627,7 +2627,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 1 wavelength: [0.47, 0.51, 0.55] category_id: 2 @@ -2642,7 +2642,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 2 wavelength: [0.59, 0.64, 0.69] category_id: 0 @@ -2657,7 +2657,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 2 wavelength: [0.59, 0.64, 0.69] category_id: 1 @@ -2672,7 +2672,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 2 wavelength: [0.59, 0.64, 0.69] category_id: 2 @@ -2687,7 +2687,7 @@ datasets: 
standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 3 wavelength: [0.815, 0.865, 0.915] category_id: 0 @@ -2702,7 +2702,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 3 wavelength: [0.815, 0.865, 0.915] category_id: 1 @@ -2717,7 +2717,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 3 wavelength: [0.815, 0.865, 0.915] category_id: 2 @@ -2732,7 +2732,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 4 wavelength: [0.894, 0.914, 0.934] category_id: 0 @@ -2747,7 +2747,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 4 wavelength: [0.894, 0.914, 0.934] category_id: 1 @@ -2762,7 +2762,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 4 wavelength: [0.894, 0.914, 0.934] category_id: 2 @@ -2777,7 +2777,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 5 wavelength: [1.35, 1.38, 1.41] category_id: 0 @@ -2792,7 +2792,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 5 wavelength: [1.35, 1.38, 1.41] category_id: 1 @@ -2807,7 +2807,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 5 wavelength: [1.35, 1.38, 1.41] category_id: 2 @@ -2822,7 +2822,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 6 wavelength: [1.56, 1.61, 1.66] category_id: 0 @@ -2837,7 +2837,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 6 wavelength: [1.56, 1.61, 1.66] category_id: 1 @@ -2852,7 +2852,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 6 wavelength: [1.56, 1.61, 1.66] category_id: 2 @@ -2867,7 +2867,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 7 wavelength: [2.2, 2.25, 2.3] category_id: 0 @@ -2882,7 +2882,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 7 wavelength: [2.2, 2.25, 2.3] category_id: 1 @@ -2897,7 +2897,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 7 wavelength: [2.2, 2.25, 2.3] category_id: 2 @@ -2912,7 +2912,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 8 wavelength: [3.4, 3.8, 4.2] category_id: 0 @@ -2927,7 +2927,7 @@ datasets: standard_name: toa_outgoing_radiance 
resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 8 wavelength: [3.4, 3.8, 4.2] category_id: 1 @@ -2942,7 +2942,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 8 wavelength: [3.4, 3.8, 4.2] category_id: 2 @@ -2957,7 +2957,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 9 wavelength: [5.3, 6.3, 7.3] category_id: 0 @@ -2972,7 +2972,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 9 wavelength: [5.3, 6.3, 7.3] category_id: 1 @@ -2987,7 +2987,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 9 wavelength: [5.3, 6.3, 7.3] category_id: 2 @@ -3002,7 +3002,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 10 wavelength: [6.85, 7.35, 7.85] category_id: 0 @@ -3017,7 +3017,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 10 wavelength: [6.85, 7.35, 7.85] category_id: 1 @@ -3032,7 +3032,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 10 wavelength: [6.85, 7.35, 7.85] category_id: 2 @@ -3047,7 +3047,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 11 wavelength: [8.3, 8.7, 9.1] category_id: 0 @@ -3062,7 +3062,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 11 wavelength: [8.3, 8.7, 9.1] category_id: 1 @@ -3077,7 +3077,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 11 wavelength: [8.3, 8.7, 9.1] category_id: 2 @@ -3092,7 +3092,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 12 wavelength: [9.36, 9.66, 9.96] category_id: 0 @@ -3107,7 +3107,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 12 wavelength: [9.36, 9.66, 9.96] category_id: 1 @@ -3122,7 +3122,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 12 wavelength: [9.36, 9.66, 9.96] category_id: 2 @@ -3137,7 +3137,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 13 wavelength: [9.8, 10.5, 11.2] category_id: 0 @@ -3152,7 +3152,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 13 wavelength: [9.8, 10.5, 11.2] category_id: 1 @@ -3167,7 +3167,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean 
+ nc_key: radiance_mean channel_id: 13 wavelength: [9.8, 10.5, 11.2] category_id: 2 @@ -3182,7 +3182,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 14 wavelength: [11.8, 12.3, 12.8] category_id: 0 @@ -3197,7 +3197,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 14 wavelength: [11.8, 12.3, 12.8] category_id: 1 @@ -3212,7 +3212,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 14 wavelength: [11.8, 12.3, 12.8] category_id: 2 @@ -3227,7 +3227,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 15 wavelength: [12.7, 13.3, 13.9] category_id: 0 @@ -3242,7 +3242,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 15 wavelength: [12.7, 13.3, 13.9] category_id: 1 @@ -3257,7 +3257,7 @@ datasets: standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr - file_key: radiance_mean + nc_key: radiance_mean channel_id: 15 wavelength: [12.7, 13.3, 13.9] category_id: 2 @@ -3272,7 +3272,7 @@ datasets: standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance + nc_key: quality_reflectance vis_channel_id: 0 wavelength: [0.384, 0.444, 0.504] category_id: 0 @@ -3288,7 +3288,7 @@ datasets: standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance + nc_key: quality_reflectance vis_channel_id: 0 wavelength: [0.384, 0.444, 0.504] category_id: 1 @@ -3304,7 +3304,7 @@ datasets: standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance + nc_key: quality_reflectance vis_channel_id: 0 wavelength: [0.384, 0.444, 0.504] category_id: 2 @@ -3320,7 +3320,7 @@ datasets: standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance + nc_key: quality_reflectance vis_channel_id: 1 wavelength: [0.47, 0.51, 0.55] category_id: 0 @@ -3336,7 +3336,7 @@ datasets: standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance + nc_key: quality_reflectance vis_channel_id: 1 wavelength: [0.47, 0.51, 0.55] category_id: 1 @@ -3352,7 +3352,7 @@ datasets: standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance + nc_key: quality_reflectance vis_channel_id: 1 wavelength: [0.47, 0.51, 0.55] category_id: 2 @@ -3368,7 +3368,7 @@ datasets: standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance + nc_key: quality_reflectance vis_channel_id: 2 wavelength: [0.59, 0.64, 0.69] category_id: 0 @@ -3384,7 +3384,7 @@ datasets: standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance + nc_key: quality_reflectance vis_channel_id: 2 wavelength: [0.59, 0.64, 0.69] category_id: 1 @@ -3400,7 +3400,7 @@ datasets: standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance + nc_key: quality_reflectance vis_channel_id: 2 wavelength: [0.59, 0.64, 0.69] category_id: 2 @@ -3416,7 +3416,7 @@ datasets: 
standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance + nc_key: quality_reflectance vis_channel_id: 3 wavelength: [0.815, 0.865, 0.915] category_id: 0 @@ -3432,7 +3432,7 @@ datasets: standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance + nc_key: quality_reflectance vis_channel_id: 3 wavelength: [0.815, 0.865, 0.915] category_id: 1 @@ -3448,7 +3448,7 @@ datasets: standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance + nc_key: quality_reflectance vis_channel_id: 3 wavelength: [0.815, 0.865, 0.915] category_id: 2 @@ -3464,7 +3464,7 @@ datasets: standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance + nc_key: quality_reflectance vis_channel_id: 4 wavelength: [0.894, 0.914, 0.934] category_id: 0 @@ -3480,7 +3480,7 @@ datasets: standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance + nc_key: quality_reflectance vis_channel_id: 4 wavelength: [0.894, 0.914, 0.934] category_id: 1 @@ -3496,7 +3496,7 @@ datasets: standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance + nc_key: quality_reflectance vis_channel_id: 4 wavelength: [0.894, 0.914, 0.934] category_id: 2 @@ -3512,7 +3512,7 @@ datasets: standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance + nc_key: quality_reflectance vis_channel_id: 5 wavelength: [1.35, 1.38, 1.41] category_id: 0 @@ -3528,7 +3528,7 @@ datasets: standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance + nc_key: quality_reflectance vis_channel_id: 5 wavelength: [1.35, 1.38, 1.41] category_id: 1 @@ -3544,7 +3544,7 @@ datasets: standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance + nc_key: quality_reflectance vis_channel_id: 5 wavelength: [1.35, 1.38, 1.41] category_id: 2 @@ -3560,7 +3560,7 @@ datasets: standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance + nc_key: quality_reflectance vis_channel_id: 6 wavelength: [1.56, 1.61, 1.66] category_id: 0 @@ -3576,7 +3576,7 @@ datasets: standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance + nc_key: quality_reflectance vis_channel_id: 6 wavelength: [1.56, 1.61, 1.66] category_id: 1 @@ -3592,7 +3592,7 @@ datasets: standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance + nc_key: quality_reflectance vis_channel_id: 6 wavelength: [1.56, 1.61, 1.66] category_id: 2 @@ -3608,7 +3608,7 @@ datasets: standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance + nc_key: quality_reflectance vis_channel_id: 7 wavelength: [2.2, 2.25, 2.3] category_id: 0 @@ -3624,7 +3624,7 @@ datasets: standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance + nc_key: quality_reflectance vis_channel_id: 7 wavelength: [2.2, 2.25, 2.3] category_id: 1 @@ -3640,7 +3640,7 @@ datasets: standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_reflectance + nc_key: quality_reflectance vis_channel_id: 7 wavelength: [2.2, 2.25, 2.3] category_id: 2 @@ -3656,7 +3656,7 @@ datasets: standard_name: brightness_temperature_quality 
resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt + nc_key: quality_bt ir_channel_id: 0 wavelength: [3.4, 3.8, 4.2] category_id: 0 @@ -3672,7 +3672,7 @@ datasets: standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt + nc_key: quality_bt ir_channel_id: 0 wavelength: [3.4, 3.8, 4.2] category_id: 1 @@ -3688,7 +3688,7 @@ datasets: standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt + nc_key: quality_bt ir_channel_id: 0 wavelength: [3.4, 3.8, 4.2] category_id: 2 @@ -3704,7 +3704,7 @@ datasets: standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt + nc_key: quality_bt ir_channel_id: 1 wavelength: [5.3, 6.3, 7.3] category_id: 0 @@ -3720,7 +3720,7 @@ datasets: standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt + nc_key: quality_bt ir_channel_id: 1 wavelength: [5.3, 6.3, 7.3] category_id: 1 @@ -3736,7 +3736,7 @@ datasets: standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt + nc_key: quality_bt ir_channel_id: 1 wavelength: [5.3, 6.3, 7.3] category_id: 2 @@ -3752,7 +3752,7 @@ datasets: standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt + nc_key: quality_bt ir_channel_id: 2 wavelength: [6.85, 7.35, 7.85] category_id: 0 @@ -3768,7 +3768,7 @@ datasets: standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt + nc_key: quality_bt ir_channel_id: 2 wavelength: [6.85, 7.35, 7.85] category_id: 1 @@ -3784,7 +3784,7 @@ datasets: standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt + nc_key: quality_bt ir_channel_id: 2 wavelength: [6.85, 7.35, 7.85] category_id: 2 @@ -3800,7 +3800,7 @@ datasets: standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt + nc_key: quality_bt ir_channel_id: 3 wavelength: [8.3, 8.7, 9.1] category_id: 0 @@ -3816,7 +3816,7 @@ datasets: standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt + nc_key: quality_bt ir_channel_id: 3 wavelength: [8.3, 8.7, 9.1] category_id: 1 @@ -3832,7 +3832,7 @@ datasets: standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt + nc_key: quality_bt ir_channel_id: 3 wavelength: [8.3, 8.7, 9.1] category_id: 2 @@ -3848,7 +3848,7 @@ datasets: standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt + nc_key: quality_bt ir_channel_id: 4 wavelength: [9.36, 9.66, 9.96] category_id: 0 @@ -3864,7 +3864,7 @@ datasets: standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt + nc_key: quality_bt ir_channel_id: 4 wavelength: [9.36, 9.66, 9.96] category_id: 1 @@ -3880,7 +3880,7 @@ datasets: standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt + nc_key: quality_bt ir_channel_id: 4 wavelength: [9.36, 9.66, 9.96] category_id: 2 @@ -3896,7 +3896,7 @@ datasets: standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt + nc_key: quality_bt ir_channel_id: 5 wavelength: [9.8, 10.5, 11.2] category_id: 0 @@ -3912,7 +3912,7 @@ datasets: standard_name: 
brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt + nc_key: quality_bt ir_channel_id: 5 wavelength: [9.8, 10.5, 11.2] category_id: 1 @@ -3928,7 +3928,7 @@ datasets: standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt + nc_key: quality_bt ir_channel_id: 5 wavelength: [9.8, 10.5, 11.2] category_id: 2 @@ -3944,7 +3944,7 @@ datasets: standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt + nc_key: quality_bt ir_channel_id: 6 wavelength: [11.8, 12.3, 12.8] category_id: 0 @@ -3960,7 +3960,7 @@ datasets: standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt + nc_key: quality_bt ir_channel_id: 6 wavelength: [11.8, 12.3, 12.8] category_id: 1 @@ -3976,7 +3976,7 @@ datasets: standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt + nc_key: quality_bt ir_channel_id: 6 wavelength: [11.8, 12.3, 12.8] category_id: 2 @@ -3992,7 +3992,7 @@ datasets: standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt + nc_key: quality_bt ir_channel_id: 7 wavelength: [12.7, 13.3, 13.9] category_id: 0 @@ -4008,7 +4008,7 @@ datasets: standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt + nc_key: quality_bt ir_channel_id: 7 wavelength: [12.7, 13.3, 13.9] category_id: 1 @@ -4024,7 +4024,7 @@ datasets: standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr - file_key: quality_bt + nc_key: quality_bt ir_channel_id: 7 wavelength: [12.7, 13.3, 13.9] category_id: 2 @@ -4040,7 +4040,7 @@ datasets: standard_name: pixels_used_fraction resolution: 32000 file_type: nc_fci_asr - file_key: pixel_percentage + nc_key: pixel_percentage category_id: 0 units: '%' coordinates: @@ -4053,7 +4053,7 @@ datasets: standard_name: pixels_used_fraction resolution: 32000 file_type: nc_fci_asr - file_key: pixel_percentage + nc_key: pixel_percentage category_id: 1 units: '%' coordinates: @@ -4066,7 +4066,7 @@ datasets: standard_name: pixels_used_fraction resolution: 32000 file_type: nc_fci_asr - file_key: pixel_percentage + nc_key: pixel_percentage category_id: 2 units: '%' coordinates: @@ -4077,16 +4077,16 @@ datasets: name: product_quality_asr standard_name: product_quality file_type: nc_fci_asr - file_key: product_quality + nc_key: product_quality product_completeness_asr: name: product_completeness_asr standard_name: product_completeness file_type: nc_fci_asr - file_key: product_completeness + nc_key: product_completeness product_timeliness_asr: name: product_timeliness_asr standard_name: product_timeliness file_type: nc_fci_asr - file_key: product_timeliness + nc_key: product_timeliness diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index 6290f8161c..e88b60a739 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -88,7 +88,7 @@ def _set_attributes(self, variable, dataset_info, segmented=False): else: xdim, ydim = "number_of_columns", "number_of_rows" - if dataset_info["file_key"] not in ["product_quality", "product_completeness", "product_timeliness"]: + if dataset_info["nc_key"] not in ["product_quality", "product_completeness", "product_timeliness"]: variable = variable.rename({ydim: "y", xdim: "x"}) variable.attrs.setdefault("units", None) @@ -104,7 +104,7 @@ def _set_attributes(self, variable, 
dataset_info, segmented=False): if (import_enum_information): netCDF4_dataset = netCDF4.Dataset(self.filename, "r") # This currently assumes a flat netCDF file - dataType=netCDF4_dataset.variables[dataset_info["file_key"]].datatype + dataType=netCDF4_dataset.variables[dataset_info["nc_key"]].datatype if (hasattr(dataType,"enum_dict")): enum = dataType.enum_dict flag_values = [] @@ -189,8 +189,8 @@ def get_area_def(self, key): raise NotImplementedError def get_dataset(self, dataset_id, dataset_info): - """Get dataset using the file_key in dataset_info.""" - var_key = dataset_info["file_key"] + """Get dataset using the nc_key in dataset_info.""" + var_key = dataset_info["nc_key"] par_name = dataset_info["name"] logger.debug("Reading in file to get dataset with key %s.", var_key) @@ -222,7 +222,7 @@ def get_dataset(self, dataset_id, dataset_info): @staticmethod def _decode_clm_test_data(variable, dataset_info): - if dataset_info["file_key"] != "cloud_mask_cmrt6_test_result": + if dataset_info["nc_key"] != "cloud_mask_cmrt6_test_result": variable = variable.astype("uint32") variable.values = (variable.values >> dataset_info["extract_byte"] << 31 >> 31).astype("int8") @@ -352,8 +352,8 @@ def get_area_def(self, key): raise NotImplementedError def get_dataset(self, dataset_id, dataset_info): - """Get dataset using the file_key in dataset_info.""" - var_key = dataset_info["file_key"] + """Get dataset using the nc_key in dataset_info.""" + var_key = dataset_info["nc_key"] logger.debug("Reading in file to get dataset with key %s.", var_key) try: @@ -470,8 +470,8 @@ def _get_global_attributes(self): return attributes def get_dataset(self, dataset_id, dataset_info): - """Get dataset using the file_key in dataset_info.""" - var_key = dataset_info["file_key"] + """Get dataset using the nc_key in dataset_info.""" + var_key = dataset_info["nc_key"] logger.debug("Reading in file to get dataset with key %s.", var_key) try: diff --git a/satpy/tests/reader_tests/test_fci_l2_nc.py b/satpy/tests/reader_tests/test_fci_l2_nc.py index 3e77c1d51e..830f793d00 100644 --- a/satpy/tests/reader_tests/test_fci_l2_nc.py +++ b/satpy/tests/reader_tests/test_fci_l2_nc.py @@ -165,10 +165,10 @@ def test_area_definition(self, me_, gad_): assert args[5] == 100 def test_dataset(self): - """Test the correct execution of the get_dataset function with a valid file_key.""" + """Test the correct execution of the get_dataset function with a valid nc_key.""" dataset = self.fh.get_dataset(make_dataid(name="test_one_layer", resolution=2000), {"name": "test_one_layer", - "file_key": "test_one_layer", + "nc_key": "test_one_layer", "fill_value": -999, "file_type": "test_file_type"}) @@ -177,20 +177,20 @@ def test_dataset(self): assert dataset.attrs["fill_value"] == -999 def test_dataset_with_layer(self): - """Check the correct execution of the get_dataset function with a valid file_key & layer.""" + """Check the correct execution of the get_dataset function with a valid nc_key & layer.""" dataset = self.fh.get_dataset(make_dataid(name="test_two_layers", resolution=2000), {"name": "test_two_layers", - "file_key": "test_two_layers", "layer": 1, + "nc_key": "test_two_layers", "layer": 1, "fill_value": -999, "file_type": "test_file_type"}) np.testing.assert_allclose(dataset.values, 2 * np.ones((100, 10))) assert dataset.attrs["spacecraft_name"] == "test_platform" def test_dataset_with_invalid_filekey(self): - """Test the correct execution of the get_dataset function with an invalid file_key.""" + """Test the correct execution of the get_dataset 
function with an invalid nc_key.""" invalid_dataset = self.fh.get_dataset(make_dataid(name="test_invalid", resolution=2000), {"name": "test_invalid", - "file_key": "test_invalid", + "nc_key": "test_invalid", "fill_value": -999, "file_type": "test_file_type"}) assert invalid_dataset is None @@ -199,7 +199,7 @@ def test_dataset_with_total_cot(self): """Test the correct execution of the get_dataset function for total COT (add contributions from two layers).""" dataset = self.fh.get_dataset(make_dataid(name="retrieved_cloud_optical_thickness", resolution=2000), {"name": "retrieved_cloud_optical_thickness", - "file_key": "test_two_layers", + "nc_key": "test_two_layers", "fill_value": -999, "file_type": "test_file_type"}) # Checks that the function returns None @@ -212,7 +212,7 @@ def test_dataset_with_scalar(self): # Checks returned scalar value dataset = self.fh.get_dataset(make_dataid(name="test_scalar"), {"name": "product_quality", - "file_key": "product_quality", + "nc_key": "product_quality", "file_type": "test_file_type"}) assert dataset.values == 99.0 @@ -224,7 +224,7 @@ def test_emumerations(self): """Test the conversion of enumerated type information into flag_values and flag_meanings.""" dataset = self.fh.get_dataset(make_dataid(name="test_enum", resolution=2000), {"name": "quality_flag", - "file_key": "quality_flag", + "nc_key": "quality_flag", "file_type": "test_file_type", "import_enum_information": True}) attributes = dataset.attrs @@ -237,7 +237,7 @@ def test_units_from_file(self): """Test units extraction from NetCDF file.""" dataset = self.fh.get_dataset(make_dataid(name="test_units_from_file", resolution=2000), {"name": "test_one_layer", - "file_key": "test_one_layer", + "nc_key": "test_one_layer", "file_type": "test_file_type"}) assert dataset.attrs["units"] == "test_units" @@ -245,7 +245,7 @@ def test_unit_from_file(self): """Test that a unit stored with attribute `unit` in the file is assigned to the `units` attribute.""" dataset = self.fh.get_dataset(make_dataid(name="test_unit_from_file", resolution=2000), {"name": "test_two_layers", - "file_key": "test_two_layers", "layer": 1, + "nc_key": "test_two_layers", "layer": 1, "file_type": "test_file_type"}) assert dataset.attrs["units"] == "test_unit" @@ -254,7 +254,7 @@ def test_units_from_yaml(self): dataset = self.fh.get_dataset(make_dataid(name="test_units_from_yaml", resolution=2000), {"name": "test_one_layer", "units": "test_unit_from_yaml", - "file_key": "test_one_layer", + "nc_key": "test_one_layer", "file_type": "test_file_type"}) assert dataset.attrs["units"] == "test_unit_from_yaml" @@ -263,7 +263,7 @@ def test_units_none_conversion(self): dataset = self.fh.get_dataset(make_dataid(name="test_units_none_conversion", resolution=2000), {"name": "test_one_layer", "units": "none", - "file_key": "test_one_layer", + "nc_key": "test_one_layer", "file_type": "test_file_type"}) assert dataset.attrs["units"] is None @@ -342,13 +342,13 @@ def test_all_basic(self): assert global_attributes == expected_global_attributes def test_dataset(self): - """Test the correct execution of the get_dataset function with valid file_key.""" + """Test the correct execution of the get_dataset function with valid nc_key.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) - # Checks the correct execution of the get_dataset function with a valid file_key + # Checks the correct execution of the get_dataset function with a valid nc_key dataset = self.fh.get_dataset(make_dataid(name="test_values", 
resolution=32000), {"name": "test_values", - "file_key": "test_values", + "nc_key": "test_values", "fill_value": -999, }) expected_dataset = self._get_unique_array(range(8), range(6)) np.testing.assert_allclose(dataset.values, expected_dataset) @@ -361,13 +361,13 @@ def test_dataset(self): self.fh.get_area_def(None) def test_dataset_with_invalid_filekey(self): - """Test the correct execution of the get_dataset function with an invalid file_key.""" + """Test the correct execution of the get_dataset function with an invalid nc_key.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) - # Checks the correct execution of the get_dataset function with an invalid file_key + # Checks the correct execution of the get_dataset function with an invalid nc_key invalid_dataset = self.fh.get_dataset(make_dataid(name="test_invalid", resolution=32000), {"name": "test_invalid", - "file_key": "test_invalid", + "nc_key": "test_invalid", "fill_value": -999, }) # Checks that the function returns None assert invalid_dataset is None @@ -377,10 +377,10 @@ def test_dataset_with_adef(self): self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}, with_area_definition=True) - # Checks the correct execution of the get_dataset function with a valid file_key + # Checks the correct execution of the get_dataset function with a valid nc_key dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), {"name": "test_values", - "file_key": "test_values", + "nc_key": "test_values", "fill_value": -999, "coordinates": ("test_lon", "test_lat"), }) expected_dataset = self._get_unique_array(range(8), range(6)) @@ -399,7 +399,7 @@ def test_dataset_with_adef_and_wrongs_dims(self): with_area_definition=True) with pytest.raises(NotImplementedError): self.fh.get_dataset(make_dataid(name="test_wrong_dims", resolution=6000), - {"name": "test_wrong_dims", "file_key": "test_values", "fill_value": -999} + {"name": "test_wrong_dims", "nc_key": "test_values", "fill_value": -999} ) def test_dataset_with_scalar(self): @@ -408,7 +408,7 @@ def test_dataset_with_scalar(self): # Checks returned scalar value dataset = self.fh.get_dataset(make_dataid(name="test_scalar"), {"name": "product_quality", - "file_key": "product_quality", + "nc_key": "product_quality", "file_type": "test_file_type"}) assert dataset.values == 99.0 @@ -422,7 +422,7 @@ def test_dataset_slicing_catid(self): dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), {"name": "test_values", - "file_key": "test_values", + "nc_key": "test_values", "fill_value": -999, "category_id": 5}) expected_dataset = self._get_unique_array(range(8), 5) @@ -434,7 +434,7 @@ def test_dataset_slicing_chid_catid(self): dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), {"name": "test_values", - "file_key": "test_values", + "nc_key": "test_values", "fill_value": -999, "channel_id": 0, "category_id": 1}) expected_dataset = self._get_unique_array(0, 1) @@ -447,7 +447,7 @@ def test_dataset_slicing_visid_catid(self): self.fh.nc = self.fh.nc.rename_dims({"number_of_channels": "number_of_vis_channels"}) dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), {"name": "test_values", - "file_key": "test_values", + "nc_key": "test_values", "fill_value": -999, "vis_channel_id": 3, "category_id": 3}) expected_dataset = self._get_unique_array(3, 3) @@ -460,7 +460,7 @@ def test_dataset_slicing_irid(self): self.fh.nc = 
self.fh.nc.rename_dims({"number_of_channels": "number_of_ir_channels"}) dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), {"name": "test_values", - "file_key": "test_values", + "nc_key": "test_values", "fill_value": -999, "ir_channel_id": 4}) expected_dataset = self._get_unique_array(4, range(6)) @@ -541,7 +541,7 @@ def test_byte_extraction(self): # Value of 1 is expected to be returned for this test dataset = self.byte_reader.get_dataset(make_dataid(name="cloud_mask_test_flag", resolution=2000), {"name": "cloud_mask_test_flag", - "file_key": "cloud_mask_test_flag", + "nc_key": "cloud_mask_test_flag", "fill_value": -999, "file_type": "nc_fci_test_clm", "extract_byte": 1, @@ -552,7 +552,7 @@ # Value of 0 is expected to be returned for this test dataset = self.byte_reader.get_dataset(make_dataid(name="cloud_mask_test_flag", resolution=2000), {"name": "cloud_mask_test_flag", - "file_key": "cloud_mask_test_flag", + "nc_key": "cloud_mask_test_flag", "fill_value": -999, "mask_value": 0., "file_type": "nc_fci_test_clm", "extract_byte": 23, @@ -627,10 +627,10 @@ def test_all_basic(self, amv_filehandler, amv_file): assert global_attributes == expected_global_attributes def test_dataset(self, amv_filehandler): - """Test the correct execution of the get_dataset function with a valid file_key.""" + """Test the correct execution of the get_dataset function with a valid nc_key.""" dataset = amv_filehandler.get_dataset(make_dataid(name="test_dataset", resolution=2000), {"name": "test_dataset", - "file_key": "test_dataset", + "nc_key": "test_dataset", "fill_value": -999, "file_type": "test_file_type"}) np.testing.assert_allclose(dataset.values, np.ones(50000)) assert dataset.attrs["spacecraft_name"] == "test_platform" assert dataset.attrs["fill_value"] == -999 def test_dataset_with_invalid_filekey(self, amv_filehandler): - """Test the correct execution of the get_dataset function with an invalid file_key.""" + """Test the correct execution of the get_dataset function with an invalid nc_key.""" invalid_dataset = amv_filehandler.get_dataset(make_dataid(name="test_invalid", resolution=2000), {"name": "test_invalid", - "file_key": "test_invalid", + "nc_key": "test_invalid", "fill_value": -999, "file_type": "test_file_type"}) assert invalid_dataset is None
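Taken together, the YAML and reader changes above complete the switch from file_key to nc_key: each dataset entry now names the NetCDF variable it reads through nc_key, and get_dataset resolves that key at load time. A minimal, hypothetical sketch of that lookup (the helper name and file path are illustrative, not taken from the patch):

    import xarray as xr

    def read_configured_variable(path, dataset_info):
        """Return the variable a dataset entry points at through its nc_key."""
        nc = xr.open_dataset(path)  # e.g. an FCI L2 CLM file
        return nc[dataset_info["nc_key"]]  # formerly dataset_info["file_key"]

    # Usage sketch:
    # read_configured_variable("clm.nc", {"name": "cloud_mask_test_flag", "nc_key": "cloud_mask_test_flag"})

From b3f996caf96024bc70a8268404051a0bd0aeff9b Mon Sep 17 00:00:00 2001 From: Olivier Samain Date: Mon, 15 Jan 2024 17:17:20 +0100 Subject: [PATCH 1056/1416] created separate method to add flag values and meanings from enumeration to a variable --- satpy/readers/fci_l2_nc.py | 35 +++++++++++++++++++++-------------- 1 file changed, 21 insertions(+), 14 deletions(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index e88b60a739..91dab92c48 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -102,26 +102,33 @@ def _set_attributes(self, variable, dataset_info, segmented=False): import_enum_information = dataset_info.get("import_enum_information", False) if (import_enum_information): - netCDF4_dataset = netCDF4.Dataset(self.filename, "r") - # This currently assumes a flat netCDF file - dataType=netCDF4_dataset.variables[dataset_info["nc_key"]].datatype - if (hasattr(dataType,"enum_dict")): - enum = dataType.enum_dict - flag_values = [] - flag_meanings = [] - for item in enumerate(enum): - flag_values.append(item[0]) - flag_meanings.append(item[1]) - - variable.attrs["flag_values"] = flag_values - variable.attrs["flag_meanings"] = flag_meanings - 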
netCDF4_dataset.close() + variable = self._add_flag_values_and_meanings(self.filename, dataset_info["nc_key"], variable) if variable.attrs["units"] == "none": variable.attrs.update({"units": None}) return variable + @staticmethod + def _add_flag_values_and_meanings(filename, key, variable): + """Build flag values and meanings from the enum datatype.""" + netCDF4_dataset = netCDF4.Dataset(filename, "r") + # This currently assumes a flat netCDF file + dataType = netCDF4_dataset.variables[key].datatype + if hasattr(dataType, "enum_dict"): + enum = dataType.enum_dict + flag_values = [] + flag_meanings = [] + for item in enumerate(enum): + flag_values.append(item[0]) + flag_meanings.append(item[1]) + + variable.attrs["flag_values"] = flag_values + variable.attrs["flag_meanings"] = flag_meanings + netCDF4_dataset.close() + + return variable + def _slice_dataset(self, variable, dataset_info, dimensions): """Slice data if dimension layers have been provided in yaml-file.""" slice_dict = {dim: dataset_info[dim_id] for (dim, dim_id) in dimensions.items()
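The refactored helper leans on netCDF4's EnumType: an enum-typed variable exposes its name-to-code mapping through datatype.enum_dict. A standalone sketch of that lookup, with a hypothetical file and variable name, showing one way to derive CF-style flag attributes from the mapping:

    import netCDF4

    # Hypothetical file with an enum-typed variable, for illustration only.
    with netCDF4.Dataset("fci_l2_example.nc", "r") as nc:
        datatype = nc.variables["quality_flag"].datatype
        if hasattr(datatype, "enum_dict"):
            # enum_dict maps meaning names to integer codes,
            # e.g. {"undefined": 0, "good": 1, "questionable": 2}
            flag_meanings = list(datatype.enum_dict.keys())
            flag_values = list(datatype.enum_dict.values())

From 832afa9e5e8901320a5308e724ecb16e3f816057 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 16 Jan 2024 11:37:32 +0100 Subject: [PATCH 1057/1416] Refactor sar tests --- satpy/tests/reader_tests/test_sar_c_safe.py | 352 +++++++++++--------- 1 file changed, 202 insertions(+), 150 deletions(-) diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index 4ac4d97cfe..a0e6fb8849 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -17,105 +17,185 @@ # satpy. If not, see <http://www.gnu.org/licenses/>. """Module for testing the satpy.readers.sar-c_safe module.""" -import unittest -import unittest.mock as mock +import os from enum import Enum from io import BytesIO -import dask.array as da import numpy as np -import xarray as xr +import pytest from satpy.dataset import DataQuery from satpy.readers.sar_c_safe import SAFEXMLAnnotation, SAFEXMLCalibration, SAFEXMLNoise +rasterio = pytest.importorskip("rasterio") + + dirname_suffix = "20190201T024655_20190201T024720_025730_02DC2A_AE07" filename_suffix = "20190201t024655-20190201t024720-025730-02dc2a" @pytest.fixture(scope="module") def granule_directory(tmp_path_factory): """Create a granule directory.""" data_dir = tmp_path_factory.mktemp("data") gdir = data_dir / f"S1A_IW_GRDH_1SDV_{dirname_suffix}.SAFE" os.mkdir(gdir) return gdir @pytest.fixture(scope="module") def annotation_file(granule_directory): """Create an annotation file.""" ann_dir = granule_directory / "annotation" os.makedirs(ann_dir, exist_ok=True) annotation_file = ann_dir / f"s1a-iw-grd-vv-{filename_suffix}-001.xml" with open(annotation_file, "wb") as fd: fd.write(annotation_xml) return annotation_file @pytest.fixture(scope="module") def annotation_filehandler(annotation_file): """Create an annotation filehandler.""" filename_info = dict(start_time=None, end_time=None, polarization="vv") return SAFEXMLAnnotation(annotation_file, filename_info, None) @pytest.fixture(scope="module") def calibration_file(granule_directory): """Create a calibration file.""" cal_dir = granule_directory / "annotation" / "calibration" os.makedirs(cal_dir, exist_ok=True) calibration_file = cal_dir / f"calibration-s1a-iw-grd-vv-{filename_suffix}-001.xml" with open(calibration_file, "wb") as fd: fd.write(calibration_xml) return calibration_file @pytest.fixture(scope="module") def calibration_filehandler(calibration_file, 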
annotation_filehandler): + """Create a calibration filehandler.""" + filename_info = dict(start_time=None, end_time=None, polarization="vv") + return SAFEXMLCalibration(calibration_file, + filename_info, + None, + annotation_filehandler) + +@pytest.fixture(scope="module") +def noise_file(granule_directory): + """Create a noise file.""" + noise_dir = granule_directory / "annotation" / "calibration" + os.makedirs(noise_dir, exist_ok=True) + noise_file = noise_dir / f"noise-s1a-iw-grd-vv-{filename_suffix}-001.xml" + with open(noise_file, "wb") as fd: + fd.write(noise_xml) + return noise_file + + +@pytest.fixture(scope="module") +def noise_filehandler(noise_file, annotation_filehandler): + """Create a noise filehandler.""" + filename_info = dict(start_time=None, end_time=None, polarization="vv") + return SAFEXMLNoise(noise_file, filename_info, None, annotation_filehandler) + + +@pytest.fixture(scope="module") +def noise_with_holes_filehandler(annotation_filehandler): + """Create a noise filehandler from data with holes.""" + filename_info = dict(start_time=None, end_time=None, polarization="vv") + noise_filehandler = SAFEXMLNoise(BytesIO(noise_xml_with_holes), + filename_info, None, + annotation_filehandler) + return noise_filehandler + + + +@pytest.fixture(scope="module") +def measurement_file(granule_directory): + """Create a tiff measurement file.""" + GCP = rasterio.control.GroundControlPoint + + gcps = [GCP(0, 0, 0, 0, 0), + GCP(0, 3, 1, 0, 0), + GCP(3, 0, 0, 1, 0), + GCP(3, 3, 1, 1, 0), + GCP(0, 7, 2, 0, 0), + GCP(3, 7, 2, 1, 0), + GCP(7, 7, 2, 2, 0), + GCP(7, 3, 1, 2, 0), + GCP(7, 0, 0, 2, 0), + GCP(0, 15, 3, 0, 0), + GCP(3, 15, 3, 1, 0), + GCP(7, 15, 3, 2, 0), + GCP(15, 15, 3, 3, 0), + GCP(15, 7, 2, 3, 0), + GCP(15, 3, 1, 3, 0), + GCP(15, 0, 0, 3, 0), + ] + Z = np.linspace(0, 30000, 100, dtype=np.uint16).reshape((10, 10)) + m_dir = granule_directory / "measurement" + os.makedirs(m_dir, exist_ok=True) + filename = m_dir / f"s1a-iw-grd-vv-{filename_suffix}-001.tiff" + with rasterio.open( + filename, + "w", + driver="GTiff", + height=Z.shape[0], + width=Z.shape[1], + count=1, + dtype=Z.dtype, + crs="+proj=latlong", + gcps=gcps) as dst: + dst.write(Z, 1) + return filename + + +@pytest.fixture(scope="module") +def measurement_filehandler(measurement_file, annotation_filehandler, noise_filehandler, calibration_filehandler): + """Create a measurement filehandler.""" + filename_info = {"mission_id": "S1A", "dataset_name": "foo", "start_time": 0, "end_time": 0, + "polarization": "vv"} + filetype_info = None + from satpy.readers.sar_c_safe import SAFEGRD + filehandler = SAFEGRD(measurement_file, + filename_info, + filetype_info, + calibration_filehandler, + noise_filehandler, + annotation_filehandler) + return filehandler -class TestSAFEGRD(unittest.TestCase): + +class Calibration(Enum): + """Calibration levels.""" + + gamma = 1 + sigma_nought = 2 + beta_nought = 3 + dn = 4 + + +class TestSAFEGRD: """Test the SAFE GRD file handler.""" - @mock.patch("rasterio.open") - def setUp(self, mocked_rio_open): - """Set up the test case.""" - from satpy.readers.sar_c_safe import SAFEGRD - filename_info = {"mission_id": "S1A", "dataset_name": "foo", "start_time": 0, "end_time": 0, - "polarization": "vv"} - filetype_info = "bla" - self.noisefh = mock.MagicMock() - self.noisefh.get_noise_correction.return_value = xr.DataArray(np.zeros((2, 2)), dims=["y", "x"]) - self.calfh = mock.MagicMock() - self.calfh.get_calibration_constant.return_value = 1 - self.calfh.get_calibration.return_value = 
xr.DataArray(np.ones((2, 2)), dims=["y", "x"]) - self.annotationfh = mock.MagicMock() - - self.test_fh = SAFEGRD("S1A_IW_GRDH_1SDV_20190201T024655_20190201T024720_025730_02DC2A_AE07.SAFE/measurement/" - "s1a-iw-grd-vv-20190201t024655-20190201t024720-025730-02dc2a-001.tiff", - filename_info, filetype_info, self.calfh, self.noisefh, self.annotationfh) - self.mocked_rio_open = mocked_rio_open - - def test_instantiate(self): - """Test initialization of file handlers.""" - assert self.test_fh._polarization == "vv" - assert self.test_fh.calibration == self.calfh - assert self.test_fh.noise == self.noisefh - self.mocked_rio_open.assert_called() - - @mock.patch("xarray.open_dataset") - def test_read_calibrated_natural(self, mocked_xarray_open): - """Test the calibration routines.""" - calibration = mock.MagicMock() - calibration.name = "sigma_nought" - mocked_xarray_open.return_value.__getitem__.return_value = xr.DataArray(da.from_array(np.array([[0, 1], - [2, 3]])), - dims=["y", "x"]) - xarr = self.test_fh.get_dataset(DataQuery(name="measurement", polarization="vv", - calibration=calibration, quantity="natural"), info=dict()) - np.testing.assert_allclose(xarr, [[np.nan, 2], [5, 10]]) - - @mock.patch("xarray.open_dataset") - def test_read_calibrated_dB(self, mocked_xarray_open): + def test_read_calibrated_natural(self, measurement_filehandler): """Test the calibration routines.""" - calibration = mock.MagicMock() - calibration.name = "sigma_nought" - mocked_xarray_open.return_value.__getitem__.return_value = xr.DataArray(da.from_array(np.array([[0, 1], - [2, 3]])), - dims=["y", "x"]) - xarr = self.test_fh.get_dataset(DataQuery(name="measurement", polarization="vv", - calibration=calibration, quantity="dB"), info=dict()) - np.testing.assert_allclose(xarr, [[np.nan, 3.0103], [6.9897, 10]]) - - def test_read_lon_lats(self): - """Test reading lons and lats.""" + calibration = Calibration.sigma_nought + xarr = measurement_filehandler.get_dataset(DataQuery(name="measurement", polarization="vv", + calibration=calibration, quantity="natural"), info=dict()) + expected = np.array([[np.nan, 0.02707529], [2.55858416, 3.27611055]]) + np.testing.assert_allclose(xarr.values[:2, :2], expected, rtol=2e-7) - class FakeGCP: - - def __init__(self, *args): - self.row, self.col, self.x, self.y, self.z = args - - gcps = [FakeGCP(0, 0, 0, 0, 0), - FakeGCP(0, 3, 1, 0, 0), - FakeGCP(3, 0, 0, 1, 0), - FakeGCP(3, 3, 1, 1, 0), - FakeGCP(0, 7, 2, 0, 0), - FakeGCP(3, 7, 2, 1, 0), - FakeGCP(7, 7, 2, 2, 0), - FakeGCP(7, 3, 1, 2, 0), - FakeGCP(7, 0, 0, 2, 0), - FakeGCP(0, 15, 3, 0, 0), - FakeGCP(3, 15, 3, 1, 0), - FakeGCP(7, 15, 3, 2, 0), - FakeGCP(15, 15, 3, 3, 0), - FakeGCP(15, 7, 2, 3, 0), - FakeGCP(15, 3, 1, 3, 0), - FakeGCP(15, 0, 0, 3, 0), - ] - - crs = dict(init="epsg:4326") - - self.mocked_rio_open.return_value.gcps = [gcps, crs] - self.mocked_rio_open.return_value.shape = [16, 16] + def test_read_calibrated_dB(self, measurement_filehandler): + """Test the calibration routines.""" + calibration = Calibration.sigma_nought + xarr = measurement_filehandler.get_dataset(DataQuery(name="measurement", polarization="vv", + calibration=calibration, quantity="dB"), info=dict()) + expected = np.array([[np.nan, -15.674268], [4.079997, 5.153585]]) + np.testing.assert_allclose(xarr.values[:2, :2], expected) + def test_read_lon_lats(self, measurement_filehandler): + """Test reading lons and lats.""" query = DataQuery(name="longitude", polarization="vv") - xarr = self.test_fh.get_dataset(query, info=dict()) + xarr = 
measurement_filehandler.get_dataset(query, info=dict()) expected = np.array([[3.79492915e-16, 5.91666667e-01, 9.09722222e-01, 1.00000000e+00, 9.08333333e-01, 6.80555556e-01, 3.62500000e-01, 8.32667268e-17, -3.61111111e-01, @@ -212,7 +292,7 @@ def __init__(self, *args): 6.32142857e-01, 4.79166667e-01, 3.46031746e-01, 2.32142857e-01, 1.36904762e-01, 5.97222222e-02, 0.00000000e+00]]) - np.testing.assert_allclose(xarr.values, expected) + np.testing.assert_allclose(xarr.values, expected[:10, :10]) annotation_xml = b""" @@ -622,15 +702,11 @@ def __init__(self, *args): """ -class TestSAFEXMLNoise(unittest.TestCase): +class TestSAFEXMLNoise: """Test the SAFE XML Noise file handler.""" - def setUp(self): + def setup_method(self): """Set up the test case.""" - filename_info = dict(start_time=None, end_time=None, polarization="vv") - self.annotation_fh = SAFEXMLAnnotation(BytesIO(annotation_xml), filename_info, mock.MagicMock()) - self.noise_fh = SAFEXMLNoise(BytesIO(noise_xml), filename_info, mock.MagicMock(), self.annotation_fh) - self.expected_azimuth_noise = np.array([[np.nan, 1, 1, 1, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan], [np.nan, 1, 1, 1, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan], [2, 2, 3, 3, 3, 4, 4, 4, 4, np.nan], @@ -655,8 +731,6 @@ def setUp(self): [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], ]) - self.noise_fh_with_holes = SAFEXMLNoise(BytesIO(noise_xml_with_holes), filename_info, mock.MagicMock(), - self.annotation_fh) self.expected_azimuth_noise_with_holes = np.array( [[np.nan, np.nan, np.nan, 1, 1, 1, np.nan, np.nan, np.nan, np.nan], [2, 2, np.nan, 1, 1, 1, np.nan, np.nan, np.nan, np.nan], @@ -670,112 +744,90 @@ def setUp(self): [10, np.nan, 11, 11, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan] ]) - def test_azimuth_noise_array(self): + def test_azimuth_noise_array(self, noise_filehandler): """Test reading the azimuth-noise array.""" - res = self.noise_fh.azimuth_noise_reader.read_azimuth_noise_array() + res = noise_filehandler.azimuth_noise_reader.read_azimuth_noise_array() np.testing.assert_array_equal(res, self.expected_azimuth_noise) - def test_azimuth_noise_array_with_holes(self): + def test_azimuth_noise_array_with_holes(self, noise_with_holes_filehandler): """Test reading the azimuth-noise array.""" - res = self.noise_fh_with_holes.azimuth_noise_reader.read_azimuth_noise_array() + res = noise_with_holes_filehandler.azimuth_noise_reader.read_azimuth_noise_array() np.testing.assert_array_equal(res, self.expected_azimuth_noise_with_holes) - def test_range_noise_array(self): + def test_range_noise_array(self, noise_filehandler): """Test reading the range-noise array.""" - res = self.noise_fh.read_range_noise_array(chunks=5) + res = noise_filehandler.read_range_noise_array(chunks=5) np.testing.assert_allclose(res, self.expected_range_noise) - def test_get_noise_dataset(self): + def test_get_noise_dataset(self, noise_filehandler): """Test using get_dataset for the noise.""" query = DataQuery(name="noise", polarization="vv") - res = self.noise_fh.get_dataset(query, {}) + res = noise_filehandler.get_dataset(query, {}) np.testing.assert_allclose(res, self.expected_azimuth_noise * self.expected_range_noise) - def test_get_noise_dataset_has_right_chunk_size(self): + def test_get_noise_dataset_has_right_chunk_size(self, noise_filehandler): """Test using get_dataset for the noise has right chunk size in result.""" query = DataQuery(name="noise", polarization="vv") - res = self.noise_fh.get_dataset(query, {}, chunks=3) + res = noise_filehandler.get_dataset(query, {}, chunks=3) assert 
res.data.chunksize == (3, 3) -class Calibration(Enum): - """Calibration levels.""" - - gamma = 1 - sigma_nought = 2 - beta_nought = 3 - dn = 4 - - -class TestSAFEXMLCalibration(unittest.TestCase): +class TestSAFEXMLCalibration: """Test the SAFE XML Calibration file handler.""" - def setUp(self): - """Set up the test case.""" - filename_info = dict(start_time=None, end_time=None, polarization="vv") - self.annotation_fh = SAFEXMLAnnotation(BytesIO(annotation_xml), filename_info, mock.MagicMock()) - self.calibration_fh = SAFEXMLCalibration(BytesIO(calibration_xml), - filename_info, - mock.MagicMock(), - self.annotation_fh) + def setup_method(self): + """Set up testing.""" + self.expected_gamma = np.array([[1840.695, 1779.672, 1718.649, 1452.926, 1187.203, 1186.226, + 1185.249, 1184.276, 1183.303, 1181.365]]) * np.ones((10, 1)) - self.expected_gamma = np.array([[1840.695, 1779.672, 1718.649, 1452.926, 1187.203, 1186.226, - 1185.249, 1184.276, 1183.303, 1181.365]]) * np.ones((10, 1)) - def test_dn_calibration_array(self): + def test_dn_calibration_array(self, calibration_filehandler): """Test reading the dn calibration array.""" expected_dn = np.ones((10, 10)) * 1087 - res = self.calibration_fh.get_calibration(Calibration.dn, chunks=5) + res = calibration_filehandler.get_calibration(Calibration.dn, chunks=5) np.testing.assert_allclose(res, expected_dn) - def test_beta_calibration_array(self): + def test_beta_calibration_array(self, calibration_filehandler): """Test reading the beta calibration array.""" expected_beta = np.ones((10, 10)) * 1087 - res = self.calibration_fh.get_calibration(Calibration.beta_nought, chunks=5) + res = calibration_filehandler.get_calibration(Calibration.beta_nought, chunks=5) np.testing.assert_allclose(res, expected_beta) - def test_sigma_calibration_array(self): + def test_sigma_calibration_array(self, calibration_filehandler): """Test reading the sigma calibration array.""" expected_sigma = np.array([[1894.274, 1841.4335, 1788.593, 1554.4165, 1320.24, 1299.104, 1277.968, 1277.968, 1277.968, 1277.968]]) * np.ones((10, 1)) - res = self.calibration_fh.get_calibration(Calibration.sigma_nought, chunks=5) + res = calibration_filehandler.get_calibration(Calibration.sigma_nought, chunks=5) np.testing.assert_allclose(res, expected_sigma) - def test_gamma_calibration_array(self): + + def test_gamma_calibration_array(self, calibration_filehandler): """Test reading the gamma calibration array.""" - res = self.calibration_fh.get_calibration(Calibration.gamma, chunks=5) + res = calibration_filehandler.get_calibration(Calibration.gamma, chunks=5) np.testing.assert_allclose(res, self.expected_gamma) - def test_get_calibration_dataset(self): + def test_get_calibration_dataset(self, calibration_filehandler): """Test using get_dataset for the calibration.""" query = DataQuery(name="gamma", polarization="vv") - res = self.calibration_fh.get_dataset(query, {}) + res = calibration_filehandler.get_dataset(query, {}) np.testing.assert_allclose(res, self.expected_gamma) - def test_get_calibration_dataset_has_right_chunk_size(self): + def test_get_calibration_dataset_has_right_chunk_size(self, calibration_filehandler): """Test using get_dataset for the calibration yields array with right chunksize.""" query = DataQuery(name="gamma", polarization="vv") - res = self.calibration_fh.get_dataset(query, {}, chunks=3) + res = calibration_filehandler.get_dataset(query, {}, chunks=3) assert res.data.chunksize == (3, 3) np.testing.assert_allclose(res, self.expected_gamma) - def 
test_get_calibration_constant(self): + def test_get_calibration_constant(self, calibration_filehandler): """Test getting the calibration constant.""" query = DataQuery(name="calibration_constant", polarization="vv") - res = self.calibration_fh.get_dataset(query, {}) + res = calibration_filehandler.get_dataset(query, {}) assert res == 1 -class TestSAFEXMLAnnotation(unittest.TestCase): - """Test the SAFE XML Annotation file handler.""" - - def setUp(self): - """Set up the test case.""" - filename_info = dict(start_time=None, end_time=None, polarization="vv") - self.annotation_fh = SAFEXMLAnnotation(BytesIO(annotation_xml), filename_info, mock.MagicMock()) - - def test_incidence_angle(self): - """Test reading the incidence angle.""" - query = DataQuery(name="incidence_angle", polarization="vv") - res = self.annotation_fh.get_dataset(query, {}) - np.testing.assert_allclose(res, 19.18318046) +def test_incidence_angle(annotation_filehandler): + """Test reading the incidence angle in an annotation file.""" + query = DataQuery(name="incidence_angle", polarization="vv") + res = annotation_filehandler.get_dataset(query, {}) + np.testing.assert_allclose(res, 19.18318046) From 2b624dcd3c430c2ff9946c23f90c125331b9f482 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 16 Jan 2024 13:39:39 +0100 Subject: [PATCH 1058/1416] Add a test for reading with the reader --- satpy/tests/reader_tests/test_sar_c_safe.py | 217 +++++++++++--------- 1 file changed, 121 insertions(+), 96 deletions(-) diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index a0e6fb8849..1f966656b7 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -20,10 +20,13 @@ import os from enum import Enum from io import BytesIO +from pathlib import Path import numpy as np import pytest +import yaml +from satpy._config import PACKAGE_CONFIG_PATH from satpy.dataset import DataQuery from satpy.readers.sar_c_safe import SAFEXMLAnnotation, SAFEXMLCalibration, SAFEXMLNoise @@ -164,6 +167,105 @@ def measurement_filehandler(measurement_file, annotation_filehandler, noise_file return filehandler + +expected_longitudes = np.array([[3.79492915e-16, 5.91666667e-01, 9.09722222e-01, + 1.00000000e+00, 9.08333333e-01, 6.80555556e-01, + 3.62500000e-01, 8.32667268e-17, -3.61111111e-01, + -6.75000000e-01, -8.95833333e-01, -9.77777778e-01, + -8.75000000e-01, -5.41666667e-01, 6.80555556e-02, + 1.00000000e+00], + [1.19166667e+00, 1.32437500e+00, 1.36941964e+00, + 1.34166667e+00, 1.25598214e+00, 1.12723214e+00, + 9.70282738e-01, 8.00000000e-01, 6.31250000e-01, + 4.78898810e-01, 3.57812500e-01, 2.82857143e-01, + 2.68898810e-01, 3.30803571e-01, 4.83437500e-01, + 7.41666667e-01], + [1.82638889e+00, 1.77596726e+00, 1.72667765e+00, + 1.67757937e+00, 1.62773172e+00, 1.57619402e+00, + 1.52202558e+00, 1.46428571e+00, 1.40203373e+00, + 1.33432894e+00, 1.26023065e+00, 1.17879819e+00, + 1.08909084e+00, 9.90167942e-01, 8.81088790e-01, + 7.60912698e-01], + [2.00000000e+00, 1.99166667e+00, 1.99305556e+00, + 2.00000000e+00, 2.00833333e+00, 2.01388889e+00, + 2.01250000e+00, 2.00000000e+00, 1.97222222e+00, + 1.92500000e+00, 1.85416667e+00, 1.75555556e+00, + 1.62500000e+00, 1.45833333e+00, 1.25138889e+00, + 1.00000000e+00], + [1.80833333e+00, 2.01669643e+00, 2.18011267e+00, + 2.30119048e+00, 2.38253827e+00, 2.42676446e+00, + 2.43647747e+00, 2.41428571e+00, 2.36279762e+00, + 2.28462160e+00, 2.18236607e+00, 2.05863946e+00, + 1.91605017e+00, 1.75720663e+00, 1.58471726e+00, + 
1.40119048e+00], + [1.34722222e+00, 1.89627976e+00, 2.29940830e+00, + 2.57341270e+00, 2.73509779e+00, 2.80126842e+00, + 2.78872945e+00, 2.71428571e+00, 2.59474206e+00, + 2.44690334e+00, 2.28757440e+00, 2.13356009e+00, + 2.00166525e+00, 1.90869473e+00, 1.87145337e+00, + 1.90674603e+00], + [7.12500000e-01, 1.67563988e+00, 2.36250177e+00, + 2.80892857e+00, 3.05076318e+00, 3.12384850e+00, + 3.06402742e+00, 2.90714286e+00, 2.68903770e+00, + 2.44555485e+00, 2.21253720e+00, 2.02582766e+00, + 1.92126913e+00, 1.93470451e+00, 2.10197669e+00, + 2.45892857e+00], + [5.55111512e-16, 1.40000000e+00, 2.38095238e+00, + 3.00000000e+00, 3.31428571e+00, 3.38095238e+00, + 3.25714286e+00, 3.00000000e+00, 2.66666667e+00, + 2.31428571e+00, 2.00000000e+00, 1.78095238e+00, + 1.71428571e+00, 1.85714286e+00, 2.26666667e+00, + 3.00000000e+00], + [-6.94444444e-01, 1.11458333e+00, 2.36631944e+00, + 3.13888889e+00, 3.51041667e+00, 3.55902778e+00, + 3.36284722e+00, 3.00000000e+00, 2.54861111e+00, + 2.08680556e+00, 1.69270833e+00, 1.44444444e+00, + 1.42013889e+00, 1.69791667e+00, 2.35590278e+00, + 3.47222222e+00], + [-1.27500000e+00, 8.64613095e-01, 2.33016227e+00, + 3.21785714e+00, 3.62390731e+00, 3.64452239e+00, + 3.37591199e+00, 2.91428571e+00, 2.35585317e+00, + 1.79682398e+00, 1.33340774e+00, 1.06181406e+00, + 1.07825255e+00, 1.47893282e+00, 2.36006448e+00, + 3.81785714e+00], + [-1.64583333e+00, 6.95312500e-01, 2.28404018e+00, + 3.22916667e+00, 3.63950893e+00, 3.62388393e+00, + 3.29110863e+00, 2.75000000e+00, 2.10937500e+00, + 1.47805060e+00, 9.64843750e-01, 6.78571429e-01, + 7.28050595e-01, 1.22209821e+00, 2.26953125e+00, + 3.97916667e+00], + [-1.71111111e+00, 6.51904762e-01, 2.23951247e+00, + 3.16507937e+00, 3.54197279e+00, 3.48356009e+00, + 3.10320862e+00, 2.51428571e+00, 1.83015873e+00, + 1.16419501e+00, 6.29761905e-01, 3.40226757e-01, + 4.08956916e-01, 9.49319728e-01, 2.07468254e+00, + 3.89841270e+00], + [-1.37500000e+00, 7.79613095e-01, 2.20813846e+00, + 3.01785714e+00, 3.31605017e+00, 3.20999858e+00, + 2.80698342e+00, 2.21428571e+00, 1.53918651e+00, + 8.88966837e-01, 3.70907738e-01, 9.22902494e-02, + 1.60395408e-01, 6.82504252e-01, 1.76589782e+00, + 3.51785714e+00], + [-5.41666667e-01, 1.12366071e+00, 2.20147747e+00, + 2.77976190e+00, 2.94649235e+00, 2.78964711e+00, + 2.39720451e+00, 1.85714286e+00, 1.25744048e+00, + 6.86075680e-01, 2.31026786e-01, -1.97278912e-02, + 2.17899660e-02, 4.43558673e-01, 1.33355655e+00, + 2.77976190e+00], + [8.84722222e-01, 1.72927083e+00, 2.23108879e+00, + 2.44305556e+00, 2.41805060e+00, 2.20895337e+00, + 1.86864335e+00, 1.45000000e+00, 1.00590278e+00, + 5.89231151e-01, 2.52864583e-01, 4.96825397e-02, + 3.25644841e-02, 2.54389881e-01, 7.68038194e-01, + 1.62638889e+00], + [3.00000000e+00, 2.64166667e+00, 2.30853175e+00, + 2.00000000e+00, 1.71547619e+00, 1.45436508e+00, + 1.21607143e+00, 1.00000000e+00, 8.05555556e-01, + 6.32142857e-01, 4.79166667e-01, 3.46031746e-01, + 2.32142857e-01, 1.36904762e-01, 5.97222222e-02, + 0.00000000e+00]]) + + class Calibration(Enum): """Calibration levels.""" @@ -196,102 +298,7 @@ def test_read_lon_lats(self, measurement_filehandler): """Test reading lons and lats.""" query = DataQuery(name="longitude", polarization="vv") xarr = measurement_filehandler.get_dataset(query, info=dict()) - expected = np.array([[3.79492915e-16, 5.91666667e-01, 9.09722222e-01, - 1.00000000e+00, 9.08333333e-01, 6.80555556e-01, - 3.62500000e-01, 8.32667268e-17, -3.61111111e-01, - -6.75000000e-01, -8.95833333e-01, -9.77777778e-01, - -8.75000000e-01, -5.41666667e-01, 
6.80555556e-02, - 1.00000000e+00], - [1.19166667e+00, 1.32437500e+00, 1.36941964e+00, - 1.34166667e+00, 1.25598214e+00, 1.12723214e+00, - 9.70282738e-01, 8.00000000e-01, 6.31250000e-01, - 4.78898810e-01, 3.57812500e-01, 2.82857143e-01, - 2.68898810e-01, 3.30803571e-01, 4.83437500e-01, - 7.41666667e-01], - [1.82638889e+00, 1.77596726e+00, 1.72667765e+00, - 1.67757937e+00, 1.62773172e+00, 1.57619402e+00, - 1.52202558e+00, 1.46428571e+00, 1.40203373e+00, - 1.33432894e+00, 1.26023065e+00, 1.17879819e+00, - 1.08909084e+00, 9.90167942e-01, 8.81088790e-01, - 7.60912698e-01], - [2.00000000e+00, 1.99166667e+00, 1.99305556e+00, - 2.00000000e+00, 2.00833333e+00, 2.01388889e+00, - 2.01250000e+00, 2.00000000e+00, 1.97222222e+00, - 1.92500000e+00, 1.85416667e+00, 1.75555556e+00, - 1.62500000e+00, 1.45833333e+00, 1.25138889e+00, - 1.00000000e+00], - [1.80833333e+00, 2.01669643e+00, 2.18011267e+00, - 2.30119048e+00, 2.38253827e+00, 2.42676446e+00, - 2.43647747e+00, 2.41428571e+00, 2.36279762e+00, - 2.28462160e+00, 2.18236607e+00, 2.05863946e+00, - 1.91605017e+00, 1.75720663e+00, 1.58471726e+00, - 1.40119048e+00], - [1.34722222e+00, 1.89627976e+00, 2.29940830e+00, - 2.57341270e+00, 2.73509779e+00, 2.80126842e+00, - 2.78872945e+00, 2.71428571e+00, 2.59474206e+00, - 2.44690334e+00, 2.28757440e+00, 2.13356009e+00, - 2.00166525e+00, 1.90869473e+00, 1.87145337e+00, - 1.90674603e+00], - [7.12500000e-01, 1.67563988e+00, 2.36250177e+00, - 2.80892857e+00, 3.05076318e+00, 3.12384850e+00, - 3.06402742e+00, 2.90714286e+00, 2.68903770e+00, - 2.44555485e+00, 2.21253720e+00, 2.02582766e+00, - 1.92126913e+00, 1.93470451e+00, 2.10197669e+00, - 2.45892857e+00], - [5.55111512e-16, 1.40000000e+00, 2.38095238e+00, - 3.00000000e+00, 3.31428571e+00, 3.38095238e+00, - 3.25714286e+00, 3.00000000e+00, 2.66666667e+00, - 2.31428571e+00, 2.00000000e+00, 1.78095238e+00, - 1.71428571e+00, 1.85714286e+00, 2.26666667e+00, - 3.00000000e+00], - [-6.94444444e-01, 1.11458333e+00, 2.36631944e+00, - 3.13888889e+00, 3.51041667e+00, 3.55902778e+00, - 3.36284722e+00, 3.00000000e+00, 2.54861111e+00, - 2.08680556e+00, 1.69270833e+00, 1.44444444e+00, - 1.42013889e+00, 1.69791667e+00, 2.35590278e+00, - 3.47222222e+00], - [-1.27500000e+00, 8.64613095e-01, 2.33016227e+00, - 3.21785714e+00, 3.62390731e+00, 3.64452239e+00, - 3.37591199e+00, 2.91428571e+00, 2.35585317e+00, - 1.79682398e+00, 1.33340774e+00, 1.06181406e+00, - 1.07825255e+00, 1.47893282e+00, 2.36006448e+00, - 3.81785714e+00], - [-1.64583333e+00, 6.95312500e-01, 2.28404018e+00, - 3.22916667e+00, 3.63950893e+00, 3.62388393e+00, - 3.29110863e+00, 2.75000000e+00, 2.10937500e+00, - 1.47805060e+00, 9.64843750e-01, 6.78571429e-01, - 7.28050595e-01, 1.22209821e+00, 2.26953125e+00, - 3.97916667e+00], - [-1.71111111e+00, 6.51904762e-01, 2.23951247e+00, - 3.16507937e+00, 3.54197279e+00, 3.48356009e+00, - 3.10320862e+00, 2.51428571e+00, 1.83015873e+00, - 1.16419501e+00, 6.29761905e-01, 3.40226757e-01, - 4.08956916e-01, 9.49319728e-01, 2.07468254e+00, - 3.89841270e+00], - [-1.37500000e+00, 7.79613095e-01, 2.20813846e+00, - 3.01785714e+00, 3.31605017e+00, 3.20999858e+00, - 2.80698342e+00, 2.21428571e+00, 1.53918651e+00, - 8.88966837e-01, 3.70907738e-01, 9.22902494e-02, - 1.60395408e-01, 6.82504252e-01, 1.76589782e+00, - 3.51785714e+00], - [-5.41666667e-01, 1.12366071e+00, 2.20147747e+00, - 2.77976190e+00, 2.94649235e+00, 2.78964711e+00, - 2.39720451e+00, 1.85714286e+00, 1.25744048e+00, - 6.86075680e-01, 2.31026786e-01, -1.97278912e-02, - 2.17899660e-02, 4.43558673e-01, 1.33355655e+00, - 2.77976190e+00], - 
[8.84722222e-01, 1.72927083e+00, 2.23108879e+00, - 2.44305556e+00, 2.41805060e+00, 2.20895337e+00, - 1.86864335e+00, 1.45000000e+00, 1.00590278e+00, - 5.89231151e-01, 2.52864583e-01, 4.96825397e-02, - 3.25644841e-02, 2.54389881e-01, 7.68038194e-01, - 1.62638889e+00], - [3.00000000e+00, 2.64166667e+00, 2.30853175e+00, - 2.00000000e+00, 1.71547619e+00, 1.45436508e+00, - 1.21607143e+00, 1.00000000e+00, 8.05555556e-01, - 6.32142857e-01, 4.79166667e-01, 3.46031746e-01, - 2.32142857e-01, 1.36904762e-01, 5.97222222e-02, - 0.00000000e+00]]) + expected = expected_longitudes np.testing.assert_allclose(xarr.values, expected[:10, :10]) @@ -831,3 +838,21 @@ def test_incidence_angle(annotation_filehandler): query = DataQuery(name="incidence_angle", polarization="vv") res = annotation_filehandler.get_dataset(query, {}) np.testing.assert_allclose(res, 19.18318046) + + +def test_reading_from_reader(measurement_file, calibration_file, noise_file, annotation_file): + """Test reading using the reader defined in the config.""" + with open(Path(PACKAGE_CONFIG_PATH) / "readers" / "sar-c_safe.yaml") as fd: + config = yaml.load(fd, Loader=yaml.UnsafeLoader) + reader_class = config["reader"]["reader"] + reader = reader_class(config) + + files = [measurement_file, calibration_file, noise_file, annotation_file] + reader.create_filehandlers(files) + query = DataQuery(name="measurement", polarization="vv", + calibration="sigma_nought", quantity="dB") + dataset_dict = reader.load([query]) + array = dataset_dict["measurement"] + np.testing.assert_allclose(array.attrs["area"].lons, expected_longitudes[:10, :10]) + expected_db = np.array([[np.nan, -15.674268], [4.079997, 5.153585]]) + np.testing.assert_allclose(array.values[:2, :2], expected_db) From 7c6e304cff9cd269734f3cf2f72975bd37d86f67 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 16 Jan 2024 14:48:43 +0100 Subject: [PATCH 1059/1416] Fix test --- satpy/tests/reader_tests/test_sar_c_safe.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index 1f966656b7..f801743a08 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -28,6 +28,7 @@ from satpy._config import PACKAGE_CONFIG_PATH from satpy.dataset import DataQuery +from satpy.dataset.dataid import DataID from satpy.readers.sar_c_safe import SAFEXMLAnnotation, SAFEXMLCalibration, SAFEXMLNoise rasterio = pytest.importorskip("rasterio") @@ -851,6 +852,7 @@ def test_reading_from_reader(measurement_file, calibration_file, noise_file, ann reader.create_filehandlers(files) query = DataQuery(name="measurement", polarization="vv", calibration="sigma_nought", quantity="dB") + query = DataID(reader._id_keys, **query.to_dict()) dataset_dict = reader.load([query]) array = dataset_dict["measurement"] np.testing.assert_allclose(array.attrs["area"].lons, expected_longitudes[:10, :10]) From b68daa85c3c8713c7a2ebbf09dc0ab7d56fc05f8 Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Wed, 17 Jan 2024 13:26:05 -0600 Subject: [PATCH 1060/1416] Add abi_geos to AreaDefinition so that the RGB images are named correctly. 
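The hunk below special-cases ABI by duplicating the AreaDefinition construction. An equivalent, deduplicated sketch could parameterize the only part that actually differs; the _build_geos_area helper name is illustrative and not part of this patch.

import numpy as np
from pyresample import geometry


def _build_geos_area(sensor, proj, ncols, nlines, area_extent):
    # Only the area name depends on the sensor ("abi_geos" vs "ahi_geos").
    area_id = "abi_geos" if sensor == "abi" else "ahi_geos"
    return geometry.AreaDefinition(
        area_id,
        f"{sensor.upper()} L2 file area",
        area_id,
        proj,
        ncols,
        nlines,
        np.asarray(area_extent))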
--- satpy/readers/clavrx.py | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/satpy/readers/clavrx.py b/satpy/readers/clavrx.py index 39b3afc007..165de4e696 100644 --- a/satpy/readers/clavrx.py +++ b/satpy/readers/clavrx.py @@ -266,14 +266,24 @@ def _read_axi_fixed_grid(filename: str, sensor: str, l1b_attr) -> geometry.AreaD x, y = l1b['x'], l1b['y'] area_extent, ncols, nlines = _CLAVRxHelper._area_extent(x, y, h) - area = geometry.AreaDefinition( - 'ahi_geos', - "AHI L2 file area", - 'ahi_geos', - proj, - ncols, - nlines, - np.asarray(area_extent)) + if sensor == "abi": + area = geometry.AreaDefinition( + 'abi_geos', + "ABI L2 file area", + 'abi_geos', + proj, + ncols, + nlines, + np.asarray(area_extent)) + else: + area = geometry.AreaDefinition( + 'ahi_geos', + "AHI L2 file area", + 'ahi_geos', + proj, + ncols, + nlines, + np.asarray(area_extent)) return area From 1b2cd7ab71c88efde756199bc591a2c3be29ad64 Mon Sep 17 00:00:00 2001 From: "Nina.Hakansson" Date: Thu, 18 Jan 2024 10:28:01 +0100 Subject: [PATCH 1061/1416] Fixing pre-commit issues --- satpy/readers/viirs_vgac_l1c_nc.py | 14 +++++----- .../reader_tests/test_viirs_vgac_l1c_nc.py | 27 +++++++++---------- 2 files changed, 20 insertions(+), 21 deletions(-) diff --git a/satpy/readers/viirs_vgac_l1c_nc.py b/satpy/readers/viirs_vgac_l1c_nc.py index 2608d160df..1146edbf62 100644 --- a/satpy/readers/viirs_vgac_l1c_nc.py +++ b/satpy/readers/viirs_vgac_l1c_nc.py @@ -81,14 +81,14 @@ def dt64_to_datetime(self, dt64): def extract_time_data(self, data, nc): """Decode time data.""" - reference_time = np.datetime64(datetime.strptime(nc['proj_time0'].attrs["units"], - 'days since %d/%m/%YT%H:%M:%S')) - delta_part_of_day, delta_full_days = np.modf(nc['proj_time0'].values) - delta_full_days = np.timedelta64(int(delta_full_days), 'D') - delta_part_of_day = delta_part_of_day * np.timedelta64(1, 'D').astype('timedelta64[us]') - delta_hours = data.values * np.timedelta64(1, 'h').astype('timedelta64[us]') + reference_time = np.datetime64(datetime.strptime(nc["proj_time0"].attrs["units"], + "days since %d/%m/%YT%H:%M:%S")) + delta_part_of_day, delta_full_days = np.modf(nc["proj_time0"].values) + delta_full_days = np.timedelta64(int(delta_full_days), "D") + delta_part_of_day = delta_part_of_day * np.timedelta64(1, "D").astype("timedelta64[us]") + delta_hours = data.values * np.timedelta64(1, "h").astype("timedelta64[us]") time_data = xr.DataArray(reference_time + delta_full_days + delta_part_of_day + delta_hours, - coords=data.coords, attrs={'long_name': 'Scanline time'}) + coords=data.coords, attrs={"long_name": "Scanline time"}) self._start_time = self.dt64_to_datetime(time_data[0].values) self._end_time = self.dt64_to_datetime(time_data[-1].values) return time_data diff --git a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py index 0120db9f66..e6f76ad641 100644 --- a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py +++ b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py @@ -40,10 +40,10 @@ def nc_filename(tmp_path): nscn = 7 npix = 800 n_lut = 12000 - nc.createDimension('npix', npix) - nc.createDimension('nscn', nscn) - nc.createDimension('n_lut', n_lut) - nc.createDimension('one', 1) + nc.createDimension("npix", npix) + nc.createDimension("nscn", nscn) + nc.createDimension("n_lut", n_lut) + nc.createDimension("one", 1) nc.StartTime = "2023-03-28T09:08:07" nc.EndTime = "2023-03-28T10:11:12" for ind in range(1, 11, 1): @@ -66,18 +66,18 @@ def 
nc_filename(tmp_path): reference_time = np.datetime64("2010-01-01T00:00:00") start_time = np.datetime64("2023-03-28T09:08:07") + np.timedelta64(123, "ms") delta_days = start_time - reference_time - delta_full_days = delta_days.astype('timedelta64[D]') + delta_full_days = delta_days.astype("timedelta64[D]") hidden_reference_time = reference_time + delta_full_days delta_part_of_days = start_time - hidden_reference_time - proj_time0 = nc.createVariable('proj_time0', np.float64, ("one",)) + proj_time0 = nc.createVariable("proj_time0", np.float64, ("one",)) proj_time0[:] = (delta_full_days.astype(int) + - 0.000001 * delta_part_of_days.astype('timedelta64[us]').astype(np.int64) / (60 * 60 * 24)) - proj_time0.units = 'days since 01/01/2010T00:00:00' - time_v = nc.createVariable('time', np.float64, ('nscn',)) + 0.000001 * delta_part_of_days.astype("timedelta64[us]").astype(np.int64) / (60 * 60 * 24)) + proj_time0.units = "days since 01/01/2010T00:00:00" + time_v = nc.createVariable("time", np.float64, ("nscn",)) delta_h = np.datetime64(nc.EndTime) - start_time - delta_hours = 0.000001 * delta_h.astype('timedelta64[us]').astype(int) / (60 * 60) + delta_hours = 0.000001 * delta_h.astype("timedelta64[us]").astype(int) / (60 * 60) time_v[:] = np.linspace(0, delta_hours, num=nscn) - time_v.units = 'hours since proj_time0' + time_v.units = "hours since proj_time0" return filename_str @@ -91,10 +91,9 @@ def test_read_vgac(self, nc_filename): # Read data scn_ = Scene( - reader='viirs_vgac_l1c_nc', - filenames=[_nc_filename]) + reader="viirs_vgac_l1c_nc", + filenames=[nc_filename]) scn_.load(["M05", "M15", "scanline_timestamps"]) - print(scn_["scanline_timestamps"][-1]) assert ((scn_["scanline_timestamps"][0] - np.datetime64("2023-03-28T09:08:07") - np.timedelta64(123, "ms")) < np.timedelta64(5, "us")) assert ((scn_["scanline_timestamps"][-1] - np.datetime64("2023-03-28T10:11:12")) < np.timedelta64(5, "us")) From 4fa7fbe9591772369ae61e39196f85518a13f2ab Mon Sep 17 00:00:00 2001 From: "Nina.Hakansson" Date: Thu, 18 Jan 2024 11:13:06 +0100 Subject: [PATCH 1062/1416] flake8 --- satpy/etc/readers/viirs_vgac_l1c_nc.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/readers/viirs_vgac_l1c_nc.yaml b/satpy/etc/readers/viirs_vgac_l1c_nc.yaml index 875cdfa2c5..33dc9571d2 100644 --- a/satpy/etc/readers/viirs_vgac_l1c_nc.yaml +++ b/satpy/etc/readers/viirs_vgac_l1c_nc.yaml @@ -256,4 +256,4 @@ datasets: proj_time0: name: proj_time0 file_type: vgac_nc - nc_key: proj_time0 \ No newline at end of file + nc_key: proj_time0 From dff503341a270b79276fc73e2678c701c517147e Mon Sep 17 00:00:00 2001 From: "Nina.Hakansson" Date: Thu, 18 Jan 2024 12:38:45 +0100 Subject: [PATCH 1063/1416] Added two more tests --- .../reader_tests/test_viirs_vgac_l1c_nc.py | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py index e6f76ad641..2766ef13ec 100644 --- a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py +++ b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py @@ -25,6 +25,7 @@ import datetime import numpy as np +import xarray as xr import pytest from netCDF4 import Dataset @@ -107,3 +108,26 @@ def test_read_vgac(self, nc_filename): hour=9, minute=8, second=7) assert scn_.end_time == datetime.datetime(year=2023, month=3, day=28, hour=10, minute=11, second=12) + + def test_dt64_to_datetime(self): + from satpy.readers.viirs_vgac_l1c_nc import VGACFileHandler + fh = 
VGACFileHandler(filename="", + filename_info={"start_time": "2023-03-28T09:08:07"}, + filetype_info="") + in_dt = datetime.datetime(year=2023, month=3, day=28, + hour=9, minute=8, second=7) + out_dt = fh.dt64_to_datetime(in_dt) + assert out_dt == in_dt + + def test_decode_time_variable(self): + from satpy.readers.viirs_vgac_l1c_nc import VGACFileHandler + fh = VGACFileHandler(filename="", + filename_info={"start_time": "2023-03-28T09:08:07"}, + filetype_info="") + data = xr.DataArray( + [[1, 2], + [3, 4]], + dims=('y', 'x'), + attrs={"units": "something not expected"}) + with pytest.raises(AttributeError): + fh.decode_time_variable(data, "time", None) From cae8018257a54d1b353d1cc88a713e68ba9b1ea6 Mon Sep 17 00:00:00 2001 From: "Nina.Hakansson" Date: Thu, 18 Jan 2024 12:42:30 +0100 Subject: [PATCH 1064/1416] make pre-commit happy --- satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py index 2766ef13ec..883ac8c709 100644 --- a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py +++ b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py @@ -25,8 +25,8 @@ import datetime import numpy as np -import xarray as xr import pytest +import xarray as xr from netCDF4 import Dataset @@ -110,6 +110,7 @@ def test_read_vgac(self, nc_filename): hour=10, minute=11, second=12) def test_dt64_to_datetime(self): + """Test datetime conversion branch.""" from satpy.readers.viirs_vgac_l1c_nc import VGACFileHandler fh = VGACFileHandler(filename="", filename_info={"start_time": "2023-03-28T09:08:07"}, @@ -120,6 +121,7 @@ def test_dt64_to_datetime(self): assert out_dt == in_dt def test_decode_time_variable(self): + """Test decode time variable branch.""" from satpy.readers.viirs_vgac_l1c_nc import VGACFileHandler fh = VGACFileHandler(filename="", filename_info={"start_time": "2023-03-28T09:08:07"}, @@ -127,7 +129,7 @@ def test_decode_time_variable(self): data = xr.DataArray( [[1, 2], [3, 4]], - dims=('y', 'x'), + dims=("y", "x"), attrs={"units": "something not expected"}) with pytest.raises(AttributeError): fh.decode_time_variable(data, "time", None) From 81aaf034e9bf319ef1bd5eef5699494cf74e017c Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Fri, 19 Jan 2024 12:51:54 -0600 Subject: [PATCH 1065/1416] Fix problems with tests by splitting into separate files Check for a float32 because that is what satpy is expecting Update valid_range to list if np.ndarray to fix type error in scale_data. 
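Two small illustrations of the points above, assuming nothing beyond NumPy itself (the variable names are made up): scaling float32 data with float32 factors keeps the result at float32, which is what the updated dtype assertions check, and an ndarray-valued valid_range converts cleanly to a plain list before being stored back into the attributes.

import numpy as np

counts = np.arange(4, dtype=np.float32)
scale_factor = np.float32(0.0018616290763020515)
add_offset = np.float32(59.0)
scaled = counts * scale_factor + add_offset
assert scaled.dtype == np.float32  # no silent promotion to float64

valid_range = np.array([-32767, 32767])  # as read from the HDF4 attribute
if isinstance(valid_range, np.ndarray):
    valid_range = valid_range.tolist()
assert valid_range == [-32767, 32767]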
--- satpy/readers/clavrx.py | 3 +- .../tests/reader_tests/test_clavrx_geohdf.py | 246 ++++++++++++++++++ satpy/tests/reader_tests/test_clavrx_nc.py | 4 +- ...test_clavrx.py => test_clavrx_polarhdf.py} | 211 +-------------- 4 files changed, 251 insertions(+), 213 deletions(-) create mode 100644 satpy/tests/reader_tests/test_clavrx_geohdf.py rename satpy/tests/reader_tests/{test_clavrx.py => test_clavrx_polarhdf.py} (55%) diff --git a/satpy/readers/clavrx.py b/satpy/readers/clavrx.py index 39ba70d03d..e23bde44e5 100644 --- a/satpy/readers/clavrx.py +++ b/satpy/readers/clavrx.py @@ -156,7 +156,8 @@ def _get_data(data, dataset_id: dict) -> xr.DataArray: valid_range = attrs.get("valid_range", [None]) if isinstance(valid_range, np.ndarray): - attrs["valid_range"] = valid_range.tolist() + valid_range = valid_range.tolist() + attrs["valid_range"] = valid_range flags = not data.attrs.get("SCALED", 1) and any(flag_values) if flags: diff --git a/satpy/tests/reader_tests/test_clavrx_geohdf.py b/satpy/tests/reader_tests/test_clavrx_geohdf.py new file mode 100644 index 0000000000..85a7f6faa3 --- /dev/null +++ b/satpy/tests/reader_tests/test_clavrx_geohdf.py @@ -0,0 +1,246 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2018 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . 
+"""Module for testing the satpy.readers.clavrx module.""" + +import os +import unittest +from unittest import mock + +import numpy as np +import pytest +import xarray as xr +from pyresample.geometry import AreaDefinition + +from satpy.tests.reader_tests.test_hdf4_utils import FakeHDF4FileHandler + +DEFAULT_FILE_DTYPE = np.uint16 +DEFAULT_FILE_SHAPE = (10, 300) +DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], + dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) +DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) +DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) +DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) +DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) +DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) + +class FakeHDF4FileHandlerGeo(FakeHDF4FileHandler): + """Swap-in HDF4 File Handler.""" + + def get_test_content(self, filename, filename_info, filetype_info): + """Mimic reader input file content.""" + file_content = { + "/attr/platform": "HIM8", + "/attr/sensor": "AHI", + # this is a Level 2 file that came from a L1B file + "/attr/L1B": "clavrx_H08_20180806_1800", + } + + file_content["longitude"] = xr.DataArray( + DEFAULT_LON_DATA, + dims=("y", "x"), + attrs={ + "_FillValue": np.nan, + "scale_factor": 1., + "add_offset": 0., + "standard_name": "longitude", + }) + file_content["longitude/shape"] = DEFAULT_FILE_SHAPE + + file_content["latitude"] = xr.DataArray( + DEFAULT_LAT_DATA, + dims=("y", "x"), + attrs={ + "_FillValue": np.nan, + "scale_factor": 1., + "add_offset": 0., + "standard_name": "latitude", + }) + file_content["latitude/shape"] = DEFAULT_FILE_SHAPE + + file_content["refl_1_38um_nom"] = xr.DataArray( + DEFAULT_FILE_DATA.astype(np.float32), + dims=("y", "x"), + attrs={ + "SCALED": 1, + "add_offset": 59.0, + "scale_factor": 0.0018616290763020515, + "units": "%", + "_FillValue": -32768, + "valid_range": [-32767, 32767], + "actual_range": [-2., 120.], + "actual_missing": -999.0 + }) + file_content["refl_1_38um_nom/shape"] = DEFAULT_FILE_SHAPE + + # data with fill values + file_content["variable2"] = xr.DataArray( + DEFAULT_FILE_DATA.astype(np.float32), + dims=("y", "x"), + attrs={ + "_FillValue": -1, + "scale_factor": 1., + "add_offset": 0., + "units": "1", + }) + file_content["variable2/shape"] = DEFAULT_FILE_SHAPE + file_content["variable2"] = file_content["variable2"].where( + file_content["variable2"] % 2 != 0) + + # category + file_content["variable3"] = xr.DataArray( + DEFAULT_FILE_DATA.astype(np.byte), + dims=("y", "x"), + attrs={ + "SCALED": 0, + "_FillValue": -128, + "flag_meanings": "clear water supercooled mixed ice unknown", + "flag_values": [0, 1, 2, 3, 4, 5], + "units": "1", + }) + file_content["variable3/shape"] = DEFAULT_FILE_SHAPE + + return file_content + + +class TestCLAVRXReaderGeo(unittest.TestCase): + """Test CLAVR-X Reader with Geo files.""" + + yaml_file = "clavrx.yaml" + + def setUp(self): + """Wrap HDF4 file handler with our own fake handler.""" + from satpy._config import config_search_paths + from satpy.readers.clavrx import CLAVRXHDF4FileHandler + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) + # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library + self.p = mock.patch.object(CLAVRXHDF4FileHandler, "__bases__", (FakeHDF4FileHandlerGeo,)) + self.fake_handler = self.p.start() + 
self.p.is_local = True + + def tearDown(self): + """Stop wrapping the NetCDF4 file handler.""" + self.p.stop() + + def test_init(self): + """Test basic init with no extra parameters.""" + from satpy.readers import load_reader + r = load_reader(self.reader_configs) + loadables = r.select_files_from_pathnames([ + "clavrx_H08_20180806_1800.level2.hdf", + ]) + assert len(loadables) == 1 + r.create_filehandlers(loadables) + # make sure we have some files + assert r.file_handlers + + def test_no_nav_donor(self): + """Test exception raised when no donor file is available.""" + import xarray as xr + + from satpy.readers import load_reader + r = load_reader(self.reader_configs) + fake_fn = "clavrx_H08_20180806_1800.level2.hdf" + with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray): + loadables = r.select_files_from_pathnames([fake_fn]) + r.create_filehandlers(loadables) + l1b_base = fake_fn.split(".")[0] + msg = f"Missing navigation donor {l1b_base}" + with pytest.raises(IOError, match=msg): + r.load(["refl_1_38um_nom", "variable2", "variable3"]) + + def test_load_all_old_donor(self): + """Test loading all test datasets with old donor.""" + import xarray as xr + + from satpy.readers import load_reader + r = load_reader(self.reader_configs) + with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray): + loadables = r.select_files_from_pathnames([ + "clavrx_H08_20180806_1800.level2.hdf", + ]) + r.create_filehandlers(loadables) + with mock.patch("satpy.readers.clavrx.glob") as g, mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d: + g.return_value = ["fake_donor.nc"] + x = np.linspace(-0.1518, 0.1518, 300) + y = np.linspace(0.1518, -0.1518, 10) + proj = mock.Mock( + semi_major_axis=6378.137, + semi_minor_axis=6356.7523142, + perspective_point_height=35791, + longitude_of_projection_origin=140.7, + sweep_angle_axis="y", + ) + d.return_value = fake_donor = mock.MagicMock( + variables={"Projection": proj, "x": x, "y": y}, + ) + fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] + datasets = r.load(["refl_1_38um_nom", "variable2", "variable3"]) + assert len(datasets) == 3 + for v in datasets.values(): + assert "calibration" not in v.attrs + assert v.attrs["units"] in ["1", "%"] + assert isinstance(v.attrs["area"], AreaDefinition) + if v.attrs.get("flag_values"): + assert "_FillValue" in v.attrs + else: + assert "_FillValue" not in v.attrs + if v.attrs["name"] == "refl_1_38um_nom": + assert "valid_range" in v.attrs + assert isinstance(v.attrs["valid_range"], list) + else: + assert "valid_range" not in v.attrs + if "flag_values" in v.attrs: + assert np.issubdtype(v.dtype, np.integer) + assert v.attrs.get("flag_meanings") is not None + + def test_load_all_new_donor(self): + """Test loading all test datasets with new donor.""" + import xarray as xr + + from satpy.readers import load_reader + r = load_reader(self.reader_configs) + with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray): + loadables = r.select_files_from_pathnames([ + "clavrx_H08_20180806_1800.level2.hdf", + ]) + r.create_filehandlers(loadables) + with mock.patch("satpy.readers.clavrx.glob") as g, mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d: + g.return_value = ["fake_donor.nc"] + x = np.linspace(-0.1518, 0.1518, 300) + y = np.linspace(0.1518, -0.1518, 10) + proj = mock.Mock( + semi_major_axis=6378137, + semi_minor_axis=6356752.3142, + perspective_point_height=35791000, + longitude_of_projection_origin=140.7, + sweep_angle_axis="y", + ) + d.return_value = fake_donor = mock.MagicMock( + 
variables={"goes_imager_projection": proj, "x": x, "y": y}, + ) + fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] + datasets = r.load(["refl_1_38um_nom", "variable2", "variable3"]) + assert len(datasets) == 3 + for v in datasets.values(): + assert "calibration" not in v.attrs + assert v.attrs["units"] in ["1", "%"] + assert isinstance(v.attrs["area"], AreaDefinition) + assert v.attrs["area"].is_geostationary is True + assert v.attrs["platform_name"] == "himawari8" + assert v.attrs["sensor"] == "ahi" + assert datasets["variable3"].attrs.get("flag_meanings") is not None diff --git a/satpy/tests/reader_tests/test_clavrx_nc.py b/satpy/tests/reader_tests/test_clavrx_nc.py index 50b07306de..5f3e82f1dc 100644 --- a/satpy/tests/reader_tests/test_clavrx_nc.py +++ b/satpy/tests/reader_tests/test_clavrx_nc.py @@ -210,7 +210,7 @@ def test_load_all_new_donor(self, filenames, loadable_ids): # should have file variable and one alias for reflectance assert "valid_range" not in datasets["variable1"].attrs assert "_FillValue" not in datasets["variable1"].attrs - assert np.float64 == datasets["variable1"].dtype + assert np.float32 == datasets["variable1"].dtype assert "valid_range" not in datasets["variable1"].attrs assert np.issubdtype(datasets["var_flags"].dtype, np.integer) @@ -220,7 +220,7 @@ def test_load_all_new_donor(self, filenames, loadable_ids): assert "valid_range" not in datasets["out_of_range_flags"].attrs assert isinstance(datasets["refl_0_65um_nom"].valid_range, list) - assert np.float64 == datasets["refl_0_65um_nom"].dtype + assert np.float32 == datasets["refl_0_65um_nom"].dtype assert "_FillValue" not in datasets["refl_0_65um_nom"].attrs assert "valid_range" in datasets["refl_0_65um_nom"].attrs diff --git a/satpy/tests/reader_tests/test_clavrx.py b/satpy/tests/reader_tests/test_clavrx_polarhdf.py similarity index 55% rename from satpy/tests/reader_tests/test_clavrx.py rename to satpy/tests/reader_tests/test_clavrx_polarhdf.py index a962afacd2..6b69d8a923 100644 --- a/satpy/tests/reader_tests/test_clavrx.py +++ b/satpy/tests/reader_tests/test_clavrx_polarhdf.py @@ -23,9 +23,8 @@ import dask.array as da import numpy as np -import pytest import xarray as xr -from pyresample.geometry import AreaDefinition, SwathDefinition +from pyresample.geometry import SwathDefinition from satpy.tests.reader_tests.test_hdf4_utils import FakeHDF4FileHandler @@ -258,211 +257,3 @@ def test_load_all(self): assert v.attrs["area"].lons.attrs["rows_per_scan"] == 16 assert v.attrs["area"].lats.attrs["rows_per_scan"] == 16 assert isinstance(datasets["variable3"].attrs.get("flag_meanings"), list) - - -class FakeHDF4FileHandlerGeo(FakeHDF4FileHandler): - """Swap-in HDF4 File Handler.""" - - def get_test_content(self, filename, filename_info, filetype_info): - """Mimic reader input file content.""" - file_content = { - "/attr/platform": "HIM8", - "/attr/sensor": "AHI", - # this is a Level 2 file that came from a L1B file - "/attr/L1B": "clavrx_H08_20180806_1800", - } - - file_content["longitude"] = xr.DataArray( - DEFAULT_LON_DATA, - dims=("y", "x"), - attrs={ - "_FillValue": np.nan, - "scale_factor": 1., - "add_offset": 0., - "standard_name": "longitude", - }) - file_content["longitude/shape"] = DEFAULT_FILE_SHAPE - - file_content["latitude"] = xr.DataArray( - DEFAULT_LAT_DATA, - dims=("y", "x"), - attrs={ - "_FillValue": np.nan, - "scale_factor": 1., - "add_offset": 0., - "standard_name": "latitude", - }) - file_content["latitude/shape"] = DEFAULT_FILE_SHAPE - - file_content["refl_1_38um_nom"] 
= xr.DataArray( - DEFAULT_FILE_DATA.astype(np.float32), - dims=("y", "x"), - attrs={ - "SCALED": 1, - "add_offset": 59.0, - "scale_factor": 0.0018616290763020515, - "units": "%", - "_FillValue": -32768, - "valid_range": [-32767, 32767], - "actual_range": [-2., 120.], - "actual_missing": -999.0 - }) - file_content["refl_1_38um_nom/shape"] = DEFAULT_FILE_SHAPE - - # data with fill values - file_content["variable2"] = xr.DataArray( - DEFAULT_FILE_DATA.astype(np.float32), - dims=("y", "x"), - attrs={ - "_FillValue": -1, - "scale_factor": 1., - "add_offset": 0., - "units": "1", - }) - file_content["variable2/shape"] = DEFAULT_FILE_SHAPE - file_content["variable2"] = file_content["variable2"].where( - file_content["variable2"] % 2 != 0) - - # category - file_content["variable3"] = xr.DataArray( - DEFAULT_FILE_DATA.astype(np.byte), - dims=("y", "x"), - attrs={ - "SCALED": 0, - "_FillValue": -128, - "flag_meanings": "clear water supercooled mixed ice unknown", - "flag_values": [0, 1, 2, 3, 4, 5], - "units": "1", - }) - file_content["variable3/shape"] = DEFAULT_FILE_SHAPE - - return file_content - - -class TestCLAVRXReaderGeo(unittest.TestCase): - """Test CLAVR-X Reader with Geo files.""" - - yaml_file = "clavrx.yaml" - - def setUp(self): - """Wrap HDF4 file handler with our own fake handler.""" - from satpy._config import config_search_paths - from satpy.readers.clavrx import CLAVRXHDF4FileHandler - self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) - # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(CLAVRXHDF4FileHandler, "__bases__", (FakeHDF4FileHandlerGeo,)) - self.fake_handler = self.p.start() - self.p.is_local = True - - def tearDown(self): - """Stop wrapping the NetCDF4 file handler.""" - self.p.stop() - - def test_init(self): - """Test basic init with no extra parameters.""" - from satpy.readers import load_reader - r = load_reader(self.reader_configs) - loadables = r.select_files_from_pathnames([ - "clavrx_H08_20180806_1800.level2.hdf", - ]) - assert len(loadables) == 1 - r.create_filehandlers(loadables) - # make sure we have some files - assert r.file_handlers - - def test_no_nav_donor(self): - """Test exception raised when no donor file is available.""" - import xarray as xr - - from satpy.readers import load_reader - r = load_reader(self.reader_configs) - fake_fn = "clavrx_H08_20180806_1800.level2.hdf" - with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray): - loadables = r.select_files_from_pathnames([fake_fn]) - r.create_filehandlers(loadables) - l1b_base = fake_fn.split(".")[0] - msg = f"Missing navigation donor {l1b_base}" - with pytest.raises(IOError, match=msg): - r.load(["refl_1_38um_nom", "variable2", "variable3"]) - - def test_load_all_old_donor(self): - """Test loading all test datasets with old donor.""" - import xarray as xr - - from satpy.readers import load_reader - r = load_reader(self.reader_configs) - with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray): - loadables = r.select_files_from_pathnames([ - "clavrx_H08_20180806_1800.level2.hdf", - ]) - r.create_filehandlers(loadables) - with mock.patch("satpy.readers.clavrx.glob") as g, mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d: - g.return_value = ["fake_donor.nc"] - x = np.linspace(-0.1518, 0.1518, 300) - y = np.linspace(0.1518, -0.1518, 10) - proj = mock.Mock( - semi_major_axis=6378.137, - semi_minor_axis=6356.7523142, - perspective_point_height=35791, - longitude_of_projection_origin=140.7, - 
sweep_angle_axis="y", - ) - d.return_value = fake_donor = mock.MagicMock( - variables={"Projection": proj, "x": x, "y": y}, - ) - fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] - datasets = r.load(["refl_1_38um_nom", "variable2", "variable3"]) - assert len(datasets) == 3 - for v in datasets.values(): - assert "calibration" not in v.attrs - assert v.attrs["units"] in ["1", "%"] - assert isinstance(v.attrs["area"], AreaDefinition) - if v.attrs.get("flag_values"): - assert "_FillValue" in v.attrs - else: - assert "_FillValue" not in v.attrs - if v.attrs["name"] == "refl_1_38um_nom": - assert "valid_range" in v.attrs - assert isinstance(v.attrs["valid_range"], list) - else: - assert "valid_range" not in v.attrs - if "flag_values" in v.attrs: - assert np.issubdtype(v.dtype, np.integer) - assert v.attrs.get("flag_meanings") is not None - - def test_load_all_new_donor(self): - """Test loading all test datasets with new donor.""" - import xarray as xr - - from satpy.readers import load_reader - r = load_reader(self.reader_configs) - with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray): - loadables = r.select_files_from_pathnames([ - "clavrx_H08_20180806_1800.level2.hdf", - ]) - r.create_filehandlers(loadables) - with mock.patch("satpy.readers.clavrx.glob") as g, mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d: - g.return_value = ["fake_donor.nc"] - x = np.linspace(-0.1518, 0.1518, 300) - y = np.linspace(0.1518, -0.1518, 10) - proj = mock.Mock( - semi_major_axis=6378137, - semi_minor_axis=6356752.3142, - perspective_point_height=35791000, - longitude_of_projection_origin=140.7, - sweep_angle_axis="y", - ) - d.return_value = fake_donor = mock.MagicMock( - variables={"goes_imager_projection": proj, "x": x, "y": y}, - ) - fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] - datasets = r.load(["refl_1_38um_nom", "variable2", "variable3"]) - assert len(datasets) == 3 - for v in datasets.values(): - assert "calibration" not in v.attrs - assert v.attrs["units"] in ["1", "%"] - assert isinstance(v.attrs["area"], AreaDefinition) - assert v.attrs["area"].is_geostationary is True - assert v.attrs["platform_name"] == "himawari8" - assert v.attrs["sensor"] == "ahi" - assert datasets["variable3"].attrs.get("flag_meanings") is not None From 40e6ad5ebd860b11c38abb9acfd97640596afc12 Mon Sep 17 00:00:00 2001 From: Olivier Samain Date: Mon, 22 Jan 2024 15:26:04 +0100 Subject: [PATCH 1066/1416] add missing units. 
Remove radiance_mean__ --- satpy/etc/readers/fci_l2_nc.yaml | 724 +------------------------------ 1 file changed, 4 insertions(+), 720 deletions(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index daca83ada1..e3e063f09b 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -1768,6 +1768,7 @@ datasets: file_type: nc_fci_asr nc_key: quality_radiance fill_value: -1 + units: '%' coordinates: - longitude - latitude @@ -1778,6 +1779,7 @@ datasets: resolution: 32000 file_type: nc_fci_asr nc_key: land_pixel_percent + units: '%' coordinates: - longitude - latitude @@ -1788,6 +1790,7 @@ datasets: resolution: 32000 file_type: nc_fci_asr nc_key: water_pixel_percent + units: '%' coordinates: - longitude - latitude @@ -1798,6 +1801,7 @@ datasets: resolution: 32000 file_type: nc_fci_asr nc_key: pixel_percentage + units: '%' coordinates: - longitude - latitude @@ -2546,726 +2550,6 @@ datasets: - longitude - latitude - radiance_mean_all_vis04: - name: radiance_mean_all_vis04 - long_name: TOA Radiance Segment mean at 0.44um (all pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 0 - wavelength: [0.384, 0.444, 0.504] - category_id: 0 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_clear_vis04: - name: radiance_mean_clear_vis04 - long_name: TOA Radiance Segment mean at 0.44um (clear pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 0 - wavelength: [0.384, 0.444, 0.504] - category_id: 1 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_cloudy_vis04: - name: radiance_mean_cloudy_vis04 - long_name: TOA Radiance Segment mean at 0.44um (cloudy pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 0 - wavelength: [0.384, 0.444, 0.504] - category_id: 2 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_all_vis05: - name: radiance_mean_all_vis05 - long_name: TOA Radiance Segment mean at 0.51um (all pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 1 - wavelength: [0.47, 0.51, 0.55] - category_id: 0 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_clear_vis05: - name: radiance_mean_clear_vis05 - long_name: TOA Radiance Segment mean at 0.51um (clear pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 1 - wavelength: [0.47, 0.51, 0.55] - category_id: 1 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_cloudy_vis05: - name: radiance_mean_cloudy_vis05 - long_name: TOA Radiance Segment mean at 0.51um (cloudy pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 1 - wavelength: [0.47, 0.51, 0.55] - category_id: 2 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_all_vis06: - name: radiance_mean_all_vis06 - long_name: TOA Radiance Segment mean at 0.64um (all pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 2 - wavelength: [0.59, 0.64, 0.69] - category_id: 0 - cell_method: area:mean - coordinates: - - longitude - - 
latitude - - radiance_mean_clear_vis06: - name: radiance_mean_clear_vis06 - long_name: TOA Radiance Segment mean at 0.64um (clear pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 2 - wavelength: [0.59, 0.64, 0.69] - category_id: 1 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_cloudy_vis06: - name: radiance_mean_cloudy_vis06 - long_name: TOA Radiance Segment mean at 0.64um (cloudy pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 2 - wavelength: [0.59, 0.64, 0.69] - category_id: 2 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_all_vis08: - name: radiance_mean_all_vis08 - long_name: TOA Radiance Segment mean at 0.86um (all pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 3 - wavelength: [0.815, 0.865, 0.915] - category_id: 0 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_clear_vis08: - name: radiance_mean_clear_vis08 - long_name: TOA Radiance Segment mean at 0.86um (clear pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 3 - wavelength: [0.815, 0.865, 0.915] - category_id: 1 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_cloudy_vis08: - name: radiance_mean_cloudy_vis08 - long_name: TOA Radiance Segment mean at 0.86um (cloudy pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 3 - wavelength: [0.815, 0.865, 0.915] - category_id: 2 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_all_vis09: - name: radiance_mean_all_vis09 - long_name: TOA Radiance Segment mean at 0.91um (all pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 4 - wavelength: [0.894, 0.914, 0.934] - category_id: 0 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_clear_vis09: - name: radiance_mean_clear_vis09 - long_name: TOA Radiance Segment mean at 0.91um (clear pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 4 - wavelength: [0.894, 0.914, 0.934] - category_id: 1 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_cloudy_vis09: - name: radiance_mean_cloudy_vis09 - long_name: TOA Radiance Segment mean at 0.91um (cloudy pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 4 - wavelength: [0.894, 0.914, 0.934] - category_id: 2 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_all_nir13: - name: radiance_mean_all_nir13 - long_name: TOA Radiance Segment mean at 1.38um (all pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 5 - wavelength: [1.35, 1.38, 1.41] - category_id: 0 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_clear_nir13: - name: radiance_mean_clear_nir13 - long_name: TOA Radiance Segment mean at 1.38um (clear pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - 
nc_key: radiance_mean - channel_id: 5 - wavelength: [1.35, 1.38, 1.41] - category_id: 1 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_cloudy_nir13: - name: radiance_mean_cloudy_nir13 - long_name: TOA Radiance Segment mean at 1.38um (cloudy pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 5 - wavelength: [1.35, 1.38, 1.41] - category_id: 2 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_all_nir16: - name: radiance_mean_all_nir16 - long_name: TOA Radiance Segment mean at 1.61um (all pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 6 - wavelength: [1.56, 1.61, 1.66] - category_id: 0 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_clear_nir16: - name: radiance_mean_clear_nir16 - long_name: TOA Radiance Segment mean at 1.61um (clear pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 6 - wavelength: [1.56, 1.61, 1.66] - category_id: 1 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_cloudy_nir16: - name: radiance_mean_cloudy_nir16 - long_name: TOA Radiance Segment mean at 1.61um (cloudy pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 6 - wavelength: [1.56, 1.61, 1.66] - category_id: 2 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_all_nir22: - name: radiance_mean_all_nir22 - long_name: TOA Radiance Segment mean at 2.25um (all pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 7 - wavelength: [2.2, 2.25, 2.3] - category_id: 0 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_clear_nir22: - name: radiance_mean_clear_nir22 - long_name: TOA Radiance Segment mean at 2.25um (clear pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 7 - wavelength: [2.2, 2.25, 2.3] - category_id: 1 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_cloudy_nir22: - name: radiance_mean_cloudy_nir22 - long_name: TOA Radiance Segment mean at 2.25um (cloudy pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 7 - wavelength: [2.2, 2.25, 2.3] - category_id: 2 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_all_ir38: - name: radiance_mean_all_ir38 - long_name: TOA Radiance Segment mean at 3.80um (all pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 8 - wavelength: [3.4, 3.8, 4.2] - category_id: 0 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_clear_ir38: - name: radiance_mean_clear_ir38 - long_name: TOA Radiance Segment mean at 3.80um (clear pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 8 - wavelength: [3.4, 3.8, 4.2] - category_id: 1 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_cloudy_ir38: - name: radiance_mean_cloudy_ir38 - long_name: TOA Radiance Segment mean 
at 3.80um (cloudy pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 8 - wavelength: [3.4, 3.8, 4.2] - category_id: 2 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_all_wv63: - name: radiance_mean_all_wv63 - long_name: TOA Radiance Segment mean at 6.30um (all pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 9 - wavelength: [5.3, 6.3, 7.3] - category_id: 0 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_clear_wv63: - name: radiance_mean_clear_wv63 - long_name: TOA Radiance Segment mean at 6.30um (clear pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 9 - wavelength: [5.3, 6.3, 7.3] - category_id: 1 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_cloudy_wv63: - name: radiance_mean_cloudy_wv63 - long_name: TOA Radiance Segment mean at 6.30um (cloudy pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 9 - wavelength: [5.3, 6.3, 7.3] - category_id: 2 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_all_wv73: - name: radiance_mean_all_wv73 - long_name: TOA Radiance Segment mean at 7.35um (all pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 10 - wavelength: [6.85, 7.35, 7.85] - category_id: 0 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_clear_wv73: - name: radiance_mean_clear_wv73 - long_name: TOA Radiance Segment mean at 7.35um (clear pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 10 - wavelength: [6.85, 7.35, 7.85] - category_id: 1 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_cloudy_wv73: - name: radiance_mean_cloudy_wv73 - long_name: TOA Radiance Segment mean at 7.35um (cloudy pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 10 - wavelength: [6.85, 7.35, 7.85] - category_id: 2 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_all_ir87: - name: radiance_mean_all_ir87 - long_name: TOA Radiance Segment mean at 8.70um (all pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 11 - wavelength: [8.3, 8.7, 9.1] - category_id: 0 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_clear_ir87: - name: radiance_mean_clear_ir87 - long_name: TOA Radiance Segment mean at 8.70um (clear pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 11 - wavelength: [8.3, 8.7, 9.1] - category_id: 1 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_cloudy_ir87: - name: radiance_mean_cloudy_ir87 - long_name: TOA Radiance Segment mean at 8.70um (cloudy pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 11 - wavelength: [8.3, 8.7, 9.1] - category_id: 2 - cell_method: area:mean - coordinates: - - longitude - - latitude - - 
radiance_mean_all_ir97: - name: radiance_mean_all_ir97 - long_name: TOA Radiance Segment mean at 9.66um (all pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 12 - wavelength: [9.36, 9.66, 9.96] - category_id: 0 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_clear_ir97: - name: radiance_mean_clear_ir97 - long_name: TOA Radiance Segment mean at 9.66um (clear pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 12 - wavelength: [9.36, 9.66, 9.96] - category_id: 1 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_cloudy_ir97: - name: radiance_mean_cloudy_ir97 - long_name: TOA Radiance Segment mean at 9.66um (cloudy pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 12 - wavelength: [9.36, 9.66, 9.96] - category_id: 2 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_all_ir105: - name: radiance_mean_all_ir105 - long_name: TOA Radiance Segment mean at 10.50um (all pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 13 - wavelength: [9.8, 10.5, 11.2] - category_id: 0 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_clear_ir105: - name: radiance_mean_clear_ir105 - long_name: TOA Radiance Segment mean at 10.50um (clear pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 13 - wavelength: [9.8, 10.5, 11.2] - category_id: 1 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_cloudy_ir105: - name: radiance_mean_cloudy_ir105 - long_name: TOA Radiance Segment mean at 10.50um (cloudy pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 13 - wavelength: [9.8, 10.5, 11.2] - category_id: 2 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_all_ir123: - name: radiance_mean_all_ir123 - long_name: TOA Radiance Segment mean at 12.30um (all pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 14 - wavelength: [11.8, 12.3, 12.8] - category_id: 0 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_clear_ir123: - name: radiance_mean_clear_ir123 - long_name: TOA Radiance Segment mean at 12.30um (clear pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 14 - wavelength: [11.8, 12.3, 12.8] - category_id: 1 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_cloudy_ir123: - name: radiance_mean_cloudy_ir123 - long_name: TOA Radiance Segment mean at 12.30um (cloudy pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 14 - wavelength: [11.8, 12.3, 12.8] - category_id: 2 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_all_ir133: - name: radiance_mean_all_ir133 - long_name: TOA Radiance Segment mean at 13.30um (all pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - 
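Every one of the removed radiance_mean_* entries points at the same nc_key (radiance_mean) and differs only in its channel_id and category_id, which pick out one spectral channel and one pixel category (all/clear/cloudy) from a single multidimensional variable. A minimal sketch of that selection pattern, with hypothetical dimension names (number_of_channels and number_of_categories are illustrative assumptions, not the actual FCI ASR names):

```python
import numpy as np
import xarray as xr

# Hypothetical ASR-style variable: one array holding every channel (16)
# and every pixel category (all/clear/cloudy); dimension names are assumed.
radiance_mean = xr.DataArray(
    np.random.rand(16, 3, 4, 4),
    dims=("number_of_channels", "number_of_categories", "y", "x"),
)

def select_segment(data: xr.DataArray, channel_id: int, category_id: int) -> xr.DataArray:
    """Slice out one channel/category, mirroring the YAML channel_id/category_id keys."""
    return data.isel(number_of_channels=channel_id, number_of_categories=category_id)

# e.g. the removed radiance_mean_cloudy_ir133 entry carried channel_id: 15, category_id: 2
cloudy_ir133 = select_segment(radiance_mean, channel_id=15, category_id=2)
print(cloudy_ir133.shape)  # (4, 4)
```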
channel_id: 15 - wavelength: [12.7, 13.3, 13.9] - category_id: 0 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_clear_ir133: - name: radiance_mean_clear_ir133 - long_name: TOA Radiance Segment mean at 13.30um (clear pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 15 - wavelength: [12.7, 13.3, 13.9] - category_id: 1 - cell_method: area:mean - coordinates: - - longitude - - latitude - - radiance_mean_cloudy_ir133: - name: radiance_mean_cloudy_ir133 - long_name: TOA Radiance Segment mean at 13.30um (cloudy pixels) - standard_name: toa_outgoing_radiance - resolution: 32000 - file_type: nc_fci_asr - nc_key: radiance_mean - channel_id: 15 - wavelength: [12.7, 13.3, 13.9] - category_id: 2 - cell_method: area:mean - coordinates: - - longitude - - latitude - quality_reflectance_all_vis04: name: quality_reflectance_all_vis04 long_name: TOA Bidirectional Reflectance % Confidence at 0.44um (all pixels) From bf543d10f751c4d10f0377039e703cfe0a73a8aa Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 24 Jan 2024 10:30:20 -0600 Subject: [PATCH 1067/1416] Fix AGRI L1 C07 having a valid LUT value for its fill value --- satpy/readers/fy4_base.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/satpy/readers/fy4_base.py b/satpy/readers/fy4_base.py index 144e559858..4a3c0bc625 100644 --- a/satpy/readers/fy4_base.py +++ b/satpy/readers/fy4_base.py @@ -28,6 +28,7 @@ import dask.array as da import numpy as np +import numpy.typing as npt import xarray as xr from satpy._compat import cached_property @@ -86,7 +87,7 @@ def scale(dn, slope, offset): return ref - def apply_lut(self, data, lut): + def _apply_lut(self, data: xr.DataArray, lut: npt.NDArray[np.float32]) -> xr.DataArray: """Calibrate digital number (DN) by applying a LUT. Args: @@ -96,8 +97,15 @@ def apply_lut(self, data, lut): Calibrated quantity """ # append nan to the end of lut for fillvalue - lut = np.append(lut, np.nan) - data.data = da.where(data.data > lut.shape[0], lut.shape[0] - 1, data.data) + fill_value = data.attrs.get("FillValue") + if fill_value is not None: + if fill_value.item() > lut.shape[0] - 1: + lut = np.append(lut, np.nan) + data.data = da.where(data.data > lut.shape[0], lut.shape[0] - 1, data.data) + else: + # Ex. 
C07 has a LUT of 65536 elements, but fill value is 65535 + # This is considered a bug in the input file format + lut[fill_value] = np.nan res = data.data.map_blocks(self._getitem, lut, dtype=lut.dtype) res = xr.DataArray(res, dims=data.dims, attrs=data.attrs, coords=data.coords) @@ -182,7 +190,7 @@ def calibrate_to_bt(self, data, ds_info, ds_name): lut = self[lut_key] # the value of dn is the index of brightness_temperature - data = self.apply_lut(data, lut) + data = self._apply_lut(data, lut.compute().data) ds_info["valid_range"] = lut.attrs["valid_range"] return data From 8efe73b8280de17bd85f5a7e6823508e79df9c9d Mon Sep 17 00:00:00 2001 From: Will Sharpe Date: Wed, 24 Jan 2024 16:32:25 +0000 Subject: [PATCH 1068/1416] Viirs_l2 initial commit --- satpy/etc/enhancements/viirs.yaml | 87 +++++++++++ satpy/etc/readers/viirs_l2.yaml | 130 +++++++++++++++++ satpy/readers/viirs_l2.py | 168 ++++++++++++++++++++++ satpy/readers/yaml_reader.py | 8 +- satpy/tests/reader_tests/test_viirs_l2.py | 149 +++++++++++++++++++ 5 files changed, 536 insertions(+), 6 deletions(-) create mode 100644 satpy/etc/readers/viirs_l2.yaml create mode 100644 satpy/readers/viirs_l2.py create mode 100644 satpy/tests/reader_tests/test_viirs_l2.py diff --git a/satpy/etc/enhancements/viirs.yaml b/satpy/etc/enhancements/viirs.yaml index 8b3751167d..a21e47aa7f 100644 --- a/satpy/etc/enhancements/viirs.yaml +++ b/satpy/etc/enhancements/viirs.yaml @@ -79,3 +79,90 @@ enhancements: ], min_value: 0, max_value: 201} + + Cloud_Top_Height: + name: Cloud_Top_Height + operations: + - name: palettize + method: !!python/name:satpy.enhancements.palettize + kwargs: + palettes: + - {colors: [ + [255, 0, 0], + [170, 0, 0], + [110, 0, 0], + [112, 1, 2], + [124, 91, 5], + [240, 190, 64], + [255, 255, 0], + [0, 220, 0], + [0, 136, 0], + [0, 80, 0], + [0, 136, 238], + [0, 0, 255], + [0, 0, 170], + [0, 0, 100], + [183, 15, 141], + [102, 0, 119] + ], + values: [ + 0, + 800, + 1600, + 2350, + 3150, + 4000, + 4800, + 5600, + 6400, + 7200, + 8000, + 8800, + 9600, + 10400, + 11200, + 12000 + ], + min_value: 0, + max_value: 18000, + } + + Clear_Sky_Confidence: + name: Clear_Sky_Confidence + operations: + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - {colors: [[255, 247, 236], [254, 246, 233], [254, 244, 230], [254, 243, 228], [254, 242, 224], [254, 241, 222], [254, 239, 219], [254, 239, 216], [254, 237, 213], [254, 236, 210], [254, 235, 207], [254, 233, 204], [254, 232, 202], [253, 231, 198], [253, 230, 195], [253, 228, 191], [253, 226, 189], [253, 225, 185], [253, 223, 181], [253, 221, 178], [253, 220, 174], [253, 218, 172], [253, 216, 168], [253, 215, 165], [253, 213, 161], [253, 211, 157], [253, 210, 156], [253, 207, 153], [253, 206, 152], [253, 203, 149], [253, 202, 148], [253, 200, 145], [253, 198, 143], [253, 196, 141], [253, 193, 139], [253, 192, 137], [253, 189, 134], [253, 188, 133], [252, 185, 130], [252, 182, 127], [252, 177, 123], [252, 174, 120], [252, 170, 116], [252, 166, 112], [252, 163, 109], [252, 159, 105], [252, 156, 103], [252, 151, 99], [252, 148, 96], [252, 144, 92], [251, 140, 88], [250, 137, 87], [249, 134, 86], [248, 131, 85], [247, 127, 83], [246, 125, 82], [245, 121, 80], [244, 119, 79], [243, 115, 78], [242, 111, 76], [241, 109, 75], [240, 105, 73], [239, 102, 72], [237, 98, 69], [236, 94, 67], [234, 89, 63], [232, 86, 60], [230, 81, 57], [227, 76, 53], [226, 73, 50], [224, 68, 46], [222, 65, 44], [220, 60, 40], [218, 56, 37], [216, 51, 33], [214, 46, 30], [211, 43, 28], [208, 
39, 25], [206, 36, 23], [202, 31, 20], [200, 28, 18], [197, 24, 15], [194, 21, 13], [191, 16, 10], [188, 12, 7], [185, 9, 5], [182, 4, 3], [180, 1, 1], [175, 0, 0], [172, 0, 0], [167, 0, 0], [164, 0, 0], [159, 0, 0], [154, 0, 0], [151, 0, 0], [146, 0, 0], [143, 0, 0], [138, 0, 0], [135, 0, 0], [130, 0, 0], [127, 0, 0]], + values: [0, 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.1, 0.11, 0.12, 0.13, 0.14, 0.15, 0.16, 0.17, 0.18, 0.19, 0.2, 0.21, 0.22, 0.23, 0.24, 0.25, 0.26, 0.27, 0.28, 0.29, 0.3, 0.31, 0.32, 0.33, 0.34, 0.35, 0.36, 0.37, 0.38, 0.39, 0.4, 0.41, 0.42, 0.43, 0.44, 0.45, 0.46, 0.47, 0.48, 0.49, 0.5, 0.51, 0.52, 0.53, 0.54, 0.55, 0.56, 0.57, 0.58, 0.59, 0.6, 0.61, 0.62, 0.63, 0.64, 0.65, 0.66, 0.67, 0.68, 0.69, 0.7, 0.71, 0.72, 0.73, 0.74, 0.75, 0.76, 0.77, 0.78, 0.79, 0.8, 0.81, 0.82, 0.83, 0.84, 0.85, 0.86, 0.87, 0.88, 0.89, 0.9, 0.91, 0.92, 0.93, 0.94, 0.95, 0.96, 0.97, 0.98, 0.99, 1], + min_value: 0.0, + max_value: 1.0 + } + + + Aerosol_Optical_Thickness_550_Land_Ocean: + name: Aerosol_Optical_Thickness_550_Land_Ocean + operations: + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - {colors: 'ylorrd', + min_value: 0.0, + max_value: 1.0, + } + + Angstrom_Exponent_Land_Ocean: + name: Angstrom_Exponent_Land_Ocean + operations: + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - {colors: [[122, 145, 2], [123, 148, 3], [124, 150, 4], [124, 153, 5], [125, 155, 6], [126, 158, 7], [127, 160, 8], [127, 163, 9], [128, 165, 10], [129, 168, 11], [130, 170, 12], [130, 173, 13], [131, 175, 14], [132, 178, 15], [133, 181, 16], [132, 183, 18], [132, 185, 20], [132, 187, 22], [132, 189, 25], [132, 191, 27], [132, 193, 29], [132, 195, 31], [131, 197, 34], [131, 199, 36], [131, 201, 38], [131, 203, 40], [131, 205, 43], [131, 207, 45], [131, 209, 47], [131, 212, 50], [130, 213, 51], [129, 215, 53], [128, 217, 55], [128, 219, 57], [127, 221, 59], [126, 222, 61], [125, 224, 63], [125, 226, 64], [124, 228, 66], [123, 230, 68], [122, 231, 70], [122, 233, 72], [121, 235, 74], [120, 237, 76], [120, 239, 78], [119, 239, 79], [118, 240, 80], [117, 241, 82], [116, 242, 83], [116, 243, 85], [115, 244, 86], [114, 245, 87], [113, 246, 89], [112, 247, 90], [112, 248, 92], [111, 249, 93], [110, 250, 94], [109, 251, 96], [108, 252, 97], [108, 253, 99], [107, 252, 100], [106, 252, 102], [106, 252, 103], [105, 251, 105], [105, 251, 106], [104, 251, 108], [103, 251, 109], [103, 250, 111], [102, 250, 112], [102, 250, 114], [101, 250, 115], [100, 249, 117], [100, 249, 118], [99, 249, 120], [99, 249, 122], [98, 247, 123], [97, 246, 124], [96, 245, 126], [95, 244, 127], [94, 243, 128], [93, 242, 130], [92, 241, 131], [92, 239, 132], [91, 238, 134], [90, 237, 135], [89, 236, 136], [88, 235, 138], [87, 234, 139], [86, 233, 140], [86, 232, 142], [85, 230, 143], [84, 229, 144], [83, 228, 145], [82, 226, 147], [81, 225, 148], [80, 224, 149], [79, 223, 150], [78, 221, 152], [77, 220, 153], [76, 219, 154], [75, 218, 155], [74, 216, 157], [73, 215, 158], [72, 214, 159], [72, 213, 161], [71, 211, 162], [70, 209, 163], [69, 208, 164], [68, 206, 165], [67, 205, 166], [66, 203, 167], [65, 201, 168], [64, 200, 170], [63, 198, 171], [62, 197, 172], [61, 195, 173], [60, 193, 174], [59, 192, 175], [58, 190, 176], [58, 189, 178], [58, 187, 178], [58, 185, 179], [58, 184, 180], [58, 182, 181], [58, 181, 182], [58, 179, 183], [58, 178, 184], [59, 176, 184], [59, 175, 185], [59, 173, 186], [59, 172, 187], [59, 170, 188], [59, 169, 
189], [59, 167, 190], [60, 166, 191], [60, 164, 191], [61, 162, 192], [61, 160, 193], [62, 158, 194], [63, 156, 195], [63, 154, 195], [64, 152, 196], [64, 150, 197], [65, 148, 198], [66, 146, 199], [66, 144, 199], [67, 142, 200], [67, 140, 201], [68, 138, 202], [69, 137, 203], [69, 135, 203], [70, 133, 204], [70, 131, 205], [71, 129, 205], [72, 128, 206], [72, 126, 207], [73, 124, 207], [73, 122, 208], [74, 120, 209], [75, 119, 209], [75, 117, 210], [76, 115, 211], [76, 113, 211], [77, 111, 212], [78, 110, 213], [78, 108, 213], [79, 106, 214], [80, 104, 214], [80, 102, 215], [81, 101, 216], [82, 99, 216], [82, 97, 217], [83, 95, 217], [84, 93, 218], [84, 92, 219], [85, 90, 219], [86, 88, 220], [86, 86, 220], [87, 84, 221], [88, 83, 222], [88, 82, 222], [89, 81, 223], [90, 80, 223], [91, 80, 224], [92, 79, 224], [93, 78, 225], [94, 77, 225], [95, 77, 226], [96, 76, 226], [97, 75, 227], [98, 74, 227], [99, 74, 228], [100, 73, 228], [101, 72, 229], [102, 72, 230], [104, 72, 230], [106, 73, 230], [108, 73, 230], [110, 74, 231], [112, 74, 231], [114, 75, 231], [116, 75, 231], [118, 76, 232], [120, 76, 232], [122, 77, 232], [124, 77, 232], [126, 78, 233], [128, 78, 233], [130, 79, 233], [133, 80, 234], [135, 80, 234], [137, 80, 234], [139, 81, 234], [141, 81, 234], [143, 81, 234], [145, 82, 234], [147, 82, 234], [149, 82, 234], [151, 83, 234], [153, 83, 234], [155, 83, 234], [157, 84, 234], [159, 84, 234], [161, 84, 234], [164, 85, 235], [165, 85, 235], [166, 85, 235], [168, 85, 235], [169, 85, 235], [171, 85, 235], [172, 85, 235], [174, 85, 235], [175, 86, 235], [177, 86, 235], [178, 86, 235], [180, 86, 235], [181, 86, 235], [183, 86, 235], [184, 86, 235], [186, 87, 235], [187, 87, 234], [188, 87, 234], [190, 87, 234], [191, 88, 234], [193, 88, 234], [194, 88, 234], [196, 88, 234], [197, 89, 234], [199, 89, 234], [200, 89, 234], [202, 89, 234]], + values: [0, 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.1, 0.11, 0.12, 0.13, 0.14, 0.15, 0.16, 0.17, 0.18, 0.19, 0.2, 0.21, 0.22, 0.23, 0.24, 0.25, 0.26, 0.27, 0.28, 0.29, 0.3, 0.31, 0.32, 0.33, 0.34, 0.35, 0.36, 0.37, 0.38, 0.39, 0.4, 0.41, 0.42, 0.43, 0.44, 0.45, 0.46, 0.47, 0.48, 0.49, 0.5, 0.51, 0.52, 0.53, 0.54, 0.55, 0.56, 0.57, 0.58, 0.59, 0.6, 0.61, 0.62, 0.63, 0.64, 0.65, 0.66, 0.67, 0.68, 0.69, 0.7, 0.71, 0.72, 0.73, 0.74, 0.75, 0.76, 0.77, 0.78, 0.79, 0.8, 0.81, 0.82, 0.83, 0.84, 0.85, 0.86, 0.87, 0.88, 0.89, 0.9, 0.91, 0.92, 0.93, 0.94, 0.95, 0.96, 0.97, 0.98, 0.99, 1, 1.01, 1.02, 1.03, 1.04, 1.05, 1.06, 1.07, 1.08, 1.09, 1.1, 1.11, 1.12, 1.13, 1.14, 1.15, 1.16, 1.17, 1.18, 1.19, 1.2, 1.21, 1.22, 1.23, 1.24, 1.25, 1.26, 1.27, 1.28, 1.29, 1.3, 1.31, 1.32, 1.33, 1.34, 1.35, 1.36, 1.37, 1.38, 1.39, 1.4, 1.41, 1.42, 1.43, 1.44, 1.45, 1.46, 1.47, 1.48, 1.49, 1.5, 1.51, 1.52, 1.53, 1.54, 1.55, 1.56, 1.57, 1.58, 1.59, 1.6, 1.61, 1.62, 1.63, 1.64, 1.65, 1.66, 1.67, 1.68, 1.69, 1.7, 1.71, 1.72, 1.73, 1.74, 1.75, 1.76, 1.77, 1.78, 1.79, 1.8, 1.81, 1.82, 1.83, 1.84, 1.85, 1.86, 1.87, 1.88, 1.89, 1.9, 1.91, 1.92, 1.93, 1.94, 1.95, 1.96, 1.97, 1.98, 1.99, 2, 2.01, 2.02, 2.03, 2.04, 2.05, 2.06, 2.07, 2.08, 2.09, 2.1, 2.11, 2.12, 2.13, 2.14, 2.15, 2.16, 2.17, 2.18, 2.19, 2.2, 2.21, 2.22, 2.23, 2.24, 2.25, 2.26, 2.27, 2.28, 2.29, 2.3, 2.31, 2.32, 2.33, 2.34, 2.35, 2.36, 2.37, 2.38, 2.39, 2.4, 2.41, 2.42, 2.43, 2.44, 2.45, 2.46, 2.47, 2.48, 2.49, 2.5], + min_value: -0.5, + max_value: 2.5, + } + diff --git a/satpy/etc/readers/viirs_l2.yaml b/satpy/etc/readers/viirs_l2.yaml new file mode 100644 index 0000000000..045b69cd75 --- /dev/null +++ 
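The palettize and colorize enhancements configured above use their values/colors pairs differently: palettize is a step function that assigns each pixel the color of the bin its value falls into, while colorize interpolates linearly between the control points. A rough sketch of the two behaviours, assuming equal-length values and colors arrays (an illustration, not satpy's actual implementation):

```python
import numpy as np

values = np.array([0, 800, 1600, 2350])          # control points (e.g. heights in m)
colors = np.array([[255, 0, 0], [170, 0, 0],     # matching RGB triples
                   [110, 0, 0], [112, 1, 2]], dtype=float)

def palettize(data, values, colors):
    """Step function: each pixel gets the color of the bin it falls into."""
    idx = np.clip(np.searchsorted(values, data, side="right") - 1, 0, len(values) - 1)
    return colors[idx]

def colorize(data, values, colors):
    """Linear interpolation of each RGB band between the control points."""
    return np.stack([np.interp(data, values, colors[:, band]) for band in range(3)], axis=-1)

data = np.array([0.0, 1000.0, 2000.0])
print(palettize(data, values, colors))  # discrete bin colors
print(colorize(data, values, colors))   # smooth gradient between control points
```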
b/satpy/etc/readers/viirs_l2.yaml @@ -0,0 +1,130 @@ +reader: + name: viirs_l2 + short_name: VIIRS L2 + long_name: SNPP VIIRS Level 2 data in netCDF4 format + description: Generic NASA VIIRS L2 Reader + status: Alpha + supports_fsspec: false + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + sensors: [viirs] + default_datasets: + +file_types: + cldprop_l2_viirs: + file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler + file_patterns: + - 'CLDPROP_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc' + cldmsk_l2_viirs: + file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler + file_patterns: + - 'CLDMSK_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc' + aerdb_l2_viirs: + file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler + file_patterns: + - 'AERDB_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc' + aerdb_l2_viirs_nrt: + file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler + file_patterns: + - 'AERDB_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.nrt.nc' + cldir_l2_viirs: + file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler + file_patterns: + - 'CLDIR_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf' + aerdt_l2_viirs: + file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler + file_patterns: + - 'AERDT_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc' + fsnrad_l2_viirs: + file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler + file_patterns: + - 'FSNRAD_L2_VIIRS_CRIS_SS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc' + +datasets: + cld_lon: + name: cld_lon + resolution: + file_type: [cldmsk_l2_viirs, cldprop_l2_viirs] + file_key: geolocation_data/longitude + units: degrees + standard_name: longitude + cld_lat: + name: cld_lat + resolution: + file_type: [cldmsk_l2_viirs, cldprop_l2_viirs] + file_key: geolocation_data/latitude + units: degrees + standard_name: latitude + aerdb_lon: + name: aerdb_lon + resolution: + file_type: [aerdb_l2_viirs, aerdb_l2_viirs_nrt] + file_key: Longitude + units: degrees + standard_name: Longitude + aerdb_lat: + name: aerdb_lat + resolution: + file_type: [aerdb_l2_viirs, aerdb_l2_viirs_nrt] + file_key: Latitude + units: degrees + standard_name: Latitude + aerdt_lon: + name: aerdt_lon + resolution: + file_type: [aerdt_l2_viirs] + file_key: longitude + units: degrees + standard_name: longitude + aerdt_lat: + name: aerdt_lat + resolution: + file_type: [aerdt_l2_viirs] + file_key: latitude + units: degrees + standard_name: latitude + +################################## +# Datasets in file cldmsk_l2_viirs +################################## + Clear_Sky_Confidence: + name: Clear_Sky_Confidence + long_name: VIIRS Clear Sky Confidence + units: None + coordinates: [cld_lon, cld_lat] + resolution: 742 + file_key: geophysical_data/Clear_Sky_Confidence + file_type: cldmsk_l2_viirs + + +################################### +# Datasets in file cldprop_l2_viirs +################################### + Cloud_Top_Height: + name: Cloud_Top_Height + long_name: Cloud Top Height from NOAA CLAVR-x AWG algorithm + units: m + coordinates: [cld_lon,cld_lat] + resolution: 742 + file_key: geophysical_data/Cloud_Top_Height + file_type: 
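Satpy resolves file_patterns like the ones above with the trollsift package, which both matches filenames and extracts the typed fields, including the strftime-style datetime specs. A quick sketch of parsing one of the cloud-mask filenames used later in the tests (assumes trollsift is installed and behaves as satpy relies on it):

```python
from trollsift import Parser

# One of the patterns from the reader YAML above
pattern = ("CLDMSK_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}"
           ".{collection:03d}.{production_time:%Y%j%H%M%S}.nc")
parser = Parser(pattern)

info = parser.parse("CLDMSK_L2_VIIRS_SNPP.A2023364.2230.001.2023365105952.nc")
print(info["spacecraft_name"])  # 'SNPP'
print(info["start_time"])       # datetime(2023, 12, 30, 22, 30) -- day 364 of 2023
print(info["collection"])       # 1
```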
cldprop_l2_viirs + +########################################## +# Datasets in files aerdb_l2_viirs and nrt +########################################## + Angstrom_Exponent_Land_Ocean: + name: Angstrom_Exponent_Land_Ocean + long_name: Deep Blue/SOAR Angstrom exponent over land and ocean + units: None + coordinates: [aerdb_lon,aerdb_lat] + resolution: 742 + file_key: Angstrom_Exponent_Land_Ocean + file_type: [aerdb_l2_viirs, aerdb_l2_viirs_nrt] + + Aerosol_Optical_Thickness_550_Land_Ocean: + name: Aerosol_Optical_Thickness_550_Land_Ocean + long_name: Deep Blue/SOAR aerosol optical thickness at 550 nm over land and ocean + units: None + coordinates: [aerdb_lon,aerdb_lat] + resolution: 742 + file_key: Aerosol_Optical_Thickness_550_Land_Ocean + file_type: [aerdb_l2_viirs, aerdb_l2_viirs_nrt] diff --git a/satpy/readers/viirs_l2.py b/satpy/readers/viirs_l2.py new file mode 100644 index 0000000000..e199d75437 --- /dev/null +++ b/satpy/readers/viirs_l2.py @@ -0,0 +1,168 @@ +import logging +from datetime import datetime + +import numpy as np + +from satpy.readers.netcdf_utils import NetCDF4FileHandler +import xarray as xr +from pyresample.geometry import AreaDefinition +import numpy as np + +LOG = logging.getLogger(__name__) + + +class VIIRSL2FileHandler(NetCDF4FileHandler): + def _parse_datetime(self, datestr): + """Parse datetime.""" + return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%S.000Z") + + @property + def start_time(self): + """Get start time.""" + return self._parse_datetime(self["/attr/time_coverage_start"]) + + @property + def end_time(self): + """Get end time.""" + return self._parse_datetime(self["/attr/time_coverage_end"]) + + @property + def start_orbit_number(self): + """Get start orbit number.""" + try: + return int(self["/attr/orbit_number"]) + except KeyError: + return int(self["/attr/OrbitNumber"]) + + @property + def end_orbit_number(self): + """Get end orbit number.""" + try: + return int(self["/attr/orbit_number"]) + except KeyError: + return int(self["/attr/OrbitNumber"]) + + @property + def platform_name(self): + """Get platform name.""" + try: + res = self.get("/attr/platform", self.filename_info["platform_shortname"]) + except KeyError: + res = "Unknown" + + return { + "JPSS-1": "NOAA-20", + "NP": "Suomi-NPP", + "J1": "NOAA-20", + "J2": "NOAA-21", + "JPSS-2": "NOAA-21", + }.get(res, res) + + @property + def sensor_name(self): + """Get sensor name.""" + return self["/attr/instrument"].lower() + + def _get_dataset_file_units(self, dataset_id, ds_info, var_path): + file_units = ds_info.get("units") + if file_units is None: + file_units = self.get(var_path + "/attr/units") + if file_units == "none" or file_units == "None": + file_units = "1" + return file_units + + def _get_dataset_valid_range(self, dataset_id, ds_info, var_path): + valid_min = self.get(var_path + "/attr/valid_min") + valid_max = self.get(var_path + "/attr/valid_max") + if not valid_min and not valid_max: + valid_range = self.get(var_path + "/attr/valid_range") + if valid_range: + valid_min = valid_range[0] + valid_max = valid_range[1] + scale_factor = self.get(var_path + "/attr/scale_factor") + scale_offset = self.get(var_path + "/attr/add_offset") + return valid_min, valid_max, scale_factor, scale_offset + + def get_metadata(self, dataset_id, ds_info): + """Get metadata.""" + var_path = ds_info.get("file_key", ds_info["name"]) + file_units = self._get_dataset_file_units(dataset_id, ds_info, var_path) + + # Get extra metadata + i = getattr(self[var_path], "attrs", {}) + i.update(ds_info) + 
i.update(dataset_id.to_dict()) + i.update( + { + "file_units": file_units, + "platform_name": self.platform_name, + "sensor": self.sensor_name, + "start_orbit": self.start_orbit_number, + "end_orbit": self.end_orbit_number, + } + ) + return i + + def adjust_scaling_factors(self, factors, file_units, output_units): + """Adjust scaling factors.""" + if factors is None or factors[0] is None: + factors = [1, 0] + if file_units == output_units: + LOG.debug("File units and output units are the same (%s)", file_units) + return factors + factors = np.array(factors) + + if file_units == "1" and output_units == "%": + LOG.debug( + "Adjusting scaling factors to convert '%s' to '%s'", + file_units, + output_units, + ) + factors[::2] = np.where(factors[::2] != -999, factors[::2] * 100.0, -999) + factors[1::2] = np.where(factors[1::2] != -999, factors[1::2] * 100.0, -999) + return factors + else: + return factors + + def available_datasets(self, configured_datasets=None): + """Generate dataset info and their availability. + + See + :meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets` + for details. + + """ + for is_avail, ds_info in configured_datasets or []: + if is_avail is not None: + yield is_avail, ds_info + continue + ft_matches = self.file_type_matches(ds_info["file_type"]) + var_path = ds_info.get("file_key", ds_info["name"]) + is_in_file = var_path in self + yield ft_matches and is_in_file, ds_info + + def get_dataset(self, ds_id, ds_info) -> xr.DataArray: + """Load, mask, and scale the requested dataset.""" + var_path = ds_info.get("file_key", ds_info["name"]) + metadata = self.get_metadata(ds_id, ds_info) + ( + valid_min, + valid_max, + scale_factor, + scale_offset, + ) = self._get_dataset_valid_range(ds_id, ds_info, var_path) + data = self[var_path] + data.attrs.update(metadata) + if valid_min is not None and valid_max is not None: + data = data.where((data >= valid_min) & (data <= valid_max)) + factors = (scale_factor, scale_offset) + factors = self.adjust_scaling_factors( + factors, metadata["file_units"], ds_info.get("units") + ) + if factors[0] != 1 or factors[1] != 0: + data *= factors[0] + data += factors[1] + # rename dimensions to correspond to satpy's 'y' and 'x' standard + if "number_of_lines" in data.dims: + data = data.rename({"number_of_lines": "y", "number_of_pixels": "x"}) + return data diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py index 5444d7e16f..29aaaf0955 100644 --- a/satpy/readers/yaml_reader.py +++ b/satpy/readers/yaml_reader.py @@ -261,7 +261,6 @@ def select_files_from_pathnames(self, filenames): """Select the files from *filenames* this reader can handle.""" selected_filenames = [] filenames = set(filenames) # make a copy of the inputs - for pattern in self.file_patterns: matching = _match_filenames(filenames, pattern) filenames -= matching @@ -493,7 +492,6 @@ def _new_filehandler_instances(self, filetype_info, filename_items, fh_kwargs=No """Generate new filehandler instances.""" requirements = filetype_info.get("requires") filetype_cls = filetype_info["file_reader"] - if fh_kwargs is None: fh_kwargs = {} @@ -509,7 +507,6 @@ def _new_filehandler_instances(self, filetype_info, filename_items, fh_kwargs=No except RuntimeError as err: warnings.warn(str(err) + " for {}".format(filename), stacklevel=4) continue - yield filetype_cls(filename, filename_info, filetype_info, *req_fh, **fh_kwargs) def time_matches(self, fstart, fend): @@ -786,9 +783,9 @@ def _get_lons_lats_from_coords(self, coords): """Get lons and lats from the coords 
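The get_dataset path above first masks values outside valid_min/valid_max and then applies scale_factor/add_offset, with adjust_scaling_factors folding the fraction-to-percent conversion ('1' to '%') into the factors themselves. A condensed sketch of that pipeline (illustrative only; in the real reader these numbers come from netCDF attributes):

```python
import numpy as np
import xarray as xr

def calibrate(data, valid_min, valid_max, scale=1.0, offset=0.0, to_percent=False):
    """Mask out-of-range values, then apply scale/offset (optionally x100 for '%')."""
    if to_percent:  # mirrors converting file_units '1' to output units '%'
        scale, offset = scale * 100.0, offset * 100.0
    data = data.where((data >= valid_min) & (data <= valid_max))
    return data * scale + offset

raw = xr.DataArray(np.array([[0, 500, 4096]], dtype=np.uint16))
print(calibrate(raw, valid_min=0, valid_max=4095, scale=0.001, to_percent=True).values)
# approximately [[ 0.  50.  nan]] -- 4096 falls outside the valid range
```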
list.""" lons, lats = None, None for coord in coords: - if coord.attrs.get("standard_name") == "longitude": + if coord.attrs.get("standard_name").lower() == "longitude": lons = coord - elif coord.attrs.get("standard_name") == "latitude": + elif coord.attrs.get("standard_name").lower() == "latitude": lats = coord if lons is None or lats is None: raise ValueError("Missing longitude or latitude coordinate: " + str(coords)) @@ -826,7 +823,6 @@ def _load_dataset_with_area(self, dsid, coords, **kwargs): return None coords = self._assign_coords_from_dataarray(coords, ds) - area = self._load_dataset_area(dsid, file_handlers, coords, **kwargs) if area is not None: diff --git a/satpy/tests/reader_tests/test_viirs_l2.py b/satpy/tests/reader_tests/test_viirs_l2.py new file mode 100644 index 0000000000..e514ecdc1f --- /dev/null +++ b/satpy/tests/reader_tests/test_viirs_l2.py @@ -0,0 +1,149 @@ +import os +from datetime import datetime, timedelta +from unittest import mock + +import numpy as np +import pytest + +from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler +from satpy.tests.utils import convert_file_content_to_data_array +from satpy.readers import load_reader + +DEFAULT_FILE_DTYPE = np.uint16 +DEFAULT_FILE_SHAPE = (10, 300) +DEFAULT_FILE_DATA = np.arange( + DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE +).reshape(DEFAULT_FILE_SHAPE) +DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) +DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) +DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) +DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) +DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) + + +class FakeNetCDF4FileHandlerVIIRSL2(FakeNetCDF4FileHandler): + """Swap-in NetCDF4 File Handler.""" + + def get_test_content(self, filename, filename_info, filetype_info): + """Mimic reader input file content.""" + dt = filename_info.get("start_time", datetime(2023, 12, 30, 22, 30, 0)) + file_type = filename[:6] + num_lines = DEFAULT_FILE_SHAPE[0] + num_pixels = DEFAULT_FILE_SHAPE[1] + num_scans = 5 + file_content = { + "/dimension/number_of_scans": num_scans, + "/dimension/number_of_lines": num_lines, + "/dimension/number_of_pixels": num_pixels, + "/attr/time_coverage_start": dt.strftime("%Y-%m-%dT%H:%M:%S.000Z"), + "/attr/time_coverage_end": (dt + timedelta(minutes=6)).strftime( + "%Y-%m-%dT%H:%M:%S.000Z" + ), + "/attr/orbit_number": 26384, + "/attr/instrument": "VIIRS", + "/attr/platform": "Suomi-NPP", + } + self._fill_contents_with_default_data(file_content, file_type) + convert_file_content_to_data_array(file_content) + return file_content + + def _fill_contents_with_default_data(self, file_content, file_type): + """Fill file contents with default data.""" + if file_type.startswith("CLD"): + file_content["geolocation_data/latitude"] = DEFAULT_LAT_DATA + file_content["geolocation_data/longitude"] = DEFAULT_LON_DATA + if file_type == "CLDPRO": + file_content["geophysical_data/Cloud_Top_Height"] = DEFAULT_FILE_DATA + elif file_type == "CLDMSK": + file_content[ + "geophysical_data/Clear_Sky_Confidence" + ] = DEFAULT_FILE_DATA + elif file_type == "AERDB_": + file_content["Latitude"] = DEFAULT_LAT_DATA + file_content["Longitude"] = DEFAULT_LON_DATA + file_content["Angstrom_Exponent_Land_Ocean"] = DEFAULT_FILE_DATA + file_content["Aerosol_Optical_Thickness_550_Land_Ocean"] = DEFAULT_FILE_DATA + + +class 
TestVIIRSL2FileHandler: + """Test VIIRS_L2 Reader""" + + yaml_file = "viirs_l2.yaml" + + def setup_method(self): + """Wrap NetCDF4 file handler with our own fake handler.""" + from satpy._config import config_search_paths + from satpy.readers.viirs_l2 import VIIRSL2FileHandler + + self.reader_configs = config_search_paths( + os.path.join("readers", self.yaml_file) + ) + self.p = mock.patch.object( + VIIRSL2FileHandler, "__bases__", (FakeNetCDF4FileHandlerVIIRSL2,) + ) + self.fake_handler = self.p.start() + self.p.is_local = True + + def teardown_method(self): + """Stop wrapping the NetCDF4 file handler.""" + self.p.stop() + + @pytest.mark.parametrize( + "filename", + [ + ("CLDPROP_L2_VIIRS_SNPP.A2023364.2230.011.2023365115856.nc"), + ("CLDMSK_L2_VIIRS_SNPP.A2023364.2230.001.2023365105952.nc"), + ("AERDB_L2_VIIRS_SNPP.A2023364.2230.011.2023365113427.nc"), + ], + ) + def test_init(self, filename): + """Test basic init with no extra parameters.""" + from satpy.readers import load_reader + + r = load_reader(self.reader_configs) + loadables = r.select_files_from_pathnames([filename]) + assert len(loadables) == 1 + r.create_filehandlers(loadables) + # make sure we have some files + assert r.file_handlers + + def test_load_aerdb(self): + r = load_reader(self.reader_configs) + loadables = r.select_files_from_pathnames( + ["AERDB_L2_VIIRS_SNPP.A2023364.2230.011.2023365113427.nc"] + ) + r.create_filehandlers(loadables) + datasets = r.load( + ["Aerosol_Optical_Thickness_550_Land_Ocean", "Angstrom_Exponent_Land_Ocean"] + ) + assert len(datasets) == 2 + for d in datasets.values(): + assert d.shape == DEFAULT_FILE_SHAPE + assert d.dims == ("y", "x") + assert d.attrs["sensor"] == "viirs" + + def test_load_cldprop(self): + r = load_reader(self.reader_configs) + loadables = r.select_files_from_pathnames( + ["CLDPROP_L2_VIIRS_SNPP.A2023364.2230.011.2023365115856.nc"] + ) + r.create_filehandlers(loadables) + datasets = r.load(["Cloud_Top_Height"]) + assert len(datasets) == 1 + for d in datasets.values(): + assert d.shape == DEFAULT_FILE_SHAPE + assert d.dims == ("y", "x") + assert d.attrs["sensor"] == "viirs" + + def test_load_cldmsk(self): + r = load_reader(self.reader_configs) + loadables = r.select_files_from_pathnames( + ["CLDMSK_L2_VIIRS_SNPP.A2023364.2230.001.2023365105952.nc"] + ) + r.create_filehandlers(loadables) + datasets = r.load(["Clear_Sky_Confidence"]) + assert len(datasets) == 1 + for d in datasets.values(): + assert d.shape == DEFAULT_FILE_SHAPE + assert d.dims == ("y", "x") + assert d.attrs["sensor"] == "viirs" From b11de89d995eab2e6fb730ca588199488da81c23 Mon Sep 17 00:00:00 2001 From: Will Sharpe Date: Wed, 24 Jan 2024 16:53:53 +0000 Subject: [PATCH 1069/1416] Changed aerdb range finding --- satpy/readers/viirs_l2.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/viirs_l2.py b/satpy/readers/viirs_l2.py index e199d75437..671ad98c7a 100644 --- a/satpy/readers/viirs_l2.py +++ b/satpy/readers/viirs_l2.py @@ -76,7 +76,7 @@ def _get_dataset_valid_range(self, dataset_id, ds_info, var_path): valid_max = self.get(var_path + "/attr/valid_max") if not valid_min and not valid_max: valid_range = self.get(var_path + "/attr/valid_range") - if valid_range: + if valid_range is not None: valid_min = valid_range[0] valid_max = valid_range[1] scale_factor = self.get(var_path + "/attr/scale_factor") From cbf70821780e7c671d8f99065fb1c10196908828 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 24 Jan 2024 11:47:38 -0600 Subject: [PATCH 1070/1416] Add AGRI test for C07 file 
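The one-line "Changed aerdb range finding" fix above is worth spelling out: valid_range usually arrives as a two-element array, and NumPy refuses to coerce a multi-element array to a single boolean, so `if valid_range:` can blow up where `if valid_range is not None:` cannot. A small demonstration (assuming the attribute is returned as a NumPy array):

```python
import numpy as np

valid_range = np.array([0.0, 1.0])

try:
    if valid_range:  # what the pre-fix truthiness test amounts to
        pass
except ValueError as err:
    print(err)  # "The truth value of an array with more than one element is ambiguous..."

if valid_range is not None:  # the fixed check: tests presence only
    valid_min, valid_max = valid_range[0], valid_range[1]
    print(valid_min, valid_max)  # 0.0 1.0
```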
bug and lots of test cleanup --- satpy/readers/fy4_base.py | 21 ++-- satpy/tests/reader_tests/test_agri_l1.py | 120 ++++++++++------------- 2 files changed, 63 insertions(+), 78 deletions(-) diff --git a/satpy/readers/fy4_base.py b/satpy/readers/fy4_base.py index 4a3c0bc625..b0452a5735 100644 --- a/satpy/readers/fy4_base.py +++ b/satpy/readers/fy4_base.py @@ -98,14 +98,15 @@ def _apply_lut(self, data: xr.DataArray, lut: npt.NDArray[np.float32]) -> xr.Dat """ # append nan to the end of lut for fillvalue fill_value = data.attrs.get("FillValue") - if fill_value is not None: - if fill_value.item() > lut.shape[0] - 1: - lut = np.append(lut, np.nan) - data.data = da.where(data.data > lut.shape[0], lut.shape[0] - 1, data.data) - else: - # Ex. C07 has a LUT of 65536 elements, but fill value is 65535 - # This is considered a bug in the input file format - lut[fill_value] = np.nan + if fill_value is not None and fill_value.item() <= lut.shape[0] - 1: + # If LUT includes the fill_value, remove that entry and everything + # after it. + # Ex. C07 has a LUT of 65536 elements, but fill value is 65535 + # This is considered a bug in the input file format + lut = lut[:fill_value.item()] + + lut = np.append(lut, np.nan) + data.data = da.where(data.data >= lut.shape[0], lut.shape[0] - 1, data.data) res = data.data.map_blocks(self._getitem, lut, dtype=lut.dtype) res = xr.DataArray(res, dims=data.dims, attrs=data.attrs, coords=data.coords) @@ -146,8 +147,8 @@ def calibrate(self, data, ds_info, ds_name, file_key): raise NotImplementedError("Calibration to radiance is not supported.") # Apply range limits, but not for counts or we convert to float! if calibration != "counts": - data = data.where((data >= min(data.attrs["valid_range"])) & - (data <= max(data.attrs["valid_range"]))) + data = data.where((data >= min(ds_info["valid_range"])) & + (data <= max(ds_info["valid_range"]))) else: data.attrs["_FillValue"] = data.attrs["FillValue"].item() return data diff --git a/satpy/tests/reader_tests/test_agri_l1.py b/satpy/tests/reader_tests/test_agri_l1.py index 66395a8cee..9e31c0f972 100644 --- a/satpy/tests/reader_tests/test_agri_l1.py +++ b/satpy/tests/reader_tests/test_agri_l1.py @@ -56,7 +56,7 @@ class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" - def make_test_data(self, cwl, ch, prefix, dims, file_type): + def _make_test_data(self, cwl, ch, prefix, dims): """Make test data.""" if prefix == "CAL": data = xr.DataArray( @@ -74,18 +74,25 @@ def make_test_data(self, cwl, ch, prefix, dims, file_type): dims="_const") elif prefix == "NOM": + # Add +1 to check that values beyond the LUT are clipped + data_np = np.arange(10, dtype=np.uint16).reshape((2, 5)) + 1 + fill_value = 65535 + valid_max = 4095 + if ch == 7: + # mimic C07 bug where the fill value is in the LUT + fill_value = 9 # at index [1, 3] (second to last element) + valid_max = 8 data = xr.DataArray( - da.from_array(np.arange(10, dtype=np.uint16).reshape((2, 5)) + 1, - [dim for dim in dims]), + da.from_array(data_np, chunks=[dim for dim in dims]), attrs={ "Slope": np.array(1.), "Intercept": np.array(0.), - "FillValue": np.array(65535), + "FillValue": np.array(fill_value), "units": "DN", "center_wavelength": "{}um".format(cwl).encode("utf-8"), "band_names": "band{}(band number is range from 1 to 14)" .format(ch).encode("utf-8"), "long_name": "Calibration table of {}um Channel".format(cwl).encode("utf-8"), - "valid_range": np.array([0, 4095]), + "valid_range": np.array([0, valid_max]), }, dims=("_RegLength", "_RegWidth")) @@ 
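To see the refined _apply_lut behaviour in isolation: when the declared fill value is itself a valid LUT index (the C07 bug), the LUT is truncated at that index before a NaN sentinel is appended, so both fill pixels and any DN beyond the table end up NaN. A standalone NumPy sketch (the reader itself works on dask arrays) that reproduces the expected C07 values used in the tests below:

```python
import numpy as np

def apply_lut(dn, lut, fill_value):
    """Truncate the LUT at an in-range fill value, append NaN, clip, and index."""
    if fill_value is not None and fill_value <= lut.shape[0] - 1:
        lut = lut[:fill_value]        # drop the bogus fill entry and everything after it
    lut = np.append(lut, np.nan)      # NaN sentinel for out-of-range DNs
    dn = np.where(dn >= lut.shape[0], lut.shape[0] - 1, dn)
    return lut[dn]

dn = np.arange(10, dtype=np.uint16).reshape((2, 5)) + 1  # same fake DNs as the test data
lut = (np.arange(10.0) + 1.0) / 10.0                     # [0.1, 0.2, ..., 1.0]
print(apply_lut(dn, lut, fill_value=9))                  # C07 case: fill value indexes the LUT
# [[0.2 0.3 0.4 0.5 0.6]
#  [0.7 0.8 0.9 nan nan]]
```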
-101,26 +108,15 @@ def make_test_data(self, cwl, ch, prefix, dims, file_type): "valid_range": np.array([0., 360.]), }, dims=("_RegLength", "_RegWidth")) - - elif prefix == "COEF": - if file_type == "500": - data = self._create_coeff_array(1) - - elif file_type == "1000": - data = self._create_coeff_array(3) - - elif file_type == "2000": - data = self._create_coeff_array(7) - - elif file_type == "4000": - data = self._create_coeff_array(14) - return data - def _create_coeff_array(self, nb_channels): + def _create_coeffs_array(self, channel_numbers: list[int]) -> xr.DataArray: + # make coefficients consistent between file types + all_possible_coeffs = (np.arange(14 * 2).reshape((14, 2)) + 1.0) / np.array([1E4, 1E2]) + # get the coefficients for the specific channels this resolution has + these_coeffs = all_possible_coeffs[[chan_num - 1 for chan_num in channel_numbers]] data = xr.DataArray( - da.from_array((np.arange(nb_channels * 2).reshape((nb_channels, 2)) + 1.) / - np.array([1E4, 1E2]), [nb_channels, 2]), + da.from_array(these_coeffs, chunks=[len(channel_numbers), 2]), attrs={ "Slope": 1., "Intercept": 0., "FillValue": 0, @@ -132,60 +128,46 @@ def _create_coeff_array(self, nb_channels): dims=("_num_channel", "_coefs")) return data - def _create_channel_data(self, chs, cwls, file_type): + def _create_channel_data(self, chs, cwls): dim_0 = 2 dim_1 = 5 data = {} - for index, _cwl in enumerate(cwls): - data["CALChannel" + "%02d" % chs[index]] = self.make_test_data(cwls[index], chs[index], "CAL", - [dim_0, dim_1], file_type) - data["Calibration/CALChannel" + "%02d" % chs[index]] = self.make_test_data(cwls[index], chs[index], "CAL", - [dim_0, dim_1], file_type) - data["NOMChannel" + "%02d" % chs[index]] = self.make_test_data(cwls[index], chs[index], "NOM", - [dim_0, dim_1], file_type) - data["Data/NOMChannel" + "%02d" % chs[index]] = self.make_test_data(cwls[index], chs[index], "NOM", - [dim_0, dim_1], file_type) - data["CALIBRATION_COEF(SCALE+OFFSET)"] = self.make_test_data(cwls[index], chs[index], "COEF", - [dim_0, dim_1], file_type) - data["Calibration/CALIBRATION_COEF(SCALE+OFFSET)"] = self.make_test_data(cwls[index], chs[index], "COEF", - [dim_0, dim_1], file_type) + for chan_num, chan_wl in zip(chs, cwls): + cal_data = self._make_test_data(chan_wl, chan_num, "CAL", [dim_0, dim_1]) + data[f"CALChannel{chan_num:02d}"] = cal_data + data[f"Calibration/CALChannel{chan_num:02d}"] = cal_data + nom_data = self._make_test_data(chan_wl, chan_num, "NOM", [dim_0, dim_1]) + data[f"NOMChannel{chan_num:02d}"] = nom_data + data[f"Data/NOMChannel{chan_num:02d}"] = nom_data + data["CALIBRATION_COEF(SCALE+OFFSET)"] = self._create_coeffs_array(chs) + data["Calibration/CALIBRATION_COEF(SCALE+OFFSET)"] = self._create_coeffs_array(chs) return data - def _get_500m_data(self, file_type): + def _get_500m_data(self): chs = [2] cwls = [0.65] - data = self._create_channel_data(chs, cwls, file_type) - - return data + return self._create_channel_data(chs, cwls) - def _get_1km_data(self, file_type): - chs = np.linspace(1, 3, 3) + def _get_1km_data(self): + chs = [1, 2, 3] cwls = [0.47, 0.65, 0.83] - data = self._create_channel_data(chs, cwls, file_type) + return self._create_channel_data(chs, cwls) - return data - - def _get_2km_data(self, file_type): - chs = np.linspace(1, 7, 7) + def _get_2km_data(self): + chs = [1, 2, 3, 4, 5, 6, 7] cwls = [0.47, 0.65, 0.83, 1.37, 1.61, 2.22, 3.72] - data = self._create_channel_data(chs, cwls, file_type) - - return data + return self._create_channel_data(chs, cwls) - def 
_get_4km_data(self, file_type): - chs = np.linspace(1, 14, 14) + def _get_4km_data(self): + chs = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14] cwls = [0.47, 0.65, 0.83, 1.37, 1.61, 2.22, 3.72, 3.72, 6.25, 7.10, 8.50, 10.8, 12, 13.5] - data = self._create_channel_data(chs, cwls, file_type) + return self._create_channel_data(chs, cwls) - return data - - def _get_geo_data(self, file_type): + def _get_geo_data(self): dim_0 = 2 dim_1 = 5 - data = {"NOMSunAzimuth": self.make_test_data("NUL", "NUL", "GEO", - [dim_0, dim_1], file_type), - "Navigation/NOMSunAzimuth": self.make_test_data("NUL", "NUL", "GEO", - [dim_0, dim_1], file_type)} + data = {"NOMSunAzimuth": self._make_test_data("NUL", "NUL", "GEO", [dim_0, dim_1]), + "Navigation/NOMSunAzimuth": self._make_test_data("NUL", "NUL", "GEO", [dim_0, dim_1])} return data def get_test_content(self, filename, filename_info, filetype_info): @@ -210,17 +192,17 @@ def get_test_content(self, filename, filename_info, filetype_info): data = {} if self.filetype_info["file_type"] == "agri_l1_0500m": - data = self._get_500m_data("500") + data = self._get_500m_data() elif self.filetype_info["file_type"] == "agri_l1_1000m": - data = self._get_1km_data("1000") + data = self._get_1km_data() elif self.filetype_info["file_type"] == "agri_l1_2000m": - data = self._get_2km_data("2000") + data = self._get_2km_data() global_attrs["/attr/Observing Beginning Time"] = "00:30:01" global_attrs["/attr/Observing Ending Time"] = "00:34:07" elif self.filetype_info["file_type"] == "agri_l1_4000m": - data = self._get_4km_data("4000") + data = self._get_4km_data() elif self.filetype_info["file_type"] == "agri_l1_4000m_geo": - data = self._get_geo_data("4000") + data = self._get_geo_data() test_content = {} test_content.update(global_attrs) @@ -263,7 +245,7 @@ def setup_method(self): 4: np.array([[8.07, 8.14, 8.21, 8.28, 8.35], [8.42, 8.49, 8.56, 8.63, 8.7]]), 5: np.array([[10.09, 10.18, 10.27, 10.36, 10.45], [10.54, 10.63, 10.72, 10.81, 10.9]]), 6: np.array([[12.11, 12.22, 12.33, 12.44, 12.55], [12.66, 12.77, 12.88, 12.99, 13.1]]), - 7: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), + 7: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, np.nan, np.nan]]), 8: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), 9: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), 10: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), @@ -390,6 +372,7 @@ def test_agri_for_one_resolution(self, resolution_to_test, satname): available_datasets = reader.available_dataset_ids band_names = CHANNELS_BY_RESOLUTION[resolution_to_test] self._assert_which_channels_are_loaded(available_datasets, band_names, resolution_to_test) + # band_names = ["C07"] res = reader.load(band_names) assert len(res) == len(band_names) self._check_calibration_and_units(band_names, res) @@ -398,10 +381,11 @@ def test_agri_for_one_resolution(self, resolution_to_test, satname): AREA_EXTENTS_BY_RESOLUTION[satname][resolution_to_test]) def _check_calibration_and_units(self, band_names, result): - for index, band_name in enumerate(band_names): + for band_name in band_names: + band_number = int(band_name[-2:]) assert result[band_name].attrs["sensor"].islower() assert result[band_name].shape == (2, 5) - np.testing.assert_allclose(result[band_name].values, self.expected[index + 1], equal_nan=True) + np.testing.assert_allclose(result[band_name].values, self.expected[band_number], equal_nan=True) self._check_units(band_name, result) @staticmethod From 
03eb8533d486c96b5bfea3cf81761b2bb883d350 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 24 Jan 2024 11:50:31 -0600 Subject: [PATCH 1071/1416] More test cleanup --- satpy/tests/reader_tests/test_agri_l1.py | 110 +++++++++++------------ 1 file changed, 54 insertions(+), 56 deletions(-) diff --git a/satpy/tests/reader_tests/test_agri_l1.py b/satpy/tests/reader_tests/test_agri_l1.py index 9e31c0f972..82ac7252b8 100644 --- a/satpy/tests/reader_tests/test_agri_l1.py +++ b/satpy/tests/reader_tests/test_agri_l1.py @@ -56,59 +56,57 @@ class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" - def _make_test_data(self, cwl, ch, prefix, dims): + def _make_cal_data(self, cwl, ch, dims): """Make test data.""" - if prefix == "CAL": - data = xr.DataArray( - da.from_array((np.arange(10.) + 1.) / 10., [dims[0] * dims[1]]), - attrs={ - "Slope": np.array(1.), "Intercept": np.array(0.), - "FillValue": np.array(-65535.0), - "units": "NUL", - "center_wavelength": "{}um".format(cwl).encode("utf-8"), - "band_names": "band{}(band number is range from 1 to 14)" - .format(ch).encode("utf-8"), - "long_name": "Calibration table of {}um Channel".format(cwl).encode("utf-8"), - "valid_range": np.array([0, 1.5]), - }, - dims="_const") - - elif prefix == "NOM": - # Add +1 to check that values beyond the LUT are clipped - data_np = np.arange(10, dtype=np.uint16).reshape((2, 5)) + 1 - fill_value = 65535 - valid_max = 4095 - if ch == 7: - # mimic C07 bug where the fill value is in the LUT - fill_value = 9 # at index [1, 3] (second to last element) - valid_max = 8 - data = xr.DataArray( - da.from_array(data_np, chunks=[dim for dim in dims]), - attrs={ - "Slope": np.array(1.), "Intercept": np.array(0.), - "FillValue": np.array(fill_value), - "units": "DN", - "center_wavelength": "{}um".format(cwl).encode("utf-8"), - "band_names": "band{}(band number is range from 1 to 14)" - .format(ch).encode("utf-8"), - "long_name": "Calibration table of {}um Channel".format(cwl).encode("utf-8"), - "valid_range": np.array([0, valid_max]), - }, - dims=("_RegLength", "_RegWidth")) - - elif prefix == "GEO": - data = xr.DataArray( - da.from_array(np.arange(0., 360., 36., dtype=np.float32).reshape((2, 5)), - [dim for dim in dims]), - attrs={ - "Slope": np.array(1.), "Intercept": np.array(0.), - "FillValue": np.array(65535.), - "units": "NUL", - "band_names": "NUL", - "valid_range": np.array([0., 360.]), - }, - dims=("_RegLength", "_RegWidth")) - return data + return xr.DataArray( + da.from_array((np.arange(10.) + 1.) 
/ 10., [dims[0] * dims[1]]), + attrs={ + "Slope": np.array(1.), "Intercept": np.array(0.), + "FillValue": np.array(-65535.0), + "units": "NUL", + "center_wavelength": "{}um".format(cwl).encode("utf-8"), + "band_names": "band{}(band number is range from 1 to 14)" + .format(ch).encode("utf-8"), + "long_name": "Calibration table of {}um Channel".format(cwl).encode("utf-8"), + "valid_range": np.array([0, 1.5]), + }, + dims="_const") + + def _make_nom_data(self, cwl, ch, dims): + # Add +1 to check that values beyond the LUT are clipped + data_np = np.arange(10, dtype=np.uint16).reshape((2, 5)) + 1 + fill_value = 65535 + valid_max = 4095 + if ch == 7: + # mimic C07 bug where the fill value is in the LUT + fill_value = 9 # at index [1, 3] (second to last element) + valid_max = 8 + return xr.DataArray( + da.from_array(data_np, chunks=[dim for dim in dims]), + attrs={ + "Slope": np.array(1.), "Intercept": np.array(0.), + "FillValue": np.array(fill_value), + "units": "DN", + "center_wavelength": "{}um".format(cwl).encode("utf-8"), + "band_names": "band{}(band number is range from 1 to 14)" + .format(ch).encode("utf-8"), + "long_name": "Calibration table of {}um Channel".format(cwl).encode("utf-8"), + "valid_range": np.array([0, valid_max]), + }, + dims=("_RegLength", "_RegWidth")) + + def _make_geo_data(self, dims): + return xr.DataArray( + da.from_array(np.arange(0., 360., 36., dtype=np.float32).reshape((2, 5)), + [dim for dim in dims]), + attrs={ + "Slope": np.array(1.), "Intercept": np.array(0.), + "FillValue": np.array(65535.), + "units": "NUL", + "band_names": "NUL", + "valid_range": np.array([0., 360.]), + }, + dims=("_RegLength", "_RegWidth")) def _create_coeffs_array(self, channel_numbers: list[int]) -> xr.DataArray: # make coefficients consistent between file types @@ -133,10 +131,10 @@ def _create_channel_data(self, chs, cwls): dim_1 = 5 data = {} for chan_num, chan_wl in zip(chs, cwls): - cal_data = self._make_test_data(chan_wl, chan_num, "CAL", [dim_0, dim_1]) + cal_data = self._make_cal_data(chan_wl, chan_num, [dim_0, dim_1]) data[f"CALChannel{chan_num:02d}"] = cal_data data[f"Calibration/CALChannel{chan_num:02d}"] = cal_data - nom_data = self._make_test_data(chan_wl, chan_num, "NOM", [dim_0, dim_1]) + nom_data = self._make_nom_data(chan_wl, chan_num, [dim_0, dim_1]) data[f"NOMChannel{chan_num:02d}"] = nom_data data[f"Data/NOMChannel{chan_num:02d}"] = nom_data data["CALIBRATION_COEF(SCALE+OFFSET)"] = self._create_coeffs_array(chs) @@ -166,8 +164,8 @@ def _get_4km_data(self): def _get_geo_data(self): dim_0 = 2 dim_1 = 5 - data = {"NOMSunAzimuth": self._make_test_data("NUL", "NUL", "GEO", [dim_0, dim_1]), - "Navigation/NOMSunAzimuth": self._make_test_data("NUL", "NUL", "GEO", [dim_0, dim_1])} + data = {"NOMSunAzimuth": self._make_geo_data([dim_0, dim_1]), + "Navigation/NOMSunAzimuth": self._make_geo_data([dim_0, dim_1])} return data def get_test_content(self, filename, filename_info, filetype_info): From 20cb176e926749f248f2c8de67b6dfe8c0361c8f Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 25 Jan 2024 10:24:48 -0600 Subject: [PATCH 1072/1416] Remove left over test comment --- satpy/tests/reader_tests/test_agri_l1.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_agri_l1.py b/satpy/tests/reader_tests/test_agri_l1.py index 82ac7252b8..3de679796c 100644 --- a/satpy/tests/reader_tests/test_agri_l1.py +++ b/satpy/tests/reader_tests/test_agri_l1.py @@ -370,7 +370,6 @@ def test_agri_for_one_resolution(self, resolution_to_test, satname): available_datasets 
= reader.available_dataset_ids band_names = CHANNELS_BY_RESOLUTION[resolution_to_test] self._assert_which_channels_are_loaded(available_datasets, band_names, resolution_to_test) - # band_names = ["C07"] res = reader.load(band_names) assert len(res) == len(band_names) self._check_calibration_and_units(band_names, res) From 7c8e5753625493bb7997ef8f5cf75c98708898da Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 26 Jan 2024 16:23:16 +0100 Subject: [PATCH 1073/1416] Add GenericYAMLReader --- satpy/etc/readers/sar-c_safe.yaml | 6 +- satpy/readers/__init__.py | 5 +- satpy/readers/sar_c_safe.py | 162 ++++++++++--- satpy/readers/yaml_reader.py | 239 +++++++++++--------- satpy/tests/reader_tests/test_sar_c_safe.py | 70 ++++-- 5 files changed, 316 insertions(+), 166 deletions(-) diff --git a/satpy/etc/readers/sar-c_safe.yaml b/satpy/etc/readers/sar-c_safe.yaml index 4e45ca8584..a14a401af9 100644 --- a/satpy/etc/readers/sar-c_safe.yaml +++ b/satpy/etc/readers/sar-c_safe.yaml @@ -7,7 +7,7 @@ reader: supports_fsspec: false sensors: [sar-c] default_channels: [] - reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + reader: !!python/name:satpy.readers.sar_c_safe.SAFESARReader data_identification_keys: name: required: true @@ -40,19 +40,15 @@ reader: file_types: safe_measurement: - file_reader: !!python/name:satpy.readers.sar_c_safe.SAFEGRD file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/measurement/{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.tiff'] requires: [safe_calibration, safe_noise, safe_annotation] safe_calibration: - file_reader: !!python/name:satpy.readers.sar_c_safe.SAFEXMLCalibration file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/annotation/calibration/calibration-{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.xml'] requires: [safe_annotation] safe_noise: - file_reader: !!python/name:satpy.readers.sar_c_safe.SAFEXMLNoise file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/annotation/calibration/noise-{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.xml'] requires: [safe_annotation] safe_annotation: - file_reader: !!python/name:satpy.readers.sar_c_safe.SAFEXMLAnnotation file_patterns: 
['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/annotation/{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.xml'] diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index c8fc0a8b69..43632af9c6 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -572,7 +572,7 @@ def load_readers(filenames=None, reader=None, reader_kwargs=None): continue loadables = reader_instance.select_files_from_pathnames(readers_files) if loadables: - reader_instance.create_filehandlers( + reader_instance.create_storage_items( loadables, fh_kwargs=reader_kwargs_without_filter[None if reader is None else reader[idx]]) reader_instances[reader_instance.name] = reader_instance @@ -635,6 +635,9 @@ def _get_reader_kwargs(reader, reader_kwargs): """ reader_kwargs = reader_kwargs or {} + if isinstance(reader, str): + reader = list(reader) + # ensure one reader_kwargs per reader, None if not provided if reader is None: reader_kwargs = {None: reader_kwargs} diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 19e5396b61..565d2c1167 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -36,6 +36,8 @@ import functools import logging +import os +from collections import defaultdict from threading import Lock import defusedxml.ElementTree as ET @@ -46,7 +48,10 @@ from dask.base import tokenize from xarray import DataArray +from satpy.dataset.data_dict import DatasetDict +from satpy.dataset.dataid import DataID from satpy.readers.file_handlers import BaseFileHandler +from satpy.readers.yaml_reader import GenericYAMLReader from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) @@ -92,21 +97,15 @@ class SAFEXML(BaseFileHandler): """XML file reader for the SAFE format.""" def __init__(self, filename, filename_info, filetype_info, - header_file=None): + header_file=None, image_shape=None): """Init the xml filehandler.""" - super(SAFEXML, self).__init__(filename, filename_info, filetype_info) + super().__init__(filename, filename_info, filetype_info) self._start_time = filename_info["start_time"] self._end_time = filename_info["end_time"] self._polarization = filename_info["polarization"] self.root = ET.parse(self.filename) - self.hdr = {} - if header_file is not None: - self.hdr = header_file.get_metadata() - else: - self.hdr = self.get_metadata() - self._image_shape = (self.hdr["product"]["imageAnnotation"]["imageInformation"]["numberOfLines"], - self.hdr["product"]["imageAnnotation"]["imageInformation"]["numberOfSamples"]) + self._image_shape = image_shape def get_metadata(self): """Convert the xml metadata to dict.""" @@ -133,6 +132,14 @@ def __init__(self, filename, filename_info, filetype_info, self.get_incidence_angle = functools.lru_cache(maxsize=10)( self._get_incidence_angle_uncached ) + self.hdr = self.get_metadata() + self._image_shape = (self.hdr["product"]["imageAnnotation"]["imageInformation"]["numberOfLines"], + self.hdr["product"]["imageAnnotation"]["imageInformation"]["numberOfSamples"]) + + @property + def image_shape(self): + """Return the image shape of this dataset.""" + return self._image_shape def get_dataset(self, key, info, chunks=None): """Load a dataset.""" @@ 
-148,13 +155,13 @@ def _get_incidence_angle_uncached(self, chunks): return incidence_angle.expand(self._image_shape, chunks=chunks) -class SAFEXMLCalibration(SAFEXML): +class Calibrator(SAFEXML): """XML file reader for the SAFE format, Calibration file.""" def __init__(self, filename, filename_info, filetype_info, - header_file=None): + header_file=None, image_shape=None): """Init the XML calibration reader.""" - super().__init__(filename, filename_info, filetype_info, header_file) + super().__init__(filename, filename_info, filetype_info, header_file, image_shape) self.get_calibration = functools.lru_cache(maxsize=10)( self._get_calibration_uncached ) @@ -182,14 +189,22 @@ def _get_calibration_vector(self, calibration_name, chunks): calibration_vector = XMLArray(self.root, ".//calibrationVector", calibration_name) return calibration_vector.expand(self._image_shape, chunks=chunks) + def __call__(self, dn, calibration_type, chunks=None): + """Calibrate the data.""" + logger.debug("Reading calibration data.") + cal = self.get_calibration(calibration_type, chunks=chunks) + cal_constant = self.get_calibration_constant() + logger.debug("Calibrating.") + data = ((dn + cal_constant) / (cal ** 2)).clip(min=0) + return data -class SAFEXMLNoise(SAFEXML): +class Denoiser(SAFEXML): """XML file reader for the SAFE format, Noise file.""" def __init__(self, filename, filename_info, filetype_info, - header_file=None): + header_file=None, image_shape=None): """Init the xml filehandler.""" - super().__init__(filename, filename_info, filetype_info, header_file) + super().__init__(filename, filename_info, filetype_info, header_file, image_shape) self.azimuth_noise_reader = AzimuthNoiseReader(self.root, self._image_shape) self.get_noise_correction = functools.lru_cache(maxsize=10)( @@ -223,6 +238,14 @@ def read_range_noise_array(self, chunks): range_noise = XMLArray(self.root, ".//noiseRangeVector", "noiseRangeLut") return range_noise.expand(self._image_shape, chunks) + def __call__(self, dn, chunks): + """Denoise the data.""" + logger.debug("Reading noise data.") + noise = self.get_noise_correction(chunks=chunks).fillna(0) + dn = dn - noise + return dn + + class AzimuthNoiseReader: """Class to parse and read azimuth-noise data. @@ -547,10 +570,9 @@ class SAFEGRD(BaseFileHandler): block size. 
""" - def __init__(self, filename, filename_info, filetype_info, calfh, noisefh, annotationfh): + def __init__(self, filename, filename_info, filetype_info, calibrator, denoiser): """Init the grd filehandler.""" - super(SAFEGRD, self).__init__(filename, filename_info, - filetype_info) + super().__init__(filename, filename_info, filetype_info) self._start_time = filename_info["start_time"] self._end_time = filename_info["end_time"] @@ -559,9 +581,8 @@ def __init__(self, filename, filename_info, filetype_info, calfh, noisefh, annot self._mission_id = filename_info["mission_id"] - self.calibration = calfh - self.noise = noisefh - self.annotation = annotationfh + self.calibrator = calibrator + self.denoiser = denoiser self.read_lock = Lock() self.filehandle = rasterio.open(self.filename, "r", sharing=False) @@ -585,8 +606,8 @@ def get_dataset(self, key, info): data.attrs.update(info) else: - data = xr.open_dataset(self.filename, engine="rasterio", - chunks={"band": 1, "y": CHUNK_SIZE, "x": CHUNK_SIZE})["band_data"].squeeze() + data = xr.open_dataarray(self.filename, engine="rasterio", + chunks={"band": 1, "y": CHUNK_SIZE, "x": CHUNK_SIZE}).squeeze() data = data.assign_coords(x=np.arange(len(data.coords["x"])), y=np.arange(len(data.coords["y"]))) data = self._calibrate_and_denoise(data, key) @@ -613,8 +634,8 @@ def _calibrate_and_denoise(self, data, key): chunks = CHUNK_SIZE dn = self._get_digital_number(data) - dn = self._denoise(dn, chunks) - data = self._calibrate(dn, chunks, key) + dn = self.denoiser(dn, chunks) + data = self.calibrator(dn, key["calibration"], chunks) return data @@ -632,15 +653,6 @@ def _denoise(self, dn, chunks): dn = dn - noise return dn - def _calibrate(self, dn, chunks, key): - """Calibrate the data.""" - logger.debug("Reading calibration data.") - cal = self.calibration.get_calibration(key["calibration"], chunks=chunks) - cal_constant = self.calibration.get_calibration_constant() - logger.debug("Calibrating.") - data = ((dn + cal_constant) / (cal ** 2)).clip(min=0) - return data - def _get_lonlatalts_uncached(self): """Obtain GCPs and construct latitude and longitude arrays. 
@@ -704,3 +716,85 @@ def start_time(self):
     def end_time(self):
         """Get the end time."""
         return self._end_time
+
+
+class SAFESARReader(GenericYAMLReader):
+    """A reader for SAFE SAR-C data for Sentinel 1 satellites."""
+
+    def __init__(self, config, filter_parameters=None):
+        """Set up the SAR reader."""
+        super().__init__(config)
+        self.filter_parameters = filter_parameters
+        self.files_by_type = defaultdict(list)
+        self.storage_items = {}
+
+    @property
+    def start_time(self):
+        """Get the start time."""
+        return next(iter(self.storage_items.values())).filename_info["start_time"]
+
+    @property
+    def end_time(self):
+        """Get the end time."""
+        return next(iter(self.storage_items.values())).filename_info["end_time"]
+
+    def load(self, dataset_keys, **kwargs):
+        """Load some data."""
+        if kwargs:
+            raise NotImplementedError(f"Don't know how to handle kwargs {kwargs}")
+        datasets = DatasetDict()
+        for key in dataset_keys:
+            for handler in self.storage_items.values():
+                val = handler.get_dataset(key, info=dict())
+                if val is not None:
+                    val.attrs["start_time"] = handler.start_time
+                    # val.attrs["footprint"] = self.footprint
+                    if key["name"] not in ["longitude", "latitude"]:
+                        lonlats = self.load([DataID(self._id_keys, name="longitude", polarization=key["polarization"]),
+                                             DataID(self._id_keys, name="latitude", polarization=key["polarization"])])
+                        from pyresample.future.geometry import SwathDefinition
+                        val.attrs["area"] = SwathDefinition(lonlats["longitude"], lonlats["latitude"],
+                                                            attrs=dict(gcps=None))
+                    datasets[key] = val
+                    continue
+        return datasets
+
+    def create_storage_items(self, files, **kwargs):
+        """Create the storage items."""
+        filenames = [os.fspath(filename) for filename in files]
+        files_by_type = defaultdict(list)
+        for file_type, type_info in self.config["file_types"].items():
+            files_by_type[file_type].extend(self.filename_items_for_filetype(filenames, type_info))
+
+        image_shapes = dict()
+        for annotation_file, annotation_info in files_by_type["safe_annotation"]:
+            annotation_fh = SAFEXMLAnnotation(annotation_file,
+                                              filename_info=annotation_info,
+                                              filetype_info=None)
+            image_shapes[annotation_info["polarization"]] = annotation_fh.image_shape
+
+        calibration_handlers = dict()
+        for calibration_file, calibration_info in files_by_type["safe_calibration"]:
+            polarization = calibration_info["polarization"]
+            calibration_handlers[polarization] = Calibrator(calibration_file,
+                                                            filename_info=calibration_info,
+                                                            filetype_info=None,
+                                                            image_shape=image_shapes[polarization])
+
+        noise_handlers = dict()
+        for noise_file, noise_info in files_by_type["safe_noise"]:
+            polarization = noise_info["polarization"]
+            noise_handlers[polarization] = Denoiser(noise_file,
+                                                    filename_info=noise_info,
+                                                    filetype_info=None,
+                                                    image_shape=image_shapes[polarization])
+
+        measurement_handlers = dict()
+        for measurement_file, measurement_info in files_by_type["safe_measurement"]:
+            polarization = measurement_info["polarization"]
+            measurement_handlers[polarization] = SAFEGRD(measurement_file,
+                                                         filename_info=measurement_info,
+                                                         calibrator=calibration_handlers[polarization],
+                                                         denoiser=noise_handlers[polarization],
+                                                         filetype_info=None)
+        self.storage_items = measurement_handlers
diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py
index 5444d7e16f..84c4fcd068 100644
--- a/satpy/readers/yaml_reader.py
+++ b/satpy/readers/yaml_reader.py
@@ -337,7 +337,127 @@ def _build_id_permutations(self, dataset, id_keys):
         return id_kwargs


-class FileYAMLReader(AbstractYAMLReader, DataDownloadMixin):
+class GenericYAMLReader(AbstractYAMLReader):
+    """A Generic YAML-based reader."""
+
+    def __init__(self, config_dict, filter_parameters=None, filter_filenames=True):
+        """Set up the yaml reader."""
+        super().__init__(config_dict)
+        self.filter_parameters = filter_parameters or {}
+        self.filter_filenames = self.info.get("filter_filenames", filter_filenames)
+
+
+
+    def filter_selected_filenames(self, filenames):
+        """Filter provided files based on metadata in the filename."""
+        if not isinstance(filenames, set):
+            # we perform set operations later on to improve performance
+            filenames = set(filenames)
+        for _, filetype_info in self.sorted_filetype_items():
+            filename_iter = self.filename_items_for_filetype(filenames,
+                                                             filetype_info)
+            if self.filter_filenames:
+                filename_iter = self.filter_filenames_by_info(filename_iter)
+
+            for fn, _ in filename_iter:
+                yield fn
+
+    def sorted_filetype_items(self):
+        """Sort the instance's filetypes in usage order."""
+        processed_types = []
+        file_type_items = deque(self.config["file_types"].items())
+        while len(file_type_items):
+            filetype, filetype_info = file_type_items.popleft()
+
+            requirements = filetype_info.get("requires")
+            if requirements is not None:
+                # requirements have not been processed yet -> wait
+                missing = [req for req in requirements
+                           if req not in processed_types]
+                if missing:
+                    file_type_items.append((filetype, filetype_info))
+                    continue
+
+            processed_types.append(filetype)
+            yield filetype, filetype_info
+
+    @staticmethod
+    def filename_items_for_filetype(filenames, filetype_info):
+        """Iterate over the filenames matching *filetype_info*."""
+        if not isinstance(filenames, set):
+            # we perform set operations later on to improve performance
+            filenames = set(filenames)
+        for pattern in filetype_info["file_patterns"]:
+            matched_files = set()
+            matches = _match_filenames(filenames, pattern)
+            for filename in matches:
+                try:
+                    filename_info = parse(
+                        pattern, _get_filebase(filename, pattern))
+                except ValueError:
+                    logger.debug("Can't parse %s with %s.", filename, pattern)
+                    continue
+                matched_files.add(filename)
+                yield filename, filename_info
+            filenames -= matched_files
+
+    def filter_filenames_by_info(self, filename_items):
+        """Filter out files using metadata from the filenames.
+
+        Currently only uses start and end time. If only start time is available
+        from the filename, keep all the filenames that have a start time before
+        the requested end time.
+        """
+        for filename, filename_info in filename_items:
+            fend = filename_info.get("end_time")
+            fstart = filename_info.setdefault("start_time", fend)
+            if fend and fend < fstart:
+                # correct for filenames with 1 date and 2 times
+                fend = fend.replace(year=fstart.year,
+                                    month=fstart.month,
+                                    day=fstart.day)
+                filename_info["end_time"] = fend
+            if self.metadata_matches(filename_info):
+                yield filename, filename_info
+
+    def metadata_matches(self, sample_dict, file_handler=None):
+        """Check that file metadata matches filter_parameters of this reader."""
+        # special handling of start/end times
+        if not self.time_matches(
+                sample_dict.get("start_time"), sample_dict.get("end_time")):
+            return False
+        for key, val in self.filter_parameters.items():
+            if key != "area" and key not in sample_dict:
+                continue
+
+            if key in ["start_time", "end_time"]:
+                continue
+            elif key == "area" and file_handler:
+                if not self.check_file_covers_area(file_handler, val):
+                    logger.info("Filtering out %s based on area",
+                                file_handler.filename)
+                    break
+            elif key in sample_dict and val != sample_dict[key]:
+                # don't use this file
+                break
+        else:
+            # all the metadata keys are equal
+            return True
+        return False
+
+    def time_matches(self, fstart, fend):
+        """Check that a file's start and end time match filter_parameters of this reader."""
+        start_time = self.filter_parameters.get("start_time")
+        end_time = self.filter_parameters.get("end_time")
+        fend = fend or fstart
+        if start_time and fend and fend < start_time:
+            return False
+        if end_time and fstart and fstart > end_time:
+            return False
+        return True
+
+
+class FileYAMLReader(GenericYAMLReader, DataDownloadMixin):
     """Primary reader base class that is configured by a YAML file.

     This class uses the idea of per-file "file handler" objects to read file
@@ -359,12 +479,10 @@ def __init__(self,
                  filter_filenames=True,
                  **kwargs):
         """Set up initial internal storage for loading file data."""
-        super(FileYAMLReader, self).__init__(config_dict)
+        super().__init__(config_dict, filter_parameters, filter_filenames)
         self.file_handlers = {}
         self.available_ids = {}
-        self.filter_filenames = self.info.get("filter_filenames", filter_filenames)
-        self.filter_parameters = filter_parameters or {}
         self.register_data_files()

     @property
@@ -450,45 +568,6 @@ def find_required_filehandlers(self, requirements, filename_info):
                 # filetype!
return req_fh - def sorted_filetype_items(self): - """Sort the instance's filetypes in using order.""" - processed_types = [] - file_type_items = deque(self.config["file_types"].items()) - while len(file_type_items): - filetype, filetype_info = file_type_items.popleft() - - requirements = filetype_info.get("requires") - if requirements is not None: - # requirements have not been processed yet -> wait - missing = [req for req in requirements - if req not in processed_types] - if missing: - file_type_items.append((filetype, filetype_info)) - continue - - processed_types.append(filetype) - yield filetype, filetype_info - - @staticmethod - def filename_items_for_filetype(filenames, filetype_info): - """Iterate over the filenames matching *filetype_info*.""" - if not isinstance(filenames, set): - # we perform set operations later on to improve performance - filenames = set(filenames) - for pattern in filetype_info["file_patterns"]: - matched_files = set() - matches = _match_filenames(filenames, pattern) - for filename in matches: - try: - filename_info = parse( - pattern, _get_filebase(filename, pattern)) - except ValueError: - logger.debug("Can't parse %s with %s.", filename, pattern) - continue - matched_files.add(filename) - yield filename, filename_info - filenames -= matched_files - def _new_filehandler_instances(self, filetype_info, filename_items, fh_kwargs=None): """Generate new filehandler instances.""" requirements = filetype_info.get("requires") @@ -512,61 +591,6 @@ def _new_filehandler_instances(self, filetype_info, filename_items, fh_kwargs=No yield filetype_cls(filename, filename_info, filetype_info, *req_fh, **fh_kwargs) - def time_matches(self, fstart, fend): - """Check that a file's start and end time mtach filter_parameters of this reader.""" - start_time = self.filter_parameters.get("start_time") - end_time = self.filter_parameters.get("end_time") - fend = fend or fstart - if start_time and fend and fend < start_time: - return False - if end_time and fstart and fstart > end_time: - return False - return True - - def metadata_matches(self, sample_dict, file_handler=None): - """Check that file metadata matches filter_parameters of this reader.""" - # special handling of start/end times - if not self.time_matches( - sample_dict.get("start_time"), sample_dict.get("end_time")): - return False - for key, val in self.filter_parameters.items(): - if key != "area" and key not in sample_dict: - continue - - if key in ["start_time", "end_time"]: - continue - elif key == "area" and file_handler: - if not self.check_file_covers_area(file_handler, val): - logger.info("Filtering out %s based on area", - file_handler.filename) - break - elif key in sample_dict and val != sample_dict[key]: - # don't use this file - break - else: - # all the metadata keys are equal - return True - return False - - def filter_filenames_by_info(self, filename_items): - """Filter out file using metadata from the filenames. - - Currently only uses start and end time. If only start time is available - from the filename, keep all the filename that have a start time before - the requested end time. 
- """ - for filename, filename_info in filename_items: - fend = filename_info.get("end_time") - fstart = filename_info.setdefault("start_time", fend) - if fend and fend < fstart: - # correct for filenames with 1 date and 2 times - fend = fend.replace(year=fstart.year, - month=fstart.month, - day=fstart.day) - filename_info["end_time"] = fend - if self.metadata_matches(filename_info): - yield filename, filename_info - def filter_fh_by_metadata(self, filehandlers): """Filter out filehandlers using provide filter parameters.""" for filehandler in filehandlers: @@ -575,20 +599,6 @@ def filter_fh_by_metadata(self, filehandlers): if self.metadata_matches(filehandler.metadata, filehandler): yield filehandler - def filter_selected_filenames(self, filenames): - """Filter provided files based on metadata in the filename.""" - if not isinstance(filenames, set): - # we perform set operations later on to improve performance - filenames = set(filenames) - for _, filetype_info in self.sorted_filetype_items(): - filename_iter = self.filename_items_for_filetype(filenames, - filetype_info) - if self.filter_filenames: - filename_iter = self.filter_filenames_by_info(filename_iter) - - for fn, _ in filename_iter: - yield fn - def _new_filehandlers_for_filetype(self, filetype_info, filenames, fh_kwargs=None): """Create filehandlers for a given filetype.""" filename_iter = self.filename_items_for_filetype(filenames, @@ -603,6 +613,11 @@ def _new_filehandlers_for_filetype(self, filetype_info, filenames, fh_kwargs=Non filtered_iter = self.filter_fh_by_metadata(filehandler_iter) return list(filtered_iter) + + def create_storage_items(self, files, **kwargs): + """Create the storage items.""" + return self.create_filehandlers(files, **kwargs) + def create_filehandlers(self, filenames, fh_kwargs=None): """Organize the filenames into file types and create file handlers.""" filenames = list(OrderedDict.fromkeys(filenames)) diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index f801743a08..d6d178044e 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -18,6 +18,7 @@ """Module for testing the satpy.readers.sar-c_safe module.""" import os +from datetime import datetime from enum import Enum from io import BytesIO from pathlib import Path @@ -29,7 +30,7 @@ from satpy._config import PACKAGE_CONFIG_PATH from satpy.dataset import DataQuery from satpy.dataset.dataid import DataID -from satpy.readers.sar_c_safe import SAFEXMLAnnotation, SAFEXMLCalibration, SAFEXMLNoise +from satpy.readers.sar_c_safe import Calibrator, Denoiser, SAFEXMLAnnotation rasterio = pytest.importorskip("rasterio") @@ -37,6 +38,7 @@ dirname_suffix = "20190201T024655_20190201T024720_025730_02DC2A_AE07" filename_suffix = "20190201t024655-20190201t024720-025730-02dc2a" + @pytest.fixture(scope="module") def granule_directory(tmp_path_factory): """Create a granule directory.""" @@ -78,10 +80,10 @@ def calibration_file(granule_directory): def calibration_filehandler(calibration_file, annotation_filehandler): """Create a calibration filehandler.""" filename_info = dict(start_time=None, end_time=None, polarization="vv") - return SAFEXMLCalibration(calibration_file, + return Calibrator(calibration_file, filename_info, None, - annotation_filehandler) + image_shape=annotation_filehandler.image_shape) @pytest.fixture(scope="module") def noise_file(granule_directory): @@ -98,16 +100,16 @@ def noise_file(granule_directory): def noise_filehandler(noise_file, 
annotation_filehandler): """Create a noise filehandler.""" filename_info = dict(start_time=None, end_time=None, polarization="vv") - return SAFEXMLNoise(noise_file, filename_info, None, annotation_filehandler) + return Denoiser(noise_file, filename_info, None, image_shape=annotation_filehandler.image_shape) @pytest.fixture(scope="module") def noise_with_holes_filehandler(annotation_filehandler): """Create a noise filehandler from data with holes.""" filename_info = dict(start_time=None, end_time=None, polarization="vv") - noise_filehandler = SAFEXMLNoise(BytesIO(noise_xml_with_holes), + noise_filehandler = Denoiser(BytesIO(noise_xml_with_holes), filename_info, None, - annotation_filehandler) + image_shape=annotation_filehandler.image_shape) return noise_filehandler @@ -153,7 +155,7 @@ def measurement_file(granule_directory): @pytest.fixture(scope="module") -def measurement_filehandler(measurement_file, annotation_filehandler, noise_filehandler, calibration_filehandler): +def measurement_filehandler(measurement_file, noise_filehandler, calibration_filehandler): """Create a measurement filehandler.""" filename_info = {"mission_id": "S1A", "dataset_name": "foo", "start_time": 0, "end_time": 0, "polarization": "vv"} @@ -163,8 +165,7 @@ def measurement_filehandler(measurement_file, annotation_filehandler, noise_file filename_info, filetype_info, calibration_filehandler, - noise_filehandler, - annotation_filehandler) + noise_filehandler) return filehandler @@ -835,10 +836,10 @@ def test_get_calibration_constant(self, calibration_filehandler): def test_incidence_angle(annotation_filehandler): - """Test reading the incidence angle in an annotation file.""" - query = DataQuery(name="incidence_angle", polarization="vv") - res = annotation_filehandler.get_dataset(query, {}) - np.testing.assert_allclose(res, 19.18318046) + """Test reading the incidence angle in an annotation file.""" + query = DataQuery(name="incidence_angle", polarization="vv") + res = annotation_filehandler.get_dataset(query, {}) + np.testing.assert_allclose(res, 19.18318046) def test_reading_from_reader(measurement_file, calibration_file, noise_file, annotation_file): @@ -849,7 +850,7 @@ def test_reading_from_reader(measurement_file, calibration_file, noise_file, ann reader = reader_class(config) files = [measurement_file, calibration_file, noise_file, annotation_file] - reader.create_filehandlers(files) + reader.create_storage_items(files) query = DataQuery(name="measurement", polarization="vv", calibration="sigma_nought", quantity="dB") query = DataID(reader._id_keys, **query.to_dict()) @@ -858,3 +859,44 @@ def test_reading_from_reader(measurement_file, calibration_file, noise_file, ann np.testing.assert_allclose(array.attrs["area"].lons, expected_longitudes[:10, :10]) expected_db = np.array([[np.nan, -15.674268], [4.079997, 5.153585]]) np.testing.assert_allclose(array.values[:2, :2], expected_db) + + +def test_filename_filtering_from_reader(measurement_file, calibration_file, noise_file, annotation_file, tmp_path): + """Test that filenames get filtered before filehandlers are created.""" + with open(Path(PACKAGE_CONFIG_PATH) / "readers" / "sar-c_safe.yaml") as fd: + config = yaml.load(fd, Loader=yaml.UnsafeLoader) + reader_class = config["reader"]["reader"] + filter_parameters = {"start_time": datetime(2019, 2, 1, 0, 0, 0), + "end_time": datetime(2019, 2, 1, 12, 0, 0)} + reader = reader_class(config, filter_parameters) + + spurious_file = (tmp_path / "S1A_IW_GRDH_1SDV_20190202T024655_20190202T024720_025730_02DC2A_AE07.SAFE" / + 
"measurement" / + "s1a-iw-grd-vv-20190202t024655-20190202t024720-025730-02dc2a-001.tiff") + + + files = [spurious_file, measurement_file, calibration_file, noise_file, annotation_file] + + files = reader.filter_selected_filenames(files) + assert spurious_file not in files + try: + reader.create_storage_items(files) + except rasterio.RasterioIOError as err: + pytest.fail(str(err)) + + +def test_swath_def_contains_gcps(measurement_file, calibration_file, noise_file, annotation_file): + """Test reading using the reader defined in the config.""" + with open(Path(PACKAGE_CONFIG_PATH) / "readers" / "sar-c_safe.yaml") as fd: + config = yaml.load(fd, Loader=yaml.UnsafeLoader) + reader_class = config["reader"]["reader"] + reader = reader_class(config) + + files = [measurement_file, calibration_file, noise_file, annotation_file] + reader.create_storage_items(files) + query = DataQuery(name="measurement", polarization="vv", + calibration="sigma_nought", quantity="dB") + query = DataID(reader._id_keys, **query.to_dict()) + dataset_dict = reader.load([query]) + array = dataset_dict["measurement"] + assert array.attrs["area"].attrs["gcps"] is not None From 1e7b3591ee6e9280668efa11abf791b5ad2e41d7 Mon Sep 17 00:00:00 2001 From: Will Sharpe Date: Fri, 26 Jan 2024 18:42:04 +0000 Subject: [PATCH 1074/1416] Enhanced viirs_l2 with geo and diff enhancements --- satpy/etc/enhancements/viirs.yaml | 50 +++++-------------------------- satpy/etc/readers/viirs_l2.yaml | 2 +- satpy/readers/viirs_l2.py | 31 +++++++++++++++++++ satpy/readers/yaml_reader.py | 17 ++++++----- 4 files changed, 49 insertions(+), 51 deletions(-) diff --git a/satpy/etc/enhancements/viirs.yaml b/satpy/etc/enhancements/viirs.yaml index a21e47aa7f..aad8407d92 100644 --- a/satpy/etc/enhancements/viirs.yaml +++ b/satpy/etc/enhancements/viirs.yaml @@ -87,42 +87,8 @@ enhancements: method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - - {colors: [ - [255, 0, 0], - [170, 0, 0], - [110, 0, 0], - [112, 1, 2], - [124, 91, 5], - [240, 190, 64], - [255, 255, 0], - [0, 220, 0], - [0, 136, 0], - [0, 80, 0], - [0, 136, 238], - [0, 0, 255], - [0, 0, 170], - [0, 0, 100], - [183, 15, 141], - [102, 0, 119] - ], - values: [ - 0, - 800, - 1600, - 2350, - 3150, - 4000, - 4800, - 5600, - 6400, - 7200, - 8000, - 8800, - 9600, - 10400, - 11200, - 12000 - ], + - { + filename: cth.txt, min_value: 0, max_value: 18000, } @@ -134,8 +100,8 @@ enhancements: method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - - {colors: [[255, 247, 236], [254, 246, 233], [254, 244, 230], [254, 243, 228], [254, 242, 224], [254, 241, 222], [254, 239, 219], [254, 239, 216], [254, 237, 213], [254, 236, 210], [254, 235, 207], [254, 233, 204], [254, 232, 202], [253, 231, 198], [253, 230, 195], [253, 228, 191], [253, 226, 189], [253, 225, 185], [253, 223, 181], [253, 221, 178], [253, 220, 174], [253, 218, 172], [253, 216, 168], [253, 215, 165], [253, 213, 161], [253, 211, 157], [253, 210, 156], [253, 207, 153], [253, 206, 152], [253, 203, 149], [253, 202, 148], [253, 200, 145], [253, 198, 143], [253, 196, 141], [253, 193, 139], [253, 192, 137], [253, 189, 134], [253, 188, 133], [252, 185, 130], [252, 182, 127], [252, 177, 123], [252, 174, 120], [252, 170, 116], [252, 166, 112], [252, 163, 109], [252, 159, 105], [252, 156, 103], [252, 151, 99], [252, 148, 96], [252, 144, 92], [251, 140, 88], [250, 137, 87], [249, 134, 86], [248, 131, 85], [247, 127, 83], [246, 125, 82], [245, 121, 80], [244, 119, 79], [243, 115, 78], [242, 111, 76], [241, 109, 75], [240, 105, 73], 
[239, 102, 72], [237, 98, 69], [236, 94, 67], [234, 89, 63], [232, 86, 60], [230, 81, 57], [227, 76, 53], [226, 73, 50], [224, 68, 46], [222, 65, 44], [220, 60, 40], [218, 56, 37], [216, 51, 33], [214, 46, 30], [211, 43, 28], [208, 39, 25], [206, 36, 23], [202, 31, 20], [200, 28, 18], [197, 24, 15], [194, 21, 13], [191, 16, 10], [188, 12, 7], [185, 9, 5], [182, 4, 3], [180, 1, 1], [175, 0, 0], [172, 0, 0], [167, 0, 0], [164, 0, 0], [159, 0, 0], [154, 0, 0], [151, 0, 0], [146, 0, 0], [143, 0, 0], [138, 0, 0], [135, 0, 0], [130, 0, 0], [127, 0, 0]], - values: [0, 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.1, 0.11, 0.12, 0.13, 0.14, 0.15, 0.16, 0.17, 0.18, 0.19, 0.2, 0.21, 0.22, 0.23, 0.24, 0.25, 0.26, 0.27, 0.28, 0.29, 0.3, 0.31, 0.32, 0.33, 0.34, 0.35, 0.36, 0.37, 0.38, 0.39, 0.4, 0.41, 0.42, 0.43, 0.44, 0.45, 0.46, 0.47, 0.48, 0.49, 0.5, 0.51, 0.52, 0.53, 0.54, 0.55, 0.56, 0.57, 0.58, 0.59, 0.6, 0.61, 0.62, 0.63, 0.64, 0.65, 0.66, 0.67, 0.68, 0.69, 0.7, 0.71, 0.72, 0.73, 0.74, 0.75, 0.76, 0.77, 0.78, 0.79, 0.8, 0.81, 0.82, 0.83, 0.84, 0.85, 0.86, 0.87, 0.88, 0.89, 0.9, 0.91, 0.92, 0.93, 0.94, 0.95, 0.96, 0.97, 0.98, 0.99, 1], + - { + filename: csc.txt, min_value: 0.0, max_value: 1.0 } @@ -148,9 +114,9 @@ enhancements: method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - - {colors: 'ylorrd', + - {filename: aod.txt, min_value: 0.0, - max_value: 1.0, + max_value: 5.0, } Angstrom_Exponent_Land_Ocean: @@ -160,8 +126,8 @@ enhancements: method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - - {colors: [[122, 145, 2], [123, 148, 3], [124, 150, 4], [124, 153, 5], [125, 155, 6], [126, 158, 7], [127, 160, 8], [127, 163, 9], [128, 165, 10], [129, 168, 11], [130, 170, 12], [130, 173, 13], [131, 175, 14], [132, 178, 15], [133, 181, 16], [132, 183, 18], [132, 185, 20], [132, 187, 22], [132, 189, 25], [132, 191, 27], [132, 193, 29], [132, 195, 31], [131, 197, 34], [131, 199, 36], [131, 201, 38], [131, 203, 40], [131, 205, 43], [131, 207, 45], [131, 209, 47], [131, 212, 50], [130, 213, 51], [129, 215, 53], [128, 217, 55], [128, 219, 57], [127, 221, 59], [126, 222, 61], [125, 224, 63], [125, 226, 64], [124, 228, 66], [123, 230, 68], [122, 231, 70], [122, 233, 72], [121, 235, 74], [120, 237, 76], [120, 239, 78], [119, 239, 79], [118, 240, 80], [117, 241, 82], [116, 242, 83], [116, 243, 85], [115, 244, 86], [114, 245, 87], [113, 246, 89], [112, 247, 90], [112, 248, 92], [111, 249, 93], [110, 250, 94], [109, 251, 96], [108, 252, 97], [108, 253, 99], [107, 252, 100], [106, 252, 102], [106, 252, 103], [105, 251, 105], [105, 251, 106], [104, 251, 108], [103, 251, 109], [103, 250, 111], [102, 250, 112], [102, 250, 114], [101, 250, 115], [100, 249, 117], [100, 249, 118], [99, 249, 120], [99, 249, 122], [98, 247, 123], [97, 246, 124], [96, 245, 126], [95, 244, 127], [94, 243, 128], [93, 242, 130], [92, 241, 131], [92, 239, 132], [91, 238, 134], [90, 237, 135], [89, 236, 136], [88, 235, 138], [87, 234, 139], [86, 233, 140], [86, 232, 142], [85, 230, 143], [84, 229, 144], [83, 228, 145], [82, 226, 147], [81, 225, 148], [80, 224, 149], [79, 223, 150], [78, 221, 152], [77, 220, 153], [76, 219, 154], [75, 218, 155], [74, 216, 157], [73, 215, 158], [72, 214, 159], [72, 213, 161], [71, 211, 162], [70, 209, 163], [69, 208, 164], [68, 206, 165], [67, 205, 166], [66, 203, 167], [65, 201, 168], [64, 200, 170], [63, 198, 171], [62, 197, 172], [61, 195, 173], [60, 193, 174], [59, 192, 175], [58, 190, 176], [58, 189, 178], [58, 187, 178], [58, 185, 179], [58, 184, 180], [58, 182, 181], 
[58, 181, 182], [58, 179, 183], [58, 178, 184], [59, 176, 184], [59, 175, 185], [59, 173, 186], [59, 172, 187], [59, 170, 188], [59, 169, 189], [59, 167, 190], [60, 166, 191], [60, 164, 191], [61, 162, 192], [61, 160, 193], [62, 158, 194], [63, 156, 195], [63, 154, 195], [64, 152, 196], [64, 150, 197], [65, 148, 198], [66, 146, 199], [66, 144, 199], [67, 142, 200], [67, 140, 201], [68, 138, 202], [69, 137, 203], [69, 135, 203], [70, 133, 204], [70, 131, 205], [71, 129, 205], [72, 128, 206], [72, 126, 207], [73, 124, 207], [73, 122, 208], [74, 120, 209], [75, 119, 209], [75, 117, 210], [76, 115, 211], [76, 113, 211], [77, 111, 212], [78, 110, 213], [78, 108, 213], [79, 106, 214], [80, 104, 214], [80, 102, 215], [81, 101, 216], [82, 99, 216], [82, 97, 217], [83, 95, 217], [84, 93, 218], [84, 92, 219], [85, 90, 219], [86, 88, 220], [86, 86, 220], [87, 84, 221], [88, 83, 222], [88, 82, 222], [89, 81, 223], [90, 80, 223], [91, 80, 224], [92, 79, 224], [93, 78, 225], [94, 77, 225], [95, 77, 226], [96, 76, 226], [97, 75, 227], [98, 74, 227], [99, 74, 228], [100, 73, 228], [101, 72, 229], [102, 72, 230], [104, 72, 230], [106, 73, 230], [108, 73, 230], [110, 74, 231], [112, 74, 231], [114, 75, 231], [116, 75, 231], [118, 76, 232], [120, 76, 232], [122, 77, 232], [124, 77, 232], [126, 78, 233], [128, 78, 233], [130, 79, 233], [133, 80, 234], [135, 80, 234], [137, 80, 234], [139, 81, 234], [141, 81, 234], [143, 81, 234], [145, 82, 234], [147, 82, 234], [149, 82, 234], [151, 83, 234], [153, 83, 234], [155, 83, 234], [157, 84, 234], [159, 84, 234], [161, 84, 234], [164, 85, 235], [165, 85, 235], [166, 85, 235], [168, 85, 235], [169, 85, 235], [171, 85, 235], [172, 85, 235], [174, 85, 235], [175, 86, 235], [177, 86, 235], [178, 86, 235], [180, 86, 235], [181, 86, 235], [183, 86, 235], [184, 86, 235], [186, 87, 235], [187, 87, 234], [188, 87, 234], [190, 87, 234], [191, 88, 234], [193, 88, 234], [194, 88, 234], [196, 88, 234], [197, 89, 234], [199, 89, 234], [200, 89, 234], [202, 89, 234]], - values: [0, 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.1, 0.11, 0.12, 0.13, 0.14, 0.15, 0.16, 0.17, 0.18, 0.19, 0.2, 0.21, 0.22, 0.23, 0.24, 0.25, 0.26, 0.27, 0.28, 0.29, 0.3, 0.31, 0.32, 0.33, 0.34, 0.35, 0.36, 0.37, 0.38, 0.39, 0.4, 0.41, 0.42, 0.43, 0.44, 0.45, 0.46, 0.47, 0.48, 0.49, 0.5, 0.51, 0.52, 0.53, 0.54, 0.55, 0.56, 0.57, 0.58, 0.59, 0.6, 0.61, 0.62, 0.63, 0.64, 0.65, 0.66, 0.67, 0.68, 0.69, 0.7, 0.71, 0.72, 0.73, 0.74, 0.75, 0.76, 0.77, 0.78, 0.79, 0.8, 0.81, 0.82, 0.83, 0.84, 0.85, 0.86, 0.87, 0.88, 0.89, 0.9, 0.91, 0.92, 0.93, 0.94, 0.95, 0.96, 0.97, 0.98, 0.99, 1, 1.01, 1.02, 1.03, 1.04, 1.05, 1.06, 1.07, 1.08, 1.09, 1.1, 1.11, 1.12, 1.13, 1.14, 1.15, 1.16, 1.17, 1.18, 1.19, 1.2, 1.21, 1.22, 1.23, 1.24, 1.25, 1.26, 1.27, 1.28, 1.29, 1.3, 1.31, 1.32, 1.33, 1.34, 1.35, 1.36, 1.37, 1.38, 1.39, 1.4, 1.41, 1.42, 1.43, 1.44, 1.45, 1.46, 1.47, 1.48, 1.49, 1.5, 1.51, 1.52, 1.53, 1.54, 1.55, 1.56, 1.57, 1.58, 1.59, 1.6, 1.61, 1.62, 1.63, 1.64, 1.65, 1.66, 1.67, 1.68, 1.69, 1.7, 1.71, 1.72, 1.73, 1.74, 1.75, 1.76, 1.77, 1.78, 1.79, 1.8, 1.81, 1.82, 1.83, 1.84, 1.85, 1.86, 1.87, 1.88, 1.89, 1.9, 1.91, 1.92, 1.93, 1.94, 1.95, 1.96, 1.97, 1.98, 1.99, 2, 2.01, 2.02, 2.03, 2.04, 2.05, 2.06, 2.07, 2.08, 2.09, 2.1, 2.11, 2.12, 2.13, 2.14, 2.15, 2.16, 2.17, 2.18, 2.19, 2.2, 2.21, 2.22, 2.23, 2.24, 2.25, 2.26, 2.27, 2.28, 2.29, 2.3, 2.31, 2.32, 2.33, 2.34, 2.35, 2.36, 2.37, 2.38, 2.39, 2.4, 2.41, 2.42, 2.43, 2.44, 2.45, 2.46, 2.47, 2.48, 2.49, 2.5], + - { + filename: aelo.txt, min_value: -0.5, max_value: 
2.5, } diff --git a/satpy/etc/readers/viirs_l2.yaml b/satpy/etc/readers/viirs_l2.yaml index 045b69cd75..b8f403917f 100644 --- a/satpy/etc/readers/viirs_l2.yaml +++ b/satpy/etc/readers/viirs_l2.yaml @@ -5,7 +5,7 @@ reader: description: Generic NASA VIIRS L2 Reader status: Alpha supports_fsspec: false - reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader sensors: [viirs] default_datasets: diff --git a/satpy/readers/viirs_l2.py b/satpy/readers/viirs_l2.py index 671ad98c7a..37247d7933 100644 --- a/satpy/readers/viirs_l2.py +++ b/satpy/readers/viirs_l2.py @@ -166,3 +166,34 @@ def get_dataset(self, ds_id: int, ds_info: str) -> xr.DataArray: if "number_of_lines" in data.dims: data = data.rename({"number_of_lines": "y", "number_of_pixels": "x"}) return data + + def get_area_def(self, ds_id): + """Get area definition.""" + proj_dict = { + "proj": "latlong", + "datum": "WGS84", + "ellps": "WGS84", + "no_defs": True + } + + area_extent = [self["/attr/geospatial_lon_min"], self["/attr/geospatial_lat_min"], + self["/attr/geospatial_lon_max"], self["/attr/geospatial_lat_max"]] + + if '/dimension/number_of_pixels' in self: + width = int(self['/dimension/number_of_pixels']) + height = int(self['/dimension/number_of_lines']) + else: + width = int(self['/dimension/Idx_Xtrack']) # ncols + height = int(self['/dimension/Idx_Atrack']) + + area = AreaDefinition( + "viirs_l2_area", + "name_of_proj", + "id_of_proj", + proj_dict, + width, + height, + np.asarray(area_extent) + ) + + return area diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py index 29aaaf0955..809744251e 100644 --- a/satpy/readers/yaml_reader.py +++ b/satpy/readers/yaml_reader.py @@ -1010,14 +1010,15 @@ def _set_orientation(dataset, upper_right_corner): logger.info("Dataset {} is in a SwathDefinition " "and will not be flipped.".format(dataset.attrs.get("name", "unknown_name"))) return dataset - - projection_type = _get_projection_type(dataset.attrs["area"]) - accepted_geos_proj_types = ["Geostationary Satellite (Sweep Y)", "Geostationary Satellite (Sweep X)"] - if projection_type not in accepted_geos_proj_types: - logger.info("Dataset {} is not in one of the known geostationary projections {} " - "and cannot be flipped.".format(dataset.attrs.get("name", "unknown_name"), - accepted_geos_proj_types)) - return dataset + + if dataset.attrs['area'].area_id != 'viirs_l2_area': + projection_type = _get_projection_type(dataset.attrs["area"]) + accepted_geos_proj_types = ["Geostationary Satellite (Sweep Y)", "Geostationary Satellite (Sweep X)"] + if projection_type not in accepted_geos_proj_types: + logger.info("Dataset {} is not in one of the known geostationary projections {} " + "and cannot be flipped.".format(dataset.attrs.get("name", "unknown_name"), + accepted_geos_proj_types)) + return dataset target_eastright, target_northup = _get_target_scene_orientation(upper_right_corner) From c8901a1a6b865ee327bf56dc67be708b3ad050d3 Mon Sep 17 00:00:00 2001 From: Olivier Samain Date: Mon, 29 Jan 2024 11:26:44 +0100 Subject: [PATCH 1075/1416] made common parameters generic --- satpy/etc/readers/fci_l2_nc.yaml | 343 +++++-------------------------- 1 file changed, 55 insertions(+), 288 deletions(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index e3e063f09b..6c87d10c53 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -77,6 +77,61 @@ file_types: datasets: +# COMMON + 
product_quality: + name: product_quality + standard_name: product_quality + file_type: [nc_fci_amv, nc_fci_clm, nc_fci_ct, nc_fci_ctth, nc_fci_fir, nc_fci_gii, nc_fci_oca, nc_fci_olr, nc_fci_crm, nc_fci_asr] + nc_key: product_quality + + product_completeness: + name: product_completeness + standard_name: product_completeness + file_type: [nc_fci_amv, nc_fci_clm, nc_fci_ct, nc_fci_ctth, nc_fci_fir, nc_fci_gii, nc_fci_oca, nc_fci_olr, nc_fci_crm, nc_fci_asr] + nc_key: product_completeness + + product_timeliness: + name: product_timeliness + standard_name: product_timeliness + file_type: [nc_fci_amv, nc_fci_clm, nc_fci_ct, nc_fci_ctth, nc_fci_fir, nc_fci_gii, nc_fci_oca, nc_fci_olr, nc_fci_crm, nc_fci_asr] + nc_key: product_timeliness + + quality_illumination: + name: quality_illumination + standard_name: status_flag + resolution: 2000 + file_type: [nc_fci_clm, nc_fci_ct] + nc_key: quality_illumination + fill_value: -127 + import_enum_information: True + + quality_nwp_parameters: + name: quality_nwp_parameters + standard_name: status_flag + resolution: 2000 + file_type: [nc_fci_clm, nc_fci_ct, nc_fci_ctth] + nc_key: quality_nwp_parameters + fill_value: -127 + import_enum_information: True + + quality_MTG_parameters: + name: quality_MTG_parameters + standard_name: status_flag + resolution: 2000 + file_type: [nc_fci_clm, nc_fci_ct, nc_fci_ctth] + nc_key: quality_MTG_parameters + fill_value: -127 + import_enum_information: True + + quality_overall_processing: + name: quality_overall_processing + standard_name: quality_flag + resolution: 2000 + file_type: [nc_fci_clm, nc_fci_ct, nc_fci_ctth, nc_fci_olr] + nc_key: quality_overall_processing + fill_value: -127 + import_enum_information: True + # AMV Intermediate - Atmospheric Motion Vectors Intermediate intm_latitude: name: intm_latitude @@ -317,24 +372,6 @@ datasets: - longitude - latitude - product_quality: - name: product_quality - standard_name: product_quality - file_type: nc_fci_amv - nc_key: product_quality - - product_completeness: - name: product_completeness - standard_name: product_completeness - file_type: nc_fci_amv - nc_key: product_completeness - - product_timeliness: - name: product_timeliness - standard_name: product_timeliness - file_type: nc_fci_amv - nc_key: product_timeliness - # CLM - Cloud Mask cloud_state: @@ -346,60 +383,6 @@ datasets: fill_value: -127 import_enum_information: True - quality_illumination_clm: - name: quality_illumination_clm - standard_name: status_flag - resolution: 2000 - file_type: nc_fci_clm - nc_key: quality_illumination - fill_value: -127 - import_enum_information: True - - quality_nwp_parameters_clm: - name: quality_nwp_parameters_clm - standard_name: status_flag - resolution: 2000 - file_type: nc_fci_clm - nc_key: quality_nwp_parameters - fill_value: -127 - import_enum_information: True - - quality_MTG_parameters_clm: - name: quality_MTG_parameters_clm - standard_name: status_flag - resolution: 2000 - file_type: nc_fci_clm - nc_key: quality_MTG_parameters - fill_value: -127 - import_enum_information: True - - quality_overall_processing_clm: - name: quality_overall_processing_clm - standard_name: quality_flag - resolution: 2000 - file_type: nc_fci_clm - nc_key: quality_overall_processing - fill_value: -127 - import_enum_information: True - - product_quality_clm: - name: product_quality_clm - standard_name: product_quality - file_type: nc_fci_clm - nc_key: product_quality - - product_completeness_clm: - name: product_completeness_clm - standard_name: product_completeness - file_type: nc_fci_clm - 
nc_key: product_completeness - - product_timeliness_clm: - name: product_timeliness_clm - standard_name: product_timeliness - file_type: nc_fci_clm - nc_key: product_timeliness - # CT - Cloud Type cloud_phase: @@ -420,60 +403,6 @@ datasets: fill_value: -127 import_enum_information: True - quality_illumination_ct: - name: quality_illumination_ct - standard_name: status_flag - resolution: 2000 - file_type: nc_fci_ct - nc_key: quality_illumination - fill_value: -127 - import_enum_information: True - - quality_nwp_parameters_ct: - name: quality_nwp_parameters_ct - standard_name: status_flag - resolution: 2000 - file_type: nc_fci_ct - nc_key: quality_nwp_parameters - fill_value: -127 - import_enum_information: True - - quality_MTG_parameters_ct: - name: quality_MTG_parameters_ct - standard_name: status_flag - resolution: 2000 - file_type: nc_fci_ct - nc_key: quality_MTG_parameters - fill_value: -127 - import_enum_information: True - - quality_overall_processing_ct: - name: quality_overall_processing_ct - standard_name: quality_flag - resolution: 2000 - file_type: nc_fci_ct - nc_key: quality_overall_processing - fill_value: -127 - import_enum_information: True - - product_quality_ct: - name: product_quality_ct - standard_name: product_quality - file_type: nc_fci_ct - nc_key: product_quality - - product_completeness_ct: - name: product_completeness_ct - standard_name: product_completeness - file_type: nc_fci_ct - nc_key: product_completeness - - product_timeliness_ct: - name: product_timeliness_ct - standard_name: product_timeliness - file_type: nc_fci_ct - nc_key: product_timeliness - # CTTH - Cloud Top Temperature and Height cloud_top_aviation_height: @@ -538,33 +467,6 @@ datasets: fill_value: -127 import_enum_information: True - quality_nwp_parameters_ctth: - name: quality_nwp_parameters_ctth - standard_name: status_flag - resolution: 2000 - file_type: nc_fci_ctth - nc_key: quality_nwp_parameters - fill_value: -127 - import_enum_information: True - - quality_MTG_parameters_ctth: - name: quality_MTG_parameters_ctth - standard_name: status_flag - resolution: 2000 - file_type: nc_fci_ctth - nc_key: quality_MTG_parameters - fill_value: -127 - import_enum_information: True - - quality_overall_processing_ctth: - name: quality_overall_processing_ctth - standard_name: quality_flag - resolution: 2000 - file_type: nc_fci_ctth - nc_key: quality_overall_processing - fill_value: -127 - import_enum_information: True - quality_overall_processing_aviation_ctth: name: quality_overall_processing_aviation_ctth standard_name: quality_flag @@ -574,24 +476,6 @@ datasets: fill_value: -127 import_enum_information: True - product_quality_ctth: - name: product_quality_ctth - standard_name: product_quality - file_type: nc_fci_ctth - nc_key: product_quality - - product_completeness_ctth: - name: product_completeness_ctth - standard_name: product_completeness - file_type: nc_fci_ctth - nc_key: product_completeness - - product_timeliness_ctth: - name: product_timeliness_ctth - standard_name: product_timeliness - file_type: nc_fci_ctth - nc_key: product_timeliness - # FIR - Active Fire Monitoring fire_probability: @@ -610,24 +494,6 @@ datasets: fill_value: -127 import_enum_information: True - product_quality_fir: - name: product_quality_fir - standard_name: product_quality - file_type: nc_fci_fir - nc_key: product_quality - - product_completeness_fir: - name: product_completeness_fir - standard_name: product_completeness - file_type: nc_fci_fir - nc_key: product_completeness - - product_timeliness_fir: - name: 
product_timeliness_fir - standard_name: product_timeliness - file_type: nc_fci_fir - nc_key: product_timeliness - # GII - Global Instability Index k_index: @@ -712,24 +578,6 @@ datasets: - longitude - latitude - product_quality_gii: - name: product_quality_gii - standard_name: product_quality - file_type: nc_fci_gii - nc_key: product_quality - - product_completeness_gii: - name: product_completeness_gii - standard_name: product_completeness - file_type: nc_fci_gii - nc_key: product_completeness - - product_timeliness_gii: - name: product_timeliness_gii - standard_name: product_timeliness - file_type: nc_fci_gii - nc_key: product_timeliness - # OCA - Optimal Cloud Analysis retrieved_cloud_phase: @@ -855,24 +703,6 @@ datasets: file_type: nc_fci_oca nc_key: quality_jmeas - product_quality_oca: - name: product_quality_oca - standard_name: product_quality - file_type: nc_fci_oca - nc_key: product_quality - - product_completeness_oca: - name: product_completeness_oca - standard_name: product_completeness - file_type: nc_fci_oca - nc_key: product_completeness - - product_timeliness_oca: - name: product_timeliness_oca - standard_name: product_timeliness - file_type: nc_fci_oca - nc_key: product_timeliness - # OLR - Outgoing Longwave Radiation olr: @@ -891,33 +721,6 @@ datasets: fill_value: -127 import_enum_information: True - quality_overall_processing_olr: - name: quality_overall_processing_olr - standard_name: quality_flag - resolution: 2000 - file_type: nc_fci_olr - nc_key: quality_overall_processing - fill_value: -127 - import_enum_information: True - - product_quality_olr: - name: product_quality_olr - standard_name: product_quality - file_type: nc_fci_olr - nc_key: product_quality - - product_completeness_olr: - name: product_completeness_olr - standard_name: product_completeness - file_type: nc_fci_olr - nc_key: product_completeness - - product_timeliness_olr: - name: product_timeliness_olr - standard_name: product_timeliness - file_type: nc_fci_olr - nc_key: product_timeliness - # CRM - Clear-Sky Reflectance Maps crm: @@ -1048,24 +851,6 @@ datasets: nc_key: historical_data import_enum_information: True - product_quality_crm: - name: product_quality_crm - standard_name: product_quality - file_type: nc_fci_crm - nc_key: product_quality - - product_completeness_crm: - name: product_completeness_crm - standard_name: product_completeness - file_type: nc_fci_crm - nc_key: product_completeness - - product_timeliness_crm: - name: product_timeliness_crm - standard_name: product_timeliness - file_type: nc_fci_crm - nc_key: product_timeliness - # LAT/LON FOR SEGMENTED PRODUCTS latitude: @@ -3356,21 +3141,3 @@ datasets: coordinates: - longitude - latitude - - product_quality_asr: - name: product_quality_asr - standard_name: product_quality - file_type: nc_fci_asr - nc_key: product_quality - - product_completeness_asr: - name: product_completeness_asr - standard_name: product_completeness - file_type: nc_fci_asr - nc_key: product_completeness - - product_timeliness_asr: - name: product_timeliness_asr - standard_name: product_timeliness - file_type: nc_fci_asr - nc_key: product_timeliness From c394307e9210a38e89c5a312c93c6a83ea58247b Mon Sep 17 00:00:00 2001 From: Johan Strandgren <42137969+strandgren@users.noreply.github.com> Date: Mon, 29 Jan 2024 12:09:14 +0100 Subject: [PATCH 1076/1416] make all dataset names lower-case and remove _ from dataset names. 
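
A minimal usage sketch of the rename (assuming standard Scene usage; the
input filename below is a placeholder, not a real product name):

    # Hypothetical example: the CTTH quality dataset is now requested as
    # plain "quality_status" instead of the previous "quality_status_ctth".
    from satpy import Scene

    scn = Scene(filenames=["FCI_L2_CTTH_example.nc"], reader="fci_l2_nc")
    scn.load(["quality_status"])
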
--- satpy/etc/readers/fci_l2_nc.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index 6c87d10c53..80229b4b81 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -114,7 +114,7 @@ datasets: fill_value: -127 import_enum_information: True - quality_MTG_parameters: + quality_mtg_parameters: name: quality_MTG_parameters standard_name: status_flag resolution: 2000 @@ -440,8 +440,8 @@ datasets: file_type: nc_fci_ctth nc_key: effective_cloudiness - quality_status_ctth: - name: quality_status_ctth + quality_status: + name: quality_status standard_name: status_flag resolution: 2000 file_type: nc_fci_ctth From 48b4dca28209d41d9328055466dd4eb87721ffbf Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 29 Jan 2024 14:04:49 +0100 Subject: [PATCH 1077/1416] Update asv dependencies --- asv.conf.json | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/asv.conf.json b/asv.conf.json index dbecadf79a..998dc77bd3 100644 --- a/asv.conf.json +++ b/asv.conf.json @@ -58,7 +58,7 @@ // The Pythons you'd like to test against. If not provided, defaults // to the current version of Python used to run `asv`. // "pythons": ["2.7", "3.6"], - "pythons": ["3.9", "3.10"], + "pythons": ["3.11", "3.12"], // The list of conda channel names to be searched for benchmark // dependency packages in the specified order @@ -80,14 +80,14 @@ // "pip+emcee": [""], // emcee is only available for install with pip. // }, "matrix": { - "pyresample": ["1.22.3"], - "trollimage": ["1.17.0"], - "pyorbital": ["1.7.1"], - "pyspectral": ["0.10.6"], - "rasterio": ["1.2.10"], - "dask": ["2021.12.0"], - "xarray": ["0.20.2"], - "numpy": ["1.22.0"], + "pyresample": ["1.27.1"], + "trollimage": ["1.22.2"], + "pyorbital": ["1.8.1"], + "pyspectral": ["0.13.0"], + "rasterio": ["1.3.9"], + "dask": ["2024.1.1"], + "xarray": ["2024.1.1"], + "numpy": ["1.26.0"], "s3fs": [], "h5py": [], "netCDF4": [], From 65cb1ad57da65d2dce9e3752a71549ffe13dd033 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 29 Jan 2024 14:13:52 +0100 Subject: [PATCH 1078/1416] Use mamba as environment type --- asv.conf.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/asv.conf.json b/asv.conf.json index 998dc77bd3..0b53ffd65c 100644 --- a/asv.conf.json +++ b/asv.conf.json @@ -46,7 +46,7 @@ // determined by looking for tools on the PATH environment // variable. 
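 // NOTE: the "mamba" environment type assumes asv >= 0.6; it behaves as a
 // drop-in alternative to "conda" that solves the benchmark environments
 // considerably faster.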
//"environment_type": "virtualenv", - "environment_type": "conda", + "environment_type": "mamba", // timeout in seconds for installing any dependencies in environment // defaults to 10 min From 0c10810d2e27dfad7d69652ae7a639cd5337b82a Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 29 Jan 2024 15:20:05 +0100 Subject: [PATCH 1079/1416] Pin pytest --- continuous_integration/environment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index ecc0084ea7..215d215eac 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -43,7 +43,7 @@ dependencies: - python-eccodes # 2.19.1 seems to cause library linking issues - eccodes>=2.20 - - pytest + - pytest<8.0.0 - pytest-cov - pytest-lazy-fixture - fsspec From 2afb5ed79784fe8a77f5e4a3bdc89859a1434eb8 Mon Sep 17 00:00:00 2001 From: Johan Strandgren <42137969+strandgren@users.noreply.github.com> Date: Mon, 29 Jan 2024 15:41:54 +0100 Subject: [PATCH 1080/1416] Harmonize dataset names. --- satpy/etc/readers/fci_l2_nc.yaml | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index 80229b4b81..b6056b2a65 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -449,8 +449,8 @@ datasets: fill_value: -127 import_enum_information: True - quality_rtm_ctth: - name: quality_rtm_ctth + quality_rtm: + name: quality_rtm standard_name: status_flag resolution: 2000 file_type: nc_fci_ctth @@ -458,8 +458,8 @@ datasets: fill_value: -127 import_enum_information: True - quality_method_ctth: - name: quality_method_ctth + quality_method: + name: quality_method standard_name: status_flag resolution: 2000 file_type: nc_fci_ctth @@ -467,8 +467,8 @@ datasets: fill_value: -127 import_enum_information: True - quality_overall_processing_aviation_ctth: - name: quality_overall_processing_aviation_ctth + quality_overall_processing_aviation: + name: quality_overall_processing_aviation standard_name: quality_flag resolution: 2000 file_type: nc_fci_ctth @@ -556,8 +556,8 @@ datasets: - longitude - latitude - percent_cloud_free_gii: - name: percent_cloud_free_gii + percent_cloud_free: + name: percent_cloud_free long_name: Percentage of Cloud Free Pixels Processed in FoR standard_name: cloud_free_area_fraction resolution: 6000 @@ -568,8 +568,8 @@ datasets: - longitude - latitude - number_of_iterations_gii: - name: number_of_iterations_gii + number_of_iterations: + name: number_of_iterations standard_name: number_of_iterations resolution: 6000 file_type: nc_fci_gii @@ -712,8 +712,8 @@ datasets: file_type: nc_fci_olr nc_key: olr_value - cloud_type_olr: - name: cloud_type_olr + olr_cloud_type: + name: olr_cloud_type standard_name: cloud_type_classification resolution: 2000 file_type: nc_fci_olr From f5f62f4c65ea2f0428999eba97577abaedf2b843 Mon Sep 17 00:00:00 2001 From: Will Sharpe Date: Mon, 29 Jan 2024 14:48:32 +0000 Subject: [PATCH 1081/1416] flake8 linting --- satpy/readers/viirs_l2.py | 3 +-- satpy/readers/yaml_reader.py | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/satpy/readers/viirs_l2.py b/satpy/readers/viirs_l2.py index 37247d7933..0aa2cb4add 100644 --- a/satpy/readers/viirs_l2.py +++ b/satpy/readers/viirs_l2.py @@ -6,7 +6,6 @@ from satpy.readers.netcdf_utils import NetCDF4FileHandler import xarray as xr from pyresample.geometry import AreaDefinition -import numpy as np 
LOG = logging.getLogger(__name__) @@ -183,7 +182,7 @@ def get_area_def(self, ds_id): width = int(self['/dimension/number_of_pixels']) height = int(self['/dimension/number_of_lines']) else: - width = int(self['/dimension/Idx_Xtrack']) # ncols + width = int(self['/dimension/Idx_Xtrack']) height = int(self['/dimension/Idx_Atrack']) area = AreaDefinition( diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py index 809744251e..ee6d82a961 100644 --- a/satpy/readers/yaml_reader.py +++ b/satpy/readers/yaml_reader.py @@ -1010,7 +1010,7 @@ def _set_orientation(dataset, upper_right_corner): logger.info("Dataset {} is in a SwathDefinition " "and will not be flipped.".format(dataset.attrs.get("name", "unknown_name"))) return dataset - + if dataset.attrs['area'].area_id != 'viirs_l2_area': projection_type = _get_projection_type(dataset.attrs["area"]) accepted_geos_proj_types = ["Geostationary Satellite (Sweep Y)", "Geostationary Satellite (Sweep X)"] From 867a0aa043ff39a06d1f335a4d5eec0e896b12ec Mon Sep 17 00:00:00 2001 From: Will Sharpe Date: Mon, 29 Jan 2024 14:49:24 +0000 Subject: [PATCH 1082/1416] Moved viirs_l2 enhancements for viirs to generic --- satpy/etc/enhancements/generic.yaml | 52 +++++++++++++++++++++++++++++ satpy/etc/enhancements/viirs.yaml | 50 --------------------------- 2 files changed, 52 insertions(+), 50 deletions(-) diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 25680d6db9..6ad3801a4b 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -1230,3 +1230,55 @@ enhancements: stretch: crude min_stretch: [0,0,0] max_stretch: [1,1,1] + + Cloud_Top_Height: + name: Cloud_Top_Height + operations: + - name: palettize + method: !!python/name:satpy.enhancements.palettize + kwargs: + palettes: + - { + filename: cth.txt, + min_value: 0, + max_value: 18000, + } + + Clear_Sky_Confidence: + name: Clear_Sky_Confidence + operations: + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - { + filename: csc.txt, + min_value: 0.0, + max_value: 1.0 + } + + + Aerosol_Optical_Thickness_550_Land_Ocean: + name: Aerosol_Optical_Thickness_550_Land_Ocean + operations: + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - {filename: aod.txt, + min_value: 0.0, + max_value: 5.0, + } + + Angstrom_Exponent_Land_Ocean: + name: Angstrom_Exponent_Land_Ocean + operations: + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - { + filename: aelo.txt, + min_value: -0.5, + max_value: 2.5, + } diff --git a/satpy/etc/enhancements/viirs.yaml b/satpy/etc/enhancements/viirs.yaml index aad8407d92..62198c4de3 100644 --- a/satpy/etc/enhancements/viirs.yaml +++ b/satpy/etc/enhancements/viirs.yaml @@ -80,55 +80,5 @@ enhancements: min_value: 0, max_value: 201} - Cloud_Top_Height: - name: Cloud_Top_Height - operations: - - name: palettize - method: !!python/name:satpy.enhancements.palettize - kwargs: - palettes: - - { - filename: cth.txt, - min_value: 0, - max_value: 18000, - } - - Clear_Sky_Confidence: - name: Clear_Sky_Confidence - operations: - - name: colorize - method: !!python/name:satpy.enhancements.colorize - kwargs: - palettes: - - { - filename: csc.txt, - min_value: 0.0, - max_value: 1.0 - } - Aerosol_Optical_Thickness_550_Land_Ocean: - name: Aerosol_Optical_Thickness_550_Land_Ocean - operations: - - name: colorize - method: !!python/name:satpy.enhancements.colorize - kwargs: - palettes: - - 
{filename: aod.txt, - min_value: 0.0, - max_value: 5.0, - } - - Angstrom_Exponent_Land_Ocean: - name: Angstrom_Exponent_Land_Ocean - operations: - - name: colorize - method: !!python/name:satpy.enhancements.colorize - kwargs: - palettes: - - { - filename: aelo.txt, - min_value: -0.5, - max_value: 2.5, - } - From 80f0b2dbb77434d6bb6e276bc1d6b9a3be93513d Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Mon, 29 Jan 2024 15:06:42 +0000 Subject: [PATCH 1083/1416] Use flag_values from enum dict. --- satpy/readers/fci_l2_nc.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index 91dab92c48..3f4209ec55 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -119,9 +119,9 @@ def _add_flag_values_and_meamings(filename,key,variable): enum = dataType.enum_dict flag_values = [] flag_meanings = [] - for item in enumerate(enum): - flag_values.append(item[0]) - flag_meanings.append(item[1]) + for meaning, value in enum.items(): + flag_values.append(value) + flag_meanings.append(meaning) variable.attrs["flag_values"] = flag_values variable.attrs["flag_meanings"] = flag_meanings From 6d888bea044fa063e00b90bc80c4aab8e29f7004 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Mon, 29 Jan 2024 15:09:48 +0000 Subject: [PATCH 1084/1416] Fix code style. --- satpy/readers/fci_l2_nc.py | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index 3f4209ec55..899581b19a 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -101,8 +101,8 @@ def _set_attributes(self, variable, dataset_info, segmented=False): variable.attrs.update(self._get_global_attributes()) import_enum_information = dataset_info.get("import_enum_information", False) - if (import_enum_information): - variable = self._add_flag_values_and_meamings(self.filename,dataset_info["nc_key"], variable) + if import_enum_information: + variable = self._add_flag_values_and_meamings(self.filename, dataset_info["nc_key"], variable) if variable.attrs["units"] == "none": variable.attrs.update({"units": None}) @@ -110,13 +110,13 @@ def _set_attributes(self, variable, dataset_info, segmented=False): return variable @staticmethod - def _add_flag_values_and_meamings(filename,key,variable): - #"""Build flag values and meaning from enum datatype """ - netCDF4_dataset = netCDF4.Dataset(filename, "r") + def _add_flag_values_and_meamings(filename, key, variable): + """Build flag values and meaning from enum datatype.""" + nc_dataset = netCDF4.Dataset(filename, "r") # This currently assumes a flat netCDF file - dataType=netCDF4_dataset.variables[key].datatype - if (hasattr(dataType,"enum_dict")): - enum = dataType.enum_dict + data_type = nc_dataset.variables[key].datatype + if hasattr(data_type, "enum_dict"): + enum = data_type.enum_dict flag_values = [] flag_meanings = [] for meaning, value in enum.items(): @@ -125,7 +125,7 @@ def _add_flag_values_and_meamings(filename,key,variable): variable.attrs["flag_values"] = flag_values variable.attrs["flag_meanings"] = flag_meanings - netCDF4_dataset.close() + nc_dataset.close() return variable @@ -187,7 +187,6 @@ def __init__(self, filename, filename_info, filetype_info, with_area_definition= self._projection = self.nc["mtg_geos_projection"] self.multi_dims = {"maximum_number_of_layers": "layer", "number_of_vis_channels": "vis_channel_id"} - def get_area_def(self, key): """Return the area definition.""" try: 
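
For reference, the _add_flag_values_and_meanings helper reworked in the
first hunk above leans entirely on the mapping that netCDF4 exposes on an
enum variable's datatype. A standalone sketch, where the file path and the
variable name "cloud_phase" are hypothetical:

    import netCDF4

    # Illustrative file and variable names only.
    nc = netCDF4.Dataset("fci_l2_clm_granule.nc", "r")
    dtype = nc.variables["cloud_phase"].datatype
    if hasattr(dtype, "enum_dict"):
        # enum_dict maps meaning -> value, e.g. {"clear": 0, "water": 1}
        meanings, values = zip(*dtype.enum_dict.items())
    nc.close()
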
@@ -270,9 +269,9 @@ def _get_area_extent(self): area_extent_pixel_center = make_ext(ll_x, ur_x, ll_y, ur_y, h) # Shift area extent by half a pixel to get the area extent w.r.t. the dataset/pixel corners - scale_factor = (x[1:]-x[0:-1]).values.mean() + scale_factor = (x[1:] - x[0:-1]).values.mean() res = abs(scale_factor) * h - area_extent = tuple(i + res/2 if i > 0 else i - res/2 for i in area_extent_pixel_center) + area_extent = tuple(i + res / 2 if i > 0 else i - res / 2 for i in area_extent_pixel_center) return area_extent @@ -437,8 +436,10 @@ def _modify_area_extent(stand_area_extent): return area_extent + class FciL2NCAMVFileHandler(FciL2CommonFunctions, BaseFileHandler): """Reader class for FCI L2 AMV products in NetCDF4 format.""" + def __init__(self, filename, filename_info, filetype_info): """Open the NetCDF file with xarray and prepare for dataset reading.""" super().__init__(filename, filename_info, filetype_info) @@ -472,7 +473,7 @@ def _get_global_attributes(self): "spacecraft_name": self.spacecraft_name, "sensor": self.sensor_name, "platform_name": self.spacecraft_name, - "channel":self.filename_info["channel"] + "channel": self.filename_info["channel"] } return attributes From 82750f1a857cbd6ce763691c94cebd92660fddfe Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Mon, 29 Jan 2024 15:10:41 +0000 Subject: [PATCH 1085/1416] Use swap_dims instead of rename to rename dimensions. --- satpy/readers/fci_l2_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index 899581b19a..4187d133c5 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -89,7 +89,7 @@ def _set_attributes(self, variable, dataset_info, segmented=False): xdim, ydim = "number_of_columns", "number_of_rows" if dataset_info["nc_key"] not in ["product_quality", "product_completeness", "product_timeliness"]: - variable = variable.rename({ydim: "y", xdim: "x"}) + variable = variable.swap_dims({ydim: "y", xdim: "x"}) variable.attrs.setdefault("units", None) if "unit" in variable.attrs: From 07d258d68c7c5740450506b01272a0f71efe1be8 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 29 Jan 2024 14:25:58 -0600 Subject: [PATCH 1086/1416] Remove slstr_l2 reader in favor of ghrsst_l2 --- satpy/etc/readers/slstr_l2.yaml | 63 --------------------------------- satpy/readers/__init__.py | 4 ++- 2 files changed, 3 insertions(+), 64 deletions(-) delete mode 100644 satpy/etc/readers/slstr_l2.yaml diff --git a/satpy/etc/readers/slstr_l2.yaml b/satpy/etc/readers/slstr_l2.yaml deleted file mode 100644 index 7924cb198a..0000000000 --- a/satpy/etc/readers/slstr_l2.yaml +++ /dev/null @@ -1,63 +0,0 @@ -reader: - name: slstr_l2 - short_name: SLSTR l2 - long_name: Sentinel-3 SLSTR Level 2 data in netCDF format - description: NC Reader for Sentinel-3 SLSTR Level 2 data - status: defunct - supports_fsspec: false - sensors: [slstr_l2] - default_channels: [] - reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader - -file_types: - SLSTRB: - file_reader: !!python/name:satpy.readers.slstr_l2.SLSTRL2FileHandler - file_patterns: ['{start_time:%Y%m%d%H%M%S}-{generating_centre:3s}-{type_id:3s}_GHRSST-SSTskin-SLSTR{something:1s}-{end_time:%Y%m%d%H%M%S}-{version}.nc', - '{mission_id:3s}_SL_{processing_level:1s}_WST____{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3.tar'] - -datasets: - longitude: - 
name: longitude - resolution: 1000 - view: nadir - file_type: SLSTRB - standard_name: lon - units: degree - - latitude: - name: latitude - resolution: 1000 - view: nadir - file_type: SLSTRB - standard_name: lat - units: degree - - sea_surface_temperature: - name: sea_surface_temperature - sensor: slstr_l2 - coordinates: [longitude, latitude] - file_type: SLSTRB - resolution: 1000 - view: nadir - units: kelvin - standard_name: sea_surface_temperature - - sea_ice_fraction: - name: sea_ice_fraction - sensor: slstr_l2 - coordinates: [longitude, latitude] - file_type: SLSTRB - resolution: 1000 - view: nadir - units: "%" - standard_name: sea_ice_fraction - - # Quality estimation 0-5: no data, cloud, worst, low, acceptable, best - quality_level: - name: quality_level - sensor: slstr_l2 - coordinates: [longitude, latitude] - file_type: SLSTRB - resolution: 1000 - view: nadir - standard_name: quality_level diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index 21554ba465..ca131b101f 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -39,7 +39,9 @@ # Old Name -> New Name PENDING_OLD_READER_NAMES = {"fci_l1c_fdhsi": "fci_l1c_nc", "viirs_l2_cloud_mask_nc": "viirs_edr"} -OLD_READER_NAMES: dict[str, str] = {} +OLD_READER_NAMES: dict[str, str] = { + "slstr_l2": "ghrsst_l2", +} def group_files(files_to_sort, reader=None, time_threshold=10, From 9cf9397a118a1efecc16b9b16ba7f2a6e29ae7db Mon Sep 17 00:00:00 2001 From: Sauli Joro Date: Tue, 30 Jan 2024 07:58:34 +0100 Subject: [PATCH 1087/1416] Fix typo. --- satpy/readers/fci_l2_nc.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index 4187d133c5..8971eb4996 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -102,7 +102,7 @@ def _set_attributes(self, variable, dataset_info, segmented=False): import_enum_information = dataset_info.get("import_enum_information", False) if import_enum_information: - variable = self._add_flag_values_and_meamings(self.filename, dataset_info["nc_key"], variable) + variable = self._add_flag_values_and_meanings(self.filename, dataset_info["nc_key"], variable) if variable.attrs["units"] == "none": variable.attrs.update({"units": None}) @@ -110,7 +110,7 @@ def _set_attributes(self, variable, dataset_info, segmented=False): return variable @staticmethod - def _add_flag_values_and_meamings(filename, key, variable): + def _add_flag_values_and_meanings(filename, key, variable): """Build flag values and meaning from enum datatype.""" nc_dataset = netCDF4.Dataset(filename, "r") # This currently assumes a flat netCDF file From e2cf6eb931bf7ffdf8f0230971bb2a73580de286 Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Wed, 31 Jan 2024 11:34:13 -0600 Subject: [PATCH 1088/1416] removing extra lines in creating test content --- satpy/tests/reader_tests/test_clavrx_nc.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/satpy/tests/reader_tests/test_clavrx_nc.py b/satpy/tests/reader_tests/test_clavrx_nc.py index 5f3e82f1dc..3cb188d76c 100644 --- a/satpy/tests/reader_tests/test_clavrx_nc.py +++ b/satpy/tests/reader_tests/test_clavrx_nc.py @@ -98,7 +98,6 @@ def fake_test_content(filename, **kwargs): "L1B": "clavrx_H08_20210603_1500_B01_FLDK_R", } ) - variable2 = variable2.where(variable2 % 2 != 0, FILL_VALUE) # category @@ -126,7 +125,6 @@ def fake_test_content(filename, **kwargs): "var_flags": var_flags, "out_of_range_flags": out_of_range_flags, } - ds = xr.Dataset(ds_vars, attrs=attrs) ds = 
ds.assign_coords({"latitude": latitude, "longitude": longitude}) From e583b9ede2866e8147ac5a52c834466f3834237d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 1 Feb 2024 10:10:48 +0000 Subject: [PATCH 1089/1416] Bump codecov/codecov-action from 3 to 4 Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 3 to 4. - [Release notes](https://github.com/codecov/codecov-action/releases) - [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/codecov/codecov-action/compare/v3...v4) --- updated-dependencies: - dependency-name: codecov/codecov-action dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 8742864c59..587342912f 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -110,7 +110,7 @@ jobs: pytest --cov=satpy satpy/tests --cov-report=xml --cov-report= - name: Upload unittest coverage to Codecov - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v4 with: flags: unittests file: ./coverage.xml @@ -131,7 +131,7 @@ jobs: coverage xml - name: Upload behaviour test coverage to Codecov - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v4 with: flags: behaviourtests file: ./coverage.xml From 14fef9c89ca993a4b3c619cc80e1f0d07d36c271 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 1 Feb 2024 10:10:51 +0000 Subject: [PATCH 1090/1416] Bump actions/cache from 3 to 4 Bumps [actions/cache](https://github.com/actions/cache) from 3 to 4. - [Release notes](https://github.com/actions/cache/releases) - [Changelog](https://github.com/actions/cache/blob/main/RELEASES.md) - [Commits](https://github.com/actions/cache/compare/v3...v4) --- updated-dependencies: - dependency-name: actions/cache dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 8742864c59..429af3b0d9 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -51,7 +51,7 @@ jobs: CONDA_PREFIX=$(python -c "import sys; print(sys.prefix)") echo "CONDA_PREFIX=$CONDA_PREFIX" >> $GITHUB_ENV - - uses: actions/cache@v3 + - uses: actions/cache@v4 with: path: ${{ env.CONDA_PREFIX }} key: ${{ matrix.os }}-${{matrix.python-version}}-conda-${{ hashFiles('continuous_integration/environment.yaml') }}-${{ env.DATE }}-${{matrix.experimental}}-${{ env.CACHE_NUMBER }} From 8319b4f44206b1ec7a4f650e1b6cbfcaebeb5199 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 1 Feb 2024 14:48:09 +0200 Subject: [PATCH 1091/1416] Change time attribute averaging to min/max for start/end times --- satpy/composites/__init__.py | 10 ++++----- satpy/dataset/metadata.py | 18 ++++++++++----- satpy/tests/test_composites.py | 8 +++---- satpy/tests/test_dataset.py | 40 +++++++++++++++++++++++++--------- 4 files changed, 51 insertions(+), 25 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index a70bbea86f..a7411222aa 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1666,12 +1666,10 @@ def __call__(self, *args, **kwargs): img.attrs.pop("modifiers", None) img.attrs.pop("calibration", None) # Add start time if not present in the filename - if "start_time" not in img.attrs or not img.attrs["start_time"]: - import datetime as dt - img.attrs["start_time"] = dt.datetime.utcnow() - if "end_time" not in img.attrs or not img.attrs["end_time"]: - import datetime as dt - img.attrs["end_time"] = dt.datetime.utcnow() + if "start_time" not in img.attrs: + img.attrs["start_time"] = None + if "end_time" not in img.attrs: + img.attrs["end_time"] = None return img diff --git a/satpy/dataset/metadata.py b/satpy/dataset/metadata.py index 46f6f622b8..6da6f90bbb 100644 --- a/satpy/dataset/metadata.py +++ b/satpy/dataset/metadata.py @@ -27,7 +27,7 @@ from satpy.writers.utils import flatten_dict -def combine_metadata(*metadata_objects, average_times=True): +def combine_metadata(*metadata_objects): """Combine the metadata of two or more Datasets. If the values corresponding to any keys are not equal or do not @@ -53,7 +53,7 @@ def combine_metadata(*metadata_objects, average_times=True): shared_keys = _shared_keys(info_dicts) - return _combine_shared_info(shared_keys, info_dicts, average_times) + return _combine_shared_info(shared_keys, info_dicts) def _get_valid_dicts(metadata_objects): @@ -75,17 +75,25 @@ def _shared_keys(info_dicts): return reduce(set.intersection, key_sets) -def _combine_shared_info(shared_keys, info_dicts, average_times): +def _combine_shared_info(shared_keys, info_dicts): shared_info = {} for key in shared_keys: values = [info[key] for info in info_dicts] - if "time" in key and isinstance(values[0], datetime) and average_times: - shared_info[key] = average_datetimes(values) + if "time" in key and isinstance(values[0], datetime): + shared_info[key] = _combine_times(key, values) elif _are_values_combinable(values): shared_info[key] = values[0] return shared_info +def _combine_times(key, values): + if key == "end_time": + return max(values) + elif key == "start_time": + return min(values) + return average_datetimes(values) + + def average_datetimes(datetime_list): """Average a series of datetime objects. 
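
For illustration, a minimal sketch of the combination rules introduced in
the metadata.py hunk above, assuming only combine_metadata as patched in
this commit (the timestamps are made up):

    from datetime import datetime

    from satpy.dataset.metadata import combine_metadata

    combined = combine_metadata(
        {"start_time": datetime(2018, 2, 1, 12, 0), "end_time": datetime(2018, 2, 1, 12, 10)},
        {"start_time": datetime(2018, 2, 1, 12, 5), "end_time": datetime(2018, 2, 1, 12, 15)},
    )
    # start_time takes the earliest input, end_time the latest; any other
    # key containing "time" is still averaged via average_datetimes().
    assert combined["start_time"] == datetime(2018, 2, 1, 12, 0)
    assert combined["end_time"] == datetime(2018, 2, 1, 12, 15)
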
diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index b5d5a54b96..420f60efa2 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -1420,8 +1420,8 @@ def load(self, arg): filenames=["/foo.tif"]) register.assert_not_called() retrieve.assert_not_called() - assert "start_time" in res.attrs - assert "end_time" in res.attrs + assert res.attrs["start_time"] is None + assert res.attrs["end_time"] is None assert res.attrs["sensor"] is None assert "modifiers" not in res.attrs assert "calibration" not in res.attrs @@ -1434,8 +1434,8 @@ def load(self, arg): res = comp() Scene.assert_called_once_with(reader="generic_image", filenames=["data_dir/foo.tif"]) - assert "start_time" in res.attrs - assert "end_time" in res.attrs + assert res.attrs["start_time"] is None + assert res.attrs["end_time"] is None assert res.attrs["sensor"] is None assert "modifiers" not in res.attrs assert "calibration" not in res.attrs diff --git a/satpy/tests/test_dataset.py b/satpy/tests/test_dataset.py index 1b827b8dcf..1cf8f673ec 100644 --- a/satpy/tests/test_dataset.py +++ b/satpy/tests/test_dataset.py @@ -101,13 +101,28 @@ class TestCombineMetadata(unittest.TestCase): def setUp(self): """Set up the test case.""" - self.datetime_dts = ( + # The times need to be in ascending order (oldest first) + self.start_time_dts = ( {"start_time": datetime(2018, 2, 1, 11, 58, 0)}, {"start_time": datetime(2018, 2, 1, 11, 59, 0)}, {"start_time": datetime(2018, 2, 1, 12, 0, 0)}, {"start_time": datetime(2018, 2, 1, 12, 1, 0)}, {"start_time": datetime(2018, 2, 1, 12, 2, 0)}, ) + self.end_time_dts = ( + {"end_time": datetime(2018, 2, 1, 11, 58, 0)}, + {"end_time": datetime(2018, 2, 1, 11, 59, 0)}, + {"end_time": datetime(2018, 2, 1, 12, 0, 0)}, + {"end_time": datetime(2018, 2, 1, 12, 1, 0)}, + {"end_time": datetime(2018, 2, 1, 12, 2, 0)}, + ) + self.other_time_dts = ( + {"other_time": datetime(2018, 2, 1, 11, 58, 0)}, + {"other_time": datetime(2018, 2, 1, 11, 59, 0)}, + {"other_time": datetime(2018, 2, 1, 12, 0, 0)}, + {"other_time": datetime(2018, 2, 1, 12, 1, 0)}, + {"other_time": datetime(2018, 2, 1, 12, 2, 0)}, + ) def test_average_datetimes(self): """Test the average_datetimes helper function.""" @@ -122,18 +137,23 @@ def test_average_datetimes(self): ret = average_datetimes(dts) assert dts[2] == ret - def test_combine_times_with_averaging(self): - """Test the combine_metadata with times with averaging.""" + def test_combine_start_times(self): + """Test the combine_metadata with start times.""" + from satpy.dataset.metadata import combine_metadata + ret = combine_metadata(*self.start_time_dts) + assert ret["start_time"] == self.start_time_dts[0]["start_time"] + + def test_combine_end_times(self): + """Test the combine_metadata with end times.""" from satpy.dataset.metadata import combine_metadata - ret = combine_metadata(*self.datetime_dts) - assert self.datetime_dts[2]["start_time"] == ret["start_time"] + ret = combine_metadata(*self.end_time_dts) + assert ret["end_time"] == self.end_time_dts[-1]["end_time"] - def test_combine_times_without_averaging(self): - """Test the combine_metadata with times without averaging.""" + def test_combine_other_times(self): + """Test the combine_metadata with other time values than start or end times.""" from satpy.dataset.metadata import combine_metadata - ret = combine_metadata(*self.datetime_dts, average_times=False) - # times are not equal so don't include it in the final result - assert "start_time" not in ret + ret = 
combine_metadata(*self.other_time_dts) + assert ret["other_time"] == self.other_time_dts[2]["other_time"] def test_combine_arrays(self): """Test the combine_metadata with arrays.""" From 782ac687680696d648d84755a757e755fe6714e6 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 1 Feb 2024 14:53:33 +0200 Subject: [PATCH 1092/1416] Change time attribute averaging to min/max for start/end times --- satpy/composites/__init__.py | 10 ++++++---- satpy/tests/test_composites.py | 8 ++++---- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index a7411222aa..a70bbea86f 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1666,10 +1666,12 @@ def __call__(self, *args, **kwargs): img.attrs.pop("modifiers", None) img.attrs.pop("calibration", None) # Add start time if not present in the filename - if "start_time" not in img.attrs: - img.attrs["start_time"] = None - if "end_time" not in img.attrs: - img.attrs["end_time"] = None + if "start_time" not in img.attrs or not img.attrs["start_time"]: + import datetime as dt + img.attrs["start_time"] = dt.datetime.utcnow() + if "end_time" not in img.attrs or not img.attrs["end_time"]: + import datetime as dt + img.attrs["end_time"] = dt.datetime.utcnow() return img diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 420f60efa2..b5d5a54b96 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -1420,8 +1420,8 @@ def load(self, arg): filenames=["/foo.tif"]) register.assert_not_called() retrieve.assert_not_called() - assert res.attrs["start_time"] is None - assert res.attrs["end_time"] is None + assert "start_time" in res.attrs + assert "end_time" in res.attrs assert res.attrs["sensor"] is None assert "modifiers" not in res.attrs assert "calibration" not in res.attrs @@ -1434,8 +1434,8 @@ def load(self, arg): res = comp() Scene.assert_called_once_with(reader="generic_image", filenames=["data_dir/foo.tif"]) - assert res.attrs["start_time"] is None - assert res.attrs["end_time"] is None + assert "start_time" in res.attrs + assert "end_time" in res.attrs assert res.attrs["sensor"] is None assert "modifiers" not in res.attrs assert "calibration" not in res.attrs From cf98aa21f326bedbc0278ce20b4b267bc928f9ee Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 1 Feb 2024 15:11:15 +0200 Subject: [PATCH 1093/1416] Allow None as time value --- satpy/composites/__init__.py | 10 ++++------ satpy/dataset/metadata.py | 16 ++++++++++++---- satpy/tests/test_composites.py | 8 ++++---- satpy/tests/test_dataset.py | 26 ++++++++++++++++++++++++++ 4 files changed, 46 insertions(+), 14 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index a70bbea86f..a7411222aa 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1666,12 +1666,10 @@ def __call__(self, *args, **kwargs): img.attrs.pop("modifiers", None) img.attrs.pop("calibration", None) # Add start time if not present in the filename - if "start_time" not in img.attrs or not img.attrs["start_time"]: - import datetime as dt - img.attrs["start_time"] = dt.datetime.utcnow() - if "end_time" not in img.attrs or not img.attrs["end_time"]: - import datetime as dt - img.attrs["end_time"] = dt.datetime.utcnow() + if "start_time" not in img.attrs: + img.attrs["start_time"] = None + if "end_time" not in img.attrs: + img.attrs["end_time"] = None return img diff --git a/satpy/dataset/metadata.py 
b/satpy/dataset/metadata.py index 6da6f90bbb..6e95908f68 100644 --- a/satpy/dataset/metadata.py +++ b/satpy/dataset/metadata.py @@ -79,7 +79,7 @@ def _combine_shared_info(shared_keys, info_dicts): shared_info = {} for key in shared_keys: values = [info[key] for info in info_dicts] - if "time" in key and isinstance(values[0], datetime): + if "time" in key: shared_info[key] = _combine_times(key, values) elif _are_values_combinable(values): shared_info[key] = values[0] @@ -87,11 +87,19 @@ def _combine_shared_info(shared_keys, info_dicts): def _combine_times(key, values): + filtered_values = _filter_time_values(values) + if not filtered_values: + return values if key == "end_time": - return max(values) + return max(filtered_values) elif key == "start_time": - return min(values) - return average_datetimes(values) + return min(filtered_values) + return average_datetimes(filtered_values) + + +def _filter_time_values(values): + """Remove values that are not datetime objects.""" + return [v for v in values if isinstance(v, datetime)] def average_datetimes(datetime_list): diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index b5d5a54b96..420f60efa2 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -1420,8 +1420,8 @@ def load(self, arg): filenames=["/foo.tif"]) register.assert_not_called() retrieve.assert_not_called() - assert "start_time" in res.attrs - assert "end_time" in res.attrs + assert res.attrs["start_time"] is None + assert res.attrs["end_time"] is None assert res.attrs["sensor"] is None assert "modifiers" not in res.attrs assert "calibration" not in res.attrs @@ -1434,8 +1434,8 @@ def load(self, arg): res = comp() Scene.assert_called_once_with(reader="generic_image", filenames=["data_dir/foo.tif"]) - assert "start_time" in res.attrs - assert "end_time" in res.attrs + assert res.attrs["start_time"] is None + assert res.attrs["end_time"] is None assert res.attrs["sensor"] is None assert "modifiers" not in res.attrs assert "calibration" not in res.attrs diff --git a/satpy/tests/test_dataset.py b/satpy/tests/test_dataset.py index 1cf8f673ec..82b3a6c1cd 100644 --- a/satpy/tests/test_dataset.py +++ b/satpy/tests/test_dataset.py @@ -123,6 +123,20 @@ def setUp(self): {"other_time": datetime(2018, 2, 1, 12, 1, 0)}, {"other_time": datetime(2018, 2, 1, 12, 2, 0)}, ) + self.start_time_dts_with_none = ( + {"start_time": None}, + {"start_time": datetime(2018, 2, 1, 11, 59, 0)}, + {"start_time": datetime(2018, 2, 1, 12, 0, 0)}, + {"start_time": datetime(2018, 2, 1, 12, 1, 0)}, + {"start_time": datetime(2018, 2, 1, 12, 2, 0)}, + ) + self.end_time_dts_with_none = ( + {"end_time": datetime(2018, 2, 1, 11, 58, 0)}, + {"end_time": datetime(2018, 2, 1, 11, 59, 0)}, + {"end_time": datetime(2018, 2, 1, 12, 0, 0)}, + {"end_time": datetime(2018, 2, 1, 12, 1, 0)}, + {"end_time": None}, + ) def test_average_datetimes(self): """Test the average_datetimes helper function.""" @@ -149,6 +163,18 @@ def test_combine_end_times(self): ret = combine_metadata(*self.end_time_dts) assert ret["end_time"] == self.end_time_dts[-1]["end_time"] + def test_combine_start_times_with_none(self): + """Test the combine_metadata with start times when there's a None included.""" + from satpy.dataset.metadata import combine_metadata + ret = combine_metadata(*self.start_time_dts_with_none) + assert ret["start_time"] == self.start_time_dts_with_none[1]["start_time"] + + def test_combine_end_times_with_none(self): + """Test the combine_metadata with end times when there's a None 
included.""" + from satpy.dataset.metadata import combine_metadata + ret = combine_metadata(*self.end_time_dts_with_none) + assert ret["end_time"] == self.end_time_dts_with_none[-2]["end_time"] + def test_combine_other_times(self): """Test the combine_metadata with other time values than start or end times.""" from satpy.dataset.metadata import combine_metadata From ab904284a8dee3f667d3379bad77340c2ae791dd Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 1 Feb 2024 15:21:56 +0200 Subject: [PATCH 1094/1416] Update combine_metadata docstring --- satpy/dataset/metadata.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/satpy/dataset/metadata.py b/satpy/dataset/metadata.py index 6e95908f68..cf33dce222 100644 --- a/satpy/dataset/metadata.py +++ b/satpy/dataset/metadata.py @@ -32,15 +32,14 @@ def combine_metadata(*metadata_objects): If the values corresponding to any keys are not equal or do not exist in all provided dictionaries then they are not included in - the returned dictionary. By default any keys with the word 'time' - in them and consisting of datetime objects will be averaged. This - is to handle cases where data were observed at almost the same time - but not exactly. In the interest of time, lazy arrays are compared by - object identity rather than by their contents. + the returned dictionary. The 'start_time' values will be set to the + earliest value and 'end_time' to latest time. All other keys containing + the word 'time' are averaged. Before these adjustments, non-datetime + objects are filtered out. In the interest of time, lazy arrays are compared + by object identity rather than by their contents. Args: *metadata_objects: MetadataObject or dict objects to combine - average_times (bool): Average any keys with 'time' in the name Returns: dict: the combined metadata From 942be9c65755757a7940bfae815ccade898fba6e Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 1 Feb 2024 16:26:05 +0200 Subject: [PATCH 1095/1416] Do not include top-level non-time objects in shared_info as times --- satpy/dataset/metadata.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/satpy/dataset/metadata.py b/satpy/dataset/metadata.py index cf33dce222..1a52b6825f 100644 --- a/satpy/dataset/metadata.py +++ b/satpy/dataset/metadata.py @@ -79,7 +79,10 @@ def _combine_shared_info(shared_keys, info_dicts): for key in shared_keys: values = [info[key] for info in info_dicts] if "time" in key: - shared_info[key] = _combine_times(key, values) + times = _combine_times(key, values) + if times is None: + continue + shared_info[key] = times elif _are_values_combinable(values): shared_info[key] = values[0] return shared_info @@ -88,7 +91,7 @@ def _combine_shared_info(shared_keys, info_dicts): def _combine_times(key, values): filtered_values = _filter_time_values(values) if not filtered_values: - return values + return None if key == "end_time": return max(filtered_values) elif key == "start_time": From db2bf3182fa0769787529917e65739e88be9e591 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 1 Feb 2024 16:47:22 +0200 Subject: [PATCH 1096/1416] Remove combine_times kwarg from multiscene.stack and default to its default behaviour --- satpy/multiscene/_blend_funcs.py | 28 +++++++--------------- satpy/tests/multiscene_tests/test_blend.py | 15 ++++-------- 2 files changed, 13 insertions(+), 30 deletions(-) diff --git a/satpy/multiscene/_blend_funcs.py b/satpy/multiscene/_blend_funcs.py index 49869a0418..53b30e79d7 100644 --- 
a/satpy/multiscene/_blend_funcs.py +++ b/satpy/multiscene/_blend_funcs.py @@ -13,7 +13,6 @@ def stack( data_arrays: Sequence[xr.DataArray], weights: Optional[Sequence[xr.DataArray]] = None, - combine_times: bool = True, blend_type: str = "select_with_weights" ) -> xr.DataArray: """Combine a series of datasets in different ways. @@ -39,19 +38,18 @@ def stack( """ if weights: - return _stack_with_weights(data_arrays, weights, combine_times, blend_type) - return _stack_no_weights(data_arrays, combine_times) + return _stack_with_weights(data_arrays, weights, blend_type) + return _stack_no_weights(data_arrays) def _stack_with_weights( datasets: Sequence[xr.DataArray], weights: Sequence[xr.DataArray], - combine_times: bool, blend_type: str ) -> xr.DataArray: blend_func = _get_weighted_blending_func(blend_type) filled_weights = list(_fill_weights_for_invalid_dataset_pixels(datasets, weights)) - return blend_func(datasets, filled_weights, combine_times) + return blend_func(datasets, filled_weights) def _get_weighted_blending_func(blend_type: str) -> Callable: @@ -84,10 +82,9 @@ def _fill_weights_for_invalid_dataset_pixels( def _stack_blend_by_weights( datasets: Sequence[xr.DataArray], weights: Sequence[xr.DataArray], - combine_times: bool ) -> xr.DataArray: """Stack datasets blending overlap using weights.""" - attrs = _combine_stacked_attrs([data_arr.attrs for data_arr in datasets], combine_times) + attrs = _combine_stacked_attrs([data_arr.attrs for data_arr in datasets]) overlays = [] for weight, overlay in zip(weights, datasets): @@ -109,14 +106,13 @@ def _stack_blend_by_weights( def _stack_select_by_weights( datasets: Sequence[xr.DataArray], weights: Sequence[xr.DataArray], - combine_times: bool ) -> xr.DataArray: """Stack datasets selecting pixels using weights.""" indices = da.argmax(da.dstack(weights), axis=-1) if "bands" in datasets[0].dims: indices = [indices] * datasets[0].sizes["bands"] - attrs = _combine_stacked_attrs([data_arr.attrs for data_arr in datasets], combine_times) + attrs = _combine_stacked_attrs([data_arr.attrs for data_arr in datasets]) dims = datasets[0].dims coords = datasets[0].coords selected_array = xr.DataArray(da.choose(indices, datasets), dims=dims, coords=coords, attrs=attrs) @@ -125,7 +121,6 @@ def _stack_select_by_weights( def _stack_no_weights( datasets: Sequence[xr.DataArray], - combine_times: bool ) -> xr.DataArray: base = datasets[0].copy() collected_attrs = [base.attrs] @@ -136,20 +131,13 @@ def _stack_no_weights( except KeyError: base = base.where(data_arr.isnull(), data_arr) - attrs = _combine_stacked_attrs(collected_attrs, combine_times) + attrs = _combine_stacked_attrs(collected_attrs) base.attrs = attrs return base -def _combine_stacked_attrs(collected_attrs: Sequence[Mapping], combine_times: bool) -> dict: - attrs = combine_metadata(*collected_attrs) - if combine_times and ("start_time" in attrs or "end_time" in attrs): - new_start, new_end = _get_combined_start_end_times(collected_attrs) - if new_start: - attrs["start_time"] = new_start - if new_end: - attrs["end_time"] = new_end - return attrs +def _combine_stacked_attrs(collected_attrs: Sequence[Mapping]) -> dict: + return combine_metadata(*collected_attrs) def _get_combined_start_end_times(metadata_objects: Iterable[Mapping]) -> tuple[datetime | None, datetime | None]: diff --git a/satpy/tests/multiscene_tests/test_blend.py b/satpy/tests/multiscene_tests/test_blend.py index f9d7e35462..c964501225 100644 --- a/satpy/tests/multiscene_tests/test_blend.py +++ 
b/satpy/tests/multiscene_tests/test_blend.py @@ -245,10 +245,9 @@ def test_blend_two_scenes_bad_blend_type(self, multi_scene_and_weights, groups): ("select_with_weights", _get_expected_stack_select), ("blend_with_weights", _get_expected_stack_blend), ]) - @pytest.mark.parametrize("combine_times", [False, True]) def test_blend_two_scenes_using_stack_weighted(self, multi_scene_and_weights, groups, scene1_with_weights, scene2_with_weights, - combine_times, blend_func, exp_result_func): + blend_func, exp_result_func): """Test stacking two scenes using weights. Here we test that the start and end times can be combined so that they @@ -266,7 +265,7 @@ def test_blend_two_scenes_using_stack_weighted(self, multi_scene_and_weights, gr multi_scene.group(simple_groups) weights = [weights[0][0], weights[1][0]] - stack_func = partial(stack, weights=weights, blend_type=blend_func, combine_times=combine_times) + stack_func = partial(stack, weights=weights, blend_type=blend_func) weighted_blend = multi_scene.blend(blend_function=stack_func) expected = exp_result_func(scene1, scene2) @@ -275,12 +274,8 @@ def test_blend_two_scenes_using_stack_weighted(self, multi_scene_and_weights, gr np.testing.assert_allclose(result.data, expected.data) _check_stacked_metadata(result, "CloudType") - if combine_times: - assert result.attrs["start_time"] == datetime(2023, 1, 16, 11, 9, 17) - assert result.attrs["end_time"] == datetime(2023, 1, 16, 11, 28, 1, 900000) - else: - assert result.attrs["start_time"] == datetime(2023, 1, 16, 11, 11, 7, 250000) - assert result.attrs["end_time"] == datetime(2023, 1, 16, 11, 20, 11, 950000) + assert result.attrs["start_time"] == datetime(2023, 1, 16, 11, 9, 17) + assert result.attrs["end_time"] == datetime(2023, 1, 16, 11, 28, 1, 900000) @pytest.fixture() def datasets_and_weights(self): @@ -329,7 +324,7 @@ def test_blend_function_stack_weighted(self, datasets_and_weights, line, column) input_data["weights"][1][line, :] = 2 input_data["weights"][2][:, column] = 2 - stack_with_weights = partial(stack, weights=input_data["weights"], combine_times=False) + stack_with_weights = partial(stack, weights=input_data["weights"]) blend_result = stack_with_weights(input_data["datasets"][0:3]) ds1 = input_data["datasets"][0] From e0fe8450c2209c7de489e075299d057605a8898f Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 2 Feb 2024 08:58:51 +0200 Subject: [PATCH 1097/1416] Remove obsolete private function --- satpy/multiscene/_blend_funcs.py | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/satpy/multiscene/_blend_funcs.py b/satpy/multiscene/_blend_funcs.py index 53b30e79d7..7478648140 100644 --- a/satpy/multiscene/_blend_funcs.py +++ b/satpy/multiscene/_blend_funcs.py @@ -1,6 +1,5 @@ from __future__ import annotations -from datetime import datetime from typing import Callable, Iterable, Mapping, Optional, Sequence import pandas as pd @@ -140,18 +139,6 @@ def _combine_stacked_attrs(collected_attrs: Sequence[Mapping]) -> dict: return combine_metadata(*collected_attrs) -def _get_combined_start_end_times(metadata_objects: Iterable[Mapping]) -> tuple[datetime | None, datetime | None]: - """Get the start and end times attributes valid for the entire dataset series.""" - start_time = None - end_time = None - for md_obj in metadata_objects: - if "start_time" in md_obj and (start_time is None or md_obj["start_time"] < start_time): - start_time = md_obj["start_time"] - if "end_time" in md_obj and (end_time is None or md_obj["end_time"] > end_time): - end_time = md_obj["end_time"] - return 
start_time, end_time - - def timeseries(datasets): """Expand dataset with and concatenate by time dimension.""" expanded_ds = [] From f1170f66c702f7169e2f290d8c5ce8c70cdf8d5d Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Fri, 2 Feb 2024 10:37:12 -0600 Subject: [PATCH 1098/1416] Remove extra spaces --- satpy/etc/enhancements/generic.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 16daa2ff27..63778bd030 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -285,10 +285,10 @@ enhancements: 2, # Probably Cloudy 3, # Cloudy ], - 'colors': [[ 0, 0, 0], # black,-127 = Fill Value - [ 94, 79, 162], # blue, 0 = Clear - [ 73, 228, 242], # cyan, 1 = Probably Clear - [158, 1, 66], # red, 2 = Probably Cloudy + 'colors': [[0, 0, 0], # black,-127 = Fill Value + [94, 79, 162], # blue, 0 = Clear + [73, 228, 242], # cyan, 1 = Probably Clear + [158, 1, 66], # red, 2 = Probably Cloudy [255, 255, 255], # white, 3 = Cloudy ], 'color_scale': 255, From 48770828066f96ef201713a6d4da24e9a2aa393c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 10 Oct 2023 13:13:26 -0500 Subject: [PATCH 1099/1416] Update CI to test Python 3.12 --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 52952a6330..2716e7f792 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -18,10 +18,10 @@ jobs: fail-fast: true matrix: os: ["windows-latest", "ubuntu-latest", "macos-latest"] - python-version: ["3.9", "3.10", "3.11"] + python-version: ["3.9", "3.11", "3.12"] experimental: [false] include: - - python-version: "3.11" + - python-version: "3.12" os: "ubuntu-latest" experimental: true From 6e1342fffc6dd4e8e2c0478e486a3349221ff5ce Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 5 Feb 2024 11:21:09 +0200 Subject: [PATCH 1100/1416] Remove unnecessary setting of start/end time attributes --- satpy/composites/__init__.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index a7411222aa..7fc8bec92d 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1665,11 +1665,6 @@ def __call__(self, *args, **kwargs): img.attrs["mode"] = "".join(img.bands.data) img.attrs.pop("modifiers", None) img.attrs.pop("calibration", None) - # Add start time if not present in the filename - if "start_time" not in img.attrs: - img.attrs["start_time"] = None - if "end_time" not in img.attrs: - img.attrs["end_time"] = None return img From 0df041347b82e3a00c59947f8a2deb3b908829e6 Mon Sep 17 00:00:00 2001 From: Will Sharpe Date: Mon, 5 Feb 2024 16:05:32 +0000 Subject: [PATCH 1101/1416] Removed area_def and added colormaps in yaml --- satpy/etc/composites/viirs.yaml | 20 ++ satpy/etc/enhancements/generic.yaml | 252 +++++++++++++++++++--- satpy/etc/readers/viirs_l2.yaml | 14 +- satpy/readers/viirs_l2.py | 32 --- satpy/readers/yaml_reader.py | 15 +- satpy/tests/reader_tests/test_viirs_l2.py | 6 +- 6 files changed, 261 insertions(+), 78 deletions(-) diff --git a/satpy/etc/composites/viirs.yaml b/satpy/etc/composites/viirs.yaml index bebf6c5833..0c82948ee8 100644 --- a/satpy/etc/composites/viirs.yaml +++ b/satpy/etc/composites/viirs.yaml @@ -685,3 +685,23 @@ composites: - name: I01 - name: I03 standard_name: cimss_cloud_type + + cth: + description: > + VIIRS Cloud Top Height with colormap applied + 
compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - Cloud_Top_Height + standard_name: cth + + viirs_cloud_top_height: + description: > + Cloud Top Height composite from NOAA/SNPP + compositor: !!python/name:satpy.composites.MaskingCompositor + prerequisites: + - Cloud_Top_Height + - cth + conditions: + - method: isnan + transparency: 100 + standard_name: viirs_cloud_top_height diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 6ad3801a4b..f2c9dbd9c2 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -1231,18 +1231,80 @@ enhancements: min_stretch: [0,0,0] max_stretch: [1,1,1] - Cloud_Top_Height: - name: Cloud_Top_Height + viirs_cloud_top_height: + name: viirs_cloud_top_height operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - - { - filename: cth.txt, - min_value: 0, - max_value: 18000, - } + - { + colors: [ + [255, 0, 0], [255, 0, 1], [255, 1, 0], [255, 1, 1], [254, 0, 0], [254, 0, 1], + [254, 1, 0], [254, 1, 1], [254, 2, 1], [254, 2, 0], [254, 2, 2], [253, 0, 0], + [253, 0, 1], [253, 1, 0], [253, 1, 1], [253, 1, 2], [170, 0, 0], [170, 0, 1], + [170, 1, 0], [170, 1, 1], [171, 0, 0], [171, 0, 1], [171, 1, 0], [171, 1, 1], + [171, 2, 1], [171, 2, 0], [171, 2, 2], [172, 0, 0], [172, 0, 1], [172, 1, 0], + [172, 1, 1], [172, 1, 2], [110, 0, 0], [110, 0, 1], [110, 1, 0], [110, 1, 1], + [111, 0, 0], [111, 0, 1], [111, 1, 0], [111, 1, 1], [111, 2, 1], [111, 2, 0], + [111, 2, 2], [112, 0, 0], [112, 0, 1], [112, 1, 0], [112, 1, 1], [112, 1, 2], + [122, 90, 3], [122, 90, 4], [122, 91, 3], [122, 91, 4], [123, 90, 3], [123, 90, 4], + [123, 91, 3], [123, 91, 4], [123, 92, 4], [123, 92, 3], [123, 92, 5], [124, 90, 3], + [124, 90, 4], [124, 91, 3], [124, 91, 4], [124, 91, 5], [187, 136, 0], [187, 136, 1], + [187, 137, 0], [187, 137, 1], [188, 136, 0], [188, 136, 1], [188, 137, 0], + [188, 137, 1], [188, 138, 1], [188, 138, 0], [188, 138, 2], [189, 136, 0], + [189, 136, 1], [189, 137, 0], [189, 137, 1], [189, 137, 2], [240, 190, 64], + [240, 190, 65], [240, 191, 64], [240, 191, 65], [241, 190, 64], [241, 190, 65], + [241, 191, 64], [241, 191, 65], [241, 192, 65], [241, 192, 64], [241, 192, 66], + [242, 190, 64], [242, 190, 65], [242, 191, 64], [242, 191, 65], [242, 191, 66], + [255, 255, 0], [255, 255, 1], [255, 254, 0], [255, 254, 1], [254, 255, 0], + [254, 255, 1], [254, 254, 0], [254, 254, 1], [254, 253, 1], [254, 253, 0], + [254, 253, 2], [253, 255, 0], [253, 255, 1], [253, 254, 0], [253, 254, 1], + [253, 254, 2], [0, 220, 0], [0, 220, 1], [0, 221, 0], [0, 221, 1], [1, 220, 0], + [1, 220, 1], [1, 221, 0], [1, 221, 1], [1, 222, 1], [1, 222, 0], [1, 222, 2], + [2, 220, 0], [2, 220, 1], [2, 221, 0], [2, 221, 1], [2, 221, 2], [0, 136, 0], + [0, 136, 1], [0, 137, 0], [0, 137, 1], [1, 136, 0], [1, 136, 1], [1, 137, 0], + [1, 137, 1], [1, 138, 1], [1, 138, 0], [1, 138, 2], [2, 136, 0], [2, 136, 1], + [2, 137, 0], [2, 137, 1], [2, 137, 2], [0, 80, 0], [0, 80, 1], [0, 81, 0], + [0, 81, 1], [1, 80, 0], [1, 80, 1], [1, 81, 0], [1, 81, 1], [1, 82, 1], [1, 82, 0], + [1, 82, 2], [2, 80, 0], [2, 80, 1], [2, 81, 0], [2, 81, 1], [2, 81, 2], [0, 136, 238], + [0, 136, 239], [0, 137, 238], [0, 137, 239], [1, 136, 238], [1, 136, 239], + [1, 137, 238], [1, 137, 239], [1, 138, 239], [1, 138, 238], [1, 138, 240], + [2, 136, 238], [2, 136, 239], [2, 137, 238], [2, 137, 239], [2, 137, 240], + [0, 0, 255], [0, 0, 254], [0, 1, 255], [0, 1, 254], [1, 0, 255], [1, 0, 
254], + [1, 1, 255], [1, 1, 254], [1, 2, 254], [1, 2, 255], [1, 2, 253], [2, 0, 253], + [2, 0, 254], [2, 1, 253], [2, 1, 254], [2, 1, 255], [0, 0, 170], [0, 0, 171], + [0, 1, 170], [0, 1, 171], [1, 0, 170], [1, 0, 171], [1, 1, 170], [1, 1, 171], + [1, 2, 171], [1, 2, 170], [1, 2, 172], [2, 0, 170], [2, 0, 171], [2, 1, 170], + [2, 1, 171], [2, 1, 172], [0, 0, 100], [0, 0, 101], [0, 1, 100], [0, 1, 101], + [1, 0, 100], [1, 0, 101], [1, 1, 100], [1, 1, 101], [1, 2, 101], [1, 2, 100], + [1, 2, 102], [2, 0, 100], [2, 0, 101], [2, 1, 100], [2, 1, 101], [2, 1, 102], + [183, 15, 141], [183, 15, 142], [183, 16, 141], [183, 16, 142], [184, 15, 141], + [184, 15, 142], [184, 16, 141], [184, 16, 142], [184, 17, 142], [184, 17, 141], + [184, 17, 143], [185, 15, 141], [185, 15, 142], [185, 16, 141], [185, 16, 142], + [185, 16, 143], [102, 0, 119] + ], + values: [ + 0, 50, 100, 150, 200, 250, 300, 350, 400, 450, 500, 550, 600, 650, 700, 750, 800, + 850, 900, 950, 1000, 1050, 1100, 1150, 1200, 1250, 1300, 1350, 1400, 1450, 1500, + 1550, 1600, 1650, 1700, 1750, 1800, 1850, 1900, 1950, 2000, 2050, 2100, 2150, 2200, + 2250, 2300, 2350, 2400, 2450, 2500, 2550, 2600, 2650, 2700, 2750, 2800, 2850, 2900, + 2950, 3000, 3050, 3100, 3150, 3200, 3250, 3300, 3350, 3400, 3450, 3500, 3550, 3600, + 3650, 3700, 3750, 3800, 3850, 3900, 3950, 4000, 4050, 4100, 4150, 4200, 4250, 4300, + 4350, 4400, 4450, 4500, 4550, 4600, 4650, 4700, 4750, 4800, 4850, 4900, 4950, 5000, + 5050, 5100, 5150, 5200, 5250, 5300, 5350, 5400, 5450, 5500, 5550, 5600, 5650, 5700, + 5750, 5800, 5850, 5900, 5950, 6000, 6050, 6100, 6150, 6200, 6250, 6300, 6350, 6400, + 6450, 6500, 6550, 6600, 6650, 6700, 6750, 6800, 6850, 6900, 6950, 7000, 7050, 7100, + 7150, 7200, 7250, 7300, 7350, 7400, 7450, 7500, 7550, 7600, 7650, 7700, 7750, 7800, + 7850, 7900, 7950, 8000, 8050, 8100, 8150, 8200, 8250, 8300, 8350, 8400, 8450, 8500, + 8550, 8600, 8650, 8700, 8750, 8800, 8850, 8900, 8950, 9000, 9050, 9100, 9150, 9200, + 9250, 9300, 9350, 9400, 9450, 9500, 9550, 9600, 9650, 9700, 9750, 9800, 9850, 9900, + 9950, 10000, 10050, 10100, 10150, 10200, 10250, 10300, 10350, 10400, 10450, 10500, + 10550, 10600, 10650, 10700, 10750, 10800, 10850, 10900, 10950, 11000, 11050, 11100, + 11150, 11200, 11250, 11300, 11350, 11400, 11450, 11500, 11550, 11600, 11650, 11700, + 11750, 11800, 11850, 11900, 11950, 12000 + ], + } Clear_Sky_Confidence: name: Clear_Sky_Confidence @@ -1252,33 +1314,171 @@ enhancements: kwargs: palettes: - { - filename: csc.txt, - min_value: 0.0, - max_value: 1.0 - } - - - Aerosol_Optical_Thickness_550_Land_Ocean: - name: Aerosol_Optical_Thickness_550_Land_Ocean + colors: [ + [255, 247, 236], [254, 246, 233], [254, 244, 230], [254, 243, 228], [254, 242, 224], + [254, 241, 222], [254, 239, 219], [254, 239, 216], [254, 237, 213], [254, 236, 210], + [254, 235, 207], [254, 233, 204], [254, 232, 202], [253, 231, 198], [253, 230, 195], + [253, 228, 191], [253, 226, 189], [253, 225, 185], [253, 223, 181], [253, 221, 178], + [253, 220, 174], [253, 218, 172], [253, 216, 168], [253, 215, 165], [253, 213, 161], + [253, 211, 157], [253, 210, 156], [253, 207, 153], [253, 206, 152], [253, 203, 149], + [253, 202, 148], [253, 200, 145], [253, 198, 143], [253, 196, 141], [253, 193, 139], + [253, 192, 137], [253, 189, 134], [253, 188, 133], [252, 185, 130], [252, 182, 127], + [252, 177, 123], [252, 174, 120], [252, 170, 116], [252, 166, 112], [252, 163, 109], + [252, 159, 105], [252, 156, 103], [252, 151, 99], [252, 148, 96], [252, 144, 92], + [251, 140, 88], [250, 137, 87], [249, 
134, 86], [248, 131, 85], [247, 127, 83], + [246, 125, 82], [245, 121, 80], [244, 119, 79], [243, 115, 78], [242, 111, 76], + [241, 109, 75], [240, 105, 73], [239, 102, 72], [237, 98, 69], [236, 94, 67], + [234, 89, 63], [232, 86, 60], [230, 81, 57], [227, 76, 53], [226, 73, 50], [224, 68, 46], + [222, 65, 44], [220, 60, 40], [218, 56, 37], [216, 51, 33], [214, 46, 30], [211, 43, 28], + [208, 39, 25], [206, 36, 23], [202, 31, 20], [200, 28, 18], [197, 24, 15], [194, 21, 13], + [191, 16, 10], [188, 12, 7], [185, 9, 5], [182, 4, 3], [180, 1, 1], [175, 0, 0], + [172, 0, 0], [167, 0, 0], [164, 0, 0], [159, 0, 0], [154, 0, 0], [151, 0, 0], + [146, 0, 0], [143, 0, 0], [138, 0, 0], [135, 0, 0], [130, 0, 0], [127, 0, 0] + ], + values: [ + 0, 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.1, 0.11, 0.12, 0.13, 0.14, + 0.15, 0.16, 0.17, 0.18, 0.19, 0.2, 0.21, 0.22, 0.23, 0.24, 0.25, 0.26, 0.27, 0.28, + 0.29, 0.3, 0.31, 0.32, 0.33, 0.34, 0.35, 0.36, 0.37, 0.38, 0.39, 0.4, 0.41, 0.42, + 0.43, 0.44, 0.45, 0.46, 0.47, 0.48, 0.49, 0.5, 0.51, 0.52, 0.53, 0.54, 0.55, 0.56, + 0.57, 0.58, 0.59, 0.6, 0.61, 0.62, 0.63, 0.64, 0.65, 0.66, 0.67, 0.68, 0.69, 0.7, + 0.71, 0.72, 0.73, 0.74, 0.75, 0.76, 0.77, 0.78, 0.79, 0.8, 0.81, 0.82, 0.83, 0.84, + 0.85, 0.86, 0.87, 0.88, 0.89, 0.9, 0.91, 0.92, 0.93, 0.94, 0.95, 0.96, 0.97, 0.98, 0.99, 1 + ] + } + + Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate: + name: Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - - {filename: aod.txt, - min_value: 0.0, - max_value: 5.0, - } + - { + colors: [ + [255, 252, 199], [255, 251, 193], [255, 250, 188], [255, 249, 183], [255, 248, 178], + [255, 247, 173], [255, 246, 167], [255, 245, 162], [255, 244, 157], [255, 243, 152], + [255, 242, 147], [255, 240, 144], [255, 239, 141], [255, 238, 138], [255, 236, 135], + [255, 235, 132], [255, 234, 129], [255, 232, 126], [255, 231, 123], [255, 230, 120], + [255, 229, 118], [255, 227, 115], [255, 226, 113], [255, 225, 110], [255, 223, 108], + [255, 222, 106], [255, 221, 103], [255, 219, 101], [255, 218, 98], [255, 217, 96], + [255, 216, 94], [255, 214, 91], [255, 212, 89], [255, 210, 87], [255, 208, 85], + [255, 207, 83], [255, 205, 80], [255, 203, 78], [255, 201, 76], [255, 199, 74], + [255, 198, 72], [255, 195, 70], [255, 193, 68], [255, 190, 66], [255, 188, 64], + [255, 185, 62], [255, 183, 60], [255, 180, 58], [255, 178, 56], [255, 175, 54], + [255, 173, 53], [255, 170, 51], [255, 168, 50], [255, 165, 49], [255, 163, 47], + [255, 161, 46], [255, 158, 45], [255, 156, 43], [255, 153, 42], [255, 151, 41], + [255, 149, 40], [255, 146, 39], [255, 144, 38], [255, 142, 37], [255, 140, 37], + [255, 138, 36], [255, 135, 35], [255, 133, 35], [255, 131, 34], [255, 129, 33], + [255, 127, 33], [255, 124, 32], [255, 121, 31], [255, 118, 31], [255, 115, 30], + [255, 112, 30], [255, 109, 29], [255, 106, 28], [255, 103, 28], [255, 100, 27], + [255, 98, 27], [255, 94, 26], [255, 91, 25], [255, 88, 24], [255, 85, 24], [255, 82, 23], + [255, 78, 22], [255, 75, 22], [255, 72, 21], [255, 69, 20], [255, 66, 20], [254, 63, 19], + [253, 60, 19], [252, 58, 18], [251, 55, 18], [250, 53, 18], [249, 50, 17], [248, 47, 17], + [247, 45, 16], [246, 42, 16], [245, 40, 16], [243, 38, 15], [242, 36, 15], [240, 34, 14], + [239, 32, 14], [238, 30, 13], [236, 28, 13], [235, 26, 12], [233, 24, 12], [232, 22, 11], + [231, 20, 11], [229, 18, 11], [227, 17, 11], [225, 16, 11], [223, 14, 11], [221, 13, 11], + [219, 
12, 11], [217, 10, 11], [215, 9, 11], [213, 8, 11], [211, 7, 12], [208, 6, 12], + [206, 5, 12], [204, 4, 12], [201, 4, 12], [199, 3, 13], [197, 2, 13], [194, 2, 13], + [192, 1, 13], [190, 0, 13], [188, 0, 14], [184, 0, 14], [181, 0, 14], [178, 0, 14], + [174, 0, 14], [171, 0, 14], [168, 0, 14], [164, 0, 14], [161, 0, 14], [158, 0, 14], + [155, 0, 14], [152, 0, 14], [149, 0, 14], [146, 0, 14], [143, 0, 14], [140, 0, 14], + [137, 0, 14], [134, 0, 14], [131, 0, 14], [128, 0, 14], [125, 0, 14] + ], + values: [ + 0, 0.005, 0.01, 0.015, 0.02, 0.025, 0.03, 0.035, 0.04, 0.045, 0.05, 0.055, 0.06, + 0.065, 0.07, 0.075, 0.08, 0.085, 0.09, 0.095, 0.1, 0.105, 0.11, 0.115, 0.12, 0.125, + 0.13, 0.135, 0.14, 0.145, 0.15, 0.155, 0.16, 0.165, 0.17, 0.175, 0.18, 0.185, 0.19, + 0.195, 0.2, 0.205, 0.21, 0.215, 0.22, 0.225, 0.23, 0.235, 0.24, 0.245, 0.25, 0.255, + 0.26, 0.265, 0.27, 0.275, 0.28, 0.285, 0.29, 0.295, 0.3, 0.305, 0.31, 0.315, 0.32, + 0.325, 0.33, 0.335, 0.34, 0.345, 0.35, 0.355, 0.36, 0.365, 0.37, 0.375, 0.38, 0.385, + 0.39, 0.395, 0.4, 0.405, 0.41, 0.415, 0.42, 0.425, 0.43, 0.435, 0.44, 0.445, 0.45, + 0.455, 0.46, 0.465, 0.47, 0.475, 0.48, 0.485, 0.49, 0.495, 0.5, 0.505, 0.51, 0.515, + 0.52, 0.525, 0.53, 0.535, 0.54, 0.545, 0.55, 0.555, 0.56, 0.565, 0.57, 0.575, 0.58, + 0.585, 0.59, 0.595, 0.6, 0.605, 0.61, 0.615, 0.62, 0.625, 0.63, 0.635, 0.64, 0.645, + 0.65, 0.655, 0.66, 0.665, 0.67, 0.675, 0.68, 0.685, 0.69, 0.695, 0.7, 1.13, 1.56, + 1.99, 2.42, 2.85, 3.28, 3.71, 4.14, 4.57, 5 + ], + } - Angstrom_Exponent_Land_Ocean: - name: Angstrom_Exponent_Land_Ocean + Angstrom_Exponent_Land_Ocean_Best_Estimate: + name: Angstrom_Exponent_Land_Ocean_Best_Estimate operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - - { - filename: aelo.txt, - min_value: -0.5, - max_value: 2.5, - } + - { + colors: [ + [122, 145, 2], [123, 148, 3], [124, 150, 4], [124, 153, 5], [125, 155, 6], + [126, 158, 7], [127, 160, 8], [127, 163, 9], [128, 165, 10], [129, 168, 11], + [130, 170, 12], [130, 173, 13], [131, 175, 14], [132, 178, 15], [133, 181, 16], + [132, 183, 18], [132, 185, 20], [132, 187, 22], [132, 189, 25], [132, 191, 27], + [132, 193, 29], [132, 195, 31], [131, 197, 34], [131, 199, 36], [131, 201, 38], + [131, 203, 40], [131, 205, 43], [131, 207, 45], [131, 209, 47], [131, 212, 50], + [130, 213, 51], [129, 215, 53], [128, 217, 55], [128, 219, 57], [127, 221, 59], + [126, 222, 61], [125, 224, 63], [125, 226, 64], [124, 228, 66], [123, 230, 68], + [122, 231, 70], [122, 233, 72], [121, 235, 74], [120, 237, 76], [120, 239, 78], + [119, 239, 79], [118, 240, 80], [117, 241, 82], [116, 242, 83], [116, 243, 85], + [115, 244, 86], [114, 245, 87], [113, 246, 89], [112, 247, 90], [112, 248, 92], + [111, 249, 93], [110, 250, 94], [109, 251, 96], [108, 252, 97], [108, 253, 99], + [107, 252, 100], [106, 252, 102], [106, 252, 103], [105, 251, 105], [105, 251, 106], + [104, 251, 108], [103, 251, 109], [103, 250, 111], [102, 250, 112], [102, 250, 114], + [101, 250, 115], [100, 249, 117], [100, 249, 118], [99, 249, 120], [99, 249, 122], + [98, 247, 123], [97, 246, 124], [96, 245, 126], [95, 244, 127], [94, 243, 128], + [93, 242, 130], [92, 241, 131], [92, 239, 132], [91, 238, 134], [90, 237, 135], + [89, 236, 136], [88, 235, 138], [87, 234, 139], [86, 233, 140], [86, 232, 142], + [85, 230, 143], [84, 229, 144], [83, 228, 145], [82, 226, 147], [81, 225, 148], + [80, 224, 149], [79, 223, 150], [78, 221, 152], [77, 220, 153], [76, 219, 154], + [75, 218, 155], [74, 216, 157], [73, 215, 158], 
[72, 214, 159], [72, 213, 161], + [71, 211, 162], [70, 209, 163], [69, 208, 164], [68, 206, 165], [67, 205, 166], + [66, 203, 167], [65, 201, 168], [64, 200, 170], [63, 198, 171], [62, 197, 172], + [61, 195, 173], [60, 193, 174], [59, 192, 175], [58, 190, 176], [58, 189, 178], + [58, 187, 178], [58, 185, 179], [58, 184, 180], [58, 182, 181], [58, 181, 182], + [58, 179, 183], [58, 178, 184], [59, 176, 184], [59, 175, 185], [59, 173, 186], + [59, 172, 187], [59, 170, 188], [59, 169, 189], [59, 167, 190], [60, 166, 191], + [60, 164, 191], [61, 162, 192], [61, 160, 193], [62, 158, 194], [63, 156, 195], + [63, 154, 195], [64, 152, 196], [64, 150, 197], [65, 148, 198], [66, 146, 199], + [66, 144, 199], [67, 142, 200], [67, 140, 201], [68, 138, 202], [69, 137, 203], + [69, 135, 203], [70, 133, 204], [70, 131, 205], [71, 129, 205], [72, 128, 206], + [72, 126, 207], [73, 124, 207], [73, 122, 208], [74, 120, 209], [75, 119, 209], + [75, 117, 210], [76, 115, 211], [76, 113, 211], [77, 111, 212], [78, 110, 213], + [78, 108, 213], [79, 106, 214], [80, 104, 214], [80, 102, 215], [81, 101, 216], + [82, 99, 216], [82, 97, 217], [83, 95, 217], [84, 93, 218], [84, 92, 219], + [85, 90, 219], [86, 88, 220], [86, 86, 220], [87, 84, 221], [88, 83, 222], + [88, 82, 222], [89, 81, 223], [90, 80, 223], [91, 80, 224], [92, 79, 224], + [93, 78, 225], [94, 77, 225], [95, 77, 226], [96, 76, 226], [97, 75, 227], + [98, 74, 227], [99, 74, 228], [100, 73, 228], [101, 72, 229], [102, 72, 230], + [104, 72, 230], [106, 73, 230], [108, 73, 230], [110, 74, 231], [112, 74, 231], + [114, 75, 231], [116, 75, 231], [118, 76, 232], [120, 76, 232], [122, 77, 232], + [124, 77, 232], [126, 78, 233], [128, 78, 233], [130, 79, 233], [133, 80, 234], + [135, 80, 234], [137, 80, 234], [139, 81, 234], [141, 81, 234], [143, 81, 234], + [145, 82, 234], [147, 82, 234], [149, 82, 234], [151, 83, 234], [153, 83, 234], + [155, 83, 234], [157, 84, 234], [159, 84, 234], [161, 84, 234], [164, 85, 235], + [165, 85, 235], [166, 85, 235], [168, 85, 235], [169, 85, 235], [171, 85, 235], + [172, 85, 235], [174, 85, 235], [175, 86, 235], [177, 86, 235], [178, 86, 235], + [180, 86, 235], [181, 86, 235], [183, 86, 235], [184, 86, 235], [186, 87, 235], + [187, 87, 234], [188, 87, 234], [190, 87, 234], [191, 88, 234], [193, 88, 234], + [194, 88, 234], [196, 88, 234], [197, 89, 234], [199, 89, 234], [200, 89, 234], [202, 89, 234] + ], + values: [ + 0, 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.1, 0.11, + 0.12, 0.13, 0.14, 0.15, 0.16, 0.17, 0.18, 0.19, 0.2, 0.21, 0.22, 0.23, 0.24, + 0.25, 0.26, 0.27, 0.28, 0.29, 0.3, 0.31, 0.32, 0.33, 0.34, 0.35, 0.36, 0.37, + 0.38, 0.39, 0.4, 0.41, 0.42, 0.43, 0.44, 0.45, 0.46, 0.47, 0.48, 0.49, 0.5, + 0.51, 0.52, 0.53, 0.54, 0.55, 0.56, 0.57, 0.58, 0.59, 0.6, 0.61, 0.62, 0.63, + 0.64, 0.65, 0.66, 0.67, 0.68, 0.69, 0.7, 0.71, 0.72, 0.73, 0.74, 0.75, 0.76, + 0.77, 0.78, 0.79, 0.8, 0.81, 0.82, 0.83, 0.84, 0.85, 0.86, 0.87, 0.88, 0.89, + 0.9, 0.91, 0.92, 0.93, 0.94, 0.95, 0.96, 0.97, 0.98, 0.99, 1, 1.01, 1.02, 1.03, + 1.04, 1.05, 1.06, 1.07, 1.08, 1.09, 1.1, 1.11, 1.12, 1.13, 1.14, 1.15, 1.16, 1.17, + 1.18, 1.19, 1.2, 1.21, 1.22, 1.23, 1.24, 1.25, 1.26, 1.27, 1.28, 1.29, 1.3, 1.31, + 1.32, 1.33, 1.34, 1.35, 1.36, 1.37, 1.38, 1.39, 1.4, 1.41, 1.42, 1.43, 1.44, 1.45, + 1.46, 1.47, 1.48, 1.49, 1.5, 1.51, 1.52, 1.53, 1.54, 1.55, 1.56, 1.57, 1.58, 1.59, + 1.6, 1.61, 1.62, 1.63, 1.64, 1.65, 1.66, 1.67, 1.68, 1.69, 1.7, 1.71, 1.72, 1.73, + 1.74, 1.75, 1.76, 1.77, 1.78, 1.79, 1.8, 1.81, 1.82, 1.83, 1.84, 1.85, 1.86, 1.87, + 
1.88, 1.89, 1.9, 1.91, 1.92, 1.93, 1.94, 1.95, 1.96, 1.97, 1.98, 1.99, 2, 2.01, + 2.02, 2.03, 2.04, 2.05, 2.06, 2.07, 2.08, 2.09, 2.1, 2.11, 2.12, 2.13, 2.14, 2.15, + 2.16, 2.17, 2.18, 2.19, 2.2, 2.21, 2.22, 2.23, 2.24, 2.25, 2.26, 2.27, 2.28, 2.29, + 2.3, 2.31, 2.32, 2.33, 2.34, 2.35, 2.36, 2.37, 2.38, 2.39, 2.4, 2.41, 2.42, 2.43, + 2.44, 2.45, 2.46, 2.47, 2.48, 2.49, 2.5 + ], + } diff --git a/satpy/etc/readers/viirs_l2.yaml b/satpy/etc/readers/viirs_l2.yaml index b8f403917f..d0fd419fb1 100644 --- a/satpy/etc/readers/viirs_l2.yaml +++ b/satpy/etc/readers/viirs_l2.yaml @@ -91,7 +91,6 @@ datasets: long_name: VIIRS Clear Sky Confidence units: None coordinates: [cld_lon, cld_lat] - resolution: 742 file_key: geophysical_data/Clear_Sky_Confidence file_type: cldmsk_l2_viirs @@ -104,27 +103,24 @@ datasets: long_name: Cloud Top Height from NOAA CLAVR-x AWG algorithm units: m coordinates: [cld_lon,cld_lat] - resolution: 742 file_key: geophysical_data/Cloud_Top_Height file_type: cldprop_l2_viirs ########################################## # Datasets in files aerdb_l2_viirs and nrt ########################################## - Angstrom_Exponent_Land_Ocean: - name: Angstrom_Exponent_Land_Ocean + Angstrom_Exponent_Land_Ocean_Best_Estimate: + name: Angstrom_Exponent_Land_Ocean_Best_Estimate long_name: Deep Blue/SOAR Angstrom exponent over land and ocean units: None coordinates: [aerdb_lon,aerdb_lat] - resolution: 742 - file_key: Angstrom_Exponent_Land_Ocean + file_key: Angstrom_Exponent_Land_Ocean_Best_Estimate file_type: [aerdb_l2_viirs, aerdb_l2_viirs_nrt] Aerosol_Optical_Thickness_550_Land_Ocean: - name: Aerosol_Optical_Thickness_550_Land_Ocean + name: Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate long_name: Deep Blue/SOAR aerosol optical thickness at 550 nm over land and ocean units: None coordinates: [aerdb_lon,aerdb_lat] - resolution: 742 - file_key: Aerosol_Optical_Thickness_550_Land_Ocean + file_key: Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate file_type: [aerdb_l2_viirs, aerdb_l2_viirs_nrt] diff --git a/satpy/readers/viirs_l2.py b/satpy/readers/viirs_l2.py index 0aa2cb4add..72c75a962b 100644 --- a/satpy/readers/viirs_l2.py +++ b/satpy/readers/viirs_l2.py @@ -5,7 +5,6 @@ from satpy.readers.netcdf_utils import NetCDF4FileHandler import xarray as xr -from pyresample.geometry import AreaDefinition LOG = logging.getLogger(__name__) @@ -165,34 +164,3 @@ def get_dataset(self, ds_id: int, ds_info: str) -> xr.DataArray: if "number_of_lines" in data.dims: data = data.rename({"number_of_lines": "y", "number_of_pixels": "x"}) return data - - def get_area_def(self, ds_id): - """Get area definition.""" - proj_dict = { - "proj": "latlong", - "datum": "WGS84", - "ellps": "WGS84", - "no_defs": True - } - - area_extent = [self["/attr/geospatial_lon_min"], self["/attr/geospatial_lat_min"], - self["/attr/geospatial_lon_max"], self["/attr/geospatial_lat_max"]] - - if '/dimension/number_of_pixels' in self: - width = int(self['/dimension/number_of_pixels']) - height = int(self['/dimension/number_of_lines']) - else: - width = int(self['/dimension/Idx_Xtrack']) - height = int(self['/dimension/Idx_Atrack']) - - area = AreaDefinition( - "viirs_l2_area", - "name_of_proj", - "id_of_proj", - proj_dict, - width, - height, - np.asarray(area_extent) - ) - - return area diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py index ee6d82a961..29aaaf0955 100644 --- a/satpy/readers/yaml_reader.py +++ b/satpy/readers/yaml_reader.py @@ -1011,14 +1011,13 @@ def _set_orientation(dataset, 
upper_right_corner): "and will not be flipped.".format(dataset.attrs.get("name", "unknown_name"))) return dataset - if dataset.attrs['area'].area_id != 'viirs_l2_area': - projection_type = _get_projection_type(dataset.attrs["area"]) - accepted_geos_proj_types = ["Geostationary Satellite (Sweep Y)", "Geostationary Satellite (Sweep X)"] - if projection_type not in accepted_geos_proj_types: - logger.info("Dataset {} is not in one of the known geostationary projections {} " - "and cannot be flipped.".format(dataset.attrs.get("name", "unknown_name"), - accepted_geos_proj_types)) - return dataset + projection_type = _get_projection_type(dataset.attrs["area"]) + accepted_geos_proj_types = ["Geostationary Satellite (Sweep Y)", "Geostationary Satellite (Sweep X)"] + if projection_type not in accepted_geos_proj_types: + logger.info("Dataset {} is not in one of the known geostationary projections {} " + "and cannot be flipped.".format(dataset.attrs.get("name", "unknown_name"), + accepted_geos_proj_types)) + return dataset target_eastright, target_northup = _get_target_scene_orientation(upper_right_corner) diff --git a/satpy/tests/reader_tests/test_viirs_l2.py b/satpy/tests/reader_tests/test_viirs_l2.py index e514ecdc1f..90590a39b2 100644 --- a/satpy/tests/reader_tests/test_viirs_l2.py +++ b/satpy/tests/reader_tests/test_viirs_l2.py @@ -61,8 +61,8 @@ def _fill_contents_with_default_data(self, file_content, file_type): elif file_type == "AERDB_": file_content["Latitude"] = DEFAULT_LAT_DATA file_content["Longitude"] = DEFAULT_LON_DATA - file_content["Angstrom_Exponent_Land_Ocean"] = DEFAULT_FILE_DATA - file_content["Aerosol_Optical_Thickness_550_Land_Ocean"] = DEFAULT_FILE_DATA + file_content["Angstrom_Exponent_Land_Ocean_Best_Estimate"] = DEFAULT_FILE_DATA + file_content["Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate"] = DEFAULT_FILE_DATA class TestVIIRSL2FileHandler: @@ -114,7 +114,7 @@ def test_load_aerdb(self): ) r.create_filehandlers(loadables) datasets = r.load( - ["Aerosol_Optical_Thickness_550_Land_Ocean", "Angstrom_Exponent_Land_Ocean"] + ["Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate", "Angstrom_Exponent_Land_Ocean_Best_Estimate"] ) assert len(datasets) == 2 for d in datasets.values(): From 99b707a48d5ce3488fe591469ec3db97e1b87f8f Mon Sep 17 00:00:00 2001 From: Will Sharpe Date: Mon, 5 Feb 2024 16:32:27 +0000 Subject: [PATCH 1102/1416] Added my name to authors list --- AUTHORS.md | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS.md b/AUTHORS.md index 796ee9743b..d976cf318f 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -88,3 +88,4 @@ The following people have made contributions to this project: - [Xin Zhang (zxdawn)](https://github.com/zxdawn) - [Yufei Zhu (yufeizhu600)](https://github.com/yufeizhu600) - [Youva Aoun (YouvaEUMex)](https://github.com/YouvaEUMex) +- [Will Sharpe (wjsharpe)](https://github.com/wjsharpe) From df86728c3754f523a15a12a1e7a2221b1477d459 Mon Sep 17 00:00:00 2001 From: Will Sharpe Date: Mon, 5 Feb 2024 16:39:28 +0000 Subject: [PATCH 1103/1416] Removed whitespace from viirs enhancements --- satpy/etc/enhancements/viirs.yaml | 3 --- 1 file changed, 3 deletions(-) diff --git a/satpy/etc/enhancements/viirs.yaml b/satpy/etc/enhancements/viirs.yaml index 62198c4de3..8b3751167d 100644 --- a/satpy/etc/enhancements/viirs.yaml +++ b/satpy/etc/enhancements/viirs.yaml @@ -79,6 +79,3 @@ enhancements: ], min_value: 0, max_value: 201} - - - From ed65d84ae804dcdb281348fec3a354a0bb318213 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" 
<66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 5 Feb 2024 20:02:28 +0000 Subject: [PATCH 1104/1416] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.1.9 → v0.2.0](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.9...v0.2.0) - [github.com/PyCQA/bandit: 1.7.6 → 1.7.7](https://github.com/PyCQA/bandit/compare/1.7.6...1.7.7) --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 37c458982a..4a90da5ce9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,7 +3,7 @@ fail_fast: false repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.1.9' + rev: 'v0.2.0' hooks: - id: ruff - repo: https://github.com/pre-commit/pre-commit-hooks @@ -14,7 +14,7 @@ repos: - id: check-yaml args: [--unsafe] - repo: https://github.com/PyCQA/bandit - rev: '1.7.6' # Update me! + rev: '1.7.7' # Update me! hooks: - id: bandit args: [--ini, .bandit] From 13168cfcce132c3fd5fdaecfd1d29eafef32ec32 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 5 Feb 2024 22:16:53 +0200 Subject: [PATCH 1105/1416] Remove assertions that don't hold when not loading an actual image --- satpy/tests/test_composites.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 420f60efa2..c913d49045 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -1420,8 +1420,6 @@ def load(self, arg): filenames=["/foo.tif"]) register.assert_not_called() retrieve.assert_not_called() - assert res.attrs["start_time"] is None - assert res.attrs["end_time"] is None assert res.attrs["sensor"] is None assert "modifiers" not in res.attrs assert "calibration" not in res.attrs @@ -1434,8 +1432,6 @@ def load(self, arg): res = comp() Scene.assert_called_once_with(reader="generic_image", filenames=["data_dir/foo.tif"]) - assert res.attrs["start_time"] is None - assert res.attrs["end_time"] is None assert res.attrs["sensor"] is None assert "modifiers" not in res.attrs assert "calibration" not in res.attrs From 1a8a7edfb41385ab284a636782e02f602849e02c Mon Sep 17 00:00:00 2001 From: Will Sharpe Date: Mon, 5 Feb 2024 20:20:01 +0000 Subject: [PATCH 1106/1416] restored yaml_reader and added coordinate check to viirs_l2 --- satpy/etc/readers/viirs_l2.yaml | 16 ++++++++-------- satpy/readers/viirs_l2.py | 8 ++++++++ satpy/readers/yaml_reader.py | 8 ++++++-- 3 files changed, 22 insertions(+), 10 deletions(-) diff --git a/satpy/etc/readers/viirs_l2.yaml b/satpy/etc/readers/viirs_l2.yaml index d0fd419fb1..9ba23a17ee 100644 --- a/satpy/etc/readers/viirs_l2.yaml +++ b/satpy/etc/readers/viirs_l2.yaml @@ -42,42 +42,42 @@ file_types: datasets: cld_lon: name: cld_lon - resolution: + resolution: 1000 file_type: [cldmsk_l2_viirs, cldprop_l2_viirs] file_key: geolocation_data/longitude units: degrees standard_name: longitude cld_lat: name: cld_lat - resolution: + resolution: 1000 file_type: [cldmsk_l2_viirs, cldprop_l2_viirs] file_key: geolocation_data/latitude units: degrees standard_name: latitude aerdb_lon: name: aerdb_lon - resolution: + resolution: 1000 file_type: [aerdb_l2_viirs, aerdb_l2_viirs_nrt] file_key: Longitude units: degrees - standard_name: Longitude + standard_name: longitude aerdb_lat: name: aerdb_lat - resolution: + resolution: 1000 file_type: [aerdb_l2_viirs, 
aerdb_l2_viirs_nrt]
     file_key: Latitude
     units: degrees
-    standard_name: Latitude
+    standard_name: latitude
   aerdt_lon:
     name: aerdt_lon
-    resolution:
+    resolution: 1000
     file_type: [aerdt_l2_viirs]
     file_key: longitude
     units: degrees
     standard_name: longitude
   aerdt_lat:
     name: aerdt_lat
-    resolution:
+    resolution: 1000
     file_type: [aerdt_l2_viirs]
     file_key: latitude
     units: degrees
     standard_name: latitude
diff --git a/satpy/readers/viirs_l2.py b/satpy/readers/viirs_l2.py
index 72c75a962b..ce242bedbc 100644
--- a/satpy/readers/viirs_l2.py
+++ b/satpy/readers/viirs_l2.py
@@ -150,6 +150,14 @@ def get_dataset(self, ds_id: int, ds_info: str) -> xr.DataArray:
             scale_offset,
         ) = self._get_dataset_valid_range(ds_id, ds_info, var_path)
         data = self[var_path]
+
+        # For aerdb Longitude and Latitude datasets have coordinates
+        # This check is needed to work with yaml_reader
+        if 'long_name' in metadata and metadata['long_name'] == 'Longitude':
+            data.coords['Latitude'].attrs['standard_name'] = 'latitude'
+        elif 'long_name' in metadata and metadata['long_name'] == 'Latitude':
+            data.coords['Longitude'].attrs['standard_name'] = 'longitude'
+
         data.attrs.update(metadata)
         if valid_min is not None and valid_max is not None:
             data = data.where((data >= valid_min) & (data <= valid_max))
diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py
index 29aaaf0955..5444d7e16f 100644
--- a/satpy/readers/yaml_reader.py
+++ b/satpy/readers/yaml_reader.py
@@ -261,6 +261,7 @@ def select_files_from_pathnames(self, filenames):
         """Select the files from *filenames* this reader can handle."""
         selected_filenames = []
         filenames = set(filenames)  # make a copy of the inputs
+
         for pattern in self.file_patterns:
             matching = _match_filenames(filenames, pattern)
             filenames -= matching
@@ -492,6 +493,7 @@ def _new_filehandler_instances(self, filetype_info, filename_items, fh_kwargs=No
         """Generate new filehandler instances."""
         requirements = filetype_info.get("requires")
         filetype_cls = filetype_info["file_reader"]
+
         if fh_kwargs is None:
             fh_kwargs = {}
@@ -507,6 +509,7 @@ def _new_filehandler_instances(self, filetype_info, filename_items, fh_kwargs=No
             except RuntimeError as err:
                 warnings.warn(str(err) + " for {}".format(filename), stacklevel=4)
                 continue
+
             yield filetype_cls(filename, filename_info, filetype_info, *req_fh, **fh_kwargs)

     def time_matches(self, fstart, fend):
@@ -783,9 +786,9 @@ def _get_lons_lats_from_coords(self, coords):
         """Get lons and lats from the coords list."""
         lons, lats = None, None
         for coord in coords:
-            if coord.attrs.get("standard_name").lower() == "longitude":
+            if coord.attrs.get("standard_name") == "longitude":
                 lons = coord
-            elif coord.attrs.get("standard_name").lower() == "latitude":
+            elif coord.attrs.get("standard_name") == "latitude":
                 lats = coord
         if lons is None or lats is None:
             raise ValueError("Missing longitude or latitude coordinate: " + str(coords))
@@ -823,6 +826,7 @@ def _load_dataset_with_area(self, dsid, coords, **kwargs):
             return None

         coords = self._assign_coords_from_dataarray(coords, ds)
+
         area = self._load_dataset_area(dsid, file_handlers, coords, **kwargs)

         if area is not None:

From f7e75708c9e024d0be56ecf5f6a13cc1ee72f174 Mon Sep 17 00:00:00 2001
From: Panu Lahtinen
Date: Mon, 5 Feb 2024 22:58:54 +0200
Subject: [PATCH 1107/1416] Combine also values of 'time_parameters' dictionary items
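Values inside a 'time_parameters' dictionary are now combined with the same
earliest-start/latest-end rules as the top-level time keys. A minimal sketch
of the intended behaviour (illustrative only, not part of the patch; the
sub-dictionary keys are made up for the example):

    from datetime import datetime
    from satpy.dataset.metadata import combine_metadata

    mda1 = {"time_parameters": {"nominal_start_time": datetime(2024, 2, 5, 12, 0),
                                "nominal_end_time": datetime(2024, 2, 5, 12, 10)}}
    mda2 = {"time_parameters": {"nominal_start_time": datetime(2024, 2, 5, 12, 10),
                                "nominal_end_time": datetime(2024, 2, 5, 12, 20)}}
    combined = combine_metadata(mda1, mda2)
    # Keys containing "start_time" keep the earliest value, keys containing
    # "end_time" the latest one:
    # combined["time_parameters"]["nominal_start_time"] -> 2024-02-05 12:00
    # combined["time_parameters"]["nominal_end_time"]   -> 2024-02-05 12:20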
---
 satpy/dataset/metadata.py | 16 ++++++++++++++--
 1 file changed, 14 insertions(+), 2 deletions(-)

diff --git a/satpy/dataset/metadata.py b/satpy/dataset/metadata.py
index 1a52b6825f..3b6e835b0d 100644
--- a/satpy/dataset/metadata.py
+++ b/satpy/dataset/metadata.py
@@ -89,16 +89,28 @@ def _combine_shared_info(shared_keys, info_dicts):
 
 
 def _combine_times(key, values):
+    if key == "time_parameters":
+        return _combine_time_parameters(values)
     filtered_values = _filter_time_values(values)
     if not filtered_values:
         return None
-    if key == "end_time":
+    if "end_time" in key:
         return max(filtered_values)
-    elif key == "start_time":
+    elif "start_time" in key:
         return min(filtered_values)
     return average_datetimes(filtered_values)
 
 
+def _combine_time_parameters(values):
+    # Assume the first item has all the keys
+    keys = values[0].keys()
+    res = {}
+    for key in keys:
+        sub_values = [itm[key] for itm in values]
+        res[key] = _combine_times(key, sub_values)
+    return res
+
+
 def _filter_time_values(values):
     """Remove values that are not datetime objects."""
     return [v for v in values if isinstance(v, datetime)]

From 4d3d62b052b87f71653300956657ab213a6ed7ea Mon Sep 17 00:00:00 2001
From: David Hoese
Date: Mon, 5 Feb 2024 15:08:24 -0600
Subject: [PATCH 1108/1416] Fix ruff configuration deprecations

---
 pyproject.toml | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 4de1e302f4..537f70cabb 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -14,19 +14,21 @@ known_first_party = "satpy"
 line_length = 120
 
 [tool.ruff]
+line-length = 120
+
+[tool.ruff.lint]
 # See https://docs.astral.sh/ruff/rules/
 # In the future, add "B", "S", "N"
 select = ["A", "D", "E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"]
-line-length = 120
 
-[tool.ruff.per-file-ignores]
+[tool.ruff.lint.per-file-ignores]
 "satpy/tests/*" = ["S101"]  # assert allowed in tests
 "utils/coord2area_def.py" = ["T201"]  # allow print
 "fetch_avhrr_calcoeffs.py" = ["T201"]  # allow print
 
-[tool.ruff.pydocstyle]
+[tool.ruff.lint.pydocstyle]
 convention = "google"
 
-[tool.ruff.mccabe]
+[tool.ruff.lint.mccabe]
 # Unlike Flake8, default to a complexity level of 10.
max-complexity = 10

From 55d7ed20202e84a8e84d5103a26c50ec9e19d299 Mon Sep 17 00:00:00 2001
From: David Hoese
Date: Mon, 5 Feb 2024 15:09:37 -0600
Subject: [PATCH 1109/1416] Remove unnecessarily escaped quote characters

---
 satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py | 2 +-
 satpy/writers/awips_tiled.py                     | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py b/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py
index 9bf5f5f093..f90da00613 100644
--- a/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py
+++ b/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py
@@ -330,7 +330,7 @@ def _get_global_attrs(self, num_rows, num_cols):
             "/attr/Platform_LongName": "Haiyang 2B Ocean Observing Satellite",
             "/attr/Platform_ShortName": "HY-2B",
             "/attr/Platform_Type": "spacecraft",
-            "/attr/Producer_Agency": "Ministry of Natural Resources of the People\'s Republic of China",
+            "/attr/Producer_Agency": "Ministry of Natural Resources of the People's Republic of China",
             "/attr/Producer_Institution": "NSOAS",
             "/attr/Production_Date_Time": "20200326T06:23:10",
             "/attr/Range_Beginning_Time": "20200326T01:11:07",
diff --git a/satpy/writers/awips_tiled.py b/satpy/writers/awips_tiled.py
index 03ce3e9d68..8fe9a8d2cc 100644
--- a/satpy/writers/awips_tiled.py
+++ b/satpy/writers/awips_tiled.py
@@ -1824,7 +1824,7 @@ def main():
     group_2.add_argument("--letters", dest="lettered_grid", action="store_true",
                          help="Create tiles from a static letter-based grid based on the product projection")
     group_2.add_argument("--letter-subtiles", nargs=2, type=int, default=(2, 2),
-                         help="Specify number of subtiles in each lettered tile: \'row col\'")
+                         help="Specify number of subtiles in each lettered tile: 'row col'")
     group_2.add_argument("--output-pattern", default=DEFAULT_OUTPUT_PATTERN,
                          help="output filenaming pattern")
     group_2.add_argument("--source-name", default="SSEC",

From 4e85a8f611c208a8d4911a7021ec7f343784c40c Mon Sep 17 00:00:00 2001
From: Panu Lahtinen
Date: Tue, 6 Feb 2024 11:47:01 +0200
Subject: [PATCH 1110/1416] Separate value combination to a new function

---
 satpy/dataset/metadata.py | 18 +++++++++++-------
 1 file changed, 11 insertions(+), 7 deletions(-)

diff --git a/satpy/dataset/metadata.py b/satpy/dataset/metadata.py
index 3b6e835b0d..561580e2d9 100644
--- a/satpy/dataset/metadata.py
+++ b/satpy/dataset/metadata.py
@@ -78,16 +78,20 @@ def _combine_shared_info(shared_keys, info_dicts):
     shared_info = {}
     for key in shared_keys:
         values = [info[key] for info in info_dicts]
-        if "time" in key:
-            times = _combine_times(key, values)
-            if times is None:
-                continue
-            shared_info[key] = times
-        elif _are_values_combinable(values):
-            shared_info[key] = values[0]
+        _combine_values(key, values, shared_info)
     return shared_info
 
 
+def _combine_values(key, values, shared_info):
+    if "time" in key:
+        times = _combine_times(key, values)
+        if times is None:
+            return
+        shared_info[key] = times
+    elif _are_values_combinable(values):
+        shared_info[key] = values[0]
+

From cb13af7326617e4565038c32b98be77d2f9f3cad Mon Sep 17 00:00:00 2001
From: Will Sharpe
Date: Tue, 6 Feb 2024 14:36:59 +0000
Subject: [PATCH 1111/1416] Updated reader and tests based on ruff linting
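Besides the lint fixes this adds the module docstring in the diff below
describing the supported layers. A minimal usage sketch (the file name is
borrowed from the tests and assumed to exist locally; actual loading is
untested here):

    from satpy import Scene

    scn = Scene(filenames=["AERDB_L2_VIIRS_SNPP.A2023364.2230.011.2023365113427.nc"],
                reader="viirs_l2")
    scn.load(["Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate"])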
---
 satpy/readers/viirs_l2.py                 | 23 ++++++++++++++++++-----
 satpy/tests/reader_tests/test_viirs_l2.py |  9 ++++++---
 2 files changed, 24 insertions(+), 8 deletions(-)

diff --git a/satpy/readers/viirs_l2.py b/satpy/readers/viirs_l2.py
index ce242bedbc..59d3c8ea1f 100644
--- a/satpy/readers/viirs_l2.py
+++ b/satpy/readers/viirs_l2.py
@@ -1,15 +1,27 @@
+"""Interface to the VIIRS L2 format.
+
+This reader implements support for L2 files generated with the VIIRS instrument on board the SNPP and NOAA satellites.
+The intent of this reader is to be able to reproduce images from L2 layers in NASA Worldview with identical colormaps.
+
+Currently a subset of four of these layers is supported:
+1. Deep Blue Aerosol Angstrom Exponent (Land and Ocean)
+2. Clear Sky Confidence
+3. Cloud Top Height
+4. Deep Blue Aerosol Optical Thickness (Land and Ocean)
+"""
 import logging
 from datetime import datetime
 
 import numpy as np
+import xarray as xr
 
 from satpy.readers.netcdf_utils import NetCDF4FileHandler
-import xarray as xr
 
 LOG = logging.getLogger(__name__)
 
 
 class VIIRSL2FileHandler(NetCDF4FileHandler):
+    """NetCDF File Handler for VIIRS L2 Products."""
     def _parse_datetime(self, datestr):
         """Parse datetime."""
         return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%S.000Z")
@@ -141,6 +153,7 @@ def available_datasets(self, configured_datasets=None):
         yield ft_matches and is_in_file, ds_info
 
     def get_dataset(self, ds_id: int, ds_info: str) -> xr.DataArray:
+        """Get DataArray for specified dataset."""
         var_path = ds_info.get("file_key", ds_info["name"])
         metadata = self.get_metadata(ds_id, ds_info)
         (
@@ -153,10 +166,10 @@ def get_dataset(self, ds_id: int, ds_info: str) -> xr.DataArray:
         data = self[var_path]
         # For aerdb Longitude and Latitude datasets have coordinates
         # This check is needed to work with yaml_reader
-        if 'long_name' in metadata and metadata['long_name'] == 'Longitude':
-            data.coords['Latitude'].attrs['standard_name'] = 'latitude'
-        elif 'long_name' in metadata and metadata['long_name'] == 'Latitude':
-            data.coords['Longitude'].attrs['standard_name'] = 'longitude'
+        if "long_name" in metadata and metadata["long_name"] == "Longitude":
+            data.coords["Latitude"].attrs["standard_name"] = "latitude"
+        elif "long_name" in metadata and metadata["long_name"] == "Latitude":
+            data.coords["Longitude"].attrs["standard_name"] = "longitude"
 
         data.attrs.update(metadata)
         if valid_min is not None and valid_max is not None:
diff --git a/satpy/tests/reader_tests/test_viirs_l2.py b/satpy/tests/reader_tests/test_viirs_l2.py
index 90590a39b2..dbef1d55ee 100644
--- a/satpy/tests/reader_tests/test_viirs_l2.py
+++ b/satpy/tests/reader_tests/test_viirs_l2.py
@@ -1,3 +1,4 @@
+"""Module for testing the satpy.readers.viirs_l2 module."""
 import os
 from datetime import datetime, timedelta
 from unittest import mock
@@ -5,9 +6,9 @@
 import numpy as np
 import pytest
 
+from satpy.readers import load_reader
 from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler
 from satpy.tests.utils import convert_file_content_to_data_array
-from satpy.readers import load_reader
 
 DEFAULT_FILE_DTYPE = np.uint16
 DEFAULT_FILE_SHAPE = (10, 300)
@@ -66,8 +67,7 @@ def _fill_contents_with_default_data(self, file_content, file_type):
 
 
 class TestVIIRSL2FileHandler:
-    """Test VIIRS_L2 Reader"""
-
+    """Test VIIRS_L2 Reader."""
     yaml_file = "viirs_l2.yaml"
 
     def setup_method(self):
@@ -105,35 +106,38 @@ def test_init(self, filename):
         assert r.file_handlers
 
     def test_load_aerdb(self):
+        """Test Aerdb File Loading."""
         r = load_reader(self.reader_configs)
         loadables = r.select_files_from_pathnames(
             ["AERDB_L2_VIIRS_SNPP.A2023364.2230.011.2023365113427.nc"]
         )
         r.create_filehandlers(loadables)
         datasets = r.load(
             ["Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate", "Angstrom_Exponent_Land_Ocean_Best_Estimate"]
         )
         assert len(datasets) == 2
         for d in datasets.values():
             assert d.shape == DEFAULT_FILE_SHAPE
             assert d.dims == ("y", "x")
             assert d.attrs["sensor"] == "viirs"
 
     def test_load_cldprop(self):
+        """Test CLDPROP File
Loading.""" r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames( ["CLDPROP_L2_VIIRS_SNPP.A2023364.2230.011.2023365115856.nc"] @@ -136,6 +138,7 @@ def test_load_cldprop(self): assert d.attrs["sensor"] == "viirs" def test_load_cldmsk(self): + """Test CLDMSK File Loading.""" r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames( ["CLDMSK_L2_VIIRS_SNPP.A2023364.2230.001.2023365105952.nc"] From ecab531eff04ecf63e0d5fe068203475c79ecd54 Mon Sep 17 00:00:00 2001 From: Will Sharpe Date: Tue, 6 Feb 2024 14:59:51 +0000 Subject: [PATCH 1112/1416] Cleaned up method inputs and imports based on mypy --- satpy/readers/viirs_l2.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/satpy/readers/viirs_l2.py b/satpy/readers/viirs_l2.py index 59d3c8ea1f..7aa816709d 100644 --- a/satpy/readers/viirs_l2.py +++ b/satpy/readers/viirs_l2.py @@ -13,7 +13,6 @@ from datetime import datetime import numpy as np -import xarray as xr from satpy.readers.netcdf_utils import NetCDF4FileHandler @@ -73,7 +72,7 @@ def sensor_name(self): """Get sensor name.""" return self["/attr/instrument"].lower() - def _get_dataset_file_units(self, dataset_id, ds_info, var_path): + def _get_dataset_file_units(self, ds_info, var_path): file_units = ds_info.get("units") if file_units is None: file_units = self.get(var_path + "/attr/units") @@ -81,7 +80,7 @@ def _get_dataset_file_units(self, dataset_id, ds_info, var_path): file_units = "1" return file_units - def _get_dataset_valid_range(self, dataset_id, ds_info, var_path): + def _get_dataset_valid_range(self, ds_info, var_path): valid_min = self.get(var_path + "/attr/valid_min") valid_max = self.get(var_path + "/attr/valid_max") if not valid_min and not valid_max: @@ -152,7 +151,7 @@ def available_datasets(self, configured_datasets=None): is_in_file = var_path in self yield ft_matches and is_in_file, ds_info - def get_dataset(self, ds_id: int, ds_info: str) -> xr.DataArray: + def get_dataset(self, ds_id, ds_info): """Get DataArray for specified dataset.""" var_path = ds_info.get("file_key", ds_info["name"]) metadata = self.get_metadata(ds_id, ds_info) From 6c6ded64949f4a7cf97e5154b708e4ba110b5963 Mon Sep 17 00:00:00 2001 From: Will Sharpe Date: Tue, 6 Feb 2024 15:13:14 +0000 Subject: [PATCH 1113/1416] Removed yaml trailing whitespace --- satpy/etc/composites/viirs.yaml | 4 ++-- satpy/etc/enhancements/generic.yaml | 2 +- satpy/etc/readers/viirs_l2.yaml | 32 ++++++++++++++--------------- 3 files changed, 19 insertions(+), 19 deletions(-) diff --git a/satpy/etc/composites/viirs.yaml b/satpy/etc/composites/viirs.yaml index 0c82948ee8..09fc358a6b 100644 --- a/satpy/etc/composites/viirs.yaml +++ b/satpy/etc/composites/viirs.yaml @@ -685,7 +685,7 @@ composites: - name: I01 - name: I03 standard_name: cimss_cloud_type - + cth: description: > VIIRS Cloud Top Height with colormap applied @@ -702,6 +702,6 @@ composites: - Cloud_Top_Height - cth conditions: - - method: isnan + - method: isnan transparency: 100 standard_name: viirs_cloud_top_height diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index f2c9dbd9c2..e37c0d69ce 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -1399,7 +1399,7 @@ enhancements: 1.99, 2.42, 2.85, 3.28, 3.71, 4.14, 4.57, 5 ], } - + Angstrom_Exponent_Land_Ocean_Best_Estimate: name: Angstrom_Exponent_Land_Ocean_Best_Estimate operations: diff --git a/satpy/etc/readers/viirs_l2.yaml b/satpy/etc/readers/viirs_l2.yaml index 
9ba23a17ee..26ac735726 100644 --- a/satpy/etc/readers/viirs_l2.yaml +++ b/satpy/etc/readers/viirs_l2.yaml @@ -3,7 +3,7 @@ reader: short_name: VIIRS L2 long_name: SNPP VIIRS Level 2 data in netCDF4 format description: Generic NASA VIIRS L2 Reader - status: Alpha + status: Alpha supports_fsspec: false reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader sensors: [viirs] @@ -12,32 +12,32 @@ reader: file_types: cldprop_l2_viirs: file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler - file_patterns: - - 'CLDPROP_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc' + file_patterns: + - 'CLDPROP_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc' cldmsk_l2_viirs: file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler - file_patterns: - - 'CLDMSK_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc' + file_patterns: + - 'CLDMSK_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc' aerdb_l2_viirs: file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler - file_patterns: - - 'AERDB_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc' + file_patterns: + - 'AERDB_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc' aerdb_l2_viirs_nrt: file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler - file_patterns: - - 'AERDB_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.nrt.nc' + file_patterns: + - 'AERDB_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.nrt.nc' cldir_l2_viirs: file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler - file_patterns: - - 'CLDIR_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf' + file_patterns: + - 'CLDIR_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf' aerdt_l2_viirs: file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler - file_patterns: - - 'AERDT_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc' + file_patterns: + - 'AERDT_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc' fsnrad_l2_viirs: file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler - file_patterns: - - 'FSNRAD_L2_VIIRS_CRIS_SS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc' + file_patterns: + - 'FSNRAD_L2_VIIRS_CRIS_SS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc' datasets: cld_lon: @@ -82,7 +82,7 @@ datasets: file_key: latitude units: degrees standard_name: latitude - + ################################## # Datasets in file cldmsk_l2_viirs ################################## From a3fb5950f2eece44099b82f070684746ce66f601 Mon Sep 17 00:00:00 2001 From: Will Sharpe Date: Tue, 6 Feb 2024 17:58:13 +0000 Subject: [PATCH 1114/1416] Updated units, param tests, fixed reader inputs --- satpy/etc/readers/viirs_l2.yaml | 6 +-- satpy/readers/viirs_l2.py | 5 +- satpy/tests/reader_tests/test_viirs_l2.py | 59 ++++++++--------------- 3 files changed, 25 insertions(+), 45 deletions(-) diff --git a/satpy/etc/readers/viirs_l2.yaml b/satpy/etc/readers/viirs_l2.yaml index 
26ac735726..b86a856e82 100644 --- a/satpy/etc/readers/viirs_l2.yaml +++ b/satpy/etc/readers/viirs_l2.yaml @@ -89,7 +89,7 @@ datasets: Clear_Sky_Confidence: name: Clear_Sky_Confidence long_name: VIIRS Clear Sky Confidence - units: None + units: "1" coordinates: [cld_lon, cld_lat] file_key: geophysical_data/Clear_Sky_Confidence file_type: cldmsk_l2_viirs @@ -112,7 +112,7 @@ datasets: Angstrom_Exponent_Land_Ocean_Best_Estimate: name: Angstrom_Exponent_Land_Ocean_Best_Estimate long_name: Deep Blue/SOAR Angstrom exponent over land and ocean - units: None + units: "1" coordinates: [aerdb_lon,aerdb_lat] file_key: Angstrom_Exponent_Land_Ocean_Best_Estimate file_type: [aerdb_l2_viirs, aerdb_l2_viirs_nrt] @@ -120,7 +120,7 @@ datasets: Aerosol_Optical_Thickness_550_Land_Ocean: name: Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate long_name: Deep Blue/SOAR aerosol optical thickness at 550 nm over land and ocean - units: None + units: "1" coordinates: [aerdb_lon,aerdb_lat] file_key: Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate file_type: [aerdb_l2_viirs, aerdb_l2_viirs_nrt] diff --git a/satpy/readers/viirs_l2.py b/satpy/readers/viirs_l2.py index 7aa816709d..77a8a4e697 100644 --- a/satpy/readers/viirs_l2.py +++ b/satpy/readers/viirs_l2.py @@ -95,7 +95,7 @@ def _get_dataset_valid_range(self, ds_info, var_path): def get_metadata(self, dataset_id, ds_info): """Get metadata.""" var_path = ds_info.get("file_key", ds_info["name"]) - file_units = self._get_dataset_file_units(dataset_id, ds_info, var_path) + file_units = self._get_dataset_file_units(ds_info, var_path) # Get extra metadata i = getattr(self[var_path], "attrs", {}) @@ -160,9 +160,8 @@ def get_dataset(self, ds_id, ds_info): valid_max, scale_factor, scale_offset, - ) = self._get_dataset_valid_range(ds_id, ds_info, var_path) + ) = self._get_dataset_valid_range(ds_info, var_path) data = self[var_path] - # For aerdb Longitude and Latitude datasets have coordinates # This check is needed to work with yaml_reader if "long_name" in metadata and metadata["long_name"] == "Longitude": diff --git a/satpy/tests/reader_tests/test_viirs_l2.py b/satpy/tests/reader_tests/test_viirs_l2.py index dbef1d55ee..cf57e54a70 100644 --- a/satpy/tests/reader_tests/test_viirs_l2.py +++ b/satpy/tests/reader_tests/test_viirs_l2.py @@ -10,7 +10,7 @@ from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler from satpy.tests.utils import convert_file_content_to_data_array -DEFAULT_FILE_DTYPE = np.uint16 +DEFAULT_FILE_DTYPE = np.float32 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange( DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE @@ -107,46 +107,27 @@ def test_init(self, filename): # make sure we have some files assert r.file_handlers - def test_load_aerdb(self): - """Test Aerdb File Loading.""" - r = load_reader(self.reader_configs) - loadables = r.select_files_from_pathnames( - ["AERDB_L2_VIIRS_SNPP.A2023364.2230.011.2023365113427.nc"] - ) - r.create_filehandlers(loadables) - datasets = r.load( - ["Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate", "Angstrom_Exponent_Land_Ocean_Best_Estimate"] - ) - assert len(datasets) == 2 - for d in datasets.values(): - assert d.shape == DEFAULT_FILE_SHAPE - assert d.dims == ("y", "x") - assert d.attrs["sensor"] == "viirs" - - def test_load_cldprop(self): - """Test CLDPROP File Loading.""" - r = load_reader(self.reader_configs) - loadables = r.select_files_from_pathnames( - ["CLDPROP_L2_VIIRS_SNPP.A2023364.2230.011.2023365115856.nc"] - ) - 
r.create_filehandlers(loadables) - datasets = r.load(["Cloud_Top_Height"]) - assert len(datasets) == 1 - for d in datasets.values(): - assert d.shape == DEFAULT_FILE_SHAPE - assert d.dims == ("y", "x") - assert d.attrs["sensor"] == "viirs" - - def test_load_cldmsk(self): - """Test CLDMSK File Loading.""" + @pytest.mark.parametrize( + ("filename", "datasets"), + [ + pytest.param("CLDPROP_L2_VIIRS_SNPP.A2023364.2230.011.2023365115856.nc",["Cloud_Top_Height"],id="CLDPROP"), + pytest.param("CLDMSK_L2_VIIRS_SNPP.A2023364.2230.001.2023365105952.nc",["Clear_Sky_Confidence"],id="CLDMSK"), + pytest.param("AERDB_L2_VIIRS_SNPP.A2023364.2230.011.2023365113427.nc", + ["Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate", + "Angstrom_Exponent_Land_Ocean_Best_Estimate"],id="AERDB"), + ], + ) + def test_load_l2_files(self,filename,datasets): + """Test L2 File Loading.""" r = load_reader(self.reader_configs) - loadables = r.select_files_from_pathnames( - ["CLDMSK_L2_VIIRS_SNPP.A2023364.2230.001.2023365105952.nc"] - ) + loadables = r.select_files_from_pathnames([filename]) r.create_filehandlers(loadables) - datasets = r.load(["Clear_Sky_Confidence"]) - assert len(datasets) == 1 - for d in datasets.values(): + loaded_datasets = r.load(datasets) + assert len(loaded_datasets) == len(datasets) + for d in loaded_datasets.values(): assert d.shape == DEFAULT_FILE_SHAPE assert d.dims == ("y", "x") assert d.attrs["sensor"] == "viirs" + d_np = d.compute() + assert d.dtype == d_np.dtype + assert d.dtype == np.float32 From c21d75c8cba59a1b911cf24ba24349a47752fd10 Mon Sep 17 00:00:00 2001 From: Will Sharpe Date: Tue, 6 Feb 2024 18:43:23 +0000 Subject: [PATCH 1115/1416] added ft_matches check --- satpy/readers/viirs_l2.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/satpy/readers/viirs_l2.py b/satpy/readers/viirs_l2.py index 77a8a4e697..4dc5c8beee 100644 --- a/satpy/readers/viirs_l2.py +++ b/satpy/readers/viirs_l2.py @@ -147,8 +147,11 @@ def available_datasets(self, configured_datasets=None): yield is_avail, ds_info continue ft_matches = self.file_type_matches(ds_info["file_type"]) - var_path = ds_info.get("file_key", ds_info["name"]) - is_in_file = var_path in self + if not ft_matches: + is_in_file = None + else: + var_path = ds_info.get("file_key", ds_info["name"]) + is_in_file = var_path in self yield ft_matches and is_in_file, ds_info def get_dataset(self, ds_id, ds_info): From 5faf2e389d56b2d477f3429cbc2bfee0308af5fe Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Wed, 7 Feb 2024 11:52:17 +0000 Subject: [PATCH 1116/1416] Make dataset name lower-case. 
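Renames the FCI L2 dataset to a lower-case name for consistency with the
other entries. A minimal loading sketch after the rename (the file name is
hypothetical; the reader name comes from fci_l2_nc.yaml):

    from satpy import Scene

    scn = Scene(filenames=["fci_l2_ct_example.nc"], reader="fci_l2_nc")
    scn.load(["quality_mtg_parameters"])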
---
 satpy/etc/readers/fci_l2_nc.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml
index b6056b2a65..ab91e592b5 100644
--- a/satpy/etc/readers/fci_l2_nc.yaml
+++ b/satpy/etc/readers/fci_l2_nc.yaml
@@ -115,7 +115,7 @@ datasets:
       import_enum_information: True
 
   quality_mtg_parameters:
-    name: quality_MTG_parameters
+    name: quality_mtg_parameters
     standard_name: status_flag
     resolution: 2000
     file_type: [nc_fci_clm, nc_fci_ct, nc_fci_ctth]

From 91dbc8e336af1c1830b6502f7accf488f1c90fd8 Mon Sep 17 00:00:00 2001
From: Panu Lahtinen
Date: Thu, 8 Feb 2024 13:19:10 +0200
Subject: [PATCH 1117/1416] Reword docstring of combine_metadata() to include time_parameters dict

---
 satpy/dataset/metadata.py | 14 +++++++++-----
 1 file changed, 9 insertions(+), 5 deletions(-)

diff --git a/satpy/dataset/metadata.py b/satpy/dataset/metadata.py
index 561580e2d9..3f1de02ed5 100644
--- a/satpy/dataset/metadata.py
+++ b/satpy/dataset/metadata.py
@@ -32,11 +32,15 @@ def combine_metadata(*metadata_objects):
 
     If the values corresponding to any keys are not equal or do not exist
     in all provided dictionaries then they are not included in
-    the returned dictionary. The 'start_time' values will be set to the
-    earliest value and 'end_time' to latest time. All other keys containing
-    the word 'time' are averaged. Before these adjustments, non-datetime
-    objects are filtered out. In the interest of time, lazy arrays are compared
-    by object identity rather than by their contents.
+    the returned dictionary. All values of the keys containing the substring
+    'start_time' will be set to the earliest value and similarly for 'end_time'
+    to the latest time. All other keys containing the word 'time' are averaged.
+    Before these adjustments, non-datetime objects are filtered out.
+
+    The same rules are applied to the 'time_parameters' dictionary.
+
+    In the interest of processing time, lazy arrays are compared by object
+    identity rather than by their contents.
Args: *metadata_objects: MetadataObject or dict objects to combine From c127ab7bf9f101dfb1336f3efc14bcf8fba090c1 Mon Sep 17 00:00:00 2001 From: Will Sharpe Date: Thu, 8 Feb 2024 21:18:37 +0000 Subject: [PATCH 1118/1416] moved colormap to file, removed composites, minor edits --- satpy/etc/colormaps/aerosol_thickness.txt | 151 +++++++++++ satpy/etc/colormaps/angstrom_exponent.txt | 251 +++++++++++++++++++ satpy/etc/colormaps/clear_sky_confidence.txt | 101 ++++++++ satpy/etc/composites/viirs.yaml | 20 -- satpy/etc/enhancements/generic.yaml | 247 ++---------------- satpy/etc/readers/viirs_l2.yaml | 19 +- satpy/readers/viirs_l2.py | 24 +- satpy/tests/reader_tests/test_viirs_l2.py | 10 +- 8 files changed, 550 insertions(+), 273 deletions(-) create mode 100644 satpy/etc/colormaps/aerosol_thickness.txt create mode 100644 satpy/etc/colormaps/angstrom_exponent.txt create mode 100644 satpy/etc/colormaps/clear_sky_confidence.txt diff --git a/satpy/etc/colormaps/aerosol_thickness.txt b/satpy/etc/colormaps/aerosol_thickness.txt new file mode 100644 index 0000000000..e1e6a02f45 --- /dev/null +++ b/satpy/etc/colormaps/aerosol_thickness.txt @@ -0,0 +1,151 @@ +0, 255, 252, 199 +0.005, 255, 251, 193 +0.01, 255, 250, 188 +0.015, 255, 249, 183 +0.02, 255, 248, 178 +0.025, 255, 247, 173 +0.03, 255, 246, 167 +0.035, 255, 245, 162 +0.04, 255, 244, 157 +0.045, 255, 243, 152 +0.05, 255, 242, 147 +0.055, 255, 240, 144 +0.06, 255, 239, 141 +0.065, 255, 238, 138 +0.07, 255, 236, 135 +0.075, 255, 235, 132 +0.08, 255, 234, 129 +0.085, 255, 232, 126 +0.09, 255, 231, 123 +0.095, 255, 230, 120 +0.1, 255, 229, 118 +0.105, 255, 227, 115 +0.11, 255, 226, 113 +0.115, 255, 225, 110 +0.12, 255, 223, 108 +0.125, 255, 222, 106 +0.13, 255, 221, 103 +0.135, 255, 219, 101 +0.14, 255, 218, 98 +0.145, 255, 217, 96 +0.15, 255, 216, 94 +0.155, 255, 214, 91 +0.16, 255, 212, 89 +0.165, 255, 210, 87 +0.17, 255, 208, 85 +0.175, 255, 207, 83 +0.18, 255, 205, 80 +0.185, 255, 203, 78 +0.19, 255, 201, 76 +0.195, 255, 199, 74 +0.2, 255, 198, 72 +0.205, 255, 195, 70 +0.21, 255, 193, 68 +0.215, 255, 190, 66 +0.22, 255, 188, 64 +0.225, 255, 185, 62 +0.23, 255, 183, 60 +0.235, 255, 180, 58 +0.24, 255, 178, 56 +0.245, 255, 175, 54 +0.25, 255, 173, 53 +0.255, 255, 170, 51 +0.26, 255, 168, 50 +0.265, 255, 165, 49 +0.27, 255, 163, 47 +0.275, 255, 161, 46 +0.28, 255, 158, 45 +0.285, 255, 156, 43 +0.29, 255, 153, 42 +0.295, 255, 151, 41 +0.3, 255, 149, 40 +0.305, 255, 146, 39 +0.31, 255, 144, 38 +0.315, 255, 142, 37 +0.32, 255, 140, 37 +0.325, 255, 138, 36 +0.33, 255, 135, 35 +0.335, 255, 133, 35 +0.34, 255, 131, 34 +0.345, 255, 129, 33 +0.35, 255, 127, 33 +0.355, 255, 124, 32 +0.36, 255, 121, 31 +0.365, 255, 118, 31 +0.37, 255, 115, 30 +0.375, 255, 112, 30 +0.38, 255, 109, 29 +0.385, 255, 106, 28 +0.39, 255, 103, 28 +0.395, 255, 100, 27 +0.4, 255, 98, 27 +0.405, 255, 94, 26 +0.41, 255, 91, 25 +0.415, 255, 88, 24 +0.42, 255, 85, 24 +0.425, 255, 82, 23 +0.43, 255, 78, 22 +0.435, 255, 75, 22 +0.44, 255, 72, 21 +0.445, 255, 69, 20 +0.45, 255, 66, 20 +0.455, 254, 63, 19 +0.46, 253, 60, 19 +0.465, 252, 58, 18 +0.47, 251, 55, 18 +0.475, 250, 53, 18 +0.48, 249, 50, 17 +0.485, 248, 47, 17 +0.49, 247, 45, 16 +0.495, 246, 42, 16 +0.5, 245, 40, 16 +0.505, 243, 38, 15 +0.51, 242, 36, 15 +0.515, 240, 34, 14 +0.52, 239, 32, 14 +0.525, 238, 30, 13 +0.53, 236, 28, 13 +0.535, 235, 26, 12 +0.54, 233, 24, 12 +0.545, 232, 22, 11 +0.55, 231, 20, 11 +0.555, 229, 18, 11 +0.56, 227, 17, 11 +0.565, 225, 16, 11 +0.57, 223, 14, 11 +0.575, 221, 13, 11 +0.58, 219, 12, 11 +0.585, 217, 
10, 11 +0.59, 215, 9, 11 +0.595, 213, 8, 11 +0.6, 211, 7, 12 +0.605, 208, 6, 12 +0.61, 206, 5, 12 +0.615, 204, 4, 12 +0.62, 201, 4, 12 +0.625, 199, 3, 13 +0.63, 197, 2, 13 +0.635, 194, 2, 13 +0.64, 192, 1, 13 +0.645, 190, 0, 13 +0.65, 188, 0, 14 +0.655, 184, 0, 14 +0.66, 181, 0, 14 +0.665, 178, 0, 14 +0.67, 174, 0, 14 +0.675, 171, 0, 14 +0.68, 168, 0, 14 +0.685, 164, 0, 14 +0.69, 161, 0, 14 +0.695, 158, 0, 14 +0.7, 155, 0, 14 +1.13, 152, 0, 14 +1.56, 149, 0, 14 +1.99, 146, 0, 14 +2.42, 143, 0, 14 +2.85, 140, 0, 14 +3.28, 137, 0, 14 +3.71, 134, 0, 14 +4.14, 131, 0, 14 +4.57, 128, 0, 14 +5, 125, 0, 14 diff --git a/satpy/etc/colormaps/angstrom_exponent.txt b/satpy/etc/colormaps/angstrom_exponent.txt new file mode 100644 index 0000000000..4e56fccd68 --- /dev/null +++ b/satpy/etc/colormaps/angstrom_exponent.txt @@ -0,0 +1,251 @@ +0, 122, 145, 2 +0.01, 123, 148, 3 +0.02, 124, 150, 4 +0.03, 124, 153, 5 +0.04, 125, 155, 6 +0.05, 126, 158, 7 +0.06, 127, 160, 8 +0.07, 127, 163, 9 +0.08, 128, 165, 10 +0.09, 129, 168, 11 +0.1, 130, 170, 12 +0.11, 130, 173, 13 +0.12, 131, 175, 14 +0.13, 132, 178, 15 +0.14, 133, 181, 16 +0.15, 132, 183, 18 +0.16, 132, 185, 20 +0.17, 132, 187, 22 +0.18, 132, 189, 25 +0.19, 132, 191, 27 +0.2, 132, 193, 29 +0.21, 132, 195, 31 +0.22, 131, 197, 34 +0.23, 131, 199, 36 +0.24, 131, 201, 38 +0.25, 131, 203, 40 +0.26, 131, 205, 43 +0.27, 131, 207, 45 +0.28, 131, 209, 47 +0.29, 131, 212, 50 +0.3, 130, 213, 51 +0.31, 129, 215, 53 +0.32, 128, 217, 55 +0.33, 128, 219, 57 +0.34, 127, 221, 59 +0.35, 126, 222, 61 +0.36, 125, 224, 63 +0.37, 125, 226, 64 +0.38, 124, 228, 66 +0.39, 123, 230, 68 +0.4, 122, 231, 70 +0.41, 122, 233, 72 +0.42, 121, 235, 74 +0.43, 120, 237, 76 +0.44, 120, 239, 78 +0.45, 119, 239, 79 +0.46, 118, 240, 80 +0.47, 117, 241, 82 +0.48, 116, 242, 83 +0.49, 116, 243, 85 +0.5, 115, 244, 86 +0.51, 114, 245, 87 +0.52, 113, 246, 89 +0.53, 112, 247, 90 +0.54, 112, 248, 92 +0.55, 111, 249, 93 +0.56, 110, 250, 94 +0.57, 109, 251, 96 +0.58, 108, 252, 97 +0.59, 108, 253, 99 +0.6, 107, 252, 100 +0.61, 106, 252, 102 +0.62, 106, 252, 103 +0.63, 105, 251, 105 +0.64, 105, 251, 106 +0.65, 104, 251, 108 +0.66, 103, 251, 109 +0.67, 103, 250, 111 +0.68, 102, 250, 112 +0.69, 102, 250, 114 +0.7, 101, 250, 115 +0.71, 100, 249, 117 +0.72, 100, 249, 118 +0.73, 99, 249, 120 +0.74, 99, 249, 122 +0.75, 98, 247, 123 +0.76, 97, 246, 124 +0.77, 96, 245, 126 +0.78, 95, 244, 127 +0.79, 94, 243, 128 +0.8, 93, 242, 130 +0.81, 92, 241, 131 +0.82, 92, 239, 132 +0.83, 91, 238, 134 +0.84, 90, 237, 135 +0.85, 89, 236, 136 +0.86, 88, 235, 138 +0.87, 87, 234, 139 +0.88, 86, 233, 140 +0.89, 86, 232, 142 +0.9, 85, 230, 143 +0.91, 84, 229, 144 +0.92, 83, 228, 145 +0.93, 82, 226, 147 +0.94, 81, 225, 148 +0.95, 80, 224, 149 +0.96, 79, 223, 150 +0.97, 78, 221, 152 +0.98, 77, 220, 153 +0.99, 76, 219, 154 +1, 75, 218, 155 +1.01, 74, 216, 157 +1.02, 73, 215, 158 +1.03, 72, 214, 159 +1.04, 72, 213, 161 +1.05, 71, 211, 162 +1.06, 70, 209, 163 +1.07, 69, 208, 164 +1.08, 68, 206, 165 +1.09, 67, 205, 166 +1.1, 66, 203, 167 +1.11, 65, 201, 168 +1.12, 64, 200, 170 +1.13, 63, 198, 171 +1.14, 62, 197, 172 +1.15, 61, 195, 173 +1.16, 60, 193, 174 +1.17, 59, 192, 175 +1.18, 58, 190, 176 +1.19, 58, 189, 178 +1.2, 58, 187, 178 +1.21, 58, 185, 179 +1.22, 58, 184, 180 +1.23, 58, 182, 181 +1.24, 58, 181, 182 +1.25, 58, 179, 183 +1.26, 58, 178, 184 +1.27, 59, 176, 184 +1.28, 59, 175, 185 +1.29, 59, 173, 186 +1.3, 59, 172, 187 +1.31, 59, 170, 188 +1.32, 59, 169, 189 +1.33, 59, 167, 190 +1.34, 60, 166, 191 +1.35, 60, 164, 191 +1.36, 61, 
162, 192 +1.37, 61, 160, 193 +1.38, 62, 158, 194 +1.39, 63, 156, 195 +1.4, 63, 154, 195 +1.41, 64, 152, 196 +1.42, 64, 150, 197 +1.43, 65, 148, 198 +1.44, 66, 146, 199 +1.45, 66, 144, 199 +1.46, 67, 142, 200 +1.47, 67, 140, 201 +1.48, 68, 138, 202 +1.49, 69, 137, 203 +1.5, 69, 135, 203 +1.51, 70, 133, 204 +1.52, 70, 131, 205 +1.53, 71, 129, 205 +1.54, 72, 128, 206 +1.55, 72, 126, 207 +1.56, 73, 124, 207 +1.57, 73, 122, 208 +1.58, 74, 120, 209 +1.59, 75, 119, 209 +1.6, 75, 117, 210 +1.61, 76, 115, 211 +1.62, 76, 113, 211 +1.63, 77, 111, 212 +1.64, 78, 110, 213 +1.65, 78, 108, 213 +1.66, 79, 106, 214 +1.67, 80, 104, 214 +1.68, 80, 102, 215 +1.69, 81, 101, 216 +1.7, 82, 99, 216 +1.71, 82, 97, 217 +1.72, 83, 95, 217 +1.73, 84, 93, 218 +1.74, 84, 92, 219 +1.75, 85, 90, 219 +1.76, 86, 88, 220 +1.77, 86, 86, 220 +1.78, 87, 84, 221 +1.79, 88, 83, 222 +1.8, 88, 82, 222 +1.81, 89, 81, 223 +1.82, 90, 80, 223 +1.83, 91, 80, 224 +1.84, 92, 79, 224 +1.85, 93, 78, 225 +1.86, 94, 77, 225 +1.87, 95, 77, 226 +1.88, 96, 76, 226 +1.89, 97, 75, 227 +1.9, 98, 74, 227 +1.91, 99, 74, 228 +1.92, 100, 73, 228 +1.93, 101, 72, 229 +1.94, 102, 72, 230 +1.95, 104, 72, 230 +1.96, 106, 73, 230 +1.97, 108, 73, 230 +1.98, 110, 74, 231 +1.99, 112, 74, 231 +2, 114, 75, 231 +2.01, 116, 75, 231 +2.02, 118, 76, 232 +2.03, 120, 76, 232 +2.04, 122, 77, 232 +2.05, 124, 77, 232 +2.06, 126, 78, 233 +2.07, 128, 78, 233 +2.08, 130, 79, 233 +2.09, 133, 80, 234 +2.1, 135, 80, 234 +2.11, 137, 80, 234 +2.12, 139, 81, 234 +2.13, 141, 81, 234 +2.14, 143, 81, 234 +2.15, 145, 82, 234 +2.16, 147, 82, 234 +2.17, 149, 82, 234 +2.18, 151, 83, 234 +2.19, 153, 83, 234 +2.2, 155, 83, 234 +2.21, 157, 84, 234 +2.22, 159, 84, 234 +2.23, 161, 84, 234 +2.24, 164, 85, 235 +2.25, 165, 85, 235 +2.26, 166, 85, 235 +2.27, 168, 85, 235 +2.28, 169, 85, 235 +2.29, 171, 85, 235 +2.3, 172, 85, 235 +2.31, 174, 85, 235 +2.32, 175, 86, 235 +2.33, 177, 86, 235 +2.34, 178, 86, 235 +2.35, 180, 86, 235 +2.36, 181, 86, 235 +2.37, 183, 86, 235 +2.38, 184, 86, 235 +2.39, 186, 87, 235 +2.4, 187, 87, 234 +2.41, 188, 87, 234 +2.42, 190, 87, 234 +2.43, 191, 88, 234 +2.44, 193, 88, 234 +2.45, 194, 88, 234 +2.46, 196, 88, 234 +2.47, 197, 89, 234 +2.48, 199, 89, 234 +2.49, 200, 89, 234 +2.5, 202, 89, 234 diff --git a/satpy/etc/colormaps/clear_sky_confidence.txt b/satpy/etc/colormaps/clear_sky_confidence.txt new file mode 100644 index 0000000000..58393dbbcd --- /dev/null +++ b/satpy/etc/colormaps/clear_sky_confidence.txt @@ -0,0 +1,101 @@ +0, 255, 247, 236 +0.01, 254, 246, 233 +0.02, 254, 244, 230 +0.03, 254, 243, 228 +0.04, 254, 242, 224 +0.05, 254, 241, 222 +0.06, 254, 239, 219 +0.07, 254, 239, 216 +0.08, 254, 237, 213 +0.09, 254, 236, 210 +0.1, 254, 235, 207 +0.11, 254, 233, 204 +0.12, 254, 232, 202 +0.13, 253, 231, 198 +0.14, 253, 230, 195 +0.15, 253, 228, 191 +0.16, 253, 226, 189 +0.17, 253, 225, 185 +0.18, 253, 223, 181 +0.19, 253, 221, 178 +0.2, 253, 220, 174 +0.21, 253, 218, 172 +0.22, 253, 216, 168 +0.23, 253, 215, 165 +0.24, 253, 213, 161 +0.25, 253, 211, 157 +0.26, 253, 210, 156 +0.27, 253, 207, 153 +0.28, 253, 206, 152 +0.29, 253, 203, 149 +0.3, 253, 202, 148 +0.31, 253, 200, 145 +0.32, 253, 198, 143 +0.33, 253, 196, 141 +0.34, 253, 193, 139 +0.35, 253, 192, 137 +0.36, 253, 189, 134 +0.37, 253, 188, 133 +0.38, 252, 185, 130 +0.39, 252, 182, 127 +0.4, 252, 177, 123 +0.41, 252, 174, 120 +0.42, 252, 170, 116 +0.43, 252, 166, 112 +0.44, 252, 163, 109 +0.45, 252, 159, 105 +0.46, 252, 156, 103 +0.47, 252, 151, 99 +0.48, 252, 148, 96 +0.49, 252, 144, 92 +0.5, 251, 140, 88 
+0.51, 250, 137, 87 +0.52, 249, 134, 86 +0.53, 248, 131, 85 +0.54, 247, 127, 83 +0.55, 246, 125, 82 +0.56, 245, 121, 80 +0.57, 244, 119, 79 +0.58, 243, 115, 78 +0.59, 242, 111, 76 +0.6, 241, 109, 75 +0.61, 240, 105, 73 +0.62, 239, 102, 72 +0.63, 237, 98, 69 +0.64, 236, 94, 67 +0.65, 234, 89, 63 +0.66, 232, 86, 60 +0.67, 230, 81, 57 +0.68, 227, 76, 53 +0.69, 226, 73, 50 +0.7, 224, 68, 46 +0.71, 222, 65, 44 +0.72, 220, 60, 40 +0.73, 218, 56, 37 +0.74, 216, 51, 33 +0.75, 214, 46, 30 +0.76, 211, 43, 28 +0.77, 208, 39, 25 +0.78, 206, 36, 23 +0.79, 202, 31, 20 +0.8, 200, 28, 188 +0.81, 197, 24, 15 +0.82, 194, 21, 13 +0.83, 191, 16, 10 +0.84, 188, 12, 7 +0.85, 185, 9, 5 +0.86, 182, 4, 3 +0.87, 180, 1, 1 +0.88, 175, 0, 0 +0.89, 172, 0, 0 +0.9, 167, 0, 0 +0.91, 164, 0, 0 +0.92, 159, 0, 0 +0.93, 154, 0, 0 +0.94, 151, 0, 0 +0.95, 146, 0, 0 +0.96, 143, 0, 0 +0.97, 138, 0, 0 +0.98, 135, 0, 0 +0.99, 130, 0, 0 +1, 127, 0, 0 diff --git a/satpy/etc/composites/viirs.yaml b/satpy/etc/composites/viirs.yaml index 09fc358a6b..bebf6c5833 100644 --- a/satpy/etc/composites/viirs.yaml +++ b/satpy/etc/composites/viirs.yaml @@ -685,23 +685,3 @@ composites: - name: I01 - name: I03 standard_name: cimss_cloud_type - - cth: - description: > - VIIRS Cloud Top Height with colormap applied - compositor: !!python/name:satpy.composites.SingleBandCompositor - prerequisites: - - Cloud_Top_Height - standard_name: cth - - viirs_cloud_top_height: - description: > - Cloud Top Height composite from NOAA/SNPP - compositor: !!python/name:satpy.composites.MaskingCompositor - prerequisites: - - Cloud_Top_Height - - cth - conditions: - - method: isnan - transparency: 100 - standard_name: viirs_cloud_top_height diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index e37c0d69ce..78b59a69c7 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -1231,254 +1231,59 @@ enhancements: min_stretch: [0,0,0] max_stretch: [1,1,1] - viirs_cloud_top_height: - name: viirs_cloud_top_height + Cloud_Top_Height: + standard_name: cldprop_cloud_top_height operations: - - name: palettize - method: !!python/name:satpy.enhancements.palettize + - name: colorize + method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - - { - colors: [ - [255, 0, 0], [255, 0, 1], [255, 1, 0], [255, 1, 1], [254, 0, 0], [254, 0, 1], - [254, 1, 0], [254, 1, 1], [254, 2, 1], [254, 2, 0], [254, 2, 2], [253, 0, 0], - [253, 0, 1], [253, 1, 0], [253, 1, 1], [253, 1, 2], [170, 0, 0], [170, 0, 1], - [170, 1, 0], [170, 1, 1], [171, 0, 0], [171, 0, 1], [171, 1, 0], [171, 1, 1], - [171, 2, 1], [171, 2, 0], [171, 2, 2], [172, 0, 0], [172, 0, 1], [172, 1, 0], - [172, 1, 1], [172, 1, 2], [110, 0, 0], [110, 0, 1], [110, 1, 0], [110, 1, 1], - [111, 0, 0], [111, 0, 1], [111, 1, 0], [111, 1, 1], [111, 2, 1], [111, 2, 0], - [111, 2, 2], [112, 0, 0], [112, 0, 1], [112, 1, 0], [112, 1, 1], [112, 1, 2], - [122, 90, 3], [122, 90, 4], [122, 91, 3], [122, 91, 4], [123, 90, 3], [123, 90, 4], - [123, 91, 3], [123, 91, 4], [123, 92, 4], [123, 92, 3], [123, 92, 5], [124, 90, 3], - [124, 90, 4], [124, 91, 3], [124, 91, 4], [124, 91, 5], [187, 136, 0], [187, 136, 1], - [187, 137, 0], [187, 137, 1], [188, 136, 0], [188, 136, 1], [188, 137, 0], - [188, 137, 1], [188, 138, 1], [188, 138, 0], [188, 138, 2], [189, 136, 0], - [189, 136, 1], [189, 137, 0], [189, 137, 1], [189, 137, 2], [240, 190, 64], - [240, 190, 65], [240, 191, 64], [240, 191, 65], [241, 190, 64], [241, 190, 65], - [241, 191, 64], [241, 191, 65], [241, 192, 65], 
[241, 192, 64], [241, 192, 66], - [242, 190, 64], [242, 190, 65], [242, 191, 64], [242, 191, 65], [242, 191, 66], - [255, 255, 0], [255, 255, 1], [255, 254, 0], [255, 254, 1], [254, 255, 0], - [254, 255, 1], [254, 254, 0], [254, 254, 1], [254, 253, 1], [254, 253, 0], - [254, 253, 2], [253, 255, 0], [253, 255, 1], [253, 254, 0], [253, 254, 1], - [253, 254, 2], [0, 220, 0], [0, 220, 1], [0, 221, 0], [0, 221, 1], [1, 220, 0], - [1, 220, 1], [1, 221, 0], [1, 221, 1], [1, 222, 1], [1, 222, 0], [1, 222, 2], - [2, 220, 0], [2, 220, 1], [2, 221, 0], [2, 221, 1], [2, 221, 2], [0, 136, 0], - [0, 136, 1], [0, 137, 0], [0, 137, 1], [1, 136, 0], [1, 136, 1], [1, 137, 0], - [1, 137, 1], [1, 138, 1], [1, 138, 0], [1, 138, 2], [2, 136, 0], [2, 136, 1], - [2, 137, 0], [2, 137, 1], [2, 137, 2], [0, 80, 0], [0, 80, 1], [0, 81, 0], - [0, 81, 1], [1, 80, 0], [1, 80, 1], [1, 81, 0], [1, 81, 1], [1, 82, 1], [1, 82, 0], - [1, 82, 2], [2, 80, 0], [2, 80, 1], [2, 81, 0], [2, 81, 1], [2, 81, 2], [0, 136, 238], - [0, 136, 239], [0, 137, 238], [0, 137, 239], [1, 136, 238], [1, 136, 239], - [1, 137, 238], [1, 137, 239], [1, 138, 239], [1, 138, 238], [1, 138, 240], - [2, 136, 238], [2, 136, 239], [2, 137, 238], [2, 137, 239], [2, 137, 240], - [0, 0, 255], [0, 0, 254], [0, 1, 255], [0, 1, 254], [1, 0, 255], [1, 0, 254], - [1, 1, 255], [1, 1, 254], [1, 2, 254], [1, 2, 255], [1, 2, 253], [2, 0, 253], - [2, 0, 254], [2, 1, 253], [2, 1, 254], [2, 1, 255], [0, 0, 170], [0, 0, 171], - [0, 1, 170], [0, 1, 171], [1, 0, 170], [1, 0, 171], [1, 1, 170], [1, 1, 171], - [1, 2, 171], [1, 2, 170], [1, 2, 172], [2, 0, 170], [2, 0, 171], [2, 1, 170], - [2, 1, 171], [2, 1, 172], [0, 0, 100], [0, 0, 101], [0, 1, 100], [0, 1, 101], - [1, 0, 100], [1, 0, 101], [1, 1, 100], [1, 1, 101], [1, 2, 101], [1, 2, 100], - [1, 2, 102], [2, 0, 100], [2, 0, 101], [2, 1, 100], [2, 1, 101], [2, 1, 102], - [183, 15, 141], [183, 15, 142], [183, 16, 141], [183, 16, 142], [184, 15, 141], - [184, 15, 142], [184, 16, 141], [184, 16, 142], [184, 17, 142], [184, 17, 141], - [184, 17, 143], [185, 15, 141], [185, 15, 142], [185, 16, 141], [185, 16, 142], - [185, 16, 143], [102, 0, 119] - ], - values: [ - 0, 50, 100, 150, 200, 250, 300, 350, 400, 450, 500, 550, 600, 650, 700, 750, 800, - 850, 900, 950, 1000, 1050, 1100, 1150, 1200, 1250, 1300, 1350, 1400, 1450, 1500, - 1550, 1600, 1650, 1700, 1750, 1800, 1850, 1900, 1950, 2000, 2050, 2100, 2150, 2200, - 2250, 2300, 2350, 2400, 2450, 2500, 2550, 2600, 2650, 2700, 2750, 2800, 2850, 2900, - 2950, 3000, 3050, 3100, 3150, 3200, 3250, 3300, 3350, 3400, 3450, 3500, 3550, 3600, - 3650, 3700, 3750, 3800, 3850, 3900, 3950, 4000, 4050, 4100, 4150, 4200, 4250, 4300, - 4350, 4400, 4450, 4500, 4550, 4600, 4650, 4700, 4750, 4800, 4850, 4900, 4950, 5000, - 5050, 5100, 5150, 5200, 5250, 5300, 5350, 5400, 5450, 5500, 5550, 5600, 5650, 5700, - 5750, 5800, 5850, 5900, 5950, 6000, 6050, 6100, 6150, 6200, 6250, 6300, 6350, 6400, - 6450, 6500, 6550, 6600, 6650, 6700, 6750, 6800, 6850, 6900, 6950, 7000, 7050, 7100, - 7150, 7200, 7250, 7300, 7350, 7400, 7450, 7500, 7550, 7600, 7650, 7700, 7750, 7800, - 7850, 7900, 7950, 8000, 8050, 8100, 8150, 8200, 8250, 8300, 8350, 8400, 8450, 8500, - 8550, 8600, 8650, 8700, 8750, 8800, 8850, 8900, 8950, 9000, 9050, 9100, 9150, 9200, - 9250, 9300, 9350, 9400, 9450, 9500, 9550, 9600, 9650, 9700, 9750, 9800, 9850, 9900, - 9950, 10000, 10050, 10100, 10150, 10200, 10250, 10300, 10350, 10400, 10450, 10500, - 10550, 10600, 10650, 10700, 10750, 10800, 10850, 10900, 10950, 11000, 11050, 11100, - 11150, 11200, 
11250, 11300, 11350, 11400, 11450, 11500, 11550, 11600, 11650, 11700, - 11750, 11800, 11850, 11900, 11950, 12000 - ], - } + - {colors: [[255, 0, 0], [255, 0, 0]], min_value: 0, max_value: 800} + - {colors: [[170, 0, 0], [170, 0, 0]], min_value: 800.0001, max_value: 1600} + - {colors: [[110, 0, 0], [110, 0, 0]], min_value: 1600.0001, max_value: 2350} + - {colors: [[112, 1, 2], [112, 1, 2]], min_value: 2350.0001, max_value: 3150} + - {colors: [[124, 91, 5], [124, 91, 5]], min_value: 3150.0001, max_value: 4000} + - {colors: [[240, 190, 64], [240, 190, 64]], min_value: 4000.0001, max_value: 4800} + - {colors: [[255, 255, 0], [255, 255, 0]], min_value: 4800.0001, max_value: 5600} + - {colors: [[0, 220, 0], [0, 220, 0]], min_value: 5600.0001, max_value: 6400} + - {colors: [[0, 136, 0], [0, 136, 0]], min_value: 6400.0001, max_value: 7200} + - {colors: [[0, 80, 0], [0, 80, 0]], min_value: 7200.0001, max_value: 8000} + - {colors: [[0, 136, 238], [0, 136, 238]], min_value: 8000.0001, max_value: 8800} + - {colors: [[0, 0, 255], [0, 0, 255]], min_value: 8800.0001, max_value: 9600} + - {colors: [[0, 0, 170], [0, 0, 170]], min_value: 9600.0001, max_value: 10400} + - {colors: [[0, 0, 100], [0, 0, 100]], min_value: 10400.0001, max_value: 11200} + - {colors: [[183,15,141], [183, 15, 141]], min_value: 11200.0001, max_value: 12000} + - {colors: [[102, 0, 119], [102, 0, 119]], min_value: 12000.0001, max_value: 18000} Clear_Sky_Confidence: - name: Clear_Sky_Confidence + standard_name: cldmsk_clear_sky_confidence operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - { - colors: [ - [255, 247, 236], [254, 246, 233], [254, 244, 230], [254, 243, 228], [254, 242, 224], - [254, 241, 222], [254, 239, 219], [254, 239, 216], [254, 237, 213], [254, 236, 210], - [254, 235, 207], [254, 233, 204], [254, 232, 202], [253, 231, 198], [253, 230, 195], - [253, 228, 191], [253, 226, 189], [253, 225, 185], [253, 223, 181], [253, 221, 178], - [253, 220, 174], [253, 218, 172], [253, 216, 168], [253, 215, 165], [253, 213, 161], - [253, 211, 157], [253, 210, 156], [253, 207, 153], [253, 206, 152], [253, 203, 149], - [253, 202, 148], [253, 200, 145], [253, 198, 143], [253, 196, 141], [253, 193, 139], - [253, 192, 137], [253, 189, 134], [253, 188, 133], [252, 185, 130], [252, 182, 127], - [252, 177, 123], [252, 174, 120], [252, 170, 116], [252, 166, 112], [252, 163, 109], - [252, 159, 105], [252, 156, 103], [252, 151, 99], [252, 148, 96], [252, 144, 92], - [251, 140, 88], [250, 137, 87], [249, 134, 86], [248, 131, 85], [247, 127, 83], - [246, 125, 82], [245, 121, 80], [244, 119, 79], [243, 115, 78], [242, 111, 76], - [241, 109, 75], [240, 105, 73], [239, 102, 72], [237, 98, 69], [236, 94, 67], - [234, 89, 63], [232, 86, 60], [230, 81, 57], [227, 76, 53], [226, 73, 50], [224, 68, 46], - [222, 65, 44], [220, 60, 40], [218, 56, 37], [216, 51, 33], [214, 46, 30], [211, 43, 28], - [208, 39, 25], [206, 36, 23], [202, 31, 20], [200, 28, 18], [197, 24, 15], [194, 21, 13], - [191, 16, 10], [188, 12, 7], [185, 9, 5], [182, 4, 3], [180, 1, 1], [175, 0, 0], - [172, 0, 0], [167, 0, 0], [164, 0, 0], [159, 0, 0], [154, 0, 0], [151, 0, 0], - [146, 0, 0], [143, 0, 0], [138, 0, 0], [135, 0, 0], [130, 0, 0], [127, 0, 0] - ], - values: [ - 0, 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.1, 0.11, 0.12, 0.13, 0.14, - 0.15, 0.16, 0.17, 0.18, 0.19, 0.2, 0.21, 0.22, 0.23, 0.24, 0.25, 0.26, 0.27, 0.28, - 0.29, 0.3, 0.31, 0.32, 0.33, 0.34, 0.35, 0.36, 0.37, 0.38, 0.39, 0.4, 0.41, 0.42, - 0.43, 0.44, 0.45, 
0.46, 0.47, 0.48, 0.49, 0.5, 0.51, 0.52, 0.53, 0.54, 0.55, 0.56, - 0.57, 0.58, 0.59, 0.6, 0.61, 0.62, 0.63, 0.64, 0.65, 0.66, 0.67, 0.68, 0.69, 0.7, - 0.71, 0.72, 0.73, 0.74, 0.75, 0.76, 0.77, 0.78, 0.79, 0.8, 0.81, 0.82, 0.83, 0.84, - 0.85, 0.86, 0.87, 0.88, 0.89, 0.9, 0.91, 0.92, 0.93, 0.94, 0.95, 0.96, 0.97, 0.98, 0.99, 1 - ] + filename: clear_sky_confidence.txt } Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate: - name: Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate + standard_name: aerdb_aerosol_optical_thickness_500_land_ocean operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - { - colors: [ - [255, 252, 199], [255, 251, 193], [255, 250, 188], [255, 249, 183], [255, 248, 178], - [255, 247, 173], [255, 246, 167], [255, 245, 162], [255, 244, 157], [255, 243, 152], - [255, 242, 147], [255, 240, 144], [255, 239, 141], [255, 238, 138], [255, 236, 135], - [255, 235, 132], [255, 234, 129], [255, 232, 126], [255, 231, 123], [255, 230, 120], - [255, 229, 118], [255, 227, 115], [255, 226, 113], [255, 225, 110], [255, 223, 108], - [255, 222, 106], [255, 221, 103], [255, 219, 101], [255, 218, 98], [255, 217, 96], - [255, 216, 94], [255, 214, 91], [255, 212, 89], [255, 210, 87], [255, 208, 85], - [255, 207, 83], [255, 205, 80], [255, 203, 78], [255, 201, 76], [255, 199, 74], - [255, 198, 72], [255, 195, 70], [255, 193, 68], [255, 190, 66], [255, 188, 64], - [255, 185, 62], [255, 183, 60], [255, 180, 58], [255, 178, 56], [255, 175, 54], - [255, 173, 53], [255, 170, 51], [255, 168, 50], [255, 165, 49], [255, 163, 47], - [255, 161, 46], [255, 158, 45], [255, 156, 43], [255, 153, 42], [255, 151, 41], - [255, 149, 40], [255, 146, 39], [255, 144, 38], [255, 142, 37], [255, 140, 37], - [255, 138, 36], [255, 135, 35], [255, 133, 35], [255, 131, 34], [255, 129, 33], - [255, 127, 33], [255, 124, 32], [255, 121, 31], [255, 118, 31], [255, 115, 30], - [255, 112, 30], [255, 109, 29], [255, 106, 28], [255, 103, 28], [255, 100, 27], - [255, 98, 27], [255, 94, 26], [255, 91, 25], [255, 88, 24], [255, 85, 24], [255, 82, 23], - [255, 78, 22], [255, 75, 22], [255, 72, 21], [255, 69, 20], [255, 66, 20], [254, 63, 19], - [253, 60, 19], [252, 58, 18], [251, 55, 18], [250, 53, 18], [249, 50, 17], [248, 47, 17], - [247, 45, 16], [246, 42, 16], [245, 40, 16], [243, 38, 15], [242, 36, 15], [240, 34, 14], - [239, 32, 14], [238, 30, 13], [236, 28, 13], [235, 26, 12], [233, 24, 12], [232, 22, 11], - [231, 20, 11], [229, 18, 11], [227, 17, 11], [225, 16, 11], [223, 14, 11], [221, 13, 11], - [219, 12, 11], [217, 10, 11], [215, 9, 11], [213, 8, 11], [211, 7, 12], [208, 6, 12], - [206, 5, 12], [204, 4, 12], [201, 4, 12], [199, 3, 13], [197, 2, 13], [194, 2, 13], - [192, 1, 13], [190, 0, 13], [188, 0, 14], [184, 0, 14], [181, 0, 14], [178, 0, 14], - [174, 0, 14], [171, 0, 14], [168, 0, 14], [164, 0, 14], [161, 0, 14], [158, 0, 14], - [155, 0, 14], [152, 0, 14], [149, 0, 14], [146, 0, 14], [143, 0, 14], [140, 0, 14], - [137, 0, 14], [134, 0, 14], [131, 0, 14], [128, 0, 14], [125, 0, 14] - ], - values: [ - 0, 0.005, 0.01, 0.015, 0.02, 0.025, 0.03, 0.035, 0.04, 0.045, 0.05, 0.055, 0.06, - 0.065, 0.07, 0.075, 0.08, 0.085, 0.09, 0.095, 0.1, 0.105, 0.11, 0.115, 0.12, 0.125, - 0.13, 0.135, 0.14, 0.145, 0.15, 0.155, 0.16, 0.165, 0.17, 0.175, 0.18, 0.185, 0.19, - 0.195, 0.2, 0.205, 0.21, 0.215, 0.22, 0.225, 0.23, 0.235, 0.24, 0.245, 0.25, 0.255, - 0.26, 0.265, 0.27, 0.275, 0.28, 0.285, 0.29, 0.295, 0.3, 0.305, 0.31, 0.315, 0.32, - 0.325, 0.33, 0.335, 0.34, 0.345, 0.35, 0.355, 
0.36, 0.365, 0.37, 0.375, 0.38, 0.385, - 0.39, 0.395, 0.4, 0.405, 0.41, 0.415, 0.42, 0.425, 0.43, 0.435, 0.44, 0.445, 0.45, - 0.455, 0.46, 0.465, 0.47, 0.475, 0.48, 0.485, 0.49, 0.495, 0.5, 0.505, 0.51, 0.515, - 0.52, 0.525, 0.53, 0.535, 0.54, 0.545, 0.55, 0.555, 0.56, 0.565, 0.57, 0.575, 0.58, - 0.585, 0.59, 0.595, 0.6, 0.605, 0.61, 0.615, 0.62, 0.625, 0.63, 0.635, 0.64, 0.645, - 0.65, 0.655, 0.66, 0.665, 0.67, 0.675, 0.68, 0.685, 0.69, 0.695, 0.7, 1.13, 1.56, - 1.99, 2.42, 2.85, 3.28, 3.71, 4.14, 4.57, 5 - ], + filename: aerosol_thickness.txt } Angstrom_Exponent_Land_Ocean_Best_Estimate: - name: Angstrom_Exponent_Land_Ocean_Best_Estimate + standard_name: aerdb_angstrom_exponent_land_ocean_best_estimate operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - { - colors: [ - [122, 145, 2], [123, 148, 3], [124, 150, 4], [124, 153, 5], [125, 155, 6], - [126, 158, 7], [127, 160, 8], [127, 163, 9], [128, 165, 10], [129, 168, 11], - [130, 170, 12], [130, 173, 13], [131, 175, 14], [132, 178, 15], [133, 181, 16], - [132, 183, 18], [132, 185, 20], [132, 187, 22], [132, 189, 25], [132, 191, 27], - [132, 193, 29], [132, 195, 31], [131, 197, 34], [131, 199, 36], [131, 201, 38], - [131, 203, 40], [131, 205, 43], [131, 207, 45], [131, 209, 47], [131, 212, 50], - [130, 213, 51], [129, 215, 53], [128, 217, 55], [128, 219, 57], [127, 221, 59], - [126, 222, 61], [125, 224, 63], [125, 226, 64], [124, 228, 66], [123, 230, 68], - [122, 231, 70], [122, 233, 72], [121, 235, 74], [120, 237, 76], [120, 239, 78], - [119, 239, 79], [118, 240, 80], [117, 241, 82], [116, 242, 83], [116, 243, 85], - [115, 244, 86], [114, 245, 87], [113, 246, 89], [112, 247, 90], [112, 248, 92], - [111, 249, 93], [110, 250, 94], [109, 251, 96], [108, 252, 97], [108, 253, 99], - [107, 252, 100], [106, 252, 102], [106, 252, 103], [105, 251, 105], [105, 251, 106], - [104, 251, 108], [103, 251, 109], [103, 250, 111], [102, 250, 112], [102, 250, 114], - [101, 250, 115], [100, 249, 117], [100, 249, 118], [99, 249, 120], [99, 249, 122], - [98, 247, 123], [97, 246, 124], [96, 245, 126], [95, 244, 127], [94, 243, 128], - [93, 242, 130], [92, 241, 131], [92, 239, 132], [91, 238, 134], [90, 237, 135], - [89, 236, 136], [88, 235, 138], [87, 234, 139], [86, 233, 140], [86, 232, 142], - [85, 230, 143], [84, 229, 144], [83, 228, 145], [82, 226, 147], [81, 225, 148], - [80, 224, 149], [79, 223, 150], [78, 221, 152], [77, 220, 153], [76, 219, 154], - [75, 218, 155], [74, 216, 157], [73, 215, 158], [72, 214, 159], [72, 213, 161], - [71, 211, 162], [70, 209, 163], [69, 208, 164], [68, 206, 165], [67, 205, 166], - [66, 203, 167], [65, 201, 168], [64, 200, 170], [63, 198, 171], [62, 197, 172], - [61, 195, 173], [60, 193, 174], [59, 192, 175], [58, 190, 176], [58, 189, 178], - [58, 187, 178], [58, 185, 179], [58, 184, 180], [58, 182, 181], [58, 181, 182], - [58, 179, 183], [58, 178, 184], [59, 176, 184], [59, 175, 185], [59, 173, 186], - [59, 172, 187], [59, 170, 188], [59, 169, 189], [59, 167, 190], [60, 166, 191], - [60, 164, 191], [61, 162, 192], [61, 160, 193], [62, 158, 194], [63, 156, 195], - [63, 154, 195], [64, 152, 196], [64, 150, 197], [65, 148, 198], [66, 146, 199], - [66, 144, 199], [67, 142, 200], [67, 140, 201], [68, 138, 202], [69, 137, 203], - [69, 135, 203], [70, 133, 204], [70, 131, 205], [71, 129, 205], [72, 128, 206], - [72, 126, 207], [73, 124, 207], [73, 122, 208], [74, 120, 209], [75, 119, 209], - [75, 117, 210], [76, 115, 211], [76, 113, 211], [77, 111, 212], [78, 110, 213], - [78, 108, 
213], [79, 106, 214], [80, 104, 214], [80, 102, 215], [81, 101, 216], - [82, 99, 216], [82, 97, 217], [83, 95, 217], [84, 93, 218], [84, 92, 219], - [85, 90, 219], [86, 88, 220], [86, 86, 220], [87, 84, 221], [88, 83, 222], - [88, 82, 222], [89, 81, 223], [90, 80, 223], [91, 80, 224], [92, 79, 224], - [93, 78, 225], [94, 77, 225], [95, 77, 226], [96, 76, 226], [97, 75, 227], - [98, 74, 227], [99, 74, 228], [100, 73, 228], [101, 72, 229], [102, 72, 230], - [104, 72, 230], [106, 73, 230], [108, 73, 230], [110, 74, 231], [112, 74, 231], - [114, 75, 231], [116, 75, 231], [118, 76, 232], [120, 76, 232], [122, 77, 232], - [124, 77, 232], [126, 78, 233], [128, 78, 233], [130, 79, 233], [133, 80, 234], - [135, 80, 234], [137, 80, 234], [139, 81, 234], [141, 81, 234], [143, 81, 234], - [145, 82, 234], [147, 82, 234], [149, 82, 234], [151, 83, 234], [153, 83, 234], - [155, 83, 234], [157, 84, 234], [159, 84, 234], [161, 84, 234], [164, 85, 235], - [165, 85, 235], [166, 85, 235], [168, 85, 235], [169, 85, 235], [171, 85, 235], - [172, 85, 235], [174, 85, 235], [175, 86, 235], [177, 86, 235], [178, 86, 235], - [180, 86, 235], [181, 86, 235], [183, 86, 235], [184, 86, 235], [186, 87, 235], - [187, 87, 234], [188, 87, 234], [190, 87, 234], [191, 88, 234], [193, 88, 234], - [194, 88, 234], [196, 88, 234], [197, 89, 234], [199, 89, 234], [200, 89, 234], [202, 89, 234] - ], - values: [ - 0, 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.1, 0.11, - 0.12, 0.13, 0.14, 0.15, 0.16, 0.17, 0.18, 0.19, 0.2, 0.21, 0.22, 0.23, 0.24, - 0.25, 0.26, 0.27, 0.28, 0.29, 0.3, 0.31, 0.32, 0.33, 0.34, 0.35, 0.36, 0.37, - 0.38, 0.39, 0.4, 0.41, 0.42, 0.43, 0.44, 0.45, 0.46, 0.47, 0.48, 0.49, 0.5, - 0.51, 0.52, 0.53, 0.54, 0.55, 0.56, 0.57, 0.58, 0.59, 0.6, 0.61, 0.62, 0.63, - 0.64, 0.65, 0.66, 0.67, 0.68, 0.69, 0.7, 0.71, 0.72, 0.73, 0.74, 0.75, 0.76, - 0.77, 0.78, 0.79, 0.8, 0.81, 0.82, 0.83, 0.84, 0.85, 0.86, 0.87, 0.88, 0.89, - 0.9, 0.91, 0.92, 0.93, 0.94, 0.95, 0.96, 0.97, 0.98, 0.99, 1, 1.01, 1.02, 1.03, - 1.04, 1.05, 1.06, 1.07, 1.08, 1.09, 1.1, 1.11, 1.12, 1.13, 1.14, 1.15, 1.16, 1.17, - 1.18, 1.19, 1.2, 1.21, 1.22, 1.23, 1.24, 1.25, 1.26, 1.27, 1.28, 1.29, 1.3, 1.31, - 1.32, 1.33, 1.34, 1.35, 1.36, 1.37, 1.38, 1.39, 1.4, 1.41, 1.42, 1.43, 1.44, 1.45, - 1.46, 1.47, 1.48, 1.49, 1.5, 1.51, 1.52, 1.53, 1.54, 1.55, 1.56, 1.57, 1.58, 1.59, - 1.6, 1.61, 1.62, 1.63, 1.64, 1.65, 1.66, 1.67, 1.68, 1.69, 1.7, 1.71, 1.72, 1.73, - 1.74, 1.75, 1.76, 1.77, 1.78, 1.79, 1.8, 1.81, 1.82, 1.83, 1.84, 1.85, 1.86, 1.87, - 1.88, 1.89, 1.9, 1.91, 1.92, 1.93, 1.94, 1.95, 1.96, 1.97, 1.98, 1.99, 2, 2.01, - 2.02, 2.03, 2.04, 2.05, 2.06, 2.07, 2.08, 2.09, 2.1, 2.11, 2.12, 2.13, 2.14, 2.15, - 2.16, 2.17, 2.18, 2.19, 2.2, 2.21, 2.22, 2.23, 2.24, 2.25, 2.26, 2.27, 2.28, 2.29, - 2.3, 2.31, 2.32, 2.33, 2.34, 2.35, 2.36, 2.37, 2.38, 2.39, 2.4, 2.41, 2.42, 2.43, - 2.44, 2.45, 2.46, 2.47, 2.48, 2.49, 2.5 - ], + filename: angstrom_exponent.txt } diff --git a/satpy/etc/readers/viirs_l2.yaml b/satpy/etc/readers/viirs_l2.yaml index b86a856e82..9df5df3597 100644 --- a/satpy/etc/readers/viirs_l2.yaml +++ b/satpy/etc/readers/viirs_l2.yaml @@ -7,7 +7,6 @@ reader: supports_fsspec: false reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader sensors: [viirs] - default_datasets: file_types: cldprop_l2_viirs: @@ -22,9 +21,6 @@ file_types: file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler file_patterns: - 'AERDB_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc' - 
aerdb_l2_viirs_nrt: - file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler - file_patterns: - 'AERDB_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.nrt.nc' cldir_l2_viirs: file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler @@ -57,14 +53,14 @@ datasets: aerdb_lon: name: aerdb_lon resolution: 1000 - file_type: [aerdb_l2_viirs, aerdb_l2_viirs_nrt] + file_type: [aerdb_l2_viirs] file_key: Longitude units: degrees standard_name: longitude aerdb_lat: name: aerdb_lat resolution: 1000 - file_type: [aerdb_l2_viirs, aerdb_l2_viirs_nrt] + file_type: [aerdb_l2_viirs] file_key: Latitude units: degrees standard_name: latitude @@ -93,7 +89,7 @@ datasets: coordinates: [cld_lon, cld_lat] file_key: geophysical_data/Clear_Sky_Confidence file_type: cldmsk_l2_viirs - + standard_name: cldmsk_clear_sky_confidence ################################### # Datasets in file cldprop_l2_viirs @@ -105,9 +101,10 @@ datasets: coordinates: [cld_lon,cld_lat] file_key: geophysical_data/Cloud_Top_Height file_type: cldprop_l2_viirs + standard_name: cldprop_cloud_top_height ########################################## -# Datasets in files aerdb_l2_viirs and nrt +# Datasets in files aerdb_l2_viirs ########################################## Angstrom_Exponent_Land_Ocean_Best_Estimate: name: Angstrom_Exponent_Land_Ocean_Best_Estimate @@ -115,7 +112,8 @@ datasets: units: "1" coordinates: [aerdb_lon,aerdb_lat] file_key: Angstrom_Exponent_Land_Ocean_Best_Estimate - file_type: [aerdb_l2_viirs, aerdb_l2_viirs_nrt] + file_type: [aerdb_l2_viirs] + standard_name: aerdb_angstrom_exponent_land_ocean_best_estimate Aerosol_Optical_Thickness_550_Land_Ocean: name: Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate @@ -123,4 +121,5 @@ datasets: units: "1" coordinates: [aerdb_lon,aerdb_lat] file_key: Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate - file_type: [aerdb_l2_viirs, aerdb_l2_viirs_nrt] + file_type: [aerdb_l2_viirs] + standard_name: aerdb_aerosol_optical_thickness_500_land_ocean diff --git a/satpy/readers/viirs_l2.py b/satpy/readers/viirs_l2.py index 4dc5c8beee..9277620320 100644 --- a/satpy/readers/viirs_l2.py +++ b/satpy/readers/viirs_l2.py @@ -121,18 +121,7 @@ def adjust_scaling_factors(self, factors, file_units, output_units): LOG.debug("File units and output units are the same (%s)", file_units) return factors factors = np.array(factors) - - if file_units == "1" and output_units == "%": - LOG.debug( - "Adjusting scaling factors to convert '%s' to '%s'", - file_units, - output_units, - ) - factors[::2] = np.where(factors[::2] != -999, factors[::2] * 100.0, -999) - factors[1::2] = np.where(factors[1::2] != -999, factors[1::2] * 100.0, -999) - return factors - else: - return factors + return factors def available_datasets(self, configured_datasets=None): """Generate dataset info and their availability.
@@ -147,12 +136,11 @@ def available_datasets(self, configured_datasets=None): yield is_avail, ds_info continue ft_matches = self.file_type_matches(ds_info["file_type"]) - if not ft_matches: - is_in_file = None - else: - var_path = ds_info.get("file_key", ds_info["name"]) - is_in_file = var_path in self - yield ft_matches and is_in_file, ds_info + if ft_matches is None: + yield None, ds_info + continue + var_path = ds_info.get("file_key", ds_info["name"]) + yield var_path in self, ds_info def get_dataset(self, ds_id, ds_info): """Get DataArray for specified dataset.""" diff --git a/satpy/tests/reader_tests/test_viirs_l2.py b/satpy/tests/reader_tests/test_viirs_l2.py index cf57e54a70..79884f3d4f 100644 --- a/satpy/tests/reader_tests/test_viirs_l2.py +++ b/satpy/tests/reader_tests/test_viirs_l2.py @@ -110,14 +110,16 @@ def test_init(self, filename): @pytest.mark.parametrize( ("filename", "datasets"), [ - pytest.param("CLDPROP_L2_VIIRS_SNPP.A2023364.2230.011.2023365115856.nc",["Cloud_Top_Height"],id="CLDPROP"), - pytest.param("CLDMSK_L2_VIIRS_SNPP.A2023364.2230.001.2023365105952.nc",["Clear_Sky_Confidence"],id="CLDMSK"), + pytest.param("CLDPROP_L2_VIIRS_SNPP.A2023364.2230.011.2023365115856.nc", + ["Cloud_Top_Height"], id="CLDPROP"), + pytest.param("CLDMSK_L2_VIIRS_SNPP.A2023364.2230.001.2023365105952.nc", + ["Clear_Sky_Confidence"], id="CLDMSK"), pytest.param("AERDB_L2_VIIRS_SNPP.A2023364.2230.011.2023365113427.nc", ["Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate", - "Angstrom_Exponent_Land_Ocean_Best_Estimate"],id="AERDB"), + "Angstrom_Exponent_Land_Ocean_Best_Estimate"], id="AERDB"), ], ) - def test_load_l2_files(self,filename,datasets): + def test_load_l2_files(self, filename, datasets): """Test L2 File Loading.""" r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([filename]) From 9d5665c00f5cea2eb0d0d9c4acb72bf1dda3e640 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 9 Feb 2024 12:35:45 +0200 Subject: [PATCH 1119/1416] Clarify combine_metadata() docstring --- satpy/dataset/metadata.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/satpy/dataset/metadata.py b/satpy/dataset/metadata.py index 3f1de02ed5..087e86f6c9 100644 --- a/satpy/dataset/metadata.py +++ b/satpy/dataset/metadata.py @@ -32,12 +32,14 @@ def combine_metadata(*metadata_objects): If the values corresponding to any keys are not equal or do not exist in all provided dictionaries then they are not included in - the returned dictionary. All values of the keys containing the substring - 'start_time' will be set to the earliest value and similarly for 'end_time' - to latest time. All other keys containing the word 'time' are averaged. - Before these adjustments, non-datetime objects are filtered out. - - The same rules are applied to 'time_parameters' dictionary. + the returned dictionary. + + All values of the keys containing the substring 'start_time' will be set + to the earliest value and similarly for 'end_time' to latest time. All + other keys containing the word 'time' are averaged. Before these adjustments, + `None` values resulting from data that don't have times associated to them + are removed. These rules are applied also to values in 'time_parameters' + dictionary. In the interest of processing time, lazy arrays are compared by object identity rather than by their contents. 
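The combining rules spelled out in the clarified docstring above are easiest to see in a concrete call. The following is a minimal illustrative sketch, not part of the patch series itself; the metadata values are invented for the example, while combine_metadata() and its module path are taken from the diff above:

    from datetime import datetime
    from satpy.dataset.metadata import combine_metadata

    mda1 = {"platform_name": "NOAA-20",
            "start_time": datetime(2024, 2, 9, 12, 0),
            "end_time": datetime(2024, 2, 9, 12, 10)}
    mda2 = {"platform_name": "NOAA-20",
            "start_time": datetime(2024, 2, 9, 12, 10),
            "end_time": datetime(2024, 2, 9, 12, 20)}

    combined = combine_metadata(mda1, mda2)
    # Values that are equal in all inputs are kept unchanged.
    assert combined["platform_name"] == "NOAA-20"
    # 'start_time' keys take the earliest value, 'end_time' keys the latest.
    assert combined["start_time"] == datetime(2024, 2, 9, 12, 0)
    assert combined["end_time"] == datetime(2024, 2, 9, 12, 20)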
From 83f6bc8f9ee544876c24783ef59034aaaef2c17c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 9 Feb 2024 12:44:46 -0600 Subject: [PATCH 1120/1416] Update satpy/dataset/metadata.py --- satpy/dataset/metadata.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/dataset/metadata.py b/satpy/dataset/metadata.py index 087e86f6c9..bc99d3cb21 100644 --- a/satpy/dataset/metadata.py +++ b/satpy/dataset/metadata.py @@ -38,7 +38,7 @@ def combine_metadata(*metadata_objects): to the earliest value and similarly for 'end_time' to latest time. All other keys containing the word 'time' are averaged. Before these adjustments, `None` values resulting from data that don't have times associated to them - are removed. These rules are applied also to values in 'time_parameters' + are removed. These rules are applied also to values in the 'time_parameters' dictionary. In the interest of processing time, lazy arrays are compared by object From 12b8725930170e7984b21d56486ad18e42cb315f Mon Sep 17 00:00:00 2001 From: Will Sharpe Date: Fri, 9 Feb 2024 19:40:27 +0000 Subject: [PATCH 1121/1416] reverted cth colormap, changed from txt to csv --- ...ol_thickness.txt => aerosol_thickness.csv} | 302 +++++------ ...rom_exponent.txt => angstrom_exponent.csv} | 502 +++++++++--------- ...onfidence.txt => clear_sky_confidence.csv} | 202 +++---- satpy/etc/enhancements/generic.yaml | 28 +- 4 files changed, 512 insertions(+), 522 deletions(-) rename satpy/etc/colormaps/{aerosol_thickness.txt => aerosol_thickness.csv} (94%) rename satpy/etc/colormaps/{angstrom_exponent.txt => angstrom_exponent.csv} (94%) rename satpy/etc/colormaps/{clear_sky_confidence.txt => clear_sky_confidence.csv} (94%) diff --git a/satpy/etc/colormaps/aerosol_thickness.txt b/satpy/etc/colormaps/aerosol_thickness.csv similarity index 94% rename from satpy/etc/colormaps/aerosol_thickness.txt rename to satpy/etc/colormaps/aerosol_thickness.csv index e1e6a02f45..39d8527f6c 100644 --- a/satpy/etc/colormaps/aerosol_thickness.txt +++ b/satpy/etc/colormaps/aerosol_thickness.csv @@ -1,151 +1,151 @@ -0, 255, 252, 199 -0.005, 255, 251, 193 -0.01, 255, 250, 188 -0.015, 255, 249, 183 -0.02, 255, 248, 178 -0.025, 255, 247, 173 -0.03, 255, 246, 167 -0.035, 255, 245, 162 -0.04, 255, 244, 157 -0.045, 255, 243, 152 -0.05, 255, 242, 147 -0.055, 255, 240, 144 -0.06, 255, 239, 141 -0.065, 255, 238, 138 -0.07, 255, 236, 135 -0.075, 255, 235, 132 -0.08, 255, 234, 129 -0.085, 255, 232, 126 -0.09, 255, 231, 123 -0.095, 255, 230, 120 -0.1, 255, 229, 118 -0.105, 255, 227, 115 -0.11, 255, 226, 113 -0.115, 255, 225, 110 -0.12, 255, 223, 108 -0.125, 255, 222, 106 -0.13, 255, 221, 103 -0.135, 255, 219, 101 -0.14, 255, 218, 98 -0.145, 255, 217, 96 -0.15, 255, 216, 94 -0.155, 255, 214, 91 -0.16, 255, 212, 89 -0.165, 255, 210, 87 -0.17, 255, 208, 85 -0.175, 255, 207, 83 -0.18, 255, 205, 80 -0.185, 255, 203, 78 -0.19, 255, 201, 76 -0.195, 255, 199, 74 -0.2, 255, 198, 72 -0.205, 255, 195, 70 -0.21, 255, 193, 68 -0.215, 255, 190, 66 -0.22, 255, 188, 64 -0.225, 255, 185, 62 -0.23, 255, 183, 60 -0.235, 255, 180, 58 -0.24, 255, 178, 56 -0.245, 255, 175, 54 -0.25, 255, 173, 53 -0.255, 255, 170, 51 -0.26, 255, 168, 50 -0.265, 255, 165, 49 -0.27, 255, 163, 47 -0.275, 255, 161, 46 -0.28, 255, 158, 45 -0.285, 255, 156, 43 -0.29, 255, 153, 42 -0.295, 255, 151, 41 -0.3, 255, 149, 40 -0.305, 255, 146, 39 -0.31, 255, 144, 38 -0.315, 255, 142, 37 -0.32, 255, 140, 37 -0.325, 255, 138, 36 -0.33, 255, 135, 35 -0.335, 255, 133, 35 -0.34, 255, 131, 34 -0.345, 255, 129, 33 -0.35, 255, 
127, 33 -0.355, 255, 124, 32 -0.36, 255, 121, 31 -0.365, 255, 118, 31 -0.37, 255, 115, 30 -0.375, 255, 112, 30 -0.38, 255, 109, 29 -0.385, 255, 106, 28 -0.39, 255, 103, 28 -0.395, 255, 100, 27 -0.4, 255, 98, 27 -0.405, 255, 94, 26 -0.41, 255, 91, 25 -0.415, 255, 88, 24 -0.42, 255, 85, 24 -0.425, 255, 82, 23 -0.43, 255, 78, 22 -0.435, 255, 75, 22 -0.44, 255, 72, 21 -0.445, 255, 69, 20 -0.45, 255, 66, 20 -0.455, 254, 63, 19 -0.46, 253, 60, 19 -0.465, 252, 58, 18 -0.47, 251, 55, 18 -0.475, 250, 53, 18 -0.48, 249, 50, 17 -0.485, 248, 47, 17 -0.49, 247, 45, 16 -0.495, 246, 42, 16 -0.5, 245, 40, 16 -0.505, 243, 38, 15 -0.51, 242, 36, 15 -0.515, 240, 34, 14 -0.52, 239, 32, 14 -0.525, 238, 30, 13 -0.53, 236, 28, 13 -0.535, 235, 26, 12 -0.54, 233, 24, 12 -0.545, 232, 22, 11 -0.55, 231, 20, 11 -0.555, 229, 18, 11 -0.56, 227, 17, 11 -0.565, 225, 16, 11 -0.57, 223, 14, 11 -0.575, 221, 13, 11 -0.58, 219, 12, 11 -0.585, 217, 10, 11 -0.59, 215, 9, 11 -0.595, 213, 8, 11 -0.6, 211, 7, 12 -0.605, 208, 6, 12 -0.61, 206, 5, 12 -0.615, 204, 4, 12 -0.62, 201, 4, 12 -0.625, 199, 3, 13 -0.63, 197, 2, 13 -0.635, 194, 2, 13 -0.64, 192, 1, 13 -0.645, 190, 0, 13 -0.65, 188, 0, 14 -0.655, 184, 0, 14 -0.66, 181, 0, 14 -0.665, 178, 0, 14 -0.67, 174, 0, 14 -0.675, 171, 0, 14 -0.68, 168, 0, 14 -0.685, 164, 0, 14 -0.69, 161, 0, 14 -0.695, 158, 0, 14 -0.7, 155, 0, 14 -1.13, 152, 0, 14 -1.56, 149, 0, 14 -1.99, 146, 0, 14 -2.42, 143, 0, 14 -2.85, 140, 0, 14 -3.28, 137, 0, 14 -3.71, 134, 0, 14 -4.14, 131, 0, 14 -4.57, 128, 0, 14 -5, 125, 0, 14 +0, 255, 252, 199 +0.005, 255, 251, 193 +0.01, 255, 250, 188 +0.015, 255, 249, 183 +0.02, 255, 248, 178 +0.025, 255, 247, 173 +0.03, 255, 246, 167 +0.035, 255, 245, 162 +0.04, 255, 244, 157 +0.045, 255, 243, 152 +0.05, 255, 242, 147 +0.055, 255, 240, 144 +0.06, 255, 239, 141 +0.065, 255, 238, 138 +0.07, 255, 236, 135 +0.075, 255, 235, 132 +0.08, 255, 234, 129 +0.085, 255, 232, 126 +0.09, 255, 231, 123 +0.095, 255, 230, 120 +0.1, 255, 229, 118 +0.105, 255, 227, 115 +0.11, 255, 226, 113 +0.115, 255, 225, 110 +0.12, 255, 223, 108 +0.125, 255, 222, 106 +0.13, 255, 221, 103 +0.135, 255, 219, 101 +0.14, 255, 218, 98 +0.145, 255, 217, 96 +0.15, 255, 216, 94 +0.155, 255, 214, 91 +0.16, 255, 212, 89 +0.165, 255, 210, 87 +0.17, 255, 208, 85 +0.175, 255, 207, 83 +0.18, 255, 205, 80 +0.185, 255, 203, 78 +0.19, 255, 201, 76 +0.195, 255, 199, 74 +0.2, 255, 198, 72 +0.205, 255, 195, 70 +0.21, 255, 193, 68 +0.215, 255, 190, 66 +0.22, 255, 188, 64 +0.225, 255, 185, 62 +0.23, 255, 183, 60 +0.235, 255, 180, 58 +0.24, 255, 178, 56 +0.245, 255, 175, 54 +0.25, 255, 173, 53 +0.255, 255, 170, 51 +0.26, 255, 168, 50 +0.265, 255, 165, 49 +0.27, 255, 163, 47 +0.275, 255, 161, 46 +0.28, 255, 158, 45 +0.285, 255, 156, 43 +0.29, 255, 153, 42 +0.295, 255, 151, 41 +0.3, 255, 149, 40 +0.305, 255, 146, 39 +0.31, 255, 144, 38 +0.315, 255, 142, 37 +0.32, 255, 140, 37 +0.325, 255, 138, 36 +0.33, 255, 135, 35 +0.335, 255, 133, 35 +0.34, 255, 131, 34 +0.345, 255, 129, 33 +0.35, 255, 127, 33 +0.355, 255, 124, 32 +0.36, 255, 121, 31 +0.365, 255, 118, 31 +0.37, 255, 115, 30 +0.375, 255, 112, 30 +0.38, 255, 109, 29 +0.385, 255, 106, 28 +0.39, 255, 103, 28 +0.395, 255, 100, 27 +0.4, 255, 98, 27 +0.405, 255, 94, 26 +0.41, 255, 91, 25 +0.415, 255, 88, 24 +0.42, 255, 85, 24 +0.425, 255, 82, 23 +0.43, 255, 78, 22 +0.435, 255, 75, 22 +0.44, 255, 72, 21 +0.445, 255, 69, 20 +0.45, 255, 66, 20 +0.455, 254, 63, 19 +0.46, 253, 60, 19 +0.465, 252, 58, 18 +0.47, 251, 55, 18 +0.475, 250, 53, 18 +0.48, 249, 50, 17 +0.485, 248, 47, 17 +0.49, 
247, 45, 16 +0.495, 246, 42, 16 +0.5, 245, 40, 16 +0.505, 243, 38, 15 +0.51, 242, 36, 15 +0.515, 240, 34, 14 +0.52, 239, 32, 14 +0.525, 238, 30, 13 +0.53, 236, 28, 13 +0.535, 235, 26, 12 +0.54, 233, 24, 12 +0.545, 232, 22, 11 +0.55, 231, 20, 11 +0.555, 229, 18, 11 +0.56, 227, 17, 11 +0.565, 225, 16, 11 +0.57, 223, 14, 11 +0.575, 221, 13, 11 +0.58, 219, 12, 11 +0.585, 217, 10, 11 +0.59, 215, 9, 11 +0.595, 213, 8, 11 +0.6, 211, 7, 12 +0.605, 208, 6, 12 +0.61, 206, 5, 12 +0.615, 204, 4, 12 +0.62, 201, 4, 12 +0.625, 199, 3, 13 +0.63, 197, 2, 13 +0.635, 194, 2, 13 +0.64, 192, 1, 13 +0.645, 190, 0, 13 +0.65, 188, 0, 14 +0.655, 184, 0, 14 +0.66, 181, 0, 14 +0.665, 178, 0, 14 +0.67, 174, 0, 14 +0.675, 171, 0, 14 +0.68, 168, 0, 14 +0.685, 164, 0, 14 +0.69, 161, 0, 14 +0.695, 158, 0, 14 +0.7, 155, 0, 14 +1.13, 152, 0, 14 +1.56, 149, 0, 14 +1.99, 146, 0, 14 +2.42, 143, 0, 14 +2.85, 140, 0, 14 +3.28, 137, 0, 14 +3.71, 134, 0, 14 +4.14, 131, 0, 14 +4.57, 128, 0, 14 +5, 125, 0, 14 diff --git a/satpy/etc/colormaps/angstrom_exponent.txt b/satpy/etc/colormaps/angstrom_exponent.csv similarity index 94% rename from satpy/etc/colormaps/angstrom_exponent.txt rename to satpy/etc/colormaps/angstrom_exponent.csv index 4e56fccd68..a14ecd17d4 100644 --- a/satpy/etc/colormaps/angstrom_exponent.txt +++ b/satpy/etc/colormaps/angstrom_exponent.csv @@ -1,251 +1,251 @@ -0, 122, 145, 2 -0.01, 123, 148, 3 -0.02, 124, 150, 4 -0.03, 124, 153, 5 -0.04, 125, 155, 6 -0.05, 126, 158, 7 -0.06, 127, 160, 8 -0.07, 127, 163, 9 -0.08, 128, 165, 10 -0.09, 129, 168, 11 -0.1, 130, 170, 12 -0.11, 130, 173, 13 -0.12, 131, 175, 14 -0.13, 132, 178, 15 -0.14, 133, 181, 16 -0.15, 132, 183, 18 -0.16, 132, 185, 20 -0.17, 132, 187, 22 -0.18, 132, 189, 25 -0.19, 132, 191, 27 -0.2, 132, 193, 29 -0.21, 132, 195, 31 -0.22, 131, 197, 34 -0.23, 131, 199, 36 -0.24, 131, 201, 38 -0.25, 131, 203, 40 -0.26, 131, 205, 43 -0.27, 131, 207, 45 -0.28, 131, 209, 47 -0.29, 131, 212, 50 -0.3, 130, 213, 51 -0.31, 129, 215, 53 -0.32, 128, 217, 55 -0.33, 128, 219, 57 -0.34, 127, 221, 59 -0.35, 126, 222, 61 -0.36, 125, 224, 63 -0.37, 125, 226, 64 -0.38, 124, 228, 66 -0.39, 123, 230, 68 -0.4, 122, 231, 70 -0.41, 122, 233, 72 -0.42, 121, 235, 74 -0.43, 120, 237, 76 -0.44, 120, 239, 78 -0.45, 119, 239, 79 -0.46, 118, 240, 80 -0.47, 117, 241, 82 -0.48, 116, 242, 83 -0.49, 116, 243, 85 -0.5, 115, 244, 86 -0.51, 114, 245, 87 -0.52, 113, 246, 89 -0.53, 112, 247, 90 -0.54, 112, 248, 92 -0.55, 111, 249, 93 -0.56, 110, 250, 94 -0.57, 109, 251, 96 -0.58, 108, 252, 97 -0.59, 108, 253, 99 -0.6, 107, 252, 100 -0.61, 106, 252, 102 -0.62, 106, 252, 103 -0.63, 105, 251, 105 -0.64, 105, 251, 106 -0.65, 104, 251, 108 -0.66, 103, 251, 109 -0.67, 103, 250, 111 -0.68, 102, 250, 112 -0.69, 102, 250, 114 -0.7, 101, 250, 115 -0.71, 100, 249, 117 -0.72, 100, 249, 118 -0.73, 99, 249, 120 -0.74, 99, 249, 122 -0.75, 98, 247, 123 -0.76, 97, 246, 124 -0.77, 96, 245, 126 -0.78, 95, 244, 127 -0.79, 94, 243, 128 -0.8, 93, 242, 130 -0.81, 92, 241, 131 -0.82, 92, 239, 132 -0.83, 91, 238, 134 -0.84, 90, 237, 135 -0.85, 89, 236, 136 -0.86, 88, 235, 138 -0.87, 87, 234, 139 -0.88, 86, 233, 140 -0.89, 86, 232, 142 -0.9, 85, 230, 143 -0.91, 84, 229, 144 -0.92, 83, 228, 145 -0.93, 82, 226, 147 -0.94, 81, 225, 148 -0.95, 80, 224, 149 -0.96, 79, 223, 150 -0.97, 78, 221, 152 -0.98, 77, 220, 153 -0.99, 76, 219, 154 -1, 75, 218, 155 -1.01, 74, 216, 157 -1.02, 73, 215, 158 -1.03, 72, 214, 159 -1.04, 72, 213, 161 -1.05, 71, 211, 162 -1.06, 70, 209, 163 -1.07, 69, 208, 164 -1.08, 68, 206, 165 -1.09, 67, 205, 166 
-1.1, 66, 203, 167 -1.11, 65, 201, 168 -1.12, 64, 200, 170 -1.13, 63, 198, 171 -1.14, 62, 197, 172 -1.15, 61, 195, 173 -1.16, 60, 193, 174 -1.17, 59, 192, 175 -1.18, 58, 190, 176 -1.19, 58, 189, 178 -1.2, 58, 187, 178 -1.21, 58, 185, 179 -1.22, 58, 184, 180 -1.23, 58, 182, 181 -1.24, 58, 181, 182 -1.25, 58, 179, 183 -1.26, 58, 178, 184 -1.27, 59, 176, 184 -1.28, 59, 175, 185 -1.29, 59, 173, 186 -1.3, 59, 172, 187 -1.31, 59, 170, 188 -1.32, 59, 169, 189 -1.33, 59, 167, 190 -1.34, 60, 166, 191 -1.35, 60, 164, 191 -1.36, 61, 162, 192 -1.37, 61, 160, 193 -1.38, 62, 158, 194 -1.39, 63, 156, 195 -1.4, 63, 154, 195 -1.41, 64, 152, 196 -1.42, 64, 150, 197 -1.43, 65, 148, 198 -1.44, 66, 146, 199 -1.45, 66, 144, 199 -1.46, 67, 142, 200 -1.47, 67, 140, 201 -1.48, 68, 138, 202 -1.49, 69, 137, 203 -1.5, 69, 135, 203 -1.51, 70, 133, 204 -1.52, 70, 131, 205 -1.53, 71, 129, 205 -1.54, 72, 128, 206 -1.55, 72, 126, 207 -1.56, 73, 124, 207 -1.57, 73, 122, 208 -1.58, 74, 120, 209 -1.59, 75, 119, 209 -1.6, 75, 117, 210 -1.61, 76, 115, 211 -1.62, 76, 113, 211 -1.63, 77, 111, 212 -1.64, 78, 110, 213 -1.65, 78, 108, 213 -1.66, 79, 106, 214 -1.67, 80, 104, 214 -1.68, 80, 102, 215 -1.69, 81, 101, 216 -1.7, 82, 99, 216 -1.71, 82, 97, 217 -1.72, 83, 95, 217 -1.73, 84, 93, 218 -1.74, 84, 92, 219 -1.75, 85, 90, 219 -1.76, 86, 88, 220 -1.77, 86, 86, 220 -1.78, 87, 84, 221 -1.79, 88, 83, 222 -1.8, 88, 82, 222 -1.81, 89, 81, 223 -1.82, 90, 80, 223 -1.83, 91, 80, 224 -1.84, 92, 79, 224 -1.85, 93, 78, 225 -1.86, 94, 77, 225 -1.87, 95, 77, 226 -1.88, 96, 76, 226 -1.89, 97, 75, 227 -1.9, 98, 74, 227 -1.91, 99, 74, 228 -1.92, 100, 73, 228 -1.93, 101, 72, 229 -1.94, 102, 72, 230 -1.95, 104, 72, 230 -1.96, 106, 73, 230 -1.97, 108, 73, 230 -1.98, 110, 74, 231 -1.99, 112, 74, 231 -2, 114, 75, 231 -2.01, 116, 75, 231 -2.02, 118, 76, 232 -2.03, 120, 76, 232 -2.04, 122, 77, 232 -2.05, 124, 77, 232 -2.06, 126, 78, 233 -2.07, 128, 78, 233 -2.08, 130, 79, 233 -2.09, 133, 80, 234 -2.1, 135, 80, 234 -2.11, 137, 80, 234 -2.12, 139, 81, 234 -2.13, 141, 81, 234 -2.14, 143, 81, 234 -2.15, 145, 82, 234 -2.16, 147, 82, 234 -2.17, 149, 82, 234 -2.18, 151, 83, 234 -2.19, 153, 83, 234 -2.2, 155, 83, 234 -2.21, 157, 84, 234 -2.22, 159, 84, 234 -2.23, 161, 84, 234 -2.24, 164, 85, 235 -2.25, 165, 85, 235 -2.26, 166, 85, 235 -2.27, 168, 85, 235 -2.28, 169, 85, 235 -2.29, 171, 85, 235 -2.3, 172, 85, 235 -2.31, 174, 85, 235 -2.32, 175, 86, 235 -2.33, 177, 86, 235 -2.34, 178, 86, 235 -2.35, 180, 86, 235 -2.36, 181, 86, 235 -2.37, 183, 86, 235 -2.38, 184, 86, 235 -2.39, 186, 87, 235 -2.4, 187, 87, 234 -2.41, 188, 87, 234 -2.42, 190, 87, 234 -2.43, 191, 88, 234 -2.44, 193, 88, 234 -2.45, 194, 88, 234 -2.46, 196, 88, 234 -2.47, 197, 89, 234 -2.48, 199, 89, 234 -2.49, 200, 89, 234 -2.5, 202, 89, 234 +0, 122, 145, 2 +0.01, 123, 148, 3 +0.02, 124, 150, 4 +0.03, 124, 153, 5 +0.04, 125, 155, 6 +0.05, 126, 158, 7 +0.06, 127, 160, 8 +0.07, 127, 163, 9 +0.08, 128, 165, 10 +0.09, 129, 168, 11 +0.1, 130, 170, 12 +0.11, 130, 173, 13 +0.12, 131, 175, 14 +0.13, 132, 178, 15 +0.14, 133, 181, 16 +0.15, 132, 183, 18 +0.16, 132, 185, 20 +0.17, 132, 187, 22 +0.18, 132, 189, 25 +0.19, 132, 191, 27 +0.2, 132, 193, 29 +0.21, 132, 195, 31 +0.22, 131, 197, 34 +0.23, 131, 199, 36 +0.24, 131, 201, 38 +0.25, 131, 203, 40 +0.26, 131, 205, 43 +0.27, 131, 207, 45 +0.28, 131, 209, 47 +0.29, 131, 212, 50 +0.3, 130, 213, 51 +0.31, 129, 215, 53 +0.32, 128, 217, 55 +0.33, 128, 219, 57 +0.34, 127, 221, 59 +0.35, 126, 222, 61 +0.36, 125, 224, 63 +0.37, 125, 226, 64 +0.38, 124, 228, 66 +0.39, 
123, 230, 68 +0.4, 122, 231, 70 +0.41, 122, 233, 72 +0.42, 121, 235, 74 +0.43, 120, 237, 76 +0.44, 120, 239, 78 +0.45, 119, 239, 79 +0.46, 118, 240, 80 +0.47, 117, 241, 82 +0.48, 116, 242, 83 +0.49, 116, 243, 85 +0.5, 115, 244, 86 +0.51, 114, 245, 87 +0.52, 113, 246, 89 +0.53, 112, 247, 90 +0.54, 112, 248, 92 +0.55, 111, 249, 93 +0.56, 110, 250, 94 +0.57, 109, 251, 96 +0.58, 108, 252, 97 +0.59, 108, 253, 99 +0.6, 107, 252, 100 +0.61, 106, 252, 102 +0.62, 106, 252, 103 +0.63, 105, 251, 105 +0.64, 105, 251, 106 +0.65, 104, 251, 108 +0.66, 103, 251, 109 +0.67, 103, 250, 111 +0.68, 102, 250, 112 +0.69, 102, 250, 114 +0.7, 101, 250, 115 +0.71, 100, 249, 117 +0.72, 100, 249, 118 +0.73, 99, 249, 120 +0.74, 99, 249, 122 +0.75, 98, 247, 123 +0.76, 97, 246, 124 +0.77, 96, 245, 126 +0.78, 95, 244, 127 +0.79, 94, 243, 128 +0.8, 93, 242, 130 +0.81, 92, 241, 131 +0.82, 92, 239, 132 +0.83, 91, 238, 134 +0.84, 90, 237, 135 +0.85, 89, 236, 136 +0.86, 88, 235, 138 +0.87, 87, 234, 139 +0.88, 86, 233, 140 +0.89, 86, 232, 142 +0.9, 85, 230, 143 +0.91, 84, 229, 144 +0.92, 83, 228, 145 +0.93, 82, 226, 147 +0.94, 81, 225, 148 +0.95, 80, 224, 149 +0.96, 79, 223, 150 +0.97, 78, 221, 152 +0.98, 77, 220, 153 +0.99, 76, 219, 154 +1, 75, 218, 155 +1.01, 74, 216, 157 +1.02, 73, 215, 158 +1.03, 72, 214, 159 +1.04, 72, 213, 161 +1.05, 71, 211, 162 +1.06, 70, 209, 163 +1.07, 69, 208, 164 +1.08, 68, 206, 165 +1.09, 67, 205, 166 +1.1, 66, 203, 167 +1.11, 65, 201, 168 +1.12, 64, 200, 170 +1.13, 63, 198, 171 +1.14, 62, 197, 172 +1.15, 61, 195, 173 +1.16, 60, 193, 174 +1.17, 59, 192, 175 +1.18, 58, 190, 176 +1.19, 58, 189, 178 +1.2, 58, 187, 178 +1.21, 58, 185, 179 +1.22, 58, 184, 180 +1.23, 58, 182, 181 +1.24, 58, 181, 182 +1.25, 58, 179, 183 +1.26, 58, 178, 184 +1.27, 59, 176, 184 +1.28, 59, 175, 185 +1.29, 59, 173, 186 +1.3, 59, 172, 187 +1.31, 59, 170, 188 +1.32, 59, 169, 189 +1.33, 59, 167, 190 +1.34, 60, 166, 191 +1.35, 60, 164, 191 +1.36, 61, 162, 192 +1.37, 61, 160, 193 +1.38, 62, 158, 194 +1.39, 63, 156, 195 +1.4, 63, 154, 195 +1.41, 64, 152, 196 +1.42, 64, 150, 197 +1.43, 65, 148, 198 +1.44, 66, 146, 199 +1.45, 66, 144, 199 +1.46, 67, 142, 200 +1.47, 67, 140, 201 +1.48, 68, 138, 202 +1.49, 69, 137, 203 +1.5, 69, 135, 203 +1.51, 70, 133, 204 +1.52, 70, 131, 205 +1.53, 71, 129, 205 +1.54, 72, 128, 206 +1.55, 72, 126, 207 +1.56, 73, 124, 207 +1.57, 73, 122, 208 +1.58, 74, 120, 209 +1.59, 75, 119, 209 +1.6, 75, 117, 210 +1.61, 76, 115, 211 +1.62, 76, 113, 211 +1.63, 77, 111, 212 +1.64, 78, 110, 213 +1.65, 78, 108, 213 +1.66, 79, 106, 214 +1.67, 80, 104, 214 +1.68, 80, 102, 215 +1.69, 81, 101, 216 +1.7, 82, 99, 216 +1.71, 82, 97, 217 +1.72, 83, 95, 217 +1.73, 84, 93, 218 +1.74, 84, 92, 219 +1.75, 85, 90, 219 +1.76, 86, 88, 220 +1.77, 86, 86, 220 +1.78, 87, 84, 221 +1.79, 88, 83, 222 +1.8, 88, 82, 222 +1.81, 89, 81, 223 +1.82, 90, 80, 223 +1.83, 91, 80, 224 +1.84, 92, 79, 224 +1.85, 93, 78, 225 +1.86, 94, 77, 225 +1.87, 95, 77, 226 +1.88, 96, 76, 226 +1.89, 97, 75, 227 +1.9, 98, 74, 227 +1.91, 99, 74, 228 +1.92, 100, 73, 228 +1.93, 101, 72, 229 +1.94, 102, 72, 230 +1.95, 104, 72, 230 +1.96, 106, 73, 230 +1.97, 108, 73, 230 +1.98, 110, 74, 231 +1.99, 112, 74, 231 +2, 114, 75, 231 +2.01, 116, 75, 231 +2.02, 118, 76, 232 +2.03, 120, 76, 232 +2.04, 122, 77, 232 +2.05, 124, 77, 232 +2.06, 126, 78, 233 +2.07, 128, 78, 233 +2.08, 130, 79, 233 +2.09, 133, 80, 234 +2.1, 135, 80, 234 +2.11, 137, 80, 234 +2.12, 139, 81, 234 +2.13, 141, 81, 234 +2.14, 143, 81, 234 +2.15, 145, 82, 234 +2.16, 147, 82, 234 +2.17, 149, 82, 234 +2.18, 151, 
83, 234 +2.19, 153, 83, 234 +2.2, 155, 83, 234 +2.21, 157, 84, 234 +2.22, 159, 84, 234 +2.23, 161, 84, 234 +2.24, 164, 85, 235 +2.25, 165, 85, 235 +2.26, 166, 85, 235 +2.27, 168, 85, 235 +2.28, 169, 85, 235 +2.29, 171, 85, 235 +2.3, 172, 85, 235 +2.31, 174, 85, 235 +2.32, 175, 86, 235 +2.33, 177, 86, 235 +2.34, 178, 86, 235 +2.35, 180, 86, 235 +2.36, 181, 86, 235 +2.37, 183, 86, 235 +2.38, 184, 86, 235 +2.39, 186, 87, 235 +2.4, 187, 87, 234 +2.41, 188, 87, 234 +2.42, 190, 87, 234 +2.43, 191, 88, 234 +2.44, 193, 88, 234 +2.45, 194, 88, 234 +2.46, 196, 88, 234 +2.47, 197, 89, 234 +2.48, 199, 89, 234 +2.49, 200, 89, 234 +2.5, 202, 89, 234 diff --git a/satpy/etc/colormaps/clear_sky_confidence.txt b/satpy/etc/colormaps/clear_sky_confidence.csv similarity index 94% rename from satpy/etc/colormaps/clear_sky_confidence.txt rename to satpy/etc/colormaps/clear_sky_confidence.csv index 58393dbbcd..c4743b694b 100644 --- a/satpy/etc/colormaps/clear_sky_confidence.txt +++ b/satpy/etc/colormaps/clear_sky_confidence.csv @@ -1,101 +1,101 @@ -0, 255, 247, 236 -0.01, 254, 246, 233 -0.02, 254, 244, 230 -0.03, 254, 243, 228 -0.04, 254, 242, 224 -0.05, 254, 241, 222 -0.06, 254, 239, 219 -0.07, 254, 239, 216 -0.08, 254, 237, 213 -0.09, 254, 236, 210 -0.1, 254, 235, 207 -0.11, 254, 233, 204 -0.12, 254, 232, 202 -0.13, 253, 231, 198 -0.14, 253, 230, 195 -0.15, 253, 228, 191 -0.16, 253, 226, 189 -0.17, 253, 225, 185 -0.18, 253, 223, 181 -0.19, 253, 221, 178 -0.2, 253, 220, 174 -0.21, 253, 218, 172 -0.22, 253, 216, 168 -0.23, 253, 215, 165 -0.24, 253, 213, 161 -0.25, 253, 211, 157 -0.26, 253, 210, 156 -0.27, 253, 207, 153 -0.28, 253, 206, 152 -0.29, 253, 203, 149 -0.3, 253, 202, 148 -0.31, 253, 200, 145 -0.32, 253, 198, 143 -0.33, 253, 196, 141 -0.34, 253, 193, 139 -0.35, 253, 192, 137 -0.36, 253, 189, 134 -0.37, 253, 188, 133 -0.38, 252, 185, 130 -0.39, 252, 182, 127 -0.4, 252, 177, 123 -0.41, 252, 174, 120 -0.42, 252, 170, 116 -0.43, 252, 166, 112 -0.44, 252, 163, 109 -0.45, 252, 159, 105 -0.46, 252, 156, 103 -0.47, 252, 151, 99 -0.48, 252, 148, 96 -0.49, 252, 144, 92 -0.5, 251, 140, 88 -0.51, 250, 137, 87 -0.52, 249, 134, 86 -0.53, 248, 131, 85 -0.54, 247, 127, 83 -0.55, 246, 125, 82 -0.56, 245, 121, 80 -0.57, 244, 119, 79 -0.58, 243, 115, 78 -0.59, 242, 111, 76 -0.6, 241, 109, 75 -0.61, 240, 105, 73 -0.62, 239, 102, 72 -0.63, 237, 98, 69 -0.64, 236, 94, 67 -0.65, 234, 89, 63 -0.66, 232, 86, 60 -0.67, 230, 81, 57 -0.68, 227, 76, 53 -0.69, 226, 73, 50 -0.7, 224, 68, 46 -0.71, 222, 65, 44 -0.72, 220, 60, 40 -0.73, 218, 56, 37 -0.74, 216, 51, 33 -0.75, 214, 46, 30 -0.76, 211, 43, 28 -0.77, 208, 39, 25 -0.78, 206, 36, 23 -0.79, 202, 31, 20 -0.8, 200, 28, 188 -0.81, 197, 24, 15 -0.82, 194, 21, 13 -0.83, 191, 16, 10 -0.84, 188, 12, 7 -0.85, 185, 9, 5 -0.86, 182, 4, 3 -0.87, 180, 1, 1 -0.88, 175, 0, 0 -0.89, 172, 0, 0 -0.9, 167, 0, 0 -0.91, 164, 0, 0 -0.92, 159, 0, 0 -0.93, 154, 0, 0 -0.94, 151, 0, 0 -0.95, 146, 0, 0 -0.96, 143, 0, 0 -0.97, 138, 0, 0 -0.98, 135, 0, 0 -0.99, 130, 0, 0 -1, 127, 0, 0 +0, 255, 247, 236 +0.01, 254, 246, 233 +0.02, 254, 244, 230 +0.03, 254, 243, 228 +0.04, 254, 242, 224 +0.05, 254, 241, 222 +0.06, 254, 239, 219 +0.07, 254, 239, 216 +0.08, 254, 237, 213 +0.09, 254, 236, 210 +0.1, 254, 235, 207 +0.11, 254, 233, 204 +0.12, 254, 232, 202 +0.13, 253, 231, 198 +0.14, 253, 230, 195 +0.15, 253, 228, 191 +0.16, 253, 226, 189 +0.17, 253, 225, 185 +0.18, 253, 223, 181 +0.19, 253, 221, 178 +0.2, 253, 220, 174 +0.21, 253, 218, 172 +0.22, 253, 216, 168 +0.23, 253, 215, 165 +0.24, 253, 213, 161 +0.25, 253, 
211, 157 +0.26, 253, 210, 156 +0.27, 253, 207, 153 +0.28, 253, 206, 152 +0.29, 253, 203, 149 +0.3, 253, 202, 148 +0.31, 253, 200, 145 +0.32, 253, 198, 143 +0.33, 253, 196, 141 +0.34, 253, 193, 139 +0.35, 253, 192, 137 +0.36, 253, 189, 134 +0.37, 253, 188, 133 +0.38, 252, 185, 130 +0.39, 252, 182, 127 +0.4, 252, 177, 123 +0.41, 252, 174, 120 +0.42, 252, 170, 116 +0.43, 252, 166, 112 +0.44, 252, 163, 109 +0.45, 252, 159, 105 +0.46, 252, 156, 103 +0.47, 252, 151, 99 +0.48, 252, 148, 96 +0.49, 252, 144, 92 +0.5, 251, 140, 88 +0.51, 250, 137, 87 +0.52, 249, 134, 86 +0.53, 248, 131, 85 +0.54, 247, 127, 83 +0.55, 246, 125, 82 +0.56, 245, 121, 80 +0.57, 244, 119, 79 +0.58, 243, 115, 78 +0.59, 242, 111, 76 +0.6, 241, 109, 75 +0.61, 240, 105, 73 +0.62, 239, 102, 72 +0.63, 237, 98, 69 +0.64, 236, 94, 67 +0.65, 234, 89, 63 +0.66, 232, 86, 60 +0.67, 230, 81, 57 +0.68, 227, 76, 53 +0.69, 226, 73, 50 +0.7, 224, 68, 46 +0.71, 222, 65, 44 +0.72, 220, 60, 40 +0.73, 218, 56, 37 +0.74, 216, 51, 33 +0.75, 214, 46, 30 +0.76, 211, 43, 28 +0.77, 208, 39, 25 +0.78, 206, 36, 23 +0.79, 202, 31, 20 +0.8, 200, 28, 188 +0.81, 197, 24, 15 +0.82, 194, 21, 13 +0.83, 191, 16, 10 +0.84, 188, 12, 7 +0.85, 185, 9, 5 +0.86, 182, 4, 3 +0.87, 180, 1, 1 +0.88, 175, 0, 0 +0.89, 172, 0, 0 +0.9, 167, 0, 0 +0.91, 164, 0, 0 +0.92, 159, 0, 0 +0.93, 154, 0, 0 +0.94, 151, 0, 0 +0.95, 146, 0, 0 +0.96, 143, 0, 0 +0.97, 138, 0, 0 +0.98, 135, 0, 0 +0.99, 130, 0, 0 +1, 127, 0, 0 diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 78b59a69c7..5f63b2ad65 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -1238,22 +1238,12 @@ enhancements: method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - - {colors: [[255, 0, 0], [255, 0, 0]], min_value: 0, max_value: 800} - - {colors: [[170, 0, 0], [170, 0, 0]], min_value: 800.0001, max_value: 1600} - - {colors: [[110, 0, 0], [110, 0, 0]], min_value: 1600.0001, max_value: 2350} - - {colors: [[112, 1, 2], [112, 1, 2]], min_value: 2350.0001, max_value: 3150} - - {colors: [[124, 91, 5], [124, 91, 5]], min_value: 3150.0001, max_value: 4000} - - {colors: [[240, 190, 64], [240, 190, 64]], min_value: 4000.0001, max_value: 4800} - - {colors: [[255, 255, 0], [255, 255, 0]], min_value: 4800.0001, max_value: 5600} - - {colors: [[0, 220, 0], [0, 220, 0]], min_value: 5600.0001, max_value: 6400} - - {colors: [[0, 136, 0], [0, 136, 0]], min_value: 6400.0001, max_value: 7200} - - {colors: [[0, 80, 0], [0, 80, 0]], min_value: 7200.0001, max_value: 8000} - - {colors: [[0, 136, 238], [0, 136, 238]], min_value: 8000.0001, max_value: 8800} - - {colors: [[0, 0, 255], [0, 0, 255]], min_value: 8800.0001, max_value: 9600} - - {colors: [[0, 0, 170], [0, 0, 170]], min_value: 9600.0001, max_value: 10400} - - {colors: [[0, 0, 100], [0, 0, 100]], min_value: 10400.0001, max_value: 11200} - - {colors: [[183,15,141], [183, 15, 141]], min_value: 11200.0001, max_value: 12000} - - {colors: [[102, 0, 119], [102, 0, 119]], min_value: 12000.0001, max_value: 18000} + - { + colors: [[255, 0, 0], [170, 0, 0], [110, 0, 0], [112, 1, 2], [124, 91, 5], [240, 190, 64], [255, 255, 0], + [0, 220, 0], [0, 136, 0], [0, 80, 0], [0, 136, 238], [0, 0, 255], [0, 0, 170], [0, 0, 100], [183, 15, 141], + [102, 0, 119]], + values: [0, 800, 1600, 2350, 3150, 4000, 4800, 5600, 6400, 7200, 8000, 8800, 9600, 10400, 11200, 12000], + } Clear_Sky_Confidence: standard_name: cldmsk_clear_sky_confidence @@ -1263,7 +1253,7 @@ enhancements: kwargs: palettes: - { - filename: 
clear_sky_confidence.txt + filename: clear_sky_confidence.csv } Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate: @@ -1274,7 +1264,7 @@ enhancements: kwargs: palettes: - { - filename: aerosol_thickness.txt + filename: aerosol_thickness.csv } Angstrom_Exponent_Land_Ocean_Best_Estimate: @@ -1285,5 +1275,5 @@ enhancements: kwargs: palettes: - { - filename: angstrom_exponent.txt + filename: angstrom_exponent.csv } From e6afe2e59aab795d506bf8465eaa3326be846e33 Mon Sep 17 00:00:00 2001 From: Will Sharpe Date: Fri, 9 Feb 2024 20:35:37 +0000 Subject: [PATCH 1122/1416] finalized cloud_top_height colormap --- satpy/etc/colormaps/cloud_top_height.csv | 32 ++++++++++++++++++++++++ satpy/etc/enhancements/generic.yaml | 5 +--- 2 files changed, 33 insertions(+), 4 deletions(-) create mode 100644 satpy/etc/colormaps/cloud_top_height.csv diff --git a/satpy/etc/colormaps/cloud_top_height.csv b/satpy/etc/colormaps/cloud_top_height.csv new file mode 100644 index 0000000000..d1fa053029 --- /dev/null +++ b/satpy/etc/colormaps/cloud_top_height.csv @@ -0,0 +1,32 @@ +0 , 255 , 0 , 0 +800 , 255 , 0 , 0 +800.0001 , 170 , 0 , 0 +1600 , 170 , 0 , 0 +1600.0001 , 110 , 0 , 0 +2350 , 110 , 0 , 0 +2350.0001 , 112 , 1 , 2 +3150 , 112 , 1 , 2 +3150.0001 , 124 , 91 , 5 +4000 , 124 , 91 , 5 +4000.0001 , 240 , 190 , 64 +4800 , 240 , 190 , 64 +4800.0001 , 255 , 255 , 0 +5600 , 255 , 255 , 0 +5600.0001 , 0 , 220 , 0 +6400 , 0 , 220 , 0 +6400.0001 , 0 , 136 , 0 +7200 , 0 , 136 , 0 +7200.0001 , 0 , 80 , 0 +8000 , 0 , 80 , 0 +8000.0001 , 0 , 136 , 238 +8800 , 0 , 136 , 238 +8800.0001 , 0 , 0 , 255 +9600 , 0 , 0 , 255 +9600.0001 , 0 , 0 , 170 +10400 , 0 , 0 , 170 +10400.0001 , 0 , 0 , 100 +11200 , 0 , 0 , 100 +11200.0001 , 183 , 15 , 141 +12000 , 183 , 15 , 141 +12000.0001 , 102 , 0 , 119 +18000 , 102 , 0 , 119 diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 5f63b2ad65..7dcb730528 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -1239,10 +1239,7 @@ enhancements: kwargs: palettes: - { - colors: [[255, 0, 0], [170, 0, 0], [110, 0, 0], [112, 1, 2], [124, 91, 5], [240, 190, 64], [255, 255, 0], - [0, 220, 0], [0, 136, 0], [0, 80, 0], [0, 136, 238], [0, 0, 255], [0, 0, 170], [0, 0, 100], [183, 15, 141], - [102, 0, 119]], - values: [0, 800, 1600, 2350, 3150, 4000, 4800, 5600, 6400, 7200, 8000, 8800, 9600, 10400, 11200, 12000], + filename: cloud_top_height.csv } Clear_Sky_Confidence: From a2d839609b9eb1e149391679a81add405be5b246 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 12 Feb 2024 09:22:54 +0000 Subject: [PATCH 1123/1416] Make fake observation a bit longer --- satpy/tests/reader_tests/test_ahi_hsd.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 7c88c9e5ac..b52be71426 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -40,7 +40,7 @@ "satellite": "Himawari-8", "observation_area": "FLDK", "observation_start_time": 58413.12523839, - "observation_end_time": 58413.12562439, + "observation_end_time": 58413.132182834444, "observation_timeline": "0300", } FAKE_DATA_INFO: InfoDict = { @@ -343,7 +343,7 @@ def test_read_band(self, calibrate, *mocks): "nominal_start_time": datetime(2018, 10, 22, 3, 0, 0, 0), "nominal_end_time": datetime(2018, 10, 22, 3, 0, 0, 0), "observation_start_time": datetime(2018, 10, 22, 3, 0, 20, 596896), - "observation_end_time": datetime(2018, 
10, 22, 3, 0, 53, 947296), + "observation_end_time": datetime(2018, 10, 22, 3, 10, 20, 596896), } actual_time_params = im.attrs["time_parameters"] for key, value in time_params_exp.items(): @@ -419,7 +419,7 @@ def test_time_properties(self): assert fh.start_time == datetime(2018, 10, 22, 3, 0) assert fh.end_time == datetime(2018, 10, 22, 3, 0) assert fh.observation_start_time == datetime(2018, 10, 22, 3, 0, 20, 596896) - assert fh.observation_end_time == datetime(2018, 10, 22, 3, 0, 53, 947296) + assert fh.observation_end_time == datetime(2018, 10, 22, 3, 10, 20, 596896) assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 0, 0, 0) assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 0, 0, 0) From b68920fbe66bf45d604e4ad51e0e8e4f2df8b61d Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 12 Feb 2024 11:11:14 +0200 Subject: [PATCH 1124/1416] Remove double handling of start/end times and time parameters --- satpy/readers/file_handlers.py | 26 ++------------------------ 1 file changed, 2 insertions(+), 24 deletions(-) diff --git a/satpy/readers/file_handlers.py b/satpy/readers/file_handlers.py index 66a028eb4c..b844732a2e 100644 --- a/satpy/readers/file_handlers.py +++ b/satpy/readers/file_handlers.py @@ -112,10 +112,9 @@ def combine_info(self, all_infos): """ combined_info = combine_metadata(*all_infos) - new_dict = self._combine(all_infos, min, "start_time", "start_orbit") - new_dict.update(self._combine(all_infos, max, "end_time", "end_orbit")) + new_dict = self._combine(all_infos, min, "start_orbit") + new_dict.update(self._combine(all_infos, max, "end_orbit")) new_dict.update(self._combine_orbital_parameters(all_infos)) - new_dict.update(self._combine_time_parameters(all_infos)) try: area = SwathDefinition(lons=np.ma.vstack([info["area"].lons for info in all_infos]), @@ -145,27 +144,6 @@ def _combine_orbital_parameters(self, all_infos): orb_params_comb.update(self._combine(orb_params, np.mean, *keys)) return {"orbital_parameters": orb_params_comb} - def _combine_time_parameters(self, all_infos): - time_params = [info.get("time_parameters", {}) for info in all_infos] - if not all(time_params): - return {} - # Collect all available keys - time_params_comb = {} - for d in time_params: - time_params_comb.update(d) - - start_keys = ( - "nominal_start_time", - "observation_start_time", - ) - end_keys = ( - "nominal_end_time", - "observation_end_time", - ) - time_params_comb.update(self._combine(time_params, min, *start_keys)) - time_params_comb.update(self._combine(time_params, max, *end_keys)) - return {"time_parameters": time_params_comb} - @property def start_time(self): """Get start time.""" From d1c33a1bb9f48b082eedca42a74ddae8af0c828e Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 12 Feb 2024 11:21:40 +0200 Subject: [PATCH 1125/1416] Use datetimes when testing times --- satpy/tests/test_file_handlers.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/satpy/tests/test_file_handlers.py b/satpy/tests/test_file_handlers.py index 403e686204..925da3e561 100644 --- a/satpy/tests/test_file_handlers.py +++ b/satpy/tests/test_file_handlers.py @@ -49,25 +49,27 @@ def setUp(self): """Set up the test.""" self.fh = BaseFileHandler( "filename", {"filename_info": "bla"}, "filetype_info") + self.early_time = datetime(2024, 2, 12, 11, 00) + self.late_time = datetime(2024, 2, 12, 12, 00) def test_combine_times(self): """Combine times.""" - info1 = {"start_time": 1} - info2 = {"start_time": 2} + info1 = {"start_time": self.early_time} + info2 =
{"start_time": self.late_time} res = self.fh.combine_info([info1, info2]) - exp = {"start_time": 1} + exp = {"start_time": self.early_time} assert res == exp res = self.fh.combine_info([info2, info1]) - exp = {"start_time": 1} + exp = {"start_time": self.early_time} assert res == exp - info1 = {"end_time": 1} - info2 = {"end_time": 2} + info1 = {"end_time": self.early_time} + info2 = {"end_time": self.late_time} res = self.fh.combine_info([info1, info2]) - exp = {"end_time": 2} + exp = {"end_time": self.late_time} assert res == exp res = self.fh.combine_info([info2, info1]) - exp = {"end_time": 2} + exp = {"end_time": self.late_time} assert res == exp def test_combine_orbits(self): From 3e050b0940fac573316f67a327010a97cba007da Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 12 Feb 2024 11:22:06 +0200 Subject: [PATCH 1126/1416] Refactor to remove unnecessary return --- satpy/dataset/metadata.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/satpy/dataset/metadata.py b/satpy/dataset/metadata.py index bc99d3cb21..405885ad14 100644 --- a/satpy/dataset/metadata.py +++ b/satpy/dataset/metadata.py @@ -52,7 +52,6 @@ def combine_metadata(*metadata_objects): """ info_dicts = _get_valid_dicts(metadata_objects) - if len(info_dicts) == 1: return info_dicts[0].copy() @@ -91,9 +90,8 @@ def _combine_shared_info(shared_keys, info_dicts): def _combine_values(key, values, shared_info): if "time" in key: times = _combine_times(key, values) - if times is None: - return - shared_info[key] = times + if times is not None: + shared_info[key] = times elif _are_values_combinable(values): shared_info[key] = values[0] From c9fd59617b5e0ab1f75b62abbc6c8efb0b741ae8 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 12 Feb 2024 10:00:45 +0000 Subject: [PATCH 1127/1416] Take duration into account for nominal end time --- satpy/readers/ahi_hsd.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index 313e5ccab5..3c9e5fb3b7 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -443,7 +443,7 @@ def _is_valid_timeline(timeline): return False return True - def _modify_observation_time_for_nominal(self, observation_time): + def _modify_observation_time_for_nominal(self, observation_time, start_or_end_time="start"): """Round observation time to a nominal time based on known observation frequency. 
AHI observations are split into different sectors including Full Disk @@ -464,11 +464,18 @@ def _modify_observation_time_for_nominal(self, observation_time): ) return observation_time + observation_frequencies = {"FLDK": 600, "JP": 150, "R3": 150, "R4": 30, "R5": 30} if self.observation_area == "FLDK": - dt = 0 + dt_start = 0 + dt_end = observation_frequencies["FLDK"] else: - observation_frequency_seconds = {"JP": 150, "R3": 150, "R4": 30, "R5": 30}[self.observation_area[:2]] - dt = observation_frequency_seconds * (int(self.observation_area[2:]) - 1) + observation_frequency_seconds = observation_frequencies[self.observation_area[:2]] + dt_start = observation_frequency_seconds * (int(self.observation_area[2:]) - 1) + dt_end = observation_frequencies[self.observation_area[:2]] + + dt = dt_start + if start_or_end_time == "end": + dt += dt_end return observation_time.replace( hour=int(timeline[:2]), minute=int(timeline[2:4]) + dt//60, From 319d8b55c9b5d56b1cbf6e09ab46720826d0d5c6 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 12 Feb 2024 10:28:25 +0000 Subject: [PATCH 1128/1416] Update scanning frequencies test --- satpy/readers/ahi_hsd.py | 8 +-- satpy/tests/reader_tests/test_ahi_hsd.py | 71 +++++++++++++++++------- 2 files changed, 56 insertions(+), 23 deletions(-) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index 3c9e5fb3b7..3acd3bebdc 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -414,7 +414,7 @@ def start_time(self): @property def end_time(self): """Get the nominal end time.""" - return self.nominal_start_time + return self.nominal_end_time @property def observation_start_time(self): @@ -429,12 +429,12 @@ def observation_end_time(self): @property def nominal_start_time(self): """Time this band was nominally to be recorded.""" - return self._modify_observation_time_for_nominal(self.observation_start_time) + return self._modify_observation_time_for_nominal(self.observation_start_time, "start") @property def nominal_end_time(self): """Get the nominal end time.""" - return self._modify_observation_time_for_nominal(self.observation_end_time) + return self._modify_observation_time_for_nominal(self.observation_end_time, "end") @staticmethod def _is_valid_timeline(timeline): @@ -471,7 +471,7 @@ def _modify_observation_time_for_nominal(self, observation_time, start_or_end_ti else: observation_frequency_seconds = observation_frequencies[self.observation_area[:2]] dt_start = observation_frequency_seconds * (int(self.observation_area[2:]) - 1) - dt_end = observation_frequencies[self.observation_area[:2]] + dt_end = observation_frequency_seconds dt = dt_start if start_or_end_time == "end": diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index b52be71426..29e4cfcb9b 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -341,7 +341,7 @@ def test_read_band(self, calibrate, *mocks): time_params_exp = { "nominal_start_time": datetime(2018, 10, 22, 3, 0, 0, 0), - "nominal_end_time": datetime(2018, 10, 22, 3, 0, 0, 0), + "nominal_end_time": datetime(2018, 10, 22, 3, 10, 0, 0), "observation_start_time": datetime(2018, 10, 22, 3, 0, 20, 596896), "observation_end_time": datetime(2018, 10, 22, 3, 10, 20, 596896), } @@ -417,30 +417,63 @@ def test_time_properties(self): """Test start/end/scheduled time properties.""" with _fake_hsd_handler() as fh: assert fh.start_time == datetime(2018, 10, 22, 3, 0) - assert fh.end_time == datetime(2018, 10, 22, 3, 0) + 
assert fh.end_time == datetime(2018, 10, 22, 3, 10) assert fh.observation_start_time == datetime(2018, 10, 22, 3, 0, 20, 596896) assert fh.observation_end_time == datetime(2018, 10, 22, 3, 10, 20, 596896) assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 0, 0, 0) - assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 0, 0, 0) + assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 10, 0, 0) - def test_scanning_frequencies(self): + @pytest.mark.parametrize( + ("observation_area", "start_time", "end_time"), + [ + ( + "JP01", + datetime(2018, 10, 22, 3, 0, 0), + datetime(2018, 10, 22, 3, 2, 30) + ), + ( + "JP04", + datetime(2018, 10, 22, 3, 7, 30, 0), + datetime(2018, 10, 22, 3, 10, 0, 0) + ), + ( + "R301", + datetime(2018, 10, 22, 3, 0, 0), + datetime(2018, 10, 22, 3, 2, 30) + ), + ( + "R304", + datetime(2018, 10, 22, 3, 7, 30, 0), + datetime(2018, 10, 22, 3, 10, 0, 0) + ), + ( + "R401", + datetime(2018, 10, 22, 3, 0, 0), + datetime(2018, 10, 22, 3, 0, 30) + ), + ( + "R420", + datetime(2018, 10, 22, 3, 9, 30, 0), + datetime(2018, 10, 22, 3, 10, 0, 0) + ), + ( + "R501", + datetime(2018, 10, 22, 3, 0, 0), + datetime(2018, 10, 22, 3, 0, 30) + ), + ( + "R520", + datetime(2018, 10, 22, 3, 9, 30, 0), + datetime(2018, 10, 22, 3, 10, 0, 0) + ), + ] + ) + def test_scanning_frequencies(self, observation_area, start_time, end_time): """Test scanning frequencies.""" with _fake_hsd_handler() as fh: - fh.observation_area = "JP04" - assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 7, 30, 0) - assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 7, 30, 0) - fh.observation_area = "R304" - assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 7, 30, 0) - assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 7, 30, 0) - fh.observation_area = "R420" - assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 9, 30, 0) - assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 9, 30, 0) - fh.observation_area = "R520" - assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 9, 30, 0) - assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 9, 30, 0) - fh.observation_area = "FLDK" - assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 0, 0, 0) - assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 0, 0, 0) + fh.observation_area = observation_area + assert fh.nominal_start_time == start_time + assert fh.nominal_end_time == end_time def test_blocklen_error(self, *mocks): """Test erraneous blocklength.""" From e3fb4ec1e4485501f607ee918b9181879a5c0f37 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 12 Feb 2024 10:54:45 +0000 Subject: [PATCH 1129/1416] Refactor nominal time computation --- satpy/readers/ahi_hsd.py | 38 +++++++++++++++++--------------- satpy/readers/seviri_l1b_hrit.py | 4 ++-- 2 files changed, 22 insertions(+), 20 deletions(-) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index 3acd3bebdc..1e1f630843 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -429,12 +429,13 @@ def observation_end_time(self): @property def nominal_start_time(self): """Time this band was nominally to be recorded.""" - return self._modify_observation_time_for_nominal(self.observation_start_time, "start") + return self._modify_observation_time_for_nominal(self.observation_start_time) @property def nominal_end_time(self): """Get the nominal end time.""" - return self._modify_observation_time_for_nominal(self.observation_end_time, "end") + freq = self._observation_frequency + return self.nominal_start_time + timedelta(minutes=freq // 60, 
seconds=freq % 60) @staticmethod def _is_valid_timeline(timeline): @@ -443,7 +444,16 @@ def _is_valid_timeline(timeline): return False return True - def _modify_observation_time_for_nominal(self, observation_time, start_or_end_time="start"): + @property + def _observation_frequency(self): + frequencies = {"FLDK": 600, "JP": 150, "R3": 150, "R4": 30, "R5": 30} + area = self.observation_area + if area != "FLDK": + # e.g. JP01, JP02 etc + area = area[:2] + return frequencies[area] + + def _modify_observation_time_for_nominal(self, observation_time): """Round observation time to a nominal time based on known observation frequency. AHI observations are split into different sectors including Full Disk @@ -454,7 +464,6 @@ def _modify_observation_time_for_nominal(self, observation_time, start_or_end_ti sector. So if the observation time is 13:32:48 for the "JP02" sector which is the second Japan observation where every Japan observation is 2.5 minutes apart, then the result should be 13:32:30. - """ timeline = "{:04d}".format(self.basic_info["observation_timeline"][0]) if not self._is_valid_timeline(timeline): @@ -463,24 +472,17 @@ def _modify_observation_time_for_nominal(self, observation_time, start_or_end_ti stacklevel=3 ) return observation_time - - observation_frequencies = {"FLDK": 600, "JP": 150, "R3": 150, "R4": 30, "R5": 30} - if self.observation_area == "FLDK": - dt_start = 0 - dt_end = observation_frequencies["FLDK"] - else: - observation_frequency_seconds = observation_frequencies[self.observation_area[:2]] - dt_start = observation_frequency_seconds * (int(self.observation_area[2:]) - 1) - dt_end = observation_frequency_seconds - - dt = dt_start - if start_or_end_time == "end": - dt += dt_end - + dt = self._get_offset_relative_to_timeline() return observation_time.replace( hour=int(timeline[:2]), minute=int(timeline[2:4]) + dt//60, second=dt % 60, microsecond=0) + def _get_offset_relative_to_timeline(self): + if self.observation_area == "FLDK": + return 0 + sector_repeat = int(self.observation_area[2:]) - 1 + return self._observation_frequency * sector_repeat + def get_dataset(self, key, info): """Get the dataset.""" return self.read_band(key, info) diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 3b3aa82277..804198da0f 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -765,8 +765,8 @@ def _update_attrs(self, res, info): res.attrs["standard_name"] = info["standard_name"] res.attrs["platform_name"] = self.platform_name res.attrs["sensor"] = "seviri" - res.attrs["nominal_start_time"] = self.nominal_start_time, - res.attrs["nominal_end_time"] = self.nominal_end_time, + res.attrs["nominal_start_time"] = self.nominal_start_time + res.attrs["nominal_end_time"] = self.nominal_end_time res.attrs["time_parameters"] = { "nominal_start_time": self.nominal_start_time, "nominal_end_time": self.nominal_end_time, From d0821bf91459fcbc95d21c6d61d534fd37d9a32b Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 12 Feb 2024 10:56:38 +0000 Subject: [PATCH 1130/1416] Reset accidental SEVIRI changes --- satpy/readers/seviri_l1b_hrit.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 804198da0f..3b3aa82277 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -765,8 +765,8 @@ def _update_attrs(self, res, info): res.attrs["standard_name"] = info["standard_name"] res.attrs["platform_name"] = 
self.platform_name res.attrs["sensor"] = "seviri" - res.attrs["nominal_start_time"] = self.nominal_start_time - res.attrs["nominal_end_time"] = self.nominal_end_time + res.attrs["nominal_start_time"] = self.nominal_start_time, + res.attrs["nominal_end_time"] = self.nominal_end_time, res.attrs["time_parameters"] = { "nominal_start_time": self.nominal_start_time, "nominal_end_time": self.nominal_end_time, From beaaf3db73b069813f54e8972b1f3ca58a8bc925 Mon Sep 17 00:00:00 2001 From: Will Sharpe Date: Mon, 12 Feb 2024 18:29:19 +0000 Subject: [PATCH 1131/1416] changed colormap paths --- satpy/etc/enhancements/generic.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 7dcb730528..e79ec953fb 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -1239,7 +1239,7 @@ enhancements: kwargs: palettes: - { - filename: cloud_top_height.csv + filename: colormaps/cloud_top_height.csv } Clear_Sky_Confidence: @@ -1250,7 +1250,7 @@ enhancements: kwargs: palettes: - { - filename: clear_sky_confidence.csv + filename: colormaps/clear_sky_confidence.csv } Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate: @@ -1261,7 +1261,7 @@ enhancements: kwargs: palettes: - { - filename: aerosol_thickness.csv + filename: colormaps/aerosol_thickness.csv } Angstrom_Exponent_Land_Ocean_Best_Estimate: @@ -1272,5 +1272,5 @@ enhancements: kwargs: palettes: - { - filename: angstrom_exponent.csv + filename: colormaps/angstrom_exponent.csv } From 143973e7239350bc6eb11d01160eb8add895d634 Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Mon, 12 Feb 2024 14:08:04 -0600 Subject: [PATCH 1132/1416] Change area definition and add check for resolution --- satpy/readers/clavrx.py | 29 ++++++++++------------------- 1 file changed, 10 insertions(+), 19 deletions(-) diff --git a/satpy/readers/clavrx.py b/satpy/readers/clavrx.py index e23bde44e5..39fda49d3d 100644 --- a/satpy/readers/clavrx.py +++ b/satpy/readers/clavrx.py @@ -268,24 +268,14 @@ def _read_axi_fixed_grid(filename: str, sensor: str, l1b_attr) -> geometry.AreaD x, y = l1b["x"], l1b["y"] area_extent, ncols, nlines = _CLAVRxHelper._area_extent(x, y, h) - if sensor == "abi": - area = geometry.AreaDefinition( - "abi_geos", - "ABI L2 file area", - "abi_geos", - proj, - ncols, - nlines, - np.asarray(area_extent)) - else: - area = geometry.AreaDefinition( - "ahi_geos", - "AHI L2 file area", - "ahi_geos", - proj, - ncols, - nlines, - np.asarray(area_extent)) + area = geometry.AreaDefinition( + f"{sensor}_geos", + f"{sensor.upper()} L2 file area", + f"{sensor}_geos", + proj, + ncols, + nlines, + area_extent) return area @@ -515,7 +505,8 @@ def available_datasets(self, configured_datasets=None): # reader knows something about this dataset (file type matches) # add any information that this reader can add. 
new_info = ds_info.copy() - new_info["resolution"] = self.resolution + if self.resolution is not None: + new_info["resolution"] = self.resolution handled_vars.add(ds_info["name"]) yield True, new_info yield from self._available_file_datasets(handled_vars) From 678deaaa3a42f5f8993faba36cac3bb702caeeb1 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Tue, 13 Feb 2024 12:30:57 +0000 Subject: [PATCH 1133/1416] Refactor nominal timestamp computation --- satpy/readers/ahi_hsd.py | 124 ++++++++++++++--------- satpy/tests/reader_tests/test_ahi_hsd.py | 22 ++-- 2 files changed, 87 insertions(+), 59 deletions(-) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index 1e1f630843..3520c0f953 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -429,59 +429,18 @@ def observation_end_time(self): @property def nominal_start_time(self): """Time this band was nominally to be recorded.""" - return self._modify_observation_time_for_nominal(self.observation_start_time) + timeline = "{:04d}".format(self.basic_info["observation_timeline"][0]) + calc = NominalTimeCalculator(timeline, + self.observation_area) + return calc.get_nominal_start_time(self.observation_start_time) @property def nominal_end_time(self): """Get the nominal end time.""" - freq = self._observation_frequency - return self.nominal_start_time + timedelta(minutes=freq // 60, seconds=freq % 60) - - @staticmethod - def _is_valid_timeline(timeline): - """Check that the `observation_timeline` value is not a fill value.""" - if int(timeline[:2]) > 23: - return False - return True - - @property - def _observation_frequency(self): - frequencies = {"FLDK": 600, "JP": 150, "R3": 150, "R4": 30, "R5": 30} - area = self.observation_area - if area != "FLDK": - # e.g. JP01, JP02 etc - area = area[:2] - return frequencies[area] - - def _modify_observation_time_for_nominal(self, observation_time): - """Round observation time to a nominal time based on known observation frequency. - - AHI observations are split into different sectors including Full Disk - (FLDK), Japan (JP) sectors, and smaller regional (R) sectors. Each - sector is observed at different frequencies (ex. every 10 minutes, - every 2.5 minutes, and every 30 seconds). This method will take the - actual observation time and round it to the nearest interval for this - sector. So if the observation time is 13:32:48 for the "JP02" sector - which is the second Japan observation where every Japan observation is - 2.5 minutes apart, then the result should be 13:32:30. 
- """ timeline = "{:04d}".format(self.basic_info["observation_timeline"][0]) - if not self._is_valid_timeline(timeline): - warnings.warn( - "Observation timeline is fill value, not rounding observation time.", - stacklevel=3 - ) - return observation_time - dt = self._get_offset_relative_to_timeline() - return observation_time.replace( - hour=int(timeline[:2]), minute=int(timeline[2:4]) + dt//60, - second=dt % 60, microsecond=0) - - def _get_offset_relative_to_timeline(self): - if self.observation_area == "FLDK": - return 0 - sector_repeat = int(self.observation_area[2:]) - 1 - return self._observation_frequency * sector_repeat + calc = NominalTimeCalculator(timeline, + self.observation_area) + return calc.get_nominal_end_time(self.nominal_start_time) def get_dataset(self, key, info): """Get the dataset.""" @@ -784,3 +743,72 @@ def _ir_calibrate(self, data): c2_ = self._header["calibration"]["c2_rad2tb_conversion"][0] return (c0_ + c1_ * Te_ + c2_ * Te_ ** 2).clip(0) + + +class NominalTimeCalculator: + """Get time when a scan was nominally to be recorded.""" + + def __init__(self, timeline, area): + """Initialize the nominal timestamp calculator. + + Args: + timeline (str): Observation timeline (four characters HHMM) + area (str): Observation area (four characters, e.g. FLDK) + """ + self.timeline = timeline + self.area = area + + def get_nominal_start_time(self, observation_time): + """Get nominal start time of the scan.""" + return self._modify_observation_time_for_nominal(observation_time) + + def get_nominal_end_time(self, nominal_start_time): + """Get nominal end time of the scan.""" + freq = self._observation_frequency + return nominal_start_time + timedelta(minutes=freq // 60, + seconds=freq % 60) + + def _modify_observation_time_for_nominal(self, observation_time): + """Round observation time to a nominal time based on known observation frequency. + + AHI observations are split into different sectors including Full Disk + (FLDK), Japan (JP) sectors, and smaller regional (R) sectors. Each + sector is observed at different frequencies (ex. every 10 minutes, + every 2.5 minutes, and every 30 seconds). This method will take the + actual observation time and round it to the nearest interval for this + sector. So if the observation time is 13:32:48 for the "JP02" sector + which is the second Japan observation where every Japan observation is + 2.5 minutes apart, then the result should be 13:32:30. + """ + if not self._is_valid_timeline(self.timeline): + warnings.warn( + "Observation timeline is fill value, not rounding observation time.", + stacklevel=3 + ) + return observation_time + dt = self._get_offset_relative_to_timeline() + return observation_time.replace( + hour=int(self.timeline[:2]), minute=int(self.timeline[2:4]) + dt//60, + second=dt % 60, microsecond=0) + + @staticmethod + def _is_valid_timeline(timeline): + """Check that the `observation_timeline` value is not a fill value.""" + if int(timeline[:2]) > 23: + return False + return True + + def _get_offset_relative_to_timeline(self): + if self.area == "FLDK": + return 0 + sector_repeat = int(self.area[2:]) - 1 + return self._observation_frequency * sector_repeat + + @property + def _observation_frequency(self): + frequencies = {"FLDK": 600, "JP": 150, "R3": 150, "R4": 30, "R5": 30} + area = self.area + if area != "FLDK": + # e.g. 
JP01, JP02 etc + area = area[:2] + return frequencies[area] diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 29e4cfcb9b..e73efdfec2 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -29,7 +29,7 @@ import numpy as np import pytest -from satpy.readers.ahi_hsd import AHIHSDFileHandler +from satpy.readers.ahi_hsd import AHIHSDFileHandler, NominalTimeCalculator from satpy.readers.utils import get_geostationary_mask from satpy.tests.utils import make_dataid @@ -495,22 +495,22 @@ def test_blocklen_error(self, *mocks): def test_is_valid_time(self): """Test that valid times are correctly identified.""" - assert AHIHSDFileHandler._is_valid_timeline(FAKE_BASIC_INFO["observation_timeline"]) - assert not AHIHSDFileHandler._is_valid_timeline("65526") + assert NominalTimeCalculator._is_valid_timeline(FAKE_BASIC_INFO["observation_timeline"]) + assert not NominalTimeCalculator._is_valid_timeline("65526") def test_time_rounding(self): """Test rounding of the nominal time.""" mocker = mock.MagicMock() in_date = datetime(2020, 1, 1, 12, 0, 0) - with mock.patch("satpy.readers.ahi_hsd.AHIHSDFileHandler._is_valid_timeline", mocker): - with _fake_hsd_handler() as fh: - mocker.return_value = True - assert fh._modify_observation_time_for_nominal(in_date) == datetime(2020, 1, 1, 3, 0, 0) - mocker.return_value = False - with pytest.warns(UserWarning, - match=r"Observation timeline is fill value, not rounding observation time"): - assert fh._modify_observation_time_for_nominal(in_date) == datetime(2020, 1, 1, 12, 0, 0) + with mock.patch("satpy.readers.ahi_hsd.NominalTimeCalculator._is_valid_timeline", mocker): + calc = NominalTimeCalculator("0300", "FLDK") + mocker.return_value = True + assert calc._modify_observation_time_for_nominal(in_date) == datetime(2020, 1, 1, 3, 0, 0) + mocker.return_value = False + with pytest.warns(UserWarning, + match=r"Observation timeline is fill value, not rounding observation time"): + assert calc._modify_observation_time_for_nominal(in_date) == datetime(2020, 1, 1, 12, 0, 0) class TestAHICalibration(unittest.TestCase): From 3fa2698b475351f9be245e41ff16b4992b0208ca Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Tue, 13 Feb 2024 12:33:38 +0000 Subject: [PATCH 1134/1416] Refactor corresponding tests --- satpy/readers/ahi_hsd.py | 4 +- satpy/tests/reader_tests/test_ahi_hsd.py | 142 +++++++++++------------ 2 files changed, 73 insertions(+), 73 deletions(-) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index 3520c0f953..e721d29b2d 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -758,9 +758,9 @@ def __init__(self, timeline, area): self.timeline = timeline self.area = area - def get_nominal_start_time(self, observation_time): + def get_nominal_start_time(self, observation_start_time): """Get nominal start time of the scan.""" - return self._modify_observation_time_for_nominal(observation_time) + return self._modify_observation_time_for_nominal(observation_start_time) def get_nominal_end_time(self, nominal_start_time): """Get nominal end time of the scan.""" diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index e73efdfec2..c201c2b7b7 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -423,58 +423,6 @@ def test_time_properties(self): assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 0, 0, 0) assert fh.nominal_end_time == 
datetime(2018, 10, 22, 3, 10, 0, 0) - @pytest.mark.parametrize( - ("observation_area", "start_time", "end_time"), - [ - ( - "JP01", - datetime(2018, 10, 22, 3, 0, 0), - datetime(2018, 10, 22, 3, 2, 30) - ), - ( - "JP04", - datetime(2018, 10, 22, 3, 7, 30, 0), - datetime(2018, 10, 22, 3, 10, 0, 0) - ), - ( - "R301", - datetime(2018, 10, 22, 3, 0, 0), - datetime(2018, 10, 22, 3, 2, 30) - ), - ( - "R304", - datetime(2018, 10, 22, 3, 7, 30, 0), - datetime(2018, 10, 22, 3, 10, 0, 0) - ), - ( - "R401", - datetime(2018, 10, 22, 3, 0, 0), - datetime(2018, 10, 22, 3, 0, 30) - ), - ( - "R420", - datetime(2018, 10, 22, 3, 9, 30, 0), - datetime(2018, 10, 22, 3, 10, 0, 0) - ), - ( - "R501", - datetime(2018, 10, 22, 3, 0, 0), - datetime(2018, 10, 22, 3, 0, 30) - ), - ( - "R520", - datetime(2018, 10, 22, 3, 9, 30, 0), - datetime(2018, 10, 22, 3, 10, 0, 0) - ), - ] - ) - def test_scanning_frequencies(self, observation_area, start_time, end_time): - """Test scanning frequencies.""" - with _fake_hsd_handler() as fh: - fh.observation_area = observation_area - assert fh.nominal_start_time == start_time - assert fh.nominal_end_time == end_time - def test_blocklen_error(self, *mocks): """Test erraneous blocklength.""" open_name = "%s.open" % __name__ @@ -493,25 +441,6 @@ def test_blocklen_error(self, *mocks): with pytest.warns(UserWarning, match=r"Actual .* header size does not match expected"): fh._check_fpos(fp_, fpos, 0, "header 1") - def test_is_valid_time(self): - """Test that valid times are correctly identified.""" - assert NominalTimeCalculator._is_valid_timeline(FAKE_BASIC_INFO["observation_timeline"]) - assert not NominalTimeCalculator._is_valid_timeline("65526") - - def test_time_rounding(self): - """Test rounding of the nominal time.""" - mocker = mock.MagicMock() - in_date = datetime(2020, 1, 1, 12, 0, 0) - - with mock.patch("satpy.readers.ahi_hsd.NominalTimeCalculator._is_valid_timeline", mocker): - calc = NominalTimeCalculator("0300", "FLDK") - mocker.return_value = True - assert calc._modify_observation_time_for_nominal(in_date) == datetime(2020, 1, 1, 3, 0, 0) - mocker.return_value = False - with pytest.warns(UserWarning, - match=r"Observation timeline is fill value, not rounding observation time"): - assert calc._modify_observation_time_for_nominal(in_date) == datetime(2020, 1, 1, 12, 0, 0) - class TestAHICalibration(unittest.TestCase): """Test case for various AHI calibration types.""" @@ -702,3 +631,74 @@ def _create_fake_file_handler(in_fname, filename_info=None, filetype_info=None, assert in_fname != fh.filename assert str(filename_info["segment"]).zfill(2) == fh.filename[0:2] return fh + + +class TestNominalTimeCalculator: + """Test case for nominal timestamp computation.""" + + @pytest.mark.parametrize( + ("timeline", "expected"), + [ + ("0300", datetime(2020, 1, 1, 3, 0, 0)), + ("65526", datetime(2020, 1, 1, 12, 0, 0)) + ] + ) + def test_invalid_timeline(self, timeline, expected): + """Test handling of invalid timeline.""" + calc = NominalTimeCalculator(timeline, "FLDK") + res = calc.get_nominal_start_time(datetime(2020, 1, 1, 12, 0, 0)) + assert res == expected + + @pytest.mark.parametrize( + ("area", "expected"), + [ + ( + "JP01", + {"tstart": datetime(2018, 10, 22, 3, 0, 0), + "tend": datetime(2018, 10, 22, 3, 2, 30)} + ), + ( + "JP04", + {"tstart": datetime(2018, 10, 22, 3, 7, 30, 0), + "tend": datetime(2018, 10, 22, 3, 10, 0, 0)} + ), + ( + "R301", + {"tstart": datetime(2018, 10, 22, 3, 0, 0), + "tend": datetime(2018, 10, 22, 3, 2, 30)} + ), + ( + "R304", + {"tstart": 
datetime(2018, 10, 22, 3, 7, 30, 0), + "tend": datetime(2018, 10, 22, 3, 10, 0, 0)} + ), + ( + "R401", + {"tstart": datetime(2018, 10, 22, 3, 0, 0), + "tend": datetime(2018, 10, 22, 3, 0, 30)} + ), + ( + "R420", + {"tstart": datetime(2018, 10, 22, 3, 9, 30, 0), + "tend": datetime(2018, 10, 22, 3, 10, 0, 0)} + ), + ( + "R501", + {"tstart": datetime(2018, 10, 22, 3, 0, 0), + "tend": datetime(2018, 10, 22, 3, 0, 30)} + ), + ( + "R520", + {"tstart": datetime(2018, 10, 22, 3, 9, 30, 0), + "tend": datetime(2018, 10, 22, 3, 10, 0, 0)} + ), + ] + ) + def test_areas(self, area, expected): + """Test nominal timestamps for multiple areas.""" + obs_start_time = datetime(2018, 10, 22, 3, 0, 20, 596896) + calc = NominalTimeCalculator("0300", area) + nom_start_time = calc.get_nominal_start_time(obs_start_time) + nom_end_time = calc.get_nominal_end_time(nom_start_time) + assert nom_start_time == expected["tstart"] + assert nom_end_time == expected["tend"] From 5c3475315a9176bafa92874ac502820dcca76b11 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Tue, 13 Feb 2024 13:00:15 +0000 Subject: [PATCH 1135/1416] Add test case for multiple timelines --- satpy/tests/reader_tests/test_ahi_hsd.py | 37 ++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index c201c2b7b7..93089084c6 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -702,3 +702,40 @@ def test_areas(self, area, expected): nom_end_time = calc.get_nominal_end_time(nom_start_time) assert nom_start_time == expected["tstart"] assert nom_end_time == expected["tend"] + + @pytest.mark.parametrize( + ("timeline", "obs_start_time", "expected"), + [ + ( + "1200", + datetime(2023, 1, 1, 12, 0, 1), + {"tstart": datetime(2023, 1, 1, 12, 0, 0), + "tend": datetime(2023, 1, 1, 12, 10, 0)} + ), + ( + "1200", + datetime(2023, 1, 1, 11, 59, 59), + {"tstart": datetime(2023, 1, 1, 12, 0, 0), + "tend": datetime(2023, 1, 1, 12, 10, 0)} + ), + ( + "0000", + datetime(2023, 1, 1, 0, 0, 1), + {"tstart": datetime(2023, 1, 1, 0, 0, 0), + "tend": datetime(2023, 1, 1, 0, 10, 0)} + ), + ( + "0000", + datetime(2022, 12, 31, 23, 59, 59), + {"tstart": datetime(2023, 1, 1, 0, 0, 0), + "tend": datetime(2023, 1, 1, 0, 10, 0)} + ), + ] + ) + def test_timelines(self, timeline, obs_start_time, expected): + """Test nominal timestamps for multiple timelines.""" + calc = NominalTimeCalculator(timeline, "FLDK") + nom_start_time = calc.get_nominal_start_time(obs_start_time) + nom_end_time = calc.get_nominal_end_time(nom_start_time) + assert nom_start_time == expected["tstart"] + assert nom_end_time == expected["tend"] From 67dd4a8f5cad945acc7236f6fa4f51e5235228f5 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Tue, 13 Feb 2024 13:09:16 +0000 Subject: [PATCH 1136/1416] Convert timeline to time object --- satpy/readers/ahi_hsd.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index e721d29b2d..30e0f5e868 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -755,9 +755,15 @@ def __init__(self, timeline, area): timeline (str): Observation timeline (four characters HHMM) area (str): Observation area (four characters, e.g. 
FLDK) """ - self.timeline = timeline + self.timeline = self._parse_timeline(timeline) self.area = area + def _parse_timeline(self, timeline): + try: + return datetime.strptime(timeline, "%H%M").time() + except ValueError: + return None + def get_nominal_start_time(self, observation_start_time): """Get nominal start time of the scan.""" return self._modify_observation_time_for_nominal(observation_start_time) @@ -780,7 +786,7 @@ def _modify_observation_time_for_nominal(self, observation_time): which is the second Japan observation where every Japan observation is 2.5 minutes apart, then the result should be 13:32:30. """ - if not self._is_valid_timeline(self.timeline): + if not self.timeline: warnings.warn( "Observation timeline is fill value, not rounding observation time.", stacklevel=3 @@ -788,16 +794,9 @@ def _modify_observation_time_for_nominal(self, observation_time): return observation_time dt = self._get_offset_relative_to_timeline() return observation_time.replace( - hour=int(self.timeline[:2]), minute=int(self.timeline[2:4]) + dt//60, + hour=self.timeline.hour, minute=self.timeline.minute + dt//60, second=dt % 60, microsecond=0) - @staticmethod - def _is_valid_timeline(timeline): - """Check that the `observation_timeline` value is not a fill value.""" - if int(timeline[:2]) > 23: - return False - return True - def _get_offset_relative_to_timeline(self): if self.area == "FLDK": return 0 From 911bc4d87f27472d272bc62011e83986f2576ffe Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Tue, 13 Feb 2024 13:26:52 +0000 Subject: [PATCH 1137/1416] Handle scans starting earlier than planned --- satpy/readers/ahi_hsd.py | 31 +++++++++++++++++++++--- satpy/tests/reader_tests/test_ahi_hsd.py | 16 ++++++------ 2 files changed, 36 insertions(+), 11 deletions(-) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index 30e0f5e868..6b503148b2 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -792,10 +792,35 @@ def _modify_observation_time_for_nominal(self, observation_time): stacklevel=3 ) return observation_time + timeline = self._get_closest_timeline(observation_time) dt = self._get_offset_relative_to_timeline() - return observation_time.replace( - hour=self.timeline.hour, minute=self.timeline.minute + dt//60, - second=dt % 60, microsecond=0) + return timeline + timedelta(minutes=dt//60, seconds=dt % 60) + + def _get_closest_timeline(self, observation_time): + """Find the closest timeline for the given observation time. + + Needs to check surrounding days because the observation might start + a little bit before the planned time. 
+ + Observation start time: 2022-12-31 23:59 + Timeline: 0000 + => Nominal start time: 2023-01-01 00:00 + """ + delta_days = [-1, 0, 1] + surrounding_dates = [ + (observation_time + timedelta(days=delta)).date() + for delta in delta_days + ] + timelines = [ + datetime.combine(date, self.timeline) + for date in surrounding_dates + ] + diffs = [ + abs((timeline - observation_time)) + for timeline in timelines + ] + argmin = np.argmin(diffs) + return timelines[argmin] def _get_offset_relative_to_timeline(self): if self.area == "FLDK": diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 93089084c6..3b73521ee8 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -707,16 +707,16 @@ def test_areas(self, area, expected): ("timeline", "obs_start_time", "expected"), [ ( - "1200", - datetime(2023, 1, 1, 12, 0, 1), - {"tstart": datetime(2023, 1, 1, 12, 0, 0), - "tend": datetime(2023, 1, 1, 12, 10, 0)} + "2350", + datetime(2022, 12, 31, 23, 50, 1), + {"tstart": datetime(2022, 12, 31, 23, 50, 0), + "tend": datetime(2023, 1, 1, 0, 0, 0)} ), ( - "1200", - datetime(2023, 1, 1, 11, 59, 59), - {"tstart": datetime(2023, 1, 1, 12, 0, 0), - "tend": datetime(2023, 1, 1, 12, 10, 0)} + "2350", + datetime(2022, 12, 31, 23, 49, 59), + {"tstart": datetime(2022, 12, 31, 23, 50, 0), + "tend": datetime(2023, 1, 1, 0, 0, 0)} ), ( "0000", From 024a63f957e944828fe7cf558072332a57159398 Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Tue, 13 Feb 2024 10:41:15 -0600 Subject: [PATCH 1138/1416] Address: scale_factor/add_offset dtype, area_extent type. Updates logic for the resolution value from the file. --- satpy/readers/clavrx.py | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/satpy/readers/clavrx.py b/satpy/readers/clavrx.py index 39fda49d3d..c355a1f0ba 100644 --- a/satpy/readers/clavrx.py +++ b/satpy/readers/clavrx.py @@ -17,10 +17,12 @@ # satpy. If not, see . 
"""Interface to CLAVR-X HDF4 products.""" +from __future__ import annotations + import logging import os from glob import glob -from typing import Optional, Union +from typing import Optional import netCDF4 import numpy as np @@ -108,11 +110,11 @@ def _get_rows_per_scan(sensor: str) -> Optional[int]: return None -def _scale_data(data_arr: Union[xr.DataArray, int], scale_factor: float, add_offset: float) -> xr.DataArray: +def _scale_data(data_arr: xr.DataArray | int, scale_factor: float, add_offset: float) -> xr.DataArray: """Scale data, if needed.""" scaling_needed = not (scale_factor == 1.0 and add_offset == 0.0) if scaling_needed: - data_arr = data_arr * scale_factor + add_offset + data_arr = data_arr * np.float32(scale_factor) + np.float32(add_offset) return data_arr @@ -120,16 +122,17 @@ class _CLAVRxHelper: """A base class for the CLAVRx File Handlers.""" @staticmethod - def _get_nadir_resolution(sensor, resolution_from_filename_info): + def _get_nadir_resolution(sensor, filename_info_resolution): """Get nadir resolution.""" for k, v in NADIR_RESOLUTION.items(): if sensor.startswith(k): return v - res = resolution_from_filename_info - if res.endswith("m"): - return int(res[:-1]) - elif res is not None: - return int(res) + if filename_info_resolution is None: + return None + if isinstance(filename_info_resolution, str) and filename_info_resolution.startswith("m"): + return int(filename_info_resolution[:-1]) + else: + return int(filename_info_resolution) @staticmethod def _remove_attributes(attrs: dict) -> dict: From cef6e5a88b67822796952015e49b24bdbd857269 Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Tue, 13 Feb 2024 11:53:56 -0600 Subject: [PATCH 1139/1416] Move clavrx tests --- satpy/tests/reader_tests/{ => test_clavrx}/test_clavrx_geohdf.py | 0 satpy/tests/reader_tests/{ => test_clavrx}/test_clavrx_nc.py | 0 .../tests/reader_tests/{ => test_clavrx}/test_clavrx_polarhdf.py | 0 3 files changed, 0 insertions(+), 0 deletions(-) rename satpy/tests/reader_tests/{ => test_clavrx}/test_clavrx_geohdf.py (100%) rename satpy/tests/reader_tests/{ => test_clavrx}/test_clavrx_nc.py (100%) rename satpy/tests/reader_tests/{ => test_clavrx}/test_clavrx_polarhdf.py (100%) diff --git a/satpy/tests/reader_tests/test_clavrx_geohdf.py b/satpy/tests/reader_tests/test_clavrx/test_clavrx_geohdf.py similarity index 100% rename from satpy/tests/reader_tests/test_clavrx_geohdf.py rename to satpy/tests/reader_tests/test_clavrx/test_clavrx_geohdf.py diff --git a/satpy/tests/reader_tests/test_clavrx_nc.py b/satpy/tests/reader_tests/test_clavrx/test_clavrx_nc.py similarity index 100% rename from satpy/tests/reader_tests/test_clavrx_nc.py rename to satpy/tests/reader_tests/test_clavrx/test_clavrx_nc.py diff --git a/satpy/tests/reader_tests/test_clavrx_polarhdf.py b/satpy/tests/reader_tests/test_clavrx/test_clavrx_polarhdf.py similarity index 100% rename from satpy/tests/reader_tests/test_clavrx_polarhdf.py rename to satpy/tests/reader_tests/test_clavrx/test_clavrx_polarhdf.py From aee140a1327041a8c218bde0580bc94e2f1942ad Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 13 Feb 2024 20:29:54 +0200 Subject: [PATCH 1140/1416] Update satpy/dataset/metadata.py Co-authored-by: Gerrit Holl --- satpy/dataset/metadata.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/dataset/metadata.py b/satpy/dataset/metadata.py index 405885ad14..577c83eb7b 100644 --- a/satpy/dataset/metadata.py +++ b/satpy/dataset/metadata.py @@ -40,7 +40,8 @@ def combine_metadata(*metadata_objects): `None` values 
resulting from data that don't have times associated to them are removed. These rules are applied also to values in the 'time_parameters' dictionary. - +.. versionchanged:: 0.47 + Before Satpy 0.47, all times, including `start_time` and `end_time`, were averaged. In the interest of processing time, lazy arrays are compared by object identity rather than by their contents. From 52b2d41621f7d87bc49762afd7c1bcbdadf071b3 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 13 Feb 2024 20:40:21 +0200 Subject: [PATCH 1141/1416] Update satpy/dataset/metadata.py Co-authored-by: David Hoese --- satpy/dataset/metadata.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/satpy/dataset/metadata.py b/satpy/dataset/metadata.py index 577c83eb7b..b08f8207a5 100644 --- a/satpy/dataset/metadata.py +++ b/satpy/dataset/metadata.py @@ -40,8 +40,11 @@ def combine_metadata(*metadata_objects): `None` values resulting from data that don't have times associated to them are removed. These rules are applied also to values in the 'time_parameters' dictionary. -.. versionchanged:: 0.47 - Before Satpy 0.47, all times, including `start_time` and `end_time`, were averaged. + + .. versionchanged:: 0.47 + + Before Satpy 0.47, all times, including `start_time` and `end_time`, were averaged. + In the interest of processing time, lazy arrays are compared by object identity rather than by their contents. From 835c551eea22c8ca7117dbc888b65e1d8bb40fd0 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Tue, 13 Feb 2024 19:53:04 +0000 Subject: [PATCH 1142/1416] Refactor duplicate code --- satpy/readers/ahi_hsd.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index 6b503148b2..cf257cf579 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -426,20 +426,20 @@ def observation_end_time(self): """Get the observation end time.""" return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_end_time"].item())) + @property + def _timeline(self): + return "{:04d}".format(self.basic_info["observation_timeline"][0]) + @property def nominal_start_time(self): """Time this band was nominally to be recorded.""" - timeline = "{:04d}".format(self.basic_info["observation_timeline"][0]) - calc = NominalTimeCalculator(timeline, - self.observation_area) + calc = NominalTimeCalculator(self._timeline, self.observation_area) return calc.get_nominal_start_time(self.observation_start_time) @property def nominal_end_time(self): """Get the nominal end time.""" - timeline = "{:04d}".format(self.basic_info["observation_timeline"][0]) - calc = NominalTimeCalculator(timeline, - self.observation_area) + calc = NominalTimeCalculator(self._timeline, self.observation_area) return calc.get_nominal_end_time(self.nominal_start_time) def get_dataset(self, key, info): From b8a47a9c9d042097e2fe2467cfb7bef72329f521 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 14 Feb 2024 08:53:38 +0200 Subject: [PATCH 1143/1416] Add a warning when trying to use removed 'average_times' kwarg --- satpy/dataset/metadata.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/satpy/dataset/metadata.py b/satpy/dataset/metadata.py index b08f8207a5..a328402e0a 100644 --- a/satpy/dataset/metadata.py +++ b/satpy/dataset/metadata.py @@ -17,6 +17,7 @@ # satpy. If not, see . 
"""Utilities for merging metadata from various sources.""" +import warnings from collections.abc import Collection from datetime import datetime from functools import partial, reduce @@ -27,7 +28,7 @@ from satpy.writers.utils import flatten_dict -def combine_metadata(*metadata_objects): +def combine_metadata(*metadata_objects, average_times=None): """Combine the metadata of two or more Datasets. If the values corresponding to any keys are not equal or do not @@ -40,7 +41,7 @@ def combine_metadata(*metadata_objects): `None` values resulting from data that don't have times associated to them are removed. These rules are applied also to values in the 'time_parameters' dictionary. - + .. versionchanged:: 0.47 Before Satpy 0.47, all times, including `start_time` and `end_time`, were averaged. @@ -51,10 +52,19 @@ def combine_metadata(*metadata_objects): Args: *metadata_objects: MetadataObject or dict objects to combine + Kwargs: + average_times (bool): Removed option to average all time attributes. + Returns: dict: the combined metadata """ + if average_times is not None: + warnings.warn( + "'average_time' option has been removed and start/end times are handled with min/max instead.", + UserWarning + ) + info_dicts = _get_valid_dicts(metadata_objects) if len(info_dicts) == 1: return info_dicts[0].copy() From f8937d41257dcf5286d19e966173cbdb5588f27b Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 14 Feb 2024 13:00:49 +0000 Subject: [PATCH 1144/1416] Fix nominal time attributes in SEVIRI HRIT --- satpy/readers/seviri_l1b_hrit.py | 4 ++-- satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 3b3aa82277..804198da0f 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -765,8 +765,8 @@ def _update_attrs(self, res, info): res.attrs["standard_name"] = info["standard_name"] res.attrs["platform_name"] = self.platform_name res.attrs["sensor"] = "seviri" - res.attrs["nominal_start_time"] = self.nominal_start_time, - res.attrs["nominal_end_time"] = self.nominal_end_time, + res.attrs["nominal_start_time"] = self.nominal_start_time + res.attrs["nominal_end_time"] = self.nominal_end_time res.attrs["time_parameters"] = { "nominal_start_time": self.nominal_start_time, "nominal_end_time": self.nominal_end_time, diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py index a885a5becc..d668fe5240 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py @@ -238,8 +238,8 @@ def get_attrs_exp(projection_longitude=0.0): "satellite_actual_latitude": -0.5711243456528018, "satellite_actual_altitude": 35783296.150123544}, "georef_offset_corrected": True, - "nominal_start_time": (datetime(2006, 1, 1, 12, 15),), - "nominal_end_time": (datetime(2006, 1, 1, 12, 30),), + "nominal_start_time": datetime(2006, 1, 1, 12, 15), + "nominal_end_time": datetime(2006, 1, 1, 12, 30), "time_parameters": { "nominal_start_time": datetime(2006, 1, 1, 12, 15), "nominal_end_time": datetime(2006, 1, 1, 12, 30), From 205d5e80c998c0e202783981bb142b6729d8e091 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 15 Feb 2024 13:07:06 +0100 Subject: [PATCH 1145/1416] Fix cutoffs for night_ir_alpha and bump up trollimage version --- continuous_integration/environment.yaml | 2 +- satpy/etc/composites/seviri.yaml | 1 + 
satpy/etc/enhancements/generic.yaml | 2 +- setup.py | 2 +- 4 files changed, 4 insertions(+), 3 deletions(-) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index 215d215eac..f1a89319a8 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -59,6 +59,6 @@ dependencies: - bokeh - pip: - trollsift - - trollimage>=1.20 + - trollimage>=1.23 - pyspectral - pyorbital diff --git a/satpy/etc/composites/seviri.yaml b/satpy/etc/composites/seviri.yaml index f30330bb18..e53609d8e0 100644 --- a/satpy/etc/composites/seviri.yaml +++ b/satpy/etc/composites/seviri.yaml @@ -421,6 +421,7 @@ composites: - name: HRV modifiers: [sunz_corrected] - IR_108 + hrv_fog: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: hrv_fog diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 25680d6db9..52cad8a5ce 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -1104,7 +1104,7 @@ enhancements: operations: - name: stretch method: !!python/name:satpy.enhancements.stretch - kwargs: {stretch: linear, cutoffs: [0.02, 0.02]} + kwargs: {stretch: linear, cutoffs: [[0.02, 0.02], [0.02, 0.02], [0.02, 0.02], [0.02, 0.02]]} - name: inverse method: !!python/name:satpy.enhancements.invert args: diff --git a/setup.py b/setup.py index 3439e8fa89..16c2b95512 100644 --- a/setup.py +++ b/setup.py @@ -23,7 +23,7 @@ from setuptools import find_packages, setup requires = ["numpy >=1.21", "pillow", "pyresample >=1.24.0", "trollsift", - "trollimage >=1.20", "pykdtree", "pyyaml >=5.1", "xarray >=0.14.1", + "trollimage >=1.23", "pykdtree", "pyyaml >=5.1", "xarray >=0.14.1", "dask[array] >=0.17.1", "pyproj>=2.2", "zarr", "donfig", "appdirs", "packaging", "pooch", "pyorbital"] From c819c975172393c9a60660dc23ba55d25ad0ae44 Mon Sep 17 00:00:00 2001 From: BENR0 Date: Fri, 16 Feb 2024 09:44:49 +0100 Subject: [PATCH 1146/1416] fix table order and update to datatables v2. --- doc/source/_static/main.js | 7 ++++++- doc/source/conf.py | 4 ++-- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/doc/source/_static/main.js b/doc/source/_static/main.js index 188a335e71..b153beb2f3 100644 --- a/doc/source/_static/main.js +++ b/doc/source/_static/main.js @@ -1,6 +1,11 @@ $(document).ready( function () { $('table.datatable').DataTable( { "paging": false, - "dom": 'lfitp' + "layout": { + 'topStart': 'info', + 'topEnd': 'search', + 'bottomStart': null + }, + "order": [[0, 'asc']] } ); } ); diff --git a/doc/source/conf.py b/doc/source/conf.py index 49e47b2cc2..020544ee4a 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -194,11 +194,11 @@ def __getattr__(cls, name): html_css_files = [ "theme_overrides.css", # override wide tables in RTD theme - "https://cdn.datatables.net/1.10.23/css/jquery.dataTables.min.css", + "https://cdn.datatables.net/v/dt/dt-2.0.0/datatables.min.css", ] html_js_files = [ - "https://cdn.datatables.net/1.10.23/js/jquery.dataTables.min.js", + "https://cdn.datatables.net/v/dt/dt-2.0.0/datatables.min.js", "main.js", ] From 46556d31d70095695a045473c9e662e2da01cfe1 Mon Sep 17 00:00:00 2001 From: BENR0 Date: Fri, 16 Feb 2024 09:47:00 +0100 Subject: [PATCH 1147/1416] Add status description again and insert link to it in dev guide. 
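
The status values documented here are the ones each reader declares in the
"reader:" section of its YAML file, which is what the generated reader table
renders. As a rough illustration (a sketch, not part of this patch; it
assumes satpy is importable and that available_readers(as_dict=True) returns
each reader's YAML metadata dictionary), the field can be inspected like so:

    from satpy import available_readers

    # Each dict is one reader's "reader:" YAML section; readers that have
    # not been given the new field yet fall back to "unknown".
    for info in available_readers(as_dict=True):
        print(info.get("name", "<unnamed>"), "->", info.get("status", "unknown"))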
---
 doc/source/dev_guide/custom_reader.rst |  3 ++-
 doc/source/index.rst                   | 24 ++++++++++++++++++++++++
 2 files changed, 26 insertions(+), 1 deletion(-)

diff --git a/doc/source/dev_guide/custom_reader.rst b/doc/source/dev_guide/custom_reader.rst
index a77988760e..a5656795ca 100644
--- a/doc/source/dev_guide/custom_reader.rst
+++ b/doc/source/dev_guide/custom_reader.rst
@@ -117,7 +117,8 @@ The parameters to provide in this section are:
     file format. This can be multiline if formatted properly in YAML (see
     example below).
 status
-    The status of the reader (one of: Nominal, Beta, Alpha)
+    The status of the reader (one of: Nominal, Beta, Alpha, Defunct; see :ref:`Status Description`
+    for more details).
 supports_fsspec
     If the reader supports reading data via fsspec (either true or false).
 sensors
diff --git a/doc/source/index.rst b/doc/source/index.rst
index 052a7e2d03..b229c904ee 100644
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -84,6 +84,30 @@ Documentation

 .. include:: reader_table.rst

+.. _Status Description:
+.. note::
+
+    Status description:
+
+    Defunct
+        Most likely the reader is not functional. If it is, there is a good chance of
+        bugs and/or performance problems (e.g. not ported to dask/xarray yet). Future
+        development is unclear. Users are encouraged to contribute (see section
+        :doc:`dev_guide/CONTRIBUTING` and/or get help on Slack or by opening a Github issue).
+
+    Alpha
+        This denotes early development status. Reader is functional and implements some
+        or all of the nominal features. There might be bugs. Exactness of results is
+        not guaranteed. Use at your own risk.
+
+    Beta
+        This denotes final development status. Reader is functional and implements all
+        nominal features. Results should be dependable but there might be bugs. Users
+        are actively encouraged to test and report bugs.
+
+    Nominal
+        This denotes a finished status. Reader is functional and most likely no new
+        features will be introduced. It has been tested and there are no known bugs.

 Indices and tables
 ==================

From b280236ba1fd70c8f9763e3d33e23d51f298b2c2 Mon Sep 17 00:00:00 2001
From: BENR0
Date: Fri, 16 Feb 2024 09:47:00 +0100
Subject: [PATCH 1148/1416] fix missing info in multiple readers.
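
The readers touched here were missing one or more of the metadata keys the
documentation table expects. A quick way to spot any stragglers is sketched
below (not part of the committed test suite; it assumes
available_readers(as_dict=True) yields each reader's "reader:" YAML section):

    from satpy import available_readers

    # Keys the generated reader table wants to display for every reader.
    REQUIRED = ("name", "short_name", "long_name", "description",
                "status", "supports_fsspec")

    for info in available_readers(as_dict=True):
        missing = [key for key in REQUIRED if key not in info]
        if missing:
            print(info.get("name", "<unnamed>"), "is missing:", ", ".join(missing))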
--- satpy/etc/readers/agri_fy4a_l1.yaml | 2 +- satpy/etc/readers/agri_fy4b_l1.yaml | 4 ++++ satpy/etc/readers/ghi_l1.yaml | 4 ++++ satpy/etc/readers/meris_nc_sen3.yaml | 6 +++++- satpy/etc/readers/mersi_ll_l1b.yaml | 6 +++++- satpy/etc/readers/sgli_l1b.yaml | 6 +++++- satpy/etc/readers/viirs_edr.yaml | 6 +++++- 7 files changed, 29 insertions(+), 5 deletions(-) diff --git a/satpy/etc/readers/agri_fy4a_l1.yaml b/satpy/etc/readers/agri_fy4a_l1.yaml index 5e3dfead35..cd7c7a8fe0 100644 --- a/satpy/etc/readers/agri_fy4a_l1.yaml +++ b/satpy/etc/readers/agri_fy4a_l1.yaml @@ -5,7 +5,7 @@ reader: name: agri_fy4a_l1 short_name: AGRI FY4A L1 - long_name: FY-4A AGRI L1 data in HDF5 format + long_name: FY-4A AGRI Level 1 HDF5 format description: FY-4A AGRI instrument HDF5 reader status: Beta supports_fsspec: false diff --git a/satpy/etc/readers/agri_fy4b_l1.yaml b/satpy/etc/readers/agri_fy4b_l1.yaml index b1ff44189d..2b47e51cdb 100644 --- a/satpy/etc/readers/agri_fy4b_l1.yaml +++ b/satpy/etc/readers/agri_fy4b_l1.yaml @@ -4,7 +4,11 @@ reader: name: agri_fy4b_l1 + short_name: AGRI FY4B L1 + long_name: FY-4B AGRI Level 1 data HDF5 format description: FY-4B AGRI instrument HDF5 reader + status: Beta + supports_fsspec: false sensors: [agri] default_channels: reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader diff --git a/satpy/etc/readers/ghi_l1.yaml b/satpy/etc/readers/ghi_l1.yaml index 59c8f35f70..0c2e595253 100644 --- a/satpy/etc/readers/ghi_l1.yaml +++ b/satpy/etc/readers/ghi_l1.yaml @@ -4,7 +4,11 @@ reader: name: ghi_l1 + short_name: GHI FY4A L1 + long_name: FY-4A GHI Level 1 HDF5 format description: FY-4A GHI instrument HDF5 reader + status: Beta + supports_fsspec: false sensors: [ghi] default_channels: reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader diff --git a/satpy/etc/readers/meris_nc_sen3.yaml b/satpy/etc/readers/meris_nc_sen3.yaml index ba3d02969a..28d5597665 100644 --- a/satpy/etc/readers/meris_nc_sen3.yaml +++ b/satpy/etc/readers/meris_nc_sen3.yaml @@ -1,6 +1,10 @@ reader: - description: NC Reader for MERIS data (Sentinel 3 like format) name: meris_nc_sen3 + short_name: MERIS Sentinel 3 + long_name: Sentinel 3 MERIS NetCDF format + description: NC Reader for MERIS data (Sentinel 3 like format) + status: Beta + supports_fsspec: false sensors: [meris] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader diff --git a/satpy/etc/readers/mersi_ll_l1b.yaml b/satpy/etc/readers/mersi_ll_l1b.yaml index 7c572885c7..6ea44dcb99 100644 --- a/satpy/etc/readers/mersi_ll_l1b.yaml +++ b/satpy/etc/readers/mersi_ll_l1b.yaml @@ -1,6 +1,10 @@ reader: - description: FY-3E Medium Resolution Spectral Imager - Low Light (MERSI-LL) L1B Reader name: mersi_ll_l1b + short_name: MERSI Low Light FY3E L1B + long_name: FY-3E MERSI Low Light Level 1B + description: FY-3E Medium Resolution Spectral Imager - Low Light (MERSI-LL) L1B Reader + status: Beta + supports_fsspec: false sensors: [mersi-ll] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader diff --git a/satpy/etc/readers/sgli_l1b.yaml b/satpy/etc/readers/sgli_l1b.yaml index 9f8108510f..4cb86890c4 100644 --- a/satpy/etc/readers/sgli_l1b.yaml +++ b/satpy/etc/readers/sgli_l1b.yaml @@ -1,7 +1,11 @@ reader: + name: sgli_l1b + short_name: SGLI GCOM-C L1B + long_name: GCOM-C SGLI Level 1B HDF5 format description: Reader for SGLI data + status: Beta + supports_fsspec: false reference: https://gportal.jaxa.jp/gpr/assets/mng_upload/GCOM-C/SGLI_Level1_Product_Format_Description_en.pdf - name: sgli_l1b sensors: 
[sgli] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 37f36934b8..2228b25916 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -1,6 +1,10 @@ reader: - description: VIIRS NOAA Enterprise EDR product reader name: viirs_edr + short_name: VIIRS JPSS EDR nc + long_name: JPSS VIIRS EDR NetCDF format + description: VIIRS NOAA Enterprise EDR product reader + status: Beta + supports_fsspec: false reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [viirs] group_keys: ['platform_shortname'] From 1eadfb573b066e99aaa4f074c7d862e2476c7d9f Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Fri, 16 Feb 2024 09:19:56 +0000 Subject: [PATCH 1149/1416] Make nominal time calculator private --- satpy/readers/ahi_hsd.py | 6 +++--- satpy/tests/reader_tests/test_ahi_hsd.py | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index cf257cf579..bf2ab09e79 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -433,13 +433,13 @@ def _timeline(self): @property def nominal_start_time(self): """Time this band was nominally to be recorded.""" - calc = NominalTimeCalculator(self._timeline, self.observation_area) + calc = _NominalTimeCalculator(self._timeline, self.observation_area) return calc.get_nominal_start_time(self.observation_start_time) @property def nominal_end_time(self): """Get the nominal end time.""" - calc = NominalTimeCalculator(self._timeline, self.observation_area) + calc = _NominalTimeCalculator(self._timeline, self.observation_area) return calc.get_nominal_end_time(self.nominal_start_time) def get_dataset(self, key, info): @@ -745,7 +745,7 @@ def _ir_calibrate(self, data): return (c0_ + c1_ * Te_ + c2_ * Te_ ** 2).clip(0) -class NominalTimeCalculator: +class _NominalTimeCalculator: """Get time when a scan was nominally to be recorded.""" def __init__(self, timeline, area): diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 3b73521ee8..393afca1c8 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -29,7 +29,7 @@ import numpy as np import pytest -from satpy.readers.ahi_hsd import AHIHSDFileHandler, NominalTimeCalculator +from satpy.readers.ahi_hsd import AHIHSDFileHandler, _NominalTimeCalculator from satpy.readers.utils import get_geostationary_mask from satpy.tests.utils import make_dataid @@ -645,7 +645,7 @@ class TestNominalTimeCalculator: ) def test_invalid_timeline(self, timeline, expected): """Test handling of invalid timeline.""" - calc = NominalTimeCalculator(timeline, "FLDK") + calc = _NominalTimeCalculator(timeline, "FLDK") res = calc.get_nominal_start_time(datetime(2020, 1, 1, 12, 0, 0)) assert res == expected @@ -697,7 +697,7 @@ def test_invalid_timeline(self, timeline, expected): def test_areas(self, area, expected): """Test nominal timestamps for multiple areas.""" obs_start_time = datetime(2018, 10, 22, 3, 0, 20, 596896) - calc = NominalTimeCalculator("0300", area) + calc = _NominalTimeCalculator("0300", area) nom_start_time = calc.get_nominal_start_time(obs_start_time) nom_end_time = calc.get_nominal_end_time(nom_start_time) assert nom_start_time == expected["tstart"] @@ -734,7 +734,7 @@ def test_areas(self, area, expected): ) def test_timelines(self, timeline, obs_start_time, expected): """Test nominal 
timestamps for multiple timelines.""" - calc = NominalTimeCalculator(timeline, "FLDK") + calc = _NominalTimeCalculator(timeline, "FLDK") nom_start_time = calc.get_nominal_start_time(obs_start_time) nom_end_time = calc.get_nominal_end_time(nom_start_time) assert nom_start_time == expected["tstart"] From a08797142e4d9b1ef45e6db8dae8b9486ed7194b Mon Sep 17 00:00:00 2001 From: BENR0 Date: Fri, 16 Feb 2024 10:24:26 +0100 Subject: [PATCH 1150/1416] refactor viirs platform name. --- satpy/etc/readers/viirs_compact.yaml | 2 +- satpy/etc/readers/viirs_l1b.yaml | 2 +- satpy/etc/readers/viirs_sdr.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/etc/readers/viirs_compact.yaml b/satpy/etc/readers/viirs_compact.yaml index 31f4201930..5dca3abbaa 100644 --- a/satpy/etc/readers/viirs_compact.yaml +++ b/satpy/etc/readers/viirs_compact.yaml @@ -1,7 +1,7 @@ reader: name: viirs_compact short_name: VIIRS Compact - long_name: SNPP VIIRS SDR data in HDF5 Compact format + long_name: JPSS VIIRS SDR data in HDF5 Compact format description: Generic Eumetsat Compact VIIRS Reader status: Nominal supports_fsspec: false diff --git a/satpy/etc/readers/viirs_l1b.yaml b/satpy/etc/readers/viirs_l1b.yaml index f078c4247d..4622f7e415 100644 --- a/satpy/etc/readers/viirs_l1b.yaml +++ b/satpy/etc/readers/viirs_l1b.yaml @@ -1,7 +1,7 @@ reader: name: viirs_l1b short_name: VIIRS l1b - long_name: SNPP VIIRS Level 1b data in netCDF4 format + long_name: JPSS VIIRS Level 1b data in netCDF4 format description: Generic NASA VIIRS L1B Reader status: Nominal supports_fsspec: false diff --git a/satpy/etc/readers/viirs_sdr.yaml b/satpy/etc/readers/viirs_sdr.yaml index e85c7f4f70..70f2c5f34a 100644 --- a/satpy/etc/readers/viirs_sdr.yaml +++ b/satpy/etc/readers/viirs_sdr.yaml @@ -1,7 +1,7 @@ reader: name: viirs_sdr short_name: VIIRS SDR - long_name: SNPP VIIRS data in HDF5 SDR format + long_name: JPSS VIIRS data in HDF5 SDR format description: VIIRS SDR Reader status: Nominal supports_fsspec: false From 8d45299e7df011afa1b19f0683cc3fa813337648 Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Fri, 16 Feb 2024 09:41:05 -0600 Subject: [PATCH 1151/1416] Add init to clavrx tests directory --- .../tests/reader_tests/test_clavrx/__init__.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 satpy/tests/reader_tests/test_clavrx/__init__.py diff --git a/satpy/tests/reader_tests/test_clavrx/__init__.py b/satpy/tests/reader_tests/test_clavrx/__init__.py new file mode 100644 index 0000000000..6f62e3a26b --- /dev/null +++ b/satpy/tests/reader_tests/test_clavrx/__init__.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2018 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""The clavrx reader tests package.""" From f6d1f1ff2c61e47b78e5b47613ef6e874909008f Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 16 Feb 2024 09:57:31 -0600 Subject: [PATCH 1152/1416] Fix apostrophes being replaced with double quotes --- .../tests/reader_tests/test_clavrx/test_clavrx_polarhdf.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/tests/reader_tests/test_clavrx/test_clavrx_polarhdf.py b/satpy/tests/reader_tests/test_clavrx/test_clavrx_polarhdf.py index 6b69d8a923..f8ae93c38b 100644 --- a/satpy/tests/reader_tests/test_clavrx/test_clavrx_polarhdf.py +++ b/satpy/tests/reader_tests/test_clavrx/test_clavrx_polarhdf.py @@ -186,7 +186,7 @@ def test_available_datasets(self): assert new_ds_infos[1][0] assert new_ds_infos[1][1]["resolution"] == 742 - # we have this, but don"t want to change the resolution + # we have this, but don't want to change the resolution # because a previous handler said it has it assert new_ds_infos[2][0] assert new_ds_infos[2][1]["resolution"] == 1 @@ -201,11 +201,11 @@ def test_available_datasets(self): assert new_ds_infos[4][0] assert new_ds_infos[4][1]["resolution"] == 742 - # we don"t have this variable, don"t change it + # we don"t have this variable, don't change it assert not new_ds_infos[5][0] assert new_ds_infos[5][1].get("resolution") is None - # we have this, but it isn"t supposed to come from our file type + # we have this, but it isn't supposed to come from our file type assert new_ds_infos[6][0] is None assert new_ds_infos[6][1].get("resolution") is None From 40633bd3ed906b3b669f7c0ab8a621458477125a Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 16 Feb 2024 10:09:40 -0600 Subject: [PATCH 1153/1416] Update short_name in satpy/etc/readers/viirs_edr.yaml --- satpy/etc/readers/viirs_edr.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 2228b25916..4c4c91a91f 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -1,6 +1,6 @@ reader: name: viirs_edr - short_name: VIIRS JPSS EDR nc + short_name: VIIRS EDR long_name: JPSS VIIRS EDR NetCDF format description: VIIRS NOAA Enterprise EDR product reader status: Beta From f16e85a846db61bd309c7797332483e3a1130efc Mon Sep 17 00:00:00 2001 From: BENR0 Date: Mon, 19 Feb 2024 09:51:41 +0100 Subject: [PATCH 1154/1416] refactor: set simonpr84's readers to nominal status. 
--- satpy/etc/readers/agri_fy4b_l1.yaml | 2 +- satpy/etc/readers/ghi_l1.yaml | 2 +- satpy/etc/readers/mersi_ll_l1b.yaml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/etc/readers/agri_fy4b_l1.yaml b/satpy/etc/readers/agri_fy4b_l1.yaml index 2b47e51cdb..4917d08145 100644 --- a/satpy/etc/readers/agri_fy4b_l1.yaml +++ b/satpy/etc/readers/agri_fy4b_l1.yaml @@ -7,7 +7,7 @@ reader: short_name: AGRI FY4B L1 long_name: FY-4B AGRI Level 1 data HDF5 format description: FY-4B AGRI instrument HDF5 reader - status: Beta + status: Nominal supports_fsspec: false sensors: [agri] default_channels: diff --git a/satpy/etc/readers/ghi_l1.yaml b/satpy/etc/readers/ghi_l1.yaml index 0c2e595253..08c438127b 100644 --- a/satpy/etc/readers/ghi_l1.yaml +++ b/satpy/etc/readers/ghi_l1.yaml @@ -7,7 +7,7 @@ reader: short_name: GHI FY4A L1 long_name: FY-4A GHI Level 1 HDF5 format description: FY-4A GHI instrument HDF5 reader - status: Beta + status: Nominal supports_fsspec: false sensors: [ghi] default_channels: diff --git a/satpy/etc/readers/mersi_ll_l1b.yaml b/satpy/etc/readers/mersi_ll_l1b.yaml index 6ea44dcb99..6e729f07d1 100644 --- a/satpy/etc/readers/mersi_ll_l1b.yaml +++ b/satpy/etc/readers/mersi_ll_l1b.yaml @@ -3,7 +3,7 @@ reader: short_name: MERSI Low Light FY3E L1B long_name: FY-3E MERSI Low Light Level 1B description: FY-3E Medium Resolution Spectral Imager - Low Light (MERSI-LL) L1B Reader - status: Beta + status: Nominal supports_fsspec: false sensors: [mersi-ll] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader From 8101eac924cf72506d012129958c2e877e2e26a4 Mon Sep 17 00:00:00 2001 From: BENR0 Date: Mon, 19 Feb 2024 09:56:09 +0100 Subject: [PATCH 1155/1416] refactor: change fsspec status for readers using hdf5_utils. --- satpy/etc/readers/agri_fy4b_l1.yaml | 2 +- satpy/etc/readers/mersi_ll_l1b.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/etc/readers/agri_fy4b_l1.yaml b/satpy/etc/readers/agri_fy4b_l1.yaml index 4917d08145..77c616b2e4 100644 --- a/satpy/etc/readers/agri_fy4b_l1.yaml +++ b/satpy/etc/readers/agri_fy4b_l1.yaml @@ -8,7 +8,7 @@ reader: long_name: FY-4B AGRI Level 1 data HDF5 format description: FY-4B AGRI instrument HDF5 reader status: Nominal - supports_fsspec: false + supports_fsspec: true sensors: [agri] default_channels: reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader diff --git a/satpy/etc/readers/mersi_ll_l1b.yaml b/satpy/etc/readers/mersi_ll_l1b.yaml index 6e729f07d1..652708d733 100644 --- a/satpy/etc/readers/mersi_ll_l1b.yaml +++ b/satpy/etc/readers/mersi_ll_l1b.yaml @@ -4,7 +4,7 @@ reader: long_name: FY-3E MERSI Low Light Level 1B description: FY-3E Medium Resolution Spectral Imager - Low Light (MERSI-LL) L1B Reader status: Nominal - supports_fsspec: false + supports_fsspec: true sensors: [mersi-ll] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader From 14e11fcdd5b3cbf423e2667afbd673029c955a88 Mon Sep 17 00:00:00 2001 From: Will Sharpe Date: Tue, 20 Feb 2024 14:23:00 +0000 Subject: [PATCH 1156/1416] Removed colormaps and changed standard_names --- satpy/etc/colormaps/aerosol_thickness.csv | 151 ----------- satpy/etc/colormaps/angstrom_exponent.csv | 251 ------------------- satpy/etc/colormaps/clear_sky_confidence.csv | 101 -------- satpy/etc/colormaps/cloud_top_height.csv | 32 --- satpy/etc/enhancements/generic.yaml | 20 +- satpy/etc/readers/viirs_l2.yaml | 8 +- 6 files changed, 16 insertions(+), 547 deletions(-) delete mode 100644 satpy/etc/colormaps/aerosol_thickness.csv delete mode 
100644 satpy/etc/colormaps/angstrom_exponent.csv delete mode 100644 satpy/etc/colormaps/clear_sky_confidence.csv delete mode 100644 satpy/etc/colormaps/cloud_top_height.csv diff --git a/satpy/etc/colormaps/aerosol_thickness.csv b/satpy/etc/colormaps/aerosol_thickness.csv deleted file mode 100644 index 39d8527f6c..0000000000 --- a/satpy/etc/colormaps/aerosol_thickness.csv +++ /dev/null @@ -1,151 +0,0 @@ -0, 255, 252, 199 -0.005, 255, 251, 193 -0.01, 255, 250, 188 -0.015, 255, 249, 183 -0.02, 255, 248, 178 -0.025, 255, 247, 173 -0.03, 255, 246, 167 -0.035, 255, 245, 162 -0.04, 255, 244, 157 -0.045, 255, 243, 152 -0.05, 255, 242, 147 -0.055, 255, 240, 144 -0.06, 255, 239, 141 -0.065, 255, 238, 138 -0.07, 255, 236, 135 -0.075, 255, 235, 132 -0.08, 255, 234, 129 -0.085, 255, 232, 126 -0.09, 255, 231, 123 -0.095, 255, 230, 120 -0.1, 255, 229, 118 -0.105, 255, 227, 115 -0.11, 255, 226, 113 -0.115, 255, 225, 110 -0.12, 255, 223, 108 -0.125, 255, 222, 106 -0.13, 255, 221, 103 -0.135, 255, 219, 101 -0.14, 255, 218, 98 -0.145, 255, 217, 96 -0.15, 255, 216, 94 -0.155, 255, 214, 91 -0.16, 255, 212, 89 -0.165, 255, 210, 87 -0.17, 255, 208, 85 -0.175, 255, 207, 83 -0.18, 255, 205, 80 -0.185, 255, 203, 78 -0.19, 255, 201, 76 -0.195, 255, 199, 74 -0.2, 255, 198, 72 -0.205, 255, 195, 70 -0.21, 255, 193, 68 -0.215, 255, 190, 66 -0.22, 255, 188, 64 -0.225, 255, 185, 62 -0.23, 255, 183, 60 -0.235, 255, 180, 58 -0.24, 255, 178, 56 -0.245, 255, 175, 54 -0.25, 255, 173, 53 -0.255, 255, 170, 51 -0.26, 255, 168, 50 -0.265, 255, 165, 49 -0.27, 255, 163, 47 -0.275, 255, 161, 46 -0.28, 255, 158, 45 -0.285, 255, 156, 43 -0.29, 255, 153, 42 -0.295, 255, 151, 41 -0.3, 255, 149, 40 -0.305, 255, 146, 39 -0.31, 255, 144, 38 -0.315, 255, 142, 37 -0.32, 255, 140, 37 -0.325, 255, 138, 36 -0.33, 255, 135, 35 -0.335, 255, 133, 35 -0.34, 255, 131, 34 -0.345, 255, 129, 33 -0.35, 255, 127, 33 -0.355, 255, 124, 32 -0.36, 255, 121, 31 -0.365, 255, 118, 31 -0.37, 255, 115, 30 -0.375, 255, 112, 30 -0.38, 255, 109, 29 -0.385, 255, 106, 28 -0.39, 255, 103, 28 -0.395, 255, 100, 27 -0.4, 255, 98, 27 -0.405, 255, 94, 26 -0.41, 255, 91, 25 -0.415, 255, 88, 24 -0.42, 255, 85, 24 -0.425, 255, 82, 23 -0.43, 255, 78, 22 -0.435, 255, 75, 22 -0.44, 255, 72, 21 -0.445, 255, 69, 20 -0.45, 255, 66, 20 -0.455, 254, 63, 19 -0.46, 253, 60, 19 -0.465, 252, 58, 18 -0.47, 251, 55, 18 -0.475, 250, 53, 18 -0.48, 249, 50, 17 -0.485, 248, 47, 17 -0.49, 247, 45, 16 -0.495, 246, 42, 16 -0.5, 245, 40, 16 -0.505, 243, 38, 15 -0.51, 242, 36, 15 -0.515, 240, 34, 14 -0.52, 239, 32, 14 -0.525, 238, 30, 13 -0.53, 236, 28, 13 -0.535, 235, 26, 12 -0.54, 233, 24, 12 -0.545, 232, 22, 11 -0.55, 231, 20, 11 -0.555, 229, 18, 11 -0.56, 227, 17, 11 -0.565, 225, 16, 11 -0.57, 223, 14, 11 -0.575, 221, 13, 11 -0.58, 219, 12, 11 -0.585, 217, 10, 11 -0.59, 215, 9, 11 -0.595, 213, 8, 11 -0.6, 211, 7, 12 -0.605, 208, 6, 12 -0.61, 206, 5, 12 -0.615, 204, 4, 12 -0.62, 201, 4, 12 -0.625, 199, 3, 13 -0.63, 197, 2, 13 -0.635, 194, 2, 13 -0.64, 192, 1, 13 -0.645, 190, 0, 13 -0.65, 188, 0, 14 -0.655, 184, 0, 14 -0.66, 181, 0, 14 -0.665, 178, 0, 14 -0.67, 174, 0, 14 -0.675, 171, 0, 14 -0.68, 168, 0, 14 -0.685, 164, 0, 14 -0.69, 161, 0, 14 -0.695, 158, 0, 14 -0.7, 155, 0, 14 -1.13, 152, 0, 14 -1.56, 149, 0, 14 -1.99, 146, 0, 14 -2.42, 143, 0, 14 -2.85, 140, 0, 14 -3.28, 137, 0, 14 -3.71, 134, 0, 14 -4.14, 131, 0, 14 -4.57, 128, 0, 14 -5, 125, 0, 14 diff --git a/satpy/etc/colormaps/angstrom_exponent.csv b/satpy/etc/colormaps/angstrom_exponent.csv deleted file mode 100644 index 
a14ecd17d4..0000000000 --- a/satpy/etc/colormaps/angstrom_exponent.csv +++ /dev/null @@ -1,251 +0,0 @@ -0, 122, 145, 2 -0.01, 123, 148, 3 -0.02, 124, 150, 4 -0.03, 124, 153, 5 -0.04, 125, 155, 6 -0.05, 126, 158, 7 -0.06, 127, 160, 8 -0.07, 127, 163, 9 -0.08, 128, 165, 10 -0.09, 129, 168, 11 -0.1, 130, 170, 12 -0.11, 130, 173, 13 -0.12, 131, 175, 14 -0.13, 132, 178, 15 -0.14, 133, 181, 16 -0.15, 132, 183, 18 -0.16, 132, 185, 20 -0.17, 132, 187, 22 -0.18, 132, 189, 25 -0.19, 132, 191, 27 -0.2, 132, 193, 29 -0.21, 132, 195, 31 -0.22, 131, 197, 34 -0.23, 131, 199, 36 -0.24, 131, 201, 38 -0.25, 131, 203, 40 -0.26, 131, 205, 43 -0.27, 131, 207, 45 -0.28, 131, 209, 47 -0.29, 131, 212, 50 -0.3, 130, 213, 51 -0.31, 129, 215, 53 -0.32, 128, 217, 55 -0.33, 128, 219, 57 -0.34, 127, 221, 59 -0.35, 126, 222, 61 -0.36, 125, 224, 63 -0.37, 125, 226, 64 -0.38, 124, 228, 66 -0.39, 123, 230, 68 -0.4, 122, 231, 70 -0.41, 122, 233, 72 -0.42, 121, 235, 74 -0.43, 120, 237, 76 -0.44, 120, 239, 78 -0.45, 119, 239, 79 -0.46, 118, 240, 80 -0.47, 117, 241, 82 -0.48, 116, 242, 83 -0.49, 116, 243, 85 -0.5, 115, 244, 86 -0.51, 114, 245, 87 -0.52, 113, 246, 89 -0.53, 112, 247, 90 -0.54, 112, 248, 92 -0.55, 111, 249, 93 -0.56, 110, 250, 94 -0.57, 109, 251, 96 -0.58, 108, 252, 97 -0.59, 108, 253, 99 -0.6, 107, 252, 100 -0.61, 106, 252, 102 -0.62, 106, 252, 103 -0.63, 105, 251, 105 -0.64, 105, 251, 106 -0.65, 104, 251, 108 -0.66, 103, 251, 109 -0.67, 103, 250, 111 -0.68, 102, 250, 112 -0.69, 102, 250, 114 -0.7, 101, 250, 115 -0.71, 100, 249, 117 -0.72, 100, 249, 118 -0.73, 99, 249, 120 -0.74, 99, 249, 122 -0.75, 98, 247, 123 -0.76, 97, 246, 124 -0.77, 96, 245, 126 -0.78, 95, 244, 127 -0.79, 94, 243, 128 -0.8, 93, 242, 130 -0.81, 92, 241, 131 -0.82, 92, 239, 132 -0.83, 91, 238, 134 -0.84, 90, 237, 135 -0.85, 89, 236, 136 -0.86, 88, 235, 138 -0.87, 87, 234, 139 -0.88, 86, 233, 140 -0.89, 86, 232, 142 -0.9, 85, 230, 143 -0.91, 84, 229, 144 -0.92, 83, 228, 145 -0.93, 82, 226, 147 -0.94, 81, 225, 148 -0.95, 80, 224, 149 -0.96, 79, 223, 150 -0.97, 78, 221, 152 -0.98, 77, 220, 153 -0.99, 76, 219, 154 -1, 75, 218, 155 -1.01, 74, 216, 157 -1.02, 73, 215, 158 -1.03, 72, 214, 159 -1.04, 72, 213, 161 -1.05, 71, 211, 162 -1.06, 70, 209, 163 -1.07, 69, 208, 164 -1.08, 68, 206, 165 -1.09, 67, 205, 166 -1.1, 66, 203, 167 -1.11, 65, 201, 168 -1.12, 64, 200, 170 -1.13, 63, 198, 171 -1.14, 62, 197, 172 -1.15, 61, 195, 173 -1.16, 60, 193, 174 -1.17, 59, 192, 175 -1.18, 58, 190, 176 -1.19, 58, 189, 178 -1.2, 58, 187, 178 -1.21, 58, 185, 179 -1.22, 58, 184, 180 -1.23, 58, 182, 181 -1.24, 58, 181, 182 -1.25, 58, 179, 183 -1.26, 58, 178, 184 -1.27, 59, 176, 184 -1.28, 59, 175, 185 -1.29, 59, 173, 186 -1.3, 59, 172, 187 -1.31, 59, 170, 188 -1.32, 59, 169, 189 -1.33, 59, 167, 190 -1.34, 60, 166, 191 -1.35, 60, 164, 191 -1.36, 61, 162, 192 -1.37, 61, 160, 193 -1.38, 62, 158, 194 -1.39, 63, 156, 195 -1.4, 63, 154, 195 -1.41, 64, 152, 196 -1.42, 64, 150, 197 -1.43, 65, 148, 198 -1.44, 66, 146, 199 -1.45, 66, 144, 199 -1.46, 67, 142, 200 -1.47, 67, 140, 201 -1.48, 68, 138, 202 -1.49, 69, 137, 203 -1.5, 69, 135, 203 -1.51, 70, 133, 204 -1.52, 70, 131, 205 -1.53, 71, 129, 205 -1.54, 72, 128, 206 -1.55, 72, 126, 207 -1.56, 73, 124, 207 -1.57, 73, 122, 208 -1.58, 74, 120, 209 -1.59, 75, 119, 209 -1.6, 75, 117, 210 -1.61, 76, 115, 211 -1.62, 76, 113, 211 -1.63, 77, 111, 212 -1.64, 78, 110, 213 -1.65, 78, 108, 213 -1.66, 79, 106, 214 -1.67, 80, 104, 214 -1.68, 80, 102, 215 -1.69, 81, 101, 216 -1.7, 82, 99, 216 -1.71, 82, 97, 217 -1.72, 83, 95, 217 -1.73, 84, 
93, 218 -1.74, 84, 92, 219 -1.75, 85, 90, 219 -1.76, 86, 88, 220 -1.77, 86, 86, 220 -1.78, 87, 84, 221 -1.79, 88, 83, 222 -1.8, 88, 82, 222 -1.81, 89, 81, 223 -1.82, 90, 80, 223 -1.83, 91, 80, 224 -1.84, 92, 79, 224 -1.85, 93, 78, 225 -1.86, 94, 77, 225 -1.87, 95, 77, 226 -1.88, 96, 76, 226 -1.89, 97, 75, 227 -1.9, 98, 74, 227 -1.91, 99, 74, 228 -1.92, 100, 73, 228 -1.93, 101, 72, 229 -1.94, 102, 72, 230 -1.95, 104, 72, 230 -1.96, 106, 73, 230 -1.97, 108, 73, 230 -1.98, 110, 74, 231 -1.99, 112, 74, 231 -2, 114, 75, 231 -2.01, 116, 75, 231 -2.02, 118, 76, 232 -2.03, 120, 76, 232 -2.04, 122, 77, 232 -2.05, 124, 77, 232 -2.06, 126, 78, 233 -2.07, 128, 78, 233 -2.08, 130, 79, 233 -2.09, 133, 80, 234 -2.1, 135, 80, 234 -2.11, 137, 80, 234 -2.12, 139, 81, 234 -2.13, 141, 81, 234 -2.14, 143, 81, 234 -2.15, 145, 82, 234 -2.16, 147, 82, 234 -2.17, 149, 82, 234 -2.18, 151, 83, 234 -2.19, 153, 83, 234 -2.2, 155, 83, 234 -2.21, 157, 84, 234 -2.22, 159, 84, 234 -2.23, 161, 84, 234 -2.24, 164, 85, 235 -2.25, 165, 85, 235 -2.26, 166, 85, 235 -2.27, 168, 85, 235 -2.28, 169, 85, 235 -2.29, 171, 85, 235 -2.3, 172, 85, 235 -2.31, 174, 85, 235 -2.32, 175, 86, 235 -2.33, 177, 86, 235 -2.34, 178, 86, 235 -2.35, 180, 86, 235 -2.36, 181, 86, 235 -2.37, 183, 86, 235 -2.38, 184, 86, 235 -2.39, 186, 87, 235 -2.4, 187, 87, 234 -2.41, 188, 87, 234 -2.42, 190, 87, 234 -2.43, 191, 88, 234 -2.44, 193, 88, 234 -2.45, 194, 88, 234 -2.46, 196, 88, 234 -2.47, 197, 89, 234 -2.48, 199, 89, 234 -2.49, 200, 89, 234 -2.5, 202, 89, 234 diff --git a/satpy/etc/colormaps/clear_sky_confidence.csv b/satpy/etc/colormaps/clear_sky_confidence.csv deleted file mode 100644 index c4743b694b..0000000000 --- a/satpy/etc/colormaps/clear_sky_confidence.csv +++ /dev/null @@ -1,101 +0,0 @@ -0, 255, 247, 236 -0.01, 254, 246, 233 -0.02, 254, 244, 230 -0.03, 254, 243, 228 -0.04, 254, 242, 224 -0.05, 254, 241, 222 -0.06, 254, 239, 219 -0.07, 254, 239, 216 -0.08, 254, 237, 213 -0.09, 254, 236, 210 -0.1, 254, 235, 207 -0.11, 254, 233, 204 -0.12, 254, 232, 202 -0.13, 253, 231, 198 -0.14, 253, 230, 195 -0.15, 253, 228, 191 -0.16, 253, 226, 189 -0.17, 253, 225, 185 -0.18, 253, 223, 181 -0.19, 253, 221, 178 -0.2, 253, 220, 174 -0.21, 253, 218, 172 -0.22, 253, 216, 168 -0.23, 253, 215, 165 -0.24, 253, 213, 161 -0.25, 253, 211, 157 -0.26, 253, 210, 156 -0.27, 253, 207, 153 -0.28, 253, 206, 152 -0.29, 253, 203, 149 -0.3, 253, 202, 148 -0.31, 253, 200, 145 -0.32, 253, 198, 143 -0.33, 253, 196, 141 -0.34, 253, 193, 139 -0.35, 253, 192, 137 -0.36, 253, 189, 134 -0.37, 253, 188, 133 -0.38, 252, 185, 130 -0.39, 252, 182, 127 -0.4, 252, 177, 123 -0.41, 252, 174, 120 -0.42, 252, 170, 116 -0.43, 252, 166, 112 -0.44, 252, 163, 109 -0.45, 252, 159, 105 -0.46, 252, 156, 103 -0.47, 252, 151, 99 -0.48, 252, 148, 96 -0.49, 252, 144, 92 -0.5, 251, 140, 88 -0.51, 250, 137, 87 -0.52, 249, 134, 86 -0.53, 248, 131, 85 -0.54, 247, 127, 83 -0.55, 246, 125, 82 -0.56, 245, 121, 80 -0.57, 244, 119, 79 -0.58, 243, 115, 78 -0.59, 242, 111, 76 -0.6, 241, 109, 75 -0.61, 240, 105, 73 -0.62, 239, 102, 72 -0.63, 237, 98, 69 -0.64, 236, 94, 67 -0.65, 234, 89, 63 -0.66, 232, 86, 60 -0.67, 230, 81, 57 -0.68, 227, 76, 53 -0.69, 226, 73, 50 -0.7, 224, 68, 46 -0.71, 222, 65, 44 -0.72, 220, 60, 40 -0.73, 218, 56, 37 -0.74, 216, 51, 33 -0.75, 214, 46, 30 -0.76, 211, 43, 28 -0.77, 208, 39, 25 -0.78, 206, 36, 23 -0.79, 202, 31, 20 -0.8, 200, 28, 188 -0.81, 197, 24, 15 -0.82, 194, 21, 13 -0.83, 191, 16, 10 -0.84, 188, 12, 7 -0.85, 185, 9, 5 -0.86, 182, 4, 3 -0.87, 180, 1, 1 -0.88, 175, 0, 0 -0.89, 
172, 0, 0 -0.9, 167, 0, 0 -0.91, 164, 0, 0 -0.92, 159, 0, 0 -0.93, 154, 0, 0 -0.94, 151, 0, 0 -0.95, 146, 0, 0 -0.96, 143, 0, 0 -0.97, 138, 0, 0 -0.98, 135, 0, 0 -0.99, 130, 0, 0 -1, 127, 0, 0 diff --git a/satpy/etc/colormaps/cloud_top_height.csv b/satpy/etc/colormaps/cloud_top_height.csv deleted file mode 100644 index d1fa053029..0000000000 --- a/satpy/etc/colormaps/cloud_top_height.csv +++ /dev/null @@ -1,32 +0,0 @@ -0 , 255 , 0 , 0 -800 , 255 , 0 , 0 -800.0001 , 170 , 0 , 0 -1600 , 170 , 0 , 0 -1600.0001 , 110 , 0 , 0 -2350 , 110 , 0 , 0 -2350.0001 , 112 , 1 , 2 -3150 , 112 , 1 , 2 -3150.0001 , 124 , 91 , 5 -4000 , 124 , 91 , 5 -4000.0001 , 240 , 190 , 64 -4800 , 240 , 190 , 64 -4800.0001 , 255 , 255 , 0 -5600 , 255 , 255 , 0 -5600.0001 , 0 , 220 , 0 -6400 , 0 , 220 , 0 -6400.0001 , 0 , 136 , 0 -7200 , 0 , 136 , 0 -7200.0001 , 0 , 80 , 0 -8000 , 0 , 80 , 0 -8000.0001 , 0 , 136 , 238 -8800 , 0 , 136 , 238 -8800.0001 , 0 , 0 , 255 -9600 , 0 , 0 , 255 -9600.0001 , 0 , 0 , 170 -10400 , 0 , 0 , 170 -10400.0001 , 0 , 0 , 100 -11200 , 0 , 0 , 100 -11200.0001 , 183 , 15 , 141 -12000 , 183 , 15 , 141 -12000.0001 , 102 , 0 , 119 -18000 , 102 , 0 , 119 diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index e79ec953fb..8441ac8729 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -1231,8 +1231,9 @@ enhancements: min_stretch: [0,0,0] max_stretch: [1,1,1] - Cloud_Top_Height: - standard_name: cldprop_cloud_top_height + cloud_top_height: + name: Cloud_Top_Height + reader: viirs_l2 operations: - name: colorize method: !!python/name:satpy.enhancements.colorize @@ -1242,8 +1243,9 @@ enhancements: filename: colormaps/cloud_top_height.csv } - Clear_Sky_Confidence: - standard_name: cldmsk_clear_sky_confidence + clear_sky_confidence: + name: Clear_Sky_Confidence + reader: viirs_l2 operations: - name: colorize method: !!python/name:satpy.enhancements.colorize @@ -1253,8 +1255,9 @@ enhancements: filename: colormaps/clear_sky_confidence.csv } - Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate: - standard_name: aerdb_aerosol_optical_thickness_500_land_ocean + aerosol_optical_thickness_550_land_ocean_best_estimate: + name: Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate + reader: viirs_l2 operations: - name: colorize method: !!python/name:satpy.enhancements.colorize @@ -1264,8 +1267,9 @@ enhancements: filename: colormaps/aerosol_thickness.csv } - Angstrom_Exponent_Land_Ocean_Best_Estimate: - standard_name: aerdb_angstrom_exponent_land_ocean_best_estimate + angstrom_exponent_land_ocean_best_estimate: + name: Angstrom_Exponent_Land_Ocean_Best_Estimate + reader: viirs_l2 operations: - name: colorize method: !!python/name:satpy.enhancements.colorize diff --git a/satpy/etc/readers/viirs_l2.yaml b/satpy/etc/readers/viirs_l2.yaml index 9df5df3597..bfeadab6b9 100644 --- a/satpy/etc/readers/viirs_l2.yaml +++ b/satpy/etc/readers/viirs_l2.yaml @@ -89,7 +89,7 @@ datasets: coordinates: [cld_lon, cld_lat] file_key: geophysical_data/Clear_Sky_Confidence file_type: cldmsk_l2_viirs - standard_name: cldmsk_clear_sky_confidence + standard_name: clear_sky_confidence ################################### # Datasets in file cldprop_l2_viirs @@ -101,7 +101,7 @@ datasets: coordinates: [cld_lon,cld_lat] file_key: geophysical_data/Cloud_Top_Height file_type: cldprop_l2_viirs - standard_name: cldprop_cloud_top_height + standard_name: cloud_top_height ########################################## # Datasets in files aerdb_l2_viirs @@ -113,7 +113,7 @@ 
datasets: coordinates: [aerdb_lon,aerdb_lat] file_key: Angstrom_Exponent_Land_Ocean_Best_Estimate file_type: [aerdb_l2_viirs] - standard_name: aerdb_angstrom_exponent_land_ocean_best_estimate + standard_name: angstrom_exponent_land_ocean_best_estimate Aerosol_Optical_Thickness_550_Land_Ocean: name: Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate @@ -122,4 +122,4 @@ datasets: coordinates: [aerdb_lon,aerdb_lat] file_key: Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate file_type: [aerdb_l2_viirs] - standard_name: aerdb_aerosol_optical_thickness_500_land_ocean + standard_name: aerosol_optical_thickness_550_land_ocean From 5320bcae8889692eff6b57436a2d66c99239d295 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 20 Feb 2024 18:24:15 +0100 Subject: [PATCH 1157/1416] Fix concurrency group in ci --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 2716e7f792..c472709bc8 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -2,7 +2,7 @@ name: CI # https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency # https://docs.github.com/en/developers/webhooks-and-events/events/github-event-types#pullrequestevent concurrency: - group: ${{ github.workflow }}-${{ github.event.number }}-${{ github.event.type }} + group: ${{ github.workflow }}-${{ github.event.number || github.event.ref }}-${{ github.event.type }} cancel-in-progress: true on: [push, pull_request] From fe2a54b8587c5573d1f9d77ac72178de1a9e8707 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 20 Feb 2024 19:44:33 +0000 Subject: [PATCH 1158/1416] Add tests for MERSI-RM reader. --- satpy/tests/reader_tests/test_mersi_l1b.py | 134 +++++++++++++++++++-- 1 file changed, 127 insertions(+), 7 deletions(-) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index 1df0d41f12..54bd46294b 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -27,9 +27,9 @@ from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler -def _get_calibration(num_scans): +def _get_calibration(num_scans, ftype): calibration = { - "Calibration/VIS_Cal_Coeff": + f"Calibration/{ftype}_Cal_Coeff": xr.DataArray( da.ones((19, 3), chunks=1024), attrs={"Slope": np.array([1.] * 19), "Intercept": np.array([0.] * 19)}, @@ -87,6 +87,37 @@ def _get_250m_data(num_scans, rows_per_scan, num_cols): return data +def _get_500m_data(num_scans, rows_per_scan, num_cols): + data = { + "Data/EV_Reflectance": + xr.DataArray( + da.ones((5, num_scans * rows_per_scan, num_cols), chunks=1024, + dtype=np.uint16), + attrs={ + "Slope": np.array([1.] * 5), "Intercept": np.array([0.] * 5), + "FillValue": 65535, + "units": "NO", + "valid_range": [0, 4095], + "long_name": b"500m Earth View Science Data", + }, + dims=("_ref_bands", "_rows", "_cols")), + "Data/EV_Emissive": + xr.DataArray( + da.ones((3, num_scans * rows_per_scan, num_cols), chunks=1024, + dtype=np.uint16), + attrs={ + "Slope": np.array([1.] * 3), "Intercept": np.array([0.] 
* 3), + "FillValue": 65535, + "units": "mW/ (m2 cm-1 sr)", + "valid_range": [0, 25000], + "long_name": b"500m Emissive Bands Earth View " + b"Science Data", + }, + dims=("_ir_bands", "_rows", "_cols")), + } + return data + + def _get_1km_data(num_scans, rows_per_scan, num_cols): data = { "Data/EV_1KM_LL": @@ -236,24 +267,30 @@ def get_test_content(self, filename, filename_info, filetype_info): "/attr/Observing Ending Time": "18:38:36.728", } - global_attrs = self._set_sensor_attrs(global_attrs) + global_attrs, ftype = self._set_sensor_attrs(global_attrs) self._add_tbb_coefficients(global_attrs) data = self._get_data_file_content() test_content = {} test_content.update(global_attrs) test_content.update(data) - test_content.update(_get_calibration(self.num_scans)) + test_content.update(_get_calibration(self.num_scans, ftype)) return test_content def _set_sensor_attrs(self, global_attrs): if "mersi2_l1b" in self.filetype_info["file_type"]: global_attrs["/attr/Satellite Name"] = "FY-3D" global_attrs["/attr/Sensor Identification Code"] = "MERSI" + ftype = "VIS" elif "mersi_ll" in self.filetype_info["file_type"]: global_attrs["/attr/Satellite Name"] = "FY-3E" global_attrs["/attr/Sensor Identification Code"] = "MERSI LL" - return global_attrs + ftype = "VIS" + elif "mersi_rm" in self.filetype_info["file_type"]: + global_attrs["/attr/Satellite Name"] = "FY-3G" + global_attrs["/attr/Sensor Identification Code"] = "MERSI RM" + ftype = "RSB" + return global_attrs, ftype def _get_data_file_content(self): if "_geo" in self.filetype_info["file_type"]: @@ -272,8 +309,16 @@ def _add_band_data_file_content(self): num_scans = self.num_scans rows_per_scan = self._rows_per_scan is_mersi2 = self.filetype_info["file_type"].startswith("mersi2_") + is_mersill = self.filetype_info["file_type"].startswith("mersi_ll") is_1km = "_1000" in self.filetype_info["file_type"] - data_func = _get_1km_data if is_1km else (_get_250m_data if is_mersi2 else _get_250m_ll_data) + if is_1km: + data_func = _get_1km_data + elif is_mersi2: + data_func = _get_250m_data + elif is_mersill: + data_func = _get_250m_ll_data + else: + data_func = _get_500m_data return data_func(num_scans, rows_per_scan, num_cols) def _add_tbb_coefficients(self, global_attrs): @@ -293,7 +338,12 @@ def _num_cols_for_file_type(self): @property def _geo_prefix_for_file_type(self): - return "Geolocation/" if "1000" in self.filetype_info["file_type"] else "" + if "1000" in self.filetype_info["file_type"]: + return "Geolocation/" + elif "500" in self.filetype_info["file_type"]: + return "Geolocation/" + else: + return "" def _test_helper(res): @@ -745,3 +795,73 @@ def test_250_resolutions(self): assert (2 * 40, 2048 * 2) == res["7"].shape assert "brightness_temperature" == res["7"].attrs["calibration"] assert "K" == res["7"].attrs["units"] + + +class TestMERSIRML1B(MERSIL1BTester): + """Test the FY3E MERSI-RM L1B reader.""" + + yaml_file = "mersi_rm_l1b.yaml" + filenames_500m = ["FY3G_MERSI_GRAN_L1_20230410_1910_0500M_V1.HDF", + "FY3G_MERSI_GRAN_L1_20230410_1910_GEOHK_V1.HDF", + ] + + def test_500m_resolution(self): + """Test loading data when all resolutions are available.""" + from satpy.dataset.data_dict import get_key + from satpy.readers import load_reader + from satpy.utils import debug_on + debug_on() + from satpy.tests.utils import make_dataid + filenames = self.filenames_500m + print(filenames) + reader = load_reader(self.reader_configs) + files = reader.select_files_from_pathnames(filenames) + assert 2 == len(files) + reader.create_filehandlers(files) + # 
Make sure we have some files + assert reader.file_handlers + + res = reader.load(["1", "2", "4", "7"]) + assert len(res) == 4 + assert res["4"].shape == (2 * 10, 4096) + assert res["1"].attrs["calibration"] == "reflectance" + assert res["1"].attrs["units"] == "%" + assert res["2"].shape == (2 * 10, 4096) + assert res["2"].attrs["calibration"] == "reflectance" + assert res["2"].attrs["units"] == "%" + assert res["7"].shape == (20, 2048 * 2) + assert res["7"].attrs["calibration"] == "brightness_temperature" + assert res["7"].attrs["units"] == "K" + + def test_rad_calib(self): + """Test loading data at radiance calibration.""" + from satpy.readers import load_reader + from satpy.tests.utils import make_dataid + filenames = self.filenames_500m + reader = load_reader(self.reader_configs) + files = reader.select_files_from_pathnames(filenames) + assert 2 == len(files) + reader.create_filehandlers(files) + # Make sure we have some files + assert reader.file_handlers + + ds_ids = [] + for band_name in ["1", "3", "4", "6", "7"]: + ds_ids.append(make_dataid(name=band_name, calibration="radiance")) + res = reader.load(ds_ids) + assert len(res) == 5 + assert res["1"].shape == (20, 4096) + assert res["1"].attrs["calibration"] == "radiance" + assert res["1"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["3"].shape == (20, 4096) + assert res["3"].attrs["calibration"] == "radiance" + assert res["3"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["4"].shape == (20, 4096) + assert res["4"].attrs["calibration"] == "radiance" + assert res["4"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["6"].shape == (20, 4096) + assert res["6"].attrs["calibration"] == "radiance" + assert res["6"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["7"].shape == (20, 4096) + assert res["7"].attrs["calibration"] == "radiance" + assert res["7"].attrs["units"] == "mW/ (m2 cm-1 sr)" From 8c18deaaadf98d3bd995fdbde38ffe58698fd9eb Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 20 Feb 2024 19:46:58 +0000 Subject: [PATCH 1159/1416] Update tests for MERSI-RM reader. 
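This drops the leftover debugging from the new MERSI-RM tests: the unused `get_key` and `make_dataid` imports, a stray `print(filenames)` and the global `debug_on()` call, as the diff below shows. For reference, a minimal sketch of what the removed helper does, assuming nothing beyond what the diff itself imports:

```python
# Sketch, not part of the patch: satpy.utils.debug_on() switches
# Satpy's logging to DEBUG for the whole process, which is useful
# while developing a reader but far too noisy for committed tests.
from satpy.utils import debug_on

debug_on()
```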
--- satpy/tests/reader_tests/test_mersi_l1b.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index 54bd46294b..1b3d280566 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -807,13 +807,8 @@ class TestMERSIRML1B(MERSIL1BTester): def test_500m_resolution(self): """Test loading data when all resolutions are available.""" - from satpy.dataset.data_dict import get_key from satpy.readers import load_reader - from satpy.utils import debug_on - debug_on() - from satpy.tests.utils import make_dataid filenames = self.filenames_500m - print(filenames) reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) assert 2 == len(files) From c986144ce37d90d3c93c7563800993b72d1d87e3 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 21 Feb 2024 08:59:58 +0100 Subject: [PATCH 1160/1416] Update satpy/tests/reader_tests/test_mersi_l1b.py Co-authored-by: Martin Raspaud --- satpy/tests/reader_tests/test_mersi_l1b.py | 22 ++++++---------------- 1 file changed, 6 insertions(+), 16 deletions(-) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index 1b3d280566..5ec016d456 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -840,23 +840,13 @@ def test_rad_calib(self): # Make sure we have some files assert reader.file_handlers + band_names = ["1", "3", "4", "6", "7"] ds_ids = [] - for band_name in ["1", "3", "4", "6", "7"]: + for band_name in band_names: ds_ids.append(make_dataid(name=band_name, calibration="radiance")) res = reader.load(ds_ids) assert len(res) == 5 - assert res["1"].shape == (20, 4096) - assert res["1"].attrs["calibration"] == "radiance" - assert res["1"].attrs["units"] == "mW/ (m2 cm-1 sr)" - assert res["3"].shape == (20, 4096) - assert res["3"].attrs["calibration"] == "radiance" - assert res["3"].attrs["units"] == "mW/ (m2 cm-1 sr)" - assert res["4"].shape == (20, 4096) - assert res["4"].attrs["calibration"] == "radiance" - assert res["4"].attrs["units"] == "mW/ (m2 cm-1 sr)" - assert res["6"].shape == (20, 4096) - assert res["6"].attrs["calibration"] == "radiance" - assert res["6"].attrs["units"] == "mW/ (m2 cm-1 sr)" - assert res["7"].shape == (20, 4096) - assert res["7"].attrs["calibration"] == "radiance" - assert res["7"].attrs["units"] == "mW/ (m2 cm-1 sr)" + for band name in band_names: + assert res[band_name].shape == (20, 4096) + assert res[band_name].attrs["calibration"] == "radiance" + assert res[band_name].attrs["units"] == "mW/ (m2 cm-1 sr)" From ed751f86d51c9a803f5169e23d2064948fd75b44 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 21 Feb 2024 09:01:18 +0100 Subject: [PATCH 1161/1416] Fix typo in MERSI tests. 
--- satpy/tests/reader_tests/test_mersi_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index 5ec016d456..a3145d3f76 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -846,7 +846,7 @@ def test_rad_calib(self): ds_ids.append(make_dataid(name=band_name, calibration="radiance")) res = reader.load(ds_ids) assert len(res) == 5 - for band name in band_names: + for band_name in band_names: assert res[band_name].shape == (20, 4096) assert res[band_name].attrs["calibration"] == "radiance" assert res[band_name].attrs["units"] == "mW/ (m2 cm-1 sr)" From ef357a5423c960cf02602e83ac65b8bf32c1cc39 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 21 Feb 2024 12:27:06 +0100 Subject: [PATCH 1162/1416] Update changelog for v0.47.0 --- CHANGELOG.md | 76 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 76 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8730209f99..52d47ad47f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,79 @@ +## Version 0.47.0 (2024/02/21) + +### Issues Closed + +* [Issue 2734](https://github.com/pytroll/satpy/issues/2734) - Using a static image alters time information ([PR 2737](https://github.com/pytroll/satpy/pull/2737) by [@pnuu](https://github.com/pnuu)) +* [Issue 2723](https://github.com/pytroll/satpy/issues/2723) - MODIS Satpy scene Don't know how to open the following files: {'MOD021KM.A2017131.1325.061.2017314123114.hdf'} +* [Issue 2719](https://github.com/pytroll/satpy/issues/2719) - Add lat lon to Seviri plots +* [Issue 2718](https://github.com/pytroll/satpy/issues/2718) - Set invert as a modifier when do composite +* [Issue 2712](https://github.com/pytroll/satpy/issues/2712) - mitiff writer add config option to add or not to add the size of a pixel in the proj string ([PR 2713](https://github.com/pytroll/satpy/pull/2713) by [@TAlonglong](https://github.com/TAlonglong)) +* [Issue 2710](https://github.com/pytroll/satpy/issues/2710) - scene.save_datasets() outputs different values for AHI_HSD reader with calibration="brightness_temperature" +* [Issue 2708](https://github.com/pytroll/satpy/issues/2708) - this is regarding slstr_l1b geometry +* [Issue 2703](https://github.com/pytroll/satpy/issues/2703) - read swath in loop +* [Issue 2680](https://github.com/pytroll/satpy/issues/2680) - satpy_cf_nc reader cannot read FCI file written with cf writer +* [Issue 2672](https://github.com/pytroll/satpy/issues/2672) - Changes in NWC SAF GEO v2021 data ([PR 2673](https://github.com/pytroll/satpy/pull/2673) by [@pnuu](https://github.com/pnuu)) +* [Issue 2630](https://github.com/pytroll/satpy/issues/2630) - wrong start_time with BackgroundCompositor ([PR 2737](https://github.com/pytroll/satpy/pull/2737) by [@pnuu](https://github.com/pnuu)) +* [Issue 2447](https://github.com/pytroll/satpy/issues/2447) - add more options to time handling in combine_metadata ([PR 2737](https://github.com/pytroll/satpy/pull/2737) by [@pnuu](https://github.com/pnuu)) +* [Issue 2446](https://github.com/pytroll/satpy/issues/2446) - combine metadata in `MultiFiller` ([PR 2737](https://github.com/pytroll/satpy/pull/2737) by [@pnuu](https://github.com/pnuu)) +* [Issue 2427](https://github.com/pytroll/satpy/issues/2427) - Wrong start_time, end_time attributes after MultiScene.blend(blend_function=timeseries) ([PR 2737](https://github.com/pytroll/satpy/pull/2737) by [@pnuu](https://github.com/pnuu)) +* 
[Issue 2319](https://github.com/pytroll/satpy/issues/2319) - slstr_l2.yaml points to deleted slstr_l2.py ([PR 2731](https://github.com/pytroll/satpy/pull/2731) by [@djhoese](https://github.com/djhoese)) +* [Issue 1921](https://github.com/pytroll/satpy/issues/1921) - Standardize dataset information for SEVIRI and FCI L2 products +* [Issue 1174](https://github.com/pytroll/satpy/issues/1174) - combine_metadata only supports the average of time attrs ([PR 2737](https://github.com/pytroll/satpy/pull/2737) by [@pnuu](https://github.com/pnuu)) + +In this release 17 issues were closed. + +### Pull Requests Merged + +#### Bugs fixed + +* [PR 2743](https://github.com/pytroll/satpy/pull/2743) - Fix nominal time attributes in SEVIRI HRIT ([](https://github.com/`nominal_start/issues/)) +* [PR 2742](https://github.com/pytroll/satpy/pull/2742) - Fix nominal end time in AHI HSD +* [PR 2737](https://github.com/pytroll/satpy/pull/2737) - Change `start_time` and `end_time` handling in `combine_metadata` ([2734](https://github.com/pytroll/satpy/issues/2734), [2630](https://github.com/pytroll/satpy/issues/2630), [2447](https://github.com/pytroll/satpy/issues/2447), [2446](https://github.com/pytroll/satpy/issues/2446), [2427](https://github.com/pytroll/satpy/issues/2427), [1174](https://github.com/pytroll/satpy/issues/1174)) +* [PR 2731](https://github.com/pytroll/satpy/pull/2731) - Remove slstr_l2 reader in favor of ghrsst_l2 ([2319](https://github.com/pytroll/satpy/issues/2319)) +* [PR 2730](https://github.com/pytroll/satpy/pull/2730) - Pin pytest to fix CI +* [PR 2726](https://github.com/pytroll/satpy/pull/2726) - Fix AGRI L1 C07 having a valid LUT value for its fill value ([565](https://github.com/ssec/polar2grid/issues/565)) +* [PR 2713](https://github.com/pytroll/satpy/pull/2713) - Add kwargs config option to turn off mitiff corner correction ([2712](https://github.com/pytroll/satpy/issues/2712)) +* [PR 2711](https://github.com/pytroll/satpy/pull/2711) - Add support for NOAA-21 in MiRS limb correction +* [PR 2707](https://github.com/pytroll/satpy/pull/2707) - Fix 'viirs_edr' renaming two sets of dimensions to the same names +* [PR 2700](https://github.com/pytroll/satpy/pull/2700) - Fix eps_l1b reader Delayed usage causing docs failures + +#### Features added + +* [PR 2746](https://github.com/pytroll/satpy/pull/2746) - Fix concurrency group in ci +* [PR 2745](https://github.com/pytroll/satpy/pull/2745) - Sort reader table by name + diverse fixes +* [PR 2744](https://github.com/pytroll/satpy/pull/2744) - Fix cutoffs for night_ir_alpha and bump up trollimage version +* [PR 2737](https://github.com/pytroll/satpy/pull/2737) - Change `start_time` and `end_time` handling in `combine_metadata` ([2734](https://github.com/pytroll/satpy/issues/2734), [2630](https://github.com/pytroll/satpy/issues/2630), [2447](https://github.com/pytroll/satpy/issues/2447), [2446](https://github.com/pytroll/satpy/issues/2446), [2427](https://github.com/pytroll/satpy/issues/2427), [1174](https://github.com/pytroll/satpy/issues/1174)) +* [PR 2728](https://github.com/pytroll/satpy/pull/2728) - Update asv dependencies +* [PR 2720](https://github.com/pytroll/satpy/pull/2720) - Add support for the MERSI-RM instrument on FY-3G +* [PR 2714](https://github.com/pytroll/satpy/pull/2714) - Add QC-based filtering to the VIIRS EDR AOD550 product +* [PR 2675](https://github.com/pytroll/satpy/pull/2675) - Make CF encoding of dataset attributes public +* [PR 2673](https://github.com/pytroll/satpy/pull/2673) - Add NWC SAF GEO v2021 ASIIF-TF and 
ASII-GW dataset names ([2672](https://github.com/pytroll/satpy/issues/2672)) +* [PR 2534](https://github.com/pytroll/satpy/pull/2534) - Add fsspec functionality to `viirs_sdr` reader +* [PR 2441](https://github.com/pytroll/satpy/pull/2441) - Add channel aliases to the CLAVRx reader to facilitate composites + +#### Documentation changes + +* [PR 2700](https://github.com/pytroll/satpy/pull/2700) - Fix eps_l1b reader Delayed usage causing docs failures + +#### Backward incompatible changes + +* [PR 2731](https://github.com/pytroll/satpy/pull/2731) - Remove slstr_l2 reader in favor of ghrsst_l2 ([2319](https://github.com/pytroll/satpy/issues/2319)) + +#### Refactoring + +* [PR 2699](https://github.com/pytroll/satpy/pull/2699) - Move Scene.to_hvplot internals to _scene_converters + +#### Clean ups + +* [PR 2711](https://github.com/pytroll/satpy/pull/2711) - Add support for NOAA-21 in MiRS limb correction +* [PR 2700](https://github.com/pytroll/satpy/pull/2700) - Fix eps_l1b reader Delayed usage causing docs failures +* [PR 2689](https://github.com/pytroll/satpy/pull/2689) - Fix/supress warnings in reader tests ([](https://github.com/and/issues/)) +* [PR 2665](https://github.com/pytroll/satpy/pull/2665) - FCI L2 CF harmonization +* [PR 2597](https://github.com/pytroll/satpy/pull/2597) - Update CI to test Python 3.12 + +In this release 29 pull requests were closed. + + ## Version 0.46.0 (2023/12/18) ### Issues Closed From 34492bc71273f5e8f284dbd35eb00cc960522409 Mon Sep 17 00:00:00 2001 From: Will Sharpe Date: Wed, 21 Feb 2024 13:44:17 +0000 Subject: [PATCH 1163/1416] removed enhancements --- satpy/etc/enhancements/generic.yaml | 48 ----------------------------- 1 file changed, 48 deletions(-) diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 8441ac8729..25680d6db9 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -1230,51 +1230,3 @@ enhancements: stretch: crude min_stretch: [0,0,0] max_stretch: [1,1,1] - - cloud_top_height: - name: Cloud_Top_Height - reader: viirs_l2 - operations: - - name: colorize - method: !!python/name:satpy.enhancements.colorize - kwargs: - palettes: - - { - filename: colormaps/cloud_top_height.csv - } - - clear_sky_confidence: - name: Clear_Sky_Confidence - reader: viirs_l2 - operations: - - name: colorize - method: !!python/name:satpy.enhancements.colorize - kwargs: - palettes: - - { - filename: colormaps/clear_sky_confidence.csv - } - - aerosol_optical_thickness_550_land_ocean_best_estimate: - name: Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate - reader: viirs_l2 - operations: - - name: colorize - method: !!python/name:satpy.enhancements.colorize - kwargs: - palettes: - - { - filename: colormaps/aerosol_thickness.csv - } - - angstrom_exponent_land_ocean_best_estimate: - name: Angstrom_Exponent_Land_Ocean_Best_Estimate - reader: viirs_l2 - operations: - - name: colorize - method: !!python/name:satpy.enhancements.colorize - kwargs: - palettes: - - { - filename: colormaps/angstrom_exponent.csv - } From fd5655fb2d4140e10f26dc1ad2d797d7d0bc7874 Mon Sep 17 00:00:00 2001 From: Will Sharpe Date: Wed, 21 Feb 2024 14:02:51 +0000 Subject: [PATCH 1164/1416] added fsnrad file pattern --- satpy/etc/readers/viirs_l2.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/satpy/etc/readers/viirs_l2.yaml b/satpy/etc/readers/viirs_l2.yaml index bfeadab6b9..d4dceccccc 100644 --- a/satpy/etc/readers/viirs_l2.yaml +++ b/satpy/etc/readers/viirs_l2.yaml @@ -33,6 +33,7 @@ file_types: fsnrad_l2_viirs: 
file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler file_patterns: + - 'FSNRAD_L2_VIIRS_CRIS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc' - 'FSNRAD_L2_VIIRS_CRIS_SS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc' datasets: From 71c1af866c53e1e4664dbd8a67583245479d32bf Mon Sep 17 00:00:00 2001 From: Longtsing Date: Wed, 28 Feb 2024 11:44:39 +0800 Subject: [PATCH 1165/1416] Update _geos_area.py: fix the get_area_extent docstring and add a description of the parameter h (satellite altitude) --- satpy/readers/_geos_area.py | 1 + 1 file changed, 1 insertion(+) diff --git a/satpy/readers/_geos_area.py b/satpy/readers/_geos_area.py index 03dabfa9a0..f9c588b085 100644 --- a/satpy/readers/_geos_area.py +++ b/satpy/readers/_geos_area.py @@ -72,6 +72,7 @@ def get_area_extent(pdict): coff: Column offset factor loff: Line offset factor scandir: 'N2S' for standard (N->S), 'S2N' for inverse (S->N) + h: Altitude of satellite (m) Returns: aex: An area extent for the scene From 9a24fb92d54d0565ede542d5e3f2fe602efd646b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Mar 2024 10:55:38 +0000 Subject: [PATCH 1166/1416] Bump pypa/gh-action-pypi-publish from 1.8.11 to 1.8.12 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.11 to 1.8.12. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.11...v1.8.12) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/deploy-sdist.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index 9fd1d86b5a..450e47864c 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -23,7 +23,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.8.11 + uses: pypa/gh-action-pypi-publish@v1.8.12 with: user: __token__ password: ${{ secrets.pypi_password }} From ad3d2e4d19cd13de81d152acb383578a4ac3f2c3 Mon Sep 17 00:00:00 2001 From: "Nina.Hakansson" Date: Tue, 5 Mar 2024 11:40:30 +0100 Subject: [PATCH 1167/1416] Refactor test --- satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py index 883ac8c709..793e1527d7 100644 --- a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py +++ b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py @@ -95,13 +95,11 @@ def test_read_vgac(self, nc_filename): reader="viirs_vgac_l1c_nc", filenames=[nc_filename]) scn_.load(["M05", "M15", "scanline_timestamps"]) - assert ((scn_["scanline_timestamps"][0] - - np.datetime64("2023-03-28T09:08:07") - np.timedelta64(123, "ms")) < np.timedelta64(5, "us")) - assert ((scn_["scanline_timestamps"][-1] - np.datetime64("2023-03-28T10:11:12")) < np.timedelta64(5, "us")) - assert ((np.datetime64("2023-03-28T09:08:07") + np.timedelta64(123, "ms") - - scn_["scanline_timestamps"][0]) < np.timedelta64(5, "us")) - assert ((np.datetime64("2023-03-28T10:11:12") - scn_["scanline_timestamps"][-1]) < np.timedelta64(5, "us")) - +
diff_s = (scn_["scanline_timestamps"][0] - np.datetime64("2023-03-28T09:08:07") + - np.timedelta64(123, "ms")) + diff_e = np.datetime64("2023-03-28T10:11:12") - scn_["scanline_timestamps"][-1] + assert (np.abs(diff_e) < np.timedelta64(5000, "ns")) + assert (np.abs(diff_s) < np.timedelta64(5000, "ns")) assert (scn_["M05"][0, 0] == 100) assert (scn_["M15"][0, 0] == 400) assert scn_.start_time == datetime.datetime(year=2023, month=3, day=28, From c09787c8047cd52bab991b5f3a15482ab1abf99f Mon Sep 17 00:00:00 2001 From: "Nina.Hakansson" Date: Tue, 5 Mar 2024 13:20:02 +0100 Subject: [PATCH 1168/1416] Increase accuracy by using ns --- satpy/readers/viirs_vgac_l1c_nc.py | 4 ++-- satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py | 6 ++++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/satpy/readers/viirs_vgac_l1c_nc.py b/satpy/readers/viirs_vgac_l1c_nc.py index 1146edbf62..75aaa32d75 100644 --- a/satpy/readers/viirs_vgac_l1c_nc.py +++ b/satpy/readers/viirs_vgac_l1c_nc.py @@ -85,8 +85,8 @@ def extract_time_data(self, data, nc): "days since %d/%m/%YT%H:%M:%S")) delta_part_of_day, delta_full_days = np.modf(nc["proj_time0"].values) delta_full_days = np.timedelta64(int(delta_full_days), "D") - delta_part_of_day = delta_part_of_day * np.timedelta64(1, "D").astype("timedelta64[us]") - delta_hours = data.values * np.timedelta64(1, "h").astype("timedelta64[us]") + delta_part_of_day = delta_part_of_day * np.timedelta64(1, "D").astype("timedelta64[ns]") + delta_hours = data.values * np.timedelta64(1, "h").astype("timedelta64[ns]") time_data = xr.DataArray(reference_time + delta_full_days + delta_part_of_day + delta_hours, coords=data.coords, attrs={"long_name": "Scanline time"}) self._start_time = self.dt64_to_datetime(time_data[0].values) diff --git a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py index 793e1527d7..78b83277dd 100644 --- a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py +++ b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py @@ -98,8 +98,10 @@ def test_read_vgac(self, nc_filename): diff_s = (scn_["scanline_timestamps"][0] - np.datetime64("2023-03-28T09:08:07") - np.timedelta64(123, "ms")) diff_e = np.datetime64("2023-03-28T10:11:12") - scn_["scanline_timestamps"][-1] - assert (np.abs(diff_e) < np.timedelta64(5000, "ns")) - assert (np.abs(diff_s) < np.timedelta64(5000, "ns")) + assert (diff_e < np.timedelta64(5000, "ns")) + assert (diff_s < np.timedelta64(5000, "ns")) + assert (diff_e > np.timedelta64(-5000, "ns")) + assert (diff_s > np.timedelta64(-5000, "ns")) assert (scn_["M05"][0, 0] == 100) assert (scn_["M15"][0, 0] == 400) assert scn_.start_time == datetime.datetime(year=2023, month=3, day=28, From 799c697d34d4db55ba3c2c0d3dbdde65e92b9981 Mon Sep 17 00:00:00 2001 From: "Nina.Hakansson" Date: Wed, 6 Mar 2024 15:51:16 +0100 Subject: [PATCH 1169/1416] try to make windows test running The delta_full_days was before an array of size 1, now it is a scalar. 
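A minimal sketch of that distinction, using a made-up `proj_time0` value (the variable name matches the reader, the number does not come from any real file):

```python
import numpy as np

proj_time0 = np.array([4836.379])      # hypothetical size-1 array read from the file
frac, whole = np.modf(proj_time0[0])   # index first so np.modf returns scalars
# Cast explicitly: NumPy's default integer (np.int_, what astype(int) gives)
# is only 32 bits on Windows builds, so the patch uses np.int64 instead.
full_days = np.timedelta64(whole.astype(np.int64), "D")
```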
--- satpy/readers/viirs_vgac_l1c_nc.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/viirs_vgac_l1c_nc.py b/satpy/readers/viirs_vgac_l1c_nc.py index 75aaa32d75..919d9e1856 100644 --- a/satpy/readers/viirs_vgac_l1c_nc.py +++ b/satpy/readers/viirs_vgac_l1c_nc.py @@ -83,8 +83,8 @@ def extract_time_data(self, data, nc): """Decode time data.""" reference_time = np.datetime64(datetime.strptime(nc["proj_time0"].attrs["units"], "days since %d/%m/%YT%H:%M:%S")) - delta_part_of_day, delta_full_days = np.modf(nc["proj_time0"].values) - delta_full_days = np.timedelta64(int(delta_full_days), "D") + delta_part_of_day, delta_full_days = np.modf(nc["proj_time0"].values[0]) + delta_full_days = np.timedelta64(delta_full_days.astype(np.int64), "D") delta_part_of_day = delta_part_of_day * np.timedelta64(1, "D").astype("timedelta64[ns]") delta_hours = data.values * np.timedelta64(1, "h").astype("timedelta64[ns]") time_data = xr.DataArray(reference_time + delta_full_days + delta_part_of_day + delta_hours, From 8f201b69a86f29ea47e4083da566fc456ae6cce7 Mon Sep 17 00:00:00 2001 From: "Nina.Hakansson" Date: Wed, 6 Mar 2024 21:04:45 +0100 Subject: [PATCH 1170/1416] try to fix tests on windows --- satpy/readers/viirs_vgac_l1c_nc.py | 8 ++--- .../reader_tests/test_viirs_vgac_l1c_nc.py | 33 ++++++++++--------- 2 files changed, 20 insertions(+), 21 deletions(-) diff --git a/satpy/readers/viirs_vgac_l1c_nc.py b/satpy/readers/viirs_vgac_l1c_nc.py index 919d9e1856..47170616ed 100644 --- a/satpy/readers/viirs_vgac_l1c_nc.py +++ b/satpy/readers/viirs_vgac_l1c_nc.py @@ -84,13 +84,11 @@ def extract_time_data(self, data, nc): reference_time = np.datetime64(datetime.strptime(nc["proj_time0"].attrs["units"], "days since %d/%m/%YT%H:%M:%S")) delta_part_of_day, delta_full_days = np.modf(nc["proj_time0"].values[0]) - delta_full_days = np.timedelta64(delta_full_days.astype(np.int64), "D") - delta_part_of_day = delta_part_of_day * np.timedelta64(1, "D").astype("timedelta64[ns]") - delta_hours = data.values * np.timedelta64(1, "h").astype("timedelta64[ns]") + delta_full_days = np.timedelta64(delta_full_days.astype(np.int64), "D").astype("timedelta64[us]") + delta_part_of_day = delta_part_of_day * np.timedelta64(1, "D").astype("timedelta64[us]") + delta_hours = data.values * np.timedelta64(1, "h").astype("timedelta64[us]") time_data = xr.DataArray(reference_time + delta_full_days + delta_part_of_day + delta_hours, coords=data.coords, attrs={"long_name": "Scanline time"}) - self._start_time = self.dt64_to_datetime(time_data[0].values) - self._end_time = self.dt64_to_datetime(time_data[-1].values) return time_data def decode_time_variable(self, data, file_key, nc): diff --git a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py index 78b83277dd..9c424e065e 100644 --- a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py +++ b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py @@ -22,7 +22,7 @@ """ -import datetime +from datetime import datetime import numpy as np import pytest @@ -33,7 +33,7 @@ @pytest.fixture() def nc_filename(tmp_path): """Create an nc test data file and return its filename.""" - now = datetime.datetime.utcnow() + now = datetime.utcnow() filename = f"VGAC_VJ10XMOD_A{now:%Y%j_%H%M}_n004946_K005.nc" filename_str = str(tmp_path / filename) # Create test data @@ -65,7 +65,7 @@ def nc_filename(tmp_path): tb_lut[:] = np.array(range(0, n_lut)) * 0.5 tb_lut.units = "Kelvin" reference_time = np.datetime64("2010-01-01T00:00:00") - start_time 
= np.datetime64("2023-03-28T09:08:07") + np.timedelta64(123, "ms") + start_time = np.datetime64("2023-03-28T09:08:07") + np.timedelta64(123000, "us") delta_days = start_time - reference_time delta_full_days = delta_days.astype("timedelta64[D]") hidden_reference_time = reference_time + delta_full_days @@ -95,19 +95,20 @@ def test_read_vgac(self, nc_filename): reader="viirs_vgac_l1c_nc", filenames=[nc_filename]) scn_.load(["M05", "M15", "scanline_timestamps"]) - diff_s = (scn_["scanline_timestamps"][0] - np.datetime64("2023-03-28T09:08:07") - - np.timedelta64(123, "ms")) - diff_e = np.datetime64("2023-03-28T10:11:12") - scn_["scanline_timestamps"][-1] - assert (diff_e < np.timedelta64(5000, "ns")) - assert (diff_s < np.timedelta64(5000, "ns")) - assert (diff_e > np.timedelta64(-5000, "ns")) - assert (diff_s > np.timedelta64(-5000, "ns")) + diff_s = (scn_["scanline_timestamps"][0].values.astype('datetime64[us]') - + np.datetime64('2023-03-28T09:08:07.123000').astype('datetime64[us]')) + diff_e = (np.datetime64("2023-03-28T10:11:12.000000").astype('datetime64[us]') - + scn_["scanline_timestamps"][-1].values.astype('datetime64[us]')) + assert (diff_s < np.timedelta64(5, "us")) + assert (diff_e < np.timedelta64(5, "us")) + assert (diff_s > np.timedelta64(-5, "us")) + assert (diff_e > np.timedelta64(-5, "us")) assert (scn_["M05"][0, 0] == 100) assert (scn_["M15"][0, 0] == 400) - assert scn_.start_time == datetime.datetime(year=2023, month=3, day=28, - hour=9, minute=8, second=7) - assert scn_.end_time == datetime.datetime(year=2023, month=3, day=28, - hour=10, minute=11, second=12) + assert scn_.start_time == datetime(year=2023, month=3, day=28, + hour=9, minute=8, second=7) + assert scn_.end_time == datetime(year=2023, month=3, day=28, + hour=10, minute=11, second=12) def test_dt64_to_datetime(self): """Test datetime conversion branch.""" @@ -115,8 +116,8 @@ def test_dt64_to_datetime(self): fh = VGACFileHandler(filename="", filename_info={"start_time": "2023-03-28T09:08:07"}, filetype_info="") - in_dt = datetime.datetime(year=2023, month=3, day=28, - hour=9, minute=8, second=7) + in_dt = datetime(year=2023, month=3, day=28, + hour=9, minute=8, second=7) out_dt = fh.dt64_to_datetime(in_dt) assert out_dt == in_dt From 7e336ee6201a23ae52e85e382121d8a6a5cbf23b Mon Sep 17 00:00:00 2001 From: "Nina.Hakansson" Date: Thu, 7 Mar 2024 09:45:00 +0100 Subject: [PATCH 1171/1416] use np.int64 also when creating test file --- .../reader_tests/test_viirs_vgac_l1c_nc.py | 24 ++++++++++--------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py index 9c424e065e..b9380fb859 100644 --- a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py +++ b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py @@ -41,12 +41,14 @@ def nc_filename(tmp_path): nscn = 7 npix = 800 n_lut = 12000 + start_time_srting = "2023-03-28T09:08:07" + end_time_string = "2023-03-28T10:11:12" nc.createDimension("npix", npix) nc.createDimension("nscn", nscn) nc.createDimension("n_lut", n_lut) nc.createDimension("one", 1) - nc.StartTime = "2023-03-28T09:08:07" - nc.EndTime = "2023-03-28T10:11:12" + nc.StartTime = start_time_srting + nc.EndTime = end_time_string for ind in range(1, 11, 1): ch_name = "M{:02d}".format(ind) r_a = nc.createVariable(ch_name, np.int16, dimensions=("nscn", "npix")) @@ -71,13 +73,13 @@ def nc_filename(tmp_path): hidden_reference_time = reference_time + delta_full_days delta_part_of_days = start_time - 
hidden_reference_time proj_time0 = nc.createVariable("proj_time0", np.float64, ("one",)) - proj_time0[:] = (delta_full_days.astype(int) + + proj_time0[:] = (delta_full_days.astype(np.int64) + 0.000001 * delta_part_of_days.astype("timedelta64[us]").astype(np.int64) / (60 * 60 * 24)) proj_time0.units = "days since 01/01/2010T00:00:00" time_v = nc.createVariable("time", np.float64, ("nscn",)) - delta_h = np.datetime64(nc.EndTime) - start_time - delta_hours = 0.000001 * delta_h.astype("timedelta64[us]").astype(int) / (60 * 60) - time_v[:] = np.linspace(0, delta_hours, num=nscn) + delta_h = np.datetime64(end_time_string) - start_time + delta_hours = 0.000001 * delta_h.astype("timedelta64[us]").astype(np.int64) / (60 * 60) + time_v[:] = np.linspace(0, delta_hours, num=nscn).astype(np.float64) time_v.units = "hours since proj_time0" return filename_str @@ -95,13 +97,13 @@ def test_read_vgac(self, nc_filename): reader="viirs_vgac_l1c_nc", filenames=[nc_filename]) scn_.load(["M05", "M15", "scanline_timestamps"]) - diff_s = (scn_["scanline_timestamps"][0].values.astype('datetime64[us]') - - np.datetime64('2023-03-28T09:08:07.123000').astype('datetime64[us]')) - diff_e = (np.datetime64("2023-03-28T10:11:12.000000").astype('datetime64[us]') - - scn_["scanline_timestamps"][-1].values.astype('datetime64[us]')) + diff_s = (scn_["scanline_timestamps"][0].values.astype("datetime64[us]") - + np.datetime64("2023-03-28T09:08:07.123000").astype("datetime64[us]")) + diff_e = (np.datetime64("2023-03-28T10:11:12.000000").astype("datetime64[us]") - + scn_["scanline_timestamps"][-1].values.astype("datetime64[us]")) assert (diff_s < np.timedelta64(5, "us")) - assert (diff_e < np.timedelta64(5, "us")) assert (diff_s > np.timedelta64(-5, "us")) + assert (diff_e < np.timedelta64(5, "us")) assert (diff_e > np.timedelta64(-5, "us")) assert (scn_["M05"][0, 0] == 100) assert (scn_["M15"][0, 0] == 400) From f6d87f36856fb2d7aed5000030c5995e1d864a52 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 7 Mar 2024 14:44:49 +0200 Subject: [PATCH 1172/1416] Add fsspec support to li_l2_nc reader --- satpy/etc/readers/generic_image.yaml | 2 +- satpy/readers/li_base_nc.py | 4 ++-- satpy/readers/netcdf_utils.py | 10 +++++++--- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/satpy/etc/readers/generic_image.yaml b/satpy/etc/readers/generic_image.yaml index 07d1bdeb50..fb6e0aab5d 100644 --- a/satpy/etc/readers/generic_image.yaml +++ b/satpy/etc/readers/generic_image.yaml @@ -4,7 +4,7 @@ reader: long_name: Generic Images e.g. 
GeoTIFF description: generic image reader status: Nominal - supports_fsspec: false + supports_fsspec: true reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [images] default_channels: [image] diff --git a/satpy/readers/li_base_nc.py b/satpy/readers/li_base_nc.py index 848306e77c..eba9548985 100644 --- a/satpy/readers/li_base_nc.py +++ b/satpy/readers/li_base_nc.py @@ -191,12 +191,12 @@ import xarray as xr from pyproj import Proj -from satpy.readers.netcdf_utils import NetCDF4FileHandler +from satpy.readers.netcdf_utils import NetCDF4FsspecFileHandler logger = logging.getLogger(__name__) -class LINCFileHandler(NetCDF4FileHandler): +class LINCFileHandler(NetCDF4FsspecFileHandler): """Base class used as parent for the concrete LI reader classes.""" def __init__(self, filename, filename_info, filetype_info, cache_handle=True): diff --git a/satpy/readers/netcdf_utils.py b/satpy/readers/netcdf_utils.py index cb5c38d1cf..560039c406 100644 --- a/satpy/readers/netcdf_utils.py +++ b/satpy/readers/netcdf_utils.py @@ -258,13 +258,17 @@ def collect_cache_vars(self, cache_var_size): cache_vars = self._collect_cache_var_names(cache_var_size) for var_name in cache_vars: v = self.file_content[var_name] + try: + attrs = v.attrs + except AttributeError: + attrs = v.__dict__ try: arr = xr.DataArray( - v[:], dims=v.dimensions, attrs=v.__dict__, name=v.name) - except ValueError: + v[:], dims=v.dimensions, attrs=attrs, name=v.name) + except (ValueError, IndexError): # Handle scalars for h5netcdf backend arr = xr.DataArray( - v.__array__(), dims=v.dimensions, attrs=v.__dict__, name=v.name) + v.__array__(), dims=v.dimensions, attrs=attrs, name=v.name) self.cached_file_content[var_name] = arr def _collect_cache_var_names(self, cache_var_size): From f1bc699f8a80bad86607901aa5954b8ebb8f969c Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 7 Mar 2024 16:44:34 +0200 Subject: [PATCH 1173/1416] Refactor and test getting netcdf variable to xr.DataArray --- satpy/readers/netcdf_utils.py | 29 ++++++---- satpy/tests/reader_tests/test_netcdf_utils.py | 57 +++++++++++++++++++ 2 files changed, 75 insertions(+), 11 deletions(-) diff --git a/satpy/readers/netcdf_utils.py b/satpy/readers/netcdf_utils.py index 560039c406..457c2ecaea 100644 --- a/satpy/readers/netcdf_utils.py +++ b/satpy/readers/netcdf_utils.py @@ -258,17 +258,7 @@ def collect_cache_vars(self, cache_var_size): cache_vars = self._collect_cache_var_names(cache_var_size) for var_name in cache_vars: v = self.file_content[var_name] - try: - attrs = v.attrs - except AttributeError: - attrs = v.__dict__ - try: - arr = xr.DataArray( - v[:], dims=v.dimensions, attrs=attrs, name=v.name) - except (ValueError, IndexError): - # Handle scalars for h5netcdf backend - arr = xr.DataArray( - v.__array__(), dims=v.dimensions, attrs=attrs, name=v.name) + arr = get_data_as_xarray(v) self.cached_file_content[var_name] = arr def _collect_cache_var_names(self, cache_var_size): @@ -384,6 +374,23 @@ def _compose_replacement_names(variable_name_replacements, var, variable_names): variable_names.append(var.format(**{key: val})) +def get_data_as_xarray(variable): + """Get data in variable as xr.DataArray.""" + try: + attrs = variable.attrs + except AttributeError: + attrs = variable.__dict__ + try: + arr = xr.DataArray( + variable[:], dims=variable.dimensions, attrs=attrs, name=variable.name) + except (ValueError, IndexError): + # Handle scalars for h5netcdf backend + arr = xr.DataArray( + variable.__array__(), dims=variable.dimensions, attrs=attrs, 
name=variable.name) + + return arr + + class NetCDF4FsspecFileHandler(NetCDF4FileHandler): """NetCDF4 file handler using fsspec to read files remotely.""" diff --git a/satpy/tests/reader_tests/test_netcdf_utils.py b/satpy/tests/reader_tests/test_netcdf_utils.py index 2d29288784..ad7b05c217 100644 --- a/satpy/tests/reader_tests/test_netcdf_utils.py +++ b/satpy/tests/reader_tests/test_netcdf_utils.py @@ -293,3 +293,60 @@ def test_use_h5netcdf_for_file_not_accessible_locally(self): fh = NetCDF4FsspecFileHandler(fname, {}, {}) h5_file.assert_called_once() assert fh._use_h5netcdf + + +NC_ATTRS = { + "standard_name": "test_data", + "scale_factor": 0.01, + "add_offset": 0} + +def test_get_data_as_xarray_netcdf4(): + """Test getting xr.DataArray from netcdf4 variable.""" + import tempfile + + import netCDF4 as nc + import numpy as np + + from satpy.readers.netcdf_utils import get_data_as_xarray + + data = np.array([1, 2, 3]) + + with tempfile.TemporaryDirectory() as tmpdir: + # Create an empty HDF5 + fname = os.path.join(tmpdir, "test.nc") + dset = nc.Dataset(fname, "w") + dset.createDimension("y", None) + var = dset.createVariable("test_data", "uint8", ("y",)) + var[:] = data + var.setncatts(NC_ATTRS) + # Turn off automatic scale factor and offset handling + dset.set_auto_maskandscale(False) + res = get_data_as_xarray(var) + np.testing.assert_equal(res.data, data) + assert res.attrs == NC_ATTRS + dset.close() + + +def test_get_data_as_xarray_h5netcdf(): + """Test getting xr.DataArray from h5netcdf variable.""" + import tempfile + + import h5netcdf + import numpy as np + + from satpy.readers.netcdf_utils import get_data_as_xarray + + data = np.array([1, 2, 3]) + + with tempfile.TemporaryDirectory() as tmpdir: + # Create an empty HDF5 + fname = os.path.join(tmpdir, "test.nc") + with h5netcdf.File(fname, "w") as fid: + fid.dimensions = {"y": data.size} + var = fid.create_variable("test_data", ("y",), "uint8") + var[:] = data + for key in NC_ATTRS: + var.attrs[key] = NC_ATTRS[key] + res = get_data_as_xarray(var) + np.testing.assert_equal(res.data, data) + assert res.attrs == NC_ATTRS From f6594cf80c929b988ca55d2f7e9965620bb7a9ce Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 7 Mar 2024 16:52:27 +0200 Subject: [PATCH 1174/1416] Remove unnecessary IndexError --- satpy/readers/netcdf_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/netcdf_utils.py b/satpy/readers/netcdf_utils.py index 457c2ecaea..103b8d80a5 100644 --- a/satpy/readers/netcdf_utils.py +++ b/satpy/readers/netcdf_utils.py @@ -383,7 +383,7 @@ def get_data_as_xarray(variable): try: arr = xr.DataArray( variable[:], dims=variable.dimensions, attrs=attrs, name=variable.name) - except (ValueError, IndexError): + except ValueError: # Handle scalars for h5netcdf backend arr = xr.DataArray( variable.__array__(), dims=variable.dimensions, attrs=attrs, name=variable.name) From e1a6f37e17ce89706974fa2d1cc4f31a6dff36d1 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 7 Mar 2024 17:08:06 +0200 Subject: [PATCH 1175/1416] Use tmp_path instead of tempfile library --- satpy/tests/reader_tests/test_netcdf_utils.py | 60 ++++++++----------- 1 file changed, 26 insertions(+), 34 deletions(-) diff --git a/satpy/tests/reader_tests/test_netcdf_utils.py b/satpy/tests/reader_tests/test_netcdf_utils.py index ad7b05c217..42d7c5d093 100644 --- a/satpy/tests/reader_tests/test_netcdf_utils.py +++ b/satpy/tests/reader_tests/test_netcdf_utils.py @@ -300,10 +300,8 @@ def 
test_use_h5netcdf_for_file_not_accessible_locally(self): "scale_factor": 0.01, "add_offset": 0} -def test_get_data_as_xarray_netcdf4(): +def test_get_data_as_xarray_netcdf4(tmp_path): """Test getting xr.DataArray from netcdf4 variable.""" - import tempfile - import netCDF4 as nc import numpy as np @@ -311,26 +309,22 @@ def test_get_data_as_xarray_netcdf4(): data = np.array([1, 2, 3]) - with tempfile.TemporaryDirectory() as tmpdir: - # Create an empty HDF5 - fname = os.path.join(tmpdir, "test.nc") - dset = nc.Dataset(fname, "w") - dset.createDimension("y", None) - var = dset.createVariable("test_data", "uint8", ("y",)) - var[:] = data - var.setncatts(NC_ATTRS) - # Turn off automatic scale factor and offset handling - dset.set_auto_maskandscale(False) - res = get_data_as_xarray(var) - np.testing.assert_equal(res.data, data) - assert res.attrs == NC_ATTRS - dset.close() - - -def test_get_data_as_xarray_h5netcdf(): + fname = tmp_path / "test.nc" + dset = nc.Dataset(fname, "w") + dset.createDimension("y", None) + var = dset.createVariable("test_data", "uint8", ("y",)) + var[:] = data + var.setncatts(NC_ATTRS) + # Turn off automatic scale factor and offset handling + dset.set_auto_maskandscale(False) + res = get_data_as_xarray(var) + np.testing.assert_equal(res.data, data) + assert res.attrs == NC_ATTRS + dset.close() + + +def test_get_data_as_xarray_h5netcdf(tmp_path): """Test getting xr.DataArray from h5netcdf variable.""" - import tempfile - import h5netcdf import numpy as np @@ -338,15 +332,13 @@ def test_get_data_as_xarray_h5netcdf(): data = np.array([1, 2, 3]) - with tempfile.TemporaryDirectory() as tmpdir: - # Create an empty HDF5 - fname = os.path.join(tmpdir, "test.nc") - with h5netcdf.File(fname, "w") as fid: - fid.dimensions = {"y": data.size} - var = fid.create_variable("test_data", ("y",), "uint8") - var[:] = data - for key in NC_ATTRS: - var.attrs[key] = NC_ATTRS[key] - res = get_data_as_xarray(var) - np.testing.assert_equal(res.data, data) - assert res.attrs == NC_ATTRS + fname = tmp_path / "test.nc" + with h5netcdf.File(fname, "w") as fid: + fid.dimensions = {"y": data.size} + var = fid.create_variable("test_data", ("y",), "uint8") + var[:] = data + for key in NC_ATTRS: + var.attrs[key] = NC_ATTRS[key] + res = get_data_as_xarray(var) + np.testing.assert_equal(res.data, data) + assert res.attrs == NC_ATTRS From c6cd6474162c11fa67d8ad60d9e3a688516a12ec Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 7 Mar 2024 17:16:09 +0200 Subject: [PATCH 1176/1416] Split netcdf creation from the tests --- satpy/tests/reader_tests/test_netcdf_utils.py | 47 ++++++++++++------- 1 file changed, 30 insertions(+), 17 deletions(-) diff --git a/satpy/tests/reader_tests/test_netcdf_utils.py b/satpy/tests/reader_tests/test_netcdf_utils.py index 42d7c5d093..c1a5e3b151 100644 --- a/satpy/tests/reader_tests/test_netcdf_utils.py +++ b/satpy/tests/reader_tests/test_netcdf_utils.py @@ -302,14 +302,22 @@ def test_use_h5netcdf_for_file_not_accessible_locally(self): def test_get_data_as_xarray_netcdf4(tmp_path): """Test getting xr.DataArray from netcdf4 variable.""" - import netCDF4 as nc import numpy as np from satpy.readers.netcdf_utils import get_data_as_xarray data = np.array([1, 2, 3]) - fname = tmp_path / "test.nc" + dset = _write_test_netcdf4(fname, data) + + res = get_data_as_xarray(dset["test_data"]) + np.testing.assert_equal(res.data, data) + assert res.attrs == NC_ATTRS + + +def _write_test_netcdf4(fname, data): + import netCDF4 as nc + dset = nc.Dataset(fname, "w") dset.createDimension("y", 
None) var = dset.createVariable("test_data", "uint8", ("y",)) @@ -317,28 +325,33 @@ def test_get_data_as_xarray_netcdf4(tmp_path): var.setncatts(NC_ATTRS) # Turn off automatic scale factor and offset handling dset.set_auto_maskandscale(False) - res = get_data_as_xarray(var) - np.testing.assert_equal(res.data, data) - assert res.attrs == NC_ATTRS - dset.close() + + return dset def test_get_data_as_xarray_h5netcdf(tmp_path): """Test getting xr.DataArray from h5netcdf variable.""" - import h5netcdf import numpy as np from satpy.readers.netcdf_utils import get_data_as_xarray data = np.array([1, 2, 3]) - fname = tmp_path / "test.nc" - with h5netcdf.File(fname, "w") as fid: - fid.dimensions = {"y": data.size} - var = fid.create_variable("test_data", ("y",), "uint8") - var[:] = data - for key in NC_ATTRS: - var.attrs[key] = NC_ATTRS[key] - res = get_data_as_xarray(var) - np.testing.assert_equal(res.data, data) - assert res.attrs == NC_ATTRS + fid = _write_test_h5netcdf(fname, data) + + res = get_data_as_xarray(fid["test_data"]) + np.testing.assert_equal(res.data, data) + assert res.attrs == NC_ATTRS + + +def _write_test_h5netcdf(fname, data): + import h5netcdf + + fid = h5netcdf.File(fname, "w") + fid.dimensions = {"y": data.size} + var = fid.create_variable("test_data", ("y",), "uint8") + var[:] = data + for key in NC_ATTRS: + var.attrs[key] = NC_ATTRS[key] + + return fid From eb6e25f832550343b2a3e743c478e1eb30ca565f Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 7 Mar 2024 18:10:55 +0100 Subject: [PATCH 1177/1416] Accept mod in open_file_or_filename --- satpy/readers/__init__.py | 4 ++-- satpy/tests/test_readers.py | 10 +++++++++- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index 43632af9c6..8141b95345 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -780,10 +780,10 @@ def _get_compression(file): return None -def open_file_or_filename(unknown_file_thing): +def open_file_or_filename(unknown_file_thing, mode="r"): """Try to open the *unknown_file_thing*, otherwise return the filename.""" try: - f_obj = unknown_file_thing.open() + f_obj = unknown_file_thing.open(mode=mode) except AttributeError: f_obj = unknown_file_thing return f_obj diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index d91e2b6fed..0e90850e82 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -29,7 +29,7 @@ from satpy.dataset.data_dict import get_key from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange -from satpy.readers import find_files_and_readers +from satpy.readers import find_files_and_readers, open_file_or_filename # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: @@ -1088,3 +1088,11 @@ def test_hash(self): assert len({hash(FSFile(fn, fs)) for fn in {self.local_filename, self.local_filename2} for fs in [None, lfs, zfs, cfs]}) == 2*4 + + +def test_open_file_or_filename_uses_mode(tmp_path): + """Test that open_file_or_filename uses provided mode.""" + with open(tmp_path / "hej", mode="wb") as fd: + fd.write(b"hej") + res = open_file_or_filename(tmp_path / "hej", mode="rb").read() + assert isinstance(res, bytes) From 4ce31f782f0026dc0e0e32f3c3e73f98b512b255 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 7 Mar 2024 18:13:00 +0100 Subject: [PATCH 1178/1416] Use geojson gcps as lon lat source --- satpy/readers/sar_c_safe.py | 91 +++++++++++---------- satpy/tests/reader_tests/test_sar_c_safe.py | 
22 ++--- 2 files changed, 62 insertions(+), 51 deletions(-) diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 565d2c1167..1d3d71fb55 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -36,8 +36,9 @@ import functools import logging -import os from collections import defaultdict +from datetime import timezone as tz +from functools import cached_property from threading import Lock import defusedxml.ElementTree as ET @@ -50,6 +51,7 @@ from satpy.dataset.data_dict import DatasetDict from satpy.dataset.dataid import DataID +from satpy.readers import open_file_or_filename from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.yaml_reader import GenericYAMLReader from satpy.utils import get_legacy_chunk_size @@ -101,10 +103,10 @@ def __init__(self, filename, filename_info, filetype_info, """Init the xml filehandler.""" super().__init__(filename, filename_info, filetype_info) - self._start_time = filename_info["start_time"] - self._end_time = filename_info["end_time"] + self._start_time = filename_info["start_time"].replace(tzinfo=tz.utc) + self._end_time = filename_info["end_time"].replace(tzinfo=tz.utc) self._polarization = filename_info["polarization"] - self.root = ET.parse(self.filename) + self.root = ET.parse(open_file_or_filename(self.filename)) self._image_shape = image_shape def get_metadata(self): @@ -507,8 +509,13 @@ def interpolate_xarray(xpoints, ypoints, values, shape, """Interpolate, generating a dask array.""" from scipy.interpolate import RectBivariateSpline - vchunks = range(0, shape[0], blocksize) - hchunks = range(0, shape[1], blocksize) + try: + blocksize_row, blocksize_col = blocksize + except ValueError: + blocksize_row = blocksize_col = blocksize + + vchunks = range(0, shape[0], blocksize_row) + hchunks = range(0, shape[1], blocksize_col) token = tokenize(blocksize, xpoints, ypoints, values, shape) name = "interpolate-" + token @@ -520,15 +527,15 @@ def interpolator(xnew, ynew): return spline(xnew, ynew).T dskx = {(name, i, j): (interpolate_slice, - slice(vcs, min(vcs + blocksize, shape[0])), - slice(hcs, min(hcs + blocksize, shape[1])), + slice(vcs, min(vcs + blocksize_row, shape[0])), + slice(hcs, min(hcs + blocksize_col, shape[1])), interpolator) for i, vcs in enumerate(vchunks) for j, hcs in enumerate(hchunks) } res = da.Array(dskx, name, shape=list(shape), - chunks=(blocksize, blocksize), + chunks=(blocksize_row, blocksize_col), dtype=values.dtype) return DataArray(res, dims=("y", "x")) @@ -573,9 +580,8 @@ class SAFEGRD(BaseFileHandler): def __init__(self, filename, filename_info, filetype_info, calibrator, denoiser): """Init the grd filehandler.""" super().__init__(filename, filename_info, filetype_info) - - self._start_time = filename_info["start_time"] - self._end_time = filename_info["end_time"] + self._start_time = filename_info["start_time"].replace(tzinfo=tz.utc) + self._end_time = filename_info["end_time"].replace(tzinfo=tz.utc) self._polarization = filename_info["polarization"] @@ -585,7 +591,6 @@ def __init__(self, filename, filename_info, filetype_info, calibrator, denoiser) self.denoiser = denoiser self.read_lock = Lock() - self.filehandle = rasterio.open(self.filename, "r", sharing=False) self.get_lonlatalts = functools.lru_cache(maxsize=2)( self._get_lonlatalts_uncached ) @@ -606,11 +611,7 @@ def get_dataset(self, key, info): data.attrs.update(info) else: - data = xr.open_dataarray(self.filename, engine="rasterio", - chunks={"band": 1, "y": CHUNK_SIZE, "x": CHUNK_SIZE}).squeeze() - 
data = data.assign_coords(x=np.arange(len(data.coords["x"])), - y=np.arange(len(data.coords["y"]))) - data = self._calibrate_and_denoise(data, key) + data = self._calibrate_and_denoise(self._data, key) data.attrs.update(info) data.attrs.update({"platform_name": self._mission_id}) @@ -618,6 +619,17 @@ def get_dataset(self, key, info): return data + @cached_property + def _data(self): + data = xr.open_dataarray(self.filename, engine="rasterio", + chunks="auto" + ).squeeze() + self.chunks = data.data.chunksize + data = data.assign_coords(x=np.arange(len(data.coords["x"])), + y=np.arange(len(data.coords["y"]))) + + return data + @staticmethod def _change_quantity(data, quantity): """Change quantity to dB if needed.""" @@ -631,11 +643,9 @@ def _change_quantity(data, quantity): def _calibrate_and_denoise(self, data, key): """Calibrate and denoise the data.""" - chunks = CHUNK_SIZE - dn = self._get_digital_number(data) - dn = self.denoiser(dn, chunks) - data = self.calibrator(dn, key["calibration"], chunks) + dn = self.denoiser(dn, self.chunks) + data = self.calibrator(dn, key["calibration"], self.chunks) return data @@ -646,13 +656,6 @@ def _get_digital_number(self, data): dn = data * data return dn - def _denoise(self, dn, chunks): - """Denoise the data.""" - logger.debug("Reading noise data.") - noise = self.noise.get_noise_correction(chunks=chunks).fillna(0) - dn = dn - noise - return dn - def _get_lonlatalts_uncached(self): """Obtain GCPs and construct latitude and longitude arrays. @@ -662,16 +665,16 @@ def _get_lonlatalts_uncached(self): Returns: coordinates (tuple): A tuple with longitude and latitude arrays """ - band = self.filehandle + shape = self._data.shape (xpoints, ypoints), (gcp_lons, gcp_lats, gcp_alts), (gcps, crs) = self.get_gcps() # FIXME: do interpolation on cartesian coordinates if the area is # problematic. 
- longitudes = interpolate_xarray(xpoints, ypoints, gcp_lons, band.shape) - latitudes = interpolate_xarray(xpoints, ypoints, gcp_lats, band.shape) - altitudes = interpolate_xarray(xpoints, ypoints, gcp_alts, band.shape) + longitudes = interpolate_xarray(xpoints, ypoints, gcp_lons, shape, self.chunks) + latitudes = interpolate_xarray(xpoints, ypoints, gcp_lats, shape, self.chunks) + altitudes = interpolate_xarray(xpoints, ypoints, gcp_alts, shape, self.chunks) longitudes.attrs["gcps"] = gcps longitudes.attrs["crs"] = crs @@ -694,9 +697,12 @@ def get_gcps(self): gcp_coords (tuple): longitude and latitude 1d arrays """ - gcps = self.filehandle.gcps + gcps = self._data.coords["spatial_ref"].attrs["gcps"] + crs = self._data.rio.crs - gcp_array = np.array([(p.row, p.col, p.x, p.y, p.z) for p in gcps[0]]) + gcp_list = [(feature["properties"]["row"], feature["properties"]["col"], *feature["geometry"]["coordinates"]) + for feature in gcps["features"]] + gcp_array = np.array(gcp_list) ypoints = np.unique(gcp_array[:, 0]) xpoints = np.unique(gcp_array[:, 1]) @@ -705,7 +711,10 @@ def get_gcps(self): gcp_lats = gcp_array[:, 3].reshape(ypoints.shape[0], xpoints.shape[0]) gcp_alts = gcp_array[:, 4].reshape(ypoints.shape[0], xpoints.shape[0]) - return (xpoints, ypoints), (gcp_lons, gcp_lats, gcp_alts), gcps + rio_gcps = [rasterio.control.GroundControlPoint(*gcp) for gcp in gcp_list] + + + return (xpoints, ypoints), (gcp_lons, gcp_lats, gcp_alts), (rio_gcps, crs) @property def start_time(self): @@ -731,12 +740,12 @@ def __init__(self, config, filter_parameters=None): @property def start_time(self): """Get the start time.""" - return self.storage_items.values()[0].filename_info["start_time"] + return self.storage_items.values()[0].filename_info["start_time"].replace(tzinfo=tz.utc) @property def end_time(self): """Get the end time.""" - return self.storage_items.values()[0].filename_info["end_time"] + return self.storage_items.values()[0].filename_info["end_time"].replace(tzinfo=tz.utc) def load(self, dataset_keys, **kwargs): """Load some data.""" @@ -752,20 +761,20 @@ def load(self, dataset_keys, **kwargs): if key["name"] not in ["longitude", "latitude"]: lonlats = self.load([DataID(self._id_keys, name="longitude", polarization=key["polarization"]), DataID(self._id_keys, name="latitude", polarization=key["polarization"])]) + gcps = val.coords["spatial_ref"].attrs["gcps"] from pyresample.future.geometry import SwathDefinition val.attrs["area"] = SwathDefinition(lonlats["longitude"], lonlats["latitude"], - attrs=dict(gcps=None)) + attrs=dict(gcps=gcps)) datasets[key] = val continue return datasets def create_storage_items(self, files, **kwargs): """Create the storage items.""" - filenames = [os.fspath(filename) for filename in files] + filenames = files files_by_type = defaultdict(list) for file_type, type_info in self.config["file_types"].items(): files_by_type[file_type].extend(self.filename_items_for_filetype(filenames, type_info)) - image_shapes = dict() for annotation_file, annotation_info in files_by_type["safe_annotation"]: annotation_fh = SAFEXMLAnnotation(annotation_file, diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index d6d178044e..c3a90adec1 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -38,6 +38,8 @@ dirname_suffix = "20190201T024655_20190201T024720_025730_02DC2A_AE07" filename_suffix = "20190201t024655-20190201t024720-025730-02dc2a" +START_TIME = datetime(2019, 2, 1, 2, 46, 
55) +END_TIME = datetime(2019, 2, 1, 2, 47, 20) @pytest.fixture(scope="module") def granule_directory(tmp_path_factory): @@ -62,7 +64,7 @@ def annotation_file(granule_directory): @pytest.fixture(scope="module") def annotation_filehandler(annotation_file): """Create an annotation filehandler.""" - filename_info = dict(start_time=None, end_time=None, polarization="vv") + filename_info = dict(start_time=START_TIME, end_time=END_TIME, polarization="vv") return SAFEXMLAnnotation(annotation_file, filename_info, None) @@ -74,16 +76,16 @@ def calibration_file(granule_directory): calibration_file = cal_dir / f"calibration-s1a-iw-grd-vv-{filename_suffix}-001.xml" with open(calibration_file, "wb") as fd: fd.write(calibration_xml) - return calibration_file + return Path(calibration_file) @pytest.fixture(scope="module") def calibration_filehandler(calibration_file, annotation_filehandler): """Create a calibration filehandler.""" - filename_info = dict(start_time=None, end_time=None, polarization="vv") + filename_info = dict(start_time=START_TIME, end_time=END_TIME, polarization="vv") return Calibrator(calibration_file, - filename_info, - None, - image_shape=annotation_filehandler.image_shape) + filename_info, + None, + image_shape=annotation_filehandler.image_shape) @pytest.fixture(scope="module") def noise_file(granule_directory): @@ -99,14 +101,14 @@ def noise_file(granule_directory): @pytest.fixture(scope="module") def noise_filehandler(noise_file, annotation_filehandler): """Create a noise filehandler.""" - filename_info = dict(start_time=None, end_time=None, polarization="vv") + filename_info = dict(start_time=START_TIME, end_time=END_TIME, polarization="vv") return Denoiser(noise_file, filename_info, None, image_shape=annotation_filehandler.image_shape) @pytest.fixture(scope="module") def noise_with_holes_filehandler(annotation_filehandler): """Create a noise filehandler from data with holes.""" - filename_info = dict(start_time=None, end_time=None, polarization="vv") + filename_info = dict(start_time=START_TIME, end_time=END_TIME, polarization="vv") noise_filehandler = Denoiser(BytesIO(noise_xml_with_holes), filename_info, None, image_shape=annotation_filehandler.image_shape) @@ -151,13 +153,13 @@ def measurement_file(granule_directory): crs="+proj=latlong", gcps=gcps) as dst: dst.write(Z, 1) - return filename + return Path(filename) @pytest.fixture(scope="module") def measurement_filehandler(measurement_file, noise_filehandler, calibration_filehandler): """Create a measurement filehandler.""" - filename_info = {"mission_id": "S1A", "dataset_name": "foo", "start_time": 0, "end_time": 0, + filename_info = {"mission_id": "S1A", "dataset_name": "foo", "start_time": START_TIME, "end_time": END_TIME, "polarization": "vv"} filetype_info = None from satpy.readers.sar_c_safe import SAFEGRD From 12854de2dc6bb09cb8a90d49d5e15e1aeb92a411 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 8 Mar 2024 09:28:49 +0200 Subject: [PATCH 1179/1416] Refactor and test h5netcdf scalar reading --- satpy/readers/netcdf_utils.py | 11 +++-- satpy/tests/reader_tests/test_netcdf_utils.py | 47 +++++++++++++++++-- 2 files changed, 48 insertions(+), 10 deletions(-) diff --git a/satpy/readers/netcdf_utils.py b/satpy/readers/netcdf_utils.py index 103b8d80a5..c8b8a3f85f 100644 --- a/satpy/readers/netcdf_utils.py +++ b/satpy/readers/netcdf_utils.py @@ -379,14 +379,15 @@ def get_data_as_xarray(variable): try: attrs = variable.attrs except AttributeError: + # netCDF4 backend requires usage of __dict__ to get the attributes 
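+        # (h5netcdf variables expose .attrs directly and are handled by the try branch above)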
attrs = variable.__dict__ try: - arr = xr.DataArray( - variable[:], dims=variable.dimensions, attrs=attrs, name=variable.name) - except ValueError: + data = variable[:] + except (ValueError, IndexError): # Handle scalars for h5netcdf backend - arr = xr.DataArray( - variable.__array__(), dims=variable.dimensions, attrs=attrs, name=variable.name) + data = variable.__array__() + + arr = xr.DataArray(data, dims=variable.dimensions, attrs=attrs, name=variable.name) return arr diff --git a/satpy/tests/reader_tests/test_netcdf_utils.py b/satpy/tests/reader_tests/test_netcdf_utils.py index c1a5e3b151..5e0bcc44a1 100644 --- a/satpy/tests/reader_tests/test_netcdf_utils.py +++ b/satpy/tests/reader_tests/test_netcdf_utils.py @@ -315,12 +315,31 @@ def test_get_data_as_xarray_netcdf4(tmp_path): assert res.attrs == NC_ATTRS +def test_get_data_as_xarray_scalar_netcdf4(tmp_path): + """Test getting scalar xr.DataArray from netcdf4 variable.""" + import numpy as np + + from satpy.readers.netcdf_utils import get_data_as_xarray + + data = 1 + fname = tmp_path / "test.nc" + dset = _write_test_netcdf4(fname, data) + + res = get_data_as_xarray(dset["test_data"]) + np.testing.assert_equal(res.data, np.array(data)) + assert res.attrs == NC_ATTRS + + def _write_test_netcdf4(fname, data): import netCDF4 as nc dset = nc.Dataset(fname, "w") - dset.createDimension("y", None) - var = dset.createVariable("test_data", "uint8", ("y",)) + try: + dset.createDimension("y", data.size) + dims = ("y",) + except AttributeError: + dims = () + var = dset.createVariable("test_data", "uint8", dims) var[:] = data var.setncatts(NC_ATTRS) # Turn off automatic scale factor and offset handling @@ -348,10 +367,28 @@ def _write_test_h5netcdf(fname, data): import h5netcdf fid = h5netcdf.File(fname, "w") - fid.dimensions = {"y": data.size} - var = fid.create_variable("test_data", ("y",), "uint8") - var[:] = data + try: + fid.dimensions = {"y": data.size} + dims = ("y",) + except AttributeError: + dims = () + var = fid.create_variable("test_data", dims, "uint8", data=data) for key in NC_ATTRS: var.attrs[key] = NC_ATTRS[key] return fid + + +def test_get_data_as_xarray_scalar_h5netcdf(tmp_path): + """Test getting xr.DataArray from h5netcdf variable.""" + import numpy as np + + from satpy.readers.netcdf_utils import get_data_as_xarray + + data = 1 + fname = tmp_path / "test.nc" + fid = _write_test_h5netcdf(fname, data) + + res = get_data_as_xarray(fid["test_data"]) + np.testing.assert_equal(res.data, np.array(data)) + assert res.attrs == NC_ATTRS From 5c4da9e39ea23acad3e83ec621073755a05a44c2 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 8 Mar 2024 10:17:01 +0100 Subject: [PATCH 1180/1416] Use geotiepoints for lon lat interpolation --- satpy/readers/sar_c_safe.py | 64 ++++----------------- satpy/tests/reader_tests/test_sar_c_safe.py | 4 +- 2 files changed, 14 insertions(+), 54 deletions(-) diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 1d3d71fb55..dc24bd3a95 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -46,7 +46,8 @@ import rasterio import xarray as xr from dask import array as da -from dask.base import tokenize +from geotiepoints.geointerpolator import lonlat2xyz, xyz2lonlat +from geotiepoints.interpolator import MultipleGridInterpolator from xarray import DataArray from satpy.dataset.data_dict import DatasetDict @@ -385,13 +386,6 @@ def _fill_dask_pieces(dask_pieces, shape, chunks): dask_pieces.append(new_piece) -def interpolate_slice(slice_rows, slice_cols, interpolator): 
- """Interpolate the given slice of the larger array.""" - fine_rows = np.arange(slice_rows.start, slice_rows.stop, slice_rows.step) - fine_cols = np.arange(slice_cols.start, slice_cols.stop, slice_cols.step) - return interpolator(fine_cols, fine_rows) - - class _AzimuthBlock: """Implementation of an single azimuth-noise block.""" @@ -504,42 +498,6 @@ def interpolate_xml_array(self, shape, chunks): return interpolate_xarray_linear(xpoints, ypoints, self.data, shape, chunks=chunks) -def interpolate_xarray(xpoints, ypoints, values, shape, - blocksize=CHUNK_SIZE): - """Interpolate, generating a dask array.""" - from scipy.interpolate import RectBivariateSpline - - try: - blocksize_row, blocksize_col = blocksize - except ValueError: - blocksize_row = blocksize_col = blocksize - - vchunks = range(0, shape[0], blocksize_row) - hchunks = range(0, shape[1], blocksize_col) - - token = tokenize(blocksize, xpoints, ypoints, values, shape) - name = "interpolate-" + token - - spline = RectBivariateSpline(xpoints, ypoints, values.T) - - def interpolator(xnew, ynew): - """Interpolator function.""" - return spline(xnew, ynew).T - - dskx = {(name, i, j): (interpolate_slice, - slice(vcs, min(vcs + blocksize_row, shape[0])), - slice(hcs, min(hcs + blocksize_col, shape[1])), - interpolator) - for i, vcs in enumerate(vchunks) - for j, hcs in enumerate(hchunks) - } - - res = da.Array(dskx, name, shape=list(shape), - chunks=(blocksize_row, blocksize_col), - dtype=values.dtype) - return DataArray(res, dims=("y", "x")) - - def intp(grid_x, grid_y, interpolator): """Interpolate.""" return interpolator((grid_y, grid_x)) @@ -622,8 +580,8 @@ def get_dataset(self, key, info): @cached_property def _data(self): data = xr.open_dataarray(self.filename, engine="rasterio", - chunks="auto" - ).squeeze() + chunks="auto" + ).squeeze() self.chunks = data.data.chunksize data = data.assign_coords(x=np.arange(len(data.coords["x"])), y=np.arange(len(data.coords["y"]))) @@ -669,12 +627,15 @@ def _get_lonlatalts_uncached(self): (xpoints, ypoints), (gcp_lons, gcp_lats, gcp_alts), (gcps, crs) = self.get_gcps() - # FIXME: do interpolation on cartesian coordinates if the area is - # problematic. 
+ fine_points = [np.arange(size) for size in shape] + x, y, z = lonlat2xyz(gcp_lons, gcp_lats) + interpolator = MultipleGridInterpolator((xpoints, ypoints), x, y, z, gcp_alts) + hx, hy, hz, altitudes = interpolator.interpolate(fine_points, method="cubic", chunks=self.chunks) + longitudes, latitudes = xyz2lonlat(hx, hy, hz) - longitudes = interpolate_xarray(xpoints, ypoints, gcp_lons, shape, self.chunks) - latitudes = interpolate_xarray(xpoints, ypoints, gcp_lats, shape, self.chunks) - altitudes = interpolate_xarray(xpoints, ypoints, gcp_alts, shape, self.chunks) + altitudes = xr.DataArray(altitudes, dims=["y", "x"]) + longitudes = xr.DataArray(longitudes, dims=["y", "x"]) + latitudes = xr.DataArray(latitudes, dims=["y", "x"]) longitudes.attrs["gcps"] = gcps longitudes.attrs["crs"] = crs @@ -713,7 +674,6 @@ def get_gcps(self): rio_gcps = [rasterio.control.GroundControlPoint(*gcp) for gcp in gcp_list] - return (xpoints, ypoints), (gcp_lons, gcp_lats, gcp_alts), (rio_gcps, crs) @property diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index c3a90adec1..be4dfb6e1a 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -303,7 +303,7 @@ def test_read_lon_lats(self, measurement_filehandler): query = DataQuery(name="longitude", polarization="vv") xarr = measurement_filehandler.get_dataset(query, info=dict()) expected = expected_longitudes - np.testing.assert_allclose(xarr.values, expected[:10, :10]) + np.testing.assert_allclose(xarr.values, expected[:10, :10], atol=1e-3) annotation_xml = b""" @@ -858,7 +858,7 @@ def test_reading_from_reader(measurement_file, calibration_file, noise_file, ann query = DataID(reader._id_keys, **query.to_dict()) dataset_dict = reader.load([query]) array = dataset_dict["measurement"] - np.testing.assert_allclose(array.attrs["area"].lons, expected_longitudes[:10, :10]) + np.testing.assert_allclose(array.attrs["area"].lons, expected_longitudes[:10, :10], atol=1e-3) expected_db = np.array([[np.nan, -15.674268], [4.079997, 5.153585]]) np.testing.assert_allclose(array.values[:2, :2], expected_db) From c45cbb0c5c4fc57f82a7e1583cc6aefac7cbd53d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 8 Mar 2024 09:23:28 +0000 Subject: [PATCH 1181/1416] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- satpy/tests/test_readers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index d3f2c72814..2ee2c5b97b 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -1122,7 +1122,7 @@ def test_open_file_or_filename_uses_mode(tmp_path): res = open_file_or_filename(tmp_path / "hej", mode="rb").read() assert isinstance(res, bytes) - + @pytest.fixture(scope="module") def local_netcdf_filename(tmp_path_factory): """Create a simple local NetCDF file.""" From ae140b3a457dee131a4b7d6b84324c5633b734d4 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 8 Mar 2024 11:12:20 +0100 Subject: [PATCH 1182/1416] Make xml parsing more robust --- satpy/readers/sar_c_safe.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index dc24bd3a95..4e416b8777 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -39,6 +39,7 @@ from collections import defaultdict from datetime 
import timezone as tz from functools import cached_property +from pathlib import Path from threading import Lock import defusedxml.ElementTree as ET @@ -107,7 +108,10 @@ def __init__(self, filename, filename_info, filetype_info, self._start_time = filename_info["start_time"].replace(tzinfo=tz.utc) self._end_time = filename_info["end_time"].replace(tzinfo=tz.utc) self._polarization = filename_info["polarization"] - self.root = ET.parse(open_file_or_filename(self.filename)) + if isinstance(self.filename, str): + self.filename = Path(self.filename) + with self.filename.open() as fd: + self.root = ET.parse(fd) self._image_shape = image_shape def get_metadata(self): @@ -579,7 +583,7 @@ def get_dataset(self, key, info): @cached_property def _data(self): - data = xr.open_dataarray(self.filename, engine="rasterio", + data = xr.open_dataarray(open_file_or_filename(self.filename), engine="rasterio", chunks="auto" ).squeeze() self.chunks = data.data.chunksize @@ -629,7 +633,7 @@ def _get_lonlatalts_uncached(self): fine_points = [np.arange(size) for size in shape] x, y, z = lonlat2xyz(gcp_lons, gcp_lats) - interpolator = MultipleGridInterpolator((xpoints, ypoints), x, y, z, gcp_alts) + interpolator = MultipleGridInterpolator((ypoints, xpoints), x, y, z, gcp_alts) hx, hy, hz, altitudes = interpolator.interpolate(fine_points, method="cubic", chunks=self.chunks) longitudes, latitudes = xyz2lonlat(hx, hy, hz) From 9390945acfbb78cd5b122d7ae8d726762728b852 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 8 Mar 2024 11:15:01 +0100 Subject: [PATCH 1183/1416] Fix no mode --- satpy/readers/__init__.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index e3946f3a7c..33196b3aab 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -783,7 +783,7 @@ def _get_compression(file): return None -def open_file_or_filename(unknown_file_thing, mode="r"): +def open_file_or_filename(unknown_file_thing, mode=None): """Try to open the provided file "thing" if needed, otherwise return the filename or Path. 
This wraps the logic of getting something like an fsspec OpenFile object @@ -797,7 +797,10 @@ def open_file_or_filename(unknown_file_thing, mode="r"): f_obj = unknown_file_thing else: try: - f_obj = unknown_file_thing.open(mode=mode) + if mode is None: + f_obj = unknown_file_thing.open() + else: + f_obj = unknown_file_thing.open(mode=mode) except AttributeError: f_obj = unknown_file_thing return f_obj From f2e70a061c5d16c548c810d109e7eab44e52c668 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 8 Mar 2024 11:39:28 +0100 Subject: [PATCH 1184/1416] Fix modes and tests --- satpy/readers/sar_c_safe.py | 2 +- satpy/tests/reader_tests/test_sar_c_safe.py | 12 +++++++----- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 4e416b8777..e11bcbded3 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -583,7 +583,7 @@ def get_dataset(self, key, info): @cached_property def _data(self): - data = xr.open_dataarray(open_file_or_filename(self.filename), engine="rasterio", + data = xr.open_dataarray(open_file_or_filename(self.filename, mode="rb"), engine="rasterio", chunks="auto" ).squeeze() self.chunks = data.data.chunksize diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index be4dfb6e1a..9e24c00c4e 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -20,7 +20,6 @@ import os from datetime import datetime from enum import Enum -from io import BytesIO from pathlib import Path import numpy as np @@ -106,12 +105,15 @@ def noise_filehandler(noise_file, annotation_filehandler): @pytest.fixture(scope="module") -def noise_with_holes_filehandler(annotation_filehandler): +def noise_with_holes_filehandler(annotation_filehandler, tmpdir_factory): """Create a noise filehandler from data with holes.""" filename_info = dict(start_time=START_TIME, end_time=END_TIME, polarization="vv") - noise_filehandler = Denoiser(BytesIO(noise_xml_with_holes), - filename_info, None, - image_shape=annotation_filehandler.image_shape) + noise_xml_file = tmpdir_factory.mktemp("data").join("noise_with_holes.xml") + with open(noise_xml_file, "wb") as fd: + fd.write(noise_xml_with_holes) + noise_filehandler = Denoiser(noise_xml_file, + filename_info, None, + image_shape=annotation_filehandler.image_shape) return noise_filehandler From 7bc992cedc9746b35f1cbb85d77e9bb56e2a2ecf Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 8 Mar 2024 12:24:55 +0100 Subject: [PATCH 1185/1416] Fix modes test --- satpy/tests/test_readers.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index 2ee2c5b97b..4b477c6bdf 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -34,7 +34,7 @@ from satpy.dataset.data_dict import get_key from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange -from satpy.readers import find_files_and_readers, open_file_or_filename +from satpy.readers import FSFile, find_files_and_readers, open_file_or_filename # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: @@ -1117,9 +1117,11 @@ def test_hash(self, local_filename, local_filename2, local_zip_file): def test_open_file_or_filename_uses_mode(tmp_path): """Test that open_file_or_filename uses provided mode.""" - with open(tmp_path / "hej", mode="wb") as fd: + filename = tmp_path / "hej" + with 
open(filename, mode="wb") as fd: fd.write(b"hej") - res = open_file_or_filename(tmp_path / "hej", mode="rb").read() + fileobj = FSFile(os.fspath(filename)) + res = open_file_or_filename(fileobj, mode="rb").read() assert isinstance(res, bytes) From f0c382e8822e7dcbacdd1a85cb67faceeaf7b0e4 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 8 Mar 2024 12:59:10 +0100 Subject: [PATCH 1186/1416] Refactor --- satpy/readers/sar_c_safe.py | 63 ++++++++++++++++++++++++++----------- 1 file changed, 44 insertions(+), 19 deletions(-) diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index e11bcbded3..c175b17b3c 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -735,39 +735,64 @@ def load(self, dataset_keys, **kwargs): def create_storage_items(self, files, **kwargs): """Create the storage items.""" - filenames = files + self.files_by_type = self._get_files_by_type(files) + image_shapes = self._get_image_shapes() + calibrators = self._create_calibrators(image_shapes) + denoisers = self._create_denoisers(image_shapes) + measurement_handlers = self._create_measurement_handlers(calibrators, denoisers) + + self.storage_items = measurement_handlers + + + def _get_files_by_type(self, files): files_by_type = defaultdict(list) for file_type, type_info in self.config["file_types"].items(): - files_by_type[file_type].extend(self.filename_items_for_filetype(filenames, type_info)) + files_by_type[file_type].extend(self.filename_items_for_filetype(files, type_info)) + return files_by_type + + + def _get_image_shapes(self): image_shapes = dict() - for annotation_file, annotation_info in files_by_type["safe_annotation"]: + for annotation_file, annotation_info in self.files_by_type["safe_annotation"]: annotation_fh = SAFEXMLAnnotation(annotation_file, filename_info=annotation_info, filetype_info=None) image_shapes[annotation_info["polarization"]] = annotation_fh.image_shape + return image_shapes + - calibration_handlers = dict() - for calibration_file, calibration_info in files_by_type["safe_calibration"]: + def _create_calibrators(self, image_shapes): + calibrators = dict() + for calibration_file, calibration_info in self.files_by_type["safe_calibration"]: polarization = calibration_info["polarization"] - calibration_handlers[polarization] = Calibrator(calibration_file, - filename_info=calibration_info, - filetype_info=None, - image_shape=image_shapes[polarization]) + calibrators[polarization] = Calibrator(calibration_file, + filename_info=calibration_info, + filetype_info=None, + image_shape=image_shapes[polarization]) - noise_handlers = dict() - for noise_file, noise_info in files_by_type["safe_noise"]: + return calibrators + + + def _create_denoisers(self, image_shapes): + denoisers = dict() + for noise_file, noise_info in self.files_by_type["safe_noise"]: polarization = noise_info["polarization"] - noise_handlers[polarization] = Denoiser(noise_file, - filename_info=noise_info, - filetype_info=None, - image_shape=image_shapes[polarization]) + denoisers[polarization] = Denoiser(noise_file, + filename_info=noise_info, + filetype_info=None, + image_shape=image_shapes[polarization]) + + return denoisers + + def _create_measurement_handlers(self, calibrators, denoisers): measurement_handlers = dict() - for measurement_file, measurement_info in files_by_type["safe_measurement"]: + for measurement_file, measurement_info in self.files_by_type["safe_measurement"]: polarization = measurement_info["polarization"] measurement_handlers[polarization] = 
SAFEGRD(measurement_file, filename_info=measurement_info, - calibrator=calibration_handlers[polarization], - denoiser=noise_handlers[polarization], + calibrator=calibrators[polarization], + denoiser=denoisers[polarization], filetype_info=None) - self.storage_items = measurement_handlers + + return measurement_handlers From beaf06fd3e0e01b2779d93356ab4352570b2ab41 Mon Sep 17 00:00:00 2001 From: "Nina.Hakansson" Date: Fri, 8 Mar 2024 14:14:45 +0100 Subject: [PATCH 1187/1416] Fix bug introduced a few commits back --- satpy/readers/viirs_vgac_l1c_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/viirs_vgac_l1c_nc.py b/satpy/readers/viirs_vgac_l1c_nc.py index 47170616ed..578e19bed3 100644 --- a/satpy/readers/viirs_vgac_l1c_nc.py +++ b/satpy/readers/viirs_vgac_l1c_nc.py @@ -83,7 +83,7 @@ def extract_time_data(self, data, nc): """Decode time data.""" reference_time = np.datetime64(datetime.strptime(nc["proj_time0"].attrs["units"], "days since %d/%m/%YT%H:%M:%S")) - delta_part_of_day, delta_full_days = np.modf(nc["proj_time0"].values[0]) + delta_part_of_day, delta_full_days = np.modf(nc["proj_time0"].values) delta_full_days = np.timedelta64(delta_full_days.astype(np.int64), "D").astype("timedelta64[us]") delta_part_of_day = delta_part_of_day * np.timedelta64(1, "D").astype("timedelta64[us]") delta_hours = data.values * np.timedelta64(1, "h").astype("timedelta64[us]") From 85d9117d25612254d37cf6ea79c9c36d6c34e9ce Mon Sep 17 00:00:00 2001 From: "Nina.Hakansson" Date: Fri, 8 Mar 2024 14:29:38 +0100 Subject: [PATCH 1188/1416] Fixing the bug in the unittest proj_time0 should be a scalar --- satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py index b9380fb859..ba9b83d707 100644 --- a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py +++ b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py @@ -72,7 +72,7 @@ def nc_filename(tmp_path): delta_full_days = delta_days.astype("timedelta64[D]") hidden_reference_time = reference_time + delta_full_days delta_part_of_days = start_time - hidden_reference_time - proj_time0 = nc.createVariable("proj_time0", np.float64, ("one",)) + proj_time0 = nc.createVariable("proj_time0", np.float64) proj_time0[:] = (delta_full_days.astype(np.int64) + 0.000001 * delta_part_of_days.astype("timedelta64[us]").astype(np.int64) / (60 * 60 * 24)) proj_time0.units = "days since 01/01/2010T00:00:00" From eb31813965f3a655c6035eb2e258e5b66b0138a0 Mon Sep 17 00:00:00 2001 From: Florian Fichtner <12199342+fwfichtner@users.noreply.github.com> Date: Thu, 14 Mar 2024 09:26:10 +0100 Subject: [PATCH 1189/1416] add defusedxml to msi_safe --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 16c2b95512..5c16c2918a 100644 --- a/setup.py +++ b/setup.py @@ -47,7 +47,7 @@ "amsr2_l1b": ["h5py >= 2.7.0"], "hrpt": ["pyorbital >= 1.3.1", "pygac", "python-geotiepoints >= 1.1.7"], "hrit_msg": ["pytroll-schedule"], - "msi_safe": ["rioxarray", "bottleneck", "python-geotiepoints"], + "msi_safe": ["rioxarray", "bottleneck", "python-geotiepoints", "defusedxml"], "nc_nwcsaf_msg": ["netCDF4 >= 1.1.8"], "sar_c": ["python-geotiepoints >= 1.1.7", "rasterio", "rioxarray", "defusedxml"], "abi_l1b": ["h5netcdf"], From 25e662459a89cf135c16f85a7d021eb9ff93317f Mon Sep 17 00:00:00 2001 From: Florian Fichtner <12199342+fwfichtner@users.noreply.github.com> Date: Thu, 14 Mar 
2024 09:31:06 +0100 Subject: [PATCH 1190/1416] add avhrr_l1b_eps extra because of missing defusedxml --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index 5c16c2918a..2f467286d0 100644 --- a/setup.py +++ b/setup.py @@ -35,6 +35,7 @@ extras_require = { # Readers: + "avhrr_l1b_eps": ["defusedxml"], "avhrr_l1b_gaclac": ["pygac >= 1.3.0"], "modis_l1b": ["pyhdf", "python-geotiepoints >= 1.1.7"], "geocat": ["pyhdf"], From 45f918e1b98b8718c32467766ecfbb918d7c7b0a Mon Sep 17 00:00:00 2001 From: Florian Fichtner <12199342+fwfichtner@users.noreply.github.com> Date: Thu, 14 Mar 2024 10:51:40 +0100 Subject: [PATCH 1191/1416] add to authors list --- AUTHORS.md | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS.md b/AUTHORS.md index d976cf318f..fb43d0168d 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -26,6 +26,7 @@ The following people have made contributions to this project: - [Adam Dybbroe (adybbroe)](https://github.com/adybbroe) - [Ulrik Egede (egede)](https://github.com/egede) - [Joleen Feltz (joleenf)](https://github.com/joleenf) +- [Florian Fichtner (fwfichtner)](https://github.com/fwfichtner) - [Stephan Finkensieper (sfinkens)](https://github.com/sfinkens) - Deutscher Wetterdienst - [Gionata Ghiggi (ghiggi)](https://github.com/ghiggi) - [Andrea Grillini (AppLEaDaY)](https://github.com/AppLEaDaY) From f592d9d68f9a2d5927281364b896d3b690e55e9f Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 18 Mar 2024 14:24:01 +0100 Subject: [PATCH 1192/1416] Add support for EO-SIP AVHRR LAC data --- satpy/etc/readers/avhrr_l1b_gaclac.yaml | 1 + satpy/readers/avhrr_l1b_gaclac.py | 8 +++++--- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/satpy/etc/readers/avhrr_l1b_gaclac.yaml b/satpy/etc/readers/avhrr_l1b_gaclac.yaml index 484bed6797..a547815072 100644 --- a/satpy/etc/readers/avhrr_l1b_gaclac.yaml +++ b/satpy/etc/readers/avhrr_l1b_gaclac.yaml @@ -185,3 +185,4 @@ file_types: file_patterns: - '{creation_site:3s}.{transfer_mode:4s}.{platform_id:2s}.D{start_time:%y%j.S%H%M}.E{end_time:%H%M}.B{orbit_number:05d}{end_orbit_last_digits:02d}.{station:2s}' - '{subscription_prefix:10d}.{creation_site:3s}.{transfer_mode:4s}.{platform_id:2s}.D{start_time:%y%j.S%H%M}.E{end_time:%H%M}.B{orbit_number:05d}{end_orbit_last_digits:02d}.{station:2s}' + - '{platform_id:3s}_RPRO_AVH_L1B_1P_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{orbit_number:06d}/image.l1b' diff --git a/satpy/readers/avhrr_l1b_gaclac.py b/satpy/readers/avhrr_l1b_gaclac.py index cfc3e1283e..6da23e2021 100644 --- a/satpy/readers/avhrr_l1b_gaclac.py +++ b/satpy/readers/avhrr_l1b_gaclac.py @@ -98,15 +98,17 @@ def __init__(self, filename, filename_info, filetype_info, # noqa: D417 if self._end_time < self._start_time: self._end_time += timedelta(days=1) self.platform_id = filename_info["platform_id"] - if self.platform_id in ["NK", "NL", "NM", "NN", "NP", "M1", "M2", - "M3"]: + if self.platform_id in ["NK", "NL", "NM", "NN", "NP", + "M1", "M2", "M3", + "N15", "N16", "N17", "N18", "N19"]: if filename_info.get("transfer_mode") == "GHRR": self.reader_class = GACKLMReader else: self.reader_class = LACKLMReader self.chn_dict = AVHRR3_CHANNEL_NAMES self.sensor = "avhrr-3" - elif self.platform_id in ["NC", "ND", "NF", "NH", "NJ"]: + elif self.platform_id in ["NC", "NE", "NF", "NG", "NH", "ND", "NJ", + "N07", "N08", "N09", "N10", "N11", "N12", "N14"]: if filename_info.get("transfer_mode") == "GHRR": self.reader_class = GACPODReader else: From 64dd1087fe703e63f38b89c7dcd7a98c9f37fd3c Mon Sep 17 00:00:00 2001 
From: Martin Raspaud Date: Mon, 18 Mar 2024 15:27:17 +0100 Subject: [PATCH 1193/1416] Guess if the format of LAC is eosip --- satpy/readers/avhrr_l1b_gaclac.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/satpy/readers/avhrr_l1b_gaclac.py b/satpy/readers/avhrr_l1b_gaclac.py index 6da23e2021..26e41387e6 100644 --- a/satpy/readers/avhrr_l1b_gaclac.py +++ b/satpy/readers/avhrr_l1b_gaclac.py @@ -98,9 +98,14 @@ def __init__(self, filename, filename_info, filetype_info, # noqa: D417 if self._end_time < self._start_time: self._end_time += timedelta(days=1) self.platform_id = filename_info["platform_id"] + + if len(self.platform_id) == 3: + self.reader_kwargs["eosip_header"] = True + if self.platform_id in ["NK", "NL", "NM", "NN", "NP", + "N15", "N16", "N17", "N18", "N19", "M1", "M2", "M3", - "N15", "N16", "N17", "N18", "N19"]: + "MOB", "MOA", "MOC"]: if filename_info.get("transfer_mode") == "GHRR": self.reader_class = GACKLMReader else: From f0c238ca2448588e3d4dd691e57926bf04bd90ba Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 18 Mar 2024 15:47:16 +0100 Subject: [PATCH 1194/1416] Add a test --- .../reader_tests/test_avhrr_l1b_gaclac.py | 31 +++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py index dfcaff4514..42298af130 100644 --- a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py +++ b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py @@ -26,6 +26,7 @@ import xarray as xr GAC_PATTERN = '{creation_site:3s}.{transfer_mode:4s}.{platform_id:2s}.D{start_time:%y%j.S%H%M}.E{end_time:%H%M}.B{orbit_number:05d}{end_orbit_last_digits:02d}.{station:2s}' # noqa +EOSIP_PATTERN = '{platform_id:3s}_RPRO_AVH_L1B_1P_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{orbit_number:06d}/image.l1b' # noqa GAC_POD_FILENAMES = ["NSS.GHRR.NA.D79184.S1150.E1337.B0008384.WI", "NSS.GHRR.NA.D79184.S2350.E0137.B0008384.WI", @@ -68,6 +69,8 @@ "NSS.FRAC.M2.D12153.S1729.E1910.B2915354.SV", "NSS.LHRR.NP.D16306.S1803.E1814.B3985555.WI"] +LAC_EOSIP_FILENAMES = ["N06_RPRO_AVH_L1B_1P_20061206T010808_20061206T012223_007961/image.l1b"] + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__", return_value=None) def _get_fh_mocked(init_mock, **attrs): @@ -138,6 +141,12 @@ def _get_fh(self, filename="NSS.GHRR.NG.D88002.S0614.E0807.B0670506.WI", filename_info = parse(GAC_PATTERN, filename) return self.GACLACFile(filename, filename_info, {}, **kwargs) + def _get_eosip_fh(self, filename, **kwargs): + """Create a file handler.""" + from trollsift import parse + filename_info = parse(EOSIP_PATTERN, filename) + return self.GACLACFile(filename, filename_info, {}, **kwargs) + def test_init(self): """Test GACLACFile initialization.""" from pygac.gac_klm import GACKLMReader @@ -161,6 +170,28 @@ def test_init(self): assert fh.start_time < fh.end_time assert fh.reader_class is reader_cls + + def test_init_eosip(self): + """Test GACLACFile initialization.""" + from pygac.lac_pod import LACPODReader + + kwargs = {"start_line": 1, + "end_line": 2, + "strip_invalid_coords": True, + "interpolate_coords": True, + "adjust_clock_drift": True, + "tle_dir": "tle_dir", + "tle_name": "tle_name", + "tle_thresh": 123, + "calibration": "calibration"} + for filenames, reader_cls in zip([LAC_EOSIP_FILENAMES], + [LACPODReader]): + for filename in filenames: + fh = self._get_eosip_fh(filename, **kwargs) + assert fh.start_time < fh.end_time + assert fh.reader_class is reader_cls + assert 
fh.reader_kwargs["eosip_header"] is True + def test_read_raw_data(self): """Test raw data reading.""" fh = _get_fh_mocked(reader=None, From 0bc8a429c9f036499d172ae4085a7bf631ae004b Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 18 Mar 2024 17:47:55 +0100 Subject: [PATCH 1195/1416] Refactor --- satpy/readers/avhrr_l1b_gaclac.py | 20 ++++++++++++------- .../reader_tests/test_avhrr_l1b_gaclac.py | 2 +- 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/satpy/readers/avhrr_l1b_gaclac.py b/satpy/readers/avhrr_l1b_gaclac.py index 26e41387e6..8f31fa765a 100644 --- a/satpy/readers/avhrr_l1b_gaclac.py +++ b/satpy/readers/avhrr_l1b_gaclac.py @@ -100,20 +100,16 @@ def __init__(self, filename, filename_info, filetype_info, # noqa: D417 self.platform_id = filename_info["platform_id"] if len(self.platform_id) == 3: - self.reader_kwargs["eosip_header"] = True + self.reader_kwargs["header_datetime"] = datetime(2000, 1, 1) - if self.platform_id in ["NK", "NL", "NM", "NN", "NP", - "N15", "N16", "N17", "N18", "N19", - "M1", "M2", "M3", - "MOB", "MOA", "MOC"]: + if self._is_avhrr3(): if filename_info.get("transfer_mode") == "GHRR": self.reader_class = GACKLMReader else: self.reader_class = LACKLMReader self.chn_dict = AVHRR3_CHANNEL_NAMES self.sensor = "avhrr-3" - elif self.platform_id in ["NC", "NE", "NF", "NG", "NH", "ND", "NJ", - "N07", "N08", "N09", "N10", "N11", "N12", "N14"]: + elif self._is_avhrr2(): if filename_info.get("transfer_mode") == "GHRR": self.reader_class = GACPODReader else: @@ -129,6 +125,16 @@ def __init__(self, filename, filename_info, filetype_info, # noqa: D417 self.sensor = "avhrr" self.filename_info = filename_info + def _is_avhrr2(self): + return self.platform_id in ["NC", "NE", "NF", "NG", "NH", "ND", "NJ", + "N07", "N08", "N09", "N10", "N11", "N12", "N14"] + + def _is_avhrr3(self): + return self.platform_id in ["NK", "NL", "NM", "NN", "NP", + "N15", "N16", "N17", "N18", "N19", + "M1", "M2", "M3", + "MOB", "MOA", "MOC"] + def read_raw_data(self): """Create a pygac reader and read raw data from the file.""" if self.reader is None: diff --git a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py index 42298af130..8549cccc75 100644 --- a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py +++ b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py @@ -190,7 +190,7 @@ def test_init_eosip(self): fh = self._get_eosip_fh(filename, **kwargs) assert fh.start_time < fh.end_time assert fh.reader_class is reader_cls - assert fh.reader_kwargs["eosip_header"] is True + assert fh.reader_kwargs["header_datetime"] > datetime.date(1994, 11, 15) def test_read_raw_data(self): """Test raw data reading.""" From 0064e2197f5d4fabef5060fc48541831eb959345 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 18 Mar 2024 18:02:34 +0100 Subject: [PATCH 1196/1416] Replace header_datetime with header_date --- satpy/readers/avhrr_l1b_gaclac.py | 4 ++-- satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/readers/avhrr_l1b_gaclac.py b/satpy/readers/avhrr_l1b_gaclac.py index 8f31fa765a..96a13449f7 100644 --- a/satpy/readers/avhrr_l1b_gaclac.py +++ b/satpy/readers/avhrr_l1b_gaclac.py @@ -30,7 +30,7 @@ """ import logging -from datetime import datetime, timedelta +from datetime import date, datetime, timedelta import dask.array as da import numpy as np @@ -100,7 +100,7 @@ def __init__(self, filename, filename_info, filetype_info, # noqa: D417 self.platform_id = 
filename_info["platform_id"] if len(self.platform_id) == 3: - self.reader_kwargs["header_datetime"] = datetime(2000, 1, 1) + self.reader_kwargs["header_date"] = date(2000, 1, 1) if self._is_avhrr3(): if filename_info.get("transfer_mode") == "GHRR": diff --git a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py index 8549cccc75..e67f3cff2c 100644 --- a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py +++ b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py @@ -190,7 +190,7 @@ def test_init_eosip(self): fh = self._get_eosip_fh(filename, **kwargs) assert fh.start_time < fh.end_time assert fh.reader_class is reader_cls - assert fh.reader_kwargs["header_datetime"] > datetime.date(1994, 11, 15) + assert fh.reader_kwargs["header_date"] > datetime.date(1994, 11, 15) def test_read_raw_data(self): """Test raw data reading.""" From 276aa7dbeffcbb70085abac4418e377281b1806c Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 19 Mar 2024 07:28:35 +0100 Subject: [PATCH 1197/1416] Fix import --- satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py index e67f3cff2c..4f4e8e974a 100644 --- a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py +++ b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py @@ -17,7 +17,7 @@ # satpy. If not, see . """Pygac interface.""" -from datetime import datetime +from datetime import date, datetime from unittest import TestCase, mock import dask.array as da @@ -190,7 +190,7 @@ def test_init_eosip(self): fh = self._get_eosip_fh(filename, **kwargs) assert fh.start_time < fh.end_time assert fh.reader_class is reader_cls - assert fh.reader_kwargs["header_date"] > datetime.date(1994, 11, 15) + assert fh.reader_kwargs["header_date"] > date(1994, 11, 15) def test_read_raw_data(self): """Test raw data reading.""" From 5ffe15ef34327f58430c0585c5c95a0911368ef1 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 19 Mar 2024 19:28:27 +0100 Subject: [PATCH 1198/1416] Force newer botocore --- continuous_integration/environment.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index f1a89319a8..4fc7a508f2 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -47,6 +47,7 @@ dependencies: - pytest-cov - pytest-lazy-fixture - fsspec + - botocore>=1.33 - s3fs - python-geotiepoints - pooch From 7126f752b1a123908b2aba837bab7f984e14ef5c Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 19 Mar 2024 23:19:04 +0100 Subject: [PATCH 1199/1416] Replace setup with pyproject.toml --- pyproject.toml | 121 ++++++++++++++++++++++++++++++++-- setup.cfg | 25 ------- setup.py | 172 ------------------------------------------------- 3 files changed, 117 insertions(+), 201 deletions(-) delete mode 100644 setup.cfg delete mode 100644 setup.py diff --git a/pyproject.toml b/pyproject.toml index 537f70cabb..35ba5e8dc2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,9 +1,122 @@ +[project] +name = "satpy" +dynamic = ["version"] +description = "Python package for earth-observing satellite data processing" +authors = [ + { name = "The Pytroll Team", email = "pytroll@googlegroups.com" } +] +dependencies = [ + "appdirs", + "dask[array]>=0.17.1", + "donfig", + "numpy>=1.21", + "packaging", + "pillow", + "pooch", + "pykdtree", + "pyorbital", + "pyproj>=2.2", 
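+    # (these version floors carry over the "requires" list from the
+    # setup.py that is removed further down in this patch)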
+ "pyresample>=1.24.0", + "pyyaml>=5.1", + "trollimage>=1.23", + "trollsift", + "xarray>=0.14.1", + "zarr", +] +readme = "README.rst" +requires-python = ">=3.9" +license = { text = "GPLv3" } +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Topic :: Scientific/Engineering" +] + +[project.optional-dependencies] +avhrr_l1b_eps = ["defusedxml"] +avhrr_l1b_gaclac = ["pygac >= 1.3.0"] +modis_l1b = ["pyhdf", "python-geotiepoints >= 1.1.7"] +geocat = ["pyhdf"] +acspo = ["netCDF4 >= 1.1.8"] +clavrx = ["netCDF4 >= 1.1.8"] +viirs_l1b = ["netCDF4 >= 1.1.8"] +viirs_sdr = ["h5py >= 2.7.0"] +viirs_compact = ["h5py >= 2.7.0"] +omps_edr = ["h5py >= 2.7.0"] +amsr2_l1b = ["h5py >= 2.7.0"] +hrpt = ["pyorbital >= 1.3.1", "pygac", "python-geotiepoints >= 1.1.7"] +hrit_msg = ["pytroll-schedule"] +msi_safe = ["rioxarray", "bottleneck", "python-geotiepoints", "defusedxml"] +nc_nwcsaf_msg = ["netCDF4 >= 1.1.8"] +sar_c = ["python-geotiepoints >= 1.1.7", "rasterio", "rioxarray", "defusedxml"] +abi_l1b = ["h5netcdf"] +seviri_l1b_hrit = ["pyorbital >= 1.3.1"] +seviri_l1b_native = ["pyorbital >= 1.3.1"] +seviri_l1b_nc = ["pyorbital >= 1.3.1", "netCDF4 >= 1.1.8"] +seviri_l2_bufr = ["eccodes"] +seviri_l2_grib = ["eccodes"] +hsaf_grib = ["pygrib"] +remote_reading = ["fsspec"] +insat_3d = ["xarray-datatree"] +gms5-vissr_l1b = ["numba"] +# Writers: +cf = ["h5netcdf >= 0.7.3"] +awips_tiled = ["netCDF4 >= 1.1.8"] +geotiff = ["rasterio", "trollimage[geotiff]"] +ninjo = ["pyninjotiff", "pint"] +units = ["pint-xarray"] +# Composites/Modifiers: +rayleigh = ["pyspectral >= 0.10.1"] +angles = ["pyorbital >= 1.3.1"] +filters = ["dask-image"] +# MultiScene: +animations = ["imageio"] +# Documentation: +doc = ["sphinx", "sphinx_rtd_theme", "sphinxcontrib-apidoc"] +# Other +geoviews = ["geoviews"] +holoviews = ["holoviews"] +hvplot = ["hvplot", "geoviews", "cartopy", "holoviews"] +overlays = ["pycoast", "pydecorate"] +satpos_from_tle = ["skyfield", "astropy"] +tests = ["behave", "h5py", "netCDF4", "pyhdf", "imageio", + "rasterio", "geoviews", "trollimage", "fsspec", "bottleneck", + "rioxarray", "pytest", "pytest-lazy-fixture", "defusedxml", + "s3fs", "eccodes", "h5netcdf", "xarray-datatree", + "skyfield", "ephem", "pint-xarray", "astropy", "dask-image", "python-geotiepoints", "numba"] + +[project.scripts] +satpy_retrieve_all_aux_data = "satpy.aux_download:retrieve_all_cmd" + +[project.urls] +Homepage = "https://github.com/pytroll/satpy" +"Bug Tracker" = "https://github.com/pytroll/satpy/issues" +Documentation = "https://satpy.readthedocs.io/en/stable/" +"Source Code" = "https://github.com/pytroll/satpy" +Organization = "https://pytroll.github.io/" +Slack = "https://pytroll.slack.com/" +Twitter = "https://twitter.com/hashtag/satpy?src=hashtag_click" +"Release Notes" = "https://github.com/pytroll/satpy/blob/main/CHANGELOG.md" +Mastodon = "https://fosstodon.org/tags/satpy" + [build-system] -requires = ["setuptools>=60", "wheel", "setuptools_scm[toml]>=8.0"] -build-backend = "setuptools.build_meta" +requires = ["hatchling", "hatch-vcs"] +build-backend = "hatchling.build" + +[tool.hatch.metadata] +allow-direct-references = true + +[tool.hatch.build.targets.wheel] +packages = ["satpy"] + +[tool.hatch.version] +source = "vcs" -[tool.setuptools_scm] -write_to = "satpy/version.py" +[tool.hatch.build.hooks.vcs] +version-file = 
"satpy/version.py" [tool.isort] sections = ["FUTURE", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"] diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 205f924b33..0000000000 --- a/setup.cfg +++ /dev/null @@ -1,25 +0,0 @@ -[bdist_rpm] -requires=h5py pyresample python2-numexpr pyhdf xarray dask h5netcdf -release=1 -doc_files = doc/Makefile doc/source/*.rst doc/examples/*.py - -[bdist_wheel] -universal=1 - -[flake8] -max-line-length = 120 -exclude = - satpy/readers/li_l2.py - satpy/readers/scatsat1_l2b.py - satpy/version.py - satpy/tests/features -per-file-ignores = - satpy/tests/*/conftest.py:F401 - satpy/tests/*/*/conftest.py:F401 - doc/source/doi_role.py:D103 - satpy/tests/features/steps/*.py:F811 - -[coverage:run] -relative_files = True -omit = - satpy/version.py diff --git a/setup.py b/setup.py deleted file mode 100644 index 2f467286d0..0000000000 --- a/setup.py +++ /dev/null @@ -1,172 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# Copyright (c) 2009-2023 Satpy developers -# -# This file is part of satpy. -# -# satpy is free software: you can redistribute it and/or modify it under the -# terms of the GNU General Public License as published by the Free Software -# Foundation, either version 3 of the License, or (at your option) any later -# version. -# -# satpy is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# satpy. If not, see . -"""Setup file for satpy.""" - -import os.path -from glob import glob - -from setuptools import find_packages, setup - -requires = ["numpy >=1.21", "pillow", "pyresample >=1.24.0", "trollsift", - "trollimage >=1.23", "pykdtree", "pyyaml >=5.1", "xarray >=0.14.1", - "dask[array] >=0.17.1", "pyproj>=2.2", "zarr", "donfig", "appdirs", - "packaging", "pooch", "pyorbital"] - -test_requires = ["behave", "h5py", "netCDF4", "pyhdf", "imageio", - "rasterio", "geoviews", "trollimage", "fsspec", "bottleneck", - "rioxarray", "pytest", "pytest-lazy-fixture", "defusedxml", - "s3fs", "eccodes", "h5netcdf", "xarray-datatree", - "skyfield", "ephem", "pint-xarray", "astropy", "dask-image"] - -extras_require = { - # Readers: - "avhrr_l1b_eps": ["defusedxml"], - "avhrr_l1b_gaclac": ["pygac >= 1.3.0"], - "modis_l1b": ["pyhdf", "python-geotiepoints >= 1.1.7"], - "geocat": ["pyhdf"], - "acspo": ["netCDF4 >= 1.1.8"], - "clavrx": ["netCDF4 >= 1.1.8"], - "viirs_l1b": ["netCDF4 >= 1.1.8"], - "viirs_sdr": ["h5py >= 2.7.0"], - "viirs_compact": ["h5py >= 2.7.0"], - "omps_edr": ["h5py >= 2.7.0"], - "amsr2_l1b": ["h5py >= 2.7.0"], - "hrpt": ["pyorbital >= 1.3.1", "pygac", "python-geotiepoints >= 1.1.7"], - "hrit_msg": ["pytroll-schedule"], - "msi_safe": ["rioxarray", "bottleneck", "python-geotiepoints", "defusedxml"], - "nc_nwcsaf_msg": ["netCDF4 >= 1.1.8"], - "sar_c": ["python-geotiepoints >= 1.1.7", "rasterio", "rioxarray", "defusedxml"], - "abi_l1b": ["h5netcdf"], - "seviri_l1b_hrit": ["pyorbital >= 1.3.1"], - "seviri_l1b_native": ["pyorbital >= 1.3.1"], - "seviri_l1b_nc": ["pyorbital >= 1.3.1", "netCDF4 >= 1.1.8"], - "seviri_l2_bufr": ["eccodes"], - "seviri_l2_grib": ["eccodes"], - "hsaf_grib": ["pygrib"], - "remote_reading": ["fsspec"], - "insat_3d": ["xarray-datatree"], - "gms5-vissr_l1b": ["numba"], - # Writers: - "cf": ["h5netcdf >= 0.7.3"], - "awips_tiled": ["netCDF4 >= 1.1.8"], - "geotiff": 
["rasterio", "trollimage[geotiff]"], - "ninjo": ["pyninjotiff", "pint"], - "units": ["pint-xarray"], - # Composites/Modifiers: - "rayleigh": ["pyspectral >= 0.10.1"], - "angles": ["pyorbital >= 1.3.1"], - "filters": ["dask-image"], - # MultiScene: - "animations": ["imageio"], - # Documentation: - "doc": ["sphinx", "sphinx_rtd_theme", "sphinxcontrib-apidoc"], - # Other - "geoviews": ["geoviews"], - "holoviews": ["holoviews"], - "hvplot": ["hvplot", "geoviews", "cartopy", "holoviews"], - "overlays": ["pycoast", "pydecorate"], - "satpos_from_tle": ["skyfield", "astropy"], - "tests": test_requires, -} -all_extras = [] -for extra_deps in extras_require.values(): - all_extras.extend(extra_deps) -extras_require["all"] = list(set(all_extras)) - - -def _config_data_files(base_dirs, extensions=(".cfg", )): - """Find all subdirectory configuration files. - - Searches each base directory relative to this setup.py file and finds - all files ending in the extensions provided. - - :param base_dirs: iterable of relative base directories to search - :param extensions: iterable of file extensions to include (with '.' prefix) - :returns: list of 2-element tuples compatible with `setuptools.setup` - """ - data_files = [] - pkg_root = os.path.realpath(os.path.dirname(__file__)) + "/" - for base_dir in base_dirs: - new_data_files = [] - for ext in extensions: - configs = glob(os.path.join(pkg_root, base_dir, "*" + ext)) - configs = [c.replace(pkg_root, "") for c in configs] - new_data_files.extend(configs) - data_files.append((base_dir, new_data_files)) - - return data_files - - -entry_points = { - "console_scripts": [ - "satpy_retrieve_all_aux_data=satpy.aux_download:retrieve_all_cmd", - ], -} - - -NAME = "satpy" -with open("README.rst", "r") as readme: - README = readme.read() - -setup(name=NAME, - description="Python package for earth-observing satellite data processing", - long_description=README, - author="The Pytroll Team", - author_email="pytroll@googlegroups.com", - classifiers=["Development Status :: 5 - Production/Stable", - "Intended Audience :: Science/Research", - "License :: OSI Approved :: GNU General Public License v3 " + - "or later (GPLv3+)", - "Operating System :: OS Independent", - "Programming Language :: Python", - "Topic :: Scientific/Engineering"], - url="https://github.com/pytroll/satpy", - download_url="https://pypi.python.org/pypi/satpy", - project_urls={ - "Bug Tracker": "https://github.com/pytroll/satpy/issues", - "Documentation": "https://satpy.readthedocs.io/en/stable/", - "Source Code": "https://github.com/pytroll/satpy", - "Organization": "https://pytroll.github.io/", - "Slack": "https://pytroll.slack.com/", - "Twitter": "https://twitter.com/hashtag/satpy?src=hashtag_click", - "Release Notes": "https://github.com/pytroll/satpy/blob/main/CHANGELOG.md", - "Mastodon": "https://fosstodon.org/tags/satpy", - }, - packages=find_packages(), - # Always use forward '/', even on Windows - # See https://setuptools.readthedocs.io/en/latest/userguide/datafiles.html#data-files-support - package_data={"satpy": ["etc/geo_image.cfg", - "etc/areas.yaml", - "etc/satpy.cfg", - "etc/himawari-8.cfg", - "etc/eps_avhrrl1b_6.5.xml", - "etc/readers/*.yaml", - "etc/writers/*.yaml", - "etc/composites/*.yaml", - "etc/enhancements/*.cfg", - "etc/enhancements/*.yaml", - "tests/etc/readers/*.yaml", - "tests/etc/composites/*.yaml", - "tests/etc/writers/*.yaml", - ]}, - zip_safe=False, - install_requires=requires, - python_requires=">=3.9", - extras_require=extras_require, - entry_points=entry_points, - ) From 
8a54645041884bacf02a3209e2d07f95deaa62c9 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 20 Mar 2024 11:50:02 +0100 Subject: [PATCH 1200/1416] Update gitignore --- .gitignore | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.gitignore b/.gitignore index 8990fa1d46..4803714590 100644 --- a/.gitignore +++ b/.gitignore @@ -75,3 +75,9 @@ doc/source/_build/* satpy/version.py doc/source/api/*.rst doc/source/reader_table.rst + +# lock files +*.lock + +# rye files +.python-version From 30a0e6a1159ffe684194dbcbbdd55401272104d9 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Fri, 22 Mar 2024 10:15:28 +0100 Subject: [PATCH 1201/1416] Add Data Store to EUMETSAT part --- doc/source/data_download.rst | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/doc/source/data_download.rst b/doc/source/data_download.rst index b8742fac96..8cabacd58a 100644 --- a/doc/source/data_download.rst +++ b/doc/source/data_download.rst @@ -72,7 +72,8 @@ NASA VIIRS Atmosphere SIPS * `Resource Description `__ * Associated Readers: ``viirs_l1b`` -EUMETSAT Data Center --------------------- +EUMETSAT Data Store and Data Center +----------------------------------- -* `Data Ordering `__ +* EUMETSAT's primary source for data is the `Data Store `__ +* Some products remain available on the `Earth Observation Portal `__ From 23e150c4d99666993e1c3520d7005b2cc155da96 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 25 Mar 2024 19:00:08 +0100 Subject: [PATCH 1202/1416] Use flags from file --- satpy/readers/olci_nc.py | 32 +++++++---- satpy/tests/reader_tests/test_olci_nc.py | 67 ++++++++++++++++++++++++ 2 files changed, 88 insertions(+), 11 deletions(-) diff --git a/satpy/readers/olci_nc.py b/satpy/readers/olci_nc.py index 84b21c3284..649984add1 100644 --- a/satpy/readers/olci_nc.py +++ b/satpy/readers/olci_nc.py @@ -70,17 +70,27 @@ class BitFlags: def __init__(self, value, flag_list=None): """Init the flags.""" self._value = value - flag_list = flag_list or ["INVALID", "WATER", "LAND", "CLOUD", "SNOW_ICE", - "INLAND_WATER", "TIDAL", "COSMETIC", "SUSPECT", - "HISOLZEN", "SATURATED", "MEGLINT", "HIGHGLINT", - "WHITECAPS", "ADJAC", "WV_FAIL", "PAR_FAIL", - "AC_FAIL", "OC4ME_FAIL", "OCNN_FAIL", - "Extra_1", - "KDM_FAIL", - "Extra_2", - "CLOUD_AMBIGUOUS", "CLOUD_MARGIN", "BPAC_ON", "WHITE_SCATT", - "LOWRW", "HIGHRW"] - self.meaning = {f: i for i, f in enumerate(flag_list)} + + if flag_list is None: + try: + meanings = value.attrs["flag_meanings"].split() + masks = value.attrs["flag_masks"] + except AttributeError: + meanings = ["INVALID", "WATER", "LAND", "CLOUD", "SNOW_ICE", + "INLAND_WATER", "TIDAL", "COSMETIC", "SUSPECT", + "HISOLZEN", "SATURATED", "MEGLINT", "HIGHGLINT", + "WHITECAPS", "ADJAC", "WV_FAIL", "PAR_FAIL", + "AC_FAIL", "OC4ME_FAIL", "OCNN_FAIL", + "Extra_1", + "KDM_FAIL", + "Extra_2", + "CLOUD_AMBIGUOUS", "CLOUD_MARGIN", "BPAC_ON", "WHITE_SCATT", + "LOWRW", "HIGHRW"] + self.meaning = {meaning: mask for mask, meaning in enumerate(meanings)} + else: + self.meaning = {meaning: int(np.log2(mask)) for meaning, mask in zip(meanings, masks)} + else: + self.meaning = {meaning: mask for mask, meaning in enumerate(flag_list)} def __getitem__(self, item): """Get the item.""" diff --git a/satpy/tests/reader_tests/test_olci_nc.py b/satpy/tests/reader_tests/test_olci_nc.py index 2f37fb2098..fe03521635 100644 --- a/satpy/tests/reader_tests/test_olci_nc.py +++ b/satpy/tests/reader_tests/test_olci_nc.py @@ -274,3 +274,70 @@ def test_bitflags(self): False, False, True, True, False, False, True, 
False]) assert all(mask == expected) + + def test_bitflags_with_non_linear_meanings(self): + """Test reading bitflags from DataArray attributes.""" + from functools import reduce + + import numpy as np + import xarray as xr + + from satpy.readers.olci_nc import BitFlags + + flag_masks = [1, 2, 4, 8, 4194304, 8388608, 16777216, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, + 32768, 65536, 131072, 262144, 524288, 1048576, 2097152, 33554432, 67108864, 134217728, 268435456, + 536870912, 4294967296, 8589934592, 17179869184, 34359738368, 68719476736, 137438953472, + 274877906944, 549755813888, 1099511627776, 2199023255552, 4398046511104, 8796093022208, + 17592186044416, 35184372088832, 70368744177664, 140737488355328, 281474976710656, 562949953421312, + 1125899906842624, 2251799813685248, 4503599627370496, 9007199254740992, 18014398509481984, + 36028797018963968] + flag_meanings = ("INVALID WATER LAND CLOUD TURBID_ATM CLOUD_AMBIGUOUS CLOUD_MARGIN SNOW_ICE INLAND_WATER " + "COASTLINE TIDAL COSMETIC SUSPECT HISOLZEN SATURATED MEGLINT HIGHGLINT WHITECAPS ADJAC " + "WV_FAIL PAR_FAIL AC_FAIL OC4ME_FAIL OCNN_FAIL KDM_FAIL BPAC_ON WHITE_SCATT LOWRW HIGHRW " + "IOP_LSD_FAIL ANNOT_ANGSTROM ANNOT_AERO_B ANNOT_ABSO_D ANNOT_ACLIM ANNOT_ABSOA ANNOT_MIXR1 " + "ANNOT_DROUT ANNOT_TAU06 RWNEG_O1 RWNEG_O2 RWNEG_O3 RWNEG_O4 RWNEG_O5 RWNEG_O6 RWNEG_O7 " + "RWNEG_O8 RWNEG_O9 RWNEG_O10 RWNEG_O11 RWNEG_O12 RWNEG_O16 RWNEG_O17 RWNEG_O18 RWNEG_O21") + + bits = np.array([1 << x for x in range(int(np.log2(max(flag_masks))) + 1)]) + bits_array = xr.DataArray(bits, attrs=dict(flag_masks=flag_masks, flag_meanings=flag_meanings)) + bflags = BitFlags(bits_array) + + items = ["INVALID", "TURBID_ATM"] + mask = reduce(np.logical_or, [bflags[item] for item in items]) + + assert mask[0].item() is True + assert any(mask[1:22]) is False + assert mask[22].item() is True + assert any(mask[23:]) is False + + + def test_bitflags_with_custom_flag_list(self): + """Test the BitFlags class providing a flag list.""" + from functools import reduce + + import numpy as np + + from satpy.readers.olci_nc import BitFlags + flag_list = ["INVALID", "WATER", "LAND", "CLOUD", "SNOW_ICE", + "INLAND_WATER", "TIDAL", "COSMETIC", "SUSPECT", "HISOLZEN", + "SATURATED", "MEGLINT", "HIGHGLINT", "WHITECAPS", + "ADJAC", "WV_FAIL", "PAR_FAIL", "AC_FAIL", "OC4ME_FAIL", + "OCNN_FAIL", "Extra_1", "KDM_FAIL", "Extra_2", + "CLOUD_AMBIGUOUS", "CLOUD_MARGIN", "BPAC_ON", + "WHITE_SCATT", "LOWRW", "HIGHRW"] + + bits = np.array([1 << x for x in range(len(flag_list))]) + + bflags = BitFlags(bits, flag_list) + + items = ["INVALID", "SNOW_ICE", "INLAND_WATER", "SUSPECT", + "AC_FAIL", "CLOUD", "HISOLZEN", "OCNN_FAIL", + "CLOUD_MARGIN", "CLOUD_AMBIGUOUS", "LOWRW", "LAND"] + + mask = reduce(np.logical_or, [bflags[item] for item in items]) + expected = np.array([True, False, True, True, True, True, False, + False, True, True, False, False, False, False, + False, False, False, True, False, True, False, + False, False, True, True, False, False, True, + False]) + assert all(mask == expected) From 3aa2f7915659302d3a84872dba7227c6e704e547 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 26 Mar 2024 08:40:59 +0100 Subject: [PATCH 1203/1416] Fix failing case --- satpy/readers/olci_nc.py | 2 +- satpy/tests/reader_tests/test_olci_nc.py | 34 +++++++++++++++++++++++- 2 files changed, 34 insertions(+), 2 deletions(-) diff --git a/satpy/readers/olci_nc.py b/satpy/readers/olci_nc.py index 649984add1..a6637b4b8e 100644 --- a/satpy/readers/olci_nc.py +++ 
b/satpy/readers/olci_nc.py @@ -75,7 +75,7 @@ def __init__(self, value, flag_list=None): try: meanings = value.attrs["flag_meanings"].split() masks = value.attrs["flag_masks"] - except AttributeError: + except (AttributeError, KeyError): meanings = ["INVALID", "WATER", "LAND", "CLOUD", "SNOW_ICE", "INLAND_WATER", "TIDAL", "COSMETIC", "SUSPECT", "HISOLZEN", "SATURATED", "MEGLINT", "HIGHGLINT", diff --git a/satpy/tests/reader_tests/test_olci_nc.py b/satpy/tests/reader_tests/test_olci_nc.py index fe03521635..2834578176 100644 --- a/satpy/tests/reader_tests/test_olci_nc.py +++ b/satpy/tests/reader_tests/test_olci_nc.py @@ -275,7 +275,7 @@ def test_bitflags(self): False]) assert all(mask == expected) - def test_bitflags_with_non_linear_meanings(self): + def test_bitflags_with_flags_from_array(self): """Test reading bitflags from DataArray attributes.""" from functools import reduce @@ -310,6 +310,38 @@ def test_bitflags_with_non_linear_meanings(self): assert mask[22].item() is True assert any(mask[23:]) is False + def test_bitflags_with_dataarray_without_flags(self): + """Test the BitFlags class.""" + from functools import reduce + + import numpy as np + import xarray as xr + + from satpy.readers.olci_nc import BitFlags + flag_list = ["INVALID", "WATER", "LAND", "CLOUD", "SNOW_ICE", + "INLAND_WATER", "TIDAL", "COSMETIC", "SUSPECT", "HISOLZEN", + "SATURATED", "MEGLINT", "HIGHGLINT", "WHITECAPS", + "ADJAC", "WV_FAIL", "PAR_FAIL", "AC_FAIL", "OC4ME_FAIL", + "OCNN_FAIL", "Extra_1", "KDM_FAIL", "Extra_2", + "CLOUD_AMBIGUOUS", "CLOUD_MARGIN", "BPAC_ON", + "WHITE_SCATT", "LOWRW", "HIGHRW"] + + bits = np.array([1 << x for x in range(len(flag_list))]) + + bflags = BitFlags(xr.DataArray(bits)) + + items = ["INVALID", "SNOW_ICE", "INLAND_WATER", "SUSPECT", + "AC_FAIL", "CLOUD", "HISOLZEN", "OCNN_FAIL", + "CLOUD_MARGIN", "CLOUD_AMBIGUOUS", "LOWRW", "LAND"] + + mask = reduce(np.logical_or, [bflags[item] for item in items]) + expected = np.array([True, False, True, True, True, True, False, + False, True, True, False, False, False, False, + False, False, False, True, False, True, False, + False, False, True, True, False, False, True, + False]) + assert all(mask == expected) + def test_bitflags_with_custom_flag_list(self): """Test the BitFlags class providing a flag list.""" From 5a9e08db59a911394c84b63e5f0ffe958580ffe4 Mon Sep 17 00:00:00 2001 From: Isotr0py <2037008807@qq.com> Date: Fri, 29 Mar 2024 13:55:58 +0800 Subject: [PATCH 1204/1416] add GOCI-II level2 nc support --- satpy/etc/readers/goci2_l2_nc.yaml | 319 +++++++++++++++++++++++++++++ satpy/readers/goci2_l2_nc.py | 87 ++++++++ 2 files changed, 406 insertions(+) create mode 100644 satpy/etc/readers/goci2_l2_nc.yaml create mode 100644 satpy/readers/goci2_l2_nc.py diff --git a/satpy/etc/readers/goci2_l2_nc.yaml b/satpy/etc/readers/goci2_l2_nc.yaml new file mode 100644 index 0000000000..4394ec00d6 --- /dev/null +++ b/satpy/etc/readers/goci2_l2_nc.yaml @@ -0,0 +1,319 @@ +reader: + name: goci2_l2_nc + short_name: GOCI-II L2 NetCDF4 + long_name: GK-2B GOCI-II Level 2 products in netCDF4 format from NOSC + status: Beta + supports_fsspec: true + sensors: ['goci2'] + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + group_keys: ['start_time', 'platform_shortname', "area"] + +file_types: + goci_l2_kd: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_Kd.nc' + - 
'{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_Kd.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_Kd.nc' + + goci_l2_zsd: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_Zsd.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_Zsd.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_Zsd.nc' + + goci_l2_chl: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_Chl.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_Chl.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_Chl.nc' + + goci_l2_cdom: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_CDOM.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_CDOM.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_CDOM.nc' + + goci_l2_tss: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_TSS.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_TSS.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_TSS.nc' + + goci_l2_ac: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_AC.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_AC.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_AC.nc' + +datasets: +# --- Navigation Data --- + latitude: + name: latitude + file_type: [goci_l2_kd, goci_l2_zsd, goci_l2_chl, goci_l2_cdom, goci_l2_tss, goci_l2_ac] + file_key: latitude + standard_name: latitude + units: degrees_north + + longitude: + name: longitude + file_type: [goci_l2_kd, goci_l2_zsd, goci_l2_chl, goci_l2_cdom, goci_l2_tss, goci_l2_ac] + file_key: longitude + standard_name: longitude + units: degrees_east + +# --- Ocean Color Products --- + diffuse_attenuation_coefficient: + name: Kd + file_type: goci_l2_kd + file_key: Kd + coordinates: [longitude, latitude] + + Secchi_disk_depth: + name: Zsd + file_type: goci_l2_zsd + file_key: Zsd + 
coordinates: [longitude, latitude] + + Chlorophyll-a_concentration: + name: Chl + file_type: goci_l2_chl + file_key: Chl + coordinates: [longitude, latitude] + + Colored_Dissolved_Organic_Matter: + name: CDOM + file_type: goci_l2_cdom + file_key: CDOM + coordinates: [longitude, latitude] + + Total_Suspended_Sediments_concentration: + name: TSS + file_type: goci_l2_tss + file_key: TSS + coordinates: [longitude, latitude] + +# --- Atomspheric Correction Products --- + # --- Rayleigh-corrected reflectance --- + RhoC_380: + name: RhoC_380 + sensor: goci2 + wavelength: [0.37, 0.38, 0.39] + long_name: Rayleigh-corrected reflectance at 380 nm + file_type: goci_l2_ac + file_key: RhoC_380 + coordinates: [longitude, latitude] + + RhoC_412: + name: RhoC_412 + sensor: goci2 + wavelength: [0.402, 0.412, 0.422] + long_name: Rayleigh-corrected reflectance at 412 nm + file_type: goci_l2_ac + file_key: RhoC_412 + coordinates: [longitude, latitude] + + RhoC_443: + name: RhoC_443 + sensor: goci2 + wavelength: [0.433, 0.443, 0.453] + long_name: Rayleigh-corrected reflectance at 443 nm + file_type: goci_l2_ac + file_key: RhoC_443 + coordinates: [longitude, latitude] + + RhoC_490: + name: RhoC_490 + sensor: goci2 + wavelength: [0.48, 0.49, 0.50] + long_name: Rayleigh-corrected reflectance at 490 nm + file_type: goci_l2_ac + file_key: RhoC_490 + coordinates: [longitude, latitude] + + RhoC_510: + name: RhoC_510 + sensor: goci2 + wavelength: [0.50, 0.51, 0.52] + long_name: Rayleigh-corrected reflectance at 510 nm + file_type: goci_l2_ac + file_key: RhoC_510 + coordinates: [longitude, latitude] + + RhoC_555: + name: RhoC_555 + sensor: goci2 + wavelength: [0.545, 0.555, 0.565] + long_name: Rayleigh-corrected reflectance at 555 nm + file_type: goci_l2_ac + file_key: RhoC_555 + coordinates: [longitude, latitude] + + RhoC_620: + name: RhoC_620 + sensor: goci2 + wavelength: [0.61, 0.62, 0.63] + long_name: Rayleigh-corrected reflectance at 620 nm + file_type: goci_l2_ac + file_key: RhoC_620 + coordinates: [longitude, latitude] + + RhoC_660: + name: RhoC_660 + sensor: goci2 + wavelength: [0.65, 0.66, 0.67] + long_name: Rayleigh-corrected reflectance at 660 nm + file_type: goci_l2_ac + file_key: RhoC_660 + coordinates: [longitude, latitude] + + RhoC_680: + name: RhoC_680 + sensor: goci2 + wavelength: [0.675, 0.680, 0.685] + long_name: Rayleigh-corrected reflectance at 680 nm + file_type: goci_l2_ac + file_key: RhoC_680 + coordinates: [longitude, latitude] + + RhoC_709: + name: RhoC_709 + sensor: goci2 + wavelength: [0.704, 0.709, 0.714] + long_name: Rayleigh-corrected reflectance at 709 nm + file_type: goci_l2_ac + file_key: RhoC_709 + coordinates: [longitude, latitude] + + RhoC_745: + name: RhoC_745 + sensor: goci2 + wavelength: [0.735, 0.745, 0.755] + long_name: Rayleigh-corrected reflectance at 745 nm + file_type: goci_l2_ac + file_key: RhoC_745 + coordinates: [longitude, latitude] + + RhoC_865: + name: RhoC_865 + sensor: goci2 + wavelength: [0.845, 0.865, 0.885] + long_name: Rayleigh-corrected reflectance at 865 nm + file_type: goci_l2_ac + file_key: RhoC_865 + coordinates: [longitude, latitude] + + # --- Remote sensing reflectance --- + Rrs_380: + name: Rrs_380 + sensor: goci2 + wavelength: [0.37, 0.38, 0.39] + long_name: Remote sensing reflectance at 380 nm, KOSC standard algorithm + file_type: goci_l2_ac + file_key: Rrs_380 + coordinates: [longitude, latitude] + + Rrs_412: + name: Rrs_412 + sensor: goci2 + wavelength: [0.402, 0.412, 0.422] + long_name: Remote sensing reflectance at 412 nm, KOSC standard algorithm + 
file_type: goci_l2_ac + file_key: Rrs_412 + coordinates: [longitude, latitude] + + Rrs_443: + name: Rrs_443 + sensor: goci2 + wavelength: [0.433, 0.443, 0.453] + long_name: Remote sensing reflectance at 443 nm, KOSC standard algorithm + file_type: goci_l2_ac + file_key: Rrs_443 + coordinates: [longitude, latitude] + + Rrs_490: + name: Rrs_490 + sensor: goci2 + wavelength: [0.48, 0.49, 0.50] + long_name: Remote sensing reflectance at 490 nm, KOSC standard algorithm + file_type: goci_l2_ac + file_key: Rrs_490 + coordinates: [longitude, latitude] + + Rrs_510: + name: Rrs_510 + sensor: goci2 + wavelength: [0.50, 0.51, 0.52] + long_name: Remote sensing reflectance at 510 nm, KOSC standard algorithm + file_type: goci_l2_ac + file_key: Rrs_510 + coordinates: [longitude, latitude] + + Rrs_555: + name: Rrs_555 + sensor: goci2 + wavelength: [0.545, 0.555, 0.565] + long_name: Remote sensing reflectance at 555 nm, KOSC standard algorithm + file_type: goci_l2_ac + file_key: Rrs_555 + coordinates: [longitude, latitude] + + Rrs_620: + name: Rrs_620 + sensor: goci2 + wavelength: [0.61, 0.62, 0.63] + long_name: Remote sensing reflectance at 620 nm, KOSC standard algorithm + file_type: goci_l2_ac + file_key: Rrs_620 + coordinates: [longitude, latitude] + + Rrs_660: + name: Rrs_660 + sensor: goci2 + wavelength: [0.65, 0.66, 0.67] + long_name: Remote sensing reflectance at 660 nm, KOSC standard algorithm + file_type: goci_l2_ac + file_key: Rrs_660 + coordinates: [longitude, latitude] + + Rrs_680: + name: Rrs_680 + sensor: goci2 + wavelength: [0.675, 0.680, 0.685] + long_name: Remote sensing reflectance at 680 nm, KOSC standard algorithm + file_type: goci_l2_ac + file_key: Rrs_680 + coordinates: [longitude, latitude] + + Rrs_709: + name: Rrs_709 + sensor: goci2 + wavelength: [0.704, 0.709, 0.714] + long_name: Remote sensing reflectance at 709 nm, KOSC standard algorithm + file_type: goci_l2_ac + file_key: Rrs_709 + coordinates: [longitude, latitude] + + Rrs_745: + name: Rrs_745 + sensor: goci2 + wavelength: [0.735, 0.745, 0.755] + long_name: Remote sensing reflectance at 745 nm, KOSC standard algorithm + file_type: goci_l2_ac + file_key: Rrs_745 + coordinates: [longitude, latitude] + + Rrs_865: + name: Rrs_865 + sensor: goci2 + wavelength: [0.845, 0.865, 0.885] + long_name: Remote sensing reflectance at 865 nm, KOSC standard algorithm + file_type: goci_l2_ac + file_key: Rrs_865 + coordinates: [longitude, latitude] + diff --git a/satpy/readers/goci2_l2_nc.py b/satpy/readers/goci2_l2_nc.py new file mode 100644 index 0000000000..e6fb356711 --- /dev/null +++ b/satpy/readers/goci2_l2_nc.py @@ -0,0 +1,87 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2023 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +""" +Reader for GK-2B GOCI-II L2 products from NOSC. 
+""" + +import logging +from datetime import datetime + +import xarray as xr + +from satpy.readers.netcdf_utils import NetCDF4FileHandler + +logger = logging.getLogger(__name__) + + +class GOCI2L2NCFileHandler(NetCDF4FileHandler): + """File handler for GOCI-II L2 official data in netCDF format.""" + + def __init__(self, filename, filename_info, filetype_info): + """Initialize the reader.""" + super().__init__(filename, filename_info, filetype_info) + self.slot = filename_info.get("slot", None) + + self.attrs = self["/attrs"] + navigation = self["navigation_data"] + if filetype_info["file_type"] == "goci_l2_ac": + Rhoc = self["geophysical_data/RhoC"] + Rrs = self["geophysical_data/Rrs"] + self.nc = xr.merge([Rhoc, Rrs, navigation]) + else: + self.nc = xr.merge(self["geophysical_data"], navigation) + + self.sensor = self.attrs["instrument"].lower() + self.nlines = self.nc.sizes["number_of_lines"] + self.ncols = self.nc.sizes["number_of_columns"] + if self.nlines != self.attrs["number_of_lines"]: + logger.warning( + "number_of_lines mismatched between metadata and data: " + f"{self.nlines} != {self.nc.sizes['number_of_lines']}" + ) + if self.ncols != self.attrs["pixels_per_line"]: + logger.warning( + "number_of_columns mismatched between metadata and data: " + f"{self.ncols} != {self.nc.sizes['pixels_per_line']}" + ) + self.platform_shortname = filename_info["platform"] + self.observation_area = filename_info["coverage"] + + @property + def start_time(self): + """Start timestamp of the dataset.""" + dt = self.attrs["observation_start_time"] + return datetime.strptime(dt, "%Y%m%d_%H%M%S") + + @property + def end_time(self): + """End timestamp of the dataset.""" + dt = self.attrs["observation_end_time"] + return datetime.strptime(dt, "%Y%m%d_%H%M%S") + + def get_dataset(self, key, info): + """Load a dataset.""" + var = info["file_key"] + logger.debug("Reading in get_dataset %s.", var) + variable = self.nc[var] + + # Data has 'Latitude' and 'Longitude' coords, these must be replaced. 
+ variable = variable.rename({"number_of_lines": "y", "pixels_per_line": "x"}) + + variable.attrs.update(key.to_dict()) + return variable From 30fb1d503ed64a9d62791283745a9d20a3507642 Mon Sep 17 00:00:00 2001 From: Isotr0py <2037008807@qq.com> Date: Fri, 29 Mar 2024 17:54:31 +0800 Subject: [PATCH 1205/1416] fix typos --- satpy/readers/goci2_l2_nc.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/satpy/readers/goci2_l2_nc.py b/satpy/readers/goci2_l2_nc.py index e6fb356711..afe684c000 100644 --- a/satpy/readers/goci2_l2_nc.py +++ b/satpy/readers/goci2_l2_nc.py @@ -44,20 +44,20 @@ def __init__(self, filename, filename_info, filetype_info): Rrs = self["geophysical_data/Rrs"] self.nc = xr.merge([Rhoc, Rrs, navigation]) else: - self.nc = xr.merge(self["geophysical_data"], navigation) + self.nc = xr.merge([self["geophysical_data"], navigation]) self.sensor = self.attrs["instrument"].lower() self.nlines = self.nc.sizes["number_of_lines"] - self.ncols = self.nc.sizes["number_of_columns"] + self.ncols = self.nc.sizes["pixels_per_line"] if self.nlines != self.attrs["number_of_lines"]: logger.warning( "number_of_lines mismatched between metadata and data: " - f"{self.nlines} != {self.nc.sizes['number_of_lines']}" + f"{self.nlines} != {self.attrs['number_of_lines']}" ) - if self.ncols != self.attrs["pixels_per_line"]: + if self.ncols != self.attrs["number_of_columns"]: logger.warning( "number_of_columns mismatched between metadata and data: " - f"{self.ncols} != {self.nc.sizes['pixels_per_line']}" + f"{self.ncols} != {self.attrs['number_of_columns']}" ) self.platform_shortname = filename_info["platform"] self.observation_area = filename_info["coverage"] From 355c40d59c2843345e6f6cb32e87a094880e195b Mon Sep 17 00:00:00 2001 From: Isotr0py <2037008807@qq.com> Date: Sat, 30 Mar 2024 01:25:16 +0800 Subject: [PATCH 1206/1416] fix yaml reader and units --- satpy/etc/readers/goci2_l2_nc.yaml | 22 ++++++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/satpy/etc/readers/goci2_l2_nc.yaml b/satpy/etc/readers/goci2_l2_nc.yaml index 4394ec00d6..9dbef4fd97 100644 --- a/satpy/etc/readers/goci2_l2_nc.yaml +++ b/satpy/etc/readers/goci2_l2_nc.yaml @@ -5,8 +5,9 @@ reader: status: Beta supports_fsspec: true sensors: ['goci2'] - reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader - group_keys: ['start_time', 'platform_shortname', "area"] + reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader + # file pattern keys to sort files by with 'satpy.utils.group_files' + group_keys: ['start_time', 'platform_shortname', "slot"] file_types: goci_l2_kd: @@ -73,30 +74,35 @@ datasets: file_type: goci_l2_kd file_key: Kd coordinates: [longitude, latitude] + units: m-1 Secchi_disk_depth: name: Zsd file_type: goci_l2_zsd file_key: Zsd coordinates: [longitude, latitude] + units: m Chlorophyll-a_concentration: name: Chl file_type: goci_l2_chl file_key: Chl coordinates: [longitude, latitude] + units: mg m-3 Colored_Dissolved_Organic_Matter: name: CDOM file_type: goci_l2_cdom file_key: CDOM coordinates: [longitude, latitude] + units: m-1 Total_Suspended_Sediments_concentration: name: TSS file_type: goci_l2_tss file_key: TSS coordinates: [longitude, latitude] + units: g m-3 # --- Atomspheric Correction Products --- # --- Rayleigh-corrected reflectance --- @@ -217,6 +223,7 @@ datasets: file_type: goci_l2_ac file_key: Rrs_380 coordinates: [longitude, latitude] + units: sr-1 Rrs_412: name: Rrs_412 @@ -226,6 +233,7 @@ datasets: file_type: goci_l2_ac file_key: 
Rrs_412 coordinates: [longitude, latitude] + units: sr-1 Rrs_443: name: Rrs_443 @@ -235,6 +243,7 @@ datasets: file_type: goci_l2_ac file_key: Rrs_443 coordinates: [longitude, latitude] + units: sr-1 Rrs_490: name: Rrs_490 @@ -244,6 +253,7 @@ datasets: file_type: goci_l2_ac file_key: Rrs_490 coordinates: [longitude, latitude] + units: sr-1 Rrs_510: name: Rrs_510 @@ -253,6 +263,7 @@ datasets: file_type: goci_l2_ac file_key: Rrs_510 coordinates: [longitude, latitude] + units: sr-1 Rrs_555: name: Rrs_555 @@ -262,6 +273,7 @@ datasets: file_type: goci_l2_ac file_key: Rrs_555 coordinates: [longitude, latitude] + units: sr-1 Rrs_620: name: Rrs_620 @@ -271,6 +283,7 @@ datasets: file_type: goci_l2_ac file_key: Rrs_620 coordinates: [longitude, latitude] + units: sr-1 Rrs_660: name: Rrs_660 @@ -280,6 +293,7 @@ datasets: file_type: goci_l2_ac file_key: Rrs_660 coordinates: [longitude, latitude] + units: sr-1 Rrs_680: name: Rrs_680 @@ -289,6 +303,7 @@ datasets: file_type: goci_l2_ac file_key: Rrs_680 coordinates: [longitude, latitude] + units: sr-1 Rrs_709: name: Rrs_709 @@ -298,6 +313,7 @@ datasets: file_type: goci_l2_ac file_key: Rrs_709 coordinates: [longitude, latitude] + units: sr-1 Rrs_745: name: Rrs_745 @@ -307,6 +323,7 @@ datasets: file_type: goci_l2_ac file_key: Rrs_745 coordinates: [longitude, latitude] + units: sr-1 Rrs_865: name: Rrs_865 @@ -316,4 +333,5 @@ datasets: file_type: goci_l2_ac file_key: Rrs_865 coordinates: [longitude, latitude] + units: sr-1 From 12f8dfae00587f45522b40b89cf937b4acdabdba Mon Sep 17 00:00:00 2001 From: Isotr0py <2037008807@qq.com> Date: Sat, 30 Mar 2024 01:42:04 +0800 Subject: [PATCH 1207/1416] refactor navigation data gather --- satpy/readers/goci2_l2_nc.py | 31 +++++++++++++------------------ 1 file changed, 13 insertions(+), 18 deletions(-) diff --git a/satpy/readers/goci2_l2_nc.py b/satpy/readers/goci2_l2_nc.py index afe684c000..7172ce21cd 100644 --- a/satpy/readers/goci2_l2_nc.py +++ b/satpy/readers/goci2_l2_nc.py @@ -38,29 +38,24 @@ def __init__(self, filename, filename_info, filetype_info): self.slot = filename_info.get("slot", None) self.attrs = self["/attrs"] - navigation = self["navigation_data"] - if filetype_info["file_type"] == "goci_l2_ac": - Rhoc = self["geophysical_data/RhoC"] - Rrs = self["geophysical_data/Rrs"] - self.nc = xr.merge([Rhoc, Rrs, navigation]) - else: - self.nc = xr.merge([self["geophysical_data"], navigation]) + self.nc = self._merge_navigation_data(filetype_info) self.sensor = self.attrs["instrument"].lower() self.nlines = self.nc.sizes["number_of_lines"] self.ncols = self.nc.sizes["pixels_per_line"] - if self.nlines != self.attrs["number_of_lines"]: - logger.warning( - "number_of_lines mismatched between metadata and data: " - f"{self.nlines} != {self.attrs['number_of_lines']}" - ) - if self.ncols != self.attrs["number_of_columns"]: - logger.warning( - "number_of_columns mismatched between metadata and data: " - f"{self.ncols} != {self.attrs['number_of_columns']}" - ) self.platform_shortname = filename_info["platform"] - self.observation_area = filename_info["coverage"] + self.coverage = filename_info["coverage"] + + def _merge_navigation_data(self, filetype_info): + """Merge navigation data and geophysical data.""" + navigation = self["navigation_data"] + if filetype_info["file_type"] == "goci_l2_ac": + Rhoc = self["geophysical_data/RhoC"] + Rrs = self["geophysical_data/Rrs"] + data = xr.merge([Rhoc, Rrs, navigation]) + else: + data = xr.merge([self["geophysical_data"], navigation]) + return data @property def start_time(self): 
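With the handler above now funneling every product family through _merge_navigation_data, GOCI-II L2 files load through the standard Scene workflow. A minimal sketch: the granule name below is hypothetical (it merely follows the goci2_l2_nc file patterns), while "Chl" is the chlorophyll-a dataset declared in the yaml:

    from satpy import Scene

    # Hypothetical local-area chlorophyll granule; only the naming
    # pattern matters for reader and file-type selection.
    files = ["GK2B_GOCI2_L2_20240301_021530_LA_S007_Chl.nc"]

    scn = Scene(reader="goci2_l2_nc", filenames=files)
    scn.load(["Chl"])        # chlorophyll-a concentration, mg m-3
    print(scn["Chl"].dims)   # ('y', 'x') after the rename in get_dataset

The same few lines apply to the Kd, Zsd, CDOM, TSS and AC products, since every file type shares GOCI2L2NCFileHandler.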
From 389d454c79b0325eeb3d943215d5d801c94c559b Mon Sep 17 00:00:00 2001 From: Isotr0py <2037008807@qq.com> Date: Sat, 30 Mar 2024 01:55:17 +0800 Subject: [PATCH 1208/1416] fix docstring --- satpy/readers/goci2_l2_nc.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/satpy/readers/goci2_l2_nc.py b/satpy/readers/goci2_l2_nc.py index 7172ce21cd..65a4262ff6 100644 --- a/satpy/readers/goci2_l2_nc.py +++ b/satpy/readers/goci2_l2_nc.py @@ -17,6 +17,8 @@ # satpy. If not, see . """ Reader for GK-2B GOCI-II L2 products from NOSC. + +For more information about the data, see: """ import logging From a93d14a20483fb0e7361949e0375591eae815541 Mon Sep 17 00:00:00 2001 From: Isotr0py <2037008807@qq.com> Date: Sat, 30 Mar 2024 01:56:35 +0800 Subject: [PATCH 1209/1416] fix docstring again --- satpy/readers/goci2_l2_nc.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/satpy/readers/goci2_l2_nc.py b/satpy/readers/goci2_l2_nc.py index 65a4262ff6..7b548f179a 100644 --- a/satpy/readers/goci2_l2_nc.py +++ b/satpy/readers/goci2_l2_nc.py @@ -15,8 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . -""" -Reader for GK-2B GOCI-II L2 products from NOSC. +"""Reader for GK-2B GOCI-II L2 products from NOSC. For more information about the data, see: """ From 4b33c3419c3434446929fa71c1b131cc09d79841 Mon Sep 17 00:00:00 2001 From: Antonio Valentino Date: Fri, 29 Mar 2024 19:43:11 +0100 Subject: [PATCH 1210/1416] Replace the unmaintained dependency appdirs with platformdirs --- AUTHORS.md | 1 + continuous_integration/environment.yaml | 2 +- doc/rtd_environment.yml | 2 +- doc/source/config.rst | 6 +++--- satpy/_config.py | 4 ++-- satpy/readers/ahi_l1b_gridded_bin.py | 2 +- setup.py | 2 +- 7 files changed, 10 insertions(+), 9 deletions(-) diff --git a/AUTHORS.md b/AUTHORS.md index fb43d0168d..9c475355a2 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -90,3 +90,4 @@ The following people have made contributions to this project: - [Yufei Zhu (yufeizhu600)](https://github.com/yufeizhu600) - [Youva Aoun (YouvaEUMex)](https://github.com/YouvaEUMex) - [Will Sharpe (wjsharpe)](https://github.com/wjsharpe) +- [Antonio Valentino](https://github.com/avalentino) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index 4fc7a508f2..1dcf46c0cc 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -7,7 +7,7 @@ dependencies: - distributed - dask-image - donfig - - appdirs + - platformdirs - toolz - Cython - numba diff --git a/doc/rtd_environment.yml b/doc/rtd_environment.yml index 1e40cbb73a..f24aa09c4f 100644 --- a/doc/rtd_environment.yml +++ b/doc/rtd_environment.yml @@ -4,7 +4,7 @@ channels: dependencies: - python=3.10 - pip - - appdirs + - platformdirs - dask - dask-image - defusedxml diff --git a/doc/source/config.rst b/doc/source/config.rst index b1777c9751..2279e10fe5 100644 --- a/doc/source/config.rst +++ b/doc/source/config.rst @@ -35,7 +35,7 @@ locations: 3. ``~/.satpy/satpy.yaml`` 4. ``/satpy.yaml`` (see :ref:`config_path_setting` below) -The above ``user_config_dir`` is provided by the ``appdirs`` package and +The above ``user_config_dir`` is provided by the ``platformdirs`` package and differs by operating system. Typical user config directories are: * Mac OSX: ``~/Library/Preferences/satpy`` @@ -90,7 +90,7 @@ Directory where any files cached by Satpy will be stored. 
This directory is not necessarily cleared out by Satpy, but is rarely used without explicitly being enabled by the user. This defaults to a different path depending on your operating system following -the `appdirs `_ +the `platformdirs `_ "user cache dir". .. _config_cache_lonlats_setting: @@ -214,7 +214,7 @@ Data Directory Directory where any data Satpy needs to perform certain operations will be stored. This replaces the legacy ``SATPY_ANCPATH`` environment variable. This defaults to a different path depending on your operating system following the -`appdirs `_ +`platformdirs `_ "user data dir". .. _download_aux_setting: diff --git a/satpy/_config.py b/satpy/_config.py index 6a14f994a8..4bb7532581 100644 --- a/satpy/_config.py +++ b/satpy/_config.py @@ -29,7 +29,7 @@ from importlib.resources import files as impr_files from typing import Iterable -import appdirs +from platformdirs import AppDirs from donfig import Config from satpy._compat import cache @@ -40,7 +40,7 @@ # FIXME: Use package_resources? PACKAGE_CONFIG_PATH = os.path.join(BASE_PATH, "etc") -_satpy_dirs = appdirs.AppDirs(appname="satpy", appauthor="pytroll") +_satpy_dirs = AppDirs(appname="satpy", appauthor="pytroll") _CONFIG_DEFAULTS = { "tmp_dir": tempfile.gettempdir(), "cache_dir": _satpy_dirs.user_cache_dir, diff --git a/satpy/readers/ahi_l1b_gridded_bin.py b/satpy/readers/ahi_l1b_gridded_bin.py index 33289aee11..9c06e3c55b 100644 --- a/satpy/readers/ahi_l1b_gridded_bin.py +++ b/satpy/readers/ahi_l1b_gridded_bin.py @@ -37,7 +37,7 @@ import dask.array as da import numpy as np import xarray as xr -from appdirs import AppDirs +from platformdirs import AppDirs from pyresample import geometry from satpy.readers.file_handlers import BaseFileHandler diff --git a/setup.py b/setup.py index 2f467286d0..b4bc3bc841 100644 --- a/setup.py +++ b/setup.py @@ -24,7 +24,7 @@ requires = ["numpy >=1.21", "pillow", "pyresample >=1.24.0", "trollsift", "trollimage >=1.23", "pykdtree", "pyyaml >=5.1", "xarray >=0.14.1", - "dask[array] >=0.17.1", "pyproj>=2.2", "zarr", "donfig", "appdirs", + "dask[array] >=0.17.1", "pyproj>=2.2", "zarr", "donfig", "platformdirs", "packaging", "pooch", "pyorbital"] test_requires = ["behave", "h5py", "netCDF4", "pyhdf", "imageio", From 5a867814394059c05650f2d1a0c32346b5d54f1a Mon Sep 17 00:00:00 2001 From: Antonio Valentino Date: Sat, 30 Mar 2024 07:25:13 +0100 Subject: [PATCH 1211/1416] Fix imports sorting --- satpy/_config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/_config.py b/satpy/_config.py index 4bb7532581..fbfcb0c0d5 100644 --- a/satpy/_config.py +++ b/satpy/_config.py @@ -29,8 +29,8 @@ from importlib.resources import files as impr_files from typing import Iterable -from platformdirs import AppDirs from donfig import Config +from platformdirs import AppDirs from satpy._compat import cache From e2b1b3bf465f6cc56b3478c926db292cda524fe8 Mon Sep 17 00:00:00 2001 From: Isotr0py <2037008807@qq.com> Date: Sat, 30 Mar 2024 16:01:33 +0800 Subject: [PATCH 1212/1416] support kd product --- satpy/etc/readers/goci2_l2_nc.yaml | 134 ++++++++++++++++++++++++----- 1 file changed, 112 insertions(+), 22 deletions(-) diff --git a/satpy/etc/readers/goci2_l2_nc.yaml b/satpy/etc/readers/goci2_l2_nc.yaml index 9dbef4fd97..051bc3b517 100644 --- a/satpy/etc/readers/goci2_l2_nc.yaml +++ b/satpy/etc/readers/goci2_l2_nc.yaml @@ -44,7 +44,7 @@ file_types: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_TSS.nc' - 
'{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_TSS.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_TSS.nc' - + goci_l2_ac: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: @@ -69,13 +69,104 @@ datasets: units: degrees_east # --- Ocean Color Products --- - diffuse_attenuation_coefficient: - name: Kd + # --- Diffuse attenuation coefficient --- + Kd_380: + name: Kd_380 + long_name: Diffuse attenuation coefficient at 380 nm + file_type: goci_l2_kd + file_key: Kd_380 + coordinates: [longitude, latitude] + units: m-1 + + Kd_412: + name: Kd_412 + long_name: Diffuse attenuation coefficient at 412 nm + file_type: goci_l2_kd + file_key: Kd_412 + coordinates: [longitude, latitude] + units: m-1 + + Kd_443: + name: Kd_443 + long_name: Diffuse attenuation coefficient at 443 nm + file_type: goci_l2_kd + file_key: Kd_443 + coordinates: [longitude, latitude] + units: m-1 + + Kd_490: + name: Kd_490 + long_name: Diffuse attenuation coefficient at 490 nm + file_type: goci_l2_kd + file_key: Kd_490 + coordinates: [longitude, latitude] + units: m-1 + + Kd_510: + name: Kd_510 + long_name: Diffuse attenuation coefficient at 510 nm + file_type: goci_l2_kd + file_key: Kd_510 + coordinates: [longitude, latitude] + units: m-1 + + Kd_555: + name: Kd_555 + long_name: Diffuse attenuation coefficient at 555 nm + file_type: goci_l2_kd + file_key: Kd_555 + coordinates: [longitude, latitude] + units: m-1 + + Kd_620: + name: Kd_620 + long_name: Diffuse attenuation coefficient at 620 nm + file_type: goci_l2_kd + file_key: Kd_620 + coordinates: [longitude, latitude] + units: m-1 + + Kd_660: + name: Kd_660 + long_name: Diffuse attenuation coefficient at 660 nm file_type: goci_l2_kd - file_key: Kd + file_key: Kd_660 coordinates: [longitude, latitude] units: m-1 - + + Kd_680: + name: Kd_680 + long_name: Diffuse attenuation coefficient at 680 nm + file_type: goci_l2_kd + file_key: Kd_680 + coordinates: [longitude, latitude] + units: m-1 + + Kd_709: + name: Kd_709 + long_name: Diffuse attenuation coefficient at 709 nm + file_type: goci_l2_kd + file_key: Kd_709 + coordinates: [longitude, latitude] + units: m-1 + + Kd_745: + name: Kd_745 + long_name: Diffuse attenuation coefficient at 745 nm + file_type: goci_l2_kd + file_key: Kd_745 + coordinates: [longitude, latitude] + units: m-1 + + Kd_865: + name: Kd_865 + long_name: Diffuse attenuation coefficient at 865 nm + file_type: goci_l2_kd + file_key: Kd_865 + coordinates: [longitude, latitude] + units: m-1 + + # --- Other OC products --- Secchi_disk_depth: name: Zsd file_type: goci_l2_zsd @@ -159,7 +250,7 @@ datasets: file_type: goci_l2_ac file_key: RhoC_555 coordinates: [longitude, latitude] - + RhoC_620: name: RhoC_620 sensor: goci2 @@ -168,7 +259,7 @@ datasets: file_type: goci_l2_ac file_key: RhoC_620 coordinates: [longitude, latitude] - + RhoC_660: name: RhoC_660 sensor: goci2 @@ -177,7 +268,7 @@ datasets: file_type: goci_l2_ac file_key: RhoC_660 coordinates: [longitude, latitude] - + RhoC_680: name: RhoC_680 sensor: goci2 @@ -186,7 +277,7 @@ datasets: file_type: goci_l2_ac file_key: RhoC_680 coordinates: [longitude, latitude] - + RhoC_709: name: RhoC_709 sensor: goci2 @@ -195,7 +286,7 @@ datasets: file_type: goci_l2_ac file_key: RhoC_709 coordinates: [longitude, latitude] - + RhoC_745: name: RhoC_745 sensor: goci2 @@ -213,7 +304,7 @@ datasets: file_type: goci_l2_ac file_key: RhoC_865 
coordinates: [longitude, latitude] - + # --- Remote sensing reflectance --- Rrs_380: name: Rrs_380 @@ -234,7 +325,7 @@ datasets: file_key: Rrs_412 coordinates: [longitude, latitude] units: sr-1 - + Rrs_443: name: Rrs_443 sensor: goci2 @@ -244,7 +335,7 @@ datasets: file_key: Rrs_443 coordinates: [longitude, latitude] units: sr-1 - + Rrs_490: name: Rrs_490 sensor: goci2 @@ -254,7 +345,7 @@ datasets: file_key: Rrs_490 coordinates: [longitude, latitude] units: sr-1 - + Rrs_510: name: Rrs_510 sensor: goci2 @@ -264,7 +355,7 @@ datasets: file_key: Rrs_510 coordinates: [longitude, latitude] units: sr-1 - + Rrs_555: name: Rrs_555 sensor: goci2 @@ -274,7 +365,7 @@ datasets: file_key: Rrs_555 coordinates: [longitude, latitude] units: sr-1 - + Rrs_620: name: Rrs_620 sensor: goci2 @@ -284,7 +375,7 @@ datasets: file_key: Rrs_620 coordinates: [longitude, latitude] units: sr-1 - + Rrs_660: name: Rrs_660 sensor: goci2 @@ -294,7 +385,7 @@ datasets: file_key: Rrs_660 coordinates: [longitude, latitude] units: sr-1 - + Rrs_680: name: Rrs_680 sensor: goci2 @@ -304,7 +395,7 @@ datasets: file_key: Rrs_680 coordinates: [longitude, latitude] units: sr-1 - + Rrs_709: name: Rrs_709 sensor: goci2 @@ -314,7 +405,7 @@ datasets: file_key: Rrs_709 coordinates: [longitude, latitude] units: sr-1 - + Rrs_745: name: Rrs_745 sensor: goci2 @@ -324,7 +415,7 @@ datasets: file_key: Rrs_745 coordinates: [longitude, latitude] units: sr-1 - + Rrs_865: name: Rrs_865 sensor: goci2 @@ -334,4 +425,3 @@ datasets: file_key: Rrs_865 coordinates: [longitude, latitude] units: sr-1 - From cefeaa12c7650fede22f24b07f4baf73aa430b44 Mon Sep 17 00:00:00 2001 From: Isotr0py <2037008807@qq.com> Date: Sat, 30 Mar 2024 21:59:20 +0800 Subject: [PATCH 1213/1416] support IOP products --- satpy/etc/readers/goci2_l2_nc.yaml | 231 ++++++++++++++++++++++++++++- 1 file changed, 229 insertions(+), 2 deletions(-) diff --git a/satpy/etc/readers/goci2_l2_nc.yaml b/satpy/etc/readers/goci2_l2_nc.yaml index 051bc3b517..b963c00004 100644 --- a/satpy/etc/readers/goci2_l2_nc.yaml +++ b/satpy/etc/readers/goci2_l2_nc.yaml @@ -52,18 +52,25 @@ file_types: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_AC.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_AC.nc' + goci_l2_iop: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_IOP.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_IOP.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_IOP.nc' + datasets: # --- Navigation Data --- latitude: name: latitude - file_type: [goci_l2_kd, goci_l2_zsd, goci_l2_chl, goci_l2_cdom, goci_l2_tss, goci_l2_ac] + file_type: [goci_l2_kd, goci_l2_zsd, goci_l2_chl, goci_l2_cdom, goci_l2_tss, goci_l2_ac, goci_l2_iop] file_key: latitude standard_name: latitude units: degrees_north longitude: name: longitude - file_type: [goci_l2_kd, goci_l2_zsd, goci_l2_chl, goci_l2_cdom, goci_l2_tss, goci_l2_ac] + file_type: [goci_l2_kd, goci_l2_zsd, goci_l2_chl, goci_l2_cdom, goci_l2_tss, goci_l2_ac, goci_l2_iop] file_key: longitude standard_name: longitude units: degrees_east @@ -425,3 +432,223 @@ datasets: file_key: Rrs_865 
coordinates: [longitude, latitude] units: sr-1 + +# --- Inherent Optical Properties products --- + # --- Absorption coefficient --- + a_total_380: + name: a_total_380 + long_name: Spectral absorption coefficient at 380 nm, QAA version 6 + file_type: goci_l2_iop + file_key: a_total_380 + coordinates: [longitude, latitude] + units: m-1 + + a_total_412: + name: a_total_412 + long_name: Spectral absorption coefficient at 412 nm, QAA version 6 + file_type: goci_l2_iop + file_key: a_total_412 + coordinates: [longitude, latitude] + units: m-1 + + a_total_443: + name: a_total_443 + long_name: Spectral absorption coefficient at 443 nm, QAA version 6 + file_type: goci_l2_iop + file_key: a_total_443 + coordinates: [longitude, latitude] + units: m-1 + + a_total_490: + name: a_total_490 + long_name: Spectral absorption coefficient at 490 nm, QAA version 6 + file_type: goci_l2_iop + file_key: a_total_490 + coordinates: [longitude, latitude] + units: m-1 + + a_total_510: + name: a_total_510 + long_name: Spectral absorption coefficient at 510 nm, QAA version 6 + file_type: goci_l2_iop + file_key: a_total_510 + coordinates: [longitude, latitude] + units: m-1 + + a_total_555: + name: a_total_555 + long_name: Spectral absorption coefficient at 555 nm, QAA version 6 + file_type: goci_l2_iop + file_key: a_total_555 + coordinates: [longitude, latitude] + units: m-1 + + a_total_620: + name: a_total_620 + long_name: Spectral absorption coefficient at 620 nm, QAA version 6 + file_type: goci_l2_iop + file_key: a_total_620 + coordinates: [longitude, latitude] + units: m-1 + + a_total_660: + name: a_total_660 + long_name: Spectral absorption coefficient at 660 nm, QAA version 6 + file_type: goci_l2_iop + file_key: a_total_660 + coordinates: [longitude, latitude] + units: m-1 + + a_total_680: + name: a_total_680 + long_name: Spectral absorption coefficient at 680 nm, QAA version 6 + file_type: goci_l2_iop + file_key: a_total_680 + coordinates: [longitude, latitude] + units: m-1 + + a_total_709: + name: a_total_709 + long_name: Spectral absorption coefficient at 709 nm, QAA version 6 + file_type: goci_l2_iop + file_key: a_total_709 + coordinates: [longitude, latitude] + units: m-1 + + a_total_745: + name: a_total_745 + long_name: Spectral absorption coefficient at 745 nm, QAA version 6 + file_type: goci_l2_iop + file_key: a_total_745 + coordinates: [longitude, latitude] + units: m-1 + + a_total_865: + name: a_total_865 + long_name: Spectral absorption coefficient at 865 nm, QAA version 6 + file_type: goci_l2_iop + file_key: a_total_865 + coordinates: [longitude, latitude] + units: m-1 + + # --- Backscattering coefficient --- + bb_total_380: + name: bb_total_380 + long_name: Spectral backscattering coefficient at 380 nm, QAA version 6 + file_type: goci_l2_iop + file_key: bb_total_380 + coordinates: [longitude, latitude] + units: m-1 + + bb_total_412: + name: bb_total_412 + long_name: Spectral backscattering coefficient at 412 nm, QAA version 6 + file_type: goci_l2_iop + file_key: bb_total_412 + coordinates: [longitude, latitude] + units: m-1 + + bb_total_443: + name: bb_total_443 + long_name: Spectral backscattering coefficient at 443 nm, QAA version 6 + file_type: goci_l2_iop + file_key: bb_total_443 + coordinates: [longitude, latitude] + units: m-1 + + bb_total_490: + name: bb_total_490 + long_name: Spectral backscattering coefficient at 490 nm, QAA version 6 + file_type: goci_l2_iop + file_key: bb_total_490 + coordinates: [longitude, latitude] + units: m-1 + + bb_total_510: + name: bb_total_510 + long_name: Spectral 
backscattering coefficient at 510 nm, QAA version 6 + file_type: goci_l2_iop + file_key: bb_total_510 + coordinates: [longitude, latitude] + units: m-1 + + bb_total_555: + name: bb_total_555 + long_name: Spectral backscattering coefficient at 555 nm, QAA version 6 + file_type: goci_l2_iop + file_key: bb_total_555 + coordinates: [longitude, latitude] + units: m-1 + + bb_total_620: + name: bb_total_620 + long_name: Spectral backscattering coefficient at 620 nm, QAA version 6 + file_type: goci_l2_iop + file_key: bb_total_620 + coordinates: [longitude, latitude] + units: m-1 + + bb_total_660: + name: bb_total_660 + long_name: Spectral backscattering coefficient at 660 nm, QAA version 6 + file_type: goci_l2_iop + file_key: bb_total_660 + coordinates: [longitude, latitude] + units: m-1 + + bb_total_680: + name: bb_total_680 + long_name: Spectral backscattering coefficient at 680 nm, QAA version 6 + file_type: goci_l2_iop + file_key: bb_total_680 + coordinates: [longitude, latitude] + units: m-1 + + bb_total_709: + name: bb_total_709 + long_name: Spectral backscattering coefficient at 709 nm, QAA version 6 + file_type: goci_l2_iop + file_key: bb_total_709 + coordinates: [longitude, latitude] + units: m-1 + + bb_total_745: + name: bb_total_745 + long_name: Spectral backscattering coefficient at 745 nm, QAA version 6 + file_type: goci_l2_iop + file_key: bb_total_745 + coordinates: [longitude, latitude] + units: m-1 + + bb_total_865: + name: bb_total_865 + long_name: Spectral backscattering coefficient at 865 nm, QAA version 6 + file_type: goci_l2_iop + file_key: bb_total_865 + coordinates: [longitude, latitude] + units: m-1 + + # --- Other IOP output --- + a_dg_443: + name: a_dg_443 + long_name: Spectral absorption coefficient of detritus and gelbstoff at 443 nm, QAA version 6 + file_type: goci_l2_iop + file_key: a_dg_443 + coordinates: [longitude, latitude] + units: m-1 + + a_chl_443: + name: a_chl_443 + long_name: Spectral absorption coefficient of chlorophyll-a at 443 nm, QAA version 6 + file_type: goci_l2_iop + file_key: a_chl_443 + coordinates: [longitude, latitude] + units: m-1 + + bb_p_555: + name: bb_p_555 + long_name: Spectral backscattering coefficient of particle at 555 nm, QAA version 6 + file_type: goci_l2_iop + file_key: bb_p_555 + coordinates: [longitude, latitude] + units: m-1 \ No newline at end of file From cf26b7342fb8a08d400e1358437b2508fee9bd17 Mon Sep 17 00:00:00 2001 From: Isotr0py <2037008807@qq.com> Date: Sat, 30 Mar 2024 22:00:01 +0800 Subject: [PATCH 1214/1416] format yaml --- satpy/etc/readers/goci2_l2_nc.yaml | 46 +++++++++++++++--------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/satpy/etc/readers/goci2_l2_nc.yaml b/satpy/etc/readers/goci2_l2_nc.yaml index b963c00004..731553b9fd 100644 --- a/satpy/etc/readers/goci2_l2_nc.yaml +++ b/satpy/etc/readers/goci2_l2_nc.yaml @@ -450,7 +450,7 @@ datasets: file_key: a_total_412 coordinates: [longitude, latitude] units: m-1 - + a_total_443: name: a_total_443 long_name: Spectral absorption coefficient at 443 nm, QAA version 6 @@ -458,7 +458,7 @@ datasets: file_key: a_total_443 coordinates: [longitude, latitude] units: m-1 - + a_total_490: name: a_total_490 long_name: Spectral absorption coefficient at 490 nm, QAA version 6 @@ -482,7 +482,7 @@ datasets: file_key: a_total_555 coordinates: [longitude, latitude] units: m-1 - + a_total_620: name: a_total_620 long_name: Spectral absorption coefficient at 620 nm, QAA version 6 @@ -490,7 +490,7 @@ datasets: file_key: a_total_620 coordinates: [longitude, latitude] 
units: m-1 - + a_total_660: name: a_total_660 long_name: Spectral absorption coefficient at 660 nm, QAA version 6 @@ -498,7 +498,7 @@ datasets: file_key: a_total_660 coordinates: [longitude, latitude] units: m-1 - + a_total_680: name: a_total_680 long_name: Spectral absorption coefficient at 680 nm, QAA version 6 @@ -506,7 +506,7 @@ datasets: file_key: a_total_680 coordinates: [longitude, latitude] units: m-1 - + a_total_709: name: a_total_709 long_name: Spectral absorption coefficient at 709 nm, QAA version 6 @@ -514,7 +514,7 @@ datasets: file_key: a_total_709 coordinates: [longitude, latitude] units: m-1 - + a_total_745: name: a_total_745 long_name: Spectral absorption coefficient at 745 nm, QAA version 6 @@ -522,7 +522,7 @@ datasets: file_key: a_total_745 coordinates: [longitude, latitude] units: m-1 - + a_total_865: name: a_total_865 long_name: Spectral absorption coefficient at 865 nm, QAA version 6 @@ -530,7 +530,7 @@ datasets: file_key: a_total_865 coordinates: [longitude, latitude] units: m-1 - + # --- Backscattering coefficient --- bb_total_380: name: bb_total_380 @@ -539,7 +539,7 @@ datasets: file_key: bb_total_380 coordinates: [longitude, latitude] units: m-1 - + bb_total_412: name: bb_total_412 long_name: Spectral backscattering coefficient at 412 nm, QAA version 6 @@ -547,7 +547,7 @@ datasets: file_key: bb_total_412 coordinates: [longitude, latitude] units: m-1 - + bb_total_443: name: bb_total_443 long_name: Spectral backscattering coefficient at 443 nm, QAA version 6 @@ -563,7 +563,7 @@ datasets: file_key: bb_total_490 coordinates: [longitude, latitude] units: m-1 - + bb_total_510: name: bb_total_510 long_name: Spectral backscattering coefficient at 510 nm, QAA version 6 @@ -571,7 +571,7 @@ datasets: file_key: bb_total_510 coordinates: [longitude, latitude] units: m-1 - + bb_total_555: name: bb_total_555 long_name: Spectral backscattering coefficient at 555 nm, QAA version 6 @@ -579,7 +579,7 @@ datasets: file_key: bb_total_555 coordinates: [longitude, latitude] units: m-1 - + bb_total_620: name: bb_total_620 long_name: Spectral backscattering coefficient at 620 nm, QAA version 6 @@ -587,7 +587,7 @@ datasets: file_key: bb_total_620 coordinates: [longitude, latitude] units: m-1 - + bb_total_660: name: bb_total_660 long_name: Spectral backscattering coefficient at 660 nm, QAA version 6 @@ -595,7 +595,7 @@ datasets: file_key: bb_total_660 coordinates: [longitude, latitude] units: m-1 - + bb_total_680: name: bb_total_680 long_name: Spectral backscattering coefficient at 680 nm, QAA version 6 @@ -603,7 +603,7 @@ datasets: file_key: bb_total_680 coordinates: [longitude, latitude] units: m-1 - + bb_total_709: name: bb_total_709 long_name: Spectral backscattering coefficient at 709 nm, QAA version 6 @@ -611,7 +611,7 @@ datasets: file_key: bb_total_709 coordinates: [longitude, latitude] units: m-1 - + bb_total_745: name: bb_total_745 long_name: Spectral backscattering coefficient at 745 nm, QAA version 6 @@ -619,7 +619,7 @@ datasets: file_key: bb_total_745 coordinates: [longitude, latitude] units: m-1 - + bb_total_865: name: bb_total_865 long_name: Spectral backscattering coefficient at 865 nm, QAA version 6 @@ -627,7 +627,7 @@ datasets: file_key: bb_total_865 coordinates: [longitude, latitude] units: m-1 - + # --- Other IOP output --- a_dg_443: name: a_dg_443 @@ -636,7 +636,7 @@ datasets: file_key: a_dg_443 coordinates: [longitude, latitude] units: m-1 - + a_chl_443: name: a_chl_443 long_name: Spectral absorption coefficient of chlorophyll-a at 443 nm, QAA version 6 @@ -644,11 +644,11 
@@ datasets: file_key: a_chl_443 coordinates: [longitude, latitude] units: m-1 - + bb_p_555: name: bb_p_555 long_name: Spectral backscattering coefficient of particle at 555 nm, QAA version 6 file_type: goci_l2_iop file_key: bb_p_555 coordinates: [longitude, latitude] - units: m-1 \ No newline at end of file + units: m-1 From 251f92f18b0852622a8cc7a1a2b686c285c74d08 Mon Sep 17 00:00:00 2001 From: Isotr0py <2037008807@qq.com> Date: Sat, 30 Mar 2024 23:34:23 +0800 Subject: [PATCH 1215/1416] support aerosol products --- satpy/etc/readers/goci2_l2_nc.yaml | 43 ++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/satpy/etc/readers/goci2_l2_nc.yaml b/satpy/etc/readers/goci2_l2_nc.yaml index 731553b9fd..9eace849ea 100644 --- a/satpy/etc/readers/goci2_l2_nc.yaml +++ b/satpy/etc/readers/goci2_l2_nc.yaml @@ -59,6 +59,13 @@ file_types: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_IOP.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_IOP.nc' + goci_l2_aod: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_AOD.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_AOD.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_AOD.nc' + datasets: # --- Navigation Data --- latitude: @@ -652,3 +659,39 @@ datasets: file_key: bb_p_555 coordinates: [longitude, latitude] units: m-1 + +# --- Aerosol products --- + AOD_550: + name: AOD_550 + long_name: Aerosol Optical Depth at 550 nm + file_type: goci_l2_aod + file_key: Aerosol_Optical_Depth + coordinates: [longitude, latitude] + + Aerosol_Type: + name: Aerosol_Type + long_name: Aerosol type; 1 = Dust, 2 = Non-absorbing Coarse, 3 = Mixture, 4 = High-absorbing Fine, 5 = Moderate-absorbing Fine, 6 = Non-absorbing Fine + file_type: goci_l2_aod + file_key: Aerosol_Type + coordinates: [longitude, latitude] + + Angstrom_Exponent: + name: Angstrom_Exponent + long_name: Calculated Angstrom Exponent between 440 and 870 nm + file_type: goci_l2_aod + file_key: Angstrom_Exponent + coordinates: [longitude, latitude] + + Fine_Mode_Fraction: + name: Fine_Mode_Fraction + long_name: Fine Mode Fraction at 550 nm + file_type: goci_l2_aod + file_key: Fine_Mode_Fraction + coordinates: [longitude, latitude] + + Single_Scattering_Albedo: + name: Single_Scattering_Albedo + long_name: Single Scattering Albedo at 440 nm + file_type: goci_l2_aod + file_key: Single_Scattering_Albedo + coordinates: [longitude, latitude] From cc1ad849be98cfa71dd0519aff4613e455e4f347 Mon Sep 17 00:00:00 2001 From: Isotr0py <2037008807@qq.com> Date: Sun, 31 Mar 2024 00:11:57 +0800 Subject: [PATCH 1216/1416] Fix IOP reader --- satpy/etc/readers/goci2_l2_nc.yaml | 4 ++-- satpy/readers/goci2_l2_nc.py | 7 ++++++- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/satpy/etc/readers/goci2_l2_nc.yaml b/satpy/etc/readers/goci2_l2_nc.yaml index 9eace849ea..997b39f5d3 100644 --- a/satpy/etc/readers/goci2_l2_nc.yaml +++ b/satpy/etc/readers/goci2_l2_nc.yaml @@ -70,14 +70,14 @@ datasets: # --- Navigation Data --- latitude: name: latitude - file_type: [goci_l2_kd, goci_l2_zsd, goci_l2_chl, goci_l2_cdom, 
goci_l2_tss, goci_l2_ac, goci_l2_iop] + file_type: [goci_l2_kd, goci_l2_zsd, goci_l2_chl, goci_l2_cdom, goci_l2_tss, goci_l2_ac, goci_l2_iop, goci_l2_aod] file_key: latitude standard_name: latitude units: degrees_north longitude: name: longitude - file_type: [goci_l2_kd, goci_l2_zsd, goci_l2_chl, goci_l2_cdom, goci_l2_tss, goci_l2_ac, goci_l2_iop] + file_type: [goci_l2_kd, goci_l2_zsd, goci_l2_chl, goci_l2_cdom, goci_l2_tss, goci_l2_ac, goci_l2_iop, goci_l2_aod] file_key: longitude standard_name: longitude units: degrees_east diff --git a/satpy/readers/goci2_l2_nc.py b/satpy/readers/goci2_l2_nc.py index 7b548f179a..574d44a829 100644 --- a/satpy/readers/goci2_l2_nc.py +++ b/satpy/readers/goci2_l2_nc.py @@ -54,8 +54,13 @@ def _merge_navigation_data(self, filetype_info): Rhoc = self["geophysical_data/RhoC"] Rrs = self["geophysical_data/Rrs"] data = xr.merge([Rhoc, Rrs, navigation]) + elif filetype_info["file_type"] == "goci_l2_iop": + a = self["geophysical_data/a_total"] + bb = self["geophysical_data/bb_total"] + data = xr.merge([a, bb, navigation]) else: - data = xr.merge([self["geophysical_data"], navigation]) + data = self["geophysical_data"] + data = xr.merge([data, navigation]) return data @property From b5d69982383e915f543896a96a33ba43b91493c2 Mon Sep 17 00:00:00 2001 From: Isotr0py <2037008807@qq.com> Date: Sun, 31 Mar 2024 01:45:47 +0800 Subject: [PATCH 1217/1416] support ocean products --- satpy/etc/readers/goci2_l2_nc.yaml | 141 +++++++++++++++++++++++++++++ 1 file changed, 141 insertions(+) diff --git a/satpy/etc/readers/goci2_l2_nc.yaml b/satpy/etc/readers/goci2_l2_nc.yaml index 997b39f5d3..f369eb9f92 100644 --- a/satpy/etc/readers/goci2_l2_nc.yaml +++ b/satpy/etc/readers/goci2_l2_nc.yaml @@ -66,6 +66,62 @@ file_types: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_AOD.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_AOD.nc' + goci_l2_mf: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_MF.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_MF.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_MF.nc' + + goci_l2_cf: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_CF.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_CF.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_CF.nc' + + goci_l2_fa: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_FA.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_FA.nc' + - 
'{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_FA.nc' + + goci_l2_fgi: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_FGI.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_FGI.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_FGI.nc' + + goci_l2_lsss: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_LSSS.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_LSSS.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_LSSS.nc' + + goci_l2_pp: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_PP.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_PP.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_PP.nc' + + goci_l2_ri: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_RI.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_RI.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_RI.nc' + + goci_l2_ssc: + file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler + file_patterns: + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_SSC.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_SSC.nc' + - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_SSC.nc' + datasets: # --- Navigation Data --- latitude: @@ -695,3 +751,88 @@ datasets: file_type: goci_l2_aod file_key: Single_Scattering_Albedo coordinates: [longitude, latitude] + +# --- Ocean Products --- + MF: + name: MF + long_name: Marine fog existence(Yes/No/Possible), Machine learning based KOSC Algorithm + file_type: goci_l2_mf + file_key: MF + coordinates: [longitude, latitude] + + CF: + name: CF + long_name: Chlorophyll(-a) Front, CF + file_type: goci_l2_cf + file_key: CF + coordinates: [longitude, latitude] + units : mg m-3 km-1 + + FA: + name: FA + long_name: Subpixel area fraction covered by floating brown algae or green algae + file_type: goci_l2_fa + file_key: FA + coordinates: [longitude, latitude] + + FGI: + name: FGI + long_name: Fishing ground index for chub mackerel + 
file_type: goci_l2_fgi + file_key: FGI + coordinates: [longitude, latitude] + + SSS: + name: SSS + long_name: Sea Surface Salinity, Neural network algorithm + file_type: goci_l2_lsss + file_key: sss + coordinates: [longitude, latitude] + units: psu + + PP: + name: PP + long_name: Primary Production + file_type: goci_l2_pp + file_key: PP + coordinates: [longitude, latitude] + units: PP unit + + RI: + name: RI + long_name: Red Tide Index + file_type: goci_l2_ri + file_key: RI + coordinates: [longitude, latitude] + + SSC_direction: + name: SSC_direction + long_name: Sea Surface Current direction + file_type: goci_l2_ssc + file_key: SSC_direction + coordinates: [longitude, latitude] + units: degree + + SSC_speed: + name: SSC_speed + long_name: Sea Surface Current speed + file_type: goci_l2_ssc + file_key: SSC_speed + coordinates: [longitude, latitude] + units: m s-1 + + SSC_u: + name: SSC_u + long_name: Sea Surface Current u-component + file_type: goci_l2_ssc + file_key: SSC_u + coordinates: [longitude, latitude] + units: m s-1 + + SSC_v: + name: SSC_v + long_name: Sea Surface Current v-component + file_type: goci_l2_ssc + file_key: SSC_v + coordinates: [longitude, latitude] + units: m s-1 From 4f1d6982d088201989c1ea44c7d40df4dc1bc2e4 Mon Sep 17 00:00:00 2001 From: Isotr0py <2037008807@qq.com> Date: Sun, 31 Mar 2024 15:26:26 +0800 Subject: [PATCH 1218/1416] fix sensor name typo --- satpy/etc/readers/goci2_l2_nc.yaml | 202 ++++++++++++++--------------- satpy/readers/goci2_l2_nc.py | 8 +- 2 files changed, 105 insertions(+), 105 deletions(-) diff --git a/satpy/etc/readers/goci2_l2_nc.yaml b/satpy/etc/readers/goci2_l2_nc.yaml index f369eb9f92..d11db0a5d1 100644 --- a/satpy/etc/readers/goci2_l2_nc.yaml +++ b/satpy/etc/readers/goci2_l2_nc.yaml @@ -10,112 +10,112 @@ reader: group_keys: ['start_time', 'platform_shortname', "slot"] file_types: - goci_l2_kd: + goci2_l2_kd: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_Kd.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_Kd.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_Kd.nc' - goci_l2_zsd: + goci2_l2_zsd: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_Zsd.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_Zsd.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_Zsd.nc' - goci_l2_chl: + goci2_l2_chl: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_Chl.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_Chl.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_Chl.nc' - goci_l2_cdom: + goci2_l2_cdom: file_reader: 
!!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_CDOM.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_CDOM.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_CDOM.nc' - goci_l2_tss: + goci2_l2_tss: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_TSS.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_TSS.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_TSS.nc' - goci_l2_ac: + goci2_l2_ac: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_AC.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_AC.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_AC.nc' - goci_l2_iop: + goci2_l2_iop: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_IOP.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_IOP.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_IOP.nc' - goci_l2_aod: + goci2_l2_aod: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_AOD.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_AOD.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_AOD.nc' - goci_l2_mf: + goci2_l2_mf: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_MF.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_MF.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_MF.nc' - goci_l2_cf: + goci2_l2_cf: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_CF.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_CF.nc' - 
'{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_CF.nc' - goci_l2_fa: + goci2_l2_fa: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_FA.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_FA.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_FA.nc' - goci_l2_fgi: + goci2_l2_fgi: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_FGI.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_FGI.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_FGI.nc' - goci_l2_lsss: + goci2_l2_lsss: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_LSSS.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_LSSS.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_LSSS.nc' - goci_l2_pp: + goci2_l2_pp: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_PP.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_PP.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_PP.nc' - goci_l2_ri: + goci2_l2_ri: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_RI.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_RI.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_RI.nc' - goci_l2_ssc: + goci2_l2_ssc: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_SSC.nc' @@ -126,14 +126,14 @@ datasets: # --- Navigation Data --- latitude: name: latitude - file_type: [goci_l2_kd, goci_l2_zsd, goci_l2_chl, goci_l2_cdom, goci_l2_tss, goci_l2_ac, goci_l2_iop, goci_l2_aod] + file_type: [goci2_l2_kd, goci2_l2_zsd, goci2_l2_chl, goci2_l2_cdom, goci2_l2_tss, goci2_l2_ac, goci2_l2_iop, goci2_l2_aod, goci2_l2_mf, goci2_l2_cf, goci2_l2_fa, goci2_l2_fgi, goci2_l2_lsss, goci2_l2_pp, goci2_l2_ri, goci2_l2_ssc] file_key: latitude standard_name: latitude units: degrees_north longitude: name: 
longitude - file_type: [goci_l2_kd, goci_l2_zsd, goci_l2_chl, goci_l2_cdom, goci_l2_tss, goci_l2_ac, goci_l2_iop, goci_l2_aod] + file_type: [goci2_l2_kd, goci2_l2_zsd, goci2_l2_chl, goci2_l2_cdom, goci2_l2_tss, goci2_l2_ac, goci2_l2_iop, goci2_l2_aod, goci2_l2_mf, goci2_l2_cf, goci2_l2_fa, goci2_l2_fgi, goci2_l2_lsss, goci2_l2_pp, goci2_l2_ri, goci2_l2_ssc] file_key: longitude standard_name: longitude units: degrees_east @@ -143,7 +143,7 @@ datasets: Kd_380: name: Kd_380 long_name: Diffuse attenuation coefficient at 380 nm - file_type: goci_l2_kd + file_type: goci2_l2_kd file_key: Kd_380 coordinates: [longitude, latitude] units: m-1 @@ -151,7 +151,7 @@ datasets: Kd_412: name: Kd_412 long_name: Diffuse attenuation coefficient at 412 nm - file_type: goci_l2_kd + file_type: goci2_l2_kd file_key: Kd_412 coordinates: [longitude, latitude] units: m-1 @@ -159,7 +159,7 @@ datasets: Kd_443: name: Kd_443 long_name: Diffuse attenuation coefficient at 443 nm - file_type: goci_l2_kd + file_type: goci2_l2_kd file_key: Kd_443 coordinates: [longitude, latitude] units: m-1 @@ -167,7 +167,7 @@ datasets: Kd_490: name: Kd_490 long_name: Diffuse attenuation coefficient at 490 nm - file_type: goci_l2_kd + file_type: goci2_l2_kd file_key: Kd_490 coordinates: [longitude, latitude] units: m-1 @@ -175,7 +175,7 @@ datasets: Kd_510: name: Kd_510 long_name: Diffuse attenuation coefficient at 510 nm - file_type: goci_l2_kd + file_type: goci2_l2_kd file_key: Kd_510 coordinates: [longitude, latitude] units: m-1 @@ -183,7 +183,7 @@ datasets: Kd_555: name: Kd_555 long_name: Diffuse attenuation coefficient at 555 nm - file_type: goci_l2_kd + file_type: goci2_l2_kd file_key: Kd_555 coordinates: [longitude, latitude] units: m-1 @@ -191,7 +191,7 @@ datasets: Kd_620: name: Kd_620 long_name: Diffuse attenuation coefficient at 620 nm - file_type: goci_l2_kd + file_type: goci2_l2_kd file_key: Kd_620 coordinates: [longitude, latitude] units: m-1 @@ -199,7 +199,7 @@ datasets: Kd_660: name: Kd_660 long_name: Diffuse attenuation coefficient at 660 nm - file_type: goci_l2_kd + file_type: goci2_l2_kd file_key: Kd_660 coordinates: [longitude, latitude] units: m-1 @@ -207,7 +207,7 @@ datasets: Kd_680: name: Kd_680 long_name: Diffuse attenuation coefficient at 680 nm - file_type: goci_l2_kd + file_type: goci2_l2_kd file_key: Kd_680 coordinates: [longitude, latitude] units: m-1 @@ -215,7 +215,7 @@ datasets: Kd_709: name: Kd_709 long_name: Diffuse attenuation coefficient at 709 nm - file_type: goci_l2_kd + file_type: goci2_l2_kd file_key: Kd_709 coordinates: [longitude, latitude] units: m-1 @@ -223,7 +223,7 @@ datasets: Kd_745: name: Kd_745 long_name: Diffuse attenuation coefficient at 745 nm - file_type: goci_l2_kd + file_type: goci2_l2_kd file_key: Kd_745 coordinates: [longitude, latitude] units: m-1 @@ -231,7 +231,7 @@ datasets: Kd_865: name: Kd_865 long_name: Diffuse attenuation coefficient at 865 nm - file_type: goci_l2_kd + file_type: goci2_l2_kd file_key: Kd_865 coordinates: [longitude, latitude] units: m-1 @@ -239,28 +239,28 @@ datasets: # --- Other OC products --- Secchi_disk_depth: name: Zsd - file_type: goci_l2_zsd + file_type: goci2_l2_zsd file_key: Zsd coordinates: [longitude, latitude] units: m Chlorophyll-a_concentration: name: Chl - file_type: goci_l2_chl + file_type: goci2_l2_chl file_key: Chl coordinates: [longitude, latitude] units: mg m-3 Colored_Dissolved_Organic_Matter: name: CDOM - file_type: goci_l2_cdom + file_type: goci2_l2_cdom file_key: CDOM coordinates: [longitude, latitude] units: m-1 
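
A note on the rename in progress here: each dataset stanza's file_type must match a key in the file_types: section verbatim, which is why this commit has to touch every product line individually. To sanity-check the file_patterns themselves, here is a minimal sketch using trollsift (already a satpy dependency); the granule name is an assumed example that mirrors the fake files used by the tests later in the series:

from trollsift import Parser

# One of the goci2_l2_chl patterns defined in this reader's YAML.
pattern = ("{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}"
           "_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_Chl.nc")
info = Parser(pattern).parse("GK2B_GOCI2_L2_20240214_021530_LA_S010_Chl.nc")
print(info["platform"], info["slot"])  # 'GK2B' and 10; the zero-padded slot parses to an int
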
Total_Suspended_Sediments_concentration: name: TSS - file_type: goci_l2_tss + file_type: goci2_l2_tss file_key: TSS coordinates: [longitude, latitude] units: g m-3 @@ -272,7 +272,7 @@ datasets: sensor: goci2 wavelength: [0.37, 0.38, 0.39] long_name: Rayleigh-corrected reflectance at 380 nm - file_type: goci_l2_ac + file_type: goci2_l2_ac file_key: RhoC_380 coordinates: [longitude, latitude] @@ -281,7 +281,7 @@ datasets: sensor: goci2 wavelength: [0.402, 0.412, 0.422] long_name: Rayleigh-corrected reflectance at 412 nm - file_type: goci_l2_ac + file_type: goci2_l2_ac file_key: RhoC_412 coordinates: [longitude, latitude] @@ -290,7 +290,7 @@ datasets: sensor: goci2 wavelength: [0.433, 0.443, 0.453] long_name: Rayleigh-corrected reflectance at 443 nm - file_type: goci_l2_ac + file_type: goci2_l2_ac file_key: RhoC_443 coordinates: [longitude, latitude] @@ -299,7 +299,7 @@ datasets: sensor: goci2 wavelength: [0.48, 0.49, 0.50] long_name: Rayleigh-corrected reflectance at 490 nm - file_type: goci_l2_ac + file_type: goci2_l2_ac file_key: RhoC_490 coordinates: [longitude, latitude] @@ -308,7 +308,7 @@ datasets: sensor: goci2 wavelength: [0.50, 0.51, 0.52] long_name: Rayleigh-corrected reflectance at 510 nm - file_type: goci_l2_ac + file_type: goci2_l2_ac file_key: RhoC_510 coordinates: [longitude, latitude] @@ -317,7 +317,7 @@ datasets: sensor: goci2 wavelength: [0.545, 0.555, 0.565] long_name: Rayleigh-corrected reflectance at 555 nm - file_type: goci_l2_ac + file_type: goci2_l2_ac file_key: RhoC_555 coordinates: [longitude, latitude] @@ -326,7 +326,7 @@ datasets: sensor: goci2 wavelength: [0.61, 0.62, 0.63] long_name: Rayleigh-corrected reflectance at 620 nm - file_type: goci_l2_ac + file_type: goci2_l2_ac file_key: RhoC_620 coordinates: [longitude, latitude] @@ -335,7 +335,7 @@ datasets: sensor: goci2 wavelength: [0.65, 0.66, 0.67] long_name: Rayleigh-corrected reflectance at 660 nm - file_type: goci_l2_ac + file_type: goci2_l2_ac file_key: RhoC_660 coordinates: [longitude, latitude] @@ -344,7 +344,7 @@ datasets: sensor: goci2 wavelength: [0.675, 0.680, 0.685] long_name: Rayleigh-corrected reflectance at 680 nm - file_type: goci_l2_ac + file_type: goci2_l2_ac file_key: RhoC_680 coordinates: [longitude, latitude] @@ -353,7 +353,7 @@ datasets: sensor: goci2 wavelength: [0.704, 0.709, 0.714] long_name: Rayleigh-corrected reflectance at 709 nm - file_type: goci_l2_ac + file_type: goci2_l2_ac file_key: RhoC_709 coordinates: [longitude, latitude] @@ -362,7 +362,7 @@ datasets: sensor: goci2 wavelength: [0.735, 0.745, 0.755] long_name: Rayleigh-corrected reflectance at 745 nm - file_type: goci_l2_ac + file_type: goci2_l2_ac file_key: RhoC_745 coordinates: [longitude, latitude] @@ -371,7 +371,7 @@ datasets: sensor: goci2 wavelength: [0.845, 0.865, 0.885] long_name: Rayleigh-corrected reflectance at 865 nm - file_type: goci_l2_ac + file_type: goci2_l2_ac file_key: RhoC_865 coordinates: [longitude, latitude] @@ -381,7 +381,7 @@ datasets: sensor: goci2 wavelength: [0.37, 0.38, 0.39] long_name: Remote sensing reflectance at 380 nm, KOSC standard algorithm - file_type: goci_l2_ac + file_type: goci2_l2_ac file_key: Rrs_380 coordinates: [longitude, latitude] units: sr-1 @@ -391,7 +391,7 @@ datasets: sensor: goci2 wavelength: [0.402, 0.412, 0.422] long_name: Remote sensing reflectance at 412 nm, KOSC standard algorithm - file_type: goci_l2_ac + file_type: goci2_l2_ac file_key: Rrs_412 coordinates: [longitude, latitude] units: sr-1 @@ -401,7 +401,7 @@ datasets: sensor: goci2 wavelength: [0.433, 0.443, 0.453] 
long_name: Remote sensing reflectance at 443 nm, KOSC standard algorithm - file_type: goci_l2_ac + file_type: goci2_l2_ac file_key: Rrs_443 coordinates: [longitude, latitude] units: sr-1 @@ -411,7 +411,7 @@ datasets: sensor: goci2 wavelength: [0.48, 0.49, 0.50] long_name: Remote sensing reflectance at 490 nm, KOSC standard algorithm - file_type: goci_l2_ac + file_type: goci2_l2_ac file_key: Rrs_490 coordinates: [longitude, latitude] units: sr-1 @@ -421,7 +421,7 @@ datasets: sensor: goci2 wavelength: [0.50, 0.51, 0.52] long_name: Remote sensing reflectance at 510 nm, KOSC standard algorithm - file_type: goci_l2_ac + file_type: goci2_l2_ac file_key: Rrs_510 coordinates: [longitude, latitude] units: sr-1 @@ -431,7 +431,7 @@ datasets: sensor: goci2 wavelength: [0.545, 0.555, 0.565] long_name: Remote sensing reflectance at 555 nm, KOSC standard algorithm - file_type: goci_l2_ac + file_type: goci2_l2_ac file_key: Rrs_555 coordinates: [longitude, latitude] units: sr-1 @@ -441,7 +441,7 @@ datasets: sensor: goci2 wavelength: [0.61, 0.62, 0.63] long_name: Remote sensing reflectance at 620 nm, KOSC standard algorithm - file_type: goci_l2_ac + file_type: goci2_l2_ac file_key: Rrs_620 coordinates: [longitude, latitude] units: sr-1 @@ -451,7 +451,7 @@ datasets: sensor: goci2 wavelength: [0.65, 0.66, 0.67] long_name: Remote sensing reflectance at 660 nm, KOSC standard algorithm - file_type: goci_l2_ac + file_type: goci2_l2_ac file_key: Rrs_660 coordinates: [longitude, latitude] units: sr-1 @@ -461,7 +461,7 @@ datasets: sensor: goci2 wavelength: [0.675, 0.680, 0.685] long_name: Remote sensing reflectance at 680 nm, KOSC standard algorithm - file_type: goci_l2_ac + file_type: goci2_l2_ac file_key: Rrs_680 coordinates: [longitude, latitude] units: sr-1 @@ -471,7 +471,7 @@ datasets: sensor: goci2 wavelength: [0.704, 0.709, 0.714] long_name: Remote sensing reflectance at 709 nm, KOSC standard algorithm - file_type: goci_l2_ac + file_type: goci2_l2_ac file_key: Rrs_709 coordinates: [longitude, latitude] units: sr-1 @@ -481,7 +481,7 @@ datasets: sensor: goci2 wavelength: [0.735, 0.745, 0.755] long_name: Remote sensing reflectance at 745 nm, KOSC standard algorithm - file_type: goci_l2_ac + file_type: goci2_l2_ac file_key: Rrs_745 coordinates: [longitude, latitude] units: sr-1 @@ -491,7 +491,7 @@ datasets: sensor: goci2 wavelength: [0.845, 0.865, 0.885] long_name: Remote sensing reflectance at 865 nm, KOSC standard algorithm - file_type: goci_l2_ac + file_type: goci2_l2_ac file_key: Rrs_865 coordinates: [longitude, latitude] units: sr-1 @@ -501,7 +501,7 @@ datasets: a_total_380: name: a_total_380 long_name: Spectral absorption coefficient at 380 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: a_total_380 coordinates: [longitude, latitude] units: m-1 @@ -509,7 +509,7 @@ datasets: a_total_412: name: a_total_412 long_name: Spectral absorption coefficient at 412 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: a_total_412 coordinates: [longitude, latitude] units: m-1 @@ -517,7 +517,7 @@ datasets: a_total_443: name: a_total_443 long_name: Spectral absorption coefficient at 443 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: a_total_443 coordinates: [longitude, latitude] units: m-1 @@ -525,7 +525,7 @@ datasets: a_total_490: name: a_total_490 long_name: Spectral absorption coefficient at 490 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: a_total_490 coordinates: [longitude, latitude] units: m-1 @@ 
-533,7 +533,7 @@ datasets: a_total_510: name: a_total_510 long_name: Spectral absorption coefficient at 510 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: a_total_510 coordinates: [longitude, latitude] units: m-1 @@ -541,7 +541,7 @@ datasets: a_total_555: name: a_total_555 long_name: Spectral absorption coefficient at 555 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: a_total_555 coordinates: [longitude, latitude] units: m-1 @@ -549,7 +549,7 @@ datasets: a_total_620: name: a_total_620 long_name: Spectral absorption coefficient at 620 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: a_total_620 coordinates: [longitude, latitude] units: m-1 @@ -557,7 +557,7 @@ datasets: a_total_660: name: a_total_660 long_name: Spectral absorption coefficient at 660 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: a_total_660 coordinates: [longitude, latitude] units: m-1 @@ -565,7 +565,7 @@ datasets: a_total_680: name: a_total_680 long_name: Spectral absorption coefficient at 680 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: a_total_680 coordinates: [longitude, latitude] units: m-1 @@ -573,7 +573,7 @@ datasets: a_total_709: name: a_total_709 long_name: Spectral absorption coefficient at 709 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: a_total_709 coordinates: [longitude, latitude] units: m-1 @@ -581,7 +581,7 @@ datasets: a_total_745: name: a_total_745 long_name: Spectral absorption coefficient at 745 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: a_total_745 coordinates: [longitude, latitude] units: m-1 @@ -589,7 +589,7 @@ datasets: a_total_865: name: a_total_865 long_name: Spectral absorption coefficient at 865 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: a_total_865 coordinates: [longitude, latitude] units: m-1 @@ -598,7 +598,7 @@ datasets: bb_total_380: name: bb_total_380 long_name: Spectral backscattering coefficient at 380 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: bb_total_380 coordinates: [longitude, latitude] units: m-1 @@ -606,7 +606,7 @@ datasets: bb_total_412: name: bb_total_412 long_name: Spectral backscattering coefficient at 412 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: bb_total_412 coordinates: [longitude, latitude] units: m-1 @@ -614,7 +614,7 @@ datasets: bb_total_443: name: bb_total_443 long_name: Spectral backscattering coefficient at 443 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: bb_total_443 coordinates: [longitude, latitude] units: m-1 @@ -622,7 +622,7 @@ datasets: bb_total_490: name: bb_total_490 long_name: Spectral backscattering coefficient at 490 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: bb_total_490 coordinates: [longitude, latitude] units: m-1 @@ -630,7 +630,7 @@ datasets: bb_total_510: name: bb_total_510 long_name: Spectral backscattering coefficient at 510 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: bb_total_510 coordinates: [longitude, latitude] units: m-1 @@ -638,7 +638,7 @@ datasets: bb_total_555: name: bb_total_555 long_name: Spectral backscattering coefficient at 555 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: bb_total_555 coordinates: [longitude, latitude] units: m-1 @@ -646,7 +646,7 @@ datasets: bb_total_620: 
name: bb_total_620 long_name: Spectral backscattering coefficient at 620 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: bb_total_620 coordinates: [longitude, latitude] units: m-1 @@ -654,7 +654,7 @@ datasets: bb_total_660: name: bb_total_660 long_name: Spectral backscattering coefficient at 660 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: bb_total_660 coordinates: [longitude, latitude] units: m-1 @@ -662,7 +662,7 @@ datasets: bb_total_680: name: bb_total_680 long_name: Spectral backscattering coefficient at 680 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: bb_total_680 coordinates: [longitude, latitude] units: m-1 @@ -670,7 +670,7 @@ datasets: bb_total_709: name: bb_total_709 long_name: Spectral backscattering coefficient at 709 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: bb_total_709 coordinates: [longitude, latitude] units: m-1 @@ -678,7 +678,7 @@ datasets: bb_total_745: name: bb_total_745 long_name: Spectral backscattering coefficient at 745 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: bb_total_745 coordinates: [longitude, latitude] units: m-1 @@ -686,7 +686,7 @@ datasets: bb_total_865: name: bb_total_865 long_name: Spectral backscattering coefficient at 865 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: bb_total_865 coordinates: [longitude, latitude] units: m-1 @@ -695,7 +695,7 @@ datasets: a_dg_443: name: a_dg_443 long_name: Spectral absorption coefficient of detritus and gelbstoff at 443 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: a_dg_443 coordinates: [longitude, latitude] units: m-1 @@ -703,7 +703,7 @@ datasets: a_chl_443: name: a_chl_443 long_name: Spectral absorption coefficient of chlorophyll-a at 443 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: a_chl_443 coordinates: [longitude, latitude] units: m-1 @@ -711,7 +711,7 @@ datasets: bb_p_555: name: bb_p_555 long_name: Spectral backscattering coefficient of particle at 555 nm, QAA version 6 - file_type: goci_l2_iop + file_type: goci2_l2_iop file_key: bb_p_555 coordinates: [longitude, latitude] units: m-1 @@ -720,35 +720,35 @@ datasets: AOD_550: name: AOD_550 long_name: Aerosol Optical Depth at 550 nm - file_type: goci_l2_aod + file_type: goci2_l2_aod file_key: Aerosol_Optical_Depth coordinates: [longitude, latitude] Aerosol_Type: name: Aerosol_Type long_name: Aerosol type; 1 = Dust, 2 = Non-absorbing Coarse, 3 = Mixture, 4 = High-absorbing Fine, 5 = Moderate-absorbing Fine, 6 = Non-absorbing Fine - file_type: goci_l2_aod + file_type: goci2_l2_aod file_key: Aerosol_Type coordinates: [longitude, latitude] Angstrom_Exponent: name: Angstrom_Exponent long_name: Calculated Angstrom Exponent between 440 and 870 nm - file_type: goci_l2_aod + file_type: goci2_l2_aod file_key: Angstrom_Exponent coordinates: [longitude, latitude] Fine_Mode_Fraction: name: Fine_Mode_Fraction long_name: Fine Mode Fraction at 550 nm - file_type: goci_l2_aod + file_type: goci2_l2_aod file_key: Fine_Mode_Fraction coordinates: [longitude, latitude] Single_Scattering_Albedo: name: Single_Scattering_Albedo long_name: Single Scattering Albedo at 440 nm - file_type: goci_l2_aod + file_type: goci2_l2_aod file_key: Single_Scattering_Albedo coordinates: [longitude, latitude] @@ -756,14 +756,14 @@ datasets: MF: name: MF long_name: Marine fog existence(Yes/No/Possible), Machine learning based KOSC Algorithm - 
file_type: goci_l2_mf + file_type: goci2_l2_mf file_key: MF coordinates: [longitude, latitude] CF: name: CF long_name: Chlorophyll(-a) Front, CF - file_type: goci_l2_cf + file_type: goci2_l2_cf file_key: CF coordinates: [longitude, latitude] units : mg m-3 km-1 @@ -771,21 +771,21 @@ datasets: FA: name: FA long_name: Subpixel area fraction covered by floating brown algae or green algae - file_type: goci_l2_fa + file_type: goci2_l2_fa file_key: FA coordinates: [longitude, latitude] FGI: name: FGI long_name: Fishing ground index for chub mackerel - file_type: goci_l2_fgi + file_type: goci2_l2_fgi file_key: FGI coordinates: [longitude, latitude] SSS: name: SSS long_name: Sea Surface Salinity, Neural network algorithm - file_type: goci_l2_lsss + file_type: goci2_l2_lsss file_key: sss coordinates: [longitude, latitude] units: psu @@ -793,7 +793,7 @@ datasets: PP: name: PP long_name: Primary Production - file_type: goci_l2_pp + file_type: goci2_l2_pp file_key: PP coordinates: [longitude, latitude] units: PP unit @@ -801,14 +801,14 @@ datasets: RI: name: RI long_name: Red Tide Index - file_type: goci_l2_ri + file_type: goci2_l2_ri file_key: RI coordinates: [longitude, latitude] SSC_direction: name: SSC_direction long_name: Sea Surface Current direction - file_type: goci_l2_ssc + file_type: goci2_l2_ssc file_key: SSC_direction coordinates: [longitude, latitude] units: degree @@ -816,7 +816,7 @@ datasets: SSC_speed: name: SSC_speed long_name: Sea Surface Current speed - file_type: goci_l2_ssc + file_type: goci2_l2_ssc file_key: SSC_speed coordinates: [longitude, latitude] units: m s-1 @@ -824,7 +824,7 @@ datasets: SSC_u: name: SSC_u long_name: Sea Surface Current u-component - file_type: goci_l2_ssc + file_type: goci2_l2_ssc file_key: SSC_u coordinates: [longitude, latitude] units: m s-1 @@ -832,7 +832,7 @@ datasets: SSC_v: name: SSC_v long_name: Sea Surface Current v-component - file_type: goci_l2_ssc + file_type: goci2_l2_ssc file_key: SSC_v coordinates: [longitude, latitude] units: m s-1 diff --git a/satpy/readers/goci2_l2_nc.py b/satpy/readers/goci2_l2_nc.py index 574d44a829..fe00d1ef96 100644 --- a/satpy/readers/goci2_l2_nc.py +++ b/satpy/readers/goci2_l2_nc.py @@ -50,14 +50,15 @@ def __init__(self, filename, filename_info, filetype_info): def _merge_navigation_data(self, filetype_info): """Merge navigation data and geophysical data.""" navigation = self["navigation_data"] - if filetype_info["file_type"] == "goci_l2_ac": + if filetype_info["file_type"] == "goci2_l2_ac": Rhoc = self["geophysical_data/RhoC"] Rrs = self["geophysical_data/Rrs"] data = xr.merge([Rhoc, Rrs, navigation]) - elif filetype_info["file_type"] == "goci_l2_iop": + elif filetype_info["file_type"] == "goci2_l2_iop": a = self["geophysical_data/a_total"] bb = self["geophysical_data/bb_total"] - data = xr.merge([a, bb, navigation]) + data = self["geophysical_data"] + data = xr.merge([a, bb, data, navigation]) else: data = self["geophysical_data"] data = xr.merge([data, navigation]) @@ -81,7 +82,6 @@ def get_dataset(self, key, info): logger.debug("Reading in get_dataset %s.", var) variable = self.nc[var] - # Data has 'Latitude' and 'Longitude' coords, these must be replaced. 
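
The _merge_navigation_data hunk above is where the per-product group layout gets flattened: AC granules keep their variables under geophysical_data/RhoC and geophysical_data/Rrs, IOP granules under geophysical_data/a_total and geophysical_data/bb_total, and everything is merged next to the navigation lon/lat. A rough stand-alone sketch of the IOP case, not the handler itself; the filename is one of the fake granules generated by the tests added later in the series:

import xarray as xr

fname = "GK2B_GOCI2_L2_20240214_021530_LA_S010_IOP.nc"  # assumed test granule
a = xr.open_dataset(fname, group="geophysical_data/a_total")
bb = xr.open_dataset(fname, group="geophysical_data/bb_total")
nav = xr.open_dataset(fname, group="navigation_data")
merged = xr.merge([a, bb, nav])  # one flat Dataset: IOPs alongside longitude/latitude
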
         variable = variable.rename({"number_of_lines": "y", "pixels_per_line": "x"})
 
         variable.attrs.update(key.to_dict())

From a6b0badcc8848f8fb9b64f3d5a532f7c04085efa Mon Sep 17 00:00:00 2001
From: Isotr0py <2037008807@qq.com>
Date: Sun, 31 Mar 2024 16:08:57 +0800
Subject: [PATCH 1219/1416] fix primary production reader

---
 satpy/readers/goci2_l2_nc.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/satpy/readers/goci2_l2_nc.py b/satpy/readers/goci2_l2_nc.py
index fe00d1ef96..208b14c13b 100644
--- a/satpy/readers/goci2_l2_nc.py
+++ b/satpy/readers/goci2_l2_nc.py
@@ -84,5 +84,9 @@ def get_dataset(self, key, info):
         variable = variable.rename({"number_of_lines": "y", "pixels_per_line": "x"})
+        # Some products may miss the lon/lat standard_name; fall back to the variable name if it is not already present
+        if variable.attrs.get("standard_name", None) is None:
+            variable.attrs.update({"standard_name": variable.name})
+
         variable.attrs.update(key.to_dict())
         return variable

From 8ee9bb8ff8af747683604c665a0c6c746cf02299 Mon Sep 17 00:00:00 2001
From: Isotr0py <2037008807@qq.com>
Date: Mon, 1 Apr 2024 11:33:28 +0800
Subject: [PATCH 1220/1416] add pytest for GOCI-II

---
 satpy/tests/reader_tests/test_goci2_l2_nc.py | 210 +++++++++++++++++++
 1 file changed, 210 insertions(+)
 create mode 100644 satpy/tests/reader_tests/test_goci2_l2_nc.py

diff --git a/satpy/tests/reader_tests/test_goci2_l2_nc.py b/satpy/tests/reader_tests/test_goci2_l2_nc.py
new file mode 100644
index 0000000000..865ac3184e
--- /dev/null
+++ b/satpy/tests/reader_tests/test_goci2_l2_nc.py
@@ -0,0 +1,210 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2016-2018 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
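
Before the module body of the new test file, a quick aside on the standard_name fallback added by the primary-production fix above. A toy illustration with assumed values; only the attribute logic mirrors the patch:

import xarray as xr

# A navigation variable without attrs, like the fake PP files built below.
ds = xr.Dataset({"longitude": (("y", "x"), [[120.0]])})
var = ds["longitude"]
if var.attrs.get("standard_name", None) is None:
    var.attrs.update({"standard_name": var.name})  # the netCDF variable name fills in
assert var.attrs["standard_name"] == "longitude"
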
+"""Module for testing the satpy.readers.goci2_l2_nc module.""" +from datetime import datetime + +import numpy as np +import pytest +import xarray as xr +from pytest_lazyfixture import lazy_fixture + +from satpy import Scene + +# NOTE: +# The following fixtures are not defined in this file, but are used and injected by Pytest: +# - tmp_path_factory + + +start_time = datetime(2024, 2, 14, 2, 32, 27) +end_time = datetime(2024, 2, 14, 2, 33, 31) + +global_attrs = { + "observation_start_time": start_time.strftime("%Y%m%d_%H%M%S"), + "observation_end_time": end_time.strftime("%Y%m%d_%H%M%S"), + "instrument": "GOCI-II", + "platform": "GK-2B", +} + +badarea_attrs = global_attrs.copy() +badarea_attrs["cdm_data_type"] = "bad_area" + + +def _create_lonlat(): + """Create a fake navigation dataset with lon/lat.""" + lon, lat = np.meshgrid(np.linspace(120, 130, 10), np.linspace(30, 40, 10)) + lon = xr.DataArray( + lon, + dims=("number_of_lines", "pixels_per_line"), + attrs={"standard_name": "longitude", "units": "degrees_east"}, + ) + lat = xr.DataArray( + lat, + dims=("number_of_lines", "pixels_per_line"), + attrs={"standard_name": "latitude", "units": "degrees_north"}, + ) + ds = xr.Dataset() + ds["longitude"] = lon + ds["latitude"] = lat + return ds + + +def _create_bad_lon_lat(): + """Create a fake navigation dataset with lon/lat base name missing.""" + lon, lat = np.meshgrid(np.linspace(120, 130, 10), np.linspace(30, 40, 10)) + ds = xr.Dataset( + { + "longitude": (["number_of_lines", "pixels_per_line"], lon), + "latitude": (["number_of_lines", "pixels_per_line"], lat), + } + ) + return ds + + +@pytest.fixture(scope="session") +def ac_file(tmp_path_factory): + """Create a fake atmospheric correction product.""" + data = np.random.random((10, 10)) + RhoC = xr.Dataset( + {"RhoC_555": (["number_of_lines", "pixels_per_line"], data)}, + coords={"number_of_lines": np.arange(10), "pixels_per_line": np.arange(10)}, + ) + Rrs = xr.Dataset( + {"Rrs_555": (["number_of_lines", "pixels_per_line"], data)}, + coords={"number_of_lines": np.arange(10), "pixels_per_line": np.arange(10)}, + ) + navigation = _create_lonlat() + ds = xr.Dataset(attrs=global_attrs) + fname = ( + f'{tmp_path_factory.mktemp("data")}/GK2B_GOCI2_L2_20240214_021530_LA_S010_AC.nc' + ) + ds.to_netcdf(fname) + navigation.to_netcdf(fname, group="navigation_data", mode="a") + RhoC.to_netcdf(fname, group="geophysical_data/RhoC", mode="a") + Rrs.to_netcdf(fname, group="geophysical_data/Rrs", mode="a") + return fname + + +@pytest.fixture(scope="module") +def iop_file(tmp_path_factory): + """Create a fake IOP product.""" + data = np.random.random((10, 10)) + a = xr.Dataset( + {"a_total_555": (["number_of_lines", "pixels_per_line"], data)}, + coords={"number_of_lines": np.arange(10), "pixels_per_line": np.arange(10)}, + ) + bb = xr.Dataset( + {"bb_total_555": (["number_of_lines", "pixels_per_line"], data)}, + coords={"number_of_lines": np.arange(10), "pixels_per_line": np.arange(10)}, + ) + navigation = _create_lonlat() + ds = xr.Dataset(attrs=global_attrs) + fname = f'{tmp_path_factory.mktemp("data")}/GK2B_GOCI2_L2_20240214_021530_LA_S010_IOP.nc' + ds.to_netcdf(fname) + navigation.to_netcdf(fname, group="navigation_data", mode="a") + a.to_netcdf(fname, group="geophysical_data/a_total", mode="a") + bb.to_netcdf(fname, group="geophysical_data/bb_total", mode="a") + return fname + + +@pytest.fixture(scope="module") +def generic_file(tmp_path_factory): + """Create a fake ouput product like Chl, Zsd etc.""" + data = np.random.random((10, 10)) + 
+    geophysical_data = xr.Dataset(
+        {"Chl": (["number_of_lines", "pixels_per_line"], data)},
+        coords={"number_of_lines": np.arange(10), "pixels_per_line": np.arange(10)},
+    )
+    navigation = _create_lonlat()
+    ds = xr.Dataset(attrs=global_attrs)
+    fname = f'{tmp_path_factory.mktemp("data")}/GK2B_GOCI2_L2_20240214_021530_LA_S010_Chl.nc'
+    ds.to_netcdf(fname)
+    navigation.to_netcdf(fname, group="navigation_data", mode="a")
+    geophysical_data.to_netcdf(fname, group="geophysical_data", mode="a")
+    return fname
+
+
+@pytest.fixture(scope="module")
+def generic_bad_file(tmp_path_factory):
+    """Create a PP product with the lon/lat standard_name attribute missing."""
+    data = np.random.random((10, 10))
+    geophysical_data = xr.Dataset(
+        {"PP": (["number_of_lines", "pixels_per_line"], data)},
+        coords={"number_of_lines": np.arange(10), "pixels_per_line": np.arange(10)},
+    )
+    navigation = _create_bad_lon_lat()
+    ds = xr.Dataset(attrs=global_attrs)
+    fname = (
+        f'{tmp_path_factory.mktemp("data")}/GK2B_GOCI2_L2_20240214_021530_LA_S010_PP.nc'
+    )
+    ds.to_netcdf(fname)
+    navigation.to_netcdf(fname, group="navigation_data", mode="a")
+    geophysical_data.to_netcdf(fname, group="geophysical_data", mode="a")
+    return fname
+
+
+class TestGOCI2Reader:
+    """Test the GOCI-II L2 netcdf file reader."""
+
+    @pytest.mark.parametrize(
+        "test_files",
+        [
+            lazy_fixture("ac_file"),
+            lazy_fixture("iop_file"),
+            lazy_fixture("generic_file"),
+            lazy_fixture("generic_bad_file"),
+        ],
+    )
+    def test_scene_available_datasets(self, test_files):
+        """Test that datasets are available."""
+        scene = Scene(filenames=[test_files], reader="goci2_l2_nc")
+        available_datasets = scene.all_dataset_names()
+        assert len(available_datasets) > 0
+        assert "longitude" in available_datasets
+        assert "latitude" in available_datasets
+
+    @pytest.mark.parametrize(
+        "test_files",
+        [
+            lazy_fixture("ac_file"),
+            lazy_fixture("iop_file"),
+            lazy_fixture("generic_file"),
+            lazy_fixture("generic_bad_file"),
+        ],
+    )
+    def test_start_end_time(self, test_files):
+        """Test dataset start_time and end_time."""
+        scene = Scene(filenames=[test_files], reader="goci2_l2_nc")
+        assert scene.start_time == start_time
+        assert scene.end_time == end_time
+
+    @pytest.mark.parametrize(
+        ("test_files", "datasets"),
+        [
+            (lazy_fixture("ac_file"), ["RhoC_555", "Rrs_555"]),
+            (lazy_fixture("iop_file"), ["a_total_555", "bb_total_555"]),
+            (lazy_fixture("generic_file"), ["Chl"]),
+            (lazy_fixture("generic_bad_file"), ["PP"]),
+        ],
+    )
+    def test_load_dataset(self, test_files, datasets):
+        """Test dataset loading."""
+        scene = Scene(filenames=[test_files], reader="goci2_l2_nc")
+        scene.load(datasets)
+        for dataset in datasets:
+            data_arr = scene[dataset]
+            assert data_arr.dims == ("y", "x")

From db109c445d4b723ee43664c4a95ff64b1522bfee Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 1 Apr 2024 10:58:32 +0000
Subject: [PATCH 1221/1416] Bump pypa/gh-action-pypi-publish from 1.8.12 to
 1.8.14

Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.8.12 to 1.8.14.
- [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases)
- [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.8.12...v1.8.14)

---
updated-dependencies:
- dependency-name: pypa/gh-action-pypi-publish
  dependency-type: direct:production
  update-type: version-update:semver-patch
...
Signed-off-by: dependabot[bot] <support@dependabot.com>
---
 .github/workflows/deploy-sdist.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml
index 450e47864c..797eab716b 100644
--- a/.github/workflows/deploy-sdist.yaml
+++ b/.github/workflows/deploy-sdist.yaml
@@ -23,7 +23,7 @@ jobs:
       - name: Publish package to PyPI
         if: github.event.action == 'published'
-        uses: pypa/gh-action-pypi-publish@v1.8.12
+        uses: pypa/gh-action-pypi-publish@v1.8.14
         with:
           user: __token__
           password: ${{ secrets.pypi_password }}

From 63245b26f83c6cff520cfeadb35ea4fe6ba4e742 Mon Sep 17 00:00:00 2001
From: Isotr0py <2037008807@qq.com>
Date: Mon, 1 Apr 2024 23:46:18 +0800
Subject: [PATCH 1222/1416] refactor groups merge

---
 satpy/readers/goci2_l2_nc.py | 31 ++++++++++++++++---------------
 1 file changed, 16 insertions(+), 15 deletions(-)

diff --git a/satpy/readers/goci2_l2_nc.py b/satpy/readers/goci2_l2_nc.py
index 208b14c13b..7cef5c074b 100644
--- a/satpy/readers/goci2_l2_nc.py
+++ b/satpy/readers/goci2_l2_nc.py
@@ -29,6 +29,15 @@
 
 logger = logging.getLogger(__name__)
 
+GROUPS_MAP = {
+    "goci2_l2_ac": ["geophysical_data/RhoC", "geophysical_data/Rrs", "navigation_data"],
+    "goci2_l2_iop": [
+        "geophysical_data/a_total",
+        "geophysical_data/bb_total",
+        "navigation_data",
+    ],
+}
+
 
 class GOCI2L2NCFileHandler(NetCDF4FileHandler):
     """File handler for GOCI-II L2 official data in netCDF format."""
@@ -39,7 +48,8 @@ def __init__(self, filename, filename_info, filetype_info):
         self.slot = filename_info.get("slot", None)
 
         self.attrs = self["/attrs"]
-        self.nc = self._merge_navigation_data(filetype_info)
+        self.filetype = filetype_info["file_type"]
+        self.nc = self._merge_navigation_data(self.filetype)
 
         self.sensor = self.attrs["instrument"].lower()
         self.nlines = self.nc.sizes["number_of_lines"]
@@ -47,22 +57,13 @@ def __init__(self, filename, filename_info, filetype_info):
         self.platform_shortname = filename_info["platform"]
         self.coverage = filename_info["coverage"]
 
-    def _merge_navigation_data(self, filetype_info):
+    def _merge_navigation_data(self, filetype):
         """Merge navigation data and geophysical data."""
-        navigation = self["navigation_data"]
-        if filetype_info["file_type"] == "goci2_l2_ac":
-            Rhoc = self["geophysical_data/RhoC"]
-            Rrs = self["geophysical_data/Rrs"]
-            data = xr.merge([Rhoc, Rrs, navigation])
-        elif filetype_info["file_type"] == "goci2_l2_iop":
-            a = self["geophysical_data/a_total"]
-            bb = self["geophysical_data/bb_total"]
-            data = self["geophysical_data"]
-            data = xr.merge([a, bb, data, navigation])
+        if filetype in GROUPS_MAP.keys():
+            groups = GROUPS_MAP[filetype]
         else:
-            data = self["geophysical_data"]
-            data = xr.merge([data, navigation])
-        return data
+            groups = ["geophysical_data", "navigation_data"]
+        return xr.merge([self[group] for group in groups])
 
     @property
     def start_time(self):

From 10c03778e5ce70f6adecdddaa006a1639fd9481a Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 1 Apr 2024 21:40:49 +0000
Subject: [PATCH 1223/1416] [pre-commit.ci] pre-commit autoupdate
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

updates:
- [github.com/astral-sh/ruff-pre-commit: v0.2.0 → v0.3.5](https://github.com/astral-sh/ruff-pre-commit/compare/v0.2.0...v0.3.5)
- [github.com/PyCQA/bandit: 1.7.7 → 1.7.8](https://github.com/PyCQA/bandit/compare/1.7.7...1.7.8)
- [github.com/pre-commit/mirrors-mypy: v1.8.0 → v1.9.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.8.0...v1.9.0)
---
 .pre-commit-config.yaml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 4a90da5ce9..eadd0f2c55 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -3,7 +3,7 @@ fail_fast: false
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
     # Ruff version.
-    rev: 'v0.2.0'
+    rev: 'v0.3.5'
     hooks:
       - id: ruff
   - repo: https://github.com/pre-commit/pre-commit-hooks
@@ -14,12 +14,12 @@ repos:
       - id: check-yaml
        args: [--unsafe]
   - repo: https://github.com/PyCQA/bandit
-    rev: '1.7.7'  # Update me!
+    rev: '1.7.8'  # Update me!
     hooks:
       - id: bandit
        args: [--ini, .bandit]
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: 'v1.8.0'  # Use the sha / tag you want to point at
+    rev: 'v1.9.0'  # Use the sha / tag you want to point at
     hooks:
       - id: mypy
        additional_dependencies:

From d5c27ef0497ecb9bdeb40744a2d9f807fe08e2fd Mon Sep 17 00:00:00 2001
From: Isotr0py <2037008807@qq.com>
Date: Tue, 2 Apr 2024 10:39:25 +0800
Subject: [PATCH 1224/1416] reduce abc for codebeat

---
 satpy/readers/goci2_l2_nc.py | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/satpy/readers/goci2_l2_nc.py b/satpy/readers/goci2_l2_nc.py
index 7cef5c074b..a79d582544 100644
--- a/satpy/readers/goci2_l2_nc.py
+++ b/satpy/readers/goci2_l2_nc.py
@@ -45,16 +45,13 @@ class GOCI2L2NCFileHandler(NetCDF4FileHandler):
     def __init__(self, filename, filename_info, filetype_info):
         """Initialize the reader."""
         super().__init__(filename, filename_info, filetype_info)
-        self.slot = filename_info.get("slot", None)
 
         self.attrs = self["/attrs"]
-        self.filetype = filetype_info["file_type"]
-        self.nc = self._merge_navigation_data(self.filetype)
+        self.nc = self._merge_navigation_data(filetype_info["file_type"])
 
-        self.sensor = self.attrs["instrument"].lower()
+        # Read metadata which are common to all datasets
         self.nlines = self.nc.sizes["number_of_lines"]
         self.ncols = self.nc.sizes["pixels_per_line"]
-        self.platform_shortname = filename_info["platform"]
         self.coverage = filename_info["coverage"]

From fbcf1a5b177c4d6d439477b32c85e75a678fc645 Mon Sep 17 00:00:00 2001
From: Longtsing
Date: Tue, 2 Apr 2024 11:31:56 +0800
Subject: [PATCH 1225/1416] Update _geos_area.py

---
 satpy/readers/_geos_area.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/satpy/readers/_geos_area.py b/satpy/readers/_geos_area.py
index f9c588b085..8a89b091c9 100644
--- a/satpy/readers/_geos_area.py
+++ b/satpy/readers/_geos_area.py
@@ -72,7 +72,7 @@ def get_area_extent(pdict):
             coff: Column offset factor
             loff: Line offset factor
             scandir: 'N2S' for standard (N->S), 'S2N' for inverse (S->N)
-            h: Altitude of satellite (m)
+            h: Altitude of satellite above the Earth's surface (m)
 
     Returns:
         aex: An area extent for the scene

From 24109a1a418385b8c48500663c7efb8cd4d6b777 Mon Sep 17 00:00:00 2001
From: Martin Raspaud <martin.raspaud@smhi.se>
Date: Tue, 2 Apr 2024 16:31:22 +0200
Subject: [PATCH 1226/1416] Fix the viirs EDR tests for newer xarray

---
 satpy/tests/reader_tests/test_viirs_edr.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py
index e61718e9db..d764891760 100644
--- a/satpy/tests/reader_tests/test_viirs_edr.py
+++ b/satpy/tests/reader_tests/test_viirs_edr.py
@@ -132,7 +132,8 @@ def _create_surf_refl_variables() -> dict[str, xr.DataArray]:
                  "valid_min": -180.0, "valid_max": 180.0}
     lat_attrs = {"standard_name": "latitude", "units": "degrees_north", "_FillValue": -999.9,
                  "valid_min": -90.0, "valid_max": 90.0}
-    sr_attrs = {"units": "unitless", "_FillValue": -9999, "scale_factor": 0.0001, "add_offset": 0.0}
+    sr_attrs = {"units": "unitless", "_FillValue": -9999,
+                "scale_factor": np.float32(0.0001), "add_offset": np.float32(0.0)}
 
     i_data = np.random.random_sample((I_ROWS, I_COLS)).astype(np.float32)
     m_data = np.random.random_sample((M_ROWS, M_COLS)).astype(np.float32)
@@ -257,7 +258,8 @@ def _create_continuous_variables(var_names: Iterable[str]) -> dict[str, xr.DataA
 
     lon_attrs = {"standard_name": "longitude", "units": "degrees_east", "_FillValue": -999.9}
     lat_attrs = {"standard_name": "latitude", "units": "degrees_north", "_FillValue": -999.9}
-    cont_attrs = {"units": "Kelvin", "_FillValue": -9999, "scale_factor": 0.0001, "add_offset": 0.0}
+    cont_attrs = {"units": "Kelvin", "_FillValue": -9999,
+                  "scale_factor": np.float32(0.0001), "add_offset": np.float32(0.0)}
 
     m_data = np.random.random_sample((M_ROWS, M_COLS)).astype(np.float32)
     data_arrs = {

From 525cef947910db4c884ca66bf5ba903112aad6c0 Mon Sep 17 00:00:00 2001
From: Simon Proud
Date: Sun, 7 Apr 2024 12:48:32 +0100
Subject: [PATCH 1227/1416] Update MSI SAFE reader to explicitly state that
 only L1C files are presently supported. Also, add an option to enable the
 user to select the observation time based on tile metadata rather than
 filename, which will prevent one tile from overwriting another when saving
 multiple tiles individually.

---
 satpy/etc/readers/msi_safe.yaml | 36 ++++++++++++++++-----------------
 satpy/readers/msi_safe.py       | 29 +++++++++++++++++++++-----
 2 files changed, 42 insertions(+), 23 deletions(-)

diff --git a/satpy/etc/readers/msi_safe.yaml b/satpy/etc/readers/msi_safe.yaml
index d93d269782..aec2f4b65e 100644
--- a/satpy/etc/readers/msi_safe.yaml
+++ b/satpy/etc/readers/msi_safe.yaml
@@ -1,7 +1,7 @@
 reader:
   name: msi_safe
   short_name: MSI SAFE
-  long_name: Sentinel-2 A and B MSI data in SAFE format
+  long_name: Sentinel-2 A and B MSI data in SAFE format, supporting L1C format only.
   description: SAFE Reader for MSI data (Sentinel-2)
   status: Nominal
   supports_fsspec: false
@@ -10,16 +10,16 @@ reader:
   reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
 
 file_types:
-  safe_granule:
+  safe_granule_l1c:
     file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C
-    file_patterns: ['{fmission_id:3s}_MSIL1C_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}.jp2']
+    file_patterns: ['{fmission_id:3s}_MSI{proclevel:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}.jp2']
     requires: [safe_metadata, safe_tile_metadata]
   safe_tile_metadata:
     file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSITileMDXML
-    file_patterns: ['{fmission_id:3s}_MSIL1C_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml']
+    file_patterns: ['{fmission_id:3s}_MSI{proclevel:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml']
   safe_metadata:
     file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIMDXML
-    file_patterns: ['{fmission_id:3s}_MSIL1C_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSIL1C.xml']
+    file_patterns: ['{fmission_id:3s}_MSI{proclevel:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSIL1C.xml']
 
 
 datasets:
@@ -36,7 +36,7 @@ datasets:
       radiance:
         standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
-    file_type: safe_granule
+    file_type: safe_granule_l1c
 
   B02:
     name: B02
@@ -50,7 +50,7 @@ datasets:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
-    file_type: safe_granule
+    file_type: safe_granule_l1c
 
   B03:
     name: B03
@@ -64,7 +64,7 @@ datasets:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
-    file_type: safe_granule
+    file_type: safe_granule_l1c
 
   B04:
     name: B04
@@ -78,7 +78,7 @@ datasets:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
-    file_type: safe_granule
+    file_type: safe_granule_l1c
 
   B05:
     name: B05
@@ -92,7 +92,7 @@ datasets:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
-    file_type: safe_granule
+    file_type: safe_granule_l1c
 
   B06:
     name: B06
@@ -106,7 +106,7 @@ datasets:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
-    file_type: safe_granule
+    file_type: safe_granule_l1c
 
   B07:
     name: B07
@@ -120,7 +120,7 @@ datasets:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
-    file_type: safe_granule
+    file_type: safe_granule_l1c
 
   B08:
     name: B08
@@ -134,7 +134,7 @@ datasets:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
-    file_type: safe_granule
+    file_type: safe_granule_l1c
 
   B8A:
     name: B8A
@@ -148,7 +148,7 @@ datasets:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
-    file_type: safe_granule
+    file_type: safe_granule_l1c
 
   B09:
     name: B09
@@ -162,7 +162,7 @@ datasets:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
-    file_type: safe_granule
+    file_type: safe_granule_l1c
 
   B10:
     name: B10
@@ -176,7 +176,7 @@ datasets:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
-    file_type: safe_granule
+    file_type: safe_granule_l1c
 
   B11:
     name: B11
@@ -190,7 +190,7 @@ datasets:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
-    file_type: safe_granule
+    file_type: safe_granule_l1c
 
   B12:
     name: B12
@@ -204,7 +204,7 @@ datasets:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
-    file_type: safe_granule
+    file_type: safe_granule_l1c
 
 
   solar_zenith_angle:
diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py
index 1131e40a96..839539bd4f 100644
--- a/satpy/readers/msi_safe.py
+++ b/satpy/readers/msi_safe.py
@@ -27,8 +27,16 @@
             reader='msi_safe',
             reader_kwargs={'mask_saturated': False})
     scene.load(['B01'])
-
-L1B format description for the files read here:
+MSI data typically have the same start time across multiple tiles, which can cause
+problems if iterating over multiple tiles, as the saved imagery from one tile
+may be overwritten by the next tile.
+To overcome this, the user can specify `use_tile_time`, which will determine the start
+time from the tile metadata rather than from the filename::
+    scene = satpy.Scene(filenames,
+                        reader='msi_safe',
+                        reader_kwargs={'use_tile_time': True})
+    scene.load(['B01'])
+L1C format description for the files read here:
 
 https://sentinels.copernicus.eu/documents/247904/0/Sentinel-2-product-specifications-document-V14-9.pdf/
 
@@ -37,6 +45,7 @@
 """
 
 import logging
 
 import dask.array as da
+from datetime import datetime
 import defusedxml.ElementTree as ET
 import numpy as np
 import xarray as xr
@@ -58,18 +67,22 @@
 class SAFEMSIL1C(BaseFileHandler):
     """File handler for SAFE MSI files (jp2)."""
 
-    def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, mask_saturated=True):
+    def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, mask_saturated=True, use_tile_time=False):
         """Initialize the reader."""
         super(SAFEMSIL1C, self).__init__(filename, filename_info, filetype_info)
         del mask_saturated
-        self._start_time = filename_info["observation_time"]
-        self._end_time = filename_info["observation_time"]
         self._channel = filename_info["band_name"]
         self._tile_mda = tile_mda
         self._mda = mda
         self.platform_name = PLATFORMS[filename_info["fmission_id"]]
 
+        if use_tile_time:
+            self._start_time = self._tile_mda.start_time()
+        else:
+            self._start_time = filename_info["observation_time"]
+        self._end_time = filename_info["observation_time"]
+
     def get_dataset(self, key, info):
         """Load a dataset."""
         if self._channel != key["name"]:
@@ -267,6 +280,12 @@ def _shape(self, resolution):
         cols = int(self.geocoding.find('Size[@resolution="' + str(resolution) + '"]/NCOLS').text)
         return cols, rows
 
+    def start_time(self):
+        """Get the observation time from the tile metadata."""
+        timestr = self.root.find('.//SENSING_TIME').text
+        return datetime.strptime(timestr, "%Y-%m-%dT%H:%M:%S.%fZ")
+
+
     @staticmethod
     def _do_interp(minterp, xcoord, ycoord):
         interp_points2 = np.vstack((ycoord.ravel(), xcoord.ravel()))

From 63567fee256574c2d85236c77910f49e357ce95c Mon Sep 17 00:00:00 2001
From: Simon Proud
Date: Sun, 7 Apr 2024 12:49:04 +0100
Subject: [PATCH 1228/1416] Remove extra blank line.

---
 satpy/readers/msi_safe.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py
index 839539bd4f..93e09bc8e0 100644
--- a/satpy/readers/msi_safe.py
+++ b/satpy/readers/msi_safe.py
@@ -285,7 +285,6 @@ def start_time(self):
         timestr = self.root.find('.//SENSING_TIME').text
         return datetime.strptime(timestr, "%Y-%m-%dT%H:%M:%S.%fZ")
 
-
     @staticmethod
     def _do_interp(minterp, xcoord, ycoord):
         interp_points2 = np.vstack((ycoord.ravel(), xcoord.ravel()))

From e722e9fb753590ae4d2f3d46d30469319ca4c51a Mon Sep 17 00:00:00 2001
From: Simon Proud
Date: Sun, 7 Apr 2024 13:39:57 +0100
Subject: [PATCH 1229/1416] Add tests to MSI SAFE reader to check that
 start_time is read as-expected.

---
 satpy/tests/reader_tests/test_msi_safe.py | 24 ++++++++++++++++++++-
 1 file changed, 23 insertions(+), 1 deletion(-)

diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py
index bcee32ddbb..85178f463a 100644
--- a/satpy/tests/reader_tests/test_msi_safe.py
+++ b/satpy/tests/reader_tests/test_msi_safe.py
@@ -17,6 +17,7 @@
 # satpy. If not, see <http://www.gnu.org/licenses/>.
"""Module for testing the satpy.readers.msi_safe module.""" import unittest.mock as mock +from datetime import datetime from io import BytesIO, StringIO import numpy as np @@ -25,6 +26,10 @@ from satpy.tests.utils import make_dataid +# Datetimes used for checking start time is correctly set. +fname_dt = datetime(2020, 10, 1, 18, 35, 41) +tilemd_dt = datetime(2020, 10, 1, 16, 34, 23, 153611) + mtd_tile_xml = b""" @@ -873,6 +878,10 @@ def setup_method(self): self.old_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_old_xml), filename_info, mock.MagicMock()) self.xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), filename_info, mock.MagicMock(), mask_saturated=True) + def test_start_time(self): + """Ensure start time is read correctly from XML.""" + assert self.xml_tile_fh.start_time() == tilemd_dt + def test_satellite_zenith_array(self): """Test reading the satellite zenith array.""" info = dict(xml_tag="Viewing_Incidence_Angles_Grids", xml_item="Zenith") @@ -971,10 +980,11 @@ class TestSAFEMSIL1C: def setup_method(self): """Set up the test.""" from satpy.readers.msi_safe import SAFEMSITileMDXML - self.filename_info = dict(observation_time=None, fmission_id="S2A", band_name="B01", dtile_number=None) + self.filename_info = dict(observation_time=fname_dt, fmission_id="S2A", band_name="B01", dtile_number=None) self.fake_data = xr.Dataset({"band_data": xr.DataArray([[[0, 1], [65534, 65535]]], dims=["band", "x", "y"])}) self.tile_mda = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(mtd_tile_xml), self.filename_info, mock.MagicMock()) + self.tile_mda.start_time.return_value = tilemd_dt @pytest.mark.parametrize(("mask_saturated", "calibration", "expected"), [(True, "reflectance", [[np.nan, 0.01 - 10], [645.34, np.inf]]), @@ -991,3 +1001,15 @@ def test_calibration_and_masking(self, mask_saturated, calibration, expected): with mock.patch("xarray.open_dataset", return_value=self.fake_data): res = self.jp2_fh.get_dataset(make_dataid(name="B01", calibration=calibration), info=dict()) np.testing.assert_allclose(res, expected) + + @pytest.mark.parametrize(("use_obs_time", "expected"), + [(True, tilemd_dt), + (False, fname_dt)]) + def test_start_time(self, use_obs_time, expected): + """Test that the correct start time is returned.""" + from satpy.readers.msi_safe import SAFEMSIL1C, SAFEMSIMDXML + + mda = SAFEMSIMDXML(StringIO(mtd_l1c_xml), self.filename_info, mock.MagicMock()) + self.jp2_fh = SAFEMSIL1C("somefile", self.filename_info, mock.MagicMock(), + mda, self.tile_mda, use_tile_time=use_obs_time) + assert expected == self.jp2_fh.start_time From 89a75cf1757d01dcb5aa4db9fd3299006a924d4d Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Sun, 7 Apr 2024 13:53:18 +0100 Subject: [PATCH 1230/1416] Fix optional kwarg in MSI SAFE reader. 
---
 satpy/readers/msi_safe.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py
index 93e09bc8e0..bd01bbf7ea 100644
--- a/satpy/readers/msi_safe.py
+++ b/satpy/readers/msi_safe.py
@@ -43,9 +43,9 @@
 """
 
 import logging
+from datetime import datetime
 
 import dask.array as da
-from datetime import datetime
 import defusedxml.ElementTree as ET
 import numpy as np
 import xarray as xr
@@ -123,7 +123,7 @@ def get_area_def(self, dsid):
 class SAFEMSIXMLMetadata(BaseFileHandler):
     """Base class for SAFE MSI XML metadata filehandlers."""
 
-    def __init__(self, filename, filename_info, filetype_info, mask_saturated=True):
+    def __init__(self, filename, filename_info, filetype_info, mask_saturated=True, use_tile_time=False):
         """Init the reader."""
         super().__init__(filename, filename_info, filetype_info)
         self._start_time = filename_info["observation_time"]
@@ -239,7 +239,7 @@ def _fill_swath_edges(angles):
 class SAFEMSITileMDXML(SAFEMSIXMLMetadata):
     """File handle for sentinel 2 safe XML tile metadata."""
 
-    def __init__(self, filename, filename_info, filetype_info, mask_saturated=True):
+    def __init__(self, filename, filename_info, filetype_info, mask_saturated=True, use_tile_time=False):
         """Init the reader."""
         super().__init__(filename, filename_info, filetype_info, mask_saturated)
         self.geocoding = self.root.find(".//Tile_Geocoding")
@@ -282,7 +282,7 @@ def _shape(self, resolution):
 
     def start_time(self):
         """Get the observation time from the tile metadata."""
-        timestr = self.root.find('.//SENSING_TIME').text
+        timestr = self.root.find(".//SENSING_TIME").text
         return datetime.strptime(timestr, "%Y-%m-%dT%H:%M:%S.%fZ")
 
     @staticmethod
     def _do_interp(minterp, xcoord, ycoord):
         interp_points2 = np.vstack((ycoord.ravel(), xcoord.ravel()))

From b83675e2ca0299497e13cbe4039ebd615954330c Mon Sep 17 00:00:00 2001
From: Simon Proud
Date: Mon, 8 Apr 2024 07:43:57 +0100
Subject: [PATCH 1231/1416] Fix MSI SAFE docs.

---
 satpy/readers/msi_safe.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py
index bd01bbf7ea..dc54dbe971 100644
--- a/satpy/readers/msi_safe.py
+++ b/satpy/readers/msi_safe.py
@@ -27,15 +27,18 @@
             reader='msi_safe',
             reader_kwargs={'mask_saturated': False})
     scene.load(['B01'])
+
 MSI data typically have the same start time across multiple tiles, which can cause
 problems if iterating over multiple tiles, as the saved imagery from one tile
 may be overwritten by the next tile.
 To overcome this, the user can specify `use_tile_time`, which will determine the start
 time from the tile metadata rather than from the filename::
+
     scene = satpy.Scene(filenames,
                         reader='msi_safe',
                         reader_kwargs={'use_tile_time': True})
     scene.load(['B01'])
+
 L1C format description for the files read here:
 
 https://sentinels.copernicus.eu/documents/247904/0/Sentinel-2-product-specifications-document-V14-9.pdf/

From 0b78e313ad3d8d41f3a90ff84201e7dfb3a084df Mon Sep 17 00:00:00 2001
From: clement laplace
Date: Tue, 9 Apr 2024 12:16:49 +0000
Subject: [PATCH 1232/1416] feat: Add the reader for the fci L1C Africa files

---
 satpy/etc/readers/fci_l1c_nc.yaml           | 1906 ++++++++++++++-----
 satpy/readers/fci_l1c_nc.py                 |  111 +-
 satpy/tests/reader_tests/test_fci_l1c_nc.py |  261 ++-
 3 files changed, 1738 insertions(+), 540 deletions(-)

diff --git a/satpy/etc/readers/fci_l1c_nc.yaml b/satpy/etc/readers/fci_l1c_nc.yaml
index d241b3fa9e..88630cd017 100644
--- a/satpy/etc/readers/fci_l1c_nc.yaml
+++ b/satpy/etc/readers/fci_l1c_nc.yaml
@@ -9,106 +9,737 @@ reader:
   status: Beta for full-disc FDHSI and HRFI, RSS not supported yet
   supports_fsspec: true
   reader: !!python/name:satpy.readers.yaml_reader.GEOVariableSegmentYAMLReader
-  sensors: [ fci ]
+  sensors: [fci]
 
 # Source: MTG FCI L1 Product User Guide [FCIL1PUG]
 # https://www.eumetsat.int/media/45923
 file_types:
   fci_l1c_fdhsi:
     file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler
-    file_patterns: [ '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-FDHSI-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' ]
+    file_patterns:
+      [
+        "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-FDHSI-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc",
+      ]
     expected_segments: 40
     required_netcdf_variables:
-      - attr/platform
-      - data/{channel_name}/measured/start_position_row
-      - data/{channel_name}/measured/end_position_row
-      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber
-      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a
-      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b
-      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1
-      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2
-      - data/{channel_name}/measured/radiance_unit_conversion_coefficient
-      - data/{channel_name}/measured/channel_effective_solar_irradiance
-      - data/{channel_name}/measured/effective_radiance
-      - data/{channel_name}/measured/x
-      - data/{channel_name}/measured/y
-      - data/{channel_name}/measured/pixel_quality
-      - data/{channel_name}/measured/index_map
-      - data/mtg_geos_projection
-      - data/swath_direction
-      - data/swath_number
-      - index
-      - state/celestial/earth_sun_distance
-      - state/celestial/subsolar_latitude
-      - state/celestial/subsolar_longitude
-      - state/celestial/sun_satellite_distance
-      - state/platform/platform_altitude
-      - state/platform/subsatellite_latitude
-      - state/platform/subsatellite_longitude
-      - time
+      - attr/platform
+      - data/{channel_name}/measured/start_position_row
+      - data/{channel_name}/measured/end_position_row
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2
+      - data/{channel_name}/measured/radiance_unit_conversion_coefficient
+      - data/{channel_name}/measured/channel_effective_solar_irradiance
+      - data/{channel_name}/measured/effective_radiance
+      - data/{channel_name}/measured/x
+      - data/{channel_name}/measured/y
+      - data/{channel_name}/measured/pixel_quality
+      - data/{channel_name}/measured/index_map
+      - data/mtg_geos_projection
+      - data/swath_direction
+      - data/swath_number
+      - index
+      - state/celestial/earth_sun_distance
+      - state/celestial/subsolar_latitude
+      - state/celestial/subsolar_longitude
+      - state/celestial/sun_satellite_distance
+      - state/platform/platform_altitude
+      - state/platform/subsatellite_latitude
+      - state/platform/subsatellite_longitude
+      - time
     variable_name_replacements:
       channel_name:
-        - vis_04
-        - vis_05
-        - vis_06
-        - vis_08
-        - vis_09
-        - nir_13
-        - nir_16
-        - nir_22
-        - ir_38
-        - wv_63
-        - wv_73
-        - ir_87
-        - ir_97
-        - ir_105
-        - ir_123
-        - ir_133
+        - vis_04
+        - vis_05
+        - vis_06
+        - vis_08
+        - vis_09
+        - nir_13
+        - nir_16
+        - nir_22
+        - ir_38
+        - wv_63
+        - wv_73
+        - ir_87
+        - ir_97
+        - ir_105
+        - ir_123
+        - ir_133
   fci_l1c_hrfi:
     file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler
-    file_patterns: [ '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-HRFI-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' ]
+    file_patterns:
+      [
+        "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-HRFI-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc",
+      ]
     expected_segments: 40
     required_netcdf_variables:
-      - attr/platform
-      - data/{channel_name}/measured/start_position_row
-      - data/{channel_name}/measured/end_position_row
-      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber
-      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a
-      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b
-      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1
-      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2
-      - data/{channel_name}/measured/radiance_unit_conversion_coefficient
-      - data/{channel_name}/measured/channel_effective_solar_irradiance
-      - data/{channel_name}/measured/effective_radiance
-      - data/{channel_name}/measured/x
-      - data/{channel_name}/measured/y
-      - data/{channel_name}/measured/pixel_quality
-      - data/{channel_name}/measured/index_map
-      - data/mtg_geos_projection
-      - data/swath_direction
-      - data/swath_number
-      - index
-      - state/celestial/earth_sun_distance
-      - state/celestial/subsolar_latitude
-      - state/celestial/subsolar_longitude
-      - state/celestial/sun_satellite_distance
-      - state/platform/platform_altitude
-      - state/platform/subsatellite_latitude
-      - state/platform/subsatellite_longitude
-      - time
+      - attr/platform
+      - data/{channel_name}/measured/start_position_row
+      - data/{channel_name}/measured/end_position_row
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2
+      - data/{channel_name}/measured/radiance_unit_conversion_coefficient
+      - data/{channel_name}/measured/channel_effective_solar_irradiance
+      - data/{channel_name}/measured/effective_radiance
+      - data/{channel_name}/measured/x
+      - data/{channel_name}/measured/y
+      - data/{channel_name}/measured/pixel_quality
+      - data/{channel_name}/measured/index_map
+      - data/mtg_geos_projection
+      - data/swath_direction
+      - data/swath_number
+      - index
+      - state/celestial/earth_sun_distance
+      - state/celestial/subsolar_latitude
+      - state/celestial/subsolar_longitude
+      - state/celestial/sun_satellite_distance
+      - state/platform/platform_altitude
+      - state/platform/subsatellite_latitude
+      - state/platform/subsatellite_longitude
+      - time
     variable_name_replacements:
       channel_name:
-        - vis_06_hr
-        - nir_22_hr
-        - ir_38_hr
-        - ir_105_hr
+        - vis_06_hr
+        - nir_22_hr
+        - ir_38_hr
+        - ir_105_hr
+  fci_l1c_af_vis_06:
+    file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler
+    file_patterns:
+      [
+        "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc",
+        "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc",
+      ]
+    expected_segments: 1
+    required_netcdf_variables:
+      - attr/platform
+      - data/{channel_name}/measured/start_position_row
+      - data/{channel_name}/measured/end_position_row
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2
+      - data/{channel_name}/measured/radiance_unit_conversion_coefficient
+      - data/{channel_name}/measured/channel_effective_solar_irradiance
+      - data/{channel_name}/measured/effective_radiance
+      - data/{channel_name}/measured/x
+      - data/{channel_name}/measured/y
+      - data/{channel_name}/measured/pixel_quality
+      - data/{channel_name}/measured/index_map
+      - data/mtg_geos_projection
+      - data/swath_direction
+      - data/swath_number
+      - index
+      - state/celestial/earth_sun_distance
+      - state/celestial/subsolar_latitude
+      - state/celestial/subsolar_longitude
+      - state/celestial/sun_satellite_distance
+      - state/platform/platform_altitude
+      - state/platform/subsatellite_latitude
+      - state/platform/subsatellite_longitude
+      - time
+    variable_name_replacements:
+      channel_name:
+        - vis_06
+  fci_l1c_af_vis_04:
+    file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler
+    file_patterns:
+      [
+        "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS04-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc",
+        "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-VIS04-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc",
+      ]
+    expected_segments: 1
+    required_netcdf_variables:
+      - attr/platform
+      - data/{channel_name}/measured/start_position_row
+      - data/{channel_name}/measured/end_position_row
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2
+      - data/{channel_name}/measured/radiance_unit_conversion_coefficient
+      - data/{channel_name}/measured/channel_effective_solar_irradiance
+      - data/{channel_name}/measured/effective_radiance
+      - data/{channel_name}/measured/x
+      - data/{channel_name}/measured/y
+      - data/{channel_name}/measured/pixel_quality
+      - data/{channel_name}/measured/index_map
+      - data/mtg_geos_projection
+      - data/swath_direction
+      - data/swath_number
+      - index
+      - state/celestial/earth_sun_distance
+      - state/celestial/subsolar_latitude
+      - state/celestial/subsolar_longitude
+      - state/celestial/sun_satellite_distance
+      - state/platform/platform_altitude
+      - state/platform/subsatellite_latitude
+      - state/platform/subsatellite_longitude
+      - time
+    variable_name_replacements:
+      channel_name:
+        - vis_04
+  fci_l1c_af_vis_05:
+    file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler
+    file_patterns:
+      [
+        "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS05-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc",
+        "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-VIS05-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc",
+      ]
+    expected_segments: 1
+    required_netcdf_variables:
+      - attr/platform
+      - data/{channel_name}/measured/start_position_row
+      - data/{channel_name}/measured/end_position_row
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2
+      - data/{channel_name}/measured/radiance_unit_conversion_coefficient
+      - data/{channel_name}/measured/channel_effective_solar_irradiance
+      - data/{channel_name}/measured/effective_radiance
+      - data/{channel_name}/measured/x
+      - data/{channel_name}/measured/y
+      - data/{channel_name}/measured/pixel_quality
+      - data/{channel_name}/measured/index_map
+      - data/mtg_geos_projection
+      - data/swath_direction
+      - data/swath_number
+      - index
+      - state/celestial/earth_sun_distance
+      - state/celestial/subsolar_latitude
+      - state/celestial/subsolar_longitude
+      - state/celestial/sun_satellite_distance
+      - state/platform/platform_altitude
+      - state/platform/subsatellite_latitude
+      - state/platform/subsatellite_longitude
+      - time
+    variable_name_replacements:
+      channel_name:
+        - vis_05
+  fci_l1c_af_vis_08:
+    file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler
+    file_patterns:
+      [
+        "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS08-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc",
+        "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-VIS08-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc",
+      ]
+    expected_segments: 1
+    required_netcdf_variables:
+      - attr/platform
+      - data/{channel_name}/measured/start_position_row
+      - data/{channel_name}/measured/end_position_row
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2
+      - data/{channel_name}/measured/radiance_unit_conversion_coefficient
+      - data/{channel_name}/measured/channel_effective_solar_irradiance
+      - data/{channel_name}/measured/effective_radiance
+      - data/{channel_name}/measured/x
+      - data/{channel_name}/measured/y
+      - data/{channel_name}/measured/pixel_quality
+      - data/{channel_name}/measured/index_map
+      - data/mtg_geos_projection
+      - data/swath_direction
+      - data/swath_number
+      - index
+      - state/celestial/earth_sun_distance
+      - state/celestial/subsolar_latitude
+      - state/celestial/subsolar_longitude
+      - state/celestial/sun_satellite_distance
+      - state/platform/platform_altitude
+      - state/platform/subsatellite_latitude
+      - state/platform/subsatellite_longitude
+      - time
+    variable_name_replacements:
+      channel_name:
+        - vis_08
+  fci_l1c_af_vis_09:
+    file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler
+    file_patterns:
+      [
+        "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS09-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc",
+        "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-VIS09-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc",
+      ]
+    expected_segments: 1
+    required_netcdf_variables:
+      - attr/platform
+      - data/{channel_name}/measured/start_position_row
+      - data/{channel_name}/measured/end_position_row
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2
+      - data/{channel_name}/measured/radiance_unit_conversion_coefficient
+      - data/{channel_name}/measured/channel_effective_solar_irradiance
+      - data/{channel_name}/measured/effective_radiance
+      - data/{channel_name}/measured/x
+      - data/{channel_name}/measured/y
+      - data/{channel_name}/measured/pixel_quality
+      - data/{channel_name}/measured/index_map
+      - data/mtg_geos_projection
+      - data/swath_direction
+      - data/swath_number
+      - index
+      - state/celestial/earth_sun_distance
+      - state/celestial/subsolar_latitude
+      - state/celestial/subsolar_longitude
+      - state/celestial/sun_satellite_distance
+      - state/platform/platform_altitude
+      - state/platform/subsatellite_latitude
+      - state/platform/subsatellite_longitude
+      - time
+    variable_name_replacements:
+      channel_name:
+        - vis_09
+  fci_l1c_af_nir_13:
+    file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler
+    file_patterns:
+      [
+        "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-NIR13-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc",
+        "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-NIR13-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc",
+      ]
+    expected_segments: 1
+    required_netcdf_variables:
+      - attr/platform
+      - data/{channel_name}/measured/start_position_row
+      - data/{channel_name}/measured/end_position_row
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2
+      - data/{channel_name}/measured/radiance_unit_conversion_coefficient
+      - data/{channel_name}/measured/channel_effective_solar_irradiance
+      - data/{channel_name}/measured/effective_radiance
+      - data/{channel_name}/measured/x
+      - data/{channel_name}/measured/y
+      - data/{channel_name}/measured/pixel_quality
+      - data/{channel_name}/measured/index_map
+      - data/mtg_geos_projection
+      - data/swath_direction
+      - data/swath_number
+      - index
+      - state/celestial/earth_sun_distance
+      - state/celestial/subsolar_latitude
+      - state/celestial/subsolar_longitude
+      - state/celestial/sun_satellite_distance
+      - state/platform/platform_altitude
+      - state/platform/subsatellite_latitude
+      - state/platform/subsatellite_longitude
+      - time
+    variable_name_replacements:
+      channel_name:
+        - nir_13
+  fci_l1c_af_nir_16:
+    file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler
+    file_patterns:
+      [
+        "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-NIR16-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc",
+        "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-NIR16-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc",
+      ]
+    expected_segments: 1
+    required_netcdf_variables:
+      - attr/platform
+      - data/{channel_name}/measured/start_position_row
+      - data/{channel_name}/measured/end_position_row
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2
+      - data/{channel_name}/measured/radiance_unit_conversion_coefficient
+      - data/{channel_name}/measured/channel_effective_solar_irradiance
+      - data/{channel_name}/measured/effective_radiance
+      - data/{channel_name}/measured/x
+      - data/{channel_name}/measured/y
+      - data/{channel_name}/measured/pixel_quality
+      - data/{channel_name}/measured/index_map
+      - data/mtg_geos_projection
+      - data/swath_direction
+      - data/swath_number
+      - index
+      - state/celestial/earth_sun_distance
+      - state/celestial/subsolar_latitude
+      - state/celestial/subsolar_longitude
+      - state/celestial/sun_satellite_distance
+      - state/platform/platform_altitude
+      - state/platform/subsatellite_latitude
+      - state/platform/subsatellite_longitude
+      - time
+    variable_name_replacements:
+      channel_name:
+        - nir_16
+  fci_l1c_af_nir_22:
+    file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler
+    file_patterns:
+      [
+        "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-NIR22-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc",
+        "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-NIR22-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc",
+      ]
+    expected_segments: 1
+    required_netcdf_variables:
+      - attr/platform
+      - data/{channel_name}/measured/start_position_row
+      - data/{channel_name}/measured/end_position_row
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2
+      - data/{channel_name}/measured/radiance_unit_conversion_coefficient
+      - data/{channel_name}/measured/channel_effective_solar_irradiance
+      - data/{channel_name}/measured/effective_radiance
+      - data/{channel_name}/measured/x
+      - data/{channel_name}/measured/y
+      - data/{channel_name}/measured/pixel_quality
+      - data/{channel_name}/measured/index_map
+      - data/mtg_geos_projection
+      - data/swath_direction
+      - data/swath_number
+      - index
+      - state/celestial/earth_sun_distance
+      - state/celestial/subsolar_latitude
+      - state/celestial/subsolar_longitude
+      - state/celestial/sun_satellite_distance
+      - state/platform/platform_altitude
+      - state/platform/subsatellite_latitude
+      - state/platform/subsatellite_longitude
+      - time
+    variable_name_replacements:
+      channel_name:
+        - nir_22
+  fci_l1c_af_ir_38:
+    file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler
+    file_patterns:
+      [
+        "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR38-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc",
+        "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-IR38-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc",
+      ]
+    expected_segments: 1
+    required_netcdf_variables:
+      - attr/platform
+      - data/{channel_name}/measured/start_position_row
+      - data/{channel_name}/measured/end_position_row
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2
+      - data/{channel_name}/measured/radiance_unit_conversion_coefficient
+      - data/{channel_name}/measured/channel_effective_solar_irradiance
+      - data/{channel_name}/measured/effective_radiance
+      - data/{channel_name}/measured/x
+      - data/{channel_name}/measured/y
+      - data/{channel_name}/measured/pixel_quality
+      - data/{channel_name}/measured/index_map
+      - data/mtg_geos_projection
+      - data/swath_direction
+      - data/swath_number
+      - index
+      - state/celestial/earth_sun_distance
+      - state/celestial/subsolar_latitude
+      - state/celestial/subsolar_longitude
+      - state/celestial/sun_satellite_distance
+      - state/platform/platform_altitude
+      - state/platform/subsatellite_latitude
+      - state/platform/subsatellite_longitude
+      - time
+    variable_name_replacements:
+      channel_name:
+        - ir_38
+  fci_l1c_af_wv_63:
+    file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler
+    file_patterns:
+      [
+        "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-WV63-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc",
+        "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-WV63-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc",
+      ]
+    expected_segments: 1
+    required_netcdf_variables:
+      - attr/platform
+      - data/{channel_name}/measured/start_position_row
+      - data/{channel_name}/measured/end_position_row
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2
+      - data/{channel_name}/measured/radiance_unit_conversion_coefficient
+      - data/{channel_name}/measured/channel_effective_solar_irradiance
+      - data/{channel_name}/measured/effective_radiance
+      - data/{channel_name}/measured/x
+      - data/{channel_name}/measured/y
+      - data/{channel_name}/measured/pixel_quality
+      - data/{channel_name}/measured/index_map
+      - data/mtg_geos_projection
+      - data/swath_direction
+      - data/swath_number
+      - index
+      - state/celestial/earth_sun_distance
+      - state/celestial/subsolar_latitude
+      - state/celestial/subsolar_longitude
+      - state/celestial/sun_satellite_distance
+      - state/platform/platform_altitude
+      - state/platform/subsatellite_latitude
+      - state/platform/subsatellite_longitude
+      - time
+    variable_name_replacements:
+      channel_name:
+        - wv_63
+  fci_l1c_af_wv_73:
+    file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler
+    file_patterns:
+      [
+        "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-WV73-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc",
+        "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-WV73-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc",
+      ]
+    expected_segments: 1
+    required_netcdf_variables:
+      - attr/platform
+      - data/{channel_name}/measured/start_position_row
+      - data/{channel_name}/measured/end_position_row
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a
+      - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1
+      - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2
+      - data/{channel_name}/measured/radiance_unit_conversion_coefficient
+      - data/{channel_name}/measured/channel_effective_solar_irradiance
+      - data/{channel_name}/measured/effective_radiance
+      - data/{channel_name}/measured/x
+      - data/{channel_name}/measured/y
+      - data/{channel_name}/measured/pixel_quality
+      - data/{channel_name}/measured/index_map
+      - data/mtg_geos_projection
+      - data/swath_direction
+      - data/swath_number
+      - index
+      - state/celestial/earth_sun_distance
+      - state/celestial/subsolar_latitude
+      - state/celestial/subsolar_longitude
+      - state/celestial/sun_satellite_distance
+      - state/platform/platform_altitude
+      - state/platform/subsatellite_latitude
+      - state/platform/subsatellite_longitude
+      - time
+    variable_name_replacements:
+      channel_name:
+        - wv_73
+  fci_l1c_af_ir_87:
+    file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler
+    file_patterns:
+      [
+        "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR87-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc",
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-IR87-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + ] + expected_segments: 1 + required_netcdf_variables: + - attr/platform + - data/{channel_name}/measured/start_position_row + - data/{channel_name}/measured/end_position_row + - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber + - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a + - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b + - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 + - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 + - data/{channel_name}/measured/radiance_unit_conversion_coefficient + - data/{channel_name}/measured/channel_effective_solar_irradiance + - data/{channel_name}/measured/effective_radiance + - data/{channel_name}/measured/x + - data/{channel_name}/measured/y + - data/{channel_name}/measured/pixel_quality + - data/{channel_name}/measured/index_map + - data/mtg_geos_projection + - data/swath_direction + - data/swath_number + - index + - state/celestial/earth_sun_distance + - state/celestial/subsolar_latitude + - state/celestial/subsolar_longitude + - state/celestial/sun_satellite_distance + - state/platform/platform_altitude + - state/platform/subsatellite_latitude + - state/platform/subsatellite_longitude + - time + variable_name_replacements: + channel_name: + - ir_87 + fci_l1c_af_ir_97: + file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler + file_patterns: + [ + "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR97-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-IR97-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + ] + expected_segments: 1 + required_netcdf_variables: + - attr/platform + - data/{channel_name}/measured/start_position_row + - data/{channel_name}/measured/end_position_row + - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber + - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a + - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b + - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 + - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 + - data/{channel_name}/measured/radiance_unit_conversion_coefficient + - data/{channel_name}/measured/channel_effective_solar_irradiance + - data/{channel_name}/measured/effective_radiance + - data/{channel_name}/measured/x + - data/{channel_name}/measured/y + - 
data/{channel_name}/measured/pixel_quality + - data/{channel_name}/measured/index_map + - data/mtg_geos_projection + - data/swath_direction + - data/swath_number + - index + - state/celestial/earth_sun_distance + - state/celestial/subsolar_latitude + - state/celestial/subsolar_longitude + - state/celestial/sun_satellite_distance + - state/platform/platform_altitude + - state/platform/subsatellite_latitude + - state/platform/subsatellite_longitude + - time + variable_name_replacements: + channel_name: + - ir_97 + fci_l1c_af_ir_105: + file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler + file_patterns: + [ + "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR105-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-IR105-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + ] + expected_segments: 1 + required_netcdf_variables: + - attr/platform + - data/{channel_name}/measured/start_position_row + - data/{channel_name}/measured/end_position_row + - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber + - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a + - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b + - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 + - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 + - data/{channel_name}/measured/radiance_unit_conversion_coefficient + - data/{channel_name}/measured/channel_effective_solar_irradiance + - data/{channel_name}/measured/effective_radiance + - data/{channel_name}/measured/x + - data/{channel_name}/measured/y + - data/{channel_name}/measured/pixel_quality + - data/{channel_name}/measured/index_map + - data/mtg_geos_projection + - data/swath_direction + - data/swath_number + - index + - state/celestial/earth_sun_distance + - state/celestial/subsolar_latitude + - state/celestial/subsolar_longitude + - state/celestial/sun_satellite_distance + - state/platform/platform_altitude + - state/platform/subsatellite_latitude + - state/platform/subsatellite_longitude + - time + variable_name_replacements: + channel_name: + - ir_105 + fci_l1c_af_ir_123: + file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler + file_patterns: + [ + "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR123-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-IR123-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + ] + expected_segments: 1 + required_netcdf_variables: + - attr/platform + - data/{channel_name}/measured/start_position_row + - data/{channel_name}/measured/end_position_row + - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber + - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a + - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b + - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 + - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 + - data/{channel_name}/measured/radiance_unit_conversion_coefficient + - data/{channel_name}/measured/channel_effective_solar_irradiance + - data/{channel_name}/measured/effective_radiance + - data/{channel_name}/measured/x + - data/{channel_name}/measured/y + - data/{channel_name}/measured/pixel_quality + - data/{channel_name}/measured/index_map + - data/mtg_geos_projection + - data/swath_direction + - data/swath_number + - index + - state/celestial/earth_sun_distance + - state/celestial/subsolar_latitude + - state/celestial/subsolar_longitude + - state/celestial/sun_satellite_distance + - state/platform/platform_altitude + - state/platform/subsatellite_latitude + - state/platform/subsatellite_longitude + - time + variable_name_replacements: + channel_name: + - ir_123 + fci_l1c_af_ir_133: + file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler + file_patterns: + [ + "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR133-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-IR133-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + ] + expected_segments: 1 + required_netcdf_variables: + - attr/platform + - data/{channel_name}/measured/start_position_row + - data/{channel_name}/measured/end_position_row + - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber + - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a + - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b + - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 + - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 + - data/{channel_name}/measured/radiance_unit_conversion_coefficient + - data/{channel_name}/measured/channel_effective_solar_irradiance + - data/{channel_name}/measured/effective_radiance + - data/{channel_name}/measured/x + - data/{channel_name}/measured/y + - 
data/{channel_name}/measured/pixel_quality + - data/{channel_name}/measured/index_map + - data/mtg_geos_projection + - data/swath_direction + - data/swath_number + - index + - state/celestial/earth_sun_distance + - state/celestial/subsolar_latitude + - state/celestial/subsolar_longitude + - state/celestial/sun_satellite_distance + - state/platform/platform_altitude + - state/platform/subsatellite_latitude + - state/platform/subsatellite_longitude + - time + variable_name_replacements: + channel_name: + - ir_133 datasets: vis_04: name: vis_04 sensor: fci - wavelength: [ 0.384, 0.444, 0.504 ] - resolution: 1000 + wavelength: [0.384, 0.444, 0.504] + resolution: + 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_04] } + 3000: { file_type: fci_l1c_af_vis_04 } calibration: counts: standard_name: counts @@ -119,13 +750,14 @@ datasets: reflectance: standard_name: toa_bidirectional_reflectance units: "%" - file_type: fci_l1c_fdhsi vis_05: name: vis_05 sensor: fci wavelength: [0.470, 0.510, 0.550] - resolution: 1000 + resolution: + 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_05] } + 3000: { file_type: fci_l1c_af_vis_05 } calibration: counts: standard_name: counts @@ -136,15 +768,15 @@ datasets: reflectance: standard_name: toa_bidirectional_reflectance units: "%" - file_type: fci_l1c_fdhsi vis_06: name: vis_06 sensor: fci wavelength: [0.590, 0.640, 0.690] resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06] } + 3000: { file_type: fci_l1c_af_vis_06 } calibration: counts: standard_name: counts @@ -160,7 +792,9 @@ datasets: name: vis_08 sensor: fci wavelength: [0.815, 0.865, 0.915] - resolution: 1000 + resolution: + 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_08] } + 3000: { file_type: fci_l1c_af_vis_08 } calibration: counts: standard_name: counts @@ -171,13 +805,14 @@ datasets: reflectance: standard_name: toa_bidirectional_reflectance units: "%" - file_type: fci_l1c_fdhsi vis_09: name: vis_09 sensor: fci wavelength: [0.894, 0.914, 0.934] - resolution: 1000 + resolution: + 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_09] } + 3000: { file_type: fci_l1c_af_vis_09 } calibration: counts: standard_name: counts @@ -188,13 +823,14 @@ datasets: reflectance: standard_name: toa_bidirectional_reflectance units: "%" - file_type: fci_l1c_fdhsi nir_13: name: nir_13 sensor: fci wavelength: [1.350, 1.380, 1.410] - resolution: 1000 + resolution: + 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_nir_13] } + 3000: { file_type: fci_l1c_af_nir_13 } calibration: counts: standard_name: counts @@ -205,13 +841,14 @@ datasets: reflectance: standard_name: toa_bidirectional_reflectance units: "%" - file_type: fci_l1c_fdhsi nir_16: name: nir_16 sensor: fci wavelength: [1.560, 1.610, 1.660] - resolution: 1000 + resolution: + 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_nir_16] } + 3000: { file_type: fci_l1c_af_nir_16 } calibration: counts: standard_name: counts @@ -222,15 +859,15 @@ datasets: reflectance: standard_name: toa_bidirectional_reflectance units: "%" - file_type: fci_l1c_fdhsi nir_22: name: nir_22 sensor: fci wavelength: [2.200, 2.250, 2.300] resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_nir_22] } + 3000: { file_type: fci_l1c_af_nir_22 } calibration: counts: standard_name: counts @@ -247,8 +884,9 @@ datasets: sensor: fci wavelength: [3.400, 3.800, 4.200] resolution: - 1000: 
{file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_38] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } calibration: counts: standard_name: counts @@ -264,7 +902,10 @@ datasets: name: wv_63 sensor: fci wavelength: [5.300, 6.300, 7.300] - resolution: 2000 + resolution: + 1000: { file_type: fci_l1c_af_wv_63 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_63 } calibration: counts: standard_name: counts @@ -275,13 +916,15 @@ datasets: brightness_temperature: standard_name: toa_brightness_temperature units: "K" - file_type: fci_l1c_fdhsi wv_73: name: wv_73 sensor: fci wavelength: [6.850, 7.350, 7.850] - resolution: 2000 + resolution: + 1000: { file_type: fci_l1c_af_wv_73 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_73 } calibration: counts: standard_name: counts @@ -292,13 +935,15 @@ datasets: brightness_temperature: standard_name: toa_brightness_temperature units: "K" - file_type: fci_l1c_fdhsi ir_87: name: ir_87 sensor: fci wavelength: [8.300, 8.700, 9.100] - resolution: 2000 + resolution: + 1000: { file_type: fci_l1c_af_ir_87 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } calibration: counts: standard_name: counts @@ -309,13 +954,15 @@ datasets: brightness_temperature: standard_name: toa_brightness_temperature units: "K" - file_type: fci_l1c_fdhsi ir_97: name: ir_97 sensor: fci wavelength: [9.360, 9.660, 9.960] - resolution: 2000 + resolution: + 1000: { file_type: fci_l1c_af_ir_97 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } calibration: counts: standard_name: counts @@ -326,15 +973,15 @@ datasets: brightness_temperature: standard_name: toa_brightness_temperature units: "K" - file_type: fci_l1c_fdhsi ir_105: name: ir_105 sensor: fci wavelength: [9.800, 10.500, 11.200] resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_105 } calibration: counts: standard_name: counts @@ -350,7 +997,10 @@ datasets: name: ir_123 sensor: fci wavelength: [11.800, 12.300, 12.800] - resolution: 2000 + resolution: + 1000: { file_type: fci_l1c_af_ir_123 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_123 } calibration: counts: standard_name: counts @@ -361,13 +1011,15 @@ datasets: brightness_temperature: standard_name: toa_brightness_temperature units: "K" - file_type: fci_l1c_fdhsi ir_133: name: ir_133 sensor: fci wavelength: [12.700, 13.300, 13.900] - resolution: 2000 + resolution: + 1000: { file_type: fci_l1c_af_ir_133 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } calibration: counts: standard_name: counts @@ -378,1333 +1030,1595 @@ datasets: brightness_temperature: standard_name: toa_brightness_temperature units: "K" - file_type: fci_l1c_fdhsi vis_04_pixel_quality: name: vis_04_pixel_quality sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_04, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_04 } vis_05_pixel_quality: name: vis_05_pixel_quality sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_05, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_05 } vis_06_pixel_quality: name: vis_06_pixel_quality sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: 
{file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_06 } vis_08_pixel_quality: name: vis_08_pixel_quality sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_08, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_08 } vis_09_pixel_quality: name: vis_09_pixel_quality sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_09, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_09 } nir_13_pixel_quality: name: nir_13_pixel_quality sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_nir_13, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_13 } nir_16_pixel_quality: name: nir_16_pixel_quality sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_nir_16, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_16 } nir_22_pixel_quality: name: nir_22_pixel_quality sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_nir_22, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_22 } ir_38_pixel_quality: name: ir_38_pixel_quality sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_38] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } wv_63_pixel_quality: name: wv_63_pixel_quality sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_wv_63 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_63 } wv_73_pixel_quality: name: wv_73_pixel_quality sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_wv_73 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_73 } ir_87_pixel_quality: name: ir_87_pixel_quality sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_87 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } ir_97_pixel_quality: name: ir_97_pixel_quality sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_97 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } ir_105_pixel_quality: name: ir_105_pixel_quality sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_105 } ir_123_pixel_quality: name: ir_123_pixel_quality sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_123 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_123 } ir_133_pixel_quality: name: ir_133_pixel_quality sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_133 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } vis_04_index_map: name: vis_04_index_map sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_04, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_04 } vis_05_index_map: name: vis_05_index_map sensor: fci 
- resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_05, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_05 } vis_06_index_map: name: vis_06_index_map sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_06 } vis_08_index_map: name: vis_08_index_map sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_08, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_08 } vis_09_index_map: name: vis_09_index_map sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_09, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_09 } nir_13_index_map: name: nir_13_index_map sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_nir_13, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_13 } nir_16_index_map: name: nir_16_index_map sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_nir_16, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_16 } nir_22_index_map: name: nir_22_index_map sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_nir_22, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_22 } ir_38_index_map: name: ir_38_index_map sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_38] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } wv_63_index_map: name: wv_63_index_map sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_wv_63 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_63 } wv_73_index_map: name: wv_73_index_map sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_wv_73 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_73 } ir_87_index_map: name: ir_87_index_map sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_87 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } ir_97_index_map: name: ir_97_index_map sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_97 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } ir_105_index_map: name: ir_105_index_map sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_105 } ir_123_index_map: name: ir_123_index_map sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_123 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_123 } ir_133_index_map: name: ir_133_index_map sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_133 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } vis_04_time: name: vis_04_time units: s sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + 
resolution: + 1000: { file_type: [fci_l1c_af_vis_04, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_04 } vis_05_time: name: vis_05_time units: s sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_05, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_05 } vis_06_time: name: vis_06_time units: s sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_06 } vis_08_time: name: vis_08_time units: s sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_08, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_08 } vis_09_time: name: vis_09_time units: s sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_09, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_09 } nir_13_time: name: nir_13_time units: s sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_nir_13, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_13 } nir_16_time: name: nir_16_time units: s sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_nir_16, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_16 } nir_22_time: name: nir_22_time units: s sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_nir_22, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_22 } ir_38_time: name: ir_38_time units: s sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_38] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } wv_63_time: name: wv_63_time units: s sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_wv_63 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_63 } wv_73_time: name: wv_73_time units: s sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_wv_73 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_73 } ir_87_time: name: ir_87_time units: s sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_87 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } ir_97_time: name: ir_97_time units: s sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_97 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } ir_105_time: name: ir_105_time units: s sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_105 } ir_123_time: name: ir_123_time units: s sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_123 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_123 } ir_133_time: name: ir_133_time units: s sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_133 } + 2000: { file_type: 
fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } vis_04_swath_direction: name: vis_04_swath_direction sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_04, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_04 } vis_05_swath_direction: name: vis_05_swath_direction sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_05, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_05 } vis_06_swath_direction: name: vis_06_swath_direction sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_06 } vis_08_swath_direction: name: vis_08_swath_direction sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_08, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_08 } vis_09_swath_direction: name: vis_09_swath_direction sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_09, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_09 } nir_13_swath_direction: name: nir_13_swath_direction sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_nir_13, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_13 } nir_16_swath_direction: name: nir_16_swath_direction sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_nir_16, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_16 } nir_22_swath_direction: name: nir_22_swath_direction sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_nir_22, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_22 } ir_38_swath_direction: name: ir_38_swath_direction sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_38] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } wv_63_swath_direction: name: wv_63_swath_direction sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_wv_63 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_63 } wv_73_swath_direction: name: wv_73_swath_direction sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_wv_73 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_73 } ir_87_swath_direction: name: ir_87_swath_direction sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_87 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } ir_97_swath_direction: name: ir_97_swath_direction sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_97 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } ir_105_swath_direction: name: ir_105_swath_direction sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_105 } ir_123_swath_direction: name: ir_123_swath_direction sensor: fci - 
resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_123 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_123 } ir_133_swath_direction: name: ir_133_swath_direction sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_133 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } vis_04_swath_number: name: vis_04_swath_number sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_04, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_04 } vis_05_swath_number: name: vis_05_swath_number sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_05, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_05 } vis_06_swath_number: name: vis_06_swath_number sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06] } + 3000: { file_type: fci_l1c_af_vis_06 } vis_08_swath_number: name: vis_08_swath_number sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_08, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_08 } vis_09_swath_number: name: vis_09_swath_number sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_09, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_09 } nir_13_swath_number: name: nir_13_swath_number sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_nir_13, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_13 } nir_16_swath_number: name: nir_16_swath_number sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_nir_16, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_16 } nir_22_swath_number: name: nir_22_swath_number sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_nir_22, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_22 } ir_38_swath_number: name: ir_38_swath_number sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_38] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } wv_63_swath_number: name: wv_63_swath_number sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_wv_63 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_63 } wv_73_swath_number: name: wv_73_swath_number sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_wv_73 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_73 } ir_87_swath_number: name: ir_87_swath_number sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_87 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } ir_97_swath_number: name: ir_97_swath_number sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_97 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } ir_105_swath_number: name: ir_105_swath_number sensor: fci resolution: - 
1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_105 } ir_123_swath_number: name: ir_123_swath_number sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_123 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_123 } ir_133_swath_number: name: ir_133_swath_number sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_133 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } vis_04_subsatellite_latitude: name: vis_04_subsatellite_latitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_04, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_04 } vis_05_subsatellite_latitude: name: vis_05_subsatellite_latitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_05, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_05 } vis_06_subsatellite_latitude: name: vis_06_subsatellite_latitude units: deg sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_06 } vis_08_subsatellite_latitude: name: vis_08_subsatellite_latitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_08, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_08 } vis_09_subsatellite_latitude: name: vis_09_subsatellite_latitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_09, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_09 } nir_13_subsatellite_latitude: name: nir_13_subsatellite_latitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_nir_13, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_13 } nir_16_subsatellite_latitude: name: nir_16_subsatellite_latitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_nir_16, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_16 } nir_22_subsatellite_latitude: name: nir_22_subsatellite_latitude units: deg sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_nir_22, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_22 } ir_38_subsatellite_latitude: name: ir_38_subsatellite_latitude units: deg sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_38] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } wv_63_subsatellite_latitude: name: wv_63_subsatellite_latitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_wv_63 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_63 } wv_73_subsatellite_latitude: name: wv_73_subsatellite_latitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_wv_73 } + 2000: { file_type: fci_l1c_fdhsi } + 
3000: { file_type: fci_l1c_af_wv_73 } ir_87_subsatellite_latitude: name: ir_87_subsatellite_latitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_87 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } ir_97_subsatellite_latitude: name: ir_97_subsatellite_latitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_97 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } ir_105_subsatellite_latitude: name: ir_105_subsatellite_latitude units: deg sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_105 } ir_123_subsatellite_latitude: name: ir_123_subsatellite_latitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_123 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_123 } ir_133_subsatellite_latitude: name: ir_133_subsatellite_latitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_133 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } vis_04_subsatellite_longitude: name: vis_04_subsatellite_longitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_04, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_04 } vis_05_subsatellite_longitude: name: vis_05_subsatellite_longitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_05, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_05 } vis_06_subsatellite_longitude: name: vis_06_subsatellite_longitude units: deg sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_06 } vis_08_subsatellite_longitude: name: vis_08_subsatellite_longitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_08, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_08 } vis_09_subsatellite_longitude: name: vis_09_subsatellite_longitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_09, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_09 } nir_13_subsatellite_longitude: name: nir_13_subsatellite_longitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_nir_13, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_13 } nir_16_subsatellite_longitude: name: nir_16_subsatellite_longitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_nir_16, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_16 } nir_22_subsatellite_longitude: name: nir_22_subsatellite_longitude units: deg sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_nir_22, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_22 } ir_38_subsatellite_longitude: name: 
ir_38_subsatellite_longitude units: deg sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_38] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } wv_63_subsatellite_longitude: name: wv_63_subsatellite_longitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_wv_63 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_63 } wv_73_subsatellite_longitude: name: wv_73_subsatellite_longitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_wv_73 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_73 } ir_87_subsatellite_longitude: name: ir_87_subsatellite_longitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_87 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } ir_97_subsatellite_longitude: name: ir_97_subsatellite_longitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_97 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } ir_105_subsatellite_longitude: name: ir_105_subsatellite_longitude units: deg sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_105 } ir_123_subsatellite_longitude: name: ir_123_subsatellite_longitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_123 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_123 } ir_133_subsatellite_longitude: name: ir_133_subsatellite_longitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_133 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } vis_04_subsolar_latitude: name: vis_04_subsolar_latitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_04, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_04 } vis_05_subsolar_latitude: name: vis_05_subsolar_latitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_05, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_05 } vis_06_subsolar_latitude: name: vis_06_subsolar_latitude units: deg sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_06 } vis_08_subsolar_latitude: name: vis_08_subsolar_latitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_08, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_08 } vis_09_subsolar_latitude: name: vis_09_subsolar_latitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_09, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_09 } nir_13_subsolar_latitude: name: nir_13_subsolar_latitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + 
resolution: + 1000: { file_type: [fci_l1c_af_nir_13, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_13 } nir_16_subsolar_latitude: name: nir_16_subsolar_latitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_nir_16, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_16 } nir_22_subsolar_latitude: name: nir_22_subsolar_latitude units: deg sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_nir_22, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_22 } ir_38_subsolar_latitude: name: ir_38_subsolar_latitude units: deg sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_38] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } wv_63_subsolar_latitude: name: wv_63_subsolar_latitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_wv_63 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_63 } wv_73_subsolar_latitude: name: wv_73_subsolar_latitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_wv_73 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_73 } ir_87_subsolar_latitude: name: ir_87_subsolar_latitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_87 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } ir_97_subsolar_latitude: name: ir_97_subsolar_latitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_97 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } ir_105_subsolar_latitude: name: ir_105_subsolar_latitude units: deg sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_105 } ir_123_subsolar_latitude: name: ir_123_subsolar_latitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_123 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_123 } ir_133_subsolar_latitude: name: ir_133_subsolar_latitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_133 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } vis_04_subsolar_longitude: name: vis_04_subsolar_longitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_04, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_04 } vis_05_subsolar_longitude: name: vis_05_subsolar_longitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_05, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_05 } vis_06_subsolar_longitude: name: vis_06_subsolar_longitude units: deg sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_06 } 
vis_08_subsolar_longitude: name: vis_08_subsolar_longitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_08, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_08 } vis_09_subsolar_longitude: name: vis_09_subsolar_longitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_09, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_09 } nir_13_subsolar_longitude: name: nir_13_subsolar_longitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_nir_13, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_13 } nir_16_subsolar_longitude: name: nir_16_subsolar_longitude units: deg sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_nir_16, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_16 } nir_22_subsolar_longitude: name: nir_22_subsolar_longitude units: deg sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_nir_22, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_22 } ir_38_subsolar_longitude: name: ir_38_subsolar_longitude units: deg sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_38] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } wv_63_subsolar_longitude: name: wv_63_subsolar_longitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_wv_63 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_63 } wv_73_subsolar_longitude: name: wv_73_subsolar_longitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_wv_73 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_73 } ir_87_subsolar_longitude: name: ir_87_subsolar_longitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_87 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } ir_97_subsolar_longitude: name: ir_97_subsolar_longitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_97 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } ir_105_subsolar_longitude: name: ir_105_subsolar_longitude units: deg sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_105 } ir_123_subsolar_longitude: name: ir_123_subsolar_longitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_123 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_123 } ir_133_subsolar_longitude: name: ir_133_subsolar_longitude units: deg sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi - + resolution: + 1000: { file_type: fci_l1c_af_ir_133 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } vis_04_platform_altitude: name: vis_04_platform_altitude units: m sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution:
file_type: [fci_l1c_af_vis_04, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_04 } vis_05_platform_altitude: name: vis_05_platform_altitude units: m sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_05, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_05 } vis_06_platform_altitude: name: vis_06_platform_altitude units: m sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_06 } vis_08_platform_altitude: name: vis_08_platform_altitude units: m sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_08, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_08 } vis_09_platform_altitude: name: vis_09_platform_altitude units: m sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_09, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_09 } nir_13_platform_altitude: name: nir_13_platform_altitude units: m sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_nir_13, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_13 } nir_16_platform_altitude: name: nir_16_platform_altitude units: m sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_nir_16, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_16 } nir_22_platform_altitude: name: nir_22_platform_altitude units: m sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_nir_22, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_22 } ir_38_platform_altitude: name: ir_38_platform_altitude units: m sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_38] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } wv_63_platform_altitude: name: wv_63_platform_altitude units: m sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_wv_63 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_63 } wv_73_platform_altitude: name: wv_73_platform_altitude units: m sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_wv_73 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_73 } ir_87_platform_altitude: name: ir_87_platform_altitude units: m sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_87 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } ir_97_platform_altitude: name: ir_97_platform_altitude units: m sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_97 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } ir_105_platform_altitude: name: ir_105_platform_altitude units: m sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_105 } ir_123_platform_altitude: name: ir_123_platform_altitude units: m sensor: fci - resolution: 2000 - 
file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_123 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_123 } ir_133_platform_altitude: name: ir_133_platform_altitude units: m sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_133 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } vis_04_earth_sun_distance: name: vis_04_earth_sun_distance units: km sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_04, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_04 } vis_05_earth_sun_distance: name: vis_05_earth_sun_distance units: km sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_05, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_05 } vis_06_earth_sun_distance: name: vis_06_earth_sun_distance units: km sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_06 } vis_08_earth_sun_distance: name: vis_08_earth_sun_distance units: km sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_08, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_08 } vis_09_earth_sun_distance: name: vis_09_earth_sun_distance units: km sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_09, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_09 } nir_13_earth_sun_distance: name: nir_13_earth_sun_distance units: km sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_nir_13, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_13 } nir_16_earth_sun_distance: name: nir_16_earth_sun_distance units: km sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_nir_16, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_16 } nir_22_earth_sun_distance: name: nir_22_earth_sun_distance units: km sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_nir_22, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_22 } ir_38_earth_sun_distance: name: ir_38_earth_sun_distance units: km sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_38] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } wv_63_earth_sun_distance: name: wv_63_earth_sun_distance units: km sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_wv_63 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_63 } wv_73_earth_sun_distance: name: wv_73_earth_sun_distance units: km sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_wv_73 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_73 } ir_87_earth_sun_distance: name: ir_87_earth_sun_distance units: km sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_87 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } ir_97_earth_sun_distance: name: 
ir_97_earth_sun_distance units: km sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_97 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } ir_105_earth_sun_distance: name: ir_105_earth_sun_distance units: km sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_105 } ir_123_earth_sun_distance: name: ir_123_earth_sun_distance units: km sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_123 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_123 } ir_133_earth_sun_distance: name: ir_133_earth_sun_distance units: km sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_133 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } vis_04_sun_satellite_distance: name: vis_04_sun_satellite_distance units: km sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_04, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_04 } vis_05_sun_satellite_distance: name: vis_05_sun_satellite_distance units: km sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_05, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_05 } vis_06_sun_satellite_distance: name: vis_06_sun_satellite_distance units: km sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_vis_06, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_06 } vis_08_sun_satellite_distance: name: vis_08_sun_satellite_distance units: km sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_08, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_08 } vis_09_sun_satellite_distance: name: vis_09_sun_satellite_distance units: km sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_vis_09, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_vis_09 } nir_13_sun_satellite_distance: name: nir_13_sun_satellite_distance units: km sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_nir_13, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_13 } nir_16_sun_satellite_distance: name: nir_16_sun_satellite_distance units: km sensor: fci - resolution: 1000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: [fci_l1c_af_nir_16, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_16 } nir_22_sun_satellite_distance: name: nir_22_sun_satellite_distance units: km sensor: fci resolution: - 500: {file_type: fci_l1c_hrfi} - 1000: {file_type: fci_l1c_fdhsi} + 500: { file_type: fci_l1c_hrfi } + 1000: { file_type: [fci_l1c_af_nir_22, fci_l1c_fdhsi] } + 3000: { file_type: fci_l1c_af_nir_22 } ir_38_sun_satellite_distance: name: ir_38_sun_satellite_distance units: km sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_38] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_38 } wv_63_sun_satellite_distance: name: wv_63_sun_satellite_distance units: km sensor: fci - resolution: 2000 - file_type: 
fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_wv_63 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_63 } wv_73_sun_satellite_distance: name: wv_73_sun_satellite_distance units: km sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_wv_73 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_wv_73 } ir_87_sun_satellite_distance: name: ir_87_sun_satellite_distance units: km sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_87 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_87 } ir_97_sun_satellite_distance: name: ir_97_sun_satellite_distance units: km sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_97 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_97 } ir_105_sun_satellite_distance: name: ir_105_sun_satellite_distance units: km sensor: fci resolution: - 1000: {file_type: fci_l1c_hrfi} - 2000: {file_type: fci_l1c_fdhsi} + 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_105 } ir_123_sun_satellite_distance: name: ir_123_sun_satellite_distance units: km sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_123 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_123 } ir_133_sun_satellite_distance: name: ir_133_sun_satellite_distance units: km sensor: fci - resolution: 2000 - file_type: fci_l1c_fdhsi + resolution: + 1000: { file_type: fci_l1c_af_ir_133 } + 2000: { file_type: fci_l1c_fdhsi } + 3000: { file_type: fci_l1c_af_ir_133 } diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index 0c7b9fb8cc..6a2d5c05a5 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -146,11 +146,15 @@ HIGH_RES_GRID_INFO = {"fci_l1c_hrfi": {"grid_type": "500m", "grid_width": 22272}, "fci_l1c_fdhsi": {"grid_type": "1km", + "grid_width": 11136}, + "fci_l1c_af": {"grid_type": "1km", "grid_width": 11136}} LOW_RES_GRID_INFO = {"fci_l1c_hrfi": {"grid_type": "1km", "grid_width": 11136}, "fci_l1c_fdhsi": {"grid_type": "2km", - "grid_width": 5568}} + "grid_width": 5568}, + "fci_l1c_af": {"grid_type": "3km", + "grid_width": 3712}} def _get_aux_data_name_from_dsname(dsname): @@ -218,7 +222,7 @@ def rc_period_min(self): As RSS is not yet implemented, an error will be raised if RSS are to be read """ - if not self.filename_info["coverage"] == "FD": + if self.filename_info["coverage"] not in ["FD", "AF"]: raise NotImplementedError(f"coverage for {self.filename_info['coverage']} not supported by this reader") return 2.5 return 10 @@ -263,39 +267,62 @@ def get_channel_measured_group_path(self, channel): return measured_group_path def get_segment_position_info(self): - """Get information about the size and the position of the segment inside the final image array. - - As the final array is composed by stacking segments vertically, the position of a segment - inside the array is defined by the numbers of the start (lowest) and end (highest) row of the segment. - The row numbering is assumed to start with 1. - This info is used in the GEOVariableSegmentYAMLReader to compute optimal segment sizes for missing segments. - - Note: in the FCI terminology, a segment is actually called "chunk". 
To avoid confusion with the dask concept - of chunk, and to be consistent with SEVIRI, we opt to use the word segment. - """ - vis_06_measured_path = self.get_channel_measured_group_path("vis_06") - ir_105_measured_path = self.get_channel_measured_group_path("ir_105") - - file_type = self.filetype_info["file_type"] - - segment_position_info = { - HIGH_RES_GRID_INFO[file_type]["grid_type"]: { - "start_position_row": self.get_and_cache_npxr(vis_06_measured_path + "/start_position_row").item(), - "end_position_row": self.get_and_cache_npxr(vis_06_measured_path + "/end_position_row").item(), - "segment_height": self.get_and_cache_npxr(vis_06_measured_path + "/end_position_row").item() - - self.get_and_cache_npxr(vis_06_measured_path + "/start_position_row").item() + 1, - "grid_width": HIGH_RES_GRID_INFO[file_type]["grid_width"] - }, - LOW_RES_GRID_INFO[file_type]["grid_type"]: { - "start_position_row": self.get_and_cache_npxr(ir_105_measured_path + "/start_position_row").item(), - "end_position_row": self.get_and_cache_npxr(ir_105_measured_path + "/end_position_row").item(), - "segment_height": self.get_and_cache_npxr(ir_105_measured_path + "/end_position_row").item() - - self.get_and_cache_npxr(ir_105_measured_path + "/start_position_row").item() + 1, - "grid_width": LOW_RES_GRID_INFO[file_type]["grid_width"] - } - } - - return segment_position_info + """Get information about the size and the position of the segment inside the final image array. + + As the final array is composed by stacking segments vertically, the position of a segment + inside the array is defined by the numbers of the start (lowest) and end (highest) row of the segment. + The row numbering is assumed to start with 1. + This info is used in the GEOVariableSegmentYAMLReader to compute optimal segment sizes for missing segments. + + Note: in the FCI terminology, a segment is actually called "chunk". To avoid confusion with the dask concept + of chunk, and to be consistent with SEVIRI, we opt to use the word segment. 
+ """ + file_type = self.filetype_info["file_type"] + if self.filename_info["coverage"] == "AF": + channel_data = [key for key in self.file_content.keys() + if ((key.startswith("data/vis") or + key.startswith("data/ir") or + key.startswith("data/hrv") or + key.startswith("data/nir") or + key.startswith("data/wv")) + and key.endswith("measured"))][0] + segment_position_info = { + HIGH_RES_GRID_INFO[file_type]["grid_type"]: { + "start_position_row": self.get_and_cache_npxr(f"{channel_data}/start_position_row").item(), + "end_position_row": self.get_and_cache_npxr(f"{channel_data}/end_position_row").item(), + "segment_height": self.get_and_cache_npxr(f"{channel_data}/end_position_row").item() - + self.get_and_cache_npxr(f"{channel_data}/start_position_row").item() + 1, + "grid_width": HIGH_RES_GRID_INFO[file_type]["grid_width"] + }, + LOW_RES_GRID_INFO[file_type]["grid_type"]: { + "start_position_row": self.get_and_cache_npxr(f"{channel_data}/start_position_row").item(), + "end_position_row": self.get_and_cache_npxr(f"{channel_data}/end_position_row").item(), + "segment_height": self.get_and_cache_npxr(f"{channel_data}/end_position_row").item() - + self.get_and_cache_npxr(f"{channel_data}/start_position_row").item() + 1, + "grid_width": LOW_RES_GRID_INFO[file_type]["grid_width"] + } + } + else: + vis_06_measured_path = self.get_channel_measured_group_path("vis_06") + ir_105_measured_path = self.get_channel_measured_group_path("ir_105") + segment_position_info = { + HIGH_RES_GRID_INFO[file_type]["grid_type"]: { + "start_position_row": self.get_and_cache_npxr(vis_06_measured_path + "/start_position_row").item(), + "end_position_row": self.get_and_cache_npxr(vis_06_measured_path + "/end_position_row").item(), + "segment_height": self.get_and_cache_npxr(vis_06_measured_path + "/end_position_row").item() - + self.get_and_cache_npxr(vis_06_measured_path + "/start_position_row").item() + 1, + "grid_width": HIGH_RES_GRID_INFO[file_type]["grid_width"] + }, + LOW_RES_GRID_INFO[file_type]["grid_type"]: { + "start_position_row": self.get_and_cache_npxr(ir_105_measured_path + "/start_position_row").item(), + "end_position_row": self.get_and_cache_npxr(ir_105_measured_path + "/end_position_row").item(), + "segment_height": self.get_and_cache_npxr(ir_105_measured_path + "/end_position_row").item() - + self.get_and_cache_npxr(ir_105_measured_path + "/start_position_row").item() + 1, + "grid_width": LOW_RES_GRID_INFO[file_type]["grid_width"] + } + } + + return segment_position_info def get_dataset(self, key, info=None): """Load a dataset.""" @@ -397,9 +424,12 @@ def orbital_param(self): actual_subsat_lon = float(np.nanmean(self._get_aux_data_lut_vector("subsatellite_longitude"))) actual_subsat_lat = float(np.nanmean(self._get_aux_data_lut_vector("subsatellite_latitude"))) actual_sat_alt = float(np.nanmean(self._get_aux_data_lut_vector("platform_altitude"))) - nominal_and_proj_subsat_lon = float( - self.get_and_cache_npxr("data/mtg_geos_projection/attr/longitude_of_projection_origin")) - nominal_and_proj_subsat_lat = 0 + try: + nominal_and_proj_subsat_lon = float( + self.get_and_cache_npxr("data/mtg_geos_projection/attr/longitude_of_projection_origin")) + except ValueError: + nominal_and_proj_subsat_lon = 0.0 + nominal_and_proj_subsat_lat = 0.0 nominal_and_proj_sat_alt = float( self.get_and_cache_npxr("data/mtg_geos_projection/attr/perspective_point_height")) @@ -551,7 +581,10 @@ def get_area_def(self, key): a = float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/semi_major_axis")) h = 
float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/perspective_point_height")) rf = float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/inverse_flattening")) - lon_0 = float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/longitude_of_projection_origin")) + try: + lon_0 = float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/longitude_of_projection_origin")) + except ValueError: + lon_0 = 0.0 sweep = str(self.get_and_cache_npxr("data/mtg_geos_projection/attr/sweep_angle_axis")) area_extent, nlines, ncols = self.calc_area_extent(key) diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index 792de90462..31ac3b927f 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -57,6 +57,12 @@ "scale_factor": 5.58871526031607e-05, "add_offset": 1.55617776423501e-01, }, + "3km": { + "nrows": 66, + "ncols": 3712, + "scale_factor": 8.38307287956433e-05, + "add_offset": 0.155631748009112, + }, } @@ -366,6 +372,10 @@ class FakeFCIFileHandlerHRFI(FakeFCIFileHandlerBase): } +class FakeFCIFileHandlerAF(FakeFCIFileHandlerBase): + """Mock AF data.""" + chan_patterns = {} + # ---------------------------------------------------- # Fixtures preparation ------------------------------- # ---------------------------------------------------- @@ -394,12 +404,16 @@ def clear_cache(reader): for fh in fhs: fh.cached_file_content = {} +list_channel_solar = ["vis_04", "vis_05", "vis_06", "vis_08", "vis_09", + "nir_13", "nir_16", "nir_22"] +list_channel_terran = ["ir_38", "wv_63", "wv_73", "ir_87", "ir_97", "ir_105", + "ir_123", "ir_133"] +list_total_channel = list_channel_solar + list_channel_terran +list_resolution = ["1km", "3km"] -_chans_fdhsi = {"solar": ["vis_04", "vis_05", "vis_06", "vis_08", "vis_09", - "nir_13", "nir_16", "nir_22"], +_chans_fdhsi = {"solar": list_channel_solar, "solar_grid_type": ["1km"] * 8, - "terran": ["ir_38", "wv_63", "wv_73", "ir_87", "ir_97", "ir_105", - "ir_123", "ir_133"], + "terran": list_channel_terran, "terran_grid_type": ["2km"] * 8} _chans_hrfi = {"solar": ["vis_06", "nir_22"], @@ -407,6 +421,9 @@ def clear_cache(reader): "terran": ["ir_38", "ir_105"], "terran_grid_type": ["1km"] * 2} +_chans_af = {} + + _test_filenames = {"fdhsi": [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc" ], "hrfi": [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-HRFI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc" ] } +for channel in list_total_channel: + for resol in list_resolution: + chann_upp = channel.replace("_", "").upper() + _test_filenames[f"af_{channel}_{resol}"] = [f"W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1-FCI-1C-RRAD" + f"-{resol.upper()}-AF-{chann_upp}-x-x---NC4E_C_EUMT_20240125144655_DT_OPE" + f"_20240109080007_20240109080924_N_JLS_T_0049_0000.nc"] + if channel.split("_")[0] in ["vis", "nir"]: + _chans_af[f"{channel}_{resol}"] = {"solar": [channel], + "solar_grid_type": [resol]} + elif channel.split("_")[0] in ["ir", "wv"]: + _chans_af[f"{channel}_{resol}"] = {"terran": [channel], + "terran_grid_type": [resol]} + +# Example of a generated AF filename: +# W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1-FCI-1C-RRAD-1KM-AF-VIS06-x-x---NC4E_C_EUMT_20240125144647_DT_OPE_20240109080007_20240109080924_N_JLS_T_0049_0000.nc @contextlib.contextmanager def mocked_basefilehandler(filehandler): @@ -452,6 +483,19 @@ def FakeFCIFileHandlerHRFI_fixture(): } yield param_dict +@pytest.fixture() +def FakeFCIFileHandlerAF_fixture(channel, resolution): + """Get a fixture for the fake AF filehandler, including channel and file names.""" + 
chan_patterns = {channel.split("_")[0]+"_{:>02d}": {"channels": [int(channel.split("_")[1])], + "grid_type": f"{resolution}"},} + FakeFCIFileHandlerAF.chan_patterns = chan_patterns + with mocked_basefilehandler(FakeFCIFileHandlerAF): + param_dict = { + "filetype": f"fci_l1c_af_{channel}", + "channels": _chans_af[f"{channel}_{resolution}"], + "filenames": _test_filenames[f"af_{channel}_{resolution}"], + } + yield param_dict # ---------------------------------------------------- # Tests ---------------------------------------------- @@ -466,7 +510,7 @@ class TestFCIL1cNCReader: "fdhsi": {"channels": _chans_fdhsi, "filenames": _test_filenames["fdhsi"]}} - @pytest.mark.parametrize("filenames", [_test_filenames["fdhsi"], _test_filenames["hrfi"]]) + @pytest.mark.parametrize("filenames", [_test_filenames[filename] for filename in _test_filenames.keys()]) def test_file_pattern(self, reader_configs, filenames): """Test file pattern matching.""" from satpy.readers import load_reader @@ -509,6 +553,34 @@ def test_load_counts(self, reader_configs, fh_param, else: numpy.testing.assert_array_equal(res[ch], 1) + @pytest.mark.parametrize("channel",list_total_channel) + @pytest.mark.parametrize("resolution",list_resolution) + def test_load_counts_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,resolution): + """Test loading with counts for AF files.""" + expected_res_n = 1 + fh_param = FakeFCIFileHandlerAF_fixture + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) + if channel.split("_")[0] in ["vis","nir"]: + type_ter = "solar" + elif channel.split("_")[0] in ["wv","ir"]: + type_ter = "terran" + res = reader.load([make_dataid(name=name, calibration="counts") + for name in fh_param["channels"][type_ter]], pad_data=False) + assert expected_res_n == len(res) + for ch, grid_type in zip(fh_param["channels"][type_ter], + fh_param["channels"][f"{type_ter}_grid_type"]): + assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) + assert res[ch].dtype == np.uint16 + assert res[ch].attrs["calibration"] == "counts" + assert res[ch].attrs["units"] == "count" + if ch == "ir_38": + numpy.testing.assert_array_equal(res[ch][-1], 1) + numpy.testing.assert_array_equal(res[ch][0], 5000) + else: + numpy.testing.assert_array_equal(res[ch], 1) + + @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)]) def test_load_radiance(self, reader_configs, fh_param, @@ -534,6 +606,35 @@ def test_load_radiance(self, reader_configs, fh_param, else: numpy.testing.assert_array_equal(res[ch], 15) + + @pytest.mark.parametrize("channel",list_total_channel) + @pytest.mark.parametrize("resolution",list_resolution) + def test_load_radiance_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,resolution): + """Test loading with radiance for AF files.""" + expected_res_n = 1 + fh_param = FakeFCIFileHandlerAF_fixture + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) + if channel.split("_")[0] in ["vis","nir"]: + type_ter = "solar" + elif channel.split("_")[0] in ["wv","ir"]: + type_ter = "terran" + res = reader.load([make_dataid(name=name, calibration="radiance") + for name in fh_param["channels"][type_ter]], pad_data=False) + assert expected_res_n == len(res) + for ch, grid_type in zip(fh_param["channels"][type_ter], + fh_param["channels"][f"{type_ter}_grid_type"]): + assert res[ch].shape 
== (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) + assert res[ch].dtype == np.float32 + assert res[ch].attrs["calibration"] == "radiance" + assert res[ch].attrs["units"] == "mW m-2 sr-1 (cm-1)-1" + assert res[ch].attrs["radiance_unit_conversion_coefficient"].values == np.float32(1234.56) + if ch == "ir_38": + numpy.testing.assert_array_equal(res[ch][-1], 15) + numpy.testing.assert_array_equal(res[ch][0], 9700) + else: + numpy.testing.assert_array_equal(res[ch], 15) + @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 8), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 2)]) def test_load_reflectance(self, reader_configs, fh_param, @@ -552,6 +653,29 @@ def test_load_reflectance(self, reader_configs, fh_param, assert res[ch].attrs["units"] == "%" numpy.testing.assert_array_almost_equal(res[ch], 100 * 15 * 1 * np.pi / 50) + @pytest.mark.parametrize("channel",list_channel_solar) + @pytest.mark.parametrize("resolution",list_resolution) + def test_load_reflectance_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,resolution): + """Test loading with reflectance for AF files.""" + expected_res_n = 1 + fh_param = FakeFCIFileHandlerAF_fixture + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) + if channel.split("_")[0] in ["vis","nir"]: + type_ter = "solar" + elif channel.split("_")[0] in ["wv","ir"]: + type_ter = "terran" + res = reader.load([make_dataid(name=name, calibration="reflectance") + for name in fh_param["channels"][type_ter]], pad_data=False) + assert expected_res_n == len(res) + for ch, grid_type in zip(fh_param["channels"][type_ter], + fh_param["channels"][f"{type_ter}_grid_type"]): + assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) + assert res[ch].dtype == np.float32 + assert res[ch].attrs["calibration"] == "reflectance" + assert res[ch].attrs["units"] == "%" + numpy.testing.assert_array_almost_equal(res[ch], 100 * 15 * 1 * np.pi / 50) + @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 8), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 2)]) def test_load_bt(self, reader_configs, caplog, fh_param, @@ -577,6 +701,37 @@ def test_load_bt(self, reader_configs, caplog, fh_param, else: numpy.testing.assert_array_almost_equal(res[ch], np.float32(209.68275)) + + @pytest.mark.parametrize("channel",list_channel_terran) + @pytest.mark.parametrize("resolution",list_resolution) + def test_load_bt_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,resolution,caplog): + """Test loading with brightness_temperature for AF files.""" + expected_res_n = 1 + fh_param = FakeFCIFileHandlerAF_fixture + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) + if channel.split("_")[0] in ["vis","nir"]: + type_ter = "solar" + elif channel.split("_")[0] in ["wv","ir"]: + type_ter = "terran" + with caplog.at_level(logging.WARNING): + res = reader.load([make_dataid(name=name, calibration="brightness_temperature") + for name in fh_param["channels"][type_ter]], pad_data=False) + assert caplog.text == "" + assert expected_res_n == len(res) + for ch, grid_type in zip(fh_param["channels"][type_ter], + fh_param["channels"][f"{type_ter}_grid_type"]): + assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) + assert 
res[ch].dtype == np.float32 + assert res[ch].attrs["calibration"] == "brightness_temperature" + assert res[ch].attrs["units"] == "K" + + if ch == "ir_38": + numpy.testing.assert_array_almost_equal(res[ch][-1], np.float32(209.68275)) + numpy.testing.assert_array_almost_equal(res[ch][0], np.float32(1888.8513)) + else: + numpy.testing.assert_array_almost_equal(res[ch], np.float32(209.68275)) + @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))]) def test_orbital_parameters_attr(self, reader_configs, fh_param): @@ -599,6 +754,33 @@ def test_orbital_parameters_attr(self, reader_configs, fh_param): "projection_altitude": 35786400.0, } + @pytest.mark.parametrize("channel",list_total_channel) + @pytest.mark.parametrize("resolution",list_resolution) + def test_orbital_parameters_attr_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,resolution): + """Test the orbital parameters for AF data.""" + expected_res_n = 1 + fh_param = FakeFCIFileHandlerAF_fixture + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) + if channel.split("_")[0] in ["vis","nir"]: + type_ter = "solar" + elif channel.split("_")[0] in ["wv","ir"]: + type_ter = "terran" + res = reader.load([make_dataid(name=name) + for name in fh_param["channels"][type_ter]], pad_data=False) + assert expected_res_n == len(res) + for ch in fh_param["channels"][type_ter]: + assert res[ch].attrs["orbital_parameters"] == { + "satellite_actual_longitude": np.mean(np.arange(6000)), + "satellite_actual_latitude": np.mean(np.arange(6000)), + "satellite_actual_altitude": np.mean(np.arange(6000)), + "satellite_nominal_longitude": 0.0, + "satellite_nominal_latitude": 0, + "satellite_nominal_altitude": 35786400.0, + "projection_longitude": 0.0, + "projection_latitude": 0, + "projection_altitude": 35786400.0, + } + expected_pos_info_for_filetype = { "fdhsi": {"1km": {"start_position_row": 1, "end_position_row": 200, @@ -645,6 +827,26 @@ def test_load_index_map(self, reader_configs, fh_param, expected_res_n): GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) numpy.testing.assert_array_equal(res[ch + "_index_map"][1, 1], 110) + @pytest.mark.parametrize("channel",list_total_channel) + @pytest.mark.parametrize("resolution",list_resolution) + def test_load_index_map_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,resolution): + """Test loading with index_map for AF files.""" + expected_res_n = 1 + fh_param = FakeFCIFileHandlerAF_fixture + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) + if channel.split("_")[0] in ["vis","nir"]: + type_ter = "solar" + elif channel.split("_")[0] in ["wv","ir"]: + type_ter = "terran" + res = reader.load([f"{name}_index_map" + for name in fh_param["channels"][type_ter]], pad_data=False) + assert expected_res_n == len(res) + for ch, grid_type in zip(fh_param["channels"][type_ter], + fh_param["channels"][f"{type_ter}_grid_type"]): + assert res[f"{ch}_index_map"].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) + numpy.testing.assert_array_equal(res[f"{ch}_index_map"][1, 1], 110) + @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))]) def test_load_aux_data(self, reader_configs, fh_param): @@ -679,6 +881,27 @@ def test_load_quality_only(self, reader_configs, fh_param, expected_res_n): 
numpy.testing.assert_array_equal(res[ch + "_pixel_quality"][1, 1], 3) assert res[ch + "_pixel_quality"].attrs["name"] == ch + "_pixel_quality" + @pytest.mark.parametrize("channel",list_total_channel) + @pytest.mark.parametrize("resolution",list_resolution) + def test_load_quality_only_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,resolution): + """Test loading with quality works for AF files.""" + expected_res_n = 1 + fh_param = FakeFCIFileHandlerAF_fixture + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) + if channel.split("_")[0] in ["vis","nir"]: + type_ter = "solar" + elif channel.split("_")[0] in ["wv","ir"]: + type_ter = "terran" + res = reader.load([f"{name}_pixel_quality" + for name in fh_param["channels"][type_ter]], pad_data=False) + assert expected_res_n == len(res) + for ch, grid_type in zip(fh_param["channels"][type_ter], + fh_param["channels"][f"{type_ter}_grid_type"]): + assert res[f"{ch}_pixel_quality"].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) + numpy.testing.assert_array_equal(res[f"{ch}_pixel_quality"][1, 1], 3) + assert res[f"{ch}_pixel_quality"].attrs["name"] == f"{ch}_pixel_quality" + @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))]) def test_platform_name(self, reader_configs, fh_param): @@ -691,6 +914,21 @@ def test_platform_name(self, reader_configs, fh_param): res = reader.load(["vis_06"], pad_data=False) assert res["vis_06"].attrs["platform_name"] == "MTG-I1" + @pytest.mark.parametrize("channel",list_total_channel) + @pytest.mark.parametrize("resolution",list_resolution) + def test_platform_name_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,resolution): + """Test that platform name is exposed for AF file.""" + fh_param = FakeFCIFileHandlerAF_fixture + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) + if channel.split("_")[0] in ["vis","nir"]: + type_ter = "solar" + elif channel.split("_")[0] in ["wv","ir"]: + type_ter = "terran" + res = reader.load([f"{name}" + for name in fh_param["channels"][type_ter]], pad_data=False) + for ch in fh_param["channels"][type_ter]: + assert res[ch].attrs["platform_name"] == "MTG-I1" + @pytest.mark.parametrize(("fh_param", "expected_area"), [ (lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), ["mtg_fci_fdss_1km", "mtg_fci_fdss_2km"]), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), ["mtg_fci_fdss_500m", "mtg_fci_fdss_1km"]), @@ -732,6 +970,19 @@ def test_excs(self, reader_configs, fh_param): make_dataid(name="ir_123", calibration="unknown"), {"units": "unknown"}) + @pytest.mark.parametrize("channel",list_total_channel) + @pytest.mark.parametrize("resolution",list_resolution) + def test_excs_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,resolution): + """Test exceptions for AF files.""" + fh_param = FakeFCIFileHandlerAF_fixture + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) + with pytest.raises(ValueError, match="Unknown dataset key, not a channel, quality or auxiliary data: invalid"): + reader.file_handlers[fh_param["filetype"]][0].get_dataset(make_dataid(name="invalid"), {}) + with pytest.raises(ValueError, match="unknown invalid value for "): + reader.file_handlers[fh_param["filetype"]][0].get_dataset( + make_dataid(name=f"{channel}", calibration="unknown"), + {"units": "unknown"}) + def test_load_composite(self): """Test that 
composites are loadable.""" # when dedicated composites for FCI are implemented in satpy, From d01d4a6aba6997a143be6a530991b096a988e227 Mon Sep 17 00:00:00 2001 From: clement laplace Date: Wed, 10 Apr 2024 12:29:18 +0200 Subject: [PATCH 1233/1416] fix: Improve the code quality for fci_l1c_nc.py and test_fci_l1c_nc.py --- satpy/readers/fci_l1c_nc.py | 78 +++++++++++--------- satpy/tests/reader_tests/test_fci_l1c_nc.py | 79 +++++++++------------ 2 files changed, 78 insertions(+), 79 deletions(-) diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index 6a2d5c05a5..b377ae0177 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -147,7 +147,7 @@ "grid_width": 22272}, "fci_l1c_fdhsi": {"grid_type": "1km", "grid_width": 11136}, - "fci_l1c_af": {"grid_type": "1km", + "fci_l1c_af": {"grid_type": "1km", "grid_width": 11136}} LOW_RES_GRID_INFO = {"fci_l1c_hrfi": {"grid_type": "1km", "grid_width": 11136}, "fci_l1c_fdhsi": {"grid_type": "2km", @@ -266,27 +266,41 @@ def get_channel_measured_group_path(self, channel): return measured_group_path - def get_segment_position_info(self): - """Get information about the size and the position of the segment inside the final image array. + def _get_segment_position_info_FD(self): + """Get the segment position info for FD (full-disc) files.""" + file_type = self.filetype_info["file_type"] + vis_06_measured_path = self.get_channel_measured_group_path("vis_06") + ir_105_measured_path = self.get_channel_measured_group_path("ir_105") + segment_position_info = { + HIGH_RES_GRID_INFO[file_type]["grid_type"]: { + "start_position_row": self.get_and_cache_npxr(vis_06_measured_path + "/start_position_row").item(), + "end_position_row": self.get_and_cache_npxr(vis_06_measured_path + "/end_position_row").item(), + "segment_height": self.get_and_cache_npxr(vis_06_measured_path + "/end_position_row").item() - + self.get_and_cache_npxr(vis_06_measured_path + "/start_position_row").item() + 1, + "grid_width": HIGH_RES_GRID_INFO[file_type]["grid_width"] + }, + LOW_RES_GRID_INFO[file_type]["grid_type"]: { + "start_position_row": self.get_and_cache_npxr(ir_105_measured_path + "/start_position_row").item(), + "end_position_row": self.get_and_cache_npxr(ir_105_measured_path + "/end_position_row").item(), + "segment_height": self.get_and_cache_npxr(ir_105_measured_path + "/end_position_row").item() - + self.get_and_cache_npxr(ir_105_measured_path + "/start_position_row").item() + 1, + "grid_width": LOW_RES_GRID_INFO[file_type]["grid_width"] + } + } + return segment_position_info - As the final array is composed by stacking segments vertically, the position of a segment - inside the array is defined by the numbers of the start (lowest) and end (highest) row of the segment. - The row numbering is assumed to start with 1. - This info is used in the GEOVariableSegmentYAMLReader to compute optimal segment sizes for missing segments. - - Note: in the FCI terminology, a segment is actually called "chunk". 
- """ - file_type = self.filetype_info["file_type"] - if self.filename_info["coverage"] == "AF": - channel_data = [key for key in self.file_content.keys() + def _get_segment_position_info_AF(self): + """get_position_info applied for AF.""" + file_type = self.filetype_info["file_type"] + channel_data = [key for key in self.file_content.keys() if ((key.startswith("data/vis") or key.startswith("data/ir") or key.startswith("data/hrv") or key.startswith("data/nir") or key.startswith("data/wv")) and key.endswith("measured"))][0] - segment_position_info = { + segment_position_info = { HIGH_RES_GRID_INFO[file_type]["grid_type"]: { "start_position_row": self.get_and_cache_npxr(f"{channel_data}/start_position_row").item(), "end_position_row": self.get_and_cache_npxr(f"{channel_data}/end_position_row").item(), @@ -302,27 +316,23 @@ def get_segment_position_info(self): "grid_width": LOW_RES_GRID_INFO[file_type]["grid_width"] } } - else: - vis_06_measured_path = self.get_channel_measured_group_path("vis_06") - ir_105_measured_path = self.get_channel_measured_group_path("ir_105") - segment_position_info = { - HIGH_RES_GRID_INFO[file_type]["grid_type"]: { - "start_position_row": self.get_and_cache_npxr(vis_06_measured_path + "/start_position_row").item(), - "end_position_row": self.get_and_cache_npxr(vis_06_measured_path + "/end_position_row").item(), - "segment_height": self.get_and_cache_npxr(vis_06_measured_path + "/end_position_row").item() - - self.get_and_cache_npxr(vis_06_measured_path + "/start_position_row").item() + 1, - "grid_width": HIGH_RES_GRID_INFO[file_type]["grid_width"] - }, - LOW_RES_GRID_INFO[file_type]["grid_type"]: { - "start_position_row": self.get_and_cache_npxr(ir_105_measured_path + "/start_position_row").item(), - "end_position_row": self.get_and_cache_npxr(ir_105_measured_path + "/end_position_row").item(), - "segment_height": self.get_and_cache_npxr(ir_105_measured_path + "/end_position_row").item() - - self.get_and_cache_npxr(ir_105_measured_path + "/start_position_row").item() + 1, - "grid_width": LOW_RES_GRID_INFO[file_type]["grid_width"] - } - } + return segment_position_info + + def get_segment_position_info(self): + """Get information about the size and the position of the segment inside the final image array. - return segment_position_info + As the final array is composed by stacking segments vertically, the position of a segment + inside the array is defined by the numbers of the start (lowest) and end (highest) row of the segment. + The row numbering is assumed to start with 1. + This info is used in the GEOVariableSegmentYAMLReader to compute optimal segment sizes for missing segments. + + Note: in the FCI terminology, a segment is actually called "chunk". To avoid confusion with the dask concept + of chunk, and to be consistent with SEVIRI, we opt to use the word segment. 
+ """ + if self.filename_info["coverage"] == "AF": + return self._get_segment_position_info_AF() + else: + return self._get_segment_position_info_FD() def get_dataset(self, key, info=None): """Load a dataset.""" diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index 31ac3b927f..67f2f70787 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -60,7 +60,7 @@ "3km": { "nrows": 66, "ncols": 3712, - "scale_factor": 8.38307287956433e-05, + "scale_factor": 8.38307287956433e-05, "add_offset": 0.155631748009112, }, } @@ -510,6 +510,22 @@ class TestFCIL1cNCReader: "fdhsi": {"channels": _chans_fdhsi, "filenames": _test_filenames["fdhsi"]}} + def _get_type_ter_AF(self,channel): + """Return "solar" or "terran" depending on the given channel.""" + if channel.split("_")[0] in ["vis","nir"]: + return "solar" + elif channel.split("_")[0] in ["wv","ir"]: + return "terran" + + + def _get_res_AF(self,channel,fh_param,calibration,reader_configs): + """Load the requested calibration for the given AF channel and return the result.""" + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) + type_ter = self._get_type_ter_AF(channel) + res = reader.load([make_dataid(name=name, calibration=calibration) + for name in fh_param["channels"][type_ter]], pad_data=False) + return res + @pytest.mark.parametrize("filenames", [_test_filenames[filename] for filename in _test_filenames.keys()]) def test_file_pattern(self, reader_configs, filenames): """Test file pattern matching.""" from satpy.readers import load_reader @@ -559,20 +575,16 @@ def test_load_counts_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel """Test loading with counts for AF files.""" expected_res_n = 1 fh_param = FakeFCIFileHandlerAF_fixture - reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) - if channel.split("_")[0] in ["vis","nir"]: - type_ter = "solar" - elif channel.split("_")[0] in ["wv","ir"]: - type_ter = "terran" - res = reader.load([make_dataid(name=name, calibration="counts") - for name in fh_param["channels"][type_ter]], pad_data=False) + type_ter = self._get_type_ter_AF(channel) + calibration = "counts" + res = self._get_res_AF(channel,fh_param,calibration,reader_configs) assert expected_res_n == len(res) for ch, grid_type in zip(fh_param["channels"][type_ter], fh_param["channels"][f"{type_ter}_grid_type"]): assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) assert res[ch].dtype == np.uint16 - assert res[ch].attrs["calibration"] == "counts" + assert res[ch].attrs["calibration"] == calibration assert res[ch].attrs["units"] == "count" if ch == "ir_38": numpy.testing.assert_array_equal(res[ch][-1], 1) @@ -613,20 +625,16 @@ def test_load_radiance_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,chann """Test loading with radiance for AF files.""" expected_res_n = 1 fh_param = FakeFCIFileHandlerAF_fixture - reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) - if channel.split("_")[0] in ["vis","nir"]: - type_ter = "solar" - elif channel.split("_")[0] in ["wv","ir"]: - type_ter = "terran" - res = reader.load([make_dataid(name=name, calibration="radiance") - for name in fh_param["channels"][type_ter]], pad_data=False) + type_ter = self._get_type_ter_AF(channel) + calibration = "radiance" + res = self._get_res_AF(channel,fh_param,calibration,reader_configs) assert expected_res_n == len(res) for ch, grid_type in zip(fh_param["channels"][type_ter], fh_param["channels"][f"{type_ter}_grid_type"]): 
assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) assert res[ch].dtype == np.float32 - assert res[ch].attrs["calibration"] == "radiance" + assert res[ch].attrs["calibration"] == calibration assert res[ch].attrs["units"] == "mW m-2 sr-1 (cm-1)-1" assert res[ch].attrs["radiance_unit_conversion_coefficient"].values == np.float32(1234.56) if ch == "ir_38": @@ -659,20 +667,16 @@ def test_load_reflectance_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,ch """Test loading with reflectance for AF files.""" expected_res_n = 1 fh_param = FakeFCIFileHandlerAF_fixture - reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) - if channel.split("_")[0] in ["vis","nir"]: - type_ter = "solar" - elif channel.split("_")[0] in ["wv","ir"]: - type_ter = "terran" - res = reader.load([make_dataid(name=name, calibration="reflectance") - for name in fh_param["channels"][type_ter]], pad_data=False) + type_ter = self._get_type_ter_AF(channel) + calibration = "reflectance" + res = self._get_res_AF(channel,fh_param,calibration,reader_configs) assert expected_res_n == len(res) for ch, grid_type in zip(fh_param["channels"][type_ter], fh_param["channels"][f"{type_ter}_grid_type"]): assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) assert res[ch].dtype == np.float32 - assert res[ch].attrs["calibration"] == "reflectance" + assert res[ch].attrs["calibration"] == calibration assert res[ch].attrs["units"] == "%" numpy.testing.assert_array_almost_equal(res[ch], 100 * 15 * 1 * np.pi / 50) @@ -709,10 +713,7 @@ def test_load_bt_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,res expected_res_n = 1 fh_param = FakeFCIFileHandlerAF_fixture reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) - if channel.split("_")[0] in ["vis","nir"]: - type_ter = "solar" - elif channel.split("_")[0] in ["wv","ir"]: - type_ter = "terran" + type_ter = self._get_type_ter_AF(channel) with caplog.at_level(logging.WARNING): res = reader.load([make_dataid(name=name, calibration="brightness_temperature") for name in fh_param["channels"][type_ter]], pad_data=False) @@ -761,10 +762,7 @@ def test_orbital_parameters_attr_af(self,FakeFCIFileHandlerAF_fixture,reader_con """Test the orbital parameters for AF data.""" expected_res_n = 1 fh_param = FakeFCIFileHandlerAF_fixture reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) - if channel.split("_")[0] in ["vis","nir"]: - type_ter = "solar" - elif channel.split("_")[0] in ["wv","ir"]: - type_ter = "terran" + type_ter = self._get_type_ter_AF(channel) res = reader.load([make_dataid(name=name) for name in fh_param["channels"][type_ter]], pad_data=False) assert expected_res_n == len(res) @@ -834,10 +832,7 @@ def test_load_index_map_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,chan expected_res_n = 1 fh_param = FakeFCIFileHandlerAF_fixture reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) - if channel.split("_")[0] in ["vis","nir"]: - type_ter = "solar" - elif channel.split("_")[0] in ["wv","ir"]: - type_ter = "terran" + type_ter = self._get_type_ter_AF(channel) res = reader.load([f"{name}_index_map" for name in fh_param["channels"][type_ter]], pad_data=False) assert expected_res_n == len(res) @@ -888,10 +883,7 @@ def test_load_quality_only_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,c expected_res_n = 1 fh_param = FakeFCIFileHandlerAF_fixture reader = 
_get_reader_with_filehandlers(fh_param["filenames"], reader_configs) - if channel.split("_")[0] in ["vis","nir"]: - type_ter = "solar" - elif channel.split("_")[0] in ["wv","ir"]: - type_ter = "terran" + type_ter = self._get_type_ter_AF(channel) res = reader.load([f"{name}_pixel_quality" for name in fh_param["channels"][type_ter]], pad_data=False) assert expected_res_n == len(res) @@ -920,10 +912,7 @@ def test_platform_name_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,chann """Test that platform name is exposed for AF file.""" fh_param = FakeFCIFileHandlerAF_fixture reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) - if channel.split("_")[0] in ["vis","nir"]: - type_ter = "solar" - elif channel.split("_")[0] in ["wv","ir"]: - type_ter = "terran" + type_ter = self._get_type_ter_AF(channel) res = reader.load([f"{name}" for name in fh_param["channels"][type_ter]], pad_data=False) for ch in fh_param["channels"][type_ter]: From f51d58e5f11d4b7fbdfa28fd85a2947e728019e4 Mon Sep 17 00:00:00 2001 From: clement laplace Date: Wed, 10 Apr 2024 14:18:55 +0200 Subject: [PATCH 1234/1416] fix: Erase the resolution arguments into the test_fci_l1c_nc.py files --- satpy/tests/reader_tests/test_fci_l1c_nc.py | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index 67f2f70787..e8ac46f5d6 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -571,7 +571,7 @@ def test_load_counts(self, reader_configs, fh_param, @pytest.mark.parametrize("channel",list_total_channel) @pytest.mark.parametrize("resolution",list_resolution) - def test_load_counts_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,resolution): + def test_load_counts_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel): """Test loading with counts for AF files.""" expected_res_n = 1 fh_param = FakeFCIFileHandlerAF_fixture @@ -621,7 +621,7 @@ def test_load_radiance(self, reader_configs, fh_param, @pytest.mark.parametrize("channel",list_total_channel) @pytest.mark.parametrize("resolution",list_resolution) - def test_load_radiance_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,resolution): + def test_load_radiance_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel): """Test loading with radiance for AF files.""" expected_res_n = 1 fh_param = FakeFCIFileHandlerAF_fixture @@ -663,7 +663,7 @@ def test_load_reflectance(self, reader_configs, fh_param, @pytest.mark.parametrize("channel",list_channel_solar) @pytest.mark.parametrize("resolution",list_resolution) - def test_load_reflectance_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,resolution): + def test_load_reflectance_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel): """Test loading with reflectance for AF files.""" expected_res_n = 1 fh_param = FakeFCIFileHandlerAF_fixture @@ -708,15 +708,14 @@ def test_load_bt(self, reader_configs, caplog, fh_param, @pytest.mark.parametrize("channel",list_channel_terran) @pytest.mark.parametrize("resolution",list_resolution) - def test_load_bt_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,resolution,caplog): + def test_load_bt_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,caplog): """Test loading with brightness_temperature for AF files.""" expected_res_n = 1 fh_param = FakeFCIFileHandlerAF_fixture - reader = _get_reader_with_filehandlers(fh_param["filenames"], 
reader_configs) type_ter = self._get_type_ter_AF(channel) + calibration = "brightness_temperature" with caplog.at_level(logging.WARNING): - res = reader.load([make_dataid(name=name, calibration="brightness_temperature") - for name in fh_param["channels"][type_ter]], pad_data=False) + res = self._get_res_AF(channel,fh_param,calibration,reader_configs) assert caplog.text == "" assert expected_res_n == len(res) for ch, grid_type in zip(fh_param["channels"][type_ter], @@ -757,7 +756,7 @@ def test_orbital_parameters_attr(self, reader_configs, fh_param): @pytest.mark.parametrize("channel",list_total_channel) @pytest.mark.parametrize("resolution",list_resolution) - def test_orbital_parameters_attr_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,resolution): + def test_orbital_parameters_attr_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel): """Test the orbital parameters for AF data.""" expected_res_n = 1 fh_param = FakeFCIFileHandlerAF_fixture @@ -826,7 +826,7 @@ def test_load_index_map(self, reader_configs, fh_param, expected_res_n): @pytest.mark.parametrize("channel",list_total_channel) @pytest.mark.parametrize("resolution",list_resolution) - def test_load_index_map_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,resolution): + def test_load_index_map_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel): """Test loading with index_map for AF files.""" expected_res_n = 1 fh_param = FakeFCIFileHandlerAF_fixture @@ -877,7 +877,7 @@ def test_load_quality_only(self, reader_configs, fh_param, expected_res_n): @pytest.mark.parametrize("channel",list_total_channel) @pytest.mark.parametrize("resolution",list_resolution) - def test_load_quality_only_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,resolution): + def test_load_quality_only_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel): """Test loading with quality works for AF files.""" expected_res_n = 1 fh_param = FakeFCIFileHandlerAF_fixture @@ -960,7 +960,7 @@ def test_excs(self, reader_configs, fh_param): @pytest.mark.parametrize("channel",list_total_channel) @pytest.mark.parametrize("resolution",list_resolution) - def test_excs_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,resolution): + def test_excs_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel): """Test exceptions for AF files.""" fh_param = FakeFCIFileHandlerAF_fixture From 5c4305a0d854389c29eb26e531385a2104a8568e Mon Sep 17 00:00:00 2001 From: andream Date: Fri, 12 Apr 2024 15:54:23 +0200 Subject: [PATCH 1235/1416] add true color with night layer --- satpy/etc/composites/fci.yaml | 39 +++++++++++++++++++++++++-------- satpy/etc/enhancements/fci.yaml | 19 ++++++++++++++++ 2 files changed, 49 insertions(+), 9 deletions(-) create mode 100644 satpy/etc/enhancements/fci.yaml diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml index 5fa8997731..6f0fcb0c9d 100644 --- a/satpy/etc/composites/fci.yaml +++ b/satpy/etc/composites/fci.yaml @@ -2,6 +2,16 @@ sensor_name: visir/fci composites: + binary_cloud_mask: + # This will set all clear pixels to '0', all pixels with cloudy features (meteorological/dust/ash clouds) to '1' and + # missing/undefined pixels to 'nan'. This can be used for the official EUMETSAT cloud mask product (CLM). 
+ compositor: !!python/name:satpy.composites.CategoricalDataCompositor + prerequisites: + - name: 'cloud_state' + lut: [ .nan, 0, 1, 1, 1, 1, 1, 1, 0, .nan ] + standard_name: binary_cloud_mask + +# Green corrections ndvi_hybrid_green: description: > The FCI green band at 0.51 µm deliberately misses the chlorophyll band, such that @@ -47,6 +57,7 @@ composites: modifiers: [ sunz_corrected ] standard_name: toa_bidirectional_reflectance +# True Color true_color: compositor: !!python/name:satpy.composites.SelfSharpenedRGB description: > @@ -85,6 +96,17 @@ composites: - name: vis_04 standard_name: true_color_raw + true_color_with_night_ir105: + description: > + True Color during daytime, and a simple IR105 layer during nighttime. + compositor: !!python/name:satpy.composites.DayNightCompositor + standard_name: fci_day_night_blend + lim_low: 73 + lim_high: 82 + prerequisites: + - true_color + - night_ir105 + true_color_reproduction: # JMA True Color Reproduction complete composite with corrected and uncorrected blend. # http://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html @@ -116,7 +138,14 @@ composites: - name: vis_04 standard_name: true_color_reproduction_color_stretch - # GeoColor +# Night Layers + night_ir105: + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - name: ir_105 + standard_name: night_ir105 + +# GeoColor geo_color: compositor: !!python/name:satpy.composites.DayNightCompositor description: > @@ -227,11 +256,3 @@ composites: modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced] standard_name: cloud_phase - binary_cloud_mask: - # This will set all clear pixels to '0', all pixels with cloudy features (meteorological/dust/ash clouds) to '1' and - # missing/undefined pixels to 'nan'. This can be used for the official EUMETSAT cloud mask product (CLM). - compositor: !!python/name:satpy.composites.CategoricalDataCompositor - prerequisites: - - name: 'cloud_state' - lut: [ .nan, 0, 1, 1, 1, 1, 1, 1, 0, .nan ] - standard_name: binary_cloud_mask diff --git a/satpy/etc/enhancements/fci.yaml b/satpy/etc/enhancements/fci.yaml new file mode 100644 index 0000000000..d03eb89940 --- /dev/null +++ b/satpy/etc/enhancements/fci.yaml @@ -0,0 +1,19 @@ +enhancements: + fci_day_night_blend: + standard_name: fci_day_night_blend + operations: + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: crude + min_stretch: [ 0,0,0 ] + max_stretch: [ 1,1,1 ] + + night_ir105: + standard_name: night_ir105 + operations: + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - { colors: greys, min_value: 190, max_value: 295 } \ No newline at end of file From 741ea5cb0198f9e92c2e8436ebbcdba0883555b8 Mon Sep 17 00:00:00 2001 From: andream Date: Fri, 12 Apr 2024 16:46:59 +0200 Subject: [PATCH 1236/1416] add night_ir --- satpy/etc/composites/fci.yaml | 58 +++++++++++++++++++++++++++++++---- 1 file changed, 52 insertions(+), 6 deletions(-) diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml index 6f0fcb0c9d..68279a6b8c 100644 --- a/satpy/etc/composites/fci.yaml +++ b/satpy/etc/composites/fci.yaml @@ -2,6 +2,7 @@ sensor_name: visir/fci composites: +### L2 binary_cloud_mask: # This will set all clear pixels to '0', all pixels with cloudy features (meteorological/dust/ash clouds) to '1' and # missing/undefined pixels to 'nan'. This can be used for the official EUMETSAT cloud mask product (CLM). 
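
A quick aside on the binary_cloud_mask recipe above: roughly speaking, CategoricalDataCompositor uses each cloud_state category value as an index into the lut list. A minimal numpy sketch of that lookup (the category values below are hypothetical, and real inputs are dask-backed xarray DataArrays rather than bare arrays):

    import numpy as np

    # lut from the recipe: categories 0 and 9 -> nan (missing/undefined),
    # categories 1 and 8 -> 0 (clear), categories 2-7 -> 1 (cloudy)
    lut = np.array([np.nan, 0, 1, 1, 1, 1, 1, 1, 0, np.nan])
    cloud_state = np.array([[1, 3, 9], [0, 6, 8]])  # hypothetical category data
    binary_mask = lut[cloud_state]  # -> [[0., 1., nan], [nan, 1., 0.]]
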
@@ -11,7 +12,7 @@ composites: lut: [ .nan, 0, 1, 1, 1, 1, 1, 1, 0, .nan ] standard_name: binary_cloud_mask -# Green corrections +### Green Corrections ndvi_hybrid_green: description: > The FCI green band at 0.51 µm deliberately misses the chlorophyll band, such that @@ -57,7 +58,7 @@ composites: modifiers: [ sunz_corrected ] standard_name: toa_bidirectional_reflectance -# True Color +### True Color true_color: compositor: !!python/name:satpy.composites.SelfSharpenedRGB description: > @@ -107,6 +108,28 @@ composites: - true_color - night_ir105 + true_color_with_night_ir: + description: > + True Color during daytime, and a simple IR105 layer during nighttime. + compositor: !!python/name:satpy.composites.DayNightCompositor + standard_name: fci_day_night_blend + lim_low: 73 + lim_high: 82 + prerequisites: + - true_color + - night_ir_with_background + - + true_color_with_night_ir_hires: + description: > + True Color during daytime, and a simple IR105 layer during nighttime. + compositor: !!python/name:satpy.composites.DayNightCompositor + standard_name: fci_day_night_blend + lim_low: 73 + lim_high: 82 + prerequisites: + - true_color + - night_ir_with_background + true_color_reproduction: # JMA True Color Reproduction complete composite with corrected and uncorrected blend. # http://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html @@ -138,14 +161,37 @@ composites: - name: vis_04 standard_name: true_color_reproduction_color_stretch -# Night Layers +### Night Layers night_ir105: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - name: ir_105 standard_name: night_ir105 -# GeoColor + night_ir_alpha: + compositor: !!python/name:satpy.composites.GenericCompositor + standard_name: night_ir_alpha + prerequisites: + - name: ir_38 + - name: ir_105 + - name: ir_123 + - name: ir_105 + + night_ir_with_background: + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background + prerequisites: + - night_ir_alpha + - _night_background + + night_ir_with_background_hires: + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background + prerequisites: + - night_ir_alpha + - _night_background_hires + +### GeoColor geo_color: compositor: !!python/name:satpy.composites.DayNightCompositor description: > @@ -162,7 +208,6 @@ composites: - true_color - geo_color_night - # GeoColor Night-time geo_color_high_clouds: standard_name: geo_color_high_clouds compositor: !!python/name:satpy.composites.HighCloudCompositor @@ -201,6 +246,7 @@ composites: - geo_color_high_clouds - geo_color_background_with_low_clouds +### IR-Sandwich ir_sandwich: compositor: !!python/name:satpy.composites.SandwichCompositor standard_name: ir_sandwich @@ -224,6 +270,7 @@ composites: - ir_sandwich - colorized_ir_clouds +### other RGBs cloud_type: description: > Equal to cimss_cloud_type, but with additional sunz_reducer modifier to avoid saturation at the terminator. 
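
A note on the lim_low/lim_high pairs carried by the DayNightCompositor recipes above: they bound the sun zenith angle interval, in degrees, over which the day and night inputs are blended. A minimal sketch of the weighting idea only, not the compositor's actual implementation:

    import numpy as np

    def day_weight(sun_zenith, lim_low=73.0, lim_high=82.0):
        # 1.0 well inside the day side, 0.0 past the terminator,
        # and a linear ramp between lim_low and lim_high degrees
        return np.clip((lim_high - sun_zenith) / (lim_high - lim_low), 0.0, 1.0)

    # blended = w * day_rgb + (1 - w) * night_rgb, with w = day_weight(sza)
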
@@ -255,4 +302,3 @@ composites: - name: vis_06 modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced] standard_name: cloud_phase - From 0efd6a42c99462832a389020df93b884a2884d4c Mon Sep 17 00:00:00 2001 From: andream Date: Fri, 12 Apr 2024 17:34:22 +0200 Subject: [PATCH 1237/1416] fix extra line --- satpy/etc/composites/fci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml index 68279a6b8c..574af1cfaf 100644 --- a/satpy/etc/composites/fci.yaml +++ b/satpy/etc/composites/fci.yaml @@ -118,7 +118,7 @@ composites: prerequisites: - true_color - night_ir_with_background - - + true_color_with_night_ir_hires: description: > True Color during daytime, and a simple IR105 layer during nighttime. From a0138b1633e20bf8273812603d8d6c90ac4e8d60 Mon Sep 17 00:00:00 2001 From: andream Date: Fri, 12 Apr 2024 19:14:53 +0200 Subject: [PATCH 1238/1416] add fire_temperatures --- satpy/etc/composites/fci.yaml | 133 ++++++++++++++++++++++++-------- satpy/etc/enhancements/fci.yaml | 43 ++++++++++- 2 files changed, 142 insertions(+), 34 deletions(-) diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml index 574af1cfaf..741548ca7f 100644 --- a/satpy/etc/composites/fci.yaml +++ b/satpy/etc/composites/fci.yaml @@ -12,6 +12,36 @@ composites: lut: [ .nan, 0, 1, 1, 1, 1, 1, 1, 0, .nan ] standard_name: binary_cloud_mask +### Night Layers + night_ir105: + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - name: ir_105 + standard_name: night_ir105 + + night_ir_alpha: + compositor: !!python/name:satpy.composites.GenericCompositor + standard_name: night_ir_alpha + prerequisites: + - name: ir_38 + - name: ir_105 + - name: ir_123 + - name: ir_105 + + night_ir_with_background: + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background + prerequisites: + - night_ir_alpha + - _night_background + + night_ir_with_background_hires: + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background + prerequisites: + - night_ir_alpha + - _night_background_hires + ### Green Corrections ndvi_hybrid_green: description: > @@ -128,7 +158,7 @@ composites: lim_high: 82 prerequisites: - true_color - - night_ir_with_background + - night_ir_with_background_hires true_color_reproduction: # JMA True Color Reproduction complete composite with corrected and uncorrected blend. 
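
To try one of these recipes end to end, the usual Scene workflow applies. A sketch with placeholder file paths, assuming FCI L1c files on disk and a Satpy install that ships this fci.yaml:

    from glob import glob
    from satpy import Scene

    scn = Scene(filenames=glob("/path/to/fci_l1c/*.nc"), reader="fci_l1c_nc")
    scn.load(["true_color_with_night_ir105"])
    scn = scn.resample(resampler="native")  # bring all bands onto one grid
    scn.save_dataset("true_color_with_night_ir105", filename="tc_night_ir105.png")
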
@@ -161,36 +191,6 @@
       - name: vis_04
     standard_name: true_color_reproduction_color_stretch

-### Night Layers
-  night_ir105:
-    compositor: !!python/name:satpy.composites.SingleBandCompositor
-    prerequisites:
-      - name: ir_105
-    standard_name: night_ir105
-
-  night_ir_alpha:
-    compositor: !!python/name:satpy.composites.GenericCompositor
-    standard_name: night_ir_alpha
-    prerequisites:
-      - name: ir_38
-      - name: ir_105
-      - name: ir_123
-      - name: ir_105
-
-  night_ir_with_background:
-    compositor: !!python/name:satpy.composites.BackgroundCompositor
-    standard_name: night_ir_with_background
-    prerequisites:
-      - night_ir_alpha
-      - _night_background
-
-  night_ir_with_background_hires:
-    compositor: !!python/name:satpy.composites.BackgroundCompositor
-    standard_name: night_ir_with_background
-    prerequisites:
-      - night_ir_alpha
-      - _night_background_hires
-
 ### GeoColor
   geo_color:
     compositor: !!python/name:satpy.composites.DayNightCompositor
     description: >
@@ -273,7 +273,7 @@
 ### other RGBs
   cloud_type:
     description: >
-      Equal to cimss_cloud_type, but with additional sunz_reducer modifier to avoid saturation at the terminator.
+      Equal to cimss_cloud_type recipe, but with additional sunz_reducer modifier to avoid saturation at the terminator.
     references:
       EUMETRAIN Quick Guide: https://resources.eumetrain.org/rgb_quick_guides/quick_guides/CloudTypeRGB.pdf
       Recipe: https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf
@@ -287,9 +287,18 @@
         modifiers: [ sunz_corrected, sunz_reduced ]
     standard_name: cimss_cloud_type

+  cloud_type_with_night_ir1ß5:
+    compositor: !!python/name:satpy.composites.DayNightCompositor
+    standard_name: fci_day_night_blend
+    lim_low: 73
+    lim_high: 82
+    prerequisites:
+      - cloud_type
+      - night_ir105
+
   cloud_phase:
     description: >
-      Equal to cloud_phase, but with additional sunz_reducer modifier to avoid saturation at the terminator.
+      Equal to cloud_phase recipe, but with additional sunz_reducer modifier to avoid saturation at the terminator.
     references:
       EUMETRAIN Quick Guide: https://resources.eumetrain.org/rgb_quick_guides/quick_guides/CloudPhaseRGB.pdf
       Recipe: https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf
@@ -302,3 +311,61 @@
       - name: vis_06
         modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced]
     standard_name: cloud_phase
+
+  cloud_phase_with_night_ir105:
+    compositor: !!python/name:satpy.composites.DayNightCompositor
+    standard_name: fci_day_night_blend
+    lim_low: 73
+    lim_high: 82
+    prerequisites:
+      - cloud_phase
+      - night_ir105
+
+  fire_temperature:
+    standard_name: fire_temperature_fci
+    compositor: !!python/name:satpy.composites.GenericCompositor
+    description: >
+      The fire temperature RGB highlights intense fires and differentiates these
+      from low temperature fires. Small low temperature fires will only show up at 3.9 μm and
+      appear red. With the increasing intensity and temperature the fires will also be detected
+      by the 2.2 μm and 1.6 μm bands resulting in very intense fires in white. 
+      Note: the EUM, CIRA and AWIPS recipes are identical (apart from negligible 0.15K difference due to
+      imprecise C->K conversion)
+    references:
+      Recipe: https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf
+      Cira Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/Fire_Temperature_RGB.pdf
+      Eumetrain Quick Guide: https://resources.eumetrain.org/rgb_quick_guides/quick_guides/FireTemperatureRGB.pdf
+    prerequisites:
+      - name: ir_38
+      - name: nir_22
+      - name: nir_16
+
+  fire_temperature_38refl:
+    standard_name: fire_temperature_fci_38refl
+    compositor: !!python/name:satpy.composites.GenericCompositor
+    description: >
+      Same as fire_temperature, but uses only the reflective part of 3.8 μm
+    references:
+      discussion: See https://github.com/pytroll/satpy/pull/728
+    prerequisites:
+      - name: ir_38
+        modifiers: [nir_reflectance]
+      - name: nir_22
+        modifiers: [sunz_corrected]
+      - name: nir_16
+        modifiers: [sunz_corrected]
+
+  fire_temperature_rad:
+    standard_name: fire_temperature_fci_rad
+    compositor: !!python/name:satpy.composites.GenericCompositor
+    description: >
+      Same as fire_temperature, but uses the channels in radiance units. This is the original VIIRS recipe.
+    references:
+      discussion: See https://github.com/pytroll/satpy/pull/728
+    prerequisites:
+      - name: ir_38
+        calibration: radiance
+      - name: nir_22
+        calibration: radiance
+      - name: nir_16
+        calibration: radiance
\ No newline at end of file
diff --git a/satpy/etc/enhancements/fci.yaml b/satpy/etc/enhancements/fci.yaml
index d03eb89940..e36718689a 100644
--- a/satpy/etc/enhancements/fci.yaml
+++ b/satpy/etc/enhancements/fci.yaml
@@ -16,4 +16,45 @@ enhancements:
       method: !!python/name:satpy.enhancements.colorize
       kwargs:
         palettes:
-          - { colors: greys, min_value: 190, max_value: 295 }
\ No newline at end of file
+          - { colors: greys, min_value: 190, max_value: 295 }
+
+  fire_temperature_fci:
+    standard_name: fire_temperature_fci
+    operations:
+      - name: stretch
+        method: !!python/name:satpy.enhancements.stretch
+        kwargs:
+          stretch: crude
+          min_stretch: [273.15, 0.0, 0.0]
+          max_stretch: [333.15, 100.0, 75.0]
+      - name: gamma
+        method: !!python/name:satpy.enhancements.gamma
+        kwargs:
+          gamma: [0.4, 1, 1]
+
+  fire_temperature_fci_38refl:
+    standard_name: fire_temperature_fci_38refl
+    operations:
+      - name: stretch
+        method: !!python/name:satpy.enhancements.stretch
+        kwargs:
+          stretch: crude
+          min_stretch: [0, 0.0, 0.0]
+          max_stretch: [50, 100.0, 75.0]
+      - name: gamma
+        method: !!python/name:satpy.enhancements.gamma
+        kwargs:
+          gamma: [1, 1, 1]
+
+  fire_temperature_fci_rad:
+    standard_name: fire_temperature_fci_rad
+    operations:
+      - name: stretch
+        method: !!python/name:satpy.enhancements.stretch
+        kwargs:
+          stretch: crude
+          min_stretch: [0, 0, 0]
+          max_stretch: [3.5, 35., 85.] 
+ - name: gamma + method: !!python/name:satpy.enhancements.gamma + kwargs: {gamma: [1.0, 1.0, 1.0]} \ No newline at end of file From 37709bac88b4d2f8a95b231db955dd90cb1145e6 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sat, 13 Apr 2024 22:46:41 +0800 Subject: [PATCH 1239/1416] Update msi_safe.yaml --- satpy/etc/readers/msi_safe.yaml | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/satpy/etc/readers/msi_safe.yaml b/satpy/etc/readers/msi_safe.yaml index d93d269782..670ef99dd4 100644 --- a/satpy/etc/readers/msi_safe.yaml +++ b/satpy/etc/readers/msi_safe.yaml @@ -26,7 +26,7 @@ datasets: B01: name: B01 - sensor: MSI + sensor: msi wavelength: [0.415, 0.443, 0.470] resolution: 60 calibration: @@ -40,7 +40,7 @@ datasets: B02: name: B02 - sensor: MSI + sensor: msi wavelength: [0.440, 0.490, 0.540] resolution: 10 calibration: @@ -54,7 +54,7 @@ datasets: B03: name: B03 - sensor: MSI + sensor: msi wavelength: [0.540, 0.560, 0.580] resolution: 10 calibration: @@ -68,7 +68,7 @@ datasets: B04: name: B04 - sensor: MSI + sensor: msi wavelength: [0.645, 0.665, 0.685] resolution: 10 calibration: @@ -82,7 +82,7 @@ datasets: B05: name: B05 - sensor: MSI + sensor: msi wavelength: [0.695, 0.705, 0.715] resolution: 20 calibration: @@ -96,7 +96,7 @@ datasets: B06: name: B06 - sensor: MSI + sensor: msi wavelength: [0.731, 0.740, 0.749] resolution: 20 calibration: @@ -110,7 +110,7 @@ datasets: B07: name: B07 - sensor: MSI + sensor: msi wavelength: [0.764, 0.783, 0.802] resolution: 20 calibration: @@ -124,7 +124,7 @@ datasets: B08: name: B08 - sensor: MSI + sensor: msi wavelength: [0.780, 0.842, 0.905] resolution: 10 calibration: @@ -138,7 +138,7 @@ datasets: B8A: name: B8A - sensor: MSI + sensor: msi wavelength: [0.855, 0.865, 0.875] resolution: 20 calibration: @@ -152,7 +152,7 @@ datasets: B09: name: B09 - sensor: MSI + sensor: msi wavelength: [0.935, 0.945, 0.955] resolution: 60 calibration: @@ -166,7 +166,7 @@ datasets: B10: name: B10 - sensor: MSI + sensor: msi wavelength: [1.365, 1.375, 1.385] resolution: 60 calibration: @@ -180,7 +180,7 @@ datasets: B11: name: B11 - sensor: MSI + sensor: msi wavelength: [1.565, 1.610, 1.655] resolution: 20 calibration: @@ -194,7 +194,7 @@ datasets: B12: name: B12 - sensor: MSI + sensor: msi wavelength: [2.100, 2.190, 2.280] resolution: 20 calibration: From 39f732696fc0a9bb37b6e32acbe9383101de14fd Mon Sep 17 00:00:00 2001 From: andream Date: Mon, 15 Apr 2024 11:52:57 +0200 Subject: [PATCH 1240/1416] convert fire temperature stretch limits to wavelength units --- satpy/etc/enhancements/fci.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/satpy/etc/enhancements/fci.yaml b/satpy/etc/enhancements/fci.yaml index e36718689a..0775c83a76 100644 --- a/satpy/etc/enhancements/fci.yaml +++ b/satpy/etc/enhancements/fci.yaml @@ -49,12 +49,14 @@ enhancements: fire_temperature_fci_rad: standard_name: fire_temperature_fci_rad operations: + # note: the stretch parameters have been converted to wavelength units + # compared to e.g. the VIIRS recipe - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] - max_stretch: [3.5, 35., 85.] 
+          max_stretch: [5.1, 17.7, 22.0]
       - name: gamma
         method: !!python/name:satpy.enhancements.gamma
         kwargs: {gamma: [1.0, 1.0, 1.0]}
\ No newline at end of file

From 1342014f21890d25919f60cc40661fe9fcc1f7e3 Mon Sep 17 00:00:00 2001
From: andream
Date: Mon, 15 Apr 2024 14:42:23 +0200
Subject: [PATCH 1241/1416] add 24h_microphysics

---
 satpy/etc/composites/visir.yaml     | 17 +++++++++++++++++
 satpy/etc/enhancements/generic.yaml | 16 ++++++++++++++++
 2 files changed, 33 insertions(+)

diff --git a/satpy/etc/composites/visir.yaml b/satpy/etc/composites/visir.yaml
index d9798057a2..ffe3be4183 100644
--- a/satpy/etc/composites/visir.yaml
+++ b/satpy/etc/composites/visir.yaml
@@ -462,6 +462,23 @@ composites:
         - 10.8
     standard_name: night_microphysics

+  24h_microphysics:
+    references:
+      EUMETRAIN Quick Guide: https://eumetrain.org/sites/default/files/2021-05/24MicroRGB.pdf
+      Recipe: https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf
+    compositor: !!python/name:satpy.composites.GenericCompositor
+    prerequisites:
+      - compositor: !!python/name:satpy.composites.DifferenceCompositor
+        prerequisites:
+          - 12.0
+          - 10.8
+      - compositor: !!python/name:satpy.composites.DifferenceCompositor
+        prerequisites:
+          - 10.8
+          - 8.7
+      - 10.8
+    standard_name: 24h_microphysics
+
   ir108_3d:
     compositor: !!python/name:satpy.composites.GenericCompositor
     standard_name: ir108_3d
diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml
index b3ec45501c..cea87de760 100644
--- a/satpy/etc/enhancements/generic.yaml
+++ b/satpy/etc/enhancements/generic.yaml
@@ -929,6 +929,7 @@ enhancements:
            [252, 254, 254], [253, 254, 254], [253, 254, 254], [
              253, 254, 254], [253, 254, 254], [254, 254, 254], [
              254, 254, 254], [254, 254, 254], [254, 254, 254], [255, 255, 255]]
+
   night_microphysics_default:
     standard_name: night_microphysics
     operations:
       - name: stretch
         method: !!python/name:satpy.enhancements.stretch
         kwargs:
           stretch: crude
           min_stretch: [-4, 0, 243]
           max_stretch: [2, 10, 293]
+
+  24h_microphysics_default:
+    standard_name: 24h_microphysics
+    operations:
+      - name: stretch
+        method: !!python/name:satpy.enhancements.stretch
+        kwargs:
+          stretch: crude
+          min_stretch: [-4, 0, 248]
+          max_stretch: [2, 6, 303]
+      - name: gamma
+        method: !!python/name:satpy.enhancements.gamma
+        kwargs:
+          gamma: [ 1, 1.2, 1 ]
+
   ir_overview_default:
     standard_name: ir_overview
     operations:

From e716b814619cdc182265ec41b334902dbf7cca5e Mon Sep 17 00:00:00 2001
From: andream
Date: Mon, 15 Apr 2024 15:19:44 +0200
Subject: [PATCH 1242/1416] apply fix to L2 as well and trigger the filter only
 if processing_time is not None

---
 satpy/etc/composites/fci.yaml | 19 ++++++++++++++++---
 1 file changed, 16 insertions(+), 3 deletions(-)

diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml
index 741548ca7f..5dd440b900 100644
--- a/satpy/etc/composites/fci.yaml
+++ b/satpy/etc/composites/fci.yaml
@@ -1,6 +1,5 @@
 sensor_name: visir/fci

-
 composites:
 ### L2
@@ -329,7 +328,7 @@
       from low temperature fires. Small low temperature fires will only show up at 3.9 μm and
       appear red. With the increasing intensity and temperature the fires will also be detected
       by the 2.2 μm and 1.6 μm bands resulting in very intense fires in white. 
-      Note: the EUM, CIRA and AWIPS recipes are identical (apart from negligible 0.15K difference due to
+      Note: the EUM, CIRA and AWIPS recipes are identical (apart from negligible 0.15K difference due to
       imprecise C->K conversion)
     references:
       Recipe: https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf
       Cira Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/Fire_Temperature_RGB.pdf
       Eumetrain Quick Guide: https://resources.eumetrain.org/rgb_quick_guides/quick_guides/FireTemperatureRGB.pdf
@@ -368,4 +367,18 @@
       - name: nir_22
         calibration: radiance
       - name: nir_16
-        calibration: radiance
\ No newline at end of file
+        calibration: radiance
+
+  snow:
+    references:
+      EUMETRAIN Quick Guide: https://resources.eumetrain.org/rgb_quick_guides/quick_guides/SnowRGB.pdf
+      Recipe: https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf
+    compositor: !!python/name:satpy.composites.GenericCompositor
+    prerequisites:
+      - name: vis_08
+        modifiers: [sunz_corrected]
+      - name: nir_16
+        modifiers: [sunz_corrected]
+      - name: ir_38
+        modifiers: [nir_reflectance]
+    standard_name: snow

From c8d9c18dc7f26ee9c001f32d949ebf31c97a77ab Mon Sep 17 00:00:00 2001
From: andream
Date: Mon, 15 Apr 2024 15:52:28 +0200
Subject: [PATCH 1243/1416] update lim low and high for daynight blend

---
 satpy/etc/composites/fci.yaml | 28 ++++++++++++++--------------
 1 file changed, 14 insertions(+), 14 deletions(-)

diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml
index 5dd440b900..bc46b4101c 100644
--- a/satpy/etc/composites/fci.yaml
+++ b/satpy/etc/composites/fci.yaml
@@ -131,8 +131,8 @@ composites:
       True Color during daytime, and a simple IR105 layer during nighttime.
     compositor: !!python/name:satpy.composites.DayNightCompositor
     standard_name: fci_day_night_blend
-    lim_low: 73
-    lim_high: 82
+    lim_low: 78
+    lim_high: 88
     prerequisites:
       - true_color
       - night_ir105
@@ -142,8 +142,8 @@
       True Color during daytime, and a simple IR105 layer during nighttime.
     compositor: !!python/name:satpy.composites.DayNightCompositor
     standard_name: fci_day_night_blend
-    lim_low: 73
-    lim_high: 82
+    lim_low: 78
+    lim_high: 88
     prerequisites:
       - true_color
       - night_ir_with_background
@@ -153,8 +153,8 @@
       True Color during daytime, and a simple IR105 layer during nighttime.
     compositor: !!python/name:satpy.composites.DayNightCompositor
     standard_name: fci_day_night_blend
-    lim_low: 73
-    lim_high: 82
+    lim_low: 78
+    lim_high: 88
     prerequisites:
       - true_color
      - night_ir_with_background_hires

  true_color_reproduction:
  # JMA True Color Reproduction complete composite with corrected and uncorrected blend.
  # http://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html
    compositor: !!python/name:satpy.composites.DayNightCompositor
    standard_name: true_color_reproduction
-   lim_low: 73.
-   lim_high: 85. 
+ lim_low: 73 + lim_high: 85 prerequisites: - true_color_reproduction_corr - true_color_reproduction_uncorr @@ -263,8 +263,8 @@ composites: ir_sandwich_with_night_colorized_ir_clouds: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: fci_day_night_blend - lim_low: 73 - lim_high: 82 + lim_low: 78 + lim_high: 88 prerequisites: - ir_sandwich - colorized_ir_clouds @@ -289,8 +289,8 @@ composites: cloud_type_with_night_ir1ß5: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: fci_day_night_blend - lim_low: 73 - lim_high: 82 + lim_low: 78 + lim_high: 88 prerequisites: - cloud_type - night_ir105 @@ -314,8 +314,8 @@ composites: cloud_phase_with_night_ir105: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: fci_day_night_blend - lim_low: 73 - lim_high: 82 + lim_low: 78 + lim_high: 88 prerequisites: - cloud_phase - night_ir105 From 02ccd2eeaa0ca13a59ffa726ca4c543872c97b29 Mon Sep 17 00:00:00 2001 From: andream Date: Mon, 15 Apr 2024 15:53:00 +0200 Subject: [PATCH 1244/1416] add final yaml line --- satpy/etc/enhancements/fci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/enhancements/fci.yaml b/satpy/etc/enhancements/fci.yaml index 0775c83a76..05ce0f9e53 100644 --- a/satpy/etc/enhancements/fci.yaml +++ b/satpy/etc/enhancements/fci.yaml @@ -59,4 +59,4 @@ enhancements: max_stretch: [5.1, 17.7, 22.0] - name: gamma method: !!python/name:satpy.enhancements.gamma - kwargs: {gamma: [1.0, 1.0, 1.0]} \ No newline at end of file + kwargs: {gamma: [1.0, 1.0, 1.0]} From ea67c07e3b7b1586755931d7e63059821724fd29 Mon Sep 17 00:00:00 2001 From: andream Date: Mon, 15 Apr 2024 15:57:43 +0200 Subject: [PATCH 1245/1416] fix typo in cloud_type_with_night_ir105 --- satpy/etc/composites/fci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml index bc46b4101c..6850bb8f07 100644 --- a/satpy/etc/composites/fci.yaml +++ b/satpy/etc/composites/fci.yaml @@ -286,7 +286,7 @@ composites: modifiers: [ sunz_corrected, sunz_reduced ] standard_name: cimss_cloud_type - cloud_type_with_night_ir1ß5: + cloud_type_with_night_ir105: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: fci_day_night_blend lim_low: 78 From 20fd4c134b1c137322bb9f19c841a30ed01f01da Mon Sep 17 00:00:00 2001 From: andream Date: Mon, 15 Apr 2024 16:26:26 +0200 Subject: [PATCH 1246/1416] add descriptions to night cloud RGBs --- satpy/etc/composites/fci.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml index 6850bb8f07..963a5a198f 100644 --- a/satpy/etc/composites/fci.yaml +++ b/satpy/etc/composites/fci.yaml @@ -287,6 +287,8 @@ composites: standard_name: cimss_cloud_type cloud_type_with_night_ir105: + description: > + Combines the cloud_type during daytime with the simple 10.5µm night_ir105 layer during nighttime compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: fci_day_night_blend lim_low: 78 @@ -312,6 +314,8 @@ composites: standard_name: cloud_phase cloud_phase_with_night_ir105: + description: > + Combines the cloud_phase during daytime with the simple 10.5µm night_ir105 layer during nighttime compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: fci_day_night_blend lim_low: 78 From 3ce9d03d0f2c222a1cd988de93b53605226b69d7 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 17 Apr 2024 18:24:56 +0800 Subject: [PATCH 1247/1416] 
Update msi_safe.yaml --- satpy/etc/readers/msi_safe.yaml | 39 +++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/satpy/etc/readers/msi_safe.yaml b/satpy/etc/readers/msi_safe.yaml index 670ef99dd4..f65ae4bb01 100644 --- a/satpy/etc/readers/msi_safe.yaml +++ b/satpy/etc/readers/msi_safe.yaml @@ -36,6 +36,9 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" file_type: safe_granule B02: @@ -50,6 +53,9 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" file_type: safe_granule B03: @@ -64,6 +70,9 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" file_type: safe_granule B04: @@ -78,6 +87,9 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" file_type: safe_granule B05: @@ -92,6 +104,9 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" file_type: safe_granule B06: @@ -106,6 +121,9 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" file_type: safe_granule B07: @@ -120,6 +138,9 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" file_type: safe_granule B08: @@ -134,6 +155,9 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" file_type: safe_granule B8A: @@ -148,6 +172,9 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" file_type: safe_granule B09: @@ -162,6 +189,9 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" file_type: safe_granule B10: @@ -176,6 +206,9 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" file_type: safe_granule B11: @@ -190,6 +223,9 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" file_type: safe_granule B12: @@ -204,6 +240,9 @@ datasets: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" file_type: safe_granule From 6c7cb5c59053e7f882a293082b6f690c6ae9f507 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 17 Apr 2024 18:35:46 +0800 Subject: [PATCH 1248/1416] Update msi_safe.yaml --- satpy/etc/readers/msi_safe.yaml | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/satpy/etc/readers/msi_safe.yaml b/satpy/etc/readers/msi_safe.yaml index f65ae4bb01..c83b560539 100644 --- a/satpy/etc/readers/msi_safe.yaml +++ b/satpy/etc/readers/msi_safe.yaml @@ -38,7 +38,7 @@ datasets: units: W m-2 um-1 sr-1 counts: standard_name: counts - units: "1" + units: "1" file_type: safe_granule B02: @@ -55,7 +55,7 @@ datasets: units: W m-2 um-1 sr-1 counts: standard_name: counts 
- units: "1" + units: "1" file_type: safe_granule B03: @@ -72,7 +72,7 @@ datasets: units: W m-2 um-1 sr-1 counts: standard_name: counts - units: "1" + units: "1" file_type: safe_granule B04: @@ -89,7 +89,7 @@ datasets: units: W m-2 um-1 sr-1 counts: standard_name: counts - units: "1" + units: "1" file_type: safe_granule B05: @@ -106,7 +106,7 @@ datasets: units: W m-2 um-1 sr-1 counts: standard_name: counts - units: "1" + units: "1" file_type: safe_granule B06: @@ -123,7 +123,7 @@ datasets: units: W m-2 um-1 sr-1 counts: standard_name: counts - units: "1" + units: "1" file_type: safe_granule B07: @@ -140,7 +140,7 @@ datasets: units: W m-2 um-1 sr-1 counts: standard_name: counts - units: "1" + units: "1" file_type: safe_granule B08: @@ -157,7 +157,7 @@ datasets: units: W m-2 um-1 sr-1 counts: standard_name: counts - units: "1" + units: "1" file_type: safe_granule B8A: @@ -174,7 +174,7 @@ datasets: units: W m-2 um-1 sr-1 counts: standard_name: counts - units: "1" + units: "1" file_type: safe_granule B09: @@ -191,7 +191,7 @@ datasets: units: W m-2 um-1 sr-1 counts: standard_name: counts - units: "1" + units: "1" file_type: safe_granule B10: @@ -208,7 +208,7 @@ datasets: units: W m-2 um-1 sr-1 counts: standard_name: counts - units: "1" + units: "1" file_type: safe_granule B11: @@ -225,7 +225,7 @@ datasets: units: W m-2 um-1 sr-1 counts: standard_name: counts - units: "1" + units: "1" file_type: safe_granule B12: @@ -242,7 +242,7 @@ datasets: units: W m-2 um-1 sr-1 counts: standard_name: counts - units: "1" + units: "1" file_type: safe_granule From 611e0dfb6a850ddce17eef450941ce5b0bd7fa93 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 17 Apr 2024 19:28:38 +0800 Subject: [PATCH 1249/1416] Update msi_safe.py --- satpy/readers/msi_safe.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index 1131e40a96..ec17a98872 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -89,6 +89,8 @@ def _read_from_file(self, key): return self._mda.calibrate_to_reflectances(proj, self._channel) if key["calibration"] == "radiance": return self._mda.calibrate_to_radiances(proj, self._channel) + if key["calibration"] == "counts": + return self._mda._sanitize_data(proj) @property def start_time(self): From 05e420c6ebfa868af26e908b62a1ede7a3bd0ab1 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 17 Apr 2024 19:46:29 +0800 Subject: [PATCH 1250/1416] Update test_msi_safe.py --- satpy/tests/reader_tests/test_msi_safe.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index bcee32ddbb..0ed647d5d1 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -920,6 +920,15 @@ def test_xml_calibration(self): np.testing.assert_allclose(result, [[[np.nan, 0.01 - 10, 0.02 - 10, 0.03 - 10], [0.04 - 10, 0, 655.34 - 10, np.inf]]]) + def test_xml_calibration_to_counts(self): + """Test the calibration to counts.""" + fake_data = xr.DataArray([[[0, 1, 2, 3], + [4, 1000, 65534, 65535]]], + dims=["band", "x", "y"]) + result = self.xml_fh._sanitize_data(fake_data) + np.testing.assert_allclose(result, [[[np.nan, 1, 2, 3], + [4, 1000, 65534, np.inf]]]) + def test_xml_calibration_unmasked_saturated(self): """Test the calibration with radiometric offset but unmasked saturated pixels.""" from satpy.readers.msi_safe import SAFEMSIMDXML From c6ec6b66d1b5534d2f6c29cbe8d1a68fc1773c03 Mon Sep 17 00:00:00 2001 From: 
yukaribbba Date: Wed, 17 Apr 2024 20:00:41 +0800 Subject: [PATCH 1251/1416] Update test_msi_safe.py --- satpy/tests/reader_tests/test_msi_safe.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 0ed647d5d1..c0d6314676 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -988,7 +988,8 @@ def setup_method(self): @pytest.mark.parametrize(("mask_saturated", "calibration", "expected"), [(True, "reflectance", [[np.nan, 0.01 - 10], [645.34, np.inf]]), (False, "reflectance", [[np.nan, 0.01 - 10], [645.34, 645.35]]), - (True, "radiance", [[np.nan, -251.58426503], [16251.99095011, np.inf]])]) + (True, "radiance", [[np.nan, -251.58426503], [16251.99095011, np.inf]]), + (False, "counts", [[np.nan, 1], [65534, np.inf]])]) def test_calibration_and_masking(self, mask_saturated, calibration, expected): """Test that saturated is masked with inf when requested and that calibration is performed.""" from satpy.readers.msi_safe import SAFEMSIL1C, SAFEMSIMDXML From 4faccbb22619472f43f82baf3b54bc20c127ddd7 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 17 Apr 2024 20:13:23 +0800 Subject: [PATCH 1252/1416] Update test_msi_safe.py --- satpy/tests/reader_tests/test_msi_safe.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index c0d6314676..0255aac085 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -989,7 +989,7 @@ def setup_method(self): [(True, "reflectance", [[np.nan, 0.01 - 10], [645.34, np.inf]]), (False, "reflectance", [[np.nan, 0.01 - 10], [645.34, 645.35]]), (True, "radiance", [[np.nan, -251.58426503], [16251.99095011, np.inf]]), - (False, "counts", [[np.nan, 1], [65534, np.inf]])]) + (False, "counts", [[np.nan, 1], [65534, 65535]])]) def test_calibration_and_masking(self, mask_saturated, calibration, expected): """Test that saturated is masked with inf when requested and that calibration is performed.""" from satpy.readers.msi_safe import SAFEMSIL1C, SAFEMSIMDXML From 59258086db1c0f1bd2f115314820370ce3053932 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 17 Apr 2024 22:06:50 +0800 Subject: [PATCH 1253/1416] Update test_olci_nc.py --- satpy/tests/reader_tests/test_olci_nc.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/satpy/tests/reader_tests/test_olci_nc.py b/satpy/tests/reader_tests/test_olci_nc.py index 2834578176..0f2e7acbfe 100644 --- a/satpy/tests/reader_tests/test_olci_nc.py +++ b/satpy/tests/reader_tests/test_olci_nc.py @@ -335,10 +335,10 @@ def test_bitflags_with_dataarray_without_flags(self): "CLOUD_MARGIN", "CLOUD_AMBIGUOUS", "LOWRW", "LAND"] mask = reduce(np.logical_or, [bflags[item] for item in items]) - expected = np.array([True, False, True, True, True, True, False, - False, True, True, False, False, False, False, - False, False, False, True, False, True, False, - False, False, True, True, False, False, True, + expected = np.array([True, False, True, True, True, True, False, + False, True, True, False, False, False, False, + False, False, False, True, False, True, False, + False, False, True, True, False, False, True, False]) assert all(mask == expected) @@ -367,9 +367,9 @@ def test_bitflags_with_custom_flag_list(self): "CLOUD_MARGIN", "CLOUD_AMBIGUOUS", "LOWRW", "LAND"] mask = reduce(np.logical_or, [bflags[item] for item 
in items]) - expected = np.array([True, False, True, True, True, True, False, - False, True, True, False, False, False, False, - False, False, False, True, False, True, False, - False, False, True, True, False, False, True, + expected = np.array([True, False, True, True, True, True, False, + False, True, True, False, False, False, False, + False, False, False, True, False, True, False, + False, False, True, True, False, False, True, False]) assert all(mask == expected) From 71a2acad34eb462d972235e5b1d792ad2d606c5f Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Thu, 18 Apr 2024 15:26:08 +0800 Subject: [PATCH 1254/1416] Update __init__.py --- satpy/composites/__init__.py | 80 ++++++++++++++++++++---------------- 1 file changed, 45 insertions(+), 35 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 84b588b3fc..3976b5adc5 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1670,7 +1670,28 @@ def __call__(self, *args, **kwargs): class BackgroundCompositor(GenericCompositor): - """A compositor that overlays one composite on top of another.""" + """A compositor that overlays one composite on top of another. + + The output image mode will be determined by both foreground and background. Generally, when the background has + an alpha band, the output image will also have one. + # L/L -> L + # L/LA -> LA + # L/RGB -> RGB + # L/RGBA -> RGBA + # LA/L -> L + # LA/LA -> LA + # LA/RGB -> RGB + # LA/RGBA -> RGBA + # RGB/L -> RGB + # RGB/LA -> RGBA + # RGB/RGB -> RGB + # RGB/RGBA -> RGBA + # RGBA/L -> RGB + # RGBA/LA -> RGBA + # RGBA/RGB -> RGB + # RGBA/RGBA -> RGBA + + """ def __call__(self, projectables, *args, **kwargs): """Call the compositor.""" @@ -1681,15 +1702,12 @@ def __call__(self, projectables, *args, **kwargs): background = enhance2dataset(projectables[1], convert_p=True) before_bg_mode = background.attrs["mode"] - # Adjust bands so that they match - # L/RGB -> RGB/RGB - # LA/RGB -> RGBA/RGBA - # RGB/RGBA -> RGBA/RGBA + # Adjust bands so that they have the same mode foreground = add_bands(foreground, background["bands"]) background = add_bands(background, foreground["bands"]) - # It's important to judge whether the alpha band of background is initially generated, e.g. by CloudCompositor - # The result will be used to decide the output image mode + # It's important whether the alpha band of background is initially generated, e.g. by CloudCompositor + # The result will be used to determine the output image mode initial_bg_alpha = "A" in before_bg_mode attrs = self._combine_metadata_with_mode_and_sensor(foreground, background) @@ -1723,22 +1741,7 @@ def _get_merged_image_data(foreground: xr.DataArray, background: xr.DataArray, initial_bg_alpha: bool, ) -> list[xr.DataArray]: - def _get_alpha(dataset: xr.DataArray): - # If the dataset contains an alpha channel, just use it - # If not, we still need one. 
So build it and fill it with 1
-            alpha = xr.full_like(first_band, 1)
-            alpha["bands"] = "A"
-
-            # There could be Nans in the alpha
-            # Replace them with 0 to prevent cases like 1 + nan = nan, so they won't affect new_alpha
-            alpha = xr.where(alpha.isnull(), 0, alpha)
-
-            return alpha
-
+        # For more info about alpha compositing please review https://en.wikipedia.org/wiki/Alpha_compositing
         alpha_fore = _get_alpha(foreground)
         alpha_back = _get_alpha(background)
         new_alpha = alpha_fore + alpha_back * (1 - alpha_fore)
@@ -1748,23 +1751,14 @@ def _get_alpha(dataset: xr.DataArray):

         # Pass the image data (alpha band will be dropped temporarily) to the writer
         output_mode = background.attrs["mode"].replace("A", "")

-        # For more info about alpha compositing please review https://en.wikipedia.org/wiki/Alpha_compositing
-        # Whether there's no initial alpha band, or it has been dropped, we're actually asking the writer for decision
-        # So first, we must fill the transparent areas in the image with np.nan
-        # The best way is through a modified version of new alpha
-        new_alpha_nan = xr.where(alpha_fore + alpha_back == 0, np.nan, new_alpha) if "A" not in output_mode \
-            else new_alpha
-
         for band in output_mode:
             fg_band = foreground.sel(bands=band)
             bg_band = background.sel(bands=band)
-
-            chan = (fg_band * alpha_fore +
-                    bg_band * alpha_back * (1 - alpha_fore)) / new_alpha_nan
-
+            # Do the alpha compositing
+            chan = (fg_band * alpha_fore + bg_band * alpha_back * (1 - alpha_fore)) / new_alpha
+            # Fill the NaN area with background
             chan = xr.where(chan.isnull(), bg_band * alpha_back, chan)
             chan["bands"] = band
-
             data.append(chan)

         # If background has an initial alpha band, it will also be passed to the writer
@@ -1786,6 +1780,22 @@ def _simple_overlay(foreground: xr.DataArray,
     return data


+def _get_alpha(dataset: xr.DataArray):
+    # If the dataset contains an alpha channel, just use it
+    if "A" in dataset.attrs["mode"]:
+        alpha = dataset.sel(bands="A")
+        # There could be NaNs in the alpha
+        # Replace them with 0 to prevent cases like 1 + nan = nan, so they won't affect new_alpha
+        alpha = xr.where(alpha.isnull(), 0, alpha)
+    # If not, we still need one. So build it and fill it with 1
+    else:
+        first_band = dataset.isel(bands=0)
+        alpha = xr.full_like(first_band, 1)
+        alpha["bands"] = "A"
+
+    return alpha
+
+
 class MaskingCompositor(GenericCompositor):
     """A compositor that masks e.g. 
IR 10.8 channel data using cloud products from NWC SAF.""" From 258e32d2fa3ed2bda3afbace8e21b46be1968988 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Thu, 18 Apr 2024 15:33:34 +0800 Subject: [PATCH 1255/1416] Update test_composites.py --- satpy/tests/test_composites.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 49752d7ee3..60528cc048 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -1507,6 +1507,11 @@ def setup_class(cls): [[1., 0.5], [0., 1.]], [[1., 0.5], [0., 1.]], [[1., 0.5], [0., 1.]]])), + ("RGB", "LA", "RGBA", np.array([ + [[1., 0.5], [0., 1.]], + [[1., 0.5], [0., 1.]], + [[1., 0.5], [0., 1.]], + [[1., 1.], [1., 1.]]])), ("RGB", "RGBA", "RGBA", np.array([ [[1., 0.5], [0., 1.]], [[1., 0.5], [0., 1.]], From cd6fecfe1a1e6753e28b7978889ff39c80003194 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Thu, 18 Apr 2024 16:11:39 +0800 Subject: [PATCH 1256/1416] Update test_composites.py --- satpy/tests/test_composites.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 60528cc048..c075755d17 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -1496,6 +1496,10 @@ def setup_class(cls): ("foreground_bands", "background_bands", "exp_bands", "exp_result"), [ ("L", "L", "L", np.array([[1., 0.5], [0., 1.]])), + ("L", "RGB", "RGB", np.array([ + [[1., 0.5], [0., 1.]], + [[1., 0.5], [0., 1.]], + [[1., 0.5], [0., 1.]]])), ("LA", "LA", "LA", np.array([ [[1., 0.75], [0.5, 1.]], [[1., 1.], [1., 1.]]])), From 92b435c89e64790524a9fdce6cd27aa7b62df417 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Thu, 18 Apr 2024 16:33:25 +0800 Subject: [PATCH 1257/1416] Update __init__.py --- satpy/composites/__init__.py | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 3976b5adc5..3440c5631d 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1781,17 +1781,15 @@ def _simple_overlay(foreground: xr.DataArray, def _get_alpha(dataset: xr.DataArray): - # If the dataset contains an alpha channel, just use it - if "A" in dataset.attrs["mode"]: - alpha = dataset.sel(bands="A") - # There could be NaNs in the alpha - # Replace them with 0 to prevent cases like 1 + nan = nan, so they won't affect new_alpha - alpha = xr.where(alpha.isnull(), 0, alpha) - # If not, we still need one. So build it and fill it with 1 - else: - first_band = dataset.isel(bands=0) - alpha = xr.full_like(first_band, 1) - alpha["bands"] = "A" + # 1. This function is only used by _get_merged_image_data + # 2. Both foreground and background have been through add_bands, so they have the same mode + # 3. 
If none of them has alpha band, they will be passed directly to _simple_overlay not _get_merged_image_data + # So any dataset(whether foreground or background) passed to this function has an alpha band for certain + # We will use it directly + alpha = dataset.sel(bands="A") + # There could be NaNs in the alpha + # Replace them with 0 to prevent cases like 1 + nan = nan, so they won't affect new_alpha + alpha = xr.where(alpha.isnull(), 0, alpha) return alpha From 95f26b5f05dcd06fa37808202421c795481a3a3e Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Thu, 18 Apr 2024 16:34:09 +0800 Subject: [PATCH 1258/1416] Update __init__.py --- satpy/composites/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 3440c5631d..0b4d01c833 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1783,7 +1783,7 @@ def _simple_overlay(foreground: xr.DataArray, def _get_alpha(dataset: xr.DataArray): # 1. This function is only used by _get_merged_image_data # 2. Both foreground and background have been through add_bands, so they have the same mode - # 3. If none of them has alpha band, they will be passed directly to _simple_overlay not _get_merged_image_data + # 3. If none of them has alpha band, they will be passed to _simple_overlay not _get_merged_image_data # So any dataset(whether foreground or background) passed to this function has an alpha band for certain # We will use it directly alpha = dataset.sel(bands="A") From ef3290466e5871aad699136204bec0a319eb2151 Mon Sep 17 00:00:00 2001 From: clement laplace Date: Thu, 18 Apr 2024 10:22:41 +0000 Subject: [PATCH 1259/1416] fix: Correct the code according to the comment in https://github.com/pytroll/satpy/pull/2778 --- AUTHORS.md | 1 + satpy/etc/readers/fci_l1c_nc.yaml | 329 +++++++------------- satpy/readers/fci_l1c_nc.py | 67 ++-- satpy/tests/reader_tests/test_fci_l1c_nc.py | 315 +++++++++---------- 4 files changed, 284 insertions(+), 428 deletions(-) diff --git a/AUTHORS.md b/AUTHORS.md index 796ee9743b..7dcfd7d31d 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -88,3 +88,4 @@ The following people have made contributions to this project: - [Xin Zhang (zxdawn)](https://github.com/zxdawn) - [Yufei Zhu (yufeizhu600)](https://github.com/yufeizhu600) - [Youva Aoun (YouvaEUMex)](https://github.com/YouvaEUMex) +- [Clement Laplace (ClementLaplace)](https://github.com/ClementLaplace) diff --git a/satpy/etc/readers/fci_l1c_nc.yaml b/satpy/etc/readers/fci_l1c_nc.yaml index 88630cd017..5f86ad2326 100644 --- a/satpy/etc/readers/fci_l1c_nc.yaml +++ b/satpy/etc/readers/fci_l1c_nc.yaml @@ -6,7 +6,7 @@ reader: Reader for FCI L1c data in NetCDF4 format. Used to read Meteosat Third Generation (MTG) Flexible Combined Imager (FCI) L1c data. - status: Beta for full-disc FDHSI and HRFI, RSS not supported yet + status: Beta for full-disc FDHSI, HRFI, and African dissemination format. 
RSS not supported yet supports_fsspec: true reader: !!python/name:satpy.readers.yaml_reader.GEOVariableSegmentYAMLReader sensors: [fci] @@ -152,7 +152,6 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS04-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-VIS04-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: @@ -191,7 +190,6 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS05-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-VIS05-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: @@ -230,7 +228,6 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS08-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-VIS08-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: @@ -269,7 +266,6 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS09-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-VIS09-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: @@ -308,7 +304,6 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-NIR13-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-NIR13-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: @@ -347,7 +342,6 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-NIR16-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-NIR16-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: @@ -386,7 +380,6 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-NIR22-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-NIR22-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: @@ -425,7 +418,6 @@ file_types: file_patterns: [ 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR38-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-IR38-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: @@ -464,7 +456,6 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-WV63-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-WV63-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: @@ -503,7 +494,6 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-WV73-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-WV73-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: @@ -542,7 +532,6 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR87-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-IR87-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: @@ -581,7 +570,6 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR97-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-IR97-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: @@ -620,7 +608,6 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR105-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-IR105-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: @@ -659,7 +646,6 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR123-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-IR123-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: @@ -698,7 +684,6 @@ file_types: file_patterns: [ 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR133-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-IR133-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: @@ -738,7 +723,7 @@ datasets: sensor: fci wavelength: [0.384, 0.444, 0.504] resolution: - 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_04] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_04 } calibration: counts: @@ -756,7 +741,7 @@ datasets: sensor: fci wavelength: [0.470, 0.510, 0.550] resolution: - 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_05] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_05 } calibration: counts: @@ -793,7 +778,7 @@ datasets: sensor: fci wavelength: [0.815, 0.865, 0.915] resolution: - 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_08] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 } calibration: counts: @@ -811,7 +796,7 @@ datasets: sensor: fci wavelength: [0.894, 0.914, 0.934] resolution: - 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_09] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } calibration: counts: @@ -829,7 +814,7 @@ datasets: sensor: fci wavelength: [1.350, 1.380, 1.410] resolution: - 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_nir_13] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_13 } calibration: counts: @@ -847,7 +832,7 @@ datasets: sensor: fci wavelength: [1.560, 1.610, 1.660] resolution: - 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_nir_16] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } calibration: counts: @@ -866,7 +851,7 @@ datasets: wavelength: [2.200, 2.250, 2.300] resolution: 500: { file_type: fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_nir_22] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } calibration: counts: @@ -884,7 +869,7 @@ datasets: sensor: fci wavelength: [3.400, 3.800, 4.200] resolution: - 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_38] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_38 } calibration: @@ -903,7 +888,6 @@ datasets: sensor: fci wavelength: [5.300, 6.300, 7.300] resolution: - 1000: { file_type: fci_l1c_af_wv_63 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } calibration: @@ -922,7 +906,6 @@ datasets: sensor: fci wavelength: [6.850, 7.350, 7.850] resolution: - 1000: { file_type: fci_l1c_af_wv_73 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 } calibration: @@ -941,7 +924,6 @@ datasets: sensor: fci wavelength: [8.300, 8.700, 9.100] resolution: - 1000: { file_type: fci_l1c_af_ir_87 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_87 } calibration: @@ -960,7 +942,6 @@ 
datasets: sensor: fci wavelength: [9.360, 9.660, 9.960] resolution: - 1000: { file_type: fci_l1c_af_ir_97 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } calibration: @@ -979,7 +960,7 @@ datasets: sensor: fci wavelength: [9.800, 10.500, 11.200] resolution: - 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_105 } calibration: @@ -998,7 +979,6 @@ datasets: sensor: fci wavelength: [11.800, 12.300, 12.800] resolution: - 1000: { file_type: fci_l1c_af_ir_123 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } calibration: @@ -1017,7 +997,6 @@ datasets: sensor: fci wavelength: [12.700, 13.300, 13.900] resolution: - 1000: { file_type: fci_l1c_af_ir_133 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } calibration: @@ -1035,14 +1014,14 @@ datasets: name: vis_04_pixel_quality sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_04, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_04 } vis_05_pixel_quality: name: vis_05_pixel_quality sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_05, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_05 } vis_06_pixel_quality: @@ -1057,28 +1036,28 @@ datasets: name: vis_08_pixel_quality sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_08, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 } vis_09_pixel_quality: name: vis_09_pixel_quality sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_09, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } nir_13_pixel_quality: name: nir_13_pixel_quality sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_nir_13, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_13 } nir_16_pixel_quality: name: nir_16_pixel_quality sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_nir_16, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } nir_22_pixel_quality: @@ -1086,14 +1065,14 @@ datasets: sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_af_nir_22, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } ir_38_pixel_quality: name: ir_38_pixel_quality sensor: fci resolution: - 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_38] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_38 } @@ -1101,7 +1080,6 @@ datasets: name: wv_63_pixel_quality sensor: fci resolution: - 1000: { file_type: fci_l1c_af_wv_63 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } @@ -1109,7 +1087,6 @@ datasets: name: wv_73_pixel_quality sensor: fci resolution: - 1000: { file_type: fci_l1c_af_wv_73 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 } @@ -1117,7 +1094,6 @@ datasets: name: ir_87_pixel_quality sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_87 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_87 } @@ -1125,7 +1101,6 @@ datasets: name: ir_97_pixel_quality sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_97 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } @@ -1133,7 +1108,7 @@ datasets: name: ir_105_pixel_quality sensor: fci resolution: - 1000: { 
file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_105 } @@ -1141,7 +1116,6 @@ datasets: name: ir_123_pixel_quality sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_123 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } @@ -1149,7 +1123,6 @@ datasets: name: ir_133_pixel_quality sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_133 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } @@ -1157,14 +1130,14 @@ datasets: name: vis_04_index_map sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_04, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_04 } vis_05_index_map: name: vis_05_index_map sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_05, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_05 } vis_06_index_map: @@ -1179,28 +1152,28 @@ datasets: name: vis_08_index_map sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_08, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 } vis_09_index_map: name: vis_09_index_map sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_09, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } nir_13_index_map: name: nir_13_index_map sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_nir_13, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_13 } nir_16_index_map: name: nir_16_index_map sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_nir_16, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } nir_22_index_map: @@ -1208,14 +1181,14 @@ datasets: sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_af_nir_22, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } ir_38_index_map: name: ir_38_index_map sensor: fci resolution: - 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_38] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_38 } @@ -1223,7 +1196,6 @@ datasets: name: wv_63_index_map sensor: fci resolution: - 1000: { file_type: fci_l1c_af_wv_63 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } @@ -1231,7 +1203,6 @@ datasets: name: wv_73_index_map sensor: fci resolution: - 1000: { file_type: fci_l1c_af_wv_73 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 } @@ -1239,7 +1210,6 @@ datasets: name: ir_87_index_map sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_87 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_87 } @@ -1247,7 +1217,6 @@ datasets: name: ir_97_index_map sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_97 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } @@ -1255,7 +1224,7 @@ datasets: name: ir_105_index_map sensor: fci resolution: - 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_105 } @@ -1263,7 +1232,6 @@ datasets: name: ir_123_index_map sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_123 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } @@ -1271,7 +1239,6 @@ datasets: name: ir_133_index_map sensor: fci resolution: - 
1000: { file_type: fci_l1c_af_ir_133 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } @@ -1280,7 +1247,7 @@ datasets: units: s sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_04, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_04 } vis_05_time: @@ -1288,7 +1255,7 @@ datasets: units: s sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_05, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_05 } vis_06_time: @@ -1305,7 +1272,7 @@ datasets: units: s sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_08, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 } vis_09_time: @@ -1313,7 +1280,7 @@ datasets: units: s sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_09, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } nir_13_time: @@ -1321,7 +1288,7 @@ datasets: units: s sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_nir_13, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_13 } nir_16_time: @@ -1329,7 +1296,7 @@ datasets: units: s sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_nir_16, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } nir_22_time: @@ -1338,7 +1305,7 @@ datasets: sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_af_nir_22, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } ir_38_time: @@ -1346,7 +1313,7 @@ datasets: units: s sensor: fci resolution: - 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_38] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_38 } @@ -1355,7 +1322,6 @@ datasets: units: s sensor: fci resolution: - 1000: { file_type: fci_l1c_af_wv_63 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } @@ -1364,7 +1330,6 @@ datasets: units: s sensor: fci resolution: - 1000: { file_type: fci_l1c_af_wv_73 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 } @@ -1373,7 +1338,6 @@ datasets: units: s sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_87 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_87 } @@ -1382,7 +1346,6 @@ datasets: units: s sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_97 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } @@ -1391,7 +1354,7 @@ datasets: units: s sensor: fci resolution: - 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_105 } @@ -1400,7 +1363,6 @@ datasets: units: s sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_123 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } @@ -1409,7 +1371,6 @@ datasets: units: s sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_133 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } @@ -1417,14 +1378,14 @@ datasets: name: vis_04_swath_direction sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_04, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_04 } vis_05_swath_direction: name: vis_05_swath_direction sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_05, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { 
file_type: fci_l1c_af_vis_05 } vis_06_swath_direction: @@ -1439,28 +1400,28 @@ datasets: name: vis_08_swath_direction sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_08, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 } vis_09_swath_direction: name: vis_09_swath_direction sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_09, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } nir_13_swath_direction: name: nir_13_swath_direction sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_nir_13, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_13 } nir_16_swath_direction: name: nir_16_swath_direction sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_nir_16, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } nir_22_swath_direction: @@ -1468,14 +1429,14 @@ datasets: sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_af_nir_22, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } ir_38_swath_direction: name: ir_38_swath_direction sensor: fci resolution: - 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_38] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_38 } @@ -1483,7 +1444,6 @@ datasets: name: wv_63_swath_direction sensor: fci resolution: - 1000: { file_type: fci_l1c_af_wv_63 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } @@ -1491,7 +1451,6 @@ datasets: name: wv_73_swath_direction sensor: fci resolution: - 1000: { file_type: fci_l1c_af_wv_73 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 } @@ -1499,7 +1458,6 @@ datasets: name: ir_87_swath_direction sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_87 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_87 } @@ -1507,7 +1465,6 @@ datasets: name: ir_97_swath_direction sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_97 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } @@ -1515,7 +1472,7 @@ datasets: name: ir_105_swath_direction sensor: fci resolution: - 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_105 } @@ -1523,7 +1480,6 @@ datasets: name: ir_123_swath_direction sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_123 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } @@ -1531,7 +1487,6 @@ datasets: name: ir_133_swath_direction sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_133 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } @@ -1539,14 +1494,14 @@ datasets: name: vis_04_swath_number sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_04, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_04 } vis_05_swath_number: name: vis_05_swath_number sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_05, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_05 } vis_06_swath_number: @@ -1561,28 +1516,28 @@ datasets: name: vis_08_swath_number sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_08, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 } vis_09_swath_number: name: vis_09_swath_number sensor: 
fci resolution: - 1000: { file_type: [fci_l1c_af_vis_09, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } nir_13_swath_number: name: nir_13_swath_number sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_nir_13, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_13 } nir_16_swath_number: name: nir_16_swath_number sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_nir_16, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } nir_22_swath_number: @@ -1590,14 +1545,14 @@ datasets: sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_af_nir_22, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } ir_38_swath_number: name: ir_38_swath_number sensor: fci resolution: - 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_38] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_38 } @@ -1605,7 +1560,6 @@ datasets: name: wv_63_swath_number sensor: fci resolution: - 1000: { file_type: fci_l1c_af_wv_63 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } @@ -1613,7 +1567,6 @@ datasets: name: wv_73_swath_number sensor: fci resolution: - 1000: { file_type: fci_l1c_af_wv_73 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 } @@ -1621,7 +1574,6 @@ datasets: name: ir_87_swath_number sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_87 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_87 } @@ -1629,7 +1581,6 @@ datasets: name: ir_97_swath_number sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_97 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } @@ -1637,7 +1588,7 @@ datasets: name: ir_105_swath_number sensor: fci resolution: - 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_105 } @@ -1645,7 +1596,6 @@ datasets: name: ir_123_swath_number sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_123 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } @@ -1653,7 +1603,6 @@ datasets: name: ir_133_swath_number sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_133 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } @@ -1662,7 +1611,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_04, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_04 } vis_05_subsatellite_latitude: @@ -1670,7 +1619,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_05, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_05 } vis_06_subsatellite_latitude: @@ -1687,7 +1636,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_08, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 } vis_09_subsatellite_latitude: @@ -1695,7 +1644,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_09, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } nir_13_subsatellite_latitude: @@ -1703,7 +1652,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_nir_13, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { 
file_type: fci_l1c_af_nir_13 } nir_16_subsatellite_latitude: @@ -1711,7 +1660,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_nir_16, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } nir_22_subsatellite_latitude: @@ -1720,7 +1669,7 @@ datasets: sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_af_nir_22, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } ir_38_subsatellite_latitude: @@ -1728,7 +1677,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_38] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_38 } @@ -1737,7 +1686,6 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: fci_l1c_af_wv_63 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } @@ -1746,7 +1694,6 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: fci_l1c_af_wv_73 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 } @@ -1755,7 +1702,6 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_87 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_87 } @@ -1764,7 +1710,6 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_97 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } @@ -1773,7 +1718,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_105 } @@ -1782,7 +1727,6 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_123 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } @@ -1791,7 +1735,6 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_133 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } @@ -1800,7 +1743,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_04, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_04 } vis_05_subsatellite_longitude: @@ -1808,7 +1751,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_05, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_05 } vis_06_subsatellite_longitude: @@ -1825,7 +1768,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_08, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 } vis_09_subsatellite_longitude: @@ -1833,7 +1776,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_09, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } nir_13_subsatellite_longitude: @@ -1841,7 +1784,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_nir_13, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_13 } nir_16_subsatellite_longitude: @@ -1849,7 +1792,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_nir_16, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } nir_22_subsatellite_longitude: @@ -1858,7 +1801,7 @@ datasets: sensor: fci resolution: 500: { 
file_type: fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_af_nir_22, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } ir_38_subsatellite_longitude: @@ -1866,7 +1809,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_38] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_38 } @@ -1875,7 +1818,6 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: fci_l1c_af_wv_63 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } @@ -1884,7 +1826,6 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: fci_l1c_af_wv_73 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 } @@ -1893,7 +1834,6 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_87 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_87 } @@ -1902,7 +1842,6 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_97 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } @@ -1911,7 +1850,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_105 } @@ -1920,7 +1859,6 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_123 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } @@ -1929,7 +1867,6 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_133 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } @@ -1938,7 +1875,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_04, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_04 } vis_05_subsolar_latitude: @@ -1946,7 +1883,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_05, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_05 } vis_06_subsolar_latitude: @@ -1963,7 +1900,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_08, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 } vis_09_subsolar_latitude: @@ -1971,7 +1908,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_09, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } nir_13_subsolar_latitude: @@ -1979,7 +1916,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_nir_13, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_13 } nir_16_subsolar_latitude: @@ -1987,7 +1924,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_nir_16, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } nir_22_subsolar_latitude: @@ -1996,7 +1933,7 @@ datasets: sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_af_nir_22, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } ir_38_subsolar_latitude: @@ -2004,7 +1941,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_38] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { 
file_type: fci_l1c_af_ir_38 } @@ -2013,7 +1950,6 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: fci_l1c_af_wv_63 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } @@ -2022,7 +1958,6 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: fci_l1c_af_wv_73 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 } @@ -2031,7 +1966,6 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_87 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_87 } @@ -2040,7 +1974,6 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_97 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } @@ -2049,7 +1982,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_105 } @@ -2058,7 +1991,6 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_123 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } @@ -2067,7 +1999,6 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_133 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } @@ -2076,7 +2007,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_04, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_04 } vis_05_subsolar_longitude: @@ -2084,7 +2015,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_05, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_05 } vis_06_subsolar_longitude: @@ -2101,7 +2032,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_08, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 } vis_09_subsolar_longitude: @@ -2109,7 +2040,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_09, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } nir_13_subsolar_longitude: @@ -2117,7 +2048,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_nir_13, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_13 } nir_16_subsolar_longitude: @@ -2125,7 +2056,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_nir_16, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } nir_22_subsolar_longitude: @@ -2134,7 +2065,7 @@ datasets: sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_af_nir_22, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } ir_38_subsolar_longitude: @@ -2142,7 +2073,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_38] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_38 } @@ -2151,7 +2082,6 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: fci_l1c_af_wv_63 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } @@ -2160,7 +2090,6 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: fci_l1c_af_wv_73 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 
} @@ -2169,7 +2098,6 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_87 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_87 } @@ -2178,7 +2106,6 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_97 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } @@ -2187,7 +2114,7 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af } @@ -2196,7 +2123,6 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_123 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } @@ -2205,7 +2131,6 @@ datasets: units: deg sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_133 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } @@ -2214,7 +2139,7 @@ datasets: units: m sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_04, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_04 } vis_05_platform_altitude: @@ -2222,7 +2147,7 @@ datasets: units: m sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_05, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_05 } vis_06_platform_altitude: @@ -2239,7 +2164,7 @@ datasets: units: m sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_08, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 } vis_09_platform_altitude: @@ -2247,7 +2172,7 @@ datasets: units: m sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_09, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } nir_13_platform_altitude: @@ -2255,7 +2180,7 @@ datasets: units: m sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_nir_13, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_13 } nir_16_platform_altitude: @@ -2263,7 +2188,7 @@ datasets: units: m sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_nir_16, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } nir_22_platform_altitude: @@ -2272,7 +2197,7 @@ datasets: sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_af_nir_22, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } ir_38_platform_altitude: @@ -2280,7 +2205,7 @@ datasets: units: m sensor: fci resolution: - 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_38] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_38 } @@ -2289,7 +2214,6 @@ datasets: units: m sensor: fci resolution: - 1000: { file_type: fci_l1c_af_wv_63 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } @@ -2298,7 +2222,6 @@ datasets: units: m sensor: fci resolution: - 1000: { file_type: fci_l1c_af_wv_73 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 } @@ -2307,7 +2230,6 @@ datasets: units: m sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_87 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_87 } @@ -2316,7 +2238,6 @@ datasets: units: m sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_97 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } @@ -2325,7 +2246,7 @@ datasets: units: m sensor: fci 
resolution: - 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af } @@ -2334,7 +2255,6 @@ datasets: units: m sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_123 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } @@ -2343,7 +2263,6 @@ datasets: units: m sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_133 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } @@ -2360,7 +2279,7 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_05, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_05 } vis_06_earth_sun_distance: @@ -2377,7 +2296,7 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_08, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 } vis_09_earth_sun_distance: @@ -2385,7 +2304,7 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_09, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } nir_13_earth_sun_distance: @@ -2393,7 +2312,7 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_nir_13, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_13 } nir_16_earth_sun_distance: @@ -2401,7 +2320,7 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_nir_16, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } nir_22_earth_sun_distance: @@ -2410,7 +2329,7 @@ datasets: sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_af_nir_22, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } ir_38_earth_sun_distance: @@ -2418,7 +2337,7 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_38] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_38 } @@ -2427,7 +2346,6 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: fci_l1c_af_wv_63 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } @@ -2436,7 +2354,6 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: fci_l1c_af_wv_73 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 } @@ -2445,7 +2362,6 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_87 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_87 } @@ -2454,7 +2370,6 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_97 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } @@ -2463,7 +2378,7 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_105 } @@ -2472,7 +2387,6 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_123 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } @@ -2481,7 +2395,6 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_133 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } @@ -2490,7 +2403,7 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: 
[fci_l1c_af_vis_04, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_04 } vis_05_sun_satellite_distance: @@ -2498,7 +2411,7 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_05, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_05 } vis_06_sun_satellite_distance: @@ -2515,7 +2428,7 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_08, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 } vis_09_sun_satellite_distance: @@ -2523,7 +2436,7 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_vis_09, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } nir_13_sun_satellite_distance: @@ -2531,7 +2444,7 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_nir_13, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_13 } nir_16_sun_satellite_distance: @@ -2539,7 +2452,7 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: [fci_l1c_af_nir_16, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } nir_22_sun_satellite_distance: @@ -2548,7 +2461,7 @@ datasets: sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } - 1000: { file_type: [fci_l1c_af_nir_22, fci_l1c_fdhsi] } + 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } ir_38_sun_satellite_distance: @@ -2556,16 +2469,15 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } - 3000: { file_type: fci_l1c_af } + 3000: { file_type: fci_l1c_af_ir_38 } wv_63_sun_satellite_distance: name: wv_63_sun_satellite_distance units: km sensor: fci resolution: - 1000: { file_type: fci_l1c_af_wv_63 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } @@ -2574,7 +2486,6 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: fci_l1c_af_wv_73 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 } @@ -2583,7 +2494,6 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_87 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_87 } @@ -2592,7 +2502,6 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_97 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } @@ -2601,7 +2510,7 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: [fci_l1c_hrfi, fci_l1c_af_ir_105] } + 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_105 } @@ -2610,7 +2519,6 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_123 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } @@ -2619,6 +2527,5 @@ datasets: units: km sensor: fci resolution: - 1000: { file_type: fci_l1c_af_ir_133 } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index b377ae0177..5b7c669d21 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -29,7 +29,8 @@ .. note:: This reader currently supports Full Disk High Spectral Resolution Imagery - (FDHSI) and High Spatial Resolution Fast Imagery (HRFI) data in full-disc ("FD") scanning mode. 
+ (FDHSI) and High Spatial Resolution Fast Imagery (HRFI) data in full-disc ("FD") scanning mode. + Support for the African case ("AF") scanning mode has been added. If the user provides a list of both FDHSI and HRFI files from the same repeat cycle to the Satpy ``Scene``, Satpy will automatically read the channels from the source with the finest resolution, i.e. from the HRFI files for the vis_06, nir_22, ir_38, and ir_105 channels. @@ -154,7 +155,7 @@ "fci_l1c_fdhsi": {"grid_type": "2km", "grid_width": 5568}, "fci_l1c_af":{"grid_type": "3km", - "grid_width":3712}} + "grid_width": 3712}} def _get_aux_data_name_from_dsname(dsname): @@ -266,8 +267,19 @@ def get_channel_measured_group_path(self, channel): return measured_group_path - def _get_segment_position_info_FD(self): - """get_position_info applied for FD.""" + def get_segment_position_info(self): + """Get information about the size and the position of the segment inside the final image array. + + As the final array is composed by stacking segments vertically, the position of a segment + inside the array is defined by the numbers of the start (lowest) and end (highest) row of the segment. + The row numbering is assumed to start with 1. + This info is used in the GEOVariableSegmentYAMLReader to compute optimal segment sizes for missing segments. + + Note: in the FCI terminology, a segment is actually called "chunk". To avoid confusion with the dask concept + of chunk, and to be consistent with SEVIRI, we opt to use the word segment. + + Note: This function is not used for the African data, as these contain only one segment. + """ file_type = self.filetype_info["file_type"] vis_06_measured_path = self.get_channel_measured_group_path("vis_06") ir_105_measured_path = self.get_channel_measured_group_path("ir_105")
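To make the row convention documented above concrete, here is a minimal sketch (not part of the patch), assuming 1-based row numbering with inclusive start and end rows, as the docstring states:

def segment_height(start_position_row, end_position_row):
    # Rows are numbered from 1 and both bounds are inclusive.
    return end_position_row - start_position_row + 1

# An AF product is a single segment covering the full 3km grid of 67 rows:
assert segment_height(1, 67) == 67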
@@ -289,51 +301,6 @@ def _get_segment_position_info_FD(self): } return segment_position_info - - def _get_segment_position_info_AF(self): - """get_position_info applied for AF.""" - file_type = self.filetype_info["file_type"] - channel_data = [key for key in self.file_content.keys() - if ((key.startswith("data/vis") or - key.startswith("data/ir") or - key.startswith("data/hrv") or - key.startswith("data/nir") or - key.startswith("data/wv")) - and key.endswith("measured"))][0] - segment_position_info = { - HIGH_RES_GRID_INFO[file_type]["grid_type"]: { - "start_position_row": self.get_and_cache_npxr(f"{channel_data}/start_position_row").item(), - "end_position_row": self.get_and_cache_npxr(f"{channel_data}/end_position_row").item(), - "segment_height": self.get_and_cache_npxr(f"{channel_data}/end_position_row").item() - - self.get_and_cache_npxr(f"{channel_data}/start_position_row").item() + 1, - "grid_width": HIGH_RES_GRID_INFO[file_type]["grid_width"] - }, - LOW_RES_GRID_INFO[file_type]["grid_type"]: { - "start_position_row": self.get_and_cache_npxr(f"{channel_data}/start_position_row").item(), - "end_position_row": self.get_and_cache_npxr(f"{channel_data}/end_position_row").item(), - "segment_height": self.get_and_cache_npxr(f"{channel_data}/end_position_row").item() - - self.get_and_cache_npxr(f"{channel_data}/start_position_row").item() + 1, - "grid_width": LOW_RES_GRID_INFO[file_type]["grid_width"] - } - } - return segment_position_info - - def get_segment_position_info(self): - """Get information about the size and the position of the segment inside the final image array. - - As the final array is composed by stacking segments vertically, the position of a segment - inside the array is defined by the numbers of the start (lowest) and end (highest) row of the segment. - The row numbering is assumed to start with 1. - This info is used in the GEOVariableSegmentYAMLReader to compute optimal segment sizes for missing segments. - - Note: in the FCI terminology, a segment is actually called "chunk". To avoid confusion with the dask concept - of chunk, and to be consistent with SEVIRI, we opt to use the word segment. - """ - if self.filename_info["coverage"] == "AF": - return self._get_segment_position_info_AF() - else : - return self._get_segment_position_info_FD() - def get_dataset(self, key, info=None): """Load a dataset.""" logger.debug("Reading {} from {}".format(key["name"], self.filename)) @@ -434,6 +401,7 @@ def orbital_param(self): actual_subsat_lon = float(np.nanmean(self._get_aux_data_lut_vector("subsatellite_longitude"))) actual_subsat_lat = float(np.nanmean(self._get_aux_data_lut_vector("subsatellite_latitude"))) actual_sat_alt = float(np.nanmean(self._get_aux_data_lut_vector("platform_altitude"))) + # The "try" is a temporary workaround, needed as long as the AF data are not modified try : nominal_and_proj_subsat_lon = float( self.get_and_cache_npxr("data/mtg_geos_projection/attr/longitude_of_projection_origin")) @@ -591,6 +559,7 @@ def get_area_def(self, key): a = float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/semi_major_axis")) h = float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/perspective_point_height")) rf = float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/inverse_flattening")) + # The "try" is a temporary workaround, needed as long as the AF data are not modified try: lon_0 = float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/longitude_of_projection_origin")) except ValueError: diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index e8ac46f5d6..af4d487eb1 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -58,7 +58,7 @@ "add_offset": 1.55617776423501e-01, }, "3km": { - "nrows": 66, + "nrows": 67, "ncols": 3712, "scale_factor": 8.38307287956433e-05, "add_offset": 0.155631748009112, @@ -396,7 +396,6 @@ def _get_reader_with_filehandlers(filenames, reader_configs): clear_cache(reader) return reader - def clear_cache(reader): """Clear the cache for file handlers in reader.""" for key in reader.file_handlers: @@ -409,7 +408,66 @@ def clear_cache(reader): list_channel_terran = ["ir_38", "wv_63", "wv_73", "ir_87", "ir_97", "ir_105", "ir_123", "ir_133"] list_total_channel = list_channel_solar + list_channel_terran -list_resolution = ["1km","3km"] +list_resolution_v06 = ["1km","3km"] +list_resolution = ["3km"] +expected_pos_info_for_filetype = { + "fdhsi": {"1km": {"start_position_row": 1, + "end_position_row": 200, + "segment_height": 200, + "grid_width": 11136}, + "2km": {"start_position_row": 1, + "end_position_row": 100, + "segment_height": 100, + "grid_width": 5568}}, + "hrfi": {"500m": {"start_position_row": 1, + "end_position_row": 400, + "segment_height": 400, + "grid_width": 22272}, + "1km": {"start_position_row": 1, + "end_position_row": 200, + "grid_width": 11136, + "segment_height": 200}}, + "fci_af" : {"3km": {"start_position_row": 1, + "end_position_row": 67, + "segment_height": 67, + "grid_width": 3712 + }, + }, + "fci_af_vis_06" : {"3km": {"start_position_row": 1, 
"end_position_row": 67, + "segment_height": 67, + "grid_width": 3712 + }, + "1km": {"start_position_row": 1, + "end_position_row": 200, + "grid_width": 11136, + "segment_height": 200} + } + } + + +def resolutions(channel): + """Get the resolutions.""" + if channel == "vis_06": + return list_resolution_v06 + else: + return list_resolution + +def generate_parameters_segment(list_channel): + """Generate dinamicaly the parameters.""" + for channel in list_channel: + if channel == "vis_06": + expected_pos_info = expected_pos_info_for_filetype["fci_af_vis_06"] + else : + expected_pos_info = expected_pos_info_for_filetype["fci_af"] + for resolution in resolutions(channel): + yield (channel, resolution, expected_pos_info) + +def generate_parameters(list_channel): + """Generate dinamicaly the parameters.""" + for channel in list_channel: + for resolution in resolutions(channel): + yield (channel, resolution) _chans_fdhsi = {"solar": list_channel_solar, "solar_grid_type": ["1km"] * 8, @@ -421,8 +479,30 @@ def clear_cache(reader): "terran": ["ir_38", "ir_105"], "terran_grid_type": ["1km"] * 2} -_chans_af = {} - +_dict_arg_radiance = {"dtype": np.float32, + "value_1": 15, + "value_0":9700, + "attrs_dict":{"calibration":"radiance", + "units":"mW m-2 sr-1 (cm-1)-1", + "radiance_unit_conversion_coefficient": np.float32(1234.56) + } + } + +_dict_arg_counts = {"dtype": np.uint16, + "value_1": 1, + "value_0": 5000, + "attrs_dict":{"calibration":"counts", + "units":"count", + } + } + +_dict_arg_bt = {"dtype": np.float32, + "value_1": np.float32(209.68275), + "value_0": np.float32(1888.8513), + "attrs_dict":{"calibration":"brightness_temperature", + "units":"K", + } + } _test_filenames = {"fdhsi": [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" @@ -436,20 +516,28 @@ def clear_cache(reader): ] } -for channel in list_total_channel: - for resol in list_resolution: - chann_upp = channel.replace("_","").upper() - _test_filenames[f"af_{channel}_{resol}"] = [f"W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1-FCI-1C-RRAD" - f"-{resol.upper()}-AF-{chann_upp}-x-x---NC4E_C_EUMT_20240125144655_DT_OPE" - f"_20240109080007_20240109080924_N_JLS_T_0049_0000.nc"] - if channel.split("_")[0] in ["vis","nir"]: - _chans_af[f"{channel}_{resol}"] = {"solar":[channel], - "solar_grid_type": [resol]} - elif channel.split("_")[0] in ["ir","wv"]: - _chans_af[f"{channel}_{resol}"] = {"terran":[channel], - "terran_grid_type": [resol]} - -#W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1-FCI-1C-RRAD-1KM-AF-VIS06-x-x---NC4E_C_EUMT_20240125144647_DT_OPE_20240109080007_20240109080924_N_JLS_T_0049_0000.nc +def fill_chans_af(): + """Fill the dict _chans_af with the right channel and resolution.""" + _chans_af = {} + for channel in list_total_channel: + if channel == "vis_06": + list_resol = list_resolution_v06 + else : + list_resol = list_resolution + for resol in list_resol: + chann_upp = channel.replace("_","").upper() + _test_filenames[f"af_{channel}_{resol}"] = [f"W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1-FCI-1C-RRAD" + f"-{resol.upper()}-AF-{chann_upp}-x-x---NC4E_C_EUMT_20240125144655_DT_OPE" + f"_20240109080007_20240109080924_N_JLS_T_0049_0000.nc"] + if channel.split("_")[0] in ["vis","nir"]: + _chans_af[f"{channel}_{resol}"] = {"solar":[channel], + "solar_grid_type": [resol]} + elif channel.split("_")[0] in ["ir","wv"]: + _chans_af[f"{channel}_{resol}"] = {"terran":[channel], + "terran_grid_type": [resol]} + return _chans_af + +_chans_af = fill_chans_af() @contextlib.contextmanager def mocked_basefilehandler(filehandler): @@ -485,13 +573,13 @@ def 
@@ -485,13 +573,13 @@ def FakeFCIFileHandlerHRFI_fixture(): @pytest.fixture() def FakeFCIFileHandlerAF_fixture(channel,resolution): - """Get a fixture for the fake AF filehandler, including channel and file names.""" + """Get a fixture for the fake AF filehandler; it contains only one channel and one resolution.""" chan_patterns = {channel.split("_")[0]+"_{:>02d}": {"channels": [int(channel.split("_")[1])], "grid_type": f"{resolution}"},} FakeFCIFileHandlerAF.chan_patterns = chan_patterns with mocked_basefilehandler(FakeFCIFileHandlerAF): param_dict = { - "filetype": f"fci_l1c_af_{channel}", + "filetype": "fci_l1c_af", "channels": _chans_af[f"{channel}_{resolution}"], "filenames": _test_filenames[f"af_{channel}_{resolution}"], } @@ -517,6 +605,22 @@ def _get_type_ter_AF(self,channel): elif channel.split("_")[0] in ["wv","ir"]: return "terran" + def _get_assert_attrs(self,res,ch,attrs_dict): + """Test the different attribute values.""" + for key,item in attrs_dict.items(): + assert res[ch].attrs[key] == item + + def _get_assert_load(self,res,ch,grid_type,dict_arg): + """Test the values for the different channels.""" + assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) + assert res[ch].dtype == dict_arg["dtype"] + self._get_assert_attrs(res,ch,dict_arg["attrs_dict"]) + if ch == "ir_38": + numpy.testing.assert_array_equal(res[ch][-1], dict_arg["value_1"]) + numpy.testing.assert_array_equal(res[ch][0], dict_arg["value_0"]) + else: + numpy.testing.assert_array_equal(res[ch], dict_arg["value_1"]) def _get_res_AF(self,channel,fh_param,calibration,reader_configs): """Load the reader for AF data.""" @@ -558,19 +662,9 @@ def test_load_counts(self, reader_configs, fh_param, for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"], fh_param["channels"]["solar_grid_type"] + fh_param["channels"]["terran_grid_type"]): - assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) - assert res[ch].dtype == np.uint16 - assert res[ch].attrs["calibration"] == "counts" - assert res[ch].attrs["units"] == "count" - if ch == "ir_38": - numpy.testing.assert_array_equal(res[ch][-1], 1) - numpy.testing.assert_array_equal(res[ch][0], 5000) - else: - numpy.testing.assert_array_equal(res[ch], 1) + self._get_assert_load(res,ch,grid_type,_dict_arg_counts) - @pytest.mark.parametrize("channel",list_total_channel) - @pytest.mark.parametrize("resolution",list_resolution) + @pytest.mark.parametrize(("channel", "resolution"), generate_parameters(list_total_channel)) def test_load_counts_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel): """Test loading with counts for AF files.""" expected_res_n = 1 @@ -581,17 +675,7 @@ def test_load_counts_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel assert expected_res_n == len(res) for ch, grid_type in zip(fh_param["channels"][type_ter], fh_param["channels"][f"{type_ter}_grid_type"]): - assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) - assert res[ch].dtype == np.uint16 - assert res[ch].attrs["calibration"] == calibration - assert res[ch].attrs["units"] == "count" - if ch == "ir_38": - numpy.testing.assert_array_equal(res[ch][-1], 1) - numpy.testing.assert_array_equal(res[ch][0], 5000) - else: - numpy.testing.assert_array_equal(res[ch], 1) + self._get_assert_load(res,ch,grid_type,_dict_arg_counts)
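For reference, loading one of these single-channel AF products in user code would look roughly like the sketch below; the file name is hypothetical, built from the AF naming pattern used in fill_chans_af above, and ir_105 is assumed to resolve to brightness temperature by default:

from satpy import Scene

# Hypothetical AF file name, following the pattern used in the tests above.
filenames = ["W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1-FCI-1C-RRAD-3KM-AF-IR105-x-x"
             "---NC4E_C_EUMT_20240125144655_DT_OPE_20240109080007_20240109080924"
             "_N_JLS_T_0049_0000.nc"]
scn = Scene(filenames=filenames, reader="fci_l1c_nc")
scn.load(["ir_105"])  # loads the default (brightness temperature) calibration
print(scn["ir_105"].attrs["calibration"])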
@pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)]) @@ -606,21 +690,9 @@ def test_load_radiance(self, reader_configs, fh_param, for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"], fh_param["channels"]["solar_grid_type"] + fh_param["channels"]["terran_grid_type"]): - assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) - assert res[ch].dtype == np.float32 - assert res[ch].attrs["calibration"] == "radiance" - assert res[ch].attrs["units"] == "mW m-2 sr-1 (cm-1)-1" - assert res[ch].attrs["radiance_unit_conversion_coefficient"].values == np.float32(1234.56) - if ch == "ir_38": - numpy.testing.assert_array_equal(res[ch][-1], 15) - numpy.testing.assert_array_equal(res[ch][0], 9700) - else: - numpy.testing.assert_array_equal(res[ch], 15) - + self._get_assert_load(res,ch,grid_type,_dict_arg_radiance) - @pytest.mark.parametrize("channel",list_total_channel) - @pytest.mark.parametrize("resolution",list_resolution) + @pytest.mark.parametrize(("channel", "resolution"), generate_parameters(list_total_channel)) def test_load_radiance_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel): """Test loading with radiance for AF files.""" expected_res_n = 1 @@ -631,17 +703,7 @@ def test_load_radiance_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,chann assert expected_res_n == len(res) for ch, grid_type in zip(fh_param["channels"][type_ter], fh_param["channels"][f"{type_ter}_grid_type"]): - assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) - assert res[ch].dtype == np.float32 - assert res[ch].attrs["calibration"] == calibration - assert res[ch].attrs["units"] == "mW m-2 sr-1 (cm-1)-1" - assert res[ch].attrs["radiance_unit_conversion_coefficient"].values == np.float32(1234.56) - if ch == "ir_38": - numpy.testing.assert_array_equal(res[ch][-1], 15) - numpy.testing.assert_array_equal(res[ch][0], 9700) - else: - numpy.testing.assert_array_equal(res[ch], 15) + self._get_assert_load(res,ch,grid_type,_dict_arg_radiance) @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 8), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 2)]) @@ -661,8 +723,7 @@ def test_load_reflectance(self, reader_configs, fh_param, assert res[ch].attrs["units"] == "%" numpy.testing.assert_array_almost_equal(res[ch], 100 * 15 * 1 * np.pi / 50) - @pytest.mark.parametrize("channel",list_channel_solar) - @pytest.mark.parametrize("resolution",list_resolution) + @pytest.mark.parametrize(("channel", "resolution"), generate_parameters(list_channel_solar)) def test_load_reflectance_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel): """Test loading with reflectance for AF files.""" expected_res_n = 1 @@ -693,21 +754,9 @@ def test_load_bt(self, reader_configs, caplog, fh_param, assert caplog.text == "" assert expected_res_n == len(res) for ch, grid_type in zip(fh_param["channels"]["terran"], fh_param["channels"]["terran_grid_type"]): - assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) - assert res[ch].dtype == np.float32 - assert res[ch].attrs["calibration"] == "brightness_temperature" - assert res[ch].attrs["units"] == "K" - - if ch == "ir_38": - 
numpy.testing.assert_array_almost_equal(res[ch][-1], np.float32(209.68275)) - numpy.testing.assert_array_almost_equal(res[ch][0], np.float32(1888.8513)) - else: - numpy.testing.assert_array_almost_equal(res[ch], np.float32(209.68275)) + self._get_assert_load(res,ch,grid_type,_dict_arg_bt) - - @pytest.mark.parametrize("channel",list_channel_terran) - @pytest.mark.parametrize("resolution",list_resolution) + @pytest.mark.parametrize(("channel","resolution"), generate_parameters(list_channel_terran)) def test_load_bt_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,caplog): """Test loading with brightness_temperature for AF files.""" expected_res_n = 1 @@ -720,17 +769,7 @@ def test_load_bt_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,cap assert expected_res_n == len(res) for ch, grid_type in zip(fh_param["channels"][type_ter], fh_param["channels"][f"{type_ter}_grid_type"]): - assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) - assert res[ch].dtype == np.float32 - assert res[ch].attrs["calibration"] == "brightness_temperature" - assert res[ch].attrs["units"] == "K" - - if ch == "ir_38": - numpy.testing.assert_array_almost_equal(res[ch][-1], np.float32(209.68275)) - numpy.testing.assert_array_almost_equal(res[ch][0], np.float32(1888.8513)) - else: - numpy.testing.assert_array_almost_equal(res[ch], np.float32(209.68275)) + self._get_assert_load(res,ch,grid_type,_dict_arg_bt) @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))]) @@ -754,49 +793,6 @@ def test_orbital_parameters_attr(self, reader_configs, fh_param): "projection_altitude": 35786400.0, } - @pytest.mark.parametrize("channel",list_total_channel) - @pytest.mark.parametrize("resolution",list_resolution) - def test_orbital_parameters_attr_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel): - """Test the orbital parametters for AF data.""" - expected_res_n = 1 - fh_param = FakeFCIFileHandlerAF_fixture - reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) - type_ter = self._get_type_ter_AF(channel) - res = reader.load([make_dataid(name=name) - for name in fh_param["channels"][type_ter]], pad_data=False) - assert expected_res_n == len(res) - for ch in fh_param["channels"][type_ter]: - assert res[ch].attrs["orbital_parameters"] == { - "satellite_actual_longitude": np.mean(np.arange(6000)), - "satellite_actual_latitude": np.mean(np.arange(6000)), - "satellite_actual_altitude": np.mean(np.arange(6000)), - "satellite_nominal_longitude": 0.0, - "satellite_nominal_latitude": 0, - "satellite_nominal_altitude": 35786400.0, - "projection_longitude": 0.0, - "projection_latitude": 0, - "projection_altitude": 35786400.0, - } - - expected_pos_info_for_filetype = { - "fdhsi": {"1km": {"start_position_row": 1, - "end_position_row": 200, - "segment_height": 200, - "grid_width": 11136}, - "2km": {"start_position_row": 1, - "end_position_row": 100, - "segment_height": 100, - "grid_width": 5568}}, - "hrfi": {"500m": {"start_position_row": 1, - "end_position_row": 400, - "segment_height": 400, - "grid_width": 22272}, - "1km": {"start_position_row": 1, - "end_position_row": 200, - "grid_width": 11136, - "segment_height": 200}} - } - @pytest.mark.parametrize(("fh_param", "expected_pos_info"), [ (lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), expected_pos_info_for_filetype["fdhsi"]), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 
expected_pos_info_for_filetype["hrfi"]) @@ -808,6 +804,16 @@ def test_get_segment_position_info(self, reader_configs, fh_param, expected_pos_ segpos_info = filetype_handler.get_segment_position_info() assert segpos_info == expected_pos_info + @mock.patch("satpy.readers.yaml_reader.GEOVariableSegmentYAMLReader") + @pytest.mark.parametrize(("channel", "resolution"), generate_parameters(list_total_channel)) + def test_not_get_segment_info_called_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,resolution): + """Test that checks that the get_segment_position_info has not be called for AF data.""" + with mock.patch("satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler.get_segment_position_info") as gspi: + fh_param = FakeFCIFileHandlerAF_fixture + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) + reader.load([channel]) + gspi.assert_not_called() + @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)]) def test_load_index_map(self, reader_configs, fh_param, expected_res_n): @@ -824,8 +830,7 @@ def test_load_index_map(self, reader_configs, fh_param, expected_res_n): GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) numpy.testing.assert_array_equal(res[ch + "_index_map"][1, 1], 110) - @pytest.mark.parametrize("channel",list_total_channel) - @pytest.mark.parametrize("resolution",list_resolution) + @pytest.mark.parametrize(("channel", "resolution"), generate_parameters(list_total_channel)) def test_load_index_map_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel): """Test loading with index_map for AF files.""" expected_res_n = 1 @@ -875,8 +880,7 @@ def test_load_quality_only(self, reader_configs, fh_param, expected_res_n): numpy.testing.assert_array_equal(res[ch + "_pixel_quality"][1, 1], 3) assert res[ch + "_pixel_quality"].attrs["name"] == ch + "_pixel_quality" - @pytest.mark.parametrize("channel",list_total_channel) - @pytest.mark.parametrize("resolution",list_resolution) + @pytest.mark.parametrize(("channel", "resolution"), generate_parameters(list_total_channel)) def test_load_quality_only_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel): """Test loading with quality works for AF files.""" expected_res_n = 1 @@ -905,18 +909,6 @@ def test_platform_name(self, reader_configs, fh_param): res = reader.load(["vis_06"], pad_data=False) assert res["vis_06"].attrs["platform_name"] == "MTG-I1" - @pytest.mark.parametrize("channel",list_total_channel) - @pytest.mark.parametrize("resolution",list_resolution) - def test_platform_name_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,resolution): - """Test that platform name is exposed for AF file.""" - fh_param = FakeFCIFileHandlerAF_fixture - reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) - type_ter = self._get_type_ter_AF(channel) - res = reader.load([f"{name}" - for name in fh_param["channels"][type_ter]], pad_data=False) - for ch in fh_param["channels"][type_ter]: - assert res[ch].attrs["platform_name"] == "MTG-I1" - @pytest.mark.parametrize(("fh_param", "expected_area"), [ (lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), ["mtg_fci_fdss_1km", "mtg_fci_fdss_2km"]), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), ["mtg_fci_fdss_500m", "mtg_fci_fdss_1km"]), @@ -958,19 +950,6 @@ def test_excs(self, reader_configs, fh_param): make_dataid(name="ir_123", calibration="unknown"), {"units": "unknown"}) - 
@pytest.mark.parametrize("channel",list_total_channel) - @pytest.mark.parametrize("resolution",list_resolution) - def test_excs_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel): - """Test exceptions for AF files.""" - fh_param = FakeFCIFileHandlerAF_fixture - reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) - with pytest.raises(ValueError, match="Unknown dataset key, not a channel, quality or auxiliary data: invalid"): - reader.file_handlers[fh_param["filetype"]][0].get_dataset(make_dataid(name="invalid"), {}) - with pytest.raises(ValueError, match="unknown invalid value for "): - reader.file_handlers[fh_param["filetype"]][0].get_dataset( - make_dataid(name=f"{channel}", calibration="unknown"), - {"units": "unknown"}) - def test_load_composite(self): """Test that composites are loadable.""" # when dedicated composites for FCI are implemented in satpy, From 086f45fb0a5662dfd0f361c1a5b8f95463b08bb1 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Thu, 18 Apr 2024 18:33:27 +0800 Subject: [PATCH 1260/1416] initial --- satpy/etc/readers/msi_safe.yaml | 267 +++++++++++++++++++++++++++++--- satpy/readers/msi_safe.py | 7 +- 2 files changed, 247 insertions(+), 27 deletions(-) diff --git a/satpy/etc/readers/msi_safe.yaml b/satpy/etc/readers/msi_safe.yaml index c83b560539..e1ac444f2e 100644 --- a/satpy/etc/readers/msi_safe.yaml +++ b/satpy/etc/readers/msi_safe.yaml @@ -10,20 +10,38 @@ reader: reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: - safe_granule: + l1c_safe_granule: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C file_patterns: ['{fmission_id:3s}_MSIL1C_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}.jp2'] - requires: [safe_metadata, safe_tile_metadata] - safe_tile_metadata: + requires: [l1c_safe_metadata, l1c_safe_tile_metadata] + l1c_safe_tile_metadata: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSITileMDXML file_patterns: ['{fmission_id:3s}_MSIL1C_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml'] - safe_metadata: + l1c_safe_metadata: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIMDXML file_patterns: ['{fmission_id:3s}_MSIL1C_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSIL1C.xml'] + l2a_safe_granule_10m: + file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C + file_patterns: ['{fmission_id:3s}_MSIL2A_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R10m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_10m.jp2'] + requires: [l2a_safe_metadata, l2a_safe_tile_metadata] + l2a_safe_granule_20m: + file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C + file_patterns: 
['{fmission_id:3s}_MSIL2A_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R20m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_20m.jp2'] + requires: [l2a_safe_metadata, l2a_safe_tile_metadata] + l2a_safe_granule_60m: + file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C + file_patterns: ['{fmission_id:3s}_MSIL2A_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R60m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_60m.jp2'] + requires: [l2a_safe_metadata, l2a_safe_tile_metadata] + l2a_safe_tile_metadata: + file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSITileMDXML + file_patterns: ['{fmission_id:3s}_MSIL2A_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml'] + l2a_safe_metadata: + file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIMDXML + file_patterns: ['{fmission_id:3s}_MSIL2A_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSIL2A.xml'] -datasets: +datasets: B01: name: B01 sensor: msi @@ -39,7 +57,25 @@ datasets: counts: standard_name: counts units: "1" - file_type: safe_granule + file_type: l1c_safe_granule + + B01_L2A: + name: B01_L2A + sensor: msi + wavelength: [0.415, 0.443, 0.470] + resolution: + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" B02: name: B02 @@ -56,7 +92,26 @@ datasets: counts: standard_name: counts units: "1" - file_type: safe_granule + file_type: l1c_safe_granule + + B02_L2A: + name: B02_L2A + sensor: msi + wavelength: [0.440, 0.490, 0.540] + resolution: + 10: {file_type: l2a_safe_granule_10m} + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" B03: name: B03 @@ -73,7 +128,26 @@ datasets: counts: standard_name: counts units: "1" - file_type: safe_granule + file_type: l1c_safe_granule + + B03_L2A: + name: B03_L2A + sensor: msi + wavelength: [0.540, 0.560, 0.580] + resolution: + 10: {file_type: l2a_safe_granule_10m} + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" B04: name: B04 @@ -90,7 +164,26 @@ datasets: counts: standard_name: counts units: "1" - file_type: safe_granule + file_type: 
l1c_safe_granule + + B04_L2A: + name: B04_L2A + sensor: msi + wavelength: [0.645, 0.665, 0.685] + resolution: + 10: {file_type: l2a_safe_granule_10m} + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" B05: name: B05 @@ -107,7 +200,25 @@ datasets: counts: standard_name: counts units: "1" - file_type: safe_granule + file_type: l1c_safe_granule + + B05_L2A: + name: B05_L2A + sensor: msi + wavelength: [0.695, 0.705, 0.715] + resolution: + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" B06: name: B06 @@ -124,7 +235,25 @@ datasets: counts: standard_name: counts units: "1" - file_type: safe_granule + file_type: l1c_safe_granule + + B06_L2A: + name: B06_L2A + sensor: msi + wavelength: [0.731, 0.740, 0.749] + resolution: + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" B07: name: B07 @@ -141,7 +270,25 @@ datasets: counts: standard_name: counts units: "1" - file_type: safe_granule + file_type: l1c_safe_granule + + B07_L2A: + name: B07_L2A + sensor: msi + wavelength: [0.764, 0.783, 0.802] + resolution: + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" B08: name: B08 @@ -158,7 +305,24 @@ datasets: counts: standard_name: counts units: "1" - file_type: safe_granule + file_type: l1c_safe_granule + + B08_L2A: + name: B08_L2A + sensor: msi + wavelength: [0.780, 0.842, 0.905] + resolution: + 10: {file_type: l2a_safe_granule_10m} + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" B8A: name: B8A @@ -175,7 +339,25 @@ datasets: counts: standard_name: counts units: "1" - file_type: safe_granule + file_type: l1c_safe_granule + + B8A_L2A: + name: B8A + sensor: msi + wavelength: [0.855, 0.865, 0.875] + resolution: + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" B09: name: B09 @@ -192,7 +374,24 @@ datasets: counts: standard_name: counts units: "1" - file_type: safe_granule + file_type: l1c_safe_granule + + B09_L2A: + name: B09_L2A + sensor: msi + wavelength: [0.935, 0.945, 0.955] + resolution: + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + 
standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" B10: name: B10 @@ -209,13 +408,15 @@ datasets: counts: standard_name: counts units: "1" - file_type: safe_granule + file_type: l1c_safe_granule - B11: - name: B11 + B11_L2A: + name: B11_L2A sensor: msi wavelength: [1.565, 1.610, 1.655] - resolution: 20 + resolution: + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} calibration: reflectance: standard_name: toa_bidirectional_reflectance @@ -226,7 +427,6 @@ datasets: counts: standard_name: counts units: "1" - file_type: safe_granule B12: name: B12 @@ -243,31 +443,48 @@ datasets: counts: standard_name: counts units: "1" - file_type: safe_granule + file_type: l1c_safe_granule + B12_L2A: + name: B12_L2A + sensor: msi + wavelength: [2.100, 2.190, 2.280] + resolution: + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" solar_zenith_angle: name: solar_zenith_angle resolution: [10, 20, 60] - file_type: safe_tile_metadata + file_type: l1c_safe_tile_metadata xml_tag: Sun_Angles_Grid/Zenith solar_azimuth_angle: name: solar_azimuth_angle resolution: [10, 20, 60] - file_type: safe_tile_metadata + file_type: l1c_safe_tile_metadata xml_tag: Sun_Angles_Grid/Azimuth satellite_azimuth_angle: name: satellite_azimuth_angle resolution: [10, 20, 60] - file_type: safe_tile_metadata + file_type: l1c_safe_tile_metadata xml_tag: Viewing_Incidence_Angles_Grids xml_item: Azimuth satellite_zenith_angle: name: satellite_zenith_angle resolution: [10, 20, 60] - file_type: safe_tile_metadata + file_type: l1c_safe_tile_metadata xml_tag: Viewing_Incidence_Angles_Grids xml_item: Zenith diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index ec17a98872..55f761b7c6 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -121,6 +121,7 @@ def __init__(self, filename, filename_info, filetype_info, mask_saturated=True): self.tile = filename_info["dtile_number"] self.platform_name = PLATFORMS[filename_info["fmission_id"]] self.mask_saturated = mask_saturated + self.process_level = "L2A" if "MSIL2A" in filename else "L1C" import bottleneck # noqa import geotiepoints # noqa @@ -140,7 +141,8 @@ class SAFEMSIMDXML(SAFEMSIXMLMetadata): def calibrate_to_reflectances(self, data, band_name): """Calibrate *data* using the radiometric information for the metadata.""" - quantification = int(self.root.find(".//QUANTIFICATION_VALUE").text) + quantification = int(self.root.find(".//QUANTIFICATION_VALUE").text) if self.process_level == "L1C" else \ + int(self.root.find(".//BOA_QUANTIFICATION_VALUE").text) data = self._sanitize_data(data) return (data + self.band_offset(band_name)) / quantification * 100 @@ -172,7 +174,8 @@ def band_indices(self): @cached_property def band_offsets(self): """Get the band offsets from the metadata.""" - offsets = self.root.find(".//Radiometric_Offset_List") + offsets = self.root.find(".//Radiometric_Offset_List") if self.process_level == "L1C" else \ + self.root.find(".//BOA_ADD_OFFSET_VALUES_LIST") if offsets is not None: band_offsets = {int(off.attrib["band_id"]): float(off.text) for off in offsets} else: From 79106e81f9e2f5ad42ab6036ae5153eb97adbdc6 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Thu, 18 Apr 2024 
18:38:50 +0800 Subject: [PATCH 1261/1416] Update msi_safe.yaml --- satpy/etc/readers/msi_safe.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/etc/readers/msi_safe.yaml b/satpy/etc/readers/msi_safe.yaml index e1ac444f2e..82f0822f0c 100644 --- a/satpy/etc/readers/msi_safe.yaml +++ b/satpy/etc/readers/msi_safe.yaml @@ -40,7 +40,6 @@ file_types: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIMDXML file_patterns: ['{fmission_id:3s}_MSIL2A_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSIL2A.xml'] - datasets: B01: name: B01 From 9cdb03ede36e52e70f9a15d011b3f07f933b288d Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Thu, 18 Apr 2024 23:03:30 +0800 Subject: [PATCH 1262/1416] job part1 -- readers --- satpy/dataset/dataid.py | 4 +- satpy/etc/readers/msi_safe.yaml | 96 ++++++++++++++++++++++++++++++--- satpy/readers/msi_safe.py | 44 ++++++++++++--- 3 files changed, 129 insertions(+), 15 deletions(-) diff --git a/satpy/dataset/dataid.py b/satpy/dataset/dataid.py index d8301bc453..7bca3d0147 100644 --- a/satpy/dataset/dataid.py +++ b/satpy/dataset/dataid.py @@ -254,7 +254,9 @@ def __hash__(self): "brightness_temperature", "radiance", "radiance_wavenumber", - "counts" + "counts", + "aerosol_thickness", + "water_vapor" ], "transitive": True, }, diff --git a/satpy/etc/readers/msi_safe.yaml b/satpy/etc/readers/msi_safe.yaml index 82f0822f0c..8ac40f2725 100644 --- a/satpy/etc/readers/msi_safe.yaml +++ b/satpy/etc/readers/msi_safe.yaml @@ -310,7 +310,7 @@ datasets: name: B08_L2A sensor: msi wavelength: [0.780, 0.842, 0.905] - resolution: + resolution: 10: {file_type: l2a_safe_granule_10m} calibration: reflectance: @@ -341,10 +341,10 @@ datasets: file_type: l1c_safe_granule B8A_L2A: - name: B8A + name: B8A_L2A sensor: msi wavelength: [0.855, 0.865, 0.875] - resolution: + resolution: 20: {file_type: l2a_safe_granule_20m} 60: {file_type: l2a_safe_granule_60m} calibration: @@ -379,7 +379,7 @@ datasets: name: B09_L2A sensor: msi wavelength: [0.935, 0.945, 0.955] - resolution: + resolution: 60: {file_type: l2a_safe_granule_60m} calibration: reflectance: @@ -409,11 +409,28 @@ datasets: units: "1" file_type: l1c_safe_granule + B11: + name: B11 + sensor: msi + wavelength: [1.565, 1.610, 1.655] + resolution: 20 + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + file_type: l1c_safe_granule + B11_L2A: name: B11_L2A sensor: msi wavelength: [1.565, 1.610, 1.655] - resolution: + resolution: 20: {file_type: l2a_safe_granule_20m} 60: {file_type: l2a_safe_granule_60m} calibration: @@ -448,7 +465,7 @@ datasets: name: B12_L2A sensor: msi wavelength: [2.100, 2.190, 2.280] - resolution: + resolution: 20: {file_type: l2a_safe_granule_20m} 60: {file_type: l2a_safe_granule_60m} calibration: @@ -462,6 +479,47 @@ datasets: standard_name: counts units: "1" + AOT_L2A: + name: AOT_L2A + sensor: msi + resolution: + 10: {file_type: l2a_safe_granule_10m} + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + aerosol_thickness: + standard_name: aerosol_optical_thickness + units: "1" + counts: + standard_name: counts + units: "1" + + WVP_L2A: + name: WVP_L2A + sensor: msi + resolution: + 10: {file_type: l2a_safe_granule_10m} + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: 
l2a_safe_granule_60m} + calibration: + water_vapor: + standard_name: water_vapor + units: cm + counts: + standard_name: counts + units: "1" + + SCL_L2A: + name: SCL_L2A + sensor: msi + resolution: + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + counts: + standard_name: counts + units: "1" + solar_zenith_angle: name: solar_zenith_angle resolution: [10, 20, 60] @@ -487,3 +545,29 @@ datasets: file_type: l1c_safe_tile_metadata xml_tag: Viewing_Incidence_Angles_Grids xml_item: Zenith + + solar_zenith_angle_l2a: + name: solar_zenith_angle_l2a + resolution: [10, 20, 60] + file_type: l2a_safe_tile_metadata + xml_tag: Sun_Angles_Grid/Zenith + + solar_azimuth_angle_l2a: + name: solar_azimuth_angle_l2a + resolution: [10, 20, 60] + file_type: l2a_safe_tile_metadata + xml_tag: Sun_Angles_Grid/Azimuth + + satellite_azimuth_angle_l2a: + name: satellite_azimuth_angle_l2a + resolution: [10, 20, 60] + file_type: l2a_safe_tile_metadata + xml_tag: Viewing_Incidence_Angles_Grids + xml_item: Azimuth + + satellite_zenith_angle_l2a: + name: satellite_zenith_angle_l2a + resolution: [10, 20, 60] + file_type: l2a_safe_tile_metadata + xml_tag: Viewing_Incidence_Angles_Grids + xml_item: Zenith diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index 55f761b7c6..eaf98d6a38 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -28,9 +28,12 @@ reader_kwargs={'mask_saturated': False}) scene.load(['B01']) -L1B format description for the files read here: +L1C/L2A format description for the files read here: - https://sentinels.copernicus.eu/documents/247904/0/Sentinel-2-product-specifications-document-V14-9.pdf/ + https://sentinels.copernicus.eu/documents/247904/685211/S2-PDGS-TAS-DI-PSD-V14.9.pdf/3d3b6c9c-4334-dcc4-3aa7-f7c0deffbaf7?t=1643013091529 + +Please note: for L2A datasets, the band name has been given a "_L2A" suffix. Do not change it in the YAML file, or +the reader cannot recognize it and nothing will be loaded. 
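+
+For example, loading an L2A band together with the auxiliary products could look like
+this (a sketch, not prescriptive; the dataset names are the ones declared in the
+msi_safe YAML above):
+
+    scn = Scene(filenames=filenames, reader="msi_safe")
+    scn.load(["B03_L2A", "AOT_L2A", "WVP_L2A"])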
""" @@ -69,13 +72,19 @@ def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, mask_s self._tile_mda = tile_mda self._mda = mda self.platform_name = PLATFORMS[filename_info["fmission_id"]] + self.process_level = "L2A" if "MSIL2A" in filename else "L1C" def get_dataset(self, key, info): """Load a dataset.""" - if self._channel != key["name"]: - return + if self.process_level == "L1C": + if self._channel != key["name"]: + return + else: + if self._channel + "_L2A" != key["name"]: + return logger.debug("Reading %s.", key["name"]) + proj = self._read_from_file(key) proj.attrs = info.copy() proj.attrs["units"] = "%" @@ -91,6 +100,8 @@ def _read_from_file(self, key): return self._mda.calibrate_to_radiances(proj, self._channel) if key["calibration"] == "counts": return self._mda._sanitize_data(proj) + if key["calibration"] in ["aerosol_thickness", "water_vapor"]: + return self._mda.calibrate_to_atmospheric(proj, self._channel) @property def start_time(self): @@ -104,8 +115,13 @@ def end_time(self): def get_area_def(self, dsid): """Get the area def.""" - if self._channel != dsid["name"]: - return + if self.process_level == "L1C": + if self._channel != dsid["name"]: + return + else: + if self._channel + "_L2A" != dsid["name"]: + return + return self._tile_mda.get_area_def(dsid) @@ -146,6 +162,16 @@ def calibrate_to_reflectances(self, data, band_name): data = self._sanitize_data(data) return (data + self.band_offset(band_name)) / quantification * 100 + def calibrate_to_atmospheric(self, data, band_name): + """Calibrate L2A AOT/WVP product.""" + atmospheric_products = ["AOT", "WVP"] + if self.process_level == "L1C" or (self.process_level == "L2A" and band_name not in atmospheric_products): + return + + quantification = float(self.root.find(f".//{band_name}_QUANTIFICATION_VALUE").text) + data = self._sanitize_data(data) + return data / quantification + def _sanitize_data(self, data): data = data.where(data != self.no_data) if self.mask_saturated: @@ -298,9 +324,11 @@ def interpolate_angles(self, angles, resolution): def _get_coarse_dataset(self, key, info): """Get the coarse dataset refered to by `key` from the XML data.""" angles = self.root.find(".//Tile_Angles") - if key["name"] in ["solar_zenith_angle", "solar_azimuth_angle"]: + if key["name"] in ["solar_zenith_angle", "solar_azimuth_angle", + "solar_zenith_angle_l2a", "solar_azimuth_angle_l2a"]: angles = self._get_solar_angles(angles, info) - elif key["name"] in ["satellite_zenith_angle", "satellite_azimuth_angle"]: + elif key["name"] in ["satellite_zenith_angle", "satellite_azimuth_angle", + "satellite_zenith_angle_l2a", "satellite_azimuth_angle_l2a"]: angles = self._get_satellite_angles(angles, info) else: angles = None From 7a26f155050b3e6f3b1f5435854c1b6b1ab6cea6 Mon Sep 17 00:00:00 2001 From: clement laplace Date: Thu, 18 Apr 2024 15:49:19 +0000 Subject: [PATCH 1263/1416] fix: Refactore the test_fci_l1c file to reduce function repetition --- satpy/tests/reader_tests/test_fci_l1c_nc.py | 285 +++++++------------- 1 file changed, 100 insertions(+), 185 deletions(-) diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index af4d487eb1..c87db9c6c2 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -453,19 +453,18 @@ def resolutions(channel): else: return list_resolution -def generate_parameters_segment(list_channel): - """Generate dinamicaly the parameters.""" - for channel in list_channel: - if channel == "vis_06": - 
expected_pos_info = expected_pos_info_for_filetype["fci_af_vis_06"] - else: - expected_pos_info = expected_pos_info_for_filetype["fci_af"] - for resolution in resolutions(channel): - yield (channel, resolution, expected_pos_info) +def get_list_channel_calibration(calibration): + """Get the channel list according to the calibration.""" + if calibration == "reflectance": + return list_channel_solar + elif calibration == "brightness_temperature": + return list_channel_terran + else: + return list_total_channel -def generate_parameters(list_channel): +def generate_parameters(calibration): """Generate the parameters dynamically.""" - for channel in list_channel: + for channel in get_list_channel_calibration(calibration): for resolution in resolutions(channel): yield (channel, resolution) @@ -479,31 +478,37 @@ def generate_parameters(list_channel): "terran": ["ir_38", "ir_105"], "terran_grid_type": ["1km"] * 2} -_dict_arg_radiance = {"dtype": np.float32, +dict_calibration = { "radiance" : {"dtype": np.float32, "value_1": 15, "value_0":9700, "attrs_dict":{"calibration":"radiance", "units":"mW m-2 sr-1 (cm-1)-1", "radiance_unit_conversion_coefficient": np.float32(1234.56) - } - } + }, + }, + "reflectance" : {"dtype": np.float32, + "attrs_dict":{"calibration":"reflectance", + "units":"%" + }, + }, + -_dict_arg_counts = {"dtype": np.uint16, + "counts" : {"dtype": np.uint16, "value_1": 1, "value_0": 5000, "attrs_dict":{"calibration":"counts", "units":"count", - } - } + }, + }, -_dict_arg_bt = {"dtype": np.float32, + "brightness_temperature" : {"dtype": np.float32, "value_1": np.float32(209.68275), "value_0": np.float32(1888.8513), "attrs_dict":{"calibration":"brightness_temperature", "units":"K", - } - } + }, + }, +} _test_filenames = {"fdhsi": [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" @@ -520,10 +525,7 @@ def fill_chans_af(): """Fill the dict _chans_af with the right channel and resolution.""" _chans_af = {} for channel in list_total_channel: - if channel == "vis_06": - list_resol = list_resolution_v06 - else: - list_resol = list_resolution + list_resol = resolutions(channel) for resol in list_resol: chann_upp = channel.replace("_","").upper() _test_filenames[f"af_{channel}_{resol}"] = [f"W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1-FCI-1C-RRAD" f"-{resol.upper()}-AF-{chann_upp}-x-x---NC4E_C_EUMT_20240125144655_DT_OPE" f"_20240109080007_20240109080924_N_JLS_T_0049_0000.nc"] @@ -616,11 +618,14 @@ def _get_assert_load(self,res,ch,grid_type,dict_arg): GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) assert res[ch].dtype == dict_arg["dtype"] self._get_assert_attrs(res,ch,dict_arg["attrs_dict"]) - if ch == "ir_38": - numpy.testing.assert_array_equal(res[ch][-1], dict_arg["value_1"]) - numpy.testing.assert_array_equal(res[ch][0], dict_arg["value_0"]) - else: - numpy.testing.assert_array_equal(res[ch], dict_arg["value_1"]) + if dict_arg["attrs_dict"]["calibration"] == "reflectance": + numpy.testing.assert_array_almost_equal(res[ch], 100 * 15 * 1 * np.pi / 50) + else: + if ch == "ir_38": + numpy.testing.assert_array_equal(res[ch][-1], dict_arg["value_1"]) + numpy.testing.assert_array_equal(res[ch][0], dict_arg["value_0"]) + else: + numpy.testing.assert_array_equal(res[ch], dict_arg["value_1"]) def _get_res_AF(self,channel,fh_param,calibration,reader_configs): """Load the reader for AF data.""" @@ -649,127 +654,57 @@ def test_file_pattern_for_TRAIL_file(self, reader_configs, filenames): files = reader.select_files_from_pathnames(filenames) assert len(files) == 0 - @pytest.mark.parametrize(("fh_param", "expected_res_n"), 
[(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16), - (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)]) - def test_load_counts(self, reader_configs, fh_param, - expected_res_n): - """Test loading with counts.""" - reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) - res = reader.load( - [make_dataid(name=name, calibration="counts") for name in - fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False) - assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"], - fh_param["channels"]["solar_grid_type"] + - fh_param["channels"]["terran_grid_type"]): - self._get_assert_load(res,ch,grid_type,_dict_arg_counts) - - @pytest.mark.parametrize(("channel", "resolution"), generate_parameters(list_total_channel)) - def test_load_counts_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel): - """Test loading with counts for AF files.""" - expected_res_n = 1 - fh_param = FakeFCIFileHandlerAF_fixture - type_ter = self._get_type_ter_AF(channel) - calibration = "counts" - res = self._get_res_AF(channel,fh_param,calibration,reader_configs) - assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param["channels"][type_ter], - fh_param["channels"][f"{type_ter}_grid_type"]): - self._get_assert_load(res,ch,grid_type,_dict_arg_counts) - - @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16), - (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)]) - def test_load_radiance(self, reader_configs, fh_param, - expected_res_n): - """Test loading with radiance.""" - reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) - res = reader.load( - [make_dataid(name=name, calibration="radiance") for name in - fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False) - assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"], - fh_param["channels"]["solar_grid_type"] + - fh_param["channels"]["terran_grid_type"]): - self._get_assert_load(res,ch,grid_type,_dict_arg_radiance) - - @pytest.mark.parametrize(("channel", "resolution"), generate_parameters(list_total_channel)) - def test_load_radiance_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel): - """Test loading with radiance for AF files.""" - expected_res_n = 1 - fh_param = FakeFCIFileHandlerAF_fixture - type_ter = self._get_type_ter_AF(channel) - calibration = "radiance" - res = self._get_res_AF(channel,fh_param,calibration,reader_configs) - assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param["channels"][type_ter], - fh_param["channels"][f"{type_ter}_grid_type"]): - self._get_assert_load(res,ch,grid_type,_dict_arg_radiance) - - @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 8), - (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 2)]) - def test_load_reflectance(self, reader_configs, fh_param, - expected_res_n): - """Test loading with reflectance.""" - reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) - res = reader.load( - [make_dataid(name=name, calibration="reflectance") for name in - fh_param["channels"]["solar"]], pad_data=False) - assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param["channels"]["solar"], fh_param["channels"]["solar_grid_type"]): - assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], - 
GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) - assert res[ch].dtype == np.float32 - assert res[ch].attrs["calibration"] == "reflectance" - assert res[ch].attrs["units"] == "%" - numpy.testing.assert_array_almost_equal(res[ch], 100 * 15 * 1 * np.pi / 50) - - @pytest.mark.parametrize(("channel", "resolution"), generate_parameters(list_channel_solar)) - def test_load_reflectance_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel): - """Test loading with reflectance for AF files.""" - expected_res_n = 1 - fh_param = FakeFCIFileHandlerAF_fixture - type_ter = self._get_type_ter_AF(channel) - calibration = "reflectance" - res = self._get_res_AF(channel,fh_param,calibration,reader_configs) - assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param["channels"][type_ter], - fh_param["channels"][f"{type_ter}_grid_type"]): - assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) - assert res[ch].dtype == np.float32 - assert res[ch].attrs["calibration"] == calibration - assert res[ch].attrs["units"] == "%" - numpy.testing.assert_array_almost_equal(res[ch], 100 * 15 * 1 * np.pi / 50) - - @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 8), - (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 2)]) - def test_load_bt(self, reader_configs, caplog, fh_param, - expected_res_n): - """Test loading with bt.""" + @pytest.mark.parametrize("calibration", ["counts","radiance","brightness_temperature","reflectance"]) + @pytest.mark.parametrize(("fh_param","res_type"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"),"hdfi"), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"),"hrfi")]) + def test_load_calibration(self, reader_configs, fh_param, + caplog,calibration,res_type): + """Test loading with counts,radiance,reflectance and bt.""" + expected_res_n = {} + if calibration == "reflectance": + list_chan = fh_param["channels"]["solar"] + list_grid = fh_param["channels"]["solar_grid_type"] + expected_res_n["hdfi"] = 8 + expected_res_n["hrfi"] = 2 + elif calibration == "brightness_temperature": + list_chan = fh_param["channels"]["terran"] + list_grid = fh_param["channels"]["terran_grid_type"] + expected_res_n["hdfi"] = 8 + expected_res_n["hrfi"] = 2 + else: + list_chan = fh_param["channels"]["solar"] + fh_param["channels"]["terran"] + list_grid = fh_param["channels"]["solar_grid_type"] + fh_param["channels"]["terran_grid_type"] + expected_res_n["hdfi"] = 16 + expected_res_n["hrfi"] = 4 reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) with caplog.at_level(logging.WARNING): res = reader.load( - [make_dataid(name=name, calibration="brightness_temperature") for - name in fh_param["channels"]["terran"]], pad_data=False) + [make_dataid(name=name, calibration=calibration) for name in + list_chan], pad_data=False) assert caplog.text == "" - assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param["channels"]["terran"], fh_param["channels"]["terran_grid_type"]): - self._get_assert_load(res,ch,grid_type,_dict_arg_bt) - - @pytest.mark.parametrize(("channel","resolution"), generate_parameters(list_channel_terran)) - def test_load_bt_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,caplog): - """Test loading with brightness_temperature for AF files.""" + assert expected_res_n[res_type] == len(res) + for ch, grid_type in zip(list_chan, + list_grid): + 
self._get_assert_load(res,ch,grid_type,dict_calibration[calibration]) + + @pytest.mark.parametrize(("calibration", "channel", "resolution"), [ + (calibration, channel, resolution) + for calibration in ["counts", "radiance", "brightness_temperature", "reflectance"] + for channel, resolution in generate_parameters(calibration) + ]) + def test_load_calibration_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,calibration,caplog): + """Test loading with counts,radiance,reflectance and bt for AF files.""" expected_res_n = 1 fh_param = FakeFCIFileHandlerAF_fixture type_ter = self._get_type_ter_AF(channel) - calibration = "brightness_temperature" with caplog.at_level(logging.WARNING): res = self._get_res_AF(channel,fh_param,calibration,reader_configs) assert caplog.text == "" assert expected_res_n == len(res) for ch, grid_type in zip(fh_param["channels"][type_ter], fh_param["channels"][f"{type_ter}_grid_type"]): - self._get_assert_load(res,ch,grid_type,_dict_arg_bt) + self._get_assert_load(res,ch,grid_type,dict_calibration[calibration]) + @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))]) @@ -805,7 +740,7 @@ def test_get_segment_position_info(self, reader_configs, fh_param, expected_pos_ assert segpos_info == expected_pos_info @mock.patch("satpy.readers.yaml_reader.GEOVariableSegmentYAMLReader") - @pytest.mark.parametrize(("channel", "resolution"), generate_parameters(list_total_channel)) + @pytest.mark.parametrize(("channel", "resolution"), generate_parameters("radiance")) def test_not_get_segment_info_called_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,resolution): """Test that checks that the get_segment_position_info has not be called for AF data.""" with mock.patch("satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler.get_segment_position_info") as gspi: @@ -814,37 +749,51 @@ def test_not_get_segment_info_called_af(self,FakeFCIFileHandlerAF_fixture,reader reader.load([channel]) gspi.assert_not_called() + @pytest.mark.parametrize("calibration", ["index_map","pixel_quality"]) @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)]) - def test_load_index_map(self, reader_configs, fh_param, expected_res_n): - """Test loading of index_map.""" + def test_load_map_and_pixel(self, reader_configs, fh_param, expected_res_n,calibration): + """Test loading of index_map and pixel_quality.""" reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load( - [name + "_index_map" for name in + [f"{name}_{calibration}" for name in fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False) assert expected_res_n == len(res) for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"], fh_param["channels"]["solar_grid_type"] + fh_param["channels"]["terran_grid_type"]): - assert res[ch + "_index_map"].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + assert res[f"{ch}_{calibration}"].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) - numpy.testing.assert_array_equal(res[ch + "_index_map"][1, 1], 110) - - @pytest.mark.parametrize(("channel", "resolution"), generate_parameters(list_total_channel)) - def test_load_index_map_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel): - """Test loading with index_map for AF files.""" + if 
calibration == "index_map": + numpy.testing.assert_array_equal(res[f"{ch}_{calibration}"][1, 1], 110) + elif calibration == "pixel_quality": + numpy.testing.assert_array_equal(res[f"{ch}_{calibration}"][1, 1], 3) + assert res[f"{ch}_{calibration}"].attrs["name"] == ch + "_pixel_quality" + + @pytest.mark.parametrize(("calibration", "channel", "resolution"), [ + (calibration, channel, resolution) + for calibration in ["index_map","pixel_quality"] + for channel, resolution in generate_parameters(calibration) + ]) + def test_load_map_and_pixel_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,calibration): + """Test loading with of index_map and pixel_quality for AF files.""" expected_res_n = 1 fh_param = FakeFCIFileHandlerAF_fixture reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) type_ter = self._get_type_ter_AF(channel) - res = reader.load([f"{name}_index_map" + res = reader.load([f"{name}_{calibration}" for name in fh_param["channels"][type_ter]], pad_data=False) assert expected_res_n == len(res) for ch, grid_type in zip(fh_param["channels"][type_ter], fh_param["channels"][f"{type_ter}_grid_type"]): - assert res[f"{ch}_index_map"].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + assert res[f"{ch}_{calibration}"].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) - numpy.testing.assert_array_equal(res[f"{ch}_index_map"][1, 1], 110) + if calibration == "index_map": + numpy.testing.assert_array_equal(res[f"{ch}_{calibration}"][1, 1], 110) + elif calibration == "pixel_quality": + numpy.testing.assert_array_equal(res[f"{ch}_{calibration}"][1, 1], 3) + assert res[f"{ch}_{calibration}"].attrs["name"] == ch + "_pixel_quality" + @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))]) @@ -863,40 +812,6 @@ def test_load_aux_data(self, reader_configs, fh_param): else: numpy.testing.assert_array_equal(res[aux][1, 1], 10) - @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16), - (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)]) - def test_load_quality_only(self, reader_configs, fh_param, expected_res_n): - """Test that loading quality only works.""" - reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) - res = reader.load( - [name + "_pixel_quality" for name in - fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False) - assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"], - fh_param["channels"]["solar_grid_type"] + - fh_param["channels"]["terran_grid_type"]): - assert res[ch + "_pixel_quality"].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) - numpy.testing.assert_array_equal(res[ch + "_pixel_quality"][1, 1], 3) - assert res[ch + "_pixel_quality"].attrs["name"] == ch + "_pixel_quality" - - @pytest.mark.parametrize(("channel", "resolution"), generate_parameters(list_total_channel)) - def test_load_quality_only_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel): - """Test loading with quality works for AF files.""" - expected_res_n = 1 - fh_param = FakeFCIFileHandlerAF_fixture - reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) - type_ter = self._get_type_ter_AF(channel) - res = reader.load([f"{name}_pixel_quality" - 
for name in fh_param["channels"][type_ter]], pad_data=False) - assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param["channels"][type_ter], - fh_param["channels"][f"{type_ter}_grid_type"]): - assert res[f"{ch}_pixel_quality"].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) - numpy.testing.assert_array_equal(res[f"{ch}_pixel_quality"][1, 1], 3) - assert res[f"{ch}_pixel_quality"].attrs["name"] == f"{ch}_pixel_quality" - @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))]) def test_platform_name(self, reader_configs, fh_param): From 6f15335964b4e0963c7dfebd720f6e54abdeae3f Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Thu, 18 Apr 2024 23:56:51 +0800 Subject: [PATCH 1264/1416] Update test_msi_safe.py --- satpy/tests/reader_tests/test_msi_safe.py | 4970 ++++++++++++++++++++- 1 file changed, 4954 insertions(+), 16 deletions(-) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 0255aac085..8cd5852ee9 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -25,7 +25,7 @@ from satpy.tests.utils import make_dataid -mtd_tile_xml = b""" +mtd_l1c_tile_xml = b""" @@ -575,6 +575,4918 @@ """ # noqa +mtd_l2a_tile_xml = b""" + + + S2A_OPER_MSI_L1C_TL_2APS_20240411T054822_A045975_T50TMK_N05.10 + S2A_OPER_MSI_L2A_TL_2APS_20240411T080950_A045975_T50TMK_N05.10 + S2A_OPER_MSI_L2A_DS_2APS_20240411T080950_S20240411T030632_N05.10 + NOMINAL + 2024-04-11T03:16:45.260288Z + + 2APS + 2024-04-11T08:49:40.661681Z + + + + + WGS84 / UTM zone 50N + EPSG:32650 + + 10980 + 10980 + + + 5490 + 5490 + + + 1830 + 1830 + + + 399960 + 4500000 + 10 + -10 + + + 399960 + 4500000 + 20 + -20 + + + 399960 + 4500000 + 60 + -60 + + + + + + 5000 + 5000 + + 34.9209 34.9012 34.8817 34.8621 34.8426 34.8232 34.8037 34.7844 34.765 34.7457 34.7264 34.7072 34.688 34.6689 34.6498 34.6307 34.6117 34.5927 34.5737 34.5548 34.536 34.5171 34.4983 + 34.8794 34.8598 34.8402 34.8206 34.8011 34.7816 34.7622 34.7428 34.7234 34.7041 34.6848 34.6656 34.6464 34.6272 34.6081 34.589 34.57 34.551 34.532 34.5131 34.4942 34.4754 34.4566 + 34.838 34.8184 34.7987 34.7792 34.7596 34.7401 34.7207 34.7013 34.6819 34.6626 34.6433 34.624 34.6048 34.5856 34.5665 34.5474 34.5283 34.5093 34.4903 34.4714 34.4525 34.4336 34.4148 + 34.7966 34.7769 34.7573 34.7377 34.7182 34.6986 34.6792 34.6597 34.6404 34.621 34.6017 34.5824 34.5632 34.544 34.5248 34.5057 34.4866 34.4676 34.4486 34.4296 34.4107 34.3918 34.373 + 34.7552 34.7355 34.7159 34.6963 34.6767 34.6572 34.6377 34.6182 34.5988 34.5794 34.5601 34.5408 34.5216 34.5023 34.4832 34.464 34.445 34.4259 34.4069 34.3879 34.369 34.3501 34.3312 + 34.7138 34.6941 34.6744 34.6548 34.6352 34.6157 34.5962 34.5767 34.5573 34.5379 34.5185 34.4992 34.48 34.4607 34.4415 34.4224 34.4033 34.3842 34.3652 34.3462 34.3273 34.3083 34.2895 + 34.6724 34.6527 34.633 34.6134 34.5938 34.5742 34.5547 34.5352 34.5157 34.4963 34.477 34.4577 34.4384 34.4191 34.3999 34.3808 34.3616 34.3425 34.3235 34.3045 34.2855 34.2666 34.2477 + 34.631 34.6113 34.5916 34.5719 34.5523 34.5327 34.5132 34.4937 34.4742 34.4548 34.4354 34.4161 34.3968 34.3775 34.3583 34.3391 34.32 34.3009 34.2818 34.2628 34.2438 34.2249 34.206 + 34.5896 34.5699 34.5502 34.5305 34.5109 34.4913 34.4717 34.4522 34.4327 34.4133 34.3939 34.3745 34.3552 34.3359 34.3167 34.2975 34.2783 34.2592 34.2401 34.2211 34.2021 34.1831 
+ sun azimuth angle grid: 23x23 values, 5000 m column and row steps
+ mean sun angles: zenith 34.2508883033046 deg, azimuth 153.732570441329 deg
+ per-band, per-detector viewing incidence angle grids (zenith and azimuth,
+ 23x23 values each, 5000 m column and row steps), NaN outside each detector's
+ footprint ...]
NaN NaN NaN NaN NaN NaN 292.812 292.664 292.53 292.408 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.766 292.623 292.492 292.373 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.722 292.581 292.453 292.337 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.829 292.677 292.541 292.417 292.304 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.784 292.638 292.503 292.38 292.269 + + + + + + 5000 + 5000 + + 1.56418 1.7238 1.95023 2.22302 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.59921 1.77927 2.02001 2.30248 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.64036 1.83858 2.09237 2.38358 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.68673 1.90188 2.16697 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.73826 1.96893 2.24457 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.79465 2.03922 2.3243 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.85529 2.11248 2.40619 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.92006 2.18857 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.98817 2.26722 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.05985 2.34849 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.13391 2.43022 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.21081 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.28925 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.37021 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + 212.152 224.705 234.751 242.531 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 215.781 227.666 237.053 244.303 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 219.241 230.416 239.192 245.95 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 222.477 232.991 241.168 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 225.551 235.405 243.032 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 228.451 237.664 244.772 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 231.172 239.77 246.395 NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 233.714 241.732 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 236.098 243.575 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 238.324 245.295 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 240.386 246.879 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 242.311 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 244.085 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 245.753 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN 2.40295 2.74686 3.10058 3.46299 3.8311 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 2.49413 2.8407 3.19795 3.5624 3.93286 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.24935 2.58532 2.93538 3.2946 3.66048 4.03079 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.3376 2.67833 3.03122 3.39239 3.75982 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.42791 2.7724 3.12787 3.49114 3.86028 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.51944 2.86736 3.22549 3.59004 3.95886 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.27427 2.61247 2.96345 3.3232 3.68815 4.05891 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.36407 2.70622 3.05997 3.42187 3.78801 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.45548 2.79987 3.156 3.52014 3.88692 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.54709 2.89533 3.25339 3.61692 3.9862 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 2.30163 2.64076 2.99104 3.35034 3.71608 4.08635 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 2.39159 2.73378 3.08723 3.44885 3.81579 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 2.48317 2.82848 3.18494 3.54964 3.91573 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.23876 2.57467 2.92501 3.28395 3.65027 4.01604 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.32768 2.66866 3.02071 3.38043 3.74737 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.41976 2.76383 3.11715 3.47907 3.84666 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.51108 2.85842 3.2156 3.57957 3.9469 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN + 2.60439 2.95394 3.31255 3.67852 4.04795 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.69663 3.04943 3.41044 3.77747 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.79135 3.14687 3.51004 3.8768 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.88814 3.24581 3.60819 3.97695 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.9833 3.34266 3.70763 4.0775 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 3.07929 3.4408 3.80732 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + NaN NaN NaN NaN 316.046 311.889 308.695 306.162 304.117 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 314.798 310.942 307.938 305.553 303.614 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 318.359 313.655 310.056 307.238 304.986 303.155 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 316.958 312.582 309.221 306.575 304.446 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 315.652 311.585 308.436 305.945 303.93 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 314.435 310.651 307.699 305.354 303.456 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 317.925 313.306 309.776 307.011 304.802 302.996 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 316.542 312.256 308.956 306.356 304.274 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 315.254 311.289 308.201 305.749 303.779 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 314.08 310.376 307.483 305.183 303.308 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 317.487 312.972 309.527 306.815 304.64 302.861 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 316.144 311.962 308.734 306.179 304.122 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 314.897 311.012 307.982 305.566 303.632 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 318.526 313.757 310.109 307.267 304.991 303.168 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 317.105 312.672 309.283 306.62 304.473 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 315.77 311.664 308.504 305.993 303.965 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 314.558 310.734 307.76 305.397 303.487 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 313.423 309.87 307.082 304.843 303.024 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 312.389 309.057 306.433 304.321 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 311.404 308.288 305.816 303.825 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 310.471 307.553 305.239 303.349 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 309.621 306.882 304.691 302.898 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 308.822 306.244 304.17 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.2605 4.62548 4.99656 5.36443 5.73941 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.35787 4.72387 5.09397 5.46918 5.84033 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 
NaN NaN NaN NaN NaN NaN NaN 4.09031 4.4551 4.82306 5.19466 5.5692 5.94262 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 4.18842 4.55462 4.92307 5.29713 5.66919 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 4.28671 4.6525 5.02302 5.39393 5.76847 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 4.38322 4.75102 5.12167 5.4924 5.86705 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 4.11755 4.48245 4.84906 5.22055 5.59402 5.96761 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 4.21425 4.58044 4.9499 5.32099 5.69363 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 4.31151 4.67997 5.04882 5.42229 5.79427 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 4.04625 4.41021 4.77757 5.148 5.52081 5.89503 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 4.14357 4.50842 4.87683 5.24795 5.6212 5.99605 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 4.24142 4.60735 4.97661 5.34838 5.72207 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 4.33968 4.70657 5.07655 5.44893 5.82314 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 4.07421 4.43823 4.80613 5.17686 5.54977 5.92428 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 4.17254 4.53936 4.90704 5.27724 5.65066 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 4.27072 4.63826 5.00592 5.37791 5.75181 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 4.3688 4.73597 5.10615 5.47871 5.85299 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 4.10435 4.46796 4.83567 5.20655 5.5796 5.95431 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 4.20149 4.56675 4.93565 5.30711 5.68064 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 4.2995 4.66607 5.03572 5.40791 5.78187 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 4.39825 4.76568 5.1361 5.5088 5.88321 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 4.13251 4.49729 4.86558 5.23668 5.60994 5.98472 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 4.23072 4.59658 4.96576 5.33747 5.71115 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN 264.671 266.44 267.968 269.267 270.42 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 265.167 266.865 268.325 269.601 270.697 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 263.721 265.642 267.273 268.678 269.904 270.971 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 264.267 266.105 267.668 269.028 270.199 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 264.79 266.542 268.048 269.343 270.479 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 265.279 266.963 268.407 269.652 270.751 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 263.867 265.76 267.363 268.754 269.962 271.018 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 264.401 266.214 267.762 269.094 270.254 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 264.914 266.656 268.133 269.423 270.54 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 263.453 265.407 267.066 268.493 269.732 270.817 NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 264.012 265.88 267.471 268.841 270.034 271.082 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 264.548 266.332 267.857 269.177 270.329 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 265.058 266.768 268.234 269.504 270.613 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 263.622 265.548 267.184 268.591 269.815 270.888 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 264.175 266.023 267.588 268.938 270.118 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 264.702 266.469 267.967 269.27 270.407 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 265.202 266.888 268.334 269.589 270.688 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 263.791 265.684 267.298 268.689 269.897 270.959 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 264.327 266.141 267.691 269.029 270.195 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 264.842 266.58 268.067 269.355 270.48 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 265.333 266.998 268.429 269.67 270.757 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 263.93 265.804 267.4 268.775 269.971 271.021 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 264.462 266.254 267.785 269.108 270.262 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.09385 6.47612 6.85877 7.24302 7.62227 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.1971 6.58356 6.9623 7.34475 7.72437 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.92056 6.30221 6.68737 7.06479 7.4436 7.82623 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.02475 6.40297 6.78964 7.16889 7.54713 7.92825 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.1236 6.50566 6.88894 7.2683 7.64883 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.22504 6.60762 6.98803 7.36957 7.75153 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.94585 6.32906 6.70878 7.09036 7.47243 7.85436 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.04809 6.42958 6.81131 7.19324 7.57524 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.151 6.53251 6.91423 7.29619 7.67815 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.25374 6.63546 7.01726 7.39922 7.78111 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.97515 6.35668 6.73846 7.12039 7.50219 7.88404 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.07808 6.45964 6.84149 7.22339 7.60531 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.18107 6.56273 6.94463 7.32658 7.70837 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.90281 6.28418 6.66578 7.04778 7.42968 7.81147 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.00595 6.38738 6.76911 7.15098 7.53281 7.9146 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.10906 6.49057 6.87229 7.25419 7.63601 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.21222 6.59384 6.97556 7.35743 7.7392 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.93404 6.31535 6.69703 
7.07886 7.46071 7.84248 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.03716 6.4186 6.80035 7.18216 7.56396 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.14042 6.52194 6.90363 7.28544 7.66726 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.2436 6.62519 7.007 7.38885 7.77056 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 5.96555 6.3469 6.72853 7.11033 7.49215 7.87387 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 6.06872 6.45019 6.8319 7.21373 7.59557 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.453 296.846 296.312 295.838 295.419 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.269 296.675 296.164 295.706 295.301 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.731 297.087 296.515 296.025 295.587 295.189 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.535 296.922 296.367 295.888 295.463 295.078 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.358 296.758 296.227 295.762 295.347 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.184 296.602 296.093 295.638 295.232 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.65 297.009 296.452 295.957 295.517 295.122 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.464 296.849 296.307 295.825 295.398 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.284 296.688 296.164 295.698 295.284 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.109 296.535 296.027 295.576 295.172 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.575 296.943 296.386 295.893 295.455 295.065 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.391 296.779 296.242 295.766 295.341 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.214 296.624 296.103 295.641 295.228 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.693 297.044 296.473 295.968 295.52 295.121 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.507 296.879 296.327 295.838 295.404 295.015 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.326 296.72 296.187 295.713 295.291 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.152 296.567 296.049 295.59 295.181 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.627 296.983 296.417 295.917 295.473 295.075 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.443 296.822 296.274 295.789 295.356 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.266 296.664 296.133 295.664 295.246 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.093 296.513 296.001 295.545 295.137 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 297.566 296.928 296.365 295.868 295.426 295.032 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 297.381 296.764 296.222 295.741 295.313 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.00067 8.37694 8.75434 9.13104 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.10198 8.47744 8.85542 9.23257 + NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.82339 8.20052 8.57839 8.95602 9.33418 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.92436 8.3021 8.67984 9.05795 9.4364 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.02637 8.40384 8.78158 9.16701 9.54274 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.12815 8.50523 8.88365 9.26488 9.64399 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.8514 8.22905 8.60695 8.98582 9.36353 9.7422 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.95307 8.33169 8.71073 9.08991 9.4661 9.8432 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.05611 8.43396 8.81162 9.19132 9.5679 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.15651 8.53629 8.91335 9.28976 9.66995 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.88054 8.25829 8.63606 9.01403 9.39156 9.76864 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.9823 8.36008 8.73783 9.11545 9.49299 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.08415 8.46199 8.8397 9.21727 9.59466 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.80846 8.18606 8.56381 8.94154 9.31913 9.6966 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.91026 8.2879 8.66577 9.04343 9.42098 9.79826 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.01217 8.38988 8.76765 9.14525 9.52277 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.11409 8.49177 8.86956 9.24721 9.62464 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.8385 8.21611 8.59377 8.97153 9.3491 9.72648 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.94048 8.3181 8.69582 9.0735 9.45099 9.82837 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.04249 8.42015 8.79787 9.17557 9.55302 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.14455 8.52223 8.89995 9.27755 9.65502 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.86904 8.24665 8.62437 9.00204 9.3796 9.75693 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.97115 8.34876 8.72649 9.10415 9.48164 NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.217 275.794 276.324 276.814 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.368 275.931 276.451 276.931 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 274.911 275.514 276.067 276.576 277.045 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.072 275.661 276.2 276.698 277.16 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.229 275.803 276.332 276.829 277.277 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.384 275.944 276.461 276.941 277.384 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 274.935 275.533 276.081 276.587 277.055 277.489 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.096 275.681 276.219 276.715 277.169 277.592 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.257 275.826 276.35 276.835 277.281 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.409 275.968 276.479 276.95 277.392 
NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 274.966 275.559 276.103 276.605 277.068 277.496 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.128 275.708 276.239 276.728 277.181 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.286 275.85 276.37 276.85 277.294 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 274.836 275.44 275.992 276.5 276.969 277.403 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.004 275.591 276.13 276.625 277.085 277.512 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.163 275.737 276.265 276.751 277.201 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.321 275.881 276.396 276.871 277.312 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 274.875 275.472 276.019 276.524 276.99 277.422 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.038 275.621 276.156 276.649 277.105 277.528 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.197 275.766 276.289 276.771 277.218 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.351 275.907 276.418 276.891 277.329 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 274.908 275.501 276.045 276.545 277.008 277.437 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.068 275.648 276.179 276.668 277.121 NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.86676 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.97122 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0728 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.79155 10.1725 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.89419 10.2746 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.99681 10.377 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0996 10.4816 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.82179 10.2022 10.5825 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.92464 10.3049 10.6846 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0274 10.4076 10.7871 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.1303 10.5102 10.8894 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.85262 10.233 10.6128 10.9919 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.9556 10.3358 10.7154 11.0943 + NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0585 10.4385 10.8179 11.1967 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.78087 10.1614 10.5413 10.9205 11.299 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.88398 10.2643 10.644 11.023 11.4013 + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.708 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.65 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.595 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.722 293.543 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.666 293.491 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.61 293.44 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.557 293.39 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.683 293.504 293.341 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.627 293.454 293.295 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.574 293.404 293.249 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.522 293.356 293.205 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.648 293.471 293.309 293.162 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.595 293.422 293.263 293.118 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.542 293.372 293.218 293.078 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.671 293.491 293.326 293.175 293.036 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.619 293.441 293.279 293.131 292.997 + + + + + + 5000 + 5000 + + 1.55006 1.71098 1.93889 2.21306 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.58541 1.76686 2.00906 2.29285 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.6269 1.82656 2.08179 2.37429 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.67364 1.89026 2.15677 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.72557 1.95771 2.23471 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.78236 2.02838 2.31479 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.84339 2.10203 2.39699 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.90856 2.17846 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 
1.97707 2.25748 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.04914 2.33909 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.12356 2.42112 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.20083 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.27961 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.3609 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.4441 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + 212.32 224.957 235.037 242.819 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 215.978 227.931 237.342 244.59 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 219.461 230.691 239.482 246.233 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 222.717 233.273 241.458 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 225.807 235.692 243.32 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 228.719 237.953 245.058 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 231.45 240.06 246.678 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 233.998 242.022 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 236.386 243.862 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 238.613 245.58 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 240.677 247.16 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 242.6 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 244.373 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 246.037 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 247.589 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN 2.38844 2.73414 3.08925 3.45281 3.82187 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 2.48013 2.82839 3.18696 3.55249 3.92386 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.23391 2.57182 2.92345 3.28394 3.65084 4.02201 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.32272 2.6653 3.01965 3.38202 3.75044 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.41356 2.75981 3.11667 3.48105 3.85113 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.5056 2.85516 3.21462 3.58022 3.94993 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.25897 2.59913 2.95163 3.31264 3.6786 4.05019 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.34937 2.69333 3.04854 3.41159 3.77869 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.44131 2.78739 3.14488 3.51014 3.87783 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.53342 2.88326 3.24261 3.60719 3.97732 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 2.28654 2.62757 2.97934 3.33985 3.70658 4.07769 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 2.37706 2.72101 3.07587 3.43865 3.80653 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 2.46914 2.81612 3.17393 3.53973 3.9067 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 2.56113 2.91305 3.27325 3.64061 4.00722 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.31274 2.65556 3.0091 3.37003 3.73795 4.10851 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.40536 2.75119 3.1059 3.46895 3.83748 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.49719 2.84619 3.20466 3.56971 3.93793 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.59097 2.94207 3.30194 3.66893 4.0392 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.68367 3.03791 3.40011 3.76811 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.7788 3.13568 3.49998 3.86767 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.876 3.23497 3.5984 3.96803 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.97153 3.33211 3.69808 4.06879 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 3.06786 3.43054 3.79802 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + NaN NaN NaN NaN 315.475 311.37 308.222 305.731 303.722 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 314.241 310.436 307.477 305.132 303.228 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 317.766 313.112 309.562 306.789 304.574 302.777 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 316.378 312.053 308.739 306.137 304.044 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 315.085 311.07 307.967 305.517 303.538 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 313.882 310.148 307.241 304.936 303.073 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 
NaN 317.335 312.767 309.286 306.565 304.394 302.621 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 315.966 311.731 308.479 305.92 303.875 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 314.692 310.777 307.736 305.324 303.39 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 313.532 309.878 307.029 304.768 302.927 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 316.902 312.438 309.041 306.372 304.234 302.488 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 315.573 311.441 308.26 305.747 303.726 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 314.34 310.504 307.52 305.145 303.244 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 313.214 309.615 306.817 304.58 302.789 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 316.525 312.142 308.801 306.181 304.071 302.354 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 315.204 311.148 308.035 305.565 303.572 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 314.005 310.231 307.302 304.978 303.103 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 312.884 309.379 306.636 304.435 302.649 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 311.863 308.579 305.997 303.922 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 310.892 307.821 305.391 303.435 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 309.972 307.098 304.824 302.967 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 309.134 306.439 304.285 302.525 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 308.348 305.811 303.773 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.25613 4.62146 4.99286 5.361 5.73621 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.35358 4.71994 5.09033 5.46581 5.83719 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 4.08573 4.45092 4.81922 5.19111 5.5659 5.93954 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 4.18397 4.55054 4.9193 5.29365 5.66595 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 4.28236 4.64851 5.01934 5.39052 5.7653 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 4.37897 4.74712 5.11807 5.48905 5.86392 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 4.11301 4.47831 4.84524 5.21701 5.59074 5.96454 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 4.20982 4.57639 4.94616 5.31753 5.69041 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 4.30719 4.67602 5.04516 5.4189 5.79111 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 4.40599 4.77369 5.14441 5.51748 5.89193 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 4.13907 4.5043 4.87303 5.24445 5.61795 5.99301 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 4.23704 4.60333 4.9729 5.34493 5.71887 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 4.3354 4.70263 5.07291 5.44556 5.82 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 4.06963 4.43405 4.80229 5.1733 5.54647 5.9212 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 
NaN NaN 4.16808 4.53528 4.90327 5.27375 5.64742 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 4.26636 4.63427 5.00224 5.3745 5.74863 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 4.36454 4.73206 5.10255 5.47537 5.84987 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 4.09982 4.46381 4.83185 5.20301 5.57631 5.95124 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 4.19706 4.5627 4.93192 5.30365 5.67743 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 4.29518 4.66211 5.03206 5.40452 5.77871 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 4.39403 4.7618 5.13251 5.50547 5.88011 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 4.12801 4.49316 4.86178 5.23317 5.60668 5.98168 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 4.22632 4.59255 4.96205 5.33403 5.70795 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN 264.834 266.593 268.111 269.401 270.546 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 265.328 267.014 268.465 269.732 270.821 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 263.89 265.799 267.419 268.816 270.033 271.093 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 264.433 266.259 267.812 269.164 270.326 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 264.953 266.694 268.189 269.475 270.604 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 265.438 267.111 268.546 269.783 270.874 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 264.035 265.916 267.509 268.891 270.091 271.14 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 264.565 266.367 267.905 269.229 270.381 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 265.076 266.806 268.274 269.555 270.664 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 265.565 267.214 268.632 269.862 270.939 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 264.179 266.036 267.616 268.977 270.162 271.202 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 264.712 266.485 268 269.311 270.455 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 265.219 266.918 268.374 269.635 270.737 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 263.791 265.705 267.331 268.729 269.945 271.01 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 264.341 266.178 267.732 269.073 270.245 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 264.865 266.621 268.109 269.403 270.532 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 265.362 267.037 268.474 269.72 270.812 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 263.96 265.841 267.444 268.826 270.026 271.08 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 264.493 266.295 267.835 269.164 270.321 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 265.005 266.731 268.208 269.488 270.605 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 265.492 267.147 268.568 269.8 270.88 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 264.098 265.96 267.546 268.911 270.099 271.143 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN + NaN NaN NaN 264.626 266.407 267.928 269.242 270.388 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.08785 6.4704 6.8533 7.23779 7.61725 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.19118 6.57793 6.95691 7.33958 7.7194 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.91441 6.29638 6.68181 7.05946 7.43848 7.82132 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.0187 6.3972 6.78415 7.16362 7.54208 7.92338 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.11764 6.49997 6.88351 7.2631 7.64382 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.21916 6.60202 6.98266 7.36443 7.74658 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.93975 6.32325 6.70324 7.08505 7.46734 7.84947 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.04207 6.42384 6.80584 7.18801 7.5702 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.14507 6.52685 6.90882 7.29101 7.67317 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.24787 6.62987 7.01191 7.3941 7.7762 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.96907 6.3509 6.73295 7.11511 7.49712 7.87916 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.07209 6.45394 6.83604 7.21816 7.6003 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.17515 6.55709 6.93925 7.32142 7.70341 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.89666 6.27834 6.66022 7.04246 7.42457 7.80656 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.9999 6.38162 6.76361 7.14571 7.52775 7.90974 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.10308 6.48487 6.86686 7.24898 7.63101 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.20632 6.58822 6.97019 7.35228 7.73425 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.92793 6.30953 6.69148 7.07355 7.45562 7.83758 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.03111 6.41285 6.79487 7.17692 7.55892 7.94081 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.13446 6.51628 6.89821 7.28025 7.66227 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.23772 6.61958 7.00163 7.38371 7.76563 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 5.95943 6.34109 6.723 7.10503 7.48706 7.86898 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 6.0627 6.44446 6.82643 7.20848 7.59054 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.197 296.603 296.081 295.618 295.21 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.017 296.437 295.937 295.49 295.094 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.468 296.839 296.28 295.8 295.373 294.985 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.276 296.677 296.136 295.667 295.252 294.876 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.103 296.516 295.998 295.544 295.138 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.933 296.364 295.866 295.423 295.026 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.388 
+ [data payload omitted: repeated blocks of tie-point grids (spacing attributes 5000 x 5000) holding angle- and brightness-temperature-like values, NaN-filled outside the observed swath; the file's element markup was lost in extraction, so only this summary of the arrays is recoverable]
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + 215.95 230.181 240.735 248.4 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 220.175 233.374 243.052 250.089 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 224.124 236.276 245.162 251.636 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 227.739 238.942 247.088 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 231.102 241.4 248.879 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 234.212 243.658 250.529 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 237.067 245.73 252.055 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 239.692 247.638 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 242.098 249.397 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 244.316 251.031 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 246.338 252.507 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 248.199 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 249.894 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 251.463 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN 2.26702 2.62823 2.99553 3.36881 3.74582 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 2.36337 2.72606 3.09611 3.47083 3.84974 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.10401 2.45932 2.8245 3.19576 3.5714 3.94971 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.19799 2.55679 2.92387 3.29636 3.67306 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.29359 2.65501 3.02391 3.39789 3.77577 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.39023 2.75396 3.12464 3.4993 3.87638 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.13069 2.48785 2.85367 3.22528 3.59984 3.97849 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.22609 2.58604 2.95377 3.32679 3.70193 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.32285 2.68372 3.05292 3.4276 3.80299 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.41926 2.78302 3.15342 3.5269 3.90434 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 2.15979 2.51756 2.88233 3.25322 3.62836 4.00641 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 2.25528 2.61481 2.98186 3.35443 3.73032 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 2.35191 2.71342 3.08275 3.45788 3.83242 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.0926 2.4481 2.8138 3.18485 3.56099 3.93468 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.18739 2.54662 2.91289 3.284 3.66028 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.28478 2.64595 3.01268 3.38542 3.76184 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.38122 2.74454 3.11431 3.48841 3.86403 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.47907 2.84357 3.21408 3.5898 3.96722 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.57567 2.94256 3.31479 3.69095 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.67448 3.04316 3.41693 3.79241 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.77513 3.14528 3.51763 3.89463 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.87389 3.24495 3.61938 3.9971 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.97322 3.3458 3.72133 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + NaN NaN NaN NaN 309.913 306.376 303.726 301.66 300.012 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 308.84 305.583 303.102 301.164 299.605 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 311.926 307.866 304.846 302.529 300.706 299.238 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 310.703 306.956 304.153 301.988 300.269 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 309.57 306.117 303.508 301.479 299.856 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 308.53 305.339 302.902 300.998 299.474 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 311.546 307.566 304.609 302.341 300.555 299.11 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 310.34 306.682 303.936 301.809 300.128 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 309.232 305.871 303.312 301.315 299.732 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 308.226 305.11 302.727 300.861 299.357 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 311.167 307.289 304.407 302.183 300.422 298.998 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 310.005 306.44 303.753 301.665 300.007 NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 308.929 305.642 303.136 301.171 299.616 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 312.082 307.958 304.894 302.554 300.707 299.244 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 310.845 307.041 304.207 302.024 300.289 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 309.683 306.19 303.569 301.52 299.885 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 308.649 305.42 302.96 301.034 299.498 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 307.678 304.696 302.404 300.591 299.135 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 306.806 304.029 301.878 300.169 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 305.978 303.391 301.374 299.771 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 305.199 302.79 300.911 299.392 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 304.493 302.243 300.467 299.03 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 303.833 301.722 300.048 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.17306 4.54551 4.92306 5.29644 5.67639 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.27248 4.64575 5.02209 5.4027 5.77848 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 3.99891 4.37184 4.7467 5.12423 5.50398 5.88194 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 4.09932 4.47329 4.8484 5.22827 5.60533 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 4.19989 4.57313 4.95005 5.32643 5.70577 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 4.29851 4.67346 5.05023 5.42624 5.80556 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 4.02689 4.39979 4.7732 5.15062 5.52925 5.90729 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 4.12585 4.49975 4.87582 5.25255 5.63011 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 4.22538 4.60124 4.9763 5.35528 5.73197 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 3.95393 4.32611 4.70052 5.07704 5.45514 5.83398 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 4.05363 4.42644 4.80162 5.17854 5.55681 5.93609 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 4.15386 4.52729 4.90299 5.28037 5.659 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 4.25421 4.62839 5.00465 5.38243 5.76131 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 3.98277 4.35494 4.72975 5.10643 5.48453 5.86361 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 4.08341 4.45801 4.83237 5.20835 5.5868 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 4.18384 4.5589 4.93293 5.31044 5.68915 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 4.28407 4.65838 5.03471 5.41261 5.79158 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 4.01368 4.38529 4.75985 5.13666 5.51484 5.89406 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 4.11308 4.48603 4.86155 5.23869 5.61717 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 4.21332 4.58724 4.9632 5.34088 5.71963 NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 4.3141 4.68861 5.06515 5.44315 5.82218 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 4.04243 4.41518 4.79028 5.16721 5.54554 5.92479 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 4.14296 4.5164 4.89209 5.26941 5.64802 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN 268.302 269.811 271.107 272.204 273.174 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 268.726 270.169 271.405 272.482 273.407 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 267.485 269.127 270.514 271.705 272.739 273.636 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 267.954 269.523 270.849 271.997 272.983 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 268.4 269.891 271.167 272.262 273.221 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 268.816 270.248 271.472 272.522 273.447 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 267.608 269.226 270.588 271.763 272.782 273.67 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 268.066 269.612 270.923 272.049 273.028 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 268.502 269.986 271.239 272.328 273.268 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 267.253 268.926 270.337 271.543 272.586 273.498 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 267.733 269.326 270.675 271.835 272.842 273.723 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 268.189 269.711 271.006 272.12 273.088 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 268.629 270.082 271.321 272.391 273.326 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 267.397 269.043 270.434 271.625 272.658 273.559 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 267.873 269.452 270.778 271.916 272.909 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 268.324 269.828 271.096 272.196 273.155 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 268.75 270.184 271.409 272.466 273.39 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 267.545 269.162 270.531 271.706 272.725 273.617 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 268.005 269.551 270.864 271.993 272.975 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 268.444 269.923 271.182 272.269 273.215 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 268.865 270.279 271.487 272.532 273.446 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 267.665 269.263 270.617 271.779 272.787 273.67 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 268.118 269.645 270.943 272.06 273.031 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.03825 6.42328 6.8084 7.19481 7.57608 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.14227 6.53146 6.91268 7.29719 7.67876 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.86365 6.24823 6.6361 7.01574 7.39656 7.78112 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.96874 6.34971 6.73894 7.12044 7.50071 7.88367 NaN NaN NaN NaN + NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.06839 6.45318 6.83887 7.22045 7.60292 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.17055 6.55579 6.9386 7.32225 7.70615 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.88926 6.27539 6.65765 7.04152 7.42566 7.80947 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.99228 6.37657 6.76081 7.14502 7.52905 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.096 6.48027 6.86441 7.24852 7.63244 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.19952 6.58386 6.968 7.3521 7.73595 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.91876 6.30317 6.68755 7.07179 7.45565 7.83932 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.02254 6.40691 6.79123 7.17531 7.55923 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.12631 6.51064 6.89497 7.2791 7.66285 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.84584 6.23015 6.61439 6.99873 7.38273 7.76641 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.9498 6.33411 6.71839 7.10252 7.48638 7.87003 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.05373 6.43798 6.82215 7.20626 7.5901 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.15759 6.54193 6.92607 7.3101 7.69379 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.87728 6.26154 6.64581 7.02992 7.41384 7.79752 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.9812 6.36543 6.74971 7.13381 7.51767 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.0852 6.46954 6.85369 7.23767 7.62143 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.18918 6.5734 6.95756 7.34156 7.72525 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 5.9089 6.29316 6.6774 7.06154 7.44543 7.829 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 6.01291 6.39721 6.78141 7.16545 7.5493 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.81 294.354 293.954 293.599 293.291 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.667 294.223 293.843 293.502 293.202 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.016 294.531 294.102 293.736 293.409 293.115 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.865 294.404 293.988 293.632 293.317 293.033 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.73 294.28 293.882 293.536 293.229 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.597 294.161 293.781 293.442 293.142 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.948 294.466 294.047 293.677 293.35 293.058 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.805 294.342 293.936 293.579 293.262 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.669 294.222 293.83 293.482 293.174 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.537 294.104 293.725 293.39 293.092 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.886 294.409 293.993 293.626 293.3 293.01 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.748 294.288 293.884 293.528 293.213 NaN NaN NaN NaN NaN NaN NaN + 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.613 294.169 293.779 293.435 293.13 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.974 294.484 294.056 293.678 293.345 293.048 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.832 294.36 293.946 293.581 293.257 292.969 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.697 294.24 293.839 293.486 293.173 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.563 294.124 293.737 293.396 293.091 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.924 294.438 294.012 293.637 293.305 293.011 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.784 294.314 293.903 293.541 293.221 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.649 294.198 293.8 293.448 293.136 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.519 294.081 293.697 293.358 293.056 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 294.873 294.392 293.971 293.6 293.27 292.976 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 294.735 294.27 293.862 293.502 293.183 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.95786 8.33657 8.71615 9.09487 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.05988 8.43767 8.81777 9.19692 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.77938 8.15903 8.53922 8.91895 9.29903 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.8811 8.26127 8.64123 9.0214 9.40173 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.98372 8.3636 8.74357 9.13109 9.50864 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.08626 8.46567 8.8462 9.22937 9.61038 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.80765 8.18778 8.56794 8.94889 9.32855 9.70903 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.91001 8.29109 8.67236 9.05359 9.43165 9.81044 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.0138 8.394 8.77381 9.15549 9.53392 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.11484 8.49696 8.8761 9.25446 9.63647 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.83703 8.21725 8.59729 8.97737 9.35681 9.73561 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.93951 8.31973 8.69967 9.07929 9.4587 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.04205 8.42222 8.80207 9.18165 9.56088 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.76448 8.14462 8.5247 8.90454 9.28406 9.66326 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.86709 8.24718 8.62724 9.00693 9.38637 9.76543 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.96963 8.34973 8.72971 9.10935 9.48873 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.07227 8.4523 8.8322 9.21179 9.59102 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.79485 8.17493 8.55484 8.9347 9.31422 9.69339 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.8975 8.27757 8.65752 9.03724 9.41661 9.79569 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.00022 8.38025 8.76014 9.13982 9.51911 
NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.10295 8.48294 8.86278 9.24234 9.62161 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.82559 8.20568 8.58568 8.96543 9.3449 9.72398 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.92841 8.30845 8.68838 9.06805 9.44742 NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.219 277.71 278.16 278.577 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.347 277.825 278.267 278.674 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 276.958 277.469 277.938 278.37 278.771 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.091 277.591 278.051 278.475 278.867 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.225 277.713 278.161 278.583 278.964 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.354 277.829 278.269 278.679 279.056 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 276.971 277.48 277.947 278.377 278.773 279.142 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.108 277.605 278.062 278.482 278.869 279.231 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.243 277.727 278.172 278.585 278.964 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.371 277.847 278.281 278.681 279.057 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 276.996 277.5 277.961 278.386 278.78 279.145 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.132 277.623 278.075 278.491 278.877 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.265 277.746 278.187 278.594 278.971 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 276.885 277.396 277.864 278.295 278.694 279.064 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.023 277.522 277.981 278.403 278.793 279.155 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.161 277.649 278.095 278.507 278.888 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.292 277.768 278.206 278.61 278.985 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 276.914 277.422 277.887 278.315 278.71 279.076 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.053 277.548 278.001 278.42 278.807 279.168 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.186 277.67 278.114 278.524 278.903 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.317 277.789 278.224 278.624 278.996 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 276.941 277.445 277.906 278.33 278.723 279.088 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.077 277.568 278.019 278.434 278.82 NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.82872 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.93351 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0353 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.75336 10.1352 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.85622 10.2376 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.95916 10.3403 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0621 10.445 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.78367 10.1651 10.5461 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.88679 10.268 10.6485 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.98981 10.3708 10.7512 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0929 10.4737 10.8538 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.81458 10.1959 10.5765 10.9564 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.91774 10.2989 10.6793 11.059 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0209 10.4019 10.782 11.1615 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.74262 10.1241 10.5048 10.8848 11.2641 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.8459 10.2272 10.6077 10.9876 11.3666 + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.051 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.009 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.972 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.053 291.934 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.013 291.899 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.976 291.863 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.937 291.827 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.02 291.902 291.795 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.982 291.866 291.762 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.944 291.832 291.731 + NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.909 291.8 291.701 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.992 291.874 291.767 291.671 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.953 291.839 291.736 291.642 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.919 291.807 291.705 291.612 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.004 291.882 291.773 291.675 291.586 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.966 291.849 291.743 291.646 291.558 + + + + + + 5000 + 5000 + + 0.940685 1.18821 1.49861 1.84032 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 0.998439 1.26772 1.5887 1.9358 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.06363 1.35003 1.68006 2.03187 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.13409 1.43537 1.77237 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.20987 1.52346 1.86675 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.28999 1.6137 1.96229 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.37357 1.70574 2.0589 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.46015 1.79937 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.54918 1.89469 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.64058 1.99158 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.73305 2.08772 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.82717 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.92159 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.01762 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + 224.94 241.508 251.835 258.488 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 230.202 244.788 253.904 259.875 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 234.853 247.65 255.75 261.127 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 238.896 250.179 257.387 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 242.471 252.43 258.881 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 245.63 254.441 260.232 NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 248.418 256.239 261.458 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 250.879 257.853 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 253.071 259.317 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 255.025 260.639 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 256.765 261.828 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 258.333 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 259.729 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 261.005 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN 2.12524 2.50615 2.88825 3.27318 3.65954 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 2.22754 2.60841 2.99236 3.378 3.76576 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 1.95111 2.32889 2.71093 3.09521 3.48113 3.86779 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.05179 2.43145 2.81419 3.19888 3.58527 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.15369 2.53435 2.91781 3.3032 3.6903 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.25604 2.63756 3.02196 3.40734 3.79306 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 1.97978 2.35907 2.7414 3.12571 3.51034 3.89717 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.08186 2.46212 2.84519 3.23018 3.61484 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.18474 2.56431 2.94786 3.33378 3.71811 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.28671 2.66784 3.05159 3.43558 3.82156 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 2.01093 2.39021 2.77105 3.15441 3.53951 3.92565 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 2.11276 2.49212 2.8742 3.25854 3.6438 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 2.21536 2.59517 2.97855 3.36473 3.74804 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.93855 2.31688 2.69966 3.08382 3.47042 3.85242 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.04007 2.42046 2.80261 3.18599 3.57205 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.14397 2.52457 2.9059 3.29015 3.67581 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN + 2.24598 2.62733 3.01092 3.3959 3.78024 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.34926 2.73041 3.11377 3.4997 3.88537 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.45062 2.83302 3.21737 3.60321 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.55401 2.9372 3.32234 3.70688 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.65899 3.04264 3.42559 3.81121 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.76164 3.14534 3.52981 3.9157 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.86462 3.24908 3.63409 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + NaN NaN NaN NaN 300.059 297.797 296.151 294.899 293.915 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 299.36 297.298 295.768 294.596 293.671 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 301.382 298.731 296.835 295.416 294.319 293.451 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 300.568 298.154 296.407 295.089 294.058 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 299.829 297.627 296.008 294.776 293.81 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 299.153 297.135 295.639 294.489 293.584 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 301.124 298.54 296.686 295.296 294.222 293.364 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 300.331 297.978 296.267 294.972 293.967 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 299.609 297.469 295.888 294.677 293.731 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 298.964 296.996 295.529 294.403 293.509 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 300.884 298.369 296.562 295.199 294.141 293.298 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 300.118 297.831 296.16 294.889 293.896 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 299.424 297.334 295.784 294.591 293.662 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 301.503 298.801 296.865 295.426 294.313 293.444 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 300.679 298.217 296.445 295.11 294.065 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 299.927 297.687 296.052 294.803 293.823 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 299.25 297.2 295.681 294.515 293.599 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 298.633 296.754 295.343 294.246 293.379 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 298.077 296.337 295.022 293.996 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 297.555 295.948 294.72 293.758 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 297.067 295.578 294.439 293.532 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 296.628 295.244 294.173 293.318 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 296.218 294.928 293.922 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.07903 4.46004 4.84494 5.2245 5.60986 NaN NaN NaN NaN NaN NaN 
NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.18092 4.56235 4.94564 5.33235 5.71332 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 3.90031 4.28253 4.66529 5.04957 5.43514 5.81812 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 4.00345 4.38635 4.76894 5.15528 5.53786 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 4.10659 4.48827 4.8724 5.25497 5.63972 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 4.20761 4.59069 4.97439 5.35629 5.74079 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 3.92919 4.31129 4.69239 5.07644 5.4608 5.84382 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 4.03082 4.41348 4.79696 5.18003 5.56309 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 4.13283 4.51712 4.89928 5.2844 5.66635 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 3.85436 4.23605 4.61845 5.00178 5.38572 5.76968 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 3.95684 4.33865 4.72145 5.10493 5.48889 5.87312 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 4.05965 4.44175 4.8248 5.20845 5.59249 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 4.16264 4.545 4.92823 5.31203 5.69618 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 3.88416 4.26566 4.64838 5.03181 5.41575 5.79986 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 3.98758 4.37116 4.75301 5.13538 5.51939 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 4.0906 4.47416 4.85535 5.23909 5.62321 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 4.19327 4.57572 4.95897 5.34283 5.72696 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 3.91609 4.29687 4.67918 5.06261 5.44653 5.83075 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 4.01813 4.39984 4.78278 5.16631 5.55031 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 4.12086 4.50318 4.88627 5.27008 5.65414 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 4.22407 4.6066 4.98997 5.37385 5.75801 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 3.94561 4.3274 4.71018 5.09369 5.47772 5.86192 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 4.0487 4.43084 4.81388 5.19751 5.58157 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN 273.688 274.773 275.701 276.483 277.176 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 273.991 275.028 275.914 276.681 277.338 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 273.094 274.28 275.273 276.122 276.859 277.498 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 273.43 274.558 275.51 276.331 277.034 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 273.751 274.824 275.737 276.516 277.198 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 274.049 275.076 275.95 276.7 277.359 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 273.176 274.342 275.319 276.158 276.884 277.516 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 273.506 274.618 275.557 276.36 277.056 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 273.822 274.886 275.781 276.557 277.226 NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 272.919 274.125 275.136 275.997 276.742 277.39 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 273.265 274.414 275.381 276.207 276.921 277.548 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 273.596 274.688 275.612 276.406 277.097 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 273.908 274.953 275.839 276.602 277.266 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 273.026 274.212 275.206 276.054 276.787 277.428 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 273.368 274.501 275.451 276.263 276.97 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 273.694 274.774 275.68 276.462 277.142 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 274.001 275.027 275.9 276.653 277.31 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 273.133 274.295 275.275 276.113 276.837 277.47 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 273.464 274.574 275.513 276.316 277.013 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 273.781 274.841 275.739 276.512 277.184 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 274.082 275.094 275.956 276.699 277.348 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 273.219 274.369 275.337 276.163 276.878 277.505 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 273.545 274.64 275.566 276.361 277.051 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.98174 6.36944 6.75695 7.14549 7.52863 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.08661 6.47839 6.86185 7.24841 7.63182 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.80591 6.19332 6.58371 6.96554 7.34836 7.73471 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.91186 6.29551 6.6872 7.07082 7.453 7.83773 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.01226 6.39968 6.78774 7.17139 7.55573 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.11519 6.50297 6.88802 7.27373 7.65946 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.8318 6.22073 6.60548 6.99156 7.37768 7.76328 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.93565 6.32264 6.70927 7.09561 7.48154 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.0401 6.42698 6.81344 7.19967 7.58547 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.14436 6.53125 6.91767 7.30376 7.68942 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.86157 6.24874 6.63556 7.02198 7.4078 7.79328 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.96609 6.35314 6.73984 7.12608 7.51192 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.0706 6.45755 6.84418 7.23038 7.61598 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.78801 6.17518 6.56193 6.94851 7.33452 7.72002 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.89276 6.2798 6.66652 7.05285 7.43867 7.82409 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.9974 6.38435 6.7709 7.15713 7.54286 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.10206 
[... test-fixture payload elided: many repeating blocks, each consisting of a two-integer step header ("5000 5000", the row and column sampling step) followed by a 23-column tie-point grid of floats padded with NaN; the grids alternate between small geometry-like values (roughly 1.7 to 11.4) and brightness-temperature-like values (roughly 210 to 324 K) ...]
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0426 10.4216 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.1449 10.5258 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.86803 10.2473 10.6264 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.97058 10.3497 10.7282 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.073 10.452 10.8304 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.1755 10.5543 10.9325 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.89882 10.278 10.6566 11.0347 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0014 10.3805 10.759 11.1368 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.104 10.483 10.8612 11.2389 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.2067 10.5854 10.9635 11.341 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.93004 10.3092 10.6878 11.0658 11.4431 + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.354 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.279 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.208 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.379 295.14 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.305 295.073 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.234 295.008 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.164 294.94 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.332 295.097 294.88 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.262 295.03 294.817 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.191 294.965 294.758 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.124 294.903 294.7 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.294 295.058 294.841 294.642 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.223 294.992 294.781 294.587 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.154 294.93 294.723 294.532 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.088 294.867 294.664 294.479 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.256 295.021 294.807 294.609 294.427 + + + + + + 5000 + 5000 + + 
2.11548 2.23575 2.41427 2.63918 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.14122 2.27855 2.47068 2.70615 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.17183 2.32496 2.53004 2.77531 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.20696 2.37507 2.59189 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.24651 2.42893 2.65689 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.29014 2.48603 2.72436 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.3378 2.54637 2.79444 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.38922 2.60946 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.44396 2.67559 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.50246 2.74445 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.56357 2.81465 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.62781 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.69397 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.76294 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + 207.409 217.285 225.933 233.253 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 210.181 219.752 228.037 235.012 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 212.881 222.101 230.037 236.676 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 215.465 224.357 231.927 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 217.978 226.524 233.747 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 220.41 228.599 235.48 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 222.749 230.578 237.127 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 224.993 232.469 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 227.147 234.277 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 229.207 236.003 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 231.16 237.615 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 233.022 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN + 234.776 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 236.455 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN 2.74514 3.05183 3.37466 3.71119 4.0576 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 2.8255 3.13668 3.46435 3.80409 4.15374 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.61101 2.90652 3.22281 3.55396 3.89625 4.24663 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.68765 2.98978 3.31045 3.64487 3.98982 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.76669 3.07448 3.39938 3.7371 4.08476 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.84761 3.16052 3.48954 3.82967 4.17808 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.63228 2.93046 3.24814 3.58026 3.92209 4.27322 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.71053 3.01462 3.33663 3.67217 4.01616 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.79084 3.09918 3.42514 3.76408 4.10986 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.87199 3.18594 3.51533 3.85494 4.20396 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 2.65596 2.9557 3.27338 3.60539 3.94828 4.29924 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 2.73462 3.03937 3.36175 3.69736 4.04246 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 2.81532 3.1251 3.45187 3.79175 4.1371 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.60158 2.89675 3.21304 3.54365 3.88623 4.23235 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.67894 2.98094 3.30068 3.6336 3.97787 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.75963 3.06666 3.38952 3.72584 4.07179 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.84037 3.15249 3.4804 3.81995 4.16687 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.92348 3.23958 3.57045 3.91314 4.26288 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 3.00643 3.32737 3.66176 4.0065 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 3.09192 3.41718 3.75506 4.10058 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 3.18002 3.50876 3.8472 4.1956 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 3.2669 3.59884 3.94084 4.29129 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN + 3.35512 3.69049 4.03503 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + NaN NaN NaN NaN 326.245 321.453 317.581 314.393 311.749 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 324.837 320.321 316.639 313.61 311.086 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 328.805 323.525 319.251 315.764 312.883 310.482 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 327.268 322.276 318.232 314.925 312.183 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 325.806 321.094 317.265 314.123 311.511 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 324.425 319.973 316.345 313.362 310.885 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 328.328 323.118 318.912 315.48 312.651 310.278 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 326.802 321.89 317.908 314.649 311.961 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 325.355 320.738 316.972 313.871 311.315 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 324.011 319.641 316.074 313.144 310.694 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 327.84 322.725 318.605 315.23 312.437 310.101 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 326.35 321.534 317.628 314.421 311.762 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 324.939 320.398 316.691 313.633 311.118 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 328.964 323.629 319.309 315.795 312.89 310.505 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 327.411 322.365 318.298 314.976 312.216 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 325.918 321.171 317.339 314.18 311.555 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 324.543 320.058 316.409 313.409 310.923 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 323.23 319.006 315.555 312.694 310.312 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 322.024 318.016 314.733 312.012 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 320.854 317.062 313.944 311.366 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 319.736 316.146 313.205 310.74 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 318.703 315.301 312.494 310.144 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 317.723 314.492 311.817 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.4881 4.83505 5.19024 5.54455 5.90722 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.58039 4.92903 5.28393 5.64567 6.0052 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 4.32732 4.67274 5.02388 5.38084 5.74232 6.10447 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 4.41987 4.76738 5.11965 5.47951 5.83924 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 4.51273 4.86066 5.21561 5.5729 5.93536 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 4.60424 4.9547 5.31035 5.66804 6.03102 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 4.35286 4.69853 5.04857 5.40552 5.76617 
6.12863 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 4.44406 4.79178 5.14518 5.50244 5.8627 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 4.53608 4.8867 5.24024 5.60001 5.96019 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 4.28543 4.62965 4.97994 5.3355 5.69533 6.05799 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 4.377 4.72295 5.07499 5.43178 5.79241 6.15604 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 4.46955 4.81722 5.17062 5.52851 5.88999 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 4.56254 4.91189 5.26665 5.62568 5.98806 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 4.31156 4.65613 5.00715 5.36315 5.72311 6.08616 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 4.40427 4.75236 5.10367 5.45984 5.82079 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 4.49707 4.8466 5.19868 5.55695 5.91876 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 4.59007 4.9399 5.29502 5.65432 6.01691 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 4.33991 4.68427 5.03533 5.39165 5.75187 6.11526 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 4.43157 4.77838 5.13119 5.48862 5.84976 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 4.52444 4.87322 5.22733 5.58592 5.94789 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 4.61815 4.96843 5.3239 5.68346 6.0463 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 4.36655 4.71227 5.06411 5.4208 5.78135 6.14491 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 4.45943 4.80697 5.16021 5.51798 5.87938 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN 257.78 259.977 261.897 263.546 265.022 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 258.396 260.509 262.35 263.973 265.38 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 256.618 258.985 261.023 262.801 264.366 265.736 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 257.29 259.564 261.524 263.248 264.744 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 257.935 260.111 262.005 263.65 265.108 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 258.541 260.64 262.461 264.048 265.458 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 256.801 259.138 261.144 262.904 264.447 265.805 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 257.457 259.704 261.647 263.337 264.822 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 258.088 260.254 262.116 263.758 265.19 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 256.295 258.7 260.773 262.574 264.153 265.546 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 256.981 259.288 261.28 263.016 264.541 265.891 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 257.635 259.851 261.771 263.447 264.922 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 258.266 260.396 262.244 263.86 265.286 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 256.497 258.87 260.917 262.698 264.261 265.641 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 257.175 259.464 261.427 
263.139 264.648 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 257.823 260.019 261.905 263.562 265.021 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 258.441 260.545 262.371 263.972 265.383 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 256.701 259.039 261.06 262.821 264.367 265.732 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 257.359 259.608 261.556 263.254 264.747 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 257.993 260.155 262.03 263.671 265.116 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 258.602 260.682 262.49 264.074 265.47 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 256.872 259.189 261.189 262.931 264.461 265.814 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 257.525 259.75 261.677 263.358 264.837 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.24338 6.61815 6.99386 7.37214 7.74586 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.34447 6.72362 7.09555 7.4723 7.84652 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.07369 6.44742 6.8252 7.19648 7.56956 7.94694 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.17534 6.54605 6.92574 7.299 7.67154 8.04762 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.27219 6.64665 7.02336 7.39658 7.77174 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.37157 6.74687 7.12066 7.49644 7.87306 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.09824 6.47335 6.84615 7.22135 7.59774 7.9745 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.19816 6.57196 6.94686 7.32266 7.69907 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.29891 6.67299 7.04802 7.42399 7.80051 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.39951 6.77402 7.14934 7.5255 7.90212 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.12674 6.50042 6.87523 7.25085 7.62696 8.00366 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.22746 6.60141 6.97644 7.35223 7.72863 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.32827 6.70257 7.0779 7.4539 7.83031 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.05598 6.42934 6.80378 7.17935 7.55544 7.932 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.15685 6.53053 6.9053 7.28091 7.65711 8.0338 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.25778 6.63175 7.00674 7.38256 7.7589 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.35886 6.73316 7.10836 7.48429 7.86073 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.08665 6.46 6.83457 7.21001 7.5861 7.96265 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.18749 6.56122 6.93612 7.31176 7.68796 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.28863 6.6627 7.03767 7.41344 7.7898 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.38973 6.76404 7.13935 7.51536 7.89179 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 6.11759 6.49106 6.86569 7.24113 7.6172 7.99372 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 
NaN NaN 6.21862 6.59241 6.96727 7.34294 7.71922 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.595 301.711 300.926 300.224 299.604 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.332 301.468 300.717 300.038 299.434 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 303.007 302.073 301.238 300.514 299.863 299.271 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.726 301.833 301.023 300.317 299.684 299.11 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.474 301.598 300.82 300.132 299.513 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.222 301.373 300.624 299.953 299.348 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.902 301.971 301.157 300.427 299.774 299.186 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.634 301.738 300.945 300.236 299.601 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.375 301.508 300.737 300.05 299.433 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.123 301.284 300.538 299.871 299.271 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.8 301.882 301.069 300.344 299.695 299.112 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.535 301.646 300.858 300.156 299.526 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.279 301.42 300.656 299.974 299.361 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.973 302.033 301.2 300.458 299.795 299.2 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.703 301.793 300.987 300.268 299.625 299.045 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.442 301.562 300.781 300.083 299.457 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.192 301.34 300.581 299.903 299.295 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.882 301.948 301.122 300.387 299.729 299.138 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.613 301.712 300.913 300.2 299.56 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.358 301.485 300.708 300.015 299.394 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.108 301.262 300.511 299.839 299.235 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 302.792 301.869 301.051 300.32 299.666 299.078 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 302.529 301.634 300.84 300.133 299.499 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.11722 8.48745 8.85921 9.23075 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.21673 8.58629 8.95889 9.33088 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.9429 8.31372 8.68573 9.05803 9.43123 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.04218 8.4137 8.78573 9.15854 9.53224 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.14249 8.51379 8.88597 9.26604 9.63713 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.24245 8.61364 8.98665 9.36285 9.73693 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.97039 8.34171 8.71381 9.0874 
9.46022 9.83429 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.07025 8.44262 8.81599 9.19009 9.5614 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.17137 8.54327 8.91551 9.29 9.66179 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.27023 8.64402 9.01577 9.3872 9.76265 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.99889 8.37032 8.74232 9.11502 9.48764 9.86028 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.09882 8.4705 8.84265 9.21511 9.58786 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.19897 8.57081 8.943 9.31552 9.68826 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.92789 8.29911 8.67103 9.04342 9.41609 9.78899 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.02793 8.39935 8.77149 9.14389 9.51658 9.8894 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.12807 8.49968 8.87185 9.24437 9.61717 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.22828 8.60004 8.97237 9.34499 9.71778 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.95735 8.32862 8.70049 9.0729 9.44557 9.81843 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.05755 8.42899 8.80104 9.1735 9.54619 9.91912 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.15782 8.52943 8.90161 9.27421 9.64696 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.25822 8.62998 9.00227 9.37489 9.74774 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.98737 8.35866 8.7306 9.10297 9.47567 9.8485 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.08773 8.4592 8.83131 9.20376 9.57647 NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.218 271.959 272.644 273.275 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.416 272.14 272.811 273.43 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 270.83 271.606 272.319 272.973 273.58 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.038 271.796 272.492 273.135 273.731 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.243 271.984 272.665 273.306 273.884 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.445 272.167 272.833 273.454 274.027 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 270.869 271.639 272.346 273 273.602 274.162 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.078 271.832 272.526 273.165 273.752 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.287 272.022 272.697 273.323 273.899 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.484 272.205 272.865 273.473 274.044 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 270.914 271.68 272.382 273.029 273.626 274.18 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.125 271.872 272.556 273.188 273.774 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.328 272.057 272.729 273.348 273.922 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 270.753 271.53 272.242 272.897 273.502 274.064 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 270.966 271.723 272.419 273.061 
273.655 274.207 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.173 271.914 272.595 273.223 273.803 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.376 272.099 272.764 273.379 273.949 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 270.803 271.573 272.28 272.931 273.533 274.091 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.013 271.766 272.457 273.094 273.683 274.23 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.219 271.954 272.629 273.253 273.83 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.418 272.136 272.797 273.408 273.974 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 270.847 271.613 272.315 272.962 273.56 274.114 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.055 271.802 272.488 273.121 273.707 NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.96698 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0704 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.1713 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.89221 10.2704 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.99408 10.3719 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.096 10.4736 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.198 10.5775 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.92221 10.2999 10.6776 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0243 10.4019 10.7792 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.1264 10.5039 10.8809 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.2285 10.6058 10.9827 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.95283 10.3305 10.7078 11.0846 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0551 10.4327 10.8098 11.1864 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.1573 10.5347 10.9117 11.2882 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.2595 10.6368 11.0137 11.39 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.98402 10.3617 10.7389 11.1156 11.4917 + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.981 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.889 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.802 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.016 296.72 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.927 296.638 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.839 296.557 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.754 296.477 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.965 296.671 296.4 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.876 296.589 296.326 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.792 296.511 296.252 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.708 296.433 296.18 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.919 296.626 296.358 296.11 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.833 296.547 296.284 296.041 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.75 296.469 296.212 295.974 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.666 296.393 296.142 295.909 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.878 296.587 296.318 296.071 295.843 + + + + + + 5000 + 5000 + + 1.23823 1.43501 1.70067 2.00793 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.28243 1.50128 1.78028 2.09568 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.33358 1.57132 1.86227 2.18464 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.39039 1.64507 1.94576 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.45266 1.72229 2.032 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.51983 1.80247 2.11999 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.59127 1.88516 2.20952 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.66633 1.97007 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.7448 2.05747 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.82615 2.1467 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.90956 2.23617 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.99521 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.08189 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.17074 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN + 2.26106 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + 217.094 231.741 242.36 249.938 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 221.48 234.978 244.661 251.591 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 225.551 237.901 246.748 253.104 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 229.254 240.572 248.649 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 232.678 243.022 250.407 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 235.824 245.261 252.022 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 238.696 247.31 253.513 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 241.321 249.19 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 243.716 250.915 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 245.916 252.513 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 247.91 253.952 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 249.741 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 251.402 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 252.937 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 254.353 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN 2.23151 2.59747 2.96837 3.34463 3.72401 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 2.32927 2.69641 3.06987 3.44734 3.82845 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.06582 2.4265 2.79581 3.17031 3.5485 3.92888 NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN + NaN NaN NaN 2.16142 2.52525 2.89612 3.27166 3.65078 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.25849 2.62459 2.99706 3.37391 3.75413 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.35651 2.7246 3.09868 3.47592 3.8552 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.093 2.45548 2.82532 3.20007 3.57716 3.95792 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.18997 2.55481 2.9263 3.30237 3.67981 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.28821 2.65366 3.02635 3.40385 3.78147 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.38589 2.75394 3.12766 3.50374 3.88333 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 2.12256 2.48548 2.85424 3.2282 3.60582 3.98591 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 2.21958 2.58389 2.95468 3.33014 3.70836 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 2.31763 2.68358 3.05638 3.43431 3.811 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.05411 2.41508 2.78499 3.15928 3.53803 3.9138 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.15058 2.51492 2.88506 3.25923 3.63788 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.24957 2.6153 2.98569 3.36133 3.74002 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.34725 2.71497 3.08818 3.46499 3.84283 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.44651 2.81492 3.18865 3.56696 3.94646 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.54413 2.91489 3.29014 3.66877 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.64409 3.01641 3.39306 3.77069 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.74588 3.11926 3.49432 3.87348 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.84553 3.21976 3.5967 3.97645 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.94578 3.32133 3.69921 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + NaN NaN NaN NaN 307.927 304.622 302.161 300.255 298.739 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 306.919 303.884 301.585 299.796 298.365 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 309.82 306.007 303.198 301.056 299.374 298.027 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 308.669 305.162 302.555 300.555 298.973 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 307.602 304.379 301.958 300.085 298.594 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 306.629 303.656 301.4 299.642 298.242 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 309.462 305.729 302.978 300.878 299.234 297.907 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 308.325 304.903 302.354 300.39 298.842 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 307.287 304.15 301.778 299.933 298.477 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 306.346 303.443 301.235 299.516 298.133 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 309.105 305.47 302.791 300.734 299.112 297.805 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN + NaN 308.012 304.68 302.187 300.257 298.73 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 307.007 303.938 301.615 299.801 298.372 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 309.969 306.095 303.243 301.077 299.376 298.03 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 308.805 305.243 302.608 300.588 298.988 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 307.715 304.448 302.015 300.124 298.619 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 306.744 303.734 301.455 299.677 298.264 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 305.838 303.061 300.94 299.268 297.93 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 305.023 302.443 300.456 298.881 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 304.254 301.853 299.99 298.514 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 303.529 301.296 299.563 298.166 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 302.874 300.791 299.154 297.834 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 302.262 300.311 298.768 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.1554 4.52942 4.90831 5.28291 5.66373 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.25539 4.63015 5.00769 5.3894 5.76615 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 3.98051 4.35514 4.73137 5.11023 5.49095 5.86992 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 4.08143 4.45701 4.83346 5.21457 5.59267 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 4.18242 4.55719 4.93548 5.31296 5.6933 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 4.28153 4.65784 5.03591 5.41309 5.79339 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 4.00874 4.38325 4.75803 5.13665 5.51628 5.89535 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 4.10813 4.48362 4.86099 5.239 5.61747 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 4.20808 4.5854 4.96184 5.34183 5.71959 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 4.30925 4.68511 5.0629 5.44213 5.82183 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 4.03557 4.41003 4.78658 5.16469 5.544 5.9242 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 4.13625 4.51124 4.8883 5.26683 5.64646 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 4.23707 4.61276 4.9903 5.36918 5.74902 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 3.96437 4.33823 4.71448 5.09238 5.47155 5.85158 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 4.06549 4.44183 4.81739 5.19463 5.5741 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 4.16639 4.54304 4.91835 5.29699 5.67669 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 4.26709 4.64287 5.02047 5.39947 5.77939 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 3.99546 4.36871 4.7447 5.12273 5.50196 5.88209 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 4.09529 4.4699 4.84675 5.22506 5.60455 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 
NaN NaN 4.19601 4.57147 4.94873 5.32753 5.70726 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 4.29724 4.67322 5.05101 5.43008 5.81005 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 4.02435 4.39874 4.77523 5.15338 5.53274 5.91291 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 4.12533 4.50036 4.87738 5.25587 5.63549 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN 269.155 270.601 271.84 272.887 273.814 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 269.561 270.941 272.124 273.153 274.036 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 268.371 269.945 271.272 272.41 273.398 274.253 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 268.822 270.324 271.591 272.689 273.631 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 269.248 270.675 271.896 272.942 273.857 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 269.646 271.017 272.186 273.19 274.072 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 268.489 270.039 271.341 272.465 273.437 274.286 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 268.928 270.407 271.661 272.738 273.672 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 269.346 270.765 271.964 273.004 273.901 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 269.752 271.101 272.253 273.25 274.121 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 268.608 270.133 271.424 272.533 273.494 274.336 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 269.046 270.503 271.741 272.805 273.729 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 269.467 270.856 272.041 273.064 273.957 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 268.287 269.863 271.194 272.333 273.318 274.178 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 268.745 270.255 271.522 272.61 273.558 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 269.175 270.614 271.827 272.878 273.793 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 269.584 270.955 272.125 273.135 274.017 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 268.43 269.977 271.287 272.41 273.382 274.234 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 268.87 270.349 271.605 272.684 273.621 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 269.291 270.705 271.909 272.947 273.85 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 269.693 271.045 272.2 273.199 274.071 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 268.544 270.074 271.369 272.479 273.442 274.284 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 268.978 270.439 271.68 272.747 273.673 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.02398 6.40977 6.79536 7.18251 7.56411 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.12829 6.51821 6.89963 7.28502 7.66695 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.84911 6.2345 6.62274 7.00313 7.38445 7.76936 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.95428 6.3361 6.72593 
7.10808 7.48869 7.87213 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.05417 6.43959 6.82608 7.208 7.59104 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.15664 6.54257 6.92579 7.31006 7.69441 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.87482 6.26154 6.64452 7.02891 7.41359 7.79787 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.97804 6.36299 6.74784 7.13259 7.51709 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.08191 6.46687 6.8516 7.23626 7.62062 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.18565 6.57063 6.95533 7.33992 7.72424 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.90437 6.28946 6.67446 7.05926 7.44363 7.82776 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.00832 6.39337 6.77831 7.16294 7.54733 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.11228 6.49728 6.8822 7.26684 7.65109 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.83128 6.21632 6.60119 6.98611 7.37061 7.75476 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.93543 6.32045 6.70535 7.09001 7.47436 7.85847 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.03955 6.42449 6.80927 7.1939 7.57822 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.14361 6.52856 6.91332 7.29787 7.68206 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.86274 6.24774 6.63266 7.01734 7.40175 7.78585 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.9669 6.35182 6.7367 7.12134 7.5057 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.07106 6.45605 6.84081 7.22535 7.60959 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.17523 6.56011 6.94484 7.32935 7.7135 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 5.89442 6.27938 6.66425 7.04894 7.43335 7.81739 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 5.99861 6.3836 6.76842 7.15301 7.53734 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.008 293.597 293.237 292.92 292.643 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.878 293.477 293.135 292.831 292.562 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.189 293.754 293.368 293.039 292.747 292.483 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.054 293.639 293.265 292.945 292.664 292.41 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.932 293.525 293.169 292.859 292.583 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.811 293.418 293.077 292.775 292.505 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.126 293.692 293.317 292.983 292.69 292.43 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.998 293.58 293.216 292.894 292.611 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.873 293.472 293.12 292.809 292.532 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.755 293.366 293.024 292.724 292.458 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.069 293.639 293.264 292.935 292.644 292.385 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
293.943 293.53 293.167 292.848 292.565 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.822 293.423 293.072 292.763 292.49 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.148 293.706 293.32 292.982 292.682 292.417 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.02 293.594 293.222 292.894 292.604 292.346 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.897 293.486 293.125 292.808 292.528 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.776 293.38 293.034 292.727 292.455 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.101 293.664 293.281 292.944 292.646 292.381 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.976 293.552 293.182 292.857 292.57 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.853 293.446 293.09 292.775 292.494 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.737 293.342 292.996 292.692 292.421 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 294.056 293.621 293.242 292.909 292.614 292.351 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 293.93 293.511 293.145 292.821 292.535 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.94928 8.32844 8.70843 9.08753 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.05126 8.42955 8.81023 9.18964 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.77045 8.15067 8.53128 8.91141 9.29186 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.87237 8.25302 8.63341 9.01395 9.39479 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.97522 8.35548 8.73588 9.12365 9.50161 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.07777 8.45765 8.83864 9.22227 9.6032 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.79883 8.17944 8.56004 8.9415 9.32154 9.70228 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.90132 8.28281 8.66454 9.0463 9.4246 9.80377 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.00511 8.38587 8.7662 9.14818 9.52685 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.10643 8.48891 8.86858 9.24729 9.62955 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.82827 8.20897 8.58944 8.96992 9.34966 9.72887 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.9309 8.31159 8.69194 9.07196 9.45171 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.03356 8.41418 8.79446 9.17442 9.554 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.13627 8.5168 8.89704 9.27691 9.65648 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.85838 8.23893 8.61943 8.99953 9.37934 9.75875 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.96105 8.34161 8.72202 9.10207 9.48179 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.06385 8.44431 8.82463 9.20459 9.58418 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.78604 8.16662 8.54699 8.92724 9.30714 9.68664 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.88885 8.26939 8.64978 9.02989 9.40961 9.78905 NaN NaN NaN + NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN 7.9917 8.37219 8.75249 9.13256 9.51222 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.09457 8.475 8.85525 9.23519 9.6148 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.81685 8.19742 8.57785 8.958 9.33784 9.71728 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.9198 8.30031 8.68067 9.06073 9.44046 9.81981 NaN NaN NaN NaN + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.693 278.165 278.599 279 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.816 278.276 278.701 279.094 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.441 277.932 278.384 278.8 279.185 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.569 278.05 278.492 278.9 279.277 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.698 278.167 278.597 279.004 279.371 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.821 278.279 278.703 279.096 279.459 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.453 277.942 278.391 278.805 279.186 279.541 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.584 278.062 278.501 278.906 279.279 279.627 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.714 278.179 278.608 279.005 279.37 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.837 278.294 278.712 279.097 279.458 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.476 277.96 278.404 278.813 279.192 279.543 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.606 278.079 278.513 278.914 279.285 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.735 278.197 278.621 279.012 279.375 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.86 278.31 278.724 279.109 279.465 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.501 277.981 278.423 278.829 279.204 279.552 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.634 278.103 278.532 278.928 279.296 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.76 278.217 278.639 279.028 279.389 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.396 277.885 278.332 278.743 279.123 279.476 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.529 278.005 278.442 278.844 279.217 279.564 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.658 278.123 278.55 278.944 279.309 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.784 278.238 278.655 279.041 279.399 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.422 277.906 278.349 278.758 279.136 279.486 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.552 278.024 278.458 278.858 279.228 279.572 NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.81866 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.92323 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0251 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.1253 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.84617 10.2278 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.94918 10.3305 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0522 10.4354 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.77359 10.1552 10.5365 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.87675 10.2582 10.6388 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.97985 10.3611 10.7416 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.083 10.464 10.8442 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.80451 10.186 10.5668 10.9469 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.90773 10.2891 10.6697 11.0496 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.011 10.3921 10.7725 11.1521 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.1142 10.4951 10.8753 11.2547 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.83583 10.2173 10.5981 10.9781 11.3573 + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.54 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.503 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.47 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.438 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.504 291.407 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.472 291.377 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.438 291.345 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.507 291.407 291.317 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.474 291.377 291.289 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN 291.442 291.347 291.262 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.411 291.319 291.236 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.481 291.382 291.291 291.21 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.448 291.35 291.263 291.185 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.417 291.323 291.238 291.16 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.387 291.294 291.21 291.137 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.456 291.357 291.268 291.187 291.114 + + + + + + 5000 + 5000 + + 1.87029 2.0055 2.20286 2.44731 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.89949 2.05306 2.2646 2.51958 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.93403 2.10461 2.32933 2.59381 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 1.9735 2.16002 2.39642 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.0176 2.21904 2.46661 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.06615 2.28159 2.53936 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.11908 2.34717 2.6143 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.17561 2.41564 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.23598 2.48712 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.2996 2.561 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.36621 2.63629 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.4356 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.50692 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.581 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.65721 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + 209.157 220.094 229.37 236.965 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 212.259 222.774 231.579 238.751 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 215.258 225.3 233.658 240.429 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 218.105 227.703 235.607 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 220.852 229.995 237.469 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN + 223.487 232.166 239.225 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 225.995 234.221 240.886 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 228.384 236.165 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 230.65 238.008 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 232.805 239.759 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 234.822 241.375 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 236.736 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 238.52 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 240.216 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 241.816 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN 2.58019 2.90372 3.24088 3.58967 3.94642 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 2.66545 2.99283 3.33426 3.68564 4.04517 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.43736 2.75107 3.08291 3.42714 3.78062 4.14057 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.51921 2.83878 3.17436 3.52132 3.87693 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.60326 2.92775 3.26687 3.61665 3.97451 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 2.68903 3.01802 3.36056 3.71222 4.07034 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.46034 2.77645 3.10946 3.4545 3.80732 4.16786 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.54362 2.86502 3.20179 3.54972 3.90421 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.62906 2.9538 3.29374 3.64452 4.00031 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 2.71488 3.04451 3.38732 3.7383 4.097 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 2.48558 2.80316 3.13587 3.48067 3.83435 4.19452 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 2.56936 2.89113 3.22784 3.57569 3.93119 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 2.65497 2.98092 3.32153 3.67312 4.02839 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 2.74092 3.0729 3.41678 3.77058 4.12603 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.51005 2.82968 3.16427 3.50971 3.86476 4.22459 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.5957 2.9196 3.25662 3.60504 
3.96134 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.68132 3.00965 3.35117 3.70216 4.05873 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.76907 3.10053 3.44432 3.79807 4.15728 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.85625 3.19197 3.53891 3.89417 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 2.94606 3.28527 3.635 3.99069 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 3.03803 3.38033 3.73014 4.08827 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 3.12888 3.47362 3.82647 4.18617 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 3.22066 3.5683 3.9233 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + NaN NaN NaN NaN 321.962 317.368 313.74 310.807 308.402 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 320.601 316.305 312.871 310.092 307.801 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 324.459 319.341 315.3 312.062 309.428 307.258 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 322.956 318.15 314.348 311.291 308.792 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 321.535 317.03 313.448 310.557 308.183 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 320.204 315.978 312.598 309.863 307.618 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 323.994 318.952 314.981 311.799 309.215 307.074 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 322.501 317.784 314.047 311.04 308.591 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 321.103 316.698 313.177 310.325 308.004 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN 319.808 315.662 312.348 309.665 307.447 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 323.517 318.581 314.697 311.572 309.022 306.91 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 322.067 317.451 313.787 310.829 308.41 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 320.704 316.378 312.92 310.111 307.829 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN 319.446 315.357 312.095 309.436 307.276 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 323.104 318.243 314.413 311.34 308.822 306.746 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 321.651 317.107 313.52 310.612 308.226 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 320.326 316.064 312.664 309.909 307.652 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 319.07 315.075 311.872 309.257 307.105 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 317.919 314.154 311.123 308.642 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 316.815 313.266 310.397 308.054 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 315.76 312.419 309.725 307.492 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 314.795 311.643 309.078 306.952 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + 313.881 310.899 308.463 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.38034 4.73555 
5.09812 5.45875 5.82724 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.47493 4.83162 5.19359 5.56158 5.92646 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 4.21529 4.56957 4.92845 5.29213 5.65977 6.02721 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 4.31029 4.6664 5.02619 5.39267 5.75814 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 4.4057 4.76187 5.124 5.48762 5.8557 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 4.49952 4.85799 5.22056 5.5843 5.95274 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 4.24151 4.596 4.95372 5.31745 5.68413 6.05173 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 4.33521 4.69149 5.05236 5.41597 5.78202 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 4.42976 4.78856 5.1491 5.51531 5.88099 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 4.52555 4.88376 5.24629 5.61215 5.98021 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 4.26657 4.62125 4.98083 5.34422 5.71069 6.07958 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 4.36151 4.71754 5.07825 5.44266 5.80991 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 4.45692 4.81441 5.17622 5.54142 5.90929 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 4.19934 4.55283 4.9116 5.27438 5.6404 6.00887 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 4.29455 4.65121 5.0102 5.37287 5.73965 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 4.38985 4.74767 5.10698 5.47162 5.83905 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 4.48518 4.843 5.20509 5.57058 5.93864 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 4.22844 4.58165 4.94039 5.30347 5.6697 6.03839 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 4.32262 4.67794 5.0382 5.40212 5.76905 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 4.41788 4.7748 5.1361 5.50103 5.86862 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 4.51384 4.87205 5.2345 5.60019 5.96841 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 4.25579 4.61029 4.96973 5.33309 5.69955 6.06835 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 4.3511 4.70705 5.06766 5.43193 5.7991 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN 260.719 262.747 264.508 266.012 267.353 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 261.287 263.233 264.919 266.401 267.68 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 259.638 261.829 263.706 265.333 266.757 268 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 260.263 262.364 264.163 265.738 267.099 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 260.859 262.866 264.602 266.105 267.43 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 261.418 263.351 265.02 266.466 267.746 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 259.808 261.97 263.815 265.423 266.827 268.06 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 260.417 262.49 264.273 265.818 267.169 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 260.999 262.998 
264.706 266.203 267.503 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN 261.568 263.473 265.121 266.559 267.824 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 259.973 262.106 263.937 265.526 266.915 268.139 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 260.583 262.628 264.389 265.917 267.257 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN 261.166 263.126 264.819 266.294 267.59 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 259.526 261.723 263.607 265.237 266.66 267.91 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 260.157 262.272 264.074 265.637 267.009 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 260.757 262.782 264.511 266.023 267.35 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN 261.328 263.265 264.938 266.396 267.677 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 259.718 261.881 263.738 265.348 266.754 267.993 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 260.329 262.404 264.192 265.743 267.101 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 260.915 262.909 264.628 266.123 267.435 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN 261.479 263.392 265.045 266.488 267.757 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 259.876 262.018 263.855 265.448 266.841 268.068 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN 260.482 262.536 264.303 265.837 267.181 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.16962 6.54801 6.92715 7.30826 7.68477 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.27165 6.65432 7.02978 7.40928 7.78621 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.99802 6.37568 6.7572 7.1314 7.5073 7.88727 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.10104 6.47535 6.85845 7.23459 7.61008 7.98866 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.19881 6.57705 6.95693 7.33324 7.711 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.29916 6.67804 7.05515 7.43372 7.81299 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.02296 6.40212 6.77828 7.15663 7.53581 7.91511 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.12404 6.5016 6.87986 7.25869 7.63788 8.01726 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.22582 6.60359 6.98195 7.36085 7.74003 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.32755 6.70555 7.08404 7.46306 7.84231 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.05186 6.42937 6.80764 7.18641 7.56535 7.94454 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.15369 6.53142 6.90979 7.28855 7.66764 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.25558 6.63346 7.01201 7.39097 7.77007 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.98034 6.35762 6.73561 7.11436 7.49331 7.87242 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.08233 6.45983 6.83803 7.21671 7.59568 7.9749 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.18437 6.56203 6.94029 7.31911 7.69817 NaN NaN NaN NaN NaN NaN NaN NaN + NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.28642 6.66432 7.04275 7.42164 7.80069 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.01129 6.38859 6.76667 7.14522 7.52412 7.90325 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.11331 6.49076 6.86899 7.24769 7.6267 8.00585 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.21539 6.59318 6.9715 7.35025 7.72925 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.31764 6.69549 7.07394 7.4528 7.83187 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 6.04251 6.41983 6.79792 7.17654 7.55548 7.93455 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 6.14461 6.52223 6.90045 7.2791 7.6581 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 300.298 299.533 298.856 298.253 297.721 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 300.067 299.319 298.673 298.093 297.576 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 300.648 299.842 299.122 298.499 297.94 297.433 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 300.406 299.634 298.934 298.326 297.787 297.296 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 300.186 299.43 298.76 298.17 297.64 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 299.968 299.235 298.591 298.014 297.497 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 300.555 299.751 299.048 298.421 297.86 297.357 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 300.323 299.548 298.864 298.255 297.711 297.223 NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 300.097 299.349 298.687 298.095 297.566 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 299.881 299.155 298.512 297.94 297.427 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 300.464 299.669 298.969 298.346 297.791 297.291 NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 300.235 299.468 298.788 298.184 297.643 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 300.014 299.27 298.612 298.027 297.503 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 300.614 299.799 299.081 298.444 297.874 297.364 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 300.38 299.593 298.897 298.279 297.727 297.231 NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 300.155 299.394 298.719 298.12 297.584 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 299.936 299.2 298.548 297.966 297.445 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 300.533 299.727 299.014 298.381 297.815 297.309 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 300.303 299.522 298.831 298.218 297.671 297.179 NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 300.079 299.325 298.657 298.062 297.528 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN 299.866 299.135 298.486 297.908 297.391 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 300.456 299.655 298.949 298.322 297.762 297.258 NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN 300.227 299.455 298.77 298.161 297.616 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN 8.06111 8.43415 8.80858 9.18254 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.16148 8.53378 8.90889 9.28339 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.88539 8.25919 8.63398 9.00877 9.38428 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.9855 8.35989 8.73458 9.10991 9.48583 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.08649 8.46076 8.8356 9.21827 9.59149 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.18742 8.56138 8.93687 9.3154 9.69205 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.91313 8.2874 8.66222 9.03828 9.41343 9.78971 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.01383 8.38917 8.76522 9.14163 9.5153 9.89003 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.11592 8.4906 8.86532 9.24231 9.61643 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.21545 8.59206 8.96626 9.34007 9.71783 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.94188 8.31629 8.69105 9.06625 9.44121 9.81589 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.04274 8.4173 8.79205 9.1669 9.54192 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.14363 8.51827 8.89309 9.26804 9.64299 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.87036 8.24463 8.61934 8.99424 9.36919 9.74421 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.97131 8.34562 8.72042 9.09529 9.47031 9.84528 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.0722 8.44671 8.82156 9.19648 9.57148 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.17328 8.54782 8.9227 9.29765 9.67265 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.90014 8.27437 8.64896 9.02391 9.39892 9.77391 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.00116 8.37551 8.75024 9.12516 9.50011 9.87512 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.10225 8.47672 8.8515 9.22651 9.60147 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.20343 8.57797 8.95282 9.32779 9.70284 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.93036 8.30463 8.67933 9.0542 9.42918 9.80412 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.03159 8.40594 8.78068 9.15557 9.53054 9.9055 NaN NaN NaN NaN + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 272.963 273.635 274.254 274.825 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 273.143 273.798 274.404 274.962 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 272.612 273.312 273.956 274.55 275.098 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 272.798 273.485 274.115 274.696 275.233 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 272.985 273.654 274.268 274.847 275.37 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 273.165 273.817 274.421 274.982 275.499 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 272.643 273.34 273.98 274.57 275.113 275.619 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 272.833 273.515 274.141 274.718 275.249 275.743 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN 273.021 273.684 274.295 274.861 275.381 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 273.198 273.851 274.447 274.996 275.51 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 272.684 273.376 274.009 274.593 275.133 275.634 NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 272.872 273.547 274.167 274.739 275.268 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 273.058 273.717 274.323 274.881 275.398 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 272.537 273.238 273.881 274.473 275.021 275.529 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 272.728 273.414 274.043 274.623 275.158 275.654 NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 272.917 273.586 274.2 274.766 275.292 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 273.098 273.752 274.354 274.909 275.423 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 272.58 273.277 273.916 274.504 275.047 275.55 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 272.771 273.451 274.075 274.65 275.182 275.676 NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 272.956 273.62 274.23 274.794 275.315 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 273.136 273.785 274.383 274.934 275.444 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 272.621 273.312 273.946 274.53 275.07 275.571 NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 272.807 273.482 274.103 274.675 275.204 275.694 NaN NaN NaN NaN + + + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.91742 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0215 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.1227 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.222 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.94466 10.3238 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.047 10.4259 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.1493 10.5301 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.87254 10.2517 10.6307 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.97505 10.354 10.7324 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0775 10.4563 10.8346 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.1799 10.5586 10.9367 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN 9.90331 10.2824 10.6609 11.0388 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0059 10.3848 10.7632 11.141 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.1085 10.4873 10.8654 11.243 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.2111 10.5896 10.9676 11.3451 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.93452 10.3136 10.692 11.0699 11.4471 + + + + 5000 + 5000 + + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.499 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.423 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.35 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.281 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.45 295.212 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.378 295.146 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.305 295.077 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.479 295.237 295.015 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.406 295.169 294.952 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.334 295.103 294.891 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.265 295.039 294.831 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.439 295.198 294.976 294.773 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.367 295.131 294.915 294.717 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.297 295.067 294.856 294.66 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.229 295.002 294.796 294.606 + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.401 295.161 294.942 294.74 294.553 + + + + + + 6.25064122361692 + 284.478219237802 + + + 6.09198716843552 + 284.112220821742 + + + 6.12972258765456 + 284.225258143 + + + 6.12130212971328 + 284.052669982142 + + + 5.95823357350295 + 283.758000616276 + + + 6.00440443416997 + 283.869081343775 + + + 6.05770703720214 + 284.027099532072 + + + 5.98094371672299 + 283.807419436709 + + + 6.17101781342723 + 284.324458406957 + + + 6.20897376906843 + 284.423103788365 + + + 6.30264997448819 + 284.590440102053 + + + 6.02838543893148 + 283.819400338538 + + + 6.22678567178944 + 284.322311115097 + + + + + + + 3.500058 + 3.354197 + 0.022900 + 0.000000 + 0.000000 + 8.675177 + 0.268831 + 2.812220 + 83.179593 + 0.992827 + 0.571295 + 0.275278 + 0.038401 + 3.186380 + 
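<!-- Reading guide (hedged annotation): the thirteen (zenith, azimuth) pairs
     above appear to be the per-band mean viewing incidence angles of the tile,
     and the scalar list continuing below looks like the L2A image-content
     quality indicators, ending with the AOT retrieval method (CAMS), the
     granule mean AOT and water vapour, and the ozone value sourced from
     AUX_ECMWFT. -->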
0.000000 + 0.0 + 0.0 + 0.0 + CAMS + 0.392921 + 1.224094 + AUX_ECMWFT + 357.927923 + + + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_DETFOO_B01.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_QUALIT_B01.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_DETFOO_B02.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_QUALIT_B02.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_DETFOO_B03.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_QUALIT_B03.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_DETFOO_B04.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_QUALIT_B04.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_DETFOO_B05.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_QUALIT_B05.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_DETFOO_B06.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_QUALIT_B06.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_DETFOO_B07.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_QUALIT_B07.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_DETFOO_B08.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_QUALIT_B08.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_DETFOO_B8A.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_QUALIT_B8A.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_DETFOO_B09.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_QUALIT_B09.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_DETFOO_B10.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_QUALIT_B10.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_DETFOO_B11.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_QUALIT_B11.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_DETFOO_B12.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_QUALIT_B12.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_CLASSI_B00.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_CLDPRB_20m.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_SNWPRB_20m.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_CLDPRB_60m.jp2 + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/MSK_SNWPRB_60m.jp2 + + GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA/T50TMK_20240411T030521_PVI.jp2 + + +""" mtd_l1c_old_xml = """ @@ -869,15 +5781,17 @@ def setup_method(self): """Set up the test case.""" from satpy.readers.msi_safe import SAFEMSIMDXML, SAFEMSITileMDXML filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A") - self.xml_tile_fh = SAFEMSITileMDXML(BytesIO(mtd_tile_xml), filename_info, mock.MagicMock()) - self.old_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_old_xml), filename_info, mock.MagicMock()) - self.xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), filename_info, mock.MagicMock(), mask_saturated=True) + self.l1c_xml_tile_fh = SAFEMSITileMDXML(BytesIO(mtd_l1c_tile_xml), filename_info, mock.MagicMock()) + self.l2a_xml_tile_fh = SAFEMSITileMDXML(BytesIO(mtd_l2a_tile_xml), filename_info, mock.MagicMock()) + self.l1c_old_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_old_xml), filename_info, mock.MagicMock()) + self.l1c_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), filename_info, mock.MagicMock(), mask_saturated=True) + # self.l2a_xml_fh = SAFEMSIMDXML(StringIO(mtd_l2a_xml), filename_info, mock.MagicMock(), mask_saturated=True) def test_satellite_zenith_array(self): """Test reading the satellite zenith 
array.""" info = dict(xml_tag="Viewing_Incidence_Angles_Grids", xml_item="Zenith") - expected_data = np.array([[11.7128, 11.18397802, 10.27667671, 9.35384969, 8.42850504, + expected_data_l1c = np.array([[11.7128, 11.18397802, 10.27667671, 9.35384969, 8.42850504, 7.55445611, 6.65475545, 5.66517232, 4.75893757, 4.04976844], [11.88606009, 10.9799713, 10.07083278, 9.14571825, 8.22607131, 7.35181457, 6.44647222, 5.46144173, 4.56625547, 3.86638233], @@ -897,17 +5811,41 @@ def test_satellite_zenith_array(self): 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837]]) - res = self.xml_tile_fh.get_dataset(make_dataid(name="satellite_zenith_angle", + expected_data_l2a = np.array([[ 0.823021, 1.57224058, 2.40477933, 3.33521808, 4.29339294, 5.21712818, + 6.13685593, 7.07123343, 7.9968313, 8.91635508], + [ 0.99274285, 1.78598401, 2.6537931, 3.59082788, 4.54147721, 5.47021744, + 6.39166677, 7.31780035, 8.24199199, 9.16344605], + [ 1.19102539, 2.01811264, 2.90509575, 3.843318, 4.79014776, 5.71596442, + 6.63842562, 7.56669001, 8.49008303, 9.41563303], + [ 1.40805629, 2.22663763, 3.15710255, 4.11212841, 5.0389507, 5.95589868, + 6.88741709, 7.81585111, 8.74029188, 9.66309923], + [ 1.6364984, 2.47693166, 3.40635124, 4.36058177, 5.28712709, 6.20731242, + 7.1382703, 8.0645253, 8.98640945, 9.91275701], + [ 1.86939307, 2.72734103, 3.6589573, 4.61010637, 5.53711387, 6.45878796, + 7.38909762, 8.31322866, 9.23367977, 10.16133622], + [ 2.05092764, 2.97881829, 3.93023614, 4.86081228, 5.78239877, 6.71040601, + 7.63930914, 8.5619639, 9.48322901, 10.40935204], + [ 2.30188982, 3.2312058, 4.18234794, 5.11071768, 6.032489, 6.96190824, + 7.88904158, 8.81055152, 9.73186255, 10.65685334], + [ 2.55171923, 3.48266186, 4.4345628, 5.36132917, 6.28264069, 7.2133112, + 8.13807392, 9.05916615, 9.98720691, 10.90435203], + [ 2.80292458, 3.74216648, 4.68491268, 5.61065107, 6.53464742, 7.46456958, + 8.38738208, 9.3076386, 10.23571319, 11.15166785]]) + res1 = self.l1c_xml_tile_fh.get_dataset(make_dataid(name="satellite_zenith_angle", + resolution=60), + info)[::200, ::200] + res2 = self.l2a_xml_tile_fh.get_dataset(make_dataid(name="satellite_zenith_angle_l2a", resolution=60), info)[::200, ::200] - np.testing.assert_allclose(res, expected_data) + np.testing.assert_allclose(res1, expected_data_l1c) + np.testing.assert_allclose(res2, expected_data_l2a) def test_old_xml_calibration(self): """Test the calibration of older data formats (no offset).""" fake_data = xr.DataArray([[[0, 1, 2, 3], [4, 1000, 65534, 65535]]], dims=["band", "x", "y"]) - result = self.old_xml_fh.calibrate_to_reflectances(fake_data, "B01") + result = self.l1c_old_xml_fh.calibrate_to_reflectances(fake_data, "B01") np.testing.assert_allclose(result, [[[np.nan, 0.01, 0.02, 0.03], [0.04, 10, 655.34, np.inf]]]) @@ -916,7 +5854,7 @@ def test_xml_calibration(self): fake_data = xr.DataArray([[[0, 1, 2, 3], [4, 1000, 65534, 65535]]], dims=["band", "x", "y"]) - result = self.xml_fh.calibrate_to_reflectances(fake_data, "B01") + result = self.l1c_xml_fh.calibrate_to_reflectances(fake_data, "B01") np.testing.assert_allclose(result, [[[np.nan, 0.01 - 10, 0.02 - 10, 0.03 - 10], [0.04 - 10, 0, 655.34 - 10, np.inf]]]) @@ -925,7 +5863,7 @@ def test_xml_calibration_to_counts(self): fake_data = xr.DataArray([[[0, 1, 2, 3], [4, 1000, 65534, 65535]]], dims=["band", "x", "y"]) - result = self.xml_fh._sanitize_data(fake_data) + result = self.l1c_xml_fh._sanitize_data(fake_data) np.testing.assert_allclose(result, 
[[[np.nan, 1, 2, 3], [4, 1000, 65534, np.inf]]]) @@ -933,12 +5871,12 @@ def test_xml_calibration_unmasked_saturated(self): """Test the calibration with radiometric offset but unmasked saturated pixels.""" from satpy.readers.msi_safe import SAFEMSIMDXML filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A") - self.xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), filename_info, mock.MagicMock(), mask_saturated=False) + self.l1c_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), filename_info, mock.MagicMock(), mask_saturated=False) fake_data = xr.DataArray([[[0, 1, 2, 3], [4, 1000, 65534, 65535]]], dims=["band", "x", "y"]) - result = self.xml_fh.calibrate_to_reflectances(fake_data, "B01") + result = self.l1c_xml_fh.calibrate_to_reflectances(fake_data, "B01") np.testing.assert_allclose(result, [[[np.nan, 0.01 - 10, 0.02 - 10, 0.03 - 10], [0.04 - 10, 0, 655.34 - 10, 655.35 - 10]]]) @@ -947,7 +5885,7 @@ def test_xml_calibration_with_different_offset(self): fake_data = xr.DataArray([[[0, 1, 2, 3], [4, 1000, 65534, 65535]]], dims=["band", "x", "y"]) - result = self.xml_fh.calibrate_to_reflectances(fake_data, "B10") + result = self.l1c_xml_fh.calibrate_to_reflectances(fake_data, "B10") np.testing.assert_allclose(result, [[[np.nan, 0.01 - 20, 0.02 - 20, 0.03 - 20], [0.04 - 20, -10, 655.34 - 20, np.inf]]]) @@ -956,7 +5894,7 @@ def test_xml_calibration_to_radiance(self): fake_data = xr.DataArray([[[0, 1, 2, 3], [4, 1000, 65534, 65535]]], dims=["band", "x", "y"]) - result = self.xml_fh.calibrate_to_radiances(fake_data, "B01") + result = self.l1c_xml_fh.calibrate_to_radiances(fake_data, "B01") expected = np.array([[[np.nan, -251.584265, -251.332429, -251.080593], [-250.828757, 0., 16251.99095, np.inf]]]) np.testing.assert_allclose(result, expected) @@ -967,7 +5905,7 @@ def test_xml_navigation(self): crs = CRS("EPSG:32616") dsid = make_dataid(name="B01", resolution=60) - result = self.xml_tile_fh.get_area_def(dsid) + result = self.l1c_xml_tile_fh.get_area_def(dsid) area_extents = (499980.0, 3590220.0, 609780.0, 3700020.0) assert result.crs == crs @@ -982,7 +5920,7 @@ def setup_method(self): from satpy.readers.msi_safe import SAFEMSITileMDXML self.filename_info = dict(observation_time=None, fmission_id="S2A", band_name="B01", dtile_number=None) self.fake_data = xr.Dataset({"band_data": xr.DataArray([[[0, 1], [65534, 65535]]], dims=["band", "x", "y"])}) - self.tile_mda = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(mtd_tile_xml), + self.tile_mda = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(mtd_l1c_tile_xml), self.filename_info, mock.MagicMock()) @pytest.mark.parametrize(("mask_saturated", "calibration", "expected"), From 9d9802ab3986d9c03280eaaf351589b28488265e Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Thu, 18 Apr 2024 23:58:38 +0800 Subject: [PATCH 1265/1416] Update test_msi_safe.py --- satpy/tests/reader_tests/test_msi_safe.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 8cd5852ee9..fc5638e7a7 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -5812,22 +5812,22 @@ def test_satellite_zenith_array(self): [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837]]) expected_data_l2a = np.array([[ 0.823021, 1.57224058, 2.40477933, 3.33521808, 4.29339294, 5.21712818, - 6.13685593, 7.07123343, 7.9968313, 8.91635508], - [ 0.99274285, 1.78598401, 2.6537931, 3.59082788, 
4.54147721, 5.47021744, + 6.13685593, 7.07123343, 7.9968313, 8.91635508], + [ 0.99274285, 1.78598401, 2.6537931, 3.59082788, 4.54147721, 5.47021744, 6.39166677, 7.31780035, 8.24199199, 9.16344605], [ 1.19102539, 2.01811264, 2.90509575, 3.843318, 4.79014776, 5.71596442, 6.63842562, 7.56669001, 8.49008303, 9.41563303], [ 1.40805629, 2.22663763, 3.15710255, 4.11212841, 5.0389507, 5.95589868, 6.88741709, 7.81585111, 8.74029188, 9.66309923], - [ 1.6364984, 2.47693166, 3.40635124, 4.36058177, 5.28712709, 6.20731242, - 7.1382703, 8.0645253, 8.98640945, 9.91275701], - [ 1.86939307, 2.72734103, 3.6589573, 4.61010637, 5.53711387, 6.45878796, + [ 1.6364984, 2.47693166, 3.40635124, 4.36058177, 5.28712709, 6.20731242, + 7.1382703, 8.0645253, 8.98640945, 9.91275701], + [ 1.86939307, 2.72734103, 3.6589573, 4.61010637, 5.53711387, 6.45878796, 7.38909762, 8.31322866, 9.23367977, 10.16133622], [ 2.05092764, 2.97881829, 3.93023614, 4.86081228, 5.78239877, 6.71040601, - 7.63930914, 8.5619639, 9.48322901, 10.40935204], - [ 2.30188982, 3.2312058, 4.18234794, 5.11071768, 6.032489, 6.96190824, + 7.63930914, 8.5619639, 9.48322901, 10.40935204], + [ 2.30188982, 3.2312058, 4.18234794, 5.11071768, 6.032489, 6.96190824, 7.88904158, 8.81055152, 9.73186255, 10.65685334], - [ 2.55171923, 3.48266186, 4.4345628, 5.36132917, 6.28264069, 7.2133112, + [ 2.55171923, 3.48266186, 4.4345628, 5.36132917, 6.28264069, 7.2133112, 8.13807392, 9.05916615, 9.98720691, 10.90435203], [ 2.80292458, 3.74216648, 4.68491268, 5.61065107, 6.53464742, 7.46456958, 8.38738208, 9.3076386, 10.23571319, 11.15166785]]) From 2dcb4e9f4b3f403fbd4b54de64e4b7c75a673f0a Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Fri, 19 Apr 2024 18:31:29 +0800 Subject: [PATCH 1266/1416] Update msi_safe.yaml --- satpy/etc/readers/msi_safe.yaml | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/satpy/etc/readers/msi_safe.yaml b/satpy/etc/readers/msi_safe.yaml index 8ac40f2725..d877a1ba5f 100644 --- a/satpy/etc/readers/msi_safe.yaml +++ b/satpy/etc/readers/msi_safe.yaml @@ -11,34 +11,34 @@ reader: file_types: l1c_safe_granule: - file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C - file_patterns: ['{fmission_id:3s}_MSIL1C_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}.jp2'] + file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSI{process_level:3s} + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/{process_level:3s}_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}.jp2'] requires: [l1c_safe_metadata, l1c_safe_tile_metadata] l1c_safe_tile_metadata: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSITileMDXML - file_patterns: ['{fmission_id:3s}_MSIL1C_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml'] + file_patterns: 
['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/{process_level:3s}_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml'] l1c_safe_metadata: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIMDXML - file_patterns: ['{fmission_id:3s}_MSIL1C_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSIL1C.xml'] + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSI{process_level:3s}.xml'] l2a_safe_granule_10m: - file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C - file_patterns: ['{fmission_id:3s}_MSIL2A_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R10m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_10m.jp2'] + file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSI{process_level:3s} + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/{process_level:3s}_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R10m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_10m.jp2'] requires: [l2a_safe_metadata, l2a_safe_tile_metadata] l2a_safe_granule_20m: - file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C - file_patterns: ['{fmission_id:3s}_MSIL2A_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R20m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_20m.jp2'] + file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSI{process_level:3s} + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/{process_level:3s}_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R20m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_20m.jp2'] requires: [l2a_safe_metadata, l2a_safe_tile_metadata] l2a_safe_granule_60m: - file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C - file_patterns: ['{fmission_id:3s}_MSIL2A_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R60m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_60m.jp2'] + file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSI{process_level:3s} + file_patterns: 
['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/{process_level:3s}_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R60m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_60m.jp2'] requires: [l2a_safe_metadata, l2a_safe_tile_metadata] l2a_safe_tile_metadata: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSITileMDXML - file_patterns: ['{fmission_id:3s}_MSIL2A_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml'] + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/{process_level:3s}_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml'] l2a_safe_metadata: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIMDXML - file_patterns: ['{fmission_id:3s}_MSIL2A_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSIL2A.xml'] + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSI{process_level:3s}.xml'] datasets: B01: From 8e98a498a1f23c369e8e0c2628c1a21029e20c0b Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Fri, 19 Apr 2024 18:32:25 +0800 Subject: [PATCH 1267/1416] Update msi_safe.py --- satpy/readers/msi_safe.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index eaf98d6a38..7bf7b537a6 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -69,10 +69,10 @@ def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, mask_s self._start_time = filename_info["observation_time"] self._end_time = filename_info["observation_time"] self._channel = filename_info["band_name"] + self.process_level = filename_info["process_level"] self._tile_mda = tile_mda self._mda = mda self.platform_name = PLATFORMS[filename_info["fmission_id"]] - self.process_level = "L2A" if "MSIL2A" in filename else "L1C" def get_dataset(self, key, info): """Load a dataset.""" @@ -135,9 +135,9 @@ def __init__(self, filename, filename_info, filetype_info, mask_saturated=True): self._end_time = filename_info["observation_time"] self.root = ET.parse(self.filename) self.tile = filename_info["dtile_number"] + self.process_level = filename_info["process_level"] self.platform_name = PLATFORMS[filename_info["fmission_id"]] self.mask_saturated = mask_saturated - self.process_level = "L2A" if "MSIL2A" in filename else "L1C" import bottleneck # noqa import geotiepoints # noqa From 3211d769518e7cb26ff452b47833272b4ab7b205 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Fri, 19 Apr 2024 18:56:17 +0800 Subject: [PATCH 1268/1416] Update test_msi_safe.py --- satpy/tests/reader_tests/test_msi_safe.py | 5013 +-------------------- 1 file changed, 71 insertions(+), 4942 deletions(-) diff --git 
From 3211d769518e7cb26ff452b47833272b4ab7b205 Mon Sep 17 00:00:00 2001
From: yukaribbba
Date: Fri, 19 Apr 2024 18:56:17 +0800
Subject: [PATCH 1268/1416] Update test_msi_safe.py

---
 satpy/tests/reader_tests/test_msi_safe.py | 5013 +--------------------
 1 file changed, 71 insertions(+), 4942 deletions(-)

diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py
index fc5638e7a7..c7cf8c1e28 100644
--- a/satpy/tests/reader_tests/test_msi_safe.py
+++ b/satpy/tests/reader_tests/test_msi_safe.py
@@ -575,4919 +575,6 @@
 """  # noqa

-mtd_l2a_tile_xml = b"""
[~4,900 further deleted lines elided: the inline mtd_l2a_tile_xml fixture, an L2A MTD_TL.xml tile-metadata document for tile S2A_OPER_MSI_L2A_TL_2APS_20240411T080950_A045975_T50TMK_N05.10 (datastrip S2A_OPER_MSI_L2A_DS_2APS_20240411T080950_S20240411T030632_N05.10, sensing time 2024-04-11T03:16:45.260288Z, NOMINAL quality, processing baseline 2APS). It carries the tile geocoding (WGS84 / UTM zone 50N, EPSG:32650; 10980x10980, 5490x5490 and 1830x1830 grids at 10/20/60 m resolution with upper-left corner 399960 / 4500000), sun zenith/azimuth angle grids on a 5000 m column/row step, mean sun angles (zenith 34.2508883033046, azimuth 153.732570441329), and per-band, per-detector viewing incidence zenith/azimuth angle grids whose value tables are predominantly NaN.]
10.1104 10.4908 10.8705 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.83236 10.2132 10.5935 10.973 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.93543 10.3162 10.6962 11.0755 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0385 10.419 10.7988 11.178 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.76048 10.1415 10.5219 10.9015 11.2805 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.8637 10.2446 10.6247 11.0041 11.3829 - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.871 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.821 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.775 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.879 292.731 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.832 292.687 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.784 292.643 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.739 292.601 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.843 292.695 292.56 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.796 292.652 292.521 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.751 292.61 292.482 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.707 292.57 292.444 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.812 292.664 292.53 292.408 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.766 292.623 292.492 292.373 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.722 292.581 292.453 292.337 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.829 292.677 292.541 292.417 292.304 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.784 292.638 292.503 292.38 292.269 - - - - - - 5000 - 5000 - - 1.56418 1.7238 1.95023 2.22302 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.59921 1.77927 2.02001 2.30248 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.64036 1.83858 2.09237 2.38358 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.68673 1.90188 2.16697 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.73826 1.96893 2.24457 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.79465 2.03922 2.3243 NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.85529 2.11248 2.40619 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.92006 2.18857 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.98817 2.26722 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.05985 2.34849 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.13391 2.43022 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.21081 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.28925 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.37021 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - 5000 - 5000 - - 212.152 224.705 234.751 242.531 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 215.781 227.666 237.053 244.303 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 219.241 230.416 239.192 245.95 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 222.477 232.991 241.168 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 225.551 235.405 243.032 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 228.451 237.664 244.772 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 231.172 239.77 246.395 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 233.714 241.732 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 236.098 243.575 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 238.324 245.295 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 240.386 246.879 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 242.311 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 244.085 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 245.753 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - - - 5000 - 5000 - - NaN NaN NaN NaN 2.40295 2.74686 3.10058 3.46299 3.8311 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 2.49413 2.8407 3.19795 3.5624 3.93286 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 2.24935 2.58532 2.93538 3.2946 3.66048 4.03079 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 2.3376 2.67833 3.03122 3.39239 3.75982 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 2.42791 2.7724 3.12787 3.49114 3.86028 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 2.51944 2.86736 3.22549 3.59004 3.95886 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 2.27427 2.61247 2.96345 3.3232 3.68815 4.05891 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 2.36407 2.70622 3.05997 3.42187 3.78801 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 2.45548 2.79987 3.156 3.52014 3.88692 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 2.54709 2.89533 3.25339 3.61692 3.9862 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 2.30163 2.64076 2.99104 3.35034 3.71608 4.08635 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 2.39159 2.73378 3.08723 3.44885 3.81579 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 2.48317 2.82848 3.18494 3.54964 3.91573 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.23876 2.57467 2.92501 3.28395 3.65027 4.01604 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.32768 2.66866 3.02071 3.38043 3.74737 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.41976 2.76383 3.11715 3.47907 3.84666 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.51108 2.85842 3.2156 3.57957 3.9469 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.60439 2.95394 3.31255 3.67852 4.04795 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.69663 3.04943 3.41044 3.77747 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.79135 3.14687 3.51004 3.8768 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.88814 3.24581 3.60819 3.97695 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.9833 3.34266 3.70763 4.0775 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 3.07929 3.4408 3.80732 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - 5000 - 5000 - - NaN NaN NaN NaN 316.046 311.889 308.695 306.162 304.117 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 314.798 310.942 307.938 305.553 303.614 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 318.359 313.655 310.056 307.238 304.986 303.155 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 316.958 312.582 309.221 306.575 304.446 NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 315.652 311.585 308.436 305.945 303.93 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 314.435 310.651 307.699 305.354 303.456 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 317.925 313.306 309.776 307.011 304.802 302.996 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 316.542 312.256 308.956 306.356 304.274 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 315.254 311.289 308.201 305.749 303.779 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 314.08 310.376 307.483 305.183 303.308 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 317.487 312.972 309.527 306.815 304.64 302.861 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 316.144 311.962 308.734 306.179 304.122 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 314.897 311.012 307.982 305.566 303.632 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 318.526 313.757 310.109 307.267 304.991 303.168 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 317.105 312.672 309.283 306.62 304.473 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 315.77 311.664 308.504 305.993 303.965 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 314.558 310.734 307.76 305.397 303.487 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 313.423 309.87 307.082 304.843 303.024 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 312.389 309.057 306.433 304.321 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 311.404 308.288 305.816 303.825 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 310.471 307.553 305.239 303.349 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 309.621 306.882 304.691 302.898 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 308.822 306.244 304.17 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.2605 4.62548 4.99656 5.36443 5.73941 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.35787 4.72387 5.09397 5.46918 5.84033 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 4.09031 4.4551 4.82306 5.19466 5.5692 5.94262 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 4.18842 4.55462 4.92307 5.29713 5.66919 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 4.28671 4.6525 5.02302 5.39393 5.76847 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 4.38322 4.75102 5.12167 5.4924 5.86705 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN 4.11755 4.48245 4.84906 5.22055 5.59402 5.96761 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN 4.21425 4.58044 4.9499 5.32099 5.69363 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN 4.31151 4.67997 5.04882 5.42229 5.79427 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 4.04625 4.41021 4.77757 5.148 5.52081 5.89503 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 4.14357 4.50842 4.87683 5.24795 5.6212 5.99605 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 4.24142 4.60735 4.97661 5.34838 5.72207 NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 4.33968 4.70657 5.07655 5.44893 5.82314 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 4.07421 4.43823 4.80613 5.17686 5.54977 5.92428 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 4.17254 4.53936 4.90704 5.27724 5.65066 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 4.27072 4.63826 5.00592 5.37791 5.75181 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 4.3688 4.73597 5.10615 5.47871 5.85299 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 4.10435 4.46796 4.83567 5.20655 5.5796 5.95431 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 4.20149 4.56675 4.93565 5.30711 5.68064 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 4.2995 4.66607 5.03572 5.40791 5.78187 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 4.39825 4.76568 5.1361 5.5088 5.88321 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 4.13251 4.49729 4.86558 5.23668 5.60994 5.98472 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 4.23072 4.59658 4.96576 5.33747 5.71115 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN 264.671 266.44 267.968 269.267 270.42 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 265.167 266.865 268.325 269.601 270.697 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 263.721 265.642 267.273 268.678 269.904 270.971 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 264.267 266.105 267.668 269.028 270.199 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 264.79 266.542 268.048 269.343 270.479 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 265.279 266.963 268.407 269.652 270.751 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN 263.867 265.76 267.363 268.754 269.962 271.018 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN 264.401 266.214 267.762 269.094 270.254 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN 264.914 266.656 268.133 269.423 270.54 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 263.453 265.407 267.066 268.493 269.732 270.817 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 264.012 265.88 267.471 268.841 270.034 271.082 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 264.548 266.332 267.857 269.177 270.329 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 265.058 266.768 268.234 269.504 270.613 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 263.622 265.548 267.184 268.591 269.815 270.888 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 264.175 266.023 267.588 268.938 270.118 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 264.702 266.469 267.967 269.27 270.407 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 265.202 266.888 268.334 269.589 270.688 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 263.791 265.684 267.298 268.689 269.897 270.959 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 264.327 266.141 267.691 269.029 270.195 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 264.842 266.58 268.067 269.355 270.48 NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 265.333 266.998 268.429 269.67 270.757 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 263.93 265.804 267.4 268.775 269.971 271.021 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 264.462 266.254 267.785 269.108 270.262 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.09385 6.47612 6.85877 7.24302 7.62227 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.1971 6.58356 6.9623 7.34475 7.72437 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.92056 6.30221 6.68737 7.06479 7.4436 7.82623 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.02475 6.40297 6.78964 7.16889 7.54713 7.92825 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.1236 6.50566 6.88894 7.2683 7.64883 NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.22504 6.60762 6.98803 7.36957 7.75153 NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.94585 6.32906 6.70878 7.09036 7.47243 7.85436 NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.04809 6.42958 6.81131 7.19324 7.57524 NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.151 6.53251 6.91423 7.29619 7.67815 NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.25374 6.63546 7.01726 7.39922 7.78111 NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.97515 6.35668 6.73846 7.12039 7.50219 7.88404 NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.07808 6.45964 6.84149 7.22339 7.60531 NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.18107 6.56273 6.94463 7.32658 7.70837 NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.90281 6.28418 6.66578 7.04778 7.42968 7.81147 NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.00595 6.38738 6.76911 7.15098 7.53281 7.9146 NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.10906 6.49057 6.87229 7.25419 7.63601 NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.21222 6.59384 6.97556 7.35743 7.7392 NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.93404 6.31535 6.69703 7.07886 7.46071 7.84248 NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.03716 6.4186 6.80035 7.18216 7.56396 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.14042 6.52194 6.90363 7.28544 7.66726 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.2436 6.62519 7.007 7.38885 7.77056 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 5.96555 6.3469 6.72853 7.11033 7.49215 7.87387 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 6.06872 6.45019 6.8319 7.21373 7.59557 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.453 296.846 296.312 295.838 295.419 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.269 296.675 296.164 295.706 295.301 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.731 297.087 296.515 296.025 295.587 295.189 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.535 296.922 296.367 295.888 295.463 295.078 NaN NaN NaN NaN - NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.358 296.758 296.227 295.762 295.347 NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.184 296.602 296.093 295.638 295.232 NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.65 297.009 296.452 295.957 295.517 295.122 NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.464 296.849 296.307 295.825 295.398 NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.284 296.688 296.164 295.698 295.284 NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.109 296.535 296.027 295.576 295.172 NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.575 296.943 296.386 295.893 295.455 295.065 NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.391 296.779 296.242 295.766 295.341 NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.214 296.624 296.103 295.641 295.228 NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.693 297.044 296.473 295.968 295.52 295.121 NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.507 296.879 296.327 295.838 295.404 295.015 NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.326 296.72 296.187 295.713 295.291 NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.152 296.567 296.049 295.59 295.181 NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.627 296.983 296.417 295.917 295.473 295.075 NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.443 296.822 296.274 295.789 295.356 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.266 296.664 296.133 295.664 295.246 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.093 296.513 296.001 295.545 295.137 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 297.566 296.928 296.365 295.868 295.426 295.032 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 297.381 296.764 296.222 295.741 295.313 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.00067 8.37694 8.75434 9.13104 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.10198 8.47744 8.85542 9.23257 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.82339 8.20052 8.57839 8.95602 9.33418 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.92436 8.3021 8.67984 9.05795 9.4364 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.02637 8.40384 8.78158 9.16701 9.54274 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.12815 8.50523 8.88365 9.26488 9.64399 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.8514 8.22905 8.60695 8.98582 9.36353 9.7422 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.95307 8.33169 8.71073 9.08991 9.4661 9.8432 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.05611 8.43396 8.81162 9.19132 9.5679 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.15651 8.53629 8.91335 9.28976 9.66995 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.88054 8.25829 8.63606 9.01403 9.39156 9.76864 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.9823 8.36008 8.73783 9.11545 9.49299 
NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.08415 8.46199 8.8397 9.21727 9.59466 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.80846 8.18606 8.56381 8.94154 9.31913 9.6966 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.91026 8.2879 8.66577 9.04343 9.42098 9.79826 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.01217 8.38988 8.76765 9.14525 9.52277 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.11409 8.49177 8.86956 9.24721 9.62464 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.8385 8.21611 8.59377 8.97153 9.3491 9.72648 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.94048 8.3181 8.69582 9.0735 9.45099 9.82837 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.04249 8.42015 8.79787 9.17557 9.55302 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.14455 8.52223 8.89995 9.27755 9.65502 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.86904 8.24665 8.62437 9.00204 9.3796 9.75693 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.97115 8.34876 8.72649 9.10415 9.48164 NaN NaN NaN NaN NaN - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.217 275.794 276.324 276.814 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.368 275.931 276.451 276.931 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 274.911 275.514 276.067 276.576 277.045 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.072 275.661 276.2 276.698 277.16 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.229 275.803 276.332 276.829 277.277 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.384 275.944 276.461 276.941 277.384 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 274.935 275.533 276.081 276.587 277.055 277.489 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.096 275.681 276.219 276.715 277.169 277.592 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.257 275.826 276.35 276.835 277.281 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.409 275.968 276.479 276.95 277.392 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 274.966 275.559 276.103 276.605 277.068 277.496 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.128 275.708 276.239 276.728 277.181 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.286 275.85 276.37 276.85 277.294 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 274.836 275.44 275.992 276.5 276.969 277.403 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.004 275.591 276.13 276.625 277.085 277.512 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.163 275.737 276.265 276.751 277.201 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.321 275.881 276.396 276.871 277.312 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 274.875 275.472 276.019 276.524 276.99 277.422 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.038 275.621 276.156 276.649 277.105 277.528 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.197 275.766 276.289 
276.771 277.218 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.351 275.907 276.418 276.891 277.329 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 274.908 275.501 276.045 276.545 277.008 277.437 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 275.068 275.648 276.179 276.668 277.121 NaN NaN NaN NaN NaN - - - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.86676 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.97122 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0728 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.79155 10.1725 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.89419 10.2746 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.99681 10.377 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0996 10.4816 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.82179 10.2022 10.5825 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.92464 10.3049 10.6846 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0274 10.4076 10.7871 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.1303 10.5102 10.8894 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.85262 10.233 10.6128 10.9919 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.9556 10.3358 10.7154 11.0943 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0585 10.4385 10.8179 11.1967 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.78087 10.1614 10.5413 10.9205 11.299 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.88398 10.2643 10.644 11.023 11.4013 - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.708 - NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.65 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.595 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.722 293.543 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.666 293.491 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.61 293.44 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.557 293.39 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.683 293.504 293.341 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.627 293.454 293.295 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.574 293.404 293.249 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.522 293.356 293.205 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.648 293.471 293.309 293.162 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.595 293.422 293.263 293.118 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.542 293.372 293.218 293.078 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.671 293.491 293.326 293.175 293.036 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 293.619 293.441 293.279 293.131 292.997 - - - - - - 5000 - 5000 - - 1.55006 1.71098 1.93889 2.21306 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.58541 1.76686 2.00906 2.29285 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.6269 1.82656 2.08179 2.37429 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.67364 1.89026 2.15677 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.72557 1.95771 2.23471 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.78236 2.02838 2.31479 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.84339 2.10203 2.39699 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.90856 2.17846 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.97707 2.25748 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.04914 2.33909 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.12356 2.42112 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.20083 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.27961 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.3609 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.4441 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - 5000 - 5000 - - 212.32 224.957 235.037 242.819 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 215.978 227.931 237.342 244.59 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 219.461 230.691 239.482 246.233 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 222.717 233.273 241.458 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 225.807 235.692 243.32 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 228.719 237.953 245.058 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 231.45 240.06 246.678 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 233.998 242.022 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 236.386 243.862 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 238.613 245.58 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 240.677 247.16 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 242.6 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 244.373 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 246.037 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 247.589 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - - - 5000 - 5000 - - NaN NaN NaN NaN 2.38844 2.73414 3.08925 3.45281 3.82187 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 2.48013 2.82839 3.18696 3.55249 3.92386 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 2.23391 2.57182 2.92345 3.28394 3.65084 4.02201 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 2.32272 2.6653 3.01965 3.38202 3.75044 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 2.41356 2.75981 3.11667 3.48105 3.85113 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 2.5056 2.85516 3.21462 3.58022 3.94993 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 2.25897 2.59913 2.95163 3.31264 3.6786 4.05019 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 2.34937 2.69333 3.04854 3.41159 3.77869 NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 2.44131 2.78739 3.14488 3.51014 3.87783 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 2.53342 2.88326 3.24261 3.60719 3.97732 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 2.28654 2.62757 2.97934 3.33985 3.70658 4.07769 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 2.37706 2.72101 3.07587 3.43865 3.80653 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 2.46914 2.81612 3.17393 3.53973 3.9067 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 2.56113 2.91305 3.27325 3.64061 4.00722 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.31274 2.65556 3.0091 3.37003 3.73795 4.10851 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.40536 2.75119 3.1059 3.46895 3.83748 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.49719 2.84619 3.20466 3.56971 3.93793 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.59097 2.94207 3.30194 3.66893 4.0392 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.68367 3.03791 3.40011 3.76811 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.7788 3.13568 3.49998 3.86767 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.876 3.23497 3.5984 3.96803 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.97153 3.33211 3.69808 4.06879 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 3.06786 3.43054 3.79802 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - 5000 - 5000 - - NaN NaN NaN NaN 315.475 311.37 308.222 305.731 303.722 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 314.241 310.436 307.477 305.132 303.228 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 317.766 313.112 309.562 306.789 304.574 302.777 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 316.378 312.053 308.739 306.137 304.044 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 315.085 311.07 307.967 305.517 303.538 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 313.882 310.148 307.241 304.936 303.073 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 317.335 312.767 309.286 306.565 304.394 302.621 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 315.966 311.731 308.479 305.92 303.875 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 314.692 310.777 307.736 305.324 303.39 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 313.532 309.878 307.029 304.768 302.927 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 316.902 312.438 309.041 306.372 304.234 302.488 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 315.573 311.441 308.26 305.747 303.726 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 314.34 310.504 307.52 305.145 303.244 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 313.214 309.615 306.817 304.58 302.789 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 316.525 312.142 308.801 306.181 304.071 302.354 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 315.204 311.148 308.035 305.565 303.572 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN - 314.005 310.231 307.302 304.978 303.103 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 312.884 309.379 306.636 304.435 302.649 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 311.863 308.579 305.997 303.922 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 310.892 307.821 305.391 303.435 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 309.972 307.098 304.824 302.967 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 309.134 306.439 304.285 302.525 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 308.348 305.811 303.773 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.25613 4.62146 4.99286 5.361 5.73621 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.35358 4.71994 5.09033 5.46581 5.83719 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 4.08573 4.45092 4.81922 5.19111 5.5659 5.93954 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 4.18397 4.55054 4.9193 5.29365 5.66595 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 4.28236 4.64851 5.01934 5.39052 5.7653 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 4.37897 4.74712 5.11807 5.48905 5.86392 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN 4.11301 4.47831 4.84524 5.21701 5.59074 5.96454 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN 4.20982 4.57639 4.94616 5.31753 5.69041 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN 4.30719 4.67602 5.04516 5.4189 5.79111 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN 4.40599 4.77369 5.14441 5.51748 5.89193 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 4.13907 4.5043 4.87303 5.24445 5.61795 5.99301 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 4.23704 4.60333 4.9729 5.34493 5.71887 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 4.3354 4.70263 5.07291 5.44556 5.82 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 4.06963 4.43405 4.80229 5.1733 5.54647 5.9212 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 4.16808 4.53528 4.90327 5.27375 5.64742 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 4.26636 4.63427 5.00224 5.3745 5.74863 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 4.36454 4.73206 5.10255 5.47537 5.84987 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 4.09982 4.46381 4.83185 5.20301 5.57631 5.95124 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 4.19706 4.5627 4.93192 5.30365 5.67743 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 4.29518 4.66211 5.03206 5.40452 5.77871 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 4.39403 4.7618 5.13251 5.50547 5.88011 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 4.12801 4.49316 4.86178 5.23317 5.60668 5.98168 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 4.22632 4.59255 4.96205 5.33403 5.70795 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN 264.834 266.593 268.111 269.401 270.546 NaN 
[Embedded test data: repeated whitespace-separated tie-point grids of floating-point values with NaN padding, alternating temperature fields (roughly 265-305 K) with companion fields (roughly 0.8-11.4), each grid preceded by paired "5000" dimension markers.]
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.80105 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.90604 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0079 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.72561 10.108 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.82863 10.2106 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.93171 10.3134 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0349 10.4183 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.75596 10.1379 10.5195 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.85923 10.241 10.622 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.96242 10.344 10.7248 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0656 10.447 10.8276 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.78686 10.1688 10.5499 10.9303 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.89022 10.2719 10.6528 11.033 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.99352 10.375 10.7557 11.1357 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.71476 10.0969 10.4781 10.8586 11.2384 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.8182 10.2 10.5811 10.9615 11.341 - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 290.524 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 290.5 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 290.476 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 290.517 290.455 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 290.492 290.432 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 290.469 290.412 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 290.448 290.39 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN 290.488 290.426 290.371 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 290.466 290.406 290.353 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 290.445 290.386 290.334 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 290.423 290.367 290.317 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 290.465 290.403 290.349 290.3 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 290.444 290.383 290.329 290.282 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 290.422 290.364 290.313 290.267 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 290.467 290.404 290.346 290.295 290.25 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 290.445 290.382 290.327 290.279 290.237 - - - - - - 5000 - 5000 - - 1.2995 1.48813 1.74568 2.04611 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.3417 1.55209 1.82329 2.13226 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.39058 1.61991 1.90338 2.21977 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.44512 1.69155 1.98515 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.50511 1.76668 2.0697 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.56998 1.84486 2.15612 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.63923 1.92573 2.24418 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.71214 2.0089 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.78855 2.09464 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.86798 2.18232 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.94949 2.27034 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.03347 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.11853 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.20597 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - 5000 - 5000 - - 215.95 230.181 240.735 248.4 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 220.175 233.374 243.052 250.089 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 224.124 236.276 245.162 251.636 NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 227.739 238.942 247.088 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 231.102 241.4 248.879 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 234.212 243.658 250.529 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 237.067 245.73 252.055 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 239.692 247.638 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 242.098 249.397 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 244.316 251.031 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 246.338 252.507 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 248.199 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 249.894 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 251.463 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - - - 5000 - 5000 - - NaN NaN NaN NaN 2.26702 2.62823 2.99553 3.36881 3.74582 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 2.36337 2.72606 3.09611 3.47083 3.84974 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 2.10401 2.45932 2.8245 3.19576 3.5714 3.94971 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 2.19799 2.55679 2.92387 3.29636 3.67306 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 2.29359 2.65501 3.02391 3.39789 3.77577 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 2.39023 2.75396 3.12464 3.4993 3.87638 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 2.13069 2.48785 2.85367 3.22528 3.59984 3.97849 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 2.22609 2.58604 2.95377 3.32679 3.70193 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 2.32285 2.68372 3.05292 3.4276 3.80299 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 2.41926 2.78302 3.15342 3.5269 3.90434 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 2.15979 2.51756 2.88233 3.25322 3.62836 4.00641 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 2.25528 2.61481 2.98186 3.35443 3.73032 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 2.35191 2.71342 3.08275 3.45788 3.83242 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.0926 2.4481 2.8138 3.18485 
3.56099 3.93468 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.18739 2.54662 2.91289 3.284 3.66028 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.28478 2.64595 3.01268 3.38542 3.76184 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.38122 2.74454 3.11431 3.48841 3.86403 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.47907 2.84357 3.21408 3.5898 3.96722 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.57567 2.94256 3.31479 3.69095 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.67448 3.04316 3.41693 3.79241 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.77513 3.14528 3.51763 3.89463 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.87389 3.24495 3.61938 3.9971 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.97322 3.3458 3.72133 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - 5000 - 5000 - - NaN NaN NaN NaN 309.913 306.376 303.726 301.66 300.012 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 308.84 305.583 303.102 301.164 299.605 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 311.926 307.866 304.846 302.529 300.706 299.238 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 310.703 306.956 304.153 301.988 300.269 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 309.57 306.117 303.508 301.479 299.856 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 308.53 305.339 302.902 300.998 299.474 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 311.546 307.566 304.609 302.341 300.555 299.11 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 310.34 306.682 303.936 301.809 300.128 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 309.232 305.871 303.312 301.315 299.732 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 308.226 305.11 302.727 300.861 299.357 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 311.167 307.289 304.407 302.183 300.422 298.998 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 310.005 306.44 303.753 301.665 300.007 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 308.929 305.642 303.136 301.171 299.616 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 312.082 307.958 304.894 302.554 300.707 299.244 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 310.845 307.041 304.207 302.024 300.289 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 309.683 306.19 303.569 301.52 299.885 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 308.649 305.42 302.96 301.034 299.498 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 307.678 304.696 302.404 300.591 299.135 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 306.806 304.029 301.878 300.169 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 305.978 303.391 301.374 299.771 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 305.199 302.79 300.911 299.392 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 304.493 302.243 300.467 299.03 NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 303.833 301.722 300.048 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.17306 4.54551 4.92306 5.29644 5.67639 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.27248 4.64575 5.02209 5.4027 5.77848 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 3.99891 4.37184 4.7467 5.12423 5.50398 5.88194 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 4.09932 4.47329 4.8484 5.22827 5.60533 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 4.19989 4.57313 4.95005 5.32643 5.70577 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 4.29851 4.67346 5.05023 5.42624 5.80556 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN 4.02689 4.39979 4.7732 5.15062 5.52925 5.90729 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN 4.12585 4.49975 4.87582 5.25255 5.63011 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN 4.22538 4.60124 4.9763 5.35528 5.73197 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 3.95393 4.32611 4.70052 5.07704 5.45514 5.83398 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 4.05363 4.42644 4.80162 5.17854 5.55681 5.93609 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 4.15386 4.52729 4.90299 5.28037 5.659 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 4.25421 4.62839 5.00465 5.38243 5.76131 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 3.98277 4.35494 4.72975 5.10643 5.48453 5.86361 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 4.08341 4.45801 4.83237 5.20835 5.5868 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 4.18384 4.5589 4.93293 5.31044 5.68915 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 4.28407 4.65838 5.03471 5.41261 5.79158 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 4.01368 4.38529 4.75985 5.13666 5.51484 5.89406 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 4.11308 4.48603 4.86155 5.23869 5.61717 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 4.21332 4.58724 4.9632 5.34088 5.71963 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 4.3141 4.68861 5.06515 5.44315 5.82218 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 4.04243 4.41518 4.79028 5.16721 5.54554 5.92479 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 4.14296 4.5164 4.89209 5.26941 5.64802 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN 268.302 269.811 271.107 272.204 273.174 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 268.726 270.169 271.405 272.482 273.407 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 267.485 269.127 270.514 271.705 272.739 273.636 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 267.954 269.523 270.849 271.997 272.983 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 268.4 269.891 271.167 272.262 273.221 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 268.816 270.248 271.472 272.522 273.447 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN 
267.608 269.226 270.588 271.763 272.782 273.67 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN 268.066 269.612 270.923 272.049 273.028 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN 268.502 269.986 271.239 272.328 273.268 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 267.253 268.926 270.337 271.543 272.586 273.498 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 267.733 269.326 270.675 271.835 272.842 273.723 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 268.189 269.711 271.006 272.12 273.088 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 268.629 270.082 271.321 272.391 273.326 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 267.397 269.043 270.434 271.625 272.658 273.559 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 267.873 269.452 270.778 271.916 272.909 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 268.324 269.828 271.096 272.196 273.155 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 268.75 270.184 271.409 272.466 273.39 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 267.545 269.162 270.531 271.706 272.725 273.617 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 268.005 269.551 270.864 271.993 272.975 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 268.444 269.923 271.182 272.269 273.215 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 268.865 270.279 271.487 272.532 273.446 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 267.665 269.263 270.617 271.779 272.787 273.67 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 268.118 269.645 270.943 272.06 273.031 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.03825 6.42328 6.8084 7.19481 7.57608 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.14227 6.53146 6.91268 7.29719 7.67876 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.86365 6.24823 6.6361 7.01574 7.39656 7.78112 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.96874 6.34971 6.73894 7.12044 7.50071 7.88367 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.06839 6.45318 6.83887 7.22045 7.60292 NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.17055 6.55579 6.9386 7.32225 7.70615 NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.88926 6.27539 6.65765 7.04152 7.42566 7.80947 NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.99228 6.37657 6.76081 7.14502 7.52905 NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.096 6.48027 6.86441 7.24852 7.63244 NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.19952 6.58386 6.968 7.3521 7.73595 NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.91876 6.30317 6.68755 7.07179 7.45565 7.83932 NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.02254 6.40691 6.79123 7.17531 7.55923 NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.12631 6.51064 6.89497 7.2791 7.66285 NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.84584 6.23015 6.61439 6.99873 7.38273 7.76641 NaN NaN NaN NaN NaN NaN 
NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.9498 6.33411 6.71839 7.10252 7.48638 7.87003 NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.05373 6.43798 6.82215 7.20626 7.5901 NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.15759 6.54193 6.92607 7.3101 7.69379 NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.87728 6.26154 6.64581 7.02992 7.41384 7.79752 NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.9812 6.36543 6.74971 7.13381 7.51767 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.0852 6.46954 6.85369 7.23767 7.62143 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.18918 6.5734 6.95756 7.34156 7.72525 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 5.9089 6.29316 6.6774 7.06154 7.44543 7.829 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 6.01291 6.39721 6.78141 7.16545 7.5493 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.81 294.354 293.954 293.599 293.291 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.667 294.223 293.843 293.502 293.202 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.016 294.531 294.102 293.736 293.409 293.115 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.865 294.404 293.988 293.632 293.317 293.033 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.73 294.28 293.882 293.536 293.229 NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.597 294.161 293.781 293.442 293.142 NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.948 294.466 294.047 293.677 293.35 293.058 NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.805 294.342 293.936 293.579 293.262 NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.669 294.222 293.83 293.482 293.174 NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.537 294.104 293.725 293.39 293.092 NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.886 294.409 293.993 293.626 293.3 293.01 NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.748 294.288 293.884 293.528 293.213 NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.613 294.169 293.779 293.435 293.13 NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.974 294.484 294.056 293.678 293.345 293.048 NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.832 294.36 293.946 293.581 293.257 292.969 NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.697 294.24 293.839 293.486 293.173 NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.563 294.124 293.737 293.396 293.091 NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.924 294.438 294.012 293.637 293.305 293.011 NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.784 294.314 293.903 293.541 293.221 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.649 294.198 293.8 293.448 293.136 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 294.519 294.081 293.697 293.358 293.056 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 294.873 294.392 293.971 293.6 293.27 292.976 NaN NaN NaN 
NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 294.735 294.27 293.862 293.502 293.183 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.95786 8.33657 8.71615 9.09487 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.05988 8.43767 8.81777 9.19692 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.77938 8.15903 8.53922 8.91895 9.29903 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.8811 8.26127 8.64123 9.0214 9.40173 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.98372 8.3636 8.74357 9.13109 9.50864 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.08626 8.46567 8.8462 9.22937 9.61038 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.80765 8.18778 8.56794 8.94889 9.32855 9.70903 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.91001 8.29109 8.67236 9.05359 9.43165 9.81044 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.0138 8.394 8.77381 9.15549 9.53392 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.11484 8.49696 8.8761 9.25446 9.63647 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.83703 8.21725 8.59729 8.97737 9.35681 9.73561 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.93951 8.31973 8.69967 9.07929 9.4587 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.04205 8.42222 8.80207 9.18165 9.56088 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.76448 8.14462 8.5247 8.90454 9.28406 9.66326 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.86709 8.24718 8.62724 9.00693 9.38637 9.76543 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.96963 8.34973 8.72971 9.10935 9.48873 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.07227 8.4523 8.8322 9.21179 9.59102 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.79485 8.17493 8.55484 8.9347 9.31422 9.69339 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.8975 8.27757 8.65752 9.03724 9.41661 9.79569 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.00022 8.38025 8.76014 9.13982 9.51911 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.10295 8.48294 8.86278 9.24234 9.62161 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.82559 8.20568 8.58568 8.96543 9.3449 9.72398 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.92841 8.30845 8.68838 9.06805 9.44742 NaN NaN NaN NaN NaN - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.219 277.71 278.16 278.577 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.347 277.825 278.267 278.674 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 276.958 277.469 277.938 278.37 278.771 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.091 277.591 278.051 278.475 278.867 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.225 277.713 278.161 278.583 278.964 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.354 277.829 278.269 278.679 279.056 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN 276.971 277.48 277.947 278.377 278.773 279.142 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.108 277.605 278.062 278.482 278.869 279.231 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.243 277.727 278.172 278.585 278.964 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.371 277.847 278.281 278.681 279.057 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 276.996 277.5 277.961 278.386 278.78 279.145 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.132 277.623 278.075 278.491 278.877 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.265 277.746 278.187 278.594 278.971 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 276.885 277.396 277.864 278.295 278.694 279.064 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.023 277.522 277.981 278.403 278.793 279.155 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.161 277.649 278.095 278.507 278.888 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.292 277.768 278.206 278.61 278.985 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 276.914 277.422 277.887 278.315 278.71 279.076 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.053 277.548 278.001 278.42 278.807 279.168 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.186 277.67 278.114 278.524 278.903 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.317 277.789 278.224 278.624 278.996 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 276.941 277.445 277.906 278.33 278.723 279.088 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 277.077 277.568 278.019 278.434 278.82 NaN NaN NaN NaN NaN - - - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.82872 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.93351 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0353 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.75336 10.1352 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.85622 10.2376 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.95916 10.3403 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0621 10.445 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.78367 10.1651 10.5461 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.88679 10.268 10.6485 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN 9.98981 10.3708 10.7512 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0929 10.4737 10.8538 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.81458 10.1959 10.5765 10.9564 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.91774 10.2989 10.6793 11.059 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0209 10.4019 10.782 11.1615 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.74262 10.1241 10.5048 10.8848 11.2641 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.8459 10.2272 10.6077 10.9876 11.3666 - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.051 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.009 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.972 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.053 291.934 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.013 291.899 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.976 291.863 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.937 291.827 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.02 291.902 291.795 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.982 291.866 291.762 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.944 291.832 291.731 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.909 291.8 291.701 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.992 291.874 291.767 291.671 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.953 291.839 291.736 291.642 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.919 291.807 291.705 291.612 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 292.004 291.882 291.773 291.675 291.586 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 291.966 291.849 291.743 291.646 291.558 - - - - - - 5000 - 5000 - - 0.940685 1.18821 1.49861 1.84032 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 0.998439 1.26772 1.5887 1.9358 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.06363 1.35003 1.68006 2.03187 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.13409 1.43537 1.77237 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.20987 1.52346 1.86675 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.28999 1.6137 1.96229 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.37357 1.70574 2.0589 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.46015 1.79937 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.54918 1.89469 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.64058 1.99158 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.73305 2.08772 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.82717 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.92159 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.01762 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - 5000 - 5000 - - 224.94 241.508 251.835 258.488 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 230.202 244.788 253.904 259.875 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 234.853 247.65 255.75 261.127 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 238.896 250.179 257.387 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 242.471 252.43 258.881 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 245.63 254.441 260.232 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 248.418 256.239 261.458 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 250.879 257.853 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 253.071 259.317 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 255.025 260.639 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 256.765 261.828 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 258.333 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 259.729 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 261.005 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - - - 5000 - 5000 - - NaN NaN NaN NaN 2.12524 2.50615 2.88825 3.27318 3.65954 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 2.22754 2.60841 2.99236 3.378 3.76576 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 1.95111 2.32889 2.71093 3.09521 3.48113 3.86779 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 2.05179 2.43145 2.81419 3.19888 3.58527 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 2.15369 2.53435 2.91781 3.3032 3.6903 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 2.25604 2.63756 3.02196 3.40734 3.79306 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 1.97978 2.35907 2.7414 3.12571 3.51034 3.89717 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 2.08186 2.46212 2.84519 3.23018 3.61484 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 2.18474 2.56431 2.94786 3.33378 3.71811 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 2.28671 2.66784 3.05159 3.43558 3.82156 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 2.01093 2.39021 2.77105 3.15441 3.53951 3.92565 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 2.11276 2.49212 2.8742 3.25854 3.6438 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 2.21536 2.59517 2.97855 3.36473 3.74804 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.93855 2.31688 2.69966 3.08382 3.47042 3.85242 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.04007 2.42046 2.80261 3.18599 3.57205 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.14397 2.52457 2.9059 3.29015 3.67581 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.24598 2.62733 3.01092 3.3959 3.78024 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.34926 2.73041 3.11377 3.4997 3.88537 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.45062 2.83302 3.21737 3.60321 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.55401 2.9372 3.32234 3.70688 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.65899 3.04264 3.42559 3.81121 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.76164 3.14534 3.52981 3.9157 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.86462 3.24908 3.63409 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - 5000 - 5000 - - NaN NaN NaN NaN 300.059 297.797 296.151 294.899 293.915 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 299.36 297.298 295.768 294.596 293.671 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 301.382 298.731 296.835 295.416 294.319 293.451 NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 300.568 298.154 296.407 295.089 294.058 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 299.829 297.627 296.008 294.776 293.81 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 299.153 297.135 295.639 294.489 293.584 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 301.124 298.54 296.686 295.296 294.222 293.364 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 300.331 297.978 296.267 294.972 293.967 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 299.609 297.469 295.888 294.677 293.731 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 298.964 296.996 295.529 294.403 293.509 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 300.884 298.369 296.562 295.199 294.141 293.298 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 300.118 297.831 296.16 294.889 293.896 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 299.424 297.334 295.784 294.591 293.662 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 301.503 298.801 296.865 295.426 294.313 293.444 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 300.679 298.217 296.445 295.11 294.065 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 299.927 297.687 296.052 294.803 293.823 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 299.25 297.2 295.681 294.515 293.599 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 298.633 296.754 295.343 294.246 293.379 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 298.077 296.337 295.022 293.996 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 297.555 295.948 294.72 293.758 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 297.067 295.578 294.439 293.532 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 296.628 295.244 294.173 293.318 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 296.218 294.928 293.922 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.07903 4.46004 4.84494 5.2245 5.60986 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.18092 4.56235 4.94564 5.33235 5.71332 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 3.90031 4.28253 4.66529 5.04957 5.43514 5.81812 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 4.00345 4.38635 4.76894 5.15528 5.53786 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 4.10659 4.48827 4.8724 5.25497 5.63972 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 4.20761 4.59069 4.97439 5.35629 5.74079 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN 3.92919 4.31129 4.69239 5.07644 5.4608 5.84382 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN 4.03082 4.41348 4.79696 5.18003 5.56309 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN 4.13283 4.51712 4.89928 5.2844 5.66635 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 3.85436 4.23605 4.61845 5.00178 5.38572 5.76968 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 3.95684 4.33865 4.72145 5.10493 5.48889 5.87312 
[Truncated test-data dump: a long run of alternating whitespace-separated value
grids, each preceded by two step entries of 5000 and padded with NaN outside a
diagonal swath band. The grids alternate between radiance-like values
(roughly 1.7-11.5) and brightness temperatures (roughly 210-325 K). The full
numeric blocks are omitted here; only their structure is recoverable.]
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.07837 8.45307 8.82831 9.21135 9.5849 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.17942 8.55379 8.92968 9.30857 9.68556 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.90478 8.27951 8.65475 9.03119 9.4067 9.78329 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.00562 8.3814 8.75786 9.13465 9.50864 9.88371 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.10785 8.48295 8.85806 9.23542 9.60987 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.20748 8.58452 8.95911 9.33328 9.71137 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.93358 8.30846 8.68363 9.0592 9.4345 9.8095 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.03458 8.40957 8.78472 9.15994 9.53531 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.13558 8.51067 8.88588 9.26119 9.63647 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.862 8.23673 8.61184 8.98712 9.36242 9.73777 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.96306 8.33782 8.71303 9.08828 9.46365 9.83894 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.06409 8.43904 8.81429 9.18956 9.5649 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.16529 8.54025 8.91551 9.29083 9.66617 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.89181 8.26649 8.6415 9.01683 9.39219 9.7675 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.99296 8.36776 8.74289 9.11818 9.49347 9.8688 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.09418 8.46908 8.84425 9.21962 9.59492 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.19547 8.57043 8.94567 9.321 9.69638 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.92207 8.2968 8.67191 9.04715 9.42248 9.79774 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.02343 8.39822 8.77335 9.14861 9.52393 NaN NaN NaN NaN NaN - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 273.241 273.901 274.509 275.069 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 273.418 274.061 274.655 275.204 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 272.896 273.583 274.216 274.799 275.338 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 273.079 273.753 274.372 274.942 275.469 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 273.262 273.918 274.522 275.091 275.605 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 273.438 274.079 274.672 275.223 275.73 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 272.926 273.611 274.239 274.818 275.352 275.849 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 273.112 273.782 274.397 274.964 275.486 275.971 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 273.296 273.948 274.548 275.104 275.615 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 273.471 274.112 274.697 275.236 275.742 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 272.966 273.645 274.267 274.841 275.371 275.863 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 273.15 273.813 274.422 274.984 275.503 NaN NaN - NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 273.332 273.98 274.574 275.123 275.631 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 272.82 273.509 274.14 274.722 275.261 275.759 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 273.008 273.682 274.301 274.87 275.395 275.882 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 273.194 273.85 274.454 275.01 275.526 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 273.372 274.014 274.605 275.151 275.655 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 272.863 273.548 274.175 274.752 275.286 275.78 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 273.05 273.718 274.331 274.896 275.419 275.904 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 273.232 273.884 274.484 275.037 275.549 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 273.409 274.047 274.633 275.174 275.676 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 272.903 273.582 274.204 274.778 275.308 275.8 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 273.085 273.749 274.358 274.92 275.439 NaN NaN NaN NaN NaN - - - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.91293 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.017 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.1182 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.83795 10.2176 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.94018 10.3195 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0426 10.4216 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.1449 10.5258 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.86803 10.2473 10.6264 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.97058 10.3497 10.7282 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.073 10.452 10.8304 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.1755 10.5543 10.9325 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.89882 10.278 10.6566 11.0347 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0014 10.3805 10.759 11.1368 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.104 10.483 10.8612 11.2389 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.2067 10.5854 10.9635 11.341 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN 9.93004 10.3092 10.6878 11.0658 11.4431 - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.354 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.279 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.208 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.379 295.14 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.305 295.073 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.234 295.008 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.164 294.94 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.332 295.097 294.88 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.262 295.03 294.817 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.191 294.965 294.758 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.124 294.903 294.7 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.294 295.058 294.841 294.642 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.223 294.992 294.781 294.587 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.154 294.93 294.723 294.532 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.088 294.867 294.664 294.479 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 295.256 295.021 294.807 294.609 294.427 - - - - - - 5000 - 5000 - - 2.11548 2.23575 2.41427 2.63918 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.14122 2.27855 2.47068 2.70615 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.17183 2.32496 2.53004 2.77531 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.20696 2.37507 2.59189 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.24651 2.42893 2.65689 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.29014 2.48603 2.72436 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.3378 2.54637 2.79444 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.38922 2.60946 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.44396 2.67559 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.50246 2.74445 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.56357 2.81465 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.62781 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.69397 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.76294 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - 5000 - 5000 - - 207.409 217.285 225.933 233.253 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 210.181 219.752 228.037 235.012 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 212.881 222.101 230.037 236.676 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 215.465 224.357 231.927 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 217.978 226.524 233.747 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 220.41 228.599 235.48 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 222.749 230.578 237.127 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 224.993 232.469 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 227.147 234.277 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 229.207 236.003 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 231.16 237.615 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 233.022 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 234.776 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 236.455 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - - - 5000 - 5000 - - NaN NaN NaN NaN 
2.74514 3.05183 3.37466 3.71119 4.0576 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 2.8255 3.13668 3.46435 3.80409 4.15374 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 2.61101 2.90652 3.22281 3.55396 3.89625 4.24663 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 2.68765 2.98978 3.31045 3.64487 3.98982 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 2.76669 3.07448 3.39938 3.7371 4.08476 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 2.84761 3.16052 3.48954 3.82967 4.17808 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 2.63228 2.93046 3.24814 3.58026 3.92209 4.27322 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 2.71053 3.01462 3.33663 3.67217 4.01616 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 2.79084 3.09918 3.42514 3.76408 4.10986 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 2.87199 3.18594 3.51533 3.85494 4.20396 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 2.65596 2.9557 3.27338 3.60539 3.94828 4.29924 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 2.73462 3.03937 3.36175 3.69736 4.04246 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 2.81532 3.1251 3.45187 3.79175 4.1371 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.60158 2.89675 3.21304 3.54365 3.88623 4.23235 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.67894 2.98094 3.30068 3.6336 3.97787 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.75963 3.06666 3.38952 3.72584 4.07179 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.84037 3.15249 3.4804 3.81995 4.16687 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.92348 3.23958 3.57045 3.91314 4.26288 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 3.00643 3.32737 3.66176 4.0065 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 3.09192 3.41718 3.75506 4.10058 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 3.18002 3.50876 3.8472 4.1956 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 3.2669 3.59884 3.94084 4.29129 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 3.35512 3.69049 4.03503 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - 5000 - 5000 - - NaN NaN NaN NaN 326.245 321.453 317.581 314.393 311.749 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 324.837 320.321 316.639 313.61 311.086 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 328.805 323.525 319.251 315.764 312.883 310.482 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 327.268 322.276 318.232 314.925 312.183 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 325.806 321.094 317.265 314.123 311.511 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 324.425 319.973 316.345 313.362 310.885 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 328.328 323.118 318.912 315.48 312.651 310.278 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 326.802 321.89 317.908 314.649 311.961 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 325.355 320.738 
316.972 313.871 311.315 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 324.011 319.641 316.074 313.144 310.694 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 327.84 322.725 318.605 315.23 312.437 310.101 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 326.35 321.534 317.628 314.421 311.762 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 324.939 320.398 316.691 313.633 311.118 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 328.964 323.629 319.309 315.795 312.89 310.505 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 327.411 322.365 318.298 314.976 312.216 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 325.918 321.171 317.339 314.18 311.555 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 324.543 320.058 316.409 313.409 310.923 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 323.23 319.006 315.555 312.694 310.312 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 322.024 318.016 314.733 312.012 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 320.854 317.062 313.944 311.366 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 319.736 316.146 313.205 310.74 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 318.703 315.301 312.494 310.144 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 317.723 314.492 311.817 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.4881 4.83505 5.19024 5.54455 5.90722 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.58039 4.92903 5.28393 5.64567 6.0052 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 4.32732 4.67274 5.02388 5.38084 5.74232 6.10447 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 4.41987 4.76738 5.11965 5.47951 5.83924 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 4.51273 4.86066 5.21561 5.5729 5.93536 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 4.60424 4.9547 5.31035 5.66804 6.03102 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN 4.35286 4.69853 5.04857 5.40552 5.76617 6.12863 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN 4.44406 4.79178 5.14518 5.50244 5.8627 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN 4.53608 4.8867 5.24024 5.60001 5.96019 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 4.28543 4.62965 4.97994 5.3355 5.69533 6.05799 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 4.377 4.72295 5.07499 5.43178 5.79241 6.15604 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 4.46955 4.81722 5.17062 5.52851 5.88999 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 4.56254 4.91189 5.26665 5.62568 5.98806 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 4.31156 4.65613 5.00715 5.36315 5.72311 6.08616 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 4.40427 4.75236 5.10367 5.45984 5.82079 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 4.49707 4.8466 5.19868 5.55695 5.91876 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 4.59007 
4.9399 5.29502 5.65432 6.01691 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 4.33991 4.68427 5.03533 5.39165 5.75187 6.11526 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 4.43157 4.77838 5.13119 5.48862 5.84976 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 4.52444 4.87322 5.22733 5.58592 5.94789 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 4.61815 4.96843 5.3239 5.68346 6.0463 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 4.36655 4.71227 5.06411 5.4208 5.78135 6.14491 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 4.45943 4.80697 5.16021 5.51798 5.87938 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN 257.78 259.977 261.897 263.546 265.022 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 258.396 260.509 262.35 263.973 265.38 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 256.618 258.985 261.023 262.801 264.366 265.736 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 257.29 259.564 261.524 263.248 264.744 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 257.935 260.111 262.005 263.65 265.108 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 258.541 260.64 262.461 264.048 265.458 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN 256.801 259.138 261.144 262.904 264.447 265.805 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN 257.457 259.704 261.647 263.337 264.822 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN 258.088 260.254 262.116 263.758 265.19 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 256.295 258.7 260.773 262.574 264.153 265.546 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 256.981 259.288 261.28 263.016 264.541 265.891 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 257.635 259.851 261.771 263.447 264.922 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN 258.266 260.396 262.244 263.86 265.286 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 256.497 258.87 260.917 262.698 264.261 265.641 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 257.175 259.464 261.427 263.139 264.648 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 257.823 260.019 261.905 263.562 265.021 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN 258.441 260.545 262.371 263.972 265.383 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 256.701 259.039 261.06 262.821 264.367 265.732 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 257.359 259.608 261.556 263.254 264.747 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 257.993 260.155 262.03 263.671 265.116 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 258.602 260.682 262.49 264.074 265.47 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 256.872 259.189 261.189 262.931 264.461 265.814 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 257.525 259.75 261.677 263.358 264.837 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.24338 6.61815 6.99386 7.37214 7.74586 NaN 
NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.34447 6.72362 7.09555 7.4723 7.84652 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.07369 6.44742 6.8252 7.19648 7.56956 7.94694 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.17534 6.54605 6.92574 7.299 7.67154 8.04762 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.27219 6.64665 7.02336 7.39658 7.77174 NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.37157 6.74687 7.12066 7.49644 7.87306 NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.09824 6.47335 6.84615 7.22135 7.59774 7.9745 NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.19816 6.57196 6.94686 7.32266 7.69907 NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.29891 6.67299 7.04802 7.42399 7.80051 NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.39951 6.77402 7.14934 7.5255 7.90212 NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.12674 6.50042 6.87523 7.25085 7.62696 8.00366 NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.22746 6.60141 6.97644 7.35223 7.72863 NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.32827 6.70257 7.0779 7.4539 7.83031 NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.05598 6.42934 6.80378 7.17935 7.55544 7.932 NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.15685 6.53053 6.9053 7.28091 7.65711 8.0338 NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.25778 6.63175 7.00674 7.38256 7.7589 NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.35886 6.73316 7.10836 7.48429 7.86073 NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.08665 6.46 6.83457 7.21001 7.5861 7.96265 NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.18749 6.56122 6.93612 7.31176 7.68796 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.28863 6.6627 7.03767 7.41344 7.7898 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.38973 6.76404 7.13935 7.51536 7.89179 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 6.11759 6.49106 6.86569 7.24113 7.6172 7.99372 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 6.21862 6.59241 6.96727 7.34294 7.71922 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.595 301.711 300.926 300.224 299.604 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.332 301.468 300.717 300.038 299.434 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 303.007 302.073 301.238 300.514 299.863 299.271 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.726 301.833 301.023 300.317 299.684 299.11 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.474 301.598 300.82 300.132 299.513 NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.222 301.373 300.624 299.953 299.348 NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.902 301.971 301.157 300.427 299.774 299.186 NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.634 301.738 300.945 300.236 299.601 NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.375 301.508 300.737 
300.05 299.433 NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.123 301.284 300.538 299.871 299.271 NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.8 301.882 301.069 300.344 299.695 299.112 NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.535 301.646 300.858 300.156 299.526 NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.279 301.42 300.656 299.974 299.361 NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.973 302.033 301.2 300.458 299.795 299.2 NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.703 301.793 300.987 300.268 299.625 299.045 NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.442 301.562 300.781 300.083 299.457 NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.192 301.34 300.581 299.903 299.295 NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.882 301.948 301.122 300.387 299.729 299.138 NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.613 301.712 300.913 300.2 299.56 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.358 301.485 300.708 300.015 299.394 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN 302.108 301.262 300.511 299.839 299.235 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 302.792 301.869 301.051 300.32 299.666 299.078 NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN 302.529 301.634 300.84 300.133 299.499 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.11722 8.48745 8.85921 9.23075 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.21673 8.58629 8.95889 9.33088 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.9429 8.31372 8.68573 9.05803 9.43123 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.04218 8.4137 8.78573 9.15854 9.53224 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.14249 8.51379 8.88597 9.26604 9.63713 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.24245 8.61364 8.98665 9.36285 9.73693 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.97039 8.34171 8.71381 9.0874 9.46022 9.83429 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.07025 8.44262 8.81599 9.19009 9.5614 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.17137 8.54327 8.91551 9.29 9.66179 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.27023 8.64402 9.01577 9.3872 9.76265 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.99889 8.37032 8.74232 9.11502 9.48764 9.86028 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.09882 8.4705 8.84265 9.21511 9.58786 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.19897 8.57081 8.943 9.31552 9.68826 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.92789 8.29911 8.67103 9.04342 9.41609 9.78899 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.02793 8.39935 8.77149 9.14389 9.51658 9.8894 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.12807 8.49968 8.87185 9.24437 9.61717 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
8.22828 8.60004 8.97237 9.34499 9.71778 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.95735 8.32862 8.70049 9.0729 9.44557 9.81843 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.05755 8.42899 8.80104 9.1735 9.54619 9.91912 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.15782 8.52943 8.90161 9.27421 9.64696 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.25822 8.62998 9.00227 9.37489 9.74774 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.98737 8.35866 8.7306 9.10297 9.47567 9.8485 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.08773 8.4592 8.83131 9.20376 9.57647 NaN NaN NaN NaN NaN - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.218 271.959 272.644 273.275 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.416 272.14 272.811 273.43 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 270.83 271.606 272.319 272.973 273.58 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.038 271.796 272.492 273.135 273.731 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.243 271.984 272.665 273.306 273.884 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.445 272.167 272.833 273.454 274.027 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 270.869 271.639 272.346 273 273.602 274.162 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.078 271.832 272.526 273.165 273.752 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.287 272.022 272.697 273.323 273.899 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.484 272.205 272.865 273.473 274.044 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 270.914 271.68 272.382 273.029 273.626 274.18 NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.125 271.872 272.556 273.188 273.774 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.328 272.057 272.729 273.348 273.922 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 270.753 271.53 272.242 272.897 273.502 274.064 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 270.966 271.723 272.419 273.061 273.655 274.207 NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.173 271.914 272.595 273.223 273.803 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.376 272.099 272.764 273.379 273.949 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 270.803 271.573 272.28 272.931 273.533 274.091 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.013 271.766 272.457 273.094 273.683 274.23 NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.219 271.954 272.629 273.253 273.83 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.418 272.136 272.797 273.408 273.974 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 270.847 271.613 272.315 272.962 273.56 274.114 NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 271.055 271.802 272.488 273.121 273.707 NaN NaN NaN NaN NaN - - - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.96698 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0704 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.1713 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.89221 10.2704 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.99408 10.3719 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.096 10.4736 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.198 10.5775 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.92221 10.2999 10.6776 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0243 10.4019 10.7792 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.1264 10.5039 10.8809 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.2285 10.6058 10.9827 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.95283 10.3305 10.7078 11.0846 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0551 10.4327 10.8098 11.1864 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.1573 10.5347 10.9117 11.2882 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.2595 10.6368 11.0137 11.39 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.98402 10.3617 10.7389 11.1156 11.4917 - - - - 5000 - 5000 - - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.981 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.889 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.802 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 297.016 296.72 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.927 296.638 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.839 296.557 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN 296.754 296.477 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.965 296.671 296.4 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.876 296.589 296.326 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.792 296.511 296.252 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.708 296.433 296.18 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.919 296.626 296.358 296.11 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.833 296.547 296.284 296.041 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.75 296.469 296.212 295.974 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.666 296.393 296.142 295.909 - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 296.878 296.587 296.318 296.071 295.843 - - - - - - 5000 - 5000 - - 1.23823 1.43501 1.70067 2.00793 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.28243 1.50128 1.78028 2.09568 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.33358 1.57132 1.86227 2.18464 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.39039 1.64507 1.94576 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.45266 1.72229 2.032 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.51983 1.80247 2.11999 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.59127 1.88516 2.20952 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.66633 1.97007 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.7448 2.05747 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.82615 2.1467 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.90956 2.23617 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 1.99521 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.08189 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.17074 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.26106 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - 5000 - 5000 - - 217.094 231.741 242.36 249.938 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 221.48 234.978 244.661 251.591 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN - 225.551 237.901 246.748 253.104 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 229.254 240.572 248.649 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 232.678 243.022 250.407 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 235.824 245.261 252.022 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 238.696 247.31 253.513 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 241.321 249.19 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 243.716 250.915 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 245.916 252.513 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 247.91 253.952 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 249.741 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 251.402 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 252.937 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 254.353 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - - - 5000 - 5000 - - NaN NaN NaN NaN 2.23151 2.59747 2.96837 3.34463 3.72401 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 2.32927 2.69641 3.06987 3.44734 3.82845 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 2.06582 2.4265 2.79581 3.17031 3.5485 3.92888 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 2.16142 2.52525 2.89612 3.27166 3.65078 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 2.25849 2.62459 2.99706 3.37391 3.75413 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 2.35651 2.7246 3.09868 3.47592 3.8552 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 2.093 2.45548 2.82532 3.20007 3.57716 3.95792 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 2.18997 2.55481 2.9263 3.30237 3.67981 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 2.28821 2.65366 3.02635 3.40385 3.78147 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 2.38589 2.75394 3.12766 3.50374 3.88333 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 2.12256 2.48548 2.85424 3.2282 3.60582 3.98591 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 2.21958 2.58389 2.95468 3.33014 3.70836 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 2.31763 2.68358 3.05638 3.43431 3.811 NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.05411 2.41508 2.78499 3.15928 3.53803 3.9138 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.15058 2.51492 2.88506 3.25923 3.63788 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.24957 2.6153 2.98569 3.36133 3.74002 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.34725 2.71497 3.08818 3.46499 3.84283 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.44651 2.81492 3.18865 3.56696 3.94646 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.54413 2.91489 3.29014 3.66877 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.64409 3.01641 3.39306 3.77069 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.74588 3.11926 3.49432 3.87348 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.84553 3.21976 3.5967 3.97645 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 2.94578 3.32133 3.69921 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - - - - 5000 - 5000 - - NaN NaN NaN NaN 307.927 304.622 302.161 300.255 298.739 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN NaN 306.919 303.884 301.585 299.796 298.365 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 309.82 306.007 303.198 301.056 299.374 298.027 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 308.669 305.162 302.555 300.555 298.973 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 307.602 304.379 301.958 300.085 298.594 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN NaN 306.629 303.656 301.4 299.642 298.242 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 309.462 305.729 302.978 300.878 299.234 297.907 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 308.325 304.903 302.354 300.39 298.842 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 307.287 304.15 301.778 299.933 298.477 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN NaN 306.346 303.443 301.235 299.516 298.133 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 309.105 305.47 302.791 300.734 299.112 297.805 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 308.012 304.68 302.187 300.257 298.73 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - NaN 307.007 303.938 301.615 299.801 298.372 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 309.969 306.095 303.243 301.077 299.376 298.03 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 308.805 305.243 302.608 300.588 298.988 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 307.715 304.448 302.015 300.124 298.619 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 306.744 303.734 301.455 299.677 298.264 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 305.838 303.061 300.94 299.268 297.93 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 305.023 302.443 300.456 298.881 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 304.254 301.853 299.99 298.514 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN - 303.529 301.296 299.563 298.166 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
[... fixture data elided: the remainder of the removed mtd_l2a_tile_xml string, all deletion ("-") lines of this hunk.
 It contains the per-detector Viewing_Incidence_Angles_Grids blocks (alternating Zenith and Azimuth VALUES rows,
 23 columns on a 5000 m COL_STEP/ROW_STEP grid, NaN-padded outside each detector footprint), the per-band
 Mean_Viewing_Incidence_Angle zenith/azimuth pairs, the L2A Image_Content_QI percentages with CAMS and
 AUX_ECMWFT references, and the GRANULE/L2A_T50TMK_A045975_20240411T030632/QI_DATA mask paths
 (MSK_DETFOO/MSK_QUALIT for B01-B12 and B8A, MSK_CLASSI_B00, MSK_CLDPRB/MSK_SNWPRB at 20m and 60m,
 and the T50TMK_20240411T030521_PVI.jp2 preview image) ...]
-"""
-
 mtd_l1c_old_xml = """
@@ -5774,24 +861,84 @@
 """  # noqa
 
 
+class TestTileXML:
+    """Test the SAFE TILE XML file handler."""
+
+    def setup_method(self):
+        """Set up the test case."""
+        from satpy.readers.msi_safe import SAFEMSIMDXML, SAFEMSITileMDXML
+        filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A", process_level="L1C")
+        self.l1c_xml_tile_fh = SAFEMSITileMDXML(BytesIO(mtd_l1c_tile_xml), filename_info, mock.MagicMock())
+
+    @pytest.mark.parametrize(("angles_name", "angle_block", "angle_type", "expected"),
+                             [("satellite_zenith_angle", "Viewing_Incidence_Angles_Grids", "Zenith",
+                               [[11.7128, 11.18397802, 10.27667671, 9.35384969, 8.42850504,
+                                 7.55445611, 6.65475545, 5.66517232, 4.75893757, 4.04976844],
+                                [11.88606009, 10.9799713, 10.07083278, 9.14571825, 8.22607131,
+                                 7.35181457, 6.44647222, 5.46144173, 4.56625547, 3.86638233],
+                                [11.6823579, 10.7763071, 9.86302106, 8.93879112, 8.04005637,
+                                 7.15028077, 6.21461062, 5.25780953, 4.39876601, 3.68620793],
+                                [11.06724679, 10.35723901, 9.63958896, 8.73072512, 7.83680864,
+                                 6.94792574, 5.9889201, 5.05445872, 4.26089708, 3.50984272],
+                                [6.28411038, 6.28411038, 6.28411038, 6.28411038, 6.28411038,
+                                 5.99769643, 5.62586167, 4.85165966, 4.13238314, 3.33781401],
+                                [3.7708, 3.7708, 3.7708, 3.7708, 3.7708,
+                                 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837],
+                                [3.7708, 3.7708, 3.7708, 3.7708, 3.7708,
+                                 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837],
+                                [3.7708, 3.7708, 3.7708, 3.7708, 3.7708,
+                                 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837],
+                                [3.7708, 3.7708, 3.7708, 3.7708, 3.7708,
+                                 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837],
+                                [3.7708, 3.7708, 3.7708, 3.7708, 3.7708,
+                                 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837]]),
+                              ("satellite_zenith_angle_l2a", "Viewing_Incidence_Angles_Grids", "Zenith",
+                               [[11.7128, 11.18397802, 10.27667671, 9.35384969, 8.42850504,
+                                 7.55445611, 6.65475545, 5.66517232, 4.75893757, 4.04976844],
+                                [11.88606009, 10.9799713, 10.07083278, 9.14571825, 8.22607131,
+                                 7.35181457, 6.44647222, 5.46144173, 4.56625547, 3.86638233],
+                                [11.6823579, 10.7763071, 9.86302106, 8.93879112, 8.04005637,
+                                 7.15028077, 6.21461062, 5.25780953, 4.39876601, 3.68620793],
+                                [11.06724679, 10.35723901, 9.63958896, 8.73072512, 7.83680864,
+                                 6.94792574, 5.9889201, 5.05445872, 4.26089708, 3.50984272],
+                                [6.28411038, 6.28411038, 6.28411038, 6.28411038, 6.28411038,
+                                 5.99769643, 5.62586167, 4.85165966, 4.13238314, 3.33781401],
+                                [3.7708, 3.7708, 3.7708, 3.7708, 3.7708,
+                                 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837],
+                                [3.7708, 3.7708, 3.7708, 3.7708, 3.7708,
+                                 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837],
+                                [3.7708, 3.7708, 3.7708, 3.7708, 3.7708,
+                                 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837],
+                                [3.7708, 3.7708, 3.7708, 3.7708, 3.7708,
+                                 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837],
+                                [3.7708, 3.7708, 3.7708, 3.7708, 3.7708,
+                                 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837]]),
+                              ])
+    def test_angles(self, angles_name, angle_block, angle_type, expected):
+        info = dict(xml_tag=angle_block, xml_item=angle_type)
+        res = self.l1c_xml_tile_fh.get_dataset(make_dataid(name=angles_name,
+                                                           resolution=60), info)[::200, ::200]
+        np.testing.assert_allclose(res, expected)
+
+
+
+
 class TestMTDXML:
     """Test the SAFE MTD XML file handler."""
 
     def setup_method(self):
         """Set up the test case."""
         from satpy.readers.msi_safe import SAFEMSIMDXML, SAFEMSITileMDXML
-        filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A")
+        filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A", process_level="L1C")
         self.l1c_xml_tile_fh = SAFEMSITileMDXML(BytesIO(mtd_l1c_tile_xml), filename_info, mock.MagicMock())
-        self.l2a_xml_tile_fh = SAFEMSITileMDXML(BytesIO(mtd_l2a_tile_xml), filename_info, mock.MagicMock())
         self.l1c_old_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_old_xml), filename_info, mock.MagicMock())
         self.l1c_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), filename_info, mock.MagicMock(), mask_saturated=True)
-        # self.l2a_xml_fh = SAFEMSIMDXML(StringIO(mtd_l2a_xml), filename_info, mock.MagicMock(), mask_saturated=True)
 
     def test_satellite_zenith_array(self):
         """Test reading the satellite zenith array."""
         info = dict(xml_tag="Viewing_Incidence_Angles_Grids", xml_item="Zenith")
-        expected_data_l1c = np.array([[11.7128, 11.18397802, 10.27667671, 9.35384969, 8.42850504,
+        expected_data = np.array([[11.7128, 11.18397802, 10.27667671, 9.35384969, 8.42850504,
                                    7.55445611, 6.65475545, 5.66517232, 4.75893757, 4.04976844],
                                   [11.88606009, 10.9799713, 10.07083278, 9.14571825, 8.22607131,
                                    7.35181457, 6.44647222, 5.46144173, 4.56625547, 3.86638233],
@@ -5811,34 +958,15 @@ def test_satellite_zenith_array(self):
                                    3.7708, 3.7708, 3.7708, 3.7708, 3.24140837],
                                   [3.7708, 3.7708, 3.7708, 3.7708, 3.7708,
                                    3.7708, 3.7708, 3.7708, 3.7708, 3.24140837]])
-        expected_data_l2a = np.array([[ 0.823021, 1.57224058, 2.40477933, 3.33521808, 4.29339294, 5.21712818,
-                                        6.13685593, 7.07123343, 7.9968313, 8.91635508],
-                                      [ 0.99274285, 1.78598401, 2.6537931, 3.59082788, 4.54147721, 5.47021744,
-                                        6.39166677, 7.31780035, 8.24199199, 9.16344605],
-                                      [ 1.19102539, 2.01811264, 2.90509575, 3.843318, 4.79014776, 5.71596442,
-                                        6.63842562, 7.56669001, 8.49008303, 9.41563303],
-                                      [ 1.40805629, 2.22663763, 3.15710255, 4.11212841, 5.0389507, 5.95589868,
-                                        6.88741709, 7.81585111, 8.74029188, 9.66309923],
-                                      [ 1.6364984, 2.47693166, 3.40635124, 4.36058177, 5.28712709, 6.20731242,
-                                        7.1382703, 8.0645253, 8.98640945, 9.91275701],
-                                      [ 1.86939307, 2.72734103, 3.6589573, 4.61010637, 5.53711387, 6.45878796,
-                                        7.38909762, 8.31322866, 9.23367977, 10.16133622],
-                                      [ 2.05092764, 2.97881829, 3.93023614, 4.86081228, 5.78239877, 6.71040601,
-                                        7.63930914, 8.5619639, 9.48322901, 10.40935204],
-                                      [ 2.30188982, 3.2312058, 4.18234794, 5.11071768, 6.032489, 6.96190824,
-                                        7.88904158, 8.81055152, 9.73186255, 10.65685334],
-                                      [ 2.55171923, 3.48266186, 4.4345628, 5.36132917, 6.28264069, 7.2133112,
-                                        8.13807392, 9.05916615, 9.98720691, 10.90435203],
-                                      [ 2.80292458, 3.74216648, 4.68491268, 5.61065107, 6.53464742, 7.46456958,
-                                        8.38738208, 9.3076386, 10.23571319, 11.15166785]])
+
         res1 = self.l1c_xml_tile_fh.get_dataset(make_dataid(name="satellite_zenith_angle",
                                                             resolution=60), info)[::200, ::200]
-        res2 = self.l2a_xml_tile_fh.get_dataset(make_dataid(name="satellite_zenith_angle_l2a",
+        res2 = self.l1c_xml_tile_fh.get_dataset(make_dataid(name="satellite_zenith_angle_l2a",
                                                             resolution=60), info)[::200, ::200]
-        np.testing.assert_allclose(res1, expected_data_l1c)
-        np.testing.assert_allclose(res2, expected_data_l2a)
+        np.testing.assert_allclose(res1, expected_data)
+        np.testing.assert_allclose(res2, expected_data)
 
     def test_old_xml_calibration(self):
         """Test the calibration of older data formats (no offset)."""
@@ -5870,7 +998,7 @@ def test_xml_calibration_to_counts(self):
 
     def test_xml_calibration_unmasked_saturated(self):
         """Test the calibration with radiometric offset but unmasked saturated pixels."""
         from satpy.readers.msi_safe import SAFEMSIMDXML
-        filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A")
+        filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A", process_level="L1C")
         self.l1c_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), filename_info, mock.MagicMock(), mask_saturated=False)
 
         fake_data = xr.DataArray([[[0, 1, 2, 3],
@@ -5918,7 +1046,8 @@ class TestSAFEMSIL1C:
     def setup_method(self):
         """Set up the test."""
         from satpy.readers.msi_safe import SAFEMSITileMDXML
-        self.filename_info = dict(observation_time=None, fmission_id="S2A", band_name="B01", dtile_number=None)
+        self.filename_info = dict(observation_time=None, fmission_id="S2A", band_name="B01", dtile_number=None,
+                                  process_level="L1C")
         self.fake_data = xr.Dataset({"band_data": xr.DataArray([[[0, 1], [65534, 65535]]], dims=["band", "x", "y"])})
         self.tile_mda = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(mtd_l1c_tile_xml), self.filename_info,
                                                                mock.MagicMock())
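A note on the angle handling tested above: the tile XML stores each Viewing_Incidence_Angles_Grids block on a
coarse raster (23-value rows at 5000 m COL_STEP/ROW_STEP in the fixtures), while the tests request the dataset at
resolution=60 and then subsample with [::200, ::200] to obtain the 10x10 comparison arrays. Below is a minimal,
self-contained sketch of that coarse-to-fine step, assuming simple separable linear interpolation; satpy's actual
handler may interpolate differently (and lazily, via dask), so treat this as conceptual only.

import numpy as np

# Stand-in for one 23x23 angle grid parsed from MTD_TL.xml (values only).
coarse = np.linspace(3.2, 11.8, 23 * 23).reshape(23, 23)
old_coords = np.arange(23) * 5000.0   # 5000 m COL_STEP / ROW_STEP from the XML
new_coords = np.arange(1830) * 60.0   # 60 m raster: 1830x1830 pixels per tile
# Separable linear interpolation: stretch each row, then each column.
rows = np.array([np.interp(new_coords, old_coords, row) for row in coarse])
fine = np.array([np.interp(new_coords, old_coords, col) for col in rows.T]).T
assert fine.shape == (1830, 1830)
print(fine[::200, ::200].shape)       # (10, 10), like the arrays in the tests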
From 59a3f7830ae18ca7a863095b7ce400aac7176d42 Mon Sep 17 00:00:00 2001
From: yukaribbba
Date: Fri, 19 Apr 2024 19:37:47 +0800
Subject: [PATCH 1269/1416] Update msi_safe.yaml

---
 satpy/etc/readers/msi_safe.yaml | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/satpy/etc/readers/msi_safe.yaml b/satpy/etc/readers/msi_safe.yaml
index d877a1ba5f..d3efb0a1ea 100644
--- a/satpy/etc/readers/msi_safe.yaml
+++ b/satpy/etc/readers/msi_safe.yaml
@@ -11,7 +11,7 @@ reader:
 
 file_types:
   l1c_safe_granule:
-    file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSI{process_level:3s}
+    file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C
     file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/{process_level:3s}_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}.jp2']
     requires: [l1c_safe_metadata, l1c_safe_tile_metadata]
   l1c_safe_tile_metadata:
@@ -22,15 +22,15 @@ file_types:
     file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSI{process_level:3s}.xml']
   l2a_safe_granule_10m:
-    file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSI{process_level:3s}
+    file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C
     file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/{process_level:3s}_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R10m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_10m.jp2']
     requires: [l2a_safe_metadata, l2a_safe_tile_metadata]
   l2a_safe_granule_20m:
-    file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSI{process_level:3s}
+    file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C
     file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/{process_level:3s}_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R20m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_20m.jp2']
     requires: [l2a_safe_metadata, l2a_safe_tile_metadata]
   l2a_safe_granule_60m:
-    file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSI{process_level:3s}
+    file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C
     file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/{process_level:3s}_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R60m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_60m.jp2']
     requires: [l2a_safe_metadata, l2a_safe_tile_metadata]
   l2a_safe_tile_metadata:
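Why the file_reader change above is needed: the !!python/name: tag is resolved by the YAML loader when
msi_safe.yaml is read, i.e. before any filename has been matched, so a {process_level:3s} placeholder inside it
can never be substituted and the handler class has to be spelled out. The process level itself is still recovered
at match time from file_patterns. A minimal sketch of that step, assuming only that trollsift (the pattern
library satpy uses for file_patterns) is installed; the pattern below is a shortened form of the one in the YAML:

from trollsift import parse

# Shortened form of the msi_safe.yaml file pattern; the full pattern also
# encodes the granule path, tile numbers and band name.
pattern = "{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}"
info = parse(pattern, "S2A_MSIL2A_20240411T030632")
print(info["process_level"])  # -> "L2A", handed to the file handler as filename_info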
From d3cbffa8cf4ed796e37cc996fec54a6678d43d00 Mon Sep 17 00:00:00 2001
From: yukaribbba
Date: Fri, 19 Apr 2024 20:34:06 +0800
Subject: [PATCH 1270/1416] Update test_msi_safe.py

---
 satpy/tests/reader_tests/test_msi_safe.py | 35 +++++------------------
 1 file changed, 7 insertions(+), 28 deletions(-)

diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py
index c7cf8c1e28..385b86843a 100644
--- a/satpy/tests/reader_tests/test_msi_safe.py
+++ b/satpy/tests/reader_tests/test_msi_safe.py
@@ -862,15 +862,17 @@
 
 
 class TestTileXML:
-    """Test the SAFE TILE XML file handler."""
+    """Test the SAFE TILE XML file handler.
+    Since L1C/L2A share almost the same Tile XML, we just need to test L1C.
+    """
 
     def setup_method(self):
         """Set up the test case."""
-        from satpy.readers.msi_safe import SAFEMSIMDXML, SAFEMSITileMDXML
+        from satpy.readers.msi_safe import SAFEMSITileMDXML
         filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A", process_level="L1C")
         self.l1c_xml_tile_fh = SAFEMSITileMDXML(BytesIO(mtd_l1c_tile_xml), filename_info, mock.MagicMock())
 
-    @pytest.mark.parametrize(("angles_name", "angle_block", "angle_type", "expected"),
+    @pytest.mark.parametrize(("angle_name", "angle_block", "angle_type", "expected"),
                              [("satellite_zenith_angle", "Viewing_Incidence_Angles_Grids", "Zenith",
@@ -892,37 +894,14 @@ def setup_method(self):
                                  3.7708, 3.7708, 3.7708, 3.7708, 3.24140837],
                                 [3.7708, 3.7708, 3.7708, 3.7708, 3.7708,
                                  3.7708, 3.7708, 3.7708, 3.7708, 3.24140837]]),
-                              ("satellite_zenith_angle_l2a", "Viewing_Incidence_Angles_Grids", "Zenith",
-                               [[11.7128, 11.18397802, 10.27667671, 9.35384969, 8.42850504,
-                                 7.55445611, 6.65475545, 5.66517232, 4.75893757, 4.04976844],
-                                [11.88606009, 10.9799713, 10.07083278, 9.14571825, 8.22607131,
-                                 7.35181457, 6.44647222, 5.46144173, 4.56625547, 3.86638233],
-                                [11.6823579, 10.7763071, 9.86302106, 8.93879112, 8.04005637,
-                                 7.15028077, 6.21461062, 5.25780953, 4.39876601, 3.68620793],
-                                [11.06724679, 10.35723901, 9.63958896, 8.73072512, 7.83680864,
-                                 6.94792574, 5.9889201, 5.05445872, 4.26089708, 3.50984272],
-                                [6.28411038, 6.28411038, 6.28411038, 6.28411038, 6.28411038,
-                                 5.99769643, 5.62586167, 4.85165966, 4.13238314, 3.33781401],
-                                [3.7708, 3.7708, 3.7708, 3.7708, 3.7708,
-                                 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837],
-                                [3.7708, 3.7708, 3.7708, 3.7708, 3.7708,
-                                 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837],
-                                [3.7708, 3.7708, 3.7708, 3.7708, 3.7708,
-                                 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837],
-                                [3.7708, 3.7708, 3.7708, 3.7708, 3.7708,
-                                 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837],
-                                [3.7708, 3.7708, 3.7708, 3.7708, 3.7708,
-                                 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837]]),
                               ])
-    def test_angles(self, angles_name, angle_block, angle_type, expected):
+    def test_angles(self, angle_name, angle_block, angle_type, expected):
         info = dict(xml_tag=angle_block, xml_item=angle_type)
-        res = self.l1c_xml_tile_fh.get_dataset(make_dataid(name=angles_name,
+        res = self.l1c_xml_tile_fh.get_dataset(make_dataid(name=angle_name,
                                                            resolution=60), info)[::200, ::200]
         np.testing.assert_allclose(res, expected)
 
 
-
-
 class TestMTDXML:
     """Test the SAFE MTD XML file handler."""
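With the reader and tests settled as above, end-to-end usage is unchanged for users. A hedged sketch, assuming
an unpacked L1C SAFE product in the working directory; the "msi_safe" reader name follows the YAML file touched
in these patches, and the dataset names are the ones exercised by the tests (adjust the globs to your product):

from glob import glob

from satpy import Scene

# Collect the granule image plus the two metadata files the reader's
# file_patterns expect (tile MTD_TL.xml and product MTD_MSIL1C.xml).
files = glob("S2A_MSIL1C_*.SAFE/GRANULE/*/IMG_DATA/*_B01.jp2")
files += glob("S2A_MSIL1C_*.SAFE/GRANULE/*/MTD_TL.xml")
files += glob("S2A_MSIL1C_*.SAFE/MTD_MSIL1C.xml")

scn = Scene(reader="msi_safe", filenames=files)
scn.load(["B01", "satellite_zenith_angle"])
print(scn["B01"].shape)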
+ """ def setup_method(self): """Set up the test case.""" - from satpy.readers.msi_safe import SAFEMSIMDXML, SAFEMSITileMDXML + from satpy.readers.msi_safe import SAFEMSITileMDXML filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A", process_level="L1C") self.l1c_xml_tile_fh = SAFEMSITileMDXML(BytesIO(mtd_l1c_tile_xml), filename_info, mock.MagicMock()) - @pytest.mark.parametrize(("angles_name", "angle_block", "angle_type", "expected"), + @pytest.mark.parametrize(("angle_name", "angle_block", "angle_type", "expected"), [("satellite_zenith_angle", "Viewing_Incidence_Angles_Grids", "Zenith", [[11.7128, 11.18397802, 10.27667671, 9.35384969, 8.42850504, 7.55445611, 6.65475545, 5.66517232, 4.75893757, 4.04976844], @@ -892,37 +894,14 @@ def setup_method(self): 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837]]), - ("satellite_zenith_angle_l2a", "Viewing_Incidence_Angles_Grids", "Zenith", - [[11.7128, 11.18397802, 10.27667671, 9.35384969, 8.42850504, - 7.55445611, 6.65475545, 5.66517232, 4.75893757, 4.04976844], - [11.88606009, 10.9799713, 10.07083278, 9.14571825, 8.22607131, - 7.35181457, 6.44647222, 5.46144173, 4.56625547, 3.86638233], - [11.6823579, 10.7763071, 9.86302106, 8.93879112, 8.04005637, - 7.15028077, 6.21461062, 5.25780953, 4.39876601, 3.68620793], - [11.06724679, 10.35723901, 9.63958896, 8.73072512, 7.83680864, - 6.94792574, 5.9889201, 5.05445872, 4.26089708, 3.50984272], - [6.28411038, 6.28411038, 6.28411038, 6.28411038, 6.28411038, - 5.99769643, 5.62586167, 4.85165966, 4.13238314, 3.33781401], - [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, - 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], - [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, - 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], - [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, - 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], - [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, - 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], - [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, - 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837]]), ]) - def test_angles(self, angles_name, angle_block, angle_type, expected): + def test_angles(self, angle_name, angle_block, angle_type, expected): info = dict(xml_tag=angle_block, xml_item=angle_type) - res = self.l1c_xml_tile_fh.get_dataset(make_dataid(name=angles_name, + res = self.l1c_xml_tile_fh.get_dataset(make_dataid(name=angle_name, resolution=60), info)[::200, ::200] np.testing.assert_allclose(res, expected) - - class TestMTDXML: """Test the SAFE MTD XML file handler.""" From 89a0c3e43bcc99ba163ede75042ddc9ecc2c80e4 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 19 Apr 2024 09:11:35 -0500 Subject: [PATCH 1271/1416] Fix cftime not being uninstalled in unstable CI --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index c472709bc8..8d0af4123e 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -77,7 +77,7 @@ jobs: numpy \ pandas \ scipy - mamba remove --force-remove -y pykdtree pyresample trollimage pyhdf netcdf4 h5py + conda remove --force-remove -y pykdtree pyresample trollimage pyhdf netcdf4 h5py cftime python -m pip install --upgrade --no-deps --pre --no-build-isolation \ git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ From 14e1214e3cda0564e1174864d8a9a137eed7ceb2 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 19 Apr 2024 09:25:21 -0500 Subject: [PATCH 
1272/1416] Remove conda remove of trollimage which is pip installed --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 8d0af4123e..9636148db2 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -77,7 +77,7 @@ jobs: numpy \ pandas \ scipy - conda remove --force-remove -y pykdtree pyresample trollimage pyhdf netcdf4 h5py cftime + conda remove --force-remove -y pykdtree pyresample pyhdf netcdf4 h5py cftime python -m pip install --upgrade --no-deps --pre --no-build-isolation \ git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ From b420b193f91714aa850a2c714271b5e8fd40d4bb Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Fri, 19 Apr 2024 22:52:04 +0800 Subject: [PATCH 1273/1416] yaml and test --- satpy/etc/readers/msi_safe.yaml | 16 +++++----- satpy/tests/reader_tests/test_msi_safe.py | 36 +++++++++++++++++++++-- 2 files changed, 41 insertions(+), 11 deletions(-) diff --git a/satpy/etc/readers/msi_safe.yaml b/satpy/etc/readers/msi_safe.yaml index d3efb0a1ea..16a74e64ea 100644 --- a/satpy/etc/readers/msi_safe.yaml +++ b/satpy/etc/readers/msi_safe.yaml @@ -12,33 +12,33 @@ reader: file_types: l1c_safe_granule: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C - file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/{process_level:3s}_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}.jp2'] + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}.jp2'] requires: [l1c_safe_metadata, l1c_safe_tile_metadata] l1c_safe_tile_metadata: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSITileMDXML - file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/{process_level:3s}_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml'] + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml'] l1c_safe_metadata: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIMDXML - file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSI{process_level:3s}.xml'] + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSIL1C.xml'] l2a_safe_granule_10m: file_reader: 
!!python/name:satpy.readers.msi_safe.SAFEMSIL1C - file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/{process_level:3s}_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R10m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_10m.jp2'] + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R10m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_10m.jp2'] requires: [l2a_safe_metadata, l2a_safe_tile_metadata] l2a_safe_granule_20m: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C - file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/{process_level:3s}_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R20m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_20m.jp2'] + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R20m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_20m.jp2'] requires: [l2a_safe_metadata, l2a_safe_tile_metadata] l2a_safe_granule_60m: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C - file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/{process_level:3s}_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R60m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_60m.jp2'] + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R60m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_60m.jp2'] requires: [l2a_safe_metadata, l2a_safe_tile_metadata] l2a_safe_tile_metadata: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSITileMDXML - file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/{process_level:3s}_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml'] + file_patterns: 
['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml'] l2a_safe_metadata: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIMDXML - file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSI{process_level:3s}.xml'] + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSIL2A.xml'] datasets: B01: diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 385b86843a..8d797bcf16 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -894,12 +894,42 @@ def setup_method(self): 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837]]), + ("solar_zenith_angle_l2a", "Sun_Angles_Grid", "Zenith", + [[39.8824, 39.83721367, 39.79230847, 39.74758442, 39.7030415, + 39.65867687, 39.61455566, 39.57061558, 39.52685664, 39.48331372], + [39.78150175, 39.73629896, 39.69128852, 39.64643679, 39.6018404, + 39.5574369, 39.51323286, 39.46920212, 39.4253673, 39.38179377], + [39.6806035, 39.63532838, 39.5902497, 39.54538507, 39.5007087, + 39.45621756, 39.41195347, 39.36779169, 39.3239121, 39.28027381], + [39.57980525, 39.53445664, 39.48931088, 39.44434154, 39.39957879, + 39.35503587, 39.31067408, 39.26649344, 39.22249393, 39.17876143], + [39.479007, 39.43355483, 39.38829092, 39.34328573, 39.29846167, + 39.25381983, 39.2093947, 39.16513007, 39.12109926, 39.07726878], + [39.37820875, 39.33268069, 39.28735495, 39.24224914, 39.19736058, + 39.15267709, 39.1081719, 39.06385068, 39.01973446, 38.97584982], + [39.2774105, 39.23184303, 39.18646737, 39.14130809, 39.09632176, + 39.05153988, 39.00696049, 38.9625713, 38.91842056, 38.87444401], + [39.17671225, 39.13104478, 39.08559031, 39.04034757, 38.99528294, + 38.95039991, 38.9057971, 38.86130793, 38.81705183, 38.77303821], + [39.076014, 39.03026112, 38.98477906, 38.93940875, 38.89425338, + 38.84936063, 38.80464763, 38.76011645, 38.7157479, 38.67164839], + [38.97531575, 38.92950771, 38.88389967, 38.83852091, 38.7933053, + 38.74831897, 38.7034912, 38.65891427, 38.61446851, 38.57030388]]), + ("moon_zenith_angle", "Sun_Angles_Grid", "Zenith", None) ]) def test_angles(self, angle_name, angle_block, angle_type, expected): - info = dict(xml_tag=angle_block, xml_item=angle_type) + info = dict(xml_tag=angle_block, xml_item=angle_type) if "satellite" in angle_name else \ + dict(xml_tag=angle_block + "/" + angle_type) + res = self.l1c_xml_tile_fh.get_dataset(make_dataid(name=angle_name, - resolution=60), info)[::200, ::200] - np.testing.assert_allclose(res, expected) + resolution=60), info) + if res is not None: + res = res[::200, ::200] + + if expected is not None: + np.testing.assert_allclose(res, expected) + else: + assert res is expected class TestMTDXML: From da5f04d88fd67e93357de9250716e8b20e59020d Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Fri, 19 Apr 2024 22:54:24 +0800 Subject: [PATCH 1274/1416] Update test_msi_safe.py --- 
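Note on the two info-dict shapes exercised by test_angles above (my reading of
msi_safe.yaml, sketched here rather than quoted from the canonical dataset
definitions): satellite viewing angles are selected with separate
xml_tag/xml_item keys because several per-detector
"Viewing_Incidence_Angles_Grids" blocks have to be combined, while the solar
angles sit under a single node addressed by a slash-joined path:

    # satellite angles: many per-detector grids, picked out by tag + item
    info = {"xml_tag": "Viewing_Incidence_Angles_Grids", "xml_item": "Zenith"}
    # solar angles: one grid, addressed directly by its path
    info = {"xml_tag": "Sun_Angles_Grid/Zenith"}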
satpy/tests/reader_tests/test_msi_safe.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 8d797bcf16..48f835b352 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -864,6 +864,7 @@ class TestTileXML: """Test the SAFE TILE XML file handler. Since L1C/L2A share almost the same Tile XML, we just need to test L1C. + """ def setup_method(self): @@ -918,6 +919,7 @@ def setup_method(self): ("moon_zenith_angle", "Sun_Angles_Grid", "Zenith", None) ]) def test_angles(self, angle_name, angle_block, angle_type, expected): + """Test reading angles array.""" info = dict(xml_tag=angle_block, xml_item=angle_type) if "satellite" in angle_name else \ dict(xml_tag=angle_block + "/" + angle_type) From 0aeab3bc72c5f62f627b2396d89d69a88bd248b0 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 19 Apr 2024 09:55:36 -0500 Subject: [PATCH 1275/1416] Install geotiepoints from source in unstable CI --- .github/workflows/ci.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 9636148db2..7e1ce4bb80 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -77,11 +77,12 @@ jobs: numpy \ pandas \ scipy - conda remove --force-remove -y pykdtree pyresample pyhdf netcdf4 h5py cftime + conda remove --force-remove -y pykdtree pyresample python-geotiepoints pyhdf netcdf4 h5py cftime python -m pip install --upgrade --no-deps --pre --no-build-isolation \ git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ git+https://github.com/pytroll/trollimage \ + git+https://github.com/pytroll/python-geotiepoints \ git+https://github.com/fhs/pyhdf \ git+https://github.com/h5py/h5py \ git+https://github.com/h5netcdf/h5netcdf \ From 504590cd963ff7a551742517ed77b5fba39a5110 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Fri, 19 Apr 2024 22:55:54 +0800 Subject: [PATCH 1276/1416] Update test_msi_safe.py --- satpy/tests/reader_tests/test_msi_safe.py | 1 + 1 file changed, 1 insertion(+) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 48f835b352..0e0c1e1845 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -863,6 +863,7 @@ class TestTileXML: """Test the SAFE TILE XML file handler. + Since L1C/L2A share almost the same Tile XML, we just need to test L1C. """ From b50ee5b628d6c0cd87810ae6c814d67b07e5a4ac Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 19 Apr 2024 10:18:32 -0500 Subject: [PATCH 1277/1416] Force pyerfa to be installed from PyPI --- .github/workflows/ci.yaml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 7e1ce4bb80..ce4a16006f 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -67,6 +67,9 @@ jobs: # Install pykdtree with --no-build-isolation so it builds with numpy 2.0 # We must get LD_PRELOAD for stdlibc++ or else the manylinux wheels # may break the conda-forge libraries trying to use newer glibc versions + # NOTE: Many of the packages removed and then reinstalled below are to avoid + # compatibility issues with numpy 2. When conda-forge has numpy 2 available + # this shouldn't be needed. 
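          # As a rough sanity check (illustrative only, not a step of this
          # workflow), the rebuilt stack can be verified against the new numpy
          # from a shell in the same environment:
          #   python -c "import numpy, pykdtree.kdtree, pyresample; print(numpy.__version__)"
          # Anything still compiled against numpy 1.x fails that import with an
          # ABI error once a numpy 2 pre-release is installed.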
run: | python -m pip install versioneer extension-helpers setuptools-scm configobj pkgconfig python -m pip install \ @@ -77,7 +80,7 @@ jobs: numpy \ pandas \ scipy - conda remove --force-remove -y pykdtree pyresample python-geotiepoints pyhdf netcdf4 h5py cftime + conda remove --force-remove -y pykdtree pyresample python-geotiepoints pyhdf netcdf4 h5py cftime astropy pyerfa python -m pip install --upgrade --no-deps --pre --no-build-isolation \ git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ From f851334dc287edb382f578d558409416010ddb48 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Fri, 19 Apr 2024 23:42:00 +0800 Subject: [PATCH 1278/1416] Update test_msi_safe.py --- satpy/tests/reader_tests/test_msi_safe.py | 656 ++++++++++++++++++++-- 1 file changed, 598 insertions(+), 58 deletions(-) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 0e0c1e1845..e13a923cd4 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -860,22 +860,579 @@ """ # noqa +mtd_l2a_xml = """ + + + + 2024-04-11T03:05:21.024Z + 2024-04-11T03:05:21.024Z + S2A_MSIL2A_20240411T030521_N0510_R075_T50TMK_20240411T080950.SAFE + Level-2A + S2MSI2A + 05.10 + https://doi.org/10.5270/S2_-znk9xsj + 2024-04-11T08:09:50.000000Z + Not applicable + Not applicable + + Sentinel-2A + INS-NOBS + 2024-04-11T03:05:21.024Z + 75 + DESCENDING + + +SAFE_COMPACT + + + + + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_B02_10m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_B03_10m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_B04_10m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_B08_10m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_TCI_10m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_AOT_10m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_WVP_10m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B01_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B02_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B03_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B04_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B05_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B06_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B07_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B8A_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B11_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B12_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_TCI_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_AOT_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_WVP_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_SCL_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B01_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B02_60m + 
GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B03_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B04_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B05_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B06_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B07_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B8A_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B09_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B11_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B12_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_TCI_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_AOT_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_WVP_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_SCL_60m + + + + + + + NODATA + 0 + + + SATURATED + 65535 + + + 3 + 2 + 1 + + + 10000 + 1000.0 + 1000.0 + + + -1000 + -1000 + -1000 + -1000 + -1000 + -1000 + -1000 + -1000 + -1000 + -1000 + -1000 + -1000 + -1000 + + + 0.998279632507911 + + 1884.69 + 1959.66 + 1823.24 + 1512.06 + 1424.64 + 1287.61 + 1162.08 + 1041.63 + 955.32 + 812.92 + 367.15 + 245.59 + 85.25 + + + + + 60 + + 412 + 456 + 442.7 + + + 1 + 0.001775742 0.004073061 0.003626143 0.003515199 0.005729163 0.003780292 0.002636732 0.001262113 0.001987583 0.001368913 0.001250444 0.000463454 0.000814293 0.001376431 0.001485086 0.001823735 0.001626817 0.004392062 0.029008099 0.11874593 0.32387506 0.57281921 0.71472749 0.76196778 0.78929702 0.80862387 0.81089382 0.82419876 0.85415811 0.87079088 0.88731097 0.92619924 0.98228149 1 0.9752382 0.93596338 0.88997148 0.85021048 0.82569453 0.78390239 0.61417422 0.33007109 0.12410831 0.04365694 0.014749595 + + + + 10 + + 456 + 533 + 492.7 + + + 1 + 0.04255531 0.0722983 0.15374322 0.32799225 0.55336788 0.71011166 0.75285179 0.75232691 0.75668081 0.76326948 0.76239425 0.7852515 0.81546669 0.86179176 0.89282599 0.9195221 0.91900649 0.91315754 0.90035366 0.88989693 0.8823246 0.87606118 0.88429987 0.90695544 0.93232085 0.93947252 0.94383543 0.92204086 0.8860231 0.84743609 0.81251687 0.7823971 0.7731087 0.77209054 0.78742652 0.81217177 0.84605052 0.88767996 0.92793997 0.95069235 0.96573311 0.96938253 0.96570294 0.95832003 0.95405064 0.95178268 0.95699722 0.96556515 0.9770514 0.97709574 0.97436606 0.95903183 0.93506318 0.90190134 0.87165792 0.84402444 0.82280852 0.81536043 0.82057639 0.8395149 0.86992171 0.91526205 0.96067028 0.99163699 1 0.98356097 0.91130763 0.74018256 0.50395858 0.3050155 0.18004605 0.10738342 0.06593592 0.04207746 0.02662129 0.0143396 0.00265779 0.00081822 + + + + 10 + + 538 + 583 + 559.8 + + + 1 + 0.01448181 0.03422251 0.07346335 0.15444843 0.31661425 0.55322279 0.74859406 0.84890306 0.89772216 0.9215368 0.92572845 0.91122688 0.88818924 0.86523756 0.84718187 0.8387572 0.84459081 0.86219653 0.88838714 0.92443236 0.96017974 0.98685516 1 0.9986008 0.98076472 0.94522089 0.8981778 0.85580323 0.81841734 0.78862048 0.76460653 0.74963745 0.75055111 0.76137888 0.78244479 0.79890086 0.81016957 0.81408886 0.77358596 0.62881065 0.40397555 0.21542098 0.10715281 0.04792877 0.01848693 0.00108588 + + + + 10 + + 646 + 684 + 664.6 + + + 1 + 0.00141521 0.02590238 
0.11651178 0.39088616 0.74959342 0.94485805 0.98011173 0.99406309 1 0.99545475 0.99052772 0.97733476 0.94055988 0.87894956 0.81629384 0.77345952 0.75448766 0.75991531 0.7826343 0.8101689 0.83612975 0.86125424 0.88609106 0.91138767 0.93405146 0.95042063 0.9592573 0.96039555 0.95913395 0.95809013 0.95527459 0.94376465 0.89490799 0.74426308 0.476777 0.22960399 0.08009118 0.02617076 0.00415242 + + + + 20 + + 695 + 714 + 704.1 + + + 1 + 0.02835786 0.12369337 0.39378774 0.76113071 0.97108502 0.99889523 1 0.99412258 0.98321789 0.96704093 0.94847389 0.92714833 0.90372458 0.88614713 0.86723745 0.79075319 0.58840332 0.26334833 0.05675422 0.00618833 + + + + 20 + + 731 + 749 + 740.5 + + + 1 + 0.00171088 0.05467153 0.25806676 0.64722098 0.89218999 0.90232877 0.91508768 0.94115846 0.96299993 0.97510481 0.9770217 0.98736251 1 0.98880277 0.97179916 0.90126739 0.60672391 0.20520227 0.0267569 + + + + 20 + + 769 + 797 + 782.8 + + + 1 + 0.00045899 0.0117201 0.05219715 0.16561733 0.36903355 0.63685453 0.86119638 0.97002897 0.99119602 0.99897921 1 0.97725155 0.92572385 0.86605804 0.81969611 0.79407674 0.79111029 0.80431552 0.81902721 0.82571292 0.82011829 0.79222195 0.72054559 0.58767794 0.41430355 0.23088817 0.09850282 0.02736551 0.00516235 + + + + 10 + + 760 + 907 + 832.8 + + + 1 + 0.00067259 0.00388856 0 0 0 0 0 0 0 0 0 0 0 0.00028956 0.00702964 0.01752391 0.03231111 0.05328661 0.08299885 0.12748502 0.19591065 0.30246323 0.43553954 0.57141637 0.69766701 0.80303852 0.89115744 0.95284584 0.98894161 1 0.98840653 0.96389216 0.94207967 0.93694643 0.94227343 0.95395718 0.96828896 0.97966549 0.9854444 0.98592681 0.98391181 0.97793903 0.97722771 0.97810609 0.98144486 0.98764558 0.98857708 0.9862422 0.98070921 0.97078624 0.95721089 0.93865821 0.91672388 0.89620759 0.872888 0.85160331 0.8246394 0.80078117 0.7823386 0.76360274 0.74962771 0.7387221 0.73079407 0.72271237 0.72507708 0.72563856 0.72304217 0.72229211 0.71616364 0.71159446 0.70826954 0.70157205 0.69924532 0.70093762 0.70692733 0.71824001 0.73124634 0.7484061 0.76818541 0.78394807 0.7968381 0.80260206 0.8045194 0.80240918 0.79699072 0.78920304 0.77691621 0.76518406 0.75119717 0.73700357 0.72262399 0.70412578 0.68410805 0.66474528 0.64736891 0.63005125 0.61564222 0.60249557 0.58988992 0.57993399 0.57136506 0.56094242 0.55235105 0.54568236 0.53958052 0.53510215 0.53093675 0.53016508 0.52984662 0.53036682 0.53211463 0.53271918 0.53246806 0.53331158 0.5319278 0.53051055 0.52951499 0.52996848 0.53253373 0.53705085 0.54235344 0.54912497 0.55523055 0.56011135 0.55767999 0.54821984 0.53144613 0.50763528 0.47811224 0.45092793 0.42798466 0.41051405 0.40039139 0.40087302 0.40829375 0.42086556 0.43007022 0.42456692 0.39136817 0.33009008 0.25720509 0.18189031 0.11650668 0.07031579 0.04275381 0.02593154 0.01574394 0.00394326 + + + + 20 + + 837 + 881 + 864.7 + + + 1 + 0.00030097 0 0 0 0 0 0 0 0 0 0.00157217 0.00249886 0.01332037 0.02614866 0.05260479 0.10779709 0.22160755 0.39721628 0.60986885 0.81658883 0.9322445 0.97210033 0.97545482 0.97538048 0.97328205 0.97607828 0.98034955 0.98690928 0.99087465 0.99741818 0.99984673 0.99939141 0.99587928 0.99541228 1 0.99640762 0.92359433 0.74137684 0.48965971 0.25020643 0.11221246 0.04755984 0.02297815 0.01061438 0.00108149 + + + + 60 + + 932 + 958 + 945.1 + + + 1 + 0.01662953 0.06111857 0.17407094 0.38946454 0.6645915 0.87454114 0.93695988 0.96751014 0.9893391 0.9951269 1 0.97845762 0.98069118 0.9922335 0.98798379 0.99428313 0.98348041 0.97820013 0.95023367 0.95299604 0.92240308 0.85573828 0.70970227 0.46429542 0.21538427 0.06534121 
0.01625596 + + + + 60 + + 1337 + 1412 + 1373.5 + + + 1 + 0.00024052 5.404e-05 3.052e-05 2.872e-05 7.632e-05 0.00010949 8.804e-05 0.00012356 0.00017424 0.0003317 0.00036891 0.0004467 0.00065919 0.0010913 0.00196903 0.00373668 0.00801754 0.01884719 0.04466732 0.10165546 0.20111776 0.34284841 0.50710992 0.6632068 0.78377143 0.86153862 0.91000261 0.94193255 0.96182259 0.97365119 0.98169786 0.98795826 0.99283342 0.99649788 0.99906011 1 0.99907734 0.99601604 0.9909083 0.98479854 0.97802142 0.97030114 0.96080954 0.94849765 0.93314108 0.91482336 0.8937997 0.86825426 0.83023193 0.76384193 0.65440009 0.50671604 0.35014737 0.21799972 0.12643091 0.06768988 0.0322709 0.013544 0.00544557 0.00237642 0.00111267 0.00053796 0.0003457 0.00017488 0.00021619 0.00019479 0.00010421 5.919e-05 5.109e-05 6.115e-05 5.527e-05 3.856e-05 3.147e-05 0.00012289 0.0001089 2.502e-05 + + + + 20 + + 1539 + 1682 + 1613.7 + + + 1 + 6.79e-06 6.66e-06 8e-06 2.734e-05 3.685e-05 8.851e-05 0.00014522 0.00024812 0.00047627 0.00056335 0.00065326 0.00089835 0.00114664 0.00165604 0.00241611 0.00350246 0.00524274 0.0081538 0.01237062 0.0186097 0.02721853 0.03879155 0.05379167 0.07353187 0.09932758 0.1334178 0.18029249 0.24484994 0.32834511 0.42749961 0.53576798 0.64570396 0.74245998 0.81447017 0.85866596 0.87924777 0.88665266 0.888727 0.89105732 0.89725046 0.90632982 0.91627527 0.9263751 0.93515828 0.94226446 0.94739906 0.95131987 0.95416808 0.95635128 0.95813297 0.96062738 0.96344083 0.96577764 0.96818134 0.97104025 0.97343195 0.97597444 0.97865413 0.97994672 0.98064126 0.98094979 0.98143338 0.98123856 0.98068083 0.98033995 0.98101894 0.98268503 0.98507875 0.98777658 0.9903608 0.99202087 0.9933069 0.99256744 0.99044883 0.98717314 0.98353656 0.9800432 0.97617287 0.97253451 0.96977033 0.96762556 0.9662626 0.96572411 0.96592079 0.96729798 0.96975438 0.97337748 0.97862858 0.98345358 0.98765317 0.9919238 0.99554959 0.99767411 0.99866451 0.99941783 0.99930984 0.99885298 0.99913515 0.99973164 0.99973592 1 0.9998438 0.9967639 0.99175576 0.9859206 0.97887302 0.97029262 0.96135891 0.95379752 0.94709017 0.94228614 0.93919512 0.93616637 0.92889205 0.9129921 0.88158383 0.82602164 0.74412949 0.64281662 0.53483955 0.42772166 0.32439525 0.23488131 0.16445229 0.11056237 0.07271886 0.04634859 0.02949618 0.01941871 0.0133487 0.00934594 0.00654231 0.00487921 0.00341903 0.00249864 0.00196431 0.00142754 0.00105878 0.00049978 0.00022833 0.00015999 3.415e-05 4.517e-05 1.313e-05 + + + + 20 + + 2078 + 2320 + 2202.4 + + + 1 + 0.00063835 0.00102286 0.00288712 0.00399879 0.00658916 0.00765458 0.00799918 0.00853524 0.00929493 0.00999614 0.01096645 0.01208363 0.01335837 0.01501119 0.01711931 0.01977307 0.02332743 0.02765779 0.03320435 0.04020464 0.04886709 0.0596238 0.07315348 0.09050885 0.11143964 0.13686671 0.16776886 0.20341457 0.24281992 0.28484195 0.32711894 0.36834301 0.40794043 0.4447145 0.47647207 0.50303896 0.52524762 0.54328057 0.55717994 0.5685619 0.57895708 0.58860881 0.59881758 0.60990899 0.62128986 0.63421311 0.64847648 0.66363778 0.67997936 0.69609688 0.71189957 0.7269499 0.74124079 0.75734734 0.77201504 0.78552587 0.79818641 0.80962939 0.81965718 0.82855741 0.83668178 0.84440292 0.85106862 0.85321701 0.85471321 0.8561428 0.85778963 0.8594989 0.86142876 0.86322831 0.86511218 0.8672932 0.86967076 0.87427502 0.87856212 0.88241466 0.88590611 0.8894516 0.89320419 0.8966738 0.89987484 0.90257636 0.90481219 0.90550545 0.90564491 0.90548208 0.90513822 0.90476379 0.90406427 0.90332978 0.90274309 0.90235795 0.90196488 0.90340528 0.90429478 0.90529761 0.90642862 
0.90807348 0.91010493 0.91293181 0.91556686 0.91842631 0.92128288 0.92431702 0.92719913 0.92972159 0.93190455 0.93412538 0.93588954 0.93707083 0.93762594 0.93828534 0.93763643 0.94042634 0.94250397 0.94324531 0.94301861 0.94210283 0.94061808 0.93841726 0.93665003 0.93524569 0.93301102 0.92686708 0.92104485 0.91547175 0.91100989 0.90828339 0.9072733 0.90817907 0.91115631 0.91617845 0.92284525 0.92059829 0.91947472 0.91947973 0.92126575 0.92451632 0.92772589 0.93196884 0.93676408 0.94147739 0.94679545 0.95119533 0.95443018 0.95704142 0.95972628 0.9625372 0.96485326 0.96603599 0.96664138 0.96630455 0.96545713 0.96484036 0.96365512 0.96169531 0.95944859 0.95732078 0.95513625 0.95355574 0.95273072 0.95217795 0.95172542 0.9521403 0.95263595 0.95405248 0.95707559 0.96063594 0.96421772 0.96830187 0.97268597 0.97741944 0.98289489 0.9871429 0.99073348 0.99398244 0.99678431 0.99875181 1 0.9999284 0.9991523 0.99712951 0.99388228 0.98968273 0.98373274 0.97621057 0.96780985 0.95833495 0.94842856 0.93818752 0.9277078 0.91702104 0.90597951 0.89384371 0.88165575 0.86861704 0.85460324 0.84058628 0.82598123 0.80948042 0.79182917 0.7724052 0.74907137 0.72031195 0.68815487 0.65125598 0.6100244 0.56600904 0.52095058 0.47464344 0.42924778 0.38584718 0.34208462 0.30067509 0.26317221 0.22770037 0.19571781 0.16808736 0.14467686 0.12482737 0.10823403 0.09439655 0.08235799 0.07149445 0.0626855 0.05498009 0.04818852 0.04285814 0.03859244 0.03494044 0.03199172 0.02958044 0.02741084 0.02556884 0.02395058 0.02166741 0.0191457 0.01632139 0.0109837 0.00736032 0.00649061 0.00469736 0.00205874 + + + + 4.10137842 + 3.75605469 + 4.18741753 + 4.52205376 + 5.20680393 + 4.8729478 + 4.5356737 + 6.16247757 + 5.13772343 + 8.53898524 + 55.10485389 + 35.30373192 + 106.24732599 + + + SC_NODATA + 0 + + + SC_SATURATED_DEFECTIVE + 1 + + + SC_DARK_FEATURE_SHADOW + 2 + + + SC_CLOUD_SHADOW + 3 + + + SC_VEGETATION + 4 + + + SC_NOT_VEGETATED + 5 + + + SC_WATER + 6 + + + SC_UNCLASSIFIED + 7 + + + SC_CLOUD_MEDIUM_PROBA + 8 + + + SC_CLOUD_HIGH_PROBA + 9 + + + SC_THIN_CIRRUS + 10 + + + SC_SNOW_ICE + 11 + + + + + + + + + 40.64479480422486 115.81682739339685 40.65079881136531 117.1154430676197 39.66155122739065 117.11377991452629 39.655752572676114 115.83386830444628 40.64479480422486 115.81682739339685 + + + POINT + 1 + + + EPSG + GEOGRAPHIC + + + + + S2A_OPER_GIP_INVLOC_MPC__20171206T000000_V20150703T000000_21000101T000000_B00 + S2A_OPER_GIP_LREXTR_MPC__20150605T094736_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_ATMIMA_MPC__20150605T094744_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_ATMSAD_MPC__20160729T000005_V20150703T000000_21000101T000000_B00 + S2A_OPER_GIP_BLINDP_MPC__20150605T094736_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_CLOINV_MPC__20210609T000005_V20210823T030000_21000101T000000_B00 + S2A_OPER_GIP_CLOPAR_MPC__20220120T000001_V20220125T022000_21000101T000000_B00 + S2A_OPER_GIP_CONVER_MPC__20150710T131444_V20150627T000000_21000101T000000_B00 + S2A_OPER_GIP_DATATI_MPC__20151117T131048_V20150703T000000_21000101T000000_B00 + S2A_OPER_GIP_DECOMP_MPC__20121031T075922_V19830101T000000_21000101T000000_B00 + S2__OPER_GIP_EARMOD_MPC__20150605T094736_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_ECMWFP_MPC__20121031T075922_V19830101T000000_21000101T000000_B00 + S2A_OPER_GIP_G2PARA_MPC__20231208T000027_V20231213T070000_21000101T000000_B00 + S2A_OPER_GIP_G2PARE_MPC__20150605T094736_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_GEOPAR_MPC__20150605T094741_V20150622T000000_21000101T000000_B00 + 
S2A_OPER_GIP_INTDET_MPC__20220120T000010_V20220125T022000_21000101T000000_B00 + S2A_OPER_GIP_JP2KPA_MPC__20220120T000006_V20220125T022000_21000101T000000_B00 + S2A_OPER_GIP_MASPAR_MPC__20220120T000009_V20220125T022000_21000101T000000_B00 + S2A_OPER_GIP_OLQCPA_MPC__20220715T000042_V20220830T002500_21000101T000000_B00 + S2A_OPER_GIP_PRDLOC_MPC__20180301T130000_V20180305T005000_21000101T000000_B00 + S2A_OPER_GIP_PROBAS_MPC__20240305T000510_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_R2ABCA_MPC__20240315T121000_V20240319T003000_21000101T000000_B00 + S2A_OPER_GIP_R2BINN_MPC__20150605T094803_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_R2CRCO_MPC__20151023T224715_V20150622T224715_21000101T000000_B00 + S2A_OPER_GIP_R2DECT_MPC__20150605T094742_V20150622T000000_21000101T000000_B09 + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B04 + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B02 + S2A_OPER_GIP_R2DECT_MPC__20150605T094742_V20150622T000000_21000101T000000_B12 + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B06 + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B08 + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B07 + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B05 + S2A_OPER_GIP_R2DECT_MPC__20150605T094742_V20150622T000000_21000101T000000_B10 + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B01 + S2A_OPER_GIP_R2DECT_MPC__20150605T094742_V20150622T000000_21000101T000000_B11 + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B8A + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B03 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B09 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B05 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B8A + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B08 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B02 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B04 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B10 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B01 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B12 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B03 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B11 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B07 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B06 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B05 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B10 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B09 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B8A + S2A_OPER_GIP_R2DENT_MPC__20150605T094741_V20150622T000000_21000101T000000_B01 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B08 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B06 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B02 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B12 + 
S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B04 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B03 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B07 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B11 + S2A_OPER_GIP_R2DEPI_MPC__20230424T160000_V20230426T000000_21000101T000000_B00 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B12 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B03 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B07 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B09 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B10 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B01 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B05 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B8A + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B06 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B04 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B11 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B02 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B08 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B10 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B05 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B04 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B06 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B08 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B03 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B01 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B12 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B11 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B02 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B07 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B8A + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B09 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B05 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B08 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B8A + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B12 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B10 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B07 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B01 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094741_V20150622T000000_21000101T000000_B03 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B04 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B11 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B02 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B09 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B06 + 
S2A_OPER_GIP_R2NOMO_MPC__20150605T094803_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_R2PARA_MPC__20221206T000009_V20221206T073000_21000101T000000_B00 + S2A_OPER_GIP_R2SWIR_MPC__20180406T000021_V20180604T100000_21000101T000000_B00 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B12 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B09 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B05 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B02 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B03 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B8A + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B06 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B08 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B04 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B10 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B01 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B11 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B07 + S2A_OPER_GIP_RESPAR_MPC__20150605T094736_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_SPAMOD_MPC__20231122T110026_V20231123T010000_21000101T000000_B00 + S2A_OPER_GIP_TILPAR_MPC__20151209T095117_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B8A + S2A_OPER_GIP_VIEDIR_MPC__20151117T131049_V20150703T000000_21000101T000000_B03 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B08 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131048_V20150703T000000_21000101T000000_B01 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B11 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B10 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B06 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131049_V20150703T000000_21000101T000000_B04 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131049_V20150703T000000_21000101T000000_B02 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B05 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131051_V20150703T000000_21000101T000000_B12 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B09 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B07 + S2__OPER_GIP_L2ACSC_MPC__20220121T000003_V20220125T022000_21000101T000000_B00 + S2__OPER_GIP_L2ACAC_MPC__20220121T000004_V20220125T022000_21000101T000000_B00 + S2__OPER_GIP_PROBA2_MPC__20231208T000510_V20231213T070000_21000101T000000_B00 + + + CopernicusDEM30 + S2__OPER_AUX_UT1UTC_PDMC_20240404T000000_V20240405T000000_20250404T000000 + + S2__OPER_AUX_ECMWFD_ADG__20240410T120000_V20240410T210000_20240412T150000 + + None + + GlobalSnowMap.tiff + ESACCI-LC-L4-WB-Map-150m-P13Y-2000-v4.0.tif + ESACCI-LC-L4-LCCS-Map-300m-P1Y-2015-v2.0.7.tif + ESACCI-LC-L4-Snow-Cond-500m-MONTHLY-2000-2012-v2.4 + + + 3.500058 + + 0.0 + 0 + + + + PASSED + PASSED + PASSED + PASSED + PASSED + PASSED + + + + + 3.354197 + 0.0 + 0.0 + 8.675177 + 0.268831 + 2.81222 + 83.179593 + 0.992827 + 0.571295 + 0.275278 + 0.038401 + 3.18638 + 0.0 + 0.0 + 0.0 + 0.0 + CAMS + 0.392921 + 1.224094 + AUX_ECMWFT + 357.927923 + + + +""" # noqa + class TestTileXML: """Test the SAFE TILE XML file handler. 
- - Since L1C/L2A share almost the same Tile XML, we just need to test L1C. + + Since L1C/L2A share almost the same Tile XML, we only use L1C Tile here. """ def setup_method(self): """Set up the test case.""" from satpy.readers.msi_safe import SAFEMSITileMDXML - filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A", process_level="L1C") - self.l1c_xml_tile_fh = SAFEMSITileMDXML(BytesIO(mtd_l1c_tile_xml), filename_info, mock.MagicMock()) + l1c_filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A", process_level="L1C") + l2a_filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A", process_level="L2A") + self.l1c_xml_tile_fh = SAFEMSITileMDXML(BytesIO(mtd_l1c_tile_xml), l1c_filename_info, mock.MagicMock()) + self.l2a_xml_tile_fh = SAFEMSITileMDXML(BytesIO(mtd_l1c_tile_xml), l2a_filename_info, mock.MagicMock()) - @pytest.mark.parametrize(("angle_name", "angle_block", "angle_type", "expected"), - [("satellite_zenith_angle", "Viewing_Incidence_Angles_Grids", "Zenith", + @pytest.mark.parametrize(("process_level","angle_name", "angle_block", "angle_type", "expected"), + [("L1C", "satellite_zenith_angle", "Viewing_Incidence_Angles_Grids", "Zenith", [[11.7128, 11.18397802, 10.27667671, 9.35384969, 8.42850504, 7.55445611, 6.65475545, 5.66517232, 4.75893757, 4.04976844], [11.88606009, 10.9799713, 10.07083278, 9.14571825, 8.22607131, @@ -896,7 +1453,7 @@ def setup_method(self): 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837]]), - ("solar_zenith_angle_l2a", "Sun_Angles_Grid", "Zenith", + ("L2A", "solar_zenith_angle_l2a", "Sun_Angles_Grid", "Zenith", [[39.8824, 39.83721367, 39.79230847, 39.74758442, 39.7030415, 39.65867687, 39.61455566, 39.57061558, 39.52685664, 39.48331372], [39.78150175, 39.73629896, 39.69128852, 39.64643679, 39.6018404, @@ -917,23 +1474,52 @@ def setup_method(self): 38.84936063, 38.80464763, 38.76011645, 38.7157479, 38.67164839], [38.97531575, 38.92950771, 38.88389967, 38.83852091, 38.7933053, 38.74831897, 38.7034912, 38.65891427, 38.61446851, 38.57030388]]), - ("moon_zenith_angle", "Sun_Angles_Grid", "Zenith", None) + ("L1C", "moon_zenith_angle", "Sun_Angles_Grid", "Zenith", None) ]) - def test_angles(self, angle_name, angle_block, angle_type, expected): + def test_angles(self, process_level, angle_name, angle_block, angle_type, expected): """Test reading angles array.""" info = dict(xml_tag=angle_block, xml_item=angle_type) if "satellite" in angle_name else \ dict(xml_tag=angle_block + "/" + angle_type) - res = self.l1c_xml_tile_fh.get_dataset(make_dataid(name=angle_name, - resolution=60), info) + if process_level == "L1C": + res = self.l1c_xml_tile_fh.get_dataset(make_dataid(name=angle_name, resolution=60), info) + else: + res = self.l2a_xml_tile_fh.get_dataset(make_dataid(name=angle_name, resolution=60), info) if res is not None: res = res[::200, ::200] - if expected is not None: + if res is not None: np.testing.assert_allclose(res, expected) else: assert res is expected + def test_navigation(self): + """Test the navigation.""" + from pyproj import CRS + crs = CRS("EPSG:32616") + + dsid = make_dataid(name="B01", resolution=60) + result = self.l1c_xml_tile_fh.get_area_def(dsid) + area_extent = (499980.0, 3590220.0, 609780.0, 3700020.0) + assert result.crs == crs + np.testing.assert_allclose(result.area_extent, area_extent) + + +# class TestMTDXML: +# """Test the SAFE MTD XML file handler.""" +# +# def setup_method(self): +# 
"""Set up the test case.""" +# from satpy.readers.msi_safe import SAFEMSIMDXML, SAFEMSITileMDXML +# l1c_filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A", process_level="L1C") +# l2a_filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A", process_level="L2A") +# self.l1c_old_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_old_xml), l1c_filename_info, mock.MagicMock()) +# self.l1c_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), l1c_filename_info, mock.MagicMock(), mask_saturated=True) +# self.l2a_xml_fh = SAFEMSIMDXML(StringIO(mtd_l2a_xml), l1c_filename_info, mock.MagicMock(), mask_saturated=True) +# +# def test_calibration_and_masking(self, process_level, calibration): + + class TestMTDXML: """Test the SAFE MTD XML file handler.""" @@ -946,40 +1532,6 @@ def setup_method(self): self.l1c_old_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_old_xml), filename_info, mock.MagicMock()) self.l1c_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), filename_info, mock.MagicMock(), mask_saturated=True) - def test_satellite_zenith_array(self): - """Test reading the satellite zenith array.""" - info = dict(xml_tag="Viewing_Incidence_Angles_Grids", xml_item="Zenith") - - expected_data = np.array([[11.7128, 11.18397802, 10.27667671, 9.35384969, 8.42850504, - 7.55445611, 6.65475545, 5.66517232, 4.75893757, 4.04976844], - [11.88606009, 10.9799713, 10.07083278, 9.14571825, 8.22607131, - 7.35181457, 6.44647222, 5.46144173, 4.56625547, 3.86638233], - [11.6823579, 10.7763071, 9.86302106, 8.93879112, 8.04005637, - 7.15028077, 6.21461062, 5.25780953, 4.39876601, 3.68620793], - [11.06724679, 10.35723901, 9.63958896, 8.73072512, 7.83680864, - 6.94792574, 5.9889201, 5.05445872, 4.26089708, 3.50984272], - [6.28411038, 6.28411038, 6.28411038, 6.28411038, 6.28411038, - 5.99769643, 5.62586167, 4.85165966, 4.13238314, 3.33781401], - [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, - 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], - [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, - 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], - [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, - 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], - [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, - 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], - [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, - 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837]]) - - res1 = self.l1c_xml_tile_fh.get_dataset(make_dataid(name="satellite_zenith_angle", - resolution=60), - info)[::200, ::200] - res2 = self.l1c_xml_tile_fh.get_dataset(make_dataid(name="satellite_zenith_angle_l2a", - resolution=60), - info)[::200, ::200] - np.testing.assert_allclose(res1, expected_data) - np.testing.assert_allclose(res2, expected_data) - def test_old_xml_calibration(self): """Test the calibration of older data formats (no offset).""" fake_data = xr.DataArray([[[0, 1, 2, 3], @@ -1039,18 +1591,6 @@ def test_xml_calibration_to_radiance(self): [-250.828757, 0., 16251.99095, np.inf]]]) np.testing.assert_allclose(result, expected) - def test_xml_navigation(self): - """Test the navigation.""" - from pyproj import CRS - crs = CRS("EPSG:32616") - - dsid = make_dataid(name="B01", resolution=60) - result = self.l1c_xml_tile_fh.get_area_def(dsid) - - area_extents = (499980.0, 3590220.0, 609780.0, 3700020.0) - assert result.crs == crs - np.testing.assert_allclose(result.area_extent, area_extents) - class TestSAFEMSIL1C: """Test case for image reading (jp2k).""" From e0c06de89e5aaa93bd9e1cdcb9cc377b865c2195 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Fri, 19 Apr 2024 23:43:28 +0800 Subject: [PATCH 1279/1416] Update 
__init__.py --- satpy/composites/__init__.py | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 0b4d01c833..a390e9dd16 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1674,22 +1674,22 @@ class BackgroundCompositor(GenericCompositor): The output image mode will be determined by both foreground and background. Generally, when the background has an alpha band, the output image will also have one. - # L/L -> L - # L/LA -> LA - # L/RGB -> RGB - # L/RGBA -> RGBA - # LA/L -> L - # LA/LA -> LA - # LA/RGB -> RGB - # LA/RGBA -> RGBA - # RGB/L -> RGB - # RGB/LA -> RGBA - # RGB/RGB -> RGB - # RGB/RGBA -> RGBA - # RGBA/L -> RGB - # RGBA/LA -> RGBA - # RGBA/RGB -> RGB - # RGBA/RGBA -> RGBA + # L/L -> L + # L/LA -> LA + # L/RGB -> RGB + # L/RGBA -> RGBA + # LA/L -> L + # LA/LA -> LA + # LA/RGB -> RGB + # LA/RGBA -> RGBA + # RGB/L -> RGB + # RGB/LA -> RGBA + # RGB/RGB -> RGB + # RGB/RGBA -> RGBA + # RGBA/L -> RGB + # RGBA/LA -> RGBA + # RGBA/RGB -> RGB + # RGBA/RGBA -> RGBA """ From 82899d98af95bd6a471a0afcb5bb2313ffd41a61 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 19 Apr 2024 10:50:53 -0500 Subject: [PATCH 1280/1416] Add missing pyerfa dependency --- .github/workflows/ci.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index ce4a16006f..8955a154f1 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -82,6 +82,7 @@ jobs: scipy conda remove --force-remove -y pykdtree pyresample python-geotiepoints pyhdf netcdf4 h5py cftime astropy pyerfa python -m pip install --upgrade --no-deps --pre --no-build-isolation \ + pyerfa \ git+https://github.com/storpipfugl/pykdtree \ git+https://github.com/pytroll/pyresample \ git+https://github.com/pytroll/trollimage \ From 057e89ec69d2cf2715c42eb4a1ff8692591c1b46 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Fri, 19 Apr 2024 23:59:07 +0800 Subject: [PATCH 1281/1416] Update test_msi_safe.py --- satpy/tests/reader_tests/test_msi_safe.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index e13a923cd4..453d2f67c7 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -1515,9 +1515,20 @@ def test_navigation(self): # l2a_filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A", process_level="L2A") # self.l1c_old_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_old_xml), l1c_filename_info, mock.MagicMock()) # self.l1c_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), l1c_filename_info, mock.MagicMock(), mask_saturated=True) -# self.l2a_xml_fh = SAFEMSIMDXML(StringIO(mtd_l2a_xml), l1c_filename_info, mock.MagicMock(), mask_saturated=True) +# self.l2a_xml_fh = SAFEMSIMDXML(StringIO(mtd_l2a_xml), l2a_filename_info, mock.MagicMock(), mask_saturated=True) +# self.fake_data = xr.DataArray([[[0, 1, 2, 3], +# [4, 1000, 65534, 65535]]], +# dims=["band", "x", "y"]) # -# def test_calibration_and_masking(self, process_level, calibration): +# def test_old_xml_calibration(self): +# """Test the calibration of older data formats (no offset).""" +# fake_data = xr.DataArray([[[0, 1, 2, 3], +# [4, 1000, 65534, 65535]]], +# dims=["band", "x", "y"]) +# result = self.l1c_old_xml_fh.calibrate_to_reflectances(fake_data, "B01") +# np.testing.assert_allclose(result, [[[np.nan, 0.01, 0.02, 
0.03], +# [0.04, 10, 655.34, np.inf]]]) + From 807e0a4cdc1d657a553b300ee8e02a02f86ee56f Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sat, 20 Apr 2024 00:09:19 +0800 Subject: [PATCH 1282/1416] Update __init__.py --- satpy/composites/__init__.py | 51 +++++++++++++++++++++++++----------- 1 file changed, 35 insertions(+), 16 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index a390e9dd16..746e6ddf37 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1674,22 +1674,41 @@ class BackgroundCompositor(GenericCompositor): The output image mode will be determined by both foreground and background. Generally, when the background has an alpha band, the output image will also have one. - # L/L -> L - # L/LA -> LA - # L/RGB -> RGB - # L/RGBA -> RGBA - # LA/L -> L - # LA/LA -> LA - # LA/RGB -> RGB - # LA/RGBA -> RGBA - # RGB/L -> RGB - # RGB/LA -> RGBA - # RGB/RGB -> RGB - # RGB/RGBA -> RGBA - # RGBA/L -> RGB - # RGBA/LA -> RGBA - # RGBA/RGB -> RGB - # RGBA/RGBA -> RGBA + ============ ============ ======== + Foreground Background Result + ============ ============ ======== + L L L + ------------------------------------ + L LA LA + ------------------------------------ + L RGB RGB + ------------------------------------ + L RGBA RGBA + ------------------------------------ + LA L L + ------------------------------------ + LA LA LA + ------------------------------------ + LA RGB RGB + ------------------------------------ + LA RGBA RGBA + ------------------------------------ + RGB L RGB + ------------------------------------ + RGB LA RGBA + ------------------------------------ + RGB RGB RGB + ------------------------------------ + RGB RGBA RGBA + ------------------------------------ + RGBA L RGB + ------------------------------------ + RGBA LA RGBA + ------------------------------------ + RGBA RGB RGB + ------------------------------------ + RGBA RGBA RGBA + ------------------------------------ """ From 5f8c3ebfdc98fd1393f24ba6e19d57cb8aa38b46 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sat, 20 Apr 2024 00:16:56 +0800 Subject: [PATCH 1283/1416] Update __init__.py --- satpy/composites/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 746e6ddf37..1539d349bd 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1709,6 +1709,7 @@ class BackgroundCompositor(GenericCompositor): ------------------------------------ RGBA RGBA RGBA ------------------------------------ + ============ ============ ======== """ From 58901ef53485c109bacf5652fb2d85976e29c22c Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sat, 20 Apr 2024 10:25:31 +0800 Subject: [PATCH 1284/1416] Update __init__.py --- satpy/composites/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 1539d349bd..52832d34d0 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1674,8 +1674,9 @@ class BackgroundCompositor(GenericCompositor): The output image mode will be determined by both foreground and background. Generally, when the background has an alpha band, the output image will also have one. 
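(A minimal usage sketch of the behavior described above — hypothetical
variables, but the call pattern follows satpy's compositor API: `fg` and `bg`
are assumed to be DataArrays with a "bands" dimension, foreground first.)

    from satpy.composites import BackgroundCompositor
    comp = BackgroundCompositor("overlay_with_background")
    # e.g. an LA foreground over an RGB background -> an RGB result,
    # per the mode table this docstring carries
    result = comp([fg, bg])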
+ ============ ============ ======== - Foreground Background Result + Foreground Background Result ============ ============ ======== L L L ------------------------------------ From 00f1710ffe179f5f002f92144c18d01588cbfd66 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sat, 20 Apr 2024 10:39:20 +0800 Subject: [PATCH 1285/1416] Update __init__.py --- satpy/composites/__init__.py | 64 ++++++++++++++++++------------------ 1 file changed, 32 insertions(+), 32 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 52832d34d0..41c0fbc368 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1678,38 +1678,38 @@ class BackgroundCompositor(GenericCompositor): ============ ============ ======== Foreground Background Result ============ ============ ======== - L L L - ------------------------------------ - L LA LA - ------------------------------------ - L RGB RGB - ------------------------------------ - L RGBA RGBA - ------------------------------------ - LA L L - ------------------------------------ - LA LA LA - ------------------------------------ - LA RGB RGB - ------------------------------------ - LA RGBA RGBA - ------------------------------------ - RGB L RGB - ------------------------------------ - RGB LA RGBA - ------------------------------------ - RGB RGB RGB - ------------------------------------ - RGB RGBA RGBA - ------------------------------------ - RGBA L RGB - ------------------------------------ - RGBA LA RGBA - ------------------------------------ - RGBA RGB RGB - ------------------------------------ - RGBA RGBA RGBA - ------------------------------------ + L L L + ------------ ------------ -------- + L LA LA + ------------ ------------ -------- + L RGB RGB + ------------ ------------ -------- + L RGBA RGBA + ------------ ------------ -------- + LA L L + ------------ ------------ -------- + LA LA LA + ------------ ------------ -------- + LA RGB RGB + ------------ ------------ -------- + LA RGBA RGBA + ------------ ------------ -------- + RGB L RGB + ------------ ------------ -------- + RGB LA RGBA + ------------ ------------ -------- + RGB RGB RGB + ------------ ------------ -------- + RGB RGBA RGBA + ------------ ------------ -------- + RGBA L RGB + ------------ ------------ -------- + RGBA LA RGBA + ------------ ------------ -------- + RGBA RGB RGB + ------------ ------------ -------- + RGBA RGBA RGBA + ------------ ------------ -------- ============ ============ ======== """ From 05fdcbf8c2300ebd195c3fc08805dab58e2225ec Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sat, 20 Apr 2024 10:47:01 +0800 Subject: [PATCH 1286/1416] Update __init__.py --- satpy/composites/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 41c0fbc368..9ab658954f 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1709,7 +1709,6 @@ class BackgroundCompositor(GenericCompositor): RGBA RGB RGB ------------ ------------ -------- RGBA RGBA RGBA - ------------ ------------ -------- ============ ============ ======== """ From bfa5f4b6866db59090173c30f6f4cfc2ec599a35 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sat, 20 Apr 2024 12:14:12 +0800 Subject: [PATCH 1287/1416] Update test_msi_safe.py --- satpy/tests/reader_tests/test_msi_safe.py | 179 ++++++++++++---------- 1 file changed, 94 insertions(+), 85 deletions(-) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 453d2f67c7..32832b17e5 
100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -1419,7 +1419,7 @@ class TestTileXML: """Test the SAFE TILE XML file handler. - Since L1C/L2A share almost the same Tile XML, we only use L1C Tile here. + Since L1C/L2A share almost the same Tile XML structure, we only use L1C Tile here. """ @@ -1432,7 +1432,8 @@ def setup_method(self): self.l2a_xml_tile_fh = SAFEMSITileMDXML(BytesIO(mtd_l1c_tile_xml), l2a_filename_info, mock.MagicMock()) @pytest.mark.parametrize(("process_level","angle_name", "angle_block", "angle_type", "expected"), - [("L1C", "satellite_zenith_angle", "Viewing_Incidence_Angles_Grids", "Zenith", + [ + ("L1C", "satellite_zenith_angle", "Viewing_Incidence_Angles_Grids", "Zenith", [[11.7128, 11.18397802, 10.27667671, 9.35384969, 8.42850504, 7.55445611, 6.65475545, 5.66517232, 4.75893757, 4.04976844], [11.88606009, 10.9799713, 10.07083278, 9.14571825, 8.22607131, @@ -1505,102 +1506,110 @@ def test_navigation(self): np.testing.assert_allclose(result.area_extent, area_extent) -# class TestMTDXML: -# """Test the SAFE MTD XML file handler.""" -# -# def setup_method(self): -# """Set up the test case.""" -# from satpy.readers.msi_safe import SAFEMSIMDXML, SAFEMSITileMDXML -# l1c_filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A", process_level="L1C") -# l2a_filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A", process_level="L2A") -# self.l1c_old_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_old_xml), l1c_filename_info, mock.MagicMock()) -# self.l1c_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), l1c_filename_info, mock.MagicMock(), mask_saturated=True) -# self.l2a_xml_fh = SAFEMSIMDXML(StringIO(mtd_l2a_xml), l2a_filename_info, mock.MagicMock(), mask_saturated=True) -# self.fake_data = xr.DataArray([[[0, 1, 2, 3], -# [4, 1000, 65534, 65535]]], -# dims=["band", "x", "y"]) -# -# def test_old_xml_calibration(self): -# """Test the calibration of older data formats (no offset).""" -# fake_data = xr.DataArray([[[0, 1, 2, 3], -# [4, 1000, 65534, 65535]]], -# dims=["band", "x", "y"]) -# result = self.l1c_old_xml_fh.calibrate_to_reflectances(fake_data, "B01") -# np.testing.assert_allclose(result, [[[np.nan, 0.01, 0.02, 0.03], -# [0.04, 10, 655.34, np.inf]]]) - - - - class TestMTDXML: """Test the SAFE MTD XML file handler.""" def setup_method(self): """Set up the test case.""" - from satpy.readers.msi_safe import SAFEMSIMDXML, SAFEMSITileMDXML - filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A", process_level="L1C") - self.l1c_xml_tile_fh = SAFEMSITileMDXML(BytesIO(mtd_l1c_tile_xml), filename_info, mock.MagicMock()) - self.l1c_old_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_old_xml), filename_info, mock.MagicMock()) - self.l1c_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), filename_info, mock.MagicMock(), mask_saturated=True) + self.l1c_filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A", process_level="L1C") + self.l2a_filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A", process_level="L2A") + self.fake_data = xr.DataArray([[[0, 1, 2, 3], + [4, 1000, 65534, 65535]]], + dims=["band", "x", "y"]) - def test_old_xml_calibration(self): - """Test the calibration of older data formats (no offset).""" - fake_data = xr.DataArray([[[0, 1, 2, 3], - [4, 1000, 65534, 65535]]], - dims=["band", "x", "y"]) - result = self.l1c_old_xml_fh.calibrate_to_reflectances(fake_data, "B01") - np.testing.assert_allclose(result, 
[[[np.nan, 0.01, 0.02, 0.03], - [0.04, 10, 655.34, np.inf]]]) + @pytest.mark.parametrize(("process_level", "mask_saturated", "band_name", "expected"), + [ + ("L1C", True, "B01", [[[np.nan, -9.99, -9.98, -9.97], + [-9.96, 0, 645.34, np.inf]]]), + ("L1C", False, "B10", [[[np.nan, -19.99, -19.98, -19.97], + [-19.96, -10, 635.34, 635.35]]]), + ("oldL1C", True, "B01", [[[np.nan, 0.01, 0.02, 0.03], + [0.04, 10, 655.34, np.inf]]]), + ("L2A", False, "B03", [[[np.nan, -9.99, -9.98, -9.97], + [-9.96, 0, 645.34, 645.35]]]), + ]) + def test_xml_calibration_to_reflectance(self, process_level, mask_saturated, band_name, expected): + """Test the calibration to reflectance.""" + from satpy.readers.msi_safe import SAFEMSIMDXML + l1c_old_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_old_xml), self.l1c_filename_info, mock.MagicMock()) + l1c_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), self.l1c_filename_info, mock.MagicMock(), + mask_saturated=mask_saturated) + l2a_xml_fh = SAFEMSIMDXML(StringIO(mtd_l2a_xml), self.l2a_filename_info, mock.MagicMock(), + mask_saturated=mask_saturated) - def test_xml_calibration(self): - """Test the calibration with radiometric offset.""" - fake_data = xr.DataArray([[[0, 1, 2, 3], - [4, 1000, 65534, 65535]]], - dims=["band", "x", "y"]) - result = self.l1c_xml_fh.calibrate_to_reflectances(fake_data, "B01") - np.testing.assert_allclose(result, [[[np.nan, 0.01 - 10, 0.02 - 10, 0.03 - 10], - [0.04 - 10, 0, 655.34 - 10, np.inf]]]) + if process_level == "oldL1C": + result = l1c_old_xml_fh.calibrate_to_reflectances(self.fake_data, band_name) + elif process_level == "L1C": + result = l1c_xml_fh.calibrate_to_reflectances(self.fake_data, band_name) + else: + result = l2a_xml_fh.calibrate_to_reflectances(self.fake_data, band_name) - def test_xml_calibration_to_counts(self): - """Test the calibration to counts.""" - fake_data = xr.DataArray([[[0, 1, 2, 3], - [4, 1000, 65534, 65535]]], - dims=["band", "x", "y"]) - result = self.l1c_xml_fh._sanitize_data(fake_data) - np.testing.assert_allclose(result, [[[np.nan, 1, 2, 3], - [4, 1000, 65534, np.inf]]]) + np.testing.assert_allclose(result, expected) - def test_xml_calibration_unmasked_saturated(self): - """Test the calibration with radiometric offset but unmasked saturated pixels.""" + @pytest.mark.parametrize(("process_level", "mask_saturated", "band_name", "expected"), + [ + ("L1C", True, "B01", [[[np.nan, -251.584265, -251.332429, -251.080593], + [-250.828757, 0., 16251.99095, np.inf]]]), + ("L1C", False, "B10", [[[np.nan, -35.465976, -35.448234, -35.430493], + [-35.412751, -17.741859, 1127.211275, 1127.229017]]]), + ("oldL1C", True, "B01", [[[np.nan, 0.251836101, 0.503672202, 0.755508303], + [1.00734440, 251.836101, 16503.8271, np.inf]]]), + ("L2A", False, "B03", [[[np.nan, -238.571863, -238.333052, -238.094241], + [-237.855431, 0, 15411.407995, 15411.646806]]]), + ]) + def test_xml_calibration_to_radiance(self, process_level, mask_saturated, band_name, expected): + """Test the calibration to reflectance.""" from satpy.readers.msi_safe import SAFEMSIMDXML - filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A", process_level="L1C") - self.l1c_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), filename_info, mock.MagicMock(), mask_saturated=False) + l1c_old_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_old_xml), self.l1c_filename_info, mock.MagicMock()) + l1c_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), self.l1c_filename_info, mock.MagicMock(), + mask_saturated=mask_saturated) + l2a_xml_fh = SAFEMSIMDXML(StringIO(mtd_l2a_xml), 
self.l2a_filename_info, mock.MagicMock(), + mask_saturated=mask_saturated) + if process_level == "oldL1C": + result = l1c_old_xml_fh.calibrate_to_radiances(self.fake_data, band_name) + elif process_level == "L1C": + result = l1c_xml_fh.calibrate_to_radiances(self.fake_data, band_name) + else: + result = l2a_xml_fh.calibrate_to_radiances(self.fake_data, band_name) - fake_data = xr.DataArray([[[0, 1, 2, 3], - [4, 1000, 65534, 65535]]], - dims=["band", "x", "y"]) - result = self.l1c_xml_fh.calibrate_to_reflectances(fake_data, "B01") - np.testing.assert_allclose(result, [[[np.nan, 0.01 - 10, 0.02 - 10, 0.03 - 10], - [0.04 - 10, 0, 655.34 - 10, 655.35 - 10]]]) + np.testing.assert_allclose(result, expected) - def test_xml_calibration_with_different_offset(self): - """Test the calibration with a different offset.""" - fake_data = xr.DataArray([[[0, 1, 2, 3], - [4, 1000, 65534, 65535]]], - dims=["band", "x", "y"]) - result = self.l1c_xml_fh.calibrate_to_reflectances(fake_data, "B10") - np.testing.assert_allclose(result, [[[np.nan, 0.01 - 20, 0.02 - 20, 0.03 - 20], - [0.04 - 20, -10, 655.34 - 20, np.inf]]]) + @pytest.mark.parametrize(("process_level", "mask_saturated", "band_name", "expected"), + [ + ("L1C", True, "B01", None), + ("L2A", False, "AOT", [[[np.nan, 0.001, 0.002, 0.003], + [0.004, 1., 65.534, 65.535]]]), + ("L2A", True, "WVP", [[[np.nan, 0.001, 0.002, 0.003], + [0.004, 1., 65.534, np.inf]]]), + ("L2A", False, "CLOUD", None), + ]) + def test_xml_calibration_to_atmospheric(self, process_level, mask_saturated, band_name, expected): + """Test the calibration to L2A atmospheric products.""" + from satpy.readers.msi_safe import SAFEMSIMDXML + l1c_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), self.l1c_filename_info, mock.MagicMock(), + mask_saturated=mask_saturated) + l2a_xml_fh = SAFEMSIMDXML(StringIO(mtd_l2a_xml), self.l2a_filename_info, mock.MagicMock(), + mask_saturated=mask_saturated) + if process_level == "L1C": + result = l1c_xml_fh.calibrate_to_atmospheric(self.fake_data, band_name) + else: + result = l2a_xml_fh.calibrate_to_atmospheric(self.fake_data, band_name) + + if result is not None: + np.set_printoptions(suppress=True) + np.testing.assert_allclose(result, expected) + else: + assert result is expected + + def test_xml_calibration_to_counts(self): + """Test the calibration to counts.""" + from satpy.readers.msi_safe import SAFEMSIMDXML + l1c_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), self.l1c_filename_info, mock.MagicMock(), + mask_saturated=True) + result = l1c_xml_fh._sanitize_data(self.fake_data) + np.testing.assert_allclose(result, [[[np.nan, 1, 2, 3], + [4, 1000, 65534, np.inf]]]) - def test_xml_calibration_to_radiance(self): - """Test the calibration with a different offset.""" - fake_data = xr.DataArray([[[0, 1, 2, 3], - [4, 1000, 65534, 65535]]], - dims=["band", "x", "y"]) - result = self.l1c_xml_fh.calibrate_to_radiances(fake_data, "B01") - expected = np.array([[[np.nan, -251.584265, -251.332429, -251.080593], - [-250.828757, 0., 16251.99095, np.inf]]]) - np.testing.assert_allclose(result, expected) class TestSAFEMSIL1C: From c038cfdf1dff02552980f7d966b90bbdfc7a61d6 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sat, 20 Apr 2024 12:14:32 +0800 Subject: [PATCH 1288/1416] Update test_msi_safe.py --- satpy/tests/reader_tests/test_msi_safe.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 32832b17e5..77cd49a9e8 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ 
b/satpy/tests/reader_tests/test_msi_safe.py @@ -1611,7 +1611,6 @@ def test_xml_calibration_to_counts(self): [4, 1000, 65534, np.inf]]]) - class TestSAFEMSIL1C: """Test case for image reading (jp2k).""" From e00e4d3cdd498a013bd6466f5986f81b5fc22025 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sat, 20 Apr 2024 12:15:21 +0800 Subject: [PATCH 1289/1416] Update test_msi_safe.py --- satpy/tests/reader_tests/test_msi_safe.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 77cd49a9e8..6a0b8d9275 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -1596,7 +1596,6 @@ def test_xml_calibration_to_atmospheric(self, process_level, mask_saturated, ban result = l2a_xml_fh.calibrate_to_atmospheric(self.fake_data, band_name) if result is not None: - np.set_printoptions(suppress=True) np.testing.assert_allclose(result, expected) else: assert result is expected From e86b122dcc8e93abfac23fdc96a2db8129e57365 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sat, 20 Apr 2024 13:57:34 +0800 Subject: [PATCH 1290/1416] fix --- satpy/readers/msi_safe.py | 6 +- satpy/tests/reader_tests/test_msi_safe.py | 77 ++++++++++++++++++----- 2 files changed, 64 insertions(+), 19 deletions(-) diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index 7bf7b537a6..d99101f1bf 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -86,6 +86,8 @@ def get_dataset(self, key, info): logger.debug("Reading %s.", key["name"]) proj = self._read_from_file(key) + if proj is None: + return proj.attrs = info.copy() proj.attrs["units"] = "%" proj.attrs["platform_name"] = self.platform_name @@ -164,8 +166,8 @@ def calibrate_to_reflectances(self, data, band_name): def calibrate_to_atmospheric(self, data, band_name): """Calibrate L2A AOT/WVP product.""" - atmospheric_products = ["AOT", "WVP"] - if self.process_level == "L1C" or (self.process_level == "L2A" and band_name not in atmospheric_products): + atmospheric_bands = ["AOT", "WVP"] + if self.process_level == "L1C" or (self.process_level == "L2A" and band_name not in atmospheric_bands): return quantification = float(self.root.find(f".//{band_name}_QUANTIFICATION_VALUE").text) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 6a0b8d9275..af93939c20 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -1615,26 +1615,69 @@ class TestSAFEMSIL1C: def setup_method(self): """Set up the test.""" - from satpy.readers.msi_safe import SAFEMSITileMDXML - self.filename_info = dict(observation_time=None, fmission_id="S2A", band_name="B01", dtile_number=None, - process_level="L1C") self.fake_data = xr.Dataset({"band_data": xr.DataArray([[[0, 1], [65534, 65535]]], dims=["band", "x", "y"])}) - self.tile_mda = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(mtd_l1c_tile_xml), - self.filename_info, mock.MagicMock()) - @pytest.mark.parametrize(("mask_saturated", "calibration", "expected"), - [(True, "reflectance", [[np.nan, 0.01 - 10], [645.34, np.inf]]), - (False, "reflectance", [[np.nan, 0.01 - 10], [645.34, 645.35]]), - (True, "radiance", [[np.nan, -251.58426503], [16251.99095011, np.inf]]), - (False, "counts", [[np.nan, 1], [65534, 65535]])]) - def test_calibration_and_masking(self, mask_saturated, calibration, expected): + + @pytest.mark.parametrize(("process_level", "mask_saturated", "band_name", "calibration", "expected"), + 
[ + ("L1C", True, "B01", "reflectance", [[np.nan, -9.99], [645.34, np.inf]]), + ("L1C", False, "B02", "radiance", [[np.nan, -262.148396], [16934.419021, 16934.681431]]), + ("L1C", True, "B03", "counts", [[np.nan, 1], [65534, np.inf]]), + ("L1C", True, "B01", "aerosol_thickness", None), + ("L2A", False, "AOT_L2A", "aerosol_thickness", [[np.nan, 0.001], [65.534, 65.535]]), + ("L2A", True, "WVP_L2A", "water_vapor", [[np.nan, 0.001], [65.534, np.inf]]), + ("L2A", True, "SNOW_L2A", "water_vapor", None), + ]) + def test_calibration_and_masking(self, process_level, mask_saturated, band_name, calibration, expected): """Test that saturated is masked with inf when requested and that calibration is performed.""" - from satpy.readers.msi_safe import SAFEMSIL1C, SAFEMSIMDXML + from satpy.readers.msi_safe import SAFEMSIL1C, SAFEMSIMDXML, SAFEMSITileMDXML + l1c_filename_info = dict(observation_time=None, fmission_id="S2A", band_name=band_name, dtile_number=None, + process_level="L1C") + l2a_filename_info = dict(observation_time=None, fmission_id="S2A", band_name=band_name.replace("_L2A", ""), + dtile_number=None, process_level="L2A") + l1c_tile_mda = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(mtd_l1c_tile_xml), l1c_filename_info, + mock.MagicMock()) + l2a_tile_mda = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(mtd_l1c_tile_xml), l2a_filename_info, + mock.MagicMock()) + l1c_mda = SAFEMSIMDXML(StringIO(mtd_l1c_xml), l1c_filename_info, mock.MagicMock(), + mask_saturated=mask_saturated) + l2a_mda = SAFEMSIMDXML(StringIO(mtd_l2a_xml), l2a_filename_info, mock.MagicMock(), + mask_saturated=mask_saturated) - mda = SAFEMSIMDXML(StringIO(mtd_l1c_xml), self.filename_info, mock.MagicMock(), - mask_saturated=mask_saturated) - self.jp2_fh = SAFEMSIL1C("somefile", self.filename_info, mock.MagicMock(), mda, self.tile_mda) + if process_level == "L1C": + jp2_fh = SAFEMSIL1C("somefile", l1c_filename_info, mock.MagicMock(), l1c_mda, l1c_tile_mda) + else: + jp2_fh = SAFEMSIL1C("somefile", l2a_filename_info, mock.MagicMock(), l2a_mda, l2a_tile_mda) with mock.patch("xarray.open_dataset", return_value=self.fake_data): - res = self.jp2_fh.get_dataset(make_dataid(name="B01", calibration=calibration), info=dict()) - np.testing.assert_allclose(res, expected) + res = jp2_fh.get_dataset(make_dataid(name=band_name, calibration=calibration), info=dict()) + if res is not None: + np.testing.assert_allclose(res, expected) + else: + assert res is expected + + def test_filename_dsname_mismatch(self): + """Test when satpy's dataset name and file's band name mismatch, the data and its area definition should + both be None. 
+ + """ + from satpy.readers.msi_safe import SAFEMSIL1C, SAFEMSIMDXML, SAFEMSITileMDXML + l1c_filename_info = dict(observation_time=None, fmission_id="S2A", band_name="B01", dtile_number=None, + process_level="L1C") + l2a_filename_info = dict(observation_time=None, fmission_id="S2A", band_name="B10", dtile_number=None, + process_level="L2A") + l1c_tile_mda = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(mtd_l1c_tile_xml), l1c_filename_info, + mock.MagicMock()) + l2a_tile_mda = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(mtd_l1c_tile_xml), l2a_filename_info, + mock.MagicMock()) + l1c_mda = SAFEMSIMDXML(StringIO(mtd_l1c_xml), l1c_filename_info, mock.MagicMock(), mask_saturated=True) + l2a_mda = SAFEMSIMDXML(StringIO(mtd_l2a_xml), l2a_filename_info, mock.MagicMock(), mask_saturated=True) + l1c_jp2_fh = SAFEMSIL1C("somefile", l1c_filename_info, mock.MagicMock(), l1c_mda, l1c_tile_mda) + l2a_jp2_fh = SAFEMSIL1C("somefile", l2a_filename_info, mock.MagicMock(), l2a_mda, l2a_tile_mda) + + with mock.patch("xarray.open_dataset", return_value=self.fake_data): + res1 = l1c_jp2_fh.get_dataset(make_dataid(name="B02"), info=dict()) + res2 = l1c_jp2_fh.get_dataset(make_dataid(name="B10_L2A"), info=dict()) + res3 = l1c_jp2_fh.get_area_def(make_dataid(name="B02")) + res4 = l1c_jp2_fh.get_area_def(make_dataid(name="B10_L2A")) + assert res1 is None and res2 is None and res3 is None and res4 is None \ No newline at end of file From 6b53da032b9c06055e63db325271e6a1003bbf8b Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sat, 20 Apr 2024 14:02:22 +0800 Subject: [PATCH 1291/1416] Update test_msi_safe.py --- satpy/tests/reader_tests/test_msi_safe.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index af93939c20..776547a953 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -1657,10 +1657,7 @@ def test_calibration_and_masking(self, process_level, mask_saturated, band_name, assert res is expected def test_filename_dsname_mismatch(self): - """Test when satpy's dataset name and file's band name mismatch, the data and its area definition should - both be None. 
- - """ + """Test when dataset name and file name mismatch, the data and its area definition should both be None.""" from satpy.readers.msi_safe import SAFEMSIL1C, SAFEMSIMDXML, SAFEMSITileMDXML l1c_filename_info = dict(observation_time=None, fmission_id="S2A", band_name="B01", dtile_number=None, process_level="L1C") @@ -1675,9 +1672,13 @@ def test_filename_dsname_mismatch(self): l1c_jp2_fh = SAFEMSIL1C("somefile", l1c_filename_info, mock.MagicMock(), l1c_mda, l1c_tile_mda) l2a_jp2_fh = SAFEMSIL1C("somefile", l2a_filename_info, mock.MagicMock(), l2a_mda, l2a_tile_mda) - with mock.patch("xarray.open_dataset", return_value=self.fake_data): + with (mock.patch("xarray.open_dataset", return_value=self.fake_data)): res1 = l1c_jp2_fh.get_dataset(make_dataid(name="B02"), info=dict()) - res2 = l1c_jp2_fh.get_dataset(make_dataid(name="B10_L2A"), info=dict()) + res2 = l2a_jp2_fh.get_dataset(make_dataid(name="B11_L2A"), info=dict()) res3 = l1c_jp2_fh.get_area_def(make_dataid(name="B02")) - res4 = l1c_jp2_fh.get_area_def(make_dataid(name="B10_L2A")) - assert res1 is None and res2 is None and res3 is None and res4 is None \ No newline at end of file + res4 = l2a_jp2_fh.get_area_def(make_dataid(name="B11_L2A")) + assert res1 is None + assert res2 is None + assert res3 is None + assert res4 is None + \ No newline at end of file From 77b1753929e9e26acc71735e983d90c4f40635a0 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sat, 20 Apr 2024 14:03:34 +0800 Subject: [PATCH 1292/1416] Update test_msi_safe.py --- satpy/tests/reader_tests/test_msi_safe.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 776547a953..f9e19f790f 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -1681,4 +1681,3 @@ def test_filename_dsname_mismatch(self): assert res2 is None assert res3 is None assert res4 is None - \ No newline at end of file From a788854b96be35f030ebf6869fe52f0a3e6c3ea8 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sat, 20 Apr 2024 06:17:02 +0000 Subject: [PATCH 1293/1416] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- satpy/tests/reader_tests/test_msi_safe.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index f9e19f790f..2408b0d491 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -1357,7 +1357,7 @@ S2__OPER_GIP_L2ACSC_MPC__20220121T000003_V20220125T022000_21000101T000000_B00 S2__OPER_GIP_L2ACAC_MPC__20220121T000004_V20220125T022000_21000101T000000_B00 S2__OPER_GIP_PROBA2_MPC__20231208T000510_V20231213T070000_21000101T000000_B00 - + CopernicusDEM30 S2__OPER_AUX_UT1UTC_PDMC_20240404T000000_V20240405T000000_20250404T000000 From fe8ac5cee6f14dc520dae0c21f0de7172801b0de Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sat, 20 Apr 2024 16:34:25 +0800 Subject: [PATCH 1294/1416] Update test_msi_safe.py --- satpy/tests/reader_tests/test_msi_safe.py | 236 +++++++++------------- 1 file changed, 98 insertions(+), 138 deletions(-) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 2408b0d491..03c52ac22e 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -1415,6 +1415,38 @@ """ # noqa 
+PROCESS_LEVELS = ["L1C", "oldL1C", "L2A"] +MTD_XMLS = [mtd_l1c_xml, mtd_l1c_old_xml, mtd_l2a_xml] +TILE_XMLS = [mtd_l1c_tile_xml, mtd_l1c_tile_xml, mtd_l1c_tile_xml] + +def mtd_xml_builder(process_level, mask_saturated=True, band_name=None): + """Build fake SAFE MTD XML.""" + from satpy.readers.msi_safe import SAFEMSIMDXML + filename_info = dict(observation_time=None, dtile_number=None, band_name=band_name, fmission_id="S2A", + process_level=process_level.replace("old", "")) + xml_fh = SAFEMSIMDXML(StringIO(MTD_XMLS[PROCESS_LEVELS.index(process_level)]), + filename_info, mock.MagicMock(), mask_saturated=mask_saturated) + return xml_fh + +def tile_xml_builder(process_level, band_name=None): + """Build fake SAFE Tile XML.""" + from satpy.readers.msi_safe import SAFEMSITileMDXML + filename_info = dict(observation_time=None, dtile_number=None, band_name=band_name, fmission_id="S2A", + process_level=process_level.replace("old", "")) + xml_tile_fh = SAFEMSITileMDXML(BytesIO(TILE_XMLS[PROCESS_LEVELS.index(process_level)]), + filename_info, mock.MagicMock()) + return xml_tile_fh + +def jp2_builder(process_level, band_name, mask_saturated=True): + """Build fake SAFE jp2 image file.""" + from satpy.readers.msi_safe import SAFEMSIL1C + filename_info = dict(observation_time=None, dtile_number=None, band_name=band_name, fmission_id="S2A", + process_level=process_level.replace("old", "")) + xml_xh = mtd_xml_builder(process_level, mask_saturated, band_name) + tile_xml_xh = tile_xml_builder(process_level, band_name) + jp2_fh = SAFEMSIL1C("somefile", filename_info, mock.MagicMock(), xml_xh, tile_xml_xh) + return jp2_fh + class TestTileXML: """Test the SAFE TILE XML file handler. @@ -1423,17 +1455,9 @@ class TestTileXML: """ - def setup_method(self): - """Set up the test case.""" - from satpy.readers.msi_safe import SAFEMSITileMDXML - l1c_filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A", process_level="L1C") - l2a_filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A", process_level="L2A") - self.l1c_xml_tile_fh = SAFEMSITileMDXML(BytesIO(mtd_l1c_tile_xml), l1c_filename_info, mock.MagicMock()) - self.l2a_xml_tile_fh = SAFEMSITileMDXML(BytesIO(mtd_l1c_tile_xml), l2a_filename_info, mock.MagicMock()) - - @pytest.mark.parametrize(("process_level","angle_name", "angle_block", "angle_type", "expected"), + @pytest.mark.parametrize(("process_level","angle_name", "angle_tag", "expected"), [ - ("L1C", "satellite_zenith_angle", "Viewing_Incidence_Angles_Grids", "Zenith", + ("L1C", "satellite_zenith_angle", ("Viewing_Incidence_Angles_Grids", "Zenith"), [[11.7128, 11.18397802, 10.27667671, 9.35384969, 8.42850504, 7.55445611, 6.65475545, 5.66517232, 4.75893757, 4.04976844], [11.88606009, 10.9799713, 10.07083278, 9.14571825, 8.22607131, @@ -1454,7 +1478,7 @@ def setup_method(self): 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837]]), - ("L2A", "solar_zenith_angle_l2a", "Sun_Angles_Grid", "Zenith", + ("L2A", "solar_zenith_angle_l2a", ("Sun_Angles_Grid", "Zenith"), [[39.8824, 39.83721367, 39.79230847, 39.74758442, 39.7030415, 39.65867687, 39.61455566, 39.57061558, 39.52685664, 39.48331372], [39.78150175, 39.73629896, 39.69128852, 39.64643679, 39.6018404, @@ -1475,17 +1499,15 @@ def setup_method(self): 38.84936063, 38.80464763, 38.76011645, 38.7157479, 38.67164839], [38.97531575, 38.92950771, 38.88389967, 38.83852091, 38.7933053, 38.74831897, 38.7034912, 38.65891427, 38.61446851, 
38.57030388]]), - ("L1C", "moon_zenith_angle", "Sun_Angles_Grid", "Zenith", None) + ("L1C", "moon_zenith_angle", ("Sun_Angles_Grid", "Zenith"), None) ]) - def test_angles(self, process_level, angle_name, angle_block, angle_type, expected): + def test_angles(self, process_level, angle_name, angle_tag, expected): """Test reading angles array.""" - info = dict(xml_tag=angle_block, xml_item=angle_type) if "satellite" in angle_name else \ - dict(xml_tag=angle_block + "/" + angle_type) + info = dict(xml_tag=angle_tag[0], xml_item=angle_tag[1]) if "satellite" in angle_name else \ + dict(xml_tag=angle_tag[0] + "/" + angle_tag[1]) + xml_tile_fh = tile_xml_builder(process_level) - if process_level == "L1C": - res = self.l1c_xml_tile_fh.get_dataset(make_dataid(name=angle_name, resolution=60), info) - else: - res = self.l2a_xml_tile_fh.get_dataset(make_dataid(name=angle_name, resolution=60), info) + res = xml_tile_fh.get_dataset(make_dataid(name=angle_name, resolution=60), info) if res is not None: res = res[::200, ::200] @@ -1500,7 +1522,8 @@ def test_navigation(self): crs = CRS("EPSG:32616") dsid = make_dataid(name="B01", resolution=60) - result = self.l1c_xml_tile_fh.get_area_def(dsid) + xml_tile_fh = tile_xml_builder("L1C") + result = xml_tile_fh.get_area_def(dsid) area_extent = (499980.0, 3590220.0, 609780.0, 3700020.0) assert result.crs == crs np.testing.assert_allclose(result.area_extent, area_extent) @@ -1511,68 +1534,45 @@ class TestMTDXML: def setup_method(self): """Set up the test case.""" - self.l1c_filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A", process_level="L1C") - self.l2a_filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A", process_level="L2A") - self.fake_data = xr.DataArray([[[0, 1, 2, 3], - [4, 1000, 65534, 65535]]], - dims=["band", "x", "y"]) + self.fake_data = xr.DataArray([[[0, 1, 2, 3], [4, 1000, 65534, 65535]]], dims=["band", "x", "y"]) @pytest.mark.parametrize(("process_level", "mask_saturated", "band_name", "expected"), [ - ("L1C", True, "B01", [[[np.nan, -9.99, -9.98, -9.97], - [-9.96, 0, 645.34, np.inf]]]), - ("L1C", False, "B10", [[[np.nan, -19.99, -19.98, -19.97], - [-19.96, -10, 635.34, 635.35]]]), - ("oldL1C", True, "B01", [[[np.nan, 0.01, 0.02, 0.03], - [0.04, 10, 655.34, np.inf]]]), - ("L2A", False, "B03", [[[np.nan, -9.99, -9.98, -9.97], - [-9.96, 0, 645.34, 645.35]]]), - ]) - def test_xml_calibration_to_reflectance(self, process_level, mask_saturated, band_name, expected): - """Test the calibration to reflectance.""" - from satpy.readers.msi_safe import SAFEMSIMDXML - l1c_old_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_old_xml), self.l1c_filename_info, mock.MagicMock()) - l1c_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), self.l1c_filename_info, mock.MagicMock(), - mask_saturated=mask_saturated) - l2a_xml_fh = SAFEMSIMDXML(StringIO(mtd_l2a_xml), self.l2a_filename_info, mock.MagicMock(), - mask_saturated=mask_saturated) + ("L1C", True, "B01", ([[[np.nan, -9.99, -9.98, -9.97], + [-9.96, 0, 645.34, np.inf]]], + [[[np.nan, -251.584265, -251.332429, -251.080593], + [-250.828757, 0., 16251.99095, np.inf]]], + [[[np.nan, 1, 2, 3], + [4, 1000, 65534, np.inf]]])), + ("L1C", False, "B10", ([[[np.nan, -19.99, -19.98, -19.97], + [-19.96, -10, 635.34, 635.35]]], + [[[np.nan, -35.465976, -35.448234, -35.430493], + [-35.412751, -17.741859, 1127.211275, 1127.229017]]], + [[[np.nan, 1, 2, 3], + [4, 1000, 65534, 65535]]])), + ("oldL1C", True, "B01", ([[[np.nan, 0.01, 0.02, 0.03], + [0.04, 10, 655.34, np.inf]]], + [[[np.nan, 
0.251836101, 0.503672202, 0.755508303], + [1.00734440, 251.836101, 16503.8271, np.inf]]], + [[[np.nan, 1, 2, 3], + [4, 1000, 65534, np.inf]]])), + ("L2A", False, "B03", ([[[np.nan, -9.99, -9.98, -9.97], + [-9.96, 0, 645.34, 645.35]]], + [[[np.nan, -238.571863, -238.333052, -238.094241], + [-237.855431, 0, 15411.407995, 15411.646806]]], + [[[np.nan, 1, 2, 3], + [4, 1000, 65534, 65535]]])), + ]) + def test_xml_calibration(self, process_level, mask_saturated, band_name, expected): + """Test the calibration to reflectance/radiance/counts.""" + xml_fh = mtd_xml_builder(process_level, mask_saturated) - if process_level == "oldL1C": - result = l1c_old_xml_fh.calibrate_to_reflectances(self.fake_data, band_name) - elif process_level == "L1C": - result = l1c_xml_fh.calibrate_to_reflectances(self.fake_data, band_name) - else: - result = l2a_xml_fh.calibrate_to_reflectances(self.fake_data, band_name) + res1 = xml_fh.calibrate_to_reflectances(self.fake_data, band_name) + res2 = xml_fh.calibrate_to_radiances(self.fake_data, band_name) + res3 = xml_fh._sanitize_data(self.fake_data) - np.testing.assert_allclose(result, expected) - - @pytest.mark.parametrize(("process_level", "mask_saturated", "band_name", "expected"), - [ - ("L1C", True, "B01", [[[np.nan, -251.584265, -251.332429, -251.080593], - [-250.828757, 0., 16251.99095, np.inf]]]), - ("L1C", False, "B10", [[[np.nan, -35.465976, -35.448234, -35.430493], - [-35.412751, -17.741859, 1127.211275, 1127.229017]]]), - ("oldL1C", True, "B01", [[[np.nan, 0.251836101, 0.503672202, 0.755508303], - [1.00734440, 251.836101, 16503.8271, np.inf]]]), - ("L2A", False, "B03", [[[np.nan, -238.571863, -238.333052, -238.094241], - [-237.855431, 0, 15411.407995, 15411.646806]]]), - ]) - def test_xml_calibration_to_radiance(self, process_level, mask_saturated, band_name, expected): - """Test the calibration to reflectance.""" - from satpy.readers.msi_safe import SAFEMSIMDXML - l1c_old_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_old_xml), self.l1c_filename_info, mock.MagicMock()) - l1c_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), self.l1c_filename_info, mock.MagicMock(), - mask_saturated=mask_saturated) - l2a_xml_fh = SAFEMSIMDXML(StringIO(mtd_l2a_xml), self.l2a_filename_info, mock.MagicMock(), - mask_saturated=mask_saturated) - if process_level == "oldL1C": - result = l1c_old_xml_fh.calibrate_to_radiances(self.fake_data, band_name) - elif process_level == "L1C": - result = l1c_xml_fh.calibrate_to_radiances(self.fake_data, band_name) - else: - result = l2a_xml_fh.calibrate_to_radiances(self.fake_data, band_name) - - np.testing.assert_allclose(result, expected) + results = (res1, res2, res3) + np.testing.assert_allclose(results, expected) @pytest.mark.parametrize(("process_level", "mask_saturated", "band_name", "expected"), [ @@ -1582,33 +1582,19 @@ def test_xml_calibration_to_radiance(self, process_level, mask_saturated, band_n ("L2A", True, "WVP", [[[np.nan, 0.001, 0.002, 0.003], [0.004, 1., 65.534, np.inf]]]), ("L2A", False, "CLOUD", None), + ("L2A", False, "B10", None), ]) def test_xml_calibration_to_atmospheric(self, process_level, mask_saturated, band_name, expected): """Test the calibration to L2A atmospheric products.""" - from satpy.readers.msi_safe import SAFEMSIMDXML - l1c_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), self.l1c_filename_info, mock.MagicMock(), - mask_saturated=mask_saturated) - l2a_xml_fh = SAFEMSIMDXML(StringIO(mtd_l2a_xml), self.l2a_filename_info, mock.MagicMock(), - mask_saturated=mask_saturated) - if process_level == "L1C": - result = 
l1c_xml_fh.calibrate_to_atmospheric(self.fake_data, band_name) - else: - result = l2a_xml_fh.calibrate_to_atmospheric(self.fake_data, band_name) + xml_fh = mtd_xml_builder(process_level, mask_saturated) + + result =xml_fh.calibrate_to_atmospheric(self.fake_data, band_name) if result is not None: np.testing.assert_allclose(result, expected) else: assert result is expected - def test_xml_calibration_to_counts(self): - """Test the calibration to counts.""" - from satpy.readers.msi_safe import SAFEMSIMDXML - l1c_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), self.l1c_filename_info, mock.MagicMock(), - mask_saturated=True) - result = l1c_xml_fh._sanitize_data(self.fake_data) - np.testing.assert_allclose(result, [[[np.nan, 1, 2, 3], - [4, 1000, 65534, np.inf]]]) - class TestSAFEMSIL1C: """Test case for image reading (jp2k).""" @@ -1618,7 +1604,7 @@ def setup_method(self): self.fake_data = xr.Dataset({"band_data": xr.DataArray([[[0, 1], [65534, 65535]]], dims=["band", "x", "y"])}) - @pytest.mark.parametrize(("process_level", "mask_saturated", "band_name", "calibration", "expected"), + @pytest.mark.parametrize(("process_level", "mask_saturated", "dataset_name", "calibration", "expected"), [ ("L1C", True, "B01", "reflectance", [[np.nan, -9.99], [645.34, np.inf]]), ("L1C", False, "B02", "radiance", [[np.nan, -262.148396], [16934.419021, 16934.681431]]), @@ -1628,56 +1614,30 @@ def setup_method(self): ("L2A", True, "WVP_L2A", "water_vapor", [[np.nan, 0.001], [65.534, np.inf]]), ("L2A", True, "SNOW_L2A", "water_vapor", None), ]) - def test_calibration_and_masking(self, process_level, mask_saturated, band_name, calibration, expected): + def test_calibration_and_masking(self, process_level, mask_saturated, dataset_name, calibration, expected): """Test that saturated is masked with inf when requested and that calibration is performed.""" - from satpy.readers.msi_safe import SAFEMSIL1C, SAFEMSIMDXML, SAFEMSITileMDXML - l1c_filename_info = dict(observation_time=None, fmission_id="S2A", band_name=band_name, dtile_number=None, - process_level="L1C") - l2a_filename_info = dict(observation_time=None, fmission_id="S2A", band_name=band_name.replace("_L2A", ""), - dtile_number=None, process_level="L2A") - l1c_tile_mda = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(mtd_l1c_tile_xml), l1c_filename_info, - mock.MagicMock()) - l2a_tile_mda = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(mtd_l1c_tile_xml), l2a_filename_info, - mock.MagicMock()) - l1c_mda = SAFEMSIMDXML(StringIO(mtd_l1c_xml), l1c_filename_info, mock.MagicMock(), - mask_saturated=mask_saturated) - l2a_mda = SAFEMSIMDXML(StringIO(mtd_l2a_xml), l2a_filename_info, mock.MagicMock(), - mask_saturated=mask_saturated) - - if process_level == "L1C": - jp2_fh = SAFEMSIL1C("somefile", l1c_filename_info, mock.MagicMock(), l1c_mda, l1c_tile_mda) - else: - jp2_fh = SAFEMSIL1C("somefile", l2a_filename_info, mock.MagicMock(), l2a_mda, l2a_tile_mda) + jp2_fh = jp2_builder(process_level, dataset_name.replace("_L2A", ""), mask_saturated) with mock.patch("xarray.open_dataset", return_value=self.fake_data): - res = jp2_fh.get_dataset(make_dataid(name=band_name, calibration=calibration), info=dict()) + res = jp2_fh.get_dataset(make_dataid(name=dataset_name, calibration=calibration), info=dict()) if res is not None: np.testing.assert_allclose(res, expected) else: assert res is expected - def test_filename_dsname_mismatch(self): - """Test when dataset name and file name mismatch, the data and its area definition should both be None.""" - from satpy.readers.msi_safe import 
SAFEMSIL1C, SAFEMSIMDXML, SAFEMSITileMDXML - l1c_filename_info = dict(observation_time=None, fmission_id="S2A", band_name="B01", dtile_number=None, - process_level="L1C") - l2a_filename_info = dict(observation_time=None, fmission_id="S2A", band_name="B10", dtile_number=None, - process_level="L2A") - l1c_tile_mda = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(mtd_l1c_tile_xml), l1c_filename_info, - mock.MagicMock()) - l2a_tile_mda = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(mtd_l1c_tile_xml), l2a_filename_info, - mock.MagicMock()) - l1c_mda = SAFEMSIMDXML(StringIO(mtd_l1c_xml), l1c_filename_info, mock.MagicMock(), mask_saturated=True) - l2a_mda = SAFEMSIMDXML(StringIO(mtd_l2a_xml), l2a_filename_info, mock.MagicMock(), mask_saturated=True) - l1c_jp2_fh = SAFEMSIL1C("somefile", l1c_filename_info, mock.MagicMock(), l1c_mda, l1c_tile_mda) - l2a_jp2_fh = SAFEMSIL1C("somefile", l2a_filename_info, mock.MagicMock(), l2a_mda, l2a_tile_mda) + @pytest.mark.parametrize(("process_level", "band_name", "dataset_name"), + [ + ("L1C", "B01", "B03"), + ("L2A", "B02", "B03_L2A"), + ]) + def test_filename_dsname_mismatch(self, process_level, band_name, dataset_name): + """Test when dataset name and file band name mismatch, the data and its area definition should both be None.""" + jp2_fh = jp2_builder(process_level, band_name) + + with mock.patch("xarray.open_dataset", return_value=self.fake_data): + res1 = jp2_fh.get_dataset(make_dataid(name=dataset_name), info=dict()) + res2 = jp2_fh.get_area_def(make_dataid(name=dataset_name)) - with (mock.patch("xarray.open_dataset", return_value=self.fake_data)): - res1 = l1c_jp2_fh.get_dataset(make_dataid(name="B02"), info=dict()) - res2 = l2a_jp2_fh.get_dataset(make_dataid(name="B11_L2A"), info=dict()) - res3 = l1c_jp2_fh.get_area_def(make_dataid(name="B02")) - res4 = l2a_jp2_fh.get_area_def(make_dataid(name="B11_L2A")) assert res1 is None assert res2 is None - assert res3 is None - assert res4 is None + From 80278e173b38ddedb0e728510c27560190252a50 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sat, 20 Apr 2024 08:35:30 +0000 Subject: [PATCH 1295/1416] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- satpy/tests/reader_tests/test_msi_safe.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 03c52ac22e..96e48f1ac1 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -1640,4 +1640,3 @@ def test_filename_dsname_mismatch(self, process_level, band_name, dataset_name): assert res1 is None assert res2 is None - From dfb7e3ceb5c7d2d91a123c86528828a0ed7d7a8b Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sat, 20 Apr 2024 16:58:38 +0800 Subject: [PATCH 1296/1416] Update test_msi_safe.py --- satpy/tests/reader_tests/test_msi_safe.py | 40 ++++++++++------------- 1 file changed, 18 insertions(+), 22 deletions(-) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 96e48f1ac1..5c4b8c7b49 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -1419,32 +1419,24 @@ MTD_XMLS = [mtd_l1c_xml, mtd_l1c_old_xml, mtd_l2a_xml] TILE_XMLS = [mtd_l1c_tile_xml, mtd_l1c_tile_xml, mtd_l1c_tile_xml] -def mtd_xml_builder(process_level, mask_saturated=True, band_name=None): - """Build fake SAFE MTD XML.""" - from 
satpy.readers.msi_safe import SAFEMSIMDXML +def xml_builder(process_level, mask_saturated=True, band_name=None): + """Build fake SAFE MTD/Tile XML.""" + from satpy.readers.msi_safe import SAFEMSIMDXML, SAFEMSITileMDXML filename_info = dict(observation_time=None, dtile_number=None, band_name=band_name, fmission_id="S2A", process_level=process_level.replace("old", "")) xml_fh = SAFEMSIMDXML(StringIO(MTD_XMLS[PROCESS_LEVELS.index(process_level)]), filename_info, mock.MagicMock(), mask_saturated=mask_saturated) - return xml_fh - -def tile_xml_builder(process_level, band_name=None): - """Build fake SAFE Tile XML.""" - from satpy.readers.msi_safe import SAFEMSITileMDXML - filename_info = dict(observation_time=None, dtile_number=None, band_name=band_name, fmission_id="S2A", - process_level=process_level.replace("old", "")) xml_tile_fh = SAFEMSITileMDXML(BytesIO(TILE_XMLS[PROCESS_LEVELS.index(process_level)]), filename_info, mock.MagicMock()) - return xml_tile_fh + return xml_fh, xml_tile_fh def jp2_builder(process_level, band_name, mask_saturated=True): """Build fake SAFE jp2 image file.""" from satpy.readers.msi_safe import SAFEMSIL1C filename_info = dict(observation_time=None, dtile_number=None, band_name=band_name, fmission_id="S2A", process_level=process_level.replace("old", "")) - xml_xh = mtd_xml_builder(process_level, mask_saturated, band_name) - tile_xml_xh = tile_xml_builder(process_level, band_name) - jp2_fh = SAFEMSIL1C("somefile", filename_info, mock.MagicMock(), xml_xh, tile_xml_xh) + xml_fh, tile_xml_fh = xml_builder(process_level, mask_saturated, band_name) + jp2_fh = SAFEMSIL1C("somefile", filename_info, mock.MagicMock(), xml_fh, tile_xml_fh) return jp2_fh @@ -1505,7 +1497,7 @@ def test_angles(self, process_level, angle_name, angle_tag, expected): """Test reading angles array.""" info = dict(xml_tag=angle_tag[0], xml_item=angle_tag[1]) if "satellite" in angle_name else \ dict(xml_tag=angle_tag[0] + "/" + angle_tag[1]) - xml_tile_fh = tile_xml_builder(process_level) + xml_tile_fh = xml_builder(process_level)[1] res = xml_tile_fh.get_dataset(make_dataid(name=angle_name, resolution=60), info) if res is not None: @@ -1522,7 +1514,7 @@ def test_navigation(self): crs = CRS("EPSG:32616") dsid = make_dataid(name="B01", resolution=60) - xml_tile_fh = tile_xml_builder("L1C") + xml_tile_fh = xml_builder("L1C")[1] result = xml_tile_fh.get_area_def(dsid) area_extent = (499980.0, 3590220.0, 609780.0, 3700020.0) assert result.crs == crs @@ -1565,7 +1557,7 @@ def setup_method(self): ]) def test_xml_calibration(self, process_level, mask_saturated, band_name, expected): """Test the calibration to reflectance/radiance/counts.""" - xml_fh = mtd_xml_builder(process_level, mask_saturated) + xml_fh = xml_builder(process_level, mask_saturated)[0] res1 = xml_fh.calibrate_to_reflectances(self.fake_data, band_name) res2 = xml_fh.calibrate_to_radiances(self.fake_data, band_name) @@ -1586,7 +1578,7 @@ def test_xml_calibration(self, process_level, mask_saturated, band_name, expecte ]) def test_xml_calibration_to_atmospheric(self, process_level, mask_saturated, band_name, expected): """Test the calibration to L2A atmospheric products.""" - xml_fh = mtd_xml_builder(process_level, mask_saturated) + xml_fh = xml_builder(process_level, mask_saturated)[0] result =xml_fh.calibrate_to_atmospheric(self.fake_data, band_name) @@ -1606,16 +1598,20 @@ def setup_method(self): @pytest.mark.parametrize(("process_level", "mask_saturated", "dataset_name", "calibration", "expected"), [ - ("L1C", True, "B01", "reflectance", 
[[np.nan, -9.99], [645.34, np.inf]]), - ("L1C", False, "B02", "radiance", [[np.nan, -262.148396], [16934.419021, 16934.681431]]), - ("L1C", True, "B03", "counts", [[np.nan, 1], [65534, np.inf]]), - ("L1C", True, "B01", "aerosol_thickness", None), + # ("L1C", True, "B01", "reflectance", [[np.nan, -9.99], [645.34, np.inf]]), + # ("L1C", False, "B02", "radiance", [[np.nan, -262.148396], [16934.419021, 16934.681431]]), + # ("L1C", True, "B03", "counts", [[np.nan, 1], [65534, np.inf]]), + # ("L1C", True, "B01", "aerosol_thickness", None), + ("L2A", False, "B01_L2A", "reflectance", [[np.nan, -9.99], [645.34, 645.35]]), + ("L2A", True, "B02_L2A", "radiance", [[np.nan, -265.970568], [17181.325973, np.inf]]), + ("L2A", True, "B03_L2A", "counts", [[np.nan, 1], [65534, np.inf]]), ("L2A", False, "AOT_L2A", "aerosol_thickness", [[np.nan, 0.001], [65.534, 65.535]]), ("L2A", True, "WVP_L2A", "water_vapor", [[np.nan, 0.001], [65.534, np.inf]]), ("L2A", True, "SNOW_L2A", "water_vapor", None), ]) def test_calibration_and_masking(self, process_level, mask_saturated, dataset_name, calibration, expected): """Test that saturated is masked with inf when requested and that calibration is performed.""" + jp2_fh = jp2_builder(process_level, dataset_name.replace("_L2A", ""), mask_saturated) with mock.patch("xarray.open_dataset", return_value=self.fake_data): From 59b93b4d4933aad7d597fa7171ab29cefe36e2fe Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sat, 20 Apr 2024 17:11:59 +0800 Subject: [PATCH 1297/1416] fix --- satpy/readers/msi_safe.py | 4 +++- satpy/tests/reader_tests/test_msi_safe.py | 23 +++++++++-------------- 2 files changed, 12 insertions(+), 15 deletions(-) diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index d99101f1bf..d0aa94538c 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -167,7 +167,9 @@ def calibrate_to_reflectances(self, data, band_name): def calibrate_to_atmospheric(self, data, band_name): """Calibrate L2A AOT/WVP product.""" atmospheric_bands = ["AOT", "WVP"] - if self.process_level == "L1C" or (self.process_level == "L2A" and band_name not in atmospheric_bands): + if self.process_level == "L1C": + return + elif self.process_level == "L2A" and band_name not in atmospheric_bands: return quantification = float(self.root.find(f".//{band_name}_QUANTIFICATION_VALUE").text) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 5c4b8c7b49..59c3564fd4 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -1596,23 +1596,18 @@ def setup_method(self): self.fake_data = xr.Dataset({"band_data": xr.DataArray([[[0, 1], [65534, 65535]]], dims=["band", "x", "y"])}) - @pytest.mark.parametrize(("process_level", "mask_saturated", "dataset_name", "calibration", "expected"), + @pytest.mark.parametrize(("mask_saturated", "dataset_name", "calibration", "expected"), [ - # ("L1C", True, "B01", "reflectance", [[np.nan, -9.99], [645.34, np.inf]]), - # ("L1C", False, "B02", "radiance", [[np.nan, -262.148396], [16934.419021, 16934.681431]]), - # ("L1C", True, "B03", "counts", [[np.nan, 1], [65534, np.inf]]), - # ("L1C", True, "B01", "aerosol_thickness", None), - ("L2A", False, "B01_L2A", "reflectance", [[np.nan, -9.99], [645.34, 645.35]]), - ("L2A", True, "B02_L2A", "radiance", [[np.nan, -265.970568], [17181.325973, np.inf]]), - ("L2A", True, "B03_L2A", "counts", [[np.nan, 1], [65534, np.inf]]), - ("L2A", False, "AOT_L2A", "aerosol_thickness", [[np.nan, 0.001], [65.534, 
65.535]]), - ("L2A", True, "WVP_L2A", "water_vapor", [[np.nan, 0.001], [65.534, np.inf]]), - ("L2A", True, "SNOW_L2A", "water_vapor", None), + (False, "B01_L2A", "reflectance", [[np.nan, -9.99], [645.34, 645.35]]), + (True, "B02_L2A", "radiance", [[np.nan, -265.970568], [17181.325973, np.inf]]), + (True, "B03_L2A", "counts", [[np.nan, 1], [65534, np.inf]]), + (False, "AOT_L2A", "aerosol_thickness", [[np.nan, 0.001], [65.534, 65.535]]), + (True, "WVP_L2A", "water_vapor", [[np.nan, 0.001], [65.534, np.inf]]), + (True, "SNOW_L2A", "water_vapor", None), ]) - def test_calibration_and_masking(self, process_level, mask_saturated, dataset_name, calibration, expected): + def test_calibration_and_masking(self, mask_saturated, dataset_name, calibration, expected): """Test that saturated is masked with inf when requested and that calibration is performed.""" - - jp2_fh = jp2_builder(process_level, dataset_name.replace("_L2A", ""), mask_saturated) + jp2_fh = jp2_builder("L2A", dataset_name.replace("_L2A", ""), mask_saturated) with mock.patch("xarray.open_dataset", return_value=self.fake_data): res = jp2_fh.get_dataset(make_dataid(name=dataset_name, calibration=calibration), info=dict()) From 93ee913bea4b7a51b8493c9ea13f134deda2bc91 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sat, 20 Apr 2024 21:19:23 +0800 Subject: [PATCH 1298/1416] composites --- satpy/etc/composites/msi.yaml | 75 +++++++++++++++++++++++++++++++++ satpy/etc/enhancements/msi.yaml | 58 +++++++++++++++++++++++++ 2 files changed, 133 insertions(+) create mode 100644 satpy/etc/enhancements/msi.yaml diff --git a/satpy/etc/composites/msi.yaml b/satpy/etc/composites/msi.yaml index 010bd240b0..14a637b941 100644 --- a/satpy/etc/composites/msi.yaml +++ b/satpy/etc/composites/msi.yaml @@ -145,3 +145,78 @@ composites: - name: 'B02' #modifiers: [effective_solar_pathlength_corrected] standard_name: true_color + + urban_color: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B12' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B11' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected] + standard_name: natural_color + + false_color: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B08' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B03' + modifiers: [effective_solar_pathlength_corrected] + standard_name: natural_color + + true_color_l2a: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B04_L2A' + - name: 'B03_L2A' + - name: 'B02_L2A' + standard_name: true_color + + natural_color_l2a: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B11_L2A' + - name: 'B08_L2A' + - name: 'B04_L2A' + standard_name: natural_color + + urban_color_l2a: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B12_L2A' + - name: 'B11_L2A' + - name: 'B04_L2A' + standard_name: natural_color + + false_color_l2a: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B08_L2A' + - name: 'B04_L2A' + - name: 'B03_L2A' + standard_name: natural_color + + aerosol_optical_thickness: + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - name: AOT_L2A + calibration: aerosol_thickness + standard_name: aot_msi + + water_vapor_map: + compositor: 
!!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - name: WVP_L2A + calibration: water_vapor + standard_name: wvp_msi + + scene_class: + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - name: SCL_L2A + standard_name: scl_msi + diff --git a/satpy/etc/enhancements/msi.yaml b/satpy/etc/enhancements/msi.yaml new file mode 100644 index 0000000000..d72d4bb9c9 --- /dev/null +++ b/satpy/etc/enhancements/msi.yaml @@ -0,0 +1,58 @@ +enhancements: + aot_msi: + standard_name: aot_msi + operations: + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - colors: oranges + min_value: 0 + max_value: 1 + + wvp_msi: + standard_name: wvp_msi + operations: + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - colors: pubu + min_value: 0 + max_value: 4 + + scl_msi: + # The palette is defined by Sentinel-2 Products Specification Document V14.9, page 319 + # Please review https://sentinels.copernicus.eu/documents/247904/685211/S2-PDGS-TAS-DI-PSD-V14.9.pdf/3d3b6c9c-4334-dcc4-3aa7-f7c0deffbaf7?t=1643013091529 + standard_name: scl_msi + operations: + - name: palettize + method: !!python/name:satpy.enhancements.palettize + kwargs: + palettes: + - values: [ + 0, #Nodata + 1, #Saturated_defective + 2, #Topographic_shadow + 3, #Cloud_shadow + 4, #Vegetation + 5, #Not_vegetated + 6, #Water + 7, #Unclassified + 8, #Cloud_medium_probability + 9, #Cloud_high_probability + 10, #Thin_cirrus + 11, #Snow/ice + ] + colors: [[0, 0, 0], + [255, 0, 0], + [89, 89, 89], + [148, 54, 52], + [0, 176, 80], + [255, 255, 0], + [0, 112, 192], + [128, 128, 128], + [191, 191, 191], + [255, 255, 255], + [146, 205, 220], + [112, 48, 160]] From 9f0553790f88ae41c9aa25ceea88585021ec7602 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sat, 20 Apr 2024 06:17:02 +0000 Subject: [PATCH 1299/1416] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- satpy/etc/composites/msi.yaml | 3 +-- satpy/etc/enhancements/msi.yaml | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/satpy/etc/composites/msi.yaml b/satpy/etc/composites/msi.yaml index 14a637b941..0b8e340327 100644 --- a/satpy/etc/composites/msi.yaml +++ b/satpy/etc/composites/msi.yaml @@ -212,11 +212,10 @@ composites: prerequisites: - name: WVP_L2A calibration: water_vapor - standard_name: wvp_msi + standard_name: wvp_msi scene_class: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - name: SCL_L2A standard_name: scl_msi - diff --git a/satpy/etc/enhancements/msi.yaml b/satpy/etc/enhancements/msi.yaml index d72d4bb9c9..47cb8aa36b 100644 --- a/satpy/etc/enhancements/msi.yaml +++ b/satpy/etc/enhancements/msi.yaml @@ -43,7 +43,7 @@ enhancements: 9, #Cloud_high_probability 10, #Thin_cirrus 11, #Snow/ice - ] + ] colors: [[0, 0, 0], [255, 0, 0], [89, 89, 89], From 4fbfdcc88607ef9572e253bda5a2ae029266220f Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sun, 21 Apr 2024 00:00:32 +0800 Subject: [PATCH 1300/1416] ndvi --- satpy/etc/composites/msi.yaml | 34 +++++++++++++++++++++++++++++-- satpy/etc/enhancements/msi.yaml | 34 +++++++++++++++++++++++++++++++++ 2 files changed, 66 insertions(+), 2 deletions(-) diff --git a/satpy/etc/composites/msi.yaml b/satpy/etc/composites/msi.yaml index 0b8e340327..c82dc09bf8 100644 --- a/satpy/etc/composites/msi.yaml +++ b/satpy/etc/composites/msi.yaml @@ -1,8 +1,6 @@ 
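The ndvi composite added below nests a DifferenceCompositor and a SumCompositor inside a RatioCompositor, which amounts to the classic normalized difference NDVI = (B08 - B04) / (B08 + B04) on the red and near-infrared bands. A minimal sketch of the same arithmetic with plain xarray objects (illustrative only, with made-up reflectance values; the real compositors also reconcile metadata and band dimensions):

    import numpy as np
    import xarray as xr

    b04 = xr.DataArray(np.array([0.05, 0.20]))  # hypothetical red reflectances
    b08 = xr.DataArray(np.array([0.30, 0.25]))  # hypothetical near-infrared reflectances
    ndvi = (b08 - b04) / (b08 + b04)
    # -> [0.714..., 0.111...]; dense vegetation pushes NDVI toward 1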
sensor_name: visir/msi - modifiers: - rayleigh_corrected: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard @@ -168,6 +166,22 @@ composites: modifiers: [effective_solar_pathlength_corrected] standard_name: natural_color + ndvi: + # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndvi/ + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.RatioCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: B08 + - name: B04 + - compositor: !!python/name:satpy.composites.SumCompositor + prerequisites: + - name: B08 + - name: B04 + standard_name: ndvi_msi + true_color_l2a: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: @@ -219,3 +233,19 @@ composites: prerequisites: - name: SCL_L2A standard_name: scl_msi + + ndvi_l2a: + # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndvi/ + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.RatioCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: B08_L2A + - name: B04_L2A + - compositor: !!python/name:satpy.composites.SumCompositor + prerequisites: + - name: B08_L2A + - name: B04_L2A + standard_name: ndvi_msi diff --git a/satpy/etc/enhancements/msi.yaml b/satpy/etc/enhancements/msi.yaml index 47cb8aa36b..9aa8118c37 100644 --- a/satpy/etc/enhancements/msi.yaml +++ b/satpy/etc/enhancements/msi.yaml @@ -1,4 +1,38 @@ enhancements: + ndvi_msi: + # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndvi/ + standard_name: ndvi_msi + operations: + - name: palettize + method: !!python/name:satpy.enhancements.palettize + kwargs: + palettes: + - min_value: -1 + max_value: 1 + values: [-1, -0.5, -0.2, -0.1, 0, 0.025, 0.05, 0.075, 0.1, 0.125, 0.15, 0.175, 0.2, 0.25, 0.3, 0.35, 0.4, 0.45, 0.5, 0.55, 0.6, 1] + colors: [[12, 12, 12], + [12, 12, 12], + [191, 191, 191], + [219, 219, 219], + [234, 234, 234], + [255, 249, 204], + [237, 232, 181], + [221, 216, 155], + [204, 198, 130], + [188, 183, 107], + [175, 193, 96], + [163, 204, 89], + [145, 191, 81], + [127, 178, 71], + [112, 163, 63], + [96, 150, 53], + [79, 137, 45], + [63, 124, 35], + [48, 109, 28], + [33, 96, 17], + [15, 84, 10], + [0, 68, 0]] + aot_msi: standard_name: aot_msi operations: From 72af85752d7dfc95083b3c5b5b26d2dc7cd30bfb Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sun, 21 Apr 2024 18:32:31 +0800 Subject: [PATCH 1301/1416] composites --- satpy/etc/composites/msi.yaml | 120 ++++++++++++++++++++++++++++ satpy/etc/enhancements/msi.yaml | 134 ++++++++++++++++++++++++++++++-- 2 files changed, 246 insertions(+), 8 deletions(-) diff --git a/satpy/etc/composites/msi.yaml b/satpy/etc/composites/msi.yaml index c82dc09bf8..e6b2159182 100644 --- a/satpy/etc/composites/msi.yaml +++ b/satpy/etc/composites/msi.yaml @@ -182,6 +182,66 @@ composites: - name: B04 standard_name: ndvi_msi + ndmi: + # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndmi/ + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.RatioCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: B08 + - name: B11 + - 
compositor: !!python/name:satpy.composites.SumCompositor
+        prerequisites:
+        - name: B08
+        - name: B11
+    standard_name: ndmi_msi
+
+  ndwi:
+    # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndwi/
+    compositor: !!python/name:satpy.composites.SingleBandCompositor
+    prerequisites:
+    - compositor: !!python/name:satpy.composites.RatioCompositor
+      prerequisites:
+      - compositor: !!python/name:satpy.composites.DifferenceCompositor
+        prerequisites:
+        - name: B03
+        - name: B08
+      - compositor: !!python/name:satpy.composites.SumCompositor
+        prerequisites:
+        - name: B03
+        - name: B08
+    standard_name: ndwi_msi
+
+  ndsi:
+    # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndsi/
+    compositor: !!python/name:satpy.composites.MaskingCompositor
+    prerequisites:
+    - name: B11
+    - compositor: !!python/name:satpy.composites.RatioCompositor
+      prerequisites:
+      - compositor: !!python/name:satpy.composites.DifferenceCompositor
+        prerequisites:
+        - name: B03
+        - name: B11
+      - compositor: !!python/name:satpy.composites.SumCompositor
+        prerequisites:
+        - name: B03
+        - name: B11
+    conditions:
+    - method: less_equal
+      value: 0.42
+      transparency: 100
+    standard_name: ndsi_msi
+
+  ndsi_with_true_color:
+    compositor: !!python/name:satpy.composites.BackgroundCompositor
+    prerequisites:
+    - name: ndsi
+    - name: true_color
+    standard_name: no_enhancement
+
   true_color_l2a:
     compositor: !!python/name:satpy.composites.GenericCompositor
     prerequisites:
@@ -249,3 +309,63 @@ composites:
       - name: B08_L2A
       - name: B04_L2A
     standard_name: ndvi_msi
+
+  ndmi_l2a:
+    # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndmi/
+    compositor: !!python/name:satpy.composites.SingleBandCompositor
+    prerequisites:
+    - compositor: !!python/name:satpy.composites.RatioCompositor
+      prerequisites:
+      - compositor: !!python/name:satpy.composites.DifferenceCompositor
+        prerequisites:
+        - name: B8A_L2A
+        - name: B11_L2A
+      - compositor: !!python/name:satpy.composites.SumCompositor
+        prerequisites:
+        - name: B8A_L2A
+        - name: B11_L2A
+    standard_name: ndmi_msi
+
+  ndwi_l2a:
+    # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndwi/
+    compositor: !!python/name:satpy.composites.SingleBandCompositor
+    prerequisites:
+    - compositor: !!python/name:satpy.composites.RatioCompositor
+      prerequisites:
+      - compositor: !!python/name:satpy.composites.DifferenceCompositor
+        prerequisites:
+        - name: B03_L2A
+        - name: B08_L2A
+      - compositor: !!python/name:satpy.composites.SumCompositor
+        prerequisites:
+        - name: B03_L2A
+        - name: B08_L2A
+    standard_name: ndwi_msi
+
+  ndsi_l2a:
+    # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndsi/
+    compositor: !!python/name:satpy.composites.MaskingCompositor
+    prerequisites:
+    - name: B11_L2A
+    - compositor: !!python/name:satpy.composites.RatioCompositor
+      prerequisites:
+      - compositor: !!python/name:satpy.composites.DifferenceCompositor
+        prerequisites:
+        - name: B03_L2A
+        - name: B11_L2A
+      - compositor: !!python/name:satpy.composites.SumCompositor
+        prerequisites:
+        - name: B03_L2A
+        - name: B11_L2A
+    conditions:
+    - method: less_equal
+      value: 0.42
+      transparency: 100
+    standard_name: ndsi_msi
+
+  ndsi_l2a_with_true_color_l2a:
+    compositor: !!python/name:satpy.composites.BackgroundCompositor
+    prerequisites:
+    - name: ndsi_l2a
+    - name: true_color_l2a
+    standard_name: no_enhancement
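The nd* recipes in the composites file above all derive a normalized difference index by nesting a DifferenceCompositor and a SumCompositor inside a RatioCompositor. A rough standalone sketch of the arithmetic that chain performs, using plain numpy and made-up reflectances (the real compositors operate on xarray.DataArray objects that carry Satpy metadata):

    import numpy as np

    def normalized_difference(a, b):
        # Ratio(Difference(a, b), Sum(a, b)) boils down to (a - b) / (a + b).
        return (a - b) / (a + b)

    # Toy NDVI-style input: NIR (B08) against red (B04),
    # one vegetated pixel and one bare-soil pixel.
    b08 = np.array([0.45, 0.30])
    b04 = np.array([0.05, 0.25])
    print(normalized_difference(b08, b04))  # approx. [0.8, 0.09]

The ndsi and ndsi_l2a entries feed the same ratio into a MaskingCompositor: every pixel whose NDSI is less than or equal to 0.42 becomes fully transparent (transparency: 100), so only likely snow survives when ndsi_with_true_color lays the result over true_color via the BackgroundCompositor. The matching colorize setups follow in the enhancements file below.
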
diff --git a/satpy/etc/enhancements/msi.yaml b/satpy/etc/enhancements/msi.yaml
index 9aa8118c37..d221008c99 100644
--- a/satpy/etc/enhancements/msi.yaml
+++ b/satpy/etc/enhancements/msi.yaml
@@ -9,19 +9,42 @@ enhancements:
           palettes:
             - min_value: -1
               max_value: 1
-              values: [-1, -0.5, -0.2, -0.1, 0, 0.025, 0.05, 0.075, 0.1, 0.125, 0.15, 0.175, 0.2, 0.25, 0.3, 0.35, 0.4, 0.45, 0.5, 0.55, 0.6, 1]
-              colors: [[12, 12, 12],
+              values: [
+                -1,
+                -0.5,
+                -0.2,
+                -0.1,
+                0,
+                0.025,
+                0.05,
+                0.075,
+                0.1,
+                0.125,
+                0.15,
+                0.175,
+                0.2,
+                0.25,
+                0.3,
+                0.35,
+                0.4,
+                0.45,
+                0.5,
+                0.55,
+                0.6,
+                1,
+              ]
+              colors: [
                 [12, 12, 12],
                 [191, 191, 191],
                 [219, 219, 219],
                 [234, 234, 234],
                 [255, 249, 204],
                 [237, 232, 181],
-               [221, 216, 155],
+                [221, 216, 155],
                 [204, 198, 130],
                 [188, 183, 107],
                 [175, 193, 96],
-               [163, 204, 89],
+                [163, 204, 89],
                 [145, 191, 81],
                 [127, 178, 71],
                 [112, 163, 63],
@@ -31,7 +54,96 @@ enhancements:
                 [48, 109, 28],
                 [33, 96, 17],
                 [15, 84, 10],
-                [0, 68, 0]]
+                [0, 68, 0],
+                [0, 68, 0],
+              ]
+
+  ndmi_msi:
+    # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndmi/
+    standard_name: ndmi_msi
+    operations:
+      - name: colorize
+        method: !!python/name:satpy.enhancements.colorize
+        kwargs:
+          palettes:
+            - min_value: -0.8
+              max_value: 0.8
+              values: [
+                -0.8,
+                -0.24,
+                -0.032,
+                0.032,
+                0.24,
+                0.8,
+              ]
+              colors: [
+                [128, 0, 0],
+                [255, 0, 0],
+                [255, 255, 0],
+                [0, 255, 255],
+                [0, 0, 255],
+                [0, 0, 128],
+              ]
+
+  ndwi_msi:
+    # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndwi/
+    standard_name: ndwi_msi
+    operations:
+      - name: colorize
+        method: !!python/name:satpy.enhancements.colorize
+        kwargs:
+          palettes:
+            - min_value: -0.8
+              max_value: 0.8
+              values: [
+                -0.8,
+                -0.7,
+                -0.6,
+                -0.5,
+                -0.4,
+                -0.3,
+                -0.2,
+                -0.1,
+                0,
+                0.1,
+                0.2,
+                0.3,
+                0.4,
+                0.5,
+                0.6,
+                0.7,
+                0.8
+              ]
+              colors: [
+                [0, 128, 0],
+                [32, 144, 32],
+                [64, 160, 64],
+                [96, 176, 96],
+                [128, 192, 128],
+                [160, 208, 160],
+                [192, 223, 192],
+                [224, 239, 224],
+                [255, 255, 255],
+                [224, 224, 249],
+                [192, 192, 242],
+                [160, 160, 236],
+                [128, 128, 230],
+                [96, 96, 223],
+                [64, 64, 217],
+                [32, 32, 210],
+                [0, 0, 204],
+              ]
+
+  ndsi_msi:
+    # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndsi/
+    standard_name: ndsi_msi
+    operations:
+      - name: colorize
+        method: !!python/name:satpy.enhancements.colorize
+        kwargs:
+          palettes:
+            - values: [0]
+              colors: [[0, 240, 240]]
 
   aot_msi:
     standard_name: aot_msi
     operations:
       - name: colorize
         method: !!python/name:satpy.enhancements.colorize
         kwargs:
           palettes:
-            - colors: oranges
+            - colors: rdylgn
               min_value: 0
               max_value: 1
+              reverse: True
 
   wvp_msi:
     standard_name: wvp_msi
     operations:
       - name: colorize
         method: !!python/name:satpy.enhancements.colorize
         kwargs:
           palettes:
-            - colors: pubu
+            - colors: rdylbu
               min_value: 0
-              max_value: 4
+              max_value: 5
+              reverse: True
 
   scl_msi:
     # The palette is defined by Sentinel-2 Products Specification Document V14.9, page 319
@@ -90,3 +204,7 @@ enhancements:
                 [255, 255, 255],
                 [146, 205, 220],
                 [112, 48, 160]]
+
+  no_enhancement:
+    standard_name: no_enhancement
+    operations: []
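A minimal sketch of how these recipes are meant to be exercised, assuming a hypothetical L1C SAFE product on disk (the granule name and output filename are placeholders; the whole .SAFE tree, including the MTD_*.xml metadata files, must be handed to the reader for calibration to work):

    from glob import glob

    from satpy import Scene

    # Placeholder granule name; point the glob at a real Sentinel-2 L1C product.
    files = glob("S2B_MSIL1C_20240420T103629_N0510_R008_T31UFS_20240420T124619.SAFE/**/*.*",
                 recursive=True)
    scn = Scene(reader="msi_safe", filenames=files)
    scn.load(["ndsi_with_true_color"])  # snow mask layered over true_color, as defined above
    scn.save_dataset("ndsi_with_true_color", filename="ndsi_over_true_color.png")
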
From 110641cea951b99ed5b08365c2d559bfff4075be Mon Sep 17 00:00:00 2001
From: yukaribbba
Date: Sun, 21 Apr 2024 19:25:04 +0800
Subject: [PATCH 1302/1416] Update msi.yaml
---
 satpy/etc/composites/msi.yaml | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/satpy/etc/composites/msi.yaml b/satpy/etc/composites/msi.yaml
index e6b2159182..b17aa0e856 100644
--- a/satpy/etc/composites/msi.yaml
+++ b/satpy/etc/composites/msi.yaml
@@ -167,6 +167,7 @@ composites:
     standard_name: natural_color
 
   ndvi:
+    # Normalized Difference Vegetation Index
     # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndvi/
     compositor: !!python/name:satpy.composites.SingleBandCompositor
     prerequisites:
@@ -183,6 +184,7 @@ composites:
     standard_name: ndvi_msi
 
   ndmi:
+    # Normalized Difference Moisture Index
     # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndmi/
     compositor: !!python/name:satpy.composites.SingleBandCompositor
     prerequisites:
@@ -199,6 +201,7 @@ composites:
     standard_name: ndmi_msi
 
   ndwi:
+    # Normalized Difference Water Index
     # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndwi/
     compositor: !!python/name:satpy.composites.SingleBandCompositor
     prerequisites:
@@ -215,6 +218,7 @@ composites:
     standard_name: ndwi_msi
 
   ndsi:
+    # Normalized Difference Snow Index
     # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndsi/
     compositor: !!python/name:satpy.composites.MaskingCompositor
     prerequisites:
@@ -295,6 +299,7 @@ composites:
     standard_name: scl_msi
 
   ndvi_l2a:
+    # Normalized Difference Vegetation Index
     # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndvi/
     compositor: !!python/name:satpy.composites.SingleBandCompositor
     prerequisites:
@@ -311,6 +316,7 @@ composites:
     standard_name: ndvi_msi
 
   ndmi_l2a:
+    # Normalized Difference Moisture Index
     # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndmi/
     compositor: !!python/name:satpy.composites.SingleBandCompositor
     prerequisites:
@@ -327,6 +333,7 @@ composites:
     standard_name: ndmi_msi
 
   ndwi_l2a:
+    # Normalized Difference Water Index
     # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndwi/
     compositor: !!python/name:satpy.composites.SingleBandCompositor
     prerequisites:
@@ -343,6 +350,7 @@ composites:
     standard_name: ndwi_msi
 
   ndsi_l2a:
+    # Normalized Difference Snow Index
     # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndsi/
     compositor: !!python/name:satpy.composites.MaskingCompositor
     prerequisites:

From 87d072d039e543090cfe9c3e978a3d6eedd0d8ac Mon Sep 17 00:00:00 2001
From: Martin Raspaud
Date: Mon, 22 Apr 2024 08:48:42 +0200
Subject: [PATCH 1303/1416] Remove doc references to setup.py
---
 doc/source/dev_guide/index.rst   |  4 ++--
 doc/source/dev_guide/plugins.rst | 13 -------------
 doc/source/install.rst           |  2 +-
 3 files changed, 3 insertions(+), 16 deletions(-)

diff --git a/doc/source/dev_guide/index.rst b/doc/source/dev_guide/index.rst
index e877fd1c63..bcd536f614 100644
--- a/doc/source/dev_guide/index.rst
+++ b/doc/source/dev_guide/index.rst
@@ -29,7 +29,7 @@ and all code should follow the practices
 `_.
 
 Satpy is now Python 3 only and it is no longer needed to support Python 2.
-Check ``setup.py`` for the current Python versions any new code needs
+Check ``pyproject.toml`` for the current Python versions any new code needs
 to support.
 
 .. _devinstall:
@@ -63,7 +63,7 @@ clone your fork. The package can then be installed in development mode by doing:
 The first command will install all dependencies needed by the Satpy
 conda-forge package, but won't actually install Satpy. The second command
 should be run from the root of the cloned Satpy repository (where the
-``setup.py`` is) and will install the actual package.
+``pyproject.toml`` is) and will install the actual package.
You can now edit the python files in your cloned repository and have them immediately reflected in your conda environment. diff --git a/doc/source/dev_guide/plugins.rst b/doc/source/dev_guide/plugins.rst index bce72dabae..35c772c2a6 100644 --- a/doc/source/dev_guide/plugins.rst +++ b/doc/source/dev_guide/plugins.rst @@ -156,19 +156,6 @@ have a ``etc/`` directory in the root of the package structure. Even so, for future compatibility, it is best to use the name of the package directory on the right-hand side of the ``=``. -.. warning:: - - Due to some limitations in setuptools you must also define a ``setup.py`` - file in addition to ``pyproject.toml`` if you'd like to use "editable" - installations (``pip install -e .``). Once - `this setuptools issue `_ - is resolved this won't be needed. For now this minimal ``setup.py`` will - work: - - .. code-block:: python - - from setuptools import setup - setup() **Alternative: setup.py** diff --git a/doc/source/install.rst b/doc/source/install.rst index 3c3ba26a41..619903b34c 100644 --- a/doc/source/install.rst +++ b/doc/source/install.rst @@ -86,7 +86,7 @@ To install the `satpy` package and the minimum amount of python dependencies: Additional dependencies can be installed as "extras" and are grouped by reader, writer, or feature added. Extras available can be found in the -`setup.py `_ file. +`pyproject.toml `_ file. They can be installed individually: .. code-block:: bash From f1fb2fd08281e630067e10ac05db314245052ab3 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Mon, 22 Apr 2024 18:34:14 +0800 Subject: [PATCH 1304/1416] separate l1c/l2a yaml --- satpy/etc/readers/msi_safe.yaml | 309 +-------------------------- satpy/etc/readers/msi_safe_l2a.yaml | 315 ++++++++++++++++++++++++++++ 2 files changed, 318 insertions(+), 306 deletions(-) create mode 100644 satpy/etc/readers/msi_safe_l2a.yaml diff --git a/satpy/etc/readers/msi_safe.yaml b/satpy/etc/readers/msi_safe.yaml index 16a74e64ea..cc39c26a74 100644 --- a/satpy/etc/readers/msi_safe.yaml +++ b/satpy/etc/readers/msi_safe.yaml @@ -1,8 +1,8 @@ reader: name: msi_safe - short_name: MSI SAFE - long_name: Sentinel-2 A and B MSI data in SAFE format - description: SAFE Reader for MSI data (Sentinel-2) + short_name: MSI SAFE L1C + long_name: Sentinel-2 A and B MSI L1C data in SAFE format + description: SAFE Reader for MSI L1C data (Sentinel-2) status: Nominal supports_fsspec: false sensors: [msi] @@ -21,25 +21,6 @@ file_types: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIMDXML file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSIL1C.xml'] - l2a_safe_granule_10m: - file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C - file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R10m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_10m.jp2'] - requires: [l2a_safe_metadata, l2a_safe_tile_metadata] - l2a_safe_granule_20m: - file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C - file_patterns: 
['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R20m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_20m.jp2'] - requires: [l2a_safe_metadata, l2a_safe_tile_metadata] - l2a_safe_granule_60m: - file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C - file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R60m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_60m.jp2'] - requires: [l2a_safe_metadata, l2a_safe_tile_metadata] - l2a_safe_tile_metadata: - file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSITileMDXML - file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml'] - l2a_safe_metadata: - file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIMDXML - file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSIL2A.xml'] - datasets: B01: name: B01 @@ -58,24 +39,6 @@ datasets: units: "1" file_type: l1c_safe_granule - B01_L2A: - name: B01_L2A - sensor: msi - wavelength: [0.415, 0.443, 0.470] - resolution: - 20: {file_type: l2a_safe_granule_20m} - 60: {file_type: l2a_safe_granule_60m} - calibration: - reflectance: - standard_name: toa_bidirectional_reflectance - units: "%" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 - counts: - standard_name: counts - units: "1" - B02: name: B02 sensor: msi @@ -93,25 +56,6 @@ datasets: units: "1" file_type: l1c_safe_granule - B02_L2A: - name: B02_L2A - sensor: msi - wavelength: [0.440, 0.490, 0.540] - resolution: - 10: {file_type: l2a_safe_granule_10m} - 20: {file_type: l2a_safe_granule_20m} - 60: {file_type: l2a_safe_granule_60m} - calibration: - reflectance: - standard_name: toa_bidirectional_reflectance - units: "%" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 - counts: - standard_name: counts - units: "1" - B03: name: B03 sensor: msi @@ -129,25 +73,6 @@ datasets: units: "1" file_type: l1c_safe_granule - B03_L2A: - name: B03_L2A - sensor: msi - wavelength: [0.540, 0.560, 0.580] - resolution: - 10: {file_type: l2a_safe_granule_10m} - 20: {file_type: l2a_safe_granule_20m} - 60: {file_type: l2a_safe_granule_60m} - calibration: - reflectance: - standard_name: toa_bidirectional_reflectance - units: "%" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 - counts: - standard_name: counts - units: "1" - B04: name: B04 sensor: msi @@ -165,25 +90,6 @@ datasets: units: "1" file_type: l1c_safe_granule - B04_L2A: - name: B04_L2A - sensor: msi - wavelength: [0.645, 0.665, 0.685] - resolution: - 10: {file_type: l2a_safe_granule_10m} - 20: {file_type: 
l2a_safe_granule_20m} - 60: {file_type: l2a_safe_granule_60m} - calibration: - reflectance: - standard_name: toa_bidirectional_reflectance - units: "%" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 - counts: - standard_name: counts - units: "1" - B05: name: B05 sensor: msi @@ -201,24 +107,6 @@ datasets: units: "1" file_type: l1c_safe_granule - B05_L2A: - name: B05_L2A - sensor: msi - wavelength: [0.695, 0.705, 0.715] - resolution: - 20: {file_type: l2a_safe_granule_20m} - 60: {file_type: l2a_safe_granule_60m} - calibration: - reflectance: - standard_name: toa_bidirectional_reflectance - units: "%" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 - counts: - standard_name: counts - units: "1" - B06: name: B06 sensor: msi @@ -236,24 +124,6 @@ datasets: units: "1" file_type: l1c_safe_granule - B06_L2A: - name: B06_L2A - sensor: msi - wavelength: [0.731, 0.740, 0.749] - resolution: - 20: {file_type: l2a_safe_granule_20m} - 60: {file_type: l2a_safe_granule_60m} - calibration: - reflectance: - standard_name: toa_bidirectional_reflectance - units: "%" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 - counts: - standard_name: counts - units: "1" - B07: name: B07 sensor: msi @@ -271,24 +141,6 @@ datasets: units: "1" file_type: l1c_safe_granule - B07_L2A: - name: B07_L2A - sensor: msi - wavelength: [0.764, 0.783, 0.802] - resolution: - 20: {file_type: l2a_safe_granule_20m} - 60: {file_type: l2a_safe_granule_60m} - calibration: - reflectance: - standard_name: toa_bidirectional_reflectance - units: "%" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 - counts: - standard_name: counts - units: "1" - B08: name: B08 sensor: msi @@ -306,23 +158,6 @@ datasets: units: "1" file_type: l1c_safe_granule - B08_L2A: - name: B08_L2A - sensor: msi - wavelength: [0.780, 0.842, 0.905] - resolution: - 10: {file_type: l2a_safe_granule_10m} - calibration: - reflectance: - standard_name: toa_bidirectional_reflectance - units: "%" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 - counts: - standard_name: counts - units: "1" - B8A: name: B8A sensor: msi @@ -340,24 +175,6 @@ datasets: units: "1" file_type: l1c_safe_granule - B8A_L2A: - name: B8A_L2A - sensor: msi - wavelength: [0.855, 0.865, 0.875] - resolution: - 20: {file_type: l2a_safe_granule_20m} - 60: {file_type: l2a_safe_granule_60m} - calibration: - reflectance: - standard_name: toa_bidirectional_reflectance - units: "%" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 - counts: - standard_name: counts - units: "1" - B09: name: B09 sensor: msi @@ -375,23 +192,6 @@ datasets: units: "1" file_type: l1c_safe_granule - B09_L2A: - name: B09_L2A - sensor: msi - wavelength: [0.935, 0.945, 0.955] - resolution: - 60: {file_type: l2a_safe_granule_60m} - calibration: - reflectance: - standard_name: toa_bidirectional_reflectance - units: "%" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 - counts: - standard_name: counts - units: "1" - B10: name: B10 sensor: msi @@ -426,24 +226,6 @@ datasets: units: "1" file_type: l1c_safe_granule - B11_L2A: - name: B11_L2A - sensor: msi - wavelength: [1.565, 1.610, 1.655] - resolution: - 20: {file_type: l2a_safe_granule_20m} - 60: {file_type: l2a_safe_granule_60m} - calibration: - reflectance: - standard_name: 
toa_bidirectional_reflectance - units: "%" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 - counts: - standard_name: counts - units: "1" - B12: name: B12 sensor: msi @@ -461,65 +243,6 @@ datasets: units: "1" file_type: l1c_safe_granule - B12_L2A: - name: B12_L2A - sensor: msi - wavelength: [2.100, 2.190, 2.280] - resolution: - 20: {file_type: l2a_safe_granule_20m} - 60: {file_type: l2a_safe_granule_60m} - calibration: - reflectance: - standard_name: toa_bidirectional_reflectance - units: "%" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 - counts: - standard_name: counts - units: "1" - - AOT_L2A: - name: AOT_L2A - sensor: msi - resolution: - 10: {file_type: l2a_safe_granule_10m} - 20: {file_type: l2a_safe_granule_20m} - 60: {file_type: l2a_safe_granule_60m} - calibration: - aerosol_thickness: - standard_name: aerosol_optical_thickness - units: "1" - counts: - standard_name: counts - units: "1" - - WVP_L2A: - name: WVP_L2A - sensor: msi - resolution: - 10: {file_type: l2a_safe_granule_10m} - 20: {file_type: l2a_safe_granule_20m} - 60: {file_type: l2a_safe_granule_60m} - calibration: - water_vapor: - standard_name: water_vapor - units: cm - counts: - standard_name: counts - units: "1" - - SCL_L2A: - name: SCL_L2A - sensor: msi - resolution: - 20: {file_type: l2a_safe_granule_20m} - 60: {file_type: l2a_safe_granule_60m} - calibration: - counts: - standard_name: counts - units: "1" - solar_zenith_angle: name: solar_zenith_angle resolution: [10, 20, 60] @@ -545,29 +268,3 @@ datasets: file_type: l1c_safe_tile_metadata xml_tag: Viewing_Incidence_Angles_Grids xml_item: Zenith - - solar_zenith_angle_l2a: - name: solar_zenith_angle_l2a - resolution: [10, 20, 60] - file_type: l2a_safe_tile_metadata - xml_tag: Sun_Angles_Grid/Zenith - - solar_azimuth_angle_l2a: - name: solar_azimuth_angle_l2a - resolution: [10, 20, 60] - file_type: l2a_safe_tile_metadata - xml_tag: Sun_Angles_Grid/Azimuth - - satellite_azimuth_angle_l2a: - name: satellite_azimuth_angle_l2a - resolution: [10, 20, 60] - file_type: l2a_safe_tile_metadata - xml_tag: Viewing_Incidence_Angles_Grids - xml_item: Azimuth - - satellite_zenith_angle_l2a: - name: satellite_zenith_angle_l2a - resolution: [10, 20, 60] - file_type: l2a_safe_tile_metadata - xml_tag: Viewing_Incidence_Angles_Grids - xml_item: Zenith diff --git a/satpy/etc/readers/msi_safe_l2a.yaml b/satpy/etc/readers/msi_safe_l2a.yaml new file mode 100644 index 0000000000..e11b521f51 --- /dev/null +++ b/satpy/etc/readers/msi_safe_l2a.yaml @@ -0,0 +1,315 @@ +reader: + name: msi_safe_l2a + short_name: MSI SAFE L2A + long_name: Sentinel-2 A and B MSI L2A data in SAFE format + description: SAFE Reader for MSI L2A data (Sentinel-2) + status: Nominal + supports_fsspec: false + sensors: [msi] + default_channels: [] + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + +file_types: + l2a_safe_granule_10m: + file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R10m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_10m.jp2'] + requires: [l2a_safe_metadata, l2a_safe_tile_metadata] + l2a_safe_granule_20m: + file_reader: 
!!python/name:satpy.readers.msi_safe.SAFEMSIL1C + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R20m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_20m.jp2'] + requires: [l2a_safe_metadata, l2a_safe_tile_metadata] + l2a_safe_granule_60m: + file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R60m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_60m.jp2'] + requires: [l2a_safe_metadata, l2a_safe_tile_metadata] + l2a_safe_tile_metadata: + file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSITileMDXML + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml'] + l2a_safe_metadata: + file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIMDXML + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSIL2A.xml'] + +datasets: + B01_L2A: + name: B01_L2A + sensor: msi + wavelength: [0.415, 0.443, 0.470] + resolution: + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + + B02_L2A: + name: B02_L2A + sensor: msi + wavelength: [0.440, 0.490, 0.540] + resolution: + 10: {file_type: l2a_safe_granule_10m} + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + + B03_L2A: + name: B03_L2A + sensor: msi + wavelength: [0.540, 0.560, 0.580] + resolution: + 10: {file_type: l2a_safe_granule_10m} + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + + B04_L2A: + name: B04_L2A + sensor: msi + wavelength: [0.645, 0.665, 0.685] + resolution: + 10: {file_type: l2a_safe_granule_10m} + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + + 
B05_L2A: + name: B05_L2A + sensor: msi + wavelength: [0.695, 0.705, 0.715] + resolution: + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + + B06_L2A: + name: B06_L2A + sensor: msi + wavelength: [0.731, 0.740, 0.749] + resolution: + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + + B07_L2A: + name: B07_L2A + sensor: msi + wavelength: [0.764, 0.783, 0.802] + resolution: + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + + B08_L2A: + name: B08_L2A + sensor: msi + wavelength: [0.780, 0.842, 0.905] + resolution: + 10: {file_type: l2a_safe_granule_10m} + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + + B8A_L2A: + name: B8A_L2A + sensor: msi + wavelength: [0.855, 0.865, 0.875] + resolution: + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + + B09_L2A: + name: B09_L2A + sensor: msi + wavelength: [0.935, 0.945, 0.955] + resolution: + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + + B11_L2A: + name: B11_L2A + sensor: msi + wavelength: [1.565, 1.610, 1.655] + resolution: + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + + B12_L2A: + name: B12_L2A + sensor: msi + wavelength: [2.100, 2.190, 2.280] + resolution: + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + + AOT_L2A: + name: AOT_L2A + sensor: msi + resolution: + 10: {file_type: l2a_safe_granule_10m} + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + aerosol_thickness: + standard_name: aerosol_optical_thickness + units: "1" + counts: + standard_name: counts + units: "1" + + WVP_L2A: + name: WVP_L2A + sensor: msi + resolution: + 10: {file_type: 
l2a_safe_granule_10m} + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + water_vapor: + standard_name: water_vapor + units: cm + counts: + standard_name: counts + units: "1" + + SCL_L2A: + name: SCL_L2A + sensor: msi + resolution: + 20: {file_type: l2a_safe_granule_20m} + 60: {file_type: l2a_safe_granule_60m} + calibration: + counts: + standard_name: counts + units: "1" + + solar_zenith_angle_l2a: + name: solar_zenith_angle_l2a + resolution: [10, 20, 60] + file_type: l2a_safe_tile_metadata + xml_tag: Sun_Angles_Grid/Zenith + + solar_azimuth_angle_l2a: + name: solar_azimuth_angle_l2a + resolution: [10, 20, 60] + file_type: l2a_safe_tile_metadata + xml_tag: Sun_Angles_Grid/Azimuth + + satellite_azimuth_angle_l2a: + name: satellite_azimuth_angle_l2a + resolution: [10, 20, 60] + file_type: l2a_safe_tile_metadata + xml_tag: Viewing_Incidence_Angles_Grids + xml_item: Azimuth + + satellite_zenith_angle_l2a: + name: satellite_zenith_angle_l2a + resolution: [10, 20, 60] + file_type: l2a_safe_tile_metadata + xml_tag: Viewing_Incidence_Angles_Grids + xml_item: Zenith From 478f2e1d9ffcc67f45cb094dd8d3c2f08a93a5ff Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Mon, 22 Apr 2024 18:41:08 +0800 Subject: [PATCH 1305/1416] Update msi.yaml --- satpy/etc/composites/msi.yaml | 296 +++++++++++++++++++++++++--------- 1 file changed, 220 insertions(+), 76 deletions(-) diff --git a/satpy/etc/composites/msi.yaml b/satpy/etc/composites/msi.yaml index b17aa0e856..74dd859dfd 100644 --- a/satpy/etc/composites/msi.yaml +++ b/satpy/etc/composites/msi.yaml @@ -1,70 +1,137 @@ sensor_name: visir/msi modifiers: - rayleigh_corrected: + rayleigh_corr: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - - name: 'B04' - modifiers: [sunz_corrected] - optional_prerequisites: - - satellite_azimuth_angle - - satellite_zenith_angle - - solar_azimuth_angle - - solar_zenith_angle - - rayleigh_corrected_marine_clean: + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected] + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corr_antarctic: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: antarctic_aerosol + prerequisites: + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected] + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corr_continental_average: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: continental_average_aerosol + prerequisites: + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected] + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corr_continental_clean: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: continental_clean_aerosol + prerequisites: + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected] + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corr_continental_polluted: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: continental_polluted_aerosol + prerequisites: + - 
name: 'B04' + modifiers: [effective_solar_pathlength_corrected] + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corr_desert: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: desert_aerosol + prerequisites: + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected] + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corr_marine_clean: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: marine_clean_aerosol prerequisites: - - name: 'B04' - modifiers: [sunz_corrected] - optional_prerequisites: - - satellite_azimuth_angle - - satellite_zenith_angle - - solar_azimuth_angle - - solar_zenith_angle - - rayleigh_corrected_marine_tropical: + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected] + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corr_marine_polluted: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance - atmosphere: tropical + atmosphere: us-standard + aerosol_type: marine_polluted_aerosol + prerequisites: + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected] + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corr_marine_tropical: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard aerosol_type: marine_tropical_aerosol prerequisites: - - name: 'B04' - modifiers: [sunz_corrected] - optional_prerequisites: - - satellite_azimuth_angle - - satellite_zenith_angle - - solar_azimuth_angle - - solar_zenith_angle - - rayleigh_corrected_desert: + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected] + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corr_rural: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance - atmosphere: tropical - aerosol_type: desert_aerosol + atmosphere: us-standard + aerosol_type: rural_aerosol prerequisites: - - name: 'B04' - modifiers: [sunz_corrected] - optional_prerequisites: - - satellite_azimuth_angle - - satellite_zenith_angle - - solar_azimuth_angle - - solar_zenith_angle - - rayleigh_corrected_land: + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected] + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corr_urban: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard - aerosol_type: continental_average_aerosol + aerosol_type: urban_aerosol prerequisites: - - name: 'B04' - modifiers: [sunz_corrected] - optional_prerequisites: - - satellite_azimuth_angle - - satellite_zenith_angle - - solar_azimuth_angle - - solar_zenith_angle + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected] + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle composites: @@ -82,55 +149,132 @@ composites: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + modifiers: [effective_solar_pathlength_corrected, 
rayleigh_corr] - name: 'B03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr] - name: 'B02' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr] standard_name: true_color - true_color_land: + true_color_antarctic: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'B04' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land] - - name: 'B03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land] - - name: 'B02' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land] + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_antarctic] + - name: 'B03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_antarctic] + - name: 'B02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_antarctic] + standard_name: true_color + + true_color_continental_average: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_continental_average] + - name: 'B03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_continental_average] + - name: 'B02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_continental_average] + standard_name: true_color + + true_color_continental_clean: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_continental_clean] + - name: 'B03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_continental_clean] + - name: 'B02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_continental_clean] + standard_name: true_color + + true_color_continental_polluted: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_continental_polluted] + - name: 'B03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_continental_polluted] + - name: 'B02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_continental_polluted] standard_name: true_color true_color_desert: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'B04' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] - - name: 'B03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] - - name: 'B02' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_desert] + - name: 'B03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_desert] + - name: 'B02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_desert] standard_name: true_color true_color_marine_clean: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'B04' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] - - name: 'B03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] - - name: 'B02' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected, 
rayleigh_corr_marine_clean] + - name: 'B03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_marine_clean] + - name: 'B02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_marine_clean] + standard_name: true_color + + true_color_marine_polluted: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_marine_polluted] + - name: 'B03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_marine_polluted] + - name: 'B02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_marine_polluted] standard_name: true_color true_color_marine_tropical: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'B04' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] - - name: 'B03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] - - name: 'B02' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_marine_tropical] + - name: 'B03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_marine_tropical] + - name: 'B02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_marine_tropical] + standard_name: true_color + + true_color_rural: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_rural] + - name: 'B03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_rural] + - name: 'B02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_rural] + standard_name: true_color + + true_color_urban: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_urban] + - name: 'B03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_urban] + - name: 'B02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_urban] + standard_name: true_color + + true_color_uncorr: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B03' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B02' + modifiers: [effective_solar_pathlength_corrected] standard_name: true_color true_color_raw: From a5a9815864ff9ec7a5abfca5ae6f0dd5735d1e04 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 22 Apr 2024 13:06:36 +0300 Subject: [PATCH 1306/1416] Fix datetime imports --- doc/source/conf.py | 4 +- satpy/cf/decoding.py | 6 +- satpy/composites/viirs.py | 6 +- satpy/dataset/metadata.py | 9 +- satpy/modifiers/angles.py | 10 +-- satpy/readers/__init__.py | 8 +- satpy/readers/aapp_l1b.py | 12 +-- satpy/readers/abi_base.py | 7 +- satpy/readers/acspo.py | 6 +- satpy/readers/ahi_hsd.py | 21 ++--- satpy/readers/ahi_l2_nc.py | 11 +-- satpy/readers/ami_l1b.py | 11 +-- satpy/readers/amsr2_l2_gaasp.py | 7 +- satpy/readers/ascat_l2_soilmoisture_bufr.py | 5 +- satpy/readers/atms_l1b_nc.py | 7 +- satpy/readers/avhrr_l1b_gaclac.py | 19 ++-- satpy/readers/caliop_l2_cloud.py | 5 +- satpy/readers/electrol_hrit.py | 6 +- satpy/readers/epic_l1b_h5.py | 7 +- satpy/readers/eum_base.py | 8 +- satpy/readers/fci_l1c_nc.py | 8 +- satpy/readers/fy4_base.py | 11 +-- 
satpy/readers/gerb_l2_hr_h5.py | 6 +- satpy/readers/ghrsst_l2.py | 7 +- satpy/readers/ghrsst_l3c_sst.py | 5 +- satpy/readers/glm_l2.py | 8 +- satpy/readers/goci2_l2_nc.py | 11 +-- satpy/readers/goes_imager_hrit.py | 17 ++-- satpy/readers/goes_imager_nc.py | 34 ++++--- satpy/readers/gpm_imerg.py | 27 +++--- satpy/readers/grib.py | 6 +- satpy/readers/hdfeos_base.py | 7 +- satpy/readers/hrit_base.py | 5 +- satpy/readers/hrit_jma.py | 11 +-- satpy/readers/hrpt.py | 9 +- satpy/readers/hsaf_grib.py | 8 +- satpy/readers/hsaf_h5.py | 6 +- satpy/readers/hy2_scat_l2b_h5.py | 10 +-- satpy/readers/iasi_l2_so2_bufr.py | 5 +- satpy/readers/ici_l1b_nc.py | 11 +-- satpy/readers/insat3d_img_l1b_h5.py | 9 +- satpy/readers/mersi_l1b.py | 6 +- satpy/readers/msu_gsa_l1b.py | 6 +- satpy/readers/mws_l1b.py | 13 +-- satpy/readers/nwcsaf_msg2013_hdf5.py | 5 +- satpy/readers/nwcsaf_nc.py | 9 +- satpy/readers/oceancolorcci_l3_nc.py | 5 +- satpy/readers/omps_edr.py | 8 +- satpy/readers/osisaf_l3_nc.py | 4 +- satpy/readers/scatsat1_l2b.py | 8 +- satpy/readers/scmi.py | 5 +- satpy/readers/seadas_l2.py | 7 +- satpy/readers/seviri_base.py | 13 +-- satpy/readers/seviri_l1b_hrit.py | 7 +- satpy/readers/seviri_l1b_icare.py | 12 +-- satpy/readers/seviri_l1b_native.py | 11 +-- satpy/readers/seviri_l1b_nc.py | 13 ++- satpy/readers/seviri_l2_bufr.py | 7 +- satpy/readers/seviri_l2_grib.py | 4 +- satpy/readers/sgli_l1b.py | 7 +- satpy/readers/slstr_l1b.py | 19 ++-- satpy/readers/smos_l2_wind.py | 6 +- satpy/readers/tropomi_l2.py | 6 +- satpy/readers/vii_base_nc.py | 10 +-- satpy/readers/viirs_atms_sdr_base.py | 8 +- satpy/readers/viirs_compact.py | 6 +- satpy/readers/viirs_l1b.py | 4 +- satpy/readers/viirs_l2.py | 5 +- satpy/readers/viirs_sdr.py | 7 +- satpy/readers/viirs_vgac_l1c_nc.py | 10 +-- satpy/readers/virr_l1b.py | 6 +- satpy/tests/cf_tests/test_decoding.py | 12 +-- satpy/tests/compositor_tests/test_viirs.py | 9 +- satpy/tests/features/steps/steps-load.py | 12 +-- satpy/tests/modifier_tests/test_angles.py | 13 ++- satpy/tests/modifier_tests/test_crefl.py | 18 ++-- satpy/tests/multiscene_tests/test_blend.py | 50 +++++------ .../multiscene_tests/test_save_animation.py | 51 +++++------ satpy/tests/reader_tests/_li_test_utils.py | 12 +-- .../modis_tests/_modis_fixtures.py | 16 ++-- satpy/tests/reader_tests/test_abi_l1b.py | 8 +- satpy/tests/reader_tests/test_abi_l2_nc.py | 8 +- satpy/tests/reader_tests/test_acspo.py | 8 +- satpy/tests/reader_tests/test_ahi_hsd.py | 88 ++++++++++--------- satpy/tests/reader_tests/test_ahi_l2_nc.py | 6 +- satpy/tests/reader_tests/test_ami_l1b.py | 6 +- .../tests/reader_tests/test_amsr2_l2_gaasp.py | 6 +- .../test_ascat_l2_soilmoisture_bufr.py | 7 +- satpy/tests/reader_tests/test_atms_l1b_nc.py | 19 ++-- .../tests/reader_tests/test_atms_sdr_hdf5.py | 6 +- .../reader_tests/test_avhrr_l1b_gaclac.py | 9 +- satpy/tests/reader_tests/test_epic_l1b_h5.py | 6 +- satpy/tests/reader_tests/test_eum_base.py | 21 ++--- .../tests/reader_tests/test_generic_image.py | 4 +- satpy/tests/reader_tests/test_ghrsst_l2.py | 10 +-- satpy/tests/reader_tests/test_glm_l2.py | 6 +- satpy/tests/reader_tests/test_goci2_l2_nc.py | 8 +- satpy/tests/reader_tests/test_gpm_imerg.py | 8 +- satpy/tests/reader_tests/test_hrit_base.py | 10 +-- satpy/tests/reader_tests/test_hsaf_grib.py | 4 +- satpy/tests/reader_tests/test_hsaf_h5.py | 5 +- .../reader_tests/test_hy2_scat_l2b_h5.py | 6 +- satpy/tests/reader_tests/test_ici_l1b_nc.py | 21 ++--- .../reader_tests/test_insat3d_img_l1b_h5.py | 7 +- satpy/tests/reader_tests/test_li_l2_nc.py | 
6 +- .../reader_tests/test_mimic_TPW2_lowres.py | 4 +- .../tests/reader_tests/test_mimic_TPW2_nc.py | 7 +- satpy/tests/reader_tests/test_mirs.py | 10 ++- satpy/tests/reader_tests/test_mws_l1b_nc.py | 20 ++--- .../reader_tests/test_oceancolorcci_l3_nc.py | 7 +- satpy/tests/reader_tests/test_osisaf_l3.py | 18 ++-- satpy/tests/reader_tests/test_satpy_cf_nc.py | 18 ++-- satpy/tests/reader_tests/test_scmi.py | 6 +- satpy/tests/reader_tests/test_seviri_base.py | 74 ++++++++-------- .../test_seviri_l1b_calibration.py | 6 +- .../reader_tests/test_seviri_l1b_hrit.py | 17 ++-- .../test_seviri_l1b_hrit_setup.py | 25 +++--- .../reader_tests/test_seviri_l1b_icare.py | 4 +- .../reader_tests/test_seviri_l1b_native.py | 30 +++---- .../tests/reader_tests/test_seviri_l1b_nc.py | 28 +++--- .../tests/reader_tests/test_seviri_l2_bufr.py | 4 +- satpy/tests/reader_tests/test_sgli_l1b.py | 11 +-- satpy/tests/reader_tests/test_slstr_l1b.py | 12 +-- satpy/tests/reader_tests/test_smos_l2_wind.py | 6 +- satpy/tests/reader_tests/test_tropomi_l2.py | 15 ++-- satpy/tests/reader_tests/test_utils.py | 7 +- satpy/tests/reader_tests/test_viirs_edr.py | 12 +-- satpy/tests/reader_tests/test_viirs_l1b.py | 8 +- satpy/tests/reader_tests/test_viirs_l2.py | 9 +- satpy/tests/reader_tests/test_viirs_sdr.py | 14 +-- .../reader_tests/test_viirs_vgac_l1c_nc.py | 17 ++-- satpy/tests/scene_tests/test_conversions.py | 24 ++--- satpy/tests/scene_tests/test_saving.py | 12 +-- satpy/tests/test_composites.py | 9 +- satpy/tests/test_dataset.py | 67 +++++++------- satpy/tests/test_file_handlers.py | 25 +++--- satpy/tests/test_modifiers.py | 8 +- satpy/tests/test_readers.py | 40 +++------ satpy/tests/test_writers.py | 10 +-- satpy/tests/test_yaml_reader.py | 78 ++++++++-------- satpy/tests/utils.py | 6 +- satpy/tests/writer_tests/test_awips_tiled.py | 8 +- satpy/tests/writer_tests/test_cf.py | 58 ++++++------ satpy/tests/writer_tests/test_geotiff.py | 6 +- satpy/tests/writer_tests/test_mitiff.py | 46 +++------- satpy/tests/writer_tests/test_simple_image.py | 4 +- satpy/writers/awips_tiled.py | 13 +-- 147 files changed, 1006 insertions(+), 916 deletions(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 020544ee4a..4bf20b0e38 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -14,9 +14,9 @@ from __future__ import annotations +import datetime as dt import os import sys -from datetime import datetime # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the @@ -117,7 +117,7 @@ def __getattr__(cls, name): # General information about the project. project = u"Satpy" -copyright = u"2009-{}, The PyTroll Team".format(datetime.utcnow().strftime("%Y")) # noqa: A001 +copyright = u"2009-{}, The PyTroll Team".format(dt.datetime.utcnow().strftime("%Y")) # noqa: A001 # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/satpy/cf/decoding.py b/satpy/cf/decoding.py index 0d7a9d22be..2515f6bd38 100644 --- a/satpy/cf/decoding.py +++ b/satpy/cf/decoding.py @@ -15,10 +15,12 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """CF decoding.""" + import copy +import datetime as dt import json -from datetime import datetime def decode_attrs(attrs): @@ -69,6 +71,6 @@ def _datetime_parser_json(json_dict): def _str2datetime(string): """Convert string to datetime object.""" try: - return datetime.fromisoformat(string) + return dt.datetime.fromisoformat(string) except (TypeError, ValueError): return None diff --git a/satpy/composites/viirs.py b/satpy/composites/viirs.py index 5df2d482af..a9a047fd21 100644 --- a/satpy/composites/viirs.py +++ b/satpy/composites/viirs.py @@ -15,12 +15,14 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Composite classes for the VIIRS instrument.""" + from __future__ import annotations +import datetime as dt import logging import math -from datetime import datetime import dask import dask.array as da @@ -842,7 +844,7 @@ def _linear_normalization_from_0to1( data[mask] = data[mask] / theoretical_max -def _check_moon_phase(moon_datasets: list[xr.DataArray], start_time: datetime) -> float: +def _check_moon_phase(moon_datasets: list[xr.DataArray], start_time: dt.datetime) -> float: """Check if we have Moon phase as an input dataset and, if not, calculate it.""" if moon_datasets: # convert to decimal instead of % diff --git a/satpy/dataset/metadata.py b/satpy/dataset/metadata.py index a328402e0a..03208ebc50 100644 --- a/satpy/dataset/metadata.py +++ b/satpy/dataset/metadata.py @@ -15,11 +15,12 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Utilities for merging metadata from various sources.""" +import datetime as dt import warnings from collections.abc import Collection -from datetime import datetime from functools import partial, reduce from operator import eq, is_ @@ -135,7 +136,7 @@ def _combine_time_parameters(values): def _filter_time_values(values): """Remove values that are not datetime objects.""" - return [v for v in values if isinstance(v, datetime)] + return [v for v in values if isinstance(v, dt.datetime)] def average_datetimes(datetime_list): @@ -152,8 +153,8 @@ def average_datetimes(datetime_list): Returns: Average datetime as a datetime object """ - total = [datetime.timestamp(dt) for dt in datetime_list] - return datetime.fromtimestamp(sum(total) / len(total)) + total = [dt.datetime.timestamp(d) for d in datetime_list] + return dt.datetime.fromtimestamp(sum(total) / len(total)) def _are_values_combinable(values): diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index 1471ba3669..5ea8530612 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -18,11 +18,11 @@ """Utilties for getting various angles for a dataset..""" from __future__ import annotations +import datetime as dt import hashlib import os import shutil import warnings -from datetime import datetime from functools import update_wrapper from glob import glob from typing import Any, Callable, Optional, Union @@ -45,7 +45,7 @@ # pyorbital's get_observer_look function. # The difference is on the order of 1e-10 at most as time changes so we force # it to a single time for easier caching. It is *only* used if caching. 
-STATIC_EARTH_INERTIAL_DATETIME = datetime(2000, 1, 1, 12, 0, 0) +STATIC_EARTH_INERTIAL_DATETIME = dt.datetime(2000, 1, 1, 12, 0, 0) DEFAULT_UNCACHE_TYPES = (SwathDefinition, xr.DataArray, da.Array) HASHABLE_GEOMETRIES = (AreaDefinition, StackedAreaDefinition) @@ -263,7 +263,7 @@ def _hash_args(*args, unhashable_types=DEFAULT_UNCACHE_TYPES): raise TypeError(f"Unhashable type ({type(arg)}).") if isinstance(arg, HASHABLE_GEOMETRIES): arg = hash(arg) - elif isinstance(arg, datetime): + elif isinstance(arg, dt.datetime): arg = arg.isoformat(" ") hashable_args.append(arg) arg_hash = hashlib.sha1() # nosec @@ -274,7 +274,7 @@ def _hash_args(*args, unhashable_types=DEFAULT_UNCACHE_TYPES): def _sanitize_observer_look_args(*args): new_args = [] for arg in args: - if isinstance(arg, datetime): + if isinstance(arg, dt.datetime): new_args.append(STATIC_EARTH_INERTIAL_DATETIME) elif isinstance(arg, (float, np.float64, np.float32)): # Round floating point numbers to nearest tenth. Numpy types don't @@ -448,7 +448,7 @@ def _cos_zen_ndarray(lons, lats, utc_time): return pyob_cos_zen(utc_time, lons, lats) -def _get_sun_azimuth_ndarray(lons: np.ndarray, lats: np.ndarray, start_time: datetime) -> np.ndarray: +def _get_sun_azimuth_ndarray(lons: np.ndarray, lats: np.ndarray, start_time: dt.datetime) -> np.ndarray: with ignore_invalid_float_warnings(): suna = get_alt_az(start_time, lons, lats)[1] suna = np.rad2deg(suna) diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index ca131b101f..7835292eff 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -15,15 +15,17 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Shared objects of the various reader classes.""" + from __future__ import annotations +import datetime as dt import logging import os import pathlib import pickle # nosec B403 import warnings -from datetime import datetime, timedelta from functools import total_ordering import yaml @@ -213,7 +215,7 @@ def _get_sorted_file_groups(all_file_keys, time_threshold): # noqa: D417 # interest of sorting flat_keys = ((v[0], rn, v[1]) for (rn, vL) in all_file_keys.items() for v in vL) prev_key = None - threshold = timedelta(seconds=time_threshold) + threshold = dt.timedelta(seconds=time_threshold) # file_groups is sorted, because dictionaries are sorted by insertion # order in Python 3.7+ file_groups = {} @@ -222,7 +224,7 @@ def _get_sorted_file_groups(all_file_keys, time_threshold): # noqa: D417 if prev_key is None: is_new_group = True prev_key = gk - elif isinstance(gk[0], datetime): + elif isinstance(gk[0], dt.datetime): # datetimes within threshold difference are "the same time" is_new_group = (gk[0] - prev_key[0]) > threshold else: diff --git a/satpy/readers/aapp_l1b.py b/satpy/readers/aapp_l1b.py index e502a9da64..6e3072b4d0 100644 --- a/satpy/readers/aapp_l1b.py +++ b/satpy/readers/aapp_l1b.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Reader for aapp level 1b data. 
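_get_sorted_file_groups above treats datetimes that differ by less than a threshold as the same nominal time; a simplified sketch of that grouping rule (the function name and the 35 s default are illustrative, not satpy API):

import datetime as dt

def group_times(times, threshold_seconds=35):
    # Walk the sorted timestamps; start a new group whenever the gap
    # to the previous timestamp exceeds the threshold.
    threshold = dt.timedelta(seconds=threshold_seconds)
    groups, prev = [], None
    for t in sorted(times):
        if prev is None or (t - prev) > threshold:
            groups.append([t])
        else:
            groups[-1].append(t)
        prev = t
    return groups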
Options for loading: @@ -24,9 +25,10 @@ https://nwp-saf.eumetsat.int/site/download/documentation/aapp/NWPSAF-MF-UD-003_Formats_v8.0.pdf """ + +import datetime as dt import functools import logging -from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -102,14 +104,14 @@ def _set_filedata_layout(self): @property def start_time(self): """Get the time of the first observation.""" - return datetime(self._data["scnlinyr"][0], 1, 1) + timedelta( + return dt.datetime(self._data["scnlinyr"][0], 1, 1) + dt.timedelta( days=int(self._data["scnlindy"][0]) - 1, milliseconds=int(self._data["scnlintime"][0])) @property def end_time(self): """Get the time of the final observation.""" - return datetime(self._data["scnlinyr"][-1], 1, 1) + timedelta( + return dt.datetime(self._data["scnlinyr"][-1], 1, 1) + dt.timedelta( days=int(self._data["scnlindy"][-1]) - 1, milliseconds=int(self._data["scnlintime"][-1])) @@ -129,10 +131,10 @@ def _get_platform_name(self, platform_names_lookup): def read(self): """Read the data.""" - tic = datetime.now() + tic = dt.datetime.now() header = np.memmap(self.filename, dtype=self._header_type, mode="r", shape=(1, )) data = np.memmap(self.filename, dtype=self._scan_type, offset=self._header_offset, mode="r") - logger.debug("Reading time %s", str(datetime.now() - tic)) + logger.debug("Reading time %s", str(dt.datetime.now() - tic)) self._header = header self._data = data diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py index 107382d7ba..ecfd20a830 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -15,12 +15,13 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Advance Baseline Imager reader base class for the Level 1b and l2+ reader.""" +import datetime as dt import logging import math from contextlib import suppress -from datetime import datetime import dask import numpy as np @@ -291,12 +292,12 @@ def _get_areadef_fixedgrid(self, key): @property def start_time(self): """Start time of the current file's observations.""" - return datetime.strptime(self.nc.attrs["time_coverage_start"], "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(self.nc.attrs["time_coverage_start"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """End time of the current file's observations.""" - return datetime.strptime(self.nc.attrs["time_coverage_end"], "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(self.nc.attrs["time_coverage_end"], "%Y-%m-%dT%H:%M:%S.%fZ") def spatial_resolution_to_number(self): """Convert the 'spatial_resolution' global attribute to meters.""" diff --git a/satpy/readers/acspo.py b/satpy/readers/acspo.py index 8a8262af33..90356f46e2 100644 --- a/satpy/readers/acspo.py +++ b/satpy/readers/acspo.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """ACSPO SST Reader. 
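The AAPP scan times above are stored as a year, a day-of-year and milliseconds of day; reconstructing one outside the reader:

import datetime as dt

def scanline_time(year, day_of_year, msecs):
    # timedelta absorbs both the day-of-year offset (day 1 == Jan 1)
    # and the milliseconds of day.
    return dt.datetime(year, 1, 1) + dt.timedelta(days=day_of_year - 1,
                                                  milliseconds=msecs)

print(scanline_time(2020, 60, 43200000))  # 2020-02-29 12:00:00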
See the following page for more information: @@ -22,8 +23,9 @@ https://podaac.jpl.nasa.gov/dataset/VIIRS_NPP-OSPO-L2P-v2.3 """ + +import datetime as dt import logging -from datetime import datetime import numpy as np @@ -83,7 +85,7 @@ def get_shape(self, ds_id, ds_info): @staticmethod def _parse_datetime(datestr): - return datetime.strptime(datestr, "%Y%m%dT%H%M%SZ") + return dt.datetime.strptime(datestr, "%Y%m%dT%H%M%SZ") @property def start_time(self): diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index bf2ab09e79..7ea83a6820 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Advanced Himawari Imager (AHI) standard format data reader. References: @@ -58,10 +59,10 @@ """ +import datetime as dt import logging import os import warnings -from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -419,12 +420,12 @@ def end_time(self): @property def observation_start_time(self): """Get the observation start time.""" - return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_start_time"].item())) + return dt.datetime(1858, 11, 17) + dt.timedelta(days=float(self.basic_info["observation_start_time"].item())) @property def observation_end_time(self): """Get the observation end time.""" - return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_end_time"].item())) + return dt.datetime(1858, 11, 17) + dt.timedelta(days=float(self.basic_info["observation_end_time"].item())) @property def _timeline(self): @@ -760,7 +761,7 @@ def __init__(self, timeline, area): def _parse_timeline(self, timeline): try: - return datetime.strptime(timeline, "%H%M").time() + return dt.datetime.strptime(timeline, "%H%M").time() except ValueError: return None @@ -771,8 +772,8 @@ def get_nominal_start_time(self, observation_start_time): def get_nominal_end_time(self, nominal_start_time): """Get nominal end time of the scan.""" freq = self._observation_frequency - return nominal_start_time + timedelta(minutes=freq // 60, - seconds=freq % 60) + return nominal_start_time + dt.timedelta(minutes=freq // 60, + seconds=freq % 60) def _modify_observation_time_for_nominal(self, observation_time): """Round observation time to a nominal time based on known observation frequency. @@ -793,8 +794,8 @@ ) return observation_time timeline = self._get_closest_timeline(observation_time) - dt = self._get_offset_relative_to_timeline() - return timeline + timedelta(minutes=dt//60, seconds=dt % 60) + offset = self._get_offset_relative_to_timeline() + return timeline + dt.timedelta(minutes=offset//60, seconds=offset % 60) def _get_closest_timeline(self, observation_time): """Find the closest timeline for the given observation time.
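The AHI observation start/end times above are fractional Modified Julian Dates, i.e. days since 1858-11-17; for example:

import datetime as dt

MJD_EPOCH = dt.datetime(1858, 11, 17)

def mjd_to_datetime(mjd_days):
    # Fractional days carry the time of day.
    return MJD_EPOCH + dt.timedelta(days=float(mjd_days))

print(mjd_to_datetime(58849.5))  # 2020-01-01 12:00:00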
@@ -808,11 +809,11 @@ def _get_closest_timeline(self, observation_time): """ delta_days = [-1, 0, 1] surrounding_dates = [ - (observation_time + timedelta(days=delta)).date() + (observation_time + dt.timedelta(days=delta)).date() for delta in delta_days ] timelines = [ - datetime.combine(date, self.timeline) + dt.datetime.combine(date, self.timeline) for date in surrounding_dates ] diffs = [ diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index d6e6caa887..92c2915a1e 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Reader for Himawari L2 cloud products from NOAA's big data programme. For more information about the data, see: . @@ -43,8 +44,8 @@ supported. These include the CldHgtFlag and the CloudMaskPacked variables. """ +import datetime as dt import logging -from datetime import datetime import xarray as xr @@ -82,14 +83,14 @@ def __init__(self, filename, filename_info, filetype_info): @property def start_time(self): """Start timestamp of the dataset.""" - dt = self.nc.attrs["time_coverage_start"] - return datetime.strptime(dt, "%Y-%m-%dT%H:%M:%SZ") + date = self.nc.attrs["time_coverage_start"] + return dt.datetime.strptime(date, "%Y-%m-%dT%H:%M:%SZ") @property def end_time(self): """End timestamp of the dataset.""" - dt = self.nc.attrs["time_coverage_end"] - return datetime.strptime(dt, "%Y-%m-%dT%H:%M:%SZ") + date = self.nc.attrs["time_coverage_end"] + return dt.datetime.strptime(date, "%Y-%m-%dT%H:%M:%SZ") def get_dataset(self, key, info): """Load a dataset.""" diff --git a/satpy/readers/ami_l1b.py b/satpy/readers/ami_l1b.py index db8c8444d8..6841189eef 100644 --- a/satpy/readers/ami_l1b.py +++ b/satpy/readers/ami_l1b.py @@ -15,10 +15,11 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Advanced Meteorological Imager reader for the Level 1b NetCDF4 format.""" +import datetime as dt import logging -from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -117,14 +118,14 @@ def __init__(self, filename, filename_info, filetype_info, @property def start_time(self): """Get observation start time.""" - base = datetime(2000, 1, 1, 12, 0, 0) - return base + timedelta(seconds=self.nc.attrs["observation_start_time"]) + base = dt.datetime(2000, 1, 1, 12, 0, 0) + return base + dt.timedelta(seconds=self.nc.attrs["observation_start_time"]) @property def end_time(self): """Get observation end time.""" - base = datetime(2000, 1, 1, 12, 0, 0) - return base + timedelta(seconds=self.nc.attrs["observation_end_time"]) + base = dt.datetime(2000, 1, 1, 12, 0, 0) + return base + dt.timedelta(seconds=self.nc.attrs["observation_end_time"]) def get_area_def(self, dsid): """Get area definition for this file.""" diff --git a/satpy/readers/amsr2_l2_gaasp.py b/satpy/readers/amsr2_l2_gaasp.py index 54a3769747..21442f6f3a 100644 --- a/satpy/readers/amsr2_l2_gaasp.py +++ b/satpy/readers/amsr2_l2_gaasp.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """GCOM-W1 AMSR2 Level 2 files from the GAASP software. GAASP output files are in the NetCDF4 format. 
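AMI above anchors its observation times to the J2000 epoch (2000-01-01 12:00) plus a float number of seconds:

import datetime as dt

J2000 = dt.datetime(2000, 1, 1, 12, 0, 0)

def ami_time(seconds_since_epoch):
    # The file attributes store plain seconds since the epoch.
    return J2000 + dt.timedelta(seconds=seconds_since_epoch)

print(ami_time(631152000.0))  # 2020-01-01 12:00:00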
Software is provided by NOAA @@ -36,8 +37,8 @@ """ +import datetime as dt import logging -from datetime import datetime from typing import Tuple import numpy as np @@ -94,7 +95,7 @@ def start_time(self): return self.filename_info["start_time"] except KeyError: time_str = self.nc.attrs["time_coverage_start"] - return datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): @@ -103,7 +104,7 @@ def end_time(self): return self.filename_info["end_time"] except KeyError: time_str = self.nc.attrs["time_coverage_end"] - return datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ") @property def sensor_names(self): diff --git a/satpy/readers/ascat_l2_soilmoisture_bufr.py b/satpy/readers/ascat_l2_soilmoisture_bufr.py index a5f77fd7eb..9619977e89 100644 --- a/satpy/readers/ascat_l2_soilmoisture_bufr.py +++ b/satpy/readers/ascat_l2_soilmoisture_bufr.py @@ -15,14 +15,15 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """ASCAT Soil moisture product reader for BUFR messages. Based on the IASI L2 SO2 BUFR reader. """ +import datetime as dt import logging -from datetime import datetime import dask.array as da import numpy as np @@ -82,7 +83,7 @@ def extract_msg_date_extremes(self, bufr, date_min=None, date_max=None): minutes = np.resize(ec.codes_get_array(bufr, "minute"), size) seconds = np.resize(ec.codes_get_array(bufr, "second"), size) for year, month, day, hour, minute, second in zip(years, months, days, hours, minutes, seconds): - time_stamp = datetime(year, month, day, hour, minute, second) + time_stamp = dt.datetime(year, month, day, hour, minute, second) date_min = time_stamp if not date_min else min(date_min, time_stamp) date_max = time_stamp if not date_max else max(date_max, time_stamp) return date_min, date_max diff --git a/satpy/readers/atms_l1b_nc.py b/satpy/readers/atms_l1b_nc.py index 95d48b81cd..4b8587c824 100644 --- a/satpy/readers/atms_l1b_nc.py +++ b/satpy/readers/atms_l1b_nc.py @@ -12,6 +12,7 @@ # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . + """Advanced Technology Microwave Sounder (ATMS) Level 1B product reader. The format is explained in the `ATMS L1B Product User Guide`_ @@ -21,8 +22,8 @@ """ +import datetime as dt import logging -from datetime import datetime from satpy.readers.netcdf_utils import NetCDF4FileHandler @@ -43,12 +44,12 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): @property def start_time(self): """Get observation start time.""" - return datetime.strptime(self["/attr/time_coverage_start"], DATE_FMT) + return dt.datetime.strptime(self["/attr/time_coverage_start"], DATE_FMT) @property def end_time(self): """Get observation end time.""" - return datetime.strptime(self["/attr/time_coverage_end"], DATE_FMT) + return dt.datetime.strptime(self["/attr/time_coverage_end"], DATE_FMT) @property def platform_name(self): diff --git a/satpy/readers/avhrr_l1b_gaclac.py b/satpy/readers/avhrr_l1b_gaclac.py index 96a13449f7..47f0d97283 100644 --- a/satpy/readers/avhrr_l1b_gaclac.py +++ b/satpy/readers/avhrr_l1b_gaclac.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Reading and calibrating GAC and LAC AVHRR data. Uses Pygac under the hood. 
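extract_msg_date_extremes above keeps a running earliest/latest pair while iterating over per-record BUFR timestamps; the bookkeeping in isolation (function name illustrative):

import datetime as dt

def date_extremes(timestamps, date_min=None, date_max=None):
    # None (falsy) means "no value seen yet"; otherwise keep the
    # running minimum and maximum.
    for ts in timestamps:
        date_min = ts if not date_min else min(date_min, ts)
        date_max = ts if not date_max else max(date_max, ts)
    return date_min, date_max

obs = [dt.datetime(2020, 1, 1, h) for h in (3, 1, 2)]
print(date_extremes(obs))  # earliest 01:00, latest 03:00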
See the `Pygac Documentation`_ for supported data @@ -29,8 +30,8 @@ https://pygac.readthedocs.io/en/stable """ +import datetime as dt import logging -from datetime import date, datetime, timedelta import dask.array as da import numpy as np @@ -93,14 +94,14 @@ def __init__(self, filename, filename_info, filetype_info, # noqa: D417 self.first_valid_lat = None self.last_valid_lat = None self._start_time = filename_info["start_time"] - self._end_time = datetime.combine(filename_info["start_time"].date(), - filename_info["end_time"].time()) + self._end_time = dt.datetime.combine(filename_info["start_time"].date(), + filename_info["end_time"].time()) if self._end_time < self._start_time: - self._end_time += timedelta(days=1) + self._end_time += dt.timedelta(days=1) self.platform_id = filename_info["platform_id"] if len(self.platform_id) == 3: - self.reader_kwargs["header_date"] = date(2000, 1, 1) + self.reader_kwargs["header_date"] = dt.date(2000, 1, 1) if self._is_avhrr3(): if filename_info.get("transfer_mode") == "GHRR": @@ -184,8 +185,8 @@ def get_dataset(self, key, info): # Update start/end time using the actual scanline timestamps times = self.reader.get_times() - self._start_time = times[0].astype(datetime) - self._end_time = times[-1].astype(datetime) + self._start_time = times[0].astype(dt.datetime) + self._end_time = times[-1].astype(dt.datetime) # Select user-defined scanlines and/or strip invalid coordinates if (self.start_line is not None or self.end_line is not None @@ -223,8 +224,8 @@ def slice(self, data, times): # noqa: A003 """ sliced = self._slice(data) times = self._slice(times) - self._start_time = times[0].astype(datetime) - self._end_time = times[-1].astype(datetime) + self._start_time = times[0].astype(dt.datetime) + self._end_time = times[-1].astype(dt.datetime) return sliced, times def _slice(self, data): diff --git a/satpy/readers/caliop_l2_cloud.py b/satpy/readers/caliop_l2_cloud.py index 54dd100ffc..e088dfd853 100644 --- a/satpy/readers/caliop_l2_cloud.py +++ b/satpy/readers/caliop_l2_cloud.py @@ -16,12 +16,13 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
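The GAC/LAC handler above has only an end *time* in the file name, so it reuses the start date and rolls over a day when the orbit crosses midnight:

import datetime as dt

def combine_end_time(start_time, end_time_of_day):
    end = dt.datetime.combine(start_time.date(), end_time_of_day)
    if end < start_time:
        # The pass crossed midnight; the end belongs to the next day.
        end += dt.timedelta(days=1)
    return end

start = dt.datetime(2020, 1, 1, 23, 50)
print(combine_end_time(start, dt.time(0, 35)))  # 2020-01-02 00:35:00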
# type: ignore + """Interface to CALIOP L2 HDF4 cloud products.""" +import datetime as dt import logging import os.path import re -from datetime import datetime from pyhdf.SD import SD, SDC @@ -56,7 +57,7 @@ def get_end_time(self): mda_dict = self.filehandle.attributes() core_mda = mda_dict["coremetadata"] end_time_str = self.parse_metadata_string(core_mda) - self._end_time = datetime.strptime(end_time_str, "%Y-%m-%dT%H:%M:%SZ") + self._end_time = dt.datetime.strptime(end_time_str, "%Y-%m-%dT%H:%M:%SZ") @staticmethod def parse_metadata_string(metadata_string): diff --git a/satpy/readers/electrol_hrit.py b/satpy/readers/electrol_hrit.py index c773850a73..62f99fb0a4 100644 --- a/satpy/readers/electrol_hrit.py +++ b/satpy/readers/electrol_hrit.py @@ -24,8 +24,8 @@ """ +import datetime as dt import logging -from datetime import datetime import numpy as np import xarray as xr @@ -299,7 +299,7 @@ def get_dataset(self, key, info): def calibrate(self, data, calibration): """Calibrate the data.""" - tic = datetime.now() + tic = dt.datetime.now() if calibration == "counts": res = data elif calibration in ["radiance", "brightness_temperature"]: @@ -311,7 +311,7 @@ def calibrate(self, data, calibration): res.attrs["standard_name"] = calibration res.attrs["calibration"] = calibration - logger.debug("Calibration time " + str(datetime.now() - tic)) + logger.debug("Calibration time " + str(dt.datetime.now() - tic)) return res @staticmethod diff --git a/satpy/readers/epic_l1b_h5.py b/satpy/readers/epic_l1b_h5.py index 3fb8f69c01..0d993b0b6c 100644 --- a/satpy/readers/epic_l1b_h5.py +++ b/satpy/readers/epic_l1b_h5.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """File handler for DSCOVR EPIC L1B data in hdf5 format. The ``epic_l1b_h5`` reader reads and calibrates EPIC L1B image data in hdf5 format. @@ -37,8 +38,8 @@ """ +import datetime as dt import logging -from datetime import datetime import dask.array as da import numpy as np @@ -74,13 +75,13 @@ def __init__(self, filename, filename_info, filetype_info): @property def start_time(self): """Get the start time.""" - start_time = datetime.strptime(self.file_content["/attr/begin_time"], "%Y-%m-%d %H:%M:%S") + start_time = dt.datetime.strptime(self.file_content["/attr/begin_time"], "%Y-%m-%d %H:%M:%S") return start_time @property def end_time(self): """Get the end time.""" - end_time = datetime.strptime(self.file_content["/attr/end_time"], "%Y-%m-%d %H:%M:%S") + end_time = dt.datetime.strptime(self.file_content["/attr/end_time"], "%Y-%m-%d %H:%M:%S") return end_time @staticmethod diff --git a/satpy/readers/eum_base.py b/satpy/readers/eum_base.py index 3cbbb46433..fe4579301d 100644 --- a/satpy/readers/eum_base.py +++ b/satpy/readers/eum_base.py @@ -17,7 +17,7 @@ # satpy. If not, see . 
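Several handlers in this patch (electrol_hrit above, the GOES and JMA HRIT readers below) time their calibration step with the same tic/toc pattern:

import datetime as dt
import logging

logger = logging.getLogger(__name__)

tic = dt.datetime.now()
# ... calibration work goes here ...
logger.debug("Calibration time %s", dt.datetime.now() - tic)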
"""Utilities for EUMETSAT satellite data.""" -from datetime import datetime, timedelta +import datetime as dt import numpy as np @@ -44,9 +44,9 @@ def timecds2datetime(tcds): except (KeyError, ValueError): pass - reference = datetime(1958, 1, 1) - delta = timedelta(days=days, milliseconds=milliseconds, - microseconds=microseconds) + reference = dt.datetime(1958, 1, 1) + delta = dt.timedelta(days=days, milliseconds=milliseconds, + microseconds=microseconds) return reference + delta diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index 0c7b9fb8cc..1344549fa9 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Interface to MTG-FCI L1c NetCDF files. This module defines the :class:`FCIL1cNCFileHandler` file handler, to @@ -111,8 +112,8 @@ from __future__ import absolute_import, division, print_function, unicode_literals +import datetime as dt import logging -from datetime import timedelta from functools import cached_property import dask.array as da @@ -227,12 +228,13 @@ def rc_period_min(self): def nominal_start_time(self): """Get nominal start time.""" rc_date = self.observation_start_time.replace(hour=0, minute=0, second=0, microsecond=0) - return rc_date + timedelta(minutes=(self.filename_info["repeat_cycle_in_day"]-1)*self.rc_period_min) + return rc_date + dt.timedelta( + minutes=(self.filename_info["repeat_cycle_in_day"]-1)*self.rc_period_min) @property def nominal_end_time(self): """Get nominal end time.""" - return self.nominal_start_time + timedelta(minutes=self.rc_period_min) + return self.nominal_start_time + dt.timedelta(minutes=self.rc_period_min) @property def observation_start_time(self): diff --git a/satpy/readers/fy4_base.py b/satpy/readers/fy4_base.py index b0452a5735..160b5795dd 100644 --- a/satpy/readers/fy4_base.py +++ b/satpy/readers/fy4_base.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Base reader for the L1 HDF data from the AGRI and GHI instruments aboard the FengYun-4A/B satellites. 
The files read by this reader are described in the official Real Time Data Service: @@ -23,8 +24,8 @@ """ +import datetime as dt import logging -from datetime import datetime import dask.array as da import numpy as np @@ -200,20 +201,20 @@ def start_time(self): """Get the start time.""" start_time = self["/attr/Observing Beginning Date"] + "T" + self["/attr/Observing Beginning Time"] + "Z" try: - return datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ") except ValueError: # For some data there is no sub-second component - return datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%SZ") + return dt.datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%SZ") @property def end_time(self): """Get the end time.""" end_time = self["/attr/Observing Ending Date"] + "T" + self["/attr/Observing Ending Time"] + "Z" try: - return datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S.%fZ") except ValueError: # For some data there is no sub-second component - return datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%SZ") + return dt.datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%SZ") def get_area_def(self, key): """Get the area definition.""" diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index 4f34c1fde8..6b3ceb5e0a 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -16,16 +16,14 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . - """GERB L2 HR HDF5 reader. A reader for the Top of Atmosphere outgoing fluxes from the Geostationary Earth Radiation Budget instrument aboard the Meteosat Second Generation satellites. """ - +import datetime as dt import logging -from datetime import timedelta from satpy.readers.hdf5_utils import HDF5FileHandler from satpy.resample import get_area_def @@ -55,7 +53,7 @@ class GERB_HR_FileHandler(HDF5FileHandler): @property def end_time(self): """Get end time.""" - return self.start_time + timedelta(minutes=15) + return self.start_time + dt.timedelta(minutes=15) @property def start_time(self): diff --git a/satpy/readers/ghrsst_l2.py b/satpy/readers/ghrsst_l2.py index 6c4005623e..d407d49f14 100644 --- a/satpy/readers/ghrsst_l2.py +++ b/satpy/readers/ghrsst_l2.py @@ -14,12 +14,13 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Reader for the GHRSST level-2 formatted data.""" +import datetime as dt import os import tarfile from contextlib import suppress -from datetime import datetime from functools import cached_property import xarray as xr @@ -39,9 +40,9 @@ def __init__(self, filename, filename_info, filetype_info, engine=None): self._engine = engine self._tarfile = None - self.filename_info["start_time"] = datetime.strptime( + self.filename_info["start_time"] = dt.datetime.strptime( self.nc.start_time, "%Y%m%dT%H%M%SZ") - self.filename_info["end_time"] = datetime.strptime( + self.filename_info["end_time"] = dt.datetime.strptime( self.nc.stop_time, "%Y%m%dT%H%M%SZ") @cached_property diff --git a/satpy/readers/ghrsst_l3c_sst.py b/satpy/readers/ghrsst_l3c_sst.py index ef1dd220a9..8960275995 100644 --- a/satpy/readers/ghrsst_l3c_sst.py +++ b/satpy/readers/ghrsst_l3c_sst.py @@ -16,10 +16,11 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
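The FY-4 start/end parsers above try the sub-second format first and fall back when the fractional part is absent; the same two-step pattern on its own:

import datetime as dt

def parse_obs_time(time_str):
    try:
        return dt.datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ")
    except ValueError:
        # Some files carry no sub-second component.
        return dt.datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%SZ")

print(parse_obs_time("2020-01-01T12:00:00.500Z"))  # 2020-01-01 12:00:00.500000
print(parse_obs_time("2020-01-01T12:00:00Z"))      # 2020-01-01 12:00:00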
# type: ignore + """An OSISAF SST reader for the netCDF GHRSST format.""" +import datetime as dt import logging -from datetime import datetime import numpy as np @@ -37,7 +38,7 @@ class GHRSST_OSISAFL2(NetCDF4FileHandler): """Reader for the OSISAF SST GHRSST format.""" def _parse_datetime(self, datestr): - return datetime.strptime(datestr, "%Y%m%dT%H%M%SZ") + return dt.datetime.strptime(datestr, "%Y%m%dT%H%M%SZ") def get_area_def(self, area_id, area_info): """Override abstract baseclass method.""" diff --git a/satpy/readers/glm_l2.py b/satpy/readers/glm_l2.py index ceb11a33bc..7f1e77cd50 100644 --- a/satpy/readers/glm_l2.py +++ b/satpy/readers/glm_l2.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Geostationary Lightning Mapper reader for the Level 2 format from glmtools. More information about `glmtools` and the files it produces can be found on @@ -23,8 +24,9 @@ https://github.com/deeplycloudy/glmtools """ + +import datetime as dt import logging -from datetime import datetime import numpy as np @@ -52,12 +54,12 @@ def sensor(self): @property def start_time(self): """Start time of the current file's observations.""" - return datetime.strptime(self.nc.attrs["time_coverage_start"], "%Y-%m-%dT%H:%M:%SZ") + return dt.datetime.strptime(self.nc.attrs["time_coverage_start"], "%Y-%m-%dT%H:%M:%SZ") @property def end_time(self): """End time of the current file's observations.""" - return datetime.strptime(self.nc.attrs["time_coverage_end"], "%Y-%m-%dT%H:%M:%SZ") + return dt.datetime.strptime(self.nc.attrs["time_coverage_end"], "%Y-%m-%dT%H:%M:%SZ") def _is_category_product(self, data_arr): # if after autoscaling we still have an integer diff --git a/satpy/readers/goci2_l2_nc.py b/satpy/readers/goci2_l2_nc.py index a79d582544..b60a3e3876 100644 --- a/satpy/readers/goci2_l2_nc.py +++ b/satpy/readers/goci2_l2_nc.py @@ -15,13 +15,14 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Reader for GK-2B GOCI-II L2 products from NOSC. For more information about the data, see: """ +import datetime as dt import logging -from datetime import datetime import xarray as xr @@ -65,14 +66,14 @@ def _merge_navigation_data(self, filetype): @property def start_time(self): """Start timestamp of the dataset.""" - dt = self.attrs["observation_start_time"] - return datetime.strptime(dt, "%Y%m%d_%H%M%S") + date = self.attrs["observation_start_time"] + return dt.datetime.strptime(date, "%Y%m%d_%H%M%S") @property def end_time(self): """End timestamp of the dataset.""" - dt = self.attrs["observation_end_time"] - return datetime.strptime(dt, "%Y%m%d_%H%M%S") + date = self.attrs["observation_end_time"] + return dt.datetime.strptime(date, "%Y%m%d_%H%M%S") def get_dataset(self, key, info): """Load a dataset.""" diff --git a/satpy/readers/goes_imager_hrit.py b/satpy/readers/goes_imager_hrit.py index d90ebb4a72..401274debb 100644 --- a/satpy/readers/goes_imager_hrit.py +++ b/satpy/readers/goes_imager_hrit.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """GOES HRIT format reader. 
References: @@ -24,8 +25,8 @@ """ +import datetime as dt import logging -from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -116,21 +117,21 @@ class CalibrationError(Exception): ("msecs", "u1")]) -def make_sgs_time(sgs_time_array: ArrayLike) -> datetime: +def make_sgs_time(sgs_time_array: ArrayLike) -> dt.datetime: """Make sgs time.""" epoch_year = _epoch_year_from_sgs_time(sgs_time_array) doy_offset = _epoch_doy_offset_from_sgs_time(sgs_time_array) return epoch_year + doy_offset -def _epoch_year_from_sgs_time(sgs_time_array: ArrayLike) -> datetime: +def _epoch_year_from_sgs_time(sgs_time_array: ArrayLike) -> dt.datetime: century = sgs_time_array["century"].astype(np.int64) year = sgs_time_array["year"].astype(np.int64) year = ((century >> 4) * 1000 + (century & 15) * 100 + (year >> 4) * 10 + (year & 15)) - return datetime(int(year), 1, 1) + return dt.datetime(int(year), 1, 1) -def _epoch_doy_offset_from_sgs_time(sgs_time_array: ArrayLike) -> timedelta: +def _epoch_doy_offset_from_sgs_time(sgs_time_array: ArrayLike) -> dt.timedelta: doy1 = sgs_time_array["doy1"].astype(np.int64) doy_hours = sgs_time_array["doy_hours"].astype(np.int64) hours_mins = sgs_time_array["hours_mins"].astype(np.int64) @@ -143,7 +144,7 @@ def _epoch_doy_offset_from_sgs_time(sgs_time_array: ArrayLike) -> timedelta: mins = ((hours_mins & 15) * 10 + (mins_secs >> 4)) secs = ((mins_secs & 15) * 10 + (secs_msecs >> 4)) msecs = ((secs_msecs & 15) * 100 + (msecs >> 4) * 10 + (msecs & 15)) - return timedelta( + return dt.timedelta( days=int(doy - 1), hours=int(hours), minutes=int(mins), @@ -426,7 +427,7 @@ def _get_calibration_params(self): def calibrate(self, data, calibration): """Calibrate the data.""" logger.debug("Calibration") - tic = datetime.now() + tic = dt.datetime.now() if calibration == "counts": return data if calibration == "reflectance": @@ -437,7 +438,7 @@ def calibrate(self, data, calibration): raise NotImplementedError("Don't know how to calibrate to " + str(calibration)) - logger.debug("Calibration time " + str(datetime.now() - tic)) + logger.debug("Calibration time " + str(dt.datetime.now() - tic)) return res def _calibrate(self, data): diff --git a/satpy/readers/goes_imager_nc.py b/satpy/readers/goes_imager_nc.py index 1b88919886..2916a36436 100644 --- a/satpy/readers/goes_imager_nc.py +++ b/satpy/readers/goes_imager_nc.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Reader for GOES 8-15 imager data in netCDF format. Supports netCDF files from both NOAA-CLASS and EUMETSAT. @@ -223,10 +224,10 @@ .. 
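The SGS time fields decoded above are binary-coded decimal, two digits per byte; the nibble arithmetic in miniature:

def bcd_to_int(byte):
    # High nibble holds the tens digit, low nibble the ones digit.
    return (byte >> 4) * 10 + (byte & 15)

def bcd_year(century_byte, year_byte):
    # Two BCD bytes together encode a four-digit year.
    return ((century_byte >> 4) * 1000 + (century_byte & 15) * 100
            + (year_byte >> 4) * 10 + (year_byte & 15))

print(bcd_to_int(0x59))      # 59
print(bcd_year(0x20, 0x20))  # 2020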
_[SCHED-E]: http://www.ospo.noaa.gov/Operations/GOES/east/imager-routine.html """ +import datetime as dt import logging import re from abc import abstractmethod -from datetime import datetime, timedelta import numpy as np import pyresample.geometry @@ -593,11 +594,11 @@ } # (nlines, ncols) SCAN_DURATION = { - FULL_DISC: timedelta(minutes=26), - NORTH_HEMIS_WEST: timedelta(minutes=10, seconds=5), - SOUTH_HEMIS_WEST: timedelta(minutes=6, seconds=54), - NORTH_HEMIS_EAST: timedelta(minutes=14, seconds=15), - SOUTH_HEMIS_EAST: timedelta(minutes=4, seconds=49) + FULL_DISC: dt.timedelta(minutes=26), + NORTH_HEMIS_WEST: dt.timedelta(minutes=10, seconds=5), + SOUTH_HEMIS_WEST: dt.timedelta(minutes=6, seconds=54), + NORTH_HEMIS_EAST: dt.timedelta(minutes=14, seconds=15), + SOUTH_HEMIS_EAST: dt.timedelta(minutes=4, seconds=49) } # Source: [SCHED-W], [SCHED-E] @@ -730,10 +731,15 @@ def _get_area_def_uniform_sampling(self, lon0, channel): @property def start_time(self): """Start timestamp of the dataset.""" - dt = self.nc["time"].dt - return datetime(year=int(dt.year.item()), month=int(dt.month.item()), day=int(dt.day.item()), - hour=int(dt.hour.item()), minute=int(dt.minute.item()), - second=int(dt.second.item()), microsecond=int(dt.microsecond.item())) + date = self.nc["time"].dt + return dt.datetime( + year=int(date.year.item()), + month=int(date.month.item()), + day=int(date.day.item()), + hour=int(date.hour.item()), + minute=int(date.minute.item()), + second=int(date.second.item()), + microsecond=int(date.microsecond.item())) @property def end_time(self): @@ -1018,11 +1024,11 @@ def get_dataset(self, key, info): elif "latitude" in key["name"]: data = self.geo_data["lat"] else: - tic = datetime.now() + tic = dt.datetime.now() data = self.calibrate(self.nc["data"].isel(time=0), calibration=key["calibration"], channel=key["name"]) - logger.debug("Calibration time: {}".format(datetime.now() - tic)) + logger.debug("Calibration time: {}".format(dt.datetime.now() - tic)) # Mask space pixels data = data.where(self.meta["earth_mask"]) @@ -1076,11 +1082,11 @@ def get_dataset(self, key, info): """Load dataset designated by the given key from file.""" logger.debug("Reading dataset {}".format(key["name"])) - tic = datetime.now() + tic = dt.datetime.now() data = self.calibrate(self.nc["data"].isel(time=0), calibration=key["calibration"], channel=key["name"]) - logger.debug("Calibration time: {}".format(datetime.now() - tic)) + logger.debug("Calibration time: {}".format(dt.datetime.now() - tic)) # Mask space pixels data = data.where(self.meta["earth_mask"]) diff --git a/satpy/readers/gpm_imerg.py b/satpy/readers/gpm_imerg.py index 7bc65ac4c6..4463be31b9 100644 --- a/satpy/readers/gpm_imerg.py +++ b/satpy/readers/gpm_imerg.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Reader for GPM imerg data on half-hourly timesteps. 
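start_time above rebuilds a datetime from an xarray .dt accessor, with the accessor bound to `date` so it cannot collide with the dt module alias; a self-contained version:

import datetime as dt

import numpy as np
import xarray as xr

times = xr.DataArray(np.array(["2020-01-01T12:30:45"], dtype="datetime64[ns]"))
date = times.dt  # xarray's datetime accessor
start = dt.datetime(year=int(date.year.item()), month=int(date.month.item()),
                    day=int(date.day.item()), hour=int(date.hour.item()),
                    minute=int(date.minute.item()), second=int(date.second.item()))
print(start)  # 2020-01-01 12:30:45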
References: @@ -23,8 +24,8 @@ """ +import datetime as dt import logging -from datetime import datetime import dask.array as da import h5py @@ -49,22 +50,22 @@ def __init__(self, filename, filename_info, filetype_info): @property def start_time(self): """Find the start time from filename info.""" - return datetime(self.finfo["date"].year, - self.finfo["date"].month, - self.finfo["date"].day, - self.finfo["start_time"].hour, - self.finfo["start_time"].minute, - self.finfo["start_time"].second) + return dt.datetime(self.finfo["date"].year, + self.finfo["date"].month, + self.finfo["date"].day, + self.finfo["start_time"].hour, + self.finfo["start_time"].minute, + self.finfo["start_time"].second) @property def end_time(self): """Find the end time from filename info.""" - return datetime(self.finfo["date"].year, - self.finfo["date"].month, - self.finfo["date"].day, - self.finfo["end_time"].hour, - self.finfo["end_time"].minute, - self.finfo["end_time"].second) + return dt.datetime(self.finfo["date"].year, + self.finfo["date"].month, + self.finfo["date"].day, + self.finfo["end_time"].hour, + self.finfo["end_time"].minute, + self.finfo["end_time"].second) def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" diff --git a/satpy/readers/grib.py b/satpy/readers/grib.py index dadccce77a..4372226c12 100644 --- a/satpy/readers/grib.py +++ b/satpy/readers/grib.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Generic Reader for GRIB2 files. Currently this reader depends on the `pygrib` python package. The `eccodes` @@ -22,8 +23,9 @@ of writing. """ + +import datetime as dt import logging -from datetime import datetime import dask.array as da import numpy as np @@ -105,7 +107,7 @@ def _create_dataset_ids(self, keys): @staticmethod def _convert_datetime(msg, date_key, time_key, date_format="%Y%m%d%H%M"): date_str = "{:d}{:04d}".format(msg[date_key], msg[time_key]) - return datetime.strptime(date_str, date_format) + return dt.datetime.strptime(date_str, date_format) @property def start_time(self): diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index 37fe714435..3fd920c01f 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -15,15 +15,16 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """Base HDF-EOS reader.""" from __future__ import annotations +import datetime as dt import logging import re from ast import literal_eval from contextlib import suppress -from datetime import datetime import numpy as np import xarray as xr @@ -182,7 +183,7 @@ def start_time(self): try: date = (self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEBEGINNINGDATE"]["VALUE"] + " " + self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEBEGINNINGTIME"]["VALUE"]) - return datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f") + return dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f") except KeyError: return self._start_time_from_filename() @@ -195,7 +196,7 @@ def end_time(self): try: date = (self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEENDINGDATE"]["VALUE"] + " " + self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEENDINGTIME"]["VALUE"]) - return datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f") + return dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f") except KeyError: return self.start_time diff --git a/satpy/readers/hrit_base.py b/satpy/readers/hrit_base.py index bf53d84a65..d0b9ee44db 100644 --- a/satpy/readers/hrit_base.py +++ b/satpy/readers/hrit_base.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """HRIT/LRIT format reader. This module is the base module for all HRIT-based formats. Here, you will find @@ -28,10 +29,10 @@ """ +import datetime as dt import logging import os from contextlib import contextmanager, nullcontext -from datetime import timedelta from io import BytesIO from subprocess import PIPE, Popen # nosec B404 @@ -176,7 +177,7 @@ def __init__(self, filename, filename_info, filetype_info, hdr_info): self.hdr_info = hdr_info self._get_hd(self.hdr_info) self._start_time = filename_info["start_time"] - self._end_time = self._start_time + timedelta(minutes=15) + self._end_time = self._start_time + dt.timedelta(minutes=15) def _get_hd(self, hdr_info): """Open the file, read and get the basic file header info and set the mda dictionary.""" diff --git a/satpy/readers/hrit_jma.py b/satpy/readers/hrit_jma.py index 0c88faf46b..bfdd5da93b 100644 --- a/satpy/readers/hrit_jma.py +++ b/satpy/readers/hrit_jma.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """HRIT format reader for JMA data. Introduction @@ -107,8 +108,8 @@ .. 
_AHI sample data: https://www.data.jma.go.jp/mscweb/en/himawari89/space_segment/sample_hrit.html """ +import datetime as dt import logging -from datetime import datetime import numpy as np import xarray as xr @@ -453,7 +454,7 @@ def _interp(arr, cal): def calibrate(self, data, calibration): """Calibrate the data.""" - tic = datetime.now() + tic = dt.datetime.now() if calibration == "counts": return data @@ -466,17 +467,17 @@ def calibrate(self, data, calibration): dims=data.dims, attrs=data.attrs, coords=data.coords) res = res.where(data < 65535) - logger.debug("Calibration time " + str(datetime.now() - tic)) + logger.debug("Calibration time " + str(dt.datetime.now() - tic)) return res @property def start_time(self): """Get start time of the scan.""" if self._use_acquisition_time_as_start_time: - return self.acq_time[0].astype(datetime) + return self.acq_time[0].astype(dt.datetime) return self._start_time @property def end_time(self): """Get end time of the scan.""" - return self.acq_time[-1].astype(datetime) + return self.acq_time[-1].astype(dt.datetime) diff --git a/satpy/readers/hrpt.py b/satpy/readers/hrpt.py index c4862e8169..cac8b9cd3d 100644 --- a/satpy/readers/hrpt.py +++ b/satpy/readers/hrpt.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Reading and calibrating hrpt avhrr data. Todo: @@ -29,8 +30,8 @@ """ +import datetime as dt import logging -from datetime import datetime import dask.array as da import numpy as np @@ -130,7 +131,7 @@ def __init__(self, filename, filename_info, filetype_info): self.channels = {i: None for i in AVHRR_CHANNEL_NAMES} self.units = {i: "counts" for i in AVHRR_CHANNEL_NAMES} - self.year = filename_info.get("start_time", datetime.utcnow()).year + self.year = filename_info.get("start_time", dt.datetime.utcnow()).year @cached_property def times(self): @@ -272,10 +273,10 @@ def _get_avhrr_tiepoints(self, scan_points, scanline_nb): def start_time(self): """Get the start time.""" return time_seconds(self._data["timecode"][0, np.newaxis, :], - self.year).astype(datetime)[0] + self.year).astype(dt.datetime)[0] @property def end_time(self): """Get the end time.""" return time_seconds(self._data["timecode"][-1, np.newaxis, :], - self.year).astype(datetime)[0] + self.year).astype(dt.datetime)[0] diff --git a/satpy/readers/hsaf_grib.py b/satpy/readers/hsaf_grib.py index a041bf0c73..b8238f17a5 100644 --- a/satpy/readers/hsaf_grib.py +++ b/satpy/readers/hsaf_grib.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """A reader for files produced by the Hydrology SAF. Currently this reader depends on the `pygrib` python package. The `eccodes` @@ -22,8 +23,9 @@ of writing. 
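The HRIT-JMA and HRPT handlers above turn numpy datetime64 scan times into python datetimes with astype; note this round-trips cleanly for second/microsecond resolutions:

import datetime as dt

import numpy as np

acq_time = np.array(["2020-01-01T12:00:00", "2020-01-01T12:10:00"],
                    dtype="datetime64[s]")
print(acq_time[0].astype(dt.datetime))   # 2020-01-01 12:00:00
print(acq_time[-1].astype(dt.datetime))  # 2020-01-01 12:10:00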
""" + +import datetime as dt import logging -from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -68,7 +70,7 @@ def __init__(self, filename, filename_info, filetype_info): @staticmethod def _get_datetime(msg): dtstr = str(msg["dataDate"]) + str(msg["dataTime"]).zfill(4) - return datetime.strptime(dtstr, "%Y%m%d%H%M") + return dt.datetime.strptime(dtstr, "%Y%m%d%H%M") @property def analysis_time(self): @@ -151,7 +153,7 @@ def get_dataset(self, ds_id, ds_info): flen = len(self.filename) timedelt = self.filename[flen-10:flen-8] ds_info["start_time"] = (ds_info["end_time"] - - timedelta(hours=int(timedelt))) + dt.timedelta(hours=int(timedelt))) else: ds_info["start_time"] = ds_info["end_time"] fill = msg["missingValue"] diff --git a/satpy/readers/hsaf_h5.py b/satpy/readers/hsaf_h5.py index 478b91ce2d..25b42ec6a5 100644 --- a/satpy/readers/hsaf_h5.py +++ b/satpy/readers/hsaf_h5.py @@ -15,9 +15,11 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """A reader for HDF5 Snow Cover (SC) file produced by the Hydrology SAF.""" + +import datetime as dt import logging -from datetime import timedelta import dask.array as da import h5py @@ -47,7 +49,7 @@ def __init__(self, filename, filename_info, filetype_info): @property def end_time(self): """Get end time.""" - return self.start_time + timedelta(hours=23, minutes=59, seconds=59) + return self.start_time + dt.timedelta(hours=23, minutes=59, seconds=59) @property def start_time(self): diff --git a/satpy/readers/hy2_scat_l2b_h5.py b/satpy/readers/hy2_scat_l2b_h5.py index 929d7dc934..dae6e44bf6 100644 --- a/satpy/readers/hy2_scat_l2b_h5.py +++ b/satpy/readers/hy2_scat_l2b_h5.py @@ -21,7 +21,7 @@ Also handle the HDF5 files from NSOAS, based on a file example. """ -from datetime import datetime +import datetime as dt import numpy as np import xarray as xr @@ -35,14 +35,14 @@ class HY2SCATL2BH5FileHandler(HDF5FileHandler): @property def start_time(self): """Time for first observation.""" - return datetime.strptime(self["/attr/Range_Beginning_Time"], - "%Y%m%dT%H:%M:%S") + return dt.datetime.strptime(self["/attr/Range_Beginning_Time"], + "%Y%m%dT%H:%M:%S") @property def end_time(self): """Time for final observation.""" - return datetime.strptime(self["/attr/Range_Ending_Time"], - "%Y%m%dT%H:%M:%S") + return dt.datetime.strptime(self["/attr/Range_Ending_Time"], + "%Y%m%dT%H:%M:%S") @property def platform_name(self): diff --git a/satpy/readers/iasi_l2_so2_bufr.py b/satpy/readers/iasi_l2_so2_bufr.py index b5088aa041..d66edb7995 100644 --- a/satpy/readers/iasi_l2_so2_bufr.py +++ b/satpy/readers/iasi_l2_so2_bufr.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + r"""IASI L2 SO2 BUFR format reader. 
Introduction @@ -84,8 +85,8 @@ # TDB: this reader is based on iasi_l2.py and seviri_l2_bufr.py +import datetime as dt import logging -from datetime import datetime import dask.array as da import numpy as np @@ -154,7 +155,7 @@ def get_start_end_date(self): minute = ec.codes_get(bufr, "minute") second = ec.codes_get(bufr, "second") - obs_time = datetime(year=year, month=month, day=day, hour=hour, minute=minute, second=second) + obs_time = dt.datetime(year=year, month=month, day=day, hour=hour, minute=minute, second=second) if i == 0: start_time = obs_time diff --git a/satpy/readers/ici_l1b_nc.py b/satpy/readers/ici_l1b_nc.py index 7adef62e4b..a5fd23c23b 100644 --- a/satpy/readers/ici_l1b_nc.py +++ b/satpy/readers/ici_l1b_nc.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . + """EUMETSAT EPS-SG Ice Cloud Imager (ICI) Level 1B products reader. The format is explained in the @@ -26,8 +27,8 @@ """ +import datetime as dt import logging -from datetime import datetime from enum import Enum from functools import cached_property @@ -77,12 +78,12 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): def start_time(self): """Get observation start time.""" try: - start_time = datetime.strptime( + start_time = dt.datetime.strptime( self["/attr/sensing_start_time_utc"], "%Y%m%d%H%M%S.%f", ) except ValueError: - start_time = datetime.strptime( + start_time = dt.datetime.strptime( self["/attr/sensing_start_time_utc"], "%Y-%m-%d %H:%M:%S.%f", ) @@ -92,12 +93,12 @@ def start_time(self): def end_time(self): """Get observation end time.""" try: - end_time = datetime.strptime( + end_time = dt.datetime.strptime( self["/attr/sensing_end_time_utc"], "%Y%m%d%H%M%S.%f", ) except ValueError: - end_time = datetime.strptime( + end_time = dt.datetime.strptime( self["/attr/sensing_end_time_utc"], "%Y-%m-%d %H:%M:%S.%f", ) diff --git a/satpy/readers/insat3d_img_l1b_h5.py b/satpy/readers/insat3d_img_l1b_h5.py index 205f4d17b2..41ddee5df6 100644 --- a/satpy/readers/insat3d_img_l1b_h5.py +++ b/satpy/readers/insat3d_img_l1b_h5.py @@ -1,6 +1,7 @@ """File handler for Insat 3D L1B data in hdf5 format.""" + +import datetime as dt from contextlib import suppress -from datetime import datetime from functools import cached_property import dask.array as da @@ -120,13 +121,15 @@ class Insat3DIMGL1BH5FileHandler(BaseFileHandler): @property def start_time(self): """Get the start time.""" - start_time = datetime.strptime(self.datatree.attrs["Acquisition_Start_Time"], "%d-%b-%YT%H:%M:%S") + start_time = dt.datetime.strptime( + self.datatree.attrs["Acquisition_Start_Time"], "%d-%b-%YT%H:%M:%S") return start_time @property def end_time(self): """Get the end time.""" - end_time = datetime.strptime(self.datatree.attrs["Acquisition_End_Time"], "%d-%b-%YT%H:%M:%S") + end_time = dt.datetime.strptime( + self.datatree.attrs["Acquisition_End_Time"], "%d-%b-%YT%H:%M:%S") return end_time @cached_property diff --git a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py index 7675bd1624..1ccb895c25 100644 --- a/satpy/readers/mersi_l1b.py +++ b/satpy/readers/mersi_l1b.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Reader for the FY-3D MERSI-2 L1B file format. The files for this reader are HDF5 and come in four varieties; band data @@ -24,7 +25,8 @@ platforms as well assuming no file format changes. 
""" -from datetime import datetime + +import datetime as dt import dask.array as da import numpy as np @@ -44,7 +46,7 @@ def _strptime(self, date_attr, time_attr): time = self[time_attr] # "18:27:39.720" # cuts off microseconds because of unknown meaning # is .720 == 720 microseconds or 720000 microseconds - return datetime.strptime(date + " " + time.split(".")[0], "%Y-%m-%d %H:%M:%S") + return dt.datetime.strptime(date + " " + time.split(".")[0], "%Y-%m-%d %H:%M:%S") @property def start_time(self): diff --git a/satpy/readers/msu_gsa_l1b.py b/satpy/readers/msu_gsa_l1b.py index c4e45aa333..4a4ff3518f 100644 --- a/satpy/readers/msu_gsa_l1b.py +++ b/satpy/readers/msu_gsa_l1b.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Reader for the Arctica-M1 MSU-GS/A data. The files for this reader are HDF5 and contain channel data at 1km resolution @@ -24,7 +25,8 @@ This reader was tested on sample data provided by EUMETSAT. """ -from datetime import datetime + +import datetime as dt import numpy as np @@ -38,7 +40,7 @@ class MSUGSAFileHandler(HDF5FileHandler): def start_time(self): """Time for timeslot scan start.""" dtstr = self["/attr/timestamp_without_timezone"] - return datetime.strptime(dtstr, "%Y-%m-%dT%H:%M:%S") + return dt.datetime.strptime(dtstr, "%Y-%m-%dT%H:%M:%S") @property def satellite_altitude(self): diff --git a/satpy/readers/mws_l1b.py b/satpy/readers/mws_l1b.py index 372a59ac37..1dc076e68f 100644 --- a/satpy/readers/mws_l1b.py +++ b/satpy/readers/mws_l1b.py @@ -1,24 +1,25 @@ # Copyright (c) 2022 Pytroll Developers - +# # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. - +# # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. - +# # You should have received a copy of the GNU General Public License # along with this program. If not, see . + """Reader for the EPS-SG Microwave Sounder (MWS) level-1b data. Documentation: https://www.eumetsat.int/media/44139 """ +import datetime as dt import logging -from datetime import datetime import dask.array as da import numpy as np @@ -101,13 +102,13 @@ def __init__(self, filename, filename_info, filetype_info): @property def start_time(self): """Get start time.""" - return datetime.strptime(self["/attr/sensing_start_time_utc"], + return dt.datetime.strptime(self["/attr/sensing_start_time_utc"], "%Y-%m-%d %H:%M:%S.%f") @property def end_time(self): """Get end time.""" - return datetime.strptime(self["/attr/sensing_end_time_utc"], + return dt.datetime.strptime(self["/attr/sensing_end_time_utc"], "%Y-%m-%d %H:%M:%S.%f") @property diff --git a/satpy/readers/nwcsaf_msg2013_hdf5.py b/satpy/readers/nwcsaf_msg2013_hdf5.py index 40a6441655..a3bc9ca168 100644 --- a/satpy/readers/nwcsaf_msg2013_hdf5.py +++ b/satpy/readers/nwcsaf_msg2013_hdf5.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Reader for the old NWCSAF/Geo (v2013 and earlier) cloud product format. 
References: @@ -27,8 +28,8 @@ """ +import datetime as dt import logging -from datetime import datetime import h5py import numpy as np @@ -127,7 +128,7 @@ def get_area_def(self, dsid): @property def start_time(self): """Return the start time of the object.""" - return datetime.strptime(self.file_content["/attr/IMAGE_ACQUISITION_TIME"], "%Y%m%d%H%M") + return dt.datetime.strptime(self.file_content["/attr/IMAGE_ACQUISITION_TIME"], "%Y%m%d%H%M") def get_area_extent(cfac, lfac, coff, loff, numcols, numlines): diff --git a/satpy/readers/nwcsaf_nc.py b/satpy/readers/nwcsaf_nc.py index e9809bdce5..64a284200d 100644 --- a/satpy/readers/nwcsaf_nc.py +++ b/satpy/readers/nwcsaf_nc.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Nowcasting SAF common PPS&MSG NetCDF/CF format reader. References: @@ -22,11 +23,11 @@ """ +import datetime as dt import functools import logging import os from contextlib import suppress -from datetime import datetime import dask.array as da import numpy as np @@ -435,9 +436,9 @@ def read_nwcsaf_time(time_value): try: # MSG: try: - return datetime.strptime(time_value, "%Y-%m-%dT%H:%M:%SZ") + return dt.datetime.strptime(time_value, "%Y-%m-%dT%H:%M:%SZ") except TypeError: # Remove this in summer 2024 (this is not needed since h5netcdf 0.14) - return datetime.strptime(time_value.astype(str), "%Y-%m-%dT%H:%M:%SZ") + return dt.datetime.strptime(time_value.astype(str), "%Y-%m-%dT%H:%M:%SZ") except ValueError: # PPS: - return datetime.strptime(time_value, "%Y%m%dT%H%M%S%fZ") + return dt.datetime.strptime(time_value, "%Y%m%dT%H%M%S%fZ") diff --git a/satpy/readers/oceancolorcci_l3_nc.py b/satpy/readers/oceancolorcci_l3_nc.py index 075e885b36..d38e91c9e6 100644 --- a/satpy/readers/oceancolorcci_l3_nc.py +++ b/satpy/readers/oceancolorcci_l3_nc.py @@ -23,8 +23,9 @@ are supported and both the merged product files (OC_PRODUCTS) and single product (RRS, CHLOR_A, IOP, K_490) are supported. """ + +import datetime as dt import logging -from datetime import datetime import dask.array as da import numpy as np @@ -41,7 +42,7 @@ class OCCCIFileHandler(NetCDF4FileHandler): @staticmethod def _parse_datetime(datestr): """Parse datetime.""" - return datetime.strptime(datestr, "%Y%m%d%H%MZ") + return dt.datetime.strptime(datestr, "%Y%m%d%H%MZ") @property def start_time(self): diff --git a/satpy/readers/omps_edr.py b/satpy/readers/omps_edr.py index 5421ae2cd2..12ef7d0ce4 100644 --- a/satpy/readers/omps_edr.py +++ b/satpy/readers/omps_edr.py @@ -15,16 +15,18 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Interface to OMPS EDR format.""" + +import datetime as dt import logging -from datetime import datetime, timedelta import numpy as np from satpy.readers.hdf5_utils import HDF5FileHandler -NO_DATE = datetime(1958, 1, 1) -EPSILON_TIME = timedelta(days=2) +NO_DATE = dt.datetime(1958, 1, 1) +EPSILON_TIME = dt.timedelta(days=2) LOG = logging.getLogger(__name__) diff --git a/satpy/readers/osisaf_l3_nc.py b/satpy/readers/osisaf_l3_nc.py index 56d4773a43..1356471524 100644 --- a/satpy/readers/osisaf_l3_nc.py +++ b/satpy/readers/osisaf_l3_nc.py @@ -15,8 +15,8 @@ # satpy. If not, see . 
"""A reader for OSI-SAF level 3 products in netCDF format.""" +import datetime as dt import logging -from datetime import datetime from satpy.readers.netcdf_utils import NetCDF4FileHandler @@ -197,7 +197,7 @@ def _get_platname(self): def _parse_datetime(datestr): for dt_format in ("%Y-%m-%d %H:%M:%S","%Y%m%dT%H%M%SZ", "%Y-%m-%dT%H:%M:%SZ"): try: - return datetime.strptime(datestr, dt_format) + return dt.datetime.strptime(datestr, dt_format) except ValueError: continue raise ValueError(f"Unsupported date format: {datestr}") diff --git a/satpy/readers/scatsat1_l2b.py b/satpy/readers/scatsat1_l2b.py index 886ce458b3..d14665759d 100644 --- a/satpy/readers/scatsat1_l2b.py +++ b/satpy/readers/scatsat1_l2b.py @@ -17,7 +17,7 @@ # type: ignore """ScatSat-1 L2B Reader, distributed by Eumetsat in HDF5 format.""" -from datetime import datetime +import datetime as dt import h5py @@ -34,8 +34,10 @@ def __init__(self, filename, filename_info, filetype_info): self.h5f = h5py.File(self.filename, "r") h5data = self.h5f["science_data"] - self.filename_info["start_time"] = datetime.strptime(h5data.attrs["Range Beginning Date"], "%Y-%jT%H:%M:%S.%f") - self.filename_info["end_time"] = datetime.strptime(h5data.attrs["Range Ending Date"], "%Y-%jT%H:%M:%S.%f") + self.filename_info["start_time"] = dt.datetime.strptime( + h5data.attrs["Range Beginning Date"], "%Y-%jT%H:%M:%S.%f") + self.filename_info["end_time"] = dt.datetime.strptime( + h5data.attrs["Range Ending Date"], "%Y-%jT%H:%M:%S.%f") self.lons = None self.lats = None diff --git a/satpy/readers/scmi.py b/satpy/readers/scmi.py index a4b8620f8b..fe19c63d8d 100644 --- a/satpy/readers/scmi.py +++ b/satpy/readers/scmi.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """SCMI NetCDF4 Reader. SCMI files are typically used for data for the ABI instrument onboard the @@ -40,9 +41,9 @@ """ +import datetime as dt import logging import os -from datetime import datetime import numpy as np import xarray as xr @@ -273,7 +274,7 @@ def get_area_def(self, key): @property def start_time(self): """Get the start time.""" - return datetime.strptime(self.nc.attrs["start_date_time"], "%Y%j%H%M%S") + return dt.datetime.strptime(self.nc.attrs["start_date_time"], "%Y%j%H%M%S") @property def end_time(self): diff --git a/satpy/readers/seadas_l2.py b/satpy/readers/seadas_l2.py index 03fa648330..24ee429fda 100644 --- a/satpy/readers/seadas_l2.py +++ b/satpy/readers/seadas_l2.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Reader for SEADAS L2 products. This reader currently only supports MODIS and VIIRS Chlorophyll A from SEADAS. 
@@ -28,7 +29,7 @@ """ -from datetime import datetime +import datetime as dt from .hdf4_utils import HDF4FileHandler from .netcdf_utils import NetCDF4FileHandler @@ -66,13 +67,13 @@ def _platform_name(self): def start_time(self): """Get the starting observation time of this file's data.""" start_time = self[self.start_time_attr_name] - return datetime.strptime(start_time[:-3], self.time_format) + return dt.datetime.strptime(start_time[:-3], self.time_format) @property def end_time(self): """Get the ending observation time of this file's data.""" end_time = self[self.end_time_attr_name] - return datetime.strptime(end_time[:-3], self.time_format) + return dt.datetime.strptime(end_time[:-3], self.time_format) @property def sensor_names(self): diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 5b19e56833..ace63e3f12 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Common functionality for SEVIRI L1.5 data readers. Introduction @@ -186,8 +187,8 @@ """ from __future__ import annotations +import datetime as dt import warnings -from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -387,7 +388,7 @@ # To obtain the slope for the calibration, one should use the routine get_seviri_meirink_slope # Epoch for the MEIRINK re-calibration -MEIRINK_EPOCH = datetime(2000, 1, 1) +MEIRINK_EPOCH = dt.datetime(2000, 1, 1) MEIRINK_COEFS: dict[str, dict[int, dict[str, tuple[float, float]]]] = {} MEIRINK_COEFS["2023"] = {} @@ -1093,17 +1094,17 @@ def mask_bad_quality(data, line_validity, line_geometric_quality, line_radiometr return data -def round_nom_time(dt, time_delta): +def round_nom_time(date, time_delta): """Round a datetime object to a multiple of a timedelta. - dt : datetime.datetime object, default now. + date : datetime.datetime object, default now. time_delta : timedelta object, we round to a multiple of this, default 1 minute. adapted for SEVIRI from: https://stackoverflow.com/questions/3463930/how-to-round-the-minute-of-a-datetime-object-python """ - seconds = (dt - dt.min).seconds + seconds = (date - date.min).seconds round_to = time_delta.total_seconds() rounding = (seconds + round_to / 2) // round_to * round_to - return dt + timedelta(0, rounding - seconds, - dt.microsecond) + return date + dt.timedelta(0, rounding - seconds, - date.microsecond) diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 804198da0f..f65faa8ecc 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + r"""SEVIRI Level 1.5 HRIT format reader. 
Introduction @@ -213,8 +214,8 @@ from __future__ import division import copy +import datetime as dt import logging -from datetime import timedelta import dask.array as da import numpy as np @@ -528,14 +529,14 @@ def nominal_start_time(self): """Get the start time and round it according to scan law.""" tm = self.prologue["ImageAcquisition"][ "PlannedAcquisitionTime"]["TrueRepeatCycleStart"] - return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) + return round_nom_time(tm, time_delta=dt.timedelta(minutes=self._repeat_cycle_duration)) @property def nominal_end_time(self): """Get the end time and round it according to scan law.""" tm = self.prologue["ImageAcquisition"][ "PlannedAcquisitionTime"]["PlannedRepeatCycleEnd"] - return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) + return round_nom_time(tm, time_delta=dt.timedelta(minutes=self._repeat_cycle_duration)) @property def observation_start_time(self): diff --git a/satpy/readers/seviri_l1b_icare.py b/satpy/readers/seviri_l1b_icare.py index 2024c46532..4d3243f5c8 100644 --- a/satpy/readers/seviri_l1b_icare.py +++ b/satpy/readers/seviri_l1b_icare.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + r"""Interface to SEVIRI L1B data from ICARE (Lille). Introduction @@ -69,7 +70,8 @@ ancillary_variables: [] """ -from datetime import datetime + +import datetime as dt import numpy as np @@ -169,9 +171,9 @@ def end_time(self): attr = str(attr.astype(str)) # In some versions milliseconds are present, sometimes not. try: - endacq = datetime.strptime(attr, "%Y-%m-%dT%H:%M:%SZ") + endacq = dt.datetime.strptime(attr, "%Y-%m-%dT%H:%M:%SZ") except ValueError: - endacq = datetime.strptime(attr, "%Y-%m-%dT%H:%M:%S.%fZ") + endacq = dt.datetime.strptime(attr, "%Y-%m-%dT%H:%M:%S.%fZ") return endacq @property @@ -182,9 +184,9 @@ def start_time(self): attr = str(attr.astype(str)) # In some versions milliseconds are present, sometimes not. try: - stacq = datetime.strptime(attr, "%Y-%m-%dT%H:%M:%SZ") + stacq = dt.datetime.strptime(attr, "%Y-%m-%dT%H:%M:%SZ") except ValueError: - stacq = datetime.strptime(attr, "%Y-%m-%dT%H:%M:%S.%fZ") + stacq = dt.datetime.strptime(attr, "%Y-%m-%dT%H:%M:%S.%fZ") return stacq @property diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index 361dd1bb50..976cb7c338 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + r"""SEVIRI Level 1.5 native format reader. 
Introduction @@ -97,9 +98,9 @@ https://www-cdn.eumetsat.int/files/2020-04/pdf_fg15_msg-native-format-15.pdf """ +import datetime as dt import logging import warnings -from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -207,13 +208,13 @@ def _repeat_cycle_duration(self): def nominal_start_time(self): """Get the repeat cycle nominal start time from file header and round it to expected nominal time slot.""" tm = self.header["15_DATA_HEADER"]["ImageAcquisition"]["PlannedAcquisitionTime"]["TrueRepeatCycleStart"] - return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) + return round_nom_time(tm, time_delta=dt.timedelta(minutes=self._repeat_cycle_duration)) @property def nominal_end_time(self): """Get the repeat cycle nominal end time from file header and round it to expected nominal time slot.""" tm = self.header["15_DATA_HEADER"]["ImageAcquisition"]["PlannedAcquisitionTime"]["PlannedRepeatCycleEnd"] - return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) + return round_nom_time(tm, time_delta=dt.timedelta(minutes=self._repeat_cycle_duration)) @property def observation_start_time(self): @@ -609,7 +610,7 @@ def _get_hrv_channel(self): def calibrate(self, data, dataset_id): """Calibrate the data.""" - tic = datetime.now() + tic = dt.datetime.now() channel_name = dataset_id["name"] calib = SEVIRICalibrationHandler( platform_id=self.platform_id, @@ -619,7 +620,7 @@ def calibrate(self, data, dataset_id): scan_time=self.observation_start_time ) res = calib.calibrate(data, dataset_id["calibration"]) - logger.debug("Calibration time " + str(datetime.now() - tic)) + logger.debug("Calibration time " + str(dt.datetime.now() - tic)) return res def _get_calib_coefs(self, channel_name): diff --git a/satpy/readers/seviri_l1b_nc.py b/satpy/readers/seviri_l1b_nc.py index 22b55eceda..fd19634fda 100644 --- a/satpy/readers/seviri_l1b_nc.py +++ b/satpy/readers/seviri_l1b_nc.py @@ -17,9 +17,8 @@ # satpy. If not, see . 
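For anyone skimming the seviri_base.py hunk above: the round_nom_time change is a pure rename (dt becomes date), forced by the new module alias; the rounding arithmetic is untouched. Because that arithmetic is easy to misread, here it is again with a hand-checked example for SEVIRI's 15-minute repeat cycle (the sample timestamp is made up):

    import datetime as dt

    def round_nom_time(date, time_delta):
        """Round date to the nearest multiple of time_delta within its day."""
        seconds = (date - date.min).seconds          # seconds since midnight
        round_to = time_delta.total_seconds()
        rounding = (seconds + round_to / 2) // round_to * round_to
        # the -date.microsecond term cancels the sub-second remainder
        return date + dt.timedelta(0, rounding - seconds, -date.microsecond)

    # 03:07:31.123456 is 7m31s past 03:00 but only 7m29s before 03:15,
    # so it rounds up and lands exactly on the nominal slot:
    assert round_nom_time(
        dt.datetime(2018, 10, 22, 3, 7, 31, 123456),
        time_delta=dt.timedelta(minutes=15),
    ) == dt.datetime(2018, 10, 22, 3, 15)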
"""SEVIRI netcdf format reader.""" -import datetime +import datetime as dt import logging -from datetime import timedelta import numpy as np @@ -67,7 +66,7 @@ def __init__(self, filename, filename_info, filetype_info, self.ext_calib_coefs = ext_calib_coefs or {} self.mask_bad_quality_scan_lines = mask_bad_quality_scan_lines self.mda = {} - self.reference = datetime.datetime(1958, 1, 1) + self.reference = dt.datetime(1958, 1, 1) self.get_metadata() @property @@ -82,13 +81,13 @@ def _repeat_cycle_duration(self): def nominal_start_time(self): """Read the repeat cycle nominal start time from metadata and round it to expected nominal time slot.""" tm = self.deltaSt - return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) + return round_nom_time(tm, time_delta=dt.timedelta(minutes=self._repeat_cycle_duration)) @property def nominal_end_time(self): """Read the repeat cycle nominal end time from metadata and round it to expected nominal time slot.""" tm = self.deltaEnd - return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) + return round_nom_time(tm, time_delta=dt.timedelta(minutes=self._repeat_cycle_duration)) @property def observation_start_time(self): @@ -146,11 +145,11 @@ def get_metadata(self): # self.mda['hrv_number_of_lines'] = int(self.nc.dims['num_rows_hrv']) # self.mda['hrv_number_of_columns'] = int(self.nc.dims['num_columns_hrv']) - self.deltaSt = self.reference + datetime.timedelta( + self.deltaSt = self.reference + dt.timedelta( days=int(self.nc.attrs["true_repeat_cycle_start_day"]), milliseconds=int(self.nc.attrs["true_repeat_cycle_start_mi_sec"])) - self.deltaEnd = self.reference + datetime.timedelta( + self.deltaEnd = self.reference + dt.timedelta( days=int(self.nc.attrs["planned_repeat_cycle_end_day"]), milliseconds=int(self.nc.attrs["planned_repeat_cycle_end_mi_sec"])) diff --git a/satpy/readers/seviri_l2_bufr.py b/satpy/readers/seviri_l2_bufr.py index 02aa0c2767..a48a7e00d6 100644 --- a/satpy/readers/seviri_l2_bufr.py +++ b/satpy/readers/seviri_l2_bufr.py @@ -23,8 +23,9 @@ https://navigator.eumetsat.int/ """ + +import datetime as dt import logging -from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -95,7 +96,7 @@ def __init__(self, filename, filename_info, filetype_info, with_area_definition= else: # Product was retrieved from the EUMETSAT Data Center timeStr = self.get_attribute("typicalDate")+self.get_attribute("typicalTime") - buf_start_time = datetime.strptime(timeStr, "%Y%m%d%H%M%S") + buf_start_time = dt.datetime.strptime(timeStr, "%Y%m%d%H%M%S") sc_id = self.get_attribute("satelliteIdentifier") self.mpef_header = {} self.mpef_header["NominalTime"] = buf_start_time @@ -120,7 +121,7 @@ def start_time(self): @property def end_time(self): """Return the repeat cycle end time.""" - return self.start_time + timedelta(minutes=15) + return self.start_time + dt.timedelta(minutes=15) @property def platform_name(self): diff --git a/satpy/readers/seviri_l2_grib.py b/satpy/readers/seviri_l2_grib.py index b69c60e7ac..d178d6b716 100644 --- a/satpy/readers/seviri_l2_grib.py +++ b/satpy/readers/seviri_l2_grib.py @@ -22,8 +22,8 @@ https://navigator.eumetsat.int/ """ +import datetime as dt import logging -from datetime import timedelta import dask.array as da import numpy as np @@ -62,7 +62,7 @@ def start_time(self): @property def end_time(self): """Return the sensing end time.""" - return self.start_time + timedelta(minutes=REPEAT_CYCLE_DURATION) + return self.start_time + 
dt.timedelta(minutes=REPEAT_CYCLE_DURATION) def get_area_def(self, dataset_id): """Return the area definition for a dataset.""" diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py index 1e2a64f783..079f93d2f3 100644 --- a/satpy/readers/sgli_l1b.py +++ b/satpy/readers/sgli_l1b.py @@ -13,6 +13,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """GCOM-C SGLI L1b reader. GCOM-C has an imager instrument: SGLI @@ -27,8 +28,8 @@ """ +import datetime as dt import logging -from datetime import datetime import dask.array as da import h5py @@ -63,13 +64,13 @@ def __init__(self, filename, filename_info, filetype_info): def start_time(self): """Get the start time.""" the_time = self.h5file["Global_attributes"].attrs["Scene_start_time"].item() - return datetime.strptime(the_time.decode("ascii"), "%Y%m%d %H:%M:%S.%f") + return dt.datetime.strptime(the_time.decode("ascii"), "%Y%m%d %H:%M:%S.%f") @property def end_time(self): """Get the end time.""" the_time = self.h5file["Global_attributes"].attrs["Scene_end_time"].item() - return datetime.strptime(the_time.decode("ascii"), "%Y%m%d %H:%M:%S.%f") + return dt.datetime.strptime(the_time.decode("ascii"), "%Y%m%d %H:%M:%S.%f") def get_dataset(self, key, info): """Get the dataset from the file.""" diff --git a/satpy/readers/slstr_l1b.py b/satpy/readers/slstr_l1b.py index 02aae9f72b..3353ade4d3 100644 --- a/satpy/readers/slstr_l1b.py +++ b/satpy/readers/slstr_l1b.py @@ -15,13 +15,14 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """SLSTR L1b reader.""" +import datetime as dt import logging import os import re import warnings -from datetime import datetime import dask.array as da import numpy as np @@ -95,12 +96,12 @@ def get_dataset(self, key, info): @property def start_time(self): """Get the start time.""" - return datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get the end time.""" - return datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") class NCSLSTR1B(BaseFileHandler): @@ -224,12 +225,12 @@ def get_dataset(self, key, info): @property def start_time(self): """Get the start time.""" - return datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get the end time.""" - return datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") class NCSLSTRAngles(BaseFileHandler): @@ -326,12 +327,12 @@ def get_dataset(self, key, info): @property def start_time(self): """Get the start time.""" - return datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get the end time.""" - return datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") class NCSLSTRFlag(BaseFileHandler): @@ -376,9 +377,9 @@ def get_dataset(self, key, info): @property def start_time(self): """Get the start time.""" - return datetime.strptime(self.nc.attrs["start_time"], 
"%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get the end time.""" - return datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") diff --git a/satpy/readers/smos_l2_wind.py b/satpy/readers/smos_l2_wind.py index 4a909ee2e4..c4d349ea67 100644 --- a/satpy/readers/smos_l2_wind.py +++ b/satpy/readers/smos_l2_wind.py @@ -24,8 +24,8 @@ SMOS_WIND_DS_PDD_20191107_signed.pdf """ +import datetime as dt import logging -from datetime import datetime import numpy as np from pyresample.geometry import AreaDefinition @@ -41,12 +41,12 @@ class SMOSL2WINDFileHandler(NetCDF4FileHandler): @property def start_time(self): """Get start time.""" - return datetime.strptime(self["/attr/time_coverage_start"], "%Y-%m-%dT%H:%M:%S Z") + return dt.datetime.strptime(self["/attr/time_coverage_start"], "%Y-%m-%dT%H:%M:%S Z") @property def end_time(self): """Get end time.""" - return datetime.strptime(self["/attr/time_coverage_end"], "%Y-%m-%dT%H:%M:%S Z") + return dt.datetime.strptime(self["/attr/time_coverage_end"], "%Y-%m-%dT%H:%M:%S Z") @property def platform_shortname(self): diff --git a/satpy/readers/tropomi_l2.py b/satpy/readers/tropomi_l2.py index 768ca70948..2d571e2f12 100644 --- a/satpy/readers/tropomi_l2.py +++ b/satpy/readers/tropomi_l2.py @@ -29,8 +29,8 @@ """ +import datetime as dt import logging -from datetime import datetime import dask.array as da import numpy as np @@ -65,12 +65,12 @@ def platform_shortname(self): @property def time_coverage_start(self): """Get time_coverage_start.""" - return datetime.strptime(self["/attr/time_coverage_start"], DATE_FMT) + return dt.datetime.strptime(self["/attr/time_coverage_start"], DATE_FMT) @property def time_coverage_end(self): """Get time_coverage_end.""" - return datetime.strptime(self["/attr/time_coverage_end"], DATE_FMT) + return dt.datetime.strptime(self["/attr/time_coverage_end"], DATE_FMT) @property def sensor(self): diff --git a/satpy/readers/vii_base_nc.py b/satpy/readers/vii_base_nc.py index 83056189dc..07c5f6749a 100644 --- a/satpy/readers/vii_base_nc.py +++ b/satpy/readers/vii_base_nc.py @@ -19,8 +19,8 @@ """EUMETSAT EPS-SG Visible/Infrared Imager (VII) readers base class.""" +import datetime as dt import logging -from datetime import datetime from geotiepoints.viiinterpolator import tie_points_geo_interpolation, tie_points_interpolation @@ -213,18 +213,18 @@ def _get_global_attributes(self): def start_time(self): """Get observation start time.""" try: - start_time = datetime.strptime(self["/attr/sensing_start_time_utc"], "%Y%m%d%H%M%S.%f") + start_time = dt.datetime.strptime(self["/attr/sensing_start_time_utc"], "%Y%m%d%H%M%S.%f") except ValueError: - start_time = datetime.strptime(self["/attr/sensing_start_time_utc"], "%Y-%m-%d %H:%M:%S.%f") + start_time = dt.datetime.strptime(self["/attr/sensing_start_time_utc"], "%Y-%m-%d %H:%M:%S.%f") return start_time @property def end_time(self): """Get observation end time.""" try: - end_time = datetime.strptime(self["/attr/sensing_end_time_utc"], "%Y%m%d%H%M%S.%f") + end_time = dt.datetime.strptime(self["/attr/sensing_end_time_utc"], "%Y%m%d%H%M%S.%f") except ValueError: - end_time = datetime.strptime(self["/attr/sensing_end_time_utc"], "%Y-%m-%d %H:%M:%S.%f") + end_time = dt.datetime.strptime(self["/attr/sensing_end_time_utc"], "%Y-%m-%d %H:%M:%S.%f") return end_time @property diff --git 
a/satpy/readers/viirs_atms_sdr_base.py b/satpy/readers/viirs_atms_sdr_base.py index 159a84a070..b55368e92b 100644 --- a/satpy/readers/viirs_atms_sdr_base.py +++ b/satpy/readers/viirs_atms_sdr_base.py @@ -18,8 +18,8 @@ """Common utilities for reading VIIRS and ATMS SDR data.""" +import datetime as dt import logging -from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -27,8 +27,8 @@ from satpy.readers.hdf5_utils import HDF5FileHandler -NO_DATE = datetime(1958, 1, 1) -EPSILON_TIME = timedelta(days=2) +NO_DATE = dt.datetime(1958, 1, 1) +EPSILON_TIME = dt.timedelta(days=2) LOG = logging.getLogger(__name__) @@ -106,7 +106,7 @@ def _parse_datetime(self, datestr, timestr): timestr = str(timestr.data.compute().astype(str)) datetime_str = datestr + timestr - time_val = datetime.strptime(datetime_str, "%Y%m%d%H%M%S.%fZ") + time_val = dt.datetime.strptime(datetime_str, "%Y%m%d%H%M%S.%fZ") if abs(time_val - NO_DATE) < EPSILON_TIME: # catch rare case when SDR files have incorrect date raise ValueError("Datetime invalid {}".format(time_val)) diff --git a/satpy/readers/viirs_compact.py b/satpy/readers/viirs_compact.py index af3a4ce766..bb3bd83b71 100644 --- a/satpy/readers/viirs_compact.py +++ b/satpy/readers/viirs_compact.py @@ -29,9 +29,9 @@ """ +import datetime as dt import logging from contextlib import suppress -from datetime import datetime, timedelta import dask.array as da import h5py @@ -173,10 +173,10 @@ def start_time(self): @property def end_time(self): """Get the end time.""" - end_time = datetime.combine(self.start_time.date(), + end_time = dt.datetime.combine(self.start_time.date(), self.finfo["end_time"].time()) if end_time < self.start_time: - end_time += timedelta(days=1) + end_time += dt.timedelta(days=1) return end_time def read_geo(self, key, info): diff --git a/satpy/readers/viirs_l1b.py b/satpy/readers/viirs_l1b.py index 510a37165d..7dd3079dbb 100644 --- a/satpy/readers/viirs_l1b.py +++ b/satpy/readers/viirs_l1b.py @@ -17,8 +17,8 @@ # satpy. If not, see . """Interface to VIIRS L1B format.""" +import datetime as dt import logging -from datetime import datetime import numpy as np @@ -32,7 +32,7 @@ class VIIRSL1BFileHandler(NetCDF4FileHandler): def _parse_datetime(self, datestr): """Parse datetime.""" - return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%S.000Z") + return dt.datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%S.000Z") @property def start_orbit_number(self): diff --git a/satpy/readers/viirs_l2.py b/satpy/readers/viirs_l2.py index 9277620320..7a54b3e10c 100644 --- a/satpy/readers/viirs_l2.py +++ b/satpy/readers/viirs_l2.py @@ -9,8 +9,9 @@ 3. Cloud Top Height 4. 
Deep Blue Aerosol Optical Thickness (Land and Ocean) """ + +import datetime as dt import logging -from datetime import datetime import numpy as np @@ -23,7 +24,7 @@ class VIIRSL2FileHandler(NetCDF4FileHandler): """NetCDF File Handler for VIIRS L2 Products.""" def _parse_datetime(self, datestr): """Parse datetime.""" - return datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%S.000Z") + return dt.datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%S.000Z") @property def start_time(self): diff --git a/satpy/readers/viirs_sdr.py b/satpy/readers/viirs_sdr.py index eef02f7777..28854b185d 100644 --- a/satpy/readers/viirs_sdr.py +++ b/satpy/readers/viirs_sdr.py @@ -28,10 +28,11 @@ - http://npp.gsfc.nasa.gov/science/sciencedocuments/082012/474-00001-03_CDFCBVolIII_RevC.pdf """ + +import datetime as dt import logging import os.path from contextlib import suppress -from datetime import datetime, timedelta from glob import glob import numpy as np @@ -39,8 +40,8 @@ from satpy.readers.viirs_atms_sdr_base import ATMS_DATASET_KEYS, DATASET_KEYS, VIIRS_DATASET_KEYS, JPSS_SDR_FileHandler from satpy.readers.yaml_reader import FileYAMLReader -NO_DATE = datetime(1958, 1, 1) -EPSILON_TIME = timedelta(days=2) +NO_DATE = dt.datetime(1958, 1, 1) +EPSILON_TIME = dt.timedelta(days=2) LOG = logging.getLogger(__name__) diff --git a/satpy/readers/viirs_vgac_l1c_nc.py b/satpy/readers/viirs_vgac_l1c_nc.py index 578e19bed3..2f43ffd2a2 100644 --- a/satpy/readers/viirs_vgac_l1c_nc.py +++ b/satpy/readers/viirs_vgac_l1c_nc.py @@ -15,8 +15,8 @@ # satpy. If not, see . """Reading VIIRS VGAC data.""" +import datetime as dt import logging -from datetime import datetime import numpy as np import xarray as xr @@ -68,20 +68,20 @@ def fix_radiances_not_in_percent(self, data): def set_time_attrs(self, data): """Set time from attributes.""" if "StartTime" in data.attrs: - data.attrs["start_time"] = datetime.strptime(data.attrs["StartTime"], "%Y-%m-%dT%H:%M:%S") - data.attrs["end_time"] = datetime.strptime(data.attrs["EndTime"], "%Y-%m-%dT%H:%M:%S") + data.attrs["start_time"] = dt.datetime.strptime(data.attrs["StartTime"], "%Y-%m-%dT%H:%M:%S") + data.attrs["end_time"] = dt.datetime.strptime(data.attrs["EndTime"], "%Y-%m-%dT%H:%M:%S") self._end_time = data.attrs["end_time"] self._start_time = data.attrs["start_time"] def dt64_to_datetime(self, dt64): """Conversion of numpy.datetime64 to datetime objects.""" if isinstance(dt64, np.datetime64): - return dt64.astype(datetime) + return dt64.astype(dt.datetime) return dt64 def extract_time_data(self, data, nc): """Decode time data.""" - reference_time = np.datetime64(datetime.strptime(nc["proj_time0"].attrs["units"], + reference_time = np.datetime64(dt.datetime.strptime(nc["proj_time0"].attrs["units"], "days since %d/%m/%YT%H:%M:%S")) delta_part_of_day, delta_full_days = np.modf(nc["proj_time0"].values) delta_full_days = np.timedelta64(delta_full_days.astype(np.int64), "D").astype("timedelta64[us]") diff --git a/satpy/readers/virr_l1b.py b/satpy/readers/virr_l1b.py index 260666ff8b..23e0339c93 100644 --- a/satpy/readers/virr_l1b.py +++ b/satpy/readers/virr_l1b.py @@ -40,8 +40,8 @@ """ +import datetime as dt import logging -from datetime import datetime import dask.array as da import numpy as np @@ -162,10 +162,10 @@ def _correct_slope(self, slope): def start_time(self): """Get starting observation time.""" start_time = self["/attr/Observing Beginning Date"] + "T" + self["/attr/Observing Beginning Time"] + "Z" - return datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ") + return 
dt.datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get ending observation time.""" end_time = self["/attr/Observing Ending Date"] + "T" + self["/attr/Observing Ending Time"] + "Z" - return datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S.%fZ") + return dt.datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S.%fZ") diff --git a/satpy/tests/cf_tests/test_decoding.py b/satpy/tests/cf_tests/test_decoding.py index c20cddf6da..51c1bfecaf 100644 --- a/satpy/tests/cf_tests/test_decoding.py +++ b/satpy/tests/cf_tests/test_decoding.py @@ -15,8 +15,10 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Tests for CF decoding.""" -from datetime import datetime + +import datetime as dt import pytest @@ -46,11 +48,11 @@ def expected(self): "my_integer": 0, "my_float": 0.0, "my_list": [1, 2, 3], - "my_timestamp1": datetime(2000, 1, 1), - "my_timestamp2": datetime(2000, 1, 1, 12, 15, 33), - "my_timestamp3": datetime(2000, 1, 1, 12, 15, 33, 123456), + "my_timestamp1": dt.datetime(2000, 1, 1), + "my_timestamp2": dt.datetime(2000, 1, 1, 12, 15, 33), + "my_timestamp3": dt.datetime(2000, 1, 1, 12, 15, 33, 123456), "my_dict": {"a": {"b": [1, 2, 3]}, - "c": {"d": datetime(2000, 1, 1, 12, 15, 33, 123456)}} + "c": {"d": dt.datetime(2000, 1, 1, 12, 15, 33, 123456)}} } def test_decoding(self, attrs, expected): diff --git a/satpy/tests/compositor_tests/test_viirs.py b/satpy/tests/compositor_tests/test_viirs.py index 1641e4248b..95ff3e0d39 100644 --- a/satpy/tests/compositor_tests/test_viirs.py +++ b/satpy/tests/compositor_tests/test_viirs.py @@ -15,9 +15,10 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Tests for VIIRS compositors.""" -from datetime import datetime +import datetime as dt import dask.array as da import numpy as np @@ -52,7 +53,7 @@ def dnb(self, area): c01 = xr.DataArray(dnb, dims=("y", "x"), attrs={"name": "DNB", "area": area, - "start_time": datetime(2020, 1, 1, 12, 0, 0)}) + "start_time": dt.datetime(2020, 1, 1, 12, 0, 0)}) return c01 @pytest.fixture() @@ -66,7 +67,7 @@ def sza(self, area): c02 = xr.DataArray(sza, dims=("y", "x"), attrs={"name": "solar_zenith_angle", "area": area, - "start_time": datetime(2020, 1, 1, 12, 0, 0)}) + "start_time": dt.datetime(2020, 1, 1, 12, 0, 0)}) return c02 @pytest.fixture() @@ -79,7 +80,7 @@ def lza(self, area): c03 = xr.DataArray(lza, dims=("y", "x"), attrs={"name": "lunar_zenith_angle", "area": area, - "start_time": datetime(2020, 1, 1, 12, 0, 0) + "start_time": dt.datetime(2020, 1, 1, 12, 0, 0) }) return c03 diff --git a/satpy/tests/features/steps/steps-load.py b/satpy/tests/features/steps/steps-load.py index 7e2d1829a2..d83ac24754 100644 --- a/satpy/tests/features/steps/steps-load.py +++ b/satpy/tests/features/steps/steps-load.py @@ -45,13 +45,13 @@ def step_impl_data_available(context): @when(u"user loads the data without providing a config file") def step_impl_user_loads_no_config(context): """Load the data without a config.""" - from datetime import datetime + import datetime as dt from satpy import Scene, find_files_and_readers os.chdir("/tmp/") readers_files = find_files_and_readers(sensor="viirs", - start_time=datetime(2015, 3, 11, 11, 20), - end_time=datetime(2015, 3, 11, 11, 26)) + start_time=dt.datetime(2015, 3, 11, 11, 20), + end_time=dt.datetime(2015, 3, 11, 11, 26)) scn = Scene(filenames=readers_files) scn.load(["M02"]) context.scene = scn @@ -73,13 +73,13 @@ def 
step_impl_items_not_available(context): @when(u"user wants to know what data is available") def step_impl_user_checks_availability(context): """Check availability.""" - from datetime import datetime + import datetime as dt from satpy import Scene, find_files_and_readers os.chdir("/tmp/") reader_files = find_files_and_readers(sensor="viirs", - start_time=datetime(2015, 3, 11, 11, 20), - end_time=datetime(2015, 3, 11, 11, 26)) + start_time=dt.datetime(2015, 3, 11, 11, 20), + end_time=dt.datetime(2015, 3, 11, 11, 26)) scn = Scene(filenames=reader_files) context.available_dataset_ids = scn.available_dataset_ids() diff --git a/satpy/tests/modifier_tests/test_angles.py b/satpy/tests/modifier_tests/test_angles.py index ecf0805ca8..b4f5430436 100644 --- a/satpy/tests/modifier_tests/test_angles.py +++ b/satpy/tests/modifier_tests/test_angles.py @@ -14,10 +14,11 @@ # A PARTICULAR PURPOSE. See the GNU General Public License for more details. """Tests for the angles in modifiers.""" + import contextlib +import datetime as dt import warnings from copy import deepcopy -from datetime import datetime, timedelta from glob import glob from typing import Optional, Union from unittest import mock @@ -74,7 +75,7 @@ def _get_angle_test_data(area_def: Optional[Union[AreaDefinition, StackedAreaDef "satellite_nominal_longitude": 10.0, "satellite_nominal_latitude": 0.0, } - stime = datetime(2020, 1, 1, 12, 0, 0) + stime = dt.datetime(2020, 1, 1, 12, 0, 0) data = da.zeros(shape, chunks=chunks) vis = xr.DataArray(data, dims=dims, @@ -113,7 +114,7 @@ def _similar_sat_pos_datetime(orig_data, lon_offset=0.04): new_data = orig_data.copy() old_lon = new_data.attrs["orbital_parameters"]["satellite_nominal_longitude"] new_data.attrs["orbital_parameters"]["satellite_nominal_longitude"] = old_lon + lon_offset - new_data.attrs["start_time"] = new_data.attrs["start_time"] + timedelta(hours=36) + new_data.attrs["start_time"] = new_data.attrs["start_time"] + dt.timedelta(hours=36) return new_data @@ -372,15 +373,13 @@ def test_relative_azimuth_calculation(self): def test_solazi_correction(self): """Test that solar azimuth angles are corrected into the right range.""" - from datetime import datetime - from satpy.modifiers.angles import _get_sun_azimuth_ndarray lats = np.array([-80, 40, 0, 40, 80]) lons = np.array([-80, 40, 0, 40, 80]) - dt = datetime(2022, 1, 5, 12, 50, 0) + date = dt.datetime(2022, 1, 5, 12, 50, 0) - azi = _get_sun_azimuth_ndarray(lats, lons, dt) + azi = _get_sun_azimuth_ndarray(lats, lons, date) assert np.all(azi > 0) diff --git a/satpy/tests/modifier_tests/test_crefl.py b/satpy/tests/modifier_tests/test_crefl.py index dc9f4a232a..27c9847030 100644 --- a/satpy/tests/modifier_tests/test_crefl.py +++ b/satpy/tests/modifier_tests/test_crefl.py @@ -13,8 +13,8 @@ # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
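The local renames scattered through these hunks (dt to date in round_nom_time, in test_angles above and, below, in test_acspo) are not cosmetic. Once `import datetime as dt` is the house style, any local named dt shadows the module alias, and the next dt.datetime call in that scope stops meaning what it says. A toy illustration of the failure mode (both function names made up):

    import datetime as dt

    def broken(timestamps):
        for dt in timestamps:      # assigning to dt makes it local here,
            pass                   # hiding the module alias...
        return dt.datetime.now()   # ...so this looks up .datetime on the
                                   # last timestamp: AttributeError

    def fixed(timestamps):
        for date in timestamps:    # locals named date/stamp/time_val keep
            pass                   # the alias reachable
        return dt.datetime.now()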
"""Tests for the CREFL ReflectanceCorrector modifier.""" +import datetime as dt from contextlib import contextmanager -from datetime import datetime from unittest import mock import numpy as np @@ -82,8 +82,8 @@ def _make_viirs_xarray(data, area, name, standard_name, wavelength=None, units=" "resolution": 371, "name": name, "standard_name": standard_name, "platform_name": "Suomi-NPP", "polarization": None, "sensor": "viirs", "units": units, - "start_time": datetime(2012, 2, 25, 18, 1, 24, 570942), - "end_time": datetime(2012, 2, 25, 18, 11, 21, 175760), "area": area, + "start_time": dt.datetime(2012, 2, 25, 18, 1, 24, 570942), + "end_time": dt.datetime(2012, 2, 25, 18, 11, 21, 175760), "area": area, "ancillary_variables": [] }) @@ -259,8 +259,8 @@ def test_reflectance_corrector_viirs(self, tmpdir, url, dem_mock_cm, dem_sds): assert res.attrs["platform_name"] == "Suomi-NPP" assert res.attrs["sensor"] == "viirs" assert res.attrs["units"] == "%" - assert res.attrs["start_time"] == datetime(2012, 2, 25, 18, 1, 24, 570942) - assert res.attrs["end_time"] == datetime(2012, 2, 25, 18, 11, 21, 175760) + assert res.attrs["start_time"] == dt.datetime(2012, 2, 25, 18, 1, 24, 570942) + assert res.attrs["end_time"] == dt.datetime(2012, 2, 25, 18, 11, 21, 175760) assert res.attrs["area"] == area assert res.attrs["ancillary_variables"] == [] data = res.values @@ -304,8 +304,8 @@ def make_xarray(name, calibration, wavelength=None, modifiers=None, resolution=1 "calibration": calibration, "resolution": resolution, "name": name, "coordinates": ["longitude", "latitude"], "platform_name": "EOS-Aqua", "polarization": None, "sensor": "modis", - "units": "%", "start_time": datetime(2012, 8, 13, 18, 46, 1, 439838), - "end_time": datetime(2012, 8, 13, 18, 57, 47, 746296), "area": area, + "units": "%", "start_time": dt.datetime(2012, 8, 13, 18, 46, 1, 439838), + "end_time": dt.datetime(2012, 8, 13, 18, 57, 47, 746296), "area": area, "ancillary_variables": [] }) @@ -327,8 +327,8 @@ def make_xarray(name, calibration, wavelength=None, modifiers=None, resolution=1 assert res.attrs["platform_name"] == "EOS-Aqua" assert res.attrs["sensor"] == "modis" assert res.attrs["units"] == "%" - assert res.attrs["start_time"] == datetime(2012, 8, 13, 18, 46, 1, 439838) - assert res.attrs["end_time"] == datetime(2012, 8, 13, 18, 57, 47, 746296) + assert res.attrs["start_time"] == dt.datetime(2012, 8, 13, 18, 46, 1, 439838) + assert res.attrs["end_time"] == dt.datetime(2012, 8, 13, 18, 57, 47, 746296) assert res.attrs["area"] == area assert res.attrs["ancillary_variables"] == [] data = res.values diff --git a/satpy/tests/multiscene_tests/test_blend.py b/satpy/tests/multiscene_tests/test_blend.py index c964501225..c003106dea 100644 --- a/satpy/tests/multiscene_tests/test_blend.py +++ b/satpy/tests/multiscene_tests/test_blend.py @@ -19,7 +19,7 @@ """Unit tests for blending datasets with the Multiscene object.""" -from datetime import datetime +import datetime as dt import dask.array as da import numpy as np @@ -101,8 +101,8 @@ def cloud_type_data_array1(test_area, data_type, image_mode): "satellite_nominal_longitude": 0.0, "satellite_nominal_latitude": 0, } - data_arr.attrs["start_time"] = datetime(2023, 1, 16, 11, 9, 17) - data_arr.attrs["end_time"] = datetime(2023, 1, 16, 11, 12, 22) + data_arr.attrs["start_time"] = dt.datetime(2023, 1, 16, 11, 9, 17) + data_arr.attrs["end_time"] = dt.datetime(2023, 1, 16, 11, 12, 22) data_arr.attrs["_satpy_id"] = dsid1 return data_arr @@ -127,8 +127,8 @@ def cloud_type_data_array2(test_area, 
data_type, image_mode): data_arr.attrs["sensor"] = {"avhrr-3"} data_arr.attrs["units"] = "1" data_arr.attrs["long_name"] = "SAFNWC PPS CT Cloud Type" - data_arr.attrs["start_time"] = datetime(2023, 1, 16, 11, 12, 57, 500000) - data_arr.attrs["end_time"] = datetime(2023, 1, 16, 11, 28, 1, 900000) + data_arr.attrs["start_time"] = dt.datetime(2023, 1, 16, 11, 12, 57, 500000) + data_arr.attrs["end_time"] = dt.datetime(2023, 1, 16, 11, 28, 1, 900000) data_arr.attrs["_satpy_id"] = dsid1 return data_arr @@ -152,8 +152,8 @@ def scene1_with_weights(cloud_type_data_array1, test_area): modifiers=() ) scene[dsid2] = _create_test_int8_dataset(name="geo-cma", area=test_area, values=2) - scene[dsid2].attrs["start_time"] = datetime(2023, 1, 16, 11, 9, 17) - scene[dsid2].attrs["end_time"] = datetime(2023, 1, 16, 11, 12, 22) + scene[dsid2].attrs["start_time"] = dt.datetime(2023, 1, 16, 11, 9, 17) + scene[dsid2].attrs["end_time"] = dt.datetime(2023, 1, 16, 11, 12, 22) wgt2 = _create_test_dataset(name="geo-cma-wgt", area=test_area, values=0) @@ -176,8 +176,8 @@ def scene2_with_weights(cloud_type_data_array2, test_area): modifiers=() ) scene[dsid2] = _create_test_int8_dataset(name="polar-cma", area=test_area, values=4) - scene[dsid2].attrs["start_time"] = datetime(2023, 1, 16, 11, 12, 57, 500000) - scene[dsid2].attrs["end_time"] = datetime(2023, 1, 16, 11, 28, 1, 900000) + scene[dsid2].attrs["start_time"] = dt.datetime(2023, 1, 16, 11, 12, 57, 500000) + scene[dsid2].attrs["end_time"] = dt.datetime(2023, 1, 16, 11, 28, 1, 900000) wgt2 = _create_test_dataset(name="polar-cma-wgt", area=test_area, values=1) return scene, [wgt1, wgt2] @@ -223,8 +223,8 @@ def test_blend_two_scenes_using_stack(self, multi_scene_and_weights, groups, xr.testing.assert_equal(result, expected.compute()) _check_stacked_metadata(result, "CloudType") - assert result.attrs["start_time"] == datetime(2023, 1, 16, 11, 9, 17) - assert result.attrs["end_time"] == datetime(2023, 1, 16, 11, 28, 1, 900000) + assert result.attrs["start_time"] == dt.datetime(2023, 1, 16, 11, 9, 17) + assert result.attrs["end_time"] == dt.datetime(2023, 1, 16, 11, 28, 1, 900000) def test_blend_two_scenes_bad_blend_type(self, multi_scene_and_weights, groups): """Test exception is raised when bad 'blend_type' is used.""" @@ -274,8 +274,8 @@ def test_blend_two_scenes_using_stack_weighted(self, multi_scene_and_weights, gr np.testing.assert_allclose(result.data, expected.data) _check_stacked_metadata(result, "CloudType") - assert result.attrs["start_time"] == datetime(2023, 1, 16, 11, 9, 17) - assert result.attrs["end_time"] == datetime(2023, 1, 16, 11, 28, 1, 900000) + assert result.attrs["start_time"] == dt.datetime(2023, 1, 16, 11, 9, 17) + assert result.attrs["end_time"] == dt.datetime(2023, 1, 16, 11, 28, 1, 900000) @pytest.fixture() def datasets_and_weights(self): @@ -286,23 +286,23 @@ def datasets_and_weights(self): shape[1], shape[0], [-200, -200, 200, 200]) ds1 = xr.DataArray(da.ones(shape, chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area}) + attrs={"start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "area": area}) ds2 = xr.DataArray(da.ones(shape, chunks=-1) * 2, dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1, 1, 0, 0), "area": area}) + attrs={"start_time": dt.datetime(2018, 1, 1, 1, 0, 0), "area": area}) ds3 = xr.DataArray(da.ones(shape, chunks=-1) * 3, dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1, 1, 0, 0), "area": area}) + attrs={"start_time": dt.datetime(2018, 1, 1, 1, 0, 0), "area": area}) 
ds4 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "time"), - attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area}) + attrs={"start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "area": area}) ds5 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "time"), - attrs={"start_time": datetime(2018, 1, 1, 1, 0, 0), "area": area}) + attrs={"start_time": dt.datetime(2018, 1, 1, 1, 0, 0), "area": area}) wgt1 = xr.DataArray(da.ones(shape, chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area}) + attrs={"start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "area": area}) wgt2 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area}) + attrs={"start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "area": area}) wgt3 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area}) + attrs={"start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "area": area}) datastruct = {"shape": shape, "area": area, @@ -392,9 +392,9 @@ class TestTemporalRGB: @pytest.fixture() def nominal_data(self): """Return the input arrays for the nominal use case.""" - da1 = xr.DataArray([1, 0, 0], attrs={"start_time": datetime(2023, 5, 22, 9, 0, 0)}) - da2 = xr.DataArray([0, 1, 0], attrs={"start_time": datetime(2023, 5, 22, 10, 0, 0)}) - da3 = xr.DataArray([0, 0, 1], attrs={"start_time": datetime(2023, 5, 22, 11, 0, 0)}) + da1 = xr.DataArray([1, 0, 0], attrs={"start_time": dt.datetime(2023, 5, 22, 9, 0, 0)}) + da2 = xr.DataArray([0, 1, 0], attrs={"start_time": dt.datetime(2023, 5, 22, 10, 0, 0)}) + da3 = xr.DataArray([0, 0, 1], attrs={"start_time": dt.datetime(2023, 5, 22, 11, 0, 0)}) return [da1, da2, da3] @@ -422,7 +422,7 @@ def test_extra_datasets(self, nominal_data, expected_result): """Test that only the first three arrays affect the usage.""" from satpy.multiscene import temporal_rgb - da4 = xr.DataArray([0, 0, 1], attrs={"start_time": datetime(2023, 5, 22, 12, 0, 0)}) + da4 = xr.DataArray([0, 0, 1], attrs={"start_time": dt.datetime(2023, 5, 22, 12, 0, 0)}) res = temporal_rgb(nominal_data + [da4,]) diff --git a/satpy/tests/multiscene_tests/test_save_animation.py b/satpy/tests/multiscene_tests/test_save_animation.py index 7ec1a53df8..67158c2334 100644 --- a/satpy/tests/multiscene_tests/test_save_animation.py +++ b/satpy/tests/multiscene_tests/test_save_animation.py @@ -15,17 +15,18 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
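One expectation in the test_blend.py hunks above is easy to miss: the stacked composite's time metadata spans the union of its inputs' coverage, so the asserted values are the earliest start_time (from the geo scene) and the latest end_time (from the polar scene). Spelled out with the exact values those tests use, the metadata combination amounts to plain min/max (this is a reading of the asserts, not code lifted from satpy):

    import datetime as dt

    geo = {"start_time": dt.datetime(2023, 1, 16, 11, 9, 17),
           "end_time": dt.datetime(2023, 1, 16, 11, 12, 22)}
    polar = {"start_time": dt.datetime(2023, 1, 16, 11, 12, 57, 500000),
             "end_time": dt.datetime(2023, 1, 16, 11, 28, 1, 900000)}

    blended_start = min(geo["start_time"], polar["start_time"])  # 11:09:17
    blended_end = max(geo["end_time"], polar["end_time"])        # 11:28:01.9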
+ """Unit tests for saving animations using Multiscene.""" # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path +import datetime as dt import os import shutil import tempfile import unittest -from datetime import datetime from unittest import mock import pytest @@ -63,12 +64,12 @@ def test_save_mp4_distributed(self): scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time for ds_id in ["ds1", "ds2", "ds3"]: - scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + scenes[1][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 2, 12) if ds_id == "ds3": continue - scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) fn = os.path.join( @@ -125,12 +126,12 @@ def test_save_mp4_no_distributed(self): scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time for ds_id in ["ds1", "ds2", "ds3"]: - scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + scenes[1][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 2, 12) if ds_id == "ds3": continue - scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) fn = os.path.join( @@ -165,12 +166,12 @@ def test_save_datasets_simple(self): scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time for ds_id in ["ds1", "ds2", "ds3"]: - scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + scenes[1][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 2, 12) if ds_id == "ds3": continue - scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) client_mock = mock.MagicMock() @@ -198,12 +199,12 @@ def test_save_datasets_distributed_delayed(self): scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time for ds_id in ["ds1", "ds2", "ds3"]: - scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + scenes[1][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 2, 12) if ds_id == "ds3": continue - scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) client_mock = mock.MagicMock() @@ -233,12 +234,12 @@ def test_save_datasets_distributed_source_target(self): scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time for ds_id 
in ["ds1", "ds2", "ds3"]: - scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + scenes[1][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 2, 12) if ds_id == "ds3": continue - scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) client_mock = mock.MagicMock() @@ -313,12 +314,12 @@ def test_save_mp4(smg, tmp_path): scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time for ds_id in ["ds1", "ds2", "ds3"]: - scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + scenes[1][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 2, 12) if ds_id == "ds3": continue - scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) fn = str(tmp_path / diff --git a/satpy/tests/reader_tests/_li_test_utils.py b/satpy/tests/reader_tests/_li_test_utils.py index 32107006fc..6b4d3f7629 100644 --- a/satpy/tests/reader_tests/_li_test_utils.py +++ b/satpy/tests/reader_tests/_li_test_utils.py @@ -14,7 +14,7 @@ # along with satpy. If not, see . """Common utility modules used for LI mock-oriented unit tests.""" -from datetime import datetime +import datetime as dt import numpy as np import xarray as xr @@ -127,8 +127,8 @@ def rand_u16(num): def l2_lef_schema(settings=None): """Define schema for LI L2 LEF product.""" - epoch_ts = datetime(2000, 1, 1, 0, 0, 0, 0) - start_time = datetime.now() + epoch_ts = dt.datetime(2000, 1, 1, 0, 0, 0, 0) + start_time = dt.datetime.now() start_ts = (start_time - epoch_ts).total_seconds() settings = settings or {} @@ -287,9 +287,9 @@ def l2_lfl_schema(settings=None): settings = settings or {} nobs = settings.get("num_obs", 1234) - epoch = datetime(2000, 1, 1) - stime = (datetime(2019, 1, 1) - epoch).total_seconds() - etime = (datetime(2019, 1, 2) - epoch).total_seconds() + epoch = dt.datetime(2000, 1, 1) + stime = (dt.datetime(2019, 1, 1) - epoch).total_seconds() + etime = (dt.datetime(2019, 1, 2) - epoch).total_seconds() return { "providers": settings.get("providers", {}), diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index 6dc4bf2d05..d663f7b9d9 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -15,10 +15,12 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """MODIS L1b and L2 test fixtures.""" + from __future__ import annotations -from datetime import datetime, timedelta +import datetime as dt from typing import Optional import numpy as np @@ -216,13 +218,13 @@ def _get_l1b_geo_variable_info(filename: str, def generate_nasa_l1b_filename(prefix): """Generate a filename that follows NASA MODIS L1b convention.""" - now = datetime.now() + now = dt.datetime.now() return f"{prefix}_A{now:%y%j_%H%M%S}_{now:%Y%j%H%M%S}.hdf" def generate_imapp_filename(suffix): """Generate a filename that follows IMAPP MODIS L1b convention.""" - now = datetime.now() + now = dt.datetime.now() return f"t1.{now:%y%j.%H%M}.{suffix}.hdf" @@ -275,8 +277,8 @@ def _add_variable_to_file(h, var_name, var_info): def _create_core_metadata(file_shortname: str) -> str: - beginning_date = datetime.now() - ending_date = beginning_date + timedelta(minutes=5) + beginning_date = dt.datetime.now() + ending_date = beginning_date + dt.timedelta(minutes=5) core_metadata_header = "GROUP = INVENTORYMETADATA\nGROUPTYPE = MASTERGROUP\n\n" \ 'GROUP = RANGEDATETIME\n\nOBJECT = RANGEBEGINNINGDATE\nNUM_VAL = 1\nVALUE = "{}"\n' \ "END_OBJECT = RANGEBEGINNINGDATE\n\nOBJECT = RANGEBEGINNINGTIME\n" \ @@ -593,7 +595,7 @@ def _get_mask_byte1_variable_info() -> dict: def generate_nasa_l2_filename(prefix: str) -> str: """Generate a file name that follows MODIS 35 L2 convention in a temporary directory.""" - now = datetime.now() + now = dt.datetime.now() return f"{prefix}_L2.A{now:%Y%j.%H%M}.061.{now:%Y%j%H%M%S}.hdf" @@ -614,7 +616,7 @@ def modis_l2_nasa_mod35_file(tmpdir_factory) -> list[str]: def generate_nasa_l3_filename(prefix: str) -> str: """Generate a file name that follows MODIS 09 L3 convention in a temporary directory.""" - now = datetime.now() + now = dt.datetime.now() return f"{prefix}.A{now:%Y%j}.061.{now:%Y%j%H%M%S}.hdf" diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index 969c497410..9755190318 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -15,10 +15,12 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """The abi_l1b reader tests package.""" + from __future__ import annotations -from datetime import datetime +import datetime as dt from pathlib import Path from typing import Any, Callable from unittest import mock @@ -372,8 +374,8 @@ def test_get_dataset(self, c01_data_arr): "timeline_ID": None, "suffix": "suffix", "units": "W m-2 um-1 sr-1", - "start_time": datetime(2017, 9, 20, 17, 30, 40, 800000), - "end_time": datetime(2017, 9, 20, 17, 41, 17, 500000), + "start_time": dt.datetime(2017, 9, 20, 17, 30, 40, 800000), + "end_time": dt.datetime(2017, 9, 20, 17, 41, 17, 500000), } res = c01_data_arr diff --git a/satpy/tests/reader_tests/test_abi_l2_nc.py b/satpy/tests/reader_tests/test_abi_l2_nc.py index 4b8d3a9578..98a050aa48 100644 --- a/satpy/tests/reader_tests/test_abi_l2_nc.py +++ b/satpy/tests/reader_tests/test_abi_l2_nc.py @@ -14,7 +14,9 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
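The filename generators in _modis_fixtures.py above lean on datetime format codes inside f-strings, where %j (zero-padded day of year) is the one that usually trips people up. A quick round-trip with a fixed timestamp so the output is checkable; the MOD021KM prefix is just an example value for the fixtures' prefix parameter:

    import datetime as dt

    now = dt.datetime(2024, 3, 1, 12, 34, 56)   # day-of-year 061 (leap year)

    # %y/%Y year, %j day of year, %H%M%S time of day
    name = f"MOD021KM_A{now:%y%j_%H%M%S}_{now:%Y%j%H%M%S}.hdf"
    assert name == "MOD021KM_A24061_123456_2024061123456.hdf"

    # the same codes parse back with strptime, cf. scmi's "%Y%j%H%M%S"
    assert dt.datetime.strptime(f"{now:%Y%j%H%M%S}", "%Y%j%H%M%S") == now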
+ """The abi_l2_nc reader tests package.""" + import contextlib from typing import Optional from unittest import mock @@ -151,7 +153,7 @@ class TestMCMIPReading: @mock.patch("satpy.readers.abi_base.xr") def test_mcmip_get_dataset(self, xr_, product, exp_metadata): """Test getting channel from MCMIP file.""" - from datetime import datetime + import datetime as dt from pyresample.geometry import AreaDefinition @@ -183,8 +185,8 @@ def test_mcmip_get_dataset(self, xr_, product, exp_metadata): "scene_id": None, "sensor": "abi", "timeline_ID": None, - "start_time": datetime(2017, 9, 20, 17, 30, 40, 800000), - "end_time": datetime(2017, 9, 20, 17, 41, 17, 500000), + "start_time": dt.datetime(2017, 9, 20, 17, 30, 40, 800000), + "end_time": dt.datetime(2017, 9, 20, 17, 41, 17, 500000), "ancillary_variables": [], } exp_attrs.update(exp_metadata) diff --git a/satpy/tests/reader_tests/test_acspo.py b/satpy/tests/reader_tests/test_acspo.py index 723d1dbecd..b85232bad4 100644 --- a/satpy/tests/reader_tests/test_acspo.py +++ b/satpy/tests/reader_tests/test_acspo.py @@ -17,8 +17,8 @@ # satpy. If not, see . """Module for testing the satpy.readers.acspo module.""" +import datetime as dt import os -from datetime import datetime, timedelta from unittest import mock import numpy as np @@ -43,7 +43,7 @@ class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" - dt = filename_info.get("start_time", datetime(2016, 1, 1, 12, 0, 0)) + date = filename_info.get("start_time", dt.datetime(2016, 1, 1, 12, 0, 0)) sat, inst = { "VIIRS_NPP": ("NPP", "VIIRS"), "VIIRS_N20": ("N20", "VIIRS"), @@ -53,8 +53,8 @@ def get_test_content(self, filename, filename_info, filetype_info): "/attr/platform": sat, "/attr/sensor": inst, "/attr/spatial_resolution": "742 m at nadir", - "/attr/time_coverage_start": dt.strftime("%Y%m%dT%H%M%SZ"), - "/attr/time_coverage_end": (dt + timedelta(minutes=6)).strftime("%Y%m%dT%H%M%SZ"), + "/attr/time_coverage_start": date.strftime("%Y%m%dT%H%M%SZ"), + "/attr/time_coverage_end": (date + dt.timedelta(minutes=6)).strftime("%Y%m%dT%H%M%SZ"), } file_content["lat"] = DEFAULT_LAT_DATA diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 393afca1c8..fbb0857734 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -15,13 +15,15 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """The ahi_hsd reader tests package.""" + from __future__ import annotations import contextlib +import datetime as dt import unittest import warnings -from datetime import datetime from typing import Any, Dict from unittest import mock @@ -340,10 +342,10 @@ def test_read_band(self, calibrate, *mocks): np.testing.assert_allclose(value, actual_obs_params[key]) time_params_exp = { - "nominal_start_time": datetime(2018, 10, 22, 3, 0, 0, 0), - "nominal_end_time": datetime(2018, 10, 22, 3, 10, 0, 0), - "observation_start_time": datetime(2018, 10, 22, 3, 0, 20, 596896), - "observation_end_time": datetime(2018, 10, 22, 3, 10, 20, 596896), + "nominal_start_time": dt.datetime(2018, 10, 22, 3, 0, 0, 0), + "nominal_end_time": dt.datetime(2018, 10, 22, 3, 10, 0, 0), + "observation_start_time": dt.datetime(2018, 10, 22, 3, 0, 20, 596896), + "observation_end_time": dt.datetime(2018, 10, 22, 3, 10, 20, 596896), } actual_time_params = im.attrs["time_parameters"] for key, value in time_params_exp.items(): @@ -416,12 +418,12 @@ def test_scene_loading(self, calibrate, *mocks): def test_time_properties(self): """Test start/end/scheduled time properties.""" with _fake_hsd_handler() as fh: - assert fh.start_time == datetime(2018, 10, 22, 3, 0) - assert fh.end_time == datetime(2018, 10, 22, 3, 10) - assert fh.observation_start_time == datetime(2018, 10, 22, 3, 0, 20, 596896) - assert fh.observation_end_time == datetime(2018, 10, 22, 3, 10, 20, 596896) - assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 0, 0, 0) - assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 10, 0, 0) + assert fh.start_time == dt.datetime(2018, 10, 22, 3, 0) + assert fh.end_time == dt.datetime(2018, 10, 22, 3, 10) + assert fh.observation_start_time == dt.datetime(2018, 10, 22, 3, 0, 20, 596896) + assert fh.observation_end_time == dt.datetime(2018, 10, 22, 3, 10, 20, 596896) + assert fh.nominal_start_time == dt.datetime(2018, 10, 22, 3, 0, 0, 0) + assert fh.nominal_end_time == dt.datetime(2018, 10, 22, 3, 10, 0, 0) def test_blocklen_error(self, *mocks): """Test erraneous blocklength.""" @@ -639,14 +641,14 @@ class TestNominalTimeCalculator: @pytest.mark.parametrize( ("timeline", "expected"), [ - ("0300", datetime(2020, 1, 1, 3, 0, 0)), - ("65526", datetime(2020, 1, 1, 12, 0, 0)) + ("0300", dt.datetime(2020, 1, 1, 3, 0, 0)), + ("65526", dt.datetime(2020, 1, 1, 12, 0, 0)) ] ) def test_invalid_timeline(self, timeline, expected): """Test handling of invalid timeline.""" calc = _NominalTimeCalculator(timeline, "FLDK") - res = calc.get_nominal_start_time(datetime(2020, 1, 1, 12, 0, 0)) + res = calc.get_nominal_start_time(dt.datetime(2020, 1, 1, 12, 0, 0)) assert res == expected @pytest.mark.parametrize( @@ -654,49 +656,49 @@ def test_invalid_timeline(self, timeline, expected): [ ( "JP01", - {"tstart": datetime(2018, 10, 22, 3, 0, 0), - "tend": datetime(2018, 10, 22, 3, 2, 30)} + {"tstart": dt.datetime(2018, 10, 22, 3, 0, 0), + "tend": dt.datetime(2018, 10, 22, 3, 2, 30)} ), ( "JP04", - {"tstart": datetime(2018, 10, 22, 3, 7, 30, 0), - "tend": datetime(2018, 10, 22, 3, 10, 0, 0)} + {"tstart": dt.datetime(2018, 10, 22, 3, 7, 30, 0), + "tend": dt.datetime(2018, 10, 22, 3, 10, 0, 0)} ), ( "R301", - {"tstart": datetime(2018, 10, 22, 3, 0, 0), - "tend": datetime(2018, 10, 22, 3, 2, 30)} + {"tstart": dt.datetime(2018, 10, 22, 3, 0, 0), + "tend": dt.datetime(2018, 10, 22, 3, 2, 30)} ), ( "R304", - {"tstart": datetime(2018, 10, 22, 3, 7, 30, 0), - "tend": datetime(2018, 10, 22, 3, 10, 0, 0)} + {"tstart": dt.datetime(2018, 10, 22, 3, 7, 30, 
0), + "tend": dt.datetime(2018, 10, 22, 3, 10, 0, 0)} ), ( "R401", - {"tstart": datetime(2018, 10, 22, 3, 0, 0), - "tend": datetime(2018, 10, 22, 3, 0, 30)} + {"tstart": dt.datetime(2018, 10, 22, 3, 0, 0), + "tend": dt.datetime(2018, 10, 22, 3, 0, 30)} ), ( "R420", - {"tstart": datetime(2018, 10, 22, 3, 9, 30, 0), - "tend": datetime(2018, 10, 22, 3, 10, 0, 0)} + {"tstart": dt.datetime(2018, 10, 22, 3, 9, 30, 0), + "tend": dt.datetime(2018, 10, 22, 3, 10, 0, 0)} ), ( "R501", - {"tstart": datetime(2018, 10, 22, 3, 0, 0), - "tend": datetime(2018, 10, 22, 3, 0, 30)} + {"tstart": dt.datetime(2018, 10, 22, 3, 0, 0), + "tend": dt.datetime(2018, 10, 22, 3, 0, 30)} ), ( "R520", - {"tstart": datetime(2018, 10, 22, 3, 9, 30, 0), - "tend": datetime(2018, 10, 22, 3, 10, 0, 0)} + {"tstart": dt.datetime(2018, 10, 22, 3, 9, 30, 0), + "tend": dt.datetime(2018, 10, 22, 3, 10, 0, 0)} ), ] ) def test_areas(self, area, expected): """Test nominal timestamps for multiple areas.""" - obs_start_time = datetime(2018, 10, 22, 3, 0, 20, 596896) + obs_start_time = dt.datetime(2018, 10, 22, 3, 0, 20, 596896) calc = _NominalTimeCalculator("0300", area) nom_start_time = calc.get_nominal_start_time(obs_start_time) nom_end_time = calc.get_nominal_end_time(nom_start_time) @@ -708,27 +710,27 @@ def test_areas(self, area, expected): [ ( "2350", - datetime(2022, 12, 31, 23, 50, 1), - {"tstart": datetime(2022, 12, 31, 23, 50, 0), - "tend": datetime(2023, 1, 1, 0, 0, 0)} + dt.datetime(2022, 12, 31, 23, 50, 1), + {"tstart": dt.datetime(2022, 12, 31, 23, 50, 0), + "tend": dt.datetime(2023, 1, 1, 0, 0, 0)} ), ( "2350", - datetime(2022, 12, 31, 23, 49, 59), - {"tstart": datetime(2022, 12, 31, 23, 50, 0), - "tend": datetime(2023, 1, 1, 0, 0, 0)} + dt.datetime(2022, 12, 31, 23, 49, 59), + {"tstart": dt.datetime(2022, 12, 31, 23, 50, 0), + "tend": dt.datetime(2023, 1, 1, 0, 0, 0)} ), ( "0000", - datetime(2023, 1, 1, 0, 0, 1), - {"tstart": datetime(2023, 1, 1, 0, 0, 0), - "tend": datetime(2023, 1, 1, 0, 10, 0)} + dt.datetime(2023, 1, 1, 0, 0, 1), + {"tstart": dt.datetime(2023, 1, 1, 0, 0, 0), + "tend": dt.datetime(2023, 1, 1, 0, 10, 0)} ), ( "0000", - datetime(2022, 12, 31, 23, 59, 59), - {"tstart": datetime(2023, 1, 1, 0, 0, 0), - "tend": datetime(2023, 1, 1, 0, 10, 0)} + dt.datetime(2022, 12, 31, 23, 59, 59), + {"tstart": dt.datetime(2023, 1, 1, 0, 0, 0), + "tend": dt.datetime(2023, 1, 1, 0, 10, 0)} ), ] ) diff --git a/satpy/tests/reader_tests/test_ahi_l2_nc.py b/satpy/tests/reader_tests/test_ahi_l2_nc.py index 817738bb82..fcb1c34658 100644 --- a/satpy/tests/reader_tests/test_ahi_l2_nc.py +++ b/satpy/tests/reader_tests/test_ahi_l2_nc.py @@ -1,6 +1,6 @@ """Tests for the Himawari L2 netCDF reader.""" -from datetime import datetime +import datetime as dt import numpy as np import pytest @@ -15,8 +15,8 @@ lat_data = rng.uniform(-90, 90, (5500, 5500)) lon_data = rng.uniform(-180, 180, (5500, 5500)) -start_time = datetime(2023, 8, 24, 5, 40, 21) -end_time = datetime(2023, 8, 24, 5, 49, 40) +start_time = dt.datetime(2023, 8, 24, 5, 40, 21) +end_time = dt.datetime(2023, 8, 24, 5, 49, 40) dimensions = {"Columns": 5500, "Rows": 5500} diff --git a/satpy/tests/reader_tests/test_ami_l1b.py b/satpy/tests/reader_tests/test_ami_l1b.py index 7dd2cfcb33..6af6c1099f 100644 --- a/satpy/tests/reader_tests/test_ami_l1b.py +++ b/satpy/tests/reader_tests/test_ami_l1b.py @@ -173,9 +173,9 @@ def test_filename_grouping(self): def test_basic_attributes(self): """Test getting basic file attributes.""" - from datetime import datetime - assert 
self.reader.start_time == datetime(2019, 9, 30, 3, 0, 31, 957882)
-        assert self.reader.end_time == datetime(2019, 9, 30, 3, 9, 35, 606133)
+        import datetime as dt
+        assert self.reader.start_time == dt.datetime(2019, 9, 30, 3, 0, 31, 957882)
+        assert self.reader.end_time == dt.datetime(2019, 9, 30, 3, 9, 35, 606133)
 
     def test_get_dataset(self):
         """Test getting radiance data."""
diff --git a/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py b/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py
index 2f1b3ad7b0..d6e6597d69 100644
--- a/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py
+++ b/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py
@@ -17,8 +17,8 @@
 # satpy. If not, see .
 """Tests for the 'amsr2_l2_gaasp' reader."""
 
+import datetime as dt
 import os
-from datetime import datetime
 from unittest import mock
 
 import dask.array as da
@@ -259,8 +259,8 @@ def _check_attrs(data_arr):
     assert "add_offset" not in attrs
     assert attrs["platform_name"] == "GCOM-W1"
     assert attrs["sensor"] == "amsr2"
-    assert attrs["start_time"] == datetime(2020, 8, 12, 5, 58, 31)
-    assert attrs["end_time"] == datetime(2020, 8, 12, 6, 7, 1)
+    assert attrs["start_time"] == dt.datetime(2020, 8, 12, 5, 58, 31)
+    assert attrs["end_time"] == dt.datetime(2020, 8, 12, 6, 7, 1)
 
 @pytest.mark.parametrize(
     ("filenames", "loadable_ids"),
diff --git a/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py b/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py
index 07ed218e72..dc3e371b46 100644
--- a/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py
+++ b/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py
@@ -15,12 +15,13 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # satpy. If not, see .
+
 """Unittesting the ASCAT SCATTEROMETER SOIL MOISTURE BUFR reader."""
 
+import datetime as dt
 import os
 import sys
 import unittest
-from datetime import datetime
 
 import numpy as np
 
@@ -152,8 +153,8 @@ def test_scene(self):
         fname = os.path.join(self.base_dir, FILENAME)
         scn = Scene(reader="ascat_l2_soilmoisture_bufr", filenames=[fname])
         assert "scatterometer" in scn.sensor_names
-        assert datetime(2020, 12, 21, 9, 33, 0) == scn.start_time
-        assert datetime(2020, 12, 21, 9, 33, 59) == scn.end_time
+        assert dt.datetime(2020, 12, 21, 9, 33, 0) == scn.start_time
+        assert dt.datetime(2020, 12, 21, 9, 33, 59) == scn.end_time
 
     @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows")
     def test_scene_load_available_datasets(self):
diff --git a/satpy/tests/reader_tests/test_atms_l1b_nc.py b/satpy/tests/reader_tests/test_atms_l1b_nc.py
index 6b27081ed9..f1f729311a 100644
--- a/satpy/tests/reader_tests/test_atms_l1b_nc.py
+++ b/satpy/tests/reader_tests/test_atms_l1b_nc.py
@@ -12,9 +12,10 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with satpy. If not, see .
+ """The atms_l1b_nc reader tests package.""" -from datetime import datetime +import datetime as dt import numpy as np import pytest @@ -32,7 +33,7 @@ def reader(l1b_file): """Return reader of ATMS level1b data.""" return AtmsL1bNCFileHandler( filename=l1b_file, - filename_info={"creation_time": datetime(2020, 1, 2, 3, 4, 5)}, + filename_info={"creation_time": dt.datetime(2020, 1, 2, 3, 4, 5)}, filetype_info={"antenna_temperature": "antenna_temp"}, ) @@ -78,11 +79,11 @@ class TestAtsmsL1bNCFileHandler: def test_start_time(self, reader): """Test start time.""" - assert reader.start_time == datetime(2000, 1, 2, 3, 4, 5) + assert reader.start_time == dt.datetime(2000, 1, 2, 3, 4, 5) def test_end_time(self, reader): """Test end time.""" - assert reader.end_time == datetime(2000, 1, 2, 4, 5, 6) + assert reader.end_time == dt.datetime(2000, 1, 2, 4, 5, 6) def test_sensor(self, reader): """Test sensor.""" @@ -100,8 +101,8 @@ def test_antenna_temperature(self, reader, atms_fake_dataset): ) @pytest.mark.parametrize(("param", "expect"), [ - ("start_time", datetime(2000, 1, 2, 3, 4, 5)), - ("end_time", datetime(2000, 1, 2, 4, 5, 6)), + ("start_time", dt.datetime(2000, 1, 2, 3, 4, 5)), + ("end_time", dt.datetime(2000, 1, 2, 4, 5, 6)), ("platform_name", "JPSS-1"), ("sensor", "ATMS"), ]) @@ -135,11 +136,11 @@ def test_drop_coords(self, reader): assert coords not in data.coords @pytest.mark.parametrize(("param", "expect"), [ - ("start_time", datetime(2000, 1, 2, 3, 4, 5)), - ("end_time", datetime(2000, 1, 2, 4, 5, 6)), + ("start_time", dt.datetime(2000, 1, 2, 3, 4, 5)), + ("end_time", dt.datetime(2000, 1, 2, 4, 5, 6)), ("platform_name", "JPSS-1"), ("sensor", "ATMS"), - ("creation_time", datetime(2020, 1, 2, 3, 4, 5)), + ("creation_time", dt.datetime(2020, 1, 2, 3, 4, 5)), ("type", "test_data"), ("name", "test"), ]) diff --git a/satpy/tests/reader_tests/test_atms_sdr_hdf5.py b/satpy/tests/reader_tests/test_atms_sdr_hdf5.py index 8971c2d933..4fe6c120a1 100644 --- a/satpy/tests/reader_tests/test_atms_sdr_hdf5.py +++ b/satpy/tests/reader_tests/test_atms_sdr_hdf5.py @@ -18,8 +18,8 @@ """Module for testing the ATMS SDR HDF5 reader.""" +import datetime as dt import os -from datetime import datetime from unittest import mock import numpy as np @@ -288,8 +288,8 @@ def test_init_start_end_time(self): """Test basic init with start and end times around the start/end times of the provided file.""" r = load_reader(self.reader_configs, filter_parameters={ - "start_time": datetime(2022, 12, 19), - "end_time": datetime(2022, 12, 21) + "start_time": dt.datetime(2022, 12, 19), + "end_time": dt.datetime(2022, 12, 21) }) loadables = r.select_files_from_pathnames([ "SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5", diff --git a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py index 4f4e8e974a..3040a46750 100644 --- a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py +++ b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py @@ -15,9 +15,10 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """Pygac interface.""" -from datetime import date, datetime +import datetime as dt from unittest import TestCase, mock import dask.array as da @@ -190,7 +191,7 @@ def test_init_eosip(self): fh = self._get_eosip_fh(filename, **kwargs) assert fh.start_time < fh.end_time assert fh.reader_class is reader_cls - assert fh.reader_kwargs["header_date"] > date(1994, 11, 15) + assert fh.reader_kwargs["header_date"] > dt.date(1994, 11, 15) def test_read_raw_data(self): """Test raw data reading.""" @@ -456,8 +457,8 @@ def _slice_patched(data): data_slc, times_slc = fh.slice(data, times) np.testing.assert_array_equal(data_slc, data[1:3]) np.testing.assert_array_equal(times_slc, times[1:3]) - assert fh.start_time == datetime(1970, 1, 1, 0, 0, 0, 2) - assert fh.end_time == datetime(1970, 1, 1, 0, 0, 0, 3) + assert fh.start_time == dt.datetime(1970, 1, 1, 0, 0, 0, 2) + assert fh.end_time == dt.datetime(1970, 1, 1, 0, 0, 0, 3) @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_qual_flags") @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._strip_invalid_lat") diff --git a/satpy/tests/reader_tests/test_epic_l1b_h5.py b/satpy/tests/reader_tests/test_epic_l1b_h5.py index 472cda7f2d..18eedbad6d 100644 --- a/satpy/tests/reader_tests/test_epic_l1b_h5.py +++ b/satpy/tests/reader_tests/test_epic_l1b_h5.py @@ -89,11 +89,11 @@ def setup_method(self): def test_times(self, setup_hdf5_file): """Test start and end times load properly.""" - from datetime import datetime + import datetime as dt test_reader = self._setup_h5(setup_hdf5_file) - assert test_reader.start_time == datetime(2015, 6, 13, 12, 0, 37) - assert test_reader.end_time == datetime(2015, 6, 13, 12, 5, 1) + assert test_reader.start_time == dt.datetime(2015, 6, 13, 12, 0, 37) + assert test_reader.end_time == dt.datetime(2015, 6, 13, 12, 5, 1) def test_counts_calibration(self, setup_hdf5_file): """Test that data is correctly calibrated.""" diff --git a/satpy/tests/reader_tests/test_eum_base.py b/satpy/tests/reader_tests/test_eum_base.py index 55ac977b59..35b29aa79c 100644 --- a/satpy/tests/reader_tests/test_eum_base.py +++ b/satpy/tests/reader_tests/test_eum_base.py @@ -15,10 +15,11 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """EUMETSAT base reader tests package.""" +import datetime as dt import unittest -from datetime import datetime import numpy as np @@ -40,18 +41,18 @@ def test_fun(self): """Test function for TestMakeTimeCdsDictionary.""" # time_cds_short tcds = {"Days": np.array(1), "Milliseconds": np.array(2)} - expected = datetime(1958, 1, 2, 0, 0, 0, 2000) + expected = dt.datetime(1958, 1, 2, 0, 0, 0, 2000) assert timecds2datetime(tcds) == expected # time_cds tcds = {"Days": np.array(1), "Milliseconds": np.array(2), "Microseconds": np.array(3)} - expected = datetime(1958, 1, 2, 0, 0, 0, 2003) + expected = dt.datetime(1958, 1, 2, 0, 0, 0, 2003) assert timecds2datetime(tcds) == expected # time_cds_expanded tcds = {"Days": np.array(1), "Milliseconds": np.array(2), "Microseconds": np.array(3), "Nanoseconds": np.array(4)} - expected = datetime(1958, 1, 2, 0, 0, 0, 2003) + expected = dt.datetime(1958, 1, 2, 0, 0, 0, 2003) assert timecds2datetime(tcds) == expected @@ -62,17 +63,17 @@ def test_fun(self): """Test function for TestMakeTimeCdsRecarray.""" # time_cds_short tcds = np.array([(1, 2)], dtype=np.dtype(time_cds_short)) - expected = datetime(1958, 1, 2, 0, 0, 0, 2000) + expected = dt.datetime(1958, 1, 2, 0, 0, 0, 2000) assert timecds2datetime(tcds) == expected # time_cds tcds = np.array([(1, 2, 3)], dtype=np.dtype(time_cds)) - expected = datetime(1958, 1, 2, 0, 0, 0, 2003) + expected = dt.datetime(1958, 1, 2, 0, 0, 0, 2003) assert timecds2datetime(tcds) == expected # time_cds_expanded tcds = np.array([(1, 2, 3, 4)], dtype=np.dtype(time_cds_expanded)) - expected = datetime(1958, 1, 2, 0, 0, 0, 2003) + expected = dt.datetime(1958, 1, 2, 0, 0, 0, 2003) assert timecds2datetime(tcds) == expected @@ -97,9 +98,9 @@ def test_timestamps(self): (21916, 42309417, 918, 443))]]], dtype=pat_dt) expected = { - "TrueRepeatCycleStart": datetime(2018, 1, 2, 11, 30, 9, 544305), - "PlanForwardScanEnd": datetime(2018, 1, 2, 11, 42, 40, 340660), - "PlannedRepeatCycleEnd": datetime(2018, 1, 2, 11, 45, 9, 417918) + "TrueRepeatCycleStart": dt.datetime(2018, 1, 2, 11, 30, 9, 544305), + "PlanForwardScanEnd": dt.datetime(2018, 1, 2, 11, 42, 40, 340660), + "PlannedRepeatCycleEnd": dt.datetime(2018, 1, 2, 11, 45, 9, 417918) } assert recarray2dict(pat) == expected diff --git a/satpy/tests/reader_tests/test_generic_image.py b/satpy/tests/reader_tests/test_generic_image.py index cd347ce07e..65e21edd38 100644 --- a/satpy/tests/reader_tests/test_generic_image.py +++ b/satpy/tests/reader_tests/test_generic_image.py @@ -32,14 +32,14 @@ class TestGenericImage(unittest.TestCase): def setUp(self): """Create temporary images to test on.""" + import datetime as dt import tempfile - from datetime import datetime from pyresample.geometry import AreaDefinition from satpy.scene import Scene - self.date = datetime(2018, 1, 1) + self.date = dt.datetime(2018, 1, 1) # Create area definition pcs_id = "ETRS89 / LAEA Europe" diff --git a/satpy/tests/reader_tests/test_ghrsst_l2.py b/satpy/tests/reader_tests/test_ghrsst_l2.py index 66c030e91d..b4cabccfa4 100644 --- a/satpy/tests/reader_tests/test_ghrsst_l2.py +++ b/satpy/tests/reader_tests/test_ghrsst_l2.py @@ -17,9 +17,9 @@ # satpy. If not, see . 
"""Module for testing the satpy.readers.ghrsst_l2 module.""" +import datetime as dt import os import tarfile -from datetime import datetime from pathlib import Path import numpy as np @@ -124,7 +124,7 @@ def test_get_dataset(self, tmp_path): def test_get_sensor(self, tmp_path): """Test retrieval of the sensor name from the netCDF file.""" - dt_valid = datetime(2022, 3, 21, 11, 26, 40) # 202203211200Z + dt_valid = dt.datetime(2022, 3, 21, 11, 26, 40) # 202203211200Z filename_info = {"field_type": "NARSST", "generating_centre": "FRA_", "satid": "NOAA20_", "valid_time": dt_valid} @@ -136,9 +136,9 @@ def test_get_sensor(self, tmp_path): def test_get_start_and_end_times(self, tmp_path): """Test retrieval of the sensor name from the netCDF file.""" - dt_valid = datetime(2022, 3, 21, 11, 26, 40) # 202203211200Z - good_start_time = datetime(2022, 3, 21, 11, 26, 40) # 20220321T112640Z - good_stop_time = datetime(2022, 3, 21, 14, 57, 11) # 20220321T145711Z + dt_valid = dt.datetime(2022, 3, 21, 11, 26, 40) # 202203211200Z + good_start_time = dt.datetime(2022, 3, 21, 11, 26, 40) # 20220321T112640Z + good_stop_time = dt.datetime(2022, 3, 21, 14, 57, 11) # 20220321T145711Z filename_info = {"field_type": "NARSST", "generating_centre": "FRA_", "satid": "NOAA20_", "valid_time": dt_valid} diff --git a/satpy/tests/reader_tests/test_glm_l2.py b/satpy/tests/reader_tests/test_glm_l2.py index 81636ba630..8ee53e29a2 100644 --- a/satpy/tests/reader_tests/test_glm_l2.py +++ b/satpy/tests/reader_tests/test_glm_l2.py @@ -128,9 +128,9 @@ def setUp(self, xr_): def test_basic_attributes(self): """Test getting basic file attributes.""" - from datetime import datetime - assert self.reader.start_time == datetime(2017, 9, 20, 17, 30, 40) - assert self.reader.end_time == datetime(2017, 9, 20, 17, 41, 17) + import datetime as dt + assert self.reader.start_time == dt.datetime(2017, 9, 20, 17, 30, 40) + assert self.reader.end_time == dt.datetime(2017, 9, 20, 17, 41, 17) def test_get_dataset(self): """Test the get_dataset method.""" diff --git a/satpy/tests/reader_tests/test_goci2_l2_nc.py b/satpy/tests/reader_tests/test_goci2_l2_nc.py index 865ac3184e..e8bdae0e58 100644 --- a/satpy/tests/reader_tests/test_goci2_l2_nc.py +++ b/satpy/tests/reader_tests/test_goci2_l2_nc.py @@ -15,8 +15,10 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Module for testing the satpy.readers.goci2_l2_nc module.""" -from datetime import datetime + +import datetime as dt import numpy as np import pytest @@ -30,8 +32,8 @@ # - tmp_path_factory -start_time = datetime(2024, 2, 14, 2, 32, 27) -end_time = datetime(2024, 2, 14, 2, 33, 31) +start_time = dt.datetime(2024, 2, 14, 2, 32, 27) +end_time = dt.datetime(2024, 2, 14, 2, 33, 31) global_attrs = { "observation_start_time": start_time.strftime("%Y%m%d_%H%M%S"), diff --git a/satpy/tests/reader_tests/test_gpm_imerg.py b/satpy/tests/reader_tests/test_gpm_imerg.py index a75e59863f..d038d0f0d7 100644 --- a/satpy/tests/reader_tests/test_gpm_imerg.py +++ b/satpy/tests/reader_tests/test_gpm_imerg.py @@ -14,12 +14,12 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
-"""Unittests for GPM IMERG reader.""" +"""Unittests for GPM IMERG reader.""" +import datetime as dt import os import unittest -from datetime import datetime from unittest import mock import dask.array as da @@ -127,8 +127,8 @@ def test_load_data(self): assert reader.file_handlers res = reader.load(["IRprecipitation"]) assert 1 == len(res) - assert res["IRprecipitation"].start_time == datetime(2020, 1, 31, 23, 30, 0) - assert res["IRprecipitation"].end_time == datetime(2020, 1, 31, 23, 59, 59) + assert res["IRprecipitation"].start_time == dt.datetime(2020, 1, 31, 23, 30, 0) + assert res["IRprecipitation"].end_time == dt.datetime(2020, 1, 31, 23, 59, 59) assert res["IRprecipitation"].resolution == 0.1 assert res["IRprecipitation"].area.width == 3600 assert res["IRprecipitation"].area.height == 1800 diff --git a/satpy/tests/reader_tests/test_hrit_base.py b/satpy/tests/reader_tests/test_hrit_base.py index 12317f11f1..dd57c12fdd 100644 --- a/satpy/tests/reader_tests/test_hrit_base.py +++ b/satpy/tests/reader_tests/test_hrit_base.py @@ -18,10 +18,10 @@ """The HRIT base reader tests package.""" import bz2 +import datetime as dt import gzip import os import unittest -from datetime import datetime, timedelta from tempfile import NamedTemporaryFile, gettempdir from unittest import mock @@ -189,7 +189,7 @@ def setup_method(self, method): with mock.patch.object(HRITFileHandler, "_get_hd", new=new_get_hd): self.reader = HRITFileHandler("filename", {"platform_shortname": "MSG3", - "start_time": datetime(2016, 3, 3, 0, 0)}, + "start_time": dt.datetime(2016, 3, 3, 0, 0)}, {"filetype": "info"}, [mock.MagicMock(), mock.MagicMock(), mock.MagicMock()]) @@ -269,9 +269,9 @@ def test_read_band_gzip_stream(self, stub_gzipped_hrit_file): def test_start_end_time(self): """Test reading and converting start/end time.""" - assert self.reader.start_time == datetime(2016, 3, 3, 0, 0) + assert self.reader.start_time == dt.datetime(2016, 3, 3, 0, 0) assert self.reader.start_time == self.reader.observation_start_time - assert self.reader.end_time == datetime(2016, 3, 3, 0, 0) + timedelta(minutes=15) + assert self.reader.end_time == dt.datetime(2016, 3, 3, 0, 0) + dt.timedelta(minutes=15) assert self.reader.end_time == self.reader.observation_end_time @@ -292,7 +292,7 @@ def test_read_band_filepath(self, stub_compressed_hrit_file): with mock.patch.object(HRITFileHandler, "_get_hd", side_effect=new_get_hd, autospec=True) as get_hd: self.reader = HRITFileHandler(filename, {"platform_shortname": "MSG3", - "start_time": datetime(2016, 3, 3, 0, 0)}, + "start_time": dt.datetime(2016, 3, 3, 0, 0)}, {"filetype": "info"}, [mock.MagicMock(), mock.MagicMock(), mock.MagicMock()]) diff --git a/satpy/tests/reader_tests/test_hsaf_grib.py b/satpy/tests/reader_tests/test_hsaf_grib.py index da0f6dd86b..296bb921c4 100644 --- a/satpy/tests/reader_tests/test_hsaf_grib.py +++ b/satpy/tests/reader_tests/test_hsaf_grib.py @@ -17,9 +17,9 @@ # satpy. If not, see . 
"""Module for testing the satpy.readers.grib module.""" +import datetime as dt import sys import unittest -from datetime import datetime from unittest import mock import numpy as np @@ -132,7 +132,7 @@ def tearDown(self): def test_init(self, pg): """Test the init function, ensure that the correct dates and metadata are returned.""" pg.open.return_value = FakeGRIB() - correct_dt = datetime(2019, 6, 3, 16, 45, 0) + correct_dt = dt.datetime(2019, 6, 3, 16, 45, 0) from satpy.readers.hsaf_grib import HSAFFileHandler fh = HSAFFileHandler("filename", mock.MagicMock(), mock.MagicMock()) assert fh._analysis_time == correct_dt diff --git a/satpy/tests/reader_tests/test_hsaf_h5.py b/satpy/tests/reader_tests/test_hsaf_h5.py index 49658e6727..bdd523ad0d 100644 --- a/satpy/tests/reader_tests/test_hsaf_h5.py +++ b/satpy/tests/reader_tests/test_hsaf_h5.py @@ -1,6 +1,7 @@ """Tests for the H-SAF H5 reader.""" + +import datetime as dt import os -from datetime import datetime import h5py import numpy as np @@ -50,7 +51,7 @@ def test_hsaf_sc_datetime(sc_h5_file): loaded_scene = _get_scene_with_loaded_sc_datasets(sc_h5_file) fname = os.path.basename(sc_h5_file) dtstr = fname.split("_")[1] - obs_time = datetime.strptime(dtstr, "%Y%m%d") + obs_time = dt.datetime.strptime(dtstr, "%Y%m%d") assert loaded_scene["SC"].attrs["data_time"] == obs_time diff --git a/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py b/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py index f90da00613..999fb50045 100644 --- a/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py +++ b/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py @@ -509,7 +509,7 @@ def test_reading_attrs_nsoas(self): def test_properties(self): """Test platform_name.""" - from datetime import datetime + import datetime as dt from satpy.readers import load_reader filenames = [ @@ -521,5 +521,5 @@ def test_properties(self): # Make sure we have some files res = reader.load(["wvc_lon"]) assert res["wvc_lon"].platform_name == "HY-2B" - assert res["wvc_lon"].start_time == datetime(2020, 3, 26, 1, 11, 7) - assert res["wvc_lon"].end_time == datetime(2020, 3, 26, 2, 55, 40) + assert res["wvc_lon"].start_time == dt.datetime(2020, 3, 26, 1, 11, 7) + assert res["wvc_lon"].end_time == dt.datetime(2020, 3, 26, 2, 55, 40) diff --git a/satpy/tests/reader_tests/test_ici_l1b_nc.py b/satpy/tests/reader_tests/test_ici_l1b_nc.py index a5909b249d..ab8bad2527 100644 --- a/satpy/tests/reader_tests/test_ici_l1b_nc.py +++ b/satpy/tests/reader_tests/test_ici_l1b_nc.py @@ -15,13 +15,14 @@ # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . + """The ici_l1b_nc reader tests package. This version tests the reader for ICI test data as per PFS V3A. 
""" -from datetime import datetime +import datetime as dt from unittest.mock import patch import numpy as np @@ -50,13 +51,13 @@ def reader(fake_file): filename=fake_file, filename_info={ "sensing_start_time": ( - datetime.fromisoformat("2000-01-01T01:00:00") + dt.datetime.fromisoformat("2000-01-01T01:00:00") ), "sensing_end_time": ( - datetime.fromisoformat("2000-01-01T02:00:00") + dt.datetime.fromisoformat("2000-01-01T02:00:00") ), "creation_time": ( - datetime.fromisoformat("2000-01-01T03:00:00") + dt.datetime.fromisoformat("2000-01-01T03:00:00") ), }, filetype_info={ @@ -217,11 +218,11 @@ class TestIciL1bNCFileHandler: def test_start_time(self, reader): """Test start time.""" - assert reader.start_time == datetime(2000, 1, 2, 3, 4, 5) + assert reader.start_time == dt.datetime(2000, 1, 2, 3, 4, 5) def test_end_time(self, reader): """Test end time.""" - assert reader.end_time == datetime(2000, 1, 2, 4, 5, 6) + assert reader.end_time == dt.datetime(2000, 1, 2, 4, 5, 6) def test_sensor(self, reader): """Test sensor.""" @@ -517,13 +518,13 @@ def test_get_global_attributes(self, reader): attributes = reader._get_global_attributes() assert attributes == { "filename": reader.filename, - "start_time": datetime(2000, 1, 2, 3, 4, 5), - "end_time": datetime(2000, 1, 2, 4, 5, 6), + "start_time": dt.datetime(2000, 1, 2, 3, 4, 5), + "end_time": dt.datetime(2000, 1, 2, 4, 5, 6), "spacecraft_name": "SGB", "ssp_lon": None, "sensor": "ICI", - "filename_start_time": datetime(2000, 1, 1, 1, 0), - "filename_end_time": datetime(2000, 1, 1, 2, 0), + "filename_start_time": dt.datetime(2000, 1, 1, 1, 0), + "filename_end_time": dt.datetime(2000, 1, 1, 2, 0), "platform_name": "SGB", "quality_group": { "duration_of_product": np.array(1000., dtype=np.float32), diff --git a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py index 9fa7af224d..0cefac2a2f 100644 --- a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py +++ b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py @@ -1,6 +1,7 @@ """Tests for the Insat3D reader.""" + +import datetime as dt import os -from datetime import datetime import dask.array as da import h5netcdf @@ -72,8 +73,8 @@ "ALBEDO": "%", "TEMP": "K"} -start_time = datetime(2009, 6, 9, 9, 0) -end_time = datetime(2009, 6, 9, 9, 30) +start_time = dt.datetime(2009, 6, 9, 9, 0) +end_time = dt.datetime(2009, 6, 9, 9, 30) subsatellite_longitude = 82 time_pattern = "%d-%b-%YT%H:%M:%S" diff --git a/satpy/tests/reader_tests/test_li_l2_nc.py b/satpy/tests/reader_tests/test_li_l2_nc.py index 5e9d0ff563..05eb37b4e4 100644 --- a/satpy/tests/reader_tests/test_li_l2_nc.py +++ b/satpy/tests/reader_tests/test_li_l2_nc.py @@ -12,9 +12,11 @@ # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . 
+ """Unit tests on the LI L2 reader using the conventional mock constructed context.""" + +import datetime as dt import os -from datetime import datetime from unittest import mock import numpy as np @@ -405,7 +407,7 @@ def test_report_datetimes(self, filetype_infos): assert dset.values.dtype == np.dtype("datetime64[ns]") # The default epoch_time should be 1.234 seconds after epoch: - ref_time = np.datetime64(datetime(2000, 1, 1, 0, 0, 1, 234000)) + ref_time = np.datetime64(dt.datetime(2000, 1, 1, 0, 0, 1, 234000)) assert np.all(dset.values == ref_time) # Check time_offset: diff --git a/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py b/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py index 4083f7de00..be0bc12ee1 100644 --- a/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py +++ b/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py @@ -18,10 +18,10 @@ # Satpy. If not, see . """Module for testing the satpy.readers.tropomi_l2 module.""" +import datetime as dt import itertools import os import unittest -from datetime import datetime from unittest import mock import numpy as np @@ -31,7 +31,7 @@ DEFAULT_FILE_DTYPE = np.float32 DEFAULT_FILE_SHAPE = (721, 1440) -DEFAULT_DATE = datetime(2019, 6, 19, 13, 0) +DEFAULT_DATE = dt.datetime(2019, 6, 19, 13, 0) DEFAULT_LAT = np.linspace(-90, 90, DEFAULT_FILE_SHAPE[0], dtype=DEFAULT_FILE_DTYPE) DEFAULT_LON = np.linspace(-180, 180, DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE) DEFAULT_FILE_FLOAT_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], diff --git a/satpy/tests/reader_tests/test_mimic_TPW2_nc.py b/satpy/tests/reader_tests/test_mimic_TPW2_nc.py index 63214b0477..29857afbed 100644 --- a/satpy/tests/reader_tests/test_mimic_TPW2_nc.py +++ b/satpy/tests/reader_tests/test_mimic_TPW2_nc.py @@ -16,11 +16,12 @@ # # You should have received a copy of the GNU General Public License along with # Satpy. If not, see . + """Module for testing the satpy.readers.tropomi_l2 module.""" +import datetime as dt import os import unittest -from datetime import datetime from unittest import mock import numpy as np @@ -43,8 +44,8 @@ class FakeNetCDF4FileHandlerMimic(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" from xarray import DataArray - dt_s = filename_info.get("start_time", datetime(2019, 6, 19, 13, 0)) - dt_e = filename_info.get("end_time", datetime(2019, 6, 19, 13, 0)) + dt_s = filename_info.get("start_time", dt.datetime(2019, 6, 19, 13, 0)) + dt_e = filename_info.get("end_time", dt.datetime(2019, 6, 19, 13, 0)) if filetype_info["file_type"] == "mimicTPW2_comp": file_content = { diff --git a/satpy/tests/reader_tests/test_mirs.py b/satpy/tests/reader_tests/test_mirs.py index b857147e47..e4e547bdf8 100644 --- a/satpy/tests/reader_tests/test_mirs.py +++ b/satpy/tests/reader_tests/test_mirs.py @@ -16,11 +16,13 @@ # # You should have received a copy of the GNU General Public License along with # Satpy. If not, see . 
+ """Module for testing the satpy.readers.mirs module.""" + from __future__ import annotations +import datetime as dt import os -from datetime import datetime from unittest import mock import numpy as np @@ -45,7 +47,7 @@ N_SCANLINE = 100 DEFAULT_FILE_DTYPE = np.float32 DEFAULT_2D_SHAPE = (N_SCANLINE, N_FOV) -DEFAULT_DATE = datetime(2019, 6, 19, 13, 0) +DEFAULT_DATE = dt.datetime(2019, 6, 19, 13, 0) DEFAULT_LAT = np.linspace(23.09356, 36.42844, N_SCANLINE * N_FOV, dtype=DEFAULT_FILE_DTYPE) DEFAULT_LON = np.linspace(127.6879, 144.5284, N_SCANLINE * N_FOV, @@ -71,8 +73,8 @@ PLATFORM = {"M2": "metop-a", "NPP": "npp", "GPM": "gpm"} SENSOR = {"m2": "amsu-mhs", "npp": "atms", "gpm": "GPI"} -START_TIME = datetime(2017, 2, 6, 16, 1, 0) -END_TIME = datetime(2017, 2, 6, 16, 7, 0) +START_TIME = dt.datetime(2017, 2, 6, 16, 1, 0) +END_TIME = dt.datetime(2017, 2, 6, 16, 7, 0) def fake_coeff_from_fn(fn): diff --git a/satpy/tests/reader_tests/test_mws_l1b_nc.py b/satpy/tests/reader_tests/test_mws_l1b_nc.py index 2d227822a4..52a894bd00 100644 --- a/satpy/tests/reader_tests/test_mws_l1b_nc.py +++ b/satpy/tests/reader_tests/test_mws_l1b_nc.py @@ -19,8 +19,8 @@ """ +import datetime as dt import logging -from datetime import datetime from unittest.mock import patch import numpy as np @@ -50,13 +50,13 @@ def reader(fake_file): filename=fake_file, filename_info={ "start_time": ( - datetime.fromisoformat("2000-01-01T01:00:00") + dt.datetime.fromisoformat("2000-01-01T01:00:00") ), "end_time": ( - datetime.fromisoformat("2000-01-01T02:00:00") + dt.datetime.fromisoformat("2000-01-01T02:00:00") ), "creation_time": ( - datetime.fromisoformat("2000-01-01T03:00:00") + dt.datetime.fromisoformat("2000-01-01T03:00:00") ), }, filetype_info={ @@ -207,11 +207,11 @@ class TestMwsL1bNCFileHandler: def test_start_time(self, reader): """Test acquiring the start time.""" - assert reader.start_time == datetime(2000, 1, 2, 3, 4, 5) + assert reader.start_time == dt.datetime(2000, 1, 2, 3, 4, 5) def test_end_time(self, reader): """Test acquiring the end time.""" - assert reader.end_time == datetime(2000, 1, 2, 4, 5, 6) + assert reader.end_time == dt.datetime(2000, 1, 2, 4, 5, 6) def test_sensor(self, reader): """Test sensor.""" @@ -356,12 +356,12 @@ def test_get_global_attributes(self, reader): attributes = reader._get_global_attributes() assert attributes == { "filename": reader.filename, - "start_time": datetime(2000, 1, 2, 3, 4, 5), - "end_time": datetime(2000, 1, 2, 4, 5, 6), + "start_time": dt.datetime(2000, 1, 2, 3, 4, 5), + "end_time": dt.datetime(2000, 1, 2, 4, 5, 6), "spacecraft_name": "Metop-SG-A1", "sensor": "MWS", - "filename_start_time": datetime(2000, 1, 1, 1, 0), - "filename_end_time": datetime(2000, 1, 1, 2, 0), + "filename_start_time": dt.datetime(2000, 1, 1, 1, 0), + "filename_end_time": dt.datetime(2000, 1, 1, 2, 0), "platform_name": "Metop-SG-A1", "quality_group": { "duration_of_product": np.array(5944., dtype=np.float32), diff --git a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py index 90b9d4432f..0293a88fe3 100644 --- a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py +++ b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py @@ -16,10 +16,11 @@ # # You should have received a copy of the GNU General Public License along with # Satpy. If not, see . 
+ """Module for testing the satpy.readers.oceancolorcci_l3_nc module.""" +import datetime as dt import os -from datetime import datetime import numpy as np import pytest @@ -243,12 +244,12 @@ def test_get_dataset_5d_allprods(self, fake_dataset, fake_file_dict): def test_start_time(self, fake_file_dict): """Test start time property.""" reader = self._create_reader_for_resolutions([fake_file_dict["k490_1d"]]) - assert reader.start_time == datetime(2021, 8, 1, 0, 0, 0) + assert reader.start_time == dt.datetime(2021, 8, 1, 0, 0, 0) def test_end_time(self, fake_file_dict): """Test end time property.""" reader = self._create_reader_for_resolutions([fake_file_dict["iop_8d"]]) - assert reader.end_time == datetime(2021, 8, 31, 23, 59, 0) + assert reader.end_time == dt.datetime(2021, 8, 31, 23, 59, 0) def test_correct_dimnames(self, fake_file_dict): """Check that the loaded dimension names are correct.""" diff --git a/satpy/tests/reader_tests/test_osisaf_l3.py b/satpy/tests/reader_tests/test_osisaf_l3.py index 80fb581db7..106687a509 100644 --- a/satpy/tests/reader_tests/test_osisaf_l3.py +++ b/satpy/tests/reader_tests/test_osisaf_l3.py @@ -15,8 +15,8 @@ # satpy. If not, see . """Module for testing the satpy.readers.osisaf_l3 module.""" +import datetime as dt import os -from datetime import datetime import numpy as np import pytest @@ -206,8 +206,8 @@ def setup_method(self): super().setup_method(tester="ice") self.filename_info = {"grid": "ease"} self.filetype_info = {"file_type": "osi_sea_ice_conc"} - self.good_start_time = datetime(2022, 12, 15, 0, 0, 0) - self.good_stop_time = datetime(2022, 12, 16, 0, 0, 0) + self.good_start_time = dt.datetime(2022, 12, 15, 0, 0, 0) + self.good_stop_time = dt.datetime(2022, 12, 16, 0, 0, 0) self.varname = "ice_conc" self.stdname = "sea_ice_area_fraction" self.fillv = -999 @@ -260,8 +260,8 @@ def setup_method(self): super().setup_method(tester="flux_stere") self.filename_info = {"grid": "polstere"} self.filetype_info = {"file_type": "osi_radflux_stere"} - self.good_start_time = datetime(2023, 10, 10, 0, 0, 0) - self.good_stop_time = datetime(2023, 10, 10, 23, 59, 59) + self.good_start_time = dt.datetime(2023, 10, 10, 0, 0, 0) + self.good_stop_time = dt.datetime(2023, 10, 10, 23, 59, 59) self.varname = "ssi" self.stdname = "surface_downwelling_shortwave_flux_in_air" self.fillv = -999.99 @@ -295,8 +295,8 @@ def setup_method(self): super().setup_method(tester="flux_geo") self.filename_info = {} self.filetype_info = {"file_type": "osi_radflux_grid"} - self.good_start_time = datetime(2022, 12, 28, 18, 30, 0) - self.good_stop_time = datetime(2022, 12, 28, 19, 30, 0) + self.good_start_time = dt.datetime(2022, 12, 28, 18, 30, 0) + self.good_stop_time = dt.datetime(2022, 12, 28, 19, 30, 0) self.varname = "ssi" self.stdname = "surface_downwelling_shortwave_flux_in_air" self.fillv = -32768 @@ -332,8 +332,8 @@ def setup_method(self): super().setup_method(tester="sst") self.filename_info = {} self.filetype_info = {"file_type": "osi_sst"} - self.good_start_time = datetime(2022, 12, 15, 0, 0, 0) - self.good_stop_time = datetime(2022, 12, 16, 0, 0, 0) + self.good_start_time = dt.datetime(2022, 12, 15, 0, 0, 0) + self.good_stop_time = dt.datetime(2022, 12, 16, 0, 0, 0) self.varname = "surface_temperature" self.stdname = "sea_ice_surface_temperature" self.fillv = -32768 diff --git a/satpy/tests/reader_tests/test_satpy_cf_nc.py b/satpy/tests/reader_tests/test_satpy_cf_nc.py index f94ae55cc0..fb4fd6831b 100644 --- a/satpy/tests/reader_tests/test_satpy_cf_nc.py +++ 
b/satpy/tests/reader_tests/test_satpy_cf_nc.py @@ -15,9 +15,11 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Tests for the CF reader.""" + +import datetime as dt import warnings -from datetime import datetime import numpy as np import pytest @@ -66,8 +68,8 @@ def _create_test_netcdf(filename, resolution=742): "solar_zenith_angle": solar_zenith_angle_i } - tstart = datetime(2019, 4, 1, 12, 0) - tend = datetime(2019, 4, 1, 12, 15) + tstart = dt.datetime(2019, 4, 1, 12, 0) + tend = dt.datetime(2019, 4, 1, 12, 15) common_attrs = { "start_time": tstart, "end_time": tend, @@ -107,12 +109,12 @@ def area(): def common_attrs(area): """Get common dataset attributes.""" return { - "start_time": datetime(2019, 4, 1, 12, 0, 0, 123456), - "end_time": datetime(2019, 4, 1, 12, 15), + "start_time": dt.datetime(2019, 4, 1, 12, 0, 0, 123456), + "end_time": dt.datetime(2019, 4, 1, 12, 15), "platform_name": "tirosn", "orbit_number": 99999, "area": area, - "my_timestamp": datetime(2000, 1, 1) + "my_timestamp": dt.datetime(2000, 1, 1) } @@ -263,7 +265,7 @@ def cf_scene(datasets, common_attrs): @pytest.fixture() def nc_filename(tmp_path): """Create an nc filename for viirs m band.""" - now = datetime.utcnow() + now = dt.datetime.utcnow() filename = f"testingcfwriter{now:%Y%j%H%M%S}-viirs-mband-20201007075915-20201007080744.nc" return str(tmp_path / filename) @@ -271,7 +273,7 @@ def nc_filename(tmp_path): @pytest.fixture() def nc_filename_i(tmp_path): """Create an nc filename for viirs i band.""" - now = datetime.utcnow() + now = dt.datetime.utcnow() filename = f"testingcfwriter{now:%Y%j%H%M%S}-viirs-iband-20201007075915-20201007080744.nc" return str(tmp_path / filename) diff --git a/satpy/tests/reader_tests/test_scmi.py b/satpy/tests/reader_tests/test_scmi.py index 13c74a7d5c..12fbb7dc2a 100644 --- a/satpy/tests/reader_tests/test_scmi.py +++ b/satpy/tests/reader_tests/test_scmi.py @@ -103,11 +103,11 @@ def setUp(self, xr_): def test_basic_attributes(self): """Test getting basic file attributes.""" - from datetime import datetime + import datetime as dt from satpy.tests.utils import make_dataid - assert self.reader.start_time == datetime(2017, 7, 29, 12, 0, 0, 0) - assert self.reader.end_time == datetime(2017, 7, 29, 12, 0, 0, 0) + assert self.reader.start_time == dt.datetime(2017, 7, 29, 12, 0, 0, 0) + assert self.reader.end_time == dt.datetime(2017, 7, 29, 12, 0, 0, 0) assert self.reader.get_shape(make_dataid(name="C05"), {}) == (2, 5) def test_data_load(self): diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index a07bb799bc..f705796521 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -17,8 +17,8 @@ # satpy. If not, see . 
"""Test the MSG common (native and hrit format) functionionalities.""" +import datetime as dt import unittest -from datetime import datetime, timedelta import dask.array as da import numpy as np @@ -117,18 +117,18 @@ def test_pad_data_vertically_bad_shape(self): def observation_start_time(self): """Get scan start timestamp for testing.""" - return datetime(2023, 3, 20, 15, 0, 10, 691000) + return dt.datetime(2023, 3, 20, 15, 0, 10, 691000) def observation_end_time(self): """Get scan end timestamp for testing.""" - return datetime(2023, 3, 20, 15, 12, 43, 843000) + return dt.datetime(2023, 3, 20, 15, 12, 43, 843000) def test_round_nom_time(self): """Test the rouding of start/end_time.""" - assert round_nom_time(dt=self.observation_start_time(), - time_delta=timedelta(minutes=15)) == datetime(2023, 3, 20, 15, 0) - assert round_nom_time(dt=self.observation_end_time(), - time_delta=timedelta(minutes=15)) == datetime(2023, 3, 20, 15, 15) + assert round_nom_time(date=self.observation_start_time(), + time_delta=dt.timedelta(minutes=15)) == dt.datetime(2023, 3, 20, 15, 0) + assert round_nom_time(date=self.observation_end_time(), + time_delta=dt.timedelta(minutes=15)) == dt.datetime(2023, 3, 20, 15, 15) @staticmethod def test_pad_data_horizontally(): @@ -177,13 +177,13 @@ def test_get_padding_area_int(): ORBIT_POLYNOMIALS = { "StartTime": np.array([ [ - datetime(2006, 1, 1, 6), datetime(2006, 1, 1, 12), - datetime(2006, 1, 1, 18), datetime(1958, 1, 1, 0)] + dt.datetime(2006, 1, 1, 6), dt.datetime(2006, 1, 1, 12), + dt.datetime(2006, 1, 1, 18), dt.datetime(1958, 1, 1, 0)] ]), "EndTime": np.array([ [ - datetime(2006, 1, 1, 12), datetime(2006, 1, 1, 18), - datetime(2006, 1, 2, 0), datetime(1958, 1, 1, 0) + dt.datetime(2006, 1, 1, 12), dt.datetime(2006, 1, 1, 18), + dt.datetime(2006, 1, 2, 0), dt.datetime(1958, 1, 1, 0) ] ]), "X": [np.zeros(8), @@ -212,18 +212,18 @@ def test_get_padding_area_int(): # 01-03: Overlap (10:00 - 13:00) "StartTime": np.array([ [ - datetime(2005, 12, 31, 10), datetime(2005, 12, 31, 12), - datetime(2006, 1, 1, 10), datetime(2006, 1, 1, 13), - datetime(2006, 1, 2, 0), datetime(2006, 1, 2, 18), - datetime(2006, 1, 3, 6), datetime(2006, 1, 3, 10), + dt.datetime(2005, 12, 31, 10), dt.datetime(2005, 12, 31, 12), + dt.datetime(2006, 1, 1, 10), dt.datetime(2006, 1, 1, 13), + dt.datetime(2006, 1, 2, 0), dt.datetime(2006, 1, 2, 18), + dt.datetime(2006, 1, 3, 6), dt.datetime(2006, 1, 3, 10), ] ]), "EndTime": np.array([ [ - datetime(2005, 12, 31, 12), datetime(2005, 12, 31, 18), - datetime(2006, 1, 1, 12), datetime(2006, 1, 1, 18), - datetime(2006, 1, 2, 4), datetime(2006, 1, 2, 22), - datetime(2006, 1, 3, 13), datetime(2006, 1, 3, 18), + dt.datetime(2005, 12, 31, 12), dt.datetime(2005, 12, 31, 18), + dt.datetime(2006, 1, 1, 12), dt.datetime(2006, 1, 1, 18), + dt.datetime(2006, 1, 2, 4), dt.datetime(2006, 1, 2, 22), + dt.datetime(2006, 1, 3, 13), dt.datetime(2006, 1, 3, 18), ] ]), "X": [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0], @@ -233,12 +233,12 @@ def test_get_padding_area_int(): ORBIT_POLYNOMIALS_INVALID = { "StartTime": np.array([ [ - datetime(1958, 1, 1), datetime(1958, 1, 1) + dt.datetime(1958, 1, 1), dt.datetime(1958, 1, 1) ] ]), "EndTime": np.array([ [ - datetime(1958, 1, 1), datetime(1958, 1, 1) + dt.datetime(1958, 1, 1), dt.datetime(1958, 1, 1) ] ]), "X": [1, 2], @@ -254,8 +254,8 @@ class TestSatellitePosition: def orbit_polynomial(self): """Get an orbit polynomial for testing.""" return OrbitPolynomial( - start_time=datetime(2006, 1, 1, 12), - end_time=datetime(2006, 1, 1, 18), 
+ start_time=dt.datetime(2006, 1, 1, 12), + end_time=dt.datetime(2006, 1, 1, 18), coefs=( np.array([8.41607082e+04, 2.94319260e+00, 9.86748617e-01, -2.70135453e-01, -3.84364650e-02, 8.48718433e-03, @@ -272,7 +272,7 @@ def orbit_polynomial(self): @pytest.fixture() def time(self): """Get scan timestamp for testing.""" - return datetime(2006, 1, 1, 12, 15, 9, 304888) + return dt.datetime(2006, 1, 1, 12, 15, 9, 304888) def test_eval_polynomial(self, orbit_polynomial, time): """Test getting the position in cartesian coordinates.""" @@ -305,7 +305,7 @@ class TestOrbitPolynomialFinder: # Contiguous validity intervals (that's the norm) ( ORBIT_POLYNOMIALS_SYNTH, - datetime(2005, 12, 31, 12, 15), + dt.datetime(2005, 12, 31, 12, 15), OrbitPolynomial( coefs=(2.0, 2.1, 2.2), start_time=np.datetime64("2005-12-31 12:00"), @@ -316,7 +316,7 @@ class TestOrbitPolynomialFinder: # not too far away ( ORBIT_POLYNOMIALS_SYNTH, - datetime(2006, 1, 1, 12, 15), + dt.datetime(2006, 1, 1, 12, 15), OrbitPolynomial( coefs=(3.0, 3.1, 3.2), start_time=np.datetime64("2006-01-01 10:00"), @@ -326,7 +326,7 @@ class TestOrbitPolynomialFinder: # Overlapping intervals ( ORBIT_POLYNOMIALS_SYNTH, - datetime(2006, 1, 3, 12, 15), + dt.datetime(2006, 1, 3, 12, 15), OrbitPolynomial( coefs=(8.0, 8.1, 8.2), start_time=np.datetime64("2006-01-03 10:00"), @@ -351,9 +351,9 @@ def test_get_orbit_polynomial(self, orbit_polynomials, time, [ # No interval enclosing the given timestamp and closest interval # too far away - (ORBIT_POLYNOMIALS_SYNTH, datetime(2006, 1, 2, 12, 15)), + (ORBIT_POLYNOMIALS_SYNTH, dt.datetime(2006, 1, 2, 12, 15)), # No valid polynomials at all - (ORBIT_POLYNOMIALS_INVALID, datetime(2006, 1, 1, 12, 15)) + (ORBIT_POLYNOMIALS_INVALID, dt.datetime(2006, 1, 1, 12, 15)) ] ) def test_get_orbit_polynomial_exceptions(self, orbit_polynomials, time): @@ -378,14 +378,14 @@ def test_get_meirink_slope_epoch(self, platform_id, channel_name): assert calibration_handler.get_gain_offset()[0] == MEIRINK_COEFS["2023"][platform_id][channel_name][0]/1000. 
@pytest.mark.parametrize(("platform_id", "time", "expected"), [ - (321, datetime(2005, 1, 18, 0, 0), [0.0250354716, 0.0315626684, 0.022880986]), - (321, datetime(2010, 12, 31, 0, 0), [0.0258479563, 0.0322386887, 0.022895110500000003]), - (322, datetime(2010, 1, 18, 0, 0), [0.021964051999999998, 0.027548445, 0.021576766]), - (322, datetime(2015, 6, 1, 0, 0), [0.022465028, 0.027908105, 0.021674373999999996]), - (323, datetime(2005, 1, 18, 0, 0), [0.0209088464, 0.0265355228, 0.0230132616]), - (323, datetime(2010, 12, 31, 0, 0), [0.022181355200000002, 0.0280103379, 0.0229511138]), - (324, datetime(2010, 1, 18, 0, 0), [0.0218362, 0.027580748, 0.022285370999999998]), - (324, datetime(2015, 6, 1, 0, 0), [0.0225418, 0.028530172, 0.022248718999999997]), + (321, dt.datetime(2005, 1, 18, 0, 0), [0.0250354716, 0.0315626684, 0.022880986]), + (321, dt.datetime(2010, 12, 31, 0, 0), [0.0258479563, 0.0322386887, 0.022895110500000003]), + (322, dt.datetime(2010, 1, 18, 0, 0), [0.021964051999999998, 0.027548445, 0.021576766]), + (322, dt.datetime(2015, 6, 1, 0, 0), [0.022465028, 0.027908105, 0.021674373999999996]), + (323, dt.datetime(2005, 1, 18, 0, 0), [0.0209088464, 0.0265355228, 0.0230132616]), + (323, dt.datetime(2010, 12, 31, 0, 0), [0.022181355200000002, 0.0280103379, 0.0229511138]), + (324, dt.datetime(2010, 1, 18, 0, 0), [0.0218362, 0.027580748, 0.022285370999999998]), + (324, dt.datetime(2015, 6, 1, 0, 0), [0.0225418, 0.028530172, 0.022248718999999997]), ]) def test_get_meirink_slope_2020(self, platform_id, time, expected): """Test the value of the slope of the Meirink calibration.""" diff --git a/satpy/tests/reader_tests/test_seviri_l1b_calibration.py b/satpy/tests/reader_tests/test_seviri_l1b_calibration.py index e6c2cdcf16..8eaf2b83da 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_calibration.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_calibration.py @@ -17,8 +17,8 @@ # satpy. If not, see . """Unittesting the native msg reader.""" +import datetime as dt import unittest -from datetime import datetime import numpy as np import pytest @@ -110,7 +110,7 @@ def setUp(self): """Set up the SEVIRI Calibration algorithm for testing.""" self.algo = SEVIRICalibrationAlgorithm( platform_id=PLATFORM_ID, - scan_time=datetime(2020, 8, 15, 13, 0, 40) + scan_time=dt.datetime(2020, 8, 15, 13, 0, 40) ) def test_convert_to_radiance(self): @@ -212,7 +212,7 @@ class TestFileHandlerCalibrationBase: gains_gsics = [0, 0, 0, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 0] offsets_gsics = [0, 0, 0, -0.4, -0.5, -0.6, -0.7, -0.8, -0.9, -1.0, -1.1, 0] radiance_types = 2 * np.ones(12) - scan_time = datetime(2020, 1, 1) + scan_time = dt.datetime(2020, 1, 1) external_coefs = { "VIS006": {"gain": 10, "offset": -10}, "IR_108": {"gain": 20, "offset": -20}, diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py index 3fe00edc80..1d0313621c 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py @@ -15,10 +15,11 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """The HRIT msg reader tests package.""" +import datetime as dt import unittest -from datetime import datetime from unittest import mock import numpy as np @@ -47,7 +48,7 @@ class TestHRITMSGFileHandlerHRV(TestHRITMSGBase): def setUp(self): """Set up the hrit file handler for testing HRV.""" - self.observation_start_time = datetime(2006, 1, 1, 12, 15, 9, 304888) + self.observation_start_time = dt.datetime(2006, 1, 1, 12, 15, 9, 304888) self.nlines = 464 self.reader = setup.get_fake_file_handler( observation_start_time=self.observation_start_time, @@ -139,7 +140,7 @@ class TestHRITMSGFileHandler(TestHRITMSGBase): def setUp(self): """Set up the hrit file handler for testing.""" - self.observation_start_time = datetime(2006, 1, 1, 12, 15, 9, 304888) + self.observation_start_time = dt.datetime(2006, 1, 1, 12, 15, 9, 304888) self.nlines = 464 self.ncols = 3712 self.projection_longitude = 9.5 @@ -214,13 +215,13 @@ def test_get_dataset(self, calibrate, parent_get_dataset): setup.get_attrs_exp(self.projection_longitude) ) # testing start/end time - assert datetime(2006, 1, 1, 12, 15, 9, 304888) == self.reader.observation_start_time - assert datetime(2006, 1, 1, 12, 15) == self.reader.start_time + assert dt.datetime(2006, 1, 1, 12, 15, 9, 304888) == self.reader.observation_start_time + assert dt.datetime(2006, 1, 1, 12, 15) == self.reader.start_time assert self.reader.start_time == self.reader.nominal_start_time - assert datetime(2006, 1, 1, 12, 27, 39) == self.reader.observation_end_time + assert dt.datetime(2006, 1, 1, 12, 27, 39) == self.reader.observation_end_time assert self.reader.end_time == self.reader.nominal_end_time - assert datetime(2006, 1, 1, 12, 30) == self.reader.end_time + assert dt.datetime(2006, 1, 1, 12, 30) == self.reader.end_time # test repeat cycle duration assert 15 == self.reader._repeat_cycle_duration # Change the reducescan scenario to test the repeat cycle duration handling @@ -292,7 +293,7 @@ class TestHRITMSGPrologueFileHandler(unittest.TestCase): def setUp(self, *mocks): """Set up the test case.""" fh = setup.get_fake_file_handler( - observation_start_time=datetime(2016, 3, 3, 0, 0), + observation_start_time=dt.datetime(2016, 3, 3, 0, 0), nlines=464, ncols=3712, ) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py index d668fe5240..21c42c0281 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py @@ -15,9 +15,10 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+
 """Setup for SEVIRI HRIT reader tests."""
 
-from datetime import datetime
+import datetime as dt
 from unittest import mock
 
 import numpy as np
@@ -126,8 +127,8 @@ def get_fake_prologue(projection_longitude, orbit_polynomials):
         },
         "ImageAcquisition": {
             "PlannedAcquisitionTime": {
-                "TrueRepeatCycleStart": datetime(2006, 1, 1, 12, 15, 9, 304888),
-                "PlannedRepeatCycleEnd": datetime(2006, 1, 1, 12, 30, 0, 0)
+                "TrueRepeatCycleStart": dt.datetime(2006, 1, 1, 12, 15, 9, 304888),
+                "PlannedRepeatCycleEnd": dt.datetime(2006, 1, 1, 12, 30, 0, 0)
             }
         }
     }
@@ -149,8 +150,8 @@ def get_fake_epilogue():
         },
         "ActualScanningSummary": {
             "ReducedScan": 0,
-            "ForwardScanStart": datetime(2006, 1, 1, 12, 15, 9, 304888),
-            "ForwardScanEnd": datetime(2006, 1, 1, 12, 27, 39, 0)
+            "ForwardScanStart": dt.datetime(2006, 1, 1, 12, 15, 9, 304888),
+            "ForwardScanEnd": dt.datetime(2006, 1, 1, 12, 27, 39, 0)
         }
     }
 }
@@ -198,7 +199,7 @@ def get_fake_dataset_info():
 
 def get_acq_time_cds(start_time, nlines):
     """Get fake scanline acquisition times."""
-    days_since_1958 = (start_time - datetime(1958, 1, 1)).days
+    days_since_1958 = (start_time - dt.datetime(1958, 1, 1)).days
     tline = np.zeros(
         nlines,
         dtype=[("days", ">u2"), ("milliseconds", ">u4")]
@@ -238,12 +239,12 @@ def get_attrs_exp(projection_longitude=0.0):
             "satellite_actual_latitude": -0.5711243456528018,
             "satellite_actual_altitude": 35783296.150123544},
         "georef_offset_corrected": True,
-        "nominal_start_time": datetime(2006, 1, 1, 12, 15),
-        "nominal_end_time": datetime(2006, 1, 1, 12, 30),
+        "nominal_start_time": dt.datetime(2006, 1, 1, 12, 15),
+        "nominal_end_time": dt.datetime(2006, 1, 1, 12, 30),
         "time_parameters": {
-            "nominal_start_time": datetime(2006, 1, 1, 12, 15),
-            "nominal_end_time": datetime(2006, 1, 1, 12, 30),
-            "observation_start_time": datetime(2006, 1, 1, 12, 15, 9, 304888),
-            "observation_end_time": datetime(2006, 1, 1, 12, 27, 39, 0)
+            "nominal_start_time": dt.datetime(2006, 1, 1, 12, 15),
+            "nominal_end_time": dt.datetime(2006, 1, 1, 12, 30),
+            "observation_start_time": dt.datetime(2006, 1, 1, 12, 15, 9, 304888),
+            "observation_end_time": dt.datetime(2006, 1, 1, 12, 27, 39, 0)
         }
     }
diff --git a/satpy/tests/reader_tests/test_seviri_l1b_icare.py b/satpy/tests/reader_tests/test_seviri_l1b_icare.py
index 7c32001168..cb8a1fb6af 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_icare.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_icare.py
@@ -124,7 +124,7 @@ def test_init(self):
 
     def test_load_dataset_vis(self):
         """Test loading all datasets from a full swath file."""
-        from datetime import datetime
+        import datetime as dt
         r = load_reader(self.reader_configs)
         loadables = r.select_files_from_pathnames([
             "GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf"
@@ -133,7 +133,7 @@ def test_load_dataset_vis(self):
         datasets = r.load(["VIS008"])
         assert len(datasets) == 1
         for v in datasets.values():
-            dt = datetime(2004, 12, 29, 12, 27, 44)
-            assert v.attrs["end_time"] == dt
+            expected_time = dt.datetime(2004, 12, 29, 12, 27, 44)
+            assert v.attrs["end_time"] == expected_time
             assert v.attrs["calibration"] == "reflectance"
diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py
index 6382517b55..8f4e46e2fb 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_native.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py
@@ -19,10 +19,10 @@
 
 from __future__ import annotations
 
+import datetime as dt
 import os
 import unittest
 import warnings
-from datetime import datetime
 from unittest import mock
 
 import dask.array as da
@@ -889,8 +889,8 @@ def file_handler(self):
"15TRAILER": { "ImageProductionStats": { "ActualScanningSummary": { - "ForwardScanStart": datetime(2006, 1, 1, 12, 15, 9, 304888), - "ForwardScanEnd": datetime(2006, 1, 1, 12, 27, 9, 304888), + "ForwardScanStart": dt.datetime(2006, 1, 1, 12, 15, 9, 304888), + "ForwardScanEnd": dt.datetime(2006, 1, 1, 12, 27, 9, 304888), "ReducedScan": 0 } } @@ -941,8 +941,8 @@ def _fake_header(): }, "ImageAcquisition": { "PlannedAcquisitionTime": { - "TrueRepeatCycleStart": datetime(2006, 1, 1, 12, 15, 0, 0), - "PlannedRepeatCycleEnd": datetime(2006, 1, 1, 12, 30, 0, 0), + "TrueRepeatCycleStart": dt.datetime(2006, 1, 1, 12, 15, 0, 0), + "PlannedRepeatCycleEnd": dt.datetime(2006, 1, 1, 12, 30, 0, 0), } } }, @@ -993,19 +993,19 @@ def test_get_dataset(self, file_handler): expected = self._exp_data_array() xr.testing.assert_equal(xarr, expected) assert "raw_metadata" not in xarr.attrs - assert file_handler.start_time == datetime(2006, 1, 1, 12, 15, 0) - assert file_handler.end_time == datetime(2006, 1, 1, 12, 30, 0) + assert file_handler.start_time == dt.datetime(2006, 1, 1, 12, 15, 0) + assert file_handler.end_time == dt.datetime(2006, 1, 1, 12, 30, 0) assert_attrs_equal(xarr.attrs, expected.attrs, tolerance=1e-4) def test_time(self, file_handler): """Test start/end nominal/observation time handling.""" - assert datetime(2006, 1, 1, 12, 15, 9, 304888) == file_handler.observation_start_time - assert datetime(2006, 1, 1, 12, 15,) == file_handler.start_time + assert dt.datetime(2006, 1, 1, 12, 15, 9, 304888) == file_handler.observation_start_time + assert dt.datetime(2006, 1, 1, 12, 15,) == file_handler.start_time assert file_handler.start_time == file_handler.nominal_start_time - assert datetime(2006, 1, 1, 12, 27, 9, 304888) == file_handler.observation_end_time + assert dt.datetime(2006, 1, 1, 12, 27, 9, 304888) == file_handler.observation_end_time assert file_handler.end_time == file_handler.nominal_end_time - assert datetime(2006, 1, 1, 12, 30,) == file_handler.end_time + assert dt.datetime(2006, 1, 1, 12, 30,) == file_handler.end_time def test_repeat_cycle_duration(self, file_handler): """Test repeat cycle handling for FD or ReduscedScan.""" @@ -1035,10 +1035,10 @@ def _exp_data_array(): "projection_altitude": 35785831.0 }, "time_parameters": { - "nominal_start_time": datetime(2006, 1, 1, 12, 15, 0), - "nominal_end_time": datetime(2006, 1, 1, 12, 30, 0), - "observation_start_time": datetime(2006, 1, 1, 12, 15, 9, 304888), - "observation_end_time": datetime(2006, 1, 1, 12, 27, 9, 304888), + "nominal_start_time": dt.datetime(2006, 1, 1, 12, 15, 0), + "nominal_end_time": dt.datetime(2006, 1, 1, 12, 30, 0), + "observation_start_time": dt.datetime(2006, 1, 1, 12, 15, 9, 304888), + "observation_end_time": dt.datetime(2006, 1, 1, 12, 27, 9, 304888), }, "georef_offset_corrected": True, "platform_name": "MSG-3", diff --git a/satpy/tests/reader_tests/test_seviri_l1b_nc.py b/satpy/tests/reader_tests/test_seviri_l1b_nc.py index cd5e2c713f..d77933b9a0 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_nc.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_nc.py @@ -17,7 +17,7 @@ # satpy. If not, see . 
"""The HRIT msg reader tests package.""" -from datetime import datetime +import datetime as dt from unittest import mock import numpy as np @@ -34,7 +34,7 @@ def to_cds_time(time): """Convert datetime to (days, msecs) since 1958-01-01.""" - if isinstance(time, datetime): + if isinstance(time, dt.datetime): time = np.datetime64(time) t0 = np.datetime64("1958-01-01 00:00") delta = time - t0 @@ -62,13 +62,13 @@ def _get_fake_dataset(self, counts, h5netcdf): line_validity = np.repeat([3, 3], 11).reshape(2, 11) line_geom_radio_quality = np.repeat([4, 4], 11).reshape(2, 11) orbit_poly_start_day, orbit_poly_start_msec = to_cds_time( - np.array([datetime(2019, 12, 31, 18), - datetime(2019, 12, 31, 22)], + np.array([dt.datetime(2019, 12, 31, 18), + dt.datetime(2019, 12, 31, 22)], dtype="datetime64") ) orbit_poly_end_day, orbit_poly_end_msec = to_cds_time( - np.array([datetime(2019, 12, 31, 22), - datetime(2020, 1, 1, 2)], + np.array([dt.datetime(2019, 12, 31, 22), + dt.datetime(2020, 1, 1, 2)], dtype="datetime64") ) counts = counts.rename({ @@ -325,10 +325,10 @@ def test_get_dataset(self, file_handler, channel, calibration, mask_bad_quality_ "projection_altitude": 35785831.0 }, "time_parameters": { - "nominal_start_time": datetime(2020, 1, 1, 0, 0), - "nominal_end_time": datetime(2020, 1, 1, 0, 0), - "observation_start_time": datetime(2020, 1, 1, 0, 0), - "observation_end_time": datetime(2020, 1, 1, 0, 0), + "nominal_start_time": dt.datetime(2020, 1, 1, 0, 0), + "nominal_end_time": dt.datetime(2020, 1, 1, 0, 0), + "observation_start_time": dt.datetime(2020, 1, 1, 0, 0), + "observation_end_time": dt.datetime(2020, 1, 1, 0, 0), }, "georef_offset_corrected": True, "platform_name": "Meteosat-11", @@ -352,13 +352,13 @@ def test_get_dataset(self, file_handler, channel, calibration, mask_bad_quality_ def test_time(self, file_handler): """Test start/end nominal/observation time handling.""" - assert datetime(2020, 1, 1, 0, 0) == file_handler.observation_start_time - assert datetime(2020, 1, 1, 0, 0) == file_handler.start_time + assert dt.datetime(2020, 1, 1, 0, 0) == file_handler.observation_start_time + assert dt.datetime(2020, 1, 1, 0, 0) == file_handler.start_time assert file_handler.start_time == file_handler.nominal_start_time - assert datetime(2020, 1, 1, 0, 0) == file_handler.observation_end_time + assert dt.datetime(2020, 1, 1, 0, 0) == file_handler.observation_end_time assert file_handler.end_time == file_handler.nominal_end_time - assert datetime(2020, 1, 1, 0, 0) == file_handler.end_time + assert dt.datetime(2020, 1, 1, 0, 0) == file_handler.end_time def test_repeat_cycle_duration(self, file_handler): """Test repeat cycle handling for FD or ReduscedScan.""" diff --git a/satpy/tests/reader_tests/test_seviri_l2_bufr.py b/satpy/tests/reader_tests/test_seviri_l2_bufr.py index ec3fdf7b56..9a6e6e6f83 100644 --- a/satpy/tests/reader_tests/test_seviri_l2_bufr.py +++ b/satpy/tests/reader_tests/test_seviri_l2_bufr.py @@ -17,9 +17,9 @@ # satpy. If not, see . 
"""Unittesting the SEVIRI L2 BUFR reader.""" +import datetime as dt import sys import unittest -from datetime import datetime from unittest import mock import dask.array as da @@ -37,7 +37,7 @@ "spacecraft": "MSG2", "server": "TESTSERVER"} MPEF_PRODUCT_HEADER = { - "NominalTime": datetime(2019, 11, 6, 18, 0), + "NominalTime": dt.datetime(2019, 11, 6, 18, 0), "SpacecraftName": "09", "RectificationLongitude": "E0455" } diff --git a/satpy/tests/reader_tests/test_sgli_l1b.py b/satpy/tests/reader_tests/test_sgli_l1b.py index 7f5fffa70c..db09e04edb 100644 --- a/satpy/tests/reader_tests/test_sgli_l1b.py +++ b/satpy/tests/reader_tests/test_sgli_l1b.py @@ -1,6 +1,7 @@ """Tests for the SGLI L1B backend.""" + +import datetime as dt import sys -from datetime import datetime, timedelta import dask import h5py @@ -9,8 +10,8 @@ from satpy.readers.sgli_l1b import HDF5SGLI -START_TIME = datetime.now() -END_TIME = START_TIME + timedelta(minutes=5) +START_TIME = dt.datetime.now() +END_TIME = START_TIME + dt.timedelta(minutes=5) FULL_KM_ARRAY = np.arange(1955 * 1250, dtype=np.uint16).reshape((1955, 1250)) MASK = 16383 LON_LAT_ARRAY = np.arange(197 * 126, dtype=np.float32).reshape((197, 126)) @@ -168,14 +169,14 @@ def test_start_time(sgli_vn_file): """Test that the start time is extracted.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) microseconds = START_TIME.microsecond % 1000 - assert handler.start_time == START_TIME - timedelta(microseconds=microseconds) + assert handler.start_time == START_TIME - dt.timedelta(microseconds=microseconds) def test_end_time(sgli_vn_file): """Test that the end time is extracted.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) microseconds = END_TIME.microsecond % 1000 - assert handler.end_time == END_TIME - timedelta(microseconds=microseconds) + assert handler.end_time == END_TIME - dt.timedelta(microseconds=microseconds) def test_get_dataset_counts(sgli_vn_file): """Test that counts can be extracted from a file.""" diff --git a/satpy/tests/reader_tests/test_slstr_l1b.py b/satpy/tests/reader_tests/test_slstr_l1b.py index b6784d4e2b..becc1455b2 100644 --- a/satpy/tests/reader_tests/test_slstr_l1b.py +++ b/satpy/tests/reader_tests/test_slstr_l1b.py @@ -15,10 +15,12 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Module for testing the satpy.readers.nc_slstr module.""" + +import datetime as dt import unittest import unittest.mock as mock -from datetime import datetime import numpy as np import pytest @@ -136,10 +138,10 @@ def test_instantiate(self, bvs_, xr_): bvs_.return_value = self.FakeSpl xr_.open_dataset.return_value = self.fake_dataset - good_start = datetime.strptime(self.start_time, - "%Y-%m-%dT%H:%M:%S.%fZ") - good_end = datetime.strptime(self.end_time, - "%Y-%m-%dT%H:%M:%S.%fZ") + good_start = dt.datetime.strptime(self.start_time, + "%Y-%m-%dT%H:%M:%S.%fZ") + good_end = dt.datetime.strptime(self.end_time, + "%Y-%m-%dT%H:%M:%S.%fZ") ds_id = make_dataid(name="foo", calibration="radiance", stripe="a", view="nadir") diff --git a/satpy/tests/reader_tests/test_smos_l2_wind.py b/satpy/tests/reader_tests/test_smos_l2_wind.py index 519030447b..409feb62ad 100644 --- a/satpy/tests/reader_tests/test_smos_l2_wind.py +++ b/satpy/tests/reader_tests/test_smos_l2_wind.py @@ -18,9 +18,9 @@ # Satpy. If not, see . 
"""Module for testing the satpy.readers.smos_l2_wind module.""" +import datetime as dt import os import unittest -from datetime import datetime from unittest import mock import numpy as np @@ -35,8 +35,8 @@ class FakeNetCDF4FileHandlerSMOSL2WIND(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" from xarray import DataArray - dt_s = filename_info.get("start_time", datetime(2020, 4, 22, 12, 0, 0)) - dt_e = filename_info.get("end_time", datetime(2020, 4, 22, 12, 0, 0)) + dt_s = filename_info.get("start_time", dt.datetime(2020, 4, 22, 12, 0, 0)) + dt_e = filename_info.get("end_time", dt.datetime(2020, 4, 22, 12, 0, 0)) if filetype_info["file_type"] == "smos_l2_wind": file_content = { diff --git a/satpy/tests/reader_tests/test_tropomi_l2.py b/satpy/tests/reader_tests/test_tropomi_l2.py index 7305bf365c..4bdf3f67d2 100644 --- a/satpy/tests/reader_tests/test_tropomi_l2.py +++ b/satpy/tests/reader_tests/test_tropomi_l2.py @@ -16,11 +16,12 @@ # # You should have received a copy of the GNU General Public License along with # Satpy. If not, see . + """Module for testing the satpy.readers.tropomi_l2 module.""" +import datetime as dt import os import unittest -from datetime import datetime, timedelta from unittest import mock import numpy as np @@ -41,13 +42,13 @@ class FakeNetCDF4FileHandlerTL2(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" - dt_s = filename_info.get("start_time", datetime(2016, 1, 1, 12, 0, 0)) - dt_e = filename_info.get("end_time", datetime(2016, 1, 1, 12, 0, 0)) + dt_s = filename_info.get("start_time", dt.datetime(2016, 1, 1, 12, 0, 0)) + dt_e = filename_info.get("end_time", dt.datetime(2016, 1, 1, 12, 0, 0)) if filetype_info["file_type"] == "tropomi_l2": file_content = { - "/attr/time_coverage_start": (dt_s+timedelta(minutes=22)).strftime("%Y-%m-%dT%H:%M:%SZ"), - "/attr/time_coverage_end": (dt_e-timedelta(minutes=22)).strftime("%Y-%m-%dT%H:%M:%SZ"), + "/attr/time_coverage_start": (dt_s+dt.timedelta(minutes=22)).strftime("%Y-%m-%dT%H:%M:%SZ"), + "/attr/time_coverage_end": (dt_e-dt.timedelta(minutes=22)).strftime("%Y-%m-%dT%H:%M:%SZ"), "/attr/platform_shortname": "S5P", "/attr/sensor": "TROPOMI", } @@ -141,8 +142,8 @@ def test_load_no2(self): for d in ds.values(): assert d.attrs["platform_shortname"] == "S5P" assert d.attrs["sensor"] == "tropomi" - assert d.attrs["time_coverage_start"] == datetime(2018, 7, 9, 17, 25, 34) - assert d.attrs["time_coverage_end"] == datetime(2018, 7, 9, 18, 23, 4) + assert d.attrs["time_coverage_start"] == dt.datetime(2018, 7, 9, 17, 25, 34) + assert d.attrs["time_coverage_end"] == dt.datetime(2018, 7, 9, 18, 23, 4) assert "area" in d.attrs assert d.attrs["area"] is not None assert "y" in d.dims diff --git a/satpy/tests/reader_tests/test_utils.py b/satpy/tests/reader_tests/test_utils.py index 67bdb41374..ba43688b76 100644 --- a/satpy/tests/reader_tests/test_utils.py +++ b/satpy/tests/reader_tests/test_utils.py @@ -15,11 +15,12 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """Testing of helper functions.""" +import datetime as dt import os import unittest -from datetime import datetime from unittest import mock import dask.array as da @@ -430,7 +431,7 @@ class TestSunEarthDistanceCorrection: def setup_method(self): """Create input / output arrays for the tests.""" - self.test_date = datetime(2020, 8, 15, 13, 0, 40) + self.test_date = dt.datetime(2020, 8, 15, 13, 0, 40) raw_refl = xr.DataArray(da.from_array([10., 20., 40., 1., 98., 50.]), attrs={"start_time": self.test_date, @@ -462,7 +463,7 @@ def test_get_utc_time(self): # Now check correct time is returned with utc_date passed tmp_array = self.raw_refl.copy() - new_test_date = datetime(2019, 2, 1, 15, 2, 12) + new_test_date = dt.datetime(2019, 2, 1, 15, 2, 12) utc_time = hf.get_array_date(tmp_array, new_test_date) assert utc_time == new_test_date diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index d764891760..49cf7a4885 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -21,8 +21,8 @@ """ from __future__ import annotations +import datetime as dt import shutil -from datetime import datetime, timedelta from pathlib import Path from typing import Iterable @@ -40,8 +40,8 @@ I_ROWS = 32 # one scan M_COLS = 3200 M_ROWS = 16 # one scan -START_TIME = datetime(2023, 5, 30, 17, 55, 41, 0) -END_TIME = datetime(2023, 5, 30, 17, 57, 5, 0) +START_TIME = dt.datetime(2023, 5, 30, 17, 55, 41, 0) +END_TIME = dt.datetime(2023, 5, 30, 17, 57, 5, 0) QF1_FLAG_MEANINGS = """ \tBits are listed from the MSB (bit 7) to the LSB (bit 0): \tBit Description @@ -78,7 +78,7 @@ def surface_reflectance_file(tmp_path_factory: TempPathFactory) -> Path: @pytest.fixture(scope="module") def surface_reflectance_file2(tmp_path_factory: TempPathFactory) -> Path: """Generate fake surface reflectance EDR file.""" - return _create_surface_reflectance_file(tmp_path_factory, START_TIME + timedelta(minutes=5), + return _create_surface_reflectance_file(tmp_path_factory, START_TIME + dt.timedelta(minutes=5), include_veg_indices=False) @@ -97,7 +97,7 @@ def surface_reflectance_with_veg_indices_file(tmp_path_factory: TempPathFactory) @pytest.fixture(scope="module") def surface_reflectance_with_veg_indices_file2(tmp_path_factory: TempPathFactory) -> Path: """Generate fake surface reflectance EDR file with vegetation indexes included.""" - return _create_surface_reflectance_file(tmp_path_factory, START_TIME + timedelta(minutes=5), + return _create_surface_reflectance_file(tmp_path_factory, START_TIME + dt.timedelta(minutes=5), include_veg_indices=True) @@ -110,7 +110,7 @@ def multiple_surface_reflectance_files_with_veg_indices(surface_reflectance_with def _create_surface_reflectance_file( tmp_path_factory: TempPathFactory, - start_time: datetime, + start_time: dt.datetime, include_veg_indices: bool = False, ) -> Path: fn = f"SurfRefl_v1r2_npp_s{start_time:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202305302025590.nc" diff --git a/satpy/tests/reader_tests/test_viirs_l1b.py b/satpy/tests/reader_tests/test_viirs_l1b.py index e60f83cfd0..b2a5c4b476 100644 --- a/satpy/tests/reader_tests/test_viirs_l1b.py +++ b/satpy/tests/reader_tests/test_viirs_l1b.py @@ -17,8 +17,8 @@ # satpy. If not, see . 
"""Module for testing the satpy.readers.viirs_l1b module.""" +import datetime as dt import os -from datetime import datetime, timedelta from unittest import mock import numpy as np @@ -49,7 +49,7 @@ class FakeNetCDF4FileHandlerDay(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" - dt = filename_info.get("start_time", datetime(2016, 1, 1, 12, 0, 0)) + date = filename_info.get("start_time", dt.datetime(2016, 1, 1, 12, 0, 0)) file_type = filename[:5].lower() num_lines = DEFAULT_FILE_SHAPE[0] num_pixels = DEFAULT_FILE_SHAPE[1] @@ -60,8 +60,8 @@ def get_test_content(self, filename, filename_info, filetype_info): "/dimension/number_of_lines": num_lines, "/dimension/number_of_pixels": num_pixels, "/dimension/number_of_LUT_values": num_luts, - "/attr/time_coverage_start": dt.strftime("%Y-%m-%dT%H:%M:%S.000Z"), - "/attr/time_coverage_end": (dt + timedelta(minutes=6)).strftime("%Y-%m-%dT%H:%M:%S.000Z"), + "/attr/time_coverage_start": date.strftime("%Y-%m-%dT%H:%M:%S.000Z"), + "/attr/time_coverage_end": (date + dt.timedelta(minutes=6)).strftime("%Y-%m-%dT%H:%M:%S.000Z"), "/attr/orbit_number": 26384, "/attr/instrument": "VIIRS", "/attr/platform": "Suomi-NPP", diff --git a/satpy/tests/reader_tests/test_viirs_l2.py b/satpy/tests/reader_tests/test_viirs_l2.py index 79884f3d4f..01801535ed 100644 --- a/satpy/tests/reader_tests/test_viirs_l2.py +++ b/satpy/tests/reader_tests/test_viirs_l2.py @@ -1,6 +1,7 @@ """Module for testing the satpy.readers.viirs_l2 module.""" + +import datetime as dt import os -from datetime import datetime, timedelta from unittest import mock import numpy as np @@ -27,7 +28,7 @@ class FakeNetCDF4FileHandlerVIIRSL2(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" - dt = filename_info.get("start_time", datetime(2023, 12, 30, 22, 30, 0)) + date = filename_info.get("start_time", dt.datetime(2023, 12, 30, 22, 30, 0)) file_type = filename[:6] num_lines = DEFAULT_FILE_SHAPE[0] num_pixels = DEFAULT_FILE_SHAPE[1] @@ -36,8 +37,8 @@ def get_test_content(self, filename, filename_info, filetype_info): "/dimension/number_of_scans": num_scans, "/dimension/number_of_lines": num_lines, "/dimension/number_of_pixels": num_pixels, - "/attr/time_coverage_start": dt.strftime("%Y-%m-%dT%H:%M:%S.000Z"), - "/attr/time_coverage_end": (dt + timedelta(minutes=6)).strftime( + "/attr/time_coverage_start": date.strftime("%Y-%m-%dT%H:%M:%S.000Z"), + "/attr/time_coverage_end": (date + dt.timedelta(minutes=6)).strftime( "%Y-%m-%dT%H:%M:%S.000Z" ), "/attr/orbit_number": 26384, diff --git a/satpy/tests/reader_tests/test_viirs_sdr.py b/satpy/tests/reader_tests/test_viirs_sdr.py index 952224daaf..2758ceb81c 100644 --- a/satpy/tests/reader_tests/test_viirs_sdr.py +++ b/satpy/tests/reader_tests/test_viirs_sdr.py @@ -354,12 +354,12 @@ def test_init_start_time_is_nodate(self): def test_init_start_time_beyond(self): """Test basic init with start_time after the provided files.""" - from datetime import datetime + import datetime as dt from satpy.readers import load_reader r = load_reader(self.reader_configs, filter_parameters={ - "start_time": datetime(2012, 2, 26) + "start_time": dt.datetime(2012, 2, 26) }) fhs = r.create_filehandlers([ "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", @@ -368,12 +368,12 @@ def test_init_start_time_beyond(self): def test_init_end_time_beyond(self): """Test basic init with end_time before the 
provided files.""" - from datetime import datetime + import datetime as dt from satpy.readers import load_reader r = load_reader(self.reader_configs, filter_parameters={ - "end_time": datetime(2012, 2, 24) + "end_time": dt.datetime(2012, 2, 24) }) fhs = r.create_filehandlers([ "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", @@ -382,14 +382,14 @@ def test_init_end_time_beyond(self): def test_init_start_end_time(self): """Test basic init with end_time before the provided files.""" - from datetime import datetime + import datetime as dt from satpy.readers import load_reader r = load_reader(self.reader_configs, filter_parameters={ - "start_time": datetime(2012, 2, 24), - "end_time": datetime(2012, 2, 26) + "start_time": dt.datetime(2012, 2, 24), + "end_time": dt.datetime(2012, 2, 26) }) loadables = r.select_files_from_pathnames([ "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", diff --git a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py index ba9b83d707..b03052ea30 100644 --- a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py +++ b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . + """The viirs_vgac_l1b_nc reader tests package. This version tests the readers for VIIIRS VGAC data preliminary version. @@ -22,7 +23,7 @@ """ -from datetime import datetime +import datetime as dt import numpy as np import pytest @@ -33,7 +34,7 @@ @pytest.fixture() def nc_filename(tmp_path): """Create an nc test data file and return its filename.""" - now = datetime.utcnow() + now = dt.datetime.utcnow() filename = f"VGAC_VJ10XMOD_A{now:%Y%j_%H%M}_n004946_K005.nc" filename_str = str(tmp_path / filename) # Create test data @@ -107,10 +108,10 @@ def test_read_vgac(self, nc_filename): assert (diff_e > np.timedelta64(-5, "us")) assert (scn_["M05"][0, 0] == 100) assert (scn_["M15"][0, 0] == 400) - assert scn_.start_time == datetime(year=2023, month=3, day=28, - hour=9, minute=8, second=7) - assert scn_.end_time == datetime(year=2023, month=3, day=28, - hour=10, minute=11, second=12) + assert scn_.start_time == dt.datetime(year=2023, month=3, day=28, + hour=9, minute=8, second=7) + assert scn_.end_time == dt.datetime(year=2023, month=3, day=28, + hour=10, minute=11, second=12) def test_dt64_to_datetime(self): """Test datetime conversion branch.""" @@ -118,8 +119,8 @@ def test_dt64_to_datetime(self): fh = VGACFileHandler(filename="", filename_info={"start_time": "2023-03-28T09:08:07"}, filetype_info="") - in_dt = datetime(year=2023, month=3, day=28, - hour=9, minute=8, second=7) + in_dt = dt.datetime(year=2023, month=3, day=28, + hour=9, minute=8, second=7) out_dt = fh.dt64_to_datetime(in_dt) assert out_dt == in_dt diff --git a/satpy/tests/scene_tests/test_conversions.py b/satpy/tests/scene_tests/test_conversions.py index 9b0dd9098e..4490903880 100644 --- a/satpy/tests/scene_tests/test_conversions.py +++ b/satpy/tests/scene_tests/test_conversions.py @@ -13,8 +13,10 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """Unit tests for Scene conversion functionality.""" -from datetime import datetime + +import datetime as dt import pytest import xarray as xr @@ -61,7 +63,7 @@ def test_geoviews_basic_with_area(self): {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, 2, 2, [-200, -200, 200, 200]) scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1), + attrs={"start_time": dt.datetime(2018, 1, 1), "area": area}) gv_obj = scn.to_geoviews() # we assume that if we got something back, geoviews can use it @@ -75,7 +77,7 @@ def test_geoviews_basic_with_swath(self): lats = xr.DataArray(da.zeros((2, 2))) area = SwathDefinition(lons, lats) scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1), + attrs={"start_time": dt.datetime(2018, 1, 1), "area": area}) gv_obj = scn.to_geoviews() # we assume that if we got something back, geoviews can use it @@ -89,7 +91,7 @@ def test_hvplot_basic_with_area(self): {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, 2, 2, [-200, -200, 200, 200]) scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1), + attrs={"start_time": dt.datetime(2018, 1, 1), "area": area, "units": "m"}) hv_obj = scn.to_hvplot() # we assume that if we got something back, hvplot can use it @@ -103,13 +105,13 @@ def test_hvplot_rgb_with_area(self): {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, 2, 2, [-200, -200, 200, 200]) scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1), + attrs={"start_time": dt.datetime(2018, 1, 1), "area": area, "units": "m"}) scn["ds2"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1), + attrs={"start_time": dt.datetime(2018, 1, 1), "area": area, "units": "m"}) scn["ds3"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1), + attrs={"start_time": dt.datetime(2018, 1, 1), "area": area, "units": "m"}) hv_obj = scn.to_hvplot() # we assume that if we got something back, hvplot can use it @@ -123,7 +125,7 @@ def test_hvplot_basic_with_swath(self): latitude = xr.DataArray(da.zeros((2, 2))) area = SwathDefinition(longitude, latitude) scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1), + attrs={"start_time": dt.datetime(2018, 1, 1), "area": area, "units": "m"}) hv_obj = scn.to_hvplot() # we assume that if we got something back, hvplot can use it @@ -150,7 +152,7 @@ def single_area_scn(self): 2, 2, [-200, -200, 200, 200]) data_array = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1), "area": area}) + attrs={"start_time": dt.datetime(2018, 1, 1), "area": area}) scn = Scene() scn["var1"] = data_array return scn @@ -169,10 +171,10 @@ def multi_area_scn(self): data_array1 = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1), "area": area1}) + attrs={"start_time": dt.datetime(2018, 1, 1), "area": area1}) data_array2 = xr.DataArray(da.zeros((4, 4), chunks=-1), dims=("y", "x"), - attrs={"start_time": datetime(2018, 1, 1), "area": area2}) + attrs={"start_time": dt.datetime(2018, 1, 1), "area": area2}) scn = Scene() scn["var1"] = data_array1 scn["var2"] = data_array2 diff --git a/satpy/tests/scene_tests/test_saving.py b/satpy/tests/scene_tests/test_saving.py index 
32c6ff61c2..b67f41cc2e 100644 --- a/satpy/tests/scene_tests/test_saving.py +++ b/satpy/tests/scene_tests/test_saving.py @@ -13,9 +13,11 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Unit tests for saving-related functionality in scene.py.""" + +import datetime as dt import os -from datetime import datetime from unittest import mock import pytest @@ -39,7 +41,7 @@ def test_save_datasets_default(self, tmp_path): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", - "start_time": datetime(2018, 1, 1, 0, 0, 0)} + "start_time": dt.datetime(2018, 1, 1, 0, 0, 0)} ) scn = Scene() scn["test"] = ds1 @@ -52,7 +54,7 @@ def test_save_datasets_by_ext(self, tmp_path): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", - "start_time": datetime(2018, 1, 1, 0, 0, 0)} + "start_time": dt.datetime(2018, 1, 1, 0, 0, 0)} ) scn = Scene() scn["test"] = ds1 @@ -70,7 +72,7 @@ def test_save_datasets_bad_writer(self, tmp_path): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", - "start_time": datetime.utcnow()} + "start_time": dt.datetime.utcnow()} ) scn = Scene() scn["test"] = ds1 @@ -98,7 +100,7 @@ def test_save_dataset_default(self, tmp_path): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", - "start_time": datetime(2018, 1, 1, 0, 0, 0)} + "start_time": dt.datetime(2018, 1, 1, 0, 0, 0)} ) scn = Scene() scn["test"] = ds1 diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index c075755d17..b8af090121 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -15,11 +15,12 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Tests for compositors in composites/__init__.py.""" +import datetime as dt import os import unittest -from datetime import datetime from unittest import mock import dask @@ -175,7 +176,7 @@ def setup_method(self): {"proj": "merc"}, 2, 2, (-2000, -2000, 2000, 2000)) attrs = {"area": area, - "start_time": datetime(2018, 1, 1, 18), + "start_time": dt.datetime(2018, 1, 1, 18), "modifiers": tuple(), "resolution": 1000, "calibration": "reflectance", @@ -347,7 +348,7 @@ def setUp(self): {"proj": "merc"}, 2, 2, (-2000, -2000, 2000, 2000)) attrs = {"area": area, - "start_time": datetime(2018, 1, 1, 18), + "start_time": dt.datetime(2018, 1, 1, 18), "modifiers": tuple(), "resolution": 1000, "name": "test_vis"} @@ -430,7 +431,7 @@ class TestDayNightCompositor(unittest.TestCase): def setUp(self): """Create test data.""" bands = ["R", "G", "B"] - start_time = datetime(2018, 1, 1, 18, 0, 0) + start_time = dt.datetime(2018, 1, 1, 18, 0, 0) # RGB a = np.zeros((3, 2, 2), dtype=np.float32) diff --git a/satpy/tests/test_dataset.py b/satpy/tests/test_dataset.py index 82b3a6c1cd..6ca3b25d72 100644 --- a/satpy/tests/test_dataset.py +++ b/satpy/tests/test_dataset.py @@ -13,10 +13,11 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
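# [Editor's note] A hedged sketch of why the saving tests above put
# ``start_time`` in ``attrs``: writers template output filenames from dataset
# attributes in this style (the pattern here is illustrative only).
import datetime as dt

attrs = {"name": "test", "start_time": dt.datetime(2018, 1, 1, 0, 0, 0)}
filename = "{name}_{start_time:%Y%m%d_%H%M%S}.tif".format(**attrs)
assert filename == "test_20180101_000000.tif"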
+ """Test objects and functions in the dataset module.""" +import datetime as dt import unittest -from datetime import datetime import numpy as np import pytest @@ -103,38 +104,38 @@ def setUp(self): """Set up the test case.""" # The times need to be in ascending order (oldest first) self.start_time_dts = ( - {"start_time": datetime(2018, 2, 1, 11, 58, 0)}, - {"start_time": datetime(2018, 2, 1, 11, 59, 0)}, - {"start_time": datetime(2018, 2, 1, 12, 0, 0)}, - {"start_time": datetime(2018, 2, 1, 12, 1, 0)}, - {"start_time": datetime(2018, 2, 1, 12, 2, 0)}, + {"start_time": dt.datetime(2018, 2, 1, 11, 58, 0)}, + {"start_time": dt.datetime(2018, 2, 1, 11, 59, 0)}, + {"start_time": dt.datetime(2018, 2, 1, 12, 0, 0)}, + {"start_time": dt.datetime(2018, 2, 1, 12, 1, 0)}, + {"start_time": dt.datetime(2018, 2, 1, 12, 2, 0)}, ) self.end_time_dts = ( - {"end_time": datetime(2018, 2, 1, 11, 58, 0)}, - {"end_time": datetime(2018, 2, 1, 11, 59, 0)}, - {"end_time": datetime(2018, 2, 1, 12, 0, 0)}, - {"end_time": datetime(2018, 2, 1, 12, 1, 0)}, - {"end_time": datetime(2018, 2, 1, 12, 2, 0)}, + {"end_time": dt.datetime(2018, 2, 1, 11, 58, 0)}, + {"end_time": dt.datetime(2018, 2, 1, 11, 59, 0)}, + {"end_time": dt.datetime(2018, 2, 1, 12, 0, 0)}, + {"end_time": dt.datetime(2018, 2, 1, 12, 1, 0)}, + {"end_time": dt.datetime(2018, 2, 1, 12, 2, 0)}, ) self.other_time_dts = ( - {"other_time": datetime(2018, 2, 1, 11, 58, 0)}, - {"other_time": datetime(2018, 2, 1, 11, 59, 0)}, - {"other_time": datetime(2018, 2, 1, 12, 0, 0)}, - {"other_time": datetime(2018, 2, 1, 12, 1, 0)}, - {"other_time": datetime(2018, 2, 1, 12, 2, 0)}, + {"other_time": dt.datetime(2018, 2, 1, 11, 58, 0)}, + {"other_time": dt.datetime(2018, 2, 1, 11, 59, 0)}, + {"other_time": dt.datetime(2018, 2, 1, 12, 0, 0)}, + {"other_time": dt.datetime(2018, 2, 1, 12, 1, 0)}, + {"other_time": dt.datetime(2018, 2, 1, 12, 2, 0)}, ) self.start_time_dts_with_none = ( {"start_time": None}, - {"start_time": datetime(2018, 2, 1, 11, 59, 0)}, - {"start_time": datetime(2018, 2, 1, 12, 0, 0)}, - {"start_time": datetime(2018, 2, 1, 12, 1, 0)}, - {"start_time": datetime(2018, 2, 1, 12, 2, 0)}, + {"start_time": dt.datetime(2018, 2, 1, 11, 59, 0)}, + {"start_time": dt.datetime(2018, 2, 1, 12, 0, 0)}, + {"start_time": dt.datetime(2018, 2, 1, 12, 1, 0)}, + {"start_time": dt.datetime(2018, 2, 1, 12, 2, 0)}, ) self.end_time_dts_with_none = ( - {"end_time": datetime(2018, 2, 1, 11, 58, 0)}, - {"end_time": datetime(2018, 2, 1, 11, 59, 0)}, - {"end_time": datetime(2018, 2, 1, 12, 0, 0)}, - {"end_time": datetime(2018, 2, 1, 12, 1, 0)}, + {"end_time": dt.datetime(2018, 2, 1, 11, 58, 0)}, + {"end_time": dt.datetime(2018, 2, 1, 11, 59, 0)}, + {"end_time": dt.datetime(2018, 2, 1, 12, 0, 0)}, + {"end_time": dt.datetime(2018, 2, 1, 12, 1, 0)}, {"end_time": None}, ) @@ -142,11 +143,11 @@ def test_average_datetimes(self): """Test the average_datetimes helper function.""" from satpy.dataset.metadata import average_datetimes dts = ( - datetime(2018, 2, 1, 11, 58, 0), - datetime(2018, 2, 1, 11, 59, 0), - datetime(2018, 2, 1, 12, 0, 0), - datetime(2018, 2, 1, 12, 1, 0), - datetime(2018, 2, 1, 12, 2, 0), + dt.datetime(2018, 2, 1, 11, 58, 0), + dt.datetime(2018, 2, 1, 11, 59, 0), + dt.datetime(2018, 2, 1, 12, 0, 0), + dt.datetime(2018, 2, 1, 12, 1, 0), + dt.datetime(2018, 2, 1, 12, 2, 0), ) ret = average_datetimes(dts) assert dts[2] == ret @@ -373,10 +374,10 @@ def test_combine_dicts_close(): "c": [1, 2, 3], "d": { "e": np.str_("bar"), - "f": datetime(2020, 1, 1, 12, 15, 30), + "f": 
dt.datetime(2020, 1, 1, 12, 15, 30), "g": np.array([1, 2, 3]), }, - "h": np.array([datetime(2020, 1, 1), datetime(2020, 1, 1)]) + "h": np.array([dt.datetime(2020, 1, 1), dt.datetime(2020, 1, 1)]) } } attrs_close = { @@ -386,10 +387,10 @@ def test_combine_dicts_close(): "c": np.array([1, 2, 3]) + 1E-12, "d": { "e": np.str_("bar"), - "f": datetime(2020, 1, 1, 12, 15, 30), + "f": dt.datetime(2020, 1, 1, 12, 15, 30), "g": np.array([1, 2, 3]) + 1E-12 }, - "h": np.array([datetime(2020, 1, 1), datetime(2020, 1, 1)]) + "h": np.array([dt.datetime(2020, 1, 1), dt.datetime(2020, 1, 1)]) } } test_metadata = [attrs, attrs_close] diff --git a/satpy/tests/test_file_handlers.py b/satpy/tests/test_file_handlers.py index 925da3e561..7e1424414e 100644 --- a/satpy/tests/test_file_handlers.py +++ b/satpy/tests/test_file_handlers.py @@ -15,10 +15,11 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """test file handler baseclass.""" +import datetime as dt import unittest -from datetime import datetime, timedelta from unittest import mock import numpy as np @@ -49,8 +50,8 @@ def setUp(self): """Set up the test.""" self.fh = BaseFileHandler( "filename", {"filename_info": "bla"}, "filetype_info") - self.early_time = datetime(2024, 2, 12, 11, 00) - self.late_time = datetime(2024, 2, 12, 12, 00) + self.early_time = dt.datetime(2024, 2, 12, 11, 00) + self.late_time = dt.datetime(2024, 2, 12, 12, 00) def test_combine_times(self): """Combine times.""" @@ -161,13 +162,13 @@ def test_combine_orbital_parameters(self): def test_combine_time_parameters(self): """Combine times in 'time_parameters.""" time_params1 = { - "nominal_start_time": datetime(2020, 1, 1, 12, 0, 0), - "nominal_end_time": datetime(2020, 1, 1, 12, 2, 30), - "observation_start_time": datetime(2020, 1, 1, 12, 0, 2, 23821), - "observation_end_time": datetime(2020, 1, 1, 12, 2, 23, 12348), + "nominal_start_time": dt.datetime(2020, 1, 1, 12, 0, 0), + "nominal_end_time": dt.datetime(2020, 1, 1, 12, 2, 30), + "observation_start_time": dt.datetime(2020, 1, 1, 12, 0, 2, 23821), + "observation_end_time": dt.datetime(2020, 1, 1, 12, 2, 23, 12348), } time_params2 = {} - time_shift = timedelta(seconds=1.5) + time_shift = dt.timedelta(seconds=1.5) for key, value in time_params1.items(): time_params2[key] = value + time_shift res = self.fh.combine_info([ @@ -175,10 +176,10 @@ def test_combine_time_parameters(self): {"time_parameters": time_params2} ]) res_time_params = res["time_parameters"] - assert res_time_params["nominal_start_time"] == datetime(2020, 1, 1, 12, 0, 0) - assert res_time_params["nominal_end_time"] == datetime(2020, 1, 1, 12, 2, 31, 500000) - assert res_time_params["observation_start_time"] == datetime(2020, 1, 1, 12, 0, 2, 23821) - assert res_time_params["observation_end_time"] == datetime(2020, 1, 1, 12, 2, 24, 512348) + assert res_time_params["nominal_start_time"] == dt.datetime(2020, 1, 1, 12, 0, 0) + assert res_time_params["nominal_end_time"] == dt.datetime(2020, 1, 1, 12, 2, 31, 500000) + assert res_time_params["observation_start_time"] == dt.datetime(2020, 1, 1, 12, 0, 2, 23821) + assert res_time_params["observation_end_time"] == dt.datetime(2020, 1, 1, 12, 2, 24, 512348) def test_file_is_kept_intact(self): """Test that the file object passed (string, path, or other) is kept intact.""" diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index 0c8eb51b3f..3bc7ca91c8 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -15,9 +15,11 @@ # # 
You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Tests for modifiers in modifiers/__init__.py.""" + +import datetime as dt import unittest -from datetime import datetime from unittest import mock import dask.array as da @@ -57,7 +59,7 @@ def _sunz_stacked_area_def(): def _shared_sunz_attrs(area_def): attrs = {"area": area_def, - "start_time": datetime(2018, 1, 1, 18), + "start_time": dt.datetime(2018, 1, 1, 18), "modifiers": tuple(), "name": "test_vis"} return attrs @@ -591,7 +593,7 @@ def test_call(self): lats[1, 1] = np.inf lats = da.from_array(lats, chunks=5) area = SwathDefinition(lons, lats) - stime = datetime(2020, 1, 1, 12, 0, 0) + stime = dt.datetime(2020, 1, 1, 12, 0, 0) orb_params = { "satellite_actual_altitude": 12345678, "nadir_longitude": 0.0, diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index db3d1ccb1d..0f2244348d 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -15,10 +15,12 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Test classes and functions in the readers/__init__.py module.""" import builtins import contextlib +import datetime as dt import os import sys import unittest @@ -399,43 +401,37 @@ def test_missing_requirements(self, *mocks): def test_all_filtered(self): """Test behaviour if no file matches the filter parameters.""" - import datetime - from satpy.readers import load_readers filenames = { "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], } - filter_params = {"start_time": datetime.datetime(1970, 1, 1), - "end_time": datetime.datetime(1970, 1, 2), + filter_params = {"start_time": dt.datetime(1970, 1, 1), + "end_time": dt.datetime(1970, 1, 2), "area": None} with pytest.raises(ValueError, match="No dataset could be loaded.*"): load_readers(filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) def test_all_filtered_multiple(self): """Test behaviour if no file matches the filter parameters.""" - import datetime - from satpy.readers import load_readers filenames = { "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], "abi_l1b": ["OR_ABI-L1b-RadF-M3C01_G16_s20120561730408_e20120561741175_c20172631741218.nc"], } - filter_params = {"start_time": datetime.datetime(1970, 1, 1), - "end_time": datetime.datetime(1970, 1, 2)} + filter_params = {"start_time": dt.datetime(1970, 1, 1), + "end_time": dt.datetime(1970, 1, 2)} with pytest.raises(ValueError, match="No dataset could be loaded."): load_readers(filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) def test_almost_all_filtered(self): """Test behaviour if only one reader has datasets.""" - import datetime - from satpy.readers import load_readers filenames = { "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], "abi_l1b": ["OR_ABI-L1b-RadF-M3C01_G16_s20172631730408_e20172631741175_c20172631741218.nc"], } - filter_params = {"start_time": datetime.datetime(2012, 2, 25), - "end_time": datetime.datetime(2012, 2, 26)} + filter_params = {"start_time": dt.datetime(2012, 2, 25), + "end_time": dt.datetime(2012, 2, 26)} # viirs has data that matches the request, abi doesn't readers = load_readers(filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) assert "viirs_sdr" in readers @@ -480,11 +476,9 @@ def test_reader_other_name(self, monkeypatch, tmp_path): def 
test_reader_name_matched_start_end_time(self, viirs_file): """Test with start and end time matching the filename.""" - from datetime import datetime - ri = find_files_and_readers(reader="viirs_sdr", - start_time=datetime(2012, 2, 25, 18, 0, 0), - end_time=datetime(2012, 2, 25, 19, 0, 0), + start_time=dt.datetime(2012, 2, 25, 18, 0, 0), + end_time=dt.datetime(2012, 2, 25, 19, 0, 0), ) assert list(ri.keys()) == ["viirs_sdr"] assert ri["viirs_sdr"] == [viirs_file] @@ -494,9 +488,7 @@ def test_reader_name_matched_start_time(self, viirs_file): Start time in the middle of the file time should still match the file. """ - from datetime import datetime - - ri = find_files_and_readers(reader="viirs_sdr", start_time=datetime(2012, 2, 25, 18, 1, 30)) + ri = find_files_and_readers(reader="viirs_sdr", start_time=dt.datetime(2012, 2, 25, 18, 1, 30)) assert list(ri.keys()) == ["viirs_sdr"] assert ri["viirs_sdr"] == [viirs_file] @@ -506,20 +498,16 @@ def test_reader_name_matched_end_time(self, viirs_file): End time in the middle of the file time should still match the file. """ - from datetime import datetime - - ri = find_files_and_readers(reader="viirs_sdr", end_time=datetime(2012, 2, 25, 18, 1, 30)) + ri = find_files_and_readers(reader="viirs_sdr", end_time=dt.datetime(2012, 2, 25, 18, 1, 30)) assert list(ri.keys()) == ["viirs_sdr"] assert ri["viirs_sdr"] == [viirs_file] def test_reader_name_unmatched_start_end_time(self, viirs_file): """Test with start and end time matching the filename.""" - from datetime import datetime - with pytest.raises(ValueError, match="No supported files found"): find_files_and_readers(reader="viirs_sdr", - start_time=datetime(2012, 2, 26, 18, 0, 0), - end_time=datetime(2012, 2, 26, 19, 0, 0)) + start_time=dt.datetime(2012, 2, 26, 18, 0, 0), + end_time=dt.datetime(2012, 2, 26, 19, 0, 0)) def test_no_parameters(self, viirs_file): """Test with no limiting parameters.""" diff --git a/satpy/tests/test_writers.py b/satpy/tests/test_writers.py index bc68d767c1..701347cdbe 100644 --- a/satpy/tests/test_writers.py +++ b/satpy/tests/test_writers.py @@ -18,7 +18,7 @@ from __future__ import annotations -import datetime +import datetime as dt import os import shutil import unittest @@ -546,7 +546,6 @@ class TestComputeWriterResults(unittest.TestCase): def setUp(self): """Create temporary directory to save files to and a mock scene.""" import tempfile - from datetime import datetime from pyresample.geometry import AreaDefinition @@ -560,7 +559,7 @@ def setUp(self): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", - "start_time": datetime(2018, 1, 1, 0, 0, 0), + "start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "area": adef} ) self.scn = Scene() @@ -655,7 +654,6 @@ class TestBaseWriter: def setup_method(self): """Set up tests.""" import tempfile - from datetime import datetime from pyresample.geometry import AreaDefinition @@ -670,7 +668,7 @@ def setup_method(self): dims=("y", "x"), attrs={ "name": "test", - "start_time": datetime(2018, 1, 1, 0, 0, 0), + "start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "sensor": "fake_sensor", "area": adef, } @@ -881,7 +879,7 @@ def test_group_results_by_output_file(tmp_path): "kraken_depth": dat}, daskify=True, area=fake_area, - common_attrs={"start_time": datetime.datetime(2022, 11, 16, 13, 27)}) + common_attrs={"start_time": dt.datetime(2022, 11, 16, 13, 27)}) # NB: even if compute=False, ``save_datasets`` creates (empty) files (sources, targets) = fake_scene.save_datasets( filename=os.fspath(tmp_path / "test-{name}.tif"), diff --git 
a/satpy/tests/test_yaml_reader.py b/satpy/tests/test_yaml_reader.py index 0b0293e453..699f6619b6 100644 --- a/satpy/tests/test_yaml_reader.py +++ b/satpy/tests/test_yaml_reader.py @@ -15,12 +15,13 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Testing the yaml_reader module.""" +import datetime as dt import os import random import unittest -from datetime import datetime from tempfile import mkdtemp from unittest.mock import MagicMock, call, patch @@ -182,8 +183,8 @@ def __init__(self, filename, filename_info, filetype_info): """Initialize the dummy reader.""" super(DummyReader, self).__init__( filename, filename_info, filetype_info) - self._start_time = datetime(2000, 1, 1, 12, 1) - self._end_time = datetime(2000, 1, 1, 12, 2) + self._start_time = dt.datetime(2000, 1, 1, 12, 1) + self._end_time = dt.datetime(2000, 1, 1, 12, 2) self.metadata = {} @property @@ -227,8 +228,8 @@ def setUp(self): self.config = res_dict self.reader = yr.FileYAMLReader(self.config, filter_parameters={ - "start_time": datetime(2000, 1, 1), - "end_time": datetime(2000, 1, 2)}) + "start_time": dt.datetime(2000, 1, 1), + "end_time": dt.datetime(2000, 1, 2)}) def test_select_from_pathnames(self): """Check select_files_from_pathnames.""" @@ -280,8 +281,8 @@ def setUp(self): self.config = MHS_YAML_READER_DICT self.reader = yr.FileYAMLReader(MHS_YAML_READER_DICT, filter_parameters={ - "start_time": datetime(2000, 1, 1), - "end_time": datetime(2000, 1, 2), + "start_time": dt.datetime(2000, 1, 1), + "end_time": dt.datetime(2000, 1, 2), }) def test_custom_type_with_dict_contents_gets_parsed_correctly(self): @@ -321,8 +322,8 @@ def setUp(self): self.config = res_dict self.reader = yr.FileYAMLReader(res_dict, filter_parameters={ - "start_time": datetime(2000, 1, 1), - "end_time": datetime(2000, 1, 2), + "start_time": dt.datetime(2000, 1, 1), + "end_time": dt.datetime(2000, 1, 2), }) def test_deprecated_passing_config_files(self): @@ -362,17 +363,18 @@ def test_available_dataset_names(self): def test_filter_fh_by_time(self): """Check filtering filehandlers by time.""" - fh0 = FakeFH(datetime(1999, 12, 30), datetime(1999, 12, 31)) - fh1 = FakeFH(datetime(1999, 12, 31, 10, 0), - datetime(2000, 1, 1, 12, 30)) - fh2 = FakeFH(datetime(2000, 1, 1, 10, 0), - datetime(2000, 1, 1, 12, 30)) - fh3 = FakeFH(datetime(2000, 1, 1, 12, 30), - datetime(2000, 1, 2, 12, 30)) - fh4 = FakeFH(datetime(2000, 1, 2, 12, 30), - datetime(2000, 1, 3, 12, 30)) - fh5 = FakeFH(datetime(1999, 12, 31, 10, 0), - datetime(2000, 1, 3, 12, 30)) + fh0 = FakeFH(dt.datetime(1999, 12, 30), + dt.datetime(1999, 12, 31)) + fh1 = FakeFH(dt.datetime(1999, 12, 31, 10, 0), + dt.datetime(2000, 1, 1, 12, 30)) + fh2 = FakeFH(dt.datetime(2000, 1, 1, 10, 0), + dt.datetime(2000, 1, 1, 12, 30)) + fh3 = FakeFH(dt.datetime(2000, 1, 1, 12, 30), + dt.datetime(2000, 1, 2, 12, 30)) + fh4 = FakeFH(dt.datetime(2000, 1, 2, 12, 30), + dt.datetime(2000, 1, 3, 12, 30)) + fh5 = FakeFH(dt.datetime(1999, 12, 31, 10, 0), + dt.datetime(2000, 1, 3, 12, 30)) for idx, fh in enumerate([fh0, fh1, fh2, fh3, fh4, fh5]): res = self.reader.time_matches(fh.start_time, fh.end_time) @@ -388,8 +390,8 @@ def test_filter_fh_by_time(self): @patch("satpy.readers.yaml_reader.Boundary") def test_file_covers_area(self, bnd, adb, gad): """Test that area coverage is checked properly.""" - file_handler = FakeFH(datetime(1999, 12, 31, 10, 0), - datetime(2000, 1, 3, 12, 30)) + file_handler = FakeFH(dt.datetime(1999, 12, 31, 10, 0), + dt.datetime(2000, 1, 3, 
12, 30)) self.reader.filter_parameters["area"] = True bnd.return_value.contour_poly.intersection.return_value = True @@ -417,18 +419,18 @@ def test_start_end_time(self): with pytest.raises(RuntimeError): self.reader.end_time - fh0 = FakeFH(datetime(1999, 12, 30, 0, 0), - datetime(1999, 12, 31, 0, 0)) - fh1 = FakeFH(datetime(1999, 12, 31, 10, 0), - datetime(2000, 1, 1, 12, 30)) - fh2 = FakeFH(datetime(2000, 1, 1, 10, 0), - datetime(2000, 1, 1, 12, 30)) - fh3 = FakeFH(datetime(2000, 1, 1, 12, 30), - datetime(2000, 1, 2, 12, 30)) - fh4 = FakeFH(datetime(2000, 1, 2, 12, 30), - datetime(2000, 1, 3, 12, 30)) - fh5 = FakeFH(datetime(1999, 12, 31, 10, 0), - datetime(2000, 1, 3, 12, 30)) + fh0 = FakeFH(dt.datetime(1999, 12, 30, 0, 0), + dt.datetime(1999, 12, 31, 0, 0)) + fh1 = FakeFH(dt.datetime(1999, 12, 31, 10, 0), + dt.datetime(2000, 1, 1, 12, 30)) + fh2 = FakeFH(dt.datetime(2000, 1, 1, 10, 0), + dt.datetime(2000, 1, 1, 12, 30)) + fh3 = FakeFH(dt.datetime(2000, 1, 1, 12, 30), + dt.datetime(2000, 1, 2, 12, 30)) + fh4 = FakeFH(dt.datetime(2000, 1, 2, 12, 30), + dt.datetime(2000, 1, 3, 12, 30)) + fh5 = FakeFH(dt.datetime(1999, 12, 31, 10, 0), + dt.datetime(2000, 1, 3, 12, 30)) self.reader.file_handlers = { "0": [fh1, fh2, fh3, fh4, fh5], @@ -436,8 +438,8 @@ def test_start_end_time(self): "2": [fh2, fh3], } - assert self.reader.start_time == datetime(1999, 12, 30, 0, 0) - assert self.reader.end_time == datetime(2000, 1, 3, 12, 30) + assert self.reader.start_time == dt.datetime(1999, 12, 30, 0, 0) + assert self.reader.end_time == dt.datetime(2000, 1, 3, 12, 30) def test_select_from_pathnames(self): """Check select_files_from_pathnames.""" @@ -572,8 +574,8 @@ def setUp(self): self.config = res_dict self.reader = yr.FileYAMLReader(res_dict, filter_parameters={ - "start_time": datetime(2000, 1, 1), - "end_time": datetime(2000, 1, 2), + "start_time": dt.datetime(2000, 1, 1), + "end_time": dt.datetime(2000, 1, 2), }) fake_fh = FakeFH(None, None) self.lons = xr.DataArray(np.ones((2, 2)) * 2, diff --git a/satpy/tests/utils.py b/satpy/tests/utils.py index a6ebf8753e..b2e3576d0f 100644 --- a/satpy/tests/utils.py +++ b/satpy/tests/utils.py @@ -16,8 +16,8 @@ # along with this program. If not, see . """Utilities for various satpy tests.""" +import datetime as dt from contextlib import contextmanager -from datetime import datetime from typing import Any from unittest import mock @@ -34,8 +34,8 @@ from satpy.modifiers import ModifierBase from satpy.readers.file_handlers import BaseFileHandler -FAKE_FILEHANDLER_START = datetime(2020, 1, 1, 0, 0, 0) -FAKE_FILEHANDLER_END = datetime(2020, 1, 1, 1, 0, 0) +FAKE_FILEHANDLER_START = dt.datetime(2020, 1, 1, 0, 0, 0) +FAKE_FILEHANDLER_END = dt.datetime(2020, 1, 1, 1, 0, 0) def make_dataid(**items): diff --git a/satpy/tests/writer_tests/test_awips_tiled.py b/satpy/tests/writer_tests/test_awips_tiled.py index dbc1bc82d7..364d0c6b8e 100644 --- a/satpy/tests/writer_tests/test_awips_tiled.py +++ b/satpy/tests/writer_tests/test_awips_tiled.py @@ -17,10 +17,10 @@ # satpy. If not, see . 
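# [Editor's note] A minimal sketch of the interval-overlap check that the
# filehandler-filtering tests above exercise via ``time_matches``: a file is
# kept when its [start, end] range intersects the requested time window.
import datetime as dt

def time_matches_sketch(file_start, file_end, win_start, win_end):
    return file_start <= win_end and file_end >= win_start

assert time_matches_sketch(dt.datetime(2000, 1, 1, 10, 0), dt.datetime(2000, 1, 1, 12, 30),
                           dt.datetime(2000, 1, 1), dt.datetime(2000, 1, 2))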
"""Tests for the AWIPS Tiled writer.""" +import datetime as dt import logging import os import shutil -from datetime import datetime, timedelta from glob import glob import dask @@ -32,8 +32,8 @@ from satpy.resample import update_resampled_coords -START_TIME = datetime(2018, 1, 1, 12, 0, 0) -END_TIME = START_TIME + timedelta(minutes=20) +START_TIME = dt.datetime(2018, 1, 1, 12, 0, 0) +END_TIME = START_TIME + dt.timedelta(minutes=20) # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: @@ -378,7 +378,7 @@ def test_lettered_tiles_sector_ref(self, tmp_path): unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) - expected_start = (START_TIME + timedelta(minutes=20)).strftime("%Y-%m-%dT%H:%M:%S") + expected_start = (START_TIME + dt.timedelta(minutes=20)).strftime("%Y-%m-%dT%H:%M:%S") assert masked_ds.attrs["start_date_time"] == expected_start def test_lettered_tiles_no_fit(self, tmp_path): diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 6d1d15527b..18a3682fc7 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -17,10 +17,10 @@ # satpy. If not, see . """Tests for the CF writer.""" +import datetime as dt import os import tempfile import warnings -from datetime import datetime import numpy as np import pytest @@ -74,8 +74,8 @@ def test_init(self): def test_save_array(self): """Test saving an array to netcdf/cf.""" scn = Scene() - start_time = datetime(2018, 5, 30, 10, 0) - end_time = datetime(2018, 5, 30, 10, 15) + start_time = dt.datetime(2018, 5, 30, 10, 0) + end_time = dt.datetime(2018, 5, 30, 10, 15) scn["test-array"] = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time, @@ -90,8 +90,8 @@ def test_save_array(self): def test_save_array_coords(self): """Test saving array with coordinates.""" scn = Scene() - start_time = datetime(2018, 5, 30, 10, 0) - end_time = datetime(2018, 5, 30, 10, 15) + start_time = dt.datetime(2018, 5, 30, 10, 0) + end_time = dt.datetime(2018, 5, 30, 10, 15) coords = { "x": np.arange(3), "y": np.arange(1), @@ -162,8 +162,8 @@ def test_ancillary_variables(self): """Test ancillary_variables cited each other.""" from satpy.tests.utils import make_dataid scn = Scene() - start_time = datetime(2018, 5, 30, 10, 0) - end_time = datetime(2018, 5, 30, 10, 15) + start_time = dt.datetime(2018, 5, 30, 10, 0) + end_time = dt.datetime(2018, 5, 30, 10, 15) da = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time, @@ -180,8 +180,8 @@ def test_ancillary_variables(self): def test_groups(self): """Test creating a file with groups.""" - tstart = datetime(2019, 4, 1, 12, 0) - tend = datetime(2019, 4, 1, 12, 15) + tstart = dt.datetime(2019, 4, 1, 12, 0) + tend = dt.datetime(2019, 4, 1, 12, 15) data_visir = [[1, 2], [3, 4]] y_visir = [1, 2] @@ -238,8 +238,8 @@ def test_groups(self): def test_single_time_value(self): """Test setting a single time value.""" scn = Scene() - start_time = datetime(2018, 5, 30, 10, 0) - end_time = datetime(2018, 5, 30, 10, 15) + start_time = dt.datetime(2018, 5, 30, 10, 0) + end_time = dt.datetime(2018, 5, 30, 10, 15) test_array = np.array([[1, 2], [3, 4]]) scn["test-array"] = xr.DataArray(test_array, dims=["x", "y"], @@ -272,8 +272,8 @@ def test_time_coordinate_on_a_swath(self): def test_bounds(self): """Test setting time bounds.""" scn = Scene() - start_time = datetime(2018, 5, 30, 
10, 0) - end_time = datetime(2018, 5, 30, 10, 15) + start_time = dt.datetime(2018, 5, 30, 10, 0) + end_time = dt.datetime(2018, 5, 30, 10, 15) test_array = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) scn["test-array"] = xr.DataArray(test_array, dims=["x", "y", "time"], @@ -307,10 +307,10 @@ def test_bounds(self): def test_bounds_minimum(self): """Test minimum bounds.""" scn = Scene() - start_timeA = datetime(2018, 5, 30, 10, 0) # expected to be used - end_timeA = datetime(2018, 5, 30, 10, 20) - start_timeB = datetime(2018, 5, 30, 10, 3) - end_timeB = datetime(2018, 5, 30, 10, 15) # expected to be used + start_timeA = dt.datetime(2018, 5, 30, 10, 0) # expected to be used + end_timeA = dt.datetime(2018, 5, 30, 10, 20) + start_timeB = dt.datetime(2018, 5, 30, 10, 3) + end_timeB = dt.datetime(2018, 5, 30, 10, 15) # expected to be used test_arrayA = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) test_arrayB = np.array([[1, 2], [3, 5]]).reshape(2, 2, 1) scn["test-arrayA"] = xr.DataArray(test_arrayA, @@ -333,8 +333,8 @@ def test_bounds_minimum(self): def test_bounds_missing_time_info(self): """Test time bounds generation in case of missing time.""" scn = Scene() - start_timeA = datetime(2018, 5, 30, 10, 0) - end_timeA = datetime(2018, 5, 30, 10, 15) + start_timeA = dt.datetime(2018, 5, 30, 10, 0) + end_timeA = dt.datetime(2018, 5, 30, 10, 15) test_arrayA = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) test_arrayB = np.array([[1, 2], [3, 5]]).reshape(2, 2, 1) scn["test-arrayA"] = xr.DataArray(test_arrayA, @@ -355,8 +355,8 @@ def test_bounds_missing_time_info(self): def test_unlimited_dims_kwarg(self): """Test specification of unlimited dimensions.""" scn = Scene() - start_time = datetime(2018, 5, 30, 10, 0) - end_time = datetime(2018, 5, 30, 10, 15) + start_time = dt.datetime(2018, 5, 30, 10, 0) + end_time = dt.datetime(2018, 5, 30, 10, 15) test_array = np.array([[1, 2], [3, 4]]) scn["test-array"] = xr.DataArray(test_array, dims=["x", "y"], @@ -372,8 +372,8 @@ def test_unlimited_dims_kwarg(self): def test_header_attrs(self): """Check global attributes are set.""" scn = Scene() - start_time = datetime(2018, 5, 30, 10, 0) - end_time = datetime(2018, 5, 30, 10, 15) + start_time = dt.datetime(2018, 5, 30, 10, 0) + end_time = dt.datetime(2018, 5, 30, 10, 15) scn["test-array"] = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time)) @@ -423,8 +423,8 @@ def test_load_module_with_old_pyproj(self): def test_global_attr_default_history_and_Conventions(self): """Test saving global attributes history and Conventions.""" scn = Scene() - start_time = datetime(2018, 5, 30, 10, 0) - end_time = datetime(2018, 5, 30, 10, 15) + start_time = dt.datetime(2018, 5, 30, 10, 0) + end_time = dt.datetime(2018, 5, 30, 10, 15) scn["test-array"] = xr.DataArray([[1, 2, 3]], dims=("y", "x"), attrs=dict(start_time=start_time, @@ -439,8 +439,8 @@ def test_global_attr_default_history_and_Conventions(self): def test_global_attr_history_and_Conventions(self): """Test saving global attributes history and Conventions.""" scn = Scene() - start_time = datetime(2018, 5, 30, 10, 0) - end_time = datetime(2018, 5, 30, 10, 15) + start_time = dt.datetime(2018, 5, 30, 10, 0) + end_time = dt.datetime(2018, 5, 30, 10, 15) scn["test-array"] = xr.DataArray([[1, 2, 3]], dims=("y", "x"), attrs=dict(start_time=start_time, @@ -465,8 +465,8 @@ def scene(self): """Create a fake scene.""" scn = Scene() attrs = { - "start_time": datetime(2018, 5, 30, 10, 0), - "end_time": datetime(2018, 5, 30, 10, 15) + "start_time": dt.datetime(2018, 5, 
30, 10, 0), + "end_time": dt.datetime(2018, 5, 30, 10, 15) } scn["test-array"] = xr.DataArray([1., 2, 3], attrs=attrs) return scn diff --git a/satpy/tests/writer_tests/test_geotiff.py b/satpy/tests/writer_tests/test_geotiff.py index 8925857637..d0e879c4b2 100644 --- a/satpy/tests/writer_tests/test_geotiff.py +++ b/satpy/tests/writer_tests/test_geotiff.py @@ -17,7 +17,7 @@ # satpy. If not, see . """Tests for the geotiff writer.""" -from datetime import datetime +import datetime as dt from unittest import mock import dask.array as da @@ -42,7 +42,7 @@ def _get_test_datasets_2d(): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "units": "K", "area": adef} ) @@ -72,7 +72,7 @@ def _get_test_datasets_3d(): dims=("bands", "y", "x"), coords={"bands": ["R", "G", "B"]}, attrs={"name": "test", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "area": adef} ) return [ds1] diff --git a/satpy/tests/writer_tests/test_mitiff.py b/satpy/tests/writer_tests/test_mitiff.py index 2dafdd5896..1642510583 100644 --- a/satpy/tests/writer_tests/test_mitiff.py +++ b/satpy/tests/writer_tests/test_mitiff.py @@ -20,6 +20,8 @@ Based on the test for geotiff writer """ + +import datetime as dt import logging import os import unittest @@ -48,8 +50,6 @@ def tearDown(self): def _get_test_datasets(self): """Create a datasets list.""" - from datetime import datetime - import dask.array as da import xarray as xr from pyproj import CRS @@ -68,7 +68,7 @@ def _get_test_datasets(self): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "1", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "TEST_SENSOR_NAME", "area": area_def, @@ -91,7 +91,7 @@ def _get_test_datasets(self): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "4", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "TEST_SENSOR_NAME", "area": area_def, @@ -114,8 +114,6 @@ def _get_test_datasets(self): def _get_test_datasets_sensor_set(self): """Create a datasets list.""" - from datetime import datetime - import dask.array as da import xarray as xr from pyproj import CRS @@ -134,7 +132,7 @@ def _get_test_datasets_sensor_set(self): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "1", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": {"TEST_SENSOR_NAME"}, "area": area_def, @@ -157,7 +155,7 @@ def _get_test_datasets_sensor_set(self): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "4", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": {"TEST_SENSOR_NAME"}, "area": area_def, @@ -180,8 +178,6 @@ def _get_test_datasets_sensor_set(self): def _get_test_dataset(self, bands=3): """Create a single test dataset.""" - from datetime import datetime - import dask.array as da import xarray as xr from pyproj import CRS @@ -201,7 +197,7 @@ def _get_test_dataset(self, bands=3): da.zeros((bands, 100, 200), chunks=50), dims=("bands", "y", "x"), attrs={"name": "test", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "TEST_SENSOR_NAME", "area": area_def, @@ -211,8 +207,6 @@ def _get_test_dataset(self, bands=3): def _get_test_one_dataset(self): """Create a 
single test dataset.""" - from datetime import datetime - import dask.array as da import xarray as xr from pyproj import CRS @@ -232,7 +226,7 @@ def _get_test_one_dataset(self): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "avhrr", "area": area_def, @@ -242,8 +236,6 @@ def _get_test_one_dataset(self): def _get_test_one_dataset_sensor_set(self): """Create a single test dataset.""" - from datetime import datetime - import dask.array as da import xarray as xr from pyproj import CRS @@ -263,7 +255,7 @@ def _get_test_one_dataset_sensor_set(self): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": {"avhrr"}, "area": area_def, @@ -273,8 +265,6 @@ def _get_test_one_dataset_sensor_set(self): def _get_test_dataset_with_bad_values(self, bands=3): """Create a single test dataset.""" - from datetime import datetime - import xarray as xr from pyproj import CRS from pyresample.geometry import AreaDefinition @@ -298,7 +288,7 @@ def _get_test_dataset_with_bad_values(self, bands=3): ds1 = xr.DataArray(rgb_data, dims=("bands", "y", "x"), attrs={"name": "test", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "TEST_SENSOR_NAME", "area": area_def, @@ -307,8 +297,6 @@ def _get_test_dataset_with_bad_values(self, bands=3): def _get_test_dataset_calibration(self, bands=6): """Create a single test dataset.""" - from datetime import datetime - import dask.array as da import xarray as xr from pyproj import CRS @@ -362,7 +350,7 @@ def _get_test_dataset_calibration(self, bands=6): bands.append(p.attrs["name"]) data["bands"] = list(bands) new_attrs = {"name": "datasets", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "test-sensor", "area": area_def, @@ -411,8 +399,6 @@ def _get_test_dataset_calibration(self, bands=6): def _get_test_dataset_calibration_one_dataset(self, bands=1): """Create a single test dataset.""" - from datetime import datetime - import dask.array as da import xarray as xr from pyproj import CRS @@ -441,7 +427,7 @@ def _get_test_dataset_calibration_one_dataset(self, bands=1): for p in scene: calibration.append(p.attrs["calibration"]) new_attrs = {"name": "datasets", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "test-sensor", "area": area_def, @@ -465,8 +451,6 @@ def _get_test_dataset_calibration_one_dataset(self, bands=1): def _get_test_dataset_three_bands_two_prereq(self, bands=3): """Create a single test dataset.""" - from datetime import datetime - import dask.array as da import xarray as xr from pyproj import CRS @@ -488,7 +472,7 @@ def _get_test_dataset_three_bands_two_prereq(self, bands=3): coords=[["R", "G", "B"], list(range(100)), list(range(200))], dims=("bands", "y", "x"), attrs={"name": "test", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "TEST_SENSOR_NAME", "area": area_def, @@ -499,8 +483,6 @@ def _get_test_dataset_three_bands_two_prereq(self, bands=3): def _get_test_dataset_three_bands_prereq(self, bands=3): """Create a single test dataset.""" - from datetime import datetime - import dask.array as da 
import xarray as xr from pyproj import CRS @@ -522,7 +504,7 @@ def _get_test_dataset_three_bands_prereq(self, bands=3): coords=[["R", "G", "B"], list(range(100)), list(range(200))], dims=("bands", "y", "x"), attrs={"name": "test", - "start_time": datetime.utcnow(), + "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "TEST_SENSOR_NAME", "area": area_def, diff --git a/satpy/tests/writer_tests/test_simple_image.py b/satpy/tests/writer_tests/test_simple_image.py index 01d89a22ad..6a4eba95e3 100644 --- a/satpy/tests/writer_tests/test_simple_image.py +++ b/satpy/tests/writer_tests/test_simple_image.py @@ -38,7 +38,7 @@ def tearDown(self): @staticmethod def _get_test_datasets(): """Create DataArray for testing.""" - from datetime import datetime + import datetime as dt import dask.array as da import xarray as xr @@ -46,7 +46,7 @@ def _get_test_datasets(): da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", - "start_time": datetime.utcnow()} + "start_time": dt.datetime.utcnow()} ) return [ds1] diff --git a/satpy/writers/awips_tiled.py b/satpy/writers/awips_tiled.py index 8fe9a8d2cc..1652a3786e 100644 --- a/satpy/writers/awips_tiled.py +++ b/satpy/writers/awips_tiled.py @@ -213,13 +213,14 @@ lettered tile locations. """ + +import datetime as dt import logging import os import string import sys import warnings from collections import namedtuple -from datetime import datetime, timedelta import dask import dask.array as da @@ -1101,7 +1102,7 @@ def apply_misc_metadata(self, new_ds, sector_id=None, creator=None, creation_tim if creator is None: creator = "Satpy Version {} - AWIPS Tiled Writer".format(__version__) if creation_time is None: - creation_time = datetime.utcnow() + creation_time = dt.datetime.utcnow() self._add_sector_id_global(new_ds, sector_id) new_ds.attrs["Conventions"] = "CF-1.7" @@ -1493,8 +1494,8 @@ def _save_nonempty_mfdatasets(self, datasets_to_save, output_filenames, **kwargs def _adjust_metadata_times(self, ds_info): debug_shift_time = int(os.environ.get("DEBUG_TIME_SHIFT", 0)) if debug_shift_time: - ds_info["start_time"] += timedelta(minutes=debug_shift_time) - ds_info["end_time"] += timedelta(minutes=debug_shift_time) + ds_info["start_time"] += dt.timedelta(minutes=debug_shift_time) + ds_info["end_time"] += dt.timedelta(minutes=debug_shift_time) def _get_tile_data_info(self, data_arrs, creation_time, source_name): # use the first data array as a "representative" for the group @@ -1597,7 +1598,7 @@ def save_datasets(self, datasets, sector_id=None, # noqa: D417 area_data_arrs = self._group_by_area(datasets) datasets_to_save = [] output_filenames = [] - creation_time = datetime.utcnow() + creation_time = dt.datetime.utcnow() area_tile_data_gen = self._iter_area_tile_info_and_datasets( area_data_arrs, template, lettered_grid, sector_id, num_subtiles, tile_size, tile_count, use_sector_reference) @@ -1775,7 +1776,7 @@ def create_debug_lettered_tiles(**writer_kwargs): sector_info = writer.awips_sectors[sector_id] area_def, arr = _create_debug_array(sector_info, save_kwargs["num_subtiles"]) - now = datetime.utcnow() + now = dt.datetime.utcnow() product = xr.DataArray(da.from_array(arr, chunks="auto"), attrs=dict( name="debug_{}".format(sector_id), platform_name="DEBUG", From 7c139590a3c5fbd60c72b0b8af7034b94b3ea52e Mon Sep 17 00:00:00 2001 From: andream Date: Mon, 22 Apr 2024 15:30:14 +0200 Subject: [PATCH 1307/1416] change default with_area_definition value --- satpy/readers/li_l2_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/satpy/readers/li_l2_nc.py b/satpy/readers/li_l2_nc.py index 4fe0826380..03ab46545f 100644 --- a/satpy/readers/li_l2_nc.py +++ b/satpy/readers/li_l2_nc.py @@ -46,7 +46,7 @@ class LIL2NCFileHandler(LINCFileHandler): """Implementation class for the unified LI L2 satpy reader.""" - def __init__(self, filename, filename_info, filetype_info, with_area_definition=False): + def __init__(self, filename, filename_info, filetype_info, with_area_definition=True): """Initialize LIL2NCFileHandler.""" super(LIL2NCFileHandler, self).__init__(filename, filename_info, filetype_info) From 1f089fad64c76a3fef1ad9932d12d229201d7e2f Mon Sep 17 00:00:00 2001 From: andream Date: Mon, 22 Apr 2024 15:59:48 +0200 Subject: [PATCH 1308/1416] fix tests by specifying with_area_definition=False --- satpy/tests/reader_tests/test_li_l2_nc.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_li_l2_nc.py b/satpy/tests/reader_tests/test_li_l2_nc.py index 5e9d0ff563..c6e13a7fe3 100644 --- a/satpy/tests/reader_tests/test_li_l2_nc.py +++ b/satpy/tests/reader_tests/test_li_l2_nc.py @@ -174,7 +174,8 @@ def test_dataset_loading(self, filetype_infos): "end_time": "1000" } - handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, ftype)) + handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, ftype), + with_area_definition=False) ds_desc = handler.ds_desc # retrieve the schema that what used to generate the content for that product: @@ -480,7 +481,8 @@ def test_combine_info(self, filetype_infos): def test_coordinates_projection(self, filetype_infos): """Should automatically generate lat/lon coords from projection data.""" - handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc")) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), + with_area_definition=False) dsid = make_dataid(name="flash_accumulation") dset = handler.get_dataset(dsid) @@ -492,7 +494,8 @@ def test_coordinates_projection(self, filetype_infos): with pytest.raises(NotImplementedError): handler.get_area_def(dsid) - handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_afr_nc")) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_afr_nc"), + with_area_definition=False) dsid = make_dataid(name="flash_radiance") dset = handler.get_dataset(dsid) @@ -501,7 +504,8 @@ def test_coordinates_projection(self, filetype_infos): assert dset.attrs["coordinates"][0] == "longitude" assert dset.attrs["coordinates"][1] == "latitude" - handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_afa_nc")) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_afa_nc"), + with_area_definition=False) dsid = make_dataid(name="accumulated_flash_area") dset = handler.get_dataset(dsid) From de0e3c7c52ef9e365d0c361465abfe5d9f91c59e Mon Sep 17 00:00:00 2001 From: andream Date: Mon, 22 Apr 2024 17:52:01 +0200 Subject: [PATCH 1309/1416] update documentation --- satpy/readers/li_l2_nc.py | 34 ++++++++++++++++++++++++++-------- 1 file changed, 26 insertions(+), 8 deletions(-) diff --git a/satpy/readers/li_l2_nc.py b/satpy/readers/li_l2_nc.py index 03ab46545f..09efecd688 100644 --- a/satpy/readers/li_l2_nc.py +++ b/satpy/readers/li_l2_nc.py @@ -13,18 +13,36 @@ # You should have received a copy of the GNU General Public 
License # along with satpy. If not, see . -"""MTG Lighting Imager (LI) L2 unified reader. +"""MTG Lightning Imager (LI) L2 unified reader. This reader supports reading all the products from the LI L2 processing level: - * L2-LE - * L2-LGR - * L2-AFA - * L2-LEF - * L2-LFL - * L2-AF - * L2-AFR + * L2-LE Lightning Events + * L2-LEF Lightning Events Filtered + * L2-LFL Lightning Flashes + * L2-LGR Lightning Groups + * L2-AF Accumulated Flashes + * L2-AFA Accumulated Flash Area + * L2-AFR Accumulated Flash Radiance + +Point-based products (LE, LEF, LFL, LGR) are provided as 1-D arrays, with a ``pyresample.geometry.SwathDefinition`` area +attribute containing the points lat-lon coordinates. + +Accumulated products (AF, AFA, AFR) are provided as 2-D arrays in the FCI 2km grid as per intended usage, +with a ``pyresample.geometry.AreaDefinition`` area attribute containing the grid geolocation information. +In this way, the products can directly be overlaid to FCI data. +If needed, the products can still be accessed as 1-d array by setting the reader kwarg ``with_area_definition=False``, +eg:: + + scn = Scene(filenames=filenames, reader="li_l2_nc", reader_kwargs={'with_area_definition': False}) + +The lat-lon coordinates of the points/grid pixels can be accessed using e.g. +``scn['dataset_name'].attrs['area'].get_lonlats()``. + +See the LI L2 Product User Guide `PUG`_ for more information on the products. + +.. _PUG: https://www-dr.eumetsat.int/media/49348 """ From a141d5ef7b0dc62f220fed3c4bf4da1bdf67e46e Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 22 Apr 2024 18:19:48 +0200 Subject: [PATCH 1310/1416] Update changelog for v0.48.0 --- CHANGELOG.md | 46 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 52d47ad47f..7281a80ceb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,49 @@ +## Version 0.48.0 (2024/04/22) + +### Issues Closed + +* [Issue 2782](https://github.com/pytroll/satpy/issues/2782) - Documentation points to missing setup.py ([PR 2786](https://github.com/pytroll/satpy/pull/2786) by [@mraspaud](https://github.com/mraspaud)) +* [Issue 2771](https://github.com/pytroll/satpy/issues/2771) - Load data in another datatype rather than float64 +* [Issue 2759](https://github.com/pytroll/satpy/issues/2759) - 'defusedxml' missing in "msi_safe" extras ([PR 2761](https://github.com/pytroll/satpy/pull/2761) by [@fwfichtner](https://github.com/fwfichtner)) +* [Issue 2749](https://github.com/pytroll/satpy/issues/2749) - [Question] Resample of mesoscale data gives blank data +* [Issue 2747](https://github.com/pytroll/satpy/issues/2747) - Cannot load from MTG FCI L1C data +* [Issue 2729](https://github.com/pytroll/satpy/issues/2729) - Add Viirs L2 Reader + Enhancments ([PR 2740](https://github.com/pytroll/satpy/pull/2740) by [@wjsharpe](https://github.com/wjsharpe)) +* [Issue 2695](https://github.com/pytroll/satpy/issues/2695) - Improvements for BackgroundCompositor ([PR 2696](https://github.com/pytroll/satpy/pull/2696) by [@yukaribbba](https://github.com/yukaribbba)) + +In this release 7 issues were closed. 
+ +### Pull Requests Merged + +#### Bugs fixed + +* [PR 2786](https://github.com/pytroll/satpy/pull/2786) - Remove doc references to setup.py ([2782](https://github.com/pytroll/satpy/issues/2782)) +* [PR 2779](https://github.com/pytroll/satpy/pull/2779) - Convert Sentinel-2 MSI sensor name to lowercase in the reader YAML config file and add support for "counts" calibration +* [PR 2774](https://github.com/pytroll/satpy/pull/2774) - Fix the viirs EDR tests for newer xarray +* [PR 2761](https://github.com/pytroll/satpy/pull/2761) - Add missing defusedxml ([2759](https://github.com/pytroll/satpy/issues/2759)) +* [PR 2754](https://github.com/pytroll/satpy/pull/2754) - Bugfix vgac reader +* [PR 2701](https://github.com/pytroll/satpy/pull/2701) - Ici reader tiepoints bugfix +* [PR 2696](https://github.com/pytroll/satpy/pull/2696) - Add double alpha channel support and improve metadata behaviours for BackgroundCompositor ([2695](https://github.com/pytroll/satpy/issues/2695)) + +#### Features added + +* [PR 2780](https://github.com/pytroll/satpy/pull/2780) - Add new (Eumetrain) FCI RGB composites +* [PR 2767](https://github.com/pytroll/satpy/pull/2767) - Use flags from file when available in OLCI NC reader +* [PR 2763](https://github.com/pytroll/satpy/pull/2763) - Replace setup with pyproject.toml +* [PR 2762](https://github.com/pytroll/satpy/pull/2762) - Add support for EO-SIP AVHRR LAC data +* [PR 2753](https://github.com/pytroll/satpy/pull/2753) - Add fsspec support to `li_l2_nc` reader +* [PR 2740](https://github.com/pytroll/satpy/pull/2740) - Add VIIRS L2 Reader ([2729](https://github.com/pytroll/satpy/issues/2729)) +* [PR 2696](https://github.com/pytroll/satpy/pull/2696) - Add double alpha channel support and improve metadata behaviours for BackgroundCompositor ([2695](https://github.com/pytroll/satpy/issues/2695)) +* [PR 2595](https://github.com/pytroll/satpy/pull/2595) - VGAC decode the time variable + +#### Documentation changes + +* [PR 2786](https://github.com/pytroll/satpy/pull/2786) - Remove doc references to setup.py ([2782](https://github.com/pytroll/satpy/issues/2782)) +* [PR 2766](https://github.com/pytroll/satpy/pull/2766) - Add Data Store to EUMETSAT part +* [PR 2750](https://github.com/pytroll/satpy/pull/2750) - Add missing `h` docstring information to _geos_area.py + +In this release 18 pull requests were closed. + + ## Version 0.47.0 (2024/02/21) ### Issues Closed From f24622eb69e7fd8c50e9d4872249becbb4f3692f Mon Sep 17 00:00:00 2001 From: clement laplace Date: Tue, 23 Apr 2024 10:10:52 +0000 Subject: [PATCH 1311/1416] fix: Fix the issue found in codefactor https://github.com/pytroll/satpy/pull/2778/checks?check_run_id=24029557750 --- satpy/readers/fci_l1c_nc.py | 10 ++++----- satpy/tests/reader_tests/test_fci_l1c_nc.py | 4 ++-- satpy/tests/reader_tests/test_olci_nc.py | 24 ++++++++++----------- 3 files changed, 19 insertions(+), 19 deletions(-) diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index 5b7c669d21..f90ae912d6 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -30,7 +30,9 @@ .. note:: This reader currently supports Full Disk High Spectral Resolution Imagery (FDHSI) ,High Spatial Resolution Fast Imagery (HRFI) data in full-disc ("FD") scanning mode. - The african case ("AF") scanning mode has been added. 
+ In addition it also supports the L1C format for the African dissemination ("AF"), where each file + contains the masked full-disc of a single channel + (https://www-cdn.eumetsat.int/files/2022-07/MTG%20EUMETCast%20Africa%20Product%20User%20Guide%20%5BAfricaPUG%5D_v2E.pdf) If the user provides a list of both FDHSI and HRFI files from the same repeat cycle to the Satpy ``Scene``, Satpy will automatically read the channels from the source with the finest resolution, i.e. from the HRFI files for the vis_06, nir_22, ir_38, and ir_105 channels. @@ -148,14 +150,12 @@ "grid_width": 22272}, "fci_l1c_fdhsi": {"grid_type": "1km", "grid_width": 11136}, - "fci_l1c_af": {"grid_type": "1km", - "grid_width": 11136}} + } LOW_RES_GRID_INFO = {"fci_l1c_hrfi": {"grid_type": "1km", "grid_width": 11136}, "fci_l1c_fdhsi": {"grid_type": "2km", "grid_width": 5568}, - "fci_l1c_af":{"grid_type": "3km", - "grid_width": 3712}} + } def _get_aux_data_name_from_dsname(dsname): diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index c87db9c6c2..5710586466 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -433,7 +433,7 @@ def clear_cache(reader): "grid_width": 3712 }, }, - "fci_af_vis_06" : {"3km": {"start_position_row": 1, + "fci_af_vis_06" : {"3km": {"start_position_row": 1, "end_position_row": 67, "segment_height": 67, "grid_width": 3712 @@ -742,7 +742,7 @@ def test_get_segment_position_info(self, reader_configs, fh_param, expected_pos_ @mock.patch("satpy.readers.yaml_reader.GEOVariableSegmentYAMLReader") @pytest.mark.parametrize(("channel", "resolution"), generate_parameters("radiance")) def test_not_get_segment_info_called_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,channel,resolution): - """Test that checks that the get_segment_position_info has not be called for AF data.""" + """Test that checks that the get_segment_position_info has not been called for AF data.""" with mock.patch("satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler.get_segment_position_info") as gspi: fh_param = FakeFCIFileHandlerAF_fixture reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) diff --git a/satpy/tests/reader_tests/test_olci_nc.py b/satpy/tests/reader_tests/test_olci_nc.py index 2834578176..fe384b9dc3 100644 --- a/satpy/tests/reader_tests/test_olci_nc.py +++ b/satpy/tests/reader_tests/test_olci_nc.py @@ -268,10 +268,10 @@ def test_bitflags(self): "CLOUD_MARGIN", "CLOUD_AMBIGUOUS", "LOWRW", "LAND"] mask = reduce(np.logical_or, [bflags[item] for item in items]) - expected = np.array([True, False, True, True, True, True, False, - False, True, True, False, False, False, False, - False, False, False, True, False, True, False, - False, False, True, True, False, False, True, + expected = np.array([True, False, True, True, True, True, False, + False, True, True, False, False, False, False, + False, False, False, True, False, True, False, + False, False, True, True, False, False, True, False]) assert all(mask == expected) @@ -335,10 +335,10 @@ def test_bitflags_with_dataarray_without_flags(self): "CLOUD_MARGIN", "CLOUD_AMBIGUOUS", "LOWRW", "LAND"] mask = reduce(np.logical_or, [bflags[item] for item in items]) - expected = np.array([True, False, True, True, True, True, False, - False, True, True, 
False, False, False, False, + False, False, False, False, - False, False, False, True, False, True, False, - False, False, True, True, False, False, True, + False, False, False, True, False, True, False, + False, False, True, True, False, False, True, False]) assert all(mask == expected) @@ -367,9 +367,9 @@ def test_bitflags_with_custom_flag_list(self): "CLOUD_MARGIN", "CLOUD_AMBIGUOUS", "LOWRW", "LAND"] mask = reduce(np.logical_or, [bflags[item] for item in items]) - expected = np.array([True, False, True, True, True, True, False, - False, True, True, False, False, False, False, - False, False, False, True, False, True, False, - False, False, True, True, False, False, True, + expected = np.array([True, False, True, True, True, True, False, + False, True, True, False, False, False, False, + False, False, False, True, False, True, False, + False, False, True, True, False, False, True, False]) assert all(mask == expected) From 98993a45d327e11238466abe5f339909484c4328 Mon Sep 17 00:00:00 2001 From: andream Date: Tue, 23 Apr 2024 16:53:23 +0200 Subject: [PATCH 1312/1416] enhance documentation --- satpy/readers/li_l2_nc.py | 32 ++++++++++++++++++++++++-------- 1 file changed, 24 insertions(+), 8 deletions(-) diff --git a/satpy/readers/li_l2_nc.py b/satpy/readers/li_l2_nc.py index 09efecd688..dffc243c32 100644 --- a/satpy/readers/li_l2_nc.py +++ b/satpy/readers/li_l2_nc.py @@ -13,34 +13,50 @@ # You should have received a copy of the GNU General Public License # along with satpy. If not, see . -"""MTG Lightning Imager (LI) L2 unified reader. +"""MTG Lightning Imager (LI) Level-2 (L2) unified reader. This reader supports reading all the products from the LI L2 processing level: +Point products: * L2-LE Lightning Events * L2-LEF Lightning Events Filtered * L2-LFL Lightning Flashes * L2-LGR Lightning Groups +Accumulated products: * L2-AF Accumulated Flashes * L2-AFA Accumulated Flash Area * L2-AFR Accumulated Flash Radiance -Point-based products (LE, LEF, LFL, LGR) are provided as 1-D arrays, with a ``pyresample.geometry.SwathDefinition`` area +By default, the unified LI L2 reader returns the data either as a 1-D array +or as a 2-D array depending on the product type. + +Point-based products (LE, LEF, LFL, LGR) are "classic" lightning products +consisting of values with attached latitude and longitude coordinates. +Hence, these products are provided by the reader as 1-D arrays, +with a ``pyresample.geometry.SwathDefinition`` area attribute containing the points lat-lon coordinates. -Accumulated products (AF, AFA, AFR) are provided as 2-D arrays in the FCI 2km grid as per intended usage, -with a ``pyresample.geometry.AreaDefinition`` area attribute containing the grid geolocation information. +Accumulated products (AF, AFA, AFR) are the result of temporal accumulation +of events (e.g. over 30 seconds), and are gridded in the FCI 2km geostationary +projection grid, in order to facilitate the synergistic usage together with FCI. +Compared to the point products, the gridded products also give information +about the spatial extent of the lightning activity. +Hence, these products are provided by the reader as 2-D arrays in the FCI 2km +grid as per intended usage, with a ``pyresample.geometry.AreaDefinition`` area +attribute containing the grid geolocation information. In this way, the products can directly be overlaid to FCI data. 
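+
+As a minimal sketch of this combined use (the file lists below are
+placeholders, and ``flash_accumulation`` / ``ir_105`` are example dataset
+names used elsewhere in Satpy), LI and FCI data can be loaded in one Scene::
+
+    from satpy import Scene
+
+    # li_filenames / fci_filenames are placeholder lists of input files
+    scn = Scene(filenames={"li_l2_nc": li_filenames,
+                           "fci_l1c_nc": fci_filenames})
+    scn.load(["flash_accumulation", "ir_105"])
+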
-If needed, the products can still be accessed as 1-d array by setting the reader kwarg ``with_area_definition=False``, -eg:: +If needed, the accumulated products can also be accessed as a 1-d array by +setting the reader kwarg ``with_area_definition=False``, +e.g.:: scn = Scene(filenames=filenames, reader="li_l2_nc", reader_kwargs={'with_area_definition': False}) -The lat-lon coordinates of the points/grid pixels can be accessed using e.g. +For both 1-d and 2-d products, the lat-lon coordinates of the points/grid pixels +can be accessed using e.g. ``scn['dataset_name'].attrs['area'].get_lonlats()``. -See the LI L2 Product User Guide `PUG`_ for more information on the products. +See the LI L2 Product User Guide `PUG`_ for more information. .. _PUG: https://www-dr.eumetsat.int/media/49348 From 609fdcbbaa7ef76ac82eb38a9bb9a8abade93c2b Mon Sep 17 00:00:00 2001 From: Will Sharpe Date: Tue, 23 Apr 2024 16:31:42 +0000 Subject: [PATCH 1313/1416] fixed DNB_SENZ file_key --- satpy/etc/readers/viirs_l1b.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/readers/viirs_l1b.yaml b/satpy/etc/readers/viirs_l1b.yaml index 4622f7e415..8f5a417acf 100644 --- a/satpy/etc/readers/viirs_l1b.yaml +++ b/satpy/etc/readers/viirs_l1b.yaml @@ -481,7 +481,7 @@ datasets: resolution: 743 coordinates: [dnb_lon, dnb_lat] file_type: vgeod - file_key: geolocation_data/solar_zenith + file_key: geolocation_data/sensor_zenith DNB_LZA: name: dnb_lunar_zenith_angle standard_name: lunar_zenith_angle From aa18b7f247e8f289c9be415215cc9dd54fa787d5 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 24 Apr 2024 18:04:37 +0300 Subject: [PATCH 1314/1416] Fix variable name overwriting module name --- satpy/tests/reader_tests/test_seviri_l1b_icare.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_icare.py b/satpy/tests/reader_tests/test_seviri_l1b_icare.py index cb8a1fb6af..f75d22d385 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_icare.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_icare.py @@ -133,8 +133,8 @@ def test_load_dataset_vis(self): datasets = r.load(["VIS008"]) assert len(datasets) == 1 for v in datasets.values(): - dt = dt.datetime(2004, 12, 29, 12, 27, 44) - assert v.attrs["end_time"] == dt + date = dt.datetime(2004, 12, 29, 12, 27, 44) + assert v.attrs["end_time"] == date assert v.attrs["calibration"] == "reflectance" def test_load_dataset_ir(self): From 37301177ea7bded84500ca596c82e621a1b1ef4d Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 24 Apr 2024 18:28:58 +0300 Subject: [PATCH 1315/1416] Clarify variable naming --- satpy/readers/ahi_hsd.py | 4 ++-- satpy/readers/ahi_l2_nc.py | 8 ++++---- satpy/readers/goci2_l2_nc.py | 8 ++++---- satpy/readers/goes_imager_nc.py | 18 +++++++++--------- 4 files changed, 19 insertions(+), 19 deletions(-) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index 7ea83a6820..cedc626408 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -794,8 +794,8 @@ def _modify_observation_time_for_nominal(self, observation_time): ) return observation_time timeline = self._get_closest_timeline(observation_time) - date = self._get_offset_relative_to_timeline() - return timeline + dt.timedelta(minutes=date//60, seconds=date % 60) + offset = self._get_offset_relative_to_timeline() + return timeline + dt.timedelta(minutes=offset//60, seconds=offset % 60) def _get_closest_timeline(self, observation_time): """Find the closest timeline for the given observation 
time. diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index 92c2915a1e..74872d5410 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -83,14 +83,14 @@ def __init__(self, filename, filename_info, filetype_info): @property def start_time(self): """Start timestamp of the dataset.""" - date = self.nc.attrs["time_coverage_start"] - return dt.datetime.strptime(date, "%Y-%m-%dT%H:%M:%SZ") + date_str = self.nc.attrs["time_coverage_start"] + return dt.datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%SZ") @property def end_time(self): """End timestamp of the dataset.""" - date = self.nc.attrs["time_coverage_end"] - return dt.datetime.strptime(date, "%Y-%m-%dT%H:%M:%SZ") + date_str = self.nc.attrs["time_coverage_end"] + return dt.datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%SZ") def get_dataset(self, key, info): """Load a dataset.""" diff --git a/satpy/readers/goci2_l2_nc.py b/satpy/readers/goci2_l2_nc.py index b60a3e3876..0679be41ff 100644 --- a/satpy/readers/goci2_l2_nc.py +++ b/satpy/readers/goci2_l2_nc.py @@ -66,14 +66,14 @@ def _merge_navigation_data(self, filetype): @property def start_time(self): """Start timestamp of the dataset.""" - date = self.attrs["observation_start_time"] - return dt.datetime.strptime(date, "%Y%m%d_%H%M%S") + date_str = self.attrs["observation_start_time"] + return dt.datetime.strptime(date_str, "%Y%m%d_%H%M%S") @property def end_time(self): """End timestamp of the dataset.""" - date = self.attrs["observation_end_time"] - return dt.datetime.strptime(date, "%Y%m%d_%H%M%S") + date_str = self.attrs["observation_end_time"] + return dt.datetime.strptime(date_str, "%Y%m%d_%H%M%S") def get_dataset(self, key, info): """Load a dataset.""" diff --git a/satpy/readers/goes_imager_nc.py b/satpy/readers/goes_imager_nc.py index 2916a36436..44a01e44f2 100644 --- a/satpy/readers/goes_imager_nc.py +++ b/satpy/readers/goes_imager_nc.py @@ -731,15 +731,15 @@ def _get_area_def_uniform_sampling(self, lon0, channel): @property def start_time(self): """Start timestamp of the dataset.""" - date = self.nc["time"].dt - return dt.datetime( - year=int(date.year.item()), - month=int(date.month.item()), - day=int(date.day.item()), - hour=int(date.hour.item()), - minute=int(date.minute.item()), - second=int(date.second.item()), - microsecond=int(date.microsecond.item())) + timestamp = self.nc["time"].dt + return dt.timestamptime( + year=int(timestamp.year.item()), + month=int(timestamp.month.item()), + day=int(timestamp.day.item()), + hour=int(timestamp.hour.item()), + minute=int(timestamp.minute.item()), + second=int(timestamp.second.item()), + microsecond=int(timestamp.microsecond.item())) @property def end_time(self): From d3a7ab035eba223338fb0c97fbcb4f82d1d5959b Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Thu, 25 Apr 2024 09:06:34 +0200 Subject: [PATCH 1316/1416] Update MSI SAFE to use actual sensing time --- satpy/readers/msi_safe.py | 22 ++++------------------ satpy/tests/reader_tests/test_msi_safe.py | 10 ++++------ 2 files changed, 8 insertions(+), 24 deletions(-) diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index 8c0fb730b7..5ec5ff3ea0 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -28,17 +28,6 @@ reader_kwargs={'mask_saturated': False}) scene.load(['B01']) -MSI data typically have the same start time across multiple tiles, which can cause -problems if iterating over multiple tiles, as the saved imagery from one tile -may be overwritten by the next tile. 
-To overcome this, the user can specify `use_tile_time`, which will determine the start -time from the tile metadata rather than from the filename:: - - scene = satpy.Scene(filenames, - reader='msi_safe', - reader_kwargs={'use_tile_time': True}) - scene.load(['B01']) - L1C format description for the files read here: https://sentinels.copernicus.eu/documents/247904/0/Sentinel-2-product-specifications-document-V14-9.pdf/ @@ -70,7 +59,7 @@ class SAFEMSIL1C(BaseFileHandler): """File handler for SAFE MSI files (jp2).""" - def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, mask_saturated=True, use_tile_time=False): + def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, mask_saturated=True): """Initialize the reader.""" super(SAFEMSIL1C, self).__init__(filename, filename_info, filetype_info) @@ -80,10 +69,7 @@ def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, mask_s self._mda = mda self.platform_name = PLATFORMS[filename_info["fmission_id"]] - if use_tile_time: - self._start_time = self._tile_mda.start_time() - else: - self._start_time = filename_info["observation_time"] + self._start_time = self._tile_mda.start_time() self._end_time = filename_info["observation_time"] def get_dataset(self, key, info): @@ -128,7 +114,7 @@ def get_area_def(self, dsid): class SAFEMSIXMLMetadata(BaseFileHandler): """Base class for SAFE MSI XML metadata filehandlers.""" - def __init__(self, filename, filename_info, filetype_info, mask_saturated=True, use_tile_time=False): + def __init__(self, filename, filename_info, filetype_info, mask_saturated=True): """Init the reader.""" super().__init__(filename, filename_info, filetype_info) self._start_time = filename_info["observation_time"] @@ -244,7 +230,7 @@ def _fill_swath_edges(angles): class SAFEMSITileMDXML(SAFEMSIXMLMetadata): """File handle for sentinel 2 safe XML tile metadata.""" - def __init__(self, filename, filename_info, filetype_info, mask_saturated=True, use_tile_time=False): + def __init__(self, filename, filename_info, filetype_info, mask_saturated=True): """Init the reader.""" super().__init__(filename, filename_info, filetype_info, mask_saturated) self.geocoding = self.root.find(".//Tile_Geocoding") diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index e53cf9adfb..b919278bf5 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -1012,14 +1012,12 @@ def test_calibration_and_masking(self, mask_saturated, calibration, expected): res = self.jp2_fh.get_dataset(make_dataid(name="B01", calibration=calibration), info=dict()) np.testing.assert_allclose(res, expected) - @pytest.mark.parametrize(("use_obs_time", "expected"), - [(True, tilemd_dt), - (False, fname_dt)]) - def test_start_time(self, use_obs_time, expected): + + def test_start_time(self): """Test that the correct start time is returned.""" from satpy.readers.msi_safe import SAFEMSIL1C, SAFEMSIMDXML mda = SAFEMSIMDXML(StringIO(mtd_l1c_xml), self.filename_info, mock.MagicMock()) self.jp2_fh = SAFEMSIL1C("somefile", self.filename_info, mock.MagicMock(), - mda, self.tile_mda, use_tile_time=use_obs_time) - assert expected == self.jp2_fh.start_time + mda, self.tile_mda) + assert tilemd_dt == self.jp2_fh.start_time From 3d926645d40dd839d91e1a18226ea200ca30773d Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 25 Apr 2024 14:35:45 +0300 Subject: [PATCH 1317/1416] Fix search/replace error --- satpy/readers/goes_imager_nc.py | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/goes_imager_nc.py b/satpy/readers/goes_imager_nc.py index 44a01e44f2..1c5b513f36 100644 --- a/satpy/readers/goes_imager_nc.py +++ b/satpy/readers/goes_imager_nc.py @@ -732,7 +732,7 @@ def _get_area_def_uniform_sampling(self, lon0, channel): def start_time(self): """Start timestamp of the dataset.""" timestamp = self.nc["time"].dt - return dt.timestamptime( + return dt.datetime( year=int(timestamp.year.item()), month=int(timestamp.month.item()), day=int(timestamp.day.item()), From de20173930b265a99decdb6fcd208b99682e838d Mon Sep 17 00:00:00 2001 From: clement laplace Date: Thu, 25 Apr 2024 12:03:04 +0000 Subject: [PATCH 1318/1416] style: Add the hyperlink in fci_l1c_nc.py and apply the corrections suggested by sauli in https://github.com/pytroll/satpy/pull/2778 --- satpy/etc/readers/fci_l1c_nc.yaml | 551 ++------------------ satpy/readers/fci_l1c_nc.py | 4 +- satpy/tests/reader_tests/test_fci_l1c_nc.py | 243 +++++---- 3 files changed, 169 insertions(+), 629 deletions(-) diff --git a/satpy/etc/readers/fci_l1c_nc.yaml b/satpy/etc/readers/fci_l1c_nc.yaml index 5f86ad2326..8f39097479 100644 --- a/satpy/etc/readers/fci_l1c_nc.yaml +++ b/satpy/etc/readers/fci_l1c_nc.yaml @@ -13,6 +13,35 @@ reader: # Source: MTG FCI L1 Product User Guide [FCIL1PUG] # https://www.eumetsat.int/media/45923 +required_netcdf_variables: &required-variables + - attr/platform + - data/{channel_name}/measured/start_position_row + - data/{channel_name}/measured/end_position_row + - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber + - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a + - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b + - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 + - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 + - data/{channel_name}/measured/radiance_unit_conversion_coefficient + - data/{channel_name}/measured/channel_effective_solar_irradiance + - data/{channel_name}/measured/effective_radiance + - data/{channel_name}/measured/x + - data/{channel_name}/measured/y + - data/{channel_name}/measured/pixel_quality + - data/{channel_name}/measured/index_map + - data/mtg_geos_projection + - data/swath_direction + - data/swath_number + - index + - state/celestial/earth_sun_distance + - state/celestial/subsolar_latitude + - state/celestial/subsolar_longitude + - state/celestial/sun_satellite_distance + - state/platform/platform_altitude + - state/platform/subsatellite_latitude + - state/platform/subsatellite_longitude + - time + file_types: fci_l1c_fdhsi: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-FDHSI-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc", ] expected_segments: 40 - required_netcdf_variables: - - attr/platform - - data/{channel_name}/measured/start_position_row - - data/{channel_name}/measured/end_position_row - - 
data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 - - data/{channel_name}/measured/radiance_unit_conversion_coefficient - - data/{channel_name}/measured/channel_effective_solar_irradiance - - data/{channel_name}/measured/effective_radiance - - data/{channel_name}/measured/x - - data/{channel_name}/measured/y - - data/{channel_name}/measured/pixel_quality - - data/{channel_name}/measured/index_map - - data/mtg_geos_projection - - data/swath_direction - - data/swath_number - - index - - state/celestial/earth_sun_distance - - state/celestial/subsolar_latitude - - state/celestial/subsolar_longitude - - state/celestial/sun_satellite_distance - - state/platform/platform_altitude - - state/platform/subsatellite_latitude - - state/platform/subsatellite_longitude - - time + required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - vis_04 @@ -74,34 +76,7 @@ file_types: "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-HRFI-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc", ] expected_segments: 40 - required_netcdf_variables: - - attr/platform - - data/{channel_name}/measured/start_position_row - - data/{channel_name}/measured/end_position_row - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 - - data/{channel_name}/measured/radiance_unit_conversion_coefficient - - data/{channel_name}/measured/channel_effective_solar_irradiance - - data/{channel_name}/measured/effective_radiance - - data/{channel_name}/measured/x - - data/{channel_name}/measured/y - - data/{channel_name}/measured/pixel_quality - - data/{channel_name}/measured/index_map - - data/mtg_geos_projection - - data/swath_direction - - data/swath_number - - index - - state/celestial/earth_sun_distance - - state/celestial/subsolar_latitude - - state/celestial/subsolar_longitude - - state/celestial/sun_satellite_distance - - state/platform/platform_altitude - - state/platform/subsatellite_latitude - - state/platform/subsatellite_longitude - - time + required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - vis_06_hr @@ -116,34 +91,7 @@ file_types: "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 - required_netcdf_variables: - - attr/platform - - data/{channel_name}/measured/start_position_row - - data/{channel_name}/measured/end_position_row - - 
data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 - - data/{channel_name}/measured/radiance_unit_conversion_coefficient - - data/{channel_name}/measured/channel_effective_solar_irradiance - - data/{channel_name}/measured/effective_radiance - - data/{channel_name}/measured/x - - data/{channel_name}/measured/y - - data/{channel_name}/measured/pixel_quality - - data/{channel_name}/measured/index_map - - data/mtg_geos_projection - - data/swath_direction - - data/swath_number - - index - - state/celestial/earth_sun_distance - - state/celestial/subsolar_latitude - - state/celestial/subsolar_longitude - - state/celestial/sun_satellite_distance - - state/platform/platform_altitude - - state/platform/subsatellite_latitude - - state/platform/subsatellite_longitude - - time + required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - vis_06 @@ -154,34 +102,7 @@ file_types: "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS04-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 - required_netcdf_variables: - - attr/platform - - data/{channel_name}/measured/start_position_row - - data/{channel_name}/measured/end_position_row - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 - - data/{channel_name}/measured/radiance_unit_conversion_coefficient - - data/{channel_name}/measured/channel_effective_solar_irradiance - - data/{channel_name}/measured/effective_radiance - - data/{channel_name}/measured/x - - data/{channel_name}/measured/y - - data/{channel_name}/measured/pixel_quality - - data/{channel_name}/measured/index_map - - data/mtg_geos_projection - - data/swath_direction - - data/swath_number - - index - - state/celestial/earth_sun_distance - - state/celestial/subsolar_latitude - - state/celestial/subsolar_longitude - - state/celestial/sun_satellite_distance - - state/platform/platform_altitude - - state/platform/subsatellite_latitude - - state/platform/subsatellite_longitude - - time + required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - vis_04 @@ -192,34 +113,7 @@ file_types: "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS05-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 - required_netcdf_variables: - - attr/platform - - 
data/{channel_name}/measured/start_position_row - - data/{channel_name}/measured/end_position_row - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 - - data/{channel_name}/measured/radiance_unit_conversion_coefficient - - data/{channel_name}/measured/channel_effective_solar_irradiance - - data/{channel_name}/measured/effective_radiance - - data/{channel_name}/measured/x - - data/{channel_name}/measured/y - - data/{channel_name}/measured/pixel_quality - - data/{channel_name}/measured/index_map - - data/mtg_geos_projection - - data/swath_direction - - data/swath_number - - index - - state/celestial/earth_sun_distance - - state/celestial/subsolar_latitude - - state/celestial/subsolar_longitude - - state/celestial/sun_satellite_distance - - state/platform/platform_altitude - - state/platform/subsatellite_latitude - - state/platform/subsatellite_longitude - - time + required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - vis_05 @@ -230,34 +124,7 @@ file_types: "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS08-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 - required_netcdf_variables: - - attr/platform - - data/{channel_name}/measured/start_position_row - - data/{channel_name}/measured/end_position_row - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 - - data/{channel_name}/measured/radiance_unit_conversion_coefficient - - data/{channel_name}/measured/channel_effective_solar_irradiance - - data/{channel_name}/measured/effective_radiance - - data/{channel_name}/measured/x - - data/{channel_name}/measured/y - - data/{channel_name}/measured/pixel_quality - - data/{channel_name}/measured/index_map - - data/mtg_geos_projection - - data/swath_direction - - data/swath_number - - index - - state/celestial/earth_sun_distance - - state/celestial/subsolar_latitude - - state/celestial/subsolar_longitude - - state/celestial/sun_satellite_distance - - state/platform/platform_altitude - - state/platform/subsatellite_latitude - - state/platform/subsatellite_longitude - - time + required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - vis_08 @@ -268,34 +135,7 @@ file_types: 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS09-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 - required_netcdf_variables: - - attr/platform - - data/{channel_name}/measured/start_position_row - - data/{channel_name}/measured/end_position_row - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 - - data/{channel_name}/measured/radiance_unit_conversion_coefficient - - data/{channel_name}/measured/channel_effective_solar_irradiance - - data/{channel_name}/measured/effective_radiance - - data/{channel_name}/measured/x - - data/{channel_name}/measured/y - - data/{channel_name}/measured/pixel_quality - - data/{channel_name}/measured/index_map - - data/mtg_geos_projection - - data/swath_direction - - data/swath_number - - index - - state/celestial/earth_sun_distance - - state/celestial/subsolar_latitude - - state/celestial/subsolar_longitude - - state/celestial/sun_satellite_distance - - state/platform/platform_altitude - - state/platform/subsatellite_latitude - - state/platform/subsatellite_longitude - - time + required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - vis_09 @@ -306,34 +146,7 @@ file_types: "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-NIR13-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 - required_netcdf_variables: - - attr/platform - - data/{channel_name}/measured/start_position_row - - data/{channel_name}/measured/end_position_row - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 - - data/{channel_name}/measured/radiance_unit_conversion_coefficient - - data/{channel_name}/measured/channel_effective_solar_irradiance - - data/{channel_name}/measured/effective_radiance - - data/{channel_name}/measured/x - - data/{channel_name}/measured/y - - data/{channel_name}/measured/pixel_quality - - data/{channel_name}/measured/index_map - - data/mtg_geos_projection - - data/swath_direction - - data/swath_number - - index - - state/celestial/earth_sun_distance - - state/celestial/subsolar_latitude - - state/celestial/subsolar_longitude - - state/celestial/sun_satellite_distance - - state/platform/platform_altitude - - state/platform/subsatellite_latitude - - state/platform/subsatellite_longitude - - time + required_netcdf_variables: 
*required-variables variable_name_replacements: channel_name: - nir_13 @@ -344,34 +157,7 @@ file_types: "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-NIR16-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 - required_netcdf_variables: - - attr/platform - - data/{channel_name}/measured/start_position_row - - data/{channel_name}/measured/end_position_row - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 - - data/{channel_name}/measured/radiance_unit_conversion_coefficient - - data/{channel_name}/measured/channel_effective_solar_irradiance - - data/{channel_name}/measured/effective_radiance - - data/{channel_name}/measured/x - - data/{channel_name}/measured/y - - data/{channel_name}/measured/pixel_quality - - data/{channel_name}/measured/index_map - - data/mtg_geos_projection - - data/swath_direction - - data/swath_number - - index - - state/celestial/earth_sun_distance - - state/celestial/subsolar_latitude - - state/celestial/subsolar_longitude - - state/celestial/sun_satellite_distance - - state/platform/platform_altitude - - state/platform/subsatellite_latitude - - state/platform/subsatellite_longitude - - time + required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - nir_16 @@ -382,34 +168,7 @@ file_types: "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-NIR22-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 - required_netcdf_variables: - - attr/platform - - data/{channel_name}/measured/start_position_row - - data/{channel_name}/measured/end_position_row - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 - - data/{channel_name}/measured/radiance_unit_conversion_coefficient - - data/{channel_name}/measured/channel_effective_solar_irradiance - - data/{channel_name}/measured/effective_radiance - - data/{channel_name}/measured/x - - data/{channel_name}/measured/y - - data/{channel_name}/measured/pixel_quality - - data/{channel_name}/measured/index_map - - data/mtg_geos_projection - - data/swath_direction - - data/swath_number - - index - - state/celestial/earth_sun_distance - - state/celestial/subsolar_latitude - - state/celestial/subsolar_longitude - - state/celestial/sun_satellite_distance - - state/platform/platform_altitude - - 
state/platform/subsatellite_latitude - - state/platform/subsatellite_longitude - - time + required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - nir_22 @@ -420,34 +179,7 @@ file_types: "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR38-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 - required_netcdf_variables: - - attr/platform - - data/{channel_name}/measured/start_position_row - - data/{channel_name}/measured/end_position_row - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 - - data/{channel_name}/measured/radiance_unit_conversion_coefficient - - data/{channel_name}/measured/channel_effective_solar_irradiance - - data/{channel_name}/measured/effective_radiance - - data/{channel_name}/measured/x - - data/{channel_name}/measured/y - - data/{channel_name}/measured/pixel_quality - - data/{channel_name}/measured/index_map - - data/mtg_geos_projection - - data/swath_direction - - data/swath_number - - index - - state/celestial/earth_sun_distance - - state/celestial/subsolar_latitude - - state/celestial/subsolar_longitude - - state/celestial/sun_satellite_distance - - state/platform/platform_altitude - - state/platform/subsatellite_latitude - - state/platform/subsatellite_longitude - - time + required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - ir_38 @@ -458,34 +190,7 @@ file_types: "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-WV63-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 - required_netcdf_variables: - - attr/platform - - data/{channel_name}/measured/start_position_row - - data/{channel_name}/measured/end_position_row - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 - - data/{channel_name}/measured/radiance_unit_conversion_coefficient - - data/{channel_name}/measured/channel_effective_solar_irradiance - - data/{channel_name}/measured/effective_radiance - - data/{channel_name}/measured/x - - data/{channel_name}/measured/y - - data/{channel_name}/measured/pixel_quality - - data/{channel_name}/measured/index_map - - data/mtg_geos_projection - - data/swath_direction - - data/swath_number - - index - - state/celestial/earth_sun_distance - - state/celestial/subsolar_latitude - - 
state/celestial/subsolar_longitude - - state/celestial/sun_satellite_distance - - state/platform/platform_altitude - - state/platform/subsatellite_latitude - - state/platform/subsatellite_longitude - - time + required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - wv_63 @@ -496,34 +201,7 @@ file_types: "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-WV73-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 - required_netcdf_variables: - - attr/platform - - data/{channel_name}/measured/start_position_row - - data/{channel_name}/measured/end_position_row - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 - - data/{channel_name}/measured/radiance_unit_conversion_coefficient - - data/{channel_name}/measured/channel_effective_solar_irradiance - - data/{channel_name}/measured/effective_radiance - - data/{channel_name}/measured/x - - data/{channel_name}/measured/y - - data/{channel_name}/measured/pixel_quality - - data/{channel_name}/measured/index_map - - data/mtg_geos_projection - - data/swath_direction - - data/swath_number - - index - - state/celestial/earth_sun_distance - - state/celestial/subsolar_latitude - - state/celestial/subsolar_longitude - - state/celestial/sun_satellite_distance - - state/platform/platform_altitude - - state/platform/subsatellite_latitude - - state/platform/subsatellite_longitude - - time + required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - wv_73 @@ -534,34 +212,7 @@ file_types: "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR87-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 - required_netcdf_variables: - - attr/platform - - data/{channel_name}/measured/start_position_row - - data/{channel_name}/measured/end_position_row - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 - - data/{channel_name}/measured/radiance_unit_conversion_coefficient - - data/{channel_name}/measured/channel_effective_solar_irradiance - - data/{channel_name}/measured/effective_radiance - - data/{channel_name}/measured/x - - data/{channel_name}/measured/y - - data/{channel_name}/measured/pixel_quality - - data/{channel_name}/measured/index_map - - data/mtg_geos_projection - - data/swath_direction - - data/swath_number - 
- index - - state/celestial/earth_sun_distance - - state/celestial/subsolar_latitude - - state/celestial/subsolar_longitude - - state/celestial/sun_satellite_distance - - state/platform/platform_altitude - - state/platform/subsatellite_latitude - - state/platform/subsatellite_longitude - - time + required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - ir_87 @@ -572,34 +223,7 @@ file_types: "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR97-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 - required_netcdf_variables: - - attr/platform - - data/{channel_name}/measured/start_position_row - - data/{channel_name}/measured/end_position_row - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 - - data/{channel_name}/measured/radiance_unit_conversion_coefficient - - data/{channel_name}/measured/channel_effective_solar_irradiance - - data/{channel_name}/measured/effective_radiance - - data/{channel_name}/measured/x - - data/{channel_name}/measured/y - - data/{channel_name}/measured/pixel_quality - - data/{channel_name}/measured/index_map - - data/mtg_geos_projection - - data/swath_direction - - data/swath_number - - index - - state/celestial/earth_sun_distance - - state/celestial/subsolar_latitude - - state/celestial/subsolar_longitude - - state/celestial/sun_satellite_distance - - state/platform/platform_altitude - - state/platform/subsatellite_latitude - - state/platform/subsatellite_longitude - - time + required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - ir_97 @@ -610,34 +234,7 @@ file_types: "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR105-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 - required_netcdf_variables: - - attr/platform - - data/{channel_name}/measured/start_position_row - - data/{channel_name}/measured/end_position_row - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 - - data/{channel_name}/measured/radiance_unit_conversion_coefficient - - data/{channel_name}/measured/channel_effective_solar_irradiance - - data/{channel_name}/measured/effective_radiance - - data/{channel_name}/measured/x - - data/{channel_name}/measured/y - - data/{channel_name}/measured/pixel_quality - - 
data/{channel_name}/measured/index_map - - data/mtg_geos_projection - - data/swath_direction - - data/swath_number - - index - - state/celestial/earth_sun_distance - - state/celestial/subsolar_latitude - - state/celestial/subsolar_longitude - - state/celestial/sun_satellite_distance - - state/platform/platform_altitude - - state/platform/subsatellite_latitude - - state/platform/subsatellite_longitude - - time + required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - ir_105 @@ -648,34 +245,7 @@ file_types: "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR123-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 - required_netcdf_variables: - - attr/platform - - data/{channel_name}/measured/start_position_row - - data/{channel_name}/measured/end_position_row - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 - - data/{channel_name}/measured/radiance_unit_conversion_coefficient - - data/{channel_name}/measured/channel_effective_solar_irradiance - - data/{channel_name}/measured/effective_radiance - - data/{channel_name}/measured/x - - data/{channel_name}/measured/y - - data/{channel_name}/measured/pixel_quality - - data/{channel_name}/measured/index_map - - data/mtg_geos_projection - - data/swath_direction - - data/swath_number - - index - - state/celestial/earth_sun_distance - - state/celestial/subsolar_latitude - - state/celestial/subsolar_longitude - - state/celestial/sun_satellite_distance - - state/platform/platform_altitude - - state/platform/subsatellite_latitude - - state/platform/subsatellite_longitude - - time + required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - ir_123 @@ -686,34 +256,7 @@ file_types: "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR133-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 - required_netcdf_variables: - - attr/platform - - data/{channel_name}/measured/start_position_row - - data/{channel_name}/measured/end_position_row - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a - - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 - - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 - - data/{channel_name}/measured/radiance_unit_conversion_coefficient - - data/{channel_name}/measured/channel_effective_solar_irradiance - - data/{channel_name}/measured/effective_radiance - - 
data/{channel_name}/measured/x - - data/{channel_name}/measured/y - - data/{channel_name}/measured/pixel_quality - - data/{channel_name}/measured/index_map - - data/mtg_geos_projection - - data/swath_direction - - data/swath_number - - index - - state/celestial/earth_sun_distance - - state/celestial/subsolar_latitude - - state/celestial/subsolar_longitude - - state/celestial/sun_satellite_distance - - state/platform/platform_altitude - - state/platform/subsatellite_latitude - - state/platform/subsatellite_longitude - - time + required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - ir_133 diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index f90ae912d6..f94377b91c 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -31,8 +31,7 @@ This reader currently supports Full Disk High Spectral Resolution Imagery (FDHSI) ,High Spatial Resolution Fast Imagery (HRFI) data in full-disc ("FD") scanning mode. In addition it also supports the L1C format for the African dissemination ("AF"), where each file - contains the masked full-dic of a single channel - (https://www-cdn.eumetsat.int/files/2022-07/MTG%20EUMETCast%20Africa%20Product%20User%20Guide%20%5BAfricaPUG%5D_v2E.pdf) + contains the masked full-dic of a single channel see `AF PUG`_. If the user provides a list of both FDHSI and HRFI files from the same repeat cycle to the Satpy ``Scene``, Satpy will automatically read the channels from the source with the finest resolution, i.e. from the HRFI files for the vis_06, nir_22, ir_38, and ir_105 channels. @@ -107,6 +106,7 @@ If you use ``hdf5plugin``, make sure to add the line ``import hdf5plugin`` at the top of your script. +.. _AF PUG: https://www-cdn.eumetsat.int/files/2022-07/MTG%20EUMETCast%20Africa%20Product%20User%20Guide%20%5BAfricaPUG%5D_v2E.pdf .. _PUG: https://www-cdn.eumetsat.int/files/2020-07/pdf_mtg_fci_l1_pug.pdf .. _EUMETSAT: https://www.eumetsat.int/mtg-flexible-combined-imager # noqa: E501 .. 
_test data releases: https://www.eumetsat.int/mtg-test-data diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index 5710586466..52485a18d9 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -65,7 +65,127 @@ }, } +list_channel_solar = ["vis_04", "vis_05", "vis_06", "vis_08", "vis_09", + "nir_13", "nir_16", "nir_22"] +list_channel_terran = ["ir_38", "wv_63", "wv_73", "ir_87", "ir_97", "ir_105", + "ir_123", "ir_133"] +list_total_channel = list_channel_solar + list_channel_terran +list_resolution_v06 = ["1km","3km"] +list_resolution = ["3km"] +expected_pos_info_for_filetype = { + "fdhsi": {"1km": {"start_position_row": 1, + "end_position_row": 200, + "segment_height": 200, + "grid_width": 11136}, + "2km": {"start_position_row": 1, + "end_position_row": 100, + "segment_height": 100, + "grid_width": 5568}}, + "hrfi": {"500m": {"start_position_row": 1, + "end_position_row": 400, + "segment_height": 400, + "grid_width": 22272}, + "1km": {"start_position_row": 1, + "end_position_row": 200, + "grid_width": 11136, + "segment_height": 200}}, + "fci_af" : {"3km": {"start_position_row": 1, + "end_position_row": 67, + "segment_height": 67, + "grid_width": 3712 + }, + }, + "fci_af_vis_06" : {"3km": {"start_position_row": 1, + "end_position_row": 67, + "segment_height": 67, + "grid_width": 3712 + }, + "1km": {"start_position_row": 1, + "end_position_row": 200, + "grid_width": 11136, + "segment_height": 200} + } + } + +_chans_fdhsi = {"solar": list_channel_solar, + "solar_grid_type": ["1km"] * 8, + "terran": list_channel_terran, + "terran_grid_type": ["2km"] * 8} + +_chans_hrfi = {"solar": ["vis_06", "nir_22"], + "solar_grid_type": ["500m"] * 2, + "terran": ["ir_38", "ir_105"], + "terran_grid_type": ["1km"] * 2} + +dict_calibration = { "radiance" : {"dtype": np.float32, + "value_1": 15, + "value_0":9700, + "attrs_dict":{"calibration":"radiance", + "units":"mW m-2 sr-1 (cm-1)-1", + "radiance_unit_conversion_coefficient": np.float32(1234.56) + }, + }, + + "reflectance" : {"dtype": np.float32, + "attrs_dict":{"calibration":"reflectance", + "units":"%" + }, + }, + + "counts" : {"dtype": np.uint16, + "value_1": 1, + "value_0": 5000, + "attrs_dict":{"calibration":"counts", + "units":"count", + }, + }, + "brightness_temperature" : {"dtype": np.float32, + "value_1": np.float32(209.68275), + "value_0": np.float32(1888.8513), + "attrs_dict":{"calibration":"brightness_temperature", + "units":"K", + }, + }, +} +_test_filenames = {"fdhsi": [ + "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" + "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" + "20170410113925_20170410113934_N__C_0070_0067.nc" +], + "hrfi": [ + "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-HRFI-FD--" + "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" + "20170410113925_20170410113934_N__C_0070_0067.nc" + ] +} + +def resolutions(channel): + """Get the resolutions.""" + if channel == "vis_06": + return list_resolution_v06 + else: + return list_resolution + +def fill_chans_af(): + """Fill the dict _chans_af with the right channel and resolution.""" + _chans_af = {} + for channel in list_total_channel: + list_resol = resolutions(channel) + for resol in list_resol: + chann_upp = channel.replace("_","").upper() + _test_filenames[f"af_{channel}_{resol}"] = [f"W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1-FCI-1C-RRAD" + f"-{resol.upper()}-AF-{chann_upp}-x-x---NC4E_C_EUMT_20240125144655_DT_OPE" + 
f"_20240109080007_20240109080924_N_JLS_T_0049_0000.nc"] + if channel.split("_")[0] in ["vis","nir"]: + _chans_af[f"{channel}_{resol}"] = {"solar":[channel], + "solar_grid_type": [resol]} + elif channel.split("_")[0] in ["ir","wv"]: + _chans_af[f"{channel}_{resol}"] = {"terran":[channel], + "terran_grid_type": [resol]} + return _chans_af + +_chans_af = fill_chans_af() # ---------------------------------------------------- # Filehandlers preparation --------------------------- # ---------------------------------------------------- @@ -403,56 +523,6 @@ def clear_cache(reader): for fh in fhs: fh.cached_file_content = {} -list_channel_solar = ["vis_04", "vis_05", "vis_06", "vis_08", "vis_09", - "nir_13", "nir_16", "nir_22"] -list_channel_terran = ["ir_38", "wv_63", "wv_73", "ir_87", "ir_97", "ir_105", - "ir_123", "ir_133"] -list_total_channel = list_channel_solar + list_channel_terran -list_resolution_v06 = ["1km","3km"] -list_resolution = ["3km"] -expected_pos_info_for_filetype = { - "fdhsi": {"1km": {"start_position_row": 1, - "end_position_row": 200, - "segment_height": 200, - "grid_width": 11136}, - "2km": {"start_position_row": 1, - "end_position_row": 100, - "segment_height": 100, - "grid_width": 5568}}, - "hrfi": {"500m": {"start_position_row": 1, - "end_position_row": 400, - "segment_height": 400, - "grid_width": 22272}, - "1km": {"start_position_row": 1, - "end_position_row": 200, - "grid_width": 11136, - "segment_height": 200}}, - "fci_af" : {"3km": {"start_position_row": 1, - "end_position_row": 67, - "segment_height": 67, - "grid_width": 3712 - }, - }, - "fci_af_vis_06" : {"3km": {"start_position_row": 1, - "end_position_row": 67, - "segment_height": 67, - "grid_width": 3712 - }, - "1km": {"start_position_row": 1, - "end_position_row": 200, - "grid_width": 11136, - "segment_height": 200} - } - } - - -def resolutions(channel): - """Get the resolutions.""" - if channel == "vis_06": - return list_resolution_v06 - else: - return list_resolution - def get_list_channel_calibration(calibration): """Get the channel's list according the calibration.""" if calibration == "reflectance": @@ -468,79 +538,6 @@ def generate_parameters(calibration): for resolution in resolutions(channel): yield (channel, resolution) -_chans_fdhsi = {"solar": list_channel_solar, - "solar_grid_type": ["1km"] * 8, - "terran": list_channel_terran, - "terran_grid_type": ["2km"] * 8} - -_chans_hrfi = {"solar": ["vis_06", "nir_22"], - "solar_grid_type": ["500m"] * 2, - "terran": ["ir_38", "ir_105"], - "terran_grid_type": ["1km"] * 2} - -dict_calibration = { "radiance" : {"dtype": np.float32, - "value_1": 15, - "value_0":9700, - "attrs_dict":{"calibration":"radiance", - "units":"mW m-2 sr-1 (cm-1)-1", - "radiance_unit_conversion_coefficient": np.float32(1234.56) - }, - }, - - "reflectance" : {"dtype": np.float32, - "attrs_dict":{"calibration":"reflectance", - "units":"%" - }, - }, - - "counts" : {"dtype": np.uint16, - "value_1": 1, - "value_0": 5000, - "attrs_dict":{"calibration":"counts", - "units":"count", - }, - }, - - "brightness_temperature" : {"dtype": np.float32, - "value_1": np.float32(209.68275), - "value_0": np.float32(1888.8513), - "attrs_dict":{"calibration":"brightness_temperature", - "units":"K", - }, - }, -} -_test_filenames = {"fdhsi": [ - "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" - "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" - "20170410113925_20170410113934_N__C_0070_0067.nc" -], - "hrfi": [ - "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-HRFI-FD--" - 
"CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" - "20170410113925_20170410113934_N__C_0070_0067.nc" - ] -} - -def fill_chans_af(): - """Fill the dict _chans_af with the right channel and resolution.""" - _chans_af = {} - for channel in list_total_channel: - list_resol = resolutions(channel) - for resol in list_resol: - chann_upp = channel.replace("_","").upper() - _test_filenames[f"af_{channel}_{resol}"] = [f"W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1-FCI-1C-RRAD" - f"-{resol.upper()}-AF-{chann_upp}-x-x---NC4E_C_EUMT_20240125144655_DT_OPE" - f"_20240109080007_20240109080924_N_JLS_T_0049_0000.nc"] - if channel.split("_")[0] in ["vis","nir"]: - _chans_af[f"{channel}_{resol}"] = {"solar":[channel], - "solar_grid_type": [resol]} - elif channel.split("_")[0] in ["ir","wv"]: - _chans_af[f"{channel}_{resol}"] = {"terran":[channel], - "terran_grid_type": [resol]} - return _chans_af - -_chans_af = fill_chans_af() - @contextlib.contextmanager def mocked_basefilehandler(filehandler): """Mock patch the base class of the FCIL1cNCFileHandler with the content of our fake files (filehandler).""" From 47a0fa2c3249f0117128bbce71f9b8f9e2edabe6 Mon Sep 17 00:00:00 2001 From: clement laplace Date: Thu, 25 Apr 2024 12:47:59 +0000 Subject: [PATCH 1319/1416] style: Put in uppercase the contant into the test_fci_l1c.py file --- satpy/tests/reader_tests/test_fci_l1c_nc.py | 62 ++++++++++----------- 1 file changed, 31 insertions(+), 31 deletions(-) diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index 52485a18d9..56130190af 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -65,14 +65,14 @@ }, } -list_channel_solar = ["vis_04", "vis_05", "vis_06", "vis_08", "vis_09", +LIST_CHANNEL_SOLAR = ["vis_04", "vis_05", "vis_06", "vis_08", "vis_09", "nir_13", "nir_16", "nir_22"] -list_channel_terran = ["ir_38", "wv_63", "wv_73", "ir_87", "ir_97", "ir_105", +LIST_CHANNEL_TERRAN = ["ir_38", "wv_63", "wv_73", "ir_87", "ir_97", "ir_105", "ir_123", "ir_133"] -list_total_channel = list_channel_solar + list_channel_terran -list_resolution_v06 = ["1km","3km"] -list_resolution = ["3km"] -expected_pos_info_for_filetype = { +LIST_TOTAL_CHANNEL = LIST_CHANNEL_SOLAR + LIST_CHANNEL_TERRAN +LIST_RESOLUTION_V06 = ["1km","3km"] +LIST_RESOLUTION = ["3km"] +EXPECTED_POS_INFO_FOR_FILETYPE = { "fdhsi": {"1km": {"start_position_row": 1, "end_position_row": 200, "segment_height": 200, @@ -107,17 +107,17 @@ } } -_chans_fdhsi = {"solar": list_channel_solar, +_CHANS_FDHSI = {"solar": LIST_CHANNEL_SOLAR, "solar_grid_type": ["1km"] * 8, - "terran": list_channel_terran, + "terran": LIST_CHANNEL_TERRAN, "terran_grid_type": ["2km"] * 8} -_chans_hrfi = {"solar": ["vis_06", "nir_22"], +_CHANS_HRFI = {"solar": ["vis_06", "nir_22"], "solar_grid_type": ["500m"] * 2, "terran": ["ir_38", "ir_105"], "terran_grid_type": ["1km"] * 2} -dict_calibration = { "radiance" : {"dtype": np.float32, +DICT_CALIBRATION = { "radiance" : {"dtype": np.float32, "value_1": 15, "value_0":9700, "attrs_dict":{"calibration":"radiance", @@ -163,14 +163,14 @@ def resolutions(channel): """Get the resolutions.""" if channel == "vis_06": - return list_resolution_v06 + return LIST_RESOLUTION_V06 else: - return list_resolution + return LIST_RESOLUTION def fill_chans_af(): - """Fill the dict _chans_af with the right channel and resolution.""" - _chans_af = {} - for channel in list_total_channel: + """Fill the dict _CHANS_AF and the list _test_filenames with the right channel and 
resolution.""" + _CHANS_AF = {} + for channel in LIST_TOTAL_CHANNEL: list_resol = resolutions(channel) for resol in list_resol: chann_upp = channel.replace("_","").upper() @@ -178,14 +178,14 @@ def fill_chans_af(): f"-{resol.upper()}-AF-{chann_upp}-x-x---NC4E_C_EUMT_20240125144655_DT_OPE" f"_20240109080007_20240109080924_N_JLS_T_0049_0000.nc"] if channel.split("_")[0] in ["vis","nir"]: - _chans_af[f"{channel}_{resol}"] = {"solar":[channel], + _CHANS_AF[f"{channel}_{resol}"] = {"solar":[channel], "solar_grid_type": [resol]} elif channel.split("_")[0] in ["ir","wv"]: - _chans_af[f"{channel}_{resol}"] = {"terran":[channel], + _CHANS_AF[f"{channel}_{resol}"] = {"terran":[channel], "terran_grid_type": [resol]} - return _chans_af + return _CHANS_AF,_test_filenames -_chans_af = fill_chans_af() +_CHANS_AF,_test_filenames = fill_chans_af() # ---------------------------------------------------- # Filehandlers preparation --------------------------- # ---------------------------------------------------- @@ -526,11 +526,11 @@ def clear_cache(reader): def get_list_channel_calibration(calibration): """Get the channel's list according the calibration.""" if calibration == "reflectance": - return list_channel_solar + return LIST_CHANNEL_SOLAR elif calibration == "brightness_temperature": - return list_channel_terran + return LIST_CHANNEL_TERRAN else: - return list_total_channel + return LIST_TOTAL_CHANNEL def generate_parameters(calibration): """Generate dinamicaly the parameters.""" @@ -553,7 +553,7 @@ def FakeFCIFileHandlerFDHSI_fixture(): with mocked_basefilehandler(FakeFCIFileHandlerFDHSI): param_dict = { "filetype": "fci_l1c_fdhsi", - "channels": _chans_fdhsi, + "channels": _CHANS_FDHSI, "filenames": _test_filenames["fdhsi"] } yield param_dict @@ -565,7 +565,7 @@ def FakeFCIFileHandlerHRFI_fixture(): with mocked_basefilehandler(FakeFCIFileHandlerHRFI): param_dict = { "filetype": "fci_l1c_hrfi", - "channels": _chans_hrfi, + "channels": _CHANS_HRFI, "filenames": _test_filenames["hrfi"] } yield param_dict @@ -579,7 +579,7 @@ def FakeFCIFileHandlerAF_fixture(channel,resolution): with mocked_basefilehandler(FakeFCIFileHandlerAF): param_dict = { "filetype": "fci_l1c_af", - "channels": _chans_af[f"{channel}_{resolution}"], + "channels": _CHANS_AF[f"{channel}_{resolution}"], "filenames": _test_filenames[f"af_{channel}_{resolution}"], } yield param_dict @@ -592,9 +592,9 @@ def FakeFCIFileHandlerAF_fixture(channel,resolution): class TestFCIL1cNCReader: """Test FCI L1c NetCDF reader with nominal data.""" - fh_param_for_filetype = {"hrfi": {"channels": _chans_hrfi, + fh_param_for_filetype = {"hrfi": {"channels": _CHANS_HRFI, "filenames": _test_filenames["hrfi"]}, - "fdhsi": {"channels": _chans_fdhsi, + "fdhsi": {"channels": _CHANS_FDHSI, "filenames": _test_filenames["fdhsi"]}} def _get_type_ter_AF(self,channel): @@ -682,7 +682,7 @@ def test_load_calibration(self, reader_configs, fh_param, assert expected_res_n[res_type] == len(res) for ch, grid_type in zip(list_chan, list_grid): - self._get_assert_load(res,ch,grid_type,dict_calibration[calibration]) + self._get_assert_load(res,ch,grid_type,DICT_CALIBRATION[calibration]) @pytest.mark.parametrize(("calibration", "channel", "resolution"), [ (calibration, channel, resolution) @@ -700,7 +700,7 @@ def test_load_calibration_af(self,FakeFCIFileHandlerAF_fixture,reader_configs,ch assert expected_res_n == len(res) for ch, grid_type in zip(fh_param["channels"][type_ter], fh_param["channels"][f"{type_ter}_grid_type"]): - 
self._get_assert_load(res,ch,grid_type,dict_calibration[calibration]) + self._get_assert_load(res,ch,grid_type,DICT_CALIBRATION[calibration]) @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), @@ -726,8 +726,8 @@ def test_orbital_parameters_attr(self, reader_configs, fh_param): } @pytest.mark.parametrize(("fh_param", "expected_pos_info"), [ - (lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), expected_pos_info_for_filetype["fdhsi"]), - (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), expected_pos_info_for_filetype["hrfi"]) + (lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), EXPECTED_POS_INFO_FOR_FILETYPE["fdhsi"]), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), EXPECTED_POS_INFO_FOR_FILETYPE["hrfi"]) ]) def test_get_segment_position_info(self, reader_configs, fh_param, expected_pos_info): """Test the segment position info method.""" From 391f04bd375960efb854d955a7b815ba01edc68d Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sun, 28 Apr 2024 22:38:16 +0800 Subject: [PATCH 1320/1416] reader and composites --- satpy/etc/composites/mersi-1.yaml | 73 ++++ satpy/etc/readers/fy3a_mersi1_l1b.yaml | 422 ++++++++++++++++++++++ satpy/etc/readers/fy3b_mersi1_l1b.yaml | 428 ++++++++++++++++++++++ satpy/etc/readers/fy3c_mersi1_l1b.yaml | 480 +++++++++++++++++++++++++ satpy/readers/mersi_l1b.py | 65 +++- 5 files changed, 1453 insertions(+), 15 deletions(-) create mode 100644 satpy/etc/composites/mersi-1.yaml create mode 100644 satpy/etc/readers/fy3a_mersi1_l1b.yaml create mode 100644 satpy/etc/readers/fy3b_mersi1_l1b.yaml create mode 100644 satpy/etc/readers/fy3c_mersi1_l1b.yaml diff --git a/satpy/etc/composites/mersi-1.yaml b/satpy/etc/composites/mersi-1.yaml new file mode 100644 index 0000000000..fce0127697 --- /dev/null +++ b/satpy/etc/composites/mersi-1.yaml @@ -0,0 +1,73 @@ +sensor_name: visir/mersi-1 + +modifiers: + rayleigh_corrected: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: rayleigh_only + prerequisites: + - name: '3' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + # sunz_corrected: + # modifier: !!python/name:satpy.modifiers.SunZenithCorrector + # prerequisites: + # - solar_zenith_angle + + nir_reflectance: + modifier: !!python/name:satpy.modifiers.NIRReflectance + prerequisites: + - name: '24' + optional_prerequisites: + - solar_zenith_angle + + +composites: + colorized_ir: + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - name: '5' + standard_name: colorized_ir_clouds + + true_color_uncorr: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: '3' + modifiers: [sunz_corrected] + - name: '2' + modifiers: [sunz_corrected] + - name: '1' + modifiers: [sunz_corrected] + standard_name: true_color + + natural_color: + compositor: !!python/name:satpy.composites.RatioSharpenedRGB + prerequisites: + - name: '6' + modifiers: [sunz_corrected] + - name: '16' + modifiers: [sunz_corrected] + - name: '3' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: '4' + modifiers: [sunz_corrected] + high_resolution_band: green + neutral_resolution_band: blue + standard_name: natural_color + + overview: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: '3' + modifiers: [sunz_corrected] + - name: '4' + modifiers: [sunz_corrected] + - name: '5' + 
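+    # band 5 is thermal, so it takes no sunz correction; only the two
+    # reflective bands above are modified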
standard_name: overview + diff --git a/satpy/etc/readers/fy3a_mersi1_l1b.yaml b/satpy/etc/readers/fy3a_mersi1_l1b.yaml new file mode 100644 index 0000000000..2db2617f6d --- /dev/null +++ b/satpy/etc/readers/fy3a_mersi1_l1b.yaml @@ -0,0 +1,422 @@ +reader: + name: fy3a_mersi1_l1b + short_name: FY3A MERSI-1 l1b + long_name: FY-3A MERSI-1 L1B data in HDF5 format + description: FY-3A Medium Resolution Spectral Imager 1 (MERSI-1) L1B Reader + status: Beta + supports_fsspec: false + sensors: [mersi-1] + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + +file_types: + fy3a_mersi1_l1b_1000: + file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B + rows_per_scan: 10 + file_patterns: + - 'FY3A_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_1000M_MS.{ext:3s}' + + fy3a_mersi1_l1b_250: + file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B + rows_per_scan: 40 + file_patterns: + - 'FY3A_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_0250M_MS.{ext:3s}' + +datasets: + '1': + name: '1' + wavelength: [0.445, 0.470, 0.495] + resolution: + 1000: + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_250_Aggr.1KM_RefSB + band_index: 0 + 250: + file_type: fy3a_mersi1_l1b_250 + file_key: EV_250_RefSB_b1 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '2': + name: '2' + wavelength: [0.525, 0.550, 0.575] + resolution: + 1000: + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_250_Aggr.1KM_RefSB + band_index: 1 + 250: + file_type: fy3a_mersi1_l1b_250 + file_key: EV_250_RefSB_b2 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '3': + name: '3' + wavelength: [0.625, 0.650, 0.675] + resolution: + 1000: + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_250_Aggr.1KM_RefSB + band_index: 2 + 250: + file_type: fy3a_mersi1_l1b_250 + file_key: EV_250_RefSB_b3 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '4': + name: '4' + wavelength: [0.840, 0.865, 0.890] + resolution: + 1000: + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_250_Aggr.1KM_RefSB + band_index: 3 + 250: + file_type: fy3a_mersi1_l1b_250 + file_key: EV_250_RefSB_b4 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '5': + name: '5' + wavelength: [10, 11.25, 12.5] + resolution: + 1000: + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_250_Aggr.1KM_Emissive + 250: + file_type: fy3a_mersi1_l1b_250 + file_key: EV_250_Emissive + coordinates: [longitude, latitude] + calibration: + brightness_temperature: + units: "K" + standard_name: toa_brightness_temperature + counts: + units: "1" + standard_name: counts + + '6': + name: '6' + wavelength: [1.615, 1.640, 1.665] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 0 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '7': + name: '7' + wavelength: [2.105, 2.130, 2.155] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 1 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + 
standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '8': + name: '8' + wavelength: [0.402, 0.412, 0.422] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 2 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '9': + name: '9' + wavelength: [0.433, 0.443, 0.453] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 3 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '10': + name: '10' + wavelength: [0.480, 0.490, 0.500] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 4 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '11': + name: '11' + wavelength: [0.510, 0.520, 0.530] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 5 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '12': + name: '12' + wavelength: [0.555, 0.565, 0.575] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 6 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '13': + name: '13' + wavelength: [0.640, 0.650, 0.660] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 7 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '14': + name: '14' + wavelength: [0.675, 0.685, 0.695] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 8 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '15': + name: '15' + wavelength: [0.755, 0.765, 0.775] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 9 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '16': + name: '16' + wavelength: [0.855, 0.865, 0.875] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 10 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '17': + name: '17' + wavelength: [0.895, 0.905, 0.915] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 11 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '18': + name: '18' + wavelength: [0.930, 0.940, 0.950] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 12 + coordinates: [longitude, latitude] + 
calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '19': + name: '19' + wavelength: [0.970, 0.980, 0.990] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 13 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '20': + name: '20' + wavelength: [1.020, 1.030, 1.040] + resolution: 1000 + file_type: fy3a_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 14 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + longitude: + name: longitude + units: degrees_east + standard_name: longitude + resolution: + 1000: + file_type: fy3a_mersi1_l1b_1000 + file_key: Longitude + 250: + file_type: fy3a_mersi1_l1b_250 + file_key: Longitude + + latitude: + name: latitude + units: degrees_north + standard_name: latitude + resolution: + 1000: + file_type: fy3a_mersi1_l1b_1000 + file_key: Latitude + 250: + file_type: fy3a_mersi1_l1b_250 + file_key: Latitude + + solar_zenith_angle: + name: solar_zenith_angle + units: degree + standard_name: solar_zenith_angle + resolution: 1000 + coordinates: [longitude, latitude] + file_type: fy3a_mersi1_l1b_1000 + file_key: SolarZenith + + solar_azimuth_angle: + name: solar_azimuth_angle + units: degree + standard_name: solar_azimuth_angle + resolution: 1000 + coordinates: [longitude, latitude] + file_type: fy3a_mersi1_l1b_1000 + file_key: SolarAzimuth + + satellite_zenith_angle: + name: satellite_zenith_angle + units: degree + standard_name: sensor_zenith_angle + resolution: 1000 + coordinates: [longitude, latitude] + file_type: fy3a_mersi1_l1b_1000 + file_key: SensorZenith + + satellite_azimuth_angle: + name: satellite_azimuth_angle + units: degree + standard_name: sensor_azimuth_angle + resolution: 1000 + coordinates: [longitude, latitude] + file_type: fy3a_mersi1_l1b_1000 + file_key: SensorAzimuth diff --git a/satpy/etc/readers/fy3b_mersi1_l1b.yaml b/satpy/etc/readers/fy3b_mersi1_l1b.yaml new file mode 100644 index 0000000000..464c079868 --- /dev/null +++ b/satpy/etc/readers/fy3b_mersi1_l1b.yaml @@ -0,0 +1,428 @@ +reader: + name: fy3b_mersi1_l1b + short_name: FY3B MERSI-1 l1b + long_name: FY-3B MERSI-1 L1B data in HDF5 format + description: FY-3B Medium Resolution Spectral Imager 1 (MERSI-1) L1B Reader + status: Beta + supports_fsspec: false + sensors: [mersi-1] + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + +file_types: + fy3b_mersi1_l1b_1000: + file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B + rows_per_scan: 10 + file_patterns: + - 'FY3B_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_1000M_MS.{ext:3s}' + + fy3b_mersi1_l1b_250: + file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B + rows_per_scan: 40 + file_patterns: + - 'FY3B_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_0250M_MS.{ext:3s}' + + fy3b_mersi1_l1b_geo: + file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B + rows_per_scan: 40 + file_patterns: + - 'FY3B_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_GEOXX_MS.{ext:3s}' + +datasets: + '1': + name: '1' + wavelength: [0.445, 0.470, 0.495] + resolution: + 1000: + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_250_Aggr.1KM_RefSB + band_index: 0 + 250: + file_type: fy3b_mersi1_l1b_250 + file_key: EV_250_RefSB_b1 + coordinates: [longitude, latitude] + calibration: + 
reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '2': + name: '2' + wavelength: [0.525, 0.550, 0.575] + resolution: + 1000: + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_250_Aggr.1KM_RefSB + band_index: 1 + 250: + file_type: fy3b_mersi1_l1b_250 + file_key: EV_250_RefSB_b2 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '3': + name: '3' + wavelength: [0.625, 0.650, 0.675] + resolution: + 1000: + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_250_Aggr.1KM_RefSB + band_index: 2 + 250: + file_type: fy3b_mersi1_l1b_250 + file_key: EV_250_RefSB_b3 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '4': + name: '4' + wavelength: [0.840, 0.865, 0.890] + resolution: + 1000: + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_250_Aggr.1KM_RefSB + band_index: 3 + 250: + file_type: fy3b_mersi1_l1b_250 + file_key: EV_250_RefSB_b4 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '5': + name: '5' + wavelength: [10, 11.25, 12.5] + resolution: + 1000: + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_250_Aggr.1KM_Emissive + 250: + file_type: fy3b_mersi1_l1b_250 + file_key: EV_250_Emissive + coordinates: [longitude, latitude] + calibration: + brightness_temperature: + units: "K" + standard_name: toa_brightness_temperature + counts: + units: "1" + standard_name: counts + + '6': + name: '6' + wavelength: [1.615, 1.640, 1.665] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 0 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '7': + name: '7' + wavelength: [2.105, 2.130, 2.155] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 1 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '8': + name: '8' + wavelength: [0.402, 0.412, 0.422] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 2 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '9': + name: '9' + wavelength: [0.433, 0.443, 0.453] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 3 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '10': + name: '10' + wavelength: [0.480, 0.490, 0.500] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 4 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '11': + name: '11' + wavelength: [0.510, 0.520, 0.530] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 5 + coordinates: [longitude, latitude] + 
calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '12': + name: '12' + wavelength: [0.555, 0.565, 0.575] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 6 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '13': + name: '13' + wavelength: [0.640, 0.650, 0.660] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 7 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '14': + name: '14' + wavelength: [0.675, 0.685, 0.695] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 8 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '15': + name: '15' + wavelength: [0.755, 0.765, 0.775] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 9 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '16': + name: '16' + wavelength: [0.855, 0.865, 0.875] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 10 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '17': + name: '17' + wavelength: [0.895, 0.905, 0.915] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 11 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '18': + name: '18' + wavelength: [0.930, 0.940, 0.950] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 12 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '19': + name: '19' + wavelength: [0.970, 0.980, 0.990] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 13 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '20': + name: '20' + wavelength: [1.020, 1.030, 1.040] + resolution: 1000 + file_type: fy3b_mersi1_l1b_1000 + file_key: EV_1KM_RefSB + band_index: 14 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + longitude: + name: longitude + units: degrees_east + standard_name: longitude + resolution: + 1000: + file_type: fy3b_mersi1_l1b_1000 + file_key: Longitude + 250: + file_type: fy3b_mersi1_l1b_geo + file_key: Longitude + + latitude: + name: latitude + units: degrees_north + standard_name: latitude + resolution: + 1000: + file_type: fy3b_mersi1_l1b_1000 + file_key: Latitude + 250: + file_type: fy3b_mersi1_l1b_geo + file_key: Latitude + + solar_zenith_angle: + name: 
solar_zenith_angle + units: degree + standard_name: solar_zenith_angle + resolution: 1000 + coordinates: [longitude, latitude] + file_type: fy3b_mersi1_l1b_1000 + file_key: SolarZenith + + solar_azimuth_angle: + name: solar_azimuth_angle + units: degree + standard_name: solar_azimuth_angle + resolution: 1000 + coordinates: [longitude, latitude] + file_type: fy3b_mersi1_l1b_1000 + file_key: SolarAzimuth + + satellite_zenith_angle: + name: satellite_zenith_angle + units: degree + standard_name: sensor_zenith_angle + resolution: 1000 + coordinates: [longitude, latitude] + file_type: fy3b_mersi1_l1b_1000 + file_key: SensorZenith + + satellite_azimuth_angle: + name: satellite_azimuth_angle + units: degree + standard_name: sensor_azimuth_angle + resolution: 1000 + coordinates: [longitude, latitude] + file_type: fy3b_mersi1_l1b_1000 + file_key: SensorAzimuth diff --git a/satpy/etc/readers/fy3c_mersi1_l1b.yaml b/satpy/etc/readers/fy3c_mersi1_l1b.yaml new file mode 100644 index 0000000000..31c52d5ee5 --- /dev/null +++ b/satpy/etc/readers/fy3c_mersi1_l1b.yaml @@ -0,0 +1,480 @@ +reader: + name: fy3c_mersi1_l1b + short_name: FY3C MERSI-1 l1b + long_name: FY-3C MERSI-1 L1B data in HDF5 format + description: FY-3B Medium Resolution Spectral Imager 1 (MERSI-1) L1B Reader + status: Beta + supports_fsspec: false + sensors: [mersi-1] + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + +file_types: + fy3c_mersi1_l1b_1000: + file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B + rows_per_scan: 10 + file_patterns: + - 'FY3C_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_1000M_MS.{ext:3s}' + + fy3c_mersi1_l1b_250: + file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B + rows_per_scan: 40 + file_patterns: + - 'FY3C_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_0250M_MS.{ext:3s}' + + fy3c_mersi1_l1b_1000_geo: + file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B + rows_per_scan: 10 + file_patterns: + - 'FY3C_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_GEO1K_MS.{ext:3s}' + + fy3c_mersi1_l1b_250_geo: + file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B + rows_per_scan: 40 + file_patterns: + - 'FY3C_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_GEOQK_MS.{ext:3s}' + +datasets: + '1': + name: '1' + wavelength: [0.445, 0.470, 0.495] + resolution: + 1000: + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_250_Aggr.1KM_RefSB + band_index: 0 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 0 + 250: + file_type: fy3c_mersi1_l1b_250 + file_key: Data/EV_250_RefSB_b1 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 0 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '2': + name: '2' + wavelength: [0.525, 0.550, 0.575] + resolution: + 1000: + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_250_Aggr.1KM_RefSB + band_index: 1 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 1 + 250: + file_type: fy3c_mersi1_l1b_250 + file_key: Data/EV_250_RefSB_b2 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 1 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '3': + name: '3' + wavelength: [0.625, 0.650, 0.675] + resolution: + 1000: + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_250_Aggr.1KM_RefSB + band_index: 2 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 2 + 250: + 
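+        # unlike FY-3A/B, which carry the 1 km geolocation inside the L1B
+        # file itself, FY-3C reads it from the separate GEO1K/GEOQK files
+        # declared in the *_geo file types above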
file_type: fy3c_mersi1_l1b_250 + file_key: Data/EV_250_RefSB_b3 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 2 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '4': + name: '4' + wavelength: [0.840, 0.865, 0.890] + resolution: + 1000: + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_250_Aggr.1KM_RefSB + band_index: 3 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 3 + 250: + file_type: fy3c_mersi1_l1b_250 + file_key: Data/EV_250_RefSB_b4 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 3 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '5': + name: '5' + wavelength: [10, 11.25, 12.5] + resolution: + 1000: + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_250_Aggr.1KM_Emissive + 250: + file_type: fy3c_mersi1_l1b_250 + file_key: Data/EV_250_Emissive + coordinates: [longitude, latitude] + calibration: + brightness_temperature: + units: "K" + standard_name: toa_brightness_temperature + counts: + units: "1" + standard_name: counts + + '6': + name: '6' + wavelength: [1.615, 1.640, 1.665] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 0 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 4 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '7': + name: '7' + wavelength: [2.105, 2.130, 2.155] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 1 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 5 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '8': + name: '8' + wavelength: [0.402, 0.412, 0.422] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 2 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 6 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '9': + name: '9' + wavelength: [0.433, 0.443, 0.453] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 3 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 7 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '10': + name: '10' + wavelength: [0.480, 0.490, 0.500] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 4 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 8 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '11': + name: '11' + wavelength: [0.510, 0.520, 0.530] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 5 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 9 + coordinates: [longitude, latitude] + calibration: + reflectance: + 
units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '12': + name: '12' + wavelength: [0.555, 0.565, 0.575] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 6 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 10 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '13': + name: '13' + wavelength: [0.640, 0.650, 0.660] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 7 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 11 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '14': + name: '14' + wavelength: [0.675, 0.685, 0.695] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 8 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 12 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '15': + name: '15' + wavelength: [0.755, 0.765, 0.775] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 9 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 13 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '16': + name: '16' + wavelength: [0.855, 0.865, 0.875] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 10 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 14 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '17': + name: '17' + wavelength: [0.895, 0.905, 0.915] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 11 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 15 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '18': + name: '18' + wavelength: [0.930, 0.940, 0.950] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 12 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 16 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '19': + name: '19' + wavelength: [0.970, 0.980, 0.990] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 13 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 17 + coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + '20': + name: '20' + wavelength: [1.020, 1.030, 1.040] + resolution: 1000 + file_type: fy3c_mersi1_l1b_1000 + file_key: Data/EV_1KM_RefSB + band_index: 14 + calibration_key: Calibration/VIS_Cal_Coeff + calibration_index: 18 + 
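+    # band_index picks this band's row in the stacked Data/EV_1KM_RefSB
+    # array, while calibration_index picks its row in
+    # Calibration/VIS_Cal_Coeff, which runs over all 19 reflective bands
+    # (hence the different numbers)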
coordinates: [longitude, latitude] + calibration: + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts + + longitude: + name: longitude + units: degrees_east + standard_name: longitude + resolution: + 1000: + file_type: fy3c_mersi1_l1b_1000_geo + file_key: Geolocation/Longitude + 250: + file_type: fy3c_mersi1_l1b_250_geo + file_key: Longitude + + latitude: + name: latitude + units: degrees_north + standard_name: latitude + resolution: + 1000: + file_type: fy3c_mersi1_l1b_1000_geo + file_key: Geolocation/Latitude + 250: + file_type: fy3c_mersi1_l1b_250_geo + file_key: Latitude + + solar_zenith_angle: + name: solar_zenith_angle + units: degree + standard_name: solar_zenith_angle + resolution: 1000 + coordinates: [longitude, latitude] + file_type: fy3c_mersi1_l1b_1000_geo + file_key: Geolocation/SolarZenith + + solar_azimuth_angle: + name: solar_azimuth_angle + units: degree + standard_name: solar_azimuth_angle + resolution: 1000 + coordinates: [longitude, latitude] + file_type: fy3c_mersi1_l1b_1000_geo + file_key: Geolocation/SolarAzimuth + + satellite_zenith_angle: + name: satellite_zenith_angle + units: degree + standard_name: sensor_zenith_angle + resolution: 1000 + coordinates: [longitude, latitude] + file_type: fy3c_mersi1_l1b_1000_geo + file_key: Geolocation/SensorZenith + + satellite_azimuth_angle: + name: satellite_azimuth_angle + units: degree + standard_name: sensor_azimuth_angle + resolution: 1000 + coordinates: [longitude, latitude] + file_type: fy3c_mersi1_l1b_1000_geo + file_key: Geolocation/SensorAzimuth diff --git a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py index 7675bd1624..f615ee2d40 100644 --- a/satpy/readers/mersi_l1b.py +++ b/satpy/readers/mersi_l1b.py @@ -33,10 +33,17 @@ from satpy.readers.hdf5_utils import HDF5FileHandler N_TOT_IR_CHANS_LL = 6 +PLATFORMS_INSTRUMENTS = {"FY-3A": "mersi-1", + "FY-3B": "mersi-1", + "FY-3C": "mersi-1", + "FY-3D": "mersi-2", + "FY-3E": "mersi-ll", + "FY-3F": "mersi-3", + "FY-3G": "mersi-rm"} class MERSIL1B(HDF5FileHandler): - """MERSI-2/MERSI-LL/MERSI-RM L1B file reader.""" + """MERSI-1/MERSI-2/MERSI-LL/MERSI-RM L1B file reader.""" def _strptime(self, date_attr, time_attr): """Parse date/time strings.""" @@ -59,13 +66,12 @@ def end_time(self): @property def sensor_name(self): """Map sensor name to Satpy 'standard' sensor names.""" - file_sensor = self["/attr/Sensor Identification Code"] - sensor = { - "MERSI": "mersi-2", - "MERSI LL": "mersi-ll", - "MERSI RM": "mersi-rm", - }.get(file_sensor, file_sensor) - return sensor + return PLATFORMS_INSTRUMENTS.get(self.platform_name) + + @property + def platform_name(self): + """Platform name.""" + return self["/attr/Satellite Name"] def get_refl_mult(self): """Get reflectance multiplier.""" @@ -84,6 +90,7 @@ def _get_single_slope_intercept(self, slope, intercept, cal_index): return slope[cal_index], intercept[cal_index] def _get_coefficients(self, cal_key, cal_index): + """Get VIS calibration coeffs from calibration datasets""" coeffs = self[cal_key][cal_index] slope = coeffs.attrs.pop("Slope", None) intercept = coeffs.attrs.pop("Intercept", None) @@ -93,11 +100,25 @@ def _get_coefficients(self, cal_key, cal_index): coeffs = coeffs * slope + intercept return coeffs + def _get_coefficients_mersi1(self, band_index): + """Get VIS calibration coeffs from attributes. Only for MERSI-1 on FY-3A/B""" + try: + # This is found in the actual file. 
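+            # (the granules spell the attribute "VIR", the official manual
+            # "VIS"; both spellings are handled below)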
+ coeffs = self["/attr/VIR_Cal_Coeff"] + except KeyError: + # This is in the official manual. + coeffs = self["/attr/VIS_Cal_Coeff"] + coeffs = coeffs.reshape(19, 3) + if band_index is not None: + coeffs = coeffs[band_index] + return coeffs + def get_dataset(self, dataset_id, ds_info): """Load data variable and metadata and calibrate if needed.""" file_key = ds_info.get("file_key", dataset_id["name"]) band_index = ds_info.get("band_index") data = self[file_key] + if band_index is not None: data = data[band_index] if data.ndim >= 2: @@ -115,18 +136,26 @@ def get_dataset(self, dataset_id, ds_info): if band_index is not None and slope.size > 1: slope = slope[band_index] intercept = intercept[band_index] + # There's a bug in the slope for MERSI-1 11.25(5) + if self.sensor_name == "mersi-1" and dataset_id["name"] == "5" and slope in [100, 1]: + slope = 0.01 data = data * slope + intercept if dataset_id.get("calibration") == "reflectance": - coeffs = self._get_coefficients(ds_info["calibration_key"], - ds_info["calibration_index"]) + # Only FY-3A/B stores VIS calibration coefficients in attributes + coeffs = self._get_coefficients_mersi1(band_index) if self.platform_name in ["FY-3A", "FY-3B"] else \ + self._get_coefficients(ds_info["calibration_key"], + ds_info["calibration_index"]) + data = coeffs[0] + coeffs[1] * data + coeffs[2] * data ** 2 data = data * self.get_refl_mult() + elif dataset_id.get("calibration") == "brightness_temperature": - calibration_index = ds_info["calibration_index"] # Converts um^-1 (wavenumbers) and (mW/m^2)/(str/cm^-1) (radiance data) # to SI units m^-1, mW*m^-3*str^-1. wave_number = 1. / (dataset_id["wavelength"][1] / 1e6) + # MERSI-1 doesn't have additional corrections + calibration_index = None if self.sensor_name == "mersi-1" else ds_info["calibration_index"] data = self._get_bt_dataset(data, calibration_index, wave_number) data.attrs = attrs @@ -137,7 +166,7 @@ def get_dataset(self, dataset_id, ds_info): data.attrs[key] = val.decode("utf8") data.attrs.update({ - "platform_name": self["/attr/Satellite Name"], + "platform_name": self.platform_name, "sensor": self.sensor_name, }) @@ -145,7 +174,8 @@ def get_dataset(self, dataset_id, ds_info): def _mask_data(self, data, dataset_id, attrs): """Mask the data using fill_value and valid_range attributes.""" - fill_value = attrs.pop("FillValue", np.nan) # covered by valid_range + fill_value = attrs.pop("_FillValue", np.nan) if self.platform_name in ["FY-3A", "FY-3B"] else \ + attrs.pop("FillValue", np.nan) # covered by valid_range valid_range = attrs.pop("valid_range", None) if dataset_id.get("calibration") == "counts": # preserve integer type of counts if possible @@ -156,8 +186,13 @@ def _mask_data(self, data, dataset_id, attrs): if valid_range is not None: # Due to a bug in the valid_range upper limit in the 10.8(24) and 12.0(25) # in the HDF data, this is hardcoded here. 
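+            # (the wavelength(band) notation above means the 10.8 um band "24" and the 12.0 um band "25")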
- if dataset_id["name"] in ["24", "25"] and valid_range[1] == 4095: - valid_range[1] = 25000 + if self.sensor_name == "mersi-2": + if dataset_id["name"] in ["24", "25"] and valid_range[1] == 4095: + valid_range[1] = 25000 + # Similar bug also found in MERSI-1 + elif self.sensor_name == "mersi-1": + if dataset_id["name"] == "5" and valid_range[1] == 4095: + valid_range[1] = 25000 # typically bad_values == 65535, saturated == 65534 # dead detector == 65533 data = data.where((data >= valid_range[0]) & From a54b641049e331e7e3d995d3c11a8c9862ced798 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sun, 28 Apr 2024 22:50:36 +0800 Subject: [PATCH 1321/1416] Update mersi_l1b.py --- satpy/readers/mersi_l1b.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py index f615ee2d40..ced1deb0ab 100644 --- a/satpy/readers/mersi_l1b.py +++ b/satpy/readers/mersi_l1b.py @@ -118,7 +118,6 @@ def get_dataset(self, dataset_id, ds_info): file_key = ds_info.get("file_key", dataset_id["name"]) band_index = ds_info.get("band_index") data = self[file_key] - if band_index is not None: data = data[band_index] if data.ndim >= 2: From b0aa1fd91bd09a3ed6c90efae0a33d55e411cc59 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sun, 28 Apr 2024 22:53:44 +0800 Subject: [PATCH 1322/1416] Update mersi_l1b.py --- satpy/readers/mersi_l1b.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py index ced1deb0ab..5c9f68713e 100644 --- a/satpy/readers/mersi_l1b.py +++ b/satpy/readers/mersi_l1b.py @@ -90,7 +90,7 @@ def _get_single_slope_intercept(self, slope, intercept, cal_index): return slope[cal_index], intercept[cal_index] def _get_coefficients(self, cal_key, cal_index): - """Get VIS calibration coeffs from calibration datasets""" + """Get VIS calibration coeffs from calibration datasets.""" coeffs = self[cal_key][cal_index] slope = coeffs.attrs.pop("Slope", None) intercept = coeffs.attrs.pop("Intercept", None) @@ -101,7 +101,7 @@ def _get_coefficients(self, cal_key, cal_index): return coeffs def _get_coefficients_mersi1(self, band_index): - """Get VIS calibration coeffs from attributes. Only for MERSI-1 on FY-3A/B""" + """Get VIS calibration coeffs from attributes. Only for MERSI-1 on FY-3A/B.""" try: # This is found in the actual file. 
coeffs = self["/attr/VIR_Cal_Coeff"] From 9f1ad43123224177a86a7f5ffd1ca51aa628a6c1 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sun, 28 Apr 2024 23:01:25 +0800 Subject: [PATCH 1323/1416] Update mersi_l1b.py --- satpy/readers/mersi_l1b.py | 26 +++++++++++++++----------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py index 5c9f68713e..d760ec6170 100644 --- a/satpy/readers/mersi_l1b.py +++ b/satpy/readers/mersi_l1b.py @@ -113,6 +113,20 @@ def _get_coefficients_mersi1(self, band_index): coeffs = coeffs[band_index] return coeffs + def _get_dn_corrections(self, data ,band_index, dataset_id, attrs): + """Use slope and intercept to get DN corrections.""" + slope = attrs.pop("Slope", None) + intercept = attrs.pop("Intercept", None) + if slope is not None and dataset_id.get("calibration") != "counts": + if band_index is not None and slope.size > 1: + slope = slope[band_index] + intercept = intercept[band_index] + # There's a bug in the slope for MERSI-1 11.25(5) + if self.sensor_name == "mersi-1" and dataset_id["name"] == "5" and slope in [100, 1]: + slope = 0.01 + data = data * slope + intercept + return data + def get_dataset(self, dataset_id, ds_info): """Load data variable and metadata and calibrate if needed.""" file_key = ds_info.get("file_key", dataset_id["name"]) @@ -128,17 +142,7 @@ def get_dataset(self, dataset_id, ds_info): attrs.setdefault("rows_per_scan", self.filetype_info["rows_per_scan"]) data = self._mask_data(data, dataset_id, attrs) - - slope = attrs.pop("Slope", None) - intercept = attrs.pop("Intercept", None) - if slope is not None and dataset_id.get("calibration") != "counts": - if band_index is not None and slope.size > 1: - slope = slope[band_index] - intercept = intercept[band_index] - # There's a bug in the slope for MERSI-1 11.25(5) - if self.sensor_name == "mersi-1" and dataset_id["name"] == "5" and slope in [100, 1]: - slope = 0.01 - data = data * slope + intercept + data = self._get_dn_corrections(data, band_index, dataset_id, attrs) if dataset_id.get("calibration") == "reflectance": # Only FY-3A/B stores VIS calibration coefficients in attributes From 06b806daf2b4cd3763ffd3251c2e7180182e4d99 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sun, 28 Apr 2024 23:01:42 +0800 Subject: [PATCH 1324/1416] Update mersi_l1b.py --- satpy/readers/mersi_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py index d760ec6170..263d3ab992 100644 --- a/satpy/readers/mersi_l1b.py +++ b/satpy/readers/mersi_l1b.py @@ -113,7 +113,7 @@ def _get_coefficients_mersi1(self, band_index): coeffs = coeffs[band_index] return coeffs - def _get_dn_corrections(self, data ,band_index, dataset_id, attrs): + def _get_dn_corrections(self, data, band_index, dataset_id, attrs): """Use slope and intercept to get DN corrections.""" slope = attrs.pop("Slope", None) intercept = attrs.pop("Intercept", None) From d67fe2076819cf65aa51b69f34c60666db1c65a6 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sun, 28 Apr 2024 15:03:14 +0000 Subject: [PATCH 1325/1416] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- satpy/etc/composites/mersi-1.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/etc/composites/mersi-1.yaml b/satpy/etc/composites/mersi-1.yaml index fce0127697..ee92ee9ed4 100644 --- a/satpy/etc/composites/mersi-1.yaml +++ 
b/satpy/etc/composites/mersi-1.yaml @@ -70,4 +70,3 @@ composites: modifiers: [sunz_corrected] - name: '5' standard_name: overview - From 911e2fd0f436fdfe41e2eab35ce5a2cd10b34104 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Sun, 28 Apr 2024 23:42:48 +0800 Subject: [PATCH 1326/1416] Update test_mersi_l1b.py --- satpy/tests/reader_tests/test_mersi_l1b.py | 62 +++++++++++++++++++++- 1 file changed, 61 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index a3145d3f76..a190b80336 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -86,6 +86,45 @@ def _get_250m_data(num_scans, rows_per_scan, num_cols): } return data +def _get_mersi1_250m_data(num_scans, rows_per_scan, num_cols, old_form=False): + # Set some default attributes + def_attrs = {"FillValue": 65535, + "valid_range": [0, 4095], + "Slope": np.array([1.] * 1), "Intercept": np.array([0.] * 1) + } + nounits_attrs = {**def_attrs, **{"units": "NO"}} + # Old form from FY-3A/B + prefix = "" if old_form else "Data/" + + data = { + f"{prefix}EV_250_RefSB_b1": + xr.DataArray( + da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), + attrs=nounits_attrs, + dims=("_rows", "_cols")), + f"{prefix}EV_250_RefSB_b2": + xr.DataArray( + da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), + attrs=nounits_attrs, + dims=("_rows", "_cols")), + f"{prefix}EV_250_RefSB_b3": + xr.DataArray( + da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), + attrs=nounits_attrs, + dims=("_rows", "_cols")), + f"{prefix}EV_250_RefSB_b4": + xr.DataArray( + da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), + attrs=nounits_attrs, + dims=("_rows", "_cols")), + f"{prefix}EV_250_Emissive": + xr.DataArray( + da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), + attrs=radunits_attrs, + dims=("_rows", "_cols")), + } + return data + def _get_500m_data(num_scans, rows_per_scan, num_cols): data = { @@ -278,7 +317,19 @@ def get_test_content(self, filename, filename_info, filetype_info): return test_content def _set_sensor_attrs(self, global_attrs): - if "mersi2_l1b" in self.filetype_info["file_type"]: + if "fy3a_mersi1" in self.filetype_info["file_type"]: + global_attrs["/attr/Satellite Name"] = "FY-3A" + global_attrs["/attr/Sensor Identification Code"] = "MERSI" + ftype = "VIS" + elif "fy3b_mersi1" in self.filetype_info["file_type"]: + global_attrs["/attr/Satellite Name"] = "FY-3B" + global_attrs["/attr/Sensor Identification Code"] = "MERSI" + ftype = "VIS" + elif "fy3c_mersi1" in self.filetype_info["file_type"]: + global_attrs["/attr/Satellite Name"] = "FY-3C" + global_attrs["/attr/Sensor Identification Code"] = "MERSI" + ftype = "VIS" + elif "mersi2_l1b" in self.filetype_info["file_type"]: global_attrs["/attr/Satellite Name"] = "FY-3D" global_attrs["/attr/Sensor Identification Code"] = "MERSI" ftype = "VIS" @@ -308,11 +359,20 @@ def _add_band_data_file_content(self): num_cols = self._num_cols_for_file_type num_scans = self.num_scans rows_per_scan = self._rows_per_scan + is_fy3a_mersi1 = self.filetype_info["file_type"].startswith("fy3a_mersi1") + is_fy3b_mersi1 = self.filetype_info["file_type"].startswith("fy3b_mersi1") + is_fy3c_mersi1 = self.filetype_info["file_type"].startswith("fy3c_mersi1") is_mersi2 = self.filetype_info["file_type"].startswith("mersi2_") is_mersill = 
self.filetype_info["file_type"].startswith("mersi_ll") is_1km = "_1000" in self.filetype_info["file_type"] if is_1km: data_func = _get_1km_data + elif is_fy3a_mersi1: + data_func = _get_mersi1_250m_data(old_form=True) + elif is_fy3b_mersi1: + data_func = _get_mersi1_250m_data(old_form=True) + elif is_fy3c_mersi1: + data_func = _get_mersi1_250m_data elif is_mersi2: data_func = _get_250m_data elif is_mersill: From 5faebd1fa3a83b204a0f917c6e723a9e1d2b2c52 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Mon, 29 Apr 2024 11:26:02 +0800 Subject: [PATCH 1327/1416] Update test_mersi_l1b.py --- satpy/tests/reader_tests/test_mersi_l1b.py | 66 +++++++++++++--------- 1 file changed, 39 insertions(+), 27 deletions(-) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index a190b80336..151801db0b 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -86,38 +86,36 @@ def _get_250m_data(num_scans, rows_per_scan, num_cols): } return data -def _get_mersi1_250m_data(num_scans, rows_per_scan, num_cols, old_form=False): +def _get_mersi1_250m_data(num_scans, rows_per_scan, num_cols, key_prefix="Data/"): # Set some default attributes def_attrs = {"FillValue": 65535, "valid_range": [0, 4095], "Slope": np.array([1.] * 1), "Intercept": np.array([0.] * 1) } nounits_attrs = {**def_attrs, **{"units": "NO"}} - # Old form from FY-3A/B - prefix = "" if old_form else "Data/" data = { - f"{prefix}EV_250_RefSB_b1": + f"{key_prefix}EV_250_RefSB_b1": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, dims=("_rows", "_cols")), - f"{prefix}EV_250_RefSB_b2": + f"{key_prefix}EV_250_RefSB_b2": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, dims=("_rows", "_cols")), - f"{prefix}EV_250_RefSB_b3": + f"{key_prefix}EV_250_RefSB_b3": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, dims=("_rows", "_cols")), - f"{prefix}EV_250_RefSB_b4": + f"{key_prefix}EV_250_RefSB_b4": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, dims=("_rows", "_cols")), - f"{prefix}EV_250_Emissive": + f"{key_prefix}EV_250_Emissive": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=radunits_attrs, @@ -157,9 +155,9 @@ def _get_500m_data(num_scans, rows_per_scan, num_cols): return data -def _get_1km_data(num_scans, rows_per_scan, num_cols): +def _get_1km_data(num_scans, rows_per_scan, num_cols, key_prefix="Data/"): data = { - "Data/EV_1KM_LL": + f"{key_prefix}EV_1KM_LL": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), @@ -171,7 +169,7 @@ def _get_1km_data(num_scans, rows_per_scan, num_cols): "long_name": b"1km Earth View Science Data", }, dims=("_rows", "_cols")), - "Data/EV_1KM_RefSB": + f"{key_prefix}EV_1KM_RefSB": xr.DataArray( da.ones((15, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), @@ -183,7 +181,7 @@ def _get_1km_data(num_scans, rows_per_scan, num_cols): "long_name": b"1km Earth View Science Data", }, dims=("_ref_bands", "_rows", "_cols")), - "Data/EV_1KM_Emissive": + f"{key_prefix}EV_1KM_Emissive": xr.DataArray( da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), @@ -196,7 +194,7 @@ def _get_1km_data(num_scans, rows_per_scan, num_cols): b"Science Data", }, 
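# (the four thermal IR bands are stacked along the leading "_ir_bands" axis)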
dims=("_ir_bands", "_rows", "_cols")), - "Data/EV_250_Aggr.1KM_RefSB": + f"{key_prefix}EV_250_Aggr.1KM_RefSB": xr.DataArray( da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), @@ -209,7 +207,7 @@ def _get_1km_data(num_scans, rows_per_scan, num_cols): b"Science Data Aggregated to 1 km" }, dims=("_ref250_bands", "_rows", "_cols")), - "Data/EV_250_Aggr.1KM_Emissive": + f"{key_prefix}EV_250_Aggr.1KM_Emissive": xr.DataArray( da.ones((2, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), @@ -305,6 +303,12 @@ def get_test_content(self, filename, filename_info, filetype_info): "/attr/Observing Beginning Time": "18:27:39.720", "/attr/Observing Ending Time": "18:38:36.728", } + fy3a_attrs = { + "/attr/VIR_Cal_Coeff: 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0" + } # noqa + fy3b_attrs = { + "/attr/VIS_Cal_Coeff: 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0" + } # noqa global_attrs, ftype = self._set_sensor_attrs(global_attrs) self._add_tbb_coefficients(global_attrs) @@ -313,7 +317,12 @@ def get_test_content(self, filename, filename_info, filetype_info): test_content = {} test_content.update(global_attrs) test_content.update(data) - test_content.update(_get_calibration(self.num_scans, ftype)) + if "fy3a_mersi1" in self.filetype_info["file_type"]: + test_content.update(fy3a_attrs) + elif "fy3b_mersi1" in self.filetype_info["file_type"]: + test_content.update(fy3b_attrs) + if not self.filetype_info["file_type"].startswith(("fy3a_mersi1", "fy3b_mersi1")): + test_content.update(_get_calibration(self.num_scans, ftype)) return test_content def _set_sensor_attrs(self, global_attrs): @@ -359,18 +368,18 @@ def _add_band_data_file_content(self): num_cols = self._num_cols_for_file_type num_scans = self.num_scans rows_per_scan = self._rows_per_scan - is_fy3a_mersi1 = self.filetype_info["file_type"].startswith("fy3a_mersi1") - is_fy3b_mersi1 = self.filetype_info["file_type"].startswith("fy3b_mersi1") + is_fy3ab_mersi1 = self.filetype_info["file_type"].startswith(("fy3a_mersi1", "fy3b_mersi1")) is_fy3c_mersi1 = self.filetype_info["file_type"].startswith("fy3c_mersi1") is_mersi2 = self.filetype_info["file_type"].startswith("mersi2_") is_mersill = self.filetype_info["file_type"].startswith("mersi_ll") - is_1km = "_1000" in self.filetype_info["file_type"] + is_fy3ab_1km = "_1000" in self.filetype_info["file_type"] and is_fy3ab_mersi1 + is_1km = "_1000" in self.filetype_info["file_type"] and not is_fy3ab_1km if is_1km: data_func = _get_1km_data - elif is_fy3a_mersi1: - data_func = _get_mersi1_250m_data(old_form=True) - elif is_fy3b_mersi1: - data_func = _get_mersi1_250m_data(old_form=True) + elif is_fy3ab_1km: + data_func = _get_1km_data(key_prefix="") + elif is_fy3ab_mersi1: + data_func = _get_mersi1_250m_data(key_prefix="") elif is_fy3c_mersi1: data_func = _get_mersi1_250m_data elif is_mersi2: @@ -398,12 +407,15 @@ def _num_cols_for_file_type(self): @property def _geo_prefix_for_file_type(self): - if "1000" in self.filetype_info["file_type"]: - return "Geolocation/" - elif "500" in self.filetype_info["file_type"]: - return "Geolocation/" - else: + if self.filetype_info["file_type"].startswith(("fy3a_mersi1", "fy3b_mersi1")): return "" + else: + if "1000" in self.filetype_info["file_type"]: + return "Geolocation/" + elif "500" in self.filetype_info["file_type"]: + return "Geolocation/" + else: + return "" def _test_helper(res): From 
fa3e5feae829bf3fd7e0e3902e303fad2e019ce2 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Mon, 29 Apr 2024 11:27:03 +0800 Subject: [PATCH 1328/1416] Update test_mersi_l1b.py --- satpy/tests/reader_tests/test_mersi_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index 151801db0b..c7027a38e7 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -118,7 +118,7 @@ def _get_mersi1_250m_data(num_scans, rows_per_scan, num_cols, key_prefix="Data/" f"{key_prefix}EV_250_Emissive": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), - attrs=radunits_attrs, + attrs=nounits_attrs, dims=("_rows", "_cols")), } return data From df915c6a91ae55d53754ced19eaa5e21329d4a65 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Mon, 29 Apr 2024 11:27:42 +0800 Subject: [PATCH 1329/1416] Update test_mersi_l1b.py --- satpy/tests/reader_tests/test_mersi_l1b.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index c7027a38e7..516bd18da9 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -304,11 +304,13 @@ def get_test_content(self, filename, filename_info, filetype_info): "/attr/Observing Ending Time": "18:38:36.728", } fy3a_attrs = { - "/attr/VIR_Cal_Coeff: 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0" - } # noqa + "/attr/VIR_Cal_Coeff: 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0" + " 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0" + } fy3b_attrs = { - "/attr/VIS_Cal_Coeff: 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0" - } # noqa + "/attr/VIS_Cal_Coeff: 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 " + "0 1 0 0 1 0 0 1 0 0 1 0 0 1 0" + } global_attrs, ftype = self._set_sensor_attrs(global_attrs) self._add_tbb_coefficients(global_attrs) From 2336640cb640baa15cb341eea420fdd5c8286d0b Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Mon, 29 Apr 2024 11:34:03 +0800 Subject: [PATCH 1330/1416] Update test_mersi_l1b.py --- satpy/tests/reader_tests/test_mersi_l1b.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index 516bd18da9..4afb0f6e58 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -155,7 +155,7 @@ def _get_500m_data(num_scans, rows_per_scan, num_cols): return data -def _get_1km_data(num_scans, rows_per_scan, num_cols, key_prefix="Data/"): +def _get_1km_data(num_scans, rows_per_scan, num_cols, key_prefix="Data/", radunits="mW/ (m2 cm-1 sr)"): data = { f"{key_prefix}EV_1KM_LL": xr.DataArray( @@ -214,7 +214,7 @@ def _get_1km_data(num_scans, rows_per_scan, num_cols, key_prefix="Data/"): attrs={ "Slope": np.array([1.] * 2), "Intercept": np.array([0.] 
* 2), "FillValue": 65535, - "units": "mW/ (m2 cm-1 sr)", + "units": radunits, "valid_range": [0, 4095], "long_name": b"250m Emissive Bands Earth View " b"Science Data Aggregated to 1 km" @@ -308,8 +308,8 @@ def get_test_content(self, filename, filename_info, filetype_info): " 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0" } fy3b_attrs = { - "/attr/VIS_Cal_Coeff: 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 " - "0 1 0 0 1 0 0 1 0 0 1 0 0 1 0" + "/attr/VIS_Cal_Coeff: 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0" + " 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0" } global_attrs, ftype = self._set_sensor_attrs(global_attrs) @@ -375,11 +375,14 @@ def _add_band_data_file_content(self): is_mersi2 = self.filetype_info["file_type"].startswith("mersi2_") is_mersill = self.filetype_info["file_type"].startswith("mersi_ll") is_fy3ab_1km = "_1000" in self.filetype_info["file_type"] and is_fy3ab_mersi1 + is_fy3c_1km = "_1000" in self.filetype_info["file_type"] and is_fy3c_mersi1 is_1km = "_1000" in self.filetype_info["file_type"] and not is_fy3ab_1km if is_1km: data_func = _get_1km_data elif is_fy3ab_1km: - data_func = _get_1km_data(key_prefix="") + data_func = _get_1km_data(key_prefix="", radunits="NO") + elif is_fy3c_1km: + data_func = _get_1km_data(radunits="NO") elif is_fy3ab_mersi1: data_func = _get_mersi1_250m_data(key_prefix="") elif is_fy3c_mersi1: From bdda69725fb4142fee611ca9440e1cb7c2b12310 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Mon, 29 Apr 2024 13:38:58 +0800 Subject: [PATCH 1331/1416] Update test_mersi_l1b.py --- satpy/tests/reader_tests/test_mersi_l1b.py | 75 ++++++++++++++-------- 1 file changed, 50 insertions(+), 25 deletions(-) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index 4afb0f6e58..2bba910a2c 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -86,7 +86,7 @@ def _get_250m_data(num_scans, rows_per_scan, num_cols): } return data -def _get_mersi1_250m_data(num_scans, rows_per_scan, num_cols, key_prefix="Data/"): +def _get_mersi1_250m_data(num_scans, rows_per_scan, num_cols, key_prefix): # Set some default attributes def_attrs = {"FillValue": 65535, "valid_range": [0, 4095], @@ -155,7 +155,7 @@ def _get_500m_data(num_scans, rows_per_scan, num_cols): return data -def _get_1km_data(num_scans, rows_per_scan, num_cols, key_prefix="Data/", radunits="mW/ (m2 cm-1 sr)"): +def _get_1km_data(num_scans, rows_per_scan, num_cols, key_prefix, radunits): data = { f"{key_prefix}EV_1KM_LL": xr.DataArray( @@ -304,12 +304,12 @@ def get_test_content(self, filename, filename_info, filetype_info): "/attr/Observing Ending Time": "18:38:36.728", } fy3a_attrs = { - "/attr/VIR_Cal_Coeff: 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0" - " 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0" + "/attr/VIR_Cal_Coeff": "0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 " + "0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0", } fy3b_attrs = { - "/attr/VIS_Cal_Coeff: 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0" - " 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0" + "/attr/VIS_Cal_Coeff": "0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 " + "0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0", } global_attrs, ftype = self._set_sensor_attrs(global_attrs) @@ -371,29 +371,28 @@ def _add_band_data_file_content(self): num_scans = self.num_scans rows_per_scan = self._rows_per_scan 
is_fy3ab_mersi1 = self.filetype_info["file_type"].startswith(("fy3a_mersi1", "fy3b_mersi1")) - is_fy3c_mersi1 = self.filetype_info["file_type"].startswith("fy3c_mersi1") + is_mersi1 = self.filetype_info["file_type"].startswith(("fy3a_mersi1", "fy3b_mersi1", "fy3c_mersi1")) is_mersi2 = self.filetype_info["file_type"].startswith("mersi2_") is_mersill = self.filetype_info["file_type"].startswith("mersi_ll") - is_fy3ab_1km = "_1000" in self.filetype_info["file_type"] and is_fy3ab_mersi1 - is_fy3c_1km = "_1000" in self.filetype_info["file_type"] and is_fy3c_mersi1 - is_1km = "_1000" in self.filetype_info["file_type"] and not is_fy3ab_1km + is_1km = "_1000" in self.filetype_info["file_type"] + is_250m = "_250" in self.filetype_info["file_type"] + + key_prefix = "" if is_fy3ab_mersi1 else "Data/" + radunits = "NO" if is_mersi1 else "mW/ (m2 cm-1 sr)" + if is_1km: - data_func = _get_1km_data - elif is_fy3ab_1km: - data_func = _get_1km_data(key_prefix="", radunits="NO") - elif is_fy3c_1km: - data_func = _get_1km_data(radunits="NO") - elif is_fy3ab_mersi1: - data_func = _get_mersi1_250m_data(key_prefix="") - elif is_fy3c_mersi1: - data_func = _get_mersi1_250m_data - elif is_mersi2: - data_func = _get_250m_data - elif is_mersill: - data_func = _get_250m_ll_data + return _get_1km_data(num_scans, rows_per_scan, num_cols, key_prefix, radunits) + elif is_250m: + if is_mersi1: + return _get_mersi1_250m_data(num_scans, rows_per_scan, num_cols, key_prefix) + elif is_mersi2: + return _get_250m_data(num_scans, rows_per_scan, num_cols) + elif is_mersill: + return _get_250m_ll_data(num_scans, rows_per_scan, num_cols) + else: + return else: - data_func = _get_500m_data - return data_func(num_scans, rows_per_scan, num_cols) + return _get_500m_data(num_scans, rows_per_scan, num_cols) def _add_tbb_coefficients(self, global_attrs): if not self.filetype_info["file_type"].startswith("mersi2_"): @@ -457,6 +456,32 @@ def teardown_method(self): self.p.stop() +class TestFY3AMERSI1L1B(MERSIL1BTester): + """Test the FY3A MERSI1 L1B reader.""" + + yaml_file = "fy3a_mersi1_l1b.yaml" + filenames_1000m = ["FY3A_MERSI_GBAL_L1_20090601_1200_1000M_MS.hdf"] + filenames_250m = ["FY3A_MERSI_GBAL_L1_20090601_1200_0250M_MS.hdf"] + filenames_all = filenames_1000m + filenames_250m + + def test_all_resolutions(self): + """Test loading data when all resolutions are available.""" + from satpy.dataset.data_dict import get_key + from satpy.readers import load_reader + from satpy.tests.utils import make_dataid + filenames = self.filenames_all + reader = load_reader(self.reader_configs) + files = reader.select_files_from_pathnames(filenames) + assert 2 == len(files) + reader.create_filehandlers(files) + # Make sure we have some files + assert reader.file_handlers + + + + + + class TestMERSI2L1B(MERSIL1BTester): """Test the FY3D MERSI2 L1B reader.""" From 8fcc32c5a42fd2b6d0b04a27fc3fc88cc34dbe8e Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 29 Apr 2024 15:29:19 +0200 Subject: [PATCH 1332/1416] Bugfix --- satpy/readers/__init__.py | 2 +- satpy/readers/yaml_reader.py | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index 33196b3aab..34d2f9a9d6 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -639,7 +639,7 @@ def _get_reader_kwargs(reader, reader_kwargs): reader_kwargs = reader_kwargs or {} if isinstance(reader, str): - reader = list(reader) + reader = [reader] # ensure one reader_kwargs per reader, None if not provided if reader is None: 
diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py index 84c4fcd068..5bbaba4a6c 100644 --- a/satpy/readers/yaml_reader.py +++ b/satpy/readers/yaml_reader.py @@ -346,8 +346,6 @@ def __init__(self, config_dict, filter_parameters=None, filter_filenames=True): self.filter_parameters = filter_parameters or {} self.filter_filenames = self.info.get("filter_filenames", filter_filenames) - - def filter_selected_filenames(self, filenames): """Filter provided files based on metadata in the filename.""" if not isinstance(filenames, set): From 08c7af220e2d26f8374683453e6ea31563069948 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 29 Apr 2024 15:40:27 +0200 Subject: [PATCH 1333/1416] Fix breaking change --- satpy/readers/sar_c_safe.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index c175b17b3c..a5ec535462 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -36,6 +36,7 @@ import functools import logging +import warnings from collections import defaultdict from datetime import timezone as tz from functools import cached_property @@ -714,14 +715,13 @@ def end_time(self): def load(self, dataset_keys, **kwargs): """Load some data.""" if kwargs: - raise NotImplementedError(f"Don't know how to handle kwargs {kwargs}") + warnings.warn(f"Don't know how to handle kwargs {kwargs}") datasets = DatasetDict() for key in dataset_keys: for handler in self.storage_items.values(): val = handler.get_dataset(key, info=dict()) if val is not None: val.attrs["start_time"] = handler.start_time - # val.attrs["footprint"] = self.footprint if key["name"] not in ["longitude", "latitude"]: lonlats = self.load([DataID(self._id_keys, name="longitude", polarization=key["polarization"]), DataID(self._id_keys, name="latitude", polarization=key["polarization"])]) From 3b406759752e27c3ffcc76cecc327609ea578a80 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 29 Apr 2024 16:00:27 +0200 Subject: [PATCH 1334/1416] Restore get_reader_kwargs --- satpy/readers/__init__.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index 34d2f9a9d6..aefb8ba0da 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -638,9 +638,6 @@ def _get_reader_kwargs(reader, reader_kwargs): """ reader_kwargs = reader_kwargs or {} - if isinstance(reader, str): - reader = [reader] - # ensure one reader_kwargs per reader, None if not provided if reader is None: reader_kwargs = {None: reader_kwargs} From c9aabb7f2d9b8292b872cd2a52dca9e91d23d302 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 25 Apr 2024 12:14:17 -0500 Subject: [PATCH 1335/1416] Fix ABI L2 datasets when unitless and no calibration --- satpy/readers/abi_l2_nc.py | 2 +- satpy/tests/reader_tests/test_abi_l2_nc.py | 28 ++++++++++++++++------ 2 files changed, 22 insertions(+), 8 deletions(-) diff --git a/satpy/readers/abi_l2_nc.py b/satpy/readers/abi_l2_nc.py index 2324d3e1fd..5e8ae1c7bb 100644 --- a/satpy/readers/abi_l2_nc.py +++ b/satpy/readers/abi_l2_nc.py @@ -43,7 +43,7 @@ def get_dataset(self, key, info): self._remove_problem_attrs(variable) # convert to satpy standard units - if variable.attrs["units"] == "1" and key["calibration"] == "reflectance": + if variable.attrs["units"] == "1" and key.get("calibration") == "reflectance": variable *= 100.0 variable.attrs["units"] = "%" diff --git a/satpy/tests/reader_tests/test_abi_l2_nc.py b/satpy/tests/reader_tests/test_abi_l2_nc.py 
index 4b8d3a9578..3d2b063edc 100644 --- a/satpy/tests/reader_tests/test_abi_l2_nc.py +++ b/satpy/tests/reader_tests/test_abi_l2_nc.py @@ -96,23 +96,36 @@ def _create_mcmip_dataset(): return ds1 +def _create_aod_dataset(): + ds1 = _create_cmip_dataset("AOD") + ds1["AOD"].attrs["units"] = "1" + return ds1 + + class Test_NC_ABI_L2_get_dataset: """Test get dataset function of the NC_ABI_L2 reader.""" - def test_get_dataset(self): + @pytest.mark.parametrize( + ("obs_type", "ds_func", "var_name", "var_attrs"), + [ + ("ACHA", _create_cmip_dataset, "HT", {"units": "m"}), + ("AOD", _create_aod_dataset, "AOD", {"units": "1"}), + ] + ) + def test_get_dataset(self, obs_type, ds_func, var_name, var_attrs): """Test basic L2 load.""" from satpy.tests.utils import make_dataid - key = make_dataid(name="HT") - with _create_reader_for_fake_data("ACHA", _create_cmip_dataset()) as reader: - res = reader.get_dataset(key, {"file_key": "HT"}) + key = make_dataid(name=var_name) + with _create_reader_for_fake_data(obs_type, ds_func()) as reader: + res = reader.get_dataset(key, {"file_key": var_name}) exp_data = np.array([[2 * 0.3052037, np.nan], [32768 * 0.3052037, 32767 * 0.3052037]]) exp_attrs = {"instrument_ID": None, "modifiers": (), - "name": "HT", - "observation_type": "ACHA", + "name": var_name, + "observation_type": obs_type, "orbital_slot": None, "platform_name": "GOES-16", "platform_shortname": "G16", @@ -122,7 +135,8 @@ def test_get_dataset(self): "scene_id": None, "sensor": "abi", "timeline_ID": None, - "units": "m"} + } + exp_attrs.update(var_attrs) np.testing.assert_allclose(res.data, exp_data, equal_nan=True) _compare_subdict(res.attrs, exp_attrs) From 6d0311db97fd2524f5989d272b344601bb0b7700 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Mon, 29 Apr 2024 23:08:57 +0800 Subject: [PATCH 1336/1416] nearly complete --- satpy/etc/readers/fy3a_mersi1_l1b.yaml | 23 ++ satpy/etc/readers/fy3b_mersi1_l1b.yaml | 31 +- satpy/readers/mersi_l1b.py | 15 +- satpy/tests/reader_tests/test_mersi_l1b.py | 408 +++++++++++---------- 4 files changed, 277 insertions(+), 200 deletions(-) diff --git a/satpy/etc/readers/fy3a_mersi1_l1b.yaml b/satpy/etc/readers/fy3a_mersi1_l1b.yaml index 2db2617f6d..0f16f46454 100644 --- a/satpy/etc/readers/fy3a_mersi1_l1b.yaml +++ b/satpy/etc/readers/fy3a_mersi1_l1b.yaml @@ -30,9 +30,11 @@ datasets: file_type: fy3a_mersi1_l1b_1000 file_key: EV_250_Aggr.1KM_RefSB band_index: 0 + calibration_index: 0 250: file_type: fy3a_mersi1_l1b_250 file_key: EV_250_RefSB_b1 + calibration_index: 0 coordinates: [longitude, latitude] calibration: reflectance: @@ -50,9 +52,11 @@ datasets: file_type: fy3a_mersi1_l1b_1000 file_key: EV_250_Aggr.1KM_RefSB band_index: 1 + calibration_index: 1 250: file_type: fy3a_mersi1_l1b_250 file_key: EV_250_RefSB_b2 + calibration_index: 1 coordinates: [longitude, latitude] calibration: reflectance: @@ -70,9 +74,11 @@ datasets: file_type: fy3a_mersi1_l1b_1000 file_key: EV_250_Aggr.1KM_RefSB band_index: 2 + calibration_index: 2 250: file_type: fy3a_mersi1_l1b_250 file_key: EV_250_RefSB_b3 + calibration_index: 2 coordinates: [longitude, latitude] calibration: reflectance: @@ -90,9 +96,11 @@ datasets: file_type: fy3a_mersi1_l1b_1000 file_key: EV_250_Aggr.1KM_RefSB band_index: 3 + calibration_index: 3 250: file_type: fy3a_mersi1_l1b_250 file_key: EV_250_RefSB_b4 + calibration_index: 3 coordinates: [longitude, latitude] calibration: reflectance: @@ -128,6 +136,7 @@ datasets: file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 0 + calibration_index: 4 coordinates: 
[longitude, latitude] calibration: reflectance: @@ -144,6 +153,7 @@ datasets: file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 1 + calibration_index: 5 coordinates: [longitude, latitude] calibration: reflectance: @@ -160,6 +170,7 @@ datasets: file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 2 + calibration_index: 6 coordinates: [longitude, latitude] calibration: reflectance: @@ -176,6 +187,7 @@ datasets: file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 3 + calibration_index: 7 coordinates: [longitude, latitude] calibration: reflectance: @@ -192,6 +204,7 @@ datasets: file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 4 + calibration_index: 8 coordinates: [longitude, latitude] calibration: reflectance: @@ -208,6 +221,7 @@ datasets: file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 5 + calibration_index: 9 coordinates: [longitude, latitude] calibration: reflectance: @@ -224,6 +238,7 @@ datasets: file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 6 + calibration_index: 10 coordinates: [longitude, latitude] calibration: reflectance: @@ -240,6 +255,7 @@ datasets: file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 7 + calibration_index: 11 coordinates: [longitude, latitude] calibration: reflectance: @@ -256,6 +272,7 @@ datasets: file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 8 + calibration_index: 12 coordinates: [longitude, latitude] calibration: reflectance: @@ -272,6 +289,7 @@ datasets: file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 9 + calibration_index: 13 coordinates: [longitude, latitude] calibration: reflectance: @@ -288,6 +306,7 @@ datasets: file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 10 + calibration_index: 14 coordinates: [longitude, latitude] calibration: reflectance: @@ -304,6 +323,7 @@ datasets: file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 11 + calibration_index: 15 coordinates: [longitude, latitude] calibration: reflectance: @@ -320,6 +340,7 @@ datasets: file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 12 + calibration_index: 16 coordinates: [longitude, latitude] calibration: reflectance: @@ -336,6 +357,7 @@ datasets: file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 13 + calibration_index: 17 coordinates: [longitude, latitude] calibration: reflectance: @@ -352,6 +374,7 @@ datasets: file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 14 + calibration_index: 18 coordinates: [longitude, latitude] calibration: reflectance: diff --git a/satpy/etc/readers/fy3b_mersi1_l1b.yaml b/satpy/etc/readers/fy3b_mersi1_l1b.yaml index 464c079868..02f7e14883 100644 --- a/satpy/etc/readers/fy3b_mersi1_l1b.yaml +++ b/satpy/etc/readers/fy3b_mersi1_l1b.yaml @@ -36,9 +36,11 @@ datasets: file_type: fy3b_mersi1_l1b_1000 file_key: EV_250_Aggr.1KM_RefSB band_index: 0 + calibration_index: 0 250: file_type: fy3b_mersi1_l1b_250 file_key: EV_250_RefSB_b1 + calibration_index: 0 coordinates: [longitude, latitude] calibration: reflectance: @@ -56,9 +58,11 @@ datasets: file_type: fy3b_mersi1_l1b_1000 file_key: EV_250_Aggr.1KM_RefSB band_index: 1 + calibration_index: 1 250: file_type: fy3b_mersi1_l1b_250 file_key: EV_250_RefSB_b2 + calibration_index: 1 coordinates: [longitude, latitude] calibration: reflectance: @@ -76,9 +80,11 @@ datasets: file_type: fy3b_mersi1_l1b_1000 file_key: EV_250_Aggr.1KM_RefSB band_index: 2 + calibration_index: 2 250: file_type: 
fy3b_mersi1_l1b_250 file_key: EV_250_RefSB_b3 + calibration_index: 2 coordinates: [longitude, latitude] calibration: reflectance: @@ -96,9 +102,11 @@ datasets: file_type: fy3b_mersi1_l1b_1000 file_key: EV_250_Aggr.1KM_RefSB band_index: 3 + calibration_index: 3 250: file_type: fy3b_mersi1_l1b_250 file_key: EV_250_RefSB_b4 + calibration_index: 3 coordinates: [longitude, latitude] calibration: reflectance: @@ -134,6 +142,7 @@ datasets: file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 0 + calibration_index: 4 coordinates: [longitude, latitude] calibration: reflectance: @@ -150,6 +159,7 @@ datasets: file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 1 + calibration_index: 5 coordinates: [longitude, latitude] calibration: reflectance: @@ -166,6 +176,7 @@ datasets: file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 2 + calibration_index: 6 coordinates: [longitude, latitude] calibration: reflectance: @@ -182,6 +193,7 @@ datasets: file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 3 + calibration_index: 7 coordinates: [longitude, latitude] calibration: reflectance: @@ -198,6 +210,7 @@ datasets: file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 4 + calibration_index: 8 coordinates: [longitude, latitude] calibration: reflectance: @@ -214,6 +227,7 @@ datasets: file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 5 + calibration_index: 9 coordinates: [longitude, latitude] calibration: reflectance: @@ -230,6 +244,7 @@ datasets: file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 6 + calibration_index: 10 coordinates: [longitude, latitude] calibration: reflectance: @@ -246,6 +261,7 @@ datasets: file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 7 + calibration_index: 11 coordinates: [longitude, latitude] calibration: reflectance: @@ -262,6 +278,7 @@ datasets: file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 8 + calibration_index: 12 coordinates: [longitude, latitude] calibration: reflectance: @@ -278,6 +295,7 @@ datasets: file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 9 + calibration_index: 13 coordinates: [longitude, latitude] calibration: reflectance: @@ -294,6 +312,7 @@ datasets: file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 10 + calibration_index: 14 coordinates: [longitude, latitude] calibration: reflectance: @@ -310,6 +329,7 @@ datasets: file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 11 + calibration_index: 15 coordinates: [longitude, latitude] calibration: reflectance: @@ -326,6 +346,7 @@ datasets: file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 12 + calibration_index: 16 coordinates: [longitude, latitude] calibration: reflectance: @@ -342,6 +363,7 @@ datasets: file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 13 + calibration_index: 17 coordinates: [longitude, latitude] calibration: reflectance: @@ -358,6 +380,7 @@ datasets: file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 14 + calibration_index: 18 coordinates: [longitude, latitude] calibration: reflectance: @@ -397,7 +420,7 @@ datasets: standard_name: solar_zenith_angle resolution: 1000 coordinates: [longitude, latitude] - file_type: fy3b_mersi1_l1b_1000 + file_type: fy3b_mersi1_l1b_geo file_key: SolarZenith solar_azimuth_angle: @@ -406,7 +429,7 @@ datasets: standard_name: solar_azimuth_angle resolution: 1000 coordinates: [longitude, latitude] - file_type: fy3b_mersi1_l1b_1000 + 
file_type: fy3b_mersi1_l1b_geo file_key: SolarAzimuth satellite_zenith_angle: @@ -415,7 +438,7 @@ datasets: standard_name: sensor_zenith_angle resolution: 1000 coordinates: [longitude, latitude] - file_type: fy3b_mersi1_l1b_1000 + file_type: fy3b_mersi1_l1b_geo file_key: SensorZenith satellite_azimuth_angle: @@ -424,5 +447,5 @@ datasets: standard_name: sensor_azimuth_angle resolution: 1000 coordinates: [longitude, latitude] - file_type: fy3b_mersi1_l1b_1000 + file_type: fy3b_mersi1_l1b_geo file_key: SensorAzimuth diff --git a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py index 263d3ab992..90b47a58d6 100644 --- a/satpy/readers/mersi_l1b.py +++ b/satpy/readers/mersi_l1b.py @@ -20,8 +20,8 @@ The files for this reader are HDF5 and come in four varieties; band data and geolocation data, both at 250m and 1000m resolution. -This reader was tested on FY-3D MERSI-2 data, but should work on future -platforms as well assuming no file format changes. +This reader was tested on FY-3A/B/C MERSI-1, FY-3D MERSI-2, FY-3E MERSI-LL and FY-3G MERSI-RM data, +but should work on future platforms as well assuming no file format changes. """ from datetime import datetime @@ -100,7 +100,7 @@ def _get_coefficients(self, cal_key, cal_index): coeffs = coeffs * slope + intercept return coeffs - def _get_coefficients_mersi1(self, band_index): + def _get_coefficients_mersi1(self, cal_index): """Get VIS calibration coeffs from attributes. Only for MERSI-1 on FY-3A/B.""" try: # This is found in the actual file. @@ -109,8 +109,7 @@ def _get_coefficients_mersi1(self, band_index): # This is in the official manual. coeffs = self["/attr/VIS_Cal_Coeff"] coeffs = coeffs.reshape(19, 3) - if band_index is not None: - coeffs = coeffs[band_index] + coeffs = coeffs[cal_index].tolist() return coeffs def _get_dn_corrections(self, data, band_index, dataset_id, attrs): @@ -146,10 +145,8 @@ def get_dataset(self, dataset_id, ds_info): if dataset_id.get("calibration") == "reflectance": # Only FY-3A/B stores VIS calibration coefficients in attributes - coeffs = self._get_coefficients_mersi1(band_index) if self.platform_name in ["FY-3A", "FY-3B"] else \ - self._get_coefficients(ds_info["calibration_key"], - ds_info["calibration_index"]) - + if self.platform_name in ["FY-3A", "FY-3B"]: + coeffs = self._get_coefficients_mersi1(ds_info["calibration_index"]) + else: + coeffs = self._get_coefficients(ds_info["calibration_key"], ds_info["calibration_index"]) data = coeffs[0] + coeffs[1] * data + coeffs[2] * data ** 2 data = data * self.get_refl_mult() diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index 2bba910a2c..1a18af39fe 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -43,9 +43,12 @@ def _get_calibration(num_scans, ftype): return calibration -def _get_250m_data(num_scans, rows_per_scan, num_cols): +def _get_250m_data(num_scans, rows_per_scan, num_cols, old_fy3ab_form=False): # Set some default attributes - def_attrs = {"FillValue": 65535, + fill_value_name = "_FillValue" if old_fy3ab_form else "FillValue" + key_prefix = "" if old_fy3ab_form else "Data/" + + def_attrs = {fill_value_name: 65535, "valid_range": [0, 4095], "Slope": np.array([1.] * 1), "Intercept": np.array([0.] * 1) } @@ -53,68 +56,36 @@ def _get_250m_data(num_scans, rows_per_scan, num_cols): radunits_attrs = {**def_attrs, **{"units": "mW/ (m2 cm-1 sr)"}} data = { - "Data/EV_250_RefSB_b1": + f"{key_prefix}EV_250_RefSB_b1": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, dims=("_rows", "_cols")), - "Data/EV_250_RefSB_b2": + f"{key_prefix}EV_250_RefSB_b2": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, dims=("_rows", "_cols")), - "Data/EV_250_RefSB_b3": + f"{key_prefix}EV_250_RefSB_b3": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, dims=("_rows", "_cols")), - "Data/EV_250_RefSB_b4": + f"{key_prefix}EV_250_RefSB_b4": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, dims=("_rows", "_cols")), - "Data/EV_250_Emissive_b24": + f"{key_prefix}EV_250_Emissive_b24": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=radunits_attrs, dims=("_rows", "_cols")), - "Data/EV_250_Emissive_b25": + f"{key_prefix}EV_250_Emissive_b25": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=radunits_attrs, dims=("_rows", "_cols")),
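+        # MERSI-1 files keep a single aggregated "EV_250_Emissive" dataset; the per-band _b24/_b25 variables above are the MERSI-2 layout.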
* 1) } @@ -53,68 +56,36 @@ def _get_250m_data(num_scans, rows_per_scan, num_cols): radunits_attrs = {**def_attrs, **{"units": "mW/ (m2 cm-1 sr)"}} data = { - "Data/EV_250_RefSB_b1": + f"{key_prefix}EV_250_RefSB_b1": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, dims=("_rows", "_cols")), - "Data/EV_250_RefSB_b2": + f"{key_prefix}EV_250_RefSB_b2": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, dims=("_rows", "_cols")), - "Data/EV_250_RefSB_b3": + f"{key_prefix}EV_250_RefSB_b3": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, dims=("_rows", "_cols")), - "Data/EV_250_RefSB_b4": + f"{key_prefix}EV_250_RefSB_b4": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, dims=("_rows", "_cols")), - "Data/EV_250_Emissive_b24": + f"{key_prefix}EV_250_Emissive_b24": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=radunits_attrs, dims=("_rows", "_cols")), - "Data/EV_250_Emissive_b25": + f"{key_prefix}EV_250_Emissive_b25": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=radunits_attrs, dims=("_rows", "_cols")), - } - return data - -def _get_mersi1_250m_data(num_scans, rows_per_scan, num_cols, key_prefix): - # Set some default attributes - def_attrs = {"FillValue": 65535, - "valid_range": [0, 4095], - "Slope": np.array([1.] * 1), "Intercept": np.array([0.] * 1) - } - nounits_attrs = {**def_attrs, **{"units": "NO"}} - - data = { - f"{key_prefix}EV_250_RefSB_b1": - xr.DataArray( - da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), - attrs=nounits_attrs, - dims=("_rows", "_cols")), - f"{key_prefix}EV_250_RefSB_b2": - xr.DataArray( - da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), - attrs=nounits_attrs, - dims=("_rows", "_cols")), - f"{key_prefix}EV_250_RefSB_b3": - xr.DataArray( - da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), - attrs=nounits_attrs, - dims=("_rows", "_cols")), - f"{key_prefix}EV_250_RefSB_b4": - xr.DataArray( - da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), - attrs=nounits_attrs, - dims=("_rows", "_cols")), f"{key_prefix}EV_250_Emissive": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), @@ -155,9 +126,13 @@ def _get_500m_data(num_scans, rows_per_scan, num_cols): return data -def _get_1km_data(num_scans, rows_per_scan, num_cols, key_prefix, radunits): +def _get_1km_data(num_scans, rows_per_scan, num_cols, old_fy3ab_form=False, mersi1=False): + fill_value_name = "_FillValue" if old_fy3ab_form else "FillValue" + key_prefix = "" if old_fy3ab_form else "Data/" + radunits = "NO" if mersi1 else "mW/ (m2 cm-1 sr)" + data = { - f"{key_prefix}EV_1KM_LL": + "Data/EV_1KM_LL": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), @@ -175,13 +150,13 @@ def _get_1km_data(num_scans, rows_per_scan, num_cols, key_prefix, radunits): dtype=np.uint16), attrs={ "Slope": np.array([1.] * 15), "Intercept": np.array([0.] 
* 15), - "FillValue": 65535, + fill_value_name: 65535, "units": "NO", "valid_range": [0, 4095], "long_name": b"1km Earth View Science Data", }, dims=("_ref_bands", "_rows", "_cols")), "Data/EV_1KM_Emissive": xr.DataArray( da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), @@ -200,7 +175,7 @@ def _get_1km_data(num_scans, rows_per_scan, num_cols, key_prefix, radunits): dtype=np.uint16), attrs={ "Slope": np.array([1.] * 4), "Intercept": np.array([0.] * 4), - "FillValue": 65535, + fill_value_name: 65535, "units": "NO", "valid_range": [0, 4095], "long_name": b"250m Reflective Bands Earth View " @@ -209,17 +184,29 @@ def _get_1km_data(num_scans, rows_per_scan, num_cols, key_prefix, radunits): dims=("_ref250_bands", "_rows", "_cols")), f"{key_prefix}EV_250_Aggr.1KM_Emissive": xr.DataArray( - da.ones((2, num_scans * rows_per_scan, num_cols), chunks=1024, + da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ - "Slope": np.array([1.] * 2), "Intercept": np.array([0.] * 2), - "FillValue": 65535, + "Slope": np.array([1.]), "Intercept": np.array([0.]), + fill_value_name: 65535, "units": radunits, "valid_range": [0, 4095], "long_name": b"250m Emissive Bands Earth View " b"Science Data Aggregated to 1 km" }, - dims=("_ir250_bands", "_rows", "_cols")), + dims=("_rows", "_cols")) if mersi1 else \ + xr.DataArray( + da.ones((2, num_scans * rows_per_scan, num_cols), chunks=1024, + dtype=np.uint16), + attrs={ + "Slope": np.array([1.] * 2), "Intercept": np.array([0.] * 2), + "FillValue": 65535, + "units": "mW/ (m2 cm-1 sr)", + "valid_range": [0, 4095], + "long_name": b"250m Emissive Bands Earth View " + b"Science Data Aggregated to 1 km" + }, + dims=("_ir250_bands", "_rows", "_cols")) } return data @@ -304,12 +291,10 @@ def get_test_content(self, filename, filename_info, filetype_info): "/attr/Observing Ending Time": "18:38:36.728", } fy3a_attrs = { - "/attr/VIR_Cal_Coeff": "0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 " - "0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0", + "/attr/VIR_Cal_Coeff": np.array([0.0, 1.0, 0.0] * 19), } fy3b_attrs = { - "/attr/VIS_Cal_Coeff": "0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 " - "0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0", + "/attr/VIS_Cal_Coeff": np.array([0.0, 1.0, 0.0] * 19), } global_attrs, ftype = self._set_sensor_attrs(global_attrs) @@ -318,7 +303,11 @@ def get_test_content(self, filename, filename_info, filetype_info): test_content = {} test_content.update(global_attrs) - test_content.update(data) + if "fy3a_mersi1" in self.filetype_info["file_type"]: + test_content.update(data[0]) + test_content.update(data[1]) + else: + test_content.update(data) if "fy3a_mersi1" in self.filetype_info["file_type"]: test_content.update(fy3a_attrs) elif "fy3b_mersi1" in self.filetype_info["file_type"]: @@ -355,9 +344,13 @@ def _set_sensor_attrs(self, global_attrs): return global_attrs, ftype def _get_data_file_content(self): - if "_geo" in self.filetype_info["file_type"]: - return self._add_geo_data_file_content() - return self._add_band_data_file_content() + if "fy3a_mersi1" in self.filetype_info["file_type"]: + return self._add_band_data_file_content(), self._add_geo_data_file_content() + elif "_geo" in self.filetype_info["file_type"]: + return self._add_geo_data_file_content() + else: + return self._add_band_data_file_content() def _add_geo_data_file_content(self): num_scans = self.num_scans @@ -370,23 +363,18 @@ def
_add_band_data_file_content(self): num_cols = self._num_cols_for_file_type num_scans = self.num_scans rows_per_scan = self._rows_per_scan - is_fy3ab_mersi1 = self.filetype_info["file_type"].startswith(("fy3a_mersi1", "fy3b_mersi1")) is_mersi1 = self.filetype_info["file_type"].startswith(("fy3a_mersi1", "fy3b_mersi1", "fy3c_mersi1")) + is_fy3ab_mersi1 = self.filetype_info["file_type"].startswith(("fy3a_mersi1", "fy3b_mersi1")) is_mersi2 = self.filetype_info["file_type"].startswith("mersi2_") is_mersill = self.filetype_info["file_type"].startswith("mersi_ll") is_1km = "_1000" in self.filetype_info["file_type"] is_250m = "_250" in self.filetype_info["file_type"] - key_prefix = "" if is_fy3ab_mersi1 else "Data/" - radunits = "NO" if is_mersi1 else "mW/ (m2 cm-1 sr)" - if is_1km: - return _get_1km_data(num_scans, rows_per_scan, num_cols, key_prefix, radunits) + return _get_1km_data(num_scans, rows_per_scan, num_cols, old_fy3ab_form=is_fy3ab_mersi1, mersi1=is_mersi1) elif is_250m: - if is_mersi1: - return _get_mersi1_250m_data(num_scans, rows_per_scan, num_cols, key_prefix) - elif is_mersi2: - return _get_250m_data(num_scans, rows_per_scan, num_cols) + if is_mersi1 or is_mersi2: + return _get_250m_data(num_scans, rows_per_scan, num_cols, old_fy3ab_form=is_fy3ab_mersi1) elif is_mersill: return _get_250m_ll_data(num_scans, rows_per_scan, num_cols) else: @@ -422,24 +410,16 @@ def _geo_prefix_for_file_type(self): return "" -def _test_helper(res): +def _test_helper(res, band_list, exp_cal, exp_unit, exp_shape): """Remove test code duplication.""" - assert (2 * 40, 2048 * 2) == res["1"].shape - assert "reflectance" == res["1"].attrs["calibration"] - assert "%" == res["1"].attrs["units"] - assert (2 * 40, 2048 * 2) == res["2"].shape - assert "reflectance" == res["2"].attrs["calibration"] - assert "%" == res["2"].attrs["units"] - assert (2 * 40, 2048 * 2) == res["3"].shape - assert "reflectance" == res["3"].attrs["calibration"] - assert "%" == res["3"].attrs["units"] - assert (2 * 40, 2048 * 2) == res["4"].shape - assert "reflectance" == res["4"].attrs["calibration"] - assert "%" == res["4"].attrs["units"] + for band in band_list: + assert res[band].attrs["calibration"] == exp_cal + assert res[band].attrs["units"] == exp_unit + assert res[band].shape == exp_shape class MERSIL1BTester: - """Test MERSI2 L1B Reader.""" + """Test MERSI1/2/LL/RM L1B Reader.""" def setup_method(self): """Wrap HDF5 file handler with our own fake handler.""" @@ -456,13 +436,13 @@ def teardown_method(self): self.p.stop() -class TestFY3AMERSI1L1B(MERSIL1BTester): - """Test the FY3A MERSI1 L1B reader.""" +class MERSI1L1BTester(MERSIL1BTester): + """Test MERSI1 L1B Reader.""" - yaml_file = "fy3a_mersi1_l1b.yaml" - filenames_1000m = ["FY3A_MERSI_GBAL_L1_20090601_1200_1000M_MS.hdf"] - filenames_250m = ["FY3A_MERSI_GBAL_L1_20090601_1200_0250M_MS.hdf"] - filenames_all = filenames_1000m + filenames_250m + yaml_file = None + filenames_1000m = None + filenames_250m = None + filenames_all = None def test_all_resolutions(self): """Test loading data when all resolutions are available.""" @@ -472,13 +452,155 @@ def test_all_resolutions(self): filenames = self.filenames_all reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) - assert 2 == len(files) + assert len(files) == len(filenames) + reader.create_filehandlers(files) + # Make sure we have some files + assert reader.file_handlers + + # Verify that we have multiple resolutions for: + # - Bands 1-4 (visible) + # - Bands 5 (IR) + available_datasets 
= reader.available_dataset_ids + for band_name in ("1", "2", "3", "4", "5"): + num_results = 2 + ds_id = make_dataid(name=band_name, resolution=250) + res = get_key(ds_id, available_datasets, + num_results=num_results, best=False) + assert num_results == len(res) + ds_id = make_dataid(name=band_name, resolution=1000) + res = get_key(ds_id, available_datasets, + num_results=num_results, best=False) + assert num_results == len(res) + + res = reader.load(["1", "2", "3", "4", "5", "6", "7", "8"]) + assert len(res) == 8 + _test_helper(res, ["1", "2", "3", "4"], "reflectance", "%", (2 * 40, 2048 * 2)) + assert res["5"].shape == (2 * 40, 2048 * 2) + assert res["5"].attrs["calibration"] == "brightness_temperature" + assert res["5"].attrs["units"] == "K" + assert res["6"].shape == (2 * 10, 2048) + assert res["6"].attrs["calibration"] == "reflectance" + assert res["6"].attrs["units"] == "%" + + def test_counts_calib(self): + """Test loading data at counts calibration.""" + from satpy.readers import load_reader + from satpy.tests.utils import make_dataid + filenames = self.filenames_all + reader = load_reader(self.reader_configs) + files = reader.select_files_from_pathnames(filenames) + assert len(files) == len(filenames) + reader.create_filehandlers(files) + # Make sure we have some files + assert reader.file_handlers + + ds_ids = [] + for band_name in ["1", "2", "3", "4", "5", "6", "19", "20"]: + ds_ids.append(make_dataid(name=band_name, calibration="counts")) + ds_ids.append(make_dataid(name="satellite_zenith_angle")) + res = reader.load(ds_ids) + assert len(res) == 9 + _test_helper(res, ["1", "2", "3", "4", "5"], "counts", "1", (2 * 40, 2048 * 2)) + _test_helper(res, ["6", "19", "20"], "counts", "1", (2 * 10, 2048)) + + def test_1km_resolutions(self): + """Test loading data when only 1km resolutions are available.""" + from satpy.dataset.data_dict import get_key + from satpy.readers import load_reader + from satpy.tests.utils import make_dataid + filenames = self.filenames_1000m + reader = load_reader(self.reader_configs) + files = reader.select_files_from_pathnames(filenames) + assert len(files) == len(filenames) + reader.create_filehandlers(files) + # Make sure we have some files + assert reader.file_handlers + + # Verify that we have multiple resolutions for: + # - Bands 1-4 (visible) + # - Bands 5 (IR) + available_datasets = reader.available_dataset_ids + for band_name in ("1", "2", "3", "4", "5"): + num_results = 2 + ds_id = make_dataid(name=band_name, resolution=250) + with pytest.raises(KeyError): + get_key(ds_id, available_datasets, num_results=num_results, best=False) + ds_id = make_dataid(name=band_name, resolution=1000) + res = get_key(ds_id, available_datasets, + num_results=num_results, best=False) + assert num_results == len(res) + + res = reader.load(["1", "2", "3", "4", "5", "6", "7", "8"]) + assert len(res) == 8 + _test_helper(res, ["1", "2", "3", "4", "6", "7", "8"], "reflectance", "%", (2 * 10, 2048)) + assert res["5"].shape == (2 * 10, 2048) + assert res["5"].attrs["calibration"] == "brightness_temperature" + assert res["5"].attrs["units"] == "K" + + def test_250_resolutions(self): + """Test loading data when only 250m resolutions are available.""" + from satpy.dataset.data_dict import get_key + from satpy.readers import load_reader + from satpy.tests.utils import make_dataid + filenames = self.filenames_250m + reader = load_reader(self.reader_configs) + files = reader.select_files_from_pathnames(filenames) + assert len(files) == len(filenames) 
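+        # NB: select_files_from_pathnames() only pattern-matches the file names;
+        # it is create_filehandlers() below that actually opens them (here via the
+        # fake HDF5 file handler patched in by setup_method), hence both asserts.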
reader.create_filehandlers(files) # Make sure we have some files assert reader.file_handlers + # Verify that we have multiple resolutions for: + # - Bands 1-4 (visible) + # - Bands 5 (IR) + available_datasets = reader.available_dataset_ids + for band_name in ("1", "2", "3", "4", "5"): + num_results = 2 + ds_id = make_dataid(name=band_name, resolution=250) + res = get_key(ds_id, available_datasets, + num_results=num_results, best=False) + assert num_results == len(res) + ds_id = make_dataid(name=band_name, resolution=1000) + with pytest.raises(KeyError): + get_key(ds_id, available_datasets, num_results=num_results, best=False) + + res = reader.load(["1", "2", "3", "4", "5", "6", "7"]) + assert len(res) == 5 + with pytest.raises(KeyError): + res.__getitem__("6") + with pytest.raises(KeyError): + res.__getitem__("7") + _test_helper(res, ["1", "2", "3", "4"], "reflectance", "%", (2 * 40, 2048 * 2)) + assert res["5"].shape == (2 * 40, 2048 * 2) + assert res["5"].attrs["calibration"] == "brightness_temperature" + assert res["5"].attrs["units"] == "K" + + +class TestFY3AMERSI1L1B(MERSI1L1BTester): + """Test the FY3A MERSI1 L1B reader.""" + + yaml_file = "fy3a_mersi1_l1b.yaml" + filenames_1000m = ["FY3A_MERSI_GBAL_L1_20090601_1200_1000M_MS.hdf"] + filenames_250m = ["FY3A_MERSI_GBAL_L1_20090601_1200_0250M_MS.hdf"] + filenames_all = filenames_1000m + filenames_250m + +class TestFY3BMERSI1L1B(MERSI1L1BTester): + """Test the FY3A MERSI1 L1B reader.""" + yaml_file = "fy3b_mersi1_l1b.yaml" + filenames_1000m = ["FY3B_MERSI_GBAL_L1_20110824_1850_1000M_MS.hdf"] + filenames_250m = ["FY3B_MERSI_GBAL_L1_20110824_1850_0250M_MS.hdf", "FY3B_MERSI_GBAL_L1_20110824_1850_GEOXX_MS.hdf"] + filenames_all = filenames_1000m + filenames_250m + + +class TestFY3CMERSI1L1B(MERSI1L1BTester): + """Test the FY3A MERSI1 L1B reader.""" + + yaml_file = "fy3c_mersi1_l1b.yaml" + filenames_1000m = ["FY3C_MERSI_GBAL_L1_20131002_1835_1000M_MS.hdf", "FY3C_MERSI_GBAL_L1_20131002_1835_GEO1K_MS.hdf"] + filenames_250m = ["FY3C_MERSI_GBAL_L1_20131002_1835_0250M_MS.hdf", "FY3C_MERSI_GBAL_L1_20131002_1835_GEOQK_MS.hdf"] + filenames_all = filenames_1000m + filenames_250m @@ -524,18 +646,14 @@ def test_all_resolutions(self): res = reader.load(["1", "2", "3", "4", "5", "20", "24", "25"]) assert len(res) == 8 + _test_helper(res, ["1", "2", "3", "4"], "reflectance", "%", (2 * 40, 2048 * 2)) + _test_helper(res, ["24", "25"], "brightness_temperature", "K", (2 * 40, 2048 * 2)) assert res["5"].shape == (2 * 10, 2048) assert res["5"].attrs["calibration"] == "reflectance" assert res["5"].attrs["units"] == "%" assert res["20"].shape == (2 * 10, 2048) assert res["20"].attrs["calibration"] == "brightness_temperature" assert res["20"].attrs["units"] == "K" - assert res["24"].shape == (2 * 40, 2048 * 2) - assert res["24"].attrs["calibration"] == "brightness_temperature" - assert res["24"].attrs["units"] == "K" - assert res["25"].shape == (2 * 40, 2048 * 2) - assert res["25"].attrs["calibration"] == "brightness_temperature" - assert res["25"].attrs["units"] == "K" def test_counts_calib(self): """Test loading data at counts calibration.""" @@ -555,38 +673,8 @@ def test_counts_calib(self): ds_ids.append(make_dataid(name="satellite_zenith_angle")) res = reader.load(ds_ids) assert len(res) == 9 - assert res["1"].shape == (2 * 40, 2048 * 2) - assert res["1"].attrs["calibration"] == "counts" - assert res["1"].dtype == np.uint16 - assert res["1"].attrs["units"] == "1" - assert res["2"].shape == (2 * 40, 2048 * 2) - assert res["2"].attrs["calibration"] == "counts" - 
assert res["2"].dtype == np.uint16 - assert res["2"].attrs["units"] == "1" - assert res["3"].shape == (2 * 40, 2048 * 2) - assert res["3"].attrs["calibration"] == "counts" - assert res["3"].dtype == np.uint16 - assert res["3"].attrs["units"] == "1" - assert res["4"].shape == (2 * 40, 2048 * 2) - assert res["4"].attrs["calibration"] == "counts" - assert res["4"].dtype == np.uint16 - assert res["4"].attrs["units"] == "1" - assert res["5"].shape == (2 * 10, 2048) - assert res["5"].attrs["calibration"] == "counts" - assert res["5"].dtype == np.uint16 - assert res["5"].attrs["units"] == "1" - assert res["20"].shape == (2 * 10, 2048) - assert res["20"].attrs["calibration"] == "counts" - assert res["20"].dtype == np.uint16 - assert res["20"].attrs["units"] == "1" - assert res["24"].shape == (2 * 40, 2048 * 2) - assert res["24"].attrs["calibration"] == "counts" - assert res["24"].dtype == np.uint16 - assert res["24"].attrs["units"] == "1" - assert res["25"].shape == (2 * 40, 2048 * 2) - assert res["25"].attrs["calibration"] == "counts" - assert res["25"].dtype == np.uint16 - assert res["25"].attrs["units"] == "1" + _test_helper(res, ["1", "2", "3", "4", "24", "25"], "counts", "1", (2 * 40, 2048 * 2)) + _test_helper(res, ["5", "20"], "counts", "1", (2 * 10, 2048)) def test_rad_calib(self): """Test loading data at radiance calibration.""" @@ -605,18 +693,7 @@ def test_rad_calib(self): ds_ids.append(make_dataid(name=band_name, calibration="radiance")) res = reader.load(ds_ids) assert len(res) == 5 - assert res["1"].shape == (2 * 40, 2048 * 2) - assert res["1"].attrs["calibration"] == "radiance" - assert res["1"].attrs["units"] == "mW/ (m2 cm-1 sr)" - assert res["2"].shape == (2 * 40, 2048 * 2) - assert res["2"].attrs["calibration"] == "radiance" - assert res["2"].attrs["units"] == "mW/ (m2 cm-1 sr)" - assert res["3"].shape == (2 * 40, 2048 * 2) - assert res["3"].attrs["calibration"] == "radiance" - assert res["3"].attrs["units"] == "mW/ (m2 cm-1 sr)" - assert res["4"].shape == (2 * 40, 2048 * 2) - assert res["4"].attrs["calibration"] == "radiance" - assert res["4"].attrs["units"] == "mW/ (m2 cm-1 sr)" + _test_helper(res, ["1", "2", "3", "4"], "radiance", "mW/ (m2 cm-1 sr)", (2 * 40, 2048 * 2)) assert res["5"].shape == (2 * 10, 2048) assert res["5"].attrs["calibration"] == "radiance" assert res["5"].attrs["units"] == "mW/ (m2 cm-1 sr)" @@ -654,30 +731,14 @@ def test_1km_resolutions(self): res = reader.load(["1", "2", "3", "4", "5", "20", "24", "25"]) assert len(res) == 8 - assert res["1"].shape == (2 * 10, 2048) - assert res["1"].attrs["calibration"] == "reflectance" - assert res["1"].attrs["units"] == "%" - assert res["2"].shape == (2 * 10, 2048) - assert res["2"].attrs["calibration"] == "reflectance" - assert res["2"].attrs["units"] == "%" - assert res["3"].shape == (2 * 10, 2048) - assert res["3"].attrs["calibration"] == "reflectance" - assert res["3"].attrs["units"] == "%" - assert res["4"].shape == (2 * 10, 2048) - assert res["4"].attrs["calibration"] == "reflectance" - assert res["4"].attrs["units"] == "%" + _test_helper(res, ["1", "2", "3", "4"], "reflectance", "%", (2 * 10, 2048)) + _test_helper(res, ["24", "25"], "brightness_temperature", "K", (2 * 10, 2048)) assert res["5"].shape == (2 * 10, 2048) assert res["5"].attrs["calibration"] == "reflectance" assert res["5"].attrs["units"] == "%" assert res["20"].shape == (2 * 10, 2048) assert res["20"].attrs["calibration"] == "brightness_temperature" assert res["20"].attrs["units"] == "K" - assert res["24"].shape == (2 * 10, 2048) - assert 
res["24"].attrs["calibration"] == "brightness_temperature" - assert res["24"].attrs["units"] == "K" - assert res["25"].shape == (2 * 10, 2048) - assert res["25"].attrs["calibration"] == "brightness_temperature" - assert res["25"].attrs["units"] == "K" def test_250_resolutions(self): """Test loading data when only 250m resolutions are available.""" @@ -716,7 +777,7 @@ def test_250_resolutions(self): res.__getitem__("5") with pytest.raises(KeyError): res.__getitem__("20") - _test_helper(res) + # _test_helper(res) assert res["24"].shape == (2 * 40, 2048 * 2) assert res["24"].attrs["calibration"] == "brightness_temperature" assert res["24"].attrs["units"] == "K" @@ -790,21 +851,8 @@ def test_rad_calib(self): ds_ids.append(make_dataid(name=band_name, calibration="radiance")) res = reader.load(ds_ids) assert len(res) == 5 - assert res["1"].shape == (2 * 10, 2048) - assert res["1"].attrs["calibration"] == "radiance" - assert res["1"].attrs["units"] == "mW/ (m2 cm-1 sr)" - assert res["3"].shape == (2 * 10, 2048) - assert res["3"].attrs["calibration"] == "radiance" - assert res["3"].attrs["units"] == "mW/ (m2 cm-1 sr)" - assert res["4"].shape == (2 * 10, 2048) - assert res["4"].attrs["calibration"] == "radiance" - assert res["4"].attrs["units"] == "mW/ (m2 cm-1 sr)" - assert res["6"].shape == (2 * 40, 2048 * 2) - assert res["6"].attrs["calibration"] == "radiance" - assert res["6"].attrs["units"] == "mW/ (m2 cm-1 sr)" - assert res["7"].shape == (2 * 40, 2048 * 2) - assert res["7"].attrs["calibration"] == "radiance" - assert res["7"].attrs["units"] == "mW/ (m2 cm-1 sr)" + _test_helper(res, ["1", "3", "4"], "radiance", "mW/ (m2 cm-1 sr)", (2 * 10, 2048)) + _test_helper(res, ["6", "7"], "radiance", "mW/ (m2 cm-1 sr)", (2 * 40, 2048 * 2)) def test_1km_resolutions(self): """Test loading data when only 1km resolutions are available.""" @@ -843,23 +891,9 @@ def test_1km_resolutions(self): res = reader.load(["1", "2", "3", "5", "6", "7"]) assert len(res) == 6 assert res["1"].shape == (2 * 10, 2048) - assert "radiance" == res["1"].attrs["calibration"] + assert res["1"].attrs["calibration"] == "radiance" assert res["1"].attrs["units"] == "mW/ (m2 cm-1 sr)" - assert res["2"].shape == (2 * 10, 2048) - assert "brightness_temperature" == res["2"].attrs["calibration"] - assert res["2"].attrs["units"] == "K" - assert res["3"].shape == (2 * 10, 2048) - assert "brightness_temperature" == res["3"].attrs["calibration"] - assert res["3"].attrs["units"] == "K" - assert res["5"].shape == (2 * 10, 2048) - assert "brightness_temperature" == res["5"].attrs["calibration"] - assert res["5"].attrs["units"] == "K" - assert res["6"].shape == (2 * 10, 2048) - assert "brightness_temperature" == res["6"].attrs["calibration"] - assert res["6"].attrs["units"] == "K" - assert res["7"].shape == (2 * 10, 2048) - assert "brightness_temperature" == res["7"].attrs["calibration"] - assert res["7"].attrs["units"] == "K" + _test_helper(res, ["2", "3", "5", "6", "7"], "brightness_temperature", "K", (2 * 10, 2048)) def test_250_resolutions(self): """Test loading data when only 250m resolutions are available.""" From 75e9f49950fca12b425df9f27c71d235d9dc750e Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Mon, 29 Apr 2024 23:13:11 +0800 Subject: [PATCH 1337/1416] Update test_mersi_l1b.py --- satpy/tests/reader_tests/test_mersi_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index 1a18af39fe..08d3ec37db 100644 --- 
a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -194,7 +194,7 @@ def _get_1km_data(num_scans, rows_per_scan, num_cols, old_fy3ab_form=False, mers "long_name": b"250m Emissive Bands Earth View " b"Science Data Aggregated to 1 km" }, - dims=("_rows", "_cols")) if mersi1 else \ + dims=("_rows", "_cols")) if mersi1 else xr.DataArray( da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), From 9829d775ce7f852fcd0480512bf2cf3af37ba53d Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Mon, 29 Apr 2024 23:14:49 +0800 Subject: [PATCH 1338/1416] Update test_mersi_l1b.py --- satpy/tests/reader_tests/test_mersi_l1b.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index 08d3ec37db..1197735591 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -439,10 +439,10 @@ def teardown_method(self): class MERSI1L1BTester(MERSIL1BTester): """Test MERSI1 L1B Reader.""" - yaml_file = None - filenames_1000m = None - filenames_250m = None - filenames_all = None + yaml_file = "" + filenames_1000m = [] + filenames_250m = [] + filenames_all = [] def test_all_resolutions(self): """Test loading data when all resolutions are available.""" From d81860ed1fa3a4991061cb4c55188504e291a65b Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Mon, 29 Apr 2024 23:19:58 +0800 Subject: [PATCH 1339/1416] Update test_mersi_l1b.py --- satpy/tests/reader_tests/test_mersi_l1b.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index 1197735591..6abb47168f 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -440,9 +440,9 @@ class MERSI1L1BTester(MERSIL1BTester): """Test MERSI1 L1B Reader.""" yaml_file = "" - filenames_1000m = [] - filenames_250m = [] - filenames_all = [] + filenames_1000m: list= [] + filenames_250m: list = [] + filenames_all: list = [] def test_all_resolutions(self): """Test loading data when all resolutions are available.""" From d11d600642114e1cd4e5bf2f89cd417299ca8956 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Mon, 29 Apr 2024 23:30:57 +0800 Subject: [PATCH 1340/1416] Update mersi_l1b.py --- satpy/readers/mersi_l1b.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py index 90b47a58d6..3c55d6ca45 100644 --- a/satpy/readers/mersi_l1b.py +++ b/satpy/readers/mersi_l1b.py @@ -117,13 +117,12 @@ def _get_dn_corrections(self, data, band_index, dataset_id, attrs): slope = attrs.pop("Slope", None) intercept = attrs.pop("Intercept", None) if slope is not None and dataset_id.get("calibration") != "counts": - if band_index is not None and slope.size > 1: - slope = slope[band_index] - intercept = intercept[band_index] + new_slope = slope[band_index] if (band_index is not None and slope.size > 1) else slope + new_intercept = intercept[band_index] if (band_index is not None and slope.size > 1) else intercept # There's a bug in the slope for MERSI-1 11.25(5) - if self.sensor_name == "mersi-1" and dataset_id["name"] == "5" and slope in [100, 1]: - slope = 0.01 - data = data * slope + intercept + new_slope = 0.01 if self.sensor_name == "mersi-1" and dataset_id["name"] == "5" and new_slope in [100, 1] \ + else new_slope + data = data * new_slope + 
new_intercept return data def get_dataset(self, dataset_id, ds_info): From 45f1d1f0d07442e1eeab12ebf4462ed6fa3d1bf0 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Mon, 29 Apr 2024 23:34:24 +0800 Subject: [PATCH 1341/1416] Update mersi_l1b.py --- satpy/readers/mersi_l1b.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py index 3c55d6ca45..0dc4644988 100644 --- a/satpy/readers/mersi_l1b.py +++ b/satpy/readers/mersi_l1b.py @@ -185,13 +185,11 @@ def _mask_data(self, data, dataset_id, attrs): if valid_range is not None: # Due to a bug in the valid_range upper limit in the 10.8(24) and 12.0(25) # in the HDF data, this is hardcoded here. - if self.sensor_name == "mersi-2": - if dataset_id["name"] in ["24", "25"] and valid_range[1] == 4095: - valid_range[1] = 25000 + valid_range[1] = 25000 if self.sensor_name == "mersi-2" and dataset_id["name"] in ["24", "25"] and \ + valid_range[1] == 4095 else valid_range[1] # Similar bug also found in MERSI-1 - elif self.sensor_name == "mersi-1": - if dataset_id["name"] == "5" and valid_range[1] == 4095: - valid_range[1] = 25000 + valid_range[1] = 25000 if self.sensor_name == "mersi-1" and dataset_id["name"] == "5" and \ + valid_range[1] == 4095 else valid_range[1] # typically bad_values == 65535, saturated == 65534 # dead detector == 65533 data = data.where((data >= valid_range[0]) & From b62a410ac67adbc50ec33e796be17603d15b1612 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Tue, 30 Apr 2024 00:07:57 +0800 Subject: [PATCH 1342/1416] Update mersi_l1b.py --- satpy/readers/mersi_l1b.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py index 0dc4644988..136068098e 100644 --- a/satpy/readers/mersi_l1b.py +++ b/satpy/readers/mersi_l1b.py @@ -116,14 +116,17 @@ def _get_dn_corrections(self, data, band_index, dataset_id, attrs): """Use slope and intercept to get DN corrections.""" slope = attrs.pop("Slope", None) intercept = attrs.pop("Intercept", None) - if slope is not None and dataset_id.get("calibration") != "counts": - new_slope = slope[band_index] if (band_index is not None and slope.size > 1) else slope - new_intercept = intercept[band_index] if (band_index is not None and slope.size > 1) else intercept + try: + new_slope = slope[band_index] + new_intercept = intercept[band_index] # There's a bug in the slope for MERSI-1 11.25(5) new_slope = 0.01 if self.sensor_name == "mersi-1" and dataset_id["name"] == "5" and new_slope in [100, 1] \ else new_slope data = data * new_slope + new_intercept - return data + return data + + except TypeError: + return data def get_dataset(self, dataset_id, ds_info): """Load data variable and metadata and calibrate if needed.""" From 64a4a86ce6c8cb4b7a164fb06bcb5f2484554155 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Tue, 30 Apr 2024 00:11:14 +0800 Subject: [PATCH 1343/1416] Update mersi_l1b.py --- satpy/readers/mersi_l1b.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py index 136068098e..91e143f7e9 100644 --- a/satpy/readers/mersi_l1b.py +++ b/satpy/readers/mersi_l1b.py @@ -185,19 +185,21 @@ def _mask_data(self, data, dataset_id, attrs): new_fill = data.dtype.type(fill_value) else: new_fill = np.nan - if valid_range is not None: + try: # Due to a bug in the valid_range upper limit in the 10.8(24) and 12.0(25) # in the HDF data, this is hardcoded here. 
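             # For example, a MERSI-2 band 24 file can declare valid_range == [0, 4095]
             # while its emissive counts really extend to 25000; without the widened
             # upper limit, the data.where(...) masking below would blank those pixels.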
valid_range[1] = 25000 if self.sensor_name == "mersi-2" and dataset_id["name"] in ["24", "25"] and \ - valid_range[1] == 4095 else valid_range[1] + valid_range[1] == 4095 else valid_range[1] # Similar bug also found in MERSI-1 valid_range[1] = 25000 if self.sensor_name == "mersi-1" and dataset_id["name"] == "5" and \ - valid_range[1] == 4095 else valid_range[1] + valid_range[1] == 4095 else valid_range[1] # typically bad_values == 65535, saturated == 65534 # dead detector == 65533 data = data.where((data >= valid_range[0]) & (data <= valid_range[1]), new_fill) - return data + return data + except TypeError: + return data def _get_bt_dataset(self, data, calibration_index, wave_number): """Get the dataset as brightness temperature. From a2d4c88b118b30d3294c81fd789ef6dbe1833d1f Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Tue, 30 Apr 2024 15:43:10 +0800 Subject: [PATCH 1344/1416] Update test_mersi_l1b.py --- satpy/tests/reader_tests/test_mersi_l1b.py | 167 +++++++++------------ 1 file changed, 73 insertions(+), 94 deletions(-) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index 6abb47168f..4238a92214 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -43,10 +43,12 @@ def _get_calibration(num_scans, ftype): return calibration -def _get_250m_data(num_scans, rows_per_scan, num_cols, old_fy3ab_form=False): +def _get_250m_data(num_scans, rows_per_scan, num_cols, filetype_info): # Set some default attributes - fill_value_name = "_FillValue" if old_fy3ab_form else "FillValue" - key_prefix = "" if old_fy3ab_form else "Data/" + is_fy3ab_mersi1 = filetype_info["file_type"].startswith(("fy3a_mersi1", "fy3b_mersi1")) + + fill_value_name = "_FillValue" if is_fy3ab_mersi1 else "FillValue" + key_prefix = "" if is_fy3ab_mersi1 else "Data/" def_attrs = {fill_value_name: 65535, "valid_range": [0, 4095], @@ -126,88 +128,62 @@ def _get_500m_data(num_scans, rows_per_scan, num_cols): return data -def _get_1km_data(num_scans, rows_per_scan, num_cols, old_fy3ab_form=False, mersi1=False): - fill_value_name = "_FillValue" if old_fy3ab_form else "FillValue" - key_prefix = "" if old_fy3ab_form else "Data/" - radunits = "NO" if mersi1 else "mW/ (m2 cm-1 sr)" +def _get_1km_data(num_scans, rows_per_scan, num_cols, filetype_info): + is_mersi1 = filetype_info["file_type"].startswith(("fy3a_mersi1", "fy3b_mersi1", "fy3c_mersi1")) + is_fy3ab_mersi1 = filetype_info["file_type"].startswith(("fy3a_mersi1", "fy3b_mersi1")) - data = { - "Data/EV_1KM_LL": - xr.DataArray( - da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, - dtype=np.uint16), - attrs={ - "Slope": np.array([1.]), "Intercept": np.array([0.]), - "FillValue": 65535, - "units": "NO", - "valid_range": [0, 4095], - "long_name": b"1km Earth View Science Data", - }, + fill_value_name = "_FillValue" if is_fy3ab_mersi1 else "FillValue" + key_prefix = "" if is_fy3ab_mersi1 else "Data/" + radunits = "NO" if is_mersi1 else "mW/ (m2 cm-1 sr)" + + data = {"Data/EV_1KM_LL": + xr.DataArray(da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), + attrs={"Slope": np.array([1.]), "Intercept": np.array([0.]), + "FillValue": 65535, + "units": "NO", + "valid_range": [0, 4095], + "long_name": b"1km Earth View Science Data"}, dims=("_rows", "_cols")), - f"{key_prefix}EV_1KM_RefSB": - xr.DataArray( - da.ones((15, num_scans * rows_per_scan, num_cols), chunks=1024, - dtype=np.uint16), - attrs={ - "Slope": np.array([1.] 
* 15), "Intercept": np.array([0.] * 15), - fill_value_name: 65535, - "units": "NO", - "valid_range": [0, 4095], - "long_name": b"1km Earth View Science Data", - }, + f"{key_prefix}EV_1KM_RefSB": + xr.DataArray(da.ones((15, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), + attrs={"Slope": np.array([1.] * 15), "Intercept": np.array([0.] * 15), + fill_value_name: 65535, + "units": "NO", + "valid_range": [0, 4095], + "long_name": b"1km Earth View Science Data"}, dims=("_ref_bands", "_rows", "_cols")), "Data/EV_1KM_Emissive": - xr.DataArray( - da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, - dtype=np.uint16), - attrs={ - "Slope": np.array([1.] * 4), "Intercept": np.array([0.] * 4), - "FillValue": 65535, - "units": "mW/ (m2 cm-1 sr)", - "valid_range": [0, 25000], - "long_name": b"1km Emissive Bands Earth View " - b"Science Data", - }, + xr.DataArray(da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), + attrs={"Slope": np.array([1.] * 4), "Intercept": np.array([0.] * 4), + "FillValue": 65535, + "units": "mW/ (m2 cm-1 sr)", + "valid_range": [0, 25000], + "long_name": b"1km Emissive Bands Earth View Science Data"}, dims=("_ir_bands", "_rows", "_cols")), f"{key_prefix}EV_250_Aggr.1KM_RefSB": - xr.DataArray( - da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, - dtype=np.uint16), - attrs={ - "Slope": np.array([1.] * 4), "Intercept": np.array([0.] * 4), - fill_value_name: 65535, - "units": "NO", - "valid_range": [0, 4095], - "long_name": b"250m Reflective Bands Earth View " - b"Science Data Aggregated to 1 km" - }, + xr.DataArray(da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), + attrs={"Slope": np.array([1.] * 4), "Intercept": np.array([0.] * 4), + fill_value_name: 65535, + "units": "NO", + "valid_range": [0, 4095], + "long_name": b"250m Reflective Bands Earth View Science Data Aggregated to 1 km"}, dims=("_ref250_bands", "_rows", "_cols")), f"{key_prefix}EV_250_Aggr.1KM_Emissive": - xr.DataArray( - da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, - dtype=np.uint16), - attrs={ - "Slope": np.array([1.]), "Intercept": np.array([0.]), - fill_value_name: 65535, - "units": radunits, - "valid_range": [0, 4095], - "long_name": b"250m Emissive Bands Earth View " - b"Science Data Aggregated to 1 km" - }, - dims=("_rows", "_cols")) if mersi1 else - xr.DataArray( - da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, - dtype=np.uint16), - attrs={ - "Slope": np.array([1.] * 2), "Intercept": np.array([0.] * 2), - "FillValue": 65535, - "units": "mW/ (m2 cm-1 sr)", - "valid_range": [0, 4095], - "long_name": b"250m Emissive Bands Earth View " - b"Science Data Aggregated to 1 km" - }, + xr.DataArray(da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), + attrs={"Slope": np.array([1.]), "Intercept": np.array([0.]), + fill_value_name: 65535, + "units": radunits, + "valid_range": [0, 4095], + "long_name": b"250m Emissive Bands Earth View Science Data Aggregated to 1 km"}, + dims=("_rows", "_cols")) if is_mersi1 else + xr.DataArray(da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), + attrs={"Slope": np.array([1.] * 2), "Intercept": np.array([0.] 
* 2), + "FillValue": 65535, + "units": "mW/ (m2 cm-1 sr)", + "valid_range": [0, 4095], + "long_name": b"250m Emissive Bands Earth View Science Data Aggregated to 1 km"}, dims=("_ir250_bands", "_rows", "_cols")) - } + } return data @@ -371,10 +347,10 @@ def _add_band_data_file_content(self): is_250m = "_250" in self.filetype_info["file_type"] if is_1km: - return _get_1km_data(num_scans, rows_per_scan, num_cols, old_fy3ab_form=is_fy3ab_mersi1, mersi1=is_mersi1) + return _get_1km_data(num_scans, rows_per_scan, num_cols, self.filetype_info) elif is_250m: if is_mersi1 or is_mersi2: - return _get_250m_data(num_scans, rows_per_scan, num_cols, old_fy3ab_form=is_fy3ab_mersi1) + return _get_250m_data(num_scans, rows_per_scan, num_cols, self.filetype_info) elif is_mersill: return _get_250m_ll_data(num_scans, rows_per_scan, num_cols) else: @@ -410,8 +386,11 @@ def _geo_prefix_for_file_type(self): return "" -def _test_helper(res, band_list, exp_cal, exp_unit, exp_shape): +def _test_helper(res, band_list, exp_result): """Remove test code duplication.""" + exp_cal = exp_result[0] + exp_unit = exp_result[1] + exp_shape = exp_result[2] for band in band_list: assert res[band].attrs["calibration"] == exp_cal assert res[band].attrs["units"] == exp_unit @@ -474,7 +453,7 @@ def test_all_resolutions(self): res = reader.load(["1", "2", "3", "4", "5", "6", "7", "8"]) assert len(res) == 8 - _test_helper(res, ["1", "2", "3", "4"], "reflectance", "%", (2 * 40, 2048 * 2)) + _test_helper(res, ["1", "2", "3", "4"], ("reflectance", "%", (2 * 40, 2048 * 2))) assert res["5"].shape == (2 * 40, 2048 * 2) assert res["5"].attrs["calibration"] == "brightness_temperature" assert res["5"].attrs["units"] == "K" @@ -500,8 +479,8 @@ def test_counts_calib(self): ds_ids.append(make_dataid(name="satellite_zenith_angle")) res = reader.load(ds_ids) assert len(res) == 9 - _test_helper(res, ["1", "2", "3", "4", "5"], "counts", "1", (2 * 40, 2048 * 2)) - _test_helper(res, ["6", "19", "20"], "counts", "1", (2 * 10, 2048)) + _test_helper(res, ["1", "2", "3", "4", "5"], ("counts", "1", (2 * 40, 2048 * 2))) + _test_helper(res, ["6", "19", "20"], ("counts", "1", (2 * 10, 2048))) def test_1km_resolutions(self): """Test loading data when only 1km resolutions are available.""" @@ -532,7 +511,7 @@ def test_1km_resolutions(self): res = reader.load(["1", "2", "3", "4", "5", "6", "7", "8"]) assert len(res) == 8 - _test_helper(res, ["1", "2", "3", "4", "6", "7", "8"], "reflectance", "%", (2 * 10, 2048)) + _test_helper(res, ["1", "2", "3", "4", "6", "7", "8"], ("reflectance", "%", (2 * 10, 2048))) assert res["5"].shape == (2 * 10, 2048) assert res["5"].attrs["calibration"] == "brightness_temperature" assert res["5"].attrs["units"] == "K" @@ -570,7 +549,7 @@ def test_250_resolutions(self): res.__getitem__("6") with pytest.raises(KeyError): res.__getitem__("7") - _test_helper(res, ["1", "2", "3", "4"], "reflectance", "%", (2 * 40, 2048 * 2)) + _test_helper(res, ["1", "2", "3", "4"], ("reflectance", "%", (2 * 40, 2048 * 2))) assert res["5"].shape == (2 * 40, 2048 * 2) assert res["5"].attrs["calibration"] == "brightness_temperature" assert res["5"].attrs["units"] == "K" @@ -646,8 +625,8 @@ def test_all_resolutions(self): res = reader.load(["1", "2", "3", "4", "5", "20", "24", "25"]) assert len(res) == 8 - _test_helper(res, ["1", "2", "3", "4"], "reflectance", "%", (2 * 40, 2048 * 2)) - _test_helper(res, ["24", "25"], "brightness_temperature", "K", (2 * 40, 2048 * 2)) + _test_helper(res, ["1", "2", "3", "4"], ("reflectance", "%", (2 * 40, 2048 * 2))) + 
_test_helper(res, ["24", "25"], ("brightness_temperature", "K", (2 * 40, 2048 * 2))) assert res["5"].shape == (2 * 10, 2048) assert res["5"].attrs["calibration"] == "reflectance" assert res["5"].attrs["units"] == "%" @@ -673,8 +652,8 @@ def test_counts_calib(self): ds_ids.append(make_dataid(name="satellite_zenith_angle")) res = reader.load(ds_ids) assert len(res) == 9 - _test_helper(res, ["1", "2", "3", "4", "24", "25"], "counts", "1", (2 * 40, 2048 * 2)) - _test_helper(res, ["5", "20"], "counts", "1", (2 * 10, 2048)) + _test_helper(res, ["1", "2", "3", "4", "24", "25"], ("counts", "1", (2 * 40, 2048 * 2))) + _test_helper(res, ["5", "20"], ("counts", "1", (2 * 10, 2048))) def test_rad_calib(self): """Test loading data at radiance calibration.""" @@ -693,7 +672,7 @@ def test_rad_calib(self): ds_ids.append(make_dataid(name=band_name, calibration="radiance")) res = reader.load(ds_ids) assert len(res) == 5 - _test_helper(res, ["1", "2", "3", "4"], "radiance", "mW/ (m2 cm-1 sr)", (2 * 40, 2048 * 2)) + _test_helper(res, ["1", "2", "3", "4"], ("radiance", "mW/ (m2 cm-1 sr)", (2 * 40, 2048 * 2))) assert res["5"].shape == (2 * 10, 2048) assert res["5"].attrs["calibration"] == "radiance" assert res["5"].attrs["units"] == "mW/ (m2 cm-1 sr)" @@ -731,8 +710,8 @@ def test_1km_resolutions(self): res = reader.load(["1", "2", "3", "4", "5", "20", "24", "25"]) assert len(res) == 8 - _test_helper(res, ["1", "2", "3", "4"], "reflectance", "%", (2 * 10, 2048)) - _test_helper(res, ["24", "25"], "brightness_temperature", "K", (2 * 10, 2048)) + _test_helper(res, ["1", "2", "3", "4"], ("reflectance", "%", (2 * 10, 2048))) + _test_helper(res, ["24", "25"], ("brightness_temperature", "K", (2 * 10, 2048))) assert res["5"].shape == (2 * 10, 2048) assert res["5"].attrs["calibration"] == "reflectance" assert res["5"].attrs["units"] == "%" @@ -851,8 +830,8 @@ def test_rad_calib(self): ds_ids.append(make_dataid(name=band_name, calibration="radiance")) res = reader.load(ds_ids) assert len(res) == 5 - _test_helper(res, ["1", "3", "4"], "radiance", "mW/ (m2 cm-1 sr)", (2 * 10, 2048)) - _test_helper(res, ["6", "7"], "radiance", "mW/ (m2 cm-1 sr)", (2 * 40, 2048 * 2)) + _test_helper(res, ["1", "3", "4"], ("radiance", "mW/ (m2 cm-1 sr)", (2 * 10, 2048))) + _test_helper(res, ["6", "7"], ("radiance", "mW/ (m2 cm-1 sr)", (2 * 40, 2048 * 2))) def test_1km_resolutions(self): """Test loading data when only 1km resolutions are available.""" @@ -893,7 +872,7 @@ def test_1km_resolutions(self): assert res["1"].shape == (2 * 10, 2048) assert res["1"].attrs["calibration"] == "radiance" assert res["1"].attrs["units"] == "mW/ (m2 cm-1 sr)" - _test_helper(res, ["2", "3", "5", "6", "7"], "brightness_temperature", "K", (2 * 10, 2048)) + _test_helper(res, ["2", "3", "5", "6", "7"], ("brightness_temperature", "K", (2 * 10, 2048))) def test_250_resolutions(self): """Test loading data when only 250m resolutions are available.""" From 9d699cd4c56a62185888266de9b51446baf68330 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Tue, 30 Apr 2024 15:44:20 +0800 Subject: [PATCH 1345/1416] Update test_mersi_l1b.py --- satpy/tests/reader_tests/test_mersi_l1b.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index 4238a92214..d07ca1b078 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -340,7 +340,6 @@ def _add_band_data_file_content(self): num_scans = self.num_scans rows_per_scan = self._rows_per_scan is_mersi1 = 
self.filetype_info["file_type"].startswith(("fy3a_mersi1", "fy3b_mersi1", "fy3c_mersi1")) - is_fy3ab_mersi1 = self.filetype_info["file_type"].startswith(("fy3a_mersi1", "fy3b_mersi1")) is_mersi2 = self.filetype_info["file_type"].startswith("mersi2_") is_mersill = self.filetype_info["file_type"].startswith("mersi_ll") is_1km = "_1000" in self.filetype_info["file_type"] From 26d3b1621fc711ec8630e480b52c5ba9c2766137 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Tue, 30 Apr 2024 16:00:57 +0800 Subject: [PATCH 1346/1416] Update test_mersi_l1b.py --- satpy/tests/reader_tests/test_mersi_l1b.py | 26 +++++++++++----------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index d07ca1b078..188eefd65c 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -473,13 +473,13 @@ def test_counts_calib(self): assert reader.file_handlers ds_ids = [] - for band_name in ["1", "2", "3", "4", "5", "6", "19", "20"]: + for band_name in ["1", "5", "16", "19", "20"]: ds_ids.append(make_dataid(name=band_name, calibration="counts")) ds_ids.append(make_dataid(name="satellite_zenith_angle")) res = reader.load(ds_ids) - assert len(res) == 9 - _test_helper(res, ["1", "2", "3", "4", "5"], ("counts", "1", (2 * 40, 2048 * 2))) - _test_helper(res, ["6", "19", "20"], ("counts", "1", (2 * 10, 2048))) + assert len(res) == 6 + _test_helper(res, ["1", "5"], ("counts", "1", (2 * 40, 2048 * 2))) + _test_helper(res, ["16", "19", "20"], ("counts", "1", (2 * 10, 2048))) def test_1km_resolutions(self): """Test loading data when only 1km resolutions are available.""" @@ -498,7 +498,7 @@ def test_1km_resolutions(self): # - Bands 1-4 (visible) # - Bands 5 (IR) available_datasets = reader.available_dataset_ids - for band_name in ("1", "2", "3", "4", "5"): + for band_name in ("3", "5", "9", "14", "17"): num_results = 2 ds_id = make_dataid(name=band_name, resolution=250) with pytest.raises(KeyError): @@ -508,9 +508,9 @@ def test_1km_resolutions(self): num_results=num_results, best=False) assert num_results == len(res) - res = reader.load(["1", "2", "3", "4", "5", "6", "7", "8"]) - assert len(res) == 8 - _test_helper(res, ["1", "2", "3", "4", "6", "7", "8"], ("reflectance", "%", (2 * 10, 2048))) + res = reader.load(["2", "4", "5", "12", "15", "18"]) + assert len(res) == 6 + _test_helper(res, ["2", "4", "12", "15", "18"], ("reflectance", "%", (2 * 10, 2048))) assert res["5"].shape == (2 * 10, 2048) assert res["5"].attrs["calibration"] == "brightness_temperature" assert res["5"].attrs["units"] == "K" @@ -532,7 +532,7 @@ def test_250_resolutions(self): # - Bands 1-4 (visible) # - Bands 5 (IR) available_datasets = reader.available_dataset_ids - for band_name in ("1", "2", "3", "4", "5"): + for band_name in ("2", "3", "4", "5"): num_results = 2 ds_id = make_dataid(name=band_name, resolution=250) res = get_key(ds_id, available_datasets, @@ -542,13 +542,13 @@ def test_250_resolutions(self): with pytest.raises(KeyError): get_key(ds_id, available_datasets, num_results=num_results, best=False) - res = reader.load(["1", "2", "3", "4", "5", "6", "7"]) - assert len(res) == 5 + res = reader.load(["1", "2", "4", "5", "11", "13", "15"]) + assert len(res) == 4 with pytest.raises(KeyError): res.__getitem__("6") with pytest.raises(KeyError): res.__getitem__("7") - _test_helper(res, ["1", "2", "3", "4"], ("reflectance", "%", (2 * 40, 2048 * 2))) + _test_helper(res, ["1", "2", "4"], ("reflectance", "%", (2 * 
40, 2048 * 2))) assert res["5"].shape == (2 * 40, 2048 * 2) assert res["5"].attrs["calibration"] == "brightness_temperature" assert res["5"].attrs["units"] == "K" @@ -755,7 +755,7 @@ def test_250_resolutions(self): res.__getitem__("5") with pytest.raises(KeyError): res.__getitem__("20") - # _test_helper(res) + _test_helper(res, ["1", "2", "3", "4"], ("reflectance", "%", (2 * 40, 2048 * 2))) assert res["24"].shape == (2 * 40, 2048 * 2) assert res["24"].attrs["calibration"] == "brightness_temperature" assert res["24"].attrs["units"] == "K" From 9495d5d1d9430ad53ecdc3cd8d12ba01d52683dc Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Tue, 30 Apr 2024 16:03:28 +0800 Subject: [PATCH 1347/1416] Update test_mersi_l1b.py --- satpy/tests/reader_tests/test_mersi_l1b.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index 188eefd65c..0b89f9bb82 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -350,10 +350,8 @@ def _add_band_data_file_content(self): elif is_250m: if is_mersi1 or is_mersi2: return _get_250m_data(num_scans, rows_per_scan, num_cols, self.filetype_info) - elif is_mersill: - return _get_250m_ll_data(num_scans, rows_per_scan, num_cols) else: - return + return _get_250m_ll_data(num_scans, rows_per_scan, num_cols) else: return _get_500m_data(num_scans, rows_per_scan, num_cols) From f6f2105090fd6e5265202ebbde6569ad00301a0f Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Tue, 30 Apr 2024 16:05:42 +0800 Subject: [PATCH 1348/1416] Update test_mersi_l1b.py --- satpy/tests/reader_tests/test_mersi_l1b.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index 0b89f9bb82..4062bd0e20 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -339,8 +339,6 @@ def _add_band_data_file_content(self): num_cols = self._num_cols_for_file_type num_scans = self.num_scans rows_per_scan = self._rows_per_scan - is_mersi1 = self.filetype_info["file_type"].startswith(("fy3a_mersi1", "fy3b_mersi1", "fy3c_mersi1")) - is_mersi2 = self.filetype_info["file_type"].startswith("mersi2_") is_mersill = self.filetype_info["file_type"].startswith("mersi_ll") is_1km = "_1000" in self.filetype_info["file_type"] is_250m = "_250" in self.filetype_info["file_type"] @@ -348,10 +346,10 @@ def _add_band_data_file_content(self): if is_1km: return _get_1km_data(num_scans, rows_per_scan, num_cols, self.filetype_info) elif is_250m: - if is_mersi1 or is_mersi2: - return _get_250m_data(num_scans, rows_per_scan, num_cols, self.filetype_info) - else: + if is_mersill: return _get_250m_ll_data(num_scans, rows_per_scan, num_cols) + else: + return _get_250m_data(num_scans, rows_per_scan, num_cols, self.filetype_info) else: return _get_500m_data(num_scans, rows_per_scan, num_cols) From bfe22db57508ef6478ab3fcb9bf4dd6caf8148db Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Tue, 30 Apr 2024 20:01:10 +0800 Subject: [PATCH 1349/1416] Update test_mersi_l1b.py --- satpy/tests/reader_tests/test_mersi_l1b.py | 221 +++++++++------------ 1 file changed, 93 insertions(+), 128 deletions(-) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index 4062bd0e20..e8c7d50d4e 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -392,6 
+392,17 @@ def _test_helper(res, band_list, exp_result):
         assert res[band].shape == exp_shape
 
 
+def _test_find_files_and_readers(reader_config, filenames):
+    from satpy.readers import load_reader
+    reader = load_reader(reader_config)
+    files = reader.select_files_from_pathnames(filenames)
+    # Make sure we have some files
+    reader.create_filehandlers(files)
+    assert len(files) == len(filenames)
+    assert reader.file_handlers
+    return files, reader
+
+
 class MERSIL1BTester:
     """Test MERSI1/2/LL/RM L1B Reader."""
 
@@ -419,54 +430,80 @@ class MERSI1L1BTester(MERSIL1BTester):
     filenames_all: list = []
 
     def test_all_resolutions(self):
-        """Test loading data when all resolutions are available."""
+        """Test loading data when all resolutions or a specific one are available."""
        from satpy.dataset.data_dict import get_key
-        from satpy.readers import load_reader
         from satpy.tests.utils import make_dataid
-        filenames = self.filenames_all
-        reader = load_reader(self.reader_configs)
-        files = reader.select_files_from_pathnames(filenames)
-        assert len(files) == len(filenames)
-        reader.create_filehandlers(files)
-        # Make sure we have some files
-        assert reader.file_handlers
-
-        # Verify that we have multiple resolutions for:
-        # - Bands 1-4 (visible)
-        # - Bands 5 (IR)
-        available_datasets = reader.available_dataset_ids
-        for band_name in ("1", "2", "3", "4", "5"):
-            num_results = 2
-            ds_id = make_dataid(name=band_name, resolution=250)
-            res = get_key(ds_id, available_datasets,
-                          num_results=num_results, best=False)
-            assert num_results == len(res)
-            ds_id = make_dataid(name=band_name, resolution=1000)
-            res = get_key(ds_id, available_datasets,
-                          num_results=num_results, best=False)
-            assert num_results == len(res)
+        from satpy.readers import load_reader
 
-        res = reader.load(["1", "2", "3", "4", "5", "6", "7", "8"])
-        assert len(res) == 8
-        _test_helper(res, ["1", "2", "3", "4"], ("reflectance", "%", (2 * 40, 2048 * 2)))
-        assert res["5"].shape == (2 * 40, 2048 * 2)
-        assert res["5"].attrs["calibration"] == "brightness_temperature"
-        assert res["5"].attrs["units"] == "K"
-        assert res["6"].shape == (2 * 10, 2048)
-        assert res["6"].attrs["calibration"] == "reflectance"
-        assert res["6"].attrs["units"] == "%"
+        resolution_list = ["all", "250", "1000"]
+        file_list = [self.filenames_all, self.filenames_250m, self.filenames_1000m]
+        vis_250_bands = ["1", "2", "3", "4"]
+        ir_250_bands = ["5"]
+        vis_1000_bands = ["6", "7", "8", "11", "15", "19", "20"]
+        ir_1000_bands = []
+        bands_1000 = vis_1000_bands + ir_1000_bands
+        bands_250 = vis_250_bands + ir_250_bands
+
+        for resolution in resolution_list:
+            filenames = file_list[resolution_list.index(resolution)]
+            reader = load_reader(self.reader_configs)
+            files = reader.select_files_from_pathnames(filenames)
+            assert len(files) == len(filenames)
+            reader.create_filehandlers(files)
+            # Make sure we have some files
+            assert reader.file_handlers
+
+            # Verify that we have multiple resolutions for:
+            # - Bands 1-4 (visible)
+            # - Bands 5 (IR)
+            available_datasets = reader.available_dataset_ids
+            for band_name in bands_250:
+                num_results = 2  # ("reflectance"/"brightness temperature" and "counts")
+
+                ds_id = make_dataid(name=band_name, resolution=250)
+                if resolution == "1000":
+                    with pytest.raises(KeyError):
+                        get_key(ds_id, available_datasets, num_results=num_results, best=False)
+                else:
+                    res = get_key(ds_id, available_datasets, num_results=num_results, best=False)
+                    assert num_results == len(res)
+
+                ds_id = make_dataid(name=band_name, resolution=1000)
+                if resolution == "250":
+                    with pytest.raises(KeyError):
+                        get_key(ds_id, available_datasets, num_results=num_results, best=False)
+                else:
+                    res = get_key(ds_id, available_datasets, num_results=num_results, best=False)
+                    assert num_results == len(res)
+
+            res = reader.load(bands_1000 + bands_250)
+            if resolution != "250":
+                assert len(res) == len(bands_1000 + bands_250)
+            else:
+                assert len(res) == len(bands_250)
+                for band in bands_1000:
+                    with pytest.raises(KeyError):
+                        res.__getitem__(band)
+
+            if resolution in ["all", "250"]:
+                _test_helper(res, vis_250_bands, ("reflectance", "%", (2 * 40, 2048 * 2)))
+                _test_helper(res, ir_250_bands, ("brightness_temperature", "K", (2 * 40, 2048 * 2)))
+
+            if resolution == "all":
+                _test_helper(res, vis_1000_bands, ("reflectance", "%", (2 * 10, 2048)))
+                _test_helper(res, ir_1000_bands, ("brightness_temperature", "K", (2 * 10, 2048)))
+            else:
+                _test_helper(res, vis_250_bands, ("reflectance", "%", (2 * 10, 2048)))
+                _test_helper(res, vis_1000_bands, ("reflectance", "%", (2 * 10, 2048)))
+                _test_helper(res, ir_250_bands, ("brightness_temperature", "K", (2 * 10, 2048)))
+                _test_helper(res, ir_1000_bands, ("brightness_temperature", "K", (2 * 10, 2048)))
 
     def test_counts_calib(self):
         """Test loading data at counts calibration."""
-        from satpy.readers import load_reader
+        from satpy.dataset.data_dict import get_key
         from satpy.tests.utils import make_dataid
         filenames = self.filenames_all
-        reader = load_reader(self.reader_configs)
-        files = reader.select_files_from_pathnames(filenames)
-        assert len(files) == len(filenames)
-        reader.create_filehandlers(files)
-        # Make sure we have some files
-        assert reader.file_handlers
+        files, reader = _test_find_files_and_readers(self.reader_configs, filenames)
 
         ds_ids = []
@@ -477,78 +514,6 @@ def test_counts_calib(self):
         _test_helper(res, ["16", "19", "20"], ("counts", "1", (2 * 10, 2048)))
 
-    def test_1km_resolutions(self):
-        """Test loading data when only 1km resolutions are available."""
-        from satpy.dataset.data_dict import get_key
-        from satpy.readers import load_reader
-        from satpy.tests.utils import make_dataid
-        filenames = self.filenames_1000m
-        reader = load_reader(self.reader_configs)
-        files = reader.select_files_from_pathnames(filenames)
-        assert len(files) == len(filenames)
-        reader.create_filehandlers(files)
-        # Make sure we have some files
-        assert reader.file_handlers
-
-        # Verify that we have multiple resolutions for:
-        # - Bands 1-4 (visible)
-        # - Bands 5 (IR)
-        available_datasets = reader.available_dataset_ids
-        for band_name in ("3", "5", "9", "14", "17"):
-            num_results = 2
-            ds_id = make_dataid(name=band_name, resolution=250)
-            with pytest.raises(KeyError):
-                get_key(ds_id, available_datasets, num_results=num_results, best=False)
-            ds_id = make_dataid(name=band_name, resolution=1000)
-            res = get_key(ds_id, available_datasets,
-                          num_results=num_results, best=False)
-            assert num_results == len(res)
-
-        res = reader.load(["2", "4", "5", "12", "15", "18"])
-        assert len(res) == 6
-        _test_helper(res, ["2", "4", "12", "15", "18"], ("reflectance", "%", (2 * 10, 2048)))
-        assert res["5"].shape == (2 * 10, 2048)
-        assert res["5"].attrs["calibration"] == "brightness_temperature"
-        assert res["5"].attrs["units"] == "K"
-
-    def test_250_resolutions(self):
-        """Test loading data when only 250m resolutions are available."""
-        from satpy.dataset.data_dict import get_key
-        from satpy.readers import load_reader
-        from
satpy.tests.utils import make_dataid
-        filenames = self.filenames_250m
-        reader = load_reader(self.reader_configs)
-        files = reader.select_files_from_pathnames(filenames)
-        assert len(files) == len(filenames)
-        reader.create_filehandlers(files)
-        # Make sure we have some files
-        assert reader.file_handlers
-
-        # Verify that we have multiple resolutions for:
-        # - Bands 1-4 (visible)
-        # - Bands 5 (IR)
-        available_datasets = reader.available_dataset_ids
-        for band_name in ("2", "3", "4", "5"):
-            num_results = 2
-            ds_id = make_dataid(name=band_name, resolution=250)
-            res = get_key(ds_id, available_datasets,
-                          num_results=num_results, best=False)
-            assert num_results == len(res)
-            ds_id = make_dataid(name=band_name, resolution=1000)
-            with pytest.raises(KeyError):
-                get_key(ds_id, available_datasets, num_results=num_results, best=False)
-
-        res = reader.load(["1", "2", "4", "5", "11", "13", "15"])
-        assert len(res) == 4
-        with pytest.raises(KeyError):
-            res.__getitem__("6")
-        with pytest.raises(KeyError):
-            res.__getitem__("7")
-        _test_helper(res, ["1", "2", "4"], ("reflectance", "%", (2 * 40, 2048 * 2)))
-        assert res["5"].shape == (2 * 40, 2048 * 2)
-        assert res["5"].attrs["calibration"] == "brightness_temperature"
-        assert res["5"].attrs["units"] == "K"
-
 
 class TestFY3AMERSI1L1B(MERSI1L1BTester):
@@ -559,22 +524,22 @@ class TestFY3AMERSI1L1B(MERSI1L1BTester):
     filenames_all = filenames_1000m + filenames_250m
 
 
-class TestFY3BMERSI1L1B(MERSI1L1BTester):
-    """Test the FY3A MERSI1 L1B reader."""
-
-    yaml_file = "fy3b_mersi1_l1b.yaml"
-    filenames_1000m = ["FY3B_MERSI_GBAL_L1_20110824_1850_1000M_MS.hdf"]
-    filenames_250m = ["FY3B_MERSI_GBAL_L1_20110824_1850_0250M_MS.hdf", "FY3B_MERSI_GBAL_L1_20110824_1850_GEOXX_MS.hdf"]
-    filenames_all = filenames_1000m + filenames_250m
-
-
-class TestFY3CMERSI1L1B(MERSI1L1BTester):
-    """Test the FY3A MERSI1 L1B reader."""
-
-    yaml_file = "fy3c_mersi1_l1b.yaml"
-    filenames_1000m = ["FY3C_MERSI_GBAL_L1_20131002_1835_1000M_MS.hdf", "FY3C_MERSI_GBAL_L1_20131002_1835_GEO1K_MS.hdf"]
-    filenames_250m = ["FY3C_MERSI_GBAL_L1_20131002_1835_0250M_MS.hdf", "FY3C_MERSI_GBAL_L1_20131002_1835_GEOQK_MS.hdf"]
-    filenames_all = filenames_1000m + filenames_250m
+# class TestFY3BMERSI1L1B(MERSI1L1BTester):
+#     """Test the FY3B MERSI1 L1B reader."""
+#
+#     yaml_file = "fy3b_mersi1_l1b.yaml"
+#     filenames_1000m = ["FY3B_MERSI_GBAL_L1_20110824_1850_1000M_MS.hdf"]
+#     filenames_250m = ["FY3B_MERSI_GBAL_L1_20110824_1850_0250M_MS.hdf", "FY3B_MERSI_GBAL_L1_20110824_1850_GEOXX_MS.hdf"]
+#     filenames_all = filenames_1000m + filenames_250m
+#
+#
+# class TestFY3CMERSI1L1B(MERSI1L1BTester):
+#     """Test the FY3C MERSI1 L1B reader."""
+#
+#     yaml_file = "fy3c_mersi1_l1b.yaml"
+#     filenames_1000m = ["FY3C_MERSI_GBAL_L1_20131002_1835_1000M_MS.hdf", "FY3C_MERSI_GBAL_L1_20131002_1835_GEO1K_MS.hdf"]
+#     filenames_250m = ["FY3C_MERSI_GBAL_L1_20131002_1835_0250M_MS.hdf", "FY3C_MERSI_GBAL_L1_20131002_1835_GEOQK_MS.hdf"]
+#     filenames_all = filenames_1000m + filenames_250m
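Both the availability checks above and the `_test_multi_resolutions` helper factored out in the next patch lean on one property of satpy's `get_key`: it raises `KeyError` when no `DataID` matches, and otherwise returns the list of matches when `num_results != 1`. A minimal sketch of an availability probe built on that behavior (the helper name is hypothetical, not part of these patches):

    from satpy.dataset.data_dict import get_key
    from satpy.tests.utils import make_dataid

    def resolution_available(available_ids, band, resolution):
        """Return True if any loaded file offers this band at this resolution."""
        try:
            # num_results=0 asks for every match instead of the single best one
            get_key(make_dataid(name=band, resolution=resolution),
                    available_ids, num_results=0, best=False)
        except KeyError:
            return False
        return True

The tests go one step further than this sketch: they assert the exact number of matches (two per band, one DataID per calibration) rather than mere availability.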
From 9ff7419450ad42c21d8b071fda2b47daaca7a0ca Mon Sep 17 00:00:00 2001
From: yukaribbba
Date: Tue, 30 Apr 2024 20:23:15 +0800
Subject: [PATCH 1350/1416] Update test_mersi_l1b.py

---
 satpy/tests/reader_tests/test_mersi_l1b.py | 38 ++++++++++++----------
 1 file changed, 21 insertions(+), 17 deletions(-)

diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py
index e8c7d50d4e..976dee5fef 100644
--- a/satpy/tests/reader_tests/test_mersi_l1b.py
+++ b/satpy/tests/reader_tests/test_mersi_l1b.py
@@ -403,6 +403,26 @@ def _test_find_files_and_readers(reader_config, filenames):
     return files, reader
 
 
+def _test_multi_resolutions(available_datasets, band_name, test_resolution, cal_results_number):
+    from satpy.dataset.data_dict import get_key
+    from satpy.tests.utils import make_dataid
+    ds_id = make_dataid(name=band_name, resolution=250)
+    if test_resolution == "1000":
+        with pytest.raises(KeyError):
+            get_key(ds_id, available_datasets, num_results=cal_results_number, best=False)
+    else:
+        res = get_key(ds_id, available_datasets, num_results=cal_results_number, best=False)
+        assert len(res) == cal_results_number
+
+    ds_id = make_dataid(name=band_name, resolution=1000)
+    if test_resolution == "250":
+        with pytest.raises(KeyError):
+            get_key(ds_id, available_datasets, num_results=cal_results_number, best=False)
+    else:
+        res = get_key(ds_id, available_datasets, num_results=cal_results_number, best=False)
+        assert len(res) == cal_results_number
+
+
 class MERSIL1BTester:
     """Test MERSI1/2/LL/RM L1B Reader."""
 
@@ -459,22 +479,7 @@ def test_all_resolutions(self):
             available_datasets = reader.available_dataset_ids
             for band_name in bands_250:
                 num_results = 2  # ("reflectance"/"brightness temperature" and "counts")
-
-                ds_id = make_dataid(name=band_name, resolution=250)
-                if resolution == "1000":
-                    with pytest.raises(KeyError):
-                        get_key(ds_id, available_datasets, num_results=num_results, best=False)
-                else:
-                    res = get_key(ds_id, available_datasets, num_results=num_results, best=False)
-                    assert num_results == len(res)
-
-                ds_id = make_dataid(name=band_name, resolution=1000)
-                if resolution == "250":
-                    with pytest.raises(KeyError):
-                        get_key(ds_id, available_datasets, num_results=num_results, best=False)
-                else:
-                    res = get_key(ds_id, available_datasets, num_results=num_results, best=False)
-                    assert num_results == len(res)
+                _test_multi_resolutions(available_datasets, band_name, resolution, num_results)
 
             res = reader.load(bands_1000 + bands_250)
             if resolution != "250":
@@ -500,7 +505,6 @@ def test_all_resolutions(self):
 
     def test_counts_calib(self):
         """Test loading data at counts calibration."""
-        from satpy.dataset.data_dict import get_key
         from satpy.tests.utils import make_dataid
         filenames = self.filenames_all
         files, reader = _test_find_files_and_readers(self.reader_configs, filenames)

From 9600428d6fdd2dd9bd298d09983902d5d9a85d2a Mon Sep 17 00:00:00 2001
From: yukaribbba
Date: Tue, 30 Apr 2024 21:19:30 +0800
Subject: [PATCH 1351/1416] Update test_mersi_l1b.py

---
 satpy/tests/reader_tests/test_mersi_l1b.py | 113 ++++++++++-----------
 1 file changed, 53 insertions(+), 60 deletions(-)

diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py
index 976dee5fef..5956fe9a63 100644
--- a/satpy/tests/reader_tests/test_mersi_l1b.py
+++ b/satpy/tests/reader_tests/test_mersi_l1b.py
@@ -400,27 +400,28 @@ def _test_find_files_and_readers(reader_config, filenames):
     reader.create_filehandlers(files)
     assert len(files) == len(filenames)
     assert reader.file_handlers
-    return files, reader
+    return reader
 
 
-def _test_multi_resolutions(available_datasets, band_name, test_resolution, cal_results_number):
-    from satpy.dataset.data_dict import get_key
-    from satpy.tests.utils import make_dataid
-    ds_id = make_dataid(name=band_name, resolution=250)
-    if test_resolution == "1000":
-        with pytest.raises(KeyError):
-            get_key(ds_id, available_datasets, num_results=cal_results_number, best=False)
-    else:
-        res = get_key(ds_id,
available_datasets, num_results=cal_results_number, best=False)
-        assert len(res) == cal_results_number
+def _test_multi_resolutions(available_datasets, band_list, test_resolution, cal_results_number):
+    for band_name in band_list:
+        from satpy.dataset.data_dict import get_key
+        from satpy.tests.utils import make_dataid
+        ds_id = make_dataid(name=band_name, resolution=250)
+        if test_resolution == "1000":
+            with pytest.raises(KeyError):
+                get_key(ds_id, available_datasets, num_results=cal_results_number, best=False)
+        else:
+            res = get_key(ds_id, available_datasets, num_results=cal_results_number, best=False)
+            assert len(res) == cal_results_number
 
-    ds_id = make_dataid(name=band_name, resolution=1000)
-    if test_resolution == "250":
-        with pytest.raises(KeyError):
-            get_key(ds_id, available_datasets, num_results=cal_results_number, best=False)
-    else:
-        res = get_key(ds_id, available_datasets, num_results=cal_results_number, best=False)
-        assert len(res) == cal_results_number
+        ds_id = make_dataid(name=band_name, resolution=1000)
+        if test_resolution == "250":
+            with pytest.raises(KeyError):
+                get_key(ds_id, available_datasets, num_results=cal_results_number, best=False)
+        else:
+            res = get_key(ds_id, available_datasets, num_results=cal_results_number, best=False)
+            assert len(res) == cal_results_number
 
 
 class MERSIL1BTester:
@@ -448,38 +449,30 @@ class MERSI1L1BTester(MERSIL1BTester):
     filenames_1000m: list= []
     filenames_250m: list = []
     filenames_all: list = []
+    vis_250_bands = ["1", "2", "3", "4"]
+    ir_250_bands = ["5"]
+    vis_1000_bands = ["6", "7", "8", "11", "15", "19", "20"]
+    ir_1000_bands = []
 
     def test_all_resolutions(self):
         """Test loading data when all resolutions or a specific one are available."""
-        from satpy.dataset.data_dict import get_key
-        from satpy.tests.utils import make_dataid
         from satpy.readers import load_reader
 
         resolution_list = ["all", "250", "1000"]
         file_list = [self.filenames_all, self.filenames_250m, self.filenames_1000m]
-        vis_250_bands = ["1", "2", "3", "4"]
-        ir_250_bands = ["5"]
-        vis_1000_bands = ["6", "7", "8", "11", "15", "19", "20"]
-        ir_1000_bands = []
-        bands_1000 = vis_1000_bands + ir_1000_bands
-        bands_250 = vis_250_bands + ir_250_bands
+        bands_1000 = self.vis_1000_bands + self.ir_1000_bands
+        bands_250 = self.vis_250_bands + self.ir_250_bands
 
         for resolution in resolution_list:
             filenames = file_list[resolution_list.index(resolution)]
-            reader = load_reader(self.reader_configs)
-            files = reader.select_files_from_pathnames(filenames)
-            assert len(files) == len(filenames)
-            reader.create_filehandlers(files)
-            # Make sure we have some files
-            assert reader.file_handlers
+            reader = _test_find_files_and_readers(self.reader_configs, filenames)
 
             # Verify that we have multiple resolutions for:
             # - Bands 1-4 (visible)
             # - Bands 5 (IR)
             available_datasets = reader.available_dataset_ids
-            for band_name in bands_250:
-                num_results = 2  # ("reflectance"/"brightness temperature" and "counts")
-                _test_multi_resolutions(available_datasets, band_name, resolution, num_results)
+            num_results = 2  # ("reflectance"/"brightness temperature" and "counts")
+            _test_multi_resolutions(available_datasets, bands_250, resolution, num_results)
 
             res = reader.load(bands_1000 + bands_250)
             if resolution != "250":
@@ -491,23 +484,23 @@ def test_all_resolutions(self):
                     res.__getitem__(band)
 
             if resolution in ["all", "250"]:
-                _test_helper(res, vis_250_bands, ("reflectance", "%", (2 * 40, 2048 * 2)))
-                _test_helper(res, ir_250_bands, ("brightness_temperature", "K", (2 * 40, 2048 * 2)))
_test_helper(res, self.vis_250_bands, ("reflectance", "%", (2 * 40, 2048 * 2))) + _test_helper(res, self.ir_250_bands, ("brightness_temperature", "K", (2 * 40, 2048 * 2))) if resolution == "all": - _test_helper(res, vis_1000_bands, ("reflectance", "%", (2 * 10, 2048))) - _test_helper(res, ir_1000_bands, ("brightness_temperature", "K", (2 * 10, 2048))) + _test_helper(res, self.vis_1000_bands, ("reflectance", "%", (2 * 10, 2048))) + _test_helper(res, self.ir_1000_bands, ("brightness_temperature", "K", (2 * 10, 2048))) else: - _test_helper(res, vis_250_bands, ("reflectance", "%", (2 * 10, 2048))) - _test_helper(res, vis_1000_bands, ("reflectance", "%", (2 * 10, 2048))) - _test_helper(res, ir_250_bands, ("brightness_temperature", "K", (2 * 10, 2048))) - _test_helper(res, ir_1000_bands, ("brightness_temperature", "K", (2 * 10, 2048))) + _test_helper(res, self.vis_250_bands, ("reflectance", "%", (2 * 10, 2048))) + _test_helper(res, self.vis_1000_bands, ("reflectance", "%", (2 * 10, 2048))) + _test_helper(res, self.ir_250_bands, ("brightness_temperature", "K", (2 * 10, 2048))) + _test_helper(res, self.ir_1000_bands, ("brightness_temperature", "K", (2 * 10, 2048))) def test_counts_calib(self): """Test loading data at counts calibration.""" from satpy.tests.utils import make_dataid filenames = self.filenames_all - files, reader = _test_find_files_and_readers(self.reader_configs, filenames) + reader = _test_find_files_and_readers(self.reader_configs, filenames) ds_ids = [] for band_name in ["1", "5", "16", "19", "20"]: @@ -528,22 +521,22 @@ class TestFY3AMERSI1L1B(MERSI1L1BTester): filenames_all = filenames_1000m + filenames_250m -# class TestFY3BMERSI1L1B(MERSI1L1BTester): -# """Test the FY3A MERSI1 L1B reader.""" -# -# yaml_file = "fy3b_mersi1_l1b.yaml" -# filenames_1000m = ["FY3B_MERSI_GBAL_L1_20110824_1850_1000M_MS.hdf"] -# filenames_250m = ["FY3B_MERSI_GBAL_L1_20110824_1850_0250M_MS.hdf", "FY3B_MERSI_GBAL_L1_20110824_1850_GEOXX_MS.hdf"] -# filenames_all = filenames_1000m + filenames_250m -# -# -# class TestFY3CMERSI1L1B(MERSI1L1BTester): -# """Test the FY3A MERSI1 L1B reader.""" -# -# yaml_file = "fy3c_mersi1_l1b.yaml" -# filenames_1000m = ["FY3C_MERSI_GBAL_L1_20131002_1835_1000M_MS.hdf", "FY3C_MERSI_GBAL_L1_20131002_1835_GEO1K_MS.hdf"] -# filenames_250m = ["FY3C_MERSI_GBAL_L1_20131002_1835_0250M_MS.hdf", "FY3C_MERSI_GBAL_L1_20131002_1835_GEOQK_MS.hdf"] -# filenames_all = filenames_1000m + filenames_250m +class TestFY3BMERSI1L1B(MERSI1L1BTester): + """Test the FY3A MERSI1 L1B reader.""" + + yaml_file = "fy3b_mersi1_l1b.yaml" + filenames_1000m = ["FY3B_MERSI_GBAL_L1_20110824_1850_1000M_MS.hdf"] + filenames_250m = ["FY3B_MERSI_GBAL_L1_20110824_1850_0250M_MS.hdf", "FY3B_MERSI_GBAL_L1_20110824_1850_GEOXX_MS.hdf"] + filenames_all = filenames_1000m + filenames_250m + + +class TestFY3CMERSI1L1B(MERSI1L1BTester): + """Test the FY3A MERSI1 L1B reader.""" + + yaml_file = "fy3c_mersi1_l1b.yaml" + filenames_1000m = ["FY3C_MERSI_GBAL_L1_20131002_1835_1000M_MS.hdf", "FY3C_MERSI_GBAL_L1_20131002_1835_GEO1K_MS.hdf"] + filenames_250m = ["FY3C_MERSI_GBAL_L1_20131002_1835_0250M_MS.hdf", "FY3C_MERSI_GBAL_L1_20131002_1835_GEOQK_MS.hdf"] + filenames_all = filenames_1000m + filenames_250m From 96c8672af0e468806f271d36e6c094c910a62675 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Tue, 30 Apr 2024 21:27:02 +0800 Subject: [PATCH 1352/1416] Update test_mersi_l1b.py --- satpy/tests/reader_tests/test_mersi_l1b.py | 50 +++++++++++++++------- 1 file changed, 34 insertions(+), 16 deletions(-) diff --git 
a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index 5956fe9a63..6c9bb8d7af 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -445,14 +445,16 @@ def teardown_method(self): class MERSI1L1BTester(MERSIL1BTester): """Test MERSI1 L1B Reader.""" - yaml_file = "" + yaml_file: str = "" filenames_1000m: list= [] filenames_250m: list = [] filenames_all: list = [] - vis_250_bands = ["1", "2", "3", "4"] - ir_250_bands = ["5"] - vis_1000_bands = ["6", "7", "8", "11", "15", "19", "20"] - ir_1000_bands = [] + vis_250_bands: list = [] + ir_250_bands: list = [] + vis_1000_bands: list = [] + ir_1000_bands: list = [] + bands_1000: list = [] + bands_250: list = [] def test_all_resolutions(self): """Test loading data when all resolutions or specific one are available.""" @@ -460,8 +462,6 @@ def test_all_resolutions(self): resolution_list = ["all", "250", "1000"] file_list = [self.filenames_all, self.filenames_250m, self.filenames_1000m] - bands_1000 = self.vis_1000_bands + self.ir_1000_bands - bands_250 = self.vis_250_bands + self.ir_250_bands for resolution in resolution_list: filenames = file_list[resolution_list.index(resolution)] @@ -472,14 +472,14 @@ def test_all_resolutions(self): # - Bands 5 (IR) available_datasets = reader.available_dataset_ids num_results = 2 # ("reflectance"/"brightness temperature" and "coutns") - _test_multi_resolutions(available_datasets, bands_250, resolution, num_results) + _test_multi_resolutions(available_datasets, self.bands_250, resolution, num_results) - res = reader.load(bands_1000 + bands_250) + res = reader.load(self.bands_1000 + self.bands_250) if resolution != "250": - assert len(res) == len(bands_1000 + bands_250) + assert len(res) == len(self.bands_1000 + self.bands_250) else: - assert len(res) == len(bands_250) - for band in bands_1000: + assert len(res) == len(self.bands_250) + for band in self.bands_1000: with pytest.raises(KeyError): res.__getitem__(band) @@ -503,13 +503,13 @@ def test_counts_calib(self): reader = _test_find_files_and_readers(self.reader_configs, filenames) ds_ids = [] - for band_name in ["1", "5", "16", "19", "20"]: + for band_name in self.bands_1000 + self.bands_250: ds_ids.append(make_dataid(name=band_name, calibration="counts")) ds_ids.append(make_dataid(name="satellite_zenith_angle")) res = reader.load(ds_ids) - assert len(res) == 6 - _test_helper(res, ["1", "5"], ("counts", "1", (2 * 40, 2048 * 2))) - _test_helper(res, ["16", "19", "20"], ("counts", "1", (2 * 10, 2048))) + assert len(res) == len(self.bands_1000) + len(self.bands_250) + 1 + _test_helper(res, self.bands_250, ("counts", "1", (2 * 40, 2048 * 2))) + _test_helper(res, self.bands_1000, ("counts", "1", (2 * 10, 2048))) class TestFY3AMERSI1L1B(MERSI1L1BTester): @@ -519,6 +519,12 @@ class TestFY3AMERSI1L1B(MERSI1L1BTester): filenames_1000m = ["FY3A_MERSI_GBAL_L1_20090601_1200_1000M_MS.hdf"] filenames_250m = ["FY3A_MERSI_GBAL_L1_20090601_1200_0250M_MS.hdf"] filenames_all = filenames_1000m + filenames_250m + vis_250_bands = ["1", "2", "3", "4"] + ir_250_bands = ["5"] + vis_1000_bandst = ["6", "7", "8", "11", "15", "19", "20"] + ir_1000_bands = [] + bands_1000 = vis_1000_bandst + ir_1000_bands + bands_250 = vis_250_bands + ir_250_bands class TestFY3BMERSI1L1B(MERSI1L1BTester): @@ -528,6 +534,12 @@ class TestFY3BMERSI1L1B(MERSI1L1BTester): filenames_1000m = ["FY3B_MERSI_GBAL_L1_20110824_1850_1000M_MS.hdf"] filenames_250m = ["FY3B_MERSI_GBAL_L1_20110824_1850_0250M_MS.hdf", 
"FY3B_MERSI_GBAL_L1_20110824_1850_GEOXX_MS.hdf"] filenames_all = filenames_1000m + filenames_250m + vis_250_bands = ["1", "2", "3", "4"] + ir_250_bands = ["5"] + vis_1000_bandst = ["6", "7", "8", "11", "15", "19", "20"] + ir_1000_bands = [] + bands_1000 = vis_1000_bandst + ir_1000_bands + bands_250 = vis_250_bands + ir_250_bands class TestFY3CMERSI1L1B(MERSI1L1BTester): @@ -537,6 +549,12 @@ class TestFY3CMERSI1L1B(MERSI1L1BTester): filenames_1000m = ["FY3C_MERSI_GBAL_L1_20131002_1835_1000M_MS.hdf", "FY3C_MERSI_GBAL_L1_20131002_1835_GEO1K_MS.hdf"] filenames_250m = ["FY3C_MERSI_GBAL_L1_20131002_1835_0250M_MS.hdf", "FY3C_MERSI_GBAL_L1_20131002_1835_GEOQK_MS.hdf"] filenames_all = filenames_1000m + filenames_250m + vis_250_bands = ["1", "2", "3", "4"] + ir_250_bands = ["5"] + vis_1000_bandst = ["6", "7", "8", "11", "15", "19", "20"] + ir_1000_bands = [] + bands_1000 = vis_1000_bandst + ir_1000_bands + bands_250 = vis_250_bands + ir_250_bands From 62ef70ddc11596853c8ff08c9802fd5067f1eb7a Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Tue, 30 Apr 2024 23:55:47 +0800 Subject: [PATCH 1353/1416] calibration and tests --- satpy/etc/readers/fy3a_mersi1_l1b.yaml | 3 + satpy/etc/readers/fy3b_mersi1_l1b.yaml | 3 + satpy/etc/readers/fy3c_mersi1_l1b.yaml | 3 + satpy/etc/readers/mersi2_l1b.yaml | 20 +- satpy/etc/readers/mersi_ll_l1b.yaml | 29 +- satpy/readers/mersi_l1b.py | 3 + satpy/tests/reader_tests/test_mersi_l1b.py | 391 +++------------------ 7 files changed, 113 insertions(+), 339 deletions(-) diff --git a/satpy/etc/readers/fy3a_mersi1_l1b.yaml b/satpy/etc/readers/fy3a_mersi1_l1b.yaml index 0f16f46454..7ce31300bb 100644 --- a/satpy/etc/readers/fy3a_mersi1_l1b.yaml +++ b/satpy/etc/readers/fy3a_mersi1_l1b.yaml @@ -125,6 +125,9 @@ datasets: brightness_temperature: units: "K" standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts diff --git a/satpy/etc/readers/fy3b_mersi1_l1b.yaml b/satpy/etc/readers/fy3b_mersi1_l1b.yaml index 02f7e14883..65ce53cd73 100644 --- a/satpy/etc/readers/fy3b_mersi1_l1b.yaml +++ b/satpy/etc/readers/fy3b_mersi1_l1b.yaml @@ -131,6 +131,9 @@ datasets: brightness_temperature: units: "K" standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts diff --git a/satpy/etc/readers/fy3c_mersi1_l1b.yaml b/satpy/etc/readers/fy3c_mersi1_l1b.yaml index 31c52d5ee5..e797b52405 100644 --- a/satpy/etc/readers/fy3c_mersi1_l1b.yaml +++ b/satpy/etc/readers/fy3c_mersi1_l1b.yaml @@ -145,6 +145,9 @@ datasets: brightness_temperature: units: "K" standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts diff --git a/satpy/etc/readers/mersi2_l1b.yaml b/satpy/etc/readers/mersi2_l1b.yaml index 78bd861169..3e0ecb390c 100644 --- a/satpy/etc/readers/mersi2_l1b.yaml +++ b/satpy/etc/readers/mersi2_l1b.yaml @@ -468,8 +468,6 @@ datasets: counts: units: "1" standard_name: counts - - # Not sure how to get radiance for BT channels '20': name: '20' wavelength: [3.710, 3.800, 3.890] @@ -484,6 +482,9 @@ datasets: brightness_temperature: units: "K" standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts @@ -501,6 +502,9 @@ datasets: 
brightness_temperature: units: "K" standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts @@ -518,6 +522,9 @@ datasets: brightness_temperature: units: "K" standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts @@ -535,6 +542,9 @@ datasets: brightness_temperature: units: "K" standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts @@ -558,6 +568,9 @@ datasets: brightness_temperature: units: "K" standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts @@ -581,6 +594,9 @@ datasets: brightness_temperature: units: "K" standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts diff --git a/satpy/etc/readers/mersi_ll_l1b.yaml b/satpy/etc/readers/mersi_ll_l1b.yaml index 652708d733..25da42b473 100644 --- a/satpy/etc/readers/mersi_ll_l1b.yaml +++ b/satpy/etc/readers/mersi_ll_l1b.yaml @@ -53,11 +53,16 @@ datasets: 1000: file_type: mersi_ll_l1b_1000 file_key: Data/EV_1KM_LL + calibration_key: Calibration/LL_Cal_Coeff + calibration_index: 0 coordinates: [longitude, latitude] calibration: - radiance: - units: 'mW/ (m2 cm-1 sr)' - standard_name: toa_outgoing_radiance_per_unit_wavelength + reflectance: + units: "%" + standard_name: toa_bidirectional_reflectance + counts: + units: "1" + standard_name: counts '2': name: '2' wavelength: [3.710, 3.800, 3.890] @@ -76,6 +81,9 @@ datasets: radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: counts '3': name: '3' wavelength: [3.9725, 4.050, 4.1275] @@ -94,6 +102,9 @@ datasets: radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: counts '4': name: '4' wavelength: [6.950, 7.20, 7.450] @@ -112,6 +123,9 @@ datasets: radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: counts '5': name: '5' wavelength: [8.400, 8.550, 8.700] @@ -130,6 +144,9 @@ datasets: radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: counts '6': name: '6' wavelength: [10.300, 10.800, 11.300] @@ -153,6 +170,9 @@ datasets: radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: counts '7': name: '7' wavelength: [11.500, 12.000, 12.500] @@ -176,6 +196,9 @@ datasets: radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength + counts: + units: "1" + standard_name: counts longitude: name: longitude diff --git a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py index 91e143f7e9..5d5b4911bf 100644 --- a/satpy/readers/mersi_l1b.py +++ b/satpy/readers/mersi_l1b.py @@ -152,6 +152,9 @@ def get_dataset(self, dataset_id, ds_info): data = coeffs[0] + coeffs[1] * data + coeffs[2] * data ** 2 data = data * self.get_refl_mult() + elif dataset_id.get("calibration") == 
"radiance": + data = data + elif dataset_id.get("calibration") == "brightness_temperature": # Converts um^-1 (wavenumbers) and (mW/m^2)/(str/cm^-1) (radiance data) # to SI units m^-1, mW*m^-3*str^-1. diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index 6c9bb8d7af..ef89d993e2 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -176,7 +176,7 @@ def _get_1km_data(num_scans, rows_per_scan, num_cols, filetype_info): "valid_range": [0, 4095], "long_name": b"250m Emissive Bands Earth View Science Data Aggregated to 1 km"}, dims=("_rows", "_cols")) if is_mersi1 else - xr.DataArray(da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), + xr.DataArray(da.ones((2, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={"Slope": np.array([1.] * 2), "Intercept": np.array([0.] * 2), "FillValue": 65535, "units": "mW/ (m2 cm-1 sr)", @@ -312,7 +312,7 @@ def _set_sensor_attrs(self, global_attrs): elif "mersi_ll" in self.filetype_info["file_type"]: global_attrs["/attr/Satellite Name"] = "FY-3E" global_attrs["/attr/Sensor Identification Code"] = "MERSI LL" - ftype = "VIS" + ftype = "LL" elif "mersi_rm" in self.filetype_info["file_type"]: global_attrs["/attr/Satellite Name"] = "FY-3G" global_attrs["/attr/Sensor Identification Code"] = "MERSI RM" @@ -412,6 +412,7 @@ def _test_multi_resolutions(available_datasets, band_list, test_resolution, cal_ with pytest.raises(KeyError): get_key(ds_id, available_datasets, num_results=cal_results_number, best=False) else: + res = get_key(ds_id, available_datasets, num_results=cal_results_number, best=False) assert len(res) == cal_results_number @@ -420,6 +421,7 @@ def _test_multi_resolutions(available_datasets, band_list, test_resolution, cal_ with pytest.raises(KeyError): get_key(ds_id, available_datasets, num_results=cal_results_number, best=False) else: + res = get_key(ds_id, available_datasets, num_results=cal_results_number, best=False) assert len(res) == cal_results_number @@ -442,8 +444,8 @@ def teardown_method(self): self.p.stop() -class MERSI1L1BTester(MERSIL1BTester): - """Test MERSI1 L1B Reader.""" +class MERSI12llL1BTester(MERSIL1BTester): + """Test MERSI1/2/LL L1B Reader.""" yaml_file: str = "" filenames_1000m: list= [] @@ -458,8 +460,6 @@ class MERSI1L1BTester(MERSIL1BTester): def test_all_resolutions(self): """Test loading data when all resolutions or specific one are available.""" - from satpy.readers import load_reader - resolution_list = ["all", "250", "1000"] file_list = [self.filenames_all, self.filenames_250m, self.filenames_1000m] @@ -471,8 +471,10 @@ def test_all_resolutions(self): # - Bands 1-4 (visible) # - Bands 5 (IR) available_datasets = reader.available_dataset_ids - num_results = 2 # ("reflectance"/"brightness temperature" and "coutns") - _test_multi_resolutions(available_datasets, self.bands_250, resolution, num_results) + vis_num_results = 3 if "mersi2" in self.yaml_file else 2 # Only MERSI-2 VIS has radiance calibration + ir_num_results = 3 + _test_multi_resolutions(available_datasets, self.vis_250_bands, resolution, vis_num_results) + _test_multi_resolutions(available_datasets, self.ir_250_bands, resolution, ir_num_results) res = reader.load(self.bands_1000 + self.bands_250) if resolution != "250": @@ -511,8 +513,29 @@ def test_counts_calib(self): _test_helper(res, self.bands_250, ("counts", "1", (2 * 40, 2048 * 2))) _test_helper(res, self.bands_1000, ("counts", "1", (2 * 10, 2048))) + 
def test_rad_calib(self): + """Test loading data at radiance calibration. For MERSI-2 VIS/IR and MERSI-1/LL IR""" + from satpy.tests.utils import make_dataid + filenames = self.filenames_all + reader = _test_find_files_and_readers(self.reader_configs, filenames) + + ds_ids = [] + test_bands = self.bands_1000 + self.bands_250 if "mersi2" in self.yaml_file else \ + self.ir_250_bands + self.ir_1000_bands + + for band_name in test_bands: + ds_ids.append(make_dataid(name=band_name, calibration="radiance")) + res = reader.load(ds_ids) + assert len(res) == len(test_bands) + if "mersi2" in self.yaml_file: + _test_helper(res, self.bands_250, ("radiance", "mW/ (m2 cm-1 sr)", (2 * 40, 2048 * 2))) + _test_helper(res, self.bands_1000, ("radiance", "mW/ (m2 cm-1 sr)", (2 * 10, 2048))) + else: + _test_helper(res, self.ir_250_bands, ("radiance", "mW/ (m2 cm-1 sr)", (2 * 40, 2048 * 2))) + _test_helper(res, self.ir_1000_bands, ("radiance", "mW/ (m2 cm-1 sr)", (2 * 10, 2048))) -class TestFY3AMERSI1L1B(MERSI1L1BTester): + +class TestFY3AMERSI1L1B(MERSI12llL1BTester): """Test the FY3A MERSI1 L1B reader.""" yaml_file = "fy3a_mersi1_l1b.yaml" @@ -521,14 +544,14 @@ class TestFY3AMERSI1L1B(MERSI1L1BTester): filenames_all = filenames_1000m + filenames_250m vis_250_bands = ["1", "2", "3", "4"] ir_250_bands = ["5"] - vis_1000_bandst = ["6", "7", "8", "11", "15", "19", "20"] + vis_1000_bands = ["6", "7", "8", "11", "15", "19", "20"] ir_1000_bands = [] - bands_1000 = vis_1000_bandst + ir_1000_bands + bands_1000 = vis_1000_bands + ir_1000_bands bands_250 = vis_250_bands + ir_250_bands -class TestFY3BMERSI1L1B(MERSI1L1BTester): - """Test the FY3A MERSI1 L1B reader.""" +class TestFY3BMERSI1L1B(MERSI12llL1BTester): + """Test the FY3B MERSI1 L1B reader.""" yaml_file = "fy3b_mersi1_l1b.yaml" filenames_1000m = ["FY3B_MERSI_GBAL_L1_20110824_1850_1000M_MS.hdf"] @@ -536,14 +559,14 @@ class TestFY3BMERSI1L1B(MERSI1L1BTester): filenames_all = filenames_1000m + filenames_250m vis_250_bands = ["1", "2", "3", "4"] ir_250_bands = ["5"] - vis_1000_bandst = ["6", "7", "8", "11", "15", "19", "20"] + vis_1000_bands = ["6", "7", "8", "11", "15", "19", "20"] ir_1000_bands = [] - bands_1000 = vis_1000_bandst + ir_1000_bands + bands_1000 = vis_1000_bands + ir_1000_bands bands_250 = vis_250_bands + ir_250_bands -class TestFY3CMERSI1L1B(MERSI1L1BTester): - """Test the FY3A MERSI1 L1B reader.""" +class TestFY3CMERSI1L1B(MERSI12llL1BTester): + """Test the FY3C MERSI1 L1B reader.""" yaml_file = "fy3c_mersi1_l1b.yaml" filenames_1000m = ["FY3C_MERSI_GBAL_L1_20131002_1835_1000M_MS.hdf", "FY3C_MERSI_GBAL_L1_20131002_1835_GEO1K_MS.hdf"] @@ -551,340 +574,40 @@ class TestFY3CMERSI1L1B(MERSI1L1BTester): filenames_all = filenames_1000m + filenames_250m vis_250_bands = ["1", "2", "3", "4"] ir_250_bands = ["5"] - vis_1000_bandst = ["6", "7", "8", "11", "15", "19", "20"] + vis_1000_bands = ["6", "7", "8", "11", "15", "19", "20"] ir_1000_bands = [] - bands_1000 = vis_1000_bandst + ir_1000_bands + bands_1000 = vis_1000_bands + ir_1000_bands bands_250 = vis_250_bands + ir_250_bands - -class TestMERSI2L1B(MERSIL1BTester): +class TestFYDCMERSI2L1B(MERSI12llL1BTester): """Test the FY3D MERSI2 L1B reader.""" yaml_file = "mersi2_l1b.yaml" filenames_1000m = ["tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF", "tf2019071182739.FY3D-X_MERSI_GEO1K_L1B.HDF"] filenames_250m = ["tf2019071182739.FY3D-X_MERSI_0250M_L1B.HDF", "tf2019071182739.FY3D-X_MERSI_GEOQK_L1B.HDF"] filenames_all = filenames_1000m + filenames_250m + vis_250_bands = ["1", "2", "3", "4"] + ir_250_bands = ["24", 
"25"] + vis_1000_bands = ["5", "8", "9", "11", "15", "17", "19"] + ir_1000_bands = ["20", "21", "23"] + bands_1000 = vis_1000_bands + ir_1000_bands + bands_250 = vis_250_bands + ir_250_bands - def test_all_resolutions(self): - """Test loading data when all resolutions are available.""" - from satpy.dataset.data_dict import get_key - from satpy.readers import load_reader - from satpy.tests.utils import make_dataid - filenames = self.filenames_all - reader = load_reader(self.reader_configs) - files = reader.select_files_from_pathnames(filenames) - assert 4 == len(files) - reader.create_filehandlers(files) - # Make sure we have some files - assert reader.file_handlers - - # Verify that we have multiple resolutions for: - # - Bands 1-4 (visible) - # - Bands 24-25 (IR) - available_datasets = reader.available_dataset_ids - for band_name in ("1", "2", "3", "4", "24", "25"): - if band_name in ("24", "25"): - # don't know how to get radiance for IR bands - num_results = 2 - else: - num_results = 3 - ds_id = make_dataid(name=band_name, resolution=250) - res = get_key(ds_id, available_datasets, - num_results=num_results, best=False) - assert num_results == len(res) - ds_id = make_dataid(name=band_name, resolution=1000) - res = get_key(ds_id, available_datasets, - num_results=num_results, best=False) - assert num_results == len(res) - - res = reader.load(["1", "2", "3", "4", "5", "20", "24", "25"]) - assert len(res) == 8 - _test_helper(res, ["1", "2", "3", "4"], ("reflectance", "%", (2 * 40, 2048 * 2))) - _test_helper(res, ["24", "25"], ("brightness_temperature", "K", (2 * 40, 2048 * 2))) - assert res["5"].shape == (2 * 10, 2048) - assert res["5"].attrs["calibration"] == "reflectance" - assert res["5"].attrs["units"] == "%" - assert res["20"].shape == (2 * 10, 2048) - assert res["20"].attrs["calibration"] == "brightness_temperature" - assert res["20"].attrs["units"] == "K" - - def test_counts_calib(self): - """Test loading data at counts calibration.""" - from satpy.readers import load_reader - from satpy.tests.utils import make_dataid - filenames = self.filenames_all - reader = load_reader(self.reader_configs) - files = reader.select_files_from_pathnames(filenames) - assert 4 == len(files) - reader.create_filehandlers(files) - # Make sure we have some files - assert reader.file_handlers - - ds_ids = [] - for band_name in ["1", "2", "3", "4", "5", "20", "24", "25"]: - ds_ids.append(make_dataid(name=band_name, calibration="counts")) - ds_ids.append(make_dataid(name="satellite_zenith_angle")) - res = reader.load(ds_ids) - assert len(res) == 9 - _test_helper(res, ["1", "2", "3", "4", "24", "25"], ("counts", "1", (2 * 40, 2048 * 2))) - _test_helper(res, ["5", "20"], ("counts", "1", (2 * 10, 2048))) - - def test_rad_calib(self): - """Test loading data at radiance calibration.""" - from satpy.readers import load_reader - from satpy.tests.utils import make_dataid - filenames = self.filenames_all - reader = load_reader(self.reader_configs) - files = reader.select_files_from_pathnames(filenames) - assert 4 == len(files) - reader.create_filehandlers(files) - # Make sure we have some files - assert reader.file_handlers - - ds_ids = [] - for band_name in ["1", "2", "3", "4", "5"]: - ds_ids.append(make_dataid(name=band_name, calibration="radiance")) - res = reader.load(ds_ids) - assert len(res) == 5 - _test_helper(res, ["1", "2", "3", "4"], ("radiance", "mW/ (m2 cm-1 sr)", (2 * 40, 2048 * 2))) - assert res["5"].shape == (2 * 10, 2048) - assert res["5"].attrs["calibration"] == "radiance" - assert 
res["5"].attrs["units"] == "mW/ (m2 cm-1 sr)" - def test_1km_resolutions(self): - """Test loading data when only 1km resolutions are available.""" - from satpy.dataset.data_dict import get_key - from satpy.readers import load_reader - from satpy.tests.utils import make_dataid - filenames = self.filenames_1000m - reader = load_reader(self.reader_configs) - files = reader.select_files_from_pathnames(filenames) - assert 2 == len(files) - reader.create_filehandlers(files) - # Make sure we have some files - assert reader.file_handlers - - # Verify that we have multiple resolutions for: - # - Bands 1-4 (visible) - # - Bands 24-25 (IR) - available_datasets = reader.available_dataset_ids - for band_name in ("1", "2", "3", "4", "24", "25"): - if band_name in ("24", "25"): - # don't know how to get radiance for IR bands - num_results = 2 - else: - num_results = 3 - ds_id = make_dataid(name=band_name, resolution=250) - with pytest.raises(KeyError): - get_key(ds_id, available_datasets, num_results=num_results, best=False) - ds_id = make_dataid(name=band_name, resolution=1000) - res = get_key(ds_id, available_datasets, - num_results=num_results, best=False) - assert num_results == len(res) - - res = reader.load(["1", "2", "3", "4", "5", "20", "24", "25"]) - assert len(res) == 8 - _test_helper(res, ["1", "2", "3", "4"], ("reflectance", "%", (2 * 10, 2048))) - _test_helper(res, ["24", "25"], ("brightness_temperature", "K", (2 * 10, 2048))) - assert res["5"].shape == (2 * 10, 2048) - assert res["5"].attrs["calibration"] == "reflectance" - assert res["5"].attrs["units"] == "%" - assert res["20"].shape == (2 * 10, 2048) - assert res["20"].attrs["calibration"] == "brightness_temperature" - assert res["20"].attrs["units"] == "K" - - def test_250_resolutions(self): - """Test loading data when only 250m resolutions are available.""" - from satpy.dataset.data_dict import get_key - from satpy.readers import load_reader - from satpy.tests.utils import make_dataid - filenames = self.filenames_250m - reader = load_reader(self.reader_configs) - files = reader.select_files_from_pathnames(filenames) - assert 2 == len(files) - reader.create_filehandlers(files) - # Make sure we have some files - assert reader.file_handlers - - # Verify that we have multiple resolutions for: - # - Bands 1-4 (visible) - # - Bands 24-25 (IR) - available_datasets = reader.available_dataset_ids - for band_name in ("1", "2", "3", "4", "24", "25"): - if band_name in ("24", "25"): - # don't know how to get radiance for IR bands - num_results = 2 - else: - num_results = 3 - ds_id = make_dataid(name=band_name, resolution=250) - res = get_key(ds_id, available_datasets, - num_results=num_results, best=False) - assert num_results == len(res) - ds_id = make_dataid(name=band_name, resolution=1000) - with pytest.raises(KeyError): - get_key(ds_id, available_datasets, num_results=num_results, best=False) - - res = reader.load(["1", "2", "3", "4", "5", "20", "24", "25"]) - assert len(res) == 6 - with pytest.raises(KeyError): - res.__getitem__("5") - with pytest.raises(KeyError): - res.__getitem__("20") - _test_helper(res, ["1", "2", "3", "4"], ("reflectance", "%", (2 * 40, 2048 * 2))) - assert res["24"].shape == (2 * 40, 2048 * 2) - assert res["24"].attrs["calibration"] == "brightness_temperature" - assert res["24"].attrs["units"] == "K" - assert res["25"].shape == (2 * 40, 2048 * 2) - assert res["25"].attrs["calibration"] == "brightness_temperature" - assert res["25"].attrs["units"] == "K" - - -class TestMERSILLL1B(MERSIL1BTester): - """Test the FY3E 
MERSI-LL L1B reader."""
+class TestFY3EMERSIllL1B(MERSI12llL1BTester):
+    """Test the FY3E MERSI-LL L1B reader."""
 
     yaml_file = "mersi_ll_l1b.yaml"
     filenames_1000m = ["FY3E_MERSI_GRAN_L1_20230410_1910_1000M_V0.HDF", "FY3E_MERSI_GRAN_L1_20230410_1910_GEO1K_V0.HDF"]
     filenames_250m = ["FY3E_MERSI_GRAN_L1_20230410_1910_0250M_V0.HDF", "FY3E_MERSI_GRAN_L1_20230410_1910_GEOQK_V0.HDF"]
     filenames_all = filenames_1000m + filenames_250m
-
-    def test_all_resolutions(self):
-        """Test loading data when all resolutions are available."""
-        from satpy.dataset.data_dict import get_key
-        from satpy.readers import load_reader
-        from satpy.tests.utils import make_dataid
-        filenames = self.filenames_all
-        reader = load_reader(self.reader_configs)
-        files = reader.select_files_from_pathnames(filenames)
-        assert 4 == len(files)
-        reader.create_filehandlers(files)
-        # Make sure we have some files
-        assert reader.file_handlers
-
-        # Verify that we have multiple resolutions for:
-        # - Bands 1-4 (visible)
-        # - Bands 24-25 (IR)
-        available_datasets = reader.available_dataset_ids
-        for band_name in ("6", "7"):
-            num_results = 2
-            ds_id = make_dataid(name=band_name, resolution=250)
-            res = get_key(ds_id, available_datasets,
-                          num_results=num_results, best=False)
-            assert num_results == len(res)
-            ds_id = make_dataid(name=band_name, resolution=1000)
-            res = get_key(ds_id, available_datasets,
-                          num_results=num_results, best=False)
-            assert num_results == len(res)
-
-        res = reader.load(["1", "2", "4", "7"])
-        assert len(res) == 4
-        assert res["4"].shape == (2 * 10, 2048)
-        assert res["1"].attrs["calibration"] == "radiance"
-        assert res["1"].attrs["units"] == "mW/ (m2 cm-1 sr)"
-        assert res["2"].shape == (2 * 10, 2048)
-        assert res["2"].attrs["calibration"] == "brightness_temperature"
-        assert res["2"].attrs["units"] == "K"
-        assert res["7"].shape == (2 * 40, 2048 * 2)
-        assert res["7"].attrs["calibration"] == "brightness_temperature"
-        assert res["7"].attrs["units"] == "K"
-
-    def test_rad_calib(self):
-        """Test loading data at radiance calibration."""
-        from satpy.readers import load_reader
-        from satpy.tests.utils import make_dataid
-        filenames = self.filenames_all
-        reader = load_reader(self.reader_configs)
-        files = reader.select_files_from_pathnames(filenames)
-        assert 4 == len(files)
-        reader.create_filehandlers(files)
-        # Make sure we have some files
-        assert reader.file_handlers
-
-        ds_ids = []
-        for band_name in ["1", "3", "4", "6", "7"]:
-            ds_ids.append(make_dataid(name=band_name, calibration="radiance"))
-        res = reader.load(ds_ids)
-        assert len(res) == 5
-        _test_helper(res, ["1", "3", "4"], ("radiance", "mW/ (m2 cm-1 sr)", (2 * 10, 2048)))
-        _test_helper(res, ["6", "7"], ("radiance", "mW/ (m2 cm-1 sr)", (2 * 40, 2048 * 2)))
-
-    def test_1km_resolutions(self):
-        """Test loading data when only 1km resolutions are available."""
-        from satpy.dataset.data_dict import get_key
-        from satpy.readers import load_reader
-        from satpy.tests.utils import make_dataid
-        filenames = self.filenames_1000m
-        reader = load_reader(self.reader_configs)
-        files = reader.select_files_from_pathnames(filenames)
-        assert 2 == len(files)
-        reader.create_filehandlers(files)
-        # Make sure we have some files
-        assert reader.file_handlers
-
-        # Verify that we have multiple resolutions for:
-        # - Band 6-7 (IR)
-        # - Bands 24-25 (IR)
-        available_datasets = reader.available_dataset_ids
-        for band_name in ("1", "2", "3", "4", "6", "7"):
-            if band_name == "1":
-                # don't know how to get anything apart from radiance for LL band
-                num_results = 1
-            else:
-                num_results = 2
-            ds_id = make_dataid(name=band_name, resolution=250)
-            with pytest.raises(KeyError):
-                get_key(ds_id, available_datasets, num_results=num_results, best=False)
-            ds_id = make_dataid(name=band_name, resolution=1000)
-            res = get_key(ds_id, available_datasets,
-                          num_results=num_results, best=False)
-            if band_name == "1":
-                assert num_results == len([res])
-            else:
-                assert num_results == len(res)
-
-        res = reader.load(["1", "2", "3", "5", "6", "7"])
-        assert len(res) == 6
-        assert res["1"].shape == (2 * 10, 2048)
-        assert res["1"].attrs["calibration"] == "radiance"
-        assert res["1"].attrs["units"] == "mW/ (m2 cm-1 sr)"
-        _test_helper(res, ["2", "3", "5", "6", "7"], ("brightness_temperature", "K", (2 * 10, 2048)))
-
-    def test_250_resolutions(self):
-        """Test loading data when only 250m resolutions are available."""
-        from satpy.dataset.data_dict import get_key
-        from satpy.readers import load_reader
-        from satpy.tests.utils import make_dataid
-        filenames = self.filenames_250m
-        reader = load_reader(self.reader_configs)
-        files = reader.select_files_from_pathnames(filenames)
-        assert 2 == len(files)
-        reader.create_filehandlers(files)
-        # Make sure we have some files
-        assert reader.file_handlers
-
-        # Verify that we have multiple resolutions for:
-        # - Bands 6-7
-        available_datasets = reader.available_dataset_ids
-        for band_name in ("6", "7"):
-            num_results = 2
-            ds_id = make_dataid(name=band_name, resolution=250)
-            res = get_key(ds_id, available_datasets,
-                          num_results=num_results, best=False)
-            assert num_results == len(res)
-            ds_id = make_dataid(name=band_name, resolution=1000)
-            with pytest.raises(KeyError):
-                get_key(ds_id, available_datasets, num_results=num_results, best=False)
-
-        res = reader.load(["1", "6", "7"])
-        assert 2 == len(res)
-        with pytest.raises(KeyError):
-            res.__getitem__("1")
-        assert (2 * 40, 2048 * 2) == res["6"].shape
-        assert "brightness_temperature" == res["6"].attrs["calibration"]
-        assert "K" == res["6"].attrs["units"]
-        assert (2 * 40, 2048 * 2) == res["7"].shape
-        assert "brightness_temperature" == res["7"].attrs["calibration"]
-        assert "K" == res["7"].attrs["units"]
+    vis_250_bands = []
+    ir_250_bands = ["6", "7"]
+    vis_1000_bands = ["1"]
+    ir_1000_bands = ["2", "3", "5"]
+    bands_1000 = vis_1000_bands + ir_1000_bands
+    bands_250 = vis_250_bands + ir_250_bands
 
 
 class TestMERSIRML1B(MERSIL1BTester):
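A note on the calibration math exercised above and implemented in the patch that follows: VIS radiance and reflectance are tied together through the band solar irradiance. The sketch below is an illustration only, not the reader code; e0 stands for a band-averaged solar irradiance in mW/(m2 cm-1), and the sample value is assumed:

    import numpy as np

    def radiance_from_reflectance(refl_percent, e0):
        # Rad = Refl / 100 * E0 / pi, the direction used for MERSI-2/RM VIS bands
        return refl_percent / 100.0 * e0 / np.pi

    def reflectance_from_radiance(rad, e0):
        # Refl = Rad * pi / E0 * 100, the inverse used for the MERSI-LL VIS band
        return rad * np.pi / e0 * 100.0

    # With the dummy irradiance e0 = 1.0 that the test fixtures use, 100 % reflectance
    # maps to a radiance of 1 / pi, about 0.318 mW/(m2 cm-1 sr), and the two
    # conversions invert each other exactly.
    assert abs(reflectance_from_radiance(radiance_from_reflectance(100.0, 1.0), 1.0) - 100.0) < 1e-9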
From 4ed4cf0cd8d5d1fd899046694c8a89c8b27bb757 Mon Sep 17 00:00:00 2001
From: yukaribbba
Date: Wed, 1 May 2024 16:13:29 +0800
Subject: [PATCH 1354/1416] radiance calibration

---
 satpy/etc/readers/mersi_ll_l1b.yaml        |  6 +-
 satpy/readers/mersi_l1b.py                 | 80 +++++++++++++++++-----
 satpy/tests/reader_tests/test_mersi_l1b.py | 42 +++++++++---
 3 files changed, 99 insertions(+), 29 deletions(-)

diff --git a/satpy/etc/readers/mersi_ll_l1b.yaml b/satpy/etc/readers/mersi_ll_l1b.yaml
index 25da42b473..c9c21fa539 100644
--- a/satpy/etc/readers/mersi_ll_l1b.yaml
+++ b/satpy/etc/readers/mersi_ll_l1b.yaml
@@ -53,13 +53,15 @@ datasets:
       1000:
         file_type: mersi_ll_l1b_1000
         file_key: Data/EV_1KM_LL
-        calibration_key: Calibration/LL_Cal_Coeff
-        calibration_index: 0
+        calibration_key: Calibration/Solar_Irradiance_LL
         coordinates: [longitude, latitude]
     calibration:
       reflectance:
         units: "%"
         standard_name: toa_bidirectional_reflectance
+      radiance:
+        units: 'mW/ (m2 cm-1 sr)'
+        standard_name: toa_outgoing_radiance_per_unit_wavelength
       counts:
         units: "1"
         standard_name: counts
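The effect of the YAML change above is that the MERSI-LL VIS channel can now be requested as radiance as well as reflectance or counts. A rough usage sketch (file names borrowed from the test data in this series; real granules are assumed to be present locally):

    from satpy import Scene

    filenames = ["FY3E_MERSI_GRAN_L1_20230410_1910_1000M_V0.HDF",
                 "FY3E_MERSI_GRAN_L1_20230410_1910_GEO1K_V0.HDF"]
    scn = Scene(filenames=filenames, reader="mersi_ll_l1b")
    # Request band 1 with the newly exposed calibration instead of the default.
    scn.load(["1"], calibration="radiance")
    print(scn["1"].attrs["units"])  # expected: mW/ (m2 cm-1 sr)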
diff --git a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py
index 5d5b4911bf..22614ac8b1 100644
--- a/satpy/readers/mersi_l1b.py
+++ b/satpy/readers/mersi_l1b.py
@@ -91,7 +91,8 @@ def _get_single_slope_intercept(self, slope, intercept, cal_index):
 
     def _get_coefficients(self, cal_key, cal_index):
         """Get VIS calibration coeffs from calibration datasets."""
-        coeffs = self[cal_key][cal_index]
+        # Only one VIS band for MERSI-LL
+        coeffs = self[cal_key][cal_index] if self.sensor_name != "mersi-ll" else self[cal_key]
         slope = coeffs.attrs.pop("Slope", None)
         intercept = coeffs.attrs.pop("Intercept", None)
         if slope is not None:
@@ -116,17 +117,12 @@ def _get_dn_corrections(self, data, band_index, dataset_id, attrs):
         """Use slope and intercept to get DN corrections."""
         slope = attrs.pop("Slope", None)
         intercept = attrs.pop("Intercept", None)
-        try:
-            new_slope = slope[band_index]
-            new_intercept = intercept[band_index]
-            # There's a bug in the slope for MERSI-1 11.25(5)
-            new_slope = 0.01 if self.sensor_name == "mersi-1" and dataset_id["name"] == "5" and new_slope in [100, 1] \
-                else new_slope
-            data = data * new_slope + new_intercept
-            return data
-
-        except TypeError:
-            return data
+        if slope is not None and dataset_id.get("calibration") != "counts":
+            if band_index is not None and slope.size > 1:
+                slope = slope[band_index]
+                intercept = intercept[band_index]
+            data = data * slope + intercept
+        return data
 
     def get_dataset(self, dataset_id, ds_info):
         """Load data variable and metadata and calibrate if needed."""
@@ -146,14 +142,10 @@ def get_dataset(self, dataset_id, ds_info):
         data = self._get_dn_corrections(data, band_index, dataset_id, attrs)
 
         if dataset_id.get("calibration") == "reflectance":
-            # Only FY-3A/B stores VIS calibration coefficients in attributes
-            coeffs = self._get_coefficients_mersi1(ds_info["calibration_index"]) if self.platform_name in ["FY-3A",
-                "FY-3B"] else self._get_coefficients(ds_info["calibration_key"], ds_info["calibration_index"])
-            data = coeffs[0] + coeffs[1] * data + coeffs[2] * data ** 2
-            data = data * self.get_refl_mult()
+            data = self._get_ref_dataset(data, ds_info)
 
         elif dataset_id.get("calibration") == "radiance":
-            data = data
+            data = self._get_rad_dataset(data, ds_info, dataset_id)
 
         elif dataset_id.get("calibration") == "brightness_temperature":
             # Converts um^-1 (wavenumbers) and (mW/m^2)/(str/cm^-1) (radiance data)
@@ -201,9 +193,61 @@ def _mask_data(self, data, dataset_id, attrs):
             data = data.where((data >= valid_range[0]) & (data <= valid_range[1]), new_fill)
             return data
 
+        # valid_range could be None
         except TypeError:
             return data
 
+    def _get_ref_dataset(self, data, ds_info):
+        """Get the dataset as reflectance.
+
+        For MERSI-1/2/RM, the calibration coefficients are applied as::
+
+            Reflectance = coeffs_1 + coeffs_2 * DN + coeffs_3 * DN ** 2
+
+        For MERSI-LL, the DN values are already radiance, and the reflectance is computed as::
+
+            Reflectance = Rad * pi / E0 * 100
+
+        Here E0 is the solar irradiance of the specific band and serves as the coefficient.
+
+        """
+        # Only FY-3A/B stores VIS calibration coefficients in attributes
+        coeffs = self._get_coefficients_mersi1(ds_info["calibration_index"]) if self.platform_name in ["FY-3A",
+            "FY-3B"] else self._get_coefficients(ds_info["calibration_key"], ds_info.get("calibration_index", None))
+        data = coeffs[0] + coeffs[1] * data + coeffs[2] * data ** 2 if self.sensor_name != "mersi-ll" else \
+            data * np.pi / coeffs[0] * 100
+
+        data = data * self.get_refl_mult()
+        return data
+
+    def _get_rad_dataset(self, data, ds_info, dataset_id):
+        """Get the dataset as radiance.
+
+        For MERSI-2/RM VIS bands, radiance is computed as::
+
+            Rad = Reflectance / 100 * E0 / pi
+
+        For MERSI-2, E0 is in the attribute "Solar_Irradiance".
+        For MERSI-RM, E0 is in the calibration dataset "Solar_Irradiance".
+        We have not found a way to retrieve this value for MERSI-1.
+
+        For the MERSI-LL VIS band, radiance is already stored in the DN values,
+        so applying slope and intercept yields it directly. The same holds for
+        the IR bands of every sensor.
+
+        """
+        mersi_2_vis = [str(i) for i in range(1, 20)]
+        mersi_rm_vis = [str(i) for i in range(1, 6)]
+
+        if self.sensor_name == "mersi-2" and dataset_id["name"] in mersi_2_vis:
+            E0 = self["/attr/Solar_Irradiance"]
+            rad = self._get_ref_dataset(data, ds_info) / 100 * E0[mersi_2_vis.index(dataset_id["name"])] / np.pi
+        elif self.sensor_name == "mersi-rm" and dataset_id["name"] in mersi_rm_vis:
+            E0 = self._get_coefficients("Calibration/Solar_Irradiance", mersi_rm_vis.index(dataset_id["name"]))
+            rad = self._get_ref_dataset(data, ds_info) / 100 * E0 / np.pi
+        else:
+            rad = data
+        return rad
+
     def _get_bt_dataset(self, data, calibration_index, wave_number):
         """Get the dataset as brightness temperature.
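Before the test changes below, a quick numeric sanity check of the quadratic VIS calibration documented in _get_ref_dataset above. The coefficients here are made up for illustration; real values come from the file's calibration dataset (or the VIS_Cal_Coeff attribute on FY-3A/B):

    import numpy as np

    c0, c1, c2 = 0.1, 0.02, 1e-7            # hypothetical coeffs_1..coeffs_3
    dn = np.array([500.0, 1000.0, 2000.0])  # raw counts
    refl = c0 + c1 * dn + c2 * dn ** 2      # percent, before the sun-earth distance multiplier
    print(refl)                             # [10.125 20.2   40.5  ]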
diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py
index ef89d993e2..3403adb205 100644
--- a/satpy/tests/reader_tests/test_mersi_l1b.py
+++ b/satpy/tests/reader_tests/test_mersi_l1b.py
@@ -34,6 +34,16 @@ def _get_calibration(num_scans, ftype):
             da.ones((19, 3), chunks=1024),
             attrs={"Slope": np.array([1.] * 19), "Intercept": np.array([0.] * 19)},
             dims=("_bands", "_coeffs")),
+        "Calibration/Solar_Irradiance":
+            xr.DataArray(
+                da.ones((19, ), chunks=1024),
+                attrs={"Slope": np.array([1.] * 19), "Intercept": np.array([0.] * 19)},
+                dims=("_bands")),
+        "Calibration/Solar_Irradiance_LL":
+            xr.DataArray(
+                da.ones((1, ), chunks=1024),
+                attrs={"Slope": np.array([1.]), "Intercept": np.array([0.])},
+                dims=("_bands")),
         "Calibration/IR_Cal_Coeff":
             xr.DataArray(
                 da.ones((6, 4, num_scans), chunks=1024),
@@ -272,6 +282,9 @@ def get_test_content(self, filename, filename_info, filetype_info):
         fy3b_attrs = {
             "/attr/VIS_Cal_Coeff": np.array([0.0, 1.0, 0.0] * 19),
         }
+        fy3d_attrs = {
+            "/attr/Solar_Irradiance": np.array([1.0] * 19),
+        }
 
         global_attrs, ftype = self._set_sensor_attrs(global_attrs)
         self._add_tbb_coefficients(global_attrs)
@@ -288,6 +301,8 @@ def get_test_content(self, filename, filename_info, filetype_info):
             test_content.update(fy3a_attrs)
         elif "fy3b_mersi1" in self.filetype_info["file_type"]:
             test_content.update(fy3b_attrs)
+        elif "mersi2" in self.filetype_info["file_type"]:
+            test_content.update(fy3d_attrs)
         if not self.filetype_info["file_type"].startswith(("fy3a_mersi1", "fy3b_mersi1")):
             test_content.update(_get_calibration(self.num_scans, ftype))
         return test_content
@@ -393,6 +408,7 @@ def _test_helper(res, band_list, exp_result):
 
 def _test_find_files_and_readers(reader_config, filenames):
+    """Test file and reader search."""
     from satpy.readers import load_reader
     reader = load_reader(reader_config)
     files = reader.select_files_from_pathnames(filenames)
@@ -404,6 +420,7 @@ def _test_find_files_and_readers(reader_config, filenames):
 
 def _test_multi_resolutions(available_datasets, band_list, test_resolution, cal_results_number):
+    """Test some bands have multiple resolutions."""
     for band_name in band_list:
         from satpy.dataset.data_dict import get_key
         from satpy.tests.utils import make_dataid
@@ -468,10 +485,17 @@ def test_all_resolutions(self):
             filenames = file_list[resolution_list.index(resolution)]
             reader = 
_test_find_files_and_readers(self.reader_configs, filenames) # Verify that we have multiple resolutions for: + # ---------MERSI-1--------- # - Bands 1-4 (visible) # - Bands 5 (IR) + # ---------MERSI-2--------- + # - Bands 1-4 (visible) + # - Bands 24-25 (IR) + # ---------MERSI-LL--------- + # - Bands 6-7 (IR) available_datasets = reader.available_dataset_ids - vis_num_results = 3 if "mersi2" in self.yaml_file else 2 # Only MERSI-2 VIS has radiance calibration + # Only MERSI-2/LL VIS has radiance calibration + vis_num_results = 3 if self.yaml_file in ["mersi2_l1b.yaml", "mersi_ll_l1b.yaml"] else 2 ir_num_results = 3 _test_multi_resolutions(available_datasets, self.vis_250_bands, resolution, vis_num_results) _test_multi_resolutions(available_datasets, self.ir_250_bands, resolution, ir_num_results) @@ -514,20 +538,20 @@ def test_counts_calib(self): _test_helper(res, self.bands_1000, ("counts", "1", (2 * 10, 2048))) def test_rad_calib(self): - """Test loading data at radiance calibration. For MERSI-2 VIS/IR and MERSI-1/LL IR""" + """Test loading data at radiance calibration. For MERSI-2/LL VIS/IR and MERSI-1 IR.""" from satpy.tests.utils import make_dataid filenames = self.filenames_all reader = _test_find_files_and_readers(self.reader_configs, filenames) ds_ids = [] - test_bands = self.bands_1000 + self.bands_250 if "mersi2" in self.yaml_file else \ - self.ir_250_bands + self.ir_1000_bands + test_bands = self.bands_1000 + self.bands_250 if self.yaml_file in ["mersi2_l1b.yaml", "mersi_ll_l1b.yaml"] \ + else self.ir_250_bands + self.ir_1000_bands for band_name in test_bands: ds_ids.append(make_dataid(name=band_name, calibration="radiance")) res = reader.load(ds_ids) assert len(res) == len(test_bands) - if "mersi2" in self.yaml_file: + if self.yaml_file in ["mersi2_l1b.yaml", "mersi_ll_l1b.yaml"]: _test_helper(res, self.bands_250, ("radiance", "mW/ (m2 cm-1 sr)", (2 * 40, 2048 * 2))) _test_helper(res, self.bands_1000, ("radiance", "mW/ (m2 cm-1 sr)", (2 * 10, 2048))) else: @@ -580,7 +604,7 @@ class TestFY3CMERSI1L1B(MERSI12llL1BTester): bands_250 = vis_250_bands + ir_250_bands -class TestFYDCMERSI2L1B(MERSI12llL1BTester): +class TestFY3DMERSI2L1B(MERSI12llL1BTester): """Test the FY3D MERSI2 L1B reader.""" yaml_file = "mersi2_l1b.yaml" @@ -660,6 +684,6 @@ def test_rad_calib(self): res = reader.load(ds_ids) assert len(res) == 5 for band_name in band_names: - assert res[band_name].shape == (20, 4096) - assert res[band_name].attrs["calibration"] == "radiance" - assert res[band_name].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res[band_name].shape == (20, 4096) + assert res[band_name].attrs["calibration"] == "radiance" + assert res[band_name].attrs["units"] == "mW/ (m2 cm-1 sr)" From 60e77320342f2d5d848bca55fe46590abaef631e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 1 May 2024 08:17:23 +0000 Subject: [PATCH 1355/1416] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- satpy/etc/readers/mersi_ll_l1b.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/readers/mersi_ll_l1b.yaml b/satpy/etc/readers/mersi_ll_l1b.yaml index c9c21fa539..47b6d432b0 100644 --- a/satpy/etc/readers/mersi_ll_l1b.yaml +++ b/satpy/etc/readers/mersi_ll_l1b.yaml @@ -61,7 +61,7 @@ datasets: standard_name: toa_bidirectional_reflectance radiance: units: 'mW/ (m2 cm-1 sr)' - standard_name: toa_outgoing_radiance_per_unit_wavelength + standard_name: 
toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts From b922967b54ccc28dfc34cbd911e20fefa896c262 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 1 May 2024 16:34:27 +0800 Subject: [PATCH 1356/1416] Update test_mersi_l1b.py --- satpy/tests/reader_tests/test_mersi_l1b.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index 3403adb205..07299060fa 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -66,6 +66,8 @@ def _get_250m_data(num_scans, rows_per_scan, num_cols, filetype_info): } nounits_attrs = {**def_attrs, **{"units": "NO"}} radunits_attrs = {**def_attrs, **{"units": "mW/ (m2 cm-1 sr)"}} + valid_range_none_attrs = radunits_attrs.copy() + valid_range_none_attrs["valid_range"] = None data = { f"{key_prefix}EV_250_RefSB_b1": @@ -91,7 +93,7 @@ def _get_250m_data(num_scans, rows_per_scan, num_cols, filetype_info): f"{key_prefix}EV_250_Emissive_b24": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), - attrs=radunits_attrs, + attrs=valid_range_none_attrs, dims=("_rows", "_cols")), f"{key_prefix}EV_250_Emissive_b25": xr.DataArray( From 2eef43a9e86d53b79d75fde2379a878df66e970e Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 1 May 2024 17:08:16 +0800 Subject: [PATCH 1357/1416] fy3b geo --- satpy/etc/readers/fy3b_mersi1_l1b.yaml | 8 ++++---- satpy/tests/reader_tests/test_mersi_l1b.py | 11 ++++++++++- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/satpy/etc/readers/fy3b_mersi1_l1b.yaml b/satpy/etc/readers/fy3b_mersi1_l1b.yaml index 65ce53cd73..5d2dc48f56 100644 --- a/satpy/etc/readers/fy3b_mersi1_l1b.yaml +++ b/satpy/etc/readers/fy3b_mersi1_l1b.yaml @@ -423,7 +423,7 @@ datasets: standard_name: solar_zenith_angle resolution: 1000 coordinates: [longitude, latitude] - file_type: fy3b_mersi1_l1b_geo + file_type: fy3b_mersi1_l1b_1000 file_key: SolarZenith solar_azimuth_angle: @@ -432,7 +432,7 @@ datasets: standard_name: solar_azimuth_angle resolution: 1000 coordinates: [longitude, latitude] - file_type: fy3b_mersi1_l1b_geo + file_type: fy3b_mersi1_l1b_1000 file_key: SolarAzimuth satellite_zenith_angle: @@ -441,7 +441,7 @@ datasets: standard_name: sensor_zenith_angle resolution: 1000 coordinates: [longitude, latitude] - file_type: fy3b_mersi1_l1b_geo + file_type: fy3b_mersi1_l1b_1000 file_key: SensorZenith satellite_azimuth_angle: @@ -450,5 +450,5 @@ datasets: standard_name: sensor_azimuth_angle resolution: 1000 coordinates: [longitude, latitude] - file_type: fy3b_mersi1_l1b_geo + file_type: fy3b_mersi1_l1b_1000 file_key: SensorAzimuth diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index 07299060fa..ffe4562bd5 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -194,7 +194,16 @@ def _get_1km_data(num_scans, rows_per_scan, num_cols, filetype_info): "units": "mW/ (m2 cm-1 sr)", "valid_range": [0, 4095], "long_name": b"250m Emissive Bands Earth View Science Data Aggregated to 1 km"}, - dims=("_ir250_bands", "_rows", "_cols")) + dims=("_ir250_bands", "_rows", "_cols")), + f"{key_prefix}SensorZenith": + xr.DataArray( + da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), + attrs={ + "Slope": np.array([.01] * 1), "Intercept": np.array([0.] 
* 1), + "units": "degree", + "valid_range": [0, 28000], + }, + dims=("_rows", "_cols")), } return data From 48dda53422b5f7bb0defd1c7ede9e04a115ca2ed Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 1 May 2024 17:30:18 +0800 Subject: [PATCH 1358/1416] Update mersi_l1b.py --- satpy/readers/mersi_l1b.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py index 22614ac8b1..9959d9272e 100644 --- a/satpy/readers/mersi_l1b.py +++ b/satpy/readers/mersi_l1b.py @@ -121,6 +121,8 @@ def _get_dn_corrections(self, data, band_index, dataset_id, attrs): if band_index is not None and slope.size > 1: slope = slope[band_index] intercept = intercept[band_index] + # There's a bug in slope for MERSI-1 IR band + slope = 0.01 if self.sensor_name == "mersi-1" and dataset_id["name"] == "5" else slope data = data * slope + intercept return data From 768a5f54805fc443b036e784de0383e4ec8bd94c Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 1 May 2024 23:00:20 +0800 Subject: [PATCH 1359/1416] ir cal --- satpy/etc/composites/mersi-1.yaml | 19 +++++++++++++++---- satpy/readers/mersi_l1b.py | 9 +++++++-- 2 files changed, 22 insertions(+), 6 deletions(-) diff --git a/satpy/etc/composites/mersi-1.yaml b/satpy/etc/composites/mersi-1.yaml index ee92ee9ed4..8239f73bee 100644 --- a/satpy/etc/composites/mersi-1.yaml +++ b/satpy/etc/composites/mersi-1.yaml @@ -14,10 +14,10 @@ modifiers: - name: solar_azimuth_angle - name: solar_zenith_angle - # sunz_corrected: - # modifier: !!python/name:satpy.modifiers.SunZenithCorrector - # prerequisites: - # - solar_zenith_angle + sunz_corrected: + modifier: !!python/name:satpy.modifiers.SunZenithCorrector + prerequisites: + - name: solar_zenith_angle nir_reflectance: modifier: !!python/name:satpy.modifiers.NIRReflectance @@ -34,6 +34,17 @@ composites: - name: '5' standard_name: colorized_ir_clouds + true_color: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: '3' + modifiers: [sunz_corrected, rayleigh_corrected] + - name: '2' + modifiers: [sunz_corrected, rayleigh_corrected] + - name: '1' + modifiers: [sunz_corrected, rayleigh_corrected] + standard_name: true_color + true_color_uncorr: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: diff --git a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py index 9959d9272e..1a2353fbf2 100644 --- a/satpy/readers/mersi_l1b.py +++ b/satpy/readers/mersi_l1b.py @@ -279,7 +279,11 @@ def _get_bt_dataset(self, data, calibration_index, wave_number): data = data.where(data != 0) # additional corrections from the file - if self.sensor_name == "mersi-2": + if self.sensor_name == "mersi-1": + # https://img.nsmc.org.cn/PORTAL/NSMC/DATASERVICE/SRF/FY3C/FY3C_MERSI_SRF.rar + corr_coeff_a = 1.0047 + corr_coeff_b = -0.8549 + elif self.sensor_name == "mersi-2": corr_coeff_a = float(self["/attr/TBB_Trans_Coefficient_A"][calibration_index]) corr_coeff_b = float(self["/attr/TBB_Trans_Coefficient_B"][calibration_index]) elif self.sensor_name == "mersi-ll": @@ -295,7 +299,8 @@ def _get_bt_dataset(self, data, calibration_index, wave_number): corr_coeff_a = 0 if corr_coeff_a != 0: - data = (data - corr_coeff_b) / corr_coeff_a + data = (data - corr_coeff_b) / corr_coeff_a if self.sensor_name != "mersi-1" else \ + data * corr_coeff_a + corr_coeff_b # some bands have 0 counts for the first N columns and # seem to be invalid data points data = data.where(data != 0) From e2537efdf1bc1fce3deabc39b1309d801c7e75a3 Mon Sep 17 00:00:00 2001 From: 
"pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 1 May 2024 15:02:23 +0000 Subject: [PATCH 1360/1416] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- satpy/etc/composites/mersi-1.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/composites/mersi-1.yaml b/satpy/etc/composites/mersi-1.yaml index 8239f73bee..1d7a2df79d 100644 --- a/satpy/etc/composites/mersi-1.yaml +++ b/satpy/etc/composites/mersi-1.yaml @@ -43,7 +43,7 @@ composites: modifiers: [sunz_corrected, rayleigh_corrected] - name: '1' modifiers: [sunz_corrected, rayleigh_corrected] - standard_name: true_color + standard_name: true_color true_color_uncorr: compositor: !!python/name:satpy.composites.GenericCompositor From e40917708edea99ce86fb52a971d951a07f3a475 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sat, 4 May 2024 22:05:28 -0500 Subject: [PATCH 1361/1416] Add another safe directory to git config --- .github/workflows/ci.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index c472709bc8..50880f0e95 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -34,6 +34,8 @@ jobs: steps: - name: Checkout source uses: actions/checkout@v4 + run: | + git config --global --add safe.directory /github/workspace - name: Setup Conda Environment uses: conda-incubator/setup-miniconda@v3 From f6ffd64432f2451aed0bc9814541c326b8779159 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sat, 4 May 2024 22:12:26 -0500 Subject: [PATCH 1362/1416] Try again --- .github/workflows/ci.yaml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 50880f0e95..4b13c6a27b 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -34,8 +34,6 @@ jobs: steps: - name: Checkout source uses: actions/checkout@v4 - run: | - git config --global --add safe.directory /github/workspace - name: Setup Conda Environment uses: conda-incubator/setup-miniconda@v3 @@ -103,6 +101,7 @@ jobs: - name: Install satpy shell: bash -l {0} run: | + git config --global --add safe.directory /github/workspace python -m pip install --no-deps -e . - name: Run unit tests From ebb77aa4cd87d9c0809badac14bf0f8ff1133e09 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sat, 4 May 2024 22:27:55 -0500 Subject: [PATCH 1363/1416] Add git safe directory locally only --- .github/workflows/ci.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 4b13c6a27b..244371f9dc 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -102,6 +102,7 @@ jobs: shell: bash -l {0} run: | git config --global --add safe.directory /github/workspace + git config --local --add safe.directory /github/workspace python -m pip install --no-deps -e . 
- name: Run unit tests From cdf8fbbf1e6ce0b185350f602fadd89871b69745 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 5 May 2024 21:20:17 -0500 Subject: [PATCH 1364/1416] Debug coveralls action --- .github/workflows/ci.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 244371f9dc..1193579c59 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -123,6 +123,7 @@ jobs: with: flag-name: run-${{ matrix.test_number }} parallel: true + debug: true if: runner.os == 'Linux' - name: Run behaviour tests From 597e9ad5f39519bc8d81979dab4717f99a4f1479 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 7 May 2024 08:56:07 -0500 Subject: [PATCH 1365/1416] Use djhoese debug coveralls action --- .github/workflows/ci.yaml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 1193579c59..6098e4d7a2 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -119,7 +119,8 @@ jobs: env_vars: OS,PYTHON_VERSION,UNSTABLE - name: Coveralls Parallel - uses: AndreMiras/coveralls-python-action@develop +# uses: AndreMiras/coveralls-python-action@develop + uses: djhoese/coveralls-python-action@git-safe-dir with: flag-name: run-${{ matrix.test_number }} parallel: true @@ -145,6 +146,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Coveralls Finished - uses: AndreMiras/coveralls-python-action@develop +# uses: AndreMiras/coveralls-python-action@develop + uses: djhoese/coveralls-python-action@git-safe-dir with: parallel-finished: true From 3bf7beefa4f96a146dc1bfd4a3cfe7668360d783 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 7 May 2024 12:00:22 -0500 Subject: [PATCH 1366/1416] Add missing coverage config to pyproject.toml --- .github/workflows/ci.yaml | 9 ++------- pyproject.toml | 4 ++++ 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 6098e4d7a2..c472709bc8 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -101,8 +101,6 @@ jobs: - name: Install satpy shell: bash -l {0} run: | - git config --global --add safe.directory /github/workspace - git config --local --add safe.directory /github/workspace python -m pip install --no-deps -e . - name: Run unit tests @@ -119,12 +117,10 @@ jobs: env_vars: OS,PYTHON_VERSION,UNSTABLE - name: Coveralls Parallel -# uses: AndreMiras/coveralls-python-action@develop - uses: djhoese/coveralls-python-action@git-safe-dir + uses: AndreMiras/coveralls-python-action@develop with: flag-name: run-${{ matrix.test_number }} parallel: true - debug: true if: runner.os == 'Linux' - name: Run behaviour tests @@ -146,7 +142,6 @@ jobs: runs-on: ubuntu-latest steps: - name: Coveralls Finished -# uses: AndreMiras/coveralls-python-action@develop - uses: djhoese/coveralls-python-action@git-safe-dir + uses: AndreMiras/coveralls-python-action@develop with: parallel-finished: true diff --git a/pyproject.toml b/pyproject.toml index 35ba5e8dc2..ea3c094615 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -145,3 +145,7 @@ convention = "google" [tool.ruff.lint.mccabe] # Unlike Flake8, default to a complexity level of 10. 
 max-complexity = 10
+
+[tool.coverage.run]
+relative_files = true
+omit = ["satpy/version.py"]

From 06e3139a056dafe8e597f55eaef1a22bf46c87ae Mon Sep 17 00:00:00 2001
From: David Hoese
Date: Tue, 7 May 2024 12:29:22 -0500
Subject: [PATCH 1367/1416] Use djhoese debug coveralls action again

---
 .github/workflows/ci.yaml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index c472709bc8..d8a1e63231 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -117,7 +117,7 @@ jobs:
           env_vars: OS,PYTHON_VERSION,UNSTABLE

       - name: Coveralls Parallel
-        uses: AndreMiras/coveralls-python-action@develop
+        uses: djhoese/coveralls-python-action@git-safe-dir
         with:
           flag-name: run-${{ matrix.test_number }}
           parallel: true
@@ -142,6 +142,6 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Coveralls Finished
-        uses: AndreMiras/coveralls-python-action@develop
+        uses: djhoese/coveralls-python-action@git-safe-dir
         with:
           parallel-finished: true

From f90edd54630d38ca90ecc01bb1b445f06378d7b4 Mon Sep 17 00:00:00 2001
From: David Hoese
Date: Tue, 7 May 2024 12:39:16 -0500
Subject: [PATCH 1368/1416] Add debug to coveralls action

---
 .github/workflows/ci.yaml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index d8a1e63231..bc13fd9a4f 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -121,6 +121,7 @@ jobs:
         with:
           flag-name: run-${{ matrix.test_number }}
           parallel: true
+          debug: true
         if: runner.os == 'Linux'

       - name: Run behaviour tests

From 7936297170ea8368635d5a3017cd57a6b8f76e01 Mon Sep 17 00:00:00 2001
From: clement laplace
Date: Wed, 8 May 2024 08:39:15 +0000
Subject: [PATCH 1369/1416] typo: change the place of required_netcdf_variables
 in the fci_l1c_nc.yaml file, erase the _ that is in front of some constants

---
 satpy/etc/readers/fci_l1c_nc.yaml           | 57 ++++++++++-----------
 satpy/tests/reader_tests/test_fci_l1c_nc.py | 54 +++++++++----------
 2 files changed, 55 insertions(+), 56 deletions(-)

diff --git a/satpy/etc/readers/fci_l1c_nc.yaml b/satpy/etc/readers/fci_l1c_nc.yaml
index 8f39097479..51a023cc56 100644
--- a/satpy/etc/readers/fci_l1c_nc.yaml
+++ b/satpy/etc/readers/fci_l1c_nc.yaml
@@ -13,34 +13,6 @@ reader:
   # Source: MTG FCI L1 Product User Guide [FCIL1PUG]
   # https://www.eumetsat.int/media/45923

-required_netcdf_variables: &required-variables
-  - attr/platform
-  - data/{channel_name}/measured/start_position_row
-  - data/{channel_name}/measured/end_position_row
-  - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber
-  - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a
-  - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b
-  - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1
-  - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2
-  - data/{channel_name}/measured/radiance_unit_conversion_coefficient
-  - data/{channel_name}/measured/channel_effective_solar_irradiance
-  - data/{channel_name}/measured/effective_radiance
-  - data/{channel_name}/measured/x
-  - data/{channel_name}/measured/y
-  - data/{channel_name}/measured/pixel_quality
-  - data/{channel_name}/measured/index_map
-  - data/mtg_geos_projection
-  - data/swath_direction
-  - data/swath_number
-  - index
-  - state/celestial/earth_sun_distance
-  - state/celestial/subsolar_latitude
-  - state/celestial/subsolar_longitude
-  - state/celestial/sun_satellite_distance
-  - state/platform/platform_altitude
-  -
state/platform/subsatellite_latitude - - state/platform/subsatellite_longitude - - time file_types: fci_l1c_fdhsi: @@ -50,7 +22,34 @@ file_types: "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-FDHSI-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc", ] expected_segments: 40 - required_netcdf_variables: *required-variables + required_netcdf_variables: &required-variables + - attr/platform + - data/{channel_name}/measured/start_position_row + - data/{channel_name}/measured/end_position_row + - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_wavenumber + - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_a + - data/{channel_name}/measured/radiance_to_bt_conversion_coefficient_b + - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c1 + - data/{channel_name}/measured/radiance_to_bt_conversion_constant_c2 + - data/{channel_name}/measured/radiance_unit_conversion_coefficient + - data/{channel_name}/measured/channel_effective_solar_irradiance + - data/{channel_name}/measured/effective_radiance + - data/{channel_name}/measured/x + - data/{channel_name}/measured/y + - data/{channel_name}/measured/pixel_quality + - data/{channel_name}/measured/index_map + - data/mtg_geos_projection + - data/swath_direction + - data/swath_number + - index + - state/celestial/earth_sun_distance + - state/celestial/subsolar_latitude + - state/celestial/subsolar_longitude + - state/celestial/sun_satellite_distance + - state/platform/platform_altitude + - state/platform/subsatellite_latitude + - state/platform/subsatellite_longitude + - time variable_name_replacements: channel_name: - vis_04 diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index 56130190af..98777d0e51 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -107,12 +107,12 @@ } } -_CHANS_FDHSI = {"solar": LIST_CHANNEL_SOLAR, +CHANS_FHDSI = {"solar": LIST_CHANNEL_SOLAR, "solar_grid_type": ["1km"] * 8, "terran": LIST_CHANNEL_TERRAN, "terran_grid_type": ["2km"] * 8} -_CHANS_HRFI = {"solar": ["vis_06", "nir_22"], +CHANS_HRFI = {"solar": ["vis_06", "nir_22"], "solar_grid_type": ["500m"] * 2, "terran": ["ir_38", "ir_105"], "terran_grid_type": ["1km"] * 2} @@ -148,7 +148,7 @@ }, }, } -_test_filenames = {"fdhsi": [ +TEST_FILENAMES = {"fdhsi": [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc" @@ -168,24 +168,24 @@ def resolutions(channel): return LIST_RESOLUTION def fill_chans_af(): - """Fill the dict _CHANS_AF and the list _test_filenames with the right channel and resolution.""" - _CHANS_AF = {} + """Fill the dict CHANS_AF and the list TEST_FILENAMES with the right channel and resolution.""" + CHANS_AF = {} for channel in LIST_TOTAL_CHANNEL: list_resol = resolutions(channel) for resol in list_resol: chann_upp = channel.replace("_","").upper() - _test_filenames[f"af_{channel}_{resol}"] = [f"W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1-FCI-1C-RRAD" + TEST_FILENAMES[f"af_{channel}_{resol}"] = [f"W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1-FCI-1C-RRAD" 
f"-{resol.upper()}-AF-{chann_upp}-x-x---NC4E_C_EUMT_20240125144655_DT_OPE" f"_20240109080007_20240109080924_N_JLS_T_0049_0000.nc"] if channel.split("_")[0] in ["vis","nir"]: - _CHANS_AF[f"{channel}_{resol}"] = {"solar":[channel], + CHANS_AF[f"{channel}_{resol}"] = {"solar":[channel], "solar_grid_type": [resol]} elif channel.split("_")[0] in ["ir","wv"]: - _CHANS_AF[f"{channel}_{resol}"] = {"terran":[channel], + CHANS_AF[f"{channel}_{resol}"] = {"terran":[channel], "terran_grid_type": [resol]} - return _CHANS_AF,_test_filenames + return CHANS_AF,TEST_FILENAMES -_CHANS_AF,_test_filenames = fill_chans_af() +CHANS_AF,TEST_FILENAMES = fill_chans_af() # ---------------------------------------------------- # Filehandlers preparation --------------------------- # ---------------------------------------------------- @@ -553,8 +553,8 @@ def FakeFCIFileHandlerFDHSI_fixture(): with mocked_basefilehandler(FakeFCIFileHandlerFDHSI): param_dict = { "filetype": "fci_l1c_fdhsi", - "channels": _CHANS_FDHSI, - "filenames": _test_filenames["fdhsi"] + "channels": CHANS_FHDSI, + "filenames": TEST_FILENAMES["fdhsi"] } yield param_dict @@ -565,8 +565,8 @@ def FakeFCIFileHandlerHRFI_fixture(): with mocked_basefilehandler(FakeFCIFileHandlerHRFI): param_dict = { "filetype": "fci_l1c_hrfi", - "channels": _CHANS_HRFI, - "filenames": _test_filenames["hrfi"] + "channels": CHANS_HRFI, + "filenames": TEST_FILENAMES["hrfi"] } yield param_dict @@ -579,8 +579,8 @@ def FakeFCIFileHandlerAF_fixture(channel,resolution): with mocked_basefilehandler(FakeFCIFileHandlerAF): param_dict = { "filetype": "fci_l1c_af", - "channels": _CHANS_AF[f"{channel}_{resolution}"], - "filenames": _test_filenames[f"af_{channel}_{resolution}"], + "channels": CHANS_AF[f"{channel}_{resolution}"], + "filenames": TEST_FILENAMES[f"af_{channel}_{resolution}"], } yield param_dict @@ -592,10 +592,10 @@ def FakeFCIFileHandlerAF_fixture(channel,resolution): class TestFCIL1cNCReader: """Test FCI L1c NetCDF reader with nominal data.""" - fh_param_for_filetype = {"hrfi": {"channels": _CHANS_HRFI, - "filenames": _test_filenames["hrfi"]}, - "fdhsi": {"channels": _CHANS_FDHSI, - "filenames": _test_filenames["fdhsi"]}} + fh_param_for_filetype = {"hrfi": {"channels": CHANS_HRFI, + "filenames": TEST_FILENAMES["hrfi"]}, + "fdhsi": {"channels": CHANS_FHDSI, + "filenames": TEST_FILENAMES["fdhsi"]}} def _get_type_ter_AF(self,channel): """Get the type_ter.""" @@ -632,7 +632,7 @@ def _get_res_AF(self,channel,fh_param,calibration,reader_configs): for name in fh_param["channels"][type_ter]], pad_data=False) return res - @pytest.mark.parametrize("filenames", [_test_filenames[filename] for filename in _test_filenames.keys()]) + @pytest.mark.parametrize("filenames", [TEST_FILENAMES[filename] for filename in TEST_FILENAMES.keys()]) def test_file_pattern(self, reader_configs, filenames): """Test file pattern matching.""" from satpy.readers import load_reader @@ -641,8 +641,8 @@ def test_file_pattern(self, reader_configs, filenames): files = reader.select_files_from_pathnames(filenames) assert len(files) == 1 - @pytest.mark.parametrize("filenames", [_test_filenames["fdhsi"][0].replace("BODY", "TRAIL"), - _test_filenames["hrfi"][0].replace("BODY", "TRAIL")]) + @pytest.mark.parametrize("filenames", [TEST_FILENAMES["fdhsi"][0].replace("BODY", "TRAIL"), + TEST_FILENAMES["hrfi"][0].replace("BODY", "TRAIL")]) def test_file_pattern_for_TRAIL_file(self, reader_configs, filenames): """Test file pattern matching for TRAIL files, which should not be picked up.""" from satpy.readers import 
load_reader @@ -880,7 +880,7 @@ class TestFCIL1cNCReaderBadData: def test_handling_bad_data_ir(self, reader_configs, caplog): """Test handling of bad IR data.""" with mocked_basefilehandler(FakeFCIFileHandlerWithBadData): - reader = _get_reader_with_filehandlers(_test_filenames["fdhsi"], reader_configs) + reader = _get_reader_with_filehandlers(TEST_FILENAMES["fdhsi"], reader_configs) with caplog.at_level(logging.ERROR): reader.load([make_dataid( name="ir_105", @@ -890,7 +890,7 @@ def test_handling_bad_data_ir(self, reader_configs, caplog): def test_handling_bad_data_vis(self, reader_configs, caplog): """Test handling of bad VIS data.""" with mocked_basefilehandler(FakeFCIFileHandlerWithBadData): - reader = _get_reader_with_filehandlers(_test_filenames["fdhsi"], reader_configs) + reader = _get_reader_with_filehandlers(TEST_FILENAMES["fdhsi"], reader_configs) with caplog.at_level(logging.ERROR): reader.load([make_dataid( name="vis_06", @@ -904,7 +904,7 @@ class TestFCIL1cNCReaderBadDataFromIDPF: def test_handling_bad_earthsun_distance(self, reader_configs): """Test handling of bad earth-sun distance data.""" with mocked_basefilehandler(FakeFCIFileHandlerWithBadIDPFData): - reader = _get_reader_with_filehandlers(_test_filenames["fdhsi"], reader_configs) + reader = _get_reader_with_filehandlers(TEST_FILENAMES["fdhsi"], reader_configs) res = reader.load([make_dataid(name=["vis_06"], calibration="reflectance")], pad_data=False) numpy.testing.assert_array_almost_equal(res["vis_06"], 100 * 15 * 1 * np.pi / 50) @@ -912,7 +912,7 @@ def test_handling_bad_earthsun_distance(self, reader_configs): def test_bad_xy_coords(self, reader_configs): """Test that the geolocation computation is correct.""" with mocked_basefilehandler(FakeFCIFileHandlerWithBadIDPFData): - reader = _get_reader_with_filehandlers(_test_filenames["fdhsi"], reader_configs) + reader = _get_reader_with_filehandlers(TEST_FILENAMES["fdhsi"], reader_configs) res = reader.load(["vis_06"], pad_data=False) area_def = res["vis_06"].attrs["area"] From 1d8a0ff0d568a556207821c003f947909b72c984 Mon Sep 17 00:00:00 2001 From: clement laplace Date: Wed, 8 May 2024 12:21:08 +0000 Subject: [PATCH 1370/1416] feat: Add file patterns into the fci_l1c_nc.yaml file to handle the future change in the file name for AF data --- satpy/etc/readers/fci_l1c_nc.yaml | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/satpy/etc/readers/fci_l1c_nc.yaml b/satpy/etc/readers/fci_l1c_nc.yaml index 51a023cc56..897e3750c3 100644 --- a/satpy/etc/readers/fci_l1c_nc.yaml +++ b/satpy/etc/readers/fci_l1c_nc.yaml @@ -82,12 +82,16 @@ file_types: - nir_22_hr - ir_38_hr - ir_105_hr + # Note: In The current file the 'MTI1-FCI-1C' which is a part of the file will be replaced by MTI1+FCI-1C, patterns have been added + # to maanage this issue fci_l1c_af_vis_06: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-1KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: *required-variables @@ -99,6 +103,7 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS04-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS04-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: *required-variables @@ -110,6 +115,7 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS05-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS05-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: *required-variables @@ -121,6 +127,7 @@ file_types: file_patterns: [ 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS08-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS08-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: *required-variables @@ -132,6 +139,7 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS09-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS09-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: *required-variables @@ -143,6 +151,7 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-NIR13-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-NIR13-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: *required-variables @@ -154,6 +163,7 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-NIR16-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-NIR16-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: *required-variables @@ -165,6 +175,7 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-NIR22-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-NIR22-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: *required-variables @@ -176,6 +187,7 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR38-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR38-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: *required-variables @@ -187,6 +199,7 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-WV63-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-WV63-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: *required-variables @@ -198,6 +211,7 @@ file_types: file_patterns: [ 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-WV73-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-WV73-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: *required-variables @@ -209,6 +223,7 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR87-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR87-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: *required-variables @@ -220,6 +235,7 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR97-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR97-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: *required-variables @@ -231,6 +247,7 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR105-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR105-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: *required-variables @@ -242,6 +259,7 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR123-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR123-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: *required-variables @@ -253,6 +271,7 @@ file_types: file_patterns: [ "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR133-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", + "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR133-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", ] expected_segments: 1 required_netcdf_variables: *required-variables From 2a54930b70e355539eaf73f30c40e58faf6d0312 Mon Sep 17 00:00:00 2001 From: clement laplace Date: Wed, 8 May 2024 12:32:13 +0000 Subject: [PATCH 1371/1416] feat: Add comment into the fci_l1c_nc.yaml file --- satpy/etc/readers/fci_l1c_nc.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/readers/fci_l1c_nc.yaml b/satpy/etc/readers/fci_l1c_nc.yaml index 897e3750c3..dee588aff9 100644 --- a/satpy/etc/readers/fci_l1c_nc.yaml +++ b/satpy/etc/readers/fci_l1c_nc.yaml @@ -83,7 +83,7 @@ file_types: - ir_38_hr - ir_105_hr # Note: In The current file the 'MTI1-FCI-1C' which is a part of the file will be replaced by MTI1+FCI-1C, patterns have been added - # to maanage this issue + # to maanage this fci_l1c_af_vis_06: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: From be6fd983f6ea6c08d81d20c661cd4b8faaefc121 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 9 May 2024 09:34:56 -0500 Subject: [PATCH 1372/1416] Switch back to upstream coveralls action --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff 
--git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index bc13fd9a4f..1f14aa2421 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -117,7 +117,7 @@ jobs: env_vars: OS,PYTHON_VERSION,UNSTABLE - name: Coveralls Parallel - uses: djhoese/coveralls-python-action@git-safe-dir + uses: AndreaMiras/coveralls-python-action@develop with: flag-name: run-${{ matrix.test_number }} parallel: true @@ -143,6 +143,6 @@ jobs: runs-on: ubuntu-latest steps: - name: Coveralls Finished - uses: djhoese/coveralls-python-action@git-safe-dir + uses: AndreMiras/coveralls-python-action@develop with: parallel-finished: true From 8b653213f2d7085ca7b94842ec4eb03a34c3247d Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 9 May 2024 09:38:25 -0500 Subject: [PATCH 1373/1416] Fix typo in coveralls action name --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 1f14aa2421..15695cddd0 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -117,7 +117,7 @@ jobs: env_vars: OS,PYTHON_VERSION,UNSTABLE - name: Coveralls Parallel - uses: AndreaMiras/coveralls-python-action@develop + uses: AndreMiras/coveralls-python-action@develop with: flag-name: run-${{ matrix.test_number }} parallel: true From 174261655ce8a5036d8814610fe5ac5f2284c76c Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 9 May 2024 09:50:19 -0500 Subject: [PATCH 1374/1416] Remove coveralls debug from CI --- .github/workflows/ci.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 15695cddd0..c472709bc8 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -121,7 +121,6 @@ jobs: with: flag-name: run-${{ matrix.test_number }} parallel: true - debug: true if: runner.os == 'Linux' - name: Run behaviour tests From f71d65e8570f23d30ff68c689a150d39a21ed5b8 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 6 May 2024 21:33:47 +0000 Subject: [PATCH 1375/1416] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.3.5 → v0.4.3](https://github.com/astral-sh/ruff-pre-commit/compare/v0.3.5...v0.4.3) - [github.com/pre-commit/pre-commit-hooks: v4.5.0 → v4.6.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.5.0...v4.6.0) - [github.com/pre-commit/mirrors-mypy: v1.9.0 → v1.10.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.9.0...v1.10.0) --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index eadd0f2c55..53db32e42a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,11 +3,11 @@ fail_fast: false repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. 
- rev: 'v0.3.5' + rev: 'v0.4.3' hooks: - id: ruff - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 + rev: v4.6.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer @@ -19,7 +19,7 @@ repos: - id: bandit args: [--ini, .bandit] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.9.0' # Use the sha / tag you want to point at + rev: 'v1.10.0' # Use the sha / tag you want to point at hooks: - id: mypy additional_dependencies: From 32df7500f056436c0ff765c5be98c80efc4ea35f Mon Sep 17 00:00:00 2001 From: isotr0py <2037008807@qq.com> Date: Sat, 11 May 2024 13:13:42 +0800 Subject: [PATCH 1376/1416] Add netcdf4 to goci2 optional dependency --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index ea3c094615..33086ffe85 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,6 +40,7 @@ avhrr_l1b_eps = ["defusedxml"] avhrr_l1b_gaclac = ["pygac >= 1.3.0"] modis_l1b = ["pyhdf", "python-geotiepoints >= 1.1.7"] geocat = ["pyhdf"] +goci2 = ["netCDF4 >= 1.1.8"] acspo = ["netCDF4 >= 1.1.8"] clavrx = ["netCDF4 >= 1.1.8"] viirs_l1b = ["netCDF4 >= 1.1.8"] From e9aa4ad18d730aeaec76975183ea09d9d816dd68 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Mon, 13 May 2024 12:14:05 +0800 Subject: [PATCH 1377/1416] Update mersi_l1b.py --- satpy/readers/mersi_l1b.py | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py index 1a2353fbf2..84e1fb43cf 100644 --- a/satpy/readers/mersi_l1b.py +++ b/satpy/readers/mersi_l1b.py @@ -131,10 +131,9 @@ def get_dataset(self, dataset_id, ds_info): file_key = ds_info.get("file_key", dataset_id["name"]) band_index = ds_info.get("band_index") data = self[file_key] - if band_index is not None: - data = data[band_index] - if data.ndim >= 2: - data = data.rename({data.dims[-2]: "y", data.dims[-1]: "x"}) + data = data[band_index] if band_index is not None else data + data = data.rename({data.dims[-2]: "y", data.dims[-1]: "x"}) if data.ndim >= 2 else data + attrs = data.attrs.copy() # avoid contaminating other band loading attrs.update(ds_info) if "rows_per_scan" in self.filetype_info: @@ -267,12 +266,10 @@ def _get_bt_dataset(self, data, calibration_index, wave_number): """ # pass the dask array bt_data = rad2temp(wave_number, data.data * 1e-5) # brightness temperature - if isinstance(bt_data, np.ndarray): - # old versions of pyspectral produce numpy arrays - data.data = da.from_array(bt_data, chunks=data.data.chunks) - else: - # new versions of pyspectral can do dask arrays - data.data = bt_data + + # old versions of pyspectral produce numpy arrays + # new versions of pyspectral can do dask arrays + data.data = da.from_array(bt_data, chunks=data.data.chunks) if isinstance(bt_data, np.ndarray) else bt_data # Some BT bands seem to have 0 in the first 10 columns # and it is an invalid measurement, so let's mask From f176ec3887bd64e395fcf0c243dd4992ddac2b41 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 13 May 2024 08:51:54 +0200 Subject: [PATCH 1378/1416] Add numpy rules to ruff --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index ea3c094615..01abbbd1ac 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -132,7 +132,7 @@ line-length = 120 [tool.ruff.lint] # See https://docs.astral.sh/ruff/rules/ # In the future, add "B", "S", "N" -select = ["A", "D", "E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"] +select = ["A", "D", "E", "W", "F", "I", 
"PT", "TID", "C90", "Q", "T10", "T20", "NPY"] [tool.ruff.lint.per-file-ignores] "satpy/tests/*" = ["S101"] # assert allowed in tests From 809107edff658d18141c2e7378f29d6521fa1e3a Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 15 May 2024 12:02:12 +0800 Subject: [PATCH 1379/1416] reverse --- satpy/etc/readers/msi_safe.yaml | 57 +- satpy/readers/msi_safe.py | 66 +- satpy/tests/reader_tests/test_msi_safe.py | 848 +++------------------- 3 files changed, 168 insertions(+), 803 deletions(-) diff --git a/satpy/etc/readers/msi_safe.yaml b/satpy/etc/readers/msi_safe.yaml index cc39c26a74..20b324a036 100644 --- a/satpy/etc/readers/msi_safe.yaml +++ b/satpy/etc/readers/msi_safe.yaml @@ -1,8 +1,8 @@ reader: name: msi_safe - short_name: MSI SAFE L1C - long_name: Sentinel-2 A and B MSI L1C data in SAFE format - description: SAFE Reader for MSI L1C data (Sentinel-2) + short_name: MSI SAFE + long_name: Sentinel-2 A and B MSI data in SAFE format, supporting L1C format only. + description: SAFE Reader for MSI data (Sentinel-2) status: Nominal supports_fsspec: false sensors: [msi] @@ -10,18 +10,20 @@ reader: reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: - l1c_safe_granule: + safe_granule_l1c: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C - file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}.jp2'] - requires: [l1c_safe_metadata, l1c_safe_tile_metadata] - l1c_safe_tile_metadata: + file_patterns: ['{fmission_id:3s}_MSI{proclevel:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}.jp2'] + requires: [safe_metadata, safe_tile_metadata] + safe_tile_metadata: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSITileMDXML - file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml'] - l1c_safe_metadata: + file_patterns: ['{fmission_id:3s}_MSI{proclevel:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml'] + safe_metadata: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIMDXML - file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSIL1C.xml'] + file_patterns: ['{fmission_id:3s}_MSI{proclevel:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSIL1C.xml'] + datasets: + B01: name: B01 sensor: msi @@ -37,7 +39,7 
@@ datasets: counts: standard_name: counts units: "1" - file_type: l1c_safe_granule + file_type: safe_granule_l1c B02: name: B02 @@ -54,7 +56,7 @@ datasets: counts: standard_name: counts units: "1" - file_type: l1c_safe_granule + file_type: safe_granule_l1c B03: name: B03 @@ -71,7 +73,7 @@ datasets: counts: standard_name: counts units: "1" - file_type: l1c_safe_granule + file_type: safe_granule_l1c B04: name: B04 @@ -88,7 +90,7 @@ datasets: counts: standard_name: counts units: "1" - file_type: l1c_safe_granule + file_type: safe_granule_l1c B05: name: B05 @@ -105,7 +107,7 @@ datasets: counts: standard_name: counts units: "1" - file_type: l1c_safe_granule + file_type: safe_granule_l1c B06: name: B06 @@ -122,7 +124,7 @@ datasets: counts: standard_name: counts units: "1" - file_type: l1c_safe_granule + file_type: safe_granule_l1c B07: name: B07 @@ -139,7 +141,7 @@ datasets: counts: standard_name: counts units: "1" - file_type: l1c_safe_granule + file_type: safe_granule_l1c B08: name: B08 @@ -156,7 +158,7 @@ datasets: counts: standard_name: counts units: "1" - file_type: l1c_safe_granule + file_type: safe_granule_l1c B8A: name: B8A @@ -173,7 +175,7 @@ datasets: counts: standard_name: counts units: "1" - file_type: l1c_safe_granule + file_type: safe_granule_l1c B09: name: B09 @@ -190,7 +192,7 @@ datasets: counts: standard_name: counts units: "1" - file_type: l1c_safe_granule + file_type: safe_granule_l1c B10: name: B10 @@ -207,7 +209,7 @@ datasets: counts: standard_name: counts units: "1" - file_type: l1c_safe_granule + file_type: safe_granule_l1c B11: name: B11 @@ -224,7 +226,7 @@ datasets: counts: standard_name: counts units: "1" - file_type: l1c_safe_granule + file_type: safe_granule_l1c B12: name: B12 @@ -241,30 +243,31 @@ datasets: counts: standard_name: counts units: "1" - file_type: l1c_safe_granule + file_type: safe_granule_l1c + solar_zenith_angle: name: solar_zenith_angle resolution: [10, 20, 60] - file_type: l1c_safe_tile_metadata + file_type: safe_tile_metadata xml_tag: Sun_Angles_Grid/Zenith solar_azimuth_angle: name: solar_azimuth_angle resolution: [10, 20, 60] - file_type: l1c_safe_tile_metadata + file_type: safe_tile_metadata xml_tag: Sun_Angles_Grid/Azimuth satellite_azimuth_angle: name: satellite_azimuth_angle resolution: [10, 20, 60] - file_type: l1c_safe_tile_metadata + file_type: safe_tile_metadata xml_tag: Viewing_Incidence_Angles_Grids xml_item: Azimuth satellite_zenith_angle: name: satellite_zenith_angle resolution: [10, 20, 60] - file_type: l1c_safe_tile_metadata + file_type: safe_tile_metadata xml_tag: Viewing_Incidence_Angles_Grids xml_item: Zenith diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index d0aa94538c..5ec5ff3ea0 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -28,16 +28,14 @@ reader_kwargs={'mask_saturated': False}) scene.load(['B01']) -L1C/L2A format description for the files read here: +L1C format description for the files read here: - https://sentinels.copernicus.eu/documents/247904/685211/S2-PDGS-TAS-DI-PSD-V14.9.pdf/3d3b6c9c-4334-dcc4-3aa7-f7c0deffbaf7?t=1643013091529 - -Please note: for L2A datasets, the band name has been fixed with a "_L2A" suffix. Do not change it in the YAML file or -the reader can't recogonize it and nothing will be loaded. 
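For reference, the L1C calibration kept by this revert (see calibrate_to_reflectances further down in the patch) converts raw digital numbers to reflectance in percent. A self-contained numpy sketch of that arithmetic; the offset, quantification and fill values mirror what the reader pulls from MTD_MSIL1C.xml (QUANTIFICATION_VALUE, Radiometric_Offset_List, and the NODATA/SATURATED special values), and the concrete numbers used here are typical baseline values assumed for the demo:

    import numpy as np

    def dn_to_reflectance_percent(counts, band_offset=-1000.0,
                                  quantification=10000.0, mask_saturated=True):
        # NODATA (0) becomes NaN; SATURATED (65535) becomes +inf when masked,
        # mirroring SAFEMSIMDXML._sanitize_data in the reader code below.
        counts = counts.astype("float64")
        counts[counts == 0] = np.nan
        if mask_saturated:
            counts[counts == 65535] = np.inf
        return (counts + band_offset) / quantification * 100.0

    print(dn_to_reflectance_percent(np.array([0, 1000, 11000, 65535])))
    # [ nan   0. 100.  inf]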
+ https://sentinels.copernicus.eu/documents/247904/0/Sentinel-2-product-specifications-document-V14-9.pdf/ """ import logging +from datetime import datetime import dask.array as da import defusedxml.ElementTree as ET @@ -66,28 +64,21 @@ def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, mask_s super(SAFEMSIL1C, self).__init__(filename, filename_info, filetype_info) del mask_saturated - self._start_time = filename_info["observation_time"] - self._end_time = filename_info["observation_time"] self._channel = filename_info["band_name"] - self.process_level = filename_info["process_level"] self._tile_mda = tile_mda self._mda = mda self.platform_name = PLATFORMS[filename_info["fmission_id"]] + self._start_time = self._tile_mda.start_time() + self._end_time = filename_info["observation_time"] + def get_dataset(self, key, info): """Load a dataset.""" - if self.process_level == "L1C": - if self._channel != key["name"]: - return - else: - if self._channel + "_L2A" != key["name"]: - return + if self._channel != key["name"]: + return logger.debug("Reading %s.", key["name"]) - proj = self._read_from_file(key) - if proj is None: - return proj.attrs = info.copy() proj.attrs["units"] = "%" proj.attrs["platform_name"] = self.platform_name @@ -102,8 +93,6 @@ def _read_from_file(self, key): return self._mda.calibrate_to_radiances(proj, self._channel) if key["calibration"] == "counts": return self._mda._sanitize_data(proj) - if key["calibration"] in ["aerosol_thickness", "water_vapor"]: - return self._mda.calibrate_to_atmospheric(proj, self._channel) @property def start_time(self): @@ -117,13 +106,8 @@ def end_time(self): def get_area_def(self, dsid): """Get the area def.""" - if self.process_level == "L1C": - if self._channel != dsid["name"]: - return - else: - if self._channel + "_L2A" != dsid["name"]: - return - + if self._channel != dsid["name"]: + return return self._tile_mda.get_area_def(dsid) @@ -137,7 +121,6 @@ def __init__(self, filename, filename_info, filetype_info, mask_saturated=True): self._end_time = filename_info["observation_time"] self.root = ET.parse(self.filename) self.tile = filename_info["dtile_number"] - self.process_level = filename_info["process_level"] self.platform_name = PLATFORMS[filename_info["fmission_id"]] self.mask_saturated = mask_saturated import bottleneck # noqa @@ -159,23 +142,10 @@ class SAFEMSIMDXML(SAFEMSIXMLMetadata): def calibrate_to_reflectances(self, data, band_name): """Calibrate *data* using the radiometric information for the metadata.""" - quantification = int(self.root.find(".//QUANTIFICATION_VALUE").text) if self.process_level == "L1C" else \ - int(self.root.find(".//BOA_QUANTIFICATION_VALUE").text) + quantification = int(self.root.find(".//QUANTIFICATION_VALUE").text) data = self._sanitize_data(data) return (data + self.band_offset(band_name)) / quantification * 100 - def calibrate_to_atmospheric(self, data, band_name): - """Calibrate L2A AOT/WVP product.""" - atmospheric_bands = ["AOT", "WVP"] - if self.process_level == "L1C": - return - elif self.process_level == "L2A" and band_name not in atmospheric_bands: - return - - quantification = float(self.root.find(f".//{band_name}_QUANTIFICATION_VALUE").text) - data = self._sanitize_data(data) - return data / quantification - def _sanitize_data(self, data): data = data.where(data != self.no_data) if self.mask_saturated: @@ -204,8 +174,7 @@ def band_indices(self): @cached_property def band_offsets(self): """Get the band offsets from the metadata.""" - offsets = 
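The two datetimes defined just below pin down the reader's two time sources: fname_dt is the observation time encoded in the test file name, tilemd_dt the SENSING_TIME from the tile metadata, which the start_time method restored above parses. A minimal sketch of that parsing step, with a sample string assumed to correspond to tilemd_dt:

    from datetime import datetime

    # Same format string as SAFEMSITileMDXML.start_time in the patch above.
    timestr = "2020-10-01T16:34:23.153611Z"
    parsed = datetime.strptime(timestr, "%Y-%m-%dT%H:%M:%S.%fZ")
    assert parsed == datetime(2020, 10, 1, 16, 34, 23, 153611)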
self.root.find(".//Radiometric_Offset_List") if self.process_level == "L1C" else \ - self.root.find(".//BOA_ADD_OFFSET_VALUES_LIST") + offsets = self.root.find(".//Radiometric_Offset_List") if offsets is not None: band_offsets = {int(off.attrib["band_id"]): float(off.text) for off in offsets} else: @@ -302,6 +271,11 @@ def _shape(self, resolution): cols = int(self.geocoding.find('Size[@resolution="' + str(resolution) + '"]/NCOLS').text) return cols, rows + def start_time(self): + """Get the observation time from the tile metadata.""" + timestr = self.root.find(".//SENSING_TIME").text + return datetime.strptime(timestr, "%Y-%m-%dT%H:%M:%S.%fZ") + @staticmethod def _do_interp(minterp, xcoord, ycoord): interp_points2 = np.vstack((ycoord.ravel(), xcoord.ravel())) @@ -328,11 +302,9 @@ def interpolate_angles(self, angles, resolution): def _get_coarse_dataset(self, key, info): """Get the coarse dataset refered to by `key` from the XML data.""" angles = self.root.find(".//Tile_Angles") - if key["name"] in ["solar_zenith_angle", "solar_azimuth_angle", - "solar_zenith_angle_l2a", "solar_azimuth_angle_l2a"]: + if key["name"] in ["solar_zenith_angle", "solar_azimuth_angle"]: angles = self._get_solar_angles(angles, info) - elif key["name"] in ["satellite_zenith_angle", "satellite_azimuth_angle", - "satellite_zenith_angle_l2a", "satellite_azimuth_angle_l2a"]: + elif key["name"] in ["satellite_zenith_angle", "satellite_azimuth_angle"]: angles = self._get_satellite_angles(angles, info) else: angles = None diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 59c3564fd4..b919278bf5 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -17,6 +17,7 @@ # satpy. If not, see . """Module for testing the satpy.readers.msi_safe module.""" import unittest.mock as mock +from datetime import datetime from io import BytesIO, StringIO import numpy as np @@ -25,7 +26,11 @@ from satpy.tests.utils import make_dataid -mtd_l1c_tile_xml = b""" +# Datetimes used for checking start time is correctly set. 
+fname_dt = datetime(2020, 10, 1, 18, 35, 41) +tilemd_dt = datetime(2020, 10, 1, 16, 34, 23, 153611) + +mtd_tile_xml = b""" @@ -575,6 +580,7 @@ """ # noqa + mtd_l1c_old_xml = """ @@ -860,597 +866,27 @@ """ # noqa -mtd_l2a_xml = """ - - - - 2024-04-11T03:05:21.024Z - 2024-04-11T03:05:21.024Z - S2A_MSIL2A_20240411T030521_N0510_R075_T50TMK_20240411T080950.SAFE - Level-2A - S2MSI2A - 05.10 - https://doi.org/10.5270/S2_-znk9xsj - 2024-04-11T08:09:50.000000Z - Not applicable - Not applicable - - Sentinel-2A - INS-NOBS - 2024-04-11T03:05:21.024Z - 75 - DESCENDING - - -SAFE_COMPACT - - - - - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_B02_10m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_B03_10m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_B04_10m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_B08_10m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_TCI_10m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_AOT_10m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_WVP_10m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B01_20m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B02_20m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B03_20m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B04_20m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B05_20m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B06_20m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B07_20m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B8A_20m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B11_20m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B12_20m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_TCI_20m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_AOT_20m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_WVP_20m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_SCL_20m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B01_60m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B02_60m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B03_60m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B04_60m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B05_60m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B06_60m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B07_60m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B8A_60m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B09_60m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B11_60m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B12_60m - 
GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_TCI_60m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_AOT_60m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_WVP_60m - GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_SCL_60m - - - - - - - NODATA - 0 - - - SATURATED - 65535 - - - 3 - 2 - 1 - - - 10000 - 1000.0 - 1000.0 - - - -1000 - -1000 - -1000 - -1000 - -1000 - -1000 - -1000 - -1000 - -1000 - -1000 - -1000 - -1000 - -1000 - - - 0.998279632507911 - - 1884.69 - 1959.66 - 1823.24 - 1512.06 - 1424.64 - 1287.61 - 1162.08 - 1041.63 - 955.32 - 812.92 - 367.15 - 245.59 - 85.25 - - - - - 60 - - 412 - 456 - 442.7 - - - 1 - 0.001775742 0.004073061 0.003626143 0.003515199 0.005729163 0.003780292 0.002636732 0.001262113 0.001987583 0.001368913 0.001250444 0.000463454 0.000814293 0.001376431 0.001485086 0.001823735 0.001626817 0.004392062 0.029008099 0.11874593 0.32387506 0.57281921 0.71472749 0.76196778 0.78929702 0.80862387 0.81089382 0.82419876 0.85415811 0.87079088 0.88731097 0.92619924 0.98228149 1 0.9752382 0.93596338 0.88997148 0.85021048 0.82569453 0.78390239 0.61417422 0.33007109 0.12410831 0.04365694 0.014749595 - - - - 10 - - 456 - 533 - 492.7 - - - 1 - 0.04255531 0.0722983 0.15374322 0.32799225 0.55336788 0.71011166 0.75285179 0.75232691 0.75668081 0.76326948 0.76239425 0.7852515 0.81546669 0.86179176 0.89282599 0.9195221 0.91900649 0.91315754 0.90035366 0.88989693 0.8823246 0.87606118 0.88429987 0.90695544 0.93232085 0.93947252 0.94383543 0.92204086 0.8860231 0.84743609 0.81251687 0.7823971 0.7731087 0.77209054 0.78742652 0.81217177 0.84605052 0.88767996 0.92793997 0.95069235 0.96573311 0.96938253 0.96570294 0.95832003 0.95405064 0.95178268 0.95699722 0.96556515 0.9770514 0.97709574 0.97436606 0.95903183 0.93506318 0.90190134 0.87165792 0.84402444 0.82280852 0.81536043 0.82057639 0.8395149 0.86992171 0.91526205 0.96067028 0.99163699 1 0.98356097 0.91130763 0.74018256 0.50395858 0.3050155 0.18004605 0.10738342 0.06593592 0.04207746 0.02662129 0.0143396 0.00265779 0.00081822 - - - - 10 - - 538 - 583 - 559.8 - - - 1 - 0.01448181 0.03422251 0.07346335 0.15444843 0.31661425 0.55322279 0.74859406 0.84890306 0.89772216 0.9215368 0.92572845 0.91122688 0.88818924 0.86523756 0.84718187 0.8387572 0.84459081 0.86219653 0.88838714 0.92443236 0.96017974 0.98685516 1 0.9986008 0.98076472 0.94522089 0.8981778 0.85580323 0.81841734 0.78862048 0.76460653 0.74963745 0.75055111 0.76137888 0.78244479 0.79890086 0.81016957 0.81408886 0.77358596 0.62881065 0.40397555 0.21542098 0.10715281 0.04792877 0.01848693 0.00108588 - - - - 10 - - 646 - 684 - 664.6 - - - 1 - 0.00141521 0.02590238 0.11651178 0.39088616 0.74959342 0.94485805 0.98011173 0.99406309 1 0.99545475 0.99052772 0.97733476 0.94055988 0.87894956 0.81629384 0.77345952 0.75448766 0.75991531 0.7826343 0.8101689 0.83612975 0.86125424 0.88609106 0.91138767 0.93405146 0.95042063 0.9592573 0.96039555 0.95913395 0.95809013 0.95527459 0.94376465 0.89490799 0.74426308 0.476777 0.22960399 0.08009118 0.02617076 0.00415242 - - - - 20 - - 695 - 714 - 704.1 - - - 1 - 0.02835786 0.12369337 0.39378774 0.76113071 0.97108502 0.99889523 1 0.99412258 0.98321789 0.96704093 0.94847389 0.92714833 0.90372458 0.88614713 0.86723745 0.79075319 0.58840332 0.26334833 0.05675422 0.00618833 - - - - 20 - - 731 - 749 - 740.5 - - - 1 - 0.00171088 0.05467153 0.25806676 0.64722098 0.89218999 0.90232877 0.91508768 0.94115846 0.96299993 0.97510481 0.9770217 
0.98736251 1 0.98880277 0.97179916 0.90126739 0.60672391 0.20520227 0.0267569 - - - - 20 - - 769 - 797 - 782.8 - - - 1 - 0.00045899 0.0117201 0.05219715 0.16561733 0.36903355 0.63685453 0.86119638 0.97002897 0.99119602 0.99897921 1 0.97725155 0.92572385 0.86605804 0.81969611 0.79407674 0.79111029 0.80431552 0.81902721 0.82571292 0.82011829 0.79222195 0.72054559 0.58767794 0.41430355 0.23088817 0.09850282 0.02736551 0.00516235 - - - - 10 - - 760 - 907 - 832.8 - - - 1 - 0.00067259 0.00388856 0 0 0 0 0 0 0 0 0 0 0 0.00028956 0.00702964 0.01752391 0.03231111 0.05328661 0.08299885 0.12748502 0.19591065 0.30246323 0.43553954 0.57141637 0.69766701 0.80303852 0.89115744 0.95284584 0.98894161 1 0.98840653 0.96389216 0.94207967 0.93694643 0.94227343 0.95395718 0.96828896 0.97966549 0.9854444 0.98592681 0.98391181 0.97793903 0.97722771 0.97810609 0.98144486 0.98764558 0.98857708 0.9862422 0.98070921 0.97078624 0.95721089 0.93865821 0.91672388 0.89620759 0.872888 0.85160331 0.8246394 0.80078117 0.7823386 0.76360274 0.74962771 0.7387221 0.73079407 0.72271237 0.72507708 0.72563856 0.72304217 0.72229211 0.71616364 0.71159446 0.70826954 0.70157205 0.69924532 0.70093762 0.70692733 0.71824001 0.73124634 0.7484061 0.76818541 0.78394807 0.7968381 0.80260206 0.8045194 0.80240918 0.79699072 0.78920304 0.77691621 0.76518406 0.75119717 0.73700357 0.72262399 0.70412578 0.68410805 0.66474528 0.64736891 0.63005125 0.61564222 0.60249557 0.58988992 0.57993399 0.57136506 0.56094242 0.55235105 0.54568236 0.53958052 0.53510215 0.53093675 0.53016508 0.52984662 0.53036682 0.53211463 0.53271918 0.53246806 0.53331158 0.5319278 0.53051055 0.52951499 0.52996848 0.53253373 0.53705085 0.54235344 0.54912497 0.55523055 0.56011135 0.55767999 0.54821984 0.53144613 0.50763528 0.47811224 0.45092793 0.42798466 0.41051405 0.40039139 0.40087302 0.40829375 0.42086556 0.43007022 0.42456692 0.39136817 0.33009008 0.25720509 0.18189031 0.11650668 0.07031579 0.04275381 0.02593154 0.01574394 0.00394326 - - - - 20 - - 837 - 881 - 864.7 - - - 1 - 0.00030097 0 0 0 0 0 0 0 0 0 0.00157217 0.00249886 0.01332037 0.02614866 0.05260479 0.10779709 0.22160755 0.39721628 0.60986885 0.81658883 0.9322445 0.97210033 0.97545482 0.97538048 0.97328205 0.97607828 0.98034955 0.98690928 0.99087465 0.99741818 0.99984673 0.99939141 0.99587928 0.99541228 1 0.99640762 0.92359433 0.74137684 0.48965971 0.25020643 0.11221246 0.04755984 0.02297815 0.01061438 0.00108149 - - - - 60 - - 932 - 958 - 945.1 - - - 1 - 0.01662953 0.06111857 0.17407094 0.38946454 0.6645915 0.87454114 0.93695988 0.96751014 0.9893391 0.9951269 1 0.97845762 0.98069118 0.9922335 0.98798379 0.99428313 0.98348041 0.97820013 0.95023367 0.95299604 0.92240308 0.85573828 0.70970227 0.46429542 0.21538427 0.06534121 0.01625596 - - - - 60 - - 1337 - 1412 - 1373.5 - - - 1 - 0.00024052 5.404e-05 3.052e-05 2.872e-05 7.632e-05 0.00010949 8.804e-05 0.00012356 0.00017424 0.0003317 0.00036891 0.0004467 0.00065919 0.0010913 0.00196903 0.00373668 0.00801754 0.01884719 0.04466732 0.10165546 0.20111776 0.34284841 0.50710992 0.6632068 0.78377143 0.86153862 0.91000261 0.94193255 0.96182259 0.97365119 0.98169786 0.98795826 0.99283342 0.99649788 0.99906011 1 0.99907734 0.99601604 0.9909083 0.98479854 0.97802142 0.97030114 0.96080954 0.94849765 0.93314108 0.91482336 0.8937997 0.86825426 0.83023193 0.76384193 0.65440009 0.50671604 0.35014737 0.21799972 0.12643091 0.06768988 0.0322709 0.013544 0.00544557 0.00237642 0.00111267 0.00053796 0.0003457 0.00017488 0.00021619 0.00019479 0.00010421 5.919e-05 5.109e-05 6.115e-05 5.527e-05 
3.856e-05 3.147e-05 0.00012289 0.0001089 2.502e-05 - - - - 20 - - 1539 - 1682 - 1613.7 - - - 1 - 6.79e-06 6.66e-06 8e-06 2.734e-05 3.685e-05 8.851e-05 0.00014522 0.00024812 0.00047627 0.00056335 0.00065326 0.00089835 0.00114664 0.00165604 0.00241611 0.00350246 0.00524274 0.0081538 0.01237062 0.0186097 0.02721853 0.03879155 0.05379167 0.07353187 0.09932758 0.1334178 0.18029249 0.24484994 0.32834511 0.42749961 0.53576798 0.64570396 0.74245998 0.81447017 0.85866596 0.87924777 0.88665266 0.888727 0.89105732 0.89725046 0.90632982 0.91627527 0.9263751 0.93515828 0.94226446 0.94739906 0.95131987 0.95416808 0.95635128 0.95813297 0.96062738 0.96344083 0.96577764 0.96818134 0.97104025 0.97343195 0.97597444 0.97865413 0.97994672 0.98064126 0.98094979 0.98143338 0.98123856 0.98068083 0.98033995 0.98101894 0.98268503 0.98507875 0.98777658 0.9903608 0.99202087 0.9933069 0.99256744 0.99044883 0.98717314 0.98353656 0.9800432 0.97617287 0.97253451 0.96977033 0.96762556 0.9662626 0.96572411 0.96592079 0.96729798 0.96975438 0.97337748 0.97862858 0.98345358 0.98765317 0.9919238 0.99554959 0.99767411 0.99866451 0.99941783 0.99930984 0.99885298 0.99913515 0.99973164 0.99973592 1 0.9998438 0.9967639 0.99175576 0.9859206 0.97887302 0.97029262 0.96135891 0.95379752 0.94709017 0.94228614 0.93919512 0.93616637 0.92889205 0.9129921 0.88158383 0.82602164 0.74412949 0.64281662 0.53483955 0.42772166 0.32439525 0.23488131 0.16445229 0.11056237 0.07271886 0.04634859 0.02949618 0.01941871 0.0133487 0.00934594 0.00654231 0.00487921 0.00341903 0.00249864 0.00196431 0.00142754 0.00105878 0.00049978 0.00022833 0.00015999 3.415e-05 4.517e-05 1.313e-05 - - - - 20 - - 2078 - 2320 - 2202.4 - - - 1 - 0.00063835 0.00102286 0.00288712 0.00399879 0.00658916 0.00765458 0.00799918 0.00853524 0.00929493 0.00999614 0.01096645 0.01208363 0.01335837 0.01501119 0.01711931 0.01977307 0.02332743 0.02765779 0.03320435 0.04020464 0.04886709 0.0596238 0.07315348 0.09050885 0.11143964 0.13686671 0.16776886 0.20341457 0.24281992 0.28484195 0.32711894 0.36834301 0.40794043 0.4447145 0.47647207 0.50303896 0.52524762 0.54328057 0.55717994 0.5685619 0.57895708 0.58860881 0.59881758 0.60990899 0.62128986 0.63421311 0.64847648 0.66363778 0.67997936 0.69609688 0.71189957 0.7269499 0.74124079 0.75734734 0.77201504 0.78552587 0.79818641 0.80962939 0.81965718 0.82855741 0.83668178 0.84440292 0.85106862 0.85321701 0.85471321 0.8561428 0.85778963 0.8594989 0.86142876 0.86322831 0.86511218 0.8672932 0.86967076 0.87427502 0.87856212 0.88241466 0.88590611 0.8894516 0.89320419 0.8966738 0.89987484 0.90257636 0.90481219 0.90550545 0.90564491 0.90548208 0.90513822 0.90476379 0.90406427 0.90332978 0.90274309 0.90235795 0.90196488 0.90340528 0.90429478 0.90529761 0.90642862 0.90807348 0.91010493 0.91293181 0.91556686 0.91842631 0.92128288 0.92431702 0.92719913 0.92972159 0.93190455 0.93412538 0.93588954 0.93707083 0.93762594 0.93828534 0.93763643 0.94042634 0.94250397 0.94324531 0.94301861 0.94210283 0.94061808 0.93841726 0.93665003 0.93524569 0.93301102 0.92686708 0.92104485 0.91547175 0.91100989 0.90828339 0.9072733 0.90817907 0.91115631 0.91617845 0.92284525 0.92059829 0.91947472 0.91947973 0.92126575 0.92451632 0.92772589 0.93196884 0.93676408 0.94147739 0.94679545 0.95119533 0.95443018 0.95704142 0.95972628 0.9625372 0.96485326 0.96603599 0.96664138 0.96630455 0.96545713 0.96484036 0.96365512 0.96169531 0.95944859 0.95732078 0.95513625 0.95355574 0.95273072 0.95217795 0.95172542 0.9521403 0.95263595 0.95405248 0.95707559 0.96063594 0.96421772 0.96830187 
0.97268597 0.97741944 0.98289489 0.9871429 0.99073348 0.99398244 0.99678431 0.99875181 1 0.9999284 0.9991523 0.99712951 0.99388228 0.98968273 0.98373274 0.97621057 0.96780985 0.95833495 0.94842856 0.93818752 0.9277078 0.91702104 0.90597951 0.89384371 0.88165575 0.86861704 0.85460324 0.84058628 0.82598123 0.80948042 0.79182917 0.7724052 0.74907137 0.72031195 0.68815487 0.65125598 0.6100244 0.56600904 0.52095058 0.47464344 0.42924778 0.38584718 0.34208462 0.30067509 0.26317221 0.22770037 0.19571781 0.16808736 0.14467686 0.12482737 0.10823403 0.09439655 0.08235799 0.07149445 0.0626855 0.05498009 0.04818852 0.04285814 0.03859244 0.03494044 0.03199172 0.02958044 0.02741084 0.02556884 0.02395058 0.02166741 0.0191457 0.01632139 0.0109837 0.00736032 0.00649061 0.00469736 0.00205874 - - - - 4.10137842 - 3.75605469 - 4.18741753 - 4.52205376 - 5.20680393 - 4.8729478 - 4.5356737 - 6.16247757 - 5.13772343 - 8.53898524 - 55.10485389 - 35.30373192 - 106.24732599 - - - SC_NODATA - 0 - - - SC_SATURATED_DEFECTIVE - 1 - - - SC_DARK_FEATURE_SHADOW - 2 - - - SC_CLOUD_SHADOW - 3 - - - SC_VEGETATION - 4 - - - SC_NOT_VEGETATED - 5 - - - SC_WATER - 6 - - - SC_UNCLASSIFIED - 7 - - - SC_CLOUD_MEDIUM_PROBA - 8 - - - SC_CLOUD_HIGH_PROBA - 9 - - - SC_THIN_CIRRUS - 10 - - - SC_SNOW_ICE - 11 - - - - - - - - - 40.64479480422486 115.81682739339685 40.65079881136531 117.1154430676197 39.66155122739065 117.11377991452629 39.655752572676114 115.83386830444628 40.64479480422486 115.81682739339685 - - - POINT - 1 - - - EPSG - GEOGRAPHIC - - - - - S2A_OPER_GIP_INVLOC_MPC__20171206T000000_V20150703T000000_21000101T000000_B00 - S2A_OPER_GIP_LREXTR_MPC__20150605T094736_V20150622T000000_21000101T000000_B00 - S2A_OPER_GIP_ATMIMA_MPC__20150605T094744_V20150622T000000_21000101T000000_B00 - S2A_OPER_GIP_ATMSAD_MPC__20160729T000005_V20150703T000000_21000101T000000_B00 - S2A_OPER_GIP_BLINDP_MPC__20150605T094736_V20150622T000000_21000101T000000_B00 - S2A_OPER_GIP_CLOINV_MPC__20210609T000005_V20210823T030000_21000101T000000_B00 - S2A_OPER_GIP_CLOPAR_MPC__20220120T000001_V20220125T022000_21000101T000000_B00 - S2A_OPER_GIP_CONVER_MPC__20150710T131444_V20150627T000000_21000101T000000_B00 - S2A_OPER_GIP_DATATI_MPC__20151117T131048_V20150703T000000_21000101T000000_B00 - S2A_OPER_GIP_DECOMP_MPC__20121031T075922_V19830101T000000_21000101T000000_B00 - S2__OPER_GIP_EARMOD_MPC__20150605T094736_V20150622T000000_21000101T000000_B00 - S2A_OPER_GIP_ECMWFP_MPC__20121031T075922_V19830101T000000_21000101T000000_B00 - S2A_OPER_GIP_G2PARA_MPC__20231208T000027_V20231213T070000_21000101T000000_B00 - S2A_OPER_GIP_G2PARE_MPC__20150605T094736_V20150622T000000_21000101T000000_B00 - S2A_OPER_GIP_GEOPAR_MPC__20150605T094741_V20150622T000000_21000101T000000_B00 - S2A_OPER_GIP_INTDET_MPC__20220120T000010_V20220125T022000_21000101T000000_B00 - S2A_OPER_GIP_JP2KPA_MPC__20220120T000006_V20220125T022000_21000101T000000_B00 - S2A_OPER_GIP_MASPAR_MPC__20220120T000009_V20220125T022000_21000101T000000_B00 - S2A_OPER_GIP_OLQCPA_MPC__20220715T000042_V20220830T002500_21000101T000000_B00 - S2A_OPER_GIP_PRDLOC_MPC__20180301T130000_V20180305T005000_21000101T000000_B00 - S2A_OPER_GIP_PROBAS_MPC__20240305T000510_V20150622T000000_21000101T000000_B00 - S2A_OPER_GIP_R2ABCA_MPC__20240315T121000_V20240319T003000_21000101T000000_B00 - S2A_OPER_GIP_R2BINN_MPC__20150605T094803_V20150622T000000_21000101T000000_B00 - S2A_OPER_GIP_R2CRCO_MPC__20151023T224715_V20150622T224715_21000101T000000_B00 - S2A_OPER_GIP_R2DECT_MPC__20150605T094742_V20150622T000000_21000101T000000_B09 - 
S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B04 - S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B02 - S2A_OPER_GIP_R2DECT_MPC__20150605T094742_V20150622T000000_21000101T000000_B12 - S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B06 - S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B08 - S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B07 - S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B05 - S2A_OPER_GIP_R2DECT_MPC__20150605T094742_V20150622T000000_21000101T000000_B10 - S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B01 - S2A_OPER_GIP_R2DECT_MPC__20150605T094742_V20150622T000000_21000101T000000_B11 - S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B8A - S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B03 - S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B09 - S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B05 - S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B8A - S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B08 - S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B02 - S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B04 - S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B10 - S2A_OPER_GIP_R2DEFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B01 - S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B12 - S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B03 - S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B11 - S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B07 - S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B06 - S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B05 - S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B10 - S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B09 - S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B8A - S2A_OPER_GIP_R2DENT_MPC__20150605T094741_V20150622T000000_21000101T000000_B01 - S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B08 - S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B06 - S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B02 - S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B12 - S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B04 - S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B03 - S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B07 - S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B11 - S2A_OPER_GIP_R2DEPI_MPC__20230424T160000_V20230426T000000_21000101T000000_B00 - S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B12 - S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B03 - S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B07 - S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B09 - S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B10 - 
S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B01 - S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B05 - S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B8A - S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B06 - S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B04 - S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B11 - S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B02 - S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B08 - S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B10 - S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B05 - S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B04 - S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B06 - S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B08 - S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B03 - S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B01 - S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B12 - S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B11 - S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B02 - S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B07 - S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B8A - S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B09 - S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B05 - S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B08 - S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B8A - S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B12 - S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B10 - S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B07 - S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B01 - S2A_OPER_GIP_R2L2NC_MPC__20150605T094741_V20150622T000000_21000101T000000_B03 - S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B04 - S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B11 - S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B02 - S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B09 - S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B06 - S2A_OPER_GIP_R2NOMO_MPC__20150605T094803_V20150622T000000_21000101T000000_B00 - S2A_OPER_GIP_R2PARA_MPC__20221206T000009_V20221206T073000_21000101T000000_B00 - S2A_OPER_GIP_R2SWIR_MPC__20180406T000021_V20180604T100000_21000101T000000_B00 - S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B12 - S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B09 - S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B05 - S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B02 - S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B03 - S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B8A - S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B06 - 
S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B08 - S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B04 - S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B10 - S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B01 - S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B11 - S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B07 - S2A_OPER_GIP_RESPAR_MPC__20150605T094736_V20150622T000000_21000101T000000_B00 - S2A_OPER_GIP_SPAMOD_MPC__20231122T110026_V20231123T010000_21000101T000000_B00 - S2A_OPER_GIP_TILPAR_MPC__20151209T095117_V20150622T000000_21000101T000000_B00 - S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B8A - S2A_OPER_GIP_VIEDIR_MPC__20151117T131049_V20150703T000000_21000101T000000_B03 - S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B08 - S2A_OPER_GIP_VIEDIR_MPC__20151117T131048_V20150703T000000_21000101T000000_B01 - S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B11 - S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B10 - S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B06 - S2A_OPER_GIP_VIEDIR_MPC__20151117T131049_V20150703T000000_21000101T000000_B04 - S2A_OPER_GIP_VIEDIR_MPC__20151117T131049_V20150703T000000_21000101T000000_B02 - S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B05 - S2A_OPER_GIP_VIEDIR_MPC__20151117T131051_V20150703T000000_21000101T000000_B12 - S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B09 - S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B07 - S2__OPER_GIP_L2ACSC_MPC__20220121T000003_V20220125T022000_21000101T000000_B00 - S2__OPER_GIP_L2ACAC_MPC__20220121T000004_V20220125T022000_21000101T000000_B00 - S2__OPER_GIP_PROBA2_MPC__20231208T000510_V20231213T070000_21000101T000000_B00 - - - CopernicusDEM30 - S2__OPER_AUX_UT1UTC_PDMC_20240404T000000_V20240405T000000_20250404T000000 - - S2__OPER_AUX_ECMWFD_ADG__20240410T120000_V20240410T210000_20240412T150000 - - None - - GlobalSnowMap.tiff - ESACCI-LC-L4-WB-Map-150m-P13Y-2000-v4.0.tif - ESACCI-LC-L4-LCCS-Map-300m-P1Y-2015-v2.0.7.tif - ESACCI-LC-L4-Snow-Cond-500m-MONTHLY-2000-2012-v2.4 - - - 3.500058 - - 0.0 - 0 - - - - PASSED - PASSED - PASSED - PASSED - PASSED - PASSED - - - - - 3.354197 - 0.0 - 0.0 - 8.675177 - 0.268831 - 2.81222 - 83.179593 - 0.992827 - 0.571295 - 0.275278 - 0.038401 - 3.18638 - 0.0 - 0.0 - 0.0 - 0.0 - CAMS - 0.392921 - 1.224094 - AUX_ECMWFT - 357.927923 - - - -""" # noqa - -PROCESS_LEVELS = ["L1C", "oldL1C", "L2A"] -MTD_XMLS = [mtd_l1c_xml, mtd_l1c_old_xml, mtd_l2a_xml] -TILE_XMLS = [mtd_l1c_tile_xml, mtd_l1c_tile_xml, mtd_l1c_tile_xml] - -def xml_builder(process_level, mask_saturated=True, band_name=None): - """Build fake SAFE MTD/Tile XML.""" - from satpy.readers.msi_safe import SAFEMSIMDXML, SAFEMSITileMDXML - filename_info = dict(observation_time=None, dtile_number=None, band_name=band_name, fmission_id="S2A", - process_level=process_level.replace("old", "")) - xml_fh = SAFEMSIMDXML(StringIO(MTD_XMLS[PROCESS_LEVELS.index(process_level)]), - filename_info, mock.MagicMock(), mask_saturated=mask_saturated) - xml_tile_fh = SAFEMSITileMDXML(BytesIO(TILE_XMLS[PROCESS_LEVELS.index(process_level)]), - filename_info, mock.MagicMock()) - return xml_fh, xml_tile_fh - -def jp2_builder(process_level, band_name, mask_saturated=True): - """Build fake 
SAFE jp2 image file.""" - from satpy.readers.msi_safe import SAFEMSIL1C - filename_info = dict(observation_time=None, dtile_number=None, band_name=band_name, fmission_id="S2A", - process_level=process_level.replace("old", "")) - xml_fh, tile_xml_fh = xml_builder(process_level, mask_saturated, band_name) - jp2_fh = SAFEMSIL1C("somefile", filename_info, mock.MagicMock(), xml_fh, tile_xml_fh) - return jp2_fh +class TestMTDXML: + """Test the SAFE MTD XML file handler.""" -class TestTileXML: - """Test the SAFE TILE XML file handler. + def setup_method(self): + """Set up the test case.""" + from satpy.readers.msi_safe import SAFEMSIMDXML, SAFEMSITileMDXML + filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A") + self.xml_tile_fh = SAFEMSITileMDXML(BytesIO(mtd_tile_xml), filename_info, mock.MagicMock()) + self.old_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_old_xml), filename_info, mock.MagicMock()) + self.xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), filename_info, mock.MagicMock(), mask_saturated=True) - Since L1C/L2A share almost the same Tile XML structure, we only use L1C Tile here. + def test_start_time(self): + """Ensure start time is read correctly from XML.""" + assert self.xml_tile_fh.start_time() == tilemd_dt - """ + def test_satellite_zenith_array(self): + """Test reading the satellite zenith array.""" + info = dict(xml_tag="Viewing_Incidence_Angles_Grids", xml_item="Zenith") - @pytest.mark.parametrize(("process_level","angle_name", "angle_tag", "expected"), - [ - ("L1C", "satellite_zenith_angle", ("Viewing_Incidence_Angles_Grids", "Zenith"), - [[11.7128, 11.18397802, 10.27667671, 9.35384969, 8.42850504, + expected_data = np.array([[11.7128, 11.18397802, 10.27667671, 9.35384969, 8.42850504, 7.55445611, 6.65475545, 5.66517232, 4.75893757, 4.04976844], [11.88606009, 10.9799713, 10.07083278, 9.14571825, 8.22607131, 7.35181457, 6.44647222, 5.46144173, 4.56625547, 3.86638233], @@ -1469,123 +905,82 @@ class TestTileXML: [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, - 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837]]), - ("L2A", "solar_zenith_angle_l2a", ("Sun_Angles_Grid", "Zenith"), - [[39.8824, 39.83721367, 39.79230847, 39.74758442, 39.7030415, - 39.65867687, 39.61455566, 39.57061558, 39.52685664, 39.48331372], - [39.78150175, 39.73629896, 39.69128852, 39.64643679, 39.6018404, - 39.5574369, 39.51323286, 39.46920212, 39.4253673, 39.38179377], - [39.6806035, 39.63532838, 39.5902497, 39.54538507, 39.5007087, - 39.45621756, 39.41195347, 39.36779169, 39.3239121, 39.28027381], - [39.57980525, 39.53445664, 39.48931088, 39.44434154, 39.39957879, - 39.35503587, 39.31067408, 39.26649344, 39.22249393, 39.17876143], - [39.479007, 39.43355483, 39.38829092, 39.34328573, 39.29846167, - 39.25381983, 39.2093947, 39.16513007, 39.12109926, 39.07726878], - [39.37820875, 39.33268069, 39.28735495, 39.24224914, 39.19736058, - 39.15267709, 39.1081719, 39.06385068, 39.01973446, 38.97584982], - [39.2774105, 39.23184303, 39.18646737, 39.14130809, 39.09632176, - 39.05153988, 39.00696049, 38.9625713, 38.91842056, 38.87444401], - [39.17671225, 39.13104478, 39.08559031, 39.04034757, 38.99528294, - 38.95039991, 38.9057971, 38.86130793, 38.81705183, 38.77303821], - [39.076014, 39.03026112, 38.98477906, 38.93940875, 38.89425338, - 38.84936063, 38.80464763, 38.76011645, 38.7157479, 38.67164839], - [38.97531575, 38.92950771, 38.88389967, 38.83852091, 38.7933053, - 38.74831897, 38.7034912, 38.65891427, 38.61446851, 
38.57030388]]), - ("L1C", "moon_zenith_angle", ("Sun_Angles_Grid", "Zenith"), None) - ]) - def test_angles(self, process_level, angle_name, angle_tag, expected): - """Test reading angles array.""" - info = dict(xml_tag=angle_tag[0], xml_item=angle_tag[1]) if "satellite" in angle_name else \ - dict(xml_tag=angle_tag[0] + "/" + angle_tag[1]) - xml_tile_fh = xml_builder(process_level)[1] - - res = xml_tile_fh.get_dataset(make_dataid(name=angle_name, resolution=60), info) - if res is not None: - res = res[::200, ::200] - - if res is not None: - np.testing.assert_allclose(res, expected) - else: - assert res is expected - - def test_navigation(self): + 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837]]) + res = self.xml_tile_fh.get_dataset(make_dataid(name="satellite_zenith_angle", + resolution=60), + info)[::200, ::200] + np.testing.assert_allclose(res, expected_data) + + def test_old_xml_calibration(self): + """Test the calibration of older data formats (no offset).""" + fake_data = xr.DataArray([[[0, 1, 2, 3], + [4, 1000, 65534, 65535]]], + dims=["band", "x", "y"]) + result = self.old_xml_fh.calibrate_to_reflectances(fake_data, "B01") + np.testing.assert_allclose(result, [[[np.nan, 0.01, 0.02, 0.03], + [0.04, 10, 655.34, np.inf]]]) + + def test_xml_calibration(self): + """Test the calibration with radiometric offset.""" + fake_data = xr.DataArray([[[0, 1, 2, 3], + [4, 1000, 65534, 65535]]], + dims=["band", "x", "y"]) + result = self.xml_fh.calibrate_to_reflectances(fake_data, "B01") + np.testing.assert_allclose(result, [[[np.nan, 0.01 - 10, 0.02 - 10, 0.03 - 10], + [0.04 - 10, 0, 655.34 - 10, np.inf]]]) + + def test_xml_calibration_to_counts(self): + """Test the calibration to counts.""" + fake_data = xr.DataArray([[[0, 1, 2, 3], + [4, 1000, 65534, 65535]]], + dims=["band", "x", "y"]) + result = self.xml_fh._sanitize_data(fake_data) + np.testing.assert_allclose(result, [[[np.nan, 1, 2, 3], + [4, 1000, 65534, np.inf]]]) + + def test_xml_calibration_unmasked_saturated(self): + """Test the calibration with radiometric offset but unmasked saturated pixels.""" + from satpy.readers.msi_safe import SAFEMSIMDXML + filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A") + self.xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), filename_info, mock.MagicMock(), mask_saturated=False) + + fake_data = xr.DataArray([[[0, 1, 2, 3], + [4, 1000, 65534, 65535]]], + dims=["band", "x", "y"]) + result = self.xml_fh.calibrate_to_reflectances(fake_data, "B01") + np.testing.assert_allclose(result, [[[np.nan, 0.01 - 10, 0.02 - 10, 0.03 - 10], + [0.04 - 10, 0, 655.34 - 10, 655.35 - 10]]]) + + def test_xml_calibration_with_different_offset(self): + """Test the calibration with a different offset.""" + fake_data = xr.DataArray([[[0, 1, 2, 3], + [4, 1000, 65534, 65535]]], + dims=["band", "x", "y"]) + result = self.xml_fh.calibrate_to_reflectances(fake_data, "B10") + np.testing.assert_allclose(result, [[[np.nan, 0.01 - 20, 0.02 - 20, 0.03 - 20], + [0.04 - 20, -10, 655.34 - 20, np.inf]]]) + + def test_xml_calibration_to_radiance(self): + """Test the calibration with a different offset.""" + fake_data = xr.DataArray([[[0, 1, 2, 3], + [4, 1000, 65534, 65535]]], + dims=["band", "x", "y"]) + result = self.xml_fh.calibrate_to_radiances(fake_data, "B01") + expected = np.array([[[np.nan, -251.584265, -251.332429, -251.080593], + [-250.828757, 0., 16251.99095, np.inf]]]) + np.testing.assert_allclose(result, expected) + + def test_xml_navigation(self): """Test the navigation.""" from pyproj import CRS crs = 
CRS("EPSG:32616") dsid = make_dataid(name="B01", resolution=60) - xml_tile_fh = xml_builder("L1C")[1] - result = xml_tile_fh.get_area_def(dsid) - area_extent = (499980.0, 3590220.0, 609780.0, 3700020.0) - assert result.crs == crs - np.testing.assert_allclose(result.area_extent, area_extent) - + result = self.xml_tile_fh.get_area_def(dsid) -class TestMTDXML: - """Test the SAFE MTD XML file handler.""" - - def setup_method(self): - """Set up the test case.""" - self.fake_data = xr.DataArray([[[0, 1, 2, 3], [4, 1000, 65534, 65535]]], dims=["band", "x", "y"]) - - @pytest.mark.parametrize(("process_level", "mask_saturated", "band_name", "expected"), - [ - ("L1C", True, "B01", ([[[np.nan, -9.99, -9.98, -9.97], - [-9.96, 0, 645.34, np.inf]]], - [[[np.nan, -251.584265, -251.332429, -251.080593], - [-250.828757, 0., 16251.99095, np.inf]]], - [[[np.nan, 1, 2, 3], - [4, 1000, 65534, np.inf]]])), - ("L1C", False, "B10", ([[[np.nan, -19.99, -19.98, -19.97], - [-19.96, -10, 635.34, 635.35]]], - [[[np.nan, -35.465976, -35.448234, -35.430493], - [-35.412751, -17.741859, 1127.211275, 1127.229017]]], - [[[np.nan, 1, 2, 3], - [4, 1000, 65534, 65535]]])), - ("oldL1C", True, "B01", ([[[np.nan, 0.01, 0.02, 0.03], - [0.04, 10, 655.34, np.inf]]], - [[[np.nan, 0.251836101, 0.503672202, 0.755508303], - [1.00734440, 251.836101, 16503.8271, np.inf]]], - [[[np.nan, 1, 2, 3], - [4, 1000, 65534, np.inf]]])), - ("L2A", False, "B03", ([[[np.nan, -9.99, -9.98, -9.97], - [-9.96, 0, 645.34, 645.35]]], - [[[np.nan, -238.571863, -238.333052, -238.094241], - [-237.855431, 0, 15411.407995, 15411.646806]]], - [[[np.nan, 1, 2, 3], - [4, 1000, 65534, 65535]]])), - ]) - def test_xml_calibration(self, process_level, mask_saturated, band_name, expected): - """Test the calibration to reflectance/radiance/counts.""" - xml_fh = xml_builder(process_level, mask_saturated)[0] - - res1 = xml_fh.calibrate_to_reflectances(self.fake_data, band_name) - res2 = xml_fh.calibrate_to_radiances(self.fake_data, band_name) - res3 = xml_fh._sanitize_data(self.fake_data) - - results = (res1, res2, res3) - np.testing.assert_allclose(results, expected) - - @pytest.mark.parametrize(("process_level", "mask_saturated", "band_name", "expected"), - [ - ("L1C", True, "B01", None), - ("L2A", False, "AOT", [[[np.nan, 0.001, 0.002, 0.003], - [0.004, 1., 65.534, 65.535]]]), - ("L2A", True, "WVP", [[[np.nan, 0.001, 0.002, 0.003], - [0.004, 1., 65.534, np.inf]]]), - ("L2A", False, "CLOUD", None), - ("L2A", False, "B10", None), - ]) - def test_xml_calibration_to_atmospheric(self, process_level, mask_saturated, band_name, expected): - """Test the calibration to L2A atmospheric products.""" - xml_fh = xml_builder(process_level, mask_saturated)[0] - - result =xml_fh.calibrate_to_atmospheric(self.fake_data, band_name) - - if result is not None: - np.testing.assert_allclose(result, expected) - else: - assert result is expected + area_extents = (499980.0, 3590220.0, 609780.0, 3700020.0) + assert result.crs == crs + np.testing.assert_allclose(result.area_extent, area_extents) class TestSAFEMSIL1C: @@ -1593,41 +988,36 @@ class TestSAFEMSIL1C: def setup_method(self): """Set up the test.""" + from satpy.readers.msi_safe import SAFEMSITileMDXML + self.filename_info = dict(observation_time=fname_dt, fmission_id="S2A", band_name="B01", dtile_number=None) self.fake_data = xr.Dataset({"band_data": xr.DataArray([[[0, 1], [65534, 65535]]], dims=["band", "x", "y"])}) - - - @pytest.mark.parametrize(("mask_saturated", "dataset_name", "calibration", "expected"), - [ - (False, "B01_L2A", 
"reflectance", [[np.nan, -9.99], [645.34, 645.35]]), - (True, "B02_L2A", "radiance", [[np.nan, -265.970568], [17181.325973, np.inf]]), - (True, "B03_L2A", "counts", [[np.nan, 1], [65534, np.inf]]), - (False, "AOT_L2A", "aerosol_thickness", [[np.nan, 0.001], [65.534, 65.535]]), - (True, "WVP_L2A", "water_vapor", [[np.nan, 0.001], [65.534, np.inf]]), - (True, "SNOW_L2A", "water_vapor", None), - ]) - def test_calibration_and_masking(self, mask_saturated, dataset_name, calibration, expected): + self.tile_mda = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(mtd_tile_xml), + self.filename_info, mock.MagicMock()) + self.tile_mda.start_time.return_value = tilemd_dt + + @pytest.mark.parametrize(("mask_saturated", "calibration", "expected"), + [(True, "reflectance", [[np.nan, 0.01 - 10], [645.34, np.inf]]), + (False, "reflectance", [[np.nan, 0.01 - 10], [645.34, 645.35]]), + (True, "radiance", [[np.nan, -251.58426503], [16251.99095011, np.inf]]), + (False, "counts", [[np.nan, 1], [65534, 65535]])]) + def test_calibration_and_masking(self, mask_saturated, calibration, expected): """Test that saturated is masked with inf when requested and that calibration is performed.""" - jp2_fh = jp2_builder("L2A", dataset_name.replace("_L2A", ""), mask_saturated) + from satpy.readers.msi_safe import SAFEMSIL1C, SAFEMSIMDXML + + mda = SAFEMSIMDXML(StringIO(mtd_l1c_xml), self.filename_info, mock.MagicMock(), + mask_saturated=mask_saturated) + self.jp2_fh = SAFEMSIL1C("somefile", self.filename_info, mock.MagicMock(), mda, self.tile_mda) with mock.patch("xarray.open_dataset", return_value=self.fake_data): - res = jp2_fh.get_dataset(make_dataid(name=dataset_name, calibration=calibration), info=dict()) - if res is not None: - np.testing.assert_allclose(res, expected) - else: - assert res is expected + res = self.jp2_fh.get_dataset(make_dataid(name="B01", calibration=calibration), info=dict()) + np.testing.assert_allclose(res, expected) - @pytest.mark.parametrize(("process_level", "band_name", "dataset_name"), - [ - ("L1C", "B01", "B03"), - ("L2A", "B02", "B03_L2A"), - ]) - def test_filename_dsname_mismatch(self, process_level, band_name, dataset_name): - """Test when dataset name and file band name mismatch, the data and its area definition should both be None.""" - jp2_fh = jp2_builder(process_level, band_name) - with mock.patch("xarray.open_dataset", return_value=self.fake_data): - res1 = jp2_fh.get_dataset(make_dataid(name=dataset_name), info=dict()) - res2 = jp2_fh.get_area_def(make_dataid(name=dataset_name)) + def test_start_time(self): + """Test that the correct start time is returned.""" + from satpy.readers.msi_safe import SAFEMSIL1C, SAFEMSIMDXML - assert res1 is None - assert res2 is None + mda = SAFEMSIMDXML(StringIO(mtd_l1c_xml), self.filename_info, mock.MagicMock()) + self.jp2_fh = SAFEMSIL1C("somefile", self.filename_info, mock.MagicMock(), + mda, self.tile_mda) + assert tilemd_dt == self.jp2_fh.start_time From a026d8287968f637f57257e3e7c168359340a62c Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 15 May 2024 15:04:38 +0800 Subject: [PATCH 1380/1416] re-add --- satpy/etc/readers/msi_safe.yaml | 57 +- satpy/readers/msi_safe.py | 56 +- satpy/tests/reader_tests/test_msi_safe.py | 889 ++++++++++++++++++---- 3 files changed, 831 insertions(+), 171 deletions(-) diff --git a/satpy/etc/readers/msi_safe.yaml b/satpy/etc/readers/msi_safe.yaml index 20b324a036..cc39c26a74 100644 --- a/satpy/etc/readers/msi_safe.yaml +++ b/satpy/etc/readers/msi_safe.yaml @@ -1,8 +1,8 @@ reader: name: msi_safe - short_name: MSI 
SAFE - long_name: Sentinel-2 A and B MSI data in SAFE format, supporting L1C format only. - description: SAFE Reader for MSI data (Sentinel-2) + short_name: MSI SAFE L1C + long_name: Sentinel-2 A and B MSI L1C data in SAFE format + description: SAFE Reader for MSI L1C data (Sentinel-2) status: Nominal supports_fsspec: false sensors: [msi] @@ -10,20 +10,18 @@ reader: reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: - safe_granule_l1c: + l1c_safe_granule: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C - file_patterns: ['{fmission_id:3s}_MSI{proclevel:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}.jp2'] - requires: [safe_metadata, safe_tile_metadata] - safe_tile_metadata: + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}.jp2'] + requires: [l1c_safe_metadata, l1c_safe_tile_metadata] + l1c_safe_tile_metadata: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSITileMDXML - file_patterns: ['{fmission_id:3s}_MSI{proclevel:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml'] - safe_metadata: + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml'] + l1c_safe_metadata: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIMDXML - file_patterns: ['{fmission_id:3s}_MSI{proclevel:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSIL1C.xml'] - + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSIL1C.xml'] datasets: - B01: name: B01 sensor: msi @@ -39,7 +37,7 @@ datasets: counts: standard_name: counts units: "1" - file_type: safe_granule_l1c + file_type: l1c_safe_granule B02: name: B02 @@ -56,7 +54,7 @@ datasets: counts: standard_name: counts units: "1" - file_type: safe_granule_l1c + file_type: l1c_safe_granule B03: name: B03 @@ -73,7 +71,7 @@ datasets: counts: standard_name: counts units: "1" - file_type: safe_granule_l1c + file_type: l1c_safe_granule B04: name: B04 @@ -90,7 +88,7 @@ datasets: counts: standard_name: counts units: "1" - file_type: safe_granule_l1c + file_type: l1c_safe_granule B05: name: B05 @@ -107,7 +105,7 @@ datasets: counts: standard_name: counts units: "1" - file_type: safe_granule_l1c + file_type: l1c_safe_granule B06: name: B06 @@ -124,7 +122,7 @@ datasets: 
      counts:
        standard_name: counts
        units: "1"
-    file_type: safe_granule_l1c
+    file_type: l1c_safe_granule

  B07:
    name: B07
@@ -141,7 +139,7 @@ datasets:
      counts:
        standard_name: counts
        units: "1"
-    file_type: safe_granule_l1c
+    file_type: l1c_safe_granule

  B08:
    name: B08
@@ -158,7 +156,7 @@ datasets:
      counts:
        standard_name: counts
        units: "1"
-    file_type: safe_granule_l1c
+    file_type: l1c_safe_granule

  B8A:
    name: B8A
@@ -175,7 +173,7 @@ datasets:
      counts:
        standard_name: counts
        units: "1"
-    file_type: safe_granule_l1c
+    file_type: l1c_safe_granule

  B09:
    name: B09
@@ -192,7 +190,7 @@ datasets:
      counts:
        standard_name: counts
        units: "1"
-    file_type: safe_granule_l1c
+    file_type: l1c_safe_granule

  B10:
    name: B10
@@ -209,7 +207,7 @@ datasets:
      counts:
        standard_name: counts
        units: "1"
-    file_type: safe_granule_l1c
+    file_type: l1c_safe_granule

  B11:
    name: B11
@@ -226,7 +224,7 @@ datasets:
      counts:
        standard_name: counts
        units: "1"
-    file_type: safe_granule_l1c
+    file_type: l1c_safe_granule

  B12:
    name: B12
@@ -243,31 +241,30 @@ datasets:
      counts:
        standard_name: counts
        units: "1"
-    file_type: safe_granule_l1c
-
+    file_type: l1c_safe_granule

  solar_zenith_angle:
    name: solar_zenith_angle
    resolution: [10, 20, 60]
-    file_type: safe_tile_metadata
+    file_type: l1c_safe_tile_metadata
    xml_tag: Sun_Angles_Grid/Zenith

  solar_azimuth_angle:
    name: solar_azimuth_angle
    resolution: [10, 20, 60]
-    file_type: safe_tile_metadata
+    file_type: l1c_safe_tile_metadata
    xml_tag: Sun_Angles_Grid/Azimuth

  satellite_azimuth_angle:
    name: satellite_azimuth_angle
    resolution: [10, 20, 60]
-    file_type: safe_tile_metadata
+    file_type: l1c_safe_tile_metadata
    xml_tag: Viewing_Incidence_Angles_Grids
    xml_item: Azimuth

  satellite_zenith_angle:
    name: satellite_zenith_angle
    resolution: [10, 20, 60]
-    file_type: safe_tile_metadata
+    file_type: l1c_safe_tile_metadata
    xml_tag: Viewing_Incidence_Angles_Grids
    xml_item: Zenith
diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py
index 5ec5ff3ea0..d36350f7ab 100644
--- a/satpy/readers/msi_safe.py
+++ b/satpy/readers/msi_safe.py
@@ -28,9 +28,12 @@
        reader_kwargs={'mask_saturated': False})
    scene.load(['B01'])

-L1C format description for the files read here:
+L1C/L2A format description for the files read here:

-  https://sentinels.copernicus.eu/documents/247904/0/Sentinel-2-product-specifications-document-V14-9.pdf/
+  https://sentinels.copernicus.eu/documents/247904/685211/S2-PDGS-TAS-DI-PSD-V14.9.pdf/3d3b6c9c-4334-dcc4-3aa7-f7c0deffbaf7?t=1643013091529
+
+Please note: for L2A datasets, each band name carries a fixed "_L2A" suffix. Do not change it in the YAML
+file, or the reader will not recognize the dataset and nothing will be loaded.
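+
+For example, to load an L2A band together with the L2A aerosol optical thickness product (a minimal
+sketch, reusing the ``filenames`` list from the example above):
+
+    scene = Scene(filenames=filenames, reader='msi_safe')
+    scene.load(['B03_L2A', 'AOT_L2A'])
+
+For reference, calibration to reflectance is computed as (counts + band_offset) / quantification_value * 100,
+with no-data counts masked to NaN and, when ``mask_saturated`` is set, saturated counts masked to inf. With a
+band offset of -1000 and a quantification value of 10000, a raw count of 1 therefore maps to
+(1 - 1000) / 10000 * 100 = -9.99 % reflectance. The L2A atmospheric products (AOT/WVP) are simply divided by
+their own quantification values.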
""" @@ -65,20 +68,27 @@ def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, mask_s filetype_info) del mask_saturated self._channel = filename_info["band_name"] + self.process_level = filename_info["process_level"] self._tile_mda = tile_mda self._mda = mda self.platform_name = PLATFORMS[filename_info["fmission_id"]] - self._start_time = self._tile_mda.start_time() self._end_time = filename_info["observation_time"] def get_dataset(self, key, info): """Load a dataset.""" - if self._channel != key["name"]: - return + if self.process_level == "L1C": + if self._channel != key["name"]: + return + else: + if self._channel + "_L2A" != key["name"]: + return logger.debug("Reading %s.", key["name"]) + proj = self._read_from_file(key) + if proj is None: + return proj.attrs = info.copy() proj.attrs["units"] = "%" proj.attrs["platform_name"] = self.platform_name @@ -93,6 +103,8 @@ def _read_from_file(self, key): return self._mda.calibrate_to_radiances(proj, self._channel) if key["calibration"] == "counts": return self._mda._sanitize_data(proj) + if key["calibration"] in ["aerosol_thickness", "water_vapor"]: + return self._mda.calibrate_to_atmospheric(proj, self._channel) @property def start_time(self): @@ -106,8 +118,13 @@ def end_time(self): def get_area_def(self, dsid): """Get the area def.""" - if self._channel != dsid["name"]: - return + if self.process_level == "L1C": + if self._channel != dsid["name"]: + return + else: + if self._channel + "_L2A" != dsid["name"]: + return + return self._tile_mda.get_area_def(dsid) @@ -121,6 +138,7 @@ def __init__(self, filename, filename_info, filetype_info, mask_saturated=True): self._end_time = filename_info["observation_time"] self.root = ET.parse(self.filename) self.tile = filename_info["dtile_number"] + self.process_level = filename_info["process_level"] self.platform_name = PLATFORMS[filename_info["fmission_id"]] self.mask_saturated = mask_saturated import bottleneck # noqa @@ -142,10 +160,23 @@ class SAFEMSIMDXML(SAFEMSIXMLMetadata): def calibrate_to_reflectances(self, data, band_name): """Calibrate *data* using the radiometric information for the metadata.""" - quantification = int(self.root.find(".//QUANTIFICATION_VALUE").text) + quantification = int(self.root.find(".//QUANTIFICATION_VALUE").text) if self.process_level == "L1C" else \ + int(self.root.find(".//BOA_QUANTIFICATION_VALUE").text) data = self._sanitize_data(data) return (data + self.band_offset(band_name)) / quantification * 100 + def calibrate_to_atmospheric(self, data, band_name): + """Calibrate L2A AOT/WVP product.""" + atmospheric_bands = ["AOT", "WVP"] + if self.process_level == "L1C": + return + elif self.process_level == "L2A" and band_name not in atmospheric_bands: + return + + quantification = float(self.root.find(f".//{band_name}_QUANTIFICATION_VALUE").text) + data = self._sanitize_data(data) + return data / quantification + def _sanitize_data(self, data): data = data.where(data != self.no_data) if self.mask_saturated: @@ -174,7 +205,8 @@ def band_indices(self): @cached_property def band_offsets(self): """Get the band offsets from the metadata.""" - offsets = self.root.find(".//Radiometric_Offset_List") + offsets = self.root.find(".//Radiometric_Offset_List") if self.process_level == "L1C" else \ + self.root.find(".//BOA_ADD_OFFSET_VALUES_LIST") if offsets is not None: band_offsets = {int(off.attrib["band_id"]): float(off.text) for off in offsets} else: @@ -302,9 +334,11 @@ def interpolate_angles(self, angles, resolution): def _get_coarse_dataset(self, key, info): 
"""Get the coarse dataset refered to by `key` from the XML data.""" angles = self.root.find(".//Tile_Angles") - if key["name"] in ["solar_zenith_angle", "solar_azimuth_angle"]: + if key["name"] in ["solar_zenith_angle", "solar_azimuth_angle", + "solar_zenith_angle_l2a", "solar_azimuth_angle_l2a"]: angles = self._get_solar_angles(angles, info) - elif key["name"] in ["satellite_zenith_angle", "satellite_azimuth_angle"]: + elif key["name"] in ["satellite_zenith_angle", "satellite_azimuth_angle", + "satellite_zenith_angle_l2a", "satellite_azimuth_angle_l2a"]: angles = self._get_satellite_angles(angles, info) else: angles = None diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index b919278bf5..d2de3e1a54 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -30,7 +30,7 @@ fname_dt = datetime(2020, 10, 1, 18, 35, 41) tilemd_dt = datetime(2020, 10, 1, 16, 34, 23, 153611) -mtd_tile_xml = b""" +mtd_l1c_tile_xml = b""" @@ -580,7 +580,6 @@ """ # noqa - mtd_l1c_old_xml = """ @@ -866,121 +865,742 @@ """ # noqa +mtd_l2a_xml = """ + + + + 2024-04-11T03:05:21.024Z + 2024-04-11T03:05:21.024Z + S2A_MSIL2A_20240411T030521_N0510_R075_T50TMK_20240411T080950.SAFE + Level-2A + S2MSI2A + 05.10 + https://doi.org/10.5270/S2_-znk9xsj + 2024-04-11T08:09:50.000000Z + Not applicable + Not applicable + + Sentinel-2A + INS-NOBS + 2024-04-11T03:05:21.024Z + 75 + DESCENDING + + +SAFE_COMPACT + + + + + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_B02_10m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_B03_10m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_B04_10m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_B08_10m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_TCI_10m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_AOT_10m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_WVP_10m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B01_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B02_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B03_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B04_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B05_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B06_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B07_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B8A_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B11_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B12_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_TCI_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_AOT_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_WVP_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_SCL_20m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B01_60m + 
GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B02_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B03_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B04_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B05_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B06_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B07_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B8A_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B09_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B11_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B12_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_TCI_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_AOT_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_WVP_60m + GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_SCL_60m + + + + + + + NODATA + 0 + + + SATURATED + 65535 + + + 3 + 2 + 1 + + + 10000 + 1000.0 + 1000.0 + + + -1000 + -1000 + -1000 + -1000 + -1000 + -1000 + -1000 + -1000 + -1000 + -1000 + -1000 + -1000 + -1000 + + + 0.998279632507911 + + 1884.69 + 1959.66 + 1823.24 + 1512.06 + 1424.64 + 1287.61 + 1162.08 + 1041.63 + 955.32 + 812.92 + 367.15 + 245.59 + 85.25 + + + + + 60 + + 412 + 456 + 442.7 + + + 1 + 0.001775742 0.004073061 0.003626143 0.003515199 0.005729163 0.003780292 0.002636732 0.001262113 0.001987583 0.001368913 0.001250444 0.000463454 0.000814293 0.001376431 0.001485086 0.001823735 0.001626817 0.004392062 0.029008099 0.11874593 0.32387506 0.57281921 0.71472749 0.76196778 0.78929702 0.80862387 0.81089382 0.82419876 0.85415811 0.87079088 0.88731097 0.92619924 0.98228149 1 0.9752382 0.93596338 0.88997148 0.85021048 0.82569453 0.78390239 0.61417422 0.33007109 0.12410831 0.04365694 0.014749595 + + + + 10 + + 456 + 533 + 492.7 + + + 1 + 0.04255531 0.0722983 0.15374322 0.32799225 0.55336788 0.71011166 0.75285179 0.75232691 0.75668081 0.76326948 0.76239425 0.7852515 0.81546669 0.86179176 0.89282599 0.9195221 0.91900649 0.91315754 0.90035366 0.88989693 0.8823246 0.87606118 0.88429987 0.90695544 0.93232085 0.93947252 0.94383543 0.92204086 0.8860231 0.84743609 0.81251687 0.7823971 0.7731087 0.77209054 0.78742652 0.81217177 0.84605052 0.88767996 0.92793997 0.95069235 0.96573311 0.96938253 0.96570294 0.95832003 0.95405064 0.95178268 0.95699722 0.96556515 0.9770514 0.97709574 0.97436606 0.95903183 0.93506318 0.90190134 0.87165792 0.84402444 0.82280852 0.81536043 0.82057639 0.8395149 0.86992171 0.91526205 0.96067028 0.99163699 1 0.98356097 0.91130763 0.74018256 0.50395858 0.3050155 0.18004605 0.10738342 0.06593592 0.04207746 0.02662129 0.0143396 0.00265779 0.00081822 + + + + 10 + + 538 + 583 + 559.8 + + + 1 + 0.01448181 0.03422251 0.07346335 0.15444843 0.31661425 0.55322279 0.74859406 0.84890306 0.89772216 0.9215368 0.92572845 0.91122688 0.88818924 0.86523756 0.84718187 0.8387572 0.84459081 0.86219653 0.88838714 0.92443236 0.96017974 0.98685516 1 0.9986008 0.98076472 0.94522089 0.8981778 0.85580323 0.81841734 0.78862048 0.76460653 0.74963745 0.75055111 0.76137888 0.78244479 0.79890086 0.81016957 0.81408886 0.77358596 0.62881065 0.40397555 0.21542098 0.10715281 
0.04792877 0.01848693 0.00108588 + + + + 10 + + 646 + 684 + 664.6 + + + 1 + 0.00141521 0.02590238 0.11651178 0.39088616 0.74959342 0.94485805 0.98011173 0.99406309 1 0.99545475 0.99052772 0.97733476 0.94055988 0.87894956 0.81629384 0.77345952 0.75448766 0.75991531 0.7826343 0.8101689 0.83612975 0.86125424 0.88609106 0.91138767 0.93405146 0.95042063 0.9592573 0.96039555 0.95913395 0.95809013 0.95527459 0.94376465 0.89490799 0.74426308 0.476777 0.22960399 0.08009118 0.02617076 0.00415242 + + + + 20 + + 695 + 714 + 704.1 + + + 1 + 0.02835786 0.12369337 0.39378774 0.76113071 0.97108502 0.99889523 1 0.99412258 0.98321789 0.96704093 0.94847389 0.92714833 0.90372458 0.88614713 0.86723745 0.79075319 0.58840332 0.26334833 0.05675422 0.00618833 + + + + 20 + + 731 + 749 + 740.5 + + + 1 + 0.00171088 0.05467153 0.25806676 0.64722098 0.89218999 0.90232877 0.91508768 0.94115846 0.96299993 0.97510481 0.9770217 0.98736251 1 0.98880277 0.97179916 0.90126739 0.60672391 0.20520227 0.0267569 + + + + 20 + + 769 + 797 + 782.8 + + + 1 + 0.00045899 0.0117201 0.05219715 0.16561733 0.36903355 0.63685453 0.86119638 0.97002897 0.99119602 0.99897921 1 0.97725155 0.92572385 0.86605804 0.81969611 0.79407674 0.79111029 0.80431552 0.81902721 0.82571292 0.82011829 0.79222195 0.72054559 0.58767794 0.41430355 0.23088817 0.09850282 0.02736551 0.00516235 + + + + 10 + + 760 + 907 + 832.8 + + + 1 + 0.00067259 0.00388856 0 0 0 0 0 0 0 0 0 0 0 0.00028956 0.00702964 0.01752391 0.03231111 0.05328661 0.08299885 0.12748502 0.19591065 0.30246323 0.43553954 0.57141637 0.69766701 0.80303852 0.89115744 0.95284584 0.98894161 1 0.98840653 0.96389216 0.94207967 0.93694643 0.94227343 0.95395718 0.96828896 0.97966549 0.9854444 0.98592681 0.98391181 0.97793903 0.97722771 0.97810609 0.98144486 0.98764558 0.98857708 0.9862422 0.98070921 0.97078624 0.95721089 0.93865821 0.91672388 0.89620759 0.872888 0.85160331 0.8246394 0.80078117 0.7823386 0.76360274 0.74962771 0.7387221 0.73079407 0.72271237 0.72507708 0.72563856 0.72304217 0.72229211 0.71616364 0.71159446 0.70826954 0.70157205 0.69924532 0.70093762 0.70692733 0.71824001 0.73124634 0.7484061 0.76818541 0.78394807 0.7968381 0.80260206 0.8045194 0.80240918 0.79699072 0.78920304 0.77691621 0.76518406 0.75119717 0.73700357 0.72262399 0.70412578 0.68410805 0.66474528 0.64736891 0.63005125 0.61564222 0.60249557 0.58988992 0.57993399 0.57136506 0.56094242 0.55235105 0.54568236 0.53958052 0.53510215 0.53093675 0.53016508 0.52984662 0.53036682 0.53211463 0.53271918 0.53246806 0.53331158 0.5319278 0.53051055 0.52951499 0.52996848 0.53253373 0.53705085 0.54235344 0.54912497 0.55523055 0.56011135 0.55767999 0.54821984 0.53144613 0.50763528 0.47811224 0.45092793 0.42798466 0.41051405 0.40039139 0.40087302 0.40829375 0.42086556 0.43007022 0.42456692 0.39136817 0.33009008 0.25720509 0.18189031 0.11650668 0.07031579 0.04275381 0.02593154 0.01574394 0.00394326 + + + + 20 + + 837 + 881 + 864.7 + + + 1 + 0.00030097 0 0 0 0 0 0 0 0 0 0.00157217 0.00249886 0.01332037 0.02614866 0.05260479 0.10779709 0.22160755 0.39721628 0.60986885 0.81658883 0.9322445 0.97210033 0.97545482 0.97538048 0.97328205 0.97607828 0.98034955 0.98690928 0.99087465 0.99741818 0.99984673 0.99939141 0.99587928 0.99541228 1 0.99640762 0.92359433 0.74137684 0.48965971 0.25020643 0.11221246 0.04755984 0.02297815 0.01061438 0.00108149 + + + + 60 + + 932 + 958 + 945.1 + + + 1 + 0.01662953 0.06111857 0.17407094 0.38946454 0.6645915 0.87454114 0.93695988 0.96751014 0.9893391 0.9951269 1 0.97845762 0.98069118 0.9922335 0.98798379 0.99428313 0.98348041 
0.97820013 0.95023367 0.95299604 0.92240308 0.85573828 0.70970227 0.46429542 0.21538427 0.06534121 0.01625596 + + + + 60 + + 1337 + 1412 + 1373.5 + + + 1 + 0.00024052 5.404e-05 3.052e-05 2.872e-05 7.632e-05 0.00010949 8.804e-05 0.00012356 0.00017424 0.0003317 0.00036891 0.0004467 0.00065919 0.0010913 0.00196903 0.00373668 0.00801754 0.01884719 0.04466732 0.10165546 0.20111776 0.34284841 0.50710992 0.6632068 0.78377143 0.86153862 0.91000261 0.94193255 0.96182259 0.97365119 0.98169786 0.98795826 0.99283342 0.99649788 0.99906011 1 0.99907734 0.99601604 0.9909083 0.98479854 0.97802142 0.97030114 0.96080954 0.94849765 0.93314108 0.91482336 0.8937997 0.86825426 0.83023193 0.76384193 0.65440009 0.50671604 0.35014737 0.21799972 0.12643091 0.06768988 0.0322709 0.013544 0.00544557 0.00237642 0.00111267 0.00053796 0.0003457 0.00017488 0.00021619 0.00019479 0.00010421 5.919e-05 5.109e-05 6.115e-05 5.527e-05 3.856e-05 3.147e-05 0.00012289 0.0001089 2.502e-05 + + + + 20 + + 1539 + 1682 + 1613.7 + + + 1 + 6.79e-06 6.66e-06 8e-06 2.734e-05 3.685e-05 8.851e-05 0.00014522 0.00024812 0.00047627 0.00056335 0.00065326 0.00089835 0.00114664 0.00165604 0.00241611 0.00350246 0.00524274 0.0081538 0.01237062 0.0186097 0.02721853 0.03879155 0.05379167 0.07353187 0.09932758 0.1334178 0.18029249 0.24484994 0.32834511 0.42749961 0.53576798 0.64570396 0.74245998 0.81447017 0.85866596 0.87924777 0.88665266 0.888727 0.89105732 0.89725046 0.90632982 0.91627527 0.9263751 0.93515828 0.94226446 0.94739906 0.95131987 0.95416808 0.95635128 0.95813297 0.96062738 0.96344083 0.96577764 0.96818134 0.97104025 0.97343195 0.97597444 0.97865413 0.97994672 0.98064126 0.98094979 0.98143338 0.98123856 0.98068083 0.98033995 0.98101894 0.98268503 0.98507875 0.98777658 0.9903608 0.99202087 0.9933069 0.99256744 0.99044883 0.98717314 0.98353656 0.9800432 0.97617287 0.97253451 0.96977033 0.96762556 0.9662626 0.96572411 0.96592079 0.96729798 0.96975438 0.97337748 0.97862858 0.98345358 0.98765317 0.9919238 0.99554959 0.99767411 0.99866451 0.99941783 0.99930984 0.99885298 0.99913515 0.99973164 0.99973592 1 0.9998438 0.9967639 0.99175576 0.9859206 0.97887302 0.97029262 0.96135891 0.95379752 0.94709017 0.94228614 0.93919512 0.93616637 0.92889205 0.9129921 0.88158383 0.82602164 0.74412949 0.64281662 0.53483955 0.42772166 0.32439525 0.23488131 0.16445229 0.11056237 0.07271886 0.04634859 0.02949618 0.01941871 0.0133487 0.00934594 0.00654231 0.00487921 0.00341903 0.00249864 0.00196431 0.00142754 0.00105878 0.00049978 0.00022833 0.00015999 3.415e-05 4.517e-05 1.313e-05 + + + + 20 + + 2078 + 2320 + 2202.4 + + + 1 + 0.00063835 0.00102286 0.00288712 0.00399879 0.00658916 0.00765458 0.00799918 0.00853524 0.00929493 0.00999614 0.01096645 0.01208363 0.01335837 0.01501119 0.01711931 0.01977307 0.02332743 0.02765779 0.03320435 0.04020464 0.04886709 0.0596238 0.07315348 0.09050885 0.11143964 0.13686671 0.16776886 0.20341457 0.24281992 0.28484195 0.32711894 0.36834301 0.40794043 0.4447145 0.47647207 0.50303896 0.52524762 0.54328057 0.55717994 0.5685619 0.57895708 0.58860881 0.59881758 0.60990899 0.62128986 0.63421311 0.64847648 0.66363778 0.67997936 0.69609688 0.71189957 0.7269499 0.74124079 0.75734734 0.77201504 0.78552587 0.79818641 0.80962939 0.81965718 0.82855741 0.83668178 0.84440292 0.85106862 0.85321701 0.85471321 0.8561428 0.85778963 0.8594989 0.86142876 0.86322831 0.86511218 0.8672932 0.86967076 0.87427502 0.87856212 0.88241466 0.88590611 0.8894516 0.89320419 0.8966738 0.89987484 0.90257636 0.90481219 0.90550545 0.90564491 0.90548208 0.90513822 0.90476379 
0.90406427 0.90332978 0.90274309 0.90235795 0.90196488 0.90340528 0.90429478 0.90529761 0.90642862 0.90807348 0.91010493 0.91293181 0.91556686 0.91842631 0.92128288 0.92431702 0.92719913 0.92972159 0.93190455 0.93412538 0.93588954 0.93707083 0.93762594 0.93828534 0.93763643 0.94042634 0.94250397 0.94324531 0.94301861 0.94210283 0.94061808 0.93841726 0.93665003 0.93524569 0.93301102 0.92686708 0.92104485 0.91547175 0.91100989 0.90828339 0.9072733 0.90817907 0.91115631 0.91617845 0.92284525 0.92059829 0.91947472 0.91947973 0.92126575 0.92451632 0.92772589 0.93196884 0.93676408 0.94147739 0.94679545 0.95119533 0.95443018 0.95704142 0.95972628 0.9625372 0.96485326 0.96603599 0.96664138 0.96630455 0.96545713 0.96484036 0.96365512 0.96169531 0.95944859 0.95732078 0.95513625 0.95355574 0.95273072 0.95217795 0.95172542 0.9521403 0.95263595 0.95405248 0.95707559 0.96063594 0.96421772 0.96830187 0.97268597 0.97741944 0.98289489 0.9871429 0.99073348 0.99398244 0.99678431 0.99875181 1 0.9999284 0.9991523 0.99712951 0.99388228 0.98968273 0.98373274 0.97621057 0.96780985 0.95833495 0.94842856 0.93818752 0.9277078 0.91702104 0.90597951 0.89384371 0.88165575 0.86861704 0.85460324 0.84058628 0.82598123 0.80948042 0.79182917 0.7724052 0.74907137 0.72031195 0.68815487 0.65125598 0.6100244 0.56600904 0.52095058 0.47464344 0.42924778 0.38584718 0.34208462 0.30067509 0.26317221 0.22770037 0.19571781 0.16808736 0.14467686 0.12482737 0.10823403 0.09439655 0.08235799 0.07149445 0.0626855 0.05498009 0.04818852 0.04285814 0.03859244 0.03494044 0.03199172 0.02958044 0.02741084 0.02556884 0.02395058 0.02166741 0.0191457 0.01632139 0.0109837 0.00736032 0.00649061 0.00469736 0.00205874 + + + + 4.10137842 + 3.75605469 + 4.18741753 + 4.52205376 + 5.20680393 + 4.8729478 + 4.5356737 + 6.16247757 + 5.13772343 + 8.53898524 + 55.10485389 + 35.30373192 + 106.24732599 + + + SC_NODATA + 0 + + + SC_SATURATED_DEFECTIVE + 1 + + + SC_DARK_FEATURE_SHADOW + 2 + + + SC_CLOUD_SHADOW + 3 + + + SC_VEGETATION + 4 + + + SC_NOT_VEGETATED + 5 + + + SC_WATER + 6 + + + SC_UNCLASSIFIED + 7 + + + SC_CLOUD_MEDIUM_PROBA + 8 + + + SC_CLOUD_HIGH_PROBA + 9 + + + SC_THIN_CIRRUS + 10 + + + SC_SNOW_ICE + 11 + + + + + + + + + 40.64479480422486 115.81682739339685 40.65079881136531 117.1154430676197 39.66155122739065 117.11377991452629 39.655752572676114 115.83386830444628 40.64479480422486 115.81682739339685 + + + POINT + 1 + + + EPSG + GEOGRAPHIC + + + + + S2A_OPER_GIP_INVLOC_MPC__20171206T000000_V20150703T000000_21000101T000000_B00 + S2A_OPER_GIP_LREXTR_MPC__20150605T094736_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_ATMIMA_MPC__20150605T094744_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_ATMSAD_MPC__20160729T000005_V20150703T000000_21000101T000000_B00 + S2A_OPER_GIP_BLINDP_MPC__20150605T094736_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_CLOINV_MPC__20210609T000005_V20210823T030000_21000101T000000_B00 + S2A_OPER_GIP_CLOPAR_MPC__20220120T000001_V20220125T022000_21000101T000000_B00 + S2A_OPER_GIP_CONVER_MPC__20150710T131444_V20150627T000000_21000101T000000_B00 + S2A_OPER_GIP_DATATI_MPC__20151117T131048_V20150703T000000_21000101T000000_B00 + S2A_OPER_GIP_DECOMP_MPC__20121031T075922_V19830101T000000_21000101T000000_B00 + S2__OPER_GIP_EARMOD_MPC__20150605T094736_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_ECMWFP_MPC__20121031T075922_V19830101T000000_21000101T000000_B00 + S2A_OPER_GIP_G2PARA_MPC__20231208T000027_V20231213T070000_21000101T000000_B00 + S2A_OPER_GIP_G2PARE_MPC__20150605T094736_V20150622T000000_21000101T000000_B00 + 
S2A_OPER_GIP_GEOPAR_MPC__20150605T094741_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_INTDET_MPC__20220120T000010_V20220125T022000_21000101T000000_B00 + S2A_OPER_GIP_JP2KPA_MPC__20220120T000006_V20220125T022000_21000101T000000_B00 + S2A_OPER_GIP_MASPAR_MPC__20220120T000009_V20220125T022000_21000101T000000_B00 + S2A_OPER_GIP_OLQCPA_MPC__20220715T000042_V20220830T002500_21000101T000000_B00 + S2A_OPER_GIP_PRDLOC_MPC__20180301T130000_V20180305T005000_21000101T000000_B00 + S2A_OPER_GIP_PROBAS_MPC__20240305T000510_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_R2ABCA_MPC__20240315T121000_V20240319T003000_21000101T000000_B00 + S2A_OPER_GIP_R2BINN_MPC__20150605T094803_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_R2CRCO_MPC__20151023T224715_V20150622T224715_21000101T000000_B00 + S2A_OPER_GIP_R2DECT_MPC__20150605T094742_V20150622T000000_21000101T000000_B09 + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B04 + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B02 + S2A_OPER_GIP_R2DECT_MPC__20150605T094742_V20150622T000000_21000101T000000_B12 + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B06 + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B08 + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B07 + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B05 + S2A_OPER_GIP_R2DECT_MPC__20150605T094742_V20150622T000000_21000101T000000_B10 + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B01 + S2A_OPER_GIP_R2DECT_MPC__20150605T094742_V20150622T000000_21000101T000000_B11 + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B8A + S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B03 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B09 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B05 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B8A + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B08 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B02 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B04 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B10 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B01 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B12 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B03 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B11 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B07 + S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B06 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B05 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B10 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B09 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B8A + S2A_OPER_GIP_R2DENT_MPC__20150605T094741_V20150622T000000_21000101T000000_B01 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B08 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B06 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B02 + 
S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B12 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B04 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B03 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B07 + S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B11 + S2A_OPER_GIP_R2DEPI_MPC__20230424T160000_V20230426T000000_21000101T000000_B00 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B12 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B03 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B07 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B09 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B10 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B01 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B05 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B8A + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B06 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B04 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B11 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B02 + S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B08 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B10 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B05 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B04 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B06 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B08 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B03 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B01 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B12 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B11 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B02 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B07 + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B8A + S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B09 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B05 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B08 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B8A + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B12 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B10 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B07 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B01 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094741_V20150622T000000_21000101T000000_B03 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B04 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B11 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B02 + S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B09 + 
S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B06 + S2A_OPER_GIP_R2NOMO_MPC__20150605T094803_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_R2PARA_MPC__20221206T000009_V20221206T073000_21000101T000000_B00 + S2A_OPER_GIP_R2SWIR_MPC__20180406T000021_V20180604T100000_21000101T000000_B00 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B12 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B09 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B05 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B02 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B03 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B8A + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B06 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B08 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B04 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B10 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B01 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B11 + S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B07 + S2A_OPER_GIP_RESPAR_MPC__20150605T094736_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_SPAMOD_MPC__20231122T110026_V20231123T010000_21000101T000000_B00 + S2A_OPER_GIP_TILPAR_MPC__20151209T095117_V20150622T000000_21000101T000000_B00 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B8A + S2A_OPER_GIP_VIEDIR_MPC__20151117T131049_V20150703T000000_21000101T000000_B03 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B08 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131048_V20150703T000000_21000101T000000_B01 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B11 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B10 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B06 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131049_V20150703T000000_21000101T000000_B04 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131049_V20150703T000000_21000101T000000_B02 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B05 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131051_V20150703T000000_21000101T000000_B12 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B09 + S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B07 + S2__OPER_GIP_L2ACSC_MPC__20220121T000003_V20220125T022000_21000101T000000_B00 + S2__OPER_GIP_L2ACAC_MPC__20220121T000004_V20220125T022000_21000101T000000_B00 + S2__OPER_GIP_PROBA2_MPC__20231208T000510_V20231213T070000_21000101T000000_B00 + + + CopernicusDEM30 + S2__OPER_AUX_UT1UTC_PDMC_20240404T000000_V20240405T000000_20250404T000000 + + S2__OPER_AUX_ECMWFD_ADG__20240410T120000_V20240410T210000_20240412T150000 + + None + + GlobalSnowMap.tiff + ESACCI-LC-L4-WB-Map-150m-P13Y-2000-v4.0.tif + ESACCI-LC-L4-LCCS-Map-300m-P1Y-2015-v2.0.7.tif + ESACCI-LC-L4-Snow-Cond-500m-MONTHLY-2000-2012-v2.4 + + + 3.500058 + + 0.0 + 0 + + + + PASSED + PASSED + PASSED + PASSED + PASSED + PASSED + + + + + 3.354197 + 0.0 + 0.0 + 8.675177 + 0.268831 + 2.81222 + 83.179593 + 0.992827 + 0.571295 + 0.275278 + 0.038401 + 3.18638 + 0.0 + 0.0 + 0.0 + 0.0 + CAMS + 0.392921 + 1.224094 + AUX_ECMWFT + 357.927923 + + + +""" # noqa + 
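+# Illustrative usage of the helpers defined just below (a sketch mirroring the
+# tests, not an exhaustive API): the process level indexes the fixture lists,
+# so for example
+#     xml_fh, xml_tile_fh = xml_builder("L2A", mask_saturated=False)
+# pairs mtd_l2a_xml with the shared L1C tile XML, while
+#     jp2_fh = jp2_builder("L1C", "B01")
+# wraps both metadata handlers around a fake granule; note that the "old" in
+# "oldL1C" is stripped before it is stored as filename_info["process_level"].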
+PROCESS_LEVELS = ["L1C", "oldL1C", "L2A"] +MTD_XMLS = [mtd_l1c_xml, mtd_l1c_old_xml, mtd_l2a_xml] +TILE_XMLS = [mtd_l1c_tile_xml, mtd_l1c_tile_xml, mtd_l1c_tile_xml] -class TestMTDXML: - """Test the SAFE MTD XML file handler.""" - def setup_method(self): - """Set up the test case.""" - from satpy.readers.msi_safe import SAFEMSIMDXML, SAFEMSITileMDXML - filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A") - self.xml_tile_fh = SAFEMSITileMDXML(BytesIO(mtd_tile_xml), filename_info, mock.MagicMock()) - self.old_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_old_xml), filename_info, mock.MagicMock()) - self.xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), filename_info, mock.MagicMock(), mask_saturated=True) +def xml_builder(process_level, mask_saturated=True, band_name=None): + """Build fake SAFE MTD/Tile XML.""" + from satpy.readers.msi_safe import SAFEMSIMDXML, SAFEMSITileMDXML + filename_info = dict(observation_time=fname_dt, dtile_number=None, band_name=band_name, fmission_id="S2A", + process_level=process_level.replace("old", "")) + xml_fh = SAFEMSIMDXML(StringIO(MTD_XMLS[PROCESS_LEVELS.index(process_level)]), + filename_info, mock.MagicMock(), mask_saturated=mask_saturated) + xml_tile_fh = SAFEMSITileMDXML(BytesIO(TILE_XMLS[PROCESS_LEVELS.index(process_level)]), + filename_info, mock.MagicMock()) + return xml_fh, xml_tile_fh + + +def jp2_builder(process_level, band_name, mask_saturated=True): + """Build fake SAFE jp2 image file.""" + from satpy.readers.msi_safe import SAFEMSIL1C, SAFEMSITileMDXML + filename_info = dict(observation_time=fname_dt, dtile_number=None, band_name=band_name, fmission_id="S2A", + process_level=process_level.replace("old", "")) + xml_fh = xml_builder(process_level, mask_saturated, band_name)[0] + tile_xml_fh = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(TILE_XMLS[PROCESS_LEVELS.index(process_level)]), + filename_info, mock.MagicMock()) + tile_xml_fh.start_time.return_value = tilemd_dt + jp2_fh = SAFEMSIL1C("somefile", filename_info, mock.MagicMock(), xml_fh, tile_xml_fh) + return jp2_fh + + +class TestTileXML: + """Test the SAFE TILE XML file handler. + + Since L1C/L2A share almost the same Tile XML structure, we only use L1C Tile here. 
+ + """ + + @pytest.mark.parametrize(("process_level", "angle_name", "angle_tag", "expected"), + [ + ("L1C", "satellite_zenith_angle", ("Viewing_Incidence_Angles_Grids", "Zenith"), + [[11.7128, 11.18397802, 10.27667671, 9.35384969, 8.42850504, + 7.55445611, 6.65475545, 5.66517232, 4.75893757, 4.04976844], + [11.88606009, 10.9799713, 10.07083278, 9.14571825, 8.22607131, + 7.35181457, 6.44647222, 5.46144173, 4.56625547, 3.86638233], + [11.6823579, 10.7763071, 9.86302106, 8.93879112, 8.04005637, + 7.15028077, 6.21461062, 5.25780953, 4.39876601, 3.68620793], + [11.06724679, 10.35723901, 9.63958896, 8.73072512, 7.83680864, + 6.94792574, 5.9889201, 5.05445872, 4.26089708, 3.50984272], + [6.28411038, 6.28411038, 6.28411038, 6.28411038, 6.28411038, + 5.99769643, 5.62586167, 4.85165966, 4.13238314, 3.33781401], + [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, + 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], + [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, + 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], + [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, + 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], + [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, + 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], + [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, + 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837]]), + ("L2A", "solar_zenith_angle_l2a", ("Sun_Angles_Grid", "Zenith"), + [[39.8824, 39.83721367, 39.79230847, 39.74758442, 39.7030415, + 39.65867687, 39.61455566, 39.57061558, 39.52685664, 39.48331372], + [39.78150175, 39.73629896, 39.69128852, 39.64643679, 39.6018404, + 39.5574369, 39.51323286, 39.46920212, 39.4253673, 39.38179377], + [39.6806035, 39.63532838, 39.5902497, 39.54538507, 39.5007087, + 39.45621756, 39.41195347, 39.36779169, 39.3239121, 39.28027381], + [39.57980525, 39.53445664, 39.48931088, 39.44434154, 39.39957879, + 39.35503587, 39.31067408, 39.26649344, 39.22249393, 39.17876143], + [39.479007, 39.43355483, 39.38829092, 39.34328573, 39.29846167, + 39.25381983, 39.2093947, 39.16513007, 39.12109926, 39.07726878], + [39.37820875, 39.33268069, 39.28735495, 39.24224914, 39.19736058, + 39.15267709, 39.1081719, 39.06385068, 39.01973446, 38.97584982], + [39.2774105, 39.23184303, 39.18646737, 39.14130809, 39.09632176, + 39.05153988, 39.00696049, 38.9625713, 38.91842056, 38.87444401], + [39.17671225, 39.13104478, 39.08559031, 39.04034757, 38.99528294, + 38.95039991, 38.9057971, 38.86130793, 38.81705183, 38.77303821], + [39.076014, 39.03026112, 38.98477906, 38.93940875, 38.89425338, + 38.84936063, 38.80464763, 38.76011645, 38.7157479, 38.67164839], + [38.97531575, 38.92950771, 38.88389967, 38.83852091, 38.7933053, + 38.74831897, 38.7034912, 38.65891427, 38.61446851, 38.57030388]]), + ("L1C", "moon_zenith_angle", ("Sun_Angles_Grid", "Zenith"), None) + ]) + def test_angles(self, process_level, angle_name, angle_tag, expected): + """Test reading angles array.""" + info = dict(xml_tag=angle_tag[0], xml_item=angle_tag[1]) if "satellite" in angle_name else \ + dict(xml_tag=angle_tag[0] + "/" + angle_tag[1]) + xml_tile_fh = xml_builder(process_level)[1] + + res = xml_tile_fh.get_dataset(make_dataid(name=angle_name, resolution=60), info) + if res is not None: + res = res[::200, ::200] + + if res is not None: + np.testing.assert_allclose(res, expected) + else: + assert res is expected def test_start_time(self): """Ensure start time is read correctly from XML.""" - assert self.xml_tile_fh.start_time() == tilemd_dt - - def test_satellite_zenith_array(self): - """Test reading the satellite zenith array.""" - info = 
dict(xml_tag="Viewing_Incidence_Angles_Grids", xml_item="Zenith") - - expected_data = np.array([[11.7128, 11.18397802, 10.27667671, 9.35384969, 8.42850504, - 7.55445611, 6.65475545, 5.66517232, 4.75893757, 4.04976844], - [11.88606009, 10.9799713, 10.07083278, 9.14571825, 8.22607131, - 7.35181457, 6.44647222, 5.46144173, 4.56625547, 3.86638233], - [11.6823579, 10.7763071, 9.86302106, 8.93879112, 8.04005637, - 7.15028077, 6.21461062, 5.25780953, 4.39876601, 3.68620793], - [11.06724679, 10.35723901, 9.63958896, 8.73072512, 7.83680864, - 6.94792574, 5.9889201, 5.05445872, 4.26089708, 3.50984272], - [6.28411038, 6.28411038, 6.28411038, 6.28411038, 6.28411038, - 5.99769643, 5.62586167, 4.85165966, 4.13238314, 3.33781401], - [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, - 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], - [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, - 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], - [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, - 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], - [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, - 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], - [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, - 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837]]) - res = self.xml_tile_fh.get_dataset(make_dataid(name="satellite_zenith_angle", - resolution=60), - info)[::200, ::200] - np.testing.assert_allclose(res, expected_data) - - def test_old_xml_calibration(self): - """Test the calibration of older data formats (no offset).""" - fake_data = xr.DataArray([[[0, 1, 2, 3], - [4, 1000, 65534, 65535]]], - dims=["band", "x", "y"]) - result = self.old_xml_fh.calibrate_to_reflectances(fake_data, "B01") - np.testing.assert_allclose(result, [[[np.nan, 0.01, 0.02, 0.03], - [0.04, 10, 655.34, np.inf]]]) - - def test_xml_calibration(self): - """Test the calibration with radiometric offset.""" - fake_data = xr.DataArray([[[0, 1, 2, 3], - [4, 1000, 65534, 65535]]], - dims=["band", "x", "y"]) - result = self.xml_fh.calibrate_to_reflectances(fake_data, "B01") - np.testing.assert_allclose(result, [[[np.nan, 0.01 - 10, 0.02 - 10, 0.03 - 10], - [0.04 - 10, 0, 655.34 - 10, np.inf]]]) - - def test_xml_calibration_to_counts(self): - """Test the calibration to counts.""" - fake_data = xr.DataArray([[[0, 1, 2, 3], - [4, 1000, 65534, 65535]]], - dims=["band", "x", "y"]) - result = self.xml_fh._sanitize_data(fake_data) - np.testing.assert_allclose(result, [[[np.nan, 1, 2, 3], - [4, 1000, 65534, np.inf]]]) - - def test_xml_calibration_unmasked_saturated(self): - """Test the calibration with radiometric offset but unmasked saturated pixels.""" - from satpy.readers.msi_safe import SAFEMSIMDXML - filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A") - self.xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), filename_info, mock.MagicMock(), mask_saturated=False) - - fake_data = xr.DataArray([[[0, 1, 2, 3], - [4, 1000, 65534, 65535]]], - dims=["band", "x", "y"]) - result = self.xml_fh.calibrate_to_reflectances(fake_data, "B01") - np.testing.assert_allclose(result, [[[np.nan, 0.01 - 10, 0.02 - 10, 0.03 - 10], - [0.04 - 10, 0, 655.34 - 10, 655.35 - 10]]]) - - def test_xml_calibration_with_different_offset(self): - """Test the calibration with a different offset.""" - fake_data = xr.DataArray([[[0, 1, 2, 3], - [4, 1000, 65534, 65535]]], - dims=["band", "x", "y"]) - result = self.xml_fh.calibrate_to_reflectances(fake_data, "B10") - np.testing.assert_allclose(result, [[[np.nan, 0.01 - 20, 0.02 - 20, 0.03 - 20], - [0.04 - 20, -10, 655.34 - 20, np.inf]]]) - - def test_xml_calibration_to_radiance(self): - 
"""Test the calibration with a different offset.""" - fake_data = xr.DataArray([[[0, 1, 2, 3], - [4, 1000, 65534, 65535]]], - dims=["band", "x", "y"]) - result = self.xml_fh.calibrate_to_radiances(fake_data, "B01") - expected = np.array([[[np.nan, -251.584265, -251.332429, -251.080593], - [-250.828757, 0., 16251.99095, np.inf]]]) - np.testing.assert_allclose(result, expected) - - def test_xml_navigation(self): + xml_tile_fh = xml_builder("L1C")[1] + assert xml_tile_fh.start_time() == tilemd_dt + + def test_navigation(self): """Test the navigation.""" from pyproj import CRS crs = CRS("EPSG:32616") dsid = make_dataid(name="B01", resolution=60) - result = self.xml_tile_fh.get_area_def(dsid) - - area_extents = (499980.0, 3590220.0, 609780.0, 3700020.0) + xml_tile_fh = xml_builder("L1C")[1] + result = xml_tile_fh.get_area_def(dsid) + area_extent = (499980.0, 3590220.0, 609780.0, 3700020.0) assert result.crs == crs - np.testing.assert_allclose(result.area_extent, area_extents) + np.testing.assert_allclose(result.area_extent, area_extent) + + +class TestMTDXML: + """Test the SAFE MTD XML file handler.""" + + def setup_method(self): + """Set up the test case.""" + self.fake_data = xr.DataArray([[[0, 1, 2, 3], [4, 1000, 65534, 65535]]], dims=["band", "x", "y"]) + + @pytest.mark.parametrize(("process_level", "mask_saturated", "band_name", "expected"), + [ + ("L1C", True, "B01", ([[[np.nan, -9.99, -9.98, -9.97], + [-9.96, 0, 645.34, np.inf]]], + [[[np.nan, -251.584265, -251.332429, -251.080593], + [-250.828757, 0., 16251.99095, np.inf]]], + [[[np.nan, 1, 2, 3], + [4, 1000, 65534, np.inf]]])), + ("L1C", False, "B10", ([[[np.nan, -19.99, -19.98, -19.97], + [-19.96, -10, 635.34, 635.35]]], + [[[np.nan, -35.465976, -35.448234, -35.430493], + [-35.412751, -17.741859, 1127.211275, 1127.229017]]], + [[[np.nan, 1, 2, 3], + [4, 1000, 65534, 65535]]])), + ("oldL1C", True, "B01", ([[[np.nan, 0.01, 0.02, 0.03], + [0.04, 10, 655.34, np.inf]]], + [[[np.nan, 0.251836101, 0.503672202, 0.755508303], + [1.00734440, 251.836101, 16503.8271, np.inf]]], + [[[np.nan, 1, 2, 3], + [4, 1000, 65534, np.inf]]])), + ("L2A", False, "B03", ([[[np.nan, -9.99, -9.98, -9.97], + [-9.96, 0, 645.34, 645.35]]], + [[[np.nan, -238.571863, -238.333052, -238.094241], + [-237.855431, 0, 15411.407995, 15411.646806]]], + [[[np.nan, 1, 2, 3], + [4, 1000, 65534, 65535]]])), + ]) + def test_xml_calibration(self, process_level, mask_saturated, band_name, expected): + """Test the calibration to reflectance/radiance/counts.""" + xml_fh = xml_builder(process_level, mask_saturated)[0] + + res1 = xml_fh.calibrate_to_reflectances(self.fake_data, band_name) + res2 = xml_fh.calibrate_to_radiances(self.fake_data, band_name) + res3 = xml_fh._sanitize_data(self.fake_data) + + results = (res1, res2, res3) + np.testing.assert_allclose(results, expected) + + @pytest.mark.parametrize(("process_level", "mask_saturated", "band_name", "expected"), + [ + ("L1C", True, "B01", None), + ("L2A", False, "AOT", [[[np.nan, 0.001, 0.002, 0.003], + [0.004, 1., 65.534, 65.535]]]), + ("L2A", True, "WVP", [[[np.nan, 0.001, 0.002, 0.003], + [0.004, 1., 65.534, np.inf]]]), + ("L2A", False, "CLOUD", None), + ("L2A", False, "B10", None), + ]) + def test_xml_calibration_to_atmospheric(self, process_level, mask_saturated, band_name, expected): + """Test the calibration to L2A atmospheric products.""" + xml_fh = xml_builder(process_level, mask_saturated)[0] + + result = xml_fh.calibrate_to_atmospheric(self.fake_data, band_name) + + if result is not None: + 
np.testing.assert_allclose(result, expected) + else: + assert result is expected class TestSAFEMSIL1C: @@ -988,36 +1608,45 @@ class TestSAFEMSIL1C: def setup_method(self): """Set up the test.""" - from satpy.readers.msi_safe import SAFEMSITileMDXML - self.filename_info = dict(observation_time=fname_dt, fmission_id="S2A", band_name="B01", dtile_number=None) self.fake_data = xr.Dataset({"band_data": xr.DataArray([[[0, 1], [65534, 65535]]], dims=["band", "x", "y"])}) - self.tile_mda = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(mtd_tile_xml), - self.filename_info, mock.MagicMock()) - self.tile_mda.start_time.return_value = tilemd_dt - - @pytest.mark.parametrize(("mask_saturated", "calibration", "expected"), - [(True, "reflectance", [[np.nan, 0.01 - 10], [645.34, np.inf]]), - (False, "reflectance", [[np.nan, 0.01 - 10], [645.34, 645.35]]), - (True, "radiance", [[np.nan, -251.58426503], [16251.99095011, np.inf]]), - (False, "counts", [[np.nan, 1], [65534, 65535]])]) - def test_calibration_and_masking(self, mask_saturated, calibration, expected): + + @pytest.mark.parametrize(("mask_saturated", "dataset_name", "calibration", "expected"), + [ + (False, "B01_L2A", "reflectance", [[np.nan, -9.99], [645.34, 645.35]]), + (True, "B02_L2A", "radiance", [[np.nan, -265.970568], [17181.325973, np.inf]]), + (True, "B03_L2A", "counts", [[np.nan, 1], [65534, np.inf]]), + (False, "AOT_L2A", "aerosol_thickness", [[np.nan, 0.001], [65.534, 65.535]]), + (True, "WVP_L2A", "water_vapor", [[np.nan, 0.001], [65.534, np.inf]]), + (True, "SNOW_L2A", "water_vapor", None), + ]) + def test_calibration_and_masking(self, mask_saturated, dataset_name, calibration, expected): """Test that saturated is masked with inf when requested and that calibration is performed.""" - from satpy.readers.msi_safe import SAFEMSIL1C, SAFEMSIMDXML + jp2_fh = jp2_builder("L2A", dataset_name.replace("_L2A", ""), mask_saturated) - mda = SAFEMSIMDXML(StringIO(mtd_l1c_xml), self.filename_info, mock.MagicMock(), - mask_saturated=mask_saturated) - self.jp2_fh = SAFEMSIL1C("somefile", self.filename_info, mock.MagicMock(), mda, self.tile_mda) + with mock.patch("xarray.open_dataset", return_value=self.fake_data): + res = jp2_fh.get_dataset(make_dataid(name=dataset_name, calibration=calibration), info=dict()) + if res is not None: + np.testing.assert_allclose(res, expected) + else: + assert res is expected + + @pytest.mark.parametrize(("process_level", "band_name", "dataset_name"), + [ + ("L1C", "B01", "B03"), + ("L2A", "B02", "B03_L2A"), + ]) + def test_filename_dsname_mismatch(self, process_level, band_name, dataset_name): + """Test when dataset name and file band name mismatch, the data and its area definition should both be None.""" + jp2_fh = jp2_builder(process_level, band_name) with mock.patch("xarray.open_dataset", return_value=self.fake_data): - res = self.jp2_fh.get_dataset(make_dataid(name="B01", calibration=calibration), info=dict()) - np.testing.assert_allclose(res, expected) + res1 = jp2_fh.get_dataset(make_dataid(name=dataset_name), info=dict()) + res2 = jp2_fh.get_area_def(make_dataid(name=dataset_name)) + assert res1 is None + assert res2 is None def test_start_time(self): """Test that the correct start time is returned.""" - from satpy.readers.msi_safe import SAFEMSIL1C, SAFEMSIMDXML - - mda = SAFEMSIMDXML(StringIO(mtd_l1c_xml), self.filename_info, mock.MagicMock()) - self.jp2_fh = SAFEMSIL1C("somefile", self.filename_info, mock.MagicMock(), - mda, self.tile_mda) - assert tilemd_dt == self.jp2_fh.start_time + jp2_fh = 
jp2_builder("L1C", "B01") + assert tilemd_dt == jp2_fh.start_time From 14231e3a7ce827001603248a0567f3cfaef02ff5 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 15 May 2024 15:11:17 +0800 Subject: [PATCH 1381/1416] try --- satpy/etc/readers/msi_safe.yaml | 41 ++++--- satpy/readers/msi_safe.py | 5 +- satpy/tests/reader_tests/test_msi_safe.py | 125 +++++++++++++++++++++- 3 files changed, 151 insertions(+), 20 deletions(-) diff --git a/satpy/etc/readers/msi_safe.yaml b/satpy/etc/readers/msi_safe.yaml index a7edb27d62..cc39c26a74 100644 --- a/satpy/etc/readers/msi_safe.yaml +++ b/satpy/etc/readers/msi_safe.yaml @@ -1,6 +1,8 @@ reader: name: msi_safe - + short_name: MSI SAFE L1C + long_name: Sentinel-2 A and B MSI L1C data in SAFE format + description: SAFE Reader for MSI L1C data (Sentinel-2) status: Nominal supports_fsspec: false sensors: [msi] @@ -8,7 +10,16 @@ reader: reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: - + l1c_safe_granule: + file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}.jp2'] + requires: [l1c_safe_metadata, l1c_safe_tile_metadata] + l1c_safe_tile_metadata: + file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSITileMDXML + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml'] + l1c_safe_metadata: + file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIMDXML + file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSIL1C.xml'] datasets: B01: @@ -26,7 +37,7 @@ datasets: counts: standard_name: counts units: "1" - + file_type: l1c_safe_granule B02: name: B02 @@ -43,7 +54,7 @@ datasets: counts: standard_name: counts units: "1" - + file_type: l1c_safe_granule B03: name: B03 @@ -60,7 +71,7 @@ datasets: counts: standard_name: counts units: "1" - + file_type: l1c_safe_granule B04: name: B04 @@ -77,7 +88,7 @@ datasets: counts: standard_name: counts units: "1" - + file_type: l1c_safe_granule B05: name: B05 @@ -94,7 +105,7 @@ datasets: counts: standard_name: counts units: "1" - + file_type: l1c_safe_granule B06: name: B06 @@ -111,7 +122,7 @@ datasets: counts: standard_name: counts units: "1" - + file_type: l1c_safe_granule B07: name: B07 @@ -128,7 +139,7 @@ datasets: counts: standard_name: counts units: "1" - + file_type: l1c_safe_granule B08: name: B08 @@ -145,7 +156,7 @@ datasets: counts: standard_name: counts units: "1" - + file_type: l1c_safe_granule B8A: name: B8A @@ -162,7 +173,7 @@ datasets: counts: standard_name: counts units: "1" - + file_type: l1c_safe_granule B09: name: B09 @@ -179,7 +190,7 @@ datasets: counts: standard_name: counts units: "1" - + file_type: l1c_safe_granule B10: name: B10 @@ -196,7 +207,7 @@ datasets: counts: standard_name: counts units: "1" - + file_type: l1c_safe_granule B11: name: B11 @@ -213,7 +224,7 
@@ datasets: counts: standard_name: counts units: "1" - + file_type: l1c_safe_granule B12: name: B12 @@ -230,7 +241,7 @@ datasets: counts: standard_name: counts units: "1" - + file_type: l1c_safe_granule solar_zenith_angle: name: solar_zenith_angle diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index 5fb7b70928..d36350f7ab 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -28,7 +28,7 @@ reader_kwargs={'mask_saturated': False}) scene.load(['B01']) - +L1C/L2A format description for the files read here: https://sentinels.copernicus.eu/documents/247904/685211/S2-PDGS-TAS-DI-PSD-V14.9.pdf/3d3b6c9c-4334-dcc4-3aa7-f7c0deffbaf7?t=1643013091529 @@ -75,9 +75,6 @@ def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, mask_s self._start_time = self._tile_mda.start_time() self._end_time = filename_info["observation_time"] - self._start_time = self._tile_mda.start_time() - self._end_time = filename_info["observation_time"] - def get_dataset(self, key, info): """Load a dataset.""" if self.process_level == "L1C": diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 6af7555265..d2de3e1a54 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -30,7 +30,7 @@ fname_dt = datetime(2020, 10, 1, 18, 35, 41) tilemd_dt = datetime(2020, 10, 1, 16, 34, 23, 153611) - +mtd_l1c_tile_xml = b""" @@ -1420,7 +1420,110 @@ """ # noqa +PROCESS_LEVELS = ["L1C", "oldL1C", "L2A"] +MTD_XMLS = [mtd_l1c_xml, mtd_l1c_old_xml, mtd_l2a_xml] +TILE_XMLS = [mtd_l1c_tile_xml, mtd_l1c_tile_xml, mtd_l1c_tile_xml] + + +def xml_builder(process_level, mask_saturated=True, band_name=None): + """Build fake SAFE MTD/Tile XML.""" + from satpy.readers.msi_safe import SAFEMSIMDXML, SAFEMSITileMDXML + filename_info = dict(observation_time=fname_dt, dtile_number=None, band_name=band_name, fmission_id="S2A", + process_level=process_level.replace("old", "")) + xml_fh = SAFEMSIMDXML(StringIO(MTD_XMLS[PROCESS_LEVELS.index(process_level)]), + filename_info, mock.MagicMock(), mask_saturated=mask_saturated) + xml_tile_fh = SAFEMSITileMDXML(BytesIO(TILE_XMLS[PROCESS_LEVELS.index(process_level)]), + filename_info, mock.MagicMock()) + return xml_fh, xml_tile_fh + + +def jp2_builder(process_level, band_name, mask_saturated=True): + """Build fake SAFE jp2 image file.""" + from satpy.readers.msi_safe import SAFEMSIL1C, SAFEMSITileMDXML + filename_info = dict(observation_time=fname_dt, dtile_number=None, band_name=band_name, fmission_id="S2A", + process_level=process_level.replace("old", "")) + xml_fh = xml_builder(process_level, mask_saturated, band_name)[0] + tile_xml_fh = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(TILE_XMLS[PROCESS_LEVELS.index(process_level)]), + filename_info, mock.MagicMock()) + tile_xml_fh.start_time.return_value = tilemd_dt + jp2_fh = SAFEMSIL1C("somefile", filename_info, mock.MagicMock(), xml_fh, tile_xml_fh) + return jp2_fh + + +class TestTileXML: + """Test the SAFE TILE XML file handler. + + Since L1C/L2A share almost the same Tile XML structure, we only use L1C Tile here. 
+ + """ + + @pytest.mark.parametrize(("process_level", "angle_name", "angle_tag", "expected"), + [ + ("L1C", "satellite_zenith_angle", ("Viewing_Incidence_Angles_Grids", "Zenith"), + [[11.7128, 11.18397802, 10.27667671, 9.35384969, 8.42850504, + 7.55445611, 6.65475545, 5.66517232, 4.75893757, 4.04976844], + [11.88606009, 10.9799713, 10.07083278, 9.14571825, 8.22607131, + 7.35181457, 6.44647222, 5.46144173, 4.56625547, 3.86638233], + [11.6823579, 10.7763071, 9.86302106, 8.93879112, 8.04005637, + 7.15028077, 6.21461062, 5.25780953, 4.39876601, 3.68620793], + [11.06724679, 10.35723901, 9.63958896, 8.73072512, 7.83680864, + 6.94792574, 5.9889201, 5.05445872, 4.26089708, 3.50984272], + [6.28411038, 6.28411038, 6.28411038, 6.28411038, 6.28411038, + 5.99769643, 5.62586167, 4.85165966, 4.13238314, 3.33781401], + [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, + 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], + [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, + 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], + [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, + 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], + [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, + 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], + [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, + 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837]]), + ("L2A", "solar_zenith_angle_l2a", ("Sun_Angles_Grid", "Zenith"), + [[39.8824, 39.83721367, 39.79230847, 39.74758442, 39.7030415, + 39.65867687, 39.61455566, 39.57061558, 39.52685664, 39.48331372], + [39.78150175, 39.73629896, 39.69128852, 39.64643679, 39.6018404, + 39.5574369, 39.51323286, 39.46920212, 39.4253673, 39.38179377], + [39.6806035, 39.63532838, 39.5902497, 39.54538507, 39.5007087, + 39.45621756, 39.41195347, 39.36779169, 39.3239121, 39.28027381], + [39.57980525, 39.53445664, 39.48931088, 39.44434154, 39.39957879, + 39.35503587, 39.31067408, 39.26649344, 39.22249393, 39.17876143], + [39.479007, 39.43355483, 39.38829092, 39.34328573, 39.29846167, + 39.25381983, 39.2093947, 39.16513007, 39.12109926, 39.07726878], + [39.37820875, 39.33268069, 39.28735495, 39.24224914, 39.19736058, + 39.15267709, 39.1081719, 39.06385068, 39.01973446, 38.97584982], + [39.2774105, 39.23184303, 39.18646737, 39.14130809, 39.09632176, + 39.05153988, 39.00696049, 38.9625713, 38.91842056, 38.87444401], + [39.17671225, 39.13104478, 39.08559031, 39.04034757, 38.99528294, + 38.95039991, 38.9057971, 38.86130793, 38.81705183, 38.77303821], + [39.076014, 39.03026112, 38.98477906, 38.93940875, 38.89425338, + 38.84936063, 38.80464763, 38.76011645, 38.7157479, 38.67164839], + [38.97531575, 38.92950771, 38.88389967, 38.83852091, 38.7933053, + 38.74831897, 38.7034912, 38.65891427, 38.61446851, 38.57030388]]), + ("L1C", "moon_zenith_angle", ("Sun_Angles_Grid", "Zenith"), None) + ]) + def test_angles(self, process_level, angle_name, angle_tag, expected): + """Test reading angles array.""" + info = dict(xml_tag=angle_tag[0], xml_item=angle_tag[1]) if "satellite" in angle_name else \ + dict(xml_tag=angle_tag[0] + "/" + angle_tag[1]) + xml_tile_fh = xml_builder(process_level)[1] + + res = xml_tile_fh.get_dataset(make_dataid(name=angle_name, resolution=60), info) + if res is not None: + res = res[::200, ::200] + + if res is not None: + np.testing.assert_allclose(res, expected) + else: + assert res is expected + def test_start_time(self): + """Ensure start time is read correctly from XML.""" + xml_tile_fh = xml_builder("L1C")[1] + assert xml_tile_fh.start_time() == tilemd_dt + + def test_navigation(self): """Test the navigation.""" from pyproj import CRS crs = CRS("EPSG:32616") @@ 
-1505,7 +1608,18 @@ class TestSAFEMSIL1C: def setup_method(self): """Set up the test.""" + self.fake_data = xr.Dataset({"band_data": xr.DataArray([[[0, 1], [65534, 65535]]], dims=["band", "x", "y"])}) + @pytest.mark.parametrize(("mask_saturated", "dataset_name", "calibration", "expected"), + [ + (False, "B01_L2A", "reflectance", [[np.nan, -9.99], [645.34, 645.35]]), + (True, "B02_L2A", "radiance", [[np.nan, -265.970568], [17181.325973, np.inf]]), + (True, "B03_L2A", "counts", [[np.nan, 1], [65534, np.inf]]), + (False, "AOT_L2A", "aerosol_thickness", [[np.nan, 0.001], [65.534, 65.535]]), + (True, "WVP_L2A", "water_vapor", [[np.nan, 0.001], [65.534, np.inf]]), + (True, "SNOW_L2A", "water_vapor", None), + ]) + def test_calibration_and_masking(self, mask_saturated, dataset_name, calibration, expected): """Test that saturated is masked with inf when requested and that calibration is performed.""" jp2_fh = jp2_builder("L2A", dataset_name.replace("_L2A", ""), mask_saturated) @@ -1526,4 +1640,13 @@ def test_filename_dsname_mismatch(self, process_level, band_name, dataset_name): jp2_fh = jp2_builder(process_level, band_name) with mock.patch("xarray.open_dataset", return_value=self.fake_data): + res1 = jp2_fh.get_dataset(make_dataid(name=dataset_name), info=dict()) + res2 = jp2_fh.get_area_def(make_dataid(name=dataset_name)) + + assert res1 is None + assert res2 is None + def test_start_time(self): + """Test that the correct start time is returned.""" + jp2_fh = jp2_builder("L1C", "B01") + assert tilemd_dt == jp2_fh.start_time From f33c3e4c817c16ecc8fbb4211a93900d0de7285a Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 15 May 2024 14:01:19 +0200 Subject: [PATCH 1382/1416] Update np.random usages --- satpy/tests/reader_tests/_li_test_utils.py | 29 ++++++++++--------- satpy/tests/reader_tests/test_epic_l1b_h5.py | 17 ++++++----- .../tests/reader_tests/test_generic_image.py | 4 +-- satpy/tests/reader_tests/test_goci2_l2_nc.py | 9 +++--- satpy/tests/reader_tests/test_hrit_base.py | 7 +++-- .../reader_tests/test_insat3d_img_l1b_h5.py | 8 ++--- satpy/tests/reader_tests/test_mirs.py | 13 +++++---- satpy/tests/reader_tests/test_nwcsaf_msg.py | 21 +++++++------- satpy/tests/reader_tests/test_nwcsaf_nc.py | 9 +++--- .../reader_tests/test_seviri_l1b_hrit.py | 14 ++++----- .../tests/reader_tests/test_seviri_l2_bufr.py | 8 ++--- satpy/tests/reader_tests/test_sgli_l1b.py | 5 ++-- .../tests/reader_tests/test_viirs_compact.py | 21 +++++++------- satpy/tests/reader_tests/test_viirs_edr.py | 8 +++-- satpy/tests/test_composites.py | 6 ++-- satpy/tests/test_modifiers.py | 24 ++++++++------- satpy/tests/utils.py | 2 ++ 17 files changed, 110 insertions(+), 95 deletions(-) diff --git a/satpy/tests/reader_tests/_li_test_utils.py b/satpy/tests/reader_tests/_li_test_utils.py index 32107006fc..b656decb89 100644 --- a/satpy/tests/reader_tests/_li_test_utils.py +++ b/satpy/tests/reader_tests/_li_test_utils.py @@ -20,6 +20,7 @@ import xarray as xr from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler +from satpy.tests.utils import RANDOM_GEN # mapping of netcdf type code to numpy data type: TYPE_MAP = { @@ -44,7 +45,7 @@ def l2_le_schema(settings=None): nfilters = settings.get("num_filters", 2) def rand_u16(num): - return np.random.randint(low=0, high=np.iinfo(np.uint16).max - 1, size=num, dtype=np.uint16) + return RANDOM_GEN.integers(low=0, high=np.iinfo(np.uint16).max - 1, size=num, dtype=np.uint16) return { "providers": settings.get("providers", {}), @@ -100,7 +101,7 @@ def rand_u16(num): 
"scale_factor": 0.004, "add_offset": 0.0, "long_name": "L2 filter results", - "default_data": lambda: np.random.randint(low=0, high=255, size=(nobs, nfilters), dtype=np.uint8) + "default_data": lambda: RANDOM_GEN.integers(low=0, high=255, size=(nobs, nfilters), dtype=np.uint8) }, "epoch_time": { "format": "f8", @@ -212,13 +213,13 @@ def l2_lef_schema(settings=None): "long_name": "Radiance of Flash", "standard_name": "radiance", "units": "mW.m-2.sr-1", - "default_data": lambda: np.clip(np.round(np.random.normal(500, 100, nobs)), 1, 2 ** 16 - 1) + "default_data": lambda: np.clip(np.round(RANDOM_GEN.normal(500, 100, nobs)), 1, 2 ** 16 - 1) }, "event_filter_qa": { "format": "u1", "shape": ("events",), "long_name": "L2 event pre-filtering quality assurance value", - "default_data": lambda: np.random.randint(1, 2 ** 8 - 1, nobs) + "default_data": lambda: RANDOM_GEN.integers(1, 2 ** 8 - 1, nobs) }, "epoch_time": { "format": "f8", @@ -232,21 +233,21 @@ def l2_lef_schema(settings=None): "shape": ("events",), "long_name": "Time offset from epoch time", "units": "seconds", - "default_data": lambda: np.random.uniform(1, 2 ** 31 - 1, nobs) + "default_data": lambda: RANDOM_GEN.uniform(1, 2 ** 31 - 1, nobs) }, "detector_row": { "format": "u2", "shape": ("events",), "long_name": "Detector row position of event pixel", "units": "1", - "default_data": lambda: np.random.randint(1, 1000, nobs) + "default_data": lambda: RANDOM_GEN.integers(1, 1000, nobs) }, "detector_column": { "format": "u2", "shape": ("events",), "long_name": "Detector column position of event pixel", "units": "1", - "default_data": lambda: np.random.randint(1, 1000, nobs) + "default_data": lambda: RANDOM_GEN.integers(1, 1000, nobs) }, } } @@ -328,7 +329,7 @@ def l2_lfl_schema(settings=None): "long_name": "Radiance of Flash", "standard_name": "radiance", "units": "mW.m-2.sr-1", - "default_data": lambda: np.round(np.random.normal(500, 100, nobs)) + "default_data": lambda: np.round(RANDOM_GEN.normal(500, 100, nobs)) }, "flash_duration": { "format": "u2", @@ -343,7 +344,7 @@ def l2_lfl_schema(settings=None): "shape": ("flashes",), "long_name": "L2 filtered flash confidence", "standard_name": "flash_filter_confidence", - "default_data": lambda: np.clip(np.round(np.random.normal(20, 10, nobs)), 1, 2 ** 7 - 1) + "default_data": lambda: np.clip(np.round(RANDOM_GEN.normal(20, 10, nobs)), 1, 2 ** 7 - 1) }, "flash_footprint": { "format": "u2", @@ -351,7 +352,7 @@ def l2_lfl_schema(settings=None): "long_name": "Flash footprint size", "standard_name": "flash_footprint", "units": "L1 grid pixels", - "default_data": lambda: np.maximum(1, np.round(np.random.normal(5, 3, nobs))) + "default_data": lambda: np.maximum(1, np.round(RANDOM_GEN.normal(5, 3, nobs))) }, "flash_id": { "format": "u4", @@ -367,7 +368,7 @@ def l2_lfl_schema(settings=None): "units": "seconds since 2000-01-01 00:00:00.0", "standard_name": "time", "precision": "1 millisecond", - "default_data": lambda: np.random.uniform(stime, etime, nobs) + "default_data": lambda: RANDOM_GEN.uniform(stime, etime, nobs) }, "l1b_geolocation_warning": { "format": "i1", @@ -437,7 +438,7 @@ def l2_af_schema(settings=None): "flash_accumulation": { "format": "u2", "shape": ("pixels",), - "default_data": lambda: np.clip(np.round(np.random.normal(1, 2, nobs)), 1, 2 ** 16 - 1) + "default_data": lambda: np.clip(np.round(RANDOM_GEN.normal(1, 2, nobs)), 1, 2 ** 16 - 1) }, "mtg_geos_projection": mtg_geos_projection(), "x": fci_grid_definition("X", nobs), @@ -495,7 +496,7 @@ def l2_afr_schema(settings=None): 
"long_name": "Area averaged flash radiance accumulation", "grid_mapping": "mtg_geos_projection", "coordinate": "sparse: x y", - "default_data": lambda: np.random.randint(low=1, high=6548, size=(120), dtype=np.int16) + "default_data": lambda: RANDOM_GEN.integers(low=1, high=6548, size=(120), dtype=np.int16) }, "accumulation_start_times": { "format": "f4", @@ -538,7 +539,7 @@ def fci_grid_definition(axis, nobs): "standard_name": standard_name, "units": "radian", "valid_range": np.asarray([1, 5568]), - "default_data": lambda: np.clip(np.round(np.random.normal(2000, 500, nobs)), 1, 2 ** 16 - 1) + "default_data": lambda: np.clip(np.round(RANDOM_GEN.normal(2000, 500, nobs)), 1, 2 ** 16 - 1) } diff --git a/satpy/tests/reader_tests/test_epic_l1b_h5.py b/satpy/tests/reader_tests/test_epic_l1b_h5.py index 472cda7f2d..1ff4c76c07 100644 --- a/satpy/tests/reader_tests/test_epic_l1b_h5.py +++ b/satpy/tests/reader_tests/test_epic_l1b_h5.py @@ -25,14 +25,15 @@ import pytest from satpy.readers.epic_l1b_h5 import CALIB_COEFS - -b317_data = np.random.uniform(low=0, high=5200, size=(100, 100)) -b688_data = np.random.uniform(low=0, high=5200, size=(100, 100)) -sza_data = np.random.uniform(low=0, high=100, size=(100, 100)) -vaa_data = np.random.uniform(low=-180, high=180, size=(100, 100)) -lon_data = np.random.uniform(low=-90, high=90, size=(100, 100)) -lat_data = np.random.uniform(low=-180, high=180, size=(100, 100)) -mas_data = np.random.choice([0, 1], size=(100, 100)) +from satpy.tests.utils import RANDOM_GEN + +b317_data = RANDOM_GEN.uniform(low=0, high=5200, size=(100, 100)) +b688_data = RANDOM_GEN.uniform(low=0, high=5200, size=(100, 100)) +sza_data = RANDOM_GEN.uniform(low=0, high=100, size=(100, 100)) +vaa_data = RANDOM_GEN.uniform(low=-180, high=180, size=(100, 100)) +lon_data = RANDOM_GEN.uniform(low=-90, high=90, size=(100, 100)) +lat_data = RANDOM_GEN.uniform(low=-180, high=180, size=(100, 100)) +mas_data = RANDOM_GEN.choice([0, 1], size=(100, 100)) @pytest.fixture() diff --git a/satpy/tests/reader_tests/test_generic_image.py b/satpy/tests/reader_tests/test_generic_image.py index cd347ce07e..f7a27097a8 100644 --- a/satpy/tests/reader_tests/test_generic_image.py +++ b/satpy/tests/reader_tests/test_generic_image.py @@ -24,7 +24,7 @@ import pytest import xarray as xr -from satpy.tests.utils import make_dataid +from satpy.tests.utils import RANDOM_GEN, make_dataid class TestGenericImage(unittest.TestCase): @@ -62,7 +62,7 @@ def setUp(self): a__[:10, :10] = 0 a__ = da.from_array(a__, chunks=(50, 50)) - r_nan__ = np.random.uniform(0., 1., size=(self.y_size, self.x_size)) + r_nan__ = RANDOM_GEN.uniform(0., 1., size=(self.y_size, self.x_size)) r_nan__[:10, :10] = np.nan r_nan__ = da.from_array(r_nan__, chunks=(50, 50)) diff --git a/satpy/tests/reader_tests/test_goci2_l2_nc.py b/satpy/tests/reader_tests/test_goci2_l2_nc.py index 865ac3184e..da54e11848 100644 --- a/satpy/tests/reader_tests/test_goci2_l2_nc.py +++ b/satpy/tests/reader_tests/test_goci2_l2_nc.py @@ -24,6 +24,7 @@ from pytest_lazyfixture import lazy_fixture from satpy import Scene +from satpy.tests.utils import RANDOM_GEN # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: @@ -78,7 +79,7 @@ def _create_bad_lon_lat(): @pytest.fixture(scope="session") def ac_file(tmp_path_factory): """Create a fake atmospheric correction product.""" - data = np.random.random((10, 10)) + data = RANDOM_GEN.random((10, 10)) RhoC = xr.Dataset( {"RhoC_555": (["number_of_lines", "pixels_per_line"], data)}, 
coords={"number_of_lines": np.arange(10), "pixels_per_line": np.arange(10)}, @@ -102,7 +103,7 @@ def ac_file(tmp_path_factory): @pytest.fixture(scope="module") def iop_file(tmp_path_factory): """Create a fake IOP product.""" - data = np.random.random((10, 10)) + data = RANDOM_GEN.random((10, 10)) a = xr.Dataset( {"a_total_555": (["number_of_lines", "pixels_per_line"], data)}, coords={"number_of_lines": np.arange(10), "pixels_per_line": np.arange(10)}, @@ -124,7 +125,7 @@ def iop_file(tmp_path_factory): @pytest.fixture(scope="module") def generic_file(tmp_path_factory): """Create a fake ouput product like Chl, Zsd etc.""" - data = np.random.random((10, 10)) + data = RANDOM_GEN.random((10, 10)) geophysical_data = xr.Dataset( {"Chl": (["number_of_lines", "pixels_per_line"], data)}, coords={"number_of_lines": np.arange(10), "pixels_per_line": np.arange(10)}, @@ -141,7 +142,7 @@ def generic_file(tmp_path_factory): @pytest.fixture(scope="module") def generic_bad_file(tmp_path_factory): """Create a PP product with lon/lat base name missing.""" - data = np.random.random((10, 10)) + data = RANDOM_GEN.random((10, 10)) geophysical_data = xr.Dataset( {"PP": (["number_of_lines", "pixels_per_line"], data)}, coords={"number_of_lines": np.arange(10), "pixels_per_line": np.arange(10)}, diff --git a/satpy/tests/reader_tests/test_hrit_base.py b/satpy/tests/reader_tests/test_hrit_base.py index 12317f11f1..eb4a073e02 100644 --- a/satpy/tests/reader_tests/test_hrit_base.py +++ b/satpy/tests/reader_tests/test_hrit_base.py @@ -30,6 +30,7 @@ from satpy.readers import FSFile from satpy.readers.hrit_base import HRITFileHandler, decompress, get_xritdecompress_cmd, get_xritdecompress_outfile +from satpy.tests.utils import RANDOM_GEN # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: @@ -145,9 +146,9 @@ def create_stub_hrit(filename, open_fun=open, meta=mda): lines = meta["number_of_lines"] cols = meta["number_of_columns"] total_bits = lines * cols * nbits - arr = np.random.randint(0, 256, - size=int(total_bits / 8), - dtype=np.uint8) + arr = RANDOM_GEN.integers(0, 256, + size=int(total_bits / 8), + dtype=np.uint8) with open_fun(filename, mode="wb") as fd: fd.write(b" " * meta["total_header_length"]) bytes_data = arr.tobytes() diff --git a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py index 9fa7af224d..262e832f6a 100644 --- a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py +++ b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py @@ -16,7 +16,7 @@ open_dataset, open_datatree, ) -from satpy.tests.utils import make_dataid +from satpy.tests.utils import RANDOM_GEN, make_dataid # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: @@ -30,10 +30,10 @@ alb_units = "%" temp_units = "K" chunks_1km = (1, 46, 1126) -values_1km = np.random.randint(0, 1000, shape_1km, dtype=np.uint16) +values_1km = RANDOM_GEN.integers(0, 1000, shape_1km, dtype=np.uint16) values_1km[0, 0, 0] = 0 -values_4km = np.random.randint(0, 1000, shape_4km, dtype=np.uint16) -values_8km = np.random.randint(0, 1000, shape_8km, dtype=np.uint16) +values_4km = RANDOM_GEN.integers(0, 1000, shape_4km, dtype=np.uint16) +values_8km = RANDOM_GEN.integers(0, 1000, shape_8km, dtype=np.uint16) values_by_resolution = {1000: values_1km, 4000: values_4km, diff --git a/satpy/tests/reader_tests/test_mirs.py b/satpy/tests/reader_tests/test_mirs.py index b857147e47..3aeaacb2b0 100644 --- 
a/satpy/tests/reader_tests/test_mirs.py +++ b/satpy/tests/reader_tests/test_mirs.py @@ -31,6 +31,7 @@ from satpy.dataset import DataID from satpy.readers import load_reader from satpy.readers.yaml_reader import FileYAMLReader +from satpy.tests.utils import RANDOM_GEN METOP_FILE = "IMG_SX.M2.D17037.S1601.E1607.B0000001.WE.HR.ORB.nc" NPP_MIRS_L2_SWATH = "NPR-MIRS-IMG_v11r6_npp_s201702061601000_e201702061607000_c202012201658410.nc" @@ -77,7 +78,7 @@ def fake_coeff_from_fn(fn): """Create Fake Coefficients.""" - ameans = np.random.uniform(261, 267, N_CHANNEL) + ameans = RANDOM_GEN.uniform(261, 267, N_CHANNEL) locations = [ [1, 2], [1, 2], @@ -117,7 +118,7 @@ def fake_coeff_from_fn(fn): str_coeff = " ".join([str(x) for x in random_coeff]) random_means = np.zeros(all_nchx[nx]) str_means = " ".join([str(x) for x in random_means]) - error_val = np.random.uniform(0, 4) + error_val = RANDOM_GEN.uniform(0, 4) coeffs_line = " {:>2} {:>2} {} {} {}\n".format(idx, fov, str_coeff, str_means, @@ -138,7 +139,7 @@ def _get_datasets_with_attributes(**kwargs): "_FillValue": -999, "valid_range": [0, 50000]}, dims=("Scanline", "Field_of_view", "Channel")) - rr = xr.DataArray(np.random.randint(100, 500, size=(N_SCANLINE, N_FOV), dtype=np.int16), + rr = xr.DataArray(RANDOM_GEN.integers(100, 500, size=(N_SCANLINE, N_FOV), dtype=np.int16), attrs={"long_name": "Rain Rate (mm/hr)", "units": "mm/hr", "coordinates": "Longitude Latitude", @@ -146,7 +147,7 @@ def _get_datasets_with_attributes(**kwargs): "_FillValue": -999, "valid_range": [0, 1000]}, dims=("Scanline", "Field_of_view")) - sfc_type = xr.DataArray(np.random.randint(0, 4, size=(N_SCANLINE, N_FOV), dtype=np.int16), + sfc_type = xr.DataArray(RANDOM_GEN.integers(0, 4, size=(N_SCANLINE, N_FOV), dtype=np.int16), attrs={"description": "type of surface:0-ocean," + "1-sea ice,2-land,3-snow", "units": "1", @@ -187,12 +188,12 @@ def _get_datasets_with_less_attributes(): attrs={"long_name": "Channel Temperature (K)", "scale_factor": 0.01}, dims=("Scanline", "Field_of_view", "Channel")) - rr = xr.DataArray(np.random.randint(100, 500, size=(N_SCANLINE, N_FOV), dtype=np.int16), + rr = xr.DataArray(RANDOM_GEN.integers(100, 500, size=(N_SCANLINE, N_FOV), dtype=np.int16), attrs={"long_name": "Rain Rate (mm/hr)", "scale_factor": 0.1}, dims=("Scanline", "Field_of_view")) - sfc_type = xr.DataArray(np.random.randint(0, 4, size=(N_SCANLINE, N_FOV), dtype=np.int16), + sfc_type = xr.DataArray(RANDOM_GEN.integers(0, 4, size=(N_SCANLINE, N_FOV), dtype=np.int16), attrs={"description": "type of surface:0-ocean," + "1-sea ice,2-land,3-snow"}, dims=("Scanline", "Field_of_view")) diff --git a/satpy/tests/reader_tests/test_nwcsaf_msg.py b/satpy/tests/reader_tests/test_nwcsaf_msg.py index 1c8e0fb793..1a9b2ca3bf 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_msg.py +++ b/satpy/tests/reader_tests/test_nwcsaf_msg.py @@ -26,12 +26,13 @@ import pytest from satpy.tests.reader_tests.utils import fill_h5 +from satpy.tests.utils import RANDOM_GEN -CTYPE_TEST_ARRAY = (np.random.rand(1856, 3712) * 255).astype(np.uint8) +CTYPE_TEST_ARRAY = (RANDOM_GEN.random((1856, 3712)) * 255).astype(np.uint8) CTYPE_TEST_FRAME = (np.arange(100).reshape(10, 10) / 100. * 20).astype(np.uint8) CTYPE_TEST_ARRAY[1000:1010, 1000:1010] = CTYPE_TEST_FRAME -CTTH_HEIGHT_TEST_ARRAY = (np.random.rand(1856, 3712) * 255).astype(np.uint8) +CTTH_HEIGHT_TEST_ARRAY = (RANDOM_GEN.random((1856, 3712)) * 255).astype(np.uint8) _CTTH_HEIGHT_TEST_FRAME = (np.arange(100).reshape(10, 10) / 100. 
* 80).astype(np.uint8) CTTH_HEIGHT_TEST_ARRAY[1000:1010, 1000:1010] = _CTTH_HEIGHT_TEST_FRAME @@ -39,7 +40,7 @@ CTTH_HEIGHT_TEST_FRAME_RES[0, 0:10] = np.nan CTTH_HEIGHT_TEST_FRAME_RES[1, 0:3] = np.nan -CTTH_PRESSURE_TEST_ARRAY = (np.random.rand(1856, 3712) * 255).astype(np.uint8) +CTTH_PRESSURE_TEST_ARRAY = (RANDOM_GEN.random((1856, 3712)) * 255).astype(np.uint8) _CTTH_PRESSURE_TEST_FRAME = (np.arange(100).reshape(10, 10) / 100. * 54).astype(np.uint8) CTTH_PRESSURE_TEST_ARRAY[1000:1010, 1000:1010] = _CTTH_PRESSURE_TEST_FRAME @@ -47,7 +48,7 @@ CTTH_PRESSURE_TEST_FRAME_RES[0, 0:10] = np.nan CTTH_PRESSURE_TEST_FRAME_RES[1, 0:9] = np.nan -CTTH_TEMPERATURE_TEST_ARRAY = (np.random.rand(1856, 3712) * 255).astype(np.uint8) +CTTH_TEMPERATURE_TEST_ARRAY = (RANDOM_GEN.random((1856, 3712)) * 255).astype(np.uint8) _CTTH_TEMPERATURE_TEST_FRAME = (np.arange(100).reshape(10, 10) / 100. * 140).astype(np.uint8) _CTTH_TEMPERATURE_TEST_FRAME[8, 5] = 255 CTTH_TEMPERATURE_TEST_ARRAY[1000:1010, 1000:1010] = _CTTH_TEMPERATURE_TEST_FRAME @@ -130,7 +131,7 @@ "PRODUCT": b"CT__", "SCALING_FACTOR": 1.0, }, - "value": (np.random.rand(1856, 3712) * 255).astype(np.uint8), + "value": (RANDOM_GEN.random((1856, 3712)) * 255).astype(np.uint8), }, "CT_QUALITY": { "attrs": { @@ -145,7 +146,7 @@ "PRODUCT": b"CT__", "SCALING_FACTOR": 1.0, }, - "value": (np.random.rand(1856, 3712) * 65535).astype(np.uint16), + "value": (RANDOM_GEN.random((1856, 3712)) * 65535).astype(np.uint16), }, "attrs": { "CFAC": 13642337, @@ -255,7 +256,7 @@ "PAL_COLORMODEL": b"RGB", "PAL_TYPE": b"DIRECTINDEX", }, - "value": (np.random.rand(128, 3) * 255).astype(np.uint8), + "value": (RANDOM_GEN.random((128, 3)) * 255).astype(np.uint8), }, "03-PALETTE": { "attrs": { @@ -263,7 +264,7 @@ "PAL_COLORMODEL": b"RGB", "PAL_TYPE": b"DIRECTINDEX", }, - "value": (np.random.rand(256, 3) * 255).astype(np.uint8), + "value": (RANDOM_GEN.random((256, 3)) * 255).astype(np.uint8), }, "04-PALETTE": { "attrs": { @@ -323,7 +324,7 @@ "PRODUCT": b"CTTH", "SCALING_FACTOR": 5.0, }, - "value": (np.random.rand(1856, 3712) * 255).astype(np.uint8), + "value": (RANDOM_GEN.random((1856, 3712)) * 255).astype(np.uint8), }, "CTTH_HEIGHT": { "attrs": { @@ -370,7 +371,7 @@ "PRODUCT": b"CTTH", "SCALING_FACTOR": 1.0, }, - "value": (np.random.rand(1856, 3712) * 65535).astype(np.uint16), + "value": (RANDOM_GEN.random((1856, 3712)) * 65535).astype(np.uint16), }, "CTTH_TEMPER": { "attrs": { diff --git a/satpy/tests/reader_tests/test_nwcsaf_nc.py b/satpy/tests/reader_tests/test_nwcsaf_nc.py index 4f6755f390..6a509f023f 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_nc.py +++ b/satpy/tests/reader_tests/test_nwcsaf_nc.py @@ -22,6 +22,7 @@ import xarray as xr from satpy.readers.nwcsaf_nc import NcNWCSAF, read_nwcsaf_time +from satpy.tests.utils import RANDOM_GEN PROJ_KM = {"gdal_projection": "+proj=geos +a=6378.137000 +b=6356.752300 +lon_0=0.000000 +h=35785.863000", "gdal_xgeo_up_left": -5569500.0, @@ -83,9 +84,9 @@ COT_SCALE = 0.01 COT_OFFSET = 0.0 -CRE_ARRAY = np.random.randint(0, 65535, size=(928, 1530), dtype=np.uint16) -COT_ARRAY = np.random.randint(0, 65535, size=(928, 1530), dtype=np.uint16) -PAL_ARRAY = np.random.randint(0, 255, size=(250, 3), dtype=np.uint8) +CRE_ARRAY = RANDOM_GEN.integers(0, 65535, size=(928, 1530), dtype=np.uint16) +COT_ARRAY = RANDOM_GEN.integers(0, 65535, size=(928, 1530), dtype=np.uint16) +PAL_ARRAY = RANDOM_GEN.integers(0, 255, size=(250, 3), dtype=np.uint8) @pytest.fixture(scope="session") @@ -104,7 +105,7 @@ def create_nwcsaf_geo_ct_file(directory, 
attrs=global_attrs_geo): var = nc_file.create_variable(var_name, ("ny", "nx"), np.uint16, chunks=(256, 256)) - var[:] = np.random.randint(0, 255, size=(928, 1530), dtype=np.uint8) + var[:] = RANDOM_GEN.integers(0, 255, size=(928, 1530), dtype=np.uint8) return filename diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py index 3fe00edc80..80a14e21f4 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py @@ -31,7 +31,7 @@ from satpy.readers.seviri_l1b_hrit import HRITMSGEpilogueFileHandler, HRITMSGFileHandler, HRITMSGPrologueFileHandler from satpy.tests.reader_tests.test_seviri_base import ORBIT_POLYNOMIALS_INVALID from satpy.tests.reader_tests.test_seviri_l1b_calibration import TestFileHandlerCalibrationBase -from satpy.tests.utils import assert_attrs_equal, make_dataid +from satpy.tests.utils import RANDOM_GEN, assert_attrs_equal, make_dataid class TestHRITMSGBase(unittest.TestCase): @@ -64,9 +64,9 @@ def setUp(self): def test_read_hrv_band(self, memmap): """Test reading the hrv band.""" nbits = self.reader.mda["number_of_bits_per_pixel"] - memmap.return_value = np.random.randint(0, 256, - size=int((464 * 5568 * nbits) / 8), - dtype=np.uint8) + memmap.return_value = RANDOM_GEN.integers(0, 256, + size=int((464 * 5568 * nbits) / 8), + dtype=np.uint8) res = self.reader.read_band("HRV", None) assert res.shape == (464, 5568) @@ -181,9 +181,9 @@ def test_get_area_def(self): def test_read_band(self, memmap): """Test reading a band.""" nbits = self.reader.mda["number_of_bits_per_pixel"] - memmap.return_value = np.random.randint(0, 256, - size=int((464 * 3712 * nbits) / 8), - dtype=np.uint8) + memmap.return_value = RANDOM_GEN.integers(0, 256, + size=int((464 * 3712 * nbits) / 8), + dtype=np.uint8) res = self.reader.read_band("VIS006", None) assert res.shape == (464, 3712) diff --git a/satpy/tests/reader_tests/test_seviri_l2_bufr.py b/satpy/tests/reader_tests/test_seviri_l2_bufr.py index ec3fdf7b56..09e3fa93d5 100644 --- a/satpy/tests/reader_tests/test_seviri_l2_bufr.py +++ b/satpy/tests/reader_tests/test_seviri_l2_bufr.py @@ -27,7 +27,7 @@ import pytest from pyresample import geometry -from satpy.tests.utils import make_dataid +from satpy.tests.utils import RANDOM_GEN, make_dataid FILETYPE_INFO = {"file_type": "seviri_l2_bufr_asr"} @@ -109,9 +109,9 @@ ] # Test data -DATA = np.random.uniform(low=250, high=350, size=(128,)) -LAT = np.random.uniform(low=-80, high=80, size=(128,)) -LON = np.random.uniform(low=-38.5, high=121.5, size=(128,)) +DATA = RANDOM_GEN.uniform(low=250, high=350, size=(128,)) +LAT = RANDOM_GEN.uniform(low=-80, high=80, size=(128,)) +LON = RANDOM_GEN.uniform(low=-38.5, high=121.5, size=(128,)) class SeviriL2BufrData: diff --git a/satpy/tests/reader_tests/test_sgli_l1b.py b/satpy/tests/reader_tests/test_sgli_l1b.py index 7f5fffa70c..7c2551a211 100644 --- a/satpy/tests/reader_tests/test_sgli_l1b.py +++ b/satpy/tests/reader_tests/test_sgli_l1b.py @@ -8,14 +8,15 @@ import pytest from satpy.readers.sgli_l1b import HDF5SGLI +from satpy.tests.utils import RANDOM_GEN START_TIME = datetime.now() END_TIME = START_TIME + timedelta(minutes=5) FULL_KM_ARRAY = np.arange(1955 * 1250, dtype=np.uint16).reshape((1955, 1250)) MASK = 16383 LON_LAT_ARRAY = np.arange(197 * 126, dtype=np.float32).reshape((197, 126)) -AZI_ARRAY = np.random.randint(-180 * 100, 180 * 100, size=(197, 126), dtype=np.int16) -ZEN_ARRAY = np.random.randint(0, 180 * 100, size=(197, 126), dtype=np.int16) 
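Note: every hunk in this commit applies the same mechanical mapping from the legacy np.random module-level functions to the numpy.random.Generator API behind RANDOM_GEN. A minimal sketch of the correspondence, assuming only numpy; the seed, shapes and bounds below are illustrative, not taken from the tests:

import numpy as np

rng = np.random.default_rng(seed=42)  # stand-in for satpy.tests.utils.RANDOM_GEN

# np.random.randint(low, high, size, dtype) -> Generator.integers(low, high, size=..., dtype=...)
ints = rng.integers(0, 256, size=(4, 4), dtype=np.uint8)

# np.random.rand(m, n) and np.random.random_sample((m, n)) -> Generator.random((m, n))
floats = rng.random((4, 4)).astype(np.float32)

# np.random.uniform(low, high, size) keeps the same signature on the Generator
uniform = rng.uniform(low=250, high=350, size=(128,))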
+AZI_ARRAY = RANDOM_GEN.integers(-180 * 100, 180 * 100, size=(197, 126), dtype=np.int16) +ZEN_ARRAY = RANDOM_GEN.integers(0, 180 * 100, size=(197, 126), dtype=np.int16) @pytest.fixture(scope="module") diff --git a/satpy/tests/reader_tests/test_viirs_compact.py b/satpy/tests/reader_tests/test_viirs_compact.py index ba8fa6f312..f27d9d6f32 100644 --- a/satpy/tests/reader_tests/test_viirs_compact.py +++ b/satpy/tests/reader_tests/test_viirs_compact.py @@ -24,6 +24,7 @@ import pytest from satpy.tests.reader_tests.utils import fill_h5 +from satpy.tests.utils import RANDOM_GEN # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: @@ -647,13 +648,13 @@ def fake_dnb(): dtype=np.float32, ) }, - "Latitude": {"value": np.random.rand(96, 332).astype(np.float32)}, - "Longitude": {"value": np.random.rand(96, 332).astype(np.float32)}, + "Latitude": {"value": RANDOM_GEN.random((96, 332)).astype(np.float32)}, + "Longitude": {"value": RANDOM_GEN.random((96, 332)).astype(np.float32)}, "LunarAzimuthAngle": { - "value": np.random.rand(96, 332).astype(np.float32) + "value": RANDOM_GEN.random((96, 332)).astype(np.float32) }, "LunarZenithAngle": { - "value": np.random.rand(96, 332).astype(np.float32) + "value": RANDOM_GEN.random((96, 332)).astype(np.float32) }, "MidTime": { "value": np.array( @@ -1170,16 +1171,16 @@ def fake_dnb(): ) }, "SatelliteAzimuthAngle": { - "value": np.random.rand(96, 332).astype(np.float32) + "value": RANDOM_GEN.random((96, 332)).astype(np.float32) }, "SatelliteZenithAngle": { - "value": np.random.rand(96, 332).astype(np.float32) + "value": RANDOM_GEN.random((96, 332)).astype(np.float32) }, "SolarAzimuthAngle": { - "value": np.random.rand(96, 332).astype(np.float32) + "value": RANDOM_GEN.random((96, 332)).astype(np.float32) }, "SolarZenithAngle": { - "value": np.random.rand(96, 332).astype(np.float32) + "value": RANDOM_GEN.random((96, 332)).astype(np.float32) }, "StartTime": { "value": np.array( @@ -1484,7 +1485,7 @@ def fake_dnb(): }, "PadByte1": {"value": np.array([0, 0, 0], dtype=np.uint8)}, "QF1_VIIRSDNBSDR": { - "value": (np.random.rand(768, 4064) * 255).astype(np.uint8) + "value": (RANDOM_GEN.random((768, 4064)) * 255).astype(np.uint8) }, "QF2_SCAN_SDR": { "value": np.array( @@ -1596,7 +1597,7 @@ def fake_dnb(): dtype=np.uint8, ) }, - "Radiance": {"value": np.random.rand(768, 4064).astype(np.float32)}, + "Radiance": {"value": RANDOM_GEN.random((768, 4064)).astype(np.float32)}, "attrs": { "OriginalFilename": np.array( [ diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index d764891760..6beb3c0cab 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -36,6 +36,8 @@ from pytest import TempPathFactory # noqa: PT013 from pytest_lazyfixture import lazy_fixture +from satpy.tests.utils import RANDOM_GEN + I_COLS = 6400 I_ROWS = 32 # one scan M_COLS = 3200 @@ -135,8 +137,8 @@ def _create_surf_refl_variables() -> dict[str, xr.DataArray]: sr_attrs = {"units": "unitless", "_FillValue": -9999, "scale_factor": np.float32(0.0001), "add_offset": np.float32(0.0)} - i_data = np.random.random_sample((I_ROWS, I_COLS)).astype(np.float32) - m_data = np.random.random_sample((M_ROWS, M_COLS)).astype(np.float32) + i_data = RANDOM_GEN.random((I_ROWS, I_COLS)).astype(np.float32) + m_data = RANDOM_GEN.random((M_ROWS, M_COLS)).astype(np.float32) lon_i_data = (i_data * 360) - 180.0 lon_m_data = (m_data * 360) - 180.0 lat_i_data = (i_data * 180) - 90.0 @@ -261,7 
+263,7 @@ def _create_continuous_variables(var_names: Iterable[str]) -> dict[str, xr.DataA cont_attrs = {"units": "Kelvin", "_FillValue": -9999, "scale_factor": np.float32(0.0001), "add_offset": np.float32(0.0)} - m_data = np.random.random_sample((M_ROWS, M_COLS)).astype(np.float32) + m_data = RANDOM_GEN.random((M_ROWS, M_COLS)).astype(np.float32) data_arrs = { "Longitude": xr.DataArray(m_data, dims=dims, attrs=lon_attrs), "Latitude": xr.DataArray(m_data, dims=dims, attrs=lat_attrs), diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index c075755d17..780293b1e0 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -30,7 +30,7 @@ from pyresample import AreaDefinition import satpy -from satpy.tests.utils import CustomScheduler +from satpy.tests.utils import RANDOM_GEN, CustomScheduler # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: @@ -701,10 +701,10 @@ def test_compositor(self, e2d, input_shape, bands): """Test luminance sharpening compositor.""" from satpy.composites import SandwichCompositor - rgb_arr = da.from_array(np.random.random(input_shape), chunks=2) + rgb_arr = da.from_array(RANDOM_GEN.random(input_shape), chunks=2) rgb = xr.DataArray(rgb_arr, dims=["bands", "y", "x"], coords={"bands": bands}) - lum_arr = da.from_array(100 * np.random.random((2, 2)), chunks=2) + lum_arr = da.from_array(100 * RANDOM_GEN.random((2, 2)), chunks=2) lum = xr.DataArray(lum_arr, dims=["y", "x"]) # Make enhance2dataset return unmodified dataset diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index 0c8eb51b3f..148ea6692f 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -27,6 +27,8 @@ from pyresample.geometry import AreaDefinition, StackedAreaDefinition from pytest_lazyfixture import lazy_fixture +from satpy.tests.utils import RANDOM_GEN + def _sunz_area_def(): """Get fake area for testing sunz generation.""" @@ -213,23 +215,23 @@ def setUp(self): "area": area, "start_time": self.start_time} - nir_arr = np.random.random((2, 2)) + nir_arr = RANDOM_GEN.random((2, 2)) self.nir = xr.DataArray(da.from_array(nir_arr), dims=["y", "x"]) self.nir.attrs.update(self.metadata) - ir_arr = 100 * np.random.random((2, 2)) + ir_arr = 100 * RANDOM_GEN.random((2, 2)) self.ir_ = xr.DataArray(da.from_array(ir_arr), dims=["y", "x"]) self.ir_.attrs["area"] = area - self.sunz_arr = 100 * np.random.random((2, 2)) + self.sunz_arr = 100 * RANDOM_GEN.random((2, 2)) self.sunz = xr.DataArray(da.from_array(self.sunz_arr), dims=["y", "x"]) self.sunz.attrs["standard_name"] = "solar_zenith_angle" self.sunz.attrs["area"] = area self.da_sunz = da.from_array(self.sunz_arr) - refl_arr = np.random.random((2, 2)) + refl_arr = RANDOM_GEN.random((2, 2)) self.refl = da.from_array(refl_arr) - self.refl_with_co2 = da.from_array(np.random.random((2, 2))) + self.refl_with_co2 = da.from_array(RANDOM_GEN.random((2, 2))) self.refl_from_tbs = mock.MagicMock() self.refl_from_tbs.side_effect = self.fake_refl_from_tbs @@ -292,7 +294,7 @@ def test_no_sunz_with_co2(self, calculator, apply_modifier_info, sza): comp = NIRReflectance(name="test") info = {"modifiers": None} - co2_arr = np.random.random((2, 2)) + co2_arr = RANDOM_GEN.random((2, 2)) co2 = xr.DataArray(da.from_array(co2_arr), dims=["y", "x"]) co2.attrs["wavelength"] = [12.0, 13.0, 14.0] co2.attrs["units"] = "K" @@ -378,14 +380,14 @@ def test_compositor(self, calculator, apply_modifier_info, sza): """Test the NIR emissive part from 
reflectance compositor.""" from satpy.modifiers.spectral import NIRReflectance - refl_arr = np.random.random((2, 2)) + refl_arr = RANDOM_GEN.random((2, 2)) refl = da.from_array(refl_arr) refl_from_tbs = mock.MagicMock() refl_from_tbs.return_value = refl calculator.return_value = mock.MagicMock(reflectance_from_tbs=refl_from_tbs) - emissive_arr = np.random.random((2, 2)) + emissive_arr = RANDOM_GEN.random((2, 2)) emissive = da.from_array(emissive_arr) emissive_part = mock.MagicMock() emissive_part.return_value = emissive @@ -405,17 +407,17 @@ def test_compositor(self, calculator, apply_modifier_info, sza): get_lonlats.return_value = (lons, lats) area = mock.MagicMock(get_lonlats=get_lonlats) - nir_arr = np.random.random((2, 2)) + nir_arr = RANDOM_GEN.random((2, 2)) nir = xr.DataArray(da.from_array(nir_arr), dims=["y", "x"]) nir.attrs["platform_name"] = platform nir.attrs["sensor"] = sensor nir.attrs["name"] = chan_name nir.attrs["area"] = area - ir_arr = np.random.random((2, 2)) + ir_arr = RANDOM_GEN.random((2, 2)) ir_ = xr.DataArray(da.from_array(ir_arr), dims=["y", "x"]) ir_.attrs["area"] = area - sunz_arr = 100 * np.random.random((2, 2)) + sunz_arr = 100 * RANDOM_GEN.random((2, 2)) sunz = xr.DataArray(da.from_array(sunz_arr), dims=["y", "x"]) sunz.attrs["standard_name"] = "solar_zenith_angle" sunz.attrs["area"] = area diff --git a/satpy/tests/utils.py b/satpy/tests/utils.py index a6ebf8753e..13e771418c 100644 --- a/satpy/tests/utils.py +++ b/satpy/tests/utils.py @@ -37,6 +37,8 @@ FAKE_FILEHANDLER_START = datetime(2020, 1, 1, 0, 0, 0) FAKE_FILEHANDLER_END = datetime(2020, 1, 1, 1, 0, 0) +RANDOM_GEN = np.random.default_rng() + def make_dataid(**items): """Make a DataID with default keys.""" From 567df9da4651716be9c3e9f9a40e9a652c2b85b4 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 17 May 2024 08:42:24 +0200 Subject: [PATCH 1383/1416] Replace pytest-lazyfixture with pytest-lazy-fixtures --- continuous_integration/environment.yaml | 2 +- pyproject.toml | 8 ++++---- satpy/tests/reader_tests/modis_tests/test_modis_l1b.py | 2 +- satpy/tests/reader_tests/modis_tests/test_modis_l2.py | 2 +- satpy/tests/reader_tests/modis_tests/test_modis_l3.py | 2 +- satpy/tests/reader_tests/test_abi_l1b.py | 2 +- satpy/tests/reader_tests/test_fci_l1c_nc.py | 2 +- satpy/tests/reader_tests/test_goci2_l2_nc.py | 2 +- satpy/tests/reader_tests/test_seadas_l2.py | 2 +- satpy/tests/reader_tests/test_viirs_edr.py | 2 +- satpy/tests/test_modifiers.py | 2 +- satpy/tests/test_readers.py | 2 +- 12 files changed, 15 insertions(+), 15 deletions(-) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index 4fc7a508f2..2000efd37f 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -45,7 +45,6 @@ dependencies: - eccodes>=2.20 - pytest<8.0.0 - pytest-cov - - pytest-lazy-fixture - fsspec - botocore>=1.33 - s3fs @@ -59,6 +58,7 @@ dependencies: - ephem - bokeh - pip: + - pytest-lazy-fixtures - trollsift - trollimage>=1.23 - pyspectral diff --git a/pyproject.toml b/pyproject.toml index 01abbbd1ac..2fa0f27d94 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -83,10 +83,10 @@ hvplot = ["hvplot", "geoviews", "cartopy", "holoviews"] overlays = ["pycoast", "pydecorate"] satpos_from_tle = ["skyfield", "astropy"] tests = ["behave", "h5py", "netCDF4", "pyhdf", "imageio", - "rasterio", "geoviews", "trollimage", "fsspec", "bottleneck", - "rioxarray", "pytest", "pytest-lazy-fixture", "defusedxml", - "s3fs", "eccodes", "h5netcdf", 
"xarray-datatree", - "skyfield", "ephem", "pint-xarray", "astropy", "dask-image", "python-geotiepoints", "numba"] + "rasterio", "geoviews", "trollimage", "fsspec", "bottleneck", + "rioxarray", "pytest", "pytest-lazy-fixtures", "defusedxml", + "s3fs", "eccodes", "h5netcdf", "xarray-datatree", + "skyfield", "ephem", "pint-xarray", "astropy", "dask-image", "python-geotiepoints", "numba"] [project.scripts] satpy_retrieve_all_aux_data = "satpy.aux_download:retrieve_all_cmd" diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py index d4998a67f9..47f5f92c8e 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py @@ -22,7 +22,7 @@ import dask import numpy as np import pytest -from pytest_lazyfixture import lazy_fixture +from pytest_lazy_fixtures import lf as lazy_fixture from satpy import Scene, available_readers from satpy.tests.utils import CustomScheduler, make_dataid diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l2.py b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py index 8876decb59..a30bfc392d 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l2.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py @@ -23,7 +23,7 @@ import dask.array as da import numpy as np import pytest -from pytest_lazyfixture import lazy_fixture +from pytest_lazy_fixtures import lf as lazy_fixture from satpy import Scene, available_readers from satpy.tests.utils import CustomScheduler, make_dataid diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l3.py b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py index de8ff682a1..ca6c5e353a 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l3.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py @@ -23,7 +23,7 @@ import numpy as np import pytest from pyresample import geometry -from pytest_lazyfixture import lazy_fixture +from pytest_lazy_fixtures import lf as lazy_fixture from satpy import Scene, available_readers diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index 969c497410..37fd1d74c9 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -29,7 +29,7 @@ import numpy.typing as npt import pytest import xarray as xr -from pytest_lazyfixture import lazy_fixture +from pytest_lazy_fixtures import lf as lazy_fixture from satpy import DataQuery from satpy.readers.abi_l1b import NC_ABI_L1B diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index 792de90462..4bdf71e6b3 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -28,7 +28,7 @@ import pytest import xarray as xr from netCDF4 import default_fillvals -from pytest_lazyfixture import lazy_fixture +from pytest_lazy_fixtures import lf as lazy_fixture from satpy.readers.fci_l1c_nc import FCIL1cNCFileHandler from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler diff --git a/satpy/tests/reader_tests/test_goci2_l2_nc.py b/satpy/tests/reader_tests/test_goci2_l2_nc.py index da54e11848..59bddefc88 100644 --- a/satpy/tests/reader_tests/test_goci2_l2_nc.py +++ b/satpy/tests/reader_tests/test_goci2_l2_nc.py @@ -21,7 +21,7 @@ import numpy as np import pytest import xarray as xr -from pytest_lazyfixture import lazy_fixture +from pytest_lazy_fixtures import lf as lazy_fixture from satpy import Scene 
from satpy.tests.utils import RANDOM_GEN diff --git a/satpy/tests/reader_tests/test_seadas_l2.py b/satpy/tests/reader_tests/test_seadas_l2.py index d3037e6b55..8343abbef2 100644 --- a/satpy/tests/reader_tests/test_seadas_l2.py +++ b/satpy/tests/reader_tests/test_seadas_l2.py @@ -20,7 +20,7 @@ import numpy as np import pytest from pyresample.geometry import SwathDefinition -from pytest_lazyfixture import lazy_fixture +from pytest_lazy_fixtures import lf as lazy_fixture from satpy import Scene, available_readers diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 6beb3c0cab..74bd4a9ae4 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -34,7 +34,7 @@ import xarray as xr from pyresample import SwathDefinition from pytest import TempPathFactory # noqa: PT013 -from pytest_lazyfixture import lazy_fixture +from pytest_lazy_fixtures import lf as lazy_fixture from satpy.tests.utils import RANDOM_GEN diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index 148ea6692f..e8a0b4e539 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -25,7 +25,7 @@ import pytest import xarray as xr from pyresample.geometry import AreaDefinition, StackedAreaDefinition -from pytest_lazyfixture import lazy_fixture +from pytest_lazy_fixtures import lf as lazy_fixture from satpy.tests.utils import RANDOM_GEN diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index 4b477c6bdf..5e05f595b1 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -30,7 +30,7 @@ import numpy as np import pytest import xarray as xr -from pytest_lazyfixture import lazy_fixture +from pytest_lazy_fixtures import lf as lazy_fixture from satpy.dataset.data_dict import get_key from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange From 3b9c04e85ff02d9ef1c15a502d76d89b696102f8 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 20 May 2024 08:40:02 +0200 Subject: [PATCH 1384/1416] Fix rtd environment --- doc/rtd_environment.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/rtd_environment.yml b/doc/rtd_environment.yml index 1e40cbb73a..5058959236 100644 --- a/doc/rtd_environment.yml +++ b/doc/rtd_environment.yml @@ -18,7 +18,6 @@ dependencies: - pooch - pyresample - pytest - - pytest-lazy-fixture - python-eccodes - python-geotiepoints - rasterio @@ -34,4 +33,5 @@ dependencies: - xarray-datatree - pip: - graphviz + - pytest-lazy-fixtures - ..
# relative path to the satpy project From 370467917a2fc373721614b56ebb463433d84a51 Mon Sep 17 00:00:00 2001 From: andream Date: Tue, 21 May 2024 18:00:58 +0200 Subject: [PATCH 1385/1416] add *= -1 in test --- satpy/tests/reader_tests/test_li_l2_nc.py | 1 + 1 file changed, 1 insertion(+) diff --git a/satpy/tests/reader_tests/test_li_l2_nc.py b/satpy/tests/reader_tests/test_li_l2_nc.py index 5e9d0ff563..c5e02f93d0 100644 --- a/satpy/tests/reader_tests/test_li_l2_nc.py +++ b/satpy/tests/reader_tests/test_li_l2_nc.py @@ -627,6 +627,7 @@ def test_coords_generation(self, filetype_infos): projection = Proj(proj_dict) azimuth_vals = azimuth.values * point_height elevation_vals = elevation.values * point_height + azimuth_vals *= -1 lon_ref, lat_ref = projection(azimuth_vals, elevation_vals, inverse=True) # Convert to float32: lon_ref = lon_ref.astype(np.float32) From dd478273ab63abc56dc6d397c32183df94d27f5f Mon Sep 17 00:00:00 2001 From: andream Date: Tue, 21 May 2024 18:01:35 +0200 Subject: [PATCH 1386/1416] add *= -1 in reader code --- satpy/readers/li_base_nc.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/satpy/readers/li_base_nc.py b/satpy/readers/li_base_nc.py index eba9548985..cefbcc7e55 100644 --- a/satpy/readers/li_base_nc.py +++ b/satpy/readers/li_base_nc.py @@ -371,6 +371,9 @@ def inverse_projection(self, azimuth, elevation, proj_dict): azimuth = azimuth.values * point_height elevation = elevation.values * point_height + # In the MTG world, azimuth is defined as positive towards west, while proj expects it positive towards east + azimuth *= -1 + lon, lat = projection(azimuth, elevation, inverse=True) return np.stack([lon.astype(azimuth.dtype), lat.astype(elevation.dtype)]) From 766c4ece53fb8d7e4cb19ae5c00e2d7c023ad0bf Mon Sep 17 00:00:00 2001 From: andream Date: Tue, 21 May 2024 19:51:37 +0200 Subject: [PATCH 1387/1416] add a test to check 1-d and 2-d consistency --- satpy/tests/reader_tests/_li_test_utils.py | 16 ++++++---- satpy/tests/reader_tests/test_li_l2_nc.py | 34 ++++++++++++++++++---- 2 files changed, 38 insertions(+), 12 deletions(-) diff --git a/satpy/tests/reader_tests/_li_test_utils.py b/satpy/tests/reader_tests/_li_test_utils.py index 32107006fc..051743b238 100644 --- a/satpy/tests/reader_tests/_li_test_utils.py +++ b/satpy/tests/reader_tests/_li_test_utils.py @@ -521,9 +521,13 @@ def accumulation_dimensions(nacc, nobs): def fci_grid_definition(axis, nobs): """FCI grid definition on X or Y axis.""" + scale_factor = 5.58871526031607e-5 + add_offset = -0.15561777642350116 if axis == "X": long_name = "azimuth angle encoded as column" standard_name = "projection_x_coordinate" + scale_factor *= -1 + add_offset *= -1 else: long_name = "zenith angle encoded as row" standard_name = "projection_y_coordinate" @@ -531,10 +535,10 @@ def fci_grid_definition(axis, nobs): return { "format": "i2", "shape": ("pixels",), - "add_offset": -0.155619516, + "add_offset": add_offset, "axis": axis, "long_name": long_name, - "scale_factor": 5.58878e-5, + "scale_factor": scale_factor, "standard_name": standard_name, "units": "radian", "valid_range": np.asarray([1, 5568]), @@ -548,12 +552,12 @@ def mtg_geos_projection(): "format": "i4", "shape": ("accumulations",), "grid_mapping_name": "geostationary", - "inverse_flattening": 298.2572221, + "inverse_flattening": 298.257223563, "latitude_of_projection_origin": 0, "longitude_of_projection_origin": 0, - "perspective_point_height": 42164000, - "semi_major_axis": 6378169, - "semi_minor_axis": 6356583.8, + "perspective_point_height": 3.57864e7, + 
"semi_major_axis": 6378137.0, + "semi_minor_axis": 6356752.31424518, "sweep_angle_axis": "y", "long_name": "MTG geostationary projection", "default_data": lambda: -2147483647 diff --git a/satpy/tests/reader_tests/test_li_l2_nc.py b/satpy/tests/reader_tests/test_li_l2_nc.py index c5e02f93d0..bba36a2155 100644 --- a/satpy/tests/reader_tests/test_li_l2_nc.py +++ b/satpy/tests/reader_tests/test_li_l2_nc.py @@ -592,7 +592,6 @@ def test_generate_coords_called_once(Self, filetype_infos): def test_coords_generation(self, filetype_infos): """Compare daskified coords generation results with non-daskified.""" - # Prepare dummy (but somewhat realistic) arrays of azimuth/elevation values. products = ["li_l2_af_nc", "li_l2_afr_nc", "li_l2_afa_nc"] @@ -601,11 +600,10 @@ def test_coords_generation(self, filetype_infos): handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, prod)) # Get azimuth/elevation arrays from handler - azimuth = handler.get_measured_variable(handler.swath_coordinates["azimuth"]) - azimuth = handler.apply_use_rescaling(azimuth) - - elevation = handler.get_measured_variable(handler.swath_coordinates["elevation"]) - elevation = handler.apply_use_rescaling(elevation) + x = handler.get_measured_variable(handler.swath_coordinates["azimuth"]) + azimuth = handler.apply_use_rescaling(x) + y = handler.get_measured_variable(handler.swath_coordinates["elevation"]) + elevation = handler.apply_use_rescaling(y) # Initialize proj_dict proj_var = handler.swath_coordinates["projection"] @@ -641,6 +639,30 @@ def test_coords_generation(self, filetype_infos): np.testing.assert_equal(lon, lon_ref) np.testing.assert_equal(lat, lat_ref) + + def test_coords_and_grid_consistency(self, filetype_infos): + """Compare computed latlon coords for 1-d version with latlon from areadef as for the gridded version.""" + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), + with_area_definition=True) + + # Get azimuth/elevation arrays from handler + x = handler.get_measured_variable(handler.swath_coordinates["azimuth"]) + y = handler.get_measured_variable(handler.swath_coordinates["elevation"]) + + handler.generate_coords_from_scan_angles() + lon = handler.internal_variables["longitude"].values + lat = handler.internal_variables["latitude"].values + + dsid = make_dataid(name="flash_accumulation") + area_def = handler.get_area_def(dsid) + rows = (LI_GRID_SHAPE[0] - y.astype(int)) + cols = x.astype(int) - 1 + lon_areadef, lat_areadef = area_def.get_lonlat_from_array_coordinates(cols, rows) + + np.testing.assert_allclose(lon, lon_areadef, rtol=1e-3) + np.testing.assert_allclose(lat, lat_areadef, rtol=1e-3) + + def test_get_area_def_acc_products(self, filetype_infos): """Test retrieval of area def for accumulated products.""" handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), From 3c0d46fc6e01a23feb06819ce9245294f5d1c894 Mon Sep 17 00:00:00 2001 From: andream Date: Wed, 22 May 2024 10:20:02 +0200 Subject: [PATCH 1388/1416] rearrange and add comments to test --- satpy/tests/reader_tests/test_li_l2_nc.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/satpy/tests/reader_tests/test_li_l2_nc.py b/satpy/tests/reader_tests/test_li_l2_nc.py index bba36a2155..76b9e74e04 100644 --- a/satpy/tests/reader_tests/test_li_l2_nc.py +++ b/satpy/tests/reader_tests/test_li_l2_nc.py @@ -639,30 +639,30 @@ def test_coords_generation(self, filetype_infos): np.testing.assert_equal(lon, lon_ref) 
np.testing.assert_equal(lat, lat_ref) - def test_coords_and_grid_consistency(self, filetype_infos): """Compare computed latlon coords for 1-d version with latlon from areadef as for the gridded version.""" handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), - with_area_definition=True) + with_area_definition=True) - # Get azimuth/elevation arrays from handler + # Get cols/rows arrays from handler x = handler.get_measured_variable(handler.swath_coordinates["azimuth"]) y = handler.get_measured_variable(handler.swath_coordinates["elevation"]) + cols = x.astype(int) - 1 + rows = (LI_GRID_SHAPE[0] - y.astype(int)) + # compute lonlat from 1-d coords generation handler.generate_coords_from_scan_angles() lon = handler.internal_variables["longitude"].values lat = handler.internal_variables["latitude"].values + # compute lonlat from 2-d areadef dsid = make_dataid(name="flash_accumulation") area_def = handler.get_area_def(dsid) - rows = (LI_GRID_SHAPE[0] - y.astype(int)) - cols = x.astype(int) - 1 lon_areadef, lat_areadef = area_def.get_lonlat_from_array_coordinates(cols, rows) np.testing.assert_allclose(lon, lon_areadef, rtol=1e-3) np.testing.assert_allclose(lat, lat_areadef, rtol=1e-3) - def test_get_area_def_acc_products(self, filetype_infos): """Test retrieval of area def for accumulated products.""" handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), From 2799a3225e784cddd48f2f2ed989be4fec965ded Mon Sep 17 00:00:00 2001 From: andream Date: Wed, 22 May 2024 11:56:18 +0200 Subject: [PATCH 1389/1416] restore coordinates variable naming in coords test --- satpy/tests/reader_tests/test_li_l2_nc.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_li_l2_nc.py b/satpy/tests/reader_tests/test_li_l2_nc.py index 76b9e74e04..24280539a1 100644 --- a/satpy/tests/reader_tests/test_li_l2_nc.py +++ b/satpy/tests/reader_tests/test_li_l2_nc.py @@ -600,10 +600,11 @@ def test_coords_generation(self, filetype_infos): handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, prod)) # Get azimuth/elevation arrays from handler - x = handler.get_measured_variable(handler.swath_coordinates["azimuth"]) - azimuth = handler.apply_use_rescaling(x) - y = handler.get_measured_variable(handler.swath_coordinates["elevation"]) - elevation = handler.apply_use_rescaling(y) + azimuth = handler.get_measured_variable(handler.swath_coordinates["azimuth"]) + azimuth = handler.apply_use_rescaling(azimuth) + + elevation = handler.get_measured_variable(handler.swath_coordinates["elevation"]) + elevation = handler.apply_use_rescaling(elevation) # Initialize proj_dict proj_var = handler.swath_coordinates["projection"] From 1b52311852fecbb1cd1daef1233fa755315f298e Mon Sep 17 00:00:00 2001 From: andream Date: Wed, 22 May 2024 12:01:45 +0200 Subject: [PATCH 1390/1416] update comments and error message --- satpy/readers/li_l2_nc.py | 2 +- satpy/tests/reader_tests/test_li_l2_nc.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/li_l2_nc.py b/satpy/readers/li_l2_nc.py index 4fe0826380..891372d596 100644 --- a/satpy/readers/li_l2_nc.py +++ b/satpy/readers/li_l2_nc.py @@ -73,7 +73,7 @@ def get_area_def(self, dsid): if var_with_swath_coord and self.with_area_def: return get_area_def("mtg_fci_fdss_2km") - raise NotImplementedError("Area definition is not supported for accumulated products.") + raise NotImplementedError("Area definition is not 
supported for non-accumulated products.") def is_var_with_swath_coord(self, dsid): """Check if the variable corresponding to this dataset is listed as variable with swath coordinates.""" diff --git a/satpy/tests/reader_tests/test_li_l2_nc.py b/satpy/tests/reader_tests/test_li_l2_nc.py index 24280539a1..1a198a7831 100644 --- a/satpy/tests/reader_tests/test_li_l2_nc.py +++ b/satpy/tests/reader_tests/test_li_l2_nc.py @@ -651,7 +651,7 @@ def test_coords_and_grid_consistency(self, filetype_infos): cols = x.astype(int) - 1 rows = (LI_GRID_SHAPE[0] - y.astype(int)) - # compute lonlat from 1-d coords generation + # compute lonlat from 1-d coords generation (called when with_area_definition==False) handler.generate_coords_from_scan_angles() lon = handler.internal_variables["longitude"].values lat = handler.internal_variables["latitude"].values From 54fbb1137e0b528ae9a6640cf8af5adad07d1afb Mon Sep 17 00:00:00 2001 From: clement laplace Date: Thu, 23 May 2024 09:38:55 +0000 Subject: [PATCH 1391/1416] typo: Add the typo modification asked by mraspo as shown in https://github.com/pytroll/satpy/pull/2778 --- satpy/etc/readers/fci_l1c_nc.yaml | 110 +++++++------------- satpy/readers/fci_l1c_nc.py | 2 +- satpy/tests/reader_tests/test_fci_l1c_nc.py | 2 +- 3 files changed, 40 insertions(+), 74 deletions(-) diff --git a/satpy/etc/readers/fci_l1c_nc.yaml b/satpy/etc/readers/fci_l1c_nc.yaml index dee588aff9..7b4ead398e 100644 --- a/satpy/etc/readers/fci_l1c_nc.yaml +++ b/satpy/etc/readers/fci_l1c_nc.yaml @@ -18,9 +18,7 @@ file_types: fci_l1c_fdhsi: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - [ - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-FDHSI-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc", - ] + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-FDHSI-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" expected_segments: 40 required_netcdf_variables: &required-variables - attr/platform @@ -71,9 +69,7 @@ file_types: fci_l1c_hrfi: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - [ - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-HRFI-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc", - ] + - 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-HRFI-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" expected_segments: 40 required_netcdf_variables: *required-variables variable_name_replacements: @@ -87,12 +83,11 @@ file_types: fci_l1c_af_vis_06: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - [ - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-1KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - ] + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-1KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: @@ -101,10 +96,8 @@ file_types: fci_l1c_af_vis_04: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - [ - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS04-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS04-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - ] + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS04-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS04-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: @@ -113,10 +106,8 @@ file_types: fci_l1c_af_vis_05: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - [ - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS05-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS05-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - ] + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS05-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS05-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: @@ -125,10 +116,8 @@ file_types: fci_l1c_af_vis_08: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - [ - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS08-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS08-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - ] + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS08-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS08-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: @@ -137,10 +126,8 @@ file_types: fci_l1c_af_vis_09: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - [ - 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS09-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS09-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - ] + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS09-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS09-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: @@ -149,10 +136,8 @@ file_types: fci_l1c_af_nir_13: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - [ - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-NIR13-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-NIR13-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - ] + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-NIR13-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-NIR13-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: @@ -161,10 +146,8 @@ file_types: fci_l1c_af_nir_16: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - [ - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-NIR16-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-NIR16-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - ] + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-NIR16-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-NIR16-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: @@ -173,10 +156,8 @@ file_types: fci_l1c_af_nir_22: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - [ - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-NIR22-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-NIR22-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - ] + - 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-NIR22-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-NIR22-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: @@ -185,10 +166,8 @@ file_types: fci_l1c_af_ir_38: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - [ - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR38-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR38-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - ] + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR38-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR38-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: @@ -197,10 +176,8 @@ file_types: fci_l1c_af_wv_63: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - [ - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-WV63-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-WV63-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - ] + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-WV63-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-WV63-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: @@ -209,10 +186,8 @@ file_types: fci_l1c_af_wv_73: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - [ - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-WV73-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-WV73-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - ] + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-WV73-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-WV73-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: @@ -221,10 +196,8 @@ file_types: fci_l1c_af_ir_87: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - [ - 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR87-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR87-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - ] + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR87-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR87-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: @@ -233,10 +206,8 @@ file_types: fci_l1c_af_ir_97: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - [ - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR97-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR97-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - ] + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR97-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR97-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: @@ -245,10 +216,9 @@ file_types: fci_l1c_af_ir_105: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - [ - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR105-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR105-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - ] + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR105-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR105-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: @@ -257,10 +227,8 @@ file_types: fci_l1c_af_ir_123: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - [ - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR123-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR123-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - ] + - 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR123-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR123-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: @@ -269,10 +237,8 @@ file_types: fci_l1c_af_ir_133: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - [ - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR133-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR133-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc", - ] + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR133-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" + - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR133-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{erraneous_count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index f94377b91c..6499b0c657 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -29,7 +29,7 @@ .. note:: This reader currently supports Full Disk High Spectral Resolution Imagery - (FDHSI) ,High Spatial Resolution Fast Imagery (HRFI) data in full-disc ("FD") scanning mode. + (FDHSI), High Spatial Resolution Fast Imagery (HRFI) data in full-disc ("FD") scanning mode. In addition it also supports the L1C format for the African dissemination ("AF"), where each file contains the masked full-dic of a single channel see `AF PUG`_. 
If the user provides a list of both FDHSI and HRFI files from the same repeat cycle to the Satpy ``Scene``, diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index 98777d0e51..b278c2b230 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -682,7 +682,7 @@ def test_load_calibration(self, reader_configs, fh_param, assert expected_res_n[res_type] == len(res) for ch, grid_type in zip(list_chan, list_grid): - self._get_assert_load(res,ch,grid_type,DICT_CALIBRATION[calibration]) + self._get_assert_load(res, ch, grid_type, DICT_CALIBRATION[calibration]) @pytest.mark.parametrize(("calibration", "channel", "resolution"), [ (calibration, channel, resolution) From 4a052bc941c4408a8ee8205d0f98411bca74ec64 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Mon, 27 May 2024 21:28:56 +0800 Subject: [PATCH 1392/1416] Update msi.yaml --- satpy/etc/composites/msi.yaml | 121 ++++++++++++++++++---------------- 1 file changed, 66 insertions(+), 55 deletions(-) diff --git a/satpy/etc/composites/msi.yaml b/satpy/etc/composites/msi.yaml index 74dd859dfd..d357ed5d8b 100644 --- a/satpy/etc/composites/msi.yaml +++ b/satpy/etc/composites/msi.yaml @@ -1,133 +1,144 @@ sensor_name: visir/msi modifiers: - rayleigh_corr: + rayleigh_corrected: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - name: 'B04' - modifiers: [effective_solar_pathlength_corrected] + modifiers: [sunz_corrected] + optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle - rayleigh_corr_antarctic: + rayleigh_corrected_antarctic: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: antarctic_aerosol prerequisites: - name: 'B04' - modifiers: [effective_solar_pathlength_corrected] + modifiers: [sunz_corrected] + optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle - rayleigh_corr_continental_average: + rayleigh_corrected_continental_average: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: continental_average_aerosol prerequisites: - name: 'B04' - modifiers: [effective_solar_pathlength_corrected] + modifiers: [sunz_corrected] + optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle - rayleigh_corr_continental_clean: + rayleigh_corrected_continental_clean: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: continental_clean_aerosol prerequisites: - name: 'B04' - modifiers: [effective_solar_pathlength_corrected] + modifiers: [sunz_corrected] + optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle - rayleigh_corr_continental_polluted: + rayleigh_corrected_continental_polluted: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: continental_polluted_aerosol prerequisites: - name: 'B04' - modifiers: [effective_solar_pathlength_corrected] + modifiers: [sunz_corrected] + optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle - rayleigh_corr_desert: + 
rayleigh_corrected_desert: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: desert_aerosol prerequisites: - name: 'B04' - modifiers: [effective_solar_pathlength_corrected] + modifiers: [sunz_corrected] + optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle - rayleigh_corr_marine_clean: + rayleigh_corrected_marine_clean: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: marine_clean_aerosol prerequisites: - name: 'B04' - modifiers: [effective_solar_pathlength_corrected] + modifiers: [sunz_corrected] + optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle - rayleigh_corr_marine_polluted: + rayleigh_corrected_marine_polluted: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: marine_polluted_aerosol prerequisites: - name: 'B04' - modifiers: [effective_solar_pathlength_corrected] + modifiers: [sunz_corrected] + optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle - rayleigh_corr_marine_tropical: + rayleigh_corrected_marine_tropical: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: marine_tropical_aerosol prerequisites: - name: 'B04' - modifiers: [effective_solar_pathlength_corrected] + modifiers: [sunz_corrected] + optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle - rayleigh_corr_rural: + rayleigh_corrected_rural: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rural_aerosol prerequisites: - name: 'B04' - modifiers: [effective_solar_pathlength_corrected] + modifiers: [sunz_corrected] + optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle - rayleigh_corr_urban: + rayleigh_corrected_urban: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: urban_aerosol prerequisites: - name: 'B04' - modifiers: [effective_solar_pathlength_corrected] + modifiers: [sunz_corrected] + optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle @@ -149,121 +160,121 @@ composites: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'B03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'B02' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: true_color true_color_antarctic: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_antarctic] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_antarctic] - name: 'B03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_antarctic] + modifiers: 
[effective_solar_pathlength_corrected, rayleigh_corrected_antarctic] - name: 'B02' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_antarctic] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_antarctic] standard_name: true_color true_color_continental_average: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_continental_average] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_average] - name: 'B03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_continental_average] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_average] - name: 'B02' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_continental_average] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_average] standard_name: true_color true_color_continental_clean: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_continental_clean] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_clean] - name: 'B03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_continental_clean] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_clean] - name: 'B02' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_continental_clean] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_clean] standard_name: true_color true_color_continental_polluted: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_continental_polluted] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_polluted] - name: 'B03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_continental_polluted] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_polluted] - name: 'B02' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_continental_polluted] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_polluted] standard_name: true_color true_color_desert: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_desert] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] - name: 'B03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_desert] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] - name: 'B02' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_desert] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] standard_name: true_color true_color_marine_clean: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_marine_clean] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] - name: 'B03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_marine_clean] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] - name: 'B02' - modifiers: [effective_solar_pathlength_corrected, 
rayleigh_corr_marine_clean] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] standard_name: true_color true_color_marine_polluted: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_marine_polluted] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_polluted] - name: 'B03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_marine_polluted] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_polluted] - name: 'B02' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_marine_polluted] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_polluted] standard_name: true_color true_color_marine_tropical: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_marine_tropical] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] - name: 'B03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_marine_tropical] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] - name: 'B02' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_marine_tropical] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] standard_name: true_color true_color_rural: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_rural] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_rural] - name: 'B03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_rural] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_rural] - name: 'B02' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_rural] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_rural] standard_name: true_color true_color_urban: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_urban] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_urban] - name: 'B03' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_urban] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_urban] - name: 'B02' - modifiers: [effective_solar_pathlength_corrected, rayleigh_corr_urban] + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_urban] standard_name: true_color true_color_uncorr: From b7c1b8e3c0388ce71facd1b10bfc4b4a7186ac19 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Mon, 27 May 2024 21:30:45 +0800 Subject: [PATCH 1393/1416] Update msi.yaml --- satpy/etc/composites/msi.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/composites/msi.yaml b/satpy/etc/composites/msi.yaml index d357ed5d8b..06bcd7bb5b 100644 --- a/satpy/etc/composites/msi.yaml +++ b/satpy/etc/composites/msi.yaml @@ -8,7 +8,7 @@ modifiers: prerequisites: - name: 'B04' modifiers: [sunz_corrected] - optional_prerequisites: + optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle From d968b4019e95e7ae161156491a7fdab54c492987 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Mon, 27 May 2024 23:23:41 +0800 
Subject: [PATCH 1394/1416] dataid issues --- satpy/dataset/dataid.py | 2 -- satpy/etc/readers/msi_safe_l2a.yaml | 9 +++++++ satpy/tests/reader_tests/test_msi_safe.py | 33 +++++++++++++++++++++++ 3 files changed, 42 insertions(+), 2 deletions(-) diff --git a/satpy/dataset/dataid.py b/satpy/dataset/dataid.py index 7bca3d0147..3eadb2dc2c 100644 --- a/satpy/dataset/dataid.py +++ b/satpy/dataset/dataid.py @@ -255,8 +255,6 @@ def __hash__(self): "radiance", "radiance_wavenumber", "counts", - "aerosol_thickness", - "water_vapor" ], "transitive": True, }, diff --git a/satpy/etc/readers/msi_safe_l2a.yaml b/satpy/etc/readers/msi_safe_l2a.yaml index e11b521f51..a1e8d9e171 100644 --- a/satpy/etc/readers/msi_safe_l2a.yaml +++ b/satpy/etc/readers/msi_safe_l2a.yaml @@ -9,6 +9,15 @@ reader: default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader +data_identification_keys: + calibration: + enum: + - reflectance + - radiance + - counts + - aerosol_thickness + - water_vapor + file_types: l2a_safe_granule_10m: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index d2de3e1a54..0227e469da 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -1449,6 +1449,39 @@ def jp2_builder(process_level, band_name, mask_saturated=True): jp2_fh = SAFEMSIL1C("somefile", filename_info, mock.MagicMock(), xml_fh, tile_xml_fh) return jp2_fh +def make_dataid(**items): + """Make a DataID with modified keys.""" + from satpy.dataset.dataid import WavelengthRange, ModifierTuple, DataID + modified_id_keys_config = { + "name": { + "required": True, + }, + "wavelength": { + "type": WavelengthRange, + }, + "resolution": { + "transitive": False, + }, + "calibration": { + "enum": [ + "reflectance", + "brightness_temperature", + "radiance", + "radiance_wavenumber", + "counts", + "aerosol_thickness", + "water_vapor" + ], + "transitive": True, + }, + "modifiers": { + "default": ModifierTuple(), + "type": ModifierTuple, + }, + } + + return DataID(modified_id_keys_config, **items) + class TestTileXML: """Test the SAFE TILE XML file handler. From 8194060be12b659d13764f6a82238d7712a3169a Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Mon, 27 May 2024 23:37:44 +0800 Subject: [PATCH 1395/1416] Update test_msi_safe.py --- satpy/tests/reader_tests/test_msi_safe.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 0227e469da..d6fe6808a1 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -24,8 +24,6 @@ import pytest import xarray as xr -from satpy.tests.utils import make_dataid - # Datetimes used for checking start time is correctly set. 
fname_dt = datetime(2020, 10, 1, 18, 35, 41) tilemd_dt = datetime(2020, 10, 1, 16, 34, 23, 153611) @@ -1449,7 +1447,7 @@ def jp2_builder(process_level, band_name, mask_saturated=True): jp2_fh = SAFEMSIL1C("somefile", filename_info, mock.MagicMock(), xml_fh, tile_xml_fh) return jp2_fh -def make_dataid(**items): +def make_alt_dataid(**items): """Make a DataID with modified keys.""" from satpy.dataset.dataid import WavelengthRange, ModifierTuple, DataID modified_id_keys_config = { @@ -1542,7 +1540,7 @@ def test_angles(self, process_level, angle_name, angle_tag, expected): dict(xml_tag=angle_tag[0] + "/" + angle_tag[1]) xml_tile_fh = xml_builder(process_level)[1] - res = xml_tile_fh.get_dataset(make_dataid(name=angle_name, resolution=60), info) + res = xml_tile_fh.get_dataset(make_alt_dataid(name=angle_name, resolution=60), info) if res is not None: res = res[::200, ::200] @@ -1561,7 +1559,7 @@ def test_navigation(self): from pyproj import CRS crs = CRS("EPSG:32616") - dsid = make_dataid(name="B01", resolution=60) + dsid = make_alt_dataid(name="B01", resolution=60) xml_tile_fh = xml_builder("L1C")[1] result = xml_tile_fh.get_area_def(dsid) area_extent = (499980.0, 3590220.0, 609780.0, 3700020.0) @@ -1657,7 +1655,7 @@ def test_calibration_and_masking(self, mask_saturated, dataset_name, calibration jp2_fh = jp2_builder("L2A", dataset_name.replace("_L2A", ""), mask_saturated) with mock.patch("xarray.open_dataset", return_value=self.fake_data): - res = jp2_fh.get_dataset(make_dataid(name=dataset_name, calibration=calibration), info=dict()) + res = jp2_fh.get_dataset(make_alt_dataid(name=dataset_name, calibration=calibration), info=dict()) if res is not None: np.testing.assert_allclose(res, expected) else: @@ -1673,8 +1671,8 @@ def test_filename_dsname_mismatch(self, process_level, band_name, dataset_name): jp2_fh = jp2_builder(process_level, band_name) with mock.patch("xarray.open_dataset", return_value=self.fake_data): - res1 = jp2_fh.get_dataset(make_dataid(name=dataset_name), info=dict()) - res2 = jp2_fh.get_area_def(make_dataid(name=dataset_name)) + res1 = jp2_fh.get_dataset(make_alt_dataid(name=dataset_name), info=dict()) + res2 = jp2_fh.get_area_def(make_alt_dataid(name=dataset_name)) assert res1 is None assert res2 is None From 911a6b636d08c165316f026f5f7e6d37b8fa408d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 27 May 2024 15:39:51 +0000 Subject: [PATCH 1396/1416] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- satpy/tests/reader_tests/test_msi_safe.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index d6fe6808a1..46a416977b 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -1449,7 +1449,7 @@ def jp2_builder(process_level, band_name, mask_saturated=True): def make_alt_dataid(**items): """Make a DataID with modified keys.""" - from satpy.dataset.dataid import WavelengthRange, ModifierTuple, DataID + from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange modified_id_keys_config = { "name": { "required": True, From 6d18e86e2eeed3125bd32dd899d3b046eb43f18c Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Tue, 28 May 2024 09:42:59 +0800 Subject: [PATCH 1397/1416] dataid --- satpy/dataset/dataid.py | 2 +- satpy/etc/readers/msi_safe_l2a.yaml | 27 ++++++++++++++++++--------- 2 files changed, 19 
insertions(+), 10 deletions(-) diff --git a/satpy/dataset/dataid.py b/satpy/dataset/dataid.py index 3eadb2dc2c..d8301bc453 100644 --- a/satpy/dataset/dataid.py +++ b/satpy/dataset/dataid.py @@ -254,7 +254,7 @@ def __hash__(self): "brightness_temperature", "radiance", "radiance_wavenumber", - "counts", + "counts" ], "transitive": True, }, diff --git a/satpy/etc/readers/msi_safe_l2a.yaml b/satpy/etc/readers/msi_safe_l2a.yaml index a1e8d9e171..bc9ca92552 100644 --- a/satpy/etc/readers/msi_safe_l2a.yaml +++ b/satpy/etc/readers/msi_safe_l2a.yaml @@ -8,15 +8,24 @@ reader: sensors: [msi] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader - -data_identification_keys: - calibration: - enum: - - reflectance - - radiance - - counts - - aerosol_thickness - - water_vapor + data_identification_keys: + name: + required: true + wavelength: + type: !!python/name:satpy.dataset.dataid.WavelengthRange + resolution: + transitive: false + calibration: + enum: + - reflectance + - radiance + - counts + - aerosol_thickness + - water_vapor + transitive: true + modifiers: + default: [] + type: !!python/name:satpy.dataset.dataid.ModifierTuple file_types: l2a_safe_granule_10m: From 82f742f133df5ff22c4cbf66c90fa57fc6f1c2d8 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Tue, 28 May 2024 10:04:19 +0800 Subject: [PATCH 1398/1416] Update msi.yaml --- satpy/etc/composites/msi.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/satpy/etc/composites/msi.yaml b/satpy/etc/composites/msi.yaml index 06bcd7bb5b..11dc3fa000 100644 --- a/satpy/etc/composites/msi.yaml +++ b/satpy/etc/composites/msi.yaml @@ -150,6 +150,7 @@ composites: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B11' + modifiers: [effective_solar_pathlength_corrected] - name: 'B08' modifiers: [effective_solar_pathlength_corrected] - name: 'B04' From d2e3c9f1d205fa2dae3f093b298157ea97153a99 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Tue, 28 May 2024 11:07:46 +0800 Subject: [PATCH 1399/1416] composites --- satpy/etc/composites/msi.yaml | 200 +++++++++++++++++----------- satpy/etc/readers/msi_safe_l2a.yaml | 12 ++ 2 files changed, 137 insertions(+), 75 deletions(-) diff --git a/satpy/etc/composites/msi.yaml b/satpy/etc/composites/msi.yaml index 11dc3fa000..3681cf5183 100644 --- a/satpy/etc/composites/msi.yaml +++ b/satpy/etc/composites/msi.yaml @@ -146,17 +146,6 @@ modifiers: composites: - natural_color: - compositor: !!python/name:satpy.composites.GenericCompositor - prerequisites: - - name: 'B11' - modifiers: [effective_solar_pathlength_corrected] - - name: 'B08' - modifiers: [effective_solar_pathlength_corrected] - - name: 'B04' - modifiers: [effective_solar_pathlength_corrected] - standard_name: natural_color - true_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: @@ -292,34 +281,45 @@ composites: true_color_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'B04' - #modifiers: [effective_solar_pathlength_corrected] - - name: 'B03' - #modifiers: [effective_solar_pathlength_corrected] - - name: 'B02' - #modifiers: [effective_solar_pathlength_corrected] + - name: 'B04' + # modifiers: [effective_solar_pathlength_corrected] + - name: 'B03' + # modifiers: [effective_solar_pathlength_corrected] + - name: 'B02' + # modifiers: [effective_solar_pathlength_corrected] standard_name: true_color - urban_color: + natural_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'B12' - 
modifiers: [effective_solar_pathlength_corrected] - name: 'B11' modifiers: [effective_solar_pathlength_corrected] - - name: 'B04' + - name: 'B08' modifiers: [effective_solar_pathlength_corrected] + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + standard_name: natural_color + + urban_color: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B12' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B11' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: natural_color false_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'B08' - modifiers: [effective_solar_pathlength_corrected] - - name: 'B04' - modifiers: [effective_solar_pathlength_corrected] - - name: 'B03' - modifiers: [effective_solar_pathlength_corrected] + - name: 'B08' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'B03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: natural_color ndvi: @@ -331,12 +331,16 @@ composites: prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - - name: B08 - - name: B04 + - name: B08 + modifiers: [effective_solar_pathlength_corrected] + - name: B04 + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - compositor: !!python/name:satpy.composites.SumCompositor prerequisites: - - name: B08 - - name: B04 + - name: B08 + modifiers: [effective_solar_pathlength_corrected] + - name: B04 + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: ndvi_msi ndmi: @@ -348,12 +352,16 @@ composites: prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - - name: B08 - - name: B11 + - name: B08 + modifiers: [effective_solar_pathlength_corrected] + - name: B11 + modifiers: [effective_solar_pathlength_corrected] - compositor: !!python/name:satpy.composites.SumCompositor prerequisites: - - name: B08 - - name: B11 + - name: B08 + modifiers: [effective_solar_pathlength_corrected] + - name: B11 + modifiers: [effective_solar_pathlength_corrected] standard_name: ndmi_msi ndwi: @@ -365,12 +373,16 @@ composites: prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - - name: B03 - - name: B08 + - name: B03 + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: B08 + modifiers: [effective_solar_pathlength_corrected] - compositor: !!python/name:satpy.composites.SumCompositor prerequisites: - - name: B03 - - name: B08 + - name: B03 + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: B08 + modifiers: [effective_solar_pathlength_corrected] standard_name: ndwi_msi ndsi: @@ -379,59 +391,78 @@ composites: compositor: !!python/name:satpy.composites.MaskingCompositor prerequisites: - name: B11 + modifiers: [effective_solar_pathlength_corrected] - compositor: !!python/name:satpy.composites.RatioCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - - name: B03 - - name: B11 + - name: B03 + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: B11 + modifiers: [effective_solar_pathlength_corrected] - compositor: !!python/name:satpy.composites.SumCompositor 
prerequisites: - - name: B03 - - name: B11 + - name: B03 + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: B11 + modifiers: [effective_solar_pathlength_corrected] conditions: - method: less_equal value: 0.42 transparency: 100 + - method: isnan + transparency: 100 standard_name: ndsi_msi ndsi_with_true_color: compositor: !!python/name:satpy.composites.BackgroundCompositor prerequisites: - - name: ndsi - - name: true_color + - name: ndsi + - name: true_color standard_name: no_enhancement true_color_l2a: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'B04_L2A' - - name: 'B03_L2A' - - name: 'B02_L2A' + - name: 'B04_L2A' + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] + - name: 'B03_L2A' + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] + - name: 'B02_L2A' + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] standard_name: true_color natural_color_l2a: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'B11_L2A' - - name: 'B08_L2A' - - name: 'B04_L2A' + - name: 'B11_L2A' + modifiers: [esa_sunz_corrected] + - name: 'B08_L2A' + modifiers: [esa_sunz_corrected] + - name: 'B04_L2A' + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] standard_name: natural_color urban_color_l2a: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'B12_L2A' - - name: 'B11_L2A' - - name: 'B04_L2A' + - name: 'B12_L2A' + modifiers: [esa_sunz_corrected] + - name: 'B11_L2A' + modifiers: [esa_sunz_corrected] + - name: 'B04_L2A' + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] standard_name: natural_color false_color_l2a: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'B08_L2A' - - name: 'B04_L2A' - - name: 'B03_L2A' + - name: 'B08_L2A' + modifiers: [esa_sunz_corrected] + - name: 'B04_L2A' + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] + - name: 'B03_L2A' + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] standard_name: natural_color aerosol_optical_thickness: @@ -463,12 +494,16 @@ composites: prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - - name: B08_L2A - - name: B04_L2A + - name: B08_L2A + modifiers: [esa_sunz_corrected] + - name: B04_L2A + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] - compositor: !!python/name:satpy.composites.SumCompositor prerequisites: - - name: B08_L2A - - name: B04_L2A + - name: B08_L2A + modifiers: [esa_sunz_corrected] + - name: B04_L2A + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] standard_name: ndvi_msi ndmi_l2a: @@ -480,12 +515,16 @@ composites: prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - - name: B8A_L2A - - name: B11_L2A + - name: B8A_L2A + modifiers: [esa_sunz_corrected] + - name: B11_L2A + modifiers: [esa_sunz_corrected] - compositor: !!python/name:satpy.composites.SumCompositor prerequisites: - - name: B8A_L2A - - name: B11_L2A + - name: B8A_L2A + modifiers: [esa_sunz_corrected] + - name: B11_L2A + modifiers: [esa_sunz_corrected] standard_name: ndmi_msi ndwi_l2a: @@ -497,12 +536,16 @@ composites: prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - - name: B03_L2A - - name: B08_L2A + - name: B03_L2A + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] + - name: B08_L2A + modifiers: [esa_sunz_corrected] - compositor: !!python/name:satpy.composites.SumCompositor prerequisites: - - name: 
B03_L2A - - name: B08_L2A + - name: B03_L2A + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] + - name: B08_L2A + modifiers: [esa_sunz_corrected] standard_name: ndwi_msi ndsi_l2a: @@ -511,25 +554,32 @@ composites: compositor: !!python/name:satpy.composites.MaskingCompositor prerequisites: - name: B11_L2A + modifiers: [esa_sunz_corrected] - compositor: !!python/name:satpy.composites.RatioCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - - name: B03_L2A - - name: B11_L2A + - name: B03_L2A + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] + - name: B11_L2A + modifiers: [esa_sunz_corrected] - compositor: !!python/name:satpy.composites.SumCompositor prerequisites: - - name: B03_L2A - - name: B11_L2A + - name: B03_L2A + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] + - name: B11_L2A + modifiers: [esa_sunz_corrected] conditions: - method: less_equal value: 0.42 transparency: 100 + - method: isnan + transparency: 100 standard_name: ndsi_msi ndsi_l2a_with_true_color_l2a: compositor: !!python/name:satpy.composites.BackgroundCompositor prerequisites: - - name: ndsi_l2a - - name: true_color_l2a + - name: ndsi_l2a + - name: true_color_l2a standard_name: no_enhancement diff --git a/satpy/etc/readers/msi_safe_l2a.yaml b/satpy/etc/readers/msi_safe_l2a.yaml index bc9ca92552..5972f6b0f2 100644 --- a/satpy/etc/readers/msi_safe_l2a.yaml +++ b/satpy/etc/readers/msi_safe_l2a.yaml @@ -52,6 +52,7 @@ datasets: name: B01_L2A sensor: msi wavelength: [0.415, 0.443, 0.470] + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] resolution: 20: {file_type: l2a_safe_granule_20m} 60: {file_type: l2a_safe_granule_60m} @@ -70,6 +71,7 @@ datasets: name: B02_L2A sensor: msi wavelength: [0.440, 0.490, 0.540] + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] resolution: 10: {file_type: l2a_safe_granule_10m} 20: {file_type: l2a_safe_granule_20m} @@ -89,6 +91,7 @@ datasets: name: B03_L2A sensor: msi wavelength: [0.540, 0.560, 0.580] + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] resolution: 10: {file_type: l2a_safe_granule_10m} 20: {file_type: l2a_safe_granule_20m} @@ -108,6 +111,7 @@ datasets: name: B04_L2A sensor: msi wavelength: [0.645, 0.665, 0.685] + modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] resolution: 10: {file_type: l2a_safe_granule_10m} 20: {file_type: l2a_safe_granule_20m} @@ -127,6 +131,7 @@ datasets: name: B05_L2A sensor: msi wavelength: [0.695, 0.705, 0.715] + modifiers: [esa_sunz_corrected] resolution: 20: {file_type: l2a_safe_granule_20m} 60: {file_type: l2a_safe_granule_60m} @@ -145,6 +150,7 @@ datasets: name: B06_L2A sensor: msi wavelength: [0.731, 0.740, 0.749] + modifiers: [esa_sunz_corrected] resolution: 20: {file_type: l2a_safe_granule_20m} 60: {file_type: l2a_safe_granule_60m} @@ -163,6 +169,7 @@ datasets: name: B07_L2A sensor: msi wavelength: [0.764, 0.783, 0.802] + modifiers: [esa_sunz_corrected] resolution: 20: {file_type: l2a_safe_granule_20m} 60: {file_type: l2a_safe_granule_60m} @@ -181,6 +188,7 @@ datasets: name: B08_L2A sensor: msi wavelength: [0.780, 0.842, 0.905] + modifiers: [esa_sunz_corrected] resolution: 10: {file_type: l2a_safe_granule_10m} calibration: @@ -198,6 +206,7 @@ datasets: name: B8A_L2A sensor: msi wavelength: [0.855, 0.865, 0.875] + modifiers: [esa_sunz_corrected] resolution: 20: {file_type: l2a_safe_granule_20m} 60: {file_type: l2a_safe_granule_60m} @@ -216,6 +225,7 @@ datasets: name: B09_L2A sensor: msi wavelength: [0.935, 0.945, 0.955] + modifiers: 
[esa_sunz_corrected] resolution: 60: {file_type: l2a_safe_granule_60m} calibration: @@ -233,6 +243,7 @@ datasets: name: B11_L2A sensor: msi wavelength: [1.565, 1.610, 1.655] + modifiers: [esa_sunz_corrected] resolution: 20: {file_type: l2a_safe_granule_20m} 60: {file_type: l2a_safe_granule_60m} @@ -251,6 +262,7 @@ datasets: name: B12_L2A sensor: msi wavelength: [2.100, 2.190, 2.280] + modifiers: [esa_sunz_corrected] resolution: 20: {file_type: l2a_safe_granule_20m} 60: {file_type: l2a_safe_granule_60m} From 1c0ce3777eeb686897388d30223b37152611405c Mon Sep 17 00:00:00 2001 From: "Sara.Hornquist" Date: Fri, 31 May 2024 14:25:56 +0200 Subject: [PATCH 1400/1416] Update the vii_l1b-reader, because of updated testdata from EUMETSAT. --- satpy/readers/vii_l1b_nc.py | 2 +- satpy/tests/reader_tests/test_vii_l1b_nc.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/vii_l1b_nc.py b/satpy/readers/vii_l1b_nc.py index 2dbcb63eda..804c2481fa 100644 --- a/satpy/readers/vii_l1b_nc.py +++ b/satpy/readers/vii_l1b_nc.py @@ -47,7 +47,7 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): self._bt_conversion_a = self["data/calibration_data/bt_conversion_a"].values self._bt_conversion_b = self["data/calibration_data/bt_conversion_b"].values self._channel_cw_thermal = self["data/calibration_data/channel_cw_thermal"].values - self._integrated_solar_irradiance = self["data/calibration_data/Band_averaged_solar_irradiance"].values + self._integrated_solar_irradiance = self["data/calibration_data/band_averaged_solar_irradiance"].values # Computes the angle factor for reflectance calibration as inverse of cosine of solar zenith angle # (the values in the product file are on tie points and in degrees, # therefore interpolation and conversion to radians are required) diff --git a/satpy/tests/reader_tests/test_vii_l1b_nc.py b/satpy/tests/reader_tests/test_vii_l1b_nc.py index 22ab14e0a3..c302973a5a 100644 --- a/satpy/tests/reader_tests/test_vii_l1b_nc.py +++ b/satpy/tests/reader_tests/test_vii_l1b_nc.py @@ -68,7 +68,7 @@ def setUp(self): bt_b[:] = np.arange(9) cw = g1_1.createVariable("channel_cw_thermal", np.float32, dimensions=("num_chan_thermal",)) cw[:] = np.arange(9) - isi = g1_1.createVariable("Band_averaged_solar_irradiance", np.float32, dimensions=("num_chan_solar",)) + isi = g1_1.createVariable("band_averaged_solar_irradiance", np.float32, dimensions=("num_chan_solar",)) isi[:] = np.arange(11) # Create measurement_data group From c698e31ce396901b3fcc11c875d362302828eae3 Mon Sep 17 00:00:00 2001 From: "Sara.Hornquist" Date: Fri, 31 May 2024 15:27:57 +0200 Subject: [PATCH 1401/1416] Added my name to the list of authors. 
--- AUTHORS.md | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS.md b/AUTHORS.md index fb43d0168d..21520bc96c 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -90,3 +90,4 @@ The following people have made contributions to this project: - [Yufei Zhu (yufeizhu600)](https://github.com/yufeizhu600) - [Youva Aoun (YouvaEUMex)](https://github.com/YouvaEUMex) - [Will Sharpe (wjsharpe)](https://github.com/wjsharpe) +- [Sara Hörnquist (shornqui)](https://github.com/shornqui) From 32d20b5c58b3604c1f227d6ef649389afa2774e8 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 3 Jun 2024 21:42:24 +0000 Subject: [PATCH 1402/1416] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.4.3 → v0.4.7](https://github.com/astral-sh/ruff-pre-commit/compare/v0.4.3...v0.4.7) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 53db32e42a..7fdc0a6b44 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,7 +3,7 @@ fail_fast: false repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.4.3' + rev: 'v0.4.7' hooks: - id: ruff - repo: https://github.com/pre-commit/pre-commit-hooks From d04bd36f27bafbf786efeaa3fa7fa285973f17c7 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Tue, 4 Jun 2024 23:21:08 +0800 Subject: [PATCH 1403/1416] Update test_mersi_l1b.py --- satpy/tests/reader_tests/test_mersi_l1b.py | 30 +++++++++++----------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index ffe4562bd5..e9b8c45ae6 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -407,7 +407,7 @@ def _geo_prefix_for_file_type(self): return "" -def _test_helper(res, band_list, exp_result): +def _assert_bands_mda_as_exp(res, band_list, exp_result): """Remove test code duplication.""" exp_cal = exp_result[0] exp_unit = exp_result[1] @@ -521,17 +521,17 @@ def test_all_resolutions(self): res.__getitem__(band) if resolution in ["all", "250"]: - _test_helper(res, self.vis_250_bands, ("reflectance", "%", (2 * 40, 2048 * 2))) - _test_helper(res, self.ir_250_bands, ("brightness_temperature", "K", (2 * 40, 2048 * 2))) + _assert_bands_mda_as_exp(res, self.vis_250_bands, ("reflectance", "%", (2 * 40, 2048 * 2))) + _assert_bands_mda_as_exp(res, self.ir_250_bands, ("brightness_temperature", "K", (2 * 40, 2048 * 2))) if resolution == "all": - _test_helper(res, self.vis_1000_bands, ("reflectance", "%", (2 * 10, 2048))) - _test_helper(res, self.ir_1000_bands, ("brightness_temperature", "K", (2 * 10, 2048))) + _assert_bands_mda_as_exp(res, self.vis_1000_bands, ("reflectance", "%", (2 * 10, 2048))) + _assert_bands_mda_as_exp(res, self.ir_1000_bands, ("brightness_temperature", "K", (2 * 10, 2048))) else: - _test_helper(res, self.vis_250_bands, ("reflectance", "%", (2 * 10, 2048))) - _test_helper(res, self.vis_1000_bands, ("reflectance", "%", (2 * 10, 2048))) - _test_helper(res, self.ir_250_bands, ("brightness_temperature", "K", (2 * 10, 2048))) - _test_helper(res, self.ir_1000_bands, ("brightness_temperature", "K", (2 * 10, 2048))) + _assert_bands_mda_as_exp(res, self.vis_250_bands, ("reflectance", "%", (2 * 10, 2048))) + _assert_bands_mda_as_exp(res, self.vis_1000_bands, 
("reflectance", "%", (2 * 10, 2048))) + _assert_bands_mda_as_exp(res, self.ir_250_bands, ("brightness_temperature", "K", (2 * 10, 2048))) + _assert_bands_mda_as_exp(res, self.ir_1000_bands, ("brightness_temperature", "K", (2 * 10, 2048))) def test_counts_calib(self): """Test loading data at counts calibration.""" @@ -545,8 +545,8 @@ def test_counts_calib(self): ds_ids.append(make_dataid(name="satellite_zenith_angle")) res = reader.load(ds_ids) assert len(res) == len(self.bands_1000) + len(self.bands_250) + 1 - _test_helper(res, self.bands_250, ("counts", "1", (2 * 40, 2048 * 2))) - _test_helper(res, self.bands_1000, ("counts", "1", (2 * 10, 2048))) + _assert_bands_mda_as_exp(res, self.bands_250, ("counts", "1", (2 * 40, 2048 * 2))) + _assert_bands_mda_as_exp(res, self.bands_1000, ("counts", "1", (2 * 10, 2048))) def test_rad_calib(self): """Test loading data at radiance calibration. For MERSI-2/LL VIS/IR and MERSI-1 IR.""" @@ -563,11 +563,11 @@ def test_rad_calib(self): res = reader.load(ds_ids) assert len(res) == len(test_bands) if self.yaml_file in ["mersi2_l1b.yaml", "mersi_ll_l1b.yaml"]: - _test_helper(res, self.bands_250, ("radiance", "mW/ (m2 cm-1 sr)", (2 * 40, 2048 * 2))) - _test_helper(res, self.bands_1000, ("radiance", "mW/ (m2 cm-1 sr)", (2 * 10, 2048))) + _assert_bands_mda_as_exp(res, self.bands_250, ("radiance", "mW/ (m2 cm-1 sr)", (2 * 40, 2048 * 2))) + _assert_bands_mda_as_exp(res, self.bands_1000, ("radiance", "mW/ (m2 cm-1 sr)", (2 * 10, 2048))) else: - _test_helper(res, self.ir_250_bands, ("radiance", "mW/ (m2 cm-1 sr)", (2 * 40, 2048 * 2))) - _test_helper(res, self.ir_1000_bands, ("radiance", "mW/ (m2 cm-1 sr)", (2 * 10, 2048))) + _assert_bands_mda_as_exp(res, self.ir_250_bands, ("radiance", "mW/ (m2 cm-1 sr)", (2 * 40, 2048 * 2))) + _assert_bands_mda_as_exp(res, self.ir_1000_bands, ("radiance", "mW/ (m2 cm-1 sr)", (2 * 10, 2048))) class TestFY3AMERSI1L1B(MERSI12llL1BTester): From 7b2e5a7fca8c8c46cdc5375d0d63a13c0d84387c Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 5 Jun 2024 08:35:33 +0200 Subject: [PATCH 1404/1416] Update changelog for v0.49.0 --- CHANGELOG.md | 51 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7281a80ceb..a1484ff13b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,54 @@ +## Version 0.49.0 (2024/06/05) + +### Issues Closed + +* [Issue 2790](https://github.com/pytroll/satpy/issues/2790) - VIIRS L1B DNB_SENZ file_key ([PR 2791](https://github.com/pytroll/satpy/pull/2791) by [@wjsharpe](https://github.com/wjsharpe)) +* [Issue 2781](https://github.com/pytroll/satpy/issues/2781) - [Question] Sun Zenith Correction +* [Issue 2765](https://github.com/pytroll/satpy/issues/2765) - abi_l2_nc reader Key Error 'calibration' when trying to load Mask from fire Hot Spot ([PR 2794](https://github.com/pytroll/satpy/pull/2794) by [@djhoese](https://github.com/djhoese)) + +In this release 3 issues were closed. 
+ +### Pull Requests Merged + +#### Bugs fixed + +* [PR 2804](https://github.com/pytroll/satpy/pull/2804) - Fix LI L2 accumulated products `'with_area_definition': False` 1-d coordinates computation +* [PR 2794](https://github.com/pytroll/satpy/pull/2794) - Fix ABI L2 datasets when unitless and no calibration ([2765](https://github.com/pytroll/satpy/issues/2765)) +* [PR 2791](https://github.com/pytroll/satpy/pull/2791) - fixed DNB_SENZ file_key ([2790](https://github.com/pytroll/satpy/issues/2790)) + +#### Features added + +* [PR 2807](https://github.com/pytroll/satpy/pull/2807) - Update the vii_l1b-reader, for new testdata format of VII +* [PR 2801](https://github.com/pytroll/satpy/pull/2801) - Replace pytest-lazyfixture with pytest-lazy-fixtures +* [PR 2800](https://github.com/pytroll/satpy/pull/2800) - Add numpy rules to ruff +* [PR 2799](https://github.com/pytroll/satpy/pull/2799) - Add netcdf4 to goci2 optional dependency in `pyproject.toml` +* [PR 2795](https://github.com/pytroll/satpy/pull/2795) - Add support for MERSI-1 on FY-3A/B/C +* [PR 2789](https://github.com/pytroll/satpy/pull/2789) - Activate LI L2 accumulated products gridding by default +* [PR 2787](https://github.com/pytroll/satpy/pull/2787) - Fix datetime imports +* [PR 2778](https://github.com/pytroll/satpy/pull/2778) - Add the reader for the fci L1C Africa files +* [PR 2776](https://github.com/pytroll/satpy/pull/2776) - Add option to choose start time to MSI SAFE reader +* [PR 2727](https://github.com/pytroll/satpy/pull/2727) - Refactor Sentinel-1 SAR-C reader + +#### Documentation changes + +* [PR 2789](https://github.com/pytroll/satpy/pull/2789) - Activate LI L2 accumulated products gridding by default + +#### Backward incompatible changes + +* [PR 2789](https://github.com/pytroll/satpy/pull/2789) - Activate LI L2 accumulated products gridding by default + +#### Refactoring + +* [PR 2787](https://github.com/pytroll/satpy/pull/2787) - Fix datetime imports + +#### Clean ups + +* [PR 2797](https://github.com/pytroll/satpy/pull/2797) - Add missing coverage configuration section to pyproject.toml +* [PR 2784](https://github.com/pytroll/satpy/pull/2784) - Fix various issues in unstable CI + +In this release 18 pull requests were closed. 
+ + ## Version 0.48.0 (2024/04/22) ### Issues Closed From 676a5a1d3280961720478d0796f4ac6aa339a07e Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 5 Jun 2024 22:58:41 +0800 Subject: [PATCH 1405/1416] drop l2a suffix --- satpy/etc/composites/msi.yaml | 64 +++++++++---------- satpy/etc/readers/msi_safe_l2a.yaml | 76 +++++++++++------------ satpy/readers/msi_safe.py | 25 ++------ satpy/tests/reader_tests/test_msi_safe.py | 18 +++--- 4 files changed, 85 insertions(+), 98 deletions(-) diff --git a/satpy/etc/composites/msi.yaml b/satpy/etc/composites/msi.yaml index 3681cf5183..602a0cd6b1 100644 --- a/satpy/etc/composites/msi.yaml +++ b/satpy/etc/composites/msi.yaml @@ -424,65 +424,65 @@ composites: true_color_l2a: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'B04_L2A' + - name: 'B04' modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] - - name: 'B03_L2A' + - name: 'B03' modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] - - name: 'B02_L2A' + - name: 'B02' modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] standard_name: true_color natural_color_l2a: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'B11_L2A' + - name: 'B11' modifiers: [esa_sunz_corrected] - - name: 'B08_L2A' + - name: 'B08' modifiers: [esa_sunz_corrected] - - name: 'B04_L2A' + - name: 'B04' modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] standard_name: natural_color urban_color_l2a: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'B12_L2A' + - name: 'B12' modifiers: [esa_sunz_corrected] - - name: 'B11_L2A' + - name: 'B11' modifiers: [esa_sunz_corrected] - - name: 'B04_L2A' + - name: 'B04' modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] standard_name: natural_color false_color_l2a: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'B08_L2A' + - name: 'B08' modifiers: [esa_sunz_corrected] - - name: 'B04_L2A' + - name: 'B04' modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] - - name: 'B03_L2A' + - name: 'B03' modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] standard_name: natural_color aerosol_optical_thickness: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - - name: AOT_L2A + - name: AOT calibration: aerosol_thickness standard_name: aot_msi water_vapor_map: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - - name: WVP_L2A + - name: WVP calibration: water_vapor standard_name: wvp_msi scene_class: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - - name: SCL_L2A + - name: SCL standard_name: scl_msi ndvi_l2a: @@ -494,15 +494,15 @@ composites: prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - - name: B08_L2A + - name: B08 modifiers: [esa_sunz_corrected] - - name: B04_L2A + - name: B04 modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] - compositor: !!python/name:satpy.composites.SumCompositor prerequisites: - - name: B08_L2A + - name: B08 modifiers: [esa_sunz_corrected] - - name: B04_L2A + - name: B04 modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] standard_name: ndvi_msi @@ -515,15 +515,15 @@ composites: prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - - name: B8A_L2A + - name: B8A modifiers: [esa_sunz_corrected] - - name: B11_L2A + - name: B11 modifiers: [esa_sunz_corrected] - compositor: !!python/name:satpy.composites.SumCompositor 
prerequisites: - - name: B8A_L2A + - name: B8A modifiers: [esa_sunz_corrected] - - name: B11_L2A + - name: B11 modifiers: [esa_sunz_corrected] standard_name: ndmi_msi @@ -536,15 +536,15 @@ composites: prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - - name: B03_L2A + - name: B03 modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] - - name: B08_L2A + - name: B08 modifiers: [esa_sunz_corrected] - compositor: !!python/name:satpy.composites.SumCompositor prerequisites: - - name: B03_L2A + - name: B03 modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] - - name: B08_L2A + - name: B08 modifiers: [esa_sunz_corrected] standard_name: ndwi_msi @@ -553,21 +553,21 @@ composites: # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndsi/ compositor: !!python/name:satpy.composites.MaskingCompositor prerequisites: - - name: B11_L2A + - name: B11 modifiers: [esa_sunz_corrected] - compositor: !!python/name:satpy.composites.RatioCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - - name: B03_L2A + - name: B03 modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] - - name: B11_L2A + - name: B11 modifiers: [esa_sunz_corrected] - compositor: !!python/name:satpy.composites.SumCompositor prerequisites: - - name: B03_L2A + - name: B03 modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] - - name: B11_L2A + - name: B11 modifiers: [esa_sunz_corrected] conditions: - method: less_equal diff --git a/satpy/etc/readers/msi_safe_l2a.yaml b/satpy/etc/readers/msi_safe_l2a.yaml index 5972f6b0f2..6a6849ba3c 100644 --- a/satpy/etc/readers/msi_safe_l2a.yaml +++ b/satpy/etc/readers/msi_safe_l2a.yaml @@ -48,8 +48,8 @@ file_types: file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSIL2A.xml'] datasets: - B01_L2A: - name: B01_L2A + B01: + name: B01 sensor: msi wavelength: [0.415, 0.443, 0.470] modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] @@ -67,8 +67,8 @@ datasets: standard_name: counts units: "1" - B02_L2A: - name: B02_L2A + B02: + name: B02 sensor: msi wavelength: [0.440, 0.490, 0.540] modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] @@ -87,8 +87,8 @@ datasets: standard_name: counts units: "1" - B03_L2A: - name: B03_L2A + B03: + name: B03 sensor: msi wavelength: [0.540, 0.560, 0.580] modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] @@ -107,8 +107,8 @@ datasets: standard_name: counts units: "1" - B04_L2A: - name: B04_L2A + B04: + name: B04 sensor: msi wavelength: [0.645, 0.665, 0.685] modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] @@ -127,8 +127,8 @@ datasets: standard_name: counts units: "1" - B05_L2A: - name: B05_L2A + B05: + name: B05 sensor: msi wavelength: [0.695, 0.705, 0.715] modifiers: [esa_sunz_corrected] @@ -146,8 +146,8 @@ datasets: standard_name: counts units: "1" - B06_L2A: - name: B06_L2A + B06: + name: B06 sensor: msi wavelength: [0.731, 0.740, 0.749] modifiers: [esa_sunz_corrected] @@ -165,8 +165,8 @@ datasets: standard_name: counts units: "1" - B07_L2A: - name: B07_L2A + B07: + name: B07 sensor: msi wavelength: [0.764, 0.783, 0.802] modifiers: [esa_sunz_corrected] @@ -184,8 +184,8 @@ datasets: standard_name: counts units: "1" - B08_L2A: - name: B08_L2A + B08: + name: B08 sensor: msi wavelength: [0.780, 0.842, 0.905] modifiers: [esa_sunz_corrected] @@ -202,8 +202,8 
@@ datasets: standard_name: counts units: "1" - B8A_L2A: - name: B8A_L2A + B8A: + name: B8A sensor: msi wavelength: [0.855, 0.865, 0.875] modifiers: [esa_sunz_corrected] @@ -221,8 +221,8 @@ datasets: standard_name: counts units: "1" - B09_L2A: - name: B09_L2A + B09: + name: B09 sensor: msi wavelength: [0.935, 0.945, 0.955] modifiers: [esa_sunz_corrected] @@ -239,8 +239,8 @@ datasets: standard_name: counts units: "1" - B11_L2A: - name: B11_L2A + B11: + name: B11 sensor: msi wavelength: [1.565, 1.610, 1.655] modifiers: [esa_sunz_corrected] @@ -258,8 +258,8 @@ datasets: standard_name: counts units: "1" - B12_L2A: - name: B12_L2A + B12: + name: B12 sensor: msi wavelength: [2.100, 2.190, 2.280] modifiers: [esa_sunz_corrected] @@ -277,8 +277,8 @@ datasets: standard_name: counts units: "1" - AOT_L2A: - name: AOT_L2A + AOT: + name: AOT sensor: msi resolution: 10: {file_type: l2a_safe_granule_10m} @@ -292,8 +292,8 @@ datasets: standard_name: counts units: "1" - WVP_L2A: - name: WVP_L2A + WVP: + name: WVP sensor: msi resolution: 10: {file_type: l2a_safe_granule_10m} @@ -307,8 +307,8 @@ datasets: standard_name: counts units: "1" - SCL_L2A: - name: SCL_L2A + SCL: + name: SCL sensor: msi resolution: 20: {file_type: l2a_safe_granule_20m} @@ -318,27 +318,27 @@ datasets: standard_name: counts units: "1" - solar_zenith_angle_l2a: - name: solar_zenith_angle_l2a + solar_zenith_angle: + name: solar_zenith_angle resolution: [10, 20, 60] file_type: l2a_safe_tile_metadata xml_tag: Sun_Angles_Grid/Zenith - solar_azimuth_angle_l2a: - name: solar_azimuth_angle_l2a + solar_azimuth_angle: + name: solar_azimuth_angle resolution: [10, 20, 60] file_type: l2a_safe_tile_metadata xml_tag: Sun_Angles_Grid/Azimuth - satellite_azimuth_angle_l2a: - name: satellite_azimuth_angle_l2a + satellite_azimuth_angle: + name: satellite_azimuth_angle resolution: [10, 20, 60] file_type: l2a_safe_tile_metadata xml_tag: Viewing_Incidence_Angles_Grids xml_item: Azimuth - satellite_zenith_angle_l2a: - name: satellite_zenith_angle_l2a + satellite_zenith_angle: + name: satellite_zenith_angle resolution: [10, 20, 60] file_type: l2a_safe_tile_metadata xml_tag: Viewing_Incidence_Angles_Grids diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index d36350f7ab..b041436a74 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -32,9 +32,6 @@ https://sentinels.copernicus.eu/documents/247904/685211/S2-PDGS-TAS-DI-PSD-V14.9.pdf/3d3b6c9c-4334-dcc4-3aa7-f7c0deffbaf7?t=1643013091529 -Please note: for L2A datasets, the band name has been fixed with a "_L2A" suffix. Do not change it in the YAML file or -the reader can't recogonize it and nothing will be loaded. 
- """ import logging @@ -77,12 +74,8 @@ def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, mask_s def get_dataset(self, key, info): """Load a dataset.""" - if self.process_level == "L1C": - if self._channel != key["name"]: - return - else: - if self._channel + "_L2A" != key["name"]: - return + if self._channel != key["name"]: + return logger.debug("Reading %s.", key["name"]) @@ -118,12 +111,8 @@ def end_time(self): def get_area_def(self, dsid): """Get the area def.""" - if self.process_level == "L1C": - if self._channel != dsid["name"]: - return - else: - if self._channel + "_L2A" != dsid["name"]: - return + if self._channel != dsid["name"]: + return return self._tile_mda.get_area_def(dsid) @@ -334,11 +323,9 @@ def interpolate_angles(self, angles, resolution): def _get_coarse_dataset(self, key, info): """Get the coarse dataset refered to by `key` from the XML data.""" angles = self.root.find(".//Tile_Angles") - if key["name"] in ["solar_zenith_angle", "solar_azimuth_angle", - "solar_zenith_angle_l2a", "solar_azimuth_angle_l2a"]: + if key["name"] in ["solar_zenith_angle", "solar_azimuth_angle"]: angles = self._get_solar_angles(angles, info) - elif key["name"] in ["satellite_zenith_angle", "satellite_azimuth_angle", - "satellite_zenith_angle_l2a", "satellite_azimuth_angle_l2a"]: + elif key["name"] in ["satellite_zenith_angle", "satellite_azimuth_angle"]: angles = self._get_satellite_angles(angles, info) else: angles = None diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 46a416977b..84828f4ecf 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -1511,7 +1511,7 @@ class TestTileXML: 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837]]), - ("L2A", "solar_zenith_angle_l2a", ("Sun_Angles_Grid", "Zenith"), + ("L2A", "solar_zenith_angle", ("Sun_Angles_Grid", "Zenith"), [[39.8824, 39.83721367, 39.79230847, 39.74758442, 39.7030415, 39.65867687, 39.61455566, 39.57061558, 39.52685664, 39.48331372], [39.78150175, 39.73629896, 39.69128852, 39.64643679, 39.6018404, @@ -1643,16 +1643,16 @@ def setup_method(self): @pytest.mark.parametrize(("mask_saturated", "dataset_name", "calibration", "expected"), [ - (False, "B01_L2A", "reflectance", [[np.nan, -9.99], [645.34, 645.35]]), - (True, "B02_L2A", "radiance", [[np.nan, -265.970568], [17181.325973, np.inf]]), - (True, "B03_L2A", "counts", [[np.nan, 1], [65534, np.inf]]), - (False, "AOT_L2A", "aerosol_thickness", [[np.nan, 0.001], [65.534, 65.535]]), - (True, "WVP_L2A", "water_vapor", [[np.nan, 0.001], [65.534, np.inf]]), - (True, "SNOW_L2A", "water_vapor", None), + (False, "B01", "reflectance", [[np.nan, -9.99], [645.34, 645.35]]), + (True, "B02", "radiance", [[np.nan, -265.970568], [17181.325973, np.inf]]), + (True, "B03", "counts", [[np.nan, 1], [65534, np.inf]]), + (False, "AOT", "aerosol_thickness", [[np.nan, 0.001], [65.534, 65.535]]), + (True, "WVP", "water_vapor", [[np.nan, 0.001], [65.534, np.inf]]), + (True, "SNOW", "water_vapor", None), ]) def test_calibration_and_masking(self, mask_saturated, dataset_name, calibration, expected): """Test that saturated is masked with inf when requested and that calibration is performed.""" - jp2_fh = jp2_builder("L2A", dataset_name.replace("_L2A", ""), mask_saturated) + jp2_fh = jp2_builder("L2A", dataset_name, mask_saturated) with mock.patch("xarray.open_dataset", return_value=self.fake_data): res = 
jp2_fh.get_dataset(make_alt_dataid(name=dataset_name, calibration=calibration), info=dict()) @@ -1664,7 +1664,7 @@ def test_calibration_and_masking(self, mask_saturated, dataset_name, calibration @pytest.mark.parametrize(("process_level", "band_name", "dataset_name"), [ ("L1C", "B01", "B03"), - ("L2A", "B02", "B03_L2A"), + ("L2A", "B02", "B03"), ]) def test_filename_dsname_mismatch(self, process_level, band_name, dataset_name): """Test when dataset name and file band name mismatch, the data and its area definition should both be None.""" From ab55c4a63cddb9bda1eae5093269baeb50c92519 Mon Sep 17 00:00:00 2001 From: yukaribbba Date: Wed, 5 Jun 2024 23:05:35 +0800 Subject: [PATCH 1406/1416] Update msi_safe_l2a.yaml --- satpy/etc/readers/msi_safe_l2a.yaml | 48 ++++++++++++++--------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/satpy/etc/readers/msi_safe_l2a.yaml b/satpy/etc/readers/msi_safe_l2a.yaml index 6a6849ba3c..f4c6e4221a 100644 --- a/satpy/etc/readers/msi_safe_l2a.yaml +++ b/satpy/etc/readers/msi_safe_l2a.yaml @@ -58,10 +58,10 @@ datasets: 60: {file_type: l2a_safe_granule_60m} calibration: reflectance: - standard_name: toa_bidirectional_reflectance + standard_name: boa_bidirectional_reflectance units: "%" radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength + standard_name: boa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts @@ -78,10 +78,10 @@ datasets: 60: {file_type: l2a_safe_granule_60m} calibration: reflectance: - standard_name: toa_bidirectional_reflectance + standard_name: boa_bidirectional_reflectance units: "%" radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength + standard_name: boa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts @@ -98,10 +98,10 @@ datasets: 60: {file_type: l2a_safe_granule_60m} calibration: reflectance: - standard_name: toa_bidirectional_reflectance + standard_name: boa_bidirectional_reflectance units: "%" radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength + standard_name: boa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts @@ -118,10 +118,10 @@ datasets: 60: {file_type: l2a_safe_granule_60m} calibration: reflectance: - standard_name: toa_bidirectional_reflectance + standard_name: boa_bidirectional_reflectance units: "%" radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength + standard_name: boa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts @@ -137,10 +137,10 @@ datasets: 60: {file_type: l2a_safe_granule_60m} calibration: reflectance: - standard_name: toa_bidirectional_reflectance + standard_name: boa_bidirectional_reflectance units: "%" radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength + standard_name: boa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts @@ -156,10 +156,10 @@ datasets: 60: {file_type: l2a_safe_granule_60m} calibration: reflectance: - standard_name: toa_bidirectional_reflectance + standard_name: boa_bidirectional_reflectance units: "%" radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength + standard_name: boa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts @@ -175,10 +175,10 @@ datasets: 60: {file_type: l2a_safe_granule_60m} calibration: reflectance: - standard_name: toa_bidirectional_reflectance + standard_name: boa_bidirectional_reflectance units: "%" radiance: - 
standard_name: toa_outgoing_radiance_per_unit_wavelength + standard_name: boa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts @@ -193,10 +193,10 @@ datasets: 10: {file_type: l2a_safe_granule_10m} calibration: reflectance: - standard_name: toa_bidirectional_reflectance + standard_name: boa_bidirectional_reflectance units: "%" radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength + standard_name: boa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts @@ -212,10 +212,10 @@ datasets: 60: {file_type: l2a_safe_granule_60m} calibration: reflectance: - standard_name: toa_bidirectional_reflectance + standard_name: boa_bidirectional_reflectance units: "%" radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength + standard_name: boa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts @@ -230,10 +230,10 @@ datasets: 60: {file_type: l2a_safe_granule_60m} calibration: reflectance: - standard_name: toa_bidirectional_reflectance + standard_name: boa_bidirectional_reflectance units: "%" radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength + standard_name: boa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts @@ -249,10 +249,10 @@ datasets: 60: {file_type: l2a_safe_granule_60m} calibration: reflectance: - standard_name: toa_bidirectional_reflectance + standard_name: boa_bidirectional_reflectance units: "%" radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength + standard_name: boa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts @@ -268,10 +268,10 @@ datasets: 60: {file_type: l2a_safe_granule_60m} calibration: reflectance: - standard_name: toa_bidirectional_reflectance + standard_name: boa_bidirectional_reflectance units: "%" radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength + standard_name: boa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts From 23f6ae5fb24926d14c59efd15b49be82f8de36eb Mon Sep 17 00:00:00 2001 From: BENR0 Date: Wed, 12 Jun 2024 11:14:23 +0200 Subject: [PATCH 1407/1416] fix: error when projection is EPSG code --- doc/source/conf.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index b2d978a3a5..35c8ceb762 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -90,8 +90,11 @@ def __getattr__(cls, name): widths=[45, 60, 10], class_name="area-table")] for aname, params in area_dict.items(): + projection = params.get("projection") + projection_type = projection.get("proj") if isinstance(projection, dict) else projection + area_table.append(rst_table_row([f"`{aname}`_", params.get("description", ""), - params.get("projection").get("proj")])) + projection_type])) with open("area_def_list.rst", mode="w") as f: f.write("".join(area_table)) From ba46aca47e461a481288cbe7db08f7a8c9c722e1 Mon Sep 17 00:00:00 2001 From: BENR0 Date: Wed, 12 Jun 2024 13:24:18 +0200 Subject: [PATCH 1408/1416] fix: links for areas without html repr. 
---
 doc/source/_static/main.js |  7 ++++++-
 doc/source/conf.py         | 15 ++++++++++-----
 2 files changed, 16 insertions(+), 6 deletions(-)

diff --git a/doc/source/_static/main.js b/doc/source/_static/main.js
index bf988ff1c7..f2da99d4e7 100644
--- a/doc/source/_static/main.js
+++ b/doc/source/_static/main.js
@@ -12,6 +12,11 @@ $(document).ready( function () {
     $('table.area-table').DataTable( {
         "paging": true,
         "pageLength": 15,
-        "dom": 'lfitp'
+        "layout": {
+            'topStart': 'info',
+            'topEnd': 'search',
+            'bottomEnd': 'paging',
+            'bottomStart': null
+        }
     } );
 } );
diff --git a/doc/source/conf.py b/doc/source/conf.py
index 35c8ceb762..83ffc61b78 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -24,7 +24,11 @@
 sys.path.append(os.path.abspath("../../"))
 sys.path.append(os.path.abspath(os.path.dirname(__file__)))
 
-from pyresample.area_config import _read_yaml_area_file_content, generate_area_def_rst_list  # noqa: E402
+from pyresample.area_config import (  # noqa: E402
+    _create_area_def_from_dict,
+    _read_yaml_area_file_content,
+    generate_area_def_rst_list,
+)
 from reader_table import generate_reader_table, rst_table_header, rst_table_row  # noqa: E402
 
 import satpy  # noqa: E402
@@ -90,11 +94,12 @@ def __getattr__(cls, name):
                                widths=[45, 60, 10], class_name="area-table")]
 
 for aname, params in area_dict.items():
-    projection = params.get("projection")
-    projection_type = projection.get("proj") if isinstance(projection, dict) else projection
+    area = _create_area_def_from_dict(aname, params)
+    if not hasattr(area, "_repr_html_"):
+        continue
 
-    area_table.append(rst_table_row([f"`{aname}`_", params.get("description", ""),
-                                     projection_type]))
+    area_table.append(rst_table_row([f"`{aname}`_", area.description,
+                                     area.proj_dict.get("proj")]))
 
 with open("area_def_list.rst", mode="w") as f:
     f.write("".join(area_table))

From fbb92b143bc49d79bcba61a494721a3f8b30b5da Mon Sep 17 00:00:00 2001
From: BENR0
Date: Wed, 12 Jun 2024 14:23:23 +0200
Subject: [PATCH 1409/1416] fix: relative include

---
 satpy/resample.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/satpy/resample.py b/satpy/resample.py
index a592777059..50de0723f4 100644
--- a/satpy/resample.py
+++ b/satpy/resample.py
@@ -140,7 +140,7 @@
 Area definitions included in Satpy
 ----------------------------------
 
-.. include:: area_def_list.rst
+.. include:: /area_def_list.rst
 
 """
 import hashlib

From 79d95b6cc86dead042b89fe13827c059580d813a Mon Sep 17 00:00:00 2001
From: BENR0
Date: Wed, 12 Jun 2024 14:43:11 +0200
Subject: [PATCH 1410/1416] add cartopy to environment

---
 doc/rtd_environment.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/doc/rtd_environment.yml b/doc/rtd_environment.yml
index 18173b2674..abd8add616 100644
--- a/doc/rtd_environment.yml
+++ b/doc/rtd_environment.yml
@@ -31,6 +31,7 @@ dependencies:
   - xarray
   - zarr
   - xarray-datatree
+  - cartopy
   - pip:
     - graphviz
     - pytest-lazy-fixtures

From f9142b00e1a0744e6d6a5cfd866fbce6baa94ce5 Mon Sep 17 00:00:00 2001
From: BENR0
Date: Wed, 12 Jun 2024 15:17:47 +0200
Subject: [PATCH 1411/1416] fix: column widths

---
 doc/source/conf.py         | 12 ++++++++----
 doc/source/reader_table.py |  2 +-
 2 files changed, 9 insertions(+), 5 deletions(-)

diff --git a/doc/source/conf.py b/doc/source/conf.py
index 83ffc61b78..30cbe4e81e 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -91,9 +91,11 @@ def __getattr__(cls, name):
 area_dict = _read_yaml_area_file_content(area_file)
 
 area_table = [rst_table_header("Area Definitions", header=["Name", "Description", "Projection"],
-                               widths=[45, 60, 10], class_name="area-table")]
+                               widths="auto", class_name="area-table")]
 
-for aname, params in area_dict.items():
+for i, (aname, params) in enumerate(area_dict.items()):
+    if i == 18:
+        break
     area = _create_area_def_from_dict(aname, params)
     if not hasattr(area, "_repr_html_"):
         continue
@@ -220,11 +222,13 @@ def __getattr__(cls, name):
 
 html_css_files = [
     "theme_overrides.css",  # override wide tables in RTD theme
-    "https://cdn.datatables.net/v/dt/dt-2.0.0/datatables.min.css",
+    # "https://cdn.datatables.net/v/dt/dt-2.0.0/datatables.min.css",
+    "https://cdn.datatables.net/v/dt/dt-2.0.8/r-3.0.2/datatables.min.css"
 ]
 
 html_js_files = [
-    "https://cdn.datatables.net/v/dt/dt-2.0.0/datatables.min.js",
+    # "https://cdn.datatables.net/v/dt/dt-2.0.0/datatables.min.js",
+    "https://cdn.datatables.net/v/dt/dt-2.0.8/r-3.0.2/datatables.min.js",
     "main.js",
 ]

diff --git a/doc/source/reader_table.py b/doc/source/reader_table.py
index 4b6ebb66bd..0caf2c4c34 100644
--- a/doc/source/reader_table.py
+++ b/doc/source/reader_table.py
@@ -76,7 +76,7 @@ def generate_reader_table():
     str
     """
     table = [rst_table_header("Satpy Readers", header=["Description", "Reader name", "Status", "fsspec support"],
-                              widths=[45, 25, 30, 30])]
+                              widths="auto")]
 
     reader_configs = available_readers(as_dict=True, yaml_loader=BaseLoader)
     for rc in reader_configs:

From 269315ca4ffb61b33ef6a5c813e1a5e1ffd278c9 Mon Sep 17 00:00:00 2001
From: BENR0
Date: Wed, 12 Jun 2024 15:25:22 +0200
Subject: [PATCH 1412/1416] remove testing code

---
 doc/source/conf.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/doc/source/conf.py b/doc/source/conf.py
index 30cbe4e81e..62156e9760 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -93,9 +93,7 @@ def __getattr__(cls, name):
 area_table = [rst_table_header("Area Definitions", header=["Name", "Description", "Projection"],
                                widths="auto", class_name="area-table")]
 
-for i, (aname, params) in enumerate(area_dict.items()):
-    if i == 18:
-        break
+for aname, params in area_dict.items():
     area = _create_area_def_from_dict(aname, params)
     if not hasattr(area, "_repr_html_"):
         continue

From 4d5b3f03a0047f80d1e9d2b9f7cd09c27e0abcb7 Mon Sep 17 00:00:00 2001
From: David Hoese
Date: Wed, 12 Jun 2024 10:36:06 -0500
Subject: [PATCH 1413/1416] Add missing hatchling dependency to unstable CI

---
 .github/workflows/ci.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 8955a154f1..3347414fec 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -71,7 +71,7 @@ jobs:
         # compatibility issues with numpy 2. When conda-forge has numpy 2 available
         # this shouldn't be needed.
         run: |
-          python -m pip install versioneer extension-helpers setuptools-scm configobj pkgconfig
+          python -m pip install versioneer extension-helpers setuptools-scm configobj pkgconfig hatchling
           python -m pip install \
             --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple/ \
             --trusted-host pypi.anaconda.org \

From 8895c4f7428fc32971a09e4ad2f22fa63a532d29 Mon Sep 17 00:00:00 2001
From: David Hoese
Date: Wed, 12 Jun 2024 10:41:40 -0500
Subject: [PATCH 1414/1416] Add missing hatch-vcs unstable CI dependency

---
 .github/workflows/ci.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 3347414fec..a046f911a1 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -71,7 +71,7 @@ jobs:
         # compatibility issues with numpy 2. When conda-forge has numpy 2 available
         # this shouldn't be needed.
         run: |
-          python -m pip install versioneer extension-helpers setuptools-scm configobj pkgconfig hatchling
+          python -m pip install versioneer extension-helpers setuptools-scm configobj pkgconfig hatchling hatch-vcs
           python -m pip install \
             --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple/ \
             --trusted-host pypi.anaconda.org \

From 3c7938cb602269a50f41808a1e211df176799967 Mon Sep 17 00:00:00 2001
From: David Hoese
Date: Wed, 12 Jun 2024 11:00:34 -0500
Subject: [PATCH 1415/1416] Remove zarr unstable dependency install

It is pure python and rarely causes issues. It also has many
dependencies not used by anything else.

---
 .github/workflows/ci.yaml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index a046f911a1..ea685ddb7b 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -93,7 +93,6 @@ jobs:
             git+https://github.com/Unidata/netcdf4-python \
             git+https://github.com/dask/dask \
             git+https://github.com/dask/distributed \
-            git+https://github.com/zarr-developers/zarr \
             git+https://github.com/Unidata/cftime \
             git+https://github.com/rasterio/rasterio \
             git+https://github.com/pydata/bottleneck \

From cbb4bbd5484f030bb2f5a4eda3e6c5760c3c614a Mon Sep 17 00:00:00 2001
From: Gerrit Holl
Date: Thu, 13 Jun 2024 17:08:29 +0200
Subject: [PATCH 1416/1416] fix typo in docstring in fci_l1c_nc.py

Fix typo in a docstring in the FCI L1C NC reader

---
 satpy/readers/fci_l1c_nc.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py
index 83e58d73a5..7954d4866d 100644
--- a/satpy/readers/fci_l1c_nc.py
+++ b/satpy/readers/fci_l1c_nc.py
@@ -222,7 +222,7 @@ def __init__(self, filename, filename_info, filetype_info):
     def rc_period_min(self):
         """Get nominal repeat cycle duration.
 
-        As RSS is not yet implemeted and error will be raised if RSS are to be read
+        As RSS is not yet implemented, an error will be raised if an attempt is made to read RSS data.
         """
         if self.filename_info["coverage"] not in ["FD","AF"]:
             raise NotImplementedError(f"coverage for {self.filename_info['coverage']} not supported by this reader")
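
The guard in the hunk above is the entire behaviour of `rc_period_min`: any coverage other than full disc (FD) or Africa (AF) raises. Below is a minimal, self-contained Python sketch of that logic; the coverage check and the error message mirror the diff, while the wrapper class and the 10-minute return value are illustrative assumptions, not satpy API.

# Hedged sketch of the coverage guard from PATCH 1416; the wrapper class
# and the returned value are assumptions added so the snippet runs alone.
class RcPeriodSketch:
    def __init__(self, coverage):
        # The real file handler parses filename_info from the FCI file name.
        self.filename_info = {"coverage": coverage}

    @property
    def rc_period_min(self):
        """Get nominal repeat cycle duration in minutes."""
        if self.filename_info["coverage"] not in ["FD", "AF"]:
            raise NotImplementedError(
                f"coverage for {self.filename_info['coverage']} not supported by this reader")
        return 10  # assumed nominal repeat-cycle length; illustrative only


print(RcPeriodSketch("FD").rc_period_min)  # prints 10
# RcPeriodSketch("Q4").rc_period_min raises NotImplementedError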
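
Two earlier patches in this section also benefit from short illustrations. First, PATCH 1403 renames the MERSI test helper to `_assert_bands_mda_as_exp`; only its first two lines appear in the diff, so the following reconstruction is a guess based on how the tests call it with `(calibration, unit, shape)` tuples.

# Hypothetical body for the renamed helper; the assertions are assumptions
# inferred from call sites such as
# _assert_bands_mda_as_exp(res, self.vis_250_bands, ("reflectance", "%", (2 * 40, 2048 * 2))).
def _assert_bands_mda_as_exp(res, band_list, exp_result):
    """Assert that each band's calibration, units and shape match expectations."""
    exp_cal = exp_result[0]
    exp_unit = exp_result[1]
    exp_shape = exp_result[2]
    for band in band_list:
        assert res[band].attrs["calibration"] == exp_cal
        assert res[band].attrs["units"] == exp_unit
        assert res[band].shape == exp_shape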
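
Second, PATCH 1405 drops the `_L2A` suffix from MSI L2A dataset names, while the composite names in `msi.yaml` keep theirs. A usage sketch under those assumptions; the reader name `msi_safe_l2a` is inferred from the YAML file name, and the granule path is invented for illustration.

# Hypothetical loading example after the suffix removal.
from satpy import Scene

scn = Scene(filenames=["/data/S2A_MSIL2A_example.SAFE/MTD_MSIL2A.xml"],
            reader="msi_safe_l2a")
# Bands are now requested by their plain names ("B04", not "B04_L2A");
# composite names such as "true_color_l2a" are unchanged.
scn.load(["B04", "SCL", "true_color_l2a"])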